/* Emit RTL for the GNU C-Compiler expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains the functions `gen_rtx', `gen_reg_rtx'
   and `gen_label_rtx' that are the usual ways of creating rtl
   expressions for most purposes.

   It also has the functions for creating insns and linking
   them in the doubly-linked chain.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines use `gen_rtx' to make
   the individual rtx's of the pattern; what is machine dependent
   is the kind of rtx's they make and what arguments they use.  */

#include "config.h"
#include "system.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "real.h"
#include "obstack.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"

/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */


/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static int label_num = 1;

/* Highest label number in current function.
   Zero means use the value of label_num instead.
   This is nonzero only when belatedly compiling an inline function.  */

static int last_label_num;

/* Value label_num had when set_new_first_and_last_label_number was called.
   If label_num has not changed since then, last_label_num is valid.  */

static int base_label_num;

/* Nonzero means do not generate NOTEs for source line numbers.  */

static int no_line_numbers;

/* Commonly used rtx's, so that we only need space for one copy.
   These are initialized once for the entire compilation.
   All of these are unique; no other rtx-object will be equal to any
   of these.  */

rtx global_rtl[GR_MAX];

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;

/* All references to the following fixed hard registers go through
   these unique rtl objects.  On machines where the frame-pointer and
   arg-pointer are the same register, they use the same unique object.

   After register allocation, other rtl objects which used to be pseudo-regs
   may be clobbered to refer to the frame-pointer register.
   But references that were originally to the frame-pointer can be
   distinguished from the others because they contain frame_pointer_rtx.

   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
   tricky: until register elimination has taken place hard_frame_pointer_rtx
   should be used if it is being set, and frame_pointer_rtx otherwise.  After
   register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are the same (most machines), these
   two rtxs are the same.

   In an inline procedure, the stack and frame pointer rtxs may not be
   used for anything else.  */
rtx struct_value_rtx;		/* (REG:Pmode STRUCT_VALUE_REGNUM) */
rtx struct_value_incoming_rtx;	/* (REG:Pmode STRUCT_VALUE_INCOMING_REGNUM) */
rtx static_chain_rtx;		/* (REG:Pmode STATIC_CHAIN_REGNUM) */
rtx static_chain_incoming_rtx;	/* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
rtx pic_offset_table_rtx;	/* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */

/* This is used to implement __builtin_return_address for some machines.
   See for instance the MIPS port.  */
rtx return_address_pointer_rtx;	/* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static htab_t mem_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static htab_t const_double_htab;

#define first_insn (cfun->emit->x_first_insn)
#define last_insn (cfun->emit->x_last_insn)
#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
#define last_linenum (cfun->emit->x_last_linenum)
#define last_filename (cfun->emit->x_last_filename)
#define first_label_num (cfun->emit->x_first_label_num)

static rtx make_jump_insn_raw		PARAMS ((rtx));
static rtx make_call_insn_raw		PARAMS ((rtx));
static rtx find_line_note		PARAMS ((rtx));
static void mark_sequence_stack		PARAMS ((struct sequence_stack *));
static rtx change_address_1		PARAMS ((rtx, enum machine_mode, rtx,
						 int));
static void unshare_all_rtl_1		PARAMS ((rtx));
static void unshare_all_decls		PARAMS ((tree));
static void reset_used_decls		PARAMS ((tree));
static void mark_label_nuses		PARAMS ((rtx));
static hashval_t const_int_htab_hash	PARAMS ((const void *));
static int const_int_htab_eq		PARAMS ((const void *,
						 const void *));
static hashval_t const_double_htab_hash	PARAMS ((const void *));
static int const_double_htab_eq		PARAMS ((const void *,
						 const void *));
static rtx lookup_const_double		PARAMS ((rtx));
static hashval_t mem_attrs_htab_hash	PARAMS ((const void *));
static int mem_attrs_htab_eq		PARAMS ((const void *,
						 const void *));
static void mem_attrs_mark		PARAMS ((const void *));
static mem_attrs *get_mem_attrs		PARAMS ((HOST_WIDE_INT, tree, rtx,
						 rtx, unsigned int,
						 enum machine_mode));
static tree component_ref_for_mem_expr	PARAMS ((tree));
static rtx gen_const_vector_0		PARAMS ((enum machine_mode));

/* Probability of the conditional branch currently processed by try_split.
   Set to -1 otherwise.  */
int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (x)
     const void *x;
{
  return (hashval_t) INTVAL ((struct rtx_def *) x);
}

/* Returns non-zero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (x, y)
     const void *x;
     const void *y;
{
  return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (x)
     const void *x;
{
  hashval_t h = 0;
  size_t i;
  rtx value = (rtx) x;

  for (i = 0; i < sizeof (CONST_DOUBLE_FORMAT) - 1; i++)
    h ^= XWINT (value, i);
  return h;
}

/* Returns non-zero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (also really a CONST_DOUBLE).  */
static int
const_double_htab_eq (x, y)
     const void *x;
     const void *y;
{
  rtx a = (rtx) x, b = (rtx) y;
  size_t i;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  for (i = 0; i < sizeof (CONST_DOUBLE_FORMAT) - 1; i++)
    if (XWINT (a, i) != XWINT (b, i))
      return 0;

  return 1;
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (x)
     const void *x;
{
  mem_attrs *p = (mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
	  ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
	  ^ (size_t) p->expr);
}

/* Returns non-zero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (x, y)
     const void *x;
     const void *y;
{
  mem_attrs *p = (mem_attrs *) x;
  mem_attrs *q = (mem_attrs *) y;

  return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
	  && p->size == q->size && p->align == q->align);
}

/* This routine is called when we determine that we need a mem_attrs entry.
   It marks the associated decl and RTL as being used, if present.  */

static void
mem_attrs_mark (x)
     const void *x;
{
  mem_attrs *p = (mem_attrs *) x;

  if (p->expr)
    ggc_mark_tree (p->expr);

  if (p->offset)
    ggc_mark_rtx (p->offset);

  if (p->size)
    ggc_mark_rtx (p->size);
}

/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (alias, expr, offset, size, align, mode)
     HOST_WIDE_INT alias;
     tree expr;
     rtx offset;
     rtx size;
     unsigned int align;
     enum machine_mode mode;
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (alias == 0 && expr == 0 && offset == 0
      && (size == 0
	  || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (align == BITS_PER_UNIT
	  || (STRICT_ALIGNMENT
	      && mode != BLKmode && align == GET_MODE_ALIGNMENT (mode))))
    return 0;

  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (mem_attrs));
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return *slot;
}
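
/* For illustration: get_mem_attrs is the single constructor for MEM_ATTRS
   values, so MEMs with identical attributes share one mem_attrs record and
   can be compared by pointer.  A hypothetical caller recording a known
   4-byte size and 32-bit alignment might write:

     MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
				      MEM_OFFSET (mem), GEN_INT (4),
				      32, GET_MODE (mem));

   When every argument matches the defaults for the mode, the function
   returns 0 and the MEM carries no attribute record at all.  */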

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (mode, regno)
     enum machine_mode mode;
     int regno;
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (mode, arg)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     HOST_WIDE_INT arg;
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
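
/* For illustration: because CONST_INTs are interned above, equal values
   always yield the same rtx, so callers may compare them with ==, e.g.

     rtx a = gen_rtx_CONST_INT (VOIDmode, 40000);
     rtx b = gen_rtx_CONST_INT (VOIDmode, 40000);

   Here a == b holds even though 40000 exceeds MAX_SAVED_CONST_INT,
   because both calls resolve to the same hash-table slot.  */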

rtx
gen_int_mode (c, mode)
     HOST_WIDE_INT c;
     enum machine_mode mode;
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
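
/* For illustration, assuming QImode is 8 bits wide:

     rtx x = gen_int_mode (0x1ff, QImode);

   trunc_int_for_mode sign-extends from bit 7, so x is (const_int -1),
   the shared constm1_rtx, whereas a bare GEN_INT (0x1ff) would produce
   a constant that is not a valid QImode value.  */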

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (real)
     rtx real;
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (value, mode)
     REAL_VALUE_TYPE value;
     enum machine_mode mode;
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE));

  return lookup_const_double (real);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (i0, i1, mode)
     HOST_WIDE_INT i0, i1;
     enum machine_mode mode;
{
  rtx value;
  unsigned int i;

  if (mode != VOIDmode)
    {
      int width;
      if (GET_MODE_CLASS (mode) != MODE_INT
	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
	abort ();

      /* We clear out all bits that don't belong in MODE, unless they and
	 our sign bit are all one.  So we get either a reasonable negative
	 value or a reasonable unsigned value for this mode.  */
      width = GET_MODE_BITSIZE (mode);
      if (width < HOST_BITS_PER_WIDE_INT
	  && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
	      != ((HOST_WIDE_INT) (-1) << (width - 1))))
	i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
      else if (width == HOST_BITS_PER_WIDE_INT
	       && ! (i1 == ~0 && i0 < 0))
	i1 = 0;
      else if (width > 2 * HOST_BITS_PER_WIDE_INT)
	/* We cannot represent this value as a constant.  */
	abort ();

      /* If this would be an entire word for the target, but is not for
	 the host, then sign-extend on the host so that the number will
	 look the same way on the host that it would on the target.

	 For example, when building a 64 bit alpha hosted 32 bit sparc
	 targeted compiler, then we want the 32 bit unsigned value -1 to be
	 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
	 The latter confuses the sparc backend.  */

      if (width < HOST_BITS_PER_WIDE_INT
	  && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
	i0 |= ((HOST_WIDE_INT) (-1) << width);

      /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
	 CONST_INT.

	 ??? Strictly speaking, this is wrong if we create a CONST_INT for
	 a large unsigned constant with the size of MODE being
	 HOST_BITS_PER_WIDE_INT and later try to interpret that constant
	 in a wider mode.  In that case we will mis-interpret it as a
	 negative number.

	 Unfortunately, the only alternative is to make a CONST_DOUBLE for
	 any constant in any mode if it is an unsigned constant larger
	 than the maximum signed integer in an int on the host.  However,
	 doing this will break everyone that always expects to see a
	 CONST_INT for SImode and smaller.

	 We have always been making CONST_INTs in this case, so nothing
	 new is being broken.  */

      if (width <= HOST_BITS_PER_WIDE_INT)
	i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
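
/* For illustration: on a host whose HOST_WIDE_INT is 64 bits, the TImode
   value 2**64 + 7 would be requested as

     rtx x = immed_double_const (7, 1, TImode);

   and, since the high word is nonzero, comes back as a VOIDmode
   CONST_DOUBLE carrying both words.  immed_double_const (7, 0, TImode)
   fits in one word and returns the shared (const_int 7) instead.  */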

rtx
gen_rtx_REG (mode, regno)
     enum machine_mode mode;
     unsigned int regno;
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM)
	return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM)
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == PIC_OFFSET_TABLE_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

  return gen_raw_REG (mode, regno);
}

rtx
gen_rtx_MEM (mode, addr)
     enum machine_mode mode;
     rtx addr;
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

rtx
gen_rtx_SUBREG (mode, reg, offset)
     enum machine_mode mode;
     rtx reg;
     int offset;
{
  /* This is the most common failure type.
     Catch it early so we can see who does it.  */
  if ((offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  /* This check isn't usable right now because combine will
     throw arbitrary crap like a CALL into a SUBREG in
     gen_lowpart_for_combine so we must just eat it.  */
#if 0
  /* Check for this too.  */
  if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
    abort ();
#endif
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (mode, reg)
     enum machine_mode mode;
     rtx reg;
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}
\f
/* rtx gen_rtx (code, mode, [element1, ..., elementn])
**
**	    This routine generates an RTX of the size specified by
**	<code>, which is an RTX code.  The RTX structure is initialized
**	from the arguments <element1> through <elementn>, which are
**	interpreted according to the specific RTX type's format.  The
**	special machine mode associated with the rtx (if any) is specified
**	in <mode>.
**
**	    gen_rtx can be invoked in a way which resembles the lisp-like
**	rtx it will generate.  For example, the following rtx structure:
**
**	      (plus:QI (mem:QI (reg:SI 1))
**		       (mem:QI (plus:SI (reg:SI 2) (reg:SI 3))))
**
**	    ...would be generated by the following C code:
**
**		gen_rtx (PLUS, QImode,
**		    gen_rtx (MEM, QImode,
**			gen_rtx (REG, SImode, 1)),
**		    gen_rtx (MEM, QImode,
**			gen_rtx (PLUS, SImode,
**			    gen_rtx (REG, SImode, 2),
**			    gen_rtx (REG, SImode, 3)))),
*/

/*VARARGS2*/
rtx
gen_rtx VPARAMS ((enum rtx_code code, enum machine_mode mode, ...))
{
  int i;		/* Array indices...			*/
  const char *fmt;	/* Current rtx's format...		*/
  rtx rt_val;		/* RTX to return to caller...		*/

  VA_OPEN (p, mode);
  VA_FIXEDARG (p, enum rtx_code, code);
  VA_FIXEDARG (p, enum machine_mode, mode);

  switch (code)
    {
    case CONST_INT:
      rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
      break;

    case CONST_DOUBLE:
      {
	HOST_WIDE_INT arg0 = va_arg (p, HOST_WIDE_INT);
	HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);

	rt_val = immed_double_const (arg0, arg1, mode);
      }
      break;

    case REG:
      rt_val = gen_rtx_REG (mode, va_arg (p, int));
      break;

    case MEM:
      rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
      break;

    default:
      rt_val = rtx_alloc (code);	/* Allocate the storage space.  */
      rt_val->mode = mode;		/* Store the machine mode...  */

      fmt = GET_RTX_FORMAT (code);	/* Find the right format...  */
      for (i = 0; i < GET_RTX_LENGTH (code); i++)
	{
	  switch (*fmt++)
	    {
	    case '0':		/* Unused field.  */
	      break;

	    case 'i':		/* An integer?  */
	      XINT (rt_val, i) = va_arg (p, int);
	      break;

	    case 'w':		/* A wide integer?  */
	      XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
	      break;

	    case 's':		/* A string?  */
	      XSTR (rt_val, i) = va_arg (p, char *);
	      break;

	    case 'e':		/* An expression?  */
	    case 'u':		/* An insn?  Same except when printing.  */
	      XEXP (rt_val, i) = va_arg (p, rtx);
	      break;

	    case 'E':		/* An RTX vector?  */
	      XVEC (rt_val, i) = va_arg (p, rtvec);
	      break;

	    case 'b':		/* A bitmap?  */
	      XBITMAP (rt_val, i) = va_arg (p, bitmap);
	      break;

	    case 't':		/* A tree?  */
	      XTREE (rt_val, i) = va_arg (p, tree);
	      break;

	    default:
	      abort ();
	    }
	}
      break;
    }

  VA_CLOSE (p);
  return rt_val;
}

/* gen_rtvec (n, [rt1, ..., rtn])
**
**	    This routine creates an rtvec and stores within it the
**	pointers to rtx's which are its arguments.
*/

/*VARARGS1*/
rtvec
gen_rtvec VPARAMS ((int n, ...))
{
  int i, save_n;
  rtx *vector;

  VA_OPEN (p, n);
  VA_FIXEDARG (p, int, n);

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...  */

  vector = (rtx *) alloca (n * sizeof (rtx));

  for (i = 0; i < n; i++)
    vector[i] = va_arg (p, rtx);

  /* The definition of VA_* in K&R C causes `n' to go out of scope.  */
  save_n = n;
  VA_CLOSE (p);

  return gen_rtvec_v (save_n, vector);
}

rtvec
gen_rtvec_v (n, argp)
     int n;
     rtx *argp;
{
  int i;
  rtvec rt_val;

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...  */

  rt_val = rtvec_alloc (n);	/* Allocate an rtvec...  */

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (mode)
     enum machine_mode mode;
{
  struct function *f = cfun;
  rtx val;

  /* Don't let anything called after initial flow analysis create new
     registers.  */
  if (no_new_pseudos)
    abort ();

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      int size = GET_MODE_UNIT_SIZE (mode);
      enum machine_mode partmode
	= mode_for_size (size * BITS_PER_UNIT,
			 (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
			  ? MODE_FLOAT : MODE_INT),
			 0);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align, regno_decl, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == f->emit->regno_pointer_align_length)
    {
      int old_size = f->emit->regno_pointer_align_length;
      char *new;
      rtx *new1;
      tree *new2;

      new = xrealloc (f->emit->regno_pointer_align, old_size * 2);
      memset (new + old_size, 0, old_size);
      f->emit->regno_pointer_align = (unsigned char *) new;

      new1 = (rtx *) xrealloc (f->emit->x_regno_reg_rtx,
			       old_size * 2 * sizeof (rtx));
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      new2 = (tree *) xrealloc (f->emit->regno_decl,
				old_size * 2 * sizeof (tree));
      memset (new2 + old_size, 0, old_size * sizeof (tree));
      f->emit->regno_decl = new2;

      f->emit->regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
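
/* For illustration: with generating_concat_p set, a DCmode request above
   yields a CONCAT of two DFmode pseudos rather than one DCmode pseudo:

     rtx c  = gen_reg_rtx (DCmode);
     rtx re = XEXP (c, 0);	... DFmode pseudo, real part
     rtx im = XEXP (c, 1);	... DFmode pseudo, imaginary part

   so the register allocator can place the two halves independently.  */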

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (reg)
     rtx reg;
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else if (GET_CODE (reg) == REG)
    REG_USERVAR_P (reg) = 1;
  else
    abort ();
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (reg, align)
     rtx reg;
     int align;
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num ()
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num ()
{
  if (last_label_num && label_num == base_label_num)
    return last_label_num;
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num ()
{
  return first_label_num;
}
\f
/* Return the final regno of X, which is a SUBREG of a hard
   register.  */
int
subreg_hard_regno (x, check_mode)
     rtx x;
     int check_mode;
{
  enum machine_mode mode = GET_MODE (x);
  unsigned int byte_offset, base_regno, final_regno;
  rtx reg = SUBREG_REG (x);

  /* This is where we attempt to catch illegal subregs
     created by the compiler.  */
  if (GET_CODE (x) != SUBREG
      || GET_CODE (reg) != REG)
    abort ();
  base_regno = REGNO (reg);
  if (base_regno >= FIRST_PSEUDO_REGISTER)
    abort ();
  if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
    abort ();

  /* Catch non-congruent offsets too.  */
  byte_offset = SUBREG_BYTE (x);
  if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  final_regno = subreg_regno (x);

  return final_regno;
}

/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (mode, x)
     enum machine_mode mode;
     rtx x;
{
  int msize = GET_MODE_SIZE (mode);
  int xsize = GET_MODE_SIZE (GET_MODE (x));
  int offset = 0;

  if (GET_MODE (x) == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if (GET_MODE (x) != VOIDmode
      && ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
	  > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
    return 0;

  offset = subreg_lowpart_offset (mode, GET_MODE (x));

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
	   || GET_CODE (x) == CONCAT)
    return simplify_gen_subreg (mode, x, GET_MODE (x), offset);
  /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
     from the low-order part of the constant.  */
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      /* If MODE is twice the host word size, X is already the desired
	 representation.  Otherwise, if MODE is wider than a word, we can't
	 do this.  If MODE is exactly a word, return just one CONST_INT.  */

      if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
	return x;
      else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	return 0;
      else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
	return (GET_CODE (x) == CONST_INT ? x
		: GEN_INT (CONST_DOUBLE_LOW (x)));
      else
	{
	  /* MODE must be narrower than HOST_BITS_PER_WIDE_INT.  */
	  HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
			       : CONST_DOUBLE_LOW (x));

	  /* Sign extend to HOST_WIDE_INT.  */
	  val = trunc_int_for_mode (val, mode);

	  return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
		  : GEN_INT (val));
	}
    }

  /* The floating-point emulator can handle all conversions between
     FP and integer operands.  This simplifies reload because it
     doesn't have to deal with constructs like (subreg:DI
     (const_double:SF ...)) or (subreg:DF (const_int ...)).  */
  /* Single-precision floats are always 32-bits and double-precision
     floats are always 64-bits.  */

  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 32
	   && GET_CODE (x) == CONST_INT)
    {
      REAL_VALUE_TYPE r;
      HOST_WIDE_INT i;

      i = INTVAL (x);
      r = REAL_VALUE_FROM_TARGET_SINGLE (i);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 64
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
	   && GET_MODE (x) == VOIDmode)
    {
      REAL_VALUE_TYPE r;
      HOST_WIDE_INT i[2];
      HOST_WIDE_INT low, high;

      if (GET_CODE (x) == CONST_INT)
	{
	  low = INTVAL (x);
	  high = low >> (HOST_BITS_PER_WIDE_INT - 1);
	}
      else
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}

#if HOST_BITS_PER_WIDE_INT == 32
      /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
	 target machine.  */
      if (WORDS_BIG_ENDIAN)
	i[0] = high, i[1] = low;
      else
	i[0] = low, i[1] = high;
#else
      i[0] = low;
#endif

      r = REAL_VALUE_FROM_TARGET_DOUBLE (i);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_CODE (x) == CONST_DOUBLE
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
    {
      REAL_VALUE_TYPE r;
      long i[4];  /* Only the low 32 bits of each 'long' are used.  */
      int endian = WORDS_BIG_ENDIAN ? 1 : 0;

      /* Convert 'r' into an array of four 32-bit words in target word
	 order.  */
      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      switch (GET_MODE_BITSIZE (GET_MODE (x)))
	{
	case 32:
	  REAL_VALUE_TO_TARGET_SINGLE (r, i[3 * endian]);
	  i[1] = 0;
	  i[2] = 0;
	  i[3 - 3 * endian] = 0;
	  break;
	case 64:
	  REAL_VALUE_TO_TARGET_DOUBLE (r, i + 2 * endian);
	  i[2 - 2 * endian] = 0;
	  i[3 - 2 * endian] = 0;
	  break;
	case 96:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i + endian);
	  i[3 - 3 * endian] = 0;
	  break;
	case 128:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i);
	  break;
	default:
	  abort ();
	}
      /* Now, pack the 32-bit elements of the array into a CONST_DOUBLE
	 and return it.  */
#if HOST_BITS_PER_WIDE_INT == 32
      return immed_double_const (i[3 * endian], i[1 + endian], mode);
#else
      if (HOST_BITS_PER_WIDE_INT != 64)
	abort ();

      return immed_double_const ((((unsigned long) i[3 * endian])
				  | ((HOST_WIDE_INT) i[1 + endian] << 32)),
				 (((unsigned long) i[2 - endian])
				  | ((HOST_WIDE_INT) i[3 - 3 * endian] << 32)),
				 mode);
#endif
    }

  /* Otherwise, we can't do this.  */
  return 0;
}
\f
/* Return the real part (which has mode MODE) of a complex value X.
   This always comes at the low address in memory.  */

rtx
gen_realpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  if (WORDS_BIG_ENDIAN
      && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
      && REG_P (x)
      && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access real part of complex value in hard register");
  else if (WORDS_BIG_ENDIAN)
    return gen_highpart (mode, x);
  else
    return gen_lowpart (mode, x);
}

/* Return the imaginary part (which has mode MODE) of a complex value X.
   This always comes at the high address in memory.  */

rtx
gen_imagpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  if (WORDS_BIG_ENDIAN)
    return gen_lowpart (mode, x);
  else if (! WORDS_BIG_ENDIAN
	   && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
	   && REG_P (x)
	   && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access imaginary part of complex value in hard register");
  else
    return gen_highpart (mode, x);
}

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the real part of the complex value in its containing reg.
   Complex values are always stored with the real part in the first word,
   regardless of WORDS_BIG_ENDIAN.  */

int
subreg_realpart_p (x)
     rtx x;
{
  if (GET_CODE (x) != SUBREG)
    abort ();

  return ((unsigned int) SUBREG_BYTE (x)
	  < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
}
\f
/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
   return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
   least-significant part of X.
   MODE specifies how big a part of X to return;
   it usually should not be larger than a word.
   If X is a MEM whose address is a QUEUED, the value may be so also.  */

rtx
gen_lowpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  rtx result = gen_lowpart_common (mode, x);

  if (result)
    return result;
  else if (GET_CODE (x) == REG)
    {
      /* Must be a hard reg that's not valid in MODE.  */
      result = gen_lowpart_common (mode, copy_to_reg (x));
      if (result == 0)
	abort ();
      return result;
    }
  else if (GET_CODE (x) == MEM)
    {
      /* The only additional case we can do is MEM.  */
      int offset = 0;
      if (WORDS_BIG_ENDIAN)
	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));

      if (BYTES_BIG_ENDIAN)
	/* Adjust the address so that the address-after-the-data
	   is unchanged.  */
	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));

      return adjust_address (x, mode, offset);
    }
  else if (GET_CODE (x) == ADDRESSOF)
    return gen_lowpart (mode, force_reg (GET_MODE (x), x));
  else
    abort ();
}

/* Like `gen_lowpart', but refer to the most significant part.
   This is used to access the imaginary part of a complex number.  */

rtx
gen_highpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  if (msize > UNITS_PER_WORD
      && msize != GET_MODE_UNIT_SIZE (GET_MODE (x)))
    abort ();

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (result != NULL_RTX && GET_CODE (result) == MEM)
    result = validize_mem (result);

  if (!result)
    abort ();
  return result;
}

/* Like gen_highpart_mode, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */
rtx
gen_highpart_mode (outermode, innermode, exp)
     enum machine_mode outermode, innermode;
     rtx exp;
{
  if (GET_MODE (exp) != VOIDmode)
    {
      if (GET_MODE (exp) != innermode)
	abort ();
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return offset in bytes to get OUTERMODE low part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_lowpart_offset (outermode, innermode)
     enum machine_mode outermode, innermode;
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (outermode, innermode)
     enum machine_mode outermode, innermode;
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
    abort ();

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
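
/* For illustration: on a little-endian target with 4-byte words,
   subreg_lowpart_offset (SImode, DImode) is 0 and
   subreg_highpart_offset (SImode, DImode) is 4; on a fully big-endian
   target the two results are swapped.  */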

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (x)
     rtx x;
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}
\f

/* Helper routine for all the constant cases of operand_subword.
   Some places invoke this directly.  */

rtx
constant_subword (op, offset, mode)
     rtx op;
     int offset;
     enum machine_mode mode;
{
  int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
  HOST_WIDE_INT val;

  /* If OP is already an integer word, return it.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
    return op;

  /* The output is some bits, the width of the target machine's word.
     A wider-word host can surely hold them in a CONST_INT.  A narrower-word
     host can't.  */
  if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
      && GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 64
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      /* We handle 32-bit and >= 64-bit words here.  Note that the order in
	 which the words are written depends on the word endianness.
	 ??? This is a potential portability problem and should
	 be fixed at some point.

	 We must exercise caution with the sign bit.  By definition there
	 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
	 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
	 So we explicitly mask and sign-extend as necessary.  */
      if (BITS_PER_WORD == 32)
	{
	  val = k[offset];
	  val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
	  return GEN_INT (val);
	}
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset == 0)
	{
	  val = k[! WORDS_BIG_ENDIAN];
	  val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
	  val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
	  return GEN_INT (val);
	}
#endif
      else if (BITS_PER_WORD == 16)
	{
	  val = k[offset >> 1];
	  if ((offset & 1) == ! WORDS_BIG_ENDIAN)
	    val >>= 16;
	  val = ((val & 0xffff) ^ 0x8000) - 0x8000;
	  return GEN_INT (val);
	}
      else
	abort ();
    }
  else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
	   && GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) > 64
	   && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[4];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (BITS_PER_WORD == 32)
	{
	  val = k[offset];
	  val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
	  return GEN_INT (val);
	}
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset <= 1)
	{
	  val = k[offset * 2 + ! WORDS_BIG_ENDIAN];
	  val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
	  val |= (HOST_WIDE_INT) k[offset * 2 + WORDS_BIG_ENDIAN] & 0xffffffff;
	  return GEN_INT (val);
	}
#endif
      else
	abort ();
    }

  /* Single word float is a little harder, since single- and double-word
     values often do not have the same high-order bits.  We have already
     verified that we want the only defined word of the single-word value.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 32
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      /* Sign extend from known 32-bit value to HOST_WIDE_INT.  */
      val = l;
      val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;

      if (BITS_PER_WORD == 16)
	{
	  if ((offset & 1) == ! WORDS_BIG_ENDIAN)
	    val >>= 16;
	  val = ((val & 0xffff) ^ 0x8000) - 0x8000;
	}

      return GEN_INT (val);
    }

  /* The only remaining cases that we can handle are integers.
     Convert to proper endianness now since these cases need it.
     At this point, offset == 0 means the low-order word.

     We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
     in general.  However, if OP is (const_int 0), we can just return
     it for any word.  */

  if (op == const0_rtx)
    return op;

  if (GET_MODE_CLASS (mode) != MODE_INT
      || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
      || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
    return 0;

  if (WORDS_BIG_ENDIAN)
    offset = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - offset;

  /* Find out which word on the host machine this value is in and get
     it from the constant.  */
  val = (offset / size_ratio == 0
	 ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
	 : (GET_CODE (op) == CONST_INT
	    ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));

  /* Get the value we want into the low bits of val.  */
  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
    val = ((val >> ((offset % size_ratio) * BITS_PER_WORD)));

  val = trunc_int_for_mode (val, word_mode);

  return GEN_INT (val);
}

/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (op, offset, validate_address, mode)
     rtx op;
     unsigned int offset;
     int validate_address;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  if (mode == VOIDmode)
    abort ();

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (GET_CODE (op) == MEM)
    {
      rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new;

      else if (reload_completed)
	{
	  if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
	    return 0;
	}
      else
	return replace_equiv_address (new, XEXP (new, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
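
/* For illustration: on a 32-bit little-endian target the two halves of a
   DImode constant can be fetched with

     rtx lo = operand_subword (GEN_INT (5), 0, 0, DImode);
     rtx hi = operand_subword (GEN_INT (5), 1, 0, DImode);

   yielding (const_int 5) and (const_int 0); with WORDS_BIG_ENDIAN the
   meanings of the two offsets are reversed.  */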

/* Similar to `operand_subword', but never return 0.  If we can't extract
   the required subword, put OP into a register and try again.  If that fails,
   abort.  We always validate the address in this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (op, offset, mode)
     rtx op;
     unsigned int offset;
     enum machine_mode mode;
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
	 to a pseudo register.  */
      if (GET_CODE (op) == REG)
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  if (result == 0)
    abort ();

  return result;
}
\f
/* Given a compare instruction, swap the operands.
   A test instruction is changed into a compare of 0 against the operand.  */

void
reverse_comparison (insn)
     rtx insn;
{
  rtx body = PATTERN (insn);
  rtx comp;

  if (GET_CODE (body) == SET)
    comp = SET_SRC (body);
  else
    comp = SET_SRC (XVECEXP (body, 0, 0));

  if (GET_CODE (comp) == COMPARE)
    {
      rtx op0 = XEXP (comp, 0);
      rtx op1 = XEXP (comp, 1);
      XEXP (comp, 0) = op1;
      XEXP (comp, 1) = op0;
    }
  else
    {
      rtx new = gen_rtx_COMPARE (VOIDmode,
				 CONST0_RTX (GET_MODE (comp)), comp);
      if (GET_CODE (body) == SET)
	SET_SRC (body) = new;
      else
	SET_SRC (XVECEXP (body, 0, 0)) = new;
    }
}
\f
/* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
   or (2) a component ref of something variable.  Represent the latter with
   a NULL expression.  */

static tree
component_ref_for_mem_expr (ref)
     tree ref;
{
  tree inner = TREE_OPERAND (ref, 0);

  if (TREE_CODE (inner) == COMPONENT_REF)
    inner = component_ref_for_mem_expr (inner);
  else
    {
      tree placeholder_ptr = 0;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  Also handle PLACEHOLDER_EXPR.  */
      while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
	     || TREE_CODE (inner) == NON_LVALUE_EXPR
	     || TREE_CODE (inner) == VIEW_CONVERT_EXPR
	     || TREE_CODE (inner) == SAVE_EXPR
	     || TREE_CODE (inner) == PLACEHOLDER_EXPR)
	if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
	  inner = find_placeholder (inner, &placeholder_ptr);
	else
	  inner = TREE_OPERAND (inner, 0);

      if (! DECL_P (inner))
	inner = NULL_TREE;
    }

  if (inner == TREE_OPERAND (ref, 0))
    return ref;
  else
    return build (COMPONENT_REF, TREE_TYPE (ref), inner,
		  TREE_OPERAND (ref, 1));
}

/* Given REF, a MEM, and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  */

void
set_mem_attributes (ref, t, objectp)
     rtx ref;
     tree t;
     int objectp;
{
  HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
  tree expr = MEM_EXPR (ref);
  rtx offset = MEM_OFFSET (ref);
  rtx size = MEM_SIZE (ref);
  unsigned int align = MEM_ALIGN (ref);
  tree type;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type, in which case it returns NULL; we can
     see that here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
    abort ();

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
  RTX_UNCHANGING_P (ref)
    |= ((lang_hooks.honor_readonly
	 && (TYPE_READONLY (type) || TREE_READONLY (t)))
	|| (! TYPE_P (t) && TREE_CONSTANT (t)));

  /* If we are making an object of this type, or if this is a DECL, we know
     that it is a scalar if the type is not an aggregate.  */
  if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
    MEM_SCALAR_P (ref) = 1;

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    align = MAX (align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
    size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      maybe_set_unchanging (ref, t);
      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
	     || TREE_CODE (t) == NON_LVALUE_EXPR
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* If this expression can't be addressed (e.g., it contains a reference
	 to a non-addressable field), show we don't change its alias set.  */
      if (! can_address_p (t))
	MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
	{
	  expr = t;
	  offset = const0_rtx;
	  size = (DECL_SIZE_UNIT (t)
		  && host_integerp (DECL_SIZE_UNIT (t), 1)
		  ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
	  align = DECL_ALIGN (t);
	}

      /* If this is a constant, we know the alignment.  */
      else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
	{
	  align = TYPE_ALIGN (type);
#ifdef CONSTANT_ALIGNMENT
	  align = CONSTANT_ALIGNMENT (t, align);
#endif
	}

      /* If this is a field reference and not a bit-field, record it.  */
      /* ??? There is some information that can be gleaned from bit-fields,
	 such as the word offset in the structure that might be modified.
	 But skip it for now.  */
      else if (TREE_CODE (t) == COMPONENT_REF
	       && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
	{
	  expr = component_ref_for_mem_expr (t);
	  offset = const0_rtx;
	  /* ??? Any reason the field size would be different than
	     the size we got from the type?  */
	}

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
	{
	  tree off_tree = size_zero_node;

	  do
	    {
	      off_tree
		= fold (build (PLUS_EXPR, sizetype,
			       fold (build (MULT_EXPR, sizetype,
					    TREE_OPERAND (t, 1),
					    TYPE_SIZE_UNIT (TREE_TYPE (t)))),
			       off_tree));
	      t = TREE_OPERAND (t, 0);
	    }
	  while (TREE_CODE (t) == ARRAY_REF);

	  if (TREE_CODE (t) == COMPONENT_REF)
	    {
	      expr = component_ref_for_mem_expr (t);
	      if (host_integerp (off_tree, 1))
		offset = GEN_INT (tree_low_cst (off_tree, 1));
	      /* ??? Any reason the field size would be different than
		 the size we got from the type?  */
	    }
	}
    }

  /* Now set the attributes we computed above.  */
  MEM_ATTRS (ref)
    = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));

  /* If this is already known to be a scalar or aggregate, we are done.  */
  if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
    return;

  /* If it is a reference into an aggregate, this is part of an aggregate.
     Otherwise we don't know.  */
  else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
	   || TREE_CODE (t) == ARRAY_RANGE_REF
	   || TREE_CODE (t) == BIT_FIELD_REF)
    MEM_IN_STRUCT_P (ref) = 1;
}
1822
1823 /* Set the alias set of MEM to SET. */
1824
1825 void
1826 set_mem_alias_set (mem, set)
1827 rtx mem;
1828 HOST_WIDE_INT set;
1829 {
1830 #ifdef ENABLE_CHECKING
1831 /* If the new and old alias sets don't conflict, something is wrong. */
1832 if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
1833 abort ();
1834 #endif
1835
1836 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
1837 MEM_SIZE (mem), MEM_ALIGN (mem),
1838 GET_MODE (mem));
1839 }
1840
1841 /* Set the alignment of MEM to ALIGN bits. */
1842
1843 void
1844 set_mem_align (mem, align)
1845 rtx mem;
1846 unsigned int align;
1847 {
1848 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1849 MEM_OFFSET (mem), MEM_SIZE (mem), align,
1850 GET_MODE (mem));
1851 }
1852
1853 /* Set the expr for MEM to EXPR. */
1854
1855 void
1856 set_mem_expr (mem, expr)
1857 rtx mem;
1858 tree expr;
1859 {
1860 MEM_ATTRS (mem)
1861 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1862 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1863 }
1864
1865 /* Set the offset of MEM to OFFSET. */
1866
1867 void
1868 set_mem_offset (mem, offset)
1869 rtx mem, offset;
1870 {
1871 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1872 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1873 GET_MODE (mem));
1874 }
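/* Illustrative sketch (not part of the original source): a pass that has
   extra knowledge about a MEM it owns might use the accessors above like
   so, assuming MEM is a valid (mem:SI ...) rtx and TYPE its tree type:

     set_mem_align (mem, 32);
     set_mem_offset (mem, GEN_INT (4));
     set_mem_alias_set (mem, get_alias_set (type));

   Each call rebuilds the shared mem_attrs structure via get_mem_attrs;
   the accessors never modify an attribute block in place, since it may
   be shared with other MEMs.  */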
1875 \f
1876 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1877 and its address changed to ADDR. (VOIDmode means don't change the mode.
1878 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1879 returned memory location is required to be valid. The memory
1880 attributes are not changed. */
1881
1882 static rtx
1883 change_address_1 (memref, mode, addr, validate)
1884 rtx memref;
1885 enum machine_mode mode;
1886 rtx addr;
1887 int validate;
1888 {
1889 rtx new;
1890
1891 if (GET_CODE (memref) != MEM)
1892 abort ();
1893 if (mode == VOIDmode)
1894 mode = GET_MODE (memref);
1895 if (addr == 0)
1896 addr = XEXP (memref, 0);
1897
1898 if (validate)
1899 {
1900 if (reload_in_progress || reload_completed)
1901 {
1902 if (! memory_address_p (mode, addr))
1903 abort ();
1904 }
1905 else
1906 addr = memory_address (mode, addr);
1907 }
1908
1909 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1910 return memref;
1911
1912 new = gen_rtx_MEM (mode, addr);
1913 MEM_COPY_ATTRIBUTES (new, memref);
1914 return new;
1915 }
1916
1917 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1918 way we are changing MEMREF, so we only preserve the alias set. */
1919
1920 rtx
1921 change_address (memref, mode, addr)
1922 rtx memref;
1923 enum machine_mode mode;
1924 rtx addr;
1925 {
1926 rtx new = change_address_1 (memref, mode, addr, 1);
1927 enum machine_mode mmode = GET_MODE (new);
1928
1929 MEM_ATTRS (new)
1930 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0,
1931 mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode)),
1932 (mmode == BLKmode ? BITS_PER_UNIT
1933 : GET_MODE_ALIGNMENT (mmode)),
1934 mmode);
1935
1936 return new;
1937 }
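/* Usage sketch (illustrative only): to force a BLKmode reference to a
   word-sized access at a new address ADDR, assumed to be a valid Pmode
   expression:

     rtx word_ref = change_address (blk_ref, word_mode, addr);

   Because the caller is not saying how the new reference relates to the
   old one, only the alias set survives; size and alignment are
   recomputed from the new mode as shown above.  */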
1938
1939 /* Return a memory reference like MEMREF, but with its mode changed
1940 to MODE and its address offset by OFFSET bytes. If VALIDATE is
1941 nonzero, the memory address is forced to be valid.
1942 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1943 and the caller is responsible for adjusting the MEMREF base register. */
1944
1945 rtx
1946 adjust_address_1 (memref, mode, offset, validate, adjust)
1947 rtx memref;
1948 enum machine_mode mode;
1949 HOST_WIDE_INT offset;
1950 int validate, adjust;
1951 {
1952 rtx addr = XEXP (memref, 0);
1953 rtx new;
1954 rtx memoffset = MEM_OFFSET (memref);
1955 rtx size = 0;
1956 unsigned int memalign = MEM_ALIGN (memref);
1957
1958 /* ??? Prefer to create garbage instead of creating shared rtl.
1959 This may happen even if offset is non-zero -- consider
1960 (plus (plus reg reg) const_int) -- so do this always. */
1961 addr = copy_rtx (addr);
1962
1963 if (adjust)
1964 {
1965 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
1966 object, we can merge it into the LO_SUM. */
1967 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
1968 && offset >= 0
1969 && (unsigned HOST_WIDE_INT) offset
1970 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
1971 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
1972 plus_constant (XEXP (addr, 1), offset));
1973 else
1974 addr = plus_constant (addr, offset);
1975 }
1976
1977 new = change_address_1 (memref, mode, addr, validate);
1978
1979 /* Compute the new values of the memory attributes due to this adjustment.
1980 We add the offsets and update the alignment. */
1981 if (memoffset)
1982 memoffset = GEN_INT (offset + INTVAL (memoffset));
1983
1984 /* Compute the new alignment by taking the MIN of the alignment and the
1985 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
1986 is zero. */
1987 if (offset != 0)
1988 memalign
1989 = MIN (memalign,
1990 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
1991
1992 /* We can compute the size in a number of ways. */
1993 if (GET_MODE (new) != BLKmode)
1994 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
1995 else if (MEM_SIZE (memref))
1996 size = plus_constant (MEM_SIZE (memref), -offset);
1997
1998 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
1999 memoffset, size, memalign, GET_MODE (new));
2000
2001 /* At some point, we should validate that this offset is within the object,
2002 if all the appropriate values are known. */
2003 return new;
2004 }
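/* Illustrative note: most callers reach adjust_address_1 through the
   adjust_address and adjust_address_nv macros in expr.h, which supply
   VALIDATE and ADJUST.  For example, to access the second word of an
   existing (mem:DI ...) reference DIMODE_REF:

     rtx word1 = adjust_address (dimode_ref, SImode, GET_MODE_SIZE (SImode));

   The call offsets the address by four bytes and updates MEM_OFFSET,
   MEM_SIZE and MEM_ALIGN accordingly.  */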
2005
2006 /* Return a memory reference like MEMREF, but with its mode changed
2007 to MODE and its address changed to ADDR, which is assumed to be
2008 MEMREF offset by OFFSET bytes. If VALIDATE is
2009 nonzero, the memory address is forced to be valid. */
2010
2011 rtx
2012 adjust_automodify_address_1 (memref, mode, addr, offset, validate)
2013 rtx memref;
2014 enum machine_mode mode;
2015 rtx addr;
2016 HOST_WIDE_INT offset;
2017 int validate;
2018 {
2019 memref = change_address_1 (memref, VOIDmode, addr, validate);
2020 return adjust_address_1 (memref, mode, offset, validate, 0);
2021 }
2022
2023 /* Return a memory reference like MEMREF, but whose address is changed by
2024 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2025 known to be in OFFSET (possibly 1). */
2026
2027 rtx
2028 offset_address (memref, offset, pow2)
2029 rtx memref;
2030 rtx offset;
2031 HOST_WIDE_INT pow2;
2032 {
2033 rtx new, addr = XEXP (memref, 0);
2034
2035 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2036
2037 /* At this point we don't know _why_ the address is invalid. It
2038 could have secondary memory references, multiplies or anything.
2039
2040 However, if we did go and rearrange things, we can wind up not
2041 being able to recognize the magic around pic_offset_table_rtx.
2042 This stuff is fragile, and is yet another example of why it is
2043 bad to expose PIC machinery too early. */
2044 if (! memory_address_p (GET_MODE (memref), new)
2045 && GET_CODE (addr) == PLUS
2046 && XEXP (addr, 0) == pic_offset_table_rtx)
2047 {
2048 addr = force_reg (GET_MODE (addr), addr);
2049 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2050 }
2051
2052 update_temp_slot_address (XEXP (memref, 0), new);
2053 new = change_address_1 (memref, VOIDmode, new, 1);
2054
2055 /* Update the alignment to reflect the offset. Reset the offset, which
2056 we don't know. */
2057 MEM_ATTRS (new)
2058 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2059 MIN (MEM_ALIGN (memref),
2060 (unsigned HOST_WIDE_INT) pow2 * BITS_PER_UNIT),
2061 GET_MODE (new));
2062 return new;
2063 }
2064
2065 /* Return a memory reference like MEMREF, but with its address changed to
2066 ADDR. The caller is asserting that the actual piece of memory pointed
2067 to is the same, just the form of the address is being changed, such as
2068 by putting something into a register. */
2069
2070 rtx
2071 replace_equiv_address (memref, addr)
2072 rtx memref;
2073 rtx addr;
2074 {
2075 /* change_address_1 copies the memory attribute structure without change
2076 and that's exactly what we want here. */
2077 update_temp_slot_address (XEXP (memref, 0), addr);
2078 return change_address_1 (memref, VOIDmode, addr, 1);
2079 }
2080
2081 /* Likewise, but the reference is not required to be valid. */
2082
2083 rtx
2084 replace_equiv_address_nv (memref, addr)
2085 rtx memref;
2086 rtx addr;
2087 {
2088 return change_address_1 (memref, VOIDmode, addr, 0);
2089 }
2090
2091 /* Return a memory reference like MEMREF, but with its mode widened to
2092 MODE and offset by OFFSET. This would be used by targets that e.g.
2093 cannot issue QImode memory operations and have to use SImode memory
2094 operations plus masking logic. */
2095
2096 rtx
2097 widen_memory_access (memref, mode, offset)
2098 rtx memref;
2099 enum machine_mode mode;
2100 HOST_WIDE_INT offset;
2101 {
2102 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
2103 tree expr = MEM_EXPR (new);
2104 rtx memoffset = MEM_OFFSET (new);
2105 unsigned int size = GET_MODE_SIZE (mode);
2106
2107 /* If we don't know what offset we were at within the expression, then
2108 we can't know if we've overstepped the bounds. */
2109 if (! memoffset)
2110 expr = NULL_TREE;
2111
2112 while (expr)
2113 {
2114 if (TREE_CODE (expr) == COMPONENT_REF)
2115 {
2116 tree field = TREE_OPERAND (expr, 1);
2117
2118 if (! DECL_SIZE_UNIT (field))
2119 {
2120 expr = NULL_TREE;
2121 break;
2122 }
2123
2124 /* Is the field at least as large as the access? If so, ok,
2125 otherwise strip back to the containing structure. */
2126 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2127 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2128 && INTVAL (memoffset) >= 0)
2129 break;
2130
2131 if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
2132 {
2133 expr = NULL_TREE;
2134 break;
2135 }
2136
2137 expr = TREE_OPERAND (expr, 0);
2138 memoffset = (GEN_INT (INTVAL (memoffset)
2139 + tree_low_cst (DECL_FIELD_OFFSET (field), 1)
2140 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2141 / BITS_PER_UNIT)));
2142 }
2143 /* Similarly for the decl. */
2144 else if (DECL_P (expr)
2145 && DECL_SIZE_UNIT (expr)
2146 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2147 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2148 && (! memoffset || INTVAL (memoffset) >= 0))
2149 break;
2150 else
2151 {
2152 /* The widened memory access overflows the expression, which means
2153 that it could alias another expression. Zap it. */
2154 expr = NULL_TREE;
2155 break;
2156 }
2157 }
2158
2159 if (! expr)
2160 memoffset = NULL_RTX;
2161
2162 /* The widened memory may alias other stuff, so zap the alias set. */
2163 /* ??? Maybe use get_alias_set on any remaining expression. */
2164
2165 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2166 MEM_ALIGN (new), mode);
2167
2168 return new;
2169 }
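/* Illustrative sketch: a target that cannot issue QImode loads might
   widen a byte reference BYTE_REF, assumed to be a valid (mem:QI ...),
   to a full word:

     rtx word_ref = widen_memory_access (byte_ref, SImode, 0);

   The attribute walk above then either proves the widened access still
   falls within the recorded decl or field, or conservatively drops the
   expr, offset and alias set.  */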
2170 \f
2171 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2172
2173 rtx
2174 gen_label_rtx ()
2175 {
2176 rtx label;
2177
2178 label = gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2179 NULL, label_num++, NULL, NULL);
2180
2181 LABEL_NUSES (label) = 0;
2182 LABEL_ALTERNATE_NAME (label) = NULL;
2183 return label;
2184 }
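/* Typical usage (illustrative only): allocate a label, branch to it, and
   later bind it into the insn chain:

     rtx label = gen_label_rtx ();
     emit_jump (label);
     ...
     emit_label (label);

   emit_jump and emit_label are defined elsewhere; only the allocation of
   the CODE_LABEL rtx happens here.  */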
2185 \f
2186 /* For procedure integration. */
2187
2188 /* Install new pointers to the first and last insns in the chain.
2189 Also, set cur_insn_uid to one higher than the last in use.
2190 Used for an inline-procedure after copying the insn chain. */
2191
2192 void
2193 set_new_first_and_last_insn (first, last)
2194 rtx first, last;
2195 {
2196 rtx insn;
2197
2198 first_insn = first;
2199 last_insn = last;
2200 cur_insn_uid = 0;
2201
2202 for (insn = first; insn; insn = NEXT_INSN (insn))
2203 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2204
2205 cur_insn_uid++;
2206 }
2207
2208 /* Set the range of label numbers found in the current function.
2209 This is used when belatedly compiling an inline function. */
2210
2211 void
2212 set_new_first_and_last_label_num (first, last)
2213 int first, last;
2214 {
2215 base_label_num = label_num;
2216 first_label_num = first;
2217 last_label_num = last;
2218 }
2219
2220 /* Set the last label number found in the current function.
2221 This is used when belatedly compiling an inline function. */
2222
2223 void
2224 set_new_last_label_num (last)
2225 int last;
2226 {
2227 base_label_num = label_num;
2228 last_label_num = last;
2229 }
2230 \f
2231 /* Restore all variables describing the current status from the structure *P.
2232 This is used after a nested function. */
2233
2234 void
2235 restore_emit_status (p)
2236 struct function *p ATTRIBUTE_UNUSED;
2237 {
2238 last_label_num = 0;
2239 }
2240
2241 /* Clear out all parts of the state in F that can safely be discarded
2242 after the function has been compiled, to let garbage collection
2243 reclaim the memory. */
2244
2245 void
2246 free_emit_status (f)
2247 struct function *f;
2248 {
2249 free (f->emit->x_regno_reg_rtx);
2250 free (f->emit->regno_pointer_align);
2251 free (f->emit->regno_decl);
2252 free (f->emit);
2253 f->emit = NULL;
2254 }
2255 \f
2256 /* Go through all the RTL insn bodies and copy any invalid shared
2257 structure. This routine should only be called once. */
2258
2259 void
2260 unshare_all_rtl (fndecl, insn)
2261 tree fndecl;
2262 rtx insn;
2263 {
2264 tree decl;
2265
2266 /* Make sure that virtual parameters are not shared. */
2267 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2268 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2269
2270 /* Make sure that virtual stack slots are not shared. */
2271 unshare_all_decls (DECL_INITIAL (fndecl));
2272
2273 /* Unshare just about everything else. */
2274 unshare_all_rtl_1 (insn);
2275
2276 /* Make sure the addresses of stack slots found outside the insn chain
2277 (such as, in DECL_RTL of a variable) are not shared
2278 with the insn chain.
2279
2280 This special care is necessary when the stack slot MEM does not
2281 actually appear in the insn chain. If it does appear, its address
2282 is unshared from all else at that point. */
2283 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2284 }
2285
2286 /* Go through all the RTL insn bodies and copy any invalid shared
2287 structure, again. This is a fairly expensive thing to do so it
2288 should be done sparingly. */
2289
2290 void
2291 unshare_all_rtl_again (insn)
2292 rtx insn;
2293 {
2294 rtx p;
2295 tree decl;
2296
2297 for (p = insn; p; p = NEXT_INSN (p))
2298 if (INSN_P (p))
2299 {
2300 reset_used_flags (PATTERN (p));
2301 reset_used_flags (REG_NOTES (p));
2302 reset_used_flags (LOG_LINKS (p));
2303 }
2304
2305 /* Make sure that virtual stack slots are not shared. */
2306 reset_used_decls (DECL_INITIAL (cfun->decl));
2307
2308 /* Make sure that virtual parameters are not shared. */
2309 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2310 reset_used_flags (DECL_RTL (decl));
2311
2312 reset_used_flags (stack_slot_list);
2313
2314 unshare_all_rtl (cfun->decl, insn);
2315 }
2316
2317 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2318 Assumes the mark bits are cleared at entry. */
2319
2320 static void
2321 unshare_all_rtl_1 (insn)
2322 rtx insn;
2323 {
2324 for (; insn; insn = NEXT_INSN (insn))
2325 if (INSN_P (insn))
2326 {
2327 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2328 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2329 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2330 }
2331 }
2332
2333 /* Go through all virtual stack slots of a function and copy any
2334 shared structure. */
2335 static void
2336 unshare_all_decls (blk)
2337 tree blk;
2338 {
2339 tree t;
2340
2341 /* Copy shared decls. */
2342 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2343 if (DECL_RTL_SET_P (t))
2344 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2345
2346 /* Now process sub-blocks. */
2347 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2348 unshare_all_decls (t);
2349 }
2350
2351 /* Go through all virtual stack slots of a function and mark them as
2352 not shared. */
2353 static void
2354 reset_used_decls (blk)
2355 tree blk;
2356 {
2357 tree t;
2358
2359 /* Mark decls. */
2360 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2361 if (DECL_RTL_SET_P (t))
2362 reset_used_flags (DECL_RTL (t));
2363
2364 /* Now process sub-blocks. */
2365 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2366 reset_used_decls (t);
2367 }
2368
2369 /* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
2370 placed in the result directly, rather than being copied. MAY_SHARE is
2371 either a MEM or an EXPR_LIST of MEMs. */
2372
2373 rtx
2374 copy_most_rtx (orig, may_share)
2375 rtx orig;
2376 rtx may_share;
2377 {
2378 rtx copy;
2379 int i, j;
2380 RTX_CODE code;
2381 const char *format_ptr;
2382
2383 if (orig == may_share
2384 || (GET_CODE (may_share) == EXPR_LIST
2385 && in_expr_list_p (may_share, orig)))
2386 return orig;
2387
2388 code = GET_CODE (orig);
2389
2390 switch (code)
2391 {
2392 case REG:
2393 case QUEUED:
2394 case CONST_INT:
2395 case CONST_DOUBLE:
2396 case CONST_VECTOR:
2397 case SYMBOL_REF:
2398 case CODE_LABEL:
2399 case PC:
2400 case CC0:
2401 return orig;
2402 default:
2403 break;
2404 }
2405
2406 copy = rtx_alloc (code);
2407 PUT_MODE (copy, GET_MODE (orig));
2408 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2409 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2410 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2411 RTX_FLAG (copy, integrated) = RTX_FLAG (orig, integrated);
2412 RTX_FLAG (copy, frame_related) = RTX_FLAG (orig, frame_related);
2413
2414 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2415
2416 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2417 {
2418 switch (*format_ptr++)
2419 {
2420 case 'e':
2421 XEXP (copy, i) = XEXP (orig, i);
2422 if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
2423 XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
2424 break;
2425
2426 case 'u':
2427 XEXP (copy, i) = XEXP (orig, i);
2428 break;
2429
2430 case 'E':
2431 case 'V':
2432 XVEC (copy, i) = XVEC (orig, i);
2433 if (XVEC (orig, i) != NULL)
2434 {
2435 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2436 for (j = 0; j < XVECLEN (copy, i); j++)
2437 XVECEXP (copy, i, j)
2438 = copy_most_rtx (XVECEXP (orig, i, j), may_share);
2439 }
2440 break;
2441
2442 case 'w':
2443 XWINT (copy, i) = XWINT (orig, i);
2444 break;
2445
2446 case 'n':
2447 case 'i':
2448 XINT (copy, i) = XINT (orig, i);
2449 break;
2450
2451 case 't':
2452 XTREE (copy, i) = XTREE (orig, i);
2453 break;
2454
2455 case 's':
2456 case 'S':
2457 XSTR (copy, i) = XSTR (orig, i);
2458 break;
2459
2460 case '0':
2461 /* Copy this through the wide int field; that's safest. */
2462 X0WINT (copy, i) = X0WINT (orig, i);
2463 break;
2464
2465 default:
2466 abort ();
2467 }
2468 }
2469 return copy;
2470 }
2471
2472 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2473 Recursively does the same for subexpressions. */
2474
2475 rtx
2476 copy_rtx_if_shared (orig)
2477 rtx orig;
2478 {
2479 rtx x = orig;
2480 int i;
2481 enum rtx_code code;
2482 const char *format_ptr;
2483 int copied = 0;
2484
2485 if (x == 0)
2486 return 0;
2487
2488 code = GET_CODE (x);
2489
2490 /* These types may be freely shared. */
2491
2492 switch (code)
2493 {
2494 case REG:
2495 case QUEUED:
2496 case CONST_INT:
2497 case CONST_DOUBLE:
2498 case CONST_VECTOR:
2499 case SYMBOL_REF:
2500 case CODE_LABEL:
2501 case PC:
2502 case CC0:
2503 case SCRATCH:
2504 /* SCRATCH must be shared because each one represents a distinct value. */
2505 return x;
2506
2507 case CONST:
2508 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2509 a LABEL_REF, it isn't sharable. */
2510 if (GET_CODE (XEXP (x, 0)) == PLUS
2511 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2512 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2513 return x;
2514 break;
2515
2516 case INSN:
2517 case JUMP_INSN:
2518 case CALL_INSN:
2519 case NOTE:
2520 case BARRIER:
2521 /* The chain of insns is not being copied. */
2522 return x;
2523
2524 case MEM:
2525 /* A MEM is allowed to be shared if its address is constant.
2526
2527 We used to allow sharing of MEMs which referenced
2528 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
2529 that can lose. instantiate_virtual_regs will not unshare
2530 the MEMs, and combine may change the structure of the address
2531 because it looks safe and profitable in one context, but
2532 in some other context it creates unrecognizable RTL. */
2533 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
2534 return x;
2535
2536 break;
2537
2538 default:
2539 break;
2540 }
2541
2542 /* This rtx may not be shared. If it has already been seen,
2543 replace it with a copy of itself. */
2544
2545 if (RTX_FLAG (x, used))
2546 {
2547 rtx copy;
2548
2549 copy = rtx_alloc (code);
2550 memcpy (copy, x,
2551 (sizeof (*copy) - sizeof (copy->fld)
2552 + sizeof (copy->fld[0]) * GET_RTX_LENGTH (code)));
2553 x = copy;
2554 copied = 1;
2555 }
2556 RTX_FLAG (x, used) = 1;
2557
2558 /* Now scan the subexpressions recursively.
2559 We can store any replaced subexpressions directly into X
2560 since we know X is not shared! Any vectors in X
2561 must be copied if X was copied. */
2562
2563 format_ptr = GET_RTX_FORMAT (code);
2564
2565 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2566 {
2567 switch (*format_ptr++)
2568 {
2569 case 'e':
2570 XEXP (x, i) = copy_rtx_if_shared (XEXP (x, i));
2571 break;
2572
2573 case 'E':
2574 if (XVEC (x, i) != NULL)
2575 {
2576 int j;
2577 int len = XVECLEN (x, i);
2578
2579 if (copied && len > 0)
2580 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2581 for (j = 0; j < len; j++)
2582 XVECEXP (x, i, j) = copy_rtx_if_shared (XVECEXP (x, i, j));
2583 }
2584 break;
2585 }
2586 }
2587 return x;
2588 }
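/* Illustrative sketch of the used-bit protocol: to unshare a single
   insn's pattern by hand (the unshare_all_* routines above do this for
   whole chains), clear the bits first and then copy:

     reset_used_flags (PATTERN (insn));
     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));

   The first traversal clears `used'; the second sets it as it walks and
   copies any node it reaches a second time.  */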
2589
2590 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2591 to look for shared sub-parts. */
2592
2593 void
2594 reset_used_flags (x)
2595 rtx x;
2596 {
2597 int i, j;
2598 enum rtx_code code;
2599 const char *format_ptr;
2600
2601 if (x == 0)
2602 return;
2603
2604 code = GET_CODE (x);
2605
2606 /* These types may be freely shared so we needn't do any resetting
2607 for them. */
2608
2609 switch (code)
2610 {
2611 case REG:
2612 case QUEUED:
2613 case CONST_INT:
2614 case CONST_DOUBLE:
2615 case CONST_VECTOR:
2616 case SYMBOL_REF:
2617 case CODE_LABEL:
2618 case PC:
2619 case CC0:
2620 return;
2621
2622 case INSN:
2623 case JUMP_INSN:
2624 case CALL_INSN:
2625 case NOTE:
2626 case LABEL_REF:
2627 case BARRIER:
2628 /* The chain of insns is not being copied. */
2629 return;
2630
2631 default:
2632 break;
2633 }
2634
2635 RTX_FLAG (x, used) = 0;
2636
2637 format_ptr = GET_RTX_FORMAT (code);
2638 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2639 {
2640 switch (*format_ptr++)
2641 {
2642 case 'e':
2643 reset_used_flags (XEXP (x, i));
2644 break;
2645
2646 case 'E':
2647 for (j = 0; j < XVECLEN (x, i); j++)
2648 reset_used_flags (XVECEXP (x, i, j));
2649 break;
2650 }
2651 }
2652 }
2653 \f
2654 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2655 Return X or the rtx for the pseudo reg the value of X was copied into.
2656 OTHER must be valid as a SET_DEST. */
2657
2658 rtx
2659 make_safe_from (x, other)
2660 rtx x, other;
2661 {
2662 while (1)
2663 switch (GET_CODE (other))
2664 {
2665 case SUBREG:
2666 other = SUBREG_REG (other);
2667 break;
2668 case STRICT_LOW_PART:
2669 case SIGN_EXTEND:
2670 case ZERO_EXTEND:
2671 other = XEXP (other, 0);
2672 break;
2673 default:
2674 goto done;
2675 }
2676 done:
2677 if ((GET_CODE (other) == MEM
2678 && ! CONSTANT_P (x)
2679 && GET_CODE (x) != REG
2680 && GET_CODE (x) != SUBREG)
2681 || (GET_CODE (other) == REG
2682 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2683 || reg_mentioned_p (other, x))))
2684 {
2685 rtx temp = gen_reg_rtx (GET_MODE (x));
2686 emit_move_insn (temp, x);
2687 return temp;
2688 }
2689 return x;
2690 }
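/* Usage sketch (illustrative): before emitting a multi-insn sequence
   that stores into TARGET early while SRC is still needed afterwards, a
   caller can write

     src = make_safe_from (src, target);

   so that, if SRC mentioned TARGET (or TARGET is memory and SRC is not
   trivially safe), SRC is first copied into a fresh pseudo.  */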
2691 \f
2692 /* Emission of insns (adding them to the doubly-linked list). */
2693
2694 /* Return the first insn of the current sequence or current function. */
2695
2696 rtx
2697 get_insns ()
2698 {
2699 return first_insn;
2700 }
2701
2702 /* Specify a new insn as the first in the chain. */
2703
2704 void
2705 set_first_insn (insn)
2706 rtx insn;
2707 {
2708 if (PREV_INSN (insn) != 0)
2709 abort ();
2710 first_insn = insn;
2711 }
2712
2713 /* Return the last insn emitted in current sequence or current function. */
2714
2715 rtx
2716 get_last_insn ()
2717 {
2718 return last_insn;
2719 }
2720
2721 /* Specify a new insn as the last in the chain. */
2722
2723 void
2724 set_last_insn (insn)
2725 rtx insn;
2726 {
2727 if (NEXT_INSN (insn) != 0)
2728 abort ();
2729 last_insn = insn;
2730 }
2731
2732 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2733
2734 rtx
2735 get_last_insn_anywhere ()
2736 {
2737 struct sequence_stack *stack;
2738 if (last_insn)
2739 return last_insn;
2740 for (stack = seq_stack; stack; stack = stack->next)
2741 if (stack->last != 0)
2742 return stack->last;
2743 return 0;
2744 }
2745
2746 /* Return a number larger than any instruction's uid in this function. */
2747
2748 int
2749 get_max_uid ()
2750 {
2751 return cur_insn_uid;
2752 }
2753
2754 /* Renumber instructions so that no instruction UIDs are wasted. */
2755
2756 void
2757 renumber_insns (stream)
2758 FILE *stream;
2759 {
2760 rtx insn;
2761
2762 /* If we're not supposed to renumber instructions, don't. */
2763 if (!flag_renumber_insns)
2764 return;
2765
2766 /* If there aren't that many instructions, then it's not really
2767 worth renumbering them. */
2768 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
2769 return;
2770
2771 cur_insn_uid = 1;
2772
2773 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2774 {
2775 if (stream)
2776 fprintf (stream, "Renumbering insn %d to %d\n",
2777 INSN_UID (insn), cur_insn_uid);
2778 INSN_UID (insn) = cur_insn_uid++;
2779 }
2780 }
2781 \f
2782 /* Return the next insn. If it is a SEQUENCE, return the first insn
2783 of the sequence. */
2784
2785 rtx
2786 next_insn (insn)
2787 rtx insn;
2788 {
2789 if (insn)
2790 {
2791 insn = NEXT_INSN (insn);
2792 if (insn && GET_CODE (insn) == INSN
2793 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2794 insn = XVECEXP (PATTERN (insn), 0, 0);
2795 }
2796
2797 return insn;
2798 }
2799
2800 /* Return the previous insn. If it is a SEQUENCE, return the last insn
2801 of the sequence. */
2802
2803 rtx
2804 previous_insn (insn)
2805 rtx insn;
2806 {
2807 if (insn)
2808 {
2809 insn = PREV_INSN (insn);
2810 if (insn && GET_CODE (insn) == INSN
2811 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2812 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2813 }
2814
2815 return insn;
2816 }
2817
2818 /* Return the next insn after INSN that is not a NOTE. This routine does not
2819 look inside SEQUENCEs. */
2820
2821 rtx
2822 next_nonnote_insn (insn)
2823 rtx insn;
2824 {
2825 while (insn)
2826 {
2827 insn = NEXT_INSN (insn);
2828 if (insn == 0 || GET_CODE (insn) != NOTE)
2829 break;
2830 }
2831
2832 return insn;
2833 }
2834
2835 /* Return the previous insn before INSN that is not a NOTE. This routine does
2836 not look inside SEQUENCEs. */
2837
2838 rtx
2839 prev_nonnote_insn (insn)
2840 rtx insn;
2841 {
2842 while (insn)
2843 {
2844 insn = PREV_INSN (insn);
2845 if (insn == 0 || GET_CODE (insn) != NOTE)
2846 break;
2847 }
2848
2849 return insn;
2850 }
2851
2852 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2853 or 0, if there is none. This routine does not look inside
2854 SEQUENCEs. */
2855
2856 rtx
2857 next_real_insn (insn)
2858 rtx insn;
2859 {
2860 while (insn)
2861 {
2862 insn = NEXT_INSN (insn);
2863 if (insn == 0 || GET_CODE (insn) == INSN
2864 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
2865 break;
2866 }
2867
2868 return insn;
2869 }
2870
2871 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2872 or 0, if there is none. This routine does not look inside
2873 SEQUENCEs. */
2874
2875 rtx
2876 prev_real_insn (insn)
2877 rtx insn;
2878 {
2879 while (insn)
2880 {
2881 insn = PREV_INSN (insn);
2882 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
2883 || GET_CODE (insn) == JUMP_INSN)
2884 break;
2885 }
2886
2887 return insn;
2888 }
2889
2890 /* Return nonzero if INSN really does something: it is a CALL_INSN or a
2891 JUMP_INSN, or an INSN whose pattern, once reload has completed, is not
2892 merely a USE or CLOBBER. */
2893
2894 int
2895 active_insn_p (insn)
2896 rtx insn;
2897 {
2898 return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
2899 || (GET_CODE (insn) == INSN
2900 && (! reload_completed
2901 || (GET_CODE (PATTERN (insn)) != USE
2902 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2903 }
2904
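/* Find the next insn after INSN that really does something. This routine
   does not look inside SEQUENCEs. Until reload has completed, this is the
   same as next_real_insn. */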
2905 rtx
2906 next_active_insn (insn)
2907 rtx insn;
2908 {
2909 while (insn)
2910 {
2911 insn = NEXT_INSN (insn);
2912 if (insn == 0 || active_insn_p (insn))
2913 break;
2914 }
2915
2916 return insn;
2917 }
2918
2919 /* Find the last insn before INSN that really does something. This routine
2920 does not look inside SEQUENCEs. Until reload has completed, this is the
2921 same as prev_real_insn. */
2922
2923 rtx
2924 prev_active_insn (insn)
2925 rtx insn;
2926 {
2927 while (insn)
2928 {
2929 insn = PREV_INSN (insn);
2930 if (insn == 0 || active_insn_p (insn))
2931 break;
2932 }
2933
2934 return insn;
2935 }
2936
2937 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
2938
2939 rtx
2940 next_label (insn)
2941 rtx insn;
2942 {
2943 while (insn)
2944 {
2945 insn = NEXT_INSN (insn);
2946 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2947 break;
2948 }
2949
2950 return insn;
2951 }
2952
2953 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
2954
2955 rtx
2956 prev_label (insn)
2957 rtx insn;
2958 {
2959 while (insn)
2960 {
2961 insn = PREV_INSN (insn);
2962 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2963 break;
2964 }
2965
2966 return insn;
2967 }
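/* Illustrative sketch: the walkers above compose in the obvious way.
   For instance, to visit every real insn in the current function:

     rtx insn;
     for (insn = get_insns (); insn; insn = next_real_insn (insn))
       if (INSN_P (insn))
         ... process insn ...

   next_real_insn already skips NOTEs, BARRIERs and CODE_LABELs; the
   INSN_P test only guards the first iteration.  */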
2968 \f
2969 #ifdef HAVE_cc0
2970 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
2971 and REG_CC_USER notes so we can find it. */
2972
2973 void
2974 link_cc0_insns (insn)
2975 rtx insn;
2976 {
2977 rtx user = next_nonnote_insn (insn);
2978
2979 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
2980 user = XVECEXP (PATTERN (user), 0, 0);
2981
2982 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
2983 REG_NOTES (user));
2984 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
2985 }
2986
2987 /* Return the next insn that uses CC0 after INSN, which is assumed to
2988 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
2989 applied to the result of this function should yield INSN).
2990
2991 Normally, this is simply the next insn. However, if a REG_CC_USER note
2992 is present, it contains the insn that uses CC0.
2993
2994 Return 0 if we can't find the insn. */
2995
2996 rtx
2997 next_cc0_user (insn)
2998 rtx insn;
2999 {
3000 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3001
3002 if (note)
3003 return XEXP (note, 0);
3004
3005 insn = next_nonnote_insn (insn);
3006 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
3007 insn = XVECEXP (PATTERN (insn), 0, 0);
3008
3009 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3010 return insn;
3011
3012 return 0;
3013 }
3014
3015 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3016 note, it is the previous insn. */
3017
3018 rtx
3019 prev_cc0_setter (insn)
3020 rtx insn;
3021 {
3022 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3023
3024 if (note)
3025 return XEXP (note, 0);
3026
3027 insn = prev_nonnote_insn (insn);
3028 if (! sets_cc0_p (PATTERN (insn)))
3029 abort ();
3030
3031 return insn;
3032 }
3033 #endif
3034
3035 /* Increment the label uses for all labels present in rtx. */
3036
3037 static void
3038 mark_label_nuses (x)
3039 rtx x;
3040 {
3041 enum rtx_code code;
3042 int i, j;
3043 const char *fmt;
3044
3045 code = GET_CODE (x);
3046 if (code == LABEL_REF)
3047 LABEL_NUSES (XEXP (x, 0))++;
3048
3049 fmt = GET_RTX_FORMAT (code);
3050 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3051 {
3052 if (fmt[i] == 'e')
3053 mark_label_nuses (XEXP (x, i));
3054 else if (fmt[i] == 'E')
3055 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3056 mark_label_nuses (XVECEXP (x, i, j));
3057 }
3058 }
3059
3060 \f
3061 /* Try splitting insns that can be split for better scheduling.
3062 PAT is the pattern which might split.
3063 TRIAL is the insn providing PAT.
3064 LAST is non-zero if we should return the last insn of the sequence produced.
3065
3066 If this routine succeeds in splitting, it returns the first or last
3067 replacement insn depending on the value of LAST. Otherwise, it
3068 returns TRIAL. If the insn to be returned can be split, it will be. */
3069
3070 rtx
3071 try_split (pat, trial, last)
3072 rtx pat, trial;
3073 int last;
3074 {
3075 rtx before = PREV_INSN (trial);
3076 rtx after = NEXT_INSN (trial);
3077 int has_barrier = 0;
3078 rtx tem;
3079 rtx note, seq;
3080 int probability;
3081
3082 if (any_condjump_p (trial)
3083 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3084 split_branch_probability = INTVAL (XEXP (note, 0));
3085 probability = split_branch_probability;
3086
3087 seq = split_insns (pat, trial);
3088
3089 split_branch_probability = -1;
3090
3091 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3092 We may need to handle this specially. */
3093 if (after && GET_CODE (after) == BARRIER)
3094 {
3095 has_barrier = 1;
3096 after = NEXT_INSN (after);
3097 }
3098
3099 if (seq)
3100 {
3101 /* SEQ can either be a SEQUENCE or the pattern of a single insn.
3102 The latter case normally arises only when the new pattern is itself
3103 meant to be split again (SFmode on the 29k is an example).
3104 if (GET_CODE (seq) == SEQUENCE)
3105 {
3106 int i, njumps = 0;
3107
3108 /* Avoid infinite loop if any insn of the result matches
3109 the original pattern. */
3110 for (i = 0; i < XVECLEN (seq, 0); i++)
3111 if (GET_CODE (XVECEXP (seq, 0, i)) == INSN
3112 && rtx_equal_p (PATTERN (XVECEXP (seq, 0, i)), pat))
3113 return trial;
3114
3115 /* Mark labels. */
3116 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3117 if (GET_CODE (XVECEXP (seq, 0, i)) == JUMP_INSN)
3118 {
3119 rtx insn = XVECEXP (seq, 0, i);
3120 mark_jump_label (PATTERN (insn),
3121 XVECEXP (seq, 0, i), 0);
3122 njumps++;
3123 if (probability != -1
3124 && any_condjump_p (insn)
3125 && !find_reg_note (insn, REG_BR_PROB, 0))
3126 {
3127 /* We can preserve the REG_BR_PROB notes only if exactly
3128 one jump is created, otherwise the machine description
3129 is responsible for this step using
3130 split_branch_probability variable. */
3131 if (njumps != 1)
3132 abort ();
3133 REG_NOTES (insn)
3134 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3135 GEN_INT (probability),
3136 REG_NOTES (insn));
3137 }
3138 }
3139
3140 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3141 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3142 if (GET_CODE (trial) == CALL_INSN)
3143 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3144 if (GET_CODE (XVECEXP (seq, 0, i)) == CALL_INSN)
3145 CALL_INSN_FUNCTION_USAGE (XVECEXP (seq, 0, i))
3146 = CALL_INSN_FUNCTION_USAGE (trial);
3147
3148 /* Copy notes, particularly those related to the CFG. */
3149 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3150 {
3151 switch (REG_NOTE_KIND (note))
3152 {
3153 case REG_EH_REGION:
3154 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3155 {
3156 rtx insn = XVECEXP (seq, 0, i);
3157 if (GET_CODE (insn) == CALL_INSN
3158 || (flag_non_call_exceptions
3159 && may_trap_p (PATTERN (insn))))
3160 REG_NOTES (insn)
3161 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3162 XEXP (note, 0),
3163 REG_NOTES (insn));
3164 }
3165 break;
3166
3167 case REG_NORETURN:
3168 case REG_SETJMP:
3169 case REG_ALWAYS_RETURN:
3170 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3171 {
3172 rtx insn = XVECEXP (seq, 0, i);
3173 if (GET_CODE (insn) == CALL_INSN)
3174 REG_NOTES (insn)
3175 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3176 XEXP (note, 0),
3177 REG_NOTES (insn));
3178 }
3179 break;
3180
3181 case REG_NON_LOCAL_GOTO:
3182 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3183 {
3184 rtx insn = XVECEXP (seq, 0, i);
3185 if (GET_CODE (insn) == JUMP_INSN)
3186 REG_NOTES (insn)
3187 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3188 XEXP (note, 0),
3189 REG_NOTES (insn));
3190 }
3191 break;
3192
3193 default:
3194 break;
3195 }
3196 }
3197
3198 /* If there are LABELS inside the split insns, increment the
3199 usage count so we don't delete the label. */
3200 if (GET_CODE (trial) == INSN)
3201 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3202 if (GET_CODE (XVECEXP (seq, 0, i)) == INSN)
3203 mark_label_nuses (PATTERN (XVECEXP (seq, 0, i)));
3204
3205 tem = emit_insn_after (seq, trial);
3206
3207 delete_insn (trial);
3208 if (has_barrier)
3209 emit_barrier_after (tem);
3210
3211 /* Recursively call try_split for each new insn created; by the
3212 time control returns here that insn will be fully split, so
3213 set LAST and continue from the insn after the one returned.
3214 We can't use next_active_insn here since AFTER may be a note.
3215 Ignore deleted insns, which can occur when not optimizing. */
3216 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3217 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3218 tem = try_split (PATTERN (tem), tem, 1);
3219 }
3220 /* Avoid infinite loop if the result matches the original pattern. */
3221 else if (rtx_equal_p (seq, pat))
3222 return trial;
3223 else
3224 {
3225 PATTERN (trial) = seq;
3226 INSN_CODE (trial) = -1;
3227 try_split (seq, trial, last);
3228 }
3229
3230 /* Return either the first or the last insn, depending on which was
3231 requested. */
3232 return last
3233 ? (after ? PREV_INSN (after) : last_insn)
3234 : NEXT_INSN (before);
3235 }
3236
3237 return trial;
3238 }
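/* Usage sketch (illustrative): a pass that wants INSN split whenever the
   machine description allows it would write

     last = try_split (PATTERN (insn), insn, 1);

   and continue scanning from LAST, since on success INSN has been
   deleted and replaced by the insns produced by the applicable
   define_split.  */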
3239 \f
3240 /* Make and return an INSN rtx, initializing all its slots.
3241 Store PATTERN in the pattern slots. */
3242
3243 rtx
3244 make_insn_raw (pattern)
3245 rtx pattern;
3246 {
3247 rtx insn;
3248
3249 insn = rtx_alloc (INSN);
3250
3251 INSN_UID (insn) = cur_insn_uid++;
3252 PATTERN (insn) = pattern;
3253 INSN_CODE (insn) = -1;
3254 LOG_LINKS (insn) = NULL;
3255 REG_NOTES (insn) = NULL;
3256 INSN_SCOPE (insn) = NULL;
3257 BLOCK_FOR_INSN (insn) = NULL;
3258
3259 #ifdef ENABLE_RTL_CHECKING
3260 if (insn
3261 && INSN_P (insn)
3262 && (returnjump_p (insn)
3263 || (GET_CODE (PATTERN (insn)) == SET
3264 && SET_DEST (PATTERN (insn)) == pc_rtx)))
3265 {
3266 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
3267 debug_rtx (insn);
3268 }
3269 #endif
3270
3271 return insn;
3272 }
3273
3274 /* Like `make_insn' but make a JUMP_INSN instead of an insn. */
3275
3276 static rtx
3277 make_jump_insn_raw (pattern)
3278 rtx pattern;
3279 {
3280 rtx insn;
3281
3282 insn = rtx_alloc (JUMP_INSN);
3283 INSN_UID (insn) = cur_insn_uid++;
3284
3285 PATTERN (insn) = pattern;
3286 INSN_CODE (insn) = -1;
3287 LOG_LINKS (insn) = NULL;
3288 REG_NOTES (insn) = NULL;
3289 JUMP_LABEL (insn) = NULL;
3290 INSN_SCOPE (insn) = NULL;
3291 BLOCK_FOR_INSN (insn) = NULL;
3292
3293 return insn;
3294 }
3295
3296 /* Like `make_insn' but make a CALL_INSN instead of an insn. */
3297
3298 static rtx
3299 make_call_insn_raw (pattern)
3300 rtx pattern;
3301 {
3302 rtx insn;
3303
3304 insn = rtx_alloc (CALL_INSN);
3305 INSN_UID (insn) = cur_insn_uid++;
3306
3307 PATTERN (insn) = pattern;
3308 INSN_CODE (insn) = -1;
3309 LOG_LINKS (insn) = NULL;
3310 REG_NOTES (insn) = NULL;
3311 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3312 INSN_SCOPE (insn) = NULL;
3313 BLOCK_FOR_INSN (insn) = NULL;
3314
3315 return insn;
3316 }
3317 \f
3318 /* Add INSN to the end of the doubly-linked list.
3319 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3320
3321 void
3322 add_insn (insn)
3323 rtx insn;
3324 {
3325 PREV_INSN (insn) = last_insn;
3326 NEXT_INSN (insn) = 0;
3327
3328 if (NULL != last_insn)
3329 NEXT_INSN (last_insn) = insn;
3330
3331 if (NULL == first_insn)
3332 first_insn = insn;
3333
3334 last_insn = insn;
3335 }
3336
3337 /* Add INSN into the doubly-linked list after insn AFTER. This and
3338 the next should be the only functions called to insert an insn once
3339 delay slots have been filled since only they know how to update a
3340 SEQUENCE. */
3341
3342 void
3343 add_insn_after (insn, after)
3344 rtx insn, after;
3345 {
3346 rtx next = NEXT_INSN (after);
3347 basic_block bb;
3348
3349 if (optimize && INSN_DELETED_P (after))
3350 abort ();
3351
3352 NEXT_INSN (insn) = next;
3353 PREV_INSN (insn) = after;
3354
3355 if (next)
3356 {
3357 PREV_INSN (next) = insn;
3358 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3359 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3360 }
3361 else if (last_insn == after)
3362 last_insn = insn;
3363 else
3364 {
3365 struct sequence_stack *stack = seq_stack;
3366 /* Scan all pending sequences too. */
3367 for (; stack; stack = stack->next)
3368 if (after == stack->last)
3369 {
3370 stack->last = insn;
3371 break;
3372 }
3373
3374 if (stack == 0)
3375 abort ();
3376 }
3377
3378 if (GET_CODE (after) != BARRIER
3379 && GET_CODE (insn) != BARRIER
3380 && (bb = BLOCK_FOR_INSN (after)))
3381 {
3382 set_block_for_insn (insn, bb);
3383 if (INSN_P (insn))
3384 bb->flags |= BB_DIRTY;
3385 /* Should not happen as first in the BB is always
3386 either NOTE or LABEL. */
3387 if (bb->end == after
3388 /* Avoid clobbering of structure when creating new BB. */
3389 && GET_CODE (insn) != BARRIER
3390 && (GET_CODE (insn) != NOTE
3391 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3392 bb->end = insn;
3393 }
3394
3395 NEXT_INSN (after) = insn;
3396 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
3397 {
3398 rtx sequence = PATTERN (after);
3399 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3400 }
3401 }
3402
3403 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3404 the previous should be the only functions called to insert an insn once
3405 delay slots have been filled since only they know how to update a
3406 SEQUENCE. */
3407
3408 void
3409 add_insn_before (insn, before)
3410 rtx insn, before;
3411 {
3412 rtx prev = PREV_INSN (before);
3413 basic_block bb;
3414
3415 if (optimize && INSN_DELETED_P (before))
3416 abort ();
3417
3418 PREV_INSN (insn) = prev;
3419 NEXT_INSN (insn) = before;
3420
3421 if (prev)
3422 {
3423 NEXT_INSN (prev) = insn;
3424 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3425 {
3426 rtx sequence = PATTERN (prev);
3427 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3428 }
3429 }
3430 else if (first_insn == before)
3431 first_insn = insn;
3432 else
3433 {
3434 struct sequence_stack *stack = seq_stack;
3435 /* Scan all pending sequences too. */
3436 for (; stack; stack = stack->next)
3437 if (before == stack->first)
3438 {
3439 stack->first = insn;
3440 break;
3441 }
3442
3443 if (stack == 0)
3444 abort ();
3445 }
3446
3447 if (GET_CODE (before) != BARRIER
3448 && GET_CODE (insn) != BARRIER
3449 && (bb = BLOCK_FOR_INSN (before)))
3450 {
3451 set_block_for_insn (insn, bb);
3452 if (INSN_P (insn))
3453 bb->flags |= BB_DIRTY;
3454 /* Should not happen as first in the BB is always
3455 either NOTE or LABEL. */
3456 if (bb->head == insn
3457 /* Avoid clobbering of structure when creating new BB. */
3458 && GET_CODE (insn) != BARRIER
3459 && (GET_CODE (insn) != NOTE
3460 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3461 abort ();
3462 }
3463
3464 PREV_INSN (before) = insn;
3465 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
3466 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3467 }
3468
3469 /* Remove an insn from its doubly-linked list. This function knows how
3470 to handle sequences. */
3471 void
3472 remove_insn (insn)
3473 rtx insn;
3474 {
3475 rtx next = NEXT_INSN (insn);
3476 rtx prev = PREV_INSN (insn);
3477 basic_block bb;
3478
3479 if (prev)
3480 {
3481 NEXT_INSN (prev) = next;
3482 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3483 {
3484 rtx sequence = PATTERN (prev);
3485 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3486 }
3487 }
3488 else if (first_insn == insn)
3489 first_insn = next;
3490 else
3491 {
3492 struct sequence_stack *stack = seq_stack;
3493 /* Scan all pending sequences too. */
3494 for (; stack; stack = stack->next)
3495 if (insn == stack->first)
3496 {
3497 stack->first = next;
3498 break;
3499 }
3500
3501 if (stack == 0)
3502 abort ();
3503 }
3504
3505 if (next)
3506 {
3507 PREV_INSN (next) = prev;
3508 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3509 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3510 }
3511 else if (last_insn == insn)
3512 last_insn = prev;
3513 else
3514 {
3515 struct sequence_stack *stack = seq_stack;
3516 /* Scan all pending sequences too. */
3517 for (; stack; stack = stack->next)
3518 if (insn == stack->last)
3519 {
3520 stack->last = prev;
3521 break;
3522 }
3523
3524 if (stack == 0)
3525 abort ();
3526 }
3527 if (GET_CODE (insn) != BARRIER
3528 && (bb = BLOCK_FOR_INSN (insn)))
3529 {
3530 if (INSN_P (insn))
3531 bb->flags |= BB_DIRTY;
3532 if (bb->head == insn)
3533 {
3534 /* Never ever delete the basic block note without deleting the whole
3535 basic block. */
3536 if (GET_CODE (insn) == NOTE)
3537 abort ();
3538 bb->head = next;
3539 }
3540 if (bb->end == insn)
3541 bb->end = prev;
3542 }
3543 }
3544
3545 /* Delete all insns made since FROM.
3546 FROM becomes the new last instruction. */
3547
3548 void
3549 delete_insns_since (from)
3550 rtx from;
3551 {
3552 if (from == 0)
3553 first_insn = 0;
3554 else
3555 NEXT_INSN (from) = 0;
3556 last_insn = from;
3557 }
3558
3559 /* This function is deprecated, please use sequences instead.
3560
3561 Move a consecutive bunch of insns to a different place in the chain.
3562 The insns to be moved are those between FROM and TO.
3563 They are moved to a new position after the insn AFTER.
3564 AFTER must not be FROM or TO or any insn in between.
3565
3566 This function does not know about SEQUENCEs and hence should not be
3567 called after delay-slot filling has been done. */
3568
3569 void
3570 reorder_insns_nobb (from, to, after)
3571 rtx from, to, after;
3572 {
3573 /* Splice this bunch out of where it is now. */
3574 if (PREV_INSN (from))
3575 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3576 if (NEXT_INSN (to))
3577 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3578 if (last_insn == to)
3579 last_insn = PREV_INSN (from);
3580 if (first_insn == from)
3581 first_insn = NEXT_INSN (to);
3582
3583 /* Make the new neighbors point to it and it to them. */
3584 if (NEXT_INSN (after))
3585 PREV_INSN (NEXT_INSN (after)) = to;
3586
3587 NEXT_INSN (to) = NEXT_INSN (after);
3588 PREV_INSN (from) = after;
3589 NEXT_INSN (after) = from;
3590 if (after == last_insn)
3591 last_insn = to;
3592 }
3593
3594 /* Same as function above, but take care to update BB boundaries. */
3595 void
3596 reorder_insns (from, to, after)
3597 rtx from, to, after;
3598 {
3599 rtx prev = PREV_INSN (from);
3600 basic_block bb, bb2;
3601
3602 reorder_insns_nobb (from, to, after);
3603
3604 if (GET_CODE (after) != BARRIER
3605 && (bb = BLOCK_FOR_INSN (after)))
3606 {
3607 rtx x;
3608 bb->flags |= BB_DIRTY;
3609
3610 if (GET_CODE (from) != BARRIER
3611 && (bb2 = BLOCK_FOR_INSN (from)))
3612 {
3613 if (bb2->end == to)
3614 bb2->end = prev;
3615 bb2->flags |= BB_DIRTY;
3616 }
3617
3618 if (bb->end == after)
3619 bb->end = to;
3620
3621 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3622 set_block_for_insn (x, bb);
3623 }
3624 }
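/* Illustrative sketch: moving a single insn INSN to just after AFTER
   while keeping basic-block data consistent is simply

     reorder_insns (insn, insn, after);

   since FROM and TO delimit an inclusive range.  */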
3625
3626 /* Return the line note insn preceding INSN. */
3627
3628 static rtx
3629 find_line_note (insn)
3630 rtx insn;
3631 {
3632 if (no_line_numbers)
3633 return 0;
3634
3635 for (; insn; insn = PREV_INSN (insn))
3636 if (GET_CODE (insn) == NOTE
3637 && NOTE_LINE_NUMBER (insn) >= 0)
3638 break;
3639
3640 return insn;
3641 }
3642
3643 /* Like reorder_insns, but inserts line notes to preserve the line numbers
3644 of the moved insns when debugging. This may insert a note between AFTER
3645 and FROM, and another one after TO. */
3646
3647 void
3648 reorder_insns_with_line_notes (from, to, after)
3649 rtx from, to, after;
3650 {
3651 rtx from_line = find_line_note (from);
3652 rtx after_line = find_line_note (after);
3653
3654 reorder_insns (from, to, after);
3655
3656 if (from_line == after_line)
3657 return;
3658
3659 if (from_line)
3660 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3661 NOTE_LINE_NUMBER (from_line),
3662 after);
3663 if (after_line)
3664 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3665 NOTE_LINE_NUMBER (after_line),
3666 to);
3667 }
3668
3669 /* Remove unnecessary notes from the instruction stream. */
3670
3671 void
3672 remove_unnecessary_notes ()
3673 {
3674 rtx block_stack = NULL_RTX;
3675 rtx eh_stack = NULL_RTX;
3676 rtx insn;
3677 rtx next;
3678 rtx tmp;
3679
3680 /* We must not remove the first instruction in the function because
3681 the compiler depends on the first instruction being a note. */
3682 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3683 {
3684 /* Remember what's next. */
3685 next = NEXT_INSN (insn);
3686
3687 /* We're only interested in notes. */
3688 if (GET_CODE (insn) != NOTE)
3689 continue;
3690
3691 switch (NOTE_LINE_NUMBER (insn))
3692 {
3693 case NOTE_INSN_DELETED:
3694 case NOTE_INSN_LOOP_END_TOP_COND:
3695 remove_insn (insn);
3696 break;
3697
3698 case NOTE_INSN_EH_REGION_BEG:
3699 eh_stack = alloc_INSN_LIST (insn, eh_stack);
3700 break;
3701
3702 case NOTE_INSN_EH_REGION_END:
3703 /* Too many end notes. */
3704 if (eh_stack == NULL_RTX)
3705 abort ();
3706 /* Mismatched nesting. */
3707 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
3708 abort ();
3709 tmp = eh_stack;
3710 eh_stack = XEXP (eh_stack, 1);
3711 free_INSN_LIST_node (tmp);
3712 break;
3713
3714 case NOTE_INSN_BLOCK_BEG:
3715 /* By now, all notes indicating lexical blocks should have
3716 NOTE_BLOCK filled in. */
3717 if (NOTE_BLOCK (insn) == NULL_TREE)
3718 abort ();
3719 block_stack = alloc_INSN_LIST (insn, block_stack);
3720 break;
3721
3722 case NOTE_INSN_BLOCK_END:
3723 /* Too many end notes. */
3724 if (block_stack == NULL_RTX)
3725 abort ();
3726 /* Mismatched nesting. */
3727 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
3728 abort ();
3729 tmp = block_stack;
3730 block_stack = XEXP (block_stack, 1);
3731 free_INSN_LIST_node (tmp);
3732
3733 /* Scan back to see if there are any non-note instructions
3734 between INSN and the beginning of this block. If not,
3735 then there is no PC range in the generated code that will
3736 actually be in this block, so there's no point in
3737 remembering the existence of the block. */
3738 for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
3739 {
3740 /* This block contains a real instruction. Note that we
3741 don't include labels; if the only thing in the block
3742 is a label, then there are still no PC values that
3743 lie within the block. */
3744 if (INSN_P (tmp))
3745 break;
3746
3747 /* We're only interested in NOTEs. */
3748 if (GET_CODE (tmp) != NOTE)
3749 continue;
3750
3751 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
3752 {
3753 /* We just verified that this BLOCK matches us with
3754 the block_stack check above. Never delete the
3755 BLOCK for the outermost scope of the function; we
3756 can refer to names from that scope even if the
3757 block notes are messed up. */
3758 if (! is_body_block (NOTE_BLOCK (insn))
3759 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
3760 {
3761 remove_insn (tmp);
3762 remove_insn (insn);
3763 }
3764 break;
3765 }
3766 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
3767 /* There's a nested block. We need to leave the
3768 current block in place since otherwise the debugger
3769 wouldn't be able to show symbols from our block in
3770 the nested block. */
3771 break;
3772 }
3773 }
3774 }
3775
3776 /* Too many begin notes. */
3777 if (block_stack || eh_stack)
3778 abort ();
3779 }
3780
3781 \f
3782 /* Emit an insn of given code and pattern
3783 at a specified place within the doubly-linked list. */
3784
3785 /* Make an instruction with body PATTERN
3786 and output it before the instruction BEFORE. */
3787
3788 rtx
3789 emit_insn_before (pattern, before)
3790 rtx pattern, before;
3791 {
3792 rtx insn = before;
3793
3794 if (GET_CODE (pattern) == SEQUENCE)
3795 {
3796 int i;
3797
3798 for (i = 0; i < XVECLEN (pattern, 0); i++)
3799 {
3800 insn = XVECEXP (pattern, 0, i);
3801 add_insn_before (insn, before);
3802 }
3803 }
3804 else
3805 {
3806 insn = make_insn_raw (pattern);
3807 add_insn_before (insn, before);
3808 }
3809
3810 return insn;
3811 }
3812
3813 /* Make an instruction with body PATTERN and code JUMP_INSN
3814 and output it before the instruction BEFORE. */
3815
3816 rtx
3817 emit_jump_insn_before (pattern, before)
3818 rtx pattern, before;
3819 {
3820 rtx insn;
3821
3822 if (GET_CODE (pattern) == SEQUENCE)
3823 insn = emit_insn_before (pattern, before);
3824 else
3825 {
3826 insn = make_jump_insn_raw (pattern);
3827 add_insn_before (insn, before);
3828 }
3829
3830 return insn;
3831 }
3832
3833 /* Make an instruction with body PATTERN and code CALL_INSN
3834 and output it before the instruction BEFORE. */
3835
3836 rtx
3837 emit_call_insn_before (pattern, before)
3838 rtx pattern, before;
3839 {
3840 rtx insn;
3841
3842 if (GET_CODE (pattern) == SEQUENCE)
3843 insn = emit_insn_before (pattern, before);
3844 else
3845 {
3846 insn = make_call_insn_raw (pattern);
3847 add_insn_before (insn, before);
3848 PUT_CODE (insn, CALL_INSN);
3849 }
3850
3851 return insn;
3852 }
3853
3854 /* Make an instruction with body PATTERN and code CALL_INSN
3855 and output it after the instruction BEFORE. */
3856
3857 rtx
3858 emit_call_insn_after (pattern, before)
3859 rtx pattern, before;
3860 {
3861 rtx insn;
3862
3863 if (GET_CODE (pattern) == SEQUENCE)
3864 insn = emit_insn_after (pattern, before);
3865 else
3866 {
3867 insn = make_call_insn_raw (pattern);
3868 add_insn_after (insn, before);
3869 PUT_CODE (insn, CALL_INSN);
3870 }
3871
3872 return insn;
3873 }
3874
3875 /* Make an insn of code BARRIER
3876 and output it before the insn BEFORE. */
3877
3878 rtx
3879 emit_barrier_before (before)
3880 rtx before;
3881 {
3882 rtx insn = rtx_alloc (BARRIER);
3883
3884 INSN_UID (insn) = cur_insn_uid++;
3885
3886 add_insn_before (insn, before);
3887 return insn;
3888 }
3889
3890 /* Emit the label LABEL before the insn BEFORE. */
3891
3892 rtx
3893 emit_label_before (label, before)
3894 rtx label, before;
3895 {
3896 /* This can be called twice for the same label as a result of the
3897 confusion that follows a syntax error! So make it harmless. */
3898 if (INSN_UID (label) == 0)
3899 {
3900 INSN_UID (label) = cur_insn_uid++;
3901 add_insn_before (label, before);
3902 }
3903
3904 return label;
3905 }
3906
3907 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
3908
3909 rtx
3910 emit_note_before (subtype, before)
3911 int subtype;
3912 rtx before;
3913 {
3914 rtx note = rtx_alloc (NOTE);
3915 INSN_UID (note) = cur_insn_uid++;
3916 NOTE_SOURCE_FILE (note) = 0;
3917 NOTE_LINE_NUMBER (note) = subtype;
3918 BLOCK_FOR_INSN (note) = NULL;
3919
3920 add_insn_before (note, before);
3921 return note;
3922 }
3923 \f
3924 /* Make an insn of code INSN with body PATTERN
3925 and output it after the insn AFTER. */
3926
3927 rtx
3928 emit_insn_after (pattern, after)
3929 rtx pattern, after;
3930 {
3931 rtx insn = after;
3932
3933 if (GET_CODE (pattern) == SEQUENCE)
3934 {
3935 int i;
3936
3937 for (i = 0; i < XVECLEN (pattern, 0); i++)
3938 {
3939 insn = XVECEXP (pattern, 0, i);
3940 add_insn_after (insn, after);
3941 after = insn;
3942 }
3943 }
3944 else
3945 {
3946 insn = make_insn_raw (pattern);
3947 add_insn_after (insn, after);
3948 }
3949
3950 return insn;
3951 }
3952
3953 /* Similar to emit_insn_after, except that line notes are to be inserted so
3954 as to act as if this insn were at FROM. */
3955
3956 void
3957 emit_insn_after_with_line_notes (pattern, after, from)
3958 rtx pattern, after, from;
3959 {
3960 rtx from_line = find_line_note (from);
3961 rtx after_line = find_line_note (after);
3962 rtx insn = emit_insn_after (pattern, after);
3963
3964 if (from_line)
3965 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3966 NOTE_LINE_NUMBER (from_line),
3967 after);
3968
3969 if (after_line)
3970 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3971 NOTE_LINE_NUMBER (after_line),
3972 insn);
3973 }
3974
3975 /* Make an insn of code JUMP_INSN with body PATTERN
3976 and output it after the insn AFTER. */
3977
3978 rtx
3979 emit_jump_insn_after (pattern, after)
3980 rtx pattern, after;
3981 {
3982 rtx insn;
3983
3984 if (GET_CODE (pattern) == SEQUENCE)
3985 insn = emit_insn_after (pattern, after);
3986 else
3987 {
3988 insn = make_jump_insn_raw (pattern);
3989 add_insn_after (insn, after);
3990 }
3991
3992 return insn;
3993 }
3994
3995 /* Make an insn of code BARRIER
3996 and output it after the insn AFTER. */
3997
3998 rtx
3999 emit_barrier_after (after)
4000 rtx after;
4001 {
4002 rtx insn = rtx_alloc (BARRIER);
4003
4004 INSN_UID (insn) = cur_insn_uid++;
4005
4006 add_insn_after (insn, after);
4007 return insn;
4008 }
4009
4010 /* Emit the label LABEL after the insn AFTER. */
4011
4012 rtx
4013 emit_label_after (label, after)
4014 rtx label, after;
4015 {
4016 /* This can be called twice for the same label
4017 as a result of the confusion that follows a syntax error!
4018 So make it harmless. */
4019 if (INSN_UID (label) == 0)
4020 {
4021 INSN_UID (label) = cur_insn_uid++;
4022 add_insn_after (label, after);
4023 }
4024
4025 return label;
4026 }
4027
4028 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4029
4030 rtx
4031 emit_note_after (subtype, after)
4032 int subtype;
4033 rtx after;
4034 {
4035 rtx note = rtx_alloc (NOTE);
4036 INSN_UID (note) = cur_insn_uid++;
4037 NOTE_SOURCE_FILE (note) = 0;
4038 NOTE_LINE_NUMBER (note) = subtype;
4039 BLOCK_FOR_INSN (note) = NULL;
4040 add_insn_after (note, after);
4041 return note;
4042 }
4043
4044 /* Emit a line note for FILE and LINE after the insn AFTER. */
4045
4046 rtx
4047 emit_line_note_after (file, line, after)
4048 const char *file;
4049 int line;
4050 rtx after;
4051 {
4052 rtx note;
4053
4054 if (no_line_numbers && line > 0)
4055 {
4056 cur_insn_uid++;
4057 return 0;
4058 }
4059
4060 note = rtx_alloc (NOTE);
4061 INSN_UID (note) = cur_insn_uid++;
4062 NOTE_SOURCE_FILE (note) = file;
4063 NOTE_LINE_NUMBER (note) = line;
4064 BLOCK_FOR_INSN (note) = NULL;
4065 add_insn_after (note, after);
4066 return note;
4067 }
4068 \f
4069 /* Make an insn of code INSN with pattern PATTERN
4070 and add it to the end of the doubly-linked list.
4071 If PATTERN is a SEQUENCE, take the elements of it
4072 and emit an insn for each element.
4073
4074 Returns the last insn emitted. */
4075
4076 rtx
4077 emit_insn (pattern)
4078 rtx pattern;
4079 {
4080 rtx insn = last_insn;
4081
4082 if (GET_CODE (pattern) == SEQUENCE)
4083 {
4084 int i;
4085
4086 for (i = 0; i < XVECLEN (pattern, 0); i++)
4087 {
4088 insn = XVECEXP (pattern, 0, i);
4089 add_insn (insn);
4090 }
4091 }
4092 else
4093 {
4094 insn = make_insn_raw (pattern);
4095 add_insn (insn);
4096 }
4097
4098 return insn;
4099 }
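
/* A sketch of the common expander pattern built on emit_insn: create
   a fresh pseudo and append a SET computing it.  Everything here is
   hypothetical except the emit-rtl entry points themselves.  */
#if 0
{
  rtx tmp = gen_reg_rtx (SImode);

  /* Appends (set (reg tmp) (const_int 42)) to the current chain.  */
  emit_insn (gen_rtx_SET (VOIDmode, tmp, GEN_INT (42)));
}
#endif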
4100
4101 /* Emit the insns in a chain starting with INSN.
4102 Return the last insn emitted. */
4103
4104 rtx
4105 emit_insns (insn)
4106 rtx insn;
4107 {
4108 rtx last = 0;
4109
4110 while (insn)
4111 {
4112 rtx next = NEXT_INSN (insn);
4113 add_insn (insn);
4114 last = insn;
4115 insn = next;
4116 }
4117
4118 return last;
4119 }
4120
4121 /* Emit the insns in a chain starting with INSN and place them in front of
4122 the insn BEFORE. Return the last insn emitted. */
4123
4124 rtx
4125 emit_insns_before (insn, before)
4126 rtx insn;
4127 rtx before;
4128 {
4129 rtx last = 0;
4130
4131 while (insn)
4132 {
4133 rtx next = NEXT_INSN (insn);
4134 add_insn_before (insn, before);
4135 last = insn;
4136 insn = next;
4137 }
4138
4139 return last;
4140 }
4141
4142 /* Emit the insns in a chain starting with FIRST and place them after
4143    the insn AFTER.  Return the last insn emitted.  */
4144
4145 rtx
4146 emit_insns_after (first, after)
4147 rtx first;
4148 rtx after;
4149 {
4150 rtx last;
4151 rtx after_after;
4152 basic_block bb;
4153
4154 if (!after)
4155 abort ();
4156
4157 if (!first)
4158 return after;
4159
4160 if (GET_CODE (after) != BARRIER
4161 && (bb = BLOCK_FOR_INSN (after)))
4162 {
4163 bb->flags |= BB_DIRTY;
4164       for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4165 	if (GET_CODE (last) != BARRIER)
4166 	  set_block_for_insn (last, bb);
      /* The loop above stops at the final insn of the chain; give
	 that one a block as well.  */
4167       if (GET_CODE (last) != BARRIER)
4168 	set_block_for_insn (last, bb);
4169 if (bb->end == after)
4170 bb->end = last;
4171 }
4172 else
4173 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4174 continue;
4175
4176 after_after = NEXT_INSN (after);
4177
4178 NEXT_INSN (after) = first;
4179 PREV_INSN (first) = after;
4180 NEXT_INSN (last) = after_after;
4181 if (after_after)
4182 PREV_INSN (after_after) = last;
4183
4184 if (after == last_insn)
4185 last_insn = last;
4186 return last;
4187 }
4188
4189 /* Make an insn of code JUMP_INSN with pattern PATTERN
4190 and add it to the end of the doubly-linked list. */
4191
4192 rtx
4193 emit_jump_insn (pattern)
4194 rtx pattern;
4195 {
4196 if (GET_CODE (pattern) == SEQUENCE)
4197 return emit_insn (pattern);
4198 else
4199 {
4200 rtx insn = make_jump_insn_raw (pattern);
4201 add_insn (insn);
4202 return insn;
4203 }
4204 }
4205
4206 /* Make an insn of code CALL_INSN with pattern PATTERN
4207 and add it to the end of the doubly-linked list. */
4208
4209 rtx
4210 emit_call_insn (pattern)
4211 rtx pattern;
4212 {
4213 if (GET_CODE (pattern) == SEQUENCE)
4214 return emit_insn (pattern);
4215 else
4216 {
4217 rtx insn = make_call_insn_raw (pattern);
4218 add_insn (insn);
4219 PUT_CODE (insn, CALL_INSN);
4220 return insn;
4221 }
4222 }
4223
4224 /* Add the label LABEL to the end of the doubly-linked list. */
4225
4226 rtx
4227 emit_label (label)
4228 rtx label;
4229 {
4230 /* This can be called twice for the same label
4231 as a result of the confusion that follows a syntax error!
4232 So make it harmless. */
4233 if (INSN_UID (label) == 0)
4234 {
4235 INSN_UID (label) = cur_insn_uid++;
4236 add_insn (label);
4237 }
4238 return label;
4239 }
4240
4241 /* Make an insn of code BARRIER
4242 and add it to the end of the doubly-linked list. */
4243
4244 rtx
4245 emit_barrier ()
4246 {
4247 rtx barrier = rtx_alloc (BARRIER);
4248 INSN_UID (barrier) = cur_insn_uid++;
4249 add_insn (barrier);
4250 return barrier;
4251 }
4252
4253 /* Make an insn of code NOTE
4254 with data-fields specified by FILE and LINE
4255 and add it to the end of the doubly-linked list,
4256 but only if line-numbers are desired for debugging info. */
4257
4258 rtx
4259 emit_line_note (file, line)
4260 const char *file;
4261 int line;
4262 {
4263 set_file_and_line_for_stmt (file, line);
4264
4265 #if 0
4266 if (no_line_numbers)
4267 return 0;
4268 #endif
4269
4270 return emit_note (file, line);
4271 }
4272
4273 /* Make an insn of code NOTE
4274 with data-fields specified by FILE and LINE
4275 and add it to the end of the doubly-linked list.
4276 If it is a line-number NOTE, omit it if it matches the previous one. */
4277
4278 rtx
4279 emit_note (file, line)
4280 const char *file;
4281 int line;
4282 {
4283 rtx note;
4284
4285 if (line > 0)
4286 {
4287 if (file && last_filename && !strcmp (file, last_filename)
4288 && line == last_linenum)
4289 return 0;
4290 last_filename = file;
4291 last_linenum = line;
4292 }
4293
4294 if (no_line_numbers && line > 0)
4295 {
4296 cur_insn_uid++;
4297 return 0;
4298 }
4299
4300 note = rtx_alloc (NOTE);
4301 INSN_UID (note) = cur_insn_uid++;
4302 NOTE_SOURCE_FILE (note) = file;
4303 NOTE_LINE_NUMBER (note) = line;
4304 BLOCK_FOR_INSN (note) = NULL;
4305 add_insn (note);
4306 return note;
4307 }
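
/* emit_note serves double duty: a positive LINE emits a source-line
   note, while the negative NOTE_INSN_* codes emit marker notes and
   bypass the line-number bookkeeping above.  A sketch:  */
#if 0
emit_note ("foo.c", 42);              /* Source position note.  */
emit_note (NULL, NOTE_INSN_DELETED);  /* Marker note; FILE unused.  */
#endif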
4308
4309 /* Emit a NOTE, and don't omit it even if LINE is the previous note. */
4310
4311 rtx
4312 emit_line_note_force (file, line)
4313 const char *file;
4314 int line;
4315 {
4316 last_linenum = -1;
4317 return emit_line_note (file, line);
4318 }
4319
4320 /* Cause next statement to emit a line note even if the line number
4321 has not changed. This is used at the beginning of a function. */
4322
4323 void
4324 force_next_line_note ()
4325 {
4326 last_linenum = -1;
4327 }
4328
4329 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4330 note of this type already exists, remove it first. */
4331
4332 rtx
4333 set_unique_reg_note (insn, kind, datum)
4334 rtx insn;
4335 enum reg_note kind;
4336 rtx datum;
4337 {
4338 rtx note = find_reg_note (insn, kind, NULL_RTX);
4339
4340 switch (kind)
4341 {
4342 case REG_EQUAL:
4343 case REG_EQUIV:
4344 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4345 has multiple sets (some callers assume single_set
4346 means the insn only has one set, when in fact it
4347 	 means the insn only has one *useful* set).  */
4348 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4349 {
4350 if (note)
4351 abort ();
4352 return NULL_RTX;
4353 }
4354
4355 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4356 It serves no useful purpose and breaks eliminate_regs. */
4357 if (GET_CODE (datum) == ASM_OPERANDS)
4358 return NULL_RTX;
4359 break;
4360
4361 default:
4362 break;
4363 }
4364
4365 if (note)
4366 {
4367 XEXP (note, 0) = datum;
4368 return note;
4369 }
4370
4371 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4372 return REG_NOTES (insn);
4373 }
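
/* Typical use of set_unique_reg_note: record that INSN's destination
   is known to equal a constant, replacing any stale REG_EQUAL note
   in the process.  INSN and VAL are hypothetical.  */
#if 0
set_unique_reg_note (insn, REG_EQUAL, GEN_INT (val));
#endif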
4374 \f
4375 /* Return an indication of which type of insn should have X as a body.
4376 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4377
4378 enum rtx_code
4379 classify_insn (x)
4380 rtx x;
4381 {
4382 if (GET_CODE (x) == CODE_LABEL)
4383 return CODE_LABEL;
4384 if (GET_CODE (x) == CALL)
4385 return CALL_INSN;
4386 if (GET_CODE (x) == RETURN)
4387 return JUMP_INSN;
4388 if (GET_CODE (x) == SET)
4389 {
4390 if (SET_DEST (x) == pc_rtx)
4391 return JUMP_INSN;
4392 else if (GET_CODE (SET_SRC (x)) == CALL)
4393 return CALL_INSN;
4394 else
4395 return INSN;
4396 }
4397 if (GET_CODE (x) == PARALLEL)
4398 {
4399 int j;
4400 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4401 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4402 return CALL_INSN;
4403 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4404 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4405 return JUMP_INSN;
4406 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4407 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4408 return CALL_INSN;
4409 }
4410 return INSN;
4411 }
4412
4413 /* Emit the rtl pattern X as an appropriate kind of insn.
4414 If X is a label, it is simply added into the insn chain. */
4415
4416 rtx
4417 emit (x)
4418 rtx x;
4419 {
4420 enum rtx_code code = classify_insn (x);
4421
4422 if (code == CODE_LABEL)
4423 return emit_label (x);
4424 else if (code == INSN)
4425 return emit_insn (x);
4426 else if (code == JUMP_INSN)
4427 {
4428 rtx insn = emit_jump_insn (x);
4429 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4430 return emit_barrier ();
4431 return insn;
4432 }
4433 else if (code == CALL_INSN)
4434 return emit_call_insn (x);
4435 else
4436 abort ();
4437 }
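
/* How classify_insn drives emit: a SET whose destination is the pc
   is classified as a JUMP_INSN, any other SET as a plain INSN.
   LABEL is hypothetical.  */
#if 0
/* Emits a JUMP_INSN; since the jump is unconditional, emit also
   appends a barrier and returns it.  */
emit (gen_rtx_SET (VOIDmode, pc_rtx,
                   gen_rtx_LABEL_REF (VOIDmode, label)));
#endif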
4438 \f
4439 /* Begin emitting insns to a sequence which can be packaged in an
4440 RTL_EXPR. If this sequence will contain something that might cause
4441 the compiler to pop arguments to function calls (because those
4442 pops have previously been deferred; see INHIBIT_DEFER_POP for more
4443 details), use do_pending_stack_adjust before calling this function.
4444 That will ensure that the deferred pops are not accidentally
4445 emitted in the middle of this sequence. */
4446
4447 void
4448 start_sequence ()
4449 {
4450 struct sequence_stack *tem;
4451
4452 tem = (struct sequence_stack *) xmalloc (sizeof (struct sequence_stack));
4453
4454 tem->next = seq_stack;
4455 tem->first = first_insn;
4456 tem->last = last_insn;
4457 tem->sequence_rtl_expr = seq_rtl_expr;
4458
4459 seq_stack = tem;
4460
4461 first_insn = 0;
4462 last_insn = 0;
4463 }
4464
4465 /* Similarly, but indicate that this sequence will be placed in T, an
4466 RTL_EXPR. See the documentation for start_sequence for more
4467 information about how to use this function. */
4468
4469 void
4470 start_sequence_for_rtl_expr (t)
4471 tree t;
4472 {
4473 start_sequence ();
4474
4475 seq_rtl_expr = t;
4476 }
4477
4478 /* Set up the insn chain starting with FIRST as the current sequence,
4479 saving the previously current one. See the documentation for
4480 start_sequence for more information about how to use this function. */
4481
4482 void
4483 push_to_sequence (first)
4484 rtx first;
4485 {
4486 rtx last;
4487
4488 start_sequence ();
4489
4490 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4491
4492 first_insn = first;
4493 last_insn = last;
4494 }
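
/* A sketch of the push_to_sequence idiom: temporarily make a
   detached chain current, append to it, and recover the (possibly
   new) head.  CHAIN and PAT are hypothetical.  */
#if 0
push_to_sequence (chain);
emit_insn (pat);
chain = get_insns ();
end_sequence ();
#endif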
4495
4496 /* Set up the insn chain running from FIRST to LAST as the current
     sequence.  */
4497
4498 void
4499 push_to_full_sequence (first, last)
4500 rtx first, last;
4501 {
4502 start_sequence ();
4503 first_insn = first;
4504 last_insn = last;
4505 /* We really should have the end of the insn chain here. */
4506 if (last && NEXT_INSN (last))
4507 abort ();
4508 }
4509
4510 /* Set up the outer-level insn chain
4511 as the current sequence, saving the previously current one. */
4512
4513 void
4514 push_topmost_sequence ()
4515 {
4516 struct sequence_stack *stack, *top = NULL;
4517
4518 start_sequence ();
4519
4520 for (stack = seq_stack; stack; stack = stack->next)
4521 top = stack;
4522
4523 first_insn = top->first;
4524 last_insn = top->last;
4525 seq_rtl_expr = top->sequence_rtl_expr;
4526 }
4527
4528 /* After emitting to the outer-level insn chain, update the outer-level
4529 insn chain, and restore the previous saved state. */
4530
4531 void
4532 pop_topmost_sequence ()
4533 {
4534 struct sequence_stack *stack, *top = NULL;
4535
4536 for (stack = seq_stack; stack; stack = stack->next)
4537 top = stack;
4538
4539 top->first = first_insn;
4540 top->last = last_insn;
4541 /* ??? Why don't we save seq_rtl_expr here? */
4542
4543 end_sequence ();
4544 }
4545
4546 /* After emitting to a sequence, restore previous saved state.
4547
4548 To get the contents of the sequence just made, you must call
4549 `gen_sequence' *before* calling here.
4550
4551 If the compiler might have deferred popping arguments while
4552 generating this sequence, and this sequence will not be immediately
4553 inserted into the instruction stream, use do_pending_stack_adjust
4554 before calling gen_sequence. That will ensure that the deferred
4555 pops are inserted into this sequence, and not into some random
4556 location in the instruction stream. See INHIBIT_DEFER_POP for more
4557 information about deferred popping of arguments. */
4558
4559 void
4560 end_sequence ()
4561 {
4562 struct sequence_stack *tem = seq_stack;
4563
4564 first_insn = tem->first;
4565 last_insn = tem->last;
4566 seq_rtl_expr = tem->sequence_rtl_expr;
4567 seq_stack = tem->next;
4568
4569 free (tem);
4570 }
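
/* The canonical pairing of start_sequence and end_sequence: emit
   insns on the side, detach them, and splice them in elsewhere.
   TEMP, X, and BEFORE are hypothetical.  */
#if 0
{
  rtx seq;

  start_sequence ();
  emit_move_insn (temp, x);
  seq = get_insns ();
  end_sequence ();

  emit_insns_before (seq, before);
}
#endif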
4571
4572 /* This works like end_sequence, but records the old sequence in FIRST
4573 and LAST. */
4574
4575 void
4576 end_full_sequence (first, last)
4577 rtx *first, *last;
4578 {
4579 *first = first_insn;
4580 *last = last_insn;
4581 end_sequence ();
4582 }
4583
4584 /* Return 1 if currently emitting into a sequence. */
4585
4586 int
4587 in_sequence_p ()
4588 {
4589 return seq_stack != 0;
4590 }
4591
4592 /* Generate a SEQUENCE rtx containing the insns already emitted
4593 to the current sequence.
4594
4595 This is how the gen_... function from a DEFINE_EXPAND
4596 constructs the SEQUENCE that it returns. */
4597
4598 rtx
4599 gen_sequence ()
4600 {
4601 rtx result;
4602 rtx tem;
4603 int i;
4604 int len;
4605
4606 /* Count the insns in the chain. */
4607 len = 0;
4608 for (tem = first_insn; tem; tem = NEXT_INSN (tem))
4609 len++;
4610
4611 /* If only one insn, return it rather than a SEQUENCE.
4612 (Now that we cache SEQUENCE expressions, it isn't worth special-casing
4613 the case of an empty list.)
4614 We only return the pattern of an insn if its code is INSN and it
4615 has no notes. This ensures that no information gets lost. */
4616 if (len == 1
4617 && GET_CODE (first_insn) == INSN
4618 && ! RTX_FRAME_RELATED_P (first_insn)
4619 /* Don't throw away any reg notes. */
4620 && REG_NOTES (first_insn) == 0)
4621 return PATTERN (first_insn);
4622
4623 result = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (len));
4624
4625 for (i = 0, tem = first_insn; tem; tem = NEXT_INSN (tem), i++)
4626 XVECEXP (result, 0, i) = tem;
4627
4628 return result;
4629 }
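
/* Because gen_sequence collapses a single-insn chain to its bare
   pattern, callers must accept either shape; emit_insn and friends
   handle both.  SEQ, PAT1, and PAT2 are hypothetical.  */
#if 0
start_sequence ();
emit_insn (pat1);
emit_insn (pat2);
seq = gen_sequence ();  /* A SEQUENCE here; a lone pattern if only
                           one insn had been emitted.  */
end_sequence ();
emit_insn (seq);
#endif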
4630 \f
4631 /* Put the various virtual registers into REGNO_REG_RTX. */
4632
4633 void
4634 init_virtual_regs (es)
4635 struct emit_status *es;
4636 {
4637 rtx *ptr = es->x_regno_reg_rtx;
4638 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
4639 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
4640 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
4641 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
4642 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
4643 }
4644
4645 \f
4646 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
4647 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
4648 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
4649 static int copy_insn_n_scratches;
4650
4651 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4652 copied an ASM_OPERANDS.
4653 In that case, it is the original input-operand vector. */
4654 static rtvec orig_asm_operands_vector;
4655
4656 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4657 copied an ASM_OPERANDS.
4658 In that case, it is the copied input-operand vector. */
4659 static rtvec copy_asm_operands_vector;
4660
4661 /* Likewise for the constraints vector. */
4662 static rtvec orig_asm_constraints_vector;
4663 static rtvec copy_asm_constraints_vector;
4664
4665 /* Recursively create a new copy of an rtx for copy_insn.
4666 This function differs from copy_rtx in that it handles SCRATCHes and
4667 ASM_OPERANDs properly.
4668 Normally, this function is not used directly; use copy_insn as front end.
4669 However, you could first copy an insn pattern with copy_insn and then use
4670 this function afterwards to properly copy any REG_NOTEs containing
4671 SCRATCHes. */
4672
4673 rtx
4674 copy_insn_1 (orig)
4675 rtx orig;
4676 {
4677 rtx copy;
4678 int i, j;
4679 RTX_CODE code;
4680 const char *format_ptr;
4681
4682 code = GET_CODE (orig);
4683
4684 switch (code)
4685 {
4686 case REG:
4687 case QUEUED:
4688 case CONST_INT:
4689 case CONST_DOUBLE:
4690 case CONST_VECTOR:
4691 case SYMBOL_REF:
4692 case CODE_LABEL:
4693 case PC:
4694 case CC0:
4695 case ADDRESSOF:
4696 return orig;
4697
4698 case SCRATCH:
4699 for (i = 0; i < copy_insn_n_scratches; i++)
4700 if (copy_insn_scratch_in[i] == orig)
4701 return copy_insn_scratch_out[i];
4702 break;
4703
4704 case CONST:
4705 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
4706 a LABEL_REF, it isn't sharable. */
4707 if (GET_CODE (XEXP (orig, 0)) == PLUS
4708 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
4709 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
4710 return orig;
4711 break;
4712
4713 /* A MEM with a constant address is not sharable. The problem is that
4714 the constant address may need to be reloaded. If the mem is shared,
4715 then reloading one copy of this mem will cause all copies to appear
4716 to have been reloaded. */
4717
4718 default:
4719 break;
4720 }
4721
4722 copy = rtx_alloc (code);
4723
4724 /* Copy the various flags, and other information. We assume that
4725 all fields need copying, and then clear the fields that should
4726 not be copied. That is the sensible default behavior, and forces
4727 us to explicitly document why we are *not* copying a flag. */
4728 memcpy (copy, orig, sizeof (struct rtx_def) - sizeof (rtunion));
4729
4730 /* We do not copy the USED flag, which is used as a mark bit during
4731 walks over the RTL. */
4732 RTX_FLAG (copy, used) = 0;
4733
4734 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
4735 if (GET_RTX_CLASS (code) == 'i')
4736 {
4737 RTX_FLAG (copy, jump) = 0;
4738 RTX_FLAG (copy, call) = 0;
4739 RTX_FLAG (copy, frame_related) = 0;
4740 }
4741
4742 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
4743
4744 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
4745 {
4746 copy->fld[i] = orig->fld[i];
4747 switch (*format_ptr++)
4748 {
4749 case 'e':
4750 if (XEXP (orig, i) != NULL)
4751 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
4752 break;
4753
4754 case 'E':
4755 case 'V':
4756 if (XVEC (orig, i) == orig_asm_constraints_vector)
4757 XVEC (copy, i) = copy_asm_constraints_vector;
4758 else if (XVEC (orig, i) == orig_asm_operands_vector)
4759 XVEC (copy, i) = copy_asm_operands_vector;
4760 else if (XVEC (orig, i) != NULL)
4761 {
4762 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
4763 for (j = 0; j < XVECLEN (copy, i); j++)
4764 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
4765 }
4766 break;
4767
4768 case 't':
4769 case 'w':
4770 case 'i':
4771 case 's':
4772 case 'S':
4773 case 'u':
4774 case '0':
4775 /* These are left unchanged. */
4776 break;
4777
4778 default:
4779 abort ();
4780 }
4781 }
4782
4783 if (code == SCRATCH)
4784 {
4785 i = copy_insn_n_scratches++;
4786 if (i >= MAX_RECOG_OPERANDS)
4787 abort ();
4788 copy_insn_scratch_in[i] = orig;
4789 copy_insn_scratch_out[i] = copy;
4790 }
4791 else if (code == ASM_OPERANDS)
4792 {
4793 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
4794 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
4795 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
4796 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
4797 }
4798
4799 return copy;
4800 }
4801
4802 /* Create a new copy of an rtx.
4803 This function differs from copy_rtx in that it handles SCRATCHes and
4804 ASM_OPERANDs properly.
4805 INSN doesn't really have to be a full INSN; it could be just the
4806 pattern. */
4807 rtx
4808 copy_insn (insn)
4809 rtx insn;
4810 {
4811 copy_insn_n_scratches = 0;
4812 orig_asm_operands_vector = 0;
4813 orig_asm_constraints_vector = 0;
4814 copy_asm_operands_vector = 0;
4815 copy_asm_constraints_vector = 0;
4816 return copy_insn_1 (insn);
4817 }
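
/* Typical use of copy_insn, e.g. when duplicating a loop body:
   copy the pattern and emit it as a fresh insn.  INSN and AFTER
   are hypothetical.  */
#if 0
rtx new_insn = emit_insn_after (copy_insn (PATTERN (insn)), after);
#endif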
4818
4819 /* Initialize data structures and variables in this file
4820 before generating rtl for each function. */
4821
4822 void
4823 init_emit ()
4824 {
4825 struct function *f = cfun;
4826
4827 f->emit = (struct emit_status *) xmalloc (sizeof (struct emit_status));
4828 first_insn = NULL;
4829 last_insn = NULL;
4830 seq_rtl_expr = NULL;
4831 cur_insn_uid = 1;
4832 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
4833 last_linenum = 0;
4834 last_filename = 0;
4835 first_label_num = label_num;
4836 last_label_num = 0;
4837 seq_stack = NULL;
4838
4839 /* Init the tables that describe all the pseudo regs. */
4840
4841 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
4842
4843 f->emit->regno_pointer_align
4844 = (unsigned char *) xcalloc (f->emit->regno_pointer_align_length,
4845 sizeof (unsigned char));
4846
4847 regno_reg_rtx
4848 = (rtx *) xcalloc (f->emit->regno_pointer_align_length, sizeof (rtx));
4849
4850 f->emit->regno_decl
4851 = (tree *) xcalloc (f->emit->regno_pointer_align_length, sizeof (tree));
4852
4853 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
4854 init_virtual_regs (f->emit);
4855
4856 /* Indicate that the virtual registers and stack locations are
4857 all pointers. */
4858 REG_POINTER (stack_pointer_rtx) = 1;
4859 REG_POINTER (frame_pointer_rtx) = 1;
4860 REG_POINTER (hard_frame_pointer_rtx) = 1;
4861 REG_POINTER (arg_pointer_rtx) = 1;
4862
4863 REG_POINTER (virtual_incoming_args_rtx) = 1;
4864 REG_POINTER (virtual_stack_vars_rtx) = 1;
4865 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
4866 REG_POINTER (virtual_outgoing_args_rtx) = 1;
4867 REG_POINTER (virtual_cfa_rtx) = 1;
4868
4869 #ifdef STACK_BOUNDARY
4870 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
4871 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
4872 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
4873 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
4874
4875 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
4876 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
4877 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
4878 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
4879 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
4880 #endif
4881
4882 #ifdef INIT_EXPANDERS
4883 INIT_EXPANDERS;
4884 #endif
4885 }
4886
4887 /* Mark SS for GC. */
4888
4889 static void
4890 mark_sequence_stack (ss)
4891 struct sequence_stack *ss;
4892 {
4893 while (ss)
4894 {
4895 ggc_mark_rtx (ss->first);
4896 ggc_mark_tree (ss->sequence_rtl_expr);
4897 ss = ss->next;
4898 }
4899 }
4900
4901 /* Mark ES for GC. */
4902
4903 void
4904 mark_emit_status (es)
4905 struct emit_status *es;
4906 {
4907 rtx *r;
4908 tree *t;
4909 int i;
4910
4911 if (es == 0)
4912 return;
4913
4914 for (i = es->regno_pointer_align_length, r = es->x_regno_reg_rtx,
4915 t = es->regno_decl;
4916 i > 0; --i, ++r, ++t)
4917 {
4918 ggc_mark_rtx (*r);
4919 ggc_mark_tree (*t);
4920 }
4921
4922 mark_sequence_stack (es->sequence_stack);
4923 ggc_mark_tree (es->sequence_rtl_expr);
4924 ggc_mark_rtx (es->x_first_insn);
4925 }
4926
4927 /* Generate the constant 0. */
4928
4929 static rtx
4930 gen_const_vector_0 (mode)
4931 enum machine_mode mode;
4932 {
4933 rtx tem;
4934 rtvec v;
4935 int units, i;
4936 enum machine_mode inner;
4937
4938 units = GET_MODE_NUNITS (mode);
4939 inner = GET_MODE_INNER (mode);
4940
4941 v = rtvec_alloc (units);
4942
4943   /* CONST0_RTX (inner) must already be set up before this function
        is called.  */
4944 if (!CONST0_RTX (inner))
4945 abort ();
4946
4947 for (i = 0; i < units; ++i)
4948 RTVEC_ELT (v, i) = CONST0_RTX (inner);
4949
4950 tem = gen_rtx_CONST_VECTOR (mode, v);
4951 return tem;
4952 }
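
/* The vector zeros built above are reached through CONST0_RTX, just
   like scalar zeros.  V4SImode is only an example; it is available
   only on targets providing such a mode.  */
#if 0
rtx zero_vec = CONST0_RTX (V4SImode);
#endif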
4953
4954 /* Create some permanent unique rtl objects shared between all functions.
4955 LINE_NUMBERS is nonzero if line numbers are to be generated. */
4956
4957 void
4958 init_emit_once (line_numbers)
4959 int line_numbers;
4960 {
4961 int i;
4962 enum machine_mode mode;
4963 enum machine_mode double_mode;
4964
4965 /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
4966 tables. */
4967 const_int_htab = htab_create (37, const_int_htab_hash,
4968 const_int_htab_eq, NULL);
4969 ggc_add_deletable_htab (const_int_htab, 0, 0);
4970
4971 const_double_htab = htab_create (37, const_double_htab_hash,
4972 const_double_htab_eq, NULL);
4973 ggc_add_deletable_htab (const_double_htab, 0, 0);
4974
4975 mem_attrs_htab = htab_create (37, mem_attrs_htab_hash,
4976 mem_attrs_htab_eq, NULL);
4977 ggc_add_deletable_htab (mem_attrs_htab, 0, mem_attrs_mark);
4978
4979 no_line_numbers = ! line_numbers;
4980
4981 /* Compute the word and byte modes. */
4982
4983 byte_mode = VOIDmode;
4984 word_mode = VOIDmode;
4985 double_mode = VOIDmode;
4986
4987 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
4988 mode = GET_MODE_WIDER_MODE (mode))
4989 {
4990 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
4991 && byte_mode == VOIDmode)
4992 byte_mode = mode;
4993
4994 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
4995 && word_mode == VOIDmode)
4996 word_mode = mode;
4997 }
4998
4999 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5000 mode = GET_MODE_WIDER_MODE (mode))
5001 {
5002 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5003 && double_mode == VOIDmode)
5004 double_mode = mode;
5005 }
5006
5007 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5008
5009 /* Assign register numbers to the globally defined register rtx.
5010 This must be done at runtime because the register number field
5011 is in a union and some compilers can't initialize unions. */
5012
5013 pc_rtx = gen_rtx (PC, VOIDmode);
5014 cc0_rtx = gen_rtx (CC0, VOIDmode);
5015 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5016 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5017 if (hard_frame_pointer_rtx == 0)
5018 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
5019 HARD_FRAME_POINTER_REGNUM);
5020 if (arg_pointer_rtx == 0)
5021 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5022 virtual_incoming_args_rtx =
5023 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5024 virtual_stack_vars_rtx =
5025 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5026 virtual_stack_dynamic_rtx =
5027 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5028 virtual_outgoing_args_rtx =
5029 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5030 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5031
5032 /* These rtx must be roots if GC is enabled. */
5033 ggc_add_rtx_root (global_rtl, GR_MAX);
5034
5035 #ifdef INIT_EXPANDERS
5036 /* This is to initialize {init|mark|free}_machine_status before the first
5037 call to push_function_context_to. This is needed by the Chill front
5038 end which calls push_function_context_to before the first call to
5039 init_function_start. */
5040 INIT_EXPANDERS;
5041 #endif
5042
5043 /* Create the unique rtx's for certain rtx codes and operand values. */
5044
5045 /* Don't use gen_rtx here since gen_rtx in this case
5046 tries to use these variables. */
5047 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5048 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5049 gen_rtx_raw_CONST_INT (VOIDmode, i);
5050 ggc_add_rtx_root (const_int_rtx, 2 * MAX_SAVED_CONST_INT + 1);
5051
5052 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5053 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5054 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5055 else
5056 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5057
5058 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5059 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5060 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5061 REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
5062
5063 for (i = 0; i <= 2; i++)
5064 {
5065 REAL_VALUE_TYPE *r =
5066 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5067
5068 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5069 mode = GET_MODE_WIDER_MODE (mode))
5070 const_tiny_rtx[i][(int) mode] =
5071 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5072
5073 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5074
5075 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5076 mode = GET_MODE_WIDER_MODE (mode))
5077 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5078
5079 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5080 mode != VOIDmode;
5081 mode = GET_MODE_WIDER_MODE (mode))
5082 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5083 }
5084
5085 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5086 mode != VOIDmode;
5087 mode = GET_MODE_WIDER_MODE (mode))
5088 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5089
5090 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5091 mode != VOIDmode;
5092 mode = GET_MODE_WIDER_MODE (mode))
5093 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5094
5095 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5096 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5097 const_tiny_rtx[0][i] = const0_rtx;
5098
5099 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5100 if (STORE_FLAG_VALUE == 1)
5101 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5102
5103 /* For bounded pointers, `&const_tiny_rtx[0][0]' is not the same as
5104 `(rtx *) const_tiny_rtx'. The former has bounds that only cover
5105 `const_tiny_rtx[0]', whereas the latter has bounds that cover all. */
5106 ggc_add_rtx_root ((rtx *) const_tiny_rtx, sizeof const_tiny_rtx / sizeof (rtx));
5107 ggc_add_rtx_root (&const_true_rtx, 1);
5108
5109 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5110 return_address_pointer_rtx
5111 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5112 #endif
5113
5114 #ifdef STRUCT_VALUE
5115 struct_value_rtx = STRUCT_VALUE;
5116 #else
5117 struct_value_rtx = gen_rtx_REG (Pmode, STRUCT_VALUE_REGNUM);
5118 #endif
5119
5120 #ifdef STRUCT_VALUE_INCOMING
5121 struct_value_incoming_rtx = STRUCT_VALUE_INCOMING;
5122 #else
5123 #ifdef STRUCT_VALUE_INCOMING_REGNUM
5124 struct_value_incoming_rtx
5125 = gen_rtx_REG (Pmode, STRUCT_VALUE_INCOMING_REGNUM);
5126 #else
5127 struct_value_incoming_rtx = struct_value_rtx;
5128 #endif
5129 #endif
5130
5131 #ifdef STATIC_CHAIN_REGNUM
5132 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5133
5134 #ifdef STATIC_CHAIN_INCOMING_REGNUM
5135 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5136 static_chain_incoming_rtx
5137 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5138 else
5139 #endif
5140 static_chain_incoming_rtx = static_chain_rtx;
5141 #endif
5142
5143 #ifdef STATIC_CHAIN
5144 static_chain_rtx = STATIC_CHAIN;
5145
5146 #ifdef STATIC_CHAIN_INCOMING
5147 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5148 #else
5149 static_chain_incoming_rtx = static_chain_rtx;
5150 #endif
5151 #endif
5152
5153 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5154 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5155
5156 ggc_add_rtx_root (&pic_offset_table_rtx, 1);
5157 ggc_add_rtx_root (&struct_value_rtx, 1);
5158 ggc_add_rtx_root (&struct_value_incoming_rtx, 1);
5159 ggc_add_rtx_root (&static_chain_rtx, 1);
5160 ggc_add_rtx_root (&static_chain_incoming_rtx, 1);
5161 ggc_add_rtx_root (&return_address_pointer_rtx, 1);
5162 }
5163 \f
5164 /* Query and clear/restore no_line_numbers.  This is used by the
5165    switch/case handling in stmt.c to give proper line numbers in
5166    warnings about unreachable code.  */
5167
5168 int
5169 force_line_numbers ()
5170 {
5171 int old = no_line_numbers;
5172
5173 no_line_numbers = 0;
5174 if (old)
5175 force_next_line_note ();
5176 return old;
5177 }
5178
5179 void
5180 restore_line_number_status (old_value)
5181 int old_value;
5182 {
5183 no_line_numbers = old_value;
5184 }
5185
5186 /* Produce an exact duplicate of insn INSN after AFTER, taking care
5187    to update any libcall regions if present.  */
5188
5189 rtx
5190 emit_copy_of_insn_after (insn, after)
5191 rtx insn, after;
5192 {
5193 rtx new;
5194 rtx note1, note2, link;
5195
5196 switch (GET_CODE (insn))
5197 {
5198 case INSN:
5199 new = emit_insn_after (copy_insn (PATTERN (insn)), after);
5200 break;
5201
5202 case JUMP_INSN:
5203 new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5204 break;
5205
5206 case CALL_INSN:
5207 new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5208 if (CALL_INSN_FUNCTION_USAGE (insn))
5209 CALL_INSN_FUNCTION_USAGE (new)
5210 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5211 SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
5212 CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
5213 break;
5214
5215 default:
5216 abort ();
5217 }
5218
5219 /* Update LABEL_NUSES. */
5220 mark_jump_label (PATTERN (new), new, 0);
5221
5222 INSN_SCOPE (new) = INSN_SCOPE (insn);
5223
5224 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
5225 make them. */
5226 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5227 if (REG_NOTE_KIND (link) != REG_LABEL)
5228 {
5229 if (GET_CODE (link) == EXPR_LIST)
5230 REG_NOTES (new)
5231 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
5232 XEXP (link, 0),
5233 REG_NOTES (new)));
5234 else
5235 REG_NOTES (new)
5236 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
5237 XEXP (link, 0),
5238 REG_NOTES (new)));
5239 }
5240
5241 /* Fix the libcall sequences. */
5242 if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
5243 {
5244 rtx p = new;
5245 while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
5246 p = PREV_INSN (p);
5247 XEXP (note1, 0) = p;
5248 XEXP (note2, 0) = new;
5249 }
5250 return new;
5251 }
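
/* A sketch of typical use: duplicate an insn together with its
   notes, e.g. when unrolling.  INSN and AFTER are hypothetical.  */
#if 0
rtx copy = emit_copy_of_insn_after (insn, after);
#endif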