1 /* Emit RTL for the GNU C-Compiler expander.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22
23 /* Middle-to-low level generation of rtx code and insns.
24
25 This file contains the functions `gen_rtx', `gen_reg_rtx'
26 and `gen_label_rtx' that are the usual ways of creating rtl
27 expressions for most purposes.
28
29 It also has the functions for creating insns and linking
30 them in the doubly-linked chain.
31
32 The patterns of the insns are created by machine-dependent
33 routines in insn-emit.c, which is generated automatically from
34 the machine description. These routines use `gen_rtx' to make
35 the individual rtx's of the pattern; what is machine dependent
36 is the kind of rtx's they make and what arguments they use. */
37
38 #include "config.h"
39 #include "system.h"
40 #include "toplev.h"
41 #include "rtl.h"
42 #include "tree.h"
43 #include "tm_p.h"
44 #include "flags.h"
45 #include "function.h"
46 #include "expr.h"
47 #include "regs.h"
48 #include "hard-reg-set.h"
49 #include "hashtab.h"
50 #include "insn-config.h"
51 #include "recog.h"
52 #include "real.h"
53 #include "obstack.h"
54 #include "bitmap.h"
55 #include "basic-block.h"
56 #include "ggc.h"
57 #include "debug.h"
58 #include "langhooks.h"
59
60 /* Commonly used modes. */
61
62 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
63 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
64 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
65 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
66
67
68 /* This is *not* reset after each function. It gives each CODE_LABEL
69 in the entire compilation a unique label number. */
70
71 static int label_num = 1;
72
73 /* Highest label number in current function.
74 Zero means use the value of label_num instead.
75 This is nonzero only when belatedly compiling an inline function. */
76
77 static int last_label_num;
78
79 /* Value label_num had when set_new_first_and_last_label_number was called.
80 If label_num has not changed since then, last_label_num is valid. */
81
82 static int base_label_num;
83
84 /* Nonzero means do not generate NOTEs for source line numbers. */
85
86 static int no_line_numbers;
87
88 /* Commonly used rtx's, so that we only need space for one copy.
89 These are initialized once for the entire compilation.
90 All of these are unique; no other rtx-object will be equal to any
91 of these. */
92
93 rtx global_rtl[GR_MAX];
94
95 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
96 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
97 record a copy of const[012]_rtx. */
98
99 rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
100
101 rtx const_true_rtx;
102
103 REAL_VALUE_TYPE dconst0;
104 REAL_VALUE_TYPE dconst1;
105 REAL_VALUE_TYPE dconst2;
106 REAL_VALUE_TYPE dconstm1;
107
108 /* All references to the following fixed hard registers go through
109 these unique rtl objects. On machines where the frame-pointer and
110 arg-pointer are the same register, they use the same unique object.
111
112 After register allocation, other rtl objects which used to be pseudo-regs
113 may be clobbered to refer to the frame-pointer register.
114 But references that were originally to the frame-pointer can be
115 distinguished from the others because they contain frame_pointer_rtx.
116
117 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
118 tricky: until register elimination has taken place hard_frame_pointer_rtx
119 should be used if it is being set, and frame_pointer_rtx otherwise. After
120 register elimination hard_frame_pointer_rtx should always be used.
121 On machines where the two registers are the same (as on most), these
122 are the same rtx.
123
124 In an inline procedure, the stack and frame pointer rtxs may not be
125 used for anything else. */
126 rtx struct_value_rtx; /* (REG:Pmode STRUCT_VALUE_REGNUM) */
127 rtx struct_value_incoming_rtx; /* (REG:Pmode STRUCT_VALUE_INCOMING_REGNUM) */
128 rtx static_chain_rtx; /* (REG:Pmode STATIC_CHAIN_REGNUM) */
129 rtx static_chain_incoming_rtx; /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
130 rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
131
132 /* This is used to implement __builtin_return_address for some machines.
133 See for instance the MIPS port. */
134 rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
135
136 /* We make one copy of (const_int C) where C is in
137 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
138 to save space during the compilation and simplify comparisons of
139 integers. */
140
141 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
142
143 /* A hash table storing CONST_INTs whose absolute value is greater
144 than MAX_SAVED_CONST_INT. */
145
146 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
147 htab_t const_int_htab;
148
149 /* A hash table storing memory attribute structures. */
150 static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
151 htab_t mem_attrs_htab;
152
153 /* A hash table storing all CONST_DOUBLEs. */
154 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
155 htab_t const_double_htab;
156
157 #define first_insn (cfun->emit->x_first_insn)
158 #define last_insn (cfun->emit->x_last_insn)
159 #define cur_insn_uid (cfun->emit->x_cur_insn_uid)
160 #define last_linenum (cfun->emit->x_last_linenum)
161 #define last_filename (cfun->emit->x_last_filename)
162 #define first_label_num (cfun->emit->x_first_label_num)
163
164 static rtx make_jump_insn_raw PARAMS ((rtx));
165 static rtx make_call_insn_raw PARAMS ((rtx));
166 static rtx find_line_note PARAMS ((rtx));
167 static rtx change_address_1 PARAMS ((rtx, enum machine_mode, rtx,
168 int));
169 static void unshare_all_rtl_1 PARAMS ((rtx));
170 static void unshare_all_decls PARAMS ((tree));
171 static void reset_used_decls PARAMS ((tree));
172 static void mark_label_nuses PARAMS ((rtx));
173 static hashval_t const_int_htab_hash PARAMS ((const void *));
174 static int const_int_htab_eq PARAMS ((const void *,
175 const void *));
176 static hashval_t const_double_htab_hash PARAMS ((const void *));
177 static int const_double_htab_eq PARAMS ((const void *,
178 const void *));
179 static rtx lookup_const_double PARAMS ((rtx));
180 static hashval_t mem_attrs_htab_hash PARAMS ((const void *));
181 static int mem_attrs_htab_eq PARAMS ((const void *,
182 const void *));
183 static mem_attrs *get_mem_attrs PARAMS ((HOST_WIDE_INT, tree, rtx,
184 rtx, unsigned int,
185 enum machine_mode));
186 static tree component_ref_for_mem_expr PARAMS ((tree));
187 static rtx gen_const_vector_0 PARAMS ((enum machine_mode));
188
189 /* Probability of the conditional branch currently being processed by
try_split. Set to -1 otherwise. */
190
191 int split_branch_probability = -1;
192 \f
193 /* Returns a hash code for X (which is really a CONST_INT). */
194
195 static hashval_t
196 const_int_htab_hash (x)
197 const void *x;
198 {
199 return (hashval_t) INTVAL ((struct rtx_def *) x);
200 }
201
202 /* Returns non-zero if the value represented by X (which is really a
203 CONST_INT) is the same as that given by Y (which is really a
204 HOST_WIDE_INT *). */
205
206 static int
207 const_int_htab_eq (x, y)
208 const void *x;
209 const void *y;
210 {
211 return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
212 }
213
214 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
215 static hashval_t
216 const_double_htab_hash (x)
217 const void *x;
218 {
219 hashval_t h = 0;
220 size_t i;
221 rtx value = (rtx) x;
222
223 for (i = 0; i < sizeof(CONST_DOUBLE_FORMAT)-1; i++)
224 h ^= XWINT (value, i);
225 return h;
226 }
227
228 /* Returns non-zero if the value represented by X (really a CONST_DOUBLE)
229 is the same as that represented by Y (really a CONST_DOUBLE). */
230 static int
231 const_double_htab_eq (x, y)
232 const void *x;
233 const void *y;
234 {
235 rtx a = (rtx)x, b = (rtx)y;
236 size_t i;
237
238 if (GET_MODE (a) != GET_MODE (b))
239 return 0;
240 for (i = 0; i < sizeof(CONST_DOUBLE_FORMAT)-1; i++)
241 if (XWINT (a, i) != XWINT (b, i))
242 return 0;
243
244 return 1;
245 }
246
247 /* Returns a hash code for X (which is really a mem_attrs *). */
248
249 static hashval_t
250 mem_attrs_htab_hash (x)
251 const void *x;
252 {
253 mem_attrs *p = (mem_attrs *) x;
254
255 return (p->alias ^ (p->align * 1000)
256 ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
257 ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
258 ^ (size_t) p->expr);
259 }
260
261 /* Returns non-zero if the value represented by X (which is really a
262 mem_attrs *) is the same as that given by Y (which is also really a
263 mem_attrs *). */
264
265 static int
266 mem_attrs_htab_eq (x, y)
267 const void *x;
268 const void *y;
269 {
270 mem_attrs *p = (mem_attrs *) x;
271 mem_attrs *q = (mem_attrs *) y;
272
273 return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
274 && p->size == q->size && p->align == q->align);
275 }
276
277 /* Allocate a new mem_attrs structure and insert it into the hash table if
278 one identical to it is not already in the table. We are doing this for
279 MEM of mode MODE. */
280
281 static mem_attrs *
282 get_mem_attrs (alias, expr, offset, size, align, mode)
283 HOST_WIDE_INT alias;
284 tree expr;
285 rtx offset;
286 rtx size;
287 unsigned int align;
288 enum machine_mode mode;
289 {
290 mem_attrs attrs;
291 void **slot;
292
293 /* If everything is the default, we can just return zero. */
294 if (alias == 0 && expr == 0 && offset == 0
295 && (size == 0
296 || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
297 && (align == BITS_PER_UNIT
298 || (STRICT_ALIGNMENT
299 && mode != BLKmode && align == GET_MODE_ALIGNMENT (mode))))
300 return 0;
301
302 attrs.alias = alias;
303 attrs.expr = expr;
304 attrs.offset = offset;
305 attrs.size = size;
306 attrs.align = align;
307
308 slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
309 if (*slot == 0)
310 {
311 *slot = ggc_alloc (sizeof (mem_attrs));
312 memcpy (*slot, &attrs, sizeof (mem_attrs));
313 }
314
315 return *slot;
316 }
317
318 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
319 don't attempt to share with the various global pieces of rtl (such as
320 frame_pointer_rtx). */
321
322 rtx
323 gen_raw_REG (mode, regno)
324 enum machine_mode mode;
325 int regno;
326 {
327 rtx x = gen_rtx_raw_REG (mode, regno);
328 ORIGINAL_REGNO (x) = regno;
329 return x;
330 }
331
332 /* There are some RTL codes that require special attention; the generation
333 functions do the raw handling. If you add to this list, modify
334 special_rtx in gengenrtl.c as well. */
335
336 rtx
337 gen_rtx_CONST_INT (mode, arg)
338 enum machine_mode mode ATTRIBUTE_UNUSED;
339 HOST_WIDE_INT arg;
340 {
341 void **slot;
342
343 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
344 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
345
346 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
347 if (const_true_rtx && arg == STORE_FLAG_VALUE)
348 return const_true_rtx;
349 #endif
350
351 /* Look up the CONST_INT in the hash table. */
352 slot = htab_find_slot_with_hash (const_int_htab, &arg,
353 (hashval_t) arg, INSERT);
354 if (*slot == 0)
355 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
356
357 return (rtx) *slot;
358 }
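/* A minimal usage sketch (illustrative only, not part of the original
   source): because small constants are shared, the result of GEN_INT
   can be compared by pointer. Assuming MAX_SAVED_CONST_INT >= 1:

       rtx zero = gen_rtx_CONST_INT (VOIDmode, 0);
       rtx one  = GEN_INT (1);

   Here zero == const0_rtx and one == const1_rtx as pointer identities,
   and neither call allocates a new rtx.  */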
359
360 rtx
361 gen_int_mode (c, mode)
362 HOST_WIDE_INT c;
363 enum machine_mode mode;
364 {
365 return GEN_INT (trunc_int_for_mode (c, mode));
366 }
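/* Sketch of the truncation gen_int_mode performs (illustrative only):
   trunc_int_for_mode sign-extends from the width of MODE, so on a
   two's complement host

       gen_int_mode (0xff, QImode)

   yields (const_int -1), i.e. the shared constm1_rtx, whereas a plain
   GEN_INT (0xff) would yield (const_int 255).  */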
367
368 /* CONST_DOUBLEs might be created from pairs of integers, or from
369 REAL_VALUE_TYPEs. Also, their length is known only at run time,
370 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
371
372 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
373 hash table. If so, return its counterpart; otherwise add it
374 to the hash table and return it. */
375 static rtx
376 lookup_const_double (real)
377 rtx real;
378 {
379 void **slot = htab_find_slot (const_double_htab, real, INSERT);
380 if (*slot == 0)
381 *slot = real;
382
383 return (rtx) *slot;
384 }
385
386 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
387 VALUE in mode MODE. */
388 rtx
389 const_double_from_real_value (value, mode)
390 REAL_VALUE_TYPE value;
391 enum machine_mode mode;
392 {
393 rtx real = rtx_alloc (CONST_DOUBLE);
394 PUT_MODE (real, mode);
395
396 memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE));
397
398 return lookup_const_double (real);
399 }
400
401 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
402 of ints: I0 is the low-order word and I1 is the high-order word.
403 Do not use this routine for non-integer modes; convert to
404 REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE. */
405
406 rtx
407 immed_double_const (i0, i1, mode)
408 HOST_WIDE_INT i0, i1;
409 enum machine_mode mode;
410 {
411 rtx value;
412 unsigned int i;
413
414 if (mode != VOIDmode)
415 {
416 int width;
417 if (GET_MODE_CLASS (mode) != MODE_INT
418 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
419 abort ();
420
421 /* We clear out all bits that don't belong in MODE, unless they and
422 our sign bit are all one. So we get either a reasonable negative
423 value or a reasonable unsigned value for this mode. */
424 width = GET_MODE_BITSIZE (mode);
425 if (width < HOST_BITS_PER_WIDE_INT
426 && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
427 != ((HOST_WIDE_INT) (-1) << (width - 1))))
428 i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
429 else if (width == HOST_BITS_PER_WIDE_INT
430 && ! (i1 == ~0 && i0 < 0))
431 i1 = 0;
432 else if (width > 2 * HOST_BITS_PER_WIDE_INT)
433 /* We cannot represent this value as a constant. */
434 abort ();
435
436 /* If this would be an entire word for the target, but is not for
437 the host, then sign-extend on the host so that the number will
438 look the same way on the host that it would on the target.
439
440 For example, when building a 64 bit alpha hosted 32 bit sparc
441 targeted compiler, then we want the 32 bit unsigned value -1 to be
442 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
443 The latter confuses the sparc backend. */
444
445 if (width < HOST_BITS_PER_WIDE_INT
446 && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
447 i0 |= ((HOST_WIDE_INT) (-1) << width);
448
449 /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
450 CONST_INT.
451
452 ??? Strictly speaking, this is wrong if we create a CONST_INT for
453 a large unsigned constant with the size of MODE being
454 HOST_BITS_PER_WIDE_INT and later try to interpret that constant
455 in a wider mode. In that case we will mis-interpret it as a
456 negative number.
457
458 Unfortunately, the only alternative is to make a CONST_DOUBLE for
459 any constant in any mode if it is an unsigned constant larger
460 than the maximum signed integer in an int on the host. However,
461 doing this will break everyone that always expects to see a
462 CONST_INT for SImode and smaller.
463
464 We have always been making CONST_INTs in this case, so nothing
465 new is being broken. */
466
467 if (width <= HOST_BITS_PER_WIDE_INT)
468 i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
469 }
470
471 /* If this integer fits in one word, return a CONST_INT. */
472 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
473 return GEN_INT (i0);
474
475 /* We use VOIDmode for integers. */
476 value = rtx_alloc (CONST_DOUBLE);
477 PUT_MODE (value, VOIDmode);
478
479 CONST_DOUBLE_LOW (value) = i0;
480 CONST_DOUBLE_HIGH (value) = i1;
481
482 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
483 XWINT (value, i) = 0;
484
485 return lookup_const_double (value);
486 }
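/* Sketch (illustrative only, assuming HOST_BITS_PER_WIDE_INT == 32):
   the 64-bit DImode constant 0x100000002 would be built as

       rtx x = immed_double_const (0x2, 0x1, DImode);

   Since the high word I1 is nonzero, the pair does not fit in a
   CONST_INT and a VOIDmode CONST_DOUBLE is returned.  On a host whose
   HOST_WIDE_INT is 64 bits, DImode fits in I0 alone and a CONST_INT
   would be returned instead.  */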
487
488 rtx
489 gen_rtx_REG (mode, regno)
490 enum machine_mode mode;
491 unsigned int regno;
492 {
493 /* In case the MD file explicitly references the frame pointer, have
494 all such references point to the same frame pointer. This is
495 used during frame pointer elimination to distinguish the explicit
496 references to these registers from pseudos that happened to be
497 assigned to them.
498
499 If we have eliminated the frame pointer or arg pointer, we will
500 be using it as a normal register, for example as a spill
501 register. In such cases, we might be accessing it in a mode that
502 is not Pmode and therefore cannot use the pre-allocated rtx.
503
504 Also don't do this when we are making new REGs in reload, since
505 we don't want to get confused with the real pointers. */
506
507 if (mode == Pmode && !reload_in_progress)
508 {
509 if (regno == FRAME_POINTER_REGNUM)
510 return frame_pointer_rtx;
511 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
512 if (regno == HARD_FRAME_POINTER_REGNUM)
513 return hard_frame_pointer_rtx;
514 #endif
515 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
516 if (regno == ARG_POINTER_REGNUM)
517 return arg_pointer_rtx;
518 #endif
519 #ifdef RETURN_ADDRESS_POINTER_REGNUM
520 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
521 return return_address_pointer_rtx;
522 #endif
523 if (regno == PIC_OFFSET_TABLE_REGNUM
524 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
525 return pic_offset_table_rtx;
526 if (regno == STACK_POINTER_REGNUM)
527 return stack_pointer_rtx;
528 }
529
530 return gen_raw_REG (mode, regno);
531 }
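/* Illustrative consequence of the sharing above: outside of reload,
   and for mode Pmode,

       gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM) == frame_pointer_rtx

   holds as a pointer identity, which is what lets explicit frame
   pointer references be recognized during register elimination.  */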
532
533 rtx
534 gen_rtx_MEM (mode, addr)
535 enum machine_mode mode;
536 rtx addr;
537 {
538 rtx rt = gen_rtx_raw_MEM (mode, addr);
539
540 /* This field is not cleared by the mere allocation of the rtx, so
541 we clear it here. */
542 MEM_ATTRS (rt) = 0;
543
544 return rt;
545 }
546
547 rtx
548 gen_rtx_SUBREG (mode, reg, offset)
549 enum machine_mode mode;
550 rtx reg;
551 int offset;
552 {
553 /* This is the most common failure type.
554 Catch it early so we can see who does it. */
555 if ((offset % GET_MODE_SIZE (mode)) != 0)
556 abort ();
557
558 /* This check isn't usable right now because combine will
559 throw arbitrary crap like a CALL into a SUBREG in
560 gen_lowpart_for_combine so we must just eat it. */
561 #if 0
562 /* Check for this too. */
563 if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
564 abort ();
565 #endif
566 return gen_rtx_raw_SUBREG (mode, reg, offset);
567 }
568
569 /* Generate a SUBREG representing the least-significant part of REG if MODE
570 is smaller than the mode of REG; otherwise generate a paradoxical SUBREG. */
571
572 rtx
573 gen_lowpart_SUBREG (mode, reg)
574 enum machine_mode mode;
575 rtx reg;
576 {
577 enum machine_mode inmode;
578
579 inmode = GET_MODE (reg);
580 if (inmode == VOIDmode)
581 inmode = mode;
582 return gen_rtx_SUBREG (mode, reg,
583 subreg_lowpart_offset (mode, inmode));
584 }
585 \f
586 /* rtx gen_rtx (code, mode, [element1, ..., elementn])
587 **
588 ** This routine generates an RTX of the size specified by
589 ** <code>, which is an RTX code. The RTX structure is initialized
590 ** from the arguments <element1> through <elementn>, which are
591 ** interpreted according to the specific RTX type's format. The
592 ** special machine mode associated with the rtx (if any) is specified
593 ** in <mode>.
594 **
595 ** gen_rtx can be invoked in a way which resembles the lisp-like
596 ** rtx it will generate. For example, the following rtx structure:
597 **
598 ** (plus:QI (mem:QI (reg:SI 1))
599 ** (mem:QI (plus:SI (reg:SI 2) (reg:SI 3))))
600 **
601 ** ...would be generated by the following C code:
602 **
603 ** gen_rtx (PLUS, QImode,
604 ** gen_rtx (MEM, QImode,
605 ** gen_rtx (REG, SImode, 1)),
606 ** gen_rtx (MEM, QImode,
607 ** gen_rtx (PLUS, SImode,
608 ** gen_rtx (REG, SImode, 2),
609 ** gen_rtx (REG, SImode, 3))))
610 */
611
612 /*VARARGS2*/
613 rtx
614 gen_rtx VPARAMS ((enum rtx_code code, enum machine_mode mode, ...))
615 {
616 int i; /* Array indices... */
617 const char *fmt; /* Current rtx's format... */
618 rtx rt_val; /* RTX to return to caller... */
619
620 VA_OPEN (p, mode);
621 VA_FIXEDARG (p, enum rtx_code, code);
622 VA_FIXEDARG (p, enum machine_mode, mode);
623
624 switch (code)
625 {
626 case CONST_INT:
627 rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
628 break;
629
630 case CONST_DOUBLE:
631 {
632 HOST_WIDE_INT arg0 = va_arg (p, HOST_WIDE_INT);
633 HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);
634
635 rt_val = immed_double_const (arg0, arg1, mode);
636 }
637 break;
638
639 case REG:
640 rt_val = gen_rtx_REG (mode, va_arg (p, int));
641 break;
642
643 case MEM:
644 rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
645 break;
646
647 default:
648 rt_val = rtx_alloc (code); /* Allocate the storage space. */
649 rt_val->mode = mode; /* Store the machine mode... */
650
651 fmt = GET_RTX_FORMAT (code); /* Find the right format... */
652 for (i = 0; i < GET_RTX_LENGTH (code); i++)
653 {
654 switch (*fmt++)
655 {
656 case '0': /* Unused field. */
657 break;
658
659 case 'i': /* An integer? */
660 XINT (rt_val, i) = va_arg (p, int);
661 break;
662
663 case 'w': /* A wide integer? */
664 XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
665 break;
666
667 case 's': /* A string? */
668 XSTR (rt_val, i) = va_arg (p, char *);
669 break;
670
671 case 'e': /* An expression? */
672 case 'u': /* An insn? Same except when printing. */
673 XEXP (rt_val, i) = va_arg (p, rtx);
674 break;
675
676 case 'E': /* An RTX vector? */
677 XVEC (rt_val, i) = va_arg (p, rtvec);
678 break;
679
680 case 'b': /* A bitmap? */
681 XBITMAP (rt_val, i) = va_arg (p, bitmap);
682 break;
683
684 case 't': /* A tree? */
685 XTREE (rt_val, i) = va_arg (p, tree);
686 break;
687
688 default:
689 abort ();
690 }
691 }
692 break;
693 }
694
695 VA_CLOSE (p);
696 return rt_val;
697 }
698
699 /* gen_rtvec (n, [rt1, ..., rtn])
700 **
701 ** This routine creates an rtvec and stores within it the
702 ** pointers to rtx's which are its arguments.
703 */
704
705 /*VARARGS1*/
706 rtvec
707 gen_rtvec VPARAMS ((int n, ...))
708 {
709 int i, save_n;
710 rtx *vector;
711
712 VA_OPEN (p, n);
713 VA_FIXEDARG (p, int, n);
714
715 if (n == 0)
716 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
717
718 vector = (rtx *) alloca (n * sizeof (rtx));
719
720 for (i = 0; i < n; i++)
721 vector[i] = va_arg (p, rtx);
722
723 /* The definition of VA_* in K&R C causes `n' to go out of scope. */
724 save_n = n;
725 VA_CLOSE (p);
726
727 return gen_rtvec_v (save_n, vector);
728 }
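/* A small usage sketch (illustrative only): building a two-element
   vector, e.g. for a PARALLEL body, given rtxes X and Y:

       rtvec v = gen_rtvec (2, x, y);
       rtx par = gen_rtx_PARALLEL (VOIDmode, v);

   Note that gen_rtvec (0) returns NULL_RTVEC rather than allocating
   an empty vector.  */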
729
730 rtvec
731 gen_rtvec_v (n, argp)
732 int n;
733 rtx *argp;
734 {
735 int i;
736 rtvec rt_val;
737
738 if (n == 0)
739 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
740
741 rt_val = rtvec_alloc (n); /* Allocate an rtvec... */
742
743 for (i = 0; i < n; i++)
744 rt_val->elem[i] = *argp++;
745
746 return rt_val;
747 }
748 \f
749 /* Generate a REG rtx for a new pseudo register of mode MODE.
750 This pseudo is assigned the next sequential register number. */
751
752 rtx
753 gen_reg_rtx (mode)
754 enum machine_mode mode;
755 {
756 struct function *f = cfun;
757 rtx val;
758
759 /* Don't let anything called after initial flow analysis create new
760 registers. */
761 if (no_new_pseudos)
762 abort ();
763
764 if (generating_concat_p
765 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
766 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
767 {
768 /* For complex modes, don't make a single pseudo.
769 Instead, make a CONCAT of two pseudos.
770 This allows noncontiguous allocation of the real and imaginary parts,
771 which makes much better code. Besides, allocating DCmode
772 pseudos overstrains reload on some machines like the 386. */
773 rtx realpart, imagpart;
774 int size = GET_MODE_UNIT_SIZE (mode);
775 enum machine_mode partmode
776 = mode_for_size (size * BITS_PER_UNIT,
777 (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
778 ? MODE_FLOAT : MODE_INT),
779 0);
780
781 realpart = gen_reg_rtx (partmode);
782 imagpart = gen_reg_rtx (partmode);
783 return gen_rtx_CONCAT (mode, realpart, imagpart);
784 }
785
786 /* Make sure regno_pointer_align, regno_decl, and regno_reg_rtx are large
787 enough to have an element for this pseudo reg number. */
788
789 if (reg_rtx_no == f->emit->regno_pointer_align_length)
790 {
791 int old_size = f->emit->regno_pointer_align_length;
792 char *new;
793 rtx *new1;
794 tree *new2;
795
796 new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
797 memset (new + old_size, 0, old_size);
798 f->emit->regno_pointer_align = (unsigned char *) new;
799
800 new1 = (rtx *) ggc_realloc (f->emit->x_regno_reg_rtx,
801 old_size * 2 * sizeof (rtx));
802 memset (new1 + old_size, 0, old_size * sizeof (rtx));
803 regno_reg_rtx = new1;
804
805 new2 = (tree *) ggc_realloc (f->emit->regno_decl,
806 old_size * 2 * sizeof (tree));
807 memset (new2 + old_size, 0, old_size * sizeof (tree));
808 f->emit->regno_decl = new2;
809
810 f->emit->regno_pointer_align_length = old_size * 2;
811 }
812
813 val = gen_raw_REG (mode, reg_rtx_no);
814 regno_reg_rtx[reg_rtx_no++] = val;
815 return val;
816 }
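/* Sketch of the complex-mode case above (illustrative only): with
   generating_concat_p set, a request for a complex double pseudo

       rtx c = gen_reg_rtx (DCmode);

   yields (concat:DC (reg:DF n) (reg:DF n+1)) rather than a single
   DCmode register, so the real and imaginary parts can be allocated
   independently.  */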
817
818 /* Identify REG (which may be a CONCAT) as a user register. */
819
820 void
821 mark_user_reg (reg)
822 rtx reg;
823 {
824 if (GET_CODE (reg) == CONCAT)
825 {
826 REG_USERVAR_P (XEXP (reg, 0)) = 1;
827 REG_USERVAR_P (XEXP (reg, 1)) = 1;
828 }
829 else if (GET_CODE (reg) == REG)
830 REG_USERVAR_P (reg) = 1;
831 else
832 abort ();
833 }
834
835 /* Identify REG as a probable pointer register and show its alignment
836 as ALIGN, if nonzero. */
837
838 void
839 mark_reg_pointer (reg, align)
840 rtx reg;
841 int align;
842 {
843 if (! REG_POINTER (reg))
844 {
845 REG_POINTER (reg) = 1;
846
847 if (align)
848 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
849 }
850 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
851 /* We can no longer be sure just how aligned this pointer is. */
852 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
853 }
854
855 /* Return 1 plus largest pseudo reg number used in the current function. */
856
857 int
858 max_reg_num ()
859 {
860 return reg_rtx_no;
861 }
862
863 /* Return 1 + the largest label number used so far in the current function. */
864
865 int
866 max_label_num ()
867 {
868 if (last_label_num && label_num == base_label_num)
869 return last_label_num;
870 return label_num;
871 }
872
873 /* Return first label number used in this function (if any were used). */
874
875 int
876 get_first_label_num ()
877 {
878 return first_label_num;
879 }
880 \f
881 /* Return the final regno of X, which is a SUBREG of a hard
882 register. */
883 int
884 subreg_hard_regno (x, check_mode)
885 rtx x;
886 int check_mode;
887 {
888 enum machine_mode mode = GET_MODE (x);
889 unsigned int byte_offset, base_regno, final_regno;
890 rtx reg = SUBREG_REG (x);
891
892 /* This is where we attempt to catch illegal subregs
893 created by the compiler. */
894 if (GET_CODE (x) != SUBREG
895 || GET_CODE (reg) != REG)
896 abort ();
897 base_regno = REGNO (reg);
898 if (base_regno >= FIRST_PSEUDO_REGISTER)
899 abort ();
900 if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
901 abort ();
902
903 /* Catch non-congruent offsets too. */
904 byte_offset = SUBREG_BYTE (x);
905 if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
906 abort ();
907
908 final_regno = subreg_regno (x);
909
910 return final_regno;
911 }
912
913 /* Return a value representing some low-order bits of X, where the number
914 of low-order bits is given by MODE. Note that no conversion is done
915 between floating-point and fixed-point values, rather, the bit
916 representation is returned.
917
918 This function handles the cases in common between gen_lowpart, below,
919 and two variants in cse.c and combine.c. These are the cases that can
920 be safely handled at all points in the compilation.
921
922 If this is not a case we can handle, return 0. */
923
924 rtx
925 gen_lowpart_common (mode, x)
926 enum machine_mode mode;
927 rtx x;
928 {
929 int msize = GET_MODE_SIZE (mode);
930 int xsize = GET_MODE_SIZE (GET_MODE (x));
931 int offset = 0;
932
933 if (GET_MODE (x) == mode)
934 return x;
935
936 /* MODE must occupy no more words than the mode of X. */
937 if (GET_MODE (x) != VOIDmode
938 && ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
939 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
940 return 0;
941
942 offset = subreg_lowpart_offset (mode, GET_MODE (x));
943
944 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
945 && (GET_MODE_CLASS (mode) == MODE_INT
946 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
947 {
948 /* If we are getting the low-order part of something that has been
949 sign- or zero-extended, we can either just use the object being
950 extended or make a narrower extension. If we want an even smaller
951 piece than the size of the object being extended, call ourselves
952 recursively.
953
954 This case is used mostly by combine and cse. */
955
956 if (GET_MODE (XEXP (x, 0)) == mode)
957 return XEXP (x, 0);
958 else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
959 return gen_lowpart_common (mode, XEXP (x, 0));
960 else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
961 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
962 }
963 else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
964 || GET_CODE (x) == CONCAT)
965 return simplify_gen_subreg (mode, x, GET_MODE (x), offset);
966 /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
967 from the low-order part of the constant. */
968 else if ((GET_MODE_CLASS (mode) == MODE_INT
969 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
970 && GET_MODE (x) == VOIDmode
971 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
972 {
973 /* If MODE is twice the host word size, X is already the desired
974 representation. Otherwise, if MODE is wider than a word, we can't
975 do this. If MODE is exactly a word, return just one CONST_INT. */
976
977 if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
978 return x;
979 else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
980 return 0;
981 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
982 return (GET_CODE (x) == CONST_INT ? x
983 : GEN_INT (CONST_DOUBLE_LOW (x)));
984 else
985 {
986 /* MODE must be narrower than HOST_BITS_PER_WIDE_INT. */
987 HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
988 : CONST_DOUBLE_LOW (x));
989
990 /* Sign extend to HOST_WIDE_INT. */
991 val = trunc_int_for_mode (val, mode);
992
993 return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
994 : GEN_INT (val));
995 }
996 }
997
998 /* The floating-point emulator can handle all conversions between
999 FP and integer operands. This simplifies reload because it
1000 doesn't have to deal with constructs like (subreg:DI
1001 (const_double:SF ...)) or (subreg:DF (const_int ...)). */
1002 /* Single-precision floats are always 32-bits and double-precision
1003 floats are always 64-bits. */
1004
1005 else if (GET_MODE_CLASS (mode) == MODE_FLOAT
1006 && GET_MODE_BITSIZE (mode) == 32
1007 && GET_CODE (x) == CONST_INT)
1008 {
1009 REAL_VALUE_TYPE r;
1010 HOST_WIDE_INT i;
1011
1012 i = INTVAL (x);
1013 r = REAL_VALUE_FROM_TARGET_SINGLE (i);
1014 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
1015 }
1016 else if (GET_MODE_CLASS (mode) == MODE_FLOAT
1017 && GET_MODE_BITSIZE (mode) == 64
1018 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
1019 && GET_MODE (x) == VOIDmode)
1020 {
1021 REAL_VALUE_TYPE r;
1022 HOST_WIDE_INT i[2];
1023 HOST_WIDE_INT low, high;
1024
1025 if (GET_CODE (x) == CONST_INT)
1026 {
1027 low = INTVAL (x);
1028 high = low >> (HOST_BITS_PER_WIDE_INT - 1);
1029 }
1030 else
1031 {
1032 low = CONST_DOUBLE_LOW (x);
1033 high = CONST_DOUBLE_HIGH (x);
1034 }
1035
1036 #if HOST_BITS_PER_WIDE_INT == 32
1037 /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
1038 target machine. */
1039 if (WORDS_BIG_ENDIAN)
1040 i[0] = high, i[1] = low;
1041 else
1042 i[0] = low, i[1] = high;
1043 #else
1044 i[0] = low;
1045 #endif
1046
1047 r = REAL_VALUE_FROM_TARGET_DOUBLE (i);
1048 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
1049 }
1050 else if ((GET_MODE_CLASS (mode) == MODE_INT
1051 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
1052 && GET_CODE (x) == CONST_DOUBLE
1053 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
1054 {
1055 REAL_VALUE_TYPE r;
1056 long i[4]; /* Only the low 32 bits of each 'long' are used. */
1057 int endian = WORDS_BIG_ENDIAN ? 1 : 0;
1058
1059 /* Convert 'r' into an array of four 32-bit words in target word
1060 order. */
1061 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
1062 switch (GET_MODE_BITSIZE (GET_MODE (x)))
1063 {
1064 case 32:
1065 REAL_VALUE_TO_TARGET_SINGLE (r, i[3 * endian]);
1066 i[1] = 0;
1067 i[2] = 0;
1068 i[3 - 3 * endian] = 0;
1069 break;
1070 case 64:
1071 REAL_VALUE_TO_TARGET_DOUBLE (r, i + 2 * endian);
1072 i[2 - 2 * endian] = 0;
1073 i[3 - 2 * endian] = 0;
1074 break;
1075 case 96:
1076 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i + endian);
1077 i[3 - 3 * endian] = 0;
1078 break;
1079 case 128:
1080 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i);
1081 break;
1082 default:
1083 abort ();
1084 }
1085 /* Now, pack the 32-bit elements of the array into a CONST_DOUBLE
1086 and return it. */
1087 #if HOST_BITS_PER_WIDE_INT == 32
1088 return immed_double_const (i[3 * endian], i[1 + endian], mode);
1089 #else
1090 if (HOST_BITS_PER_WIDE_INT != 64)
1091 abort ();
1092
1093 return immed_double_const ((((unsigned long) i[3 * endian])
1094 | ((HOST_WIDE_INT) i[1 + endian] << 32)),
1095 (((unsigned long) i[2 - endian])
1096 | ((HOST_WIDE_INT) i[3 - 3 * endian] << 32)),
1097 mode);
1098 #endif
1099 }
1100
1101 /* Otherwise, we can't do this. */
1102 return 0;
1103 }
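/* Example of the constant case above (illustrative only): assuming
   HOST_BITS_PER_WIDE_INT > 8,

       gen_lowpart_common (QImode, GEN_INT (0x1234))

   extracts the low-order byte and returns (const_int 0x34), the value
   sign-extended from QImode by trunc_int_for_mode.  */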
1104 \f
1105 /* Return the real part (which has mode MODE) of a complex value X.
1106 This always comes at the low address in memory. */
1107
1108 rtx
1109 gen_realpart (mode, x)
1110 enum machine_mode mode;
1111 rtx x;
1112 {
1113 if (WORDS_BIG_ENDIAN
1114 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1115 && REG_P (x)
1116 && REGNO (x) < FIRST_PSEUDO_REGISTER)
1117 internal_error
1118 ("can't access real part of complex value in hard register");
1119 else if (WORDS_BIG_ENDIAN)
1120 return gen_highpart (mode, x);
1121 else
1122 return gen_lowpart (mode, x);
1123 }
1124
1125 /* Return the imaginary part (which has mode MODE) of a complex value X.
1126 This always comes at the high address in memory. */
1127
1128 rtx
1129 gen_imagpart (mode, x)
1130 enum machine_mode mode;
1131 rtx x;
1132 {
1133 if (WORDS_BIG_ENDIAN)
1134 return gen_lowpart (mode, x);
1135 else if (! WORDS_BIG_ENDIAN
1136 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1137 && REG_P (x)
1138 && REGNO (x) < FIRST_PSEUDO_REGISTER)
1139 internal_error
1140 ("can't access imaginary part of complex value in hard register");
1141 else
1142 return gen_highpart (mode, x);
1143 }
1144
1145 /* Return 1 iff X, assumed to be a SUBREG,
1146 refers to the real part of the complex value in its containing reg.
1147 Complex values are always stored with the real part in the first word,
1148 regardless of WORDS_BIG_ENDIAN. */
1149
1150 int
1151 subreg_realpart_p (x)
1152 rtx x;
1153 {
1154 if (GET_CODE (x) != SUBREG)
1155 abort ();
1156
1157 return ((unsigned int) SUBREG_BYTE (x)
1158 < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
1159 }
1160 \f
1161 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
1162 return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
1163 least-significant part of X.
1164 MODE specifies how big a part of X to return;
1165 it usually should not be larger than a word.
1166 If X is a MEM whose address is a QUEUED, the value may be so also. */
1167
1168 rtx
1169 gen_lowpart (mode, x)
1170 enum machine_mode mode;
1171 rtx x;
1172 {
1173 rtx result = gen_lowpart_common (mode, x);
1174
1175 if (result)
1176 return result;
1177 else if (GET_CODE (x) == REG)
1178 {
1179 /* Must be a hard reg that's not valid in MODE. */
1180 result = gen_lowpart_common (mode, copy_to_reg (x));
1181 if (result == 0)
1182 abort ();
1183 return result;
1184 }
1185 else if (GET_CODE (x) == MEM)
1186 {
1187 /* The only additional case we can do is MEM. */
1188 int offset = 0;
1189 if (WORDS_BIG_ENDIAN)
1190 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
1191 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
1192
1193 if (BYTES_BIG_ENDIAN)
1194 /* Adjust the address so that the address-after-the-data
1195 is unchanged. */
1196 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
1197 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
1198
1199 return adjust_address (x, mode, offset);
1200 }
1201 else if (GET_CODE (x) == ADDRESSOF)
1202 return gen_lowpart (mode, force_reg (GET_MODE (x), x));
1203 else
1204 abort ();
1205 }
1206
1207 /* Like `gen_lowpart', but refer to the most significant part.
1208 This is used to access the imaginary part of a complex number. */
1209
1210 rtx
1211 gen_highpart (mode, x)
1212 enum machine_mode mode;
1213 rtx x;
1214 {
1215 unsigned int msize = GET_MODE_SIZE (mode);
1216 rtx result;
1217
1218 /* This case loses if X is a subreg. To catch bugs early,
1219 complain if an invalid MODE is used even in other cases. */
1220 if (msize > UNITS_PER_WORD
1221 && msize != GET_MODE_UNIT_SIZE (GET_MODE (x)))
1222 abort ();
1223
1224 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1225 subreg_highpart_offset (mode, GET_MODE (x)));
1226
1227 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1228 the target if we have a MEM. gen_highpart must return a valid operand,
1229 emitting code if necessary to do so. */
1230 if (result != NULL_RTX && GET_CODE (result) == MEM)
1231 result = validize_mem (result);
1232
1233 if (!result)
1234 abort ();
1235 return result;
1236 }
1237
1238 /* Like gen_highpart, but accept the mode of EXP's operand in case EXP
1239 is a VOIDmode constant. */
1240 rtx
1241 gen_highpart_mode (outermode, innermode, exp)
1242 enum machine_mode outermode, innermode;
1243 rtx exp;
1244 {
1245 if (GET_MODE (exp) != VOIDmode)
1246 {
1247 if (GET_MODE (exp) != innermode)
1248 abort ();
1249 return gen_highpart (outermode, exp);
1250 }
1251 return simplify_gen_subreg (outermode, exp, innermode,
1252 subreg_highpart_offset (outermode, innermode));
1253 }
1254
1255 /* Return offset in bytes to get OUTERMODE low part
1256 of the value in mode INNERMODE stored in memory in target format. */
1257
1258 unsigned int
1259 subreg_lowpart_offset (outermode, innermode)
1260 enum machine_mode outermode, innermode;
1261 {
1262 unsigned int offset = 0;
1263 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1264
1265 if (difference > 0)
1266 {
1267 if (WORDS_BIG_ENDIAN)
1268 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1269 if (BYTES_BIG_ENDIAN)
1270 offset += difference % UNITS_PER_WORD;
1271 }
1272
1273 return offset;
1274 }
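/* Worked example (illustrative, assuming UNITS_PER_WORD == 4): for
   OUTERMODE == SImode and INNERMODE == DImode the size difference is
   4 bytes, so the lowpart offset is 4 if WORDS_BIG_ENDIAN and 0
   otherwise; subreg_highpart_offset below is its mirror image.  */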
1275
1276 /* Return offset in bytes to get OUTERMODE high part
1277 of the value in mode INNERMODE stored in memory in target format. */
1278 unsigned int
1279 subreg_highpart_offset (outermode, innermode)
1280 enum machine_mode outermode, innermode;
1281 {
1282 unsigned int offset = 0;
1283 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1284
1285 if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
1286 abort ();
1287
1288 if (difference > 0)
1289 {
1290 if (! WORDS_BIG_ENDIAN)
1291 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1292 if (! BYTES_BIG_ENDIAN)
1293 offset += difference % UNITS_PER_WORD;
1294 }
1295
1296 return offset;
1297 }
1298
1299 /* Return 1 iff X, assumed to be a SUBREG,
1300 refers to the least significant part of its containing reg.
1301 If X is not a SUBREG, always return 1 (it is its own low part!). */
1302
1303 int
1304 subreg_lowpart_p (x)
1305 rtx x;
1306 {
1307 if (GET_CODE (x) != SUBREG)
1308 return 1;
1309 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1310 return 0;
1311
1312 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1313 == SUBREG_BYTE (x));
1314 }
1315 \f
1316
1317 /* Helper routine for all the constant cases of operand_subword.
1318 Some places invoke this directly. */
1319
1320 rtx
1321 constant_subword (op, offset, mode)
1322 rtx op;
1323 int offset;
1324 enum machine_mode mode;
1325 {
1326 int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
1327 HOST_WIDE_INT val;
1328
1329 /* If OP is already an integer word, return it. */
1330 if (GET_MODE_CLASS (mode) == MODE_INT
1331 && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
1332 return op;
1333
1334 /* The output is some bits, the width of the target machine's word.
1335 A wider-word host can surely hold them in a CONST_INT. A narrower-word
1336 host can't. */
1337 if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
1338 && GET_MODE_CLASS (mode) == MODE_FLOAT
1339 && GET_MODE_BITSIZE (mode) == 64
1340 && GET_CODE (op) == CONST_DOUBLE)
1341 {
1342 long k[2];
1343 REAL_VALUE_TYPE rv;
1344
1345 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1346 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1347
1348 /* We handle 32-bit and >= 64-bit words here. Note that the order in
1349 which the words are written depends on the word endianness.
1350 ??? This is a potential portability problem and should
1351 be fixed at some point.
1352
1353 We must exercise caution with the sign bit. By definition there
1354 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
1355 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
1356 So we explicitly mask and sign-extend as necessary. */
1357 if (BITS_PER_WORD == 32)
1358 {
1359 val = k[offset];
1360 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1361 return GEN_INT (val);
1362 }
1363 #if HOST_BITS_PER_WIDE_INT >= 64
1364 else if (BITS_PER_WORD >= 64 && offset == 0)
1365 {
1366 val = k[! WORDS_BIG_ENDIAN];
1367 val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
1368 val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
1369 return GEN_INT (val);
1370 }
1371 #endif
1372 else if (BITS_PER_WORD == 16)
1373 {
1374 val = k[offset >> 1];
1375 if ((offset & 1) == ! WORDS_BIG_ENDIAN)
1376 val >>= 16;
1377 val = ((val & 0xffff) ^ 0x8000) - 0x8000;
1378 return GEN_INT (val);
1379 }
1380 else
1381 abort ();
1382 }
1383 else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
1384 && GET_MODE_CLASS (mode) == MODE_FLOAT
1385 && GET_MODE_BITSIZE (mode) > 64
1386 && GET_CODE (op) == CONST_DOUBLE)
1387 {
1388 long k[4];
1389 REAL_VALUE_TYPE rv;
1390
1391 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1392 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1393
1394 if (BITS_PER_WORD == 32)
1395 {
1396 val = k[offset];
1397 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1398 return GEN_INT (val);
1399 }
1400 #if HOST_BITS_PER_WIDE_INT >= 64
1401 else if (BITS_PER_WORD >= 64 && offset <= 1)
1402 {
1403 val = k[offset * 2 + ! WORDS_BIG_ENDIAN];
1404 val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
1405 val |= (HOST_WIDE_INT) k[offset * 2 + WORDS_BIG_ENDIAN] & 0xffffffff;
1406 return GEN_INT (val);
1407 }
1408 #endif
1409 else
1410 abort ();
1411 }
1412
1413 /* Single word float is a little harder, since single- and double-word
1414 values often do not have the same high-order bits. We have already
1415 verified that we want the only defined word of the single-word value. */
1416 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1417 && GET_MODE_BITSIZE (mode) == 32
1418 && GET_CODE (op) == CONST_DOUBLE)
1419 {
1420 long l;
1421 REAL_VALUE_TYPE rv;
1422
1423 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1424 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1425
1426 /* Sign extend from known 32-bit value to HOST_WIDE_INT. */
1427 val = l;
1428 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1429
1430 if (BITS_PER_WORD == 16)
1431 {
1432 if ((offset & 1) == ! WORDS_BIG_ENDIAN)
1433 val >>= 16;
1434 val = ((val & 0xffff) ^ 0x8000) - 0x8000;
1435 }
1436
1437 return GEN_INT (val);
1438 }
1439
1440 /* The only remaining cases that we can handle are integers.
1441 Convert to proper endianness now since these cases need it.
1442 At this point, offset == 0 means the low-order word.
1443
1444 We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
1445 in general. However, if OP is (const_int 0), we can just return
1446 it for any word. */
1447
1448 if (op == const0_rtx)
1449 return op;
1450
1451 if (GET_MODE_CLASS (mode) != MODE_INT
1452 || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
1453 || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
1454 return 0;
1455
1456 if (WORDS_BIG_ENDIAN)
1457 offset = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - offset;
1458
1459 /* Find out which word on the host machine this value is in and get
1460 it from the constant. */
1461 val = (offset / size_ratio == 0
1462 ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
1463 : (GET_CODE (op) == CONST_INT
1464 ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));
1465
1466 /* Get the value we want into the low bits of val. */
1467 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
1468 val = ((val >> ((offset % size_ratio) * BITS_PER_WORD)));
1469
1470 val = trunc_int_for_mode (val, word_mode);
1471
1472 return GEN_INT (val);
1473 }
1474
1475 /* Return subword OFFSET of operand OP.
1476 The word number, OFFSET, is interpreted as the word number starting
1477 at the low-order address. OFFSET 0 is the low-order word if not
1478 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1479
1480 If we cannot extract the required word, we return zero. Otherwise,
1481 an rtx corresponding to the requested word will be returned.
1482
1483 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1484 reload has completed, a valid address will always be returned. After
1485 reload, if a valid address cannot be returned, we return zero.
1486
1487 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1488 it is the responsibility of the caller.
1489
1490 MODE is the mode of OP in case it is a CONST_INT.
1491
1492 ??? This is still rather broken for some cases. The problem for the
1493 moment is that all callers of this thing provide no 'goal mode' to
1494 tell us to work with. This exists because all callers were written
1495 in a word-based SUBREG world.
1496 Most uses of this function can now be replaced by
1497 simplify_subreg.
1498 */
1499
1500 rtx
1501 operand_subword (op, offset, validate_address, mode)
1502 rtx op;
1503 unsigned int offset;
1504 int validate_address;
1505 enum machine_mode mode;
1506 {
1507 if (mode == VOIDmode)
1508 mode = GET_MODE (op);
1509
1510 if (mode == VOIDmode)
1511 abort ();
1512
1513 /* If OP is narrower than a word, fail. */
1514 if (mode != BLKmode
1515 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1516 return 0;
1517
1518 /* If we want a word outside OP, return zero. */
1519 if (mode != BLKmode
1520 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1521 return const0_rtx;
1522
1523 /* Form a new MEM at the requested address. */
1524 if (GET_CODE (op) == MEM)
1525 {
1526 rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1527
1528 if (! validate_address)
1529 return new;
1530
1531 else if (reload_completed)
1532 {
1533 if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
1534 return 0;
1535 }
1536 else
1537 return replace_equiv_address (new, XEXP (new, 0));
1538 }
1539
1540 /* Rest can be handled by simplify_subreg. */
1541 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1542 }
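/* Usage sketch (illustrative only): on a target with 32-bit words,
   extracting the two halves of a DImode operand OP:

       rtx lo = operand_subword (op, 0, 1, DImode);
       rtx hi = operand_subword (op, 1, 1, DImode);

   With !WORDS_BIG_ENDIAN, LO is the least significant word; either
   call may return 0 if a valid address cannot be formed after
   reload.  */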
1543
1544 /* Similar to `operand_subword', but never return 0. If we can't extract
1545 the required subword, put OP into a register and try again. If that fails,
1546 abort. We always validate the address in this case.
1547
1548 MODE is the mode of OP, in case it is CONST_INT. */
1549
1550 rtx
1551 operand_subword_force (op, offset, mode)
1552 rtx op;
1553 unsigned int offset;
1554 enum machine_mode mode;
1555 {
1556 rtx result = operand_subword (op, offset, 1, mode);
1557
1558 if (result)
1559 return result;
1560
1561 if (mode != BLKmode && mode != VOIDmode)
1562 {
1563 /* If this is a register which cannot be accessed by words, copy it
1564 to a pseudo register. */
1565 if (GET_CODE (op) == REG)
1566 op = copy_to_reg (op);
1567 else
1568 op = force_reg (mode, op);
1569 }
1570
1571 result = operand_subword (op, offset, 1, mode);
1572 if (result == 0)
1573 abort ();
1574
1575 return result;
1576 }
1577 \f
1578 /* Given a compare instruction, swap the operands.
1579 A test instruction is changed into a compare of 0 against the operand. */
1580
1581 void
1582 reverse_comparison (insn)
1583 rtx insn;
1584 {
1585 rtx body = PATTERN (insn);
1586 rtx comp;
1587
1588 if (GET_CODE (body) == SET)
1589 comp = SET_SRC (body);
1590 else
1591 comp = SET_SRC (XVECEXP (body, 0, 0));
1592
1593 if (GET_CODE (comp) == COMPARE)
1594 {
1595 rtx op0 = XEXP (comp, 0);
1596 rtx op1 = XEXP (comp, 1);
1597 XEXP (comp, 0) = op1;
1598 XEXP (comp, 1) = op0;
1599 }
1600 else
1601 {
1602 rtx new = gen_rtx_COMPARE (VOIDmode,
1603 CONST0_RTX (GET_MODE (comp)), comp);
1604 if (GET_CODE (body) == SET)
1605 SET_SRC (body) = new;
1606 else
1607 SET_SRC (XVECEXP (body, 0, 0)) = new;
1608 }
1609 }
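/* Effect sketch (illustrative only): an insn whose body is

       (set (reg:CC n) (compare:CC (reg:SI a) (reg:SI b)))

   becomes (set (reg:CC n) (compare:CC (reg:SI b) (reg:SI a))), while
   a test such as (set (reg:CC n) (reg:SI a)) becomes a compare of
   (const_int 0) against (reg:SI a).  */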
1610 \f
1611 /* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
1612 or (2) a component ref of something variable. Represent the latter with
1613 a NULL expression. */
1614
1615 static tree
1616 component_ref_for_mem_expr (ref)
1617 tree ref;
1618 {
1619 tree inner = TREE_OPERAND (ref, 0);
1620
1621 if (TREE_CODE (inner) == COMPONENT_REF)
1622 inner = component_ref_for_mem_expr (inner);
1623 else
1624 {
1625 tree placeholder_ptr = 0;
1626
1627 /* Now remove any conversions: they don't change what the underlying
1628 object is. Likewise for SAVE_EXPR. Also handle PLACEHOLDER_EXPR. */
1629 while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
1630 || TREE_CODE (inner) == NON_LVALUE_EXPR
1631 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1632 || TREE_CODE (inner) == SAVE_EXPR
1633 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
1634 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
1635 inner = find_placeholder (inner, &placeholder_ptr);
1636 else
1637 inner = TREE_OPERAND (inner, 0);
1638
1639 if (! DECL_P (inner))
1640 inner = NULL_TREE;
1641 }
1642
1643 if (inner == TREE_OPERAND (ref, 0))
1644 return ref;
1645 else
1646 return build (COMPONENT_REF, TREE_TYPE (ref), inner,
1647 TREE_OPERAND (ref, 1));
1648 }
1649
1650 /* Given REF, a MEM, and T, either the type of X or the expression
1651 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1652 if we are making a new object of this type. */
1653
1654 void
1655 set_mem_attributes (ref, t, objectp)
1656 rtx ref;
1657 tree t;
1658 int objectp;
1659 {
1660 HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
1661 tree expr = MEM_EXPR (ref);
1662 rtx offset = MEM_OFFSET (ref);
1663 rtx size = MEM_SIZE (ref);
1664 unsigned int align = MEM_ALIGN (ref);
1665 tree type;
1666
1667 /* It can happen that type_for_mode was given a mode for which there
1668 is no language-level type; in that case it returns NULL, which
1669 we can see here. */
1670 if (t == NULL_TREE)
1671 return;
1672
1673 type = TYPE_P (t) ? t : TREE_TYPE (t);
1674
1675 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1676 wrong answer, as it assumes that DECL_RTL already has the right alias
1677 info. Callers should not set DECL_RTL until after the call to
1678 set_mem_attributes. */
1679 if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
1680 abort ();
1681
1682 /* Get the alias set from the expression or type (perhaps using a
1683 front-end routine) and use it. */
1684 alias = get_alias_set (t);
1685
1686 MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
1687 MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
1688 RTX_UNCHANGING_P (ref)
1689 |= ((lang_hooks.honor_readonly
1690 && (TYPE_READONLY (type) || TREE_READONLY (t)))
1691 || (! TYPE_P (t) && TREE_CONSTANT (t)));
1692
1693 /* If we are making an object of this type, or if this is a DECL, we know
1694 that it is a scalar if the type is not an aggregate. */
1695 if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
1696 MEM_SCALAR_P (ref) = 1;
1697
1698 /* We can set the alignment from the type if we are making an object,
1699 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1700 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1701 align = MAX (align, TYPE_ALIGN (type));
1702
1703 /* If the size is known, we can set that. */
1704 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1705 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
1706
1707 /* If T is not a type, we may be able to deduce some more information about
1708 the expression. */
1709 if (! TYPE_P (t))
1710 {
1711 maybe_set_unchanging (ref, t);
1712 if (TREE_THIS_VOLATILE (t))
1713 MEM_VOLATILE_P (ref) = 1;
1714
1715 /* Now remove any conversions: they don't change what the underlying
1716 object is. Likewise for SAVE_EXPR. */
1717 while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
1718 || TREE_CODE (t) == NON_LVALUE_EXPR
1719 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1720 || TREE_CODE (t) == SAVE_EXPR)
1721 t = TREE_OPERAND (t, 0);
1722
1723 /* If this expression can't be addressed (e.g., it contains a reference
1724 to a non-addressable field), show we don't change its alias set. */
1725 if (! can_address_p (t))
1726 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1727
1728 /* If this is a decl, set the attributes of the MEM from it. */
1729 if (DECL_P (t))
1730 {
1731 expr = t;
1732 offset = const0_rtx;
1733 size = (DECL_SIZE_UNIT (t)
1734 && host_integerp (DECL_SIZE_UNIT (t), 1)
1735 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1736 align = DECL_ALIGN (t);
1737 }
1738
1739 /* If this is a constant, we know the alignment. */
1740 else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
1741 {
1742 align = TYPE_ALIGN (type);
1743 #ifdef CONSTANT_ALIGNMENT
1744 align = CONSTANT_ALIGNMENT (t, align);
1745 #endif
1746 }
1747
1748 /* If this is a field reference and not a bit-field, record it. */
1749 /* ??? There is some information that can be gleaned from bit-fields,
1750 such as the word offset in the structure that might be modified.
1751 But skip it for now. */
1752 else if (TREE_CODE (t) == COMPONENT_REF
1753 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1754 {
1755 expr = component_ref_for_mem_expr (t);
1756 offset = const0_rtx;
1757 /* ??? Any reason the field size would be different than
1758 the size we got from the type? */
1759 }
1760
1761 /* If this is an array reference, look for an outer field reference. */
1762 else if (TREE_CODE (t) == ARRAY_REF)
1763 {
1764 tree off_tree = size_zero_node;
1765
1766 do
1767 {
1768 off_tree
1769 = fold (build (PLUS_EXPR, sizetype,
1770 fold (build (MULT_EXPR, sizetype,
1771 TREE_OPERAND (t, 1),
1772 TYPE_SIZE_UNIT (TREE_TYPE (t)))),
1773 off_tree));
1774 t = TREE_OPERAND (t, 0);
1775 }
1776 while (TREE_CODE (t) == ARRAY_REF);
1777
1778 if (TREE_CODE (t) == COMPONENT_REF)
1779 {
1780 expr = component_ref_for_mem_expr (t);
1781 if (host_integerp (off_tree, 1))
1782 offset = GEN_INT (tree_low_cst (off_tree, 1));
1783 /* ??? Any reason the field size would be different than
1784 the size we got from the type? */
1785 }
1786 }
1787 }
1788
1789 /* Now set the attributes we computed above. */
1790 MEM_ATTRS (ref)
1791 = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
1792
1793 /* If this is already known to be a scalar or aggregate, we are done. */
1794 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
1795 return;
1796
1797 /* If it is a reference into an aggregate, this is part of an aggregate.
1798 Otherwise we don't know. */
1799 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1800 || TREE_CODE (t) == ARRAY_RANGE_REF
1801 || TREE_CODE (t) == BIT_FIELD_REF)
1802 MEM_IN_STRUCT_P (ref) = 1;
1803 }
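/* Sketch of the DECL case above (illustrative; assumes a 32-bit int,
   and i_decl is a hypothetical name for the VAR_DECL of "int i"):

       set_mem_attributes (mem, i_decl, 1);

   records i_decl as MEM_EXPR, (const_int 0) as MEM_OFFSET,
   (const_int 4) as MEM_SIZE, and DECL_ALIGN (i_decl) as MEM_ALIGN.  */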
1804
1805 /* Set the alias set of MEM to SET. */
1806
1807 void
1808 set_mem_alias_set (mem, set)
1809 rtx mem;
1810 HOST_WIDE_INT set;
1811 {
1812 #ifdef ENABLE_CHECKING
1813 /* If the new and old alias sets don't conflict, something is wrong. */
1814 if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
1815 abort ();
1816 #endif
1817
1818 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
1819 MEM_SIZE (mem), MEM_ALIGN (mem),
1820 GET_MODE (mem));
1821 }
1822
1823 /* Set the alignment of MEM to ALIGN bits. */
1824
1825 void
1826 set_mem_align (mem, align)
1827 rtx mem;
1828 unsigned int align;
1829 {
1830 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1831 MEM_OFFSET (mem), MEM_SIZE (mem), align,
1832 GET_MODE (mem));
1833 }
1834
1835 /* Set the expr for MEM to EXPR. */
1836
1837 void
1838 set_mem_expr (mem, expr)
1839 rtx mem;
1840 tree expr;
1841 {
1842 MEM_ATTRS (mem)
1843 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1844 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1845 }
1846
1847 /* Set the offset of MEM to OFFSET. */
1848
1849 void
1850 set_mem_offset (mem, offset)
1851 rtx mem, offset;
1852 {
1853 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1854 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1855 GET_MODE (mem));
1856 }
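/* Illustrative sketch (not from the original sources; the MEM and the
   values below are hypothetical): each set_mem_* accessor above rebuilds
   the shared, immutable mem_attrs structure via get_mem_attrs rather
   than mutating it in place, so several attributes are updated simply
   by chaining calls:

     rtx mem = gen_rtx_MEM (SImode, stack_pointer_rtx);
     set_mem_align (mem, 64);
     set_mem_alias_set (mem, new_alias_set ());

   Each call reads the remaining attributes back with the MEM_* macros
   and re-enters the combination in the attribute hash table.  */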
1857 \f
1858 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1859 and its address changed to ADDR. (VOIDmode means don't change the mode.
1860 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1861 returned memory location is required to be valid. The memory
1862 attributes are not changed. */
1863
1864 static rtx
1865 change_address_1 (memref, mode, addr, validate)
1866 rtx memref;
1867 enum machine_mode mode;
1868 rtx addr;
1869 int validate;
1870 {
1871 rtx new;
1872
1873 if (GET_CODE (memref) != MEM)
1874 abort ();
1875 if (mode == VOIDmode)
1876 mode = GET_MODE (memref);
1877 if (addr == 0)
1878 addr = XEXP (memref, 0);
1879
1880 if (validate)
1881 {
1882 if (reload_in_progress || reload_completed)
1883 {
1884 if (! memory_address_p (mode, addr))
1885 abort ();
1886 }
1887 else
1888 addr = memory_address (mode, addr);
1889 }
1890
1891 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1892 return memref;
1893
1894 new = gen_rtx_MEM (mode, addr);
1895 MEM_COPY_ATTRIBUTES (new, memref);
1896 return new;
1897 }
1898
1899 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1900 way we are changing MEMREF, so we only preserve the alias set. */
1901
1902 rtx
1903 change_address (memref, mode, addr)
1904 rtx memref;
1905 enum machine_mode mode;
1906 rtx addr;
1907 {
1908 rtx new = change_address_1 (memref, mode, addr, 1);
1909 enum machine_mode mmode = GET_MODE (new);
1910
1911 MEM_ATTRS (new)
1912 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0,
1913 mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode)),
1914 (mmode == BLKmode ? BITS_PER_UNIT
1915 : GET_MODE_ALIGNMENT (mmode)),
1916 mmode);
1917
1918 return new;
1919 }
1920
1921 /* Return a memory reference like MEMREF, but with its mode changed
1922 to MODE and its address offset by OFFSET bytes. If VALIDATE is
1923 nonzero, the memory address is forced to be valid.
1924 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1925 and caller is responsible for adjusting MEMREF base register. */
1926
1927 rtx
1928 adjust_address_1 (memref, mode, offset, validate, adjust)
1929 rtx memref;
1930 enum machine_mode mode;
1931 HOST_WIDE_INT offset;
1932 int validate, adjust;
1933 {
1934 rtx addr = XEXP (memref, 0);
1935 rtx new;
1936 rtx memoffset = MEM_OFFSET (memref);
1937 rtx size = 0;
1938 unsigned int memalign = MEM_ALIGN (memref);
1939
1940 /* ??? Prefer to create garbage instead of creating shared rtl.
1941 This may happen even if offset is non-zero -- consider
1942 (plus (plus reg reg) const_int) -- so do this always. */
1943 addr = copy_rtx (addr);
1944
1945 if (adjust)
1946 {
1947 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
1948 object, we can merge it into the LO_SUM. */
1949 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
1950 && offset >= 0
1951 && (unsigned HOST_WIDE_INT) offset
1952 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
1953 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
1954 plus_constant (XEXP (addr, 1), offset));
1955 else
1956 addr = plus_constant (addr, offset);
1957 }
1958
1959 new = change_address_1 (memref, mode, addr, validate);
1960
1961 /* Compute the new values of the memory attributes due to this adjustment.
1962 We add the offsets and update the alignment. */
1963 if (memoffset)
1964 memoffset = GEN_INT (offset + INTVAL (memoffset));
1965
1966 /* Compute the new alignment by taking the MIN of the alignment and the
1967 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
1968 is zero. */
1969 if (offset != 0)
1970 memalign
1971 = MIN (memalign,
1972 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
1973
1974 /* We can compute the size in a number of ways. */
1975 if (GET_MODE (new) != BLKmode)
1976 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
1977 else if (MEM_SIZE (memref))
1978 size = plus_constant (MEM_SIZE (memref), -offset);
1979
1980 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
1981 memoffset, size, memalign, GET_MODE (new));
1982
1983 /* At some point, we should validate that this offset is within the object,
1984 if all the appropriate values are known. */
1985 return new;
1986 }
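/* A minimal usage sketch (the MEM is hypothetical): most callers go
   through the adjust_address macro, which passes VALIDATE = 1 and
   ADJUST = 1.  To access the third byte of a wider MEM as a QImode
   value:

     rtx byte = adjust_address (mem, QImode, 3);

   The offset, size and alignment attributes are recomputed as above;
   here the alignment drops to at most BITS_PER_UNIT, since an offset
   of 3 guarantees no more.  */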
1987
1988 /* Return a memory reference like MEMREF, but with its mode changed
1989 to MODE and its address changed to ADDR, which is assumed to be
1990 MEMREF offset by OFFSET bytes. If VALIDATE is
1991 nonzero, the memory address is forced to be valid. */
1992
1993 rtx
1994 adjust_automodify_address_1 (memref, mode, addr, offset, validate)
1995 rtx memref;
1996 enum machine_mode mode;
1997 rtx addr;
1998 HOST_WIDE_INT offset;
1999 int validate;
2000 {
2001 memref = change_address_1 (memref, VOIDmode, addr, validate);
2002 return adjust_address_1 (memref, mode, offset, validate, 0);
2003 }
2004
2005 /* Return a memory reference like MEMREF, but whose address is changed by
2006 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2007 known to be in OFFSET (possibly 1). */
2008
2009 rtx
2010 offset_address (memref, offset, pow2)
2011 rtx memref;
2012 rtx offset;
2013 HOST_WIDE_INT pow2;
2014 {
2015 rtx new, addr = XEXP (memref, 0);
2016
2017 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2018
2019 /* At this point we don't know _why_ the address is invalid. It
2020 could have secondary memory references, multiplies or anything.
2021
2022 However, if we did go and rearrange things, we can wind up not
2023 being able to recognize the magic around pic_offset_table_rtx.
2024 This stuff is fragile, and is yet another example of why it is
2025 bad to expose PIC machinery too early. */
2026 if (! memory_address_p (GET_MODE (memref), new)
2027 && GET_CODE (addr) == PLUS
2028 && XEXP (addr, 0) == pic_offset_table_rtx)
2029 {
2030 addr = force_reg (GET_MODE (addr), addr);
2031 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2032 }
2033
2034 update_temp_slot_address (XEXP (memref, 0), new);
2035 new = change_address_1 (memref, VOIDmode, new, 1);
2036
2037 /* Update the alignment to reflect the offset. Reset the offset, which
2038 we don't know. */
2039 MEM_ATTRS (new)
2040 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2041 MIN (MEM_ALIGN (memref),
2042 (unsigned HOST_WIDE_INT) pow2 * BITS_PER_UNIT),
2043 GET_MODE (new));
2044 return new;
2045 }
2046
2047 /* Return a memory reference like MEMREF, but with its address changed to
2048 ADDR. The caller is asserting that the actual piece of memory pointed
2049 to is the same, just the form of the address is being changed, such as
2050 by putting something into a register. */
2051
2052 rtx
2053 replace_equiv_address (memref, addr)
2054 rtx memref;
2055 rtx addr;
2056 {
2057 /* change_address_1 copies the memory attribute structure without change
2058 and that's exactly what we want here. */
2059 update_temp_slot_address (XEXP (memref, 0), addr);
2060 return change_address_1 (memref, VOIDmode, addr, 1);
2061 }
2062
2063 /* Likewise, but the reference is not required to be valid. */
2064
2065 rtx
2066 replace_equiv_address_nv (memref, addr)
2067 rtx memref;
2068 rtx addr;
2069 {
2070 return change_address_1 (memref, VOIDmode, addr, 0);
2071 }
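/* Typical use (illustrative): when an address is too complex for the
   target, it can be copied into a register without changing which
   bytes are referenced:

     if (! memory_address_p (GET_MODE (mem), XEXP (mem, 0)))
       mem = replace_equiv_address (mem,
                                    force_reg (Pmode, XEXP (mem, 0)));

   Only the form of the address changes, so every memory attribute is
   carried over intact.  */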
2072
2073 /* Return a memory reference like MEMREF, but with its mode widened to
2074 MODE and offset by OFFSET. This would be used by targets that e.g.
2075 cannot issue QImode memory operations and have to use SImode memory
2076 operations plus masking logic. */
2077
2078 rtx
2079 widen_memory_access (memref, mode, offset)
2080 rtx memref;
2081 enum machine_mode mode;
2082 HOST_WIDE_INT offset;
2083 {
2084 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
2085 tree expr = MEM_EXPR (new);
2086 rtx memoffset = MEM_OFFSET (new);
2087 unsigned int size = GET_MODE_SIZE (mode);
2088
2089 /* If we don't know what offset we were at within the expression, then
2090 we can't know if we've overstepped the bounds. */
2091 if (! memoffset)
2092 expr = NULL_TREE;
2093
2094 while (expr)
2095 {
2096 if (TREE_CODE (expr) == COMPONENT_REF)
2097 {
2098 tree field = TREE_OPERAND (expr, 1);
2099
2100 if (! DECL_SIZE_UNIT (field))
2101 {
2102 expr = NULL_TREE;
2103 break;
2104 }
2105
2106 /* Is the field at least as large as the access? If so, ok,
2107 otherwise strip back to the containing structure. */
2108 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2109 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2110 && INTVAL (memoffset) >= 0)
2111 break;
2112
2113 if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
2114 {
2115 expr = NULL_TREE;
2116 break;
2117 }
2118
2119 expr = TREE_OPERAND (expr, 0);
2120 memoffset = (GEN_INT (INTVAL (memoffset)
2121 + tree_low_cst (DECL_FIELD_OFFSET (field), 1)
2122 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2123 / BITS_PER_UNIT)));
2124 }
2125 /* Similarly for the decl. */
2126 else if (DECL_P (expr)
2127 && DECL_SIZE_UNIT (expr)
2128 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2129 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2130 && (! memoffset || INTVAL (memoffset) >= 0))
2131 break;
2132 else
2133 {
2134 /* The widened memory access overflows the expression, which means
2135 that it could alias another expression. Zap it. */
2136 expr = NULL_TREE;
2137 break;
2138 }
2139 }
2140
2141 if (! expr)
2142 memoffset = NULL_RTX;
2143
2144 /* The widened memory may alias other stuff, so zap the alias set. */
2145 /* ??? Maybe use get_alias_set on any remaining expression. */
2146
2147 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2148 MEM_ALIGN (new), mode);
2149
2150 return new;
2151 }
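/* Sketch of the intended use (hypothetical target scenario): a port
   without byte loads can widen a QImode reference into a full-word
   access and mask out the byte afterwards:

     rtx wide = widen_memory_access (mem, SImode, 0);

   The alias set of the result is always zapped, and if the widened
   access cannot be proved to stay inside the original object, the
   expression and offset are dropped as well, as the code above
   shows.  */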
2152 \f
2153 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2154
2155 rtx
2156 gen_label_rtx ()
2157 {
2158 rtx label;
2159
2160 label = gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2161 NULL, label_num++, NULL, NULL);
2162
2163 LABEL_NUSES (label) = 0;
2164 LABEL_ALTERNATE_NAME (label) = NULL;
2165 return label;
2166 }
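/* Illustrative use (the surrounding insns are hypothetical): a label
   is created here, referenced by a jump, and later placed in the
   insn stream:

     rtx label = gen_label_rtx ();
     emit_jump_insn (gen_jump (label));
     ...
     emit_label (label);

   LABEL_NUSES is maintained by the jump machinery as references are
   added and removed.  */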
2167 \f
2168 /* For procedure integration. */
2169
2170 /* Install new pointers to the first and last insns in the chain.
2171 Also, set cur_insn_uid to one higher than the last in use.
2172 Used for an inline-procedure after copying the insn chain. */
2173
2174 void
2175 set_new_first_and_last_insn (first, last)
2176 rtx first, last;
2177 {
2178 rtx insn;
2179
2180 first_insn = first;
2181 last_insn = last;
2182 cur_insn_uid = 0;
2183
2184 for (insn = first; insn; insn = NEXT_INSN (insn))
2185 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2186
2187 cur_insn_uid++;
2188 }
2189
2190 /* Set the range of label numbers found in the current function.
2191 This is used when belatedly compiling an inline function. */
2192
2193 void
2194 set_new_first_and_last_label_num (first, last)
2195 int first, last;
2196 {
2197 base_label_num = label_num;
2198 first_label_num = first;
2199 last_label_num = last;
2200 }
2201
2202 /* Set the last label number found in the current function.
2203 This is used when belatedly compiling an inline function. */
2204
2205 void
2206 set_new_last_label_num (last)
2207 int last;
2208 {
2209 base_label_num = label_num;
2210 last_label_num = last;
2211 }
2212 \f
2213 /* Restore all variables describing the current status from the structure *P.
2214 This is used after a nested function. */
2215
2216 void
2217 restore_emit_status (p)
2218 struct function *p ATTRIBUTE_UNUSED;
2219 {
2220 last_label_num = 0;
2221 }
2222 \f
2223 /* Go through all the RTL insn bodies and copy any invalid shared
2224 structure. This routine should only be called once. */
2225
2226 void
2227 unshare_all_rtl (fndecl, insn)
2228 tree fndecl;
2229 rtx insn;
2230 {
2231 tree decl;
2232
2233 /* Make sure that virtual parameters are not shared. */
2234 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2235 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2236
2237 /* Make sure that virtual stack slots are not shared. */
2238 unshare_all_decls (DECL_INITIAL (fndecl));
2239
2240 /* Unshare just about everything else. */
2241 unshare_all_rtl_1 (insn);
2242
2243 /* Make sure the addresses of stack slots found outside the insn chain
2244 (such as, in DECL_RTL of a variable) are not shared
2245 with the insn chain.
2246
2247 This special care is necessary when the stack slot MEM does not
2248 actually appear in the insn chain. If it does appear, its address
2249 is unshared from all else at that point. */
2250 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2251 }
2252
2253 /* Go through all the RTL insn bodies and copy any invalid shared
2254 structure, again. This is a fairly expensive thing to do so it
2255 should be done sparingly. */
2256
2257 void
2258 unshare_all_rtl_again (insn)
2259 rtx insn;
2260 {
2261 rtx p;
2262 tree decl;
2263
2264 for (p = insn; p; p = NEXT_INSN (p))
2265 if (INSN_P (p))
2266 {
2267 reset_used_flags (PATTERN (p));
2268 reset_used_flags (REG_NOTES (p));
2269 reset_used_flags (LOG_LINKS (p));
2270 }
2271
2272 /* Make sure that virtual stack slots are not shared. */
2273 reset_used_decls (DECL_INITIAL (cfun->decl));
2274
2275 /* Make sure that virtual parameters are not shared. */
2276 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2277 reset_used_flags (DECL_RTL (decl));
2278
2279 reset_used_flags (stack_slot_list);
2280
2281 unshare_all_rtl (cfun->decl, insn);
2282 }
2283
2284 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2285 Assumes the mark bits are cleared at entry. */
2286
2287 static void
2288 unshare_all_rtl_1 (insn)
2289 rtx insn;
2290 {
2291 for (; insn; insn = NEXT_INSN (insn))
2292 if (INSN_P (insn))
2293 {
2294 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2295 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2296 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2297 }
2298 }
2299
2300 /* Go through all virtual stack slots of a function and copy any
2301 shared structure. */
2302 static void
2303 unshare_all_decls (blk)
2304 tree blk;
2305 {
2306 tree t;
2307
2308 /* Copy shared decls. */
2309 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2310 if (DECL_RTL_SET_P (t))
2311 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2312
2313 /* Now process sub-blocks. */
2314 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2315 unshare_all_decls (t);
2316 }
2317
2318 /* Go through all virtual stack slots of a function and mark them as
2319 not shared. */
2320 static void
2321 reset_used_decls (blk)
2322 tree blk;
2323 {
2324 tree t;
2325
2326 /* Mark decls. */
2327 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2328 if (DECL_RTL_SET_P (t))
2329 reset_used_flags (DECL_RTL (t));
2330
2331 /* Now process sub-blocks. */
2332 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2333 reset_used_decls (t);
2334 }
2335
2336 /* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
2337 placed in the result directly, rather than being copied. MAY_SHARE is
2338 either a MEM or an EXPR_LIST of MEMs. */
2339
2340 rtx
2341 copy_most_rtx (orig, may_share)
2342 rtx orig;
2343 rtx may_share;
2344 {
2345 rtx copy;
2346 int i, j;
2347 RTX_CODE code;
2348 const char *format_ptr;
2349
2350 if (orig == may_share
2351 || (GET_CODE (may_share) == EXPR_LIST
2352 && in_expr_list_p (may_share, orig)))
2353 return orig;
2354
2355 code = GET_CODE (orig);
2356
2357 switch (code)
2358 {
2359 case REG:
2360 case QUEUED:
2361 case CONST_INT:
2362 case CONST_DOUBLE:
2363 case CONST_VECTOR:
2364 case SYMBOL_REF:
2365 case CODE_LABEL:
2366 case PC:
2367 case CC0:
2368 return orig;
2369 default:
2370 break;
2371 }
2372
2373 copy = rtx_alloc (code);
2374 PUT_MODE (copy, GET_MODE (orig));
2375 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2376 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2377 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2378 RTX_FLAG (copy, integrated) = RTX_FLAG (orig, integrated);
2379 RTX_FLAG (copy, frame_related) = RTX_FLAG (orig, frame_related);
2380
2381 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2382
2383 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2384 {
2385 switch (*format_ptr++)
2386 {
2387 case 'e':
2388 XEXP (copy, i) = XEXP (orig, i);
2389 if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
2390 XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
2391 break;
2392
2393 case 'u':
2394 XEXP (copy, i) = XEXP (orig, i);
2395 break;
2396
2397 case 'E':
2398 case 'V':
2399 XVEC (copy, i) = XVEC (orig, i);
2400 if (XVEC (orig, i) != NULL)
2401 {
2402 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2403 for (j = 0; j < XVECLEN (copy, i); j++)
2404 XVECEXP (copy, i, j)
2405 = copy_most_rtx (XVECEXP (orig, i, j), may_share);
2406 }
2407 break;
2408
2409 case 'w':
2410 XWINT (copy, i) = XWINT (orig, i);
2411 break;
2412
2413 case 'n':
2414 case 'i':
2415 XINT (copy, i) = XINT (orig, i);
2416 break;
2417
2418 case 't':
2419 XTREE (copy, i) = XTREE (orig, i);
2420 break;
2421
2422 case 's':
2423 case 'S':
2424 XSTR (copy, i) = XSTR (orig, i);
2425 break;
2426
2427 case '0':
2428 /* Copy this through the wide int field; that's safest. */
2429 X0WINT (copy, i) = X0WINT (orig, i);
2430 break;
2431
2432 default:
2433 abort ();
2434 }
2435 }
2436 return copy;
2437 }
2438
2439 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2440 Recursively does the same for subexpressions. */
2441
2442 rtx
2443 copy_rtx_if_shared (orig)
2444 rtx orig;
2445 {
2446 rtx x = orig;
2447 int i;
2448 enum rtx_code code;
2449 const char *format_ptr;
2450 int copied = 0;
2451
2452 if (x == 0)
2453 return 0;
2454
2455 code = GET_CODE (x);
2456
2457 /* These types may be freely shared. */
2458
2459 switch (code)
2460 {
2461 case REG:
2462 case QUEUED:
2463 case CONST_INT:
2464 case CONST_DOUBLE:
2465 case CONST_VECTOR:
2466 case SYMBOL_REF:
2467 case CODE_LABEL:
2468 case PC:
2469 case CC0:
2470 case SCRATCH:
2471 /* SCRATCH must be shared because each one represents a distinct value. */
2472 return x;
2473
2474 case CONST:
2475 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2476 a LABEL_REF, it isn't sharable. */
2477 if (GET_CODE (XEXP (x, 0)) == PLUS
2478 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2479 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2480 return x;
2481 break;
2482
2483 case INSN:
2484 case JUMP_INSN:
2485 case CALL_INSN:
2486 case NOTE:
2487 case BARRIER:
2488 /* The chain of insns is not being copied. */
2489 return x;
2490
2491 case MEM:
2492 /* A MEM is allowed to be shared if its address is constant.
2493
2494 We used to allow sharing of MEMs which referenced
2495 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
2496 that can lose. instantiate_virtual_regs will not unshare
2497 the MEMs, and combine may change the structure of the address
2498 because it looks safe and profitable in one context, but
2499 in some other context it creates unrecognizable RTL. */
2500 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
2501 return x;
2502
2503 break;
2504
2505 default:
2506 break;
2507 }
2508
2509 /* This rtx may not be shared. If it has already been seen,
2510 replace it with a copy of itself. */
2511
2512 if (RTX_FLAG (x, used))
2513 {
2514 rtx copy;
2515
2516 copy = rtx_alloc (code);
2517 memcpy (copy, x,
2518 (sizeof (*copy) - sizeof (copy->fld)
2519 + sizeof (copy->fld[0]) * GET_RTX_LENGTH (code)));
2520 x = copy;
2521 copied = 1;
2522 }
2523 RTX_FLAG (x, used) = 1;
2524
2525 /* Now scan the subexpressions recursively.
2526 We can store any replaced subexpressions directly into X
2527 since we know X is not shared! Any vectors in X
2528 must be copied if X was copied. */
2529
2530 format_ptr = GET_RTX_FORMAT (code);
2531
2532 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2533 {
2534 switch (*format_ptr++)
2535 {
2536 case 'e':
2537 XEXP (x, i) = copy_rtx_if_shared (XEXP (x, i));
2538 break;
2539
2540 case 'E':
2541 if (XVEC (x, i) != NULL)
2542 {
2543 int j;
2544 int len = XVECLEN (x, i);
2545
2546 if (copied && len > 0)
2547 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2548 for (j = 0; j < len; j++)
2549 XVECEXP (x, i, j) = copy_rtx_if_shared (XVECEXP (x, i, j));
2550 }
2551 break;
2552 }
2553 }
2554 return x;
2555 }
2556
2557 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2558 to look for shared sub-parts. */
2559
2560 void
2561 reset_used_flags (x)
2562 rtx x;
2563 {
2564 int i, j;
2565 enum rtx_code code;
2566 const char *format_ptr;
2567
2568 if (x == 0)
2569 return;
2570
2571 code = GET_CODE (x);
2572
2573 /* These types may be freely shared so we needn't do any resetting
2574 for them. */
2575
2576 switch (code)
2577 {
2578 case REG:
2579 case QUEUED:
2580 case CONST_INT:
2581 case CONST_DOUBLE:
2582 case CONST_VECTOR:
2583 case SYMBOL_REF:
2584 case CODE_LABEL:
2585 case PC:
2586 case CC0:
2587 return;
2588
2589 case INSN:
2590 case JUMP_INSN:
2591 case CALL_INSN:
2592 case NOTE:
2593 case LABEL_REF:
2594 case BARRIER:
2595 /* The chain of insns is not being copied. */
2596 return;
2597
2598 default:
2599 break;
2600 }
2601
2602 RTX_FLAG (x, used) = 0;
2603
2604 format_ptr = GET_RTX_FORMAT (code);
2605 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2606 {
2607 switch (*format_ptr++)
2608 {
2609 case 'e':
2610 reset_used_flags (XEXP (x, i));
2611 break;
2612
2613 case 'E':
2614 for (j = 0; j < XVECLEN (x, i); j++)
2615 reset_used_flags (XVECEXP (x, i, j));
2616 break;
2617 }
2618 }
2619 }
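/* Sketch of the unsharing idiom (compare unshare_all_rtl_again above):
   first clear the used bits over everything reachable, then let
   copy_rtx_if_shared copy whatever it reaches a second time:

     reset_used_flags (PATTERN (insn));
     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));

   Without the reset pass, stale used bits would make
   copy_rtx_if_shared copy rtl that is in fact referenced only once.  */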
2620 \f
2621 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2622 Return X or the rtx for the pseudo reg the value of X was copied into.
2623 OTHER must be valid as a SET_DEST. */
2624
2625 rtx
2626 make_safe_from (x, other)
2627 rtx x, other;
2628 {
2629 while (1)
2630 switch (GET_CODE (other))
2631 {
2632 case SUBREG:
2633 other = SUBREG_REG (other);
2634 break;
2635 case STRICT_LOW_PART:
2636 case SIGN_EXTEND:
2637 case ZERO_EXTEND:
2638 other = XEXP (other, 0);
2639 break;
2640 default:
2641 goto done;
2642 }
2643 done:
2644 if ((GET_CODE (other) == MEM
2645 && ! CONSTANT_P (x)
2646 && GET_CODE (x) != REG
2647 && GET_CODE (x) != SUBREG)
2648 || (GET_CODE (other) == REG
2649 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2650 || reg_mentioned_p (other, x))))
2651 {
2652 rtx temp = gen_reg_rtx (GET_MODE (x));
2653 emit_move_insn (temp, x);
2654 return temp;
2655 }
2656 return x;
2657 }
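/* Usage sketch (OP0, OP1 and TARGET are hypothetical operands): an
   expander can protect a value it still needs before storing into
   TARGET:

     op1 = make_safe_from (op1, target);
     emit_move_insn (target, op0);
     ... op1 is still usable here ...

   If OP1 might have been clobbered by the store, it now lives in a
   fresh pseudo register.  */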
2658 \f
2659 /* Emission of insns (adding them to the doubly-linked list). */
2660
2661 /* Return the first insn of the current sequence or current function. */
2662
2663 rtx
2664 get_insns ()
2665 {
2666 return first_insn;
2667 }
2668
2669 /* Specify a new insn as the first in the chain. */
2670
2671 void
2672 set_first_insn (insn)
2673 rtx insn;
2674 {
2675 if (PREV_INSN (insn) != 0)
2676 abort ();
2677 first_insn = insn;
2678 }
2679
2680 /* Return the last insn emitted in current sequence or current function. */
2681
2682 rtx
2683 get_last_insn ()
2684 {
2685 return last_insn;
2686 }
2687
2688 /* Specify a new insn as the last in the chain. */
2689
2690 void
2691 set_last_insn (insn)
2692 rtx insn;
2693 {
2694 if (NEXT_INSN (insn) != 0)
2695 abort ();
2696 last_insn = insn;
2697 }
2698
2699 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2700
2701 rtx
2702 get_last_insn_anywhere ()
2703 {
2704 struct sequence_stack *stack;
2705 if (last_insn)
2706 return last_insn;
2707 for (stack = seq_stack; stack; stack = stack->next)
2708 if (stack->last != 0)
2709 return stack->last;
2710 return 0;
2711 }
2712
2713 /* Return a number larger than any instruction's uid in this function. */
2714
2715 int
2716 get_max_uid ()
2717 {
2718 return cur_insn_uid;
2719 }
2720
2721 /* Renumber instructions so that no instruction UIDs are wasted. */
2722
2723 void
2724 renumber_insns (stream)
2725 FILE *stream;
2726 {
2727 rtx insn;
2728
2729 /* If we're not supposed to renumber instructions, don't. */
2730 if (!flag_renumber_insns)
2731 return;
2732
2733 /* If there aren't that many instructions, then it's not really
2734 worth renumbering them. */
2735 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
2736 return;
2737
2738 cur_insn_uid = 1;
2739
2740 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2741 {
2742 if (stream)
2743 fprintf (stream, "Renumbering insn %d to %d\n",
2744 INSN_UID (insn), cur_insn_uid);
2745 INSN_UID (insn) = cur_insn_uid++;
2746 }
2747 }
2748 \f
2749 /* Return the next insn. If it is a SEQUENCE, return the first insn
2750 of the sequence. */
2751
2752 rtx
2753 next_insn (insn)
2754 rtx insn;
2755 {
2756 if (insn)
2757 {
2758 insn = NEXT_INSN (insn);
2759 if (insn && GET_CODE (insn) == INSN
2760 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2761 insn = XVECEXP (PATTERN (insn), 0, 0);
2762 }
2763
2764 return insn;
2765 }
2766
2767 /* Return the previous insn. If it is a SEQUENCE, return the last insn
2768 of the sequence. */
2769
2770 rtx
2771 previous_insn (insn)
2772 rtx insn;
2773 {
2774 if (insn)
2775 {
2776 insn = PREV_INSN (insn);
2777 if (insn && GET_CODE (insn) == INSN
2778 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2779 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2780 }
2781
2782 return insn;
2783 }
2784
2785 /* Return the next insn after INSN that is not a NOTE. This routine does not
2786 look inside SEQUENCEs. */
2787
2788 rtx
2789 next_nonnote_insn (insn)
2790 rtx insn;
2791 {
2792 while (insn)
2793 {
2794 insn = NEXT_INSN (insn);
2795 if (insn == 0 || GET_CODE (insn) != NOTE)
2796 break;
2797 }
2798
2799 return insn;
2800 }
2801
2802 /* Return the previous insn before INSN that is not a NOTE. This routine does
2803 not look inside SEQUENCEs. */
2804
2805 rtx
2806 prev_nonnote_insn (insn)
2807 rtx insn;
2808 {
2809 while (insn)
2810 {
2811 insn = PREV_INSN (insn);
2812 if (insn == 0 || GET_CODE (insn) != NOTE)
2813 break;
2814 }
2815
2816 return insn;
2817 }
2818
2819 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2820 or 0, if there is none. This routine does not look inside
2821 SEQUENCEs. */
2822
2823 rtx
2824 next_real_insn (insn)
2825 rtx insn;
2826 {
2827 while (insn)
2828 {
2829 insn = NEXT_INSN (insn);
2830 if (insn == 0 || GET_CODE (insn) == INSN
2831 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
2832 break;
2833 }
2834
2835 return insn;
2836 }
2837
2838 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2839 or 0, if there is none. This routine does not look inside
2840 SEQUENCEs. */
2841
2842 rtx
2843 prev_real_insn (insn)
2844 rtx insn;
2845 {
2846 while (insn)
2847 {
2848 insn = PREV_INSN (insn);
2849 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
2850 || GET_CODE (insn) == JUMP_INSN)
2851 break;
2852 }
2853
2854 return insn;
2855 }
2856
2857 /* Return nonzero if INSN is an insn that really does something.
2858 CALL_INSNs and JUMP_INSNs always qualify; an ordinary INSN qualifies
2859 unless, after reload, its pattern is just a USE or CLOBBER. */
2860
2861 int
2862 active_insn_p (insn)
2863 rtx insn;
2864 {
2865 return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
2866 || (GET_CODE (insn) == INSN
2867 && (! reload_completed
2868 || (GET_CODE (PATTERN (insn)) != USE
2869 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2870 }
2871
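/* Return the next active insn after INSN, or 0 if there is none.  This
   routine does not look inside SEQUENCEs.  Until reload has completed,
   this is the same as next_real_insn.  */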
2872 rtx
2873 next_active_insn (insn)
2874 rtx insn;
2875 {
2876 while (insn)
2877 {
2878 insn = NEXT_INSN (insn);
2879 if (insn == 0 || active_insn_p (insn))
2880 break;
2881 }
2882
2883 return insn;
2884 }
2885
2886 /* Find the last insn before INSN that really does something. This routine
2887 does not look inside SEQUENCEs. Until reload has completed, this is the
2888 same as prev_real_insn. */
2889
2890 rtx
2891 prev_active_insn (insn)
2892 rtx insn;
2893 {
2894 while (insn)
2895 {
2896 insn = PREV_INSN (insn);
2897 if (insn == 0 || active_insn_p (insn))
2898 break;
2899 }
2900
2901 return insn;
2902 }
2903
2904 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
2905
2906 rtx
2907 next_label (insn)
2908 rtx insn;
2909 {
2910 while (insn)
2911 {
2912 insn = NEXT_INSN (insn);
2913 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2914 break;
2915 }
2916
2917 return insn;
2918 }
2919
2920 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
2921
2922 rtx
2923 prev_label (insn)
2924 rtx insn;
2925 {
2926 while (insn)
2927 {
2928 insn = PREV_INSN (insn);
2929 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2930 break;
2931 }
2932
2933 return insn;
2934 }
2935 \f
2936 #ifdef HAVE_cc0
2937 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
2938 and REG_CC_USER notes so we can find it. */
2939
2940 void
2941 link_cc0_insns (insn)
2942 rtx insn;
2943 {
2944 rtx user = next_nonnote_insn (insn);
2945
2946 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
2947 user = XVECEXP (PATTERN (user), 0, 0);
2948
2949 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
2950 REG_NOTES (user));
2951 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
2952 }
2953
2954 /* Return the next insn that uses CC0 after INSN, which is assumed to
2955 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
2956 applied to the result of this function should yield INSN).
2957
2958 Normally, this is simply the next insn. However, if a REG_CC_USER note
2959 is present, it contains the insn that uses CC0.
2960
2961 Return 0 if we can't find the insn. */
2962
2963 rtx
2964 next_cc0_user (insn)
2965 rtx insn;
2966 {
2967 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
2968
2969 if (note)
2970 return XEXP (note, 0);
2971
2972 insn = next_nonnote_insn (insn);
2973 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
2974 insn = XVECEXP (PATTERN (insn), 0, 0);
2975
2976 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
2977 return insn;
2978
2979 return 0;
2980 }
2981
2982 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
2983 note, it is the previous insn. */
2984
2985 rtx
2986 prev_cc0_setter (insn)
2987 rtx insn;
2988 {
2989 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2990
2991 if (note)
2992 return XEXP (note, 0);
2993
2994 insn = prev_nonnote_insn (insn);
2995 if (! sets_cc0_p (PATTERN (insn)))
2996 abort ();
2997
2998 return insn;
2999 }
3000 #endif
3001
3002 /* Increment the label uses for all labels present in rtx. */
3003
3004 static void
3005 mark_label_nuses (x)
3006 rtx x;
3007 {
3008 enum rtx_code code;
3009 int i, j;
3010 const char *fmt;
3011
3012 code = GET_CODE (x);
3013 if (code == LABEL_REF)
3014 LABEL_NUSES (XEXP (x, 0))++;
3015
3016 fmt = GET_RTX_FORMAT (code);
3017 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3018 {
3019 if (fmt[i] == 'e')
3020 mark_label_nuses (XEXP (x, i));
3021 else if (fmt[i] == 'E')
3022 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3023 mark_label_nuses (XVECEXP (x, i, j));
3024 }
3025 }
3026
3027 \f
3028 /* Try splitting insns that can be split for better scheduling.
3029 PAT is the pattern which might split.
3030 TRIAL is the insn providing PAT.
3031 LAST is non-zero if we should return the last insn of the sequence produced.
3032
3033 If this routine succeeds in splitting, it returns the first or last
3034 replacement insn depending on the value of LAST. Otherwise, it
3035 returns TRIAL. If the insn to be returned can be split, it will be. */
3036
3037 rtx
3038 try_split (pat, trial, last)
3039 rtx pat, trial;
3040 int last;
3041 {
3042 rtx before = PREV_INSN (trial);
3043 rtx after = NEXT_INSN (trial);
3044 int has_barrier = 0;
3045 rtx tem;
3046 rtx note, seq;
3047 int probability;
3048
3049 if (any_condjump_p (trial)
3050 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3051 split_branch_probability = INTVAL (XEXP (note, 0));
3052 probability = split_branch_probability;
3053
3054 seq = split_insns (pat, trial);
3055
3056 split_branch_probability = -1;
3057
3058 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3059 We may need to handle this specially. */
3060 if (after && GET_CODE (after) == BARRIER)
3061 {
3062 has_barrier = 1;
3063 after = NEXT_INSN (after);
3064 }
3065
3066 if (seq)
3067 {
3068 /* SEQ can either be a SEQUENCE or the pattern of a single insn.
3069 The latter case will normally arise only when the split is being done
3070 so that the result, in turn, will be split (SFmode on the 29k is an example). */
3071 if (GET_CODE (seq) == SEQUENCE)
3072 {
3073 int i, njumps = 0;
3074
3075 /* Avoid infinite loop if any insn of the result matches
3076 the original pattern. */
3077 for (i = 0; i < XVECLEN (seq, 0); i++)
3078 if (GET_CODE (XVECEXP (seq, 0, i)) == INSN
3079 && rtx_equal_p (PATTERN (XVECEXP (seq, 0, i)), pat))
3080 return trial;
3081
3082 /* Mark labels. */
3083 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3084 if (GET_CODE (XVECEXP (seq, 0, i)) == JUMP_INSN)
3085 {
3086 rtx insn = XVECEXP (seq, 0, i);
3087 mark_jump_label (PATTERN (insn),
3088 XVECEXP (seq, 0, i), 0);
3089 njumps++;
3090 if (probability != -1
3091 && any_condjump_p (insn)
3092 && !find_reg_note (insn, REG_BR_PROB, 0))
3093 {
3094 /* We can preserve the REG_BR_PROB notes only if exactly
3095 one jump is created, otherwise the machine description
3096 is responsible for this step using
3097 split_branch_probability variable. */
3098 if (njumps != 1)
3099 abort ();
3100 REG_NOTES (insn)
3101 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3102 GEN_INT (probability),
3103 REG_NOTES (insn));
3104 }
3105 }
3106
3107 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3108 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3109 if (GET_CODE (trial) == CALL_INSN)
3110 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3111 if (GET_CODE (XVECEXP (seq, 0, i)) == CALL_INSN)
3112 CALL_INSN_FUNCTION_USAGE (XVECEXP (seq, 0, i))
3113 = CALL_INSN_FUNCTION_USAGE (trial);
3114
3115 /* Copy notes, particularly those related to the CFG. */
3116 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3117 {
3118 switch (REG_NOTE_KIND (note))
3119 {
3120 case REG_EH_REGION:
3121 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3122 {
3123 rtx insn = XVECEXP (seq, 0, i);
3124 if (GET_CODE (insn) == CALL_INSN
3125 || (flag_non_call_exceptions
3126 && may_trap_p (PATTERN (insn))))
3127 REG_NOTES (insn)
3128 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3129 XEXP (note, 0),
3130 REG_NOTES (insn));
3131 }
3132 break;
3133
3134 case REG_NORETURN:
3135 case REG_SETJMP:
3136 case REG_ALWAYS_RETURN:
3137 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3138 {
3139 rtx insn = XVECEXP (seq, 0, i);
3140 if (GET_CODE (insn) == CALL_INSN)
3141 REG_NOTES (insn)
3142 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3143 XEXP (note, 0),
3144 REG_NOTES (insn));
3145 }
3146 break;
3147
3148 case REG_NON_LOCAL_GOTO:
3149 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3150 {
3151 rtx insn = XVECEXP (seq, 0, i);
3152 if (GET_CODE (insn) == JUMP_INSN)
3153 REG_NOTES (insn)
3154 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3155 XEXP (note, 0),
3156 REG_NOTES (insn));
3157 }
3158 break;
3159
3160 default:
3161 break;
3162 }
3163 }
3164
3165 /* If there are LABELS inside the split insns increment the
3166 usage count so we don't delete the label. */
3167 if (GET_CODE (trial) == INSN)
3168 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3169 if (GET_CODE (XVECEXP (seq, 0, i)) == INSN)
3170 mark_label_nuses (PATTERN (XVECEXP (seq, 0, i)));
3171
3172 tem = emit_insn_after (seq, trial);
3173
3174 delete_insn (trial);
3175 if (has_barrier)
3176 emit_barrier_after (tem);
3177
3178 /* Recursively call try_split for each new insn created; by the
3179 time control returns here that insn will be fully split, so
3180 set LAST and continue from the insn after the one returned.
3181 We can't use next_active_insn here since AFTER may be a note.
3182 Ignore deleted insns, which can occur if not optimizing. */
3183 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3184 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3185 tem = try_split (PATTERN (tem), tem, 1);
3186 }
3187 /* Avoid infinite loop if the result matches the original pattern. */
3188 else if (rtx_equal_p (seq, pat))
3189 return trial;
3190 else
3191 {
3192 PATTERN (trial) = seq;
3193 INSN_CODE (trial) = -1;
3194 try_split (seq, trial, last);
3195 }
3196
3197 /* Return either the first or the last insn, depending on which was
3198 requested. */
3199 return last
3200 ? (after ? PREV_INSN (after) : last_insn)
3201 : NEXT_INSN (before);
3202 }
3203
3204 return trial;
3205 }
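/* Typical invocation (illustrative), as when splitting all insns late
   in compilation: the insn is replaced in place and the last
   replacement insn is returned:

     insn = try_split (PATTERN (insn), insn, 1);

   Passing LAST = 1 lets the caller resume scanning after the entire
   replacement sequence.  */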
3206 \f
3207 /* Make and return an INSN rtx, initializing all its slots.
3208 Store PATTERN in the pattern slots. */
3209
3210 rtx
3211 make_insn_raw (pattern)
3212 rtx pattern;
3213 {
3214 rtx insn;
3215
3216 insn = rtx_alloc (INSN);
3217
3218 INSN_UID (insn) = cur_insn_uid++;
3219 PATTERN (insn) = pattern;
3220 INSN_CODE (insn) = -1;
3221 LOG_LINKS (insn) = NULL;
3222 REG_NOTES (insn) = NULL;
3223 INSN_SCOPE (insn) = NULL;
3224 BLOCK_FOR_INSN (insn) = NULL;
3225
3226 #ifdef ENABLE_RTL_CHECKING
3227 if (insn
3228 && INSN_P (insn)
3229 && (returnjump_p (insn)
3230 || (GET_CODE (insn) == SET
3231 && SET_DEST (insn) == pc_rtx)))
3232 {
3233 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
3234 debug_rtx (insn);
3235 }
3236 #endif
3237
3238 return insn;
3239 }
3240
3241 /* Like `make_insn' but make a JUMP_INSN instead of an insn. */
3242
3243 static rtx
3244 make_jump_insn_raw (pattern)
3245 rtx pattern;
3246 {
3247 rtx insn;
3248
3249 insn = rtx_alloc (JUMP_INSN);
3250 INSN_UID (insn) = cur_insn_uid++;
3251
3252 PATTERN (insn) = pattern;
3253 INSN_CODE (insn) = -1;
3254 LOG_LINKS (insn) = NULL;
3255 REG_NOTES (insn) = NULL;
3256 JUMP_LABEL (insn) = NULL;
3257 INSN_SCOPE (insn) = NULL;
3258 BLOCK_FOR_INSN (insn) = NULL;
3259
3260 return insn;
3261 }
3262
3263 /* Like `make_insn' but make a CALL_INSN instead of an insn. */
3264
3265 static rtx
3266 make_call_insn_raw (pattern)
3267 rtx pattern;
3268 {
3269 rtx insn;
3270
3271 insn = rtx_alloc (CALL_INSN);
3272 INSN_UID (insn) = cur_insn_uid++;
3273
3274 PATTERN (insn) = pattern;
3275 INSN_CODE (insn) = -1;
3276 LOG_LINKS (insn) = NULL;
3277 REG_NOTES (insn) = NULL;
3278 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3279 INSN_SCOPE (insn) = NULL;
3280 BLOCK_FOR_INSN (insn) = NULL;
3281
3282 return insn;
3283 }
3284 \f
3285 /* Add INSN to the end of the doubly-linked list.
3286 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3287
3288 void
3289 add_insn (insn)
3290 rtx insn;
3291 {
3292 PREV_INSN (insn) = last_insn;
3293 NEXT_INSN (insn) = 0;
3294
3295 if (NULL != last_insn)
3296 NEXT_INSN (last_insn) = insn;
3297
3298 if (NULL == first_insn)
3299 first_insn = insn;
3300
3301 last_insn = insn;
3302 }
3303
3304 /* Add INSN into the doubly-linked list after insn AFTER. This and
3305 the next should be the only functions called to insert an insn once
3306 delay slots have been filled since only they know how to update a
3307 SEQUENCE. */
3308
3309 void
3310 add_insn_after (insn, after)
3311 rtx insn, after;
3312 {
3313 rtx next = NEXT_INSN (after);
3314 basic_block bb;
3315
3316 if (optimize && INSN_DELETED_P (after))
3317 abort ();
3318
3319 NEXT_INSN (insn) = next;
3320 PREV_INSN (insn) = after;
3321
3322 if (next)
3323 {
3324 PREV_INSN (next) = insn;
3325 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3326 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3327 }
3328 else if (last_insn == after)
3329 last_insn = insn;
3330 else
3331 {
3332 struct sequence_stack *stack = seq_stack;
3333 /* Scan all pending sequences too. */
3334 for (; stack; stack = stack->next)
3335 if (after == stack->last)
3336 {
3337 stack->last = insn;
3338 break;
3339 }
3340
3341 if (stack == 0)
3342 abort ();
3343 }
3344
3345 if (GET_CODE (after) != BARRIER
3346 && GET_CODE (insn) != BARRIER
3347 && (bb = BLOCK_FOR_INSN (after)))
3348 {
3349 set_block_for_insn (insn, bb);
3350 if (INSN_P (insn))
3351 bb->flags |= BB_DIRTY;
3352 /* Should not happen as the first insn in the BB is always
3353 either a NOTE or a LABEL. */
3354 if (bb->end == after
3355 /* Avoid clobbering of structure when creating new BB. */
3356 && GET_CODE (insn) != BARRIER
3357 && (GET_CODE (insn) != NOTE
3358 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3359 bb->end = insn;
3360 }
3361
3362 NEXT_INSN (after) = insn;
3363 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
3364 {
3365 rtx sequence = PATTERN (after);
3366 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3367 }
3368 }
3369
3370 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3371 the previous should be the only functions called to insert an insn once
3372 delay slots have been filled since only they know how to update a
3373 SEQUENCE. */
3374
3375 void
3376 add_insn_before (insn, before)
3377 rtx insn, before;
3378 {
3379 rtx prev = PREV_INSN (before);
3380 basic_block bb;
3381
3382 if (optimize && INSN_DELETED_P (before))
3383 abort ();
3384
3385 PREV_INSN (insn) = prev;
3386 NEXT_INSN (insn) = before;
3387
3388 if (prev)
3389 {
3390 NEXT_INSN (prev) = insn;
3391 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3392 {
3393 rtx sequence = PATTERN (prev);
3394 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3395 }
3396 }
3397 else if (first_insn == before)
3398 first_insn = insn;
3399 else
3400 {
3401 struct sequence_stack *stack = seq_stack;
3402 /* Scan all pending sequences too. */
3403 for (; stack; stack = stack->next)
3404 if (before == stack->first)
3405 {
3406 stack->first = insn;
3407 break;
3408 }
3409
3410 if (stack == 0)
3411 abort ();
3412 }
3413
3414 if (GET_CODE (before) != BARRIER
3415 && GET_CODE (insn) != BARRIER
3416 && (bb = BLOCK_FOR_INSN (before)))
3417 {
3418 set_block_for_insn (insn, bb);
3419 if (INSN_P (insn))
3420 bb->flags |= BB_DIRTY;
3421 /* Should not happen as the first insn in the BB is always
3422 either a NOTE or a LABEL. */
3423 if (bb->head == insn
3424 /* Avoid clobbering of structure when creating new BB. */
3425 && GET_CODE (insn) != BARRIER
3426 && (GET_CODE (insn) != NOTE
3427 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3428 abort ();
3429 }
3430
3431 PREV_INSN (before) = insn;
3432 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
3433 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3434 }
3435
3436 /* Remove an insn from its doubly-linked list. This function knows how
3437 to handle sequences. */
3438 void
3439 remove_insn (insn)
3440 rtx insn;
3441 {
3442 rtx next = NEXT_INSN (insn);
3443 rtx prev = PREV_INSN (insn);
3444 basic_block bb;
3445
3446 if (prev)
3447 {
3448 NEXT_INSN (prev) = next;
3449 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3450 {
3451 rtx sequence = PATTERN (prev);
3452 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3453 }
3454 }
3455 else if (first_insn == insn)
3456 first_insn = next;
3457 else
3458 {
3459 struct sequence_stack *stack = seq_stack;
3460 /* Scan all pending sequences too. */
3461 for (; stack; stack = stack->next)
3462 if (insn == stack->first)
3463 {
3464 stack->first = next;
3465 break;
3466 }
3467
3468 if (stack == 0)
3469 abort ();
3470 }
3471
3472 if (next)
3473 {
3474 PREV_INSN (next) = prev;
3475 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3476 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3477 }
3478 else if (last_insn == insn)
3479 last_insn = prev;
3480 else
3481 {
3482 struct sequence_stack *stack = seq_stack;
3483 /* Scan all pending sequences too. */
3484 for (; stack; stack = stack->next)
3485 if (insn == stack->last)
3486 {
3487 stack->last = prev;
3488 break;
3489 }
3490
3491 if (stack == 0)
3492 abort ();
3493 }
3494 if (GET_CODE (insn) != BARRIER
3495 && (bb = BLOCK_FOR_INSN (insn)))
3496 {
3497 if (INSN_P (insn))
3498 bb->flags |= BB_DIRTY;
3499 if (bb->head == insn)
3500 {
3501 /* Never ever delete the basic block note without deleting whole
3502 basic block. */
3503 if (GET_CODE (insn) == NOTE)
3504 abort ();
3505 bb->head = next;
3506 }
3507 if (bb->end == insn)
3508 bb->end = prev;
3509 }
3510 }
3511
3512 /* Delete all insns made since FROM.
3513 FROM becomes the new last instruction. */
3514
3515 void
3516 delete_insns_since (from)
3517 rtx from;
3518 {
3519 if (from == 0)
3520 first_insn = 0;
3521 else
3522 NEXT_INSN (from) = 0;
3523 last_insn = from;
3524 }
3525
3526 /* This function is deprecated; please use sequences instead.
3527
3528 Move a consecutive bunch of insns to a different place in the chain.
3529 The insns to be moved are those between FROM and TO.
3530 They are moved to a new position after the insn AFTER.
3531 AFTER must not be FROM or TO or any insn in between.
3532
3533 This function does not know about SEQUENCEs and hence should not be
3534 called after delay-slot filling has been done. */
3535
3536 void
3537 reorder_insns_nobb (from, to, after)
3538 rtx from, to, after;
3539 {
3540 /* Splice this bunch out of where it is now. */
3541 if (PREV_INSN (from))
3542 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3543 if (NEXT_INSN (to))
3544 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3545 if (last_insn == to)
3546 last_insn = PREV_INSN (from);
3547 if (first_insn == from)
3548 first_insn = NEXT_INSN (to);
3549
3550 /* Make the new neighbors point to it and it to them. */
3551 if (NEXT_INSN (after))
3552 PREV_INSN (NEXT_INSN (after)) = to;
3553
3554 NEXT_INSN (to) = NEXT_INSN (after);
3555 PREV_INSN (from) = after;
3556 NEXT_INSN (after) = from;
3557 if (after == last_insn)
3558 last_insn = to;
3559 }
3560
3561 /* Same as function above, but take care to update BB boundaries. */
3562 void
3563 reorder_insns (from, to, after)
3564 rtx from, to, after;
3565 {
3566 rtx prev = PREV_INSN (from);
3567 basic_block bb, bb2;
3568
3569 reorder_insns_nobb (from, to, after);
3570
3571 if (GET_CODE (after) != BARRIER
3572 && (bb = BLOCK_FOR_INSN (after)))
3573 {
3574 rtx x;
3575 bb->flags |= BB_DIRTY;
3576
3577 if (GET_CODE (from) != BARRIER
3578 && (bb2 = BLOCK_FOR_INSN (from)))
3579 {
3580 if (bb2->end == to)
3581 bb2->end = prev;
3582 bb2->flags |= BB_DIRTY;
3583 }
3584
3585 if (bb->end == after)
3586 bb->end = to;
3587
3588 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3589 set_block_for_insn (x, bb);
3590 }
3591 }
3592
3593 /* Return the line note insn preceding INSN. */
3594
3595 static rtx
3596 find_line_note (insn)
3597 rtx insn;
3598 {
3599 if (no_line_numbers)
3600 return 0;
3601
3602 for (; insn; insn = PREV_INSN (insn))
3603 if (GET_CODE (insn) == NOTE
3604 && NOTE_LINE_NUMBER (insn) >= 0)
3605 break;
3606
3607 return insn;
3608 }
3609
3610 /* Like reorder_insns, but inserts line notes to preserve the line numbers
3611 of the moved insns when debugging. This may insert a note between AFTER
3612 and FROM, and another one after TO. */
3613
3614 void
3615 reorder_insns_with_line_notes (from, to, after)
3616 rtx from, to, after;
3617 {
3618 rtx from_line = find_line_note (from);
3619 rtx after_line = find_line_note (after);
3620
3621 reorder_insns (from, to, after);
3622
3623 if (from_line == after_line)
3624 return;
3625
3626 if (from_line)
3627 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3628 NOTE_LINE_NUMBER (from_line),
3629 after);
3630 if (after_line)
3631 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3632 NOTE_LINE_NUMBER (after_line),
3633 to);
3634 }
3635
3636 /* Remove unnecessary notes from the instruction stream. */
3637
3638 void
3639 remove_unnecessary_notes ()
3640 {
3641 rtx block_stack = NULL_RTX;
3642 rtx eh_stack = NULL_RTX;
3643 rtx insn;
3644 rtx next;
3645 rtx tmp;
3646
3647 /* We must not remove the first instruction in the function because
3648 the compiler depends on the first instruction being a note. */
3649 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3650 {
3651 /* Remember what's next. */
3652 next = NEXT_INSN (insn);
3653
3654 /* We're only interested in notes. */
3655 if (GET_CODE (insn) != NOTE)
3656 continue;
3657
3658 switch (NOTE_LINE_NUMBER (insn))
3659 {
3660 case NOTE_INSN_DELETED:
3661 case NOTE_INSN_LOOP_END_TOP_COND:
3662 remove_insn (insn);
3663 break;
3664
3665 case NOTE_INSN_EH_REGION_BEG:
3666 eh_stack = alloc_INSN_LIST (insn, eh_stack);
3667 break;
3668
3669 case NOTE_INSN_EH_REGION_END:
3670 /* Too many end notes. */
3671 if (eh_stack == NULL_RTX)
3672 abort ();
3673 /* Mismatched nesting. */
3674 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
3675 abort ();
3676 tmp = eh_stack;
3677 eh_stack = XEXP (eh_stack, 1);
3678 free_INSN_LIST_node (tmp);
3679 break;
3680
3681 case NOTE_INSN_BLOCK_BEG:
3682 /* By now, all notes indicating lexical blocks should have
3683 NOTE_BLOCK filled in. */
3684 if (NOTE_BLOCK (insn) == NULL_TREE)
3685 abort ();
3686 block_stack = alloc_INSN_LIST (insn, block_stack);
3687 break;
3688
3689 case NOTE_INSN_BLOCK_END:
3690 /* Too many end notes. */
3691 if (block_stack == NULL_RTX)
3692 abort ();
3693 /* Mismatched nesting. */
3694 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
3695 abort ();
3696 tmp = block_stack;
3697 block_stack = XEXP (block_stack, 1);
3698 free_INSN_LIST_node (tmp);
3699
3700 /* Scan back to see if there are any non-note instructions
3701 between INSN and the beginning of this block. If not,
3702 then there is no PC range in the generated code that will
3703 actually be in this block, so there's no point in
3704 remembering the existence of the block. */
3705 for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
3706 {
3707 /* This block contains a real instruction. Note that we
3708 don't include labels; if the only thing in the block
3709 is a label, then there are still no PC values that
3710 lie within the block. */
3711 if (INSN_P (tmp))
3712 break;
3713
3714 /* We're only interested in NOTEs. */
3715 if (GET_CODE (tmp) != NOTE)
3716 continue;
3717
3718 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
3719 {
3720 /* We just verified that this BLOCK matches us with
3721 the block_stack check above. Never delete the
3722 BLOCK for the outermost scope of the function; we
3723 can refer to names from that scope even if the
3724 block notes are messed up. */
3725 if (! is_body_block (NOTE_BLOCK (insn))
3726 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
3727 {
3728 remove_insn (tmp);
3729 remove_insn (insn);
3730 }
3731 break;
3732 }
3733 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
3734 /* There's a nested block. We need to leave the
3735 current block in place since otherwise the debugger
3736 wouldn't be able to show symbols from our block in
3737 the nested block. */
3738 break;
3739 }
3740 }
3741 }
3742
3743 /* Too many begin notes. */
3744 if (block_stack || eh_stack)
3745 abort ();
3746 }
3747
3748 \f
3749 /* Emit an insn of given code and pattern
3750 at a specified place within the doubly-linked list. */
3751
3752 /* Make an instruction with body PATTERN
3753 and output it before the instruction BEFORE. */
3754
3755 rtx
3756 emit_insn_before (pattern, before)
3757 rtx pattern, before;
3758 {
3759 rtx insn = before;
3760
3761 if (GET_CODE (pattern) == SEQUENCE)
3762 {
3763 int i;
3764
3765 for (i = 0; i < XVECLEN (pattern, 0); i++)
3766 {
3767 insn = XVECEXP (pattern, 0, i);
3768 add_insn_before (insn, before);
3769 }
3770 }
3771 else
3772 {
3773 insn = make_insn_raw (pattern);
3774 add_insn_before (insn, before);
3775 }
3776
3777 return insn;
3778 }
3779
3780 /* Make an instruction with body PATTERN and code JUMP_INSN
3781 and output it before the instruction BEFORE. */
3782
3783 rtx
3784 emit_jump_insn_before (pattern, before)
3785 rtx pattern, before;
3786 {
3787 rtx insn;
3788
3789 if (GET_CODE (pattern) == SEQUENCE)
3790 insn = emit_insn_before (pattern, before);
3791 else
3792 {
3793 insn = make_jump_insn_raw (pattern);
3794 add_insn_before (insn, before);
3795 }
3796
3797 return insn;
3798 }
3799
3800 /* Make an instruction with body PATTERN and code CALL_INSN
3801 and output it before the instruction BEFORE. */
3802
3803 rtx
3804 emit_call_insn_before (pattern, before)
3805 rtx pattern, before;
3806 {
3807 rtx insn;
3808
3809 if (GET_CODE (pattern) == SEQUENCE)
3810 insn = emit_insn_before (pattern, before);
3811 else
3812 {
3813 insn = make_call_insn_raw (pattern);
3814 add_insn_before (insn, before);
3815 PUT_CODE (insn, CALL_INSN);
3816 }
3817
3818 return insn;
3819 }
3820
3821 /* Make an instruction with body PATTERN and code CALL_INSN
3822 and output it after the instruction BEFORE. */
3823
3824 rtx
3825 emit_call_insn_after (pattern, before)
3826 rtx pattern, before;
3827 {
3828 rtx insn;
3829
3830 if (GET_CODE (pattern) == SEQUENCE)
3831 insn = emit_insn_after (pattern, before);
3832 else
3833 {
3834 insn = make_call_insn_raw (pattern);
3835 add_insn_after (insn, before);
3836 PUT_CODE (insn, CALL_INSN);
3837 }
3838
3839 return insn;
3840 }
3841
3842 /* Make an insn of code BARRIER
3843 and output it before the insn BEFORE. */
3844
3845 rtx
3846 emit_barrier_before (before)
3847 rtx before;
3848 {
3849 rtx insn = rtx_alloc (BARRIER);
3850
3851 INSN_UID (insn) = cur_insn_uid++;
3852
3853 add_insn_before (insn, before);
3854 return insn;
3855 }
3856
3857 /* Emit the label LABEL before the insn BEFORE. */
3858
3859 rtx
3860 emit_label_before (label, before)
3861 rtx label, before;
3862 {
3863 /* This can be called twice for the same label as a result of the
3864 confusion that follows a syntax error! So make it harmless. */
3865 if (INSN_UID (label) == 0)
3866 {
3867 INSN_UID (label) = cur_insn_uid++;
3868 add_insn_before (label, before);
3869 }
3870
3871 return label;
3872 }
3873
3874 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
3875
3876 rtx
3877 emit_note_before (subtype, before)
3878 int subtype;
3879 rtx before;
3880 {
3881 rtx note = rtx_alloc (NOTE);
3882 INSN_UID (note) = cur_insn_uid++;
3883 NOTE_SOURCE_FILE (note) = 0;
3884 NOTE_LINE_NUMBER (note) = subtype;
3885 BLOCK_FOR_INSN (note) = NULL;
3886
3887 add_insn_before (note, before);
3888 return note;
3889 }
3890 \f
3891 /* Make an insn of code INSN with body PATTERN
3892 and output it after the insn AFTER. */
3893
3894 rtx
3895 emit_insn_after (pattern, after)
3896 rtx pattern, after;
3897 {
3898 rtx insn = after;
3899
3900 if (GET_CODE (pattern) == SEQUENCE)
3901 {
3902 int i;
3903
3904 for (i = 0; i < XVECLEN (pattern, 0); i++)
3905 {
3906 insn = XVECEXP (pattern, 0, i);
3907 add_insn_after (insn, after);
3908 after = insn;
3909 }
3910 }
3911 else
3912 {
3913 insn = make_insn_raw (pattern);
3914 add_insn_after (insn, after);
3915 }
3916
3917 return insn;
3918 }
3919
3920 /* Similar to emit_insn_after, except that line notes are to be inserted so
3921 as to act as if this insn were at FROM. */
3922
3923 void
3924 emit_insn_after_with_line_notes (pattern, after, from)
3925 rtx pattern, after, from;
3926 {
3927 rtx from_line = find_line_note (from);
3928 rtx after_line = find_line_note (after);
3929 rtx insn = emit_insn_after (pattern, after);
3930
3931 if (from_line)
3932 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3933 NOTE_LINE_NUMBER (from_line),
3934 after);
3935
3936 if (after_line)
3937 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3938 NOTE_LINE_NUMBER (after_line),
3939 insn);
3940 }
3941
3942 /* Make an insn of code JUMP_INSN with body PATTERN
3943 and output it after the insn AFTER. */
3944
3945 rtx
3946 emit_jump_insn_after (pattern, after)
3947 rtx pattern, after;
3948 {
3949 rtx insn;
3950
3951 if (GET_CODE (pattern) == SEQUENCE)
3952 insn = emit_insn_after (pattern, after);
3953 else
3954 {
3955 insn = make_jump_insn_raw (pattern);
3956 add_insn_after (insn, after);
3957 }
3958
3959 return insn;
3960 }
3961
3962 /* Make an insn of code BARRIER
3963 and output it after the insn AFTER. */
3964
3965 rtx
3966 emit_barrier_after (after)
3967 rtx after;
3968 {
3969 rtx insn = rtx_alloc (BARRIER);
3970
3971 INSN_UID (insn) = cur_insn_uid++;
3972
3973 add_insn_after (insn, after);
3974 return insn;
3975 }
3976
3977 /* Emit the label LABEL after the insn AFTER. */
3978
3979 rtx
3980 emit_label_after (label, after)
3981 rtx label, after;
3982 {
3983 /* This can be called twice for the same label
3984 as a result of the confusion that follows a syntax error!
3985 So make it harmless. */
3986 if (INSN_UID (label) == 0)
3987 {
3988 INSN_UID (label) = cur_insn_uid++;
3989 add_insn_after (label, after);
3990 }
3991
3992 return label;
3993 }
3994
3995 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
3996
3997 rtx
3998 emit_note_after (subtype, after)
3999 int subtype;
4000 rtx after;
4001 {
4002 rtx note = rtx_alloc (NOTE);
4003 INSN_UID (note) = cur_insn_uid++;
4004 NOTE_SOURCE_FILE (note) = 0;
4005 NOTE_LINE_NUMBER (note) = subtype;
4006 BLOCK_FOR_INSN (note) = NULL;
4007 add_insn_after (note, after);
4008 return note;
4009 }
4010
4011 /* Emit a line note for FILE and LINE after the insn AFTER. */
4012
4013 rtx
4014 emit_line_note_after (file, line, after)
4015 const char *file;
4016 int line;
4017 rtx after;
4018 {
4019 rtx note;
4020
4021 if (no_line_numbers && line > 0)
4022 {
4023 cur_insn_uid++;
4024 return 0;
4025 }
4026
4027 note = rtx_alloc (NOTE);
4028 INSN_UID (note) = cur_insn_uid++;
4029 NOTE_SOURCE_FILE (note) = file;
4030 NOTE_LINE_NUMBER (note) = line;
4031 BLOCK_FOR_INSN (note) = NULL;
4032 add_insn_after (note, after);
4033 return note;
4034 }
4035 \f
4036 /* Make an insn of code INSN with pattern PATTERN
4037 and add it to the end of the doubly-linked list.
4038 If PATTERN is a SEQUENCE, take the elements of it
4039 and emit an insn for each element.
4040
4041 Returns the last insn emitted. */
4042
4043 rtx
4044 emit_insn (pattern)
4045 rtx pattern;
4046 {
4047 rtx insn = last_insn;
4048
4049 if (GET_CODE (pattern) == SEQUENCE)
4050 {
4051 int i;
4052
4053 for (i = 0; i < XVECLEN (pattern, 0); i++)
4054 {
4055 insn = XVECEXP (pattern, 0, i);
4056 add_insn (insn);
4057 }
4058 }
4059 else
4060 {
4061 insn = make_insn_raw (pattern);
4062 add_insn (insn);
4063 }
4064
4065 return insn;
4066 }
4067
4068 /* Emit the insns in a chain starting with INSN.
4069 Return the last insn emitted. */
4070
4071 rtx
4072 emit_insns (insn)
4073 rtx insn;
4074 {
4075 rtx last = 0;
4076
4077 while (insn)
4078 {
4079 rtx next = NEXT_INSN (insn);
4080 add_insn (insn);
4081 last = insn;
4082 insn = next;
4083 }
4084
4085 return last;
4086 }
4087
4088 /* Emit the insns in a chain starting with INSN and place them in front of
4089 the insn BEFORE. Return the last insn emitted. */
4090
4091 rtx
4092 emit_insns_before (insn, before)
4093 rtx insn;
4094 rtx before;
4095 {
4096 rtx last = 0;
4097
4098 while (insn)
4099 {
4100 rtx next = NEXT_INSN (insn);
4101 add_insn_before (insn, before);
4102 last = insn;
4103 insn = next;
4104 }
4105
4106 return last;
4107 }
4108
4109 /* Emit the insns in a chain starting with FIRST and place them after
4110 the insn AFTER. Return the last insn emitted. */
4111
4112 rtx
4113 emit_insns_after (first, after)
4114 rtx first;
4115 rtx after;
4116 {
4117 rtx last;
4118 rtx after_after;
4119 basic_block bb;
4120
4121 if (!after)
4122 abort ();
4123
4124 if (!first)
4125 return after;
4126
4127 if (GET_CODE (after) != BARRIER
4128 && (bb = BLOCK_FOR_INSN (after)))
4129 {
4130 bb->flags |= BB_DIRTY;
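/* Assign each insn of the chain to BB.  The loop visits every insn
   except the final one (for which NEXT_INSN is zero); the if statement
   after the loop handles that last insn.  */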
4131 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4132 if (GET_CODE (last) != BARRIER)
4133 set_block_for_insn (last, bb);
4134 if (GET_CODE (last) != BARRIER)
4135 set_block_for_insn (last, bb);
4136 if (bb->end == after)
4137 bb->end = last;
4138 }
4139 else
4140 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4141 continue;
4142
4143 after_after = NEXT_INSN (after);
4144
4145 NEXT_INSN (after) = first;
4146 PREV_INSN (first) = after;
4147 NEXT_INSN (last) = after_after;
4148 if (after_after)
4149 PREV_INSN (after_after) = last;
4150
4151 if (after == last_insn)
4152 last_insn = last;
4153 return last;
4154 }
4155
4156 /* Make an insn of code JUMP_INSN with pattern PATTERN
4157 and add it to the end of the doubly-linked list. */
4158
4159 rtx
4160 emit_jump_insn (pattern)
4161 rtx pattern;
4162 {
4163 if (GET_CODE (pattern) == SEQUENCE)
4164 return emit_insn (pattern);
4165 else
4166 {
4167 rtx insn = make_jump_insn_raw (pattern);
4168 add_insn (insn);
4169 return insn;
4170 }
4171 }
4172
4173 /* Make an insn of code CALL_INSN with pattern PATTERN
4174 and add it to the end of the doubly-linked list. */
4175
4176 rtx
4177 emit_call_insn (pattern)
4178 rtx pattern;
4179 {
4180 if (GET_CODE (pattern) == SEQUENCE)
4181 return emit_insn (pattern);
4182 else
4183 {
4184 rtx insn = make_call_insn_raw (pattern);
4185 add_insn (insn);
4186 PUT_CODE (insn, CALL_INSN);
4187 return insn;
4188 }
4189 }
4190
4191 /* Add the label LABEL to the end of the doubly-linked list. */
4192
4193 rtx
4194 emit_label (label)
4195 rtx label;
4196 {
4197 /* This can be called twice for the same label
4198 as a result of the confusion that follows a syntax error!
4199 So make it harmless. */
4200 if (INSN_UID (label) == 0)
4201 {
4202 INSN_UID (label) = cur_insn_uid++;
4203 add_insn (label);
4204 }
4205 return label;
4206 }
4207
4208 /* Make an insn of code BARRIER
4209 and add it to the end of the doubly-linked list. */
4210
4211 rtx
4212 emit_barrier ()
4213 {
4214 rtx barrier = rtx_alloc (BARRIER);
4215 INSN_UID (barrier) = cur_insn_uid++;
4216 add_insn (barrier);
4217 return barrier;
4218 }
4219
4220 /* Make an insn of code NOTE
4221 with data-fields specified by FILE and LINE
4222 and add it to the end of the doubly-linked list,
4223 but only if line-numbers are desired for debugging info. */
4224
4225 rtx
4226 emit_line_note (file, line)
4227 const char *file;
4228 int line;
4229 {
4230 set_file_and_line_for_stmt (file, line);
4231
4232 #if 0
4233 if (no_line_numbers)
4234 return 0;
4235 #endif
4236
4237 return emit_note (file, line);
4238 }
4239
4240 /* Make an insn of code NOTE
4241 with data-fields specified by FILE and LINE
4242 and add it to the end of the doubly-linked list.
4243 If it is a line-number NOTE, omit it if it matches the previous one. */
4244
4245 rtx
4246 emit_note (file, line)
4247 const char *file;
4248 int line;
4249 {
4250 rtx note;
4251
4252 if (line > 0)
4253 {
4254 if (file && last_filename && !strcmp (file, last_filename)
4255 && line == last_linenum)
4256 return 0;
4257 last_filename = file;
4258 last_linenum = line;
4259 }
4260
4261 if (no_line_numbers && line > 0)
4262 {
4263 cur_insn_uid++;
4264 return 0;
4265 }
4266
4267 note = rtx_alloc (NOTE);
4268 INSN_UID (note) = cur_insn_uid++;
4269 NOTE_SOURCE_FILE (note) = file;
4270 NOTE_LINE_NUMBER (note) = line;
4271 BLOCK_FOR_INSN (note) = NULL;
4272 add_insn (note);
4273 return note;
4274 }
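
/* Illustrative sketch, not part of the original file: LINE doubles as a
   note subtype.  A positive value is a source line number; zero or a
   negative NOTE_INSN_* code marks a special note, for which FILE is
   ignored.  The file name and line number below are made up.  */
#if 0
static void
note_examples ()
{
  emit_note ("foo.c", 42);             /* line-number note */
  emit_note (NULL, NOTE_INSN_DELETED); /* special marker note */
}
#endif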
4275
4276 /* Emit a line note for FILE and LINE, and don't omit it even if LINE matches the previous note's line number. */
4277
4278 rtx
4279 emit_line_note_force (file, line)
4280 const char *file;
4281 int line;
4282 {
4283 last_linenum = -1;
4284 return emit_line_note (file, line);
4285 }
4286
4287 /* Cause next statement to emit a line note even if the line number
4288 has not changed. This is used at the beginning of a function. */
4289
4290 void
4291 force_next_line_note ()
4292 {
4293 last_linenum = -1;
4294 }
4295
4296 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4297 note of this kind already exists, its datum is replaced. */
4298
4299 rtx
4300 set_unique_reg_note (insn, kind, datum)
4301 rtx insn;
4302 enum reg_note kind;
4303 rtx datum;
4304 {
4305 rtx note = find_reg_note (insn, kind, NULL_RTX);
4306
4307 switch (kind)
4308 {
4309 case REG_EQUAL:
4310 case REG_EQUIV:
4311 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4312 has multiple sets (some callers assume single_set
4313 means the insn only has one set, when in fact it
4314 means the insn only has one *useful* set). */
4315 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4316 {
4317 if (note)
4318 abort ();
4319 return NULL_RTX;
4320 }
4321
4322 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4323 It serves no useful purpose and breaks eliminate_regs. */
4324 if (GET_CODE (datum) == ASM_OPERANDS)
4325 return NULL_RTX;
4326 break;
4327
4328 default:
4329 break;
4330 }
4331
4332 if (note)
4333 {
4334 XEXP (note, 0) = datum;
4335 return note;
4336 }
4337
4338 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4339 return REG_NOTES (insn);
4340 }
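
/* Illustrative sketch, not part of the original file: recording that
   INSN's single set computes the constant 42 (the value is made up).
   A later call with the same kind replaces the datum instead of
   adding a second note.  */
#if 0
static void
record_constant_value (insn)
     rtx insn;
{
  set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));
}
#endif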
4341 \f
4342 /* Return an indication of which type of insn should have X as a body.
4343 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4344
4345 enum rtx_code
4346 classify_insn (x)
4347 rtx x;
4348 {
4349 if (GET_CODE (x) == CODE_LABEL)
4350 return CODE_LABEL;
4351 if (GET_CODE (x) == CALL)
4352 return CALL_INSN;
4353 if (GET_CODE (x) == RETURN)
4354 return JUMP_INSN;
4355 if (GET_CODE (x) == SET)
4356 {
4357 if (SET_DEST (x) == pc_rtx)
4358 return JUMP_INSN;
4359 else if (GET_CODE (SET_SRC (x)) == CALL)
4360 return CALL_INSN;
4361 else
4362 return INSN;
4363 }
4364 if (GET_CODE (x) == PARALLEL)
4365 {
4366 int j;
4367 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4368 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4369 return CALL_INSN;
4370 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4371 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4372 return JUMP_INSN;
4373 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4374 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4375 return CALL_INSN;
4376 }
4377 return INSN;
4378 }
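
/* Illustrative sketch, not part of the original file: a store to the
   program counter classifies as a jump, while an ordinary assignment
   is a plain INSN.  LABEL and REG are hypothetical operands.  */
#if 0
static void
classify_examples (label, reg)
     rtx label, reg;
{
  /* Yields JUMP_INSN: the SET destination is pc_rtx.  */
  classify_insn (gen_rtx_SET (VOIDmode, pc_rtx,
                              gen_rtx_LABEL_REF (VOIDmode, label)));

  /* Yields INSN: an ordinary assignment.  */
  classify_insn (gen_rtx_SET (VOIDmode, reg, const0_rtx));
}
#endif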
4379
4380 /* Emit the rtl pattern X as an appropriate kind of insn.
4381 If X is a label, it is simply added into the insn chain. */
4382
4383 rtx
4384 emit (x)
4385 rtx x;
4386 {
4387 enum rtx_code code = classify_insn (x);
4388
4389 if (code == CODE_LABEL)
4390 return emit_label (x);
4391 else if (code == INSN)
4392 return emit_insn (x);
4393 else if (code == JUMP_INSN)
4394 {
4395 rtx insn = emit_jump_insn (x);
4396 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4397 return emit_barrier ();
4398 return insn;
4399 }
4400 else if (code == CALL_INSN)
4401 return emit_call_insn (x);
4402 else
4403 abort ();
4404 }
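
/* Illustrative sketch, not part of the original file: emit dispatches
   on classify_insn, so handing it a RETURN pattern emits a JUMP_INSN
   followed by the barrier that must end an unconditional jump.  */
#if 0
static void
emit_return_example ()
{
  emit (gen_rtx (RETURN, VOIDmode));
}
#endif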
4405 \f
4406 /* Space for free sequence stack entries. */
4407 static GTY ((deletable (""))) struct sequence_stack *free_sequence_stack;
4408
4409 /* Begin emitting insns to a sequence which can be packaged in an
4410 RTL_EXPR. If this sequence will contain something that might cause
4411 the compiler to pop arguments to function calls (because those
4412 pops have previously been deferred; see INHIBIT_DEFER_POP for more
4413 details), use do_pending_stack_adjust before calling this function.
4414 That will ensure that the deferred pops are not accidentally
4415 emitted in the middle of this sequence. */
4416
4417 void
4418 start_sequence ()
4419 {
4420 struct sequence_stack *tem;
4421
4422 if (free_sequence_stack != NULL)
4423 {
4424 tem = free_sequence_stack;
4425 free_sequence_stack = tem->next;
4426 }
4427 else
4428 tem = (struct sequence_stack *) ggc_alloc (sizeof (struct sequence_stack));
4429
4430 tem->next = seq_stack;
4431 tem->first = first_insn;
4432 tem->last = last_insn;
4433 tem->sequence_rtl_expr = seq_rtl_expr;
4434
4435 seq_stack = tem;
4436
4437 first_insn = 0;
4438 last_insn = 0;
4439 }
4440
4441 /* Similarly, but indicate that this sequence will be placed in T, an
4442 RTL_EXPR. See the documentation for start_sequence for more
4443 information about how to use this function. */
4444
4445 void
4446 start_sequence_for_rtl_expr (t)
4447 tree t;
4448 {
4449 start_sequence ();
4450
4451 seq_rtl_expr = t;
4452 }
4453
4454 /* Set up the insn chain starting with FIRST as the current sequence,
4455 saving the previously current one. See the documentation for
4456 start_sequence for more information about how to use this function. */
4457
4458 void
4459 push_to_sequence (first)
4460 rtx first;
4461 {
4462 rtx last;
4463
4464 start_sequence ();
4465
4466 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4467
4468 first_insn = first;
4469 last_insn = last;
4470 }
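
/* Illustrative sketch, not part of the original file: appending PAT to
   a previously saved chain.  get_insns fetches the possibly changed
   head of the chain before the sequence state is popped.  The helper
   and its arguments are invented for this example.  */
#if 0
static rtx
append_to_chain (chain, pat)
     rtx chain, pat;
{
  push_to_sequence (chain);
  emit_insn (pat);
  chain = get_insns ();
  end_sequence ();
  return chain;
}
#endif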
4471
4472 /* Set up the insn chain from FIRST to LAST as the current sequence. */
4473
4474 void
4475 push_to_full_sequence (first, last)
4476 rtx first, last;
4477 {
4478 start_sequence ();
4479 first_insn = first;
4480 last_insn = last;
4481 /* We really should have the end of the insn chain here. */
4482 if (last && NEXT_INSN (last))
4483 abort ();
4484 }
4485
4486 /* Set up the outer-level insn chain
4487 as the current sequence, saving the previously current one. */
4488
4489 void
4490 push_topmost_sequence ()
4491 {
4492 struct sequence_stack *stack, *top = NULL;
4493
4494 start_sequence ();
4495
4496 for (stack = seq_stack; stack; stack = stack->next)
4497 top = stack;
4498
4499 first_insn = top->first;
4500 last_insn = top->last;
4501 seq_rtl_expr = top->sequence_rtl_expr;
4502 }
4503
4504 /* After emitting to the outer-level insn chain, update the outer-level
4505 insn chain, and restore the previous saved state. */
4506
4507 void
4508 pop_topmost_sequence ()
4509 {
4510 struct sequence_stack *stack, *top = NULL;
4511
4512 for (stack = seq_stack; stack; stack = stack->next)
4513 top = stack;
4514
4515 top->first = first_insn;
4516 top->last = last_insn;
4517 /* ??? Why don't we save seq_rtl_expr here? */
4518
4519 end_sequence ();
4520 }
4521
4522 /* After emitting to a sequence, restore previous saved state.
4523
4524 To get the contents of the sequence just made, you must call
4525 `gen_sequence' *before* calling here.
4526
4527 If the compiler might have deferred popping arguments while
4528 generating this sequence, and this sequence will not be immediately
4529 inserted into the instruction stream, use do_pending_stack_adjust
4530 before calling gen_sequence. That will ensure that the deferred
4531 pops are inserted into this sequence, and not into some random
4532 location in the instruction stream. See INHIBIT_DEFER_POP for more
4533 information about deferred popping of arguments. */
4534
4535 void
4536 end_sequence ()
4537 {
4538 struct sequence_stack *tem = seq_stack;
4539
4540 first_insn = tem->first;
4541 last_insn = tem->last;
4542 seq_rtl_expr = tem->sequence_rtl_expr;
4543 seq_stack = tem->next;
4544
4545 memset (tem, 0, sizeof (*tem));
4546 tem->next = free_sequence_stack;
4547 free_sequence_stack = tem;
4548 }
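
/* Illustrative sketch, not part of the original file: the usual pairing
   of start_sequence with gen_sequence and end_sequence, inserting the
   packaged result before WHERE.  The helper is invented for this
   example.  */
#if 0
static rtx
insert_move_before (dest, src, where)
     rtx dest, src, where;
{
  rtx seq;

  start_sequence ();
  emit_insn (gen_rtx_SET (VOIDmode, dest, src));
  seq = gen_sequence ();	/* Must precede end_sequence.  */
  end_sequence ();

  return emit_insn_before (seq, where);
}
#endif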
4549
4550 /* This works like end_sequence, but records the old sequence in FIRST
4551 and LAST. */
4552
4553 void
4554 end_full_sequence (first, last)
4555 rtx *first, *last;
4556 {
4557 *first = first_insn;
4558 *last = last_insn;
4559 end_sequence ();
4560 }
4561
4562 /* Return 1 if currently emitting into a sequence. */
4563
4564 int
4565 in_sequence_p ()
4566 {
4567 return seq_stack != 0;
4568 }
4569
4570 /* Generate a SEQUENCE rtx containing the insns already emitted
4571 to the current sequence.
4572
4573 This is how the gen_... function from a DEFINE_EXPAND
4574 constructs the SEQUENCE that it returns. */
4575
4576 rtx
4577 gen_sequence ()
4578 {
4579 rtx result;
4580 rtx tem;
4581 int i;
4582 int len;
4583
4584 /* Count the insns in the chain. */
4585 len = 0;
4586 for (tem = first_insn; tem; tem = NEXT_INSN (tem))
4587 len++;
4588
4589 /* If only one insn, return it rather than a SEQUENCE.
4590 (Now that we cache SEQUENCE expressions, it isn't worth special-casing
4591 the case of an empty list.)
4592 We only return the pattern of an insn if its code is INSN and it
4593 has no notes. This ensures that no information gets lost. */
4594 if (len == 1
4595 && GET_CODE (first_insn) == INSN
4596 && ! RTX_FRAME_RELATED_P (first_insn)
4597 /* Don't throw away any reg notes. */
4598 && REG_NOTES (first_insn) == 0)
4599 return PATTERN (first_insn);
4600
4601 result = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (len));
4602
4603 for (i = 0, tem = first_insn; tem; tem = NEXT_INSN (tem), i++)
4604 XVECEXP (result, 0, i) = tem;
4605
4606 return result;
4607 }
4608 \f
4609 /* Put the various virtual registers into REGNO_REG_RTX. */
4610
4611 void
4612 init_virtual_regs (es)
4613 struct emit_status *es;
4614 {
4615 rtx *ptr = es->x_regno_reg_rtx;
4616 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
4617 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
4618 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
4619 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
4620 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
4621 }
4622
4623 \f
4624 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
4625 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
4626 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
4627 static int copy_insn_n_scratches;
4628
4629 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4630 copied an ASM_OPERANDS.
4631 In that case, it is the original input-operand vector. */
4632 static rtvec orig_asm_operands_vector;
4633
4634 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4635 copied an ASM_OPERANDS.
4636 In that case, it is the copied input-operand vector. */
4637 static rtvec copy_asm_operands_vector;
4638
4639 /* Likewise for the constraints vector. */
4640 static rtvec orig_asm_constraints_vector;
4641 static rtvec copy_asm_constraints_vector;
4642
4643 /* Recursively create a new copy of an rtx for copy_insn.
4644 This function differs from copy_rtx in that it handles SCRATCHes and
4645 ASM_OPERANDs properly.
4646 Normally, this function is not used directly; use copy_insn as front end.
4647 However, you could first copy an insn pattern with copy_insn and then use
4648 this function afterwards to properly copy any REG_NOTEs containing
4649 SCRATCHes. */
4650
4651 rtx
4652 copy_insn_1 (orig)
4653 rtx orig;
4654 {
4655 rtx copy;
4656 int i, j;
4657 RTX_CODE code;
4658 const char *format_ptr;
4659
4660 code = GET_CODE (orig);
4661
4662 switch (code)
4663 {
4664 case REG:
4665 case QUEUED:
4666 case CONST_INT:
4667 case CONST_DOUBLE:
4668 case CONST_VECTOR:
4669 case SYMBOL_REF:
4670 case CODE_LABEL:
4671 case PC:
4672 case CC0:
4673 case ADDRESSOF:
4674 return orig;
4675
4676 case SCRATCH:
4677 for (i = 0; i < copy_insn_n_scratches; i++)
4678 if (copy_insn_scratch_in[i] == orig)
4679 return copy_insn_scratch_out[i];
4680 break;
4681
4682 case CONST:
4683 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
4684 a LABEL_REF, it isn't sharable. */
4685 if (GET_CODE (XEXP (orig, 0)) == PLUS
4686 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
4687 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
4688 return orig;
4689 break;
4690
4691 /* A MEM with a constant address is not sharable. The problem is that
4692 the constant address may need to be reloaded. If the mem is shared,
4693 then reloading one copy of this mem will cause all copies to appear
4694 to have been reloaded. */
4695
4696 default:
4697 break;
4698 }
4699
4700 copy = rtx_alloc (code);
4701
4702 /* Copy the various flags, and other information. We assume that
4703 all fields need copying, and then clear the fields that should
4704 not be copied. That is the sensible default behavior, and forces
4705 us to explicitly document why we are *not* copying a flag. */
4706 memcpy (copy, orig, sizeof (struct rtx_def) - sizeof (rtunion));
4707
4708 /* We do not copy the USED flag, which is used as a mark bit during
4709 walks over the RTL. */
4710 RTX_FLAG (copy, used) = 0;
4711
4712 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
4713 if (GET_RTX_CLASS (code) == 'i')
4714 {
4715 RTX_FLAG (copy, jump) = 0;
4716 RTX_FLAG (copy, call) = 0;
4717 RTX_FLAG (copy, frame_related) = 0;
4718 }
4719
4720 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
4721
4722 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
4723 {
4724 copy->fld[i] = orig->fld[i];
4725 switch (*format_ptr++)
4726 {
4727 case 'e':
4728 if (XEXP (orig, i) != NULL)
4729 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
4730 break;
4731
4732 case 'E':
4733 case 'V':
4734 if (XVEC (orig, i) == orig_asm_constraints_vector)
4735 XVEC (copy, i) = copy_asm_constraints_vector;
4736 else if (XVEC (orig, i) == orig_asm_operands_vector)
4737 XVEC (copy, i) = copy_asm_operands_vector;
4738 else if (XVEC (orig, i) != NULL)
4739 {
4740 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
4741 for (j = 0; j < XVECLEN (copy, i); j++)
4742 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
4743 }
4744 break;
4745
4746 case 't':
4747 case 'w':
4748 case 'i':
4749 case 's':
4750 case 'S':
4751 case 'u':
4752 case '0':
4753 /* These are left unchanged. */
4754 break;
4755
4756 default:
4757 abort ();
4758 }
4759 }
4760
4761 if (code == SCRATCH)
4762 {
4763 i = copy_insn_n_scratches++;
4764 if (i >= MAX_RECOG_OPERANDS)
4765 abort ();
4766 copy_insn_scratch_in[i] = orig;
4767 copy_insn_scratch_out[i] = copy;
4768 }
4769 else if (code == ASM_OPERANDS)
4770 {
4771 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
4772 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
4773 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
4774 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
4775 }
4776
4777 return copy;
4778 }
4779
4780 /* Create a new copy of an rtx.
4781 This function differs from copy_rtx in that it handles SCRATCHes and
4782 ASM_OPERANDs properly.
4783 INSN doesn't really have to be a full INSN; it could be just the
4784 pattern. */
4785 rtx
4786 copy_insn (insn)
4787 rtx insn;
4788 {
4789 copy_insn_n_scratches = 0;
4790 orig_asm_operands_vector = 0;
4791 orig_asm_constraints_vector = 0;
4792 copy_asm_operands_vector = 0;
4793 copy_asm_constraints_vector = 0;
4794 return copy_insn_1 (insn);
4795 }
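
/* Illustrative sketch, not part of the original file: duplicating an
   insn's pattern, then copying its REG_NOTES with copy_insn_1 so that
   SCRATCHes stay shared between the copied pattern and the copied
   notes, as described above copy_insn_1.  */
#if 0
static void
copy_pattern_and_notes (insn, pat, notes)
     rtx insn, *pat, *notes;
{
  *pat = copy_insn (PATTERN (insn));
  *notes = REG_NOTES (insn) ? copy_insn_1 (REG_NOTES (insn)) : NULL_RTX;
}
#endif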
4796
4797 /* Initialize data structures and variables in this file
4798 before generating rtl for each function. */
4799
4800 void
4801 init_emit ()
4802 {
4803 struct function *f = cfun;
4804
4805 f->emit = (struct emit_status *) ggc_alloc (sizeof (struct emit_status));
4806 first_insn = NULL;
4807 last_insn = NULL;
4808 seq_rtl_expr = NULL;
4809 cur_insn_uid = 1;
4810 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
4811 last_linenum = 0;
4812 last_filename = 0;
4813 first_label_num = label_num;
4814 last_label_num = 0;
4815 seq_stack = NULL;
4816
4817 /* Init the tables that describe all the pseudo regs. */
4818
4819 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
4820
4821 f->emit->regno_pointer_align
4822 = (unsigned char *) ggc_alloc_cleared (f->emit->regno_pointer_align_length
4823 * sizeof (unsigned char));
4824
4825 regno_reg_rtx
4826 = (rtx *) ggc_alloc_cleared (f->emit->regno_pointer_align_length
4827 * sizeof (rtx));
4828
4829 f->emit->regno_decl
4830 = (tree *) ggc_alloc_cleared (f->emit->regno_pointer_align_length
4831 * sizeof (tree));
4832
4833 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
4834 init_virtual_regs (f->emit);
4835
4836 /* Indicate that the virtual registers and stack locations are
4837 all pointers. */
4838 REG_POINTER (stack_pointer_rtx) = 1;
4839 REG_POINTER (frame_pointer_rtx) = 1;
4840 REG_POINTER (hard_frame_pointer_rtx) = 1;
4841 REG_POINTER (arg_pointer_rtx) = 1;
4842
4843 REG_POINTER (virtual_incoming_args_rtx) = 1;
4844 REG_POINTER (virtual_stack_vars_rtx) = 1;
4845 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
4846 REG_POINTER (virtual_outgoing_args_rtx) = 1;
4847 REG_POINTER (virtual_cfa_rtx) = 1;
4848
4849 #ifdef STACK_BOUNDARY
4850 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
4851 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
4852 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
4853 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
4854
4855 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
4856 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
4857 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
4858 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
4859 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
4860 #endif
4861
4862 #ifdef INIT_EXPANDERS
4863 INIT_EXPANDERS;
4864 #endif
4865 }
4866
4867 /* Generate the constant 0. */
4868
4869 static rtx
4870 gen_const_vector_0 (mode)
4871 enum machine_mode mode;
4872 {
4873 rtx tem;
4874 rtvec v;
4875 int units, i;
4876 enum machine_mode inner;
4877
4878 units = GET_MODE_NUNITS (mode);
4879 inner = GET_MODE_INNER (mode);
4880
4881 v = rtvec_alloc (units);
4882
4883 /* This function can only be called after CONST0_RTX for the inner mode has been set. */
4884 if (!CONST0_RTX (inner))
4885 abort ();
4886
4887 for (i = 0; i < units; ++i)
4888 RTVEC_ELT (v, i) = CONST0_RTX (inner);
4889
4890 tem = gen_rtx_CONST_VECTOR (mode, v);
4891 return tem;
4892 }
4893
4894 /* Create some permanent unique rtl objects shared between all functions.
4895 LINE_NUMBERS is nonzero if line numbers are to be generated. */
4896
4897 void
4898 init_emit_once (line_numbers)
4899 int line_numbers;
4900 {
4901 int i;
4902 enum machine_mode mode;
4903 enum machine_mode double_mode;
4904
4905 /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
4906 tables. */
4907 const_int_htab = htab_create (37, const_int_htab_hash,
4908 const_int_htab_eq, NULL);
4909
4910 const_double_htab = htab_create (37, const_double_htab_hash,
4911 const_double_htab_eq, NULL);
4912
4913 mem_attrs_htab = htab_create (37, mem_attrs_htab_hash,
4914 mem_attrs_htab_eq, NULL);
4915
4916 no_line_numbers = ! line_numbers;
4917
4918 /* Compute the byte, word, and double modes. */
4919
4920 byte_mode = VOIDmode;
4921 word_mode = VOIDmode;
4922 double_mode = VOIDmode;
4923
4924 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
4925 mode = GET_MODE_WIDER_MODE (mode))
4926 {
4927 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
4928 && byte_mode == VOIDmode)
4929 byte_mode = mode;
4930
4931 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
4932 && word_mode == VOIDmode)
4933 word_mode = mode;
4934 }
4935
4936 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
4937 mode = GET_MODE_WIDER_MODE (mode))
4938 {
4939 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
4940 && double_mode == VOIDmode)
4941 double_mode = mode;
4942 }
4943
4944 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
4945
4946 /* Assign register numbers to the globally defined register rtx.
4947 This must be done at runtime because the register number field
4948 is in a union and some compilers can't initialize unions. */
4949
4950 pc_rtx = gen_rtx (PC, VOIDmode);
4951 cc0_rtx = gen_rtx (CC0, VOIDmode);
4952 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
4953 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
4954 if (hard_frame_pointer_rtx == 0)
4955 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
4956 HARD_FRAME_POINTER_REGNUM);
4957 if (arg_pointer_rtx == 0)
4958 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
4959 virtual_incoming_args_rtx =
4960 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
4961 virtual_stack_vars_rtx =
4962 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
4963 virtual_stack_dynamic_rtx =
4964 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
4965 virtual_outgoing_args_rtx =
4966 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
4967 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
4968
4969 #ifdef INIT_EXPANDERS
4970 /* This is to initialize {init|mark|free}_machine_status before the first
4971 call to push_function_context_to. This is needed by the Chill front
4972 end which calls push_function_context_to before the first call to
4973 init_function_start. */
4974 INIT_EXPANDERS;
4975 #endif
4976
4977 /* Create the unique rtx's for certain rtx codes and operand values. */
4978
4979 /* Don't use gen_rtx here since gen_rtx in this case
4980 tries to use these variables. */
4981 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
4982 const_int_rtx[i + MAX_SAVED_CONST_INT] =
4983 gen_rtx_raw_CONST_INT (VOIDmode, i);
4984
4985 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
4986 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
4987 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
4988 else
4989 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
4990
4991 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
4992 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
4993 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
4994 REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
4995
4996 for (i = 0; i <= 2; i++)
4997 {
4998 REAL_VALUE_TYPE *r =
4999 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5000
5001 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5002 mode = GET_MODE_WIDER_MODE (mode))
5003 const_tiny_rtx[i][(int) mode] =
5004 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5005
5006 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5007
5008 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5009 mode = GET_MODE_WIDER_MODE (mode))
5010 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5011
5012 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5013 mode != VOIDmode;
5014 mode = GET_MODE_WIDER_MODE (mode))
5015 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5016 }
5017
5018 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5019 mode != VOIDmode;
5020 mode = GET_MODE_WIDER_MODE (mode))
5021 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5022
5023 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5024 mode != VOIDmode;
5025 mode = GET_MODE_WIDER_MODE (mode))
5026 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5027
5028 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5029 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5030 const_tiny_rtx[0][i] = const0_rtx;
5031
5032 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5033 if (STORE_FLAG_VALUE == 1)
5034 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5035
5036 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5037 return_address_pointer_rtx
5038 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5039 #endif
5040
5041 #ifdef STRUCT_VALUE
5042 struct_value_rtx = STRUCT_VALUE;
5043 #else
5044 struct_value_rtx = gen_rtx_REG (Pmode, STRUCT_VALUE_REGNUM);
5045 #endif
5046
5047 #ifdef STRUCT_VALUE_INCOMING
5048 struct_value_incoming_rtx = STRUCT_VALUE_INCOMING;
5049 #else
5050 #ifdef STRUCT_VALUE_INCOMING_REGNUM
5051 struct_value_incoming_rtx
5052 = gen_rtx_REG (Pmode, STRUCT_VALUE_INCOMING_REGNUM);
5053 #else
5054 struct_value_incoming_rtx = struct_value_rtx;
5055 #endif
5056 #endif
5057
5058 #ifdef STATIC_CHAIN_REGNUM
5059 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5060
5061 #ifdef STATIC_CHAIN_INCOMING_REGNUM
5062 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5063 static_chain_incoming_rtx
5064 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5065 else
5066 #endif
5067 static_chain_incoming_rtx = static_chain_rtx;
5068 #endif
5069
5070 #ifdef STATIC_CHAIN
5071 static_chain_rtx = STATIC_CHAIN;
5072
5073 #ifdef STATIC_CHAIN_INCOMING
5074 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5075 #else
5076 static_chain_incoming_rtx = static_chain_rtx;
5077 #endif
5078 #endif
5079
5080 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5081 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5082 }
5083 \f
5084 /* Query and clear / restore no_line_numbers. This is used by the
5085 switch / case handling in stmt.c to give proper line numbers in
5086 warnings about unreachable code. */
5087
5088 int
5089 force_line_numbers ()
5090 {
5091 int old = no_line_numbers;
5092
5093 no_line_numbers = 0;
5094 if (old)
5095 force_next_line_note ();
5096 return old;
5097 }
5098
5099 void
5100 restore_line_number_status (old_value)
5101 int old_value;
5102 {
5103 no_line_numbers = old_value;
5104 }
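
/* Illustrative sketch, not part of the original file: the save / restore
   protocol for the two functions above.  */
#if 0
static void
with_line_numbers ()
{
  int old = force_line_numbers ();

  /* ... code that must see line numbers ...  */

  restore_line_number_status (old);
}
#endif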
5105
5106 /* Produce an exact duplicate of insn INSN after AFTER.
5107 Takes care of updating libcall regions if present. */
5108
5109 rtx
5110 emit_copy_of_insn_after (insn, after)
5111 rtx insn, after;
5112 {
5113 rtx new;
5114 rtx note1, note2, link;
5115
5116 switch (GET_CODE (insn))
5117 {
5118 case INSN:
5119 new = emit_insn_after (copy_insn (PATTERN (insn)), after);
5120 break;
5121
5122 case JUMP_INSN:
5123 new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5124 break;
5125
5126 case CALL_INSN:
5127 new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5128 if (CALL_INSN_FUNCTION_USAGE (insn))
5129 CALL_INSN_FUNCTION_USAGE (new)
5130 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5131 SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
5132 CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
5133 break;
5134
5135 default:
5136 abort ();
5137 }
5138
5139 /* Update LABEL_NUSES. */
5140 mark_jump_label (PATTERN (new), new, 0);
5141
5142 INSN_SCOPE (new) = INSN_SCOPE (insn);
5143
5144 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
5145 make them. */
5146 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5147 if (REG_NOTE_KIND (link) != REG_LABEL)
5148 {
5149 if (GET_CODE (link) == EXPR_LIST)
5150 REG_NOTES (new)
5151 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
5152 XEXP (link, 0),
5153 REG_NOTES (new)));
5154 else
5155 REG_NOTES (new)
5156 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
5157 XEXP (link, 0),
5158 REG_NOTES (new)));
5159 }
5160
5161 /* Fix the libcall sequences. */
5162 if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
5163 {
5164 rtx p = new;
5165 while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
5166 p = PREV_INSN (p);
5167 XEXP (note1, 0) = p;
5168 XEXP (note2, 0) = new;
5169 }
5170 return new;
5171 }
5172
5173 #include "gt-emit-rtl.h"