1 /* Emit RTL for the GNU C-Compiler expander.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22
23 /* Middle-to-low level generation of rtx code and insns.
24
25 This file contains the functions `gen_rtx', `gen_reg_rtx'
26 and `gen_label_rtx' that are the usual ways of creating rtl
27 expressions for most purposes.
28
29 It also has the functions for creating insns and linking
30 them in the doubly-linked chain.
31
32 The patterns of the insns are created by machine-dependent
33 routines in insn-emit.c, which is generated automatically from
34 the machine description. These routines use `gen_rtx' to make
35 the individual rtx's of the pattern; what is machine dependent
36 is the kind of rtx's they make and what arguments they use. */
37
38 #include "config.h"
39 #include "system.h"
40 #include "coretypes.h"
41 #include "tm.h"
42 #include "toplev.h"
43 #include "rtl.h"
44 #include "tree.h"
45 #include "tm_p.h"
46 #include "flags.h"
47 #include "function.h"
48 #include "expr.h"
49 #include "regs.h"
50 #include "hard-reg-set.h"
51 #include "hashtab.h"
52 #include "insn-config.h"
53 #include "recog.h"
54 #include "real.h"
55 #include "bitmap.h"
56 #include "basic-block.h"
57 #include "ggc.h"
58 #include "debug.h"
59 #include "langhooks.h"
60
61 /* Commonly used modes. */
62
63 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
64 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
65 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
66 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
67
68
69 /* This is *not* reset after each function. It gives each CODE_LABEL
70 in the entire compilation a unique label number. */
71
72 static GTY(()) int label_num = 1;
73
74 /* Highest label number in current function.
75 Zero means use the value of label_num instead.
76 This is nonzero only when belatedly compiling an inline function. */
77
78 static int last_label_num;
79
80 /* Value label_num had when set_new_first_and_last_label_number was called.
81 If label_num has not changed since then, last_label_num is valid. */
82
83 static int base_label_num;
84
85 /* Nonzero means do not generate NOTEs for source line numbers. */
86
87 static int no_line_numbers;
88
89 /* Commonly used rtx's, so that we only need space for one copy.
90 These are initialized once for the entire compilation.
91 All of these are unique; no other rtx-object will be equal to any
92 of these. */
93
94 rtx global_rtl[GR_MAX];
95
96 /* Commonly used RTL for hard registers. These objects are not necessarily
97 unique, so we allocate them separately from global_rtl. They are
98 initialized once per compilation unit, then copied into regno_reg_rtx
99 at the beginning of each function. */
100 static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];
101
102 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
103 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
104 record a copy of const[012]_rtx. */
105
106 rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
107
108 rtx const_true_rtx;
109
110 REAL_VALUE_TYPE dconst0;
111 REAL_VALUE_TYPE dconst1;
112 REAL_VALUE_TYPE dconst2;
113 REAL_VALUE_TYPE dconstm1;
114 REAL_VALUE_TYPE dconstm2;
115 REAL_VALUE_TYPE dconsthalf;
116
117 /* All references to the following fixed hard registers go through
118 these unique rtl objects. On machines where the frame-pointer and
119 arg-pointer are the same register, they use the same unique object.
120
121 After register allocation, other rtl objects which used to be pseudo-regs
122 may be clobbered to refer to the frame-pointer register.
123 But references that were originally to the frame-pointer can be
124 distinguished from the others because they contain frame_pointer_rtx.
125
126 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
127 tricky: until register elimination has taken place hard_frame_pointer_rtx
128 should be used if it is being set, and frame_pointer_rtx otherwise. After
129 register elimination hard_frame_pointer_rtx should always be used.
 130    On machines where the two registers are the same (as on most machines),
 131    these are the same.
132
133 In an inline procedure, the stack and frame pointer rtxs may not be
134 used for anything else. */
135 rtx struct_value_rtx; /* (REG:Pmode STRUCT_VALUE_REGNUM) */
136 rtx struct_value_incoming_rtx; /* (REG:Pmode STRUCT_VALUE_INCOMING_REGNUM) */
137 rtx static_chain_rtx; /* (REG:Pmode STATIC_CHAIN_REGNUM) */
138 rtx static_chain_incoming_rtx; /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
139 rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
140
141 /* This is used to implement __builtin_return_address for some machines.
142 See for instance the MIPS port. */
143 rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
144
145 /* We make one copy of (const_int C) where C is in
146 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
147 to save space during the compilation and simplify comparisons of
148 integers. */
149
150 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
151
152 /* A hash table storing CONST_INTs whose absolute value is greater
153 than MAX_SAVED_CONST_INT. */
154
155 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
156 htab_t const_int_htab;
157
158 /* A hash table storing memory attribute structures. */
159 static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
160 htab_t mem_attrs_htab;
161
162 /* A hash table storing register attribute structures. */
163 static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
164 htab_t reg_attrs_htab;
165
166 /* A hash table storing all CONST_DOUBLEs. */
167 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
168 htab_t const_double_htab;
169
170 #define first_insn (cfun->emit->x_first_insn)
171 #define last_insn (cfun->emit->x_last_insn)
172 #define cur_insn_uid (cfun->emit->x_cur_insn_uid)
173 #define last_linenum (cfun->emit->x_last_linenum)
174 #define last_filename (cfun->emit->x_last_filename)
175 #define first_label_num (cfun->emit->x_first_label_num)
176
177 static rtx make_jump_insn_raw PARAMS ((rtx));
178 static rtx make_call_insn_raw PARAMS ((rtx));
179 static rtx find_line_note PARAMS ((rtx));
180 static rtx change_address_1 PARAMS ((rtx, enum machine_mode, rtx,
181 int));
182 static void unshare_all_rtl_1 PARAMS ((rtx));
183 static void unshare_all_decls PARAMS ((tree));
184 static void reset_used_decls PARAMS ((tree));
185 static void mark_label_nuses PARAMS ((rtx));
186 static hashval_t const_int_htab_hash PARAMS ((const void *));
187 static int const_int_htab_eq PARAMS ((const void *,
188 const void *));
189 static hashval_t const_double_htab_hash PARAMS ((const void *));
190 static int const_double_htab_eq PARAMS ((const void *,
191 const void *));
192 static rtx lookup_const_double PARAMS ((rtx));
193 static hashval_t mem_attrs_htab_hash PARAMS ((const void *));
194 static int mem_attrs_htab_eq PARAMS ((const void *,
195 const void *));
196 static mem_attrs *get_mem_attrs PARAMS ((HOST_WIDE_INT, tree, rtx,
197 rtx, unsigned int,
198 enum machine_mode));
199 static hashval_t reg_attrs_htab_hash PARAMS ((const void *));
200 static int reg_attrs_htab_eq PARAMS ((const void *,
201 const void *));
202 static reg_attrs *get_reg_attrs PARAMS ((tree, int));
203 static tree component_ref_for_mem_expr PARAMS ((tree));
204 static rtx gen_const_vector_0 PARAMS ((enum machine_mode));
205
 206 /* Probability of the conditional branch currently processed by try_split.
207 Set to -1 otherwise. */
208 int split_branch_probability = -1;
209 \f
 210 /* Returns a hash code for X (which is really a CONST_INT).  */
211
212 static hashval_t
213 const_int_htab_hash (x)
214 const void *x;
215 {
216 return (hashval_t) INTVAL ((struct rtx_def *) x);
217 }
218
219 /* Returns nonzero if the value represented by X (which is really a
220 CONST_INT) is the same as that given by Y (which is really a
221 HOST_WIDE_INT *). */
222
223 static int
224 const_int_htab_eq (x, y)
225 const void *x;
226 const void *y;
227 {
228 return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
229 }
230
231 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
232 static hashval_t
233 const_double_htab_hash (x)
234 const void *x;
235 {
236 rtx value = (rtx) x;
237 hashval_t h;
238
239 if (GET_MODE (value) == VOIDmode)
240 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
241 else
242 {
243 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
244 /* MODE is used in the comparison, so it should be in the hash. */
245 h ^= GET_MODE (value);
246 }
247 return h;
248 }
249
 250 /* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
 251    is the same as that represented by Y (really a CONST_DOUBLE).  */
252 static int
253 const_double_htab_eq (x, y)
254 const void *x;
255 const void *y;
256 {
257 rtx a = (rtx)x, b = (rtx)y;
258
259 if (GET_MODE (a) != GET_MODE (b))
260 return 0;
261 if (GET_MODE (a) == VOIDmode)
262 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
263 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
264 else
265 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
266 CONST_DOUBLE_REAL_VALUE (b));
267 }
268
 269 /* Returns a hash code for X (which is really a mem_attrs *).  */
270
271 static hashval_t
272 mem_attrs_htab_hash (x)
273 const void *x;
274 {
275 mem_attrs *p = (mem_attrs *) x;
276
277 return (p->alias ^ (p->align * 1000)
278 ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
279 ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
280 ^ (size_t) p->expr);
281 }
282
283 /* Returns nonzero if the value represented by X (which is really a
284 mem_attrs *) is the same as that given by Y (which is also really a
285 mem_attrs *). */
286
287 static int
288 mem_attrs_htab_eq (x, y)
289 const void *x;
290 const void *y;
291 {
292 mem_attrs *p = (mem_attrs *) x;
293 mem_attrs *q = (mem_attrs *) y;
294
295 return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
296 && p->size == q->size && p->align == q->align);
297 }
298
299 /* Allocate a new mem_attrs structure and insert it into the hash table if
300 one identical to it is not already in the table. We are doing this for
301 MEM of mode MODE. */
302
303 static mem_attrs *
304 get_mem_attrs (alias, expr, offset, size, align, mode)
305 HOST_WIDE_INT alias;
306 tree expr;
307 rtx offset;
308 rtx size;
309 unsigned int align;
310 enum machine_mode mode;
311 {
312 mem_attrs attrs;
313 void **slot;
314
315 /* If everything is the default, we can just return zero.
316 This must match what the corresponding MEM_* macros return when the
317 field is not present. */
318 if (alias == 0 && expr == 0 && offset == 0
319 && (size == 0
320 || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
321 && (STRICT_ALIGNMENT && mode != BLKmode
322 ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
323 return 0;
324
325 attrs.alias = alias;
326 attrs.expr = expr;
327 attrs.offset = offset;
328 attrs.size = size;
329 attrs.align = align;
330
331 slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
332 if (*slot == 0)
333 {
334 *slot = ggc_alloc (sizeof (mem_attrs));
335 memcpy (*slot, &attrs, sizeof (mem_attrs));
336 }
337
338 return *slot;
339 }
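
/* As a sketch of the sharing this gives us, two calls with identical
   arguments return the same pointer:

	mem_attrs *a = get_mem_attrs (set, expr, off, size, align, mode);
	mem_attrs *b = get_mem_attrs (set, expr, off, size, align, mode);

   (the argument names here are placeholders).  Both calls probe the
   same hash slot, so a == b, and attribute equality elsewhere reduces
   to a pointer comparison; mem_attrs_htab_eq is only needed while
   probing the table.  */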
340
 341 /* Returns a hash code for X (which is really a reg_attrs *).  */
342
343 static hashval_t
344 reg_attrs_htab_hash (x)
345 const void *x;
346 {
347 reg_attrs *p = (reg_attrs *) x;
348
349 return ((p->offset * 1000) ^ (long) p->decl);
350 }
351
 352 /* Returns nonzero if the value represented by X (which is really a
353 reg_attrs *) is the same as that given by Y (which is also really a
354 reg_attrs *). */
355
356 static int
357 reg_attrs_htab_eq (x, y)
358 const void *x;
359 const void *y;
360 {
361 reg_attrs *p = (reg_attrs *) x;
362 reg_attrs *q = (reg_attrs *) y;
363
364 return (p->decl == q->decl && p->offset == q->offset);
365 }
 366 /* Allocate a new reg_attrs structure and insert it into the hash table if
 367    one identical to it is not already in the table.  We are doing this for
 368    a REG described by DECL and OFFSET.  */
369
370 static reg_attrs *
371 get_reg_attrs (decl, offset)
372 tree decl;
373 int offset;
374 {
375 reg_attrs attrs;
376 void **slot;
377
378 /* If everything is the default, we can just return zero. */
379 if (decl == 0 && offset == 0)
380 return 0;
381
382 attrs.decl = decl;
383 attrs.offset = offset;
384
385 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
386 if (*slot == 0)
387 {
388 *slot = ggc_alloc (sizeof (reg_attrs));
389 memcpy (*slot, &attrs, sizeof (reg_attrs));
390 }
391
392 return *slot;
393 }
394
395 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
396 don't attempt to share with the various global pieces of rtl (such as
397 frame_pointer_rtx). */
398
399 rtx
400 gen_raw_REG (mode, regno)
401 enum machine_mode mode;
402 int regno;
403 {
404 rtx x = gen_rtx_raw_REG (mode, regno);
405 ORIGINAL_REGNO (x) = regno;
406 return x;
407 }
408
409 /* There are some RTL codes that require special attention; the generation
410 functions do the raw handling. If you add to this list, modify
411 special_rtx in gengenrtl.c as well. */
412
413 rtx
414 gen_rtx_CONST_INT (mode, arg)
415 enum machine_mode mode ATTRIBUTE_UNUSED;
416 HOST_WIDE_INT arg;
417 {
418 void **slot;
419
420 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
421 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
422
423 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
424 if (const_true_rtx && arg == STORE_FLAG_VALUE)
425 return const_true_rtx;
426 #endif
427
428 /* Look up the CONST_INT in the hash table. */
429 slot = htab_find_slot_with_hash (const_int_htab, &arg,
430 (hashval_t) arg, INSERT);
431 if (*slot == 0)
432 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
433
434 return (rtx) *slot;
435 }
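
/* Because of the sharing above, CONST_INTs can be compared with
   pointer equality.  For example,

	gen_rtx_CONST_INT (VOIDmode, 1) == const1_rtx

   holds, since 1 lies within the pre-allocated range, and even for
   values outside [-MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT] the
   hash table guarantees GEN_INT (c) == GEN_INT (c) for equal C.  */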
436
437 rtx
438 gen_int_mode (c, mode)
439 HOST_WIDE_INT c;
440 enum machine_mode mode;
441 {
442 return GEN_INT (trunc_int_for_mode (c, mode));
443 }
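
/* Unlike GEN_INT, gen_int_mode first truncates C to the width of MODE,
   so the result is a canonical, sign-extended CONST_INT.  Assuming an
   8-bit QImode:

	gen_int_mode (0xff, QImode)   ==> (const_int -1)
	gen_int_mode (0x7f, QImode)   ==> (const_int 127)

   whereas GEN_INT (0xff) would yield the non-canonical
   (const_int 255) when used in a QImode context.  */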
444
445 /* CONST_DOUBLEs might be created from pairs of integers, or from
446 REAL_VALUE_TYPEs. Also, their length is known only at run time,
447 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
448
449 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
450 hash table. If so, return its counterpart; otherwise add it
451 to the hash table and return it. */
452 static rtx
453 lookup_const_double (real)
454 rtx real;
455 {
456 void **slot = htab_find_slot (const_double_htab, real, INSERT);
457 if (*slot == 0)
458 *slot = real;
459
460 return (rtx) *slot;
461 }
462
463 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
464 VALUE in mode MODE. */
465 rtx
466 const_double_from_real_value (value, mode)
467 REAL_VALUE_TYPE value;
468 enum machine_mode mode;
469 {
470 rtx real = rtx_alloc (CONST_DOUBLE);
471 PUT_MODE (real, mode);
472
473 memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE));
474
475 return lookup_const_double (real);
476 }
477
478 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
479 of ints: I0 is the low-order word and I1 is the high-order word.
480 Do not use this routine for non-integer modes; convert to
481 REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE. */
482
483 rtx
484 immed_double_const (i0, i1, mode)
485 HOST_WIDE_INT i0, i1;
486 enum machine_mode mode;
487 {
488 rtx value;
489 unsigned int i;
490
491 if (mode != VOIDmode)
492 {
493 int width;
494 if (GET_MODE_CLASS (mode) != MODE_INT
495 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT
496 /* We can get a 0 for an error mark. */
497 && GET_MODE_CLASS (mode) != MODE_VECTOR_INT
498 && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
499 abort ();
500
501 /* We clear out all bits that don't belong in MODE, unless they and
502 our sign bit are all one. So we get either a reasonable negative
503 value or a reasonable unsigned value for this mode. */
504 width = GET_MODE_BITSIZE (mode);
505 if (width < HOST_BITS_PER_WIDE_INT
506 && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
507 != ((HOST_WIDE_INT) (-1) << (width - 1))))
508 i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
509 else if (width == HOST_BITS_PER_WIDE_INT
510 && ! (i1 == ~0 && i0 < 0))
511 i1 = 0;
512 else if (width > 2 * HOST_BITS_PER_WIDE_INT)
513 /* We cannot represent this value as a constant. */
514 abort ();
515
516 /* If this would be an entire word for the target, but is not for
517 the host, then sign-extend on the host so that the number will
518 look the same way on the host that it would on the target.
519
520 For example, when building a 64 bit alpha hosted 32 bit sparc
521 targeted compiler, then we want the 32 bit unsigned value -1 to be
522 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
523 The latter confuses the sparc backend. */
524
525 if (width < HOST_BITS_PER_WIDE_INT
526 && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
527 i0 |= ((HOST_WIDE_INT) (-1) << width);
528
529 /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
530 CONST_INT.
531
532 ??? Strictly speaking, this is wrong if we create a CONST_INT for
533 a large unsigned constant with the size of MODE being
534 HOST_BITS_PER_WIDE_INT and later try to interpret that constant
535 in a wider mode. In that case we will mis-interpret it as a
536 negative number.
537
538 Unfortunately, the only alternative is to make a CONST_DOUBLE for
539 any constant in any mode if it is an unsigned constant larger
540 than the maximum signed integer in an int on the host. However,
541 doing this will break everyone that always expects to see a
542 CONST_INT for SImode and smaller.
543
544 We have always been making CONST_INTs in this case, so nothing
545 new is being broken. */
546
547 if (width <= HOST_BITS_PER_WIDE_INT)
548 i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
549 }
550
551 /* If this integer fits in one word, return a CONST_INT. */
552 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
553 return GEN_INT (i0);
554
555 /* We use VOIDmode for integers. */
556 value = rtx_alloc (CONST_DOUBLE);
557 PUT_MODE (value, VOIDmode);
558
559 CONST_DOUBLE_LOW (value) = i0;
560 CONST_DOUBLE_HIGH (value) = i1;
561
562 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
563 XWINT (value, i) = 0;
564
565 return lookup_const_double (value);
566 }
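
/* Two illustrative cases, assuming a 64-bit HOST_WIDE_INT and a
   128-bit TImode:

	immed_double_const (-1, -1, DImode)  ==> (const_int -1)
	immed_double_const (0, 1, TImode)    ==> a VOIDmode CONST_DOUBLE
						 with low 0 and high 1

   The first value fits in one sign-extended word and so becomes a
   shared CONST_INT; the second needs both words and is uniquified
   through const_double_htab.  */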
567
568 rtx
569 gen_rtx_REG (mode, regno)
570 enum machine_mode mode;
571 unsigned int regno;
572 {
573 /* In case the MD file explicitly references the frame pointer, have
574 all such references point to the same frame pointer. This is
575 used during frame pointer elimination to distinguish the explicit
576 references to these registers from pseudos that happened to be
577 assigned to them.
578
579 If we have eliminated the frame pointer or arg pointer, we will
580 be using it as a normal register, for example as a spill
581 register. In such cases, we might be accessing it in a mode that
582 is not Pmode and therefore cannot use the pre-allocated rtx.
583
584 Also don't do this when we are making new REGs in reload, since
585 we don't want to get confused with the real pointers. */
586
587 if (mode == Pmode && !reload_in_progress)
588 {
589 if (regno == FRAME_POINTER_REGNUM
590 && (!reload_completed || frame_pointer_needed))
591 return frame_pointer_rtx;
592 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
593 if (regno == HARD_FRAME_POINTER_REGNUM
594 && (!reload_completed || frame_pointer_needed))
595 return hard_frame_pointer_rtx;
596 #endif
597 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
598 if (regno == ARG_POINTER_REGNUM)
599 return arg_pointer_rtx;
600 #endif
601 #ifdef RETURN_ADDRESS_POINTER_REGNUM
602 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
603 return return_address_pointer_rtx;
604 #endif
605 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
606 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
607 return pic_offset_table_rtx;
608 if (regno == STACK_POINTER_REGNUM)
609 return stack_pointer_rtx;
610 }
611
612 #if 0
613 /* If the per-function register table has been set up, try to re-use
614 an existing entry in that table to avoid useless generation of RTL.
615
616 This code is disabled for now until we can fix the various backends
617 which depend on having non-shared hard registers in some cases. Long
618 term we want to re-enable this code as it can significantly cut down
619 on the amount of useless RTL that gets generated.
620
621 We'll also need to fix some code that runs after reload that wants to
622 set ORIGINAL_REGNO. */
623
624 if (cfun
625 && cfun->emit
626 && regno_reg_rtx
627 && regno < FIRST_PSEUDO_REGISTER
628 && reg_raw_mode[regno] == mode)
629 return regno_reg_rtx[regno];
630 #endif
631
632 return gen_raw_REG (mode, regno);
633 }
634
635 rtx
636 gen_rtx_MEM (mode, addr)
637 enum machine_mode mode;
638 rtx addr;
639 {
640 rtx rt = gen_rtx_raw_MEM (mode, addr);
641
642 /* This field is not cleared by the mere allocation of the rtx, so
643 we clear it here. */
644 MEM_ATTRS (rt) = 0;
645
646 return rt;
647 }
648
649 rtx
650 gen_rtx_SUBREG (mode, reg, offset)
651 enum machine_mode mode;
652 rtx reg;
653 int offset;
654 {
655 /* This is the most common failure type.
656 Catch it early so we can see who does it. */
657 if ((offset % GET_MODE_SIZE (mode)) != 0)
658 abort ();
659
660 /* This check isn't usable right now because combine will
661 throw arbitrary crap like a CALL into a SUBREG in
662 gen_lowpart_for_combine so we must just eat it. */
663 #if 0
664 /* Check for this too. */
665 if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
666 abort ();
667 #endif
668 return gen_rtx_raw_SUBREG (mode, reg, offset);
669 }
670
 671 /* Generate a SUBREG representing the least-significant part of REG if MODE
 672    is smaller than the mode of REG; otherwise generate a paradoxical SUBREG.  */
673
674 rtx
675 gen_lowpart_SUBREG (mode, reg)
676 enum machine_mode mode;
677 rtx reg;
678 {
679 enum machine_mode inmode;
680
681 inmode = GET_MODE (reg);
682 if (inmode == VOIDmode)
683 inmode = mode;
684 return gen_rtx_SUBREG (mode, reg,
685 subreg_lowpart_offset (mode, inmode));
686 }
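
/* For example, assuming 4-byte words and a 64-bit DImode pseudo,

	gen_lowpart_SUBREG (SImode, x)	[x == (reg:DI 100)]

   yields (subreg:SI (reg:DI 100) 0) on a little-endian target and
   (subreg:SI (reg:DI 100) 4) when WORDS_BIG_ENDIAN, since the
   low-order word then lives at the higher byte offset.  A MODE wider
   than the mode of REG instead produces a paradoxical SUBREG at
   offset 0.  (The register number is illustrative only.)  */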
687 \f
688 /* rtx gen_rtx (code, mode, [element1, ..., elementn])
689 **
690 ** This routine generates an RTX of the size specified by
691 ** <code>, which is an RTX code. The RTX structure is initialized
692 ** from the arguments <element1> through <elementn>, which are
693 ** interpreted according to the specific RTX type's format. The
694 ** special machine mode associated with the rtx (if any) is specified
695 ** in <mode>.
696 **
697 ** gen_rtx can be invoked in a way which resembles the lisp-like
698 ** rtx it will generate. For example, the following rtx structure:
699 **
700 ** (plus:QI (mem:QI (reg:SI 1))
 701 **            (mem:QI (plus:SI (reg:SI 2) (reg:SI 3))))
702 **
703 ** ...would be generated by the following C code:
704 **
705 ** gen_rtx (PLUS, QImode,
706 ** gen_rtx (MEM, QImode,
707 ** gen_rtx (REG, SImode, 1)),
708 ** gen_rtx (MEM, QImode,
709 ** gen_rtx (PLUS, SImode,
710 ** gen_rtx (REG, SImode, 2),
 711 **                            gen_rtx (REG, SImode, 3))));
712 */
713
714 /*VARARGS2*/
715 rtx
716 gen_rtx (enum rtx_code code, enum machine_mode mode, ...)
717 {
718 int i; /* Array indices... */
719 const char *fmt; /* Current rtx's format... */
720 rtx rt_val; /* RTX to return to caller... */
721 va_list p;
722
723 va_start (p, mode);
724
725 switch (code)
726 {
727 case CONST_INT:
728 rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
729 break;
730
731 case CONST_DOUBLE:
732 {
733 HOST_WIDE_INT arg0 = va_arg (p, HOST_WIDE_INT);
734 HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);
735
736 rt_val = immed_double_const (arg0, arg1, mode);
737 }
738 break;
739
740 case REG:
741 rt_val = gen_rtx_REG (mode, va_arg (p, int));
742 break;
743
744 case MEM:
745 rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
746 break;
747
748 default:
749 rt_val = rtx_alloc (code); /* Allocate the storage space. */
750 rt_val->mode = mode; /* Store the machine mode... */
751
752 fmt = GET_RTX_FORMAT (code); /* Find the right format... */
753 for (i = 0; i < GET_RTX_LENGTH (code); i++)
754 {
755 switch (*fmt++)
756 {
757 case '0': /* Field with unknown use. Zero it. */
758 X0EXP (rt_val, i) = NULL_RTX;
759 break;
760
761 case 'i': /* An integer? */
762 XINT (rt_val, i) = va_arg (p, int);
763 break;
764
765 case 'w': /* A wide integer? */
766 XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
767 break;
768
769 case 's': /* A string? */
770 XSTR (rt_val, i) = va_arg (p, char *);
771 break;
772
773 case 'e': /* An expression? */
774 case 'u': /* An insn? Same except when printing. */
775 XEXP (rt_val, i) = va_arg (p, rtx);
776 break;
777
778 case 'E': /* An RTX vector? */
779 XVEC (rt_val, i) = va_arg (p, rtvec);
780 break;
781
782 case 'b': /* A bitmap? */
783 XBITMAP (rt_val, i) = va_arg (p, bitmap);
784 break;
785
786 case 't': /* A tree? */
787 XTREE (rt_val, i) = va_arg (p, tree);
788 break;
789
790 default:
791 abort ();
792 }
793 }
794 break;
795 }
796
797 va_end (p);
798 return rt_val;
799 }
800
801 /* gen_rtvec (n, [rt1, ..., rtn])
802 **
803 ** This routine creates an rtvec and stores within it the
804 ** pointers to rtx's which are its arguments.
805 */
806
807 /*VARARGS1*/
808 rtvec
809 gen_rtvec (int n, ...)
810 {
811 int i, save_n;
812 rtx *vector;
813 va_list p;
814
815 va_start (p, n);
816
817 if (n == 0)
818 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
819
820 vector = (rtx *) alloca (n * sizeof (rtx));
821
822 for (i = 0; i < n; i++)
823 vector[i] = va_arg (p, rtx);
824
825 /* The definition of VA_* in K&R C causes `n' to go out of scope. */
826 save_n = n;
827 va_end (p);
828
829 return gen_rtvec_v (save_n, vector);
830 }
831
832 rtvec
833 gen_rtvec_v (n, argp)
834 int n;
835 rtx *argp;
836 {
837 int i;
838 rtvec rt_val;
839
840 if (n == 0)
841 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
842
843 rt_val = rtvec_alloc (n); /* Allocate an rtvec... */
844
845 for (i = 0; i < n; i++)
846 rt_val->elem[i] = *argp++;
847
848 return rt_val;
849 }
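
/* A typical use builds the vector inline, e.g., with X and Y being
   previously constructed rtxs,

	rtvec v = gen_rtvec (2, x, y);

   which is equivalent to filling a two-element array and calling
   gen_rtvec_v (2, array).  Note that zero-length requests return
   NULL_RTVEC rather than an empty vector.  */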
850 \f
851 /* Generate a REG rtx for a new pseudo register of mode MODE.
852 This pseudo is assigned the next sequential register number. */
853
854 rtx
855 gen_reg_rtx (mode)
856 enum machine_mode mode;
857 {
858 struct function *f = cfun;
859 rtx val;
860
861 /* Don't let anything called after initial flow analysis create new
862 registers. */
863 if (no_new_pseudos)
864 abort ();
865
866 if (generating_concat_p
867 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
868 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
869 {
870 /* For complex modes, don't make a single pseudo.
871 Instead, make a CONCAT of two pseudos.
872 This allows noncontiguous allocation of the real and imaginary parts,
873 which makes much better code. Besides, allocating DCmode
874 pseudos overstrains reload on some machines like the 386. */
875 rtx realpart, imagpart;
876 enum machine_mode partmode = GET_MODE_INNER (mode);
877
878 realpart = gen_reg_rtx (partmode);
879 imagpart = gen_reg_rtx (partmode);
880 return gen_rtx_CONCAT (mode, realpart, imagpart);
881 }
882
 883   /* Make sure regno_pointer_align and regno_reg_rtx are large
 884      enough to have an element for this pseudo reg number.  */
885
886 if (reg_rtx_no == f->emit->regno_pointer_align_length)
887 {
888 int old_size = f->emit->regno_pointer_align_length;
889 char *new;
890 rtx *new1;
891
892 new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
893 memset (new + old_size, 0, old_size);
894 f->emit->regno_pointer_align = (unsigned char *) new;
895
896 new1 = (rtx *) ggc_realloc (f->emit->x_regno_reg_rtx,
897 old_size * 2 * sizeof (rtx));
898 memset (new1 + old_size, 0, old_size * sizeof (rtx));
899 regno_reg_rtx = new1;
900
901 f->emit->regno_pointer_align_length = old_size * 2;
902 }
903
904 val = gen_raw_REG (mode, reg_rtx_no);
905 regno_reg_rtx[reg_rtx_no++] = val;
906 return val;
907 }
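
/* For scalar modes this simply hands out the next pseudo, e.g.
   gen_reg_rtx (SImode) might return (reg:SI 42).  But, per the
   comment above, a complex request made while generating_concat_p
   is nonzero, such as gen_reg_rtx (DCmode), instead returns
   (concat:DC (reg:DF 42) (reg:DF 43)) built from two independent
   pseudos.  (Register numbers here are illustrative only.)  */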
908
 909 /* Generate a REG rtx with the same attributes as REG, but with its
 910    register offset adjusted by OFFSET.  */
911
912 rtx
913 gen_rtx_REG_offset (reg, mode, regno, offset)
914 enum machine_mode mode;
915 unsigned int regno;
916 int offset;
917 rtx reg;
918 {
919 rtx new = gen_rtx_REG (mode, regno);
920 REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
921 REG_OFFSET (reg) + offset);
922 return new;
923 }
924
 925 /* Set the register attributes of REG from the memory attributes of MEM.  */
926
927 void
928 set_reg_attrs_from_mem (reg, mem)
929 rtx reg;
930 rtx mem;
931 {
932 if (MEM_OFFSET (mem) && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
933 REG_ATTRS (reg)
934 = get_reg_attrs (MEM_EXPR (mem), INTVAL (MEM_OFFSET (mem)));
935 }
936
937 /* Set the register attributes for registers contained in PARM_RTX.
938 Use needed values from memory attributes of MEM. */
939
940 void
941 set_reg_attrs_for_parm (parm_rtx, mem)
942 rtx parm_rtx;
943 rtx mem;
944 {
945 if (GET_CODE (parm_rtx) == REG)
946 set_reg_attrs_from_mem (parm_rtx, mem);
947 else if (GET_CODE (parm_rtx) == PARALLEL)
948 {
949 /* Check for a NULL entry in the first slot, used to indicate that the
950 parameter goes both on the stack and in registers. */
951 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
952 for (; i < XVECLEN (parm_rtx, 0); i++)
953 {
954 rtx x = XVECEXP (parm_rtx, 0, i);
955 if (GET_CODE (XEXP (x, 0)) == REG)
956 REG_ATTRS (XEXP (x, 0))
957 = get_reg_attrs (MEM_EXPR (mem),
958 INTVAL (XEXP (x, 1)));
959 }
960 }
961 }
962
963 /* Assign the RTX X to declaration T. */
964 void
965 set_decl_rtl (t, x)
966 tree t;
967 rtx x;
968 {
969 DECL_CHECK (t)->decl.rtl = x;
970
971 if (!x)
972 return;
 973   /* For registers, we maintain the reverse information too.  */
974 if (GET_CODE (x) == REG)
975 REG_ATTRS (x) = get_reg_attrs (t, 0);
976 else if (GET_CODE (x) == SUBREG)
977 REG_ATTRS (SUBREG_REG (x))
978 = get_reg_attrs (t, -SUBREG_BYTE (x));
979 if (GET_CODE (x) == CONCAT)
980 {
981 if (REG_P (XEXP (x, 0)))
982 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
983 if (REG_P (XEXP (x, 1)))
984 REG_ATTRS (XEXP (x, 1))
985 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
986 }
987 if (GET_CODE (x) == PARALLEL)
988 {
989 int i;
990 for (i = 0; i < XVECLEN (x, 0); i++)
991 {
992 rtx y = XVECEXP (x, 0, i);
993 if (REG_P (XEXP (y, 0)))
994 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
995 }
996 }
997 }
998
999 /* Identify REG (which may be a CONCAT) as a user register. */
1000
1001 void
1002 mark_user_reg (reg)
1003 rtx reg;
1004 {
1005 if (GET_CODE (reg) == CONCAT)
1006 {
1007 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1008 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1009 }
1010 else if (GET_CODE (reg) == REG)
1011 REG_USERVAR_P (reg) = 1;
1012 else
1013 abort ();
1014 }
1015
1016 /* Identify REG as a probable pointer register and show its alignment
1017 as ALIGN, if nonzero. */
1018
1019 void
1020 mark_reg_pointer (reg, align)
1021 rtx reg;
1022 int align;
1023 {
1024 if (! REG_POINTER (reg))
1025 {
1026 REG_POINTER (reg) = 1;
1027
1028 if (align)
1029 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1030 }
1031 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
 1032     /* We can no longer be sure just how aligned this pointer is.  */
1033 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1034 }
1035
 1036 /* Return 1 plus the largest pseudo register number used in the current function.  */
1037
1038 int
1039 max_reg_num ()
1040 {
1041 return reg_rtx_no;
1042 }
1043
1044 /* Return 1 + the largest label number used so far in the current function. */
1045
1046 int
1047 max_label_num ()
1048 {
1049 if (last_label_num && label_num == base_label_num)
1050 return last_label_num;
1051 return label_num;
1052 }
1053
1054 /* Return first label number used in this function (if any were used). */
1055
1056 int
1057 get_first_label_num ()
1058 {
1059 return first_label_num;
1060 }
1061 \f
1062 /* Return the final regno of X, which is a SUBREG of a hard
1063 register. */
1064 int
1065 subreg_hard_regno (x, check_mode)
1066 rtx x;
1067 int check_mode;
1068 {
1069 enum machine_mode mode = GET_MODE (x);
1070 unsigned int byte_offset, base_regno, final_regno;
1071 rtx reg = SUBREG_REG (x);
1072
1073 /* This is where we attempt to catch illegal subregs
1074 created by the compiler. */
1075 if (GET_CODE (x) != SUBREG
1076 || GET_CODE (reg) != REG)
1077 abort ();
1078 base_regno = REGNO (reg);
1079 if (base_regno >= FIRST_PSEUDO_REGISTER)
1080 abort ();
1081 if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
1082 abort ();
1083 #ifdef ENABLE_CHECKING
1084 if (!subreg_offset_representable_p (REGNO (reg), GET_MODE (reg),
1085 SUBREG_BYTE (x), mode))
1086 abort ();
1087 #endif
1088 /* Catch non-congruent offsets too. */
1089 byte_offset = SUBREG_BYTE (x);
1090 if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
1091 abort ();
1092
1093 final_regno = subreg_regno (x);
1094
1095 return final_regno;
1096 }
1097
1098 /* Return a value representing some low-order bits of X, where the number
1099 of low-order bits is given by MODE. Note that no conversion is done
1100 between floating-point and fixed-point values, rather, the bit
1101 representation is returned.
1102
1103 This function handles the cases in common between gen_lowpart, below,
1104 and two variants in cse.c and combine.c. These are the cases that can
1105 be safely handled at all points in the compilation.
1106
1107 If this is not a case we can handle, return 0. */
1108
1109 rtx
1110 gen_lowpart_common (mode, x)
1111 enum machine_mode mode;
1112 rtx x;
1113 {
1114 int msize = GET_MODE_SIZE (mode);
1115 int xsize = GET_MODE_SIZE (GET_MODE (x));
1116 int offset = 0;
1117
1118 if (GET_MODE (x) == mode)
1119 return x;
1120
1121 /* MODE must occupy no more words than the mode of X. */
1122 if (GET_MODE (x) != VOIDmode
1123 && ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1124 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
1125 return 0;
1126
1127 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1128 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1129 && GET_MODE (x) != VOIDmode && msize > xsize)
1130 return 0;
1131
1132 offset = subreg_lowpart_offset (mode, GET_MODE (x));
1133
1134 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1135 && (GET_MODE_CLASS (mode) == MODE_INT
1136 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1137 {
1138 /* If we are getting the low-order part of something that has been
1139 sign- or zero-extended, we can either just use the object being
1140 extended or make a narrower extension. If we want an even smaller
1141 piece than the size of the object being extended, call ourselves
1142 recursively.
1143
1144 This case is used mostly by combine and cse. */
1145
1146 if (GET_MODE (XEXP (x, 0)) == mode)
1147 return XEXP (x, 0);
1148 else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1149 return gen_lowpart_common (mode, XEXP (x, 0));
1150 else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
1151 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1152 }
1153 else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
1154 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR)
1155 return simplify_gen_subreg (mode, x, GET_MODE (x), offset);
1156 else if ((GET_MODE_CLASS (mode) == MODE_VECTOR_INT
1157 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
1158 && GET_MODE (x) == VOIDmode)
1159 return simplify_gen_subreg (mode, x, int_mode_for_mode (mode), offset);
1160 /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
1161 from the low-order part of the constant. */
1162 else if ((GET_MODE_CLASS (mode) == MODE_INT
1163 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
1164 && GET_MODE (x) == VOIDmode
1165 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
1166 {
1167 /* If MODE is twice the host word size, X is already the desired
1168 representation. Otherwise, if MODE is wider than a word, we can't
1169 do this. If MODE is exactly a word, return just one CONST_INT. */
1170
1171 if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
1172 return x;
1173 else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1174 return 0;
1175 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
1176 return (GET_CODE (x) == CONST_INT ? x
1177 : GEN_INT (CONST_DOUBLE_LOW (x)));
1178 else
1179 {
1180 /* MODE must be narrower than HOST_BITS_PER_WIDE_INT. */
1181 HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
1182 : CONST_DOUBLE_LOW (x));
1183
1184 /* Sign extend to HOST_WIDE_INT. */
1185 val = trunc_int_for_mode (val, mode);
1186
1187 return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
1188 : GEN_INT (val));
1189 }
1190 }
1191
1192 /* The floating-point emulator can handle all conversions between
1193 FP and integer operands. This simplifies reload because it
1194 doesn't have to deal with constructs like (subreg:DI
1195 (const_double:SF ...)) or (subreg:DF (const_int ...)). */
1196 /* Single-precision floats are always 32-bits and double-precision
1197 floats are always 64-bits. */
1198
1199 else if (GET_MODE_CLASS (mode) == MODE_FLOAT
1200 && GET_MODE_BITSIZE (mode) == 32
1201 && GET_CODE (x) == CONST_INT)
1202 {
1203 REAL_VALUE_TYPE r;
1204 long i = INTVAL (x);
1205
1206 real_from_target (&r, &i, mode);
1207 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
1208 }
1209 else if (GET_MODE_CLASS (mode) == MODE_FLOAT
1210 && GET_MODE_BITSIZE (mode) == 64
1211 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
1212 && GET_MODE (x) == VOIDmode)
1213 {
1214 REAL_VALUE_TYPE r;
1215 HOST_WIDE_INT low, high;
1216 long i[2];
1217
1218 if (GET_CODE (x) == CONST_INT)
1219 {
1220 low = INTVAL (x);
1221 high = low >> (HOST_BITS_PER_WIDE_INT - 1);
1222 }
1223 else
1224 {
1225 low = CONST_DOUBLE_LOW (x);
1226 high = CONST_DOUBLE_HIGH (x);
1227 }
1228
1229 if (HOST_BITS_PER_WIDE_INT > 32)
1230 high = low >> 31 >> 1;
1231
1232 /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
1233 target machine. */
1234 if (WORDS_BIG_ENDIAN)
1235 i[0] = high, i[1] = low;
1236 else
1237 i[0] = low, i[1] = high;
1238
1239 real_from_target (&r, i, mode);
1240 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
1241 }
1242 else if ((GET_MODE_CLASS (mode) == MODE_INT
1243 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
1244 && GET_CODE (x) == CONST_DOUBLE
1245 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
1246 {
1247 REAL_VALUE_TYPE r;
1248 long i[4]; /* Only the low 32 bits of each 'long' are used. */
1249 int endian = WORDS_BIG_ENDIAN ? 1 : 0;
1250
1251 /* Convert 'r' into an array of four 32-bit words in target word
1252 order. */
1253 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
1254 switch (GET_MODE_BITSIZE (GET_MODE (x)))
1255 {
1256 case 32:
1257 REAL_VALUE_TO_TARGET_SINGLE (r, i[3 * endian]);
1258 i[1] = 0;
1259 i[2] = 0;
1260 i[3 - 3 * endian] = 0;
1261 break;
1262 case 64:
1263 REAL_VALUE_TO_TARGET_DOUBLE (r, i + 2 * endian);
1264 i[2 - 2 * endian] = 0;
1265 i[3 - 2 * endian] = 0;
1266 break;
1267 case 96:
1268 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i + endian);
1269 i[3 - 3 * endian] = 0;
1270 break;
1271 case 128:
1272 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i);
1273 break;
1274 default:
1275 abort ();
1276 }
1277 /* Now, pack the 32-bit elements of the array into a CONST_DOUBLE
1278 and return it. */
1279 #if HOST_BITS_PER_WIDE_INT == 32
1280 return immed_double_const (i[3 * endian], i[1 + endian], mode);
1281 #else
1282 if (HOST_BITS_PER_WIDE_INT != 64)
1283 abort ();
1284
1285 return immed_double_const ((((unsigned long) i[3 * endian])
1286 | ((HOST_WIDE_INT) i[1 + endian] << 32)),
1287 (((unsigned long) i[2 - endian])
1288 | ((HOST_WIDE_INT) i[3 - 3 * endian] << 32)),
1289 mode);
1290 #endif
1291 }
1292
1293 /* Otherwise, we can't do this. */
1294 return 0;
1295 }
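
/* Two worked instances of the constant cases above, assuming an 8-bit
   QImode narrower than HOST_BITS_PER_WIDE_INT:

	gen_lowpart_common (QImode, GEN_INT (0x1234)) ==> (const_int 52)
	gen_lowpart_common (QImode, GEN_INT (0x12f0)) ==> (const_int -16)

   Only the low 8 bits survive, and trunc_int_for_mode re-canonicalizes
   the result, which is why the second value comes back sign-extended.  */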
1296 \f
1297 /* Return the real part (which has mode MODE) of a complex value X.
1298 This always comes at the low address in memory. */
1299
1300 rtx
1301 gen_realpart (mode, x)
1302 enum machine_mode mode;
1303 rtx x;
1304 {
1305 if (WORDS_BIG_ENDIAN
1306 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1307 && REG_P (x)
1308 && REGNO (x) < FIRST_PSEUDO_REGISTER)
1309 internal_error
1310 ("can't access real part of complex value in hard register");
1311 else if (WORDS_BIG_ENDIAN)
1312 return gen_highpart (mode, x);
1313 else
1314 return gen_lowpart (mode, x);
1315 }
1316
1317 /* Return the imaginary part (which has mode MODE) of a complex value X.
1318 This always comes at the high address in memory. */
1319
1320 rtx
1321 gen_imagpart (mode, x)
1322 enum machine_mode mode;
1323 rtx x;
1324 {
1325 if (WORDS_BIG_ENDIAN)
1326 return gen_lowpart (mode, x);
1327 else if (! WORDS_BIG_ENDIAN
1328 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1329 && REG_P (x)
1330 && REGNO (x) < FIRST_PSEUDO_REGISTER)
1331 internal_error
1332 ("can't access imaginary part of complex value in hard register");
1333 else
1334 return gen_highpart (mode, x);
1335 }
1336
1337 /* Return 1 iff X, assumed to be a SUBREG,
1338 refers to the real part of the complex value in its containing reg.
1339 Complex values are always stored with the real part in the first word,
1340 regardless of WORDS_BIG_ENDIAN. */
1341
1342 int
1343 subreg_realpart_p (x)
1344 rtx x;
1345 {
1346 if (GET_CODE (x) != SUBREG)
1347 abort ();
1348
1349 return ((unsigned int) SUBREG_BYTE (x)
1350 < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
1351 }
1352 \f
1353 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
1354 return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
1355 least-significant part of X.
1356 MODE specifies how big a part of X to return;
1357 it usually should not be larger than a word.
1358 If X is a MEM whose address is a QUEUED, the value may be so also. */
1359
1360 rtx
1361 gen_lowpart (mode, x)
1362 enum machine_mode mode;
1363 rtx x;
1364 {
1365 rtx result = gen_lowpart_common (mode, x);
1366
1367 if (result)
1368 return result;
1369 else if (GET_CODE (x) == REG)
1370 {
1371 /* Must be a hard reg that's not valid in MODE. */
1372 result = gen_lowpart_common (mode, copy_to_reg (x));
1373 if (result == 0)
1374 abort ();
1375 return result;
1376 }
1377 else if (GET_CODE (x) == MEM)
1378 {
1379 /* The only additional case we can do is MEM. */
1380 int offset = 0;
1381
1382 /* The following exposes the use of "x" to CSE. */
1383 if (GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
1384 && SCALAR_INT_MODE_P (GET_MODE (x))
1385 && ! no_new_pseudos)
1386 return gen_lowpart (mode, force_reg (GET_MODE (x), x));
1387
1388 if (WORDS_BIG_ENDIAN)
1389 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
1390 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
1391
1392 if (BYTES_BIG_ENDIAN)
1393 /* Adjust the address so that the address-after-the-data
1394 is unchanged. */
1395 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
1396 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
1397
1398 return adjust_address (x, mode, offset);
1399 }
1400 else if (GET_CODE (x) == ADDRESSOF)
1401 return gen_lowpart (mode, force_reg (GET_MODE (x), x));
1402 else
1403 abort ();
1404 }
1405
1406 /* Like `gen_lowpart', but refer to the most significant part.
1407 This is used to access the imaginary part of a complex number. */
1408
1409 rtx
1410 gen_highpart (mode, x)
1411 enum machine_mode mode;
1412 rtx x;
1413 {
1414 unsigned int msize = GET_MODE_SIZE (mode);
1415 rtx result;
1416
1417 /* This case loses if X is a subreg. To catch bugs early,
1418 complain if an invalid MODE is used even in other cases. */
1419 if (msize > UNITS_PER_WORD
1420 && msize != GET_MODE_UNIT_SIZE (GET_MODE (x)))
1421 abort ();
1422
1423 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1424 subreg_highpart_offset (mode, GET_MODE (x)));
1425
1426 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1427 the target if we have a MEM. gen_highpart must return a valid operand,
1428 emitting code if necessary to do so. */
1429 if (result != NULL_RTX && GET_CODE (result) == MEM)
1430 result = validize_mem (result);
1431
1432 if (!result)
1433 abort ();
1434 return result;
1435 }
1436
 1437 /* Like gen_highpart, but accept INNERMODE explicitly, since EXP may be
 1438    a VOIDmode constant.  */
1439 rtx
1440 gen_highpart_mode (outermode, innermode, exp)
1441 enum machine_mode outermode, innermode;
1442 rtx exp;
1443 {
1444 if (GET_MODE (exp) != VOIDmode)
1445 {
1446 if (GET_MODE (exp) != innermode)
1447 abort ();
1448 return gen_highpart (outermode, exp);
1449 }
1450 return simplify_gen_subreg (outermode, exp, innermode,
1451 subreg_highpart_offset (outermode, innermode));
1452 }
1453
1454 /* Return offset in bytes to get OUTERMODE low part
1455 of the value in mode INNERMODE stored in memory in target format. */
1456
1457 unsigned int
1458 subreg_lowpart_offset (outermode, innermode)
1459 enum machine_mode outermode, innermode;
1460 {
1461 unsigned int offset = 0;
1462 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1463
1464 if (difference > 0)
1465 {
1466 if (WORDS_BIG_ENDIAN)
1467 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1468 if (BYTES_BIG_ENDIAN)
1469 offset += difference % UNITS_PER_WORD;
1470 }
1471
1472 return offset;
1473 }
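
/* Worked example, assuming 4-byte words: for OUTERMODE == HImode and
   INNERMODE == DImode the difference is 6 bytes, so

	subreg_lowpart_offset (HImode, DImode)

   is 0 on a fully little-endian target, 4 + 2 = 6 when both
   WORDS_BIG_ENDIAN and BYTES_BIG_ENDIAN, and 4 or 2 in the
   mixed-endian cases.  subreg_highpart_offset below is the mirror
   image of this computation.  */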
1474
1475 /* Return offset in bytes to get OUTERMODE high part
1476 of the value in mode INNERMODE stored in memory in target format. */
1477 unsigned int
1478 subreg_highpart_offset (outermode, innermode)
1479 enum machine_mode outermode, innermode;
1480 {
1481 unsigned int offset = 0;
1482 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1483
1484 if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
1485 abort ();
1486
1487 if (difference > 0)
1488 {
1489 if (! WORDS_BIG_ENDIAN)
1490 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1491 if (! BYTES_BIG_ENDIAN)
1492 offset += difference % UNITS_PER_WORD;
1493 }
1494
1495 return offset;
1496 }
1497
1498 /* Return 1 iff X, assumed to be a SUBREG,
1499 refers to the least significant part of its containing reg.
1500 If X is not a SUBREG, always return 1 (it is its own low part!). */
1501
1502 int
1503 subreg_lowpart_p (x)
1504 rtx x;
1505 {
1506 if (GET_CODE (x) != SUBREG)
1507 return 1;
1508 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1509 return 0;
1510
1511 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1512 == SUBREG_BYTE (x));
1513 }
1514 \f
1515
1516 /* Helper routine for all the constant cases of operand_subword.
1517 Some places invoke this directly. */
1518
1519 rtx
1520 constant_subword (op, offset, mode)
1521 rtx op;
1522 int offset;
1523 enum machine_mode mode;
1524 {
1525 int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
1526 HOST_WIDE_INT val;
1527
1528 /* If OP is already an integer word, return it. */
1529 if (GET_MODE_CLASS (mode) == MODE_INT
1530 && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
1531 return op;
1532
1533 /* The output is some bits, the width of the target machine's word.
1534 A wider-word host can surely hold them in a CONST_INT. A narrower-word
1535 host can't. */
1536 if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
1537 && GET_MODE_CLASS (mode) == MODE_FLOAT
1538 && GET_MODE_BITSIZE (mode) == 64
1539 && GET_CODE (op) == CONST_DOUBLE)
1540 {
1541 long k[2];
1542 REAL_VALUE_TYPE rv;
1543
1544 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1545 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1546
1547 /* We handle 32-bit and >= 64-bit words here. Note that the order in
1548 which the words are written depends on the word endianness.
1549 ??? This is a potential portability problem and should
1550 be fixed at some point.
1551
1552 We must exercise caution with the sign bit. By definition there
1553 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
1554 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
1555 So we explicitly mask and sign-extend as necessary. */
1556 if (BITS_PER_WORD == 32)
1557 {
1558 val = k[offset];
1559 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1560 return GEN_INT (val);
1561 }
1562 #if HOST_BITS_PER_WIDE_INT >= 64
1563 else if (BITS_PER_WORD >= 64 && offset == 0)
1564 {
1565 val = k[! WORDS_BIG_ENDIAN];
1566 val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
1567 val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
1568 return GEN_INT (val);
1569 }
1570 #endif
1571 else if (BITS_PER_WORD == 16)
1572 {
1573 val = k[offset >> 1];
1574 if ((offset & 1) == ! WORDS_BIG_ENDIAN)
1575 val >>= 16;
1576 val = ((val & 0xffff) ^ 0x8000) - 0x8000;
1577 return GEN_INT (val);
1578 }
1579 else
1580 abort ();
1581 }
1582 else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
1583 && GET_MODE_CLASS (mode) == MODE_FLOAT
1584 && GET_MODE_BITSIZE (mode) > 64
1585 && GET_CODE (op) == CONST_DOUBLE)
1586 {
1587 long k[4];
1588 REAL_VALUE_TYPE rv;
1589
1590 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1591 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1592
1593 if (BITS_PER_WORD == 32)
1594 {
1595 val = k[offset];
1596 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1597 return GEN_INT (val);
1598 }
1599 #if HOST_BITS_PER_WIDE_INT >= 64
1600 else if (BITS_PER_WORD >= 64 && offset <= 1)
1601 {
1602 val = k[offset * 2 + ! WORDS_BIG_ENDIAN];
1603 val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
1604 val |= (HOST_WIDE_INT) k[offset * 2 + WORDS_BIG_ENDIAN] & 0xffffffff;
1605 return GEN_INT (val);
1606 }
1607 #endif
1608 else
1609 abort ();
1610 }
1611
1612 /* Single word float is a little harder, since single- and double-word
1613 values often do not have the same high-order bits. We have already
1614 verified that we want the only defined word of the single-word value. */
1615 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1616 && GET_MODE_BITSIZE (mode) == 32
1617 && GET_CODE (op) == CONST_DOUBLE)
1618 {
1619 long l;
1620 REAL_VALUE_TYPE rv;
1621
1622 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1623 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1624
1625 /* Sign extend from known 32-bit value to HOST_WIDE_INT. */
1626 val = l;
1627 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1628
1629 if (BITS_PER_WORD == 16)
1630 {
1631 if ((offset & 1) == ! WORDS_BIG_ENDIAN)
1632 val >>= 16;
1633 val = ((val & 0xffff) ^ 0x8000) - 0x8000;
1634 }
1635
1636 return GEN_INT (val);
1637 }
1638
1639 /* The only remaining cases that we can handle are integers.
1640 Convert to proper endianness now since these cases need it.
1641 At this point, offset == 0 means the low-order word.
1642
1643 We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
1644 in general. However, if OP is (const_int 0), we can just return
1645 it for any word. */
1646
1647 if (op == const0_rtx)
1648 return op;
1649
1650 if (GET_MODE_CLASS (mode) != MODE_INT
1651 || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
1652 || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
1653 return 0;
1654
1655 if (WORDS_BIG_ENDIAN)
1656 offset = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - offset;
1657
1658 /* Find out which word on the host machine this value is in and get
1659 it from the constant. */
1660 val = (offset / size_ratio == 0
1661 ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
1662 : (GET_CODE (op) == CONST_INT
1663 ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));
1664
1665 /* Get the value we want into the low bits of val. */
1666 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
1667 val = ((val >> ((offset % size_ratio) * BITS_PER_WORD)));
1668
1669 val = trunc_int_for_mode (val, word_mode);
1670
1671 return GEN_INT (val);
1672 }
1673
1674 /* Return subword OFFSET of operand OP.
1675 The word number, OFFSET, is interpreted as the word number starting
1676 at the low-order address. OFFSET 0 is the low-order word if not
1677 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1678
1679 If we cannot extract the required word, we return zero. Otherwise,
1680 an rtx corresponding to the requested word will be returned.
1681
1682 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1683 reload has completed, a valid address will always be returned. After
1684 reload, if a valid address cannot be returned, we return zero.
1685
1686 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1687 it is the responsibility of the caller.
1688
1689 MODE is the mode of OP in case it is a CONST_INT.
1690
 1691    ??? This is still rather broken for some cases.  The problem for the
 1692    moment is that no caller of this function provides a 'goal mode' to
 1693    tell us what to work with.  This is so because all callers were
 1694    written in a word-based SUBREG world.
 1695    Use of this function can now be replaced by simplify_subreg in most
 1696    cases.
 1697 */
1698
1699 rtx
1700 operand_subword (op, offset, validate_address, mode)
1701 rtx op;
1702 unsigned int offset;
1703 int validate_address;
1704 enum machine_mode mode;
1705 {
1706 if (mode == VOIDmode)
1707 mode = GET_MODE (op);
1708
1709 if (mode == VOIDmode)
1710 abort ();
1711
1712 /* If OP is narrower than a word, fail. */
1713 if (mode != BLKmode
1714 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1715 return 0;
1716
1717 /* If we want a word outside OP, return zero. */
1718 if (mode != BLKmode
1719 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1720 return const0_rtx;
1721
1722 /* Form a new MEM at the requested address. */
1723 if (GET_CODE (op) == MEM)
1724 {
1725 rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1726
1727 if (! validate_address)
1728 return new;
1729
1730 else if (reload_completed)
1731 {
1732 if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
1733 return 0;
1734 }
1735 else
1736 return replace_equiv_address (new, XEXP (new, 0));
1737 }
1738
1739 /* Rest can be handled by simplify_subreg. */
1740 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1741 }
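
/* For example, assuming 4-byte words (word_mode == SImode) and
   !WORDS_BIG_ENDIAN,

	operand_subword (x, 1, 0, DImode)	[x == (reg:DI 100)]

   returns the high-order word as (subreg:SI (reg:DI 100) 4) via
   simplify_gen_subreg, whereas the same call on a MEM re-addresses
   the MEM at byte offset 4 instead of wrapping it in a SUBREG.
   (The register number is illustrative only.)  */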
1742
1743 /* Similar to `operand_subword', but never return 0. If we can't extract
1744 the required subword, put OP into a register and try again. If that fails,
1745 abort. We always validate the address in this case.
1746
1747 MODE is the mode of OP, in case it is CONST_INT. */
1748
1749 rtx
1750 operand_subword_force (op, offset, mode)
1751 rtx op;
1752 unsigned int offset;
1753 enum machine_mode mode;
1754 {
1755 rtx result = operand_subword (op, offset, 1, mode);
1756
1757 if (result)
1758 return result;
1759
1760 if (mode != BLKmode && mode != VOIDmode)
1761 {
 1762       /* If this is a register which cannot be accessed by words, copy it
1763 to a pseudo register. */
1764 if (GET_CODE (op) == REG)
1765 op = copy_to_reg (op);
1766 else
1767 op = force_reg (mode, op);
1768 }
1769
1770 result = operand_subword (op, offset, 1, mode);
1771 if (result == 0)
1772 abort ();
1773
1774 return result;
1775 }
1776 \f
1777 /* Given a compare instruction, swap the operands.
1778 A test instruction is changed into a compare of 0 against the operand. */
1779
1780 void
1781 reverse_comparison (insn)
1782 rtx insn;
1783 {
1784 rtx body = PATTERN (insn);
1785 rtx comp;
1786
1787 if (GET_CODE (body) == SET)
1788 comp = SET_SRC (body);
1789 else
1790 comp = SET_SRC (XVECEXP (body, 0, 0));
1791
1792 if (GET_CODE (comp) == COMPARE)
1793 {
1794 rtx op0 = XEXP (comp, 0);
1795 rtx op1 = XEXP (comp, 1);
1796 XEXP (comp, 0) = op1;
1797 XEXP (comp, 1) = op0;
1798 }
1799 else
1800 {
1801 rtx new = gen_rtx_COMPARE (VOIDmode,
1802 CONST0_RTX (GET_MODE (comp)), comp);
1803 if (GET_CODE (body) == SET)
1804 SET_SRC (body) = new;
1805 else
1806 SET_SRC (XVECEXP (body, 0, 0)) = new;
1807 }
1808 }
1809 \f
1810 /* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
 1811    or (2) a component ref of something variable.  Represent the latter with
1812 a NULL expression. */
1813
1814 static tree
1815 component_ref_for_mem_expr (ref)
1816 tree ref;
1817 {
1818 tree inner = TREE_OPERAND (ref, 0);
1819
1820 if (TREE_CODE (inner) == COMPONENT_REF)
1821 inner = component_ref_for_mem_expr (inner);
1822 else
1823 {
1824 tree placeholder_ptr = 0;
1825
1826 /* Now remove any conversions: they don't change what the underlying
1827 object is. Likewise for SAVE_EXPR. Also handle PLACEHOLDER_EXPR. */
1828 while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
1829 || TREE_CODE (inner) == NON_LVALUE_EXPR
1830 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1831 || TREE_CODE (inner) == SAVE_EXPR
1832 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
1833 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
1834 inner = find_placeholder (inner, &placeholder_ptr);
1835 else
1836 inner = TREE_OPERAND (inner, 0);
1837
1838 if (! DECL_P (inner))
1839 inner = NULL_TREE;
1840 }
1841
1842 if (inner == TREE_OPERAND (ref, 0))
1843 return ref;
1844 else
1845 return build (COMPONENT_REF, TREE_TYPE (ref), inner,
1846 TREE_OPERAND (ref, 1));
1847 }
1848
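/* For example (a sketch): for p->s.f, where the pointer P lives in a
   pseudo rather than in a decl, the innermost reference is not DECL_P,
   so the result recorded is (COMPONENT_REF (COMPONENT_REF NULL s) f).  */
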
1849 /* Given REF, a MEM, and T, either the type of REF or the expression
1850 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1851 if we are making a new object of this type. BITPOS is nonzero if
1852 there is an offset outstanding on T that will be applied later. */
1853
1854 void
1855 set_mem_attributes_minus_bitpos (ref, t, objectp, bitpos)
1856 rtx ref;
1857 tree t;
1858 int objectp;
1859 HOST_WIDE_INT bitpos;
1860 {
1861 HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
1862 tree expr = MEM_EXPR (ref);
1863 rtx offset = MEM_OFFSET (ref);
1864 rtx size = MEM_SIZE (ref);
1865 unsigned int align = MEM_ALIGN (ref);
1866 HOST_WIDE_INT apply_bitpos = 0;
1867 tree type;
1868
1869 /* It can happen that type_for_mode was given a mode for which there
1870 is no language-level type; in that case it returns NULL, which we
1871 can see here. */
1872 if (t == NULL_TREE)
1873 return;
1874
1875 type = TYPE_P (t) ? t : TREE_TYPE (t);
1876
1877 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1878 wrong answer, as it assumes that DECL_RTL already has the right alias
1879 info. Callers should not set DECL_RTL until after the call to
1880 set_mem_attributes. */
1881 if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
1882 abort ();
1883
1884 /* Get the alias set from the expression or type (perhaps using a
1885 front-end routine) and use it. */
1886 alias = get_alias_set (t);
1887
1888 MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
1889 MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
1890 RTX_UNCHANGING_P (ref)
1891 |= ((lang_hooks.honor_readonly
1892 && (TYPE_READONLY (type) || TREE_READONLY (t)))
1893 || (! TYPE_P (t) && TREE_CONSTANT (t)));
1894
1895 /* If we are making an object of this type, or if this is a DECL, we know
1896 that it is a scalar if the type is not an aggregate. */
1897 if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
1898 MEM_SCALAR_P (ref) = 1;
1899
1900 /* We can set the alignment from the type if we are making an object,
1901 if this is an INDIRECT_REF, or if TYPE_ALIGN_OK is set. */
1902 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1903 align = MAX (align, TYPE_ALIGN (type));
1904
1905 /* If the size is known, we can set that. */
1906 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1907 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
1908
1909 /* If T is not a type, we may be able to deduce some more information about
1910 the expression. */
1911 if (! TYPE_P (t))
1912 {
1913 maybe_set_unchanging (ref, t);
1914 if (TREE_THIS_VOLATILE (t))
1915 MEM_VOLATILE_P (ref) = 1;
1916
1917 /* Now remove any conversions: they don't change what the underlying
1918 object is. Likewise for SAVE_EXPR. */
1919 while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
1920 || TREE_CODE (t) == NON_LVALUE_EXPR
1921 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1922 || TREE_CODE (t) == SAVE_EXPR)
1923 t = TREE_OPERAND (t, 0);
1924
1925 /* If this expression can't be addressed (e.g., it contains a reference
1926 to a non-addressable field), show we don't change its alias set. */
1927 if (! can_address_p (t))
1928 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1929
1930 /* If this is a decl, set the attributes of the MEM from it. */
1931 if (DECL_P (t))
1932 {
1933 expr = t;
1934 offset = const0_rtx;
1935 apply_bitpos = bitpos;
1936 size = (DECL_SIZE_UNIT (t)
1937 && host_integerp (DECL_SIZE_UNIT (t), 1)
1938 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1939 align = DECL_ALIGN (t);
1940 }
1941
1942 /* If this is a constant, we know the alignment. */
1943 else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
1944 {
1945 align = TYPE_ALIGN (type);
1946 #ifdef CONSTANT_ALIGNMENT
1947 align = CONSTANT_ALIGNMENT (t, align);
1948 #endif
1949 }
1950
1951 /* If this is a field reference and not a bit-field, record it. */
1952 /* ??? There is some information that can be gleaned from bit-fields,
1953 such as the word offset in the structure that might be modified.
1954 But skip it for now. */
1955 else if (TREE_CODE (t) == COMPONENT_REF
1956 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1957 {
1958 expr = component_ref_for_mem_expr (t);
1959 offset = const0_rtx;
1960 apply_bitpos = bitpos;
1961 /* ??? Any reason the field size would be different than
1962 the size we got from the type? */
1963 }
1964
1965 /* If this is an array reference, look for an outer field reference. */
1966 else if (TREE_CODE (t) == ARRAY_REF)
1967 {
1968 tree off_tree = size_zero_node;
1969
1970 do
1971 {
1972 tree index = TREE_OPERAND (t, 1);
1973 tree array = TREE_OPERAND (t, 0);
1974 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
1975 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
1976 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
1977
1978 /* We assume all arrays have sizes that are a multiple of a byte.
1979 First subtract the lower bound, if any, in the type of the
1980 index, then convert to sizetype and multiply by the size of the
1981 array element. */
1982 if (low_bound != 0 && ! integer_zerop (low_bound))
1983 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
1984 index, low_bound));
1985
1986 /* If the index has a self-referential type, pass it to a
1987 WITH_RECORD_EXPR; if the component size is self-referential,
1988 pass our component to one. */
1989 if (CONTAINS_PLACEHOLDER_P (index))
1990 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, t);
1991 if (CONTAINS_PLACEHOLDER_P (unit_size))
1992 unit_size = build (WITH_RECORD_EXPR, sizetype,
1993 unit_size, array);
1994
1995 off_tree
1996 = fold (build (PLUS_EXPR, sizetype,
1997 fold (build (MULT_EXPR, sizetype,
1998 index,
1999 unit_size)),
2000 off_tree));
2001 t = TREE_OPERAND (t, 0);
2002 }
2003 while (TREE_CODE (t) == ARRAY_REF);
2004
2005 if (DECL_P (t))
2006 {
2007 expr = t;
2008 offset = NULL;
2009 if (host_integerp (off_tree, 1))
2010 {
2011 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
2012 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
2013 align = DECL_ALIGN (t);
2014 if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
2015 align = aoff;
2016 offset = GEN_INT (ioff);
2017 apply_bitpos = bitpos;
2018 }
2019 }
2020 else if (TREE_CODE (t) == COMPONENT_REF)
2021 {
2022 expr = component_ref_for_mem_expr (t);
2023 if (host_integerp (off_tree, 1))
2024 {
2025 offset = GEN_INT (tree_low_cst (off_tree, 1));
2026 apply_bitpos = bitpos;
2027 }
2028 /* ??? Any reason the field size would be different than
2029 the size we got from the type? */
2030 }
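/* Likewise record a Fortran indirect argument reference found at the
   base of the array (compare the like case just below). */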
2031 else if (flag_argument_noalias > 1
2032 && TREE_CODE (t) == INDIRECT_REF
2033 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
2034 {
2035 expr = t;
2036 offset = NULL;
2037 }
2038 }
2039
2040 /* If this is a Fortran indirect argument reference, record the
2041 parameter decl. */
2042 else if (flag_argument_noalias > 1
2043 && TREE_CODE (t) == INDIRECT_REF
2044 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
2045 {
2046 expr = t;
2047 offset = NULL;
2048 }
2049 }
2050
2051 /* If we modified OFFSET based on T, then subtract the outstanding
2052 bit position offset. Similarly, increase the size of the accessed
2053 object to contain the negative offset. */
2054 if (apply_bitpos)
2055 {
2056 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
2057 if (size)
2058 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
2059 }
2060
2061 /* Now set the attributes we computed above. */
2062 MEM_ATTRS (ref)
2063 = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
2064
2065 /* If this is already known to be a scalar or aggregate, we are done. */
2066 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
2067 return;
2068
2069 /* If it is a reference into an aggregate, this is part of an aggregate.
2070 Otherwise we don't know. */
2071 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
2072 || TREE_CODE (t) == ARRAY_RANGE_REF
2073 || TREE_CODE (t) == BIT_FIELD_REF)
2074 MEM_IN_STRUCT_P (ref) = 1;
2075 }
2076
2077 void
2078 set_mem_attributes (ref, t, objectp)
2079 rtx ref;
2080 tree t;
2081 int objectp;
2082 {
2083 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
2084 }
2085
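/* An illustrative sketch (hypothetical, not part of GCC): giving a
   fresh MEM the attributes of the declaration DECL it will hold.
   DECL and ADDR are assumed to exist.  */
#if 0
rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);
set_mem_attributes (mem, decl, 1);  /* nonzero OBJECTP: MEM is the object */
#endif
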
2086 /* Set the decl for MEM to DECL. */
2087
2088 void
2089 set_mem_attrs_from_reg (mem, reg)
2090 rtx mem;
2091 rtx reg;
2092 {
2093 MEM_ATTRS (mem)
2094 = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
2095 GEN_INT (REG_OFFSET (reg)),
2096 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
2097 }
2098
2099 /* Set the alias set of MEM to SET. */
2100
2101 void
2102 set_mem_alias_set (mem, set)
2103 rtx mem;
2104 HOST_WIDE_INT set;
2105 {
2106 #ifdef ENABLE_CHECKING
2107 /* If the new and old alias sets don't conflict, something is wrong. */
2108 if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
2109 abort ();
2110 #endif
2111
2112 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
2113 MEM_SIZE (mem), MEM_ALIGN (mem),
2114 GET_MODE (mem));
2115 }
2116
2117 /* Set the alignment of MEM to ALIGN bits. */
2118
2119 void
2120 set_mem_align (mem, align)
2121 rtx mem;
2122 unsigned int align;
2123 {
2124 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
2125 MEM_OFFSET (mem), MEM_SIZE (mem), align,
2126 GET_MODE (mem));
2127 }
2128
2129 /* Set the expr for MEM to EXPR. */
2130
2131 void
2132 set_mem_expr (mem, expr)
2133 rtx mem;
2134 tree expr;
2135 {
2136 MEM_ATTRS (mem)
2137 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
2138 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
2139 }
2140
2141 /* Set the offset of MEM to OFFSET. */
2142
2143 void
2144 set_mem_offset (mem, offset)
2145 rtx mem, offset;
2146 {
2147 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
2148 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
2149 GET_MODE (mem));
2150 }
2151
2152 /* Set the size of MEM to SIZE. */
2153
2154 void
2155 set_mem_size (mem, size)
2156 rtx mem, size;
2157 {
2158 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
2159 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
2160 GET_MODE (mem));
2161 }
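
/* An illustrative sketch: each wrapper above rebuilds MEM_ATTRS with one
   field changed, so several attributes are adjusted by successive calls.
   MEM is an assumed operand.  */
#if 0
set_mem_align (mem, 64);                    /* now known 64-bit aligned */
set_mem_alias_set (mem, new_alias_set ());  /* give it a fresh alias set */
set_mem_size (mem, GEN_INT (8));            /* 8 bytes are accessed */
#endif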
2162 \f
2163 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2164 and its address changed to ADDR. (VOIDmode means don't change the mode.
2165 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2166 returned memory location is required to be valid. The memory
2167 attributes are not changed. */
2168
2169 static rtx
2170 change_address_1 (memref, mode, addr, validate)
2171 rtx memref;
2172 enum machine_mode mode;
2173 rtx addr;
2174 int validate;
2175 {
2176 rtx new;
2177
2178 if (GET_CODE (memref) != MEM)
2179 abort ();
2180 if (mode == VOIDmode)
2181 mode = GET_MODE (memref);
2182 if (addr == 0)
2183 addr = XEXP (memref, 0);
2184
2185 if (validate)
2186 {
2187 if (reload_in_progress || reload_completed)
2188 {
2189 if (! memory_address_p (mode, addr))
2190 abort ();
2191 }
2192 else
2193 addr = memory_address (mode, addr);
2194 }
2195
2196 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2197 return memref;
2198
2199 new = gen_rtx_MEM (mode, addr);
2200 MEM_COPY_ATTRIBUTES (new, memref);
2201 return new;
2202 }
2203
2204 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2205 way we are changing MEMREF, so we only preserve the alias set. */
2206
2207 rtx
2208 change_address (memref, mode, addr)
2209 rtx memref;
2210 enum machine_mode mode;
2211 rtx addr;
2212 {
2213 rtx new = change_address_1 (memref, mode, addr, 1);
2214 enum machine_mode mmode = GET_MODE (new);
2215
2216 MEM_ATTRS (new)
2217 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0,
2218 mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode)),
2219 (mmode == BLKmode ? BITS_PER_UNIT
2220 : GET_MODE_ALIGNMENT (mmode)),
2221 mmode);
2222
2223 return new;
2224 }
2225
2226 /* Return a memory reference like MEMREF, but with its mode changed
2227 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2228 nonzero, the memory address is forced to be valid.
2229 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
2230 and caller is responsible for adjusting MEMREF base register. */
2231
2232 rtx
2233 adjust_address_1 (memref, mode, offset, validate, adjust)
2234 rtx memref;
2235 enum machine_mode mode;
2236 HOST_WIDE_INT offset;
2237 int validate, adjust;
2238 {
2239 rtx addr = XEXP (memref, 0);
2240 rtx new;
2241 rtx memoffset = MEM_OFFSET (memref);
2242 rtx size = 0;
2243 unsigned int memalign = MEM_ALIGN (memref);
2244
2245 /* ??? Prefer to create garbage instead of creating shared rtl.
2246 This may happen even if offset is nonzero -- consider
2247 (plus (plus reg reg) const_int) -- so do this always. */
2248 addr = copy_rtx (addr);
2249
2250 if (adjust)
2251 {
2252 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2253 object, we can merge it into the LO_SUM. */
2254 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2255 && offset >= 0
2256 && (unsigned HOST_WIDE_INT) offset
2257 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2258 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
2259 plus_constant (XEXP (addr, 1), offset));
2260 else
2261 addr = plus_constant (addr, offset);
2262 }
2263
2264 new = change_address_1 (memref, mode, addr, validate);
2265
2266 /* Compute the new values of the memory attributes due to this adjustment.
2267 We add the offsets and update the alignment. */
2268 if (memoffset)
2269 memoffset = GEN_INT (offset + INTVAL (memoffset));
2270
2271 /* Compute the new alignment by taking the MIN of the alignment and the
2272 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2273 is zero. */
2274 if (offset != 0)
2275 memalign
2276 = MIN (memalign,
2277 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
2278
2279 /* We can compute the size in a number of ways. */
2280 if (GET_MODE (new) != BLKmode)
2281 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
2282 else if (MEM_SIZE (memref))
2283 size = plus_constant (MEM_SIZE (memref), -offset);
2284
2285 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
2286 memoffset, size, memalign, GET_MODE (new));
2287
2288 /* At some point, we should validate that this offset is within the object,
2289 if all the appropriate values are known. */
2290 return new;
2291 }
2292
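/* An illustrative sketch: reading the second word of a DImode MEM via
   the adjust_address macro, which wraps adjust_address_1 with VALIDATE
   and ADJUST nonzero.  DIMEM is an assumed operand.  */
#if 0
rtx word1 = adjust_address (dimem, SImode, UNITS_PER_WORD);
#endif
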
2293 /* Return a memory reference like MEMREF, but with its mode changed
2294 to MODE and its address changed to ADDR, which is assumed to be
2295 MEMREF offset by OFFSET bytes. If VALIDATE is
2296 nonzero, the memory address is forced to be valid. */
2297
2298 rtx
2299 adjust_automodify_address_1 (memref, mode, addr, offset, validate)
2300 rtx memref;
2301 enum machine_mode mode;
2302 rtx addr;
2303 HOST_WIDE_INT offset;
2304 int validate;
2305 {
2306 memref = change_address_1 (memref, VOIDmode, addr, validate);
2307 return adjust_address_1 (memref, mode, offset, validate, 0);
2308 }
2309
2310 /* Return a memory reference like MEMREF, but whose address is changed by
2311 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2312 known to be in OFFSET (possibly 1). */
2313
2314 rtx
2315 offset_address (memref, offset, pow2)
2316 rtx memref;
2317 rtx offset;
2318 unsigned HOST_WIDE_INT pow2;
2319 {
2320 rtx new, addr = XEXP (memref, 0);
2321
2322 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2323
2324 /* At this point we don't know _why_ the address is invalid. It
2325 could have secondary memory references, multiplies or anything.
2326
2327 However, if we did go and rearrange things, we can wind up not
2328 being able to recognize the magic around pic_offset_table_rtx.
2329 This stuff is fragile, and is yet another example of why it is
2330 bad to expose PIC machinery too early. */
2331 if (! memory_address_p (GET_MODE (memref), new)
2332 && GET_CODE (addr) == PLUS
2333 && XEXP (addr, 0) == pic_offset_table_rtx)
2334 {
2335 addr = force_reg (GET_MODE (addr), addr);
2336 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2337 }
2338
2339 update_temp_slot_address (XEXP (memref, 0), new);
2340 new = change_address_1 (memref, VOIDmode, new, 1);
2341
2342 /* Update the alignment to reflect the offset. Reset the offset, which
2343 we don't know. */
2344 MEM_ATTRS (new)
2345 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2346 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
2347 GET_MODE (new));
2348 return new;
2349 }
2350
2351 /* Return a memory reference like MEMREF, but with its address changed to
2352 ADDR. The caller is asserting that the actual piece of memory pointed
2353 to is the same, just the form of the address is being changed, such as
2354 by putting something into a register. */
2355
2356 rtx
2357 replace_equiv_address (memref, addr)
2358 rtx memref;
2359 rtx addr;
2360 {
2361 /* change_address_1 copies the memory attribute structure without change
2362 and that's exactly what we want here. */
2363 update_temp_slot_address (XEXP (memref, 0), addr);
2364 return change_address_1 (memref, VOIDmode, addr, 1);
2365 }
2366
2367 /* Likewise, but the reference is not required to be valid. */
2368
2369 rtx
2370 replace_equiv_address_nv (memref, addr)
2371 rtx memref;
2372 rtx addr;
2373 {
2374 return change_address_1 (memref, VOIDmode, addr, 0);
2375 }
2376
2377 /* Return a memory reference like MEMREF, but with its mode widened to
2378 MODE and offset by OFFSET. This would be used by targets that e.g.
2379 cannot issue QImode memory operations and have to use SImode memory
2380 operations plus masking logic. */
2381
2382 rtx
2383 widen_memory_access (memref, mode, offset)
2384 rtx memref;
2385 enum machine_mode mode;
2386 HOST_WIDE_INT offset;
2387 {
2388 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
2389 tree expr = MEM_EXPR (new);
2390 rtx memoffset = MEM_OFFSET (new);
2391 unsigned int size = GET_MODE_SIZE (mode);
2392
2393 /* If we don't know what offset we were at within the expression, then
2394 we can't know if we've overstepped the bounds. */
2395 if (! memoffset)
2396 expr = NULL_TREE;
2397
2398 while (expr)
2399 {
2400 if (TREE_CODE (expr) == COMPONENT_REF)
2401 {
2402 tree field = TREE_OPERAND (expr, 1);
2403
2404 if (! DECL_SIZE_UNIT (field))
2405 {
2406 expr = NULL_TREE;
2407 break;
2408 }
2409
2410 /* Is the field at least as large as the access? If so, ok,
2411 otherwise strip back to the containing structure. */
2412 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2413 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2414 && INTVAL (memoffset) >= 0)
2415 break;
2416
2417 if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
2418 {
2419 expr = NULL_TREE;
2420 break;
2421 }
2422
2423 expr = TREE_OPERAND (expr, 0);
2424 memoffset = (GEN_INT (INTVAL (memoffset)
2425 + tree_low_cst (DECL_FIELD_OFFSET (field), 1)
2426 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2427 / BITS_PER_UNIT)));
2428 }
2429 /* Similarly for the decl. */
2430 else if (DECL_P (expr)
2431 && DECL_SIZE_UNIT (expr)
2432 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2433 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2434 && (! memoffset || INTVAL (memoffset) >= 0))
2435 break;
2436 else
2437 {
2438 /* The widened memory access overflows the expression, which means
2439 that it could alias another expression. Zap it. */
2440 expr = NULL_TREE;
2441 break;
2442 }
2443 }
2444
2445 if (! expr)
2446 memoffset = NULL_RTX;
2447
2448 /* The widened memory may alias other stuff, so zap the alias set. */
2449 /* ??? Maybe use get_alias_set on any remaining expression. */
2450
2451 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2452 MEM_ALIGN (new), mode);
2453
2454 return new;
2455 }
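
/* An illustrative sketch: a target lacking byte loads widening a QImode
   reference to word size; the masking logic is the caller's problem.
   BYTE_MEM is an assumed operand.  */
#if 0
rtx wide = widen_memory_access (byte_mem, SImode, 0);
#endif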
2456 \f
2457 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2458
2459 rtx
2460 gen_label_rtx ()
2461 {
2462 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2463 NULL, label_num++, NULL);
2464 }
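
/* Sketch of the usual pattern: create the label, emit branches to it,
   then place it in the insn stream with emit_label.  */
#if 0
rtx label = gen_label_rtx ();
/* ... emit code that jumps to LABEL ... */
emit_label (label);
#endif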
2465 \f
2466 /* For procedure integration. */
2467
2468 /* Install new pointers to the first and last insns in the chain.
2469 Also, set cur_insn_uid to one higher than the last in use.
2470 Used for an inline-procedure after copying the insn chain. */
2471
2472 void
2473 set_new_first_and_last_insn (first, last)
2474 rtx first, last;
2475 {
2476 rtx insn;
2477
2478 first_insn = first;
2479 last_insn = last;
2480 cur_insn_uid = 0;
2481
2482 for (insn = first; insn; insn = NEXT_INSN (insn))
2483 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2484
2485 cur_insn_uid++;
2486 }
2487
2488 /* Set the range of label numbers found in the current function.
2489 This is used when belatedly compiling an inline function. */
2490
2491 void
2492 set_new_first_and_last_label_num (first, last)
2493 int first, last;
2494 {
2495 base_label_num = label_num;
2496 first_label_num = first;
2497 last_label_num = last;
2498 }
2499
2500 /* Set the last label number found in the current function.
2501 This is used when belatedly compiling an inline function. */
2502
2503 void
2504 set_new_last_label_num (last)
2505 int last;
2506 {
2507 base_label_num = label_num;
2508 last_label_num = last;
2509 }
2510 \f
2511 /* Restore all variables describing the current status from the structure *P.
2512 This is used after a nested function. */
2513
2514 void
2515 restore_emit_status (p)
2516 struct function *p ATTRIBUTE_UNUSED;
2517 {
2518 last_label_num = 0;
2519 }
2520 \f
2521 /* Go through all the RTL insn bodies and copy any invalid shared
2522 structure. This routine should only be called once. */
2523
2524 void
2525 unshare_all_rtl (fndecl, insn)
2526 tree fndecl;
2527 rtx insn;
2528 {
2529 tree decl;
2530
2531 /* Make sure that virtual parameters are not shared. */
2532 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2533 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2534
2535 /* Make sure that virtual stack slots are not shared. */
2536 unshare_all_decls (DECL_INITIAL (fndecl));
2537
2538 /* Unshare just about everything else. */
2539 unshare_all_rtl_1 (insn);
2540
2541 /* Make sure the addresses of stack slots found outside the insn chain
2542 (such as, in DECL_RTL of a variable) are not shared
2543 with the insn chain.
2544
2545 This special care is necessary when the stack slot MEM does not
2546 actually appear in the insn chain. If it does appear, its address
2547 is unshared from all else at that point. */
2548 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2549 }
2550
2551 /* Go through all the RTL insn bodies and copy any invalid shared
2552 structure, again. This is a fairly expensive thing to do, so it
2553 should be done sparingly. */
2554
2555 void
2556 unshare_all_rtl_again (insn)
2557 rtx insn;
2558 {
2559 rtx p;
2560 tree decl;
2561
2562 for (p = insn; p; p = NEXT_INSN (p))
2563 if (INSN_P (p))
2564 {
2565 reset_used_flags (PATTERN (p));
2566 reset_used_flags (REG_NOTES (p));
2567 reset_used_flags (LOG_LINKS (p));
2568 }
2569
2570 /* Make sure that virtual stack slots are not shared. */
2571 reset_used_decls (DECL_INITIAL (cfun->decl));
2572
2573 /* Make sure that virtual parameters are not shared. */
2574 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2575 reset_used_flags (DECL_RTL (decl));
2576
2577 reset_used_flags (stack_slot_list);
2578
2579 unshare_all_rtl (cfun->decl, insn);
2580 }
2581
2582 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2583 Assumes the mark bits are cleared at entry. */
2584
2585 static void
2586 unshare_all_rtl_1 (insn)
2587 rtx insn;
2588 {
2589 for (; insn; insn = NEXT_INSN (insn))
2590 if (INSN_P (insn))
2591 {
2592 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2593 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2594 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2595 }
2596 }
2597
2598 /* Go through all virtual stack slots of a function and copy any
2599 shared structure. */
2600 static void
2601 unshare_all_decls (blk)
2602 tree blk;
2603 {
2604 tree t;
2605
2606 /* Copy shared decls. */
2607 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2608 if (DECL_RTL_SET_P (t))
2609 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2610
2611 /* Now process sub-blocks. */
2612 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2613 unshare_all_decls (t);
2614 }
2615
2616 /* Go through all virtual stack slots of a function and mark them as
2617 not shared. */
2618 static void
2619 reset_used_decls (blk)
2620 tree blk;
2621 {
2622 tree t;
2623
2624 /* Mark decls. */
2625 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2626 if (DECL_RTL_SET_P (t))
2627 reset_used_flags (DECL_RTL (t));
2628
2629 /* Now process sub-blocks. */
2630 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2631 reset_used_decls (t);
2632 }
2633
2634 /* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
2635 placed in the result directly, rather than being copied. MAY_SHARE is
2636 either a MEM or an EXPR_LIST of MEMs. */
2637
2638 rtx
2639 copy_most_rtx (orig, may_share)
2640 rtx orig;
2641 rtx may_share;
2642 {
2643 rtx copy;
2644 int i, j;
2645 RTX_CODE code;
2646 const char *format_ptr;
2647
2648 if (orig == may_share
2649 || (GET_CODE (may_share) == EXPR_LIST
2650 && in_expr_list_p (may_share, orig)))
2651 return orig;
2652
2653 code = GET_CODE (orig);
2654
2655 switch (code)
2656 {
2657 case REG:
2658 case QUEUED:
2659 case CONST_INT:
2660 case CONST_DOUBLE:
2661 case CONST_VECTOR:
2662 case SYMBOL_REF:
2663 case CODE_LABEL:
2664 case PC:
2665 case CC0:
2666 return orig;
2667 default:
2668 break;
2669 }
2670
2671 copy = rtx_alloc (code);
2672 PUT_MODE (copy, GET_MODE (orig));
2673 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2674 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2675 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2676 RTX_FLAG (copy, integrated) = RTX_FLAG (orig, integrated);
2677 RTX_FLAG (copy, frame_related) = RTX_FLAG (orig, frame_related);
2678
2679 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2680
2681 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2682 {
2683 switch (*format_ptr++)
2684 {
2685 case 'e':
2686 XEXP (copy, i) = XEXP (orig, i);
2687 if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
2688 XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
2689 break;
2690
2691 case 'u':
2692 XEXP (copy, i) = XEXP (orig, i);
2693 break;
2694
2695 case 'E':
2696 case 'V':
2697 XVEC (copy, i) = XVEC (orig, i);
2698 if (XVEC (orig, i) != NULL)
2699 {
2700 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2701 for (j = 0; j < XVECLEN (copy, i); j++)
2702 XVECEXP (copy, i, j)
2703 = copy_most_rtx (XVECEXP (orig, i, j), may_share);
2704 }
2705 break;
2706
2707 case 'w':
2708 XWINT (copy, i) = XWINT (orig, i);
2709 break;
2710
2711 case 'n':
2712 case 'i':
2713 XINT (copy, i) = XINT (orig, i);
2714 break;
2715
2716 case 't':
2717 XTREE (copy, i) = XTREE (orig, i);
2718 break;
2719
2720 case 's':
2721 case 'S':
2722 XSTR (copy, i) = XSTR (orig, i);
2723 break;
2724
2725 case '0':
2726 /* Copy this through the wide int field; that's safest. */
2727 X0WINT (copy, i) = X0WINT (orig, i);
2728 break;
2729
2730 default:
2731 abort ();
2732 }
2733 }
2734 return copy;
2735 }
2736
2737 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2738 Recursively does the same for subexpressions. */
2739
2740 rtx
2741 copy_rtx_if_shared (orig)
2742 rtx orig;
2743 {
2744 rtx x = orig;
2745 int i;
2746 enum rtx_code code;
2747 const char *format_ptr;
2748 int copied = 0;
2749
2750 if (x == 0)
2751 return 0;
2752
2753 code = GET_CODE (x);
2754
2755 /* These types may be freely shared. */
2756
2757 switch (code)
2758 {
2759 case REG:
2760 case QUEUED:
2761 case CONST_INT:
2762 case CONST_DOUBLE:
2763 case CONST_VECTOR:
2764 case SYMBOL_REF:
2765 case CODE_LABEL:
2766 case PC:
2767 case CC0:
2768 case SCRATCH:
2769 /* SCRATCH must be shared because each one represents a distinct value. */
2770 return x;
2771
2772 case CONST:
2773 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2774 a LABEL_REF, it isn't sharable. */
2775 if (GET_CODE (XEXP (x, 0)) == PLUS
2776 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2777 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2778 return x;
2779 break;
2780
2781 case INSN:
2782 case JUMP_INSN:
2783 case CALL_INSN:
2784 case NOTE:
2785 case BARRIER:
2786 /* The chain of insns is not being copied. */
2787 return x;
2788
2789 case MEM:
2790 /* A MEM is allowed to be shared if its address is constant.
2791
2792 We used to allow sharing of MEMs which referenced
2793 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
2794 that can lose. instantiate_virtual_regs will not unshare
2795 the MEMs, and combine may change the structure of the address
2796 because it looks safe and profitable in one context, but
2797 in some other context it creates unrecognizable RTL. */
2798 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
2799 return x;
2800
2801 break;
2802
2803 default:
2804 break;
2805 }
2806
2807 /* This rtx may not be shared. If it has already been seen,
2808 replace it with a copy of itself. */
2809
2810 if (RTX_FLAG (x, used))
2811 {
2812 rtx copy;
2813
2814 copy = rtx_alloc (code);
2815 memcpy (copy, x,
2816 (sizeof (*copy) - sizeof (copy->fld)
2817 + sizeof (copy->fld[0]) * GET_RTX_LENGTH (code)));
2818 x = copy;
2819 copied = 1;
2820 }
2821 RTX_FLAG (x, used) = 1;
2822
2823 /* Now scan the subexpressions recursively.
2824 We can store any replaced subexpressions directly into X
2825 since we know X is not shared! Any vectors in X
2826 must be copied if X was copied. */
2827
2828 format_ptr = GET_RTX_FORMAT (code);
2829
2830 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2831 {
2832 switch (*format_ptr++)
2833 {
2834 case 'e':
2835 XEXP (x, i) = copy_rtx_if_shared (XEXP (x, i));
2836 break;
2837
2838 case 'E':
2839 if (XVEC (x, i) != NULL)
2840 {
2841 int j;
2842 int len = XVECLEN (x, i);
2843
2844 if (copied && len > 0)
2845 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2846 for (j = 0; j < len; j++)
2847 XVECEXP (x, i, j) = copy_rtx_if_shared (XVECEXP (x, i, j));
2848 }
2849 break;
2850 }
2851 }
2852 return x;
2853 }
2854
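/* An illustrative sketch of the protocol: clear the used bits first
   (see reset_used_flags below); any rtx then reached twice during the
   copying pass gets unshared.  */
#if 0
reset_used_flags (PATTERN (insn));
PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
#endif
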
2855 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2856 to look for shared sub-parts. */
2857
2858 void
2859 reset_used_flags (x)
2860 rtx x;
2861 {
2862 int i, j;
2863 enum rtx_code code;
2864 const char *format_ptr;
2865
2866 if (x == 0)
2867 return;
2868
2869 code = GET_CODE (x);
2870
2871 /* These types may be freely shared so we needn't do any resetting
2872 for them. */
2873
2874 switch (code)
2875 {
2876 case REG:
2877 case QUEUED:
2878 case CONST_INT:
2879 case CONST_DOUBLE:
2880 case CONST_VECTOR:
2881 case SYMBOL_REF:
2882 case CODE_LABEL:
2883 case PC:
2884 case CC0:
2885 return;
2886
2887 case INSN:
2888 case JUMP_INSN:
2889 case CALL_INSN:
2890 case NOTE:
2891 case LABEL_REF:
2892 case BARRIER:
2893 /* The chain of insns is not being copied. */
2894 return;
2895
2896 default:
2897 break;
2898 }
2899
2900 RTX_FLAG (x, used) = 0;
2901
2902 format_ptr = GET_RTX_FORMAT (code);
2903 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2904 {
2905 switch (*format_ptr++)
2906 {
2907 case 'e':
2908 reset_used_flags (XEXP (x, i));
2909 break;
2910
2911 case 'E':
2912 for (j = 0; j < XVECLEN (x, i); j++)
2913 reset_used_flags (XVECEXP (x, i, j));
2914 break;
2915 }
2916 }
2917 }
2918 \f
2919 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2920 Return X or the rtx for the pseudo reg the value of X was copied into.
2921 OTHER must be valid as a SET_DEST. */
2922
2923 rtx
2924 make_safe_from (x, other)
2925 rtx x, other;
2926 {
2927 while (1)
2928 switch (GET_CODE (other))
2929 {
2930 case SUBREG:
2931 other = SUBREG_REG (other);
2932 break;
2933 case STRICT_LOW_PART:
2934 case SIGN_EXTEND:
2935 case ZERO_EXTEND:
2936 other = XEXP (other, 0);
2937 break;
2938 default:
2939 goto done;
2940 }
2941 done:
2942 if ((GET_CODE (other) == MEM
2943 && ! CONSTANT_P (x)
2944 && GET_CODE (x) != REG
2945 && GET_CODE (x) != SUBREG)
2946 || (GET_CODE (other) == REG
2947 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2948 || reg_mentioned_p (other, x))))
2949 {
2950 rtx temp = gen_reg_rtx (GET_MODE (x));
2951 emit_move_insn (temp, x);
2952 return temp;
2953 }
2954 return x;
2955 }
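
/* An illustrative sketch: protect X before TARGET is clobbered so the
   old value of X can still be used afterwards.  X, TARGET and VALUE
   are assumed operands.  */
#if 0
x = make_safe_from (x, target);  /* may copy X into a fresh pseudo */
emit_move_insn (target, value);
/* ... X still yields its old value here ... */
#endif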
2956 \f
2957 /* Emission of insns (adding them to the doubly-linked list). */
2958
2959 /* Return the first insn of the current sequence or current function. */
2960
2961 rtx
2962 get_insns ()
2963 {
2964 return first_insn;
2965 }
2966
2967 /* Specify a new insn as the first in the chain. */
2968
2969 void
2970 set_first_insn (insn)
2971 rtx insn;
2972 {
2973 if (PREV_INSN (insn) != 0)
2974 abort ();
2975 first_insn = insn;
2976 }
2977
2978 /* Return the last insn emitted in current sequence or current function. */
2979
2980 rtx
2981 get_last_insn ()
2982 {
2983 return last_insn;
2984 }
2985
2986 /* Specify a new insn as the last in the chain. */
2987
2988 void
2989 set_last_insn (insn)
2990 rtx insn;
2991 {
2992 if (NEXT_INSN (insn) != 0)
2993 abort ();
2994 last_insn = insn;
2995 }
2996
2997 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2998
2999 rtx
3000 get_last_insn_anywhere ()
3001 {
3002 struct sequence_stack *stack;
3003 if (last_insn)
3004 return last_insn;
3005 for (stack = seq_stack; stack; stack = stack->next)
3006 if (stack->last != 0)
3007 return stack->last;
3008 return 0;
3009 }
3010
3011 /* Return the first nonnote insn emitted in current sequence or current
3012 function. This routine looks inside SEQUENCEs. */
3013
3014 rtx
3015 get_first_nonnote_insn ()
3016 {
3017 rtx insn = first_insn;
3018
3019 while (insn)
3020 {
3021 insn = next_insn (insn);
3022 if (insn == 0 || GET_CODE (insn) != NOTE)
3023 break;
3024 }
3025
3026 return insn;
3027 }
3028
3029 /* Return the last nonnote insn emitted in current sequence or current
3030 function. This routine looks inside SEQUENCEs. */
3031
3032 rtx
3033 get_last_nonnote_insn ()
3034 {
3035 rtx insn = last_insn;
3036
3037 while (insn)
3038 {
3039 insn = previous_insn (insn);
3040 if (insn == 0 || GET_CODE (insn) != NOTE)
3041 break;
3042 }
3043
3044 return insn;
3045 }
3046
3047 /* Return a number larger than any instruction's uid in this function. */
3048
3049 int
3050 get_max_uid ()
3051 {
3052 return cur_insn_uid;
3053 }
3054
3055 /* Renumber instructions so that no instruction UIDs are wasted. */
3056
3057 void
3058 renumber_insns (stream)
3059 FILE *stream;
3060 {
3061 rtx insn;
3062
3063 /* If we're not supposed to renumber instructions, don't. */
3064 if (!flag_renumber_insns)
3065 return;
3066
3067 /* If there aren't that many instructions, then it's not really
3068 worth renumbering them. */
3069 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
3070 return;
3071
3072 cur_insn_uid = 1;
3073
3074 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3075 {
3076 if (stream)
3077 fprintf (stream, "Renumbering insn %d to %d\n",
3078 INSN_UID (insn), cur_insn_uid);
3079 INSN_UID (insn) = cur_insn_uid++;
3080 }
3081 }
3082 \f
3083 /* Return the next insn. If it is a SEQUENCE, return the first insn
3084 of the sequence. */
3085
3086 rtx
3087 next_insn (insn)
3088 rtx insn;
3089 {
3090 if (insn)
3091 {
3092 insn = NEXT_INSN (insn);
3093 if (insn && GET_CODE (insn) == INSN
3094 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3095 insn = XVECEXP (PATTERN (insn), 0, 0);
3096 }
3097
3098 return insn;
3099 }
3100
3101 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3102 of the sequence. */
3103
3104 rtx
3105 previous_insn (insn)
3106 rtx insn;
3107 {
3108 if (insn)
3109 {
3110 insn = PREV_INSN (insn);
3111 if (insn && GET_CODE (insn) == INSN
3112 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3113 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3114 }
3115
3116 return insn;
3117 }
3118
3119 /* Return the next insn after INSN that is not a NOTE. This routine does not
3120 look inside SEQUENCEs. */
3121
3122 rtx
3123 next_nonnote_insn (insn)
3124 rtx insn;
3125 {
3126 while (insn)
3127 {
3128 insn = NEXT_INSN (insn);
3129 if (insn == 0 || GET_CODE (insn) != NOTE)
3130 break;
3131 }
3132
3133 return insn;
3134 }
3135
3136 /* Return the previous insn before INSN that is not a NOTE. This routine does
3137 not look inside SEQUENCEs. */
3138
3139 rtx
3140 prev_nonnote_insn (insn)
3141 rtx insn;
3142 {
3143 while (insn)
3144 {
3145 insn = PREV_INSN (insn);
3146 if (insn == 0 || GET_CODE (insn) != NOTE)
3147 break;
3148 }
3149
3150 return insn;
3151 }
3152
3153 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3154 or 0, if there is none. This routine does not look inside
3155 SEQUENCEs. */
3156
3157 rtx
3158 next_real_insn (insn)
3159 rtx insn;
3160 {
3161 while (insn)
3162 {
3163 insn = NEXT_INSN (insn);
3164 if (insn == 0 || GET_CODE (insn) == INSN
3165 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
3166 break;
3167 }
3168
3169 return insn;
3170 }
3171
3172 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3173 or 0, if there is none. This routine does not look inside
3174 SEQUENCEs. */
3175
3176 rtx
3177 prev_real_insn (insn)
3178 rtx insn;
3179 {
3180 while (insn)
3181 {
3182 insn = PREV_INSN (insn);
3183 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
3184 || GET_CODE (insn) == JUMP_INSN)
3185 break;
3186 }
3187
3188 return insn;
3189 }
3190
3191 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3192 This routine does not look inside SEQUENCEs. */
3193
3194 rtx
3195 last_call_insn ()
3196 {
3197 rtx insn;
3198
3199 for (insn = get_last_insn ();
3200 insn && GET_CODE (insn) != CALL_INSN;
3201 insn = PREV_INSN (insn))
3202 ;
3203
3204 return insn;
3205 }
3206
3207 /* Return nonzero if INSN really does something: it is a CALL_INSN or
3208 a JUMP_INSN, or an INSN whose pattern (once reload has completed) is
3209 not merely a USE or CLOBBER. */
3210
3211 int
3212 active_insn_p (insn)
3213 rtx insn;
3214 {
3215 return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
3216 || (GET_CODE (insn) == INSN
3217 && (! reload_completed
3218 || (GET_CODE (PATTERN (insn)) != USE
3219 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3220 }
3221
3222 rtx
3223 next_active_insn (insn)
3224 rtx insn;
3225 {
3226 while (insn)
3227 {
3228 insn = NEXT_INSN (insn);
3229 if (insn == 0 || active_insn_p (insn))
3230 break;
3231 }
3232
3233 return insn;
3234 }
3235
3236 /* Find the last insn before INSN that really does something. This routine
3237 does not look inside SEQUENCEs. Until reload has completed, this is the
3238 same as prev_real_insn. */
3239
3240 rtx
3241 prev_active_insn (insn)
3242 rtx insn;
3243 {
3244 while (insn)
3245 {
3246 insn = PREV_INSN (insn);
3247 if (insn == 0 || active_insn_p (insn))
3248 break;
3249 }
3250
3251 return insn;
3252 }
3253
3254 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3255
3256 rtx
3257 next_label (insn)
3258 rtx insn;
3259 {
3260 while (insn)
3261 {
3262 insn = NEXT_INSN (insn);
3263 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3264 break;
3265 }
3266
3267 return insn;
3268 }
3269
3270 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3271
3272 rtx
3273 prev_label (insn)
3274 rtx insn;
3275 {
3276 while (insn)
3277 {
3278 insn = PREV_INSN (insn);
3279 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3280 break;
3281 }
3282
3283 return insn;
3284 }
3285 \f
3286 #ifdef HAVE_cc0
3287 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3288 and REG_CC_USER notes so we can find it. */
3289
3290 void
3291 link_cc0_insns (insn)
3292 rtx insn;
3293 {
3294 rtx user = next_nonnote_insn (insn);
3295
3296 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
3297 user = XVECEXP (PATTERN (user), 0, 0);
3298
3299 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3300 REG_NOTES (user));
3301 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
3302 }
3303
3304 /* Return the next insn that uses CC0 after INSN, which is assumed to
3305 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3306 applied to the result of this function should yield INSN).
3307
3308 Normally, this is simply the next insn. However, if a REG_CC_USER note
3309 is present, it contains the insn that uses CC0.
3310
3311 Return 0 if we can't find the insn. */
3312
3313 rtx
3314 next_cc0_user (insn)
3315 rtx insn;
3316 {
3317 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3318
3319 if (note)
3320 return XEXP (note, 0);
3321
3322 insn = next_nonnote_insn (insn);
3323 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
3324 insn = XVECEXP (PATTERN (insn), 0, 0);
3325
3326 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3327 return insn;
3328
3329 return 0;
3330 }
3331
3332 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3333 note, it is the previous insn. */
3334
3335 rtx
3336 prev_cc0_setter (insn)
3337 rtx insn;
3338 {
3339 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3340
3341 if (note)
3342 return XEXP (note, 0);
3343
3344 insn = prev_nonnote_insn (insn);
3345 if (! sets_cc0_p (PATTERN (insn)))
3346 abort ();
3347
3348 return insn;
3349 }
3350 #endif
3351
3352 /* Increment the label uses for all labels present in X. */
3353
3354 static void
3355 mark_label_nuses (x)
3356 rtx x;
3357 {
3358 enum rtx_code code;
3359 int i, j;
3360 const char *fmt;
3361
3362 code = GET_CODE (x);
3363 if (code == LABEL_REF)
3364 LABEL_NUSES (XEXP (x, 0))++;
3365
3366 fmt = GET_RTX_FORMAT (code);
3367 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3368 {
3369 if (fmt[i] == 'e')
3370 mark_label_nuses (XEXP (x, i));
3371 else if (fmt[i] == 'E')
3372 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3373 mark_label_nuses (XVECEXP (x, i, j));
3374 }
3375 }
3376
3377 \f
3378 /* Try splitting insns that can be split for better scheduling.
3379 PAT is the pattern which might be split.
3380 TRIAL is the insn providing PAT.
3381 LAST is nonzero if we should return the last insn of the sequence produced.
3382
3383 If this routine succeeds in splitting, it returns the first or last
3384 replacement insn depending on the value of LAST. Otherwise, it
3385 returns TRIAL. If the insn to be returned can be split, it will be. */
3386
3387 rtx
3388 try_split (pat, trial, last)
3389 rtx pat, trial;
3390 int last;
3391 {
3392 rtx before = PREV_INSN (trial);
3393 rtx after = NEXT_INSN (trial);
3394 int has_barrier = 0;
3395 rtx tem;
3396 rtx note, seq;
3397 int probability;
3398 rtx insn_last, insn;
3399 int njumps = 0;
3400
3401 if (any_condjump_p (trial)
3402 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3403 split_branch_probability = INTVAL (XEXP (note, 0));
3404 probability = split_branch_probability;
3405
3406 seq = split_insns (pat, trial);
3407
3408 split_branch_probability = -1;
3409
3410 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3411 We may need to handle this specially. */
3412 if (after && GET_CODE (after) == BARRIER)
3413 {
3414 has_barrier = 1;
3415 after = NEXT_INSN (after);
3416 }
3417
3418 if (!seq)
3419 return trial;
3420
3421 /* Avoid infinite loop if any insn of the result matches
3422 the original pattern. */
3423 insn_last = seq;
3424 while (1)
3425 {
3426 if (INSN_P (insn_last)
3427 && rtx_equal_p (PATTERN (insn_last), pat))
3428 return trial;
3429 if (!NEXT_INSN (insn_last))
3430 break;
3431 insn_last = NEXT_INSN (insn_last);
3432 }
3433
3434 /* Mark labels. */
3435 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3436 {
3437 if (GET_CODE (insn) == JUMP_INSN)
3438 {
3439 mark_jump_label (PATTERN (insn), insn, 0);
3440 njumps++;
3441 if (probability != -1
3442 && any_condjump_p (insn)
3443 && !find_reg_note (insn, REG_BR_PROB, 0))
3444 {
3445 /* We can preserve the REG_BR_PROB notes only if exactly
3446 one jump is created, otherwise the machine description
3447 is responsible for this step using the
3448 split_branch_probability variable. */
3449 if (njumps != 1)
3450 abort ();
3451 REG_NOTES (insn)
3452 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3453 GEN_INT (probability),
3454 REG_NOTES (insn));
3455 }
3456 }
3457 }
3458
3459 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3460 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3461 if (GET_CODE (trial) == CALL_INSN)
3462 {
3463 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3464 if (GET_CODE (insn) == CALL_INSN)
3465 {
3466 CALL_INSN_FUNCTION_USAGE (insn)
3467 = CALL_INSN_FUNCTION_USAGE (trial);
3468 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3469 }
3470 }
3471
3472 /* Copy notes, particularly those related to the CFG. */
3473 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3474 {
3475 switch (REG_NOTE_KIND (note))
3476 {
3477 case REG_EH_REGION:
3478 insn = insn_last;
3479 while (insn != NULL_RTX)
3480 {
3481 if (GET_CODE (insn) == CALL_INSN
3482 || (flag_non_call_exceptions
3483 && may_trap_p (PATTERN (insn))))
3484 REG_NOTES (insn)
3485 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3486 XEXP (note, 0),
3487 REG_NOTES (insn));
3488 insn = PREV_INSN (insn);
3489 }
3490 break;
3491
3492 case REG_NORETURN:
3493 case REG_SETJMP:
3494 case REG_ALWAYS_RETURN:
3495 insn = insn_last;
3496 while (insn != NULL_RTX)
3497 {
3498 if (GET_CODE (insn) == CALL_INSN)
3499 REG_NOTES (insn)
3500 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3501 XEXP (note, 0),
3502 REG_NOTES (insn));
3503 insn = PREV_INSN (insn);
3504 }
3505 break;
3506
3507 case REG_NON_LOCAL_GOTO:
3508 insn = insn_last;
3509 while (insn != NULL_RTX)
3510 {
3511 if (GET_CODE (insn) == JUMP_INSN)
3512 REG_NOTES (insn)
3513 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3514 XEXP (note, 0),
3515 REG_NOTES (insn));
3516 insn = PREV_INSN (insn);
3517 }
3518 break;
3519
3520 default:
3521 break;
3522 }
3523 }
3524
3525 /* If there are LABELs inside the split insns, increment the
3526 usage count so we don't delete the label. */
3527 if (GET_CODE (trial) == INSN)
3528 {
3529 insn = insn_last;
3530 while (insn != NULL_RTX)
3531 {
3532 if (GET_CODE (insn) == INSN)
3533 mark_label_nuses (PATTERN (insn));
3534
3535 insn = PREV_INSN (insn);
3536 }
3537 }
3538
3539 tem = emit_insn_after_scope (seq, trial, INSN_SCOPE (trial));
3540
3541 delete_insn (trial);
3542 if (has_barrier)
3543 emit_barrier_after (tem);
3544
3545 /* Recursively call try_split for each new insn created; by the
3546 time control returns here that insn will be fully split, so
3547 set LAST and continue from the insn after the one returned.
3548 We can't use next_active_insn here since AFTER may be a note.
3549 Ignore deleted insns, which can occur if not optimizing. */
3550 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3551 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3552 tem = try_split (PATTERN (tem), tem, 1);
3553
3554 /* Return either the first or the last insn, depending on which was
3555 requested. */
3556 return last
3557 ? (after ? PREV_INSN (after) : last_insn)
3558 : NEXT_INSN (before);
3559 }
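
/* An illustrative sketch: splitting every insn in the stream, roughly
   what the split_all_insns pass boils down to.  */
#if 0
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
  if (INSN_P (insn))
    insn = try_split (PATTERN (insn), insn, 1);
#endif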
3560 \f
3561 /* Make and return an INSN rtx, initializing all its slots.
3562 Store PATTERN in the pattern slot. */
3563
3564 rtx
3565 make_insn_raw (pattern)
3566 rtx pattern;
3567 {
3568 rtx insn;
3569
3570 insn = rtx_alloc (INSN);
3571
3572 INSN_UID (insn) = cur_insn_uid++;
3573 PATTERN (insn) = pattern;
3574 INSN_CODE (insn) = -1;
3575 LOG_LINKS (insn) = NULL;
3576 REG_NOTES (insn) = NULL;
3577 INSN_SCOPE (insn) = NULL;
3578 BLOCK_FOR_INSN (insn) = NULL;
3579
3580 #ifdef ENABLE_RTL_CHECKING
3581 if (insn
3582 && INSN_P (insn)
3583 && (returnjump_p (insn)
3584 || (GET_CODE (insn) == SET
3585 && SET_DEST (insn) == pc_rtx)))
3586 {
3587 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
3588 debug_rtx (insn);
3589 }
3590 #endif
3591
3592 return insn;
3593 }
3594
3595 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3596
3597 static rtx
3598 make_jump_insn_raw (pattern)
3599 rtx pattern;
3600 {
3601 rtx insn;
3602
3603 insn = rtx_alloc (JUMP_INSN);
3604 INSN_UID (insn) = cur_insn_uid++;
3605
3606 PATTERN (insn) = pattern;
3607 INSN_CODE (insn) = -1;
3608 LOG_LINKS (insn) = NULL;
3609 REG_NOTES (insn) = NULL;
3610 JUMP_LABEL (insn) = NULL;
3611 INSN_SCOPE (insn) = NULL;
3612 BLOCK_FOR_INSN (insn) = NULL;
3613
3614 return insn;
3615 }
3616
3617 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3618
3619 static rtx
3620 make_call_insn_raw (pattern)
3621 rtx pattern;
3622 {
3623 rtx insn;
3624
3625 insn = rtx_alloc (CALL_INSN);
3626 INSN_UID (insn) = cur_insn_uid++;
3627
3628 PATTERN (insn) = pattern;
3629 INSN_CODE (insn) = -1;
3630 LOG_LINKS (insn) = NULL;
3631 REG_NOTES (insn) = NULL;
3632 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3633 INSN_SCOPE (insn) = NULL;
3634 BLOCK_FOR_INSN (insn) = NULL;
3635
3636 return insn;
3637 }
3638 \f
3639 /* Add INSN to the end of the doubly-linked list.
3640 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3641
3642 void
3643 add_insn (insn)
3644 rtx insn;
3645 {
3646 PREV_INSN (insn) = last_insn;
3647 NEXT_INSN (insn) = 0;
3648
3649 if (NULL != last_insn)
3650 NEXT_INSN (last_insn) = insn;
3651
3652 if (NULL == first_insn)
3653 first_insn = insn;
3654
3655 last_insn = insn;
3656 }
3657
3658 /* Add INSN into the doubly-linked list after insn AFTER. This and
3659 the next should be the only functions called to insert an insn once
3660 delay slots have been filled since only they know how to update a
3661 SEQUENCE. */
3662
3663 void
3664 add_insn_after (insn, after)
3665 rtx insn, after;
3666 {
3667 rtx next = NEXT_INSN (after);
3668 basic_block bb;
3669
3670 if (optimize && INSN_DELETED_P (after))
3671 abort ();
3672
3673 NEXT_INSN (insn) = next;
3674 PREV_INSN (insn) = after;
3675
3676 if (next)
3677 {
3678 PREV_INSN (next) = insn;
3679 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3680 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3681 }
3682 else if (last_insn == after)
3683 last_insn = insn;
3684 else
3685 {
3686 struct sequence_stack *stack = seq_stack;
3687 /* Scan all pending sequences too. */
3688 for (; stack; stack = stack->next)
3689 if (after == stack->last)
3690 {
3691 stack->last = insn;
3692 break;
3693 }
3694
3695 if (stack == 0)
3696 abort ();
3697 }
3698
3699 if (GET_CODE (after) != BARRIER
3700 && GET_CODE (insn) != BARRIER
3701 && (bb = BLOCK_FOR_INSN (after)))
3702 {
3703 set_block_for_insn (insn, bb);
3704 if (INSN_P (insn))
3705 bb->flags |= BB_DIRTY;
3706 /* This should not happen, as the first insn in a BB is always
3707 either a NOTE or a LABEL. */
3708 if (bb->end == after
3709 /* Avoid clobbering of structure when creating new BB. */
3710 && GET_CODE (insn) != BARRIER
3711 && (GET_CODE (insn) != NOTE
3712 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3713 bb->end = insn;
3714 }
3715
3716 NEXT_INSN (after) = insn;
3717 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
3718 {
3719 rtx sequence = PATTERN (after);
3720 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3721 }
3722 }
3723
3724 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3725 the previous should be the only functions called to insert an insn once
3726 delay slots have been filled since only they know how to update a
3727 SEQUENCE. */
3728
3729 void
3730 add_insn_before (insn, before)
3731 rtx insn, before;
3732 {
3733 rtx prev = PREV_INSN (before);
3734 basic_block bb;
3735
3736 if (optimize && INSN_DELETED_P (before))
3737 abort ();
3738
3739 PREV_INSN (insn) = prev;
3740 NEXT_INSN (insn) = before;
3741
3742 if (prev)
3743 {
3744 NEXT_INSN (prev) = insn;
3745 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3746 {
3747 rtx sequence = PATTERN (prev);
3748 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3749 }
3750 }
3751 else if (first_insn == before)
3752 first_insn = insn;
3753 else
3754 {
3755 struct sequence_stack *stack = seq_stack;
3756 /* Scan all pending sequences too. */
3757 for (; stack; stack = stack->next)
3758 if (before == stack->first)
3759 {
3760 stack->first = insn;
3761 break;
3762 }
3763
3764 if (stack == 0)
3765 abort ();
3766 }
3767
3768 if (GET_CODE (before) != BARRIER
3769 && GET_CODE (insn) != BARRIER
3770 && (bb = BLOCK_FOR_INSN (before)))
3771 {
3772 set_block_for_insn (insn, bb);
3773 if (INSN_P (insn))
3774 bb->flags |= BB_DIRTY;
3775 /* This should not happen, as the first insn in a BB is always
3776 either a NOTE or a LABEL. */
3777 if (bb->head == insn
3778 /* Avoid clobbering of structure when creating new BB. */
3779 && GET_CODE (insn) != BARRIER
3780 && (GET_CODE (insn) != NOTE
3781 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3782 abort ();
3783 }
3784
3785 PREV_INSN (before) = insn;
3786 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
3787 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3788 }
3789
3790 /* Remove an insn from its doubly-linked list. This function knows how
3791 to handle sequences. */
3792 void
3793 remove_insn (insn)
3794 rtx insn;
3795 {
3796 rtx next = NEXT_INSN (insn);
3797 rtx prev = PREV_INSN (insn);
3798 basic_block bb;
3799
3800 if (prev)
3801 {
3802 NEXT_INSN (prev) = next;
3803 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3804 {
3805 rtx sequence = PATTERN (prev);
3806 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3807 }
3808 }
3809 else if (first_insn == insn)
3810 first_insn = next;
3811 else
3812 {
3813 struct sequence_stack *stack = seq_stack;
3814 /* Scan all pending sequences too. */
3815 for (; stack; stack = stack->next)
3816 if (insn == stack->first)
3817 {
3818 stack->first = next;
3819 break;
3820 }
3821
3822 if (stack == 0)
3823 abort ();
3824 }
3825
3826 if (next)
3827 {
3828 PREV_INSN (next) = prev;
3829 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3830 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3831 }
3832 else if (last_insn == insn)
3833 last_insn = prev;
3834 else
3835 {
3836 struct sequence_stack *stack = seq_stack;
3837 /* Scan all pending sequences too. */
3838 for (; stack; stack = stack->next)
3839 if (insn == stack->last)
3840 {
3841 stack->last = prev;
3842 break;
3843 }
3844
3845 if (stack == 0)
3846 abort ();
3847 }
3848 if (GET_CODE (insn) != BARRIER
3849 && (bb = BLOCK_FOR_INSN (insn)))
3850 {
3851 if (INSN_P (insn))
3852 bb->flags |= BB_DIRTY;
3853 if (bb->head == insn)
3854 {
3855 /* Never ever delete the basic block note without deleting whole
3856 basic block. */
3857 if (GET_CODE (insn) == NOTE)
3858 abort ();
3859 bb->head = next;
3860 }
3861 if (bb->end == insn)
3862 bb->end = prev;
3863 }
3864 }
3865
3866 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3867
3868 void
3869 add_function_usage_to (call_insn, call_fusage)
3870 rtx call_insn, call_fusage;
3871 {
3872 if (! call_insn || GET_CODE (call_insn) != CALL_INSN)
3873 abort ();
3874
3875 /* Put the register usage information on the CALL. If there is already
3876 some usage information, put ours at the end. */
3877 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3878 {
3879 rtx link;
3880
3881 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3882 link = XEXP (link, 1))
3883 ;
3884
3885 XEXP (link, 1) = call_fusage;
3886 }
3887 else
3888 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3889 }
3890
3891 /* Delete all insns made since FROM.
3892 FROM becomes the new last instruction. */
3893
3894 void
3895 delete_insns_since (from)
3896 rtx from;
3897 {
3898 if (from == 0)
3899 first_insn = 0;
3900 else
3901 NEXT_INSN (from) = 0;
3902 last_insn = from;
3903 }
3904
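/* An illustrative sketch: the usual checkpoint/rollback idiom built on
   get_last_insn and delete_insns_since.  TARGET, SOURCE and FAILED are
   assumptions.  */
#if 0
rtx last = get_last_insn ();
emit_move_insn (target, source);  /* tentatively emit some code */
if (failed)
  delete_insns_since (last);      /* discard everything just emitted */
#endif
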
3905 /* This function is deprecated; please use sequences instead.
3906
3907 Move a consecutive bunch of insns to a different place in the chain.
3908 The insns to be moved are those between FROM and TO.
3909 They are moved to a new position after the insn AFTER.
3910 AFTER must not be FROM or TO or any insn in between.
3911
3912 This function does not know about SEQUENCEs and hence should not be
3913 called after delay-slot filling has been done. */
3914
3915 void
3916 reorder_insns_nobb (from, to, after)
3917 rtx from, to, after;
3918 {
3919 /* Splice this bunch out of where it is now. */
3920 if (PREV_INSN (from))
3921 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3922 if (NEXT_INSN (to))
3923 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3924 if (last_insn == to)
3925 last_insn = PREV_INSN (from);
3926 if (first_insn == from)
3927 first_insn = NEXT_INSN (to);
3928
3929 /* Make the new neighbors point to it and it to them. */
3930 if (NEXT_INSN (after))
3931 PREV_INSN (NEXT_INSN (after)) = to;
3932
3933 NEXT_INSN (to) = NEXT_INSN (after);
3934 PREV_INSN (from) = after;
3935 NEXT_INSN (after) = from;
3936 if (after == last_insn)
3937 last_insn = to;
3938 }
3939
3940 /* Same as the function above, but takes care to update basic block boundaries. */
3941 void
3942 reorder_insns (from, to, after)
3943 rtx from, to, after;
3944 {
3945 rtx prev = PREV_INSN (from);
3946 basic_block bb, bb2;
3947
3948 reorder_insns_nobb (from, to, after);
3949
3950 if (GET_CODE (after) != BARRIER
3951 && (bb = BLOCK_FOR_INSN (after)))
3952 {
3953 rtx x;
3954 bb->flags |= BB_DIRTY;
3955
3956 if (GET_CODE (from) != BARRIER
3957 && (bb2 = BLOCK_FOR_INSN (from)))
3958 {
3959 if (bb2->end == to)
3960 bb2->end = prev;
3961 bb2->flags |= BB_DIRTY;
3962 }
3963
3964 if (bb->end == after)
3965 bb->end = to;
3966
3967 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3968 set_block_for_insn (x, bb);
3969 }
3970 }
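
/* Moving a single insn is the degenerate case where FROM == TO
   (a sketch; INSN and AFTER are assumed to satisfy the constraints
   documented above):

     reorder_insns (insn, insn, after);  */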
3971
3972 /* Return the line note insn at or preceding INSN, if any. */
3973
3974 static rtx
3975 find_line_note (insn)
3976 rtx insn;
3977 {
3978 if (no_line_numbers)
3979 return 0;
3980
3981 for (; insn; insn = PREV_INSN (insn))
3982 if (GET_CODE (insn) == NOTE
3983 && NOTE_LINE_NUMBER (insn) >= 0)
3984 break;
3985
3986 return insn;
3987 }
3988
3989 /* Like reorder_insns, but inserts line notes to preserve the line numbers
3990 of the moved insns when debugging. This may insert a note between AFTER
3991 and FROM, and another one after TO. */
3992
3993 void
3994 reorder_insns_with_line_notes (from, to, after)
3995 rtx from, to, after;
3996 {
3997 rtx from_line = find_line_note (from);
3998 rtx after_line = find_line_note (after);
3999
4000 reorder_insns (from, to, after);
4001
4002 if (from_line == after_line)
4003 return;
4004
4005 if (from_line)
4006 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
4007 NOTE_LINE_NUMBER (from_line),
4008 after);
4009 if (after_line)
4010 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
4011 NOTE_LINE_NUMBER (after_line),
4012 to);
4013 }
4014
4015 /* Remove unnecessary notes from the instruction stream. */
4016
4017 void
4018 remove_unnecessary_notes ()
4019 {
4020 rtx block_stack = NULL_RTX;
4021 rtx eh_stack = NULL_RTX;
4022 rtx insn;
4023 rtx next;
4024 rtx tmp;
4025
4026 /* We must not remove the first instruction in the function because
4027 the compiler depends on the first instruction being a note. */
4028 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
4029 {
4030 /* Remember what's next. */
4031 next = NEXT_INSN (insn);
4032
4033 /* We're only interested in notes. */
4034 if (GET_CODE (insn) != NOTE)
4035 continue;
4036
4037 switch (NOTE_LINE_NUMBER (insn))
4038 {
4039 case NOTE_INSN_DELETED:
4040 case NOTE_INSN_LOOP_END_TOP_COND:
4041 remove_insn (insn);
4042 break;
4043
4044 case NOTE_INSN_EH_REGION_BEG:
4045 eh_stack = alloc_INSN_LIST (insn, eh_stack);
4046 break;
4047
4048 case NOTE_INSN_EH_REGION_END:
4049 /* Too many end notes. */
4050 if (eh_stack == NULL_RTX)
4051 abort ();
4052 /* Mismatched nesting. */
4053 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
4054 abort ();
4055 tmp = eh_stack;
4056 eh_stack = XEXP (eh_stack, 1);
4057 free_INSN_LIST_node (tmp);
4058 break;
4059
4060 case NOTE_INSN_BLOCK_BEG:
4061 /* By now, all notes indicating lexical blocks should have
4062 NOTE_BLOCK filled in. */
4063 if (NOTE_BLOCK (insn) == NULL_TREE)
4064 abort ();
4065 block_stack = alloc_INSN_LIST (insn, block_stack);
4066 break;
4067
4068 case NOTE_INSN_BLOCK_END:
4069 /* Too many end notes. */
4070 if (block_stack == NULL_RTX)
4071 abort ();
4072 /* Mismatched nesting. */
4073 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
4074 abort ();
4075 tmp = block_stack;
4076 block_stack = XEXP (block_stack, 1);
4077 free_INSN_LIST_node (tmp);
4078
4079 /* Scan back to see if there are any non-note instructions
4080 between INSN and the beginning of this block. If not,
4081 then there is no PC range in the generated code that will
4082 actually be in this block, so there's no point in
4083 remembering the existence of the block. */
4084 for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
4085 {
4086 /* This block contains a real instruction. Note that we
4087 don't include labels; if the only thing in the block
4088 is a label, then there are still no PC values that
4089 lie within the block. */
4090 if (INSN_P (tmp))
4091 break;
4092
4093 /* We're only interested in NOTEs. */
4094 if (GET_CODE (tmp) != NOTE)
4095 continue;
4096
4097 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
4098 {
4099 /* We just verified that this BLOCK matches us with
4100 the block_stack check above. Never delete the
4101 BLOCK for the outermost scope of the function; we
4102 can refer to names from that scope even if the
4103 block notes are messed up. */
4104 if (! is_body_block (NOTE_BLOCK (insn))
4105 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
4106 {
4107 remove_insn (tmp);
4108 remove_insn (insn);
4109 }
4110 break;
4111 }
4112 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
4113 /* There's a nested block. We need to leave the
4114 current block in place since otherwise the debugger
4115 wouldn't be able to show symbols from our block in
4116 the nested block. */
4117 break;
4118 }
4119 }
4120 }
4121
4122 /* Too many begin notes. */
4123 if (block_stack || eh_stack)
4124 abort ();
4125 }
4126
4127 \f
4128 /* Emit insn(s) of given code and pattern
4129 at a specified place within the doubly-linked list.
4130
4131 All of the emit_foo global entry points accept an object
4132 X which is either an insn list or a PATTERN of a single
4133 instruction.
4134
4135 There are thus a few canonical ways to generate code and
4136 emit it at a specific place in the instruction stream. For
4137 example, consider the instruction named SPOT and the fact that
4138 we would like to emit some instructions before SPOT. We might
4139 do it like this:
4140
4141 start_sequence ();
4142 ... emit the new instructions ...
4143 insns_head = get_insns ();
4144 end_sequence ();
4145
4146 emit_insn_before (insns_head, SPOT);
4147
4148 It used to be common to generate SEQUENCE rtl instead, but that
4149 is a relic of the past which no longer occurs. The reason is that
4150 SEQUENCE rtl badly fragments RTL memory, since the SEQUENCE
4151 generated would almost certainly die right after it was created. */
4152
4153 /* Make X be output before the instruction BEFORE. */
4154
4155 rtx
4156 emit_insn_before (x, before)
4157 rtx x, before;
4158 {
4159 rtx last = before;
4160 rtx insn;
4161
4162 #ifdef ENABLE_RTL_CHECKING
4163 if (before == NULL_RTX)
4164 abort ();
4165 #endif
4166
4167 if (x == NULL_RTX)
4168 return last;
4169
4170 switch (GET_CODE (x))
4171 {
4172 case INSN:
4173 case JUMP_INSN:
4174 case CALL_INSN:
4175 case CODE_LABEL:
4176 case BARRIER:
4177 case NOTE:
4178 insn = x;
4179 while (insn)
4180 {
4181 rtx next = NEXT_INSN (insn);
4182 add_insn_before (insn, before);
4183 last = insn;
4184 insn = next;
4185 }
4186 break;
4187
4188 #ifdef ENABLE_RTL_CHECKING
4189 case SEQUENCE:
4190 abort ();
4191 break;
4192 #endif
4193
4194 default:
4195 last = make_insn_raw (x);
4196 add_insn_before (last, before);
4197 break;
4198 }
4199
4200 return last;
4201 }
4202
4203 /* Make an instruction with body X and code JUMP_INSN
4204 and output it before the instruction BEFORE. */
4205
4206 rtx
4207 emit_jump_insn_before (x, before)
4208 rtx x, before;
4209 {
4210 rtx insn, last = NULL_RTX;
4211
4212 #ifdef ENABLE_RTL_CHECKING
4213 if (before == NULL_RTX)
4214 abort ();
4215 #endif
4216
4217 switch (GET_CODE (x))
4218 {
4219 case INSN:
4220 case JUMP_INSN:
4221 case CALL_INSN:
4222 case CODE_LABEL:
4223 case BARRIER:
4224 case NOTE:
4225 insn = x;
4226 while (insn)
4227 {
4228 rtx next = NEXT_INSN (insn);
4229 add_insn_before (insn, before);
4230 last = insn;
4231 insn = next;
4232 }
4233 break;
4234
4235 #ifdef ENABLE_RTL_CHECKING
4236 case SEQUENCE:
4237 abort ();
4238 break;
4239 #endif
4240
4241 default:
4242 last = make_jump_insn_raw (x);
4243 add_insn_before (last, before);
4244 break;
4245 }
4246
4247 return last;
4248 }
4249
4250 /* Make an instruction with body X and code CALL_INSN
4251 and output it before the instruction BEFORE. */
4252
4253 rtx
4254 emit_call_insn_before (x, before)
4255 rtx x, before;
4256 {
4257 rtx last = NULL_RTX, insn;
4258
4259 #ifdef ENABLE_RTL_CHECKING
4260 if (before == NULL_RTX)
4261 abort ();
4262 #endif
4263
4264 switch (GET_CODE (x))
4265 {
4266 case INSN:
4267 case JUMP_INSN:
4268 case CALL_INSN:
4269 case CODE_LABEL:
4270 case BARRIER:
4271 case NOTE:
4272 insn = x;
4273 while (insn)
4274 {
4275 rtx next = NEXT_INSN (insn);
4276 add_insn_before (insn, before);
4277 last = insn;
4278 insn = next;
4279 }
4280 break;
4281
4282 #ifdef ENABLE_RTL_CHECKING
4283 case SEQUENCE:
4284 abort ();
4285 break;
4286 #endif
4287
4288 default:
4289 last = make_call_insn_raw (x);
4290 add_insn_before (last, before);
4291 break;
4292 }
4293
4294 return last;
4295 }
4296
4297 /* Make an insn of code BARRIER
4298 and output it before the insn BEFORE. */
4299
4300 rtx
4301 emit_barrier_before (before)
4302 rtx before;
4303 {
4304 rtx insn = rtx_alloc (BARRIER);
4305
4306 INSN_UID (insn) = cur_insn_uid++;
4307
4308 add_insn_before (insn, before);
4309 return insn;
4310 }
4311
4312 /* Emit the label LABEL before the insn BEFORE. */
4313
4314 rtx
4315 emit_label_before (label, before)
4316 rtx label, before;
4317 {
4318 /* This can be called twice for the same label as a result of the
4319 confusion that follows a syntax error! So make it harmless. */
4320 if (INSN_UID (label) == 0)
4321 {
4322 INSN_UID (label) = cur_insn_uid++;
4323 add_insn_before (label, before);
4324 }
4325
4326 return label;
4327 }
4328
4329 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4330
4331 rtx
4332 emit_note_before (subtype, before)
4333 int subtype;
4334 rtx before;
4335 {
4336 rtx note = rtx_alloc (NOTE);
4337 INSN_UID (note) = cur_insn_uid++;
4338 NOTE_SOURCE_FILE (note) = 0;
4339 NOTE_LINE_NUMBER (note) = subtype;
4340 BLOCK_FOR_INSN (note) = NULL;
4341
4342 add_insn_before (note, before);
4343 return note;
4344 }
4345 \f
4346 /* Helper for emit_insn_after, handles lists of instructions
4347 efficiently. */
4348
4349 static rtx emit_insn_after_1 PARAMS ((rtx, rtx));
4350
4351 static rtx
4352 emit_insn_after_1 (first, after)
4353 rtx first, after;
4354 {
4355 rtx last;
4356 rtx after_after;
4357 basic_block bb;
4358
4359 if (GET_CODE (after) != BARRIER
4360 && (bb = BLOCK_FOR_INSN (after)))
4361 {
4362 bb->flags |= BB_DIRTY;
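/* The loop below visits every insn in the chain except the final
   one; the identical test just afterward then handles that final
   insn. */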
4363 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4364 if (GET_CODE (last) != BARRIER)
4365 set_block_for_insn (last, bb);
4366 if (GET_CODE (last) != BARRIER)
4367 set_block_for_insn (last, bb);
4368 if (bb->end == after)
4369 bb->end = last;
4370 }
4371 else
4372 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4373 continue;
4374
4375 after_after = NEXT_INSN (after);
4376
4377 NEXT_INSN (after) = first;
4378 PREV_INSN (first) = after;
4379 NEXT_INSN (last) = after_after;
4380 if (after_after)
4381 PREV_INSN (after_after) = last;
4382
4383 if (after == last_insn)
4384 last_insn = last;
4385 return last;
4386 }
4387
4388 /* Make X be output after the insn AFTER. */
4389
4390 rtx
4391 emit_insn_after (x, after)
4392 rtx x, after;
4393 {
4394 rtx last = after;
4395
4396 #ifdef ENABLE_RTL_CHECKING
4397 if (after == NULL_RTX)
4398 abort ();
4399 #endif
4400
4401 if (x == NULL_RTX)
4402 return last;
4403
4404 switch (GET_CODE (x))
4405 {
4406 case INSN:
4407 case JUMP_INSN:
4408 case CALL_INSN:
4409 case CODE_LABEL:
4410 case BARRIER:
4411 case NOTE:
4412 last = emit_insn_after_1 (x, after);
4413 break;
4414
4415 #ifdef ENABLE_RTL_CHECKING
4416 case SEQUENCE:
4417 abort ();
4418 break;
4419 #endif
4420
4421 default:
4422 last = make_insn_raw (x);
4423 add_insn_after (last, after);
4424 break;
4425 }
4426
4427 return last;
4428 }
4429
4430 /* Similar to emit_insn_after, except that line notes are to be inserted so
4431 as to act as if this insn were at FROM. */
4432
4433 void
4434 emit_insn_after_with_line_notes (x, after, from)
4435 rtx x, after, from;
4436 {
4437 rtx from_line = find_line_note (from);
4438 rtx after_line = find_line_note (after);
4439 rtx insn = emit_insn_after (x, after);
4440
4441 if (from_line)
4442 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
4443 NOTE_LINE_NUMBER (from_line),
4444 after);
4445
4446 if (after_line)
4447 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
4448 NOTE_LINE_NUMBER (after_line),
4449 insn);
4450 }
4451
4452 /* Make an insn of code JUMP_INSN with body X
4453 and output it after the insn AFTER. */
4454
4455 rtx
4456 emit_jump_insn_after (x, after)
4457 rtx x, after;
4458 {
4459 rtx last;
4460
4461 #ifdef ENABLE_RTL_CHECKING
4462 if (after == NULL_RTX)
4463 abort ();
4464 #endif
4465
4466 switch (GET_CODE (x))
4467 {
4468 case INSN:
4469 case JUMP_INSN:
4470 case CALL_INSN:
4471 case CODE_LABEL:
4472 case BARRIER:
4473 case NOTE:
4474 last = emit_insn_after_1 (x, after);
4475 break;
4476
4477 #ifdef ENABLE_RTL_CHECKING
4478 case SEQUENCE:
4479 abort ();
4480 break;
4481 #endif
4482
4483 default:
4484 last = make_jump_insn_raw (x);
4485 add_insn_after (last, after);
4486 break;
4487 }
4488
4489 return last;
4490 }
4491
4492 /* Make an instruction with body X and code CALL_INSN
4493 and output it after the instruction AFTER. */
4494
4495 rtx
4496 emit_call_insn_after (x, after)
4497 rtx x, after;
4498 {
4499 rtx last;
4500
4501 #ifdef ENABLE_RTL_CHECKING
4502 if (after == NULL_RTX)
4503 abort ();
4504 #endif
4505
4506 switch (GET_CODE (x))
4507 {
4508 case INSN:
4509 case JUMP_INSN:
4510 case CALL_INSN:
4511 case CODE_LABEL:
4512 case BARRIER:
4513 case NOTE:
4514 last = emit_insn_after_1 (x, after);
4515 break;
4516
4517 #ifdef ENABLE_RTL_CHECKING
4518 case SEQUENCE:
4519 abort ();
4520 break;
4521 #endif
4522
4523 default:
4524 last = make_call_insn_raw (x);
4525 add_insn_after (last, after);
4526 break;
4527 }
4528
4529 return last;
4530 }
4531
4532 /* Make an insn of code BARRIER
4533 and output it after the insn AFTER. */
4534
4535 rtx
4536 emit_barrier_after (after)
4537 rtx after;
4538 {
4539 rtx insn = rtx_alloc (BARRIER);
4540
4541 INSN_UID (insn) = cur_insn_uid++;
4542
4543 add_insn_after (insn, after);
4544 return insn;
4545 }
4546
4547 /* Emit the label LABEL after the insn AFTER. */
4548
4549 rtx
4550 emit_label_after (label, after)
4551 rtx label, after;
4552 {
4553 /* This can be called twice for the same label
4554 as a result of the confusion that follows a syntax error!
4555 So make it harmless. */
4556 if (INSN_UID (label) == 0)
4557 {
4558 INSN_UID (label) = cur_insn_uid++;
4559 add_insn_after (label, after);
4560 }
4561
4562 return label;
4563 }
4564
4565 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4566
4567 rtx
4568 emit_note_after (subtype, after)
4569 int subtype;
4570 rtx after;
4571 {
4572 rtx note = rtx_alloc (NOTE);
4573 INSN_UID (note) = cur_insn_uid++;
4574 NOTE_SOURCE_FILE (note) = 0;
4575 NOTE_LINE_NUMBER (note) = subtype;
4576 BLOCK_FOR_INSN (note) = NULL;
4577 add_insn_after (note, after);
4578 return note;
4579 }
4580
4581 /* Emit a line note for FILE and LINE after the insn AFTER. */
4582
4583 rtx
4584 emit_line_note_after (file, line, after)
4585 const char *file;
4586 int line;
4587 rtx after;
4588 {
4589 rtx note;
4590
4591 if (no_line_numbers && line > 0)
4592 {
4593 cur_insn_uid++;
4594 return 0;
4595 }
4596
4597 note = rtx_alloc (NOTE);
4598 INSN_UID (note) = cur_insn_uid++;
4599 NOTE_SOURCE_FILE (note) = file;
4600 NOTE_LINE_NUMBER (note) = line;
4601 BLOCK_FOR_INSN (note) = NULL;
4602 add_insn_after (note, after);
4603 return note;
4604 }
4605 \f
4606 /* Like emit_insn_after, but set INSN_SCOPE according to SCOPE. */
4607 rtx
4608 emit_insn_after_scope (pattern, after, scope)
4609 rtx pattern, after;
4610 tree scope;
4611 {
4612 rtx last = emit_insn_after (pattern, after);
4613
4614 after = NEXT_INSN (after);
4615 while (1)
4616 {
4617 if (active_insn_p (after))
4618 INSN_SCOPE (after) = scope;
4619 if (after == last)
4620 break;
4621 after = NEXT_INSN (after);
4622 }
4623 return last;
4624 }
4625
4626 /* Like emit_jump_insn_after, but set INSN_SCOPE according to SCOPE. */
4627 rtx
4628 emit_jump_insn_after_scope (pattern, after, scope)
4629 rtx pattern, after;
4630 tree scope;
4631 {
4632 rtx last = emit_jump_insn_after (pattern, after);
4633
4634 after = NEXT_INSN (after);
4635 while (1)
4636 {
4637 if (active_insn_p (after))
4638 INSN_SCOPE (after) = scope;
4639 if (after == last)
4640 break;
4641 after = NEXT_INSN (after);
4642 }
4643 return last;
4644 }
4645
4646 /* Like emit_call_insn_after, but set INSN_SCOPE according to SCOPE. */
4647 rtx
4648 emit_call_insn_after_scope (pattern, after, scope)
4649 rtx pattern, after;
4650 tree scope;
4651 {
4652 rtx last = emit_call_insn_after (pattern, after);
4653
4654 after = NEXT_INSN (after);
4655 while (1)
4656 {
4657 if (active_insn_p (after))
4658 INSN_SCOPE (after) = scope;
4659 if (after == last)
4660 break;
4661 after = NEXT_INSN (after);
4662 }
4663 return last;
4664 }
4665
4666 /* Like emit_insn_before, but set INSN_SCOPE according to SCOPE. */
4667 rtx
4668 emit_insn_before_scope (pattern, before, scope)
4669 rtx pattern, before;
4670 tree scope;
4671 {
4672 rtx first = PREV_INSN (before);
4673 rtx last = emit_insn_before (pattern, before);
4674
4675 first = NEXT_INSN (first);
4676 while (1)
4677 {
4678 if (active_insn_p (first))
4679 INSN_SCOPE (first) = scope;
4680 if (first == last)
4681 break;
4682 first = NEXT_INSN (first);
4683 }
4684 return last;
4685 }
4686 \f
4687 /* Take X and emit it at the end of the doubly-linked
4688 INSN list.
4689
4690 Returns the last insn emitted. */
4691
4692 rtx
4693 emit_insn (x)
4694 rtx x;
4695 {
4696 rtx last = last_insn;
4697 rtx insn;
4698
4699 if (x == NULL_RTX)
4700 return last;
4701
4702 switch (GET_CODE (x))
4703 {
4704 case INSN:
4705 case JUMP_INSN:
4706 case CALL_INSN:
4707 case CODE_LABEL:
4708 case BARRIER:
4709 case NOTE:
4710 insn = x;
4711 while (insn)
4712 {
4713 rtx next = NEXT_INSN (insn);
4714 add_insn (insn);
4715 last = insn;
4716 insn = next;
4717 }
4718 break;
4719
4720 #ifdef ENABLE_RTL_CHECKING
4721 case SEQUENCE:
4722 abort ();
4723 break;
4724 #endif
4725
4726 default:
4727 last = make_insn_raw (x);
4728 add_insn (last);
4729 break;
4730 }
4731
4732 return last;
4733 }
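
/* A minimal sketch of emitting a single pattern (hypothetical hard
   registers; real callers usually get patterns from the gen_* routines
   in insn-emit.c):

     emit_insn (gen_rtx_SET (VOIDmode,
                             gen_rtx_REG (SImode, 1),
                             gen_rtx_REG (SImode, 2)));  */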
4734
4735 /* Make an insn of code JUMP_INSN with pattern X
4736 and add it to the end of the doubly-linked list. */
4737
4738 rtx
4739 emit_jump_insn (x)
4740 rtx x;
4741 {
4742 rtx last = NULL_RTX, insn;
4743
4744 switch (GET_CODE (x))
4745 {
4746 case INSN:
4747 case JUMP_INSN:
4748 case CALL_INSN:
4749 case CODE_LABEL:
4750 case BARRIER:
4751 case NOTE:
4752 insn = x;
4753 while (insn)
4754 {
4755 rtx next = NEXT_INSN (insn);
4756 add_insn (insn);
4757 last = insn;
4758 insn = next;
4759 }
4760 break;
4761
4762 #ifdef ENABLE_RTL_CHECKING
4763 case SEQUENCE:
4764 abort ();
4765 break;
4766 #endif
4767
4768 default:
4769 last = make_jump_insn_raw (x);
4770 add_insn (last);
4771 break;
4772 }
4773
4774 return last;
4775 }
4776
4777 /* Make an insn of code CALL_INSN with pattern X
4778 and add it to the end of the doubly-linked list. */
4779
4780 rtx
4781 emit_call_insn (x)
4782 rtx x;
4783 {
4784 rtx insn;
4785
4786 switch (GET_CODE (x))
4787 {
4788 case INSN:
4789 case JUMP_INSN:
4790 case CALL_INSN:
4791 case CODE_LABEL:
4792 case BARRIER:
4793 case NOTE:
4794 insn = emit_insn (x);
4795 break;
4796
4797 #ifdef ENABLE_RTL_CHECKING
4798 case SEQUENCE:
4799 abort ();
4800 break;
4801 #endif
4802
4803 default:
4804 insn = make_call_insn_raw (x);
4805 add_insn (insn);
4806 break;
4807 }
4808
4809 return insn;
4810 }
4811
4812 /* Add the label LABEL to the end of the doubly-linked list. */
4813
4814 rtx
4815 emit_label (label)
4816 rtx label;
4817 {
4818 /* This can be called twice for the same label
4819 as a result of the confusion that follows a syntax error!
4820 So make it harmless. */
4821 if (INSN_UID (label) == 0)
4822 {
4823 INSN_UID (label) = cur_insn_uid++;
4824 add_insn (label);
4825 }
4826 return label;
4827 }
4828
4829 /* Make an insn of code BARRIER
4830 and add it to the end of the doubly-linked list. */
4831
4832 rtx
4833 emit_barrier ()
4834 {
4835 rtx barrier = rtx_alloc (BARRIER);
4836 INSN_UID (barrier) = cur_insn_uid++;
4837 add_insn (barrier);
4838 return barrier;
4839 }
4840
4841 /* Make an insn of code NOTE
4842 with data-fields specified by FILE and LINE
4843 and add it to the end of the doubly-linked list,
4844 but only if line-numbers are desired for debugging info. */
4845
4846 rtx
4847 emit_line_note (file, line)
4848 const char *file;
4849 int line;
4850 {
4851 set_file_and_line_for_stmt (file, line);
4852
4853 #if 0
4854 if (no_line_numbers)
4855 return 0;
4856 #endif
4857
4858 return emit_note (file, line);
4859 }
4860
4861 /* Make an insn of code NOTE
4862 with data-fields specified by FILE and LINE
4863 and add it to the end of the doubly-linked list.
4864 If it is a line-number NOTE, omit it if it matches the previous one. */
4865
4866 rtx
4867 emit_note (file, line)
4868 const char *file;
4869 int line;
4870 {
4871 rtx note;
4872
4873 if (line > 0)
4874 {
4875 if (file && last_filename && !strcmp (file, last_filename)
4876 && line == last_linenum)
4877 return 0;
4878 last_filename = file;
4879 last_linenum = line;
4880 }
4881
4882 if (no_line_numbers && line > 0)
4883 {
4884 cur_insn_uid++;
4885 return 0;
4886 }
4887
4888 note = rtx_alloc (NOTE);
4889 INSN_UID (note) = cur_insn_uid++;
4890 NOTE_SOURCE_FILE (note) = file;
4891 NOTE_LINE_NUMBER (note) = line;
4892 BLOCK_FOR_INSN (note) = NULL;
4893 add_insn (note);
4894 return note;
4895 }
4896
4897 /* Emit a NOTE, and don't omit it even if LINE is the previous note. */
4898
4899 rtx
4900 emit_line_note_force (file, line)
4901 const char *file;
4902 int line;
4903 {
4904 last_linenum = -1;
4905 return emit_line_note (file, line);
4906 }
4907
4908 /* Cause next statement to emit a line note even if the line number
4909 has not changed. This is used at the beginning of a function. */
4910
4911 void
4912 force_next_line_note ()
4913 {
4914 last_linenum = -1;
4915 }
4916
4917 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4918 note of this type already exists, remove it first. */
4919
4920 rtx
4921 set_unique_reg_note (insn, kind, datum)
4922 rtx insn;
4923 enum reg_note kind;
4924 rtx datum;
4925 {
4926 rtx note = find_reg_note (insn, kind, NULL_RTX);
4927
4928 switch (kind)
4929 {
4930 case REG_EQUAL:
4931 case REG_EQUIV:
4932 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4933 has multiple sets (some callers assume single_set
4934 means the insn only has one set, when in fact it
4935 means the insn only has one *useful* set). */
4936 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4937 {
4938 if (note)
4939 abort ();
4940 return NULL_RTX;
4941 }
4942
4943 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4944 It serves no useful purpose and breaks eliminate_regs. */
4945 if (GET_CODE (datum) == ASM_OPERANDS)
4946 return NULL_RTX;
4947 break;
4948
4949 default:
4950 break;
4951 }
4952
4953 if (note)
4954 {
4955 XEXP (note, 0) = datum;
4956 return note;
4957 }
4958
4959 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4960 return REG_NOTES (insn);
4961 }
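
/* A sketch of recording the known value of an insn's destination
   (INSN is assumed to be a single-set insn that computes 42):

     set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));  */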
4962 \f
4963 /* Return an indication of which type of insn should have X as a body.
4964 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4965
4966 enum rtx_code
4967 classify_insn (x)
4968 rtx x;
4969 {
4970 if (GET_CODE (x) == CODE_LABEL)
4971 return CODE_LABEL;
4972 if (GET_CODE (x) == CALL)
4973 return CALL_INSN;
4974 if (GET_CODE (x) == RETURN)
4975 return JUMP_INSN;
4976 if (GET_CODE (x) == SET)
4977 {
4978 if (SET_DEST (x) == pc_rtx)
4979 return JUMP_INSN;
4980 else if (GET_CODE (SET_SRC (x)) == CALL)
4981 return CALL_INSN;
4982 else
4983 return INSN;
4984 }
4985 if (GET_CODE (x) == PARALLEL)
4986 {
4987 int j;
4988 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4989 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4990 return CALL_INSN;
4991 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4992 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4993 return JUMP_INSN;
4994 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4995 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4996 return CALL_INSN;
4997 }
4998 return INSN;
4999 }
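
/* For example, a SET of the program counter classifies as a jump
   (a sketch; LABEL is an assumed CODE_LABEL):

     rtx pat = gen_rtx_SET (VOIDmode, pc_rtx,
                            gen_rtx_LABEL_REF (VOIDmode, label));
     if (classify_insn (pat) != JUMP_INSN)
       abort ();  */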
5000
5001 /* Emit the rtl pattern X as an appropriate kind of insn.
5002 If X is a label, it is simply added into the insn chain. */
5003
5004 rtx
5005 emit (x)
5006 rtx x;
5007 {
5008 enum rtx_code code = classify_insn (x);
5009
5010 if (code == CODE_LABEL)
5011 return emit_label (x);
5012 else if (code == INSN)
5013 return emit_insn (x);
5014 else if (code == JUMP_INSN)
5015 {
5016 rtx insn = emit_jump_insn (x);
5017 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5018 return emit_barrier ();
5019 return insn;
5020 }
5021 else if (code == CALL_INSN)
5022 return emit_call_insn (x);
5023 else
5024 abort ();
5025 }
5026 \f
5027 /* Space for free sequence stack entries. */
5028 static GTY ((deletable (""))) struct sequence_stack *free_sequence_stack;
5029
5030 /* Begin emitting insns to a sequence which can be packaged in an
5031 RTL_EXPR. If this sequence will contain something that might cause
5032 the compiler to pop arguments to function calls (because those
5033 pops have previously been deferred; see INHIBIT_DEFER_POP for more
5034 details), use do_pending_stack_adjust before calling this function.
5035 That will ensure that the deferred pops are not accidentally
5036 emitted in the middle of this sequence. */
5037
5038 void
5039 start_sequence ()
5040 {
5041 struct sequence_stack *tem;
5042
5043 if (free_sequence_stack != NULL)
5044 {
5045 tem = free_sequence_stack;
5046 free_sequence_stack = tem->next;
5047 }
5048 else
5049 tem = (struct sequence_stack *) ggc_alloc (sizeof (struct sequence_stack));
5050
5051 tem->next = seq_stack;
5052 tem->first = first_insn;
5053 tem->last = last_insn;
5054 tem->sequence_rtl_expr = seq_rtl_expr;
5055
5056 seq_stack = tem;
5057
5058 first_insn = 0;
5059 last_insn = 0;
5060 }
5061
5062 /* Similarly, but indicate that this sequence will be placed in T, an
5063 RTL_EXPR. See the documentation for start_sequence for more
5064 information about how to use this function. */
5065
5066 void
5067 start_sequence_for_rtl_expr (t)
5068 tree t;
5069 {
5070 start_sequence ();
5071
5072 seq_rtl_expr = t;
5073 }
5074
5075 /* Set up the insn chain starting with FIRST as the current sequence,
5076 saving the previously current one. See the documentation for
5077 start_sequence for more information about how to use this function. */
5078
5079 void
5080 push_to_sequence (first)
5081 rtx first;
5082 {
5083 rtx last;
5084
5085 start_sequence ();
5086
5087 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
5088
5089 first_insn = first;
5090 last_insn = last;
5091 }
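
/* A sketch of appending to a detached chain SAVED and recovering the
   updated chain (SAVED is an assumed insn list not currently in the
   main stream):

     push_to_sequence (saved);
     emit_insn (pattern);
     saved = get_insns ();
     end_sequence ();  */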
5092
5093 /* Set up the insn chain from a chain starting at FIRST and ending at LAST. */
5094
5095 void
5096 push_to_full_sequence (first, last)
5097 rtx first, last;
5098 {
5099 start_sequence ();
5100 first_insn = first;
5101 last_insn = last;
5102 /* We really should have the end of the insn chain here. */
5103 if (last && NEXT_INSN (last))
5104 abort ();
5105 }
5106
5107 /* Set up the outer-level insn chain
5108 as the current sequence, saving the previously current one. */
5109
5110 void
5111 push_topmost_sequence ()
5112 {
5113 struct sequence_stack *stack, *top = NULL;
5114
5115 start_sequence ();
5116
5117 for (stack = seq_stack; stack; stack = stack->next)
5118 top = stack;
5119
5120 first_insn = top->first;
5121 last_insn = top->last;
5122 seq_rtl_expr = top->sequence_rtl_expr;
5123 }
5124
5125 /* After emitting to the outer-level insn chain, update the outer-level
5126 insn chain, and restore the previous saved state. */
5127
5128 void
5129 pop_topmost_sequence ()
5130 {
5131 struct sequence_stack *stack, *top = NULL;
5132
5133 for (stack = seq_stack; stack; stack = stack->next)
5134 top = stack;
5135
5136 top->first = first_insn;
5137 top->last = last_insn;
5138 /* ??? Why don't we save seq_rtl_expr here? */
5139
5140 end_sequence ();
5141 }
5142
5143 /* After emitting to a sequence, restore previous saved state.
5144
5145 To get the contents of the sequence just made, you must call
5146 `get_insns' *before* calling here.
5147
5148 If the compiler might have deferred popping arguments while
5149 generating this sequence, and this sequence will not be immediately
5150 inserted into the instruction stream, use do_pending_stack_adjust
5151 before calling get_insns. That will ensure that the deferred
5152 pops are inserted into this sequence, and not into some random
5153 location in the instruction stream. See INHIBIT_DEFER_POP for more
5154 information about deferred popping of arguments. */
5155
5156 void
5157 end_sequence ()
5158 {
5159 struct sequence_stack *tem = seq_stack;
5160
5161 first_insn = tem->first;
5162 last_insn = tem->last;
5163 seq_rtl_expr = tem->sequence_rtl_expr;
5164 seq_stack = tem->next;
5165
5166 memset (tem, 0, sizeof (*tem));
5167 tem->next = free_sequence_stack;
5168 free_sequence_stack = tem;
5169 }
5170
5171 /* This works like end_sequence, but records the old sequence in FIRST
5172 and LAST. */
5173
5174 void
5175 end_full_sequence (first, last)
5176 rtx *first, *last;
5177 {
5178 *first = first_insn;
5179 *last = last_insn;
5180 end_sequence ();
5181 }
5182
5183 /* Return 1 if currently emitting into a sequence. */
5184
5185 int
5186 in_sequence_p ()
5187 {
5188 return seq_stack != 0;
5189 }
5190 \f
5191 /* Put the various virtual registers into REGNO_REG_RTX. */
5192
5193 void
5194 init_virtual_regs (es)
5195 struct emit_status *es;
5196 {
5197 rtx *ptr = es->x_regno_reg_rtx;
5198 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5199 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5200 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5201 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5202 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5203 }
5204
5205 \f
5206 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5207 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5208 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5209 static int copy_insn_n_scratches;
5210
5211 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5212 copied an ASM_OPERANDS.
5213 In that case, it is the original input-operand vector. */
5214 static rtvec orig_asm_operands_vector;
5215
5216 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5217 copied an ASM_OPERANDS.
5218 In that case, it is the copied input-operand vector. */
5219 static rtvec copy_asm_operands_vector;
5220
5221 /* Likewise for the constraints vector. */
5222 static rtvec orig_asm_constraints_vector;
5223 static rtvec copy_asm_constraints_vector;
5224
5225 /* Recursively create a new copy of an rtx for copy_insn.
5226 This function differs from copy_rtx in that it handles SCRATCHes and
5227 ASM_OPERANDs properly.
5228 Normally, this function is not used directly; use copy_insn as front end.
5229 However, you could first copy an insn pattern with copy_insn and then use
5230 this function afterwards to properly copy any REG_NOTEs containing
5231 SCRATCHes. */
5232
5233 rtx
5234 copy_insn_1 (orig)
5235 rtx orig;
5236 {
5237 rtx copy;
5238 int i, j;
5239 RTX_CODE code;
5240 const char *format_ptr;
5241
5242 code = GET_CODE (orig);
5243
5244 switch (code)
5245 {
5246 case REG:
5247 case QUEUED:
5248 case CONST_INT:
5249 case CONST_DOUBLE:
5250 case CONST_VECTOR:
5251 case SYMBOL_REF:
5252 case CODE_LABEL:
5253 case PC:
5254 case CC0:
5255 case ADDRESSOF:
5256 return orig;
5257
5258 case SCRATCH:
5259 for (i = 0; i < copy_insn_n_scratches; i++)
5260 if (copy_insn_scratch_in[i] == orig)
5261 return copy_insn_scratch_out[i];
5262 break;
5263
5264 case CONST:
5265 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
5266 a LABEL_REF, it isn't sharable. */
5267 if (GET_CODE (XEXP (orig, 0)) == PLUS
5268 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
5269 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
5270 return orig;
5271 break;
5272
5273 /* A MEM with a constant address is not sharable. The problem is that
5274 the constant address may need to be reloaded. If the mem is shared,
5275 then reloading one copy of this mem will cause all copies to appear
5276 to have been reloaded. */
5277
5278 default:
5279 break;
5280 }
5281
5282 copy = rtx_alloc (code);
5283
5284 /* Copy the various flags, and other information. We assume that
5285 all fields need copying, and then clear the fields that should
5286 not be copied. That is the sensible default behavior, and forces
5287 us to explicitly document why we are *not* copying a flag. */
5288 memcpy (copy, orig, sizeof (struct rtx_def) - sizeof (rtunion));
5289
5290 /* We do not copy the USED flag, which is used as a mark bit during
5291 walks over the RTL. */
5292 RTX_FLAG (copy, used) = 0;
5293
5294 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5295 if (GET_RTX_CLASS (code) == 'i')
5296 {
5297 RTX_FLAG (copy, jump) = 0;
5298 RTX_FLAG (copy, call) = 0;
5299 RTX_FLAG (copy, frame_related) = 0;
5300 }
5301
5302 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5303
5304 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5305 {
5306 copy->fld[i] = orig->fld[i];
5307 switch (*format_ptr++)
5308 {
5309 case 'e':
5310 if (XEXP (orig, i) != NULL)
5311 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5312 break;
5313
5314 case 'E':
5315 case 'V':
5316 if (XVEC (orig, i) == orig_asm_constraints_vector)
5317 XVEC (copy, i) = copy_asm_constraints_vector;
5318 else if (XVEC (orig, i) == orig_asm_operands_vector)
5319 XVEC (copy, i) = copy_asm_operands_vector;
5320 else if (XVEC (orig, i) != NULL)
5321 {
5322 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5323 for (j = 0; j < XVECLEN (copy, i); j++)
5324 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5325 }
5326 break;
5327
5328 case 't':
5329 case 'w':
5330 case 'i':
5331 case 's':
5332 case 'S':
5333 case 'u':
5334 case '0':
5335 /* These are left unchanged. */
5336 break;
5337
5338 default:
5339 abort ();
5340 }
5341 }
5342
5343 if (code == SCRATCH)
5344 {
5345 i = copy_insn_n_scratches++;
5346 if (i >= MAX_RECOG_OPERANDS)
5347 abort ();
5348 copy_insn_scratch_in[i] = orig;
5349 copy_insn_scratch_out[i] = copy;
5350 }
5351 else if (code == ASM_OPERANDS)
5352 {
5353 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5354 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5355 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5356 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5357 }
5358
5359 return copy;
5360 }
5361
5362 /* Create a new copy of an rtx.
5363 This function differs from copy_rtx in that it handles SCRATCHes and
5364 ASM_OPERANDs properly.
5365 INSN doesn't really have to be a full INSN; it could be just the
5366 pattern. */
5367 rtx
5368 copy_insn (insn)
5369 rtx insn;
5370 {
5371 copy_insn_n_scratches = 0;
5372 orig_asm_operands_vector = 0;
5373 orig_asm_constraints_vector = 0;
5374 copy_asm_operands_vector = 0;
5375 copy_asm_constraints_vector = 0;
5376 return copy_insn_1 (insn);
5377 }
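
/* The typical use, as in emit_copy_of_insn_after below: duplicate an
   insn's pattern so the copy can be emitted elsewhere.

     new = emit_insn_after (copy_insn (PATTERN (insn)), after);  */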
5378
5379 /* Initialize data structures and variables in this file
5380 before generating rtl for each function. */
5381
5382 void
5383 init_emit ()
5384 {
5385 struct function *f = cfun;
5386
5387 f->emit = (struct emit_status *) ggc_alloc (sizeof (struct emit_status));
5388 first_insn = NULL;
5389 last_insn = NULL;
5390 seq_rtl_expr = NULL;
5391 cur_insn_uid = 1;
5392 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5393 last_linenum = 0;
5394 last_filename = 0;
5395 first_label_num = label_num;
5396 last_label_num = 0;
5397 seq_stack = NULL;
5398
5399 /* Init the tables that describe all the pseudo regs. */
5400
5401 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5402
5403 f->emit->regno_pointer_align
5404 = (unsigned char *) ggc_alloc_cleared (f->emit->regno_pointer_align_length
5405 * sizeof (unsigned char));
5406
5407 regno_reg_rtx
5408 = (rtx *) ggc_alloc (f->emit->regno_pointer_align_length * sizeof (rtx));
5409
5410 /* Put copies of all the hard registers into regno_reg_rtx. */
5411 memcpy (regno_reg_rtx,
5412 static_regno_reg_rtx,
5413 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5414
5415 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5416 init_virtual_regs (f->emit);
5417
5418 /* Indicate that the virtual registers and stack locations are
5419 all pointers. */
5420 REG_POINTER (stack_pointer_rtx) = 1;
5421 REG_POINTER (frame_pointer_rtx) = 1;
5422 REG_POINTER (hard_frame_pointer_rtx) = 1;
5423 REG_POINTER (arg_pointer_rtx) = 1;
5424
5425 REG_POINTER (virtual_incoming_args_rtx) = 1;
5426 REG_POINTER (virtual_stack_vars_rtx) = 1;
5427 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5428 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5429 REG_POINTER (virtual_cfa_rtx) = 1;
5430
5431 #ifdef STACK_BOUNDARY
5432 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5433 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5434 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5435 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5436
5437 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5438 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5439 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5440 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5441 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5442 #endif
5443
5444 #ifdef INIT_EXPANDERS
5445 INIT_EXPANDERS;
5446 #endif
5447 }
5448
5449 /* Generate a constant vector of zeros for MODE. */
5450
5451 static rtx
5452 gen_const_vector_0 (mode)
5453 enum machine_mode mode;
5454 {
5455 rtx tem;
5456 rtvec v;
5457 int units, i;
5458 enum machine_mode inner;
5459
5460 units = GET_MODE_NUNITS (mode);
5461 inner = GET_MODE_INNER (mode);
5462
5463 v = rtvec_alloc (units);
5464
5465 /* CONST0_RTX for the inner mode must already be set before this function is called. */
5466 if (!CONST0_RTX (inner))
5467 abort ();
5468
5469 for (i = 0; i < units; ++i)
5470 RTVEC_ELT (v, i) = CONST0_RTX (inner);
5471
5472 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5473 return tem;
5474 }
5475
5476 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the shared
5477 zero vector when all elements are zero. */
5478 rtx
5479 gen_rtx_CONST_VECTOR (mode, v)
5480 enum machine_mode mode;
5481 rtvec v;
5482 {
5483 rtx inner_zero = CONST0_RTX (GET_MODE_INNER (mode));
5484 int i;
5485
5486 for (i = GET_MODE_NUNITS (mode) - 1; i >= 0; i--)
5487 if (RTVEC_ELT (v, i) != inner_zero)
5488 return gen_rtx_raw_CONST_VECTOR (mode, v);
5489 return CONST0_RTX (mode);
5490 }
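
/* A sketch (assuming the target supports V4SImode): an all-zero
   vector collapses to the shared CONST0_RTX for the mode.

     rtvec v = rtvec_alloc (4);
     int i;
     for (i = 0; i < 4; i++)
       RTVEC_ELT (v, i) = const0_rtx;
     if (gen_rtx_CONST_VECTOR (V4SImode, v) != CONST0_RTX (V4SImode))
       abort ();  */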
5491
5492 /* Create some permanent unique rtl objects shared between all functions.
5493 LINE_NUMBERS is nonzero if line numbers are to be generated. */
5494
5495 void
5496 init_emit_once (line_numbers)
5497 int line_numbers;
5498 {
5499 int i;
5500 enum machine_mode mode;
5501 enum machine_mode double_mode;
5502
5503 /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
5504 tables. */
5505 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5506 const_int_htab_eq, NULL);
5507
5508 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5509 const_double_htab_eq, NULL);
5510
5511 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5512 mem_attrs_htab_eq, NULL);
5513 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5514 reg_attrs_htab_eq, NULL);
5515
5516 no_line_numbers = ! line_numbers;
5517
5518 /* Compute the word and byte modes. */
5519
5520 byte_mode = VOIDmode;
5521 word_mode = VOIDmode;
5522 double_mode = VOIDmode;
5523
5524 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5525 mode = GET_MODE_WIDER_MODE (mode))
5526 {
5527 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5528 && byte_mode == VOIDmode)
5529 byte_mode = mode;
5530
5531 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5532 && word_mode == VOIDmode)
5533 word_mode = mode;
5534 }
5535
5536 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5537 mode = GET_MODE_WIDER_MODE (mode))
5538 {
5539 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5540 && double_mode == VOIDmode)
5541 double_mode = mode;
5542 }
5543
5544 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5545
5546 /* Assign register numbers to the globally defined register rtx.
5547 This must be done at runtime because the register number field
5548 is in a union and some compilers can't initialize unions. */
5549
5550 pc_rtx = gen_rtx (PC, VOIDmode);
5551 cc0_rtx = gen_rtx (CC0, VOIDmode);
5552 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5553 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5554 if (hard_frame_pointer_rtx == 0)
5555 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
5556 HARD_FRAME_POINTER_REGNUM);
5557 if (arg_pointer_rtx == 0)
5558 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5559 virtual_incoming_args_rtx =
5560 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5561 virtual_stack_vars_rtx =
5562 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5563 virtual_stack_dynamic_rtx =
5564 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5565 virtual_outgoing_args_rtx =
5566 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5567 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5568
5569 /* Initialize RTL for commonly used hard registers. These are
5570 copied into regno_reg_rtx as we begin to compile each function. */
5571 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5572 static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5573
5574 #ifdef INIT_EXPANDERS
5575 /* This is to initialize {init|mark|free}_machine_status before the first
5576 call to push_function_context_to. This is needed by the Chill front
5577 end which calls push_function_context_to before the first call to
5578 init_function_start. */
5579 INIT_EXPANDERS;
5580 #endif
5581
5582 /* Create the unique rtx's for certain rtx codes and operand values. */
5583
5584 /* Don't use gen_rtx here since gen_rtx in this case
5585 tries to use these variables. */
5586 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5587 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5588 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5589
5590 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5591 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5592 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5593 else
5594 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5595
5596 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5597 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5598 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5599 REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
5600 REAL_VALUE_FROM_INT (dconstm2, -2, -1, double_mode);
5601
5602 dconsthalf = dconst1;
5603 dconsthalf.exp--;
5604
5605 for (i = 0; i <= 2; i++)
5606 {
5607 REAL_VALUE_TYPE *r =
5608 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5609
5610 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5611 mode = GET_MODE_WIDER_MODE (mode))
5612 const_tiny_rtx[i][(int) mode] =
5613 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5614
5615 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5616
5617 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5618 mode = GET_MODE_WIDER_MODE (mode))
5619 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5620
5621 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5622 mode != VOIDmode;
5623 mode = GET_MODE_WIDER_MODE (mode))
5624 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5625 }
5626
5627 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5628 mode != VOIDmode;
5629 mode = GET_MODE_WIDER_MODE (mode))
5630 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5631
5632 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5633 mode != VOIDmode;
5634 mode = GET_MODE_WIDER_MODE (mode))
5635 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5636
5637 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5638 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5639 const_tiny_rtx[0][i] = const0_rtx;
5640
5641 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5642 if (STORE_FLAG_VALUE == 1)
5643 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5644
5645 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5646 return_address_pointer_rtx
5647 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5648 #endif
5649
5650 #ifdef STRUCT_VALUE
5651 struct_value_rtx = STRUCT_VALUE;
5652 #else
5653 struct_value_rtx = gen_rtx_REG (Pmode, STRUCT_VALUE_REGNUM);
5654 #endif
5655
5656 #ifdef STRUCT_VALUE_INCOMING
5657 struct_value_incoming_rtx = STRUCT_VALUE_INCOMING;
5658 #else
5659 #ifdef STRUCT_VALUE_INCOMING_REGNUM
5660 struct_value_incoming_rtx
5661 = gen_rtx_REG (Pmode, STRUCT_VALUE_INCOMING_REGNUM);
5662 #else
5663 struct_value_incoming_rtx = struct_value_rtx;
5664 #endif
5665 #endif
5666
5667 #ifdef STATIC_CHAIN_REGNUM
5668 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5669
5670 #ifdef STATIC_CHAIN_INCOMING_REGNUM
5671 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5672 static_chain_incoming_rtx
5673 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5674 else
5675 #endif
5676 static_chain_incoming_rtx = static_chain_rtx;
5677 #endif
5678
5679 #ifdef STATIC_CHAIN
5680 static_chain_rtx = STATIC_CHAIN;
5681
5682 #ifdef STATIC_CHAIN_INCOMING
5683 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5684 #else
5685 static_chain_incoming_rtx = static_chain_rtx;
5686 #endif
5687 #endif
5688
5689 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5690 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5691 }
5692 \f
5693 /* Query and clear/restore no_line_numbers. This is used by the
5694 switch/case handling in stmt.c to give proper line numbers in
5695 warnings about unreachable code. */
5696
5697 int
5698 force_line_numbers ()
5699 {
5700 int old = no_line_numbers;
5701
5702 no_line_numbers = 0;
5703 if (old)
5704 force_next_line_note ();
5705 return old;
5706 }
5707
5708 void
5709 restore_line_number_status (old_value)
5710 int old_value;
5711 {
5712 no_line_numbers = old_value;
5713 }
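
/* The save/restore idiom these two functions support (a sketch of the
   stmt.c usage):

     int old = force_line_numbers ();
     ... emit code that wants accurate line notes ...
     restore_line_number_status (old);  */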
5714
5715 /* Produce an exact duplicate of insn INSN after AFTER, taking care
5716 to update any libcall regions if present. */
5717
5718 rtx
5719 emit_copy_of_insn_after (insn, after)
5720 rtx insn, after;
5721 {
5722 rtx new;
5723 rtx note1, note2, link;
5724
5725 switch (GET_CODE (insn))
5726 {
5727 case INSN:
5728 new = emit_insn_after (copy_insn (PATTERN (insn)), after);
5729 break;
5730
5731 case JUMP_INSN:
5732 new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5733 break;
5734
5735 case CALL_INSN:
5736 new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5737 if (CALL_INSN_FUNCTION_USAGE (insn))
5738 CALL_INSN_FUNCTION_USAGE (new)
5739 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5740 SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
5741 CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
5742 break;
5743
5744 default:
5745 abort ();
5746 }
5747
5748 /* Update LABEL_NUSES. */
5749 mark_jump_label (PATTERN (new), new, 0);
5750
5751 INSN_SCOPE (new) = INSN_SCOPE (insn);
5752
5753 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
5754 make them. */
5755 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5756 if (REG_NOTE_KIND (link) != REG_LABEL)
5757 {
5758 if (GET_CODE (link) == EXPR_LIST)
5759 REG_NOTES (new)
5760 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
5761 XEXP (link, 0),
5762 REG_NOTES (new)));
5763 else
5764 REG_NOTES (new)
5765 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
5766 XEXP (link, 0),
5767 REG_NOTES (new)));
5768 }
5769
5770 /* Fix the libcall sequences. */
5771 if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
5772 {
5773 rtx p = new;
5774 while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
5775 p = PREV_INSN (p);
5776 XEXP (note1, 0) = p;
5777 XEXP (note2, 0) = new;
5778 }
5779 INSN_CODE (new) = INSN_CODE (insn);
5780 return new;
5781 }
5782
5783 #include "gt-emit-rtl.h"