/* Emit RTL for the GNU C-Compiler expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains the functions `gen_rtx', `gen_reg_rtx'
   and `gen_label_rtx' that are the usual ways of creating rtl
   expressions for most purposes.

   It also has the functions for creating insns and linking
   them in the doubly-linked chain.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines use `gen_rtx' to make
   the individual rtx's of the pattern; what is machine dependent
   is the kind of rtx's they make and what arguments they use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "real.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"

/* Commonly used modes.  */

enum machine_mode byte_mode;    /* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;    /* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;  /* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;     /* Mode whose width is POINTER_SIZE.  */


/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* Highest label number in current function.
   Zero means use the value of label_num instead.
   This is nonzero only when belatedly compiling an inline function.  */

static int last_label_num;

/* Value label_num had when set_new_first_and_last_label_number was called.
   If label_num has not changed since then, last_label_num is valid.  */

static int base_label_num;

/* Nonzero means do not generate NOTEs for source line numbers.  */

static int no_line_numbers;

/* Commonly used rtx's, so that we only need space for one copy.
   These are initialized once for the entire compilation.
   All of these are unique; no other rtx-object will be equal to any
   of these.  */

rtx global_rtl[GR_MAX];

/* Commonly used RTL for hard registers.  These objects are not necessarily
   unique, so we allocate them separately from global_rtl.  They are
   initialized once per compilation unit, then copied into regno_reg_rtx
   at the beginning of each function.  */
static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
/* All references to the following fixed hard registers go through
   these unique rtl objects.  On machines where the frame-pointer and
   arg-pointer are the same register, they use the same unique object.

   After register allocation, other rtl objects which used to be pseudo-regs
   may be clobbered to refer to the frame-pointer register.
   But references that were originally to the frame-pointer can be
   distinguished from the others because they contain frame_pointer_rtx.

   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
   tricky: until register elimination has taken place hard_frame_pointer_rtx
   should be used if it is being set, and frame_pointer_rtx otherwise.  After
   register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are the same (which is most of them),
   these two rtxs are likewise the same.

   In an inline procedure, the stack and frame pointer rtxs may not be
   used for anything else.  */
rtx struct_value_rtx;           /* (REG:Pmode STRUCT_VALUE_REGNUM) */
rtx struct_value_incoming_rtx;  /* (REG:Pmode STRUCT_VALUE_INCOMING_REGNUM) */
rtx static_chain_rtx;           /* (REG:Pmode STATIC_CHAIN_REGNUM) */
rtx static_chain_incoming_rtx;  /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
rtx pic_offset_table_rtx;       /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */

/* This is used to implement __builtin_return_address for some machines.
   See for instance the MIPS port.  */
rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

#define first_insn (cfun->emit->x_first_insn)
#define last_insn (cfun->emit->x_last_insn)
#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
#define last_linenum (cfun->emit->x_last_linenum)
#define last_filename (cfun->emit->x_last_filename)
#define first_label_num (cfun->emit->x_first_label_num)

static rtx make_jump_insn_raw           PARAMS ((rtx));
static rtx make_call_insn_raw           PARAMS ((rtx));
static rtx find_line_note               PARAMS ((rtx));
static rtx change_address_1             PARAMS ((rtx, enum machine_mode, rtx,
                                                 int));
static void unshare_all_rtl_1           PARAMS ((rtx));
static void unshare_all_decls           PARAMS ((tree));
static void reset_used_decls            PARAMS ((tree));
static void mark_label_nuses            PARAMS ((rtx));
static hashval_t const_int_htab_hash    PARAMS ((const void *));
static int const_int_htab_eq            PARAMS ((const void *,
                                                 const void *));
static hashval_t const_double_htab_hash PARAMS ((const void *));
static int const_double_htab_eq         PARAMS ((const void *,
                                                 const void *));
static rtx lookup_const_double          PARAMS ((rtx));
static hashval_t mem_attrs_htab_hash    PARAMS ((const void *));
static int mem_attrs_htab_eq            PARAMS ((const void *,
                                                 const void *));
static mem_attrs *get_mem_attrs         PARAMS ((HOST_WIDE_INT, tree, rtx,
                                                 rtx, unsigned int,
                                                 enum machine_mode));
static hashval_t reg_attrs_htab_hash    PARAMS ((const void *));
static int reg_attrs_htab_eq            PARAMS ((const void *,
                                                 const void *));
static reg_attrs *get_reg_attrs         PARAMS ((tree, int));
static tree component_ref_for_mem_expr  PARAMS ((tree));
static rtx gen_const_vector_0           PARAMS ((enum machine_mode));
/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (x)
     const void *x;
{
  return (hashval_t) INTVAL ((struct rtx_def *) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (x, y)
     const void *x;
     const void *y;
{
  return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (x)
     const void *x;
{
  rtx value = (rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (also really a CONST_DOUBLE).  */
static int
const_double_htab_eq (x, y)
     const void *x;
     const void *y;
{
  rtx a = (rtx) x, b = (rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
            && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
                           CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (x)
     const void *x;
{
  mem_attrs *p = (mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
          ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
          ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
          ^ (size_t) p->expr);
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (x, y)
     const void *x;
     const void *y;
{
  mem_attrs *p = (mem_attrs *) x;
  mem_attrs *q = (mem_attrs *) y;

  return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
          && p->size == q->size && p->align == q->align);
}

/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (alias, expr, offset, size, align, mode)
     HOST_WIDE_INT alias;
     tree expr;
     rtx offset;
     rtx size;
     unsigned int align;
     enum machine_mode mode;
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (alias == 0 && expr == 0 && offset == 0
      && (size == 0
          || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (align == BITS_PER_UNIT
          || (STRICT_ALIGNMENT
              && mode != BLKmode && align == GET_MODE_ALIGNMENT (mode))))
    return 0;

  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (mem_attrs));
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return *slot;
}
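
/* For instance, asking for alias set 0 with no expr, no offset, no size
   and BITS_PER_UNIT alignment describes a MEM with nothing worth
   recording, so the function returns 0 and the MEM's attribute pointer
   simply stays null.  */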

/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (x)
     const void *x;
{
  reg_attrs *p = (reg_attrs *) x;

  return ((p->offset * 1000) ^ (long) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (x, y)
     const void *x;
     const void *y;
{
  reg_attrs *p = (reg_attrs *) x;
  reg_attrs *q = (reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}

/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   a REG with decl DECL and offset OFFSET.  */

static reg_attrs *
get_reg_attrs (decl, offset)
     tree decl;
     int offset;
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (reg_attrs));
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (mode, regno)
     enum machine_mode mode;
     int regno;
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (mode, arg)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     HOST_WIDE_INT arg;
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
                                   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}

rtx
gen_int_mode (c, mode)
     HOST_WIDE_INT c;
     enum machine_mode mode;
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
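
/* For example, gen_int_mode (0xff, QImode) yields (const_int -1):
   trunc_int_for_mode sign-extends the value from the 8-bit mode, so
   the CONST_INT is in the canonical form the rest of the compiler
   expects.  */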

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (real)
     rtx real;
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (value, mode)
     REAL_VALUE_TYPE value;
     enum machine_mode mode;
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE));

  return lookup_const_double (real);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (i0, i1, mode)
     HOST_WIDE_INT i0, i1;
     enum machine_mode mode;
{
  rtx value;
  unsigned int i;

  if (mode != VOIDmode)
    {
      int width;
      if (GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT
          /* We can get a 0 for an error mark.  */
          && GET_MODE_CLASS (mode) != MODE_VECTOR_INT
          && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
        abort ();

      /* We clear out all bits that don't belong in MODE, unless they and
         our sign bit are all one.  So we get either a reasonable negative
         value or a reasonable unsigned value for this mode.  */
      width = GET_MODE_BITSIZE (mode);
      if (width < HOST_BITS_PER_WIDE_INT
          && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
              != ((HOST_WIDE_INT) (-1) << (width - 1))))
        i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
      else if (width == HOST_BITS_PER_WIDE_INT
               && ! (i1 == ~0 && i0 < 0))
        i1 = 0;
      else if (width > 2 * HOST_BITS_PER_WIDE_INT)
        /* We cannot represent this value as a constant.  */
        abort ();

      /* If this would be an entire word for the target, but is not for
         the host, then sign-extend on the host so that the number will
         look the same way on the host that it would on the target.

         For example, when building a 64 bit alpha hosted 32 bit sparc
         targeted compiler, then we want the 32 bit unsigned value -1 to be
         represented as a 64 bit value -1, and not as 0x00000000ffffffff.
         The latter confuses the sparc backend.  */

      if (width < HOST_BITS_PER_WIDE_INT
          && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
        i0 |= ((HOST_WIDE_INT) (-1) << width);

      /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
         CONST_INT.

         ??? Strictly speaking, this is wrong if we create a CONST_INT for
         a large unsigned constant with the size of MODE being
         HOST_BITS_PER_WIDE_INT and later try to interpret that constant
         in a wider mode.  In that case we will mis-interpret it as a
         negative number.

         Unfortunately, the only alternative is to make a CONST_DOUBLE for
         any constant in any mode if it is an unsigned constant larger
         than the maximum signed integer in an int on the host.  However,
         doing this will break everyone that always expects to see a
         CONST_INT for SImode and smaller.

         We have always been making CONST_INTs in this case, so nothing
         new is being broken.  */

      if (width <= HOST_BITS_PER_WIDE_INT)
        i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
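
/* As an illustration: on a host where HOST_BITS_PER_WIDE_INT is 32,
   immed_double_const (0, 1, DImode) yields a CONST_DOUBLE with low
   word 0 and high word 1, i.e. the 64-bit value 1 << 32, whereas
   immed_double_const (5, 0, DImode) simply returns (const_int 5)
   because that value fits in a single word.  */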

rtx
gen_rtx_REG (mode, regno)
     enum machine_mode mode;
     unsigned int regno;
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
        return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
        return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
        return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

rtx
gen_rtx_MEM (mode, addr)
     enum machine_mode mode;
     rtx addr;
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

rtx
gen_rtx_SUBREG (mode, reg, offset)
     enum machine_mode mode;
     rtx reg;
     int offset;
{
  /* This is the most common failure type.
     Catch it early so we can see who does it.  */
  if ((offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  /* This check isn't usable right now because combine will
     throw arbitrary crap like a CALL into a SUBREG in
     gen_lowpart_for_combine so we must just eat it.  */
#if 0
  /* Check for this too.  */
  if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
    abort ();
#endif
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG; otherwise generate a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (mode, reg)
     enum machine_mode mode;
     rtx reg;
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
                         subreg_lowpart_offset (mode, inmode));
}
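
/* For example, gen_lowpart_SUBREG (QImode, (reg:SI 100)) produces
   (subreg:QI (reg:SI 100) 0) on a little-endian target and
   (subreg:QI (reg:SI 100) 3) on a big-endian target with 32-bit
   words, since subreg_lowpart_offset accounts for byte order.  */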
\f
/* rtx gen_rtx (code, mode, [element1, ..., elementn])
**
**          This routine generates an RTX of the size specified by
**      <code>, which is an RTX code.  The RTX structure is initialized
**      from the arguments <element1> through <elementn>, which are
**      interpreted according to the specific RTX type's format.  The
**      special machine mode associated with the rtx (if any) is specified
**      in <mode>.
**
**          gen_rtx can be invoked in a way which resembles the lisp-like
**      rtx it will generate.  For example, the following rtx structure:
**
**            (plus:QI (mem:QI (reg:SI 1))
**                     (mem:QI (plus:SI (reg:SI 2) (reg:SI 3))))
**
**          ...would be generated by the following C code:
**
**          gen_rtx (PLUS, QImode,
**              gen_rtx (MEM, QImode,
**                  gen_rtx (REG, SImode, 1)),
**              gen_rtx (MEM, QImode,
**                  gen_rtx (PLUS, SImode,
**                      gen_rtx (REG, SImode, 2),
**                      gen_rtx (REG, SImode, 3))));
*/

/*VARARGS2*/
rtx
gen_rtx VPARAMS ((enum rtx_code code, enum machine_mode mode, ...))
{
  int i;                /* Array indices...  */
  const char *fmt;      /* Current rtx's format...  */
  rtx rt_val;           /* RTX to return to caller...  */

  VA_OPEN (p, mode);
  VA_FIXEDARG (p, enum rtx_code, code);
  VA_FIXEDARG (p, enum machine_mode, mode);

  switch (code)
    {
    case CONST_INT:
      rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
      break;

    case CONST_DOUBLE:
      {
        HOST_WIDE_INT arg0 = va_arg (p, HOST_WIDE_INT);
        HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);

        rt_val = immed_double_const (arg0, arg1, mode);
      }
      break;

    case REG:
      rt_val = gen_rtx_REG (mode, va_arg (p, int));
      break;

    case MEM:
      rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
      break;

    default:
      rt_val = rtx_alloc (code);        /* Allocate the storage space.  */
      rt_val->mode = mode;              /* Store the machine mode...  */

      fmt = GET_RTX_FORMAT (code);      /* Find the right format...  */
      for (i = 0; i < GET_RTX_LENGTH (code); i++)
        {
          switch (*fmt++)
            {
            case '0':           /* Unused field.  */
              break;

            case 'i':           /* An integer?  */
              XINT (rt_val, i) = va_arg (p, int);
              break;

            case 'w':           /* A wide integer?  */
              XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
              break;

            case 's':           /* A string?  */
              XSTR (rt_val, i) = va_arg (p, char *);
              break;

            case 'e':           /* An expression?  */
            case 'u':           /* An insn?  Same except when printing.  */
              XEXP (rt_val, i) = va_arg (p, rtx);
              break;

            case 'E':           /* An RTX vector?  */
              XVEC (rt_val, i) = va_arg (p, rtvec);
              break;

            case 'b':           /* A bitmap?  */
              XBITMAP (rt_val, i) = va_arg (p, bitmap);
              break;

            case 't':           /* A tree?  */
              XTREE (rt_val, i) = va_arg (p, tree);
              break;

            default:
              abort ();
            }
        }
      break;
    }

  VA_CLOSE (p);
  return rt_val;
}

/* gen_rtvec (n, [rt1, ..., rtn])
**
**          This routine creates an rtvec and stores within it the
**      pointers to rtx's which are its arguments.
*/

/*VARARGS1*/
rtvec
gen_rtvec VPARAMS ((int n, ...))
{
  int i, save_n;
  rtx *vector;

  VA_OPEN (p, n);
  VA_FIXEDARG (p, int, n);

  if (n == 0)
    return NULL_RTVEC;          /* Don't allocate an empty rtvec...  */

  vector = (rtx *) alloca (n * sizeof (rtx));

  for (i = 0; i < n; i++)
    vector[i] = va_arg (p, rtx);

  /* The definition of VA_* in K&R C causes `n' to go out of scope.  */
  save_n = n;
  VA_CLOSE (p);

  return gen_rtvec_v (save_n, vector);
}

rtvec
gen_rtvec_v (n, argp)
     int n;
     rtx *argp;
{
  int i;
  rtvec rt_val;

  if (n == 0)
    return NULL_RTVEC;          /* Don't allocate an empty rtvec...  */

  rt_val = rtvec_alloc (n);     /* Allocate an rtvec...  */

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
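
/* A typical use of these routines is building the body of a PARALLEL,
   e.g. gen_rtx (PARALLEL, VOIDmode, gen_rtvec (2, set0, set1)), where
   set0 and set1 are previously constructed SET rtx's; the 'E' case in
   gen_rtx above consumes the rtvec.  */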
\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (mode)
     enum machine_mode mode;
{
  struct function *f = cfun;
  rtx val;

  /* Don't let anything called after initial flow analysis create new
     registers.  */
  if (no_new_pseudos)
    abort ();

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
         Instead, make a CONCAT of two pseudos.
         This allows noncontiguous allocation of the real and imaginary parts,
         which makes much better code.  Besides, allocating DCmode
         pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == f->emit->regno_pointer_align_length)
    {
      int old_size = f->emit->regno_pointer_align_length;
      char *new;
      rtx *new1;

      new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
      memset (new + old_size, 0, old_size);
      f->emit->regno_pointer_align = (unsigned char *) new;

      new1 = (rtx *) ggc_realloc (f->emit->x_regno_reg_rtx,
                                  old_size * 2 * sizeof (rtx));
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      f->emit->regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
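
/* Note that when generating_concat_p is set, gen_reg_rtx (DCmode) does
   not return a bare (reg:DC n); it returns
   (concat:DC (reg:DF n) (reg:DF n+1)) so that the two halves can be
   allocated independently.  */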

/* Generate a register rtx with the same attributes as REG, but
   offset by OFFSET.  */

rtx
gen_rtx_REG_offset (reg, mode, regno, offset)
     rtx reg;
     enum machine_mode mode;
     unsigned int regno;
     int offset;
{
  rtx new = gen_rtx_REG (mode, regno);
  REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
                                   REG_OFFSET (reg) + offset);
  return new;
}

/* Set the register attributes for REG from the attributes of MEM.  */

void
set_reg_attrs_from_mem (reg, mem)
     rtx reg;
     rtx mem;
{
  if (MEM_OFFSET (mem) && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
    REG_ATTRS (reg)
      = get_reg_attrs (MEM_EXPR (mem), INTVAL (MEM_OFFSET (mem)));
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (parm_rtx, mem)
     rtx parm_rtx;
     rtx mem;
{
  if (GET_CODE (parm_rtx) == REG)
    set_reg_attrs_from_mem (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
         parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
        {
          rtx x = XVECEXP (parm_rtx, 0, i);
          if (GET_CODE (XEXP (x, 0)) == REG)
            REG_ATTRS (XEXP (x, 0))
              = get_reg_attrs (MEM_EXPR (mem),
                               INTVAL (XEXP (x, 1)));
        }
    }
}

/* Assign the RTX X to declaration T.  */
void
set_decl_rtl (t, x)
     tree t;
     rtx x;
{
  DECL_CHECK (t)->decl.rtl = x;

  if (!x)
    return;
  /* For registers, we maintain the reverse information too.  */
  if (GET_CODE (x) == REG)
    REG_ATTRS (x) = get_reg_attrs (t, 0);
  else if (GET_CODE (x) == SUBREG)
    REG_ATTRS (SUBREG_REG (x))
      = get_reg_attrs (t, -SUBREG_BYTE (x));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
        REG_ATTRS (XEXP (x, 1))
          = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      for (i = 0; i < XVECLEN (x, 0); i++)
        {
          rtx y = XVECEXP (x, 0, i);
          if (REG_P (XEXP (y, 0)))
            REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
        }
    }
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (reg)
     rtx reg;
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else if (GET_CODE (reg) == REG)
    REG_USERVAR_P (reg) = 1;
  else
    abort ();
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (reg, align)
     rtx reg;
     int align;
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
        REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num ()
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num ()
{
  if (last_label_num && label_num == base_label_num)
    return last_label_num;
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num ()
{
  return first_label_num;
}
\f
/* Return the final regno of X, which is a SUBREG of a hard
   register.  */
int
subreg_hard_regno (x, check_mode)
     rtx x;
     int check_mode;
{
  enum machine_mode mode = GET_MODE (x);
  unsigned int byte_offset, base_regno, final_regno;
  rtx reg = SUBREG_REG (x);

  /* This is where we attempt to catch illegal subregs
     created by the compiler.  */
  if (GET_CODE (x) != SUBREG
      || GET_CODE (reg) != REG)
    abort ();
  base_regno = REGNO (reg);
  if (base_regno >= FIRST_PSEUDO_REGISTER)
    abort ();
  if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
    abort ();

  /* Catch non-congruent offsets too.  */
  byte_offset = SUBREG_BYTE (x);
  if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  final_regno = subreg_regno (x);

  return final_regno;
}

/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (mode, x)
     enum machine_mode mode;
     rtx x;
{
  int msize = GET_MODE_SIZE (mode);
  int xsize = GET_MODE_SIZE (GET_MODE (x));
  int offset = 0;

  if (GET_MODE (x) == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if (GET_MODE (x) != VOIDmode
      && ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
          > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE (x) != VOIDmode && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, GET_MODE (x));

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
          || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
         sign- or zero-extended, we can either just use the object being
         extended or make a narrower extension.  If we want an even smaller
         piece than the size of the object being extended, call ourselves
         recursively.

         This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
        return XEXP (x, 0);
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
        return gen_lowpart_common (mode, XEXP (x, 0));
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
        return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
           || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR)
    return simplify_gen_subreg (mode, x, GET_MODE (x), offset);
  else if ((GET_MODE_CLASS (mode) == MODE_VECTOR_INT
            || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
           && GET_MODE (x) == VOIDmode)
    return simplify_gen_subreg (mode, x, int_mode_for_mode (mode), offset);
  /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
     from the low-order part of the constant.  */
  else if ((GET_MODE_CLASS (mode) == MODE_INT
            || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
           && GET_MODE (x) == VOIDmode
           && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      /* If MODE is twice the host word size, X is already the desired
         representation.  Otherwise, if MODE is wider than a word, we can't
         do this.  If MODE is exactly a word, return just one CONST_INT.  */

      if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
        return x;
      else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
        return 0;
      else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
        return (GET_CODE (x) == CONST_INT ? x
                : GEN_INT (CONST_DOUBLE_LOW (x)));
      else
        {
          /* MODE must be narrower than HOST_BITS_PER_WIDE_INT.  */
          HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
                               : CONST_DOUBLE_LOW (x));

          /* Sign extend to HOST_WIDE_INT.  */
          val = trunc_int_for_mode (val, mode);

          return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
                  : GEN_INT (val));
        }
    }

  /* The floating-point emulator can handle all conversions between
     FP and integer operands.  This simplifies reload because it
     doesn't have to deal with constructs like (subreg:DI
     (const_double:SF ...)) or (subreg:DF (const_int ...)).  */
  /* Single-precision floats are always 32 bits and double-precision
     floats are always 64 bits.  */

  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
           && GET_MODE_BITSIZE (mode) == 32
           && GET_CODE (x) == CONST_INT)
    {
      REAL_VALUE_TYPE r;
      long i = INTVAL (x);

      real_from_target (&r, &i, mode);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
           && GET_MODE_BITSIZE (mode) == 64
           && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
           && GET_MODE (x) == VOIDmode)
    {
      REAL_VALUE_TYPE r;
      HOST_WIDE_INT low, high;
      long i[2];

      if (GET_CODE (x) == CONST_INT)
        {
          low = INTVAL (x);
          high = low >> (HOST_BITS_PER_WIDE_INT - 1);
        }
      else
        {
          low = CONST_DOUBLE_LOW (x);
          high = CONST_DOUBLE_HIGH (x);
        }

      if (HOST_BITS_PER_WIDE_INT > 32)
        high = low >> 31 >> 1;

      /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
         target machine.  */
      if (WORDS_BIG_ENDIAN)
        i[0] = high, i[1] = low;
      else
        i[0] = low, i[1] = high;

      real_from_target (&r, i, mode);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if ((GET_MODE_CLASS (mode) == MODE_INT
            || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
           && GET_CODE (x) == CONST_DOUBLE
           && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
    {
      REAL_VALUE_TYPE r;
      long i[4];  /* Only the low 32 bits of each 'long' are used.  */
      int endian = WORDS_BIG_ENDIAN ? 1 : 0;

      /* Convert 'r' into an array of four 32-bit words in target word
         order.  */
      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      switch (GET_MODE_BITSIZE (GET_MODE (x)))
        {
        case 32:
          REAL_VALUE_TO_TARGET_SINGLE (r, i[3 * endian]);
          i[1] = 0;
          i[2] = 0;
          i[3 - 3 * endian] = 0;
          break;
        case 64:
          REAL_VALUE_TO_TARGET_DOUBLE (r, i + 2 * endian);
          i[2 - 2 * endian] = 0;
          i[3 - 2 * endian] = 0;
          break;
        case 96:
          REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i + endian);
          i[3 - 3 * endian] = 0;
          break;
        case 128:
          REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i);
          break;
        default:
          abort ();
        }
      /* Now, pack the 32-bit elements of the array into a CONST_DOUBLE
         and return it.  */
#if HOST_BITS_PER_WIDE_INT == 32
      return immed_double_const (i[3 * endian], i[1 + endian], mode);
#else
      if (HOST_BITS_PER_WIDE_INT != 64)
        abort ();

      return immed_double_const ((((unsigned long) i[3 * endian])
                                  | ((HOST_WIDE_INT) i[1 + endian] << 32)),
                                 (((unsigned long) i[2 - endian])
                                  | ((HOST_WIDE_INT) i[3 - 3 * endian] << 32)),
                                 mode);
#endif
    }

  /* Otherwise, we can't do this.  */
  return 0;
}
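
/* Two examples: gen_lowpart_common (QImode, GEN_INT (0x1234)) returns
   (const_int 52), i.e. 0x34, the low-order byte of the constant, while
   gen_lowpart_common (DImode, x) for an SImode X returns 0 on a target
   with 32-bit words, since DImode occupies more words than SImode.  */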
\f
/* Return the real part (which has mode MODE) of a complex value X.
   This always comes at the low address in memory.  */

rtx
gen_realpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  if (WORDS_BIG_ENDIAN
      && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
      && REG_P (x)
      && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access real part of complex value in hard register");
  else if (WORDS_BIG_ENDIAN)
    return gen_highpart (mode, x);
  else
    return gen_lowpart (mode, x);
}

/* Return the imaginary part (which has mode MODE) of a complex value X.
   This always comes at the high address in memory.  */

rtx
gen_imagpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  if (WORDS_BIG_ENDIAN)
    return gen_lowpart (mode, x);
  else if (! WORDS_BIG_ENDIAN
           && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
           && REG_P (x)
           && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access imaginary part of complex value in hard register");
  else
    return gen_highpart (mode, x);
}

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the real part of the complex value in its containing reg.
   Complex values are always stored with the real part in the first word,
   regardless of WORDS_BIG_ENDIAN.  */

int
subreg_realpart_p (x)
     rtx x;
{
  if (GET_CODE (x) != SUBREG)
    abort ();

  return ((unsigned int) SUBREG_BYTE (x)
          < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
}
\f
/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
   return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
   least-significant part of X.
   MODE specifies how big a part of X to return;
   it usually should not be larger than a word.
   If X is a MEM whose address is a QUEUED, the value may be so also.  */

rtx
gen_lowpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  rtx result = gen_lowpart_common (mode, x);

  if (result)
    return result;
  else if (GET_CODE (x) == REG)
    {
      /* Must be a hard reg that's not valid in MODE.  */
      result = gen_lowpart_common (mode, copy_to_reg (x));
      if (result == 0)
        abort ();
      return result;
    }
  else if (GET_CODE (x) == MEM)
    {
      /* The only additional case we can do is MEM.  */
      int offset = 0;

      /* The following exposes the use of "x" to CSE.  */
      if (GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
          && SCALAR_INT_MODE_P (GET_MODE (x))
          && ! no_new_pseudos)
        return gen_lowpart (mode, force_reg (GET_MODE (x), x));

      if (WORDS_BIG_ENDIAN)
        offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
                  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));

      if (BYTES_BIG_ENDIAN)
        /* Adjust the address so that the address-after-the-data
           is unchanged.  */
        offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
                   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));

      return adjust_address (x, mode, offset);
    }
  else if (GET_CODE (x) == ADDRESSOF)
    return gen_lowpart (mode, force_reg (GET_MODE (x), x));
  else
    abort ();
}

/* Like `gen_lowpart', but refer to the most significant part.
   This is used to access the imaginary part of a complex number.  */

rtx
gen_highpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  if (msize > UNITS_PER_WORD
      && msize != GET_MODE_UNIT_SIZE (GET_MODE (x)))
    abort ();

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
                                subreg_highpart_offset (mode, GET_MODE (x)));

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (result != NULL_RTX && GET_CODE (result) == MEM)
    result = validize_mem (result);

  if (!result)
    abort ();
  return result;
}

/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */
rtx
gen_highpart_mode (outermode, innermode, exp)
     enum machine_mode outermode, innermode;
     rtx exp;
{
  if (GET_MODE (exp) != VOIDmode)
    {
      if (GET_MODE (exp) != innermode)
        abort ();
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
                              subreg_highpart_offset (outermode, innermode));
}

/* Return offset in bytes to get OUTERMODE low part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_lowpart_offset (outermode, innermode)
     enum machine_mode outermode, innermode;
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
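
/* For example, with 4-byte words the SImode low part of a DImode
   value is at byte offset 0 on a little-endian target and at byte
   offset 4 on a big-endian one; the difference of 4 bytes falls
   entirely in the word-offset term above.  */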

/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (outermode, innermode)
     enum machine_mode outermode, innermode;
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
    abort ();

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
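
/* Mirroring subreg_lowpart_offset: with 4-byte words the SImode high
   part of a DImode value is at byte offset 4 on a little-endian
   target and at byte offset 0 on a big-endian one.  */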

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (x)
     rtx x;
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
          == SUBREG_BYTE (x));
}
\f

/* Helper routine for all the constant cases of operand_subword.
   Some places invoke this directly.  */

rtx
constant_subword (op, offset, mode)
     rtx op;
     int offset;
     enum machine_mode mode;
{
  int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
  HOST_WIDE_INT val;

  /* If OP is already an integer word, return it.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
    return op;

  /* The output is some bits, the width of the target machine's word.
     A wider-word host can surely hold them in a CONST_INT.  A narrower-word
     host can't.  */
  if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
      && GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 64
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      /* We handle 32-bit and >= 64-bit words here.  Note that the order in
         which the words are written depends on the word endianness.
         ??? This is a potential portability problem and should
         be fixed at some point.

         We must exercise caution with the sign bit.  By definition there
         are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
         Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
         So we explicitly mask and sign-extend as necessary.  */
      if (BITS_PER_WORD == 32)
        {
          val = k[offset];
          val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
          return GEN_INT (val);
        }
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset == 0)
        {
          val = k[! WORDS_BIG_ENDIAN];
          val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
          val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
          return GEN_INT (val);
        }
#endif
      else if (BITS_PER_WORD == 16)
        {
          val = k[offset >> 1];
          if ((offset & 1) == ! WORDS_BIG_ENDIAN)
            val >>= 16;
          val = ((val & 0xffff) ^ 0x8000) - 0x8000;
          return GEN_INT (val);
        }
      else
        abort ();
    }
  else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
           && GET_MODE_CLASS (mode) == MODE_FLOAT
           && GET_MODE_BITSIZE (mode) > 64
           && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[4];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (BITS_PER_WORD == 32)
        {
          val = k[offset];
          val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
          return GEN_INT (val);
        }
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset <= 1)
        {
          val = k[offset * 2 + ! WORDS_BIG_ENDIAN];
          val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
          val |= (HOST_WIDE_INT) k[offset * 2 + WORDS_BIG_ENDIAN] & 0xffffffff;
          return GEN_INT (val);
        }
#endif
      else
        abort ();
    }

  /* Single word float is a little harder, since single- and double-word
     values often do not have the same high-order bits.  We have already
     verified that we want the only defined word of the single-word value.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 32
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      /* Sign extend from known 32-bit value to HOST_WIDE_INT.  */
      val = l;
      val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;

      if (BITS_PER_WORD == 16)
        {
          if ((offset & 1) == ! WORDS_BIG_ENDIAN)
            val >>= 16;
          val = ((val & 0xffff) ^ 0x8000) - 0x8000;
        }

      return GEN_INT (val);
    }

  /* The only remaining cases that we can handle are integers.
     Convert to proper endianness now since these cases need it.
     At this point, offset == 0 means the low-order word.

     We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
     in general.  However, if OP is (const_int 0), we can just return
     it for any word.  */

  if (op == const0_rtx)
    return op;

  if (GET_MODE_CLASS (mode) != MODE_INT
      || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
      || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
    return 0;

  if (WORDS_BIG_ENDIAN)
    offset = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - offset;

  /* Find out which word on the host machine this value is in and get
     it from the constant.  */
  val = (offset / size_ratio == 0
         ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
         : (GET_CODE (op) == CONST_INT
            ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));

  /* Get the value we want into the low bits of val.  */
  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
    val = ((val >> ((offset % size_ratio) * BITS_PER_WORD)));

  val = trunc_int_for_mode (val, word_mode);

  return GEN_INT (val);
}

/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Most uses of this function can now be replaced by simplify_subreg.  */

rtx
operand_subword (op, offset, validate_address, mode)
     rtx op;
     unsigned int offset;
     int validate_address;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  if (mode == VOIDmode)
    abort ();

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (GET_CODE (op) == MEM)
    {
      rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
        return new;

      else if (reload_completed)
        {
          if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
            return 0;
        }
      else
        return replace_equiv_address (new, XEXP (new, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
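
/* For example, operand_subword (x, 1, 1, DImode) on a target with
   32-bit words returns the word of X at the higher address: the
   high-order half on a little-endian machine, the low-order half on
   a big-endian one.  */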

/* Similar to `operand_subword', but never return 0.  If we can't extract
   the required subword, put OP into a register and try again.  If that fails,
   abort.  We always validate the address in this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (op, offset, mode)
     rtx op;
     unsigned int offset;
     enum machine_mode mode;
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
         to a pseudo register.  */
      if (GET_CODE (op) == REG)
        op = copy_to_reg (op);
      else
        op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  if (result == 0)
    abort ();

  return result;
}
\f
/* Given a compare instruction, swap the operands.
   A test instruction is changed into a compare of 0 against the operand.  */

void
reverse_comparison (insn)
     rtx insn;
{
  rtx body = PATTERN (insn);
  rtx comp;

  if (GET_CODE (body) == SET)
    comp = SET_SRC (body);
  else
    comp = SET_SRC (XVECEXP (body, 0, 0));

  if (GET_CODE (comp) == COMPARE)
    {
      rtx op0 = XEXP (comp, 0);
      rtx op1 = XEXP (comp, 1);
      XEXP (comp, 0) = op1;
      XEXP (comp, 1) = op0;
    }
  else
    {
      rtx new = gen_rtx_COMPARE (VOIDmode,
                                 CONST0_RTX (GET_MODE (comp)), comp);
      if (GET_CODE (body) == SET)
        SET_SRC (body) = new;
      else
        SET_SRC (XVECEXP (body, 0, 0)) = new;
    }
}
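
/* Thus an insn whose source is (compare (reg 1) (reg 2)) ends up with
   (compare (reg 2) (reg 1)), and a test of (reg 1) becomes
   (compare (const_int 0) (reg 1)).  */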
\f
/* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
   or (2) a component ref of something variable.  Represent the latter with
   a NULL expression.  */

static tree
component_ref_for_mem_expr (ref)
     tree ref;
{
  tree inner = TREE_OPERAND (ref, 0);

  if (TREE_CODE (inner) == COMPONENT_REF)
    inner = component_ref_for_mem_expr (inner);
  else
    {
      tree placeholder_ptr = 0;

      /* Now remove any conversions: they don't change what the underlying
         object is.  Likewise for SAVE_EXPR.  Also handle PLACEHOLDER_EXPR.  */
      while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
             || TREE_CODE (inner) == NON_LVALUE_EXPR
             || TREE_CODE (inner) == VIEW_CONVERT_EXPR
             || TREE_CODE (inner) == SAVE_EXPR
             || TREE_CODE (inner) == PLACEHOLDER_EXPR)
        if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
          inner = find_placeholder (inner, &placeholder_ptr);
        else
          inner = TREE_OPERAND (inner, 0);

      if (! DECL_P (inner))
        inner = NULL_TREE;
    }

  if (inner == TREE_OPERAND (ref, 0))
    return ref;
  else
    return build (COMPONENT_REF, TREE_TYPE (ref), inner,
                  TREE_OPERAND (ref, 1));
}
1841
1842 /* Given REF, a MEM, and T, either the type of REF or the expression
1843 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1844 if we are making a new object of this type. BITPOS is nonzero if
1845 there is an offset outstanding on T that will be applied later. */
1846
1847 void
1848 set_mem_attributes_minus_bitpos (ref, t, objectp, bitpos)
1849 rtx ref;
1850 tree t;
1851 int objectp;
1852 HOST_WIDE_INT bitpos;
1853 {
1854 HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
1855 tree expr = MEM_EXPR (ref);
1856 rtx offset = MEM_OFFSET (ref);
1857 rtx size = MEM_SIZE (ref);
1858 unsigned int align = MEM_ALIGN (ref);
1859 HOST_WIDE_INT apply_bitpos = 0;
1860 tree type;
1861
1862 /* It can happen that type_for_mode was given a mode for which there
1863 is no language-level type, in which case it returns NULL and we
1864 can see that here. */
1865 if (t == NULL_TREE)
1866 return;
1867
1868 type = TYPE_P (t) ? t : TREE_TYPE (t);
1869
1870 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1871 wrong answer, as it assumes that DECL_RTL already has the right alias
1872 info. Callers should not set DECL_RTL until after the call to
1873 set_mem_attributes. */
1874 if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
1875 abort ();
1876
1877 /* Get the alias set from the expression or type (perhaps using a
1878 front-end routine) and use it. */
1879 alias = get_alias_set (t);
1880
1881 MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
1882 MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
1883 RTX_UNCHANGING_P (ref)
1884 |= ((lang_hooks.honor_readonly
1885 && (TYPE_READONLY (type) || TREE_READONLY (t)))
1886 || (! TYPE_P (t) && TREE_CONSTANT (t)));
1887
1888 /* If we are making an object of this type, or if this is a DECL, we know
1889 that it is a scalar if the type is not an aggregate. */
1890 if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
1891 MEM_SCALAR_P (ref) = 1;
1892
1893 /* We can set the alignment from the type if we are making an object,
1894 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1895 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1896 align = MAX (align, TYPE_ALIGN (type));
1897
1898 /* If the size is known, we can set that. */
1899 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1900 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
1901
1902 /* If T is not a type, we may be able to deduce some more information about
1903 the expression. */
1904 if (! TYPE_P (t))
1905 {
1906 maybe_set_unchanging (ref, t);
1907 if (TREE_THIS_VOLATILE (t))
1908 MEM_VOLATILE_P (ref) = 1;
1909
1910 /* Now remove any conversions: they don't change what the underlying
1911 object is. Likewise for SAVE_EXPR. */
1912 while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
1913 || TREE_CODE (t) == NON_LVALUE_EXPR
1914 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1915 || TREE_CODE (t) == SAVE_EXPR)
1916 t = TREE_OPERAND (t, 0);
1917
1918 /* If this expression can't be addressed (e.g., it contains a reference
1919 to a non-addressable field), show we don't change its alias set. */
1920 if (! can_address_p (t))
1921 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1922
1923 /* If this is a decl, set the attributes of the MEM from it. */
1924 if (DECL_P (t))
1925 {
1926 expr = t;
1927 offset = const0_rtx;
1928 apply_bitpos = bitpos;
1929 size = (DECL_SIZE_UNIT (t)
1930 && host_integerp (DECL_SIZE_UNIT (t), 1)
1931 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1932 align = DECL_ALIGN (t);
1933 }
1934
1935 /* If this is a constant, we know the alignment. */
1936 else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
1937 {
1938 align = TYPE_ALIGN (type);
1939 #ifdef CONSTANT_ALIGNMENT
1940 align = CONSTANT_ALIGNMENT (t, align);
1941 #endif
1942 }
1943
1944 /* If this is a field reference and not a bit-field, record it. */
1945 /* ??? There is some information that can be gleaned from bit-fields,
1946 such as the word offset in the structure that might be modified.
1947 But skip it for now. */
1948 else if (TREE_CODE (t) == COMPONENT_REF
1949 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1950 {
1951 expr = component_ref_for_mem_expr (t);
1952 offset = const0_rtx;
1953 apply_bitpos = bitpos;
1954 /* ??? Any reason the field size would be different than
1955 the size we got from the type? */
1956 }
1957
1958 /* If this is an array reference, look for an outer field reference. */
1959 else if (TREE_CODE (t) == ARRAY_REF)
1960 {
1961 tree off_tree = size_zero_node;
1962
1963 do
1964 {
1965 tree index = TREE_OPERAND (t, 1);
1966 tree array = TREE_OPERAND (t, 0);
1967 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
1968 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
1969 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
1970
1971 /* We assume all arrays have sizes that are a multiple of a byte.
1972 First subtract the lower bound, if any, in the type of the
1973 index, then convert to sizetype and multiply by the size of the
1974 array element. */
1975 if (low_bound != 0 && ! integer_zerop (low_bound))
1976 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
1977 index, low_bound));
1978
1979 /* If the index has a self-referential type, pass it to a
1980 WITH_RECORD_EXPR; if the component size is self-referential,
1981 pass our component to one. */
1982 if (! TREE_CONSTANT (index)
1983 && contains_placeholder_p (index))
1984 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, t);
1985 if (! TREE_CONSTANT (unit_size)
1986 && contains_placeholder_p (unit_size))
1987 unit_size = build (WITH_RECORD_EXPR, sizetype,
1988 unit_size, array);
1989
1990 off_tree
1991 = fold (build (PLUS_EXPR, sizetype,
1992 fold (build (MULT_EXPR, sizetype,
1993 index,
1994 unit_size)),
1995 off_tree));
1996 t = TREE_OPERAND (t, 0);
1997 }
1998 while (TREE_CODE (t) == ARRAY_REF);
1999
2000 if (DECL_P (t))
2001 {
2002 expr = t;
2003 offset = NULL;
2004 if (host_integerp (off_tree, 1))
2005 {
2006 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
2007 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
2008 align = DECL_ALIGN (t);
2009 if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
2010 align = aoff;
2011 offset = GEN_INT (ioff);
2012 apply_bitpos = bitpos;
2013 }
2014 }
2015 else if (TREE_CODE (t) == COMPONENT_REF)
2016 {
2017 expr = component_ref_for_mem_expr (t);
2018 if (host_integerp (off_tree, 1))
2019 {
2020 offset = GEN_INT (tree_low_cst (off_tree, 1));
2021 apply_bitpos = bitpos;
2022 }
2023 /* ??? Any reason the field size would be different than
2024 the size we got from the type? */
2025 }
2026 else if (flag_argument_noalias > 1
2027 && TREE_CODE (t) == INDIRECT_REF
2028 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
2029 {
2030 expr = t;
2031 offset = NULL;
2032 }
2033 }
2034
2035 /* If this is a Fortran indirect argument reference, record the
2036 parameter decl. */
2037 else if (flag_argument_noalias > 1
2038 && TREE_CODE (t) == INDIRECT_REF
2039 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
2040 {
2041 expr = t;
2042 offset = NULL;
2043 }
2044 }
2045
2046 /* If we modified OFFSET based on T, then subtract the outstanding
2047 bit position offset. Similarly, increase the size of the accessed
2048 object to contain the negative offset. */
2049 if (apply_bitpos)
2050 {
2051 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
2052 if (size)
2053 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
2054 }
2055
2056 /* Now set the attributes we computed above. */
2057 MEM_ATTRS (ref)
2058 = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
2059
2060 /* If this is already known to be a scalar or aggregate, we are done. */
2061 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
2062 return;
2063
2064 /* If it is a reference into an aggregate, this is part of an aggregate.
2065 Otherwise we don't know. */
2066 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
2067 || TREE_CODE (t) == ARRAY_RANGE_REF
2068 || TREE_CODE (t) == BIT_FIELD_REF)
2069 MEM_IN_STRUCT_P (ref) = 1;
2070 }
2071
2072 void
2073 set_mem_attributes (ref, t, objectp)
2074 rtx ref;
2075 tree t;
2076 int objectp;
2077 {
2078 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
2079 }
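
/* A usage sketch (not part of the original file): build a MEM for a
   declaration and let set_mem_attributes derive alias set, alignment,
   size and expression from the tree.  DECL and ADDR are hypothetical;
   guarded out since it is illustrative only.  */
#if 0
static rtx
example_mem_for_decl (decl, addr)
     tree decl;
     rtx addr;
{
  rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);

  /* OBJECTP is 1 because MEM is a new object of DECL's type.  Per the
     check above, DECL_RTL must not yet point to MEM here.  */
  set_mem_attributes (mem, decl, 1);
  return mem;
}
#endif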
2080
2081 /* Set the decl for MEM to DECL. */
2082
2083 void
2084 set_mem_attrs_from_reg (mem, reg)
2085 rtx mem;
2086 rtx reg;
2087 {
2088 MEM_ATTRS (mem)
2089 = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
2090 GEN_INT (REG_OFFSET (reg)),
2091 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
2092 }
2093
2094 /* Set the alias set of MEM to SET. */
2095
2096 void
2097 set_mem_alias_set (mem, set)
2098 rtx mem;
2099 HOST_WIDE_INT set;
2100 {
2101 #ifdef ENABLE_CHECKING
2102 /* If the new and old alias sets don't conflict, something is wrong. */
2103 if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
2104 abort ();
2105 #endif
2106
2107 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
2108 MEM_SIZE (mem), MEM_ALIGN (mem),
2109 GET_MODE (mem));
2110 }
2111
2112 /* Set the alignment of MEM to ALIGN bits. */
2113
2114 void
2115 set_mem_align (mem, align)
2116 rtx mem;
2117 unsigned int align;
2118 {
2119 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
2120 MEM_OFFSET (mem), MEM_SIZE (mem), align,
2121 GET_MODE (mem));
2122 }
2123
2124 /* Set the expr for MEM to EXPR. */
2125
2126 void
2127 set_mem_expr (mem, expr)
2128 rtx mem;
2129 tree expr;
2130 {
2131 MEM_ATTRS (mem)
2132 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
2133 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
2134 }
2135
2136 /* Set the offset of MEM to OFFSET. */
2137
2138 void
2139 set_mem_offset (mem, offset)
2140 rtx mem, offset;
2141 {
2142 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
2143 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
2144 GET_MODE (mem));
2145 }
2146
2147 /* Set the size of MEM to SIZE. */
2148
2149 void
2150 set_mem_size (mem, size)
2151 rtx mem, size;
2152 {
2153 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
2154 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
2155 GET_MODE (mem));
2156 }
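
/* A sketch (not part of the original file) of the setters above: each
   one rebuilds MEM_ATTRS rather than modifying it, because attribute
   structures are shared between MEMs.  MEM is hypothetical; guarded
   out since it is illustrative only.  */
#if 0
static void
example_retag_mem (mem)
     rtx mem;
{
  set_mem_align (mem, BITS_PER_WORD);           /* Alignment in bits.  */
  set_mem_size (mem, GEN_INT (UNITS_PER_WORD)); /* Size in bytes.  */
}
#endif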
2157 \f
2158 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2159 and its address changed to ADDR. (VOIDmode means don't change the mode.
2160 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2161 returned memory location is required to be valid. The memory
2162 attributes are not changed. */
2163
2164 static rtx
2165 change_address_1 (memref, mode, addr, validate)
2166 rtx memref;
2167 enum machine_mode mode;
2168 rtx addr;
2169 int validate;
2170 {
2171 rtx new;
2172
2173 if (GET_CODE (memref) != MEM)
2174 abort ();
2175 if (mode == VOIDmode)
2176 mode = GET_MODE (memref);
2177 if (addr == 0)
2178 addr = XEXP (memref, 0);
2179
2180 if (validate)
2181 {
2182 if (reload_in_progress || reload_completed)
2183 {
2184 if (! memory_address_p (mode, addr))
2185 abort ();
2186 }
2187 else
2188 addr = memory_address (mode, addr);
2189 }
2190
2191 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2192 return memref;
2193
2194 new = gen_rtx_MEM (mode, addr);
2195 MEM_COPY_ATTRIBUTES (new, memref);
2196 return new;
2197 }
2198
2199 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2200 way we are changing MEMREF, so we only preserve the alias set. */
2201
2202 rtx
2203 change_address (memref, mode, addr)
2204 rtx memref;
2205 enum machine_mode mode;
2206 rtx addr;
2207 {
2208 rtx new = change_address_1 (memref, mode, addr, 1);
2209 enum machine_mode mmode = GET_MODE (new);
2210
2211 MEM_ATTRS (new)
2212 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0,
2213 mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode)),
2214 (mmode == BLKmode ? BITS_PER_UNIT
2215 : GET_MODE_ALIGNMENT (mmode)),
2216 mmode);
2217
2218 return new;
2219 }
2220
2221 /* Return a memory reference like MEMREF, but with its mode changed
2222 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2223 nonzero, the memory address is forced to be valid.
2224 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
2225 and caller is responsible for adjusting MEMREF base register. */
2226
2227 rtx
2228 adjust_address_1 (memref, mode, offset, validate, adjust)
2229 rtx memref;
2230 enum machine_mode mode;
2231 HOST_WIDE_INT offset;
2232 int validate, adjust;
2233 {
2234 rtx addr = XEXP (memref, 0);
2235 rtx new;
2236 rtx memoffset = MEM_OFFSET (memref);
2237 rtx size = 0;
2238 unsigned int memalign = MEM_ALIGN (memref);
2239
2240 /* ??? Prefer to create garbage instead of creating shared rtl.
2241 This may happen even if offset is nonzero -- consider
2242 (plus (plus reg reg) const_int) -- so do this always. */
2243 addr = copy_rtx (addr);
2244
2245 if (adjust)
2246 {
2247 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2248 object, we can merge it into the LO_SUM. */
2249 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2250 && offset >= 0
2251 && (unsigned HOST_WIDE_INT) offset
2252 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2253 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
2254 plus_constant (XEXP (addr, 1), offset));
2255 else
2256 addr = plus_constant (addr, offset);
2257 }
2258
2259 new = change_address_1 (memref, mode, addr, validate);
2260
2261 /* Compute the new values of the memory attributes due to this adjustment.
2262 We add the offsets and update the alignment. */
2263 if (memoffset)
2264 memoffset = GEN_INT (offset + INTVAL (memoffset));
2265
2266 /* Compute the new alignment by taking the MIN of the alignment and the
2267 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2268 is zero. */
2269 if (offset != 0)
2270 memalign
2271 = MIN (memalign,
2272 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
2273
2274 /* We can compute the size in a number of ways. */
2275 if (GET_MODE (new) != BLKmode)
2276 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
2277 else if (MEM_SIZE (memref))
2278 size = plus_constant (MEM_SIZE (memref), -offset);
2279
2280 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
2281 memoffset, size, memalign, GET_MODE (new));
2282
2283 /* At some point, we should validate that this offset is within the object,
2284 if all the appropriate values are known. */
2285 return new;
2286 }
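
/* A sketch (not part of the original file): fetch the second word of a
   double-word MEM.  Callers normally reach adjust_address_1 through
   the adjust_address macro, which supplies VALIDATE = ADJUST = 1.
   MEMREF is hypothetical; guarded out since it is illustrative only.  */
#if 0
static rtx
example_second_word (memref)
     rtx memref;
{
  return adjust_address_1 (memref, word_mode, UNITS_PER_WORD, 1, 1);
}
#endif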
2287
2288 /* Return a memory reference like MEMREF, but with its mode changed
2289 to MODE and its address changed to ADDR, which is assumed to be
2290 MEMREF offset by OFFSET bytes. If VALIDATE is
2291 nonzero, the memory address is forced to be valid. */
2292
2293 rtx
2294 adjust_automodify_address_1 (memref, mode, addr, offset, validate)
2295 rtx memref;
2296 enum machine_mode mode;
2297 rtx addr;
2298 HOST_WIDE_INT offset;
2299 int validate;
2300 {
2301 memref = change_address_1 (memref, VOIDmode, addr, validate);
2302 return adjust_address_1 (memref, mode, offset, validate, 0);
2303 }
2304
2305 /* Return a memory reference like MEMREF, but whose address is changed by
2306 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2307 known to be in OFFSET (possibly 1). */
2308
2309 rtx
2310 offset_address (memref, offset, pow2)
2311 rtx memref;
2312 rtx offset;
2313 HOST_WIDE_INT pow2;
2314 {
2315 rtx new, addr = XEXP (memref, 0);
2316
2317 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2318
2319 /* At this point we don't know _why_ the address is invalid. It
2320 could have secondary memory references, multiplies or anything.
2321
2322 However, if we did go and rearrange things, we can wind up not
2323 being able to recognize the magic around pic_offset_table_rtx.
2324 This stuff is fragile, and is yet another example of why it is
2325 bad to expose PIC machinery too early. */
2326 if (! memory_address_p (GET_MODE (memref), new)
2327 && GET_CODE (addr) == PLUS
2328 && XEXP (addr, 0) == pic_offset_table_rtx)
2329 {
2330 addr = force_reg (GET_MODE (addr), addr);
2331 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2332 }
2333
2334 update_temp_slot_address (XEXP (memref, 0), new);
2335 new = change_address_1 (memref, VOIDmode, new, 1);
2336
2337 /* Update the alignment to reflect the offset. Reset the offset, which
2338 we don't know. */
2339 MEM_ATTRS (new)
2340 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2341 MIN (MEM_ALIGN (memref),
2342 (unsigned HOST_WIDE_INT) pow2 * BITS_PER_UNIT),
2343 GET_MODE (new));
2344 return new;
2345 }
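
/* A sketch (not part of the original file): address element IDX of a
   word-sized array held in MEMREF.  The element size is the known
   power-of-2 factor passed as POW2.  MEMREF and IDX are hypothetical,
   and IDX is assumed to be a Pmode register; guarded out since it is
   illustrative only.  */
#if 0
static rtx
example_index_word (memref, idx)
     rtx memref, idx;
{
  rtx off = expand_mult (Pmode, idx, GEN_INT (UNITS_PER_WORD),
                         NULL_RTX, 1);
  return offset_address (memref, off, UNITS_PER_WORD);
}
#endif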
2346
2347 /* Return a memory reference like MEMREF, but with its address changed to
2348 ADDR. The caller is asserting that the actual piece of memory pointed
2349 to is the same, just the form of the address is being changed, such as
2350 by putting something into a register. */
2351
2352 rtx
2353 replace_equiv_address (memref, addr)
2354 rtx memref;
2355 rtx addr;
2356 {
2357 /* change_address_1 copies the memory attribute structure without change
2358 and that's exactly what we want here. */
2359 update_temp_slot_address (XEXP (memref, 0), addr);
2360 return change_address_1 (memref, VOIDmode, addr, 1);
2361 }
2362
2363 /* Likewise, but the reference is not required to be valid. */
2364
2365 rtx
2366 replace_equiv_address_nv (memref, addr)
2367 rtx memref;
2368 rtx addr;
2369 {
2370 return change_address_1 (memref, VOIDmode, addr, 0);
2371 }
2372
2373 /* Return a memory reference like MEMREF, but with its mode widened to
2374 MODE and offset by OFFSET. This would be used by targets that e.g.
2375 cannot issue QImode memory operations and have to use SImode memory
2376 operations plus masking logic. */
2377
2378 rtx
2379 widen_memory_access (memref, mode, offset)
2380 rtx memref;
2381 enum machine_mode mode;
2382 HOST_WIDE_INT offset;
2383 {
2384 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
2385 tree expr = MEM_EXPR (new);
2386 rtx memoffset = MEM_OFFSET (new);
2387 unsigned int size = GET_MODE_SIZE (mode);
2388
2389 /* If we don't know what offset we were at within the expression, then
2390 we can't know if we've overstepped the bounds. */
2391 if (! memoffset)
2392 expr = NULL_TREE;
2393
2394 while (expr)
2395 {
2396 if (TREE_CODE (expr) == COMPONENT_REF)
2397 {
2398 tree field = TREE_OPERAND (expr, 1);
2399
2400 if (! DECL_SIZE_UNIT (field))
2401 {
2402 expr = NULL_TREE;
2403 break;
2404 }
2405
2406 /* Is the field at least as large as the access? If so, ok,
2407 otherwise strip back to the containing structure. */
2408 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2409 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2410 && INTVAL (memoffset) >= 0)
2411 break;
2412
2413 if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
2414 {
2415 expr = NULL_TREE;
2416 break;
2417 }
2418
2419 expr = TREE_OPERAND (expr, 0);
2420 memoffset = (GEN_INT (INTVAL (memoffset)
2421 + tree_low_cst (DECL_FIELD_OFFSET (field), 1)
2422 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2423 / BITS_PER_UNIT)));
2424 }
2425 /* Similarly for the decl. */
2426 else if (DECL_P (expr)
2427 && DECL_SIZE_UNIT (expr)
2428 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2429 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2430 && (! memoffset || INTVAL (memoffset) >= 0))
2431 break;
2432 else
2433 {
2434 /* The widened memory access overflows the expression, which means
2435 that it could alias another expression. Zap it. */
2436 expr = NULL_TREE;
2437 break;
2438 }
2439 }
2440
2441 if (! expr)
2442 memoffset = NULL_RTX;
2443
2444 /* The widened memory may alias other stuff, so zap the alias set. */
2445 /* ??? Maybe use get_alias_set on any remaining expression. */
2446
2447 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2448 MEM_ALIGN (new), mode);
2449
2450 return new;
2451 }
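
/* A sketch (not part of the original file) of the use case described
   above: a target without byte loads widens a QImode reference to the
   containing SImode word; the byte would then be extracted with
   masking logic (not shown).  MEMREF is hypothetical; guarded out
   since it is illustrative only.  */
#if 0
static rtx
example_widen_byte (memref)
     rtx memref;
{
  return widen_memory_access (memref, SImode, 0);
}
#endif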
2452 \f
2453 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2454
2455 rtx
2456 gen_label_rtx ()
2457 {
2458 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2459 NULL, label_num++, NULL);
2460 }
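
/* A sketch (not part of the original file): the usual pairing of a
   fresh label with emit_jump and emit_label to branch around code.
   Guarded out since it is illustrative only.  */
#if 0
static void
example_branch_around ()
{
  rtx label = gen_label_rtx ();

  emit_jump (label);
  /* Insns to be skipped would be emitted here.  */
  emit_label (label);
}
#endif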
2461 \f
2462 /* For procedure integration. */
2463
2464 /* Install new pointers to the first and last insns in the chain.
2465 Also, set cur_insn_uid to one higher than the last in use.
2466 Used for an inline-procedure after copying the insn chain. */
2467
2468 void
2469 set_new_first_and_last_insn (first, last)
2470 rtx first, last;
2471 {
2472 rtx insn;
2473
2474 first_insn = first;
2475 last_insn = last;
2476 cur_insn_uid = 0;
2477
2478 for (insn = first; insn; insn = NEXT_INSN (insn))
2479 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2480
2481 cur_insn_uid++;
2482 }
2483
2484 /* Set the range of label numbers found in the current function.
2485 This is used when belatedly compiling an inline function. */
2486
2487 void
2488 set_new_first_and_last_label_num (first, last)
2489 int first, last;
2490 {
2491 base_label_num = label_num;
2492 first_label_num = first;
2493 last_label_num = last;
2494 }
2495
2496 /* Set the last label number found in the current function.
2497 This is used when belatedly compiling an inline function. */
2498
2499 void
2500 set_new_last_label_num (last)
2501 int last;
2502 {
2503 base_label_num = label_num;
2504 last_label_num = last;
2505 }
2506 \f
2507 /* Restore all variables describing the current status from the structure *P.
2508 This is used after a nested function. */
2509
2510 void
2511 restore_emit_status (p)
2512 struct function *p ATTRIBUTE_UNUSED;
2513 {
2514 last_label_num = 0;
2515 }
2516 \f
2517 /* Go through all the RTL insn bodies and copy any invalid shared
2518 structure. This routine should only be called once. */
2519
2520 void
2521 unshare_all_rtl (fndecl, insn)
2522 tree fndecl;
2523 rtx insn;
2524 {
2525 tree decl;
2526
2527 /* Make sure that virtual parameters are not shared. */
2528 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2529 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2530
2531 /* Make sure that virtual stack slots are not shared. */
2532 unshare_all_decls (DECL_INITIAL (fndecl));
2533
2534 /* Unshare just about everything else. */
2535 unshare_all_rtl_1 (insn);
2536
2537 /* Make sure the addresses of stack slots found outside the insn chain
2538 (such as, in DECL_RTL of a variable) are not shared
2539 with the insn chain.
2540
2541 This special care is necessary when the stack slot MEM does not
2542 actually appear in the insn chain. If it does appear, its address
2543 is unshared from all else at that point. */
2544 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2545 }
2546
2547 /* Go through all the RTL insn bodies and copy any invalid shared
2548 structure, again. This is a fairly expensive thing to do so it
2549 should be done sparingly. */
2550
2551 void
2552 unshare_all_rtl_again (insn)
2553 rtx insn;
2554 {
2555 rtx p;
2556 tree decl;
2557
2558 for (p = insn; p; p = NEXT_INSN (p))
2559 if (INSN_P (p))
2560 {
2561 reset_used_flags (PATTERN (p));
2562 reset_used_flags (REG_NOTES (p));
2563 reset_used_flags (LOG_LINKS (p));
2564 }
2565
2566 /* Make sure that virtual stack slots are not shared. */
2567 reset_used_decls (DECL_INITIAL (cfun->decl));
2568
2569 /* Make sure that virtual parameters are not shared. */
2570 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2571 reset_used_flags (DECL_RTL (decl));
2572
2573 reset_used_flags (stack_slot_list);
2574
2575 unshare_all_rtl (cfun->decl, insn);
2576 }
2577
2578 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2579 Assumes the mark bits are cleared at entry. */
2580
2581 static void
2582 unshare_all_rtl_1 (insn)
2583 rtx insn;
2584 {
2585 for (; insn; insn = NEXT_INSN (insn))
2586 if (INSN_P (insn))
2587 {
2588 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2589 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2590 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2591 }
2592 }
2593
2594 /* Go through all virtual stack slots of a function and copy any
2595 shared structure. */
2596 static void
2597 unshare_all_decls (blk)
2598 tree blk;
2599 {
2600 tree t;
2601
2602 /* Copy shared decls. */
2603 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2604 if (DECL_RTL_SET_P (t))
2605 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2606
2607 /* Now process sub-blocks. */
2608 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2609 unshare_all_decls (t);
2610 }
2611
2612 /* Go through all virtual stack slots of a function and mark them as
2613 not shared. */
2614 static void
2615 reset_used_decls (blk)
2616 tree blk;
2617 {
2618 tree t;
2619
2620 /* Mark decls. */
2621 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2622 if (DECL_RTL_SET_P (t))
2623 reset_used_flags (DECL_RTL (t));
2624
2625 /* Now process sub-blocks. */
2626 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2627 reset_used_decls (t);
2628 }
2629
2630 /* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
2631 placed in the result directly, rather than being copied. MAY_SHARE is
2632 either a MEM or an EXPR_LIST of MEMs. */
2633
2634 rtx
2635 copy_most_rtx (orig, may_share)
2636 rtx orig;
2637 rtx may_share;
2638 {
2639 rtx copy;
2640 int i, j;
2641 RTX_CODE code;
2642 const char *format_ptr;
2643
2644 if (orig == may_share
2645 || (GET_CODE (may_share) == EXPR_LIST
2646 && in_expr_list_p (may_share, orig)))
2647 return orig;
2648
2649 code = GET_CODE (orig);
2650
2651 switch (code)
2652 {
2653 case REG:
2654 case QUEUED:
2655 case CONST_INT:
2656 case CONST_DOUBLE:
2657 case CONST_VECTOR:
2658 case SYMBOL_REF:
2659 case CODE_LABEL:
2660 case PC:
2661 case CC0:
2662 return orig;
2663 default:
2664 break;
2665 }
2666
2667 copy = rtx_alloc (code);
2668 PUT_MODE (copy, GET_MODE (orig));
2669 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2670 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2671 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2672 RTX_FLAG (copy, integrated) = RTX_FLAG (orig, integrated);
2673 RTX_FLAG (copy, frame_related) = RTX_FLAG (orig, frame_related);
2674
2675 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2676
2677 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2678 {
2679 switch (*format_ptr++)
2680 {
2681 case 'e':
2682 XEXP (copy, i) = XEXP (orig, i);
2683 if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
2684 XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
2685 break;
2686
2687 case 'u':
2688 XEXP (copy, i) = XEXP (orig, i);
2689 break;
2690
2691 case 'E':
2692 case 'V':
2693 XVEC (copy, i) = XVEC (orig, i);
2694 if (XVEC (orig, i) != NULL)
2695 {
2696 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2697 for (j = 0; j < XVECLEN (copy, i); j++)
2698 XVECEXP (copy, i, j)
2699 = copy_most_rtx (XVECEXP (orig, i, j), may_share);
2700 }
2701 break;
2702
2703 case 'w':
2704 XWINT (copy, i) = XWINT (orig, i);
2705 break;
2706
2707 case 'n':
2708 case 'i':
2709 XINT (copy, i) = XINT (orig, i);
2710 break;
2711
2712 case 't':
2713 XTREE (copy, i) = XTREE (orig, i);
2714 break;
2715
2716 case 's':
2717 case 'S':
2718 XSTR (copy, i) = XSTR (orig, i);
2719 break;
2720
2721 case '0':
2722 /* Copy this through the wide int field; that's safest. */
2723 X0WINT (copy, i) = X0WINT (orig, i);
2724 break;
2725
2726 default:
2727 abort ();
2728 }
2729 }
2730 return copy;
2731 }
2732
2733 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2734 Recursively does the same for subexpressions. */
2735
2736 rtx
2737 copy_rtx_if_shared (orig)
2738 rtx orig;
2739 {
2740 rtx x = orig;
2741 int i;
2742 enum rtx_code code;
2743 const char *format_ptr;
2744 int copied = 0;
2745
2746 if (x == 0)
2747 return 0;
2748
2749 code = GET_CODE (x);
2750
2751 /* These types may be freely shared. */
2752
2753 switch (code)
2754 {
2755 case REG:
2756 case QUEUED:
2757 case CONST_INT:
2758 case CONST_DOUBLE:
2759 case CONST_VECTOR:
2760 case SYMBOL_REF:
2761 case CODE_LABEL:
2762 case PC:
2763 case CC0:
2764 case SCRATCH:
2765 /* SCRATCHes must be shared because each represents a distinct value. */
2766 return x;
2767
2768 case CONST:
2769 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2770 a LABEL_REF, it isn't sharable. */
2771 if (GET_CODE (XEXP (x, 0)) == PLUS
2772 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2773 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2774 return x;
2775 break;
2776
2777 case INSN:
2778 case JUMP_INSN:
2779 case CALL_INSN:
2780 case NOTE:
2781 case BARRIER:
2782 /* The chain of insns is not being copied. */
2783 return x;
2784
2785 case MEM:
2786 /* A MEM is allowed to be shared if its address is constant.
2787
2788 We used to allow sharing of MEMs which referenced
2789 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
2790 that can lose. instantiate_virtual_regs will not unshare
2791 the MEMs, and combine may change the structure of the address
2792 because it looks safe and profitable in one context, but
2793 in some other context it creates unrecognizable RTL. */
2794 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
2795 return x;
2796
2797 break;
2798
2799 default:
2800 break;
2801 }
2802
2803 /* This rtx may not be shared. If it has already been seen,
2804 replace it with a copy of itself. */
2805
2806 if (RTX_FLAG (x, used))
2807 {
2808 rtx copy;
2809
2810 copy = rtx_alloc (code);
2811 memcpy (copy, x,
2812 (sizeof (*copy) - sizeof (copy->fld)
2813 + sizeof (copy->fld[0]) * GET_RTX_LENGTH (code)));
2814 x = copy;
2815 copied = 1;
2816 }
2817 RTX_FLAG (x, used) = 1;
2818
2819 /* Now scan the subexpressions recursively.
2820 We can store any replaced subexpressions directly into X
2821 since we know X is not shared! Any vectors in X
2822 must be copied if X was copied. */
2823
2824 format_ptr = GET_RTX_FORMAT (code);
2825
2826 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2827 {
2828 switch (*format_ptr++)
2829 {
2830 case 'e':
2831 XEXP (x, i) = copy_rtx_if_shared (XEXP (x, i));
2832 break;
2833
2834 case 'E':
2835 if (XVEC (x, i) != NULL)
2836 {
2837 int j;
2838 int len = XVECLEN (x, i);
2839
2840 if (copied && len > 0)
2841 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2842 for (j = 0; j < len; j++)
2843 XVECEXP (x, i, j) = copy_rtx_if_shared (XVECEXP (x, i, j));
2844 }
2845 break;
2846 }
2847 }
2848 return x;
2849 }
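
/* A sketch (not part of the original file) of the two-pass unsharing
   protocol used by unshare_all_rtl_again above: clear the used bits
   over every reference first, then copy whatever is reached a second
   time.  X and Y are hypothetical rtx'es that may share structure;
   guarded out since it is illustrative only.  */
#if 0
static void
example_unshare_pair (x, y)
     rtx x, y;
{
  reset_used_flags (x);
  reset_used_flags (y);
  x = copy_rtx_if_shared (x);
  y = copy_rtx_if_shared (y);
}
#endif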
2850
2851 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2852 to look for shared sub-parts. */
2853
2854 void
2855 reset_used_flags (x)
2856 rtx x;
2857 {
2858 int i, j;
2859 enum rtx_code code;
2860 const char *format_ptr;
2861
2862 if (x == 0)
2863 return;
2864
2865 code = GET_CODE (x);
2866
2867 /* These types may be freely shared so we needn't do any resetting
2868 for them. */
2869
2870 switch (code)
2871 {
2872 case REG:
2873 case QUEUED:
2874 case CONST_INT:
2875 case CONST_DOUBLE:
2876 case CONST_VECTOR:
2877 case SYMBOL_REF:
2878 case CODE_LABEL:
2879 case PC:
2880 case CC0:
2881 return;
2882
2883 case INSN:
2884 case JUMP_INSN:
2885 case CALL_INSN:
2886 case NOTE:
2887 case LABEL_REF:
2888 case BARRIER:
2889 /* The chain of insns is not being copied. */
2890 return;
2891
2892 default:
2893 break;
2894 }
2895
2896 RTX_FLAG (x, used) = 0;
2897
2898 format_ptr = GET_RTX_FORMAT (code);
2899 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2900 {
2901 switch (*format_ptr++)
2902 {
2903 case 'e':
2904 reset_used_flags (XEXP (x, i));
2905 break;
2906
2907 case 'E':
2908 for (j = 0; j < XVECLEN (x, i); j++)
2909 reset_used_flags (XVECEXP (x, i, j));
2910 break;
2911 }
2912 }
2913 }
2914 \f
2915 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2916 Return X or the rtx for the pseudo reg the value of X was copied into.
2917 OTHER must be valid as a SET_DEST. */
2918
2919 rtx
2920 make_safe_from (x, other)
2921 rtx x, other;
2922 {
2923 while (1)
2924 switch (GET_CODE (other))
2925 {
2926 case SUBREG:
2927 other = SUBREG_REG (other);
2928 break;
2929 case STRICT_LOW_PART:
2930 case SIGN_EXTEND:
2931 case ZERO_EXTEND:
2932 other = XEXP (other, 0);
2933 break;
2934 default:
2935 goto done;
2936 }
2937 done:
2938 if ((GET_CODE (other) == MEM
2939 && ! CONSTANT_P (x)
2940 && GET_CODE (x) != REG
2941 && GET_CODE (x) != SUBREG)
2942 || (GET_CODE (other) == REG
2943 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2944 || reg_mentioned_p (other, x))))
2945 {
2946 rtx temp = gen_reg_rtx (GET_MODE (x));
2947 emit_move_insn (temp, x);
2948 return temp;
2949 }
2950 return x;
2951 }
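
/* A sketch (not part of the original file): copy VALUE to a pseudo if
   storing into TARGET could clobber it, so VALUE remains usable after
   the store.  TARGET and VALUE are hypothetical; guarded out since it
   is illustrative only.  */
#if 0
static void
example_safe_operand (target, value)
     rtx target, value;
{
  value = make_safe_from (value, target);
  emit_move_insn (target, value);
  /* VALUE may still be referenced here.  */
}
#endif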
2952 \f
2953 /* Emission of insns (adding them to the doubly-linked list). */
2954
2955 /* Return the first insn of the current sequence or current function. */
2956
2957 rtx
2958 get_insns ()
2959 {
2960 return first_insn;
2961 }
2962
2963 /* Specify a new insn as the first in the chain. */
2964
2965 void
2966 set_first_insn (insn)
2967 rtx insn;
2968 {
2969 if (PREV_INSN (insn) != 0)
2970 abort ();
2971 first_insn = insn;
2972 }
2973
2974 /* Return the last insn emitted in current sequence or current function. */
2975
2976 rtx
2977 get_last_insn ()
2978 {
2979 return last_insn;
2980 }
2981
2982 /* Specify a new insn as the last in the chain. */
2983
2984 void
2985 set_last_insn (insn)
2986 rtx insn;
2987 {
2988 if (NEXT_INSN (insn) != 0)
2989 abort ();
2990 last_insn = insn;
2991 }
2992
2993 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2994
2995 rtx
2996 get_last_insn_anywhere ()
2997 {
2998 struct sequence_stack *stack;
2999 if (last_insn)
3000 return last_insn;
3001 for (stack = seq_stack; stack; stack = stack->next)
3002 if (stack->last != 0)
3003 return stack->last;
3004 return 0;
3005 }
3006
3007 /* Return the first nonnote insn emitted in current sequence or current
3008 function. This routine looks inside SEQUENCEs. */
3009
3010 rtx
3011 get_first_nonnote_insn ()
3012 {
3013 rtx insn = first_insn;
3014
3015 while (insn)
3016 {
3017 insn = next_insn (insn);
3018 if (insn == 0 || GET_CODE (insn) != NOTE)
3019 break;
3020 }
3021
3022 return insn;
3023 }
3024
3025 /* Return the last nonnote insn emitted in current sequence or current
3026 function. This routine looks inside SEQUENCEs. */
3027
3028 rtx
3029 get_last_nonnote_insn ()
3030 {
3031 rtx insn = last_insn;
3032
3033 while (insn)
3034 {
3035 insn = previous_insn (insn);
3036 if (insn == 0 || GET_CODE (insn) != NOTE)
3037 break;
3038 }
3039
3040 return insn;
3041 }
3042
3043 /* Return a number larger than any instruction's uid in this function. */
3044
3045 int
3046 get_max_uid ()
3047 {
3048 return cur_insn_uid;
3049 }
3050
3051 /* Renumber instructions so that no instruction UIDs are wasted. */
3052
3053 void
3054 renumber_insns (stream)
3055 FILE *stream;
3056 {
3057 rtx insn;
3058
3059 /* If we're not supposed to renumber instructions, don't. */
3060 if (!flag_renumber_insns)
3061 return;
3062
3063 /* If there aren't that many instructions, then it's not really
3064 worth renumbering them. */
3065 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
3066 return;
3067
3068 cur_insn_uid = 1;
3069
3070 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3071 {
3072 if (stream)
3073 fprintf (stream, "Renumbering insn %d to %d\n",
3074 INSN_UID (insn), cur_insn_uid);
3075 INSN_UID (insn) = cur_insn_uid++;
3076 }
3077 }
3078 \f
3079 /* Return the next insn. If it is a SEQUENCE, return the first insn
3080 of the sequence. */
3081
3082 rtx
3083 next_insn (insn)
3084 rtx insn;
3085 {
3086 if (insn)
3087 {
3088 insn = NEXT_INSN (insn);
3089 if (insn && GET_CODE (insn) == INSN
3090 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3091 insn = XVECEXP (PATTERN (insn), 0, 0);
3092 }
3093
3094 return insn;
3095 }
3096
3097 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3098 of the sequence. */
3099
3100 rtx
3101 previous_insn (insn)
3102 rtx insn;
3103 {
3104 if (insn)
3105 {
3106 insn = PREV_INSN (insn);
3107 if (insn && GET_CODE (insn) == INSN
3108 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3109 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3110 }
3111
3112 return insn;
3113 }
3114
3115 /* Return the next insn after INSN that is not a NOTE. This routine does not
3116 look inside SEQUENCEs. */
3117
3118 rtx
3119 next_nonnote_insn (insn)
3120 rtx insn;
3121 {
3122 while (insn)
3123 {
3124 insn = NEXT_INSN (insn);
3125 if (insn == 0 || GET_CODE (insn) != NOTE)
3126 break;
3127 }
3128
3129 return insn;
3130 }
3131
3132 /* Return the previous insn before INSN that is not a NOTE. This routine does
3133 not look inside SEQUENCEs. */
3134
3135 rtx
3136 prev_nonnote_insn (insn)
3137 rtx insn;
3138 {
3139 while (insn)
3140 {
3141 insn = PREV_INSN (insn);
3142 if (insn == 0 || GET_CODE (insn) != NOTE)
3143 break;
3144 }
3145
3146 return insn;
3147 }
3148
3149 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3150 or 0, if there is none. This routine does not look inside
3151 SEQUENCEs. */
3152
3153 rtx
3154 next_real_insn (insn)
3155 rtx insn;
3156 {
3157 while (insn)
3158 {
3159 insn = NEXT_INSN (insn);
3160 if (insn == 0 || GET_CODE (insn) == INSN
3161 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
3162 break;
3163 }
3164
3165 return insn;
3166 }
3167
3168 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3169 or 0, if there is none. This routine does not look inside
3170 SEQUENCEs. */
3171
3172 rtx
3173 prev_real_insn (insn)
3174 rtx insn;
3175 {
3176 while (insn)
3177 {
3178 insn = PREV_INSN (insn);
3179 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
3180 || GET_CODE (insn) == JUMP_INSN)
3181 break;
3182 }
3183
3184 return insn;
3185 }
3186
3187 /* Return nonzero if INSN really does something: it is a CALL_INSN or
3188 JUMP_INSN, or an INSN whose body is not merely a USE or CLOBBER
3189 (every INSN qualifies until reload has completed). */
3190
3191 int
3192 active_insn_p (insn)
3193 rtx insn;
3194 {
3195 return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
3196 || (GET_CODE (insn) == INSN
3197 && (! reload_completed
3198 || (GET_CODE (PATTERN (insn)) != USE
3199 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3200 }
3201
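/* Find the next insn after INSN that really does something, in the
sense of active_insn_p above. This routine does not look inside
SEQUENCEs. Until reload has completed, this is the same as
next_real_insn. */
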
3202 rtx
3203 next_active_insn (insn)
3204 rtx insn;
3205 {
3206 while (insn)
3207 {
3208 insn = NEXT_INSN (insn);
3209 if (insn == 0 || active_insn_p (insn))
3210 break;
3211 }
3212
3213 return insn;
3214 }
3215
3216 /* Find the last insn before INSN that really does something. This routine
3217 does not look inside SEQUENCEs. Until reload has completed, this is the
3218 same as prev_real_insn. */
3219
3220 rtx
3221 prev_active_insn (insn)
3222 rtx insn;
3223 {
3224 while (insn)
3225 {
3226 insn = PREV_INSN (insn);
3227 if (insn == 0 || active_insn_p (insn))
3228 break;
3229 }
3230
3231 return insn;
3232 }
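
/* A sketch (not part of the original file): count the active insns in
   the current function with the walkers above.  Guarded out since it
   is illustrative only.  */
#if 0
static int
example_count_active ()
{
  rtx insn;
  int n = 0;

  for (insn = get_insns (); insn; insn = next_active_insn (insn))
    if (active_insn_p (insn))
      n++;
  return n;
}
#endif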
3233
3234 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3235
3236 rtx
3237 next_label (insn)
3238 rtx insn;
3239 {
3240 while (insn)
3241 {
3242 insn = NEXT_INSN (insn);
3243 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3244 break;
3245 }
3246
3247 return insn;
3248 }
3249
3250 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3251
3252 rtx
3253 prev_label (insn)
3254 rtx insn;
3255 {
3256 while (insn)
3257 {
3258 insn = PREV_INSN (insn);
3259 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3260 break;
3261 }
3262
3263 return insn;
3264 }
3265 \f
3266 #ifdef HAVE_cc0
3267 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3268 and REG_CC_USER notes so we can find it. */
3269
3270 void
3271 link_cc0_insns (insn)
3272 rtx insn;
3273 {
3274 rtx user = next_nonnote_insn (insn);
3275
3276 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
3277 user = XVECEXP (PATTERN (user), 0, 0);
3278
3279 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3280 REG_NOTES (user));
3281 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
3282 }
3283
3284 /* Return the next insn that uses CC0 after INSN, which is assumed to
3285 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3286 applied to the result of this function should yield INSN).
3287
3288 Normally, this is simply the next insn. However, if a REG_CC_USER note
3289 is present, it contains the insn that uses CC0.
3290
3291 Return 0 if we can't find the insn. */
3292
3293 rtx
3294 next_cc0_user (insn)
3295 rtx insn;
3296 {
3297 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3298
3299 if (note)
3300 return XEXP (note, 0);
3301
3302 insn = next_nonnote_insn (insn);
3303 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
3304 insn = XVECEXP (PATTERN (insn), 0, 0);
3305
3306 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3307 return insn;
3308
3309 return 0;
3310 }
3311
3312 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3313 note, it is the previous insn. */
3314
3315 rtx
3316 prev_cc0_setter (insn)
3317 rtx insn;
3318 {
3319 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3320
3321 if (note)
3322 return XEXP (note, 0);
3323
3324 insn = prev_nonnote_insn (insn);
3325 if (! sets_cc0_p (PATTERN (insn)))
3326 abort ();
3327
3328 return insn;
3329 }
3330 #endif
3331
3332 /* Increment the label uses for all labels present in rtx. */
3333
3334 static void
3335 mark_label_nuses (x)
3336 rtx x;
3337 {
3338 enum rtx_code code;
3339 int i, j;
3340 const char *fmt;
3341
3342 code = GET_CODE (x);
3343 if (code == LABEL_REF)
3344 LABEL_NUSES (XEXP (x, 0))++;
3345
3346 fmt = GET_RTX_FORMAT (code);
3347 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3348 {
3349 if (fmt[i] == 'e')
3350 mark_label_nuses (XEXP (x, i));
3351 else if (fmt[i] == 'E')
3352 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3353 mark_label_nuses (XVECEXP (x, i, j));
3354 }
3355 }
3356
3357 \f
3358 /* Try splitting insns that can be split for better scheduling.
3359 PAT is the pattern which might split.
3360 TRIAL is the insn providing PAT.
3361 LAST is nonzero if we should return the last insn of the sequence produced.
3362
3363 If this routine succeeds in splitting, it returns the first or last
3364 replacement insn depending on the value of LAST. Otherwise, it
3365 returns TRIAL. If the insn to be returned can be split, it will be. */
3366
3367 rtx
3368 try_split (pat, trial, last)
3369 rtx pat, trial;
3370 int last;
3371 {
3372 rtx before = PREV_INSN (trial);
3373 rtx after = NEXT_INSN (trial);
3374 int has_barrier = 0;
3375 rtx tem;
3376 rtx note, seq;
3377 int probability;
3378
3379 if (any_condjump_p (trial)
3380 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3381 split_branch_probability = INTVAL (XEXP (note, 0));
3382 probability = split_branch_probability;
3383
3384 seq = split_insns (pat, trial);
3385
3386 split_branch_probability = -1;
3387
3388 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3389 We may need to handle this specially. */
3390 if (after && GET_CODE (after) == BARRIER)
3391 {
3392 has_barrier = 1;
3393 after = NEXT_INSN (after);
3394 }
3395
3396 if (seq)
3397 {
3398 /* Sometimes there will be only one insn in that list; this case will
3399 normally arise only when we want it in turn to be split (SFmode on
3400 the 29k is an example). */
3401 if (NEXT_INSN (seq) != NULL_RTX)
3402 {
3403 rtx insn_last, insn;
3404 int njumps = 0;
3405
3406 /* Avoid infinite loop if any insn of the result matches
3407 the original pattern. */
3408 insn_last = seq;
3409 while (1)
3410 {
3411 if (INSN_P (insn_last)
3412 && rtx_equal_p (PATTERN (insn_last), pat))
3413 return trial;
3414 if (NEXT_INSN (insn_last) == NULL_RTX)
3415 break;
3416 insn_last = NEXT_INSN (insn_last);
3417 }
3418
3419 /* Mark labels. */
3420 insn = insn_last;
3421 while (insn != NULL_RTX)
3422 {
3423 if (GET_CODE (insn) == JUMP_INSN)
3424 {
3425 mark_jump_label (PATTERN (insn), insn, 0);
3426 njumps++;
3427 if (probability != -1
3428 && any_condjump_p (insn)
3429 && !find_reg_note (insn, REG_BR_PROB, 0))
3430 {
3431 /* We can preserve the REG_BR_PROB notes only if exactly
3432 one jump is created; otherwise the machine description
3433 is responsible for this step, using the
3434 split_branch_probability variable. */
3435 if (njumps != 1)
3436 abort ();
3437 REG_NOTES (insn)
3438 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3439 GEN_INT (probability),
3440 REG_NOTES (insn));
3441 }
3442 }
3443
3444 insn = PREV_INSN (insn);
3445 }
3446
3447 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3448 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3449 if (GET_CODE (trial) == CALL_INSN)
3450 {
3451 insn = insn_last;
3452 while (insn != NULL_RTX)
3453 {
3454 if (GET_CODE (insn) == CALL_INSN)
3455 CALL_INSN_FUNCTION_USAGE (insn)
3456 = CALL_INSN_FUNCTION_USAGE (trial);
3457
3458 insn = PREV_INSN (insn);
3459 }
3460 }
3461
3462 /* Copy notes, particularly those related to the CFG. */
3463 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3464 {
3465 switch (REG_NOTE_KIND (note))
3466 {
3467 case REG_EH_REGION:
3468 insn = insn_last;
3469 while (insn != NULL_RTX)
3470 {
3471 if (GET_CODE (insn) == CALL_INSN
3472 || (flag_non_call_exceptions
3473 && may_trap_p (PATTERN (insn))))
3474 REG_NOTES (insn)
3475 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3476 XEXP (note, 0),
3477 REG_NOTES (insn));
3478 insn = PREV_INSN (insn);
3479 }
3480 break;
3481
3482 case REG_NORETURN:
3483 case REG_SETJMP:
3484 case REG_ALWAYS_RETURN:
3485 insn = insn_last;
3486 while (insn != NULL_RTX)
3487 {
3488 if (GET_CODE (insn) == CALL_INSN)
3489 REG_NOTES (insn)
3490 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3491 XEXP (note, 0),
3492 REG_NOTES (insn));
3493 insn = PREV_INSN (insn);
3494 }
3495 break;
3496
3497 case REG_NON_LOCAL_GOTO:
3498 insn = insn_last;
3499 while (insn != NULL_RTX)
3500 {
3501 if (GET_CODE (insn) == JUMP_INSN)
3502 REG_NOTES (insn)
3503 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3504 XEXP (note, 0),
3505 REG_NOTES (insn));
3506 insn = PREV_INSN (insn);
3507 }
3508 break;
3509
3510 default:
3511 break;
3512 }
3513 }
3514
3515 /* If there are labels inside the split insns, increment the
3516 usage count so we don't delete the label. */
3517 if (GET_CODE (trial) == INSN)
3518 {
3519 insn = insn_last;
3520 while (insn != NULL_RTX)
3521 {
3522 if (GET_CODE (insn) == INSN)
3523 mark_label_nuses (PATTERN (insn));
3524
3525 insn = PREV_INSN (insn);
3526 }
3527 }
3528
3529 tem = emit_insn_after_scope (seq, trial, INSN_SCOPE (trial));
3530
3531 delete_insn (trial);
3532 if (has_barrier)
3533 emit_barrier_after (tem);
3534
3535 /* Recursively call try_split for each new insn created; by the
3536 time control returns here that insn will be fully split, so
3537 set LAST and continue from the insn after the one returned.
3538 We can't use next_active_insn here since AFTER may be a note.
3539 Ignore deleted insns, which can occur if not optimizing. */
3540 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3541 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3542 tem = try_split (PATTERN (tem), tem, 1);
3543 }
3544 /* Avoid infinite loop if the result matches the original pattern. */
3545 else if (rtx_equal_p (PATTERN (seq), pat))
3546 return trial;
3547 else
3548 {
3549 PATTERN (trial) = PATTERN (seq);
3550 INSN_CODE (trial) = -1;
3551 try_split (PATTERN (trial), trial, last);
3552 }
3553
3554 /* Return either the first or the last insn, depending on which was
3555 requested. */
3556 return last
3557 ? (after ? PREV_INSN (after) : last_insn)
3558 : NEXT_INSN (before);
3559 }
3560
3561 return trial;
3562 }
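
/* A caller-side sketch (not part of the original file): try to split
   one insn in place; on failure try_split simply returns the insn it
   was given.  INSN is hypothetical; guarded out since it is
   illustrative only.  */
#if 0
static void
example_split_insn (insn)
     rtx insn;
{
  if (INSN_P (insn))
    insn = try_split (PATTERN (insn), insn, 1);
}
#endif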
3563 \f
3564 /* Make and return an INSN rtx, initializing all its slots.
3565 Store PATTERN in the pattern slots. */
3566
3567 rtx
3568 make_insn_raw (pattern)
3569 rtx pattern;
3570 {
3571 rtx insn;
3572
3573 insn = rtx_alloc (INSN);
3574
3575 INSN_UID (insn) = cur_insn_uid++;
3576 PATTERN (insn) = pattern;
3577 INSN_CODE (insn) = -1;
3578 LOG_LINKS (insn) = NULL;
3579 REG_NOTES (insn) = NULL;
3580 INSN_SCOPE (insn) = NULL;
3581 BLOCK_FOR_INSN (insn) = NULL;
3582
3583 #ifdef ENABLE_RTL_CHECKING
3584 if (insn
3585 && INSN_P (insn)
3586 && (returnjump_p (insn)
3587 || (GET_CODE (insn) == SET
3588 && SET_DEST (insn) == pc_rtx)))
3589 {
3590 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
3591 debug_rtx (insn);
3592 }
3593 #endif
3594
3595 return insn;
3596 }
3597
3598 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3599
3600 static rtx
3601 make_jump_insn_raw (pattern)
3602 rtx pattern;
3603 {
3604 rtx insn;
3605
3606 insn = rtx_alloc (JUMP_INSN);
3607 INSN_UID (insn) = cur_insn_uid++;
3608
3609 PATTERN (insn) = pattern;
3610 INSN_CODE (insn) = -1;
3611 LOG_LINKS (insn) = NULL;
3612 REG_NOTES (insn) = NULL;
3613 JUMP_LABEL (insn) = NULL;
3614 INSN_SCOPE (insn) = NULL;
3615 BLOCK_FOR_INSN (insn) = NULL;
3616
3617 return insn;
3618 }
3619
3620 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3621
3622 static rtx
3623 make_call_insn_raw (pattern)
3624 rtx pattern;
3625 {
3626 rtx insn;
3627
3628 insn = rtx_alloc (CALL_INSN);
3629 INSN_UID (insn) = cur_insn_uid++;
3630
3631 PATTERN (insn) = pattern;
3632 INSN_CODE (insn) = -1;
3633 LOG_LINKS (insn) = NULL;
3634 REG_NOTES (insn) = NULL;
3635 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3636 INSN_SCOPE (insn) = NULL;
3637 BLOCK_FOR_INSN (insn) = NULL;
3638
3639 return insn;
3640 }
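
/* A sketch (not part of the original file): the emit functions later
   in this file pair one of the make_*_raw allocators above with
   add_insn below to append to the chain.  PAT is a hypothetical
   pattern; guarded out since it is illustrative only.  */
#if 0
static rtx
example_emit_raw (pat)
     rtx pat;
{
  rtx insn = make_insn_raw (pat);

  add_insn (insn);
  return insn;
}
#endif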
3641 \f
3642 /* Add INSN to the end of the doubly-linked list.
3643 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3644
3645 void
3646 add_insn (insn)
3647 rtx insn;
3648 {
3649 PREV_INSN (insn) = last_insn;
3650 NEXT_INSN (insn) = 0;
3651
3652 if (NULL != last_insn)
3653 NEXT_INSN (last_insn) = insn;
3654
3655 if (NULL == first_insn)
3656 first_insn = insn;
3657
3658 last_insn = insn;
3659 }
3660
3661 /* Add INSN into the doubly-linked list after insn AFTER. This and
3662 the next should be the only functions called to insert an insn once
3663 delay slots have been filled since only they know how to update a
3664 SEQUENCE. */
3665
3666 void
3667 add_insn_after (insn, after)
3668 rtx insn, after;
3669 {
3670 rtx next = NEXT_INSN (after);
3671 basic_block bb;
3672
3673 if (optimize && INSN_DELETED_P (after))
3674 abort ();
3675
3676 NEXT_INSN (insn) = next;
3677 PREV_INSN (insn) = after;
3678
3679 if (next)
3680 {
3681 PREV_INSN (next) = insn;
3682 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3683 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3684 }
3685 else if (last_insn == after)
3686 last_insn = insn;
3687 else
3688 {
3689 struct sequence_stack *stack = seq_stack;
3690 /* Scan all pending sequences too. */
3691 for (; stack; stack = stack->next)
3692 if (after == stack->last)
3693 {
3694 stack->last = insn;
3695 break;
3696 }
3697
3698 if (stack == 0)
3699 abort ();
3700 }
3701
3702 if (GET_CODE (after) != BARRIER
3703 && GET_CODE (insn) != BARRIER
3704 && (bb = BLOCK_FOR_INSN (after)))
3705 {
3706 set_block_for_insn (insn, bb);
3707 if (INSN_P (insn))
3708 bb->flags |= BB_DIRTY;
3709 /* Should not happen as the first insn in the BB is always
3710 either a NOTE or a LABEL. */
3711 if (bb->end == after
3712 /* Avoid clobbering of structure when creating new BB. */
3713 && GET_CODE (insn) != BARRIER
3714 && (GET_CODE (insn) != NOTE
3715 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3716 bb->end = insn;
3717 }
3718
3719 NEXT_INSN (after) = insn;
3720 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
3721 {
3722 rtx sequence = PATTERN (after);
3723 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3724 }
3725 }
3726
3727 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3728 the previous should be the only functions called to insert an insn once
3729 delay slots have been filled since only they know how to update a
3730 SEQUENCE. */
3731
3732 void
3733 add_insn_before (insn, before)
3734 rtx insn, before;
3735 {
3736 rtx prev = PREV_INSN (before);
3737 basic_block bb;
3738
3739 if (optimize && INSN_DELETED_P (before))
3740 abort ();
3741
3742 PREV_INSN (insn) = prev;
3743 NEXT_INSN (insn) = before;
3744
3745 if (prev)
3746 {
3747 NEXT_INSN (prev) = insn;
3748 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3749 {
3750 rtx sequence = PATTERN (prev);
3751 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3752 }
3753 }
3754 else if (first_insn == before)
3755 first_insn = insn;
3756 else
3757 {
3758 struct sequence_stack *stack = seq_stack;
3759 /* Scan all pending sequences too. */
3760 for (; stack; stack = stack->next)
3761 if (before == stack->first)
3762 {
3763 stack->first = insn;
3764 break;
3765 }
3766
3767 if (stack == 0)
3768 abort ();
3769 }
3770
3771 if (GET_CODE (before) != BARRIER
3772 && GET_CODE (insn) != BARRIER
3773 && (bb = BLOCK_FOR_INSN (before)))
3774 {
3775 set_block_for_insn (insn, bb);
3776 if (INSN_P (insn))
3777 bb->flags |= BB_DIRTY;
3778 /* Should not happen as the first insn in the BB is always
3779 either a NOTE or a LABEL. */
3780 if (bb->head == insn
3781 /* Avoid clobbering of structure when creating new BB. */
3782 && GET_CODE (insn) != BARRIER
3783 && (GET_CODE (insn) != NOTE
3784 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3785 abort ();
3786 }
3787
3788 PREV_INSN (before) = insn;
3789 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
3790 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3791 }
3792
3793 /* Remove an insn from its doubly-linked list. This function knows how
3794 to handle sequences. */
3795 void
3796 remove_insn (insn)
3797 rtx insn;
3798 {
3799 rtx next = NEXT_INSN (insn);
3800 rtx prev = PREV_INSN (insn);
3801 basic_block bb;
3802
3803 if (prev)
3804 {
3805 NEXT_INSN (prev) = next;
3806 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3807 {
3808 rtx sequence = PATTERN (prev);
3809 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3810 }
3811 }
3812 else if (first_insn == insn)
3813 first_insn = next;
3814 else
3815 {
3816 struct sequence_stack *stack = seq_stack;
3817 /* Scan all pending sequences too. */
3818 for (; stack; stack = stack->next)
3819 if (insn == stack->first)
3820 {
3821 stack->first = next;
3822 break;
3823 }
3824
3825 if (stack == 0)
3826 abort ();
3827 }
3828
3829 if (next)
3830 {
3831 PREV_INSN (next) = prev;
3832 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3833 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3834 }
3835 else if (last_insn == insn)
3836 last_insn = prev;
3837 else
3838 {
3839 struct sequence_stack *stack = seq_stack;
3840 /* Scan all pending sequences too. */
3841 for (; stack; stack = stack->next)
3842 if (insn == stack->last)
3843 {
3844 stack->last = prev;
3845 break;
3846 }
3847
3848 if (stack == 0)
3849 abort ();
3850 }
3851 if (GET_CODE (insn) != BARRIER
3852 && (bb = BLOCK_FOR_INSN (insn)))
3853 {
3854 if (INSN_P (insn))
3855 bb->flags |= BB_DIRTY;
3856 if (bb->head == insn)
3857 {
3858 /* Never ever delete the basic block note without deleting whole
3859 basic block. */
3860 if (GET_CODE (insn) == NOTE)
3861 abort ();
3862 bb->head = next;
3863 }
3864 if (bb->end == insn)
3865 bb->end = prev;
3866 }
3867 }
3868
3869 /* Delete all insns made since FROM.
3870 FROM becomes the new last instruction. */
3871
3872 void
3873 delete_insns_since (from)
3874 rtx from;
3875 {
3876 if (from == 0)
3877 first_insn = 0;
3878 else
3879 NEXT_INSN (from) = 0;
3880 last_insn = from;
3881 }
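
/* A sketch (not part of the original file) of the tentative-emission
   pattern delete_insns_since supports: remember the last insn, attempt
   an expansion, and roll the chain back if it fails.  Assumes the
   optab interface (expand_binop, add_optab, OPTAB_DIRECT) is visible
   here; guarded out since it is illustrative only.  */
#if 0
static void
example_tentative_emit ()
{
  rtx last = get_last_insn ();
  rtx target = gen_reg_rtx (SImode);

  if (expand_binop (SImode, add_optab, const1_rtx, const1_rtx,
                    target, 1, OPTAB_DIRECT) == 0)
    delete_insns_since (last);
}
#endif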
3882
3883 /* This function is deprecated; please use sequences instead.
3884
3885 Move a consecutive bunch of insns to a different place in the chain.
3886 The insns to be moved are those between FROM and TO.
3887 They are moved to a new position after the insn AFTER.
3888 AFTER must not be FROM or TO or any insn in between.
3889
3890 This function does not know about SEQUENCEs and hence should not be
3891 called after delay-slot filling has been done. */
3892
3893 void
3894 reorder_insns_nobb (from, to, after)
3895 rtx from, to, after;
3896 {
3897 /* Splice this bunch out of where it is now. */
3898 if (PREV_INSN (from))
3899 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3900 if (NEXT_INSN (to))
3901 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3902 if (last_insn == to)
3903 last_insn = PREV_INSN (from);
3904 if (first_insn == from)
3905 first_insn = NEXT_INSN (to);
3906
3907 /* Make the new neighbors point to it and it to them. */
3908 if (NEXT_INSN (after))
3909 PREV_INSN (NEXT_INSN (after)) = to;
3910
3911 NEXT_INSN (to) = NEXT_INSN (after);
3912 PREV_INSN (from) = after;
3913 NEXT_INSN (after) = from;
3914 if (after == last_insn)
3915 last_insn = to;
3916 }
3917
3918 /* Same as the function above, but also updates the basic block boundaries. */
3919 void
3920 reorder_insns (from, to, after)
3921 rtx from, to, after;
3922 {
3923 rtx prev = PREV_INSN (from);
3924 basic_block bb, bb2;
3925
3926 reorder_insns_nobb (from, to, after);
3927
3928 if (GET_CODE (after) != BARRIER
3929 && (bb = BLOCK_FOR_INSN (after)))
3930 {
3931 rtx x;
3932 bb->flags |= BB_DIRTY;
3933
3934 if (GET_CODE (from) != BARRIER
3935 && (bb2 = BLOCK_FOR_INSN (from)))
3936 {
3937 if (bb2->end == to)
3938 bb2->end = prev;
3939 bb2->flags |= BB_DIRTY;
3940 }
3941
3942 if (bb->end == after)
3943 bb->end = to;
3944
3945 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3946 set_block_for_insn (x, bb);
3947 }
3948 }
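/* Usage sketch (illustrative): since FROM and TO may denote the same insn,
   a single hypothetical insn INSN can be moved after AFTER with

     reorder_insns (insn, insn, after);

   which, unlike reorder_insns_nobb, also keeps the basic block
   boundaries valid. */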
3949
3950 /* Return the line note insn preceding INSN. */
3951
3952 static rtx
3953 find_line_note (insn)
3954 rtx insn;
3955 {
3956 if (no_line_numbers)
3957 return 0;
3958
3959 for (; insn; insn = PREV_INSN (insn))
3960 if (GET_CODE (insn) == NOTE
3961 && NOTE_LINE_NUMBER (insn) >= 0)
3962 break;
3963
3964 return insn;
3965 }
3966
3967 /* Like reorder_insns, but inserts line notes to preserve the line numbers
3968 of the moved insns when debugging. This may insert a note between AFTER
3969 and FROM, and another one after TO. */
3970
3971 void
3972 reorder_insns_with_line_notes (from, to, after)
3973 rtx from, to, after;
3974 {
3975 rtx from_line = find_line_note (from);
3976 rtx after_line = find_line_note (after);
3977
3978 reorder_insns (from, to, after);
3979
3980 if (from_line == after_line)
3981 return;
3982
3983 if (from_line)
3984 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3985 NOTE_LINE_NUMBER (from_line),
3986 after);
3987 if (after_line)
3988 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3989 NOTE_LINE_NUMBER (after_line),
3990 to);
3991 }
3992
3993 /* Remove unnecessary notes from the instruction stream. */
3994
3995 void
3996 remove_unnecessary_notes ()
3997 {
3998 rtx block_stack = NULL_RTX;
3999 rtx eh_stack = NULL_RTX;
4000 rtx insn;
4001 rtx next;
4002 rtx tmp;
4003
4004 /* We must not remove the first instruction in the function because
4005 the compiler depends on the first instruction being a note. */
4006 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
4007 {
4008 /* Remember what's next. */
4009 next = NEXT_INSN (insn);
4010
4011 /* We're only interested in notes. */
4012 if (GET_CODE (insn) != NOTE)
4013 continue;
4014
4015 switch (NOTE_LINE_NUMBER (insn))
4016 {
4017 case NOTE_INSN_DELETED:
4018 case NOTE_INSN_LOOP_END_TOP_COND:
4019 remove_insn (insn);
4020 break;
4021
4022 case NOTE_INSN_EH_REGION_BEG:
4023 eh_stack = alloc_INSN_LIST (insn, eh_stack);
4024 break;
4025
4026 case NOTE_INSN_EH_REGION_END:
4027 /* Too many end notes. */
4028 if (eh_stack == NULL_RTX)
4029 abort ();
4030 /* Mismatched nesting. */
4031 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
4032 abort ();
4033 tmp = eh_stack;
4034 eh_stack = XEXP (eh_stack, 1);
4035 free_INSN_LIST_node (tmp);
4036 break;
4037
4038 case NOTE_INSN_BLOCK_BEG:
4039 /* By now, all notes indicating lexical blocks should have
4040 NOTE_BLOCK filled in. */
4041 if (NOTE_BLOCK (insn) == NULL_TREE)
4042 abort ();
4043 block_stack = alloc_INSN_LIST (insn, block_stack);
4044 break;
4045
4046 case NOTE_INSN_BLOCK_END:
4047 /* Too many end notes. */
4048 if (block_stack == NULL_RTX)
4049 abort ();
4050 /* Mismatched nesting. */
4051 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
4052 abort ();
4053 tmp = block_stack;
4054 block_stack = XEXP (block_stack, 1);
4055 free_INSN_LIST_node (tmp);
4056
4057 /* Scan back to see if there are any non-note instructions
4058 between INSN and the beginning of this block. If not,
4059 then there is no PC range in the generated code that will
4060 actually be in this block, so there's no point in
4061 remembering the existence of the block. */
4062 for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
4063 {
4064 /* This block contains a real instruction. Note that we
4065 don't include labels; if the only thing in the block
4066 is a label, then there are still no PC values that
4067 lie within the block. */
4068 if (INSN_P (tmp))
4069 break;
4070
4071 /* We're only interested in NOTEs. */
4072 if (GET_CODE (tmp) != NOTE)
4073 continue;
4074
4075 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
4076 {
4077 /* We just verified that this BLOCK matches us with
4078 the block_stack check above. Never delete the
4079 BLOCK for the outermost scope of the function; we
4080 can refer to names from that scope even if the
4081 block notes are messed up. */
4082 if (! is_body_block (NOTE_BLOCK (insn))
4083 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
4084 {
4085 remove_insn (tmp);
4086 remove_insn (insn);
4087 }
4088 break;
4089 }
4090 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
4091 /* There's a nested block. We need to leave the
4092 current block in place since otherwise the debugger
4093 wouldn't be able to show symbols from our block in
4094 the nested block. */
4095 break;
4096 }
4097 }
4098 }
4099
4100 /* Too many begin notes. */
4101 if (block_stack || eh_stack)
4102 abort ();
4103 }
4104
4105 \f
4106 /* Emit insn(s) of given code and pattern
4107 at a specified place within the doubly-linked list.
4108
4109 All of the emit_foo global entry points accept an object
4110 X which is either an insn list or a PATTERN of a single
4111 instruction.
4112
4113 There are thus a few canonical ways to generate code and
4114 emit it at a specific place in the instruction stream. For
4115 example, consider the instruction named SPOT and the fact that
4116 we would like to emit some instructions before SPOT. We might
4117 do it like this:
4118
4119 start_sequence ();
4120 ... emit the new instructions ...
4121 insns_head = get_insns ();
4122 end_sequence ();
4123
4124 emit_insn_before (insns_head, SPOT);
4125
4126 It used to be common to generate SEQUENCE rtl instead, but that
4127 is a relic of the past which no longer occurs. The reason is that
4128 SEQUENCE rtl results in badly fragmented RTL memory, since the SEQUENCE
4129 generated would almost certainly die right after it was created. */
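/* Since the emit_foo entry points also accept a bare PATTERN, a single
   instruction can be emitted without a sequence at all. A minimal sketch,
   where DEST_REG and SRC_REG are hypothetical registers:

     emit_insn_before (gen_rtx_SET (VOIDmode, dest_reg, src_reg), SPOT);
*/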
4130
4131 /* Make X be output before the instruction BEFORE. */
4132
4133 rtx
4134 emit_insn_before (x, before)
4135 rtx x, before;
4136 {
4137 rtx last = before;
4138 rtx insn;
4139
4140 #ifdef ENABLE_RTL_CHECKING
4141 if (before == NULL_RTX)
4142 abort ();
4143 #endif
4144
4145 if (x == NULL_RTX)
4146 return last;
4147
4148 switch (GET_CODE (x))
4149 {
4150 case INSN:
4151 case JUMP_INSN:
4152 case CALL_INSN:
4153 case CODE_LABEL:
4154 case BARRIER:
4155 case NOTE:
4156 insn = x;
4157 while (insn)
4158 {
4159 rtx next = NEXT_INSN (insn);
4160 add_insn_before (insn, before);
4161 last = insn;
4162 insn = next;
4163 }
4164 break;
4165
4166 #ifdef ENABLE_RTL_CHECKING
4167 case SEQUENCE:
4168 abort ();
4169 break;
4170 #endif
4171
4172 default:
4173 last = make_insn_raw (x);
4174 add_insn_before (last, before);
4175 break;
4176 }
4177
4178 return last;
4179 }
4180
4181 /* Make an instruction with body X and code JUMP_INSN
4182 and output it before the instruction BEFORE. */
4183
4184 rtx
4185 emit_jump_insn_before (x, before)
4186 rtx x, before;
4187 {
4188 rtx insn, last = NULL_RTX;
4189
4190 #ifdef ENABLE_RTL_CHECKING
4191 if (before == NULL_RTX)
4192 abort ();
4193 #endif
4194
4195 switch (GET_CODE (x))
4196 {
4197 case INSN:
4198 case JUMP_INSN:
4199 case CALL_INSN:
4200 case CODE_LABEL:
4201 case BARRIER:
4202 case NOTE:
4203 insn = x;
4204 while (insn)
4205 {
4206 rtx next = NEXT_INSN (insn);
4207 add_insn_before (insn, before);
4208 last = insn;
4209 insn = next;
4210 }
4211 break;
4212
4213 #ifdef ENABLE_RTL_CHECKING
4214 case SEQUENCE:
4215 abort ();
4216 break;
4217 #endif
4218
4219 default:
4220 last = make_jump_insn_raw (x);
4221 add_insn_before (last, before);
4222 break;
4223 }
4224
4225 return last;
4226 }
4227
4228 /* Make an instruction with body X and code CALL_INSN
4229 and output it before the instruction BEFORE. */
4230
4231 rtx
4232 emit_call_insn_before (x, before)
4233 rtx x, before;
4234 {
4235 rtx last = NULL_RTX, insn;
4236
4237 #ifdef ENABLE_RTL_CHECKING
4238 if (before == NULL_RTX)
4239 abort ();
4240 #endif
4241
4242 switch (GET_CODE (x))
4243 {
4244 case INSN:
4245 case JUMP_INSN:
4246 case CALL_INSN:
4247 case CODE_LABEL:
4248 case BARRIER:
4249 case NOTE:
4250 insn = x;
4251 while (insn)
4252 {
4253 rtx next = NEXT_INSN (insn);
4254 add_insn_before (insn, before);
4255 last = insn;
4256 insn = next;
4257 }
4258 break;
4259
4260 #ifdef ENABLE_RTL_CHECKING
4261 case SEQUENCE:
4262 abort ();
4263 break;
4264 #endif
4265
4266 default:
4267 last = make_call_insn_raw (x);
4268 add_insn_before (last, before);
4269 break;
4270 }
4271
4272 return last;
4273 }
4274
4275 /* Make an insn of code BARRIER
4276 and output it before the insn BEFORE. */
4277
4278 rtx
4279 emit_barrier_before (before)
4280 rtx before;
4281 {
4282 rtx insn = rtx_alloc (BARRIER);
4283
4284 INSN_UID (insn) = cur_insn_uid++;
4285
4286 add_insn_before (insn, before);
4287 return insn;
4288 }
4289
4290 /* Emit the label LABEL before the insn BEFORE. */
4291
4292 rtx
4293 emit_label_before (label, before)
4294 rtx label, before;
4295 {
4296 /* This can be called twice for the same label as a result of the
4297 confusion that follows a syntax error! So make it harmless. */
4298 if (INSN_UID (label) == 0)
4299 {
4300 INSN_UID (label) = cur_insn_uid++;
4301 add_insn_before (label, before);
4302 }
4303
4304 return label;
4305 }
4306
4307 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4308
4309 rtx
4310 emit_note_before (subtype, before)
4311 int subtype;
4312 rtx before;
4313 {
4314 rtx note = rtx_alloc (NOTE);
4315 INSN_UID (note) = cur_insn_uid++;
4316 NOTE_SOURCE_FILE (note) = 0;
4317 NOTE_LINE_NUMBER (note) = subtype;
4318 BLOCK_FOR_INSN (note) = NULL;
4319
4320 add_insn_before (note, before);
4321 return note;
4322 }
4323 \f
4324 /* Helper for emit_insn_after, handles lists of instructions
4325 efficiently. */
4326
4327 static rtx emit_insn_after_1 PARAMS ((rtx, rtx));
4328
4329 static rtx
4330 emit_insn_after_1 (first, after)
4331 rtx first, after;
4332 {
4333 rtx last;
4334 rtx after_after;
4335 basic_block bb;
4336
4337 if (GET_CODE (after) != BARRIER
4338 && (bb = BLOCK_FOR_INSN (after)))
4339 {
4340 bb->flags |= BB_DIRTY;
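/* The loop body below runs for every insn except the final one (the loop
   exits once NEXT_INSN (last) is zero), so the same test is repeated once
   more after the loop for LAST itself. */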
4341 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4342 if (GET_CODE (last) != BARRIER)
4343 set_block_for_insn (last, bb);
4344 if (GET_CODE (last) != BARRIER)
4345 set_block_for_insn (last, bb);
4346 if (bb->end == after)
4347 bb->end = last;
4348 }
4349 else
4350 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4351 continue;
4352
4353 after_after = NEXT_INSN (after);
4354
4355 NEXT_INSN (after) = first;
4356 PREV_INSN (first) = after;
4357 NEXT_INSN (last) = after_after;
4358 if (after_after)
4359 PREV_INSN (after_after) = last;
4360
4361 if (after == last_insn)
4362 last_insn = last;
4363 return last;
4364 }
4365
4366 /* Make X be output after the insn AFTER. */
4367
4368 rtx
4369 emit_insn_after (x, after)
4370 rtx x, after;
4371 {
4372 rtx last = after;
4373
4374 #ifdef ENABLE_RTL_CHECKING
4375 if (after == NULL_RTX)
4376 abort ();
4377 #endif
4378
4379 if (x == NULL_RTX)
4380 return last;
4381
4382 switch (GET_CODE (x))
4383 {
4384 case INSN:
4385 case JUMP_INSN:
4386 case CALL_INSN:
4387 case CODE_LABEL:
4388 case BARRIER:
4389 case NOTE:
4390 last = emit_insn_after_1 (x, after);
4391 break;
4392
4393 #ifdef ENABLE_RTL_CHECKING
4394 case SEQUENCE:
4395 abort ();
4396 break;
4397 #endif
4398
4399 default:
4400 last = make_insn_raw (x);
4401 add_insn_after (last, after);
4402 break;
4403 }
4404
4405 return last;
4406 }
4407
4408 /* Similar to emit_insn_after, except that line notes are to be inserted so
4409 as to act as if this insn were at FROM. */
4410
4411 void
4412 emit_insn_after_with_line_notes (x, after, from)
4413 rtx x, after, from;
4414 {
4415 rtx from_line = find_line_note (from);
4416 rtx after_line = find_line_note (after);
4417 rtx insn = emit_insn_after (x, after);
4418
4419 if (from_line)
4420 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
4421 NOTE_LINE_NUMBER (from_line),
4422 after);
4423
4424 if (after_line)
4425 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
4426 NOTE_LINE_NUMBER (after_line),
4427 insn);
4428 }
4429
4430 /* Make an insn of code JUMP_INSN with body X
4431 and output it after the insn AFTER. */
4432
4433 rtx
4434 emit_jump_insn_after (x, after)
4435 rtx x, after;
4436 {
4437 rtx last;
4438
4439 #ifdef ENABLE_RTL_CHECKING
4440 if (after == NULL_RTX)
4441 abort ();
4442 #endif
4443
4444 switch (GET_CODE (x))
4445 {
4446 case INSN:
4447 case JUMP_INSN:
4448 case CALL_INSN:
4449 case CODE_LABEL:
4450 case BARRIER:
4451 case NOTE:
4452 last = emit_insn_after_1 (x, after);
4453 break;
4454
4455 #ifdef ENABLE_RTL_CHECKING
4456 case SEQUENCE:
4457 abort ();
4458 break;
4459 #endif
4460
4461 default:
4462 last = make_jump_insn_raw (x);
4463 add_insn_after (last, after);
4464 break;
4465 }
4466
4467 return last;
4468 }
4469
4470 /* Make an instruction with body X and code CALL_INSN
4471 and output it after the instruction AFTER. */
4472
4473 rtx
4474 emit_call_insn_after (x, after)
4475 rtx x, after;
4476 {
4477 rtx last;
4478
4479 #ifdef ENABLE_RTL_CHECKING
4480 if (after == NULL_RTX)
4481 abort ();
4482 #endif
4483
4484 switch (GET_CODE (x))
4485 {
4486 case INSN:
4487 case JUMP_INSN:
4488 case CALL_INSN:
4489 case CODE_LABEL:
4490 case BARRIER:
4491 case NOTE:
4492 last = emit_insn_after_1 (x, after);
4493 break;
4494
4495 #ifdef ENABLE_RTL_CHECKING
4496 case SEQUENCE:
4497 abort ();
4498 break;
4499 #endif
4500
4501 default:
4502 last = make_call_insn_raw (x);
4503 add_insn_after (last, after);
4504 break;
4505 }
4506
4507 return last;
4508 }
4509
4510 /* Make an insn of code BARRIER
4511 and output it after the insn AFTER. */
4512
4513 rtx
4514 emit_barrier_after (after)
4515 rtx after;
4516 {
4517 rtx insn = rtx_alloc (BARRIER);
4518
4519 INSN_UID (insn) = cur_insn_uid++;
4520
4521 add_insn_after (insn, after);
4522 return insn;
4523 }
4524
4525 /* Emit the label LABEL after the insn AFTER. */
4526
4527 rtx
4528 emit_label_after (label, after)
4529 rtx label, after;
4530 {
4531 /* This can be called twice for the same label
4532 as a result of the confusion that follows a syntax error!
4533 So make it harmless. */
4534 if (INSN_UID (label) == 0)
4535 {
4536 INSN_UID (label) = cur_insn_uid++;
4537 add_insn_after (label, after);
4538 }
4539
4540 return label;
4541 }
4542
4543 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4544
4545 rtx
4546 emit_note_after (subtype, after)
4547 int subtype;
4548 rtx after;
4549 {
4550 rtx note = rtx_alloc (NOTE);
4551 INSN_UID (note) = cur_insn_uid++;
4552 NOTE_SOURCE_FILE (note) = 0;
4553 NOTE_LINE_NUMBER (note) = subtype;
4554 BLOCK_FOR_INSN (note) = NULL;
4555 add_insn_after (note, after);
4556 return note;
4557 }
4558
4559 /* Emit a line note for FILE and LINE after the insn AFTER. */
4560
4561 rtx
4562 emit_line_note_after (file, line, after)
4563 const char *file;
4564 int line;
4565 rtx after;
4566 {
4567 rtx note;
4568
4569 if (no_line_numbers && line > 0)
4570 {
4571 cur_insn_uid++;
4572 return 0;
4573 }
4574
4575 note = rtx_alloc (NOTE);
4576 INSN_UID (note) = cur_insn_uid++;
4577 NOTE_SOURCE_FILE (note) = file;
4578 NOTE_LINE_NUMBER (note) = line;
4579 BLOCK_FOR_INSN (note) = NULL;
4580 add_insn_after (note, after);
4581 return note;
4582 }
4583 \f
4584 /* Like emit_insn_after, but set INSN_SCOPE according to SCOPE. */
4585 rtx
4586 emit_insn_after_scope (pattern, after, scope)
4587 rtx pattern, after;
4588 tree scope;
4589 {
4590 rtx last = emit_insn_after (pattern, after);
4591
4592 after = NEXT_INSN (after);
4593 while (1)
4594 {
4595 if (active_insn_p (after))
4596 INSN_SCOPE (after) = scope;
4597 if (after == last)
4598 break;
4599 after = NEXT_INSN (after);
4600 }
4601 return last;
4602 }
4603
4604 /* Like emit_jump_insn_after, but set INSN_SCOPE according to SCOPE. */
4605 rtx
4606 emit_jump_insn_after_scope (pattern, after, scope)
4607 rtx pattern, after;
4608 tree scope;
4609 {
4610 rtx last = emit_jump_insn_after (pattern, after);
4611
4612 after = NEXT_INSN (after);
4613 while (1)
4614 {
4615 if (active_insn_p (after))
4616 INSN_SCOPE (after) = scope;
4617 if (after == last)
4618 break;
4619 after = NEXT_INSN (after);
4620 }
4621 return last;
4622 }
4623
4624 /* Like emit_call_insn_after, but set INSN_SCOPE according to SCOPE. */
4625 rtx
4626 emit_call_insn_after_scope (pattern, after, scope)
4627 rtx pattern, after;
4628 tree scope;
4629 {
4630 rtx last = emit_call_insn_after (pattern, after);
4631
4632 after = NEXT_INSN (after);
4633 while (1)
4634 {
4635 if (active_insn_p (after))
4636 INSN_SCOPE (after) = scope;
4637 if (after == last)
4638 break;
4639 after = NEXT_INSN (after);
4640 }
4641 return last;
4642 }
4643
4644 /* Like emit_insn_before, but set INSN_SCOPE according to SCOPE. */
4645 rtx
4646 emit_insn_before_scope (pattern, before, scope)
4647 rtx pattern, before;
4648 tree scope;
4649 {
4650 rtx first = PREV_INSN (before);
4651 rtx last = emit_insn_before (pattern, before);
4652
4653 first = NEXT_INSN (first);
4654 while (1)
4655 {
4656 if (active_insn_p (first))
4657 INSN_SCOPE (first) = scope;
4658 if (first == last)
4659 break;
4660 first = NEXT_INSN (first);
4661 }
4662 return last;
4663 }
4664 \f
4665 /* Take X and emit it at the end of the doubly-linked
4666 INSN list.
4667
4668 Returns the last insn emitted. */
4669
4670 rtx
4671 emit_insn (x)
4672 rtx x;
4673 {
4674 rtx last = last_insn;
4675 rtx insn;
4676
4677 if (x == NULL_RTX)
4678 return last;
4679
4680 switch (GET_CODE (x))
4681 {
4682 case INSN:
4683 case JUMP_INSN:
4684 case CALL_INSN:
4685 case CODE_LABEL:
4686 case BARRIER:
4687 case NOTE:
4688 insn = x;
4689 while (insn)
4690 {
4691 rtx next = NEXT_INSN (insn);
4692 add_insn (insn);
4693 last = insn;
4694 insn = next;
4695 }
4696 break;
4697
4698 #ifdef ENABLE_RTL_CHECKING
4699 case SEQUENCE:
4700 abort ();
4701 break;
4702 #endif
4703
4704 default:
4705 last = make_insn_raw (x);
4706 add_insn (last);
4707 break;
4708 }
4709
4710 return last;
4711 }
4712
4713 /* Make an insn of code JUMP_INSN with pattern X
4714 and add it to the end of the doubly-linked list. */
4715
4716 rtx
4717 emit_jump_insn (x)
4718 rtx x;
4719 {
4720 rtx last = NULL_RTX, insn;
4721
4722 switch (GET_CODE (x))
4723 {
4724 case INSN:
4725 case JUMP_INSN:
4726 case CALL_INSN:
4727 case CODE_LABEL:
4728 case BARRIER:
4729 case NOTE:
4730 insn = x;
4731 while (insn)
4732 {
4733 rtx next = NEXT_INSN (insn);
4734 add_insn (insn);
4735 last = insn;
4736 insn = next;
4737 }
4738 break;
4739
4740 #ifdef ENABLE_RTL_CHECKING
4741 case SEQUENCE:
4742 abort ();
4743 break;
4744 #endif
4745
4746 default:
4747 last = make_jump_insn_raw (x);
4748 add_insn (last);
4749 break;
4750 }
4751
4752 return last;
4753 }
4754
4755 /* Make an insn of code CALL_INSN with pattern X
4756 and add it to the end of the doubly-linked list. */
4757
4758 rtx
4759 emit_call_insn (x)
4760 rtx x;
4761 {
4762 rtx insn;
4763
4764 switch (GET_CODE (x))
4765 {
4766 case INSN:
4767 case JUMP_INSN:
4768 case CALL_INSN:
4769 case CODE_LABEL:
4770 case BARRIER:
4771 case NOTE:
4772 insn = emit_insn (x);
4773 break;
4774
4775 #ifdef ENABLE_RTL_CHECKING
4776 case SEQUENCE:
4777 abort ();
4778 break;
4779 #endif
4780
4781 default:
4782 insn = make_call_insn_raw (x);
4783 add_insn (insn);
4784 break;
4785 }
4786
4787 return insn;
4788 }
4789
4790 /* Add the label LABEL to the end of the doubly-linked list. */
4791
4792 rtx
4793 emit_label (label)
4794 rtx label;
4795 {
4796 /* This can be called twice for the same label
4797 as a result of the confusion that follows a syntax error!
4798 So make it harmless. */
4799 if (INSN_UID (label) == 0)
4800 {
4801 INSN_UID (label) = cur_insn_uid++;
4802 add_insn (label);
4803 }
4804 return label;
4805 }
4806
4807 /* Make an insn of code BARRIER
4808 and add it to the end of the doubly-linked list. */
4809
4810 rtx
4811 emit_barrier ()
4812 {
4813 rtx barrier = rtx_alloc (BARRIER);
4814 INSN_UID (barrier) = cur_insn_uid++;
4815 add_insn (barrier);
4816 return barrier;
4817 }
4818
4819 /* Make an insn of code NOTE
4820 with data-fields specified by FILE and LINE
4821 and add it to the end of the doubly-linked list,
4822 but only if line-numbers are desired for debugging info. */
4823
4824 rtx
4825 emit_line_note (file, line)
4826 const char *file;
4827 int line;
4828 {
4829 set_file_and_line_for_stmt (file, line);
4830
4831 #if 0
4832 if (no_line_numbers)
4833 return 0;
4834 #endif
4835
4836 return emit_note (file, line);
4837 }
4838
4839 /* Make an insn of code NOTE
4840 with data-fields specified by FILE and LINE
4841 and add it to the end of the doubly-linked list.
4842 If it is a line-number NOTE, omit it if it matches the previous one. */
4843
4844 rtx
4845 emit_note (file, line)
4846 const char *file;
4847 int line;
4848 {
4849 rtx note;
4850
4851 if (line > 0)
4852 {
4853 if (file && last_filename && !strcmp (file, last_filename)
4854 && line == last_linenum)
4855 return 0;
4856 last_filename = file;
4857 last_linenum = line;
4858 }
4859
4860 if (no_line_numbers && line > 0)
4861 {
4862 cur_insn_uid++;
4863 return 0;
4864 }
4865
4866 note = rtx_alloc (NOTE);
4867 INSN_UID (note) = cur_insn_uid++;
4868 NOTE_SOURCE_FILE (note) = file;
4869 NOTE_LINE_NUMBER (note) = line;
4870 BLOCK_FOR_INSN (note) = NULL;
4871 add_insn (note);
4872 return note;
4873 }
4874
4875 /* Emit a NOTE, and don't omit it even if LINE is the previous note. */
4876
4877 rtx
4878 emit_line_note_force (file, line)
4879 const char *file;
4880 int line;
4881 {
4882 last_linenum = -1;
4883 return emit_line_note (file, line);
4884 }
4885
4886 /* Cause next statement to emit a line note even if the line number
4887 has not changed. This is used at the beginning of a function. */
4888
4889 void
4890 force_next_line_note ()
4891 {
4892 last_linenum = -1;
4893 }
4894
4895 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4896 note of this type already exists, remove it first. */
4897
4898 rtx
4899 set_unique_reg_note (insn, kind, datum)
4900 rtx insn;
4901 enum reg_note kind;
4902 rtx datum;
4903 {
4904 rtx note = find_reg_note (insn, kind, NULL_RTX);
4905
4906 switch (kind)
4907 {
4908 case REG_EQUAL:
4909 case REG_EQUIV:
4910 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4911 has multiple sets (some callers assume single_set
4912 means the insn only has one set, when in fact it
4913 means the insn only has one *useful* set). */
4914 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4915 {
4916 if (note)
4917 abort ();
4918 return NULL_RTX;
4919 }
4920
4921 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4922 It serves no useful purpose and breaks eliminate_regs. */
4923 if (GET_CODE (datum) == ASM_OPERANDS)
4924 return NULL_RTX;
4925 break;
4926
4927 default:
4928 break;
4929 }
4930
4931 if (note)
4932 {
4933 XEXP (note, 0) = datum;
4934 return note;
4935 }
4936
4937 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4938 return REG_NOTES (insn);
4939 }
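/* Usage sketch (illustrative): record that a hypothetical INSN computes
   the constant 42, replacing any earlier REG_EQUAL note on it.

     set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));
*/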
4940 \f
4941 /* Return an indication of which type of insn should have X as a body.
4942 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4943
4944 enum rtx_code
4945 classify_insn (x)
4946 rtx x;
4947 {
4948 if (GET_CODE (x) == CODE_LABEL)
4949 return CODE_LABEL;
4950 if (GET_CODE (x) == CALL)
4951 return CALL_INSN;
4952 if (GET_CODE (x) == RETURN)
4953 return JUMP_INSN;
4954 if (GET_CODE (x) == SET)
4955 {
4956 if (SET_DEST (x) == pc_rtx)
4957 return JUMP_INSN;
4958 else if (GET_CODE (SET_SRC (x)) == CALL)
4959 return CALL_INSN;
4960 else
4961 return INSN;
4962 }
4963 if (GET_CODE (x) == PARALLEL)
4964 {
4965 int j;
4966 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4967 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4968 return CALL_INSN;
4969 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4970 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4971 return JUMP_INSN;
4972 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4973 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4974 return CALL_INSN;
4975 }
4976 return INSN;
4977 }
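/* For example (an illustrative sketch, LABEL being a hypothetical
   CODE_LABEL), a SET whose destination is the program counter is
   classified as a jump:

     classify_insn (gen_rtx_SET (VOIDmode, pc_rtx,
                                 gen_rtx_LABEL_REF (VOIDmode, label)))

   returns JUMP_INSN, while a bare CALL pattern yields CALL_INSN. */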
4978
4979 /* Emit the rtl pattern X as an appropriate kind of insn.
4980 If X is a label, it is simply added into the insn chain. */
4981
4982 rtx
4983 emit (x)
4984 rtx x;
4985 {
4986 enum rtx_code code = classify_insn (x);
4987
4988 if (code == CODE_LABEL)
4989 return emit_label (x);
4990 else if (code == INSN)
4991 return emit_insn (x);
4992 else if (code == JUMP_INSN)
4993 {
4994 rtx insn = emit_jump_insn (x);
4995 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4996 return emit_barrier ();
4997 return insn;
4998 }
4999 else if (code == CALL_INSN)
5000 return emit_call_insn (x);
5001 else
5002 abort ();
5003 }
5004 \f
5005 /* Space for free sequence stack entries. */
5006 static GTY ((deletable (""))) struct sequence_stack *free_sequence_stack;
5007
5008 /* Begin emitting insns to a sequence which can be packaged in an
5009 RTL_EXPR. If this sequence will contain something that might cause
5010 the compiler to pop arguments to function calls (because those
5011 pops have previously been deferred; see INHIBIT_DEFER_POP for more
5012 details), use do_pending_stack_adjust before calling this function.
5013 That will ensure that the deferred pops are not accidentally
5014 emitted in the middle of this sequence. */
5015
5016 void
5017 start_sequence ()
5018 {
5019 struct sequence_stack *tem;
5020
5021 if (free_sequence_stack != NULL)
5022 {
5023 tem = free_sequence_stack;
5024 free_sequence_stack = tem->next;
5025 }
5026 else
5027 tem = (struct sequence_stack *) ggc_alloc (sizeof (struct sequence_stack));
5028
5029 tem->next = seq_stack;
5030 tem->first = first_insn;
5031 tem->last = last_insn;
5032 tem->sequence_rtl_expr = seq_rtl_expr;
5033
5034 seq_stack = tem;
5035
5036 first_insn = 0;
5037 last_insn = 0;
5038 }
5039
5040 /* Similarly, but indicate that this sequence will be placed in T, an
5041 RTL_EXPR. See the documentation for start_sequence for more
5042 information about how to use this function. */
5043
5044 void
5045 start_sequence_for_rtl_expr (t)
5046 tree t;
5047 {
5048 start_sequence ();
5049
5050 seq_rtl_expr = t;
5051 }
5052
5053 /* Set up the insn chain starting with FIRST as the current sequence,
5054 saving the previously current one. See the documentation for
5055 start_sequence for more information about how to use this function. */
5056
5057 void
5058 push_to_sequence (first)
5059 rtx first;
5060 {
5061 rtx last;
5062
5063 start_sequence ();
5064
5065 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
5066
5067 first_insn = first;
5068 last_insn = last;
5069 }
5070
5071 /* Set up the insn chain from FIRST to LAST as the current sequence. */
5072
5073 void
5074 push_to_full_sequence (first, last)
5075 rtx first, last;
5076 {
5077 start_sequence ();
5078 first_insn = first;
5079 last_insn = last;
5080 /* We really should have the end of the insn chain here. */
5081 if (last && NEXT_INSN (last))
5082 abort ();
5083 }
5084
5085 /* Set up the outer-level insn chain
5086 as the current sequence, saving the previously current one. */
5087
5088 void
5089 push_topmost_sequence ()
5090 {
5091 struct sequence_stack *stack, *top = NULL;
5092
5093 start_sequence ();
5094
5095 for (stack = seq_stack; stack; stack = stack->next)
5096 top = stack;
5097
5098 first_insn = top->first;
5099 last_insn = top->last;
5100 seq_rtl_expr = top->sequence_rtl_expr;
5101 }
5102
5103 /* After emitting to the outer-level insn chain, update the outer-level
5104 insn chain, and restore the previous saved state. */
5105
5106 void
5107 pop_topmost_sequence ()
5108 {
5109 struct sequence_stack *stack, *top = NULL;
5110
5111 for (stack = seq_stack; stack; stack = stack->next)
5112 top = stack;
5113
5114 top->first = first_insn;
5115 top->last = last_insn;
5116 /* ??? Why don't we save seq_rtl_expr here? */
5117
5118 end_sequence ();
5119 }
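/* Usage sketch (illustrative): emit a hypothetical pattern PAT onto the
   function's outermost chain while a nested sequence is in progress.

     push_topmost_sequence ();
     emit_insn (pat);
     pop_topmost_sequence ();
*/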
5120
5121 /* After emitting to a sequence, restore previous saved state.
5122
5123 To get the contents of the sequence just made, you must call
5124 `get_insns' *before* calling here.
5125
5126 If the compiler might have deferred popping arguments while
5127 generating this sequence, and this sequence will not be immediately
5128 inserted into the instruction stream, use do_pending_stack_adjust
5129 before calling get_insns. That will ensure that the deferred
5130 pops are inserted into this sequence, and not into some random
5131 location in the instruction stream. See INHIBIT_DEFER_POP for more
5132 information about deferred popping of arguments. */
5133
5134 void
5135 end_sequence ()
5136 {
5137 struct sequence_stack *tem = seq_stack;
5138
5139 first_insn = tem->first;
5140 last_insn = tem->last;
5141 seq_rtl_expr = tem->sequence_rtl_expr;
5142 seq_stack = tem->next;
5143
5144 memset (tem, 0, sizeof (*tem));
5145 tem->next = free_sequence_stack;
5146 free_sequence_stack = tem;
5147 }
5148
5149 /* This works like end_sequence, but records the old sequence in FIRST
5150 and LAST. */
5151
5152 void
5153 end_full_sequence (first, last)
5154 rtx *first, *last;
5155 {
5156 *first = first_insn;
5157 *last = last_insn;
5158 end_sequence ();
5159 }
5160
5161 /* Return 1 if currently emitting into a sequence. */
5162
5163 int
5164 in_sequence_p ()
5165 {
5166 return seq_stack != 0;
5167 }
5168 \f
5169 /* Put the various virtual registers into REGNO_REG_RTX. */
5170
5171 void
5172 init_virtual_regs (es)
5173 struct emit_status *es;
5174 {
5175 rtx *ptr = es->x_regno_reg_rtx;
5176 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5177 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5178 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5179 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5180 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5181 }
5182
5183 \f
5184 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5185 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5186 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5187 static int copy_insn_n_scratches;
5188
5189 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5190 copied an ASM_OPERANDS.
5191 In that case, it is the original input-operand vector. */
5192 static rtvec orig_asm_operands_vector;
5193
5194 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5195 copied an ASM_OPERANDS.
5196 In that case, it is the copied input-operand vector. */
5197 static rtvec copy_asm_operands_vector;
5198
5199 /* Likewise for the constraints vector. */
5200 static rtvec orig_asm_constraints_vector;
5201 static rtvec copy_asm_constraints_vector;
5202
5203 /* Recursively create a new copy of an rtx for copy_insn.
5204 This function differs from copy_rtx in that it handles SCRATCHes and
5205 ASM_OPERANDs properly.
5206 Normally, this function is not used directly; use copy_insn as front end.
5207 However, you could first copy an insn pattern with copy_insn and then use
5208 this function afterwards to properly copy any REG_NOTEs containing
5209 SCRATCHes. */
5210
5211 rtx
5212 copy_insn_1 (orig)
5213 rtx orig;
5214 {
5215 rtx copy;
5216 int i, j;
5217 RTX_CODE code;
5218 const char *format_ptr;
5219
5220 code = GET_CODE (orig);
5221
5222 switch (code)
5223 {
5224 case REG:
5225 case QUEUED:
5226 case CONST_INT:
5227 case CONST_DOUBLE:
5228 case CONST_VECTOR:
5229 case SYMBOL_REF:
5230 case CODE_LABEL:
5231 case PC:
5232 case CC0:
5233 case ADDRESSOF:
5234 return orig;
5235
5236 case SCRATCH:
5237 for (i = 0; i < copy_insn_n_scratches; i++)
5238 if (copy_insn_scratch_in[i] == orig)
5239 return copy_insn_scratch_out[i];
5240 break;
5241
5242 case CONST:
5243 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
5244 a LABEL_REF, it isn't sharable. */
5245 if (GET_CODE (XEXP (orig, 0)) == PLUS
5246 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
5247 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
5248 return orig;
5249 break;
5250
5251 /* A MEM with a constant address is not sharable. The problem is that
5252 the constant address may need to be reloaded. If the mem is shared,
5253 then reloading one copy of this mem will cause all copies to appear
5254 to have been reloaded. */
5255
5256 default:
5257 break;
5258 }
5259
5260 copy = rtx_alloc (code);
5261
5262 /* Copy the various flags, and other information. We assume that
5263 all fields need copying, and then clear the fields that should
5264 not be copied. That is the sensible default behavior, and forces
5265 us to explicitly document why we are *not* copying a flag. */
5266 memcpy (copy, orig, sizeof (struct rtx_def) - sizeof (rtunion));
5267
5268 /* We do not copy the USED flag, which is used as a mark bit during
5269 walks over the RTL. */
5270 RTX_FLAG (copy, used) = 0;
5271
5272 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5273 if (GET_RTX_CLASS (code) == 'i')
5274 {
5275 RTX_FLAG (copy, jump) = 0;
5276 RTX_FLAG (copy, call) = 0;
5277 RTX_FLAG (copy, frame_related) = 0;
5278 }
5279
5280 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5281
5282 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5283 {
5284 copy->fld[i] = orig->fld[i];
5285 switch (*format_ptr++)
5286 {
5287 case 'e':
5288 if (XEXP (orig, i) != NULL)
5289 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5290 break;
5291
5292 case 'E':
5293 case 'V':
5294 if (XVEC (orig, i) == orig_asm_constraints_vector)
5295 XVEC (copy, i) = copy_asm_constraints_vector;
5296 else if (XVEC (orig, i) == orig_asm_operands_vector)
5297 XVEC (copy, i) = copy_asm_operands_vector;
5298 else if (XVEC (orig, i) != NULL)
5299 {
5300 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5301 for (j = 0; j < XVECLEN (copy, i); j++)
5302 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5303 }
5304 break;
5305
5306 case 't':
5307 case 'w':
5308 case 'i':
5309 case 's':
5310 case 'S':
5311 case 'u':
5312 case '0':
5313 /* These are left unchanged. */
5314 break;
5315
5316 default:
5317 abort ();
5318 }
5319 }
5320
5321 if (code == SCRATCH)
5322 {
5323 i = copy_insn_n_scratches++;
5324 if (i >= MAX_RECOG_OPERANDS)
5325 abort ();
5326 copy_insn_scratch_in[i] = orig;
5327 copy_insn_scratch_out[i] = copy;
5328 }
5329 else if (code == ASM_OPERANDS)
5330 {
5331 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5332 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5333 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5334 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5335 }
5336
5337 return copy;
5338 }
5339
5340 /* Create a new copy of an rtx.
5341 This function differs from copy_rtx in that it handles SCRATCHes and
5342 ASM_OPERANDs properly.
5343 INSN doesn't really have to be a full INSN; it could be just the
5344 pattern. */
5345 rtx
5346 copy_insn (insn)
5347 rtx insn;
5348 {
5349 copy_insn_n_scratches = 0;
5350 orig_asm_operands_vector = 0;
5351 orig_asm_constraints_vector = 0;
5352 copy_asm_operands_vector = 0;
5353 copy_asm_constraints_vector = 0;
5354 return copy_insn_1 (insn);
5355 }
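/* Usage sketch (illustrative), following the comment above copy_insn_1:
   copy a hypothetical INSN's pattern first, then copy its REG_NOTES with
   copy_insn_1 so that any SCRATCHes are mapped consistently in both.

     rtx pat = copy_insn (PATTERN (insn));
     rtx notes = copy_insn_1 (REG_NOTES (insn));
*/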
5356
5357 /* Initialize data structures and variables in this file
5358 before generating rtl for each function. */
5359
5360 void
5361 init_emit ()
5362 {
5363 struct function *f = cfun;
5364
5365 f->emit = (struct emit_status *) ggc_alloc (sizeof (struct emit_status));
5366 first_insn = NULL;
5367 last_insn = NULL;
5368 seq_rtl_expr = NULL;
5369 cur_insn_uid = 1;
5370 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5371 last_linenum = 0;
5372 last_filename = 0;
5373 first_label_num = label_num;
5374 last_label_num = 0;
5375 seq_stack = NULL;
5376
5377 /* Init the tables that describe all the pseudo regs. */
5378
5379 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5380
5381 f->emit->regno_pointer_align
5382 = (unsigned char *) ggc_alloc_cleared (f->emit->regno_pointer_align_length
5383 * sizeof (unsigned char));
5384
5385 regno_reg_rtx
5386 = (rtx *) ggc_alloc (f->emit->regno_pointer_align_length * sizeof (rtx));
5387
5388 /* Put copies of all the hard registers into regno_reg_rtx. */
5389 memcpy (regno_reg_rtx,
5390 static_regno_reg_rtx,
5391 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5392
5393 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5394 init_virtual_regs (f->emit);
5395
5396 /* Indicate that the virtual registers and stack locations are
5397 all pointers. */
5398 REG_POINTER (stack_pointer_rtx) = 1;
5399 REG_POINTER (frame_pointer_rtx) = 1;
5400 REG_POINTER (hard_frame_pointer_rtx) = 1;
5401 REG_POINTER (arg_pointer_rtx) = 1;
5402
5403 REG_POINTER (virtual_incoming_args_rtx) = 1;
5404 REG_POINTER (virtual_stack_vars_rtx) = 1;
5405 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5406 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5407 REG_POINTER (virtual_cfa_rtx) = 1;
5408
5409 #ifdef STACK_BOUNDARY
5410 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5411 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5412 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5413 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5414
5415 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5416 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5417 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5418 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5419 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5420 #endif
5421
5422 #ifdef INIT_EXPANDERS
5423 INIT_EXPANDERS;
5424 #endif
5425 }
5426
5427 /* Generate the constant 0. */
5428
5429 static rtx
5430 gen_const_vector_0 (mode)
5431 enum machine_mode mode;
5432 {
5433 rtx tem;
5434 rtvec v;
5435 int units, i;
5436 enum machine_mode inner;
5437
5438 units = GET_MODE_NUNITS (mode);
5439 inner = GET_MODE_INNER (mode);
5440
5441 v = rtvec_alloc (units);
5442
5443 /* CONST0_RTX (inner) must already be set when this function is called. */
5444 if (!CONST0_RTX (inner))
5445 abort ();
5446
5447 for (i = 0; i < units; ++i)
5448 RTVEC_ELT (v, i) = CONST0_RTX (inner);
5449
5450 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5451 return tem;
5452 }
5453
5454 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
5455 all elements are zero. */
5456 rtx
5457 gen_rtx_CONST_VECTOR (mode, v)
5458 enum machine_mode mode;
5459 rtvec v;
5460 {
5461 rtx inner_zero = CONST0_RTX (GET_MODE_INNER (mode));
5462 int i;
5463
5464 for (i = GET_MODE_NUNITS (mode) - 1; i >= 0; i--)
5465 if (RTVEC_ELT (v, i) != inner_zero)
5466 return gen_rtx_raw_CONST_VECTOR (mode, v);
5467 return CONST0_RTX (mode);
5468 }
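/* Usage sketch (illustrative; V4SImode availability is target-dependent):
   an all-zero vector collapses to the shared constant.

     rtvec v = rtvec_alloc (4);
     int i;
     for (i = 0; i < 4; i++)
       RTVEC_ELT (v, i) = const0_rtx;
     ... gen_rtx_CONST_VECTOR (V4SImode, v) now yields CONST0_RTX (V4SImode).
*/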
5469
5470 /* Create some permanent unique rtl objects shared between all functions.
5471 LINE_NUMBERS is nonzero if line numbers are to be generated. */
5472
5473 void
5474 init_emit_once (line_numbers)
5475 int line_numbers;
5476 {
5477 int i;
5478 enum machine_mode mode;
5479 enum machine_mode double_mode;
5480
5481 /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
5482 tables. */
5483 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5484 const_int_htab_eq, NULL);
5485
5486 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5487 const_double_htab_eq, NULL);
5488
5489 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5490 mem_attrs_htab_eq, NULL);
5491 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5492 reg_attrs_htab_eq, NULL);
5493
5494 no_line_numbers = ! line_numbers;
5495
5496 /* Compute the word and byte modes. */
5497
5498 byte_mode = VOIDmode;
5499 word_mode = VOIDmode;
5500 double_mode = VOIDmode;
5501
5502 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5503 mode = GET_MODE_WIDER_MODE (mode))
5504 {
5505 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5506 && byte_mode == VOIDmode)
5507 byte_mode = mode;
5508
5509 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5510 && word_mode == VOIDmode)
5511 word_mode = mode;
5512 }
5513
5514 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5515 mode = GET_MODE_WIDER_MODE (mode))
5516 {
5517 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5518 && double_mode == VOIDmode)
5519 double_mode = mode;
5520 }
5521
5522 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5523
5524 /* Assign register numbers to the globally defined register rtx.
5525 This must be done at runtime because the register number field
5526 is in a union and some compilers can't initialize unions. */
5527
5528 pc_rtx = gen_rtx (PC, VOIDmode);
5529 cc0_rtx = gen_rtx (CC0, VOIDmode);
5530 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5531 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5532 if (hard_frame_pointer_rtx == 0)
5533 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
5534 HARD_FRAME_POINTER_REGNUM);
5535 if (arg_pointer_rtx == 0)
5536 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5537 virtual_incoming_args_rtx =
5538 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5539 virtual_stack_vars_rtx =
5540 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5541 virtual_stack_dynamic_rtx =
5542 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5543 virtual_outgoing_args_rtx =
5544 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5545 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5546
5547 /* Initialize RTL for commonly used hard registers. These are
5548 copied into regno_reg_rtx as we begin to compile each function. */
5549 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5550 static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5551
5552 #ifdef INIT_EXPANDERS
5553 /* This is to initialize {init|mark|free}_machine_status before the first
5554 call to push_function_context_to. This is needed by the Chill front
5555 end which calls push_function_context_to before the first call to
5556 init_function_start. */
5557 INIT_EXPANDERS;
5558 #endif
5559
5560 /* Create the unique rtx's for certain rtx codes and operand values. */
5561
5562 /* Don't use gen_rtx here, since in this case it would
5563 try to use the very variables we are initializing. */
5564 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5565 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5566 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5567
5568 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5569 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5570 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5571 else
5572 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5573
5574 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5575 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5576 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5577 REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
5578
5579 for (i = 0; i <= 2; i++)
5580 {
5581 REAL_VALUE_TYPE *r =
5582 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5583
5584 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5585 mode = GET_MODE_WIDER_MODE (mode))
5586 const_tiny_rtx[i][(int) mode] =
5587 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5588
5589 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5590
5591 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5592 mode = GET_MODE_WIDER_MODE (mode))
5593 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5594
5595 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5596 mode != VOIDmode;
5597 mode = GET_MODE_WIDER_MODE (mode))
5598 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5599 }
5600
5601 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5602 mode != VOIDmode;
5603 mode = GET_MODE_WIDER_MODE (mode))
5604 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5605
5606 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5607 mode != VOIDmode;
5608 mode = GET_MODE_WIDER_MODE (mode))
5609 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5610
5611 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5612 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5613 const_tiny_rtx[0][i] = const0_rtx;
5614
5615 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5616 if (STORE_FLAG_VALUE == 1)
5617 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5618
5619 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5620 return_address_pointer_rtx
5621 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5622 #endif
5623
5624 #ifdef STRUCT_VALUE
5625 struct_value_rtx = STRUCT_VALUE;
5626 #else
5627 struct_value_rtx = gen_rtx_REG (Pmode, STRUCT_VALUE_REGNUM);
5628 #endif
5629
5630 #ifdef STRUCT_VALUE_INCOMING
5631 struct_value_incoming_rtx = STRUCT_VALUE_INCOMING;
5632 #else
5633 #ifdef STRUCT_VALUE_INCOMING_REGNUM
5634 struct_value_incoming_rtx
5635 = gen_rtx_REG (Pmode, STRUCT_VALUE_INCOMING_REGNUM);
5636 #else
5637 struct_value_incoming_rtx = struct_value_rtx;
5638 #endif
5639 #endif
5640
5641 #ifdef STATIC_CHAIN_REGNUM
5642 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5643
5644 #ifdef STATIC_CHAIN_INCOMING_REGNUM
5645 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5646 static_chain_incoming_rtx
5647 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5648 else
5649 #endif
5650 static_chain_incoming_rtx = static_chain_rtx;
5651 #endif
5652
5653 #ifdef STATIC_CHAIN
5654 static_chain_rtx = STATIC_CHAIN;
5655
5656 #ifdef STATIC_CHAIN_INCOMING
5657 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5658 #else
5659 static_chain_incoming_rtx = static_chain_rtx;
5660 #endif
5661 #endif
5662
5663 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5664 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5665 }
5666 \f
5667 /* Query and clear or restore no_line_numbers. This is used by the
5668 switch / case handling in stmt.c to give proper line numbers in
5669 warnings about unreachable code. */
5670
5671 int
5672 force_line_numbers ()
5673 {
5674 int old = no_line_numbers;
5675
5676 no_line_numbers = 0;
5677 if (old)
5678 force_next_line_note ();
5679 return old;
5680 }
5681
5682 void
5683 restore_line_number_status (old_value)
5684 int old_value;
5685 {
5686 no_line_numbers = old_value;
5687 }
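/* Usage sketch (illustrative): the two functions above act as a
   save / restore pair around code that must carry line notes.

     int old = force_line_numbers ();
     ... emit insns that need line notes ...
     restore_line_number_status (old);
*/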
5688
5689 /* Produce an exact duplicate of insn INSN after AFTER.
5690 Take care to update libcall regions if they are present. */
5691
5692 rtx
5693 emit_copy_of_insn_after (insn, after)
5694 rtx insn, after;
5695 {
5696 rtx new;
5697 rtx note1, note2, link;
5698
5699 switch (GET_CODE (insn))
5700 {
5701 case INSN:
5702 new = emit_insn_after (copy_insn (PATTERN (insn)), after);
5703 break;
5704
5705 case JUMP_INSN:
5706 new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5707 break;
5708
5709 case CALL_INSN:
5710 new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5711 if (CALL_INSN_FUNCTION_USAGE (insn))
5712 CALL_INSN_FUNCTION_USAGE (new)
5713 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5714 SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
5715 CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
5716 break;
5717
5718 default:
5719 abort ();
5720 }
5721
5722 /* Update LABEL_NUSES. */
5723 mark_jump_label (PATTERN (new), new, 0);
5724
5725 INSN_SCOPE (new) = INSN_SCOPE (insn);
5726
5727 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
5728 make them. */
5729 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5730 if (REG_NOTE_KIND (link) != REG_LABEL)
5731 {
5732 if (GET_CODE (link) == EXPR_LIST)
5733 REG_NOTES (new)
5734 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
5735 XEXP (link, 0),
5736 REG_NOTES (new)));
5737 else
5738 REG_NOTES (new)
5739 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
5740 XEXP (link, 0),
5741 REG_NOTES (new)));
5742 }
5743
5744 /* Fix the libcall sequences. */
5745 if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
5746 {
5747 rtx p = new;
5748 while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
5749 p = PREV_INSN (p);
5750 XEXP (note1, 0) = p;
5751 XEXP (note2, 0) = new;
5752 }
5753 INSN_CODE (new) = INSN_CODE (insn);
5754 return new;
5755 }
5756
5757 #include "gt-emit-rtl.h"