emit-rtl.c (set_reg_attrs_for_parm): New function.
1 /* Emit RTL for the GNU C-Compiler expander.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22
23 /* Middle-to-low level generation of rtx code and insns.
24
25 This file contains the functions `gen_rtx', `gen_reg_rtx'
26 and `gen_label_rtx' that are the usual ways of creating rtl
27 expressions for most purposes.
28
29 It also has the functions for creating insns and linking
30 them in the doubly-linked chain.
31
32 The patterns of the insns are created by machine-dependent
33 routines in insn-emit.c, which is generated automatically from
34 the machine description. These routines use `gen_rtx' to make
35 the individual rtx's of the pattern; what is machine dependent
36 is the kind of rtx's they make and what arguments they use. */
37
38 #include "config.h"
39 #include "system.h"
40 #include "coretypes.h"
41 #include "tm.h"
42 #include "toplev.h"
43 #include "rtl.h"
44 #include "tree.h"
45 #include "tm_p.h"
46 #include "flags.h"
47 #include "function.h"
48 #include "expr.h"
49 #include "regs.h"
50 #include "hard-reg-set.h"
51 #include "hashtab.h"
52 #include "insn-config.h"
53 #include "recog.h"
54 #include "real.h"
55 #include "bitmap.h"
56 #include "basic-block.h"
57 #include "ggc.h"
58 #include "debug.h"
59 #include "langhooks.h"
60
61 /* Commonly used modes. */
62
63 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
64 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
65 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
66 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
67
68
69 /* This is *not* reset after each function. It gives each CODE_LABEL
70 in the entire compilation a unique label number. */
71
72 static GTY(()) int label_num = 1;
73
74 /* Highest label number in current function.
75 Zero means use the value of label_num instead.
76 This is nonzero only when belatedly compiling an inline function. */
77
78 static int last_label_num;
79
80 /* Value label_num had when set_new_first_and_last_label_number was called.
81 If label_num has not changed since then, last_label_num is valid. */
82
83 static int base_label_num;
84
85 /* Nonzero means do not generate NOTEs for source line numbers. */
86
87 static int no_line_numbers;
88
89 /* Commonly used rtx's, so that we only need space for one copy.
90 These are initialized once for the entire compilation.
91 All of these are unique; no other rtx-object will be equal to any
92 of these. */
93
94 rtx global_rtl[GR_MAX];
95
96 /* Commonly used RTL for hard registers. These objects are not necessarily
97 unique, so we allocate them separately from global_rtl. They are
98 initialized once per compilation unit, then copied into regno_reg_rtx
99 at the beginning of each function. */
100 static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];
101
102 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
103 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
104 record a copy of const[012]_rtx. */
105
106 rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
107
108 rtx const_true_rtx;
109
110 REAL_VALUE_TYPE dconst0;
111 REAL_VALUE_TYPE dconst1;
112 REAL_VALUE_TYPE dconst2;
113 REAL_VALUE_TYPE dconstm1;
114
115 /* All references to the following fixed hard registers go through
116 these unique rtl objects. On machines where the frame-pointer and
117 arg-pointer are the same register, they use the same unique object.
118
119 After register allocation, other rtl objects which used to be pseudo-regs
120 may be clobbered to refer to the frame-pointer register.
121 But references that were originally to the frame-pointer can be
122 distinguished from the others because they contain frame_pointer_rtx.
123
124 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
125 tricky: until register elimination has taken place hard_frame_pointer_rtx
126 should be used if it is being set, and frame_pointer_rtx otherwise. After
127 register elimination hard_frame_pointer_rtx should always be used.
128 On machines where the two registers are the same (as on most machines),
129 these are the same object.
130
131 In an inline procedure, the stack and frame pointer rtxs may not be
132 used for anything else. */
133 rtx struct_value_rtx; /* (REG:Pmode STRUCT_VALUE_REGNUM) */
134 rtx struct_value_incoming_rtx; /* (REG:Pmode STRUCT_VALUE_INCOMING_REGNUM) */
135 rtx static_chain_rtx; /* (REG:Pmode STATIC_CHAIN_REGNUM) */
136 rtx static_chain_incoming_rtx; /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
137 rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
138
139 /* This is used to implement __builtin_return_address for some machines.
140 See for instance the MIPS port. */
141 rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
142
143 /* We make one copy of (const_int C) where C is in
144 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
145 to save space during the compilation and simplify comparisons of
146 integers. */
147
148 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
149
150 /* A hash table storing CONST_INTs whose absolute value is greater
151 than MAX_SAVED_CONST_INT. */
152
153 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
154 htab_t const_int_htab;
155
156 /* A hash table storing memory attribute structures. */
157 static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
158 htab_t mem_attrs_htab;
159
160 /* A hash table storing register attribute structures. */
161 static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
162 htab_t reg_attrs_htab;
163
164 /* A hash table storing all CONST_DOUBLEs. */
165 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
166 htab_t const_double_htab;
167
168 #define first_insn (cfun->emit->x_first_insn)
169 #define last_insn (cfun->emit->x_last_insn)
170 #define cur_insn_uid (cfun->emit->x_cur_insn_uid)
171 #define last_linenum (cfun->emit->x_last_linenum)
172 #define last_filename (cfun->emit->x_last_filename)
173 #define first_label_num (cfun->emit->x_first_label_num)
174
175 static rtx make_jump_insn_raw PARAMS ((rtx));
176 static rtx make_call_insn_raw PARAMS ((rtx));
177 static rtx find_line_note PARAMS ((rtx));
178 static rtx change_address_1 PARAMS ((rtx, enum machine_mode, rtx,
179 int));
180 static void unshare_all_rtl_1 PARAMS ((rtx));
181 static void unshare_all_decls PARAMS ((tree));
182 static void reset_used_decls PARAMS ((tree));
183 static void mark_label_nuses PARAMS ((rtx));
184 static hashval_t const_int_htab_hash PARAMS ((const void *));
185 static int const_int_htab_eq PARAMS ((const void *,
186 const void *));
187 static hashval_t const_double_htab_hash PARAMS ((const void *));
188 static int const_double_htab_eq PARAMS ((const void *,
189 const void *));
190 static rtx lookup_const_double PARAMS ((rtx));
191 static hashval_t mem_attrs_htab_hash PARAMS ((const void *));
192 static int mem_attrs_htab_eq PARAMS ((const void *,
193 const void *));
194 static mem_attrs *get_mem_attrs PARAMS ((HOST_WIDE_INT, tree, rtx,
195 rtx, unsigned int,
196 enum machine_mode));
197 static hashval_t reg_attrs_htab_hash PARAMS ((const void *));
198 static int reg_attrs_htab_eq PARAMS ((const void *,
199 const void *));
200 static reg_attrs *get_reg_attrs PARAMS ((tree, int));
201 static tree component_ref_for_mem_expr PARAMS ((tree));
202 static rtx gen_const_vector_0 PARAMS ((enum machine_mode));
203
204 /* Probability of the conditional branch currently being processed by try_split.
205 Set to -1 otherwise. */
206 int split_branch_probability = -1;
207 \f
208 /* Returns a hash code for X (which is really a CONST_INT). */
209
210 static hashval_t
211 const_int_htab_hash (x)
212 const void *x;
213 {
214 return (hashval_t) INTVAL ((struct rtx_def *) x);
215 }
216
217 /* Returns nonzero if the value represented by X (which is really a
218 CONST_INT) is the same as that given by Y (which is really a
219 HOST_WIDE_INT *). */
220
221 static int
222 const_int_htab_eq (x, y)
223 const void *x;
224 const void *y;
225 {
226 return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
227 }
228
229 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
230 static hashval_t
231 const_double_htab_hash (x)
232 const void *x;
233 {
234 rtx value = (rtx) x;
235 hashval_t h;
236
237 if (GET_MODE (value) == VOIDmode)
238 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
239 else
240 {
241 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
242 /* MODE is used in the comparison, so it should be in the hash. */
243 h ^= GET_MODE (value);
244 }
245 return h;
246 }
247
248 /* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
249 is the same as that represented by Y (really a CONST_DOUBLE). */
250 static int
251 const_double_htab_eq (x, y)
252 const void *x;
253 const void *y;
254 {
255 rtx a = (rtx)x, b = (rtx)y;
256
257 if (GET_MODE (a) != GET_MODE (b))
258 return 0;
259 if (GET_MODE (a) == VOIDmode)
260 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
261 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
262 else
263 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
264 CONST_DOUBLE_REAL_VALUE (b));
265 }
266
267 /* Returns a hash code for X (which is really a mem_attrs *). */
268
269 static hashval_t
270 mem_attrs_htab_hash (x)
271 const void *x;
272 {
273 mem_attrs *p = (mem_attrs *) x;
274
275 return (p->alias ^ (p->align * 1000)
276 ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
277 ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
278 ^ (size_t) p->expr);
279 }
280
281 /* Returns nonzero if the value represented by X (which is really a
282 mem_attrs *) is the same as that given by Y (which is also really a
283 mem_attrs *). */
284
285 static int
286 mem_attrs_htab_eq (x, y)
287 const void *x;
288 const void *y;
289 {
290 mem_attrs *p = (mem_attrs *) x;
291 mem_attrs *q = (mem_attrs *) y;
292
293 return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
294 && p->size == q->size && p->align == q->align);
295 }
296
297 /* Allocate a new mem_attrs structure and insert it into the hash table if
298 one identical to it is not already in the table. We are doing this for
299 MEM of mode MODE. */
300
301 static mem_attrs *
302 get_mem_attrs (alias, expr, offset, size, align, mode)
303 HOST_WIDE_INT alias;
304 tree expr;
305 rtx offset;
306 rtx size;
307 unsigned int align;
308 enum machine_mode mode;
309 {
310 mem_attrs attrs;
311 void **slot;
312
313 /* If everything is the default, we can just return zero. */
314 if (alias == 0 && expr == 0 && offset == 0
315 && (size == 0
316 || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
317 && (align == BITS_PER_UNIT
318 || (STRICT_ALIGNMENT
319 && mode != BLKmode && align == GET_MODE_ALIGNMENT (mode))))
320 return 0;
321
322 attrs.alias = alias;
323 attrs.expr = expr;
324 attrs.offset = offset;
325 attrs.size = size;
326 attrs.align = align;
327
328 slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
329 if (*slot == 0)
330 {
331 *slot = ggc_alloc (sizeof (mem_attrs));
332 memcpy (*slot, &attrs, sizeof (mem_attrs));
333 }
334
335 return *slot;
336 }
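
/* Illustrative sketch: because the attribute structures are hashed and
   shared, two MEMs describing the same object end up with the same
   mem_attrs pointer.  Assuming some DECL for a 4-byte object and an
   alias set SET:

	get_mem_attrs (set, decl, const0_rtx, GEN_INT (4), 32, SImode)
	  == get_mem_attrs (set, decl, const0_rtx, GEN_INT (4), 32, SImode)

   always holds, and a zero result means "all attributes are default".  */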
337
338 /* Returns a hash code for X (which is really a reg_attrs *). */
339
340 static hashval_t
341 reg_attrs_htab_hash (x)
342 const void *x;
343 {
344 reg_attrs *p = (reg_attrs *) x;
345
346 return ((p->offset * 1000) ^ (long) p->decl);
347 }
348
349 /* Returns nonzero if the value represented by X (which is really a
350 reg_attrs *) is the same as that given by Y (which is also really a
351 reg_attrs *). */
352
353 static int
354 reg_attrs_htab_eq (x, y)
355 const void *x;
356 const void *y;
357 {
358 reg_attrs *p = (reg_attrs *) x;
359 reg_attrs *q = (reg_attrs *) y;
360
361 return (p->decl == q->decl && p->offset == q->offset);
362 }
363 /* Allocate a new reg_attrs structure and insert it into the hash table if
364 one identical to it is not already in the table. We are doing this for a
365 REG with the given DECL and OFFSET. */
366
367 static reg_attrs *
368 get_reg_attrs (decl, offset)
369 tree decl;
370 int offset;
371 {
372 reg_attrs attrs;
373 void **slot;
374
375 /* If everything is the default, we can just return zero. */
376 if (decl == 0 && offset == 0)
377 return 0;
378
379 attrs.decl = decl;
380 attrs.offset = offset;
381
382 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
383 if (*slot == 0)
384 {
385 *slot = ggc_alloc (sizeof (reg_attrs));
386 memcpy (*slot, &attrs, sizeof (reg_attrs));
387 }
388
389 return *slot;
390 }
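
/* The same sharing applies here; for some DECL,

	get_reg_attrs (decl, 4) == get_reg_attrs (decl, 4)

   always holds, and a zero result means "no attributes".  */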
391
392 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
393 don't attempt to share with the various global pieces of rtl (such as
394 frame_pointer_rtx). */
395
396 rtx
397 gen_raw_REG (mode, regno)
398 enum machine_mode mode;
399 int regno;
400 {
401 rtx x = gen_rtx_raw_REG (mode, regno);
402 ORIGINAL_REGNO (x) = regno;
403 return x;
404 }
405
406 /* There are some RTL codes that require special attention; the generation
407 functions do the raw handling. If you add to this list, modify
408 special_rtx in gengenrtl.c as well. */
409
410 rtx
411 gen_rtx_CONST_INT (mode, arg)
412 enum machine_mode mode ATTRIBUTE_UNUSED;
413 HOST_WIDE_INT arg;
414 {
415 void **slot;
416
417 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
418 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
419
420 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
421 if (const_true_rtx && arg == STORE_FLAG_VALUE)
422 return const_true_rtx;
423 #endif
424
425 /* Look up the CONST_INT in the hash table. */
426 slot = htab_find_slot_with_hash (const_int_htab, &arg,
427 (hashval_t) arg, INSERT);
428 if (*slot == 0)
429 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
430
431 return (rtx) *slot;
432 }
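
/* Illustrative consequence: CONST_INTs are unique, so they may be
   compared by pointer rather than by value.  For example,

	GEN_INT (0) == const0_rtx
	gen_rtx_CONST_INT (VOIDmode, 12345) == GEN_INT (12345)

   both hold; the first comes from the const_int_rtx array, the second
   from const_int_htab.  */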
433
434 rtx
435 gen_int_mode (c, mode)
436 HOST_WIDE_INT c;
437 enum machine_mode mode;
438 {
439 return GEN_INT (trunc_int_for_mode (c, mode));
440 }
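
/* For example, on a target with an 8-bit QImode,

	gen_int_mode (0xff, QImode) => (const_int -1)

   since trunc_int_for_mode sign-extends the low 8 bits, giving the
   canonical CONST_INT for that mode.  */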
441
442 /* CONST_DOUBLEs might be created from pairs of integers, or from
443 REAL_VALUE_TYPEs. Also, their length is known only at run time,
444 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
445
446 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
447 hash table. If so, return its counterpart; otherwise add it
448 to the hash table and return it. */
449 static rtx
450 lookup_const_double (real)
451 rtx real;
452 {
453 void **slot = htab_find_slot (const_double_htab, real, INSERT);
454 if (*slot == 0)
455 *slot = real;
456
457 return (rtx) *slot;
458 }
459
460 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
461 VALUE in mode MODE. */
462 rtx
463 const_double_from_real_value (value, mode)
464 REAL_VALUE_TYPE value;
465 enum machine_mode mode;
466 {
467 rtx real = rtx_alloc (CONST_DOUBLE);
468 PUT_MODE (real, mode);
469
470 memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE));
471
472 return lookup_const_double (real);
473 }
474
475 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
476 of ints: I0 is the low-order word and I1 is the high-order word.
477 Do not use this routine for non-integer modes; convert to
478 REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE. */
479
480 rtx
481 immed_double_const (i0, i1, mode)
482 HOST_WIDE_INT i0, i1;
483 enum machine_mode mode;
484 {
485 rtx value;
486 unsigned int i;
487
488 if (mode != VOIDmode)
489 {
490 int width;
491 if (GET_MODE_CLASS (mode) != MODE_INT
492 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT
493 /* We can get a 0 for an error mark. */
494 && GET_MODE_CLASS (mode) != MODE_VECTOR_INT
495 && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
496 abort ();
497
498 /* We clear out all bits that don't belong in MODE, unless they and
499 our sign bit are all one. So we get either a reasonable negative
500 value or a reasonable unsigned value for this mode. */
501 width = GET_MODE_BITSIZE (mode);
502 if (width < HOST_BITS_PER_WIDE_INT
503 && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
504 != ((HOST_WIDE_INT) (-1) << (width - 1))))
505 i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
506 else if (width == HOST_BITS_PER_WIDE_INT
507 && ! (i1 == ~0 && i0 < 0))
508 i1 = 0;
509 else if (width > 2 * HOST_BITS_PER_WIDE_INT)
510 /* We cannot represent this value as a constant. */
511 abort ();
512
513 /* If this would be an entire word for the target, but is not for
514 the host, then sign-extend on the host so that the number will
515 look the same way on the host that it would on the target.
516
517 For example, when building a 64 bit alpha hosted 32 bit sparc
518 targeted compiler, then we want the 32 bit unsigned value -1 to be
519 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
520 The latter confuses the sparc backend. */
521
522 if (width < HOST_BITS_PER_WIDE_INT
523 && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
524 i0 |= ((HOST_WIDE_INT) (-1) << width);
525
526 /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
527 CONST_INT.
528
529 ??? Strictly speaking, this is wrong if we create a CONST_INT for
530 a large unsigned constant with the size of MODE being
531 HOST_BITS_PER_WIDE_INT and later try to interpret that constant
532 in a wider mode. In that case we will mis-interpret it as a
533 negative number.
534
535 Unfortunately, the only alternative is to make a CONST_DOUBLE for
536 any constant in any mode if it is an unsigned constant larger
537 than the maximum signed integer in an int on the host. However,
538 doing this will break everyone that always expects to see a
539 CONST_INT for SImode and smaller.
540
541 We have always been making CONST_INTs in this case, so nothing
542 new is being broken. */
543
544 if (width <= HOST_BITS_PER_WIDE_INT)
545 i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
546 }
547
548 /* If this integer fits in one word, return a CONST_INT. */
549 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
550 return GEN_INT (i0);
551
552 /* We use VOIDmode for integers. */
553 value = rtx_alloc (CONST_DOUBLE);
554 PUT_MODE (value, VOIDmode);
555
556 CONST_DOUBLE_LOW (value) = i0;
557 CONST_DOUBLE_HIGH (value) = i1;
558
559 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
560 XWINT (value, i) = 0;
561
562 return lookup_const_double (value);
563 }
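
/* Illustrative examples, assuming a 32-bit HOST_WIDE_INT:

	immed_double_const (-1, -1, DImode) => (const_int -1)
	immed_double_const (0, 1, DImode)   => a VOIDmode CONST_DOUBLE
					       with low 0, high 1 (2**32)

   The first value satisfies the one-word test above and so stays a
   CONST_INT; the second needs both words.  */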
564
565 rtx
566 gen_rtx_REG (mode, regno)
567 enum machine_mode mode;
568 unsigned int regno;
569 {
570 /* In case the MD file explicitly references the frame pointer, have
571 all such references point to the same frame pointer. This is
572 used during frame pointer elimination to distinguish the explicit
573 references to these registers from pseudos that happened to be
574 assigned to them.
575
576 If we have eliminated the frame pointer or arg pointer, we will
577 be using it as a normal register, for example as a spill
578 register. In such cases, we might be accessing it in a mode that
579 is not Pmode and therefore cannot use the pre-allocated rtx.
580
581 Also don't do this when we are making new REGs in reload, since
582 we don't want to get confused with the real pointers. */
583
584 if (mode == Pmode && !reload_in_progress)
585 {
586 if (regno == FRAME_POINTER_REGNUM
587 && (!reload_completed || frame_pointer_needed))
588 return frame_pointer_rtx;
589 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
590 if (regno == HARD_FRAME_POINTER_REGNUM
591 && (!reload_completed || frame_pointer_needed))
592 return hard_frame_pointer_rtx;
593 #endif
594 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
595 if (regno == ARG_POINTER_REGNUM)
596 return arg_pointer_rtx;
597 #endif
598 #ifdef RETURN_ADDRESS_POINTER_REGNUM
599 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
600 return return_address_pointer_rtx;
601 #endif
602 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
603 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
604 return pic_offset_table_rtx;
605 if (regno == STACK_POINTER_REGNUM)
606 return stack_pointer_rtx;
607 }
608
609 #if 0
610 /* If the per-function register table has been set up, try to re-use
611 an existing entry in that table to avoid useless generation of RTL.
612
613 This code is disabled for now until we can fix the various backends
614 which depend on having non-shared hard registers in some cases. Long
615 term we want to re-enable this code as it can significantly cut down
616 on the amount of useless RTL that gets generated.
617
618 We'll also need to fix some code that runs after reload that wants to
619 set ORIGINAL_REGNO. */
620
621 if (cfun
622 && cfun->emit
623 && regno_reg_rtx
624 && regno < FIRST_PSEUDO_REGISTER
625 && reg_raw_mode[regno] == mode)
626 return regno_reg_rtx[regno];
627 #endif
628
629 return gen_raw_REG (mode, regno);
630 }
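
/* For example, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) returns the
   shared stack_pointer_rtx object rather than a fresh REG, so explicit
   stack pointer references can be recognized by pointer equality.  */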
631
632 rtx
633 gen_rtx_MEM (mode, addr)
634 enum machine_mode mode;
635 rtx addr;
636 {
637 rtx rt = gen_rtx_raw_MEM (mode, addr);
638
639 /* This field is not cleared by the mere allocation of the rtx, so
640 we clear it here. */
641 MEM_ATTRS (rt) = 0;
642
643 return rt;
644 }
645
646 rtx
647 gen_rtx_SUBREG (mode, reg, offset)
648 enum machine_mode mode;
649 rtx reg;
650 int offset;
651 {
652 /* This is the most common failure type.
653 Catch it early so we can see who does it. */
654 if ((offset % GET_MODE_SIZE (mode)) != 0)
655 abort ();
656
657 /* This check isn't usable right now because combine will
658 throw arbitrary crap like a CALL into a SUBREG in
659 gen_lowpart_for_combine so we must just eat it. */
660 #if 0
661 /* Check for this too. */
662 if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
663 abort ();
664 #endif
665 return gen_rtx_raw_SUBREG (mode, reg, offset);
666 }
667
668 /* Generate a SUBREG representing the least-significant part of REG if MODE
669 is smaller than the mode of REG; otherwise generate a paradoxical SUBREG. */
670
671 rtx
672 gen_lowpart_SUBREG (mode, reg)
673 enum machine_mode mode;
674 rtx reg;
675 {
676 enum machine_mode inmode;
677
678 inmode = GET_MODE (reg);
679 if (inmode == VOIDmode)
680 inmode = mode;
681 return gen_rtx_SUBREG (mode, reg,
682 subreg_lowpart_offset (mode, inmode));
683 }
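
/* Illustrative sketch: given (reg:DI r) on a 32-bit-word target,
   gen_lowpart_SUBREG (SImode, r) yields

	(subreg:SI (reg:DI r) 0)   on a little-endian machine
	(subreg:SI (reg:DI r) 4)   on a big-endian machine

   since the least-significant word lives at the higher byte offset
   when WORDS_BIG_ENDIAN.  */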
684 \f
685 /* rtx gen_rtx (code, mode, [element1, ..., elementn])
686 **
687 ** This routine generates an RTX of the size specified by
688 ** <code>, which is an RTX code. The RTX structure is initialized
689 ** from the arguments <element1> through <elementn>, which are
690 ** interpreted according to the specific RTX type's format. The
691 ** special machine mode associated with the rtx (if any) is specified
692 ** in <mode>.
693 **
694 ** gen_rtx can be invoked in a way which resembles the lisp-like
695 ** rtx it will generate. For example, the following rtx structure:
696 **
697 ** (plus:QI (mem:QI (reg:SI 1))
698 ** (mem:QI (plus:SI (reg:SI 2) (reg:SI 3))))
699 **
700 ** ...would be generated by the following C code:
701 **
702 ** gen_rtx (PLUS, QImode,
703 ** gen_rtx (MEM, QImode,
704 ** gen_rtx (REG, SImode, 1)),
705 ** gen_rtx (MEM, QImode,
706 ** gen_rtx (PLUS, SImode,
707 ** gen_rtx (REG, SImode, 2),
708 ** gen_rtx (REG, SImode, 3)))),
709 */
710
711 /*VARARGS2*/
712 rtx
713 gen_rtx VPARAMS ((enum rtx_code code, enum machine_mode mode, ...))
714 {
715 int i; /* Array indices... */
716 const char *fmt; /* Current rtx's format... */
717 rtx rt_val; /* RTX to return to caller... */
718
719 VA_OPEN (p, mode);
720 VA_FIXEDARG (p, enum rtx_code, code);
721 VA_FIXEDARG (p, enum machine_mode, mode);
722
723 switch (code)
724 {
725 case CONST_INT:
726 rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
727 break;
728
729 case CONST_DOUBLE:
730 {
731 HOST_WIDE_INT arg0 = va_arg (p, HOST_WIDE_INT);
732 HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);
733
734 rt_val = immed_double_const (arg0, arg1, mode);
735 }
736 break;
737
738 case REG:
739 rt_val = gen_rtx_REG (mode, va_arg (p, int));
740 break;
741
742 case MEM:
743 rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
744 break;
745
746 default:
747 rt_val = rtx_alloc (code); /* Allocate the storage space. */
748 rt_val->mode = mode; /* Store the machine mode... */
749
750 fmt = GET_RTX_FORMAT (code); /* Find the right format... */
751 for (i = 0; i < GET_RTX_LENGTH (code); i++)
752 {
753 switch (*fmt++)
754 {
755 case '0': /* Unused field. */
756 break;
757
758 case 'i': /* An integer? */
759 XINT (rt_val, i) = va_arg (p, int);
760 break;
761
762 case 'w': /* A wide integer? */
763 XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
764 break;
765
766 case 's': /* A string? */
767 XSTR (rt_val, i) = va_arg (p, char *);
768 break;
769
770 case 'e': /* An expression? */
771 case 'u': /* An insn? Same except when printing. */
772 XEXP (rt_val, i) = va_arg (p, rtx);
773 break;
774
775 case 'E': /* An RTX vector? */
776 XVEC (rt_val, i) = va_arg (p, rtvec);
777 break;
778
779 case 'b': /* A bitmap? */
780 XBITMAP (rt_val, i) = va_arg (p, bitmap);
781 break;
782
783 case 't': /* A tree? */
784 XTREE (rt_val, i) = va_arg (p, tree);
785 break;
786
787 default:
788 abort ();
789 }
790 }
791 break;
792 }
793
794 VA_CLOSE (p);
795 return rt_val;
796 }
797
798 /* gen_rtvec (n, [rt1, ..., rtn])
799 **
800 ** This routine creates an rtvec and stores within it the
801 ** pointers to rtx's which are its arguments.
802 */
803
804 /*VARARGS1*/
805 rtvec
806 gen_rtvec VPARAMS ((int n, ...))
807 {
808 int i, save_n;
809 rtx *vector;
810
811 VA_OPEN (p, n);
812 VA_FIXEDARG (p, int, n);
813
814 if (n == 0)
815 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
816
817 vector = (rtx *) alloca (n * sizeof (rtx));
818
819 for (i = 0; i < n; i++)
820 vector[i] = va_arg (p, rtx);
821
822 /* The definition of VA_* in K&R C causes `n' to go out of scope. */
823 save_n = n;
824 VA_CLOSE (p);
825
826 return gen_rtvec_v (save_n, vector);
827 }
828
829 rtvec
830 gen_rtvec_v (n, argp)
831 int n;
832 rtx *argp;
833 {
834 int i;
835 rtvec rt_val;
836
837 if (n == 0)
838 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
839
840 rt_val = rtvec_alloc (n); /* Allocate an rtvec... */
841
842 for (i = 0; i < n; i++)
843 rt_val->elem[i] = *argp++;
844
845 return rt_val;
846 }
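
/* A typical use builds the vector inline, e.g. for a two-element
   PARALLEL whose SETs set0 and set1 were constructed earlier:

	gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set0, set1))
*/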
847 \f
848 /* Generate a REG rtx for a new pseudo register of mode MODE.
849 This pseudo is assigned the next sequential register number. */
850
851 rtx
852 gen_reg_rtx (mode)
853 enum machine_mode mode;
854 {
855 struct function *f = cfun;
856 rtx val;
857
858 /* Don't let anything called after initial flow analysis create new
859 registers. */
860 if (no_new_pseudos)
861 abort ();
862
863 if (generating_concat_p
864 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
865 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
866 {
867 /* For complex modes, don't make a single pseudo.
868 Instead, make a CONCAT of two pseudos.
869 This allows noncontiguous allocation of the real and imaginary parts,
870 which makes much better code. Besides, allocating DCmode
871 pseudos overstrains reload on some machines like the 386. */
872 rtx realpart, imagpart;
873 enum machine_mode partmode = GET_MODE_INNER (mode);
874
875 realpart = gen_reg_rtx (partmode);
876 imagpart = gen_reg_rtx (partmode);
877 return gen_rtx_CONCAT (mode, realpart, imagpart);
878 }
879
880 /* Make sure regno_pointer_align and regno_reg_rtx are large
881 enough to have an element for this pseudo reg number. */
882
883 if (reg_rtx_no == f->emit->regno_pointer_align_length)
884 {
885 int old_size = f->emit->regno_pointer_align_length;
886 char *new;
887 rtx *new1;
888
889 new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
890 memset (new + old_size, 0, old_size);
891 f->emit->regno_pointer_align = (unsigned char *) new;
892
893 new1 = (rtx *) ggc_realloc (f->emit->x_regno_reg_rtx,
894 old_size * 2 * sizeof (rtx));
895 memset (new1 + old_size, 0, old_size * sizeof (rtx));
896 regno_reg_rtx = new1;
897
898 f->emit->regno_pointer_align_length = old_size * 2;
899 }
900
901 val = gen_raw_REG (mode, reg_rtx_no);
902 regno_reg_rtx[reg_rtx_no++] = val;
903 return val;
904 }
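
/* Typical expander usage: allocate a scratch pseudo and let register
   allocation pick a hard register for it later, e.g.

	rtx tmp = gen_reg_rtx (SImode);
	emit_move_insn (tmp, src);

   where REGNO (tmp) >= FIRST_PSEUDO_REGISTER until reload.  */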
905
906 /* Generate a register with the same attributes as REG,
907 but offset by OFFSET. */
908
909 rtx
910 gen_rtx_REG_offset (reg, mode, regno, offset)
911 rtx reg;
912 enum machine_mode mode;
913 unsigned int regno;
914 int offset;
915 {
916 rtx new = gen_rtx_REG (mode, regno);
917 REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
918 REG_OFFSET (reg) + offset);
919 return new;
920 }
921
922 /* Set the register attributes of REG from the memory attributes of MEM. */
923
924 void
925 set_reg_attrs_from_mem (reg, mem)
926 rtx reg;
927 rtx mem;
928 {
929 if (MEM_OFFSET (mem) && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
930 REG_ATTRS (reg)
931 = get_reg_attrs (MEM_EXPR (mem), INTVAL (MEM_OFFSET (mem)));
932 }
933
934 /* Set the register attributes for registers contained in PARM_RTX.
935 Use the needed values from the memory attributes of MEM. */
936
937 void
938 set_reg_attrs_for_parm (parm_rtx, mem)
939 rtx parm_rtx;
940 rtx mem;
941 {
942 if (GET_CODE (parm_rtx) == REG)
943 set_reg_attrs_from_mem (parm_rtx, mem);
944 else if (GET_CODE (parm_rtx) == PARALLEL)
945 {
946 /* Check for a NULL entry in the first slot, used to indicate that the
947 parameter goes both on the stack and in registers. */
948 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
949 for (; i < XVECLEN (parm_rtx, 0); i++)
950 {
951 rtx x = XVECEXP (parm_rtx, 0, i);
952 if (GET_CODE (XEXP (x, 0)) == REG)
953 REG_ATTRS (XEXP (x, 0))
954 = get_reg_attrs (MEM_EXPR (mem),
955 INTVAL (XEXP (x, 1)));
956 }
957 }
958 }
959
960 /* Assign the RTX X to declaration T. */
961 void
962 set_decl_rtl (t, x)
963 tree t;
964 rtx x;
965 {
966 DECL_CHECK (t)->decl.rtl = x;
967
968 if (!x)
969 return;
970 /* For registers, we maintain the reverse information too. */
971 if (GET_CODE (x) == REG)
972 REG_ATTRS (x) = get_reg_attrs (t, 0);
973 else if (GET_CODE (x) == SUBREG)
974 REG_ATTRS (SUBREG_REG (x))
975 = get_reg_attrs (t, -SUBREG_BYTE (x));
976 if (GET_CODE (x) == CONCAT)
977 {
978 if (REG_P (XEXP (x, 0)))
979 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
980 if (REG_P (XEXP (x, 1)))
981 REG_ATTRS (XEXP (x, 1))
982 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
983 }
984 if (GET_CODE (x) == PARALLEL)
985 {
986 int i;
987 for (i = 0; i < XVECLEN (x, 0); i++)
988 {
989 rtx y = XVECEXP (x, 0, i);
990 if (REG_P (XEXP (y, 0)))
991 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
992 }
993 }
994 }
995
996 /* Identify REG (which may be a CONCAT) as a user register. */
997
998 void
999 mark_user_reg (reg)
1000 rtx reg;
1001 {
1002 if (GET_CODE (reg) == CONCAT)
1003 {
1004 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1005 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1006 }
1007 else if (GET_CODE (reg) == REG)
1008 REG_USERVAR_P (reg) = 1;
1009 else
1010 abort ();
1011 }
1012
1013 /* Identify REG as a probable pointer register and show its alignment
1014 as ALIGN, if nonzero. */
1015
1016 void
1017 mark_reg_pointer (reg, align)
1018 rtx reg;
1019 int align;
1020 {
1021 if (! REG_POINTER (reg))
1022 {
1023 REG_POINTER (reg) = 1;
1024
1025 if (align)
1026 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1027 }
1028 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1029 /* We can no longer be sure just how aligned this pointer is. */
1030 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1031 }
1032
1033 /* Return 1 plus largest pseudo reg number used in the current function. */
1034
1035 int
1036 max_reg_num ()
1037 {
1038 return reg_rtx_no;
1039 }
1040
1041 /* Return 1 + the largest label number used so far in the current function. */
1042
1043 int
1044 max_label_num ()
1045 {
1046 if (last_label_num && label_num == base_label_num)
1047 return last_label_num;
1048 return label_num;
1049 }
1050
1051 /* Return first label number used in this function (if any were used). */
1052
1053 int
1054 get_first_label_num ()
1055 {
1056 return first_label_num;
1057 }
1058 \f
1059 /* Return the final regno of X, which is a SUBREG of a hard
1060 register. */
1061 int
1062 subreg_hard_regno (x, check_mode)
1063 rtx x;
1064 int check_mode;
1065 {
1066 enum machine_mode mode = GET_MODE (x);
1067 unsigned int byte_offset, base_regno, final_regno;
1068 rtx reg = SUBREG_REG (x);
1069
1070 /* This is where we attempt to catch illegal subregs
1071 created by the compiler. */
1072 if (GET_CODE (x) != SUBREG
1073 || GET_CODE (reg) != REG)
1074 abort ();
1075 base_regno = REGNO (reg);
1076 if (base_regno >= FIRST_PSEUDO_REGISTER)
1077 abort ();
1078 if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
1079 abort ();
1080
1081 /* Catch non-congruent offsets too. */
1082 byte_offset = SUBREG_BYTE (x);
1083 if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
1084 abort ();
1085
1086 final_regno = subreg_regno (x);
1087
1088 return final_regno;
1089 }
1090
1091 /* Return a value representing some low-order bits of X, where the number
1092 of low-order bits is given by MODE. Note that no conversion is done
1093 between floating-point and fixed-point values; rather, the bit
1094 representation is returned.
1095
1096 This function handles the cases in common between gen_lowpart, below,
1097 and two variants in cse.c and combine.c. These are the cases that can
1098 be safely handled at all points in the compilation.
1099
1100 If this is not a case we can handle, return 0. */
1101
1102 rtx
1103 gen_lowpart_common (mode, x)
1104 enum machine_mode mode;
1105 rtx x;
1106 {
1107 int msize = GET_MODE_SIZE (mode);
1108 int xsize = GET_MODE_SIZE (GET_MODE (x));
1109 int offset = 0;
1110
1111 if (GET_MODE (x) == mode)
1112 return x;
1113
1114 /* MODE must occupy no more words than the mode of X. */
1115 if (GET_MODE (x) != VOIDmode
1116 && ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1117 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
1118 return 0;
1119
1120 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1121 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1122 && GET_MODE (x) != VOIDmode && msize > xsize)
1123 return 0;
1124
1125 offset = subreg_lowpart_offset (mode, GET_MODE (x));
1126
1127 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1128 && (GET_MODE_CLASS (mode) == MODE_INT
1129 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1130 {
1131 /* If we are getting the low-order part of something that has been
1132 sign- or zero-extended, we can either just use the object being
1133 extended or make a narrower extension. If we want an even smaller
1134 piece than the size of the object being extended, call ourselves
1135 recursively.
1136
1137 This case is used mostly by combine and cse. */
1138
1139 if (GET_MODE (XEXP (x, 0)) == mode)
1140 return XEXP (x, 0);
1141 else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1142 return gen_lowpart_common (mode, XEXP (x, 0));
1143 else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
1144 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1145 }
1146 else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
1147 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR)
1148 return simplify_gen_subreg (mode, x, GET_MODE (x), offset);
1149 else if ((GET_MODE_CLASS (mode) == MODE_VECTOR_INT
1150 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
1151 && GET_MODE (x) == VOIDmode)
1152 return simplify_gen_subreg (mode, x, int_mode_for_mode (mode), offset);
1153 /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
1154 from the low-order part of the constant. */
1155 else if ((GET_MODE_CLASS (mode) == MODE_INT
1156 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
1157 && GET_MODE (x) == VOIDmode
1158 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
1159 {
1160 /* If MODE is twice the host word size, X is already the desired
1161 representation. Otherwise, if MODE is wider than a word, we can't
1162 do this. If MODE is exactly a word, return just one CONST_INT. */
1163
1164 if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
1165 return x;
1166 else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1167 return 0;
1168 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
1169 return (GET_CODE (x) == CONST_INT ? x
1170 : GEN_INT (CONST_DOUBLE_LOW (x)));
1171 else
1172 {
1173 /* MODE must be narrower than HOST_BITS_PER_WIDE_INT. */
1174 HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
1175 : CONST_DOUBLE_LOW (x));
1176
1177 /* Sign extend to HOST_WIDE_INT. */
1178 val = trunc_int_for_mode (val, mode);
1179
1180 return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
1181 : GEN_INT (val));
1182 }
1183 }
1184
1185 /* The floating-point emulator can handle all conversions between
1186 FP and integer operands. This simplifies reload because it
1187 doesn't have to deal with constructs like (subreg:DI
1188 (const_double:SF ...)) or (subreg:DF (const_int ...)). */
1189 /* Single-precision floats are always 32-bits and double-precision
1190 floats are always 64-bits. */
1191
1192 else if (GET_MODE_CLASS (mode) == MODE_FLOAT
1193 && GET_MODE_BITSIZE (mode) == 32
1194 && GET_CODE (x) == CONST_INT)
1195 {
1196 REAL_VALUE_TYPE r;
1197 long i = INTVAL (x);
1198
1199 real_from_target (&r, &i, mode);
1200 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
1201 }
1202 else if (GET_MODE_CLASS (mode) == MODE_FLOAT
1203 && GET_MODE_BITSIZE (mode) == 64
1204 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
1205 && GET_MODE (x) == VOIDmode)
1206 {
1207 REAL_VALUE_TYPE r;
1208 HOST_WIDE_INT low, high;
1209 long i[2];
1210
1211 if (GET_CODE (x) == CONST_INT)
1212 {
1213 low = INTVAL (x);
1214 high = low >> (HOST_BITS_PER_WIDE_INT - 1);
1215 }
1216 else
1217 {
1218 low = CONST_DOUBLE_LOW (x);
1219 high = CONST_DOUBLE_HIGH (x);
1220 }
1221
1222 if (HOST_BITS_PER_WIDE_INT > 32)
1223 high = low >> 31 >> 1;
1224
1225 /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
1226 target machine. */
1227 if (WORDS_BIG_ENDIAN)
1228 i[0] = high, i[1] = low;
1229 else
1230 i[0] = low, i[1] = high;
1231
1232 real_from_target (&r, i, mode);
1233 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
1234 }
1235 else if ((GET_MODE_CLASS (mode) == MODE_INT
1236 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
1237 && GET_CODE (x) == CONST_DOUBLE
1238 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
1239 {
1240 REAL_VALUE_TYPE r;
1241 long i[4]; /* Only the low 32 bits of each 'long' are used. */
1242 int endian = WORDS_BIG_ENDIAN ? 1 : 0;
1243
1244 /* Convert 'r' into an array of four 32-bit words in target word
1245 order. */
1246 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
1247 switch (GET_MODE_BITSIZE (GET_MODE (x)))
1248 {
1249 case 32:
1250 REAL_VALUE_TO_TARGET_SINGLE (r, i[3 * endian]);
1251 i[1] = 0;
1252 i[2] = 0;
1253 i[3 - 3 * endian] = 0;
1254 break;
1255 case 64:
1256 REAL_VALUE_TO_TARGET_DOUBLE (r, i + 2 * endian);
1257 i[2 - 2 * endian] = 0;
1258 i[3 - 2 * endian] = 0;
1259 break;
1260 case 96:
1261 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i + endian);
1262 i[3 - 3 * endian] = 0;
1263 break;
1264 case 128:
1265 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i);
1266 break;
1267 default:
1268 abort ();
1269 }
1270 /* Now, pack the 32-bit elements of the array into a CONST_DOUBLE
1271 and return it. */
1272 #if HOST_BITS_PER_WIDE_INT == 32
1273 return immed_double_const (i[3 * endian], i[1 + endian], mode);
1274 #else
1275 if (HOST_BITS_PER_WIDE_INT != 64)
1276 abort ();
1277
1278 return immed_double_const ((((unsigned long) i[3 * endian])
1279 | ((HOST_WIDE_INT) i[1 + endian] << 32)),
1280 (((unsigned long) i[2 - endian])
1281 | ((HOST_WIDE_INT) i[3 - 3 * endian] << 32)),
1282 mode);
1283 #endif
1284 }
1285
1286 /* Otherwise, we can't do this. */
1287 return 0;
1288 }
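
/* Illustrative examples of the constant cases above:

	gen_lowpart_common (QImode, GEN_INT (0x1234)) => (const_int 0x34)
	gen_lowpart_common (QImode, GEN_INT (0x12f0)) => (const_int -16)

   i.e. the low-order bits are extracted and then sign-extended for the
   narrow mode via trunc_int_for_mode.  */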
1289 \f
1290 /* Return the real part (which has mode MODE) of a complex value X.
1291 This always comes at the low address in memory. */
1292
1293 rtx
1294 gen_realpart (mode, x)
1295 enum machine_mode mode;
1296 rtx x;
1297 {
1298 if (WORDS_BIG_ENDIAN
1299 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1300 && REG_P (x)
1301 && REGNO (x) < FIRST_PSEUDO_REGISTER)
1302 internal_error
1303 ("can't access real part of complex value in hard register");
1304 else if (WORDS_BIG_ENDIAN)
1305 return gen_highpart (mode, x);
1306 else
1307 return gen_lowpart (mode, x);
1308 }
1309
1310 /* Return the imaginary part (which has mode MODE) of a complex value X.
1311 This always comes at the high address in memory. */
1312
1313 rtx
1314 gen_imagpart (mode, x)
1315 enum machine_mode mode;
1316 rtx x;
1317 {
1318 if (WORDS_BIG_ENDIAN)
1319 return gen_lowpart (mode, x);
1320 else if (! WORDS_BIG_ENDIAN
1321 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1322 && REG_P (x)
1323 && REGNO (x) < FIRST_PSEUDO_REGISTER)
1324 internal_error
1325 ("can't access imaginary part of complex value in hard register");
1326 else
1327 return gen_highpart (mode, x);
1328 }
1329
1330 /* Return 1 iff X, assumed to be a SUBREG,
1331 refers to the real part of the complex value in its containing reg.
1332 Complex values are always stored with the real part in the first word,
1333 regardless of WORDS_BIG_ENDIAN. */
1334
1335 int
1336 subreg_realpart_p (x)
1337 rtx x;
1338 {
1339 if (GET_CODE (x) != SUBREG)
1340 abort ();
1341
1342 return ((unsigned int) SUBREG_BYTE (x)
1343 < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
1344 }
1345 \f
1346 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
1347 return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
1348 least-significant part of X.
1349 MODE specifies how big a part of X to return;
1350 it usually should not be larger than a word.
1351 If X is a MEM whose address is a QUEUED, the value may be so also. */
1352
1353 rtx
1354 gen_lowpart (mode, x)
1355 enum machine_mode mode;
1356 rtx x;
1357 {
1358 rtx result = gen_lowpart_common (mode, x);
1359
1360 if (result)
1361 return result;
1362 else if (GET_CODE (x) == REG)
1363 {
1364 /* Must be a hard reg that's not valid in MODE. */
1365 result = gen_lowpart_common (mode, copy_to_reg (x));
1366 if (result == 0)
1367 abort ();
1368 return result;
1369 }
1370 else if (GET_CODE (x) == MEM)
1371 {
1372 /* The only additional case we can do is MEM. */
1373 int offset = 0;
1374 if (WORDS_BIG_ENDIAN)
1375 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
1376 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
1377
1378 if (BYTES_BIG_ENDIAN)
1379 /* Adjust the address so that the address-after-the-data
1380 is unchanged. */
1381 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
1382 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
1383
1384 return adjust_address (x, mode, offset);
1385 }
1386 else if (GET_CODE (x) == ADDRESSOF)
1387 return gen_lowpart (mode, force_reg (GET_MODE (x), x));
1388 else
1389 abort ();
1390 }
1391
1392 /* Like `gen_lowpart', but refer to the most significant part.
1393 This is used to access the imaginary part of a complex number. */
1394
1395 rtx
1396 gen_highpart (mode, x)
1397 enum machine_mode mode;
1398 rtx x;
1399 {
1400 unsigned int msize = GET_MODE_SIZE (mode);
1401 rtx result;
1402
1403 /* This case loses if X is a subreg. To catch bugs early,
1404 complain if an invalid MODE is used even in other cases. */
1405 if (msize > UNITS_PER_WORD
1406 && msize != GET_MODE_UNIT_SIZE (GET_MODE (x)))
1407 abort ();
1408
1409 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1410 subreg_highpart_offset (mode, GET_MODE (x)));
1411
1412 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1413 the target if we have a MEM. gen_highpart must return a valid operand,
1414 emitting code if necessary to do so. */
1415 if (result != NULL_RTX && GET_CODE (result) == MEM)
1416 result = validize_mem (result);
1417
1418 if (!result)
1419 abort ();
1420 return result;
1421 }
1422
1423 /* Like gen_highpart, but accept the mode of EXP as an argument, in case
1424 EXP can be a VOIDmode constant. */
1425 rtx
1426 gen_highpart_mode (outermode, innermode, exp)
1427 enum machine_mode outermode, innermode;
1428 rtx exp;
1429 {
1430 if (GET_MODE (exp) != VOIDmode)
1431 {
1432 if (GET_MODE (exp) != innermode)
1433 abort ();
1434 return gen_highpart (outermode, exp);
1435 }
1436 return simplify_gen_subreg (outermode, exp, innermode,
1437 subreg_highpart_offset (outermode, innermode));
1438 }
1439
1440 /* Return offset in bytes to get OUTERMODE low part
1441 of the value in mode INNERMODE stored in memory in target format. */
1442
1443 unsigned int
1444 subreg_lowpart_offset (outermode, innermode)
1445 enum machine_mode outermode, innermode;
1446 {
1447 unsigned int offset = 0;
1448 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1449
1450 if (difference > 0)
1451 {
1452 if (WORDS_BIG_ENDIAN)
1453 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1454 if (BYTES_BIG_ENDIAN)
1455 offset += difference % UNITS_PER_WORD;
1456 }
1457
1458 return offset;
1459 }
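
/* Worked example, assuming 4-byte words: for INNERMODE DImode (8 bytes)
   and OUTERMODE HImode (2 bytes) the difference is 6 bytes, so the
   offset is 0 on a little-endian target and 4 + 2 = 6 on a big-endian
   one, which is where the two least significant bytes actually live.  */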
1460
1461 /* Return offset in bytes to get OUTERMODE high part
1462 of the value in mode INNERMODE stored in memory in target format. */
1463 unsigned int
1464 subreg_highpart_offset (outermode, innermode)
1465 enum machine_mode outermode, innermode;
1466 {
1467 unsigned int offset = 0;
1468 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1469
1470 if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
1471 abort ();
1472
1473 if (difference > 0)
1474 {
1475 if (! WORDS_BIG_ENDIAN)
1476 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1477 if (! BYTES_BIG_ENDIAN)
1478 offset += difference % UNITS_PER_WORD;
1479 }
1480
1481 return offset;
1482 }
1483
1484 /* Return 1 iff X, assumed to be a SUBREG,
1485 refers to the least significant part of its containing reg.
1486 If X is not a SUBREG, always return 1 (it is its own low part!). */
1487
1488 int
1489 subreg_lowpart_p (x)
1490 rtx x;
1491 {
1492 if (GET_CODE (x) != SUBREG)
1493 return 1;
1494 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1495 return 0;
1496
1497 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1498 == SUBREG_BYTE (x));
1499 }
1500 \f
1501
1502 /* Helper routine for all the constant cases of operand_subword.
1503 Some places invoke this directly. */
1504
1505 rtx
1506 constant_subword (op, offset, mode)
1507 rtx op;
1508 int offset;
1509 enum machine_mode mode;
1510 {
1511 int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
1512 HOST_WIDE_INT val;
1513
1514 /* If OP is already an integer word, return it. */
1515 if (GET_MODE_CLASS (mode) == MODE_INT
1516 && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
1517 return op;
1518
1519 /* The output is some bits, the width of the target machine's word.
1520 A wider-word host can surely hold them in a CONST_INT. A narrower-word
1521 host can't. */
1522 if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
1523 && GET_MODE_CLASS (mode) == MODE_FLOAT
1524 && GET_MODE_BITSIZE (mode) == 64
1525 && GET_CODE (op) == CONST_DOUBLE)
1526 {
1527 long k[2];
1528 REAL_VALUE_TYPE rv;
1529
1530 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1531 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1532
1533 /* We handle 32-bit and >= 64-bit words here. Note that the order in
1534 which the words are written depends on the word endianness.
1535 ??? This is a potential portability problem and should
1536 be fixed at some point.
1537
1538 We must exercise caution with the sign bit. By definition there
1539 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
1540 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
1541 So we explicitly mask and sign-extend as necessary. */
1542 if (BITS_PER_WORD == 32)
1543 {
1544 val = k[offset];
1545 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1546 return GEN_INT (val);
1547 }
1548 #if HOST_BITS_PER_WIDE_INT >= 64
1549 else if (BITS_PER_WORD >= 64 && offset == 0)
1550 {
1551 val = k[! WORDS_BIG_ENDIAN];
1552 val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
1553 val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
1554 return GEN_INT (val);
1555 }
1556 #endif
1557 else if (BITS_PER_WORD == 16)
1558 {
1559 val = k[offset >> 1];
1560 if ((offset & 1) == ! WORDS_BIG_ENDIAN)
1561 val >>= 16;
1562 val = ((val & 0xffff) ^ 0x8000) - 0x8000;
1563 return GEN_INT (val);
1564 }
1565 else
1566 abort ();
1567 }
1568 else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
1569 && GET_MODE_CLASS (mode) == MODE_FLOAT
1570 && GET_MODE_BITSIZE (mode) > 64
1571 && GET_CODE (op) == CONST_DOUBLE)
1572 {
1573 long k[4];
1574 REAL_VALUE_TYPE rv;
1575
1576 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1577 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1578
1579 if (BITS_PER_WORD == 32)
1580 {
1581 val = k[offset];
1582 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1583 return GEN_INT (val);
1584 }
1585 #if HOST_BITS_PER_WIDE_INT >= 64
1586 else if (BITS_PER_WORD >= 64 && offset <= 1)
1587 {
1588 val = k[offset * 2 + ! WORDS_BIG_ENDIAN];
1589 val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
1590 val |= (HOST_WIDE_INT) k[offset * 2 + WORDS_BIG_ENDIAN] & 0xffffffff;
1591 return GEN_INT (val);
1592 }
1593 #endif
1594 else
1595 abort ();
1596 }
1597
1598 /* Single word float is a little harder, since single- and double-word
1599 values often do not have the same high-order bits. We have already
1600 verified that we want the only defined word of the single-word value. */
1601 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1602 && GET_MODE_BITSIZE (mode) == 32
1603 && GET_CODE (op) == CONST_DOUBLE)
1604 {
1605 long l;
1606 REAL_VALUE_TYPE rv;
1607
1608 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1609 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1610
1611 /* Sign extend from known 32-bit value to HOST_WIDE_INT. */
1612 val = l;
1613 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1614
1615 if (BITS_PER_WORD == 16)
1616 {
1617 if ((offset & 1) == ! WORDS_BIG_ENDIAN)
1618 val >>= 16;
1619 val = ((val & 0xffff) ^ 0x8000) - 0x8000;
1620 }
1621
1622 return GEN_INT (val);
1623 }
1624
1625 /* The only remaining cases that we can handle are integers.
1626 Convert to proper endianness now since these cases need it.
1627 At this point, offset == 0 means the low-order word.
1628
1629 We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
1630 in general. However, if OP is (const_int 0), we can just return
1631 it for any word. */
1632
1633 if (op == const0_rtx)
1634 return op;
1635
1636 if (GET_MODE_CLASS (mode) != MODE_INT
1637 || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
1638 || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
1639 return 0;
1640
1641 if (WORDS_BIG_ENDIAN)
1642 offset = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - offset;
1643
1644 /* Find out which word on the host machine this value is in and get
1645 it from the constant. */
1646 val = (offset / size_ratio == 0
1647 ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
1648 : (GET_CODE (op) == CONST_INT
1649 ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));
1650
1651 /* Get the value we want into the low bits of val. */
1652 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
1653 val = ((val >> ((offset % size_ratio) * BITS_PER_WORD)));
1654
1655 val = trunc_int_for_mode (val, word_mode);
1656
1657 return GEN_INT (val);
1658 }
1659
1660 /* Return subword OFFSET of operand OP.
1661 The word number, OFFSET, is interpreted as the word number starting
1662 at the low-order address. OFFSET 0 is the low-order word if not
1663 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1664
1665 If we cannot extract the required word, we return zero. Otherwise,
1666 an rtx corresponding to the requested word will be returned.
1667
1668 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1669 reload has completed, a valid address will always be returned. After
1670 reload, if a valid address cannot be returned, we return zero.
1671
1672 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1673 it is the responsibility of the caller.
1674
1675 MODE is the mode of OP in case it is a CONST_INT.
1676
1677 ??? This is still rather broken for some cases. The problem for the
1678 moment is that all callers of this thing provide no 'goal mode' to
1679 tell us to work with. This exists because all callers were written
1680 in a word based SUBREG world.
1681 Now use of this function can be deprecated by simplify_subreg in most
1682 cases.
1683 */
1684
1685 rtx
1686 operand_subword (op, offset, validate_address, mode)
1687 rtx op;
1688 unsigned int offset;
1689 int validate_address;
1690 enum machine_mode mode;
1691 {
1692 if (mode == VOIDmode)
1693 mode = GET_MODE (op);
1694
1695 if (mode == VOIDmode)
1696 abort ();
1697
1698 /* If OP is narrower than a word, fail. */
1699 if (mode != BLKmode
1700 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1701 return 0;
1702
1703 /* If we want a word outside OP, return zero. */
1704 if (mode != BLKmode
1705 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1706 return const0_rtx;
1707
1708 /* Form a new MEM at the requested address. */
1709 if (GET_CODE (op) == MEM)
1710 {
1711 rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1712
1713 if (! validate_address)
1714 return new;
1715
1716 else if (reload_completed)
1717 {
1718 if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
1719 return 0;
1720 }
1721 else
1722 return replace_equiv_address (new, XEXP (new, 0));
1723 }
1724
1725 /* Rest can be handled by simplify_subreg. */
1726 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1727 }
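
/* Illustrative sketch: for (reg:DI r) on a 32-bit-word target,

	operand_subword (r, 1, 0, DImode) => (subreg:SI (reg:DI r) 4)

   i.e. the word at the higher address; offset 0 names the word at the
   lower address regardless of endianness.  */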
1728
1729 /* Similar to `operand_subword', but never return 0. If we can't extract
1730 the required subword, put OP into a register and try again. If that fails,
1731 abort. We always validate the address in this case.
1732
1733 MODE is the mode of OP, in case it is CONST_INT. */
1734
1735 rtx
1736 operand_subword_force (op, offset, mode)
1737 rtx op;
1738 unsigned int offset;
1739 enum machine_mode mode;
1740 {
1741 rtx result = operand_subword (op, offset, 1, mode);
1742
1743 if (result)
1744 return result;
1745
1746 if (mode != BLKmode && mode != VOIDmode)
1747 {
1748 /* If this is a register which cannot be accessed by words, copy it
1749 to a pseudo register. */
1750 if (GET_CODE (op) == REG)
1751 op = copy_to_reg (op);
1752 else
1753 op = force_reg (mode, op);
1754 }
1755
1756 result = operand_subword (op, offset, 1, mode);
1757 if (result == 0)
1758 abort ();
1759
1760 return result;
1761 }
1762 \f
1763 /* Given a compare instruction, swap the operands.
1764 A test instruction is changed into a compare of 0 against the operand. */
1765
1766 void
1767 reverse_comparison (insn)
1768 rtx insn;
1769 {
1770 rtx body = PATTERN (insn);
1771 rtx comp;
1772
1773 if (GET_CODE (body) == SET)
1774 comp = SET_SRC (body);
1775 else
1776 comp = SET_SRC (XVECEXP (body, 0, 0));
1777
1778 if (GET_CODE (comp) == COMPARE)
1779 {
1780 rtx op0 = XEXP (comp, 0);
1781 rtx op1 = XEXP (comp, 1);
1782 XEXP (comp, 0) = op1;
1783 XEXP (comp, 1) = op0;
1784 }
1785 else
1786 {
1787 rtx new = gen_rtx_COMPARE (VOIDmode,
1788 CONST0_RTX (GET_MODE (comp)), comp);
1789 if (GET_CODE (body) == SET)
1790 SET_SRC (body) = new;
1791 else
1792 SET_SRC (XVECEXP (body, 0, 0)) = new;
1793 }
1794 }
1795 \f
1796 /* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
1797 or (2) a component ref of something variable. Represent the latter with
1798 a NULL expression. */
1799
1800 static tree
1801 component_ref_for_mem_expr (ref)
1802 tree ref;
1803 {
1804 tree inner = TREE_OPERAND (ref, 0);
1805
1806 if (TREE_CODE (inner) == COMPONENT_REF)
1807 inner = component_ref_for_mem_expr (inner);
1808 else
1809 {
1810 tree placeholder_ptr = 0;
1811
1812 /* Now remove any conversions: they don't change what the underlying
1813 object is. Likewise for SAVE_EXPR. Also handle PLACEHOLDER_EXPR. */
1814 while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
1815 || TREE_CODE (inner) == NON_LVALUE_EXPR
1816 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1817 || TREE_CODE (inner) == SAVE_EXPR
1818 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
1819 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
1820 inner = find_placeholder (inner, &placeholder_ptr);
1821 else
1822 inner = TREE_OPERAND (inner, 0);
1823
1824 if (! DECL_P (inner))
1825 inner = NULL_TREE;
1826 }
1827
1828 if (inner == TREE_OPERAND (ref, 0))
1829 return ref;
1830 else
1831 return build (COMPONENT_REF, TREE_TYPE (ref), inner,
1832 TREE_OPERAND (ref, 1));
1833 }
1834
1835 /* Given REF, a MEM, and T, either the type of REF or the expression
1836 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1837 if we are making a new object of this type. BITPOS is nonzero if
1838 there is an offset outstanding on T that will be applied later. */
1839
1840 void
1841 set_mem_attributes_minus_bitpos (ref, t, objectp, bitpos)
1842 rtx ref;
1843 tree t;
1844 int objectp;
1845 HOST_WIDE_INT bitpos;
1846 {
1847 HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
1848 tree expr = MEM_EXPR (ref);
1849 rtx offset = MEM_OFFSET (ref);
1850 rtx size = MEM_SIZE (ref);
1851 unsigned int align = MEM_ALIGN (ref);
1852 HOST_WIDE_INT apply_bitpos = 0;
1853 tree type;
1854
1855 /* It can happen that type_for_mode was given a mode for which there
1856 is no language-level type, in which case it returns NULL and we
1857 may see that here.  */
1858 if (t == NULL_TREE)
1859 return;
1860
1861 type = TYPE_P (t) ? t : TREE_TYPE (t);
1862
1863 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1864 wrong answer, as it assumes that DECL_RTL already has the right alias
1865 info. Callers should not set DECL_RTL until after the call to
1866 set_mem_attributes. */
1867 if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
1868 abort ();
1869
1870 /* Get the alias set from the expression or type (perhaps using a
1871 front-end routine) and use it. */
1872 alias = get_alias_set (t);
1873
1874 MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
1875 MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
1876 RTX_UNCHANGING_P (ref)
1877 |= ((lang_hooks.honor_readonly
1878 && (TYPE_READONLY (type) || TREE_READONLY (t)))
1879 || (! TYPE_P (t) && TREE_CONSTANT (t)));
1880
1881 /* If we are making an object of this type, or if this is a DECL, we know
1882 that it is a scalar if the type is not an aggregate. */
1883 if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
1884 MEM_SCALAR_P (ref) = 1;
1885
1886 /* We can set the alignment from the type if we are making an object,
1887 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1888 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1889 align = MAX (align, TYPE_ALIGN (type));
1890
1891 /* If the size is known, we can set that. */
1892 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1893 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
1894
1895 /* If T is not a type, we may be able to deduce some more information about
1896 the expression. */
1897 if (! TYPE_P (t))
1898 {
1899 maybe_set_unchanging (ref, t);
1900 if (TREE_THIS_VOLATILE (t))
1901 MEM_VOLATILE_P (ref) = 1;
1902
1903 /* Now remove any conversions: they don't change what the underlying
1904 object is. Likewise for SAVE_EXPR. */
1905 while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
1906 || TREE_CODE (t) == NON_LVALUE_EXPR
1907 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1908 || TREE_CODE (t) == SAVE_EXPR)
1909 t = TREE_OPERAND (t, 0);
1910
1911 /* If this expression can't be addressed (e.g., it contains a reference
1912 to a non-addressable field), show we don't change its alias set. */
1913 if (! can_address_p (t))
1914 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1915
1916 /* If this is a decl, set the attributes of the MEM from it. */
1917 if (DECL_P (t))
1918 {
1919 expr = t;
1920 offset = const0_rtx;
1921 apply_bitpos = bitpos;
1922 size = (DECL_SIZE_UNIT (t)
1923 && host_integerp (DECL_SIZE_UNIT (t), 1)
1924 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1925 align = DECL_ALIGN (t);
1926 }
1927
1928 /* If this is a constant, we know the alignment. */
1929 else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
1930 {
1931 align = TYPE_ALIGN (type);
1932 #ifdef CONSTANT_ALIGNMENT
1933 align = CONSTANT_ALIGNMENT (t, align);
1934 #endif
1935 }
1936
1937 /* If this is a field reference and not a bit-field, record it. */
1938 /* ??? There is some information that can be gleaned from bit-fields,
1939 such as the word offset in the structure that might be modified.
1940 But skip it for now. */
1941 else if (TREE_CODE (t) == COMPONENT_REF
1942 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1943 {
1944 expr = component_ref_for_mem_expr (t);
1945 offset = const0_rtx;
1946 apply_bitpos = bitpos;
1947 /* ??? Any reason the field size would be different than
1948 the size we got from the type? */
1949 }
1950
1951 /* If this is an array reference, look for an outer field reference. */
1952 else if (TREE_CODE (t) == ARRAY_REF)
1953 {
1954 tree off_tree = size_zero_node;
1955
1956 do
1957 {
1958 tree index = TREE_OPERAND (t, 1);
1959 tree array = TREE_OPERAND (t, 0);
1960 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
1961 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
1962 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
1963
1964 /* We assume all arrays have sizes that are a multiple of a byte.
1965 First subtract the lower bound, if any, in the type of the
1966 index, then convert to sizetype and multiply by the size of the
1967 array element. */
1968 if (low_bound != 0 && ! integer_zerop (low_bound))
1969 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
1970 index, low_bound));
1971
1972 /* If the index has a self-referential type, wrap it in a
1973 WITH_RECORD_EXPR supplying our component as the record; if the
1974 component size is self-referential, wrap it in one supplying the array.  */
1975 if (! TREE_CONSTANT (index)
1976 && contains_placeholder_p (index))
1977 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, t);
1978 if (! TREE_CONSTANT (unit_size)
1979 && contains_placeholder_p (unit_size))
1980 unit_size = build (WITH_RECORD_EXPR, sizetype,
1981 unit_size, array);
1982
1983 off_tree
1984 = fold (build (PLUS_EXPR, sizetype,
1985 fold (build (MULT_EXPR, sizetype,
1986 index,
1987 unit_size)),
1988 off_tree));
1989 t = TREE_OPERAND (t, 0);
1990 }
1991 while (TREE_CODE (t) == ARRAY_REF);
1992
1993 if (DECL_P (t))
1994 {
1995 expr = t;
1996 offset = NULL;
1997 if (host_integerp (off_tree, 1))
1998 {
1999 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
2000 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
2001 align = DECL_ALIGN (t);
2002 if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
2003 align = aoff;
2004 offset = GEN_INT (ioff);
2005 apply_bitpos = bitpos;
2006 }
2007 }
2008 else if (TREE_CODE (t) == COMPONENT_REF)
2009 {
2010 expr = component_ref_for_mem_expr (t);
2011 if (host_integerp (off_tree, 1))
2012 {
2013 offset = GEN_INT (tree_low_cst (off_tree, 1));
2014 apply_bitpos = bitpos;
2015 }
2016 /* ??? Any reason the field size would be different than
2017 the size we got from the type? */
2018 }
2019 else if (flag_argument_noalias > 1
2020 && TREE_CODE (t) == INDIRECT_REF
2021 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
2022 {
2023 expr = t;
2024 offset = NULL;
2025 }
2026 }
2027
2028 /* If this is a Fortran indirect argument reference, record the
2029 parameter decl. */
2030 else if (flag_argument_noalias > 1
2031 && TREE_CODE (t) == INDIRECT_REF
2032 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
2033 {
2034 expr = t;
2035 offset = NULL;
2036 }
2037 }
2038
2039 /* If we modified OFFSET based on T, then subtract the outstanding
2040 bit position offset. Similarly, increase the size of the accessed
2041 object to contain the negative offset. */
2042 if (apply_bitpos)
2043 {
2044 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
2045 if (size)
2046 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
2047 }
2048
2049 /* Now set the attributes we computed above. */
2050 MEM_ATTRS (ref)
2051 = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
2052
2053 /* If this is already known to be a scalar or aggregate, we are done. */
2054 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
2055 return;
2056
2057 /* If it is a reference into an aggregate, this is part of an aggregate.
2058 Otherwise we don't know. */
2059 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
2060 || TREE_CODE (t) == ARRAY_RANGE_REF
2061 || TREE_CODE (t) == BIT_FIELD_REF)
2062 MEM_IN_STRUCT_P (ref) = 1;
2063 }
2064
2065 void
2066 set_mem_attributes (ref, t, objectp)
2067 rtx ref;
2068 tree t;
2069 int objectp;
2070 {
2071 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
2072 }
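
/* Editor's sketch (not in the original source): a typical caller builds
   a MEM for a declaration and then derives its attributes from the tree
   node; DECL and ADDR here are hypothetical:

       rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);
       set_mem_attributes (mem, decl, 1);
*/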
2073
2074 /* Set the memory attributes of MEM from register REG.  */
2075
2076 void
2077 set_mem_attrs_from_reg (mem, reg)
2078 rtx mem;
2079 rtx reg;
2080 {
2081 MEM_ATTRS (mem)
2082 = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
2083 GEN_INT (REG_OFFSET (reg)),
2084 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
2085 }
2086
2087 /* Set the alias set of MEM to SET. */
2088
2089 void
2090 set_mem_alias_set (mem, set)
2091 rtx mem;
2092 HOST_WIDE_INT set;
2093 {
2094 #ifdef ENABLE_CHECKING
2095 /* If the new and old alias sets don't conflict, something is wrong. */
2096 if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
2097 abort ();
2098 #endif
2099
2100 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
2101 MEM_SIZE (mem), MEM_ALIGN (mem),
2102 GET_MODE (mem));
2103 }
2104
2105 /* Set the alignment of MEM to ALIGN bits. */
2106
2107 void
2108 set_mem_align (mem, align)
2109 rtx mem;
2110 unsigned int align;
2111 {
2112 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
2113 MEM_OFFSET (mem), MEM_SIZE (mem), align,
2114 GET_MODE (mem));
2115 }
2116
2117 /* Set the expr for MEM to EXPR. */
2118
2119 void
2120 set_mem_expr (mem, expr)
2121 rtx mem;
2122 tree expr;
2123 {
2124 MEM_ATTRS (mem)
2125 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
2126 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
2127 }
2128
2129 /* Set the offset of MEM to OFFSET. */
2130
2131 void
2132 set_mem_offset (mem, offset)
2133 rtx mem, offset;
2134 {
2135 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
2136 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
2137 GET_MODE (mem));
2138 }
2139
2140 /* Set the size of MEM to SIZE. */
2141
2142 void
2143 set_mem_size (mem, size)
2144 rtx mem, size;
2145 {
2146 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
2147 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
2148 GET_MODE (mem));
2149 }
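
/* Editor's note (sketch, not in the original source): these setters
   rebuild the shared MEM_ATTRS structure rather than mutate it, so a
   caller simply reassigns attributes on a copy; LEN is hypothetical:

       set_mem_align (copy, BITS_PER_UNIT);
       set_mem_size (copy, GEN_INT (len));
*/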
2150 \f
2151 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2152 and its address changed to ADDR. (VOIDmode means don't change the mode.
2153 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2154 returned memory location is required to be valid. The memory
2155 attributes are not changed. */
2156
2157 static rtx
2158 change_address_1 (memref, mode, addr, validate)
2159 rtx memref;
2160 enum machine_mode mode;
2161 rtx addr;
2162 int validate;
2163 {
2164 rtx new;
2165
2166 if (GET_CODE (memref) != MEM)
2167 abort ();
2168 if (mode == VOIDmode)
2169 mode = GET_MODE (memref);
2170 if (addr == 0)
2171 addr = XEXP (memref, 0);
2172
2173 if (validate)
2174 {
2175 if (reload_in_progress || reload_completed)
2176 {
2177 if (! memory_address_p (mode, addr))
2178 abort ();
2179 }
2180 else
2181 addr = memory_address (mode, addr);
2182 }
2183
2184 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2185 return memref;
2186
2187 new = gen_rtx_MEM (mode, addr);
2188 MEM_COPY_ATTRIBUTES (new, memref);
2189 return new;
2190 }
2191
2192 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2193 way we are changing MEMREF, so we only preserve the alias set. */
2194
2195 rtx
2196 change_address (memref, mode, addr)
2197 rtx memref;
2198 enum machine_mode mode;
2199 rtx addr;
2200 {
2201 rtx new = change_address_1 (memref, mode, addr, 1);
2202 enum machine_mode mmode = GET_MODE (new);
2203
2204 MEM_ATTRS (new)
2205 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0,
2206 mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode)),
2207 (mmode == BLKmode ? BITS_PER_UNIT
2208 : GET_MODE_ALIGNMENT (mmode)),
2209 mmode);
2210
2211 return new;
2212 }
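
/* Editor's sketch (not in the original source): because only the alias
   set survives, change_address suits references that may touch an
   unknown part of the object, e.g. a block-mode view of MEMREF:

       rtx blk = change_address (memref, BLKmode, addr);
*/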
2213
2214 /* Return a memory reference like MEMREF, but with its mode changed
2215 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2216 nonzero, the memory address is forced to be valid.
2217 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
2218 and the caller is responsible for adjusting the MEMREF base register.  */
2219
2220 rtx
2221 adjust_address_1 (memref, mode, offset, validate, adjust)
2222 rtx memref;
2223 enum machine_mode mode;
2224 HOST_WIDE_INT offset;
2225 int validate, adjust;
2226 {
2227 rtx addr = XEXP (memref, 0);
2228 rtx new;
2229 rtx memoffset = MEM_OFFSET (memref);
2230 rtx size = 0;
2231 unsigned int memalign = MEM_ALIGN (memref);
2232
2233 /* ??? Prefer to create garbage instead of creating shared rtl.
2234 This may happen even if offset is nonzero -- consider
2235 (plus (plus reg reg) const_int) -- so do this always. */
2236 addr = copy_rtx (addr);
2237
2238 if (adjust)
2239 {
2240 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2241 object, we can merge it into the LO_SUM. */
2242 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2243 && offset >= 0
2244 && (unsigned HOST_WIDE_INT) offset
2245 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2246 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
2247 plus_constant (XEXP (addr, 1), offset));
2248 else
2249 addr = plus_constant (addr, offset);
2250 }
2251
2252 new = change_address_1 (memref, mode, addr, validate);
2253
2254 /* Compute the new values of the memory attributes due to this adjustment.
2255 We add the offsets and update the alignment. */
2256 if (memoffset)
2257 memoffset = GEN_INT (offset + INTVAL (memoffset));
2258
2259 /* Compute the new alignment by taking the MIN of the alignment and the
2260 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2261 is zero.  */
2262 if (offset != 0)
2263 memalign
2264 = MIN (memalign,
2265 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
2266
2267 /* We can compute the size in a number of ways. */
2268 if (GET_MODE (new) != BLKmode)
2269 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
2270 else if (MEM_SIZE (memref))
2271 size = plus_constant (MEM_SIZE (memref), -offset);
2272
2273 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
2274 memoffset, size, memalign, GET_MODE (new));
2275
2276 /* At some point, we should validate that this offset is within the object,
2277 if all the appropriate values are known. */
2278 return new;
2279 }
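
/* Editor's note (sketch, not in the original source): callers normally
   reach this through the adjust_address/adjust_address_nv wrappers,
   assumed defined elsewhere, which fix VALIDATE and ADJUST; e.g. to read
   word 1 of a double-word MEM:

       rtx word1 = adjust_address (mem, word_mode, UNITS_PER_WORD);
*/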
2280
2281 /* Return a memory reference like MEMREF, but with its mode changed
2282 to MODE and its address changed to ADDR, which is assumed to be
2283 MEMREF offset by OFFSET bytes.  If VALIDATE is
2284 nonzero, the memory address is forced to be valid. */
2285
2286 rtx
2287 adjust_automodify_address_1 (memref, mode, addr, offset, validate)
2288 rtx memref;
2289 enum machine_mode mode;
2290 rtx addr;
2291 HOST_WIDE_INT offset;
2292 int validate;
2293 {
2294 memref = change_address_1 (memref, VOIDmode, addr, validate);
2295 return adjust_address_1 (memref, mode, offset, validate, 0);
2296 }
2297
2298 /* Return a memory reference like MEMREF, but whose address is changed by
2299 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2300 known to be in OFFSET (possibly 1). */
2301
2302 rtx
2303 offset_address (memref, offset, pow2)
2304 rtx memref;
2305 rtx offset;
2306 HOST_WIDE_INT pow2;
2307 {
2308 rtx new, addr = XEXP (memref, 0);
2309
2310 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2311
2312 /* At this point we don't know _why_ the address is invalid. It
2313 could have secondary memory references, multiplies or anything.
2314
2315 However, if we did go and rearrange things, we can wind up not
2316 being able to recognize the magic around pic_offset_table_rtx.
2317 This stuff is fragile, and is yet another example of why it is
2318 bad to expose PIC machinery too early. */
2319 if (! memory_address_p (GET_MODE (memref), new)
2320 && GET_CODE (addr) == PLUS
2321 && XEXP (addr, 0) == pic_offset_table_rtx)
2322 {
2323 addr = force_reg (GET_MODE (addr), addr);
2324 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2325 }
2326
2327 update_temp_slot_address (XEXP (memref, 0), new);
2328 new = change_address_1 (memref, VOIDmode, new, 1);
2329
2330 /* Update the alignment to reflect the offset. Reset the offset, which
2331 we don't know. */
2332 MEM_ATTRS (new)
2333 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2334 MIN (MEM_ALIGN (memref),
2335 (unsigned HOST_WIDE_INT) pow2 * BITS_PER_UNIT),
2336 GET_MODE (new));
2337 return new;
2338 }
2339
2340 /* Return a memory reference like MEMREF, but with its address changed to
2341 ADDR. The caller is asserting that the actual piece of memory pointed
2342 to is the same, just the form of the address is being changed, such as
2343 by putting something into a register. */
2344
2345 rtx
2346 replace_equiv_address (memref, addr)
2347 rtx memref;
2348 rtx addr;
2349 {
2350 /* change_address_1 copies the memory attribute structure without change
2351 and that's exactly what we want here. */
2352 update_temp_slot_address (XEXP (memref, 0), addr);
2353 return change_address_1 (memref, VOIDmode, addr, 1);
2354 }
2355
2356 /* Likewise, but the reference is not required to be valid. */
2357
2358 rtx
2359 replace_equiv_address_nv (memref, addr)
2360 rtx memref;
2361 rtx addr;
2362 {
2363 return change_address_1 (memref, VOIDmode, addr, 0);
2364 }
2365
2366 /* Return a memory reference like MEMREF, but with its mode widened to
2367 MODE and offset by OFFSET. This would be used by targets that e.g.
2368 cannot issue QImode memory operations and have to use SImode memory
2369 operations plus masking logic. */
2370
2371 rtx
2372 widen_memory_access (memref, mode, offset)
2373 rtx memref;
2374 enum machine_mode mode;
2375 HOST_WIDE_INT offset;
2376 {
2377 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
2378 tree expr = MEM_EXPR (new);
2379 rtx memoffset = MEM_OFFSET (new);
2380 unsigned int size = GET_MODE_SIZE (mode);
2381
2382 /* If we don't know what offset we were at within the expression, then
2383 we can't know if we've overstepped the bounds. */
2384 if (! memoffset)
2385 expr = NULL_TREE;
2386
2387 while (expr)
2388 {
2389 if (TREE_CODE (expr) == COMPONENT_REF)
2390 {
2391 tree field = TREE_OPERAND (expr, 1);
2392
2393 if (! DECL_SIZE_UNIT (field))
2394 {
2395 expr = NULL_TREE;
2396 break;
2397 }
2398
2399 /* Is the field at least as large as the access? If so, ok,
2400 otherwise strip back to the containing structure. */
2401 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2402 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2403 && INTVAL (memoffset) >= 0)
2404 break;
2405
2406 if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
2407 {
2408 expr = NULL_TREE;
2409 break;
2410 }
2411
2412 expr = TREE_OPERAND (expr, 0);
2413 memoffset = (GEN_INT (INTVAL (memoffset)
2414 + tree_low_cst (DECL_FIELD_OFFSET (field), 1)
2415 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2416 / BITS_PER_UNIT)));
2417 }
2418 /* Similarly for the decl. */
2419 else if (DECL_P (expr)
2420 && DECL_SIZE_UNIT (expr)
2421 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2422 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2423 && (! memoffset || INTVAL (memoffset) >= 0))
2424 break;
2425 else
2426 {
2427 /* The widened memory access overflows the expression, which means
2428 that it could alias another expression. Zap it. */
2429 expr = NULL_TREE;
2430 break;
2431 }
2432 }
2433
2434 if (! expr)
2435 memoffset = NULL_RTX;
2436
2437 /* The widened memory may alias other stuff, so zap the alias set. */
2438 /* ??? Maybe use get_alias_set on any remaining expression. */
2439
2440 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2441 MEM_ALIGN (new), mode);
2442
2443 return new;
2444 }
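
/* Editor's sketch (not in the original source): a target without byte
   loads might widen a QImode reference to the containing SImode word;
   BYTE_OFFSET is hypothetical, and masking out the wanted byte is then
   up to the caller:

       rtx wide = widen_memory_access (memref, SImode, -byte_offset);
*/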
2445 \f
2446 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2447
2448 rtx
2449 gen_label_rtx ()
2450 {
2451 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2452 NULL, label_num++, NULL);
2453 }
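
/* Editor's sketch (not in the original source), assuming the usual
   emit-layer helpers gen_jump, emit_jump_insn and emit_label:

       rtx label = gen_label_rtx ();
       emit_jump_insn (gen_jump (label));
       emit_label (label);
*/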
2454 \f
2455 /* For procedure integration. */
2456
2457 /* Install new pointers to the first and last insns in the chain.
2458 Also, set cur_insn_uid to one higher than the last in use.
2459 Used for an inline-procedure after copying the insn chain. */
2460
2461 void
2462 set_new_first_and_last_insn (first, last)
2463 rtx first, last;
2464 {
2465 rtx insn;
2466
2467 first_insn = first;
2468 last_insn = last;
2469 cur_insn_uid = 0;
2470
2471 for (insn = first; insn; insn = NEXT_INSN (insn))
2472 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2473
2474 cur_insn_uid++;
2475 }
2476
2477 /* Set the range of label numbers found in the current function.
2478 This is used when belatedly compiling an inline function. */
2479
2480 void
2481 set_new_first_and_last_label_num (first, last)
2482 int first, last;
2483 {
2484 base_label_num = label_num;
2485 first_label_num = first;
2486 last_label_num = last;
2487 }
2488
2489 /* Set the last label number found in the current function.
2490 This is used when belatedly compiling an inline function. */
2491
2492 void
2493 set_new_last_label_num (last)
2494 int last;
2495 {
2496 base_label_num = label_num;
2497 last_label_num = last;
2498 }
2499 \f
2500 /* Restore all variables describing the current status from the structure *P.
2501 This is used after a nested function. */
2502
2503 void
2504 restore_emit_status (p)
2505 struct function *p ATTRIBUTE_UNUSED;
2506 {
2507 last_label_num = 0;
2508 }
2509 \f
2510 /* Go through all the RTL insn bodies and copy any invalid shared
2511 structure. This routine should only be called once. */
2512
2513 void
2514 unshare_all_rtl (fndecl, insn)
2515 tree fndecl;
2516 rtx insn;
2517 {
2518 tree decl;
2519
2520 /* Make sure that virtual parameters are not shared. */
2521 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2522 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2523
2524 /* Make sure that virtual stack slots are not shared. */
2525 unshare_all_decls (DECL_INITIAL (fndecl));
2526
2527 /* Unshare just about everything else. */
2528 unshare_all_rtl_1 (insn);
2529
2530 /* Make sure the addresses of stack slots found outside the insn chain
2531 (such as, in DECL_RTL of a variable) are not shared
2532 with the insn chain.
2533
2534 This special care is necessary when the stack slot MEM does not
2535 actually appear in the insn chain. If it does appear, its address
2536 is unshared from all else at that point. */
2537 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2538 }
2539
2540 /* Go through all the RTL insn bodies and copy any invalid shared
2541 structure, again. This is a fairly expensive thing to do so it
2542 should be done sparingly. */
2543
2544 void
2545 unshare_all_rtl_again (insn)
2546 rtx insn;
2547 {
2548 rtx p;
2549 tree decl;
2550
2551 for (p = insn; p; p = NEXT_INSN (p))
2552 if (INSN_P (p))
2553 {
2554 reset_used_flags (PATTERN (p));
2555 reset_used_flags (REG_NOTES (p));
2556 reset_used_flags (LOG_LINKS (p));
2557 }
2558
2559 /* Make sure that virtual stack slots are not shared. */
2560 reset_used_decls (DECL_INITIAL (cfun->decl));
2561
2562 /* Make sure that virtual parameters are not shared. */
2563 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2564 reset_used_flags (DECL_RTL (decl));
2565
2566 reset_used_flags (stack_slot_list);
2567
2568 unshare_all_rtl (cfun->decl, insn);
2569 }
2570
2571 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2572 Assumes the mark bits are cleared at entry. */
2573
2574 static void
2575 unshare_all_rtl_1 (insn)
2576 rtx insn;
2577 {
2578 for (; insn; insn = NEXT_INSN (insn))
2579 if (INSN_P (insn))
2580 {
2581 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2582 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2583 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2584 }
2585 }
2586
2587 /* Go through all virtual stack slots of a function and copy any
2588 shared structure. */
2589 static void
2590 unshare_all_decls (blk)
2591 tree blk;
2592 {
2593 tree t;
2594
2595 /* Copy shared decls. */
2596 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2597 if (DECL_RTL_SET_P (t))
2598 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2599
2600 /* Now process sub-blocks. */
2601 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2602 unshare_all_decls (t);
2603 }
2604
2605 /* Go through all virtual stack slots of a function and mark them as
2606 not shared. */
2607 static void
2608 reset_used_decls (blk)
2609 tree blk;
2610 {
2611 tree t;
2612
2613 /* Mark decls. */
2614 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2615 if (DECL_RTL_SET_P (t))
2616 reset_used_flags (DECL_RTL (t));
2617
2618 /* Now process sub-blocks. */
2619 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2620 reset_used_decls (t);
2621 }
2622
2623 /* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
2624 placed in the result directly, rather than being copied. MAY_SHARE is
2625 either a MEM or an EXPR_LIST of MEMs.  */
2626
2627 rtx
2628 copy_most_rtx (orig, may_share)
2629 rtx orig;
2630 rtx may_share;
2631 {
2632 rtx copy;
2633 int i, j;
2634 RTX_CODE code;
2635 const char *format_ptr;
2636
2637 if (orig == may_share
2638 || (GET_CODE (may_share) == EXPR_LIST
2639 && in_expr_list_p (may_share, orig)))
2640 return orig;
2641
2642 code = GET_CODE (orig);
2643
2644 switch (code)
2645 {
2646 case REG:
2647 case QUEUED:
2648 case CONST_INT:
2649 case CONST_DOUBLE:
2650 case CONST_VECTOR:
2651 case SYMBOL_REF:
2652 case CODE_LABEL:
2653 case PC:
2654 case CC0:
2655 return orig;
2656 default:
2657 break;
2658 }
2659
2660 copy = rtx_alloc (code);
2661 PUT_MODE (copy, GET_MODE (orig));
2662 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2663 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2664 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2665 RTX_FLAG (copy, integrated) = RTX_FLAG (orig, integrated);
2666 RTX_FLAG (copy, frame_related) = RTX_FLAG (orig, frame_related);
2667
2668 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2669
2670 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2671 {
2672 switch (*format_ptr++)
2673 {
2674 case 'e':
2675 XEXP (copy, i) = XEXP (orig, i);
2676 if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
2677 XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
2678 break;
2679
2680 case 'u':
2681 XEXP (copy, i) = XEXP (orig, i);
2682 break;
2683
2684 case 'E':
2685 case 'V':
2686 XVEC (copy, i) = XVEC (orig, i);
2687 if (XVEC (orig, i) != NULL)
2688 {
2689 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2690 for (j = 0; j < XVECLEN (copy, i); j++)
2691 XVECEXP (copy, i, j)
2692 = copy_most_rtx (XVECEXP (orig, i, j), may_share);
2693 }
2694 break;
2695
2696 case 'w':
2697 XWINT (copy, i) = XWINT (orig, i);
2698 break;
2699
2700 case 'n':
2701 case 'i':
2702 XINT (copy, i) = XINT (orig, i);
2703 break;
2704
2705 case 't':
2706 XTREE (copy, i) = XTREE (orig, i);
2707 break;
2708
2709 case 's':
2710 case 'S':
2711 XSTR (copy, i) = XSTR (orig, i);
2712 break;
2713
2714 case '0':
2715 /* Copy this through the wide int field; that's safest. */
2716 X0WINT (copy, i) = X0WINT (orig, i);
2717 break;
2718
2719 default:
2720 abort ();
2721 }
2722 }
2723 return copy;
2724 }
2725
2726 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2727 Recursively does the same for subexpressions. */
2728
2729 rtx
2730 copy_rtx_if_shared (orig)
2731 rtx orig;
2732 {
2733 rtx x = orig;
2734 int i;
2735 enum rtx_code code;
2736 const char *format_ptr;
2737 int copied = 0;
2738
2739 if (x == 0)
2740 return 0;
2741
2742 code = GET_CODE (x);
2743
2744 /* These types may be freely shared. */
2745
2746 switch (code)
2747 {
2748 case REG:
2749 case QUEUED:
2750 case CONST_INT:
2751 case CONST_DOUBLE:
2752 case CONST_VECTOR:
2753 case SYMBOL_REF:
2754 case CODE_LABEL:
2755 case PC:
2756 case CC0:
2757 case SCRATCH:
2758 /* SCRATCH must be shared because each one represents a distinct value.  */
2759 return x;
2760
2761 case CONST:
2762 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2763 a LABEL_REF, it isn't sharable. */
2764 if (GET_CODE (XEXP (x, 0)) == PLUS
2765 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2766 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2767 return x;
2768 break;
2769
2770 case INSN:
2771 case JUMP_INSN:
2772 case CALL_INSN:
2773 case NOTE:
2774 case BARRIER:
2775 /* The chain of insns is not being copied. */
2776 return x;
2777
2778 case MEM:
2779 /* A MEM is allowed to be shared if its address is constant.
2780
2781 We used to allow sharing of MEMs which referenced
2782 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
2783 that can lose. instantiate_virtual_regs will not unshare
2784 the MEMs, and combine may change the structure of the address
2785 because it looks safe and profitable in one context, but
2786 in some other context it creates unrecognizable RTL. */
2787 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
2788 return x;
2789
2790 break;
2791
2792 default:
2793 break;
2794 }
2795
2796 /* This rtx may not be shared. If it has already been seen,
2797 replace it with a copy of itself. */
2798
2799 if (RTX_FLAG (x, used))
2800 {
2801 rtx copy;
2802
2803 copy = rtx_alloc (code);
2804 memcpy (copy, x,
2805 (sizeof (*copy) - sizeof (copy->fld)
2806 + sizeof (copy->fld[0]) * GET_RTX_LENGTH (code)));
2807 x = copy;
2808 copied = 1;
2809 }
2810 RTX_FLAG (x, used) = 1;
2811
2812 /* Now scan the subexpressions recursively.
2813 We can store any replaced subexpressions directly into X
2814 since we know X is not shared! Any vectors in X
2815 must be copied if X was copied. */
2816
2817 format_ptr = GET_RTX_FORMAT (code);
2818
2819 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2820 {
2821 switch (*format_ptr++)
2822 {
2823 case 'e':
2824 XEXP (x, i) = copy_rtx_if_shared (XEXP (x, i));
2825 break;
2826
2827 case 'E':
2828 if (XVEC (x, i) != NULL)
2829 {
2830 int j;
2831 int len = XVECLEN (x, i);
2832
2833 if (copied && len > 0)
2834 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2835 for (j = 0; j < len; j++)
2836 XVECEXP (x, i, j) = copy_rtx_if_shared (XVECEXP (x, i, j));
2837 }
2838 break;
2839 }
2840 }
2841 return x;
2842 }
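
/* Editor's note (sketch, not in the original source): the used bits
   must be clear before the walk, so callers pair this routine with
   reset_used_flags, as unshare_all_rtl_again does above:

       reset_used_flags (PATTERN (insn));
       PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
*/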
2843
2844 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2845 to look for shared sub-parts. */
2846
2847 void
2848 reset_used_flags (x)
2849 rtx x;
2850 {
2851 int i, j;
2852 enum rtx_code code;
2853 const char *format_ptr;
2854
2855 if (x == 0)
2856 return;
2857
2858 code = GET_CODE (x);
2859
2860 /* These types may be freely shared so we needn't do any resetting
2861 for them. */
2862
2863 switch (code)
2864 {
2865 case REG:
2866 case QUEUED:
2867 case CONST_INT:
2868 case CONST_DOUBLE:
2869 case CONST_VECTOR:
2870 case SYMBOL_REF:
2871 case CODE_LABEL:
2872 case PC:
2873 case CC0:
2874 return;
2875
2876 case INSN:
2877 case JUMP_INSN:
2878 case CALL_INSN:
2879 case NOTE:
2880 case LABEL_REF:
2881 case BARRIER:
2882 /* The chain of insns is not being copied. */
2883 return;
2884
2885 default:
2886 break;
2887 }
2888
2889 RTX_FLAG (x, used) = 0;
2890
2891 format_ptr = GET_RTX_FORMAT (code);
2892 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2893 {
2894 switch (*format_ptr++)
2895 {
2896 case 'e':
2897 reset_used_flags (XEXP (x, i));
2898 break;
2899
2900 case 'E':
2901 for (j = 0; j < XVECLEN (x, i); j++)
2902 reset_used_flags (XVECEXP (x, i, j));
2903 break;
2904 }
2905 }
2906 }
2907 \f
2908 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2909 Return X or the rtx for the pseudo reg the value of X was copied into.
2910 OTHER must be valid as a SET_DEST. */
2911
2912 rtx
2913 make_safe_from (x, other)
2914 rtx x, other;
2915 {
2916 while (1)
2917 switch (GET_CODE (other))
2918 {
2919 case SUBREG:
2920 other = SUBREG_REG (other);
2921 break;
2922 case STRICT_LOW_PART:
2923 case SIGN_EXTEND:
2924 case ZERO_EXTEND:
2925 other = XEXP (other, 0);
2926 break;
2927 default:
2928 goto done;
2929 }
2930 done:
2931 if ((GET_CODE (other) == MEM
2932 && ! CONSTANT_P (x)
2933 && GET_CODE (x) != REG
2934 && GET_CODE (x) != SUBREG)
2935 || (GET_CODE (other) == REG
2936 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2937 || reg_mentioned_p (other, x))))
2938 {
2939 rtx temp = gen_reg_rtx (GET_MODE (x));
2940 emit_move_insn (temp, x);
2941 return temp;
2942 }
2943 return x;
2944 }
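
/* Editor's sketch (not in the original source): an expander can protect
   a source value that might overlap the destination it is about to
   clobber; SRC and TARGET are hypothetical:

       src = make_safe_from (src, target);

   after which SRC is either unchanged or a fresh pseudo holding a copy.  */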
2945 \f
2946 /* Emission of insns (adding them to the doubly-linked list). */
2947
2948 /* Return the first insn of the current sequence or current function. */
2949
2950 rtx
2951 get_insns ()
2952 {
2953 return first_insn;
2954 }
2955
2956 /* Specify a new insn as the first in the chain. */
2957
2958 void
2959 set_first_insn (insn)
2960 rtx insn;
2961 {
2962 if (PREV_INSN (insn) != 0)
2963 abort ();
2964 first_insn = insn;
2965 }
2966
2967 /* Return the last insn emitted in current sequence or current function. */
2968
2969 rtx
2970 get_last_insn ()
2971 {
2972 return last_insn;
2973 }
2974
2975 /* Specify a new insn as the last in the chain. */
2976
2977 void
2978 set_last_insn (insn)
2979 rtx insn;
2980 {
2981 if (NEXT_INSN (insn) != 0)
2982 abort ();
2983 last_insn = insn;
2984 }
2985
2986 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2987
2988 rtx
2989 get_last_insn_anywhere ()
2990 {
2991 struct sequence_stack *stack;
2992 if (last_insn)
2993 return last_insn;
2994 for (stack = seq_stack; stack; stack = stack->next)
2995 if (stack->last != 0)
2996 return stack->last;
2997 return 0;
2998 }
2999
3000 /* Return the first nonnote insn emitted in current sequence or current
3001 function. This routine looks inside SEQUENCEs. */
3002
3003 rtx
3004 get_first_nonnote_insn ()
3005 {
3006 rtx insn = first_insn;
3007
3008 while (insn)
3009 {
3010 insn = next_insn (insn);
3011 if (insn == 0 || GET_CODE (insn) != NOTE)
3012 break;
3013 }
3014
3015 return insn;
3016 }
3017
3018 /* Return the last nonnote insn emitted in current sequence or current
3019 function. This routine looks inside SEQUENCEs. */
3020
3021 rtx
3022 get_last_nonnote_insn ()
3023 {
3024 rtx insn = last_insn;
3025
3026 while (insn)
3027 {
3028 insn = previous_insn (insn);
3029 if (insn == 0 || GET_CODE (insn) != NOTE)
3030 break;
3031 }
3032
3033 return insn;
3034 }
3035
3036 /* Return a number larger than any instruction's uid in this function. */
3037
3038 int
3039 get_max_uid ()
3040 {
3041 return cur_insn_uid;
3042 }
3043
3044 /* Renumber instructions so that no instruction UIDs are wasted. */
3045
3046 void
3047 renumber_insns (stream)
3048 FILE *stream;
3049 {
3050 rtx insn;
3051
3052 /* If we're not supposed to renumber instructions, don't. */
3053 if (!flag_renumber_insns)
3054 return;
3055
3056 /* If there aren't that many instructions, then it's not really
3057 worth renumbering them. */
3058 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
3059 return;
3060
3061 cur_insn_uid = 1;
3062
3063 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3064 {
3065 if (stream)
3066 fprintf (stream, "Renumbering insn %d to %d\n",
3067 INSN_UID (insn), cur_insn_uid);
3068 INSN_UID (insn) = cur_insn_uid++;
3069 }
3070 }
3071 \f
3072 /* Return the next insn. If it is a SEQUENCE, return the first insn
3073 of the sequence. */
3074
3075 rtx
3076 next_insn (insn)
3077 rtx insn;
3078 {
3079 if (insn)
3080 {
3081 insn = NEXT_INSN (insn);
3082 if (insn && GET_CODE (insn) == INSN
3083 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3084 insn = XVECEXP (PATTERN (insn), 0, 0);
3085 }
3086
3087 return insn;
3088 }
3089
3090 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3091 of the sequence. */
3092
3093 rtx
3094 previous_insn (insn)
3095 rtx insn;
3096 {
3097 if (insn)
3098 {
3099 insn = PREV_INSN (insn);
3100 if (insn && GET_CODE (insn) == INSN
3101 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3102 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3103 }
3104
3105 return insn;
3106 }
3107
3108 /* Return the next insn after INSN that is not a NOTE. This routine does not
3109 look inside SEQUENCEs. */
3110
3111 rtx
3112 next_nonnote_insn (insn)
3113 rtx insn;
3114 {
3115 while (insn)
3116 {
3117 insn = NEXT_INSN (insn);
3118 if (insn == 0 || GET_CODE (insn) != NOTE)
3119 break;
3120 }
3121
3122 return insn;
3123 }
3124
3125 /* Return the previous insn before INSN that is not a NOTE. This routine does
3126 not look inside SEQUENCEs. */
3127
3128 rtx
3129 prev_nonnote_insn (insn)
3130 rtx insn;
3131 {
3132 while (insn)
3133 {
3134 insn = PREV_INSN (insn);
3135 if (insn == 0 || GET_CODE (insn) != NOTE)
3136 break;
3137 }
3138
3139 return insn;
3140 }
3141
3142 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3143 or 0, if there is none. This routine does not look inside
3144 SEQUENCEs. */
3145
3146 rtx
3147 next_real_insn (insn)
3148 rtx insn;
3149 {
3150 while (insn)
3151 {
3152 insn = NEXT_INSN (insn);
3153 if (insn == 0 || GET_CODE (insn) == INSN
3154 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
3155 break;
3156 }
3157
3158 return insn;
3159 }
3160
3161 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3162 or 0, if there is none. This routine does not look inside
3163 SEQUENCEs. */
3164
3165 rtx
3166 prev_real_insn (insn)
3167 rtx insn;
3168 {
3169 while (insn)
3170 {
3171 insn = PREV_INSN (insn);
3172 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
3173 || GET_CODE (insn) == JUMP_INSN)
3174 break;
3175 }
3176
3177 return insn;
3178 }
3179
3180 /* Return nonzero if INSN really does something.  Before reload has
3181 completed, any INSN, CALL_INSN or JUMP_INSN qualifies; afterwards,
3182 INSNs whose pattern is a USE or CLOBBER do not.  */
3183
3184 int
3185 active_insn_p (insn)
3186 rtx insn;
3187 {
3188 return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
3189 || (GET_CODE (insn) == INSN
3190 && (! reload_completed
3191 || (GET_CODE (PATTERN (insn)) != USE
3192 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3193 }
3194
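/* Find the next insn after INSN that really does something.  This routine
does not look inside SEQUENCEs.  Until reload has completed, this is the
same as next_real_insn.  */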
3195 rtx
3196 next_active_insn (insn)
3197 rtx insn;
3198 {
3199 while (insn)
3200 {
3201 insn = NEXT_INSN (insn);
3202 if (insn == 0 || active_insn_p (insn))
3203 break;
3204 }
3205
3206 return insn;
3207 }
3208
3209 /* Find the last insn before INSN that really does something. This routine
3210 does not look inside SEQUENCEs. Until reload has completed, this is the
3211 same as prev_real_insn. */
3212
3213 rtx
3214 prev_active_insn (insn)
3215 rtx insn;
3216 {
3217 while (insn)
3218 {
3219 insn = PREV_INSN (insn);
3220 if (insn == 0 || active_insn_p (insn))
3221 break;
3222 }
3223
3224 return insn;
3225 }
3226
3227 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3228
3229 rtx
3230 next_label (insn)
3231 rtx insn;
3232 {
3233 while (insn)
3234 {
3235 insn = NEXT_INSN (insn);
3236 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3237 break;
3238 }
3239
3240 return insn;
3241 }
3242
3243 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3244
3245 rtx
3246 prev_label (insn)
3247 rtx insn;
3248 {
3249 while (insn)
3250 {
3251 insn = PREV_INSN (insn);
3252 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3253 break;
3254 }
3255
3256 return insn;
3257 }
3258 \f
3259 #ifdef HAVE_cc0
3260 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3261 and REG_CC_USER notes so we can find it. */
3262
3263 void
3264 link_cc0_insns (insn)
3265 rtx insn;
3266 {
3267 rtx user = next_nonnote_insn (insn);
3268
3269 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
3270 user = XVECEXP (PATTERN (user), 0, 0);
3271
3272 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3273 REG_NOTES (user));
3274 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
3275 }
3276
3277 /* Return the next insn that uses CC0 after INSN, which is assumed to
3278 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3279 applied to the result of this function should yield INSN).
3280
3281 Normally, this is simply the next insn. However, if a REG_CC_USER note
3282 is present, it contains the insn that uses CC0.
3283
3284 Return 0 if we can't find the insn. */
3285
3286 rtx
3287 next_cc0_user (insn)
3288 rtx insn;
3289 {
3290 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3291
3292 if (note)
3293 return XEXP (note, 0);
3294
3295 insn = next_nonnote_insn (insn);
3296 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
3297 insn = XVECEXP (PATTERN (insn), 0, 0);
3298
3299 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3300 return insn;
3301
3302 return 0;
3303 }
3304
3305 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3306 note, it is the previous insn. */
3307
3308 rtx
3309 prev_cc0_setter (insn)
3310 rtx insn;
3311 {
3312 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3313
3314 if (note)
3315 return XEXP (note, 0);
3316
3317 insn = prev_nonnote_insn (insn);
3318 if (! sets_cc0_p (PATTERN (insn)))
3319 abort ();
3320
3321 return insn;
3322 }
3323 #endif
3324
3325 /* Increment the label uses for all labels present in rtx. */
3326
3327 static void
3328 mark_label_nuses (x)
3329 rtx x;
3330 {
3331 enum rtx_code code;
3332 int i, j;
3333 const char *fmt;
3334
3335 code = GET_CODE (x);
3336 if (code == LABEL_REF)
3337 LABEL_NUSES (XEXP (x, 0))++;
3338
3339 fmt = GET_RTX_FORMAT (code);
3340 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3341 {
3342 if (fmt[i] == 'e')
3343 mark_label_nuses (XEXP (x, i));
3344 else if (fmt[i] == 'E')
3345 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3346 mark_label_nuses (XVECEXP (x, i, j));
3347 }
3348 }
3349
3350 \f
3351 /* Try splitting insns that can be split for better scheduling.
3352 PAT is the pattern which might split.
3353 TRIAL is the insn providing PAT.
3354 LAST is nonzero if we should return the last insn of the sequence produced.
3355
3356 If this routine succeeds in splitting, it returns the first or last
3357 replacement insn depending on the value of LAST. Otherwise, it
3358 returns TRIAL. If the insn to be returned can be split, it will be. */
3359
3360 rtx
3361 try_split (pat, trial, last)
3362 rtx pat, trial;
3363 int last;
3364 {
3365 rtx before = PREV_INSN (trial);
3366 rtx after = NEXT_INSN (trial);
3367 int has_barrier = 0;
3368 rtx tem;
3369 rtx note, seq;
3370 int probability;
3371
3372 if (any_condjump_p (trial)
3373 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3374 split_branch_probability = INTVAL (XEXP (note, 0));
3375 probability = split_branch_probability;
3376
3377 seq = split_insns (pat, trial);
3378
3379 split_branch_probability = -1;
3380
3381 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3382 We may need to handle this specially. */
3383 if (after && GET_CODE (after) == BARRIER)
3384 {
3385 has_barrier = 1;
3386 after = NEXT_INSN (after);
3387 }
3388
3389 if (seq)
3390 {
3391 /* Sometimes there will be only one insn in that list; this case will
3392 normally arise only when we want it in turn to be split (SFmode on
3393 the 29k is an example). */
3394 if (NEXT_INSN (seq) != NULL_RTX)
3395 {
3396 rtx insn_last, insn;
3397 int njumps = 0;
3398
3399 /* Avoid infinite loop if any insn of the result matches
3400 the original pattern. */
3401 insn_last = seq;
3402 while (1)
3403 {
3404 if (INSN_P (insn_last)
3405 && rtx_equal_p (PATTERN (insn_last), pat))
3406 return trial;
3407 if (NEXT_INSN (insn_last) == NULL_RTX)
3408 break;
3409 insn_last = NEXT_INSN (insn_last);
3410 }
3411
3412 /* Mark labels. */
3413 insn = insn_last;
3414 while (insn != NULL_RTX)
3415 {
3416 if (GET_CODE (insn) == JUMP_INSN)
3417 {
3418 mark_jump_label (PATTERN (insn), insn, 0);
3419 njumps++;
3420 if (probability != -1
3421 && any_condjump_p (insn)
3422 && !find_reg_note (insn, REG_BR_PROB, 0))
3423 {
3424 /* We can preserve the REG_BR_PROB notes only if exactly
3425 one jump is created; otherwise the machine description
3426 is responsible for this step, using the
3427 split_branch_probability variable.  */
3428 if (njumps != 1)
3429 abort ();
3430 REG_NOTES (insn)
3431 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3432 GEN_INT (probability),
3433 REG_NOTES (insn));
3434 }
3435 }
3436
3437 insn = PREV_INSN (insn);
3438 }
3439
3440 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3441 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3442 if (GET_CODE (trial) == CALL_INSN)
3443 {
3444 insn = insn_last;
3445 while (insn != NULL_RTX)
3446 {
3447 if (GET_CODE (insn) == CALL_INSN)
3448 CALL_INSN_FUNCTION_USAGE (insn)
3449 = CALL_INSN_FUNCTION_USAGE (trial);
3450
3451 insn = PREV_INSN (insn);
3452 }
3453 }
3454
3455 /* Copy notes, particularly those related to the CFG. */
3456 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3457 {
3458 switch (REG_NOTE_KIND (note))
3459 {
3460 case REG_EH_REGION:
3461 insn = insn_last;
3462 while (insn != NULL_RTX)
3463 {
3464 if (GET_CODE (insn) == CALL_INSN
3465 || (flag_non_call_exceptions
3466 && may_trap_p (PATTERN (insn))))
3467 REG_NOTES (insn)
3468 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3469 XEXP (note, 0),
3470 REG_NOTES (insn));
3471 insn = PREV_INSN (insn);
3472 }
3473 break;
3474
3475 case REG_NORETURN:
3476 case REG_SETJMP:
3477 case REG_ALWAYS_RETURN:
3478 insn = insn_last;
3479 while (insn != NULL_RTX)
3480 {
3481 if (GET_CODE (insn) == CALL_INSN)
3482 REG_NOTES (insn)
3483 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3484 XEXP (note, 0),
3485 REG_NOTES (insn));
3486 insn = PREV_INSN (insn);
3487 }
3488 break;
3489
3490 case REG_NON_LOCAL_GOTO:
3491 insn = insn_last;
3492 while (insn != NULL_RTX)
3493 {
3494 if (GET_CODE (insn) == JUMP_INSN)
3495 REG_NOTES (insn)
3496 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3497 XEXP (note, 0),
3498 REG_NOTES (insn));
3499 insn = PREV_INSN (insn);
3500 }
3501 break;
3502
3503 default:
3504 break;
3505 }
3506 }
3507
3508 /* If there are LABELS inside the split insns, increment the
3509 usage count so we don't delete the label. */
3510 if (GET_CODE (trial) == INSN)
3511 {
3512 insn = insn_last;
3513 while (insn != NULL_RTX)
3514 {
3515 if (GET_CODE (insn) == INSN)
3516 mark_label_nuses (PATTERN (insn));
3517
3518 insn = PREV_INSN (insn);
3519 }
3520 }
3521
3522 tem = emit_insn_after_scope (seq, trial, INSN_SCOPE (trial));
3523
3524 delete_insn (trial);
3525 if (has_barrier)
3526 emit_barrier_after (tem);
3527
3528 /* Recursively call try_split for each new insn created; by the
3529 time control returns here that insn will be fully split, so
3530 set LAST and continue from the insn after the one returned.
3531 We can't use next_active_insn here since AFTER may be a note.
3532 Ignore deleted insns, which can occur if not optimizing.  */
3533 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3534 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3535 tem = try_split (PATTERN (tem), tem, 1);
3536 }
3537 /* Avoid infinite loop if the result matches the original pattern. */
3538 else if (rtx_equal_p (PATTERN (seq), pat))
3539 return trial;
3540 else
3541 {
3542 PATTERN (trial) = PATTERN (seq);
3543 INSN_CODE (trial) = -1;
3544 try_split (PATTERN (trial), trial, last);
3545 }
3546
3547 /* Return either the first or the last insn, depending on which was
3548 requested. */
3549 return last
3550 ? (after ? PREV_INSN (after) : last_insn)
3551 : NEXT_INSN (before);
3552 }
3553
3554 return trial;
3555 }
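
/* Editor's note (sketch, not in the original source): a pass that wants
   INSN fully split simply calls

       insn = try_split (PATTERN (insn), insn, 1);

   and keeps the result, which is the original insn when no split
   applies.  */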
3556 \f
3557 /* Make and return an INSN rtx, initializing all its slots.
3558 Store PATTERN in the pattern slots. */
3559
3560 rtx
3561 make_insn_raw (pattern)
3562 rtx pattern;
3563 {
3564 rtx insn;
3565
3566 insn = rtx_alloc (INSN);
3567
3568 INSN_UID (insn) = cur_insn_uid++;
3569 PATTERN (insn) = pattern;
3570 INSN_CODE (insn) = -1;
3571 LOG_LINKS (insn) = NULL;
3572 REG_NOTES (insn) = NULL;
3573 INSN_SCOPE (insn) = NULL;
3574 BLOCK_FOR_INSN (insn) = NULL;
3575
3576 #ifdef ENABLE_RTL_CHECKING
3577 if (insn
3578 && INSN_P (insn)
3579 && (returnjump_p (insn)
3580 || (GET_CODE (PATTERN (insn)) == SET
3581 && SET_DEST (PATTERN (insn)) == pc_rtx)))
3582 {
3583 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
3584 debug_rtx (insn);
3585 }
3586 #endif
3587
3588 return insn;
3589 }
3590
3591 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3592
3593 static rtx
3594 make_jump_insn_raw (pattern)
3595 rtx pattern;
3596 {
3597 rtx insn;
3598
3599 insn = rtx_alloc (JUMP_INSN);
3600 INSN_UID (insn) = cur_insn_uid++;
3601
3602 PATTERN (insn) = pattern;
3603 INSN_CODE (insn) = -1;
3604 LOG_LINKS (insn) = NULL;
3605 REG_NOTES (insn) = NULL;
3606 JUMP_LABEL (insn) = NULL;
3607 INSN_SCOPE (insn) = NULL;
3608 BLOCK_FOR_INSN (insn) = NULL;
3609
3610 return insn;
3611 }
3612
3613 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3614
3615 static rtx
3616 make_call_insn_raw (pattern)
3617 rtx pattern;
3618 {
3619 rtx insn;
3620
3621 insn = rtx_alloc (CALL_INSN);
3622 INSN_UID (insn) = cur_insn_uid++;
3623
3624 PATTERN (insn) = pattern;
3625 INSN_CODE (insn) = -1;
3626 LOG_LINKS (insn) = NULL;
3627 REG_NOTES (insn) = NULL;
3628 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3629 INSN_SCOPE (insn) = NULL;
3630 BLOCK_FOR_INSN (insn) = NULL;
3631
3632 return insn;
3633 }
3634 \f
3635 /* Add INSN to the end of the doubly-linked list.
3636 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3637
3638 void
3639 add_insn (insn)
3640 rtx insn;
3641 {
3642 PREV_INSN (insn) = last_insn;
3643 NEXT_INSN (insn) = 0;
3644
3645 if (NULL != last_insn)
3646 NEXT_INSN (last_insn) = insn;
3647
3648 if (NULL == first_insn)
3649 first_insn = insn;
3650
3651 last_insn = insn;
3652 }
3653
3654 /* Add INSN into the doubly-linked list after insn AFTER. This and
3655 the next should be the only functions called to insert an insn once
3656 delay slots have been filled since only they know how to update a
3657 SEQUENCE. */
3658
3659 void
3660 add_insn_after (insn, after)
3661 rtx insn, after;
3662 {
3663 rtx next = NEXT_INSN (after);
3664 basic_block bb;
3665
3666 if (optimize && INSN_DELETED_P (after))
3667 abort ();
3668
3669 NEXT_INSN (insn) = next;
3670 PREV_INSN (insn) = after;
3671
3672 if (next)
3673 {
3674 PREV_INSN (next) = insn;
3675 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3676 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3677 }
3678 else if (last_insn == after)
3679 last_insn = insn;
3680 else
3681 {
3682 struct sequence_stack *stack = seq_stack;
3683 /* Scan all pending sequences too. */
3684 for (; stack; stack = stack->next)
3685 if (after == stack->last)
3686 {
3687 stack->last = insn;
3688 break;
3689 }
3690
3691 if (stack == 0)
3692 abort ();
3693 }
3694
3695 if (GET_CODE (after) != BARRIER
3696 && GET_CODE (insn) != BARRIER
3697 && (bb = BLOCK_FOR_INSN (after)))
3698 {
3699 set_block_for_insn (insn, bb);
3700 if (INSN_P (insn))
3701 bb->flags |= BB_DIRTY;
3702 /* Should not happen as first in the BB is always
3703 either NOTE or LABEL. */
3704 if (bb->end == after
3705 /* Avoid clobbering of structure when creating new BB. */
3706 && GET_CODE (insn) != BARRIER
3707 && (GET_CODE (insn) != NOTE
3708 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3709 bb->end = insn;
3710 }
3711
3712 NEXT_INSN (after) = insn;
3713 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
3714 {
3715 rtx sequence = PATTERN (after);
3716 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3717 }
3718 }
3719
3720 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3721 the previous should be the only functions called to insert an insn once
3722 delay slots have been filled since only they know how to update a
3723 SEQUENCE. */
3724
3725 void
3726 add_insn_before (insn, before)
3727 rtx insn, before;
3728 {
3729 rtx prev = PREV_INSN (before);
3730 basic_block bb;
3731
3732 if (optimize && INSN_DELETED_P (before))
3733 abort ();
3734
3735 PREV_INSN (insn) = prev;
3736 NEXT_INSN (insn) = before;
3737
3738 if (prev)
3739 {
3740 NEXT_INSN (prev) = insn;
3741 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3742 {
3743 rtx sequence = PATTERN (prev);
3744 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3745 }
3746 }
3747 else if (first_insn == before)
3748 first_insn = insn;
3749 else
3750 {
3751 struct sequence_stack *stack = seq_stack;
3752 /* Scan all pending sequences too. */
3753 for (; stack; stack = stack->next)
3754 if (before == stack->first)
3755 {
3756 stack->first = insn;
3757 break;
3758 }
3759
3760 if (stack == 0)
3761 abort ();
3762 }
3763
3764 if (GET_CODE (before) != BARRIER
3765 && GET_CODE (insn) != BARRIER
3766 && (bb = BLOCK_FOR_INSN (before)))
3767 {
3768 set_block_for_insn (insn, bb);
3769 if (INSN_P (insn))
3770 bb->flags |= BB_DIRTY;
3771 /* Should not happen as first in the BB is always
3772 either NOTE or LABEL.  */
3773 if (bb->head == insn
3774 /* Avoid clobbering of structure when creating new BB. */
3775 && GET_CODE (insn) != BARRIER
3776 && (GET_CODE (insn) != NOTE
3777 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3778 abort ();
3779 }
3780
3781 PREV_INSN (before) = insn;
3782 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
3783 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3784 }
3785
3786 /* Remove an insn from its doubly-linked list. This function knows how
3787 to handle sequences. */
3788 void
3789 remove_insn (insn)
3790 rtx insn;
3791 {
3792 rtx next = NEXT_INSN (insn);
3793 rtx prev = PREV_INSN (insn);
3794 basic_block bb;
3795
3796 if (prev)
3797 {
3798 NEXT_INSN (prev) = next;
3799 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3800 {
3801 rtx sequence = PATTERN (prev);
3802 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3803 }
3804 }
3805 else if (first_insn == insn)
3806 first_insn = next;
3807 else
3808 {
3809 struct sequence_stack *stack = seq_stack;
3810 /* Scan all pending sequences too. */
3811 for (; stack; stack = stack->next)
3812 if (insn == stack->first)
3813 {
3814 stack->first = next;
3815 break;
3816 }
3817
3818 if (stack == 0)
3819 abort ();
3820 }
3821
3822 if (next)
3823 {
3824 PREV_INSN (next) = prev;
3825 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3826 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3827 }
3828 else if (last_insn == insn)
3829 last_insn = prev;
3830 else
3831 {
3832 struct sequence_stack *stack = seq_stack;
3833 /* Scan all pending sequences too. */
3834 for (; stack; stack = stack->next)
3835 if (insn == stack->last)
3836 {
3837 stack->last = prev;
3838 break;
3839 }
3840
3841 if (stack == 0)
3842 abort ();
3843 }
3844 if (GET_CODE (insn) != BARRIER
3845 && (bb = BLOCK_FOR_INSN (insn)))
3846 {
3847 if (INSN_P (insn))
3848 bb->flags |= BB_DIRTY;
3849 if (bb->head == insn)
3850 {
3851 /* Never ever delete the basic block note without deleting the whole
3852 basic block. */
3853 if (GET_CODE (insn) == NOTE)
3854 abort ();
3855 bb->head = next;
3856 }
3857 if (bb->end == insn)
3858 bb->end = prev;
3859 }
3860 }
3861
3862 /* Delete all insns made since FROM.
3863 FROM becomes the new last instruction. */
3864
3865 void
3866 delete_insns_since (from)
3867 rtx from;
3868 {
3869 if (from == 0)
3870 first_insn = 0;
3871 else
3872 NEXT_INSN (from) = 0;
3873 last_insn = from;
3874 }
3875
3876 /* This function is deprecated; please use sequences instead.
3877
3878 Move a consecutive bunch of insns to a different place in the chain.
3879 The insns to be moved are those between FROM and TO.
3880 They are moved to a new position after the insn AFTER.
3881 AFTER must not be FROM or TO or any insn in between.
3882
3883 This function does not know about SEQUENCEs and hence should not be
3884 called after delay-slot filling has been done. */
3885
3886 void
3887 reorder_insns_nobb (from, to, after)
3888 rtx from, to, after;
3889 {
3890 /* Splice this bunch out of where it is now. */
3891 if (PREV_INSN (from))
3892 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3893 if (NEXT_INSN (to))
3894 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3895 if (last_insn == to)
3896 last_insn = PREV_INSN (from);
3897 if (first_insn == from)
3898 first_insn = NEXT_INSN (to);
3899
3900 /* Make the new neighbors point to it and it to them. */
3901 if (NEXT_INSN (after))
3902 PREV_INSN (NEXT_INSN (after)) = to;
3903
3904 NEXT_INSN (to) = NEXT_INSN (after);
3905 PREV_INSN (from) = after;
3906 NEXT_INSN (after) = from;
3907 if (after == last_insn)
3908 last_insn = to;
3909 }
3910
3911 /* Same as function above, but take care to update BB boundaries. */
3912 void
3913 reorder_insns (from, to, after)
3914 rtx from, to, after;
3915 {
3916 rtx prev = PREV_INSN (from);
3917 basic_block bb, bb2;
3918
3919 reorder_insns_nobb (from, to, after);
3920
3921 if (GET_CODE (after) != BARRIER
3922 && (bb = BLOCK_FOR_INSN (after)))
3923 {
3924 rtx x;
3925 bb->flags |= BB_DIRTY;
3926
3927 if (GET_CODE (from) != BARRIER
3928 && (bb2 = BLOCK_FOR_INSN (from)))
3929 {
3930 if (bb2->end == to)
3931 bb2->end = prev;
3932 bb2->flags |= BB_DIRTY;
3933 }
3934
3935 if (bb->end == after)
3936 bb->end = to;
3937
3938 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3939 set_block_for_insn (x, bb);
3940 }
3941 }
3942
3943 /* Return the line note insn preceding INSN. */
3944
3945 static rtx
3946 find_line_note (insn)
3947 rtx insn;
3948 {
3949 if (no_line_numbers)
3950 return 0;
3951
3952 for (; insn; insn = PREV_INSN (insn))
3953 if (GET_CODE (insn) == NOTE
3954 && NOTE_LINE_NUMBER (insn) >= 0)
3955 break;
3956
3957 return insn;
3958 }
3959
3960 /* Like reorder_insns, but inserts line notes to preserve the line numbers
3961 of the moved insns when debugging. This may insert a note between AFTER
3962 and FROM, and another one after TO. */
3963
3964 void
3965 reorder_insns_with_line_notes (from, to, after)
3966 rtx from, to, after;
3967 {
3968 rtx from_line = find_line_note (from);
3969 rtx after_line = find_line_note (after);
3970
3971 reorder_insns (from, to, after);
3972
3973 if (from_line == after_line)
3974 return;
3975
3976 if (from_line)
3977 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3978 NOTE_LINE_NUMBER (from_line),
3979 after);
3980 if (after_line)
3981 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3982 NOTE_LINE_NUMBER (after_line),
3983 to);
3984 }
3985
3986 /* Remove unnecessary notes from the instruction stream. */
3987
3988 void
3989 remove_unnecessary_notes ()
3990 {
3991 rtx block_stack = NULL_RTX;
3992 rtx eh_stack = NULL_RTX;
3993 rtx insn;
3994 rtx next;
3995 rtx tmp;
3996
3997 /* We must not remove the first instruction in the function because
3998 the compiler depends on the first instruction being a note. */
3999 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
4000 {
4001 /* Remember what's next. */
4002 next = NEXT_INSN (insn);
4003
4004 /* We're only interested in notes. */
4005 if (GET_CODE (insn) != NOTE)
4006 continue;
4007
4008 switch (NOTE_LINE_NUMBER (insn))
4009 {
4010 case NOTE_INSN_DELETED:
4011 case NOTE_INSN_LOOP_END_TOP_COND:
4012 remove_insn (insn);
4013 break;
4014
4015 case NOTE_INSN_EH_REGION_BEG:
4016 eh_stack = alloc_INSN_LIST (insn, eh_stack);
4017 break;
4018
4019 case NOTE_INSN_EH_REGION_END:
4020 /* Too many end notes. */
4021 if (eh_stack == NULL_RTX)
4022 abort ();
4023 /* Mismatched nesting. */
4024 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
4025 abort ();
4026 tmp = eh_stack;
4027 eh_stack = XEXP (eh_stack, 1);
4028 free_INSN_LIST_node (tmp);
4029 break;
4030
4031 case NOTE_INSN_BLOCK_BEG:
4032 /* By now, all notes indicating lexical blocks should have
4033 NOTE_BLOCK filled in. */
4034 if (NOTE_BLOCK (insn) == NULL_TREE)
4035 abort ();
4036 block_stack = alloc_INSN_LIST (insn, block_stack);
4037 break;
4038
4039 case NOTE_INSN_BLOCK_END:
4040 /* Too many end notes. */
4041 if (block_stack == NULL_RTX)
4042 abort ();
4043 /* Mismatched nesting. */
4044 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
4045 abort ();
4046 tmp = block_stack;
4047 block_stack = XEXP (block_stack, 1);
4048 free_INSN_LIST_node (tmp);
4049
4050 /* Scan back to see if there are any non-note instructions
4051 between INSN and the beginning of this block. If not,
4052 then there is no PC range in the generated code that will
4053 actually be in this block, so there's no point in
4054 remembering the existence of the block. */
4055 for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
4056 {
4057 /* This block contains a real instruction. Note that we
4058 don't include labels; if the only thing in the block
4059 is a label, then there are still no PC values that
4060 lie within the block. */
4061 if (INSN_P (tmp))
4062 break;
4063
4064 /* We're only interested in NOTEs. */
4065 if (GET_CODE (tmp) != NOTE)
4066 continue;
4067
4068 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
4069 {
4070 /* We just verified that this BLOCK matches us with
4071 the block_stack check above. Never delete the
4072 BLOCK for the outermost scope of the function; we
4073 can refer to names from that scope even if the
4074 block notes are messed up. */
4075 if (! is_body_block (NOTE_BLOCK (insn))
4076 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
4077 {
4078 remove_insn (tmp);
4079 remove_insn (insn);
4080 }
4081 break;
4082 }
4083 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
4084 /* There's a nested block. We need to leave the
4085 current block in place since otherwise the debugger
4086 wouldn't be able to show symbols from our block in
4087 the nested block. */
4088 break;
4089 }
4090 }
4091 }
4092
4093 /* Too many begin notes. */
4094 if (block_stack || eh_stack)
4095 abort ();
4096 }
4097
4098 \f
4099 /* Emit insn(s) of given code and pattern
4100 at a specified place within the doubly-linked list.
4101
4102 All of the emit_foo global entry points accept an object
4103 X which is either an insn list or a PATTERN of a single
4104 instruction.
4105
4106 There are thus a few canonical ways to generate code and
4107 emit it at a specific place in the instruction stream. For
4108 example, consider the instruction named SPOT and the fact that
4109 we would like to emit some instructions before SPOT. We might
4110 do it like this:
4111
4112 start_sequence ();
4113 ... emit the new instructions ...
4114 insns_head = get_insns ();
4115 end_sequence ();
4116
4117 emit_insn_before (insns_head, SPOT);
4118
4119 It used to be common to generate SEQUENCE rtl instead, but that
4120 is a relic of the past which no longer occurs. The reason is that
4121 SEQUENCE rtl badly fragments RTL memory, since the SEQUENCE
4122 generated would almost certainly die right after it was created. */
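/* A minimal sketch of the symmetric case (illustrative only; SPOT is a
   placeholder insn, not a real symbol): to emit new instructions after
   SPOT rather than before it, use the same sequence idiom and finish
   with emit_insn_after:

   start_sequence ();
   ... emit the new instructions ...
   insns_head = get_insns ();
   end_sequence ();

   emit_insn_after (insns_head, SPOT);  */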
4123
4124 /* Make X be output before the instruction BEFORE. */
4125
4126 rtx
4127 emit_insn_before (x, before)
4128 rtx x, before;
4129 {
4130 rtx last = before;
4131 rtx insn;
4132
4133 #ifdef ENABLE_RTL_CHECKING
4134 if (before == NULL_RTX)
4135 abort ();
4136 #endif
4137
4138 if (x == NULL_RTX)
4139 return last;
4140
4141 switch (GET_CODE (x))
4142 {
4143 case INSN:
4144 case JUMP_INSN:
4145 case CALL_INSN:
4146 case CODE_LABEL:
4147 case BARRIER:
4148 case NOTE:
4149 insn = x;
4150 while (insn)
4151 {
4152 rtx next = NEXT_INSN (insn);
4153 add_insn_before (insn, before);
4154 last = insn;
4155 insn = next;
4156 }
4157 break;
4158
4159 #ifdef ENABLE_RTL_CHECKING
4160 case SEQUENCE:
4161 abort ();
4162 break;
4163 #endif
4164
4165 default:
4166 last = make_insn_raw (x);
4167 add_insn_before (last, before);
4168 break;
4169 }
4170
4171 return last;
4172 }
4173
4174 /* Make an instruction with body X and code JUMP_INSN
4175 and output it before the instruction BEFORE. */
4176
4177 rtx
4178 emit_jump_insn_before (x, before)
4179 rtx x, before;
4180 {
4181 rtx insn, last = NULL_RTX;
4182
4183 #ifdef ENABLE_RTL_CHECKING
4184 if (before == NULL_RTX)
4185 abort ();
4186 #endif
4187
4188 switch (GET_CODE (x))
4189 {
4190 case INSN:
4191 case JUMP_INSN:
4192 case CALL_INSN:
4193 case CODE_LABEL:
4194 case BARRIER:
4195 case NOTE:
4196 insn = x;
4197 while (insn)
4198 {
4199 rtx next = NEXT_INSN (insn);
4200 add_insn_before (insn, before);
4201 last = insn;
4202 insn = next;
4203 }
4204 break;
4205
4206 #ifdef ENABLE_RTL_CHECKING
4207 case SEQUENCE:
4208 abort ();
4209 break;
4210 #endif
4211
4212 default:
4213 last = make_jump_insn_raw (x);
4214 add_insn_before (last, before);
4215 break;
4216 }
4217
4218 return last;
4219 }
4220
4221 /* Make an instruction with body X and code CALL_INSN
4222 and output it before the instruction BEFORE. */
4223
4224 rtx
4225 emit_call_insn_before (x, before)
4226 rtx x, before;
4227 {
4228 rtx last = NULL_RTX, insn;
4229
4230 #ifdef ENABLE_RTL_CHECKING
4231 if (before == NULL_RTX)
4232 abort ();
4233 #endif
4234
4235 switch (GET_CODE (x))
4236 {
4237 case INSN:
4238 case JUMP_INSN:
4239 case CALL_INSN:
4240 case CODE_LABEL:
4241 case BARRIER:
4242 case NOTE:
4243 insn = x;
4244 while (insn)
4245 {
4246 rtx next = NEXT_INSN (insn);
4247 add_insn_before (insn, before);
4248 last = insn;
4249 insn = next;
4250 }
4251 break;
4252
4253 #ifdef ENABLE_RTL_CHECKING
4254 case SEQUENCE:
4255 abort ();
4256 break;
4257 #endif
4258
4259 default:
4260 last = make_call_insn_raw (x);
4261 add_insn_before (last, before);
4262 break;
4263 }
4264
4265 return last;
4266 }
4267
4268 /* Make an insn of code BARRIER
4269 and output it before the insn BEFORE. */
4270
4271 rtx
4272 emit_barrier_before (before)
4273 rtx before;
4274 {
4275 rtx insn = rtx_alloc (BARRIER);
4276
4277 INSN_UID (insn) = cur_insn_uid++;
4278
4279 add_insn_before (insn, before);
4280 return insn;
4281 }
4282
4283 /* Emit the label LABEL before the insn BEFORE. */
4284
4285 rtx
4286 emit_label_before (label, before)
4287 rtx label, before;
4288 {
4289 /* This can be called twice for the same label as a result of the
4290 confusion that follows a syntax error! So make it harmless. */
4291 if (INSN_UID (label) == 0)
4292 {
4293 INSN_UID (label) = cur_insn_uid++;
4294 add_insn_before (label, before);
4295 }
4296
4297 return label;
4298 }
4299
4300 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4301
4302 rtx
4303 emit_note_before (subtype, before)
4304 int subtype;
4305 rtx before;
4306 {
4307 rtx note = rtx_alloc (NOTE);
4308 INSN_UID (note) = cur_insn_uid++;
4309 NOTE_SOURCE_FILE (note) = 0;
4310 NOTE_LINE_NUMBER (note) = subtype;
4311 BLOCK_FOR_INSN (note) = NULL;
4312
4313 add_insn_before (note, before);
4314 return note;
4315 }
4316 \f
4317 /* Helper for emit_insn_after, handles lists of instructions
4318 efficiently. */
4319
4320 static rtx emit_insn_after_1 PARAMS ((rtx, rtx));
4321
4322 static rtx
4323 emit_insn_after_1 (first, after)
4324 rtx first, after;
4325 {
4326 rtx last;
4327 rtx after_after;
4328 basic_block bb;
4329
4330 if (GET_CODE (after) != BARRIER
4331 && (bb = BLOCK_FOR_INSN (after)))
4332 {
4333 bb->flags |= BB_DIRTY;
4334 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4335 if (GET_CODE (last) != BARRIER)
4336 set_block_for_insn (last, bb);
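/* The loop above stops at the final insn in the list; the test below
   then handles that last insn. */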
4337 if (GET_CODE (last) != BARRIER)
4338 set_block_for_insn (last, bb);
4339 if (bb->end == after)
4340 bb->end = last;
4341 }
4342 else
4343 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4344 continue;
4345
4346 after_after = NEXT_INSN (after);
4347
4348 NEXT_INSN (after) = first;
4349 PREV_INSN (first) = after;
4350 NEXT_INSN (last) = after_after;
4351 if (after_after)
4352 PREV_INSN (after_after) = last;
4353
4354 if (after == last_insn)
4355 last_insn = last;
4356 return last;
4357 }
4358
4359 /* Make X be output after the insn AFTER. */
4360
4361 rtx
4362 emit_insn_after (x, after)
4363 rtx x, after;
4364 {
4365 rtx last = after;
4366
4367 #ifdef ENABLE_RTL_CHECKING
4368 if (after == NULL_RTX)
4369 abort ();
4370 #endif
4371
4372 if (x == NULL_RTX)
4373 return last;
4374
4375 switch (GET_CODE (x))
4376 {
4377 case INSN:
4378 case JUMP_INSN:
4379 case CALL_INSN:
4380 case CODE_LABEL:
4381 case BARRIER:
4382 case NOTE:
4383 last = emit_insn_after_1 (x, after);
4384 break;
4385
4386 #ifdef ENABLE_RTL_CHECKING
4387 case SEQUENCE:
4388 abort ();
4389 break;
4390 #endif
4391
4392 default:
4393 last = make_insn_raw (x);
4394 add_insn_after (last, after);
4395 break;
4396 }
4397
4398 return last;
4399 }
4400
4401 /* Similar to emit_insn_after, except that line notes are to be inserted so
4402 as to act as if this insn were at FROM. */
4403
4404 void
4405 emit_insn_after_with_line_notes (x, after, from)
4406 rtx x, after, from;
4407 {
4408 rtx from_line = find_line_note (from);
4409 rtx after_line = find_line_note (after);
4410 rtx insn = emit_insn_after (x, after);
4411
4412 if (from_line)
4413 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
4414 NOTE_LINE_NUMBER (from_line),
4415 after);
4416
4417 if (after_line)
4418 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
4419 NOTE_LINE_NUMBER (after_line),
4420 insn);
4421 }
4422
4423 /* Make an insn of code JUMP_INSN with body X
4424 and output it after the insn AFTER. */
4425
4426 rtx
4427 emit_jump_insn_after (x, after)
4428 rtx x, after;
4429 {
4430 rtx last;
4431
4432 #ifdef ENABLE_RTL_CHECKING
4433 if (after == NULL_RTX)
4434 abort ();
4435 #endif
4436
4437 switch (GET_CODE (x))
4438 {
4439 case INSN:
4440 case JUMP_INSN:
4441 case CALL_INSN:
4442 case CODE_LABEL:
4443 case BARRIER:
4444 case NOTE:
4445 last = emit_insn_after_1 (x, after);
4446 break;
4447
4448 #ifdef ENABLE_RTL_CHECKING
4449 case SEQUENCE:
4450 abort ();
4451 break;
4452 #endif
4453
4454 default:
4455 last = make_jump_insn_raw (x);
4456 add_insn_after (last, after);
4457 break;
4458 }
4459
4460 return last;
4461 }
4462
4463 /* Make an instruction with body X and code CALL_INSN
4464 and output it after the instruction AFTER. */
4465
4466 rtx
4467 emit_call_insn_after (x, after)
4468 rtx x, after;
4469 {
4470 rtx last;
4471
4472 #ifdef ENABLE_RTL_CHECKING
4473 if (after == NULL_RTX)
4474 abort ();
4475 #endif
4476
4477 switch (GET_CODE (x))
4478 {
4479 case INSN:
4480 case JUMP_INSN:
4481 case CALL_INSN:
4482 case CODE_LABEL:
4483 case BARRIER:
4484 case NOTE:
4485 last = emit_insn_after_1 (x, after);
4486 break;
4487
4488 #ifdef ENABLE_RTL_CHECKING
4489 case SEQUENCE:
4490 abort ();
4491 break;
4492 #endif
4493
4494 default:
4495 last = make_call_insn_raw (x);
4496 add_insn_after (last, after);
4497 break;
4498 }
4499
4500 return last;
4501 }
4502
4503 /* Make an insn of code BARRIER
4504 and output it after the insn AFTER. */
4505
4506 rtx
4507 emit_barrier_after (after)
4508 rtx after;
4509 {
4510 rtx insn = rtx_alloc (BARRIER);
4511
4512 INSN_UID (insn) = cur_insn_uid++;
4513
4514 add_insn_after (insn, after);
4515 return insn;
4516 }
4517
4518 /* Emit the label LABEL after the insn AFTER. */
4519
4520 rtx
4521 emit_label_after (label, after)
4522 rtx label, after;
4523 {
4524 /* This can be called twice for the same label
4525 as a result of the confusion that follows a syntax error!
4526 So make it harmless. */
4527 if (INSN_UID (label) == 0)
4528 {
4529 INSN_UID (label) = cur_insn_uid++;
4530 add_insn_after (label, after);
4531 }
4532
4533 return label;
4534 }
4535
4536 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4537
4538 rtx
4539 emit_note_after (subtype, after)
4540 int subtype;
4541 rtx after;
4542 {
4543 rtx note = rtx_alloc (NOTE);
4544 INSN_UID (note) = cur_insn_uid++;
4545 NOTE_SOURCE_FILE (note) = 0;
4546 NOTE_LINE_NUMBER (note) = subtype;
4547 BLOCK_FOR_INSN (note) = NULL;
4548 add_insn_after (note, after);
4549 return note;
4550 }
4551
4552 /* Emit a line note for FILE and LINE after the insn AFTER. */
4553
4554 rtx
4555 emit_line_note_after (file, line, after)
4556 const char *file;
4557 int line;
4558 rtx after;
4559 {
4560 rtx note;
4561
4562 if (no_line_numbers && line > 0)
4563 {
4564 cur_insn_uid++;
4565 return 0;
4566 }
4567
4568 note = rtx_alloc (NOTE);
4569 INSN_UID (note) = cur_insn_uid++;
4570 NOTE_SOURCE_FILE (note) = file;
4571 NOTE_LINE_NUMBER (note) = line;
4572 BLOCK_FOR_INSN (note) = NULL;
4573 add_insn_after (note, after);
4574 return note;
4575 }
4576 \f
4577 /* Like emit_insn_after, but set INSN_SCOPE according to SCOPE. */
4578 rtx
4579 emit_insn_after_scope (pattern, after, scope)
4580 rtx pattern, after;
4581 tree scope;
4582 {
4583 rtx last = emit_insn_after (pattern, after);
4584
4585 after = NEXT_INSN (after);
4586 while (1)
4587 {
4588 if (active_insn_p (after))
4589 INSN_SCOPE (after) = scope;
4590 if (after == last)
4591 break;
4592 after = NEXT_INSN (after);
4593 }
4594 return last;
4595 }
4596
4597 /* Like emit_jump_insn_after, but set INSN_SCOPE according to SCOPE. */
4598 rtx
4599 emit_jump_insn_after_scope (pattern, after, scope)
4600 rtx pattern, after;
4601 tree scope;
4602 {
4603 rtx last = emit_jump_insn_after (pattern, after);
4604
4605 after = NEXT_INSN (after);
4606 while (1)
4607 {
4608 if (active_insn_p (after))
4609 INSN_SCOPE (after) = scope;
4610 if (after == last)
4611 break;
4612 after = NEXT_INSN (after);
4613 }
4614 return last;
4615 }
4616
4617 /* Like emit_call_insn_after, but set INSN_SCOPE according to SCOPE. */
4618 rtx
4619 emit_call_insn_after_scope (pattern, after, scope)
4620 rtx pattern, after;
4621 tree scope;
4622 {
4623 rtx last = emit_call_insn_after (pattern, after);
4624
4625 after = NEXT_INSN (after);
4626 while (1)
4627 {
4628 if (active_insn_p (after))
4629 INSN_SCOPE (after) = scope;
4630 if (after == last)
4631 break;
4632 after = NEXT_INSN (after);
4633 }
4634 return last;
4635 }
4636
4637 /* Like emit_insn_before, but set INSN_SCOPE according to SCOPE. */
4638 rtx
4639 emit_insn_before_scope (pattern, before, scope)
4640 rtx pattern, before;
4641 tree scope;
4642 {
4643 rtx first = PREV_INSN (before);
4644 rtx last = emit_insn_before (pattern, before);
4645
4646 first = NEXT_INSN (first);
4647 while (1)
4648 {
4649 if (active_insn_p (first))
4650 INSN_SCOPE (first) = scope;
4651 if (first == last)
4652 break;
4653 first = NEXT_INSN (first);
4654 }
4655 return last;
4656 }
4657 \f
4658 /* Take X and emit it at the end of the doubly-linked
4659 INSN list.
4660
4661 Returns the last insn emitted. */
4662
4663 rtx
4664 emit_insn (x)
4665 rtx x;
4666 {
4667 rtx last = last_insn;
4668 rtx insn;
4669
4670 if (x == NULL_RTX)
4671 return last;
4672
4673 switch (GET_CODE (x))
4674 {
4675 case INSN:
4676 case JUMP_INSN:
4677 case CALL_INSN:
4678 case CODE_LABEL:
4679 case BARRIER:
4680 case NOTE:
4681 insn = x;
4682 while (insn)
4683 {
4684 rtx next = NEXT_INSN (insn);
4685 add_insn (insn);
4686 last = insn;
4687 insn = next;
4688 }
4689 break;
4690
4691 #ifdef ENABLE_RTL_CHECKING
4692 case SEQUENCE:
4693 abort ();
4694 break;
4695 #endif
4696
4697 default:
4698 last = make_insn_raw (x);
4699 add_insn (last);
4700 break;
4701 }
4702
4703 return last;
4704 }
4705
4706 /* Make an insn of code JUMP_INSN with pattern X
4707 and add it to the end of the doubly-linked list. */
4708
4709 rtx
4710 emit_jump_insn (x)
4711 rtx x;
4712 {
4713 rtx last = NULL_RTX, insn;
4714
4715 switch (GET_CODE (x))
4716 {
4717 case INSN:
4718 case JUMP_INSN:
4719 case CALL_INSN:
4720 case CODE_LABEL:
4721 case BARRIER:
4722 case NOTE:
4723 insn = x;
4724 while (insn)
4725 {
4726 rtx next = NEXT_INSN (insn);
4727 add_insn (insn);
4728 last = insn;
4729 insn = next;
4730 }
4731 break;
4732
4733 #ifdef ENABLE_RTL_CHECKING
4734 case SEQUENCE:
4735 abort ();
4736 break;
4737 #endif
4738
4739 default:
4740 last = make_jump_insn_raw (x);
4741 add_insn (last);
4742 break;
4743 }
4744
4745 return last;
4746 }
4747
4748 /* Make an insn of code CALL_INSN with pattern X
4749 and add it to the end of the doubly-linked list. */
4750
4751 rtx
4752 emit_call_insn (x)
4753 rtx x;
4754 {
4755 rtx insn;
4756
4757 switch (GET_CODE (x))
4758 {
4759 case INSN:
4760 case JUMP_INSN:
4761 case CALL_INSN:
4762 case CODE_LABEL:
4763 case BARRIER:
4764 case NOTE:
4765 insn = emit_insn (x);
4766 break;
4767
4768 #ifdef ENABLE_RTL_CHECKING
4769 case SEQUENCE:
4770 abort ();
4771 break;
4772 #endif
4773
4774 default:
4775 insn = make_call_insn_raw (x);
4776 add_insn (insn);
4777 break;
4778 }
4779
4780 return insn;
4781 }
4782
4783 /* Add the label LABEL to the end of the doubly-linked list. */
4784
4785 rtx
4786 emit_label (label)
4787 rtx label;
4788 {
4789 /* This can be called twice for the same label
4790 as a result of the confusion that follows a syntax error!
4791 So make it harmless. */
4792 if (INSN_UID (label) == 0)
4793 {
4794 INSN_UID (label) = cur_insn_uid++;
4795 add_insn (label);
4796 }
4797 return label;
4798 }
4799
4800 /* Make an insn of code BARRIER
4801 and add it to the end of the doubly-linked list. */
4802
4803 rtx
4804 emit_barrier ()
4805 {
4806 rtx barrier = rtx_alloc (BARRIER);
4807 INSN_UID (barrier) = cur_insn_uid++;
4808 add_insn (barrier);
4809 return barrier;
4810 }
4811
4812 /* Make an insn of code NOTE
4813 with data-fields specified by FILE and LINE
4814 and add it to the end of the doubly-linked list,
4815 but only if line-numbers are desired for debugging info. */
4816
4817 rtx
4818 emit_line_note (file, line)
4819 const char *file;
4820 int line;
4821 {
4822 set_file_and_line_for_stmt (file, line);
4823
4824 #if 0
4825 if (no_line_numbers)
4826 return 0;
4827 #endif
4828
4829 return emit_note (file, line);
4830 }
4831
4832 /* Make an insn of code NOTE
4833 with data-fields specified by FILE and LINE
4834 and add it to the end of the doubly-linked list.
4835 If it is a line-number NOTE, omit it if it matches the previous one. */
4836
4837 rtx
4838 emit_note (file, line)
4839 const char *file;
4840 int line;
4841 {
4842 rtx note;
4843
4844 if (line > 0)
4845 {
4846 if (file && last_filename && !strcmp (file, last_filename)
4847 && line == last_linenum)
4848 return 0;
4849 last_filename = file;
4850 last_linenum = line;
4851 }
4852
4853 if (no_line_numbers && line > 0)
4854 {
4855 cur_insn_uid++;
4856 return 0;
4857 }
4858
4859 note = rtx_alloc (NOTE);
4860 INSN_UID (note) = cur_insn_uid++;
4861 NOTE_SOURCE_FILE (note) = file;
4862 NOTE_LINE_NUMBER (note) = line;
4863 BLOCK_FOR_INSN (note) = NULL;
4864 add_insn (note);
4865 return note;
4866 }
4867
4868 /* Emit a NOTE, and don't omit it even if LINE is the previous note. */
4869
4870 rtx
4871 emit_line_note_force (file, line)
4872 const char *file;
4873 int line;
4874 {
4875 last_linenum = -1;
4876 return emit_line_note (file, line);
4877 }
4878
4879 /* Cause next statement to emit a line note even if the line number
4880 has not changed. This is used at the beginning of a function. */
4881
4882 void
4883 force_next_line_note ()
4884 {
4885 last_linenum = -1;
4886 }
4887
4888 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4889 note of this type already exists, remove it first. */
4890
4891 rtx
4892 set_unique_reg_note (insn, kind, datum)
4893 rtx insn;
4894 enum reg_note kind;
4895 rtx datum;
4896 {
4897 rtx note = find_reg_note (insn, kind, NULL_RTX);
4898
4899 switch (kind)
4900 {
4901 case REG_EQUAL:
4902 case REG_EQUIV:
4903 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4904 has multiple sets (some callers assume single_set
4905 means the insn only has one set, when in fact it
4906 means the insn has only one *useful* set). */
4907 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4908 {
4909 if (note)
4910 abort ();
4911 return NULL_RTX;
4912 }
4913
4914 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4915 It serves no useful purpose and breaks eliminate_regs. */
4916 if (GET_CODE (datum) == ASM_OPERANDS)
4917 return NULL_RTX;
4918 break;
4919
4920 default:
4921 break;
4922 }
4923
4924 if (note)
4925 {
4926 XEXP (note, 0) = datum;
4927 return note;
4928 }
4929
4930 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4931 return REG_NOTES (insn);
4932 }
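/* Illustrative usage sketch (not part of the original file): record
   that INSN computes the constant 42, replacing any prior REG_EQUAL
   note. INSN is assumed to be a single-set insn already in the chain.

   set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));  */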
4933 \f
4934 /* Return an indication of which type of insn should have X as a body.
4935 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4936
4937 enum rtx_code
4938 classify_insn (x)
4939 rtx x;
4940 {
4941 if (GET_CODE (x) == CODE_LABEL)
4942 return CODE_LABEL;
4943 if (GET_CODE (x) == CALL)
4944 return CALL_INSN;
4945 if (GET_CODE (x) == RETURN)
4946 return JUMP_INSN;
4947 if (GET_CODE (x) == SET)
4948 {
4949 if (SET_DEST (x) == pc_rtx)
4950 return JUMP_INSN;
4951 else if (GET_CODE (SET_SRC (x)) == CALL)
4952 return CALL_INSN;
4953 else
4954 return INSN;
4955 }
4956 if (GET_CODE (x) == PARALLEL)
4957 {
4958 int j;
4959 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4960 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4961 return CALL_INSN;
4962 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4963 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4964 return JUMP_INSN;
4965 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4966 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4967 return CALL_INSN;
4968 }
4969 return INSN;
4970 }
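/* Illustrative sketch (not part of the original file): a SET whose
   destination is the program counter classifies as a jump, so emit ()
   below will create a JUMP_INSN for it. LABEL is a placeholder
   CODE_LABEL assumed to exist.

   rtx pat = gen_rtx_SET (VOIDmode, pc_rtx,
                          gen_rtx_LABEL_REF (VOIDmode, label));
   if (classify_insn (pat) != JUMP_INSN)
     abort ();  */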
4971
4972 /* Emit the rtl pattern X as an appropriate kind of insn.
4973 If X is a label, it is simply added into the insn chain. */
4974
4975 rtx
4976 emit (x)
4977 rtx x;
4978 {
4979 enum rtx_code code = classify_insn (x);
4980
4981 if (code == CODE_LABEL)
4982 return emit_label (x);
4983 else if (code == INSN)
4984 return emit_insn (x);
4985 else if (code == JUMP_INSN)
4986 {
4987 rtx insn = emit_jump_insn (x);
4988 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4989 return emit_barrier ();
4990 return insn;
4991 }
4992 else if (code == CALL_INSN)
4993 return emit_call_insn (x);
4994 else
4995 abort ();
4996 }
4997 \f
4998 /* Space for free sequence stack entries. */
4999 static GTY ((deletable (""))) struct sequence_stack *free_sequence_stack;
5000
5001 /* Begin emitting insns to a sequence which can be packaged in an
5002 RTL_EXPR. If this sequence will contain something that might cause
5003 the compiler to pop arguments to function calls (because those
5004 pops have previously been deferred; see INHIBIT_DEFER_POP for more
5005 details), use do_pending_stack_adjust before calling this function.
5006 That will ensure that the deferred pops are not accidentally
5007 emitted in the middle of this sequence. */
5008
5009 void
5010 start_sequence ()
5011 {
5012 struct sequence_stack *tem;
5013
5014 if (free_sequence_stack != NULL)
5015 {
5016 tem = free_sequence_stack;
5017 free_sequence_stack = tem->next;
5018 }
5019 else
5020 tem = (struct sequence_stack *) ggc_alloc (sizeof (struct sequence_stack));
5021
5022 tem->next = seq_stack;
5023 tem->first = first_insn;
5024 tem->last = last_insn;
5025 tem->sequence_rtl_expr = seq_rtl_expr;
5026
5027 seq_stack = tem;
5028
5029 first_insn = 0;
5030 last_insn = 0;
5031 }
5032
5033 /* Similarly, but indicate that this sequence will be placed in T, an
5034 RTL_EXPR. See the documentation for start_sequence for more
5035 information about how to use this function. */
5036
5037 void
5038 start_sequence_for_rtl_expr (t)
5039 tree t;
5040 {
5041 start_sequence ();
5042
5043 seq_rtl_expr = t;
5044 }
5045
5046 /* Set up the insn chain starting with FIRST as the current sequence,
5047 saving the previously current one. See the documentation for
5048 start_sequence for more information about how to use this function. */
5049
5050 void
5051 push_to_sequence (first)
5052 rtx first;
5053 {
5054 rtx last;
5055
5056 start_sequence ();
5057
5058 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
5059
5060 first_insn = first;
5061 last_insn = last;
5062 }
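/* Illustrative usage sketch (not part of the original file): extend a
   previously saved insn chain SAVED (a placeholder variable) with more
   instructions, then capture the updated chain again.

   push_to_sequence (saved);
   ... emit additional instructions ...
   saved = get_insns ();
   end_sequence ();  */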
5063
5064 /* Set up the insn chain starting in FIRST and ending in LAST. */
5065
5066 void
5067 push_to_full_sequence (first, last)
5068 rtx first, last;
5069 {
5070 start_sequence ();
5071 first_insn = first;
5072 last_insn = last;
5073 /* We really should have the end of the insn chain here. */
5074 if (last && NEXT_INSN (last))
5075 abort ();
5076 }
5077
5078 /* Set up the outer-level insn chain
5079 as the current sequence, saving the previously current one. */
5080
5081 void
5082 push_topmost_sequence ()
5083 {
5084 struct sequence_stack *stack, *top = NULL;
5085
5086 start_sequence ();
5087
5088 for (stack = seq_stack; stack; stack = stack->next)
5089 top = stack;
5090
5091 first_insn = top->first;
5092 last_insn = top->last;
5093 seq_rtl_expr = top->sequence_rtl_expr;
5094 }
5095
5096 /* After emitting to the outer-level insn chain, update the outer-level
5097 insn chain, and restore the previous saved state. */
5098
5099 void
5100 pop_topmost_sequence ()
5101 {
5102 struct sequence_stack *stack, *top = NULL;
5103
5104 for (stack = seq_stack; stack; stack = stack->next)
5105 top = stack;
5106
5107 top->first = first_insn;
5108 top->last = last_insn;
5109 /* ??? Why don't we save seq_rtl_expr here? */
5110
5111 end_sequence ();
5112 }
5113
5114 /* After emitting to a sequence, restore previous saved state.
5115
5116 To get the contents of the sequence just made, you must call
5117 `get_insns' *before* calling here.
5118
5119 If the compiler might have deferred popping arguments while
5120 generating this sequence, and this sequence will not be immediately
5121 inserted into the instruction stream, use do_pending_stack_adjust
5122 before calling get_insns. That will ensure that the deferred
5123 pops are inserted into this sequence, and not into some random
5124 location in the instruction stream. See INHIBIT_DEFER_POP for more
5125 information about deferred popping of arguments. */
5126
5127 void
5128 end_sequence ()
5129 {
5130 struct sequence_stack *tem = seq_stack;
5131
5132 first_insn = tem->first;
5133 last_insn = tem->last;
5134 seq_rtl_expr = tem->sequence_rtl_expr;
5135 seq_stack = tem->next;
5136
5137 memset (tem, 0, sizeof (*tem));
5138 tem->next = free_sequence_stack;
5139 free_sequence_stack = tem;
5140 }
5141
5142 /* This works like end_sequence, but records the sequence just ended
5143 in *FIRST and *LAST. */
5144
5145 void
5146 end_full_sequence (first, last)
5147 rtx *first, *last;
5148 {
5149 *first = first_insn;
5150 *last = last_insn;
5151 end_sequence ();
5152 }
5153
5154 /* Return 1 if currently emitting into a sequence. */
5155
5156 int
5157 in_sequence_p ()
5158 {
5159 return seq_stack != 0;
5160 }
5161 \f
5162 /* Put the various virtual registers into REGNO_REG_RTX. */
5163
5164 void
5165 init_virtual_regs (es)
5166 struct emit_status *es;
5167 {
5168 rtx *ptr = es->x_regno_reg_rtx;
5169 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5170 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5171 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5172 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5173 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5174 }
5175
5176 \f
5177 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5178 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5179 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5180 static int copy_insn_n_scratches;
5181
5182 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5183 copied an ASM_OPERANDS.
5184 In that case, it is the original input-operand vector. */
5185 static rtvec orig_asm_operands_vector;
5186
5187 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5188 copied an ASM_OPERANDS.
5189 In that case, it is the copied input-operand vector. */
5190 static rtvec copy_asm_operands_vector;
5191
5192 /* Likewise for the constraints vector. */
5193 static rtvec orig_asm_constraints_vector;
5194 static rtvec copy_asm_constraints_vector;
5195
5196 /* Recursively create a new copy of an rtx for copy_insn.
5197 This function differs from copy_rtx in that it handles SCRATCHes and
5198 ASM_OPERANDs properly.
5199 Normally, this function is not used directly; use copy_insn as front end.
5200 However, you could first copy an insn pattern with copy_insn and then use
5201 this function afterwards to properly copy any REG_NOTEs containing
5202 SCRATCHes. */
5203
5204 rtx
5205 copy_insn_1 (orig)
5206 rtx orig;
5207 {
5208 rtx copy;
5209 int i, j;
5210 RTX_CODE code;
5211 const char *format_ptr;
5212
5213 code = GET_CODE (orig);
5214
5215 switch (code)
5216 {
5217 case REG:
5218 case QUEUED:
5219 case CONST_INT:
5220 case CONST_DOUBLE:
5221 case CONST_VECTOR:
5222 case SYMBOL_REF:
5223 case CODE_LABEL:
5224 case PC:
5225 case CC0:
5226 case ADDRESSOF:
5227 return orig;
5228
5229 case SCRATCH:
5230 for (i = 0; i < copy_insn_n_scratches; i++)
5231 if (copy_insn_scratch_in[i] == orig)
5232 return copy_insn_scratch_out[i];
5233 break;
5234
5235 case CONST:
5236 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
5237 a LABEL_REF, it isn't sharable. */
5238 if (GET_CODE (XEXP (orig, 0)) == PLUS
5239 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
5240 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
5241 return orig;
5242 break;
5243
5244 /* A MEM with a constant address is not sharable. The problem is that
5245 the constant address may need to be reloaded. If the mem is shared,
5246 then reloading one copy of this mem will cause all copies to appear
5247 to have been reloaded. */
5248
5249 default:
5250 break;
5251 }
5252
5253 copy = rtx_alloc (code);
5254
5255 /* Copy the various flags, and other information. We assume that
5256 all fields need copying, and then clear the fields that should
5257 not be copied. That is the sensible default behavior, and forces
5258 us to explicitly document why we are *not* copying a flag. */
5259 memcpy (copy, orig, sizeof (struct rtx_def) - sizeof (rtunion));
5260
5261 /* We do not copy the USED flag, which is used as a mark bit during
5262 walks over the RTL. */
5263 RTX_FLAG (copy, used) = 0;
5264
5265 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5266 if (GET_RTX_CLASS (code) == 'i')
5267 {
5268 RTX_FLAG (copy, jump) = 0;
5269 RTX_FLAG (copy, call) = 0;
5270 RTX_FLAG (copy, frame_related) = 0;
5271 }
5272
5273 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5274
5275 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5276 {
5277 copy->fld[i] = orig->fld[i];
5278 switch (*format_ptr++)
5279 {
5280 case 'e':
5281 if (XEXP (orig, i) != NULL)
5282 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5283 break;
5284
5285 case 'E':
5286 case 'V':
5287 if (XVEC (orig, i) == orig_asm_constraints_vector)
5288 XVEC (copy, i) = copy_asm_constraints_vector;
5289 else if (XVEC (orig, i) == orig_asm_operands_vector)
5290 XVEC (copy, i) = copy_asm_operands_vector;
5291 else if (XVEC (orig, i) != NULL)
5292 {
5293 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5294 for (j = 0; j < XVECLEN (copy, i); j++)
5295 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5296 }
5297 break;
5298
5299 case 't':
5300 case 'w':
5301 case 'i':
5302 case 's':
5303 case 'S':
5304 case 'u':
5305 case '0':
5306 /* These are left unchanged. */
5307 break;
5308
5309 default:
5310 abort ();
5311 }
5312 }
5313
5314 if (code == SCRATCH)
5315 {
5316 i = copy_insn_n_scratches++;
5317 if (i >= MAX_RECOG_OPERANDS)
5318 abort ();
5319 copy_insn_scratch_in[i] = orig;
5320 copy_insn_scratch_out[i] = copy;
5321 }
5322 else if (code == ASM_OPERANDS)
5323 {
5324 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5325 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5326 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5327 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5328 }
5329
5330 return copy;
5331 }
5332
5333 /* Create a new copy of an rtx.
5334 This function differs from copy_rtx in that it handles SCRATCHes and
5335 ASM_OPERANDs properly.
5336 INSN doesn't really have to be a full INSN; it could be just the
5337 pattern. */
5338 rtx
5339 copy_insn (insn)
5340 rtx insn;
5341 {
5342 copy_insn_n_scratches = 0;
5343 orig_asm_operands_vector = 0;
5344 orig_asm_constraints_vector = 0;
5345 copy_asm_operands_vector = 0;
5346 copy_asm_constraints_vector = 0;
5347 return copy_insn_1 (insn);
5348 }
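/* Illustrative usage sketch (not part of the original file), following
   the copy_insn_1 comment above: copy an insn's pattern first, then
   reuse the recorded SCRATCH mapping to copy notes that mention the
   same SCRATCHes.

   rtx pat = copy_insn (PATTERN (insn));
   rtx notes = copy_insn_1 (REG_NOTES (insn));  */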
5349
5350 /* Initialize data structures and variables in this file
5351 before generating rtl for each function. */
5352
5353 void
5354 init_emit ()
5355 {
5356 struct function *f = cfun;
5357
5358 f->emit = (struct emit_status *) ggc_alloc (sizeof (struct emit_status));
5359 first_insn = NULL;
5360 last_insn = NULL;
5361 seq_rtl_expr = NULL;
5362 cur_insn_uid = 1;
5363 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5364 last_linenum = 0;
5365 last_filename = 0;
5366 first_label_num = label_num;
5367 last_label_num = 0;
5368 seq_stack = NULL;
5369
5370 /* Init the tables that describe all the pseudo regs. */
5371
5372 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5373
5374 f->emit->regno_pointer_align
5375 = (unsigned char *) ggc_alloc_cleared (f->emit->regno_pointer_align_length
5376 * sizeof (unsigned char));
5377
5378 regno_reg_rtx
5379 = (rtx *) ggc_alloc (f->emit->regno_pointer_align_length * sizeof (rtx));
5380
5381 /* Put copies of all the hard registers into regno_reg_rtx. */
5382 memcpy (regno_reg_rtx,
5383 static_regno_reg_rtx,
5384 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5385
5386 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5387 init_virtual_regs (f->emit);
5388
5389 /* Indicate that the virtual registers and stack locations are
5390 all pointers. */
5391 REG_POINTER (stack_pointer_rtx) = 1;
5392 REG_POINTER (frame_pointer_rtx) = 1;
5393 REG_POINTER (hard_frame_pointer_rtx) = 1;
5394 REG_POINTER (arg_pointer_rtx) = 1;
5395
5396 REG_POINTER (virtual_incoming_args_rtx) = 1;
5397 REG_POINTER (virtual_stack_vars_rtx) = 1;
5398 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5399 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5400 REG_POINTER (virtual_cfa_rtx) = 1;
5401
5402 #ifdef STACK_BOUNDARY
5403 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5404 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5405 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5406 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5407
5408 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5409 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5410 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5411 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5412 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5413 #endif
5414
5415 #ifdef INIT_EXPANDERS
5416 INIT_EXPANDERS;
5417 #endif
5418 }
5419
5420 /* Generate the constant 0. */
5421
5422 static rtx
5423 gen_const_vector_0 (mode)
5424 enum machine_mode mode;
5425 {
5426 rtx tem;
5427 rtvec v;
5428 int units, i;
5429 enum machine_mode inner;
5430
5431 units = GET_MODE_NUNITS (mode);
5432 inner = GET_MODE_INNER (mode);
5433
5434 v = rtvec_alloc (units);
5435
5436 /* CONST0_RTX for the inner mode must have been set before we get here. */
5437 if (!CONST0_RTX (inner))
5438 abort ();
5439
5440 for (i = 0; i < units; ++i)
5441 RTVEC_ELT (v, i) = CONST0_RTX (inner);
5442
5443 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5444 return tem;
5445 }
5446
5447 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but return the shared
5448 CONST0_RTX for MODE when all elements are zero. */
5449 rtx
5450 gen_rtx_CONST_VECTOR (mode, v)
5451 enum machine_mode mode;
5452 rtvec v;
5453 {
5454 rtx inner_zero = CONST0_RTX (GET_MODE_INNER (mode));
5455 int i;
5456
5457 for (i = GET_MODE_NUNITS (mode) - 1; i >= 0; i--)
5458 if (RTVEC_ELT (v, i) != inner_zero)
5459 return gen_rtx_raw_CONST_VECTOR (mode, v);
5460 return CONST0_RTX (mode);
5461 }
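/* Illustrative sketch (not part of the original file, and assuming the
   target defines V4SImode): an all-zero vector folds to the shared
   CONST0_RTX object instead of a fresh CONST_VECTOR.

   rtvec v = rtvec_alloc (4);
   int i;
   for (i = 0; i < 4; i++)
     RTVEC_ELT (v, i) = const0_rtx;
   if (gen_rtx_CONST_VECTOR (V4SImode, v) != CONST0_RTX (V4SImode))
     abort ();  */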
5462
5463 /* Create some permanent unique rtl objects shared between all functions.
5464 LINE_NUMBERS is nonzero if line numbers are to be generated. */
5465
5466 void
5467 init_emit_once (line_numbers)
5468 int line_numbers;
5469 {
5470 int i;
5471 enum machine_mode mode;
5472 enum machine_mode double_mode;
5473
5474 /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
5475 tables. */
5476 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5477 const_int_htab_eq, NULL);
5478
5479 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5480 const_double_htab_eq, NULL);
5481
5482 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5483 mem_attrs_htab_eq, NULL);
5484 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5485 reg_attrs_htab_eq, NULL);
5486
5487 no_line_numbers = ! line_numbers;
5488
5489 /* Compute the word and byte modes. */
5490
5491 byte_mode = VOIDmode;
5492 word_mode = VOIDmode;
5493 double_mode = VOIDmode;
5494
5495 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5496 mode = GET_MODE_WIDER_MODE (mode))
5497 {
5498 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5499 && byte_mode == VOIDmode)
5500 byte_mode = mode;
5501
5502 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5503 && word_mode == VOIDmode)
5504 word_mode = mode;
5505 }
5506
5507 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5508 mode = GET_MODE_WIDER_MODE (mode))
5509 {
5510 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5511 && double_mode == VOIDmode)
5512 double_mode = mode;
5513 }
5514
5515 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5516
5517 /* Assign register numbers to the globally defined register rtx.
5518 This must be done at runtime because the register number field
5519 is in a union and some compilers can't initialize unions. */
5520
5521 pc_rtx = gen_rtx (PC, VOIDmode);
5522 cc0_rtx = gen_rtx (CC0, VOIDmode);
5523 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5524 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5525 if (hard_frame_pointer_rtx == 0)
5526 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
5527 HARD_FRAME_POINTER_REGNUM);
5528 if (arg_pointer_rtx == 0)
5529 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5530 virtual_incoming_args_rtx =
5531 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5532 virtual_stack_vars_rtx =
5533 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5534 virtual_stack_dynamic_rtx =
5535 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5536 virtual_outgoing_args_rtx =
5537 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5538 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5539
5540 /* Initialize RTL for commonly used hard registers. These are
5541 copied into regno_reg_rtx as we begin to compile each function. */
5542 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5543 static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5544
5545 #ifdef INIT_EXPANDERS
5546 /* This is to initialize {init|mark|free}_machine_status before the first
5547 call to push_function_context_to. This is needed by the Chill front
5548 end which calls push_function_context_to before the first call to
5549 init_function_start. */
5550 INIT_EXPANDERS;
5551 #endif
5552
5553 /* Create the unique rtx's for certain rtx codes and operand values. */
5554
5555 /* Don't use gen_rtx here, since it would try to use these
5556 very variables before they are initialized. */
5557 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5558 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5559 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5560
5561 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5562 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5563 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5564 else
5565 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5566
5567 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5568 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5569 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5570 REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
5571
5572 for (i = 0; i <= 2; i++)
5573 {
5574 REAL_VALUE_TYPE *r =
5575 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5576
5577 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5578 mode = GET_MODE_WIDER_MODE (mode))
5579 const_tiny_rtx[i][(int) mode] =
5580 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5581
5582 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5583
5584 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5585 mode = GET_MODE_WIDER_MODE (mode))
5586 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5587
5588 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5589 mode != VOIDmode;
5590 mode = GET_MODE_WIDER_MODE (mode))
5591 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5592 }
5593
5594 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5595 mode != VOIDmode;
5596 mode = GET_MODE_WIDER_MODE (mode))
5597 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5598
5599 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5600 mode != VOIDmode;
5601 mode = GET_MODE_WIDER_MODE (mode))
5602 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5603
5604 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5605 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5606 const_tiny_rtx[0][i] = const0_rtx;
5607
5608 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5609 if (STORE_FLAG_VALUE == 1)
5610 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5611
5612 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5613 return_address_pointer_rtx
5614 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5615 #endif
5616
5617 #ifdef STRUCT_VALUE
5618 struct_value_rtx = STRUCT_VALUE;
5619 #else
5620 struct_value_rtx = gen_rtx_REG (Pmode, STRUCT_VALUE_REGNUM);
5621 #endif
5622
5623 #ifdef STRUCT_VALUE_INCOMING
5624 struct_value_incoming_rtx = STRUCT_VALUE_INCOMING;
5625 #else
5626 #ifdef STRUCT_VALUE_INCOMING_REGNUM
5627 struct_value_incoming_rtx
5628 = gen_rtx_REG (Pmode, STRUCT_VALUE_INCOMING_REGNUM);
5629 #else
5630 struct_value_incoming_rtx = struct_value_rtx;
5631 #endif
5632 #endif
5633
5634 #ifdef STATIC_CHAIN_REGNUM
5635 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5636
5637 #ifdef STATIC_CHAIN_INCOMING_REGNUM
5638 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5639 static_chain_incoming_rtx
5640 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5641 else
5642 #endif
5643 static_chain_incoming_rtx = static_chain_rtx;
5644 #endif
5645
5646 #ifdef STATIC_CHAIN
5647 static_chain_rtx = STATIC_CHAIN;
5648
5649 #ifdef STATIC_CHAIN_INCOMING
5650 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5651 #else
5652 static_chain_incoming_rtx = static_chain_rtx;
5653 #endif
5654 #endif
5655
5656 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5657 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5658 }
5659 \f
5660 /* Query and clear, or restore, no_line_numbers. This is used by the
5661 switch / case handling in stmt.c to give proper line numbers in
5662 warnings about unreachable code. */
5663
5664 int
5665 force_line_numbers ()
5666 {
5667 int old = no_line_numbers;
5668
5669 no_line_numbers = 0;
5670 if (old)
5671 force_next_line_note ();
5672 return old;
5673 }
5674
5675 void
5676 restore_line_number_status (old_value)
5677 int old_value;
5678 {
5679 no_line_numbers = old_value;
5680 }
5681
5682 /* Produce an exact duplicate of insn INSN after AFTER, taking care
5683 to update libcall regions if present. */
5684
5685 rtx
5686 emit_copy_of_insn_after (insn, after)
5687 rtx insn, after;
5688 {
5689 rtx new;
5690 rtx note1, note2, link;
5691
5692 switch (GET_CODE (insn))
5693 {
5694 case INSN:
5695 new = emit_insn_after (copy_insn (PATTERN (insn)), after);
5696 break;
5697
5698 case JUMP_INSN:
5699 new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5700 break;
5701
5702 case CALL_INSN:
5703 new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5704 if (CALL_INSN_FUNCTION_USAGE (insn))
5705 CALL_INSN_FUNCTION_USAGE (new)
5706 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5707 SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
5708 CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
5709 break;
5710
5711 default:
5712 abort ();
5713 }
5714
5715 /* Update LABEL_NUSES. */
5716 mark_jump_label (PATTERN (new), new, 0);
5717
5718 INSN_SCOPE (new) = INSN_SCOPE (insn);
5719
5720 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
5721 make them. */
5722 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5723 if (REG_NOTE_KIND (link) != REG_LABEL)
5724 {
5725 if (GET_CODE (link) == EXPR_LIST)
5726 REG_NOTES (new)
5727 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
5728 XEXP (link, 0),
5729 REG_NOTES (new)));
5730 else
5731 REG_NOTES (new)
5732 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
5733 XEXP (link, 0),
5734 REG_NOTES (new)));
5735 }
5736
5737 /* Fix the libcall sequences. */
5738 if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
5739 {
5740 rtx p = new;
5741 while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
5742 p = PREV_INSN (p);
5743 XEXP (note1, 0) = p;
5744 XEXP (note2, 0) = new;
5745 }
5746 INSN_CODE (new) = INSN_CODE (insn);
5747 return new;
5748 }
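/* Illustrative usage sketch (not part of the original file): duplicate
   an insn directly after itself, e.g. while unrolling; the copy shares
   no rtl with the original beyond the permanently shared objects.

   rtx copy = emit_copy_of_insn_after (insn, insn);  */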
5749
5750 #include "gt-emit-rtl.h"