emit-rtl.c (gen_lowpart_common): Handle interpreting integer constants as condition...
/* Emit RTL for the GNU C-Compiler expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains the functions `gen_rtx', `gen_reg_rtx'
   and `gen_label_rtx' that are the usual ways of creating rtl
   expressions for most purposes.

   It also has the functions for creating insns and linking
   them in the doubly-linked chain.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines use `gen_rtx' to make
   the individual rtx's of the pattern; what is machine dependent
   is the kind of rtx's they make and what arguments they use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "real.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"

/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */


/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* Highest label number in current function.
   Zero means use the value of label_num instead.
   This is nonzero only when belatedly compiling an inline function.  */

static int last_label_num;

/* Value label_num had when set_new_first_and_last_label_number was called.
   If label_num has not changed since then, last_label_num is valid.  */

static int base_label_num;

/* Nonzero means do not generate NOTEs for source line numbers.  */

static int no_line_numbers;

/* Commonly used rtx's, so that we only need space for one copy.
   These are initialized once for the entire compilation.
   All of these are unique; no other rtx-object will be equal to any
   of these.  */

rtx global_rtl[GR_MAX];

/* Commonly used RTL for hard registers.  These objects are not necessarily
   unique, so we allocate them separately from global_rtl.  They are
   initialized once per compilation unit, then copied into regno_reg_rtx
   at the beginning of each function.  */
static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconstm2;
REAL_VALUE_TYPE dconsthalf;

/* All references to the following fixed hard registers go through
   these unique rtl objects.  On machines where the frame-pointer and
   arg-pointer are the same register, they use the same unique object.

   After register allocation, other rtl objects which used to be pseudo-regs
   may be clobbered to refer to the frame-pointer register.
   But references that were originally to the frame-pointer can be
   distinguished from the others because they contain frame_pointer_rtx.

   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
   tricky: until register elimination has taken place hard_frame_pointer_rtx
   should be used if it is being set, and frame_pointer_rtx otherwise.  After
   register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are the same (most machines), these
   are identical.

   In an inline procedure, the stack and frame pointer rtxs may not be
   used for anything else.  */
rtx struct_value_rtx;		/* (REG:Pmode STRUCT_VALUE_REGNUM) */
rtx struct_value_incoming_rtx;	/* (REG:Pmode STRUCT_VALUE_INCOMING_REGNUM) */
rtx static_chain_rtx;		/* (REG:Pmode STATIC_CHAIN_REGNUM) */
rtx static_chain_incoming_rtx;	/* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
rtx pic_offset_table_rtx;	/* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */

/* This is used to implement __builtin_return_address for some machines.
   See for instance the MIPS port.  */
rtx return_address_pointer_rtx;	/* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

#define first_insn (cfun->emit->x_first_insn)
#define last_insn (cfun->emit->x_last_insn)
#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
#define last_linenum (cfun->emit->x_last_linenum)
#define last_filename (cfun->emit->x_last_filename)
#define first_label_num (cfun->emit->x_first_label_num)

static rtx make_jump_insn_raw		PARAMS ((rtx));
static rtx make_call_insn_raw		PARAMS ((rtx));
static rtx find_line_note		PARAMS ((rtx));
static rtx change_address_1		PARAMS ((rtx, enum machine_mode, rtx,
						 int));
static void unshare_all_rtl_1		PARAMS ((rtx));
static void unshare_all_decls		PARAMS ((tree));
static void reset_used_decls		PARAMS ((tree));
static void mark_label_nuses		PARAMS ((rtx));
static hashval_t const_int_htab_hash	PARAMS ((const void *));
static int const_int_htab_eq		PARAMS ((const void *,
						 const void *));
static hashval_t const_double_htab_hash	PARAMS ((const void *));
static int const_double_htab_eq		PARAMS ((const void *,
						 const void *));
static rtx lookup_const_double		PARAMS ((rtx));
static hashval_t mem_attrs_htab_hash	PARAMS ((const void *));
static int mem_attrs_htab_eq		PARAMS ((const void *,
						 const void *));
static mem_attrs *get_mem_attrs		PARAMS ((HOST_WIDE_INT, tree, rtx,
						 rtx, unsigned int,
						 enum machine_mode));
static hashval_t reg_attrs_htab_hash	PARAMS ((const void *));
static int reg_attrs_htab_eq		PARAMS ((const void *,
						 const void *));
static reg_attrs *get_reg_attrs		PARAMS ((tree, int));
static tree component_ref_for_mem_expr	PARAMS ((tree));
static rtx gen_const_vector_0		PARAMS ((enum machine_mode));
static rtx gen_complex_constant_part	PARAMS ((enum machine_mode,
						 rtx, int));

/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (x)
     const void *x;
{
  return (hashval_t) INTVAL ((struct rtx_def *) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (x, y)
     const void *x;
     const void *y;
{
  return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (x)
     const void *x;
{
  rtx value = (rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
static int
const_double_htab_eq (x, y)
     const void *x;
     const void *y;
{
  rtx a = (rtx) x, b = (rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (x)
     const void *x;
{
  mem_attrs *p = (mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
	  ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
	  ^ (size_t) p->expr);
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (x, y)
     const void *x;
     const void *y;
{
  mem_attrs *p = (mem_attrs *) x;
  mem_attrs *q = (mem_attrs *) y;

  return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
	  && p->size == q->size && p->align == q->align);
}

/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (alias, expr, offset, size, align, mode)
     HOST_WIDE_INT alias;
     tree expr;
     rtx offset;
     rtx size;
     unsigned int align;
     enum machine_mode mode;
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.
     This must match what the corresponding MEM_* macros return when the
     field is not present.  */
  if (alias == 0 && expr == 0 && offset == 0
      && (size == 0
	  || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (STRICT_ALIGNMENT && mode != BLKmode
	  ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
    return 0;

  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (mem_attrs));
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return *slot;
}

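/* An illustrative sketch, not part of the original file: because
   get_mem_attrs interns mem_attrs structures in mem_attrs_htab, two MEMs
   with identical attributes share one object, and attribute equality
   reduces to pointer equality.  The function name is hypothetical.  */
#if 0
static void
mem_attrs_interning_sketch ()
{
  /* Identical argument lists must yield the identical interned object.  */
  mem_attrs *a = get_mem_attrs (0, NULL_TREE, GEN_INT (4), GEN_INT (8),
				BITS_PER_UNIT, SImode);
  mem_attrs *b = get_mem_attrs (0, NULL_TREE, GEN_INT (4), GEN_INT (8),
				BITS_PER_UNIT, SImode);

  if (a != b)
    abort ();
}
#endif
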
/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (x)
     const void *x;
{
  reg_attrs *p = (reg_attrs *) x;

  return ((p->offset * 1000) ^ (long) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (x, y)
     const void *x;
     const void *y;
{
  reg_attrs *p = (reg_attrs *) x;
  reg_attrs *q = (reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}

/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   a REG with decl DECL and offset OFFSET.  */

static reg_attrs *
get_reg_attrs (decl, offset)
     tree decl;
     int offset;
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (reg_attrs));
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (mode, regno)
     enum machine_mode mode;
     int regno;
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (mode, arg)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     HOST_WIDE_INT arg;
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}

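/* An illustrative sketch, not part of the original file: small CONST_INTs
   come from the const_int_rtx pool and larger ones are interned in
   const_int_htab, so equal values are always the same object and may be
   compared with ==.  The function name is hypothetical.  */
#if 0
static void
const_int_sharing_sketch ()
{
  rtx a = GEN_INT (42);
  rtx b = GEN_INT (42);

  /* Both requests return the same pooled rtx.  */
  if (a != b)
    abort ();
}
#endif
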
rtx
gen_int_mode (c, mode)
     HOST_WIDE_INT c;
     enum machine_mode mode;
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}

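/* An illustrative sketch, not part of the original file: gen_int_mode
   truncates C to MODE before creating the CONST_INT, so a value that does
   not fit MODE comes back in canonical sign-extended form.  Assumes a
   target with an 8-bit QImode; the function name is hypothetical.  */
#if 0
static void
gen_int_mode_sketch ()
{
  /* 0xff has the QImode sign bit set, so it is canonicalized to -1.  */
  rtx x = gen_int_mode (0xff, QImode);

  if (x != constm1_rtx)
    abort ();
}
#endif
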
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (real)
     rtx real;
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (value, mode)
     REAL_VALUE_TYPE value;
     enum machine_mode mode;
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE));

  return lookup_const_double (real);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (i0, i1, mode)
     HOST_WIDE_INT i0, i1;
     enum machine_mode mode;
{
  rtx value;
  unsigned int i;

  if (mode != VOIDmode)
    {
      int width;
      if (GET_MODE_CLASS (mode) != MODE_INT
	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT
	  /* We can get a 0 for an error mark.  */
	  && GET_MODE_CLASS (mode) != MODE_VECTOR_INT
	  && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
	abort ();

      /* We clear out all bits that don't belong in MODE, unless they and
	 our sign bit are all one.  So we get either a reasonable negative
	 value or a reasonable unsigned value for this mode.  */
      width = GET_MODE_BITSIZE (mode);
      if (width < HOST_BITS_PER_WIDE_INT
	  && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
	      != ((HOST_WIDE_INT) (-1) << (width - 1))))
	i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
      else if (width == HOST_BITS_PER_WIDE_INT
	       && ! (i1 == ~0 && i0 < 0))
	i1 = 0;
      else if (width > 2 * HOST_BITS_PER_WIDE_INT)
	/* We cannot represent this value as a constant.  */
	abort ();

      /* If this would be an entire word for the target, but is not for
	 the host, then sign-extend on the host so that the number will
	 look the same way on the host that it would on the target.

	 For example, when building a 64 bit alpha hosted 32 bit sparc
	 targeted compiler, then we want the 32 bit unsigned value -1 to be
	 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
	 The latter confuses the sparc backend.  */

      if (width < HOST_BITS_PER_WIDE_INT
	  && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
	i0 |= ((HOST_WIDE_INT) (-1) << width);

      /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
	 CONST_INT.

	 ??? Strictly speaking, this is wrong if we create a CONST_INT for
	 a large unsigned constant with the size of MODE being
	 HOST_BITS_PER_WIDE_INT and later try to interpret that constant
	 in a wider mode.  In that case we will mis-interpret it as a
	 negative number.

	 Unfortunately, the only alternative is to make a CONST_DOUBLE for
	 any constant in any mode if it is an unsigned constant larger
	 than the maximum signed integer in an int on the host.  However,
	 doing this will break everyone that always expects to see a
	 CONST_INT for SImode and smaller.

	 We have always been making CONST_INTs in this case, so nothing
	 new is being broken.  */

      if (width <= HOST_BITS_PER_WIDE_INT)
	i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}

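/* An illustrative sketch, not part of the original file: a pair that fits
   in one HOST_WIDE_INT collapses to a CONST_INT, while a genuinely
   two-word pair (passed with VOIDmode to avoid the per-mode truncation
   above) becomes a VOIDmode CONST_DOUBLE.  The function name is
   hypothetical.  */
#if 0
static void
immed_double_const_sketch ()
{
  rtx small = immed_double_const (5, 0, DImode);
  rtx large = immed_double_const (0, 1, VOIDmode);

  if (GET_CODE (small) != CONST_INT || GET_CODE (large) != CONST_DOUBLE)
    abort ();
}
#endif
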
rtx
gen_rtx_REG (mode, regno)
     enum machine_mode mode;
     unsigned int regno;
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

rtx
gen_rtx_MEM (mode, addr)
     enum machine_mode mode;
     rtx addr;
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

rtx
gen_rtx_SUBREG (mode, reg, offset)
     enum machine_mode mode;
     rtx reg;
     int offset;
{
  /* This is the most common failure type.
     Catch it early so we can see who does it.  */
  if ((offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  /* This check isn't usable right now because combine will
     throw arbitrary crap like a CALL into a SUBREG in
     gen_lowpart_for_combine so we must just eat it.  */
#if 0
  /* Check for this too.  */
  if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
    abort ();
#endif
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG; otherwise generate a paradoxical
   SUBREG.  */

rtx
gen_lowpart_SUBREG (mode, reg)
     enum machine_mode mode;
     rtx reg;
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}
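
/* An illustrative sketch, not part of the original file: taking the low
   SImode part of a DImode pseudo yields a SUBREG whose byte offset is
   chosen by subreg_lowpart_offset for the target's endianness.  The
   function name is hypothetical.  */
#if 0
static void
gen_lowpart_SUBREG_sketch ()
{
  rtx di = gen_reg_rtx (DImode);
  rtx si = gen_lowpart_SUBREG (SImode, di);

  /* The result is a lowpart SUBREG of the original register.  */
  if (GET_CODE (si) != SUBREG || ! subreg_lowpart_p (si))
    abort ();
}
#endif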
\f
/* rtx gen_rtx (code, mode, [element1, ..., elementn])
**
**	    This routine generates an RTX of the size specified by
**	<code>, which is an RTX code.  The RTX structure is initialized
**	from the arguments <element1> through <elementn>, which are
**	interpreted according to the specific RTX type's format.  The
**	special machine mode associated with the rtx (if any) is specified
**	in <mode>.
**
**	    gen_rtx can be invoked in a way which resembles the lisp-like
**	rtx it will generate.  For example, the following rtx structure:
**
**	      (plus:QI (mem:QI (reg:SI 1))
**		       (mem:QI (plus:SI (reg:SI 2) (reg:SI 3))))
**
**	    ...would be generated by the following C code:
**
**		gen_rtx (PLUS, QImode,
**		    gen_rtx (MEM, QImode,
**			gen_rtx (REG, SImode, 1)),
**		    gen_rtx (MEM, QImode,
**			gen_rtx (PLUS, SImode,
**			    gen_rtx (REG, SImode, 2),
**			    gen_rtx (REG, SImode, 3)))),
*/

/*VARARGS2*/
rtx
gen_rtx (enum rtx_code code, enum machine_mode mode, ...)
{
  int i;		/* Array indices...  */
  const char *fmt;	/* Current rtx's format...  */
  rtx rt_val;		/* RTX to return to caller...  */
  va_list p;

  va_start (p, mode);

  switch (code)
    {
    case CONST_INT:
      rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
      break;

    case CONST_DOUBLE:
      {
	HOST_WIDE_INT arg0 = va_arg (p, HOST_WIDE_INT);
	HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);

	rt_val = immed_double_const (arg0, arg1, mode);
      }
      break;

    case REG:
      rt_val = gen_rtx_REG (mode, va_arg (p, int));
      break;

    case MEM:
      rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
      break;

    default:
      rt_val = rtx_alloc (code);	/* Allocate the storage space.  */
      rt_val->mode = mode;		/* Store the machine mode...  */

      fmt = GET_RTX_FORMAT (code);	/* Find the right format...  */
      for (i = 0; i < GET_RTX_LENGTH (code); i++)
	{
	  switch (*fmt++)
	    {
	    case '0':		/* Field with unknown use.  Zero it.  */
	      X0EXP (rt_val, i) = NULL_RTX;
	      break;

	    case 'i':		/* An integer?  */
	      XINT (rt_val, i) = va_arg (p, int);
	      break;

	    case 'w':		/* A wide integer?  */
	      XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
	      break;

	    case 's':		/* A string?  */
	      XSTR (rt_val, i) = va_arg (p, char *);
	      break;

	    case 'e':		/* An expression?  */
	    case 'u':		/* An insn?  Same except when printing.  */
	      XEXP (rt_val, i) = va_arg (p, rtx);
	      break;

	    case 'E':		/* An RTX vector?  */
	      XVEC (rt_val, i) = va_arg (p, rtvec);
	      break;

	    case 'b':		/* A bitmap?  */
	      XBITMAP (rt_val, i) = va_arg (p, bitmap);
	      break;

	    case 't':		/* A tree?  */
	      XTREE (rt_val, i) = va_arg (p, tree);
	      break;

	    default:
	      abort ();
	    }
	}
      break;
    }

  va_end (p);
  return rt_val;
}

/* gen_rtvec (n, [rt1, ..., rtn])
**
**	    This routine creates an rtvec and stores within it the
**	pointers to rtx's which are its arguments.
*/

/*VARARGS1*/
rtvec
gen_rtvec (int n, ...)
{
  int i, save_n;
  rtx *vector;
  va_list p;

  va_start (p, n);

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...  */

  vector = (rtx *) alloca (n * sizeof (rtx));

  for (i = 0; i < n; i++)
    vector[i] = va_arg (p, rtx);

  /* The definition of VA_* in K&R C causes `n' to go out of scope.  */
  save_n = n;
  va_end (p);

  return gen_rtvec_v (save_n, vector);
}

rtvec
gen_rtvec_v (n, argp)
     int n;
     rtx *argp;
{
  int i;
  rtvec rt_val;

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...  */

  rt_val = rtvec_alloc (n);	/* Allocate an rtvec...  */

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (mode)
     enum machine_mode mode;
{
  struct function *f = cfun;
  rtx val;

  /* Don't let anything called after initial flow analysis create new
     registers.  */
  if (no_new_pseudos)
    abort ();

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == f->emit->regno_pointer_align_length)
    {
      int old_size = f->emit->regno_pointer_align_length;
      char *new;
      rtx *new1;

      new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
      memset (new + old_size, 0, old_size);
      f->emit->regno_pointer_align = (unsigned char *) new;

      new1 = (rtx *) ggc_realloc (f->emit->x_regno_reg_rtx,
				  old_size * 2 * sizeof (rtx));
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      f->emit->regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}

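/* An illustrative sketch, not part of the original file: while
   generating_concat_p is set, asking for a complex-mode pseudo produces
   a CONCAT of two independent part-mode pseudos rather than one wide
   register.  The function name is hypothetical.  */
#if 0
static void
gen_reg_rtx_complex_sketch ()
{
  rtx c = gen_reg_rtx (DCmode);

  /* DCmode splits into two DFmode pseudos.  */
  if (generating_concat_p
      && (GET_CODE (c) != CONCAT || GET_MODE (XEXP (c, 0)) != DFmode))
    abort ();
}
#endif
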
/* Generate a REG rtx with the same attributes as REG, but with its
   offset increased by OFFSET.  */

rtx
gen_rtx_REG_offset (reg, mode, regno, offset)
     enum machine_mode mode;
     unsigned int regno;
     int offset;
     rtx reg;
{
  rtx new = gen_rtx_REG (mode, regno);
  REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
				   REG_OFFSET (reg) + offset);
  return new;
}

/* Set the register attributes of REG from the memory reference MEM.  */

void
set_reg_attrs_from_mem (reg, mem)
     rtx reg;
     rtx mem;
{
  if (MEM_OFFSET (mem) && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
    REG_ATTRS (reg)
      = get_reg_attrs (MEM_EXPR (mem), INTVAL (MEM_OFFSET (mem)));
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (parm_rtx, mem)
     rtx parm_rtx;
     rtx mem;
{
  if (GET_CODE (parm_rtx) == REG)
    set_reg_attrs_from_mem (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (GET_CODE (XEXP (x, 0)) == REG)
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */
void
set_decl_rtl (t, x)
     tree t;
     rtx x;
{
  DECL_CHECK (t)->decl.rtl = x;

  if (!x)
    return;
  /* For registers, we maintain the reverse information too.  */
  if (GET_CODE (x) == REG)
    REG_ATTRS (x) = get_reg_attrs (t, 0);
  else if (GET_CODE (x) == SUBREG)
    REG_ATTRS (SUBREG_REG (x))
      = get_reg_attrs (t, -SUBREG_BYTE (x));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      for (i = 0; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (reg)
     rtx reg;
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else if (GET_CODE (reg) == REG)
    REG_USERVAR_P (reg) = 1;
  else
    abort ();
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (reg, align)
     rtx reg;
     int align;
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num ()
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num ()
{
  if (last_label_num && label_num == base_label_num)
    return last_label_num;
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num ()
{
  return first_label_num;
}
\f
/* Return the final regno of X, which is a SUBREG of a hard
   register.  */
int
subreg_hard_regno (x, check_mode)
     rtx x;
     int check_mode;
{
  enum machine_mode mode = GET_MODE (x);
  unsigned int byte_offset, base_regno, final_regno;
  rtx reg = SUBREG_REG (x);

  /* This is where we attempt to catch illegal subregs
     created by the compiler.  */
  if (GET_CODE (x) != SUBREG
      || GET_CODE (reg) != REG)
    abort ();
  base_regno = REGNO (reg);
  if (base_regno >= FIRST_PSEUDO_REGISTER)
    abort ();
  if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
    abort ();
#ifdef ENABLE_CHECKING
  if (!subreg_offset_representable_p (REGNO (reg), GET_MODE (reg),
				      SUBREG_BYTE (x), mode))
    abort ();
#endif
  /* Catch non-congruent offsets too.  */
  byte_offset = SUBREG_BYTE (x);
  if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  final_regno = subreg_regno (x);

  return final_regno;
}

/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values; rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (mode, x)
     enum machine_mode mode;
     rtx x;
{
  int msize = GET_MODE_SIZE (mode);
  int xsize = GET_MODE_SIZE (GET_MODE (x));
  int offset = 0;

  if (GET_MODE (x) == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if (GET_MODE (x) != VOIDmode
      && ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
	  > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE (x) != VOIDmode && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, GET_MODE (x));

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR)
    return simplify_gen_subreg (mode, x, GET_MODE (x), offset);
  else if ((GET_MODE_CLASS (mode) == MODE_VECTOR_INT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
	   && GET_MODE (x) == VOIDmode)
    return simplify_gen_subreg (mode, x, int_mode_for_mode (mode), offset);
  /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
     from the low-order part of the constant.  */
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      /* If MODE is twice the host word size, X is already the desired
	 representation.  Otherwise, if MODE is wider than a word, we can't
	 do this.  If MODE is exactly a word, return just one CONST_INT.  */

      if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
	return x;
      else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	return 0;
      else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
	return (GET_CODE (x) == CONST_INT ? x
		: GEN_INT (CONST_DOUBLE_LOW (x)));
      else
	{
	  /* MODE must be narrower than HOST_BITS_PER_WIDE_INT.  */
	  HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
			       : CONST_DOUBLE_LOW (x));

	  /* Sign extend to HOST_WIDE_INT.  */
	  val = trunc_int_for_mode (val, mode);

	  return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
		  : GEN_INT (val));
	}
    }

  /* The floating-point emulator can handle all conversions between
     FP and integer operands.  This simplifies reload because it
     doesn't have to deal with constructs like (subreg:DI
     (const_double:SF ...)) or (subreg:DF (const_int ...)).  */
  /* Single-precision floats are always 32-bits and double-precision
     floats are always 64-bits.  */

  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 32
	   && GET_CODE (x) == CONST_INT)
    {
      REAL_VALUE_TYPE r;
      long i = INTVAL (x);

      real_from_target (&r, &i, mode);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 64
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
	   && GET_MODE (x) == VOIDmode)
    {
      REAL_VALUE_TYPE r;
      HOST_WIDE_INT low, high;
      long i[2];

      if (GET_CODE (x) == CONST_INT)
	{
	  low = INTVAL (x);
	  high = low >> (HOST_BITS_PER_WIDE_INT - 1);
	}
      else
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}

      if (HOST_BITS_PER_WIDE_INT > 32)
	high = low >> 31 >> 1;

      /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
	 target machine.  */
      if (WORDS_BIG_ENDIAN)
	i[0] = high, i[1] = low;
      else
	i[0] = low, i[1] = high;

      real_from_target (&r, i, mode);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_CODE (x) == CONST_DOUBLE
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
    {
      REAL_VALUE_TYPE r;
      long i[4];  /* Only the low 32 bits of each 'long' are used.  */
      int endian = WORDS_BIG_ENDIAN ? 1 : 0;

      /* Convert 'r' into an array of four 32-bit words in target word
	 order.  */
      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      switch (GET_MODE_BITSIZE (GET_MODE (x)))
	{
	case 32:
	  REAL_VALUE_TO_TARGET_SINGLE (r, i[3 * endian]);
	  i[1] = 0;
	  i[2] = 0;
	  i[3 - 3 * endian] = 0;
	  break;
	case 64:
	  REAL_VALUE_TO_TARGET_DOUBLE (r, i + 2 * endian);
	  i[2 - 2 * endian] = 0;
	  i[3 - 2 * endian] = 0;
	  break;
	case 96:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i + endian);
	  i[3 - 3 * endian] = 0;
	  break;
	case 128:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i);
	  break;
	default:
	  abort ();
	}
      /* Now, pack the 32-bit elements of the array into a CONST_DOUBLE
	 and return it.  */
#if HOST_BITS_PER_WIDE_INT == 32
      return immed_double_const (i[3 * endian], i[1 + endian], mode);
#else
      if (HOST_BITS_PER_WIDE_INT != 64)
	abort ();

      return immed_double_const ((((unsigned long) i[3 * endian])
				  | ((HOST_WIDE_INT) i[1 + endian] << 32)),
				 (((unsigned long) i[2 - endian])
				  | ((HOST_WIDE_INT) i[3 - 3 * endian] << 32)),
				 mode);
#endif
    }
  /* If MODE is a condition code and X is a CONST_INT, the value of X
     must already have been "recognized" by the back-end, and we can
     assume that it is valid for this mode.  */
  else if (GET_MODE_CLASS (mode) == MODE_CC
	   && GET_CODE (x) == CONST_INT)
    return x;

  /* Otherwise, we can't do this.  */
  return 0;
}
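
/* An illustrative sketch, not part of the original file: for a VOIDmode
   integer constant, gen_lowpart_common extracts the low bits and
   re-canonicalizes them for the narrow mode; here the QImode low part of
   0x1234 is 0x34.  Assumes an 8-bit QImode; the function name is
   hypothetical.  */
#if 0
static void
gen_lowpart_common_sketch ()
{
  rtx x = gen_lowpart_common (QImode, GEN_INT (0x1234));

  if (x == 0 || INTVAL (x) != 0x34)
    abort ();
}
#endif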
\f
/* Return the constant real or imaginary part (which has mode MODE)
   of a complex value X.  The IMAGPART_P argument determines whether
   the real or complex component should be returned.  This function
   returns NULL_RTX if the component isn't a constant.  */

static rtx
gen_complex_constant_part (mode, x, imagpart_p)
     enum machine_mode mode;
     rtx x;
     int imagpart_p;
{
  tree decl, part;

  if (GET_CODE (x) == MEM
      && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
      && TREE_CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
    {
      decl = SYMBOL_REF_DECL (XEXP (x, 0));
      if (decl != NULL_TREE && TREE_CODE (decl) == COMPLEX_CST)
	{
	  part = imagpart_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
	  if (TREE_CODE (part) == REAL_CST
	      || TREE_CODE (part) == INTEGER_CST)
	    return expand_expr (part, NULL_RTX, mode, 0);
	}
    }
  return NULL_RTX;
}

/* Return the real part (which has mode MODE) of a complex value X.
   This always comes at the low address in memory.  */

rtx
gen_realpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  rtx part;

  /* Handle complex constants.  */
  part = gen_complex_constant_part (mode, x, 0);
  if (part != NULL_RTX)
    return part;

  if (WORDS_BIG_ENDIAN
      && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
      && REG_P (x)
      && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access real part of complex value in hard register");
  else if (WORDS_BIG_ENDIAN)
    return gen_highpart (mode, x);
  else
    return gen_lowpart (mode, x);
}

/* Return the imaginary part (which has mode MODE) of a complex value X.
   This always comes at the high address in memory.  */

rtx
gen_imagpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  rtx part;

  /* Handle complex constants.  */
  part = gen_complex_constant_part (mode, x, 1);
  if (part != NULL_RTX)
    return part;

  if (WORDS_BIG_ENDIAN)
    return gen_lowpart (mode, x);
  else if (! WORDS_BIG_ENDIAN
	   && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
	   && REG_P (x)
	   && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access imaginary part of complex value in hard register");
  else
    return gen_highpart (mode, x);
}

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the real part of the complex value in its containing reg.
   Complex values are always stored with the real part in the first word,
   regardless of WORDS_BIG_ENDIAN.  */

int
subreg_realpart_p (x)
     rtx x;
{
  if (GET_CODE (x) != SUBREG)
    abort ();

  return ((unsigned int) SUBREG_BYTE (x)
	  < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
}
\f
/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
   return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
   least-significant part of X.
   MODE specifies how big a part of X to return;
   it usually should not be larger than a word.
   If X is a MEM whose address is a QUEUED, the value may be so also.  */

rtx
gen_lowpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  rtx result = gen_lowpart_common (mode, x);

  if (result)
    return result;
  else if (GET_CODE (x) == REG)
    {
      /* Must be a hard reg that's not valid in MODE.  */
      result = gen_lowpart_common (mode, copy_to_reg (x));
      if (result == 0)
	abort ();
      return result;
    }
  else if (GET_CODE (x) == MEM)
    {
      /* The only additional case we can do is MEM.  */
      int offset = 0;

      /* The following exposes the use of "x" to CSE.  */
      if (GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
	  && SCALAR_INT_MODE_P (GET_MODE (x))
	  && ! no_new_pseudos)
	return gen_lowpart (mode, force_reg (GET_MODE (x), x));

      if (WORDS_BIG_ENDIAN)
	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));

      if (BYTES_BIG_ENDIAN)
	/* Adjust the address so that the address-after-the-data
	   is unchanged.  */
	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));

      return adjust_address (x, mode, offset);
    }
  else if (GET_CODE (x) == ADDRESSOF)
    return gen_lowpart (mode, force_reg (GET_MODE (x), x));
  else
    abort ();
}

/* Like `gen_lowpart', but refer to the most significant part.
   This is used to access the imaginary part of a complex number.  */

rtx
gen_highpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  if (msize > UNITS_PER_WORD
      && msize != GET_MODE_UNIT_SIZE (GET_MODE (x)))
    abort ();

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (result != NULL_RTX && GET_CODE (result) == MEM)
    result = validize_mem (result);

  if (!result)
    abort ();
  return result;
}

/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */
rtx
gen_highpart_mode (outermode, innermode, exp)
     enum machine_mode outermode, innermode;
     rtx exp;
{
  if (GET_MODE (exp) != VOIDmode)
    {
      if (GET_MODE (exp) != innermode)
	abort ();
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return offset in bytes to get OUTERMODE low part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_lowpart_offset (outermode, innermode)
     enum machine_mode outermode, innermode;
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

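/* An illustrative sketch, not part of the original file: the low-part and
   high-part offsets split the size difference between them, so for any
   endianness they sum to the difference in mode sizes.  The function
   name is hypothetical.  */
#if 0
static void
subreg_offset_sketch ()
{
  unsigned int lo = subreg_lowpart_offset (SImode, DImode);
  unsigned int hi = subreg_highpart_offset (SImode, DImode);

  /* One part starts at byte 0, the other GET_MODE_SIZE (SImode) bytes
     further in; which is which depends on the target's endianness.  */
  if (lo + hi != GET_MODE_SIZE (DImode) - GET_MODE_SIZE (SImode))
    abort ();
}
#endif
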
/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (outermode, innermode)
     enum machine_mode outermode, innermode;
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
    abort ();

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (x)
     rtx x;
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}
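
/* An illustrative sketch, not part of the original file: any rtx that is
   not a SUBREG counts as its own low part.  The function name is
   hypothetical.  */
#if 0
static void
subreg_lowpart_p_sketch ()
{
  if (! subreg_lowpart_p (gen_reg_rtx (SImode)))
    abort ();
}
#endif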
\f

/* Helper routine for all the constant cases of operand_subword.
   Some places invoke this directly.  */

rtx
constant_subword (op, offset, mode)
     rtx op;
     int offset;
     enum machine_mode mode;
{
  int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
  HOST_WIDE_INT val;

  /* If OP is already an integer word, return it.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
    return op;

  /* The output is some bits, the width of the target machine's word.
     A wider-word host can surely hold them in a CONST_INT.  A narrower-word
     host can't.  */
  if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
      && GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 64
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      /* We handle 32-bit and >= 64-bit words here.  Note that the order in
	 which the words are written depends on the word endianness.
	 ??? This is a potential portability problem and should
	 be fixed at some point.

	 We must exercise caution with the sign bit.  By definition there
	 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
	 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
	 So we explicitly mask and sign-extend as necessary.  */
      if (BITS_PER_WORD == 32)
	{
	  val = k[offset];
	  val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
	  return GEN_INT (val);
	}
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset == 0)
	{
	  val = k[! WORDS_BIG_ENDIAN];
	  val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
	  val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
	  return GEN_INT (val);
	}
#endif
      else if (BITS_PER_WORD == 16)
	{
	  val = k[offset >> 1];
	  if ((offset & 1) == ! WORDS_BIG_ENDIAN)
	    val >>= 16;
	  val = ((val & 0xffff) ^ 0x8000) - 0x8000;
	  return GEN_INT (val);
	}
      else
	abort ();
    }
  else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
	   && GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) > 64
	   && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[4];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (BITS_PER_WORD == 32)
	{
	  val = k[offset];
	  val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
	  return GEN_INT (val);
	}
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset <= 1)
	{
	  val = k[offset * 2 + ! WORDS_BIG_ENDIAN];
	  val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
	  val |= (HOST_WIDE_INT) k[offset * 2 + WORDS_BIG_ENDIAN] & 0xffffffff;
	  return GEN_INT (val);
	}
#endif
      else
	abort ();
    }

  /* Single word float is a little harder, since single- and double-word
     values often do not have the same high-order bits.  We have already
     verified that we want the only defined word of the single-word value.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 32
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      /* Sign extend from known 32-bit value to HOST_WIDE_INT.  */
      val = l;
      val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;

      if (BITS_PER_WORD == 16)
	{
	  if ((offset & 1) == ! WORDS_BIG_ENDIAN)
	    val >>= 16;
	  val = ((val & 0xffff) ^ 0x8000) - 0x8000;
	}

      return GEN_INT (val);
    }

  /* The only remaining cases that we can handle are integers.
     Convert to proper endianness now since these cases need it.
     At this point, offset == 0 means the low-order word.

     We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
     in general.  However, if OP is (const_int 0), we can just return
     it for any word.  */

  if (op == const0_rtx)
    return op;

  if (GET_MODE_CLASS (mode) != MODE_INT
      || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
      || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
    return 0;

  if (WORDS_BIG_ENDIAN)
    offset = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - offset;

  /* Find out which word on the host machine this value is in and get
     it from the constant.  */
  val = (offset / size_ratio == 0
	 ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
	 : (GET_CODE (op) == CONST_INT
	    ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));

  /* Get the value we want into the low bits of val.  */
  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
    val = ((val >> ((offset % size_ratio) * BITS_PER_WORD)));

  val = trunc_int_for_mode (val, word_mode);

  return GEN_INT (val);
}

/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Most uses of this function can now be replaced by simplify_subreg.  */

rtx
operand_subword (op, offset, validate_address, mode)
     rtx op;
     unsigned int offset;
     int validate_address;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  if (mode == VOIDmode)
    abort ();

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (GET_CODE (op) == MEM)
    {
      rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new;

      else if (reload_completed)
	{
	  if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
	    return 0;
	}
      else
	return replace_equiv_address (new, XEXP (new, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}

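/* An illustrative sketch, not part of the original file: requesting a
   word that lies entirely outside OP yields const0_rtx rather than an
   rtx for part of OP.  The function name is hypothetical.  */
#if 0
static void
operand_subword_sketch ()
{
  rtx di = gen_reg_rtx (DImode);

  /* Word 2 is past the end of a DImode value on any target where
     DImode is at most two words wide.  */
  if (operand_subword (di, 2, 1, DImode) != const0_rtx)
    abort ();
}
#endif
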
1794 /* Similar to `operand_subword', but never return 0. If we can't extract
1795 the required subword, put OP into a register and try again. If that fails,
1796 abort. We always validate the address in this case.
1797
1798 MODE is the mode of OP, in case it is CONST_INT. */
1799
1800 rtx
1801 operand_subword_force (op, offset, mode)
1802 rtx op;
1803 unsigned int offset;
1804 enum machine_mode mode;
1805 {
1806 rtx result = operand_subword (op, offset, 1, mode);
1807
1808 if (result)
1809 return result;
1810
1811 if (mode != BLKmode && mode != VOIDmode)
1812 {
1813 /* If this is a register which can not be accessed by words, copy it
1814 to a pseudo register. */
1815 if (GET_CODE (op) == REG)
1816 op = copy_to_reg (op);
1817 else
1818 op = force_reg (mode, op);
1819 }
1820
1821 result = operand_subword (op, offset, 1, mode);
1822 if (result == 0)
1823 abort ();
1824
1825 return result;
1826 }
1827 \f
1828 /* Given a compare instruction, swap the operands.
1829 A test instruction is changed into a compare of 0 against the operand. */
1830
1831 void
1832 reverse_comparison (insn)
1833 rtx insn;
1834 {
1835 rtx body = PATTERN (insn);
1836 rtx comp;
1837
1838 if (GET_CODE (body) == SET)
1839 comp = SET_SRC (body);
1840 else
1841 comp = SET_SRC (XVECEXP (body, 0, 0));
1842
1843 if (GET_CODE (comp) == COMPARE)
1844 {
1845 rtx op0 = XEXP (comp, 0);
1846 rtx op1 = XEXP (comp, 1);
1847 XEXP (comp, 0) = op1;
1848 XEXP (comp, 1) = op0;
1849 }
1850 else
1851 {
1852 rtx new = gen_rtx_COMPARE (VOIDmode,
1853 CONST0_RTX (GET_MODE (comp)), comp);
1854 if (GET_CODE (body) == SET)
1855 SET_SRC (body) = new;
1856 else
1857 SET_SRC (XVECEXP (body, 0, 0)) = new;
1858 }
1859 }
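
/* For example (illustrative RTL, not emitted by this file):

     (set (reg:CC cc) (compare:CC (reg:SI a) (reg:SI b)))
   becomes
     (set (reg:CC cc) (compare:CC (reg:SI b) (reg:SI a)))

   while a test such as (set (reg:CC cc) (reg:SI a)) becomes a compare
   of zero against the operand:
     (set (reg:CC cc) (compare (const_int 0) (reg:SI a)))  */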
1860 \f
1861 /* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
1862 or (2) a component ref of something variable. Represent the latter with
1863 a NULL expression. */
1864
1865 static tree
1866 component_ref_for_mem_expr (ref)
1867 tree ref;
1868 {
1869 tree inner = TREE_OPERAND (ref, 0);
1870
1871 if (TREE_CODE (inner) == COMPONENT_REF)
1872 inner = component_ref_for_mem_expr (inner);
1873 else
1874 {
1875 tree placeholder_ptr = 0;
1876
1877 /* Now remove any conversions: they don't change what the underlying
1878 object is. Likewise for SAVE_EXPR. Also handle PLACEHOLDER_EXPR. */
1879 while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
1880 || TREE_CODE (inner) == NON_LVALUE_EXPR
1881 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1882 || TREE_CODE (inner) == SAVE_EXPR
1883 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
1884 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
1885 inner = find_placeholder (inner, &placeholder_ptr);
1886 else
1887 inner = TREE_OPERAND (inner, 0);
1888
1889 if (! DECL_P (inner))
1890 inner = NULL_TREE;
1891 }
1892
1893 if (inner == TREE_OPERAND (ref, 0))
1894 return ref;
1895 else
1896 return build (COMPONENT_REF, TREE_TYPE (ref), inner,
1897 TREE_OPERAND (ref, 1));
1898 }
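
/* For instance, given a reference like p->x.y where p is a pointer
   (names here are purely illustrative), the innermost base is an
   INDIRECT_REF rather than a decl, so it is replaced by NULL_TREE;
   the result is a COMPONENT_REF chain .x.y whose base object is
   recorded as unknown.  */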
1899
1900 /* Given REF, a MEM, and T, either the type of REF or the expression
1901 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1902 if we are making a new object of this type. BITPOS is the bit position
1903 of an outstanding offset on T that will be applied (subtracted) later. */
1904
1905 void
1906 set_mem_attributes_minus_bitpos (ref, t, objectp, bitpos)
1907 rtx ref;
1908 tree t;
1909 int objectp;
1910 HOST_WIDE_INT bitpos;
1911 {
1912 HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
1913 tree expr = MEM_EXPR (ref);
1914 rtx offset = MEM_OFFSET (ref);
1915 rtx size = MEM_SIZE (ref);
1916 unsigned int align = MEM_ALIGN (ref);
1917 HOST_WIDE_INT apply_bitpos = 0;
1918 tree type;
1919
1920 /* It can happen that type_for_mode was given a mode for which there
1921 is no language-level type, in which case it returns NULL and
1922 we can see that here. */
1923 if (t == NULL_TREE)
1924 return;
1925
1926 type = TYPE_P (t) ? t : TREE_TYPE (t);
1927
1928 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1929 wrong answer, as it assumes that DECL_RTL already has the right alias
1930 info. Callers should not set DECL_RTL until after the call to
1931 set_mem_attributes. */
1932 if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
1933 abort ();
1934
1935 /* Get the alias set from the expression or type (perhaps using a
1936 front-end routine) and use it. */
1937 alias = get_alias_set (t);
1938
1939 MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
1940 MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
1941 RTX_UNCHANGING_P (ref)
1942 |= ((lang_hooks.honor_readonly
1943 && (TYPE_READONLY (type) || TREE_READONLY (t)))
1944 || (! TYPE_P (t) && TREE_CONSTANT (t)));
1945
1946 /* If we are making an object of this type, or if this is a DECL, we know
1947 that it is a scalar if the type is not an aggregate. */
1948 if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
1949 MEM_SCALAR_P (ref) = 1;
1950
1951 /* We can set the alignment from the type if we are making an object,
1952 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1953 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1954 align = MAX (align, TYPE_ALIGN (type));
1955
1956 /* If the size is known, we can set that. */
1957 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1958 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
1959
1960 /* If T is not a type, we may be able to deduce some more information about
1961 the expression. */
1962 if (! TYPE_P (t))
1963 {
1964 maybe_set_unchanging (ref, t);
1965 if (TREE_THIS_VOLATILE (t))
1966 MEM_VOLATILE_P (ref) = 1;
1967
1968 /* Now remove any conversions: they don't change what the underlying
1969 object is. Likewise for SAVE_EXPR. */
1970 while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
1971 || TREE_CODE (t) == NON_LVALUE_EXPR
1972 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1973 || TREE_CODE (t) == SAVE_EXPR)
1974 t = TREE_OPERAND (t, 0);
1975
1976 /* If this expression can't be addressed (e.g., it contains a reference
1977 to a non-addressable field), show we don't change its alias set. */
1978 if (! can_address_p (t))
1979 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1980
1981 /* If this is a decl, set the attributes of the MEM from it. */
1982 if (DECL_P (t))
1983 {
1984 expr = t;
1985 offset = const0_rtx;
1986 apply_bitpos = bitpos;
1987 size = (DECL_SIZE_UNIT (t)
1988 && host_integerp (DECL_SIZE_UNIT (t), 1)
1989 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1990 align = DECL_ALIGN (t);
1991 }
1992
1993 /* If this is a constant, we know the alignment. */
1994 else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
1995 {
1996 align = TYPE_ALIGN (type);
1997 #ifdef CONSTANT_ALIGNMENT
1998 align = CONSTANT_ALIGNMENT (t, align);
1999 #endif
2000 }
2001
2002 /* If this is a field reference and not a bit-field, record it. */
2003 /* ??? There is some information that can be gleaned from bit-fields,
2004 such as the word offset in the structure that might be modified.
2005 But skip it for now. */
2006 else if (TREE_CODE (t) == COMPONENT_REF
2007 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
2008 {
2009 expr = component_ref_for_mem_expr (t);
2010 offset = const0_rtx;
2011 apply_bitpos = bitpos;
2012 /* ??? Any reason the field size would be different than
2013 the size we got from the type? */
2014 }
2015
2016 /* If this is an array reference, look for an outer field reference. */
2017 else if (TREE_CODE (t) == ARRAY_REF)
2018 {
2019 tree off_tree = size_zero_node;
2020
2021 do
2022 {
2023 tree index = TREE_OPERAND (t, 1);
2024 tree array = TREE_OPERAND (t, 0);
2025 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
2026 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
2027 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
2028
2029 /* We assume all arrays have sizes that are a multiple of a byte.
2030 First subtract the lower bound, if any, in the type of the
2031 index, then convert to sizetype and multiply by the size of the
2032 array element. */
2033 if (low_bound != 0 && ! integer_zerop (low_bound))
2034 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
2035 index, low_bound));
2036
2037 /* If the index has a self-referential type, pass it to a
2038 WITH_RECORD_EXPR; if the component size does too, pass our
2039 component to one. */
2040 if (CONTAINS_PLACEHOLDER_P (index))
2041 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, t);
2042 if (CONTAINS_PLACEHOLDER_P (unit_size))
2043 unit_size = build (WITH_RECORD_EXPR, sizetype,
2044 unit_size, array);
2045
2046 off_tree
2047 = fold (build (PLUS_EXPR, sizetype,
2048 fold (build (MULT_EXPR, sizetype,
2049 index,
2050 unit_size)),
2051 off_tree));
2052 t = TREE_OPERAND (t, 0);
2053 }
2054 while (TREE_CODE (t) == ARRAY_REF);
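
/* A worked example of the loop above (with purely illustrative
   names): for a reference a[i][j] into an array of 10-element rows
   of 4-byte integers, the first iteration gives off_tree = j * 4,
   the second gives off_tree = i * 40 + j * 4, and T is left
   pointing at the declaration of `a'.  */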
2055
2056 if (DECL_P (t))
2057 {
2058 expr = t;
2059 offset = NULL;
2060 if (host_integerp (off_tree, 1))
2061 {
2062 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
2063 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
2064 align = DECL_ALIGN (t);
2065 if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
2066 align = aoff;
2067 offset = GEN_INT (ioff);
2068 apply_bitpos = bitpos;
2069 }
2070 }
2071 else if (TREE_CODE (t) == COMPONENT_REF)
2072 {
2073 expr = component_ref_for_mem_expr (t);
2074 if (host_integerp (off_tree, 1))
2075 {
2076 offset = GEN_INT (tree_low_cst (off_tree, 1));
2077 apply_bitpos = bitpos;
2078 }
2079 /* ??? Any reason the field size would be different than
2080 the size we got from the type? */
2081 }
2082 else if (flag_argument_noalias > 1
2083 && TREE_CODE (t) == INDIRECT_REF
2084 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
2085 {
2086 expr = t;
2087 offset = NULL;
2088 }
2089 }
2090
2091 /* If this is a Fortran indirect argument reference, record the
2092 parameter decl. */
2093 else if (flag_argument_noalias > 1
2094 && TREE_CODE (t) == INDIRECT_REF
2095 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
2096 {
2097 expr = t;
2098 offset = NULL;
2099 }
2100 }
2101
2102 /* If we modified OFFSET based on T, then subtract the outstanding
2103 bit position offset. Similarly, increase the size of the accessed
2104 object to contain the negative offset. */
2105 if (apply_bitpos)
2106 {
2107 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
2108 if (size)
2109 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
2110 }
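
/* For instance, a BITPOS of 16 on entry moves the recorded offset
   back by 2 bytes and grows the recorded size by 2 bytes, so the
   attributes keep describing the underlying object rather than the
   bit-adjusted reference.  */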
2111
2112 /* Now set the attributes we computed above. */
2113 MEM_ATTRS (ref)
2114 = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
2115
2116 /* If this is already known to be a scalar or aggregate, we are done. */
2117 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
2118 return;
2119
2120 /* If it is a reference into an aggregate, this is part of an aggregate.
2121 Otherwise we don't know. */
2122 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
2123 || TREE_CODE (t) == ARRAY_RANGE_REF
2124 || TREE_CODE (t) == BIT_FIELD_REF)
2125 MEM_IN_STRUCT_P (ref) = 1;
2126 }
2127
2128 void
2129 set_mem_attributes (ref, t, objectp)
2130 rtx ref;
2131 tree t;
2132 int objectp;
2133 {
2134 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
2135 }
2136
2137 /* Set the decl for MEM to DECL. */
2138
2139 void
2140 set_mem_attrs_from_reg (mem, reg)
2141 rtx mem;
2142 rtx reg;
2143 {
2144 MEM_ATTRS (mem)
2145 = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
2146 GEN_INT (REG_OFFSET (reg)),
2147 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
2148 }
2149
2150 /* Set the alias set of MEM to SET. */
2151
2152 void
2153 set_mem_alias_set (mem, set)
2154 rtx mem;
2155 HOST_WIDE_INT set;
2156 {
2157 #ifdef ENABLE_CHECKING
2158 /* If the new and old alias sets don't conflict, something is wrong. */
2159 if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
2160 abort ();
2161 #endif
2162
2163 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
2164 MEM_SIZE (mem), MEM_ALIGN (mem),
2165 GET_MODE (mem));
2166 }
2167
2168 /* Set the alignment of MEM to ALIGN bits. */
2169
2170 void
2171 set_mem_align (mem, align)
2172 rtx mem;
2173 unsigned int align;
2174 {
2175 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
2176 MEM_OFFSET (mem), MEM_SIZE (mem), align,
2177 GET_MODE (mem));
2178 }
2179
2180 /* Set the expr for MEM to EXPR. */
2181
2182 void
2183 set_mem_expr (mem, expr)
2184 rtx mem;
2185 tree expr;
2186 {
2187 MEM_ATTRS (mem)
2188 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
2189 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
2190 }
2191
2192 /* Set the offset of MEM to OFFSET. */
2193
2194 void
2195 set_mem_offset (mem, offset)
2196 rtx mem, offset;
2197 {
2198 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
2199 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
2200 GET_MODE (mem));
2201 }
2202
2203 /* Set the size of MEM to SIZE. */
2204
2205 void
2206 set_mem_size (mem, size)
2207 rtx mem, size;
2208 {
2209 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
2210 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
2211 GET_MODE (mem));
2212 }
2213 \f
2214 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2215 and its address changed to ADDR. (VOIDmode means don't change the mode.
2216 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2217 returned memory location is required to be valid. The memory
2218 attributes are not changed. */
2219
2220 static rtx
2221 change_address_1 (memref, mode, addr, validate)
2222 rtx memref;
2223 enum machine_mode mode;
2224 rtx addr;
2225 int validate;
2226 {
2227 rtx new;
2228
2229 if (GET_CODE (memref) != MEM)
2230 abort ();
2231 if (mode == VOIDmode)
2232 mode = GET_MODE (memref);
2233 if (addr == 0)
2234 addr = XEXP (memref, 0);
2235
2236 if (validate)
2237 {
2238 if (reload_in_progress || reload_completed)
2239 {
2240 if (! memory_address_p (mode, addr))
2241 abort ();
2242 }
2243 else
2244 addr = memory_address (mode, addr);
2245 }
2246
2247 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2248 return memref;
2249
2250 new = gen_rtx_MEM (mode, addr);
2251 MEM_COPY_ATTRIBUTES (new, memref);
2252 return new;
2253 }
2254
2255 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2256 way we are changing MEMREF, so we only preserve the alias set. */
2257
2258 rtx
2259 change_address (memref, mode, addr)
2260 rtx memref;
2261 enum machine_mode mode;
2262 rtx addr;
2263 {
2264 rtx new = change_address_1 (memref, mode, addr, 1);
2265 enum machine_mode mmode = GET_MODE (new);
2266
2267 MEM_ATTRS (new)
2268 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0,
2269 mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode)),
2270 (mmode == BLKmode ? BITS_PER_UNIT
2271 : GET_MODE_ALIGNMENT (mmode)),
2272 mmode);
2273
2274 return new;
2275 }
2276
2277 /* Return a memory reference like MEMREF, but with its mode changed
2278 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2279 nonzero, the memory address is forced to be valid.
2280 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
2281 and caller is responsible for adjusting MEMREF base register. */
2282
2283 rtx
2284 adjust_address_1 (memref, mode, offset, validate, adjust)
2285 rtx memref;
2286 enum machine_mode mode;
2287 HOST_WIDE_INT offset;
2288 int validate, adjust;
2289 {
2290 rtx addr = XEXP (memref, 0);
2291 rtx new;
2292 rtx memoffset = MEM_OFFSET (memref);
2293 rtx size = 0;
2294 unsigned int memalign = MEM_ALIGN (memref);
2295
2296 /* ??? Prefer to create garbage instead of creating shared rtl.
2297 This may happen even if offset is nonzero -- consider
2298 (plus (plus reg reg) const_int) -- so do this always. */
2299 addr = copy_rtx (addr);
2300
2301 if (adjust)
2302 {
2303 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2304 object, we can merge it into the LO_SUM. */
2305 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2306 && offset >= 0
2307 && (unsigned HOST_WIDE_INT) offset
2308 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2309 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
2310 plus_constant (XEXP (addr, 1), offset));
2311 else
2312 addr = plus_constant (addr, offset);
2313 }
2314
2315 new = change_address_1 (memref, mode, addr, validate);
2316
2317 /* Compute the new values of the memory attributes due to this adjustment.
2318 We add the offsets and update the alignment. */
2319 if (memoffset)
2320 memoffset = GEN_INT (offset + INTVAL (memoffset));
2321
2322 /* Compute the new alignment by taking the MIN of the alignment and the
2323 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2324 is zero. */
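
/* For example, (offset & -offset) isolates the lowest set bit in
   two's complement arithmetic: an offset of 12 yields 4, so a MEM
   previously known to be 64-bit aligned is afterwards only known
   to be 32-bit (4-byte) aligned.  */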
2325 if (offset != 0)
2326 memalign
2327 = MIN (memalign,
2328 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
2329
2330 /* We can compute the size in a number of ways. */
2331 if (GET_MODE (new) != BLKmode)
2332 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
2333 else if (MEM_SIZE (memref))
2334 size = plus_constant (MEM_SIZE (memref), -offset);
2335
2336 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
2337 memoffset, size, memalign, GET_MODE (new));
2338
2339 /* At some point, we should validate that this offset is within the object,
2340 if all the appropriate values are known. */
2341 return new;
2342 }
2343
2344 /* Return a memory reference like MEMREF, but with its mode changed
2345 to MODE and its address changed to ADDR, which is assumed to be
2346 MEMREF offset by OFFSET bytes. If VALIDATE is
2347 nonzero, the memory address is forced to be valid. */
2348
2349 rtx
2350 adjust_automodify_address_1 (memref, mode, addr, offset, validate)
2351 rtx memref;
2352 enum machine_mode mode;
2353 rtx addr;
2354 HOST_WIDE_INT offset;
2355 int validate;
2356 {
2357 memref = change_address_1 (memref, VOIDmode, addr, validate);
2358 return adjust_address_1 (memref, mode, offset, validate, 0);
2359 }
2360
2361 /* Return a memory reference like MEMREF, but whose address is changed by
2362 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2363 known to be in OFFSET (possibly 1). */
2364
2365 rtx
2366 offset_address (memref, offset, pow2)
2367 rtx memref;
2368 rtx offset;
2369 unsigned HOST_WIDE_INT pow2;
2370 {
2371 rtx new, addr = XEXP (memref, 0);
2372
2373 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2374
2375 /* At this point we don't know _why_ the address is invalid. It
2376 could have secondary memory references, multiplies or anything.
2377
2378 However, if we did go and rearrange things, we can wind up not
2379 being able to recognize the magic around pic_offset_table_rtx.
2380 This stuff is fragile, and is yet another example of why it is
2381 bad to expose PIC machinery too early. */
2382 if (! memory_address_p (GET_MODE (memref), new)
2383 && GET_CODE (addr) == PLUS
2384 && XEXP (addr, 0) == pic_offset_table_rtx)
2385 {
2386 addr = force_reg (GET_MODE (addr), addr);
2387 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2388 }
2389
2390 update_temp_slot_address (XEXP (memref, 0), new);
2391 new = change_address_1 (memref, VOIDmode, new, 1);
2392
2393 /* Update the alignment to reflect the offset. Reset the offset, which
2394 we don't know. */
2395 MEM_ATTRS (new)
2396 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2397 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
2398 GET_MODE (new));
2399 return new;
2400 }
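
/* POW2 lets a caller preserve alignment knowledge across a variable
   offset.  A sketch, with hypothetical MEM and index rtxes:

     mem2 = offset_address (mem, idx, 8);

   asserts that IDX is a multiple of 8 bytes, so a 16-byte-aligned
   MEM still yields 64-bit alignment for the result.  */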
2401
2402 /* Return a memory reference like MEMREF, but with its address changed to
2403 ADDR. The caller is asserting that the actual piece of memory pointed
2404 to is the same, just the form of the address is being changed, such as
2405 by putting something into a register. */
2406
2407 rtx
2408 replace_equiv_address (memref, addr)
2409 rtx memref;
2410 rtx addr;
2411 {
2412 /* change_address_1 copies the memory attribute structure without change
2413 and that's exactly what we want here. */
2414 update_temp_slot_address (XEXP (memref, 0), addr);
2415 return change_address_1 (memref, VOIDmode, addr, 1);
2416 }
2417
2418 /* Likewise, but the reference is not required to be valid. */
2419
2420 rtx
2421 replace_equiv_address_nv (memref, addr)
2422 rtx memref;
2423 rtx addr;
2424 {
2425 return change_address_1 (memref, VOIDmode, addr, 0);
2426 }
2427
2428 /* Return a memory reference like MEMREF, but with its mode widened to
2429 MODE and offset by OFFSET. This would be used by targets that e.g.
2430 cannot issue QImode memory operations and have to use SImode memory
2431 operations plus masking logic. */
2432
2433 rtx
2434 widen_memory_access (memref, mode, offset)
2435 rtx memref;
2436 enum machine_mode mode;
2437 HOST_WIDE_INT offset;
2438 {
2439 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
2440 tree expr = MEM_EXPR (new);
2441 rtx memoffset = MEM_OFFSET (new);
2442 unsigned int size = GET_MODE_SIZE (mode);
2443
2444 /* If we don't know what offset we were at within the expression, then
2445 we can't know if we've overstepped the bounds. */
2446 if (! memoffset)
2447 expr = NULL_TREE;
2448
2449 while (expr)
2450 {
2451 if (TREE_CODE (expr) == COMPONENT_REF)
2452 {
2453 tree field = TREE_OPERAND (expr, 1);
2454
2455 if (! DECL_SIZE_UNIT (field))
2456 {
2457 expr = NULL_TREE;
2458 break;
2459 }
2460
2461 /* Is the field at least as large as the access? If so, ok,
2462 otherwise strip back to the containing structure. */
2463 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2464 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2465 && INTVAL (memoffset) >= 0)
2466 break;
2467
2468 if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
2469 {
2470 expr = NULL_TREE;
2471 break;
2472 }
2473
2474 expr = TREE_OPERAND (expr, 0);
2475 memoffset = (GEN_INT (INTVAL (memoffset)
2476 + tree_low_cst (DECL_FIELD_OFFSET (field), 1)
2477 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2478 / BITS_PER_UNIT)));
2479 }
2480 /* Similarly for the decl. */
2481 else if (DECL_P (expr)
2482 && DECL_SIZE_UNIT (expr)
2483 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2484 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2485 && (! memoffset || INTVAL (memoffset) >= 0))
2486 break;
2487 else
2488 {
2489 /* The widened memory access overflows the expression, which means
2490 that it could alias another expression. Zap it. */
2491 expr = NULL_TREE;
2492 break;
2493 }
2494 }
2495
2496 if (! expr)
2497 memoffset = NULL_RTX;
2498
2499 /* The widened memory may alias other stuff, so zap the alias set. */
2500 /* ??? Maybe use get_alias_set on any remaining expression. */
2501
2502 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2503 MEM_ALIGN (new), mode);
2504
2505 return new;
2506 }
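
/* Illustrative use (a sketch with a hypothetical MEM named M): a
   target without byte loads holding a QImode reference at byte 3 of
   an aligned word might call

     rtx wide = widen_memory_access (M, SImode, -3);

   to get the containing SImode word, then extract the byte with
   shifts and masks.  The walk above keeps MEM_EXPR only when the
   widened access provably stays inside the referenced object.  */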
2507 \f
2508 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2509
2510 rtx
2511 gen_label_rtx ()
2512 {
2513 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2514 NULL, label_num++, NULL);
2515 }
2516 \f
2517 /* For procedure integration. */
2518
2519 /* Install new pointers to the first and last insns in the chain.
2520 Also, set cur_insn_uid to one higher than the last in use.
2521 Used for an inline-procedure after copying the insn chain. */
2522
2523 void
2524 set_new_first_and_last_insn (first, last)
2525 rtx first, last;
2526 {
2527 rtx insn;
2528
2529 first_insn = first;
2530 last_insn = last;
2531 cur_insn_uid = 0;
2532
2533 for (insn = first; insn; insn = NEXT_INSN (insn))
2534 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2535
2536 cur_insn_uid++;
2537 }
2538
2539 /* Set the range of label numbers found in the current function.
2540 This is used when belatedly compiling an inline function. */
2541
2542 void
2543 set_new_first_and_last_label_num (first, last)
2544 int first, last;
2545 {
2546 base_label_num = label_num;
2547 first_label_num = first;
2548 last_label_num = last;
2549 }
2550
2551 /* Set the last label number found in the current function.
2552 This is used when belatedly compiling an inline function. */
2553
2554 void
2555 set_new_last_label_num (last)
2556 int last;
2557 {
2558 base_label_num = label_num;
2559 last_label_num = last;
2560 }
2561 \f
2562 /* Restore all variables describing the current status from the structure *P.
2563 This is used after a nested function. */
2564
2565 void
2566 restore_emit_status (p)
2567 struct function *p ATTRIBUTE_UNUSED;
2568 {
2569 last_label_num = 0;
2570 }
2571 \f
2572 /* Go through all the RTL insn bodies and copy any invalid shared
2573 structure. This routine should only be called once. */
2574
2575 void
2576 unshare_all_rtl (fndecl, insn)
2577 tree fndecl;
2578 rtx insn;
2579 {
2580 tree decl;
2581
2582 /* Make sure that virtual parameters are not shared. */
2583 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2584 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2585
2586 /* Make sure that virtual stack slots are not shared. */
2587 unshare_all_decls (DECL_INITIAL (fndecl));
2588
2589 /* Unshare just about everything else. */
2590 unshare_all_rtl_1 (insn);
2591
2592 /* Make sure the addresses of stack slots found outside the insn chain
2593 (such as, in DECL_RTL of a variable) are not shared
2594 with the insn chain.
2595
2596 This special care is necessary when the stack slot MEM does not
2597 actually appear in the insn chain. If it does appear, its address
2598 is unshared from all else at that point. */
2599 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2600 }
2601
2602 /* Go through all the RTL insn bodies and copy any invalid shared
2603 structure, again. This is a fairly expensive thing to do, so it
2604 should be done sparingly. */
2605
2606 void
2607 unshare_all_rtl_again (insn)
2608 rtx insn;
2609 {
2610 rtx p;
2611 tree decl;
2612
2613 for (p = insn; p; p = NEXT_INSN (p))
2614 if (INSN_P (p))
2615 {
2616 reset_used_flags (PATTERN (p));
2617 reset_used_flags (REG_NOTES (p));
2618 reset_used_flags (LOG_LINKS (p));
2619 }
2620
2621 /* Make sure that virtual stack slots are not shared. */
2622 reset_used_decls (DECL_INITIAL (cfun->decl));
2623
2624 /* Make sure that virtual parameters are not shared. */
2625 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2626 reset_used_flags (DECL_RTL (decl));
2627
2628 reset_used_flags (stack_slot_list);
2629
2630 unshare_all_rtl (cfun->decl, insn);
2631 }
2632
2633 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2634 Assumes the mark bits are cleared at entry. */
2635
2636 static void
2637 unshare_all_rtl_1 (insn)
2638 rtx insn;
2639 {
2640 for (; insn; insn = NEXT_INSN (insn))
2641 if (INSN_P (insn))
2642 {
2643 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2644 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2645 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2646 }
2647 }
2648
2649 /* Go through all virtual stack slots of a function and copy any
2650 shared structure. */
2651 static void
2652 unshare_all_decls (blk)
2653 tree blk;
2654 {
2655 tree t;
2656
2657 /* Copy shared decls. */
2658 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2659 if (DECL_RTL_SET_P (t))
2660 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2661
2662 /* Now process sub-blocks. */
2663 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2664 unshare_all_decls (t);
2665 }
2666
2667 /* Go through all virtual stack slots of a function and mark them as
2668 not shared. */
2669 static void
2670 reset_used_decls (blk)
2671 tree blk;
2672 {
2673 tree t;
2674
2675 /* Mark decls. */
2676 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2677 if (DECL_RTL_SET_P (t))
2678 reset_used_flags (DECL_RTL (t));
2679
2680 /* Now process sub-blocks. */
2681 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2682 reset_used_decls (t);
2683 }
2684
2685 /* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
2686 placed in the result directly, rather than being copied. MAY_SHARE is
2687 either a MEM or an EXPR_LIST of MEMs. */
2688
2689 rtx
2690 copy_most_rtx (orig, may_share)
2691 rtx orig;
2692 rtx may_share;
2693 {
2694 rtx copy;
2695 int i, j;
2696 RTX_CODE code;
2697 const char *format_ptr;
2698
2699 if (orig == may_share
2700 || (GET_CODE (may_share) == EXPR_LIST
2701 && in_expr_list_p (may_share, orig)))
2702 return orig;
2703
2704 code = GET_CODE (orig);
2705
2706 switch (code)
2707 {
2708 case REG:
2709 case QUEUED:
2710 case CONST_INT:
2711 case CONST_DOUBLE:
2712 case CONST_VECTOR:
2713 case SYMBOL_REF:
2714 case CODE_LABEL:
2715 case PC:
2716 case CC0:
2717 return orig;
2718 default:
2719 break;
2720 }
2721
2722 copy = rtx_alloc (code);
2723 PUT_MODE (copy, GET_MODE (orig));
2724 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2725 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2726 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2727 RTX_FLAG (copy, integrated) = RTX_FLAG (orig, integrated);
2728 RTX_FLAG (copy, frame_related) = RTX_FLAG (orig, frame_related);
2729
2730 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2731
2732 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2733 {
2734 switch (*format_ptr++)
2735 {
2736 case 'e':
2737 XEXP (copy, i) = XEXP (orig, i);
2738 if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
2739 XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
2740 break;
2741
2742 case 'u':
2743 XEXP (copy, i) = XEXP (orig, i);
2744 break;
2745
2746 case 'E':
2747 case 'V':
2748 XVEC (copy, i) = XVEC (orig, i);
2749 if (XVEC (orig, i) != NULL)
2750 {
2751 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2752 for (j = 0; j < XVECLEN (copy, i); j++)
2753 XVECEXP (copy, i, j)
2754 = copy_most_rtx (XVECEXP (orig, i, j), may_share);
2755 }
2756 break;
2757
2758 case 'w':
2759 XWINT (copy, i) = XWINT (orig, i);
2760 break;
2761
2762 case 'n':
2763 case 'i':
2764 XINT (copy, i) = XINT (orig, i);
2765 break;
2766
2767 case 't':
2768 XTREE (copy, i) = XTREE (orig, i);
2769 break;
2770
2771 case 's':
2772 case 'S':
2773 XSTR (copy, i) = XSTR (orig, i);
2774 break;
2775
2776 case '0':
2777 /* Copy this through the wide int field; that's safest. */
2778 X0WINT (copy, i) = X0WINT (orig, i);
2779 break;
2780
2781 default:
2782 abort ();
2783 }
2784 }
2785 return copy;
2786 }
2787
2788 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2789 Recursively does the same for subexpressions. */
2790
2791 rtx
2792 copy_rtx_if_shared (orig)
2793 rtx orig;
2794 {
2795 rtx x = orig;
2796 int i;
2797 enum rtx_code code;
2798 const char *format_ptr;
2799 int copied = 0;
2800
2801 if (x == 0)
2802 return 0;
2803
2804 code = GET_CODE (x);
2805
2806 /* These types may be freely shared. */
2807
2808 switch (code)
2809 {
2810 case REG:
2811 case QUEUED:
2812 case CONST_INT:
2813 case CONST_DOUBLE:
2814 case CONST_VECTOR:
2815 case SYMBOL_REF:
2816 case CODE_LABEL:
2817 case PC:
2818 case CC0:
2819 case SCRATCH:
2820 /* SCRATCHes must be shared because each one represents a distinct value. */
2821 return x;
2822
2823 case CONST:
2824 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2825 a LABEL_REF, it isn't sharable. */
2826 if (GET_CODE (XEXP (x, 0)) == PLUS
2827 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2828 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2829 return x;
2830 break;
2831
2832 case INSN:
2833 case JUMP_INSN:
2834 case CALL_INSN:
2835 case NOTE:
2836 case BARRIER:
2837 /* The chain of insns is not being copied. */
2838 return x;
2839
2840 case MEM:
2841 /* A MEM is allowed to be shared if its address is constant.
2842
2843 We used to allow sharing of MEMs which referenced
2844 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
2845 that can lose. instantiate_virtual_regs will not unshare
2846 the MEMs, and combine may change the structure of the address
2847 because it looks safe and profitable in one context, but
2848 in some other context it creates unrecognizable RTL. */
2849 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
2850 return x;
2851
2852 break;
2853
2854 default:
2855 break;
2856 }
2857
2858 /* This rtx may not be shared. If it has already been seen,
2859 replace it with a copy of itself. */
2860
2861 if (RTX_FLAG (x, used))
2862 {
2863 rtx copy;
2864
2865 copy = rtx_alloc (code);
2866 memcpy (copy, x,
2867 (sizeof (*copy) - sizeof (copy->fld)
2868 + sizeof (copy->fld[0]) * GET_RTX_LENGTH (code)));
2869 x = copy;
2870 copied = 1;
2871 }
2872 RTX_FLAG (x, used) = 1;
2873
2874 /* Now scan the subexpressions recursively.
2875 We can store any replaced subexpressions directly into X
2876 since we know X is not shared! Any vectors in X
2877 must be copied if X was copied. */
2878
2879 format_ptr = GET_RTX_FORMAT (code);
2880
2881 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2882 {
2883 switch (*format_ptr++)
2884 {
2885 case 'e':
2886 XEXP (x, i) = copy_rtx_if_shared (XEXP (x, i));
2887 break;
2888
2889 case 'E':
2890 if (XVEC (x, i) != NULL)
2891 {
2892 int j;
2893 int len = XVECLEN (x, i);
2894
2895 if (copied && len > 0)
2896 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2897 for (j = 0; j < len; j++)
2898 XVECEXP (x, i, j) = copy_rtx_if_shared (XVECEXP (x, i, j));
2899 }
2900 break;
2901 }
2902 }
2903 return x;
2904 }
2905
2906 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2907 to look for shared sub-parts. */
2908
2909 void
2910 reset_used_flags (x)
2911 rtx x;
2912 {
2913 int i, j;
2914 enum rtx_code code;
2915 const char *format_ptr;
2916
2917 if (x == 0)
2918 return;
2919
2920 code = GET_CODE (x);
2921
2922 /* These types may be freely shared so we needn't do any resetting
2923 for them. */
2924
2925 switch (code)
2926 {
2927 case REG:
2928 case QUEUED:
2929 case CONST_INT:
2930 case CONST_DOUBLE:
2931 case CONST_VECTOR:
2932 case SYMBOL_REF:
2933 case CODE_LABEL:
2934 case PC:
2935 case CC0:
2936 return;
2937
2938 case INSN:
2939 case JUMP_INSN:
2940 case CALL_INSN:
2941 case NOTE:
2942 case LABEL_REF:
2943 case BARRIER:
2944 /* The chain of insns is not being copied. */
2945 return;
2946
2947 default:
2948 break;
2949 }
2950
2951 RTX_FLAG (x, used) = 0;
2952
2953 format_ptr = GET_RTX_FORMAT (code);
2954 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2955 {
2956 switch (*format_ptr++)
2957 {
2958 case 'e':
2959 reset_used_flags (XEXP (x, i));
2960 break;
2961
2962 case 'E':
2963 for (j = 0; j < XVECLEN (x, i); j++)
2964 reset_used_flags (XVECEXP (x, i, j));
2965 break;
2966 }
2967 }
2968 }
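
/* reset_used_flags and copy_rtx_if_shared cooperate: a pass that may
   have created shared structure first clears the used bits and then
   copies, as unshare_all_rtl_again above does:

     reset_used_flags (PATTERN (insn));
     ...
     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));

   The first traversal marks nothing; the second marks each node as it
   goes and copies any node it reaches a second time.  */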
2969 \f
2970 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2971 Return X or the rtx for the pseudo reg the value of X was copied into.
2972 OTHER must be valid as a SET_DEST. */
2973
2974 rtx
2975 make_safe_from (x, other)
2976 rtx x, other;
2977 {
2978 while (1)
2979 switch (GET_CODE (other))
2980 {
2981 case SUBREG:
2982 other = SUBREG_REG (other);
2983 break;
2984 case STRICT_LOW_PART:
2985 case SIGN_EXTEND:
2986 case ZERO_EXTEND:
2987 other = XEXP (other, 0);
2988 break;
2989 default:
2990 goto done;
2991 }
2992 done:
2993 if ((GET_CODE (other) == MEM
2994 && ! CONSTANT_P (x)
2995 && GET_CODE (x) != REG
2996 && GET_CODE (x) != SUBREG)
2997 || (GET_CODE (other) == REG
2998 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2999 || reg_mentioned_p (other, x))))
3000 {
3001 rtx temp = gen_reg_rtx (GET_MODE (x));
3002 emit_move_insn (temp, x);
3003 return temp;
3004 }
3005 return x;
3006 }
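
/* For example, if OTHER is a hard register that X mentions, the
   assignment to OTHER would clobber X, so the code above emits a move
   of X into a fresh pseudo and returns the pseudo instead of X.  */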
3007 \f
3008 /* Emission of insns (adding them to the doubly-linked list). */
3009
3010 /* Return the first insn of the current sequence or current function. */
3011
3012 rtx
3013 get_insns ()
3014 {
3015 return first_insn;
3016 }
3017
3018 /* Specify a new insn as the first in the chain. */
3019
3020 void
3021 set_first_insn (insn)
3022 rtx insn;
3023 {
3024 if (PREV_INSN (insn) != 0)
3025 abort ();
3026 first_insn = insn;
3027 }
3028
3029 /* Return the last insn emitted in current sequence or current function. */
3030
3031 rtx
3032 get_last_insn ()
3033 {
3034 return last_insn;
3035 }
3036
3037 /* Specify a new insn as the last in the chain. */
3038
3039 void
3040 set_last_insn (insn)
3041 rtx insn;
3042 {
3043 if (NEXT_INSN (insn) != 0)
3044 abort ();
3045 last_insn = insn;
3046 }
3047
3048 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3049
3050 rtx
3051 get_last_insn_anywhere ()
3052 {
3053 struct sequence_stack *stack;
3054 if (last_insn)
3055 return last_insn;
3056 for (stack = seq_stack; stack; stack = stack->next)
3057 if (stack->last != 0)
3058 return stack->last;
3059 return 0;
3060 }
3061
3062 /* Return the first nonnote insn emitted in current sequence or current
3063 function. This routine looks inside SEQUENCEs. */
3064
3065 rtx
3066 get_first_nonnote_insn ()
3067 {
3068 rtx insn = first_insn;
3069
3070 while (insn)
3071 {
3072 insn = next_insn (insn);
3073 if (insn == 0 || GET_CODE (insn) != NOTE)
3074 break;
3075 }
3076
3077 return insn;
3078 }
3079
3080 /* Return the last nonnote insn emitted in current sequence or current
3081 function. This routine looks inside SEQUENCEs. */
3082
3083 rtx
3084 get_last_nonnote_insn ()
3085 {
3086 rtx insn = last_insn;
3087
3088 while (insn)
3089 {
3090 insn = previous_insn (insn);
3091 if (insn == 0 || GET_CODE (insn) != NOTE)
3092 break;
3093 }
3094
3095 return insn;
3096 }
3097
3098 /* Return a number larger than any instruction's uid in this function. */
3099
3100 int
3101 get_max_uid ()
3102 {
3103 return cur_insn_uid;
3104 }
3105
3106 /* Renumber instructions so that no instruction UIDs are wasted. */
3107
3108 void
3109 renumber_insns (stream)
3110 FILE *stream;
3111 {
3112 rtx insn;
3113
3114 /* If we're not supposed to renumber instructions, don't. */
3115 if (!flag_renumber_insns)
3116 return;
3117
3118 /* If there aren't that many instructions, then it's not really
3119 worth renumbering them. */
3120 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
3121 return;
3122
3123 cur_insn_uid = 1;
3124
3125 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3126 {
3127 if (stream)
3128 fprintf (stream, "Renumbering insn %d to %d\n",
3129 INSN_UID (insn), cur_insn_uid);
3130 INSN_UID (insn) = cur_insn_uid++;
3131 }
3132 }
3133 \f
3134 /* Return the next insn. If it is a SEQUENCE, return the first insn
3135 of the sequence. */
3136
3137 rtx
3138 next_insn (insn)
3139 rtx insn;
3140 {
3141 if (insn)
3142 {
3143 insn = NEXT_INSN (insn);
3144 if (insn && GET_CODE (insn) == INSN
3145 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3146 insn = XVECEXP (PATTERN (insn), 0, 0);
3147 }
3148
3149 return insn;
3150 }
3151
3152 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3153 of the sequence. */
3154
3155 rtx
3156 previous_insn (insn)
3157 rtx insn;
3158 {
3159 if (insn)
3160 {
3161 insn = PREV_INSN (insn);
3162 if (insn && GET_CODE (insn) == INSN
3163 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3164 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3165 }
3166
3167 return insn;
3168 }
3169
3170 /* Return the next insn after INSN that is not a NOTE. This routine does not
3171 look inside SEQUENCEs. */
3172
3173 rtx
3174 next_nonnote_insn (insn)
3175 rtx insn;
3176 {
3177 while (insn)
3178 {
3179 insn = NEXT_INSN (insn);
3180 if (insn == 0 || GET_CODE (insn) != NOTE)
3181 break;
3182 }
3183
3184 return insn;
3185 }
3186
3187 /* Return the previous insn before INSN that is not a NOTE. This routine does
3188 not look inside SEQUENCEs. */
3189
3190 rtx
3191 prev_nonnote_insn (insn)
3192 rtx insn;
3193 {
3194 while (insn)
3195 {
3196 insn = PREV_INSN (insn);
3197 if (insn == 0 || GET_CODE (insn) != NOTE)
3198 break;
3199 }
3200
3201 return insn;
3202 }
3203
3204 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3205 or 0, if there is none. This routine does not look inside
3206 SEQUENCEs. */
3207
3208 rtx
3209 next_real_insn (insn)
3210 rtx insn;
3211 {
3212 while (insn)
3213 {
3214 insn = NEXT_INSN (insn);
3215 if (insn == 0 || GET_CODE (insn) == INSN
3216 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
3217 break;
3218 }
3219
3220 return insn;
3221 }
3222
3223 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3224 or 0, if there is none. This routine does not look inside
3225 SEQUENCEs. */
3226
3227 rtx
3228 prev_real_insn (insn)
3229 rtx insn;
3230 {
3231 while (insn)
3232 {
3233 insn = PREV_INSN (insn);
3234 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
3235 || GET_CODE (insn) == JUMP_INSN)
3236 break;
3237 }
3238
3239 return insn;
3240 }
3241
3242 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3243 This routine does not look inside SEQUENCEs. */
3244
3245 rtx
3246 last_call_insn ()
3247 {
3248 rtx insn;
3249
3250 for (insn = get_last_insn ();
3251 insn && GET_CODE (insn) != CALL_INSN;
3252 insn = PREV_INSN (insn))
3253 ;
3254
3255 return insn;
3256 }
3257
3258 /* Return nonzero if INSN really does something: before reload every
3259 INSN, CALL_INSN or JUMP_INSN counts; after reload, INSNs whose
3260 pattern is just a USE or CLOBBER no longer count as active. */
3261
3262 int
3263 active_insn_p (insn)
3264 rtx insn;
3265 {
3266 return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
3267 || (GET_CODE (insn) == INSN
3268 && (! reload_completed
3269 || (GET_CODE (PATTERN (insn)) != USE
3270 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3271 }
3272
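/* Find the next insn after INSN that is active in the sense of
   active_insn_p above.  This routine does not look inside SEQUENCEs.
   Until reload has completed, this is the same as next_real_insn.  */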
3273 rtx
3274 next_active_insn (insn)
3275 rtx insn;
3276 {
3277 while (insn)
3278 {
3279 insn = NEXT_INSN (insn);
3280 if (insn == 0 || active_insn_p (insn))
3281 break;
3282 }
3283
3284 return insn;
3285 }
3286
3287 /* Find the last insn before INSN that really does something. This routine
3288 does not look inside SEQUENCEs. Until reload has completed, this is the
3289 same as prev_real_insn. */
3290
3291 rtx
3292 prev_active_insn (insn)
3293 rtx insn;
3294 {
3295 while (insn)
3296 {
3297 insn = PREV_INSN (insn);
3298 if (insn == 0 || active_insn_p (insn))
3299 break;
3300 }
3301
3302 return insn;
3303 }
3304
3305 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3306
3307 rtx
3308 next_label (insn)
3309 rtx insn;
3310 {
3311 while (insn)
3312 {
3313 insn = NEXT_INSN (insn);
3314 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3315 break;
3316 }
3317
3318 return insn;
3319 }
3320
3321 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3322
3323 rtx
3324 prev_label (insn)
3325 rtx insn;
3326 {
3327 while (insn)
3328 {
3329 insn = PREV_INSN (insn);
3330 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3331 break;
3332 }
3333
3334 return insn;
3335 }
3336 \f
3337 #ifdef HAVE_cc0
3338 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3339 and REG_CC_USER notes so we can find it. */
3340
3341 void
3342 link_cc0_insns (insn)
3343 rtx insn;
3344 {
3345 rtx user = next_nonnote_insn (insn);
3346
3347 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
3348 user = XVECEXP (PATTERN (user), 0, 0);
3349
3350 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3351 REG_NOTES (user));
3352 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
3353 }
3354
3355 /* Return the next insn that uses CC0 after INSN, which is assumed to
3356 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3357 applied to the result of this function should yield INSN).
3358
3359 Normally, this is simply the next insn. However, if a REG_CC_USER note
3360 is present, it contains the insn that uses CC0.
3361
3362 Return 0 if we can't find the insn. */
3363
3364 rtx
3365 next_cc0_user (insn)
3366 rtx insn;
3367 {
3368 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3369
3370 if (note)
3371 return XEXP (note, 0);
3372
3373 insn = next_nonnote_insn (insn);
3374 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
3375 insn = XVECEXP (PATTERN (insn), 0, 0);
3376
3377 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3378 return insn;
3379
3380 return 0;
3381 }
3382
3383 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3384 note, it is the previous insn. */
3385
3386 rtx
3387 prev_cc0_setter (insn)
3388 rtx insn;
3389 {
3390 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3391
3392 if (note)
3393 return XEXP (note, 0);
3394
3395 insn = prev_nonnote_insn (insn);
3396 if (! sets_cc0_p (PATTERN (insn)))
3397 abort ();
3398
3399 return insn;
3400 }
3401 #endif
3402
3403 /* Increment the label uses for all labels present in rtx. */
3404
3405 static void
3406 mark_label_nuses (x)
3407 rtx x;
3408 {
3409 enum rtx_code code;
3410 int i, j;
3411 const char *fmt;
3412
3413 code = GET_CODE (x);
3414 if (code == LABEL_REF)
3415 LABEL_NUSES (XEXP (x, 0))++;
3416
3417 fmt = GET_RTX_FORMAT (code);
3418 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3419 {
3420 if (fmt[i] == 'e')
3421 mark_label_nuses (XEXP (x, i));
3422 else if (fmt[i] == 'E')
3423 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3424 mark_label_nuses (XVECEXP (x, i, j));
3425 }
3426 }
3427
3428 \f
3429 /* Try splitting insns that can be split for better scheduling.
3430 PAT is the pattern which might split.
3431 TRIAL is the insn providing PAT.
3432 LAST is nonzero if we should return the last insn of the sequence produced.
3433
3434 If this routine succeeds in splitting, it returns the first or last
3435 replacement insn depending on the value of LAST. Otherwise, it
3436 returns TRIAL. If the insn to be returned can be split, it will be. */
3437
3438 rtx
3439 try_split (pat, trial, last)
3440 rtx pat, trial;
3441 int last;
3442 {
3443 rtx before = PREV_INSN (trial);
3444 rtx after = NEXT_INSN (trial);
3445 int has_barrier = 0;
3446 rtx tem;
3447 rtx note, seq;
3448 int probability;
3449 rtx insn_last, insn;
3450 int njumps = 0;
3451
3452 if (any_condjump_p (trial)
3453 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3454 split_branch_probability = INTVAL (XEXP (note, 0));
3455 probability = split_branch_probability;
3456
3457 seq = split_insns (pat, trial);
3458
3459 split_branch_probability = -1;
3460
3461 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3462 We may need to handle this specially. */
3463 if (after && GET_CODE (after) == BARRIER)
3464 {
3465 has_barrier = 1;
3466 after = NEXT_INSN (after);
3467 }
3468
3469 if (!seq)
3470 return trial;
3471
3472 /* Avoid infinite loop if any insn of the result matches
3473 the original pattern. */
3474 insn_last = seq;
3475 while (1)
3476 {
3477 if (INSN_P (insn_last)
3478 && rtx_equal_p (PATTERN (insn_last), pat))
3479 return trial;
3480 if (!NEXT_INSN (insn_last))
3481 break;
3482 insn_last = NEXT_INSN (insn_last);
3483 }
3484
3485 /* Mark labels. */
3486 for (insn = insn_last; insn; insn = PREV_INSN (insn))
3487 {
3488 if (GET_CODE (insn) == JUMP_INSN)
3489 {
3490 mark_jump_label (PATTERN (insn), insn, 0);
3491 njumps++;
3492 if (probability != -1
3493 && any_condjump_p (insn)
3494 && !find_reg_note (insn, REG_BR_PROB, 0))
3495 {
3496 /* We can preserve the REG_BR_PROB notes only if exactly
3497 one jump is created, otherwise the machine description
3498 is responsible for this step using
3499 split_branch_probability variable. */
3500 if (njumps != 1)
3501 abort ();
3502 REG_NOTES (insn)
3503 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3504 GEN_INT (probability),
3505 REG_NOTES (insn));
3506 }
3507 }
3508 }
3509
3510 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3511 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3512 if (GET_CODE (trial) == CALL_INSN)
3513 {
3514 for (insn = insn_last; insn; insn = PREV_INSN (insn))
3515 if (GET_CODE (insn) == CALL_INSN)
3516 {
3517 CALL_INSN_FUNCTION_USAGE (insn)
3518 = CALL_INSN_FUNCTION_USAGE (trial);
3519 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3520 }
3521 }
3522
3523 /* Copy notes, particularly those related to the CFG. */
3524 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3525 {
3526 switch (REG_NOTE_KIND (note))
3527 {
3528 case REG_EH_REGION:
3529 insn = insn_last;
3530 while (insn != NULL_RTX)
3531 {
3532 if (GET_CODE (insn) == CALL_INSN
3533 || (flag_non_call_exceptions
3534 && may_trap_p (PATTERN (insn))))
3535 REG_NOTES (insn)
3536 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3537 XEXP (note, 0),
3538 REG_NOTES (insn));
3539 insn = PREV_INSN (insn);
3540 }
3541 break;
3542
3543 case REG_NORETURN:
3544 case REG_SETJMP:
3545 case REG_ALWAYS_RETURN:
3546 insn = insn_last;
3547 while (insn != NULL_RTX)
3548 {
3549 if (GET_CODE (insn) == CALL_INSN)
3550 REG_NOTES (insn)
3551 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3552 XEXP (note, 0),
3553 REG_NOTES (insn));
3554 insn = PREV_INSN (insn);
3555 }
3556 break;
3557
3558 case REG_NON_LOCAL_GOTO:
3559 insn = insn_last;
3560 while (insn != NULL_RTX)
3561 {
3562 if (GET_CODE (insn) == JUMP_INSN)
3563 REG_NOTES (insn)
3564 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3565 XEXP (note, 0),
3566 REG_NOTES (insn));
3567 insn = PREV_INSN (insn);
3568 }
3569 break;
3570
3571 default:
3572 break;
3573 }
3574 }
3575
3576 /* If there are LABELS inside the split insns, increment the
3577 usage count so we don't delete the label. */
3578 if (GET_CODE (trial) == INSN)
3579 {
3580 insn = insn_last;
3581 while (insn != NULL_RTX)
3582 {
3583 if (GET_CODE (insn) == INSN)
3584 mark_label_nuses (PATTERN (insn));
3585
3586 insn = PREV_INSN (insn);
3587 }
3588 }
3589
3590 tem = emit_insn_after_scope (seq, trial, INSN_SCOPE (trial));
3591
3592 delete_insn (trial);
3593 if (has_barrier)
3594 emit_barrier_after (tem);
3595
3596 /* Recursively call try_split for each new insn created; by the
3597 time control returns here that insn will be fully split, so
3598 set LAST and continue from the insn after the one returned.
3599 We can't use next_active_insn here since AFTER may be a note.
3600 Ignore deleted insns, which can occur if not optimizing. */
3601 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3602 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3603 tem = try_split (PATTERN (tem), tem, 1);
3604
3605 /* Return either the first or the last insn, depending on which was
3606 requested. */
3607 return last
3608 ? (after ? PREV_INSN (after) : last_insn)
3609 : NEXT_INSN (before);
3610 }
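
/* Usage sketch: callers normally write

     insn = try_split (PATTERN (insn), insn, 1);

   exactly as the recursive call above does, and treat a return value
   equal to the original insn as "no split happened".  */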
3611 \f
3612 /* Make and return an INSN rtx, initializing all its slots.
3613 Store PATTERN in the pattern slots. */
3614
3615 rtx
3616 make_insn_raw (pattern)
3617 rtx pattern;
3618 {
3619 rtx insn;
3620
3621 insn = rtx_alloc (INSN);
3622
3623 INSN_UID (insn) = cur_insn_uid++;
3624 PATTERN (insn) = pattern;
3625 INSN_CODE (insn) = -1;
3626 LOG_LINKS (insn) = NULL;
3627 REG_NOTES (insn) = NULL;
3628 INSN_SCOPE (insn) = NULL;
3629 BLOCK_FOR_INSN (insn) = NULL;
3630
3631 #ifdef ENABLE_RTL_CHECKING
3632 if (insn
3633 && INSN_P (insn)
3634 && (returnjump_p (insn)
3635 || (GET_CODE (insn) == SET
3636 && SET_DEST (insn) == pc_rtx)))
3637 {
3638 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
3639 debug_rtx (insn);
3640 }
3641 #endif
3642
3643 return insn;
3644 }
3645
3646 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3647
3648 static rtx
3649 make_jump_insn_raw (pattern)
3650 rtx pattern;
3651 {
3652 rtx insn;
3653
3654 insn = rtx_alloc (JUMP_INSN);
3655 INSN_UID (insn) = cur_insn_uid++;
3656
3657 PATTERN (insn) = pattern;
3658 INSN_CODE (insn) = -1;
3659 LOG_LINKS (insn) = NULL;
3660 REG_NOTES (insn) = NULL;
3661 JUMP_LABEL (insn) = NULL;
3662 INSN_SCOPE (insn) = NULL;
3663 BLOCK_FOR_INSN (insn) = NULL;
3664
3665 return insn;
3666 }
3667
3668 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3669
3670 static rtx
3671 make_call_insn_raw (pattern)
3672 rtx pattern;
3673 {
3674 rtx insn;
3675
3676 insn = rtx_alloc (CALL_INSN);
3677 INSN_UID (insn) = cur_insn_uid++;
3678
3679 PATTERN (insn) = pattern;
3680 INSN_CODE (insn) = -1;
3681 LOG_LINKS (insn) = NULL;
3682 REG_NOTES (insn) = NULL;
3683 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3684 INSN_SCOPE (insn) = NULL;
3685 BLOCK_FOR_INSN (insn) = NULL;
3686
3687 return insn;
3688 }
3689 \f
3690 /* Add INSN to the end of the doubly-linked list.
3691 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3692
3693 void
3694 add_insn (insn)
3695 rtx insn;
3696 {
3697 PREV_INSN (insn) = last_insn;
3698 NEXT_INSN (insn) = 0;
3699
3700 if (NULL != last_insn)
3701 NEXT_INSN (last_insn) = insn;
3702
3703 if (NULL == first_insn)
3704 first_insn = insn;
3705
3706 last_insn = insn;
3707 }
3708
3709 /* Add INSN into the doubly-linked list after insn AFTER. This and
3710 the next should be the only functions called to insert an insn once
3711 delay slots have been filled since only they know how to update a
3712 SEQUENCE. */
3713
3714 void
3715 add_insn_after (insn, after)
3716 rtx insn, after;
3717 {
3718 rtx next = NEXT_INSN (after);
3719 basic_block bb;
3720
3721 if (optimize && INSN_DELETED_P (after))
3722 abort ();
3723
3724 NEXT_INSN (insn) = next;
3725 PREV_INSN (insn) = after;
3726
3727 if (next)
3728 {
3729 PREV_INSN (next) = insn;
3730 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3731 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3732 }
3733 else if (last_insn == after)
3734 last_insn = insn;
3735 else
3736 {
3737 struct sequence_stack *stack = seq_stack;
3738 /* Scan all pending sequences too. */
3739 for (; stack; stack = stack->next)
3740 if (after == stack->last)
3741 {
3742 stack->last = insn;
3743 break;
3744 }
3745
3746 if (stack == 0)
3747 abort ();
3748 }
3749
3750 if (GET_CODE (after) != BARRIER
3751 && GET_CODE (insn) != BARRIER
3752 && (bb = BLOCK_FOR_INSN (after)))
3753 {
3754 set_block_for_insn (insn, bb);
3755 if (INSN_P (insn))
3756 bb->flags |= BB_DIRTY;
3757 /* If AFTER was the last insn of its basic block, INSN
3758 now ends that block. */
3759 if (bb->end == after
3760 /* Avoid clobbering of structure when creating new BB. */
3761 && GET_CODE (insn) != BARRIER
3762 && (GET_CODE (insn) != NOTE
3763 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3764 bb->end = insn;
3765 }
3766
3767 NEXT_INSN (after) = insn;
3768 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
3769 {
3770 rtx sequence = PATTERN (after);
3771 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3772 }
3773 }
3774
3775 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3776 the previous should be the only functions called to insert an insn once
3777 delay slots have been filled since only they know how to update a
3778 SEQUENCE. */
3779
3780 void
3781 add_insn_before (insn, before)
3782 rtx insn, before;
3783 {
3784 rtx prev = PREV_INSN (before);
3785 basic_block bb;
3786
3787 if (optimize && INSN_DELETED_P (before))
3788 abort ();
3789
3790 PREV_INSN (insn) = prev;
3791 NEXT_INSN (insn) = before;
3792
3793 if (prev)
3794 {
3795 NEXT_INSN (prev) = insn;
3796 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3797 {
3798 rtx sequence = PATTERN (prev);
3799 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3800 }
3801 }
3802 else if (first_insn == before)
3803 first_insn = insn;
3804 else
3805 {
3806 struct sequence_stack *stack = seq_stack;
3807 /* Scan all pending sequences too. */
3808 for (; stack; stack = stack->next)
3809 if (before == stack->first)
3810 {
3811 stack->first = insn;
3812 break;
3813 }
3814
3815 if (stack == 0)
3816 abort ();
3817 }
3818
3819 if (GET_CODE (before) != BARRIER
3820 && GET_CODE (insn) != BARRIER
3821 && (bb = BLOCK_FOR_INSN (before)))
3822 {
3823 set_block_for_insn (insn, bb);
3824 if (INSN_P (insn))
3825 bb->flags |= BB_DIRTY;
3826 /* Should not happen as first in the BB is always
3827 either NOTE or LABEL. */
3828 if (bb->head == insn
3829 /* Avoid clobbering of structure when creating new BB. */
3830 && GET_CODE (insn) != BARRIER
3831 && (GET_CODE (insn) != NOTE
3832 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3833 abort ();
3834 }
3835
3836 PREV_INSN (before) = insn;
3837 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
3838 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3839 }
3840
3841 /* Remove an insn from its doubly-linked list. This function knows how
3842 to handle sequences. */
3843 void
3844 remove_insn (insn)
3845 rtx insn;
3846 {
3847 rtx next = NEXT_INSN (insn);
3848 rtx prev = PREV_INSN (insn);
3849 basic_block bb;
3850
3851 if (prev)
3852 {
3853 NEXT_INSN (prev) = next;
3854 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3855 {
3856 rtx sequence = PATTERN (prev);
3857 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3858 }
3859 }
3860 else if (first_insn == insn)
3861 first_insn = next;
3862 else
3863 {
3864 struct sequence_stack *stack = seq_stack;
3865 /* Scan all pending sequences too. */
3866 for (; stack; stack = stack->next)
3867 if (insn == stack->first)
3868 {
3869 stack->first = next;
3870 break;
3871 }
3872
3873 if (stack == 0)
3874 abort ();
3875 }
3876
3877 if (next)
3878 {
3879 PREV_INSN (next) = prev;
3880 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3881 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3882 }
3883 else if (last_insn == insn)
3884 last_insn = prev;
3885 else
3886 {
3887 struct sequence_stack *stack = seq_stack;
3888 /* Scan all pending sequences too. */
3889 for (; stack; stack = stack->next)
3890 if (insn == stack->last)
3891 {
3892 stack->last = prev;
3893 break;
3894 }
3895
3896 if (stack == 0)
3897 abort ();
3898 }
3899 if (GET_CODE (insn) != BARRIER
3900 && (bb = BLOCK_FOR_INSN (insn)))
3901 {
3902 if (INSN_P (insn))
3903 bb->flags |= BB_DIRTY;
3904 if (bb->head == insn)
3905 {
3906 /* Never ever delete the basic block note without deleting whole
3907 basic block. */
3908 if (GET_CODE (insn) == NOTE)
3909 abort ();
3910 bb->head = next;
3911 }
3912 if (bb->end == insn)
3913 bb->end = prev;
3914 }
3915 }
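/* A minimal sketch of moving a single insn, assuming INSN and AFTER are
   both already in the chain (both names hypothetical): unlink it here,
   then relink it with add_insn_after.

	remove_insn (insn);
	add_insn_after (insn, after);

   This is essentially what reorder_insns below does for a whole run of
   insns, with extra care for basic-block boundaries.  */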
3916
3917 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3918
3919 void
3920 add_function_usage_to (call_insn, call_fusage)
3921 rtx call_insn, call_fusage;
3922 {
3923 if (! call_insn || GET_CODE (call_insn) != CALL_INSN)
3924 abort ();
3925
3926 /* Put the register usage information on the CALL. If there is already
3927 some usage information, put ours at the end. */
3928 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3929 {
3930 rtx link;
3931
3932 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3933 link = XEXP (link, 1))
3934 ;
3935
3936 XEXP (link, 1) = call_fusage;
3937 }
3938 else
3939 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3940 }
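/* A minimal usage sketch: a caller expanding a call might record that the
   call reads an argument register.  REGNO is hypothetical here; real
   callers use the target's argument-passing registers.

	rtx reg = gen_rtx_REG (SImode, regno);
	rtx fusage = gen_rtx_EXPR_LIST (VOIDmode,
					gen_rtx_USE (VOIDmode, reg),
					NULL_RTX);
	add_function_usage_to (call_insn, fusage);  */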
3941
3942 /* Delete all insns made since FROM.
3943 FROM becomes the new last instruction. */
3944
3945 void
3946 delete_insns_since (from)
3947 rtx from;
3948 {
3949 if (from == 0)
3950 first_insn = 0;
3951 else
3952 NEXT_INSN (from) = 0;
3953 last_insn = from;
3954 }
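/* A minimal usage sketch: a common pattern is to try an expansion and roll
   it back on failure (the FAILED flag is hypothetical):

	rtx last = get_last_insn ();
	... attempt to emit some insns ...
	if (failed)
	  delete_insns_since (last);  */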
3955
3956 /* This function is deprecated; please use sequences instead.
3957
3958 Move a consecutive bunch of insns to a different place in the chain.
3959 The insns to be moved are those between FROM and TO.
3960 They are moved to a new position after the insn AFTER.
3961 AFTER must not be FROM or TO or any insn in between.
3962
3963 This function does not know about SEQUENCEs and hence should not be
3964 called after delay-slot filling has been done. */
3965
3966 void
3967 reorder_insns_nobb (from, to, after)
3968 rtx from, to, after;
3969 {
3970 /* Splice this bunch out of where it is now. */
3971 if (PREV_INSN (from))
3972 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3973 if (NEXT_INSN (to))
3974 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3975 if (last_insn == to)
3976 last_insn = PREV_INSN (from);
3977 if (first_insn == from)
3978 first_insn = NEXT_INSN (to);
3979
3980 /* Make the new neighbors point to it and it to them. */
3981 if (NEXT_INSN (after))
3982 PREV_INSN (NEXT_INSN (after)) = to;
3983
3984 NEXT_INSN (to) = NEXT_INSN (after);
3985 PREV_INSN (from) = after;
3986 NEXT_INSN (after) = from;
3987 if (after == last_insn)
3988 last_insn = to;
3989 }
3990
3991 /* Same as function above, but take care to update BB boundaries. */
3992 void
3993 reorder_insns (from, to, after)
3994 rtx from, to, after;
3995 {
3996 rtx prev = PREV_INSN (from);
3997 basic_block bb, bb2;
3998
3999 reorder_insns_nobb (from, to, after);
4000
4001 if (GET_CODE (after) != BARRIER
4002 && (bb = BLOCK_FOR_INSN (after)))
4003 {
4004 rtx x;
4005 bb->flags |= BB_DIRTY;
4006
4007 if (GET_CODE (from) != BARRIER
4008 && (bb2 = BLOCK_FOR_INSN (from)))
4009 {
4010 if (bb2->end == to)
4011 bb2->end = prev;
4012 bb2->flags |= BB_DIRTY;
4013 }
4014
4015 if (bb->end == after)
4016 bb->end = to;
4017
4018 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4019 set_block_for_insn (x, bb);
4020 }
4021 }
4022
4023 /* Return the line note insn preceding INSN. */
4024
4025 static rtx
4026 find_line_note (insn)
4027 rtx insn;
4028 {
4029 if (no_line_numbers)
4030 return 0;
4031
4032 for (; insn; insn = PREV_INSN (insn))
4033 if (GET_CODE (insn) == NOTE
4034 && NOTE_LINE_NUMBER (insn) >= 0)
4035 break;
4036
4037 return insn;
4038 }
4039
4040 /* Like reorder_insns, but inserts line notes to preserve the line numbers
4041 of the moved insns when debugging. This may insert a note between AFTER
4042 and FROM, and another one after TO. */
4043
4044 void
4045 reorder_insns_with_line_notes (from, to, after)
4046 rtx from, to, after;
4047 {
4048 rtx from_line = find_line_note (from);
4049 rtx after_line = find_line_note (after);
4050
4051 reorder_insns (from, to, after);
4052
4053 if (from_line == after_line)
4054 return;
4055
4056 if (from_line)
4057 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
4058 NOTE_LINE_NUMBER (from_line),
4059 after);
4060 if (after_line)
4061 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
4062 NOTE_LINE_NUMBER (after_line),
4063 to);
4064 }
4065
4066 /* Remove unnecessary notes from the instruction stream. */
4067
4068 void
4069 remove_unnecessary_notes ()
4070 {
4071 rtx block_stack = NULL_RTX;
4072 rtx eh_stack = NULL_RTX;
4073 rtx insn;
4074 rtx next;
4075 rtx tmp;
4076
4077 /* We must not remove the first instruction in the function because
4078 the compiler depends on the first instruction being a note. */
4079 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
4080 {
4081 /* Remember what's next. */
4082 next = NEXT_INSN (insn);
4083
4084 /* We're only interested in notes. */
4085 if (GET_CODE (insn) != NOTE)
4086 continue;
4087
4088 switch (NOTE_LINE_NUMBER (insn))
4089 {
4090 case NOTE_INSN_DELETED:
4091 case NOTE_INSN_LOOP_END_TOP_COND:
4092 remove_insn (insn);
4093 break;
4094
4095 case NOTE_INSN_EH_REGION_BEG:
4096 eh_stack = alloc_INSN_LIST (insn, eh_stack);
4097 break;
4098
4099 case NOTE_INSN_EH_REGION_END:
4100 /* Too many end notes. */
4101 if (eh_stack == NULL_RTX)
4102 abort ();
4103 /* Mismatched nesting. */
4104 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
4105 abort ();
4106 tmp = eh_stack;
4107 eh_stack = XEXP (eh_stack, 1);
4108 free_INSN_LIST_node (tmp);
4109 break;
4110
4111 case NOTE_INSN_BLOCK_BEG:
4112 /* By now, all notes indicating lexical blocks should have
4113 NOTE_BLOCK filled in. */
4114 if (NOTE_BLOCK (insn) == NULL_TREE)
4115 abort ();
4116 block_stack = alloc_INSN_LIST (insn, block_stack);
4117 break;
4118
4119 case NOTE_INSN_BLOCK_END:
4120 /* Too many end notes. */
4121 if (block_stack == NULL_RTX)
4122 abort ();
4123 /* Mismatched nesting. */
4124 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
4125 abort ();
4126 tmp = block_stack;
4127 block_stack = XEXP (block_stack, 1);
4128 free_INSN_LIST_node (tmp);
4129
4130 /* Scan back to see if there are any non-note instructions
4131 between INSN and the beginning of this block. If not,
4132 then there is no PC range in the generated code that will
4133 actually be in this block, so there's no point in
4134 remembering the existence of the block. */
4135 for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
4136 {
4137 /* This block contains a real instruction. Note that we
4138 don't include labels; if the only thing in the block
4139 is a label, then there are still no PC values that
4140 lie within the block. */
4141 if (INSN_P (tmp))
4142 break;
4143
4144 /* We're only interested in NOTEs. */
4145 if (GET_CODE (tmp) != NOTE)
4146 continue;
4147
4148 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
4149 {
4150 /* We just verified that this BLOCK matches us with
4151 the block_stack check above. Never delete the
4152 BLOCK for the outermost scope of the function; we
4153 can refer to names from that scope even if the
4154 block notes are messed up. */
4155 if (! is_body_block (NOTE_BLOCK (insn))
4156 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
4157 {
4158 remove_insn (tmp);
4159 remove_insn (insn);
4160 }
4161 break;
4162 }
4163 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
4164 /* There's a nested block. We need to leave the
4165 current block in place since otherwise the debugger
4166 wouldn't be able to show symbols from our block in
4167 the nested block. */
4168 break;
4169 }
4170 }
4171 }
4172
4173 /* Too many begin notes. */
4174 if (block_stack || eh_stack)
4175 abort ();
4176 }
4177
4178 \f
4179 /* Emit insn(s) of given code and pattern
4180 at a specified place within the doubly-linked list.
4181
4182 All of the emit_foo global entry points accept an object
4183 X which is either an insn list or a PATTERN of a single
4184 instruction.
4185
4186 There are thus a few canonical ways to generate code and
4187 emit it at a specific place in the instruction stream. For
4188 example, consider the instruction named SPOT and the fact that
4189 we would like to emit some instructions before SPOT. We might
4190 do it like this:
4191
4192 start_sequence ();
4193 ... emit the new instructions ...
4194 insns_head = get_insns ();
4195 end_sequence ();
4196
4197 emit_insn_before (insns_head, SPOT);
4198
4199 It used to be common to generate SEQUENCE rtl instead, but that
4200 is a relic of the past that no longer occurs.  The reason is that
4201 SEQUENCE rtl badly fragments RTL memory, since the SEQUENCE
4202 generated would almost certainly die right after it was created. */
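/* Filling in the pattern above with a concrete (hypothetical) computation,
   a register-to-register copy emitted before SPOT might look like:

	rtx insns_head;
	rtx tmp = gen_reg_rtx (word_mode);

	start_sequence ();
	emit_insn (gen_rtx_SET (VOIDmode, tmp, src_reg));
	insns_head = get_insns ();
	end_sequence ();

	emit_insn_before (insns_head, SPOT);

   SRC_REG stands for some previously computed register.  */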
4203
4204 /* Make X be output before the instruction BEFORE. */
4205
4206 rtx
4207 emit_insn_before (x, before)
4208 rtx x, before;
4209 {
4210 rtx last = before;
4211 rtx insn;
4212
4213 #ifdef ENABLE_RTL_CHECKING
4214 if (before == NULL_RTX)
4215 abort ();
4216 #endif
4217
4218 if (x == NULL_RTX)
4219 return last;
4220
4221 switch (GET_CODE (x))
4222 {
4223 case INSN:
4224 case JUMP_INSN:
4225 case CALL_INSN:
4226 case CODE_LABEL:
4227 case BARRIER:
4228 case NOTE:
4229 insn = x;
4230 while (insn)
4231 {
4232 rtx next = NEXT_INSN (insn);
4233 add_insn_before (insn, before);
4234 last = insn;
4235 insn = next;
4236 }
4237 break;
4238
4239 #ifdef ENABLE_RTL_CHECKING
4240 case SEQUENCE:
4241 abort ();
4242 break;
4243 #endif
4244
4245 default:
4246 last = make_insn_raw (x);
4247 add_insn_before (last, before);
4248 break;
4249 }
4250
4251 return last;
4252 }
4253
4254 /* Make an instruction with body X and code JUMP_INSN
4255 and output it before the instruction BEFORE. */
4256
4257 rtx
4258 emit_jump_insn_before (x, before)
4259 rtx x, before;
4260 {
4261 rtx insn, last = NULL_RTX;
4262
4263 #ifdef ENABLE_RTL_CHECKING
4264 if (before == NULL_RTX)
4265 abort ();
4266 #endif
4267
4268 switch (GET_CODE (x))
4269 {
4270 case INSN:
4271 case JUMP_INSN:
4272 case CALL_INSN:
4273 case CODE_LABEL:
4274 case BARRIER:
4275 case NOTE:
4276 insn = x;
4277 while (insn)
4278 {
4279 rtx next = NEXT_INSN (insn);
4280 add_insn_before (insn, before);
4281 last = insn;
4282 insn = next;
4283 }
4284 break;
4285
4286 #ifdef ENABLE_RTL_CHECKING
4287 case SEQUENCE:
4288 abort ();
4289 break;
4290 #endif
4291
4292 default:
4293 last = make_jump_insn_raw (x);
4294 add_insn_before (last, before);
4295 break;
4296 }
4297
4298 return last;
4299 }
4300
4301 /* Make an instruction with body X and code CALL_INSN
4302 and output it before the instruction BEFORE. */
4303
4304 rtx
4305 emit_call_insn_before (x, before)
4306 rtx x, before;
4307 {
4308 rtx last = NULL_RTX, insn;
4309
4310 #ifdef ENABLE_RTL_CHECKING
4311 if (before == NULL_RTX)
4312 abort ();
4313 #endif
4314
4315 switch (GET_CODE (x))
4316 {
4317 case INSN:
4318 case JUMP_INSN:
4319 case CALL_INSN:
4320 case CODE_LABEL:
4321 case BARRIER:
4322 case NOTE:
4323 insn = x;
4324 while (insn)
4325 {
4326 rtx next = NEXT_INSN (insn);
4327 add_insn_before (insn, before);
4328 last = insn;
4329 insn = next;
4330 }
4331 break;
4332
4333 #ifdef ENABLE_RTL_CHECKING
4334 case SEQUENCE:
4335 abort ();
4336 break;
4337 #endif
4338
4339 default:
4340 last = make_call_insn_raw (x);
4341 add_insn_before (last, before);
4342 break;
4343 }
4344
4345 return last;
4346 }
4347
4348 /* Make an insn of code BARRIER
4349 and output it before the insn BEFORE. */
4350
4351 rtx
4352 emit_barrier_before (before)
4353 rtx before;
4354 {
4355 rtx insn = rtx_alloc (BARRIER);
4356
4357 INSN_UID (insn) = cur_insn_uid++;
4358
4359 add_insn_before (insn, before);
4360 return insn;
4361 }
4362
4363 /* Emit the label LABEL before the insn BEFORE. */
4364
4365 rtx
4366 emit_label_before (label, before)
4367 rtx label, before;
4368 {
4369 /* This can be called twice for the same label as a result of the
4370 confusion that follows a syntax error! So make it harmless. */
4371 if (INSN_UID (label) == 0)
4372 {
4373 INSN_UID (label) = cur_insn_uid++;
4374 add_insn_before (label, before);
4375 }
4376
4377 return label;
4378 }
4379
4380 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4381
4382 rtx
4383 emit_note_before (subtype, before)
4384 int subtype;
4385 rtx before;
4386 {
4387 rtx note = rtx_alloc (NOTE);
4388 INSN_UID (note) = cur_insn_uid++;
4389 NOTE_SOURCE_FILE (note) = 0;
4390 NOTE_LINE_NUMBER (note) = subtype;
4391 BLOCK_FOR_INSN (note) = NULL;
4392
4393 add_insn_before (note, before);
4394 return note;
4395 }
4396 \f
4397 /* Helper for emit_insn_after, handles lists of instructions
4398 efficiently. */
4399
4400 static rtx emit_insn_after_1 PARAMS ((rtx, rtx));
4401
4402 static rtx
4403 emit_insn_after_1 (first, after)
4404 rtx first, after;
4405 {
4406 rtx last;
4407 rtx after_after;
4408 basic_block bb;
4409
4410 if (GET_CODE (after) != BARRIER
4411 && (bb = BLOCK_FOR_INSN (after)))
4412 {
4413 bb->flags |= BB_DIRTY;
4414 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4415 if (GET_CODE (last) != BARRIER)
4416 set_block_for_insn (last, bb);
4417 if (GET_CODE (last) != BARRIER)
4418 set_block_for_insn (last, bb);
4419 if (bb->end == after)
4420 bb->end = last;
4421 }
4422 else
4423 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4424 continue;
4425
4426 after_after = NEXT_INSN (after);
4427
4428 NEXT_INSN (after) = first;
4429 PREV_INSN (first) = after;
4430 NEXT_INSN (last) = after_after;
4431 if (after_after)
4432 PREV_INSN (after_after) = last;
4433
4434 if (after == last_insn)
4435 last_insn = last;
4436 return last;
4437 }
4438
4439 /* Make X be output after the insn AFTER. */
4440
4441 rtx
4442 emit_insn_after (x, after)
4443 rtx x, after;
4444 {
4445 rtx last = after;
4446
4447 #ifdef ENABLE_RTL_CHECKING
4448 if (after == NULL_RTX)
4449 abort ();
4450 #endif
4451
4452 if (x == NULL_RTX)
4453 return last;
4454
4455 switch (GET_CODE (x))
4456 {
4457 case INSN:
4458 case JUMP_INSN:
4459 case CALL_INSN:
4460 case CODE_LABEL:
4461 case BARRIER:
4462 case NOTE:
4463 last = emit_insn_after_1 (x, after);
4464 break;
4465
4466 #ifdef ENABLE_RTL_CHECKING
4467 case SEQUENCE:
4468 abort ();
4469 break;
4470 #endif
4471
4472 default:
4473 last = make_insn_raw (x);
4474 add_insn_after (last, after);
4475 break;
4476 }
4477
4478 return last;
4479 }
4480
4481 /* Similar to emit_insn_after, except that line notes are to be inserted so
4482 as to act as if this insn were at FROM. */
4483
4484 void
4485 emit_insn_after_with_line_notes (x, after, from)
4486 rtx x, after, from;
4487 {
4488 rtx from_line = find_line_note (from);
4489 rtx after_line = find_line_note (after);
4490 rtx insn = emit_insn_after (x, after);
4491
4492 if (from_line)
4493 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
4494 NOTE_LINE_NUMBER (from_line),
4495 after);
4496
4497 if (after_line)
4498 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
4499 NOTE_LINE_NUMBER (after_line),
4500 insn);
4501 }
4502
4503 /* Make an insn of code JUMP_INSN with body X
4504 and output it after the insn AFTER. */
4505
4506 rtx
4507 emit_jump_insn_after (x, after)
4508 rtx x, after;
4509 {
4510 rtx last;
4511
4512 #ifdef ENABLE_RTL_CHECKING
4513 if (after == NULL_RTX)
4514 abort ();
4515 #endif
4516
4517 switch (GET_CODE (x))
4518 {
4519 case INSN:
4520 case JUMP_INSN:
4521 case CALL_INSN:
4522 case CODE_LABEL:
4523 case BARRIER:
4524 case NOTE:
4525 last = emit_insn_after_1 (x, after);
4526 break;
4527
4528 #ifdef ENABLE_RTL_CHECKING
4529 case SEQUENCE:
4530 abort ();
4531 break;
4532 #endif
4533
4534 default:
4535 last = make_jump_insn_raw (x);
4536 add_insn_after (last, after);
4537 break;
4538 }
4539
4540 return last;
4541 }
4542
4543 /* Make an instruction with body X and code CALL_INSN
4544 and output it after the instruction AFTER. */
4545
4546 rtx
4547 emit_call_insn_after (x, after)
4548 rtx x, after;
4549 {
4550 rtx last;
4551
4552 #ifdef ENABLE_RTL_CHECKING
4553 if (after == NULL_RTX)
4554 abort ();
4555 #endif
4556
4557 switch (GET_CODE (x))
4558 {
4559 case INSN:
4560 case JUMP_INSN:
4561 case CALL_INSN:
4562 case CODE_LABEL:
4563 case BARRIER:
4564 case NOTE:
4565 last = emit_insn_after_1 (x, after);
4566 break;
4567
4568 #ifdef ENABLE_RTL_CHECKING
4569 case SEQUENCE:
4570 abort ();
4571 break;
4572 #endif
4573
4574 default:
4575 last = make_call_insn_raw (x);
4576 add_insn_after (last, after);
4577 break;
4578 }
4579
4580 return last;
4581 }
4582
4583 /* Make an insn of code BARRIER
4584 and output it after the insn AFTER. */
4585
4586 rtx
4587 emit_barrier_after (after)
4588 rtx after;
4589 {
4590 rtx insn = rtx_alloc (BARRIER);
4591
4592 INSN_UID (insn) = cur_insn_uid++;
4593
4594 add_insn_after (insn, after);
4595 return insn;
4596 }
4597
4598 /* Emit the label LABEL after the insn AFTER. */
4599
4600 rtx
4601 emit_label_after (label, after)
4602 rtx label, after;
4603 {
4604 /* This can be called twice for the same label
4605 as a result of the confusion that follows a syntax error!
4606 So make it harmless. */
4607 if (INSN_UID (label) == 0)
4608 {
4609 INSN_UID (label) = cur_insn_uid++;
4610 add_insn_after (label, after);
4611 }
4612
4613 return label;
4614 }
4615
4616 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4617
4618 rtx
4619 emit_note_after (subtype, after)
4620 int subtype;
4621 rtx after;
4622 {
4623 rtx note = rtx_alloc (NOTE);
4624 INSN_UID (note) = cur_insn_uid++;
4625 NOTE_SOURCE_FILE (note) = 0;
4626 NOTE_LINE_NUMBER (note) = subtype;
4627 BLOCK_FOR_INSN (note) = NULL;
4628 add_insn_after (note, after);
4629 return note;
4630 }
4631
4632 /* Emit a line note for FILE and LINE after the insn AFTER. */
4633
4634 rtx
4635 emit_line_note_after (file, line, after)
4636 const char *file;
4637 int line;
4638 rtx after;
4639 {
4640 rtx note;
4641
4642 if (no_line_numbers && line > 0)
4643 {
4644 cur_insn_uid++;
4645 return 0;
4646 }
4647
4648 note = rtx_alloc (NOTE);
4649 INSN_UID (note) = cur_insn_uid++;
4650 NOTE_SOURCE_FILE (note) = file;
4651 NOTE_LINE_NUMBER (note) = line;
4652 BLOCK_FOR_INSN (note) = NULL;
4653 add_insn_after (note, after);
4654 return note;
4655 }
4656 \f
4657 /* Like emit_insn_after, but set INSN_SCOPE according to SCOPE. */
4658 rtx
4659 emit_insn_after_scope (pattern, after, scope)
4660 rtx pattern, after;
4661 tree scope;
4662 {
4663 rtx last = emit_insn_after (pattern, after);
4664
4665 after = NEXT_INSN (after);
4666 while (1)
4667 {
4668 if (active_insn_p (after))
4669 INSN_SCOPE (after) = scope;
4670 if (after == last)
4671 break;
4672 after = NEXT_INSN (after);
4673 }
4674 return last;
4675 }
4676
4677 /* Like emit_jump_insn_after, but set INSN_SCOPE according to SCOPE. */
4678 rtx
4679 emit_jump_insn_after_scope (pattern, after, scope)
4680 rtx pattern, after;
4681 tree scope;
4682 {
4683 rtx last = emit_jump_insn_after (pattern, after);
4684
4685 after = NEXT_INSN (after);
4686 while (1)
4687 {
4688 if (active_insn_p (after))
4689 INSN_SCOPE (after) = scope;
4690 if (after == last)
4691 break;
4692 after = NEXT_INSN (after);
4693 }
4694 return last;
4695 }
4696
4697 /* Like emit_call_insn_after, but set INSN_SCOPE according to SCOPE. */
4698 rtx
4699 emit_call_insn_after_scope (pattern, after, scope)
4700 rtx pattern, after;
4701 tree scope;
4702 {
4703 rtx last = emit_call_insn_after (pattern, after);
4704
4705 after = NEXT_INSN (after);
4706 while (1)
4707 {
4708 if (active_insn_p (after))
4709 INSN_SCOPE (after) = scope;
4710 if (after == last)
4711 break;
4712 after = NEXT_INSN (after);
4713 }
4714 return last;
4715 }
4716
4717 /* Like emit_insn_before, but set INSN_SCOPE according to SCOPE. */
4718 rtx
4719 emit_insn_before_scope (pattern, before, scope)
4720 rtx pattern, before;
4721 tree scope;
4722 {
4723 rtx first = PREV_INSN (before);
4724 rtx last = emit_insn_before (pattern, before);
4725
4726 first = NEXT_INSN (first);
4727 while (1)
4728 {
4729 if (active_insn_p (first))
4730 INSN_SCOPE (first) = scope;
4731 if (first == last)
4732 break;
4733 first = NEXT_INSN (first);
4734 }
4735 return last;
4736 }
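/* A minimal sketch of the *_scope variants above: they behave like the
   plain emitters but additionally tag every active insn emitted with
   SCOPE, a BLOCK tree node, so that debug information stays attached to
   the insns.  PATTERN, AFTER and SCOPE are hypothetical here:

	rtx last = emit_insn_after_scope (pattern, after, scope);  */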
4737 \f
4738 /* Take X and emit it at the end of the doubly-linked
4739 INSN list.
4740
4741 Returns the last insn emitted. */
4742
4743 rtx
4744 emit_insn (x)
4745 rtx x;
4746 {
4747 rtx last = last_insn;
4748 rtx insn;
4749
4750 if (x == NULL_RTX)
4751 return last;
4752
4753 switch (GET_CODE (x))
4754 {
4755 case INSN:
4756 case JUMP_INSN:
4757 case CALL_INSN:
4758 case CODE_LABEL:
4759 case BARRIER:
4760 case NOTE:
4761 insn = x;
4762 while (insn)
4763 {
4764 rtx next = NEXT_INSN (insn);
4765 add_insn (insn);
4766 last = insn;
4767 insn = next;
4768 }
4769 break;
4770
4771 #ifdef ENABLE_RTL_CHECKING
4772 case SEQUENCE:
4773 abort ();
4774 break;
4775 #endif
4776
4777 default:
4778 last = make_insn_raw (x);
4779 add_insn (last);
4780 break;
4781 }
4782
4783 return last;
4784 }
4785
4786 /* Make an insn of code JUMP_INSN with pattern X
4787 and add it to the end of the doubly-linked list. */
4788
4789 rtx
4790 emit_jump_insn (x)
4791 rtx x;
4792 {
4793 rtx last = NULL_RTX, insn;
4794
4795 switch (GET_CODE (x))
4796 {
4797 case INSN:
4798 case JUMP_INSN:
4799 case CALL_INSN:
4800 case CODE_LABEL:
4801 case BARRIER:
4802 case NOTE:
4803 insn = x;
4804 while (insn)
4805 {
4806 rtx next = NEXT_INSN (insn);
4807 add_insn (insn);
4808 last = insn;
4809 insn = next;
4810 }
4811 break;
4812
4813 #ifdef ENABLE_RTL_CHECKING
4814 case SEQUENCE:
4815 abort ();
4816 break;
4817 #endif
4818
4819 default:
4820 last = make_jump_insn_raw (x);
4821 add_insn (last);
4822 break;
4823 }
4824
4825 return last;
4826 }
4827
4828 /* Make an insn of code CALL_INSN with pattern X
4829 and add it to the end of the doubly-linked list. */
4830
4831 rtx
4832 emit_call_insn (x)
4833 rtx x;
4834 {
4835 rtx insn;
4836
4837 switch (GET_CODE (x))
4838 {
4839 case INSN:
4840 case JUMP_INSN:
4841 case CALL_INSN:
4842 case CODE_LABEL:
4843 case BARRIER:
4844 case NOTE:
4845 insn = emit_insn (x);
4846 break;
4847
4848 #ifdef ENABLE_RTL_CHECKING
4849 case SEQUENCE:
4850 abort ();
4851 break;
4852 #endif
4853
4854 default:
4855 insn = make_call_insn_raw (x);
4856 add_insn (insn);
4857 break;
4858 }
4859
4860 return insn;
4861 }
4862
4863 /* Add the label LABEL to the end of the doubly-linked list. */
4864
4865 rtx
4866 emit_label (label)
4867 rtx label;
4868 {
4869 /* This can be called twice for the same label
4870 as a result of the confusion that follows a syntax error!
4871 So make it harmless. */
4872 if (INSN_UID (label) == 0)
4873 {
4874 INSN_UID (label) = cur_insn_uid++;
4875 add_insn (label);
4876 }
4877 return label;
4878 }
4879
4880 /* Make an insn of code BARRIER
4881 and add it to the end of the doubly-linked list. */
4882
4883 rtx
4884 emit_barrier ()
4885 {
4886 rtx barrier = rtx_alloc (BARRIER);
4887 INSN_UID (barrier) = cur_insn_uid++;
4888 add_insn (barrier);
4889 return barrier;
4890 }
4891
4892 /* Make an insn of code NOTE
4893 with data-fields specified by FILE and LINE
4894 and add it to the end of the doubly-linked list,
4895 but only if line-numbers are desired for debugging info. */
4896
4897 rtx
4898 emit_line_note (file, line)
4899 const char *file;
4900 int line;
4901 {
4902 set_file_and_line_for_stmt (file, line);
4903
4904 #if 0
4905 if (no_line_numbers)
4906 return 0;
4907 #endif
4908
4909 return emit_note (file, line);
4910 }
4911
4912 /* Make an insn of code NOTE
4913 with data-fields specified by FILE and LINE
4914 and add it to the end of the doubly-linked list.
4915 If it is a line-number NOTE, omit it if it matches the previous one. */
4916
4917 rtx
4918 emit_note (file, line)
4919 const char *file;
4920 int line;
4921 {
4922 rtx note;
4923
4924 if (line > 0)
4925 {
4926 if (file && last_filename && !strcmp (file, last_filename)
4927 && line == last_linenum)
4928 return 0;
4929 last_filename = file;
4930 last_linenum = line;
4931 }
4932
4933 if (no_line_numbers && line > 0)
4934 {
4935 cur_insn_uid++;
4936 return 0;
4937 }
4938
4939 note = rtx_alloc (NOTE);
4940 INSN_UID (note) = cur_insn_uid++;
4941 NOTE_SOURCE_FILE (note) = file;
4942 NOTE_LINE_NUMBER (note) = line;
4943 BLOCK_FOR_INSN (note) = NULL;
4944 add_insn (note);
4945 return note;
4946 }
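/* A minimal usage sketch: NOTE_LINE_NUMBER doubles as the note subtype,
   with negative values naming NOTE_INSN_* markers and positive values
   naming source lines, so both kinds go through the same entry point
   (assuming the usual front-end globals input_filename and lineno):

	emit_note (NULL, NOTE_INSN_DELETED);	   marker note
	emit_note (input_filename, lineno);	   line-number note  */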
4947
4948 /* Emit a NOTE, and don't omit it even if LINE matches the previous note. */
4949
4950 rtx
4951 emit_line_note_force (file, line)
4952 const char *file;
4953 int line;
4954 {
4955 last_linenum = -1;
4956 return emit_line_note (file, line);
4957 }
4958
4959 /* Cause next statement to emit a line note even if the line number
4960 has not changed. This is used at the beginning of a function. */
4961
4962 void
4963 force_next_line_note ()
4964 {
4965 last_linenum = -1;
4966 }
4967
4968 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4969 note of this type already exists, remove it first. */
4970
4971 rtx
4972 set_unique_reg_note (insn, kind, datum)
4973 rtx insn;
4974 enum reg_note kind;
4975 rtx datum;
4976 {
4977 rtx note = find_reg_note (insn, kind, NULL_RTX);
4978
4979 switch (kind)
4980 {
4981 case REG_EQUAL:
4982 case REG_EQUIV:
4983 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4984 has multiple sets (some callers assume single_set
4985 means the insn only has one set, when in fact it
4986 means the insn only has one * useful * set). */
4987 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4988 {
4989 if (note)
4990 abort ();
4991 return NULL_RTX;
4992 }
4993
4994 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4995 It serves no useful purpose and breaks eliminate_regs. */
4996 if (GET_CODE (datum) == ASM_OPERANDS)
4997 return NULL_RTX;
4998 break;
4999
5000 default:
5001 break;
5002 }
5003
5004 if (note)
5005 {
5006 XEXP (note, 0) = datum;
5007 return note;
5008 }
5009
5010 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
5011 return REG_NOTES (insn);
5012 }
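/* A minimal usage sketch: record that the value computed by INSN is known
   to equal the constant 42, replacing any existing note of the same kind:

	set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));  */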
5013 \f
5014 /* Return an indication of which type of insn should have X as a body.
5015 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5016
5017 enum rtx_code
5018 classify_insn (x)
5019 rtx x;
5020 {
5021 if (GET_CODE (x) == CODE_LABEL)
5022 return CODE_LABEL;
5023 if (GET_CODE (x) == CALL)
5024 return CALL_INSN;
5025 if (GET_CODE (x) == RETURN)
5026 return JUMP_INSN;
5027 if (GET_CODE (x) == SET)
5028 {
5029 if (SET_DEST (x) == pc_rtx)
5030 return JUMP_INSN;
5031 else if (GET_CODE (SET_SRC (x)) == CALL)
5032 return CALL_INSN;
5033 else
5034 return INSN;
5035 }
5036 if (GET_CODE (x) == PARALLEL)
5037 {
5038 int j;
5039 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5040 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5041 return CALL_INSN;
5042 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5043 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5044 return JUMP_INSN;
5045 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5046 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5047 return CALL_INSN;
5048 }
5049 return INSN;
5050 }
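/* A minimal usage sketch: a store into the program counter classifies as
   a jump, so `emit' (below) will create a JUMP_INSN for it.  LABEL is a
   hypothetical CODE_LABEL.

	rtx pat = gen_rtx_SET (VOIDmode, pc_rtx,
			       gen_rtx_LABEL_REF (VOIDmode, label));
	if (classify_insn (pat) == JUMP_INSN)
	  emit_jump_insn (pat);  */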
5051
5052 /* Emit the rtl pattern X as an appropriate kind of insn.
5053 If X is a label, it is simply added into the insn chain. */
5054
5055 rtx
5056 emit (x)
5057 rtx x;
5058 {
5059 enum rtx_code code = classify_insn (x);
5060
5061 if (code == CODE_LABEL)
5062 return emit_label (x);
5063 else if (code == INSN)
5064 return emit_insn (x);
5065 else if (code == JUMP_INSN)
5066 {
5067 rtx insn = emit_jump_insn (x);
5068 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5069 return emit_barrier ();
5070 return insn;
5071 }
5072 else if (code == CALL_INSN)
5073 return emit_call_insn (x);
5074 else
5075 abort ();
5076 }
5077 \f
5078 /* Space for free sequence stack entries. */
5079 static GTY ((deletable (""))) struct sequence_stack *free_sequence_stack;
5080
5081 /* Begin emitting insns to a sequence which can be packaged in an
5082 RTL_EXPR. If this sequence will contain something that might cause
5083 the compiler to pop arguments to function calls (because those
5084 pops have previously been deferred; see INHIBIT_DEFER_POP for more
5085 details), use do_pending_stack_adjust before calling this function.
5086 That will ensure that the deferred pops are not accidentally
5087 emitted in the middle of this sequence. */
5088
5089 void
5090 start_sequence ()
5091 {
5092 struct sequence_stack *tem;
5093
5094 if (free_sequence_stack != NULL)
5095 {
5096 tem = free_sequence_stack;
5097 free_sequence_stack = tem->next;
5098 }
5099 else
5100 tem = (struct sequence_stack *) ggc_alloc (sizeof (struct sequence_stack));
5101
5102 tem->next = seq_stack;
5103 tem->first = first_insn;
5104 tem->last = last_insn;
5105 tem->sequence_rtl_expr = seq_rtl_expr;
5106
5107 seq_stack = tem;
5108
5109 first_insn = 0;
5110 last_insn = 0;
5111 }
5112
5113 /* Similarly, but indicate that this sequence will be placed in T, an
5114 RTL_EXPR. See the documentation for start_sequence for more
5115 information about how to use this function. */
5116
5117 void
5118 start_sequence_for_rtl_expr (t)
5119 tree t;
5120 {
5121 start_sequence ();
5122
5123 seq_rtl_expr = t;
5124 }
5125
5126 /* Set up the insn chain starting with FIRST as the current sequence,
5127 saving the previously current one. See the documentation for
5128 start_sequence for more information about how to use this function. */
5129
5130 void
5131 push_to_sequence (first)
5132 rtx first;
5133 {
5134 rtx last;
5135
5136 start_sequence ();
5137
5138 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
5139
5140 first_insn = first;
5141 last_insn = last;
5142 }
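/* A minimal usage sketch: push_to_sequence makes it possible to append to
   an insn chain saved earlier, e.g. a chain of deferred insns kept in a
   hypothetical variable CHAIN_HEAD:

	push_to_sequence (chain_head);
	emit_insn (pat);
	chain_head = get_insns ();
	end_sequence ();  */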
5143
5144 /* Set up the insn chain from a chain starting in FIRST and ending in LAST. */
5145
5146 void
5147 push_to_full_sequence (first, last)
5148 rtx first, last;
5149 {
5150 start_sequence ();
5151 first_insn = first;
5152 last_insn = last;
5153 /* We really should have the end of the insn chain here. */
5154 if (last && NEXT_INSN (last))
5155 abort ();
5156 }
5157
5158 /* Set up the outer-level insn chain
5159 as the current sequence, saving the previously current one. */
5160
5161 void
5162 push_topmost_sequence ()
5163 {
5164 struct sequence_stack *stack, *top = NULL;
5165
5166 start_sequence ();
5167
5168 for (stack = seq_stack; stack; stack = stack->next)
5169 top = stack;
5170
5171 first_insn = top->first;
5172 last_insn = top->last;
5173 seq_rtl_expr = top->sequence_rtl_expr;
5174 }
5175
5176 /* After emitting to the outer-level insn chain, update the outer-level
5177 insn chain, and restore the previous saved state. */
5178
5179 void
5180 pop_topmost_sequence ()
5181 {
5182 struct sequence_stack *stack, *top = NULL;
5183
5184 for (stack = seq_stack; stack; stack = stack->next)
5185 top = stack;
5186
5187 top->first = first_insn;
5188 top->last = last_insn;
5189 /* ??? Why don't we save seq_rtl_expr here? */
5190
5191 end_sequence ();
5192 }
5193
5194 /* After emitting to a sequence, restore previous saved state.
5195
5196 To get the contents of the sequence just made, you must call
5197 `get_insns' *before* calling here.
5198
5199 If the compiler might have deferred popping arguments while
5200 generating this sequence, and this sequence will not be immediately
5201 inserted into the instruction stream, use do_pending_stack_adjust
5202 before calling get_insns. That will ensure that the deferred
5203 pops are inserted into this sequence, and not into some random
5204 location in the instruction stream. See INHIBIT_DEFER_POP for more
5205 information about deferred popping of arguments. */
5206
5207 void
5208 end_sequence ()
5209 {
5210 struct sequence_stack *tem = seq_stack;
5211
5212 first_insn = tem->first;
5213 last_insn = tem->last;
5214 seq_rtl_expr = tem->sequence_rtl_expr;
5215 seq_stack = tem->next;
5216
5217 memset (tem, 0, sizeof (*tem));
5218 tem->next = free_sequence_stack;
5219 free_sequence_stack = tem;
5220 }
5221
5222 /* This works like end_sequence, but records the old sequence in FIRST
5223 and LAST. */
5224
5225 void
5226 end_full_sequence (first, last)
5227 rtx *first, *last;
5228 {
5229 *first = first_insn;
5230 *last = last_insn;
5231 end_sequence ();
5232 }
5233
5234 /* Return 1 if currently emitting into a sequence. */
5235
5236 int
5237 in_sequence_p ()
5238 {
5239 return seq_stack != 0;
5240 }
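/* A minimal sketch of how the sequence stack nests: each start_sequence
   pushes the current chain and each end_sequence pops it, so a sequence
   may be opened while another is already active.

	start_sequence ();		open the outer sequence
	start_sequence ();		open an inner sequence
	emit_insn (pat);
	inner = get_insns ();
	end_sequence ();		back in the outer sequence
	emit_insn (inner);		splice the inner insns here
	outer = get_insns ();
	end_sequence ();

   in_sequence_p returns nonzero anywhere between a push and its pop.  */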
5241 \f
5242 /* Put the various virtual registers into REGNO_REG_RTX. */
5243
5244 void
5245 init_virtual_regs (es)
5246 struct emit_status *es;
5247 {
5248 rtx *ptr = es->x_regno_reg_rtx;
5249 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5250 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5251 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5252 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5253 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5254 }
5255
5256 \f
5257 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5258 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5259 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5260 static int copy_insn_n_scratches;
5261
5262 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5263 copied an ASM_OPERANDS.
5264 In that case, it is the original input-operand vector. */
5265 static rtvec orig_asm_operands_vector;
5266
5267 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5268 copied an ASM_OPERANDS.
5269 In that case, it is the copied input-operand vector. */
5270 static rtvec copy_asm_operands_vector;
5271
5272 /* Likewise for the constraints vector. */
5273 static rtvec orig_asm_constraints_vector;
5274 static rtvec copy_asm_constraints_vector;
5275
5276 /* Recursively create a new copy of an rtx for copy_insn.
5277 This function differs from copy_rtx in that it handles SCRATCHes and
5278 ASM_OPERANDs properly.
5279 Normally, this function is not used directly; use copy_insn as front end.
5280 However, you could first copy an insn pattern with copy_insn and then use
5281 this function afterwards to properly copy any REG_NOTEs containing
5282 SCRATCHes. */
5283
5284 rtx
5285 copy_insn_1 (orig)
5286 rtx orig;
5287 {
5288 rtx copy;
5289 int i, j;
5290 RTX_CODE code;
5291 const char *format_ptr;
5292
5293 code = GET_CODE (orig);
5294
5295 switch (code)
5296 {
5297 case REG:
5298 case QUEUED:
5299 case CONST_INT:
5300 case CONST_DOUBLE:
5301 case CONST_VECTOR:
5302 case SYMBOL_REF:
5303 case CODE_LABEL:
5304 case PC:
5305 case CC0:
5306 case ADDRESSOF:
5307 return orig;
5308
5309 case SCRATCH:
5310 for (i = 0; i < copy_insn_n_scratches; i++)
5311 if (copy_insn_scratch_in[i] == orig)
5312 return copy_insn_scratch_out[i];
5313 break;
5314
5315 case CONST:
5316 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
5317 a LABEL_REF, it isn't sharable. */
5318 if (GET_CODE (XEXP (orig, 0)) == PLUS
5319 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
5320 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
5321 return orig;
5322 break;
5323
5324 /* A MEM with a constant address is not sharable. The problem is that
5325 the constant address may need to be reloaded. If the mem is shared,
5326 then reloading one copy of this mem will cause all copies to appear
5327 to have been reloaded. */
5328
5329 default:
5330 break;
5331 }
5332
5333 copy = rtx_alloc (code);
5334
5335 /* Copy the various flags, and other information. We assume that
5336 all fields need copying, and then clear the fields that should
5337 not be copied. That is the sensible default behavior, and forces
5338 us to explicitly document why we are *not* copying a flag. */
5339 memcpy (copy, orig, sizeof (struct rtx_def) - sizeof (rtunion));
5340
5341 /* We do not copy the USED flag, which is used as a mark bit during
5342 walks over the RTL. */
5343 RTX_FLAG (copy, used) = 0;
5344
5345 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5346 if (GET_RTX_CLASS (code) == 'i')
5347 {
5348 RTX_FLAG (copy, jump) = 0;
5349 RTX_FLAG (copy, call) = 0;
5350 RTX_FLAG (copy, frame_related) = 0;
5351 }
5352
5353 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5354
5355 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5356 {
5357 copy->fld[i] = orig->fld[i];
5358 switch (*format_ptr++)
5359 {
5360 case 'e':
5361 if (XEXP (orig, i) != NULL)
5362 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5363 break;
5364
5365 case 'E':
5366 case 'V':
5367 if (XVEC (orig, i) == orig_asm_constraints_vector)
5368 XVEC (copy, i) = copy_asm_constraints_vector;
5369 else if (XVEC (orig, i) == orig_asm_operands_vector)
5370 XVEC (copy, i) = copy_asm_operands_vector;
5371 else if (XVEC (orig, i) != NULL)
5372 {
5373 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5374 for (j = 0; j < XVECLEN (copy, i); j++)
5375 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5376 }
5377 break;
5378
5379 case 't':
5380 case 'w':
5381 case 'i':
5382 case 's':
5383 case 'S':
5384 case 'u':
5385 case '0':
5386 /* These are left unchanged. */
5387 break;
5388
5389 default:
5390 abort ();
5391 }
5392 }
5393
5394 if (code == SCRATCH)
5395 {
5396 i = copy_insn_n_scratches++;
5397 if (i >= MAX_RECOG_OPERANDS)
5398 abort ();
5399 copy_insn_scratch_in[i] = orig;
5400 copy_insn_scratch_out[i] = copy;
5401 }
5402 else if (code == ASM_OPERANDS)
5403 {
5404 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5405 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5406 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5407 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5408 }
5409
5410 return copy;
5411 }
5412
5413 /* Create a new copy of an rtx.
5414 This function differs from copy_rtx in that it handles SCRATCHes and
5415 ASM_OPERANDs properly.
5416 INSN doesn't really have to be a full INSN; it could be just the
5417 pattern. */
5418 rtx
5419 copy_insn (insn)
5420 rtx insn;
5421 {
5422 copy_insn_n_scratches = 0;
5423 orig_asm_operands_vector = 0;
5424 orig_asm_constraints_vector = 0;
5425 copy_asm_operands_vector = 0;
5426 copy_asm_constraints_vector = 0;
5427 return copy_insn_1 (insn);
5428 }
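/* A minimal usage sketch: duplicate an insn's pattern and emit the copy
   after AFTER (see also emit_copy_of_insn_after below, which uses the
   same mechanism and also copies the notes):

	rtx new_insn = emit_insn_after (copy_insn (PATTERN (insn)), after);  */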
5429
5430 /* Initialize data structures and variables in this file
5431 before generating rtl for each function. */
5432
5433 void
5434 init_emit ()
5435 {
5436 struct function *f = cfun;
5437
5438 f->emit = (struct emit_status *) ggc_alloc (sizeof (struct emit_status));
5439 first_insn = NULL;
5440 last_insn = NULL;
5441 seq_rtl_expr = NULL;
5442 cur_insn_uid = 1;
5443 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5444 last_linenum = 0;
5445 last_filename = 0;
5446 first_label_num = label_num;
5447 last_label_num = 0;
5448 seq_stack = NULL;
5449
5450 /* Init the tables that describe all the pseudo regs. */
5451
5452 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5453
5454 f->emit->regno_pointer_align
5455 = (unsigned char *) ggc_alloc_cleared (f->emit->regno_pointer_align_length
5456 * sizeof (unsigned char));
5457
5458 regno_reg_rtx
5459 = (rtx *) ggc_alloc (f->emit->regno_pointer_align_length * sizeof (rtx));
5460
5461 /* Put copies of all the hard registers into regno_reg_rtx. */
5462 memcpy (regno_reg_rtx,
5463 static_regno_reg_rtx,
5464 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5465
5466 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5467 init_virtual_regs (f->emit);
5468
5469 /* Indicate that the virtual registers and stack locations are
5470 all pointers. */
5471 REG_POINTER (stack_pointer_rtx) = 1;
5472 REG_POINTER (frame_pointer_rtx) = 1;
5473 REG_POINTER (hard_frame_pointer_rtx) = 1;
5474 REG_POINTER (arg_pointer_rtx) = 1;
5475
5476 REG_POINTER (virtual_incoming_args_rtx) = 1;
5477 REG_POINTER (virtual_stack_vars_rtx) = 1;
5478 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5479 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5480 REG_POINTER (virtual_cfa_rtx) = 1;
5481
5482 #ifdef STACK_BOUNDARY
5483 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5484 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5485 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5486 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5487
5488 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5489 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5490 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5491 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5492 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5493 #endif
5494
5495 #ifdef INIT_EXPANDERS
5496 INIT_EXPANDERS;
5497 #endif
5498 }
5499
5500 /* Generate the constant 0. */
5501
5502 static rtx
5503 gen_const_vector_0 (mode)
5504 enum machine_mode mode;
5505 {
5506 rtx tem;
5507 rtvec v;
5508 int units, i;
5509 enum machine_mode inner;
5510
5511 units = GET_MODE_NUNITS (mode);
5512 inner = GET_MODE_INNER (mode);
5513
5514 v = rtvec_alloc (units);
5515
5516 /* CONST0_RTX (inner) must be set up before this function is called. */
5517 if (!CONST0_RTX (inner))
5518 abort ();
5519
5520 for (i = 0; i < units; ++i)
5521 RTVEC_ELT (v, i) = CONST0_RTX (inner);
5522
5523 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5524 return tem;
5525 }
5526
5527 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector
5528 when all elements are zero. */
5529 rtx
5530 gen_rtx_CONST_VECTOR (mode, v)
5531 enum machine_mode mode;
5532 rtvec v;
5533 {
5534 rtx inner_zero = CONST0_RTX (GET_MODE_INNER (mode));
5535 int i;
5536
5537 for (i = GET_MODE_NUNITS (mode) - 1; i >= 0; i--)
5538 if (RTVEC_ELT (v, i) != inner_zero)
5539 return gen_rtx_raw_CONST_VECTOR (mode, v);
5540 return CONST0_RTX (mode);
5541 }
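/* A minimal usage sketch, assuming the target provides V4SImode: a vector
   whose elements are all zero comes back as the shared CONST0_RTX rather
   than as a fresh CONST_VECTOR.

	rtvec v = rtvec_alloc (4);
	int i;
	rtx x;

	for (i = 0; i < 4; i++)
	  RTVEC_ELT (v, i) = const0_rtx;
	x = gen_rtx_CONST_VECTOR (V4SImode, v);

   Here X == CONST0_RTX (V4SImode).  */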
5542
5543 /* Create some permanent unique rtl objects shared between all functions.
5544 LINE_NUMBERS is nonzero if line numbers are to be generated. */
5545
5546 void
5547 init_emit_once (line_numbers)
5548 int line_numbers;
5549 {
5550 int i;
5551 enum machine_mode mode;
5552 enum machine_mode double_mode;
5553
5554 /* Initialize the CONST_INT, CONST_DOUBLE, memory attribute, and register
5555 attribute hash tables. */
5556 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5557 const_int_htab_eq, NULL);
5558
5559 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5560 const_double_htab_eq, NULL);
5561
5562 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5563 mem_attrs_htab_eq, NULL);
5564 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5565 reg_attrs_htab_eq, NULL);
5566
5567 no_line_numbers = ! line_numbers;
5568
5569 /* Compute the byte, word, and double modes. */
5570
5571 byte_mode = VOIDmode;
5572 word_mode = VOIDmode;
5573 double_mode = VOIDmode;
5574
5575 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5576 mode = GET_MODE_WIDER_MODE (mode))
5577 {
5578 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5579 && byte_mode == VOIDmode)
5580 byte_mode = mode;
5581
5582 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5583 && word_mode == VOIDmode)
5584 word_mode = mode;
5585 }
5586
5587 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5588 mode = GET_MODE_WIDER_MODE (mode))
5589 {
5590 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5591 && double_mode == VOIDmode)
5592 double_mode = mode;
5593 }
5594
5595 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5596
5597 /* Assign register numbers to the globally defined register rtx.
5598 This must be done at runtime because the register number field
5599 is in a union and some compilers can't initialize unions. */
5600
5601 pc_rtx = gen_rtx (PC, VOIDmode);
5602 cc0_rtx = gen_rtx (CC0, VOIDmode);
5603 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5604 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5605 if (hard_frame_pointer_rtx == 0)
5606 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
5607 HARD_FRAME_POINTER_REGNUM);
5608 if (arg_pointer_rtx == 0)
5609 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5610 virtual_incoming_args_rtx =
5611 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5612 virtual_stack_vars_rtx =
5613 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5614 virtual_stack_dynamic_rtx =
5615 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5616 virtual_outgoing_args_rtx =
5617 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5618 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5619
5620 /* Initialize RTL for commonly used hard registers. These are
5621 copied into regno_reg_rtx as we begin to compile each function. */
5622 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5623 static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5624
5625 #ifdef INIT_EXPANDERS
5626 /* This is to initialize {init|mark|free}_machine_status before the first
5627 call to push_function_context_to. This is needed by the Chill front
5628 end which calls push_function_context_to before the first call to
5629 init_function_start. */
5630 INIT_EXPANDERS;
5631 #endif
5632
5633 /* Create the unique rtx's for certain rtx codes and operand values. */
5634
5635 /* Don't use gen_rtx here since gen_rtx in this case
5636 tries to use these variables. */
5637 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5638 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5639 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5640
5641 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5642 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5643 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5644 else
5645 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5646
5647 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5648 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5649 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5650 REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
5651 REAL_VALUE_FROM_INT (dconstm2, -2, -1, double_mode);
5652
5653 dconsthalf = dconst1;
5654 dconsthalf.exp--;
5655
5656 for (i = 0; i <= 2; i++)
5657 {
5658 REAL_VALUE_TYPE *r =
5659 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5660
5661 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5662 mode = GET_MODE_WIDER_MODE (mode))
5663 const_tiny_rtx[i][(int) mode] =
5664 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5665
5666 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5667
5668 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5669 mode = GET_MODE_WIDER_MODE (mode))
5670 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5671
5672 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5673 mode != VOIDmode;
5674 mode = GET_MODE_WIDER_MODE (mode))
5675 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5676 }
5677
5678 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5679 mode != VOIDmode;
5680 mode = GET_MODE_WIDER_MODE (mode))
5681 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5682
5683 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5684 mode != VOIDmode;
5685 mode = GET_MODE_WIDER_MODE (mode))
5686 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5687
5688 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5689 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5690 const_tiny_rtx[0][i] = const0_rtx;
5691
5692 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5693 if (STORE_FLAG_VALUE == 1)
5694 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5695
5696 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5697 return_address_pointer_rtx
5698 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5699 #endif
5700
5701 #ifdef STRUCT_VALUE
5702 struct_value_rtx = STRUCT_VALUE;
5703 #else
5704 struct_value_rtx = gen_rtx_REG (Pmode, STRUCT_VALUE_REGNUM);
5705 #endif
5706
5707 #ifdef STRUCT_VALUE_INCOMING
5708 struct_value_incoming_rtx = STRUCT_VALUE_INCOMING;
5709 #else
5710 #ifdef STRUCT_VALUE_INCOMING_REGNUM
5711 struct_value_incoming_rtx
5712 = gen_rtx_REG (Pmode, STRUCT_VALUE_INCOMING_REGNUM);
5713 #else
5714 struct_value_incoming_rtx = struct_value_rtx;
5715 #endif
5716 #endif
5717
5718 #ifdef STATIC_CHAIN_REGNUM
5719 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5720
5721 #ifdef STATIC_CHAIN_INCOMING_REGNUM
5722 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5723 static_chain_incoming_rtx
5724 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5725 else
5726 #endif
5727 static_chain_incoming_rtx = static_chain_rtx;
5728 #endif
5729
5730 #ifdef STATIC_CHAIN
5731 static_chain_rtx = STATIC_CHAIN;
5732
5733 #ifdef STATIC_CHAIN_INCOMING
5734 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5735 #else
5736 static_chain_incoming_rtx = static_chain_rtx;
5737 #endif
5738 #endif
5739
5740 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5741 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5742 }
5743 \f
5744 /* Query and clear/restore no_line_numbers.  This is used by the
5745 switch / case handling in stmt.c to give proper line numbers in
5746 warnings about unreachable code. */
5747
5748 int
5749 force_line_numbers ()
5750 {
5751 int old = no_line_numbers;
5752
5753 no_line_numbers = 0;
5754 if (old)
5755 force_next_line_note ();
5756 return old;
5757 }
5758
5759 void
5760 restore_line_number_status (old_value)
5761 int old_value;
5762 {
5763 no_line_numbers = old_value;
5764 }
5765
5766 /* Produce an exact duplicate of insn INSN after AFTER.  Take care to
5767 update any libcall regions if present. */
5768
5769 rtx
5770 emit_copy_of_insn_after (insn, after)
5771 rtx insn, after;
5772 {
5773 rtx new;
5774 rtx note1, note2, link;
5775
5776 switch (GET_CODE (insn))
5777 {
5778 case INSN:
5779 new = emit_insn_after (copy_insn (PATTERN (insn)), after);
5780 break;
5781
5782 case JUMP_INSN:
5783 new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5784 break;
5785
5786 case CALL_INSN:
5787 new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5788 if (CALL_INSN_FUNCTION_USAGE (insn))
5789 CALL_INSN_FUNCTION_USAGE (new)
5790 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5791 SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
5792 CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
5793 break;
5794
5795 default:
5796 abort ();
5797 }
5798
5799 /* Update LABEL_NUSES. */
5800 mark_jump_label (PATTERN (new), new, 0);
5801
5802 INSN_SCOPE (new) = INSN_SCOPE (insn);
5803
5804 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
5805 make them. */
5806 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5807 if (REG_NOTE_KIND (link) != REG_LABEL)
5808 {
5809 if (GET_CODE (link) == EXPR_LIST)
5810 REG_NOTES (new)
5811 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
5812 XEXP (link, 0),
5813 REG_NOTES (new)));
5814 else
5815 REG_NOTES (new)
5816 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
5817 XEXP (link, 0),
5818 REG_NOTES (new)));
5819 }
5820
5821 /* Fix the libcall sequences. */
5822 if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
5823 {
5824 rtx p = new;
5825 while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
5826 p = PREV_INSN (p);
5827 XEXP (note1, 0) = p;
5828 XEXP (note2, 0) = new;
5829 }
5830 INSN_CODE (new) = INSN_CODE (insn);
5831 return new;
5832 }
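/* A minimal usage sketch: duplicating every real insn in the range
   FIRST..LAST so the copies follow AFTER (all three names hypothetical,
   assumed to be insns in the current chain):

	for (insn = first; insn != NEXT_INSN (last); insn = NEXT_INSN (insn))
	  if (INSN_P (insn))
	    after = emit_copy_of_insn_after (insn, after);  */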
5833
5834 #include "gt-emit-rtl.h"