/* Emit RTL for the GNU C-Compiler expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains the functions `gen_rtx', `gen_reg_rtx'
   and `gen_label_rtx' that are the usual ways of creating rtl
   expressions for most purposes.

   It also has the functions for creating insns and linking
   them in the doubly-linked chain.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines use `gen_rtx' to make
   the individual rtx's of the pattern; what is machine dependent
   is the kind of rtx's they make and what arguments they use.  */

#include "config.h"
#include "system.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "real.h"
#include "obstack.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"

/* Commonly used modes.  */

enum machine_mode byte_mode;    /* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;    /* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;  /* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;     /* Mode whose width is POINTER_SIZE.  */


/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static int label_num = 1;

/* Highest label number in current function.
   Zero means use the value of label_num instead.
   This is nonzero only when belatedly compiling an inline function.  */

static int last_label_num;

/* Value label_num had when set_new_first_and_last_label_number was called.
   If label_num has not changed since then, last_label_num is valid.  */

static int base_label_num;

/* Nonzero means do not generate NOTEs for source line numbers.  */

static int no_line_numbers;

/* Commonly used rtx's, so that we only need space for one copy.
   These are initialized once for the entire compilation.
   All of these except perhaps the floating-point CONST_DOUBLEs
   are unique; no other rtx-object will be equal to any of these.  */

rtx global_rtl[GR_MAX];

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;

/* All references to the following fixed hard registers go through
   these unique rtl objects.  On machines where the frame-pointer and
   arg-pointer are the same register, they use the same unique object.

   After register allocation, other rtl objects which used to be pseudo-regs
   may be clobbered to refer to the frame-pointer register.
   But references that were originally to the frame-pointer can be
   distinguished from the others because they contain frame_pointer_rtx.

   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
   tricky: until register elimination has taken place hard_frame_pointer_rtx
   should be used if it is being set, and frame_pointer_rtx otherwise.  After
   register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are the same (as on most machines),
   these are the same.

   In an inline procedure, the stack and frame pointer rtxs may not be
   used for anything else.  */
rtx struct_value_rtx;               /* (REG:Pmode STRUCT_VALUE_REGNUM) */
rtx struct_value_incoming_rtx;      /* (REG:Pmode STRUCT_VALUE_INCOMING_REGNUM) */
rtx static_chain_rtx;               /* (REG:Pmode STATIC_CHAIN_REGNUM) */
rtx static_chain_incoming_rtx;      /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
rtx pic_offset_table_rtx;           /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */

/* This is used to implement __builtin_return_address for some machines.
   See for instance the MIPS port.  */
rtx return_address_pointer_rtx;     /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static htab_t const_int_htab;

/* start_sequence and gen_sequence can make a lot of rtx expressions which are
   shortly thrown away.  We use two mechanisms to prevent this waste:

   For sizes up to 5 elements, we keep a SEQUENCE and its associated
   rtvec for use by gen_sequence.  One entry for each size is
   sufficient because most cases are calls to gen_sequence followed by
   immediately emitting the SEQUENCE.  Reuse is safe since emitting a
   sequence is destructive on the insn in it anyway and hence can't be
   redone.

   We do not bother to save this cached data over nested function calls.
   Instead, we just reinitialize them.  */

#define SEQUENCE_RESULT_SIZE 5

static rtx sequence_result[SEQUENCE_RESULT_SIZE];

/* During RTL generation, we also keep a list of free INSN rtl codes.  */
static rtx free_insn;

#define first_insn (cfun->emit->x_first_insn)
#define last_insn (cfun->emit->x_last_insn)
#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
#define last_linenum (cfun->emit->x_last_linenum)
#define last_filename (cfun->emit->x_last_filename)
#define first_label_num (cfun->emit->x_first_label_num)

static rtx make_jump_insn_raw           PARAMS ((rtx));
static rtx make_call_insn_raw           PARAMS ((rtx));
static rtx find_line_note               PARAMS ((rtx));
static void mark_sequence_stack         PARAMS ((struct sequence_stack *));
static void unshare_all_rtl_1           PARAMS ((rtx));
static void unshare_all_decls           PARAMS ((tree));
static void reset_used_decls            PARAMS ((tree));
static void mark_label_nuses            PARAMS ((rtx));
static hashval_t const_int_htab_hash    PARAMS ((const void *));
static int const_int_htab_eq            PARAMS ((const void *,
                                                 const void *));
static int rtx_htab_mark_1              PARAMS ((void **, void *));
static void rtx_htab_mark               PARAMS ((void *));

/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;

\f
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (x)
     const void *x;
{
  return (hashval_t) INTVAL ((const struct rtx_def *) x);
}

/* Returns non-zero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (x, y)
     const void *x;
     const void *y;
{
  return (INTVAL ((const struct rtx_def *) x) == *((const HOST_WIDE_INT *) y));
}

/* Mark the hash-table element X (which is really a pointer to an
   rtx).  */

static int
rtx_htab_mark_1 (x, data)
     void **x;
     void *data ATTRIBUTE_UNUSED;
{
  ggc_mark_rtx (*x);
  return 1;
}

/* Mark all the elements of HTAB (which is really an htab_t full of
   rtxs).  */

static void
rtx_htab_mark (htab)
     void *htab;
{
  htab_traverse (*((htab_t *) htab), rtx_htab_mark_1, NULL);
}

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (mode, regno)
     enum machine_mode mode;
     int regno;
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (mode, arg)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     HOST_WIDE_INT arg;
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
                                   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
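
/* Illustrative note, not part of the original file: thanks to the
   caching above, CONST_INTs can be compared by pointer.  The usual
   entry point is the GEN_INT macro from rtl.h, which expands to
   gen_rtx_CONST_INT (VOIDmode, ...).  For example, on a host whose
   HOST_WIDE_INT is 64 bits:

	rtx a = GEN_INT (2);    .. small value, taken from const_int_rtx[]
	rtx b = GEN_INT (2);    .. same pointer as A
	rtx c = GEN_INT ((HOST_WIDE_INT) 1 << 40);
				.. too big to precompute; interned in const_int_htab

   Here a == b holds as a plain pointer comparison, and repeated
   requests for the large value return the same hashed rtx.  */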

/* CONST_DOUBLEs need special handling because their length is known
   only at run-time.  */

rtx
gen_rtx_CONST_DOUBLE (mode, arg0, arg1, arg2)
     enum machine_mode mode;
     rtx arg0;
     HOST_WIDE_INT arg1, arg2;
{
  rtx r = rtx_alloc (CONST_DOUBLE);
  int i;

  PUT_MODE (r, mode);
  XEXP (r, 0) = arg0;
  X0EXP (r, 1) = NULL_RTX;
  XWINT (r, 2) = arg1;
  XWINT (r, 3) = arg2;

  for (i = GET_RTX_LENGTH (CONST_DOUBLE) - 1; i > 3; --i)
    XWINT (r, i) = 0;

  return r;
}

rtx
gen_rtx_REG (mode, regno)
     enum machine_mode mode;
     int regno;
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM)
	return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM)
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

  return gen_raw_REG (mode, regno);
}
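
/* For illustration (added, not in the original source): outside of
   reload, asking for a well-known hard register in Pmode hands back
   the single shared object, so

	gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) == stack_pointer_rtx

   holds, which is what lets later passes recognize explicit stack and
   frame pointer references by pointer identity.  */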

rtx
gen_rtx_MEM (mode, addr)
     enum machine_mode mode;
     rtx addr;
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ALIAS_SET (rt) = 0;

  return rt;
}

rtx
gen_rtx_SUBREG (mode, reg, offset)
     enum machine_mode mode;
     rtx reg;
     int offset;
{
  /* This is the most common failure type.
     Catch it early so we can see who does it.  */
  if ((offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  /* This check isn't usable right now because combine will
     throw arbitrary crap like a CALL into a SUBREG in
     gen_lowpart_for_combine so we must just eat it.  */
#if 0
  /* Check for this too.  */
  if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
    abort ();
#endif
  return gen_rtx_fmt_ei (SUBREG, mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part
 * of REG if MODE is smaller than mode of REG, otherwise
 * paradoxical SUBREG.  */
rtx
gen_lowpart_SUBREG (mode, reg)
     enum machine_mode mode;
     rtx reg;
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
                         subreg_lowpart_offset (mode, inmode));
}
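
/* Example (added for illustration): on a little-endian target with
   4-byte words, the low SImode part of a DImode pseudo is

	gen_lowpart_SUBREG (SImode, x)  ==>  (subreg:SI (reg:DI N) 0)

   while on a big-endian target the low word lives at byte offset 4:

	gen_lowpart_SUBREG (SImode, x)  ==>  (subreg:SI (reg:DI N) 4)

   The byte offset comes from subreg_lowpart_offset, defined later in
   this file.  */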
\f
/* rtx gen_rtx (code, mode, [element1, ..., elementn])
**
**	    This routine generates an RTX of the size specified by
**	<code>, which is an RTX code.  The RTX structure is initialized
**	from the arguments <element1> through <elementn>, which are
**	interpreted according to the specific RTX type's format.  The
**	special machine mode associated with the rtx (if any) is specified
**	in <mode>.
**
**	    gen_rtx can be invoked in a way which resembles the lisp-like
**	rtx it will generate.  For example, the following rtx structure:
**
**	      (plus:QI (mem:QI (reg:SI 1))
**		       (mem:QI (plus:SI (reg:SI 2) (reg:SI 3))))
**
**	    ...would be generated by the following C code:
**
**	    gen_rtx (PLUS, QImode,
**		     gen_rtx (MEM, QImode,
**			      gen_rtx (REG, SImode, 1)),
**		     gen_rtx (MEM, QImode,
**			      gen_rtx (PLUS, SImode,
**				       gen_rtx (REG, SImode, 2),
**				       gen_rtx (REG, SImode, 3))))
*/

/*VARARGS2*/
rtx
gen_rtx VPARAMS ((enum rtx_code code, enum machine_mode mode, ...))
{
#ifndef ANSI_PROTOTYPES
  enum rtx_code code;
  enum machine_mode mode;
#endif
  va_list p;
  register int i;               /* Array indices...  */
  register const char *fmt;     /* Current rtx's format...  */
  register rtx rt_val;          /* RTX to return to caller...  */

  VA_START (p, mode);

#ifndef ANSI_PROTOTYPES
  code = va_arg (p, enum rtx_code);
  mode = va_arg (p, enum machine_mode);
#endif

  switch (code)
    {
    case CONST_INT:
      rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
      break;

    case CONST_DOUBLE:
      {
	rtx arg0 = va_arg (p, rtx);
	HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);
	HOST_WIDE_INT arg2 = va_arg (p, HOST_WIDE_INT);
	rt_val = gen_rtx_CONST_DOUBLE (mode, arg0, arg1, arg2);
      }
      break;

    case REG:
      rt_val = gen_rtx_REG (mode, va_arg (p, int));
      break;

    case MEM:
      rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
      break;

    default:
      rt_val = rtx_alloc (code);        /* Allocate the storage space.  */
      rt_val->mode = mode;              /* Store the machine mode...  */

      fmt = GET_RTX_FORMAT (code);      /* Find the right format...  */
      for (i = 0; i < GET_RTX_LENGTH (code); i++)
	{
	  switch (*fmt++)
	    {
	    case '0':           /* Unused field.  */
	      break;

	    case 'i':           /* An integer?  */
	      XINT (rt_val, i) = va_arg (p, int);
	      break;

	    case 'w':           /* A wide integer?  */
	      XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
	      break;

	    case 's':           /* A string?  */
	      XSTR (rt_val, i) = va_arg (p, char *);
	      break;

	    case 'e':           /* An expression?  */
	    case 'u':           /* An insn?  Same except when printing.  */
	      XEXP (rt_val, i) = va_arg (p, rtx);
	      break;

	    case 'E':           /* An RTX vector?  */
	      XVEC (rt_val, i) = va_arg (p, rtvec);
	      break;

	    case 'b':           /* A bitmap?  */
	      XBITMAP (rt_val, i) = va_arg (p, bitmap);
	      break;

	    case 't':           /* A tree?  */
	      XTREE (rt_val, i) = va_arg (p, tree);
	      break;

	    default:
	      abort ();
	    }
	}
      break;
    }

  va_end (p);
  return rt_val;
}

/* gen_rtvec (n, [rt1, ..., rtn])
**
**	    This routine creates an rtvec and stores within it the
**	pointers to rtx's which are its arguments.
*/

/*VARARGS1*/
rtvec
gen_rtvec VPARAMS ((int n, ...))
{
#ifndef ANSI_PROTOTYPES
  int n;
#endif
  int i;
  va_list p;
  rtx *vector;

  VA_START (p, n);

#ifndef ANSI_PROTOTYPES
  n = va_arg (p, int);
#endif

  if (n == 0)
    return NULL_RTVEC;          /* Don't allocate an empty rtvec...  */

  vector = (rtx *) alloca (n * sizeof (rtx));

  for (i = 0; i < n; i++)
    vector[i] = va_arg (p, rtx);
  va_end (p);

  return gen_rtvec_v (n, vector);
}
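
/* Usage sketch (added, not in the original file): building a
   two-element vector, e.g. for a PARALLEL pattern:

	rtvec v = gen_rtvec (2, pat0, pat1);
	rtx par = gen_rtx_PARALLEL (VOIDmode, v);

   gen_rtvec copies its varargs into a temporary array and defers the
   real allocation to gen_rtvec_v below.  */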

rtvec
gen_rtvec_v (n, argp)
     int n;
     rtx *argp;
{
  register int i;
  register rtvec rt_val;

  if (n == 0)
    return NULL_RTVEC;          /* Don't allocate an empty rtvec...  */

  rt_val = rtvec_alloc (n);     /* Allocate an rtvec...  */

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (mode)
     enum machine_mode mode;
{
  struct function *f = cfun;
  register rtx val;

  /* Don't let anything called after initial flow analysis create new
     registers.  */
  if (no_new_pseudos)
    abort ();

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      int size = GET_MODE_UNIT_SIZE (mode);
      enum machine_mode partmode
	= mode_for_size (size * BITS_PER_UNIT,
			 (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
			  ? MODE_FLOAT : MODE_INT),
			 0);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align and regno_reg_rtx are large enough
     to have an element for this pseudo reg number.  */

  if (reg_rtx_no == f->emit->regno_pointer_align_length)
    {
      int old_size = f->emit->regno_pointer_align_length;
      rtx *new1;
      char *new;
      new = xrealloc (f->emit->regno_pointer_align, old_size * 2);
      memset (new + old_size, 0, old_size);
      f->emit->regno_pointer_align = (unsigned char *) new;

      new1 = (rtx *) xrealloc (f->emit->x_regno_reg_rtx,
			       old_size * 2 * sizeof (rtx));
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      f->emit->regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
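
/* Illustration (added): with generating_concat_p set, a complex-mode
   pseudo is built as a CONCAT of two independent pseudos, e.g.

	gen_reg_rtx (DCmode)  ==>  (concat:DC (reg:DF 70) (reg:DF 71))

   where 70 and 71 stand for whatever pseudo numbers come next; the
   real and imaginary halves can then be allocated independently.  */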

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (reg)
     rtx reg;
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else if (GET_CODE (reg) == REG)
    REG_USERVAR_P (reg) = 1;
  else
    abort ();
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (reg, align)
     rtx reg;
     int align;
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num ()
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num ()
{
  if (last_label_num && label_num == base_label_num)
    return last_label_num;
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num ()
{
  return first_label_num;
}
\f
/* Return the final regno of X, which is a SUBREG of a hard
   register.  */
int
subreg_hard_regno (x, check_mode)
     register rtx x;
     int check_mode;
{
  enum machine_mode mode = GET_MODE (x);
  unsigned int byte_offset, base_regno, final_regno;
  rtx reg = SUBREG_REG (x);

  /* This is where we attempt to catch illegal subregs
     created by the compiler.  */
  if (GET_CODE (x) != SUBREG
      || GET_CODE (reg) != REG)
    abort ();
  base_regno = REGNO (reg);
  if (base_regno >= FIRST_PSEUDO_REGISTER)
    abort ();
  if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
    abort ();

  /* Catch non-congruent offsets too.  */
  byte_offset = SUBREG_BYTE (x);
  if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  final_regno = subreg_regno (x);

  return final_regno;
}

/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (mode, x)
     enum machine_mode mode;
     register rtx x;
{
  int msize = GET_MODE_SIZE (mode);
  int xsize = GET_MODE_SIZE (GET_MODE (x));
  int offset = 0;

  if (GET_MODE (x) == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if (GET_MODE (x) != VOIDmode
      && ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
	  > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
    return 0;

  offset = subreg_lowpart_offset (mode, GET_MODE (x));

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
	   || GET_CODE (x) == CONCAT)
    return simplify_gen_subreg (mode, x, GET_MODE (x), offset);
  /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
     from the low-order part of the constant.  */
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      /* If MODE is twice the host word size, X is already the desired
	 representation.  Otherwise, if MODE is wider than a word, we can't
	 do this.  If MODE is exactly a word, return just one CONST_INT.  */

      if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
	return x;
      else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	return 0;
      else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
	return (GET_CODE (x) == CONST_INT ? x
		: GEN_INT (CONST_DOUBLE_LOW (x)));
      else
	{
	  /* MODE must be narrower than HOST_BITS_PER_WIDE_INT.  */
	  HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
			       : CONST_DOUBLE_LOW (x));

	  /* Sign extend to HOST_WIDE_INT.  */
	  val = trunc_int_for_mode (val, mode);

	  return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
		  : GEN_INT (val));
	}
    }

#ifndef REAL_ARITHMETIC
  /* If X is an integral constant but we want it in floating-point, it
     must be the case that we have a union of an integer and a floating-point
     value.  If the machine-parameters allow it, simulate that union here
     and return the result.  The two-word and single-word cases are
     different.  */

  else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
	     && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
	    || flag_pretend_float)
	   && GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_SIZE (mode) == UNITS_PER_WORD
	   && GET_CODE (x) == CONST_INT
	   && sizeof (float) * HOST_BITS_PER_CHAR == HOST_BITS_PER_WIDE_INT)
    {
      union {HOST_WIDE_INT i; float d; } u;

      u.i = INTVAL (x);
      return CONST_DOUBLE_FROM_REAL_VALUE (u.d, mode);
    }
  else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
	     && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
	    || flag_pretend_float)
	   && GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_SIZE (mode) == 2 * UNITS_PER_WORD
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
	   && GET_MODE (x) == VOIDmode
	   && (sizeof (double) * HOST_BITS_PER_CHAR
	       == 2 * HOST_BITS_PER_WIDE_INT))
    {
      union {HOST_WIDE_INT i[2]; double d; } u;
      HOST_WIDE_INT low, high;

      if (GET_CODE (x) == CONST_INT)
	low = INTVAL (x), high = low >> (HOST_BITS_PER_WIDE_INT - 1);
      else
	low = CONST_DOUBLE_LOW (x), high = CONST_DOUBLE_HIGH (x);

#ifdef HOST_WORDS_BIG_ENDIAN
      u.i[0] = high, u.i[1] = low;
#else
      u.i[0] = low, u.i[1] = high;
#endif

      return CONST_DOUBLE_FROM_REAL_VALUE (u.d, mode);
    }

  /* Similarly, if this is converting a floating-point value into a
     single-word integer.  Only do this if the host and target parameters are
     compatible.  */

  else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
	     && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
	    || flag_pretend_float)
	   && (GET_MODE_CLASS (mode) == MODE_INT
	       || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_CODE (x) == CONST_DOUBLE
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == BITS_PER_WORD)
    return constant_subword (x, (offset / UNITS_PER_WORD), GET_MODE (x));

  /* Similarly, if this is converting a floating-point value into a
     two-word integer, we can do this one word at a time and make an
     integer.  Only do this if the host and target parameters are
     compatible.  */

  else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
	     && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
	    || flag_pretend_float)
	   && (GET_MODE_CLASS (mode) == MODE_INT
	       || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_CODE (x) == CONST_DOUBLE
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 2 * BITS_PER_WORD)
    {
      rtx lowpart, highpart;

      lowpart = constant_subword (x,
				  (offset / UNITS_PER_WORD) + WORDS_BIG_ENDIAN,
				  GET_MODE (x));
      highpart = constant_subword (x,
				   (offset / UNITS_PER_WORD) + (! WORDS_BIG_ENDIAN),
				   GET_MODE (x));
      if (lowpart && GET_CODE (lowpart) == CONST_INT
	  && highpart && GET_CODE (highpart) == CONST_INT)
	return immed_double_const (INTVAL (lowpart), INTVAL (highpart), mode);
    }
#else /* ifndef REAL_ARITHMETIC */

  /* When we have a FP emulator, we can handle all conversions between
     FP and integer operands.  This simplifies reload because it
     doesn't have to deal with constructs like (subreg:DI
     (const_double:SF ...)) or (subreg:DF (const_int ...)).  */
  /* Single-precision floats are always 32-bits and double-precision
     floats are always 64-bits.  */

  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 32
	   && GET_CODE (x) == CONST_INT)
    {
      REAL_VALUE_TYPE r;
      HOST_WIDE_INT i;

      i = INTVAL (x);
      r = REAL_VALUE_FROM_TARGET_SINGLE (i);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 64
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
	   && GET_MODE (x) == VOIDmode)
    {
      REAL_VALUE_TYPE r;
      HOST_WIDE_INT i[2];
      HOST_WIDE_INT low, high;

      if (GET_CODE (x) == CONST_INT)
	{
	  low = INTVAL (x);
	  high = low >> (HOST_BITS_PER_WIDE_INT - 1);
	}
      else
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}

      /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
	 target machine.  */
      if (WORDS_BIG_ENDIAN)
	i[0] = high, i[1] = low;
      else
	i[0] = low, i[1] = high;

      r = REAL_VALUE_FROM_TARGET_DOUBLE (i);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_CODE (x) == CONST_DOUBLE
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
    {
      REAL_VALUE_TYPE r;
      long i[4];  /* Only the low 32 bits of each 'long' are used.  */
      int endian = WORDS_BIG_ENDIAN ? 1 : 0;

      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      switch (GET_MODE_BITSIZE (GET_MODE (x)))
	{
	case 32:
	  REAL_VALUE_TO_TARGET_SINGLE (r, i[endian]);
	  i[1 - endian] = 0;
	  break;
	case 64:
	  REAL_VALUE_TO_TARGET_DOUBLE (r, i);
	  break;
	case 96:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i + endian);
	  i[3 - 3 * endian] = 0;
	  break;
	case 128:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i);
	  break;
	default:
	  abort ();
	}

      /* Now, pack the 32-bit elements of the array into a CONST_DOUBLE
	 and return it.  */
#if HOST_BITS_PER_WIDE_INT == 32
      return immed_double_const (i[endian], i[1 - endian], mode);
#else
      {
	int c;

	if (HOST_BITS_PER_WIDE_INT != 64)
	  abort ();

	for (c = 0; c < 4; c++)
	  i[c] &= ~ (0L);

	switch (GET_MODE_BITSIZE (GET_MODE (x)))
	  {
	  case 32:
	  case 64:
	    return immed_double_const (((unsigned long) i[endian]) |
				       (((HOST_WIDE_INT) i[1 - endian]) << 32),
				       0, mode);
	  case 96:
	  case 128:
	    return immed_double_const (((unsigned long) i[endian * 3]) |
				       (((HOST_WIDE_INT) i[1 + endian]) << 32),
				       ((unsigned long) i[2 - endian]) |
				       (((HOST_WIDE_INT) i[3 - endian * 3]) << 32),
				       mode);
	  default:
	    abort ();
	  }
      }
#endif
    }
#endif /* ifndef REAL_ARITHMETIC */

  /* Otherwise, we can't do this.  */
  return 0;
}
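
/* Example of the extension case above (added for illustration):

	gen_lowpart_common (SImode, (zero_extend:DI (reg:SI 60)))
	  ==>  (reg:SI 60)

   i.e. the low part of a zero- or sign-extension is simply the object
   being extended, with no SUBREG required.  */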
\f
/* Return the real part (which has mode MODE) of a complex value X.
   This always comes at the low address in memory.  */

rtx
gen_realpart (mode, x)
     enum machine_mode mode;
     register rtx x;
{
  if (WORDS_BIG_ENDIAN
      && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
      && REG_P (x)
      && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access real part of complex value in hard register");
  else if (WORDS_BIG_ENDIAN)
    return gen_highpart (mode, x);
  else
    return gen_lowpart (mode, x);
}

/* Return the imaginary part (which has mode MODE) of a complex value X.
   This always comes at the high address in memory.  */

rtx
gen_imagpart (mode, x)
     enum machine_mode mode;
     register rtx x;
{
  if (WORDS_BIG_ENDIAN)
    return gen_lowpart (mode, x);
  else if (! WORDS_BIG_ENDIAN
	   && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
	   && REG_P (x)
	   && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access imaginary part of complex value in hard register");
  else
    return gen_highpart (mode, x);
}

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the real part of the complex value in its containing reg.
   Complex values are always stored with the real part in the first word,
   regardless of WORDS_BIG_ENDIAN.  */

int
subreg_realpart_p (x)
     rtx x;
{
  if (GET_CODE (x) != SUBREG)
    abort ();

  return ((unsigned int) SUBREG_BYTE (x)
	  < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
}
\f
/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
   return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
   least-significant part of X.
   MODE specifies how big a part of X to return;
   it usually should not be larger than a word.
   If X is a MEM whose address is a QUEUED, the value may be so also.  */

rtx
gen_lowpart (mode, x)
     enum machine_mode mode;
     register rtx x;
{
  rtx result = gen_lowpart_common (mode, x);

  if (result)
    return result;
  else if (GET_CODE (x) == REG)
    {
      /* Must be a hard reg that's not valid in MODE.  */
      result = gen_lowpart_common (mode, copy_to_reg (x));
      if (result == 0)
	abort ();
      return result;
    }
  else if (GET_CODE (x) == MEM)
    {
      /* The only additional case we can do is MEM.  */
      register int offset = 0;
      if (WORDS_BIG_ENDIAN)
	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));

      if (BYTES_BIG_ENDIAN)
	/* Adjust the address so that the address-after-the-data
	   is unchanged.  */
	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));

      return adjust_address (x, mode, offset);
    }
  else if (GET_CODE (x) == ADDRESSOF)
    return gen_lowpart (mode, force_reg (GET_MODE (x), x));
  else
    abort ();
}
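
/* MEM example (added, not in the original source): on a big-endian
   target with 4-byte words, the low SImode word of a DImode memory
   operand sits in the last four bytes, so roughly

	gen_lowpart (SImode, (mem:DI (reg:SI 100)))
	  ==>  (mem:SI (plus:SI (reg:SI 100) (const_int 4)))

   while a little-endian target keeps byte offset 0.  */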

/* Like `gen_lowpart', but refer to the most significant part.
   This is used to access the imaginary part of a complex number.  */

rtx
gen_highpart (mode, x)
     enum machine_mode mode;
     register rtx x;
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  if (msize > UNITS_PER_WORD
      && msize != GET_MODE_UNIT_SIZE (GET_MODE (x)))
    abort ();

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (GET_CODE (result) == MEM)
    result = validize_mem (result);

  if (!result)
    abort ();
  return result;
}

/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */
rtx
gen_highpart_mode (outermode, innermode, exp)
     enum machine_mode outermode, innermode;
     rtx exp;
{
  if (GET_MODE (exp) != VOIDmode)
    {
      if (GET_MODE (exp) != innermode)
	abort ();
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return offset in bytes to get OUTERMODE low part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_lowpart_offset (outermode, innermode)
     enum machine_mode outermode, innermode;
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
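
/* Worked example (added): for INNERMODE DImode and OUTERMODE QImode on
   a target with 4-byte words, difference = 8 - 1 = 7.  With both
   WORDS_BIG_ENDIAN and BYTES_BIG_ENDIAN the offset is
   (7 / 4) * 4 + 7 % 4 = 7, the last byte in memory, which is where the
   least significant byte of a big-endian value lives; on a fully
   little-endian target the offset is 0.  */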

/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (outermode, innermode)
     enum machine_mode outermode, innermode;
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
    abort ();

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (x)
     rtx x;
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}
\f

/* Helper routine for all the constant cases of operand_subword.
   Some places invoke this directly.  */

rtx
constant_subword (op, offset, mode)
     rtx op;
     int offset;
     enum machine_mode mode;
{
  int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
  HOST_WIDE_INT val;

  /* If OP is already an integer word, return it.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
    return op;

#ifdef REAL_ARITHMETIC
  /* The output is some bits, the width of the target machine's word.
     A wider-word host can surely hold them in a CONST_INT.  A narrower-word
     host can't.  */
  if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
      && GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 64
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      /* We handle 32-bit and >= 64-bit words here.  Note that the order in
	 which the words are written depends on the word endianness.
	 ??? This is a potential portability problem and should
	 be fixed at some point.
	 We must exercise caution with the sign bit.  By definition there
	 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
	 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
	 So we explicitly mask and sign-extend as necessary.  */
      if (BITS_PER_WORD == 32)
	{
	  val = k[offset];
	  val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
	  return GEN_INT (val);
	}
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset == 0)
	{
	  val = k[! WORDS_BIG_ENDIAN];
	  val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
	  val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
	  return GEN_INT (val);
	}
#endif
      else if (BITS_PER_WORD == 16)
	{
	  val = k[offset >> 1];
	  if ((offset & 1) == ! WORDS_BIG_ENDIAN)
	    val >>= 16;
	  val = ((val & 0xffff) ^ 0x8000) - 0x8000;
	  return GEN_INT (val);
	}
      else
	abort ();
    }
  else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
	   && GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) > 64
	   && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[4];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (BITS_PER_WORD == 32)
	{
	  val = k[offset];
	  val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
	  return GEN_INT (val);
	}
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset <= 1)
	{
	  val = k[offset * 2 + ! WORDS_BIG_ENDIAN];
	  val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
	  val |= (HOST_WIDE_INT) k[offset * 2 + WORDS_BIG_ENDIAN] & 0xffffffff;
	  return GEN_INT (val);
	}
#endif
      else
	abort ();
    }
#else /* no REAL_ARITHMETIC */
  if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
	&& HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
       || flag_pretend_float)
      && GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_SIZE (mode) == 2 * UNITS_PER_WORD
      && GET_CODE (op) == CONST_DOUBLE)
    {
      /* The constant is stored in the host's word-ordering,
	 but we want to access it in the target's word-ordering.  Some
	 compilers don't like a conditional inside macro args, so we have two
	 copies of the return.  */
#ifdef HOST_WORDS_BIG_ENDIAN
      return GEN_INT (offset == WORDS_BIG_ENDIAN
		      ? CONST_DOUBLE_HIGH (op) : CONST_DOUBLE_LOW (op));
#else
      return GEN_INT (offset != WORDS_BIG_ENDIAN
		      ? CONST_DOUBLE_HIGH (op) : CONST_DOUBLE_LOW (op));
#endif
    }
#endif /* no REAL_ARITHMETIC */

  /* Single word float is a little harder, since single- and double-word
     values often do not have the same high-order bits.  We have already
     verified that we want the only defined word of the single-word value.  */
#ifdef REAL_ARITHMETIC
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 32
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      /* Sign extend from known 32-bit value to HOST_WIDE_INT.  */
      val = l;
      val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;

      if (BITS_PER_WORD == 16)
	{
	  if ((offset & 1) == ! WORDS_BIG_ENDIAN)
	    val >>= 16;
	  val = ((val & 0xffff) ^ 0x8000) - 0x8000;
	}

      return GEN_INT (val);
    }
#else
  if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
	&& HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
       || flag_pretend_float)
      && sizeof (float) * 8 == HOST_BITS_PER_WIDE_INT
      && GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_SIZE (mode) == UNITS_PER_WORD
      && GET_CODE (op) == CONST_DOUBLE)
    {
      double d;
      union {float f; HOST_WIDE_INT i; } u;

      REAL_VALUE_FROM_CONST_DOUBLE (d, op);

      u.f = d;
      return GEN_INT (u.i);
    }
  if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
	&& HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
       || flag_pretend_float)
      && sizeof (double) * 8 == HOST_BITS_PER_WIDE_INT
      && GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_SIZE (mode) == UNITS_PER_WORD
      && GET_CODE (op) == CONST_DOUBLE)
    {
      double d;
      union {double d; HOST_WIDE_INT i; } u;

      REAL_VALUE_FROM_CONST_DOUBLE (d, op);

      u.d = d;
      return GEN_INT (u.i);
    }
#endif /* no REAL_ARITHMETIC */

  /* The only remaining cases that we can handle are integers.
     Convert to proper endianness now since these cases need it.
     At this point, offset == 0 means the low-order word.

     We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
     in general.  However, if OP is (const_int 0), we can just return
     it for any word.  */

  if (op == const0_rtx)
    return op;

  if (GET_MODE_CLASS (mode) != MODE_INT
      || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
      || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
    return 0;

  if (WORDS_BIG_ENDIAN)
    offset = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - offset;

  /* Find out which word on the host machine this value is in and get
     it from the constant.  */
  val = (offset / size_ratio == 0
	 ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
	 : (GET_CODE (op) == CONST_INT
	    ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));

  /* Get the value we want into the low bits of val.  */
  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
    val = ((val >> ((offset % size_ratio) * BITS_PER_WORD)));

  val = trunc_int_for_mode (val, word_mode);

  return GEN_INT (val);
}
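
/* Integer example (added for illustration): on a host with 64-bit
   HOST_WIDE_INT and a little-endian target with 32-bit words,
   size_ratio is 2 and

	constant_subword (GEN_INT (0x123456789), 0, DImode)
	  ==>  (const_int 0x23456789)
	constant_subword (GEN_INT (0x123456789), 1, DImode)
	  ==>  (const_int 1)

   after the shift and the final trunc_int_for_mode.  */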

/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (op, offset, validate_address, mode)
     rtx op;
     unsigned int offset;
     int validate_address;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  if (mode == VOIDmode)
    abort ();

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (GET_CODE (op) == MEM)
    {
      rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new;

      else if (reload_completed)
	{
	  if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
	    return 0;
	}
      else
	return replace_equiv_address (new, XEXP (new, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
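
/* Example (added, not in the original file): pulling word 1 out of a
   DImode memory reference on a target with 4-byte words just offsets
   the address,

	operand_subword ((mem:DI (reg:SI 100)), 1, 0, DImode)
	  ==>  (mem:SI (plus:SI (reg:SI 100) (const_int 4)))

   and with VALIDATE_ADDRESS zero the address is formed but never
   checked.  */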

/* Similar to `operand_subword', but never return 0.  If we can't extract
   the required subword, put OP into a register and try again.  If that fails,
   abort.  We always validate the address in this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (op, offset, mode)
     rtx op;
     unsigned int offset;
     enum machine_mode mode;
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which can not be accessed by words, copy it
	 to a pseudo register.  */
      if (GET_CODE (op) == REG)
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  if (result == 0)
    abort ();

  return result;
}
\f
/* Given a compare instruction, swap the operands.
   A test instruction is changed into a compare of 0 against the operand.  */

void
reverse_comparison (insn)
     rtx insn;
{
  rtx body = PATTERN (insn);
  rtx comp;

  if (GET_CODE (body) == SET)
    comp = SET_SRC (body);
  else
    comp = SET_SRC (XVECEXP (body, 0, 0));

  if (GET_CODE (comp) == COMPARE)
    {
      rtx op0 = XEXP (comp, 0);
      rtx op1 = XEXP (comp, 1);
      XEXP (comp, 0) = op1;
      XEXP (comp, 1) = op0;
    }
  else
    {
      rtx new = gen_rtx_COMPARE (VOIDmode,
				 CONST0_RTX (GET_MODE (comp)), comp);
      if (GET_CODE (body) == SET)
	SET_SRC (body) = new;
      else
	SET_SRC (XVECEXP (body, 0, 0)) = new;
    }
}
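
/* Illustration (added): given a test instruction

	(set (cc0) (reg:SI 65))

   reverse_comparison rewrites its source as a compare of zero against
   the operand,

	(set (cc0) (compare (const_int 0) (reg:SI 65)))

   whereas an existing COMPARE simply has its two operands swapped in
   place.  */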
\f
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR.
   (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)
   VALIDATE is nonzero if the returned memory location is required to be
   valid.  */

rtx
change_address_1 (memref, mode, addr, validate)
     rtx memref;
     enum machine_mode mode;
     rtx addr;
     int validate;
{
  rtx new;

  if (GET_CODE (memref) != MEM)
    abort ();
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);

  if (validate)
    {
      if (reload_in_progress || reload_completed)
	{
	  if (! memory_address_p (mode, addr))
	    abort ();
	}
      else
	addr = memory_address (mode, addr);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  new = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new, memref);
  return new;
}

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  */

rtx
adjust_address (memref, mode, offset)
     rtx memref;
     enum machine_mode mode;
     HOST_WIDE_INT offset;
{
  /* For now, this is just a wrapper for change_address, but eventually
     will do memref tracking.  */
  rtx addr = XEXP (memref, 0);

  /* If MEMREF is a LO_SUM and the offset is within the alignment of the
     object, we can merge it into the LO_SUM.  */
  if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
      && offset >= 0
      && offset < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
    addr = gen_rtx_LO_SUM (mode, XEXP (addr, 0),
			   plus_constant (XEXP (addr, 1), offset));
  else
    addr = plus_constant (addr, offset);

  return change_address (memref, mode, addr);
}
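
/* LO_SUM example (added, not in the original source): if MEMREF's
   address is (lo_sum:SI (reg:SI 99) (symbol_ref:SI "x")) and the
   offset stays within the alignment of the object, the offset folds
   into the low half, giving roughly

	(mem:SI (lo_sum:SI (reg:SI 99)
			   (const:SI (plus:SI (symbol_ref:SI "x")
					      (const_int 4)))))

   otherwise a plain (plus ...) is wrapped around the old address.  */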

/* Likewise, but the reference is not required to be valid.  */

rtx
adjust_address_nv (memref, mode, offset)
     rtx memref;
     enum machine_mode mode;
     HOST_WIDE_INT offset;
{
  /* For now, this is just a wrapper for change_address, but eventually
     will do memref tracking.  */
  rtx addr = XEXP (memref, 0);

  /* If MEMREF is a LO_SUM and the offset is within the size of the
     object, we can merge it into the LO_SUM.  */
  if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
      && offset >= 0
      && offset < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
    addr = gen_rtx_LO_SUM (mode, XEXP (addr, 0),
			   plus_constant (XEXP (addr, 1), offset));
  else
    addr = plus_constant (addr, offset);

  return change_address_1 (memref, mode, addr, 0);
}

/* Return a memory reference like MEMREF, but with its address changed to
   ADDR.  The caller is asserting that the actual piece of memory pointed
   to is the same, just the form of the address is being changed, such as
   by putting something into a register.  */

rtx
replace_equiv_address (memref, addr)
     rtx memref;
     rtx addr;
{
  /* For now, this is just a wrapper for change_address, but eventually
     will do memref tracking.  */
  return change_address (memref, VOIDmode, addr);
}

/* Likewise, but the reference is not required to be valid.  */

rtx
replace_equiv_address_nv (memref, addr)
     rtx memref;
     rtx addr;
{
  /* For now, this is just a wrapper for change_address, but eventually
     will do memref tracking.  */
  return change_address_1 (memref, VOIDmode, addr, 0);
}
\f
/* Return a newly created CODE_LABEL rtx with a unique label number.  */

rtx
gen_label_rtx ()
{
  register rtx label;

  label = gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX,
			      NULL_RTX, label_num++, NULL, NULL);

  LABEL_NUSES (label) = 0;
  LABEL_ALTERNATE_NAME (label) = NULL;
  return label;
}
\f
/* For procedure integration.  */

/* Install new pointers to the first and last insns in the chain.
   Also, set cur_insn_uid to one higher than the last in use.
   Used for an inline-procedure after copying the insn chain.  */

void
set_new_first_and_last_insn (first, last)
     rtx first, last;
{
  rtx insn;

  first_insn = first;
  last_insn = last;
  cur_insn_uid = 0;

  for (insn = first; insn; insn = NEXT_INSN (insn))
    cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));

  cur_insn_uid++;
}

/* Set the range of label numbers found in the current function.
   This is used when belatedly compiling an inline function.  */

void
set_new_first_and_last_label_num (first, last)
     int first, last;
{
  base_label_num = label_num;
  first_label_num = first;
  last_label_num = last;
}

/* Set the last label number found in the current function.
   This is used when belatedly compiling an inline function.  */

void
set_new_last_label_num (last)
     int last;
{
  base_label_num = label_num;
  last_label_num = last;
}
\f
/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_emit_status (p)
     struct function *p ATTRIBUTE_UNUSED;
{
  last_label_num = 0;
  clear_emit_caches ();
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_emit_status (f)
     struct function *f;
{
  free (f->emit->x_regno_reg_rtx);
  free (f->emit->regno_pointer_align);
  free (f->emit);
  f->emit = NULL;
}
\f
/* Go through all the RTL insn bodies and copy any invalid shared
   structure.  This routine should only be called once.  */

void
unshare_all_rtl (fndecl, insn)
     tree fndecl;
     rtx insn;
{
  tree decl;

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
    SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));

  /* Make sure that virtual stack slots are not shared.  */
  unshare_all_decls (DECL_INITIAL (fndecl));

  /* Unshare just about everything else.  */
  unshare_all_rtl_1 (insn);

  /* Make sure the addresses of stack slots found outside the insn chain
     (such as, in DECL_RTL of a variable) are not shared
     with the insn chain.

     This special care is necessary when the stack slot MEM does not
     actually appear in the insn chain.  If it does appear, its address
     is unshared from all else at that point.  */
  stack_slot_list = copy_rtx_if_shared (stack_slot_list);
}

/* Go through all the RTL insn bodies and copy any invalid shared
   structure, again.  This is a fairly expensive thing to do so it
   should be done sparingly.  */

void
unshare_all_rtl_again (insn)
     rtx insn;
{
  rtx p;
  tree decl;

  for (p = insn; p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	reset_used_flags (PATTERN (p));
	reset_used_flags (REG_NOTES (p));
	reset_used_flags (LOG_LINKS (p));
      }

  /* Make sure that virtual stack slots are not shared.  */
  reset_used_decls (DECL_INITIAL (cfun->decl));

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
    reset_used_flags (DECL_RTL (decl));

  reset_used_flags (stack_slot_list);

  unshare_all_rtl (cfun->decl, insn);
}

/* Go through all the RTL insn bodies and copy any invalid shared structure.
   Assumes the mark bits are cleared at entry.  */

static void
unshare_all_rtl_1 (insn)
     rtx insn;
{
  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
	REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
	LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
      }
}

/* Go through all virtual stack slots of a function and copy any
   shared structure.  */
static void
unshare_all_decls (blk)
     tree blk;
{
  tree t;

  /* Copy shared decls.  */
  for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));

  /* Now process sub-blocks.  */
  for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
    unshare_all_decls (t);
}

/* Go through all virtual stack slots of a function and mark them as
   not shared.  */
static void
reset_used_decls (blk)
     tree blk;
{
  tree t;

  /* Mark decls.  */
  for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      reset_used_flags (DECL_RTL (t));

  /* Now process sub-blocks.  */
  for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
    reset_used_decls (t);
}

/* Mark ORIG as in use, and return a copy of it if it was already in use.
   Recursively does the same for subexpressions.  */

rtx
copy_rtx_if_shared (orig)
     rtx orig;
{
  register rtx x = orig;
  register int i;
  register enum rtx_code code;
  register const char *format_ptr;
  int copied = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case QUEUED:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
1932       /* SCRATCHes must be shared because they represent distinct values.  */
1933 return x;
1934
1935 case CONST:
1936 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
1937 a LABEL_REF, it isn't sharable. */
1938 if (GET_CODE (XEXP (x, 0)) == PLUS
1939 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1940 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
1941 return x;
1942 break;
1943
1944 case INSN:
1945 case JUMP_INSN:
1946 case CALL_INSN:
1947 case NOTE:
1948 case BARRIER:
1949 /* The chain of insns is not being copied. */
1950 return x;
1951
1952 case MEM:
1953 /* A MEM is allowed to be shared if its address is constant.
1954
1955 We used to allow sharing of MEMs which referenced
1956 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
1957 that can lose. instantiate_virtual_regs will not unshare
1958 the MEMs, and combine may change the structure of the address
1959 because it looks safe and profitable in one context, but
1960 in some other context it creates unrecognizable RTL. */
1961 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
1962 return x;
1963
1964 break;
1965
1966 default:
1967 break;
1968 }
1969
1970 /* This rtx may not be shared. If it has already been seen,
1971 replace it with a copy of itself. */
1972
1973 if (x->used)
1974 {
1975 register rtx copy;
1976
1977 copy = rtx_alloc (code);
1978 memcpy (copy, x,
1979 (sizeof (*copy) - sizeof (copy->fld)
1980 + sizeof (copy->fld[0]) * GET_RTX_LENGTH (code)));
1981 x = copy;
1982 copied = 1;
1983 }
1984 x->used = 1;
1985
1986 /* Now scan the subexpressions recursively.
1987 We can store any replaced subexpressions directly into X
1988 since we know X is not shared! Any vectors in X
1989 must be copied if X was copied. */
1990
1991 format_ptr = GET_RTX_FORMAT (code);
1992
1993 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1994 {
1995 switch (*format_ptr++)
1996 {
1997 case 'e':
1998 XEXP (x, i) = copy_rtx_if_shared (XEXP (x, i));
1999 break;
2000
2001 case 'E':
2002 if (XVEC (x, i) != NULL)
2003 {
2004 register int j;
2005 int len = XVECLEN (x, i);
2006
2007 if (copied && len > 0)
2008 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2009 for (j = 0; j < len; j++)
2010 XVECEXP (x, i, j) = copy_rtx_if_shared (XVECEXP (x, i, j));
2011 }
2012 break;
2013 }
2014 }
2015 return x;
2016 }
2017
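/* A usage sketch (illustrative comment, not compiled code): the USED
   mark bits must be cleared over everything that may share structure
   before the copying walk starts, as unshare_all_rtl_again does above.
   X is a hypothetical rtx:

	reset_used_flags (x);
	x = copy_rtx_if_shared (x);  */
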
2018 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2019 to look for shared sub-parts. */
2020
2021 void
2022 reset_used_flags (x)
2023 rtx x;
2024 {
2025 register int i, j;
2026 register enum rtx_code code;
2027 register const char *format_ptr;
2028
2029 if (x == 0)
2030 return;
2031
2032 code = GET_CODE (x);
2033
2034 /* These types may be freely shared so we needn't do any resetting
2035 for them. */
2036
2037 switch (code)
2038 {
2039 case REG:
2040 case QUEUED:
2041 case CONST_INT:
2042 case CONST_DOUBLE:
2043 case SYMBOL_REF:
2044 case CODE_LABEL:
2045 case PC:
2046 case CC0:
2047 return;
2048
2049 case INSN:
2050 case JUMP_INSN:
2051 case CALL_INSN:
2052 case NOTE:
2053 case LABEL_REF:
2054 case BARRIER:
2055 /* The chain of insns is not being copied. */
2056 return;
2057
2058 default:
2059 break;
2060 }
2061
2062 x->used = 0;
2063
2064 format_ptr = GET_RTX_FORMAT (code);
2065 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2066 {
2067 switch (*format_ptr++)
2068 {
2069 case 'e':
2070 reset_used_flags (XEXP (x, i));
2071 break;
2072
2073 case 'E':
2074 for (j = 0; j < XVECLEN (x, i); j++)
2075 reset_used_flags (XVECEXP (x, i, j));
2076 break;
2077 }
2078 }
2079 }
2080 \f
2081 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2082 Return X or the rtx for the pseudo reg the value of X was copied into.
2083 OTHER must be valid as a SET_DEST. */
2084
2085 rtx
2086 make_safe_from (x, other)
2087 rtx x, other;
2088 {
2089 while (1)
2090 switch (GET_CODE (other))
2091 {
2092 case SUBREG:
2093 other = SUBREG_REG (other);
2094 break;
2095 case STRICT_LOW_PART:
2096 case SIGN_EXTEND:
2097 case ZERO_EXTEND:
2098 other = XEXP (other, 0);
2099 break;
2100 default:
2101 goto done;
2102 }
2103 done:
2104 if ((GET_CODE (other) == MEM
2105 && ! CONSTANT_P (x)
2106 && GET_CODE (x) != REG
2107 && GET_CODE (x) != SUBREG)
2108 || (GET_CODE (other) == REG
2109 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2110 || reg_mentioned_p (other, x))))
2111 {
2112 rtx temp = gen_reg_rtx (GET_MODE (x));
2113 emit_move_insn (temp, x);
2114 return temp;
2115 }
2116 return x;
2117 }
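
/* A usage sketch (illustrative comment, not compiled code): before
   emitting code that stores into DEST and afterwards reads VAL,
   protect VAL from the store.  DEST and VAL are hypothetical rtxes:

	val = make_safe_from (val, dest);  */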
2118 \f
2119 /* Emission of insns (adding them to the doubly-linked list). */
2120
2121 /* Return the first insn of the current sequence or current function. */
2122
2123 rtx
2124 get_insns ()
2125 {
2126 return first_insn;
2127 }
2128
2129 /* Return the last insn emitted in current sequence or current function. */
2130
2131 rtx
2132 get_last_insn ()
2133 {
2134 return last_insn;
2135 }
2136
2137 /* Specify a new insn as the last in the chain. */
2138
2139 void
2140 set_last_insn (insn)
2141 rtx insn;
2142 {
2143 if (NEXT_INSN (insn) != 0)
2144 abort ();
2145 last_insn = insn;
2146 }
2147
2148 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2149
2150 rtx
2151 get_last_insn_anywhere ()
2152 {
2153 struct sequence_stack *stack;
2154 if (last_insn)
2155 return last_insn;
2156 for (stack = seq_stack; stack; stack = stack->next)
2157 if (stack->last != 0)
2158 return stack->last;
2159 return 0;
2160 }
2161
2162 /* Return a number larger than any instruction's uid in this function. */
2163
2164 int
2165 get_max_uid ()
2166 {
2167 return cur_insn_uid;
2168 }
2169
2170 /* Renumber instructions so that no instruction UIDs are wasted. */
2171
2172 void
2173 renumber_insns (stream)
2174 FILE *stream;
2175 {
2176 rtx insn;
2177
2178 /* If we're not supposed to renumber instructions, don't. */
2179 if (!flag_renumber_insns)
2180 return;
2181
2182 /* If there aren't that many instructions, then it's not really
2183 worth renumbering them. */
2184 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
2185 return;
2186
2187 cur_insn_uid = 1;
2188
2189 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2190 {
2191 if (stream)
2192 fprintf (stream, "Renumbering insn %d to %d\n",
2193 INSN_UID (insn), cur_insn_uid);
2194 INSN_UID (insn) = cur_insn_uid++;
2195 }
2196 }
2197 \f
2198 /* Return the next insn. If it is a SEQUENCE, return the first insn
2199 of the sequence. */
2200
2201 rtx
2202 next_insn (insn)
2203 rtx insn;
2204 {
2205 if (insn)
2206 {
2207 insn = NEXT_INSN (insn);
2208 if (insn && GET_CODE (insn) == INSN
2209 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2210 insn = XVECEXP (PATTERN (insn), 0, 0);
2211 }
2212
2213 return insn;
2214 }
2215
2216 /* Return the previous insn. If it is a SEQUENCE, return the last insn
2217 of the sequence. */
2218
2219 rtx
2220 previous_insn (insn)
2221 rtx insn;
2222 {
2223 if (insn)
2224 {
2225 insn = PREV_INSN (insn);
2226 if (insn && GET_CODE (insn) == INSN
2227 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2228 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2229 }
2230
2231 return insn;
2232 }
2233
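/* A usage sketch (illustrative comment, not compiled code): unlike the
   plain NEXT_INSN macro, next_insn steps into a filled delay-slot
   SEQUENCE.  Here INSN is a hypothetical insn followed by one:

	rtx body = next_insn (insn);      (first insn inside the SEQUENCE)
	rtx holder = NEXT_INSN (insn);    (the insn holding the SEQUENCE)  */
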
2234 /* Return the next insn after INSN that is not a NOTE. This routine does not
2235 look inside SEQUENCEs. */
2236
2237 rtx
2238 next_nonnote_insn (insn)
2239 rtx insn;
2240 {
2241 while (insn)
2242 {
2243 insn = NEXT_INSN (insn);
2244 if (insn == 0 || GET_CODE (insn) != NOTE)
2245 break;
2246 }
2247
2248 return insn;
2249 }
2250
2251 /* Return the previous insn before INSN that is not a NOTE. This routine does
2252 not look inside SEQUENCEs. */
2253
2254 rtx
2255 prev_nonnote_insn (insn)
2256 rtx insn;
2257 {
2258 while (insn)
2259 {
2260 insn = PREV_INSN (insn);
2261 if (insn == 0 || GET_CODE (insn) != NOTE)
2262 break;
2263 }
2264
2265 return insn;
2266 }
2267
2268 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2269 or 0, if there is none. This routine does not look inside
2270 SEQUENCEs. */
2271
2272 rtx
2273 next_real_insn (insn)
2274 rtx insn;
2275 {
2276 while (insn)
2277 {
2278 insn = NEXT_INSN (insn);
2279 if (insn == 0 || GET_CODE (insn) == INSN
2280 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
2281 break;
2282 }
2283
2284 return insn;
2285 }
2286
2287 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2288 or 0, if there is none. This routine does not look inside
2289 SEQUENCEs. */
2290
2291 rtx
2292 prev_real_insn (insn)
2293 rtx insn;
2294 {
2295 while (insn)
2296 {
2297 insn = PREV_INSN (insn);
2298 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
2299 || GET_CODE (insn) == JUMP_INSN)
2300 break;
2301 }
2302
2303 return insn;
2304 }
2305
2306 /* Return nonzero if INSN really does something: it is a CALL_INSN or
2307    JUMP_INSN, or an INSN other than a bare USE or CLOBBER once reload
2308    has completed.  */
2309
2310 int
2311 active_insn_p (insn)
2312 rtx insn;
2313 {
2314 return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
2315 || (GET_CODE (insn) == INSN
2316 && (! reload_completed
2317 || (GET_CODE (PATTERN (insn)) != USE
2318 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2319 }
2320
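/* Find the next insn after INSN that really does something.  This routine
   does not look inside SEQUENCEs.  Until reload has completed, this is the
   same as next_real_insn.  */
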
2321 rtx
2322 next_active_insn (insn)
2323 rtx insn;
2324 {
2325 while (insn)
2326 {
2327 insn = NEXT_INSN (insn);
2328 if (insn == 0 || active_insn_p (insn))
2329 break;
2330 }
2331
2332 return insn;
2333 }
2334
2335 /* Find the last insn before INSN that really does something. This routine
2336 does not look inside SEQUENCEs. Until reload has completed, this is the
2337 same as prev_real_insn. */
2338
2339 rtx
2340 prev_active_insn (insn)
2341 rtx insn;
2342 {
2343 while (insn)
2344 {
2345 insn = PREV_INSN (insn);
2346 if (insn == 0 || active_insn_p (insn))
2347 break;
2348 }
2349
2350 return insn;
2351 }
2352
2353 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
2354
2355 rtx
2356 next_label (insn)
2357 rtx insn;
2358 {
2359 while (insn)
2360 {
2361 insn = NEXT_INSN (insn);
2362 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2363 break;
2364 }
2365
2366 return insn;
2367 }
2368
2369 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
2370
2371 rtx
2372 prev_label (insn)
2373 rtx insn;
2374 {
2375 while (insn)
2376 {
2377 insn = PREV_INSN (insn);
2378 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2379 break;
2380 }
2381
2382 return insn;
2383 }
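
/* A usage sketch (illustrative comment, not compiled code): a typical
   forward walk over the "real" instructions of the current function,
   skipping notes, labels and barriers:

	rtx insn;

	for (insn = next_real_insn (get_insns ()); insn;
	     insn = next_real_insn (insn))
	  ... process INSN ...  */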
2384 \f
2385 #ifdef HAVE_cc0
2386 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
2387 and REG_CC_USER notes so we can find it. */
2388
2389 void
2390 link_cc0_insns (insn)
2391 rtx insn;
2392 {
2393 rtx user = next_nonnote_insn (insn);
2394
2395 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
2396 user = XVECEXP (PATTERN (user), 0, 0);
2397
2398 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
2399 REG_NOTES (user));
2400 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
2401 }
2402
2403 /* Return the next insn that uses CC0 after INSN, which is assumed to
2404 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
2405 applied to the result of this function should yield INSN).
2406
2407 Normally, this is simply the next insn. However, if a REG_CC_USER note
2408 is present, it contains the insn that uses CC0.
2409
2410 Return 0 if we can't find the insn. */
2411
2412 rtx
2413 next_cc0_user (insn)
2414 rtx insn;
2415 {
2416 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
2417
2418 if (note)
2419 return XEXP (note, 0);
2420
2421 insn = next_nonnote_insn (insn);
2422 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
2423 insn = XVECEXP (PATTERN (insn), 0, 0);
2424
2425 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
2426 return insn;
2427
2428 return 0;
2429 }
2430
2431 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
2432 note, it is the previous insn. */
2433
2434 rtx
2435 prev_cc0_setter (insn)
2436 rtx insn;
2437 {
2438 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2439
2440 if (note)
2441 return XEXP (note, 0);
2442
2443 insn = prev_nonnote_insn (insn);
2444 if (! sets_cc0_p (PATTERN (insn)))
2445 abort ();
2446
2447 return insn;
2448 }
2449 #endif
2450
2451 /* Increment the label uses for all labels present in rtx. */
2452
2453 static void
2454 mark_label_nuses (x)
2455 rtx x;
2456 {
2457 register enum rtx_code code;
2458 register int i, j;
2459 register const char *fmt;
2460
2461 code = GET_CODE (x);
2462 if (code == LABEL_REF)
2463 LABEL_NUSES (XEXP (x, 0))++;
2464
2465 fmt = GET_RTX_FORMAT (code);
2466 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2467 {
2468 if (fmt[i] == 'e')
2469 mark_label_nuses (XEXP (x, i));
2470 else if (fmt[i] == 'E')
2471 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2472 mark_label_nuses (XVECEXP (x, i, j));
2473 }
2474 }
2475
2476 \f
2477 /* Try splitting insns that can be split for better scheduling.
2478 PAT is the pattern which might split.
2479 TRIAL is the insn providing PAT.
2480 LAST is non-zero if we should return the last insn of the sequence produced.
2481
2482 If this routine succeeds in splitting, it returns the first or last
2483 replacement insn depending on the value of LAST. Otherwise, it
2484 returns TRIAL. If the insn to be returned can be split, it will be. */
2485
2486 rtx
2487 try_split (pat, trial, last)
2488 rtx pat, trial;
2489 int last;
2490 {
2491 rtx before = PREV_INSN (trial);
2492 rtx after = NEXT_INSN (trial);
2493 int has_barrier = 0;
2494 rtx tem;
2495 rtx note, seq;
2496 int probability;
2497
2498 if (any_condjump_p (trial)
2499 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
2500 split_branch_probability = INTVAL (XEXP (note, 0));
2501 probability = split_branch_probability;
2502
2503 seq = split_insns (pat, trial);
2504
2505 split_branch_probability = -1;
2506
2507 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
2508 We may need to handle this specially. */
2509 if (after && GET_CODE (after) == BARRIER)
2510 {
2511 has_barrier = 1;
2512 after = NEXT_INSN (after);
2513 }
2514
2515 if (seq)
2516 {
2517 /* SEQ can either be a SEQUENCE or the pattern of a single insn.
2518    The latter case will normally arise only when the split is being done
2519    so that the result, in turn, will be split (SFmode on the 29k is an example).  */
2520 if (GET_CODE (seq) == SEQUENCE)
2521 {
2522 int i, njumps = 0;
2523 rtx eh_note;
2524
2525 /* Avoid infinite loop if any insn of the result matches
2526 the original pattern. */
2527 for (i = 0; i < XVECLEN (seq, 0); i++)
2528 if (GET_CODE (XVECEXP (seq, 0, i)) == INSN
2529 && rtx_equal_p (PATTERN (XVECEXP (seq, 0, i)), pat))
2530 return trial;
2531
2532 /* Mark labels. */
2533 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
2534 if (GET_CODE (XVECEXP (seq, 0, i)) == JUMP_INSN)
2535 {
2536 rtx insn = XVECEXP (seq, 0, i);
2537 mark_jump_label (PATTERN (insn),
2538 XVECEXP (seq, 0, i), 0);
2539 njumps++;
2540 if (probability != -1
2541 && any_condjump_p (insn)
2542 && !find_reg_note (insn, REG_BR_PROB, 0))
2543 {
2544 /* We can preserve the REG_BR_PROB notes only if exactly
2545                  one jump is created, otherwise the machine description
2546                  is responsible for this step using the
2547                  split_branch_probability variable.  */
2548 if (njumps != 1)
2549 abort ();
2550 REG_NOTES (insn)
2551 = gen_rtx_EXPR_LIST (REG_BR_PROB,
2552 GEN_INT (probability),
2553 REG_NOTES (insn));
2554 }
2555 }
2556 /* If we are splitting a CALL_INSN, look for the CALL_INSN
2557 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
2558 if (GET_CODE (trial) == CALL_INSN)
2559 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
2560 if (GET_CODE (XVECEXP (seq, 0, i)) == CALL_INSN)
2561 CALL_INSN_FUNCTION_USAGE (XVECEXP (seq, 0, i))
2562 = CALL_INSN_FUNCTION_USAGE (trial);
2563
2564 /* Copy EH notes. */
2565 if ((eh_note = find_reg_note (trial, REG_EH_REGION, NULL_RTX)))
2566 for (i = 0; i < XVECLEN (seq, 0); i++)
2567 {
2568 rtx insn = XVECEXP (seq, 0, i);
2569 if (GET_CODE (insn) == CALL_INSN
2570 || (flag_non_call_exceptions
2571 && may_trap_p (PATTERN (insn))))
2572 REG_NOTES (insn)
2573 = gen_rtx_EXPR_LIST (REG_EH_REGION, XEXP (eh_note, 0),
2574 REG_NOTES (insn));
2575 }
2576
2577 /* If there are LABELS inside the split insns increment the
2578 usage count so we don't delete the label. */
2579 if (GET_CODE (trial) == INSN)
2580 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
2581 if (GET_CODE (XVECEXP (seq, 0, i)) == INSN)
2582 mark_label_nuses (PATTERN (XVECEXP (seq, 0, i)));
2583
2584 tem = emit_insn_after (seq, before);
2585
2586 delete_insn (trial);
2587 if (has_barrier)
2588 emit_barrier_after (tem);
2589
2590 /* Recursively call try_split for each new insn created; by the
2591 time control returns here that insn will be fully split, so
2592 set LAST and continue from the insn after the one returned.
2593 We can't use next_active_insn here since AFTER may be a note.
2594          Ignore deleted insns, which can occur when not optimizing.  */
2595 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
2596 if (! INSN_DELETED_P (tem) && INSN_P (tem))
2597 tem = try_split (PATTERN (tem), tem, 1);
2598 }
2599 /* Avoid infinite loop if the result matches the original pattern. */
2600 else if (rtx_equal_p (seq, pat))
2601 return trial;
2602 else
2603 {
2604 PATTERN (trial) = seq;
2605 INSN_CODE (trial) = -1;
2606 try_split (seq, trial, last);
2607 }
2608
2609 /* Return either the first or the last insn, depending on which was
2610 requested. */
2611 return last
2612 ? (after ? PREV_INSN (after) : last_insn)
2613 : NEXT_INSN (before);
2614 }
2615
2616 return trial;
2617 }
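
/* A usage sketch (illustrative comment, not compiled code): a pass
   that wants every insn fully split calls try_split on each pattern
   in turn, roughly like this:

	rtx insn, next;

	for (insn = get_insns (); insn; insn = next)
	  {
	    next = NEXT_INSN (insn);
	    if (INSN_P (insn))
	      try_split (PATTERN (insn), insn, 1);
	  }  */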
2618 \f
2619 /* Make and return an INSN rtx, initializing all its slots.
2620 Store PATTERN in the pattern slots. */
2621
2622 rtx
2623 make_insn_raw (pattern)
2624 rtx pattern;
2625 {
2626 register rtx insn;
2627
2628 insn = rtx_alloc (INSN);
2629
2630 INSN_UID (insn) = cur_insn_uid++;
2631 PATTERN (insn) = pattern;
2632 INSN_CODE (insn) = -1;
2633 LOG_LINKS (insn) = NULL;
2634 REG_NOTES (insn) = NULL;
2635
2636 #ifdef ENABLE_RTL_CHECKING
2637 if (insn
2638 && INSN_P (insn)
2639 && (returnjump_p (insn)
2640           || (GET_CODE (pattern) == SET
2641               && SET_DEST (pattern) == pc_rtx)))
2642 {
2643 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
2644 debug_rtx (insn);
2645 }
2646 #endif
2647
2648 return insn;
2649 }
2650
2651 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */
2652
2653 static rtx
2654 make_jump_insn_raw (pattern)
2655 rtx pattern;
2656 {
2657 register rtx insn;
2658
2659 insn = rtx_alloc (JUMP_INSN);
2660 INSN_UID (insn) = cur_insn_uid++;
2661
2662 PATTERN (insn) = pattern;
2663 INSN_CODE (insn) = -1;
2664 LOG_LINKS (insn) = NULL;
2665 REG_NOTES (insn) = NULL;
2666 JUMP_LABEL (insn) = NULL;
2667
2668 return insn;
2669 }
2670
2671 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */
2672
2673 static rtx
2674 make_call_insn_raw (pattern)
2675 rtx pattern;
2676 {
2677 register rtx insn;
2678
2679 insn = rtx_alloc (CALL_INSN);
2680 INSN_UID (insn) = cur_insn_uid++;
2681
2682 PATTERN (insn) = pattern;
2683 INSN_CODE (insn) = -1;
2684 LOG_LINKS (insn) = NULL;
2685 REG_NOTES (insn) = NULL;
2686 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
2687
2688 return insn;
2689 }
2690 \f
2691 /* Add INSN to the end of the doubly-linked list.
2692 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
2693
2694 void
2695 add_insn (insn)
2696 register rtx insn;
2697 {
2698 PREV_INSN (insn) = last_insn;
2699 NEXT_INSN (insn) = 0;
2700
2701 if (NULL != last_insn)
2702 NEXT_INSN (last_insn) = insn;
2703
2704 if (NULL == first_insn)
2705 first_insn = insn;
2706
2707 last_insn = insn;
2708 }
2709
2710 /* Add INSN into the doubly-linked list after insn AFTER. This and
2711 the next should be the only functions called to insert an insn once
2712 delay slots have been filled since only they know how to update a
2713 SEQUENCE. */
2714
2715 void
2716 add_insn_after (insn, after)
2717 rtx insn, after;
2718 {
2719 rtx next = NEXT_INSN (after);
2720
2721 if (optimize && INSN_DELETED_P (after))
2722 abort ();
2723
2724 NEXT_INSN (insn) = next;
2725 PREV_INSN (insn) = after;
2726
2727 if (next)
2728 {
2729 PREV_INSN (next) = insn;
2730 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
2731 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
2732 }
2733 else if (last_insn == after)
2734 last_insn = insn;
2735 else
2736 {
2737 struct sequence_stack *stack = seq_stack;
2738 /* Scan all pending sequences too. */
2739 for (; stack; stack = stack->next)
2740 if (after == stack->last)
2741 {
2742 stack->last = insn;
2743 break;
2744 }
2745
2746 if (stack == 0)
2747 abort ();
2748 }
2749
2750 NEXT_INSN (after) = insn;
2751 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
2752 {
2753 rtx sequence = PATTERN (after);
2754 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
2755 }
2756 }
2757
2758 /* Add INSN into the doubly-linked list before insn BEFORE. This and
2759 the previous should be the only functions called to insert an insn once
2760 delay slots have been filled since only they know how to update a
2761 SEQUENCE. */
2762
2763 void
2764 add_insn_before (insn, before)
2765 rtx insn, before;
2766 {
2767 rtx prev = PREV_INSN (before);
2768
2769 if (optimize && INSN_DELETED_P (before))
2770 abort ();
2771
2772 PREV_INSN (insn) = prev;
2773 NEXT_INSN (insn) = before;
2774
2775 if (prev)
2776 {
2777 NEXT_INSN (prev) = insn;
2778 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
2779 {
2780 rtx sequence = PATTERN (prev);
2781 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
2782 }
2783 }
2784 else if (first_insn == before)
2785 first_insn = insn;
2786 else
2787 {
2788 struct sequence_stack *stack = seq_stack;
2789 /* Scan all pending sequences too. */
2790 for (; stack; stack = stack->next)
2791 if (before == stack->first)
2792 {
2793 stack->first = insn;
2794 break;
2795 }
2796
2797 if (stack == 0)
2798 abort ();
2799 }
2800
2801 PREV_INSN (before) = insn;
2802 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
2803 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
2804 }
2805
2806 /* Remove an insn from its doubly-linked list. This function knows how
2807 to handle sequences. */
2808 void
2809 remove_insn (insn)
2810 rtx insn;
2811 {
2812 rtx next = NEXT_INSN (insn);
2813 rtx prev = PREV_INSN (insn);
2814 if (prev)
2815 {
2816 NEXT_INSN (prev) = next;
2817 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
2818 {
2819 rtx sequence = PATTERN (prev);
2820 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
2821 }
2822 }
2823 else if (first_insn == insn)
2824 first_insn = next;
2825 else
2826 {
2827 struct sequence_stack *stack = seq_stack;
2828 /* Scan all pending sequences too. */
2829 for (; stack; stack = stack->next)
2830 if (insn == stack->first)
2831 {
2832 stack->first = next;
2833 break;
2834 }
2835
2836 if (stack == 0)
2837 abort ();
2838 }
2839
2840 if (next)
2841 {
2842 PREV_INSN (next) = prev;
2843 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
2844 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
2845 }
2846 else if (last_insn == insn)
2847 last_insn = prev;
2848 else
2849 {
2850 struct sequence_stack *stack = seq_stack;
2851 /* Scan all pending sequences too. */
2852 for (; stack; stack = stack->next)
2853 if (insn == stack->last)
2854 {
2855 stack->last = prev;
2856 break;
2857 }
2858
2859 if (stack == 0)
2860 abort ();
2861 }
2862 }
2863
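/* A usage sketch (illustrative comment, not compiled code): unlinking
   a note that is no longer wanted, as remove_unnecessary_notes does
   below.  INSN must still have valid PREV/NEXT links here:

	if (GET_CODE (insn) == NOTE
	    && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
	  remove_insn (insn);  */
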
2864 /* Delete all insns made since FROM.
2865 FROM becomes the new last instruction. */
2866
2867 void
2868 delete_insns_since (from)
2869 rtx from;
2870 {
2871 if (from == 0)
2872 first_insn = 0;
2873 else
2874 NEXT_INSN (from) = 0;
2875 last_insn = from;
2876 }
2877
2878 /* This function is deprecated, please use sequences instead.
2879
2880 Move a consecutive bunch of insns to a different place in the chain.
2881 The insns to be moved are those between FROM and TO.
2882 They are moved to a new position after the insn AFTER.
2883 AFTER must not be FROM or TO or any insn in between.
2884
2885 This function does not know about SEQUENCEs and hence should not be
2886 called after delay-slot filling has been done. */
2887
2888 void
2889 reorder_insns (from, to, after)
2890 rtx from, to, after;
2891 {
2892 /* Splice this bunch out of where it is now. */
2893 if (PREV_INSN (from))
2894 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
2895 if (NEXT_INSN (to))
2896 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
2897 if (last_insn == to)
2898 last_insn = PREV_INSN (from);
2899 if (first_insn == from)
2900 first_insn = NEXT_INSN (to);
2901
2902 /* Make the new neighbors point to it and it to them. */
2903 if (NEXT_INSN (after))
2904 PREV_INSN (NEXT_INSN (after)) = to;
2905
2906 NEXT_INSN (to) = NEXT_INSN (after);
2907 PREV_INSN (from) = after;
2908 NEXT_INSN (after) = from;
2909 if (after == last_insn)
2910 last_insn = to;
2911 }
2912
2913 /* Return the line note insn preceding INSN. */
2914
2915 static rtx
2916 find_line_note (insn)
2917 rtx insn;
2918 {
2919 if (no_line_numbers)
2920 return 0;
2921
2922 for (; insn; insn = PREV_INSN (insn))
2923 if (GET_CODE (insn) == NOTE
2924 && NOTE_LINE_NUMBER (insn) >= 0)
2925 break;
2926
2927 return insn;
2928 }
2929
2930 /* Like reorder_insns, but inserts line notes to preserve the line numbers
2931 of the moved insns when debugging. This may insert a note between AFTER
2932 and FROM, and another one after TO. */
2933
2934 void
2935 reorder_insns_with_line_notes (from, to, after)
2936 rtx from, to, after;
2937 {
2938 rtx from_line = find_line_note (from);
2939 rtx after_line = find_line_note (after);
2940
2941 reorder_insns (from, to, after);
2942
2943 if (from_line == after_line)
2944 return;
2945
2946 if (from_line)
2947 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
2948 NOTE_LINE_NUMBER (from_line),
2949 after);
2950 if (after_line)
2951 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
2952 NOTE_LINE_NUMBER (after_line),
2953 to);
2954 }
2955
2956 /* Remove unnecessary notes from the instruction stream. */
2957
2958 void
2959 remove_unnecessary_notes ()
2960 {
2961 rtx block_stack = NULL_RTX;
2962 rtx eh_stack = NULL_RTX;
2963 rtx insn;
2964 rtx next;
2965 rtx tmp;
2966
2967 /* We must not remove the first instruction in the function because
2968 the compiler depends on the first instruction being a note. */
2969 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
2970 {
2971 /* Remember what's next. */
2972 next = NEXT_INSN (insn);
2973
2974 /* We're only interested in notes. */
2975 if (GET_CODE (insn) != NOTE)
2976 continue;
2977
2978 switch (NOTE_LINE_NUMBER (insn))
2979 {
2980 case NOTE_INSN_DELETED:
2981 remove_insn (insn);
2982 break;
2983
2984 case NOTE_INSN_EH_REGION_BEG:
2985 eh_stack = alloc_INSN_LIST (insn, eh_stack);
2986 break;
2987
2988 case NOTE_INSN_EH_REGION_END:
2989 /* Too many end notes. */
2990 if (eh_stack == NULL_RTX)
2991 abort ();
2992 /* Mismatched nesting. */
2993 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
2994 abort ();
2995 tmp = eh_stack;
2996 eh_stack = XEXP (eh_stack, 1);
2997 free_INSN_LIST_node (tmp);
2998 break;
2999
3000 case NOTE_INSN_BLOCK_BEG:
3001 /* By now, all notes indicating lexical blocks should have
3002 NOTE_BLOCK filled in. */
3003 if (NOTE_BLOCK (insn) == NULL_TREE)
3004 abort ();
3005 block_stack = alloc_INSN_LIST (insn, block_stack);
3006 break;
3007
3008 case NOTE_INSN_BLOCK_END:
3009 /* Too many end notes. */
3010 if (block_stack == NULL_RTX)
3011 abort ();
3012 /* Mismatched nesting. */
3013 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
3014 abort ();
3015 tmp = block_stack;
3016 block_stack = XEXP (block_stack, 1);
3017 free_INSN_LIST_node (tmp);
3018
3019 /* Scan back to see if there are any non-note instructions
3020 between INSN and the beginning of this block. If not,
3021 then there is no PC range in the generated code that will
3022 actually be in this block, so there's no point in
3023 remembering the existence of the block. */
3024 for (tmp = PREV_INSN (insn); tmp ; tmp = PREV_INSN (tmp))
3025 {
3026 /* This block contains a real instruction. Note that we
3027 don't include labels; if the only thing in the block
3028 is a label, then there are still no PC values that
3029 lie within the block. */
3030 if (INSN_P (tmp))
3031 break;
3032
3033 /* We're only interested in NOTEs. */
3034 if (GET_CODE (tmp) != NOTE)
3035 continue;
3036
3037 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
3038 {
3039 /* We just verified that this BLOCK matches us with
3040 the block_stack check above. Never delete the
3041 BLOCK for the outermost scope of the function; we
3042 can refer to names from that scope even if the
3043 block notes are messed up. */
3044 if (! is_body_block (NOTE_BLOCK (insn))
3045 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
3046 {
3047 remove_insn (tmp);
3048 remove_insn (insn);
3049 }
3050 break;
3051 }
3052 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
3053 /* There's a nested block. We need to leave the
3054 current block in place since otherwise the debugger
3055 wouldn't be able to show symbols from our block in
3056 the nested block. */
3057 break;
3058 }
3059 }
3060 }
3061
3062 /* Too many begin notes. */
3063 if (block_stack || eh_stack)
3064 abort ();
3065 }
3066
3067 \f
3068 /* Emit an insn of given code and pattern
3069 at a specified place within the doubly-linked list. */
3070
3071 /* Make an instruction with body PATTERN
3072 and output it before the instruction BEFORE. */
3073
3074 rtx
3075 emit_insn_before (pattern, before)
3076 register rtx pattern, before;
3077 {
3078 register rtx insn = before;
3079
3080 if (GET_CODE (pattern) == SEQUENCE)
3081 {
3082 register int i;
3083
3084 for (i = 0; i < XVECLEN (pattern, 0); i++)
3085 {
3086 insn = XVECEXP (pattern, 0, i);
3087 add_insn_before (insn, before);
3088 }
3089 }
3090 else
3091 {
3092 insn = make_insn_raw (pattern);
3093 add_insn_before (insn, before);
3094 }
3095
3096 return insn;
3097 }
3098
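/* A usage sketch (illustrative comment, not compiled code):
   materializing a value into a pseudo just before its use.  USE_INSN,
   REG and X are hypothetical:

	emit_insn_before (gen_move_insn (reg, x), use_insn);  */
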
3099 /* Similar to emit_insn_before, but update basic block boundaries as well. */
3100
3101 rtx
3102 emit_block_insn_before (pattern, before, block)
3103 rtx pattern, before;
3104 basic_block block;
3105 {
3106 rtx prev = PREV_INSN (before);
3107 rtx r = emit_insn_before (pattern, before);
3108 if (block && block->head == before)
3109 block->head = NEXT_INSN (prev);
3110 return r;
3111 }
3112
3113 /* Make an instruction with body PATTERN and code JUMP_INSN
3114 and output it before the instruction BEFORE. */
3115
3116 rtx
3117 emit_jump_insn_before (pattern, before)
3118 register rtx pattern, before;
3119 {
3120 register rtx insn;
3121
3122 if (GET_CODE (pattern) == SEQUENCE)
3123 insn = emit_insn_before (pattern, before);
3124 else
3125 {
3126 insn = make_jump_insn_raw (pattern);
3127 add_insn_before (insn, before);
3128 }
3129
3130 return insn;
3131 }
3132
3133 /* Make an instruction with body PATTERN and code CALL_INSN
3134 and output it before the instruction BEFORE. */
3135
3136 rtx
3137 emit_call_insn_before (pattern, before)
3138 register rtx pattern, before;
3139 {
3140 register rtx insn;
3141
3142 if (GET_CODE (pattern) == SEQUENCE)
3143 insn = emit_insn_before (pattern, before);
3144 else
3145 {
3146 insn = make_call_insn_raw (pattern);
3147 add_insn_before (insn, before);
3148 PUT_CODE (insn, CALL_INSN);
3149 }
3150
3151 return insn;
3152 }
3153
3154 /* Make an insn of code BARRIER
3155 and output it before the insn BEFORE. */
3156
3157 rtx
3158 emit_barrier_before (before)
3159 register rtx before;
3160 {
3161 register rtx insn = rtx_alloc (BARRIER);
3162
3163 INSN_UID (insn) = cur_insn_uid++;
3164
3165 add_insn_before (insn, before);
3166 return insn;
3167 }
3168
3169 /* Emit the label LABEL before the insn BEFORE. */
3170
3171 rtx
3172 emit_label_before (label, before)
3173 rtx label, before;
3174 {
3175 /* This can be called twice for the same label as a result of the
3176 confusion that follows a syntax error! So make it harmless. */
3177 if (INSN_UID (label) == 0)
3178 {
3179 INSN_UID (label) = cur_insn_uid++;
3180 add_insn_before (label, before);
3181 }
3182
3183 return label;
3184 }
3185
3186 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
3187
3188 rtx
3189 emit_note_before (subtype, before)
3190 int subtype;
3191 rtx before;
3192 {
3193 register rtx note = rtx_alloc (NOTE);
3194 INSN_UID (note) = cur_insn_uid++;
3195 NOTE_SOURCE_FILE (note) = 0;
3196 NOTE_LINE_NUMBER (note) = subtype;
3197
3198 add_insn_before (note, before);
3199 return note;
3200 }
3201 \f
3202 /* Make an insn of code INSN with body PATTERN
3203 and output it after the insn AFTER. */
3204
3205 rtx
3206 emit_insn_after (pattern, after)
3207 register rtx pattern, after;
3208 {
3209 register rtx insn = after;
3210
3211 if (GET_CODE (pattern) == SEQUENCE)
3212 {
3213 register int i;
3214
3215 for (i = 0; i < XVECLEN (pattern, 0); i++)
3216 {
3217 insn = XVECEXP (pattern, 0, i);
3218 add_insn_after (insn, after);
3219 after = insn;
3220 }
3221 }
3222 else
3223 {
3224 insn = make_insn_raw (pattern);
3225 add_insn_after (insn, after);
3226 }
3227
3228 return insn;
3229 }
3230
3231 /* Similar to emit_insn_after, except that line notes are to be inserted so
3232 as to act as if this insn were at FROM. */
3233
3234 void
3235 emit_insn_after_with_line_notes (pattern, after, from)
3236 rtx pattern, after, from;
3237 {
3238 rtx from_line = find_line_note (from);
3239 rtx after_line = find_line_note (after);
3240 rtx insn = emit_insn_after (pattern, after);
3241
3242 if (from_line)
3243 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3244 NOTE_LINE_NUMBER (from_line),
3245 after);
3246
3247 if (after_line)
3248 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3249 NOTE_LINE_NUMBER (after_line),
3250 insn);
3251 }
3252
3253 /* Similar to emit_insn_after, but update basic block boundaries as well. */
3254
3255 rtx
3256 emit_block_insn_after (pattern, after, block)
3257 rtx pattern, after;
3258 basic_block block;
3259 {
3260 rtx r = emit_insn_after (pattern, after);
3261 if (block && block->end == after)
3262 block->end = r;
3263 return r;
3264 }
3265
3266 /* Make an insn of code JUMP_INSN with body PATTERN
3267 and output it after the insn AFTER. */
3268
3269 rtx
3270 emit_jump_insn_after (pattern, after)
3271 register rtx pattern, after;
3272 {
3273 register rtx insn;
3274
3275 if (GET_CODE (pattern) == SEQUENCE)
3276 insn = emit_insn_after (pattern, after);
3277 else
3278 {
3279 insn = make_jump_insn_raw (pattern);
3280 add_insn_after (insn, after);
3281 }
3282
3283 return insn;
3284 }
3285
3286 /* Make an insn of code BARRIER
3287 and output it after the insn AFTER. */
3288
3289 rtx
3290 emit_barrier_after (after)
3291 register rtx after;
3292 {
3293 register rtx insn = rtx_alloc (BARRIER);
3294
3295 INSN_UID (insn) = cur_insn_uid++;
3296
3297 add_insn_after (insn, after);
3298 return insn;
3299 }
3300
3301 /* Emit the label LABEL after the insn AFTER. */
3302
3303 rtx
3304 emit_label_after (label, after)
3305 rtx label, after;
3306 {
3307 /* This can be called twice for the same label
3308 as a result of the confusion that follows a syntax error!
3309 So make it harmless. */
3310 if (INSN_UID (label) == 0)
3311 {
3312 INSN_UID (label) = cur_insn_uid++;
3313 add_insn_after (label, after);
3314 }
3315
3316 return label;
3317 }
3318
3319 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
3320
3321 rtx
3322 emit_note_after (subtype, after)
3323 int subtype;
3324 rtx after;
3325 {
3326 register rtx note = rtx_alloc (NOTE);
3327 INSN_UID (note) = cur_insn_uid++;
3328 NOTE_SOURCE_FILE (note) = 0;
3329 NOTE_LINE_NUMBER (note) = subtype;
3330 add_insn_after (note, after);
3331 return note;
3332 }
3333
3334 /* Emit a line note for FILE and LINE after the insn AFTER. */
3335
3336 rtx
3337 emit_line_note_after (file, line, after)
3338 const char *file;
3339 int line;
3340 rtx after;
3341 {
3342 register rtx note;
3343
3344 if (no_line_numbers && line > 0)
3345 {
3346 cur_insn_uid++;
3347 return 0;
3348 }
3349
3350 note = rtx_alloc (NOTE);
3351 INSN_UID (note) = cur_insn_uid++;
3352 NOTE_SOURCE_FILE (note) = file;
3353 NOTE_LINE_NUMBER (note) = line;
3354 add_insn_after (note, after);
3355 return note;
3356 }
3357 \f
3358 /* Make an insn of code INSN with pattern PATTERN
3359 and add it to the end of the doubly-linked list.
3360 If PATTERN is a SEQUENCE, take the elements of it
3361 and emit an insn for each element.
3362
3363 Returns the last insn emitted. */
3364
3365 rtx
3366 emit_insn (pattern)
3367 rtx pattern;
3368 {
3369 rtx insn = last_insn;
3370
3371 if (GET_CODE (pattern) == SEQUENCE)
3372 {
3373 register int i;
3374
3375 for (i = 0; i < XVECLEN (pattern, 0); i++)
3376 {
3377 insn = XVECEXP (pattern, 0, i);
3378 add_insn (insn);
3379 }
3380 }
3381 else
3382 {
3383 insn = make_insn_raw (pattern);
3384 add_insn (insn);
3385 }
3386
3387 return insn;
3388 }
3389
3390 /* Emit the insns in a chain starting with INSN.
3391 Return the last insn emitted. */
3392
3393 rtx
3394 emit_insns (insn)
3395 rtx insn;
3396 {
3397 rtx last = 0;
3398
3399 while (insn)
3400 {
3401 rtx next = NEXT_INSN (insn);
3402 add_insn (insn);
3403 last = insn;
3404 insn = next;
3405 }
3406
3407 return last;
3408 }
3409
3410 /* Emit the insns in a chain starting with INSN and place them in front of
3411 the insn BEFORE. Return the last insn emitted. */
3412
3413 rtx
3414 emit_insns_before (insn, before)
3415 rtx insn;
3416 rtx before;
3417 {
3418 rtx last = 0;
3419
3420 while (insn)
3421 {
3422 rtx next = NEXT_INSN (insn);
3423 add_insn_before (insn, before);
3424 last = insn;
3425 insn = next;
3426 }
3427
3428 return last;
3429 }
3430
3431 /* Emit the insns in a chain starting with FIRST and place them in back of
3432 the insn AFTER. Return the last insn emitted. */
3433
3434 rtx
3435 emit_insns_after (first, after)
3436 register rtx first;
3437 register rtx after;
3438 {
3439 register rtx last;
3440 register rtx after_after;
3441
3442 if (!after)
3443 abort ();
3444
3445 if (!first)
3446 return first;
3447
3448 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
3449 continue;
3450
3451 after_after = NEXT_INSN (after);
3452
3453 NEXT_INSN (after) = first;
3454 PREV_INSN (first) = after;
3455 NEXT_INSN (last) = after_after;
3456 if (after_after)
3457 PREV_INSN (after_after) = last;
3458
3459 if (after == last_insn)
3460 last_insn = last;
3461 return last;
3462 }
3463
3464 /* Make an insn of code JUMP_INSN with pattern PATTERN
3465 and add it to the end of the doubly-linked list. */
3466
3467 rtx
3468 emit_jump_insn (pattern)
3469 rtx pattern;
3470 {
3471 if (GET_CODE (pattern) == SEQUENCE)
3472 return emit_insn (pattern);
3473 else
3474 {
3475 register rtx insn = make_jump_insn_raw (pattern);
3476 add_insn (insn);
3477 return insn;
3478 }
3479 }
3480
3481 /* Make an insn of code CALL_INSN with pattern PATTERN
3482 and add it to the end of the doubly-linked list. */
3483
3484 rtx
3485 emit_call_insn (pattern)
3486 rtx pattern;
3487 {
3488 if (GET_CODE (pattern) == SEQUENCE)
3489 return emit_insn (pattern);
3490 else
3491 {
3492 register rtx insn = make_call_insn_raw (pattern);
3493 add_insn (insn);
3494 PUT_CODE (insn, CALL_INSN);
3495 return insn;
3496 }
3497 }
3498
3499 /* Add the label LABEL to the end of the doubly-linked list. */
3500
3501 rtx
3502 emit_label (label)
3503 rtx label;
3504 {
3505 /* This can be called twice for the same label
3506 as a result of the confusion that follows a syntax error!
3507 So make it harmless. */
3508 if (INSN_UID (label) == 0)
3509 {
3510 INSN_UID (label) = cur_insn_uid++;
3511 add_insn (label);
3512 }
3513 return label;
3514 }
3515
3516 /* Make an insn of code BARRIER
3517 and add it to the end of the doubly-linked list. */
3518
3519 rtx
3520 emit_barrier ()
3521 {
3522 register rtx barrier = rtx_alloc (BARRIER);
3523 INSN_UID (barrier) = cur_insn_uid++;
3524 add_insn (barrier);
3525 return barrier;
3526 }
3527
3528 /* Make an insn of code NOTE
3529 with data-fields specified by FILE and LINE
3530 and add it to the end of the doubly-linked list,
3531 but only if line-numbers are desired for debugging info. */
3532
3533 rtx
3534 emit_line_note (file, line)
3535 const char *file;
3536 int line;
3537 {
3538 set_file_and_line_for_stmt (file, line);
3539
3540 #if 0
3541 if (no_line_numbers)
3542 return 0;
3543 #endif
3544
3545 return emit_note (file, line);
3546 }
3547
3548 /* Make an insn of code NOTE
3549 with data-fields specified by FILE and LINE
3550 and add it to the end of the doubly-linked list.
3551 If it is a line-number NOTE, omit it if it matches the previous one. */
3552
3553 rtx
3554 emit_note (file, line)
3555 const char *file;
3556 int line;
3557 {
3558 register rtx note;
3559
3560 if (line > 0)
3561 {
3562 if (file && last_filename && !strcmp (file, last_filename)
3563 && line == last_linenum)
3564 return 0;
3565 last_filename = file;
3566 last_linenum = line;
3567 }
3568
3569 if (no_line_numbers && line > 0)
3570 {
3571 cur_insn_uid++;
3572 return 0;
3573 }
3574
3575 note = rtx_alloc (NOTE);
3576 INSN_UID (note) = cur_insn_uid++;
3577 NOTE_SOURCE_FILE (note) = file;
3578 NOTE_LINE_NUMBER (note) = line;
3579 add_insn (note);
3580 return note;
3581 }
3582
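/* A usage sketch (illustrative comment, not compiled code): the LINE
   argument does double duty.  A positive value is a source line, while
   a negative NOTE_INSN_* code marks a control note and bypasses the
   duplicate-line check above.  FILE and LINE are hypothetical:

	emit_note (file, line);
	emit_note (NULL, NOTE_INSN_DELETED);  */
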
3583 /* Emit a NOTE, and don't omit it even if LINE is the previous note. */
3584
3585 rtx
3586 emit_line_note_force (file, line)
3587 const char *file;
3588 int line;
3589 {
3590 last_linenum = -1;
3591 return emit_line_note (file, line);
3592 }
3593
3594 /* Cause next statement to emit a line note even if the line number
3595 has not changed. This is used at the beginning of a function. */
3596
3597 void
3598 force_next_line_note ()
3599 {
3600 last_linenum = -1;
3601 }
3602
3603 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
3604 note of this type already exists, remove it first. */
3605
3606 void
3607 set_unique_reg_note (insn, kind, datum)
3608 rtx insn;
3609 enum reg_note kind;
3610 rtx datum;
3611 {
3612 rtx note = find_reg_note (insn, kind, NULL_RTX);
3613
3614 /* First remove the note if there already is one. */
3615 if (note)
3616 remove_note (insn, note);
3617
3618 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
3619 }
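
/* A usage sketch (illustrative comment, not compiled code): recording
   that INSN's destination is known to equal a constant, replacing any
   stale note of the same kind.  INSN is hypothetical:

	set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));  */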
3620 \f
3621 /* Return an indication of which type of insn should have X as a body.
3622 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
3623
3624 enum rtx_code
3625 classify_insn (x)
3626 rtx x;
3627 {
3628 if (GET_CODE (x) == CODE_LABEL)
3629 return CODE_LABEL;
3630 if (GET_CODE (x) == CALL)
3631 return CALL_INSN;
3632 if (GET_CODE (x) == RETURN)
3633 return JUMP_INSN;
3634 if (GET_CODE (x) == SET)
3635 {
3636 if (SET_DEST (x) == pc_rtx)
3637 return JUMP_INSN;
3638 else if (GET_CODE (SET_SRC (x)) == CALL)
3639 return CALL_INSN;
3640 else
3641 return INSN;
3642 }
3643 if (GET_CODE (x) == PARALLEL)
3644 {
3645 register int j;
3646 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
3647 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
3648 return CALL_INSN;
3649 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
3650 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
3651 return JUMP_INSN;
3652 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
3653 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
3654 return CALL_INSN;
3655 }
3656 return INSN;
3657 }
3658
3659 /* Emit the rtl pattern X as an appropriate kind of insn.
3660 If X is a label, it is simply added into the insn chain. */
3661
3662 rtx
3663 emit (x)
3664 rtx x;
3665 {
3666 enum rtx_code code = classify_insn (x);
3667
3668 if (code == CODE_LABEL)
3669 return emit_label (x);
3670 else if (code == INSN)
3671 return emit_insn (x);
3672 else if (code == JUMP_INSN)
3673 {
3674 register rtx insn = emit_jump_insn (x);
3675 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
3676 return emit_barrier ();
3677 return insn;
3678 }
3679 else if (code == CALL_INSN)
3680 return emit_call_insn (x);
3681 else
3682 abort ();
3683 }
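
/* A usage sketch (illustrative comment, not compiled code): emit picks
   the insn code from the pattern itself, so a SET whose destination is
   the pc is emitted as a JUMP_INSN.  LABEL is hypothetical:

	emit (gen_rtx_SET (VOIDmode, pc_rtx,
			   gen_rtx_LABEL_REF (VOIDmode, label)));  */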
3684 \f
3685 /* Begin emitting insns to a sequence which can be packaged in an
3686 RTL_EXPR. If this sequence will contain something that might cause
3687 the compiler to pop arguments to function calls (because those
3688 pops have previously been deferred; see INHIBIT_DEFER_POP for more
3689 details), use do_pending_stack_adjust before calling this function.
3690 That will ensure that the deferred pops are not accidentally
3691 emitted in the middle of this sequence. */
3692
3693 void
3694 start_sequence ()
3695 {
3696 struct sequence_stack *tem;
3697
3698 tem = (struct sequence_stack *) xmalloc (sizeof (struct sequence_stack));
3699
3700 tem->next = seq_stack;
3701 tem->first = first_insn;
3702 tem->last = last_insn;
3703 tem->sequence_rtl_expr = seq_rtl_expr;
3704
3705 seq_stack = tem;
3706
3707 first_insn = 0;
3708 last_insn = 0;
3709 }
3710
3711 /* Similarly, but indicate that this sequence will be placed in T, an
3712 RTL_EXPR. See the documentation for start_sequence for more
3713 information about how to use this function. */
3714
3715 void
3716 start_sequence_for_rtl_expr (t)
3717 tree t;
3718 {
3719 start_sequence ();
3720
3721 seq_rtl_expr = t;
3722 }
3723
3724 /* Set up the insn chain starting with FIRST as the current sequence,
3725 saving the previously current one. See the documentation for
3726 start_sequence for more information about how to use this function. */
3727
3728 void
3729 push_to_sequence (first)
3730 rtx first;
3731 {
3732 rtx last;
3733
3734 start_sequence ();
3735
3736 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
3737
3738 first_insn = first;
3739 last_insn = last;
3740 }
3741
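/* A usage sketch (illustrative comment, not compiled code): appending
   insns to a detached chain that was saved away earlier.  CHAIN and
   PAT are hypothetical:

	push_to_sequence (chain);
	emit_insn (pat);
	chain = get_insns ();
	end_sequence ();  */
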
3742 /* Set up the insn chain from FIRST to LAST as the current sequence.  */
3743
3744 void
3745 push_to_full_sequence (first, last)
3746 rtx first, last;
3747 {
3748 start_sequence ();
3749 first_insn = first;
3750 last_insn = last;
3751 /* We really should have the end of the insn chain here. */
3752 if (last && NEXT_INSN (last))
3753 abort ();
3754 }
3755
3756 /* Set up the outer-level insn chain
3757 as the current sequence, saving the previously current one. */
3758
3759 void
3760 push_topmost_sequence ()
3761 {
3762 struct sequence_stack *stack, *top = NULL;
3763
3764 start_sequence ();
3765
3766 for (stack = seq_stack; stack; stack = stack->next)
3767 top = stack;
3768
3769 first_insn = top->first;
3770 last_insn = top->last;
3771 seq_rtl_expr = top->sequence_rtl_expr;
3772 }
3773
3774 /* After emitting to the outer-level insn chain, update the outer-level
3775 insn chain, and restore the previous saved state. */
3776
3777 void
3778 pop_topmost_sequence ()
3779 {
3780 struct sequence_stack *stack, *top = NULL;
3781
3782 for (stack = seq_stack; stack; stack = stack->next)
3783 top = stack;
3784
3785 top->first = first_insn;
3786 top->last = last_insn;
3787 /* ??? Why don't we save seq_rtl_expr here? */
3788
3789 end_sequence ();
3790 }
3791
3792 /* After emitting to a sequence, restore previous saved state.
3793
3794 To get the contents of the sequence just made, you must call
3795 `gen_sequence' *before* calling here.
3796
3797 If the compiler might have deferred popping arguments while
3798 generating this sequence, and this sequence will not be immediately
3799 inserted into the instruction stream, use do_pending_stack_adjust
3800 before calling gen_sequence. That will ensure that the deferred
3801 pops are inserted into this sequence, and not into some random
3802 location in the instruction stream. See INHIBIT_DEFER_POP for more
3803 information about deferred popping of arguments. */
3804
3805 void
3806 end_sequence ()
3807 {
3808 struct sequence_stack *tem = seq_stack;
3809
3810 first_insn = tem->first;
3811 last_insn = tem->last;
3812 seq_rtl_expr = tem->sequence_rtl_expr;
3813 seq_stack = tem->next;
3814
3815 free (tem);
3816 }
3817
3818 /* This works like end_sequence, but records the old sequence in FIRST
3819 and LAST. */
3820
3821 void
3822 end_full_sequence (first, last)
3823 rtx *first, *last;
3824 {
3825 *first = first_insn;
3826 *last = last_insn;
3827   end_sequence ();
3828 }
3829
3830 /* Return 1 if currently emitting into a sequence. */
3831
3832 int
3833 in_sequence_p ()
3834 {
3835 return seq_stack != 0;
3836 }
3837
3838 /* Generate a SEQUENCE rtx containing the insns already emitted
3839 to the current sequence.
3840
3841 This is how the gen_... function from a DEFINE_EXPAND
3842 constructs the SEQUENCE that it returns. */
3843
3844 rtx
3845 gen_sequence ()
3846 {
3847 rtx result;
3848 rtx tem;
3849 int i;
3850 int len;
3851
3852 /* Count the insns in the chain. */
3853 len = 0;
3854 for (tem = first_insn; tem; tem = NEXT_INSN (tem))
3855 len++;
3856
3857 /* If only one insn, return it rather than a SEQUENCE.
3858 (Now that we cache SEQUENCE expressions, it isn't worth special-casing
3859 the case of an empty list.)
3860 We only return the pattern of an insn if its code is INSN and it
3861 has no notes. This ensures that no information gets lost. */
3862 if (len == 1
3863 && ! RTX_FRAME_RELATED_P (first_insn)
3864 && GET_CODE (first_insn) == INSN
3865 /* Don't throw away any reg notes. */
3866 && REG_NOTES (first_insn) == 0)
3867 return PATTERN (first_insn);
3868
3869 result = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (len));
3870
3871 for (i = 0, tem = first_insn; tem; tem = NEXT_INSN (tem), i++)
3872 XVECEXP (result, 0, i) = tem;
3873
3874 return result;
3875 }
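
/* A usage sketch (illustrative comment, not compiled code): the
   standard idiom for building a detached chain of insns and emitting
   it later as one unit; note that gen_sequence must be called before
   end_sequence.  TEMP and SRC are hypothetical:

	rtx seq;

	start_sequence ();
	emit_move_insn (temp, src);
	seq = gen_sequence ();
	end_sequence ();
	emit_insn (seq);  */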
3876 \f
3877 /* Put the various virtual registers into REGNO_REG_RTX. */
3878
3879 void
3880 init_virtual_regs (es)
3881 struct emit_status *es;
3882 {
3883 rtx *ptr = es->x_regno_reg_rtx;
3884 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
3885 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
3886 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
3887 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
3888 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
3889 }
3890
3891 void
3892 clear_emit_caches ()
3893 {
3894 int i;
3895
3896 /* Clear the start_sequence/gen_sequence cache. */
3897 for (i = 0; i < SEQUENCE_RESULT_SIZE; i++)
3898 sequence_result[i] = 0;
3899 free_insn = 0;
3900 }
3901 \f
3902 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
3903 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
3904 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
3905 static int copy_insn_n_scratches;
3906
3907 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
3908 copied an ASM_OPERANDS.
3909 In that case, it is the original input-operand vector. */
3910 static rtvec orig_asm_operands_vector;
3911
3912 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
3913 copied an ASM_OPERANDS.
3914 In that case, it is the copied input-operand vector. */
3915 static rtvec copy_asm_operands_vector;
3916
3917 /* Likewise for the constraints vector. */
3918 static rtvec orig_asm_constraints_vector;
3919 static rtvec copy_asm_constraints_vector;
3920
3921 /* Recursively create a new copy of an rtx for copy_insn.
3922 This function differs from copy_rtx in that it handles SCRATCHes and
3923 ASM_OPERANDs properly.
3924 Normally, this function is not used directly; use copy_insn as front end.
3925 However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (orig)
     register rtx orig;
{
  register rtx copy;
  register int i, j;
  register RTX_CODE code;
  register const char *format_ptr;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case QUEUED:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ADDRESSOF:
      return orig;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
        if (copy_insn_scratch_in[i] == orig)
          return copy_insn_scratch_out[i];
      break;

    case CONST:
      /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
         a LABEL_REF, it isn't sharable.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
          && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
          && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
        return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
         the constant address may need to be reloaded.  If the mem is shared,
         then reloading one copy of this mem will cause all copies to appear
         to have been reloaded.  */

    default:
      break;
    }

  copy = rtx_alloc (code);

  /* Copy the various flags, and other information.  We assume that
     all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  memcpy (copy, orig, sizeof (struct rtx_def) - sizeof (rtunion));

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  copy->used = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (GET_RTX_CLASS (code) == 'i')
    {
      copy->jump = 0;
      copy->call = 0;
      copy->frame_related = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      copy->fld[i] = orig->fld[i];
      switch (*format_ptr++)
        {
        case 'e':
          if (XEXP (orig, i) != NULL)
            XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
          break;

        case 'E':
        case 'V':
          if (XVEC (orig, i) == orig_asm_constraints_vector)
            XVEC (copy, i) = copy_asm_constraints_vector;
          else if (XVEC (orig, i) == orig_asm_operands_vector)
            XVEC (copy, i) = copy_asm_operands_vector;
          else if (XVEC (orig, i) != NULL)
            {
              XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
              for (j = 0; j < XVECLEN (copy, i); j++)
                XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
            }
          break;

        case 't':
        case 'w':
        case 'i':
        case 's':
        case 'S':
        case 'u':
        case '0':
          /* These are left unchanged.  */
          break;

        default:
          abort ();
        }
    }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      if (i >= MAX_RECOG_OPERANDS)
        abort ();
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}

/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */

rtx
copy_insn (insn)
     rtx insn;
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
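
/* A minimal usage sketch (hypothetical caller).  Per the comment above
   copy_insn_1, a caller that also needs the REG_NOTEs copied must invoke
   copy_insn_1 (not copy_insn) for them, so that SCRATCHes in the notes
   map to the same copies that were made for the pattern:

     pat   = copy_insn (PATTERN (insn));
     notes = copy_insn_1 (REG_NOTES (insn));

   Calling copy_insn twice instead would reset the SCRATCH map between
   the two copies.  */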

/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit ()
{
  struct function *f = cfun;

  f->emit = (struct emit_status *) xmalloc (sizeof (struct emit_status));
  first_insn = NULL;
  last_insn = NULL;
  seq_rtl_expr = NULL;
  cur_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  last_linenum = 0;
  last_filename = 0;
  first_label_num = label_num;
  last_label_num = 0;
  seq_stack = NULL;

  clear_emit_caches ();

  /* Init the tables that describe all the pseudo regs.  */

  f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  f->emit->regno_pointer_align
    = (unsigned char *) xcalloc (f->emit->regno_pointer_align_length,
                                 sizeof (unsigned char));
  regno_reg_rtx
    = (rtx *) xcalloc (f->emit->regno_pointer_align_length, sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs (f->emit);

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}
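
/* A sketch of the state established above (illustrative only;
   gen_reg_rtx is defined earlier in this file):

     init_emit ();
     r = gen_reg_rtx (SImode);

   Since reg_rtx_no starts at LAST_VIRTUAL_REGISTER + 1, the first pseudo
   handed out gets that regno, and later requests grow the tables
   allocated above once the initial headroom is exhausted.  */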

/* Mark SS for GC.  */

static void
mark_sequence_stack (ss)
     struct sequence_stack *ss;
{
  while (ss)
    {
      ggc_mark_rtx (ss->first);
      ggc_mark_tree (ss->sequence_rtl_expr);
      ss = ss->next;
    }
}

/* Mark ES for GC.  */

void
mark_emit_status (es)
     struct emit_status *es;
{
  rtx *r;
  int i;

  if (es == 0)
    return;

  for (i = es->regno_pointer_align_length, r = es->x_regno_reg_rtx;
       i > 0; --i, ++r)
    ggc_mark_rtx (*r);

  mark_sequence_stack (es->sequence_stack);
  ggc_mark_tree (es->sequence_rtl_expr);
  ggc_mark_rtx (es->x_first_insn);
}
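
/* Expected wiring for the markers above (a sketch; in this era the
   caller is mark_function_status in function.c, registered with the
   garbage collector rather than called directly):

     mark_function_status (p)
       -> mark_emit_status (p->emit)
          -> walks the pseudo-register table and the sequence stack.  */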

/* Create some permanent unique rtl objects shared between all functions.
   LINE_NUMBERS is nonzero if line numbers are to be generated.  */

void
init_emit_once (line_numbers)
     int line_numbers;
{
  int i;
  enum machine_mode mode;

  /* Initialize the CONST_INT hash table.  */
  const_int_htab = htab_create (37, const_int_htab_hash,
                                const_int_htab_eq, NULL);
  ggc_add_root (&const_int_htab, 1, sizeof (const_int_htab),
                rtx_htab_mark);

  no_line_numbers = ! line_numbers;

  /* Compute the word and byte modes.  */

  byte_mode = VOIDmode;
  word_mode = VOIDmode;
  double_mode = VOIDmode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
          && byte_mode == VOIDmode)
        byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
          && word_mode == VOIDmode)
        word_mode = mode;
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
          && double_mode == VOIDmode)
        double_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
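
  /* For a typical 32-bit target (BITS_PER_UNIT == 8, BITS_PER_WORD == 32,
     DOUBLE_TYPE_SIZE == 64, POINTER_SIZE == 32) the loops above yield
     byte_mode == QImode, word_mode == SImode, double_mode == DFmode and
     ptr_mode == SImode; the target macros decide the actual values.  */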

  /* Assign register numbers to the globally defined register rtx.
     This must be done at runtime because the register number field
     is in a union and some compilers can't initialize unions.  */

  pc_rtx = gen_rtx (PC, VOIDmode);
  cc0_rtx = gen_rtx (CC0, VOIDmode);
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  if (hard_frame_pointer_rtx == 0)
    hard_frame_pointer_rtx = gen_raw_REG (Pmode,
                                          HARD_FRAME_POINTER_REGNUM);
  if (arg_pointer_rtx == 0)
    arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);

  /* These rtx must be roots if GC is enabled.  */
  ggc_add_rtx_root (global_rtl, GR_MAX);
#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end, which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx here since gen_rtx in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, i);
  ggc_add_rtx_root (const_int_rtx, 2 * MAX_SAVED_CONST_INT + 1);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
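
  /* A sketch of the invariant just established (illustrative only):
     small constants now come from the cache, so

       GEN_INT (0)  == const0_rtx
       GEN_INT (-1) == constm1_rtx

     and such CONST_INTs may be compared with pointer equality.  */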

  dconst0 = REAL_VALUE_ATOF ("0", double_mode);
  dconst1 = REAL_VALUE_ATOF ("1", double_mode);
  dconst2 = REAL_VALUE_ATOF ("2", double_mode);
  dconstm1 = REAL_VALUE_ATOF ("-1", double_mode);

  for (i = 0; i <= 2; i++)
    {
      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        {
          rtx tem = rtx_alloc (CONST_DOUBLE);
          union real_extract u;

          /* Zero any holes in a structure.  */
          memset ((char *) &u, 0, sizeof u);
          u.d = i == 0 ? dconst0 : i == 1 ? dconst1 : dconst2;

          /* Avoid trailing garbage in the rtx.  */
          if (sizeof (u) < sizeof (HOST_WIDE_INT))
            CONST_DOUBLE_LOW (tem) = 0;
          if (sizeof (u) < 2 * sizeof (HOST_WIDE_INT))
            CONST_DOUBLE_HIGH (tem) = 0;

          memcpy (&CONST_DOUBLE_LOW (tem), &u, sizeof u);
          CONST_DOUBLE_MEM (tem) = cc0_rtx;
          CONST_DOUBLE_CHAIN (tem) = NULL_RTX;
          PUT_MODE (tem, mode);

          const_tiny_rtx[i][(int) mode] = tem;
        }

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;
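
  /* The table filled in above is what the CONST0_RTX, CONST1_RTX and
     CONST2_RTX macros in rtl.h index, e.g. (illustrative):

       CONST0_RTX (DFmode)  ==>  const_tiny_rtx[0][(int) DFmode]

     so every request for "zero in mode M" yields one shared rtx.  */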

  /* For bounded pointers, `&const_tiny_rtx[0][0]' is not the same as
     `(rtx *) const_tiny_rtx'.  The former has bounds that only cover
     `const_tiny_rtx[0]', whereas the latter has bounds that cover all.  */
  ggc_add_rtx_root ((rtx *) const_tiny_rtx, sizeof const_tiny_rtx / sizeof (rtx));
  ggc_add_rtx_root (&const_true_rtx, 1);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

#ifdef STRUCT_VALUE
  struct_value_rtx = STRUCT_VALUE;
#else
  struct_value_rtx = gen_rtx_REG (Pmode, STRUCT_VALUE_REGNUM);
#endif

#ifdef STRUCT_VALUE_INCOMING
  struct_value_incoming_rtx = STRUCT_VALUE_INCOMING;
#else
#ifdef STRUCT_VALUE_INCOMING_REGNUM
  struct_value_incoming_rtx
    = gen_rtx_REG (Pmode, STRUCT_VALUE_INCOMING_REGNUM);
#else
  struct_value_incoming_rtx = struct_value_rtx;
#endif
#endif

#ifdef STATIC_CHAIN_REGNUM
  static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);

#ifdef STATIC_CHAIN_INCOMING_REGNUM
  if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
    static_chain_incoming_rtx
      = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
  else
#endif
    static_chain_incoming_rtx = static_chain_rtx;
#endif

#ifdef STATIC_CHAIN
  static_chain_rtx = STATIC_CHAIN;

#ifdef STATIC_CHAIN_INCOMING
  static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
#else
  static_chain_incoming_rtx = static_chain_rtx;
#endif
#endif

  if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);

  ggc_add_rtx_root (&pic_offset_table_rtx, 1);
  ggc_add_rtx_root (&struct_value_rtx, 1);
  ggc_add_rtx_root (&struct_value_incoming_rtx, 1);
  ggc_add_rtx_root (&static_chain_rtx, 1);
  ggc_add_rtx_root (&static_chain_incoming_rtx, 1);
  ggc_add_rtx_root (&return_address_pointer_rtx, 1);
}
\f
/* Query and clear/restore no_line_numbers.  This is used by the
   switch / case handling in stmt.c to give proper line numbers in
   warnings about unreachable code.  */

int
force_line_numbers ()
{
  int old = no_line_numbers;

  no_line_numbers = 0;
  if (old)
    force_next_line_note ();
  return old;
}

void
restore_line_number_status (old_value)
     int old_value;
{
  no_line_numbers = old_value;
}
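
/* A minimal usage sketch (hypothetical caller, modelled on the stmt.c
   switch / case handling mentioned above):

     int old = force_line_numbers ();
     ... emit range-check insns whose warnings need line numbers ...
     restore_line_number_status (old);
*/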