1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* Middle-to-low level generation of rtx code and insns.
22
23 This file contains support functions for creating rtl expressions
24 and manipulating them in the doubly-linked chain of insns.
25
26 The patterns of the insns are created by machine-dependent
27 routines in insn-emit.c, which is generated automatically from
28 the machine description. These routines make the individual rtx's
29 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30 which are automatically generated from rtl.def; what is machine
31 dependent is the kind of rtx's they make and what arguments they
32 use. */
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "backend.h"
38 #include "target.h"
39 #include "rtl.h"
40 #include "tree.h"
41 #include "df.h"
42 #include "tm_p.h"
43 #include "stringpool.h"
44 #include "expmed.h"
45 #include "insn-config.h"
46 #include "regs.h"
47 #include "emit-rtl.h"
48 #include "recog.h"
49 #include "diagnostic-core.h"
50 #include "alias.h"
51 #include "fold-const.h"
52 #include "varasm.h"
53 #include "cfgrtl.h"
54 #include "tree-eh.h"
55 #include "flags.h"
56 #include "dojump.h"
57 #include "explow.h"
58 #include "calls.h"
59 #include "stmt.h"
60 #include "expr.h"
61 #include "debug.h"
62 #include "langhooks.h"
63 #include "params.h"
64 #include "builtins.h"
65 #include "rtl-iter.h"
66 #include "stor-layout.h"
67
68 struct target_rtl default_target_rtl;
69 #if SWITCHABLE_TARGET
70 struct target_rtl *this_target_rtl = &default_target_rtl;
71 #endif
72
73 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
74
75 /* Commonly used modes. */
76
77 machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
78 machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
79 machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
80 machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
81
82 /* Datastructures maintained for currently processed function in RTL form. */
83
84 struct rtl_data x_rtl;
85
86 /* Indexed by pseudo register number, gives the rtx for that pseudo.
87 Allocated in parallel with regno_pointer_align.
88 FIXME: We could put it into the emit_status struct, but gengtype is not
89 able to deal with a length attribute nested in top-level structures. */
90
91 rtx * regno_reg_rtx;
92
93 /* This is *not* reset after each function. It gives each CODE_LABEL
94 in the entire compilation a unique label number. */
95
96 static GTY(()) int label_num = 1;
97
98 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
99 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
100 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
101 is set only for MODE_INT and MODE_VECTOR_INT modes. */
102
103 rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
104
105 rtx const_true_rtx;
106
107 REAL_VALUE_TYPE dconst0;
108 REAL_VALUE_TYPE dconst1;
109 REAL_VALUE_TYPE dconst2;
110 REAL_VALUE_TYPE dconstm1;
111 REAL_VALUE_TYPE dconsthalf;
112
113 /* Record fixed-point constant 0 and 1. */
114 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
115 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
116
117 /* We make one copy of (const_int C) where C is in
118 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
119 to save space during the compilation and simplify comparisons of
120 integers. */
121
122 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
123
124 /* Standard pieces of rtx, to be substituted directly into things. */
125 rtx pc_rtx;
126 rtx ret_rtx;
127 rtx simple_return_rtx;
128 rtx cc0_rtx;
129
130 /* Marker used for denoting an INSN, which should never be accessed (i.e.,
131 this pointer should normally never be dereferenced), but is required to be
132 distinct from NULL_RTX. Currently used by the peephole2 pass. */
133 rtx_insn *invalid_insn_rtx;
134
135 /* A hash table storing CONST_INTs whose absolute value is greater
136 than MAX_SAVED_CONST_INT. */
137
138 struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
139 {
140 typedef HOST_WIDE_INT compare_type;
141
142 static hashval_t hash (rtx i);
143 static bool equal (rtx i, HOST_WIDE_INT h);
144 };
145
146 static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;
147
148 struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
149 {
150 static hashval_t hash (rtx x);
151 static bool equal (rtx x, rtx y);
152 };
153
154 static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;
155
156 /* A hash table storing register attribute structures. */
157 struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
158 {
159 static hashval_t hash (reg_attrs *x);
160 static bool equal (reg_attrs *a, reg_attrs *b);
161 };
162
163 static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;
164
165 /* A hash table storing all CONST_DOUBLEs. */
166 struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
167 {
168 static hashval_t hash (rtx x);
169 static bool equal (rtx x, rtx y);
170 };
171
172 static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;
173
174 /* A hash table storing all CONST_FIXEDs. */
175 struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
176 {
177 static hashval_t hash (rtx x);
178 static bool equal (rtx x, rtx y);
179 };
180
181 static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
182
183 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
184 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
185 #define first_label_num (crtl->emit.x_first_label_num)
186
187 static void set_used_decls (tree);
188 static void mark_label_nuses (rtx);
189 #if TARGET_SUPPORTS_WIDE_INT
190 static rtx lookup_const_wide_int (rtx);
191 #endif
192 static rtx lookup_const_double (rtx);
193 static rtx lookup_const_fixed (rtx);
194 static reg_attrs *get_reg_attrs (tree, int);
195 static rtx gen_const_vector (machine_mode, int);
196 static void copy_rtx_if_shared_1 (rtx *orig);
197
198 /* Probability of the conditional branch currently being processed by try_split.
199 Set to -1 otherwise. */
200 int split_branch_probability = -1;
201 \f
202 /* Returns a hash code for X (which is really a CONST_INT). */
203
204 hashval_t
205 const_int_hasher::hash (rtx x)
206 {
207 return (hashval_t) INTVAL (x);
208 }
209
210 /* Returns nonzero if the value represented by X (which is really a
211 CONST_INT) is the same as that given by Y (which is really a
212 HOST_WIDE_INT). */
213
214 bool
215 const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
216 {
217 return (INTVAL (x) == y);
218 }
219
220 #if TARGET_SUPPORTS_WIDE_INT
221 /* Returns a hash code for X (which is really a CONST_WIDE_INT). */
222
223 hashval_t
224 const_wide_int_hasher::hash (rtx x)
225 {
226 int i;
227 unsigned HOST_WIDE_INT hash = 0;
228 const_rtx xr = x;
229
230 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
231 hash += CONST_WIDE_INT_ELT (xr, i);
232
233 return (hashval_t) hash;
234 }
235
236 /* Returns nonzero if the value represented by X (which is really a
237 CONST_WIDE_INT) is the same as that given by Y (which is really a
238 CONST_WIDE_INT). */
239
240 bool
241 const_wide_int_hasher::equal (rtx x, rtx y)
242 {
243 int i;
244 const_rtx xr = x;
245 const_rtx yr = y;
246 if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
247 return false;
248
249 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
250 if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
251 return false;
252
253 return true;
254 }
255 #endif
256
257 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
258 hashval_t
259 const_double_hasher::hash (rtx x)
260 {
261 const_rtx const value = x;
262 hashval_t h;
263
264 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
265 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
266 else
267 {
268 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
269 /* MODE is used in the comparison, so it should be in the hash. */
270 h ^= GET_MODE (value);
271 }
272 return h;
273 }
274
275 /* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
276 is the same as that represented by Y (really a CONST_DOUBLE). */
277 bool
278 const_double_hasher::equal (rtx x, rtx y)
279 {
280 const_rtx const a = x, b = y;
281
282 if (GET_MODE (a) != GET_MODE (b))
283 return 0;
284 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
285 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
286 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
287 else
288 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
289 CONST_DOUBLE_REAL_VALUE (b));
290 }
291
292 /* Returns a hash code for X (which is really a CONST_FIXED). */
293
294 hashval_t
295 const_fixed_hasher::hash (rtx x)
296 {
297 const_rtx const value = x;
298 hashval_t h;
299
300 h = fixed_hash (CONST_FIXED_VALUE (value));
301 /* MODE is used in the comparison, so it should be in the hash. */
302 h ^= GET_MODE (value);
303 return h;
304 }
305
306 /* Returns nonzero if the value represented by X is the same as that
307 represented by Y. */
308
309 bool
310 const_fixed_hasher::equal (rtx x, rtx y)
311 {
312 const_rtx const a = x, b = y;
313
314 if (GET_MODE (a) != GET_MODE (b))
315 return 0;
316 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
317 }
318
319 /* Return true if the given memory attributes are equal. */
320
321 bool
322 mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
323 {
324 if (p == q)
325 return true;
326 if (!p || !q)
327 return false;
328 return (p->alias == q->alias
329 && p->offset_known_p == q->offset_known_p
330 && (!p->offset_known_p || p->offset == q->offset)
331 && p->size_known_p == q->size_known_p
332 && (!p->size_known_p || p->size == q->size)
333 && p->align == q->align
334 && p->addrspace == q->addrspace
335 && (p->expr == q->expr
336 || (p->expr != NULL_TREE && q->expr != NULL_TREE
337 && operand_equal_p (p->expr, q->expr, 0))));
338 }
339
340 /* Set MEM's memory attributes so that they are the same as ATTRS. */
341
342 static void
343 set_mem_attrs (rtx mem, mem_attrs *attrs)
344 {
345 /* If everything is the default, we can just clear the attributes. */
346 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
347 {
348 MEM_ATTRS (mem) = 0;
349 return;
350 }
351
352 if (!MEM_ATTRS (mem)
353 || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
354 {
355 MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
356 memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
357 }
358 }
359
360 /* Returns a hash code for X (which is really a reg_attrs *). */
361
362 hashval_t
363 reg_attr_hasher::hash (reg_attrs *x)
364 {
365 const reg_attrs *const p = x;
366
367 return ((p->offset * 1000) ^ (intptr_t) p->decl);
368 }
369
370 /* Returns nonzero if the value represented by X is the same as that given by
371 Y. */
372
373 bool
374 reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
375 {
376 const reg_attrs *const p = x;
377 const reg_attrs *const q = y;
378
379 return (p->decl == q->decl && p->offset == q->offset);
380 }
381 /* Allocate a new reg_attrs structure for DECL and OFFSET, and insert
382 it into the hash table if one identical to it is not already in
383 the table. */
384
385 static reg_attrs *
386 get_reg_attrs (tree decl, int offset)
387 {
388 reg_attrs attrs;
389
390 /* If everything is the default, we can just return zero. */
391 if (decl == 0 && offset == 0)
392 return 0;
393
394 attrs.decl = decl;
395 attrs.offset = offset;
396
397 reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
398 if (*slot == 0)
399 {
400 *slot = ggc_alloc<reg_attrs> ();
401 memcpy (*slot, &attrs, sizeof (reg_attrs));
402 }
403
404 return *slot;
405 }
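
/* A small sketch of the hash-consing behavior above: requesting the
   same (DECL, OFFSET) pair twice yields the same reg_attrs pointer,
   and the all-default pair is represented by a null pointer.  DECL
   here stands for any suitable declaration tree:

     reg_attrs *a = get_reg_attrs (decl, 4);
     reg_attrs *b = get_reg_attrs (decl, 4);
     gcc_assert (a == b);
     gcc_assert (get_reg_attrs (NULL_TREE, 0) == NULL);  */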
406
407
408 #if !HAVE_blockage
409 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule
410 across this insn and to keep register equivalences from being seen across it. */
411
412 rtx
413 gen_blockage (void)
414 {
415 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
416 MEM_VOLATILE_P (x) = true;
417 return x;
418 }
419 #endif
420
421
422 /* Set the mode and register number of X to MODE and REGNO. */
423
424 void
425 set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
426 {
427 unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
428 ? hard_regno_nregs[regno][mode]
429 : 1);
430 PUT_MODE_RAW (x, mode);
431 set_regno_raw (x, regno, nregs);
432 }
433
434 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
435 don't attempt to share with the various global pieces of rtl (such as
436 frame_pointer_rtx). */
437
438 rtx
439 gen_raw_REG (machine_mode mode, unsigned int regno)
440 {
441 rtx x = rtx_alloc_stat (REG MEM_STAT_INFO);
442 set_mode_and_regno (x, mode, regno);
443 REG_ATTRS (x) = NULL;
444 ORIGINAL_REGNO (x) = regno;
445 return x;
446 }
447
448 /* There are some RTL codes that require special attention; the generation
449 functions do the raw handling. If you add to this list, modify
450 special_rtx in gengenrtl.c as well. */
451
452 rtx_expr_list *
453 gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
454 {
455 return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
456 expr_list));
457 }
458
459 rtx_insn_list *
460 gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
461 {
462 return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
463 insn_list));
464 }
465
466 rtx_insn *
467 gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
468 basic_block bb, rtx pattern, int location, int code,
469 rtx reg_notes)
470 {
471 return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
472 prev_insn, next_insn,
473 bb, pattern, location, code,
474 reg_notes));
475 }
476
477 rtx
478 gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
479 {
480 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
481 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
482
483 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
484 if (const_true_rtx && arg == STORE_FLAG_VALUE)
485 return const_true_rtx;
486 #endif
487
488 /* Look up the CONST_INT in the hash table. */
489 rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
490 INSERT);
491 if (*slot == 0)
492 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
493
494 return *slot;
495 }
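
/* An illustrative consequence of the caching above: CONST_INTs are
   fully shared, so pointer equality is a valid way to compare them.
   Whether 42 comes from the small const_int_rtx[] array (it does
   whenever 42 <= MAX_SAVED_CONST_INT) or from const_int_htab, both
   calls below return the same rtx:

     rtx a = gen_rtx_CONST_INT (VOIDmode, 42);
     rtx b = GEN_INT (42);
     gcc_assert (a == b);

   This is why CONST_INTs are routinely compared with == rather than
   with rtx_equal_p.  */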
496
497 rtx
498 gen_int_mode (HOST_WIDE_INT c, machine_mode mode)
499 {
500 return GEN_INT (trunc_int_for_mode (c, mode));
501 }
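
/* For example, since QImode is 8 bits wide, trunc_int_for_mode
   sign-extends from bit 7, so

     gen_int_mode (0xff, QImode) == constm1_rtx

   i.e. the value is canonicalized to -1 before the lookup, keeping
   CONST_INT sharing independent of how callers spell the constant.  */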
502
503 /* CONST_DOUBLEs might be created from pairs of integers, or from
504 REAL_VALUE_TYPEs. Also, their length is known only at run time,
505 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
506
507 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
508 hash table. If so, return its counterpart; otherwise add it
509 to the hash table and return it. */
510 static rtx
511 lookup_const_double (rtx real)
512 {
513 rtx *slot = const_double_htab->find_slot (real, INSERT);
514 if (*slot == 0)
515 *slot = real;
516
517 return *slot;
518 }
519
520 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
521 VALUE in mode MODE. */
522 rtx
523 const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
524 {
525 rtx real = rtx_alloc (CONST_DOUBLE);
526 PUT_MODE (real, mode);
527
528 real->u.rv = value;
529
530 return lookup_const_double (real);
531 }
532
533 /* Determine whether FIXED, a CONST_FIXED, already exists in the
534 hash table. If so, return its counterpart; otherwise add it
535 to the hash table and return it. */
536
537 static rtx
538 lookup_const_fixed (rtx fixed)
539 {
540 rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
541 if (*slot == 0)
542 *slot = fixed;
543
544 return *slot;
545 }
546
547 /* Return a CONST_FIXED rtx for a fixed-point value specified by
548 VALUE in mode MODE. */
549
550 rtx
551 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
552 {
553 rtx fixed = rtx_alloc (CONST_FIXED);
554 PUT_MODE (fixed, mode);
555
556 fixed->u.fv = value;
557
558 return lookup_const_fixed (fixed);
559 }
560
561 #if TARGET_SUPPORTS_WIDE_INT == 0
562 /* Construct a double_int from rtx CST. */
563
564 double_int
565 rtx_to_double_int (const_rtx cst)
566 {
567 double_int r;
568
569 if (CONST_INT_P (cst))
570 r = double_int::from_shwi (INTVAL (cst));
571 else if (CONST_DOUBLE_AS_INT_P (cst))
572 {
573 r.low = CONST_DOUBLE_LOW (cst);
574 r.high = CONST_DOUBLE_HIGH (cst);
575 }
576 else
577 gcc_unreachable ();
578
579 return r;
580 }
581 #endif
582
583 #if TARGET_SUPPORTS_WIDE_INT
584 /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
585 If so, return its counterpart; otherwise add it to the hash table and
586 return it. */
587
588 static rtx
589 lookup_const_wide_int (rtx wint)
590 {
591 rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
592 if (*slot == 0)
593 *slot = wint;
594
595 return *slot;
596 }
597 #endif
598
599 /* Return an rtx constant for V, given that the constant has mode MODE.
600 The returned rtx will be a CONST_INT if V fits, otherwise it will be
601 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
602 (if TARGET_SUPPORTS_WIDE_INT). */
603
604 rtx
605 immed_wide_int_const (const wide_int_ref &v, machine_mode mode)
606 {
607 unsigned int len = v.get_len ();
608 unsigned int prec = GET_MODE_PRECISION (mode);
609
610 /* Allow truncation but not extension since we do not know if the
611 number is signed or unsigned. */
612 gcc_assert (prec <= v.get_precision ());
613
614 if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
615 return gen_int_mode (v.elt (0), mode);
616
617 #if TARGET_SUPPORTS_WIDE_INT
618 {
619 unsigned int i;
620 rtx value;
621 unsigned int blocks_needed
622 = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
623
624 if (len > blocks_needed)
625 len = blocks_needed;
626
627 value = const_wide_int_alloc (len);
628
629 /* It is so tempting to just put the mode in here. Must control
630 myself ... */
631 PUT_MODE (value, VOIDmode);
632 CWI_PUT_NUM_ELEM (value, len);
633
634 for (i = 0; i < len; i++)
635 CONST_WIDE_INT_ELT (value, i) = v.elt (i);
636
637 return lookup_const_wide_int (value);
638 }
639 #else
640 return immed_double_const (v.elt (0), v.elt (1), mode);
641 #endif
642 }
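
/* A sketch of the CONST_WIDE_INT path, assuming a 64-bit
   HOST_WIDE_INT, a target providing TImode, and
   TARGET_SUPPORTS_WIDE_INT.  A 128-bit value that does not fit in one
   HOST_WIDE_INT comes back as a two-element CONST_WIDE_INT:

     wide_int w = wi::lshift (wi::one (128), 100);
     rtx x = immed_wide_int_const (w, TImode);
     gcc_assert (GET_CODE (x) == CONST_WIDE_INT
                 && CONST_WIDE_INT_NUNITS (x) == 2);

   A value whose significant bits fit in one element instead takes the
   early gen_int_mode path and yields a plain CONST_INT.  */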
643
644 #if TARGET_SUPPORTS_WIDE_INT == 0
645 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
646 of ints: I0 is the low-order word and I1 is the high-order word.
647 For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
648 implied upper bits are copies of the high bit of i1. The value
649 itself is neither signed nor unsigned. Do not use this routine for
650 non-integer modes; convert to REAL_VALUE_TYPE and use
651 const_double_from_real_value. */
652
653 rtx
654 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
655 {
656 rtx value;
657 unsigned int i;
658
659 /* There are the following cases (note that there are no modes with
660 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
661
662 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
663 gen_int_mode.
664 2) If the value of the integer fits into HOST_WIDE_INT anyway
665 (i.e., i1 consists only of copies of the sign bit, and the signs
666 of i0 and i1 are the same), then we return a CONST_INT for i0.
667 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
668 if (mode != VOIDmode)
669 {
670 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
671 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
672 /* We can get a 0 for an error mark. */
673 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
674 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
675 || GET_MODE_CLASS (mode) == MODE_POINTER_BOUNDS);
676
677 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
678 return gen_int_mode (i0, mode);
679 }
680
681 /* If this integer fits in one word, return a CONST_INT. */
682 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
683 return GEN_INT (i0);
684
685 /* We use VOIDmode for integers. */
686 value = rtx_alloc (CONST_DOUBLE);
687 PUT_MODE (value, VOIDmode);
688
689 CONST_DOUBLE_LOW (value) = i0;
690 CONST_DOUBLE_HIGH (value) = i1;
691
692 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
693 XWINT (value, i) = 0;
694
695 return lookup_const_double (value);
696 }
697 #endif
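
/* A worked example of the three cases above (only compiled when
   !TARGET_SUPPORTS_WIDE_INT), assuming a 64-bit HOST_WIDE_INT:

     immed_double_const (5, 0, SImode)      case 1: SImode fits in a
                                            word, so gen_int_mode
                                            returns GEN_INT (5);
     immed_double_const (-1, -1, VOIDmode)  case 2: i1 is all copies
                                            of i0's sign bit, so the
                                            result is constm1_rtx;
     immed_double_const (0, 1, VOIDmode)    case 3: both words are
                                            needed, so a VOIDmode
                                            CONST_DOUBLE is built.  */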
698
699 rtx
700 gen_rtx_REG (machine_mode mode, unsigned int regno)
701 {
702 /* In case the MD file explicitly references the frame pointer, have
703 all such references point to the same frame pointer. This is
704 used during frame pointer elimination to distinguish the explicit
705 references to these registers from pseudos that happened to be
706 assigned to them.
707
708 If we have eliminated the frame pointer or arg pointer, we will
709 be using it as a normal register, for example as a spill
710 register. In such cases, we might be accessing it in a mode that
711 is not Pmode and therefore cannot use the pre-allocated rtx.
712
713 Also don't do this when we are making new REGs in reload, since
714 we don't want to get confused with the real pointers. */
715
716 if (mode == Pmode && !reload_in_progress && !lra_in_progress)
717 {
718 if (regno == FRAME_POINTER_REGNUM
719 && (!reload_completed || frame_pointer_needed))
720 return frame_pointer_rtx;
721
722 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
723 && regno == HARD_FRAME_POINTER_REGNUM
724 && (!reload_completed || frame_pointer_needed))
725 return hard_frame_pointer_rtx;
726 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
727 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
728 && regno == ARG_POINTER_REGNUM)
729 return arg_pointer_rtx;
730 #endif
731 #ifdef RETURN_ADDRESS_POINTER_REGNUM
732 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
733 return return_address_pointer_rtx;
734 #endif
735 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
736 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
737 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
738 return pic_offset_table_rtx;
739 if (regno == STACK_POINTER_REGNUM)
740 return stack_pointer_rtx;
741 }
742
743 #if 0
744 /* If the per-function register table has been set up, try to re-use
745 an existing entry in that table to avoid useless generation of RTL.
746
747 This code is disabled for now until we can fix the various backends
748 which depend on having non-shared hard registers in some cases. Long
749 term we want to re-enable this code as it can significantly cut down
750 on the amount of useless RTL that gets generated.
751
752 We'll also need to fix some code that runs after reload that wants to
753 set ORIGINAL_REGNO. */
754
755 if (cfun
756 && cfun->emit
757 && regno_reg_rtx
758 && regno < FIRST_PSEUDO_REGISTER
759 && reg_raw_mode[regno] == mode)
760 return regno_reg_rtx[regno];
761 #endif
762
763 return gen_raw_REG (mode, regno);
764 }
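
/* A consequence of the sharing above, outside of reload: the
   well-known registers compare equal by pointer, e.g.

     gcc_assert (gen_rtx_REG (Pmode, STACK_POINTER_REGNUM)
                 == stack_pointer_rtx);

   Asking for the same hard register in some other mode falls through
   to gen_raw_REG and produces a fresh, unshared REG.  */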
765
766 rtx
767 gen_rtx_MEM (machine_mode mode, rtx addr)
768 {
769 rtx rt = gen_rtx_raw_MEM (mode, addr);
770
771 /* This field is not cleared by the mere allocation of the rtx, so
772 we clear it here. */
773 MEM_ATTRS (rt) = 0;
774
775 return rt;
776 }
777
778 /* Generate a MEM referring to non-trapping constant memory. */
779
780 rtx
781 gen_const_mem (machine_mode mode, rtx addr)
782 {
783 rtx mem = gen_rtx_MEM (mode, addr);
784 MEM_READONLY_P (mem) = 1;
785 MEM_NOTRAP_P (mem) = 1;
786 return mem;
787 }
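
/* Typical use, sketched for a constant-pool style reference (the
   symbol name is illustrative):

     rtx sym = gen_rtx_SYMBOL_REF (Pmode, ".LC0");
     rtx mem = gen_const_mem (SImode, sym);

   Marking the MEM MEM_READONLY_P and MEM_NOTRAP_P lets later passes
   freely rematerialize or hoist the load.  */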
788
789 /* Generate a MEM referring to fixed portions of the frame, e.g., register
790 save areas. */
791
792 rtx
793 gen_frame_mem (machine_mode mode, rtx addr)
794 {
795 rtx mem = gen_rtx_MEM (mode, addr);
796 MEM_NOTRAP_P (mem) = 1;
797 set_mem_alias_set (mem, get_frame_alias_set ());
798 return mem;
799 }
800
801 /* Generate a MEM referring to a temporary use of the stack, not part
802 of the fixed stack frame. For example, something which is pushed
803 by a target splitter. */
804 rtx
805 gen_tmp_stack_mem (machine_mode mode, rtx addr)
806 {
807 rtx mem = gen_rtx_MEM (mode, addr);
808 MEM_NOTRAP_P (mem) = 1;
809 if (!cfun->calls_alloca)
810 set_mem_alias_set (mem, get_frame_alias_set ());
811 return mem;
812 }
813
814 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
815 this construct would be valid, and false otherwise. */
816
817 bool
818 validate_subreg (machine_mode omode, machine_mode imode,
819 const_rtx reg, unsigned int offset)
820 {
821 unsigned int isize = GET_MODE_SIZE (imode);
822 unsigned int osize = GET_MODE_SIZE (omode);
823
824 /* All subregs must be aligned. */
825 if (offset % osize != 0)
826 return false;
827
828 /* The subreg offset cannot be outside the inner object. */
829 if (offset >= isize)
830 return false;
831
832 /* ??? This should not be here. Temporarily continue to allow word_mode
833 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
834 Generally, backends are doing something sketchy but it'll take time to
835 fix them all. */
836 if (omode == word_mode)
837 ;
838 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
839 is the culprit here, and not the backends. */
840 else if (osize >= UNITS_PER_WORD && isize >= osize)
841 ;
842 /* Allow component subregs of complex and vector. Though given the below
843 extraction rules, it's not always clear what that means. */
844 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
845 && GET_MODE_INNER (imode) == omode)
846 ;
847 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
848 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
849 represent this. It's questionable if this ought to be represented at
850 all -- why can't this all be hidden in post-reload splitters that make
851 arbitrary mode changes to the registers themselves. */
852 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
853 ;
854 /* Subregs involving floating point modes are not allowed to
855 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
856 (subreg:SI (reg:DF) 0) isn't. */
857 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
858 {
859 if (! (isize == osize
860 /* LRA can use subreg to store a floating point value in
861 an integer mode. Although the floating point and the
862 integer modes need the same number of hard registers,
863 the size of the floating point mode can be less than that of the
864 integer mode. LRA also uses subregs for a register that
865 should be used in a different mode in an insn. */
866 || lra_in_progress))
867 return false;
868 }
869
870 /* Paradoxical subregs must have offset zero. */
871 if (osize > isize)
872 return offset == 0;
873
874 /* This is a normal subreg. Verify that the offset is representable. */
875
876 /* For hard registers, we already have most of these rules collected in
877 subreg_offset_representable_p. */
878 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
879 {
880 unsigned int regno = REGNO (reg);
881
882 #ifdef CANNOT_CHANGE_MODE_CLASS
883 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
884 && GET_MODE_INNER (imode) == omode)
885 ;
886 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
887 return false;
888 #endif
889
890 return subreg_offset_representable_p (regno, imode, offset, omode);
891 }
892
893 /* For pseudo registers, we want most of the same checks. Namely:
894 If the register is no larger than a word, the subreg must be the lowpart.
895 If the register is larger than a word, the subreg must be the lowpart
896 of a subword. A subreg does *not* perform arbitrary bit extraction.
897 Given that we've already checked mode/offset alignment, we only have
898 to check subword subregs here. */
899 if (osize < UNITS_PER_WORD
900 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
901 {
902 machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
903 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
904 if (offset % UNITS_PER_WORD != low_off)
905 return false;
906 }
907 return true;
908 }
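
/* Some examples of the rules above, assuming a target with 4-byte
   words, 8-byte DImode and 8-byte DFmode:

     (subreg:SI (reg:DI) 0)  valid: aligned lowpart of a wider reg;
     (subreg:SI (reg:DF) 0)  accepted only via the word_mode escape
                             hatch at the top;
     (subreg:HI (reg:DF) 0)  invalid: float-involved subregs may not
                             change size (outside of LRA);
     (subreg:DI (reg:DF) 0)  valid: same-size float/int punning.  */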
909
910 rtx
911 gen_rtx_SUBREG (machine_mode mode, rtx reg, int offset)
912 {
913 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
914 return gen_rtx_raw_SUBREG (mode, reg, offset);
915 }
916
917 /* Generate a SUBREG representing the least-significant part of REG if MODE
918 is smaller than mode of REG, otherwise paradoxical SUBREG. */
919
920 rtx
921 gen_lowpart_SUBREG (machine_mode mode, rtx reg)
922 {
923 machine_mode inmode;
924
925 inmode = GET_MODE (reg);
926 if (inmode == VOIDmode)
927 inmode = mode;
928 return gen_rtx_SUBREG (mode, reg,
929 subreg_lowpart_offset (mode, inmode));
930 }
931
932 rtx
933 gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
934 enum var_init_status status)
935 {
936 rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
937 PAT_VAR_LOCATION_STATUS (x) = status;
938 return x;
939 }
940 \f
941
942 /* Create an rtvec and store within it the RTXen passed as arguments. */
943
944 rtvec
945 gen_rtvec (int n, ...)
946 {
947 int i;
948 rtvec rt_val;
949 va_list p;
950
951 va_start (p, n);
952
953 /* Don't allocate an empty rtvec... */
954 if (n == 0)
955 {
956 va_end (p);
957 return NULL_RTVEC;
958 }
959
960 rt_val = rtvec_alloc (n);
961
962 for (i = 0; i < n; i++)
963 rt_val->elem[i] = va_arg (p, rtx);
964
965 va_end (p);
966 return rt_val;
967 }
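
/* E.g. gen_rtvec (2, x, y) builds a two-element vector holding X and
   Y; a common use is constructing the body of a PARALLEL:

     rtx par = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set0, set1));

   where SET0 and SET1 stand for previously constructed SETs.  */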
968
969 rtvec
970 gen_rtvec_v (int n, rtx *argp)
971 {
972 int i;
973 rtvec rt_val;
974
975 /* Don't allocate an empty rtvec... */
976 if (n == 0)
977 return NULL_RTVEC;
978
979 rt_val = rtvec_alloc (n);
980
981 for (i = 0; i < n; i++)
982 rt_val->elem[i] = *argp++;
983
984 return rt_val;
985 }
986
987 rtvec
988 gen_rtvec_v (int n, rtx_insn **argp)
989 {
990 int i;
991 rtvec rt_val;
992
993 /* Don't allocate an empty rtvec... */
994 if (n == 0)
995 return NULL_RTVEC;
996
997 rt_val = rtvec_alloc (n);
998
999 for (i = 0; i < n; i++)
1000 rt_val->elem[i] = *argp++;
1001
1002 return rt_val;
1003 }
1004
1005 \f
1006 /* Return the number of bytes between the start of an OUTER_MODE
1007 in-memory value and the start of an INNER_MODE in-memory value,
1008 given that the former is a lowpart of the latter. It may be a
1009 paradoxical lowpart, in which case the offset will be negative
1010 on big-endian targets. */
1011
1012 int
1013 byte_lowpart_offset (machine_mode outer_mode,
1014 machine_mode inner_mode)
1015 {
1016 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
1017 return subreg_lowpart_offset (outer_mode, inner_mode);
1018 else
1019 return -subreg_lowpart_offset (inner_mode, outer_mode);
1020 }
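
/* For instance, the SImode lowpart of a DImode value in memory is at
   offset 0 on a little-endian target and at offset 4 on a big-endian
   one.  For the paradoxical direction (DImode "lowpart" of an SImode
   value) the sign flips, giving 0 and -4 respectively.  */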
1021 \f
1022 /* Generate a REG rtx for a new pseudo register of mode MODE.
1023 This pseudo is assigned the next sequential register number. */
1024
1025 rtx
1026 gen_reg_rtx (machine_mode mode)
1027 {
1028 rtx val;
1029 unsigned int align = GET_MODE_ALIGNMENT (mode);
1030
1031 gcc_assert (can_create_pseudo_p ());
1032
1033 /* If a virtual register with bigger mode alignment is generated,
1034 increase the stack alignment estimate, because it might be spilled
1035 to the stack later. */
1036 if (SUPPORTS_STACK_ALIGNMENT
1037 && crtl->stack_alignment_estimated < align
1038 && !crtl->stack_realign_processed)
1039 {
1040 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
1041 if (crtl->stack_alignment_estimated < min_align)
1042 crtl->stack_alignment_estimated = min_align;
1043 }
1044
1045 if (generating_concat_p
1046 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
1047 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
1048 {
1049 /* For complex modes, don't make a single pseudo.
1050 Instead, make a CONCAT of two pseudos.
1051 This allows noncontiguous allocation of the real and imaginary parts,
1052 which makes much better code. Besides, allocating DCmode
1053 pseudos overstrains reload on some machines like the 386. */
1054 rtx realpart, imagpart;
1055 machine_mode partmode = GET_MODE_INNER (mode);
1056
1057 realpart = gen_reg_rtx (partmode);
1058 imagpart = gen_reg_rtx (partmode);
1059 return gen_rtx_CONCAT (mode, realpart, imagpart);
1060 }
1061
1062 /* Do not call gen_reg_rtx with uninitialized crtl. */
1063 gcc_assert (crtl->emit.regno_pointer_align_length);
1064
1065 /* Make sure regno_pointer_align, and regno_reg_rtx are large
1066 enough to have an element for this pseudo reg number. */
1067
1068 if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
1069 {
1070 int old_size = crtl->emit.regno_pointer_align_length;
1071 char *tmp;
1072 rtx *new1;
1073
1074 tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
1075 memset (tmp + old_size, 0, old_size);
1076 crtl->emit.regno_pointer_align = (unsigned char *) tmp;
1077
1078 new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
1079 memset (new1 + old_size, 0, old_size * sizeof (rtx));
1080 regno_reg_rtx = new1;
1081
1082 crtl->emit.regno_pointer_align_length = old_size * 2;
1083 }
1084
1085 val = gen_raw_REG (mode, reg_rtx_no);
1086 regno_reg_rtx[reg_rtx_no++] = val;
1087 return val;
1088 }
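
/* So, while generating_concat_p holds during expansion, a request for
   a complex pseudo yields something like

     gen_reg_rtx (DCmode)
       => (concat:DC (reg:DF 100) (reg:DF 101))

   (the pseudo numbers are illustrative), letting the allocator place
   the real and imaginary parts independently.  */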
1089
1090 /* Return TRUE if REG's REG_EXPR is a PARM_DECL, FALSE otherwise. */
1091
1092 bool
1093 reg_is_parm_p (rtx reg)
1094 {
1095 tree decl;
1096
1097 gcc_assert (REG_P (reg));
1098 decl = REG_EXPR (reg);
1099 return (decl && TREE_CODE (decl) == PARM_DECL);
1100 }
1101
1102 /* Update NEW_RTX with the same attributes as REG, but with OFFSET added
1103 to the REG_OFFSET. */
1104
1105 static void
1106 update_reg_offset (rtx new_rtx, rtx reg, int offset)
1107 {
1108 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
1109 REG_OFFSET (reg) + offset);
1110 }
1111
1112 /* Generate a register with the same attributes as REG, but with OFFSET
1113 added to the REG_OFFSET. */
1114
1115 rtx
1116 gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
1117 int offset)
1118 {
1119 rtx new_rtx = gen_rtx_REG (mode, regno);
1120
1121 update_reg_offset (new_rtx, reg, offset);
1122 return new_rtx;
1123 }
1124
1125 /* Generate a new pseudo-register with the same attributes as REG, but
1126 with OFFSET added to the REG_OFFSET. */
1127
1128 rtx
1129 gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
1130 {
1131 rtx new_rtx = gen_reg_rtx (mode);
1132
1133 update_reg_offset (new_rtx, reg, offset);
1134 return new_rtx;
1135 }
1136
1137 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
1138 new register is a (possibly paradoxical) lowpart of the old one. */
1139
1140 void
1141 adjust_reg_mode (rtx reg, machine_mode mode)
1142 {
1143 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1144 PUT_MODE (reg, mode);
1145 }
1146
1147 /* Copy REG's attributes from X, if X has any attributes. If REG and X
1148 have different modes, REG is a (possibly paradoxical) lowpart of X. */
1149
1150 void
1151 set_reg_attrs_from_value (rtx reg, rtx x)
1152 {
1153 int offset;
1154 bool can_be_reg_pointer = true;
1155
1156 /* Don't call mark_reg_pointer for incompatible pointer sign
1157 extension. */
1158 while (GET_CODE (x) == SIGN_EXTEND
1159 || GET_CODE (x) == ZERO_EXTEND
1160 || GET_CODE (x) == TRUNCATE
1161 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1162 {
1163 #if defined(POINTERS_EXTEND_UNSIGNED)
1164 if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
1165 || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
1166 && !targetm.have_ptr_extend ())
1167 can_be_reg_pointer = false;
1168 #endif
1169 x = XEXP (x, 0);
1170 }
1171
1172 /* Hard registers can be reused for multiple purposes within the same
1173 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1174 on them is wrong. */
1175 if (HARD_REGISTER_P (reg))
1176 return;
1177
1178 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
1179 if (MEM_P (x))
1180 {
1181 if (MEM_OFFSET_KNOWN_P (x))
1182 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1183 MEM_OFFSET (x) + offset);
1184 if (can_be_reg_pointer && MEM_POINTER (x))
1185 mark_reg_pointer (reg, 0);
1186 }
1187 else if (REG_P (x))
1188 {
1189 if (REG_ATTRS (x))
1190 update_reg_offset (reg, x, offset);
1191 if (can_be_reg_pointer && REG_POINTER (x))
1192 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1193 }
1194 }
1195
1196 /* Generate a REG rtx for a new pseudo register, copying the mode
1197 and attributes from X. */
1198
1199 rtx
1200 gen_reg_rtx_and_attrs (rtx x)
1201 {
1202 rtx reg = gen_reg_rtx (GET_MODE (x));
1203 set_reg_attrs_from_value (reg, x);
1204 return reg;
1205 }
1206
1207 /* Set the register attributes for registers contained in PARM_RTX.
1208 Use needed values from memory attributes of MEM. */
1209
1210 void
1211 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1212 {
1213 if (REG_P (parm_rtx))
1214 set_reg_attrs_from_value (parm_rtx, mem);
1215 else if (GET_CODE (parm_rtx) == PARALLEL)
1216 {
1217 /* Check for a NULL entry in the first slot, used to indicate that the
1218 parameter goes both on the stack and in registers. */
1219 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1220 for (; i < XVECLEN (parm_rtx, 0); i++)
1221 {
1222 rtx x = XVECEXP (parm_rtx, 0, i);
1223 if (REG_P (XEXP (x, 0)))
1224 REG_ATTRS (XEXP (x, 0))
1225 = get_reg_attrs (MEM_EXPR (mem),
1226 INTVAL (XEXP (x, 1)));
1227 }
1228 }
1229 }
1230
1231 /* Set the REG_ATTRS for registers in value X, given that X represents
1232 decl T. */
1233
1234 void
1235 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1236 {
1237 if (!t)
1238 return;
1239 tree tdecl = t;
1240 if (GET_CODE (x) == SUBREG)
1241 {
1242 gcc_assert (subreg_lowpart_p (x));
1243 x = SUBREG_REG (x);
1244 }
1245 if (REG_P (x))
1246 REG_ATTRS (x)
1247 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1248 DECL_P (tdecl)
1249 ? DECL_MODE (tdecl)
1250 : TYPE_MODE (TREE_TYPE (tdecl))));
1251 if (GET_CODE (x) == CONCAT)
1252 {
1253 if (REG_P (XEXP (x, 0)))
1254 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1255 if (REG_P (XEXP (x, 1)))
1256 REG_ATTRS (XEXP (x, 1))
1257 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1258 }
1259 if (GET_CODE (x) == PARALLEL)
1260 {
1261 int i, start;
1262
1263 /* Check for a NULL entry, used to indicate that the parameter goes
1264 both on the stack and in registers. */
1265 if (XEXP (XVECEXP (x, 0, 0), 0))
1266 start = 0;
1267 else
1268 start = 1;
1269
1270 for (i = start; i < XVECLEN (x, 0); i++)
1271 {
1272 rtx y = XVECEXP (x, 0, i);
1273 if (REG_P (XEXP (y, 0)))
1274 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1275 }
1276 }
1277 }
1278
1279 /* Assign the RTX X to declaration T. */
1280
1281 void
1282 set_decl_rtl (tree t, rtx x)
1283 {
1284 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1285 if (x)
1286 set_reg_attrs_for_decl_rtl (t, x);
1287 }
1288
1289 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1290 if the ABI requires the parameter to be passed by reference. */
1291
1292 void
1293 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1294 {
1295 DECL_INCOMING_RTL (t) = x;
1296 if (x && !by_reference_p)
1297 set_reg_attrs_for_decl_rtl (t, x);
1298 }
1299
1300 /* Identify REG (which may be a CONCAT) as a user register. */
1301
1302 void
1303 mark_user_reg (rtx reg)
1304 {
1305 if (GET_CODE (reg) == CONCAT)
1306 {
1307 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1308 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1309 }
1310 else
1311 {
1312 gcc_assert (REG_P (reg));
1313 REG_USERVAR_P (reg) = 1;
1314 }
1315 }
1316
1317 /* Identify REG as a probable pointer register and show its alignment
1318 as ALIGN, if nonzero. */
1319
1320 void
1321 mark_reg_pointer (rtx reg, int align)
1322 {
1323 if (! REG_POINTER (reg))
1324 {
1325 REG_POINTER (reg) = 1;
1326
1327 if (align)
1328 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1329 }
1330 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1331 /* We can no longer be sure just how aligned this pointer is. */
1332 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1333 }
1334
1335 /* Return 1 plus largest pseudo reg number used in the current function. */
1336
1337 int
1338 max_reg_num (void)
1339 {
1340 return reg_rtx_no;
1341 }
1342
1343 /* Return 1 + the largest label number used so far in the current function. */
1344
1345 int
1346 max_label_num (void)
1347 {
1348 return label_num;
1349 }
1350
1351 /* Return first label number used in this function (if any were used). */
1352
1353 int
1354 get_first_label_num (void)
1355 {
1356 return first_label_num;
1357 }
1358
1359 /* If the rtx for label was created during the expansion of a nested
1360 function, then first_label_num won't include this label number.
1361 Fix this now so that array indices work later. */
1362
1363 void
1364 maybe_set_first_label_num (rtx x)
1365 {
1366 if (CODE_LABEL_NUMBER (x) < first_label_num)
1367 first_label_num = CODE_LABEL_NUMBER (x);
1368 }
1369 \f
1370 /* Return a value representing some low-order bits of X, where the number
1371 of low-order bits is given by MODE. Note that no conversion is done
1372 between floating-point and fixed-point values; rather, the bit
1373 representation is returned.
1374
1375 This function handles the cases in common between gen_lowpart, below,
1376 and two variants in cse.c and combine.c. These are the cases that can
1377 be safely handled at all points in the compilation.
1378
1379 If this is not a case we can handle, return 0. */
1380
1381 rtx
1382 gen_lowpart_common (machine_mode mode, rtx x)
1383 {
1384 int msize = GET_MODE_SIZE (mode);
1385 int xsize;
1386 machine_mode innermode;
1387
1388 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1389 so we have to make one up. Yuk. */
1390 innermode = GET_MODE (x);
1391 if (CONST_INT_P (x)
1392 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1393 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1394 else if (innermode == VOIDmode)
1395 innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);
1396
1397 xsize = GET_MODE_SIZE (innermode);
1398
1399 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1400
1401 if (innermode == mode)
1402 return x;
1403
1404 /* MODE must occupy no more words than the mode of X. */
1405 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1406 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1407 return 0;
1408
1409 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1410 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
1411 return 0;
1412
1413 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1414 && (GET_MODE_CLASS (mode) == MODE_INT
1415 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1416 {
1417 /* If we are getting the low-order part of something that has been
1418 sign- or zero-extended, we can either just use the object being
1419 extended or make a narrower extension. If we want an even smaller
1420 piece than the size of the object being extended, call ourselves
1421 recursively.
1422
1423 This case is used mostly by combine and cse. */
1424
1425 if (GET_MODE (XEXP (x, 0)) == mode)
1426 return XEXP (x, 0);
1427 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1428 return gen_lowpart_common (mode, XEXP (x, 0));
1429 else if (msize < xsize)
1430 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1431 }
1432 else if (GET_CODE (x) == SUBREG || REG_P (x)
1433 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1434 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
1435 return lowpart_subreg (mode, x, innermode);
1436
1437 /* Otherwise, we can't do this. */
1438 return 0;
1439 }
1440 \f
1441 rtx
1442 gen_highpart (machine_mode mode, rtx x)
1443 {
1444 unsigned int msize = GET_MODE_SIZE (mode);
1445 rtx result;
1446
1447 /* This case loses if X is a subreg. To catch bugs early,
1448 complain if an invalid MODE is used even in other cases. */
1449 gcc_assert (msize <= UNITS_PER_WORD
1450 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1451
1452 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1453 subreg_highpart_offset (mode, GET_MODE (x)));
1454 gcc_assert (result);
1455
1456 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1457 the target if we have a MEM. gen_highpart must return a valid operand,
1458 emitting code if necessary to do so. */
1459 if (MEM_P (result))
1460 {
1461 result = validize_mem (result);
1462 gcc_assert (result);
1463 }
1464
1465 return result;
1466 }
1467
1468 /* Like gen_highpart, but accept the mode of the EXP operand in case EXP
1469 can be a VOIDmode constant. */
1470 rtx
1471 gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
1472 {
1473 if (GET_MODE (exp) != VOIDmode)
1474 {
1475 gcc_assert (GET_MODE (exp) == innermode);
1476 return gen_highpart (outermode, exp);
1477 }
1478 return simplify_gen_subreg (outermode, exp, innermode,
1479 subreg_highpart_offset (outermode, innermode));
1480 }
1481
1482 /* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
1483
1484 unsigned int
1485 subreg_lowpart_offset (machine_mode outermode, machine_mode innermode)
1486 {
1487 unsigned int offset = 0;
1488 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1489
1490 if (difference > 0)
1491 {
1492 if (WORDS_BIG_ENDIAN)
1493 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1494 if (BYTES_BIG_ENDIAN)
1495 offset += difference % UNITS_PER_WORD;
1496 }
1497
1498 return offset;
1499 }
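
/* Worked example: INNERMODE DImode, OUTERMODE HImode on a big-endian
   target with 4-byte words.  difference = 8 - 2 = 6; the word part
   contributes (6 / 4) * 4 = 4 and the byte part 6 % 4 = 2, giving
   offset 6, i.e. the last two bytes of the value, as expected for
   big-endian.  On a little-endian target the offset is simply 0.  */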
1500
1501 /* Return the offset in bytes needed to get the OUTERMODE high part
1502 of a value in mode INNERMODE stored in memory in target format. */
1503 unsigned int
1504 subreg_highpart_offset (machine_mode outermode, machine_mode innermode)
1505 {
1506 unsigned int offset = 0;
1507 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1508
1509 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1510
1511 if (difference > 0)
1512 {
1513 if (! WORDS_BIG_ENDIAN)
1514 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1515 if (! BYTES_BIG_ENDIAN)
1516 offset += difference % UNITS_PER_WORD;
1517 }
1518
1519 return offset;
1520 }
1521
1522 /* Return 1 iff X, assumed to be a SUBREG,
1523 refers to the least significant part of its containing reg.
1524 If X is not a SUBREG, always return 1 (it is its own low part!). */
1525
1526 int
1527 subreg_lowpart_p (const_rtx x)
1528 {
1529 if (GET_CODE (x) != SUBREG)
1530 return 1;
1531 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1532 return 0;
1533
1534 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1535 == SUBREG_BYTE (x));
1536 }
1537
1538 /* Return true if X is a paradoxical subreg, false otherwise. */
1539 bool
1540 paradoxical_subreg_p (const_rtx x)
1541 {
1542 if (GET_CODE (x) != SUBREG)
1543 return false;
1544 return (GET_MODE_PRECISION (GET_MODE (x))
1545 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
1546 }
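
/* E.g. (subreg:DI (reg:SI 100) 0) is paradoxical: the outer mode is
   wider than the inner one, and the extra bits are in general
   undefined.  (subreg:SI (reg:DI 100) 0) is an ordinary lowpart and
   is not.  */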
1547 \f
1548 /* Return subword OFFSET of operand OP.
1549 The word number, OFFSET, is interpreted as the word number starting
1550 at the low-order address. OFFSET 0 is the low-order word if not
1551 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1552
1553 If we cannot extract the required word, we return zero. Otherwise,
1554 an rtx corresponding to the requested word will be returned.
1555
1556 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1557 reload has completed, a valid address will always be returned. After
1558 reload, if a valid address cannot be returned, we return zero.
1559
1560 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1561 it is the responsibility of the caller.
1562
1563 MODE is the mode of OP in case it is a CONST_INT.
1564
1565 ??? This is still rather broken for some cases. The problem for the
1566 moment is that all callers of this thing provide no 'goal mode' to
1567 tell us to work with. This exists because all callers were written
1568 in a word based SUBREG world.
1569 Now use of this function can be deprecated by simplify_subreg in most
1570 cases.
1571 */
1572
1573 rtx
1574 operand_subword (rtx op, unsigned int offset, int validate_address, machine_mode mode)
1575 {
1576 if (mode == VOIDmode)
1577 mode = GET_MODE (op);
1578
1579 gcc_assert (mode != VOIDmode);
1580
1581 /* If OP is narrower than a word, fail. */
1582 if (mode != BLKmode
1583 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1584 return 0;
1585
1586 /* If we want a word outside OP, return zero. */
1587 if (mode != BLKmode
1588 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1589 return const0_rtx;
1590
1591 /* Form a new MEM at the requested address. */
1592 if (MEM_P (op))
1593 {
1594 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1595
1596 if (! validate_address)
1597 return new_rtx;
1598
1599 else if (reload_completed)
1600 {
1601 if (! strict_memory_address_addr_space_p (word_mode,
1602 XEXP (new_rtx, 0),
1603 MEM_ADDR_SPACE (op)))
1604 return 0;
1605 }
1606 else
1607 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1608 }
1609
1610 /* Rest can be handled by simplify_subreg. */
1611 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1612 }
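
/* For example, on a target with 4-byte words, word 1 of a DImode
   pseudo is obtained as

     operand_subword (op, 1, 0, DImode)
       => (subreg:SI (reg:DI ...) 4)

   via the simplify_gen_subreg call above, while for a MEM the address
   is instead adjusted by OFFSET * UNITS_PER_WORD.  */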
1613
1614 /* Similar to `operand_subword', but never return 0. If we can't
1615 extract the required subword, put OP into a register and try again.
1616 The second attempt must succeed. We always validate the address in
1617 this case.
1618
1619 MODE is the mode of OP, in case it is CONST_INT. */
1620
1621 rtx
1622 operand_subword_force (rtx op, unsigned int offset, machine_mode mode)
1623 {
1624 rtx result = operand_subword (op, offset, 1, mode);
1625
1626 if (result)
1627 return result;
1628
1629 if (mode != BLKmode && mode != VOIDmode)
1630 {
1631 /* If this is a register which cannot be accessed by words, copy it
1632 to a pseudo register. */
1633 if (REG_P (op))
1634 op = copy_to_reg (op);
1635 else
1636 op = force_reg (mode, op);
1637 }
1638
1639 result = operand_subword (op, offset, 1, mode);
1640 gcc_assert (result);
1641
1642 return result;
1643 }
1644 \f
1645 /* Returns 1 if the two MEM_EXPRs can be considered equal
1646 and 0 otherwise. */
1647
1648 int
1649 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1650 {
1651 if (expr1 == expr2)
1652 return 1;
1653
1654 if (! expr1 || ! expr2)
1655 return 0;
1656
1657 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1658 return 0;
1659
1660 return operand_equal_p (expr1, expr2, 0);
1661 }
1662
1663 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1664 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1665 -1 if not known. */
1666
1667 int
1668 get_mem_align_offset (rtx mem, unsigned int align)
1669 {
1670 tree expr;
1671 unsigned HOST_WIDE_INT offset;
1672
1673 /* This function can't use
1674 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1675 || (MAX (MEM_ALIGN (mem),
1676 MAX (align, get_object_alignment (MEM_EXPR (mem))))
1677 < align))
1678 return -1;
1679 else
1680 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1681 for two reasons:
1682 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1683 for <variable>. get_inner_reference doesn't handle it and
1684 even if it did, the alignment in that case needs to be determined
1685 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1686 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1687 isn't sufficiently aligned, the object it is in might be. */
1688 gcc_assert (MEM_P (mem));
1689 expr = MEM_EXPR (mem);
1690 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1691 return -1;
1692
1693 offset = MEM_OFFSET (mem);
1694 if (DECL_P (expr))
1695 {
1696 if (DECL_ALIGN (expr) < align)
1697 return -1;
1698 }
1699 else if (INDIRECT_REF_P (expr))
1700 {
1701 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1702 return -1;
1703 }
1704 else if (TREE_CODE (expr) == COMPONENT_REF)
1705 {
1706 while (1)
1707 {
1708 tree inner = TREE_OPERAND (expr, 0);
1709 tree field = TREE_OPERAND (expr, 1);
1710 tree byte_offset = component_ref_field_offset (expr);
1711 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1712
1713 if (!byte_offset
1714 || !tree_fits_uhwi_p (byte_offset)
1715 || !tree_fits_uhwi_p (bit_offset))
1716 return -1;
1717
1718 offset += tree_to_uhwi (byte_offset);
1719 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1720
1721 if (inner == NULL_TREE)
1722 {
1723 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1724 < (unsigned int) align)
1725 return -1;
1726 break;
1727 }
1728 else if (DECL_P (inner))
1729 {
1730 if (DECL_ALIGN (inner) < align)
1731 return -1;
1732 break;
1733 }
1734 else if (TREE_CODE (inner) != COMPONENT_REF)
1735 return -1;
1736 expr = inner;
1737 }
1738 }
1739 else
1740 return -1;
1741
1742 return offset & ((align / BITS_PER_UNIT) - 1);
1743 }
1744
1745 /* Given REF (a MEM) and T, either the type of X or the expression
1746 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1747 if we are making a new object of this type. BITPOS is nonzero if
1748 there is an offset outstanding on T that will be applied later. */
1749
1750 void
1751 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1752 HOST_WIDE_INT bitpos)
1753 {
1754 HOST_WIDE_INT apply_bitpos = 0;
1755 tree type;
1756 struct mem_attrs attrs, *defattrs, *refattrs;
1757 addr_space_t as;
1758
1759 /* It can happen that type_for_mode was given a mode for which there
1760 is no language-level type, in which case it returns NULL, which
1761 we can see here. */
1762 if (t == NULL_TREE)
1763 return;
1764
1765 type = TYPE_P (t) ? t : TREE_TYPE (t);
1766 if (type == error_mark_node)
1767 return;
1768
1769 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1770 wrong answer, as it assumes that DECL_RTL already has the right alias
1771 info. Callers should not set DECL_RTL until after the call to
1772 set_mem_attributes. */
1773 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1774
1775 memset (&attrs, 0, sizeof (attrs));
1776
1777 /* Get the alias set from the expression or type (perhaps using a
1778 front-end routine) and use it. */
1779 attrs.alias = get_alias_set (t);
1780
1781 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1782 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1783
1784 /* Default values from pre-existing memory attributes if present. */
1785 refattrs = MEM_ATTRS (ref);
1786 if (refattrs)
1787 {
1788 /* ??? Can this ever happen? Calling this routine on a MEM that
1789 already carries memory attributes should probably be invalid. */
1790 attrs.expr = refattrs->expr;
1791 attrs.offset_known_p = refattrs->offset_known_p;
1792 attrs.offset = refattrs->offset;
1793 attrs.size_known_p = refattrs->size_known_p;
1794 attrs.size = refattrs->size;
1795 attrs.align = refattrs->align;
1796 }
1797
1798 /* Otherwise, default values from the mode of the MEM reference. */
1799 else
1800 {
1801 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1802 gcc_assert (!defattrs->expr);
1803 gcc_assert (!defattrs->offset_known_p);
1804
1805 /* Respect mode size. */
1806 attrs.size_known_p = defattrs->size_known_p;
1807 attrs.size = defattrs->size;
1808 /* ??? Is this really necessary? We probably should always get
1809 the size from the type below. */
1810
1811 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1812 if T is an object, always compute the object alignment below. */
1813 if (TYPE_P (t))
1814 attrs.align = defattrs->align;
1815 else
1816 attrs.align = BITS_PER_UNIT;
1817 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1818 e.g. if the type carries an alignment attribute. Should we be
1819 able to simply always use TYPE_ALIGN? */
1820 }
1821
1822 /* We can set the alignment from the type if we are making an object,
1823 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1824 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1825 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1826
1827 /* If the size is known, we can set that. */
1828 tree new_size = TYPE_SIZE_UNIT (type);
1829
1830 /* The address-space is that of the type. */
1831 as = TYPE_ADDR_SPACE (type);
1832
1833 /* If T is not a type, we may be able to deduce some more information about
1834 the expression. */
1835 if (! TYPE_P (t))
1836 {
1837 tree base;
1838
1839 if (TREE_THIS_VOLATILE (t))
1840 MEM_VOLATILE_P (ref) = 1;
1841
1842 /* Now remove any conversions: they don't change what the underlying
1843 object is. Likewise for SAVE_EXPR. */
1844 while (CONVERT_EXPR_P (t)
1845 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1846 || TREE_CODE (t) == SAVE_EXPR)
1847 t = TREE_OPERAND (t, 0);
1848
1849 /* Note whether this expression can trap. */
1850 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1851
1852 base = get_base_address (t);
1853 if (base)
1854 {
1855 if (DECL_P (base)
1856 && TREE_READONLY (base)
1857 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1858 && !TREE_THIS_VOLATILE (base))
1859 MEM_READONLY_P (ref) = 1;
1860
1861 /* Mark static const strings readonly as well. */
1862 if (TREE_CODE (base) == STRING_CST
1863 && TREE_READONLY (base)
1864 && TREE_STATIC (base))
1865 MEM_READONLY_P (ref) = 1;
1866
1867 /* Address-space information is on the base object. */
1868 if (TREE_CODE (base) == MEM_REF
1869 || TREE_CODE (base) == TARGET_MEM_REF)
1870 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1871 0))));
1872 else
1873 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1874 }
1875
1876 /* If this expression uses its parent's alias set, mark it such
1877 that we won't change it. */
1878 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
1879 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1880
1881 /* If this is a decl, set the attributes of the MEM from it. */
1882 if (DECL_P (t))
1883 {
1884 attrs.expr = t;
1885 attrs.offset_known_p = true;
1886 attrs.offset = 0;
1887 apply_bitpos = bitpos;
1888 new_size = DECL_SIZE_UNIT (t);
1889 }
1890
1891 /* ??? If we end up with a constant here do record a MEM_EXPR. */
1892 else if (CONSTANT_CLASS_P (t))
1893 ;
1894
1895 /* If this is a field reference, record it. */
1896 else if (TREE_CODE (t) == COMPONENT_REF)
1897 {
1898 attrs.expr = t;
1899 attrs.offset_known_p = true;
1900 attrs.offset = 0;
1901 apply_bitpos = bitpos;
1902 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1903 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
1904 }
1905
1906 /* If this is an array reference, look for an outer field reference. */
1907 else if (TREE_CODE (t) == ARRAY_REF)
1908 {
1909 tree off_tree = size_zero_node;
1910 /* We can't modify t, because we use it at the end of the
1911 function. */
1912 tree t2 = t;
1913
1914 do
1915 {
1916 tree index = TREE_OPERAND (t2, 1);
1917 tree low_bound = array_ref_low_bound (t2);
1918 tree unit_size = array_ref_element_size (t2);
1919
1920 /* We assume all arrays have sizes that are a multiple of a byte.
1921 First subtract the lower bound, if any, in the type of the
1922 index, then convert to sizetype and multiply by the size of
1923 the array element. */
1924 if (! integer_zerop (low_bound))
1925 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1926 index, low_bound);
1927
1928 off_tree = size_binop (PLUS_EXPR,
1929 size_binop (MULT_EXPR,
1930 fold_convert (sizetype,
1931 index),
1932 unit_size),
1933 off_tree);
1934 t2 = TREE_OPERAND (t2, 0);
1935 }
1936 while (TREE_CODE (t2) == ARRAY_REF);
1937
1938 if (DECL_P (t2)
1939 || TREE_CODE (t2) == COMPONENT_REF)
1940 {
1941 attrs.expr = t2;
1942 attrs.offset_known_p = false;
1943 if (tree_fits_uhwi_p (off_tree))
1944 {
1945 attrs.offset_known_p = true;
1946 attrs.offset = tree_to_uhwi (off_tree);
1947 apply_bitpos = bitpos;
1948 }
1949 }
1950 /* Else do not record a MEM_EXPR. */
1951 }
1952
1953 /* If this is an indirect reference, record it. */
1954 else if (TREE_CODE (t) == MEM_REF
1955 || TREE_CODE (t) == TARGET_MEM_REF)
1956 {
1957 attrs.expr = t;
1958 attrs.offset_known_p = true;
1959 attrs.offset = 0;
1960 apply_bitpos = bitpos;
1961 }
1962
1963 /* Compute the alignment. */
1964 unsigned int obj_align;
1965 unsigned HOST_WIDE_INT obj_bitpos;
1966 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1967 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1968 if (obj_bitpos != 0)
1969 obj_align = (obj_bitpos & -obj_bitpos);
1970 attrs.align = MAX (attrs.align, obj_align);
1971 }
1972
1973 if (tree_fits_uhwi_p (new_size))
1974 {
1975 attrs.size_known_p = true;
1976 attrs.size = tree_to_uhwi (new_size);
1977 }
1978
1979 /* If we modified OFFSET based on T, then subtract the outstanding
1980 bit position offset. Similarly, increase the size of the accessed
1981 object to contain the negative offset. */
1982 if (apply_bitpos)
1983 {
1984 gcc_assert (attrs.offset_known_p);
1985 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1986 if (attrs.size_known_p)
1987 attrs.size += apply_bitpos / BITS_PER_UNIT;
1988 }
1989
1990 /* Now set the attributes we computed above. */
1991 attrs.addrspace = as;
1992 set_mem_attrs (ref, &attrs);
1993 }
1994
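/* Like set_mem_attributes_minus_bitpos, but with no outstanding bit
   position (BITPOS == 0). */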
1995 void
1996 set_mem_attributes (rtx ref, tree t, int objectp)
1997 {
1998 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1999 }
2000
2001 /* Set the alias set of MEM to SET. */
2002
2003 void
2004 set_mem_alias_set (rtx mem, alias_set_type set)
2005 {
2006 struct mem_attrs attrs;
2007
2008 /* If the new and old alias sets don't conflict, something is wrong. */
2009 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
2010 attrs = *get_mem_attrs (mem);
2011 attrs.alias = set;
2012 set_mem_attrs (mem, &attrs);
2013 }
2014
2015 /* Set the address space of MEM to ADDRSPACE (target-defined). */
2016
2017 void
2018 set_mem_addr_space (rtx mem, addr_space_t addrspace)
2019 {
2020 struct mem_attrs attrs;
2021
2022 attrs = *get_mem_attrs (mem);
2023 attrs.addrspace = addrspace;
2024 set_mem_attrs (mem, &attrs);
2025 }
2026
2027 /* Set the alignment of MEM to ALIGN bits. */
2028
2029 void
2030 set_mem_align (rtx mem, unsigned int align)
2031 {
2032 struct mem_attrs attrs;
2033
2034 attrs = *get_mem_attrs (mem);
2035 attrs.align = align;
2036 set_mem_attrs (mem, &attrs);
2037 }
2038
2039 /* Set the expr for MEM to EXPR. */
2040
2041 void
2042 set_mem_expr (rtx mem, tree expr)
2043 {
2044 struct mem_attrs attrs;
2045
2046 attrs = *get_mem_attrs (mem);
2047 attrs.expr = expr;
2048 set_mem_attrs (mem, &attrs);
2049 }
2050
2051 /* Set the offset of MEM to OFFSET. */
2052
2053 void
2054 set_mem_offset (rtx mem, HOST_WIDE_INT offset)
2055 {
2056 struct mem_attrs attrs;
2057
2058 attrs = *get_mem_attrs (mem);
2059 attrs.offset_known_p = true;
2060 attrs.offset = offset;
2061 set_mem_attrs (mem, &attrs);
2062 }
2063
2064 /* Clear the offset of MEM. */
2065
2066 void
2067 clear_mem_offset (rtx mem)
2068 {
2069 struct mem_attrs attrs;
2070
2071 attrs = *get_mem_attrs (mem);
2072 attrs.offset_known_p = false;
2073 set_mem_attrs (mem, &attrs);
2074 }
2075
2076 /* Set the size of MEM to SIZE. */
2077
2078 void
2079 set_mem_size (rtx mem, HOST_WIDE_INT size)
2080 {
2081 struct mem_attrs attrs;
2082
2083 attrs = *get_mem_attrs (mem);
2084 attrs.size_known_p = true;
2085 attrs.size = size;
2086 set_mem_attrs (mem, &attrs);
2087 }
2088
2089 /* Clear the size of MEM. */
2090
2091 void
2092 clear_mem_size (rtx mem)
2093 {
2094 struct mem_attrs attrs;
2095
2096 attrs = *get_mem_attrs (mem);
2097 attrs.size_known_p = false;
2098 set_mem_attrs (mem, &attrs);
2099 }
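
/* For example (purely illustrative), a caller that has proven a
   reference MEM covers exactly 16 bytes on a 64-bit boundary might do:

       set_mem_size (mem, 16);
       set_mem_align (mem, 64);

   Each setter above copies the current attribute block, updates one
   field and re-installs the result with set_mem_attrs, so the shared
   attribute structures themselves are never modified in place.  */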
2100 \f
2101 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2102 and its address changed to ADDR. (VOIDmode means don't change the mode.
2103 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2104 returned memory location is required to be valid. INPLACE is true if any
2105 changes can be made directly to MEMREF or false if MEMREF must be treated
2106 as immutable.
2107
2108 The memory attributes are not changed. */
2109
2110 static rtx
2111 change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
2112 bool inplace)
2113 {
2114 addr_space_t as;
2115 rtx new_rtx;
2116
2117 gcc_assert (MEM_P (memref));
2118 as = MEM_ADDR_SPACE (memref);
2119 if (mode == VOIDmode)
2120 mode = GET_MODE (memref);
2121 if (addr == 0)
2122 addr = XEXP (memref, 0);
2123 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2124 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2125 return memref;
2126
2127 /* Don't validate the address for LRA. LRA can make the address valid
2128 by itself in the most efficient way. */
2129 if (validate && !lra_in_progress)
2130 {
2131 if (reload_in_progress || reload_completed)
2132 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2133 else
2134 addr = memory_address_addr_space (mode, addr, as);
2135 }
2136
2137 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2138 return memref;
2139
2140 if (inplace)
2141 {
2142 XEXP (memref, 0) = addr;
2143 return memref;
2144 }
2145
2146 new_rtx = gen_rtx_MEM (mode, addr);
2147 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2148 return new_rtx;
2149 }
2150
2151 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2152 way we are changing MEMREF, so we only preserve the alias set. */
2153
2154 rtx
2155 change_address (rtx memref, machine_mode mode, rtx addr)
2156 {
2157 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2158 machine_mode mmode = GET_MODE (new_rtx);
2159 struct mem_attrs attrs, *defattrs;
2160
2161 attrs = *get_mem_attrs (memref);
2162 defattrs = mode_mem_attrs[(int) mmode];
2163 attrs.expr = NULL_TREE;
2164 attrs.offset_known_p = false;
2165 attrs.size_known_p = defattrs->size_known_p;
2166 attrs.size = defattrs->size;
2167 attrs.align = defattrs->align;
2168
2169 /* If there are no changes, just return the original memory reference. */
2170 if (new_rtx == memref)
2171 {
2172 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2173 return new_rtx;
2174
2175 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2176 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2177 }
2178
2179 set_mem_attrs (new_rtx, &attrs);
2180 return new_rtx;
2181 }
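
/* For instance (illustrative only), rereading OLD_MEM as a word at a
   freshly computed address held in REG could be written as:

       rtx new_mem = change_address (old_mem, word_mode, reg);

   Only the alias set of OLD_MEM survives; the expr and offset are
   cleared and the size and alignment are refetched from word_mode.  */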
2182
2183 /* Return a memory reference like MEMREF, but with its mode changed
2184 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2185 nonzero, the memory address is forced to be valid.
2186 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2187 and the caller is responsible for adjusting MEMREF base register.
2188 If ADJUST_OBJECT is zero, the underlying object associated with the
2189 memory reference is left unchanged and the caller is responsible for
2190 dealing with it. Otherwise, if the new memory reference is outside
2191 the underlying object, even partially, then the object is dropped.
2192 SIZE, if nonzero, is the size of an access in cases where MODE
2193 has no inherent size. */
2194
2195 rtx
2196 adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset,
2197 int validate, int adjust_address, int adjust_object,
2198 HOST_WIDE_INT size)
2199 {
2200 rtx addr = XEXP (memref, 0);
2201 rtx new_rtx;
2202 machine_mode address_mode;
2203 int pbits;
2204 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
2205 unsigned HOST_WIDE_INT max_align;
2206 #ifdef POINTERS_EXTEND_UNSIGNED
2207 machine_mode pointer_mode
2208 = targetm.addr_space.pointer_mode (attrs.addrspace);
2209 #endif
2210
2211 /* VOIDmode means no mode change for change_address_1. */
2212 if (mode == VOIDmode)
2213 mode = GET_MODE (memref);
2214
2215 /* Take the size of non-BLKmode accesses from the mode. */
2216 defattrs = mode_mem_attrs[(int) mode];
2217 if (defattrs->size_known_p)
2218 size = defattrs->size;
2219
2220 /* If there are no changes, just return the original memory reference. */
2221 if (mode == GET_MODE (memref) && !offset
2222 && (size == 0 || (attrs.size_known_p && attrs.size == size))
2223 && (!validate || memory_address_addr_space_p (mode, addr,
2224 attrs.addrspace)))
2225 return memref;
2226
2227 /* ??? Prefer to create garbage instead of creating shared rtl.
2228 This may happen even if offset is nonzero -- consider
2229 (plus (plus reg reg) const_int) -- so do this always. */
2230 addr = copy_rtx (addr);
2231
2232 /* Convert a possibly large offset to a signed value within the
2233 range of the target address space. */
2234 address_mode = get_address_mode (memref);
2235 pbits = GET_MODE_BITSIZE (address_mode);
2236 if (HOST_BITS_PER_WIDE_INT > pbits)
2237 {
2238 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2239 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2240 >> shift);
2241 }
2242
2243 if (adjust_address)
2244 {
2245 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2246 object, we can merge it into the LO_SUM. */
2247 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2248 && offset >= 0
2249 && (unsigned HOST_WIDE_INT) offset
2250 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2251 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2252 plus_constant (address_mode,
2253 XEXP (addr, 1), offset));
2254 #ifdef POINTERS_EXTEND_UNSIGNED
2255 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2256 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2257 the fact that pointers are not allowed to overflow. */
2258 else if (POINTERS_EXTEND_UNSIGNED > 0
2259 && GET_CODE (addr) == ZERO_EXTEND
2260 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2261 && trunc_int_for_mode (offset, pointer_mode) == offset)
2262 addr = gen_rtx_ZERO_EXTEND (address_mode,
2263 plus_constant (pointer_mode,
2264 XEXP (addr, 0), offset));
2265 #endif
2266 else
2267 addr = plus_constant (address_mode, addr, offset);
2268 }
2269
2270 new_rtx = change_address_1 (memref, mode, addr, validate, false);
2271
2272 /* If the address is a REG, change_address_1 rightfully returns memref,
2273 but this would destroy memref's MEM_ATTRS. */
2274 if (new_rtx == memref && offset != 0)
2275 new_rtx = copy_rtx (new_rtx);
2276
2277 /* Conservatively drop the object if we don't know where we start from. */
2278 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2279 {
2280 attrs.expr = NULL_TREE;
2281 attrs.alias = 0;
2282 }
2283
2284 /* Compute the new values of the memory attributes due to this adjustment.
2285 We add the offsets and update the alignment. */
2286 if (attrs.offset_known_p)
2287 {
2288 attrs.offset += offset;
2289
2290 /* Drop the object if the new left end is not within its bounds. */
2291 if (adjust_object && attrs.offset < 0)
2292 {
2293 attrs.expr = NULL_TREE;
2294 attrs.alias = 0;
2295 }
2296 }
2297
2298 /* Compute the new alignment by taking the MIN of the alignment and the
2299 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2300 is zero. */
2301 if (offset != 0)
2302 {
2303 max_align = (offset & -offset) * BITS_PER_UNIT;
2304 attrs.align = MIN (attrs.align, max_align);
2305 }
2306
2307 if (size)
2308 {
2309 /* Drop the object if the new right end is not within its bounds. */
2310 if (adjust_object && (offset + size) > attrs.size)
2311 {
2312 attrs.expr = NULL_TREE;
2313 attrs.alias = 0;
2314 }
2315 attrs.size_known_p = true;
2316 attrs.size = size;
2317 }
2318 else if (attrs.size_known_p)
2319 {
2320 gcc_assert (!adjust_object);
2321 attrs.size -= offset;
2322 /* ??? The store_by_pieces machinery generates negative sizes,
2323 so don't assert for that here. */
2324 }
2325
2326 set_mem_attrs (new_rtx, &attrs);
2327
2328 return new_rtx;
2329 }
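
/* Callers normally reach this function through the adjust_address and
   adjust_address_nv macros (see expr.h); e.g., illustratively:

       rtx word1 = adjust_address (mem, SImode, 4);

   yields an SImode reference 4 bytes past MEM, with the offset, size
   and alignment attributes updated as described above.  */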
2330
2331 /* Return a memory reference like MEMREF, but with its mode changed
2332 to MODE and its address changed to ADDR, which is assumed to be
2333 MEMREF offset by OFFSET bytes. If VALIDATE is
2334 nonzero, the memory address is forced to be valid. */
2335
2336 rtx
2337 adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
2338 HOST_WIDE_INT offset, int validate)
2339 {
2340 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2341 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2342 }
2343
2344 /* Return a memory reference like MEMREF, but whose address is changed by
2345 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2346 known to be in OFFSET (possibly 1). */
2347
2348 rtx
2349 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2350 {
2351 rtx new_rtx, addr = XEXP (memref, 0);
2352 machine_mode address_mode;
2353 struct mem_attrs attrs, *defattrs;
2354
2355 attrs = *get_mem_attrs (memref);
2356 address_mode = get_address_mode (memref);
2357 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2358
2359 /* At this point we don't know _why_ the address is invalid. It
2360 could have secondary memory references, multiplies or anything.
2361
2362 However, if we did go and rearrange things, we can wind up not
2363 being able to recognize the magic around pic_offset_table_rtx.
2364 This stuff is fragile, and is yet another example of why it is
2365 bad to expose PIC machinery too early. */
2366 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2367 attrs.addrspace)
2368 && GET_CODE (addr) == PLUS
2369 && XEXP (addr, 0) == pic_offset_table_rtx)
2370 {
2371 addr = force_reg (GET_MODE (addr), addr);
2372 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2373 }
2374
2375 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2376 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2377
2378 /* If there are no changes, just return the original memory reference. */
2379 if (new_rtx == memref)
2380 return new_rtx;
2381
2382 /* Update the alignment to reflect the offset. Reset the offset, which
2383 we don't know. */
2384 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2385 attrs.offset_known_p = false;
2386 attrs.size_known_p = defattrs->size_known_p;
2387 attrs.size = defattrs->size;
2388 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2389 set_mem_attrs (new_rtx, &attrs);
2390 return new_rtx;
2391 }
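
/* E.g. (illustrative only), indexing MEM by a register IDX known to be
   a multiple of 4 bytes:

       rtx elt = offset_address (mem, idx, 4);

   caps the recorded alignment at 4 * BITS_PER_UNIT and marks the
   offset as no longer known.  */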
2392
2393 /* Return a memory reference like MEMREF, but with its address changed to
2394 ADDR. The caller is asserting that the actual piece of memory pointed
2395 to is the same, just the form of the address is being changed, such as
2396 by putting something into a register. INPLACE is true if any changes
2397 can be made directly to MEMREF or false if MEMREF must be treated as
2398 immutable. */
2399
2400 rtx
2401 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2402 {
2403 /* change_address_1 copies the memory attribute structure without change
2404 and that's exactly what we want here. */
2405 update_temp_slot_address (XEXP (memref, 0), addr);
2406 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2407 }
2408
2409 /* Likewise, but the reference is not required to be valid. */
2410
2411 rtx
2412 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2413 {
2414 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2415 }
2416
2417 /* Return a memory reference like MEMREF, but with its mode widened to
2418 MODE and offset by OFFSET. This would be used by targets that e.g.
2419 cannot issue QImode memory operations and have to use SImode memory
2420 operations plus masking logic. */
2421
2422 rtx
2423 widen_memory_access (rtx memref, machine_mode mode, HOST_WIDE_INT offset)
2424 {
2425 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2426 struct mem_attrs attrs;
2427 unsigned int size = GET_MODE_SIZE (mode);
2428
2429 /* If there are no changes, just return the original memory reference. */
2430 if (new_rtx == memref)
2431 return new_rtx;
2432
2433 attrs = *get_mem_attrs (new_rtx);
2434
2435 /* If we don't know what offset we were at within the expression, then
2436 we can't know if we've overstepped the bounds. */
2437 if (! attrs.offset_known_p)
2438 attrs.expr = NULL_TREE;
2439
2440 while (attrs.expr)
2441 {
2442 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2443 {
2444 tree field = TREE_OPERAND (attrs.expr, 1);
2445 tree offset = component_ref_field_offset (attrs.expr);
2446
2447 if (! DECL_SIZE_UNIT (field))
2448 {
2449 attrs.expr = NULL_TREE;
2450 break;
2451 }
2452
2453 /* Is the field at least as large as the access? If so, ok,
2454 otherwise strip back to the containing structure. */
2455 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2456 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2457 && attrs.offset >= 0)
2458 break;
2459
2460 if (! tree_fits_uhwi_p (offset))
2461 {
2462 attrs.expr = NULL_TREE;
2463 break;
2464 }
2465
2466 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2467 attrs.offset += tree_to_uhwi (offset);
2468 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2469 / BITS_PER_UNIT);
2470 }
2471 /* Similarly for the decl. */
2472 else if (DECL_P (attrs.expr)
2473 && DECL_SIZE_UNIT (attrs.expr)
2474 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2475 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2476 && (! attrs.offset_known_p || attrs.offset >= 0))
2477 break;
2478 else
2479 {
2480 /* The widened memory access overflows the expression, which means
2481 that it could alias another expression. Zap it. */
2482 attrs.expr = NULL_TREE;
2483 break;
2484 }
2485 }
2486
2487 if (! attrs.expr)
2488 attrs.offset_known_p = false;
2489
2490 /* The widened memory may alias other stuff, so zap the alias set. */
2491 /* ??? Maybe use get_alias_set on any remaining expression. */
2492 attrs.alias = 0;
2493 attrs.size_known_p = true;
2494 attrs.size = size;
2495 set_mem_attrs (new_rtx, &attrs);
2496 return new_rtx;
2497 }
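
/* A target lacking byte loads might widen a QImode read like this
   (illustrative only):

       rtx wide = widen_memory_access (byte_mem, SImode, 0);

   and mask out the byte of interest afterwards; the alias set of the
   result is zapped because the wider access may overlap neighbouring
   objects.  */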
2498 \f
2499 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2500 static GTY(()) tree spill_slot_decl;
2501
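/* Return the fake decl used as the MEM_EXPR of spill slots, building
   it first if FORCE_BUILD_P and it does not exist yet. */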
2502 tree
2503 get_spill_slot_decl (bool force_build_p)
2504 {
2505 tree d = spill_slot_decl;
2506 rtx rd;
2507 struct mem_attrs attrs;
2508
2509 if (d || !force_build_p)
2510 return d;
2511
2512 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2513 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2514 DECL_ARTIFICIAL (d) = 1;
2515 DECL_IGNORED_P (d) = 1;
2516 TREE_USED (d) = 1;
2517 spill_slot_decl = d;
2518
2519 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2520 MEM_NOTRAP_P (rd) = 1;
2521 attrs = *mode_mem_attrs[(int) BLKmode];
2522 attrs.alias = new_alias_set ();
2523 attrs.expr = d;
2524 set_mem_attrs (rd, &attrs);
2525 SET_DECL_RTL (d, rd);
2526
2527 return d;
2528 }
2529
2530 /* Given MEM, a result from assign_stack_local, fill in the memory
2531 attributes as appropriate for a register allocator spill slot.
2532 These slots are not aliasable by other memory. We arrange for
2533 them all to use a single MEM_EXPR, so that the aliasing code can
2534 work properly in the case of shared spill slots. */
2535
2536 void
2537 set_mem_attrs_for_spill (rtx mem)
2538 {
2539 struct mem_attrs attrs;
2540 rtx addr;
2541
2542 attrs = *get_mem_attrs (mem);
2543 attrs.expr = get_spill_slot_decl (true);
2544 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2545 attrs.addrspace = ADDR_SPACE_GENERIC;
2546
2547 /* We expect the incoming memory to be of the form:
2548 (mem:MODE (plus (reg sfp) (const_int offset)))
2549 with perhaps the plus missing for offset = 0. */
2550 addr = XEXP (mem, 0);
2551 attrs.offset_known_p = true;
2552 attrs.offset = 0;
2553 if (GET_CODE (addr) == PLUS
2554 && CONST_INT_P (XEXP (addr, 1)))
2555 attrs.offset = INTVAL (XEXP (addr, 1));
2556
2557 set_mem_attrs (mem, &attrs);
2558 MEM_NOTRAP_P (mem) = 1;
2559 }
2560 \f
2561 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2562
2563 rtx_code_label *
2564 gen_label_rtx (void)
2565 {
2566 return as_a <rtx_code_label *> (
2567 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2568 NULL, label_num++, NULL));
2569 }
2570 \f
2571 /* For procedure integration. */
2572
2573 /* Install new pointers to the first and last insns in the chain.
2574 Also, set cur_insn_uid to one higher than the last in use.
2575 Used for an inline-procedure after copying the insn chain. */
2576
2577 void
2578 set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2579 {
2580 rtx_insn *insn;
2581
2582 set_first_insn (first);
2583 set_last_insn (last);
2584 cur_insn_uid = 0;
2585
2586 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2587 {
2588 int debug_count = 0;
2589
2590 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2591 cur_debug_insn_uid = 0;
2592
2593 for (insn = first; insn; insn = NEXT_INSN (insn))
2594 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2595 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2596 else
2597 {
2598 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2599 if (DEBUG_INSN_P (insn))
2600 debug_count++;
2601 }
2602
2603 if (debug_count)
2604 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2605 else
2606 cur_debug_insn_uid++;
2607 }
2608 else
2609 for (insn = first; insn; insn = NEXT_INSN (insn))
2610 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2611
2612 cur_insn_uid++;
2613 }
2614 \f
2615 /* Go through all the RTL insn bodies and copy any invalid shared
2616 structure. This routine should only be called once. */
2617
2618 static void
2619 unshare_all_rtl_1 (rtx_insn *insn)
2620 {
2621 /* Unshare just about everything else. */
2622 unshare_all_rtl_in_chain (insn);
2623
2624 /* Make sure the addresses of stack slots found outside the insn chain
2625 (such as, in DECL_RTL of a variable) are not shared
2626 with the insn chain.
2627
2628 This special care is necessary when the stack slot MEM does not
2629 actually appear in the insn chain. If it does appear, its address
2630 is unshared from all else at that point. */
2631 stack_slot_list = safe_as_a <rtx_expr_list *> (
2632 copy_rtx_if_shared (stack_slot_list));
2633 }
2634
2635 /* Go through all the RTL insn bodies and copy any invalid shared
2636 structure, again. This is a fairly expensive thing to do so it
2637 should be done sparingly. */
2638
2639 void
2640 unshare_all_rtl_again (rtx_insn *insn)
2641 {
2642 rtx_insn *p;
2643 tree decl;
2644
2645 for (p = insn; p; p = NEXT_INSN (p))
2646 if (INSN_P (p))
2647 {
2648 reset_used_flags (PATTERN (p));
2649 reset_used_flags (REG_NOTES (p));
2650 if (CALL_P (p))
2651 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2652 }
2653
2654 /* Make sure that virtual stack slots are not shared. */
2655 set_used_decls (DECL_INITIAL (cfun->decl));
2656
2657 /* Make sure that virtual parameters are not shared. */
2658 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2659 set_used_flags (DECL_RTL (decl));
2660
2661 reset_used_flags (stack_slot_list);
2662
2663 unshare_all_rtl_1 (insn);
2664 }
2665
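/* Go through all the RTL insn bodies of the current function and copy
   any invalid shared structure. */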
2666 unsigned int
2667 unshare_all_rtl (void)
2668 {
2669 unshare_all_rtl_1 (get_insns ());
2670 return 0;
2671 }
2672
2673
2674 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2675 Recursively does the same for subexpressions. */
2676
2677 static void
2678 verify_rtx_sharing (rtx orig, rtx insn)
2679 {
2680 rtx x = orig;
2681 int i;
2682 enum rtx_code code;
2683 const char *format_ptr;
2684
2685 if (x == 0)
2686 return;
2687
2688 code = GET_CODE (x);
2689
2690 /* These types may be freely shared. */
2691
2692 switch (code)
2693 {
2694 case REG:
2695 case DEBUG_EXPR:
2696 case VALUE:
2697 CASE_CONST_ANY:
2698 case SYMBOL_REF:
2699 case LABEL_REF:
2700 case CODE_LABEL:
2701 case PC:
2702 case CC0:
2703 case RETURN:
2704 case SIMPLE_RETURN:
2705 case SCRATCH:
2706 /* SCRATCHes must be shared because they represent distinct values. */
2707 return;
2708 case CLOBBER:
2709 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2710 clobbers or clobbers of hard registers that originated as pseudos.
2711 This is needed to allow safe register renaming. */
2712 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2713 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2714 return;
2715 break;
2716
2717 case CONST:
2718 if (shared_const_p (orig))
2719 return;
2720 break;
2721
2722 case MEM:
2723 /* A MEM is allowed to be shared if its address is constant. */
2724 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2725 || reload_completed || reload_in_progress)
2726 return;
2727
2728 break;
2729
2730 default:
2731 break;
2732 }
2733
2734 /* This rtx may not be shared. If it has already been seen,
2735 replace it with a copy of itself. */
2736 if (flag_checking && RTX_FLAG (x, used))
2737 {
2738 error ("invalid rtl sharing found in the insn");
2739 debug_rtx (insn);
2740 error ("shared rtx");
2741 debug_rtx (x);
2742 internal_error ("internal consistency failure");
2743 }
2744 gcc_assert (!RTX_FLAG (x, used));
2745
2746 RTX_FLAG (x, used) = 1;
2747
2748 /* Now scan the subexpressions recursively. */
2749
2750 format_ptr = GET_RTX_FORMAT (code);
2751
2752 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2753 {
2754 switch (*format_ptr++)
2755 {
2756 case 'e':
2757 verify_rtx_sharing (XEXP (x, i), insn);
2758 break;
2759
2760 case 'E':
2761 if (XVEC (x, i) != NULL)
2762 {
2763 int j;
2764 int len = XVECLEN (x, i);
2765
2766 for (j = 0; j < len; j++)
2767 {
2768 /* We allow sharing of ASM_OPERANDS inside a single
2769 instruction. */
2770 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2771 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2772 == ASM_OPERANDS))
2773 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2774 else
2775 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2776 }
2777 }
2778 break;
2779 }
2780 }
2781 return;
2782 }
2783
2784 /* Reset used-flags for INSN. */
2785
2786 static void
2787 reset_insn_used_flags (rtx insn)
2788 {
2789 gcc_assert (INSN_P (insn));
2790 reset_used_flags (PATTERN (insn));
2791 reset_used_flags (REG_NOTES (insn));
2792 if (CALL_P (insn))
2793 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2794 }
2795
2796 /* Go through all the RTL insn bodies and clear all the USED bits. */
2797
2798 static void
2799 reset_all_used_flags (void)
2800 {
2801 rtx_insn *p;
2802
2803 for (p = get_insns (); p; p = NEXT_INSN (p))
2804 if (INSN_P (p))
2805 {
2806 rtx pat = PATTERN (p);
2807 if (GET_CODE (pat) != SEQUENCE)
2808 reset_insn_used_flags (p);
2809 else
2810 {
2811 gcc_assert (REG_NOTES (p) == NULL);
2812 for (int i = 0; i < XVECLEN (pat, 0); i++)
2813 {
2814 rtx insn = XVECEXP (pat, 0, i);
2815 if (INSN_P (insn))
2816 reset_insn_used_flags (insn);
2817 }
2818 }
2819 }
2820 }
2821
2822 /* Verify sharing in INSN. */
2823
2824 static void
2825 verify_insn_sharing (rtx insn)
2826 {
2827 gcc_assert (INSN_P (insn));
2828 reset_used_flags (PATTERN (insn));
2829 reset_used_flags (REG_NOTES (insn));
2830 if (CALL_P (insn))
2831 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2832 }
2833
2834 /* Go through all the RTL insn bodies and check that there is no unexpected
2835 sharing between the subexpressions. */
2836
2837 DEBUG_FUNCTION void
2838 verify_rtl_sharing (void)
2839 {
2840 rtx_insn *p;
2841
2842 timevar_push (TV_VERIFY_RTL_SHARING);
2843
2844 reset_all_used_flags ();
2845
2846 for (p = get_insns (); p; p = NEXT_INSN (p))
2847 if (INSN_P (p))
2848 {
2849 rtx pat = PATTERN (p);
2850 if (GET_CODE (pat) != SEQUENCE)
2851 verify_insn_sharing (p);
2852 else
2853 for (int i = 0; i < XVECLEN (pat, 0); i++)
2854 {
2855 rtx insn = XVECEXP (pat, 0, i);
2856 if (INSN_P (insn))
2857 verify_insn_sharing (insn);
2858 }
2859 }
2860
2861 reset_all_used_flags ();
2862
2863 timevar_pop (TV_VERIFY_RTL_SHARING);
2864 }
2865
2866 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2867 Assumes the mark bits are cleared at entry. */
2868
2869 void
2870 unshare_all_rtl_in_chain (rtx_insn *insn)
2871 {
2872 for (; insn; insn = NEXT_INSN (insn))
2873 if (INSN_P (insn))
2874 {
2875 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2876 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2877 if (CALL_P (insn))
2878 CALL_INSN_FUNCTION_USAGE (insn)
2879 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2880 }
2881 }
2882
2883 /* Go through all virtual stack slots of a function and mark them as
2884 shared. We never replace the DECL_RTLs themselves with a copy,
2885 but expressions mentioned in a DECL_RTL cannot be shared with
2886 expressions in the instruction stream.
2887
2888 Note that reload may convert pseudo registers into memories in-place.
2889 Pseudo registers are always shared, but MEMs never are. Thus if we
2890 reset the used flags on MEMs in the instruction stream, we must set
2891 them again on MEMs that appear in DECL_RTLs. */
2892
2893 static void
2894 set_used_decls (tree blk)
2895 {
2896 tree t;
2897
2898 /* Mark decls. */
2899 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2900 if (DECL_RTL_SET_P (t))
2901 set_used_flags (DECL_RTL (t));
2902
2903 /* Now process sub-blocks. */
2904 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2905 set_used_decls (t);
2906 }
2907
2908 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2909 Recursively does the same for subexpressions. Uses
2910 copy_rtx_if_shared_1 to reduce stack space. */
2911
2912 rtx
2913 copy_rtx_if_shared (rtx orig)
2914 {
2915 copy_rtx_if_shared_1 (&orig);
2916 return orig;
2917 }
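
/* For example (illustrative only): if the same non-constant MEM node
   appears as both operands of a PLUS, the first visit marks it used
   and the second visit replaces it with a shallow copy, so the result
   no longer shares the MEM.  */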
2918
2919 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2920 use. Recursively does the same for subexpressions. */
2921
2922 static void
2923 copy_rtx_if_shared_1 (rtx *orig1)
2924 {
2925 rtx x;
2926 int i;
2927 enum rtx_code code;
2928 rtx *last_ptr;
2929 const char *format_ptr;
2930 int copied = 0;
2931 int length;
2932
2933 /* Repeat is used to turn tail-recursion into iteration. */
2934 repeat:
2935 x = *orig1;
2936
2937 if (x == 0)
2938 return;
2939
2940 code = GET_CODE (x);
2941
2942 /* These types may be freely shared. */
2943
2944 switch (code)
2945 {
2946 case REG:
2947 case DEBUG_EXPR:
2948 case VALUE:
2949 CASE_CONST_ANY:
2950 case SYMBOL_REF:
2951 case LABEL_REF:
2952 case CODE_LABEL:
2953 case PC:
2954 case CC0:
2955 case RETURN:
2956 case SIMPLE_RETURN:
2957 case SCRATCH:
2958 /* SCRATCHes must be shared because they represent distinct values. */
2959 return;
2960 case CLOBBER:
2961 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2962 clobbers or clobbers of hard registers that originated as pseudos.
2963 This is needed to allow safe register renaming. */
2964 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2965 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2966 return;
2967 break;
2968
2969 case CONST:
2970 if (shared_const_p (x))
2971 return;
2972 break;
2973
2974 case DEBUG_INSN:
2975 case INSN:
2976 case JUMP_INSN:
2977 case CALL_INSN:
2978 case NOTE:
2979 case BARRIER:
2980 /* The chain of insns is not being copied. */
2981 return;
2982
2983 default:
2984 break;
2985 }
2986
2987 /* This rtx may not be shared. If it has already been seen,
2988 replace it with a copy of itself. */
2989
2990 if (RTX_FLAG (x, used))
2991 {
2992 x = shallow_copy_rtx (x);
2993 copied = 1;
2994 }
2995 RTX_FLAG (x, used) = 1;
2996
2997 /* Now scan the subexpressions recursively.
2998 We can store any replaced subexpressions directly into X
2999 since we know X is not shared! Any vectors in X
3000 must be copied if X was copied. */
3001
3002 format_ptr = GET_RTX_FORMAT (code);
3003 length = GET_RTX_LENGTH (code);
3004 last_ptr = NULL;
3005
3006 for (i = 0; i < length; i++)
3007 {
3008 switch (*format_ptr++)
3009 {
3010 case 'e':
3011 if (last_ptr)
3012 copy_rtx_if_shared_1 (last_ptr);
3013 last_ptr = &XEXP (x, i);
3014 break;
3015
3016 case 'E':
3017 if (XVEC (x, i) != NULL)
3018 {
3019 int j;
3020 int len = XVECLEN (x, i);
3021
3022 /* Copy the vector iff I copied the rtx and the length
3023 is nonzero. */
3024 if (copied && len > 0)
3025 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
3026
3027 /* Call recursively on all inside the vector. */
3028 for (j = 0; j < len; j++)
3029 {
3030 if (last_ptr)
3031 copy_rtx_if_shared_1 (last_ptr);
3032 last_ptr = &XVECEXP (x, i, j);
3033 }
3034 }
3035 break;
3036 }
3037 }
3038 *orig1 = x;
3039 if (last_ptr)
3040 {
3041 orig1 = last_ptr;
3042 goto repeat;
3043 }
3044 return;
3045 }
3046
3047 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
3048
3049 static void
3050 mark_used_flags (rtx x, int flag)
3051 {
3052 int i, j;
3053 enum rtx_code code;
3054 const char *format_ptr;
3055 int length;
3056
3057 /* Repeat is used to turn tail-recursion into iteration. */
3058 repeat:
3059 if (x == 0)
3060 return;
3061
3062 code = GET_CODE (x);
3063
3064 /* These types may be freely shared so we needn't do any resetting
3065 for them. */
3066
3067 switch (code)
3068 {
3069 case REG:
3070 case DEBUG_EXPR:
3071 case VALUE:
3072 CASE_CONST_ANY:
3073 case SYMBOL_REF:
3074 case CODE_LABEL:
3075 case PC:
3076 case CC0:
3077 case RETURN:
3078 case SIMPLE_RETURN:
3079 return;
3080
3081 case DEBUG_INSN:
3082 case INSN:
3083 case JUMP_INSN:
3084 case CALL_INSN:
3085 case NOTE:
3086 case LABEL_REF:
3087 case BARRIER:
3088 /* The chain of insns is not being copied. */
3089 return;
3090
3091 default:
3092 break;
3093 }
3094
3095 RTX_FLAG (x, used) = flag;
3096
3097 format_ptr = GET_RTX_FORMAT (code);
3098 length = GET_RTX_LENGTH (code);
3099
3100 for (i = 0; i < length; i++)
3101 {
3102 switch (*format_ptr++)
3103 {
3104 case 'e':
3105 if (i == length-1)
3106 {
3107 x = XEXP (x, i);
3108 goto repeat;
3109 }
3110 mark_used_flags (XEXP (x, i), flag);
3111 break;
3112
3113 case 'E':
3114 for (j = 0; j < XVECLEN (x, i); j++)
3115 mark_used_flags (XVECEXP (x, i, j), flag);
3116 break;
3117 }
3118 }
3119 }
3120
3121 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3122 to look for shared sub-parts. */
3123
3124 void
3125 reset_used_flags (rtx x)
3126 {
3127 mark_used_flags (x, 0);
3128 }
3129
3130 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3131 to look for shared sub-parts. */
3132
3133 void
3134 set_used_flags (rtx x)
3135 {
3136 mark_used_flags (x, 1);
3137 }
3138 \f
3139 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3140 Return X or the rtx for the pseudo reg the value of X was copied into.
3141 OTHER must be valid as a SET_DEST. */
3142
3143 rtx
3144 make_safe_from (rtx x, rtx other)
3145 {
3146 while (1)
3147 switch (GET_CODE (other))
3148 {
3149 case SUBREG:
3150 other = SUBREG_REG (other);
3151 break;
3152 case STRICT_LOW_PART:
3153 case SIGN_EXTEND:
3154 case ZERO_EXTEND:
3155 other = XEXP (other, 0);
3156 break;
3157 default:
3158 goto done;
3159 }
3160 done:
3161 if ((MEM_P (other)
3162 && ! CONSTANT_P (x)
3163 && !REG_P (x)
3164 && GET_CODE (x) != SUBREG)
3165 || (REG_P (other)
3166 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3167 || reg_mentioned_p (other, x))))
3168 {
3169 rtx temp = gen_reg_rtx (GET_MODE (x));
3170 emit_move_insn (temp, x);
3171 return temp;
3172 }
3173 return x;
3174 }
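
/* E.g. (illustrative only), before emitting code that assigns to OTHER
   a caller can do:

       x = make_safe_from (x, other);

   and then use X after the assignment; if the store could have
   clobbered X, X now lives in a fresh pseudo register.  */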
3175 \f
3176 /* Emission of insns (adding them to the doubly-linked list). */
3177
3178 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3179
3180 rtx_insn *
3181 get_last_insn_anywhere (void)
3182 {
3183 struct sequence_stack *seq;
3184 for (seq = get_current_sequence (); seq; seq = seq->next)
3185 if (seq->last != 0)
3186 return seq->last;
3187 return 0;
3188 }
3189
3190 /* Return the first nonnote insn emitted in current sequence or current
3191 function. This routine looks inside SEQUENCEs. */
3192
3193 rtx_insn *
3194 get_first_nonnote_insn (void)
3195 {
3196 rtx_insn *insn = get_insns ();
3197
3198 if (insn)
3199 {
3200 if (NOTE_P (insn))
3201 for (insn = next_insn (insn);
3202 insn && NOTE_P (insn);
3203 insn = next_insn (insn))
3204 continue;
3205 else
3206 {
3207 if (NONJUMP_INSN_P (insn)
3208 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3209 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3210 }
3211 }
3212
3213 return insn;
3214 }
3215
3216 /* Return the last nonnote insn emitted in current sequence or current
3217 function. This routine looks inside SEQUENCEs. */
3218
3219 rtx_insn *
3220 get_last_nonnote_insn (void)
3221 {
3222 rtx_insn *insn = get_last_insn ();
3223
3224 if (insn)
3225 {
3226 if (NOTE_P (insn))
3227 for (insn = previous_insn (insn);
3228 insn && NOTE_P (insn);
3229 insn = previous_insn (insn))
3230 continue;
3231 else
3232 {
3233 if (NONJUMP_INSN_P (insn))
3234 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3235 insn = seq->insn (seq->len () - 1);
3236 }
3237 }
3238
3239 return insn;
3240 }
3241
3242 /* Return the number of actual (non-debug) insns emitted in this
3243 function. */
3244
3245 int
3246 get_max_insn_count (void)
3247 {
3248 int n = cur_insn_uid;
3249
3250 /* The table size must be stable across -g, to avoid codegen
3251 differences due to debug insns, and not be affected by
3252 -fmin-insn-uid, to avoid excessive table size and to simplify
3253 debugging of -fcompare-debug failures. */
3254 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3255 n -= cur_debug_insn_uid;
3256 else
3257 n -= MIN_NONDEBUG_INSN_UID;
3258
3259 return n;
3260 }
3261
3262 \f
3263 /* Return the next insn. If it is a SEQUENCE, return the first insn
3264 of the sequence. */
3265
3266 rtx_insn *
3267 next_insn (rtx_insn *insn)
3268 {
3269 if (insn)
3270 {
3271 insn = NEXT_INSN (insn);
3272 if (insn && NONJUMP_INSN_P (insn)
3273 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3274 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3275 }
3276
3277 return insn;
3278 }
3279
3280 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3281 of the sequence. */
3282
3283 rtx_insn *
3284 previous_insn (rtx_insn *insn)
3285 {
3286 if (insn)
3287 {
3288 insn = PREV_INSN (insn);
3289 if (insn && NONJUMP_INSN_P (insn))
3290 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3291 insn = seq->insn (seq->len () - 1);
3292 }
3293
3294 return insn;
3295 }
3296
3297 /* Return the next insn after INSN that is not a NOTE. This routine does not
3298 look inside SEQUENCEs. */
3299
3300 rtx_insn *
3301 next_nonnote_insn (rtx uncast_insn)
3302 {
3303 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3304 while (insn)
3305 {
3306 insn = NEXT_INSN (insn);
3307 if (insn == 0 || !NOTE_P (insn))
3308 break;
3309 }
3310
3311 return insn;
3312 }
3313
3314 /* Return the next insn after INSN that is not a NOTE, but stop the
3315 search before we enter another basic block. This routine does not
3316 look inside SEQUENCEs. */
3317
3318 rtx_insn *
3319 next_nonnote_insn_bb (rtx_insn *insn)
3320 {
3321 while (insn)
3322 {
3323 insn = NEXT_INSN (insn);
3324 if (insn == 0 || !NOTE_P (insn))
3325 break;
3326 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3327 return NULL;
3328 }
3329
3330 return insn;
3331 }
3332
3333 /* Return the previous insn before INSN that is not a NOTE. This routine does
3334 not look inside SEQUENCEs. */
3335
3336 rtx_insn *
3337 prev_nonnote_insn (rtx uncast_insn)
3338 {
3339 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3340
3341 while (insn)
3342 {
3343 insn = PREV_INSN (insn);
3344 if (insn == 0 || !NOTE_P (insn))
3345 break;
3346 }
3347
3348 return insn;
3349 }
3350
3351 /* Return the previous insn before INSN that is not a NOTE, but stop
3352 the search before we enter another basic block. This routine does
3353 not look inside SEQUENCEs. */
3354
3355 rtx_insn *
3356 prev_nonnote_insn_bb (rtx uncast_insn)
3357 {
3358 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3359
3360 while (insn)
3361 {
3362 insn = PREV_INSN (insn);
3363 if (insn == 0 || !NOTE_P (insn))
3364 break;
3365 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3366 return NULL;
3367 }
3368
3369 return insn;
3370 }
3371
3372 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3373 routine does not look inside SEQUENCEs. */
3374
3375 rtx_insn *
3376 next_nondebug_insn (rtx uncast_insn)
3377 {
3378 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3379
3380 while (insn)
3381 {
3382 insn = NEXT_INSN (insn);
3383 if (insn == 0 || !DEBUG_INSN_P (insn))
3384 break;
3385 }
3386
3387 return insn;
3388 }
3389
3390 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3391 This routine does not look inside SEQUENCEs. */
3392
3393 rtx_insn *
3394 prev_nondebug_insn (rtx uncast_insn)
3395 {
3396 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3397
3398 while (insn)
3399 {
3400 insn = PREV_INSN (insn);
3401 if (insn == 0 || !DEBUG_INSN_P (insn))
3402 break;
3403 }
3404
3405 return insn;
3406 }
3407
3408 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3409 This routine does not look inside SEQUENCEs. */
3410
3411 rtx_insn *
3412 next_nonnote_nondebug_insn (rtx uncast_insn)
3413 {
3414 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3415
3416 while (insn)
3417 {
3418 insn = NEXT_INSN (insn);
3419 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3420 break;
3421 }
3422
3423 return insn;
3424 }
3425
3426 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3427 This routine does not look inside SEQUENCEs. */
3428
3429 rtx_insn *
3430 prev_nonnote_nondebug_insn (rtx uncast_insn)
3431 {
3432 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3433
3434 while (insn)
3435 {
3436 insn = PREV_INSN (insn);
3437 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3438 break;
3439 }
3440
3441 return insn;
3442 }
3443
3444 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3445 or 0, if there is none. This routine does not look inside
3446 SEQUENCEs. */
3447
3448 rtx_insn *
3449 next_real_insn (rtx uncast_insn)
3450 {
3451 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3452
3453 while (insn)
3454 {
3455 insn = NEXT_INSN (insn);
3456 if (insn == 0 || INSN_P (insn))
3457 break;
3458 }
3459
3460 return insn;
3461 }
3462
3463 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3464 or 0, if there is none. This routine does not look inside
3465 SEQUENCEs. */
3466
3467 rtx_insn *
3468 prev_real_insn (rtx uncast_insn)
3469 {
3470 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3471
3472 while (insn)
3473 {
3474 insn = PREV_INSN (insn);
3475 if (insn == 0 || INSN_P (insn))
3476 break;
3477 }
3478
3479 return insn;
3480 }
3481
3482 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3483 This routine does not look inside SEQUENCEs. */
3484
3485 rtx_call_insn *
3486 last_call_insn (void)
3487 {
3488 rtx_insn *insn;
3489
3490 for (insn = get_last_insn ();
3491 insn && !CALL_P (insn);
3492 insn = PREV_INSN (insn))
3493 ;
3494
3495 return safe_as_a <rtx_call_insn *> (insn);
3496 }
3497
3498 /* Return nonzero if INSN really does something. After reload,
3499 standalone USE and CLOBBER insns do not count as active. */
3501
3502 int
3503 active_insn_p (const_rtx insn)
3504 {
3505 return (CALL_P (insn) || JUMP_P (insn)
3506 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3507 || (NONJUMP_INSN_P (insn)
3508 && (! reload_completed
3509 || (GET_CODE (PATTERN (insn)) != USE
3510 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3511 }
3512
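/* Find the next active insn after INSN; return 0 if there is none.
   This routine does not look inside SEQUENCEs. */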
3513 rtx_insn *
3514 next_active_insn (rtx uncast_insn)
3515 {
3516 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3517
3518 while (insn)
3519 {
3520 insn = NEXT_INSN (insn);
3521 if (insn == 0 || active_insn_p (insn))
3522 break;
3523 }
3524
3525 return insn;
3526 }
3527
3528 /* Find the last insn before INSN that really does something. This routine
3529 does not look inside SEQUENCEs. After reload this also skips over
3530 standalone USE and CLOBBER insns. */
3531
3532 rtx_insn *
3533 prev_active_insn (rtx uncast_insn)
3534 {
3535 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3536
3537 while (insn)
3538 {
3539 insn = PREV_INSN (insn);
3540 if (insn == 0 || active_insn_p (insn))
3541 break;
3542 }
3543
3544 return insn;
3545 }
3546 \f
3547 /* Return the next insn that uses CC0 after INSN, which is assumed to
3548 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3549 applied to the result of this function should yield INSN).
3550
3551 Normally, this is simply the next insn. However, if a REG_CC_USER note
3552 is present, it contains the insn that uses CC0.
3553
3554 Return 0 if we can't find the insn. */
3555
3556 rtx_insn *
3557 next_cc0_user (rtx uncast_insn)
3558 {
3559 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3560
3561 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3562
3563 if (note)
3564 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3565
3566 insn = next_nonnote_insn (insn);
3567 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3568 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3569
3570 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3571 return insn;
3572
3573 return 0;
3574 }
3575
3576 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3577 note, it is the previous insn. */
3578
3579 rtx_insn *
3580 prev_cc0_setter (rtx_insn *insn)
3581 {
3582 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3583
3584 if (note)
3585 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3586
3587 insn = prev_nonnote_insn (insn);
3588 gcc_assert (sets_cc0_p (PATTERN (insn)));
3589
3590 return insn;
3591 }
3592
3593 /* Return true if X contains an RTX_AUTOINC class rtx matching REG. */
3594
3595 static int
3596 find_auto_inc (const_rtx x, const_rtx reg)
3597 {
3598 subrtx_iterator::array_type array;
3599 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3600 {
3601 const_rtx x = *iter;
3602 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3603 && rtx_equal_p (reg, XEXP (x, 0)))
3604 return true;
3605 }
3606 return false;
3607 }
3608
3609 /* Increment the label uses for all labels present in rtx. */
3610
3611 static void
3612 mark_label_nuses (rtx x)
3613 {
3614 enum rtx_code code;
3615 int i, j;
3616 const char *fmt;
3617
3618 code = GET_CODE (x);
3619 if (code == LABEL_REF && LABEL_P (LABEL_REF_LABEL (x)))
3620 LABEL_NUSES (LABEL_REF_LABEL (x))++;
3621
3622 fmt = GET_RTX_FORMAT (code);
3623 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3624 {
3625 if (fmt[i] == 'e')
3626 mark_label_nuses (XEXP (x, i));
3627 else if (fmt[i] == 'E')
3628 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3629 mark_label_nuses (XVECEXP (x, i, j));
3630 }
3631 }
3632
3633 \f
3634 /* Try splitting insns that can be split for better scheduling.
3635 PAT is the pattern which might split.
3636 TRIAL is the insn providing PAT.
3637 LAST is nonzero if we should return the last insn of the sequence produced.
3638
3639 If this routine succeeds in splitting, it returns the first or last
3640 replacement insn depending on the value of LAST. Otherwise, it
3641 returns TRIAL. If the insn to be returned can be split, it will be. */
3642
3643 rtx_insn *
3644 try_split (rtx pat, rtx_insn *trial, int last)
3645 {
3646 rtx_insn *before = PREV_INSN (trial);
3647 rtx_insn *after = NEXT_INSN (trial);
3648 rtx note;
3649 rtx_insn *seq, *tem;
3650 int probability;
3651 rtx_insn *insn_last, *insn;
3652 int njumps = 0;
3653 rtx_insn *call_insn = NULL;
3654
3655 /* We're not good at redistributing frame information. */
3656 if (RTX_FRAME_RELATED_P (trial))
3657 return trial;
3658
3659 if (any_condjump_p (trial)
3660 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3661 split_branch_probability = XINT (note, 0);
3662 probability = split_branch_probability;
3663
3664 seq = split_insns (pat, trial);
3665
3666 split_branch_probability = -1;
3667
3668 if (!seq)
3669 return trial;
3670
3671 /* Avoid infinite loop if any insn of the result matches
3672 the original pattern. */
3673 insn_last = seq;
3674 while (1)
3675 {
3676 if (INSN_P (insn_last)
3677 && rtx_equal_p (PATTERN (insn_last), pat))
3678 return trial;
3679 if (!NEXT_INSN (insn_last))
3680 break;
3681 insn_last = NEXT_INSN (insn_last);
3682 }
3683
3684 /* We will be adding the new sequence to the function. The splitters
3685 may have introduced invalid RTL sharing, so unshare the sequence now. */
3686 unshare_all_rtl_in_chain (seq);
3687
3688 /* Mark labels and copy flags. */
3689 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3690 {
3691 if (JUMP_P (insn))
3692 {
3693 if (JUMP_P (trial))
3694 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3695 mark_jump_label (PATTERN (insn), insn, 0);
3696 njumps++;
3697 if (probability != -1
3698 && any_condjump_p (insn)
3699 && !find_reg_note (insn, REG_BR_PROB, 0))
3700 {
3701 /* We can preserve the REG_BR_PROB notes only if exactly
3702 one jump is created, otherwise the machine description
3703 is responsible for this step using the
3704 split_branch_probability variable. */
3705 gcc_assert (njumps == 1);
3706 add_int_reg_note (insn, REG_BR_PROB, probability);
3707 }
3708 }
3709 }
3710
3711 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3712 in SEQ and copy any additional information across. */
3713 if (CALL_P (trial))
3714 {
3715 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3716 if (CALL_P (insn))
3717 {
3718 rtx_insn *next;
3719 rtx *p;
3720
3721 gcc_assert (call_insn == NULL_RTX);
3722 call_insn = insn;
3723
3724 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3725 target may have explicitly specified. */
3726 p = &CALL_INSN_FUNCTION_USAGE (insn);
3727 while (*p)
3728 p = &XEXP (*p, 1);
3729 *p = CALL_INSN_FUNCTION_USAGE (trial);
3730
3731 /* If the old call was a sibling call, the new one must
3732 be too. */
3733 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3734
3735 /* If the new call is the last instruction in the sequence,
3736 it will effectively replace the old call in-situ. Otherwise
3737 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3738 so that it comes immediately after the new call. */
3739 if (NEXT_INSN (insn))
3740 for (next = NEXT_INSN (trial);
3741 next && NOTE_P (next);
3742 next = NEXT_INSN (next))
3743 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3744 {
3745 remove_insn (next);
3746 add_insn_after (next, insn, NULL);
3747 break;
3748 }
3749 }
3750 }
3751
3752 /* Copy notes, particularly those related to the CFG. */
3753 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3754 {
3755 switch (REG_NOTE_KIND (note))
3756 {
3757 case REG_EH_REGION:
3758 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3759 break;
3760
3761 case REG_NORETURN:
3762 case REG_SETJMP:
3763 case REG_TM:
3764 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3765 {
3766 if (CALL_P (insn))
3767 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3768 }
3769 break;
3770
3771 case REG_NON_LOCAL_GOTO:
3772 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3773 {
3774 if (JUMP_P (insn))
3775 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3776 }
3777 break;
3778
3779 case REG_INC:
3780 if (!AUTO_INC_DEC)
3781 break;
3782
3783 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3784 {
3785 rtx reg = XEXP (note, 0);
3786 if (!FIND_REG_INC_NOTE (insn, reg)
3787 && find_auto_inc (PATTERN (insn), reg))
3788 add_reg_note (insn, REG_INC, reg);
3789 }
3790 break;
3791
3792 case REG_ARGS_SIZE:
3793 fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0)));
3794 break;
3795
3796 case REG_CALL_DECL:
3797 gcc_assert (call_insn != NULL_RTX);
3798 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3799 break;
3800
3801 default:
3802 break;
3803 }
3804 }
3805
3806 /* If there are LABELS inside the split insns, increment the
3807 usage count so we don't delete the label. */
3808 if (INSN_P (trial))
3809 {
3810 insn = insn_last;
3811 while (insn != NULL_RTX)
3812 {
3813 /* JUMP_P insns have already been "marked" above. */
3814 if (NONJUMP_INSN_P (insn))
3815 mark_label_nuses (PATTERN (insn));
3816
3817 insn = PREV_INSN (insn);
3818 }
3819 }
3820
3821 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3822
3823 delete_insn (trial);
3824
3825 /* Recursively call try_split for each new insn created; by the
3826 time control returns here that insn will be fully split, so
3827 set LAST and continue from the insn after the one returned.
3828 We can't use next_active_insn here since AFTER may be a note.
3829 Ignore deleted insns, which can occur if not optimizing. */
3830 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3831 if (! tem->deleted () && INSN_P (tem))
3832 tem = try_split (PATTERN (tem), tem, 1);
3833
3834 /* Return either the first or the last insn, depending on which was
3835 requested. */
3836 return last
3837 ? (after ? PREV_INSN (after) : get_last_insn ())
3838 : NEXT_INSN (before);
3839 }
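
/* For example (illustrative only), a machine-description splitter is
   applied here roughly as:

       insn = try_split (PATTERN (insn), insn, 1);

   after which INSN is the last insn of the fully split sequence, or
   the original insn if no splitter matched.  */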
3840 \f
3841 /* Make and return an INSN rtx, initializing all its slots.
3842 Store PATTERN in the pattern slots. */
3843
3844 rtx_insn *
3845 make_insn_raw (rtx pattern)
3846 {
3847 rtx_insn *insn;
3848
3849 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
3850
3851 INSN_UID (insn) = cur_insn_uid++;
3852 PATTERN (insn) = pattern;
3853 INSN_CODE (insn) = -1;
3854 REG_NOTES (insn) = NULL;
3855 INSN_LOCATION (insn) = curr_insn_location ();
3856 BLOCK_FOR_INSN (insn) = NULL;
3857
3858 #ifdef ENABLE_RTL_CHECKING
3859 if (insn
3860 && INSN_P (insn)
3861 && (returnjump_p (insn)
3862 || (GET_CODE (insn) == SET
3863 && SET_DEST (insn) == pc_rtx)))
3864 {
3865 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3866 debug_rtx (insn);
3867 }
3868 #endif
3869
3870 return insn;
3871 }
3872
3873 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3874
3875 static rtx_insn *
3876 make_debug_insn_raw (rtx pattern)
3877 {
3878 rtx_debug_insn *insn;
3879
3880 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
3881 INSN_UID (insn) = cur_debug_insn_uid++;
3882 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3883 INSN_UID (insn) = cur_insn_uid++;
3884
3885 PATTERN (insn) = pattern;
3886 INSN_CODE (insn) = -1;
3887 REG_NOTES (insn) = NULL;
3888 INSN_LOCATION (insn) = curr_insn_location ();
3889 BLOCK_FOR_INSN (insn) = NULL;
3890
3891 return insn;
3892 }
3893
3894 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3895
3896 static rtx_insn *
3897 make_jump_insn_raw (rtx pattern)
3898 {
3899 rtx_jump_insn *insn;
3900
3901 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
3902 INSN_UID (insn) = cur_insn_uid++;
3903
3904 PATTERN (insn) = pattern;
3905 INSN_CODE (insn) = -1;
3906 REG_NOTES (insn) = NULL;
3907 JUMP_LABEL (insn) = NULL;
3908 INSN_LOCATION (insn) = curr_insn_location ();
3909 BLOCK_FOR_INSN (insn) = NULL;
3910
3911 return insn;
3912 }
3913
3914 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3915
3916 static rtx_insn *
3917 make_call_insn_raw (rtx pattern)
3918 {
3919 rtx_call_insn *insn;
3920
3921 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
3922 INSN_UID (insn) = cur_insn_uid++;
3923
3924 PATTERN (insn) = pattern;
3925 INSN_CODE (insn) = -1;
3926 REG_NOTES (insn) = NULL;
3927 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3928 INSN_LOCATION (insn) = curr_insn_location ();
3929 BLOCK_FOR_INSN (insn) = NULL;
3930
3931 return insn;
3932 }
3933
3934 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
3935
3936 static rtx_note *
3937 make_note_raw (enum insn_note subtype)
3938 {
3939 /* Some notes are never created this way at all. These notes are
3940 only created by patching out insns. */
3941 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
3942 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
3943
3944 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
3945 INSN_UID (note) = cur_insn_uid++;
3946 NOTE_KIND (note) = subtype;
3947 BLOCK_FOR_INSN (note) = NULL;
3948 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3949 return note;
3950 }
3951 \f
3952 /* Add INSN into the doubly-linked list, between PREV and NEXT.
3953 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
3954 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
3955
3956 static inline void
3957 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
3958 {
3959 SET_PREV_INSN (insn) = prev;
3960 SET_NEXT_INSN (insn) = next;
3961 if (prev != NULL)
3962 {
3963 SET_NEXT_INSN (prev) = insn;
3964 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3965 {
3966 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
3967 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
3968 }
3969 }
3970 if (next != NULL)
3971 {
3972 SET_PREV_INSN (next) = insn;
3973 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3974 {
3975 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
3976 SET_PREV_INSN (sequence->insn (0)) = insn;
3977 }
3978 }
3979
3980 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3981 {
3982 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
3983 SET_PREV_INSN (sequence->insn (0)) = prev;
3984 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
3985 }
3986 }
3987
3988 /* Add INSN to the end of the doubly-linked list.
3989 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3990
3991 void
3992 add_insn (rtx_insn *insn)
3993 {
3994 rtx_insn *prev = get_last_insn ();
3995 link_insn_into_chain (insn, prev, NULL);
3996 if (NULL == get_insns ())
3997 set_first_insn (insn);
3998 set_last_insn (insn);
3999 }
4000
4001 /* Add INSN into the doubly-linked list after insn AFTER. */
4002
4003 static void
4004 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
4005 {
4006 rtx_insn *next = NEXT_INSN (after);
4007
4008 gcc_assert (!optimize || !after->deleted ());
4009
4010 link_insn_into_chain (insn, after, next);
4011
4012 if (next == NULL)
4013 {
4014 struct sequence_stack *seq;
4015
4016 for (seq = get_current_sequence (); seq; seq = seq->next)
4017 if (after == seq->last)
4018 {
4019 seq->last = insn;
4020 break;
4021 }
4022 }
4023 }
4024
4025 /* Add INSN into the doubly-linked list before insn BEFORE. */
4026
4027 static void
4028 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
4029 {
4030 rtx_insn *prev = PREV_INSN (before);
4031
4032 gcc_assert (!optimize || !before->deleted ());
4033
4034 link_insn_into_chain (insn, prev, before);
4035
4036 if (prev == NULL)
4037 {
4038 struct sequence_stack *seq;
4039
4040 for (seq = get_current_sequence (); seq; seq = seq->next)
4041 if (before == seq->first)
4042 {
4043 seq->first = insn;
4044 break;
4045 }
4046
4047 gcc_assert (seq);
4048 }
4049 }
4050
4051 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4052 If BB is NULL, an attempt is made to infer the bb from AFTER.
4053
4054 This and the next function should be the only functions called
4055 to insert an insn once delay slots have been filled since only
4056 they know how to update a SEQUENCE. */
4057
4058 void
4059 add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
4060 {
4061 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4062 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
4063 add_insn_after_nobb (insn, after);
4064 if (!BARRIER_P (after)
4065 && !BARRIER_P (insn)
4066 && (bb = BLOCK_FOR_INSN (after)))
4067 {
4068 set_block_for_insn (insn, bb);
4069 if (INSN_P (insn))
4070 df_insn_rescan (insn);
4071 /* Should not happen as first in the BB is always
4072 either NOTE or LABEL. */
4073 if (BB_END (bb) == after
4074 /* Avoid clobbering of structure when creating new BB. */
4075 && !BARRIER_P (insn)
4076 && !NOTE_INSN_BASIC_BLOCK_P (insn))
4077 BB_END (bb) = insn;
4078 }
4079 }
4080
4081 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4082 If BB is NULL, an attempt is made to infer the bb from BEFORE.
4083
4084 This and the previous function should be the only functions called
4085 to insert an insn once delay slots have been filled since only
4086 they know how to update a SEQUENCE. */
4087
4088 void
4089 add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
4090 {
4091 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4092 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4093 add_insn_before_nobb (insn, before);
4094
4095 if (!bb
4096 && !BARRIER_P (before)
4097 && !BARRIER_P (insn))
4098 bb = BLOCK_FOR_INSN (before);
4099
4100 if (bb)
4101 {
4102 set_block_for_insn (insn, bb);
4103 if (INSN_P (insn))
4104 df_insn_rescan (insn);
4105 /* Should not happen as first in the BB is always either NOTE or
4106 LABEL. */
4107 gcc_assert (BB_HEAD (bb) != insn
4108 /* Avoid clobbering of structure when creating new BB. */
4109 || BARRIER_P (insn)
4110 || NOTE_INSN_BASIC_BLOCK_P (insn));
4111 }
4112 }
4113
4114 /* Replace INSN with a deleted instruction note. */
4115
4116 void
4117 set_insn_deleted (rtx insn)
4118 {
4119 if (INSN_P (insn))
4120 df_insn_delete (as_a <rtx_insn *> (insn));
4121 PUT_CODE (insn, NOTE);
4122 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4123 }
4124
4125
4126 /* Unlink INSN from the insn chain.
4127
4128 This function knows how to handle sequences.
4129
4130 This function does not invalidate data flow information associated with
4131 INSN (i.e. does not call df_insn_delete). That makes this function
4132 usable only for disconnecting an insn from the chain, so that it can
4133 be re-emitted elsewhere later.
4134
4135 To later insert INSN elsewhere in the insn chain via add_insn and
4136 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4137 the caller. Nullifying them here breaks many insn chain walks.
4138
4139 To really delete an insn and related DF information, use delete_insn. */
4140
4141 void
4142 remove_insn (rtx uncast_insn)
4143 {
4144 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4145 rtx_insn *next = NEXT_INSN (insn);
4146 rtx_insn *prev = PREV_INSN (insn);
4147 basic_block bb;
4148
4149 if (prev)
4150 {
4151 SET_NEXT_INSN (prev) = next;
4152 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4153 {
4154 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4155 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4156 }
4157 }
4158 else
4159 {
4160 struct sequence_stack *seq;
4161
4162 for (seq = get_current_sequence (); seq; seq = seq->next)
4163 if (insn == seq->first)
4164 {
4165 seq->first = next;
4166 break;
4167 }
4168
4169 gcc_assert (seq);
4170 }
4171
4172 if (next)
4173 {
4174 SET_PREV_INSN (next) = prev;
4175 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4176 {
4177 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4178 SET_PREV_INSN (sequence->insn (0)) = prev;
4179 }
4180 }
4181 else
4182 {
4183 struct sequence_stack *seq;
4184
4185 for (seq = get_current_sequence (); seq; seq = seq->next)
4186 if (insn == seq->last)
4187 {
4188 seq->last = prev;
4189 break;
4190 }
4191
4192 gcc_assert (seq);
4193 }
4194
4195 /* Fix up basic block boundaries, if necessary. */
4196 if (!BARRIER_P (insn)
4197 && (bb = BLOCK_FOR_INSN (insn)))
4198 {
4199 if (BB_HEAD (bb) == insn)
4200 {
4201 /* Never ever delete the basic block note without deleting whole
4202 basic block. */
4203 gcc_assert (!NOTE_P (insn));
4204 BB_HEAD (bb) = next;
4205 }
4206 if (BB_END (bb) == insn)
4207 BB_END (bb) = prev;
4208 }
4209 }
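
/* A minimal usage sketch (INSN and AFTER are hypothetical): detach an
   insn and re-emit it elsewhere.  As documented above, the caller must
   clear the chain pointers before re-inserting:

       remove_insn (insn);
       SET_PREV_INSN (insn) = NULL;
       SET_NEXT_INSN (insn) = NULL;
       add_insn_after (insn, after, NULL);  */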
4210
4211 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4212
4213 void
4214 add_function_usage_to (rtx call_insn, rtx call_fusage)
4215 {
4216 gcc_assert (call_insn && CALL_P (call_insn));
4217
4218 /* Put the register usage information on the CALL. If there is already
4219 some usage information, put ours at the end. */
4220 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4221 {
4222 rtx link;
4223
4224 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4225 link = XEXP (link, 1))
4226 ;
4227
4228 XEXP (link, 1) = call_fusage;
4229 }
4230 else
4231 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4232 }
4233
4234 /* Delete all insns made since FROM.
4235 FROM becomes the new last instruction. */
4236
4237 void
4238 delete_insns_since (rtx_insn *from)
4239 {
4240 if (from == 0)
4241 set_first_insn (0);
4242 else
4243 SET_NEXT_INSN (from) = 0;
4244 set_last_insn (from);
4245 }
4246
4247 /* This function is deprecated; please use sequences instead.
4248
4249 Move a consecutive bunch of insns to a different place in the chain.
4250 The insns to be moved are those between FROM and TO.
4251 They are moved to a new position after the insn AFTER.
4252 AFTER must not be FROM or TO or any insn in between.
4253
4254 This function does not know about SEQUENCEs and hence should not be
4255 called after delay-slot filling has been done. */
4256
4257 void
4258 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4259 {
4260 if (flag_checking)
4261 {
4262 for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
4263 gcc_assert (after != x);
4264 gcc_assert (after != to);
4265 }
4266
4267 /* Splice this bunch out of where it is now. */
4268 if (PREV_INSN (from))
4269 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4270 if (NEXT_INSN (to))
4271 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4272 if (get_last_insn () == to)
4273 set_last_insn (PREV_INSN (from));
4274 if (get_insns () == from)
4275 set_first_insn (NEXT_INSN (to));
4276
4277 /* Make the new neighbors point to it and it to them. */
4278 if (NEXT_INSN (after))
4279 SET_PREV_INSN (NEXT_INSN (after)) = to;
4280
4281 SET_NEXT_INSN (to) = NEXT_INSN (after);
4282 SET_PREV_INSN (from) = after;
4283 SET_NEXT_INSN (after) = from;
4284 if (after == get_last_insn ())
4285 set_last_insn (to);
4286 }
4287
4288 /* Same as function above, but take care to update BB boundaries. */
4289 void
4290 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4291 {
4292 rtx_insn *prev = PREV_INSN (from);
4293 basic_block bb, bb2;
4294
4295 reorder_insns_nobb (from, to, after);
4296
4297 if (!BARRIER_P (after)
4298 && (bb = BLOCK_FOR_INSN (after)))
4299 {
4300 rtx_insn *x;
4301 df_set_bb_dirty (bb);
4302
4303 if (!BARRIER_P (from)
4304 && (bb2 = BLOCK_FOR_INSN (from)))
4305 {
4306 if (BB_END (bb2) == to)
4307 BB_END (bb2) = prev;
4308 df_set_bb_dirty (bb2);
4309 }
4310
4311 if (BB_END (bb) == after)
4312 BB_END (bb) = to;
4313
4314 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4315 if (!BARRIER_P (x))
4316 df_insn_change_bb (x, bb);
4317 }
4318 }
4319
4320 \f
4321 /* Emit insn(s) of given code and pattern
4322 at a specified place within the doubly-linked list.
4323
4324 All of the emit_foo global entry points accept an object
4325 X which is either an insn list or a PATTERN of a single
4326 instruction.
4327
4328 There are thus a few canonical ways to generate code and
4329 emit it at a specific place in the instruction stream. For
4330 example, consider the instruction named SPOT and the fact that
4331 we would like to emit some instructions before SPOT. We might
4332 do it like this:
4333
4334 start_sequence ();
4335 ... emit the new instructions ...
4336 insns_head = get_insns ();
4337 end_sequence ();
4338
4339 emit_insn_before (insns_head, SPOT);
4340
4341 It used to be common to generate SEQUENCE rtl instead, but that
4342 is a relic of the past that no longer occurs.  The reason is that
4343 SEQUENCE rtl results in badly fragmented RTL memory, since the
4344 SEQUENCE generated would almost certainly die right after it was created. */
4345
4346 static rtx_insn *
4347 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4348 rtx_insn *(*make_raw) (rtx))
4349 {
4350 rtx_insn *insn;
4351
4352 gcc_assert (before);
4353
4354 if (x == NULL_RTX)
4355 return safe_as_a <rtx_insn *> (last);
4356
4357 switch (GET_CODE (x))
4358 {
4359 case DEBUG_INSN:
4360 case INSN:
4361 case JUMP_INSN:
4362 case CALL_INSN:
4363 case CODE_LABEL:
4364 case BARRIER:
4365 case NOTE:
4366 insn = as_a <rtx_insn *> (x);
4367 while (insn)
4368 {
4369 rtx_insn *next = NEXT_INSN (insn);
4370 add_insn_before (insn, before, bb);
4371 last = insn;
4372 insn = next;
4373 }
4374 break;
4375
4376 #ifdef ENABLE_RTL_CHECKING
4377 case SEQUENCE:
4378 gcc_unreachable ();
4379 break;
4380 #endif
4381
4382 default:
4383 last = (*make_raw) (x);
4384 add_insn_before (last, before, bb);
4385 break;
4386 }
4387
4388 return safe_as_a <rtx_insn *> (last);
4389 }
4390
4391 /* Make X be output before the instruction BEFORE. */
4392
4393 rtx_insn *
4394 emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
4395 {
4396 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4397 }
4398
4399 /* Make an instruction with body X and code JUMP_INSN
4400 and output it before the instruction BEFORE. */
4401
4402 rtx_jump_insn *
4403 emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
4404 {
4405 return as_a <rtx_jump_insn *> (
4406 emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4407 make_jump_insn_raw));
4408 }
4409
4410 /* Make an instruction with body X and code CALL_INSN
4411 and output it before the instruction BEFORE. */
4412
4413 rtx_insn *
4414 emit_call_insn_before_noloc (rtx x, rtx_insn *before)
4415 {
4416 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4417 make_call_insn_raw);
4418 }
4419
4420 /* Make an instruction with body X and code DEBUG_INSN
4421 and output it before the instruction BEFORE. */
4422
4423 rtx_insn *
4424 emit_debug_insn_before_noloc (rtx x, rtx before)
4425 {
4426 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4427 make_debug_insn_raw);
4428 }
4429
4430 /* Make an insn of code BARRIER
4431 and output it before the insn BEFORE. */
4432
4433 rtx_barrier *
4434 emit_barrier_before (rtx before)
4435 {
4436 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4437
4438 INSN_UID (insn) = cur_insn_uid++;
4439
4440 add_insn_before (insn, before, NULL);
4441 return insn;
4442 }
4443
4444 /* Emit the label LABEL before the insn BEFORE. */
4445
4446 rtx_code_label *
4447 emit_label_before (rtx label, rtx_insn *before)
4448 {
4449 gcc_checking_assert (INSN_UID (label) == 0);
4450 INSN_UID (label) = cur_insn_uid++;
4451 add_insn_before (label, before, NULL);
4452 return as_a <rtx_code_label *> (label);
4453 }
4454 \f
4455 /* Helper for emit_insn_after, handles lists of instructions
4456 efficiently. */
4457
4458 static rtx_insn *
4459 emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
4460 {
4461 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4462 rtx_insn *last;
4463 rtx_insn *after_after;
4464 if (!bb && !BARRIER_P (after))
4465 bb = BLOCK_FOR_INSN (after);
4466
4467 if (bb)
4468 {
4469 df_set_bb_dirty (bb);
4470 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4471 if (!BARRIER_P (last))
4472 {
4473 set_block_for_insn (last, bb);
4474 df_insn_rescan (last);
4475 }
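      /* The loop above stops at the last insn in the list, which is
	 still unprocessed; set its block and rescan it here too.  */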
4476 if (!BARRIER_P (last))
4477 {
4478 set_block_for_insn (last, bb);
4479 df_insn_rescan (last);
4480 }
4481 if (BB_END (bb) == after)
4482 BB_END (bb) = last;
4483 }
4484 else
4485 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4486 continue;
4487
4488 after_after = NEXT_INSN (after);
4489
4490 SET_NEXT_INSN (after) = first;
4491 SET_PREV_INSN (first) = after;
4492 SET_NEXT_INSN (last) = after_after;
4493 if (after_after)
4494 SET_PREV_INSN (after_after) = last;
4495
4496 if (after == get_last_insn ())
4497 set_last_insn (last);
4498
4499 return last;
4500 }
4501
4502 static rtx_insn *
4503 emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
4504 rtx_insn *(*make_raw)(rtx))
4505 {
4506 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4507 rtx_insn *last = after;
4508
4509 gcc_assert (after);
4510
4511 if (x == NULL_RTX)
4512 return last;
4513
4514 switch (GET_CODE (x))
4515 {
4516 case DEBUG_INSN:
4517 case INSN:
4518 case JUMP_INSN:
4519 case CALL_INSN:
4520 case CODE_LABEL:
4521 case BARRIER:
4522 case NOTE:
4523 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4524 break;
4525
4526 #ifdef ENABLE_RTL_CHECKING
4527 case SEQUENCE:
4528 gcc_unreachable ();
4529 break;
4530 #endif
4531
4532 default:
4533 last = (*make_raw) (x);
4534 add_insn_after (last, after, bb);
4535 break;
4536 }
4537
4538 return last;
4539 }
4540
4541 /* Make X be output after the insn AFTER and set the BB of insn. If
4542 BB is NULL, an attempt is made to infer the BB from AFTER. */
4543
4544 rtx_insn *
4545 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4546 {
4547 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4548 }
4549
4550
4551 /* Make an insn of code JUMP_INSN with body X
4552 and output it after the insn AFTER. */
4553
4554 rtx_jump_insn *
4555 emit_jump_insn_after_noloc (rtx x, rtx after)
4556 {
4557 return as_a <rtx_jump_insn *> (
4558 emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
4559 }
4560
4561 /* Make an instruction with body X and code CALL_INSN
4562 and output it after the instruction AFTER. */
4563
4564 rtx_insn *
4565 emit_call_insn_after_noloc (rtx x, rtx after)
4566 {
4567 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4568 }
4569
4570 /* Make an instruction with body X and code DEBUG_INSN
4571 and output it after the instruction AFTER. */
4572
4573 rtx_insn *
4574 emit_debug_insn_after_noloc (rtx x, rtx after)
4575 {
4576 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4577 }
4578
4579 /* Make an insn of code BARRIER
4580 and output it after the insn AFTER. */
4581
4582 rtx_barrier *
4583 emit_barrier_after (rtx after)
4584 {
4585 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4586
4587 INSN_UID (insn) = cur_insn_uid++;
4588
4589 add_insn_after (insn, after, NULL);
4590 return insn;
4591 }
4592
4593 /* Emit the label LABEL after the insn AFTER. */
4594
4595 rtx_insn *
4596 emit_label_after (rtx label, rtx_insn *after)
4597 {
4598 gcc_checking_assert (INSN_UID (label) == 0);
4599 INSN_UID (label) = cur_insn_uid++;
4600 add_insn_after (label, after, NULL);
4601 return as_a <rtx_insn *> (label);
4602 }
4603 \f
4604 /* Notes require a bit of special handling: Some notes need to have their
4605 BLOCK_FOR_INSN set, others should never have it set, and some should
4606 have it set or clear depending on the context. */
4607
4608 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4609 that never set BLOCK_FOR_INSN on NOTE.  ON_BB_BOUNDARY_P is true if the
4610 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4611
4612 static bool
4613 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4614 {
4615 switch (subtype)
4616 {
4617 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4618 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4619 return true;
4620
4621 /* Notes for var tracking and EH region markers can appear between or
4622 inside basic blocks. If the caller is emitting on the basic block
4623 boundary, do not set BLOCK_FOR_INSN on the new note. */
4624 case NOTE_INSN_VAR_LOCATION:
4625 case NOTE_INSN_CALL_ARG_LOCATION:
4626 case NOTE_INSN_EH_REGION_BEG:
4627 case NOTE_INSN_EH_REGION_END:
4628 return on_bb_boundary_p;
4629
4630 /* Otherwise, BLOCK_FOR_INSN must be set. */
4631 default:
4632 return false;
4633 }
4634 }
4635
4636 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4637
4638 rtx_note *
4639 emit_note_after (enum insn_note subtype, rtx_insn *after)
4640 {
4641 rtx_note *note = make_note_raw (subtype);
4642 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4643 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4644
4645 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4646 add_insn_after_nobb (note, after);
4647 else
4648 add_insn_after (note, after, bb);
4649 return note;
4650 }
4651
4652 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4653
4654 rtx_note *
4655 emit_note_before (enum insn_note subtype, rtx_insn *before)
4656 {
4657 rtx_note *note = make_note_raw (subtype);
4658 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4659 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4660
4661 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4662 add_insn_before_nobb (note, before);
4663 else
4664 add_insn_before (note, before, bb);
4665 return note;
4666 }
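
/* For example (BB is hypothetical), emitting a var-tracking note at a
   block boundary leaves BLOCK_FOR_INSN unset on the new note:

       emit_note_after (NOTE_INSN_VAR_LOCATION, BB_END (bb));

   whereas the same call with AFTER in the middle of BB would set
   BLOCK_FOR_INSN of the note to BB.  */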
4667 \f
4668 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4669 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4670
4671 static rtx_insn *
4672 emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
4673 rtx_insn *(*make_raw) (rtx))
4674 {
4675 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4676 rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4677
4678 if (pattern == NULL_RTX || !loc)
4679 return last;
4680
4681 after = NEXT_INSN (after);
4682 while (1)
4683 {
4684 if (active_insn_p (after)
4685 && !JUMP_TABLE_DATA_P (after) /* FIXME */
4686 && !INSN_LOCATION (after))
4687 INSN_LOCATION (after) = loc;
4688 if (after == last)
4689 break;
4690 after = NEXT_INSN (after);
4691 }
4692 return last;
4693 }
4694
4695 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4696 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4697 any DEBUG_INSNs. */
4698
4699 static rtx_insn *
4700 emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
4701 rtx_insn *(*make_raw) (rtx))
4702 {
4703 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4704 rtx_insn *prev = after;
4705
4706 if (skip_debug_insns)
4707 while (DEBUG_INSN_P (prev))
4708 prev = PREV_INSN (prev);
4709
4710 if (INSN_P (prev))
4711 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4712 make_raw);
4713 else
4714 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4715 }
4716
4717 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4718 rtx_insn *
4719 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4720 {
4721 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4722 }
4723
4724 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4725 rtx_insn *
4726 emit_insn_after (rtx pattern, rtx after)
4727 {
4728 return emit_pattern_after (pattern, after, true, make_insn_raw);
4729 }
4730
4731 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4732 rtx_jump_insn *
4733 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4734 {
4735 return as_a <rtx_jump_insn *> (
4736 emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
4737 }
4738
4739 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4740 rtx_jump_insn *
4741 emit_jump_insn_after (rtx pattern, rtx after)
4742 {
4743 return as_a <rtx_jump_insn *> (
4744 emit_pattern_after (pattern, after, true, make_jump_insn_raw));
4745 }
4746
4747 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4748 rtx_insn *
4749 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4750 {
4751 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4752 }
4753
4754 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4755 rtx_insn *
4756 emit_call_insn_after (rtx pattern, rtx after)
4757 {
4758 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4759 }
4760
4761 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4762 rtx_insn *
4763 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4764 {
4765 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4766 }
4767
4768 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4769 rtx_insn *
4770 emit_debug_insn_after (rtx pattern, rtx after)
4771 {
4772 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4773 }
4774
4775 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4776 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4777 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4778 CALL_INSN, etc. */
4779
4780 static rtx_insn *
4781 emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
4782 rtx_insn *(*make_raw) (rtx))
4783 {
4784 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4785 rtx_insn *first = PREV_INSN (before);
4786 rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4787 insnp ? before : NULL_RTX,
4788 NULL, make_raw);
4789
4790 if (pattern == NULL_RTX || !loc)
4791 return last;
4792
4793 if (!first)
4794 first = get_insns ();
4795 else
4796 first = NEXT_INSN (first);
4797 while (1)
4798 {
4799 if (active_insn_p (first)
4800 && !JUMP_TABLE_DATA_P (first) /* FIXME */
4801 && !INSN_LOCATION (first))
4802 INSN_LOCATION (first) = loc;
4803 if (first == last)
4804 break;
4805 first = NEXT_INSN (first);
4806 }
4807 return last;
4808 }
4809
4810 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4811 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4812 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4813 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4814
4815 static rtx_insn *
4816 emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
4817 bool insnp, rtx_insn *(*make_raw) (rtx))
4818 {
4819 rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
4820 rtx_insn *next = before;
4821
4822 if (skip_debug_insns)
4823 while (DEBUG_INSN_P (next))
4824 next = PREV_INSN (next);
4825
4826 if (INSN_P (next))
4827 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4828 insnp, make_raw);
4829 else
4830 return emit_pattern_before_noloc (pattern, before,
4831 insnp ? before : NULL_RTX,
4832 NULL, make_raw);
4833 }
4834
4835 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4836 rtx_insn *
4837 emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4838 {
4839 return emit_pattern_before_setloc (pattern, before, loc, true,
4840 make_insn_raw);
4841 }
4842
4843 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4844 rtx_insn *
4845 emit_insn_before (rtx pattern, rtx before)
4846 {
4847 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4848 }
4849
4850 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4851 rtx_jump_insn *
4852 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4853 {
4854 return as_a <rtx_jump_insn *> (
4855 emit_pattern_before_setloc (pattern, before, loc, false,
4856 make_jump_insn_raw));
4857 }
4858
4859 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4860 rtx_jump_insn *
4861 emit_jump_insn_before (rtx pattern, rtx before)
4862 {
4863 return as_a <rtx_jump_insn *> (
4864 emit_pattern_before (pattern, before, true, false,
4865 make_jump_insn_raw));
4866 }
4867
4868 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4869 rtx_insn *
4870 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4871 {
4872 return emit_pattern_before_setloc (pattern, before, loc, false,
4873 make_call_insn_raw);
4874 }
4875
4876 /* Like emit_call_insn_before_noloc,
4877 but set INSN_LOCATION according to BEFORE. */
4878 rtx_insn *
4879 emit_call_insn_before (rtx pattern, rtx_insn *before)
4880 {
4881 return emit_pattern_before (pattern, before, true, false,
4882 make_call_insn_raw);
4883 }
4884
4885 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4886 rtx_insn *
4887 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4888 {
4889 return emit_pattern_before_setloc (pattern, before, loc, false,
4890 make_debug_insn_raw);
4891 }
4892
4893 /* Like emit_debug_insn_before_noloc,
4894 but set INSN_LOCATION according to BEFORE. */
4895 rtx_insn *
4896 emit_debug_insn_before (rtx pattern, rtx_insn *before)
4897 {
4898 return emit_pattern_before (pattern, before, false, false,
4899 make_debug_insn_raw);
4900 }
4901 \f
4902 /* Take X and emit it at the end of the doubly-linked
4903 INSN list.
4904
4905 Returns the last insn emitted. */
4906
4907 rtx_insn *
4908 emit_insn (rtx x)
4909 {
4910 rtx_insn *last = get_last_insn ();
4911 rtx_insn *insn;
4912
4913 if (x == NULL_RTX)
4914 return last;
4915
4916 switch (GET_CODE (x))
4917 {
4918 case DEBUG_INSN:
4919 case INSN:
4920 case JUMP_INSN:
4921 case CALL_INSN:
4922 case CODE_LABEL:
4923 case BARRIER:
4924 case NOTE:
4925 insn = as_a <rtx_insn *> (x);
4926 while (insn)
4927 {
4928 rtx_insn *next = NEXT_INSN (insn);
4929 add_insn (insn);
4930 last = insn;
4931 insn = next;
4932 }
4933 break;
4934
4935 #ifdef ENABLE_RTL_CHECKING
4936 case JUMP_TABLE_DATA:
4937 case SEQUENCE:
4938 gcc_unreachable ();
4939 break;
4940 #endif
4941
4942 default:
4943 last = make_insn_raw (x);
4944 add_insn (last);
4945 break;
4946 }
4947
4948 return last;
4949 }
4950
4951 /* Make an insn of code DEBUG_INSN with pattern X
4952 and add it to the end of the doubly-linked list. */
4953
4954 rtx_insn *
4955 emit_debug_insn (rtx x)
4956 {
4957 rtx_insn *last = get_last_insn ();
4958 rtx_insn *insn;
4959
4960 if (x == NULL_RTX)
4961 return last;
4962
4963 switch (GET_CODE (x))
4964 {
4965 case DEBUG_INSN:
4966 case INSN:
4967 case JUMP_INSN:
4968 case CALL_INSN:
4969 case CODE_LABEL:
4970 case BARRIER:
4971 case NOTE:
4972 insn = as_a <rtx_insn *> (x);
4973 while (insn)
4974 {
4975 rtx_insn *next = NEXT_INSN (insn);
4976 add_insn (insn);
4977 last = insn;
4978 insn = next;
4979 }
4980 break;
4981
4982 #ifdef ENABLE_RTL_CHECKING
4983 case JUMP_TABLE_DATA:
4984 case SEQUENCE:
4985 gcc_unreachable ();
4986 break;
4987 #endif
4988
4989 default:
4990 last = make_debug_insn_raw (x);
4991 add_insn (last);
4992 break;
4993 }
4994
4995 return last;
4996 }
4997
4998 /* Make an insn of code JUMP_INSN with pattern X
4999 and add it to the end of the doubly-linked list. */
5000
5001 rtx_insn *
5002 emit_jump_insn (rtx x)
5003 {
5004 rtx_insn *last = NULL;
5005 rtx_insn *insn;
5006
5007 switch (GET_CODE (x))
5008 {
5009 case DEBUG_INSN:
5010 case INSN:
5011 case JUMP_INSN:
5012 case CALL_INSN:
5013 case CODE_LABEL:
5014 case BARRIER:
5015 case NOTE:
5016 insn = as_a <rtx_insn *> (x);
5017 while (insn)
5018 {
5019 rtx_insn *next = NEXT_INSN (insn);
5020 add_insn (insn);
5021 last = insn;
5022 insn = next;
5023 }
5024 break;
5025
5026 #ifdef ENABLE_RTL_CHECKING
5027 case JUMP_TABLE_DATA:
5028 case SEQUENCE:
5029 gcc_unreachable ();
5030 break;
5031 #endif
5032
5033 default:
5034 last = make_jump_insn_raw (x);
5035 add_insn (last);
5036 break;
5037 }
5038
5039 return last;
5040 }
5041
5042 /* Make an insn of code CALL_INSN with pattern X
5043 and add it to the end of the doubly-linked list. */
5044
5045 rtx_insn *
5046 emit_call_insn (rtx x)
5047 {
5048 rtx_insn *insn;
5049
5050 switch (GET_CODE (x))
5051 {
5052 case DEBUG_INSN:
5053 case INSN:
5054 case JUMP_INSN:
5055 case CALL_INSN:
5056 case CODE_LABEL:
5057 case BARRIER:
5058 case NOTE:
5059 insn = emit_insn (x);
5060 break;
5061
5062 #ifdef ENABLE_RTL_CHECKING
5063 case SEQUENCE:
5064 case JUMP_TABLE_DATA:
5065 gcc_unreachable ();
5066 break;
5067 #endif
5068
5069 default:
5070 insn = make_call_insn_raw (x);
5071 add_insn (insn);
5072 break;
5073 }
5074
5075 return insn;
5076 }
5077
5078 /* Add the label LABEL to the end of the doubly-linked list. */
5079
5080 rtx_code_label *
5081 emit_label (rtx uncast_label)
5082 {
5083 rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
5084
5085 gcc_checking_assert (INSN_UID (label) == 0);
5086 INSN_UID (label) = cur_insn_uid++;
5087 add_insn (label);
5088 return label;
5089 }
5090
5091 /* Make an insn of code JUMP_TABLE_DATA
5092 and add it to the end of the doubly-linked list. */
5093
5094 rtx_jump_table_data *
5095 emit_jump_table_data (rtx table)
5096 {
5097 rtx_jump_table_data *jump_table_data =
5098 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5099 INSN_UID (jump_table_data) = cur_insn_uid++;
5100 PATTERN (jump_table_data) = table;
5101 BLOCK_FOR_INSN (jump_table_data) = NULL;
5102 add_insn (jump_table_data);
5103 return jump_table_data;
5104 }
5105
5106 /* Make an insn of code BARRIER
5107 and add it to the end of the doubly-linked list. */
5108
5109 rtx_barrier *
5110 emit_barrier (void)
5111 {
5112 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5113 INSN_UID (barrier) = cur_insn_uid++;
5114 add_insn (barrier);
5115 return barrier;
5116 }
5117
5118 /* Emit a copy of note ORIG. */
5119
5120 rtx_note *
5121 emit_note_copy (rtx_note *orig)
5122 {
5123 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5124 rtx_note *note = make_note_raw (kind);
5125 NOTE_DATA (note) = NOTE_DATA (orig);
5126 add_insn (note);
5127 return note;
5128 }
5129
5130 /* Make an insn of code NOTE with kind KIND
5131 and add it to the end of the doubly-linked list. */
5132
5133 rtx_note *
5134 emit_note (enum insn_note kind)
5135 {
5136 rtx_note *note = make_note_raw (kind);
5137 add_insn (note);
5138 return note;
5139 }
5140
5141 /* Emit a clobber of lvalue X. */
5142
5143 rtx_insn *
5144 emit_clobber (rtx x)
5145 {
5146 /* CONCATs should not appear in the insn stream. */
5147 if (GET_CODE (x) == CONCAT)
5148 {
5149 emit_clobber (XEXP (x, 0));
5150 return emit_clobber (XEXP (x, 1));
5151 }
5152 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5153 }
5154
5155 /* Return a sequence of insns to clobber lvalue X. */
5156
5157 rtx_insn *
5158 gen_clobber (rtx x)
5159 {
5160 rtx_insn *seq;
5161
5162 start_sequence ();
5163 emit_clobber (x);
5164 seq = get_insns ();
5165 end_sequence ();
5166 return seq;
5167 }
5168
5169 /* Emit a use of rvalue X. */
5170
5171 rtx_insn *
5172 emit_use (rtx x)
5173 {
5174 /* CONCATs should not appear in the insn stream. */
5175 if (GET_CODE (x) == CONCAT)
5176 {
5177 emit_use (XEXP (x, 0));
5178 return emit_use (XEXP (x, 1));
5179 }
5180 return emit_insn (gen_rtx_USE (VOIDmode, x));
5181 }
5182
5183 /* Return a sequence of insns to use rvalue X. */
5184
5185 rtx_insn *
5186 gen_use (rtx x)
5187 {
5188 rtx_insn *seq;
5189
5190 start_sequence ();
5191 emit_use (x);
5192 seq = get_insns ();
5193 end_sequence ();
5194 return seq;
5195 }
5196
5197 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5198 Return the set in INSN that such notes describe, or NULL if the notes
5199 have no meaning for INSN. */
5200
5201 rtx
5202 set_for_reg_notes (rtx insn)
5203 {
5204 rtx pat, reg;
5205
5206 if (!INSN_P (insn))
5207 return NULL_RTX;
5208
5209 pat = PATTERN (insn);
5210 if (GET_CODE (pat) == PARALLEL)
5211 {
5212 /* We do not use single_set because that ignores SETs of unused
5213 registers. REG_EQUAL and REG_EQUIV notes really do require the
5214 PARALLEL to have a single SET. */
5215 if (multiple_sets (insn))
5216 return NULL_RTX;
5217 pat = XVECEXP (pat, 0, 0);
5218 }
5219
5220 if (GET_CODE (pat) != SET)
5221 return NULL_RTX;
5222
5223 reg = SET_DEST (pat);
5224
5225 /* Notes apply to the contents of a STRICT_LOW_PART. */
5226 if (GET_CODE (reg) == STRICT_LOW_PART
5227 || GET_CODE (reg) == ZERO_EXTRACT)
5228 reg = XEXP (reg, 0);
5229
5230 /* Check that we have a register. */
5231 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5232 return NULL_RTX;
5233
5234 return pat;
5235 }
5236
5237 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5238 note of this type already exists, remove it first. */
5239
5240 rtx
5241 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5242 {
5243 rtx note = find_reg_note (insn, kind, NULL_RTX);
5244
5245 switch (kind)
5246 {
5247 case REG_EQUAL:
5248 case REG_EQUIV:
5249 if (!set_for_reg_notes (insn))
5250 return NULL_RTX;
5251
5252 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5253 It serves no useful purpose and breaks eliminate_regs. */
5254 if (GET_CODE (datum) == ASM_OPERANDS)
5255 return NULL_RTX;
5256
5257 /* Notes with side effects are dangerous. Even if the side-effect
5258 initially mirrors one in PATTERN (INSN), later optimizations
5259 might alter the way that the final register value is calculated
5260 and so move or alter the side-effect in some way. The note would
5261 then no longer be a valid substitution for SET_SRC. */
5262 if (side_effects_p (datum))
5263 return NULL_RTX;
5264 break;
5265
5266 default:
5267 break;
5268 }
5269
5270 if (note)
5271 XEXP (note, 0) = datum;
5272 else
5273 {
5274 add_reg_note (insn, kind, datum);
5275 note = REG_NOTES (insn);
5276 }
5277
5278 switch (kind)
5279 {
5280 case REG_EQUAL:
5281 case REG_EQUIV:
5282 df_notes_rescan (as_a <rtx_insn *> (insn));
5283 break;
5284 default:
5285 break;
5286 }
5287
5288 return note;
5289 }
5290
5291 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5292 rtx
5293 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5294 {
5295 rtx set = set_for_reg_notes (insn);
5296
5297 if (set && SET_DEST (set) == dst)
5298 return set_unique_reg_note (insn, kind, datum);
5299 return NULL_RTX;
5300 }
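
/* A typical use, sketched with hypothetical TARGET, OP0, OP1 and MODE:
   after expanding an addition, record that the value computed into
   TARGET equals the simpler expression, so later passes may substitute
   it:

       rtx_insn *last = get_last_insn ();
       set_dst_reg_note (last, REG_EQUAL,
			 gen_rtx_PLUS (mode, op0, op1), target);

   The note is silently dropped when LAST does not set TARGET.  */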
5301 \f
5302 /* Emit the rtl pattern X as an appropriate kind of insn. Also emit a
5303 following barrier if the instruction needs one and if ALLOW_BARRIER_P
5304 is true.
5305
5306 If X is a label, it is simply added into the insn chain. */
5307
5308 rtx_insn *
5309 emit (rtx x, bool allow_barrier_p)
5310 {
5311 enum rtx_code code = classify_insn (x);
5312
5313 switch (code)
5314 {
5315 case CODE_LABEL:
5316 return emit_label (x);
5317 case INSN:
5318 return emit_insn (x);
5319 case JUMP_INSN:
5320 {
5321 rtx_insn *insn = emit_jump_insn (x);
5322 if (allow_barrier_p
5323 && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
5324 return emit_barrier ();
5325 return insn;
5326 }
5327 case CALL_INSN:
5328 return emit_call_insn (x);
5329 case DEBUG_INSN:
5330 return emit_debug_insn (x);
5331 default:
5332 gcc_unreachable ();
5333 }
5334 }
5335 \f
5336 /* Space for free sequence stack entries. */
5337 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5338
5339 /* Begin emitting insns to a sequence. If this sequence will contain
5340 something that might cause the compiler to pop arguments to function
5341 calls (because those pops have previously been deferred; see
5342 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5343 before calling this function. That will ensure that the deferred
5344 pops are not accidentally emitted in the middle of this sequence. */
5345
5346 void
5347 start_sequence (void)
5348 {
5349 struct sequence_stack *tem;
5350
5351 if (free_sequence_stack != NULL)
5352 {
5353 tem = free_sequence_stack;
5354 free_sequence_stack = tem->next;
5355 }
5356 else
5357 tem = ggc_alloc<sequence_stack> ();
5358
5359 tem->next = get_current_sequence ()->next;
5360 tem->first = get_insns ();
5361 tem->last = get_last_insn ();
5362 get_current_sequence ()->next = tem;
5363
5364 set_first_insn (0);
5365 set_last_insn (0);
5366 }
5367
5368 /* Set up the insn chain starting with FIRST as the current sequence,
5369 saving the previously current one. See the documentation for
5370 start_sequence for more information about how to use this function. */
5371
5372 void
5373 push_to_sequence (rtx_insn *first)
5374 {
5375 rtx_insn *last;
5376
5377 start_sequence ();
5378
5379 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5380 ;
5381
5382 set_first_insn (first);
5383 set_last_insn (last);
5384 }
5385
5386 /* Like push_to_sequence, but take the last insn as an argument to avoid
5387 looping through the list. */
5388
5389 void
5390 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5391 {
5392 start_sequence ();
5393
5394 set_first_insn (first);
5395 set_last_insn (last);
5396 }
5397
5398 /* Set up the outer-level insn chain
5399 as the current sequence, saving the previously current one. */
5400
5401 void
5402 push_topmost_sequence (void)
5403 {
5404 struct sequence_stack *top;
5405
5406 start_sequence ();
5407
5408 top = get_topmost_sequence ();
5409 set_first_insn (top->first);
5410 set_last_insn (top->last);
5411 }
5412
5413 /* After emitting to the outer-level insn chain, update the outer-level
5414 insn chain, and restore the previous saved state. */
5415
5416 void
5417 pop_topmost_sequence (void)
5418 {
5419 struct sequence_stack *top;
5420
5421 top = get_topmost_sequence ();
5422 top->first = get_insns ();
5423 top->last = get_last_insn ();
5424
5425 end_sequence ();
5426 }
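
/* A small sketch of the topmost-sequence pair (PATTERN is hypothetical):
   emit an insn at the end of the function's outer-level chain even while
   a nested sequence is active:

       push_topmost_sequence ();
       emit_insn (pattern);
       pop_topmost_sequence ();  */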
5427
5428 /* After emitting to a sequence, restore previous saved state.
5429
5430 To get the contents of the sequence just made, you must call
5431 `get_insns' *before* calling here.
5432
5433 If the compiler might have deferred popping arguments while
5434 generating this sequence, and this sequence will not be immediately
5435 inserted into the instruction stream, use do_pending_stack_adjust
5436 before calling get_insns. That will ensure that the deferred
5437 pops are inserted into this sequence, and not into some random
5438 location in the instruction stream. See INHIBIT_DEFER_POP for more
5439 information about deferred popping of arguments. */
5440
5441 void
5442 end_sequence (void)
5443 {
5444 struct sequence_stack *tem = get_current_sequence ()->next;
5445
5446 set_first_insn (tem->first);
5447 set_last_insn (tem->last);
5448 get_current_sequence ()->next = tem->next;
5449
5450 memset (tem, 0, sizeof (*tem));
5451 tem->next = free_sequence_stack;
5452 free_sequence_stack = tem;
5453 }
5454
5455 /* Return 1 if currently emitting into a sequence. */
5456
5457 int
5458 in_sequence_p (void)
5459 {
5460 return get_current_sequence ()->next != 0;
5461 }
5462 \f
5463 /* Put the various virtual registers into REGNO_REG_RTX. */
5464
5465 static void
5466 init_virtual_regs (void)
5467 {
5468 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5469 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5470 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5471 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5472 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5473 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5474 = virtual_preferred_stack_boundary_rtx;
5475 }
5476
5477 \f
5478 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5479 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5480 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5481 static int copy_insn_n_scratches;
5482
5483 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5484 copied an ASM_OPERANDS.
5485 In that case, it is the original input-operand vector. */
5486 static rtvec orig_asm_operands_vector;
5487
5488 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5489 copied an ASM_OPERANDS.
5490 In that case, it is the copied input-operand vector. */
5491 static rtvec copy_asm_operands_vector;
5492
5493 /* Likewise for the constraints vector. */
5494 static rtvec orig_asm_constraints_vector;
5495 static rtvec copy_asm_constraints_vector;
5496
5497 /* Recursively create a new copy of an rtx for copy_insn.
5498 This function differs from copy_rtx in that it handles SCRATCHes and
5499 ASM_OPERANDs properly.
5500 Normally, this function is not used directly; use copy_insn as front end.
5501 However, you could first copy an insn pattern with copy_insn and then use
5502 this function afterwards to properly copy any REG_NOTEs containing
5503 SCRATCHes. */
5504
5505 rtx
5506 copy_insn_1 (rtx orig)
5507 {
5508 rtx copy;
5509 int i, j;
5510 RTX_CODE code;
5511 const char *format_ptr;
5512
5513 if (orig == NULL)
5514 return NULL;
5515
5516 code = GET_CODE (orig);
5517
5518 switch (code)
5519 {
5520 case REG:
5521 case DEBUG_EXPR:
5522 CASE_CONST_ANY:
5523 case SYMBOL_REF:
5524 case CODE_LABEL:
5525 case PC:
5526 case CC0:
5527 case RETURN:
5528 case SIMPLE_RETURN:
5529 return orig;
5530 case CLOBBER:
5531 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5532 clobbers or clobbers of hard registers that originated as pseudos.
5533 This is needed to allow safe register renaming. */
5534 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
5535 && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
5536 return orig;
5537 break;
5538
5539 case SCRATCH:
5540 for (i = 0; i < copy_insn_n_scratches; i++)
5541 if (copy_insn_scratch_in[i] == orig)
5542 return copy_insn_scratch_out[i];
5543 break;
5544
5545 case CONST:
5546 if (shared_const_p (orig))
5547 return orig;
5548 break;
5549
5550 /* A MEM with a constant address is not sharable. The problem is that
5551 the constant address may need to be reloaded. If the mem is shared,
5552 then reloading one copy of this mem will cause all copies to appear
5553 to have been reloaded. */
5554
5555 default:
5556 break;
5557 }
5558
5559 /* Copy the various flags, fields, and other information. We assume
5560 that all fields need copying, and then clear the fields that should
5561 not be copied. That is the sensible default behavior, and forces
5562 us to explicitly document why we are *not* copying a flag. */
5563 copy = shallow_copy_rtx (orig);
5564
5565 /* We do not copy the USED flag, which is used as a mark bit during
5566 walks over the RTL. */
5567 RTX_FLAG (copy, used) = 0;
5568
5569 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5570 if (INSN_P (orig))
5571 {
5572 RTX_FLAG (copy, jump) = 0;
5573 RTX_FLAG (copy, call) = 0;
5574 RTX_FLAG (copy, frame_related) = 0;
5575 }
5576
5577 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5578
5579 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5580 switch (*format_ptr++)
5581 {
5582 case 'e':
5583 if (XEXP (orig, i) != NULL)
5584 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5585 break;
5586
5587 case 'E':
5588 case 'V':
5589 if (XVEC (orig, i) == orig_asm_constraints_vector)
5590 XVEC (copy, i) = copy_asm_constraints_vector;
5591 else if (XVEC (orig, i) == orig_asm_operands_vector)
5592 XVEC (copy, i) = copy_asm_operands_vector;
5593 else if (XVEC (orig, i) != NULL)
5594 {
5595 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5596 for (j = 0; j < XVECLEN (copy, i); j++)
5597 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5598 }
5599 break;
5600
5601 case 't':
5602 case 'w':
5603 case 'i':
5604 case 's':
5605 case 'S':
5606 case 'u':
5607 case '0':
5608 /* These are left unchanged. */
5609 break;
5610
5611 default:
5612 gcc_unreachable ();
5613 }
5614
5615 if (code == SCRATCH)
5616 {
5617 i = copy_insn_n_scratches++;
5618 gcc_assert (i < MAX_RECOG_OPERANDS);
5619 copy_insn_scratch_in[i] = orig;
5620 copy_insn_scratch_out[i] = copy;
5621 }
5622 else if (code == ASM_OPERANDS)
5623 {
5624 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5625 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5626 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5627 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5628 }
5629
5630 return copy;
5631 }
5632
5633 /* Create a new copy of an rtx.
5634 This function differs from copy_rtx in that it handles SCRATCHes and
5635 ASM_OPERANDs properly.
5636 INSN doesn't really have to be a full INSN; it could be just the
5637 pattern. */
5638 rtx
5639 copy_insn (rtx insn)
5640 {
5641 copy_insn_n_scratches = 0;
5642 orig_asm_operands_vector = 0;
5643 orig_asm_constraints_vector = 0;
5644 copy_asm_operands_vector = 0;
5645 copy_asm_constraints_vector = 0;
5646 return copy_insn_1 (insn);
5647 }
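
/* A minimal sketch of the pairing described above (INSN is
   hypothetical): copy a pattern, then copy its REG_NOTES while the same
   SCRATCH mapping is still in effect:

       rtx pat = copy_insn (PATTERN (insn));
       rtx notes = copy_insn_1 (REG_NOTES (insn));  */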
5648
5649 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5650 on the assumption that INSN itself remains in its original place. */
5651
5652 rtx_insn *
5653 copy_delay_slot_insn (rtx_insn *insn)
5654 {
5655 /* Copy INSN with its rtx_code, all its notes, location etc. */
5656 insn = as_a <rtx_insn *> (copy_rtx (insn));
5657 INSN_UID (insn) = cur_insn_uid++;
5658 return insn;
5659 }
5660
5661 /* Initialize data structures and variables in this file
5662 before generating rtl for each function. */
5663
5664 void
5665 init_emit (void)
5666 {
5667 set_first_insn (NULL);
5668 set_last_insn (NULL);
5669 if (MIN_NONDEBUG_INSN_UID)
5670 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5671 else
5672 cur_insn_uid = 1;
5673 cur_debug_insn_uid = 1;
5674 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5675 first_label_num = label_num;
5676 get_current_sequence ()->next = NULL;
5677
5678 /* Init the tables that describe all the pseudo regs. */
5679
5680 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5681
5682 crtl->emit.regno_pointer_align
5683 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5684
5685 regno_reg_rtx = ggc_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5686
5687 /* Put copies of all the hard registers into regno_reg_rtx. */
5688 memcpy (regno_reg_rtx,
5689 initial_regno_reg_rtx,
5690 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5691
5692 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5693 init_virtual_regs ();
5694
5695 /* Indicate that the virtual registers and stack locations are
5696 all pointers. */
5697 REG_POINTER (stack_pointer_rtx) = 1;
5698 REG_POINTER (frame_pointer_rtx) = 1;
5699 REG_POINTER (hard_frame_pointer_rtx) = 1;
5700 REG_POINTER (arg_pointer_rtx) = 1;
5701
5702 REG_POINTER (virtual_incoming_args_rtx) = 1;
5703 REG_POINTER (virtual_stack_vars_rtx) = 1;
5704 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5705 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5706 REG_POINTER (virtual_cfa_rtx) = 1;
5707
5708 #ifdef STACK_BOUNDARY
5709 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5710 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5711 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5712 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5713
5714 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5715 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5716 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5717 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5718 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5719 #endif
5720
5721 #ifdef INIT_EXPANDERS
5722 INIT_EXPANDERS;
5723 #endif
5724 }
5725
5726 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5727
5728 static rtx
5729 gen_const_vector (machine_mode mode, int constant)
5730 {
5731 rtx tem;
5732 rtvec v;
5733 int units, i;
5734 machine_mode inner;
5735
5736 units = GET_MODE_NUNITS (mode);
5737 inner = GET_MODE_INNER (mode);
5738
5739 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5740
5741 v = rtvec_alloc (units);
5742
5743 /* We need to call this function after we set the scalar const_tiny_rtx
5744 entries. */
5745 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5746
5747 for (i = 0; i < units; ++i)
5748 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5749
5750 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5751 return tem;
5752 }
5753
5754 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
5755 all elements are zero, and the one vector when all elements are one. */
5756 rtx
5757 gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
5758 {
5759 machine_mode inner = GET_MODE_INNER (mode);
5760 int nunits = GET_MODE_NUNITS (mode);
5761 rtx x;
5762 int i;
5763
5764 /* Check to see if all of the elements have the same value. */
5765 x = RTVEC_ELT (v, nunits - 1);
5766 for (i = nunits - 2; i >= 0; i--)
5767 if (RTVEC_ELT (v, i) != x)
5768 break;
5769
5770 /* If the values are all the same, check to see if we can use one of the
5771 standard constant vectors. */
5772 if (i == -1)
5773 {
5774 if (x == CONST0_RTX (inner))
5775 return CONST0_RTX (mode);
5776 else if (x == CONST1_RTX (inner))
5777 return CONST1_RTX (mode);
5778 else if (x == CONSTM1_RTX (inner))
5779 return CONSTM1_RTX (mode);
5780 }
5781
5782 return gen_rtx_raw_CONST_VECTOR (mode, v);
5783 }
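
/* For example (assuming a target providing V4SImode), a vector whose
   elements are all const0_rtx folds to the shared zero vector, so the
   result below is CONST0_RTX (V4SImode):

       rtvec v = rtvec_alloc (4);
       for (int i = 0; i < 4; i++)
	 RTVEC_ELT (v, i) = const0_rtx;
       rtx zero = gen_rtx_CONST_VECTOR (V4SImode, v);  */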
5784
5785 /* Initialize global register information required by all functions. */
5786
5787 void
5788 init_emit_regs (void)
5789 {
5790 int i;
5791 machine_mode mode;
5792 mem_attrs *attrs;
5793
5794 /* Reset register attributes.  */
5795 reg_attrs_htab->empty ();
5796
5797 /* We need reg_raw_mode, so initialize the modes now. */
5798 init_reg_modes_target ();
5799
5800 /* Assign register numbers to the globally defined register rtx. */
5801 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5802 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5803 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5804 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5805 virtual_incoming_args_rtx =
5806 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5807 virtual_stack_vars_rtx =
5808 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5809 virtual_stack_dynamic_rtx =
5810 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5811 virtual_outgoing_args_rtx =
5812 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5813 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5814 virtual_preferred_stack_boundary_rtx =
5815 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5816
5817 /* Initialize RTL for commonly used hard registers. These are
5818 copied into regno_reg_rtx as we begin to compile each function. */
5819 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5820 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5821
5822 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5823 return_address_pointer_rtx
5824 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5825 #endif
5826
5827 pic_offset_table_rtx = NULL_RTX;
5828 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5829 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5830
5831 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5832 {
5833 mode = (machine_mode) i;
5834 attrs = ggc_cleared_alloc<mem_attrs> ();
5835 attrs->align = BITS_PER_UNIT;
5836 attrs->addrspace = ADDR_SPACE_GENERIC;
5837 if (mode != BLKmode)
5838 {
5839 attrs->size_known_p = true;
5840 attrs->size = GET_MODE_SIZE (mode);
5841 if (STRICT_ALIGNMENT)
5842 attrs->align = GET_MODE_ALIGNMENT (mode);
5843 }
5844 mode_mem_attrs[i] = attrs;
5845 }
5846 }
5847
5848 /* Initialize global machine_mode variables. */
5849
5850 void
5851 init_derived_machine_modes (void)
5852 {
5853 byte_mode = VOIDmode;
5854 word_mode = VOIDmode;
5855
5856 for (machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5857 mode != VOIDmode;
5858 mode = GET_MODE_WIDER_MODE (mode))
5859 {
5860 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5861 && byte_mode == VOIDmode)
5862 byte_mode = mode;
5863
5864 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5865 && word_mode == VOIDmode)
5866 word_mode = mode;
5867 }
5868
5869 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5870 }
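
/* Editorial sketch, not part of GCC: invariants that hold after
   init_derived_machine_modes on a typical target (e.g. BITS_PER_UNIT == 8
   and BITS_PER_WORD == 32 give byte_mode == QImode and
   word_mode == SImode).  The helper name is hypothetical.  */
#if 0
static void
example_check_derived_modes (void)
{
  gcc_assert (GET_MODE_BITSIZE (byte_mode) == BITS_PER_UNIT);
  gcc_assert (GET_MODE_BITSIZE (word_mode) == BITS_PER_WORD);
  gcc_assert (GET_MODE_PRECISION (ptr_mode) == POINTER_SIZE);
}
#endif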
5871
5872 /* Create some permanent unique rtl objects shared between all functions. */
5873
5874 void
5875 init_emit_once (void)
5876 {
5877 int i;
5878 machine_mode mode;
5879 machine_mode double_mode;
5880
5881 /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
5882 CONST_FIXED, and memory attribute hash tables. */
5883 const_int_htab = hash_table<const_int_hasher>::create_ggc (37);
5884
5885 #if TARGET_SUPPORTS_WIDE_INT
5886 const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
5887 #endif
5888 const_double_htab = hash_table<const_double_hasher>::create_ggc (37);
5889
5890 const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);
5891
5892 reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);
5893
5894 #ifdef INIT_EXPANDERS
5895 /* This is to initialize {init|mark|free}_machine_status before the first
5896 call to push_function_context_to. This is needed by the Chill front
5897 end, which calls push_function_context_to before the first call to
5898 init_function_start. */
5899 INIT_EXPANDERS;
5900 #endif
5901
5902 /* Create the unique rtx's for certain rtx codes and operand values. */
5903
5904 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
5905 tries to use these variables. */
5906 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5907 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5908 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5909
5910 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5911 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5912 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5913 else
5914 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5915
5916 double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);
5917
5918 real_from_integer (&dconst0, double_mode, 0, SIGNED);
5919 real_from_integer (&dconst1, double_mode, 1, SIGNED);
5920 real_from_integer (&dconst2, double_mode, 2, SIGNED);
5921
5922 dconstm1 = dconst1;
5923 dconstm1.sign = 1;
5924
5925 dconsthalf = dconst1;
5926 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5927
5928 for (i = 0; i < 3; i++)
5929 {
5930 const REAL_VALUE_TYPE *const r =
5931 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5932
5933 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5934 mode != VOIDmode;
5935 mode = GET_MODE_WIDER_MODE (mode))
5936 const_tiny_rtx[i][(int) mode] =
5937 const_double_from_real_value (*r, mode);
5938
5939 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5940 mode != VOIDmode;
5941 mode = GET_MODE_WIDER_MODE (mode))
5942 const_tiny_rtx[i][(int) mode] =
5943 const_double_from_real_value (*r, mode);
5944
5945 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5946
5947 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5948 mode != VOIDmode;
5949 mode = GET_MODE_WIDER_MODE (mode))
5950 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5951
5952 for (mode = MIN_MODE_PARTIAL_INT;
5953 mode <= MAX_MODE_PARTIAL_INT;
5954 mode = (machine_mode)((int)(mode) + 1))
5955 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5956 }
5957
5958 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
5959
5960 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5961 mode != VOIDmode;
5962 mode = GET_MODE_WIDER_MODE (mode))
5963 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5964
5965 for (mode = MIN_MODE_PARTIAL_INT;
5966 mode <= MAX_MODE_PARTIAL_INT;
5967 mode = (machine_mode)((int)(mode) + 1))
5968 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5969
5970 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
5971 mode != VOIDmode;
5972 mode = GET_MODE_WIDER_MODE (mode))
5973 {
5974 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5975 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5976 }
5977
5978 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
5979 mode != VOIDmode;
5980 mode = GET_MODE_WIDER_MODE (mode))
5981 {
5982 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5983 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5984 }
5985
5986 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5987 mode != VOIDmode;
5988 mode = GET_MODE_WIDER_MODE (mode))
5989 {
5990 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5991 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5992 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
5993 }
5994
5995 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5996 mode != VOIDmode;
5997 mode = GET_MODE_WIDER_MODE (mode))
5998 {
5999 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6000 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6001 }
6002
6003 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
6004 mode != VOIDmode;
6005 mode = GET_MODE_WIDER_MODE (mode))
6006 {
6007 FCONST0 (mode).data.high = 0;
6008 FCONST0 (mode).data.low = 0;
6009 FCONST0 (mode).mode = mode;
6010 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6011 FCONST0 (mode), mode);
6012 }
6013
6014 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
6015 mode != VOIDmode;
6016 mode = GET_MODE_WIDER_MODE (mode))
6017 {
6018 FCONST0 (mode).data.high = 0;
6019 FCONST0 (mode).data.low = 0;
6020 FCONST0 (mode).mode = mode;
6021 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6022 FCONST0 (mode), mode);
6023 }
6024
6025 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
6026 mode != VOIDmode;
6027 mode = GET_MODE_WIDER_MODE (mode))
6028 {
6029 FCONST0 (mode).data.high = 0;
6030 FCONST0 (mode).data.low = 0;
6031 FCONST0 (mode).mode = mode;
6032 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6033 FCONST0 (mode), mode);
6034
6035 /* We store the value 1. */
6036 FCONST1 (mode).data.high = 0;
6037 FCONST1 (mode).data.low = 0;
6038 FCONST1 (mode).mode = mode;
6039 FCONST1 (mode).data
6040 = double_int_one.lshift (GET_MODE_FBIT (mode),
6041 HOST_BITS_PER_DOUBLE_INT,
6042 SIGNED_FIXED_POINT_MODE_P (mode));
6043 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6044 FCONST1 (mode), mode);
6045 }
6046
6047 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
6048 mode != VOIDmode;
6049 mode = GET_MODE_WIDER_MODE (mode))
6050 {
6051 FCONST0 (mode).data.high = 0;
6052 FCONST0 (mode).data.low = 0;
6053 FCONST0 (mode).mode = mode;
6054 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6055 FCONST0 (mode), mode);
6056
6057 /* We store the value 1. */
6058 FCONST1 (mode).data.high = 0;
6059 FCONST1 (mode).data.low = 0;
6060 FCONST1 (mode).mode = mode;
6061 FCONST1 (mode).data
6062 = double_int_one.lshift (GET_MODE_FBIT (mode),
6063 HOST_BITS_PER_DOUBLE_INT,
6064 SIGNED_FIXED_POINT_MODE_P (mode));
6065 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6066 FCONST1 (mode), mode);
6067 }
6068
6069 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
6070 mode != VOIDmode;
6071 mode = GET_MODE_WIDER_MODE (mode))
6072 {
6073 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6074 }
6075
6076 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
6077 mode != VOIDmode;
6078 mode = GET_MODE_WIDER_MODE (mode))
6079 {
6080 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6081 }
6082
6083 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
6084 mode != VOIDmode;
6085 mode = GET_MODE_WIDER_MODE (mode))
6086 {
6087 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6088 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6089 }
6090
6091 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
6092 mode != VOIDmode;
6093 mode = GET_MODE_WIDER_MODE (mode))
6094 {
6095 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6096 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6097 }
6098
6099 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
6100 if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
6101 const_tiny_rtx[0][i] = const0_rtx;
6102
6103 const_tiny_rtx[0][(int) BImode] = const0_rtx;
6104 if (STORE_FLAG_VALUE == 1)
6105 const_tiny_rtx[1][(int) BImode] = const1_rtx;
6106
6107 for (mode = GET_CLASS_NARROWEST_MODE (MODE_POINTER_BOUNDS);
6108 mode != VOIDmode;
6109 mode = GET_MODE_WIDER_MODE (mode))
6110 {
6111 wide_int wi_zero = wi::zero (GET_MODE_PRECISION (mode));
6112 const_tiny_rtx[0][(int) mode] = immed_wide_int_const (wi_zero, mode);
6113 }
6114
6115 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6116 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6117 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6118 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
6119 invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
6120 /*prev_insn=*/NULL,
6121 /*next_insn=*/NULL,
6122 /*bb=*/NULL,
6123 /*pattern=*/NULL_RTX,
6124 /*location=*/-1,
6125 CODE_FOR_nothing,
6126 /*reg_notes=*/NULL_RTX);
6127 }
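
/* Editorial sketch, not part of GCC: the const_tiny_rtx table filled in
   by init_emit_once is what the CONST0_RTX, CONST1_RTX, CONST2_RTX and
   CONSTM1_RTX macros index, so shared constants are fetched without
   allocation.  The helper name is hypothetical.  */
#if 0
static void
example_tiny_constants (void)
{
  /* Rows 0..2 hold 0, 1 and 2; row 3 holds minus one.  */
  gcc_assert (CONST0_RTX (SImode) == const_tiny_rtx[0][(int) SImode]);
  gcc_assert (CONSTM1_RTX (SImode) == const_tiny_rtx[3][(int) SImode]);
}
#endif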
6128 \f
6129 /* Produce exact duplicate of insn INSN after AFTER.
6130    Take care to update libcall regions, if present.  */
6131
6132 rtx_insn *
6133 emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
6134 {
6135 rtx_insn *new_rtx;
6136 rtx link;
6137
6138 switch (GET_CODE (insn))
6139 {
6140 case INSN:
6141 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
6142 break;
6143
6144 case JUMP_INSN:
6145 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
6146 CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
6147 break;
6148
6149 case DEBUG_INSN:
6150 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6151 break;
6152
6153 case CALL_INSN:
6154 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
6155 if (CALL_INSN_FUNCTION_USAGE (insn))
6156 CALL_INSN_FUNCTION_USAGE (new_rtx)
6157 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
6158 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6159 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6160 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
6161 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
6162 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
6163 break;
6164
6165 default:
6166 gcc_unreachable ();
6167 }
6168
6169 /* Update LABEL_NUSES. */
6170 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
6171
6172 INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
6173
6174 /* If the old insn is frame related, then so is the new one. This is
6175 primarily needed for IA-64 unwind info which marks epilogue insns,
6176 which may be duplicated by the basic block reordering code. */
6177 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
6178
6179 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6180 will make them. REG_LABEL_TARGETs are created there too, but are
6181 supposed to be sticky, so we copy them. */
6182 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6183 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
6184 {
6185 if (GET_CODE (link) == EXPR_LIST)
6186 add_reg_note (new_rtx, REG_NOTE_KIND (link),
6187 copy_insn_1 (XEXP (link, 0)));
6188 else
6189 add_shallow_copy_of_reg_note (new_rtx, link);
6190 }
6191
6192 INSN_CODE (new_rtx) = INSN_CODE (insn);
6193 return new_rtx;
6194 }
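
/* Editorial sketch, not part of GCC: a pass wanting two independent
   copies of an insn to modify could duplicate it in place like this.
   The helper name is hypothetical.  */
#if 0
static rtx_insn *
example_duplicate_insn (rtx_insn *insn)
{
  /* The copy inherits INSN's pattern, location, frame-related flag and
     REG_NOTES (except REG_LABEL_OPERAND, which mark_jump_label rebuilds).  */
  return emit_copy_of_insn_after (insn, insn);
}
#endif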
6195
6196 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

/* Return a shared CLOBBER expression for hard register REGNO in MODE,
   creating and caching it on first use.  */
6197 rtx
6198 gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
6199 {
6200 if (hard_reg_clobbers[mode][regno])
6201 return hard_reg_clobbers[mode][regno];
6202 else
6203 return (hard_reg_clobbers[mode][regno] =
6204 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6205 }
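
/* Editorial sketch, not part of GCC: since gen_hard_reg_clobber caches
   its results, asking twice for the same (mode, regno) pair yields the
   identical rtx.  The helper name is hypothetical.  */
#if 0
static void
example_clobber_sharing (void)
{
  rtx a = gen_hard_reg_clobber (word_mode, 0);
  rtx b = gen_hard_reg_clobber (word_mode, 0);
  gcc_assert (a == b);
}
#endif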
6206
6207 location_t prologue_location;
6208 location_t epilogue_location;
6209
6210 /* Hold current and last location information, so that the
6211    datastructures are built lazily only when some instructions in a
6212    given place are needed.  */
6213 static location_t curr_location;
6214
6215 /* Allocate insn location datastructure. */
6216 void
6217 insn_locations_init (void)
6218 {
6219 prologue_location = epilogue_location = 0;
6220 curr_location = UNKNOWN_LOCATION;
6221 }
6222
6223 /* At the end of the emit stage, clear the current location.  */
6224 void
6225 insn_locations_finalize (void)
6226 {
6227 epilogue_location = curr_location;
6228 curr_location = UNKNOWN_LOCATION;
6229 }
6230
6231 /* Set current location. */
6232 void
6233 set_curr_insn_location (location_t location)
6234 {
6235 curr_location = location;
6236 }
6237
6238 /* Get current location. */
6239 location_t
6240 curr_insn_location (void)
6241 {
6242 return curr_location;
6243 }
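
/* Editorial sketch, not part of GCC: expansion code brackets the insns
   it emits with the location of the statement being expanded, so the
   queries below can later recover the file, line and block.  The helper
   name is hypothetical.  */
#if 0
static void
example_locations (location_t loc)
{
  set_curr_insn_location (loc);
  /* ... emit insns here; each is stamped with LOC ... */
  gcc_assert (curr_insn_location () == loc);
}
#endif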
6244
6245 /* Return the lexical scope block that INSN belongs to.  */
6246 tree
6247 insn_scope (const rtx_insn *insn)
6248 {
6249 return LOCATION_BLOCK (INSN_LOCATION (insn));
6250 }
6251
6252 /* Return line number of the statement that produced this insn. */
6253 int
6254 insn_line (const rtx_insn *insn)
6255 {
6256 return LOCATION_LINE (INSN_LOCATION (insn));
6257 }
6258
6259 /* Return source file of the statement that produced this insn. */
6260 const char *
6261 insn_file (const rtx_insn *insn)
6262 {
6263 return LOCATION_FILE (INSN_LOCATION (insn));
6264 }
6265
6266 /* Return expanded location of the statement that produced this insn. */
6267 expanded_location
6268 insn_location (const rtx_insn *insn)
6269 {
6270 return expand_location (INSN_LOCATION (insn));
6271 }
6272
6273 /* Return true if memory model MODEL requires a pre-operation (release-style)
6274    barrier when PRE is true, or a post-operation (acquire-style) barrier when
6275    PRE is false.  While not universal, this matches the behavior of several targets.  */
6276
6277 bool
6278 need_atomic_barrier_p (enum memmodel model, bool pre)
6279 {
6280 switch (model & MEMMODEL_BASE_MASK)
6281 {
6282 case MEMMODEL_RELAXED:
6283 case MEMMODEL_CONSUME:
6284 return false;
6285 case MEMMODEL_RELEASE:
6286 return pre;
6287 case MEMMODEL_ACQUIRE:
6288 return !pre;
6289 case MEMMODEL_ACQ_REL:
6290 case MEMMODEL_SEQ_CST:
6291 return true;
6292 default:
6293 gcc_unreachable ();
6294 }
6295 }
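
/* Editorial sketch, not part of GCC: a backend expanding an atomic
   operation might place its two possible barriers like this.  The
   helper name is hypothetical.  */
#if 0
static void
example_atomic_barriers (enum memmodel model)
{
  if (need_atomic_barrier_p (model, true))
    ; /* Emit the pre-operation (release-style) barrier here.  */
  /* ... the atomic operation itself ... */
  if (need_atomic_barrier_p (model, false))
    ; /* Emit the post-operation (acquire-style) barrier here.  */
}
#endif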
6296 \f
6297 #include "gt-emit-rtl.h"