rtlanal.c (subreg_lsb_1): New function split out from subreg_lsb.
[gcc.git] / gcc / rtlanal.c
1 /* Analyze RTL for C-Compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "toplev.h"
28 #include "rtl.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "tm_p.h"
33 #include "flags.h"
34 #include "basic-block.h"
35 #include "real.h"
36
37 /* Forward declarations */
38 static int global_reg_mentioned_p_1 (rtx *, void *);
39 static void set_of_1 (rtx, rtx, void *);
40 static void insn_dependent_p_1 (rtx, rtx, void *);
41 static int rtx_referenced_p_1 (rtx *, void *);
42 static int computed_jump_p_1 (rtx);
43 static void parms_set (rtx, rtx, void *);
44 static bool hoist_test_store (rtx, rtx, regset);
45 static void hoist_update_store (rtx, rtx *, rtx, rtx);
46
47 /* Bit flags that specify the machine subtype we are compiling for.
48 Bits are tested using macros TARGET_... defined in the tm.h file
49 and set by `-m...' switches. Must be defined in rtlanal.c. */
50
51 int target_flags;
52 \f
53 /* Return 1 if the value of X is unstable
54 (would be different at a different point in the program).
55 The frame pointer, arg pointer, etc. are considered stable
56 (within one function) and so is anything marked `unchanging'. */
57
58 int
59 rtx_unstable_p (rtx x)
60 {
61 RTX_CODE code = GET_CODE (x);
62 int i;
63 const char *fmt;
64
65 switch (code)
66 {
67 case MEM:
68 return ! RTX_UNCHANGING_P (x) || rtx_unstable_p (XEXP (x, 0));
69
70 case QUEUED:
71 return 1;
72
73 case ADDRESSOF:
74 case CONST:
75 case CONST_INT:
76 case CONST_DOUBLE:
77 case CONST_VECTOR:
78 case SYMBOL_REF:
79 case LABEL_REF:
80 return 0;
81
82 case REG:
83 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
84 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
85 /* The arg pointer varies if it is not a fixed register. */
86 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
87 || RTX_UNCHANGING_P (x))
88 return 0;
89 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
90 /* ??? When call-clobbered, the value is stable modulo the restore
91 that must happen after a call. This currently screws up local-alloc
92 into believing that the restore is not needed. */
93 if (x == pic_offset_table_rtx)
94 return 0;
95 #endif
96 return 1;
97
98 case ASM_OPERANDS:
99 if (MEM_VOLATILE_P (x))
100 return 1;
101
102 /* Fall through. */
103
104 default:
105 break;
106 }
107
108 fmt = GET_RTX_FORMAT (code);
109 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
110 if (fmt[i] == 'e')
111 {
112 if (rtx_unstable_p (XEXP (x, i)))
113 return 1;
114 }
115 else if (fmt[i] == 'E')
116 {
117 int j;
118 for (j = 0; j < XVECLEN (x, i); j++)
119 if (rtx_unstable_p (XVECEXP (x, i, j)))
120 return 1;
121 }
122
123 return 0;
124 }
125
126 /* Return 1 if X has a value that can vary even between two
127 executions of the program. 0 means X can be compared reliably
128 against certain constants or near-constants.
129 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
130 zero, we are slightly more conservative.
131 The frame pointer and the arg pointer are considered constant. */
132
133 int
134 rtx_varies_p (rtx x, int for_alias)
135 {
136 RTX_CODE code = GET_CODE (x);
137 int i;
138 const char *fmt;
139
140 switch (code)
141 {
142 case MEM:
143 return ! RTX_UNCHANGING_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
144
145 case QUEUED:
146 return 1;
147
148 case CONST:
149 case CONST_INT:
150 case CONST_DOUBLE:
151 case CONST_VECTOR:
152 case SYMBOL_REF:
153 case LABEL_REF:
154 return 0;
155
156 case ADDRESSOF:
157 /* This will resolve to some offset from the frame pointer. */
158 return 0;
159
160 case REG:
161 /* Note that we have to test for the actual rtx used for the frame
162 and arg pointers and not just the register number in case we have
163 eliminated the frame and/or arg pointer and are using it
164 for pseudos. */
165 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
166 /* The arg pointer varies if it is not a fixed register. */
167 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
168 return 0;
169 if (x == pic_offset_table_rtx
170 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
171 /* ??? When call-clobbered, the value is stable modulo the restore
172 that must happen after a call. This currently screws up
173 local-alloc into believing that the restore is not needed, so we
174 must return 0 only if we are called from alias analysis. */
175 && for_alias
176 #endif
177 )
178 return 0;
179 return 1;
180
181 case LO_SUM:
182 /* The operand 0 of a LO_SUM is considered constant
183 (in fact it is related specifically to operand 1)
184 during alias analysis. */
185 return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
186 || rtx_varies_p (XEXP (x, 1), for_alias);
187
188 case ASM_OPERANDS:
189 if (MEM_VOLATILE_P (x))
190 return 1;
191
192 /* Fall through. */
193
194 default:
195 break;
196 }
197
198 fmt = GET_RTX_FORMAT (code);
199 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
200 if (fmt[i] == 'e')
201 {
202 if (rtx_varies_p (XEXP (x, i), for_alias))
203 return 1;
204 }
205 else if (fmt[i] == 'E')
206 {
207 int j;
208 for (j = 0; j < XVECLEN (x, i); j++)
209 if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
210 return 1;
211 }
212
213 return 0;
214 }
215
216 /* Return 0 if the use of X as an address in a MEM can cause a trap. */
217
218 int
219 rtx_addr_can_trap_p (rtx x)
220 {
221 enum rtx_code code = GET_CODE (x);
222
223 switch (code)
224 {
225 case SYMBOL_REF:
226 return SYMBOL_REF_WEAK (x);
227
228 case LABEL_REF:
229 return 0;
230
231 case ADDRESSOF:
232 /* This will resolve to some offset from the frame pointer. */
233 return 0;
234
235 case REG:
236 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
237 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
238 || x == stack_pointer_rtx
239 /* The arg pointer varies if it is not a fixed register. */
240 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
241 return 0;
242 /* All of the virtual frame registers are stack references. */
243 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
244 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
245 return 0;
246 return 1;
247
248 case CONST:
249 return rtx_addr_can_trap_p (XEXP (x, 0));
250
251 case PLUS:
252 /* An address is assumed not to trap if it is an address that can't
253 trap plus a constant integer or it is the pic register plus a
254 constant. */
255 return ! ((! rtx_addr_can_trap_p (XEXP (x, 0))
256 && GET_CODE (XEXP (x, 1)) == CONST_INT)
257 || (XEXP (x, 0) == pic_offset_table_rtx
258 && CONSTANT_P (XEXP (x, 1))));
259
260 case LO_SUM:
261 case PRE_MODIFY:
262 return rtx_addr_can_trap_p (XEXP (x, 1));
263
264 case PRE_DEC:
265 case PRE_INC:
266 case POST_DEC:
267 case POST_INC:
268 case POST_MODIFY:
269 return rtx_addr_can_trap_p (XEXP (x, 0));
270
271 default:
272 break;
273 }
274
275 /* If it isn't one of the case above, it can cause a trap. */
276 return 1;
277 }
278
279 /* Return true if X is an address that is known to not be zero. */
280
281 bool
282 nonzero_address_p (rtx x)
283 {
284 enum rtx_code code = GET_CODE (x);
285
286 switch (code)
287 {
288 case SYMBOL_REF:
289 return !SYMBOL_REF_WEAK (x);
290
291 case LABEL_REF:
292 return true;
293
294 case ADDRESSOF:
295 /* This will resolve to some offset from the frame pointer. */
296 return true;
297
298 case REG:
299 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
300 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
301 || x == stack_pointer_rtx
302 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
303 return true;
304 /* All of the virtual frame registers are stack references. */
305 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
306 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
307 return true;
308 return false;
309
310 case CONST:
311 return nonzero_address_p (XEXP (x, 0));
312
313 case PLUS:
314 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
315 {
316 /* Pointers aren't allowed to wrap. If we've got a register
317 that is known to be a pointer, and a positive offset, then
318 the composite can't be zero. */
319 if (INTVAL (XEXP (x, 1)) > 0
320 && REG_P (XEXP (x, 0))
321 && REG_POINTER (XEXP (x, 0)))
322 return true;
323
324 return nonzero_address_p (XEXP (x, 0));
325 }
326 /* Handle PIC references. */
327 else if (XEXP (x, 0) == pic_offset_table_rtx
328 && CONSTANT_P (XEXP (x, 1)))
329 return true;
330 return false;
331
332 case PRE_MODIFY:
333 /* Similar to the above; allow positive offsets. Further, since
334 auto-inc is only allowed in memories, the register must be a
335 pointer. */
336 if (GET_CODE (XEXP (x, 1)) == CONST_INT
337 && INTVAL (XEXP (x, 1)) > 0)
338 return true;
339 return nonzero_address_p (XEXP (x, 0));
340
341 case PRE_INC:
342 /* Similarly. Further, the offset is always positive. */
343 return true;
344
345 case PRE_DEC:
346 case POST_DEC:
347 case POST_INC:
348 case POST_MODIFY:
349 return nonzero_address_p (XEXP (x, 0));
350
351 case LO_SUM:
352 return nonzero_address_p (XEXP (x, 1));
353
354 default:
355 break;
356 }
357
358 /* If it isn't one of the case above, might be zero. */
359 return false;
360 }
361
362 /* Return 1 if X refers to a memory location whose address
363 cannot be compared reliably with constant addresses,
364 or if X refers to a BLKmode memory object.
365 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
366 zero, we are slightly more conservative. */
367
368 int
369 rtx_addr_varies_p (rtx x, int for_alias)
370 {
371 enum rtx_code code;
372 int i;
373 const char *fmt;
374
375 if (x == 0)
376 return 0;
377
378 code = GET_CODE (x);
379 if (code == MEM)
380 return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
381
382 fmt = GET_RTX_FORMAT (code);
383 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
384 if (fmt[i] == 'e')
385 {
386 if (rtx_addr_varies_p (XEXP (x, i), for_alias))
387 return 1;
388 }
389 else if (fmt[i] == 'E')
390 {
391 int j;
392 for (j = 0; j < XVECLEN (x, i); j++)
393 if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
394 return 1;
395 }
396 return 0;
397 }
398 \f
399 /* Return the value of the integer term in X, if one is apparent;
400 otherwise return 0.
401 Only obvious integer terms are detected.
402 This is used in cse.c with the `related_value' field. */
403
404 HOST_WIDE_INT
405 get_integer_term (rtx x)
406 {
407 if (GET_CODE (x) == CONST)
408 x = XEXP (x, 0);
409
410 if (GET_CODE (x) == MINUS
411 && GET_CODE (XEXP (x, 1)) == CONST_INT)
412 return - INTVAL (XEXP (x, 1));
413 if (GET_CODE (x) == PLUS
414 && GET_CODE (XEXP (x, 1)) == CONST_INT)
415 return INTVAL (XEXP (x, 1));
416 return 0;
417 }
418
419 /* If X is a constant, return the value sans apparent integer term;
420 otherwise return 0.
421 Only obvious integer terms are detected. */
422
423 rtx
424 get_related_value (rtx x)
425 {
426 if (GET_CODE (x) != CONST)
427 return 0;
428 x = XEXP (x, 0);
429 if (GET_CODE (x) == PLUS
430 && GET_CODE (XEXP (x, 1)) == CONST_INT)
431 return XEXP (x, 0);
432 else if (GET_CODE (x) == MINUS
433 && GET_CODE (XEXP (x, 1)) == CONST_INT)
434 return XEXP (x, 0);
435 return 0;
436 }
437 \f
438 /* Given a tablejump insn INSN, return the RTL expression for the offset
439 into the jump table. If the offset cannot be determined, then return
440 NULL_RTX.
441
442 If EARLIEST is nonzero, it is a pointer to a place where the earliest
443 insn used in locating the offset was found. */
444
445 rtx
446 get_jump_table_offset (rtx insn, rtx *earliest)
447 {
448 rtx label;
449 rtx table;
450 rtx set;
451 rtx old_insn;
452 rtx x;
453 rtx old_x;
454 rtx y;
455 rtx old_y;
456 int i;
457
458 if (!tablejump_p (insn, &label, &table) || !(set = single_set (insn)))
459 return NULL_RTX;
460
461 x = SET_SRC (set);
462
463 /* Some targets (eg, ARM) emit a tablejump that also
464 contains the out-of-range target. */
465 if (GET_CODE (x) == IF_THEN_ELSE
466 && GET_CODE (XEXP (x, 2)) == LABEL_REF)
467 x = XEXP (x, 1);
468
469 /* Search backwards and locate the expression stored in X. */
470 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
471 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
472 ;
473
474 /* If X is an expression using a relative address then strip
475 off the addition / subtraction of PC, PIC_OFFSET_TABLE_REGNUM,
476 or the jump table label. */
477 if (GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC
478 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS))
479 {
480 for (i = 0; i < 2; i++)
481 {
482 old_insn = insn;
483 y = XEXP (x, i);
484
485 if (y == pc_rtx || y == pic_offset_table_rtx)
486 break;
487
488 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
489 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
490 ;
491
492 if ((GET_CODE (y) == LABEL_REF && XEXP (y, 0) == label))
493 break;
494 }
495
496 if (i >= 2)
497 return NULL_RTX;
498
499 x = XEXP (x, 1 - i);
500
501 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
502 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
503 ;
504 }
505
506 /* Strip off any sign or zero extension. */
507 if (GET_CODE (x) == SIGN_EXTEND || GET_CODE (x) == ZERO_EXTEND)
508 {
509 x = XEXP (x, 0);
510
511 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
512 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
513 ;
514 }
515
516 /* If X isn't a MEM then this isn't a tablejump we understand. */
517 if (GET_CODE (x) != MEM)
518 return NULL_RTX;
519
520 /* Strip off the MEM. */
521 x = XEXP (x, 0);
522
523 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
524 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
525 ;
526
527 /* If X isn't a PLUS than this isn't a tablejump we understand. */
528 if (GET_CODE (x) != PLUS)
529 return NULL_RTX;
530
531 /* At this point we should have an expression representing the jump table
532 plus an offset. Examine each operand in order to determine which one
533 represents the jump table. Knowing that tells us that the other operand
534 must represent the offset. */
535 for (i = 0; i < 2; i++)
536 {
537 old_insn = insn;
538 y = XEXP (x, i);
539
540 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
541 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
542 ;
543
544 if ((GET_CODE (y) == CONST || GET_CODE (y) == LABEL_REF)
545 && reg_mentioned_p (label, y))
546 break;
547 }
548
549 if (i >= 2)
550 return NULL_RTX;
551
552 x = XEXP (x, 1 - i);
553
554 /* Strip off the addition / subtraction of PIC_OFFSET_TABLE_REGNUM. */
555 if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS)
556 for (i = 0; i < 2; i++)
557 if (XEXP (x, i) == pic_offset_table_rtx)
558 {
559 x = XEXP (x, 1 - i);
560 break;
561 }
562
563 if (earliest)
564 *earliest = insn;
565
566 /* Return the RTL expression representing the offset. */
567 return x;
568 }
569 \f
570 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
571 a global register. */
572
573 static int
574 global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
575 {
576 int regno;
577 rtx x = *loc;
578
579 if (! x)
580 return 0;
581
582 switch (GET_CODE (x))
583 {
584 case SUBREG:
585 if (GET_CODE (SUBREG_REG (x)) == REG)
586 {
587 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
588 && global_regs[subreg_regno (x)])
589 return 1;
590 return 0;
591 }
592 break;
593
594 case REG:
595 regno = REGNO (x);
596 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
597 return 1;
598 return 0;
599
600 case SCRATCH:
601 case PC:
602 case CC0:
603 case CONST_INT:
604 case CONST_DOUBLE:
605 case CONST:
606 case LABEL_REF:
607 return 0;
608
609 case CALL:
610 /* A non-constant call might use a global register. */
611 return 1;
612
613 default:
614 break;
615 }
616
617 return 0;
618 }
619
620 /* Returns nonzero if X mentions a global register. */
621
622 int
623 global_reg_mentioned_p (rtx x)
624 {
625 if (INSN_P (x))
626 {
627 if (GET_CODE (x) == CALL_INSN)
628 {
629 if (! CONST_OR_PURE_CALL_P (x))
630 return 1;
631 x = CALL_INSN_FUNCTION_USAGE (x);
632 if (x == 0)
633 return 0;
634 }
635 else
636 x = PATTERN (x);
637 }
638
639 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
640 }
641 \f
642 /* Return the number of places FIND appears within X. If COUNT_DEST is
643 zero, we do not count occurrences inside the destination of a SET. */
644
645 int
646 count_occurrences (rtx x, rtx find, int count_dest)
647 {
648 int i, j;
649 enum rtx_code code;
650 const char *format_ptr;
651 int count;
652
653 if (x == find)
654 return 1;
655
656 code = GET_CODE (x);
657
658 switch (code)
659 {
660 case REG:
661 case CONST_INT:
662 case CONST_DOUBLE:
663 case CONST_VECTOR:
664 case SYMBOL_REF:
665 case CODE_LABEL:
666 case PC:
667 case CC0:
668 return 0;
669
670 case MEM:
671 if (GET_CODE (find) == MEM && rtx_equal_p (x, find))
672 return 1;
673 break;
674
675 case SET:
676 if (SET_DEST (x) == find && ! count_dest)
677 return count_occurrences (SET_SRC (x), find, count_dest);
678 break;
679
680 default:
681 break;
682 }
683
684 format_ptr = GET_RTX_FORMAT (code);
685 count = 0;
686
687 for (i = 0; i < GET_RTX_LENGTH (code); i++)
688 {
689 switch (*format_ptr++)
690 {
691 case 'e':
692 count += count_occurrences (XEXP (x, i), find, count_dest);
693 break;
694
695 case 'E':
696 for (j = 0; j < XVECLEN (x, i); j++)
697 count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
698 break;
699 }
700 }
701 return count;
702 }
703 \f
704 /* Nonzero if register REG appears somewhere within IN.
705 Also works if REG is not a register; in this case it checks
706 for a subexpression of IN that is Lisp "equal" to REG. */
707
708 int
709 reg_mentioned_p (rtx reg, rtx in)
710 {
711 const char *fmt;
712 int i;
713 enum rtx_code code;
714
715 if (in == 0)
716 return 0;
717
718 if (reg == in)
719 return 1;
720
721 if (GET_CODE (in) == LABEL_REF)
722 return reg == XEXP (in, 0);
723
724 code = GET_CODE (in);
725
726 switch (code)
727 {
728 /* Compare registers by number. */
729 case REG:
730 return GET_CODE (reg) == REG && REGNO (in) == REGNO (reg);
731
732 /* These codes have no constituent expressions
733 and are unique. */
734 case SCRATCH:
735 case CC0:
736 case PC:
737 return 0;
738
739 case CONST_INT:
740 case CONST_VECTOR:
741 case CONST_DOUBLE:
742 /* These are kept unique for a given value. */
743 return 0;
744
745 default:
746 break;
747 }
748
749 if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
750 return 1;
751
752 fmt = GET_RTX_FORMAT (code);
753
754 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
755 {
756 if (fmt[i] == 'E')
757 {
758 int j;
759 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
760 if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
761 return 1;
762 }
763 else if (fmt[i] == 'e'
764 && reg_mentioned_p (reg, XEXP (in, i)))
765 return 1;
766 }
767 return 0;
768 }
769 \f
770 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
771 no CODE_LABEL insn. */
772
773 int
774 no_labels_between_p (rtx beg, rtx end)
775 {
776 rtx p;
777 if (beg == end)
778 return 0;
779 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
780 if (GET_CODE (p) == CODE_LABEL)
781 return 0;
782 return 1;
783 }
784
785 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
786 no JUMP_INSN insn. */
787
788 int
789 no_jumps_between_p (rtx beg, rtx end)
790 {
791 rtx p;
792 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
793 if (GET_CODE (p) == JUMP_INSN)
794 return 0;
795 return 1;
796 }
797
798 /* Nonzero if register REG is used in an insn between
799 FROM_INSN and TO_INSN (exclusive of those two). */
800
801 int
802 reg_used_between_p (rtx reg, rtx from_insn, rtx to_insn)
803 {
804 rtx insn;
805
806 if (from_insn == to_insn)
807 return 0;
808
809 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
810 if (INSN_P (insn)
811 && (reg_overlap_mentioned_p (reg, PATTERN (insn))
812 || (GET_CODE (insn) == CALL_INSN
813 && (find_reg_fusage (insn, USE, reg)
814 || find_reg_fusage (insn, CLOBBER, reg)))))
815 return 1;
816 return 0;
817 }
818 \f
819 /* Nonzero if the old value of X, a register, is referenced in BODY. If X
820 is entirely replaced by a new value and the only use is as a SET_DEST,
821 we do not consider it a reference. */
822
823 int
824 reg_referenced_p (rtx x, rtx body)
825 {
826 int i;
827
828 switch (GET_CODE (body))
829 {
830 case SET:
831 if (reg_overlap_mentioned_p (x, SET_SRC (body)))
832 return 1;
833
834 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
835 of a REG that occupies all of the REG, the insn references X if
836 it is mentioned in the destination. */
837 if (GET_CODE (SET_DEST (body)) != CC0
838 && GET_CODE (SET_DEST (body)) != PC
839 && GET_CODE (SET_DEST (body)) != REG
840 && ! (GET_CODE (SET_DEST (body)) == SUBREG
841 && GET_CODE (SUBREG_REG (SET_DEST (body))) == REG
842 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
843 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
844 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
845 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
846 && reg_overlap_mentioned_p (x, SET_DEST (body)))
847 return 1;
848 return 0;
849
850 case ASM_OPERANDS:
851 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
852 if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
853 return 1;
854 return 0;
855
856 case CALL:
857 case USE:
858 case IF_THEN_ELSE:
859 return reg_overlap_mentioned_p (x, body);
860
861 case TRAP_IF:
862 return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
863
864 case PREFETCH:
865 return reg_overlap_mentioned_p (x, XEXP (body, 0));
866
867 case UNSPEC:
868 case UNSPEC_VOLATILE:
869 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
870 if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
871 return 1;
872 return 0;
873
874 case PARALLEL:
875 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
876 if (reg_referenced_p (x, XVECEXP (body, 0, i)))
877 return 1;
878 return 0;
879
880 case CLOBBER:
881 if (GET_CODE (XEXP (body, 0)) == MEM)
882 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
883 return 1;
884 return 0;
885
886 case COND_EXEC:
887 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
888 return 1;
889 return reg_referenced_p (x, COND_EXEC_CODE (body));
890
891 default:
892 return 0;
893 }
894 }
895
896 /* Nonzero if register REG is referenced in an insn between
897 FROM_INSN and TO_INSN (exclusive of those two). Sets of REG do
898 not count. */
899
900 int
901 reg_referenced_between_p (rtx reg, rtx from_insn, rtx to_insn)
902 {
903 rtx insn;
904
905 if (from_insn == to_insn)
906 return 0;
907
908 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
909 if (INSN_P (insn)
910 && (reg_referenced_p (reg, PATTERN (insn))
911 || (GET_CODE (insn) == CALL_INSN
912 && find_reg_fusage (insn, USE, reg))))
913 return 1;
914 return 0;
915 }
916 \f
917 /* Nonzero if register REG is set or clobbered in an insn between
918 FROM_INSN and TO_INSN (exclusive of those two). */
919
920 int
921 reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
922 {
923 rtx insn;
924
925 if (from_insn == to_insn)
926 return 0;
927
928 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
929 if (INSN_P (insn) && reg_set_p (reg, insn))
930 return 1;
931 return 0;
932 }
933
934 /* Internals of reg_set_between_p. */
935 int
936 reg_set_p (rtx reg, rtx insn)
937 {
938 /* We can be passed an insn or part of one. If we are passed an insn,
939 check if a side-effect of the insn clobbers REG. */
940 if (INSN_P (insn)
941 && (FIND_REG_INC_NOTE (insn, reg)
942 || (GET_CODE (insn) == CALL_INSN
943 /* We'd like to test call_used_regs here, but rtlanal.c can't
944 reference that variable due to its use in genattrtab. So
945 we'll just be more conservative.
946
947 ??? Unless we could ensure that the CALL_INSN_FUNCTION_USAGE
948 information holds all clobbered registers. */
949 && ((GET_CODE (reg) == REG
950 && REGNO (reg) < FIRST_PSEUDO_REGISTER)
951 || GET_CODE (reg) == MEM
952 || find_reg_fusage (insn, CLOBBER, reg)))))
953 return 1;
954
955 return set_of (reg, insn) != NULL_RTX;
956 }
957
958 /* Similar to reg_set_between_p, but check all registers in X. Return 0
959 only if none of them are modified between START and END. Do not
960 consider non-registers one way or the other. */
961
962 int
963 regs_set_between_p (rtx x, rtx start, rtx end)
964 {
965 enum rtx_code code = GET_CODE (x);
966 const char *fmt;
967 int i, j;
968
969 switch (code)
970 {
971 case CONST_INT:
972 case CONST_DOUBLE:
973 case CONST_VECTOR:
974 case CONST:
975 case SYMBOL_REF:
976 case LABEL_REF:
977 case PC:
978 case CC0:
979 return 0;
980
981 case REG:
982 return reg_set_between_p (x, start, end);
983
984 default:
985 break;
986 }
987
988 fmt = GET_RTX_FORMAT (code);
989 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
990 {
991 if (fmt[i] == 'e' && regs_set_between_p (XEXP (x, i), start, end))
992 return 1;
993
994 else if (fmt[i] == 'E')
995 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
996 if (regs_set_between_p (XVECEXP (x, i, j), start, end))
997 return 1;
998 }
999
1000 return 0;
1001 }
1002
1003 /* Similar to reg_set_between_p, but check all registers in X. Return 0
1004 only if none of them are modified between START and END. Return 1 if
1005 X contains a MEM; this routine does usememory aliasing. */
1006
1007 int
1008 modified_between_p (rtx x, rtx start, rtx end)
1009 {
1010 enum rtx_code code = GET_CODE (x);
1011 const char *fmt;
1012 int i, j;
1013 rtx insn;
1014
1015 if (start == end)
1016 return 0;
1017
1018 switch (code)
1019 {
1020 case CONST_INT:
1021 case CONST_DOUBLE:
1022 case CONST_VECTOR:
1023 case CONST:
1024 case SYMBOL_REF:
1025 case LABEL_REF:
1026 return 0;
1027
1028 case PC:
1029 case CC0:
1030 return 1;
1031
1032 case MEM:
1033 if (RTX_UNCHANGING_P (x))
1034 return 0;
1035 if (modified_between_p (XEXP (x, 0), start, end))
1036 return 1;
1037 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
1038 if (memory_modified_in_insn_p (x, insn))
1039 return 1;
1040 return 0;
1041 break;
1042
1043 case REG:
1044 return reg_set_between_p (x, start, end);
1045
1046 default:
1047 break;
1048 }
1049
1050 fmt = GET_RTX_FORMAT (code);
1051 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1052 {
1053 if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
1054 return 1;
1055
1056 else if (fmt[i] == 'E')
1057 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1058 if (modified_between_p (XVECEXP (x, i, j), start, end))
1059 return 1;
1060 }
1061
1062 return 0;
1063 }
1064
1065 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
1066 of them are modified in INSN. Return 1 if X contains a MEM; this routine
1067 does use memory aliasing. */
1068
1069 int
1070 modified_in_p (rtx x, rtx insn)
1071 {
1072 enum rtx_code code = GET_CODE (x);
1073 const char *fmt;
1074 int i, j;
1075
1076 switch (code)
1077 {
1078 case CONST_INT:
1079 case CONST_DOUBLE:
1080 case CONST_VECTOR:
1081 case CONST:
1082 case SYMBOL_REF:
1083 case LABEL_REF:
1084 return 0;
1085
1086 case PC:
1087 case CC0:
1088 return 1;
1089
1090 case MEM:
1091 if (RTX_UNCHANGING_P (x))
1092 return 0;
1093 if (modified_in_p (XEXP (x, 0), insn))
1094 return 1;
1095 if (memory_modified_in_insn_p (x, insn))
1096 return 1;
1097 return 0;
1098 break;
1099
1100 case REG:
1101 return reg_set_p (x, insn);
1102
1103 default:
1104 break;
1105 }
1106
1107 fmt = GET_RTX_FORMAT (code);
1108 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1109 {
1110 if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
1111 return 1;
1112
1113 else if (fmt[i] == 'E')
1114 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1115 if (modified_in_p (XVECEXP (x, i, j), insn))
1116 return 1;
1117 }
1118
1119 return 0;
1120 }
1121
1122 /* Return true if anything in insn X is (anti,output,true) dependent on
1123 anything in insn Y. */
1124
1125 int
1126 insn_dependent_p (rtx x, rtx y)
1127 {
1128 rtx tmp;
1129
1130 if (! INSN_P (x) || ! INSN_P (y))
1131 abort ();
1132
1133 tmp = PATTERN (y);
1134 note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
1135 if (tmp == NULL_RTX)
1136 return 1;
1137
1138 tmp = PATTERN (x);
1139 note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
1140 if (tmp == NULL_RTX)
1141 return 1;
1142
1143 return 0;
1144 }
1145
1146 /* A helper routine for insn_dependent_p called through note_stores. */
1147
1148 static void
1149 insn_dependent_p_1 (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
1150 {
1151 rtx * pinsn = (rtx *) data;
1152
1153 if (*pinsn && reg_mentioned_p (x, *pinsn))
1154 *pinsn = NULL_RTX;
1155 }
1156 \f
1157 /* Helper function for set_of. */
1158 struct set_of_data
1159 {
1160 rtx found;
1161 rtx pat;
1162 };
1163
1164 static void
1165 set_of_1 (rtx x, rtx pat, void *data1)
1166 {
1167 struct set_of_data *data = (struct set_of_data *) (data1);
1168 if (rtx_equal_p (x, data->pat)
1169 || (GET_CODE (x) != MEM && reg_overlap_mentioned_p (data->pat, x)))
1170 data->found = pat;
1171 }
1172
1173 /* Give an INSN, return a SET or CLOBBER expression that does modify PAT
1174 (either directly or via STRICT_LOW_PART and similar modifiers). */
1175 rtx
1176 set_of (rtx pat, rtx insn)
1177 {
1178 struct set_of_data data;
1179 data.found = NULL_RTX;
1180 data.pat = pat;
1181 note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
1182 return data.found;
1183 }
1184 \f
1185 /* Given an INSN, return a SET expression if this insn has only a single SET.
1186 It may also have CLOBBERs, USEs, or SET whose output
1187 will not be used, which we ignore. */
1188
1189 rtx
1190 single_set_2 (rtx insn, rtx pat)
1191 {
1192 rtx set = NULL;
1193 int set_verified = 1;
1194 int i;
1195
1196 if (GET_CODE (pat) == PARALLEL)
1197 {
1198 for (i = 0; i < XVECLEN (pat, 0); i++)
1199 {
1200 rtx sub = XVECEXP (pat, 0, i);
1201 switch (GET_CODE (sub))
1202 {
1203 case USE:
1204 case CLOBBER:
1205 break;
1206
1207 case SET:
1208 /* We can consider insns having multiple sets, where all
1209 but one are dead as single set insns. In common case
1210 only single set is present in the pattern so we want
1211 to avoid checking for REG_UNUSED notes unless necessary.
1212
1213 When we reach set first time, we just expect this is
1214 the single set we are looking for and only when more
1215 sets are found in the insn, we check them. */
1216 if (!set_verified)
1217 {
1218 if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
1219 && !side_effects_p (set))
1220 set = NULL;
1221 else
1222 set_verified = 1;
1223 }
1224 if (!set)
1225 set = sub, set_verified = 0;
1226 else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
1227 || side_effects_p (sub))
1228 return NULL_RTX;
1229 break;
1230
1231 default:
1232 return NULL_RTX;
1233 }
1234 }
1235 }
1236 return set;
1237 }
1238
1239 /* Given an INSN, return nonzero if it has more than one SET, else return
1240 zero. */
1241
1242 int
1243 multiple_sets (rtx insn)
1244 {
1245 int found;
1246 int i;
1247
1248 /* INSN must be an insn. */
1249 if (! INSN_P (insn))
1250 return 0;
1251
1252 /* Only a PARALLEL can have multiple SETs. */
1253 if (GET_CODE (PATTERN (insn)) == PARALLEL)
1254 {
1255 for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1256 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1257 {
1258 /* If we have already found a SET, then return now. */
1259 if (found)
1260 return 1;
1261 else
1262 found = 1;
1263 }
1264 }
1265
1266 /* Either zero or one SET. */
1267 return 0;
1268 }
1269 \f
1270 /* Return nonzero if the destination of SET equals the source
1271 and there are no side effects. */
1272
1273 int
1274 set_noop_p (rtx set)
1275 {
1276 rtx src = SET_SRC (set);
1277 rtx dst = SET_DEST (set);
1278
1279 if (dst == pc_rtx && src == pc_rtx)
1280 return 1;
1281
1282 if (GET_CODE (dst) == MEM && GET_CODE (src) == MEM)
1283 return rtx_equal_p (dst, src) && !side_effects_p (dst);
1284
1285 if (GET_CODE (dst) == SIGN_EXTRACT
1286 || GET_CODE (dst) == ZERO_EXTRACT)
1287 return rtx_equal_p (XEXP (dst, 0), src)
1288 && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
1289 && !side_effects_p (src);
1290
1291 if (GET_CODE (dst) == STRICT_LOW_PART)
1292 dst = XEXP (dst, 0);
1293
1294 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1295 {
1296 if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1297 return 0;
1298 src = SUBREG_REG (src);
1299 dst = SUBREG_REG (dst);
1300 }
1301
1302 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
1303 && REGNO (src) == REGNO (dst));
1304 }
1305 \f
1306 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1307 value to itself. */
1308
1309 int
1310 noop_move_p (rtx insn)
1311 {
1312 rtx pat = PATTERN (insn);
1313
1314 if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1315 return 1;
1316
1317 /* Insns carrying these notes are useful later on. */
1318 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1319 return 0;
1320
1321 /* For now treat an insn with a REG_RETVAL note as a
1322 a special insn which should not be considered a no-op. */
1323 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
1324 return 0;
1325
1326 if (GET_CODE (pat) == SET && set_noop_p (pat))
1327 return 1;
1328
1329 if (GET_CODE (pat) == PARALLEL)
1330 {
1331 int i;
1332 /* If nothing but SETs of registers to themselves,
1333 this insn can also be deleted. */
1334 for (i = 0; i < XVECLEN (pat, 0); i++)
1335 {
1336 rtx tem = XVECEXP (pat, 0, i);
1337
1338 if (GET_CODE (tem) == USE
1339 || GET_CODE (tem) == CLOBBER)
1340 continue;
1341
1342 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1343 return 0;
1344 }
1345
1346 return 1;
1347 }
1348 return 0;
1349 }
1350 \f
1351
1352 /* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1353 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1354 If the object was modified, if we hit a partial assignment to X, or hit a
1355 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1356 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
1357 be the src. */
1358
rtx
find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
{
  rtx p;

  /* Walk backwards from the insn before *PINSN; stop at the first
     CODE_LABEL or at the start of the insn chain.  */
  for (p = PREV_INSN (*pinsn); p && GET_CODE (p) != CODE_LABEL;
       p = PREV_INSN (p))
    if (INSN_P (p))
      {
	rtx set = single_set (p);
	rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);

	if (set && rtx_equal_p (x, SET_DEST (set)))
	  {
	    rtx src = SET_SRC (set);

	    /* Prefer the REG_EQUAL value, unless it is an EXPR_LIST
	       (which cannot be used as a plain value).  */
	    if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
	      src = XEXP (note, 0);

	    if ((valid_to == NULL_RTX
		 || ! modified_between_p (src, PREV_INSN (p), valid_to))
		/* Reject hard registers because we don't usually want
		   to use them; we'd rather use a pseudo.  */
		&& (! (GET_CODE (src) == REG
		      && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
	      {
		*pinsn = p;
		return src;
	      }
	  }

	/* If set in non-simple way, we don't have a value.  */
	if (reg_set_p (x, p))
	  break;
      }

  /* No usable assignment found; the caller sees X itself.  */
  return x;
}
1397 \f
1398 /* Return nonzero if register in range [REGNO, ENDREGNO)
1399 appears either explicitly or implicitly in X
1400 other than being stored into.
1401
1402 References contained within the substructure at LOC do not count.
1403 LOC may be zero, meaning don't ignore anything. */
1404
1405 int
1406 refers_to_regno_p (unsigned int regno, unsigned int endregno, rtx x,
1407 rtx *loc)
1408 {
1409 int i;
1410 unsigned int x_regno;
1411 RTX_CODE code;
1412 const char *fmt;
1413
1414 repeat:
1415 /* The contents of a REG_NONNEG note is always zero, so we must come here
1416 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1417 if (x == 0)
1418 return 0;
1419
1420 code = GET_CODE (x);
1421
1422 switch (code)
1423 {
1424 case REG:
1425 x_regno = REGNO (x);
1426
1427 /* If we modifying the stack, frame, or argument pointer, it will
1428 clobber a virtual register. In fact, we could be more precise,
1429 but it isn't worth it. */
1430 if ((x_regno == STACK_POINTER_REGNUM
1431 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1432 || x_regno == ARG_POINTER_REGNUM
1433 #endif
1434 || x_regno == FRAME_POINTER_REGNUM)
1435 && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1436 return 1;
1437
1438 return (endregno > x_regno
1439 && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
1440 ? HARD_REGNO_NREGS (x_regno, GET_MODE (x))
1441 : 1));
1442
1443 case SUBREG:
1444 /* If this is a SUBREG of a hard reg, we can see exactly which
1445 registers are being modified. Otherwise, handle normally. */
1446 if (GET_CODE (SUBREG_REG (x)) == REG
1447 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1448 {
1449 unsigned int inner_regno = subreg_regno (x);
1450 unsigned int inner_endregno
1451 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1452 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
1453
1454 return endregno > inner_regno && regno < inner_endregno;
1455 }
1456 break;
1457
1458 case CLOBBER:
1459 case SET:
1460 if (&SET_DEST (x) != loc
1461 /* Note setting a SUBREG counts as referring to the REG it is in for
1462 a pseudo but not for hard registers since we can
1463 treat each word individually. */
1464 && ((GET_CODE (SET_DEST (x)) == SUBREG
1465 && loc != &SUBREG_REG (SET_DEST (x))
1466 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
1467 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1468 && refers_to_regno_p (regno, endregno,
1469 SUBREG_REG (SET_DEST (x)), loc))
1470 || (GET_CODE (SET_DEST (x)) != REG
1471 && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1472 return 1;
1473
1474 if (code == CLOBBER || loc == &SET_SRC (x))
1475 return 0;
1476 x = SET_SRC (x);
1477 goto repeat;
1478
1479 default:
1480 break;
1481 }
1482
1483 /* X does not match, so try its subexpressions. */
1484
1485 fmt = GET_RTX_FORMAT (code);
1486 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1487 {
1488 if (fmt[i] == 'e' && loc != &XEXP (x, i))
1489 {
1490 if (i == 0)
1491 {
1492 x = XEXP (x, 0);
1493 goto repeat;
1494 }
1495 else
1496 if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1497 return 1;
1498 }
1499 else if (fmt[i] == 'E')
1500 {
1501 int j;
1502 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1503 if (loc != &XVECEXP (x, i, j)
1504 && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1505 return 1;
1506 }
1507 }
1508 return 0;
1509 }
1510
1511 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1512 we check if any register number in X conflicts with the relevant register
1513 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1514 contains a MEM (we don't bother checking for memory addresses that can't
1515 conflict because we expect this to be a rare case. */
1516
int
reg_overlap_mentioned_p (rtx x, rtx in)
{
  unsigned int regno, endregno;

  /* Overly conservative.  */
  if (GET_CODE (x) == STRICT_LOW_PART
      || GET_CODE (x) == ZERO_EXTRACT
      || GET_CODE (x) == SIGN_EXTRACT)
    x = XEXP (x, 0);

  /* If either argument is a constant, then modifying X can not affect IN.  */
  if (CONSTANT_P (x) || CONSTANT_P (in))
    return 0;

  switch (GET_CODE (x))
    {
    case SUBREG:
      regno = REGNO (SUBREG_REG (x));
      /* For a hard-reg SUBREG, resolve to the first hard register the
	 SUBREG actually occupies.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	regno = subreg_regno (x);
      goto do_reg;

    case REG:
      regno = REGNO (x);
    do_reg:
      /* Hard registers may span several machine registers; pseudos
	 always count as one.  */
      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
			  ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
      return refers_to_regno_p (regno, endregno, in, (rtx*) 0);

    case MEM:
      {
	const char *fmt;
	int i;

	/* Any MEM conservatively overlaps any other MEM.  */
	if (GET_CODE (in) == MEM)
	  return 1;

	/* Otherwise look for a MEM anywhere inside IN.  */
	fmt = GET_RTX_FORMAT (GET_CODE (in));
	for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
	  if (fmt[i] == 'e' && reg_overlap_mentioned_p (x, XEXP (in, i)))
	    return 1;

	return 0;
      }

    case SCRATCH:
    case PC:
    case CC0:
      /* These only overlap if IN mentions the identical rtx.  */
      return reg_mentioned_p (x, in);

    case PARALLEL:
      {
	int i;

	/* If any register in here refers to it we return true.  */
	for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	  if (XEXP (XVECEXP (x, 0, i), 0) != 0
	      && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
	    return 1;
	return 0;
      }

    default:
      break;
    }

  /* X is not a kind of expression this function knows how to handle.  */
  abort ();
}
1586 \f
1587 /* Return the last value to which REG was set prior to INSN. If we can't
1588 find it easily, return 0.
1589
1590 We only return a REG, SUBREG, or constant because it is too hard to
1591 check if a MEM remains unchanged. */
1592
1593 rtx
1594 reg_set_last (rtx x, rtx insn)
1595 {
1596 rtx orig_insn = insn;
1597
1598 /* Scan backwards until reg_set_last_1 changed one of the above flags.
1599 Stop when we reach a label or X is a hard reg and we reach a
1600 CALL_INSN (if reg_set_last_last_regno is a hard reg).
1601
1602 If we find a set of X, ensure that its SET_SRC remains unchanged. */
1603
1604 /* We compare with <= here, because reg_set_last_last_regno
1605 is actually the number of the first reg *not* in X. */
1606 for (;
1607 insn && GET_CODE (insn) != CODE_LABEL
1608 && ! (GET_CODE (insn) == CALL_INSN
1609 && REGNO (x) <= FIRST_PSEUDO_REGISTER);
1610 insn = PREV_INSN (insn))
1611 if (INSN_P (insn))
1612 {
1613 rtx set = set_of (x, insn);
1614 /* OK, this function modify our register. See if we understand it. */
1615 if (set)
1616 {
1617 rtx last_value;
1618 if (GET_CODE (set) != SET || SET_DEST (set) != x)
1619 return 0;
1620 last_value = SET_SRC (x);
1621 if (CONSTANT_P (last_value)
1622 || ((GET_CODE (last_value) == REG
1623 || GET_CODE (last_value) == SUBREG)
1624 && ! reg_set_between_p (last_value,
1625 insn, orig_insn)))
1626 return last_value;
1627 else
1628 return 0;
1629 }
1630 }
1631
1632 return 0;
1633 }
1634 \f
1635 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1636 (X would be the pattern of an insn).
1637 FUN receives two arguments:
1638 the REG, MEM, CC0 or PC being stored in or clobbered,
1639 the SET or CLOBBER rtx that does the store.
1640
1641 If the item being stored in or clobbered is a SUBREG of a hard register,
1642 the SUBREG will be passed. */
1643
1644 void
1645 note_stores (rtx x, void (*fun) (rtx, rtx, void *), void *data)
1646 {
1647 int i;
1648
1649 if (GET_CODE (x) == COND_EXEC)
1650 x = COND_EXEC_CODE (x);
1651
1652 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1653 {
1654 rtx dest = SET_DEST (x);
1655
1656 while ((GET_CODE (dest) == SUBREG
1657 && (GET_CODE (SUBREG_REG (dest)) != REG
1658 || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1659 || GET_CODE (dest) == ZERO_EXTRACT
1660 || GET_CODE (dest) == SIGN_EXTRACT
1661 || GET_CODE (dest) == STRICT_LOW_PART)
1662 dest = XEXP (dest, 0);
1663
1664 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1665 each of whose first operand is a register. */
1666 if (GET_CODE (dest) == PARALLEL)
1667 {
1668 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1669 if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1670 (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1671 }
1672 else
1673 (*fun) (dest, x, data);
1674 }
1675
1676 else if (GET_CODE (x) == PARALLEL)
1677 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1678 note_stores (XVECEXP (x, 0, i), fun, data);
1679 }
1680 \f
1681 /* Like notes_stores, but call FUN for each expression that is being
1682 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1683 FUN for each expression, not any interior subexpressions. FUN receives a
1684 pointer to the expression and the DATA passed to this function.
1685
1686 Note that this is not quite the same test as that done in reg_referenced_p
1687 since that considers something as being referenced if it is being
1688 partially set, while we do not. */
1689
void
note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
{
  rtx body = *pbody;
  int i;

  switch (GET_CODE (body))
    {
    case COND_EXEC:
      /* Both the condition and the conditional body are uses.  */
      (*fun) (&COND_EXEC_TEST (body), data);
      note_uses (&COND_EXEC_CODE (body), fun, data);
      return;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
	note_uses (&XVECEXP (body, 0, i), fun, data);
      return;

    case USE:
      (*fun) (&XEXP (body, 0), data);
      return;

    case ASM_OPERANDS:
      /* Only the asm's inputs are uses; the output constraint strings
	 are not expressions.  */
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
	(*fun) (&ASM_OPERANDS_INPUT (body, i), data);
      return;

    case TRAP_IF:
      (*fun) (&TRAP_CONDITION (body), data);
      return;

    case PREFETCH:
      /* The prefetched address is a use.  */
      (*fun) (&XEXP (body, 0), data);
      return;

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
	(*fun) (&XVECEXP (body, 0, i), data);
      return;

    case CLOBBER:
      /* Clobbering a MEM uses its address expression.  */
      if (GET_CODE (XEXP (body, 0)) == MEM)
	(*fun) (&XEXP (XEXP (body, 0), 0), data);
      return;

    case SET:
      {
	rtx dest = SET_DEST (body);

	/* For sets we replace everything in source plus registers in memory
	   expression in store and operands of a ZERO_EXTRACT.  */
	(*fun) (&SET_SRC (body), data);

	if (GET_CODE (dest) == ZERO_EXTRACT)
	  {
	    (*fun) (&XEXP (dest, 1), data);
	    (*fun) (&XEXP (dest, 2), data);
	  }

	while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
	  dest = XEXP (dest, 0);

	/* A store to memory uses the address being stored through.  */
	if (GET_CODE (dest) == MEM)
	  (*fun) (&XEXP (dest, 0), data);
      }
      return;

    default:
      /* All the other possibilities never store.  */
      (*fun) (pbody, data);
      return;
    }
}
1764 \f
1765 /* Return nonzero if X's old contents don't survive after INSN.
1766 This will be true if X is (cc0) or if X is a register and
1767 X dies in INSN or because INSN entirely sets X.
1768
1769 "Entirely set" means set directly and not through a SUBREG,
1770 ZERO_EXTRACT or SIGN_EXTRACT, so no trace of the old contents remains.
1771 Likewise, REG_INC does not count.
1772
1773 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1774 but for this use that makes no difference, since regs don't overlap
1775 during their lifetimes. Therefore, this function may be used
1776 at any time after deaths have been computed (in flow.c).
1777
1778 If REG is a hard reg that occupies multiple machine registers, this
1779 function will only return 1 if each of those registers will be replaced
1780 by INSN. */
1781
1782 int
1783 dead_or_set_p (rtx insn, rtx x)
1784 {
1785 unsigned int regno, last_regno;
1786 unsigned int i;
1787
1788 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1789 if (GET_CODE (x) == CC0)
1790 return 1;
1791
1792 if (GET_CODE (x) != REG)
1793 abort ();
1794
1795 regno = REGNO (x);
1796 last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
1797 : regno + HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1);
1798
1799 for (i = regno; i <= last_regno; i++)
1800 if (! dead_or_set_regno_p (insn, i))
1801 return 0;
1802
1803 return 1;
1804 }
1805
1806 /* Utility function for dead_or_set_p to check an individual register. Also
1807 called from flow.c. */
1808
1809 int
1810 dead_or_set_regno_p (rtx insn, unsigned int test_regno)
1811 {
1812 unsigned int regno, endregno;
1813 rtx pattern;
1814
1815 /* See if there is a death note for something that includes TEST_REGNO. */
1816 if (find_regno_note (insn, REG_DEAD, test_regno))
1817 return 1;
1818
1819 if (GET_CODE (insn) == CALL_INSN
1820 && find_regno_fusage (insn, CLOBBER, test_regno))
1821 return 1;
1822
1823 pattern = PATTERN (insn);
1824
1825 if (GET_CODE (pattern) == COND_EXEC)
1826 pattern = COND_EXEC_CODE (pattern);
1827
1828 if (GET_CODE (pattern) == SET)
1829 {
1830 rtx dest = SET_DEST (pattern);
1831
1832 /* A value is totally replaced if it is the destination or the
1833 destination is a SUBREG of REGNO that does not change the number of
1834 words in it. */
1835 if (GET_CODE (dest) == SUBREG
1836 && (((GET_MODE_SIZE (GET_MODE (dest))
1837 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1838 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1839 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1840 dest = SUBREG_REG (dest);
1841
1842 if (GET_CODE (dest) != REG)
1843 return 0;
1844
1845 regno = REGNO (dest);
1846 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1847 : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));
1848
1849 return (test_regno >= regno && test_regno < endregno);
1850 }
1851 else if (GET_CODE (pattern) == PARALLEL)
1852 {
1853 int i;
1854
1855 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1856 {
1857 rtx body = XVECEXP (pattern, 0, i);
1858
1859 if (GET_CODE (body) == COND_EXEC)
1860 body = COND_EXEC_CODE (body);
1861
1862 if (GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1863 {
1864 rtx dest = SET_DEST (body);
1865
1866 if (GET_CODE (dest) == SUBREG
1867 && (((GET_MODE_SIZE (GET_MODE (dest))
1868 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1869 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1870 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1871 dest = SUBREG_REG (dest);
1872
1873 if (GET_CODE (dest) != REG)
1874 continue;
1875
1876 regno = REGNO (dest);
1877 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1878 : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));
1879
1880 if (test_regno >= regno && test_regno < endregno)
1881 return 1;
1882 }
1883 }
1884 }
1885
1886 return 0;
1887 }
1888
1889 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1890 If DATUM is nonzero, look for one whose datum is DATUM. */
1891
1892 rtx
1893 find_reg_note (rtx insn, enum reg_note kind, rtx datum)
1894 {
1895 rtx link;
1896
1897 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1898 if (! INSN_P (insn))
1899 return 0;
1900
1901 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1902 if (REG_NOTE_KIND (link) == kind
1903 && (datum == 0 || datum == XEXP (link, 0)))
1904 return link;
1905 return 0;
1906 }
1907
1908 /* Return the reg-note of kind KIND in insn INSN which applies to register
1909 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1910 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1911 it might be the case that the note overlaps REGNO. */
1912
1913 rtx
1914 find_regno_note (rtx insn, enum reg_note kind, unsigned int regno)
1915 {
1916 rtx link;
1917
1918 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1919 if (! INSN_P (insn))
1920 return 0;
1921
1922 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1923 if (REG_NOTE_KIND (link) == kind
1924 /* Verify that it is a register, so that scratch and MEM won't cause a
1925 problem here. */
1926 && GET_CODE (XEXP (link, 0)) == REG
1927 && REGNO (XEXP (link, 0)) <= regno
1928 && ((REGNO (XEXP (link, 0))
1929 + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
1930 : HARD_REGNO_NREGS (REGNO (XEXP (link, 0)),
1931 GET_MODE (XEXP (link, 0)))))
1932 > regno))
1933 return link;
1934 return 0;
1935 }
1936
1937 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1938 has such a note. */
1939
1940 rtx
1941 find_reg_equal_equiv_note (rtx insn)
1942 {
1943 rtx link;
1944
1945 if (!INSN_P (insn))
1946 return 0;
1947 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1948 if (REG_NOTE_KIND (link) == REG_EQUAL
1949 || REG_NOTE_KIND (link) == REG_EQUIV)
1950 {
1951 if (single_set (insn) == 0)
1952 return 0;
1953 return link;
1954 }
1955 return NULL;
1956 }
1957
1958 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1959 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1960
1961 int
1962 find_reg_fusage (rtx insn, enum rtx_code code, rtx datum)
1963 {
1964 /* If it's not a CALL_INSN, it can't possibly have a
1965 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
1966 if (GET_CODE (insn) != CALL_INSN)
1967 return 0;
1968
1969 if (! datum)
1970 abort ();
1971
1972 if (GET_CODE (datum) != REG)
1973 {
1974 rtx link;
1975
1976 for (link = CALL_INSN_FUNCTION_USAGE (insn);
1977 link;
1978 link = XEXP (link, 1))
1979 if (GET_CODE (XEXP (link, 0)) == code
1980 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
1981 return 1;
1982 }
1983 else
1984 {
1985 unsigned int regno = REGNO (datum);
1986
1987 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1988 to pseudo registers, so don't bother checking. */
1989
1990 if (regno < FIRST_PSEUDO_REGISTER)
1991 {
1992 unsigned int end_regno
1993 = regno + HARD_REGNO_NREGS (regno, GET_MODE (datum));
1994 unsigned int i;
1995
1996 for (i = regno; i < end_regno; i++)
1997 if (find_regno_fusage (insn, code, i))
1998 return 1;
1999 }
2000 }
2001
2002 return 0;
2003 }
2004
2005 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
2006 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
2007
2008 int
2009 find_regno_fusage (rtx insn, enum rtx_code code, unsigned int regno)
2010 {
2011 rtx link;
2012
2013 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
2014 to pseudo registers, so don't bother checking. */
2015
2016 if (regno >= FIRST_PSEUDO_REGISTER
2017 || GET_CODE (insn) != CALL_INSN )
2018 return 0;
2019
2020 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
2021 {
2022 unsigned int regnote;
2023 rtx op, reg;
2024
2025 if (GET_CODE (op = XEXP (link, 0)) == code
2026 && GET_CODE (reg = XEXP (op, 0)) == REG
2027 && (regnote = REGNO (reg)) <= regno
2028 && regnote + HARD_REGNO_NREGS (regnote, GET_MODE (reg)) > regno)
2029 return 1;
2030 }
2031
2032 return 0;
2033 }
2034
2035 /* Return true if INSN is a call to a pure function. */
2036
2037 int
2038 pure_call_p (rtx insn)
2039 {
2040 rtx link;
2041
2042 if (GET_CODE (insn) != CALL_INSN || ! CONST_OR_PURE_CALL_P (insn))
2043 return 0;
2044
2045 /* Look for the note that differentiates const and pure functions. */
2046 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
2047 {
2048 rtx u, m;
2049
2050 if (GET_CODE (u = XEXP (link, 0)) == USE
2051 && GET_CODE (m = XEXP (u, 0)) == MEM && GET_MODE (m) == BLKmode
2052 && GET_CODE (XEXP (m, 0)) == SCRATCH)
2053 return 1;
2054 }
2055
2056 return 0;
2057 }
2058 \f
2059 /* Remove register note NOTE from the REG_NOTES of INSN. */
2060
2061 void
2062 remove_note (rtx insn, rtx note)
2063 {
2064 rtx link;
2065
2066 if (note == NULL_RTX)
2067 return;
2068
2069 if (REG_NOTES (insn) == note)
2070 {
2071 REG_NOTES (insn) = XEXP (note, 1);
2072 return;
2073 }
2074
2075 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2076 if (XEXP (link, 1) == note)
2077 {
2078 XEXP (link, 1) = XEXP (note, 1);
2079 return;
2080 }
2081
2082 abort ();
2083 }
2084
2085 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2086 return 1 if it is found. A simple equality test is used to determine if
2087 NODE matches. */
2088
2089 int
2090 in_expr_list_p (rtx listp, rtx node)
2091 {
2092 rtx x;
2093
2094 for (x = listp; x; x = XEXP (x, 1))
2095 if (node == XEXP (x, 0))
2096 return 1;
2097
2098 return 0;
2099 }
2100
2101 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2102 remove that entry from the list if it is found.
2103
2104 A simple equality test is used to determine if NODE matches. */
2105
2106 void
2107 remove_node_from_expr_list (rtx node, rtx *listp)
2108 {
2109 rtx temp = *listp;
2110 rtx prev = NULL_RTX;
2111
2112 while (temp)
2113 {
2114 if (node == XEXP (temp, 0))
2115 {
2116 /* Splice the node out of the list. */
2117 if (prev)
2118 XEXP (prev, 1) = XEXP (temp, 1);
2119 else
2120 *listp = XEXP (temp, 1);
2121
2122 return;
2123 }
2124
2125 prev = temp;
2126 temp = XEXP (temp, 1);
2127 }
2128 }
2129 \f
2130 /* Nonzero if X contains any volatile instructions. These are instructions
2131 which may cause unpredictable machine state instructions, and thus no
2132 instructions should be moved or combined across them. This includes
2133 only volatile asms and UNSPEC_VOLATILE instructions. */
2134
2135 int
2136 volatile_insn_p (rtx x)
2137 {
2138 RTX_CODE code;
2139
2140 code = GET_CODE (x);
2141 switch (code)
2142 {
2143 case LABEL_REF:
2144 case SYMBOL_REF:
2145 case CONST_INT:
2146 case CONST:
2147 case CONST_DOUBLE:
2148 case CONST_VECTOR:
2149 case CC0:
2150 case PC:
2151 case REG:
2152 case SCRATCH:
2153 case CLOBBER:
2154 case ADDR_VEC:
2155 case ADDR_DIFF_VEC:
2156 case CALL:
2157 case MEM:
2158 return 0;
2159
2160 case UNSPEC_VOLATILE:
2161 /* case TRAP_IF: This isn't clear yet. */
2162 return 1;
2163
2164 case ASM_INPUT:
2165 case ASM_OPERANDS:
2166 if (MEM_VOLATILE_P (x))
2167 return 1;
2168
2169 default:
2170 break;
2171 }
2172
2173 /* Recursively scan the operands of this expression. */
2174
2175 {
2176 const char *fmt = GET_RTX_FORMAT (code);
2177 int i;
2178
2179 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2180 {
2181 if (fmt[i] == 'e')
2182 {
2183 if (volatile_insn_p (XEXP (x, i)))
2184 return 1;
2185 }
2186 else if (fmt[i] == 'E')
2187 {
2188 int j;
2189 for (j = 0; j < XVECLEN (x, i); j++)
2190 if (volatile_insn_p (XVECEXP (x, i, j)))
2191 return 1;
2192 }
2193 }
2194 }
2195 return 0;
2196 }
2197
2198 /* Nonzero if X contains any volatile memory references
2199 UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
2200
2201 int
2202 volatile_refs_p (rtx x)
2203 {
2204 RTX_CODE code;
2205
2206 code = GET_CODE (x);
2207 switch (code)
2208 {
2209 case LABEL_REF:
2210 case SYMBOL_REF:
2211 case CONST_INT:
2212 case CONST:
2213 case CONST_DOUBLE:
2214 case CONST_VECTOR:
2215 case CC0:
2216 case PC:
2217 case REG:
2218 case SCRATCH:
2219 case CLOBBER:
2220 case ADDR_VEC:
2221 case ADDR_DIFF_VEC:
2222 return 0;
2223
2224 case UNSPEC_VOLATILE:
2225 return 1;
2226
2227 case MEM:
2228 case ASM_INPUT:
2229 case ASM_OPERANDS:
2230 if (MEM_VOLATILE_P (x))
2231 return 1;
2232
2233 default:
2234 break;
2235 }
2236
2237 /* Recursively scan the operands of this expression. */
2238
2239 {
2240 const char *fmt = GET_RTX_FORMAT (code);
2241 int i;
2242
2243 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2244 {
2245 if (fmt[i] == 'e')
2246 {
2247 if (volatile_refs_p (XEXP (x, i)))
2248 return 1;
2249 }
2250 else if (fmt[i] == 'E')
2251 {
2252 int j;
2253 for (j = 0; j < XVECLEN (x, i); j++)
2254 if (volatile_refs_p (XVECEXP (x, i, j)))
2255 return 1;
2256 }
2257 }
2258 }
2259 return 0;
2260 }
2261
2262 /* Similar to above, except that it also rejects register pre- and post-
2263 incrementing. */
2264
2265 int
2266 side_effects_p (rtx x)
2267 {
2268 RTX_CODE code;
2269
2270 code = GET_CODE (x);
2271 switch (code)
2272 {
2273 case LABEL_REF:
2274 case SYMBOL_REF:
2275 case CONST_INT:
2276 case CONST:
2277 case CONST_DOUBLE:
2278 case CONST_VECTOR:
2279 case CC0:
2280 case PC:
2281 case REG:
2282 case SCRATCH:
2283 case ADDR_VEC:
2284 case ADDR_DIFF_VEC:
2285 return 0;
2286
2287 case CLOBBER:
2288 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2289 when some combination can't be done. If we see one, don't think
2290 that we can simplify the expression. */
2291 return (GET_MODE (x) != VOIDmode);
2292
2293 case PRE_INC:
2294 case PRE_DEC:
2295 case POST_INC:
2296 case POST_DEC:
2297 case PRE_MODIFY:
2298 case POST_MODIFY:
2299 case CALL:
2300 case UNSPEC_VOLATILE:
2301 /* case TRAP_IF: This isn't clear yet. */
2302 return 1;
2303
2304 case MEM:
2305 case ASM_INPUT:
2306 case ASM_OPERANDS:
2307 if (MEM_VOLATILE_P (x))
2308 return 1;
2309
2310 default:
2311 break;
2312 }
2313
2314 /* Recursively scan the operands of this expression. */
2315
2316 {
2317 const char *fmt = GET_RTX_FORMAT (code);
2318 int i;
2319
2320 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2321 {
2322 if (fmt[i] == 'e')
2323 {
2324 if (side_effects_p (XEXP (x, i)))
2325 return 1;
2326 }
2327 else if (fmt[i] == 'E')
2328 {
2329 int j;
2330 for (j = 0; j < XVECLEN (x, i); j++)
2331 if (side_effects_p (XVECEXP (x, i, j)))
2332 return 1;
2333 }
2334 }
2335 }
2336 return 0;
2337 }
2338 \f
2339 /* Return nonzero if evaluating rtx X might cause a trap. */
2340
int
may_trap_p (rtx x)
{
  int i;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 0;
  code = GET_CODE (x);
  switch (code)
    {
      /* Handle these cases quickly.  */
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST:
    case PC:
    case CC0:
    case REG:
    case SCRATCH:
      return 0;

    case ASM_INPUT:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
      return 1;

    case ASM_OPERANDS:
      return MEM_VOLATILE_P (x);

      /* Memory ref can trap unless it's a static var or a stack slot.  */
    case MEM:
      if (MEM_NOTRAP_P (x))
	return 0;
      return rtx_addr_can_trap_p (XEXP (x, 0));

      /* Division by a non-constant might trap.  */
    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      if (HONOR_SNANS (GET_MODE (x)))
	return 1;
      /* A non-constant divisor might be zero; a constant-but-zero
	 divisor certainly traps.  Trapping FP math can trap even for
	 nonzero divisors.  */
      if (! CONSTANT_P (XEXP (x, 1))
	  || (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
	      && flag_trapping_math))
	return 1;
      if (XEXP (x, 1) == const0_rtx)
	return 1;
      break;

    case EXPR_LIST:
      /* An EXPR_LIST is used to represent a function call.  This
	 certainly may trap.  */
      return 1;

    case GE:
    case GT:
    case LE:
    case LT:
    case COMPARE:
      /* Some floating point comparisons may trap.  */
      if (!flag_trapping_math)
	break;
      /* ??? There is no machine independent way to check for tests that trap
	 when COMPARE is used, though many targets do make this distinction.
	 For instance, sparc uses CCFPE for compares which generate exceptions
	 and CCFP for compares which do not generate exceptions.  */
      if (HONOR_NANS (GET_MODE (x)))
	return 1;
      /* But often the compare has some CC mode, so check operand
	 modes as well.  */
      if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
	  || HONOR_NANS (GET_MODE (XEXP (x, 1))))
	return 1;
      break;

    case EQ:
    case NE:
      /* Equality comparisons trap only on signaling NaNs.  */
      if (HONOR_SNANS (GET_MODE (x)))
	return 1;
      /* Often comparison is CC mode, so check operand modes.  */
      if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
	  || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
	return 1;
      break;

    case FIX:
      /* Conversion of floating point might trap.  */
      if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
	return 1;
      break;

    case NEG:
    case ABS:
      /* These operations don't trap even with floating point.  */
      break;

    default:
      /* Any floating arithmetic may trap.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
	  && flag_trapping_math)
	return 1;
    }

  /* Recursively check the operands; a trap anywhere inside counts.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (may_trap_p (XEXP (x, i)))
	    return 1;
	}
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (may_trap_p (XVECEXP (x, i, j)))
	      return 1;
	}
    }
  return 0;
}
2467 \f
2468 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2469 i.e., an inequality. */
2470
2471 int
2472 inequality_comparisons_p (rtx x)
2473 {
2474 const char *fmt;
2475 int len, i;
2476 enum rtx_code code = GET_CODE (x);
2477
2478 switch (code)
2479 {
2480 case REG:
2481 case SCRATCH:
2482 case PC:
2483 case CC0:
2484 case CONST_INT:
2485 case CONST_DOUBLE:
2486 case CONST_VECTOR:
2487 case CONST:
2488 case LABEL_REF:
2489 case SYMBOL_REF:
2490 return 0;
2491
2492 case LT:
2493 case LTU:
2494 case GT:
2495 case GTU:
2496 case LE:
2497 case LEU:
2498 case GE:
2499 case GEU:
2500 return 1;
2501
2502 default:
2503 break;
2504 }
2505
2506 len = GET_RTX_LENGTH (code);
2507 fmt = GET_RTX_FORMAT (code);
2508
2509 for (i = 0; i < len; i++)
2510 {
2511 if (fmt[i] == 'e')
2512 {
2513 if (inequality_comparisons_p (XEXP (x, i)))
2514 return 1;
2515 }
2516 else if (fmt[i] == 'E')
2517 {
2518 int j;
2519 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2520 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2521 return 1;
2522 }
2523 }
2524
2525 return 0;
2526 }
2527 \f
2528 /* Replace any occurrence of FROM in X with TO. The function does
2529 not enter into CONST_DOUBLE for the replace.
2530
2531 Note that copying is not done so X must not be shared unless all copies
2532 are to be modified. */
2533
rtx
replace_rtx (rtx x, rtx from, rtx to)
{
  int i, j;
  const char *fmt;

  /* The following prevents loops occurrence when we change MEM in
     CONST_DOUBLE onto the same CONST_DOUBLE.  */
  if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
    return x;

  if (x == from)
    return to;

  /* Allow this function to make replacements in EXPR_LISTs.  */
  if (x == 0)
    return 0;

  /* If the replacement inside a SUBREG produced a constant, fold the
     whole SUBREG to a constant rather than leaving an invalid
     (subreg (const_int)).  */
  if (GET_CODE (x) == SUBREG)
    {
      rtx new = replace_rtx (SUBREG_REG (x), from, to);

      if (GET_CODE (new) == CONST_INT)
	{
	  x = simplify_subreg (GET_MODE (x), new,
			       GET_MODE (SUBREG_REG (x)),
			       SUBREG_BYTE (x));
	  /* A constant operand must always simplify.  */
	  if (! x)
	    abort ();
	}
      else
	SUBREG_REG (x) = new;

      return x;
    }
  /* Likewise fold (zero_extend (const_int)) to the extended constant.  */
  else if (GET_CODE (x) == ZERO_EXTEND)
    {
      rtx new = replace_rtx (XEXP (x, 0), from, to);

      if (GET_CODE (new) == CONST_INT)
	{
	  x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
					new, GET_MODE (XEXP (x, 0)));
	  if (! x)
	    abort ();
	}
      else
	XEXP (x, 0) = new;

      return x;
    }

  /* Recurse in place over all remaining operands; X is modified
     destructively, per the function's contract.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
    }

  return x;
}
2598 \f
2599 /* Throughout the rtx X, replace many registers according to REG_MAP.
2600 Return the replacement for X (which may be X with altered contents).
2601 REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2602 NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2603
2604 We only support REG_MAP entries of REG or SUBREG. Also, hard registers
2605 should not be mapped to pseudos or vice versa since validate_change
2606 is not called.
2607
2608 If REPLACE_DEST is 1, replacements are also done in destinations;
2609 otherwise, only sources are replaced. */
2610
rtx
replace_regs (rtx x, rtx *reg_map, unsigned int nregs, int replace_dest)
{
  enum rtx_code code;
  int i;
  const char *fmt;

  if (x == 0)
    return x;

  code = GET_CODE (x);
  switch (code)
    {
    /* Constants and other leaf rtxes contain no registers.  */
    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return x;

    case REG:
      /* Verify that the register has an entry before trying to access it.  */
      if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
	{
	  /* SUBREGs can't be shared.  Always return a copy to ensure that if
	     this replacement occurs more than once then each instance will
	     get distinct rtx.  */
	  if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
	    return copy_rtx (reg_map[REGNO (x)]);
	  return reg_map[REGNO (x)];
	}
      return x;

    case SUBREG:
      /* Prevent making nested SUBREGs.  */
      if (GET_CODE (SUBREG_REG (x)) == REG && REGNO (SUBREG_REG (x)) < nregs
	  && reg_map[REGNO (SUBREG_REG (x))] != 0
	  && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
	{
	  rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
	  return simplify_gen_subreg (GET_MODE (x), map_val,
				      GET_MODE (SUBREG_REG (x)),
				      SUBREG_BYTE (x));
	}
      /* Otherwise fall through to the generic operand walk below.  */
      break;

    case SET:
      if (replace_dest)
	SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);

      else if (GET_CODE (SET_DEST (x)) == MEM
	       || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
	/* Even if we are not to replace destinations, replace register if it
	   is CONTAINED in destination (destination is memory or
	   STRICT_LOW_PART).  */
	XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
					       reg_map, nregs, 0);
      else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
	/* Similarly, for ZERO_EXTRACT we replace all operands.  The break
	   leaves the switch so the generic walk below visits every
	   operand, including the extract's position/length.  */
	break;

      SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
      return x;

    default:
      break;
    }

  /* Generic case: destructively replace registers in each operand.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
					      nregs, replace_dest);
	}
    }
  return x;
}
2698
2699 /* Replace occurrences of the old label in *X with the new one.
2700 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
2701
int
replace_label (rtx *x, void *data)
{
  rtx l = *x;
  rtx tmp;
  rtx old_label = ((replace_label_data *) data)->r1;
  rtx new_label = ((replace_label_data *) data)->r2;
  bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;

  if (l == NULL_RTX)
    return 0;

  /* A MEM of a constant-pool SYMBOL_REF may reference the old label
     inside the pooled constant; in that case build a replacement
     constant and repoint the MEM at it.  */
  if (GET_CODE (l) == MEM
      && (tmp = XEXP (l, 0)) != NULL_RTX
      && GET_CODE (tmp) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (tmp))
    {
      rtx c = get_pool_constant (tmp);
      if (rtx_referenced_p (old_label, c))
	{
	  rtx new_c, new_l;
	  replace_label_data *d = (replace_label_data *) data;

	  /* Create a copy of constant C; replace the label inside
	     but do not update LABEL_NUSES because uses in constant pool
	     are not counted.  */
	  new_c = copy_rtx (c);
	  d->update_label_nuses = false;
	  for_each_rtx (&new_c, replace_label, data);
	  /* Restore the caller's flag after the recursive walk.  */
	  d->update_label_nuses = update_label_nuses;

	  /* Add the new constant NEW_C to constant pool and replace
	     the old reference to constant by new reference.  */
	  new_l = force_const_mem (get_pool_mode (tmp), new_c);
	  *x = replace_rtx (l, l, new_l);
	}
      return 0;
    }

  /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
     field.  This is not handled by for_each_rtx because it doesn't
     handle unprinted ('0') fields.  */
  if (GET_CODE (l) == JUMP_INSN && JUMP_LABEL (l) == old_label)
    JUMP_LABEL (l) = new_label;

  /* Rewrite a direct LABEL_REF or INSN_LIST reference, maintaining
     LABEL_NUSES counts when requested.  */
  if ((GET_CODE (l) == LABEL_REF
       || GET_CODE (l) == INSN_LIST)
      && XEXP (l, 0) == old_label)
    {
      XEXP (l, 0) = new_label;
      if (update_label_nuses)
	{
	  ++LABEL_NUSES (new_label);
	  --LABEL_NUSES (old_label);
	}
      return 0;
    }

  return 0;
}
2762
2763 /* When *BODY is equal to X or X is directly referenced by *BODY
2764 return nonzero, thus FOR_EACH_RTX stops traversing and returns nonzero
2765 too, otherwise FOR_EACH_RTX continues traversing *BODY. */
2766
2767 static int
2768 rtx_referenced_p_1 (rtx *body, void *x)
2769 {
2770 rtx y = (rtx) x;
2771
2772 if (*body == NULL_RTX)
2773 return y == NULL_RTX;
2774
2775 /* Return true if a label_ref *BODY refers to label Y. */
2776 if (GET_CODE (*body) == LABEL_REF && GET_CODE (y) == CODE_LABEL)
2777 return XEXP (*body, 0) == y;
2778
2779 /* If *BODY is a reference to pool constant traverse the constant. */
2780 if (GET_CODE (*body) == SYMBOL_REF
2781 && CONSTANT_POOL_ADDRESS_P (*body))
2782 return rtx_referenced_p (y, get_pool_constant (*body));
2783
2784 /* By default, compare the RTL expressions. */
2785 return rtx_equal_p (*body, y);
2786 }
2787
2788 /* Return true if X is referenced in BODY. */
2789
2790 int
2791 rtx_referenced_p (rtx x, rtx body)
2792 {
2793 return for_each_rtx (&body, rtx_referenced_p_1, x);
2794 }
2795
2796 /* If INSN is a tablejump return true and store the label (before jump table) to
2797 *LABELP and the jump table to *TABLEP. LABELP and TABLEP may be NULL. */
2798
2799 bool
2800 tablejump_p (rtx insn, rtx *labelp, rtx *tablep)
2801 {
2802 rtx label, table;
2803
2804 if (GET_CODE (insn) == JUMP_INSN
2805 && (label = JUMP_LABEL (insn)) != NULL_RTX
2806 && (table = next_active_insn (label)) != NULL_RTX
2807 && GET_CODE (table) == JUMP_INSN
2808 && (GET_CODE (PATTERN (table)) == ADDR_VEC
2809 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
2810 {
2811 if (labelp)
2812 *labelp = label;
2813 if (tablep)
2814 *tablep = table;
2815 return true;
2816 }
2817 return false;
2818 }
2819
2820 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2821 constant that is not in the constant pool and not in the condition
2822 of an IF_THEN_ELSE. */
2823
2824 static int
2825 computed_jump_p_1 (rtx x)
2826 {
2827 enum rtx_code code = GET_CODE (x);
2828 int i, j;
2829 const char *fmt;
2830
2831 switch (code)
2832 {
2833 case LABEL_REF:
2834 case PC:
2835 return 0;
2836
2837 case CONST:
2838 case CONST_INT:
2839 case CONST_DOUBLE:
2840 case CONST_VECTOR:
2841 case SYMBOL_REF:
2842 case REG:
2843 return 1;
2844
2845 case MEM:
2846 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2847 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2848
2849 case IF_THEN_ELSE:
2850 return (computed_jump_p_1 (XEXP (x, 1))
2851 || computed_jump_p_1 (XEXP (x, 2)));
2852
2853 default:
2854 break;
2855 }
2856
2857 fmt = GET_RTX_FORMAT (code);
2858 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2859 {
2860 if (fmt[i] == 'e'
2861 && computed_jump_p_1 (XEXP (x, i)))
2862 return 1;
2863
2864 else if (fmt[i] == 'E')
2865 for (j = 0; j < XVECLEN (x, i); j++)
2866 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2867 return 1;
2868 }
2869
2870 return 0;
2871 }
2872
2873 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2874
2875 Tablejumps and casesi insns are not considered indirect jumps;
2876 we can recognize them by a (use (label_ref)). */
2877
int
computed_jump_p (rtx insn)
{
  int i;
  if (GET_CODE (insn) == JUMP_INSN)
    {
      rtx pat = PATTERN (insn);

      /* A REG_LABEL note marks the jump target explicitly, so this is
	 not an indirect jump.  */
      if (find_reg_note (insn, REG_LABEL, NULL_RTX))
	return 0;
      else if (GET_CODE (pat) == PARALLEL)
	{
	  int len = XVECLEN (pat, 0);
	  int has_use_labelref = 0;

	  /* A (use (label_ref ...)) identifies tablejump/casesi insns,
	     which are not considered computed jumps.  */
	  for (i = len - 1; i >= 0; i--)
	    if (GET_CODE (XVECEXP (pat, 0, i)) == USE
		&& (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
		    == LABEL_REF))
	      has_use_labelref = 1;

	  /* Otherwise, look for a set of PC from a computed value.  */
	  if (! has_use_labelref)
	    for (i = len - 1; i >= 0; i--)
	      if (GET_CODE (XVECEXP (pat, 0, i)) == SET
		  && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
		  && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
		return 1;
	}
      else if (GET_CODE (pat) == SET
	       && SET_DEST (pat) == pc_rtx
	       && computed_jump_p_1 (SET_SRC (pat)))
	return 1;
    }
  return 0;
}
2913
2914 /* Traverse X via depth-first search, calling F for each
2915 sub-expression (including X itself). F is also passed the DATA.
2916 If F returns -1, do not traverse sub-expressions, but continue
2917 traversing the rest of the tree. If F ever returns any other
2918 nonzero value, stop the traversal, and return the value returned
2919 by F. Otherwise, return 0. This function does not traverse inside
2920 tree structure that contains RTX_EXPRs, or into sub-expressions
2921 whose format code is `0' since it is not known whether or not those
2922 codes are actually RTL.
2923
2924 This routine is very general, and could (should?) be used to
2925 implement many of the other routines in this file. */
2926
int
for_each_rtx (rtx *x, rtx_function f, void *data)
{
  int result;
  int length;
  const char *format;
  int i;

  /* Call F on X.  */
  result = (*f) (x, data);
  if (result == -1)
    /* Do not traverse sub-expressions.  */
    return 0;
  else if (result != 0)
    /* Stop the traversal.  */
    return result;

  if (*x == NULL_RTX)
    /* There are no sub-expressions.  */
    return 0;

  length = GET_RTX_LENGTH (GET_CODE (*x));
  format = GET_RTX_FORMAT (GET_CODE (*x));

  for (i = 0; i < length; ++i)
    {
      switch (format[i])
	{
	case 'e':
	  /* A single sub-expression; propagate any nonzero result.  */
	  result = for_each_rtx (&XEXP (*x, i), f, data);
	  if (result != 0)
	    return result;
	  break;

	case 'V':
	case 'E':
	  /* A (possibly null) vector of sub-expressions.  */
	  if (XVEC (*x, i) != 0)
	    {
	      int j;
	      for (j = 0; j < XVECLEN (*x, i); ++j)
		{
		  result = for_each_rtx (&XVECEXP (*x, i, j), f, data);
		  if (result != 0)
		    return result;
		}
	    }
	  break;

	default:
	  /* Nothing to do: '0', 'i', 'w', 's', etc. are not rtxes.  */
	  break;
	}

    }

  return 0;
}
2984
2985 /* Searches X for any reference to REGNO, returning the rtx of the
2986 reference found if any. Otherwise, returns NULL_RTX. */
2987
2988 rtx
2989 regno_use_in (unsigned int regno, rtx x)
2990 {
2991 const char *fmt;
2992 int i, j;
2993 rtx tem;
2994
2995 if (GET_CODE (x) == REG && REGNO (x) == regno)
2996 return x;
2997
2998 fmt = GET_RTX_FORMAT (GET_CODE (x));
2999 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3000 {
3001 if (fmt[i] == 'e')
3002 {
3003 if ((tem = regno_use_in (regno, XEXP (x, i))))
3004 return tem;
3005 }
3006 else if (fmt[i] == 'E')
3007 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3008 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
3009 return tem;
3010 }
3011
3012 return NULL_RTX;
3013 }
3014
3015 /* Return a value indicating whether OP, an operand of a commutative
3016 operation, is preferred as the first or second operand. The higher
3017 the value, the stronger the preference for being the first operand.
3018 We use negative values to indicate a preference for the first operand
3019 and positive values for the second operand. */
3020
int
commutative_operand_precedence (rtx op)
{
  /* Constants always come the second operand.  Prefer "nice" constants.
     Note that the checks are ordered: a bare CONST_INT/CONST_DOUBLE
     ranks lower (further toward second position) than one found only
     after looking through a constant-pool reference.  */
  if (GET_CODE (op) == CONST_INT)
    return -7;
  if (GET_CODE (op) == CONST_DOUBLE)
    return -6;
  op = avoid_constant_pool_reference (op);
  if (GET_CODE (op) == CONST_INT)
    return -5;
  if (GET_CODE (op) == CONST_DOUBLE)
    return -4;
  if (CONSTANT_P (op))
    return -3;

  /* SUBREGs of objects should come second.  */
  if (GET_CODE (op) == SUBREG
      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op))) == 'o')
    return -2;

  /* If only one operand is a `neg', `not',
     `mult', `plus', or `minus' expression, it will be the first
     operand.  */
  if (GET_CODE (op) == NEG || GET_CODE (op) == NOT
      || GET_CODE (op) == MULT || GET_CODE (op) == PLUS
      || GET_CODE (op) == MINUS)
    return 2;

  /* Complex expressions should be the first, so decrease priority
     of objects.  */
  if (GET_RTX_CLASS (GET_CODE (op)) == 'o')
    return -1;
  return 0;
}
3056
3057 /* Return 1 iff it is necessary to swap operands of commutative operation
3058 in order to canonicalize expression. */
3059
3060 int
3061 swap_commutative_operands_p (rtx x, rtx y)
3062 {
3063 return (commutative_operand_precedence (x)
3064 < commutative_operand_precedence (y));
3065 }
3066
3067 /* Return 1 if X is an autoincrement side effect and the register is
3068 not the stack pointer. */
3069 int
3070 auto_inc_p (rtx x)
3071 {
3072 switch (GET_CODE (x))
3073 {
3074 case PRE_INC:
3075 case POST_INC:
3076 case PRE_DEC:
3077 case POST_DEC:
3078 case PRE_MODIFY:
3079 case POST_MODIFY:
3080 /* There are no REG_INC notes for SP. */
3081 if (XEXP (x, 0) != stack_pointer_rtx)
3082 return 1;
3083 default:
3084 break;
3085 }
3086 return 0;
3087 }
3088
3089 /* Return 1 if the sequence of instructions beginning with FROM and up
3090 to and including TO is safe to move. If NEW_TO is non-NULL, and
3091 the sequence is not already safe to move, but can be easily
3092 extended to a sequence which is safe, then NEW_TO will point to the
3093 end of the extended sequence.
3094
3095 For now, this function only checks that the region contains whole
3096 exception regions, but it could be extended to check additional
3097 conditions as well. */
3098
int
insns_safe_to_move_p (rtx from, rtx to, rtx *new_to)
{
  int eh_region_count = 0;
  int past_to_p = 0;
  rtx r = from;

  /* By default, assume the end of the region will be what was
     suggested.  */
  if (new_to)
    *new_to = to;

  while (r)
    {
      if (GET_CODE (r) == NOTE)
	{
	  switch (NOTE_LINE_NUMBER (r))
	    {
	    case NOTE_INSN_EH_REGION_BEG:
	      ++eh_region_count;
	      break;

	    case NOTE_INSN_EH_REGION_END:
	      if (eh_region_count == 0)
		/* This sequence of instructions contains the end of
		   an exception region, but not the beginning.  Moving
		   it will cause chaos.  */
		return 0;

	      --eh_region_count;
	      break;

	    default:
	      break;
	    }
	}
      else if (past_to_p)
	/* If we've passed TO, and we see a non-note instruction, we
	   can't extend the sequence to a movable sequence.  */
	return 0;

      if (r == to)
	{
	  if (!new_to)
	    /* It's OK to move the sequence if there were matched sets of
	       exception region notes.  */
	    return eh_region_count == 0;

	  past_to_p = 1;
	}

      /* It's OK to move the sequence if there were matched sets of
	 exception region notes.  */
      if (past_to_p && eh_region_count == 0)
	{
	  *new_to = r;
	  return 1;
	}

      /* Go to the next instruction.  */
      r = NEXT_INSN (r);
    }

  return 0;
}
3164
3165 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
int
loc_mentioned_in_p (rtx *loc, rtx in)
{
  enum rtx_code code = GET_CODE (in);
  const char *fmt = GET_RTX_FORMAT (code);
  int i, j;

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      /* Compare the ADDRESS of the operand slot, not its contents:
	 LOC identifies a particular location within an insn.  */
      if (loc == &in->u.fld[i].rtx)
	return 1;
      if (fmt[i] == 'e')
	{
	  if (loc_mentioned_in_p (loc, XEXP (in, i)))
	    return 1;
	}
      else if (fmt[i] == 'E')
	for (j = XVECLEN (in, i) - 1; j >= 0; j--)
	  if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
	    return 1;
    }
  return 0;
}
3189
3190 /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
3191 and SUBREG_BYTE, return the bit offset where the subreg begins
3192 (counting from the least significant bit of the operand). */
3193
unsigned int
subreg_lsb_1 (enum machine_mode outer_mode,
	      enum machine_mode inner_mode,
	      unsigned int subreg_byte)
{
  unsigned int bitpos;
  unsigned int byte;
  unsigned int word;

  /* A paradoxical subreg begins at bit position 0.  */
  if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
    return 0;

  if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
    /* If the subreg crosses a word boundary ensure that
       it also begins and ends on a word boundary.  */
    if ((subreg_byte % UNITS_PER_WORD
	 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
	&& (subreg_byte % UNITS_PER_WORD
	    || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD))
      abort ();

  /* Compute which word of the inner value the subreg starts in,
     counting from the least significant word.  */
  if (WORDS_BIG_ENDIAN)
    word = (GET_MODE_SIZE (inner_mode)
	    - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
  else
    word = subreg_byte / UNITS_PER_WORD;
  bitpos = word * BITS_PER_WORD;

  /* Then the byte offset within that word, again from the least
     significant end.  */
  if (BYTES_BIG_ENDIAN)
    byte = (GET_MODE_SIZE (inner_mode)
	    - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
  else
    byte = subreg_byte % UNITS_PER_WORD;
  bitpos += byte * BITS_PER_UNIT;

  return bitpos;
}
3232
3233 /* Given a subreg X, return the bit offset where the subreg begins
3234 (counting from the least significant bit of the reg). */
3235
3236 unsigned int
3237 subreg_lsb (rtx x)
3238 {
3239 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3240 SUBREG_BYTE (x));
3241 }
3242
3243 /* This function returns the regno offset of a subreg expression.
3244 xregno - A regno of an inner hard subreg_reg (or what will become one).
3245 xmode - The mode of xregno.
3246 offset - The byte offset.
3247 ymode - The mode of a top level SUBREG (or what may become one).
3248 RETURN - The regno offset which would be used. */
unsigned int
subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
		     unsigned int offset, enum machine_mode ymode)
{
  int nregs_xmode, nregs_ymode;
  int mode_multiple, nregs_multiple;
  int y_offset;

  /* Only meaningful for hard registers.  */
  if (xregno >= FIRST_PSEUDO_REGISTER)
    abort ();

  nregs_xmode = HARD_REGNO_NREGS (xregno, xmode);
  nregs_ymode = HARD_REGNO_NREGS (xregno, ymode);

  /* If this is a big endian paradoxical subreg, which uses more actual
     hard registers than the original register, we must return a negative
     offset so that we find the proper highpart of the register.  */
  if (offset == 0
      && nregs_ymode > nregs_xmode
      && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
	  ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
    return nregs_xmode - nregs_ymode;

  if (offset == 0 || nregs_xmode == nregs_ymode)
    return 0;

  /* size of ymode must not be greater than the size of xmode.  */
  mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
  if (mode_multiple == 0)
    abort ();

  /* Scale the byte offset into a register offset.  */
  y_offset = offset / GET_MODE_SIZE (ymode);
  nregs_multiple =  nregs_xmode / nregs_ymode;
  return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
}
3284
3285 /* This function returns true when the offset is representable via
3286 subreg_offset in the given regno.
3287 xregno - A regno of an inner hard subreg_reg (or what will become one).
3288 xmode - The mode of xregno.
3289 offset - The byte offset.
3290 ymode - The mode of a top level SUBREG (or what may become one).
3291 RETURN - The regno offset which would be used. */
bool
subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
			       unsigned int offset, enum machine_mode ymode)
{
  int nregs_xmode, nregs_ymode;
  int mode_multiple, nregs_multiple;
  int y_offset;

  /* Only meaningful for hard registers.  */
  if (xregno >= FIRST_PSEUDO_REGISTER)
    abort ();

  nregs_xmode = HARD_REGNO_NREGS (xregno, xmode);
  nregs_ymode = HARD_REGNO_NREGS (xregno, ymode);

  /* paradoxical subregs are always valid.  */
  if (offset == 0
      && nregs_ymode > nregs_xmode
      && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
	  ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
    return true;

  /* Lowpart subregs are always valid.  */
  if (offset == subreg_lowpart_offset (ymode, xmode))
    return true;

#ifdef ENABLE_CHECKING
  /* This should always pass, otherwise we don't know how to verify the
     constraint.  These conditions may be relaxed but subreg_offset would
     need to be redesigned.  */
  if (GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)
      || GET_MODE_SIZE (ymode) % nregs_ymode
      || nregs_xmode % nregs_ymode)
    abort ();
#endif

  /* The XMODE value can be seen as a vector of NREGS_XMODE
     values.  The subreg must represent a lowpart of given field.
     Compute what field it is.  */
  offset -= subreg_lowpart_offset (ymode,
				   mode_for_size (GET_MODE_BITSIZE (xmode)
						  / nregs_xmode,
						  MODE_INT, 0));

  /* size of ymode must not be greater than the size of xmode.  */
  mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
  if (mode_multiple == 0)
    abort ();

  y_offset = offset / GET_MODE_SIZE (ymode);
  nregs_multiple =  nregs_xmode / nregs_ymode;
#ifdef ENABLE_CHECKING
  if (offset % GET_MODE_SIZE (ymode)
      || mode_multiple % nregs_multiple)
    abort ();
#endif
  /* The offset is representable iff it addresses a whole-register
     lowpart of one of the NREGS_XMODE fields.  */
  return (!(y_offset % (mode_multiple / nregs_multiple)));
}
3349
3350 /* Return the final regno that a subreg expression refers to. */
3351 unsigned int
3352 subreg_regno (rtx x)
3353 {
3354 unsigned int ret;
3355 rtx subreg = SUBREG_REG (x);
3356 int regno = REGNO (subreg);
3357
3358 ret = regno + subreg_regno_offset (regno,
3359 GET_MODE (subreg),
3360 SUBREG_BYTE (x),
3361 GET_MODE (x));
3362 return ret;
3363
3364 }
/* Bookkeeping shared between find_first_parameter_load and its
   note_stores callback parms_set.  */
struct parms_set_data
{
  int nregs;		/* Number of parameter registers not yet seen stored.  */
  HARD_REG_SET regs;	/* The parameter registers still being looked for.  */
};
3370
3371 /* Helper function for noticing stores to parameter registers. */
3372 static void
3373 parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
3374 {
3375 struct parms_set_data *d = data;
3376 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3377 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3378 {
3379 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3380 d->nregs--;
3381 }
3382 }
3383
3384 /* Look backward for first parameter to be loaded.
3385 Do not skip BOUNDARY. */
rtx
find_first_parameter_load (rtx call_insn, rtx boundary)
{
  struct parms_set_data parm;
  rtx p, before;

  /* Since different machines initialize their parameter registers
     in different orders, assume nothing.  Collect the set of all
     parameter registers.  */
  CLEAR_HARD_REG_SET (parm.regs);
  parm.nregs = 0;
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
	&& GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
      {
	/* CALL_INSN_FUNCTION_USAGE must only mention hard registers.  */
	if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
	  abort ();

	/* We only care about registers which can hold function
	   arguments.  */
	if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
	  continue;

	SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
	parm.nregs++;
      }
  before = call_insn;

  /* Search backward for the first set of a register in this set.  */
  while (parm.nregs && before != boundary)
    {
      before = PREV_INSN (before);

      /* It is possible that some loads got CSEed from one call to
         another.  Stop in that case.  */
      if (GET_CODE (before) == CALL_INSN)
	break;

      /* Our caller needs either ensure that we will find all sets
         (in case code has not been optimized yet), or take care
         for possible labels in a way by setting boundary to preceding
         CODE_LABEL.  */
      if (GET_CODE (before) == CODE_LABEL)
	{
	  if (before != boundary)
	    abort ();
	  break;
	}

      /* parms_set removes each parameter register from PARM as its
	 store is found, decrementing parm.nregs.  */
      if (INSN_P (before))
	note_stores (PATTERN (before), parms_set, &parm);
    }
  return before;
}
3440
3441 /* Return true if we should avoid inserting code between INSN and preceding
3442 call instruction. */
3443
bool
keep_with_call_p (rtx insn)
{
  rtx set;

  if (INSN_P (insn) && (set = single_set (insn)) != NULL)
    {
      /* A store into a fixed hard register (e.g. setting up a static
	 chain or EH return register) must stay with the call.  */
      if (GET_CODE (SET_DEST (set)) == REG
	  && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
	  && fixed_regs[REGNO (SET_DEST (set))]
	  && general_operand (SET_SRC (set), VOIDmode))
	return true;
      /* Copying the function value register into a pseudo must also
	 stay with the call.  */
      if (GET_CODE (SET_SRC (set)) == REG
	  && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
	  && GET_CODE (SET_DEST (set)) == REG
	  && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
	return true;
      /* There may be a stack pop just after the call and before the store
	 of the return register.  Search for the actual store when deciding
	 if we can break or not.  */
      if (SET_DEST (set) == stack_pointer_rtx)
	{
	  rtx i2 = next_nonnote_insn (insn);
	  if (i2 && keep_with_call_p (i2))
	    return true;
	}
    }
  return false;
}
3473
3474 /* Return true when store to register X can be hoisted to the place
3475 with LIVE registers (can be NULL). Value VAL contains destination
3476 whose value will be used. */
3477
static bool
hoist_test_store (rtx x, rtx val, regset live)
{
  /* Stores to SCRATCH have no lasting effect.  */
  if (GET_CODE (x) == SCRATCH)
    return true;

  /* Storing into VAL itself is the store being hoisted.  */
  if (rtx_equal_p (x, val))
    return true;

  /* Allow subreg of X in case it is not writing just part of multireg pseudo.
     Then we would need to update all users to care hoisting the store too.
     Caller may represent that by specifying whole subreg as val.  */

  if (GET_CODE (x) == SUBREG && rtx_equal_p (SUBREG_REG (x), val))
    {
      if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD
	  && GET_MODE_BITSIZE (GET_MODE (x)) <
	  GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
	return false;
      return true;
    }
  if (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  /* Anything except register store is not hoistable.  This includes the
     partial stores to registers.  */

  if (!REG_P (x))
    return false;

  /* Pseudo registers can be always replaced by another pseudo to avoid
     the side effect, for hard register we must ensure that they are dead.
     Eventually we may want to add code to try turn pseudos to hards, but it
     is unlikely useful.  */

  if (REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      int regno = REGNO (x);
      int n = HARD_REGNO_NREGS (regno, GET_MODE (x));

      /* Without liveness information we must assume the worst.  */
      if (!live)
	return false;
      if (REGNO_REG_SET_P (live, regno))
	return false;
      /* Check every hard register the multi-register value occupies.  */
      while (--n > 0)
	if (REGNO_REG_SET_P (live, regno + n))
	  return false;
    }
  return true;
}
3528
3529
3530 /* Return true if INSN can be hoisted to place with LIVE hard registers
3531 (LIVE can be NULL when unknown). VAL is expected to be stored by the insn
3532 and used by the hoisting pass. */
3533
3534 bool
3535 can_hoist_insn_p (rtx insn, rtx val, regset live)
3536 {
3537 rtx pat = PATTERN (insn);
3538 int i;
3539
3540 /* It probably does not worth the complexity to handle multiple
3541 set stores. */
3542 if (!single_set (insn))
3543 return false;
3544 /* We can move CALL_INSN, but we need to check that all caller clobbered
3545 regs are dead. */
3546 if (GET_CODE (insn) == CALL_INSN)
3547 return false;
3548 /* In future we will handle hoisting of libcall sequences, but
3549 give up for now. */
3550 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
3551 return false;
3552 switch (GET_CODE (pat))
3553 {
3554 case SET:
3555 if (!hoist_test_store (SET_DEST (pat), val, live))
3556 return false;
3557 break;
3558 case USE:
3559 /* USES do have sick semantics, so do not move them. */
3560 return false;
3561 break;
3562 case CLOBBER:
3563 if (!hoist_test_store (XEXP (pat, 0), val, live))
3564 return false;
3565 break;
3566 case PARALLEL:
3567 for (i = 0; i < XVECLEN (pat, 0); i++)
3568 {
3569 rtx x = XVECEXP (pat, 0, i);
3570 switch (GET_CODE (x))
3571 {
3572 case SET:
3573 if (!hoist_test_store (SET_DEST (x), val, live))
3574 return false;
3575 break;
3576 case USE:
3577 /* We need to fix callers to really ensure availability
3578 of all values insn uses, but for now it is safe to prohibit
3579 hoisting of any insn having such a hidden uses. */
3580 return false;
3581 break;
3582 case CLOBBER:
3583 if (!hoist_test_store (SET_DEST (x), val, live))
3584 return false;
3585 break;
3586 default:
3587 break;
3588 }
3589 }
3590 break;
3591 default:
3592 abort ();
3593 }
3594 return true;
3595 }
3596
/* Update store after hoisting - replace all stores to pseudo registers
   by new ones to avoid clobbering of values except for store to VAL that will
   be updated to NEW.  INSN is the copied insn being edited and XP points at
   the store destination inside its pattern.  All replacements are queued via
   validate_change with in_group = 1; the caller must run
   apply_change_group.  */

static void
hoist_update_store (rtx insn, rtx *xp, rtx val, rtx new)
{
  rtx x = *xp;

  /* A clobber of a scratch needs no renaming.  */
  if (GET_CODE (x) == SCRATCH)
    return;

  /* A store to a subreg of VAL becomes a store to the same subreg of NEW.
     NOTE(review): there is no return here, so control falls through to the
     REG_UNUSED bookkeeping below using the pre-replacement X — confirm this
     fall-through is intended.  */
  if (GET_CODE (x) == SUBREG && SUBREG_REG (x) == val)
    validate_change (insn, xp,
		     simplify_gen_subreg (GET_MODE (x), new, GET_MODE (new),
					  SUBREG_BYTE (x)), 1);
  /* A store directly to VAL is redirected to NEW.  */
  if (rtx_equal_p (x, val))
    {
      validate_change (insn, xp, new, 1);
      return;
    }
  /* Look through a subreg wrapper to the underlying register.  */
  if (GET_CODE (x) == SUBREG)
    {
      xp = &SUBREG_REG (x);
      x = *xp;
    }

  /* Only register destinations are expected at this point.  */
  if (!REG_P (x))
    abort ();

  /* We've verified that hard registers are dead, so we may keep the side
     effect.  Otherwise replace it by new pseudo.  */
  if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
    validate_change (insn, xp, gen_reg_rtx (GET_MODE (x)), 1);
  /* Record that the (possibly renamed) destination is unused at the new
     location.  */
  REG_NOTES (insn)
    = alloc_EXPR_LIST (REG_UNUSED, *xp, REG_NOTES (insn));
}
3634
3635 /* Create a copy of INSN after AFTER replacing store of VAL to NEW
3636 and each other side effect to pseudo register by new pseudo register. */
3637
3638 rtx
3639 hoist_insn_after (rtx insn, rtx after, rtx val, rtx new)
3640 {
3641 rtx pat;
3642 int i;
3643 rtx note;
3644
3645 insn = emit_copy_of_insn_after (insn, after);
3646 pat = PATTERN (insn);
3647
3648 /* Remove REG_UNUSED notes as we will re-emit them. */
3649 while ((note = find_reg_note (insn, REG_UNUSED, NULL_RTX)))
3650 remove_note (insn, note);
3651
3652 /* To get this working callers must ensure to move everything referenced
3653 by REG_EQUAL/REG_EQUIV notes too. Lets remove them, it is probably
3654 easier. */
3655 while ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)))
3656 remove_note (insn, note);
3657 while ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)))
3658 remove_note (insn, note);
3659
3660 /* Remove REG_DEAD notes as they might not be valid anymore in case
3661 we create redundancy. */
3662 while ((note = find_reg_note (insn, REG_DEAD, NULL_RTX)))
3663 remove_note (insn, note);
3664 switch (GET_CODE (pat))
3665 {
3666 case SET:
3667 hoist_update_store (insn, &SET_DEST (pat), val, new);
3668 break;
3669 case USE:
3670 break;
3671 case CLOBBER:
3672 hoist_update_store (insn, &XEXP (pat, 0), val, new);
3673 break;
3674 case PARALLEL:
3675 for (i = 0; i < XVECLEN (pat, 0); i++)
3676 {
3677 rtx x = XVECEXP (pat, 0, i);
3678 switch (GET_CODE (x))
3679 {
3680 case SET:
3681 hoist_update_store (insn, &SET_DEST (x), val, new);
3682 break;
3683 case USE:
3684 break;
3685 case CLOBBER:
3686 hoist_update_store (insn, &SET_DEST (x), val, new);
3687 break;
3688 default:
3689 break;
3690 }
3691 }
3692 break;
3693 default:
3694 abort ();
3695 }
3696 if (!apply_change_group ())
3697 abort ();
3698
3699 return insn;
3700 }
3701
3702 rtx
3703 hoist_insn_to_edge (rtx insn, edge e, rtx val, rtx new)
3704 {
3705 rtx new_insn;
3706
3707 /* We cannot insert instructions on an abnormal critical edge.
3708 It will be easier to find the culprit if we die now. */
3709 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
3710 abort ();
3711
3712 /* Do not use emit_insn_on_edge as we want to preserve notes and similar
3713 stuff. We also emit CALL_INSNS and firends. */
3714 if (e->insns == NULL_RTX)
3715 {
3716 start_sequence ();
3717 emit_note (NOTE_INSN_DELETED);
3718 }
3719 else
3720 push_to_sequence (e->insns);
3721
3722 new_insn = hoist_insn_after (insn, get_last_insn (), val, new);
3723
3724 e->insns = get_insns ();
3725 end_sequence ();
3726 return new_insn;
3727 }
3728
3729 /* Return true if LABEL is a target of JUMP_INSN. This applies only
3730 to non-complex jumps. That is, direct unconditional, conditional,
3731 and tablejumps, but not computed jumps or returns. It also does
3732 not apply to the fallthru case of a conditional jump. */
3733
3734 bool
3735 label_is_jump_target_p (rtx label, rtx jump_insn)
3736 {
3737 rtx tmp = JUMP_LABEL (jump_insn);
3738
3739 if (label == tmp)
3740 return true;
3741
3742 if (tablejump_p (jump_insn, NULL, &tmp))
3743 {
3744 rtvec vec = XVEC (PATTERN (tmp),
3745 GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3746 int i, veclen = GET_NUM_ELEM (vec);
3747
3748 for (i = 0; i < veclen; ++i)
3749 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3750 return true;
3751 }
3752
3753 return false;
3754 }
3755