gcc/rtlanal.c
1 /* Analyze RTL for C-Compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "toplev.h"
28 #include "rtl.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "target.h"
33 #include "output.h"
34 #include "tm_p.h"
35 #include "flags.h"
36 #include "real.h"
37 #include "regs.h"
38 #include "function.h"
39
40 /* Forward declarations */
41 static int global_reg_mentioned_p_1 (rtx *, void *);
42 static void set_of_1 (rtx, rtx, void *);
43 static bool covers_regno_p (rtx, unsigned int);
44 static bool covers_regno_no_parallel_p (rtx, unsigned int);
45 static int rtx_referenced_p_1 (rtx *, void *);
46 static int computed_jump_p_1 (rtx);
47 static void parms_set (rtx, rtx, void *);
48
49 static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode,
50 rtx, enum machine_mode,
51 unsigned HOST_WIDE_INT);
52 static unsigned HOST_WIDE_INT nonzero_bits1 (rtx, enum machine_mode, rtx,
53 enum machine_mode,
54 unsigned HOST_WIDE_INT);
55 static unsigned int cached_num_sign_bit_copies (rtx, enum machine_mode, rtx,
56 enum machine_mode,
57 unsigned int);
58 static unsigned int num_sign_bit_copies1 (rtx, enum machine_mode, rtx,
59 enum machine_mode, unsigned int);
60
61 /* Bit flags that specify the machine subtype we are compiling for.
62 Bits are tested using macros TARGET_... defined in the tm.h file
63 and set by `-m...' switches. Must be defined in rtlanal.c. */
64
65 int target_flags;
66 \f
67 /* Return 1 if the value of X is unstable
68 (would be different at a different point in the program).
69 The frame pointer, arg pointer, etc. are considered stable
70 (within one function) and so is anything marked `unchanging'. */
71
72 int
73 rtx_unstable_p (rtx x)
74 {
75 RTX_CODE code = GET_CODE (x);
76 int i;
77 const char *fmt;
78
79 switch (code)
80 {
81 case MEM:
82 return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));
83
84 case CONST:
85 case CONST_INT:
86 case CONST_DOUBLE:
87 case CONST_VECTOR:
88 case SYMBOL_REF:
89 case LABEL_REF:
90 return 0;
91
92 case REG:
93 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
94 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
95 /* The arg pointer varies if it is not a fixed register. */
96 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
97 return 0;
98 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
99 /* ??? When call-clobbered, the value is stable modulo the restore
100 that must happen after a call. This currently screws up local-alloc
101 into believing that the restore is not needed. */
102 if (x == pic_offset_table_rtx)
103 return 0;
104 #endif
105 return 1;
106
107 case ASM_OPERANDS:
108 if (MEM_VOLATILE_P (x))
109 return 1;
110
111 /* Fall through. */
112
113 default:
114 break;
115 }
116
117 fmt = GET_RTX_FORMAT (code);
118 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
119 if (fmt[i] == 'e')
120 {
121 if (rtx_unstable_p (XEXP (x, i)))
122 return 1;
123 }
124 else if (fmt[i] == 'E')
125 {
126 int j;
127 for (j = 0; j < XVECLEN (x, i); j++)
128 if (rtx_unstable_p (XVECEXP (x, i, j)))
129 return 1;
130 }
131
132 return 0;
133 }
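
/* Illustrative example (added for exposition, not part of the original
   sources; register 100 is an arbitrary pseudo):

     rtx_unstable_p (frame_pointer_rtx)            returns 0 (stable),
     rtx_unstable_p (gen_rtx_REG (SImode, 100))    returns 1,

   and a MEM is unstable unless it is marked MEM_READONLY_P and its
   address is itself stable.  */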
134
135 /* Return 1 if X has a value that can vary even between two
136 executions of the program. 0 means X can be compared reliably
137 against certain constants or near-constants.
138 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
139 zero, we are slightly more conservative.
140 The frame pointer and the arg pointer are considered constant. */
141
142 int
143 rtx_varies_p (rtx x, int for_alias)
144 {
145 RTX_CODE code;
146 int i;
147 const char *fmt;
148
149 if (!x)
150 return 0;
151
152 code = GET_CODE (x);
153 switch (code)
154 {
155 case MEM:
156 return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
157
158 case CONST:
159 case CONST_INT:
160 case CONST_DOUBLE:
161 case CONST_VECTOR:
162 case SYMBOL_REF:
163 case LABEL_REF:
164 return 0;
165
166 case REG:
167 /* Note that we have to test for the actual rtx used for the frame
168 and arg pointers and not just the register number in case we have
169 eliminated the frame and/or arg pointer and are using it
170 for pseudos. */
171 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
172 /* The arg pointer varies if it is not a fixed register. */
173 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
174 return 0;
175 if (x == pic_offset_table_rtx
176 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
177 /* ??? When call-clobbered, the value is stable modulo the restore
178 that must happen after a call. This currently screws up
179 local-alloc into believing that the restore is not needed, so we
180 must return 0 only if we are called from alias analysis. */
181 && for_alias
182 #endif
183 )
184 return 0;
185 return 1;
186
187 case LO_SUM:
188 /* The operand 0 of a LO_SUM is considered constant
189 (in fact it is related specifically to operand 1)
190 during alias analysis. */
191 return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
192 || rtx_varies_p (XEXP (x, 1), for_alias);
193
194 case ASM_OPERANDS:
195 if (MEM_VOLATILE_P (x))
196 return 1;
197
198 /* Fall through. */
199
200 default:
201 break;
202 }
203
204 fmt = GET_RTX_FORMAT (code);
205 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
206 if (fmt[i] == 'e')
207 {
208 if (rtx_varies_p (XEXP (x, i), for_alias))
209 return 1;
210 }
211 else if (fmt[i] == 'E')
212 {
213 int j;
214 for (j = 0; j < XVECLEN (x, i); j++)
215 if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
216 return 1;
217 }
218
219 return 0;
220 }
221
222 /* Return nonzero if the use of X as an address in a MEM can cause a trap. */
223
224 int
225 rtx_addr_can_trap_p (rtx x)
226 {
227 enum rtx_code code = GET_CODE (x);
228
229 switch (code)
230 {
231 case SYMBOL_REF:
232 return SYMBOL_REF_WEAK (x);
233
234 case LABEL_REF:
235 return 0;
236
237 case REG:
238 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
239 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
240 || x == stack_pointer_rtx
241 /* The arg pointer varies if it is not a fixed register. */
242 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
243 return 0;
244 /* All of the virtual frame registers are stack references. */
245 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
246 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
247 return 0;
248 return 1;
249
250 case CONST:
251 return rtx_addr_can_trap_p (XEXP (x, 0));
252
253 case PLUS:
254 /* An address is assumed not to trap if it is an address that can't
255 trap plus a constant integer or it is the pic register plus a
256 constant. */
257 return ! ((! rtx_addr_can_trap_p (XEXP (x, 0))
258 && GET_CODE (XEXP (x, 1)) == CONST_INT)
259 || (XEXP (x, 0) == pic_offset_table_rtx
260 && CONSTANT_P (XEXP (x, 1))));
261
262 case LO_SUM:
263 case PRE_MODIFY:
264 return rtx_addr_can_trap_p (XEXP (x, 1));
265
266 case PRE_DEC:
267 case PRE_INC:
268 case POST_DEC:
269 case POST_INC:
270 case POST_MODIFY:
271 return rtx_addr_can_trap_p (XEXP (x, 0));
272
273 default:
274 break;
275 }
276
277 /* If it isn't one of the cases above, it can cause a trap. */
278 return 1;
279 }
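
/* Illustrative example (added for exposition): an address of the form

     (plus (reg frame_pointer) (const_int 16))

   cannot trap, since the inner address cannot trap and the offset is a
   CONST_INT, while a weak SYMBOL_REF or a sum of two ordinary registers
   is assumed to be able to trap.  */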
280
281 /* Return true if X is an address that is known to not be zero. */
282
283 bool
284 nonzero_address_p (rtx x)
285 {
286 enum rtx_code code = GET_CODE (x);
287
288 switch (code)
289 {
290 case SYMBOL_REF:
291 return !SYMBOL_REF_WEAK (x);
292
293 case LABEL_REF:
294 return true;
295
296 case REG:
297 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
298 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
299 || x == stack_pointer_rtx
300 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
301 return true;
302 /* All of the virtual frame registers are stack references. */
303 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
304 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
305 return true;
306 return false;
307
308 case CONST:
309 return nonzero_address_p (XEXP (x, 0));
310
311 case PLUS:
312 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
313 {
314 /* Pointers aren't allowed to wrap. If we've got a register
315 that is known to be a pointer, and a positive offset, then
316 the composite can't be zero. */
317 if (INTVAL (XEXP (x, 1)) > 0
318 && REG_P (XEXP (x, 0))
319 && REG_POINTER (XEXP (x, 0)))
320 return true;
321
322 return nonzero_address_p (XEXP (x, 0));
323 }
324 /* Handle PIC references. */
325 else if (XEXP (x, 0) == pic_offset_table_rtx
326 && CONSTANT_P (XEXP (x, 1)))
327 return true;
328 return false;
329
330 case PRE_MODIFY:
331 /* Similar to the above; allow positive offsets. Further, since
332 auto-inc is only allowed in memories, the register must be a
333 pointer. */
334 if (GET_CODE (XEXP (x, 1)) == CONST_INT
335 && INTVAL (XEXP (x, 1)) > 0)
336 return true;
337 return nonzero_address_p (XEXP (x, 0));
338
339 case PRE_INC:
340 /* Similarly. Further, the offset is always positive. */
341 return true;
342
343 case PRE_DEC:
344 case POST_DEC:
345 case POST_INC:
346 case POST_MODIFY:
347 return nonzero_address_p (XEXP (x, 0));
348
349 case LO_SUM:
350 return nonzero_address_p (XEXP (x, 1));
351
352 default:
353 break;
354 }
355
356 /* If it isn't one of the cases above, it might be zero. */
357 return false;
358 }
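
/* Illustrative example (added for exposition): for

     (plus (reg) (const_int 4))

   the result is true when the register is marked REG_POINTER, because
   pointers are not allowed to wrap and the offset is positive; for a
   weak SYMBOL_REF the result is false, since a weak symbol may turn out
   to have address zero.  */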
359
360 /* Return 1 if X refers to a memory location whose address
361 cannot be compared reliably with constant addresses,
362 or if X refers to a BLKmode memory object.
363 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
364 zero, we are slightly more conservative. */
365
366 int
367 rtx_addr_varies_p (rtx x, int for_alias)
368 {
369 enum rtx_code code;
370 int i;
371 const char *fmt;
372
373 if (x == 0)
374 return 0;
375
376 code = GET_CODE (x);
377 if (code == MEM)
378 return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
379
380 fmt = GET_RTX_FORMAT (code);
381 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
382 if (fmt[i] == 'e')
383 {
384 if (rtx_addr_varies_p (XEXP (x, i), for_alias))
385 return 1;
386 }
387 else if (fmt[i] == 'E')
388 {
389 int j;
390 for (j = 0; j < XVECLEN (x, i); j++)
391 if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
392 return 1;
393 }
394 return 0;
395 }
396 \f
397 /* Return the value of the integer term in X, if one is apparent;
398 otherwise return 0.
399 Only obvious integer terms are detected.
400 This is used in cse.c with the `related_value' field. */
401
402 HOST_WIDE_INT
403 get_integer_term (rtx x)
404 {
405 if (GET_CODE (x) == CONST)
406 x = XEXP (x, 0);
407
408 if (GET_CODE (x) == MINUS
409 && GET_CODE (XEXP (x, 1)) == CONST_INT)
410 return - INTVAL (XEXP (x, 1));
411 if (GET_CODE (x) == PLUS
412 && GET_CODE (XEXP (x, 1)) == CONST_INT)
413 return INTVAL (XEXP (x, 1));
414 return 0;
415 }
416
417 /* If X is a constant, return the value sans apparent integer term;
418 otherwise return 0.
419 Only obvious integer terms are detected. */
420
421 rtx
422 get_related_value (rtx x)
423 {
424 if (GET_CODE (x) != CONST)
425 return 0;
426 x = XEXP (x, 0);
427 if (GET_CODE (x) == PLUS
428 && GET_CODE (XEXP (x, 1)) == CONST_INT)
429 return XEXP (x, 0);
430 else if (GET_CODE (x) == MINUS
431 && GET_CODE (XEXP (x, 1)) == CONST_INT)
432 return XEXP (x, 0);
433 return 0;
434 }
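
/* Illustrative example (added for exposition; the symbol name is made
   up): for the constant

     (const (plus (symbol_ref "x") (const_int 8)))

   get_integer_term returns 8 and get_related_value returns the
   (symbol_ref "x"), which lets cse.c treat "x" and "x+8" as related
   addresses.  */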
435 \f
436 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
437 a global register. */
438
439 static int
440 global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
441 {
442 int regno;
443 rtx x = *loc;
444
445 if (! x)
446 return 0;
447
448 switch (GET_CODE (x))
449 {
450 case SUBREG:
451 if (REG_P (SUBREG_REG (x)))
452 {
453 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
454 && global_regs[subreg_regno (x)])
455 return 1;
456 return 0;
457 }
458 break;
459
460 case REG:
461 regno = REGNO (x);
462 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
463 return 1;
464 return 0;
465
466 case SCRATCH:
467 case PC:
468 case CC0:
469 case CONST_INT:
470 case CONST_DOUBLE:
471 case CONST:
472 case LABEL_REF:
473 return 0;
474
475 case CALL:
476 /* A non-constant call might use a global register. */
477 return 1;
478
479 default:
480 break;
481 }
482
483 return 0;
484 }
485
486 /* Returns nonzero if X mentions a global register. */
487
488 int
489 global_reg_mentioned_p (rtx x)
490 {
491 if (INSN_P (x))
492 {
493 if (CALL_P (x))
494 {
495 if (! CONST_OR_PURE_CALL_P (x))
496 return 1;
497 x = CALL_INSN_FUNCTION_USAGE (x);
498 if (x == 0)
499 return 0;
500 }
501 else
502 x = PATTERN (x);
503 }
504
505 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
506 }
507 \f
508 /* Return the number of places FIND appears within X. If COUNT_DEST is
509 zero, we do not count occurrences inside the destination of a SET. */
510
511 int
512 count_occurrences (rtx x, rtx find, int count_dest)
513 {
514 int i, j;
515 enum rtx_code code;
516 const char *format_ptr;
517 int count;
518
519 if (x == find)
520 return 1;
521
522 code = GET_CODE (x);
523
524 switch (code)
525 {
526 case REG:
527 case CONST_INT:
528 case CONST_DOUBLE:
529 case CONST_VECTOR:
530 case SYMBOL_REF:
531 case CODE_LABEL:
532 case PC:
533 case CC0:
534 return 0;
535
536 case MEM:
537 if (MEM_P (find) && rtx_equal_p (x, find))
538 return 1;
539 break;
540
541 case SET:
542 if (SET_DEST (x) == find && ! count_dest)
543 return count_occurrences (SET_SRC (x), find, count_dest);
544 break;
545
546 default:
547 break;
548 }
549
550 format_ptr = GET_RTX_FORMAT (code);
551 count = 0;
552
553 for (i = 0; i < GET_RTX_LENGTH (code); i++)
554 {
555 switch (*format_ptr++)
556 {
557 case 'e':
558 count += count_occurrences (XEXP (x, i), find, count_dest);
559 break;
560
561 case 'E':
562 for (j = 0; j < XVECLEN (x, i); j++)
563 count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
564 break;
565 }
566 }
567 return count;
568 }
569 \f
570 /* Nonzero if register REG appears somewhere within IN.
571 Also works if REG is not a register; in this case it checks
572 for a subexpression of IN that is Lisp "equal" to REG. */
573
574 int
575 reg_mentioned_p (rtx reg, rtx in)
576 {
577 const char *fmt;
578 int i;
579 enum rtx_code code;
580
581 if (in == 0)
582 return 0;
583
584 if (reg == in)
585 return 1;
586
587 if (GET_CODE (in) == LABEL_REF)
588 return reg == XEXP (in, 0);
589
590 code = GET_CODE (in);
591
592 switch (code)
593 {
594 /* Compare registers by number. */
595 case REG:
596 return REG_P (reg) && REGNO (in) == REGNO (reg);
597
598 /* These codes have no constituent expressions
599 and are unique. */
600 case SCRATCH:
601 case CC0:
602 case PC:
603 return 0;
604
605 case CONST_INT:
606 case CONST_VECTOR:
607 case CONST_DOUBLE:
608 /* These are kept unique for a given value. */
609 return 0;
610
611 default:
612 break;
613 }
614
615 if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
616 return 1;
617
618 fmt = GET_RTX_FORMAT (code);
619
620 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
621 {
622 if (fmt[i] == 'E')
623 {
624 int j;
625 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
626 if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
627 return 1;
628 }
629 else if (fmt[i] == 'e'
630 && reg_mentioned_p (reg, XEXP (in, i)))
631 return 1;
632 }
633 return 0;
634 }
635 \f
636 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
637 no CODE_LABEL insn. */
638
639 int
640 no_labels_between_p (rtx beg, rtx end)
641 {
642 rtx p;
643 if (beg == end)
644 return 0;
645 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
646 if (LABEL_P (p))
647 return 0;
648 return 1;
649 }
650
651 /* Nonzero if register REG is used in an insn between
652 FROM_INSN and TO_INSN (exclusive of those two). */
653
654 int
655 reg_used_between_p (rtx reg, rtx from_insn, rtx to_insn)
656 {
657 rtx insn;
658
659 if (from_insn == to_insn)
660 return 0;
661
662 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
663 if (INSN_P (insn)
664 && (reg_overlap_mentioned_p (reg, PATTERN (insn))
665 || (CALL_P (insn)
666 && (find_reg_fusage (insn, USE, reg)
667 || find_reg_fusage (insn, CLOBBER, reg)))))
668 return 1;
669 return 0;
670 }
671 \f
672 /* Nonzero if the old value of X, a register, is referenced in BODY. If X
673 is entirely replaced by a new value and the only use is as a SET_DEST,
674 we do not consider it a reference. */
675
676 int
677 reg_referenced_p (rtx x, rtx body)
678 {
679 int i;
680
681 switch (GET_CODE (body))
682 {
683 case SET:
684 if (reg_overlap_mentioned_p (x, SET_SRC (body)))
685 return 1;
686
687 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
688 of a REG that occupies all of the REG, the insn references X if
689 it is mentioned in the destination. */
690 if (GET_CODE (SET_DEST (body)) != CC0
691 && GET_CODE (SET_DEST (body)) != PC
692 && !REG_P (SET_DEST (body))
693 && ! (GET_CODE (SET_DEST (body)) == SUBREG
694 && REG_P (SUBREG_REG (SET_DEST (body)))
695 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
696 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
697 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
698 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
699 && reg_overlap_mentioned_p (x, SET_DEST (body)))
700 return 1;
701 return 0;
702
703 case ASM_OPERANDS:
704 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
705 if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
706 return 1;
707 return 0;
708
709 case CALL:
710 case USE:
711 case IF_THEN_ELSE:
712 return reg_overlap_mentioned_p (x, body);
713
714 case TRAP_IF:
715 return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
716
717 case PREFETCH:
718 return reg_overlap_mentioned_p (x, XEXP (body, 0));
719
720 case UNSPEC:
721 case UNSPEC_VOLATILE:
722 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
723 if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
724 return 1;
725 return 0;
726
727 case PARALLEL:
728 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
729 if (reg_referenced_p (x, XVECEXP (body, 0, i)))
730 return 1;
731 return 0;
732
733 case CLOBBER:
734 if (MEM_P (XEXP (body, 0)))
735 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
736 return 1;
737 return 0;
738
739 case COND_EXEC:
740 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
741 return 1;
742 return reg_referenced_p (x, COND_EXEC_CODE (body));
743
744 default:
745 return 0;
746 }
747 }
748 \f
749 /* Nonzero if register REG is set or clobbered in an insn between
750 FROM_INSN and TO_INSN (exclusive of those two). */
751
752 int
753 reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
754 {
755 rtx insn;
756
757 if (from_insn == to_insn)
758 return 0;
759
760 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
761 if (INSN_P (insn) && reg_set_p (reg, insn))
762 return 1;
763 return 0;
764 }
765
766 /* Internals of reg_set_between_p. */
767 int
768 reg_set_p (rtx reg, rtx insn)
769 {
770 /* We can be passed an insn or part of one. If we are passed an insn,
771 check if a side-effect of the insn clobbers REG. */
772 if (INSN_P (insn)
773 && (FIND_REG_INC_NOTE (insn, reg)
774 || (CALL_P (insn)
775 && ((REG_P (reg)
776 && REGNO (reg) < FIRST_PSEUDO_REGISTER
777 && TEST_HARD_REG_BIT (regs_invalidated_by_call,
778 REGNO (reg)))
779 || MEM_P (reg)
780 || find_reg_fusage (insn, CLOBBER, reg)))))
781 return 1;
782
783 return set_of (reg, insn) != NULL_RTX;
784 }
785
786 /* Similar to reg_set_between_p, but check all registers in X. Return 0
787 only if none of them are modified between START and END. Do not
788 consider non-registers one way or the other. */
789
790 int
791 regs_set_between_p (rtx x, rtx start, rtx end)
792 {
793 enum rtx_code code = GET_CODE (x);
794 const char *fmt;
795 int i, j;
796
797 switch (code)
798 {
799 case CONST_INT:
800 case CONST_DOUBLE:
801 case CONST_VECTOR:
802 case CONST:
803 case SYMBOL_REF:
804 case LABEL_REF:
805 case PC:
806 case CC0:
807 return 0;
808
809 case REG:
810 return reg_set_between_p (x, start, end);
811
812 default:
813 break;
814 }
815
816 fmt = GET_RTX_FORMAT (code);
817 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
818 {
819 if (fmt[i] == 'e' && regs_set_between_p (XEXP (x, i), start, end))
820 return 1;
821
822 else if (fmt[i] == 'E')
823 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
824 if (regs_set_between_p (XVECEXP (x, i, j), start, end))
825 return 1;
826 }
827
828 return 0;
829 }
830
831 /* Similar to reg_set_between_p, but check all registers in X. Return 0
832 only if none of them are modified between START and END. Return 1 if
833 X contains a MEM; this routine does use memory aliasing. */
834
835 int
836 modified_between_p (rtx x, rtx start, rtx end)
837 {
838 enum rtx_code code = GET_CODE (x);
839 const char *fmt;
840 int i, j;
841 rtx insn;
842
843 if (start == end)
844 return 0;
845
846 switch (code)
847 {
848 case CONST_INT:
849 case CONST_DOUBLE:
850 case CONST_VECTOR:
851 case CONST:
852 case SYMBOL_REF:
853 case LABEL_REF:
854 return 0;
855
856 case PC:
857 case CC0:
858 return 1;
859
860 case MEM:
861 if (MEM_READONLY_P (x))
862 return 0;
863 if (modified_between_p (XEXP (x, 0), start, end))
864 return 1;
865 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
866 if (memory_modified_in_insn_p (x, insn))
867 return 1;
868 return 0;
869 break;
870
871 case REG:
872 return reg_set_between_p (x, start, end);
873
874 default:
875 break;
876 }
877
878 fmt = GET_RTX_FORMAT (code);
879 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
880 {
881 if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
882 return 1;
883
884 else if (fmt[i] == 'E')
885 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
886 if (modified_between_p (XVECEXP (x, i, j), start, end))
887 return 1;
888 }
889
890 return 0;
891 }
892
893 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
894 of them are modified in INSN. Return 1 if X contains a MEM; this routine
895 does use memory aliasing. */
896
897 int
898 modified_in_p (rtx x, rtx insn)
899 {
900 enum rtx_code code = GET_CODE (x);
901 const char *fmt;
902 int i, j;
903
904 switch (code)
905 {
906 case CONST_INT:
907 case CONST_DOUBLE:
908 case CONST_VECTOR:
909 case CONST:
910 case SYMBOL_REF:
911 case LABEL_REF:
912 return 0;
913
914 case PC:
915 case CC0:
916 return 1;
917
918 case MEM:
919 if (MEM_READONLY_P (x))
920 return 0;
921 if (modified_in_p (XEXP (x, 0), insn))
922 return 1;
923 if (memory_modified_in_insn_p (x, insn))
924 return 1;
925 return 0;
926 break;
927
928 case REG:
929 return reg_set_p (x, insn);
930
931 default:
932 break;
933 }
934
935 fmt = GET_RTX_FORMAT (code);
936 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
937 {
938 if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
939 return 1;
940
941 else if (fmt[i] == 'E')
942 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
943 if (modified_in_p (XVECEXP (x, i, j), insn))
944 return 1;
945 }
946
947 return 0;
948 }
949 \f
950 /* Helper function for set_of. */
951 struct set_of_data
952 {
953 rtx found;
954 rtx pat;
955 };
956
957 static void
958 set_of_1 (rtx x, rtx pat, void *data1)
959 {
960 struct set_of_data *data = (struct set_of_data *) (data1);
961 if (rtx_equal_p (x, data->pat)
962 || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
963 data->found = pat;
964 }
965
966 /* Given an INSN, return a SET or CLOBBER expression that does modify PAT
967 (either directly or via STRICT_LOW_PART and similar modifiers). */
968 rtx
969 set_of (rtx pat, rtx insn)
970 {
971 struct set_of_data data;
972 data.found = NULL_RTX;
973 data.pat = pat;
974 note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
975 return data.found;
976 }
977 \f
978 /* Given an INSN, return a SET expression if this insn has only a single SET.
979 It may also have CLOBBERs, USEs, or SETs whose outputs
980 will not be used, which we ignore. */
981
982 rtx
983 single_set_2 (rtx insn, rtx pat)
984 {
985 rtx set = NULL;
986 int set_verified = 1;
987 int i;
988
989 if (GET_CODE (pat) == PARALLEL)
990 {
991 for (i = 0; i < XVECLEN (pat, 0); i++)
992 {
993 rtx sub = XVECEXP (pat, 0, i);
994 switch (GET_CODE (sub))
995 {
996 case USE:
997 case CLOBBER:
998 break;
999
1000 case SET:
1001 /* We can consider insns having multiple sets, where all
1002 but one are dead, as single set insns.  In the common case
1003 only a single set is present in the pattern, so we want
1004 to avoid checking for REG_UNUSED notes unless necessary.
1005
1006 When we reach a set for the first time, we just expect it to
1007 be the single set we are looking for; only when more sets
1008 are found in the insn do we check them. */
1009 if (!set_verified)
1010 {
1011 if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
1012 && !side_effects_p (set))
1013 set = NULL;
1014 else
1015 set_verified = 1;
1016 }
1017 if (!set)
1018 set = sub, set_verified = 0;
1019 else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
1020 || side_effects_p (sub))
1021 return NULL_RTX;
1022 break;
1023
1024 default:
1025 return NULL_RTX;
1026 }
1027 }
1028 }
1029 return set;
1030 }
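
/* Illustrative example (added for exposition; the register numbers are
   arbitrary): for an insn whose pattern is

     (parallel [(set (reg 0) (reg 1))
                (clobber (reg 2))])

   the SET is returned, since the CLOBBER is ignored, whereas a PARALLEL
   containing two SETs that are both live yields NULL_RTX.  Callers
   normally reach this function through the single_set wrapper, which
   handles the simple case of a lone SET itself.  */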
1031
1032 /* Given an INSN, return nonzero if it has more than one SET, else return
1033 zero. */
1034
1035 int
1036 multiple_sets (rtx insn)
1037 {
1038 int found;
1039 int i;
1040
1041 /* INSN must be an insn. */
1042 if (! INSN_P (insn))
1043 return 0;
1044
1045 /* Only a PARALLEL can have multiple SETs. */
1046 if (GET_CODE (PATTERN (insn)) == PARALLEL)
1047 {
1048 for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1049 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1050 {
1051 /* If we have already found a SET, then return now. */
1052 if (found)
1053 return 1;
1054 else
1055 found = 1;
1056 }
1057 }
1058
1059 /* Either zero or one SET. */
1060 return 0;
1061 }
1062 \f
1063 /* Return nonzero if the destination of SET equals the source
1064 and there are no side effects. */
1065
1066 int
1067 set_noop_p (rtx set)
1068 {
1069 rtx src = SET_SRC (set);
1070 rtx dst = SET_DEST (set);
1071
1072 if (dst == pc_rtx && src == pc_rtx)
1073 return 1;
1074
1075 if (MEM_P (dst) && MEM_P (src))
1076 return rtx_equal_p (dst, src) && !side_effects_p (dst);
1077
1078 if (GET_CODE (dst) == SIGN_EXTRACT
1079 || GET_CODE (dst) == ZERO_EXTRACT)
1080 return rtx_equal_p (XEXP (dst, 0), src)
1081 && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
1082 && !side_effects_p (src);
1083
1084 if (GET_CODE (dst) == STRICT_LOW_PART)
1085 dst = XEXP (dst, 0);
1086
1087 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1088 {
1089 if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1090 return 0;
1091 src = SUBREG_REG (src);
1092 dst = SUBREG_REG (dst);
1093 }
1094
1095 return (REG_P (src) && REG_P (dst)
1096 && REGNO (src) == REGNO (dst));
1097 }
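
/* Illustrative example (added for exposition; the register numbers are
   arbitrary):

     (set (reg:SI 3) (reg:SI 3))

   is a no-op, as is a move between two identical MEMs without side
   effects, whereas (set (reg:SI 3) (reg:SI 4)) is not.  */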
1098 \f
1099 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1100 value to itself. */
1101
1102 int
1103 noop_move_p (rtx insn)
1104 {
1105 rtx pat = PATTERN (insn);
1106
1107 if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1108 return 1;
1109
1110 /* Insns carrying these notes are useful later on. */
1111 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1112 return 0;
1113
1114 /* For now treat an insn with a REG_RETVAL note as a
1115 special insn which should not be considered a no-op. */
1116 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
1117 return 0;
1118
1119 if (GET_CODE (pat) == SET && set_noop_p (pat))
1120 return 1;
1121
1122 if (GET_CODE (pat) == PARALLEL)
1123 {
1124 int i;
1125 /* If nothing but SETs of registers to themselves,
1126 this insn can also be deleted. */
1127 for (i = 0; i < XVECLEN (pat, 0); i++)
1128 {
1129 rtx tem = XVECEXP (pat, 0, i);
1130
1131 if (GET_CODE (tem) == USE
1132 || GET_CODE (tem) == CLOBBER)
1133 continue;
1134
1135 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1136 return 0;
1137 }
1138
1139 return 1;
1140 }
1141 return 0;
1142 }
1143 \f
1144
1145 /* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1146 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1147 If the object was modified, if we hit a partial assignment to X, or hit a
1148 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1149 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
1150 be the src. */
1151
1152 rtx
1153 find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
1154 {
1155 rtx p;
1156
1157 for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
1158 p = PREV_INSN (p))
1159 if (INSN_P (p))
1160 {
1161 rtx set = single_set (p);
1162 rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1163
1164 if (set && rtx_equal_p (x, SET_DEST (set)))
1165 {
1166 rtx src = SET_SRC (set);
1167
1168 if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1169 src = XEXP (note, 0);
1170
1171 if ((valid_to == NULL_RTX
1172 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1173 /* Reject hard registers because we don't usually want
1174 to use them; we'd rather use a pseudo. */
1175 && (! (REG_P (src)
1176 && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1177 {
1178 *pinsn = p;
1179 return src;
1180 }
1181 }
1182
1183 /* If set in non-simple way, we don't have a value. */
1184 if (reg_set_p (x, p))
1185 break;
1186 }
1187
1188 return x;
1189 }
1190 \f
1191 /* Return nonzero if register in range [REGNO, ENDREGNO)
1192 appears either explicitly or implicitly in X
1193 other than being stored into.
1194
1195 References contained within the substructure at LOC do not count.
1196 LOC may be zero, meaning don't ignore anything. */
1197
1198 int
1199 refers_to_regno_p (unsigned int regno, unsigned int endregno, rtx x,
1200 rtx *loc)
1201 {
1202 int i;
1203 unsigned int x_regno;
1204 RTX_CODE code;
1205 const char *fmt;
1206
1207 repeat:
1208 /* The contents of a REG_NONNEG note is always zero, so we must come here
1209 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1210 if (x == 0)
1211 return 0;
1212
1213 code = GET_CODE (x);
1214
1215 switch (code)
1216 {
1217 case REG:
1218 x_regno = REGNO (x);
1219
1220 /* If we are modifying the stack, frame, or argument pointer, it will
1221 clobber a virtual register. In fact, we could be more precise,
1222 but it isn't worth it. */
1223 if ((x_regno == STACK_POINTER_REGNUM
1224 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1225 || x_regno == ARG_POINTER_REGNUM
1226 #endif
1227 || x_regno == FRAME_POINTER_REGNUM)
1228 && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1229 return 1;
1230
1231 return (endregno > x_regno
1232 && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
1233 ? hard_regno_nregs[x_regno][GET_MODE (x)]
1234 : 1));
1235
1236 case SUBREG:
1237 /* If this is a SUBREG of a hard reg, we can see exactly which
1238 registers are being modified. Otherwise, handle normally. */
1239 if (REG_P (SUBREG_REG (x))
1240 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1241 {
1242 unsigned int inner_regno = subreg_regno (x);
1243 unsigned int inner_endregno
1244 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1245 ? hard_regno_nregs[inner_regno][GET_MODE (x)] : 1);
1246
1247 return endregno > inner_regno && regno < inner_endregno;
1248 }
1249 break;
1250
1251 case CLOBBER:
1252 case SET:
1253 if (&SET_DEST (x) != loc
1254 /* Note setting a SUBREG counts as referring to the REG it is in for
1255 a pseudo but not for hard registers since we can
1256 treat each word individually. */
1257 && ((GET_CODE (SET_DEST (x)) == SUBREG
1258 && loc != &SUBREG_REG (SET_DEST (x))
1259 && REG_P (SUBREG_REG (SET_DEST (x)))
1260 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1261 && refers_to_regno_p (regno, endregno,
1262 SUBREG_REG (SET_DEST (x)), loc))
1263 || (!REG_P (SET_DEST (x))
1264 && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1265 return 1;
1266
1267 if (code == CLOBBER || loc == &SET_SRC (x))
1268 return 0;
1269 x = SET_SRC (x);
1270 goto repeat;
1271
1272 default:
1273 break;
1274 }
1275
1276 /* X does not match, so try its subexpressions. */
1277
1278 fmt = GET_RTX_FORMAT (code);
1279 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1280 {
1281 if (fmt[i] == 'e' && loc != &XEXP (x, i))
1282 {
1283 if (i == 0)
1284 {
1285 x = XEXP (x, 0);
1286 goto repeat;
1287 }
1288 else
1289 if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1290 return 1;
1291 }
1292 else if (fmt[i] == 'E')
1293 {
1294 int j;
1295 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1296 if (loc != &XVECEXP (x, i, j)
1297 && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1298 return 1;
1299 }
1300 }
1301 return 0;
1302 }
1303
1304 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1305 we check if any register number in X conflicts with the relevant register
1306 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1307 contains a MEM (we don't bother checking for memory addresses that can't
1308 conflict because we expect this to be a rare case). */
1309
1310 int
1311 reg_overlap_mentioned_p (rtx x, rtx in)
1312 {
1313 unsigned int regno, endregno;
1314
1315 /* If either argument is a constant, then modifying X can not
1316 affect IN. Here we check only IN; the CONSTANT_P (x) test is
1317 folded into the switch statement below. */
1318 if (CONSTANT_P (in))
1319 return 0;
1320
1321 recurse:
1322 switch (GET_CODE (x))
1323 {
1324 case STRICT_LOW_PART:
1325 case ZERO_EXTRACT:
1326 case SIGN_EXTRACT:
1327 /* Overly conservative. */
1328 x = XEXP (x, 0);
1329 goto recurse;
1330
1331 case SUBREG:
1332 regno = REGNO (SUBREG_REG (x));
1333 if (regno < FIRST_PSEUDO_REGISTER)
1334 regno = subreg_regno (x);
1335 goto do_reg;
1336
1337 case REG:
1338 regno = REGNO (x);
1339 do_reg:
1340 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1341 ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
1342 return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1343
1344 case MEM:
1345 {
1346 const char *fmt;
1347 int i;
1348
1349 if (MEM_P (in))
1350 return 1;
1351
1352 fmt = GET_RTX_FORMAT (GET_CODE (in));
1353 for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1354 if (fmt[i] == 'e' && reg_overlap_mentioned_p (x, XEXP (in, i)))
1355 return 1;
1356
1357 return 0;
1358 }
1359
1360 case SCRATCH:
1361 case PC:
1362 case CC0:
1363 return reg_mentioned_p (x, in);
1364
1365 case PARALLEL:
1366 {
1367 int i;
1368
1369 /* If any register in here overlaps IN, we return true. */
1370 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1371 if (XEXP (XVECEXP (x, 0, i), 0) != 0
1372 && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1373 return 1;
1374 return 0;
1375 }
1376
1377 default:
1378 gcc_assert (CONSTANT_P (x));
1379 return 0;
1380 }
1381 }
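
/* Illustrative example (added for exposition): on a target where a
   DImode value occupies two hard registers, reg_overlap_mentioned_p
   returns 1 for X = (reg:DI 0) and an IN that mentions (reg:SI 1),
   because hard register 1 lies inside the two-register range occupied
   by X.  */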
1382 \f
1383 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1384 (X would be the pattern of an insn).
1385 FUN receives two arguments:
1386 the REG, MEM, CC0 or PC being stored in or clobbered,
1387 the SET or CLOBBER rtx that does the store.
1388
1389 If the item being stored in or clobbered is a SUBREG of a hard register,
1390 the SUBREG will be passed. */
1391
1392 void
1393 note_stores (rtx x, void (*fun) (rtx, rtx, void *), void *data)
1394 {
1395 int i;
1396
1397 if (GET_CODE (x) == COND_EXEC)
1398 x = COND_EXEC_CODE (x);
1399
1400 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1401 {
1402 rtx dest = SET_DEST (x);
1403
1404 while ((GET_CODE (dest) == SUBREG
1405 && (!REG_P (SUBREG_REG (dest))
1406 || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1407 || GET_CODE (dest) == ZERO_EXTRACT
1408 || GET_CODE (dest) == SIGN_EXTRACT
1409 || GET_CODE (dest) == STRICT_LOW_PART)
1410 dest = XEXP (dest, 0);
1411
1412 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1413 each of whose first operand is a register. */
1414 if (GET_CODE (dest) == PARALLEL)
1415 {
1416 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1417 if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1418 (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1419 }
1420 else
1421 (*fun) (dest, x, data);
1422 }
1423
1424 else if (GET_CODE (x) == PARALLEL)
1425 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1426 note_stores (XVECEXP (x, 0, i), fun, data);
1427 }
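
/* Illustrative usage sketch (added for exposition; the callback name is
   made up):

     static void
     count_stores (rtx dest ATTRIBUTE_UNUSED, rtx setter ATTRIBUTE_UNUSED,
                   void *data)
     {
       (*(int *) data)++;
     }

     int n_stores = 0;
     note_stores (PATTERN (insn), count_stores, &n_stores);

   counts the SETs and CLOBBERs performed by INSN.  */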
1428 \f
1429 /* Like note_stores, but call FUN for each expression that is being
1430 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1431 FUN for each expression, not any interior subexpressions. FUN receives a
1432 pointer to the expression and the DATA passed to this function.
1433
1434 Note that this is not quite the same test as that done in reg_referenced_p
1435 since that considers something as being referenced if it is being
1436 partially set, while we do not. */
1437
1438 void
1439 note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
1440 {
1441 rtx body = *pbody;
1442 int i;
1443
1444 switch (GET_CODE (body))
1445 {
1446 case COND_EXEC:
1447 (*fun) (&COND_EXEC_TEST (body), data);
1448 note_uses (&COND_EXEC_CODE (body), fun, data);
1449 return;
1450
1451 case PARALLEL:
1452 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1453 note_uses (&XVECEXP (body, 0, i), fun, data);
1454 return;
1455
1456 case USE:
1457 (*fun) (&XEXP (body, 0), data);
1458 return;
1459
1460 case ASM_OPERANDS:
1461 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1462 (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1463 return;
1464
1465 case TRAP_IF:
1466 (*fun) (&TRAP_CONDITION (body), data);
1467 return;
1468
1469 case PREFETCH:
1470 (*fun) (&XEXP (body, 0), data);
1471 return;
1472
1473 case UNSPEC:
1474 case UNSPEC_VOLATILE:
1475 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1476 (*fun) (&XVECEXP (body, 0, i), data);
1477 return;
1478
1479 case CLOBBER:
1480 if (MEM_P (XEXP (body, 0)))
1481 (*fun) (&XEXP (XEXP (body, 0), 0), data);
1482 return;
1483
1484 case SET:
1485 {
1486 rtx dest = SET_DEST (body);
1487
1488 /* For sets we process everything in the source, plus registers in the
1489 memory expression of the store and the operands of a ZERO_EXTRACT. */
1490 (*fun) (&SET_SRC (body), data);
1491
1492 if (GET_CODE (dest) == ZERO_EXTRACT)
1493 {
1494 (*fun) (&XEXP (dest, 1), data);
1495 (*fun) (&XEXP (dest, 2), data);
1496 }
1497
1498 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1499 dest = XEXP (dest, 0);
1500
1501 if (MEM_P (dest))
1502 (*fun) (&XEXP (dest, 0), data);
1503 }
1504 return;
1505
1506 default:
1507 /* All the other possibilities never store. */
1508 (*fun) (pbody, data);
1509 return;
1510 }
1511 }
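
/* Illustrative usage sketch (added for exposition; the callback name is
   made up):

     static void
     count_mem_uses (rtx *use_p, void *data)
     {
       if (MEM_P (*use_p))
         (*(int *) data)++;
     }

     int n_mem_uses = 0;
     note_uses (&PATTERN (insn), count_mem_uses, &n_mem_uses);

   counts the toplevel MEM expressions used by INSN.  Because FUN
   receives a pointer into the pattern, a callback may also replace the
   expression it points to.  */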
1512 \f
1513 /* Return nonzero if X's old contents don't survive after INSN.
1514 This will be true if X is (cc0) or if X is a register and
1515 X dies in INSN or because INSN entirely sets X.
1516
1517 "Entirely set" means set directly and not through a SUBREG,
1518 ZERO_EXTRACT or SIGN_EXTRACT, so no trace of the old contents remains.
1519 Likewise, REG_INC does not count.
1520
1521 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1522 but for this use that makes no difference, since regs don't overlap
1523 during their lifetimes. Therefore, this function may be used
1524 at any time after deaths have been computed (in flow.c).
1525
1526 If REG is a hard reg that occupies multiple machine registers, this
1527 function will only return 1 if each of those registers will be replaced
1528 by INSN. */
1529
1530 int
1531 dead_or_set_p (rtx insn, rtx x)
1532 {
1533 unsigned int regno, last_regno;
1534 unsigned int i;
1535
1536 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1537 if (GET_CODE (x) == CC0)
1538 return 1;
1539
1540 gcc_assert (REG_P (x));
1541
1542 regno = REGNO (x);
1543 last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
1544 : regno + hard_regno_nregs[regno][GET_MODE (x)] - 1);
1545
1546 for (i = regno; i <= last_regno; i++)
1547 if (! dead_or_set_regno_p (insn, i))
1548 return 0;
1549
1550 return 1;
1551 }
1552
1553 /* Return TRUE iff DEST is a register or subreg of a register and
1554 doesn't change the number of words of the inner register, and any
1555 part of the register is TEST_REGNO. */
1556
1557 static bool
1558 covers_regno_no_parallel_p (rtx dest, unsigned int test_regno)
1559 {
1560 unsigned int regno, endregno;
1561
1562 if (GET_CODE (dest) == SUBREG
1563 && (((GET_MODE_SIZE (GET_MODE (dest))
1564 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1565 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1566 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1567 dest = SUBREG_REG (dest);
1568
1569 if (!REG_P (dest))
1570 return false;
1571
1572 regno = REGNO (dest);
1573 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1574 : regno + hard_regno_nregs[regno][GET_MODE (dest)]);
1575 return (test_regno >= regno && test_regno < endregno);
1576 }
1577
1578 /* Like covers_regno_no_parallel_p, but also handles PARALLELs where
1579 any member matches the covers_regno_no_parallel_p criteria. */
1580
1581 static bool
1582 covers_regno_p (rtx dest, unsigned int test_regno)
1583 {
1584 if (GET_CODE (dest) == PARALLEL)
1585 {
1586 /* Some targets place small structures in registers for return
1587 values of functions, and those registers are wrapped in
1588 PARALLELs that we may see as the destination of a SET. */
1589 int i;
1590
1591 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1592 {
1593 rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
1594 if (inner != NULL_RTX
1595 && covers_regno_no_parallel_p (inner, test_regno))
1596 return true;
1597 }
1598
1599 return false;
1600 }
1601 else
1602 return covers_regno_no_parallel_p (dest, test_regno);
1603 }
1604
1605 /* Utility function for dead_or_set_p to check an individual register. Also
1606 called from flow.c. */
1607
1608 int
1609 dead_or_set_regno_p (rtx insn, unsigned int test_regno)
1610 {
1611 rtx pattern;
1612
1613 /* See if there is a death note for something that includes TEST_REGNO. */
1614 if (find_regno_note (insn, REG_DEAD, test_regno))
1615 return 1;
1616
1617 if (CALL_P (insn)
1618 && find_regno_fusage (insn, CLOBBER, test_regno))
1619 return 1;
1620
1621 pattern = PATTERN (insn);
1622
1623 if (GET_CODE (pattern) == COND_EXEC)
1624 pattern = COND_EXEC_CODE (pattern);
1625
1626 if (GET_CODE (pattern) == SET)
1627 return covers_regno_p (SET_DEST (pattern), test_regno);
1628 else if (GET_CODE (pattern) == PARALLEL)
1629 {
1630 int i;
1631
1632 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1633 {
1634 rtx body = XVECEXP (pattern, 0, i);
1635
1636 if (GET_CODE (body) == COND_EXEC)
1637 body = COND_EXEC_CODE (body);
1638
1639 if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1640 && covers_regno_p (SET_DEST (body), test_regno))
1641 return 1;
1642 }
1643 }
1644
1645 return 0;
1646 }
1647
1648 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1649 If DATUM is nonzero, look for one whose datum is DATUM. */
1650
1651 rtx
1652 find_reg_note (rtx insn, enum reg_note kind, rtx datum)
1653 {
1654 rtx link;
1655
1656 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1657 if (! INSN_P (insn))
1658 return 0;
1659 if (datum == 0)
1660 {
1661 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1662 if (REG_NOTE_KIND (link) == kind)
1663 return link;
1664 return 0;
1665 }
1666
1667 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1668 if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
1669 return link;
1670 return 0;
1671 }
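
/* Illustrative usage (added for exposition):

     rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
     rtx value = note ? XEXP (note, 0) : NULL_RTX;

   retrieves the value a REG_EQUAL note says the insn computes, if such
   a note is present; passing a null DATUM matches a note with any
   datum.  */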
1672
1673 /* Return the reg-note of kind KIND in insn INSN which applies to register
1674 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1675 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1676 it might be the case that the note overlaps REGNO. */
1677
1678 rtx
1679 find_regno_note (rtx insn, enum reg_note kind, unsigned int regno)
1680 {
1681 rtx link;
1682
1683 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1684 if (! INSN_P (insn))
1685 return 0;
1686
1687 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1688 if (REG_NOTE_KIND (link) == kind
1689 /* Verify that it is a register, so that scratch and MEM won't cause a
1690 problem here. */
1691 && REG_P (XEXP (link, 0))
1692 && REGNO (XEXP (link, 0)) <= regno
1693 && ((REGNO (XEXP (link, 0))
1694 + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
1695 : hard_regno_nregs[REGNO (XEXP (link, 0))]
1696 [GET_MODE (XEXP (link, 0))]))
1697 > regno))
1698 return link;
1699 return 0;
1700 }
1701
1702 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1703 has such a note. */
1704
1705 rtx
1706 find_reg_equal_equiv_note (rtx insn)
1707 {
1708 rtx link;
1709
1710 if (!INSN_P (insn))
1711 return 0;
1712 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1713 if (REG_NOTE_KIND (link) == REG_EQUAL
1714 || REG_NOTE_KIND (link) == REG_EQUIV)
1715 {
1716 if (single_set (insn) == 0)
1717 return 0;
1718 return link;
1719 }
1720 return NULL;
1721 }
1722
1723 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1724 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1725
1726 int
1727 find_reg_fusage (rtx insn, enum rtx_code code, rtx datum)
1728 {
1729 /* If it's not a CALL_INSN, it can't possibly have a
1730 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
1731 if (!CALL_P (insn))
1732 return 0;
1733
1734 gcc_assert (datum);
1735
1736 if (!REG_P (datum))
1737 {
1738 rtx link;
1739
1740 for (link = CALL_INSN_FUNCTION_USAGE (insn);
1741 link;
1742 link = XEXP (link, 1))
1743 if (GET_CODE (XEXP (link, 0)) == code
1744 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
1745 return 1;
1746 }
1747 else
1748 {
1749 unsigned int regno = REGNO (datum);
1750
1751 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1752 to pseudo registers, so don't bother checking. */
1753
1754 if (regno < FIRST_PSEUDO_REGISTER)
1755 {
1756 unsigned int end_regno
1757 = regno + hard_regno_nregs[regno][GET_MODE (datum)];
1758 unsigned int i;
1759
1760 for (i = regno; i < end_regno; i++)
1761 if (find_regno_fusage (insn, code, i))
1762 return 1;
1763 }
1764 }
1765
1766 return 0;
1767 }
1768
1769 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
1770 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1771
1772 int
1773 find_regno_fusage (rtx insn, enum rtx_code code, unsigned int regno)
1774 {
1775 rtx link;
1776
1777 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1778 to pseudo registers, so don't bother checking. */
1779
1780 if (regno >= FIRST_PSEUDO_REGISTER
1781 || !CALL_P (insn) )
1782 return 0;
1783
1784 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1785 {
1786 unsigned int regnote;
1787 rtx op, reg;
1788
1789 if (GET_CODE (op = XEXP (link, 0)) == code
1790 && REG_P (reg = XEXP (op, 0))
1791 && (regnote = REGNO (reg)) <= regno
1792 && regnote + hard_regno_nregs[regnote][GET_MODE (reg)] > regno)
1793 return 1;
1794 }
1795
1796 return 0;
1797 }
1798
1799 /* Return true if INSN is a call to a pure function. */
1800
1801 int
1802 pure_call_p (rtx insn)
1803 {
1804 rtx link;
1805
1806 if (!CALL_P (insn) || ! CONST_OR_PURE_CALL_P (insn))
1807 return 0;
1808
1809 /* Look for the note that differentiates const and pure functions. */
1810 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1811 {
1812 rtx u, m;
1813
1814 if (GET_CODE (u = XEXP (link, 0)) == USE
1815 && MEM_P (m = XEXP (u, 0)) && GET_MODE (m) == BLKmode
1816 && GET_CODE (XEXP (m, 0)) == SCRATCH)
1817 return 1;
1818 }
1819
1820 return 0;
1821 }
1822 \f
1823 /* Remove register note NOTE from the REG_NOTES of INSN. */
1824
1825 void
1826 remove_note (rtx insn, rtx note)
1827 {
1828 rtx link;
1829
1830 if (note == NULL_RTX)
1831 return;
1832
1833 if (REG_NOTES (insn) == note)
1834 {
1835 REG_NOTES (insn) = XEXP (note, 1);
1836 return;
1837 }
1838
1839 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1840 if (XEXP (link, 1) == note)
1841 {
1842 XEXP (link, 1) = XEXP (note, 1);
1843 return;
1844 }
1845
1846 gcc_unreachable ();
1847 }
1848
1849 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1850 return 1 if it is found. A simple equality test is used to determine if
1851 NODE matches. */
1852
1853 int
1854 in_expr_list_p (rtx listp, rtx node)
1855 {
1856 rtx x;
1857
1858 for (x = listp; x; x = XEXP (x, 1))
1859 if (node == XEXP (x, 0))
1860 return 1;
1861
1862 return 0;
1863 }
1864
1865 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1866 remove that entry from the list if it is found.
1867
1868 A simple equality test is used to determine if NODE matches. */
1869
1870 void
1871 remove_node_from_expr_list (rtx node, rtx *listp)
1872 {
1873 rtx temp = *listp;
1874 rtx prev = NULL_RTX;
1875
1876 while (temp)
1877 {
1878 if (node == XEXP (temp, 0))
1879 {
1880 /* Splice the node out of the list. */
1881 if (prev)
1882 XEXP (prev, 1) = XEXP (temp, 1);
1883 else
1884 *listp = XEXP (temp, 1);
1885
1886 return;
1887 }
1888
1889 prev = temp;
1890 temp = XEXP (temp, 1);
1891 }
1892 }
1893 \f
1894 /* Nonzero if X contains any volatile instructions. These are instructions
1895 which may cause unpredictable machine state, and thus no
1896 instructions should be moved or combined across them. This includes
1897 only volatile asms and UNSPEC_VOLATILE instructions. */
1898
1899 int
1900 volatile_insn_p (rtx x)
1901 {
1902 RTX_CODE code;
1903
1904 code = GET_CODE (x);
1905 switch (code)
1906 {
1907 case LABEL_REF:
1908 case SYMBOL_REF:
1909 case CONST_INT:
1910 case CONST:
1911 case CONST_DOUBLE:
1912 case CONST_VECTOR:
1913 case CC0:
1914 case PC:
1915 case REG:
1916 case SCRATCH:
1917 case CLOBBER:
1918 case ADDR_VEC:
1919 case ADDR_DIFF_VEC:
1920 case CALL:
1921 case MEM:
1922 return 0;
1923
1924 case UNSPEC_VOLATILE:
1925 /* case TRAP_IF: This isn't clear yet. */
1926 return 1;
1927
1928 case ASM_INPUT:
1929 case ASM_OPERANDS:
1930 if (MEM_VOLATILE_P (x))
1931 return 1;
1932
1933 default:
1934 break;
1935 }
1936
1937 /* Recursively scan the operands of this expression. */
1938
1939 {
1940 const char *fmt = GET_RTX_FORMAT (code);
1941 int i;
1942
1943 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1944 {
1945 if (fmt[i] == 'e')
1946 {
1947 if (volatile_insn_p (XEXP (x, i)))
1948 return 1;
1949 }
1950 else if (fmt[i] == 'E')
1951 {
1952 int j;
1953 for (j = 0; j < XVECLEN (x, i); j++)
1954 if (volatile_insn_p (XVECEXP (x, i, j)))
1955 return 1;
1956 }
1957 }
1958 }
1959 return 0;
1960 }
1961
1962 /* Nonzero if X contains any volatile memory references,
1963 UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
1964
1965 int
1966 volatile_refs_p (rtx x)
1967 {
1968 RTX_CODE code;
1969
1970 code = GET_CODE (x);
1971 switch (code)
1972 {
1973 case LABEL_REF:
1974 case SYMBOL_REF:
1975 case CONST_INT:
1976 case CONST:
1977 case CONST_DOUBLE:
1978 case CONST_VECTOR:
1979 case CC0:
1980 case PC:
1981 case REG:
1982 case SCRATCH:
1983 case CLOBBER:
1984 case ADDR_VEC:
1985 case ADDR_DIFF_VEC:
1986 return 0;
1987
1988 case UNSPEC_VOLATILE:
1989 return 1;
1990
1991 case MEM:
1992 case ASM_INPUT:
1993 case ASM_OPERANDS:
1994 if (MEM_VOLATILE_P (x))
1995 return 1;
1996
1997 default:
1998 break;
1999 }
2000
2001 /* Recursively scan the operands of this expression. */
2002
2003 {
2004 const char *fmt = GET_RTX_FORMAT (code);
2005 int i;
2006
2007 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2008 {
2009 if (fmt[i] == 'e')
2010 {
2011 if (volatile_refs_p (XEXP (x, i)))
2012 return 1;
2013 }
2014 else if (fmt[i] == 'E')
2015 {
2016 int j;
2017 for (j = 0; j < XVECLEN (x, i); j++)
2018 if (volatile_refs_p (XVECEXP (x, i, j)))
2019 return 1;
2020 }
2021 }
2022 }
2023 return 0;
2024 }
2025
2026 /* Similar to above, except that it also rejects register pre- and post-
2027 incrementing. */
2028
2029 int
2030 side_effects_p (rtx x)
2031 {
2032 RTX_CODE code;
2033
2034 code = GET_CODE (x);
2035 switch (code)
2036 {
2037 case LABEL_REF:
2038 case SYMBOL_REF:
2039 case CONST_INT:
2040 case CONST:
2041 case CONST_DOUBLE:
2042 case CONST_VECTOR:
2043 case CC0:
2044 case PC:
2045 case REG:
2046 case SCRATCH:
2047 case ADDR_VEC:
2048 case ADDR_DIFF_VEC:
2049 return 0;
2050
2051 case CLOBBER:
2052 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2053 when some combination can't be done. If we see one, don't think
2054 that we can simplify the expression. */
2055 return (GET_MODE (x) != VOIDmode);
2056
2057 case PRE_INC:
2058 case PRE_DEC:
2059 case POST_INC:
2060 case POST_DEC:
2061 case PRE_MODIFY:
2062 case POST_MODIFY:
2063 case CALL:
2064 case UNSPEC_VOLATILE:
2065 /* case TRAP_IF: This isn't clear yet. */
2066 return 1;
2067
2068 case MEM:
2069 case ASM_INPUT:
2070 case ASM_OPERANDS:
2071 if (MEM_VOLATILE_P (x))
2072 return 1;
2073
2074 default:
2075 break;
2076 }
2077
2078 /* Recursively scan the operands of this expression. */
2079
2080 {
2081 const char *fmt = GET_RTX_FORMAT (code);
2082 int i;
2083
2084 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2085 {
2086 if (fmt[i] == 'e')
2087 {
2088 if (side_effects_p (XEXP (x, i)))
2089 return 1;
2090 }
2091 else if (fmt[i] == 'E')
2092 {
2093 int j;
2094 for (j = 0; j < XVECLEN (x, i); j++)
2095 if (side_effects_p (XVECEXP (x, i, j)))
2096 return 1;
2097 }
2098 }
2099 }
2100 return 0;
2101 }
2102 \f
2103 /* Return nonzero if evaluating rtx X might cause a trap. */
2104
2105 int
2106 may_trap_p (rtx x)
2107 {
2108 int i;
2109 enum rtx_code code;
2110 const char *fmt;
2111
2112 if (x == 0)
2113 return 0;
2114 code = GET_CODE (x);
2115 switch (code)
2116 {
2117 /* Handle these cases quickly. */
2118 case CONST_INT:
2119 case CONST_DOUBLE:
2120 case CONST_VECTOR:
2121 case SYMBOL_REF:
2122 case LABEL_REF:
2123 case CONST:
2124 case PC:
2125 case CC0:
2126 case REG:
2127 case SCRATCH:
2128 return 0;
2129
2130 case ASM_INPUT:
2131 case UNSPEC_VOLATILE:
2132 case TRAP_IF:
2133 return 1;
2134
2135 case ASM_OPERANDS:
2136 return MEM_VOLATILE_P (x);
2137
2138 /* Memory ref can trap unless it's a static var or a stack slot. */
2139 case MEM:
2140 if (MEM_NOTRAP_P (x))
2141 return 0;
2142 return rtx_addr_can_trap_p (XEXP (x, 0));
2143
2144 /* Division by a non-constant might trap. */
2145 case DIV:
2146 case MOD:
2147 case UDIV:
2148 case UMOD:
2149 if (HONOR_SNANS (GET_MODE (x)))
2150 return 1;
2151 if (! CONSTANT_P (XEXP (x, 1))
2152 || (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2153 && flag_trapping_math))
2154 return 1;
2155 if (XEXP (x, 1) == const0_rtx)
2156 return 1;
2157 break;
2158
2159 case EXPR_LIST:
2160 /* An EXPR_LIST is used to represent a function call. This
2161 certainly may trap. */
2162 return 1;
2163
2164 case GE:
2165 case GT:
2166 case LE:
2167 case LT:
2168 case LTGT:
2169 case COMPARE:
2170 /* Some floating point comparisons may trap. */
2171 if (!flag_trapping_math)
2172 break;
2173 /* ??? There is no machine independent way to check for tests that trap
2174 when COMPARE is used, though many targets do make this distinction.
2175 For instance, sparc uses CCFPE for compares which generate exceptions
2176 and CCFP for compares which do not generate exceptions. */
2177 if (HONOR_NANS (GET_MODE (x)))
2178 return 1;
2179 /* But often the compare has some CC mode, so check operand
2180 modes as well. */
2181 if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2182 || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2183 return 1;
2184 break;
2185
2186 case EQ:
2187 case NE:
2188 if (HONOR_SNANS (GET_MODE (x)))
2189 return 1;
2190 /* Often comparison is CC mode, so check operand modes. */
2191 if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2192 || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
2193 return 1;
2194 break;
2195
2196 case FIX:
2197 /* Conversion of floating point might trap. */
2198 if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2199 return 1;
2200 break;
2201
2202 case NEG:
2203 case ABS:
2204 /* These operations don't trap even with floating point. */
2205 break;
2206
2207 default:
2208 /* Any floating arithmetic may trap. */
2209 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2210 && flag_trapping_math)
2211 return 1;
2212 }
2213
2214 fmt = GET_RTX_FORMAT (code);
2215 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2216 {
2217 if (fmt[i] == 'e')
2218 {
2219 if (may_trap_p (XEXP (x, i)))
2220 return 1;
2221 }
2222 else if (fmt[i] == 'E')
2223 {
2224 int j;
2225 for (j = 0; j < XVECLEN (x, i); j++)
2226 if (may_trap_p (XVECEXP (x, i, j)))
2227 return 1;
2228 }
2229 }
2230 return 0;
2231 }
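
/* A couple of concrete cases, as a quick illustration (not exhaustive):
   (div:SI (reg:SI 100) (const_int 0)) returns 1, while
   (plus:SI (reg:SI 100) (const_int 4)) returns 0, and a MEM returns 0
   only if MEM_NOTRAP_P is set or its address is known not to trap.  */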
2232 \f
2233 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2234 i.e., an inequality. */
2235
2236 int
2237 inequality_comparisons_p (rtx x)
2238 {
2239 const char *fmt;
2240 int len, i;
2241 enum rtx_code code = GET_CODE (x);
2242
2243 switch (code)
2244 {
2245 case REG:
2246 case SCRATCH:
2247 case PC:
2248 case CC0:
2249 case CONST_INT:
2250 case CONST_DOUBLE:
2251 case CONST_VECTOR:
2252 case CONST:
2253 case LABEL_REF:
2254 case SYMBOL_REF:
2255 return 0;
2256
2257 case LT:
2258 case LTU:
2259 case GT:
2260 case GTU:
2261 case LE:
2262 case LEU:
2263 case GE:
2264 case GEU:
2265 return 1;
2266
2267 default:
2268 break;
2269 }
2270
2271 len = GET_RTX_LENGTH (code);
2272 fmt = GET_RTX_FORMAT (code);
2273
2274 for (i = 0; i < len; i++)
2275 {
2276 if (fmt[i] == 'e')
2277 {
2278 if (inequality_comparisons_p (XEXP (x, i)))
2279 return 1;
2280 }
2281 else if (fmt[i] == 'E')
2282 {
2283 int j;
2284 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2285 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2286 return 1;
2287 }
2288 }
2289
2290 return 0;
2291 }
2292 \f
2293 /* Replace any occurrence of FROM in X with TO. The function does
2294 not descend into CONST_DOUBLEs when doing the replacement.
2295
2296 Note that copying is not done so X must not be shared unless all copies
2297 are to be modified. */
2298
2299 rtx
2300 replace_rtx (rtx x, rtx from, rtx to)
2301 {
2302 int i, j;
2303 const char *fmt;
2304
2305 /* The following prevents an infinite loop when we change a MEM inside a
2306 CONST_DOUBLE into the same CONST_DOUBLE. */
2307 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2308 return x;
2309
2310 if (x == from)
2311 return to;
2312
2313 /* Allow this function to make replacements in EXPR_LISTs. */
2314 if (x == 0)
2315 return 0;
2316
2317 if (GET_CODE (x) == SUBREG)
2318 {
2319 rtx new = replace_rtx (SUBREG_REG (x), from, to);
2320
2321 if (GET_CODE (new) == CONST_INT)
2322 {
2323 x = simplify_subreg (GET_MODE (x), new,
2324 GET_MODE (SUBREG_REG (x)),
2325 SUBREG_BYTE (x));
2326 gcc_assert (x);
2327 }
2328 else
2329 SUBREG_REG (x) = new;
2330
2331 return x;
2332 }
2333 else if (GET_CODE (x) == ZERO_EXTEND)
2334 {
2335 rtx new = replace_rtx (XEXP (x, 0), from, to);
2336
2337 if (GET_CODE (new) == CONST_INT)
2338 {
2339 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2340 new, GET_MODE (XEXP (x, 0)));
2341 gcc_assert (x);
2342 }
2343 else
2344 XEXP (x, 0) = new;
2345
2346 return x;
2347 }
2348
2349 fmt = GET_RTX_FORMAT (GET_CODE (x));
2350 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2351 {
2352 if (fmt[i] == 'e')
2353 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2354 else if (fmt[i] == 'E')
2355 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2356 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2357 }
2358
2359 return x;
2360 }
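
/* A minimal usage sketch (illustrative only): because replace_rtx modifies
   X in place, substitute NEW_REG for OLD_REG in a copy when the original
   pattern may be shared:

     rtx new_pat = replace_rtx (copy_rtx (pat), old_reg, new_reg);  */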
2361 \f
2362 /* Throughout the rtx X, replace many registers according to REG_MAP.
2363 Return the replacement for X (which may be X with altered contents).
2364 REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2365 NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2366
2367 We only support REG_MAP entries of REG or SUBREG. Also, hard registers
2368 should not be mapped to pseudos or vice versa since validate_change
2369 is not called.
2370
2371 If REPLACE_DEST is 1, replacements are also done in destinations;
2372 otherwise, only sources are replaced. */
2373
2374 rtx
2375 replace_regs (rtx x, rtx *reg_map, unsigned int nregs, int replace_dest)
2376 {
2377 enum rtx_code code;
2378 int i;
2379 const char *fmt;
2380
2381 if (x == 0)
2382 return x;
2383
2384 code = GET_CODE (x);
2385 switch (code)
2386 {
2387 case SCRATCH:
2388 case PC:
2389 case CC0:
2390 case CONST_INT:
2391 case CONST_DOUBLE:
2392 case CONST_VECTOR:
2393 case CONST:
2394 case SYMBOL_REF:
2395 case LABEL_REF:
2396 return x;
2397
2398 case REG:
2399 /* Verify that the register has an entry before trying to access it. */
2400 if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
2401 {
2402 /* SUBREGs can't be shared. Always return a copy to ensure that if
2403 this replacement occurs more than once then each instance will
2404 get a distinct rtx. */
2405 if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
2406 return copy_rtx (reg_map[REGNO (x)]);
2407 return reg_map[REGNO (x)];
2408 }
2409 return x;
2410
2411 case SUBREG:
2412 /* Prevent making nested SUBREGs. */
2413 if (REG_P (SUBREG_REG (x)) && REGNO (SUBREG_REG (x)) < nregs
2414 && reg_map[REGNO (SUBREG_REG (x))] != 0
2415 && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
2416 {
2417 rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
2418 return simplify_gen_subreg (GET_MODE (x), map_val,
2419 GET_MODE (SUBREG_REG (x)),
2420 SUBREG_BYTE (x));
2421 }
2422 break;
2423
2424 case SET:
2425 if (replace_dest)
2426 SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
2427
2428 else if (MEM_P (SET_DEST (x))
2429 || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2430 /* Even if we are not to replace destinations, replace the register if
2431 it is CONTAINED in the destination (i.e. the destination is memory
2432 or a STRICT_LOW_PART). */
2433 XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
2434 reg_map, nregs, 0);
2435 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2436 /* Similarly, for ZERO_EXTRACT we replace all operands. */
2437 break;
2438
2439 SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
2440 return x;
2441
2442 default:
2443 break;
2444 }
2445
2446 fmt = GET_RTX_FORMAT (code);
2447 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2448 {
2449 if (fmt[i] == 'e')
2450 XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
2451 else if (fmt[i] == 'E')
2452 {
2453 int j;
2454 for (j = 0; j < XVECLEN (x, i); j++)
2455 XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
2456 nregs, replace_dest);
2457 }
2458 }
2459 return x;
2460 }
2461
2462 /* Replace occurrences of the old label in *X with the new one.
2463 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
2464
2465 int
2466 replace_label (rtx *x, void *data)
2467 {
2468 rtx l = *x;
2469 rtx old_label = ((replace_label_data *) data)->r1;
2470 rtx new_label = ((replace_label_data *) data)->r2;
2471 bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
2472
2473 if (l == NULL_RTX)
2474 return 0;
2475
2476 if (GET_CODE (l) == SYMBOL_REF
2477 && CONSTANT_POOL_ADDRESS_P (l))
2478 {
2479 rtx c = get_pool_constant (l);
2480 if (rtx_referenced_p (old_label, c))
2481 {
2482 rtx new_c, new_l;
2483 replace_label_data *d = (replace_label_data *) data;
2484
2485 /* Create a copy of constant C; replace the label inside
2486 but do not update LABEL_NUSES because uses in constant pool
2487 are not counted. */
2488 new_c = copy_rtx (c);
2489 d->update_label_nuses = false;
2490 for_each_rtx (&new_c, replace_label, data);
2491 d->update_label_nuses = update_label_nuses;
2492
2493 /* Add the new constant NEW_C to constant pool and replace
2494 the old reference to constant by new reference. */
2495 new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
2496 *x = replace_rtx (l, l, new_l);
2497 }
2498 return 0;
2499 }
2500
2501 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2502 field. This is not handled by for_each_rtx because it doesn't
2503 handle unprinted ('0') fields. */
2504 if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
2505 JUMP_LABEL (l) = new_label;
2506
2507 if ((GET_CODE (l) == LABEL_REF
2508 || GET_CODE (l) == INSN_LIST)
2509 && XEXP (l, 0) == old_label)
2510 {
2511 XEXP (l, 0) = new_label;
2512 if (update_label_nuses)
2513 {
2514 ++LABEL_NUSES (new_label);
2515 --LABEL_NUSES (old_label);
2516 }
2517 return 0;
2518 }
2519
2520 return 0;
2521 }
2522
2523 /* Return nonzero when *BODY equals X or directly references X, so that
2524 FOR_EACH_RTX stops traversing and returns nonzero too; otherwise
2525 FOR_EACH_RTX continues traversing *BODY. */
2526
2527 static int
2528 rtx_referenced_p_1 (rtx *body, void *x)
2529 {
2530 rtx y = (rtx) x;
2531
2532 if (*body == NULL_RTX)
2533 return y == NULL_RTX;
2534
2535 /* Return true if a label_ref *BODY refers to label Y. */
2536 if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
2537 return XEXP (*body, 0) == y;
2538
2539 /* If *BODY is a reference to a pool constant, traverse the constant. */
2540 if (GET_CODE (*body) == SYMBOL_REF
2541 && CONSTANT_POOL_ADDRESS_P (*body))
2542 return rtx_referenced_p (y, get_pool_constant (*body));
2543
2544 /* By default, compare the RTL expressions. */
2545 return rtx_equal_p (*body, y);
2546 }
2547
2548 /* Return true if X is referenced in BODY. */
2549
2550 int
2551 rtx_referenced_p (rtx x, rtx body)
2552 {
2553 return for_each_rtx (&body, rtx_referenced_p_1, x);
2554 }
2555
2556 /* If INSN is a tablejump, return true and store the label (which precedes the
2557 jump table) in *LABELP and the jump table in *TABLEP. LABELP and TABLEP may be NULL. */
2558
2559 bool
2560 tablejump_p (rtx insn, rtx *labelp, rtx *tablep)
2561 {
2562 rtx label, table;
2563
2564 if (JUMP_P (insn)
2565 && (label = JUMP_LABEL (insn)) != NULL_RTX
2566 && (table = next_active_insn (label)) != NULL_RTX
2567 && JUMP_P (table)
2568 && (GET_CODE (PATTERN (table)) == ADDR_VEC
2569 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
2570 {
2571 if (labelp)
2572 *labelp = label;
2573 if (tablep)
2574 *tablep = table;
2575 return true;
2576 }
2577 return false;
2578 }
2579
2580 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2581 constant that is not in the constant pool and not in the condition
2582 of an IF_THEN_ELSE. */
2583
2584 static int
2585 computed_jump_p_1 (rtx x)
2586 {
2587 enum rtx_code code = GET_CODE (x);
2588 int i, j;
2589 const char *fmt;
2590
2591 switch (code)
2592 {
2593 case LABEL_REF:
2594 case PC:
2595 return 0;
2596
2597 case CONST:
2598 case CONST_INT:
2599 case CONST_DOUBLE:
2600 case CONST_VECTOR:
2601 case SYMBOL_REF:
2602 case REG:
2603 return 1;
2604
2605 case MEM:
2606 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2607 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2608
2609 case IF_THEN_ELSE:
2610 return (computed_jump_p_1 (XEXP (x, 1))
2611 || computed_jump_p_1 (XEXP (x, 2)));
2612
2613 default:
2614 break;
2615 }
2616
2617 fmt = GET_RTX_FORMAT (code);
2618 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2619 {
2620 if (fmt[i] == 'e'
2621 && computed_jump_p_1 (XEXP (x, i)))
2622 return 1;
2623
2624 else if (fmt[i] == 'E')
2625 for (j = 0; j < XVECLEN (x, i); j++)
2626 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2627 return 1;
2628 }
2629
2630 return 0;
2631 }
2632
2633 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2634
2635 Tablejumps and casesi insns are not considered indirect jumps;
2636 we can recognize them by a (use (label_ref)). */
2637
2638 int
2639 computed_jump_p (rtx insn)
2640 {
2641 int i;
2642 if (JUMP_P (insn))
2643 {
2644 rtx pat = PATTERN (insn);
2645
2646 if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2647 return 0;
2648 else if (GET_CODE (pat) == PARALLEL)
2649 {
2650 int len = XVECLEN (pat, 0);
2651 int has_use_labelref = 0;
2652
2653 for (i = len - 1; i >= 0; i--)
2654 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2655 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2656 == LABEL_REF))
2657 has_use_labelref = 1;
2658
2659 if (! has_use_labelref)
2660 for (i = len - 1; i >= 0; i--)
2661 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2662 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2663 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2664 return 1;
2665 }
2666 else if (GET_CODE (pat) == SET
2667 && SET_DEST (pat) == pc_rtx
2668 && computed_jump_p_1 (SET_SRC (pat)))
2669 return 1;
2670 }
2671 return 0;
2672 }
2673
2674 /* Traverse X via depth-first search, calling F for each
2675 sub-expression (including X itself). F is also passed the DATA.
2676 If F returns -1, do not traverse sub-expressions, but continue
2677 traversing the rest of the tree. If F ever returns any other
2678 nonzero value, stop the traversal, and return the value returned
2679 by F. Otherwise, return 0. This function does not traverse inside
2680 tree structure that contains RTX_EXPRs, or into sub-expressions
2681 whose format code is `0' since it is not known whether or not those
2682 codes are actually RTL.
2683
2684 This routine is very general, and could (should?) be used to
2685 implement many of the other routines in this file. */
2686
2687 int
2688 for_each_rtx (rtx *x, rtx_function f, void *data)
2689 {
2690 int result;
2691 int length;
2692 const char *format;
2693 int i;
2694
2695 /* Call F on X. */
2696 result = (*f) (x, data);
2697 if (result == -1)
2698 /* Do not traverse sub-expressions. */
2699 return 0;
2700 else if (result != 0)
2701 /* Stop the traversal. */
2702 return result;
2703
2704 if (*x == NULL_RTX)
2705 /* There are no sub-expressions. */
2706 return 0;
2707
2708 length = GET_RTX_LENGTH (GET_CODE (*x));
2709 format = GET_RTX_FORMAT (GET_CODE (*x));
2710
2711 for (i = 0; i < length; ++i)
2712 {
2713 switch (format[i])
2714 {
2715 case 'e':
2716 result = for_each_rtx (&XEXP (*x, i), f, data);
2717 if (result != 0)
2718 return result;
2719 break;
2720
2721 case 'V':
2722 case 'E':
2723 if (XVEC (*x, i) != 0)
2724 {
2725 int j;
2726 for (j = 0; j < XVECLEN (*x, i); ++j)
2727 {
2728 result = for_each_rtx (&XVECEXP (*x, i, j), f, data);
2729 if (result != 0)
2730 return result;
2731 }
2732 }
2733 break;
2734
2735 default:
2736 /* Nothing to do. */
2737 break;
2738 }
2739
2740 }
2741
2742 return 0;
2743 }
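
/* A minimal sketch of a for_each_rtx callback (count_mems and count_mems_1
   are hypothetical helpers, not part of this file): count the MEMs
   contained in an expression.  */

static int
count_mems_1 (rtx *loc, void *data)
{
  if (*loc != NULL_RTX && MEM_P (*loc))
    ++*(int *) data;
  /* Return 0 so the traversal continues into sub-expressions.  */
  return 0;
}

static int
count_mems (rtx x)
{
  int count = 0;
  for_each_rtx (&x, count_mems_1, &count);
  return count;
}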
2744
2745 /* Searches X for any reference to REGNO, returning the rtx of the
2746 reference found if any. Otherwise, returns NULL_RTX. */
2747
2748 rtx
2749 regno_use_in (unsigned int regno, rtx x)
2750 {
2751 const char *fmt;
2752 int i, j;
2753 rtx tem;
2754
2755 if (REG_P (x) && REGNO (x) == regno)
2756 return x;
2757
2758 fmt = GET_RTX_FORMAT (GET_CODE (x));
2759 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2760 {
2761 if (fmt[i] == 'e')
2762 {
2763 if ((tem = regno_use_in (regno, XEXP (x, i))))
2764 return tem;
2765 }
2766 else if (fmt[i] == 'E')
2767 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2768 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2769 return tem;
2770 }
2771
2772 return NULL_RTX;
2773 }
2774
2775 /* Return a value indicating whether OP, an operand of a commutative
2776 operation, is preferred as the first or second operand. The higher
2777 the value, the stronger the preference for being the first operand.
2778 We use negative values to indicate a preference for the first operand
2779 and positive values for the second operand. */
2780
2781 int
2782 commutative_operand_precedence (rtx op)
2783 {
2784 enum rtx_code code = GET_CODE (op);
2785
2786 /* Constants always become the second operand. Prefer "nice" constants. */
2787 if (code == CONST_INT)
2788 return -7;
2789 if (code == CONST_DOUBLE)
2790 return -6;
2791 op = avoid_constant_pool_reference (op);
2792 code = GET_CODE (op);
2793
2794 switch (GET_RTX_CLASS (code))
2795 {
2796 case RTX_CONST_OBJ:
2797 if (code == CONST_INT)
2798 return -5;
2799 if (code == CONST_DOUBLE)
2800 return -4;
2801 return -3;
2802
2803 case RTX_EXTRA:
2804 /* SUBREGs of objects should come second. */
2805 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
2806 return -2;
2807
2808 if (!CONSTANT_P (op))
2809 return 0;
2810 else
2811 /* As for RTX_CONST_OBJ. */
2812 return -3;
2813
2814 case RTX_OBJ:
2815 /* Complex expressions should come first, so decrease the priority
2816 of objects. */
2817 return -1;
2818
2819 case RTX_COMM_ARITH:
2820 /* Prefer operands that are themselves commutative to be first.
2821 This helps to make things linear. In particular,
2822 (and (and (reg) (reg)) (not (reg))) is canonical. */
2823 return 4;
2824
2825 case RTX_BIN_ARITH:
2826 /* If only one operand is a binary expression, it will be the first
2827 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
2828 is canonical, although it will usually be further simplified. */
2829 return 2;
2830
2831 case RTX_UNARY:
2832 /* Then prefer NEG and NOT. */
2833 if (code == NEG || code == NOT)
2834 return 1;
2835
2836 default:
2837 return 0;
2838 }
2839 }
2840
2841 /* Return 1 iff it is necessary to swap the operands of a commutative
2842 operation in order to canonicalize the expression. */
2843
2844 int
2845 swap_commutative_operands_p (rtx x, rtx y)
2846 {
2847 return (commutative_operand_precedence (x)
2848 < commutative_operand_precedence (y));
2849 }
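
/* For example (illustrative only): a CONST_INT has precedence -7 and a REG,
   being an RTX_OBJ, has precedence -1, so
   swap_commutative_operands_p (const_int, reg) is true and canonicalization
   puts the constant second.  Likewise a NEG or NOT (precedence 1) is placed
   after a commutative operation (precedence 4), giving forms such as
   (and (and (reg) (reg)) (not (reg))).  */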
2850
2851 /* Return 1 if X is an autoincrement side effect and the register is
2852 not the stack pointer. */
2853 int
2854 auto_inc_p (rtx x)
2855 {
2856 switch (GET_CODE (x))
2857 {
2858 case PRE_INC:
2859 case POST_INC:
2860 case PRE_DEC:
2861 case POST_DEC:
2862 case PRE_MODIFY:
2863 case POST_MODIFY:
2864 /* There are no REG_INC notes for SP. */
2865 if (XEXP (x, 0) != stack_pointer_rtx)
2866 return 1;
2867 default:
2868 break;
2869 }
2870 return 0;
2871 }
2872
2873 /* Return 1 if the sequence of instructions beginning with FROM and up
2874 to and including TO is safe to move. If NEW_TO is non-NULL, and
2875 the sequence is not already safe to move, but can be easily
2876 extended to a sequence which is safe, then NEW_TO will point to the
2877 end of the extended sequence.
2878
2879 For now, this function only checks that the region contains whole
2880 exception regions, but it could be extended to check additional
2881 conditions as well. */
2882
2883 int
2884 insns_safe_to_move_p (rtx from, rtx to, rtx *new_to)
2885 {
2886 int eh_region_count = 0;
2887 int past_to_p = 0;
2888 rtx r = from;
2889
2890 /* By default, assume the end of the region will be what was
2891 suggested. */
2892 if (new_to)
2893 *new_to = to;
2894
2895 while (r)
2896 {
2897 if (NOTE_P (r))
2898 {
2899 switch (NOTE_LINE_NUMBER (r))
2900 {
2901 case NOTE_INSN_EH_REGION_BEG:
2902 ++eh_region_count;
2903 break;
2904
2905 case NOTE_INSN_EH_REGION_END:
2906 if (eh_region_count == 0)
2907 /* This sequence of instructions contains the end of
2908 an exception region, but not the beginning. Moving
2909 it will cause chaos. */
2910 return 0;
2911
2912 --eh_region_count;
2913 break;
2914
2915 default:
2916 break;
2917 }
2918 }
2919 else if (past_to_p)
2920 /* If we've passed TO, and we see a non-note instruction, we
2921 can't extend the sequence to a movable sequence. */
2922 return 0;
2923
2924 if (r == to)
2925 {
2926 if (!new_to)
2927 /* It's OK to move the sequence if there were matched sets of
2928 exception region notes. */
2929 return eh_region_count == 0;
2930
2931 past_to_p = 1;
2932 }
2933
2934 /* It's OK to move the sequence if there were matched sets of
2935 exception region notes. */
2936 if (past_to_p && eh_region_count == 0)
2937 {
2938 *new_to = r;
2939 return 1;
2940 }
2941
2942 /* Go to the next instruction. */
2943 r = NEXT_INSN (r);
2944 }
2945
2946 return 0;
2947 }
2948
2949 /* Return nonzero if IN contains a piece of rtl whose address is LOC. */
2950 int
2951 loc_mentioned_in_p (rtx *loc, rtx in)
2952 {
2953 enum rtx_code code = GET_CODE (in);
2954 const char *fmt = GET_RTX_FORMAT (code);
2955 int i, j;
2956
2957 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2958 {
2959 if (loc == &in->u.fld[i].rt_rtx)
2960 return 1;
2961 if (fmt[i] == 'e')
2962 {
2963 if (loc_mentioned_in_p (loc, XEXP (in, i)))
2964 return 1;
2965 }
2966 else if (fmt[i] == 'E')
2967 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
2968 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
2969 return 1;
2970 }
2971 return 0;
2972 }
2973
2974 /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
2975 and SUBREG_BYTE, return the bit offset where the subreg begins
2976 (counting from the least significant bit of the operand). */
2977
2978 unsigned int
2979 subreg_lsb_1 (enum machine_mode outer_mode,
2980 enum machine_mode inner_mode,
2981 unsigned int subreg_byte)
2982 {
2983 unsigned int bitpos;
2984 unsigned int byte;
2985 unsigned int word;
2986
2987 /* A paradoxical subreg begins at bit position 0. */
2988 if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
2989 return 0;
2990
2991 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2992 /* If the subreg crosses a word boundary ensure that
2993 it also begins and ends on a word boundary. */
2994 gcc_assert (!((subreg_byte % UNITS_PER_WORD
2995 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
2996 && (subreg_byte % UNITS_PER_WORD
2997 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
2998
2999 if (WORDS_BIG_ENDIAN)
3000 word = (GET_MODE_SIZE (inner_mode)
3001 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
3002 else
3003 word = subreg_byte / UNITS_PER_WORD;
3004 bitpos = word * BITS_PER_WORD;
3005
3006 if (BYTES_BIG_ENDIAN)
3007 byte = (GET_MODE_SIZE (inner_mode)
3008 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
3009 else
3010 byte = subreg_byte % UNITS_PER_WORD;
3011 bitpos += byte * BITS_PER_UNIT;
3012
3013 return bitpos;
3014 }
3015
3016 /* Given a subreg X, return the bit offset where the subreg begins
3017 (counting from the least significant bit of the reg). */
3018
3019 unsigned int
3020 subreg_lsb (rtx x)
3021 {
3022 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3023 SUBREG_BYTE (x));
3024 }
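
/* A worked example (assuming a 32-bit target with UNITS_PER_WORD == 4),
   for (subreg:SI (reg:DI) 4):

     little endian:  word = 4 / 4 = 1, byte = 4 % 4 = 0      ->  bit 32
     big endian:     word = (8 - (4 + 4)) / 4 = 0, byte = 0  ->  bit 0

   so the same SUBREG_BYTE selects the high word of the DImode value on a
   little-endian target and the low word on a big-endian one.  */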
3025
3026 /* This function returns the regno offset of a subreg expression.
3027 xregno - A regno of an inner hard subreg_reg (or what will become one).
3028 xmode - The mode of xregno.
3029 offset - The byte offset.
3030 ymode - The mode of a top level SUBREG (or what may become one).
3031 RETURN - The regno offset which would be used. */
3032 unsigned int
3033 subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
3034 unsigned int offset, enum machine_mode ymode)
3035 {
3036 int nregs_xmode, nregs_ymode;
3037 int mode_multiple, nregs_multiple;
3038 int y_offset;
3039
3040 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3041
3042 nregs_xmode = hard_regno_nregs[xregno][xmode];
3043 nregs_ymode = hard_regno_nregs[xregno][ymode];
3044
3045 /* If this is a big endian paradoxical subreg, which uses more actual
3046 hard registers than the original register, we must return a negative
3047 offset so that we find the proper highpart of the register. */
3048 if (offset == 0
3049 && nregs_ymode > nregs_xmode
3050 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3051 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3052 return nregs_xmode - nregs_ymode;
3053
3054 if (offset == 0 || nregs_xmode == nregs_ymode)
3055 return 0;
3056
3057 /* The size of YMODE must not be greater than the size of XMODE. */
3058 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3059 gcc_assert (mode_multiple != 0);
3060
3061 y_offset = offset / GET_MODE_SIZE (ymode);
3062 nregs_multiple = nregs_xmode / nregs_ymode;
3063 return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
3064 }
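
/* A worked example (assuming 32-bit hard registers, so DImode needs two of
   them and SImode one): for xmode == DImode, ymode == SImode and
   offset == 4, nregs_xmode == 2, nregs_ymode == 1, mode_multiple == 2,
   y_offset == 1 and nregs_multiple == 2, giving (1 / (2 / 2)) * 1 == 1.
   Thus (subreg:SI (reg:DI k) 4) refers to hard register k + 1.  */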
3065
3066 /* This function returns true when the offset is representable via
3067 subreg_offset in the given regno.
3068 xregno - A regno of an inner hard subreg_reg (or what will become one).
3069 xmode - The mode of xregno.
3070 offset - The byte offset.
3071 ymode - The mode of a top level SUBREG (or what may become one).
3072 RETURN - Whether the offset is representable. */
3073 bool
3074 subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
3075 unsigned int offset, enum machine_mode ymode)
3076 {
3077 int nregs_xmode, nregs_ymode;
3078 int mode_multiple, nregs_multiple;
3079 int y_offset;
3080
3081 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3082
3083 nregs_xmode = hard_regno_nregs[xregno][xmode];
3084 nregs_ymode = hard_regno_nregs[xregno][ymode];
3085
3086 /* Paradoxical subregs are always valid. */
3087 if (offset == 0
3088 && nregs_ymode > nregs_xmode
3089 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3090 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3091 return true;
3092
3093 /* Lowpart subregs are always valid. */
3094 if (offset == subreg_lowpart_offset (ymode, xmode))
3095 return true;
3096
3097 /* This should always pass, otherwise we don't know how to verify the
3098 constraint. These conditions may be relaxed but subreg_offset would
3099 need to be redesigned. */
3100 gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
3101 gcc_assert ((GET_MODE_SIZE (ymode) % nregs_ymode) == 0);
3102 gcc_assert ((nregs_xmode % nregs_ymode) == 0);
3103
3104 /* The XMODE value can be seen as a vector of NREGS_XMODE
3105 values. The subreg must represent a lowpart of a given field.
3106 Compute which field it is. */
3107 offset -= subreg_lowpart_offset (ymode,
3108 mode_for_size (GET_MODE_BITSIZE (xmode)
3109 / nregs_xmode,
3110 MODE_INT, 0));
3111
3112 /* The size of YMODE must not be greater than the size of XMODE. */
3113 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3114 gcc_assert (mode_multiple != 0);
3115
3116 y_offset = offset / GET_MODE_SIZE (ymode);
3117 nregs_multiple = nregs_xmode / nregs_ymode;
3118
3119 gcc_assert ((offset % GET_MODE_SIZE (ymode)) == 0);
3120 gcc_assert ((mode_multiple % nregs_multiple) == 0);
3121
3122 return (!(y_offset % (mode_multiple / nregs_multiple)));
3123 }
3124
3125 /* Return the final regno that a subreg expression refers to. */
3126 unsigned int
3127 subreg_regno (rtx x)
3128 {
3129 unsigned int ret;
3130 rtx subreg = SUBREG_REG (x);
3131 int regno = REGNO (subreg);
3132
3133 ret = regno + subreg_regno_offset (regno,
3134 GET_MODE (subreg),
3135 SUBREG_BYTE (x),
3136 GET_MODE (x));
3137 return ret;
3138
3139 }
3140 struct parms_set_data
3141 {
3142 int nregs;
3143 HARD_REG_SET regs;
3144 };
3145
3146 /* Helper function for noticing stores to parameter registers. */
3147 static void
3148 parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
3149 {
3150 struct parms_set_data *d = data;
3151 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3152 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3153 {
3154 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3155 d->nregs--;
3156 }
3157 }
3158
3159 /* Look backward for the first parameter to be loaded.
3160 Do not skip BOUNDARY. */
3161 rtx
3162 find_first_parameter_load (rtx call_insn, rtx boundary)
3163 {
3164 struct parms_set_data parm;
3165 rtx p, before;
3166
3167 /* Since different machines initialize their parameter registers
3168 in different orders, assume nothing. Collect the set of all
3169 parameter registers. */
3170 CLEAR_HARD_REG_SET (parm.regs);
3171 parm.nregs = 0;
3172 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3173 if (GET_CODE (XEXP (p, 0)) == USE
3174 && REG_P (XEXP (XEXP (p, 0), 0)))
3175 {
3176 gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
3177
3178 /* We only care about registers which can hold function
3179 arguments. */
3180 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3181 continue;
3182
3183 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3184 parm.nregs++;
3185 }
3186 before = call_insn;
3187
3188 /* Search backward for the first set of a register in this set. */
3189 while (parm.nregs && before != boundary)
3190 {
3191 before = PREV_INSN (before);
3192
3193 /* It is possible that some loads got CSEed from one call to
3194 another. Stop in that case. */
3195 if (CALL_P (before))
3196 break;
3197
3198 /* Our caller must either ensure that we will find all sets
3199 (in case the code has not been optimized yet), or take care of
3200 possible labels by setting BOUNDARY to the preceding
3201 CODE_LABEL. */
3202 if (LABEL_P (before))
3203 {
3204 gcc_assert (before == boundary);
3205 break;
3206 }
3207
3208 if (INSN_P (before))
3209 note_stores (PATTERN (before), parms_set, &parm);
3210 }
3211 return before;
3212 }
3213
3214 /* Return true if we should avoid inserting code between INSN and the preceding
3215 call instruction. */
3216
3217 bool
3218 keep_with_call_p (rtx insn)
3219 {
3220 rtx set;
3221
3222 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3223 {
3224 if (REG_P (SET_DEST (set))
3225 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3226 && fixed_regs[REGNO (SET_DEST (set))]
3227 && general_operand (SET_SRC (set), VOIDmode))
3228 return true;
3229 if (REG_P (SET_SRC (set))
3230 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3231 && REG_P (SET_DEST (set))
3232 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3233 return true;
3234 /* There may be a stack pop just after the call and before the store
3235 of the return register. Search for the actual store when deciding
3236 if we can break or not. */
3237 if (SET_DEST (set) == stack_pointer_rtx)
3238 {
3239 rtx i2 = next_nonnote_insn (insn);
3240 if (i2 && keep_with_call_p (i2))
3241 return true;
3242 }
3243 }
3244 return false;
3245 }
3246
3247 /* Return true if LABEL is a target of JUMP_INSN. This applies only
3248 to non-complex jumps. That is, direct unconditional, conditional,
3249 and tablejumps, but not computed jumps or returns. It also does
3250 not apply to the fallthru case of a conditional jump. */
3251
3252 bool
3253 label_is_jump_target_p (rtx label, rtx jump_insn)
3254 {
3255 rtx tmp = JUMP_LABEL (jump_insn);
3256
3257 if (label == tmp)
3258 return true;
3259
3260 if (tablejump_p (jump_insn, NULL, &tmp))
3261 {
3262 rtvec vec = XVEC (PATTERN (tmp),
3263 GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3264 int i, veclen = GET_NUM_ELEM (vec);
3265
3266 for (i = 0; i < veclen; ++i)
3267 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3268 return true;
3269 }
3270
3271 return false;
3272 }
3273
3274 \f
3275 /* Return an estimate of the cost of computing rtx X.
3276 One use is in cse, to decide which expression to keep in the hash table.
3277 Another is in rtl generation, to pick the cheapest way to multiply.
3278 Other uses like the latter are expected in the future. */
3279
3280 int
3281 rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
3282 {
3283 int i, j;
3284 enum rtx_code code;
3285 const char *fmt;
3286 int total;
3287
3288 if (x == 0)
3289 return 0;
3290
3291 /* Compute the default costs of certain things.
3292 Note that targetm.rtx_costs can override the defaults. */
3293
3294 code = GET_CODE (x);
3295 switch (code)
3296 {
3297 case MULT:
3298 total = COSTS_N_INSNS (5);
3299 break;
3300 case DIV:
3301 case UDIV:
3302 case MOD:
3303 case UMOD:
3304 total = COSTS_N_INSNS (7);
3305 break;
3306 case USE:
3307 /* Used in loop.c and combine.c as a marker. */
3308 total = 0;
3309 break;
3310 default:
3311 total = COSTS_N_INSNS (1);
3312 }
3313
3314 switch (code)
3315 {
3316 case REG:
3317 return 0;
3318
3319 case SUBREG:
3320 /* If we can't tie these modes, make this expensive. The larger
3321 the mode, the more expensive it is. */
3322 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3323 return COSTS_N_INSNS (2
3324 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
3325 break;
3326
3327 default:
3328 if (targetm.rtx_costs (x, code, outer_code, &total))
3329 return total;
3330 break;
3331 }
3332
3333 /* Sum the costs of the sub-rtx's, plus the cost of this operation,
3334 which is already in TOTAL. */
3335
3336 fmt = GET_RTX_FORMAT (code);
3337 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3338 if (fmt[i] == 'e')
3339 total += rtx_cost (XEXP (x, i), code);
3340 else if (fmt[i] == 'E')
3341 for (j = 0; j < XVECLEN (x, i); j++)
3342 total += rtx_cost (XVECEXP (x, i, j), code);
3343
3344 return total;
3345 }
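
/* A worked example under the default costs above (i.e. assuming the target
   does not override them via targetm.rtx_costs): for
   (plus:SI (mult:SI (reg:SI a) (reg:SI b)) (reg:SI c)) the PLUS contributes
   COSTS_N_INSNS (1), the MULT COSTS_N_INSNS (5), and each REG 0, for a
   total of COSTS_N_INSNS (6).  */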
3346 \f
3347 /* Return the cost of address expression X.
3348 Expect that X is a properly formed address reference. */
3349
3350 int
3351 address_cost (rtx x, enum machine_mode mode)
3352 {
3353 /* We may be asked for the cost of various unusual addresses, such as the
3354 operands of a push instruction. It is not worthwhile to complicate
3355 the target hook with such cases. */
3356
3357 if (!memory_address_p (mode, x))
3358 return 1000;
3359
3360 return targetm.address_cost (x);
3361 }
3362
3363 /* If the target doesn't override, compute the cost as with arithmetic. */
3364
3365 int
3366 default_address_cost (rtx x)
3367 {
3368 return rtx_cost (x, MEM);
3369 }
3370 \f
3371
3372 unsigned HOST_WIDE_INT
3373 nonzero_bits (rtx x, enum machine_mode mode)
3374 {
3375 return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
3376 }
3377
3378 unsigned int
3379 num_sign_bit_copies (rtx x, enum machine_mode mode)
3380 {
3381 return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
3382 }
3383
3384 /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3385 It avoids exponential behavior in nonzero_bits1 when X has
3386 identical subexpressions on the first or the second level. */
3387
3388 static unsigned HOST_WIDE_INT
3389 cached_nonzero_bits (rtx x, enum machine_mode mode, rtx known_x,
3390 enum machine_mode known_mode,
3391 unsigned HOST_WIDE_INT known_ret)
3392 {
3393 if (x == known_x && mode == known_mode)
3394 return known_ret;
3395
3396 /* Try to find identical subexpressions. If found call
3397 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3398 precomputed value for the subexpression as KNOWN_RET. */
3399
3400 if (ARITHMETIC_P (x))
3401 {
3402 rtx x0 = XEXP (x, 0);
3403 rtx x1 = XEXP (x, 1);
3404
3405 /* Check the first level. */
3406 if (x0 == x1)
3407 return nonzero_bits1 (x, mode, x0, mode,
3408 cached_nonzero_bits (x0, mode, known_x,
3409 known_mode, known_ret));
3410
3411 /* Check the second level. */
3412 if (ARITHMETIC_P (x0)
3413 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3414 return nonzero_bits1 (x, mode, x1, mode,
3415 cached_nonzero_bits (x1, mode, known_x,
3416 known_mode, known_ret));
3417
3418 if (ARITHMETIC_P (x1)
3419 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3420 return nonzero_bits1 (x, mode, x0, mode,
3421 cached_nonzero_bits (x0, mode, known_x,
3422 known_mode, known_ret));
3423 }
3424
3425 return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
3426 }
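
/* For instance (illustrative only), given (plus:SI (reg:SI a) (reg:SI a))
   with both operands the same rtx, the nonzero bits of the shared operand
   are computed once and passed to nonzero_bits1 as KNOWN_X/KNOWN_RET, so
   the operand is not analyzed twice.  */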
3427
3428 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3429 We don't let nonzero_bits recur into num_sign_bit_copies, because that
3430 is less useful. We can't allow both, because that results in exponential
3431 run time recursion. There is a nullstone testcase that triggered
3432 this. This macro avoids accidental uses of num_sign_bit_copies. */
3433 #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3434
3435 /* Given an expression, X, compute which bits in X can be nonzero.
3436 We don't care about bits outside of those defined in MODE.
3437
3438 For most X this is simply GET_MODE_MASK (MODE), but if X is
3439 an arithmetic operation, we can do better. */
3440
3441 static unsigned HOST_WIDE_INT
3442 nonzero_bits1 (rtx x, enum machine_mode mode, rtx known_x,
3443 enum machine_mode known_mode,
3444 unsigned HOST_WIDE_INT known_ret)
3445 {
3446 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3447 unsigned HOST_WIDE_INT inner_nz;
3448 enum rtx_code code;
3449 unsigned int mode_width = GET_MODE_BITSIZE (mode);
3450
3451 /* For floating-point values, assume all bits are needed. */
3452 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
3453 return nonzero;
3454
3455 /* If X is wider than MODE, use its mode instead. */
3456 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
3457 {
3458 mode = GET_MODE (x);
3459 nonzero = GET_MODE_MASK (mode);
3460 mode_width = GET_MODE_BITSIZE (mode);
3461 }
3462
3463 if (mode_width > HOST_BITS_PER_WIDE_INT)
3464 /* Our only callers in this case look for single bit values. So
3465 just return the mode mask. Those tests will then be false. */
3466 return nonzero;
3467
3468 #ifndef WORD_REGISTER_OPERATIONS
3469 /* If MODE is wider than X, but both are a single word for both the host
3470 and target machines, we can compute this from which bits of the
3471 object might be nonzero in its own mode, taking into account the fact
3472 that on many CISC machines, accessing an object in a wider mode
3473 causes the high-order bits to become undefined. So they are
3474 not known to be zero. */
3475
3476 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3477 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
3478 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3479 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
3480 {
3481 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3482 known_x, known_mode, known_ret);
3483 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3484 return nonzero;
3485 }
3486 #endif
3487
3488 code = GET_CODE (x);
3489 switch (code)
3490 {
3491 case REG:
3492 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3493 /* If pointers extend unsigned and this is a pointer in Pmode, say that
3494 all the bits above ptr_mode are known to be zero. */
3495 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3496 && REG_POINTER (x))
3497 nonzero &= GET_MODE_MASK (ptr_mode);
3498 #endif
3499
3500 /* Include declared information about alignment of pointers. */
3501 /* ??? We don't properly preserve REG_POINTER changes across
3502 pointer-to-integer casts, so we can't trust it except for
3503 things that we know must be pointers. See execute/960116-1.c. */
3504 if ((x == stack_pointer_rtx
3505 || x == frame_pointer_rtx
3506 || x == arg_pointer_rtx)
3507 && REGNO_POINTER_ALIGN (REGNO (x)))
3508 {
3509 unsigned HOST_WIDE_INT alignment
3510 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3511
3512 #ifdef PUSH_ROUNDING
3513 /* If PUSH_ROUNDING is defined, it is possible for the
3514 stack to be momentarily aligned only to that amount,
3515 so we pick the least alignment. */
3516 if (x == stack_pointer_rtx && PUSH_ARGS)
3517 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3518 alignment);
3519 #endif
3520
3521 nonzero &= ~(alignment - 1);
3522 }
3523
3524 {
3525 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
3526 rtx new = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
3527 known_mode, known_ret,
3528 &nonzero_for_hook);
3529
3530 if (new)
3531 nonzero_for_hook &= cached_nonzero_bits (new, mode, known_x,
3532 known_mode, known_ret);
3533
3534 return nonzero_for_hook;
3535 }
3536
3537 case CONST_INT:
3538 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
3539 /* If X is negative in MODE, sign-extend the value. */
3540 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
3541 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
3542 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
3543 #endif
3544
3545 return INTVAL (x);
3546
3547 case MEM:
3548 #ifdef LOAD_EXTEND_OP
3549 /* In many, if not most, RISC machines, reading a byte from memory
3550 zeros the rest of the register. Noticing that fact saves a lot
3551 of extra zero-extends. */
3552 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
3553 nonzero &= GET_MODE_MASK (GET_MODE (x));
3554 #endif
3555 break;
3556
3557 case EQ: case NE:
3558 case UNEQ: case LTGT:
3559 case GT: case GTU: case UNGT:
3560 case LT: case LTU: case UNLT:
3561 case GE: case GEU: case UNGE:
3562 case LE: case LEU: case UNLE:
3563 case UNORDERED: case ORDERED:
3564
3565 /* If this produces an integer result, we know which bits are set.
3566 Code here used to clear bits outside the mode of X, but that is
3567 now done above. */
3568
3569 if (GET_MODE_CLASS (mode) == MODE_INT
3570 && mode_width <= HOST_BITS_PER_WIDE_INT)
3571 nonzero = STORE_FLAG_VALUE;
3572 break;
3573
3574 case NEG:
3575 #if 0
3576 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3577 and num_sign_bit_copies. */
3578 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3579 == GET_MODE_BITSIZE (GET_MODE (x)))
3580 nonzero = 1;
3581 #endif
3582
3583 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
3584 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
3585 break;
3586
3587 case ABS:
3588 #if 0
3589 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3590 and num_sign_bit_copies. */
3591 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3592 == GET_MODE_BITSIZE (GET_MODE (x)))
3593 nonzero = 1;
3594 #endif
3595 break;
3596
3597 case TRUNCATE:
3598 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
3599 known_x, known_mode, known_ret)
3600 & GET_MODE_MASK (mode));
3601 break;
3602
3603 case ZERO_EXTEND:
3604 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3605 known_x, known_mode, known_ret);
3606 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3607 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3608 break;
3609
3610 case SIGN_EXTEND:
3611 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
3612 Otherwise, show that all the bits in the outer mode but not in the
3613 inner mode may be nonzero. */
3614 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
3615 known_x, known_mode, known_ret);
3616 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3617 {
3618 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3619 if (inner_nz
3620 & (((HOST_WIDE_INT) 1
3621 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
3622 inner_nz |= (GET_MODE_MASK (mode)
3623 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
3624 }
3625
3626 nonzero &= inner_nz;
3627 break;
3628
3629 case AND:
3630 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3631 known_x, known_mode, known_ret)
3632 & cached_nonzero_bits (XEXP (x, 1), mode,
3633 known_x, known_mode, known_ret);
3634 break;
3635
3636 case XOR: case IOR:
3637 case UMIN: case UMAX: case SMIN: case SMAX:
3638 {
3639 unsigned HOST_WIDE_INT nonzero0 =
3640 cached_nonzero_bits (XEXP (x, 0), mode,
3641 known_x, known_mode, known_ret);
3642
3643 /* Don't call nonzero_bits for the second time if it cannot change
3644 anything. */
3645 if ((nonzero & nonzero0) != nonzero)
3646 nonzero &= nonzero0
3647 | cached_nonzero_bits (XEXP (x, 1), mode,
3648 known_x, known_mode, known_ret);
3649 }
3650 break;
3651
3652 case PLUS: case MINUS:
3653 case MULT:
3654 case DIV: case UDIV:
3655 case MOD: case UMOD:
3656 /* We can apply the rules of arithmetic to compute the number of
3657 high- and low-order zero bits of these operations. We start by
3658 computing the width (position of the highest-order nonzero bit)
3659 and the number of low-order zero bits for each value. */
3660 {
3661 unsigned HOST_WIDE_INT nz0 =
3662 cached_nonzero_bits (XEXP (x, 0), mode,
3663 known_x, known_mode, known_ret);
3664 unsigned HOST_WIDE_INT nz1 =
3665 cached_nonzero_bits (XEXP (x, 1), mode,
3666 known_x, known_mode, known_ret);
3667 int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
3668 int width0 = floor_log2 (nz0) + 1;
3669 int width1 = floor_log2 (nz1) + 1;
3670 int low0 = floor_log2 (nz0 & -nz0);
3671 int low1 = floor_log2 (nz1 & -nz1);
3672 HOST_WIDE_INT op0_maybe_minusp
3673 = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
3674 HOST_WIDE_INT op1_maybe_minusp
3675 = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
3676 unsigned int result_width = mode_width;
3677 int result_low = 0;
3678
3679 switch (code)
3680 {
3681 case PLUS:
3682 result_width = MAX (width0, width1) + 1;
3683 result_low = MIN (low0, low1);
3684 break;
3685 case MINUS:
3686 result_low = MIN (low0, low1);
3687 break;
3688 case MULT:
3689 result_width = width0 + width1;
3690 result_low = low0 + low1;
3691 break;
3692 case DIV:
3693 if (width1 == 0)
3694 break;
3695 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3696 result_width = width0;
3697 break;
3698 case UDIV:
3699 if (width1 == 0)
3700 break;
3701 result_width = width0;
3702 break;
3703 case MOD:
3704 if (width1 == 0)
3705 break;
3706 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3707 result_width = MIN (width0, width1);
3708 result_low = MIN (low0, low1);
3709 break;
3710 case UMOD:
3711 if (width1 == 0)
3712 break;
3713 result_width = MIN (width0, width1);
3714 result_low = MIN (low0, low1);
3715 break;
3716 default:
3717 gcc_unreachable ();
3718 }
3719
3720 if (result_width < mode_width)
3721 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
3722
3723 if (result_low > 0)
3724 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
3725
3726 #ifdef POINTERS_EXTEND_UNSIGNED
3727 /* If pointers extend unsigned and this is an addition or subtraction
3728 to a pointer in Pmode, all the bits above ptr_mode are known to be
3729 zero. */
3730 if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
3731 && (code == PLUS || code == MINUS)
3732 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
3733 nonzero &= GET_MODE_MASK (ptr_mode);
3734 #endif
3735 }
3736 break;
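/* Worked example (illustrative only): if nz0 == 0xff and nz1 == 0xf0,
   then width0 == width1 == 8, low0 == 0 and low1 == 4.  For PLUS this
   gives result_width == 9 and result_low == 0, masking NONZERO down to
   0x1ff; for MULT it gives result_width == 16 and result_low == 4,
   masking NONZERO down to 0xfff0.  */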
3737
3738 case ZERO_EXTRACT:
3739 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3740 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3741 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
3742 break;
3743
3744 case SUBREG:
3745 /* If this is a SUBREG formed for a promoted variable that has
3746 been zero-extended, we know that at least the high-order bits
3747 are zero, though others might be too. */
3748
3749 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
3750 nonzero = GET_MODE_MASK (GET_MODE (x))
3751 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
3752 known_x, known_mode, known_ret);
3753
3754 /* If the inner mode is a single word for both the host and target
3755 machines, we can compute this from which bits of the inner
3756 object might be nonzero. */
3757 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
3758 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
3759 <= HOST_BITS_PER_WIDE_INT))
3760 {
3761 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
3762 known_x, known_mode, known_ret);
3763
3764 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
3765 /* If this is a typical RISC machine, we only have to worry
3766 about the way loads are extended. */
3767 if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
3768 ? (((nonzero
3769 & (((unsigned HOST_WIDE_INT) 1
3770 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
3771 != 0))
3772 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
3773 || !MEM_P (SUBREG_REG (x)))
3774 #endif
3775 {
3776 /* On many CISC machines, accessing an object in a wider mode
3777 causes the high-order bits to become undefined. So they are
3778 not known to be zero. */
3779 if (GET_MODE_SIZE (GET_MODE (x))
3780 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3781 nonzero |= (GET_MODE_MASK (GET_MODE (x))
3782 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
3783 }
3784 }
3785 break;
3786
3787 case ASHIFTRT:
3788 case LSHIFTRT:
3789 case ASHIFT:
3790 case ROTATE:
3791 /* The nonzero bits are in two classes: any bits within MODE
3792 that aren't in GET_MODE (x) are always significant. The rest of the
3793 nonzero bits are those that are significant in the operand of
3794 the shift when shifted the appropriate number of bits. This
3795 shows that high-order bits are cleared by the right shift and
3796 low-order bits by left shifts. */
3797 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3798 && INTVAL (XEXP (x, 1)) >= 0
3799 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3800 {
3801 enum machine_mode inner_mode = GET_MODE (x);
3802 unsigned int width = GET_MODE_BITSIZE (inner_mode);
3803 int count = INTVAL (XEXP (x, 1));
3804 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
3805 unsigned HOST_WIDE_INT op_nonzero =
3806 cached_nonzero_bits (XEXP (x, 0), mode,
3807 known_x, known_mode, known_ret);
3808 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
3809 unsigned HOST_WIDE_INT outer = 0;
3810
3811 if (mode_width > width)
3812 outer = (op_nonzero & nonzero & ~mode_mask);
3813
3814 if (code == LSHIFTRT)
3815 inner >>= count;
3816 else if (code == ASHIFTRT)
3817 {
3818 inner >>= count;
3819
3820 /* If the sign bit may have been nonzero before the shift, we
3821 need to mark all the places it could have been copied to
3822 by the shift as possibly nonzero. */
3823 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
3824 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
3825 }
3826 else if (code == ASHIFT)
3827 inner <<= count;
3828 else
3829 inner = ((inner << (count % width)
3830 | (inner >> (width - (count % width)))) & mode_mask);
3831
3832 nonzero &= (outer | inner);
3833 }
3834 break;
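/* For instance (illustrative only), with a 16-bit inner mode:
   (lshiftrt (x) (const_int 4)) with op_nonzero == 0xff00 yields
   inner == 0x0ff0, while (ashiftrt (x) (const_int 4)) with
   op_nonzero == 0x8000 yields inner == 0xf800, since the possibly
   nonzero sign bit is copied into bits 15..11.  */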
3835
3836 case FFS:
3837 case POPCOUNT:
3838 /* This is at most the number of bits in the mode. */
3839 nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
3840 break;
3841
3842 case CLZ:
3843 /* If CLZ has a known value at zero, then the nonzero bits are
3844 that value, plus the number of bits in the mode minus one. */
3845 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
3846 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
3847 else
3848 nonzero = -1;
3849 break;
3850
3851 case CTZ:
3852 /* If CTZ has a known value at zero, then the nonzero bits are
3853 that value, plus the number of bits in the mode minus one. */
3854 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
3855 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
3856 else
3857 nonzero = -1;
3858 break;
3859
3860 case PARITY:
3861 nonzero = 1;
3862 break;
3863
3864 case IF_THEN_ELSE:
3865 {
3866 unsigned HOST_WIDE_INT nonzero_true =
3867 cached_nonzero_bits (XEXP (x, 1), mode,
3868 known_x, known_mode, known_ret);
3869
3870 /* Don't call nonzero_bits for the second time if it cannot change
3871 anything. */
3872 if ((nonzero & nonzero_true) != nonzero)
3873 nonzero &= nonzero_true
3874 | cached_nonzero_bits (XEXP (x, 2), mode,
3875 known_x, known_mode, known_ret);
3876 }
3877 break;
3878
3879 default:
3880 break;
3881 }
3882
3883 return nonzero;
3884 }
3885
3886 /* See the macro definition above. */
3887 #undef cached_num_sign_bit_copies
3888
3889 \f
3890 /* The function cached_num_sign_bit_copies is a wrapper around
3891 num_sign_bit_copies1. It avoids exponential behavior in
3892 num_sign_bit_copies1 when X has identical subexpressions on the
3893 first or the second level. */
3894
3895 static unsigned int
3896 cached_num_sign_bit_copies (rtx x, enum machine_mode mode, rtx known_x,
3897 enum machine_mode known_mode,
3898 unsigned int known_ret)
3899 {
3900 if (x == known_x && mode == known_mode)
3901 return known_ret;
3902
3903 /* Try to find identical subexpressions. If found call
3904 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
3905 the precomputed value for the subexpression as KNOWN_RET. */
3906
3907 if (ARITHMETIC_P (x))
3908 {
3909 rtx x0 = XEXP (x, 0);
3910 rtx x1 = XEXP (x, 1);
3911
3912 /* Check the first level. */
3913 if (x0 == x1)
3914 return
3915 num_sign_bit_copies1 (x, mode, x0, mode,
3916 cached_num_sign_bit_copies (x0, mode, known_x,
3917 known_mode,
3918 known_ret));
3919
3920 /* Check the second level. */
3921 if (ARITHMETIC_P (x0)
3922 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3923 return
3924 num_sign_bit_copies1 (x, mode, x1, mode,
3925 cached_num_sign_bit_copies (x1, mode, known_x,
3926 known_mode,
3927 known_ret));
3928
3929 if (ARITHMETIC_P (x1)
3930 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3931 return
3932 num_sign_bit_copies1 (x, mode, x0, mode,
3933 cached_num_sign_bit_copies (x0, mode, known_x,
3934 known_mode,
3935 known_ret));
3936 }
3937
3938 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
3939 }
3940
3941 /* Return the number of bits at the high-order end of X that are known to
3942 be equal to the sign bit. X will be used in mode MODE; if MODE is
3943 VOIDmode, X will be used in its own mode. The returned value will always
3944 be between 1 and the number of bits in MODE. */
3945
3946 static unsigned int
3947 num_sign_bit_copies1 (rtx x, enum machine_mode mode, rtx known_x,
3948 enum machine_mode known_mode,
3949 unsigned int known_ret)
3950 {
3951 enum rtx_code code = GET_CODE (x);
3952 unsigned int bitwidth = GET_MODE_BITSIZE (mode);
3953 int num0, num1, result;
3954 unsigned HOST_WIDE_INT nonzero;
3955
3956 /* If we weren't given a mode, use the mode of X. If the mode is still
3957 VOIDmode, we don't know anything. Likewise if one of the modes is
3958 floating-point. */
3959
3960 if (mode == VOIDmode)
3961 mode = GET_MODE (x);
3962
3963 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
3964 return 1;
3965
3966 /* For a smaller object, just ignore the high bits. */
3967 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
3968 {
3969 num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
3970 known_x, known_mode, known_ret);
3971 return MAX (1,
3972 num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
3973 }
3974
3975 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
3976 {
3977 #ifndef WORD_REGISTER_OPERATIONS
3978 /* If this machine does not do all register operations on the entire
3979 register and MODE is wider than the mode of X, we can say nothing
3980 at all about the high-order bits. */
3981 return 1;
3982 #else
3983 /* Likewise on machines that do, if the mode of the object is smaller
3984 than a word and loads of that size don't sign extend, we can say
3985 nothing about the high order bits. */
3986 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
3987 #ifdef LOAD_EXTEND_OP
3988 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
3989 #endif
3990 )
3991 return 1;
3992 #endif
3993 }
3994
3995 switch (code)
3996 {
3997 case REG:
3998
3999 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4000 /* If pointers extend signed and this is a pointer in Pmode, say that
4001 all the bits above ptr_mode are known to be sign bit copies. */
4002 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
4003 && REG_POINTER (x))
4004 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
4005 #endif
4006
4007 {
4008 unsigned int copies_for_hook = 1, copies = 1;
4009 rtx new = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
4010 known_mode, known_ret,
4011 &copies_for_hook);
4012
4013 if (new)
4014 copies = cached_num_sign_bit_copies (new, mode, known_x,
4015 known_mode, known_ret);
4016
4017 if (copies > 1 || copies_for_hook > 1)
4018 return MAX (copies, copies_for_hook);
4019
4020 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
4021 }
4022 break;
4023
4024 case MEM:
4025 #ifdef LOAD_EXTEND_OP
4026 /* Some RISC machines sign-extend all loads of smaller than a word. */
4027 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4028 return MAX (1, ((int) bitwidth
4029 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
4030 #endif
4031 break;
4032
4033 case CONST_INT:
4034 /* If the constant is negative, take its 1's complement and remask.
4035 Then see how many zero bits we have. */
4036 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
4037 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4038 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4039 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4040
4041 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
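/* For instance (illustrative only), in a 32-bit MODE the constant 6
   (binary ...0110) gives 32 - floor_log2 (6) - 1 == 29 sign bit copies,
   and -8 (binary ...1111000) gives the same 29 after the 1's complement
   step reduces it to 7.  */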
4042
4043 case SUBREG:
4044 /* If this is a SUBREG for a promoted object that is sign-extended
4045 and we are looking at it in a wider mode, we know that at least the
4046 high-order bits are known to be sign bit copies. */
4047
4048 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
4049 {
4050 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4051 known_x, known_mode, known_ret);
4052 return MAX ((int) bitwidth
4053 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
4054 num0);
4055 }
4056
4057 /* For a smaller object, just ignore the high bits. */
4058 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
4059 {
4060 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4061 known_x, known_mode, known_ret);
4062 return MAX (1, (num0
4063 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4064 - bitwidth)));
4065 }
4066
4067 #ifdef WORD_REGISTER_OPERATIONS
4068 #ifdef LOAD_EXTEND_OP
4069 /* For paradoxical SUBREGs on machines where all register operations
4070 affect the entire register, just look inside. Note that we are
4071 passing MODE to the recursive call, so the number of sign bit copies
4072 will remain relative to that mode, not the inner mode. */
4073
4074 /* This works only if loads sign extend. Otherwise, if we get a
4075 reload for the inner part, it may be loaded from the stack, and
4076 then we lose all sign bit copies that existed before the store
4077 to the stack. */
4078
4079 if ((GET_MODE_SIZE (GET_MODE (x))
4080 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4081 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4082 && MEM_P (SUBREG_REG (x)))
4083 return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4084 known_x, known_mode, known_ret);
4085 #endif
4086 #endif
4087 break;
4088
4089 case SIGN_EXTRACT:
4090 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4091 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4092 break;
4093
4094 case SIGN_EXTEND:
4095 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4096 + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4097 known_x, known_mode, known_ret));
4098
4099 case TRUNCATE:
4100 /* For a smaller object, just ignore the high bits. */
4101 num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4102 known_x, known_mode, known_ret);
4103 return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4104 - bitwidth)));
4105
4106 case NOT:
4107 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4108 known_x, known_mode, known_ret);
4109
4110 case ROTATE: case ROTATERT:
4111 /* If we are rotating left by a number of bits less than the number
4112 of sign bit copies, we can just subtract that amount from the
4113 number. */
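      /* For example, (rotate X 3) where X has 10 sign bit copies is left
	 with MAX (1, 10 - 3) == 7; (rotatert X 3) in a 32-bit mode is
	 handled as a left rotate by 32 - 3 == 29.  */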
4114 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4115 && INTVAL (XEXP (x, 1)) >= 0
4116 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4117 {
4118 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4119 known_x, known_mode, known_ret);
4120 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4121 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4122 }
4123 break;
4124
4125 case NEG:
4126 /* In general, this subtracts one sign bit copy. But if the value
4127 is known to be positive, the number of sign bit copies is the
4128 same as that of the input. Finally, if the input has just one bit
4129 that might be nonzero, all the bits are copies of the sign bit. */
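      /* For example, if nonzero_bits shows the operand can only be 0 or 1,
	 the negation is either 0 or -1, so every bit is a copy of the sign
	 bit and the full bitwidth is returned below.  */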
4130 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4131 known_x, known_mode, known_ret);
4132 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4133 return num0 > 1 ? num0 - 1 : 1;
4134
4135 nonzero = nonzero_bits (XEXP (x, 0), mode);
4136 if (nonzero == 1)
4137 return bitwidth;
4138
4139 if (num0 > 1
4140 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4141 num0--;
4142
4143 return num0;
4144
4145 case IOR: case AND: case XOR:
4146 case SMIN: case SMAX: case UMIN: case UMAX:
4147 /* Logical operations will preserve the number of sign-bit copies.
4148 MIN and MAX operations always return one of the operands. */
4149 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4150 known_x, known_mode, known_ret);
4151 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4152 known_x, known_mode, known_ret);
4153 return MIN (num0, num1);
4154
4155 case PLUS: case MINUS:
4156 /* For addition and subtraction, we can have a 1-bit carry. However,
4157 if we are subtracting 1 from a positive number, there will not
4158 be such a carry. Furthermore, if the positive number is known to
4159 be 0 or 1, we know the result is either -1 or 0. */
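      /* For example, (plus X (const_int -1)) where X is known to be 0 or 1
	 yields -1 or 0, so all BITWIDTH bits are sign bit copies; that is
	 the nonzero == 1 case handled just below.  */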
4160
4161 if (code == PLUS && XEXP (x, 1) == constm1_rtx
4162 && bitwidth <= HOST_BITS_PER_WIDE_INT)
4163 {
4164 nonzero = nonzero_bits (XEXP (x, 0), mode);
4165 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4166 return (nonzero == 1 || nonzero == 0 ? bitwidth
4167 : bitwidth - floor_log2 (nonzero) - 1);
4168 }
4169
4170 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4171 known_x, known_mode, known_ret);
4172 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4173 known_x, known_mode, known_ret);
4174 result = MAX (1, MIN (num0, num1) - 1);
4175
4176 #ifdef POINTERS_EXTEND_UNSIGNED
4177 /* If pointers extend signed and this is an addition or subtraction
4178 to a pointer in Pmode, all the bits above ptr_mode are known to be
4179 sign bit copies. */
4180 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4181 && (code == PLUS || code == MINUS)
4182 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4183 result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
4184 - GET_MODE_BITSIZE (ptr_mode) + 1),
4185 result);
4186 #endif
4187 return result;
4188
4189 case MULT:
4190 /* The number of bits of the product is the sum of the number of
4191 bits of both terms.  However, unless one of the terms is known
4192 to be positive, we must allow for an additional bit since negating
4193 a negative number can remove one sign bit copy. */
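      /* For example, with num0 == 20 and num1 == 16 in a 32-bit mode the
	 computation below gives 20 + 16 - 32 == 4 guaranteed copies, minus
	 one more unless at least one operand is known to be nonnegative.  */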
4194
4195 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4196 known_x, known_mode, known_ret);
4197 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4198 known_x, known_mode, known_ret);
4199
4200 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4201 if (result > 0
4202 && (bitwidth > HOST_BITS_PER_WIDE_INT
4203 || (((nonzero_bits (XEXP (x, 0), mode)
4204 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4205 && ((nonzero_bits (XEXP (x, 1), mode)
4206 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
4207 result--;
4208
4209 return MAX (1, result);
4210
4211 case UDIV:
4212 /* The result must be <= the first operand. If the first operand
4213 has the high bit set, we know nothing about the number of sign
4214 bit copies. */
4215 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4216 return 1;
4217 else if ((nonzero_bits (XEXP (x, 0), mode)
4218 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4219 return 1;
4220 else
4221 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4222 known_x, known_mode, known_ret);
4223
4224 case UMOD:
4225 /* The result must be <= the second operand. */
4226 return cached_num_sign_bit_copies (XEXP (x, 1), mode,
4227 known_x, known_mode, known_ret);
4228
4229 case DIV:
4230 /* Similar to unsigned division, except that we have to worry about
4231 the case where the divisor is negative, in which case the result
4232 can need one additional bit.  */
4233 result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4234 known_x, known_mode, known_ret);
4235 if (result > 1
4236 && (bitwidth > HOST_BITS_PER_WIDE_INT
4237 || (nonzero_bits (XEXP (x, 1), mode)
4238 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4239 result--;
4240
4241 return result;
4242
4243 case MOD:
4244 result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4245 known_x, known_mode, known_ret);
4246 if (result > 1
4247 && (bitwidth > HOST_BITS_PER_WIDE_INT
4248 || (nonzero_bits (XEXP (x, 1), mode)
4249 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4250 result--;
4251
4252 return result;
4253
4254 case ASHIFTRT:
4255 /* An arithmetic right shift by a constant adds that many bits equal
4256 to the sign bit.  */
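      /* For example, (ashiftrt X 8) where X already has 5 sign bit copies
	 has MIN (bitwidth, 5 + 8) == 13 of them in a 32-bit mode.  */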
4257 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4258 known_x, known_mode, known_ret);
4259 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4260 && INTVAL (XEXP (x, 1)) > 0)
4261 num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
4262
4263 return num0;
4264
4265 case ASHIFT:
4266 /* Left shifts destroy copies. */
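      /* For example, (ashift X 3) where X has 10 sign bit copies is left
	 with MAX (1, 10 - 3) == 7; a variable or out-of-range shift count
	 gives the safe answer of 1.  */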
4267 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4268 || INTVAL (XEXP (x, 1)) < 0
4269 || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
4270 return 1;
4271
4272 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4273 known_x, known_mode, known_ret);
4274 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
4275
4276 case IF_THEN_ELSE:
4277 num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4278 known_x, known_mode, known_ret);
4279 num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
4280 known_x, known_mode, known_ret);
4281 return MIN (num0, num1);
4282
4283 case EQ: case NE: case GE: case GT: case LE: case LT:
4284 case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
4285 case GEU: case GTU: case LEU: case LTU:
4286 case UNORDERED: case ORDERED:
4287 /* The result is either 0 or STORE_FLAG_VALUE.  If that value is negative,
4288 take its 1's complement and remask.  Then see how many zero bits we have.  */
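      /* For example, with STORE_FLAG_VALUE == 1 the result is 0 or 1,
	 giving bitwidth - 1 copies; with STORE_FLAG_VALUE == -1 it is
	 0 or -1, giving bitwidth copies.  */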
4289 nonzero = STORE_FLAG_VALUE;
4290 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4291 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4292 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4293
4294 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4295
4296 default:
4297 break;
4298 }
4299
4300 /* If we haven't been able to figure it out by one of the above rules,
4301 see if some of the high-order bits are known to be zero. If so,
4302 count those bits and return one less than that amount. If we can't
4303 safely compute the mask for this mode, be conservative and return 1.  */
4304
4305 bitwidth = GET_MODE_BITSIZE (mode);
4306 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4307 return 1;
4308
4309 nonzero = nonzero_bits (x, mode);
4310 return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
4311 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
4312 }
4313
4314 /* Calculate the rtx_cost of a single instruction. A return value of
4315 zero indicates an instruction pattern without a known cost. */
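/* For example, a pattern (set (reg:SI 0) (plus:SI (reg:SI 1) (reg:SI 2)))
   costs whatever rtx_cost reports for the PLUS, or COSTS_N_INSNS (1) when
   that cost is zero; a PARALLEL containing two SETs is rejected with 0.  */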
4316
4317 int
4318 insn_rtx_cost (rtx pat)
4319 {
4320 int i, cost;
4321 rtx set;
4322
4323 /* Extract the single set rtx from the instruction pattern.
4324 We can't use single_set since we only have the pattern. */
4325 if (GET_CODE (pat) == SET)
4326 set = pat;
4327 else if (GET_CODE (pat) == PARALLEL)
4328 {
4329 set = NULL_RTX;
4330 for (i = 0; i < XVECLEN (pat, 0); i++)
4331 {
4332 rtx x = XVECEXP (pat, 0, i);
4333 if (GET_CODE (x) == SET)
4334 {
4335 if (set)
4336 return 0;
4337 set = x;
4338 }
4339 }
4340 if (!set)
4341 return 0;
4342 }
4343 else
4344 return 0;
4345
4346 cost = rtx_cost (SET_SRC (set), SET);
4347 return cost > 0 ? cost : COSTS_N_INSNS (1);
4348 }
4349
4350 /* Given an insn INSN and condition COND, return the condition in a
4351 canonical form to simplify testing by callers. Specifically:
4352
4353 (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
4354 (2) Both operands will be machine operands; (cc0) will have been replaced.
4355 (3) If an operand is a constant, it will be the second operand.
4356 (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
4357 for GE, GEU, and LEU.
4358
4359 If the condition cannot be understood, or is an inequality floating-point
4360 comparison which needs to be reversed, 0 will be returned.
4361
4362 If REVERSE is nonzero, then reverse the condition prior to canonicalizing it.
4363
4364 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4365 insn used in locating the condition was found. If a replacement test
4366 of the condition is desired, it should be placed in front of that
4367 insn and we will be sure that the inputs are still valid.
4368
4369 If WANT_REG is nonzero, we wish the condition to be relative to that
4370 register, if possible. Therefore, do not canonicalize the condition
4371 further. If ALLOW_CC_MODE is nonzero, allow the condition returned
4372 to be a compare to a CC mode register.
4373
4374 If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
4375 and at INSN. */
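/* For example, (le (reg:SI 0) (const_int 5)) is canonicalized below into
   (lt (reg:SI 0) (const_int 6)) per rule (4), and a constant first operand
   such as (gt (const_int 3) (reg:SI 0)) is swapped into
   (lt (reg:SI 0) (const_int 3)).  */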
4376
4377 rtx
4378 canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
4379 rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
4380 {
4381 enum rtx_code code;
4382 rtx prev = insn;
4383 rtx set;
4384 rtx tem;
4385 rtx op0, op1;
4386 int reverse_code = 0;
4387 enum machine_mode mode;
4388
4389 code = GET_CODE (cond);
4390 mode = GET_MODE (cond);
4391 op0 = XEXP (cond, 0);
4392 op1 = XEXP (cond, 1);
4393
4394 if (reverse)
4395 code = reversed_comparison_code (cond, insn);
4396 if (code == UNKNOWN)
4397 return 0;
4398
4399 if (earliest)
4400 *earliest = insn;
4401
4402 /* If we are comparing a register with zero, see if the register is set
4403 in the previous insn to a COMPARE or a comparison operation. Perform
4404 the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
4405 in cse.c.  */
4406
4407 while ((GET_RTX_CLASS (code) == RTX_COMPARE
4408 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
4409 && op1 == CONST0_RTX (GET_MODE (op0))
4410 && op0 != want_reg)
4411 {
4412 /* Set nonzero when we find something of interest. */
4413 rtx x = 0;
4414
4415 #ifdef HAVE_cc0
4416 /* If comparison with cc0, import actual comparison from compare
4417 insn. */
4418 if (op0 == cc0_rtx)
4419 {
4420 if ((prev = prev_nonnote_insn (prev)) == 0
4421 || !NONJUMP_INSN_P (prev)
4422 || (set = single_set (prev)) == 0
4423 || SET_DEST (set) != cc0_rtx)
4424 return 0;
4425
4426 op0 = SET_SRC (set);
4427 op1 = CONST0_RTX (GET_MODE (op0));
4428 if (earliest)
4429 *earliest = prev;
4430 }
4431 #endif
4432
4433 /* If this is a COMPARE, pick up the two things being compared. */
4434 if (GET_CODE (op0) == COMPARE)
4435 {
4436 op1 = XEXP (op0, 1);
4437 op0 = XEXP (op0, 0);
4438 continue;
4439 }
4440 else if (!REG_P (op0))
4441 break;
4442
4443 /* Go back to the previous insn. Stop if it is not an INSN. We also
4444 stop if it isn't a single set or if it has a REG_INC note because
4445 we don't want to bother dealing with it. */
4446
4447 if ((prev = prev_nonnote_insn (prev)) == 0
4448 || !NONJUMP_INSN_P (prev)
4449 || FIND_REG_INC_NOTE (prev, NULL_RTX))
4450 break;
4451
4452 set = set_of (op0, prev);
4453
4454 if (set
4455 && (GET_CODE (set) != SET
4456 || !rtx_equal_p (SET_DEST (set), op0)))
4457 break;
4458
4459 /* If this is setting OP0, get what it sets it to if it looks
4460 relevant. */
4461 if (set)
4462 {
4463 enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
4464 #ifdef FLOAT_STORE_FLAG_VALUE
4465 REAL_VALUE_TYPE fsfv;
4466 #endif
4467
4468 /* ??? We may not combine comparisons done in a CCmode with
4469 comparisons not done in a CCmode. This is to aid targets
4470 like Alpha that have an IEEE compliant EQ instruction, and
4471 a non-IEEE compliant BEQ instruction. The use of CCmode is
4472 actually artificial, simply to prevent the combination, but
4473 should not affect other platforms.
4474
4475 However, we must allow VOIDmode comparisons to match either
4476 CCmode or non-CCmode comparison, because some ports have
4477 modeless comparisons inside branch patterns.
4478
4479 ??? This mode check should perhaps look more like the mode check
4480 in simplify_comparison in combine. */
4481
4482 if ((GET_CODE (SET_SRC (set)) == COMPARE
4483 || (((code == NE
4484 || (code == LT
4485 && GET_MODE_CLASS (inner_mode) == MODE_INT
4486 && (GET_MODE_BITSIZE (inner_mode)
4487 <= HOST_BITS_PER_WIDE_INT)
4488 && (STORE_FLAG_VALUE
4489 & ((HOST_WIDE_INT) 1
4490 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4491 #ifdef FLOAT_STORE_FLAG_VALUE
4492 || (code == LT
4493 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
4494 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4495 REAL_VALUE_NEGATIVE (fsfv)))
4496 #endif
4497 ))
4498 && COMPARISON_P (SET_SRC (set))))
4499 && (((GET_MODE_CLASS (mode) == MODE_CC)
4500 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4501 || mode == VOIDmode || inner_mode == VOIDmode))
4502 x = SET_SRC (set);
4503 else if (((code == EQ
4504 || (code == GE
4505 && (GET_MODE_BITSIZE (inner_mode)
4506 <= HOST_BITS_PER_WIDE_INT)
4507 && GET_MODE_CLASS (inner_mode) == MODE_INT
4508 && (STORE_FLAG_VALUE
4509 & ((HOST_WIDE_INT) 1
4510 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4511 #ifdef FLOAT_STORE_FLAG_VALUE
4512 || (code == GE
4513 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
4514 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4515 REAL_VALUE_NEGATIVE (fsfv)))
4516 #endif
4517 ))
4518 && COMPARISON_P (SET_SRC (set))
4519 && (((GET_MODE_CLASS (mode) == MODE_CC)
4520 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4521 || mode == VOIDmode || inner_mode == VOIDmode))
4522
4523 {
4524 reverse_code = 1;
4525 x = SET_SRC (set);
4526 }
4527 else
4528 break;
4529 }
4530
4531 else if (reg_set_p (op0, prev))
4532 /* If this sets OP0, but not directly, we have to give up. */
4533 break;
4534
4535 if (x)
4536 {
4537 /* If the caller is expecting the condition to be valid at INSN,
4538 make sure X doesn't change before INSN. */
4539 if (valid_at_insn_p)
4540 if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
4541 break;
4542 if (COMPARISON_P (x))
4543 code = GET_CODE (x);
4544 if (reverse_code)
4545 {
4546 code = reversed_comparison_code (x, prev);
4547 if (code == UNKNOWN)
4548 return 0;
4549 reverse_code = 0;
4550 }
4551
4552 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
4553 if (earliest)
4554 *earliest = prev;
4555 }
4556 }
4557
4558 /* If constant is first, put it last. */
4559 if (CONSTANT_P (op0))
4560 code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;
4561
4562 /* If OP0 is the result of a comparison, we weren't able to find what
4563 was really being compared, so fail. */
4564 if (!allow_cc_mode
4565 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
4566 return 0;
4567
4568 /* Canonicalize any ordered comparison against an integer constant that
4569 involves equality (LE, GE, LEU, GEU) into a strict comparison, provided
4570 we can do the computation in the relevant mode without overflow.  */
4571
4572 if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
4573 && GET_CODE (op1) == CONST_INT
4574 && GET_MODE (op0) != VOIDmode
4575 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
4576 {
4577 HOST_WIDE_INT const_val = INTVAL (op1);
4578 unsigned HOST_WIDE_INT uconst_val = const_val;
4579 unsigned HOST_WIDE_INT max_val
4580 = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));
4581
4582 switch (code)
4583 {
4584 case LE:
4585 if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
4586 code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
4587 break;
4588
4589 /* When cross-compiling, const_val might be sign-extended from
4590 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT.  */
4591 case GE:
4592 if ((HOST_WIDE_INT) (const_val & max_val)
4593 != (((HOST_WIDE_INT) 1
4594 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
4595 code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
4596 break;
4597
4598 case LEU:
4599 if (uconst_val < max_val)
4600 code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
4601 break;
4602
4603 case GEU:
4604 if (uconst_val != 0)
4605 code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
4606 break;
4607
4608 default:
4609 break;
4610 }
4611 }
4612
4613 /* Never return CC0; return zero instead. */
4614 if (CC0_P (op0))
4615 return 0;
4616
4617 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
4618 }
4619
4620 /* Given a jump insn JUMP, return the condition that will cause it to branch
4621 to its JUMP_LABEL. If the condition cannot be understood, or is an
4622 inequality floating-point comparison which needs to be reversed, 0 will
4623 be returned.
4624
4625 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4626 insn used in locating the condition was found. If a replacement test
4627 of the condition is desired, it should be placed in front of that
4628 insn and we will be sure that the inputs are still valid. If EARLIEST
4629 is null, the returned condition will be valid at INSN.
4630
4631 If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
4632 compare to a CC mode register.
4633
4634 VALID_AT_INSN_P is the same as for canonicalize_condition. */
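/* For example, for a jump whose pattern is
     (set (pc) (if_then_else (ne (reg:SI 0) (const_int 0))
			     (label_ref 23) (pc)))
   this returns (ne (reg:SI 0) (const_int 0)); if the label_ref instead sits
   in the "else" arm, the branch is taken when the condition is false, so the
   condition is reversed before being canonicalized.  */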
4635
4636 rtx
4637 get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
4638 {
4639 rtx cond;
4640 int reverse;
4641 rtx set;
4642
4643 /* If this is not a standard conditional jump, we can't parse it. */
4644 if (!JUMP_P (jump)
4645 || ! any_condjump_p (jump))
4646 return 0;
4647 set = pc_set (jump);
4648
4649 cond = XEXP (SET_SRC (set), 0);
4650
4651 /* If this branches to JUMP_LABEL when the condition is false, reverse
4652 the condition. */
4653 reverse
4654 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4655 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);
4656
4657 return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
4658 allow_cc_mode, valid_at_insn_p);
4659 }
4660