[gcc.git] / gcc / rtlanal.c
1 /* Analyze RTL for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software
4 Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
22
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "toplev.h"
29 #include "rtl.h"
30 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "recog.h"
33 #include "target.h"
34 #include "output.h"
35 #include "tm_p.h"
36 #include "flags.h"
37 #include "real.h"
38 #include "regs.h"
39 #include "function.h"
40 #include "df.h"
41 #include "tree.h"
42
43 /* Information about a subreg of a hard register. */
44 struct subreg_info
45 {
46 /* Offset of first hard register involved in the subreg. */
47 int offset;
48 /* Number of hard registers involved in the subreg. */
49 int nregs;
50 /* Whether this subreg can be represented as a hard reg with the new
51 mode. */
52 bool representable_p;
53 };
54
55 /* Forward declarations */
56 static void set_of_1 (rtx, rtx, void *);
57 static bool covers_regno_p (const_rtx, unsigned int);
58 static bool covers_regno_no_parallel_p (const_rtx, unsigned int);
59 static int rtx_referenced_p_1 (rtx *, void *);
60 static int computed_jump_p_1 (const_rtx);
61 static void parms_set (rtx, rtx, void *);
62 static void subreg_get_info (unsigned int, enum machine_mode,
63 unsigned int, enum machine_mode,
64 struct subreg_info *);
65
66 static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode,
67 rtx, enum machine_mode,
68 unsigned HOST_WIDE_INT);
69 static unsigned HOST_WIDE_INT nonzero_bits1 (rtx, enum machine_mode,
70 rtx, enum machine_mode,
71 unsigned HOST_WIDE_INT);
72 static unsigned int cached_num_sign_bit_copies (rtx, enum machine_mode, rtx,
73 enum machine_mode,
74 unsigned int);
75 static unsigned int num_sign_bit_copies1 (rtx, enum machine_mode, rtx,
76 enum machine_mode, unsigned int);
77
78 /* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
79 -1 if a code has no such operand. */
80 static int non_rtx_starting_operands[NUM_RTX_CODE];
81
82 /* Bit flags that specify the machine subtype we are compiling for.
83 Bits are tested using macros TARGET_... defined in the tm.h file
84 and set by `-m...' switches. Must be defined in rtlanal.c. */
85
86 int target_flags;
87
88 /* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
89 If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
90 SIGN_EXTEND then while narrowing we also have to enforce the
91 representation and sign-extend the value to mode DESTINATION_REP.
92
93 If the value is already sign-extended to DESTINATION_REP mode we
94 can just switch to DESTINATION mode on it. For each pair of
95 integral modes SOURCE and DESTINATION, when truncating from SOURCE
96 to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
97 contains the number of high-order bits in SOURCE that have to be
98 copies of the sign-bit so that we can do this mode-switch to
99 DESTINATION. */
100
101 static unsigned int
102 num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];
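/* A worked illustration of the table above (hypothetical numbers, assuming
   a target where SImode is 32 bits, QImode is 8 bits, and
   TARGET_MODE_REP_EXTENDED says QImode values are kept sign-extended in
   SImode): truncating from SImode to QImode can be treated as a simple
   mode switch only when the 24 high-order bits of the SImode value are
   copies of the sign bit, i.e.

     num_sign_bit_copies_in_rep[SImode][QImode] == 32 - 8 == 24.  */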
103 \f
104 /* Return 1 if the value of X is unstable
105 (would be different at a different point in the program).
106 The frame pointer, arg pointer, etc. are considered stable
107 (within one function) and so is anything marked `unchanging'. */
108
109 int
110 rtx_unstable_p (const_rtx x)
111 {
112 const RTX_CODE code = GET_CODE (x);
113 int i;
114 const char *fmt;
115
116 switch (code)
117 {
118 case MEM:
119 return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));
120
121 case CONST:
122 case CONST_INT:
123 case CONST_DOUBLE:
124 case CONST_VECTOR:
125 case SYMBOL_REF:
126 case LABEL_REF:
127 return 0;
128
129 case REG:
130 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
131 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
132 /* The arg pointer varies if it is not a fixed register. */
133 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
134 return 0;
135 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
136 /* ??? When call-clobbered, the value is stable modulo the restore
137 that must happen after a call. This currently screws up local-alloc
138 into believing that the restore is not needed. */
139 if (x == pic_offset_table_rtx)
140 return 0;
141 #endif
142 return 1;
143
144 case ASM_OPERANDS:
145 if (MEM_VOLATILE_P (x))
146 return 1;
147
148 /* Fall through. */
149
150 default:
151 break;
152 }
153
154 fmt = GET_RTX_FORMAT (code);
155 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
156 if (fmt[i] == 'e')
157 {
158 if (rtx_unstable_p (XEXP (x, i)))
159 return 1;
160 }
161 else if (fmt[i] == 'E')
162 {
163 int j;
164 for (j = 0; j < XVECLEN (x, i); j++)
165 if (rtx_unstable_p (XVECEXP (x, i, j)))
166 return 1;
167 }
168
169 return 0;
170 }
171
172 /* Return 1 if X has a value that can vary even between two
173 executions of the program. 0 means X can be compared reliably
174 against certain constants or near-constants.
175 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
176 zero, we are slightly more conservative.
177 The frame pointer and the arg pointer are considered constant. */
178
179 int
180 rtx_varies_p (rtx x, int for_alias)
181 {
182 RTX_CODE code;
183 int i;
184 const char *fmt;
185
186 if (!x)
187 return 0;
188
189 code = GET_CODE (x);
190 switch (code)
191 {
192 case MEM:
193 return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
194
195 case CONST:
196 case CONST_INT:
197 case CONST_DOUBLE:
198 case CONST_VECTOR:
199 case SYMBOL_REF:
200 case LABEL_REF:
201 return 0;
202
203 case REG:
204 /* Note that we have to test for the actual rtx used for the frame
205 and arg pointers and not just the register number in case we have
206 eliminated the frame and/or arg pointer and are using it
207 for pseudos. */
208 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
209 /* The arg pointer varies if it is not a fixed register. */
210 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
211 return 0;
212 if (x == pic_offset_table_rtx
213 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
214 /* ??? When call-clobbered, the value is stable modulo the restore
215 that must happen after a call. This currently screws up
216 local-alloc into believing that the restore is not needed, so we
217 must return 0 only if we are called from alias analysis. */
218 && for_alias
219 #endif
220 )
221 return 0;
222 return 1;
223
224 case LO_SUM:
225 /* The operand 0 of a LO_SUM is considered constant
226 (in fact it is related specifically to operand 1)
227 during alias analysis. */
228 return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
229 || rtx_varies_p (XEXP (x, 1), for_alias);
230
231 case ASM_OPERANDS:
232 if (MEM_VOLATILE_P (x))
233 return 1;
234
235 /* Fall through. */
236
237 default:
238 break;
239 }
240
241 fmt = GET_RTX_FORMAT (code);
242 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
243 if (fmt[i] == 'e')
244 {
245 if (rtx_varies_p (XEXP (x, i), for_alias))
246 return 1;
247 }
248 else if (fmt[i] == 'E')
249 {
250 int j;
251 for (j = 0; j < XVECLEN (x, i); j++)
252 if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
253 return 1;
254 }
255
256 return 0;
257 }
258
259 /* Return nonzero if the use of X as an address in a MEM can cause a trap.
260 MODE is the mode of the MEM (not that of X) and UNALIGNED_MEMS controls
261 whether nonzero is returned for unaligned memory accesses on strict
262 alignment machines. */
263
264 static int
265 rtx_addr_can_trap_p_1 (const_rtx x, enum machine_mode mode, bool unaligned_mems)
266 {
267 enum rtx_code code = GET_CODE (x);
268
269 switch (code)
270 {
271 case SYMBOL_REF:
272 return SYMBOL_REF_WEAK (x);
273
274 case LABEL_REF:
275 return 0;
276
277 case REG:
278 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
279 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
280 || x == stack_pointer_rtx
281 /* The arg pointer varies if it is not a fixed register. */
282 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
283 return 0;
284 /* All of the virtual frame registers are stack references. */
285 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
286 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
287 return 0;
288 return 1;
289
290 case CONST:
291 return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);
292
293 case PLUS:
294 /* An address is assumed not to trap if:
295 - it is an address that can't trap plus a constant integer,
296 with the proper remainder modulo the mode size if we are
297 considering unaligned memory references. */
298 if (!rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems)
299 && GET_CODE (XEXP (x, 1)) == CONST_INT)
300 {
301 HOST_WIDE_INT offset;
302
303 if (!STRICT_ALIGNMENT
304 || !unaligned_mems
305 || GET_MODE_SIZE (mode) == 0)
306 return 0;
307
308 offset = INTVAL (XEXP (x, 1));
309
310 #ifdef SPARC_STACK_BOUNDARY_HACK
311 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
312 the real alignment of %sp. However, when it does this, the
313 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
314 if (SPARC_STACK_BOUNDARY_HACK
315 && (XEXP (x, 0) == stack_pointer_rtx
316 || XEXP (x, 0) == hard_frame_pointer_rtx))
317 offset -= STACK_POINTER_OFFSET;
318 #endif
319
320 return offset % GET_MODE_SIZE (mode) != 0;
321 }
322
323 /* - or it is the pic register plus a constant. */
324 if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))
325 return 0;
326
327 return 1;
328
329 case LO_SUM:
330 case PRE_MODIFY:
331 return rtx_addr_can_trap_p_1 (XEXP (x, 1), mode, unaligned_mems);
332
333 case PRE_DEC:
334 case PRE_INC:
335 case POST_DEC:
336 case POST_INC:
337 case POST_MODIFY:
338 return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);
339
340 default:
341 break;
342 }
343
 344 /* If it isn't one of the cases above, it can cause a trap. */
345 return 1;
346 }
347
348 /* Return nonzero if the use of X as an address in a MEM can cause a trap. */
349
350 int
351 rtx_addr_can_trap_p (const_rtx x)
352 {
353 return rtx_addr_can_trap_p_1 (x, VOIDmode, false);
354 }
355
356 /* Return true if X is an address that is known to not be zero. */
357
358 bool
359 nonzero_address_p (const_rtx x)
360 {
361 const enum rtx_code code = GET_CODE (x);
362
363 switch (code)
364 {
365 case SYMBOL_REF:
366 return !SYMBOL_REF_WEAK (x);
367
368 case LABEL_REF:
369 return true;
370
371 case REG:
372 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
373 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
374 || x == stack_pointer_rtx
375 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
376 return true;
377 /* All of the virtual frame registers are stack references. */
378 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
379 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
380 return true;
381 return false;
382
383 case CONST:
384 return nonzero_address_p (XEXP (x, 0));
385
386 case PLUS:
387 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
388 return nonzero_address_p (XEXP (x, 0));
389 /* Handle PIC references. */
390 else if (XEXP (x, 0) == pic_offset_table_rtx
391 && CONSTANT_P (XEXP (x, 1)))
392 return true;
393 return false;
394
395 case PRE_MODIFY:
396 /* Similar to the above; allow positive offsets. Further, since
397 auto-inc is only allowed in memories, the register must be a
398 pointer. */
399 if (GET_CODE (XEXP (x, 1)) == CONST_INT
400 && INTVAL (XEXP (x, 1)) > 0)
401 return true;
402 return nonzero_address_p (XEXP (x, 0));
403
404 case PRE_INC:
405 /* Similarly. Further, the offset is always positive. */
406 return true;
407
408 case PRE_DEC:
409 case POST_DEC:
410 case POST_INC:
411 case POST_MODIFY:
412 return nonzero_address_p (XEXP (x, 0));
413
414 case LO_SUM:
415 return nonzero_address_p (XEXP (x, 1));
416
417 default:
418 break;
419 }
420
 421 /* If it isn't one of the cases above, it might be zero. */
422 return false;
423 }
424
425 /* Return 1 if X refers to a memory location whose address
426 cannot be compared reliably with constant addresses,
427 or if X refers to a BLKmode memory object.
428 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
429 zero, we are slightly more conservative. */
430
431 int
432 rtx_addr_varies_p (rtx x, int for_alias)
433 {
434 enum rtx_code code;
435 int i;
436 const char *fmt;
437
438 if (x == 0)
439 return 0;
440
441 code = GET_CODE (x);
442 if (code == MEM)
443 return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
444
445 fmt = GET_RTX_FORMAT (code);
446 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
447 if (fmt[i] == 'e')
448 {
449 if (rtx_addr_varies_p (XEXP (x, i), for_alias))
450 return 1;
451 }
452 else if (fmt[i] == 'E')
453 {
454 int j;
455 for (j = 0; j < XVECLEN (x, i); j++)
456 if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
457 return 1;
458 }
459 return 0;
460 }
461 \f
462 /* Return the value of the integer term in X, if one is apparent;
463 otherwise return 0.
464 Only obvious integer terms are detected.
465 This is used in cse.c with the `related_value' field. */
466
467 HOST_WIDE_INT
468 get_integer_term (const_rtx x)
469 {
470 if (GET_CODE (x) == CONST)
471 x = XEXP (x, 0);
472
473 if (GET_CODE (x) == MINUS
474 && GET_CODE (XEXP (x, 1)) == CONST_INT)
475 return - INTVAL (XEXP (x, 1));
476 if (GET_CODE (x) == PLUS
477 && GET_CODE (XEXP (x, 1)) == CONST_INT)
478 return INTVAL (XEXP (x, 1));
479 return 0;
480 }
481
482 /* If X is a constant, return the value sans apparent integer term;
483 otherwise return 0.
484 Only obvious integer terms are detected. */
485
486 rtx
487 get_related_value (const_rtx x)
488 {
489 if (GET_CODE (x) != CONST)
490 return 0;
491 x = XEXP (x, 0);
492 if (GET_CODE (x) == PLUS
493 && GET_CODE (XEXP (x, 1)) == CONST_INT)
494 return XEXP (x, 0);
495 else if (GET_CODE (x) == MINUS
496 && GET_CODE (XEXP (x, 1)) == CONST_INT)
497 return XEXP (x, 0);
498 return 0;
499 }
500 \f
501 /* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points
502 to somewhere in the same object or object_block as SYMBOL. */
503
504 bool
505 offset_within_block_p (const_rtx symbol, HOST_WIDE_INT offset)
506 {
507 tree decl;
508
509 if (GET_CODE (symbol) != SYMBOL_REF)
510 return false;
511
512 if (offset == 0)
513 return true;
514
515 if (offset > 0)
516 {
517 if (CONSTANT_POOL_ADDRESS_P (symbol)
518 && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol)))
519 return true;
520
521 decl = SYMBOL_REF_DECL (symbol);
522 if (decl && offset < int_size_in_bytes (TREE_TYPE (decl)))
523 return true;
524 }
525
526 if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol)
527 && SYMBOL_REF_BLOCK (symbol)
528 && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0
529 && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol)
530 < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size))
531 return true;
532
533 return false;
534 }
535
536 /* Split X into a base and a constant offset, storing them in *BASE_OUT
537 and *OFFSET_OUT respectively. */
538
539 void
540 split_const (rtx x, rtx *base_out, rtx *offset_out)
541 {
542 if (GET_CODE (x) == CONST)
543 {
544 x = XEXP (x, 0);
545 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
546 {
547 *base_out = XEXP (x, 0);
548 *offset_out = XEXP (x, 1);
549 return;
550 }
551 }
552 *base_out = x;
553 *offset_out = const0_rtx;
554 }
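/* Illustrative sketch (hypothetical caller, not part of this file).  A
   typical use is to separate an address into its symbolic base and
   constant displacement:

     rtx base, offset;
     split_const (addr, &base, &offset);
     if (INTVAL (offset) != 0)
       ...  ADDR is BASE plus a nonzero byte offset  ...

   For ADDR = (const (plus (symbol_ref "foo") (const_int 8))) this yields
   BASE = (symbol_ref "foo") and OFFSET = (const_int 8); for any other ADDR
   it yields BASE = ADDR and OFFSET = const0_rtx, so OFFSET is always a
   CONST_INT.  */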
555 \f
556 /* Return the number of places FIND appears within X. If COUNT_DEST is
557 zero, we do not count occurrences inside the destination of a SET. */
558
559 int
560 count_occurrences (const_rtx x, const_rtx find, int count_dest)
561 {
562 int i, j;
563 enum rtx_code code;
564 const char *format_ptr;
565 int count;
566
567 if (x == find)
568 return 1;
569
570 code = GET_CODE (x);
571
572 switch (code)
573 {
574 case REG:
575 case CONST_INT:
576 case CONST_DOUBLE:
577 case CONST_VECTOR:
578 case SYMBOL_REF:
579 case CODE_LABEL:
580 case PC:
581 case CC0:
582 return 0;
583
584 case EXPR_LIST:
585 count = count_occurrences (XEXP (x, 0), find, count_dest);
586 if (XEXP (x, 1))
587 count += count_occurrences (XEXP (x, 1), find, count_dest);
588 return count;
589
590 case MEM:
591 if (MEM_P (find) && rtx_equal_p (x, find))
592 return 1;
593 break;
594
595 case SET:
596 if (SET_DEST (x) == find && ! count_dest)
597 return count_occurrences (SET_SRC (x), find, count_dest);
598 break;
599
600 default:
601 break;
602 }
603
604 format_ptr = GET_RTX_FORMAT (code);
605 count = 0;
606
607 for (i = 0; i < GET_RTX_LENGTH (code); i++)
608 {
609 switch (*format_ptr++)
610 {
611 case 'e':
612 count += count_occurrences (XEXP (x, i), find, count_dest);
613 break;
614
615 case 'E':
616 for (j = 0; j < XVECLEN (x, i); j++)
617 count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
618 break;
619 }
620 }
621 return count;
622 }
623
624 \f
625 /* Nonzero if register REG appears somewhere within IN.
626 Also works if REG is not a register; in this case it checks
627 for a subexpression of IN that is Lisp "equal" to REG. */
628
629 int
630 reg_mentioned_p (const_rtx reg, const_rtx in)
631 {
632 const char *fmt;
633 int i;
634 enum rtx_code code;
635
636 if (in == 0)
637 return 0;
638
639 if (reg == in)
640 return 1;
641
642 if (GET_CODE (in) == LABEL_REF)
643 return reg == XEXP (in, 0);
644
645 code = GET_CODE (in);
646
647 switch (code)
648 {
649 /* Compare registers by number. */
650 case REG:
651 return REG_P (reg) && REGNO (in) == REGNO (reg);
652
653 /* These codes have no constituent expressions
654 and are unique. */
655 case SCRATCH:
656 case CC0:
657 case PC:
658 return 0;
659
660 case CONST_INT:
661 case CONST_VECTOR:
662 case CONST_DOUBLE:
663 /* These are kept unique for a given value. */
664 return 0;
665
666 default:
667 break;
668 }
669
670 if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
671 return 1;
672
673 fmt = GET_RTX_FORMAT (code);
674
675 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
676 {
677 if (fmt[i] == 'E')
678 {
679 int j;
680 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
681 if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
682 return 1;
683 }
684 else if (fmt[i] == 'e'
685 && reg_mentioned_p (reg, XEXP (in, i)))
686 return 1;
687 }
688 return 0;
689 }
690 \f
691 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
692 no CODE_LABEL insn. */
693
694 int
695 no_labels_between_p (const_rtx beg, const_rtx end)
696 {
697 rtx p;
698 if (beg == end)
699 return 0;
700 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
701 if (LABEL_P (p))
702 return 0;
703 return 1;
704 }
705
706 /* Nonzero if register REG is used in an insn between
707 FROM_INSN and TO_INSN (exclusive of those two). */
708
709 int
710 reg_used_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn)
711 {
712 rtx insn;
713
714 if (from_insn == to_insn)
715 return 0;
716
717 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
718 if (INSN_P (insn)
719 && (reg_overlap_mentioned_p (reg, PATTERN (insn))
720 || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
721 return 1;
722 return 0;
723 }
724 \f
725 /* Nonzero if the old value of X, a register, is referenced in BODY. If X
726 is entirely replaced by a new value and the only use is as a SET_DEST,
727 we do not consider it a reference. */
728
729 int
730 reg_referenced_p (const_rtx x, const_rtx body)
731 {
732 int i;
733
734 switch (GET_CODE (body))
735 {
736 case SET:
737 if (reg_overlap_mentioned_p (x, SET_SRC (body)))
738 return 1;
739
740 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
741 of a REG that occupies all of the REG, the insn references X if
742 it is mentioned in the destination. */
743 if (GET_CODE (SET_DEST (body)) != CC0
744 && GET_CODE (SET_DEST (body)) != PC
745 && !REG_P (SET_DEST (body))
746 && ! (GET_CODE (SET_DEST (body)) == SUBREG
747 && REG_P (SUBREG_REG (SET_DEST (body)))
748 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
749 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
750 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
751 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
752 && reg_overlap_mentioned_p (x, SET_DEST (body)))
753 return 1;
754 return 0;
755
756 case ASM_OPERANDS:
757 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
758 if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
759 return 1;
760 return 0;
761
762 case CALL:
763 case USE:
764 case IF_THEN_ELSE:
765 return reg_overlap_mentioned_p (x, body);
766
767 case TRAP_IF:
768 return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
769
770 case PREFETCH:
771 return reg_overlap_mentioned_p (x, XEXP (body, 0));
772
773 case UNSPEC:
774 case UNSPEC_VOLATILE:
775 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
776 if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
777 return 1;
778 return 0;
779
780 case PARALLEL:
781 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
782 if (reg_referenced_p (x, XVECEXP (body, 0, i)))
783 return 1;
784 return 0;
785
786 case CLOBBER:
787 if (MEM_P (XEXP (body, 0)))
788 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
789 return 1;
790 return 0;
791
792 case COND_EXEC:
793 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
794 return 1;
795 return reg_referenced_p (x, COND_EXEC_CODE (body));
796
797 default:
798 return 0;
799 }
800 }
801 \f
802 /* Nonzero if register REG is set or clobbered in an insn between
803 FROM_INSN and TO_INSN (exclusive of those two). */
804
805 int
806 reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
807 {
808 rtx insn;
809
810 if (from_insn == to_insn)
811 return 0;
812
813 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
814 if (INSN_P (insn) && reg_set_p (reg, insn))
815 return 1;
816 return 0;
817 }
818
819 /* Internals of reg_set_between_p. */
820 int
821 reg_set_p (rtx reg, rtx insn)
822 {
823 /* We can be passed an insn or part of one. If we are passed an insn,
824 check if a side-effect of the insn clobbers REG. */
825 if (INSN_P (insn)
826 && (FIND_REG_INC_NOTE (insn, reg)
827 || (CALL_P (insn)
828 && ((REG_P (reg)
829 && REGNO (reg) < FIRST_PSEUDO_REGISTER
830 && overlaps_hard_reg_set_p (regs_invalidated_by_call,
831 GET_MODE (reg), REGNO (reg)))
832 || MEM_P (reg)
833 || find_reg_fusage (insn, CLOBBER, reg)))))
834 return 1;
835
836 return set_of (reg, insn) != NULL_RTX;
837 }
838
839 /* Similar to reg_set_between_p, but check all registers in X. Return 0
840 only if none of them are modified between START and END. Return 1 if
 841 X contains a MEM; this routine does use memory aliasing. */
842
843 int
844 modified_between_p (rtx x, rtx start, rtx end)
845 {
846 enum rtx_code code = GET_CODE (x);
847 const char *fmt;
848 int i, j;
849 rtx insn;
850
851 if (start == end)
852 return 0;
853
854 switch (code)
855 {
856 case CONST_INT:
857 case CONST_DOUBLE:
858 case CONST_VECTOR:
859 case CONST:
860 case SYMBOL_REF:
861 case LABEL_REF:
862 return 0;
863
864 case PC:
865 case CC0:
866 return 1;
867
868 case MEM:
869 if (modified_between_p (XEXP (x, 0), start, end))
870 return 1;
871 if (MEM_READONLY_P (x))
872 return 0;
873 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
874 if (memory_modified_in_insn_p (x, insn))
875 return 1;
876 return 0;
877 break;
878
879 case REG:
880 return reg_set_between_p (x, start, end);
881
882 default:
883 break;
884 }
885
886 fmt = GET_RTX_FORMAT (code);
887 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
888 {
889 if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
890 return 1;
891
892 else if (fmt[i] == 'E')
893 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
894 if (modified_between_p (XVECEXP (x, i, j), start, end))
895 return 1;
896 }
897
898 return 0;
899 }
900
901 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
902 of them are modified in INSN. Return 1 if X contains a MEM; this routine
903 does use memory aliasing. */
904
905 int
906 modified_in_p (rtx x, rtx insn)
907 {
908 enum rtx_code code = GET_CODE (x);
909 const char *fmt;
910 int i, j;
911
912 switch (code)
913 {
914 case CONST_INT:
915 case CONST_DOUBLE:
916 case CONST_VECTOR:
917 case CONST:
918 case SYMBOL_REF:
919 case LABEL_REF:
920 return 0;
921
922 case PC:
923 case CC0:
924 return 1;
925
926 case MEM:
927 if (modified_in_p (XEXP (x, 0), insn))
928 return 1;
929 if (MEM_READONLY_P (x))
930 return 0;
931 if (memory_modified_in_insn_p (x, insn))
932 return 1;
933 return 0;
934 break;
935
936 case REG:
937 return reg_set_p (x, insn);
938
939 default:
940 break;
941 }
942
943 fmt = GET_RTX_FORMAT (code);
944 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
945 {
946 if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
947 return 1;
948
949 else if (fmt[i] == 'E')
950 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
951 if (modified_in_p (XVECEXP (x, i, j), insn))
952 return 1;
953 }
954
955 return 0;
956 }
957 \f
958 /* Helper function for set_of. */
959 struct set_of_data
960 {
961 rtx found;
962 rtx pat;
963 };
964
965 static void
966 set_of_1 (rtx x, rtx pat, void *data1)
967 {
968 struct set_of_data *data = (struct set_of_data *) (data1);
969 if (rtx_equal_p (x, data->pat)
970 || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
971 data->found = pat;
972 }
973
 974 /* Given an INSN, return a SET or CLOBBER expression that does modify PAT
975 (either directly or via STRICT_LOW_PART and similar modifiers). */
976 rtx
977 set_of (rtx pat, rtx insn)
978 {
979 struct set_of_data data;
980 data.found = NULL_RTX;
981 data.pat = pat;
982 note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
983 return data.found;
984 }
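/* Illustrative sketch (hypothetical caller, not part of this file).
   Unlike the boolean reg_set_p above, which just tests the result against
   NULL_RTX, a caller of set_of can inspect how the register was modified:

     rtx set_or_clobber = set_of (reg, insn);
     if (set_or_clobber && GET_CODE (set_or_clobber) == CLOBBER)
       ...  REG is clobbered by INSN rather than given a usable value  ...
*/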
985 \f
986 /* Given an INSN, return a SET expression if this insn has only a single SET.
 987 It may also have CLOBBERs, USEs, or SETs whose outputs
988 will not be used, which we ignore. */
989
990 rtx
991 single_set_2 (const_rtx insn, const_rtx pat)
992 {
993 rtx set = NULL;
994 int set_verified = 1;
995 int i;
996
997 if (GET_CODE (pat) == PARALLEL)
998 {
999 for (i = 0; i < XVECLEN (pat, 0); i++)
1000 {
1001 rtx sub = XVECEXP (pat, 0, i);
1002 switch (GET_CODE (sub))
1003 {
1004 case USE:
1005 case CLOBBER:
1006 break;
1007
1008 case SET:
 1009 /* We can consider insns having multiple sets, where all
 1010 but one are dead, as single set insns. In the common case
 1011 only a single set is present in the pattern, so we want
 1012 to avoid checking for REG_UNUSED notes unless necessary.
 1013
 1014 When we reach a set for the first time, we just expect it is
 1015 the single set we are looking for; only when more
 1016 sets are found in the insn do we check them. */
1017 if (!set_verified)
1018 {
1019 if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
1020 && !side_effects_p (set))
1021 set = NULL;
1022 else
1023 set_verified = 1;
1024 }
1025 if (!set)
1026 set = sub, set_verified = 0;
1027 else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
1028 || side_effects_p (sub))
1029 return NULL_RTX;
1030 break;
1031
1032 default:
1033 return NULL_RTX;
1034 }
1035 }
1036 }
1037 return set;
1038 }
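/* Illustrative sketch (hypothetical caller, not part of this file).
   Callers normally go through the single_set wrapper declared in rtl.h,
   which handles a plain SET pattern directly and falls back to
   single_set_2 for PARALLELs:

     rtx set = single_set (insn);
     if (set && REG_P (SET_DEST (set)))
       ...  INSN is, in effect, one assignment to a register  ...

   find_last_value and find_constant_src later in this file use the same
   idiom.  */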
1039
1040 /* Given an INSN, return nonzero if it has more than one SET, else return
1041 zero. */
1042
1043 int
1044 multiple_sets (const_rtx insn)
1045 {
1046 int found;
1047 int i;
1048
1049 /* INSN must be an insn. */
1050 if (! INSN_P (insn))
1051 return 0;
1052
1053 /* Only a PARALLEL can have multiple SETs. */
1054 if (GET_CODE (PATTERN (insn)) == PARALLEL)
1055 {
1056 for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1057 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1058 {
1059 /* If we have already found a SET, then return now. */
1060 if (found)
1061 return 1;
1062 else
1063 found = 1;
1064 }
1065 }
1066
1067 /* Either zero or one SET. */
1068 return 0;
1069 }
1070 \f
1071 /* Return nonzero if the destination of SET equals the source
1072 and there are no side effects. */
1073
1074 int
1075 set_noop_p (const_rtx set)
1076 {
1077 rtx src = SET_SRC (set);
1078 rtx dst = SET_DEST (set);
1079
1080 if (dst == pc_rtx && src == pc_rtx)
1081 return 1;
1082
1083 if (MEM_P (dst) && MEM_P (src))
1084 return rtx_equal_p (dst, src) && !side_effects_p (dst);
1085
1086 if (GET_CODE (dst) == ZERO_EXTRACT)
1087 return rtx_equal_p (XEXP (dst, 0), src)
1088 && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
1089 && !side_effects_p (src);
1090
1091 if (GET_CODE (dst) == STRICT_LOW_PART)
1092 dst = XEXP (dst, 0);
1093
1094 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1095 {
1096 if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1097 return 0;
1098 src = SUBREG_REG (src);
1099 dst = SUBREG_REG (dst);
1100 }
1101
1102 return (REG_P (src) && REG_P (dst)
1103 && REGNO (src) == REGNO (dst));
1104 }
1105 \f
1106 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1107 value to itself. */
1108
1109 int
1110 noop_move_p (rtx insn)
1111 {
1112 rtx pat = PATTERN (insn);
1113
1114 if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1115 return 1;
1116
1117 /* Insns carrying these notes are useful later on. */
1118 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1119 return 0;
1120
 1121 /* For now treat an insn with a REG_RETVAL note as a
 1122 special insn which should not be considered a no-op. */
1123 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
1124 return 0;
1125
1126 if (GET_CODE (pat) == SET && set_noop_p (pat))
1127 return 1;
1128
1129 if (GET_CODE (pat) == PARALLEL)
1130 {
1131 int i;
1132 /* If nothing but SETs of registers to themselves,
1133 this insn can also be deleted. */
1134 for (i = 0; i < XVECLEN (pat, 0); i++)
1135 {
1136 rtx tem = XVECEXP (pat, 0, i);
1137
1138 if (GET_CODE (tem) == USE
1139 || GET_CODE (tem) == CLOBBER)
1140 continue;
1141
1142 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1143 return 0;
1144 }
1145
1146 return 1;
1147 }
1148 return 0;
1149 }
1150 \f
1151
1152 /* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1153 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1154 If the object was modified, if we hit a partial assignment to X, or hit a
1155 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1156 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
1157 be the src. */
1158
1159 rtx
1160 find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
1161 {
1162 rtx p;
1163
1164 for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
1165 p = PREV_INSN (p))
1166 if (INSN_P (p))
1167 {
1168 rtx set = single_set (p);
1169 rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1170
1171 if (set && rtx_equal_p (x, SET_DEST (set)))
1172 {
1173 rtx src = SET_SRC (set);
1174
1175 if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1176 src = XEXP (note, 0);
1177
1178 if ((valid_to == NULL_RTX
1179 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1180 /* Reject hard registers because we don't usually want
1181 to use them; we'd rather use a pseudo. */
1182 && (! (REG_P (src)
1183 && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1184 {
1185 *pinsn = p;
1186 return src;
1187 }
1188 }
1189
1190 /* If set in non-simple way, we don't have a value. */
1191 if (reg_set_p (x, p))
1192 break;
1193 }
1194
1195 return x;
1196 }
1197 \f
1198 /* Return nonzero if register in range [REGNO, ENDREGNO)
1199 appears either explicitly or implicitly in X
1200 other than being stored into.
1201
1202 References contained within the substructure at LOC do not count.
1203 LOC may be zero, meaning don't ignore anything. */
1204
1205 int
1206 refers_to_regno_p (unsigned int regno, unsigned int endregno, const_rtx x,
1207 rtx *loc)
1208 {
1209 int i;
1210 unsigned int x_regno;
1211 RTX_CODE code;
1212 const char *fmt;
1213
1214 repeat:
1215 /* The contents of a REG_NONNEG note is always zero, so we must come here
1216 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1217 if (x == 0)
1218 return 0;
1219
1220 code = GET_CODE (x);
1221
1222 switch (code)
1223 {
1224 case REG:
1225 x_regno = REGNO (x);
1226
 1227 /* If we are modifying the stack, frame, or argument pointer, it will
1228 clobber a virtual register. In fact, we could be more precise,
1229 but it isn't worth it. */
1230 if ((x_regno == STACK_POINTER_REGNUM
1231 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1232 || x_regno == ARG_POINTER_REGNUM
1233 #endif
1234 || x_regno == FRAME_POINTER_REGNUM)
1235 && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1236 return 1;
1237
1238 return endregno > x_regno && regno < END_REGNO (x);
1239
1240 case SUBREG:
1241 /* If this is a SUBREG of a hard reg, we can see exactly which
1242 registers are being modified. Otherwise, handle normally. */
1243 if (REG_P (SUBREG_REG (x))
1244 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1245 {
1246 unsigned int inner_regno = subreg_regno (x);
1247 unsigned int inner_endregno
1248 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1249 ? subreg_nregs (x) : 1);
1250
1251 return endregno > inner_regno && regno < inner_endregno;
1252 }
1253 break;
1254
1255 case CLOBBER:
1256 case SET:
1257 if (&SET_DEST (x) != loc
1258 /* Note setting a SUBREG counts as referring to the REG it is in for
1259 a pseudo but not for hard registers since we can
1260 treat each word individually. */
1261 && ((GET_CODE (SET_DEST (x)) == SUBREG
1262 && loc != &SUBREG_REG (SET_DEST (x))
1263 && REG_P (SUBREG_REG (SET_DEST (x)))
1264 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1265 && refers_to_regno_p (regno, endregno,
1266 SUBREG_REG (SET_DEST (x)), loc))
1267 || (!REG_P (SET_DEST (x))
1268 && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1269 return 1;
1270
1271 if (code == CLOBBER || loc == &SET_SRC (x))
1272 return 0;
1273 x = SET_SRC (x);
1274 goto repeat;
1275
1276 default:
1277 break;
1278 }
1279
1280 /* X does not match, so try its subexpressions. */
1281
1282 fmt = GET_RTX_FORMAT (code);
1283 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1284 {
1285 if (fmt[i] == 'e' && loc != &XEXP (x, i))
1286 {
1287 if (i == 0)
1288 {
1289 x = XEXP (x, 0);
1290 goto repeat;
1291 }
1292 else
1293 if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1294 return 1;
1295 }
1296 else if (fmt[i] == 'E')
1297 {
1298 int j;
1299 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1300 if (loc != &XVECEXP (x, i, j)
1301 && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1302 return 1;
1303 }
1304 }
1305 return 0;
1306 }
1307
1308 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1309 we check if any register number in X conflicts with the relevant register
1310 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1311 contains a MEM (we don't bother checking for memory addresses that can't
 1312 conflict because we expect this to be a rare case). */
1313
1314 int
1315 reg_overlap_mentioned_p (const_rtx x, const_rtx in)
1316 {
1317 unsigned int regno, endregno;
1318
 1319 /* If either argument is a constant, then modifying X can not
 1320 affect IN. We check IN here; the test of CONSTANT_P (x) is folded
 1321 into the default case of the switch statement below. */
1322 if (CONSTANT_P (in))
1323 return 0;
1324
1325 recurse:
1326 switch (GET_CODE (x))
1327 {
1328 case STRICT_LOW_PART:
1329 case ZERO_EXTRACT:
1330 case SIGN_EXTRACT:
1331 /* Overly conservative. */
1332 x = XEXP (x, 0);
1333 goto recurse;
1334
1335 case SUBREG:
1336 regno = REGNO (SUBREG_REG (x));
1337 if (regno < FIRST_PSEUDO_REGISTER)
1338 regno = subreg_regno (x);
1339 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1340 ? subreg_nregs (x) : 1);
1341 goto do_reg;
1342
1343 case REG:
1344 regno = REGNO (x);
1345 endregno = END_REGNO (x);
1346 do_reg:
1347 return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1348
1349 case MEM:
1350 {
1351 const char *fmt;
1352 int i;
1353
1354 if (MEM_P (in))
1355 return 1;
1356
1357 fmt = GET_RTX_FORMAT (GET_CODE (in));
1358 for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1359 if (fmt[i] == 'e')
1360 {
1361 if (reg_overlap_mentioned_p (x, XEXP (in, i)))
1362 return 1;
1363 }
1364 else if (fmt[i] == 'E')
1365 {
1366 int j;
1367 for (j = XVECLEN (in, i) - 1; j >= 0; --j)
1368 if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
1369 return 1;
1370 }
1371
1372 return 0;
1373 }
1374
1375 case SCRATCH:
1376 case PC:
1377 case CC0:
1378 return reg_mentioned_p (x, in);
1379
1380 case PARALLEL:
1381 {
1382 int i;
1383
1384 /* If any register in here refers to it we return true. */
1385 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1386 if (XEXP (XVECEXP (x, 0, i), 0) != 0
1387 && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1388 return 1;
1389 return 0;
1390 }
1391
1392 default:
1393 gcc_assert (CONSTANT_P (x));
1394 return 0;
1395 }
1396 }
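/* Illustrative sketch (hypothetical caller, not part of this file).  A
   simple dependence test between a value and another insn's pattern:

     if (reg_overlap_mentioned_p (SET_DEST (set), PATTERN (other_insn)))
       ...  OTHER_INSN mentions (part of) the destination  ...

   For hard registers the overlap is computed register by register, so a
   multi-word register overlaps any of its component hard registers.  */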
1397 \f
1398 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1399 (X would be the pattern of an insn). DATA is an arbitrary pointer,
1400 ignored by note_stores, but passed to FUN.
1401
1402 FUN receives three arguments:
1403 1. the REG, MEM, CC0 or PC being stored in or clobbered,
1404 2. the SET or CLOBBER rtx that does the store,
1405 3. the pointer DATA provided to note_stores.
1406
1407 If the item being stored in or clobbered is a SUBREG of a hard register,
1408 the SUBREG will be passed. */
1409
1410 void
1411 note_stores (rtx x, void (*fun) (rtx, rtx, void *), void *data)
1412 {
1413 int i;
1414
1415 if (GET_CODE (x) == COND_EXEC)
1416 x = COND_EXEC_CODE (x);
1417
1418 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1419 {
1420 rtx dest = SET_DEST (x);
1421
1422 while ((GET_CODE (dest) == SUBREG
1423 && (!REG_P (SUBREG_REG (dest))
1424 || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1425 || GET_CODE (dest) == ZERO_EXTRACT
1426 || GET_CODE (dest) == STRICT_LOW_PART)
1427 dest = XEXP (dest, 0);
1428
1429 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1430 each of whose first operand is a register. */
1431 if (GET_CODE (dest) == PARALLEL)
1432 {
1433 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1434 if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1435 (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1436 }
1437 else
1438 (*fun) (dest, x, data);
1439 }
1440
1441 else if (GET_CODE (x) == PARALLEL)
1442 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1443 note_stores (XVECEXP (x, 0, i), fun, data);
1444 }
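/* Illustrative sketch (hypothetical callback and caller, not part of this
   file).  A pass that wants every hard register set or clobbered by an
   insn typically passes a small callback of the required shape:

     static void
     record_hard_reg_set (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
     {
       if (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
         SET_HARD_REG_BIT (*(HARD_REG_SET *) data, REGNO (dest));
     }

     note_stores (PATTERN (insn), record_hard_reg_set, &set_hard_regs);

   A full version would mark all hard_regno_nregs registers occupied by
   DEST; set_of_1 earlier in this file is a real callback of the same
   shape.  */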
1445 \f
 1446 /* Like note_stores, but call FUN for each expression that is being
1447 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1448 FUN for each expression, not any interior subexpressions. FUN receives a
1449 pointer to the expression and the DATA passed to this function.
1450
1451 Note that this is not quite the same test as that done in reg_referenced_p
1452 since that considers something as being referenced if it is being
1453 partially set, while we do not. */
1454
1455 void
1456 note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
1457 {
1458 rtx body = *pbody;
1459 int i;
1460
1461 switch (GET_CODE (body))
1462 {
1463 case COND_EXEC:
1464 (*fun) (&COND_EXEC_TEST (body), data);
1465 note_uses (&COND_EXEC_CODE (body), fun, data);
1466 return;
1467
1468 case PARALLEL:
1469 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1470 note_uses (&XVECEXP (body, 0, i), fun, data);
1471 return;
1472
1473 case SEQUENCE:
1474 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1475 note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data);
1476 return;
1477
1478 case USE:
1479 (*fun) (&XEXP (body, 0), data);
1480 return;
1481
1482 case ASM_OPERANDS:
1483 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1484 (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1485 return;
1486
1487 case TRAP_IF:
1488 (*fun) (&TRAP_CONDITION (body), data);
1489 return;
1490
1491 case PREFETCH:
1492 (*fun) (&XEXP (body, 0), data);
1493 return;
1494
1495 case UNSPEC:
1496 case UNSPEC_VOLATILE:
1497 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1498 (*fun) (&XVECEXP (body, 0, i), data);
1499 return;
1500
1501 case CLOBBER:
1502 if (MEM_P (XEXP (body, 0)))
1503 (*fun) (&XEXP (XEXP (body, 0), 0), data);
1504 return;
1505
1506 case SET:
1507 {
1508 rtx dest = SET_DEST (body);
1509
 1510 /* For sets we process everything in the source, plus the registers in
 1511 the address of a MEM destination and the operands of a ZERO_EXTRACT. */
1512 (*fun) (&SET_SRC (body), data);
1513
1514 if (GET_CODE (dest) == ZERO_EXTRACT)
1515 {
1516 (*fun) (&XEXP (dest, 1), data);
1517 (*fun) (&XEXP (dest, 2), data);
1518 }
1519
1520 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1521 dest = XEXP (dest, 0);
1522
1523 if (MEM_P (dest))
1524 (*fun) (&XEXP (dest, 0), data);
1525 }
1526 return;
1527
1528 default:
1529 /* All the other possibilities never store. */
1530 (*fun) (pbody, data);
1531 return;
1532 }
1533 }
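/* Illustrative sketch (hypothetical callback and caller, not part of this
   file).  Because FUN receives a pointer to each used expression, a caller
   can inspect or even rewrite uses in place:

     struct reg_use_search { rtx reg; bool found; };

     static void
     look_for_reg_use (rtx *loc, void *data)
     {
       struct reg_use_search *d = (struct reg_use_search *) data;
       if (reg_overlap_mentioned_p (d->reg, *loc))
         d->found = true;
     }

     struct reg_use_search d = { reg, false };
     note_uses (&PATTERN (insn), look_for_reg_use, &d);

   Remember that, unlike reg_referenced_p, this does not treat a partially
   set destination as a use.  */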
1534 \f
1535 /* Return nonzero if X's old contents don't survive after INSN.
 1536 This will be true if X is (cc0), or if X is a register and
 1537 either X dies in INSN or INSN entirely sets X.
1538
1539 "Entirely set" means set directly and not through a SUBREG, or
1540 ZERO_EXTRACT, so no trace of the old contents remains.
1541 Likewise, REG_INC does not count.
1542
1543 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1544 but for this use that makes no difference, since regs don't overlap
1545 during their lifetimes. Therefore, this function may be used
1546 at any time after deaths have been computed.
1547
1548 If REG is a hard reg that occupies multiple machine registers, this
1549 function will only return 1 if each of those registers will be replaced
1550 by INSN. */
1551
1552 int
1553 dead_or_set_p (const_rtx insn, const_rtx x)
1554 {
1555 unsigned int regno, end_regno;
1556 unsigned int i;
1557
1558 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1559 if (GET_CODE (x) == CC0)
1560 return 1;
1561
1562 gcc_assert (REG_P (x));
1563
1564 regno = REGNO (x);
1565 end_regno = END_REGNO (x);
1566 for (i = regno; i < end_regno; i++)
1567 if (! dead_or_set_regno_p (insn, i))
1568 return 0;
1569
1570 return 1;
1571 }
1572
 1573 /* Return TRUE iff DEST is a register, or a subreg of a register that
 1574 doesn't change the number of words of the inner register, and
 1575 TEST_REGNO is one of the registers it covers. */
1576
1577 static bool
1578 covers_regno_no_parallel_p (const_rtx dest, unsigned int test_regno)
1579 {
1580 unsigned int regno, endregno;
1581
1582 if (GET_CODE (dest) == SUBREG
1583 && (((GET_MODE_SIZE (GET_MODE (dest))
1584 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1585 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1586 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1587 dest = SUBREG_REG (dest);
1588
1589 if (!REG_P (dest))
1590 return false;
1591
1592 regno = REGNO (dest);
1593 endregno = END_REGNO (dest);
1594 return (test_regno >= regno && test_regno < endregno);
1595 }
1596
1597 /* Like covers_regno_no_parallel_p, but also handles PARALLELs where
1598 any member matches the covers_regno_no_parallel_p criteria. */
1599
1600 static bool
1601 covers_regno_p (const_rtx dest, unsigned int test_regno)
1602 {
1603 if (GET_CODE (dest) == PARALLEL)
1604 {
1605 /* Some targets place small structures in registers for return
1606 values of functions, and those registers are wrapped in
1607 PARALLELs that we may see as the destination of a SET. */
1608 int i;
1609
1610 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1611 {
1612 rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
1613 if (inner != NULL_RTX
1614 && covers_regno_no_parallel_p (inner, test_regno))
1615 return true;
1616 }
1617
1618 return false;
1619 }
1620 else
1621 return covers_regno_no_parallel_p (dest, test_regno);
1622 }
1623
1624 /* Utility function for dead_or_set_p to check an individual register. */
1625
1626 int
1627 dead_or_set_regno_p (const_rtx insn, unsigned int test_regno)
1628 {
1629 const_rtx pattern;
1630
1631 /* See if there is a death note for something that includes TEST_REGNO. */
1632 if (find_regno_note (insn, REG_DEAD, test_regno))
1633 return 1;
1634
1635 if (CALL_P (insn)
1636 && find_regno_fusage (insn, CLOBBER, test_regno))
1637 return 1;
1638
1639 pattern = PATTERN (insn);
1640
1641 if (GET_CODE (pattern) == COND_EXEC)
1642 pattern = COND_EXEC_CODE (pattern);
1643
1644 if (GET_CODE (pattern) == SET)
1645 return covers_regno_p (SET_DEST (pattern), test_regno);
1646 else if (GET_CODE (pattern) == PARALLEL)
1647 {
1648 int i;
1649
1650 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1651 {
1652 rtx body = XVECEXP (pattern, 0, i);
1653
1654 if (GET_CODE (body) == COND_EXEC)
1655 body = COND_EXEC_CODE (body);
1656
1657 if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1658 && covers_regno_p (SET_DEST (body), test_regno))
1659 return 1;
1660 }
1661 }
1662
1663 return 0;
1664 }
1665
1666 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1667 If DATUM is nonzero, look for one whose datum is DATUM. */
1668
1669 rtx
1670 find_reg_note (const_rtx insn, enum reg_note kind, const_rtx datum)
1671 {
1672 rtx link;
1673
1674 gcc_assert (insn);
1675
1676 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1677 if (! INSN_P (insn))
1678 return 0;
1679 if (datum == 0)
1680 {
1681 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1682 if (REG_NOTE_KIND (link) == kind)
1683 return link;
1684 return 0;
1685 }
1686
1687 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1688 if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
1689 return link;
1690 return 0;
1691 }
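/* Illustrative sketch (hypothetical caller, not part of this file).  Two
   common ways of calling find_reg_note:

     rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
     if (note)
       ...  XEXP (note, 0) is the expression the destination equals  ...

     if (find_reg_note (insn, REG_DEAD, reg))
       ...  there is a REG_DEAD note whose datum is exactly REG  ...

   The second form requires pointer equality with the datum; use
   find_regno_note below when any note overlapping a hard register should
   match.  */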
1692
1693 /* Return the reg-note of kind KIND in insn INSN which applies to register
1694 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1695 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1696 it might be the case that the note overlaps REGNO. */
1697
1698 rtx
1699 find_regno_note (const_rtx insn, enum reg_note kind, unsigned int regno)
1700 {
1701 rtx link;
1702
1703 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1704 if (! INSN_P (insn))
1705 return 0;
1706
1707 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1708 if (REG_NOTE_KIND (link) == kind
1709 /* Verify that it is a register, so that scratch and MEM won't cause a
1710 problem here. */
1711 && REG_P (XEXP (link, 0))
1712 && REGNO (XEXP (link, 0)) <= regno
1713 && END_REGNO (XEXP (link, 0)) > regno)
1714 return link;
1715 return 0;
1716 }
1717
1718 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1719 has such a note. */
1720
1721 rtx
1722 find_reg_equal_equiv_note (const_rtx insn)
1723 {
1724 rtx link;
1725
1726 if (!INSN_P (insn))
1727 return 0;
1728
1729 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1730 if (REG_NOTE_KIND (link) == REG_EQUAL
1731 || REG_NOTE_KIND (link) == REG_EQUIV)
1732 {
1733 /* FIXME: We should never have REG_EQUAL/REG_EQUIV notes on
1734 insns that have multiple sets. Checking single_set to
1735 make sure of this is not the proper check, as explained
1736 in the comment in set_unique_reg_note.
1737
1738 This should be changed into an assert. */
1739 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
1740 return 0;
1741 return link;
1742 }
1743 return NULL;
1744 }
1745
1746 /* Check whether INSN is a single_set whose source is known to be
1747 equivalent to a constant. Return that constant if so, otherwise
1748 return null. */
1749
1750 rtx
1751 find_constant_src (const_rtx insn)
1752 {
1753 rtx note, set, x;
1754
1755 set = single_set (insn);
1756 if (set)
1757 {
1758 x = avoid_constant_pool_reference (SET_SRC (set));
1759 if (CONSTANT_P (x))
1760 return x;
1761 }
1762
1763 note = find_reg_equal_equiv_note (insn);
1764 if (note && CONSTANT_P (XEXP (note, 0)))
1765 return XEXP (note, 0);
1766
1767 return NULL_RTX;
1768 }
1769
1770 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1771 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1772
1773 int
1774 find_reg_fusage (const_rtx insn, enum rtx_code code, const_rtx datum)
1775 {
1776 /* If it's not a CALL_INSN, it can't possibly have a
1777 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
1778 if (!CALL_P (insn))
1779 return 0;
1780
1781 gcc_assert (datum);
1782
1783 if (!REG_P (datum))
1784 {
1785 rtx link;
1786
1787 for (link = CALL_INSN_FUNCTION_USAGE (insn);
1788 link;
1789 link = XEXP (link, 1))
1790 if (GET_CODE (XEXP (link, 0)) == code
1791 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
1792 return 1;
1793 }
1794 else
1795 {
1796 unsigned int regno = REGNO (datum);
1797
1798 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1799 to pseudo registers, so don't bother checking. */
1800
1801 if (regno < FIRST_PSEUDO_REGISTER)
1802 {
1803 unsigned int end_regno = END_HARD_REGNO (datum);
1804 unsigned int i;
1805
1806 for (i = regno; i < end_regno; i++)
1807 if (find_regno_fusage (insn, code, i))
1808 return 1;
1809 }
1810 }
1811
1812 return 0;
1813 }
1814
1815 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
1816 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1817
1818 int
1819 find_regno_fusage (const_rtx insn, enum rtx_code code, unsigned int regno)
1820 {
1821 rtx link;
1822
1823 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1824 to pseudo registers, so don't bother checking. */
1825
1826 if (regno >= FIRST_PSEUDO_REGISTER
1827 || !CALL_P (insn) )
1828 return 0;
1829
1830 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1831 {
1832 rtx op, reg;
1833
1834 if (GET_CODE (op = XEXP (link, 0)) == code
1835 && REG_P (reg = XEXP (op, 0))
1836 && REGNO (reg) <= regno
1837 && END_HARD_REGNO (reg) > regno)
1838 return 1;
1839 }
1840
1841 return 0;
1842 }
1843
1844 /* Return true if INSN is a call to a pure function. */
1845
1846 int
1847 pure_call_p (const_rtx insn)
1848 {
1849 const_rtx link;
1850
1851 if (!CALL_P (insn) || ! CONST_OR_PURE_CALL_P (insn))
1852 return 0;
1853
1854 /* Look for the note that differentiates const and pure functions. */
1855 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1856 {
1857 rtx u, m;
1858
1859 if (GET_CODE (u = XEXP (link, 0)) == USE
1860 && MEM_P (m = XEXP (u, 0)) && GET_MODE (m) == BLKmode
1861 && GET_CODE (XEXP (m, 0)) == SCRATCH)
1862 return 1;
1863 }
1864
1865 return 0;
1866 }
1867 \f
1868 /* Remove register note NOTE from the REG_NOTES of INSN. */
1869
1870 void
1871 remove_note (rtx insn, const_rtx note)
1872 {
1873 rtx link;
1874
1875 if (note == NULL_RTX)
1876 return;
1877
1878 if (REG_NOTES (insn) == note)
1879 REG_NOTES (insn) = XEXP (note, 1);
1880 else
1881 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1882 if (XEXP (link, 1) == note)
1883 {
1884 XEXP (link, 1) = XEXP (note, 1);
1885 break;
1886 }
1887
1888 switch (REG_NOTE_KIND (note))
1889 {
1890 case REG_EQUAL:
1891 case REG_EQUIV:
1892 df_notes_rescan (insn);
1893 break;
1894 default:
1895 break;
1896 }
1897 }
1898
1899 /* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes. */
1900
1901 void
1902 remove_reg_equal_equiv_notes (rtx insn)
1903 {
1904 rtx *loc;
1905
1906 loc = &REG_NOTES (insn);
1907 while (*loc)
1908 {
1909 enum reg_note kind = REG_NOTE_KIND (*loc);
1910 if (kind == REG_EQUAL || kind == REG_EQUIV)
1911 *loc = XEXP (*loc, 1);
1912 else
1913 loc = &XEXP (*loc, 1);
1914 }
1915 }
1916
1917 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1918 return 1 if it is found. A simple equality test is used to determine if
1919 NODE matches. */
1920
1921 int
1922 in_expr_list_p (const_rtx listp, const_rtx node)
1923 {
1924 const_rtx x;
1925
1926 for (x = listp; x; x = XEXP (x, 1))
1927 if (node == XEXP (x, 0))
1928 return 1;
1929
1930 return 0;
1931 }
1932
1933 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1934 remove that entry from the list if it is found.
1935
1936 A simple equality test is used to determine if NODE matches. */
1937
1938 void
1939 remove_node_from_expr_list (const_rtx node, rtx *listp)
1940 {
1941 rtx temp = *listp;
1942 rtx prev = NULL_RTX;
1943
1944 while (temp)
1945 {
1946 if (node == XEXP (temp, 0))
1947 {
1948 /* Splice the node out of the list. */
1949 if (prev)
1950 XEXP (prev, 1) = XEXP (temp, 1);
1951 else
1952 *listp = XEXP (temp, 1);
1953
1954 return;
1955 }
1956
1957 prev = temp;
1958 temp = XEXP (temp, 1);
1959 }
1960 }
1961 \f
1962 /* Nonzero if X contains any volatile instructions. These are instructions
 1963 which may cause unpredictable machine state, and thus no
1964 instructions should be moved or combined across them. This includes
1965 only volatile asms and UNSPEC_VOLATILE instructions. */
1966
1967 int
1968 volatile_insn_p (const_rtx x)
1969 {
1970 const RTX_CODE code = GET_CODE (x);
1971 switch (code)
1972 {
1973 case LABEL_REF:
1974 case SYMBOL_REF:
1975 case CONST_INT:
1976 case CONST:
1977 case CONST_DOUBLE:
1978 case CONST_VECTOR:
1979 case CC0:
1980 case PC:
1981 case REG:
1982 case SCRATCH:
1983 case CLOBBER:
1984 case ADDR_VEC:
1985 case ADDR_DIFF_VEC:
1986 case CALL:
1987 case MEM:
1988 return 0;
1989
1990 case UNSPEC_VOLATILE:
1991 /* case TRAP_IF: This isn't clear yet. */
1992 return 1;
1993
1994 case ASM_INPUT:
1995 case ASM_OPERANDS:
1996 if (MEM_VOLATILE_P (x))
1997 return 1;
1998
1999 default:
2000 break;
2001 }
2002
2003 /* Recursively scan the operands of this expression. */
2004
2005 {
2006 const char *const fmt = GET_RTX_FORMAT (code);
2007 int i;
2008
2009 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2010 {
2011 if (fmt[i] == 'e')
2012 {
2013 if (volatile_insn_p (XEXP (x, i)))
2014 return 1;
2015 }
2016 else if (fmt[i] == 'E')
2017 {
2018 int j;
2019 for (j = 0; j < XVECLEN (x, i); j++)
2020 if (volatile_insn_p (XVECEXP (x, i, j)))
2021 return 1;
2022 }
2023 }
2024 }
2025 return 0;
2026 }
2027
 2028 /* Nonzero if X contains any volatile memory references,
2029 UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
2030
2031 int
2032 volatile_refs_p (const_rtx x)
2033 {
2034 const RTX_CODE code = GET_CODE (x);
2035 switch (code)
2036 {
2037 case LABEL_REF:
2038 case SYMBOL_REF:
2039 case CONST_INT:
2040 case CONST:
2041 case CONST_DOUBLE:
2042 case CONST_VECTOR:
2043 case CC0:
2044 case PC:
2045 case REG:
2046 case SCRATCH:
2047 case CLOBBER:
2048 case ADDR_VEC:
2049 case ADDR_DIFF_VEC:
2050 return 0;
2051
2052 case UNSPEC_VOLATILE:
2053 return 1;
2054
2055 case MEM:
2056 case ASM_INPUT:
2057 case ASM_OPERANDS:
2058 if (MEM_VOLATILE_P (x))
2059 return 1;
2060
2061 default:
2062 break;
2063 }
2064
2065 /* Recursively scan the operands of this expression. */
2066
2067 {
2068 const char *const fmt = GET_RTX_FORMAT (code);
2069 int i;
2070
2071 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2072 {
2073 if (fmt[i] == 'e')
2074 {
2075 if (volatile_refs_p (XEXP (x, i)))
2076 return 1;
2077 }
2078 else if (fmt[i] == 'E')
2079 {
2080 int j;
2081 for (j = 0; j < XVECLEN (x, i); j++)
2082 if (volatile_refs_p (XVECEXP (x, i, j)))
2083 return 1;
2084 }
2085 }
2086 }
2087 return 0;
2088 }
2089
2090 /* Similar to above, except that it also rejects register pre- and post-
2091 incrementing. */
2092
2093 int
2094 side_effects_p (const_rtx x)
2095 {
2096 const RTX_CODE code = GET_CODE (x);
2097 switch (code)
2098 {
2099 case LABEL_REF:
2100 case SYMBOL_REF:
2101 case CONST_INT:
2102 case CONST:
2103 case CONST_DOUBLE:
2104 case CONST_VECTOR:
2105 case CC0:
2106 case PC:
2107 case REG:
2108 case SCRATCH:
2109 case ADDR_VEC:
2110 case ADDR_DIFF_VEC:
2111 return 0;
2112
2113 case CLOBBER:
2114 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2115 when some combination can't be done. If we see one, don't think
2116 that we can simplify the expression. */
2117 return (GET_MODE (x) != VOIDmode);
2118
2119 case PRE_INC:
2120 case PRE_DEC:
2121 case POST_INC:
2122 case POST_DEC:
2123 case PRE_MODIFY:
2124 case POST_MODIFY:
2125 case CALL:
2126 case UNSPEC_VOLATILE:
2127 /* case TRAP_IF: This isn't clear yet. */
2128 return 1;
2129
2130 case MEM:
2131 case ASM_INPUT:
2132 case ASM_OPERANDS:
2133 if (MEM_VOLATILE_P (x))
2134 return 1;
2135
2136 default:
2137 break;
2138 }
2139
2140 /* Recursively scan the operands of this expression. */
2141
2142 {
2143 const char *fmt = GET_RTX_FORMAT (code);
2144 int i;
2145
2146 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2147 {
2148 if (fmt[i] == 'e')
2149 {
2150 if (side_effects_p (XEXP (x, i)))
2151 return 1;
2152 }
2153 else if (fmt[i] == 'E')
2154 {
2155 int j;
2156 for (j = 0; j < XVECLEN (x, i); j++)
2157 if (side_effects_p (XVECEXP (x, i, j)))
2158 return 1;
2159 }
2160 }
2161 }
2162 return 0;
2163 }
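
/* Illustrative note (added for exposition; not part of the original
   sources): the three predicates above form a hierarchy.  For example, on

     (set (reg:SI 100) (mem:SI (post_inc:SI (reg:SI 101))))

   volatile_insn_p and volatile_refs_p return 0 for the SET_SRC (the MEM
   is not marked volatile), but side_effects_p returns 1 because of the
   POST_INC address.  A hypothetical caller might use it like this: */
#if 0
/* Sketch only; INSN is a hypothetical variable, single_set is the real
   helper declared in rtl.h.  */
static bool
can_remove_dead_set_p (rtx insn)
{
  rtx set = single_set (insn);
  /* Even if the value is dead, keep the insn when computing it has side
     effects (auto-increment, volatile access, calls, ...).  */
  return set != NULL_RTX && !side_effects_p (SET_SRC (set));
}
#endif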
2164 \f
2165 enum may_trap_p_flags
2166 {
2167 MTP_UNALIGNED_MEMS = 1,
2168 MTP_AFTER_MOVE = 2
2169 };
2170 /* Return nonzero if evaluating rtx X might cause a trap.
2171 (FLAGS & MTP_UNALIGNED_MEMS) controls whether nonzero is returned for
2172 unaligned memory accesses on strict alignment machines. If
2173 (FLAGS & MTP_AFTER_MOVE) is true, return nonzero even if the expression
2174 cannot trap at its current location but might become trapping if moved
2175 elsewhere. */
2176
2177 static int
2178 may_trap_p_1 (const_rtx x, unsigned flags)
2179 {
2180 int i;
2181 enum rtx_code code;
2182 const char *fmt;
2183 bool unaligned_mems = (flags & MTP_UNALIGNED_MEMS) != 0;
2184
2185 if (x == 0)
2186 return 0;
2187 code = GET_CODE (x);
2188 switch (code)
2189 {
2190 /* Handle these cases quickly. */
2191 case CONST_INT:
2192 case CONST_DOUBLE:
2193 case CONST_VECTOR:
2194 case SYMBOL_REF:
2195 case LABEL_REF:
2196 case CONST:
2197 case PC:
2198 case CC0:
2199 case REG:
2200 case SCRATCH:
2201 return 0;
2202
2203 case ASM_INPUT:
2204 case UNSPEC_VOLATILE:
2205 case TRAP_IF:
2206 return 1;
2207
2208 case ASM_OPERANDS:
2209 return MEM_VOLATILE_P (x);
2210
2211 /* Memory ref can trap unless it's a static var or a stack slot. */
2212 case MEM:
2213 if (/* MEM_NOTRAP_P only relates to the actual position of the memory
2214 reference; moving it out of a condition might cause its address
2215 to become invalid. */
2216 !(flags & MTP_AFTER_MOVE)
2217 && MEM_NOTRAP_P (x)
2218 && (!STRICT_ALIGNMENT || !unaligned_mems))
2219 return 0;
2220 return
2221 rtx_addr_can_trap_p_1 (XEXP (x, 0), GET_MODE (x), unaligned_mems);
2222
2223 /* Division by a non-constant might trap. */
2224 case DIV:
2225 case MOD:
2226 case UDIV:
2227 case UMOD:
2228 if (HONOR_SNANS (GET_MODE (x)))
2229 return 1;
2230 if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
2231 return flag_trapping_math;
2232 if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
2233 return 1;
2234 break;
2235
2236 case EXPR_LIST:
2237 /* An EXPR_LIST is used to represent a function call. This
2238 certainly may trap. */
2239 return 1;
2240
2241 case GE:
2242 case GT:
2243 case LE:
2244 case LT:
2245 case LTGT:
2246 case COMPARE:
2247 /* Some floating point comparisons may trap. */
2248 if (!flag_trapping_math)
2249 break;
2250 /* ??? There is no machine independent way to check for tests that trap
2251 when COMPARE is used, though many targets do make this distinction.
2252 For instance, sparc uses CCFPE for compares which generate exceptions
2253 and CCFP for compares which do not generate exceptions. */
2254 if (HONOR_NANS (GET_MODE (x)))
2255 return 1;
2256 /* But often the compare has some CC mode, so check operand
2257 modes as well. */
2258 if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2259 || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2260 return 1;
2261 break;
2262
2263 case EQ:
2264 case NE:
2265 if (HONOR_SNANS (GET_MODE (x)))
2266 return 1;
2267 /* Often comparison is CC mode, so check operand modes. */
2268 if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2269 || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
2270 return 1;
2271 break;
2272
2273 case FIX:
2274 /* Conversion of floating point might trap. */
2275 if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2276 return 1;
2277 break;
2278
2279 case NEG:
2280 case ABS:
2281 case SUBREG:
2282 /* These operations don't trap even with floating point. */
2283 break;
2284
2285 default:
2286 /* Any floating-point arithmetic may trap. */
2287 if (SCALAR_FLOAT_MODE_P (GET_MODE (x))
2288 && flag_trapping_math)
2289 return 1;
2290 }
2291
2292 fmt = GET_RTX_FORMAT (code);
2293 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2294 {
2295 if (fmt[i] == 'e')
2296 {
2297 if (may_trap_p_1 (XEXP (x, i), flags))
2298 return 1;
2299 }
2300 else if (fmt[i] == 'E')
2301 {
2302 int j;
2303 for (j = 0; j < XVECLEN (x, i); j++)
2304 if (may_trap_p_1 (XVECEXP (x, i, j), flags))
2305 return 1;
2306 }
2307 }
2308 return 0;
2309 }
2310
2311 /* Return nonzero if evaluating rtx X might cause a trap. */
2312
2313 int
2314 may_trap_p (const_rtx x)
2315 {
2316 return may_trap_p_1 (x, 0);
2317 }
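
/* Illustrative examples (added for exposition; not part of the original
   sources): (udiv:SI (reg:SI 100) (const_int 4)) is not may_trap_p,
   because the divisor is a nonzero constant, whereas
   (udiv:SI (reg:SI 100) (reg:SI 101)) is, since the divisor might be
   zero at run time.  For a MEM, the result depends on MEM_NOTRAP_P and
   on whether rtx_addr_can_trap_p_1 can prove the address safe.  */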
2318
2319 /* Return nonzero if evaluating rtx X might cause a trap, when the expression
2320 is moved from its current location by some optimization. */
2321
2322 int
2323 may_trap_after_code_motion_p (const_rtx x)
2324 {
2325 return may_trap_p_1 (x, MTP_AFTER_MOVE);
2326 }
2327
2328 /* Same as above, but additionally return nonzero if evaluating rtx X might
2329 cause a fault. We define a fault for the purpose of this function as an
2330 erroneous execution condition that cannot be encountered during the normal
2331 execution of a valid program; the typical example is an unaligned memory
2332 access on a strict alignment machine. The compiler guarantees that it
2333 doesn't generate code that will fault from a valid program, but this
2334 guarantee doesn't mean anything for individual instructions. Consider
2335 the following example:
2336
2337 struct S { int d; union { char *cp; int *ip; }; };
2338
2339 int foo(struct S *s)
2340 {
2341 if (s->d == 1)
2342 return *s->ip;
2343 else
2344 return *s->cp;
2345 }
2346
2347 on a strict alignment machine. In a valid program, foo will never be
2348 invoked on a structure for which d is equal to 1 and the underlying
2349 unique field of the union is not aligned on a 4-byte boundary, but the
2350 expression *s->ip might cause a fault if considered individually.
2351
2352 At the RTL level, potentially problematic expressions will almost always
2353 satisfy may_trap_p; for example, the above dereference can be emitted as
2354 (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
2355 However, suppose that foo is inlined in a caller that causes s->cp to
2356 point to a local character variable and guarantees that s->d is not set
2357 to 1; foo may have been effectively translated into pseudo-RTL as:
2358
2359 if ((reg:SI) == 1)
2360 (set (reg:SI) (mem:SI (%fp - 7)))
2361 else
2362 (set (reg:QI) (mem:QI (%fp - 7)))
2363
2364 Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
2365 memory reference to a stack slot, but it will certainly cause a fault
2366 on a strict alignment machine. */
2367
2368 int
2369 may_trap_or_fault_p (const_rtx x)
2370 {
2371 return may_trap_p_1 (x, MTP_UNALIGNED_MEMS);
2372 }
2373 \f
2374 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2375 i.e., an inequality. */
2376
2377 int
2378 inequality_comparisons_p (const_rtx x)
2379 {
2380 const char *fmt;
2381 int len, i;
2382 const enum rtx_code code = GET_CODE (x);
2383
2384 switch (code)
2385 {
2386 case REG:
2387 case SCRATCH:
2388 case PC:
2389 case CC0:
2390 case CONST_INT:
2391 case CONST_DOUBLE:
2392 case CONST_VECTOR:
2393 case CONST:
2394 case LABEL_REF:
2395 case SYMBOL_REF:
2396 return 0;
2397
2398 case LT:
2399 case LTU:
2400 case GT:
2401 case GTU:
2402 case LE:
2403 case LEU:
2404 case GE:
2405 case GEU:
2406 return 1;
2407
2408 default:
2409 break;
2410 }
2411
2412 len = GET_RTX_LENGTH (code);
2413 fmt = GET_RTX_FORMAT (code);
2414
2415 for (i = 0; i < len; i++)
2416 {
2417 if (fmt[i] == 'e')
2418 {
2419 if (inequality_comparisons_p (XEXP (x, i)))
2420 return 1;
2421 }
2422 else if (fmt[i] == 'E')
2423 {
2424 int j;
2425 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2426 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2427 return 1;
2428 }
2429 }
2430
2431 return 0;
2432 }
2433 \f
2434 /* Replace any occurrence of FROM in X with TO. The function does
2435 not descend into CONST_DOUBLE expressions when replacing.
2436
2437 Note that copying is not done, so X must not be shared unless all copies
2438 are to be modified. */
2439
2440 rtx
2441 replace_rtx (rtx x, rtx from, rtx to)
2442 {
2443 int i, j;
2444 const char *fmt;
2445
2446 /* The following prevents infinite loops when we replace a MEM inside
2447 a CONST_DOUBLE with that same CONST_DOUBLE. */
2448 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2449 return x;
2450
2451 if (x == from)
2452 return to;
2453
2454 /* Allow this function to make replacements in EXPR_LISTs. */
2455 if (x == 0)
2456 return 0;
2457
2458 if (GET_CODE (x) == SUBREG)
2459 {
2460 rtx new = replace_rtx (SUBREG_REG (x), from, to);
2461
2462 if (GET_CODE (new) == CONST_INT)
2463 {
2464 x = simplify_subreg (GET_MODE (x), new,
2465 GET_MODE (SUBREG_REG (x)),
2466 SUBREG_BYTE (x));
2467 gcc_assert (x);
2468 }
2469 else
2470 SUBREG_REG (x) = new;
2471
2472 return x;
2473 }
2474 else if (GET_CODE (x) == ZERO_EXTEND)
2475 {
2476 rtx new = replace_rtx (XEXP (x, 0), from, to);
2477
2478 if (GET_CODE (new) == CONST_INT)
2479 {
2480 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2481 new, GET_MODE (XEXP (x, 0)));
2482 gcc_assert (x);
2483 }
2484 else
2485 XEXP (x, 0) = new;
2486
2487 return x;
2488 }
2489
2490 fmt = GET_RTX_FORMAT (GET_CODE (x));
2491 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2492 {
2493 if (fmt[i] == 'e')
2494 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2495 else if (fmt[i] == 'E')
2496 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2497 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2498 }
2499
2500 return x;
2501 }
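
/* Usage sketch (added for exposition; not part of the original sources.
   INSN, OLD_REG and NEW_REG are hypothetical; copy_rtx and PATTERN are
   the real interfaces).  Because replace_rtx modifies X in place, a
   shared pattern must be copied first: */
#if 0
  rtx pat = copy_rtx (PATTERN (insn));
  pat = replace_rtx (pat, old_reg, new_reg);
#endif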
2502 \f
2503 /* Replace occurrences of the old label in *X with the new one.
2504 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
2505
2506 int
2507 replace_label (rtx *x, void *data)
2508 {
2509 rtx l = *x;
2510 rtx old_label = ((replace_label_data *) data)->r1;
2511 rtx new_label = ((replace_label_data *) data)->r2;
2512 bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
2513
2514 if (l == NULL_RTX)
2515 return 0;
2516
2517 if (GET_CODE (l) == SYMBOL_REF
2518 && CONSTANT_POOL_ADDRESS_P (l))
2519 {
2520 rtx c = get_pool_constant (l);
2521 if (rtx_referenced_p (old_label, c))
2522 {
2523 rtx new_c, new_l;
2524 replace_label_data *d = (replace_label_data *) data;
2525
2526 /* Create a copy of constant C; replace the label inside
2527 but do not update LABEL_NUSES because uses in constant pool
2528 are not counted. */
2529 new_c = copy_rtx (c);
2530 d->update_label_nuses = false;
2531 for_each_rtx (&new_c, replace_label, data);
2532 d->update_label_nuses = update_label_nuses;
2533
2534 /* Add the new constant NEW_C to constant pool and replace
2535 the old reference to constant by new reference. */
2536 new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
2537 *x = replace_rtx (l, l, new_l);
2538 }
2539 return 0;
2540 }
2541
2542 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2543 field. This is not handled by for_each_rtx because it doesn't
2544 handle unprinted ('0') fields. */
2545 if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
2546 JUMP_LABEL (l) = new_label;
2547
2548 if ((GET_CODE (l) == LABEL_REF
2549 || GET_CODE (l) == INSN_LIST)
2550 && XEXP (l, 0) == old_label)
2551 {
2552 XEXP (l, 0) = new_label;
2553 if (update_label_nuses)
2554 {
2555 ++LABEL_NUSES (new_label);
2556 --LABEL_NUSES (old_label);
2557 }
2558 return 0;
2559 }
2560
2561 return 0;
2562 }
2563
2564 /* Return nonzero when *BODY is equal to X or when X is directly
2565 referenced by *BODY, so that FOR_EACH_RTX stops traversing and returns
2566 nonzero too; otherwise FOR_EACH_RTX continues traversing *BODY. */
2567
2568 static int
2569 rtx_referenced_p_1 (rtx *body, void *x)
2570 {
2571 rtx y = (rtx) x;
2572
2573 if (*body == NULL_RTX)
2574 return y == NULL_RTX;
2575
2576 /* Return true if a label_ref *BODY refers to label Y. */
2577 if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
2578 return XEXP (*body, 0) == y;
2579
2580 /* If *BODY is a reference to pool constant traverse the constant. */
2581 if (GET_CODE (*body) == SYMBOL_REF
2582 && CONSTANT_POOL_ADDRESS_P (*body))
2583 return rtx_referenced_p (y, get_pool_constant (*body));
2584
2585 /* By default, compare the RTL expressions. */
2586 return rtx_equal_p (*body, y);
2587 }
2588
2589 /* Return true if X is referenced in BODY. */
2590
2591 int
2592 rtx_referenced_p (rtx x, rtx body)
2593 {
2594 return for_each_rtx (&body, rtx_referenced_p_1, x);
2595 }
2596
2597 /* If INSN is a tablejump, return true and store the label (the insn preceding
2598 the jump table) in *LABELP and the jump table in *TABLEP. LABELP and TABLEP may be NULL. */
2599
2600 bool
2601 tablejump_p (const_rtx insn, rtx *labelp, rtx *tablep)
2602 {
2603 rtx label, table;
2604
2605 if (JUMP_P (insn)
2606 && (label = JUMP_LABEL (insn)) != NULL_RTX
2607 && (table = next_active_insn (label)) != NULL_RTX
2608 && JUMP_P (table)
2609 && (GET_CODE (PATTERN (table)) == ADDR_VEC
2610 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
2611 {
2612 if (labelp)
2613 *labelp = label;
2614 if (tablep)
2615 *tablep = table;
2616 return true;
2617 }
2618 return false;
2619 }
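
/* Illustrative shape of a tablejump (added for exposition; not part of
   the original sources).  tablejump_p matches sequences such as

     (jump_insn ... (set (pc) ...))        <- INSN, with JUMP_LABEL = L
     (code_label L)                        <- stored in *LABELP
     (jump_insn (addr_vec [...]))          <- stored in *TABLEP

   i.e. the dispatch jump, the label preceding the table, and the
   ADDR_VEC or ADDR_DIFF_VEC holding the case targets.  */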
2620
2621 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2622 constant that is not in the constant pool and not in the condition
2623 of an IF_THEN_ELSE. */
2624
2625 static int
2626 computed_jump_p_1 (const_rtx x)
2627 {
2628 const enum rtx_code code = GET_CODE (x);
2629 int i, j;
2630 const char *fmt;
2631
2632 switch (code)
2633 {
2634 case LABEL_REF:
2635 case PC:
2636 return 0;
2637
2638 case CONST:
2639 case CONST_INT:
2640 case CONST_DOUBLE:
2641 case CONST_VECTOR:
2642 case SYMBOL_REF:
2643 case REG:
2644 return 1;
2645
2646 case MEM:
2647 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2648 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2649
2650 case IF_THEN_ELSE:
2651 return (computed_jump_p_1 (XEXP (x, 1))
2652 || computed_jump_p_1 (XEXP (x, 2)));
2653
2654 default:
2655 break;
2656 }
2657
2658 fmt = GET_RTX_FORMAT (code);
2659 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2660 {
2661 if (fmt[i] == 'e'
2662 && computed_jump_p_1 (XEXP (x, i)))
2663 return 1;
2664
2665 else if (fmt[i] == 'E')
2666 for (j = 0; j < XVECLEN (x, i); j++)
2667 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2668 return 1;
2669 }
2670
2671 return 0;
2672 }
2673
2674 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2675
2676 Tablejumps and casesi insns are not considered indirect jumps;
2677 we can recognize them by a (use (label_ref)). */
2678
2679 int
2680 computed_jump_p (const_rtx insn)
2681 {
2682 int i;
2683 if (JUMP_P (insn))
2684 {
2685 rtx pat = PATTERN (insn);
2686
2687 if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2688 return 0;
2689 else if (GET_CODE (pat) == PARALLEL)
2690 {
2691 int len = XVECLEN (pat, 0);
2692 int has_use_labelref = 0;
2693
2694 for (i = len - 1; i >= 0; i--)
2695 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2696 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2697 == LABEL_REF))
2698 has_use_labelref = 1;
2699
2700 if (! has_use_labelref)
2701 for (i = len - 1; i >= 0; i--)
2702 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2703 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2704 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2705 return 1;
2706 }
2707 else if (GET_CODE (pat) == SET
2708 && SET_DEST (pat) == pc_rtx
2709 && computed_jump_p_1 (SET_SRC (pat)))
2710 return 1;
2711 }
2712 return 0;
2713 }
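
/* Illustrative examples (added for exposition; not part of the original
   sources): (set (pc) (reg:SI 100)) and (set (pc) (mem:SI (reg:SI 100)))
   are computed jumps, whereas (set (pc) (label_ref L)) is not, and a
   tablejump or casesi expansion is excluded by its (use (label_ref ...))
   or REG_LABEL note, as described above.  */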
2714
2715 /* Optimized inner loop of for_each_rtx, trying to avoid useless recursive
2716 calls. Processes the subexpressions of EXP, starting at operand N, and passes them to F. */
2717 static int
2718 for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
2719 {
2720 int result, i, j;
2721 const char *format = GET_RTX_FORMAT (GET_CODE (exp));
2722 rtx *x;
2723
2724 for (; format[n] != '\0'; n++)
2725 {
2726 switch (format[n])
2727 {
2728 case 'e':
2729 /* Call F on X. */
2730 x = &XEXP (exp, n);
2731 result = (*f) (x, data);
2732 if (result == -1)
2733 /* Do not traverse sub-expressions. */
2734 continue;
2735 else if (result != 0)
2736 /* Stop the traversal. */
2737 return result;
2738
2739 if (*x == NULL_RTX)
2740 /* There are no sub-expressions. */
2741 continue;
2742
2743 i = non_rtx_starting_operands[GET_CODE (*x)];
2744 if (i >= 0)
2745 {
2746 result = for_each_rtx_1 (*x, i, f, data);
2747 if (result != 0)
2748 return result;
2749 }
2750 break;
2751
2752 case 'V':
2753 case 'E':
2754 if (XVEC (exp, n) == 0)
2755 continue;
2756 for (j = 0; j < XVECLEN (exp, n); ++j)
2757 {
2758 /* Call F on X. */
2759 x = &XVECEXP (exp, n, j);
2760 result = (*f) (x, data);
2761 if (result == -1)
2762 /* Do not traverse sub-expressions. */
2763 continue;
2764 else if (result != 0)
2765 /* Stop the traversal. */
2766 return result;
2767
2768 if (*x == NULL_RTX)
2769 /* There are no sub-expressions. */
2770 continue;
2771
2772 i = non_rtx_starting_operands[GET_CODE (*x)];
2773 if (i >= 0)
2774 {
2775 result = for_each_rtx_1 (*x, i, f, data);
2776 if (result != 0)
2777 return result;
2778 }
2779 }
2780 break;
2781
2782 default:
2783 /* Nothing to do. */
2784 break;
2785 }
2786 }
2787
2788 return 0;
2789 }
2790
2791 /* Traverse X via depth-first search, calling F for each
2792 sub-expression (including X itself). F is also passed the DATA.
2793 If F returns -1, do not traverse sub-expressions, but continue
2794 traversing the rest of the tree. If F ever returns any other
2795 nonzero value, stop the traversal, and return the value returned
2796 by F. Otherwise, return 0. This function does not traverse inside
2797 tree structure that contains RTX_EXPRs, or into sub-expressions
2798 whose format code is `0' since it is not known whether or not those
2799 codes are actually RTL.
2800
2801 This routine is very general, and could (should?) be used to
2802 implement many of the other routines in this file. */
2803
2804 int
2805 for_each_rtx (rtx *x, rtx_function f, void *data)
2806 {
2807 int result;
2808 int i;
2809
2810 /* Call F on X. */
2811 result = (*f) (x, data);
2812 if (result == -1)
2813 /* Do not traverse sub-expressions. */
2814 return 0;
2815 else if (result != 0)
2816 /* Stop the traversal. */
2817 return result;
2818
2819 if (*x == NULL_RTX)
2820 /* There are no sub-expressions. */
2821 return 0;
2822
2823 i = non_rtx_starting_operands[GET_CODE (*x)];
2824 if (i < 0)
2825 return 0;
2826
2827 return for_each_rtx_1 (*x, i, f, data);
2828 }
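
/* Usage sketch of the traversal interface (added for exposition; not
   part of the original sources).  The callback and wrapper below are
   hypothetical; for_each_rtx and MEM_P are the real interfaces.  */
#if 0
/* Count every MEM contained in X.  Returning 0 continues the walk,
   -1 would skip the sub-expressions of the current node.  */
static int
count_mems_1 (rtx *x, void *data)
{
  if (*x != NULL_RTX && MEM_P (*x))
    ++*(int *) data;
  return 0;
}

static int
count_mems (rtx x)
{
  int n = 0;
  for_each_rtx (&x, count_mems_1, &n);
  return n;
}
#endif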
2829
2830
2831 /* Searches X for any reference to REGNO, returning the rtx of the
2832 reference found if any. Otherwise, returns NULL_RTX. */
2833
2834 rtx
2835 regno_use_in (unsigned int regno, rtx x)
2836 {
2837 const char *fmt;
2838 int i, j;
2839 rtx tem;
2840
2841 if (REG_P (x) && REGNO (x) == regno)
2842 return x;
2843
2844 fmt = GET_RTX_FORMAT (GET_CODE (x));
2845 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2846 {
2847 if (fmt[i] == 'e')
2848 {
2849 if ((tem = regno_use_in (regno, XEXP (x, i))))
2850 return tem;
2851 }
2852 else if (fmt[i] == 'E')
2853 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2854 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2855 return tem;
2856 }
2857
2858 return NULL_RTX;
2859 }
2860
2861 /* Return a value indicating whether OP, an operand of a commutative
2862 operation, is preferred as the first or second operand. The higher
2863 the value, the stronger the preference for being the first operand.
2864 We use negative values to indicate a preference for the second operand
2865 and positive values for the first operand. */
2866
2867 int
2868 commutative_operand_precedence (rtx op)
2869 {
2870 enum rtx_code code = GET_CODE (op);
2871
2872 /* Constants always come second. Prefer "nice" constants. */
2873 if (code == CONST_INT)
2874 return -8;
2875 if (code == CONST_DOUBLE)
2876 return -7;
2877 op = avoid_constant_pool_reference (op);
2878 code = GET_CODE (op);
2879
2880 switch (GET_RTX_CLASS (code))
2881 {
2882 case RTX_CONST_OBJ:
2883 if (code == CONST_INT)
2884 return -6;
2885 if (code == CONST_DOUBLE)
2886 return -5;
2887 return -4;
2888
2889 case RTX_EXTRA:
2890 /* SUBREGs of objects should come second. */
2891 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
2892 return -3;
2893 return 0;
2894
2895 case RTX_OBJ:
2896 /* Complex expressions should come first, so decrease the priority
2897 of objects. Prefer pointer objects over non-pointer objects. */
2898 if ((REG_P (op) && REG_POINTER (op))
2899 || (MEM_P (op) && MEM_POINTER (op)))
2900 return -1;
2901 return -2;
2902
2903 case RTX_COMM_ARITH:
2904 /* Prefer operands that are themselves commutative to be first.
2905 This helps to make things linear. In particular,
2906 (and (and (reg) (reg)) (not (reg))) is canonical. */
2907 return 4;
2908
2909 case RTX_BIN_ARITH:
2910 /* If only one operand is a binary expression, it will be the first
2911 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
2912 is canonical, although it will usually be further simplified. */
2913 return 2;
2914
2915 case RTX_UNARY:
2916 /* Then prefer NEG and NOT. */
2917 if (code == NEG || code == NOT)
2918 return 1;
2919
2920 default:
2921 return 0;
2922 }
2923 }
2924
2925 /* Return 1 iff it is necessary to swap the operands of a commutative
2926 operation in order to canonicalize the expression. */
2927
2928 bool
2929 swap_commutative_operands_p (rtx x, rtx y)
2930 {
2931 return (commutative_operand_precedence (x)
2932 < commutative_operand_precedence (y));
2933 }
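
/* Illustrative example (added for exposition; not part of the original
   sources): for X = (const_int 4) and Y = (reg:SI 100), assuming the
   register is not marked REG_POINTER, the precedences are -8 and -2,
   so swap_commutative_operands_p returns true and canonicalization
   produces (plus:SI (reg:SI 100) (const_int 4)) rather than
   (plus:SI (const_int 4) (reg:SI 100)).  */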
2934
2935 /* Return 1 if X is an autoincrement side effect and the register is
2936 not the stack pointer. */
2937 int
2938 auto_inc_p (const_rtx x)
2939 {
2940 switch (GET_CODE (x))
2941 {
2942 case PRE_INC:
2943 case POST_INC:
2944 case PRE_DEC:
2945 case POST_DEC:
2946 case PRE_MODIFY:
2947 case POST_MODIFY:
2948 /* There are no REG_INC notes for SP. */
2949 if (XEXP (x, 0) != stack_pointer_rtx)
2950 return 1;
2951 default:
2952 break;
2953 }
2954 return 0;
2955 }
2956
2957 /* Return nonzero if IN contains a piece of rtl whose address is LOC. */
2958 int
2959 loc_mentioned_in_p (rtx *loc, const_rtx in)
2960 {
2961 enum rtx_code code;
2962 const char *fmt;
2963 int i, j;
2964
2965 if (!in)
2966 return 0;
2967
2968 code = GET_CODE (in);
2969 fmt = GET_RTX_FORMAT (code);
2970 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2971 {
2972 if (loc == &in->u.fld[i].rt_rtx)
2973 return 1;
2974 if (fmt[i] == 'e')
2975 {
2976 if (loc_mentioned_in_p (loc, XEXP (in, i)))
2977 return 1;
2978 }
2979 else if (fmt[i] == 'E')
2980 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
2981 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
2982 return 1;
2983 }
2984 return 0;
2985 }
2986
2987 /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
2988 and SUBREG_BYTE, return the bit offset where the subreg begins
2989 (counting from the least significant bit of the operand). */
2990
2991 unsigned int
2992 subreg_lsb_1 (enum machine_mode outer_mode,
2993 enum machine_mode inner_mode,
2994 unsigned int subreg_byte)
2995 {
2996 unsigned int bitpos;
2997 unsigned int byte;
2998 unsigned int word;
2999
3000 /* A paradoxical subreg begins at bit position 0. */
3001 if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
3002 return 0;
3003
3004 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3005 /* If the subreg crosses a word boundary ensure that
3006 it also begins and ends on a word boundary. */
3007 gcc_assert (!((subreg_byte % UNITS_PER_WORD
3008 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
3009 && (subreg_byte % UNITS_PER_WORD
3010 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
3011
3012 if (WORDS_BIG_ENDIAN)
3013 word = (GET_MODE_SIZE (inner_mode)
3014 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
3015 else
3016 word = subreg_byte / UNITS_PER_WORD;
3017 bitpos = word * BITS_PER_WORD;
3018
3019 if (BYTES_BIG_ENDIAN)
3020 byte = (GET_MODE_SIZE (inner_mode)
3021 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
3022 else
3023 byte = subreg_byte % UNITS_PER_WORD;
3024 bitpos += byte * BITS_PER_UNIT;
3025
3026 return bitpos;
3027 }
3028
3029 /* Given a subreg X, return the bit offset where the subreg begins
3030 (counting from the least significant bit of the reg). */
3031
3032 unsigned int
3033 subreg_lsb (const_rtx x)
3034 {
3035 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3036 SUBREG_BYTE (x));
3037 }
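
/* Worked example (added for exposition; not part of the original
   sources): on a little-endian target with 4-byte words,
   (subreg:SI (reg:DI 100) 4) gives word = 4 / 4 = 1 and byte = 4 % 4 = 0,
   so subreg_lsb returns 1 * BITS_PER_WORD + 0 = 32; the subreg starts at
   bit 32 of the DImode value.  On a big-endian target the same
   SUBREG_BYTE selects the other half and the result is 0.  */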
3038
3039 /* Fill in information about a subreg of a hard register.
3040 xregno - A regno of an inner hard subreg_reg (or what will become one).
3041 xmode - The mode of xregno.
3042 offset - The byte offset.
3043 ymode - The mode of a top level SUBREG (or what may become one).
3044 info - Pointer to structure to fill in. */
3045 static void
3046 subreg_get_info (unsigned int xregno, enum machine_mode xmode,
3047 unsigned int offset, enum machine_mode ymode,
3048 struct subreg_info *info)
3049 {
3050 int nregs_xmode, nregs_ymode;
3051 int mode_multiple, nregs_multiple;
3052 int offset_adj, y_offset, y_offset_adj;
3053 int regsize_xmode, regsize_ymode;
3054 bool rknown;
3055
3056 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3057
3058 rknown = false;
3059
3060 /* If there are holes in a non-scalar mode in registers, we expect
3061 that it is made up of its units concatenated together. */
3062 if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
3063 {
3064 enum machine_mode xmode_unit;
3065
3066 nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
3067 if (GET_MODE_INNER (xmode) == VOIDmode)
3068 xmode_unit = xmode;
3069 else
3070 xmode_unit = GET_MODE_INNER (xmode);
3071 gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
3072 gcc_assert (nregs_xmode
3073 == (GET_MODE_NUNITS (xmode)
3074 * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
3075 gcc_assert (hard_regno_nregs[xregno][xmode]
3076 == (hard_regno_nregs[xregno][xmode_unit]
3077 * GET_MODE_NUNITS (xmode)));
3078
3079 /* You can only ask for a SUBREG of a value with holes in the middle
3080 if you don't cross the holes. (Such a SUBREG should be done by
3081 picking a different register class, or doing it in memory if
3082 necessary.) An example of a value with holes is XCmode on 32-bit
3083 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
3084 3 for each part, but in memory it's two 128-bit parts.
3085 Padding is assumed to be at the end (not necessarily the 'high part')
3086 of each unit. */
3087 if ((offset / GET_MODE_SIZE (xmode_unit) + 1
3088 < GET_MODE_NUNITS (xmode))
3089 && (offset / GET_MODE_SIZE (xmode_unit)
3090 != ((offset + GET_MODE_SIZE (ymode) - 1)
3091 / GET_MODE_SIZE (xmode_unit))))
3092 {
3093 info->representable_p = false;
3094 rknown = true;
3095 }
3096 }
3097 else
3098 nregs_xmode = hard_regno_nregs[xregno][xmode];
3099
3100 nregs_ymode = hard_regno_nregs[xregno][ymode];
3101
3102 /* Paradoxical subregs are otherwise valid. */
3103 if (!rknown
3104 && offset == 0
3105 && GET_MODE_SIZE (ymode) > GET_MODE_SIZE (xmode))
3106 {
3107 info->representable_p = true;
3108 /* If this is a big endian paradoxical subreg, which uses more
3109 actual hard registers than the original register, we must
3110 return a negative offset so that we find the proper highpart
3111 of the register. */
3112 if (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3113 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3114 info->offset = nregs_xmode - nregs_ymode;
3115 else
3116 info->offset = 0;
3117 info->nregs = nregs_ymode;
3118 return;
3119 }
3120
3121 /* If registers store different numbers of bits in the different
3122 modes, we cannot generally form this subreg. */
3123 if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)
3124 && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode)
3125 && (GET_MODE_SIZE (xmode) % nregs_xmode) == 0
3126 && (GET_MODE_SIZE (ymode) % nregs_ymode) == 0)
3127 {
3128 regsize_xmode = GET_MODE_SIZE (xmode) / nregs_xmode;
3129 regsize_ymode = GET_MODE_SIZE (ymode) / nregs_ymode;
3130 if (!rknown && regsize_xmode > regsize_ymode && nregs_ymode > 1)
3131 {
3132 info->representable_p = false;
3133 info->nregs
3134 = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3135 info->offset = offset / regsize_xmode;
3136 return;
3137 }
3138 if (!rknown && regsize_ymode > regsize_xmode && nregs_xmode > 1)
3139 {
3140 info->representable_p = false;
3141 info->nregs
3142 = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3143 info->offset = offset / regsize_xmode;
3144 return;
3145 }
3146 }
3147
3148 /* Lowpart subregs are otherwise valid. */
3149 if (!rknown && offset == subreg_lowpart_offset (ymode, xmode))
3150 {
3151 info->representable_p = true;
3152 rknown = true;
3153
3154 if (offset == 0 || nregs_xmode == nregs_ymode)
3155 {
3156 info->offset = 0;
3157 info->nregs = nregs_ymode;
3158 return;
3159 }
3160 }
3161
3162 /* This should always pass, otherwise we don't know how to verify
3163 the constraint. These conditions may be relaxed but
3164 subreg_regno_offset would need to be redesigned. */
3165 gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
3166 gcc_assert ((nregs_xmode % nregs_ymode) == 0);
3167
3168 /* The XMODE value can be seen as a vector of NREGS_XMODE
3169 values. The subreg must represent a lowpart of a given field.
3170 Compute which field it is. */
3171 offset_adj = offset;
3172 offset_adj -= subreg_lowpart_offset (ymode,
3173 mode_for_size (GET_MODE_BITSIZE (xmode)
3174 / nregs_xmode,
3175 MODE_INT, 0));
3176
3177 /* Size of ymode must not be greater than the size of xmode. */
3178 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3179 gcc_assert (mode_multiple != 0);
3180
3181 y_offset = offset / GET_MODE_SIZE (ymode);
3182 y_offset_adj = offset_adj / GET_MODE_SIZE (ymode);
3183 nregs_multiple = nregs_xmode / nregs_ymode;
3184
3185 gcc_assert ((offset_adj % GET_MODE_SIZE (ymode)) == 0);
3186 gcc_assert ((mode_multiple % nregs_multiple) == 0);
3187
3188 if (!rknown)
3189 {
3190 info->representable_p = (!(y_offset_adj % (mode_multiple / nregs_multiple)));
3191 rknown = true;
3192 }
3193 info->offset = (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
3194 info->nregs = nregs_ymode;
3195 }
3196
3197 /* This function returns the regno offset of a subreg expression.
3198 xregno - A regno of an inner hard subreg_reg (or what will become one).
3199 xmode - The mode of xregno.
3200 offset - The byte offset.
3201 ymode - The mode of a top level SUBREG (or what may become one).
3202 RETURN - The regno offset which would be used. */
3203 unsigned int
3204 subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
3205 unsigned int offset, enum machine_mode ymode)
3206 {
3207 struct subreg_info info;
3208 subreg_get_info (xregno, xmode, offset, ymode, &info);
3209 return info.offset;
3210 }
3211
3212 /* This function returns true when the offset is representable as a
3213 subreg of the given regno.
3214 xregno - A regno of an inner hard subreg_reg (or what will become one).
3215 xmode - The mode of xregno.
3216 offset - The byte offset.
3217 ymode - The mode of a top level SUBREG (or what may become one).
3218 RETURN - Whether the offset is representable. */
3219 bool
3220 subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
3221 unsigned int offset, enum machine_mode ymode)
3222 {
3223 struct subreg_info info;
3224 subreg_get_info (xregno, xmode, offset, ymode, &info);
3225 return info.representable_p;
3226 }
3227
3228 /* Return the final regno that a subreg expression refers to. */
3229 unsigned int
3230 subreg_regno (const_rtx x)
3231 {
3232 unsigned int ret;
3233 rtx subreg = SUBREG_REG (x);
3234 int regno = REGNO (subreg);
3235
3236 ret = regno + subreg_regno_offset (regno,
3237 GET_MODE (subreg),
3238 SUBREG_BYTE (x),
3239 GET_MODE (x));
3240 return ret;
3241
3242 }
3243
3244 /* Return the number of registers that a subreg expression refers
3245 to. */
3246 unsigned int
3247 subreg_nregs (const_rtx x)
3248 {
3249 struct subreg_info info;
3250 rtx subreg = SUBREG_REG (x);
3251 int regno = REGNO (subreg);
3252
3253 subreg_get_info (regno, GET_MODE (subreg), SUBREG_BYTE (x), GET_MODE (x),
3254 &info);
3255 return info.nregs;
3256 }
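
/* Worked example (added for exposition; not part of the original
   sources): on a hypothetical 32-bit target where a DImode value
   occupies a pair of hard registers, hard_regno_nregs[R][DImode] == 2
   and hard_regno_nregs[R][SImode] == 1.  For (subreg:SI (reg:DI R) 4),
   subreg_get_info reports representable_p = true, nregs = 1 and
   offset = 1, so subreg_regno returns R + 1 (the register holding the
   high word on a little-endian target) and subreg_nregs returns 1.  */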
3257
3258 struct parms_set_data
3259 {
3260 int nregs;
3261 HARD_REG_SET regs;
3262 };
3263
3264 /* Helper function for noticing stores to parameter registers. */
3265 static void
3266 parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
3267 {
3268 struct parms_set_data *d = data;
3269 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3270 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3271 {
3272 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3273 d->nregs--;
3274 }
3275 }
3276
3277 /* Look backward for the first parameter to be loaded.
3278 Note that loads of all parameters will not necessarily be
3279 found if CSE has eliminated some of them (e.g., an argument
3280 to the outer function is passed down as a parameter).
3281 Do not skip BOUNDARY. */
3282 rtx
3283 find_first_parameter_load (rtx call_insn, rtx boundary)
3284 {
3285 struct parms_set_data parm;
3286 rtx p, before, first_set;
3287
3288 /* Since different machines initialize their parameter registers
3289 in different orders, assume nothing. Collect the set of all
3290 parameter registers. */
3291 CLEAR_HARD_REG_SET (parm.regs);
3292 parm.nregs = 0;
3293 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3294 if (GET_CODE (XEXP (p, 0)) == USE
3295 && REG_P (XEXP (XEXP (p, 0), 0)))
3296 {
3297 gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
3298
3299 /* We only care about registers which can hold function
3300 arguments. */
3301 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3302 continue;
3303
3304 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3305 parm.nregs++;
3306 }
3307 before = call_insn;
3308 first_set = call_insn;
3309
3310 /* Search backward for the first set of a register in this set. */
3311 while (parm.nregs && before != boundary)
3312 {
3313 before = PREV_INSN (before);
3314
3315 /* It is possible that some loads got CSEed from one call to
3316 another. Stop in that case. */
3317 if (CALL_P (before))
3318 break;
3319
3320 /* Our caller must either ensure that we will find all sets
3321 (in case the code has not been optimized yet), or take care
3322 of possible labels by setting BOUNDARY to the preceding
3323 CODE_LABEL. */
3324 if (LABEL_P (before))
3325 {
3326 gcc_assert (before == boundary);
3327 break;
3328 }
3329
3330 if (INSN_P (before))
3331 {
3332 int nregs_old = parm.nregs;
3333 note_stores (PATTERN (before), parms_set, &parm);
3334 /* If we found something that did not set a parameter reg,
3335 we're done. Do not keep going, as that might result
3336 in hoisting an insn before the setting of a pseudo
3337 that is used by the hoisted insn. */
3338 if (nregs_old != parm.nregs)
3339 first_set = before;
3340 else
3341 break;
3342 }
3343 }
3344 return first_set;
3345 }
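
/* Illustrative example (added for exposition; not part of the original
   sources; the register numbers are hypothetical).  On a target whose
   argument registers include hard regs 4 and 5, given the sequence

     insn1:     (set (reg:SI 5) ...)
     insn2:     (set (reg:SI 4) ...)
     call_insn: (call_insn ...)   with (use (reg:SI 4)) (use (reg:SI 5))
                                  in CALL_INSN_FUNCTION_USAGE

   find_first_parameter_load (call_insn, boundary) walks backward and
   returns insn1, the earliest insn that still loads a parameter
   register, so callers know not to move code in between.  */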
3346
3347 /* Return true if we should avoid inserting code between INSN and the
3348 preceding call instruction. */
3349
3350 bool
3351 keep_with_call_p (rtx insn)
3352 {
3353 rtx set;
3354
3355 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3356 {
3357 if (REG_P (SET_DEST (set))
3358 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3359 && fixed_regs[REGNO (SET_DEST (set))]
3360 && general_operand (SET_SRC (set), VOIDmode))
3361 return true;
3362 if (REG_P (SET_SRC (set))
3363 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3364 && REG_P (SET_DEST (set))
3365 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3366 return true;
3367 /* There may be a stack pop just after the call and before the store
3368 of the return register. Search for the actual store when deciding
3369 if we can break or not. */
3370 if (SET_DEST (set) == stack_pointer_rtx)
3371 {
3372 rtx i2 = next_nonnote_insn (insn);
3373 if (i2 && keep_with_call_p (i2))
3374 return true;
3375 }
3376 }
3377 return false;
3378 }
3379
3380 /* Return true if LABEL is a target of JUMP_INSN. This applies only
3381 to non-complex jumps. That is, direct unconditional, conditional,
3382 and tablejumps, but not computed jumps or returns. It also does
3383 not apply to the fallthru case of a conditional jump. */
3384
3385 bool
3386 label_is_jump_target_p (const_rtx label, const_rtx jump_insn)
3387 {
3388 rtx tmp = JUMP_LABEL (jump_insn);
3389
3390 if (label == tmp)
3391 return true;
3392
3393 if (tablejump_p (jump_insn, NULL, &tmp))
3394 {
3395 rtvec vec = XVEC (PATTERN (tmp),
3396 GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3397 int i, veclen = GET_NUM_ELEM (vec);
3398
3399 for (i = 0; i < veclen; ++i)
3400 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3401 return true;
3402 }
3403
3404 return false;
3405 }
3406
3407 \f
3408 /* Return an estimate of the cost of computing rtx X.
3409 One use is in cse, to decide which expression to keep in the hash table.
3410 Another is in rtl generation, to pick the cheapest way to multiply.
3411 Other uses like the latter are expected in the future. */
3412
3413 int
3414 rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
3415 {
3416 int i, j;
3417 enum rtx_code code;
3418 const char *fmt;
3419 int total;
3420
3421 if (x == 0)
3422 return 0;
3423
3424 /* Compute the default costs of certain things.
3425 Note that targetm.rtx_costs can override the defaults. */
3426
3427 code = GET_CODE (x);
3428 switch (code)
3429 {
3430 case MULT:
3431 total = COSTS_N_INSNS (5);
3432 break;
3433 case DIV:
3434 case UDIV:
3435 case MOD:
3436 case UMOD:
3437 total = COSTS_N_INSNS (7);
3438 break;
3439 case USE:
3440 /* Used in combine.c as a marker. */
3441 total = 0;
3442 break;
3443 default:
3444 total = COSTS_N_INSNS (1);
3445 }
3446
3447 switch (code)
3448 {
3449 case REG:
3450 return 0;
3451
3452 case SUBREG:
3453 total = 0;
3454 /* If we can't tie these modes, make this expensive. The larger
3455 the mode, the more expensive it is. */
3456 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3457 return COSTS_N_INSNS (2
3458 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
3459 break;
3460
3461 default:
3462 if (targetm.rtx_costs (x, code, outer_code, &total))
3463 return total;
3464 break;
3465 }
3466
3467 /* Sum the costs of the sub-rtx's, plus cost of this operation,
3468 which is already in total. */
3469
3470 fmt = GET_RTX_FORMAT (code);
3471 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3472 if (fmt[i] == 'e')
3473 total += rtx_cost (XEXP (x, i), code);
3474 else if (fmt[i] == 'E')
3475 for (j = 0; j < XVECLEN (x, i); j++)
3476 total += rtx_cost (XVECEXP (x, i, j), code);
3477
3478 return total;
3479 }
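
/* Worked example (added for exposition; not part of the original
   sources), assuming the target hook does not override the defaults:
   for (plus:SI (mult:SI (reg:SI 100) (reg:SI 101)) (reg:SI 102)) the
   PLUS contributes COSTS_N_INSNS (1), the MULT COSTS_N_INSNS (5) and
   each REG 0, so rtx_cost returns COSTS_N_INSNS (6).  */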
3480 \f
3481 /* Return the cost of address expression X.
3482 Expect that X is a properly formed address reference. */
3483
3484 int
3485 address_cost (rtx x, enum machine_mode mode)
3486 {
3487 /* We may be asked for the cost of various unusual addresses, such as the
3488 operands of a push instruction. It is not worthwhile to complicate
3489 the target hook with such cases. */
3490
3491 if (!memory_address_p (mode, x))
3492 return 1000;
3493
3494 return targetm.address_cost (x);
3495 }
3496
3497 /* If the target doesn't override, compute the cost as with arithmetic. */
3498
3499 int
3500 default_address_cost (rtx x)
3501 {
3502 return rtx_cost (x, MEM);
3503 }
3504 \f
3505
3506 unsigned HOST_WIDE_INT
3507 nonzero_bits (rtx x, enum machine_mode mode)
3508 {
3509 return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
3510 }
3511
3512 unsigned int
3513 num_sign_bit_copies (rtx x, enum machine_mode mode)
3514 {
3515 return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
3516 }
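
/* Illustrative examples (added for exposition; not part of the original
   sources): nonzero_bits on (and:SI (reg:SI 100) (const_int 255)) in
   SImode is at most 0xff, because the AND intersects the nonzero bits of
   its operands; num_sign_bit_copies on (sign_extend:SI (reg:QI 100)) in
   SImode is at least 25, because the 24 copied bits all equal bit 7 of
   the QImode operand.  */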
3517
3518 /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3519 It avoids exponential behavior in nonzero_bits1 when X has
3520 identical subexpressions on the first or the second level. */
3521
3522 static unsigned HOST_WIDE_INT
3523 cached_nonzero_bits (rtx x, enum machine_mode mode, rtx known_x,
3524 enum machine_mode known_mode,
3525 unsigned HOST_WIDE_INT known_ret)
3526 {
3527 if (x == known_x && mode == known_mode)
3528 return known_ret;
3529
3530 /* Try to find identical subexpressions. If found call
3531 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3532 precomputed value for the subexpression as KNOWN_RET. */
3533
3534 if (ARITHMETIC_P (x))
3535 {
3536 rtx x0 = XEXP (x, 0);
3537 rtx x1 = XEXP (x, 1);
3538
3539 /* Check the first level. */
3540 if (x0 == x1)
3541 return nonzero_bits1 (x, mode, x0, mode,
3542 cached_nonzero_bits (x0, mode, known_x,
3543 known_mode, known_ret));
3544
3545 /* Check the second level. */
3546 if (ARITHMETIC_P (x0)
3547 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3548 return nonzero_bits1 (x, mode, x1, mode,
3549 cached_nonzero_bits (x1, mode, known_x,
3550 known_mode, known_ret));
3551
3552 if (ARITHMETIC_P (x1)
3553 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3554 return nonzero_bits1 (x, mode, x0, mode,
3555 cached_nonzero_bits (x0, mode, known_x,
3556 known_mode, known_ret));
3557 }
3558
3559 return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
3560 }
3561
3562 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3563 We don't let nonzero_bits recur into num_sign_bit_copies, because that
3564 is less useful. We can't allow both, because that results in exponential
3565 run time recursion. There is a nullstone testcase that triggered
3566 this. This macro avoids accidental uses of num_sign_bit_copies. */
3567 #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3568
3569 /* Given an expression, X, compute which bits in X can be nonzero.
3570 We don't care about bits outside of those defined in MODE.
3571
3572 For most X this is simply GET_MODE_MASK (MODE), but if X is
3573 an arithmetic operation, we can do better. */
3574
3575 static unsigned HOST_WIDE_INT
3576 nonzero_bits1 (rtx x, enum machine_mode mode, rtx known_x,
3577 enum machine_mode known_mode,
3578 unsigned HOST_WIDE_INT known_ret)
3579 {
3580 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3581 unsigned HOST_WIDE_INT inner_nz;
3582 enum rtx_code code;
3583 unsigned int mode_width = GET_MODE_BITSIZE (mode);
3584
3585 /* For floating-point values, assume all bits are needed. */
3586 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
3587 return nonzero;
3588
3589 /* If X is wider than MODE, use its mode instead. */
3590 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
3591 {
3592 mode = GET_MODE (x);
3593 nonzero = GET_MODE_MASK (mode);
3594 mode_width = GET_MODE_BITSIZE (mode);
3595 }
3596
3597 if (mode_width > HOST_BITS_PER_WIDE_INT)
3598 /* Our only callers in this case look for single bit values. So
3599 just return the mode mask. Those tests will then be false. */
3600 return nonzero;
3601
3602 #ifndef WORD_REGISTER_OPERATIONS
3603 /* If MODE is wider than X, but both are a single word for both the host
3604 and target machines, we can compute this from which bits of the
3605 object might be nonzero in its own mode, taking into account the fact
3606 that on many CISC machines, accessing an object in a wider mode
3607 causes the high-order bits to become undefined. So they are
3608 not known to be zero. */
3609
3610 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3611 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
3612 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3613 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
3614 {
3615 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3616 known_x, known_mode, known_ret);
3617 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3618 return nonzero;
3619 }
3620 #endif
3621
3622 code = GET_CODE (x);
3623 switch (code)
3624 {
3625 case REG:
3626 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3627 /* If pointers extend unsigned and this is a pointer in Pmode, say that
3628 all the bits above ptr_mode are known to be zero. */
3629 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3630 && REG_POINTER (x))
3631 nonzero &= GET_MODE_MASK (ptr_mode);
3632 #endif
3633
3634 /* Include declared information about alignment of pointers. */
3635 /* ??? We don't properly preserve REG_POINTER changes across
3636 pointer-to-integer casts, so we can't trust it except for
3637 things that we know must be pointers. See execute/960116-1.c. */
3638 if ((x == stack_pointer_rtx
3639 || x == frame_pointer_rtx
3640 || x == arg_pointer_rtx)
3641 && REGNO_POINTER_ALIGN (REGNO (x)))
3642 {
3643 unsigned HOST_WIDE_INT alignment
3644 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3645
3646 #ifdef PUSH_ROUNDING
3647 /* If PUSH_ROUNDING is defined, it is possible for the
3648 stack to be momentarily aligned only to that amount,
3649 so we pick the least alignment. */
3650 if (x == stack_pointer_rtx && PUSH_ARGS)
3651 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3652 alignment);
3653 #endif
3654
3655 nonzero &= ~(alignment - 1);
3656 }
3657
3658 {
3659 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
3660 rtx new = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
3661 known_mode, known_ret,
3662 &nonzero_for_hook);
3663
3664 if (new)
3665 nonzero_for_hook &= cached_nonzero_bits (new, mode, known_x,
3666 known_mode, known_ret);
3667
3668 return nonzero_for_hook;
3669 }
3670
3671 case CONST_INT:
3672 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
3673 /* If X is negative in MODE, sign-extend the value. */
3674 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
3675 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
3676 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
3677 #endif
3678
3679 return INTVAL (x);
3680
3681 case MEM:
3682 #ifdef LOAD_EXTEND_OP
3683 /* In many, if not most, RISC machines, reading a byte from memory
3684 zeros the rest of the register. Noticing that fact saves a lot
3685 of extra zero-extends. */
3686 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
3687 nonzero &= GET_MODE_MASK (GET_MODE (x));
3688 #endif
3689 break;
3690
3691 case EQ: case NE:
3692 case UNEQ: case LTGT:
3693 case GT: case GTU: case UNGT:
3694 case LT: case LTU: case UNLT:
3695 case GE: case GEU: case UNGE:
3696 case LE: case LEU: case UNLE:
3697 case UNORDERED: case ORDERED:
3698 /* If this produces an integer result, we know which bits are set.
3699 Code here used to clear bits outside the mode of X, but that is
3700 now done above. */
3701 /* Mind that MODE is the mode the caller wants to look at this
3702 operation in, and not the actual operation mode. We can wind
3703 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
3704 that describes the results of a vector compare. */
3705 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
3706 && mode_width <= HOST_BITS_PER_WIDE_INT)
3707 nonzero = STORE_FLAG_VALUE;
3708 break;
3709
3710 case NEG:
3711 #if 0
3712 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3713 and num_sign_bit_copies. */
3714 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3715 == GET_MODE_BITSIZE (GET_MODE (x)))
3716 nonzero = 1;
3717 #endif
3718
3719 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
3720 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
3721 break;
3722
3723 case ABS:
3724 #if 0
3725 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3726 and num_sign_bit_copies. */
3727 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3728 == GET_MODE_BITSIZE (GET_MODE (x)))
3729 nonzero = 1;
3730 #endif
3731 break;
3732
3733 case TRUNCATE:
3734 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
3735 known_x, known_mode, known_ret)
3736 & GET_MODE_MASK (mode));
3737 break;
3738
3739 case ZERO_EXTEND:
3740 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3741 known_x, known_mode, known_ret);
3742 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3743 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3744 break;
3745
3746 case SIGN_EXTEND:
3747 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
3748 Otherwise, show that all the bits that are in the outer mode but not
3749 in the inner mode may be nonzero. */
3750 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
3751 known_x, known_mode, known_ret);
3752 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3753 {
3754 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3755 if (inner_nz
3756 & (((HOST_WIDE_INT) 1
3757 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
3758 inner_nz |= (GET_MODE_MASK (mode)
3759 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
3760 }
3761
3762 nonzero &= inner_nz;
3763 break;
3764
3765 case AND:
3766 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3767 known_x, known_mode, known_ret)
3768 & cached_nonzero_bits (XEXP (x, 1), mode,
3769 known_x, known_mode, known_ret);
3770 break;
3771
3772 case XOR: case IOR:
3773 case UMIN: case UMAX: case SMIN: case SMAX:
3774 {
3775 unsigned HOST_WIDE_INT nonzero0 =
3776 cached_nonzero_bits (XEXP (x, 0), mode,
3777 known_x, known_mode, known_ret);
3778
3779 /* Don't call nonzero_bits for the second time if it cannot change
3780 anything. */
3781 if ((nonzero & nonzero0) != nonzero)
3782 nonzero &= nonzero0
3783 | cached_nonzero_bits (XEXP (x, 1), mode,
3784 known_x, known_mode, known_ret);
3785 }
3786 break;
3787
3788 case PLUS: case MINUS:
3789 case MULT:
3790 case DIV: case UDIV:
3791 case MOD: case UMOD:
3792 /* We can apply the rules of arithmetic to compute the number of
3793 high- and low-order zero bits of these operations. We start by
3794 computing the width (position of the highest-order nonzero bit)
3795 and the number of low-order zero bits for each value. */
3796 {
3797 unsigned HOST_WIDE_INT nz0 =
3798 cached_nonzero_bits (XEXP (x, 0), mode,
3799 known_x, known_mode, known_ret);
3800 unsigned HOST_WIDE_INT nz1 =
3801 cached_nonzero_bits (XEXP (x, 1), mode,
3802 known_x, known_mode, known_ret);
3803 int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
3804 int width0 = floor_log2 (nz0) + 1;
3805 int width1 = floor_log2 (nz1) + 1;
3806 int low0 = floor_log2 (nz0 & -nz0);
3807 int low1 = floor_log2 (nz1 & -nz1);
3808 HOST_WIDE_INT op0_maybe_minusp
3809 = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
3810 HOST_WIDE_INT op1_maybe_minusp
3811 = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
3812 unsigned int result_width = mode_width;
3813 int result_low = 0;
3814
3815 switch (code)
3816 {
3817 case PLUS:
3818 result_width = MAX (width0, width1) + 1;
3819 result_low = MIN (low0, low1);
3820 break;
3821 case MINUS:
3822 result_low = MIN (low0, low1);
3823 break;
3824 case MULT:
3825 result_width = width0 + width1;
3826 result_low = low0 + low1;
3827 break;
3828 case DIV:
3829 if (width1 == 0)
3830 break;
3831 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3832 result_width = width0;
3833 break;
3834 case UDIV:
3835 if (width1 == 0)
3836 break;
3837 result_width = width0;
3838 break;
3839 case MOD:
3840 if (width1 == 0)
3841 break;
3842 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3843 result_width = MIN (width0, width1);
3844 result_low = MIN (low0, low1);
3845 break;
3846 case UMOD:
3847 if (width1 == 0)
3848 break;
3849 result_width = MIN (width0, width1);
3850 result_low = MIN (low0, low1);
3851 break;
3852 default:
3853 gcc_unreachable ();
3854 }
3855
3856 if (result_width < mode_width)
3857 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
3858
3859 if (result_low > 0)
3860 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
3861
3862 #ifdef POINTERS_EXTEND_UNSIGNED
3863 /* If pointers extend unsigned and this is an addition or subtraction
3864 to a pointer in Pmode, all the bits above ptr_mode are known to be
3865 zero. */
3866 if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
3867 && (code == PLUS || code == MINUS)
3868 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
3869 nonzero &= GET_MODE_MASK (ptr_mode);
3870 #endif
3871 }
3872 break;
3873
3874 case ZERO_EXTRACT:
3875 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3876 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3877 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
3878 break;
3879
3880 case SUBREG:
3881 /* If this is a SUBREG formed for a promoted variable that has
3882 been zero-extended, we know that at least the high-order bits
3883 are zero, though others might be too. */
3884
3885 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
3886 nonzero = GET_MODE_MASK (GET_MODE (x))
3887 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
3888 known_x, known_mode, known_ret);
3889
3890 /* If the inner mode is a single word for both the host and target
3891 machines, we can compute this from which bits of the inner
3892 object might be nonzero. */
3893 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
3894 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
3895 <= HOST_BITS_PER_WIDE_INT))
3896 {
3897 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
3898 known_x, known_mode, known_ret);
3899
3900 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
3901 /* If this is a typical RISC machine, we only have to worry
3902 about the way loads are extended. */
3903 if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
3904 ? (((nonzero
3905 & (((unsigned HOST_WIDE_INT) 1
3906 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
3907 != 0))
3908 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
3909 || !MEM_P (SUBREG_REG (x)))
3910 #endif
3911 {
3912 /* On many CISC machines, accessing an object in a wider mode
3913 causes the high-order bits to become undefined. So they are
3914 not known to be zero. */
3915 if (GET_MODE_SIZE (GET_MODE (x))
3916 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3917 nonzero |= (GET_MODE_MASK (GET_MODE (x))
3918 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
3919 }
3920 }
3921 break;
3922
3923 case ASHIFTRT:
3924 case LSHIFTRT:
3925 case ASHIFT:
3926 case ROTATE:
3927 /* The nonzero bits are in two classes: any bits within MODE
3928 that aren't in GET_MODE (x) are always significant. The rest of the
3929 nonzero bits are those that are significant in the operand of
3930 the shift when shifted the appropriate number of bits. This
3931 shows that high-order bits are cleared by the right shift and
3932 low-order bits by left shifts. */
3933 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3934 && INTVAL (XEXP (x, 1)) >= 0
3935 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3936 {
3937 enum machine_mode inner_mode = GET_MODE (x);
3938 unsigned int width = GET_MODE_BITSIZE (inner_mode);
3939 int count = INTVAL (XEXP (x, 1));
3940 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
3941 unsigned HOST_WIDE_INT op_nonzero =
3942 cached_nonzero_bits (XEXP (x, 0), mode,
3943 known_x, known_mode, known_ret);
3944 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
3945 unsigned HOST_WIDE_INT outer = 0;
3946
3947 if (mode_width > width)
3948 outer = (op_nonzero & nonzero & ~mode_mask);
3949
3950 if (code == LSHIFTRT)
3951 inner >>= count;
3952 else if (code == ASHIFTRT)
3953 {
3954 inner >>= count;
3955
3956 /* If the sign bit may have been nonzero before the shift, we
3957 need to mark all the places it could have been copied to
3958 by the shift as possibly nonzero. */
3959 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
3960 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
3961 }
3962 else if (code == ASHIFT)
3963 inner <<= count;
3964 else
3965 inner = ((inner << (count % width)
3966 | (inner >> (width - (count % width)))) & mode_mask);
3967
3968 nonzero &= (outer | inner);
3969 }
3970 break;
3971
3972 case FFS:
3973 case POPCOUNT:
3974 /* This is at most the number of bits in the mode. */
3975 nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
3976 break;
3977
3978 case CLZ:
3979 /* If CLZ has a known value at zero, then the nonzero bits are
3980 that value, plus the number of bits in the mode minus one. */
3981 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
3982 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
3983 else
3984 nonzero = -1;
3985 break;
3986
3987 case CTZ:
3988 /* If CTZ has a known value at zero, then the nonzero bits are
3989 that value, plus the number of bits in the mode minus one. */
3990 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
3991 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
3992 else
3993 nonzero = -1;
3994 break;
3995
3996 case PARITY:
3997 nonzero = 1;
3998 break;
3999
4000 case IF_THEN_ELSE:
4001 {
4002 unsigned HOST_WIDE_INT nonzero_true =
4003 cached_nonzero_bits (XEXP (x, 1), mode,
4004 known_x, known_mode, known_ret);
4005
4006 /* Don't call nonzero_bits for the second time if it cannot change
4007 anything. */
4008 if ((nonzero & nonzero_true) != nonzero)
4009 nonzero &= nonzero_true
4010 | cached_nonzero_bits (XEXP (x, 2), mode,
4011 known_x, known_mode, known_ret);
4012 }
4013 break;
4014
4015 default:
4016 break;
4017 }
4018
4019 return nonzero;
4020 }
4021
4022 /* See the macro definition above. */
4023 #undef cached_num_sign_bit_copies
4024
4025 \f
4026 /* The function cached_num_sign_bit_copies is a wrapper around
4027 num_sign_bit_copies1. It avoids exponential behavior in
4028 num_sign_bit_copies1 when X has identical subexpressions on the
4029 first or the second level. */
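/* For instance, given the (purely illustrative) expression
   (plus (reg 1) (reg 1)), the two operands are the same rtx, so the
   copy count of (reg 1) is computed once and passed down as
   KNOWN_X/KNOWN_RET rather than being recomputed for each occurrence.  */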
4030
4031 static unsigned int
4032 cached_num_sign_bit_copies (rtx x, enum machine_mode mode, rtx known_x,
4033 enum machine_mode known_mode,
4034 unsigned int known_ret)
4035 {
4036 if (x == known_x && mode == known_mode)
4037 return known_ret;
4038
4039 /* Try to find identical subexpressions. If found call
4040 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
4041 the precomputed value for the subexpression as KNOWN_RET. */
4042
4043 if (ARITHMETIC_P (x))
4044 {
4045 rtx x0 = XEXP (x, 0);
4046 rtx x1 = XEXP (x, 1);
4047
4048 /* Check the first level. */
4049 if (x0 == x1)
4050 return
4051 num_sign_bit_copies1 (x, mode, x0, mode,
4052 cached_num_sign_bit_copies (x0, mode, known_x,
4053 known_mode,
4054 known_ret));
4055
4056 /* Check the second level. */
4057 if (ARITHMETIC_P (x0)
4058 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4059 return
4060 num_sign_bit_copies1 (x, mode, x1, mode,
4061 cached_num_sign_bit_copies (x1, mode, known_x,
4062 known_mode,
4063 known_ret));
4064
4065 if (ARITHMETIC_P (x1)
4066 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4067 return
4068 num_sign_bit_copies1 (x, mode, x0, mode,
4069 cached_num_sign_bit_copies (x0, mode, known_x,
4070 known_mode,
4071 known_ret));
4072 }
4073
4074 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
4075 }
4076
4077 /* Return the number of bits at the high-order end of X that are known to
4078 be equal to the sign bit. X will be used in mode MODE; if MODE is
4079 VOIDmode, X will be used in its own mode. The returned value will always
4080 be between 1 and the number of bits in MODE. */
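/* For example, if all that is known about X is that its top three bits
   equal the sign bit, the result is 3; if nothing useful is known, the
   result is 1; for (const_int 0) it is the full width of MODE.  */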
4081
4082 static unsigned int
4083 num_sign_bit_copies1 (rtx x, enum machine_mode mode, rtx known_x,
4084 enum machine_mode known_mode,
4085 unsigned int known_ret)
4086 {
4087 enum rtx_code code = GET_CODE (x);
4088 unsigned int bitwidth = GET_MODE_BITSIZE (mode);
4089 int num0, num1, result;
4090 unsigned HOST_WIDE_INT nonzero;
4091
4092 /* If we weren't given a mode, use the mode of X. If the mode is still
4093 VOIDmode, we don't know anything. Likewise if one of the modes is
4094 floating-point. */
4095
4096 if (mode == VOIDmode)
4097 mode = GET_MODE (x);
4098
4099 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
4100 return 1;
4101
4102 /* For a smaller object, just ignore the high bits. */
4103 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
4104 {
4105 num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
4106 known_x, known_mode, known_ret);
4107 return MAX (1,
4108 num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
4109 }
4110
4111 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
4112 {
4113 #ifndef WORD_REGISTER_OPERATIONS
4114 /* If this machine does not do all register operations on the entire
4115 register and MODE is wider than the mode of X, we can say nothing
4116 at all about the high-order bits. */
4117 return 1;
4118 #else
4119 /* Likewise on machines that do, if the mode of the object is smaller
4120 than a word and loads of that size don't sign extend, we can say
4121 nothing about the high order bits. */
4122 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
4123 #ifdef LOAD_EXTEND_OP
4124 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
4125 #endif
4126 )
4127 return 1;
4128 #endif
4129 }
4130
4131 switch (code)
4132 {
4133 case REG:
4134
4135 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4136 /* If pointers extend signed and this is a pointer in Pmode, say that
4137 all the bits above ptr_mode are known to be sign bit copies. */
4138 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
4139 && REG_POINTER (x))
4140 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
4141 #endif
4142
4143 {
4144 unsigned int copies_for_hook = 1, copies = 1;
4145 rtx new = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
4146 known_mode, known_ret,
4147 &copies_for_hook);
4148
4149 if (new)
4150 copies = cached_num_sign_bit_copies (new, mode, known_x,
4151 known_mode, known_ret);
4152
4153 if (copies > 1 || copies_for_hook > 1)
4154 return MAX (copies, copies_for_hook);
4155
4156 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
4157 }
4158 break;
4159
4160 case MEM:
4161 #ifdef LOAD_EXTEND_OP
4162 /* Some RISC machines sign-extend all loads smaller than a word. */
4163 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4164 return MAX (1, ((int) bitwidth
4165 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
4166 #endif
4167 break;
4168
4169 case CONST_INT:
4170 /* If the constant is negative, take its 1's complement and remask.
4171 Then see how many zero bits we have. */
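/* Worked example of the computation below, assuming an 8-bit MODE
   (illustrative): for (const_int -4), nonzero is 0xfc; the sign bit is
   set, so nonzero becomes ~0xfc & 0xff = 0x03, floor_log2 (0x03) is 1,
   and the result is 8 - 1 - 1 = 6, matching the six leading one bits
   of 0b11111100.  */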
4172 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
4173 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4174 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4175 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4176
4177 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4178
4179 case SUBREG:
4180 /* If this is a SUBREG for a promoted object that is sign-extended
4181 and we are looking at it in a wider mode, we know that at least
4182 the high-order bits are sign bit copies. */
4183
4184 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
4185 {
4186 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4187 known_x, known_mode, known_ret);
4188 return MAX ((int) bitwidth
4189 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
4190 num0);
4191 }
4192
4193 /* For a smaller object, just ignore the high bits. */
4194 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
4195 {
4196 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4197 known_x, known_mode, known_ret);
4198 return MAX (1, (num0
4199 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4200 - bitwidth)));
4201 }
4202
4203 #ifdef WORD_REGISTER_OPERATIONS
4204 #ifdef LOAD_EXTEND_OP
4205 /* For paradoxical SUBREGs on machines where all register operations
4206 affect the entire register, just look inside. Note that we are
4207 passing MODE to the recursive call, so the number of sign bit copies
4208 will remain relative to that mode, not the inner mode. */
4209
4210 /* This works only if loads sign extend. Otherwise, if we get a
4211 reload for the inner part, it may be loaded from the stack, and
4212 then we lose all sign bit copies that existed before the store
4213 to the stack. */
4214
4215 if ((GET_MODE_SIZE (GET_MODE (x))
4216 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4217 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4218 && MEM_P (SUBREG_REG (x)))
4219 return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4220 known_x, known_mode, known_ret);
4221 #endif
4222 #endif
4223 break;
4224
4225 case SIGN_EXTRACT:
4226 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4227 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4228 break;
4229
4230 case SIGN_EXTEND:
4231 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4232 + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4233 known_x, known_mode, known_ret));
4234
4235 case TRUNCATE:
4236 /* For a smaller object, just ignore the high bits. */
4237 num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4238 known_x, known_mode, known_ret);
4239 return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4240 - bitwidth)));
4241
4242 case NOT:
4243 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4244 known_x, known_mode, known_ret);
4245
4246 case ROTATE: case ROTATERT:
4247 /* If we are rotating left by a number of bits less than the number
4248 of sign bit copies, we can just subtract that amount from the
4249 number. */
4250 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4251 && INTVAL (XEXP (x, 1)) >= 0
4252 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4253 {
4254 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4255 known_x, known_mode, known_ret);
4256 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4257 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4258 }
4259 break;
4260
4261 case NEG:
4262 /* In general, this subtracts one sign bit copy. But if the value
4263 is known to be positive, the number of sign bit copies is the
4264 same as that of the input. Finally, if the input has just one bit
4265 that might be nonzero, all the bits are copies of the sign bit. */
4266 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4267 known_x, known_mode, known_ret);
4268 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4269 return num0 > 1 ? num0 - 1 : 1;
4270
4271 nonzero = nonzero_bits (XEXP (x, 0), mode);
4272 if (nonzero == 1)
4273 return bitwidth;
4274
4275 if (num0 > 1
4276 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4277 num0--;
4278
4279 return num0;
4280
4281 case IOR: case AND: case XOR:
4282 case SMIN: case SMAX: case UMIN: case UMAX:
4283 /* Logical operations will preserve the number of sign-bit copies.
4284 MIN and MAX operations always return one of the operands. */
4285 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4286 known_x, known_mode, known_ret);
4287 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4288 known_x, known_mode, known_ret);
4289
4290 /* If the second operand is a constant that clears some of the top
4291 bits, then regardless of the other term we are guaranteed to have
4292 at least that many high-order zero bits. */
4293 if (code == AND
4294 && num1 > 1
4295 && bitwidth <= HOST_BITS_PER_WIDE_INT
4296 && GET_CODE (XEXP (x, 1)) == CONST_INT
4297 && !(INTVAL (XEXP (x, 1)) & ((HOST_WIDE_INT) 1 << (bitwidth - 1))))
4298 return num1;
4299
4300 /* Similarly for IOR when setting high-order bits. */
4301 if (code == IOR
4302 && num1 > 1
4303 && bitwidth <= HOST_BITS_PER_WIDE_INT
4304 && GET_CODE (XEXP (x, 1)) == CONST_INT
4305 && (INTVAL (XEXP (x, 1)) & ((HOST_WIDE_INT) 1 << (bitwidth - 1))))
4306 return num1;
4307
4308 return MIN (num0, num1);
4309
4310 case PLUS: case MINUS:
4311 /* For addition and subtraction, we can have a 1-bit carry. However,
4312 if we are subtracting 1 from a positive number, there will not
4313 be such a carry. Furthermore, if the positive number is known to
4314 be 0 or 1, we know the result is either -1 or 0. */
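/* Illustration (8-bit mode, values chosen arbitrarily): operands 0x01
   and 0x02 have 7 and 6 sign bit copies, so the sum is guaranteed
   MIN (7, 6) - 1 = 5 copies; the special case below does better when
   adding -1 to a value known to be 0 or 1, since the result is then
   0 or -1 and every bit is a copy of the sign bit.  */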
4315
4316 if (code == PLUS && XEXP (x, 1) == constm1_rtx
4317 && bitwidth <= HOST_BITS_PER_WIDE_INT)
4318 {
4319 nonzero = nonzero_bits (XEXP (x, 0), mode);
4320 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4321 return (nonzero == 1 || nonzero == 0 ? bitwidth
4322 : bitwidth - floor_log2 (nonzero) - 1);
4323 }
4324
4325 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4326 known_x, known_mode, known_ret);
4327 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4328 known_x, known_mode, known_ret);
4329 result = MAX (1, MIN (num0, num1) - 1);
4330
4331 #ifdef POINTERS_EXTEND_UNSIGNED
4332 /* If pointers extend signed and this is an addition or subtraction
4333 to a pointer in Pmode, all the bits above ptr_mode are known to be
4334 sign bit copies. */
4335 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4336 && (code == PLUS || code == MINUS)
4337 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4338 result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
4339 - GET_MODE_BITSIZE (ptr_mode) + 1),
4340 result);
4341 #endif
4342 return result;
4343
4344 case MULT:
4345 /* The number of bits of the product is the sum of the number of
4346 bits of both terms. However, unless one of the terms is known
4347 to be positive, we must allow for an additional bit since negating
4348 a negative number can remove one sign bit copy. */
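/* Illustration (8-bit mode, arbitrary values): a factor with 5 sign bit
   copies lies in [-8, 7] and one with 6 lies in [-4, 3]; the product
   lies in [-28, 32], and 32 = 0b00100000 has only 2 copies.  The code
   computes 8 - 3 - 2 = 3 and subtracts one more because both factors
   may be negative, giving exactly that bound of 2.  */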
4349
4350 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4351 known_x, known_mode, known_ret);
4352 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4353 known_x, known_mode, known_ret);
4354
4355 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4356 if (result > 0
4357 && (bitwidth > HOST_BITS_PER_WIDE_INT
4358 || (((nonzero_bits (XEXP (x, 0), mode)
4359 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4360 && ((nonzero_bits (XEXP (x, 1), mode)
4361 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
4362 result--;
4363
4364 return MAX (1, result);
4365
4366 case UDIV:
4367 /* The result must be <= the first operand. If the first operand
4368 has the high bit set, we know nothing about the number of sign
4369 bit copies. */
4370 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4371 return 1;
4372 else if ((nonzero_bits (XEXP (x, 0), mode)
4373 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4374 return 1;
4375 else
4376 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4377 known_x, known_mode, known_ret);
4378
4379 case UMOD:
4380 /* The result must be <= the second operand. */
4381 return cached_num_sign_bit_copies (XEXP (x, 1), mode,
4382 known_x, known_mode, known_ret);
4383
4384 case DIV:
4385 /* Similar to unsigned division, except that we have to worry about
4386 the case where the divisor is negative, in which case the quotient
4387 may need one more significant bit, so we drop one sign bit copy. */
4388 result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4389 known_x, known_mode, known_ret);
4390 if (result > 1
4391 && (bitwidth > HOST_BITS_PER_WIDE_INT
4392 || (nonzero_bits (XEXP (x, 1), mode)
4393 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4394 result--;
4395
4396 return result;
4397
4398 case MOD:
4399 result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4400 known_x, known_mode, known_ret);
4401 if (result > 1
4402 && (bitwidth > HOST_BITS_PER_WIDE_INT
4403 || (nonzero_bits (XEXP (x, 1), mode)
4404 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4405 result--;
4406
4407 return result;
4408
4409 case ASHIFTRT:
4410 /* An arithmetic right shift by a constant adds that many copies of
4411 the sign bit (capped at the bit width). */
4412 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4413 known_x, known_mode, known_ret);
4414 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4415 && INTVAL (XEXP (x, 1)) > 0)
4416 num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
4417
4418 return num0;
4419
4420 case ASHIFT:
4421 /* Left shifts destroy copies. */
4422 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4423 || INTVAL (XEXP (x, 1)) < 0
4424 || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
4425 return 1;
4426
4427 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4428 known_x, known_mode, known_ret);
4429 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
4430
4431 case IF_THEN_ELSE:
4432 num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4433 known_x, known_mode, known_ret);
4434 num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
4435 known_x, known_mode, known_ret);
4436 return MIN (num0, num1);
4437
4438 case EQ: case NE: case GE: case GT: case LE: case LT:
4439 case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
4440 case GEU: case GTU: case LEU: case LTU:
4441 case UNORDERED: case ORDERED:
4442 /* The value is either zero or STORE_FLAG_VALUE. If STORE_FLAG_VALUE is
4443 negative, take its 1's complement and remask. Then see how many zero bits we have. */
4444 nonzero = STORE_FLAG_VALUE;
4445 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4446 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4447 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4448
4449 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4450
4451 default:
4452 break;
4453 }
4454
4455 /* If we haven't been able to figure it out by one of the above rules,
4456 see if some of the high-order bits are known to be zero. If so,
4457 return the count of those bits, since they are all copies of the
4458 (zero) sign bit. If we can't safely compute the mask for this mode, return 1. */
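/* For instance, if nonzero_bits reports 0x0f in an 8-bit mode (an
   illustrative value), floor_log2 (0x0f) is 3 and the result is
   8 - 3 - 1 = 4: bits 7..4 are known zero and are therefore copies of
   the (zero) sign bit.  */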
4459
4460 bitwidth = GET_MODE_BITSIZE (mode);
4461 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4462 return 1;
4463
4464 nonzero = nonzero_bits (x, mode);
4465 return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
4466 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
4467 }
4468
4469 /* Calculate the rtx_cost of a single instruction. A return value of
4470 zero indicates an instruction pattern without a known cost. */
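/* For example, a PARALLEL made up of one SET plus a CLOBBER is costed by
   the SET_SRC of that single SET, whereas a PARALLEL containing two SETs
   yields 0.  */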
4471
4472 int
4473 insn_rtx_cost (rtx pat)
4474 {
4475 int i, cost;
4476 rtx set;
4477
4478 /* Extract the single set rtx from the instruction pattern.
4479 We can't use single_set since we only have the pattern. */
4480 if (GET_CODE (pat) == SET)
4481 set = pat;
4482 else if (GET_CODE (pat) == PARALLEL)
4483 {
4484 set = NULL_RTX;
4485 for (i = 0; i < XVECLEN (pat, 0); i++)
4486 {
4487 rtx x = XVECEXP (pat, 0, i);
4488 if (GET_CODE (x) == SET)
4489 {
4490 if (set)
4491 return 0;
4492 set = x;
4493 }
4494 }
4495 if (!set)
4496 return 0;
4497 }
4498 else
4499 return 0;
4500
4501 cost = rtx_cost (SET_SRC (set), SET);
4502 return cost > 0 ? cost : COSTS_N_INSNS (1);
4503 }
4504
4505 /* Given an insn INSN and condition COND, return the condition in a
4506 canonical form to simplify testing by callers. Specifically:
4507
4508 (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
4509 (2) Both operands will be machine operands; (cc0) will have been replaced.
4510 (3) If an operand is a constant, it will be the second operand.
4511 (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
4512 for GE, GEU, and LEU.
4513
4514 If the condition cannot be understood, or is an inequality floating-point
4515 comparison which needs to be reversed, 0 will be returned.
4516
4517 If REVERSE is nonzero, then reverse the condition prior to canonicalizing it.
4518
4519 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4520 insn used in locating the condition was found. If a replacement test
4521 of the condition is desired, it should be placed in front of that
4522 insn and we will be sure that the inputs are still valid.
4523
4524 If WANT_REG is nonzero, we wish the condition to be relative to that
4525 register, if possible. Therefore, do not canonicalize the condition
4526 further. If ALLOW_CC_MODE is nonzero, allow the condition returned
4527 to be a compare to a CC mode register.
4528
4529 If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
4530 and at INSN. */
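/* For instance (with illustrative operands), (le (reg) (const_int 4))
   is canonicalized to (lt (reg) (const_int 5)) unless 4 is already the
   largest signed value of the mode, (geu (reg) (const_int 1)) becomes
   (gtu (reg) (const_int 0)), and a constant appearing as the first
   operand is swapped to the second with the code adjusted.  */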
4531
4532 rtx
4533 canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
4534 rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
4535 {
4536 enum rtx_code code;
4537 rtx prev = insn;
4538 const_rtx set;
4539 rtx tem;
4540 rtx op0, op1;
4541 int reverse_code = 0;
4542 enum machine_mode mode;
4543 basic_block bb = BLOCK_FOR_INSN (insn);
4544
4545 code = GET_CODE (cond);
4546 mode = GET_MODE (cond);
4547 op0 = XEXP (cond, 0);
4548 op1 = XEXP (cond, 1);
4549
4550 if (reverse)
4551 code = reversed_comparison_code (cond, insn);
4552 if (code == UNKNOWN)
4553 return 0;
4554
4555 if (earliest)
4556 *earliest = insn;
4557
4558 /* If we are comparing a register with zero, see if the register is set
4559 in the previous insn to a COMPARE or a comparison operation. Perform
4560 the same tests, as a function of STORE_FLAG_VALUE, that
4561 find_comparison_args in cse.c performs. */
4562
4563 while ((GET_RTX_CLASS (code) == RTX_COMPARE
4564 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
4565 && op1 == CONST0_RTX (GET_MODE (op0))
4566 && op0 != want_reg)
4567 {
4568 /* Set nonzero when we find something of interest. */
4569 rtx x = 0;
4570
4571 #ifdef HAVE_cc0
4572 /* If comparison with cc0, import actual comparison from compare
4573 insn. */
4574 if (op0 == cc0_rtx)
4575 {
4576 if ((prev = prev_nonnote_insn (prev)) == 0
4577 || !NONJUMP_INSN_P (prev)
4578 || (set = single_set (prev)) == 0
4579 || SET_DEST (set) != cc0_rtx)
4580 return 0;
4581
4582 op0 = SET_SRC (set);
4583 op1 = CONST0_RTX (GET_MODE (op0));
4584 if (earliest)
4585 *earliest = prev;
4586 }
4587 #endif
4588
4589 /* If this is a COMPARE, pick up the two things being compared. */
4590 if (GET_CODE (op0) == COMPARE)
4591 {
4592 op1 = XEXP (op0, 1);
4593 op0 = XEXP (op0, 0);
4594 continue;
4595 }
4596 else if (!REG_P (op0))
4597 break;
4598
4599 /* Go back to the previous insn. Stop if it is not an INSN. We also
4600 stop if it isn't a single set or if it has a REG_INC note because
4601 we don't want to bother dealing with it. */
4602
4603 if ((prev = prev_nonnote_insn (prev)) == 0
4604 || !NONJUMP_INSN_P (prev)
4605 || FIND_REG_INC_NOTE (prev, NULL_RTX)
4606 /* In cfglayout mode, there do not have to be labels at the
4607 beginning of a block, or jumps at the end, so the previous
4608 conditions would not stop us when we reach the bb boundary. */
4609 || BLOCK_FOR_INSN (prev) != bb)
4610 break;
4611
4612 set = set_of (op0, prev);
4613
4614 if (set
4615 && (GET_CODE (set) != SET
4616 || !rtx_equal_p (SET_DEST (set), op0)))
4617 break;
4618
4619 /* If this is setting OP0, get what it sets it to if it looks
4620 relevant. */
4621 if (set)
4622 {
4623 enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
4624 #ifdef FLOAT_STORE_FLAG_VALUE
4625 REAL_VALUE_TYPE fsfv;
4626 #endif
4627
4628 /* ??? We may not combine comparisons done in a CCmode with
4629 comparisons not done in a CCmode. This is to aid targets
4630 like Alpha that have an IEEE compliant EQ instruction, and
4631 a non-IEEE compliant BEQ instruction. The use of CCmode is
4632 actually artificial, simply to prevent the combination, but
4633 should not affect other platforms.
4634
4635 However, we must allow VOIDmode comparisons to match either
4636 CCmode or non-CCmode comparisons, because some ports have
4637 modeless comparisons inside branch patterns.
4638
4639 ??? This mode check should perhaps look more like the mode check
4640 in simplify_comparison in combine. */
4641
4642 if ((GET_CODE (SET_SRC (set)) == COMPARE
4643 || (((code == NE
4644 || (code == LT
4645 && GET_MODE_CLASS (inner_mode) == MODE_INT
4646 && (GET_MODE_BITSIZE (inner_mode)
4647 <= HOST_BITS_PER_WIDE_INT)
4648 && (STORE_FLAG_VALUE
4649 & ((HOST_WIDE_INT) 1
4650 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4651 #ifdef FLOAT_STORE_FLAG_VALUE
4652 || (code == LT
4653 && SCALAR_FLOAT_MODE_P (inner_mode)
4654 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4655 REAL_VALUE_NEGATIVE (fsfv)))
4656 #endif
4657 ))
4658 && COMPARISON_P (SET_SRC (set))))
4659 && (((GET_MODE_CLASS (mode) == MODE_CC)
4660 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4661 || mode == VOIDmode || inner_mode == VOIDmode))
4662 x = SET_SRC (set);
4663 else if (((code == EQ
4664 || (code == GE
4665 && (GET_MODE_BITSIZE (inner_mode)
4666 <= HOST_BITS_PER_WIDE_INT)
4667 && GET_MODE_CLASS (inner_mode) == MODE_INT
4668 && (STORE_FLAG_VALUE
4669 & ((HOST_WIDE_INT) 1
4670 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4671 #ifdef FLOAT_STORE_FLAG_VALUE
4672 || (code == GE
4673 && SCALAR_FLOAT_MODE_P (inner_mode)
4674 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4675 REAL_VALUE_NEGATIVE (fsfv)))
4676 #endif
4677 ))
4678 && COMPARISON_P (SET_SRC (set))
4679 && (((GET_MODE_CLASS (mode) == MODE_CC)
4680 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4681 || mode == VOIDmode || inner_mode == VOIDmode))
4682
4683 {
4684 reverse_code = 1;
4685 x = SET_SRC (set);
4686 }
4687 else
4688 break;
4689 }
4690
4691 else if (reg_set_p (op0, prev))
4692 /* If this sets OP0, but not directly, we have to give up. */
4693 break;
4694
4695 if (x)
4696 {
4697 /* If the caller is expecting the condition to be valid at INSN,
4698 make sure X doesn't change before INSN. */
4699 if (valid_at_insn_p)
4700 if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
4701 break;
4702 if (COMPARISON_P (x))
4703 code = GET_CODE (x);
4704 if (reverse_code)
4705 {
4706 code = reversed_comparison_code (x, prev);
4707 if (code == UNKNOWN)
4708 return 0;
4709 reverse_code = 0;
4710 }
4711
4712 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
4713 if (earliest)
4714 *earliest = prev;
4715 }
4716 }
4717
4718 /* If constant is first, put it last. */
4719 if (CONSTANT_P (op0))
4720 code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;
4721
4722 /* If OP0 is the result of a comparison, we weren't able to find what
4723 was really being compared, so fail. */
4724 if (!allow_cc_mode
4725 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
4726 return 0;
4727
4728 /* Canonicalize any ordered comparison against an integer constant that
4729 involves equality (LE, GE, LEU, GEU) into a strict comparison, provided
4730 we can do the computation in the relevant mode without overflow. */
4731
4732 if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
4733 && GET_CODE (op1) == CONST_INT
4734 && GET_MODE (op0) != VOIDmode
4735 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
4736 {
4737 HOST_WIDE_INT const_val = INTVAL (op1);
4738 unsigned HOST_WIDE_INT uconst_val = const_val;
4739 unsigned HOST_WIDE_INT max_val
4740 = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));
4741
4742 switch (code)
4743 {
4744 case LE:
4745 if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
4746 code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
4747 break;
4748
4749 /* When cross-compiling, const_val might be sign-extended from
4750 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT. */
4751 case GE:
4752 if ((HOST_WIDE_INT) (const_val & max_val)
4753 != (((HOST_WIDE_INT) 1
4754 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
4755 code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
4756 break;
4757
4758 case LEU:
4759 if (uconst_val < max_val)
4760 code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
4761 break;
4762
4763 case GEU:
4764 if (uconst_val != 0)
4765 code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
4766 break;
4767
4768 default:
4769 break;
4770 }
4771 }
4772
4773 /* Never return CC0; return zero instead. */
4774 if (CC0_P (op0))
4775 return 0;
4776
4777 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
4778 }
4779
4780 /* Given a jump insn JUMP, return the condition that will cause it to branch
4781 to its JUMP_LABEL. If the condition cannot be understood, or is an
4782 inequality floating-point comparison which needs to be reversed, 0 will
4783 be returned.
4784
4785 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4786 insn used in locating the condition was found. If a replacement test
4787 of the condition is desired, it should be placed in front of that
4788 insn and we will be sure that the inputs are still valid. If EARLIEST
4789 is null, the returned condition will be valid at INSN.
4790
4791 If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
4792 compare CC mode register.
4793
4794 VALID_AT_INSN_P is the same as for canonicalize_condition. */
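/* For example, for a jump whose pattern is (set (pc) (if_then_else
   (ne (reg) (const_int 0)) (label_ref 23) (pc))) -- an illustrative
   pattern -- the result is the canonicalized (ne (reg) (const_int 0));
   if the label_ref instead appears in the else arm, the condition is
   reversed before canonicalization.  */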
4795
4796 rtx
4797 get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
4798 {
4799 rtx cond;
4800 int reverse;
4801 rtx set;
4802
4803 /* If this is not a standard conditional jump, we can't parse it. */
4804 if (!JUMP_P (jump)
4805 || ! any_condjump_p (jump))
4806 return 0;
4807 set = pc_set (jump);
4808
4809 cond = XEXP (SET_SRC (set), 0);
4810
4811 /* If this branches to JUMP_LABEL when the condition is false, reverse
4812 the condition. */
4813 reverse
4814 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4815 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);
4816
4817 return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
4818 allow_cc_mode, valid_at_insn_p);
4819 }
4820
4821 /* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
4822 TARGET_MODE_REP_EXTENDED.
4823
4824 Note that we assume that the property of
4825 TARGET_MODE_REP_EXTENDED(B, C) is sticky to the integral modes
4826 narrower than mode B. I.e., if A is a mode narrower than B then in
4827 order to be able to operate on it in mode B, mode A needs to
4828 satisfy the requirements set by the representation of mode B. */
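/* As a concrete (hypothetical) example: on a target where
   TARGET_MODE_REP_EXTENDED (SImode, DImode) is SIGN_EXTEND and SImode
   and DImode are 32 and 64 bits wide, num_sign_bit_copies_in_rep
   [DImode][SImode] ends up as 64 - 32 = 32, i.e. an SImode value held
   in a DImode register must have its upper 32 bits equal to the sign
   bit.  */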
4829
4830 static void
4831 init_num_sign_bit_copies_in_rep (void)
4832 {
4833 enum machine_mode mode, in_mode;
4834
4835 for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode;
4836 in_mode = GET_MODE_WIDER_MODE (mode))
4837 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode;
4838 mode = GET_MODE_WIDER_MODE (mode))
4839 {
4840 enum machine_mode i;
4841
4842 /* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
4843 extends to the next widest mode. */
4844 gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
4845 || GET_MODE_WIDER_MODE (mode) == in_mode);
4846
4847 /* We are in in_mode. Count how many bits outside of mode
4848 have to be copies of the sign-bit. */
4849 for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i))
4850 {
4851 enum machine_mode wider = GET_MODE_WIDER_MODE (i);
4852
4853 if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
4854 /* We can only check sign-bit copies starting from the
4855 top-bit. In order to be able to check the bits we
4856 have already seen we pretend that subsequent bits
4857 have to be sign-bit copies too. */
4858 || num_sign_bit_copies_in_rep [in_mode][mode])
4859 num_sign_bit_copies_in_rep [in_mode][mode]
4860 += GET_MODE_BITSIZE (wider) - GET_MODE_BITSIZE (i);
4861 }
4862 }
4863 }
4864
4865 /* Suppose that truncation from the machine mode of X to MODE is not a
4866 no-op. See if there is anything special about X so that we can
4867 assume it already contains a truncated value of MODE. */
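/* For example, if num_sign_bit_copies_in_rep[GET_MODE (x)][MODE] is 32
   (as for a 64-bit mode required to hold sign-extended 32-bit values),
   X qualifies when num_sign_bit_copies reports at least 33 copies in
   its own mode, i.e. X is already sign-extended from MODE.  */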
4868
4869 bool
4870 truncated_to_mode (enum machine_mode mode, rtx x)
4871 {
4872 /* This register has already been used in MODE without explicit
4873 truncation. */
4874 if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
4875 return true;
4876
4877 /* See if we already satisfy the requirements of MODE. If yes we
4878 can just switch to MODE. */
4879 if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
4880 && (num_sign_bit_copies (x, GET_MODE (x))
4881 >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
4882 return true;
4883
4884 return false;
4885 }
4886 \f
4887 /* Initialize non_rtx_starting_operands, which is used to speed up
4888 for_each_rtx. */
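/* For example, the format string of PLUS is "ee", so its entry becomes 0,
   while CONST_INT, whose format is "w", gets -1, telling for_each_rtx
   that there is nothing to recurse into.  */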
4889 void
4890 init_rtlanal (void)
4891 {
4892 int i;
4893 for (i = 0; i < NUM_RTX_CODE; i++)
4894 {
4895 const char *format = GET_RTX_FORMAT (i);
4896 const char *first = strpbrk (format, "eEV");
4897 non_rtx_starting_operands[i] = first ? first - format : -1;
4898 }
4899
4900 init_num_sign_bit_copies_in_rep ();
4901 }
4902 \f
4903 /* Check whether this is a constant pool constant. */
4904 bool
4905 constant_pool_constant_p (rtx x)
4906 {
4907 x = avoid_constant_pool_reference (x);
4908 return GET_CODE (x) == CONST_DOUBLE;
4909 }
4910