1 /* Analyze RTL for C-Compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22
23 #include "config.h"
24 #include "system.h"
25 #include "toplev.h"
26 #include "rtl.h"
27 #include "hard-reg-set.h"
28 #include "tm_p.h"
29
30 /* Forward declarations */
31 static void set_of_1 PARAMS ((rtx, rtx, void *));
32 static void insn_dependent_p_1 PARAMS ((rtx, rtx, void *));
33 static int computed_jump_p_1 PARAMS ((rtx));
34 static void parms_set PARAMS ((rtx, rtx, void *));
35
36 /* Bit flags that specify the machine subtype we are compiling for.
37 Bits are tested using macros TARGET_... defined in the tm.h file
38 and set by `-m...' switches. Must be defined in rtlanal.c. */
39
40 int target_flags;
41 \f
42 /* Return 1 if the value of X is unstable
43 (would be different at a different point in the program).
44 The frame pointer, arg pointer, etc. are considered stable
45 (within one function) and so is anything marked `unchanging'. */
46
47 int
48 rtx_unstable_p (x)
49 rtx x;
50 {
51 RTX_CODE code = GET_CODE (x);
52 int i;
53 const char *fmt;
54
55 switch (code)
56 {
57 case MEM:
58 return ! RTX_UNCHANGING_P (x) || rtx_unstable_p (XEXP (x, 0));
59
60 case QUEUED:
61 return 1;
62
63 case ADDRESSOF:
64 case CONST:
65 case CONST_INT:
66 case CONST_DOUBLE:
67 case SYMBOL_REF:
68 case LABEL_REF:
69 return 0;
70
71 case REG:
72 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
73 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
74 /* The arg pointer varies if it is not a fixed register. */
75 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
76 || RTX_UNCHANGING_P (x))
77 return 0;
78 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
79 /* ??? When call-clobbered, the value is stable modulo the restore
80 that must happen after a call. This currently screws up local-alloc
81 into believing that the restore is not needed. */
82 if (x == pic_offset_table_rtx)
83 return 0;
84 #endif
85 return 1;
86
87 case ASM_OPERANDS:
88 if (MEM_VOLATILE_P (x))
89 return 1;
90
91 /* FALLTHROUGH */
92
93 default:
94 break;
95 }
96
97 fmt = GET_RTX_FORMAT (code);
98 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
99 if (fmt[i] == 'e')
100 {
101 if (rtx_unstable_p (XEXP (x, i)))
102 return 1;
103 }
104 else if (fmt[i] == 'E')
105 {
106 int j;
107 for (j = 0; j < XVECLEN (x, i); j++)
108 if (rtx_unstable_p (XVECEXP (x, i, j)))
109 return 1;
110 }
111
112 return 0;
113 }
114
115 /* Return 1 if X has a value that can vary even between two
116 executions of the program. 0 means X can be compared reliably
117 against certain constants or near-constants.
118 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
119 zero, we are slightly more conservative.
120 The frame pointer and the arg pointer are considered constant. */
121
122 int
123 rtx_varies_p (x, for_alias)
124 rtx x;
125 int for_alias;
126 {
127 RTX_CODE code = GET_CODE (x);
128 int i;
129 const char *fmt;
130
131 switch (code)
132 {
133 case MEM:
134 return ! RTX_UNCHANGING_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
135
136 case QUEUED:
137 return 1;
138
139 case CONST:
140 case CONST_INT:
141 case CONST_DOUBLE:
142 case SYMBOL_REF:
143 case LABEL_REF:
144 return 0;
145
146 case REG:
147 /* Note that we have to test for the actual rtx used for the frame
148 and arg pointers and not just the register number in case we have
149 eliminated the frame and/or arg pointer and are using it
150 for pseudos. */
151 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
152 /* The arg pointer varies if it is not a fixed register. */
153 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
154 return 0;
155 if (x == pic_offset_table_rtx
156 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
157 /* ??? When call-clobbered, the value is stable modulo the restore
158 that must happen after a call. This currently screws up
159 local-alloc into believing that the restore is not needed, so we
160 must return 0 only if we are called from alias analysis. */
161 && for_alias
162 #endif
163 )
164 return 0;
165 return 1;
166
167 case LO_SUM:
168 /* The operand 0 of a LO_SUM is considered constant
169 (in fact it is related specifically to operand 1)
170 during alias analysis. */
171 return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
172 || rtx_varies_p (XEXP (x, 1), for_alias);
173
174 case ASM_OPERANDS:
175 if (MEM_VOLATILE_P (x))
176 return 1;
177
178 /* FALLTHROUGH */
179
180 default:
181 break;
182 }
183
184 fmt = GET_RTX_FORMAT (code);
185 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
186 if (fmt[i] == 'e')
187 {
188 if (rtx_varies_p (XEXP (x, i), for_alias))
189 return 1;
190 }
191 else if (fmt[i] == 'E')
192 {
193 int j;
194 for (j = 0; j < XVECLEN (x, i); j++)
195 if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
196 return 1;
197 }
198
199 return 0;
200 }
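
/* Illustrative sketch, not part of the original source: a hypothetical
   caller might use the predicate above to decide whether an address can
   be compared reliably against constants.  The helper name below is made
   up for this example.  */
#if 0
static int
example_address_is_near_constant (addr)
     rtx addr;
{
  /* An address such as (plus (reg fp) (const_int 8)) yields 0 from
     rtx_varies_p, while an address held in an ordinary pseudo register
     yields 1.  */
  return ! rtx_varies_p (addr, 0);
}
#endif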
201
202 /* Return 0 if the use of X as an address in a MEM can cause a trap. */
203
204 int
205 rtx_addr_can_trap_p (x)
206 rtx x;
207 {
208 enum rtx_code code = GET_CODE (x);
209
210 switch (code)
211 {
212 case SYMBOL_REF:
213 return SYMBOL_REF_WEAK (x);
214
215 case LABEL_REF:
216 return 0;
217
218 case REG:
219 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
220 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
221 || x == stack_pointer_rtx
222 /* The arg pointer varies if it is not a fixed register. */
223 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
224 return 0;
225 /* All of the virtual frame registers are stack references. */
226 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
227 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
228 return 0;
229 return 1;
230
231 case CONST:
232 return rtx_addr_can_trap_p (XEXP (x, 0));
233
234 case PLUS:
235 /* An address is assumed not to trap if it is an address that can't
236 trap plus a constant integer or it is the pic register plus a
237 constant. */
238 return ! ((! rtx_addr_can_trap_p (XEXP (x, 0))
239 && GET_CODE (XEXP (x, 1)) == CONST_INT)
240 || (XEXP (x, 0) == pic_offset_table_rtx
241 && CONSTANT_P (XEXP (x, 1))));
242
243 case LO_SUM:
244 case PRE_MODIFY:
245 return rtx_addr_can_trap_p (XEXP (x, 1));
246
247 case PRE_DEC:
248 case PRE_INC:
249 case POST_DEC:
250 case POST_INC:
251 case POST_MODIFY:
252 return rtx_addr_can_trap_p (XEXP (x, 0));
253
254 default:
255 break;
256 }
257
258   /* If it isn't one of the cases above, it can cause a trap.  */
259 return 1;
260 }
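
/* Illustrative sketch, not part of the original source: building a
   frame-pointer-relative address with the usual gen_rtx interfaces and
   asking whether it can trap.  The helper name is made up.  */
#if 0
static int
example_frame_slot_can_trap_p ()
{
  rtx addr = gen_rtx_PLUS (Pmode, frame_pointer_rtx, GEN_INT (8));

  /* Returns 0: an address that cannot trap plus a constant integer is
     itself considered safe.  */
  return rtx_addr_can_trap_p (addr);
}
#endif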
261
262 /* Return 1 if X refers to a memory location whose address
263 cannot be compared reliably with constant addresses,
264 or if X refers to a BLKmode memory object.
265 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
266 zero, we are slightly more conservative. */
267
268 int
269 rtx_addr_varies_p (x, for_alias)
270 rtx x;
271 int for_alias;
272 {
273 enum rtx_code code;
274 int i;
275 const char *fmt;
276
277 if (x == 0)
278 return 0;
279
280 code = GET_CODE (x);
281 if (code == MEM)
282 return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
283
284 fmt = GET_RTX_FORMAT (code);
285 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
286 if (fmt[i] == 'e')
287 {
288 if (rtx_addr_varies_p (XEXP (x, i), for_alias))
289 return 1;
290 }
291 else if (fmt[i] == 'E')
292 {
293 int j;
294 for (j = 0; j < XVECLEN (x, i); j++)
295 if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
296 return 1;
297 }
298 return 0;
299 }
300 \f
301 /* Return the value of the integer term in X, if one is apparent;
302 otherwise return 0.
303 Only obvious integer terms are detected.
304    This is used in cse.c with the `related_value' field.  */
305
306 HOST_WIDE_INT
307 get_integer_term (x)
308 rtx x;
309 {
310 if (GET_CODE (x) == CONST)
311 x = XEXP (x, 0);
312
313 if (GET_CODE (x) == MINUS
314 && GET_CODE (XEXP (x, 1)) == CONST_INT)
315 return - INTVAL (XEXP (x, 1));
316 if (GET_CODE (x) == PLUS
317 && GET_CODE (XEXP (x, 1)) == CONST_INT)
318 return INTVAL (XEXP (x, 1));
319 return 0;
320 }
321
322 /* If X is a constant, return the value sans apparent integer term;
323 otherwise return 0.
324 Only obvious integer terms are detected. */
325
326 rtx
327 get_related_value (x)
328 rtx x;
329 {
330 if (GET_CODE (x) != CONST)
331 return 0;
332 x = XEXP (x, 0);
333 if (GET_CODE (x) == PLUS
334 && GET_CODE (XEXP (x, 1)) == CONST_INT)
335 return XEXP (x, 0);
336 else if (GET_CODE (x) == MINUS
337 && GET_CODE (XEXP (x, 1)) == CONST_INT)
338 return XEXP (x, 0);
339 return 0;
340 }
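
/* Illustrative sketch, not part of the original source: for a constant
   such as (const (plus (symbol_ref "x") (const_int 12))), the two
   routines above split it into the SYMBOL_REF and the offset 12, which
   is how cse.c recognizes related values.  The helper name is made up.  */
#if 0
static void
example_split_address_constant (x, base, offset)
     rtx x;
     rtx *base;
     HOST_WIDE_INT *offset;
{
  *base = get_related_value (x);
  *offset = get_integer_term (x);
}
#endif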
341 \f
342 /* Given a tablejump insn INSN, return the RTL expression for the offset
343 into the jump table. If the offset cannot be determined, then return
344 NULL_RTX.
345
346 If EARLIEST is non-zero, it is a pointer to a place where the earliest
347 insn used in locating the offset was found. */
348
349 rtx
350 get_jump_table_offset (insn, earliest)
351 rtx insn;
352 rtx *earliest;
353 {
354 rtx label;
355 rtx table;
356 rtx set;
357 rtx old_insn;
358 rtx x;
359 rtx old_x;
360 rtx y;
361 rtx old_y;
362 int i;
363
364 if (GET_CODE (insn) != JUMP_INSN
365 || ! (label = JUMP_LABEL (insn))
366 || ! (table = NEXT_INSN (label))
367 || GET_CODE (table) != JUMP_INSN
368 || (GET_CODE (PATTERN (table)) != ADDR_VEC
369 && GET_CODE (PATTERN (table)) != ADDR_DIFF_VEC)
370 || ! (set = single_set (insn)))
371 return NULL_RTX;
372
373 x = SET_SRC (set);
374
375 /* Some targets (eg, ARM) emit a tablejump that also
376 contains the out-of-range target. */
377 if (GET_CODE (x) == IF_THEN_ELSE
378 && GET_CODE (XEXP (x, 2)) == LABEL_REF)
379 x = XEXP (x, 1);
380
381 /* Search backwards and locate the expression stored in X. */
382 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
383 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
384 ;
385
386 /* If X is an expression using a relative address then strip
387 off the addition / subtraction of PC, PIC_OFFSET_TABLE_REGNUM,
388 or the jump table label. */
389 if (GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC
390 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS))
391 {
392 for (i = 0; i < 2; i++)
393 {
394 old_insn = insn;
395 y = XEXP (x, i);
396
397 if (y == pc_rtx || y == pic_offset_table_rtx)
398 break;
399
400 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
401 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
402 ;
403
404 if ((GET_CODE (y) == LABEL_REF && XEXP (y, 0) == label))
405 break;
406 }
407
408 if (i >= 2)
409 return NULL_RTX;
410
411 x = XEXP (x, 1 - i);
412
413 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
414 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
415 ;
416 }
417
418 /* Strip off any sign or zero extension. */
419 if (GET_CODE (x) == SIGN_EXTEND || GET_CODE (x) == ZERO_EXTEND)
420 {
421 x = XEXP (x, 0);
422
423 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
424 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
425 ;
426 }
427
428 /* If X isn't a MEM then this isn't a tablejump we understand. */
429 if (GET_CODE (x) != MEM)
430 return NULL_RTX;
431
432 /* Strip off the MEM. */
433 x = XEXP (x, 0);
434
435 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
436 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
437 ;
438
439   /* If X isn't a PLUS then this isn't a tablejump we understand.  */
440 if (GET_CODE (x) != PLUS)
441 return NULL_RTX;
442
443 /* At this point we should have an expression representing the jump table
444 plus an offset. Examine each operand in order to determine which one
445 represents the jump table. Knowing that tells us that the other operand
446 must represent the offset. */
447 for (i = 0; i < 2; i++)
448 {
449 old_insn = insn;
450 y = XEXP (x, i);
451
452 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
453 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
454 ;
455
456 if ((GET_CODE (y) == CONST || GET_CODE (y) == LABEL_REF)
457 && reg_mentioned_p (label, y))
458 break;
459 }
460
461 if (i >= 2)
462 return NULL_RTX;
463
464 x = XEXP (x, 1 - i);
465
466 /* Strip off the addition / subtraction of PIC_OFFSET_TABLE_REGNUM. */
467 if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS)
468 for (i = 0; i < 2; i++)
469 if (XEXP (x, i) == pic_offset_table_rtx)
470 {
471 x = XEXP (x, 1 - i);
472 break;
473 }
474
475 if (earliest)
476 *earliest = insn;
477
478 /* Return the RTL expression representing the offset. */
479 return x;
480 }
481 \f
482 /* Return the number of places FIND appears within X. If COUNT_DEST is
483 zero, we do not count occurrences inside the destination of a SET. */
484
485 int
486 count_occurrences (x, find, count_dest)
487 rtx x, find;
488 int count_dest;
489 {
490 int i, j;
491 enum rtx_code code;
492 const char *format_ptr;
493 int count;
494
495 if (x == find)
496 return 1;
497
498 code = GET_CODE (x);
499
500 switch (code)
501 {
502 case REG:
503 case CONST_INT:
504 case CONST_DOUBLE:
505 case SYMBOL_REF:
506 case CODE_LABEL:
507 case PC:
508 case CC0:
509 return 0;
510
511 case MEM:
512 if (GET_CODE (find) == MEM && rtx_equal_p (x, find))
513 return 1;
514 break;
515
516 case SET:
517 if (SET_DEST (x) == find && ! count_dest)
518 return count_occurrences (SET_SRC (x), find, count_dest);
519 break;
520
521 default:
522 break;
523 }
524
525 format_ptr = GET_RTX_FORMAT (code);
526 count = 0;
527
528 for (i = 0; i < GET_RTX_LENGTH (code); i++)
529 {
530 switch (*format_ptr++)
531 {
532 case 'e':
533 count += count_occurrences (XEXP (x, i), find, count_dest);
534 break;
535
536 case 'E':
537 for (j = 0; j < XVECLEN (x, i); j++)
538 count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
539 break;
540 }
541 }
542 return count;
543 }
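
/* Illustrative sketch, not part of the original source: because
   stack_pointer_rtx is a unique shared rtx, pointer identity inside
   count_occurrences finds every use of it in a pattern.  The helper
   name is made up.  */
#if 0
static int
example_count_sp_uses (insn)
     rtx insn;
{
  /* Count occurrences in the whole pattern, including SET destinations.  */
  return count_occurrences (PATTERN (insn), stack_pointer_rtx, 1);
}
#endif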
544 \f
545 /* Nonzero if register REG appears somewhere within IN.
546 Also works if REG is not a register; in this case it checks
547 for a subexpression of IN that is Lisp "equal" to REG. */
548
549 int
550 reg_mentioned_p (reg, in)
551 rtx reg, in;
552 {
553 const char *fmt;
554 int i;
555 enum rtx_code code;
556
557 if (in == 0)
558 return 0;
559
560 if (reg == in)
561 return 1;
562
563 if (GET_CODE (in) == LABEL_REF)
564 return reg == XEXP (in, 0);
565
566 code = GET_CODE (in);
567
568 switch (code)
569 {
570 /* Compare registers by number. */
571 case REG:
572 return GET_CODE (reg) == REG && REGNO (in) == REGNO (reg);
573
574 /* These codes have no constituent expressions
575 and are unique. */
576 case SCRATCH:
577 case CC0:
578 case PC:
579 return 0;
580
581 case CONST_INT:
582 return GET_CODE (reg) == CONST_INT && INTVAL (in) == INTVAL (reg);
583
584 case CONST_DOUBLE:
585 /* These are kept unique for a given value. */
586 return 0;
587
588 default:
589 break;
590 }
591
592 if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
593 return 1;
594
595 fmt = GET_RTX_FORMAT (code);
596
597 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
598 {
599 if (fmt[i] == 'E')
600 {
601 int j;
602 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
603 if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
604 return 1;
605 }
606 else if (fmt[i] == 'e'
607 && reg_mentioned_p (reg, XEXP (in, i)))
608 return 1;
609 }
610 return 0;
611 }
612 \f
613 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
614 no CODE_LABEL insn. */
615
616 int
617 no_labels_between_p (beg, end)
618 rtx beg, end;
619 {
620 rtx p;
621 if (beg == end)
622 return 0;
623 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
624 if (GET_CODE (p) == CODE_LABEL)
625 return 0;
626 return 1;
627 }
628
629 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
630 no JUMP_INSN insn. */
631
632 int
633 no_jumps_between_p (beg, end)
634 rtx beg, end;
635 {
636 rtx p;
637 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
638 if (GET_CODE (p) == JUMP_INSN)
639 return 0;
640 return 1;
641 }
642
643 /* Nonzero if register REG is used in an insn between
644 FROM_INSN and TO_INSN (exclusive of those two). */
645
646 int
647 reg_used_between_p (reg, from_insn, to_insn)
648 rtx reg, from_insn, to_insn;
649 {
650 rtx insn;
651
652 if (from_insn == to_insn)
653 return 0;
654
655 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
656 if (INSN_P (insn)
657 && (reg_overlap_mentioned_p (reg, PATTERN (insn))
658 || (GET_CODE (insn) == CALL_INSN
659 && (find_reg_fusage (insn, USE, reg)
660 || find_reg_fusage (insn, CLOBBER, reg)))))
661 return 1;
662 return 0;
663 }
664 \f
665 /* Nonzero if the old value of X, a register, is referenced in BODY. If X
666 is entirely replaced by a new value and the only use is as a SET_DEST,
667 we do not consider it a reference. */
668
669 int
670 reg_referenced_p (x, body)
671 rtx x;
672 rtx body;
673 {
674 int i;
675
676 switch (GET_CODE (body))
677 {
678 case SET:
679 if (reg_overlap_mentioned_p (x, SET_SRC (body)))
680 return 1;
681
682 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
683 of a REG that occupies all of the REG, the insn references X if
684 it is mentioned in the destination. */
685 if (GET_CODE (SET_DEST (body)) != CC0
686 && GET_CODE (SET_DEST (body)) != PC
687 && GET_CODE (SET_DEST (body)) != REG
688 && ! (GET_CODE (SET_DEST (body)) == SUBREG
689 && GET_CODE (SUBREG_REG (SET_DEST (body))) == REG
690 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
691 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
692 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
693 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
694 && reg_overlap_mentioned_p (x, SET_DEST (body)))
695 return 1;
696 return 0;
697
698 case ASM_OPERANDS:
699 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
700 if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
701 return 1;
702 return 0;
703
704 case CALL:
705 case USE:
706 case IF_THEN_ELSE:
707 return reg_overlap_mentioned_p (x, body);
708
709 case TRAP_IF:
710 return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
711
712 case PREFETCH:
713 return reg_overlap_mentioned_p (x, XEXP (body, 0));
714
715 case UNSPEC:
716 case UNSPEC_VOLATILE:
717 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
718 if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
719 return 1;
720 return 0;
721
722 case PARALLEL:
723 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
724 if (reg_referenced_p (x, XVECEXP (body, 0, i)))
725 return 1;
726 return 0;
727
728 case CLOBBER:
729 if (GET_CODE (XEXP (body, 0)) == MEM)
730 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
731 return 1;
732 return 0;
733
734 case COND_EXEC:
735 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
736 return 1;
737 return reg_referenced_p (x, COND_EXEC_CODE (body));
738
739 default:
740 return 0;
741 }
742 }
743
744 /* Nonzero if register REG is referenced in an insn between
745 FROM_INSN and TO_INSN (exclusive of those two). Sets of REG do
746 not count. */
747
748 int
749 reg_referenced_between_p (reg, from_insn, to_insn)
750 rtx reg, from_insn, to_insn;
751 {
752 rtx insn;
753
754 if (from_insn == to_insn)
755 return 0;
756
757 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
758 if (INSN_P (insn)
759 && (reg_referenced_p (reg, PATTERN (insn))
760 || (GET_CODE (insn) == CALL_INSN
761 && find_reg_fusage (insn, USE, reg))))
762 return 1;
763 return 0;
764 }
765 \f
766 /* Nonzero if register REG is set or clobbered in an insn between
767 FROM_INSN and TO_INSN (exclusive of those two). */
768
769 int
770 reg_set_between_p (reg, from_insn, to_insn)
771 rtx reg, from_insn, to_insn;
772 {
773 rtx insn;
774
775 if (from_insn == to_insn)
776 return 0;
777
778 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
779 if (INSN_P (insn) && reg_set_p (reg, insn))
780 return 1;
781 return 0;
782 }
783
784 /* Return nonzero if REG is set or clobbered within INSN; this is the
   internals of reg_set_between_p.  */
785 int
786 reg_set_p (reg, insn)
787 rtx reg, insn;
788 {
789 rtx body = insn;
790
791 /* We can be passed an insn or part of one. If we are passed an insn,
792 check if a side-effect of the insn clobbers REG. */
793 if (INSN_P (insn))
794 {
795 if (FIND_REG_INC_NOTE (insn, reg)
796 || (GET_CODE (insn) == CALL_INSN
797 /* We'd like to test call_used_regs here, but rtlanal.c can't
798 reference that variable due to its use in genattrtab. So
799 we'll just be more conservative.
800
801 ??? Unless we could ensure that the CALL_INSN_FUNCTION_USAGE
802 information holds all clobbered registers. */
803 && ((GET_CODE (reg) == REG
804 && REGNO (reg) < FIRST_PSEUDO_REGISTER)
805 || GET_CODE (reg) == MEM
806 || find_reg_fusage (insn, CLOBBER, reg))))
807 return 1;
808
809 body = PATTERN (insn);
810 }
811
812 return set_of (reg, insn) != NULL_RTX;
813 }
814
815 /* Similar to reg_set_between_p, but check all registers in X. Return 0
816 only if none of them are modified between START and END. Do not
817 consider non-registers one way or the other. */
818
819 int
820 regs_set_between_p (x, start, end)
821 rtx x;
822 rtx start, end;
823 {
824 enum rtx_code code = GET_CODE (x);
825 const char *fmt;
826 int i, j;
827
828 switch (code)
829 {
830 case CONST_INT:
831 case CONST_DOUBLE:
832 case CONST:
833 case SYMBOL_REF:
834 case LABEL_REF:
835 case PC:
836 case CC0:
837 return 0;
838
839 case REG:
840 return reg_set_between_p (x, start, end);
841
842 default:
843 break;
844 }
845
846 fmt = GET_RTX_FORMAT (code);
847 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
848 {
849 if (fmt[i] == 'e' && regs_set_between_p (XEXP (x, i), start, end))
850 return 1;
851
852 else if (fmt[i] == 'E')
853 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
854 if (regs_set_between_p (XVECEXP (x, i, j), start, end))
855 return 1;
856 }
857
858 return 0;
859 }
860
861 /* Similar to reg_set_between_p, but check all registers in X. Return 0
862 only if none of them are modified between START and END. Return 1 if
863 X contains a MEM; this routine does not perform any memory aliasing. */
864
865 int
866 modified_between_p (x, start, end)
867 rtx x;
868 rtx start, end;
869 {
870 enum rtx_code code = GET_CODE (x);
871 const char *fmt;
872 int i, j;
873
874 switch (code)
875 {
876 case CONST_INT:
877 case CONST_DOUBLE:
878 case CONST:
879 case SYMBOL_REF:
880 case LABEL_REF:
881 return 0;
882
883 case PC:
884 case CC0:
885 return 1;
886
887 case MEM:
888 /* If the memory is not constant, assume it is modified. If it is
889 constant, we still have to check the address. */
890 if (! RTX_UNCHANGING_P (x))
891 return 1;
892 break;
893
894 case REG:
895 return reg_set_between_p (x, start, end);
896
897 default:
898 break;
899 }
900
901 fmt = GET_RTX_FORMAT (code);
902 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
903 {
904 if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
905 return 1;
906
907 else if (fmt[i] == 'E')
908 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
909 if (modified_between_p (XVECEXP (x, i, j), start, end))
910 return 1;
911 }
912
913 return 0;
914 }
915
916 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
917 of them are modified in INSN. Return 1 if X contains a MEM; this routine
918 does not perform any memory aliasing. */
919
920 int
921 modified_in_p (x, insn)
922 rtx x;
923 rtx insn;
924 {
925 enum rtx_code code = GET_CODE (x);
926 const char *fmt;
927 int i, j;
928
929 switch (code)
930 {
931 case CONST_INT:
932 case CONST_DOUBLE:
933 case CONST:
934 case SYMBOL_REF:
935 case LABEL_REF:
936 return 0;
937
938 case PC:
939 case CC0:
940 return 1;
941
942 case MEM:
943 /* If the memory is not constant, assume it is modified. If it is
944 constant, we still have to check the address. */
945 if (! RTX_UNCHANGING_P (x))
946 return 1;
947 break;
948
949 case REG:
950 return reg_set_p (x, insn);
951
952 default:
953 break;
954 }
955
956 fmt = GET_RTX_FORMAT (code);
957 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
958 {
959 if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
960 return 1;
961
962 else if (fmt[i] == 'E')
963 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
964 if (modified_in_p (XVECEXP (x, i, j), insn))
965 return 1;
966 }
967
968 return 0;
969 }
970
971 /* Return true if anything in insn X is (anti,output,true) dependent on
972 anything in insn Y. */
973
974 int
975 insn_dependent_p (x, y)
976 rtx x, y;
977 {
978 rtx tmp;
979
980 if (! INSN_P (x) || ! INSN_P (y))
981 abort ();
982
983 tmp = PATTERN (y);
984 note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
985 if (tmp == NULL_RTX)
986 return 1;
987
988 tmp = PATTERN (x);
989 note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
990 if (tmp == NULL_RTX)
991 return 1;
992
993 return 0;
994 }
995
996 /* A helper routine for insn_dependent_p called through note_stores. */
997
998 static void
999 insn_dependent_p_1 (x, pat, data)
1000 rtx x;
1001 rtx pat ATTRIBUTE_UNUSED;
1002 void *data;
1003 {
1004 rtx * pinsn = (rtx *) data;
1005
1006 if (*pinsn && reg_mentioned_p (x, *pinsn))
1007 *pinsn = NULL_RTX;
1008 }
1009 \f
1010 /* Helper function for set_of. */
1011 struct set_of_data
1012 {
1013 rtx found;
1014 rtx pat;
1015 };
1016
1017 static void
1018 set_of_1 (x, pat, data1)
1019 rtx x;
1020 rtx pat;
1021 void *data1;
1022 {
1023 struct set_of_data *data = (struct set_of_data *) (data1);
1024 if (rtx_equal_p (x, data->pat)
1025 || (GET_CODE (x) != MEM && reg_overlap_mentioned_p (data->pat, x)))
1026 data->found = pat;
1027 }
1028
1029 /* Given an INSN, return a SET or CLOBBER expression that does modify PAT
1030 (either directly or via STRICT_LOW_PART and similar modifiers). */
1031 rtx
1032 set_of (pat, insn)
1033 rtx pat, insn;
1034 {
1035 struct set_of_data data;
1036 data.found = NULL_RTX;
1037 data.pat = pat;
1038 note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
1039 return data.found;
1040 }
1041 \f
1042 /* Given an INSN, return a SET expression if this insn has only a single SET.
1043    It may also have CLOBBERs, USEs, or SETs whose outputs
1044 will not be used, which we ignore. */
1045
1046 rtx
1047 single_set_2 (insn, pat)
1048 rtx insn, pat;
1049 {
1050 rtx set = NULL;
1051 int set_verified = 1;
1052 int i;
1053
1054 if (GET_CODE (pat) == PARALLEL)
1055 {
1056 for (i = 0; i < XVECLEN (pat, 0); i++)
1057 {
1058 rtx sub = XVECEXP (pat, 0, i);
1059 switch (GET_CODE (sub))
1060 {
1061 case USE:
1062 case CLOBBER:
1063 break;
1064
1065 case SET:
1066 	    /* We can consider insns having multiple sets, where all
1067 	       but one are dead, as single set insns.  In the common case
1068 	       only a single set is present in the pattern, so we want
1069 	       to avoid checking for REG_UNUSED notes unless necessary.
1070 
1071 	       When we reach a set the first time, we just expect it is
1072 	       the single set we are looking for; only when more sets
1073 	       are found in the insn do we check them.  */
1074 if (!set_verified)
1075 {
1076 if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
1077 && !side_effects_p (set))
1078 set = NULL;
1079 else
1080 set_verified = 1;
1081 }
1082 if (!set)
1083 set = sub, set_verified = 0;
1084 else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
1085 || side_effects_p (sub))
1086 return NULL_RTX;
1087 break;
1088
1089 default:
1090 return NULL_RTX;
1091 }
1092 }
1093 }
1094 return set;
1095 }
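
/* Illustrative sketch, not part of the original source: callers normally
   reach this routine through the single_set macro from rtl.h, for
   instance to recognize a plain register-to-register copy.  The helper
   name is made up.  */
#if 0
static int
example_simple_copy_p (insn)
     rtx insn;
{
  rtx set = single_set (insn);

  return (set != 0
	  && GET_CODE (SET_DEST (set)) == REG
	  && GET_CODE (SET_SRC (set)) == REG);
}
#endif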
1096
1097 /* Given an INSN, return nonzero if it has more than one SET, else return
1098 zero. */
1099
1100 int
1101 multiple_sets (insn)
1102 rtx insn;
1103 {
1104 int found;
1105 int i;
1106
1107 /* INSN must be an insn. */
1108 if (! INSN_P (insn))
1109 return 0;
1110
1111 /* Only a PARALLEL can have multiple SETs. */
1112 if (GET_CODE (PATTERN (insn)) == PARALLEL)
1113 {
1114 for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1115 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1116 {
1117 /* If we have already found a SET, then return now. */
1118 if (found)
1119 return 1;
1120 else
1121 found = 1;
1122 }
1123 }
1124
1125 /* Either zero or one SET. */
1126 return 0;
1127 }
1128 \f
1129 /* Return nonzero if the destination of SET equals the source
1130 and there are no side effects. */
1131
1132 int
1133 set_noop_p (set)
1134 rtx set;
1135 {
1136 rtx src = SET_SRC (set);
1137 rtx dst = SET_DEST (set);
1138
1139 if (side_effects_p (src) || side_effects_p (dst))
1140 return 0;
1141
1142 if (GET_CODE (dst) == MEM && GET_CODE (src) == MEM)
1143 return rtx_equal_p (dst, src);
1144
1145 if (dst == pc_rtx && src == pc_rtx)
1146 return 1;
1147
1148 if (GET_CODE (dst) == SIGN_EXTRACT
1149 || GET_CODE (dst) == ZERO_EXTRACT)
1150 return rtx_equal_p (XEXP (dst, 0), src)
1151 && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx;
1152
1153 if (GET_CODE (dst) == STRICT_LOW_PART)
1154 dst = XEXP (dst, 0);
1155
1156 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1157 {
1158 if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1159 return 0;
1160 src = SUBREG_REG (src);
1161 dst = SUBREG_REG (dst);
1162 }
1163
1164 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
1165 && REGNO (src) == REGNO (dst));
1166 }
1167 \f
1168 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1169 value to itself. */
1170
1171 int
1172 noop_move_p (insn)
1173 rtx insn;
1174 {
1175 rtx pat = PATTERN (insn);
1176
1177 if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1178 return 1;
1179
1180 /* Insns carrying these notes are useful later on. */
1181 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1182 return 0;
1183
1184   /* For now treat an insn with a REG_RETVAL note as a
1185      special insn which should not be considered a no-op.  */
1186 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
1187 return 0;
1188
1189 if (GET_CODE (pat) == SET && set_noop_p (pat))
1190 return 1;
1191
1192 if (GET_CODE (pat) == PARALLEL)
1193 {
1194 int i;
1195 /* If nothing but SETs of registers to themselves,
1196 this insn can also be deleted. */
1197 for (i = 0; i < XVECLEN (pat, 0); i++)
1198 {
1199 rtx tem = XVECEXP (pat, 0, i);
1200
1201 if (GET_CODE (tem) == USE
1202 || GET_CODE (tem) == CLOBBER)
1203 continue;
1204
1205 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1206 return 0;
1207 }
1208
1209 return 1;
1210 }
1211 return 0;
1212 }
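
/* Illustrative sketch, not part of the original source: a cleanup pass
   might walk the insn chain and remove the no-op moves this predicate
   identifies, assuming the usual delete_insn routine from jump.c.  The
   helper name is made up.  */
#if 0
static void
example_remove_noop_moves (first)
     rtx first;
{
  rtx insn, next;

  for (insn = first; insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (INSN_P (insn) && noop_move_p (insn))
	delete_insn (insn);
    }
}
#endif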
1213 \f
1214
1215 /* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1216 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1217 If the object was modified, if we hit a partial assignment to X, or hit a
1218 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1219 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
1220 be the src. */
1221
1222 rtx
1223 find_last_value (x, pinsn, valid_to, allow_hwreg)
1224 rtx x;
1225 rtx *pinsn;
1226 rtx valid_to;
1227 int allow_hwreg;
1228 {
1229 rtx p;
1230
1231 for (p = PREV_INSN (*pinsn); p && GET_CODE (p) != CODE_LABEL;
1232 p = PREV_INSN (p))
1233 if (INSN_P (p))
1234 {
1235 rtx set = single_set (p);
1236 rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1237
1238 if (set && rtx_equal_p (x, SET_DEST (set)))
1239 {
1240 rtx src = SET_SRC (set);
1241
1242 if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1243 src = XEXP (note, 0);
1244
1245 if ((valid_to == NULL_RTX
1246 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1247 /* Reject hard registers because we don't usually want
1248 to use them; we'd rather use a pseudo. */
1249 && (! (GET_CODE (src) == REG
1250 && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1251 {
1252 *pinsn = p;
1253 return src;
1254 }
1255 }
1256
1257 /* If set in non-simple way, we don't have a value. */
1258 if (reg_set_p (x, p))
1259 break;
1260 }
1261
1262 return x;
1263 }
1264 \f
1265 /* Return nonzero if register in range [REGNO, ENDREGNO)
1266 appears either explicitly or implicitly in X
1267 other than being stored into.
1268
1269 References contained within the substructure at LOC do not count.
1270 LOC may be zero, meaning don't ignore anything. */
1271
1272 int
1273 refers_to_regno_p (regno, endregno, x, loc)
1274 unsigned int regno, endregno;
1275 rtx x;
1276 rtx *loc;
1277 {
1278 int i;
1279 unsigned int x_regno;
1280 RTX_CODE code;
1281 const char *fmt;
1282
1283 repeat:
1284   /* The contents of a REG_NONNEG note are always zero, so we must come here
1285 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1286 if (x == 0)
1287 return 0;
1288
1289 code = GET_CODE (x);
1290
1291 switch (code)
1292 {
1293 case REG:
1294 x_regno = REGNO (x);
1295
1296     /* If we are modifying the stack, frame, or argument pointer, it will
1297 clobber a virtual register. In fact, we could be more precise,
1298 but it isn't worth it. */
1299 if ((x_regno == STACK_POINTER_REGNUM
1300 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1301 || x_regno == ARG_POINTER_REGNUM
1302 #endif
1303 || x_regno == FRAME_POINTER_REGNUM)
1304 && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1305 return 1;
1306
1307 return (endregno > x_regno
1308 && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
1309 ? HARD_REGNO_NREGS (x_regno, GET_MODE (x))
1310 : 1));
1311
1312 case SUBREG:
1313 /* If this is a SUBREG of a hard reg, we can see exactly which
1314 registers are being modified. Otherwise, handle normally. */
1315 if (GET_CODE (SUBREG_REG (x)) == REG
1316 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1317 {
1318 unsigned int inner_regno = subreg_regno (x);
1319 unsigned int inner_endregno
1320 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1321 			     ? HARD_REGNO_NREGS (inner_regno, GET_MODE (x)) : 1);
1322
1323 return endregno > inner_regno && regno < inner_endregno;
1324 }
1325 break;
1326
1327 case CLOBBER:
1328 case SET:
1329 if (&SET_DEST (x) != loc
1330 /* Note setting a SUBREG counts as referring to the REG it is in for
1331 a pseudo but not for hard registers since we can
1332 treat each word individually. */
1333 && ((GET_CODE (SET_DEST (x)) == SUBREG
1334 && loc != &SUBREG_REG (SET_DEST (x))
1335 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
1336 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1337 && refers_to_regno_p (regno, endregno,
1338 SUBREG_REG (SET_DEST (x)), loc))
1339 || (GET_CODE (SET_DEST (x)) != REG
1340 && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1341 return 1;
1342
1343 if (code == CLOBBER || loc == &SET_SRC (x))
1344 return 0;
1345 x = SET_SRC (x);
1346 goto repeat;
1347
1348 default:
1349 break;
1350 }
1351
1352 /* X does not match, so try its subexpressions. */
1353
1354 fmt = GET_RTX_FORMAT (code);
1355 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1356 {
1357 if (fmt[i] == 'e' && loc != &XEXP (x, i))
1358 {
1359 if (i == 0)
1360 {
1361 x = XEXP (x, 0);
1362 goto repeat;
1363 }
1364 else
1365 if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1366 return 1;
1367 }
1368 else if (fmt[i] == 'E')
1369 {
1370 int j;
1371 for (j = XVECLEN (x, i) - 1; j >=0; j--)
1372 if (loc != &XVECEXP (x, i, j)
1373 && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1374 return 1;
1375 }
1376 }
1377 return 0;
1378 }
1379
1380 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1381 we check if any register number in X conflicts with the relevant register
1382 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1383 contains a MEM (we don't bother checking for memory addresses that can't
1384 conflict because we expect this to be a rare case. */
1385
1386 int
1387 reg_overlap_mentioned_p (x, in)
1388 rtx x, in;
1389 {
1390 unsigned int regno, endregno;
1391
1392 /* Overly conservative. */
1393 if (GET_CODE (x) == STRICT_LOW_PART)
1394 x = XEXP (x, 0);
1395
1396 /* If either argument is a constant, then modifying X can not affect IN. */
1397 if (CONSTANT_P (x) || CONSTANT_P (in))
1398 return 0;
1399
1400 switch (GET_CODE (x))
1401 {
1402 case SUBREG:
1403 regno = REGNO (SUBREG_REG (x));
1404 if (regno < FIRST_PSEUDO_REGISTER)
1405 regno = subreg_regno (x);
1406 goto do_reg;
1407
1408 case REG:
1409 regno = REGNO (x);
1410 do_reg:
1411 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1412 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
1413 return refers_to_regno_p (regno, endregno, in, (rtx*)0);
1414
1415 case MEM:
1416 {
1417 const char *fmt;
1418 int i;
1419
1420 if (GET_CODE (in) == MEM)
1421 return 1;
1422
1423 fmt = GET_RTX_FORMAT (GET_CODE (in));
1424 for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1425 if (fmt[i] == 'e' && reg_overlap_mentioned_p (x, XEXP (in, i)))
1426 return 1;
1427
1428 return 0;
1429 }
1430
1431 case SCRATCH:
1432 case PC:
1433 case CC0:
1434 return reg_mentioned_p (x, in);
1435
1436 case PARALLEL:
1437 {
1438 int i;
1439
1440 /* If any register in here refers to it we return true. */
1441 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1442 if (XEXP (XVECEXP (x, 0, i), 0) != 0
1443 && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1444 return 1;
1445 return 0;
1446 }
1447
1448 default:
1449 break;
1450 }
1451
1452 abort ();
1453 }
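
/* Illustrative sketch, not part of the original source: because hard
   registers are expanded to their full register range, this routine can
   tell whether any part of a multi-word hard register is mentioned in an
   insn pattern.  The helper name is made up.  */
#if 0
static int
example_hard_reg_mentioned_p (regno, mode, insn)
     unsigned int regno;
     enum machine_mode mode;
     rtx insn;
{
  return reg_overlap_mentioned_p (gen_rtx_REG (mode, regno),
				  PATTERN (insn));
}
#endif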
1454 \f
1455 /* Return the last value to which REG was set prior to INSN. If we can't
1456 find it easily, return 0.
1457
1458 We only return a REG, SUBREG, or constant because it is too hard to
1459 check if a MEM remains unchanged. */
1460
1461 rtx
1462 reg_set_last (x, insn)
1463 rtx x;
1464 rtx insn;
1465 {
1466 rtx orig_insn = insn;
1467
1468   /* Scan backwards from INSN, looking for an insn that sets X.
1469      Stop when we reach a label, or when X is a hard reg and we reach a
1470      CALL_INSN (a call may clobber any hard register).
1471 
1472      If we find a set of X, ensure that its SET_SRC remains unchanged.  */
1473 
1474   /* We compare with FIRST_PSEUDO_REGISTER here, because hard register
1475      numbers are exactly those below it.  */
1476   for (;
1477        insn && GET_CODE (insn) != CODE_LABEL
1478        && ! (GET_CODE (insn) == CALL_INSN
1479 	     && REGNO (x) < FIRST_PSEUDO_REGISTER);
1480 insn = PREV_INSN (insn))
1481 if (INSN_P (insn))
1482 {
1483 rtx set = set_of (x, insn);
1484 	/* OK, this insn modifies our register.  See if we understand it.  */
1485 if (set)
1486 {
1487 rtx last_value;
1488 if (GET_CODE (set) != SET || SET_DEST (set) != x)
1489 return 0;
1490 	    last_value = SET_SRC (set);
1491 if (CONSTANT_P (last_value)
1492 || ((GET_CODE (last_value) == REG
1493 || GET_CODE (last_value) == SUBREG)
1494 && ! reg_set_between_p (last_value,
1495 insn, orig_insn)))
1496 return last_value;
1497 else
1498 return 0;
1499 }
1500 }
1501
1502 return 0;
1503 }
1504 \f
1505 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1506 (X would be the pattern of an insn).
1507 FUN receives two arguments:
1508 the REG, MEM, CC0 or PC being stored in or clobbered,
1509 the SET or CLOBBER rtx that does the store.
1510
1511 If the item being stored in or clobbered is a SUBREG of a hard register,
1512 the SUBREG will be passed. */
1513
1514 void
1515 note_stores (x, fun, data)
1516 rtx x;
1517 void (*fun) PARAMS ((rtx, rtx, void *));
1518 void *data;
1519 {
1520 int i;
1521
1522 if (GET_CODE (x) == COND_EXEC)
1523 x = COND_EXEC_CODE (x);
1524
1525 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1526 {
1527 rtx dest = SET_DEST (x);
1528
1529 while ((GET_CODE (dest) == SUBREG
1530 && (GET_CODE (SUBREG_REG (dest)) != REG
1531 || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1532 || GET_CODE (dest) == ZERO_EXTRACT
1533 || GET_CODE (dest) == SIGN_EXTRACT
1534 || GET_CODE (dest) == STRICT_LOW_PART)
1535 dest = XEXP (dest, 0);
1536
1537 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1538 each of whose first operand is a register. We can't know what
1539 precisely is being set in these cases, so make up a CLOBBER to pass
1540 to the function. */
1541 if (GET_CODE (dest) == PARALLEL)
1542 {
1543 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1544 if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1545 (*fun) (XEXP (XVECEXP (dest, 0, i), 0),
1546 gen_rtx_CLOBBER (VOIDmode,
1547 XEXP (XVECEXP (dest, 0, i), 0)),
1548 data);
1549 }
1550 else
1551 (*fun) (dest, x, data);
1552 }
1553
1554 else if (GET_CODE (x) == PARALLEL)
1555 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1556 note_stores (XVECEXP (x, 0, i), fun, data);
1557 }
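
/* Illustrative sketch, not part of the original source: a typical caller
   passes a callback that records which hard registers an insn sets or
   clobbers.  The helper names are made up; a full implementation would
   also expand multi-register modes to all of their hard registers.  */
#if 0
static void
example_record_store (dest, setter, data)
     rtx dest;
     rtx setter ATTRIBUTE_UNUSED;
     void *data;
{
  HARD_REG_SET *pset = (HARD_REG_SET *) data;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);
  if (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    SET_HARD_REG_BIT (*pset, REGNO (dest));
}

static void
example_hard_regs_stored_by (insn, pset)
     rtx insn;
     HARD_REG_SET *pset;
{
  CLEAR_HARD_REG_SET (*pset);
  note_stores (PATTERN (insn), example_record_store, (void *) pset);
}
#endif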
1558 \f
1559 /* Like note_stores, but call FUN for each expression that is being
1560 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1561 FUN for each expression, not any interior subexpressions. FUN receives a
1562 pointer to the expression and the DATA passed to this function.
1563
1564 Note that this is not quite the same test as that done in reg_referenced_p
1565 since that considers something as being referenced if it is being
1566 partially set, while we do not. */
1567
1568 void
1569 note_uses (pbody, fun, data)
1570 rtx *pbody;
1571 void (*fun) PARAMS ((rtx *, void *));
1572 void *data;
1573 {
1574 rtx body = *pbody;
1575 int i;
1576
1577 switch (GET_CODE (body))
1578 {
1579 case COND_EXEC:
1580 (*fun) (&COND_EXEC_TEST (body), data);
1581 note_uses (&COND_EXEC_CODE (body), fun, data);
1582 return;
1583
1584 case PARALLEL:
1585 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1586 note_uses (&XVECEXP (body, 0, i), fun, data);
1587 return;
1588
1589 case USE:
1590 (*fun) (&XEXP (body, 0), data);
1591 return;
1592
1593 case ASM_OPERANDS:
1594 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1595 (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1596 return;
1597
1598 case TRAP_IF:
1599 (*fun) (&TRAP_CONDITION (body), data);
1600 return;
1601
1602 case PREFETCH:
1603 (*fun) (&XEXP (body, 0), data);
1604 return;
1605
1606 case UNSPEC:
1607 case UNSPEC_VOLATILE:
1608 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1609 (*fun) (&XVECEXP (body, 0, i), data);
1610 return;
1611
1612 case CLOBBER:
1613 if (GET_CODE (XEXP (body, 0)) == MEM)
1614 (*fun) (&XEXP (XEXP (body, 0), 0), data);
1615 return;
1616
1617 case SET:
1618 {
1619 rtx dest = SET_DEST (body);
1620
1621 	/* For sets we call FUN on everything in the source, plus registers in
1622 	   the memory expression of the store and operands of a ZERO_EXTRACT.  */
1623 (*fun) (&SET_SRC (body), data);
1624
1625 if (GET_CODE (dest) == ZERO_EXTRACT)
1626 {
1627 (*fun) (&XEXP (dest, 1), data);
1628 (*fun) (&XEXP (dest, 2), data);
1629 }
1630
1631 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1632 dest = XEXP (dest, 0);
1633
1634 if (GET_CODE (dest) == MEM)
1635 (*fun) (&XEXP (dest, 0), data);
1636 }
1637 return;
1638
1639 default:
1640 /* All the other possibilities never store. */
1641 (*fun) (pbody, data);
1642 return;
1643 }
1644 }
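
/* Illustrative sketch, not part of the original source: the callback
   receives a pointer to each use position, so a caller can inspect or
   rewrite uses in place.  These made-up helpers merely test whether a
   register is used, as opposed to only set, by an insn.  */
#if 0
static void
example_scan_use (px, data)
     rtx *px;
     void *data;
{
  rtx *preg = (rtx *) data;

  /* *PREG holds the register we are looking for; clear it once found.  */
  if (*preg != NULL_RTX && reg_mentioned_p (*preg, *px))
    *preg = NULL_RTX;
}

static int
example_reg_used_p (reg, insn)
     rtx reg;
     rtx insn;
{
  rtx data = reg;

  note_uses (&PATTERN (insn), example_scan_use, &data);
  return data == NULL_RTX;
}
#endif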
1645 \f
1646 /* Return nonzero if X's old contents don't survive after INSN.
1647 This will be true if X is (cc0) or if X is a register and
1648 X dies in INSN or because INSN entirely sets X.
1649
1650 "Entirely set" means set directly and not through a SUBREG,
1651 ZERO_EXTRACT or SIGN_EXTRACT, so no trace of the old contents remains.
1652 Likewise, REG_INC does not count.
1653
1654 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1655 but for this use that makes no difference, since regs don't overlap
1656 during their lifetimes. Therefore, this function may be used
1657 at any time after deaths have been computed (in flow.c).
1658
1659 If REG is a hard reg that occupies multiple machine registers, this
1660 function will only return 1 if each of those registers will be replaced
1661 by INSN. */
1662
1663 int
1664 dead_or_set_p (insn, x)
1665 rtx insn;
1666 rtx x;
1667 {
1668 unsigned int regno, last_regno;
1669 unsigned int i;
1670
1671 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1672 if (GET_CODE (x) == CC0)
1673 return 1;
1674
1675 if (GET_CODE (x) != REG)
1676 abort ();
1677
1678 regno = REGNO (x);
1679 last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
1680 : regno + HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1);
1681
1682 for (i = regno; i <= last_regno; i++)
1683 if (! dead_or_set_regno_p (insn, i))
1684 return 0;
1685
1686 return 1;
1687 }
1688
1689 /* Utility function for dead_or_set_p to check an individual register. Also
1690 called from flow.c. */
1691
1692 int
1693 dead_or_set_regno_p (insn, test_regno)
1694 rtx insn;
1695 unsigned int test_regno;
1696 {
1697 unsigned int regno, endregno;
1698 rtx pattern;
1699
1700 /* See if there is a death note for something that includes TEST_REGNO. */
1701 if (find_regno_note (insn, REG_DEAD, test_regno))
1702 return 1;
1703
1704 if (GET_CODE (insn) == CALL_INSN
1705 && find_regno_fusage (insn, CLOBBER, test_regno))
1706 return 1;
1707
1708 pattern = PATTERN (insn);
1709
1710 if (GET_CODE (pattern) == COND_EXEC)
1711 pattern = COND_EXEC_CODE (pattern);
1712
1713 if (GET_CODE (pattern) == SET)
1714 {
1715       rtx dest = SET_DEST (pattern);
1716
1717 /* A value is totally replaced if it is the destination or the
1718 destination is a SUBREG of REGNO that does not change the number of
1719 words in it. */
1720 if (GET_CODE (dest) == SUBREG
1721 && (((GET_MODE_SIZE (GET_MODE (dest))
1722 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1723 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1724 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1725 dest = SUBREG_REG (dest);
1726
1727 if (GET_CODE (dest) != REG)
1728 return 0;
1729
1730 regno = REGNO (dest);
1731 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1732 : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));
1733
1734 return (test_regno >= regno && test_regno < endregno);
1735 }
1736 else if (GET_CODE (pattern) == PARALLEL)
1737 {
1738 int i;
1739
1740 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1741 {
1742 rtx body = XVECEXP (pattern, 0, i);
1743
1744 if (GET_CODE (body) == COND_EXEC)
1745 body = COND_EXEC_CODE (body);
1746
1747 if (GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1748 {
1749 rtx dest = SET_DEST (body);
1750
1751 if (GET_CODE (dest) == SUBREG
1752 && (((GET_MODE_SIZE (GET_MODE (dest))
1753 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1754 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1755 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1756 dest = SUBREG_REG (dest);
1757
1758 if (GET_CODE (dest) != REG)
1759 continue;
1760
1761 regno = REGNO (dest);
1762 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1763 : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));
1764
1765 if (test_regno >= regno && test_regno < endregno)
1766 return 1;
1767 }
1768 }
1769 }
1770
1771 return 0;
1772 }
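
/* Illustrative sketch, not part of the original source: a typical client
   asks whether the source register of a copy dies at the copy, in which
   case later passes are free to reuse it.  The helper name is made up.  */
#if 0
static int
example_copy_source_dies_p (insn)
     rtx insn;
{
  rtx set = single_set (insn);

  return (set != 0
	  && GET_CODE (SET_SRC (set)) == REG
	  && dead_or_set_p (insn, SET_SRC (set)));
}
#endif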
1773
1774 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1775 If DATUM is nonzero, look for one whose datum is DATUM. */
1776
1777 rtx
1778 find_reg_note (insn, kind, datum)
1779 rtx insn;
1780 enum reg_note kind;
1781 rtx datum;
1782 {
1783 rtx link;
1784
1785 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1786 if (! INSN_P (insn))
1787 return 0;
1788
1789 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1790 if (REG_NOTE_KIND (link) == kind
1791 && (datum == 0 || datum == XEXP (link, 0)))
1792 return link;
1793 return 0;
1794 }
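
/* Illustrative sketch, not part of the original source: the value
   recorded by a REG_EQUAL note, if any, is the first operand of the
   EXPR_LIST returned above.  The helper name is made up.  */
#if 0
static rtx
example_known_equal_value (insn)
     rtx insn;
{
  rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);

  return note ? XEXP (note, 0) : NULL_RTX;
}
#endif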
1795
1796 /* Return the reg-note of kind KIND in insn INSN which applies to register
1797 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1798 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1799 it might be the case that the note overlaps REGNO. */
1800
1801 rtx
1802 find_regno_note (insn, kind, regno)
1803 rtx insn;
1804 enum reg_note kind;
1805 unsigned int regno;
1806 {
1807 rtx link;
1808
1809 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1810 if (! INSN_P (insn))
1811 return 0;
1812
1813 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1814 if (REG_NOTE_KIND (link) == kind
1815 /* Verify that it is a register, so that scratch and MEM won't cause a
1816 problem here. */
1817 && GET_CODE (XEXP (link, 0)) == REG
1818 && REGNO (XEXP (link, 0)) <= regno
1819 && ((REGNO (XEXP (link, 0))
1820 + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
1821 : HARD_REGNO_NREGS (REGNO (XEXP (link, 0)),
1822 GET_MODE (XEXP (link, 0)))))
1823 > regno))
1824 return link;
1825 return 0;
1826 }
1827
1828 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1829 has such a note. */
1830
1831 rtx
1832 find_reg_equal_equiv_note (insn)
1833 rtx insn;
1834 {
1835 rtx note;
1836
1837 if (single_set (insn) == 0)
1838 return 0;
1839 else if ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
1840 return note;
1841 else
1842 return find_reg_note (insn, REG_EQUAL, NULL_RTX);
1843 }
1844
1845 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1846 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1847
1848 int
1849 find_reg_fusage (insn, code, datum)
1850 rtx insn;
1851 enum rtx_code code;
1852 rtx datum;
1853 {
1854 /* If it's not a CALL_INSN, it can't possibly have a
1855 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
1856 if (GET_CODE (insn) != CALL_INSN)
1857 return 0;
1858
1859 if (! datum)
1860 abort();
1861
1862 if (GET_CODE (datum) != REG)
1863 {
1864 rtx link;
1865
1866 for (link = CALL_INSN_FUNCTION_USAGE (insn);
1867 link;
1868 link = XEXP (link, 1))
1869 if (GET_CODE (XEXP (link, 0)) == code
1870 && rtx_equal_p (datum, SET_DEST (XEXP (link, 0))))
1871 return 1;
1872 }
1873 else
1874 {
1875 unsigned int regno = REGNO (datum);
1876
1877 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1878 to pseudo registers, so don't bother checking. */
1879
1880 if (regno < FIRST_PSEUDO_REGISTER)
1881 {
1882 unsigned int end_regno
1883 = regno + HARD_REGNO_NREGS (regno, GET_MODE (datum));
1884 unsigned int i;
1885
1886 for (i = regno; i < end_regno; i++)
1887 if (find_regno_fusage (insn, code, i))
1888 return 1;
1889 }
1890 }
1891
1892 return 0;
1893 }
1894
1895 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
1896 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1897
1898 int
1899 find_regno_fusage (insn, code, regno)
1900 rtx insn;
1901 enum rtx_code code;
1902 unsigned int regno;
1903 {
1904 rtx link;
1905
1906 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1907 to pseudo registers, so don't bother checking. */
1908
1909 if (regno >= FIRST_PSEUDO_REGISTER
1910 || GET_CODE (insn) != CALL_INSN )
1911 return 0;
1912
1913 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1914 {
1915 unsigned int regnote;
1916 rtx op, reg;
1917
1918 if (GET_CODE (op = XEXP (link, 0)) == code
1919 && GET_CODE (reg = XEXP (op, 0)) == REG
1920 && (regnote = REGNO (reg)) <= regno
1921 && regnote + HARD_REGNO_NREGS (regnote, GET_MODE (reg)) > regno)
1922 return 1;
1923 }
1924
1925 return 0;
1926 }
1927 \f
1928 /* Remove register note NOTE from the REG_NOTES of INSN. */
1929
1930 void
1931 remove_note (insn, note)
1932 rtx insn;
1933 rtx note;
1934 {
1935 rtx link;
1936
1937 if (note == NULL_RTX)
1938 return;
1939
1940 if (REG_NOTES (insn) == note)
1941 {
1942 REG_NOTES (insn) = XEXP (note, 1);
1943 return;
1944 }
1945
1946 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1947 if (XEXP (link, 1) == note)
1948 {
1949 XEXP (link, 1) = XEXP (note, 1);
1950 return;
1951 }
1952
1953 abort ();
1954 }
1955
1956 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1957 return 1 if it is found. A simple equality test is used to determine if
1958 NODE matches. */
1959
1960 int
1961 in_expr_list_p (listp, node)
1962 rtx listp;
1963 rtx node;
1964 {
1965 rtx x;
1966
1967 for (x = listp; x; x = XEXP (x, 1))
1968 if (node == XEXP (x, 0))
1969 return 1;
1970
1971 return 0;
1972 }
1973
1974 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1975 remove that entry from the list if it is found.
1976
1977 A simple equality test is used to determine if NODE matches. */
1978
1979 void
1980 remove_node_from_expr_list (node, listp)
1981 rtx node;
1982 rtx *listp;
1983 {
1984 rtx temp = *listp;
1985 rtx prev = NULL_RTX;
1986
1987 while (temp)
1988 {
1989 if (node == XEXP (temp, 0))
1990 {
1991 /* Splice the node out of the list. */
1992 if (prev)
1993 XEXP (prev, 1) = XEXP (temp, 1);
1994 else
1995 *listp = XEXP (temp, 1);
1996
1997 return;
1998 }
1999
2000 prev = temp;
2001 temp = XEXP (temp, 1);
2002 }
2003 }
2004 \f
2005 /* Nonzero if X contains any volatile instructions. These are instructions
2006    which may cause unpredictable machine state, and thus no
2007 instructions should be moved or combined across them. This includes
2008 only volatile asms and UNSPEC_VOLATILE instructions. */
2009
2010 int
2011 volatile_insn_p (x)
2012 rtx x;
2013 {
2014 RTX_CODE code;
2015
2016 code = GET_CODE (x);
2017 switch (code)
2018 {
2019 case LABEL_REF:
2020 case SYMBOL_REF:
2021 case CONST_INT:
2022 case CONST:
2023 case CONST_DOUBLE:
2024 case CC0:
2025 case PC:
2026 case REG:
2027 case SCRATCH:
2028 case CLOBBER:
2029 case ASM_INPUT:
2030 case ADDR_VEC:
2031 case ADDR_DIFF_VEC:
2032 case CALL:
2033 case MEM:
2034 return 0;
2035
2036 case UNSPEC_VOLATILE:
2037 /* case TRAP_IF: This isn't clear yet. */
2038 return 1;
2039
2040 case ASM_OPERANDS:
2041 if (MEM_VOLATILE_P (x))
2042 return 1;
2043
2044 default:
2045 break;
2046 }
2047
2048 /* Recursively scan the operands of this expression. */
2049
2050 {
2051 const char *fmt = GET_RTX_FORMAT (code);
2052 int i;
2053
2054 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2055 {
2056 if (fmt[i] == 'e')
2057 {
2058 if (volatile_insn_p (XEXP (x, i)))
2059 return 1;
2060 }
2061 else if (fmt[i] == 'E')
2062 {
2063 int j;
2064 for (j = 0; j < XVECLEN (x, i); j++)
2065 if (volatile_insn_p (XVECEXP (x, i, j)))
2066 return 1;
2067 }
2068 }
2069 }
2070 return 0;
2071 }
2072
2073 /* Nonzero if X contains any volatile memory references
2074 UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
2075
2076 int
2077 volatile_refs_p (x)
2078 rtx x;
2079 {
2080 RTX_CODE code;
2081
2082 code = GET_CODE (x);
2083 switch (code)
2084 {
2085 case LABEL_REF:
2086 case SYMBOL_REF:
2087 case CONST_INT:
2088 case CONST:
2089 case CONST_DOUBLE:
2090 case CC0:
2091 case PC:
2092 case REG:
2093 case SCRATCH:
2094 case CLOBBER:
2095 case ASM_INPUT:
2096 case ADDR_VEC:
2097 case ADDR_DIFF_VEC:
2098 return 0;
2099
2100 case CALL:
2101 case UNSPEC_VOLATILE:
2102 /* case TRAP_IF: This isn't clear yet. */
2103 return 1;
2104
2105 case MEM:
2106 case ASM_OPERANDS:
2107 if (MEM_VOLATILE_P (x))
2108 return 1;
2109
2110 default:
2111 break;
2112 }
2113
2114 /* Recursively scan the operands of this expression. */
2115
2116 {
2117 const char *fmt = GET_RTX_FORMAT (code);
2118 int i;
2119
2120 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2121 {
2122 if (fmt[i] == 'e')
2123 {
2124 if (volatile_refs_p (XEXP (x, i)))
2125 return 1;
2126 }
2127 else if (fmt[i] == 'E')
2128 {
2129 int j;
2130 for (j = 0; j < XVECLEN (x, i); j++)
2131 if (volatile_refs_p (XVECEXP (x, i, j)))
2132 return 1;
2133 }
2134 }
2135 }
2136 return 0;
2137 }
2138
2139 /* Similar to above, except that it also rejects register pre- and post-
2140 incrementing. */
2141
2142 int
2143 side_effects_p (x)
2144 rtx x;
2145 {
2146 RTX_CODE code;
2147
2148 code = GET_CODE (x);
2149 switch (code)
2150 {
2151 case LABEL_REF:
2152 case SYMBOL_REF:
2153 case CONST_INT:
2154 case CONST:
2155 case CONST_DOUBLE:
2156 case CC0:
2157 case PC:
2158 case REG:
2159 case SCRATCH:
2160 case ASM_INPUT:
2161 case ADDR_VEC:
2162 case ADDR_DIFF_VEC:
2163 return 0;
2164
2165 case CLOBBER:
2166 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2167 when some combination can't be done. If we see one, don't think
2168 that we can simplify the expression. */
2169 return (GET_MODE (x) != VOIDmode);
2170
2171 case PRE_INC:
2172 case PRE_DEC:
2173 case POST_INC:
2174 case POST_DEC:
2175 case PRE_MODIFY:
2176 case POST_MODIFY:
2177 case CALL:
2178 case UNSPEC_VOLATILE:
2179 /* case TRAP_IF: This isn't clear yet. */
2180 return 1;
2181
2182 case MEM:
2183 case ASM_OPERANDS:
2184 if (MEM_VOLATILE_P (x))
2185 return 1;
2186
2187 default:
2188 break;
2189 }
2190
2191 /* Recursively scan the operands of this expression. */
2192
2193 {
2194 const char *fmt = GET_RTX_FORMAT (code);
2195 int i;
2196
2197 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2198 {
2199 if (fmt[i] == 'e')
2200 {
2201 if (side_effects_p (XEXP (x, i)))
2202 return 1;
2203 }
2204 else if (fmt[i] == 'E')
2205 {
2206 int j;
2207 for (j = 0; j < XVECLEN (x, i); j++)
2208 if (side_effects_p (XVECEXP (x, i, j)))
2209 return 1;
2210 }
2211 }
2212 }
2213 return 0;
2214 }
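
/* Illustrative sketch, not part of the original source: optimizers use
   this predicate as a guard, e.g. a single-set insn whose result is
   marked REG_UNUSED can be removed only when neither side of the SET has
   side effects.  The helper name is made up.  */
#if 0
static int
example_set_is_removable_p (insn)
     rtx insn;
{
  rtx set = single_set (insn);

  return (set != 0
	  && find_reg_note (insn, REG_UNUSED, SET_DEST (set)) != 0
	  && ! side_effects_p (SET_SRC (set))
	  && ! side_effects_p (SET_DEST (set)));
}
#endif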
2215 \f
2216 /* Return nonzero if evaluating rtx X might cause a trap. */
2217
2218 int
2219 may_trap_p (x)
2220 rtx x;
2221 {
2222 int i;
2223 enum rtx_code code;
2224 const char *fmt;
2225
2226 if (x == 0)
2227 return 0;
2228 code = GET_CODE (x);
2229 switch (code)
2230 {
2231 /* Handle these cases quickly. */
2232 case CONST_INT:
2233 case CONST_DOUBLE:
2234 case SYMBOL_REF:
2235 case LABEL_REF:
2236 case CONST:
2237 case PC:
2238 case CC0:
2239 case REG:
2240 case SCRATCH:
2241 return 0;
2242
2243 case ASM_INPUT:
2244 case UNSPEC_VOLATILE:
2245 case TRAP_IF:
2246 return 1;
2247
2248 case ASM_OPERANDS:
2249 return MEM_VOLATILE_P (x);
2250
2251 /* Memory ref can trap unless it's a static var or a stack slot. */
2252 case MEM:
2253 return rtx_addr_can_trap_p (XEXP (x, 0));
2254
2255 /* Division by a non-constant might trap. */
2256 case DIV:
2257 case MOD:
2258 case UDIV:
2259 case UMOD:
2260 if (! CONSTANT_P (XEXP (x, 1))
2261 || GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2262 return 1;
2263 /* This was const0_rtx, but by not using that,
2264 we can link this file into other programs. */
2265 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 0)
2266 return 1;
2267 break;
2268
2269 case EXPR_LIST:
2270 /* An EXPR_LIST is used to represent a function call. This
2271 certainly may trap. */
2272 return 1;
2273
2274 case GE:
2275 case GT:
2276 case LE:
2277 case LT:
2278 case COMPARE:
2279 /* Some floating point comparisons may trap. */
2280 /* ??? There is no machine independent way to check for tests that trap
2281 when COMPARE is used, though many targets do make this distinction.
2282 For instance, sparc uses CCFPE for compares which generate exceptions
2283 and CCFP for compares which do not generate exceptions. */
2284 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2285 return 1;
2286 /* But often the compare has some CC mode, so check operand
2287 modes as well. */
2288 if (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) == MODE_FLOAT
2289 || GET_MODE_CLASS (GET_MODE (XEXP (x, 1))) == MODE_FLOAT)
2290 return 1;
2291 break;
2292
2293 case NEG:
2294 case ABS:
2295 /* These operations don't trap even with floating point. */
2296 break;
2297
2298 default:
2299 /* Any floating arithmetic may trap. */
2300 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2301 return 1;
2302 }
2303
2304 fmt = GET_RTX_FORMAT (code);
2305 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2306 {
2307 if (fmt[i] == 'e')
2308 {
2309 if (may_trap_p (XEXP (x, i)))
2310 return 1;
2311 }
2312 else if (fmt[i] == 'E')
2313 {
2314 int j;
2315 for (j = 0; j < XVECLEN (x, i); j++)
2316 if (may_trap_p (XVECEXP (x, i, j)))
2317 return 1;
2318 }
2319 }
2320 return 0;
2321 }
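
/* A minimal usage sketch, under assumptions of our own: the function
   below is hypothetical (nothing in the compiler calls it) and only
   illustrates how a code-motion pass might combine may_trap_p with
   side_effects_p before speculating a SET, since a non-trapping
   expression may still have side effects such as volatile references
   or autoincrements.  */

static int
speculatable_set_p_sketch (pat)
     rtx pat;
{
  if (GET_CODE (pat) != SET)
    return 0;

  /* may_trap_p walks both the source and the destination, so a store
     through a possibly trapping address is rejected here as well.  */
  return ! may_trap_p (pat) && ! side_effects_p (pat);
}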
2322 \f
2323 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2324 i.e., an inequality. */
2325
2326 int
2327 inequality_comparisons_p (x)
2328 rtx x;
2329 {
2330 const char *fmt;
2331 int len, i;
2332 enum rtx_code code = GET_CODE (x);
2333
2334 switch (code)
2335 {
2336 case REG:
2337 case SCRATCH:
2338 case PC:
2339 case CC0:
2340 case CONST_INT:
2341 case CONST_DOUBLE:
2342 case CONST:
2343 case LABEL_REF:
2344 case SYMBOL_REF:
2345 return 0;
2346
2347 case LT:
2348 case LTU:
2349 case GT:
2350 case GTU:
2351 case LE:
2352 case LEU:
2353 case GE:
2354 case GEU:
2355 return 1;
2356
2357 default:
2358 break;
2359 }
2360
2361 len = GET_RTX_LENGTH (code);
2362 fmt = GET_RTX_FORMAT (code);
2363
2364 for (i = 0; i < len; i++)
2365 {
2366 if (fmt[i] == 'e')
2367 {
2368 if (inequality_comparisons_p (XEXP (x, i)))
2369 return 1;
2370 }
2371 else if (fmt[i] == 'E')
2372 {
2373 int j;
2374 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2375 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2376 return 1;
2377 }
2378 }
2379
2380 return 0;
2381 }
2382 \f
2383 /* Replace any occurrence of FROM in X with TO. The function does
2384 not enter into CONST_DOUBLE for the replace.
2385
2386 Note that copying is not done so X must not be shared unless all copies
2387 are to be modified. */
2388
2389 rtx
2390 replace_rtx (x, from, to)
2391 rtx x, from, to;
2392 {
2393 int i, j;
2394 const char *fmt;
2395
2396 /* The following prevents loops when we change a MEM inside
2397 a CONST_DOUBLE into the same CONST_DOUBLE. */
2398 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2399 return x;
2400
2401 if (x == from)
2402 return to;
2403
2404 /* Allow this function to make replacements in EXPR_LISTs. */
2405 if (x == 0)
2406 return 0;
2407
2408 fmt = GET_RTX_FORMAT (GET_CODE (x));
2409 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2410 {
2411 if (fmt[i] == 'e')
2412 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2413 else if (fmt[i] == 'E')
2414 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2415 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2416 }
2417
2418 return x;
2419 }
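
/* Usage sketch (a hypothetical helper, shown only to illustrate the
   "no copying is done" caveat above): rewrite every use of FROM as TO
   in INSN's pattern, working on a copy so that rtl shared with other
   insns is left untouched.  Nothing here installs the result back
   into the insn stream.  */

static rtx
replace_reg_in_pattern_sketch (insn, from, to)
     rtx insn, from, to;
{
  /* Copy first; replace_rtx modifies its argument in place.  */
  rtx pat = copy_rtx (PATTERN (insn));

  return replace_rtx (pat, from, to);
}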
2420 \f
2421 /* Throughout the rtx X, replace many registers according to REG_MAP.
2422 Return the replacement for X (which may be X with altered contents).
2423 REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2424 NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2425
2426 We only support REG_MAP entries of REG or SUBREG. Also, hard registers
2427 should not be mapped to pseudos or vice versa since validate_change
2428 is not called.
2429
2430 If REPLACE_DEST is 1, replacements are also done in destinations;
2431 otherwise, only sources are replaced. */
2432
2433 rtx
2434 replace_regs (x, reg_map, nregs, replace_dest)
2435 rtx x;
2436 rtx *reg_map;
2437 unsigned int nregs;
2438 int replace_dest;
2439 {
2440 enum rtx_code code;
2441 int i;
2442 const char *fmt;
2443
2444 if (x == 0)
2445 return x;
2446
2447 code = GET_CODE (x);
2448 switch (code)
2449 {
2450 case SCRATCH:
2451 case PC:
2452 case CC0:
2453 case CONST_INT:
2454 case CONST_DOUBLE:
2455 case CONST:
2456 case SYMBOL_REF:
2457 case LABEL_REF:
2458 return x;
2459
2460 case REG:
2461 /* Verify that the register has an entry before trying to access it. */
2462 if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
2463 {
2464 /* SUBREGs can't be shared. Always return a copy to ensure that if
2465 this replacement occurs more than once then each instance will
2466 get a distinct rtx. */
2467 if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
2468 return copy_rtx (reg_map[REGNO (x)]);
2469 return reg_map[REGNO (x)];
2470 }
2471 return x;
2472
2473 case SUBREG:
2474 /* Prevent making nested SUBREGs. */
2475 if (GET_CODE (SUBREG_REG (x)) == REG && REGNO (SUBREG_REG (x)) < nregs
2476 && reg_map[REGNO (SUBREG_REG (x))] != 0
2477 && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
2478 {
2479 rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
2480 return simplify_gen_subreg (GET_MODE (x), map_val,
2481 GET_MODE (SUBREG_REG (x)),
2482 SUBREG_BYTE (x));
2483 }
2484 break;
2485
2486 case SET:
2487 if (replace_dest)
2488 SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
2489
2490 else if (GET_CODE (SET_DEST (x)) == MEM
2491 || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2492 /* Even if we are not supposed to replace destinations, replace a register
2493 if it is CONTAINED in the destination (i.e. the destination is a MEM
2494 or a STRICT_LOW_PART). */
2495 XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
2496 reg_map, nregs, 0);
2497 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2498 /* Similarly, for ZERO_EXTRACT we replace all operands. */
2499 break;
2500
2501 SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
2502 return x;
2503
2504 default:
2505 break;
2506 }
2507
2508 fmt = GET_RTX_FORMAT (code);
2509 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2510 {
2511 if (fmt[i] == 'e')
2512 XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
2513 else if (fmt[i] == 'E')
2514 {
2515 int j;
2516 for (j = 0; j < XVECLEN (x, i); j++)
2517 XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
2518 nregs, replace_dest);
2519 }
2520 }
2521 return x;
2522 }
2523
2524 /* A subroutine of computed_jump_p.  Return 1 if X contains a REG, MEM, or
2525 constant that is not in the constant pool and is not in the condition
2526 of an IF_THEN_ELSE. */
2527
2528 static int
2529 computed_jump_p_1 (x)
2530 rtx x;
2531 {
2532 enum rtx_code code = GET_CODE (x);
2533 int i, j;
2534 const char *fmt;
2535
2536 switch (code)
2537 {
2538 case LABEL_REF:
2539 case PC:
2540 return 0;
2541
2542 case CONST:
2543 case CONST_INT:
2544 case CONST_DOUBLE:
2545 case SYMBOL_REF:
2546 case REG:
2547 return 1;
2548
2549 case MEM:
2550 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2551 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2552
2553 case IF_THEN_ELSE:
2554 return (computed_jump_p_1 (XEXP (x, 1))
2555 || computed_jump_p_1 (XEXP (x, 2)));
2556
2557 default:
2558 break;
2559 }
2560
2561 fmt = GET_RTX_FORMAT (code);
2562 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2563 {
2564 if (fmt[i] == 'e'
2565 && computed_jump_p_1 (XEXP (x, i)))
2566 return 1;
2567
2568 else if (fmt[i] == 'E')
2569 for (j = 0; j < XVECLEN (x, i); j++)
2570 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2571 return 1;
2572 }
2573
2574 return 0;
2575 }
2576
2577 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2578
2579 Tablejumps and casesi insns are not considered indirect jumps;
2580 we can recognize them by a (use (label_ref)). */
2581
2582 int
2583 computed_jump_p (insn)
2584 rtx insn;
2585 {
2586 int i;
2587 if (GET_CODE (insn) == JUMP_INSN)
2588 {
2589 rtx pat = PATTERN (insn);
2590
2591 if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2592 return 0;
2593 else if (GET_CODE (pat) == PARALLEL)
2594 {
2595 int len = XVECLEN (pat, 0);
2596 int has_use_labelref = 0;
2597
2598 for (i = len - 1; i >= 0; i--)
2599 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2600 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2601 == LABEL_REF))
2602 has_use_labelref = 1;
2603
2604 if (! has_use_labelref)
2605 for (i = len - 1; i >= 0; i--)
2606 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2607 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2608 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2609 return 1;
2610 }
2611 else if (GET_CODE (pat) == SET
2612 && SET_DEST (pat) == pc_rtx
2613 && computed_jump_p_1 (SET_SRC (pat)))
2614 return 1;
2615 }
2616 return 0;
2617 }
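
/* By way of illustration: an indirect jump typically has a pattern
   along the lines of (set (pc) (reg:SI 100)) or a PARALLEL containing
   such a SET, while a tablejump's PARALLEL also carries a
   (use (label_ref ...)), which is why the PARALLEL case above first
   looks for that USE before inspecting the SET sources.  */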
2618
2619 /* Traverse X via depth-first search, calling F for each
2620 sub-expression (including X itself). F is also passed the DATA.
2621 If F returns -1, do not traverse sub-expressions, but continue
2622 traversing the rest of the tree. If F ever returns any other
2623 nonzero value, stop the traversal, and return the value returned
2624 by F. Otherwise, return 0. This function does not traverse inside
2625 tree structure that contains RTX_EXPRs, or into sub-expressions
2626 whose format code is `0' since it is not known whether or not those
2627 codes are actually RTL.
2628
2629 This routine is very general, and could (should?) be used to
2630 implement many of the other routines in this file. */
2631
2632 int
2633 for_each_rtx (x, f, data)
2634 rtx *x;
2635 rtx_function f;
2636 void *data;
2637 {
2638 int result;
2639 int length;
2640 const char *format;
2641 int i;
2642
2643 /* Call F on X. */
2644 result = (*f) (x, data);
2645 if (result == -1)
2646 /* Do not traverse sub-expressions. */
2647 return 0;
2648 else if (result != 0)
2649 /* Stop the traversal. */
2650 return result;
2651
2652 if (*x == NULL_RTX)
2653 /* There are no sub-expressions. */
2654 return 0;
2655
2656 length = GET_RTX_LENGTH (GET_CODE (*x));
2657 format = GET_RTX_FORMAT (GET_CODE (*x));
2658
2659 for (i = 0; i < length; ++i)
2660 {
2661 switch (format[i])
2662 {
2663 case 'e':
2664 result = for_each_rtx (&XEXP (*x, i), f, data);
2665 if (result != 0)
2666 return result;
2667 break;
2668
2669 case 'V':
2670 case 'E':
2671 if (XVEC (*x, i) != 0)
2672 {
2673 int j;
2674 for (j = 0; j < XVECLEN (*x, i); ++j)
2675 {
2676 result = for_each_rtx (&XVECEXP (*x, i, j), f, data);
2677 if (result != 0)
2678 return result;
2679 }
2680 }
2681 break;
2682
2683 default:
2684 /* Nothing to do. */
2685 break;
2686 }
2687
2688 }
2689
2690 return 0;
2691 }
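
/* Example callback for for_each_rtx (a sketch; the name and purpose
   are ours): count the REG expressions reachable in *X, skipping
   everything inside a MEM by returning -1 for it.  DATA points to an
   int accumulator owned by the caller, which would invoke
   for_each_rtx (&PATTERN (insn), count_nonmem_regs_1, &count).  */

static int
count_nonmem_regs_1 (x, data)
     rtx *x;
     void *data;
{
  if (*x == NULL_RTX)
    return 0;

  /* Returning -1 tells for_each_rtx not to traverse inside *X.  */
  if (GET_CODE (*x) == MEM)
    return -1;

  if (GET_CODE (*x) == REG)
    ++*(int *) data;

  return 0;
}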
2692
2693 /* Searches X for any reference to REGNO, returning the rtx of the
2694 reference found if any. Otherwise, returns NULL_RTX. */
2695
2696 rtx
2697 regno_use_in (regno, x)
2698 unsigned int regno;
2699 rtx x;
2700 {
2701 const char *fmt;
2702 int i, j;
2703 rtx tem;
2704
2705 if (GET_CODE (x) == REG && REGNO (x) == regno)
2706 return x;
2707
2708 fmt = GET_RTX_FORMAT (GET_CODE (x));
2709 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2710 {
2711 if (fmt[i] == 'e')
2712 {
2713 if ((tem = regno_use_in (regno, XEXP (x, i))))
2714 return tem;
2715 }
2716 else if (fmt[i] == 'E')
2717 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2718 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2719 return tem;
2720 }
2721
2722 return NULL_RTX;
2723 }
2724
2725 /* Return a value indicating whether OP, an operand of a commutative
2726 operation, is preferred as the first or second operand. The higher
2727 the value, the stronger the preference for being the first operand.
2728 We use negative values to indicate a preference for the second operand
2729 and positive values for the first operand. */
2730
2731 int
2732 commutative_operand_precedence (op)
2733 rtx op;
2734 {
2735 /* Constants always come second.  Prefer "nice" constants. */
2736 if (GET_CODE (op) == CONST_INT)
2737 return -5;
2738 if (GET_CODE (op) == CONST_DOUBLE)
2739 return -4;
2740 if (CONSTANT_P (op))
2741 return -3;
2742
2743 /* SUBREGs of objects should come second. */
2744 if (GET_CODE (op) == SUBREG
2745 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op))) == 'o')
2746 return -2;
2747
2748 /* If only one operand is a `neg', `not',
2749 `mult', `plus', or `minus' expression, it will be the first
2750 operand. */
2751 if (GET_CODE (op) == NEG || GET_CODE (op) == NOT
2752 || GET_CODE (op) == MULT || GET_CODE (op) == PLUS
2753 || GET_CODE (op) == MINUS)
2754 return 2;
2755
2756 /* Complex expressions should come first, so decrease the priority
2757 of objects. */
2758 if (GET_RTX_CLASS (GET_CODE (op)) == 'o')
2759 return -1;
2760 return 0;
2761 }
2762
2763 /* Return 1 iff it is necessary to swap the operands of a commutative
2764 operation in order to canonicalize the expression. */
2765
2766 int
2767 swap_commutative_operands_p (x, y)
2768 rtx x, y;
2769 {
2770 return (commutative_operand_precedence (x)
2771 < commutative_operand_precedence (y));
2772 }
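
/* A sketch of the intended use (the real canonicalization callers
   live elsewhere, e.g. in the simplifiers): put the two operands of a
   commutative operation into canonical order before building the
   expression.  OP0 and OP1 are assumed to be supplied by a
   hypothetical caller.  */

static void
canonicalize_commutative_sketch (op0, op1)
     rtx *op0, *op1;
{
  if (swap_commutative_operands_p (*op0, *op1))
    {
      rtx tem = *op0;
      *op0 = *op1;
      *op1 = tem;
    }
}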
2773
2774 /* Return 1 if X is an autoincrement side effect and the register is
2775 not the stack pointer. */
2776 int
2777 auto_inc_p (x)
2778 rtx x;
2779 {
2780 switch (GET_CODE (x))
2781 {
2782 case PRE_INC:
2783 case POST_INC:
2784 case PRE_DEC:
2785 case POST_DEC:
2786 case PRE_MODIFY:
2787 case POST_MODIFY:
2788 /* There are no REG_INC notes for SP. */
2789 if (XEXP (x, 0) != stack_pointer_rtx)
2790 return 1;
2791 default:
2792 break;
2793 }
2794 return 0;
2795 }
2796
2797 /* Return 1 if the sequence of instructions beginning with FROM and up
2798 to and including TO is safe to move. If NEW_TO is non-NULL, and
2799 the sequence is not already safe to move, but can be easily
2800 extended to a sequence which is safe, then NEW_TO will point to the
2801 end of the extended sequence.
2802
2803 For now, this function only checks that the region contains whole
2804 exception regions, but it could be extended to check additional
2805 conditions as well. */
2806
2807 int
2808 insns_safe_to_move_p (from, to, new_to)
2809 rtx from;
2810 rtx to;
2811 rtx *new_to;
2812 {
2813 int eh_region_count = 0;
2814 int past_to_p = 0;
2815 rtx r = from;
2816
2817 /* By default, assume the end of the region will be what was
2818 suggested. */
2819 if (new_to)
2820 *new_to = to;
2821
2822 while (r)
2823 {
2824 if (GET_CODE (r) == NOTE)
2825 {
2826 switch (NOTE_LINE_NUMBER (r))
2827 {
2828 case NOTE_INSN_EH_REGION_BEG:
2829 ++eh_region_count;
2830 break;
2831
2832 case NOTE_INSN_EH_REGION_END:
2833 if (eh_region_count == 0)
2834 /* This sequence of instructions contains the end of
2835 an exception region, but not the beginning. Moving
2836 it will cause chaos. */
2837 return 0;
2838
2839 --eh_region_count;
2840 break;
2841
2842 default:
2843 break;
2844 }
2845 }
2846 else if (past_to_p)
2847 /* If we've passed TO, and we see a non-note instruction, we
2848 can't extend the sequence to a movable sequence. */
2849 return 0;
2850
2851 if (r == to)
2852 {
2853 if (!new_to)
2854 /* It's OK to move the sequence if there were matched sets of
2855 exception region notes. */
2856 return eh_region_count == 0;
2857
2858 past_to_p = 1;
2859 }
2860
2861 /* It's OK to move the sequence if there were matched sets of
2862 exception region notes. */
2863 if (past_to_p && eh_region_count == 0)
2864 {
2865 *new_to = r;
2866 return 1;
2867 }
2868
2869 /* Go to the next instruction. */
2870 r = NEXT_INSN (r);
2871 }
2872
2873 return 0;
2874 }
2875
2876 /* Return nonzero if IN contains a piece of rtl that has the address LOC.  */
2877 int
2878 loc_mentioned_in_p (loc, in)
2879 rtx *loc, in;
2880 {
2881 enum rtx_code code = GET_CODE (in);
2882 const char *fmt = GET_RTX_FORMAT (code);
2883 int i, j;
2884
2885 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2886 {
2887 if (loc == &in->fld[i].rtx)
2888 return 1;
2889 if (fmt[i] == 'e')
2890 {
2891 if (loc_mentioned_in_p (loc, XEXP (in, i)))
2892 return 1;
2893 }
2894 else if (fmt[i] == 'E')
2895 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
2896 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
2897 return 1;
2898 }
2899 return 0;
2900 }
2901
2902 /* Given a subreg X, return the bit offset where the subreg begins
2903 (counting from the least significant bit of the reg). */
2904
2905 unsigned int
2906 subreg_lsb (x)
2907 rtx x;
2908 {
2909 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
2910 enum machine_mode mode = GET_MODE (x);
2911 unsigned int bitpos;
2912 unsigned int byte;
2913 unsigned int word;
2914
2915 /* A paradoxical subreg begins at bit position 0. */
2916 if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (inner_mode))
2917 return 0;
2918
2919 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2920 /* If the subreg crosses a word boundary, ensure that
2921 it also begins and ends on a word boundary. */
2922 if ((SUBREG_BYTE (x) % UNITS_PER_WORD
2923 + GET_MODE_SIZE (mode)) > UNITS_PER_WORD
2924 && (SUBREG_BYTE (x) % UNITS_PER_WORD
2925 || GET_MODE_SIZE (mode) % UNITS_PER_WORD))
2926 abort ();
2927
2928 if (WORDS_BIG_ENDIAN)
2929 word = (GET_MODE_SIZE (inner_mode)
2930 - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) / UNITS_PER_WORD;
2931 else
2932 word = SUBREG_BYTE (x) / UNITS_PER_WORD;
2933 bitpos = word * BITS_PER_WORD;
2934
2935 if (BYTES_BIG_ENDIAN)
2936 byte = (GET_MODE_SIZE (inner_mode)
2937 - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) % UNITS_PER_WORD;
2938 else
2939 byte = SUBREG_BYTE (x) % UNITS_PER_WORD;
2940 bitpos += byte * BITS_PER_UNIT;
2941
2942 return bitpos;
2943 }
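
/* Worked example (under the assumption that UNITS_PER_WORD == 4):
   for (subreg:SI (reg:DI x) 4) on a target where neither words nor
   bytes are big-endian, word = 4 / 4 = 1 and byte = 4 % 4 = 0, so the
   subreg begins at bit 32, the high-order half of the DImode value.
   On a target where both are big-endian, byte offset 4 names the
   low-order half and the result is bit 0.  */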
2944
2945 /* This function returns the regno offset of a subreg expression.
2946 xregno - A regno of an inner hard subreg_reg (or what will become one).
2947 xmode - The mode of xregno.
2948 offset - The byte offset.
2949 ymode - The mode of a top level SUBREG (or what may become one).
2950 RETURN - The regno offset which would be used.
2951 This function can be overridden by defining SUBREG_REGNO_OFFSET,
2952 taking the same parameters. */
2953 unsigned int
2954 subreg_regno_offset (xregno, xmode, offset, ymode)
2955 unsigned int xregno;
2956 enum machine_mode xmode;
2957 unsigned int offset;
2958 enum machine_mode ymode;
2959 {
2960 unsigned ret;
2961 int nregs_xmode, nregs_ymode;
2962 int mode_multiple, nregs_multiple;
2963 int y_offset;
2964
2965 /* Check for an override, and use it instead. */
2966 #ifdef SUBREG_REGNO_OFFSET
2967 ret = SUBREG_REGNO_OFFSET (xregno, xmode, offset, ymode);
2968 #else
2969 if (xregno >= FIRST_PSEUDO_REGISTER)
2970 abort ();
2971
2972 nregs_xmode = HARD_REGNO_NREGS (xregno, xmode);
2973 nregs_ymode = HARD_REGNO_NREGS (xregno, ymode);
2974 if (offset == 0 || nregs_xmode == nregs_ymode)
2975 return 0;
2976
2977 /* The size of ymode must not be greater than the size of xmode. */
2978 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
2979 if (mode_multiple == 0)
2980 abort ();
2981
2982 y_offset = offset / GET_MODE_SIZE (ymode);
2983 nregs_multiple = nregs_xmode / nregs_ymode;
2984 ret = (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
2985 #endif
2986
2987 return ret;
2988 }
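
/* Worked example, assuming no SUBREG_REGNO_OFFSET override and a
   target where HARD_REGNO_NREGS gives 2 registers for DImode and 1
   for SImode, with 4-byte registers: for xmode DImode, ymode SImode
   and offset 4 we get mode_multiple = 8 / 4 = 2, y_offset = 1 and
   nregs_multiple = 2, so the result is (1 / (2 / 2)) * 1 = 1, i.e.
   the subreg lives in hard register xregno + 1.  */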
2989
2990 /* Return the final regno that a subreg expression refers to. */
2991 unsigned int
2992 subreg_regno (x)
2993 rtx x;
2994 {
2995 unsigned int ret;
2996 rtx subreg = SUBREG_REG (x);
2997 int regno = REGNO (subreg);
2998
2999 ret = regno + subreg_regno_offset (regno,
3000 GET_MODE (subreg),
3001 SUBREG_BYTE (x),
3002 GET_MODE (x));
3003 return ret;
3004
3005 }
3006 struct parms_set_data
3007 {
3008 int nregs;
3009 HARD_REG_SET regs;
3010 };
3011
3012 /* Helper function for noticing stores to parameter registers. */
3013 static void
3014 parms_set (x, pat, data)
3015 rtx x, pat ATTRIBUTE_UNUSED;
3016 void *data;
3017 {
3018 struct parms_set_data *d = data;
3019 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3020 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3021 {
3022 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3023 d->nregs--;
3024 }
3025 }
3026
3027 /* Look backward for the first parameter to be loaded.
3028 Do not skip BOUNDARY. */
3029 rtx
3030 find_first_parameter_load (call_insn, boundary)
3031 rtx call_insn, boundary;
3032 {
3033 struct parms_set_data parm;
3034 rtx p, before;
3035
3036 /* Since different machines initialize their parameter registers
3037 in different orders, assume nothing. Collect the set of all
3038 parameter registers. */
3039 CLEAR_HARD_REG_SET (parm.regs);
3040 parm.nregs = 0;
3041 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3042 if (GET_CODE (XEXP (p, 0)) == USE
3043 && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
3044 {
3045 if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
3046 abort ();
3047
3048 /* We only care about registers which can hold function
3049 arguments. */
3050 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3051 continue;
3052
3053 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3054 parm.nregs++;
3055 }
3056 before = call_insn;
3057
3058 /* Search backward for the first set of a register in this set. */
3059 while (parm.nregs && before != boundary)
3060 {
3061 before = PREV_INSN (before);
3062
3063 /* It is possible that some loads got CSEed from one call to
3064 another. Stop in that case. */
3065 if (GET_CODE (before) == CALL_INSN)
3066 break;
3067
3068 /* Our caller must either ensure that we will find all sets
3069 (in case the code has not been optimized yet), or take care of
3070 possible labels by setting BOUNDARY to the preceding
3071 CODE_LABEL. */
3072 if (GET_CODE (before) == CODE_LABEL)
3073 {
3074 if (before != boundary)
3075 abort ();
3076 break;
3077 }
3078
3079 if (INSN_P (before))
3080 note_stores (PATTERN (before), parms_set, &parm);
3081 }
3082 return before;
3083 }