/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 1988, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

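   As an illustration (a hypothetical sketch, not drawn from any
   particular machine description), combining the linked pair

        (set (reg 100) (plus (reg 99) (const_int 4)))
        (set (mem (reg 100)) (reg 98))

   substitutes the first SET's source into the second, yielding

        (set (mem (plus (reg 99) (const_int 4))) (reg 98))

   which is installed only if it matches some pattern in the machine
   description; otherwise the combination is abandoned.
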
   LOG_LINKS does not have links for uses of the CC0.  None are needed,
   because the insn that sets the CC0 is always immediately
   before the insn that tests it.  So we always regard a branch
   insn as having a logical link to the preceding insn.  The same is true
   for an insn explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information created by
   flow.c isn't completely updated:

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_notes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */
75
76 #include "config.h"
77 #include "gvarargs.h"
78 #include "rtl.h"
79 #include "flags.h"
80 #include "regs.h"
81 #include "expr.h"
82 #include "basic-block.h"
83 #include "insn-config.h"
84 #include "insn-flags.h"
85 #include "insn-codes.h"
86 #include "insn-attr.h"
87 #include "recog.h"
88 #include "real.h"
89 #include <stdio.h>
90
91 /* It is not safe to use ordinary gen_lowpart in combine.
92 Use gen_lowpart_for_combine instead. See comments there. */
93 #define gen_lowpart dont_use_gen_lowpart_you_dummy
94
95 /* Number of attempts to combine instructions in this function. */
96
97 static int combine_attempts;
98
99 /* Number of attempts that got as far as substitution in this function. */
100
101 static int combine_merges;
102
103 /* Number of instructions combined with added SETs in this function. */
104
105 static int combine_extras;
106
107 /* Number of instructions combined in this function. */
108
109 static int combine_successes;
110
111 /* Totals over entire compilation. */
112
113 static int total_attempts, total_merges, total_extras, total_successes;
114 \f
115 /* Vector mapping INSN_UIDs to cuids.
116 The cuids are like uids but increase monotonically always.
117 Combine always uses cuids so that it can compare them.
118 But actually renumbering the uids, which we used to do,
119 proves to be a bad idea because it makes it hard to compare
120 the dumps produced by earlier passes with those from later passes. */
121
122 static int *uid_cuid;
123
124 /* Get the cuid of an insn. */
125
126 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
127
128 /* Maximum register number, which is the size of the tables below. */
129
130 static int combine_max_regno;
131
132 /* Record last point of death of (hard or pseudo) register n. */
133
134 static rtx *reg_last_death;
135
136 /* Record last point of modification of (hard or pseudo) register n. */
137
138 static rtx *reg_last_set;
139
140 /* Record the cuid of the last insn that invalidated memory
141 (anything that writes memory, and subroutine calls, but not pushes). */
142
143 static int mem_last_set;
144
145 /* Record the cuid of the last CALL_INSN
146 so we can tell whether a potential combination crosses any calls. */
147
148 static int last_call_cuid;
149
150 /* When `subst' is called, this is the insn that is being modified
151 (by combining in a previous insn). The PATTERN of this insn
152 is still the old pattern partially modified and it should not be
153 looked at, but this may be used to examine the successors of the insn
154 to judge whether a simplification is valid. */
155
156 static rtx subst_insn;
157
158 /* This is the lowest CUID that `subst' is currently dealing with.
159 get_last_value will not return a value if the register was set at or
160 after this CUID. If not for this mechanism, we could get confused if
161 I2 or I1 in try_combine were an insn that used the old value of a register
162 to obtain a new value. In that case, we might erroneously get the
163 new value of the register when we wanted the old one. */
164
165 static int subst_low_cuid;
166
167 /* This is the value of undobuf.num_undo when we started processing this
168 substitution. This will prevent gen_rtx_combine from re-used a piece
169 from the previous expression. Doing so can produce circular rtl
170 structures. */
171
172 static int previous_num_undos;
173 \f
/* The next group of arrays allows the recording of the last value assigned
   to (hard or pseudo) register n.  We use this information to see if an
   operation being processed is redundant given a prior operation performed
   on the register.  For example, an `and' with a constant is redundant if
   all the zero bits are already known to be turned off.

   We use an approach similar to that used by cse, but change it in the
   following ways:

   (1) We do not want to reinitialize at each label.
   (2) It is useful, but not critical, to know the actual value assigned
       to a register.  Often just its form is helpful.

   Therefore, we maintain the following arrays:

   reg_last_set_value           the last value assigned
   reg_last_set_label           records the value of label_tick when the
                                register was assigned
   reg_last_set_table_tick      records the value of label_tick when a
                                value using the register is assigned
   reg_last_set_invalid         set to non-zero when it is not valid
                                to use the value of this register in some
                                register's value

   To understand the usage of these tables, it is important to understand
   the distinction between the value in reg_last_set_value being valid
   and the register being validly contained in some other expression in the
   table.

   Entry I in reg_last_set_value is valid if it is non-zero, and either
   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.

   Register I may validly appear in any expression returned for the value
   of another register if reg_n_sets[i] is 1.  It may also appear in the
   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
   reg_last_set_invalid[j] is zero.

   If an expression is found in the table containing a register which may
   not validly appear in an expression, the register is replaced by
   something that won't match, (clobber (const_int 0)).

   reg_last_set_invalid[i] is set non-zero when register I is being assigned
   to and reg_last_set_table_tick[i] == label_tick.  */
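
/* For example (an illustrative sketch, not a case taken from this file):
   if register 64 was last assigned in an earlier basic block, so that
   reg_last_set_label[64] < label_tick, its own reg_last_set_value entry
   is unusable unless reg_n_sets[64] == 1; but (reg 64) may still appear
   validly inside the recorded value of some other register, since a
   register set exactly once never changes after that set.  */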

/* Record last value assigned to (hard or pseudo) register n.  */

static rtx *reg_last_set_value;

/* Record the value of label_tick when the value for register n is placed in
   reg_last_set_value[n].  */

static short *reg_last_set_label;

/* Record the value of label_tick when an expression involving register n
   is placed in reg_last_set_value.  */

static short *reg_last_set_table_tick;

/* Set non-zero if references to register n in expressions should not be
   used.  */

static char *reg_last_set_invalid;

/* Incremented for each label.  */

static short label_tick;

/* Some registers that are set more than once and used in more than one
   basic block are nevertheless always set in similar ways.  For example,
   a QImode register may be loaded from memory in two places on a machine
   where byte loads zero extend.

   We record in the following array what we know about the significant
   bits of a register, specifically which bits are known to be zero.

   If an entry is zero, it means that we don't know anything special.  */
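
/* For instance (an illustrative sketch): if a pseudo is only ever set by
   zero-extending QImode loads, its entry here would be 0xff, meaning the
   low eight bits may be nonzero and all higher bits are known zero.  */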

static HOST_WIDE_INT *reg_significant;

/* Mode used to compute significance in reg_significant.  It is the largest
   integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode significant_mode;

/* Nonzero if we know that a register has some leading bits that are always
   equal to the sign bit.  */

static char *reg_sign_bit_copies;

/* Nonzero when reg_significant and reg_sign_bit_copies can be safely used.
   It is zero while computing them.  This prevents propagating values based
   on previously set values, which can be incorrect if a variable
   is modified in a loop.  */

static int significant_valid;
\f
/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  int is_int;
  union {rtx rtx; int i;} old_contents;
  union {rtx *rtx; int *i;} where;
};

/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
   num_undo says how many are currently recorded.

   storage is nonzero if we must undo the allocation of new storage.
   The value of storage is what to pass to obfree.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

#define MAX_UNDO 50

struct undobuf
{
  int num_undo;
  char *storage;
  struct undo undo[MAX_UNDO];
  rtx other_insn;
};

static struct undobuf undobuf;

/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

#define SUBST(INTO, NEWVAL)  \
 do { rtx _new = (NEWVAL);                                      \
      if (undobuf.num_undo < MAX_UNDO)                          \
        {                                                       \
          undobuf.undo[undobuf.num_undo].is_int = 0;            \
          undobuf.undo[undobuf.num_undo].where.rtx = &INTO;     \
          undobuf.undo[undobuf.num_undo].old_contents.rtx = INTO; \
          INTO = _new;                                          \
          if (undobuf.undo[undobuf.num_undo].old_contents.rtx != INTO) \
            undobuf.num_undo++;                                 \
        }                                                       \
    } while (0)
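
/* For example (illustrative usage only; `pat' and `new_src' are
   hypothetical names), replacing the source of a SET so that the change
   is recorded and can be backed out by undo_all if the rewritten insn
   fails to match:

        SUBST (SET_SRC (pat), new_src);  */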

/* Similar to SUBST, but NEWVAL is an int.  INTO will normally be an XINT
   expression.
   Note that substitution for the value of a CONST_INT is not safe.  */

#define SUBST_INT(INTO, NEWVAL)  \
 do { if (undobuf.num_undo < MAX_UNDO)                          \
        {                                                       \
          undobuf.undo[undobuf.num_undo].is_int = 1;            \
          undobuf.undo[undobuf.num_undo].where.i = (int *) &INTO; \
          undobuf.undo[undobuf.num_undo].old_contents.i = INTO; \
          INTO = NEWVAL;                                        \
          if (undobuf.undo[undobuf.num_undo].old_contents.i != INTO) \
            undobuf.num_undo++;                                 \
        }                                                       \
    } while (0)

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static void set_significant ();
static void move_deaths ();
rtx remove_death ();
static void record_value_for_reg ();
static void record_dead_and_set_regs ();
static int use_crosses_set_p ();
static rtx try_combine ();
static rtx *find_split_point ();
static rtx subst ();
static void undo_all ();
static int reg_dead_at_p ();
static rtx expand_compound_operation ();
static rtx expand_field_assignment ();
static rtx make_extraction ();
static int get_pos_from_mask ();
static rtx force_to_mode ();
static rtx make_field_assignment ();
static rtx make_compound_operation ();
static rtx apply_distributive_law ();
static rtx simplify_and_const_int ();
static unsigned HOST_WIDE_INT significant_bits ();
static int num_sign_bit_copies ();
static int merge_outer_ops ();
static rtx simplify_shift_const ();
static int recog_for_combine ();
static rtx gen_lowpart_for_combine ();
static rtx gen_rtx_combine ();
static rtx gen_binary ();
static rtx gen_unary ();
static enum rtx_code simplify_comparison ();
static int reversible_comparison_p ();
static int get_last_value_validate ();
static rtx get_last_value ();
static void distribute_notes ();
static void distribute_links ();
\f
/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.  */

void
combine_instructions (f, nregs)
     rtx f;
     int nregs;
{
  register rtx insn, next, prev;
  register int i;
  register rtx links, nextlinks;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;

  combine_max_regno = nregs;

  reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_table_tick = (short *) alloca (nregs * sizeof (short));
  reg_last_set_label = (short *) alloca (nregs * sizeof (short));
  reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
  reg_significant = (HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));

  bzero (reg_last_death, nregs * sizeof (rtx));
  bzero (reg_last_set, nregs * sizeof (rtx));
  bzero (reg_last_set_value, nregs * sizeof (rtx));
  bzero (reg_last_set_table_tick, nregs * sizeof (short));
  bzero (reg_last_set_invalid, nregs * sizeof (char));
  bzero (reg_significant, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_sign_bit_copies, nregs * sizeof (char));

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) alloca ((i + 1) * sizeof (int));

  significant_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_significant when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  significant_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are significant for some registers.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      INSN_CUID (insn) = ++i;
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        note_stores (PATTERN (insn), set_significant);
    }

  significant_valid = 1;

  /* Now scan all the insns in forward order.  */

  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;

  for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
    {
      next = 0;

      if (GET_CODE (insn) == CODE_LABEL)
        label_tick++;

      else if (GET_CODE (insn) == INSN
               || GET_CODE (insn) == CALL_INSN
               || GET_CODE (insn) == JUMP_INSN)
        {
          /* Try this insn with each insn it links back to.  */

          for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
            if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
              goto retry;

          /* Try each sequence of three linked insns ending with this one.  */

          for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
            for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
                 nextlinks = XEXP (nextlinks, 1))
              if ((next = try_combine (insn, XEXP (links, 0),
                                       XEXP (nextlinks, 0))) != 0)
                goto retry;

#ifdef HAVE_cc0
          /* Try to combine a jump insn that uses CC0
             with a preceding insn that sets CC0, and maybe with its
             logical predecessor as well.
             This is how we make decrement-and-branch insns.
             We need this special code because data flow connections
             via CC0 do not get entered in LOG_LINKS.  */

          if (GET_CODE (insn) == JUMP_INSN
              && (prev = prev_nonnote_insn (insn)) != 0
              && GET_CODE (prev) == INSN
              && sets_cc0_p (PATTERN (prev)))
            {
              if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
                goto retry;

              for (nextlinks = LOG_LINKS (prev); nextlinks;
                   nextlinks = XEXP (nextlinks, 1))
                if ((next = try_combine (insn, prev,
                                         XEXP (nextlinks, 0))) != 0)
                  goto retry;
            }

          /* Do the same for an insn that explicitly references CC0.  */
          if (GET_CODE (insn) == INSN
              && (prev = prev_nonnote_insn (insn)) != 0
              && GET_CODE (prev) == INSN
              && sets_cc0_p (PATTERN (prev))
              && GET_CODE (PATTERN (insn)) == SET
              && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
            {
              if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
                goto retry;

              for (nextlinks = LOG_LINKS (prev); nextlinks;
                   nextlinks = XEXP (nextlinks, 1))
                if ((next = try_combine (insn, prev,
                                         XEXP (nextlinks, 0))) != 0)
                  goto retry;
            }

          /* Finally, see if any of the insns that this insn links to
             explicitly references CC0.  If so, try this insn, that insn,
             and its predecessor if it sets CC0.  */
          for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
            if (GET_CODE (XEXP (links, 0)) == INSN
                && GET_CODE (PATTERN (XEXP (links, 0))) == SET
                && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
                && (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
                && GET_CODE (prev) == INSN
                && sets_cc0_p (PATTERN (prev))
                && (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
              goto retry;
#endif

          /* Try combining an insn with two different insns whose results it
             uses.  */
          for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
            for (nextlinks = XEXP (links, 1); nextlinks;
                 nextlinks = XEXP (nextlinks, 1))
              if ((next = try_combine (insn, XEXP (links, 0),
                                       XEXP (nextlinks, 0))) != 0)
                goto retry;

          if (GET_CODE (insn) != NOTE)
            record_dead_and_set_regs (insn);

        retry:
          ;
        }
    }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;
}
\f
/* Called via note_stores.  If X is a pseudo that is used in more than
   one basic block, is narrower than HOST_BITS_PER_WIDE_INT, and is being
   set, record what bits are significant.  If we are clobbering X,
   ignore this "set" because the clobbered value won't be used.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_significant (x, set)
     rtx x;
     rtx set;
{
  int num;

  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      && reg_n_sets[REGNO (x)] > 1
      && reg_basic_block[REGNO (x)] < 0
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (set) == CLOBBER)
        return;

      /* If this is a complex assignment, see if we can convert it into a
         simple assignment.  */
      set = expand_field_assignment (set);
      if (SET_DEST (set) == x)
        {
          reg_significant[REGNO (x)]
            |= significant_bits (SET_SRC (set), significant_mode);
          num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
          if (reg_sign_bit_copies[REGNO (x)] == 0
              || reg_sign_bit_copies[REGNO (x)] > num)
            reg_sign_bit_copies[REGNO (x)] = num;
        }
      else
        {
          reg_significant[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
          reg_sign_bit_copies[REGNO (x)] = 0;
        }
    }
}
\f
/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */

static int
can_combine_p (insn, i3, pred, succ, pdest, psrc)
     rtx insn;
     rtx i3;
     rtx pred, succ;
     rtx *pdest, *psrc;
{
  int i;
  rtx set = 0, src, dest;
  rtx p, link;
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
                              && next_active_insn (succ) == i3)
                      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
           && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        {
          rtx elt = XVECEXP (PATTERN (insn), 0, i);

          switch (GET_CODE (elt))
            {
              /* We can ignore CLOBBERs.  */
            case CLOBBER:
              break;

            case SET:
              /* Ignore SETs whose result isn't used but not those that
                 have side-effects.  */
              if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
                  && ! side_effects_p (elt))
                break;

              /* If we have already found a SET, this is a second one and
                 so we cannot combine with this insn.  */
              if (set)
                return 0;

              set = elt;
              break;

            default:
              /* Anything else means we can't combine.  */
              return 0;
            }
        }

      if (set == 0
          /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
             so don't do anything with it.  */
          || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
        return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* Don't install a subreg involving two modes not tieable.
         It can worsen register allocation, and can even make invalid reload
         insns, since the reg inside may need to be copied from in the
         outside mode, and that may be invalid if it is an fp reg copied in
         integer mode.  As a special exception, we can allow this if
         I3 is simply copying DEST, a REG, to CC0.  */
      || (GET_CODE (src) == SUBREG
          && ! MODES_TIEABLE_P (GET_MODE (src), GET_MODE (SUBREG_REG (src)))
#ifdef HAVE_cc0
          && ! (GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
                && SET_DEST (PATTERN (i3)) == cc0_rtx
                && GET_CODE (dest) == REG && dest == SET_SRC (PATTERN (i3)))
#endif
          )
      /* If we couldn't eliminate a field assignment, we can't combine.  */
      || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
      /* Don't combine with an insn that sets a register to itself if it has
         a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
      /* Don't combine the end of a libcall into anything.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
          && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
         does not use any registers whose values alter in between.  However,
         if the insns are adjacent, a use can't cross a set even though we
         think it might (this can happen for a sequence of insns each setting
         the same destination; reg_last_set of that register might point to
         a NOTE).  Also, don't move a volatile asm across any other insns.  */
      || (! all_adjacent
          && (use_crosses_set_p (src, INSN_CUID (insn))
              || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
         better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
         change whether the life span of some REGs crosses calls or not,
         and it is a pain to update that information.
         Exception: if source is a constant, moving it later can't hurt.
         Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (GET_CODE (dest) == REG)
    {
      /* If register alignment is being enforced for multi-word items in all
         cases except for parameters, it is possible to have a register copy
         insn referencing a hard register that is not allowed to contain the
         mode being copied and which would not be valid as an operand of most
         insns.  Eliminate this problem by not combining with such an insn.

         Also, on some machines we don't want to extend the life of a hard
         register.  */

      if (GET_CODE (src) == REG
          && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
               && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
#ifdef SMALL_REGISTER_CLASSES
              /* Don't extend the life of a hard register.  */
              || REGNO (src) < FIRST_PSEUDO_REGISTER
#else
              || (REGNO (src) < FIRST_PSEUDO_REGISTER
                  && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))
#endif
              ))
        return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;

  /* Don't substitute for a register intended as a clobberable operand.  */
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
          && rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest))
        return 0;

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3,
     with the exception of SUCC.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
      if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
          && p != succ && volatile_refs_p (PATTERN (p)))
        return 0;

  /* If INSN or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3,
     and not already used in I3 either.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
        && (GET_CODE (i3) == JUMP_INSN
            || reg_used_between_p (XEXP (link, 0), insn, i3)
            || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}
\f
/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We check that it is valid for combining.

   One problem is that if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST as doing
   so would produce an insn that is not equivalent to the original insns.

   Consider:

         (set (reg:DI 101) (reg:DI 100))
         (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

         (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
                    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   On machines where SMALL_REGISTER_CLASSES is defined, we don't combine
   if the destination of a SET is a hard register.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3DEST_KILLED is non-zero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
     rtx i3;
     rtx *loc;
     rtx i2dest;
     rtx i1dest;
     int i1_not_in_src;
     rtx *pi3dest_killed;
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = expand_field_assignment (x);
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest, inner_src = src;

      SUBST (*loc, set);

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
             || GET_CODE (inner_dest) == SUBREG
             || GET_CODE (inner_dest) == ZERO_EXTRACT)
        inner_dest = XEXP (inner_dest, 0);

      /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
         was added.  */
#if 0
      while (GET_CODE (inner_src) == STRICT_LOW_PART
             || GET_CODE (inner_src) == SUBREG
             || GET_CODE (inner_src) == ZERO_EXTRACT)
        inner_src = XEXP (inner_src, 0);

      /* If it is better that two different modes keep two different pseudos,
         avoid combining them.  This avoids producing the following pattern
         on a 386:
          (set (subreg:SI (reg/v:QI 21) 0)
               (lshiftrt:SI (reg/v:SI 20)
                   (const_int 24)))
         If that were made, reload could not handle the pair of
         reg 20/21, since it would try to get any GENERAL_REGS
         but some of them don't handle QImode.  */

      if (rtx_equal_p (inner_src, i2dest)
          && GET_CODE (inner_dest) == REG
          && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
        return 0;
#endif

      /* Check for the case where I3 modifies its output, as
         discussed above.  */
      if ((inner_dest != dest
           && (reg_overlap_mentioned_p (i2dest, inner_dest)
               || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
          /* This is the same test done in can_combine_p except that we
             allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
             CALL operation.  */
          || (GET_CODE (inner_dest) == REG
              && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
#ifdef SMALL_REGISTER_CLASSES
              && GET_CODE (src) != CALL
#else
              && ! HARD_REGNO_MODE_OK (REGNO (inner_dest),
                                       GET_MODE (inner_dest))
#endif
              )

          || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
        return 0;

      /* If DEST is used in I3, it is being killed in this insn,
         so record that for later.  */
      if (pi3dest_killed && GET_CODE (dest) == REG
          && reg_referenced_p (dest, PATTERN (i3)))
        {
          if (*pi3dest_killed)
            return 0;

          *pi3dest_killed = dest;
        }
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
        if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
                                i1_not_in_src, pi3dest_killed))
          return 0;
    }

  return 1;
}
\f
/* Try to combine the insns I1 and I2 into I3.
   Here I1 and I2 appear earlier than I3.
   I1 can be zero; then we combine just I2 into I3.

   If we are combining three insns and the resulting insn is not recognized,
   try splitting it into two insns.  If that happens, I2 and I3 are retained
   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
   are pseudo-deleted.

   If we created two insns, return I2; otherwise return I3.
   Return 0 if the combination does not work.  Then nothing is changed.  */

static rtx
try_combine (i3, i2, i1)
     register rtx i3, i2, i1;
{
  /* New patterns for I3 and I2, respectively.  */
  rtx newpat, newi2pat = 0;
  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
  int added_sets_1, added_sets_2;
  /* Total number of SETs to put into I3.  */
  int total_sets;
  /* Nonzero if I2's body now appears in I3.  */
  int i2_is_used;
  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
  int insn_code_number, i2_code_number, other_code_number;
  /* Contains I3 if the destination of I3 is used in its source, which means
     that the old life of I3 is being killed.  If that usage is placed into
     I2 and not in I3, a REG_DEAD note must be made.  */
  rtx i3dest_killed = 0;
  /* SET_DEST and SET_SRC of I2 and I1.  */
  rtx i2dest, i2src, i1dest = 0, i1src = 0;
  /* PATTERN (I2), or a copy of it in certain cases.  */
  rtx i2pat;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC.  */
  int i2dest_in_i2src, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
  int i1_feeds_i3 = 0;
  /* Notes that must be added to REG_NOTES in I3 and I2.  */
  rtx new_i3_notes, new_i2_notes;

  int maxreg;
  rtx temp;
  register rtx link;
  int i;

  /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  We also can't do anything if I3 has a
     REG_LIBCALL note since we don't want to disrupt the contiguity of a
     libcall.  */

  if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
      || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
      || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
      || find_reg_note (i3, REG_LIBCALL, NULL_RTX))
    return 0;

  combine_attempts++;

  undobuf.num_undo = previous_num_undos = 0;
  undobuf.other_insn = 0;

  /* Save the current high-water-mark so we can free storage if we didn't
     accept this combination.  */
  undobuf.storage = (char *) oballoc (0);

  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
     code below, set I1 to be the earlier of the two insns.  */
  if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
    temp = i1, i1 = i2, i2 = temp;

  /* First check for one important special-case that the code below will
     not handle.  Namely, the case where I1 is zero, I2 has multiple sets,
     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
     we may be able to replace that destination with the destination of I3.
     This occurs in the common code where we compute both a quotient and
     remainder into a structure, in which case we want to do the computation
     directly into the structure to avoid register-register copies.

     We make very conservative checks below and only try to handle the
     most common cases of this.  For example, we only handle the case
     where I2 and I3 are adjacent to avoid making difficult register
     usage tests.  */
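
  /* A sketch of the shape involved (illustrative only): here I2 computes
     both results and I3 merely copies one of them into a structure slot,

         I2: (parallel [(set (reg 96) (div:SI (reg 94) (reg 95)))
                        (set (reg 97) (mod:SI (reg 94) (reg 95)))])
         I3: (set (mem:SI (reg 90)) (reg 97))

     so below we replace (reg 97) in I2 by the destination of I3.  */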

  if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == REG
      && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
#ifdef SMALL_REGISTER_CLASSES
      && (GET_CODE (SET_DEST (PATTERN (i3))) != REG
          || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER)
#endif
      && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
      && GET_CODE (PATTERN (i2)) == PARALLEL
      && ! side_effects_p (SET_DEST (PATTERN (i3)))
      /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
         below would need to check what is inside (and reg_overlap_mentioned_p
         doesn't support those codes anyway).  Don't allow those destinations;
         the resulting insn isn't likely to be recognized anyway.  */
      && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
      && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
                                    SET_DEST (PATTERN (i3)))
      && next_real_insn (i2) == i3)
    {
      rtx p2 = PATTERN (i2);

      /* Make sure that the destination of I3,
         which we are going to substitute into one output of I2,
         is not used within another output of I2.  We must avoid making this:
         (parallel [(set (mem (reg 69)) ...)
                    (set (reg 69) ...)])
         which is not well-defined as to order of actions.
         (Besides, reload can't handle output reloads for this.)

         The problem can also happen if the dest of I3 is a memory ref,
         if another dest in I2 is an indirect memory ref.  */
      for (i = 0; i < XVECLEN (p2, 0); i++)
        if (GET_CODE (XVECEXP (p2, 0, i)) == SET
            && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
                                        SET_DEST (XVECEXP (p2, 0, i))))
          break;

      if (i == XVECLEN (p2, 0))
        for (i = 0; i < XVECLEN (p2, 0); i++)
          if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
            {
              combine_merges++;

              subst_insn = i3;
              subst_low_cuid = INSN_CUID (i2);

              added_sets_2 = 0;
              i2dest = SET_SRC (PATTERN (i3));

              /* Replace the dest in I2 with our dest and make the resulting
                 insn the new pattern for I3.  Then skip to where we
                 validate the pattern.  Everything was set up above.  */
              SUBST (SET_DEST (XVECEXP (p2, 0, i)),
                     SET_DEST (PATTERN (i3)));

              newpat = p2;
              goto validate_replacement;
            }
    }

#ifndef HAVE_cc0
  /* If we have no I1 and I2 looks like:
        (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
                   (set Y OP)])
     make up a dummy I1 that is
        (set Y OP)
     and change I2 to be
        (set (reg:CC X) (compare:CC Y (const_int 0)))

     (We can ignore any trailing CLOBBERs.)

     This undoes a previous combination and allows us to match a branch-and-
     decrement insn.  */

  if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
      && XVECLEN (PATTERN (i2), 0) >= 2
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
      && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
          == MODE_CC)
      && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
      && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
      && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
      && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
                      SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
    {
      for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
        if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
          break;

      if (i == 1)
        {
          /* We make I1 with the same INSN_UID as I2.  This gives it
             the same INSN_CUID for value tracking.  Our fake I1 will
             never appear in the insn stream so giving it the same INSN_UID
             as I2 will not cause a problem.  */

          i1 = gen_rtx (INSN, VOIDmode, INSN_UID (i2), 0, i2,
                        XVECEXP (PATTERN (i2), 0, 1), -1, 0, 0);

          SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
          SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
                 SET_DEST (PATTERN (i1)));
        }
    }
#endif

  /* Verify that I2 and I1 are valid for combining.  */
  if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
      || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
    {
      undo_all ();
      return 0;
    }

  /* Record whether I2DEST is used in I2SRC and similarly for the other
     cases.  Knowing this will help in register status updating below.  */
  i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
  i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
  i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);

  /* See if I1 directly feeds into I3.  It does if I1DEST is not used
     in I2SRC.  */
  i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);

  /* Ensure that I3's pattern can be the destination of combines.  */
  if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
                          i1 && i2dest_in_i1src && i1_feeds_i3,
                          &i3dest_killed))
    {
      undo_all ();
      return 0;
    }

  /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
     We used to do this EXCEPT in one case: I3 has a post-inc in an
     output operand.  However, that exception can give rise to insns like
        mov r3,(r3)+
     which is a famous insn on the PDP-11 where the value of r3 used as the
     source was model-dependent.  Avoid this sort of thing.  */

#if 0
  if (!(GET_CODE (PATTERN (i3)) == SET
        && GET_CODE (SET_SRC (PATTERN (i3))) == REG
        && GET_CODE (SET_DEST (PATTERN (i3))) == MEM
        && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
            || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
    /* It's not the exception.  */
#endif
#ifdef AUTO_INC_DEC
    for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
      if (REG_NOTE_KIND (link) == REG_INC
          && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
              || (i1 != 0
                  && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
        {
          undo_all ();
          return 0;
        }
#endif

  /* See if the SETs in I1 or I2 need to be kept around in the merged
     instruction: whenever the value set there is still needed past I3.
     For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.

     For the SET in I1, we have two cases:  If I1 and I2 independently
     feed into I3, the set in I1 needs to be kept around if I1DEST dies
     or is set in I3.  Otherwise (if I1 feeds I2 which feeds I3), the set
     in I1 needs to be kept around unless I1DEST dies or is set in either
     I2 or I3.  We can distinguish these cases by seeing if I2SRC mentions
     I1DEST.  If so, we know I1 feeds into I2.  */

  added_sets_2 = ! dead_or_set_p (i3, i2dest);

  added_sets_1
    = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
               : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));

  /* If the set in I2 needs to be kept around, we must make a copy of
     PATTERN (I2), so that when we substitute I1SRC for I1DEST in
     PATTERN (I2), we are only substituting for the original I1DEST, not into
     an already-substituted copy.  This also prevents making self-referential
     rtx.  If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
     I2DEST.  */

  i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
           ? gen_rtx (SET, VOIDmode, i2dest, i2src)
           : PATTERN (i2));

  if (added_sets_2)
    i2pat = copy_rtx (i2pat);

  combine_merges++;

  /* Substitute in the latest insn for the regs set by the earlier ones.  */

  maxreg = max_reg_num ();

  subst_insn = i3;

  /* It is possible that the source of I2 or I1 may be performing an
     unneeded operation, such as a ZERO_EXTEND of something that is known
     to have the high part zero.  Handle that case by letting subst look at
     the innermost one of them.

     Another way to do this would be to have a function that tries to
     simplify a single insn instead of merging two or more insns.  We don't
     do this because of the potential of infinite loops and because
     of the potential extra memory required.  However, doing it the way
     we are is a bit of a kludge and doesn't catch all cases.

     But only do this if -fexpensive-optimizations since it slows things down
     and doesn't usually win.  */

  if (flag_expensive_optimizations)
    {
      /* Pass pc_rtx so no substitutions are done, just simplifications.
         The cases that we are interested in here do not involve the few
         cases where is_replaced is checked.  */
      if (i1)
        {
          subst_low_cuid = INSN_CUID (i1);
          i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
        }
      else
        {
          subst_low_cuid = INSN_CUID (i2);
          i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
        }

      previous_num_undos = undobuf.num_undo;
    }

#ifndef HAVE_cc0
  /* Many machines that don't use CC0 have insns that can both perform an
     arithmetic operation and set the condition code.  These operations will
     be represented as a PARALLEL with the first element of the vector
     being a COMPARE of an arithmetic operation with the constant zero.
     The second element of the vector will set some pseudo to the result
     of the same arithmetic operation.  If we simplify the COMPARE, we won't
     match such a pattern and so will generate an extra insn.  Here we test
     for this case, where both the comparison and the operation result are
     needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
     I2SRC.  Later we will make the PARALLEL that contains I2.  */
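
  /* Illustrative shape of such a pattern (a sketch for a hypothetical
     machine, not any particular port):

         (parallel [(set (reg:CC 24)
                         (compare:CC (plus:SI (reg 25) (reg 26))
                                     (const_int 0)))
                    (set (reg:SI 23) (plus:SI (reg 25) (reg 26)))])  */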

  if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
      && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
      && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
    {
      rtx *cc_use;
      enum machine_mode compare_mode;

      newpat = PATTERN (i3);
      SUBST (XEXP (SET_SRC (newpat), 0), i2src);

      i2_is_used = 1;

#ifdef EXTRA_CC_MODES
      /* See if a COMPARE with the operand we substituted in should be done
         with the mode that is currently being used.  If not, do the same
         processing we do in `subst' for a SET; namely, if the destination
         is used only once, try to replace it with a register of the proper
         mode and also replace the COMPARE.  */
      if (undobuf.other_insn == 0
          && (cc_use = find_single_use (SET_DEST (newpat), i3,
                                        &undobuf.other_insn))
          && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
                                              i2src, const0_rtx))
              != GET_MODE (SET_DEST (newpat))))
        {
          int regno = REGNO (SET_DEST (newpat));
          rtx new_dest = gen_rtx (REG, compare_mode, regno);

          if (regno < FIRST_PSEUDO_REGISTER
              || (reg_n_sets[regno] == 1 && ! added_sets_2
                  && ! REG_USERVAR_P (SET_DEST (newpat))))
            {
              if (regno >= FIRST_PSEUDO_REGISTER)
                SUBST (regno_reg_rtx[regno], new_dest);

              SUBST (SET_DEST (newpat), new_dest);
              SUBST (XEXP (*cc_use, 0), new_dest);
              SUBST (SET_SRC (newpat),
                     gen_rtx_combine (COMPARE, compare_mode,
                                      i2src, const0_rtx));
            }
          else
            undobuf.other_insn = 0;
        }
#endif
    }
  else
#endif
    {
      n_occurrences = 0;                /* `subst' counts here */

      /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
         need to make a unique copy of I2SRC each time we substitute it
         to avoid self-referential rtl.  */

      subst_low_cuid = INSN_CUID (i2);
      newpat = subst (PATTERN (i3), i2dest, i2src, 0,
                      ! i1_feeds_i3 && i1dest_in_i1src);
      previous_num_undos = undobuf.num_undo;

      /* Record whether i2's body now appears within i3's body.  */
      i2_is_used = n_occurrences;
    }

  /* If we already got a failure, don't try to do more.  Otherwise,
     try to substitute in I1 if we have it.  */

  if (i1 && GET_CODE (newpat) != CLOBBER)
    {
      /* Before we can do this substitution, we must redo the test done
         above (see detailed comments there) that ensures that I1DEST
         isn't mentioned in any SETs in NEWPAT that are field assignments.  */

      if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
                              0, NULL_PTR))
        {
          undo_all ();
          return 0;
        }

      n_occurrences = 0;
      subst_low_cuid = INSN_CUID (i1);
      newpat = subst (newpat, i1dest, i1src, 0, 0);
      previous_num_undos = undobuf.num_undo;
    }

  /* Fail if an autoincrement side-effect has been duplicated.  Be careful
     to count all the ways that I2SRC and I1SRC can be used.  */
  if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
       && i2_is_used + added_sets_2 > 1)
      || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
          && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
              > 1))
      /* Fail if we tried to make a new register (we used to abort, but there's
         really no reason to).  */
      || max_reg_num () != maxreg
      /* Fail if we couldn't do something and have a CLOBBER.  */
      || GET_CODE (newpat) == CLOBBER)
    {
      undo_all ();
      return 0;
    }

  /* If the actions of the earlier insns must be kept
     in addition to substituting them into the latest one,
     we must make a new PARALLEL for the latest insn
     to hold the additional SETs.  */

  if (added_sets_1 || added_sets_2)
    {
      combine_extras++;

      if (GET_CODE (newpat) == PARALLEL)
        {
          rtvec old = XVEC (newpat, 0);
          total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
          newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
          bcopy (&old->elem[0], &XVECEXP (newpat, 0, 0),
                 sizeof (old->elem[0]) * old->num_elem);
        }
      else
        {
          rtx old = newpat;
          total_sets = 1 + added_sets_1 + added_sets_2;
          newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
          XVECEXP (newpat, 0, 0) = old;
        }

      if (added_sets_1)
        XVECEXP (newpat, 0, --total_sets)
          = (GET_CODE (PATTERN (i1)) == PARALLEL
             ? gen_rtx (SET, VOIDmode, i1dest, i1src) : PATTERN (i1));

      if (added_sets_2)
        {
          /* If there is no I1, use I2's body as is.  We used to also not do
             the subst call below if I2 was substituted into I3,
             but that could lose a simplification.  */
          if (i1 == 0)
            XVECEXP (newpat, 0, --total_sets) = i2pat;
          else
            /* See comment where i2pat is assigned.  */
            XVECEXP (newpat, 0, --total_sets)
              = subst (i2pat, i1dest, i1src, 0, 0);
        }
    }

  /* We come here when we are replacing a destination in I2 with the
     destination of I3.  */
 validate_replacement:

  /* Is the result of combination a valid instruction?  */
  insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);

  /* If the result isn't valid, see if it is a PARALLEL of two SETs where
     the second SET's destination is a register that is unused.  In that case,
     we just need the first SET.  This can occur when simplifying a divmod
     insn.  We *must* test for this case here because the code below that
     splits two independent SETs doesn't handle this case correctly when it
     updates the register status.  Also check the case where the first
     SET's destination is unused.  That would not cause incorrect code, but
     does cause an unneeded insn to remain.  */

  if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
      && XVECLEN (newpat, 0) == 2
      && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
      && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
      && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
      && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
      && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
      && asm_noperands (newpat) < 0)
    {
      newpat = XVECEXP (newpat, 0, 0);
      insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
    }

  else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
           && XVECLEN (newpat, 0) == 2
           && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
           && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
           && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
           && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
           && asm_noperands (newpat) < 0)
    {
      newpat = XVECEXP (newpat, 0, 1);
      insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
    }

  /* See if this is an XOR.  If so, perhaps the problem is that the
     constant is out of range.  Replace it with a complemented XOR with
     a complemented constant; it might be in range.  */
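
  /* E.g. (an illustrative identity, not a case from any particular
     machine): since (xor X C) is equivalent to (not (xor X ~C)), a
     constant C that no insn accepts may become acceptable once
     complemented.  */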
1491
1492 else if (insn_code_number < 0 && GET_CODE (newpat) == SET
1493 && GET_CODE (SET_SRC (newpat)) == XOR
1494 && GET_CODE (XEXP (SET_SRC (newpat), 1)) == CONST_INT
1495 && ((temp = simplify_unary_operation (NOT,
1496 GET_MODE (SET_SRC (newpat)),
1497 XEXP (SET_SRC (newpat), 1),
1498 GET_MODE (SET_SRC (newpat))))
1499 != 0))
1500 {
1501 enum machine_mode i_mode = GET_MODE (SET_SRC (newpat));
1502 rtx pat
1503 = gen_rtx_combine (SET, VOIDmode, SET_DEST (newpat),
1504 gen_unary (NOT, i_mode,
1505 gen_binary (XOR, i_mode,
1506 XEXP (SET_SRC (newpat), 0),
1507 temp)));
1508
1509 insn_code_number = recog_for_combine (&pat, i3, &new_i3_notes);
1510 if (insn_code_number >= 0)
1511 newpat = pat;
1512 }
1513
1514 /* If we were combining three insns and the result is a simple SET
1515 with no ASM_OPERANDS that wasn't recognized, try to split it into two
1516 insns. There are two ways to do this. It can be split using a
1517 machine-specific method (like when you have an addition of a large
1518 constant) or by combine itself, in the function find_split_point. */
1519
1520 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
1521 && asm_noperands (newpat) < 0)
1522 {
1523 rtx m_split, *split;
1524 rtx ni2dest = i2dest;
1525
1526 /* See if the MD file can split NEWPAT. If it can't, see if letting it
1527 use I2DEST as a scratch register will help. In the latter case,
1528 convert I2DEST to the mode of the source of NEWPAT if we can. */
1529
1530 m_split = split_insns (newpat, i3);
1531 if (m_split == 0)
1532 {
1533 /* If I2DEST is a hard register or the only use of a pseudo,
1534 we can change its mode. */
1535 if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
1536 && GET_MODE (SET_DEST (newpat)) != VOIDmode
1537 && GET_CODE (i2dest) == REG
1538 && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1539 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1540 && ! REG_USERVAR_P (i2dest))))
1541 ni2dest = gen_rtx (REG, GET_MODE (SET_DEST (newpat)),
1542 REGNO (i2dest));
1543
1544 m_split = split_insns (gen_rtx (PARALLEL, VOIDmode,
1545 gen_rtvec (2, newpat,
1546 gen_rtx (CLOBBER,
1547 VOIDmode,
1548 ni2dest))),
1549 i3);
1550 }
1551
1552 if (m_split && GET_CODE (m_split) == SEQUENCE
1553 && XVECLEN (m_split, 0) == 2
1554 && (next_real_insn (i2) == i3
1555 || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
1556 INSN_CUID (i2))))
1557 {
1558 rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
1559 newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
1560
1561 /* In case we changed the mode of I2DEST, replace it in the
1562 pseudo-register table here. We can't do it above in case this
1563 code doesn't get executed and we do a split the other way. */
1564
1565 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1566 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
1567
1568 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1569 if (i2_code_number >= 0)
1570 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1571
1572 if (insn_code_number >= 0)
1573 newpat = newi3pat;
1574
1575 /* It is possible that both insns now set the destination of I3.
1576 If so, we must show an extra use of it and update
1577 reg_significant. */
1578
1579 if (insn_code_number >= 0 && GET_CODE (SET_DEST (newpat)) == REG
1580 && GET_CODE (SET_DEST (newi2pat)) == REG
1581 && REGNO (SET_DEST (newpat)) == REGNO (SET_DEST (newi2pat)))
1582 {
1583 reg_n_sets[REGNO (SET_DEST (newpat))]++;
1584 set_significant (SET_DEST (newi2pat), newi2pat);
1585 set_significant (SET_DEST (newpat), newpat);
1586 }
1587 }
1588
1589 /* If we can split it and use I2DEST, go ahead and see if that
1590 helps things be recognized. Verify that none of the registers
1591 are set between I2 and I3. */
1592 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
1593 #ifdef HAVE_cc0
1594 && GET_CODE (i2dest) == REG
1595 #endif
1596 /* We need I2DEST in the proper mode. If it is a hard register
1597 or the only use of a pseudo, we can change its mode. */
1598 && (GET_MODE (*split) == GET_MODE (i2dest)
1599 || GET_MODE (*split) == VOIDmode
1600 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1601 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1602 && ! REG_USERVAR_P (i2dest)))
1603 && (next_real_insn (i2) == i3
1604 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
1605 /* We can't overwrite I2DEST if its value is still used by
1606 NEWPAT. */
1607 && ! reg_referenced_p (i2dest, newpat))
1608 {
1609 rtx newdest = i2dest;
1610
1611 /* Get NEWDEST as a register in the proper mode. We have already
1612 validated that we can do this. */
1613 if (GET_MODE (i2dest) != GET_MODE (*split)
1614 && GET_MODE (*split) != VOIDmode)
1615 {
1616 newdest = gen_rtx (REG, GET_MODE (*split), REGNO (i2dest));
1617
1618 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1619 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
1620 }
1621
1622 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
1623 an ASHIFT. This can occur if it was inside a PLUS and hence
1624 appeared to be a memory address. This is a kludge. */
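/* For instance, (mult:SI (reg:SI 100) (const_int 8)), which looked
   like part of an address inside a PLUS, becomes
   (ashift:SI (reg:SI 100) (const_int 3)).  (Register number invented
   for exposition.)  */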
1625 if (GET_CODE (*split) == MULT
1626 && GET_CODE (XEXP (*split, 1)) == CONST_INT
1627 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
1628 SUBST (*split, gen_rtx_combine (ASHIFT, GET_MODE (*split),
1629 XEXP (*split, 0), GEN_INT (i)));
1630
1631 #ifdef INSN_SCHEDULING
1632 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
1633 be written as a ZERO_EXTEND. */
1634 if (GET_CODE (*split) == SUBREG
1635 && GET_CODE (SUBREG_REG (*split)) == MEM)
1636 SUBST (*split, gen_rtx_combine (ZERO_EXTEND, GET_MODE (*split),
1637 XEXP (*split, 0)));
1638 #endif
1639
1640 newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
1641 SUBST (*split, newdest);
1642 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1643 if (i2_code_number >= 0)
1644 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1645 }
1646 }
1647
1648 /* Check for a case where we loaded from memory in a narrow mode and
1649 then sign extended it, but we need both registers. In that case,
1650 we have a PARALLEL with both loads from the same memory location.
1651 We can split this into a load from memory followed by a register-register
1652 copy. This saves at least one insn, more if register allocation can
1653 eliminate the copy. */
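/* Sketch of the shape handled here (register numbers and modes
   invented for exposition):

       (parallel [(set (reg:SI 100) (sign_extend:SI (mem:HI (reg:SI 101))))
                  (set (reg:HI 102) (mem:HI (reg:SI 101)))])

   becomes an I2 that does the extending load into (reg:SI 100) and an
   I3 that copies the low part of (reg:SI 100) into (reg:HI 102), so
   memory is referenced only once.  */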
1654
1655 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1656 && GET_CODE (newpat) == PARALLEL
1657 && XVECLEN (newpat, 0) == 2
1658 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1659 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
1660 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1661 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1662 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
1663 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1664 INSN_CUID (i2))
1665 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1666 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1667 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1668 SET_SRC (XVECEXP (newpat, 0, 1)))
1669 && ! find_reg_note (i3, REG_UNUSED,
1670 SET_DEST (XVECEXP (newpat, 0, 0))))
1671 {
1672 newi2pat = XVECEXP (newpat, 0, 0);
1673 newpat = XVECEXP (newpat, 0, 1);
1674 SUBST (SET_SRC (newpat),
1675 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)),
1676 SET_DEST (newi2pat)));
1677 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1678 if (i2_code_number >= 0)
1679 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1680
1681 if (insn_code_number >= 0)
1682 {
1683 rtx insn;
1684 rtx link;
1685
1686 /* If we will be able to accept this, we have made a change to the
1687 destination of I3. This can invalidate a LOG_LINKS entry pointing
1688 to I3. No other part of combine.c makes such a transformation.
1689
1690 The new I3 will have a destination that was previously the
1691 destination of I1 or I2 and which was used in I2 or I3. Call
1692 distribute_links to make a LOG_LINK from the next use of
1693 that destination. */
1694
1695 PATTERN (i3) = newpat;
1696 distribute_links (gen_rtx (INSN_LIST, VOIDmode, i3, NULL_RTX));
1697
1698 /* I3 now uses the register that used to be its destination, which is
1699 now I2's destination. That means we need a LOG_LINK from
1700 I3 to I2. But we used to have one, so we still will.
1701
1702 However, some later insn might be using I2's dest and have
1703 a LOG_LINK pointing at I3. We must remove this link.
1704 The simplest way to remove the link is to point it at I1,
1705 which we know will be a NOTE. */
1706
1707 for (insn = NEXT_INSN (i3);
1708 insn && GET_CODE (insn) != CODE_LABEL
1709 && GET_CODE (PREV_INSN (insn)) != JUMP_INSN;
1710 insn = NEXT_INSN (insn))
1711 {
1712 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1713 && reg_referenced_p (SET_DEST (newi2pat), PATTERN (insn)))
1714 {
1715 for (link = LOG_LINKS (insn); link;
1716 link = XEXP (link, 1))
1717 if (XEXP (link, 0) == i3)
1718 XEXP (link, 0) = i1;
1719
1720 break;
1721 }
1722 }
1723 }
1724 }
1725
1726 /* Similarly, check for a case where we have a PARALLEL of two independent
1727 SETs but we started with three insns. In this case, we can do the sets
1728 as two separate insns. This case occurs when some SET allows two
1729 other insns to combine, but the destination of that SET is still live. */
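/* Illustrative sketch (register numbers invented): a pattern such as

       (parallel [(set (reg:SI 100) (plus:SI (reg:SI 101) (reg:SI 102)))
                  (set (reg:SI 103) (neg:SI (reg:SI 102)))])

   is rewritten as two insns, the second SET going into I2 and the
   first staying in I3, provided neither SET references the other's
   destination.  */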
1730
1731 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1732 && GET_CODE (newpat) == PARALLEL
1733 && XVECLEN (newpat, 0) == 2
1734 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1735 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
1736 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
1737 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1738 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1739 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1740 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1741 INSN_CUID (i2))
1742 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
1743 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
1744 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
1745 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1746 XVECEXP (newpat, 0, 0))
1747 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
1748 XVECEXP (newpat, 0, 1)))
1749 {
1750 newi2pat = XVECEXP (newpat, 0, 1);
1751 newpat = XVECEXP (newpat, 0, 0);
1752
1753 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1754 if (i2_code_number >= 0)
1755 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1756 }
1757
1758 /* If it still isn't recognized, fail and change things back the way they
1759 were. */
1760 if ((insn_code_number < 0
1761 /* Is the result a reasonable ASM_OPERANDS? */
1762 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
1763 {
1764 undo_all ();
1765 return 0;
1766 }
1767
1768 /* If we had to change another insn, make sure it is valid also. */
1769 if (undobuf.other_insn)
1770 {
1771 rtx other_notes = REG_NOTES (undobuf.other_insn);
1772 rtx other_pat = PATTERN (undobuf.other_insn);
1773 rtx new_other_notes;
1774 rtx note, next;
1775
1776 other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
1777 &new_other_notes);
1778
1779 if (other_code_number < 0 && ! check_asm_operands (other_pat))
1780 {
1781 undo_all ();
1782 return 0;
1783 }
1784
1785 PATTERN (undobuf.other_insn) = other_pat;
1786
1787 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
1788 are still valid. Then add any non-duplicate notes added by
1789 recog_for_combine. */
1790 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
1791 {
1792 next = XEXP (note, 1);
1793
1794 if (REG_NOTE_KIND (note) == REG_UNUSED
1795 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
1796 remove_note (undobuf.other_insn, note);
1797 }
1798
1799 distribute_notes (new_other_notes, undobuf.other_insn,
1800 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
1801 }
1802
1803 /* We now know that we can do this combination. Merge the insns and
1804 update the status of registers and LOG_LINKS. */
1805
1806 {
1807 rtx i3notes, i2notes, i1notes = 0;
1808 rtx i3links, i2links, i1links = 0;
1809 rtx midnotes = 0;
1810 int all_adjacent = (next_real_insn (i2) == i3
1811 && (i1 == 0 || next_real_insn (i1) == i2));
1812 register int regno;
1813 /* Compute which registers we expect to eliminate. */
1814 rtx elim_i2 = (newi2pat || i2dest_in_i2src || i2dest_in_i1src
1815 ? 0 : i2dest);
1816 rtx elim_i1 = i1 == 0 || i1dest_in_i1src ? 0 : i1dest;
1817
1818 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
1819 clear them. */
1820 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
1821 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
1822 if (i1)
1823 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
1824
1825 /* Ensure that we do not have something that should not be shared but
1826 occurs multiple times in the new insns. Check this by first
1827 resetting all the `used' flags and then copying anything that is shared. */
1828
1829 reset_used_flags (i3notes);
1830 reset_used_flags (i2notes);
1831 reset_used_flags (i1notes);
1832 reset_used_flags (newpat);
1833 reset_used_flags (newi2pat);
1834 if (undobuf.other_insn)
1835 reset_used_flags (PATTERN (undobuf.other_insn));
1836
1837 i3notes = copy_rtx_if_shared (i3notes);
1838 i2notes = copy_rtx_if_shared (i2notes);
1839 i1notes = copy_rtx_if_shared (i1notes);
1840 newpat = copy_rtx_if_shared (newpat);
1841 newi2pat = copy_rtx_if_shared (newi2pat);
1842 if (undobuf.other_insn)
1843 PATTERN (undobuf.other_insn) = copy_rtx_if_shared (PATTERN (undobuf.other_insn));
1844
1845 INSN_CODE (i3) = insn_code_number;
1846 PATTERN (i3) = newpat;
1847 if (undobuf.other_insn)
1848 INSN_CODE (undobuf.other_insn) = other_code_number;
1849
1850 /* We had one special case above where I2 had more than one set and
1851 we replaced a destination of one of those sets with the destination
1852 of I3. In that case, we have to update LOG_LINKS of insns later
1853 in this basic block. Note that this (expensive) case is rare. */
1854
1855 if (GET_CODE (PATTERN (i2)) == PARALLEL)
1856 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
1857 if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
1858 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
1859 && ! find_reg_note (i2, REG_UNUSED,
1860 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
1861 {
1862 register rtx insn;
1863
1864 for (insn = NEXT_INSN (i2); insn; insn = NEXT_INSN (insn))
1865 {
1866 if (insn != i3 && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1867 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
1868 if (XEXP (link, 0) == i2)
1869 XEXP (link, 0) = i3;
1870
1871 if (GET_CODE (insn) == CODE_LABEL
1872 || GET_CODE (insn) == JUMP_INSN)
1873 break;
1874 }
1875 }
1876
1877 LOG_LINKS (i3) = 0;
1878 REG_NOTES (i3) = 0;
1879 LOG_LINKS (i2) = 0;
1880 REG_NOTES (i2) = 0;
1881
1882 if (newi2pat)
1883 {
1884 INSN_CODE (i2) = i2_code_number;
1885 PATTERN (i2) = newi2pat;
1886 }
1887 else
1888 {
1889 PUT_CODE (i2, NOTE);
1890 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
1891 NOTE_SOURCE_FILE (i2) = 0;
1892 }
1893
1894 if (i1)
1895 {
1896 LOG_LINKS (i1) = 0;
1897 REG_NOTES (i1) = 0;
1898 PUT_CODE (i1, NOTE);
1899 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
1900 NOTE_SOURCE_FILE (i1) = 0;
1901 }
1902
1903 /* Get death notes for everything that is now used in either I3 or
1904 I2 and used to die in a previous insn. */
1905
1906 move_deaths (newpat, i1 ? INSN_CUID (i1) : INSN_CUID (i2), i3, &midnotes);
1907 if (newi2pat)
1908 move_deaths (newi2pat, INSN_CUID (i1), i2, &midnotes);
1909
1910 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
1911 if (i3notes)
1912 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
1913 elim_i2, elim_i1);
1914 if (i2notes)
1915 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
1916 elim_i2, elim_i1);
1917 if (i1notes)
1918 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
1919 elim_i2, elim_i1);
1920 if (midnotes)
1921 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
1922 elim_i2, elim_i1);
1923
1924 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
1925 know these are REG_UNUSED and want them to go to the desired insn,
1926 so we always pass the desired insn as the I3 argument. */
1927 if (newi2pat && new_i2_notes)
1928 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
1929 if (new_i3_notes)
1930 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
1931
1932 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
1933 put a REG_DEAD note for it somewhere. Similarly for I2 and I1. */
1934
1935 if (i3dest_killed)
1936 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i3dest_killed, NULL_RTX),
1937 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
1938 NULL_RTX, NULL_RTX);
1939
1940 /* For I2 and I1, we have to be careful. If NEWI2PAT exists and sets
1941 I2DEST or I1DEST, the death must be somewhere before I2, not I3. If
1942 we passed I3 in that case, it might delete I2. */
1943
1944 if (i2dest_in_i2src)
1945 {
1946 if (newi2pat && reg_set_p (i2dest, newi2pat))
1947 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
1948 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
1949 else
1950 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
1951 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
1952 NULL_RTX, NULL_RTX);
1953 }
1954
1955 if (i1dest_in_i1src)
1956 {
1957 if (newi2pat && reg_set_p (i1dest, newi2pat))
1958 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
1959 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
1960 else
1961 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
1962 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
1963 NULL_RTX, NULL_RTX);
1964 }
1965
1966 distribute_links (i3links);
1967 distribute_links (i2links);
1968 distribute_links (i1links);
1969
1970 if (GET_CODE (i2dest) == REG)
1971 {
1972 rtx link;
1973 rtx i2_insn = 0, i2_val = 0, set;
1974
1975 /* The insn that used to set this register doesn't exist, and
1976 this life of the register may not exist either. See if one of
1977 I3's links points to an insn that sets I2DEST. If it does,
1978 that is now the last known value for I2DEST. If we don't update
1979 this and I2 set the register to a value that depended on its old
1980 contents, we will get confused. If this insn is used, things
1981 will be set correctly in combine_instructions. */
1982
1983 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
1984 if ((set = single_set (XEXP (link, 0))) != 0
1985 && rtx_equal_p (i2dest, SET_DEST (set)))
1986 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
1987
1988 record_value_for_reg (i2dest, i2_insn, i2_val);
1989
1990 /* If the reg formerly set in I2 died only once and that was in I3,
1991 zero its use count so it won't make `reload' do any work. */
1992 if (! added_sets_2 && newi2pat == 0)
1993 {
1994 regno = REGNO (i2dest);
1995 reg_n_sets[regno]--;
1996 if (reg_n_sets[regno] == 0
1997 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
1998 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
1999 reg_n_refs[regno] = 0;
2000 }
2001 }
2002
2003 if (i1 && GET_CODE (i1dest) == REG)
2004 {
2005 rtx link;
2006 rtx i1_insn = 0, i1_val = 0, set;
2007
2008 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2009 if ((set = single_set (XEXP (link, 0))) != 0
2010 && rtx_equal_p (i1dest, SET_DEST (set)))
2011 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2012
2013 record_value_for_reg (i1dest, i1_insn, i1_val);
2014
2015 regno = REGNO (i1dest);
2016 if (! added_sets_1)
2017 {
2018 reg_n_sets[regno]--;
2019 if (reg_n_sets[regno] == 0
2020 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2021 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
2022 reg_n_refs[regno] = 0;
2023 }
2024 }
2025
2026 /* If I3 is now an unconditional jump, ensure that it has a
2027 BARRIER following it since it may have initially been a
2028 conditional jump. */
2029
2030 if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
2031 && GET_CODE (next_nonnote_insn (i3)) != BARRIER)
2032 emit_barrier_after (i3);
2033 }
2034
2035 combine_successes++;
2036
2037 return newi2pat ? i2 : i3;
2038 }
2039 \f
2040 /* Undo all the modifications recorded in undobuf. */
2041
2042 static void
2043 undo_all ()
2044 {
2045 register int i;
2046 if (undobuf.num_undo > MAX_UNDO)
2047 undobuf.num_undo = MAX_UNDO;
2048 for (i = undobuf.num_undo - 1; i >= 0; i--)
2049 {
2050 if (undobuf.undo[i].is_int)
2051 *undobuf.undo[i].where.i = undobuf.undo[i].old_contents.i;
2052 else
2053 *undobuf.undo[i].where.rtx = undobuf.undo[i].old_contents.rtx;
2055 }
2056
2057 obfree (undobuf.storage);
2058 undobuf.num_undo = 0;
2059 }
2060 \f
2061 /* Find the innermost point within the rtx at LOC, possibly LOC itself,
2062 where we have an arithmetic expression and return that point. LOC will
2063 be inside INSN.
2064
2065 try_combine will call this function to see if an insn can be split into
2066 two insns. */
2067
2068 static rtx *
2069 find_split_point (loc, insn)
2070 rtx *loc;
2071 rtx insn;
2072 {
2073 rtx x = *loc;
2074 enum rtx_code code = GET_CODE (x);
2075 rtx *split;
2076 int len = 0, pos, unsignedp;
2077 rtx inner;
2078
2079 /* First special-case some codes. */
2080 switch (code)
2081 {
2082 case SUBREG:
2083 #ifdef INSN_SCHEDULING
2084 /* If we are making a paradoxical SUBREG invalid, it becomes a split
2085 point. */
2086 if (GET_CODE (SUBREG_REG (x)) == MEM)
2087 return loc;
2088 #endif
2089 return find_split_point (&SUBREG_REG (x), insn);
2090
2091 case MEM:
2092 #ifdef HAVE_lo_sum
2093 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2094 using LO_SUM and HIGH. */
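/* For example, (mem:SI (symbol_ref "x")) is rewritten as

       (mem:SI (lo_sum (high (symbol_ref "x")) (symbol_ref "x")))

   and the location of the HIGH half is returned as the split point,
   so a separate insn can compute it.  */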
2095 if (GET_CODE (XEXP (x, 0)) == CONST
2096 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2097 {
2098 SUBST (XEXP (x, 0),
2099 gen_rtx_combine (LO_SUM, Pmode,
2100 gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
2101 XEXP (x, 0)));
2102 return &XEXP (XEXP (x, 0), 0);
2103 }
2104 #endif
2105
2106 /* If we have a PLUS whose second operand is a constant and the
2107 address is not valid, perhaps we can split it up using
2108 the machine-specific way to split large constants. We use
2109 the first pseudo-reg (one of the virtual regs) as a placeholder;
2110 it will not remain in the result. */
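/* A hedged sketch (constant invented; what the MD splitter produces
   is target-specific): for an offset too large to fit in an address,

       (mem:SI (plus:SI (reg:SI 100) (const_int 0x12345678)))

   the splitter might set the placeholder to the high part of the
   constant and then add the low part; the second source is then a
   valid address and supplies the split point.  */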
2111 if (GET_CODE (XEXP (x, 0)) == PLUS
2112 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2113 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2114 {
2115 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
2116 rtx seq = split_insns (gen_rtx (SET, VOIDmode, reg, XEXP (x, 0)),
2117 subst_insn);
2118
2119 /* This should have produced two insns, each of which sets our
2120 placeholder. If the source of the second is a valid address,
2121 we can put both sources together and make a split point
2122 in the middle. */
2123
2124 if (seq && XVECLEN (seq, 0) == 2
2125 && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
2126 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
2127 && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
2128 && ! reg_mentioned_p (reg,
2129 SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
2130 && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
2131 && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
2132 && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
2133 && memory_address_p (GET_MODE (x),
2134 SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
2135 {
2136 rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
2137 rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));
2138
2139 /* Replace the placeholder in SRC2 with SRC1. If we can
2140 find where in SRC2 it was placed, that can become our
2141 split point and we can replace this address with SRC2.
2142 Just try two obvious places. */
2143
2144 src2 = replace_rtx (src2, reg, src1);
2145 split = 0;
2146 if (XEXP (src2, 0) == src1)
2147 split = &XEXP (src2, 0);
2148 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2149 && XEXP (XEXP (src2, 0), 0) == src1)
2150 split = &XEXP (XEXP (src2, 0), 0);
2151
2152 if (split)
2153 {
2154 SUBST (XEXP (x, 0), src2);
2155 return split;
2156 }
2157 }
2158 }
2159 break;
2160
2161 case SET:
2162 #ifdef HAVE_cc0
2163 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
2164 ZERO_EXTRACT, the most likely reason why this doesn't match is that
2165 we need to put the operand into a register. So split at that
2166 point. */
2167
2168 if (SET_DEST (x) == cc0_rtx
2169 && GET_CODE (SET_SRC (x)) != COMPARE
2170 && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
2171 && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
2172 && ! (GET_CODE (SET_SRC (x)) == SUBREG
2173 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
2174 return &SET_SRC (x);
2175 #endif
2176
2177 /* See if we can split SET_SRC as it stands. */
2178 split = find_split_point (&SET_SRC (x), insn);
2179 if (split && split != &SET_SRC (x))
2180 return split;
2181
2182 /* See if this is a bitfield assignment with everything constant. If
2183 so, this is an IOR of an AND, so split it into that. */
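/* Worked example (register number invented): storing 3 into a 4-bit
   field at bit 8 of (reg:SI 100),

       (set (zero_extract:SI (reg:SI 100) (const_int 4) (const_int 8))
            (const_int 3))

   becomes, when bit 0 is the low-order bit,

       (set (reg:SI 100)
            (ior:SI (and:SI (reg:SI 100) (const_int -3841))
                    (const_int 768)))

   since the mask is 15, ~(15 << 8) is -3841, and 3 << 8 is 768.  */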
2184 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2185 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2186 <= HOST_BITS_PER_WIDE_INT)
2187 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
2188 && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
2189 && GET_CODE (SET_SRC (x)) == CONST_INT
2190 && ((INTVAL (XEXP (SET_DEST (x), 1))
2191 + INTVAL (XEXP (SET_DEST (x), 2)))
2192 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
2193 && ! side_effects_p (XEXP (SET_DEST (x), 0)))
2194 {
2195 int pos = INTVAL (XEXP (SET_DEST (x), 2));
2196 int len = INTVAL (XEXP (SET_DEST (x), 1));
2197 int src = INTVAL (SET_SRC (x));
2198 rtx dest = XEXP (SET_DEST (x), 0);
2199 enum machine_mode mode = GET_MODE (dest);
2200 unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
2201
2202 #if BITS_BIG_ENDIAN
2203 pos = GET_MODE_BITSIZE (mode) - len - pos;
2204 #endif
2205
2206 if (src == mask)
2207 SUBST (SET_SRC (x),
2208 gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
2209 else
2210 SUBST (SET_SRC (x),
2211 gen_binary (IOR, mode,
2212 gen_binary (AND, mode, dest,
2213 GEN_INT (~ (mask << pos)
2214 & GET_MODE_MASK (mode))),
2215 GEN_INT (src << pos)));
2216
2217 SUBST (SET_DEST (x), dest);
2218
2219 split = find_split_point (&SET_SRC (x), insn);
2220 if (split && split != &SET_SRC (x))
2221 return split;
2222 }
2223
2224 /* Otherwise, see if this is an operation that we can split into two.
2225 If so, try to split that. */
2226 code = GET_CODE (SET_SRC (x));
2227
2228 switch (code)
2229 {
2230 case AND:
2231 /* If we are AND'ing with a large constant that is only a single
2232 bit and the result is only being used in a context where we
2233 need to know if it is zero or non-zero, replace it with a bit
2234 extraction. This will avoid the large constant, which might
2235 have taken more than one insn to make. If the constant were
2236 not a valid argument to the AND but took only one insn to make,
2237 this is no worse, but if it took more than one insn, it will
2238 be better. */
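/* For instance (values invented for exposition), if (reg:SI 100) is
   set to (and:SI (reg:SI 101) (const_int 4096)) and its only use is
   a comparison against zero, the AND is replaced by a one-bit field
   extraction at bit 12, since 4096 == 1 << 12 and only the
   zero/non-zero distinction matters.  */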
2239
2240 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2241 && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
2242 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
2243 && GET_CODE (SET_DEST (x)) == REG
2244 && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
2245 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
2246 && XEXP (*split, 0) == SET_DEST (x)
2247 && XEXP (*split, 1) == const0_rtx)
2248 {
2249 SUBST (SET_SRC (x),
2250 make_extraction (GET_MODE (SET_DEST (x)),
2251 XEXP (SET_SRC (x), 0),
2252 pos, NULL_RTX, 1, 1, 0, 0));
2253 return find_split_point (loc, insn);
2254 }
2255 break;
2256
2257 case SIGN_EXTEND:
2258 inner = XEXP (SET_SRC (x), 0);
2259 pos = 0;
2260 len = GET_MODE_BITSIZE (GET_MODE (inner));
2261 unsignedp = 0;
2262 break;
2263
2264 case SIGN_EXTRACT:
2265 case ZERO_EXTRACT:
2266 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2267 && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
2268 {
2269 inner = XEXP (SET_SRC (x), 0);
2270 len = INTVAL (XEXP (SET_SRC (x), 1));
2271 pos = INTVAL (XEXP (SET_SRC (x), 2));
2272
2273 #if BITS_BIG_ENDIAN
2274 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
2275 #endif
2276 unsignedp = (code == ZERO_EXTRACT);
2277 }
2278 break;
2279 }
2280
2281 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
2282 {
2283 enum machine_mode mode = GET_MODE (SET_SRC (x));
2284
2285 /* For unsigned, we have a choice of a shift followed by an
2286 AND or two shifts. Use two shifts for field sizes where the
2287 constant might be too large. We assume here that we can
2288 always at least get 8-bit constants in an AND insn, which is
2289 true for every current RISC. */
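/* E.g. (field placement invented), an 8-bit unsigned field at bit 4
   of (reg:SI 100) is extracted in SImode as

       (and:SI (lshiftrt:SI (reg:SI 100) (const_int 4))
               (const_int 255))

   whereas a wider field uses the ASHIFT/LSHIFTRT pair below.  */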
2290
2291 if (unsignedp && len <= 8)
2292 {
2293 SUBST (SET_SRC (x),
2294 gen_rtx_combine
2295 (AND, mode,
2296 gen_rtx_combine (LSHIFTRT, mode,
2297 gen_lowpart_for_combine (mode, inner),
2298 GEN_INT (pos)),
2299 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
2300
2301 split = find_split_point (&SET_SRC (x), insn);
2302 if (split && split != &SET_SRC (x))
2303 return split;
2304 }
2305 else
2306 {
2307 SUBST (SET_SRC (x),
2308 gen_rtx_combine
2309 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
2310 gen_rtx_combine (ASHIFT, mode,
2311 gen_lowpart_for_combine (mode, inner),
2312 GEN_INT (GET_MODE_BITSIZE (mode)
2313 - len - pos)),
2314 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
2315
2316 split = find_split_point (&SET_SRC (x), insn);
2317 if (split && split != &SET_SRC (x))
2318 return split;
2319 }
2320 }
2321
2322 /* See if this is a simple operation with a constant as the second
2323 operand. It might be that this constant is out of range and hence
2324 could be used as a split point. */
2325 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2326 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2327 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
2328 && CONSTANT_P (XEXP (SET_SRC (x), 1))
2329 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
2330 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
2331 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
2332 == 'o'))))
2333 return &XEXP (SET_SRC (x), 1);
2334
2335 /* Finally, see if this is a simple operation with its first operand
2336 not in a register. The operation might require this operand in a
2337 register, so return it as a split point. We can always do this
2338 because if the first operand were another operation, we would have
2339 already found it as a split point. */
2340 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2341 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2342 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
2343 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
2344 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
2345 return &XEXP (SET_SRC (x), 0);
2346
2347 return 0;
2348
2349 case AND:
2350 case IOR:
2351 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
2352 it is better to write this as (not (ior A B)) so we can split it.
2353 Similarly for IOR. */
2354 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
2355 {
2356 SUBST (*loc,
2357 gen_rtx_combine (NOT, GET_MODE (x),
2358 gen_rtx_combine (code == IOR ? AND : IOR,
2359 GET_MODE (x),
2360 XEXP (XEXP (x, 0), 0),
2361 XEXP (XEXP (x, 1), 0))));
2362 return find_split_point (loc, insn);
2363 }
2364
2365 /* Many RISC machines have a large set of logical insns. If the
2366 second operand is a NOT, put it first so we will try to split the
2367 other operand first. */
2368 if (GET_CODE (XEXP (x, 1)) == NOT)
2369 {
2370 rtx tem = XEXP (x, 0);
2371 SUBST (XEXP (x, 0), XEXP (x, 1));
2372 SUBST (XEXP (x, 1), tem);
2373 }
2374 break;
2375 }
2376
2377 /* Otherwise, select our actions depending on our rtx class. */
2378 switch (GET_RTX_CLASS (code))
2379 {
2380 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
2381 case '3':
2382 split = find_split_point (&XEXP (x, 2), insn);
2383 if (split)
2384 return split;
2385 /* ... fall through ... */
2386 case '2':
2387 case 'c':
2388 case '<':
2389 split = find_split_point (&XEXP (x, 1), insn);
2390 if (split)
2391 return split;
2392 /* ... fall through ... */
2393 case '1':
2394 /* Some machines have (and (shift ...) ...) insns. If X is not
2395 an AND, but XEXP (X, 0) is, use it as our split point. */
2396 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
2397 return &XEXP (x, 0);
2398
2399 split = find_split_point (&XEXP (x, 0), insn);
2400 if (split)
2401 return split;
2402 return loc;
2403 }
2404
2405 /* Otherwise, we don't have a split point. */
2406 return 0;
2407 }
2408 \f
2409 /* Throughout X, replace FROM with TO, and return the result.
2410 The result is TO if X is FROM;
2411 otherwise the result is X, but its contents may have been modified.
2412 If they were modified, a record was made in undobuf so that
2413 undo_all will (among other things) return X to its original state.
2414
2415 If the number of changes necessary is too great to record for undoing,
2416 the excess changes are not made, so the result is invalid.
2417 The changes already made can still be undone.
2418 undobuf.num_undo is incremented for such changes, so by testing that
2419 the caller can tell whether the result is valid.
2420
2421 `n_occurrences' is incremented each time FROM is replaced.
2422
2423 IN_DEST is non-zero if we are processing the SET_DEST of a SET.
2424
2425 UNIQUE_COPY is non-zero if each substitution must be unique. We do this
2426 by copying if `n_occurrences' is non-zero. */
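/* For example (register numbers invented for exposition),

       subst ((set (reg:SI 103) (mult:SI (reg:SI 100) (reg:SI 102))),
              (reg:SI 100), (plus:SI (reg:SI 101) (const_int 4)), 0, 0)

   returns

       (set (reg:SI 103) (mult:SI (plus:SI (reg:SI 101) (const_int 4))
                                  (reg:SI 102)))

   with `n_occurrences' incremented once.  */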
2427
2428 static rtx
2429 subst (x, from, to, in_dest, unique_copy)
2430 register rtx x, from, to;
2431 int in_dest;
2432 int unique_copy;
2433 {
2434 register char *fmt;
2435 register int len, i;
2436 register enum rtx_code code = GET_CODE (x), orig_code = code;
2437 rtx temp;
2438 enum machine_mode mode = GET_MODE (x);
2439 enum machine_mode op0_mode = VOIDmode;
2440 rtx other_insn;
2441 rtx *cc_use;
2442 int n_restarts = 0;
2443
2444 /* FAKE_EXTEND_SAFE_P (MODE, FROM) is 1 if (subreg:MODE FROM 0) is a safe
2445 replacement for (zero_extend:MODE FROM) or (sign_extend:MODE FROM).
2446 If it is 0, that cannot be done. We can now do this for any MEM
2447 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be reloaded.
2448 If not for that, MEM's would very rarely be safe. */
2449
2450 /* Reject MODEs bigger than a word, because we might not be able
2451 to reference a two-register group starting with an arbitrary register
2452 (and currently gen_lowpart might crash for a SUBREG). */
2453
2454 #define FAKE_EXTEND_SAFE_P(MODE, FROM) \
2455 (GET_MODE_SIZE (MODE) <= UNITS_PER_WORD)
2456
2457 /* Two expressions are equal if they are identical copies of a shared
2458 RTX or if they are both registers with the same register number
2459 and mode. */
2460
2461 #define COMBINE_RTX_EQUAL_P(X,Y) \
2462 ((X) == (Y) \
2463 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
2464 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
2465
2466 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
2467 {
2468 n_occurrences++;
2469 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
2470 }
2471
2472 /* If X and FROM are the same register but different modes, they will
2473 not have been seen as equal above. However, flow.c will make a
2474 LOG_LINKS entry for that case. If we do nothing, we will try to
2475 rerecognize our original insn and, when it succeeds, we will
2476 delete the feeding insn, which is incorrect.
2477
2478 So force this insn not to match in this (rare) case. */
2479 if (! in_dest && code == REG && GET_CODE (from) == REG
2480 && REGNO (x) == REGNO (from))
2481 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
2482
2483 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
2484 of which may contain things that can be combined. */
2485 if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
2486 return x;
2487
2488 /* It is possible to have a subexpression appear twice in the insn.
2489 Suppose that FROM is a register that appears within TO.
2490 Then, after that subexpression has been scanned once by `subst',
2491 the second time it is scanned, TO may be found. If we were
2492 to scan TO here, we would find FROM within it and create a
2493 self-referential rtl structure, which is completely wrong. */
2494 if (COMBINE_RTX_EQUAL_P (x, to))
2495 return to;
2496
2497 len = GET_RTX_LENGTH (code);
2498 fmt = GET_RTX_FORMAT (code);
2499
2500 /* We don't need to process a SET_DEST that is a register, CC0, or PC, so
2501 set up to skip this common case. All other cases where we want to
2502 suppress replacing something inside a SET_SRC are handled via the
2503 IN_DEST operand. */
2504 if (code == SET
2505 && (GET_CODE (SET_DEST (x)) == REG
2506 || GET_CODE (SET_DEST (x)) == CC0
2507 || GET_CODE (SET_DEST (x)) == PC))
2508 fmt = "ie";
2509
2510 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a constant. */
2511 if (fmt[0] == 'e')
2512 op0_mode = GET_MODE (XEXP (x, 0));
2513
2514 for (i = 0; i < len; i++)
2515 {
2516 if (fmt[i] == 'E')
2517 {
2518 register int j;
2519 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2520 {
2521 register rtx new;
2522 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
2523 {
2524 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2525 n_occurrences++;
2526 }
2527 else
2528 {
2529 new = subst (XVECEXP (x, i, j), from, to, 0, unique_copy);
2530
2531 /* If this substitution failed, this whole thing fails. */
2532 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2533 return new;
2534 }
2535
2536 SUBST (XVECEXP (x, i, j), new);
2537 }
2538 }
2539 else if (fmt[i] == 'e')
2540 {
2541 register rtx new;
2542
2543 if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
2544 {
2545 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2546 n_occurrences++;
2547 }
2548 else
2549 /* If we are in a SET_DEST, suppress most cases unless we
2550 have gone inside a MEM, in which case we want to
2551 simplify the address. We assume here that things that
2552 are actually part of the destination have their inner
2553 parts in the first expression. This is true for SUBREG,
2554 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
2555 things aside from REG and MEM that should appear in a
2556 SET_DEST. */
2557 new = subst (XEXP (x, i), from, to,
2558 (((in_dest
2559 && (code == SUBREG || code == STRICT_LOW_PART
2560 || code == ZERO_EXTRACT))
2561 || code == SET)
2562 && i == 0), unique_copy);
2563
2564 /* If we found that we will have to reject this combination,
2565 indicate that by returning the CLOBBER ourselves, rather than
2566 an expression containing it. This will speed things up as
2567 well as prevent accidents where two CLOBBERs are considered
2568 to be equal, thus producing an incorrect simplification. */
2569
2570 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2571 return new;
2572
2573 SUBST (XEXP (x, i), new);
2574 }
2575 }
2576
2577 /* We come back to here if we have replaced the expression with one of
2578 a different code and it is likely that further simplification will be
2579 possible. */
2580
2581 restart:
2582
2583 code = GET_CODE (x);
2584
2585 /* If this is a commutative operation, put a constant last and a complex
2586 expression first. We don't need to do this for comparisons here. */
2587 if (GET_RTX_CLASS (code) == 'c'
2588 && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2589 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
2590 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
2591 || (GET_CODE (XEXP (x, 0)) == SUBREG
2592 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
2593 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
2594 {
2595 temp = XEXP (x, 0);
2596 SUBST (XEXP (x, 0), XEXP (x, 1));
2597 SUBST (XEXP (x, 1), temp);
2598 }
2599
2600 /* If this is a simple operation applied to an IF_THEN_ELSE, try
2601 applying it to the arms of the IF_THEN_ELSE. This often simplifies
2602 things. Don't deal with operations that change modes here. */
2603
2604 if ((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
2605 && GET_CODE (XEXP (x, 0)) == IF_THEN_ELSE)
2606 {
2607 SUBST (XEXP (XEXP (x, 0), 1),
2608 subst (gen_binary (code, mode, XEXP (XEXP (x, 0), 1),
2609 XEXP (x, 1)),
2610 pc_rtx, pc_rtx, 0));
2611 SUBST (XEXP (XEXP (x, 0), 2),
2612 subst (gen_binary (code, mode, XEXP (XEXP (x, 0), 2),
2613 XEXP (x, 1)),
2614 pc_rtx, pc_rtx, 0));
2615
2616 x = XEXP (x, 0);
2617 goto restart;
2618 }
2619
2620 else if (GET_RTX_CLASS (code) == '1'
2621 && GET_CODE (XEXP (x, 0)) == IF_THEN_ELSE
2622 && GET_MODE (XEXP (x, 0)) == mode)
2623 {
2624 SUBST (XEXP (XEXP (x, 0), 1),
2625 subst (gen_unary (code, mode, XEXP (XEXP (x, 0), 1)),
2626 pc_rtx, pc_rtx, 0));
2627 SUBST (XEXP (XEXP (x, 0), 2),
2628 subst (gen_unary (code, mode, XEXP (XEXP (x, 0), 2)),
2629 pc_rtx, pc_rtx, 0));
2630
2631 x = XEXP (x, 0);
2632 goto restart;
2633 }
2634
2635 /* Try to fold this expression in case we have constants that weren't
2636 present before. */
2637 temp = 0;
2638 switch (GET_RTX_CLASS (code))
2639 {
2640 case '1':
2641 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
2642 break;
2643 case '<':
2644 temp = simplify_relational_operation (code, op0_mode,
2645 XEXP (x, 0), XEXP (x, 1));
2646 #ifdef FLOAT_STORE_FLAG_VALUE
2647 if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2648 temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
2649 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
2650 #endif
2651 break;
2652 case 'c':
2653 case '2':
2654 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
2655 break;
2656 case 'b':
2657 case '3':
2658 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
2659 XEXP (x, 1), XEXP (x, 2));
2660 break;
2661 }
2662
2663 if (temp)
2664 x = temp, code = GET_CODE (temp);
2665
2666 /* If we have restarted more than 4 times, we are probably looping, so
2667 give up. */
2668 if (++n_restarts > 4)
2669 return x;
2670
2671 /* First see if we can apply the inverse distributive law. */
2672 if (code == PLUS || code == MINUS || code == IOR || code == XOR)
2673 {
2674 x = apply_distributive_law (x);
2675 code = GET_CODE (x);
2676 }
2677
2678 /* If CODE is an associative operation not otherwise handled, see if we
2679 can associate some operands. This can win if they are constants or
2680 if they are logically related (i.e., (a & b) & a). */
2681 if ((code == PLUS || code == MINUS
2682 || code == MULT || code == AND || code == IOR || code == XOR
2683 || code == DIV || code == UDIV
2684 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
2685 && GET_MODE_CLASS (mode) == MODE_INT)
2686 {
2687 if (GET_CODE (XEXP (x, 0)) == code)
2688 {
2689 rtx other = XEXP (XEXP (x, 0), 0);
2690 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
2691 rtx inner_op1 = XEXP (x, 1);
2692 rtx inner;
2693
2694 /* Make sure we pass the constant operand if any as the second
2695 one if this is a commutative operation. */
2696 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
2697 {
2698 rtx tem = inner_op0;
2699 inner_op0 = inner_op1;
2700 inner_op1 = tem;
2701 }
2702 inner = simplify_binary_operation (code == MINUS ? PLUS
2703 : code == DIV ? MULT
2704 : code == UDIV ? MULT
2705 : code,
2706 mode, inner_op0, inner_op1);
2707
2708 /* For commutative operations, try the other pair if that one
2709 didn't simplify. */
2710 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
2711 {
2712 other = XEXP (XEXP (x, 0), 1);
2713 inner = simplify_binary_operation (code, mode,
2714 XEXP (XEXP (x, 0), 0),
2715 XEXP (x, 1));
2716 }
2717
2718 if (inner)
2719 {
2720 x = gen_binary (code, mode, other, inner);
2721 goto restart;
2723 }
2724 }
2725 }
2726
2727 /* A little bit of algebraic simplification here. */
2728 switch (code)
2729 {
2730 case MEM:
2731 /* Ensure that our address has any ASHIFTs converted to MULT in case
2732 address-recognizing predicates are called later. */
2733 temp = make_compound_operation (XEXP (x, 0), MEM);
2734 SUBST (XEXP (x, 0), temp);
2735 break;
2736
2737 case SUBREG:
2738 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
2739 is paradoxical. If we can't do that safely, then it becomes
2740 something nonsensical so that this combination won't take place. */
2741
2742 if (GET_CODE (SUBREG_REG (x)) == MEM
2743 && (GET_MODE_SIZE (mode)
2744 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
2745 {
2746 rtx inner = SUBREG_REG (x);
2747 int endian_offset = 0;
2748 /* Don't change the mode of the MEM
2749 if that would change the meaning of the address. */
2750 if (MEM_VOLATILE_P (SUBREG_REG (x))
2751 || mode_dependent_address_p (XEXP (inner, 0)))
2752 return gen_rtx (CLOBBER, mode, const0_rtx);
2753
2754 #if BYTES_BIG_ENDIAN
2755 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2756 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
2757 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
2758 endian_offset -= UNITS_PER_WORD - GET_MODE_SIZE (GET_MODE (inner));
2759 #endif
2760 /* Note if the plus_constant doesn't make a valid address
2761 then this combination won't be accepted. */
2762 x = gen_rtx (MEM, mode,
2763 plus_constant (XEXP (inner, 0),
2764 (SUBREG_WORD (x) * UNITS_PER_WORD
2765 + endian_offset)));
2766 MEM_VOLATILE_P (x) = MEM_VOLATILE_P (inner);
2767 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
2768 MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (inner);
2769 return x;
2770 }
2771
2772 /* If we are in a SET_DEST, these other cases can't apply. */
2773 if (in_dest)
2774 return x;
2775
2776 /* Changing mode twice with SUBREG => just change it once,
2777 or not at all if changing back to starting mode. */
2778 if (GET_CODE (SUBREG_REG (x)) == SUBREG)
2779 {
2780 if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
2781 && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
2782 return SUBREG_REG (SUBREG_REG (x));
2783
2784 SUBST_INT (SUBREG_WORD (x),
2785 SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
2786 SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
2787 }
2788
2789 /* SUBREG of a hard register => just change the register number
2790 and/or mode. If the hard register is not valid in that mode,
2791 suppress this combination. */
2792
2793 if (GET_CODE (SUBREG_REG (x)) == REG
2794 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
2795 {
2796 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
2797 mode))
2798 return gen_rtx (REG, mode,
2799 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
2800 else
2801 return gen_rtx (CLOBBER, mode, const0_rtx);
2802 }
2803
2804 /* For a constant, try to pick up the part we want. Handle a full
2805 word and low-order part. Only do this if we are narrowing
2806 the constant; if it is being widened, we have no idea what
2807 the extra bits will have been set to. */
2808
2809 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
2810 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
2811 && GET_MODE_SIZE (op0_mode) < UNITS_PER_WORD
2812 && GET_MODE_CLASS (mode) == MODE_INT)
2813 {
2814 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
2815 0, op0_mode);
2816 if (temp)
2817 return temp;
2818 }
2819
2820 if (CONSTANT_P (SUBREG_REG (x)) && subreg_lowpart_p (x)
2821 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (op0_mode))
2822 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
2823
2824 /* If we are narrowing the object, we need to see if we can simplify
2825 the expression for the object knowing that we only need the
2826 low-order bits. */
2827
2828 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2829 && subreg_lowpart_p (x))
2830 return force_to_mode (SUBREG_REG (x), mode, GET_MODE_BITSIZE (mode),
2831 NULL_RTX);
2832 break;
2833
2834 case NOT:
2835 /* (not (plus X -1)) can become (neg X). */
2836 if (GET_CODE (XEXP (x, 0)) == PLUS
2837 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
2838 {
2839 x = gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
2840 goto restart;
2841 }
2842
2843 /* Similarly, (not (neg X)) is (plus X -1). */
2844 if (GET_CODE (XEXP (x, 0)) == NEG)
2845 {
2846 x = gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
2847 goto restart;
2848 }
2849
2850 /* (not (xor X C)) for C constant is (xor X D) with D = ~ C. */
2851 if (GET_CODE (XEXP (x, 0)) == XOR
2852 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2853 && (temp = simplify_unary_operation (NOT, mode,
2854 XEXP (XEXP (x, 0), 1),
2855 mode)) != 0)
2856 {
2857 SUBST (XEXP (XEXP (x, 0), 1), temp);
2858 return XEXP (x, 0);
2859 }
2860
2861 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
2862 other than 1, but that is not valid. We could do a similar
2863 simplification for (not (lshiftrt C X)) where C is just the sign bit,
2864 but this doesn't seem common enough to bother with. */
2865 if (GET_CODE (XEXP (x, 0)) == ASHIFT
2866 && XEXP (XEXP (x, 0), 0) == const1_rtx)
2867 {
2868 x = gen_rtx (ROTATE, mode, gen_unary (NOT, mode, const1_rtx),
2869 XEXP (XEXP (x, 0), 1));
2870 goto restart;
2871 }
2872
2873 if (GET_CODE (XEXP (x, 0)) == SUBREG
2874 && subreg_lowpart_p (XEXP (x, 0))
2875 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
2876 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
2877 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
2878 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
2879 {
2880 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
2881
2882 x = gen_rtx (ROTATE, inner_mode,
2883 gen_unary (NOT, inner_mode, const1_rtx),
2884 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
2885 x = gen_lowpart_for_combine (mode, x);
2886 goto restart;
2887 }
2888
2889 #if STORE_FLAG_VALUE == -1
2890 /* (not (comparison foo bar)) can be done by reversing the comparison
2891 code if valid. */
2892 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
2893 && reversible_comparison_p (XEXP (x, 0)))
2894 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
2895 mode, XEXP (XEXP (x, 0), 0),
2896 XEXP (XEXP (x, 0), 1));
2897 #endif
2898
2899 /* Apply De Morgan's laws to reduce number of patterns for machines
2900 with negating logical insns (and-not, nand, etc.). If result has
2901 only one NOT, put it first, since that is how the patterns are
2902 coded. */
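/* For instance (register numbers invented),

       (not:SI (ior:SI (reg:SI 100) (not:SI (reg:SI 101))))

   becomes (and:SI (not:SI (reg:SI 100)) (reg:SI 101)), with the one
   remaining NOT as the first operand.  */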
2903
2904 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
2905 {
2906 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
2907
2908 if (GET_CODE (in1) == NOT)
2909 in1 = XEXP (in1, 0);
2910 else
2911 in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
2912
2913 if (GET_CODE (in2) == NOT)
2914 in2 = XEXP (in2, 0);
2915 else if (GET_CODE (in2) == CONST_INT
2916 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2917 in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
2918 else
2919 in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
2920
2921 if (GET_CODE (in2) == NOT)
2922 {
2923 rtx tem = in2;
2924 in2 = in1; in1 = tem;
2925 }
2926
2927 x = gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
2928 mode, in1, in2);
2929 goto restart;
2930 }
2931 break;
2932
2933 case NEG:
2934 /* (neg (plus X 1)) can become (not X). */
2935 if (GET_CODE (XEXP (x, 0)) == PLUS
2936 && XEXP (XEXP (x, 0), 1) == const1_rtx)
2937 {
2938 x = gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
2939 goto restart;
2940 }
2941
2942 /* Similarly, (neg (not X)) is (plus X 1). */
2943 if (GET_CODE (XEXP (x, 0)) == NOT)
2944 {
2945 x = gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0), const1_rtx);
2946 goto restart;
2947 }
2948
2949 /* (neg (minus X Y)) can become (minus Y X). */
2950 if (GET_CODE (XEXP (x, 0)) == MINUS
2951 && (GET_MODE_CLASS (mode) != MODE_FLOAT
2952 /* x-y != -(y-x) with IEEE floating point. */
2953 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT))
2954 {
2955 x = gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
2956 XEXP (XEXP (x, 0), 0));
2957 goto restart;
2958 }
2959
2960 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
2961 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
2962 && significant_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
2963 {
2964 x = gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
2965 goto restart;
2966 }
2967
2968 /* NEG commutes with ASHIFT since it is multiplication. Only do this
2969 if we can then eliminate the NEG (e.g.,
2970 if the operand is a constant). */
2971
2972 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
2973 {
2974 temp = simplify_unary_operation (NEG, mode,
2975 XEXP (XEXP (x, 0), 0), mode);
2976 if (temp)
2977 {
2978 SUBST (XEXP (XEXP (x, 0), 0), temp);
2979 return XEXP (x, 0);
2980 }
2981 }
2982
2983 temp = expand_compound_operation (XEXP (x, 0));
2984
2985 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
2986 replaced by (lshiftrt X C). This will convert
2987 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
2988
2989 if (GET_CODE (temp) == ASHIFTRT
2990 && GET_CODE (XEXP (temp, 1)) == CONST_INT
2991 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
2992 {
2993 x = simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
2994 INTVAL (XEXP (temp, 1)));
2995 goto restart;
2996 }
2997
2998 /* If X has only a single bit significant, say, bit I, convert
2999 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3000 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3001 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3002 or a SUBREG of one since we'd be making the expression more
3003 complex if it was just a register. */
3004
3005 if (GET_CODE (temp) != REG
3006 && ! (GET_CODE (temp) == SUBREG
3007 && GET_CODE (SUBREG_REG (temp)) == REG)
3008 && (i = exact_log2 (significant_bits (temp, mode))) >= 0)
3009 {
3010 rtx temp1 = simplify_shift_const
3011 (NULL_RTX, ASHIFTRT, mode,
3012 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
3013 GET_MODE_BITSIZE (mode) - 1 - i),
3014 GET_MODE_BITSIZE (mode) - 1 - i);
3015
3016 /* If all we did was surround TEMP with the two shifts, we
3017 haven't improved anything, so don't use it. Otherwise,
3018 we are better off with TEMP1. */
3019 if (GET_CODE (temp1) != ASHIFTRT
3020 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3021 || XEXP (XEXP (temp1, 0), 0) != temp)
3022 {
3023 x = temp1;
3024 goto restart;
3025 }
3026 }
3027 break;
3028
3029 case FLOAT_TRUNCATE:
3030 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
3031 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
3032 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3033 return XEXP (XEXP (x, 0), 0);
3034 break;
3035
3036 #ifdef HAVE_cc0
3037 case COMPARE:
3038 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3039 using cc0, in which case we want to leave it as a COMPARE
3040 so we can distinguish it from a register-register-copy. */
3041 if (XEXP (x, 1) == const0_rtx)
3042 return XEXP (x, 0);
3043
3044 /* In IEEE floating point, x-0 is not the same as x. */
3045 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3046 || GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) == MODE_INT)
3047 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
3048 return XEXP (x, 0);
3049 break;
3050 #endif
3051
3052 case CONST:
3053 /* (const (const X)) can become (const X). Do it this way rather than
3054 returning the inner CONST since CONST can be shared with a
3055 REG_EQUAL note. */
3056 if (GET_CODE (XEXP (x, 0)) == CONST)
3057 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3058 break;
3059
3060 #ifdef HAVE_lo_sum
3061 case LO_SUM:
3062 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
3063 can add in an offset. find_split_point will split this address up
3064 again if it doesn't match. */
3065 if (GET_CODE (XEXP (x, 0)) == HIGH
3066 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
3067 return XEXP (x, 1);
3068 break;
3069 #endif
3070
3071 case PLUS:
3072 /* If we have (plus (plus (A const) B)), associate it so that CONST is
3073 outermost. That's because that's the way indexed addresses are
3074 supposed to appear. This code used to check many more cases, but
3075 they are now checked elsewhere. */
3076 if (GET_CODE (XEXP (x, 0)) == PLUS
3077 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
3078 return gen_binary (PLUS, mode,
3079 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
3080 XEXP (x, 1)),
3081 XEXP (XEXP (x, 0), 1));
3082
3083 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
3084 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
3085 bit-field and can be replaced by either a sign_extend or a
3086 sign_extract. The `and' may be a zero_extend. */
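/* Worked example (register number invented): for a 4-bit field,
   pow2 == 16 and c == 8, so

       (plus:SI (xor:SI (and:SI (reg:SI 100) (const_int 15))
                        (const_int 8))
                (const_int -8))

   sign extends the low four bits of (reg:SI 100); here i == 3 and
   the expression is rewritten as an ASHIFT/ASHIFTRT pair shifting by
   the mode bitsize minus 4.  */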
3087 if (GET_CODE (XEXP (x, 0)) == XOR
3088 && GET_CODE (XEXP (x, 1)) == CONST_INT
3089 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3090 && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
3091 && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
3092 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3093 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
3094 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3095 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
3096 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
3097 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
3098 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
3099 == i + 1))))
3100 {
3101 x = simplify_shift_const
3102 (NULL_RTX, ASHIFTRT, mode,
3103 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3104 XEXP (XEXP (XEXP (x, 0), 0), 0),
3105 GET_MODE_BITSIZE (mode) - (i + 1)),
3106 GET_MODE_BITSIZE (mode) - (i + 1));
3107 goto restart;
3108 }
3109
3110 /* If only the low-order bit of X is significant, (plus x -1)
3111 can become (ashiftrt (ashift (xor x 1) C) C) where C is
3112 the bitsize of the mode - 1. This allows simplification of
3113 "a = (b & 8) == 0;" */
3114 if (XEXP (x, 1) == constm1_rtx
3115 && GET_CODE (XEXP (x, 0)) != REG
3116 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3117 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
3118 && significant_bits (XEXP (x, 0), mode) == 1)
3119 {
3120 x = simplify_shift_const
3121 (NULL_RTX, ASHIFTRT, mode,
3122 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3123 gen_rtx_combine (XOR, mode,
3124 XEXP (x, 0), const1_rtx),
3125 GET_MODE_BITSIZE (mode) - 1),
3126 GET_MODE_BITSIZE (mode) - 1);
3127 goto restart;
3128 }
3129
3130 /* If we are adding two things that have no bits in common, convert
3131 the addition into an IOR. This will often be further simplified,
3132 for example in cases like ((a & 1) + (a & 2)), which can
3133 become a & 3. */
3134
3135 if ((significant_bits (XEXP (x, 0), mode)
3136 & significant_bits (XEXP (x, 1), mode)) == 0)
3137 {
3138 x = gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
3139 goto restart;
3140 }
3141 break;
3142
3143 case MINUS:
3144 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
3145 (and <foo> (const_int pow2-1)) */
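      /* For instance, x - (x & -8) == x & 7, since (and x -8) is just x
	 with its low three bits cleared.  */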
3146 if (GET_CODE (XEXP (x, 1)) == AND
3147 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3148 && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
3149 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
3150 {
3151 x = simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
3152 - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
3153 goto restart;
3154 }
3155 break;
3156
3157 case MULT:
3158 /* If we have (mult (plus A B) C), apply the distributive law and then
3159 the inverse distributive law to see if things simplify. This
3160 occurs mostly in addresses, often when unrolling loops. */
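      /* An illustrative address computation: (mult (plus r 4) 8) becomes
	 (plus (mult r 8) 32), exposing the constant term to the code that
	 canonicalizes indexed addresses.  */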
3161
3162 if (GET_CODE (XEXP (x, 0)) == PLUS)
3163 {
3164 x = apply_distributive_law
3165 (gen_binary (PLUS, mode,
3166 gen_binary (MULT, mode,
3167 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3168 gen_binary (MULT, mode,
3169 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3170
3171 if (GET_CODE (x) != MULT)
3172 goto restart;
3173 }
3174
3175 /* If this is multiplication by a power of two and its first operand is
3176 a shift, treat the multiply as a shift to allow the shifts to
3177 possibly combine. */
3178 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3179 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3180 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3181 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3182 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3183 || GET_CODE (XEXP (x, 0)) == ROTATE
3184 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3185 {
3186 x = simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0), i);
3187 goto restart;
3188 }
3189
3190 /* Convert (mult (ashift (const_int 1) A) B) to (ashift B A). */
3191 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3192 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3193 return gen_rtx_combine (ASHIFT, mode, XEXP (x, 1),
3194 XEXP (XEXP (x, 0), 1));
3195 break;
3196
3197 case UDIV:
3198 /* If this is a divide by a power of two, treat it as a shift if
3199 its first operand is a shift. */
3200 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3201 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3202 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3203 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3204 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3205 || GET_CODE (XEXP (x, 0)) == ROTATE
3206 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3207 {
3208 x = simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
3209 goto restart;
3210 }
3211 break;
3212
3213 case EQ: case NE:
3214 case GT: case GTU: case GE: case GEU:
3215 case LT: case LTU: case LE: case LEU:
3216 /* If the first operand is a condition code, we can't do anything
3217 with it. */
3218 if (GET_CODE (XEXP (x, 0)) == COMPARE
3219 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
3220 #ifdef HAVE_cc0
3221 && XEXP (x, 0) != cc0_rtx
3222 #endif
3223 ))
3224 {
3225 rtx op0 = XEXP (x, 0);
3226 rtx op1 = XEXP (x, 1);
3227 enum rtx_code new_code;
3228
3229 if (GET_CODE (op0) == COMPARE)
3230 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
3231
3232 /* Simplify our comparison, if possible. */
3233 new_code = simplify_comparison (code, &op0, &op1);
3234
3235 #if STORE_FLAG_VALUE == 1
3236 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
3237 if only the low-order bit is significant in X (such as when
3238 	         X is a ZERO_EXTRACT of one bit).  Similarly, we can convert
3239 EQ to (xor X 1). */
3240 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3241 && op1 == const0_rtx
3242 && significant_bits (op0, GET_MODE (op0)) == 1)
3243 return gen_lowpart_for_combine (mode, op0);
3244 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3245 && op1 == const0_rtx
3246 && significant_bits (op0, GET_MODE (op0)) == 1)
3247 return gen_rtx_combine (XOR, mode,
3248 gen_lowpart_for_combine (mode, op0),
3249 const1_rtx);
3250 #endif
3251
3252 #if STORE_FLAG_VALUE == -1
3253 /* If STORE_FLAG_VALUE is -1, we can convert (ne x 0)
3254 to (neg x) if only the low-order bit of X is significant.
3255 This converts (ne (zero_extract X 1 Y) 0) to
3256 (sign_extract X 1 Y). */
3257 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3258 && op1 == const0_rtx
3259 && significant_bits (op0, GET_MODE (op0)) == 1)
3260 {
3261 x = gen_rtx_combine (NEG, mode,
3262 gen_lowpart_for_combine (mode, op0));
3263 goto restart;
3264 }
3265 #endif
3266
3267 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
3268 one significant bit, we can convert (ne x 0) to (ashift x c)
3269 where C puts the bit in the sign bit. Remove any AND with
3270 STORE_FLAG_VALUE when we are done, since we are only going to
3271 test the sign bit. */
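	  /* Sketch (assuming a 32-bit mode with STORE_FLAG_VALUE == 1 << 31):
	     if only bit 3 of X can be nonzero, (ne x 0) becomes
	     (ashift x 28), which moves that bit into the sign bit.  */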
3272 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3273 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3274 && (STORE_FLAG_VALUE
3275 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
3276 && op1 == const0_rtx
3277 && mode == GET_MODE (op0)
3278 && (i = exact_log2 (significant_bits (op0, GET_MODE (op0)))) >= 0)
3279 {
3280 x = simplify_shift_const (NULL_RTX, ASHIFT, mode, op0,
3281 GET_MODE_BITSIZE (mode) - 1 - i);
3282 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
3283 return XEXP (x, 0);
3284 else
3285 return x;
3286 }
3287
3288 /* If the code changed, return a whole new comparison. */
3289 if (new_code != code)
3290 return gen_rtx_combine (new_code, mode, op0, op1);
3291
3292 /* Otherwise, keep this operation, but maybe change its operands.
3293 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
3294 SUBST (XEXP (x, 0), op0);
3295 SUBST (XEXP (x, 1), op1);
3296 }
3297 break;
3298
3299 case IF_THEN_ELSE:
3300 	      /* If we are testing a register for equality, see if that register is
3301 used in one of the arms. If so, and we know something about its
3302 value in that arm, try to simplify it. */
3303
3304 if ((GET_CODE (XEXP (x, 0)) == EQ || GET_CODE (XEXP (x, 0)) == NE)
3305 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
3306 {
3307 /* Get the value being compared and the value it has on the equal
3308 branch. */
3309 HOST_WIDE_INT sig;
3310 rtx from = XEXP (XEXP (x, 0), 0);
3311 rtx val_if_eq = XEXP (XEXP (x, 0), 1);
3312 rtx val_if_ne = from;
3313 int is_eq = (GET_CODE (XEXP (x, 0)) == EQ);
3314
3315 	      /* If we are comparing against zero and the expression being tested
3316 	         has only a single significant bit, that is its value when it is
3317 	         not equal to zero.  Similarly if it is known to be -1 or 0.  */
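	  /* Sketch: if X can only be 0 or 8 and we test (eq x 0), then X
	     must be 8 in the not-equal arm and 0 in the equal arm.  */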
3318
3319 if (val_if_eq == const0_rtx
3320 && exact_log2 (sig = significant_bits (from,
3321 GET_MODE (from))) >= 0)
3322 val_if_ne = GEN_INT (sig);
3323 else if (val_if_eq == const0_rtx
3324 && (num_sign_bit_copies (from, GET_MODE (from))
3325 == GET_MODE_BITSIZE (GET_MODE (from))))
3326 val_if_ne = constm1_rtx;
3327
3328 /* Now simplify an arm if we know the value of the register
3329 	         in the branch and it is used in the arm.  Be careful due to
3330 the potential of locally-shared RTL. */
3331
3332 if ((is_eq || val_if_ne != from)
3333 && reg_mentioned_p (from, XEXP (x, 1)))
3334 SUBST (XEXP (x, 1), subst (copy_rtx (XEXP (x, 1)), from,
3335 is_eq ? val_if_eq : val_if_ne, 0));
3336
3337 if ((! is_eq || val_if_ne != from)
3338 && reg_mentioned_p (from, XEXP (x, 2)))
3339 SUBST (XEXP (x, 2), subst (XEXP (x, 2), from,
3340 is_eq ? val_if_ne : val_if_eq, 0));
3341 }
3342
3343 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
3344 reversed, do so to avoid needing two sets of patterns for
3345 subtract-and-branch insns. Similarly if we have a constant in that
3346 position. */
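      /* E.g., (if_then_else (eq a b) (pc) (label_ref L)) becomes
	 (if_then_else (ne a b) (label_ref L) (pc)).  */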
3347 if ((XEXP (x, 1) == pc_rtx || GET_CODE (XEXP (x, 1)) == CONST_INT)
3348 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3349 && reversible_comparison_p (XEXP (x, 0)))
3350 {
3351 SUBST (XEXP (x, 0),
3352 gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3353 GET_MODE (XEXP (x, 0)),
3354 XEXP (XEXP (x, 0), 0), XEXP (XEXP (x, 0), 1)));
3355
3356 temp = XEXP (x, 1);
3357 SUBST (XEXP (x, 1), XEXP (x, 2));
3358 SUBST (XEXP (x, 2), temp);
3359 }
3360 break;
3361
3362 case ZERO_EXTRACT:
3363 case SIGN_EXTRACT:
3364 case ZERO_EXTEND:
3365 case SIGN_EXTEND:
3366 /* If we are processing SET_DEST, we are done. */
3367 if (in_dest)
3368 return x;
3369
3370 x = expand_compound_operation (x);
3371 if (GET_CODE (x) != code)
3372 goto restart;
3373 break;
3374
3375 case SET:
3376 /* (set (pc) (return)) gets written as (return). */
3377 if (GET_CODE (SET_DEST (x)) == PC && GET_CODE (SET_SRC (x)) == RETURN)
3378 return SET_SRC (x);
3379
3380 /* Convert this into a field assignment operation, if possible. */
3381 x = make_field_assignment (x);
3382
3383 /* If we are setting CC0 or if the source is a COMPARE, look for the
3384 use of the comparison result and try to simplify it unless we already
3385 have used undobuf.other_insn. */
3386 if ((GET_CODE (SET_SRC (x)) == COMPARE
3387 #ifdef HAVE_cc0
3388 || SET_DEST (x) == cc0_rtx
3389 #endif
3390 )
3391 && (cc_use = find_single_use (SET_DEST (x), subst_insn,
3392 &other_insn)) != 0
3393 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
3394 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
3395 && XEXP (*cc_use, 0) == SET_DEST (x))
3396 {
3397 enum rtx_code old_code = GET_CODE (*cc_use);
3398 enum rtx_code new_code;
3399 rtx op0, op1;
3400 int other_changed = 0;
3401 enum machine_mode compare_mode = GET_MODE (SET_DEST (x));
3402
3403 if (GET_CODE (SET_SRC (x)) == COMPARE)
3404 op0 = XEXP (SET_SRC (x), 0), op1 = XEXP (SET_SRC (x), 1);
3405 else
3406 op0 = SET_SRC (x), op1 = const0_rtx;
3407
3408 /* Simplify our comparison, if possible. */
3409 new_code = simplify_comparison (old_code, &op0, &op1);
3410
3411 #if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
3412 /* If this machine has CC modes other than CCmode, check to see
3413 if we need to use a different CC mode here. */
3414 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
3415
3416 /* If the mode changed, we have to change SET_DEST, the mode
3417 in the compare, and the mode in the place SET_DEST is used.
3418 If SET_DEST is a hard register, just build new versions with
3419 	         the proper mode.  If it is a pseudo, we lose unless this is the
3420 	         only time we set the pseudo, in which case we can safely change
3421 its mode. */
3422 if (compare_mode != GET_MODE (SET_DEST (x)))
3423 {
3424 int regno = REGNO (SET_DEST (x));
3425 rtx new_dest = gen_rtx (REG, compare_mode, regno);
3426
3427 if (regno < FIRST_PSEUDO_REGISTER
3428 || (reg_n_sets[regno] == 1
3429 && ! REG_USERVAR_P (SET_DEST (x))))
3430 {
3431 if (regno >= FIRST_PSEUDO_REGISTER)
3432 SUBST (regno_reg_rtx[regno], new_dest);
3433
3434 SUBST (SET_DEST (x), new_dest);
3435 SUBST (XEXP (*cc_use, 0), new_dest);
3436 other_changed = 1;
3437 }
3438 }
3439 #endif
3440
3441 /* If the code changed, we have to build a new comparison
3442 in undobuf.other_insn. */
3443 if (new_code != old_code)
3444 {
3445 unsigned mask;
3446
3447 SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
3448 SET_DEST (x), const0_rtx));
3449
3450 /* If the only change we made was to change an EQ into an
3451 NE or vice versa, OP0 has only one significant bit,
3452 and OP1 is zero, check if changing the user of the condition
3453 code will produce a valid insn. If it won't, we can keep
3454 the original code in that insn by surrounding our operation
3455 with an XOR. */
3456
3457 if (((old_code == NE && new_code == EQ)
3458 || (old_code == EQ && new_code == NE))
3459 && ! other_changed && op1 == const0_rtx
3460 && (GET_MODE_BITSIZE (GET_MODE (op0))
3461 <= HOST_BITS_PER_WIDE_INT)
3462 && (exact_log2 (mask = significant_bits (op0,
3463 GET_MODE (op0)))
3464 >= 0))
3465 {
3466 rtx pat = PATTERN (other_insn), note = 0;
3467
3468 if ((recog_for_combine (&pat, undobuf.other_insn, &note) < 0
3469 && ! check_asm_operands (pat)))
3470 {
3471 PUT_CODE (*cc_use, old_code);
3472 other_insn = 0;
3473
3474 op0 = gen_binary (XOR, GET_MODE (op0), op0,
3475 GEN_INT (mask));
3476 }
3477 }
3478
3479 other_changed = 1;
3480 }
3481
3482 if (other_changed)
3483 undobuf.other_insn = other_insn;
3484
3485 #ifdef HAVE_cc0
3486 /* If we are now comparing against zero, change our source if
3487 needed. If we do not use cc0, we always have a COMPARE. */
3488 if (op1 == const0_rtx && SET_DEST (x) == cc0_rtx)
3489 SUBST (SET_SRC (x), op0);
3490 else
3491 #endif
3492
3493 /* Otherwise, if we didn't previously have a COMPARE in the
3494 correct mode, we need one. */
3495 if (GET_CODE (SET_SRC (x)) != COMPARE
3496 || GET_MODE (SET_SRC (x)) != compare_mode)
3497 SUBST (SET_SRC (x), gen_rtx_combine (COMPARE, compare_mode,
3498 op0, op1));
3499 else
3500 {
3501 /* Otherwise, update the COMPARE if needed. */
3502 SUBST (XEXP (SET_SRC (x), 0), op0);
3503 SUBST (XEXP (SET_SRC (x), 1), op1);
3504 }
3505 }
3506 else
3507 {
3508 /* Get SET_SRC in a form where we have placed back any
3509 compound expressions. Then do the checks below. */
3510 temp = make_compound_operation (SET_SRC (x), SET);
3511 SUBST (SET_SRC (x), temp);
3512 }
3513
3514 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some
3515 operation, and X being a REG or (subreg (reg)), we may be able to
3516 convert this to (set (subreg:m2 x) (op)).
3517
3518 We can always do this if M1 is narrower than M2 because that
3519 means that we only care about the low bits of the result.
3520
3521 However, on most machines (those with BYTE_LOADS_ZERO_EXTEND
3522 	   not defined), we cannot perform a narrower operation than
3523 	   requested, since the high-order bits will be undefined.  On
3524 	   machines where BYTE_LOADS_ZERO_EXTEND is defined, however, this
3525 transformation is safe as long as M1 and M2 have the same number
3526 of words. */
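    /* An illustrative instance: (set x (subreg:QI (plus:SI a b) 0)) with
       X a QImode REG can become (set (subreg:SI x) (plus:SI a b)), since
       only the low byte of the SImode sum is wanted.  */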
3527
3528 if (GET_CODE (SET_SRC (x)) == SUBREG
3529 && subreg_lowpart_p (SET_SRC (x))
3530 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) != 'o'
3531 && (((GET_MODE_SIZE (GET_MODE (SET_SRC (x))) + (UNITS_PER_WORD - 1))
3532 / UNITS_PER_WORD)
3533 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x))))
3534 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
3535 #ifndef BYTE_LOADS_ZERO_EXTEND
3536 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
3537 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x)))))
3538 #endif
3539 && (GET_CODE (SET_DEST (x)) == REG
3540 || (GET_CODE (SET_DEST (x)) == SUBREG
3541 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG)))
3542 {
3543 SUBST (SET_DEST (x),
3544 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_SRC (x))),
3545 SET_DEST (x)));
3546 SUBST (SET_SRC (x), SUBREG_REG (SET_SRC (x)));
3547 }
3548
3549 #ifdef BYTE_LOADS_ZERO_EXTEND
3550 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with
3551 M wider than N, this would require a paradoxical subreg.
3552 Replace the subreg with a zero_extend to avoid the reload that
3553 would otherwise be required. */
3554 if (GET_CODE (SET_SRC (x)) == SUBREG
3555 && subreg_lowpart_p (SET_SRC (x))
3556 && SUBREG_WORD (SET_SRC (x)) == 0
3557 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
3558 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x)))))
3559 && GET_CODE (SUBREG_REG (SET_SRC (x))) == MEM)
3560 SUBST (SET_SRC (x), gen_rtx_combine (ZERO_EXTEND,
3561 GET_MODE (SET_SRC (x)),
3562 XEXP (SET_SRC (x), 0)));
3563 #endif
3564
3565 break;
3566
3567 case AND:
3568 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3569 {
3570 x = simplify_and_const_int (x, mode, XEXP (x, 0),
3571 INTVAL (XEXP (x, 1)));
3572
3573 	          /* If we have (ior (and X C1) C2) and the next restart would be
3574 the last, simplify this by making C1 as small as possible
3575 and then exit. */
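	  /* For example, (ior (and y 255) 15) becomes (ior (and y 240) 15);
	     the bits of C2 are forced on either way, so C1 need not
	     include them.  */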
3576 if (n_restarts >= 3 && GET_CODE (x) == IOR
3577 && GET_CODE (XEXP (x, 0)) == AND
3578 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3579 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3580 {
3581 temp = gen_binary (AND, mode, XEXP (XEXP (x, 0), 0),
3582 GEN_INT (INTVAL (XEXP (XEXP (x, 0), 1))
3583 & ~ INTVAL (XEXP (x, 1))));
3584 return gen_binary (IOR, mode, temp, XEXP (x, 1));
3585 }
3586
3587 if (GET_CODE (x) != AND)
3588 goto restart;
3589 }
3590
3591 /* Convert (A | B) & A to A. */
3592 if (GET_CODE (XEXP (x, 0)) == IOR
3593 && (rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3594 || rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1)))
3595 && ! side_effects_p (XEXP (XEXP (x, 0), 0))
3596 && ! side_effects_p (XEXP (XEXP (x, 0), 1)))
3597 return XEXP (x, 1);
3598
3599 /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
3600 insn (and may simplify more). */
3601 else if (GET_CODE (XEXP (x, 0)) == XOR
3602 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3603 && ! side_effects_p (XEXP (x, 1)))
3604 {
3605 x = gen_binary (AND, mode,
3606 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 1)),
3607 XEXP (x, 1));
3608 goto restart;
3609 }
3610 else if (GET_CODE (XEXP (x, 0)) == XOR
3611 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1))
3612 && ! side_effects_p (XEXP (x, 1)))
3613 {
3614 x = gen_binary (AND, mode,
3615 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 0)),
3616 XEXP (x, 1));
3617 goto restart;
3618 }
3619
3620 /* Similarly for (~ (A ^ B)) & A. */
3621 else if (GET_CODE (XEXP (x, 0)) == NOT
3622 && GET_CODE (XEXP (XEXP (x, 0), 0)) == XOR
3623 && rtx_equal_p (XEXP (XEXP (XEXP (x, 0), 0), 0), XEXP (x, 1))
3624 && ! side_effects_p (XEXP (x, 1)))
3625 {
3626 x = gen_binary (AND, mode, XEXP (XEXP (XEXP (x, 0), 0), 1),
3627 XEXP (x, 1));
3628 goto restart;
3629 }
3630 else if (GET_CODE (XEXP (x, 0)) == NOT
3631 && GET_CODE (XEXP (XEXP (x, 0), 0)) == XOR
3632 && rtx_equal_p (XEXP (XEXP (XEXP (x, 0), 0), 1), XEXP (x, 1))
3633 && ! side_effects_p (XEXP (x, 1)))
3634 {
3635 x = gen_binary (AND, mode, XEXP (XEXP (XEXP (x, 0), 0), 0),
3636 XEXP (x, 1));
3637 goto restart;
3638 }
3639
3640 #ifdef HAVE_conditional_move
3641
3642 	      /* If we have (and A B) where A is not an object but is known to
3643 be -1 or 0, this is equivalent to the expression
3644 (if_then_else (ne A (const_int 0)) B (const_int 0))
3645 We make this conversion because it may allow further
3646 simplifications and then allow use of conditional move insns. */
3647
3648 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3649 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3650 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o')
3651 && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
3652 == GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
3653 {
3654 rtx op0 = XEXP (x, 0);
3655 rtx op1 = const0_rtx;
3656 enum rtx_code comp_code
3657 = simplify_comparison (NE, &op0, &op1);
3658
3659 x = gen_rtx_combine (IF_THEN_ELSE, mode,
3660 gen_binary (comp_code, VOIDmode, op0, op1),
3661 XEXP (x, 1), const0_rtx);
3662 goto restart;
3663 }
3664 #endif
3665
3666 /* In the following group of tests (and those in case IOR below),
3667 we start with some combination of logical operations and apply
3668 the distributive law followed by the inverse distributive law.
3669 Most of the time, this results in no change. However, if some of
3670 the operands are the same or inverses of each other, simplifications
3671 will result.
3672
3673 For example, (and (ior A B) (not B)) can occur as the result of
3674 expanding a bit field assignment. When we apply the distributive
3675 	         law to this, we get (ior (and A (not B)) (and B (not B))),
3676 	         which then simplifies to (and A (not B)).  */
3677
3678 /* If we have (and (ior A B) C), apply the distributive law and then
3679 the inverse distributive law to see if things simplify. */
3680
3681 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == XOR)
3682 {
3683 x = apply_distributive_law
3684 (gen_binary (GET_CODE (XEXP (x, 0)), mode,
3685 gen_binary (AND, mode,
3686 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3687 gen_binary (AND, mode,
3688 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3689 if (GET_CODE (x) != AND)
3690 goto restart;
3691 }
3692
3693 if (GET_CODE (XEXP (x, 1)) == IOR || GET_CODE (XEXP (x, 1)) == XOR)
3694 {
3695 x = apply_distributive_law
3696 (gen_binary (GET_CODE (XEXP (x, 1)), mode,
3697 gen_binary (AND, mode,
3698 XEXP (XEXP (x, 1), 0), XEXP (x, 0)),
3699 gen_binary (AND, mode,
3700 XEXP (XEXP (x, 1), 1), XEXP (x, 0))));
3701 if (GET_CODE (x) != AND)
3702 goto restart;
3703 }
3704
3705 /* Similarly, taking advantage of the fact that
3706 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
3707
3708 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == XOR)
3709 {
3710 x = apply_distributive_law
3711 (gen_binary (XOR, mode,
3712 gen_binary (IOR, mode, XEXP (XEXP (x, 0), 0),
3713 XEXP (XEXP (x, 1), 0)),
3714 gen_binary (IOR, mode, XEXP (XEXP (x, 0), 0),
3715 XEXP (XEXP (x, 1), 1))));
3716 if (GET_CODE (x) != AND)
3717 goto restart;
3718 }
3719
3720 else if (GET_CODE (XEXP (x, 1)) == NOT && GET_CODE (XEXP (x, 0)) == XOR)
3721 {
3722 x = apply_distributive_law
3723 (gen_binary (XOR, mode,
3724 gen_binary (IOR, mode, XEXP (XEXP (x, 1), 0),
3725 XEXP (XEXP (x, 0), 0)),
3726 gen_binary (IOR, mode, XEXP (XEXP (x, 1), 0),
3727 XEXP (XEXP (x, 0), 1))));
3728 if (GET_CODE (x) != AND)
3729 goto restart;
3730 }
3731 break;
3732
3733 case IOR:
3734 /* (ior A C) is C if all significant bits of A are on in C. */
3735 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3736 && (significant_bits (XEXP (x, 0), mode)
3737 & ~ INTVAL (XEXP (x, 1))) == 0)
3738 return XEXP (x, 1);
3739
3740 /* Convert (A & B) | A to A. */
3741 if (GET_CODE (XEXP (x, 0)) == AND
3742 && (rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3743 || rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1)))
3744 && ! side_effects_p (XEXP (XEXP (x, 0), 0))
3745 && ! side_effects_p (XEXP (XEXP (x, 0), 1)))
3746 return XEXP (x, 1);
3747
3748 /* If we have (ior (and A B) C), apply the distributive law and then
3749 the inverse distributive law to see if things simplify. */
3750
3751 if (GET_CODE (XEXP (x, 0)) == AND)
3752 {
3753 x = apply_distributive_law
3754 (gen_binary (AND, mode,
3755 gen_binary (IOR, mode,
3756 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3757 gen_binary (IOR, mode,
3758 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3759
3760 if (GET_CODE (x) != IOR)
3761 goto restart;
3762 }
3763
3764 if (GET_CODE (XEXP (x, 1)) == AND)
3765 {
3766 x = apply_distributive_law
3767 (gen_binary (AND, mode,
3768 gen_binary (IOR, mode,
3769 XEXP (XEXP (x, 1), 0), XEXP (x, 0)),
3770 gen_binary (IOR, mode,
3771 XEXP (XEXP (x, 1), 1), XEXP (x, 0))));
3772
3773 if (GET_CODE (x) != IOR)
3774 goto restart;
3775 }
3776
3777 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
3778 mode size to (rotate A CX). */
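      /* E.g., in a 32-bit mode,
	 (ior (ashift a 3) (lshiftrt a 29)) becomes (rotate a 3).  */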
3779
3780 if (((GET_CODE (XEXP (x, 0)) == ASHIFT
3781 && GET_CODE (XEXP (x, 1)) == LSHIFTRT)
3782 || (GET_CODE (XEXP (x, 1)) == ASHIFT
3783 && GET_CODE (XEXP (x, 0)) == LSHIFTRT))
3784 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (XEXP (x, 1), 0))
3785 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3786 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3787 && (INTVAL (XEXP (XEXP (x, 0), 1)) + INTVAL (XEXP (XEXP (x, 1), 1))
3788 == GET_MODE_BITSIZE (mode)))
3789 {
3790 rtx shift_count;
3791
3792 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3793 shift_count = XEXP (XEXP (x, 0), 1);
3794 else
3795 shift_count = XEXP (XEXP (x, 1), 1);
3796 x = gen_rtx (ROTATE, mode, XEXP (XEXP (x, 0), 0), shift_count);
3797 goto restart;
3798 }
3799 break;
3800
3801 case XOR:
3802 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
3803 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
3804 (NOT y). */
3805 {
3806 int num_negated = 0;
3807 rtx in1 = XEXP (x, 0), in2 = XEXP (x, 1);
3808
3809 if (GET_CODE (in1) == NOT)
3810 num_negated++, in1 = XEXP (in1, 0);
3811 if (GET_CODE (in2) == NOT)
3812 num_negated++, in2 = XEXP (in2, 0);
3813
3814 if (num_negated == 2)
3815 {
3816 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3817 SUBST (XEXP (x, 1), XEXP (XEXP (x, 1), 0));
3818 }
3819 else if (num_negated == 1)
3820 {
3821 x = gen_unary (NOT, mode,
3822 gen_binary (XOR, mode, in1, in2));
3823 goto restart;
3824 }
3825 }
3826
3827 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
3828 correspond to a machine insn or result in further simplifications
3829 if B is a constant. */
3830
3831 if (GET_CODE (XEXP (x, 0)) == AND
3832 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1))
3833 && ! side_effects_p (XEXP (x, 1)))
3834 {
3835 x = gen_binary (AND, mode,
3836 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 0)),
3837 XEXP (x, 1));
3838 goto restart;
3839 }
3840 else if (GET_CODE (XEXP (x, 0)) == AND
3841 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3842 && ! side_effects_p (XEXP (x, 1)))
3843 {
3844 x = gen_binary (AND, mode,
3845 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 1)),
3846 XEXP (x, 1));
3847 goto restart;
3848 }
3849
3850
3851 #if STORE_FLAG_VALUE == 1
3852 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
3853 comparison. */
3854 if (XEXP (x, 1) == const1_rtx
3855 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3856 && reversible_comparison_p (XEXP (x, 0)))
3857 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3858 mode, XEXP (XEXP (x, 0), 0),
3859 XEXP (XEXP (x, 0), 1));
3860 #endif
3861
3862 /* (xor (comparison foo bar) (const_int sign-bit))
3863 when STORE_FLAG_VALUE is the sign bit. */
3864 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3865 && (STORE_FLAG_VALUE
3866 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
3867 && XEXP (x, 1) == const_true_rtx
3868 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3869 && reversible_comparison_p (XEXP (x, 0)))
3870 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3871 mode, XEXP (XEXP (x, 0), 0),
3872 XEXP (XEXP (x, 0), 1));
3873 break;
3874
3875 case ABS:
3876 /* (abs (neg <foo>)) -> (abs <foo>) */
3877 if (GET_CODE (XEXP (x, 0)) == NEG)
3878 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3879
3880 /* If operand is something known to be positive, ignore the ABS. */
3881 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
3882 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
3883 <= HOST_BITS_PER_WIDE_INT)
3884 && ((significant_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
3885 & ((HOST_WIDE_INT) 1
3886 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
3887 == 0)))
3888 return XEXP (x, 0);
3889
3890
3891 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
3892 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
3893 {
3894 x = gen_rtx_combine (NEG, mode, XEXP (x, 0));
3895 goto restart;
3896 }
3897 break;
3898
3899 case FFS:
3900 /* (ffs (*_extend <X>)) = (ffs <X>) */
3901 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
3902 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
3903 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3904 break;
3905
3906 case FLOAT:
3907 /* (float (sign_extend <X>)) = (float <X>). */
3908 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
3909 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3910 break;
3911
3912 case LSHIFT:
3913 case ASHIFT:
3914 case LSHIFTRT:
3915 case ASHIFTRT:
3916 case ROTATE:
3917 case ROTATERT:
3918 /* If this is a shift by a constant amount, simplify it. */
3919 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3920 {
3921 x = simplify_shift_const (x, code, mode, XEXP (x, 0),
3922 INTVAL (XEXP (x, 1)));
3923 if (GET_CODE (x) != code)
3924 goto restart;
3925 }
3926
3927 #ifdef SHIFT_COUNT_TRUNCATED
3928 else if (GET_CODE (XEXP (x, 1)) != REG)
3929 SUBST (XEXP (x, 1),
3930 force_to_mode (XEXP (x, 1), GET_MODE (x),
3931 exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))),
3932 NULL_RTX));
3933 #endif
3934
3935 break;
3936 }
3937
3938 return x;
3939 }
3940 \f
3941 /* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
3942 operations" because they can be replaced with two more basic operations.
3943 ZERO_EXTEND is also considered "compound" because it can be replaced with
3944 an AND operation, which is simpler, though only one operation.
3945
3946 The function expand_compound_operation is called with an rtx expression
3947 and will convert it to the appropriate shifts and AND operations,
3948 simplifying at each stage.
3949
3950 The function make_compound_operation is called to convert an expression
3951 consisting of shifts and ANDs into the equivalent compound expression.
3952 It is the inverse of this function, loosely speaking. */
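/* As a concrete sketch (illustrative, assuming a QImode operand and an
   SImode result): (zero_extend:SI x) corresponds to (and:SI x' 255) and
   (sign_extend:SI x) to (ashiftrt:SI (ashift:SI x' 24) 24), where x' is
   X viewed in SImode.  */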
3953
3954 static rtx
3955 expand_compound_operation (x)
3956 rtx x;
3957 {
3958 int pos = 0, len;
3959 int unsignedp = 0;
3960 int modewidth;
3961 rtx tem;
3962
3963 switch (GET_CODE (x))
3964 {
3965 case ZERO_EXTEND:
3966 unsignedp = 1;
3967 case SIGN_EXTEND:
3968 /* We can't necessarily use a const_int for a multiword mode;
3969 it depends on implicitly extending the value.
3970 Since we don't know the right way to extend it,
3971 we can't tell whether the implicit way is right.
3972
3973 Even for a mode that is no wider than a const_int,
3974 we can't win, because we need to sign extend one of its bits through
3975 the rest of it, and we don't know which bit. */
3976 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
3977 return x;
3978
3979 if (! FAKE_EXTEND_SAFE_P (GET_MODE (XEXP (x, 0)), XEXP (x, 0)))
3980 return x;
3981
3982 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
3983 /* If the inner object has VOIDmode (the only way this can happen
3984 	         is if it is an ASM_OPERANDS), we can't do anything since we don't
3985 know how much masking to do. */
3986 if (len == 0)
3987 return x;
3988
3989 break;
3990
3991 case ZERO_EXTRACT:
3992 unsignedp = 1;
3993 case SIGN_EXTRACT:
3994 /* If the operand is a CLOBBER, just return it. */
3995 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
3996 return XEXP (x, 0);
3997
3998 if (GET_CODE (XEXP (x, 1)) != CONST_INT
3999 || GET_CODE (XEXP (x, 2)) != CONST_INT
4000 || GET_MODE (XEXP (x, 0)) == VOIDmode)
4001 return x;
4002
4003 len = INTVAL (XEXP (x, 1));
4004 pos = INTVAL (XEXP (x, 2));
4005
4006 /* If this goes outside the object being extracted, replace the object
4007 with a (use (mem ...)) construct that only combine understands
4008 and is used only for this purpose. */
4009 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
4010 SUBST (XEXP (x, 0), gen_rtx (USE, GET_MODE (x), XEXP (x, 0)));
4011
4012 #if BITS_BIG_ENDIAN
4013 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
4014 #endif
4015 break;
4016
4017 default:
4018 return x;
4019 }
4020
4021 /* If we reach here, we want to return a pair of shifts. The inner
4022 shift is a left shift of BITSIZE - POS - LEN bits. The outer
4023 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
4024 logical depending on the value of UNSIGNEDP.
4025
4026 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
4027 converted into an AND of a shift.
4028
4029 We must check for the case where the left shift would have a negative
4030 count. This can happen in a case like (x >> 31) & 255 on machines
4031 that can't shift by a constant. On those machines, we would first
4032 combine the shift with the AND to produce a variable-position
4033 	   extraction.  Then the constant of 31 would be substituted in to
4034 	   produce such a position.  */
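  /* An illustration (assuming a 32-bit mode): for (sign_extract x 8 8),
     BITSIZE = 32, LEN = 8 and POS = 8, so the pair is
     (ashiftrt (ashift x 16) 24): the left shift puts bits 8..15 at the
     top of the word and the arithmetic right shift sign-extends them
     back down.  */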
4035
4036 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
4037 	  if (modewidth >= pos + len)
4038 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
4039 GET_MODE (x),
4040 simplify_shift_const (NULL_RTX, ASHIFT,
4041 GET_MODE (x),
4042 XEXP (x, 0),
4043 modewidth - pos - len),
4044 modewidth - len);
4045
4046 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
4047 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
4048 simplify_shift_const (NULL_RTX, LSHIFTRT,
4049 GET_MODE (x),
4050 XEXP (x, 0), pos),
4051 ((HOST_WIDE_INT) 1 << len) - 1);
4052 else
4053 /* Any other cases we can't handle. */
4054 return x;
4055
4056
4057 /* If we couldn't do this for some reason, return the original
4058 expression. */
4059 if (GET_CODE (tem) == CLOBBER)
4060 return x;
4061
4062 return tem;
4063 }
4064 \f
4065 /* X is a SET which contains an assignment of one object into
4066 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
4067 or certain SUBREGS). If possible, convert it into a series of
4068 logical operations.
4069
4070 We half-heartedly support variable positions, but do not at all
4071 support variable lengths. */
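/* The expansion built below amounts to the following C sketch, with
   MASK == (1 << LEN) - 1:

	inner = (inner & ~(MASK << pos)) | ((src & MASK) << pos);

   i.e. clear the field in INNER, then OR in the masked source shifted
   into place.  */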
4072
4073 static rtx
4074 expand_field_assignment (x)
4075 rtx x;
4076 {
4077 rtx inner;
4078 rtx pos; /* Always counts from low bit. */
4079 int len;
4080 rtx mask;
4081 enum machine_mode compute_mode;
4082
4083 /* Loop until we find something we can't simplify. */
4084 while (1)
4085 {
4086 if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
4087 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
4088 {
4089 inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
4090 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
4091 pos = const0_rtx;
4092 }
4093 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
4094 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
4095 {
4096 inner = XEXP (SET_DEST (x), 0);
4097 len = INTVAL (XEXP (SET_DEST (x), 1));
4098 pos = XEXP (SET_DEST (x), 2);
4099
4100 	      /* If the position is constant and the field extends beyond the
4101 	         width of INNER, surround INNER with a USE to indicate this.  */
4102 if (GET_CODE (pos) == CONST_INT
4103 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
4104 inner = gen_rtx (USE, GET_MODE (SET_DEST (x)), inner);
4105
4106 #if BITS_BIG_ENDIAN
4107 if (GET_CODE (pos) == CONST_INT)
4108 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
4109 - INTVAL (pos));
4110 else if (GET_CODE (pos) == MINUS
4111 && GET_CODE (XEXP (pos, 1)) == CONST_INT
4112 && (INTVAL (XEXP (pos, 1))
4113 == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
4114 /* If position is ADJUST - X, new position is X. */
4115 pos = XEXP (pos, 0);
4116 else
4117 pos = gen_binary (MINUS, GET_MODE (pos),
4118 GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
4119 - len),
4120 pos);
4121 #endif
4122 }
4123
4124 	      /* A SUBREG between two modes that occupy the same number of words
4125 can be done by moving the SUBREG to the source. */
4126 else if (GET_CODE (SET_DEST (x)) == SUBREG
4127 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
4128 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
4129 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
4130 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
4131 {
4132 x = gen_rtx (SET, VOIDmode, SUBREG_REG (SET_DEST (x)),
4133 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
4134 SET_SRC (x)));
4135 continue;
4136 }
4137 else
4138 break;
4139
4140 while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
4141 inner = SUBREG_REG (inner);
4142
4143 compute_mode = GET_MODE (inner);
4144
4145 /* Compute a mask of LEN bits, if we can do this on the host machine. */
4146 if (len < HOST_BITS_PER_WIDE_INT)
4147 mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
4148 else
4149 break;
4150
4151 /* Now compute the equivalent expression. Make a copy of INNER
4152 for the SET_DEST in case it is a MEM into which we will substitute;
4153 we don't want shared RTL in that case. */
4154 x = gen_rtx (SET, VOIDmode, copy_rtx (inner),
4155 gen_binary (IOR, compute_mode,
4156 gen_binary (AND, compute_mode,
4157 gen_unary (NOT, compute_mode,
4158 gen_binary (ASHIFT,
4159 compute_mode,
4160 mask, pos)),
4161 inner),
4162 gen_binary (ASHIFT, compute_mode,
4163 gen_binary (AND, compute_mode,
4164 gen_lowpart_for_combine
4165 (compute_mode,
4166 SET_SRC (x)),
4167 mask),
4168 pos)));
4169 }
4170
4171 return x;
4172 }
4173 \f
4174 /* Return an RTX for a reference to LEN bits of INNER. POS is the starting
4175 bit position (counted from the LSB) if >= 0; otherwise POS_RTX represents
4176 the starting bit position.
4177
4178 INNER may be a USE. This will occur when we started with a bitfield
4179 that went outside the boundary of the object in memory, which is
4180 allowed on most machines. To isolate this case, we produce a USE
4181 whose mode is wide enough and surround the MEM with it. The only
4182 code that understands the USE is this routine. If it is not removed,
4183 it will cause the resulting insn not to match.
4184
4185 UNSIGNEDP is non-zero for an unsigned reference and zero for a
4186 signed reference.
4187
4188 IN_DEST is non-zero if this is a reference in the destination of a
4189 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
4190 	   a STRICT_LOW_PART will be used; if zero, ZERO_EXTEND or SIGN_EXTEND will
4191 be used.
4192
4193 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
4194 ZERO_EXTRACT should be built even for bits starting at bit 0.
4195
4196 MODE is the desired mode of the result (if IN_DEST == 0). */
4197
4198 static rtx
4199 make_extraction (mode, inner, pos, pos_rtx, len,
4200 unsignedp, in_dest, in_compare)
4201 enum machine_mode mode;
4202 rtx inner;
4203 int pos;
4204 rtx pos_rtx;
4205 int len;
4206 int unsignedp;
4207 int in_dest, in_compare;
4208 {
4209 enum machine_mode is_mode = GET_MODE (inner);
4210 enum machine_mode inner_mode;
4211 enum machine_mode wanted_mem_mode = byte_mode;
4212 enum machine_mode pos_mode = word_mode;
4213 enum machine_mode extraction_mode = word_mode;
4214 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
4215 int spans_byte = 0;
4216 rtx new = 0;
4217
4218 /* Get some information about INNER and get the innermost object. */
4219 if (GET_CODE (inner) == USE)
4220 /* We don't need to adjust the position because we set up the USE
4221 to pretend that it was a full-word object. */
4222 spans_byte = 1, inner = XEXP (inner, 0);
4223 else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
4224 inner = SUBREG_REG (inner);
4225
4226 inner_mode = GET_MODE (inner);
4227
4228 if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
4229 pos = INTVAL (pos_rtx);
4230
4231 /* See if this can be done without an extraction. We never can if the
4232 width of the field is not the same as that of some integer mode. For
4233 registers, we can only avoid the extraction if the position is at the
4234 low-order bit and this is either not in the destination or we have the
4235 appropriate STRICT_LOW_PART operation available.
4236
4237 For MEM, we can avoid an extract if the field starts on an appropriate
4238 boundary and we can change the mode of the memory reference. However,
4239 we cannot directly access the MEM if we have a USE and the underlying
4240 MEM is not TMODE. This combination means that MEM was being used in a
4241 context where bits outside its mode were being referenced; that is only
4242 valid in bit-field insns. */
4243
4244 if (tmode != BLKmode
4245 && ! (spans_byte && inner_mode != tmode)
4246 && ((pos == 0 && GET_CODE (inner) != MEM
4247 && (! in_dest
4248 || (GET_CODE (inner) == REG
4249 && (movstrict_optab->handlers[(int) tmode].insn_code
4250 != CODE_FOR_nothing))))
4251 || (GET_CODE (inner) == MEM && pos >= 0
4252 && (pos
4253 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
4254 : BITS_PER_UNIT)) == 0
4255 /* We can't do this if we are widening INNER_MODE (it
4256 may not be aligned, for one thing). */
4257 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
4258 && (inner_mode == tmode
4259 || (! mode_dependent_address_p (XEXP (inner, 0))
4260 && ! MEM_VOLATILE_P (inner))))))
4261 {
4262 int offset = pos / BITS_PER_UNIT;
4263
4264 /* If INNER is a MEM, make a new MEM that encompasses just the desired
4265 field. If the original and current mode are the same, we need not
4266 	         adjust the offset.  Otherwise, we do if bytes are big-endian.
4267
4268 	         If INNER is not a MEM, get a piece consisting of just the field
4269 of interest (in this case POS must be 0). */
4270
4271 if (GET_CODE (inner) == MEM)
4272 {
4273 #if BYTES_BIG_ENDIAN
4274 if (inner_mode != tmode)
4275 offset = (GET_MODE_SIZE (inner_mode)
4276 - GET_MODE_SIZE (tmode) - offset);
4277 #endif
4278
4279 new = gen_rtx (MEM, tmode, plus_constant (XEXP (inner, 0), offset));
4280 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
4281 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner);
4282 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner);
4283 }
4284 else if (GET_CODE (inner) == REG)
4285 /* We can't call gen_lowpart_for_combine here since we always want
4286 a SUBREG and it would sometimes return a new hard register. */
4287 new = gen_rtx (SUBREG, tmode, inner,
4288 (WORDS_BIG_ENDIAN
4289 && GET_MODE_SIZE (is_mode) > UNITS_PER_WORD)
4290 	                   ? (((GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (tmode))
4291 	                       / UNITS_PER_WORD))
4292 : 0);
4293 else
4294 new = force_to_mode (inner, tmode, len, NULL_RTX);
4295
4296 /* If this extraction is going into the destination of a SET,
4297 make a STRICT_LOW_PART unless we made a MEM. */
4298
4299 if (in_dest)
4300 return (GET_CODE (new) == MEM ? new
4301 : (GET_CODE (new) != SUBREG
4302 ? gen_rtx (CLOBBER, tmode, const0_rtx)
4303 : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
4304
4305 /* Otherwise, sign- or zero-extend unless we already are in the
4306 proper mode. */
4307
4308 return (mode == tmode ? new
4309 : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
4310 mode, new));
4311 }
4312
4313 	  /* Unless this is in a COMPARE or we have a funny memory reference,
4314 don't do anything with field extracts starting at the low-order
4315 bit since they are simple AND operations. */
4316 if (pos == 0 && ! in_dest && ! in_compare && ! spans_byte)
4317 return 0;
4318
4319 /* Get the mode to use should INNER be a MEM, the mode for the position,
4320 and the mode for the result. */
4321 #ifdef HAVE_insv
4322 if (in_dest)
4323 {
4324 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
4325 pos_mode = insn_operand_mode[(int) CODE_FOR_insv][2];
4326 extraction_mode = insn_operand_mode[(int) CODE_FOR_insv][3];
4327 }
4328 #endif
4329
4330 #ifdef HAVE_extzv
4331 if (! in_dest && unsignedp)
4332 {
4333 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
4334 pos_mode = insn_operand_mode[(int) CODE_FOR_extzv][3];
4335 extraction_mode = insn_operand_mode[(int) CODE_FOR_extzv][0];
4336 }
4337 #endif
4338
4339 #ifdef HAVE_extv
4340 if (! in_dest && ! unsignedp)
4341 {
4342 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
4343 pos_mode = insn_operand_mode[(int) CODE_FOR_extv][3];
4344 extraction_mode = insn_operand_mode[(int) CODE_FOR_extv][0];
4345 }
4346 #endif
4347
4348 /* Never narrow an object, since that might not be safe. */
4349
4350 if (mode != VOIDmode
4351 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
4352 extraction_mode = mode;
4353
4354 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
4355 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
4356 pos_mode = GET_MODE (pos_rtx);
4357
4358 	  /* If this is not a MEM, or if we would have to change the mode of
4359 	     the MEM and cannot, the desired mode is EXTRACTION_MODE.  */
4360 if (GET_CODE (inner) != MEM
4361 || (inner_mode != wanted_mem_mode
4362 && (mode_dependent_address_p (XEXP (inner, 0))
4363 || MEM_VOLATILE_P (inner))))
4364 wanted_mem_mode = extraction_mode;
4365
4366 #if BITS_BIG_ENDIAN
4367 /* If position is constant, compute new position. Otherwise, build
4368 subtraction. */
4369 if (pos >= 0)
4370 pos = (MAX (GET_MODE_BITSIZE (is_mode), GET_MODE_BITSIZE (wanted_mem_mode))
4371 - len - pos);
4372 else
4373 pos_rtx
4374 = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
4375 GEN_INT (MAX (GET_MODE_BITSIZE (is_mode),
4376 GET_MODE_BITSIZE (wanted_mem_mode))
4377 - len),
4378 pos_rtx);
4379 #endif
4380
4381 /* If INNER has a wider mode, make it smaller. If this is a constant
4382 extract, try to adjust the byte to point to the byte containing
4383 the value. */
4384 if (wanted_mem_mode != VOIDmode
4385 && GET_MODE_SIZE (wanted_mem_mode) < GET_MODE_SIZE (is_mode)
4386 && ((GET_CODE (inner) == MEM
4387 && (inner_mode == wanted_mem_mode
4388 || (! mode_dependent_address_p (XEXP (inner, 0))
4389 && ! MEM_VOLATILE_P (inner))))))
4390 {
4391 int offset = 0;
4392
4393 /* The computations below will be correct if the machine is big
4394 endian in both bits and bytes or little endian in bits and bytes.
4395 If it is mixed, we must adjust. */
4396
4397 #if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
4398 if (! spans_byte && is_mode != wanted_mem_mode)
4399 offset = (GET_MODE_SIZE (is_mode)
4400 - GET_MODE_SIZE (wanted_mem_mode) - offset);
4401 #endif
4402
4403 /* If bytes are big endian and we had a paradoxical SUBREG, we must
4404 adjust OFFSET to compensate. */
4405 #if BYTES_BIG_ENDIAN
4406 if (! spans_byte
4407 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
4408 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
4409 #endif
4410
4411 /* If this is a constant position, we can move to the desired byte. */
4412 if (pos >= 0)
4413 {
4414 offset += pos / BITS_PER_UNIT;
4415 pos %= GET_MODE_BITSIZE (wanted_mem_mode);
4416 }
4417
4418 if (offset != 0 || inner_mode != wanted_mem_mode)
4419 {
4420 rtx newmem = gen_rtx (MEM, wanted_mem_mode,
4421 plus_constant (XEXP (inner, 0), offset));
4422 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
4423 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (inner);
4424 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (inner);
4425 inner = newmem;
4426 }
4427 }
4428
4429 /* If INNER is not memory, we can always get it into the proper mode. */
4430 else if (GET_CODE (inner) != MEM)
4431 inner = force_to_mode (inner, extraction_mode,
4432 (pos < 0 ? GET_MODE_BITSIZE (extraction_mode)
4433 : len + pos),
4434 NULL_RTX);
4435
4436 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
4437 have to zero extend. Otherwise, we can just use a SUBREG. */
4438 if (pos < 0
4439 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
4440 pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
4441 else if (pos < 0
4442 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
4443 pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
4444
4445 /* Make POS_RTX unless we already have it and it is correct. */
4446 if (pos_rtx == 0 || (pos >= 0 && INTVAL (pos_rtx) != pos))
4447 pos_rtx = GEN_INT (pos);
4448
4449 /* Make the required operation. See if we can use existing rtx. */
4450 new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
4451 extraction_mode, inner, GEN_INT (len), pos_rtx);
4452 if (! in_dest)
4453 new = gen_lowpart_for_combine (mode, new);
4454
4455 return new;
4456 }
4457 \f
4458 /* Look at the expression rooted at X. Look for expressions
4459 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
4460 Form these expressions.
4461
4462 Return the new rtx, usually just X.
4463
4464 Also, for machines like the Vax that don't have logical shift insns,
4465 try to convert logical to arithmetic shift operations in cases where
4466 they are equivalent. This undoes the canonicalizations to logical
4467 shifts done elsewhere.
4468
4469 We try, as much as possible, to re-use rtl expressions to save memory.
4470
4471 IN_CODE says what kind of expression we are processing. Normally, it is
4472 	   SET.  In a memory address (inside a MEM, PLUS or MINUS, the latter two
4473 being kludges), it is MEM. When processing the arguments of a comparison
4474 or a COMPARE against zero, it is COMPARE. */
4475
4476 static rtx
4477 make_compound_operation (x, in_code)
4478 rtx x;
4479 enum rtx_code in_code;
4480 {
4481 enum rtx_code code = GET_CODE (x);
4482 enum machine_mode mode = GET_MODE (x);
4483 int mode_width = GET_MODE_BITSIZE (mode);
4484 enum rtx_code next_code;
4485 int i, count;
4486 rtx new = 0;
4487 char *fmt;
4488
4489 /* Select the code to be used in recursive calls. Once we are inside an
4490 address, we stay there. If we have a comparison, set to COMPARE,
4491 but once inside, go back to our default of SET. */
4492
4493 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
4494 : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
4495 && XEXP (x, 1) == const0_rtx) ? COMPARE
4496 : in_code == COMPARE ? SET : in_code);
4497
4498 /* Process depending on the code of this operation. If NEW is set
4499 non-zero, it will be returned. */
4500
4501 switch (code)
4502 {
4503 case ASHIFT:
4504 case LSHIFT:
4505 /* Convert shifts by constants into multiplications if inside
4506 an address. */
4507 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
4508 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
4509 && INTVAL (XEXP (x, 1)) >= 0)
4510 new = gen_rtx_combine (MULT, mode, XEXP (x, 0),
4511 GEN_INT ((HOST_WIDE_INT) 1
4512 << INTVAL (XEXP (x, 1))));
4513 break;
4514
4515 case AND:
4516 /* If the second operand is not a constant, we can't do anything
4517 with it. */
4518 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4519 break;
4520
4521 /* If the constant is a power of two minus one and the first operand
4522 is a logical right shift, make an extraction. */
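      /* E.g., (and (lshiftrt x 5) 63) denotes a 6-bit field starting at
	 bit 5 of X, i.e. (zero_extract x 6 5).  */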
4523 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
4524 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
4525 new = make_extraction (mode, XEXP (XEXP (x, 0), 0), -1,
4526 XEXP (XEXP (x, 0), 1), i, 1,
4527 0, in_code == COMPARE);
4528
4529 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
4530 else if (GET_CODE (XEXP (x, 0)) == SUBREG
4531 && subreg_lowpart_p (XEXP (x, 0))
4532 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
4533 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
4534 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))),
4535 XEXP (SUBREG_REG (XEXP (x, 0)), 0), -1,
4536 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
4537 0, in_code == COMPARE);
4538
4539
4540 	      /* If we have (and (rotate X C) M) and C is at least as large as
4541 	         the number of bits in M, this is an extraction.  */
4542
4543 else if (GET_CODE (XEXP (x, 0)) == ROTATE
4544 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4545 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
4546 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
4547 new = make_extraction (mode, XEXP (XEXP (x, 0), 0),
4548 (GET_MODE_BITSIZE (mode)
4549 - INTVAL (XEXP (XEXP (x, 0), 1))),
4550 NULL_RTX, i, 1, 0, in_code == COMPARE);
4551
4552 /* On machines without logical shifts, if the operand of the AND is
4553 a logical shift and our mask turns off all the propagated sign
4554 bits, we can replace the logical shift with an arithmetic shift. */
4555 else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
4556 && (lshr_optab->handlers[(int) mode].insn_code
4557 == CODE_FOR_nothing)
4558 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
4559 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4560 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
4561 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
4562 && mode_width <= HOST_BITS_PER_WIDE_INT)
4563 {
4564 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
4565
4566 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
4567 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
4568 SUBST (XEXP (x, 0),
4569 gen_rtx_combine (ASHIFTRT, mode, XEXP (XEXP (x, 0), 0),
4570 XEXP (XEXP (x, 0), 1)));
4571 }
4572
4573 /* If the constant is one less than a power of two, this might be
4574 representable by an extraction even if no shift is present.
4575 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
4576 we are in a COMPARE. */
4577 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
4578 new = make_extraction (mode, XEXP (x, 0), 0, NULL_RTX, i, 1,
4579 0, in_code == COMPARE);
4580
4581 /* If we are in a comparison and this is an AND with a power of two,
4582 convert this into the appropriate bit extract. */
4583 else if (in_code == COMPARE
4584 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
4585 new = make_extraction (mode, XEXP (x, 0), i, NULL_RTX, 1, 1, 0, 1);
4586
4587 break;
4588
4589 case LSHIFTRT:
4590 /* If the sign bit is known to be zero, replace this with an
4591 arithmetic shift. */
4592 if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
4593 && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
4594 && mode_width <= HOST_BITS_PER_WIDE_INT
4595 && (significant_bits (XEXP (x, 0), mode)
4596 	          & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
4597 {
4598 new = gen_rtx_combine (ASHIFTRT, mode, XEXP (x, 0), XEXP (x, 1));
4599 break;
4600 }
4601
4602 /* ... fall through ... */
4603
4604 case ASHIFTRT:
4605 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
4606 this is a SIGN_EXTRACT. */
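      /* E.g. (assuming a 32-bit mode), (ashiftrt (ashift x 24) 26) is
	 (sign_extract x 6 2): LEN = 32 - C2 = 6 bits starting at
	 POS = C2 - C1 = 2, sign-extended.  */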
4607 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4608 && GET_CODE (XEXP (x, 0)) == ASHIFT
4609 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4610 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (x, 0), 1)))
4611 new = make_extraction (mode, XEXP (XEXP (x, 0), 0),
4612 (INTVAL (XEXP (x, 1))
4613 - INTVAL (XEXP (XEXP (x, 0), 1))),
4614 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
4615 code == LSHIFTRT, 0, in_code == COMPARE);
4616
4617 	      /* Similarly if we have (ashiftrt (OP (ashift foo C1) C3) C2).  In these
4618 cases, we are better off returning a SIGN_EXTEND of the operation. */
4619
4620 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4621 && (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND
4622 || GET_CODE (XEXP (x, 0)) == XOR
4623 || GET_CODE (XEXP (x, 0)) == PLUS)
4624 && GET_CODE (XEXP (XEXP (x, 0), 0)) == ASHIFT
4625 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
4626 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
4627 && INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)) < HOST_BITS_PER_WIDE_INT
4628 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4629 && (INTVAL (XEXP (XEXP (x, 0), 1))
4630 & (((HOST_WIDE_INT) 1
4631 << INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))) - 1)) == 0)
4632 {
4633 HOST_WIDE_INT newop1
4634 = (INTVAL (XEXP (XEXP (x, 0), 1))
4635 >> INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)));
4636
4637 new = make_extraction (mode,
4638 gen_binary (GET_CODE (XEXP (x, 0)), mode,
4639 XEXP (XEXP (XEXP (x, 0), 0), 0),
4640 GEN_INT (newop1)),
4641 (INTVAL (XEXP (x, 1))
4642 - INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))),
4643 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
4644 code == LSHIFTRT, 0, in_code == COMPARE);
4645 }
4646
4647 break;
4648 }
4649
4650 if (new)
4651 {
4652 x = gen_lowpart_for_combine (mode, new);
4653 code = GET_CODE (x);
4654 }
4655
4656 /* Now recursively process each operand of this operation. */
4657 fmt = GET_RTX_FORMAT (code);
4658 for (i = 0; i < GET_RTX_LENGTH (code); i++)
4659 if (fmt[i] == 'e')
4660 {
4661 new = make_compound_operation (XEXP (x, i), next_code);
4662 SUBST (XEXP (x, i), new);
4663 }
4664
4665 return x;
4666 }
4667 \f
4668 	/* Given M, see if it is a value that would select a field of bits
4669 within an item, but not the entire word. Return -1 if not.
4670 Otherwise, return the starting position of the field, where 0 is the
4671 low-order bit.
4672
4673 *PLEN is set to the length of the field. */
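/* For example, M == 0x78 (binary 1111000) yields position 3 with
   *PLEN == 4, and M == 0x0f yields position 0 with *PLEN == 4, while
   M == 0x50 selects no contiguous field and so yields -1.  */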
4674
4675 static int
4676 get_pos_from_mask (m, plen)
4677 unsigned HOST_WIDE_INT m;
4678 int *plen;
4679 {
4680 /* Get the bit number of the first 1 bit from the right, -1 if none. */
4681 int pos = exact_log2 (m & - m);
4682
4683 if (pos < 0)
4684 return -1;
4685
4686 /* Now shift off the low-order zero bits and see if we have a power of
4687 two minus 1. */
4688 *plen = exact_log2 ((m >> pos) + 1);
4689
4690 if (*plen <= 0)
4691 return -1;
4692
4693 return pos;
4694 }
4695 \f
4696 /* Rewrite X so that it is an expression in MODE. We only care about the
4697 low-order BITS bits so we can ignore AND operations that just clear
4698 higher-order bits.
4699
4700 Also, if REG is non-zero and X is a register equal in value to REG,
4701 replace X with REG. */
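/* For instance (illustrative), forcing (and X 255) into a context where
   only the low 8 bits matter (BITS == 8) drops the AND entirely, since
   it only clears bits we do not care about.  */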
4702
4703 static rtx
4704 force_to_mode (x, mode, bits, reg)
4705 rtx x;
4706 enum machine_mode mode;
4707 int bits;
4708 rtx reg;
4709 {
4710 enum rtx_code code = GET_CODE (x);
4711 enum machine_mode op_mode = mode;
4712
4713 /* If X is narrower than MODE or if BITS is larger than the size of MODE,
4714 just get X in the proper mode. */
4715
4716 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
4717 || bits > GET_MODE_BITSIZE (mode))
4718 return gen_lowpart_for_combine (mode, x);
4719
4720 switch (code)
4721 {
4722 case SIGN_EXTEND:
4723 case ZERO_EXTEND:
4724 case ZERO_EXTRACT:
4725 case SIGN_EXTRACT:
4726 x = expand_compound_operation (x);
4727 if (GET_CODE (x) != code)
4728 return force_to_mode (x, mode, bits, reg);
4729 break;
4730
4731 case REG:
4732 if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
4733 || rtx_equal_p (reg, get_last_value (x))))
4734 x = reg;
4735 break;
4736
4737 case CONST_INT:
4738 if (bits < HOST_BITS_PER_WIDE_INT)
4739 x = GEN_INT (INTVAL (x) & (((HOST_WIDE_INT) 1 << bits) - 1));
4740 return x;
4741
4742 case SUBREG:
4743 /* Ignore low-order SUBREGs. */
4744 if (subreg_lowpart_p (x))
4745 return force_to_mode (SUBREG_REG (x), mode, bits, reg);
4746 break;
4747
4748 case AND:
4749 /* If this is an AND with a constant, handle it specially here.
4750 Otherwise, we fall through to do the general binary case. */
4751
4752 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4753 {
4754 HOST_WIDE_INT mask = INTVAL (XEXP (x, 1));
4755 int len = exact_log2 (mask + 1);
4756 rtx op = XEXP (x, 0);
4757
4758 /* If this is masking some low-order bits, we may be able to
4759 impose a stricter constraint on what bits of the operand are
4760 required. */
4761
4762 op = force_to_mode (op, mode, len > 0 ? MIN (len, bits) : bits,
4763 reg);
4764
4765 if (bits < HOST_BITS_PER_WIDE_INT)
4766 mask &= ((HOST_WIDE_INT) 1 << bits) - 1;
4767
4768 /* If we have no AND in MODE, use the original mode for the
4769 operation. */
4770
4771 if (and_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
4772 op_mode = GET_MODE (x);
4773
4774 x = simplify_and_const_int (x, op_mode, op, mask);
4775
4776 /* If X is still an AND, see if it is an AND with a mask that
4777 is just some low-order bits. If so, and it is BITS wide (it
4778 can't be wider), we don't need it. */
4779
4780 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
4781 && bits < HOST_BITS_PER_WIDE_INT
4782 && INTVAL (XEXP (x, 1)) == ((HOST_WIDE_INT) 1 << bits) - 1)
4783 x = XEXP (x, 0);
4784
4785 break;
4786 }
4787
4788 /* ... fall through ... */
4789
4790 case PLUS:
4791 case MINUS:
4792 case MULT:
4793 case IOR:
4794 case XOR:
4795 /* For most binary operations, just propagate into the operation and
4796 change the mode if we have an operation of that mode. */
4797
4798 if ((code == PLUS
4799 && add_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
4800 || (code == MINUS
4801 && sub_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
4802 || (code == MULT && (smul_optab->handlers[(int) mode].insn_code
4803 == CODE_FOR_nothing))
4804 || (code == AND
4805 && and_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
4806 || (code == IOR
4807 && ior_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
4808 || (code == XOR && (xor_optab->handlers[(int) mode].insn_code
4809 == CODE_FOR_nothing)))
4810 op_mode = GET_MODE (x);
4811
4812 x = gen_binary (code, op_mode,
4813 gen_lowpart_for_combine (op_mode,
4814 force_to_mode (XEXP (x, 0),
4815 mode, bits,
4816 reg)),
4817 gen_lowpart_for_combine (op_mode,
4818 force_to_mode (XEXP (x, 1),
4819 mode, bits,
4820 reg)));
4821 break;
4822
4823 case ASHIFT:
4824 case LSHIFT:
4825 /* For left shifts, do the same, but just for the first operand.
4826 If the shift count is a constant, we need even fewer bits of the
4827 first operand. */
4828
4829 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) < bits)
4830 bits -= INTVAL (XEXP (x, 1));
4831
4832 if ((code == ASHIFT
4833 && ashl_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
4834 || (code == LSHIFT && (lshl_optab->handlers[(int) mode].insn_code
4835 == CODE_FOR_nothing)))
4836 op_mode = GET_MODE (x);
4837
4838 x = gen_binary (code, op_mode,
4839 gen_lowpart_for_combine (op_mode,
4840 force_to_mode (XEXP (x, 0),
4841 mode, bits,
4842 reg)),
4843 XEXP (x, 1));
4844 break;
4845
4846 case LSHIFTRT:
4847 /* Here we can only do something if the shift count is a constant.
4848 If the count plus BITS is no larger than the width of MODE, we
4849 can do the shift in MODE. */
4850
4851 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4852 && INTVAL (XEXP (x, 1)) + bits <= GET_MODE_BITSIZE (mode))
4853 {
4854 rtx inner = force_to_mode (XEXP (x, 0), mode,
4855 bits + INTVAL (XEXP (x, 1)), reg);
4856
4857 if (lshr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
4858 op_mode = GET_MODE (x);
4859
4860 x = gen_binary (LSHIFTRT, op_mode,
4861 gen_lowpart_for_combine (op_mode, inner),
4862 XEXP (x, 1));
4863 }
4864 break;
4865
4866 case ASHIFTRT:
4867 /* If this is a sign-extension operation that just affects bits
4868 we don't care about, remove it. */
4869
4870 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4871 && INTVAL (XEXP (x, 1)) >= 0
4872 && INTVAL (XEXP (x, 1)) <= GET_MODE_BITSIZE (GET_MODE (x)) - bits
4873 && GET_CODE (XEXP (x, 0)) == ASHIFT
4874 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4875 && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
4876 return force_to_mode (XEXP (XEXP (x, 0), 0), mode, bits, reg);
4877 break;
4878
4879 case NEG:
4880 case NOT:
4881 if ((code == NEG
4882 && neg_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
4883 || (code == NOT && (one_cmpl_optab->handlers[(int) mode].insn_code
4884 == CODE_FOR_nothing)))
4885 op_mode = GET_MODE (x);
4886
4887 /* Handle these similarly to the way we handle most binary operations. */
4888 x = gen_unary (code, op_mode,
4889 gen_lowpart_for_combine (op_mode,
4890 force_to_mode (XEXP (x, 0), mode,
4891 bits, reg)));
4892 break;
4893
4894 case IF_THEN_ELSE:
4895 /* We have no way of knowing if the IF_THEN_ELSE can itself be
4896 written in a narrower mode. We play it safe and do not do so. */
4897
4898 SUBST (XEXP (x, 1),
4899 gen_lowpart_for_combine (GET_MODE (x),
4900 force_to_mode (XEXP (x, 1), mode,
4901 bits, reg)));
4902 SUBST (XEXP (x, 2),
4903 gen_lowpart_for_combine (GET_MODE (x),
4904 force_to_mode (XEXP (x, 2), mode,
4905 bits, reg)));
4906 break;
4907 }
4908
4909 /* Ensure we return a value of the proper mode. */
4910 return gen_lowpart_for_combine (mode, x);
4911 }
4912 \f
4913 /* See if X, a SET operation, can be rewritten as a bit-field assignment.
4914 Return that assignment if so.
4915
4916 We only handle the most common cases. */
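/* For example (illustrative, in a 32-bit mode), the insn
   (set R (ior (and R (const_int -241)) (const_int 32)))
   stores into a 4-bit field at bit 4: ~(-241) == 0xf0 gives POS == 4
   and LEN == 4, and OTHER == 32 shifted right by POS gives the new
   field value 2. */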
4917
4918 static rtx
4919 make_field_assignment (x)
4920 rtx x;
4921 {
4922 rtx dest = SET_DEST (x);
4923 rtx src = SET_SRC (x);
4924 rtx ourdest;
4925 rtx assign;
4926 HOST_WIDE_INT c1;
4927 int pos, len;
4928 rtx other;
4929 enum machine_mode mode;
4930
4931 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
4932 a clear of a one-bit field. We will have changed it to
4933 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
4934 for a SUBREG. */
4935
4936 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
4937 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
4938 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
4939 && (rtx_equal_p (dest, XEXP (src, 1))
4940 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
4941 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
4942 {
4943 assign = make_extraction (VOIDmode, dest, -1, XEXP (XEXP (src, 0), 1),
4944 1, 1, 1, 0);
4945 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
4946 }
4947
4948 else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
4949 && subreg_lowpart_p (XEXP (src, 0))
4950 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
4951 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
4952 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
4953 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
4954 && (rtx_equal_p (dest, XEXP (src, 1))
4955 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
4956 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
4957 {
4958 assign = make_extraction (VOIDmode, dest, -1,
4959 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
4960 1, 1, 1, 0);
4961 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
4962 }
4963
4964 /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
4965 one-bit field. */
4966 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
4967 && XEXP (XEXP (src, 0), 0) == const1_rtx
4968 && (rtx_equal_p (dest, XEXP (src, 1))
4969 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
4970 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
4971 {
4972 assign = make_extraction (VOIDmode, dest, -1, XEXP (XEXP (src, 0), 1),
4973 1, 1, 1, 0);
4974 return gen_rtx (SET, VOIDmode, assign, const1_rtx);
4975 }
4976
4977 /* The other case we handle is assignments into a constant-position
4978 field. They look like (ior (and DEST C1) OTHER). If C1 represents
4979 a mask that has all one bits except for a group of zero bits and
4980 OTHER is known to have zeros where C1 has ones, this is such an
4981 assignment. Compute the position and length from C1. Shift OTHER
4982 to the appropriate position, force it to the required mode, and
4983 make the extraction. Check for the AND in both operands. */
4984
4985 if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == AND
4986 && GET_CODE (XEXP (XEXP (src, 0), 1)) == CONST_INT
4987 && (rtx_equal_p (XEXP (XEXP (src, 0), 0), dest)
4988 || rtx_equal_p (XEXP (XEXP (src, 0), 0), get_last_value (dest))
4989 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 0), 0)), dest)))
4990 c1 = INTVAL (XEXP (XEXP (src, 0), 1)), other = XEXP (src, 1);
4991 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 1)) == AND
4992 && GET_CODE (XEXP (XEXP (src, 1), 1)) == CONST_INT
4993 && (rtx_equal_p (XEXP (XEXP (src, 1), 0), dest)
4994 || rtx_equal_p (XEXP (XEXP (src, 1), 0), get_last_value (dest))
4995 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 1), 0)),
4996 dest)))
4997 c1 = INTVAL (XEXP (XEXP (src, 1), 1)), other = XEXP (src, 0);
4998 else
4999 return x;
5000
5001 pos = get_pos_from_mask (~c1, &len);
5002 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
5003 || (c1 & significant_bits (other, GET_MODE (other))) != 0)
5004 return x;
5005
5006 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
5007
5008 /* The mode to use for the source is the mode of the assignment, or of
5009 what is inside a possible STRICT_LOW_PART. */
5010 mode = (GET_CODE (assign) == STRICT_LOW_PART
5011 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
5012
5013 /* Shift OTHER right POS places and make it the source, restricting it
5014 to the proper length and mode. */
5015
5016 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
5017 GET_MODE (src), other, pos),
5018 mode, len, dest);
5019
5020 return gen_rtx_combine (SET, VOIDmode, assign, src);
5021 }
5022 \f
5023 /* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
5024 if so. */
5025
5026 static rtx
5027 apply_distributive_law (x)
5028 rtx x;
5029 {
5030 enum rtx_code code = GET_CODE (x);
5031 rtx lhs, rhs, other;
5032 rtx tem;
5033 enum rtx_code inner_code;
5034
5035 /* The outer operation can only be one of the following: */
5036 if (code != IOR && code != AND && code != XOR
5037 && code != PLUS && code != MINUS)
5038 return x;
5039
5040 lhs = XEXP (x, 0), rhs = XEXP (x, 1);
5041
5042 /* If either operand is a primitive we can't do anything, so get out fast. */
5043 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
5044 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
5045 return x;
5046
5047 lhs = expand_compound_operation (lhs);
5048 rhs = expand_compound_operation (rhs);
5049 inner_code = GET_CODE (lhs);
5050 if (inner_code != GET_CODE (rhs))
5051 return x;
5052
5053 /* See if the inner and outer operations distribute. */
5054 switch (inner_code)
5055 {
5056 case LSHIFTRT:
5057 case ASHIFTRT:
5058 case AND:
5059 case IOR:
5060 /* These all distribute except over PLUS. */
5061 if (code == PLUS || code == MINUS)
5062 return x;
5063 break;
5064
5065 case MULT:
5066 if (code != PLUS && code != MINUS)
5067 return x;
5068 break;
5069
5070 case ASHIFT:
5071 case LSHIFT:
5072 /* These are also multiplies, so they distribute over everything. */
5073 break;
5074
5075 case SUBREG:
5076 /* Non-paradoxical SUBREGs distribute over all operations, provided
5077 the inner modes and word numbers are the same, this is an extraction
5078 of a low-order part, we don't convert an fp operation to int or
5079 vice versa, and we would not be converting a single-word
5080 operation into a multi-word operation. The latter test is not
5081 required, but it prevents generating unneeded multi-word operations.
5082 Some of the previous tests are redundant given the latter test, but
5083 are retained because they are required for correctness.
5084
5085 We produce the result slightly differently in this case. */
5086
5087 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
5088 || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
5089 || ! subreg_lowpart_p (lhs)
5090 || (GET_MODE_CLASS (GET_MODE (lhs))
5091 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
5092 || (GET_MODE_SIZE (GET_MODE (lhs))
5093 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
5094 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
5095 return x;
5096
5097 tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
5098 SUBREG_REG (lhs), SUBREG_REG (rhs));
5099 return gen_lowpart_for_combine (GET_MODE (x), tem);
5100
5101 default:
5102 return x;
5103 }
5104
5105 /* Set LHS and RHS to the inner operands (A and B in the example
5106 above) and set OTHER to the common operand (C in the example).
5107 There is only one way to do this unless the inner operation is
5108 commutative. */
5109 if (GET_RTX_CLASS (inner_code) == 'c'
5110 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
5111 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
5112 else if (GET_RTX_CLASS (inner_code) == 'c'
5113 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
5114 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
5115 else if (GET_RTX_CLASS (inner_code) == 'c'
5116 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
5117 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
5118 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
5119 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
5120 else
5121 return x;
5122
5123 /* Form the new inner operation, seeing if it simplifies first. */
5124 tem = gen_binary (code, GET_MODE (x), lhs, rhs);
5125
5126 /* There is one exception to the general way of distributing:
5127 (a | c) ^ (b | c) -> (a ^ b) & (~c) */
5128 if (code == XOR && inner_code == IOR)
5129 {
5130 inner_code = AND;
5131 other = gen_unary (NOT, GET_MODE (x), other);
5132 }
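/* (Illustrative check with single bits: a == 1, b == 0, c == 1 gives
   (a | c) ^ (b | c) == 0 and (a ^ b) & ~c == 0 as well.) */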
5133
5134 /* We may be able to continue distributing the result, so call
5135 ourselves recursively on the inner operation before forming the
5136 outer operation, which we return. */
5137 return gen_binary (inner_code, GET_MODE (x),
5138 apply_distributive_law (tem), other);
5139 }
5140 \f
5141 /* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
5142 in MODE.
5143
5144 Return an equivalent form, if different from X. Otherwise, return X. If
5145 X is zero, we are to always construct the equivalent form. */
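/* For example (illustrative): (and:SI (lshiftrt:SI X (const_int 24))
   (const_int 255)) needs no AND at all, since the shift already clears
   the upper 24 bits; we return the shift itself. */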
5146
5147 static rtx
5148 simplify_and_const_int (x, mode, varop, constop)
5149 rtx x;
5150 enum machine_mode mode;
5151 rtx varop;
5152 unsigned HOST_WIDE_INT constop;
5153 {
5154 register enum machine_mode tmode;
5155 register rtx temp;
5156 unsigned HOST_WIDE_INT significant;
5157
5158 /* There is a large class of optimizations based on the principle that
5159 some operations produce results where certain bits are known to be zero,
5160 and hence are not significant to the AND. For example, if we have just
5161 done a left shift of one bit, the low-order bit is known to be zero and
5162 hence an AND with a mask of ~1 would not do anything.
5163
5164 At the end of the following loop, we set:
5165
5166 VAROP to be the item to be AND'ed with;
5167 CONSTOP to the constant value to AND it with. */
5168
5169 while (1)
5170 {
5171 /* If we ever encounter a mode wider than the host machine's widest
5172 integer size, we can't compute the masks accurately, so give up. */
5173 if (GET_MODE_BITSIZE (GET_MODE (varop)) > HOST_BITS_PER_WIDE_INT)
5174 break;
5175
5176 /* Unless one of the cases below does a `continue',
5177 a `break' will be executed to exit the loop. */
5178
5179 switch (GET_CODE (varop))
5180 {
5181 case CLOBBER:
5182 /* If VAROP is a (clobber (const_int)), return it since we know
5183 we are generating something that won't match. */
5184 return varop;
5185
5186 #if ! BITS_BIG_ENDIAN
5187 case USE:
5188 /* VAROP is a (use (mem ..)) that was made from a bit-field
5189 extraction that spanned the boundary of the MEM. If we are
5190 now masking so it is within that boundary, we don't need the
5191 USE any more. */
5192 if ((constop & ~ GET_MODE_MASK (GET_MODE (XEXP (varop, 0)))) == 0)
5193 {
5194 varop = XEXP (varop, 0);
5195 continue;
5196 }
5197 break;
5198 #endif
5199
5200 case SUBREG:
5201 if (subreg_lowpart_p (varop)
5202 /* We can ignore the effect of this SUBREG if it narrows the mode
5203 or, on machines where byte operations zero extend, if the
5204 constant masks to zero all the bits the mode doesn't have. */
5205 && ((GET_MODE_SIZE (GET_MODE (varop))
5206 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop))))
5207 #ifdef BYTE_LOADS_ZERO_EXTEND
5208 || (0 == (constop
5209 & GET_MODE_MASK (GET_MODE (varop))
5210 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (varop)))))
5211 #endif
5212 ))
5213 {
5214 varop = SUBREG_REG (varop);
5215 continue;
5216 }
5217 break;
5218
5219 case ZERO_EXTRACT:
5220 case SIGN_EXTRACT:
5221 case ZERO_EXTEND:
5222 case SIGN_EXTEND:
5223 /* Try to expand these into a series of shifts and then work
5224 with that result. If we can't, for example, if the extract
5225 isn't at a fixed position, give up. */
5226 temp = expand_compound_operation (varop);
5227 if (temp != varop)
5228 {
5229 varop = temp;
5230 continue;
5231 }
5232 break;
5233
5234 case AND:
5235 if (GET_CODE (XEXP (varop, 1)) == CONST_INT)
5236 {
5237 constop &= INTVAL (XEXP (varop, 1));
5238 varop = XEXP (varop, 0);
5239 continue;
5240 }
5241 break;
5242
5243 case IOR:
5244 case XOR:
5245 /* If VAROP is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
5246 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
5247 operation, which may be a bitfield extraction. */
5248
5249 if (GET_CODE (XEXP (varop, 0)) == LSHIFTRT
5250 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
5251 && INTVAL (XEXP (XEXP (varop, 0), 1)) >= 0
5252 && INTVAL (XEXP (XEXP (varop, 0), 1)) < HOST_BITS_PER_WIDE_INT
5253 && GET_CODE (XEXP (varop, 1)) == CONST_INT
5254 && ((INTVAL (XEXP (varop, 1))
5255 & ~ significant_bits (XEXP (varop, 0),
5256 GET_MODE (varop))) == 0))
5257 {
5258 temp = GEN_INT ((INTVAL (XEXP (varop, 1)) & constop)
5259 << INTVAL (XEXP (XEXP (varop, 0), 1)));
5260 temp = gen_binary (GET_CODE (varop), GET_MODE (varop),
5261 XEXP (XEXP (varop, 0), 0), temp);
5262 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
5263 temp, XEXP (varop, 1));
5264 continue;
5265 }
5266
5267 /* Apply the AND to both branches of the IOR or XOR, then try to
5268 apply the distributive law. This may eliminate operations
5269 if either branch can be simplified because of the AND.
5270 It may also make some cases more complex, but those cases
5271 probably won't match a pattern either with or without this. */
5272 return
5273 gen_lowpart_for_combine
5274 (mode, apply_distributive_law
5275 (gen_rtx_combine
5276 (GET_CODE (varop), GET_MODE (varop),
5277 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
5278 XEXP (varop, 0), constop),
5279 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
5280 XEXP (varop, 1), constop))));
5281
5282 case NOT:
5283 /* (and (not FOO) CONST) is (and (xor FOO CONST) CONST), so if FOO
5284 is an LSHIFTRT we can do the same as above. */
5285
5286 if (GET_CODE (XEXP (varop, 0)) == LSHIFTRT
5287 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
5288 && INTVAL (XEXP (XEXP (varop, 0), 1)) >= 0
5289 && INTVAL (XEXP (XEXP (varop, 0), 1)) < HOST_BITS_PER_WIDE_INT)
5290 {
5291 temp = GEN_INT (constop << INTVAL (XEXP (XEXP (varop, 0), 1)));
5292 temp = gen_binary (XOR, GET_MODE (varop),
5293 XEXP (XEXP (varop, 0), 0), temp);
5294 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
5295 temp, XEXP (XEXP (varop, 0), 1));
5296 continue;
5297 }
5298 break;
5299
5300 case ASHIFTRT:
5301 /* If we are just looking for the sign bit, we don't need this
5302 shift at all, even if it has a variable count. */
5303 if (constop == ((HOST_WIDE_INT) 1
5304 << (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)))
5305 {
5306 varop = XEXP (varop, 0);
5307 continue;
5308 }
5309
5310 /* If this is a shift by a constant, get a mask that contains
5311 those bits that are not copies of the sign bit. We then have
5312 two cases: If CONSTOP only includes those bits, this can be
5313 a logical shift, which may allow simplifications. If CONSTOP
5314 is a single-bit field not within those bits, we are requesting
5315 a copy of the sign bit and hence can shift the sign bit to
5316 the appropriate location. */
5317 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
5318 && INTVAL (XEXP (varop, 1)) >= 0
5319 && INTVAL (XEXP (varop, 1)) < HOST_BITS_PER_WIDE_INT)
5320 {
5321 int i = -1;
5322
5323 significant = GET_MODE_MASK (GET_MODE (varop));
5324 significant >>= INTVAL (XEXP (varop, 1));
5325
5326 if ((constop & ~significant) == 0
5327 || (i = exact_log2 (constop)) >= 0)
5328 {
5329 varop = simplify_shift_const
5330 (varop, LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
5331 i < 0 ? INTVAL (XEXP (varop, 1))
5332 : GET_MODE_BITSIZE (GET_MODE (varop)) - 1 - i);
5333 if (GET_CODE (varop) != ASHIFTRT)
5334 continue;
5335 }
5336 }
5337
5338 /* If our mask is 1, convert this to an LSHIFTRT. This can be done
5339 even if the shift count isn't a constant. */
5340 if (constop == 1)
5341 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
5342 XEXP (varop, 0), XEXP (varop, 1));
5343 break;
5344
5345 case NE:
5346 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is
5347 included in STORE_FLAG_VALUE and FOO has no significant bits
5348 not in CONST. */
5349 if ((constop & ~ STORE_FLAG_VALUE) == 0
5350 && XEXP (varop, 1) == const0_rtx
5351 && (significant_bits (XEXP (varop, 0), mode) & ~ constop) == 0)
5352 {
5353 varop = XEXP (varop, 0);
5354 continue;
5355 }
5356 break;
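/* (Illustrative: with STORE_FLAG_VALUE == 1, (and:SI (ne:SI X
   (const_int 0)) (const_int 1)) reduces to X itself when X is known
   to be either 0 or 1.) */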
5357
5358 case PLUS:
5359 /* In (and (plus FOO C1) M), if M is a mask that just turns off
5360 low-order bits (as in an alignment operation) and FOO is already
5361 aligned to that boundary, we can remove this AND
5362 and possibly the PLUS if it is now adding zero. */
5363 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
5364 && exact_log2 (-constop) >= 0
5365 && (significant_bits (XEXP (varop, 0), mode) & ~ constop) == 0)
5366 {
5367 varop = plus_constant (XEXP (varop, 0),
5368 INTVAL (XEXP (varop, 1)) & constop);
5369 constop = ~0;
5370 break;
5371 }
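/* (Illustrative: if FOO is known to be 8-byte aligned,
   (and:SI (plus:SI FOO (const_int 11)) (const_int -8)) becomes
   (plus:SI FOO (const_int 8)).) */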
5372
5373 /* ... fall through ... */
5374
5375 case MINUS:
5376 /* In (and (plus (and FOO M1) BAR) M2), if M1 and M2 are one
5377 less than powers of two and M2 is narrower than M1, we can
5378 eliminate the inner AND. This occurs when incrementing
5379 bit fields. */
5380
5381 if (GET_CODE (XEXP (varop, 0)) == ZERO_EXTRACT
5382 || GET_CODE (XEXP (varop, 0)) == ZERO_EXTEND)
5383 SUBST (XEXP (varop, 0),
5384 expand_compound_operation (XEXP (varop, 0)));
5385
5386 if (GET_CODE (XEXP (varop, 0)) == AND
5387 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
5388 && exact_log2 (constop + 1) >= 0
5389 && exact_log2 (INTVAL (XEXP (XEXP (varop, 0), 1)) + 1) >= 0
5390 && (~ INTVAL (XEXP (XEXP (varop, 0), 1)) & constop) == 0)
5391 SUBST (XEXP (varop, 0), XEXP (XEXP (varop, 0), 0));
5392 break;
5393 }
5394
5395 break;
5396 }
5397
5398 /* If we have reached a constant, this whole thing is constant. */
5399 if (GET_CODE (varop) == CONST_INT)
5400 return GEN_INT (constop & INTVAL (varop));
5401
5402 /* See what bits are significant in VAROP. */
5403 significant = significant_bits (varop, mode);
5404
5405 /* Turn off all bits in the constant that are known to already be zero.
5406 Thus, if the AND isn't needed at all, we will have CONSTOP == SIGNIFICANT
5407 which is tested below. */
5408
5409 constop &= significant;
5410
5411 /* If we don't have any bits left, return zero. */
5412 if (constop == 0)
5413 return const0_rtx;
5414
5415 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
5416 if we already had one (just check for the simplest cases). */
5417 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
5418 && GET_MODE (XEXP (x, 0)) == mode
5419 && SUBREG_REG (XEXP (x, 0)) == varop)
5420 varop = XEXP (x, 0);
5421 else
5422 varop = gen_lowpart_for_combine (mode, varop);
5423
5424 /* If we can't make the SUBREG, try to return what we were given. */
5425 if (GET_CODE (varop) == CLOBBER)
5426 return x ? x : varop;
5427
5428 /* If we are only masking insignificant bits, return VAROP. */
5429 if (constop == significant)
5430 x = varop;
5431
5432 /* Otherwise, return an AND. See how much, if any, of X we can use. */
5433 else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
5434 x = gen_rtx_combine (AND, mode, varop, GEN_INT (constop));
5435
5436 else
5437 {
5438 if (GET_CODE (XEXP (x, 1)) != CONST_INT
5439 || INTVAL (XEXP (x, 1)) != constop)
5440 SUBST (XEXP (x, 1), GEN_INT (constop));
5441
5442 SUBST (XEXP (x, 0), varop);
5443 }
5444
5445 return x;
5446 }
5447 \f
5448 /* Given an expression, X, compute which bits in X can be non-zero.
5449 We don't care about bits outside of those defined in MODE.
5450
5451 For most X this is simply GET_MODE_MASK (MODE), but if X is
5452 a shift, AND, or zero_extract, we can do better. */
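/* For example (illustrative): in SImode, (lshiftrt:SI X (const_int 28))
   can have at most the bits 0xf non-zero, and (and:SI X (const_int 7))
   at most the bits 7; callers use this to delete redundant masking. */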
5453
5454 static unsigned HOST_WIDE_INT
5455 significant_bits (x, mode)
5456 rtx x;
5457 enum machine_mode mode;
5458 {
5459 unsigned HOST_WIDE_INT significant = GET_MODE_MASK (mode);
5460 unsigned HOST_WIDE_INT inner_sig;
5461 enum rtx_code code;
5462 int mode_width = GET_MODE_BITSIZE (mode);
5463 rtx tem;
5464
5465 /* If X is wider than MODE, use its mode instead. */
5466 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
5467 {
5468 mode = GET_MODE (x);
5469 significant = GET_MODE_MASK (mode);
5470 mode_width = GET_MODE_BITSIZE (mode);
5471 }
5472
5473 if (mode_width > HOST_BITS_PER_WIDE_INT)
5474 /* Our only callers in this case look for single bit values. So
5475 just return the mode mask. Those tests will then be false. */
5476 return significant;
5477
5478 code = GET_CODE (x);
5479 switch (code)
5480 {
5481 case REG:
5482 #ifdef STACK_BOUNDARY
5483 /* If this is the stack pointer, we may know something about its
5484 alignment. If PUSH_ROUNDING is defined, it is possible for the
5485 stack to be momentarily aligned only to that amount, so we pick
5486 the least alignment. */
5487
5488 if (x == stack_pointer_rtx)
5489 {
5490 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
5491
5492 #ifdef PUSH_ROUNDING
5493 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
5494 #endif
5495
5496 return significant & ~ (sp_alignment - 1);
5497 }
5498 #endif
5499
5500 /* If X is a register whose value we can find, use that value.
5501 Otherwise, use the previously-computed significant bits for this
5502 register. */
5503
5504 tem = get_last_value (x);
5505 if (tem)
5506 return significant_bits (tem, mode);
5507 else if (significant_valid && reg_significant[REGNO (x)])
5508 return reg_significant[REGNO (x)] & significant;
5509 else
5510 return significant;
5511
5512 case CONST_INT:
5513 return INTVAL (x);
5514
5515 #ifdef BYTE_LOADS_ZERO_EXTEND
5516 case MEM:
5517 /* In many, if not most, RISC machines, reading a byte from memory
5518 zeros the rest of the register. Noticing that fact saves a lot
5519 of extra zero-extends. */
5520 significant &= GET_MODE_MASK (GET_MODE (x));
5521 break;
5522 #endif
5523
5524 #if STORE_FLAG_VALUE == 1
5525 case EQ: case NE:
5526 case GT: case GTU:
5527 case LT: case LTU:
5528 case GE: case GEU:
5529 case LE: case LEU:
5530
5531 if (GET_MODE_CLASS (mode) == MODE_INT)
5532 significant = 1;
5533
5534 /* A comparison operation only sets the bits given by its mode. The
5535 rest of the bits are undefined. */
5536 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
5537 significant |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
5538 break;
5539 #endif
5540
5541 case NEG:
5542 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
5543 == GET_MODE_BITSIZE (GET_MODE (x)))
5544 significant = 1;
5545
5546 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
5547 significant |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
5548 break;
5549
5550 case ABS:
5551 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
5552 == GET_MODE_BITSIZE (GET_MODE (x)))
5553 significant = 1;
5554 break;
5555
5556 case TRUNCATE:
5557 significant &= (significant_bits (XEXP (x, 0), mode)
5558 & GET_MODE_MASK (mode));
5559 break;
5560
5561 case ZERO_EXTEND:
5562 significant &= significant_bits (XEXP (x, 0), mode);
5563 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
5564 significant &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
5565 break;
5566
5567 case SIGN_EXTEND:
5568 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
5569 Otherwise, show that all the bits in the outer mode but not in
5570 the inner mode may be non-zero. */
5571 inner_sig = significant_bits (XEXP (x, 0), mode);
5572 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
5573 {
5574 inner_sig &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
5575 if (inner_sig &
5576 (((HOST_WIDE_INT) 1
5577 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
5578 inner_sig |= (GET_MODE_MASK (mode)
5579 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
5580 }
5581
5582 significant &= inner_sig;
5583 break;
5584
5585 case AND:
5586 significant &= (significant_bits (XEXP (x, 0), mode)
5587 & significant_bits (XEXP (x, 1), mode));
5588 break;
5589
5590 case XOR: case IOR:
5591 case UMIN: case UMAX: case SMIN: case SMAX:
5592 significant &= (significant_bits (XEXP (x, 0), mode)
5593 | significant_bits (XEXP (x, 1), mode));
5594 break;
5595
5596 case PLUS: case MINUS:
5597 case MULT:
5598 case DIV: case UDIV:
5599 case MOD: case UMOD:
5600 /* We can apply the rules of arithmetic to compute the number of
5601 high- and low-order zero bits of these operations. We start by
5602 computing the width (position of the highest-order non-zero bit)
5603 and the number of low-order zero bits for each value. */
5604 {
5605 unsigned HOST_WIDE_INT sig0 = significant_bits (XEXP (x, 0), mode);
5606 unsigned HOST_WIDE_INT sig1 = significant_bits (XEXP (x, 1), mode);
5607 int width0 = floor_log2 (sig0) + 1;
5608 int width1 = floor_log2 (sig1) + 1;
5609 int low0 = floor_log2 (sig0 & -sig0);
5610 int low1 = floor_log2 (sig1 & -sig1);
5611 int op0_maybe_minusp = ((sig0 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) != 0);
5612 int op1_maybe_minusp = ((sig1 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) != 0);
5613 int result_width = mode_width;
5614 int result_low = 0;
5615
5616 switch (code)
5617 {
5618 case PLUS:
5619 result_width = MAX (width0, width1) + 1;
5620 result_low = MIN (low0, low1);
5621 break;
5622 case MINUS:
5623 result_low = MIN (low0, low1);
5624 break;
5625 case MULT:
5626 result_width = width0 + width1;
5627 result_low = low0 + low1;
5628 break;
5629 case DIV:
5630 if (! op0_maybe_minusp && ! op1_maybe_minusp)
5631 result_width = width0;
5632 break;
5633 case UDIV:
5634 result_width = width0;
5635 break;
5636 case MOD:
5637 if (! op0_maybe_minusp && ! op1_maybe_minusp)
5638 result_width = MIN (width0, width1);
5639 result_low = MIN (low0, low1);
5640 break;
5641 case UMOD:
5642 result_width = MIN (width0, width1);
5643 result_low = MIN (low0, low1);
5644 break;
5645 }
5646
5647 if (result_width < mode_width)
5648 significant &= ((HOST_WIDE_INT) 1 << result_width) - 1;
5649
5650 if (result_low > 0)
5651 significant &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
5652 }
5653 break;
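/* (Illustrative: if SIG0 == 0x0c, i.e. width 4 with 2 low-order zeros,
   and SIG1 == 0x30, i.e. width 6 with 4 low-order zeros, a MULT can
   have non-zero bits only below bit 4 + 6 == 10 and at or above bit
   2 + 4 == 6, so SIGNIFICANT is masked down to 0x3c0.) */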
5654
5655 case ZERO_EXTRACT:
5656 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5657 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
5658 significant &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
5659 break;
5660
5661 case SUBREG:
5662 /* If the inner mode is a single word for both the host and target
5663 machines, we can compute this from which bits of the inner
5664 object are known significant. */
5665 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
5666 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
5667 <= HOST_BITS_PER_WIDE_INT))
5668 {
5669 significant &= significant_bits (SUBREG_REG (x), mode);
5670 #ifndef BYTE_LOADS_ZERO_EXTEND
5671 /* On many CISC machines, accessing an object in a wider mode
5672 causes the high-order bits to become undefined. So they are
5673 not known to be zero. */
5674 if (GET_MODE_SIZE (GET_MODE (x))
5675 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
5676 significant |= (GET_MODE_MASK (GET_MODE (x))
5677 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
5678 #endif
5679 }
5680 break;
5681
5682 case ASHIFTRT:
5683 case LSHIFTRT:
5684 case ASHIFT:
5685 case LSHIFT:
5686 case ROTATE:
5687 /* The significant bits are in two classes: any bits within MODE
5688 that aren't in GET_MODE (x) are always significant. The rest of the
5689 significant bits are those that are significant in the operand of
5690 the shift when shifted the appropriate number of bits. This
5691 shows that high-order bits are cleared by the right shift and
5692 low-order bits by left shifts. */
5693 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5694 && INTVAL (XEXP (x, 1)) >= 0
5695 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
5696 {
5697 enum machine_mode inner_mode = GET_MODE (x);
5698 int width = GET_MODE_BITSIZE (inner_mode);
5699 int count = INTVAL (XEXP (x, 1));
5700 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
5701 unsigned HOST_WIDE_INT op_significant
5702 = significant_bits (XEXP (x, 0), mode);
5703 unsigned HOST_WIDE_INT inner = op_significant & mode_mask;
5704 unsigned HOST_WIDE_INT outer = 0;
5705
5706 if (mode_width > width)
5707 outer = (op_significant & significant & ~ mode_mask);
5708
5709 if (code == LSHIFTRT)
5710 inner >>= count;
5711 else if (code == ASHIFTRT)
5712 {
5713 inner >>= count;
5714
5715 /* If the sign bit was significant before the shift, we
5716 need to mark as significant all the places it could have
5717 been copied to by the shift. */
5718 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
5719 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
5720 }
5721 else if (code == LSHIFT || code == ASHIFT)
5722 inner <<= count;
5723 else
5724 inner = ((inner << (count % width)
5725 | (inner >> (width - (count % width)))) & mode_mask);
5726
5727 significant &= (outer | inner);
5728 }
5729 break;
5730
5731 case FFS:
5732 /* This is at most the number of bits in the mode. */
5733 significant = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
5734 break;
5735
5736 case IF_THEN_ELSE:
5737 significant &= (significant_bits (XEXP (x, 1), mode)
5738 | significant_bits (XEXP (x, 2), mode));
5739 break;
5740 }
5741
5742 return significant;
5743 }
5744 \f
5745 /* Return the number of bits at the high-order end of X that are known to
5746 be equal to the sign bit. This number will always be between 1 and
5747 the number of bits in the mode of X. MODE is the mode to be used
5748 if X is VOIDmode. */
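/* For example (illustrative): in SImode, (const_int -128) has bits 31
   through 7 equal to the sign bit, so we return 25; and
   (sign_extend:SI (reg:QI R)) yields at least 32 - 8 + 1 == 25. */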
5749
5750 static int
5751 num_sign_bit_copies (x, mode)
5752 rtx x;
5753 enum machine_mode mode;
5754 {
5755 enum rtx_code code = GET_CODE (x);
5756 int bitwidth;
5757 int num0, num1, result;
5758 unsigned HOST_WIDE_INT sig;
5759 rtx tem;
5760
5761 /* If we weren't given a mode, use the mode of X. If the mode is still
5762 VOIDmode, we don't know anything. */
5763
5764 if (mode == VOIDmode)
5765 mode = GET_MODE (x);
5766
5767 if (mode == VOIDmode)
5768 return 0;
5769
5770 bitwidth = GET_MODE_BITSIZE (mode);
5771
5772 switch (code)
5773 {
5774 case REG:
5775 if (significant_valid && reg_sign_bit_copies[REGNO (x)] != 0)
5776 return reg_sign_bit_copies[REGNO (x)];
5777
5778 tem = get_last_value (x);
5779 if (tem != 0)
5780 return num_sign_bit_copies (tem, mode);
5781 break;
5782
5783 case CONST_INT:
5784 /* If the constant is negative, take its 1's complement and remask.
5785 Then see how many zero bits we have. */
5786 sig = INTVAL (x) & GET_MODE_MASK (mode);
5787 if (sig & ((HOST_WIDE_INT) 1 << (bitwidth - 1)))
5788 sig = (~ sig) & GET_MODE_MASK (mode);
5789
5790 return (sig == 0 ? bitwidth : bitwidth - floor_log2 (sig) - 1);
5791
5792 case SUBREG:
5793 /* For a smaller object, just ignore the high bits. */
5794 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
5795 {
5796 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
5797 return MAX (1, (num0
5798 - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
5799 - bitwidth)));
5800 }
5801 break;
5802
5803 case SIGN_EXTRACT:
5804 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
5805 return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
5806 break;
5807
5808 case SIGN_EXTEND:
5809 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5810 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
5811
5812 case TRUNCATE:
5813 /* For a smaller object, just ignore the high bits. */
5814 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
5815 return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5816 - bitwidth)));
5817
5818 case NOT:
5819 return num_sign_bit_copies (XEXP (x, 0), mode);
5820
5821 case ROTATE: case ROTATERT:
5822 /* If we are rotating left by a number of bits less than the number
5823 of sign bit copies, we can just subtract that amount from the
5824 number. */
5825 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5826 && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
5827 {
5828 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
5829 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
5830 : bitwidth - INTVAL (XEXP (x, 1))));
5831 }
5832 break;
5833
5834 case NEG:
5835 /* In general, this subtracts one sign bit copy. But if the value
5836 is known to be positive, the number of sign bit copies is the
5837 same as that of the input. Finally, if the input has just one
5838 significant bit, all the bits are copies of the sign bit. */
5839 sig = significant_bits (XEXP (x, 0), mode);
5840 if (sig == 1)
5841 return bitwidth;
5842
5843 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
5844 if (num0 > 1
5845 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & sig))
5846 num0--;
5847
5848 return num0;
5849
5850 case IOR: case AND: case XOR:
5851 case SMIN: case SMAX: case UMIN: case UMAX:
5852 /* Logical operations will preserve the number of sign-bit copies.
5853 MIN and MAX operations always return one of the operands. */
5854 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
5855 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
5856 return MIN (num0, num1);
5857
5858 case PLUS: case MINUS:
5859 /* For addition and subtraction, we can have a 1-bit carry. However,
5860 if we are subtracting 1 from a positive number, there will not
5861 be such a carry. Furthermore, if the positive number is known to
5862 be 0 or 1, we know the result is either -1 or 0. */
5863
5864 if (code == PLUS && XEXP (x, 1) == constm1_rtx)
5865 {
5866 sig = significant_bits (XEXP (x, 0), mode);
5867 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & sig) == 0)
5868 return (sig == 1 || sig == 0 ? bitwidth
5869 : bitwidth - floor_log2 (sig));
5870 }
5871
5872 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
5873 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
5874 return MAX (1, MIN (num0, num1) - 1);
5875
5876 case MULT:
5877 /* The number of bits of the product is the sum of the number of
5878 bits of both terms. However, unless one of the terms is known
5879 to be positive, we must allow for an additional bit since negating
5880 a negative number can remove one sign bit copy. */
5881
5882 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
5883 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
5884
5885 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
5886 if (result > 0
5887 && ((significant_bits (XEXP (x, 0), mode)
5888 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
5889 && ((significant_bits (XEXP (x, 1), mode)
5890 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
5891 result--;
5892
5893 return MAX (1, result);
5894
5895 case UDIV:
5896 /* The result must be <= the first operand. */
5897 return num_sign_bit_copies (XEXP (x, 0), mode);
5898
5899 case UMOD:
5900 /* The result must be <= the second operand. */
5901 return num_sign_bit_copies (XEXP (x, 1), mode);
5902
5903 case DIV:
5904 /* Similar to unsigned division, except that we have to worry about
5905 the case where the divisor is negative, in which case we have
5906 to add 1. */
5907 result = num_sign_bit_copies (XEXP (x, 0), mode);
5908 if (result > 1
5909 && (significant_bits (XEXP (x, 1), mode)
5910 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
5911 result--;
5912
5913 return result;
5914
5915 case MOD:
5916 result = num_sign_bit_copies (XEXP (x, 1), mode);
5917 if (result > 1
5918 && (significant_bits (XEXP (x, 1), mode)
5919 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
5920 result--;
5921
5922 return result;
5923
5924 case ASHIFTRT:
5925 /* Shifts by a constant add to the number of bits equal to the
5926 sign bit. */
5927 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
5928 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5929 && INTVAL (XEXP (x, 1)) > 0)
5930 num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));
5931
5932 return num0;
5933
5934 case ASHIFT:
5935 case LSHIFT:
5936 /* Left shifts destroy copies. */
5937 if (GET_CODE (XEXP (x, 1)) != CONST_INT
5938 || INTVAL (XEXP (x, 1)) < 0
5939 || INTVAL (XEXP (x, 1)) >= bitwidth)
5940 return 1;
5941
5942 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
5943 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
5944
5945 case IF_THEN_ELSE:
5946 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
5947 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
5948 return MIN (num0, num1);
5949
5950 #if STORE_FLAG_VALUE == -1
5951 case EQ: case NE: case GE: case GT: case LE: case LT:
5952 case GEU: case GTU: case LEU: case LTU:
5953 return bitwidth;
5954 #endif
5955 }
5956
5957 /* If we haven't been able to figure it out by one of the above rules,
5958 see if some of the high-order bits are known to be zero. If so,
5959 count those bits and return one less than that amount. */
5960
5961 sig = significant_bits (x, mode);
5962 return sig == GET_MODE_MASK (mode) ? 1 : bitwidth - floor_log2 (sig) - 1;
5963 }
5964 \f
5965 /* This function is called from `simplify_shift_const' to merge two
5966 outer operations. Specifically, we have already found that we need
5967 to perform operation *POP0 with constant *PCONST0 at the outermost
5968 position. We would now like to also perform OP1 with constant CONST1
5969 (with *POP0 being done last).
5970
5971 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
5972 the resulting operation. *PCOMP_P is set to 1 if we would need to
5973 complement the innermost operand, otherwise it is unchanged.
5974
5975 MODE is the mode in which the operation will be done. No bits outside
5976 the width of this mode matter. It is assumed that the width of this mode
5977 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
5978
5979 If *POP0 or OP1 is NIL, it means no operation is required. Only NEG, PLUS,
5980 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
5981 result is simply *PCONST0.
5982
5983 If the resulting operation cannot be expressed as one operation, we
5984 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
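/* For example (illustrative): an outer XOR of C merged with an inner
   XOR of the same C cancels to NIL; an outer IOR of 0x0f with an inner
   AND of 0xf0 cannot be expressed as one operation, so we return 0. */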
5985
5986 static int
5987 merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
5988 enum rtx_code *pop0;
5989 HOST_WIDE_INT *pconst0;
5990 enum rtx_code op1;
5991 HOST_WIDE_INT const1;
5992 enum machine_mode mode;
5993 int *pcomp_p;
5994 {
5995 enum rtx_code op0 = *pop0;
5996 HOST_WIDE_INT const0 = *pconst0;
5997
5998 const0 &= GET_MODE_MASK (mode);
5999 const1 &= GET_MODE_MASK (mode);
6000
6001 /* If OP0 is an AND, clear unimportant bits in CONST1. */
6002 if (op0 == AND)
6003 const1 &= const0;
6004
6005 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
6006 if OP0 is SET. */
6007
6008 if (op1 == NIL || op0 == SET)
6009 return 1;
6010
6011 else if (op0 == NIL)
6012 op0 = op1, const0 = const1;
6013
6014 else if (op0 == op1)
6015 {
6016 switch (op0)
6017 {
6018 case AND:
6019 const0 &= const1;
6020 break;
6021 case IOR:
6022 const0 |= const1;
6023 break;
6024 case XOR:
6025 const0 ^= const1;
6026 break;
6027 case PLUS:
6028 const0 += const1;
6029 break;
6030 case NEG:
6031 op0 = NIL;
6032 break;
6033 }
6034 }
6035
6036 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
6037 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
6038 return 0;
6039
6040 /* If the two constants aren't the same, we can't do anything. The
6041 remaining six cases can all be done. */
6042 else if (const0 != const1)
6043 return 0;
6044
6045 else
6046 switch (op0)
6047 {
6048 case IOR:
6049 if (op1 == AND)
6050 /* (a & b) | b == b */
6051 op0 = SET;
6052 else /* op1 == XOR */
6053 /* (a ^ b) | b == a | b */
6054 ;
6055 break;
6056
6057 case XOR:
6058 if (op1 == AND)
6059 /* (a & b) ^ b == (~a) & b */
6060 op0 = AND, *pcomp_p = 1;
6061 else /* op1 == IOR */
6062 /* (a | b) ^ b == a & ~b */
6063 op0 = AND, *pconst0 = ~ const0;
6064 break;
6065
6066 case AND:
6067 if (op1 == IOR)
6068 /* (a | b) & b == b */
6069 op0 = SET;
6070 else /* op1 == XOR */
6071 /* (a ^ b) & b == (~a) & b */
6072 *pcomp_p = 1;
6073 break;
6074 }
6075
6076 /* Check for NO-OP cases. */
6077 const0 &= GET_MODE_MASK (mode);
6078 if (const0 == 0
6079 && (op0 == IOR || op0 == XOR || op0 == PLUS))
6080 op0 = NIL;
6081 else if (const0 == 0 && op0 == AND)
6082 op0 = SET;
6083 else if (const0 == GET_MODE_MASK (mode) && op0 == AND)
6084 op0 = NIL;
6085
6086 *pop0 = op0;
6087 *pconst0 = const0;
6088
6089 return 1;
6090 }
6091 \f
6092 /* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
6093 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
6094 that we started with.
6095
6096 The shift is normally computed in the widest mode we find in VAROP, as
6097 long as it isn't a different number of words than RESULT_MODE. Exceptions
6098 are ASHIFTRT and ROTATE, which are always done in their original mode. */
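/* For example (illustrative): (lshiftrt:SI (ashift:SI X (const_int 3))
   (const_int 2)) nets the opposing counts to (ashift:SI X (const_int 1))
   and records an outer AND with 0x3ffffffe, the inner shift's
   significant bits shifted right by 2. */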
6099
6100 static rtx
6101 simplify_shift_const (x, code, result_mode, varop, count)
6102 rtx x;
6103 enum rtx_code code;
6104 enum machine_mode result_mode;
6105 rtx varop;
6106 int count;
6107 {
6108 enum rtx_code orig_code = code;
6109 int orig_count = count;
6110 enum machine_mode mode = result_mode;
6111 enum machine_mode shift_mode, tmode;
6112 int mode_words
6113 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
6114 /* We form (outer_op (code varop count) (outer_const)). */
6115 enum rtx_code outer_op = NIL;
6116 HOST_WIDE_INT outer_const;
6117 rtx const_rtx;
6118 int complement_p = 0;
6119 rtx new;
6120
6121 /* If we were given an invalid count, don't do anything except exactly
6122 what was requested. */
6123
6124 if (count < 0 || count > GET_MODE_BITSIZE (mode))
6125 {
6126 if (x)
6127 return x;
6128
6129 return gen_rtx (code, mode, varop, GEN_INT (count));
6130 }
6131
6132 /* Unless one of the branches of the `if' in this loop does a `continue',
6133 we will `break' the loop after the `if'. */
6134
6135 while (count != 0)
6136 {
6137 /* If we have an operand of (clobber (const_int 0)), just return that
6138 value. */
6139 if (GET_CODE (varop) == CLOBBER)
6140 return varop;
6141
6142 /* If we discovered we had to complement VAROP, leave. Making a NOT
6143 here would cause an infinite loop. */
6144 if (complement_p)
6145 break;
6146
6147 /* Convert ROTATERT to ROTATE. */
6148 if (code == ROTATERT)
6149 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
6150
6151 /* Canonicalize LSHIFT to ASHIFT. */
6152 if (code == LSHIFT)
6153 code = ASHIFT;
6154
6155 /* We need to determine what mode we will do the shift in. If the
6156 shift is an ASHIFTRT or ROTATE, we must always do it in the mode it
6157 was originally done in. Otherwise, we can do it in MODE, the widest
6158 mode encountered. */
6159 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
6160
6161 /* Handle cases where the count is greater than the size of the mode
6162 minus 1. For ASHIFT, use the size minus one as the count (this can
6163 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
6164 take the count modulo the size. For other shifts, the result is
6165 zero.
6166
6167 Since these shifts are being produced by the compiler by combining
6168 multiple operations, each of which are defined, we know what the
6169 result is supposed to be. */
6170
6171 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
6172 {
6173 if (code == ASHIFTRT)
6174 count = GET_MODE_BITSIZE (shift_mode) - 1;
6175 else if (code == ROTATE || code == ROTATERT)
6176 count %= GET_MODE_BITSIZE (shift_mode);
6177 else
6178 {
6179 /* We can't simply return zero because there may be an
6180 outer op. */
6181 varop = const0_rtx;
6182 count = 0;
6183 break;
6184 }
6185 }
6186
6187 /* Negative counts are invalid and should not have been made (a
6188 programmer-specified negative count should have been handled
6189 above). */
6190 else if (count < 0)
6191 abort ();
6192
6193 /* An arithmetic right shift of a quantity known to be -1 or 0
6194 is a no-op. */
6195 if (code == ASHIFTRT
6196 && (num_sign_bit_copies (varop, shift_mode)
6197 == GET_MODE_BITSIZE (shift_mode)))
6198 {
6199 count = 0;
6200 break;
6201 }
6202
6203 /* We simplify the tests below and elsewhere by converting
6204 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
6205 `make_compound_operation' will convert it to an ASHIFTRT for
6206 those machines (such as the Vax) that don't have an LSHIFTRT. */
6207 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
6208 && code == ASHIFTRT
6209 && ((significant_bits (varop, shift_mode)
6210 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
6211 == 0))
6212 code = LSHIFTRT;
6213
6214 switch (GET_CODE (varop))
6215 {
6216 case SIGN_EXTEND:
6217 case ZERO_EXTEND:
6218 case SIGN_EXTRACT:
6219 case ZERO_EXTRACT:
6220 new = expand_compound_operation (varop);
6221 if (new != varop)
6222 {
6223 varop = new;
6224 continue;
6225 }
6226 break;
6227
6228 case MEM:
6229 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
6230 minus the width of a smaller mode, we can do this with a
6231 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
6232 if ((code == ASHIFTRT || code == LSHIFTRT)
6233 && ! mode_dependent_address_p (XEXP (varop, 0))
6234 && ! MEM_VOLATILE_P (varop)
6235 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
6236 MODE_INT, 1)) != BLKmode)
6237 {
6238 #if BYTES_BIG_ENDIAN
6239 new = gen_rtx (MEM, tmode, XEXP (varop, 0));
6240 #else
6241 new = gen_rtx (MEM, tmode,
6242 plus_constant (XEXP (varop, 0),
6243 count / BITS_PER_UNIT));
6244 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
6245 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop);
6246 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop);
6247 #endif
6248 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
6249 : ZERO_EXTEND, mode, new);
6250 count = 0;
6251 continue;
6252 }
6253 break;
6254
6255 case USE:
6256 /* Similar to the case above, except that we can only do this if
6257 the resulting mode is the same as that of the underlying MEM;
6258 we must adjust the address depending on the *bits* endianness
6259 because of the way that bit-field extract insns are defined. */
6260 if ((code == ASHIFTRT || code == LSHIFTRT)
6261 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
6262 MODE_INT, 1)) != BLKmode
6263 && tmode == GET_MODE (XEXP (varop, 0)))
6264 {
6265 #if BITS_BIG_ENDIAN
6266 new = XEXP (varop, 0);
6267 #else
6268 new = copy_rtx (XEXP (varop, 0));
6269 SUBST (XEXP (new, 0),
6270 plus_constant (XEXP (new, 0),
6271 count / BITS_PER_UNIT));
6272 #endif
6273
6274 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
6275 : ZERO_EXTEND, mode, new);
6276 count = 0;
6277 continue;
6278 }
6279 break;
6280
6281 case SUBREG:
6282 /* If VAROP is a SUBREG, strip it as long as the inner operand has
6283 the same number of words as what we've seen so far. Then store
6284 the widest mode in MODE. */
6285 if (SUBREG_WORD (varop) == 0
6286 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
6287 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
6288 == mode_words))
6289 {
6290 varop = SUBREG_REG (varop);
6291 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
6292 mode = GET_MODE (varop);
6293 continue;
6294 }
6295 break;
6296
6297 case MULT:
6298 /* Some machines use MULT instead of ASHIFT because MULT
6299 is cheaper. But it is still better on those machines to
6300 merge two shifts into one. */
6301 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
6302 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
6303 {
6304 varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
6305 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
6306 continue;
6307 }
6308 break;
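/* (Illustrative: (mult:SI X (const_int 8)) becomes (ashift:SI X
   (const_int 3)) so it can merge with the outer shift.) */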
6309
6310 case UDIV:
6311 /* Similar, for when divides are cheaper. */
6312 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
6313 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
6314 {
6315 varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
6316 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
6317 continue;
6318 }
6319 break;
6320
6321 case ASHIFTRT:
6322 /* If we are extracting just the sign bit of an arithmetic right
6323 shift, that shift is not needed. */
6324 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
6325 {
6326 varop = XEXP (varop, 0);
6327 continue;
6328 }
6329
6330 /* ... fall through ... */
6331
6332 case LSHIFTRT:
6333 case ASHIFT:
6334 case LSHIFT:
6335 case ROTATE:
6336 /* Here we have two nested shifts. The result is usually the
6337 AND of a new shift with a mask. We compute the result below. */
6338 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
6339 && INTVAL (XEXP (varop, 1)) >= 0
6340 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
6341 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
6342 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6343 {
6344 enum rtx_code first_code = GET_CODE (varop);
6345 int first_count = INTVAL (XEXP (varop, 1));
6346 unsigned HOST_WIDE_INT mask;
6347 rtx mask_rtx;
6348 rtx inner;
6349
6350 if (first_code == LSHIFT)
6351 first_code = ASHIFT;
6352
6353 /* We have one common special case. We can't do any merging if
6354 the inner code is an ASHIFTRT of a smaller mode. However, if
6355 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
6356 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
6357 we can convert it to
6358 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
6359 This simplifies certain SIGN_EXTEND operations. */
6360 if (code == ASHIFT && first_code == ASHIFTRT
6361 && (GET_MODE_BITSIZE (result_mode)
6362 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
6363 {
6364 /* C3 has the low-order C1 bits zero. */
6365
6366 mask = (GET_MODE_MASK (mode)
6367 & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));
6368
6369 varop = simplify_and_const_int (NULL_RTX, result_mode,
6370 XEXP (varop, 0), mask);
6371 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
6372 varop, count);
6373 count = first_count;
6374 code = ASHIFTRT;
6375 continue;
6376 }
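/* (Illustrative, with M1 == SImode, M2 == QImode, C1 == 2 and
   C2 == 24: the result is (ashiftrt:SI (ashift:SI (and:SI
   (subreg:SI FOO 0) (const_int -4)) (const_int 24)) (const_int 2)),
   before any further simplification.) */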
6377
6378 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
6379 than C1 high-order bits equal to the sign bit, we can convert
6380 this to either an ASHIFT or an ASHIFTRT depending on the
6381 two counts.
6382
6383 We cannot do this if VAROP's mode is not SHIFT_MODE. */
6384
6385 if (code == ASHIFTRT && first_code == ASHIFT
6386 && GET_MODE (varop) == shift_mode
6387 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
6388 > first_count))
6389 {
6390 count -= first_count;
6391 if (count < 0)
6392 count = - count, code = ASHIFT;
6393 varop = XEXP (varop, 0);
6394 continue;
6395 }
6396
6397 /* There are some cases we can't do. If CODE is ASHIFTRT,
6398 we can only do this if FIRST_CODE is also ASHIFTRT.
6399
6400 We can't do the case when CODE is ROTATE and FIRST_CODE is
6401 ASHIFTRT.
6402
6403 If the mode of this shift is not the mode of the outer shift,
6404 we can't do this if either shift is ASHIFTRT or ROTATE.
6405
6406 Finally, we can't do any of these if the mode is too wide
6407 unless the codes are the same.
6408
6409 Handle the case where the shift codes are the same
6410 first. */
6411
6412 if (code == first_code)
6413 {
6414 if (GET_MODE (varop) != result_mode
6415 && (code == ASHIFTRT || code == ROTATE))
6416 break;
6417
6418 count += first_count;
6419 varop = XEXP (varop, 0);
6420 continue;
6421 }
6422
6423 if (code == ASHIFTRT
6424 || (code == ROTATE && first_code == ASHIFTRT)
6425 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
6426 || (GET_MODE (varop) != result_mode
6427 && (first_code == ASHIFTRT || first_code == ROTATE
6428 || code == ROTATE)))
6429 break;
6430
6431 /* To compute the mask to apply after the shift, shift the
6432 significant bits of the inner shift the same way the
6433 outer shift will. */
6434
6435 mask_rtx = GEN_INT (significant_bits (varop, GET_MODE (varop)));
6436
6437 mask_rtx
6438 = simplify_binary_operation (code, result_mode, mask_rtx,
6439 GEN_INT (count));
6440
6441 /* Give up if we can't compute an outer operation to use. */
6442 if (mask_rtx == 0
6443 || GET_CODE (mask_rtx) != CONST_INT
6444 || ! merge_outer_ops (&outer_op, &outer_const, AND,
6445 INTVAL (mask_rtx),
6446 result_mode, &complement_p))
6447 break;
6448
6449 /* If the shifts are in the same direction, we add the
6450 counts. Otherwise, we subtract them. */
6451 if ((code == ASHIFTRT || code == LSHIFTRT)
6452 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
6453 count += first_count;
6454 else
6455 count -= first_count;
6456
6457 /* If COUNT is positive, the new shift is usually CODE,
6458 except in the two cases below, in which case it is
6459 FIRST_CODE. If the count is negative, FIRST_CODE should
6460 always be used. */
6461 if (count > 0
6462 && ((first_code == ROTATE && code == ASHIFT)
6463 || (first_code == ASHIFTRT && code == LSHIFTRT)))
6464 code = first_code;
6465 else if (count < 0)
6466 code = first_code, count = - count;
6467
6468 varop = XEXP (varop, 0);
6469 continue;
6470 }
6471
6472 /* If we have (A << B << C) for any shift, we can convert this to
6473 (A << C << B). This wins if A is a constant. Only try this if
6474 B is not a constant. */
6475
6476 else if (GET_CODE (varop) == code
6477 && GET_CODE (XEXP (varop, 1)) != CONST_INT
6478 && 0 != (new
6479 = simplify_binary_operation (code, mode,
6480 XEXP (varop, 0),
6481 GEN_INT (count))))
6482 {
6483 varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
6484 count = 0;
6485 continue;
6486 }
6487 break;
6488
6489 case NOT:
6490 /* Make this fit the case below. */
6491 varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
6492 GEN_INT (GET_MODE_MASK (mode)));
6493 continue;
6494
6495 case IOR:
6496 case AND:
6497 case XOR:
6498 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
6499 with C the size of VAROP - 1 and the shift is logical if
6500 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
6501 we have an (le X 0) operation. If we have an arithmetic shift
6502 and STORE_FLAG_VALUE is 1 or we have a logical shift with
6503 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
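      /* For instance, with STORE_FLAG_VALUE == 1 and SImode,
             (lshiftrt:SI (ior:SI (plus:SI X (const_int -1)) X) 31)
         yields 1 exactly when X <= 0 (the sign bit of the IOR is set
         by X itself when X < 0, and by the PLUS when X == 0), i.e.
         (le X 0); the arithmetic shift instead yields 0 or -1,
         i.e. (neg (le X 0)).  */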
6504
6505 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
6506 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
6507 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
6508 && (code == LSHIFTRT || code == ASHIFTRT)
6509 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
6510 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
6511 {
6512 count = 0;
6513 varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
6514 const0_rtx);
6515
6516 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
6517 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
6518
6519 continue;
6520 }
6521
6522 /* If we have (shift (logical)), move the logical to the outside
6523 to allow it to possibly combine with another logical and the
6524 shift to combine with another shift. This also canonicalizes to
6525 what a ZERO_EXTRACT looks like. Also, some machines have
6526 (and (shift)) insns. */
6527
6528 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
6529 && (new = simplify_binary_operation (code, result_mode,
6530 XEXP (varop, 1),
6531 GEN_INT (count))) != 0
6532 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
6533 INTVAL (new), result_mode, &complement_p))
6534 {
6535 varop = XEXP (varop, 0);
6536 continue;
6537 }
6538
6539 /* If we can't do that, try to simplify the shift in each arm of the
6540 logical expression, make a new logical expression, and apply
6541 the inverse distributive law. */
6542 {
6543 rtx lhs = simplify_shift_const (NULL_RTX, code, result_mode,
6544 XEXP (varop, 0), count);
6545 rtx rhs = simplify_shift_const (NULL_RTX, code, result_mode,
6546 XEXP (varop, 1), count);
6547
6548 varop = gen_binary (GET_CODE (varop), result_mode, lhs, rhs);
6549 varop = apply_distributive_law (varop);
6550
6551 count = 0;
6552 }
6553 break;
6554
6555 case EQ:
6556 /* Convert (lshift (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
6557 says that the sign bit can be tested, FOO has mode MODE, C is
6558 GET_MODE_BITSIZE (MODE) - 1, and FOO has only the low-order bit
6559 significant. */
6560 if (code == LSHIFT
6561 && XEXP (varop, 1) == const0_rtx
6562 && GET_MODE (XEXP (varop, 0)) == result_mode
6563 && count == GET_MODE_BITSIZE (result_mode) - 1
6564 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
6565 && ((STORE_FLAG_VALUE
6566 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
6567 && significant_bits (XEXP (varop, 0), result_mode) == 1
6568 && merge_outer_ops (&outer_op, &outer_const, XOR,
6569 (HOST_WIDE_INT) 1, result_mode,
6570 &complement_p))
6571 {
6572 varop = XEXP (varop, 0);
6573 count = 0;
6574 continue;
6575 }
6576 break;
6577
6578 case NEG:
6579 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
6580 than the number of bits in the mode is equivalent to A. */
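      /* E.g., if A is known to be 0 or 1, (lshiftrt:SI (neg:SI A) 31)
         is 0 when A == 0 and 1 when A == 1, hence equals A.  */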
6581 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
6582 && significant_bits (XEXP (varop, 0), result_mode) == 1)
6583 {
6584 varop = XEXP (varop, 0);
6585 count = 0;
6586 continue;
6587 }
6588
6589 /* NEG commutes with ASHIFT since it is multiplication. Move the
6590 NEG outside to allow shifts to combine. */
6591 if (code == ASHIFT
6592 && merge_outer_ops (&outer_op, &outer_const, NEG,
6593 (HOST_WIDE_INT) 0, result_mode,
6594 &complement_p))
6595 {
6596 varop = XEXP (varop, 0);
6597 continue;
6598 }
6599 break;
6600
6601 case PLUS:
6602 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
6603 is one less than the number of bits in the mode is
6604 equivalent to (xor A 1). */
6605 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
6606 && XEXP (varop, 1) == constm1_rtx
6607 && significant_bits (XEXP (varop, 0), result_mode) == 1
6608 && merge_outer_ops (&outer_op, &outer_const, XOR,
6609 (HOST_WIDE_INT) 1, result_mode,
6610 &complement_p))
6611 {
6612 count = 0;
6613 varop = XEXP (varop, 0);
6614 continue;
6615 }
6616
6617 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
6618 significant in BAR are those being shifted out and those
6619 bits are known zero in FOO, we can replace the PLUS with FOO.
6620 Similarly in the other operand order. This code occurs when
6621 we are computing the size of a variable-size array. */
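      /* Illustration: in (lshiftrt:SI (plus:SI FOO BAR) 3), if BAR can
         have nonzero bits only in the low-order 3 positions and FOO is
         known zero there, the addition cannot carry into bit 3, so the
         result equals (lshiftrt:SI FOO 3).  */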
6622
6623 if ((code == ASHIFTRT || code == LSHIFTRT)
6624 && count < HOST_BITS_PER_WIDE_INT
6625 && significant_bits (XEXP (varop, 1), result_mode) >> count == 0
6626 && (significant_bits (XEXP (varop, 1), result_mode)
6627 & significant_bits (XEXP (varop, 0), result_mode)) == 0)
6628 {
6629 varop = XEXP (varop, 0);
6630 continue;
6631 }
6632 else if ((code == ASHIFTRT || code == LSHIFTRT)
6633 && count < HOST_BITS_PER_WIDE_INT
6634 && 0 == (significant_bits (XEXP (varop, 0), result_mode)
6635 >> count)
6636 && 0 == (significant_bits (XEXP (varop, 0), result_mode)
6637 & significant_bits (XEXP (varop, 1),
6638 result_mode)))
6639 {
6640 varop = XEXP (varop, 1);
6641 continue;
6642 }
6643
6644 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
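      /* E.g., (ashift:SI (plus:SI FOO (const_int 5)) 2) is
         (plus:SI (ashift:SI FOO 2) (const_int 20)), since C' == C << N.  */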
6645 if (code == ASHIFT
6646 && GET_CODE (XEXP (varop, 1)) == CONST_INT
6647 && (new = simplify_binary_operation (ASHIFT, result_mode,
6648 XEXP (varop, 1),
6649 GEN_INT (count))) != 0
6650 && merge_outer_ops (&outer_op, &outer_const, PLUS,
6651 INTVAL (new), result_mode, &complement_p))
6652 {
6653 varop = XEXP (varop, 0);
6654 continue;
6655 }
6656 break;
6657
6658 case MINUS:
6659 /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
6660 with C the size of VAROP - 1 and the shift is logical if
6661 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
6662 we have a (gt X 0) operation. If the shift is arithmetic with
6663 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
6664 we have a (neg (gt X 0)) operation. */
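      /* For instance, with STORE_FLAG_VALUE == 1 and SImode,
             (lshiftrt:SI (minus:SI (ashiftrt:SI X 31) X) 31)
         is 1 exactly when X > 0, since only then is the MINUS
         negative, i.e. (gt X 0).  */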
6665
6666 if (GET_CODE (XEXP (varop, 0)) == ASHIFTRT
6667 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
6668 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
6669 && (code == LSHIFTRT || code == ASHIFTRT)
6670 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
6671 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
6672 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
6673 {
6674 count = 0;
6675 varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
6676 const0_rtx);
6677
6678 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
6679 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
6680
6681 continue;
6682 }
6683 break;
6684 }
6685
6686 break;
6687 }
6688
6689 /* We need to determine what mode to do the shift in. If the shift is
6690 an ASHIFTRT or ROTATE, we must always do it in the mode it was originally
6691 done in. Otherwise, we can do it in MODE, the widest mode encountered.
6692 The code we care about is that of the shift that will actually be done,
6693 not the shift that was originally requested. */
6694 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
6695
6696 /* We have now finished analyzing the shift. The result should be
6697 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
6698 OUTER_OP is non-NIL, it is an operation that needs to be applied
6699 to the result of the shift. OUTER_CONST is the relevant constant,
6700 but we must turn off all bits turned off in the shift.
6701
6702 If we were passed a value for X, see if we can use any pieces of
6703 it. If not, make a new rtx. */
6704
6705 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
6706 && GET_CODE (XEXP (x, 1)) == CONST_INT
6707 && INTVAL (XEXP (x, 1)) == count)
6708 const_rtx = XEXP (x, 1);
6709 else
6710 const_rtx = GEN_INT (count);
6711
6712 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
6713 && GET_MODE (XEXP (x, 0)) == shift_mode
6714 && SUBREG_REG (XEXP (x, 0)) == varop)
6715 varop = XEXP (x, 0);
6716 else if (GET_MODE (varop) != shift_mode)
6717 varop = gen_lowpart_for_combine (shift_mode, varop);
6718
6719 /* If we can't make the SUBREG, try to return what we were given. */
6720 if (GET_CODE (varop) == CLOBBER)
6721 return x ? x : varop;
6722
6723 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
6724 if (new != 0)
6725 x = new;
6726 else
6727 {
6728 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
6729 x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
6730
6731 SUBST (XEXP (x, 0), varop);
6732 SUBST (XEXP (x, 1), const_rtx);
6733 }
6734
6735 /* If we were doing an LSHIFTRT in a wider mode than it was originally,
6736 turn off all the bits that the shift would have turned off. */
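      /* For example, a QImode LSHIFTRT by 3 that was widened to SImode
         must be masked with GET_MODE_MASK (QImode) >> 3 == 0x1f.  */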
6737 if (orig_code == LSHIFTRT && result_mode != shift_mode)
6738 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
6739 GET_MODE_MASK (result_mode) >> orig_count);
6740
6741 /* Do the remainder of the processing in RESULT_MODE. */
6742 x = gen_lowpart_for_combine (result_mode, x);
6743
6744 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
6745 operation. */
6746 if (complement_p)
6747 x = gen_unary (NOT, result_mode, x);
6748
6749 if (outer_op != NIL)
6750 {
6751 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
6752 outer_const &= GET_MODE_MASK (result_mode);
6753
6754 if (outer_op == AND)
6755 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
6756 else if (outer_op == SET)
6757 /* This means that we have determined that the result is
6758 equivalent to a constant. This should be rare. */
6759 x = GEN_INT (outer_const);
6760 else if (GET_RTX_CLASS (outer_op) == '1')
6761 x = gen_unary (outer_op, result_mode, x);
6762 else
6763 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
6764 }
6765
6766 return x;
6767 }
6768 \f
6769 /* Like recog, but we receive the address of a pointer to a new pattern.
6770 We try to match the rtx that the pointer points to.
6771 If that fails, we may try to modify or replace the pattern,
6772 storing the replacement into the same pointer object.
6773
6774 Modifications include deletion or addition of CLOBBERs.
6775
6776 PNOTES is a pointer to a location where any REG_UNUSED notes added for
6777 the CLOBBERs are placed.
6778
6779 The value is the final insn code from the pattern ultimately matched,
6780 or -1. */
6781
6782 static int
6783 recog_for_combine (pnewpat, insn, pnotes)
6784 rtx *pnewpat;
6785 rtx insn;
6786 rtx *pnotes;
6787 {
6788 register rtx pat = *pnewpat;
6789 int insn_code_number;
6790 int num_clobbers_to_add = 0;
6791 int i;
6792 rtx notes = 0;
6793
6794 /* Is the result of combination a valid instruction? */
6795 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
6796
6797 /* If it isn't, there is the possibility that we previously had an insn
6798 that clobbered some register as a side effect, but the combined
6799 insn doesn't need to do that. So try once more without the clobbers
6800 unless this represents an ASM insn. */
6801
6802 if (insn_code_number < 0 && ! check_asm_operands (pat)
6803 && GET_CODE (pat) == PARALLEL)
6804 {
6805 int pos;
6806
6807 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
6808 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
6809 {
6810 if (i != pos)
6811 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
6812 pos++;
6813 }
6814
6815 SUBST_INT (XVECLEN (pat, 0), pos);
6816
6817 if (pos == 1)
6818 pat = XVECEXP (pat, 0, 0);
6819
6820 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
6821 }
6822
6823 /* If we had any clobbers to add, make a new pattern that contains
6824 them. Then check to make sure that all of them are dead. */
6825 if (num_clobbers_to_add)
6826 {
6827 rtx newpat = gen_rtx (PARALLEL, VOIDmode,
6828 gen_rtvec (GET_CODE (pat) == PARALLEL
6829 ? XVECLEN (pat, 0) + num_clobbers_to_add
6830 : num_clobbers_to_add + 1));
6831
6832 if (GET_CODE (pat) == PARALLEL)
6833 for (i = 0; i < XVECLEN (pat, 0); i++)
6834 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
6835 else
6836 XVECEXP (newpat, 0, 0) = pat;
6837
6838 add_clobbers (newpat, insn_code_number);
6839
6840 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
6841 i < XVECLEN (newpat, 0); i++)
6842 {
6843 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
6844 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
6845 return -1;
6846 notes = gen_rtx (EXPR_LIST, REG_UNUSED,
6847 XEXP (XVECEXP (newpat, 0, i), 0), notes);
6848 }
6849 pat = newpat;
6850 }
6851
6852 *pnewpat = pat;
6853 *pnotes = notes;
6854
6855 return insn_code_number;
6856 }
6857 \f
6858 /* Like gen_lowpart but for use by combine. In combine it is not possible
6859 to create any new pseudoregs. However, it is safe to create
6860 invalid memory addresses, because combine will try to recognize
6861 them and all they will do is make the combine attempt fail.
6862
6863 If for some reason this cannot do its job, an rtx
6864 (clobber (const_int 0)) is returned.
6865 An insn containing that will not be recognized. */
6866
6867 #undef gen_lowpart
6868
6869 static rtx
6870 gen_lowpart_for_combine (mode, x)
6871 enum machine_mode mode;
6872 register rtx x;
6873 {
6874 rtx result;
6875
6876 if (GET_MODE (x) == mode)
6877 return x;
6878
6879 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6880 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
6881
6882 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
6883 won't know what to do. So we will strip off the SUBREG here and
6884 process normally. */
6885 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
6886 {
6887 x = SUBREG_REG (x);
6888 if (GET_MODE (x) == mode)
6889 return x;
6890 }
6891
6892 result = gen_lowpart_common (mode, x);
6893 if (result)
6894 return result;
6895
6896 if (GET_CODE (x) == MEM)
6897 {
6898 register int offset = 0;
6899 rtx new;
6900
6901 /* Refuse to work on a volatile memory ref or one with a mode-dependent
6902 address. */
6903 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
6904 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
6905
6906 /* If we want to refer to something bigger than the original memref,
6907 generate a perverse subreg instead. That will force a reload
6908 of the original memref X. */
6909 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
6910 return gen_rtx (SUBREG, mode, x, 0);
6911
6912 #if WORDS_BIG_ENDIAN
6913 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
6914 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
6915 #endif
6916 #if BYTES_BIG_ENDIAN
6917 /* Adjust the address so that the address-after-the-data
6918 is unchanged. */
6919 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
6920 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
6921 #endif
6922 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
6923 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
6924 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
6925 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
6926 return new;
6927 }
6928
6929 /* If X is a comparison operator, rewrite it in a new mode. This
6930 probably won't match, but may allow further simplifications. */
6931 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
6932 return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
6933
6934 /* If we couldn't simplify X any other way, just enclose it in a
6935 SUBREG. Normally, this SUBREG won't match, but some patterns may
6936 include an explicit SUBREG or we may simplify it further in combine. */
6937 else
6938 {
6939 int word = 0;
6940
6941 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
6942 word = ((GET_MODE_SIZE (GET_MODE (x))
6943 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
6944 / UNITS_PER_WORD);
6945 return gen_rtx (SUBREG, mode, x, word);
6946 }
6947 }
6948 \f
6949 /* Make an rtx expression. This is a subset of gen_rtx and only supports
6950 expressions of 1, 2, or 3 operands, each of which is an rtx expression.
6951
6952 If the identical expression was previously in the insn (in the undobuf),
6953 it will be returned. Only if it is not found will a new expression
6954 be made. */
6955
6956 /*VARARGS2*/
6957 static rtx
6958 gen_rtx_combine (va_alist)
6959 va_dcl
6960 {
6961 va_list p;
6962 enum rtx_code code;
6963 enum machine_mode mode;
6964 int n_args;
6965 rtx args[3];
6966 int i, j;
6967 char *fmt;
6968 rtx rt;
6969
6970 va_start (p);
6971 code = va_arg (p, enum rtx_code);
6972 mode = va_arg (p, enum machine_mode);
6973 n_args = GET_RTX_LENGTH (code);
6974 fmt = GET_RTX_FORMAT (code);
6975
6976 if (n_args == 0 || n_args > 3)
6977 abort ();
6978
6979 /* Get each arg and verify that it is supposed to be an expression. */
6980 for (j = 0; j < n_args; j++)
6981 {
6982 if (*fmt++ != 'e')
6983 abort ();
6984
6985 args[j] = va_arg (p, rtx);
6986 }
6987
6988 /* See if this is in undobuf. Be sure we don't use objects that came
6989 from another insn; this could produce circular rtl structures. */
6990
6991 for (i = previous_num_undos; i < undobuf.num_undo; i++)
6992 if (!undobuf.undo[i].is_int
6993 && GET_CODE (undobuf.undo[i].old_contents.rtx) == code
6994 && GET_MODE (undobuf.undo[i].old_contents.rtx) == mode)
6995 {
6996 for (j = 0; j < n_args; j++)
6997 if (XEXP (undobuf.undo[i].old_contents.rtx, j) != args[j])
6998 break;
6999
7000 if (j == n_args)
7001 return undobuf.undo[i].old_contents.rtx;
7002 }
7003
7004 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
7005 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
7006 rt = rtx_alloc (code);
7007 PUT_MODE (rt, mode);
7008 XEXP (rt, 0) = args[0];
7009 if (n_args > 1)
7010 {
7011 XEXP (rt, 1) = args[1];
7012 if (n_args > 2)
7013 XEXP (rt, 2) = args[2];
7014 }
7015 return rt;
7016 }
7017
7018 /* These routines make binary and unary operations by first seeing if they
7019 fold; if not, a new expression is allocated. */
7020
7021 static rtx
7022 gen_binary (code, mode, op0, op1)
7023 enum rtx_code code;
7024 enum machine_mode mode;
7025 rtx op0, op1;
7026 {
7027 rtx result;
7028
7029 if (GET_RTX_CLASS (code) == '<')
7030 {
7031 enum machine_mode op_mode = GET_MODE (op0);
7032 if (op_mode == VOIDmode)
7033 op_mode = GET_MODE (op1);
7034 result = simplify_relational_operation (code, op_mode, op0, op1);
7035 }
7036 else
7037 result = simplify_binary_operation (code, mode, op0, op1);
7038
7039 if (result)
7040 return result;
7041
7042 /* Put complex operands first and constants second. */
7043 if (GET_RTX_CLASS (code) == 'c'
7044 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
7045 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
7046 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
7047 || (GET_CODE (op0) == SUBREG
7048 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
7049 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
7050 return gen_rtx_combine (code, mode, op1, op0);
7051
7052 return gen_rtx_combine (code, mode, op0, op1);
7053 }
7054
7055 static rtx
7056 gen_unary (code, mode, op0)
7057 enum rtx_code code;
7058 enum machine_mode mode;
7059 rtx op0;
7060 {
7061 rtx result = simplify_unary_operation (code, mode, op0, mode);
7062
7063 if (result)
7064 return result;
7065
7066 return gen_rtx_combine (code, mode, op0);
7067 }
7068 \f
7069 /* Simplify a comparison between *POP0 and *POP1 where CODE is the
7070 comparison code that will be tested.
7071
7072 The result is a possibly different comparison code to use. *POP0 and
7073 *POP1 may be updated.
7074
7075 It is possible that we might detect that a comparison is either always
7076 true or always false. However, we do not perform general constant
7077 folding in combine, so this knowledge isn't useful. Such tautologies
7078 should have been detected earlier. Hence we ignore all such cases. */
7079
7080 static enum rtx_code
7081 simplify_comparison (code, pop0, pop1)
7082 enum rtx_code code;
7083 rtx *pop0;
7084 rtx *pop1;
7085 {
7086 rtx op0 = *pop0;
7087 rtx op1 = *pop1;
7088 rtx tem, tem1;
7089 int i;
7090 enum machine_mode mode, tmode;
7091
7092 /* Try a few ways of applying the same transformation to both operands. */
7093 while (1)
7094 {
7095 /* If both operands are the same constant shift, see if we can ignore the
7096 shift. We can if the shift is a rotate or if the bits shifted out of
7097 this shift are not significant for either input and if the type of
7098 comparison is compatible with the shift. */
7099 if (GET_CODE (op0) == GET_CODE (op1)
7100 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
7101 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
7102 || ((GET_CODE (op0) == LSHIFTRT
7103 || GET_CODE (op0) == ASHIFT || GET_CODE (op0) == LSHIFT)
7104 && (code != GT && code != LT && code != GE && code != LE))
7105 || (GET_CODE (op0) == ASHIFTRT
7106 && (code != GTU && code != LTU
7107 && code != GEU && code != LEU)))
7108 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7109 && INTVAL (XEXP (op0, 1)) >= 0
7110 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
7111 && XEXP (op0, 1) == XEXP (op1, 1))
7112 {
7113 enum machine_mode mode = GET_MODE (op0);
7114 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
7115 int shift_count = INTVAL (XEXP (op0, 1));
7116
7117 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
7118 mask &= (mask >> shift_count) << shift_count;
7119 else if (GET_CODE (op0) == ASHIFT || GET_CODE (op0) == LSHIFT)
7120 mask = (mask & (mask << shift_count)) >> shift_count;
7121
7122 if ((significant_bits (XEXP (op0, 0), mode) & ~ mask) == 0
7123 && (significant_bits (XEXP (op1, 0), mode) & ~ mask) == 0)
7124 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
7125 else
7126 break;
7127 }
7128
7129 /* If both operands are AND's of a paradoxical SUBREG by constant, the
7130 SUBREGs are of the same mode, and, in both cases, the AND would
7131 be redundant if the comparison was done in the narrower mode,
7132 do the comparison in the narrower mode (e.g., we are AND'ing with 1
7133 and the operand's significant bits are 0xffffff01; in that case if
7134 we only care about QImode, we don't need the AND). This case occurs
7135 if the output mode of an scc insn is not SImode and
7136 STORE_FLAG_VALUE == 1 (e.g., the 386). */
7137
7138 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
7139 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7140 && GET_CODE (XEXP (op1, 1)) == CONST_INT
7141 && GET_CODE (XEXP (op0, 0)) == SUBREG
7142 && GET_CODE (XEXP (op1, 0)) == SUBREG
7143 && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)))
7144 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
7145 && (GET_MODE (SUBREG_REG (XEXP (op0, 0)))
7146 == GET_MODE (SUBREG_REG (XEXP (op1, 0))))
7147 && (significant_bits (SUBREG_REG (XEXP (op0, 0)),
7148 GET_MODE (SUBREG_REG (XEXP (op0, 0))))
7149 & ~ INTVAL (XEXP (op0, 1))) == 0
7150 && (significant_bits (SUBREG_REG (XEXP (op1, 0)),
7151 GET_MODE (SUBREG_REG (XEXP (op1, 0))))
7152 & ~ INTVAL (XEXP (op1, 1))) == 0)
7153 {
7154 op0 = SUBREG_REG (XEXP (op0, 0));
7155 op1 = SUBREG_REG (XEXP (op1, 0));
7156
7157 /* The resulting comparison is always unsigned, since we masked off
7158 the original sign bit. */
7159 code = unsigned_condition (code);
7160 }
7161 else
7162 break;
7163 }
7164
7165 /* If the first operand is a constant, swap the operands and adjust the
7166 comparison code appropriately. */
7167 if (CONSTANT_P (op0))
7168 {
7169 tem = op0, op0 = op1, op1 = tem;
7170 code = swap_condition (code);
7171 }
7172
7173 /* We now enter a loop during which we will try to simplify the comparison.
7174 For the most part, we only are concerned with comparisons with zero,
7175 but some things may really be comparisons with zero but not start
7176 out looking that way. */
7177
7178 while (GET_CODE (op1) == CONST_INT)
7179 {
7180 enum machine_mode mode = GET_MODE (op0);
7181 int mode_width = GET_MODE_BITSIZE (mode);
7182 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
7183 int equality_comparison_p;
7184 int sign_bit_comparison_p;
7185 int unsigned_comparison_p;
7186 HOST_WIDE_INT const_op;
7187
7188 /* We only want to handle integral modes. This catches VOIDmode,
7189 CCmode, and the floating-point modes. An exception is that we
7190 can handle VOIDmode if OP0 is a COMPARE or a comparison
7191 operation. */
7192
7193 if (GET_MODE_CLASS (mode) != MODE_INT
7194 && ! (mode == VOIDmode
7195 && (GET_CODE (op0) == COMPARE
7196 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
7197 break;
7198
7199 /* Get the constant we are comparing against and turn off all bits
7200 not on in our mode. */
7201 const_op = INTVAL (op1);
7202 if (mode_width <= HOST_BITS_PER_WIDE_INT)
7203 const_op &= mask;
7204
7205 /* If we are comparing against a constant power of two and the value
7206 being compared has only that single significant bit (e.g., it was
7207 `and'ed with that bit), we can replace this with a comparison
7208 with zero. */
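      /* E.g., (eq (and:SI FOO (const_int 8)) (const_int 8)) becomes
         (ne (and:SI FOO (const_int 8)) (const_int 0)), since the AND
         can only yield 0 or 8.  */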
7209 if (const_op
7210 && (code == EQ || code == NE || code == GE || code == GEU
7211 || code == LT || code == LTU)
7212 && mode_width <= HOST_BITS_PER_WIDE_INT
7213 && exact_log2 (const_op) >= 0
7214 && significant_bits (op0, mode) == const_op)
7215 {
7216 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
7217 op1 = const0_rtx, const_op = 0;
7218 }
7219
7220 /* Similarly, if we are comparing a value known to be either -1 or
7221 0 with -1, change it to the opposite comparison against zero. */
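      /* E.g., if X is known to be 0 or -1, (eq X (const_int -1))
         becomes (ne X (const_int 0)).  */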
7222
7223 if (const_op == -1
7224 && (code == EQ || code == NE || code == GT || code == LE
7225 || code == GEU || code == LTU)
7226 && num_sign_bit_copies (op0, mode) == mode_width)
7227 {
7228 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
7229 op1 = const0_rtx, const_op = 0;
7230 }
7231
7232 /* Do some canonicalizations based on the comparison code. We prefer
7233 comparisons against zero and then prefer equality comparisons.
7234 If we can reduce the size of a constant, we will do that too. */
7235
7236 switch (code)
7237 {
7238 case LT:
7239 /* < C is equivalent to <= (C - 1) */
7240 if (const_op > 0)
7241 {
7242 const_op -= 1;
7243 op1 = GEN_INT (const_op);
7244 code = LE;
7245 /* ... fall through to LE case below. */
7246 }
7247 else
7248 break;
7249
7250 case LE:
7251 /* <= C is equivalent to < (C + 1); we do this for C < 0 */
7252 if (const_op < 0)
7253 {
7254 const_op += 1;
7255 op1 = GEN_INT (const_op);
7256 code = LT;
7257 }
7258
7259 /* If we are doing a <= 0 comparison on a value known to have
7260 a zero sign bit, we can replace this with == 0. */
7261 else if (const_op == 0
7262 && mode_width <= HOST_BITS_PER_WIDE_INT
7263 && (significant_bits (op0, mode)
7264 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
7265 code = EQ;
7266 break;
7267
7268 case GE:
7269 /* >= C is equivalent to > (C - 1). */
7270 if (const_op > 0)
7271 {
7272 const_op -= 1;
7273 op1 = GEN_INT (const_op);
7274 code = GT;
7275 /* ... fall through to GT below. */
7276 }
7277 else
7278 break;
7279
7280 case GT:
7281 /* > C is equivalent to >= (C + 1); we do this for C < 0. */
7282 if (const_op < 0)
7283 {
7284 const_op += 1;
7285 op1 = GEN_INT (const_op);
7286 code = GE;
7287 }
7288
7289 /* If we are doing a > 0 comparison on a value known to have
7290 a zero sign bit, we can replace this with != 0. */
7291 else if (const_op == 0
7292 && mode_width <= HOST_BITS_PER_WIDE_INT
7293 && (significant_bits (op0, mode)
7294 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
7295 code = NE;
7296 break;
7297
7298 case LTU:
7299 /* < C is equivalent to <= (C - 1). */
7300 if (const_op > 0)
7301 {
7302 const_op -= 1;
7303 op1 = GEN_INT (const_op);
7304 code = LEU;
7305 /* ... fall through ... */
7306 }
7307
7308 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
7309 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
7310 {
7311 const_op = 0, op1 = const0_rtx;
7312 code = GE;
7313 break;
7314 }
7315 else
7316 break;
7317
7318 case LEU:
7319 /* unsigned <= 0 is equivalent to == 0. */
7320 if (const_op == 0)
7321 code = EQ;
7322
7323 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
7324 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
7325 {
7326 const_op = 0, op1 = const0_rtx;
7327 code = GE;
7328 }
7329 break;
7330
7331 case GEU:
7332 /* >= C is equivalent to > (C - 1). */
7333 if (const_op > 1)
7334 {
7335 const_op -= 1;
7336 op1 = GEN_INT (const_op);
7337 code = GTU;
7338 /* ... fall through ... */
7339 }
7340
7341 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
7342 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
7343 {
7344 const_op = 0, op1 = const0_rtx;
7345 code = LT;
7346 }
7347 else
7348 break;
7349
7350 case GTU:
7351 /* unsigned > 0 is equivalent to != 0. */
7352 if (const_op == 0)
7353 code = NE;
7354
7355 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
7356 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
7357 {
7358 const_op = 0, op1 = const0_rtx;
7359 code = LT;
7360 }
7361 break;
7362 }
7363
7364 /* Compute some predicates to simplify code below. */
7365
7366 equality_comparison_p = (code == EQ || code == NE);
7367 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
7368 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
7369 || code == GEU);
7370
7371 /* Now try cases based on the opcode of OP0. If none of the cases
7372 does a "continue", we exit this loop immediately after the
7373 switch. */
7374
7375 switch (GET_CODE (op0))
7376 {
7377 case ZERO_EXTRACT:
7378 /* If we are extracting a single bit from a variable position in
7379 a constant that has only a single bit set and are comparing it
7380 with zero, we can convert this into an equality comparison
7381 between the position and the location of the single bit. We can't
7382 do this if bit endian and we don't have an extzv since we then
7383 can't know what mode to use for the endianness adjustment. */
7384
7385 #if ! BITS_BIG_ENDIAN || defined (HAVE_extzv)
7386 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
7387 && XEXP (op0, 1) == const1_rtx
7388 && equality_comparison_p && const_op == 0
7389 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
7390 {
7391 #if BITS_BIG_ENDIAN
7392 i = (GET_MODE_BITSIZE
7393 (insn_operand_mode[(int) CODE_FOR_extzv][1]) - 1 - i);
7394 #endif
7395
7396 op0 = XEXP (op0, 2);
7397 op1 = GEN_INT (i);
7398 const_op = i;
7399
7400 /* Result is nonzero iff shift count is equal to I. */
7401 code = reverse_condition (code);
7402 continue;
7403 }
7404 #endif
7405
7406 /* ... fall through ... */
7407
7408 case SIGN_EXTRACT:
7409 tem = expand_compound_operation (op0);
7410 if (tem != op0)
7411 {
7412 op0 = tem;
7413 continue;
7414 }
7415 break;
7416
7417 case NOT:
7418 /* If testing for equality, we can take the NOT of the constant. */
7419 if (equality_comparison_p
7420 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
7421 {
7422 op0 = XEXP (op0, 0);
7423 op1 = tem;
7424 continue;
7425 }
7426
7427 /* If just looking at the sign bit, reverse the sense of the
7428 comparison. */
7429 if (sign_bit_comparison_p)
7430 {
7431 op0 = XEXP (op0, 0);
7432 code = (code == GE ? LT : GE);
7433 continue;
7434 }
7435 break;
7436
7437 case NEG:
7438 /* If testing for equality, we can take the NEG of the constant. */
7439 if (equality_comparison_p
7440 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
7441 {
7442 op0 = XEXP (op0, 0);
7443 op1 = tem;
7444 continue;
7445 }
7446
7447 /* The remaining cases only apply to comparisons with zero. */
7448 if (const_op != 0)
7449 break;
7450
7451 /* When X is ABS or is known positive,
7452 (neg X) is < 0 if and only if X != 0. */
7453
7454 if (sign_bit_comparison_p
7455 && (GET_CODE (XEXP (op0, 0)) == ABS
7456 || (mode_width <= HOST_BITS_PER_WIDE_INT
7457 && (significant_bits (XEXP (op0, 0), mode)
7458 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
7459 {
7460 op0 = XEXP (op0, 0);
7461 code = (code == LT ? NE : EQ);
7462 continue;
7463 }
7464
7465 /* If we have NEG of something that is the result of a
7466 SIGN_EXTEND, SIGN_EXTRACT, or ASHIFTRT, we know that the
7467 two high-order bits must be the same and hence that
7468 "(-a) < 0" is equivalent to "a > 0". Otherwise, we can't
7469 do this. */
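      /* The reasoning: "(neg a) < 0" differs from "a > 0" only when A
         is the most negative number, whose two high-order bits differ;
         the tests below guarantee A cannot be that value.  */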
7470 if (GET_CODE (XEXP (op0, 0)) == SIGN_EXTEND
7471 || (GET_CODE (XEXP (op0, 0)) == SIGN_EXTRACT
7472 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
7473 && (INTVAL (XEXP (XEXP (op0, 0), 1))
7474 < GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (op0, 0), 0)))))
7475 || (GET_CODE (XEXP (op0, 0)) == ASHIFTRT
7476 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
7477 && XEXP (XEXP (op0, 0), 1) != const0_rtx)
7478 || ((tem = get_last_value (XEXP (op0, 0))) != 0
7479 && (GET_CODE (tem) == SIGN_EXTEND
7480 || (GET_CODE (tem) == SIGN_EXTRACT
7481 && GET_CODE (XEXP (tem, 1)) == CONST_INT
7482 && (INTVAL (XEXP (tem, 1))
7483 < GET_MODE_BITSIZE (GET_MODE (XEXP (tem, 0)))))
7484 || (GET_CODE (tem) == ASHIFTRT
7485 && GET_CODE (XEXP (tem, 1)) == CONST_INT
7486 && XEXP (tem, 1) != const0_rtx))))
7487 {
7488 op0 = XEXP (op0, 0);
7489 code = swap_condition (code);
7490 continue;
7491 }
7492 break;
7493
7494 case ROTATE:
7495 /* If we are testing equality and our count is a constant, we
7496 can perform the inverse operation on our RHS. */
7497 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
7498 && (tem = simplify_binary_operation (ROTATERT, mode,
7499 op1, XEXP (op0, 1))) != 0)
7500 {
7501 op0 = XEXP (op0, 0);
7502 op1 = tem;
7503 continue;
7504 }
7505
7506 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
7507 a particular bit. Convert it to an AND of a constant of that
7508 bit. This will be converted into a ZERO_EXTRACT. */
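      /* E.g., (lt (rotate:SI X (const_int 7)) (const_int 0)) tests
         bit 24 of X (ROTATE is a left rotate), so it becomes
         (ne (and:SI X (const_int 0x1000000)) (const_int 0)).  */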
7509 if (const_op == 0 && sign_bit_comparison_p
7510 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7511 && mode_width <= HOST_BITS_PER_WIDE_INT)
7512 {
7513 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
7514 ((HOST_WIDE_INT) 1
7515 << (mode_width - 1
7516 - INTVAL (XEXP (op0, 1)))));
7517 code = (code == LT ? NE : EQ);
7518 continue;
7519 }
7520
7521 /* ... fall through ... */
7522
7523 case ABS:
7524 /* ABS is ignorable inside an equality comparison with zero. */
7525 if (const_op == 0 && equality_comparison_p)
7526 {
7527 op0 = XEXP (op0, 0);
7528 continue;
7529 }
7530 break;
7531
7532
7533 case SIGN_EXTEND:
7534 /* Can simplify (compare (zero/sign_extend FOO) CONST)
7535 to (compare FOO CONST) if CONST fits in FOO's mode and we
7536 are either testing inequality or have an unsigned comparison
7537 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
7538 if (! unsigned_comparison_p
7539 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
7540 <= HOST_BITS_PER_WIDE_INT)
7541 && ((unsigned HOST_WIDE_INT) const_op
7542 < (((HOST_WIDE_INT) 1
7543 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
7544 {
7545 op0 = XEXP (op0, 0);
7546 continue;
7547 }
7548 break;
7549
7550 case SUBREG:
7551 /* Check for the case where we are comparing A - C1 with C2,
7552 both constants are smaller than 1/2 the maximum positive
7553 value in MODE, and the comparison is equality or unsigned.
7554 In that case, if A is either zero-extended to MODE or has
7555 sufficient sign bits so that the high-order bit in MODE
7556 is a copy of the sign in the inner mode, we can prove that it is
7557 safe to do the operation in the wider mode. This simplifies
7558 many range checks. */
7559
7560 if (mode_width <= HOST_BITS_PER_WIDE_INT
7561 && subreg_lowpart_p (op0)
7562 && GET_CODE (SUBREG_REG (op0)) == PLUS
7563 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
7564 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
7565 && (- INTVAL (XEXP (SUBREG_REG (op0), 1))
7566 < GET_MODE_MASK (mode) / 2)
7567 && (unsigned) const_op < GET_MODE_MASK (mode) / 2
7568 && (0 == (significant_bits (XEXP (SUBREG_REG (op0), 0),
7569 GET_MODE (SUBREG_REG (op0)))
7570 & ~ GET_MODE_MASK (mode))
7571 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
7572 GET_MODE (SUBREG_REG (op0)))
7573 > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
7574 - GET_MODE_BITSIZE (mode)))))
7575 {
7576 op0 = SUBREG_REG (op0);
7577 continue;
7578 }
7579
7580 /* If the inner mode is narrower and we are extracting the low part,
7581 we can treat the SUBREG as if it were a ZERO_EXTEND. */
7582 if (subreg_lowpart_p (op0)
7583 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
7584 /* Fall through */ ;
7585 else
7586 break;
7587
7588 /* ... fall through ... */
7589
7590 case ZERO_EXTEND:
7591 if ((unsigned_comparison_p || equality_comparison_p)
7592 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
7593 <= HOST_BITS_PER_WIDE_INT)
7594 && ((unsigned HOST_WIDE_INT) const_op
7595 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
7596 {
7597 op0 = XEXP (op0, 0);
7598 continue;
7599 }
7600 break;
7601
7602 case PLUS:
7603 /* (eq (plus X C1) C2) -> (eq X (minus C2 C1)). We can only do
7604 this for equality comparisons due to pathological cases involving
7605 overflows. */
7606 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
7607 && (tem = simplify_binary_operation (MINUS, mode, op1,
7608 XEXP (op0, 1))) != 0)
7609 {
7610 op0 = XEXP (op0, 0);
7611 op1 = tem;
7612 continue;
7613 }
7614
7615 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
7616 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
7617 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
7618 {
7619 op0 = XEXP (XEXP (op0, 0), 0);
7620 code = (code == LT ? EQ : NE);
7621 continue;
7622 }
7623 break;
7624
7625 case MINUS:
7626 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
7627 of bits in X minus 1, is one iff X > 0. */
7628 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
7629 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
7630 && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
7631 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
7632 {
7633 op0 = XEXP (op0, 1);
7634 code = (code == GE ? LE : GT);
7635 continue;
7636 }
7637 break;
7638
7639 case XOR:
7640 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
7641 if C is zero or B is a constant. */
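      /* E.g., (eq (xor A (const_int 5)) (const_int 3)) becomes
         (eq A (const_int 6)), XORing 5 into both sides.  */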
7642 if (equality_comparison_p
7643 && 0 != (tem = simplify_binary_operation (XOR, mode,
7644 XEXP (op0, 1), op1)))
7645 {
7646 op0 = XEXP (op0, 0);
7647 op1 = tem;
7648 continue;
7649 }
7650 break;
7651
7652 case EQ: case NE:
7653 case LT: case LTU: case LE: case LEU:
7654 case GT: case GTU: case GE: case GEU:
7655 /* We can't do anything if OP0 is a condition code value, rather
7656 than an actual data value. */
7657 if (const_op != 0
7658 #ifdef HAVE_cc0
7659 || XEXP (op0, 0) == cc0_rtx
7660 #endif
7661 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
7662 break;
7663
7664 /* Get the two operands being compared. */
7665 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
7666 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
7667 else
7668 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
7669
7670 /* Check for the cases where we simply want the result of the
7671 earlier test or the opposite of that result. */
7672 if (code == NE
7673 || (code == EQ && reversible_comparison_p (op0))
7674 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
7675 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7676 && (STORE_FLAG_VALUE
7677 & (((HOST_WIDE_INT) 1
7678 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
7679 && (code == LT
7680 || (code == GE && reversible_comparison_p (op0)))))
7681 {
7682 code = (code == LT || code == NE
7683 ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
7684 op0 = tem, op1 = tem1;
7685 continue;
7686 }
7687 break;
7688
7689 case IOR:
7690 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
7691 iff X <= 0. */
7692 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
7693 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
7694 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
7695 {
7696 op0 = XEXP (op0, 1);
7697 code = (code == GE ? GT : LE);
7698 continue;
7699 }
7700 break;
7701
7702 case AND:
7703 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
7704 will be converted to a ZERO_EXTRACT later. */
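      /* E.g., (eq (and (ashift (const_int 1) X) Y) (const_int 0))
         becomes (eq (and (lshiftrt Y X) (const_int 1)) (const_int 0));
         both test bit X of Y.  */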
7705 if (const_op == 0 && equality_comparison_p
7706 && (GET_CODE (XEXP (op0, 0)) == ASHIFT
7707 || GET_CODE (XEXP (op0, 0)) == LSHIFT)
7708 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
7709 {
7710 op0 = simplify_and_const_int
7711 (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
7712 XEXP (op0, 1),
7713 XEXP (XEXP (op0, 0), 1)),
7714 (HOST_WIDE_INT) 1);
7715 continue;
7716 }
7717
7718 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
7719 zero and X is a comparison and C1 and C2 describe only bits set
7720 in STORE_FLAG_VALUE, we can compare with X. */
7721 if (const_op == 0 && equality_comparison_p
7722 && mode_width <= HOST_BITS_PER_WIDE_INT
7723 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7724 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
7725 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
7726 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
7727 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
7728 {
7729 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
7730 << INTVAL (XEXP (XEXP (op0, 0), 1)));
7731 if ((~ STORE_FLAG_VALUE & mask) == 0
7732 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
7733 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
7734 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
7735 {
7736 op0 = XEXP (XEXP (op0, 0), 0);
7737 continue;
7738 }
7739 }
7740
7741 /* If we are doing an equality comparison of an AND of a bit equal
7742 to the sign bit, replace this with a LT or GE comparison of
7743 the underlying value. */
7744 if (equality_comparison_p
7745 && const_op == 0
7746 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7747 && mode_width <= HOST_BITS_PER_WIDE_INT
7748 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
7749 == (HOST_WIDE_INT) 1 << (mode_width - 1)))
7750 {
7751 op0 = XEXP (op0, 0);
7752 code = (code == EQ ? GE : LT);
7753 continue;
7754 }
7755
7756 /* If this AND operation is really a ZERO_EXTEND from a narrower
7757 mode, the constant fits within that mode, and this is either an
7758 equality or unsigned comparison, try to do this comparison in
7759 the narrower mode. */
7760 if ((equality_comparison_p || unsigned_comparison_p)
7761 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7762 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
7763 & GET_MODE_MASK (mode))
7764 + 1)) >= 0
7765 && const_op >> i == 0
7766 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
7767 {
7768 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
7769 continue;
7770 }
7771 break;
7772
7773 case ASHIFT:
7774 case LSHIFT:
7775 /* If we have (compare (xshift FOO N) (const_int C)) and
7776 the high order N bits of FOO (N+1 if an inequality comparison)
7777 are not significant, we can do this by comparing FOO with C
7778 shifted right N bits so long as the low-order N bits of C are
7779 zero. */
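      /* Illustration: (eq (ashift:SI FOO (const_int 3)) (const_int 40))
         becomes (eq FOO (const_int 5)) when the three high-order bits
         of FOO are not significant, since 40 has its low-order three
         bits zero and 40 >> 3 == 5.  */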
7780 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
7781 && INTVAL (XEXP (op0, 1)) >= 0
7782 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
7783 < HOST_BITS_PER_WIDE_INT)
7784 && ((const_op
7785 & ~ (((HOST_WIDE_INT) 1
7786 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
7787 && mode_width <= HOST_BITS_PER_WIDE_INT
7788 && (significant_bits (XEXP (op0, 0), mode)
7789 & ~ (mask >> (INTVAL (XEXP (op0, 1))
7790 + ! equality_comparison_p))) == 0)
7791 {
7792 const_op >>= INTVAL (XEXP (op0, 1));
7793 op1 = GEN_INT (const_op);
7794 op0 = XEXP (op0, 0);
7795 continue;
7796 }
7797
7798 /* If we are doing a sign bit comparison, it means we are testing
7799 a particular bit. Convert it to the appropriate AND. */
7800 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
7801 && mode_width <= HOST_BITS_PER_WIDE_INT)
7802 {
7803 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
7804 ((HOST_WIDE_INT) 1
7805 << (mode_width - 1
7806 - INTVAL (XEXP (op0, 1)))));
7807 code = (code == LT ? NE : EQ);
7808 continue;
7809 }
7810
7811 /* If this is an equality comparison with zero and we are shifting
7812 the low bit to the sign bit, we can convert this to an AND of the
7813 low-order bit. */
7814 if (const_op == 0 && equality_comparison_p
7815 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7816 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
7817 {
7818 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
7819 (HOST_WIDE_INT) 1);
7820 continue;
7821 }
7822 break;
7823
7824 case ASHIFTRT:
7825 /* If this is an equality comparison with zero, we can do this
7826 as a logical shift, which might be much simpler. */
7827 if (equality_comparison_p && const_op == 0
7828 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7829 {
7830 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
7831 XEXP (op0, 0),
7832 INTVAL (XEXP (op0, 1)));
7833 continue;
7834 }
7835
7836 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
7837 do the comparison in a narrower mode. */
7838 if (! unsigned_comparison_p
7839 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7840 && GET_CODE (XEXP (op0, 0)) == ASHIFT
7841 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
7842 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
7843 MODE_INT, 1)) != BLKmode
7844 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
7845 || ((unsigned HOST_WIDE_INT) - const_op
7846 <= GET_MODE_MASK (tmode))))
7847 {
7848 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
7849 continue;
7850 }
7851
7852 /* ... fall through ... */
7853 case LSHIFTRT:
7854 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
7855 the low order N bits of FOO are not significant, we can do this
7856 by comparing FOO with C shifted left N bits so long as no
7857 overflow occurs. */
7858 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
7859 && INTVAL (XEXP (op0, 1)) >= 0
7860 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
7861 && mode_width <= HOST_BITS_PER_WIDE_INT
7862 && (significant_bits (XEXP (op0, 0), mode)
7863 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
7864 && (const_op == 0
7865 || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
7866 < mode_width)))
7867 {
7868 const_op <<= INTVAL (XEXP (op0, 1));
7869 op1 = GEN_INT (const_op);
7870 op0 = XEXP (op0, 0);
7871 continue;
7872 }
7873
7874 /* If we are using this shift to extract just the sign bit, we
7875 can replace this with an LT or GE comparison. */
7876 if (const_op == 0
7877 && (equality_comparison_p || sign_bit_comparison_p)
7878 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7879 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
7880 {
7881 op0 = XEXP (op0, 0);
7882 code = (code == NE || code == GT ? LT : GE);
7883 continue;
7884 }
7885 break;
7886 }
7887
7888 break;
7889 }
7890
7891 /* Now make any compound operations involved in this comparison. Then,
7892 check for an outermost SUBREG on OP0 that isn't doing anything or is
7893 paradoxical. The latter case can only occur when it is known that the
7894 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
7895 We can never remove a SUBREG for a non-equality comparison because the
7896 sign bit is in a different place in the underlying object. */
7897
7898 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
7899 op1 = make_compound_operation (op1, SET);
7900
7901 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
7902 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7903 && (code == NE || code == EQ)
7904 && ((GET_MODE_SIZE (GET_MODE (op0))
7905 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
7906 {
7907 op0 = SUBREG_REG (op0);
7908 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
7909 }
7910
7911 else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
7912 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7913 && (code == NE || code == EQ)
7914 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
7915 && (significant_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
7916 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
7917 && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
7918 op1),
7919 (significant_bits (tem, GET_MODE (SUBREG_REG (op0)))
7920 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
7921 op0 = SUBREG_REG (op0), op1 = tem;
7922
7923 /* We now do the opposite procedure: Some machines don't have compare
7924 insns in all modes. If OP0's mode is an integer mode smaller than a
7925 word and we can't do a compare in that mode, see if there is a larger
7926 mode for which we can do the compare. There are a number of cases in
7927 which we can use the wider mode. */
7928
7929 mode = GET_MODE (op0);
7930 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
7931 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
7932 && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
7933 for (tmode = GET_MODE_WIDER_MODE (mode);
7934 (tmode != VOIDmode
7935 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
7936 tmode = GET_MODE_WIDER_MODE (tmode))
7937 if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
7938 {
7939 /* If the only significant bits in OP0 and OP1 are those in the
7940 narrower mode and this is an equality or unsigned comparison,
7941 we can use the wider mode. Similarly for sign-extended
7942 values and equality or signed comparisons. */
7943 if (((code == EQ || code == NE
7944 || code == GEU || code == GTU || code == LEU || code == LTU)
7945 && ((significant_bits (op0, tmode) & ~ GET_MODE_MASK (mode))
7946 == 0)
7947 && ((significant_bits (op1, tmode) & ~ GET_MODE_MASK (mode))
7948 == 0))
7949 || ((code == EQ || code == NE
7950 || code == GE || code == GT || code == LE || code == LT)
7951 && (num_sign_bit_copies (op0, tmode)
7952 >= GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
7953 && (num_sign_bit_copies (op1, tmode)
7954 >= GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
7955 {
7956 op0 = gen_lowpart_for_combine (tmode, op0);
7957 op1 = gen_lowpart_for_combine (tmode, op1);
7958 break;
7959 }
7960
7961 /* If this is a test for negative, we can make an explicit
7962 test of the sign bit. */
7963
7964 if (op1 == const0_rtx && (code == LT || code == GE)
7965 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7966 {
7967 op0 = gen_binary (AND, tmode,
7968 gen_lowpart_for_combine (tmode, op0),
7969 GEN_INT ((HOST_WIDE_INT) 1
7970 << (GET_MODE_BITSIZE (mode) - 1)));
7971 code = (code == LT) ? NE : EQ;
7972 break;
7973 }
7974 }
7975
7976 *pop0 = op0;
7977 *pop1 = op1;
7978
7979 return code;
7980 }
7981 \f
7982 /* Return 1 if we know that X, a comparison operation, is not operating
7983 on a floating-point value or is EQ or NE, meaning that we can safely
7984 reverse it. */
7985
7986 static int
7987 reversible_comparison_p (x)
7988 rtx x;
7989 {
7990 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
7991 || GET_CODE (x) == NE || GET_CODE (x) == EQ)
7992 return 1;
7993
7994 switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
7995 {
7996 case MODE_INT:
7997 return 1;
7998
7999 case MODE_CC:
8000 x = get_last_value (XEXP (x, 0));
8001 return (x && GET_CODE (x) == COMPARE
8002 && GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) == MODE_INT);
8003 }
8004
8005 return 0;
8006 }
8007 \f
8008 /* Utility function for the following routine. Called when X is part of a
8009 value being stored into reg_last_set_value. Sets reg_last_set_table_tick
8010 for each register mentioned. Similar to mention_regs in cse.c. */
8011
8012 static void
8013 update_table_tick (x)
8014 rtx x;
8015 {
8016 register enum rtx_code code = GET_CODE (x);
8017 register char *fmt = GET_RTX_FORMAT (code);
8018 register int i;
8019
8020 if (code == REG)
8021 {
8022 int regno = REGNO (x);
8023 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8024 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
8025
8026 for (i = regno; i < endregno; i++)
8027 reg_last_set_table_tick[i] = label_tick;
8028
8029 return;
8030 }
8031
8032 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8033 /* Note that we can't have an "E" in values stored; see
8034 get_last_value_validate. */
8035 if (fmt[i] == 'e')
8036 update_table_tick (XEXP (x, i));
8037 }
8038
8039 /* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
8040 are saying that the register is clobbered and we no longer know its
8041 value. If INSN is zero, don't update reg_last_set; this call is normally
8042 done with VALUE also zero to invalidate the register. */
8043
8044 static void
8045 record_value_for_reg (reg, insn, value)
8046 rtx reg;
8047 rtx insn;
8048 rtx value;
8049 {
8050 int regno = REGNO (reg);
8051 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8052 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
8053 int i;
8054
8055 /* If VALUE contains REG and we have a previous value for REG, substitute
8056 the previous value. */
8057 if (value && insn && reg_overlap_mentioned_p (reg, value))
8058 {
8059 rtx tem;
8060
8061 /* Set things up so get_last_value is allowed to see anything set up to
8062 our insn. */
8063 subst_low_cuid = INSN_CUID (insn);
8064 tem = get_last_value (reg);
8065
8066 if (tem)
8067 value = replace_rtx (copy_rtx (value), reg, tem);
8068 }
8069
8070 /* For each register modified, show we don't know its value, that
8071 its value has been updated, and that we don't know the location of
8072 the death of the register. */
8073 for (i = regno; i < endregno; i ++)
8074 {
8075 if (insn)
8076 reg_last_set[i] = insn;
8077 reg_last_set_value[i] = 0;
8078 reg_last_death[i] = 0;
8079 }
8080
8081 /* Mark registers that are being referenced in this value. */
8082 if (value)
8083 update_table_tick (value);
8084
8085 /* Now update the status of each register being set.
8086 If someone is using this register in this block, mark this register
8087 as invalid, since we would otherwise confuse its two lives in this
8088 basic block; any use of this register's value then stays invalid.
8089 In cse, we scan the table to invalidate all entries using this
8090 register, but that is too much work for us. */
8091
8092 for (i = regno; i < endregno; i++)
8093 {
8094 reg_last_set_label[i] = label_tick;
8095 if (value && reg_last_set_table_tick[i] == label_tick)
8096 reg_last_set_invalid[i] = 1;
8097 else
8098 reg_last_set_invalid[i] = 0;
8099 }
8100
8101 /* The value being assigned might refer to X (like in "x++;"). In that
8102 case, we must replace it with (clobber (const_int 0)) to prevent
8103 infinite loops. */
8104 if (value && ! get_last_value_validate (&value,
8105 reg_last_set_label[regno], 0))
8106 {
8107 value = copy_rtx (value);
8108 if (! get_last_value_validate (&value, reg_last_set_label[regno], 1))
8109 value = 0;
8110 }
8111
8112 /* For the main register being modified, update the value. */
8113 reg_last_set_value[regno] = value;
8114
8115 }
8116
8117 /* Used for communication between the following two routines. */
8118 static rtx record_dead_insn;
8119
8120 /* Called via note_stores from record_dead_and_set_regs to handle one
8121 SET or CLOBBER in an insn. */
8122
8123 static void
8124 record_dead_and_set_regs_1 (dest, setter)
8125 rtx dest, setter;
8126 {
8127 if (GET_CODE (dest) == REG)
8128 {
8129 /* If we are setting the whole register, we know its value. Otherwise
8130 show that we don't know the value. We can handle SUBREG in
8131 some cases. */
8132 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
8133 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
8134 else if (GET_CODE (setter) == SET
8135 && GET_CODE (SET_DEST (setter)) == SUBREG
8136 && SUBREG_REG (SET_DEST (setter)) == dest
8137 && subreg_lowpart_p (SET_DEST (setter)))
8138 record_value_for_reg (dest, record_dead_insn,
8139 gen_lowpart_for_combine (GET_MODE (dest),
8140 SET_SRC (setter)));
8141 else
8142 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
8143 }
8144 else if (GET_CODE (dest) == MEM
8145 /* Ignore pushes, they clobber nothing. */
8146 && ! push_operand (dest, GET_MODE (dest)))
8147 mem_last_set = INSN_CUID (record_dead_insn);
8148 }
8149
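/* Illustrative example (hypothetical RTL, not from the source): for
       (set (subreg:SI (reg:DI 100) 0) (reg:SI 101))
   note_stores strips the SUBREG, so DEST here is (reg:DI 100) and SETTER
   is the whole SET.  Because the SUBREG is a lowpart, the value recorded
   for (reg 100) is gen_lowpart_for_combine (DImode, (reg:SI 101)); a
   store through a non-lowpart SUBREG would instead record a zero value,
   meaning "value unknown".  */
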
8150 /* Update the records of when each REG was most recently set or killed
8151 for the things done by INSN. This is the last thing done in processing
8152 INSN in the combiner loop.
8153
8154 We update reg_last_set, reg_last_set_value, reg_last_death, and also the
8155 similar information mem_last_set (which insn most recently modified memory)
8156 and last_call_cuid (which insn was the most recent subroutine call). */
8157
8158 static void
8159 record_dead_and_set_regs (insn)
8160 rtx insn;
8161 {
8162 register rtx link;
8163 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
8164 {
8165 if (REG_NOTE_KIND (link) == REG_DEAD)
8166 reg_last_death[REGNO (XEXP (link, 0))] = insn;
8167 else if (REG_NOTE_KIND (link) == REG_INC)
8168 record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
8169 }
8170
8171 if (GET_CODE (insn) == CALL_INSN)
8172 last_call_cuid = mem_last_set = INSN_CUID (insn);
8173
8174 record_dead_insn = insn;
8175 note_stores (PATTERN (insn), record_dead_and_set_regs_1);
8176 }
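
/* A minimal sketch of the note_stores callback protocol used above; the
   names below are hypothetical, for illustration only:

       static rtx example_insn;

       static void
       example_store (dest, setter)
            rtx dest, setter;
       {
         ...called once for each SET or CLOBBER in the pattern...
       }

       example_insn = insn;
       note_stores (PATTERN (insn), example_store);

   Because the callback receives only DEST and SETTER, any extra context
   (here, the insn being processed) has to travel through a file-scope
   variable such as record_dead_insn.  */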
8177 \f
8178 /* Utility routine for the following function. Verify that all the registers
8179 mentioned in *LOC are still valid, given that *LOC was part of a value
8180 recorded when label_tick == TICK. Return 0 if some are not.
8181
8182 If REPLACE is non-zero, replace the invalid reference with
8183 (clobber (const_int 0)) and return 1. This replacement is useful because
8184 we often can get useful information about the form of a value (e.g., if
8185 it was produced by a shift that always produces -1 or 0) even though
8186 we don't know exactly what registers it was produced from. */
8187
8188 static int
8189 get_last_value_validate (loc, tick, replace)
8190 rtx *loc;
8191 int tick;
8192 int replace;
8193 {
8194 rtx x = *loc;
8195 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
8196 int len = GET_RTX_LENGTH (GET_CODE (x));
8197 int i;
8198
8199 if (GET_CODE (x) == REG)
8200 {
8201 int regno = REGNO (x);
8202 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8203 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
8204 int j;
8205
8206 for (j = regno; j < endregno; j++)
8207 if (reg_last_set_invalid[j]
8208 /* If this is a pseudo-register that was only set once, it is
8209 always valid. */
8210 || (! (regno >= FIRST_PSEUDO_REGISTER && reg_n_sets[regno] == 1)
8211 && reg_last_set_label[j] > tick))
8212 {
8213 if (replace)
8214 *loc = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
8215 return replace;
8216 }
8217
8218 return 1;
8219 }
8220
8221 for (i = 0; i < len; i++)
8222 if ((fmt[i] == 'e'
8223 && get_last_value_validate (&XEXP (x, i), tick, replace) == 0)
8224 /* Don't bother with these. They shouldn't occur anyway. */
8225 || fmt[i] == 'E')
8226 return 0;
8227
8228 /* If we haven't found a reason for it to be invalid, it is valid. */
8229 return 1;
8230 }
8231
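/* Illustrative example (not original text): suppose the value recorded
   for (reg 100) was
       (ashiftrt:SI (reg:SI 101) (const_int 31))
   and (reg 101) has since been set again.  With REPLACE nonzero the
   value becomes
       (ashiftrt:SI (clobber:SI (const_int 0)) (const_int 31))
   which still tells later simplification that the result is -1 or 0,
   even though the source register is no longer known.  */
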
8232 /* Get the last value assigned to X, if known. Some registers
8233 in the value may be replaced with (clobber (const_int 0)) if their value
8234 is no longer known reliably. */
8235
8236 static rtx
8237 get_last_value (x)
8238 rtx x;
8239 {
8240 int regno;
8241 rtx value;
8242
8243 /* If this is a non-paradoxical SUBREG, get the value of its operand and
8244 then convert it to the desired mode. If this is a paradoxical SUBREG,
8245 we cannot predict what values the "extra" bits might have. */
8246 if (GET_CODE (x) == SUBREG
8247 && subreg_lowpart_p (x)
8248 && (GET_MODE_SIZE (GET_MODE (x))
8249 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8250 && (value = get_last_value (SUBREG_REG (x))) != 0)
8251 return gen_lowpart_for_combine (GET_MODE (x), value);
8252
8253 if (GET_CODE (x) != REG)
8254 return 0;
8255
8256 regno = REGNO (x);
8257 value = reg_last_set_value[regno];
8258
8259 /* If we don't have a value or if it isn't for this basic block, return 0. */
8260
8261 if (value == 0
8262 || (reg_n_sets[regno] != 1
8263 && (reg_last_set_label[regno] != label_tick)))
8264 return 0;
8265
8266 /* If the value was set in a later insn than the ones we are processing,
8267 we can't use it, but make a quick check to see if the previous insn
8268 set it to something. This is commonly the case when the same pseudo
8269 is used by repeated insns. */
8270
8271 if (reg_n_sets[regno] != 1
8272 && INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
8273 {
8274 rtx insn, set;
8275
8276 for (insn = prev_nonnote_insn (subst_insn);
8277 insn && INSN_CUID (insn) >= subst_low_cuid;
8278 insn = prev_nonnote_insn (insn))
8279 ;
8280
8281 if (insn
8282 && (set = single_set (insn)) != 0
8283 && rtx_equal_p (SET_DEST (set), x))
8284 {
8285 value = SET_SRC (set);
8286
8287 /* Make sure that VALUE doesn't reference X. Replace any
8288 explicit references with a CLOBBER. If there are any remaining
8289 references (rare), don't use the value. */
8290
8291 if (reg_mentioned_p (x, value))
8292 value = replace_rtx (copy_rtx (value), x,
8293 gen_rtx (CLOBBER, GET_MODE (x), const0_rtx));
8294
8295 if (reg_overlap_mentioned_p (x, value))
8296 return 0;
8297 }
8298 else
8299 return 0;
8300 }
8301
8302 /* If the value has all its registers valid, return it. */
8303 if (get_last_value_validate (&value, reg_last_set_label[regno], 0))
8304 return value;
8305
8306 /* Otherwise, make a copy and replace any invalid register with
8307 (clobber (const_int 0)). If that fails for some reason, return 0. */
8308
8309 value = copy_rtx (value);
8310 if (get_last_value_validate (&value, reg_last_set_label[regno], 1))
8311 return value;
8312
8313 return 0;
8314 }
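
/* Usage example (illustrative): a request for the last value of
       (subreg:HI (reg:SI 100) 0)
   fetches the SImode value recorded for (reg 100) and narrows it with
   gen_lowpart_for_combine.  A paradoxical SUBREG, one wider than the
   inner register, is rejected above because the extra bits cannot be
   predicted.  */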
8315 \f
8316 /* Return nonzero if expression X refers to a REG or to memory
8317 that is set in an instruction more recent than FROM_CUID. */
8318
8319 static int
8320 use_crosses_set_p (x, from_cuid)
8321 register rtx x;
8322 int from_cuid;
8323 {
8324 register char *fmt;
8325 register int i;
8326 register enum rtx_code code = GET_CODE (x);
8327
8328 if (code == REG)
8329 {
8330 register int regno = REGNO (x);
8331 #ifdef PUSH_ROUNDING
8332 /* Don't allow uses of the stack pointer to be moved,
8333 because we don't know whether the move crosses a push insn. */
8334 if (regno == STACK_POINTER_REGNUM)
8335 return 1;
8336 #endif
8337 return (reg_last_set[regno]
8338 && INSN_CUID (reg_last_set[regno]) > from_cuid);
8339 }
8340
8341 if (code == MEM && mem_last_set > from_cuid)
8342 return 1;
8343
8344 fmt = GET_RTX_FORMAT (code);
8345
8346 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8347 {
8348 if (fmt[i] == 'E')
8349 {
8350 register int j;
8351 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8352 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
8353 return 1;
8354 }
8355 else if (fmt[i] == 'e'
8356 && use_crosses_set_p (XEXP (x, i), from_cuid))
8357 return 1;
8358 }
8359 return 0;
8360 }
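
/* Example of the property being checked (hypothetical insns):
       insn A:  (set (reg 100) (plus (reg 99) (const_int 4)))
       insn B:  (set (reg 99) ...)
       insn C:  ... uses (reg 100) ...
   Substituting A's source into C would move the use of (reg 99) past
   B's set of it, so use_crosses_set_p on A's source with A's cuid
   returns 1 and the combination must be rejected.  */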
8361 \f
8362 /* Define three variables used for communication between the following
8363 routines. */
8364
8365 static int reg_dead_regno, reg_dead_endregno;
8366 static int reg_dead_flag;
8367
8368 /* Function called via note_stores from reg_dead_at_p.
8369
8370 If DEST is within [reg_dead_regno, reg_dead_endregno), set
8371 reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET. */
8372
8373 static void
8374 reg_dead_at_p_1 (dest, x)
8375 rtx dest;
8376 rtx x;
8377 {
8378 int regno, endregno;
8379
8380 if (GET_CODE (dest) != REG)
8381 return;
8382
8383 regno = REGNO (dest);
8384 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8385 ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
8386
8387 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
8388 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
8389 }
8390
8391 /* Return non-zero if REG is known to be dead at INSN.
8392
8393 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
8394 referencing REG, it is dead. If we hit a SET referencing REG, it is
8395 live. Otherwise, see if it is live or dead at the start of the basic
8396 block we are in. */
8397
8398 static int
8399 reg_dead_at_p (reg, insn)
8400 rtx reg;
8401 rtx insn;
8402 {
8403 int block, i;
8404
8405 /* Set variables for reg_dead_at_p_1. */
8406 reg_dead_regno = REGNO (reg);
8407 reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
8408 ? HARD_REGNO_NREGS (reg_dead_regno,
8409 GET_MODE (reg))
8410 : 1);
8411
8412 reg_dead_flag = 0;
8413
8414 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
8415 beginning of function. */
8416 for (; insn && GET_CODE (insn) != CODE_LABEL;
8417 insn = prev_nonnote_insn (insn))
8418 {
8419 note_stores (PATTERN (insn), reg_dead_at_p_1);
8420 if (reg_dead_flag)
8421 return reg_dead_flag == 1 ? 1 : 0;
8422
8423 if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
8424 return 1;
8425 }
8426
8427 /* Get the basic block number that we were in. */
8428 if (insn == 0)
8429 block = 0;
8430 else
8431 {
8432 for (block = 0; block < n_basic_blocks; block++)
8433 if (insn == basic_block_head[block])
8434 break;
8435
8436 if (block == n_basic_blocks)
8437 return 0;
8438 }
8439
8440 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
8441 if (basic_block_live_at_start[block][i / REGSET_ELT_BITS]
8442 & ((REGSET_ELT_TYPE) 1 << (i % REGSET_ELT_BITS)))
8443 return 0;
8444
8445 return 1;
8446 }
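
/* A small worked example of the regset bit test above: with
   REGSET_ELT_BITS == 32, register 70 lives in element 70 / 32 == 2 at
   bit 70 % 32 == 6, so the test reads
       basic_block_live_at_start[block][2] & ((REGSET_ELT_TYPE) 1 << 6)
   and a zero result means register 70 is not live at the block start.  */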
8447 \f
8448 /* Remove register number REGNO from the dead registers list of INSN.
8449
8450 Return the note used to record the death, if there was one. */
8451
8452 rtx
8453 remove_death (regno, insn)
8454 int regno;
8455 rtx insn;
8456 {
8457 register rtx note = find_regno_note (insn, REG_DEAD, regno);
8458
8459 if (note)
8460 remove_note (insn, note);
8461
8462 return note;
8463 }
8464
8465 /* For each register (hardware or pseudo) used within expression X, if its
8466 death is in an instruction with cuid between FROM_CUID (inclusive) and
8467 the cuid of TO_INSN (exclusive), put a REG_DEAD note for that register in the
8468 list headed by PNOTES.
8469
8470 This is done when X is being merged by combination into TO_INSN. These
8471 notes will then be distributed as needed. */
8472
8473 static void
8474 move_deaths (x, from_cuid, to_insn, pnotes)
8475 rtx x;
8476 int from_cuid;
8477 rtx to_insn;
8478 rtx *pnotes;
8479 {
8480 register char *fmt;
8481 register int len, i;
8482 register enum rtx_code code = GET_CODE (x);
8483
8484 if (code == REG)
8485 {
8486 register int regno = REGNO (x);
8487 register rtx where_dead = reg_last_death[regno];
8488
8489 if (where_dead && INSN_CUID (where_dead) >= from_cuid
8490 && INSN_CUID (where_dead) < INSN_CUID (to_insn))
8491 {
8492 rtx note = remove_death (regno, reg_last_death[regno]);
8493
8494 /* It is possible for the call above to return 0. This can occur
8495 when reg_last_death points to I2 or I1, insns we have combined with.
8496 In that case make a new note. */
8497
8498 if (note)
8499 {
8500 XEXP (note, 1) = *pnotes;
8501 *pnotes = note;
8502 }
8503 else
8504 *pnotes = gen_rtx (EXPR_LIST, REG_DEAD, x, *pnotes);
8505 }
8506
8507 return;
8508 }
8509
8510 else if (GET_CODE (x) == SET)
8511 {
8512 rtx dest = SET_DEST (x);
8513
8514 move_deaths (SET_SRC (x), from_cuid, to_insn, pnotes);
8515
8516 /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
8517 that accesses one word of a multi-word item, some
8518 piece of every register in the expression is used by
8519 this insn, so remove any old death. */
8520
8521 if (GET_CODE (dest) == ZERO_EXTRACT
8522 || GET_CODE (dest) == STRICT_LOW_PART
8523 || (GET_CODE (dest) == SUBREG
8524 && (((GET_MODE_SIZE (GET_MODE (dest))
8525 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
8526 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
8527 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
8528 {
8529 move_deaths (dest, from_cuid, to_insn, pnotes);
8530 return;
8531 }
8532
8533 /* If this is some other SUBREG, we know it replaces the entire
8534 value, so use that as the destination. */
8535 if (GET_CODE (dest) == SUBREG)
8536 dest = SUBREG_REG (dest);
8537
8538 /* If this is a MEM, adjust deaths of anything used in the address.
8539 For a REG (the only other possibility), the entire value is
8540 being replaced so the old value is not used in this insn. */
8541
8542 if (GET_CODE (dest) == MEM)
8543 move_deaths (XEXP (dest, 0), from_cuid, to_insn, pnotes);
8544 return;
8545 }
8546
8547 else if (GET_CODE (x) == CLOBBER)
8548 return;
8549
8550 len = GET_RTX_LENGTH (code);
8551 fmt = GET_RTX_FORMAT (code);
8552
8553 for (i = 0; i < len; i++)
8554 {
8555 if (fmt[i] == 'E')
8556 {
8557 register int j;
8558 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8559 move_deaths (XVECEXP (x, i, j), from_cuid, to_insn, pnotes);
8560 }
8561 else if (fmt[i] == 'e')
8562 move_deaths (XEXP (x, i), from_cuid, to_insn, pnotes);
8563 }
8564 }
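
/* Illustrative example (hypothetical insns): if the expression being
   merged into I3 uses (reg 99), and reg_last_death shows (reg 99) dying
   in an insn at or after FROM_CUID and before TO_INSN, that death note
   is removed from its old insn and a REG_DEAD note is chained onto
   *PNOTES, to be placed on the merged insn later by distribute_notes.  */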
8565 \f
8566 /* Return 1 if X is the target of a bit-field assignment in BODY, the
8567 pattern of an insn. X must be a REG. */
8568
8569 static int
8570 reg_bitfield_target_p (x, body)
8571 rtx x;
8572 rtx body;
8573 {
8574 int i;
8575
8576 if (GET_CODE (body) == SET)
8577 {
8578 rtx dest = SET_DEST (body);
8579 rtx target;
8580 int regno, tregno, endregno, endtregno;
8581
8582 if (GET_CODE (dest) == ZERO_EXTRACT)
8583 target = XEXP (dest, 0);
8584 else if (GET_CODE (dest) == STRICT_LOW_PART)
8585 target = SUBREG_REG (XEXP (dest, 0));
8586 else
8587 return 0;
8588
8589 if (GET_CODE (target) == SUBREG)
8590 target = SUBREG_REG (target);
8591
8592 if (GET_CODE (target) != REG)
8593 return 0;
8594
8595 tregno = REGNO (target), regno = REGNO (x);
8596 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
8597 return target == x;
8598
8599 endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
8600 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
8601
8602 return endregno > tregno && regno < endtregno;
8603 }
8604
8605 else if (GET_CODE (body) == PARALLEL)
8606 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
8607 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
8608 return 1;
8609
8610 return 0;
8611 }
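
/* Usage example (illustrative): for a body such as
       (set (zero_extract:SI (reg 100) (const_int 8) (const_int 0)) ...)
   reg_bitfield_target_p ((reg 100), body) returns 1: only a bit-field
   of the register is assigned, so the rest of its old value survives.
   This is why dead_or_set_p alone is not a sufficient test when placing
   REG_DEAD notes below.  */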
8612 \f
8613 /* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
8614 as appropriate. I3 and I2 are the insns resulting from combining the
8615 insns that included FROM_INSN (I2 may be zero).
8616
8617 ELIM_I2 and ELIM_I1 are either zero or registers that we know will
8618 not need REG_DEAD notes because they are being substituted for. This
8619 saves searching in the most common cases.
8620
8621 Each note in the list is either ignored or placed on some insns, depending
8622 on the type of note. */
8623
8624 static void
8625 distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
8626 rtx notes;
8627 rtx from_insn;
8628 rtx i3, i2;
8629 rtx elim_i2, elim_i1;
8630 {
8631 rtx note, next_note;
8632 rtx tem;
8633
8634 for (note = notes; note; note = next_note)
8635 {
8636 rtx place = 0, place2 = 0;
8637
8638 /* If this NOTE references a pseudo register, ensure it references
8639 the latest copy of that register. */
8640 if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
8641 && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
8642 XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
8643
8644 next_note = XEXP (note, 1);
8645 switch (REG_NOTE_KIND (note))
8646 {
8647 case REG_UNUSED:
8648 /* If this register is set or clobbered in I3, put the note there
8649 unless there is one already. */
8650 if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
8651 {
8652 if (! (GET_CODE (XEXP (note, 0)) == REG
8653 ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
8654 : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
8655 place = i3;
8656 }
8657 /* Otherwise, if this register is used by I3, then this register
8658 now dies here, so we must put a REG_DEAD note here unless there
8659 is one already. */
8660 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
8661 && ! (GET_CODE (XEXP (note, 0)) == REG
8662 ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
8663 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
8664 {
8665 PUT_REG_NOTE_KIND (note, REG_DEAD);
8666 place = i3;
8667 }
8668 break;
8669
8670 case REG_EQUAL:
8671 case REG_EQUIV:
8672 case REG_NONNEG:
8673 /* These notes say something about results of an insn. We can
8674 only support them if they used to be on I3 in which case they
8675 remain on I3. Otherwise they are ignored.
8676
8677 If the note refers to an expression that is not a constant, we
8678 must also ignore the note since we cannot tell whether the
8679 equivalence is still true. It might be possible to do
8680 slightly better than this (we only have a problem if I2DEST
8681 or I1DEST is present in the expression), but it doesn't
8682 seem worth the trouble. */
8683
8684 if (from_insn == i3
8685 && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
8686 place = i3;
8687 break;
8688
8689 case REG_INC:
8690 case REG_NO_CONFLICT:
8691 case REG_LABEL:
8692 /* These notes say something about how a register is used. They must
8693 be present on any use of the register in I2 or I3. */
8694 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
8695 place = i3;
8696
8697 if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
8698 {
8699 if (place)
8700 place2 = i2;
8701 else
8702 place = i2;
8703 }
8704 break;
8705
8706 case REG_WAS_0:
8707 /* It is too much trouble to try to see if this note is still
8708 correct in all situations. It is better to simply delete it. */
8709 break;
8710
8711 case REG_RETVAL:
8712 /* If the insn previously containing this note still exists,
8713 put it back where it was. Otherwise move it to the previous
8714 insn. Adjust the corresponding REG_LIBCALL note. */
8715 if (GET_CODE (from_insn) != NOTE)
8716 place = from_insn;
8717 else
8718 {
8719 tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
8720 place = prev_real_insn (from_insn);
8721 if (tem && place)
8722 XEXP (tem, 0) = place;
8723 }
8724 break;
8725
8726 case REG_LIBCALL:
8727 /* This is handled similarly to REG_RETVAL. */
8728 if (GET_CODE (from_insn) != NOTE)
8729 place = from_insn;
8730 else
8731 {
8732 tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
8733 place = next_real_insn (from_insn);
8734 if (tem && place)
8735 XEXP (tem, 0) = place;
8736 }
8737 break;
8738
8739 case REG_DEAD:
8740 /* If the register is used as an input in I3, it dies there.
8741 Similarly for I2, if it is non-zero and adjacent to I3.
8742
8743 If the register is not used as an input in either I3 or I2
8744 and it is not one of the registers we were supposed to eliminate,
8745 there are two possibilities. We might have a non-adjacent I2
8746 or we might have somehow eliminated an additional register
8747 from a computation. For example, we might have had A & B where
8748 we discover that B will always be zero. In this case we will
8749 eliminate the reference to A.
8750
8751 In both cases, we must search to see if we can find a previous
8752 use of A and put the death note there. */
8753
8754 if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
8755 place = i3;
8756 else if (i2 != 0 && next_nonnote_insn (i2) == i3
8757 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
8758 place = i2;
8759
8760 if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
8761 break;
8762
8763 /* If the register is used in both I2 and I3 and it dies in I3,
8764 we might have added another reference to it. If reg_n_refs
8765 was 2, bump it to 3. This has to be correct since the
8766 register must have been set somewhere. This is done
8767 because local-alloc.c treats 2 references as a
8768 special case. */
8769
8770 if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
8771 && reg_n_refs[REGNO (XEXP (note, 0))] == 2
8772 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
8773 reg_n_refs[REGNO (XEXP (note, 0))] = 3;
8774
8775 if (place == 0)
8776 for (tem = prev_nonnote_insn (i3);
8777 tem && (GET_CODE (tem) == INSN
8778 || GET_CODE (tem) == CALL_INSN);
8779 tem = prev_nonnote_insn (tem))
8780 {
8781 /* If the register is being set at TEM, see if that is all
8782 TEM is doing. If so, delete TEM. Otherwise, make this
8783 into a REG_UNUSED note instead. */
8784 if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
8785 {
8786 rtx set = single_set (tem);
8787
8788 /* Verify that it was the set, and not a clobber that
8789 modified the register. */
8790
8791 if (set != 0 && ! side_effects_p (SET_SRC (set))
8792 && rtx_equal_p (XEXP (note, 0), SET_DEST (set)))
8793 {
8794 /* Move the notes and links of TEM elsewhere.
8795 This might delete other dead insns recursively.
8796 First set the pattern to something that won't use
8797 any register. */
8798
8799 PATTERN (tem) = pc_rtx;
8800
8801 distribute_notes (REG_NOTES (tem), tem, tem,
8802 NULL_RTX, NULL_RTX, NULL_RTX);
8803 distribute_links (LOG_LINKS (tem));
8804
8805 PUT_CODE (tem, NOTE);
8806 NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
8807 NOTE_SOURCE_FILE (tem) = 0;
8808 }
8809 else
8810 {
8811 PUT_REG_NOTE_KIND (note, REG_UNUSED);
8812
8813 /* If there isn't already a REG_UNUSED note, put one
8814 here. */
8815 if (! find_regno_note (tem, REG_UNUSED,
8816 REGNO (XEXP (note, 0))))
8817 place = tem;
8818 break;
8819 }
8820 }
8821 else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem)))
8822 {
8823 place = tem;
8824 break;
8825 }
8826 }
8827
8828 /* If the register is set or already dead at PLACE, we needn't do
8829 anything with this note if it is still a REG_DEAD note.
8830
8831 Note that we cannot use just `dead_or_set_p' here since we can
8832 convert an assignment to a register into a bit-field assignment.
8833 Therefore, we must also omit the note if the register is the
8834 target of a bitfield assignment. */
8835
8836 if (place && REG_NOTE_KIND (note) == REG_DEAD)
8837 {
8838 int regno = REGNO (XEXP (note, 0));
8839
8840 if (dead_or_set_p (place, XEXP (note, 0))
8841 || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
8842 {
8843 /* Unless the register previously died in PLACE, clear
8844 reg_last_death. [I no longer understand why this is
8845 being done.] */
8846 if (reg_last_death[regno] != place)
8847 reg_last_death[regno] = 0;
8848 place = 0;
8849 }
8850 else
8851 reg_last_death[regno] = place;
8852
8853 /* If this is a death note for a hard reg that is occupying
8854 multiple registers, ensure that we are still using all
8855 parts of the object. If we find a piece of the object
8856 that is unused, we must add a USE for that piece before
8857 PLACE and put the appropriate REG_DEAD note on it.
8858
8859 An alternative would be to put a REG_UNUSED for the pieces
8860 on the insn that set the register, but that can't be done if
8861 it is not in the same block. It is simpler, though less
8862 efficient, to add the USE insns. */
8863
8864 if (place && regno < FIRST_PSEUDO_REGISTER
8865 && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
8866 {
8867 int endregno
8868 = regno + HARD_REGNO_NREGS (regno,
8869 GET_MODE (XEXP (note, 0)));
8870 int all_used = 1;
8871 int i;
8872
8873 for (i = regno; i < endregno; i++)
8874 if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0))
8875 {
8876 rtx piece = gen_rtx (REG, word_mode, i);
8877 rtx p;
8878
8879 /* See if we already placed a USE note for this
8880 register in front of PLACE. */
8881 for (p = place;
8882 GET_CODE (PREV_INSN (p)) == INSN
8883 && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
8884 p = PREV_INSN (p))
8885 if (rtx_equal_p (piece,
8886 XEXP (PATTERN (PREV_INSN (p)), 0)))
8887 {
8888 p = 0;
8889 break;
8890 }
8891
8892 if (p)
8893 {
8894 rtx use_insn
8895 = emit_insn_before (gen_rtx (USE, VOIDmode,
8896 piece),
8897 p);
8898 REG_NOTES (use_insn)
8899 = gen_rtx (EXPR_LIST, REG_DEAD, piece,
8900 REG_NOTES (use_insn));
8901 }
8902
8903 all_used = 0;
8904 }
8905
8906 if (! all_used)
8907 {
8908 /* Put only REG_DEAD notes for pieces that are
8909 still used and that are not already dead or set. */
8910
8911 for (i = regno; i < endregno; i++)
8912 {
8913 rtx piece = gen_rtx (REG, word_mode, i);
8914
8915 if (reg_referenced_p (piece, PATTERN (place))
8916 && ! dead_or_set_p (place, piece)
8917 && ! reg_bitfield_target_p (piece,
8918 PATTERN (place)))
8919 REG_NOTES (place) = gen_rtx (EXPR_LIST, REG_DEAD,
8920 piece,
8921 REG_NOTES (place));
8922 }
8923
8924 place = 0;
8925 }
8926 }
8927 }
8928 break;
8929
8930 default:
8931 /* Any other notes should not be present at this point in the
8932 compilation. */
8933 abort ();
8934 }
8935
8936 if (place)
8937 {
8938 XEXP (note, 1) = REG_NOTES (place);
8939 REG_NOTES (place) = note;
8940 }
8941
8942 if (place2)
8943 REG_NOTES (place2) = gen_rtx (GET_CODE (note), REG_NOTE_KIND (note),
8944 XEXP (note, 0), REG_NOTES (place2));
8945 }
8946 }
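
/* Illustrative walk-through (not original text): a REG_DEAD note for
   (reg 100) taken from a deleted insn gets PLACE = I3 when (reg 100) is
   still an input to I3, and the common exit code above splices the note
   onto REG_NOTES (i3).  For a kind such as REG_INC that must accompany
   every use, both PLACE and PLACE2 may be set, in which case a fresh
   copy of the note is made for PLACE2.  */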
8947 \f
8948 /* Similarly to above, distribute the LOG_LINKS that used to be present on
8949 I3, I2, and I1 to new locations. This is also called in one case to
8950 add a link pointing at I3 when I3's destination is changed. */
8951
8952 static void
8953 distribute_links (links)
8954 rtx links;
8955 {
8956 rtx link, next_link;
8957
8958 for (link = links; link; link = next_link)
8959 {
8960 rtx place = 0;
8961 rtx insn;
8962 rtx set, reg;
8963
8964 next_link = XEXP (link, 1);
8965
8966 /* If the insn that this link points to is a NOTE or isn't a single
8967 set, ignore it. In the latter case, it isn't clear what we
8968 can do other than ignore the link, since we can't tell which
8969 register it was for. Such links wouldn't be used by combine
8970 anyway.
8971
8972 It is not possible for the destination of the target of the link to
8973 have been changed by combine. The only way that could happen is if we
8974 replace I3, I2, and I1 by I3 and I2. But in that case the
8975 destination of I2 also remains unchanged. */
8976
8977 if (GET_CODE (XEXP (link, 0)) == NOTE
8978 || (set = single_set (XEXP (link, 0))) == 0)
8979 continue;
8980
8981 reg = SET_DEST (set);
8982 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
8983 || GET_CODE (reg) == SIGN_EXTRACT
8984 || GET_CODE (reg) == STRICT_LOW_PART)
8985 reg = XEXP (reg, 0);
8986
8987 /* A LOG_LINK is defined as being placed on the first insn that uses
8988 a register and points to the insn that sets the register. Start
8989 searching at the next insn after the target of the link and stop
8990 when we reach a set of the register or the end of the basic block.
8991
8992 Note that this correctly handles the link that used to point from
8993 I3 to I2. Also note that not much searching is typically done here
8994 since most links don't point very far away. */
8995
8996 for (insn = NEXT_INSN (XEXP (link, 0));
8997 (insn && GET_CODE (insn) != CODE_LABEL
8998 && GET_CODE (PREV_INSN (insn)) != JUMP_INSN);
8999 insn = NEXT_INSN (insn))
9000 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
9001 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
9002 {
9003 if (reg_referenced_p (reg, PATTERN (insn)))
9004 place = insn;
9005 break;
9006 }
9007
9008 /* If we found a place to put the link, place it there unless there
9009 is already a link to the same insn as LINK at that point. */
9010
9011 if (place)
9012 {
9013 rtx link2;
9014
9015 for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
9016 if (XEXP (link2, 0) == XEXP (link, 0))
9017 break;
9018
9019 if (link2 == 0)
9020 {
9021 XEXP (link, 1) = LOG_LINKS (place);
9022 LOG_LINKS (place) = link;
9023 }
9024 }
9025 }
9026 }
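
/* Illustrative example (not original text): each LOG_LINK lives on an
   insn that uses a register and points back at the insn that set it.
   When combination moves the use, the scan above starts just after the
   setting insn, stops at the first insn that still references the
   register, and re-attaches the link there unless an identical link is
   already present.  */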
9027 \f
9028 void
9029 dump_combine_stats (file)
9030 FILE *file;
9031 {
9032 fprintf
9033 (file,
9034 ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
9035 combine_attempts, combine_merges, combine_extras, combine_successes);
9036 }
9037
9038 void
9039 dump_combine_total_stats (file)
9040 FILE *file;
9041 {
9042 fprintf
9043 (file,
9044 "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
9045 total_attempts, total_merges, total_extras, total_successes);
9046 }