/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

   LOG_LINKS does not have links for use of the CC0.  They don't
   need to, because the insn that sets the CC0 is always immediately
   before the insn that tests it.  So we always regard a branch
   insn as having a logical link to the preceding insn.  The same is true
   for an insn explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information created by
   flow.c isn't completely updated:

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_notes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */
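
/* For illustration only (a hypothetical example, not taken from any
   particular machine description): given the linked pair of insns

	(set (reg 116) (mult:SI (reg 114) (reg 115)))
	(set (reg 117) (plus:SI (reg 116) (reg 113)))

   combine substitutes the first SET's source for (reg 116) in the second,
   yielding

	(set (reg 117) (plus:SI (mult:SI (reg 114) (reg 115)) (reg 113)))

   If the md recognizes the result (say, as a multiply-accumulate insn),
   it is installed and the first insn is deleted, assuming (reg 116) is
   then dead.  */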

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "function.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"
#include "toplev.h"
#include "defaults.h"

#ifndef ACCUMULATE_OUTGOING_ARGS
#define ACCUMULATE_OUTGOING_ARGS 0
#endif

/* Supply a default definition for PUSH_ARGS.  */
#ifndef PUSH_ARGS
#ifdef PUSH_ROUNDING
#define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
#else
#define PUSH_ARGS 0
#endif
#endif

/* It is not safe to use ordinary gen_lowpart in combine.
   Use gen_lowpart_for_combine instead.  See comments there.  */
#define gen_lowpart dont_use_gen_lowpart_you_dummy

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;

/* Define a default value for REVERSIBLE_CC_MODE.
   We can never assume that a condition code mode is safe to reverse unless
   the md tells us so.  */
#ifndef REVERSIBLE_CC_MODE
#define REVERSIBLE_CC_MODE(MODE) 0
#endif
\f
/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;
static int max_uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) \
(INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)])

/* Maximum register number, which is the size of the tables below.  */

static unsigned int combine_max_regno;

/* Record last point of death of (hard or pseudo) register n.  */

static rtx *reg_last_death;

/* Record last point of modification of (hard or pseudo) register n.  */

static rtx *reg_last_set;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is an insn that belongs before subst_insn, but is not currently
   on the insn chain.  */

static rtx subst_prev_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This contains any hard registers that are used in newpat; reg_dead_at_p
   must consider all these registers to be always live.  */

static HARD_REG_SET newpat_used_regs;

/* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is earlier than I2 or I3, combine should rescan starting at
   that location.  */

static rtx added_links_insn;

/* Basic block number of the block in which we are performing combines.  */
static int this_basic_block;

/* A bitmap indicating which blocks had registers go dead at entry.
   After combine, we'll need to re-do global life analysis with
   those blocks as starting points.  */
static sbitmap refresh_blocks;
static int need_refresh;
\f
/* The next group of arrays allows the recording of the last value assigned
   to (hard or pseudo) register n.  We use this information to see if an
   operation being processed is redundant given a prior operation performed
   on the register.  For example, an `and' with a constant is redundant if
   all the zero bits are already known to be turned off.

   We use an approach similar to that used by cse, but change it in the
   following ways:

   (1) We do not want to reinitialize at each label.
   (2) It is useful, but not critical, to know the actual value assigned
       to a register.  Often just its form is helpful.

   Therefore, we maintain the following arrays:

   reg_last_set_value		the last value assigned
   reg_last_set_label		records the value of label_tick when the
				register was assigned
   reg_last_set_table_tick	records the value of label_tick when a
				value using the register is assigned
   reg_last_set_invalid		set to non-zero when it is not valid
				to use the value of this register in some
				other register's value

   To understand the usage of these tables, it is important to understand
   the distinction between the value in reg_last_set_value being valid
   and the register being validly contained in some other expression in the
   table.

   Entry I in reg_last_set_value is valid if it is non-zero, and either
   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.

   Register I may validly appear in any expression returned for the value
   of another register if reg_n_sets[i] is 1.  It may also appear in the
   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
   reg_last_set_invalid[j] is zero.

   If an expression is found in the table containing a register which may
   not validly appear in an expression, the register is replaced by
   something that won't match, (clobber (const_int 0)).

   reg_last_set_invalid[i] is set non-zero when register I is being assigned
   to and reg_last_set_table_tick[i] == label_tick.  */
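
/* As a hypothetical example of the redundancy test described above: if the
   last value recorded for pseudo 65 is (and:SI (reg:SI 64) (const_int 255)),
   then a later (and:SI (reg:SI 65) (const_int 255)) can be simplified to a
   plain copy of (reg 65), because the recorded value shows that bits 8 and
   up of (reg 65) are already known to be zero.  */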

/* Record last value assigned to (hard or pseudo) register n.  */

static rtx *reg_last_set_value;

/* Record the value of label_tick when the value for register n is placed in
   reg_last_set_value[n].  */

static int *reg_last_set_label;

/* Record the value of label_tick when an expression involving register n
   is placed in reg_last_set_value.  */

static int *reg_last_set_table_tick;

/* Set non-zero if references to register n in expressions should not be
   used.  */

static char *reg_last_set_invalid;

/* Incremented for each label.  */

static int label_tick;

/* Some registers that are set more than once and used in more than one
   basic block are nevertheless always set in similar ways.  For example,
   a QImode register may be loaded from memory in two places on a machine
   where byte loads zero extend.

   We record in the following array what we know about the nonzero
   bits of a register, specifically which bits are known to be zero.

   If an entry is zero, it means that we don't know anything special.  */

static unsigned HOST_WIDE_INT *reg_nonzero_bits;

/* Mode used to compute significance in reg_nonzero_bits.  It is the largest
   integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero if we know that a register has some leading bits that are always
   equal to the sign bit.  */

static unsigned char *reg_sign_bit_copies;

/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
   It is zero while computing them and after combine has completed.  This
   former test prevents propagating values based on previously set values,
   which can be incorrect if a variable is modified in a loop.  */

static int nonzero_sign_valid;

/* These arrays are maintained in parallel with reg_last_set_value
   and are used to store the mode in which the register was last set,
   the bits that were known to be zero when it was last set, and the
   number of sign bit copies it was known to have when it was last set.  */

static enum machine_mode *reg_last_set_mode;
static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
static char *reg_last_set_sign_bit_copies;
\f
/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  struct undo *next;
  int is_int;
  union {rtx r; unsigned int i;} old_contents;
  union {rtx *r; unsigned int *i;} where;
};

/* Record a bunch of changes to be undone, as a chain of struct undo.
   undobuf.undos is the list of changes currently recorded; undobuf.frees
   holds entries available for re-use.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.

   previous_undos is the value of undobuf.undos when we started processing
   this substitution.  This will prevent gen_rtx_combine from re-using a
   piece from the previous expression.  Doing so can produce circular rtl
   structures.  */

struct undobuf
{
  struct undo *undos;
  struct undo *frees;
  struct undo *previous_undos;
  rtx other_insn;
};

static struct undobuf undobuf;
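
/* A sketch of how this is used (for exposition only; the real calls are
   spread through try_combine and subst):

     SUBST (SET_SRC (pat), new_src);   record &SET_SRC (pat) and the old rtx
     ...
     if (insn_code_number < 0)
       undo_all ();		       every recorded *where is restored
     else
       undo_commit ();		       changes are kept; the undo entries
				       move to undobuf.frees for re-use  */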

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static void do_SUBST			PARAMS ((rtx *, rtx));
static void do_SUBST_INT		PARAMS ((unsigned int *,
						 unsigned int));
static void init_reg_last_arrays	PARAMS ((void));
static void setup_incoming_promotions   PARAMS ((void));
static void set_nonzero_bits_and_sign_copies  PARAMS ((rtx, rtx, void *));
static int cant_combine_insn_p	PARAMS ((rtx));
static int can_combine_p	PARAMS ((rtx, rtx, rtx, rtx, rtx *, rtx *));
static int sets_function_arg_p	PARAMS ((rtx));
static int combinable_i3pat	PARAMS ((rtx, rtx *, rtx, rtx, int, rtx *));
static int contains_muldiv	PARAMS ((rtx));
static rtx try_combine		PARAMS ((rtx, rtx, rtx, int *));
static void undo_all		PARAMS ((void));
static void undo_commit		PARAMS ((void));
static rtx *find_split_point	PARAMS ((rtx *, rtx));
static rtx subst		PARAMS ((rtx, rtx, rtx, int, int));
static rtx combine_simplify_rtx	PARAMS ((rtx, enum machine_mode, int, int));
static rtx simplify_if_then_else  PARAMS ((rtx));
static rtx simplify_set		PARAMS ((rtx));
static rtx simplify_logical	PARAMS ((rtx, int));
static rtx expand_compound_operation  PARAMS ((rtx));
static rtx expand_field_assignment  PARAMS ((rtx));
static rtx make_extraction	PARAMS ((enum machine_mode, rtx, HOST_WIDE_INT,
					 rtx, unsigned HOST_WIDE_INT, int,
					 int, int));
static rtx extract_left_shift	PARAMS ((rtx, int));
static rtx make_compound_operation  PARAMS ((rtx, enum rtx_code));
static int get_pos_from_mask	PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned HOST_WIDE_INT *));
static rtx force_to_mode	PARAMS ((rtx, enum machine_mode,
					 unsigned HOST_WIDE_INT, rtx, int));
static rtx if_then_else_cond	PARAMS ((rtx, rtx *, rtx *));
static rtx known_cond		PARAMS ((rtx, enum rtx_code, rtx, rtx));
static int rtx_equal_for_field_assignment_p PARAMS ((rtx, rtx));
static rtx make_field_assignment  PARAMS ((rtx));
static rtx apply_distributive_law  PARAMS ((rtx));
static rtx simplify_and_const_int  PARAMS ((rtx, enum machine_mode, rtx,
					    unsigned HOST_WIDE_INT));
static unsigned HOST_WIDE_INT nonzero_bits  PARAMS ((rtx, enum machine_mode));
static unsigned int num_sign_bit_copies  PARAMS ((rtx, enum machine_mode));
static int merge_outer_ops	PARAMS ((enum rtx_code *, HOST_WIDE_INT *,
					 enum rtx_code, HOST_WIDE_INT,
					 enum machine_mode, int *));
static rtx simplify_shift_const	PARAMS ((rtx, enum rtx_code, enum machine_mode,
					 rtx, int));
static int recog_for_combine	PARAMS ((rtx *, rtx, rtx *));
static rtx gen_lowpart_for_combine  PARAMS ((enum machine_mode, rtx));
static rtx gen_rtx_combine PARAMS ((enum rtx_code code, enum machine_mode mode,
				    ...));
static rtx gen_binary		PARAMS ((enum rtx_code, enum machine_mode,
					 rtx, rtx));
static rtx gen_unary		PARAMS ((enum rtx_code, enum machine_mode,
					 enum machine_mode, rtx));
static enum rtx_code simplify_comparison  PARAMS ((enum rtx_code, rtx *, rtx *));
static int reversible_comparison_p  PARAMS ((rtx));
static void update_table_tick	PARAMS ((rtx));
static void record_value_for_reg  PARAMS ((rtx, rtx, rtx));
static void check_promoted_subreg PARAMS ((rtx, rtx));
static void record_dead_and_set_regs_1  PARAMS ((rtx, rtx, void *));
static void record_dead_and_set_regs  PARAMS ((rtx));
static int get_last_value_validate  PARAMS ((rtx *, rtx, int, int));
static rtx get_last_value	PARAMS ((rtx));
static int use_crosses_set_p	PARAMS ((rtx, int));
static void reg_dead_at_p_1	PARAMS ((rtx, rtx, void *));
static int reg_dead_at_p	PARAMS ((rtx, rtx));
static void move_deaths		PARAMS ((rtx, rtx, int, rtx, rtx *));
static int reg_bitfield_target_p  PARAMS ((rtx, rtx));
static void distribute_notes	PARAMS ((rtx, rtx, rtx, rtx, rtx, rtx));
static void distribute_links	PARAMS ((rtx));
static void mark_used_regs_combine PARAMS ((rtx));
static int insn_cuid		PARAMS ((rtx));
static void record_promoted_value PARAMS ((rtx, rtx));
\f
/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

static void
do_SUBST (into, newval)
     rtx *into, newval;
{
  struct undo *buf;
  rtx oldval = *into;

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = (struct undo *) xmalloc (sizeof (struct undo));

  buf->is_int = 0;
  buf->where.r = into;
  buf->old_contents.r = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST(INTO, NEWVAL)	do_SUBST(&(INTO), (NEWVAL))

/* Similar to SUBST, but NEWVAL is an int expression.  Note that substituting
   for the value of a HOST_WIDE_INT value (including CONST_INT) is
   not safe.  */

static void
do_SUBST_INT (into, newval)
     unsigned int *into, newval;
{
  struct undo *buf;
  unsigned int oldval = *into;

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = (struct undo *) xmalloc (sizeof (struct undo));

  buf->is_int = 1;
  buf->where.i = into;
  buf->old_contents.i = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST_INT(INTO, NEWVAL)  do_SUBST_INT(&(INTO), (NEWVAL))
\f
/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.

   Return non-zero if the combiner has turned an indirect jump
   instruction into a direct jump.  */
int
combine_instructions (f, nregs)
     rtx f;
     unsigned int nregs;
{
  register rtx insn, next;
#ifdef HAVE_cc0
  register rtx prev;
#endif
  register int i;
  register rtx links, nextlinks;

  int new_direct_jump_p = 0;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;

  combine_max_regno = nregs;

  reg_nonzero_bits = ((unsigned HOST_WIDE_INT *)
		      xcalloc (nregs, sizeof (unsigned HOST_WIDE_INT)));
  reg_sign_bit_copies
    = (unsigned char *) xcalloc (nregs, sizeof (unsigned char));

  reg_last_death = (rtx *) xmalloc (nregs * sizeof (rtx));
  reg_last_set = (rtx *) xmalloc (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) xmalloc (nregs * sizeof (rtx));
  reg_last_set_table_tick = (int *) xmalloc (nregs * sizeof (int));
  reg_last_set_label = (int *) xmalloc (nregs * sizeof (int));
  reg_last_set_invalid = (char *) xmalloc (nregs * sizeof (char));
  reg_last_set_mode
    = (enum machine_mode *) xmalloc (nregs * sizeof (enum machine_mode));
  reg_last_set_nonzero_bits
    = (unsigned HOST_WIDE_INT *) xmalloc (nregs * sizeof (HOST_WIDE_INT));
  reg_last_set_sign_bit_copies
    = (char *) xmalloc (nregs * sizeof (char));

  init_reg_last_arrays ();

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) xmalloc ((i + 1) * sizeof (int));
  max_uid_cuid = i;

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_nonzero_bits when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use it while searching
     for what bits are known to be set.  */

  label_tick = 1;

  /* We need to initialize it here, because record_dead_and_set_regs may call
     get_last_value.  */
  subst_prev_insn = NULL_RTX;

  setup_incoming_promotions ();

  refresh_blocks = sbitmap_alloc (n_basic_blocks);
  sbitmap_zero (refresh_blocks);
  need_refresh = 0;

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      uid_cuid[INSN_UID (insn)] = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (INSN_P (insn))
	{
	  note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies,
		       NULL);
	  record_dead_and_set_regs (insn);

#ifdef AUTO_INC_DEC
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    if (REG_NOTE_KIND (links) == REG_INC)
	      set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX,
						NULL);
#endif
	}

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */

  this_basic_block = -1;
  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  init_reg_last_arrays ();
  setup_incoming_promotions ();

  for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
    {
      next = 0;

      /* If INSN starts a new basic block, update our basic block number.  */
      if (this_basic_block + 1 < n_basic_blocks
	  && BLOCK_HEAD (this_basic_block + 1) == insn)
	this_basic_block++;

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;

      else if (INSN_P (insn))
	{
	  /* See if we know about function return values before this
	     insn based upon SUBREG flags.  */
	  check_promoted_subreg (insn, PATTERN (insn));

	  /* Try this insn with each insn it links back to.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if ((next = try_combine (insn, XEXP (links, 0),
				     NULL_RTX, &new_direct_jump_p)) != 0)
	      goto retry;

	  /* Try each sequence of three linked insns ending with this one.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    {
	      rtx link = XEXP (links, 0);

	      /* If the linked insn has been replaced by a note, then there
		 is no point in pursuing this chain any further.  */
	      if (GET_CODE (link) == NOTE)
		break;

	      for (nextlinks = LOG_LINKS (link);
		   nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, XEXP (links, 0),
					 XEXP (nextlinks, 0),
					 &new_direct_jump_p)) != 0)
		  goto retry;
	    }

#ifdef HAVE_cc0
	  /* Try to combine a jump insn that uses CC0
	     with a preceding insn that sets CC0, and maybe with its
	     logical predecessor as well.
	     This is how we make decrement-and-branch insns.
	     We need this special code because data flow connections
	     via CC0 do not get entered in LOG_LINKS.  */

	  if (GET_CODE (insn) == JUMP_INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev)))
	    {
	      if ((next = try_combine (insn, prev,
				       NULL_RTX, &new_direct_jump_p)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0),
					 &new_direct_jump_p)) != 0)
		  goto retry;
	    }

	  /* Do the same for an insn that explicitly references CC0.  */
	  if (GET_CODE (insn) == INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev))
	      && GET_CODE (PATTERN (insn)) == SET
	      && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
	    {
	      if ((next = try_combine (insn, prev,
				       NULL_RTX, &new_direct_jump_p)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0),
					 &new_direct_jump_p)) != 0)
		  goto retry;
	    }

	  /* Finally, see if any of the insns that this insn links to
	     explicitly reference CC0.  If so, try this insn, that insn,
	     and its predecessor if it sets CC0.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if (GET_CODE (XEXP (links, 0)) == INSN
		&& GET_CODE (PATTERN (XEXP (links, 0))) == SET
		&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		&& GET_CODE (prev) == INSN
		&& sets_cc0_p (PATTERN (prev))
		&& (next = try_combine (insn, XEXP (links, 0),
					prev, &new_direct_jump_p)) != 0)
	      goto retry;
#endif

	  /* Try combining an insn with two different insns whose results it
	     uses.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = XEXP (links, 1); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0),
				       &new_direct_jump_p)) != 0)
		goto retry;

	  if (GET_CODE (insn) != NOTE)
	    record_dead_and_set_regs (insn);

	retry:
	  ;
	}
    }

  if (need_refresh)
    {
      compute_bb_for_insn (get_max_uid ());
      update_life_info (refresh_blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
			PROP_DEATH_NOTES);
    }

  /* Clean up.  */
  sbitmap_free (refresh_blocks);
  free (reg_nonzero_bits);
  free (reg_sign_bit_copies);
  free (reg_last_death);
  free (reg_last_set);
  free (reg_last_set_value);
  free (reg_last_set_table_tick);
  free (reg_last_set_label);
  free (reg_last_set_invalid);
  free (reg_last_set_mode);
  free (reg_last_set_nonzero_bits);
  free (reg_last_set_sign_bit_copies);
  free (uid_cuid);

  {
    struct undo *undo, *next;
    for (undo = undobuf.frees; undo; undo = next)
      {
	next = undo->next;
	free (undo);
      }
    undobuf.frees = 0;
  }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;

  /* Make recognizer allow volatile MEMs again.  */
  init_recog ();

  return new_direct_jump_p;
}

/* Wipe the reg_last_xxx arrays in preparation for another pass.  */

static void
init_reg_last_arrays ()
{
  unsigned int nregs = combine_max_regno;

  memset ((char *) reg_last_death, 0, nregs * sizeof (rtx));
  memset ((char *) reg_last_set, 0, nregs * sizeof (rtx));
  memset ((char *) reg_last_set_value, 0, nregs * sizeof (rtx));
  memset ((char *) reg_last_set_table_tick, 0, nregs * sizeof (int));
  memset ((char *) reg_last_set_label, 0, nregs * sizeof (int));
  memset (reg_last_set_invalid, 0, nregs * sizeof (char));
  memset ((char *) reg_last_set_mode, 0, nregs * sizeof (enum machine_mode));
  memset ((char *) reg_last_set_nonzero_bits, 0, nregs * sizeof (HOST_WIDE_INT));
  memset (reg_last_set_sign_bit_copies, 0, nregs * sizeof (char));
}
\f
/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions ()
{
#ifdef PROMOTE_FUNCTION_ARGS
  unsigned int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(N) N
#endif
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    /* Check whether this register can hold an incoming pointer
       argument.  FUNCTION_ARG_REGNO_P tests outgoing register
       numbers, so translate if necessary due to register windows.  */
    if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (regno))
	&& (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
      {
	record_value_for_reg
	  (reg, first, gen_rtx_fmt_e ((unsignedp ? ZERO_EXTEND
				       : SIGN_EXTEND),
				      GET_MODE (reg),
				      gen_rtx_CLOBBER (mode, const0_rtx)));
      }
#endif
}
\f
/* Called via note_stores.  If X is a pseudo that is narrower than
   HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */
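
/* As a hypothetical illustration, on a 32-bit target where byte loads
   zero extend: if every SET of pseudo 70 has the form
   (set (reg:SI 70) (zero_extend:SI (mem:QI ...))), we end up with
   reg_nonzero_bits[70] == 0xff and reg_sign_bit_copies[70] == 24.  Had the
   loads used sign_extend instead, reg_nonzero_bits[70] would be the full
   mode mask but reg_sign_bit_copies[70] would be 25.  */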

static void
set_nonzero_bits_and_sign_copies (x, set, data)
     rtx x;
     rtx set;
     void *data ATTRIBUTE_UNUSED;
{
  unsigned int num;

  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      /* If this register is undefined at the start of the file, we can't
	 say what its contents were.  */
      && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, REGNO (x))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (set == 0 || GET_CODE (set) == CLOBBER)
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	  return;
	}

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
	 set what we know about X.  */

      if (SET_DEST (set) == x
	  || (GET_CODE (SET_DEST (set)) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
	      && SUBREG_REG (SET_DEST (set)) == x))
	{
	  rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
	  /* If X is narrower than a word and SRC is a non-negative
	     constant that would appear negative in the mode of X,
	     sign-extend it for use in reg_nonzero_bits because some
	     machines (maybe most) will actually do the sign-extension
	     and this is the conservative approach.

	     ??? For 2.5, try to tighten up the MD files in this regard
	     instead of this kludge.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
	      && GET_CODE (src) == CONST_INT
	      && INTVAL (src) > 0
	      && 0 != (INTVAL (src)
		       & ((HOST_WIDE_INT) 1
			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	    src = GEN_INT (INTVAL (src)
			   | ((HOST_WIDE_INT) (-1)
			      << GET_MODE_BITSIZE (GET_MODE (x))));
#endif

	  reg_nonzero_bits[REGNO (x)]
	    |= nonzero_bits (src, nonzero_bits_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (reg_sign_bit_copies[REGNO (x)] == 0
	      || reg_sign_bit_copies[REGNO (x)] > num)
	    reg_sign_bit_copies[REGNO (x)] = num;
	}
      else
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	}
    }
}
\f
/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */

static int
can_combine_p (insn, i3, pred, succ, pdest, psrc)
     rtx insn;
     rtx i3;
     rtx pred ATTRIBUTE_UNUSED;
     rtx succ;
     rtx *pdest, *psrc;
{
  int i;
  rtx set = 0, src, dest;
  rtx p;
#ifdef AUTO_INC_DEC
  rtx link;
#endif
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
			      && next_active_insn (succ) == i3)
		      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  switch (GET_CODE (elt))
	    {
	    /* This is important to combine floating point insns
	       for the SH4 port.  */
	    case USE:
	      /* Combining an isolated USE doesn't make sense.
		 We depend here on combinable_i3pat to reject them.  */
	      /* The code below this loop only verifies that the inputs of
		 the SET in INSN do not change.  We call reg_set_between_p
		 to verify that the REG in the USE does not change between
		 I3 and INSN.
		 If the USE in INSN was for a pseudo register, the matching
		 insn pattern will likely match any register; combining this
		 with any other USE would only be safe if we knew that the
		 used registers have identical values, or if there was
		 something to tell them apart, e.g. different modes.  For
		 now, we forgo such complicated tests and simply disallow
		 combining of USEs of pseudo registers with any other USE.  */
	      if (GET_CODE (XEXP (elt, 0)) == REG
		  && GET_CODE (PATTERN (i3)) == PARALLEL)
		{
		  rtx i3pat = PATTERN (i3);
		  int i = XVECLEN (i3pat, 0) - 1;
		  unsigned int regno = REGNO (XEXP (elt, 0));

		  do
		    {
		      rtx i3elt = XVECEXP (i3pat, 0, i);

		      if (GET_CODE (i3elt) == USE
			  && GET_CODE (XEXP (i3elt, 0)) == REG
			  && (REGNO (XEXP (i3elt, 0)) == regno
			      ? reg_set_between_p (XEXP (elt, 0),
						   PREV_INSN (insn), i3)
			      : regno >= FIRST_PSEUDO_REGISTER))
			return 0;
		    }
		  while (--i >= 0);
		}
	      break;

	    /* We can ignore CLOBBERs.  */
	    case CLOBBER:
	      break;

	    case SET:
	      /* Ignore SETs whose result isn't used but not those that
		 have side-effects.  */
	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
		  && ! side_effects_p (elt))
		break;

	      /* If we have already found a SET, this is a second one and
		 so we cannot combine with this insn.  */
	      if (set)
		return 0;

	      set = elt;
	      break;

	    default:
	      /* Anything else means we can't combine.  */
	      return 0;
	    }
	}

      if (set == 0
	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
	     so don't do anything with it.  */
	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
	return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store to the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* If we couldn't eliminate a field assignment, we can't combine.  */
      || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
      /* Don't combine with an insn that sets a register to itself if it has
	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge an ASM_OPERANDS.  */
      || GET_CODE (src) == ASM_OPERANDS
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't eliminate a function call argument.  */
      || (GET_CODE (i3) == CALL_INSN
	  && (find_reg_fusage (i3, USE, dest)
	      || (GET_CODE (dest) == REG
		  && REGNO (dest) < FIRST_PSEUDO_REGISTER
		  && global_regs[REGNO (dest)])))
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
#if 0
      /* Don't combine the end of a libcall into anything.  */
      /* ??? This gives worse code, and appears to be unnecessary, since no
	 pass after flow uses REG_LIBCALL/REG_RETVAL notes.  Local-alloc does
	 use REG_RETVAL notes for noconflict blocks, but other code here
	 makes sure that those insns don't disappear.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
#endif
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
	  && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
	 does not use any registers whose values alter in between.  However,
	 if the insns are adjacent, a use can't cross a set even though we
	 think it might (this can happen for a sequence of insns each setting
	 the same destination; reg_last_set of that register might point to
	 a NOTE).  If INSN has a REG_EQUIV note, the register is always
	 equivalent to the memory so the substitution is valid even if there
	 are intervening stores.  Also, don't move a volatile asm or
	 UNSPEC_VOLATILE across any other insns.  */
      || (! all_adjacent
	  && (((GET_CODE (src) != MEM
		|| ! find_reg_note (insn, REG_EQUIV, src))
	       && use_crosses_set_p (src, INSN_CUID (insn)))
	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
	      || GET_CODE (src) == UNSPEC_VOLATILE))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
	 better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
	 change whether the life span of some REGs crosses calls or not,
	 and it is a pain to update that information.
	 Exception: if source is a constant, moving it later can't hurt.
	 Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must be either a REG or CC0.  */
  if (GET_CODE (dest) == REG)
    {
      /* If register alignment is being enforced for multi-word items in all
	 cases except for parameters, it is possible to have a register copy
	 insn referencing a hard register that is not allowed to contain the
	 mode being copied and which would not be valid as an operand of most
	 insns.  Eliminate this problem by not combining with such an insn.

	 Also, on some machines we don't want to extend the life of a hard
	 register.

	 This is the same test done in can_combine_p except that we don't test
	 if SRC is a CALL operation to permit a hard register with
	 SMALL_REGISTER_CLASSES, and that we have to take all_adjacent
	 into account.  */

      if (GET_CODE (src) == REG
	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
	      /* Don't extend the life of a hard register unless it is
		 a user variable (if we have few registers) or it can't
		 fit into the desired register (meaning something special
		 is going on).
		 Also avoid substituting a return register into I3, because
		 reload can't handle a conflict with constraints of other
		 inputs.  */
	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
		  && (! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src))
		      || (SMALL_REGISTER_CLASSES
			  && ((! all_adjacent && ! REG_USERVAR_P (src))
			      || (FUNCTION_VALUE_REGNO_P (REGNO (src))
				  && ! REG_USERVAR_P (src))))))))
	return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;

  /* Don't substitute for a register intended as a clobberable operand.
     Similarly, don't substitute an expression containing a register that
     will be clobbered in I3.  */
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
	  && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
				       src)
	      || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
	return 0;

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    {
      /* Make sure succ doesn't contain a volatile reference.  */
      if (succ != 0 && volatile_refs_p (PATTERN (succ)))
	return 0;

      for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
	if (INSN_P (p) && p != succ && volatile_refs_p (PATTERN (p)))
	  return 0;
    }

  /* If INSN is an asm, and DEST is a hard register, reject, since it has
     to be an explicit register variable, and was chosen for a reason.  */

  if (GET_CODE (src) == ASM_OPERANDS
      && GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    return 0;

  /* If there are any volatile insns between INSN and I3, reject, because
     they might affect machine state.  */

  for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
    if (INSN_P (p) && p != succ && volatile_insn_p (PATTERN (p)))
      return 0;

  /* If INSN or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3,
     and not already used in I3 either.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (GET_CODE (i3) == JUMP_INSN
	    || reg_used_between_p (XEXP (link, 0), insn, i3)
	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}
\f
/* Check if PAT is an insn - or a part of it - used to set up an
   argument for a function in a hard register.  */

static int
sets_function_arg_p (pat)
     rtx pat;
{
  int i;
  rtx inner_dest;

  switch (GET_CODE (pat))
    {
    case INSN:
      return sets_function_arg_p (PATTERN (pat));

    case PARALLEL:
      for (i = XVECLEN (pat, 0); --i >= 0;)
	if (sets_function_arg_p (XVECEXP (pat, 0, i)))
	  return 1;

      break;

    case SET:
      inner_dest = SET_DEST (pat);
      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      return (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
	      && FUNCTION_ARG_REGNO_P (REGNO (inner_dest)));

    default:
      break;
    }

  return 0;
}

/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST as doing
   so would produce an insn that is not equivalent to the original insns.

   Consider:

	 (set (reg:DI 101) (reg:DI 100))
	 (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

	 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
		    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   On machines where SMALL_REGISTER_CLASSES is non-zero, we don't combine
   if the destination of a SET is a hard register that isn't a user
   variable.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
     rtx i3;
     rtx *loc;
     rtx i2dest;
     rtx i1dest;
     int i1_not_in_src;
     rtx *pi3dest_killed;
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = expand_field_assignment (x);
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest;

#if 0
      rtx inner_src = src;
#endif

      SUBST (*loc, set);

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
	 was added.  */
#if 0
      while (GET_CODE (inner_src) == STRICT_LOW_PART
	     || GET_CODE (inner_src) == SUBREG
	     || GET_CODE (inner_src) == ZERO_EXTRACT)
	inner_src = XEXP (inner_src, 0);

      /* If it is better that two different modes keep two different pseudos,
	 avoid combining them.  This avoids producing the following pattern
	 on a 386:
	  (set (subreg:SI (reg/v:QI 21) 0)
	       (lshiftrt:SI (reg/v:SI 20)
		   (const_int 24)))
	 If that were made, reload could not handle the pair of
	 reg 20/21, since it would try to get any GENERAL_REGS
	 but some of them don't handle QImode.  */

      if (rtx_equal_p (inner_src, i2dest)
	  && GET_CODE (inner_dest) == REG
	  && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
	return 0;
#endif

      /* Check for the case where I3 modifies its output, as
	 discussed above.  */
      if ((inner_dest != dest
	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))

	  /* This is the same test done in can_combine_p except that we
	     allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
	     CALL operation.  Moreover, we can't test all_adjacent; we don't
	     have to, since this instruction will stay in place, thus we are
	     not considering increasing the lifetime of INNER_DEST.

	     Also, if this insn sets a function argument, combining it with
	     something that might need a spill could clobber a previous
	     function argument; the all_adjacent test in can_combine_p also
	     checks this; here, we do a more specific test for this case.  */

	  || (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
	      && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
					GET_MODE (inner_dest))
		  || (SMALL_REGISTER_CLASSES && GET_CODE (src) != CALL
		      && ! REG_USERVAR_P (inner_dest)
		      && (FUNCTION_VALUE_REGNO_P (REGNO (inner_dest))
			  || (FUNCTION_ARG_REGNO_P (REGNO (inner_dest))
			      && i3 != 0
			      && sets_function_arg_p (prev_nonnote_insn (i3)))))))
	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
	return 0;

      /* If DEST is used in I3, it is being killed in this insn,
	 so record that for later.
	 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
	 STACK_POINTER_REGNUM, since these are always considered to be
	 live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
      if (pi3dest_killed && GET_CODE (dest) == REG
	  && reg_referenced_p (dest, PATTERN (i3))
	  && REGNO (dest) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && (REGNO (dest) != ARG_POINTER_REGNUM
	      || ! fixed_regs [REGNO (dest)])
#endif
	  && REGNO (dest) != STACK_POINTER_REGNUM)
	{
	  if (*pi3dest_killed)
	    return 0;

	  *pi3dest_killed = dest;
	}
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
				i1_not_in_src, pi3dest_killed))
	  return 0;
    }

  return 1;
}
\f
/* Return 1 if X is an arithmetic expression that contains a multiplication
   or division.  We don't count multiplications by powers of two here.  */
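
/* For instance, (plus:SI (div:SI (reg 60) (reg 61)) (reg 62)) contains a
   division, so contains_muldiv returns 1; but (plus:SI (mult:SI (reg 60)
   (const_int 4)) (reg 62)) multiplies by a power of two, so it returns 0.  */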

static int
contains_muldiv (x)
     rtx x;
{
  switch (GET_CODE (x))
    {
    case MOD:  case DIV:  case UMOD:  case UDIV:
      return 1;

    case MULT:
      return ! (GET_CODE (XEXP (x, 1)) == CONST_INT
		&& exact_log2 (INTVAL (XEXP (x, 1))) >= 0);
    default:
      switch (GET_RTX_CLASS (GET_CODE (x)))
	{
	case 'c':  case '<':  case '2':
	  return contains_muldiv (XEXP (x, 0))
	    || contains_muldiv (XEXP (x, 1));

	case '1':
	  return contains_muldiv (XEXP (x, 0));

	default:
	  return 0;
	}
    }
}
\f
/* Determine whether INSN can be used in a combination.  Return nonzero if
   not.  This is used in try_combine to detect early some cases where we
   can't perform combinations.  */

static int
cant_combine_insn_p (insn)
     rtx insn;
{
  rtx set;
  rtx src, dest;

  /* If this isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  */
  if (! INSN_P (insn))
    return 1;

  /* Never combine loads and stores involving hard regs.  The register
     allocator can usually handle such reg-reg moves by tying.  If we allow
     the combiner to make substitutions of hard regs, we risk aborting in
     reload on machines that have SMALL_REGISTER_CLASSES.
     As an exception, we allow combinations involving fixed regs; these are
     not available to the register allocator so there's no risk involved.  */

  set = single_set (insn);
  if (! set)
    return 0;
  src = SET_SRC (set);
  dest = SET_DEST (set);
  if (REG_P (src)
      && REGNO (src) < FIRST_PSEUDO_REGISTER
      && ! fixed_regs[REGNO (src)])
    return 1;
  if (REG_P (dest)
      && REGNO (dest) < FIRST_PSEUDO_REGISTER
      && ! fixed_regs[REGNO (dest)])
    return 1;
  return 0;
}

/* Try to combine the insns I1 and I2 into I3.
   Here I1 and I2 appear earlier than I3.
   I1 can be zero; then we combine just I2 into I3.

   If we are combining three insns and the resulting insn is not recognized,
   try splitting it into two insns.  If that happens, I2 and I3 are retained
   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
   are pseudo-deleted.

   Return 0 if the combination does not work.  Then nothing is changed.
   If we did the combination, return the insn at which combine should
   resume scanning.

   Set NEW_DIRECT_JUMP_P to a non-zero value if try_combine creates a
   new direct jump instruction.  */

static rtx
try_combine (i3, i2, i1, new_direct_jump_p)
     register rtx i3, i2, i1;
     register int *new_direct_jump_p;
{
  /* New patterns for I3 and I2, respectively.  */
  rtx newpat, newi2pat = 0;
  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
  int added_sets_1, added_sets_2;
  /* Total number of SETs to put into I3.  */
  int total_sets;
  /* Nonzero if I2's body now appears in I3.  */
  int i2_is_used;
  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
  int insn_code_number, i2_code_number = 0, other_code_number = 0;
  /* Contains I3 if the destination of I3 is used in its source, which means
     that the old life of I3 is being killed.  If that usage is placed into
     I2 and not in I3, a REG_DEAD note must be made.  */
  rtx i3dest_killed = 0;
  /* SET_DEST and SET_SRC of I2 and I1.  */
  rtx i2dest, i2src, i1dest = 0, i1src = 0;
  /* PATTERN (I2), or a copy of it in certain cases.  */
  rtx i2pat;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC.  */
  int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
  int i1_feeds_i3 = 0;
  /* Notes that must be added to REG_NOTES in I3 and I2.  */
  rtx new_i3_notes, new_i2_notes;
  /* Notes that we substituted I3 into I2 instead of the normal case.  */
  int i3_subst_into_i2 = 0;
  /* Notes that I1, I2 or I3 is a MULT operation.  */
  int have_mult = 0;

  int maxreg;
  rtx temp;
  register rtx link;
  int i;

  /* Exit early if one of the insns involved can't be used for
     combinations.  */
  if (cant_combine_insn_p (i3)
      || cant_combine_insn_p (i2)
      || (i1 && cant_combine_insn_p (i1))
      /* We also can't do anything if I3 has a
	 REG_LIBCALL note since we don't want to disrupt the contiguity of a
	 libcall.  */
#if 0
      /* ??? This gives worse code, and appears to be unnecessary, since no
	 pass after flow uses REG_LIBCALL/REG_RETVAL notes.  */
      || find_reg_note (i3, REG_LIBCALL, NULL_RTX)
#endif
      )
    return 0;

  combine_attempts++;
  undobuf.other_insn = 0;

  /* Reset the hard register usage information.  */
  CLEAR_HARD_REG_SET (newpat_used_regs);

  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
     code below, set I1 to be the earlier of the two insns.  */
  if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
    temp = i1, i1 = i2, i2 = temp;

  added_links_insn = 0;

  /* First check for one important special-case that the code below will
     not handle.  Namely, the case where I1 is zero, I2 has multiple sets,
     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
     we may be able to replace that destination with the destination of I3.
     This occurs in the common code where we compute both a quotient and
     remainder into a structure, in which case we want to do the computation
     directly into the structure to avoid register-register copies.

     We make very conservative checks below and only try to handle the
     most common cases of this.  For example, we only handle the case
     where I2 and I3 are adjacent to avoid making difficult register
     usage tests.  */
1594
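 /* A hypothetical sketch of this case (register numbers invented for
 illustration): I2 might be
 (parallel [(set (reg 100) (div:SI (reg 98) (reg 99)))
 (set (reg 101) (mod:SI (reg 98) (reg 99)))])
 with I3 being (set (mem:SI (reg 102)) (reg 101)) and (reg 101)
 dying in I3; we then rewrite the second SET of I2 to store the
 remainder into (mem:SI (reg 102)) directly. */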
1595 if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
1596 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1597 && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1598 && (! SMALL_REGISTER_CLASSES
1599 || (GET_CODE (SET_DEST (PATTERN (i3))) != REG
1600 || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1601 || REG_USERVAR_P (SET_DEST (PATTERN (i3)))))
1602 && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
1603 && GET_CODE (PATTERN (i2)) == PARALLEL
1604 && ! side_effects_p (SET_DEST (PATTERN (i3)))
1605 /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1606 below would need to check what is inside (and reg_overlap_mentioned_p
1607 doesn't support those codes anyway). Don't allow those destinations;
1608 the resulting insn isn't likely to be recognized anyway. */
1609 && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
1610 && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
1611 && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
1612 SET_DEST (PATTERN (i3)))
1613 && next_real_insn (i2) == i3)
1614 {
1615 rtx p2 = PATTERN (i2);
1616
1617 /* Make sure that the destination of I3,
1618 which we are going to substitute into one output of I2,
1619 is not used within another output of I2. We must avoid making this:
1620 (parallel [(set (mem (reg 69)) ...)
1621 (set (reg 69) ...)])
1622 which is not well-defined as to order of actions.
1623 (Besides, reload can't handle output reloads for this.)
1624
1625 The problem can also happen if the dest of I3 is a memory ref,
1626 if another dest in I2 is an indirect memory ref. */
1627 for (i = 0; i < XVECLEN (p2, 0); i++)
1628 if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1629 || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
1630 && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
1631 SET_DEST (XVECEXP (p2, 0, i))))
1632 break;
1633
1634 if (i == XVECLEN (p2, 0))
1635 for (i = 0; i < XVECLEN (p2, 0); i++)
1636 if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1637 || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
1638 && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
1639 {
1640 combine_merges++;
1641
1642 subst_insn = i3;
1643 subst_low_cuid = INSN_CUID (i2);
1644
1645 added_sets_2 = added_sets_1 = 0;
1646 i2dest = SET_SRC (PATTERN (i3));
1647
1648 /* Replace the dest in I2 with our dest and make the resulting
1649 insn the new pattern for I3. Then skip to where we
1650 validate the pattern. Everything was set up above. */
1651 SUBST (SET_DEST (XVECEXP (p2, 0, i)),
1652 SET_DEST (PATTERN (i3)));
1653
1654 newpat = p2;
1655 i3_subst_into_i2 = 1;
1656 goto validate_replacement;
1657 }
1658 }
1659
1660 /* If I2 is setting a double-word pseudo to a constant and I3 is setting
1661 one of those words to another constant, merge them by making a new
1662 constant. */
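 /* Illustrative sketch, assuming a 32-bit little-endian target and
 invented register numbers: I2 as (set (reg:DI 100) (const_int 0))
 followed by I3 as (set (subreg:SI (reg:DI 100) 0) (const_int 5))
 merges into a single (set (reg:DI 100) ...) whose constant is
 rebuilt with immed_double_const. */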
1663 if (i1 == 0
1664 && (temp = single_set (i2)) != 0
1665 && (GET_CODE (SET_SRC (temp)) == CONST_INT
1666 || GET_CODE (SET_SRC (temp)) == CONST_DOUBLE)
1667 && GET_CODE (SET_DEST (temp)) == REG
1668 && GET_MODE_CLASS (GET_MODE (SET_DEST (temp))) == MODE_INT
1669 && GET_MODE_SIZE (GET_MODE (SET_DEST (temp))) == 2 * UNITS_PER_WORD
1670 && GET_CODE (PATTERN (i3)) == SET
1671 && GET_CODE (SET_DEST (PATTERN (i3))) == SUBREG
1672 && SUBREG_REG (SET_DEST (PATTERN (i3))) == SET_DEST (temp)
1673 && GET_MODE_CLASS (GET_MODE (SET_DEST (PATTERN (i3)))) == MODE_INT
1674 && GET_MODE_SIZE (GET_MODE (SET_DEST (PATTERN (i3)))) == UNITS_PER_WORD
1675 && GET_CODE (SET_SRC (PATTERN (i3))) == CONST_INT)
1676 {
1677 HOST_WIDE_INT lo, hi;
1678
1679 if (GET_CODE (SET_SRC (temp)) == CONST_INT)
1680 lo = INTVAL (SET_SRC (temp)), hi = lo < 0 ? -1 : 0;
1681 else
1682 {
1683 lo = CONST_DOUBLE_LOW (SET_SRC (temp));
1684 hi = CONST_DOUBLE_HIGH (SET_SRC (temp));
1685 }
1686
1687 if (subreg_lowpart_p (SET_DEST (PATTERN (i3))))
1688 lo = INTVAL (SET_SRC (PATTERN (i3)));
1689 else
1690 hi = INTVAL (SET_SRC (PATTERN (i3)));
1691
1692 combine_merges++;
1693 subst_insn = i3;
1694 subst_low_cuid = INSN_CUID (i2);
1695 added_sets_2 = added_sets_1 = 0;
1696 i2dest = SET_DEST (temp);
1697
1698 SUBST (SET_SRC (temp),
1699 immed_double_const (lo, hi, GET_MODE (SET_DEST (temp))));
1700
1701 newpat = PATTERN (i2);
1702 i3_subst_into_i2 = 1;
1703 goto validate_replacement;
1704 }
1705
1706 #ifndef HAVE_cc0
1707 /* If we have no I1 and I2 looks like:
1708 (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1709 (set Y OP)])
1710 make up a dummy I1 that is
1711 (set Y OP)
1712 and change I2 to be
1713 (set (reg:CC X) (compare:CC Y (const_int 0)))
1714
1715 (We can ignore any trailing CLOBBERs.)
1716
1717 This undoes a previous combination and allows us to match a branch-and-
1718 decrement insn. */
1719
1720 if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
1721 && XVECLEN (PATTERN (i2), 0) >= 2
1722 && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
1723 && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
1724 == MODE_CC)
1725 && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
1726 && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
1727 && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
1728 && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
1729 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
1730 SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
1731 {
1732 for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
1733 if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
1734 break;
1735
1736 if (i == 1)
1737 {
1738 /* We make I1 with the same INSN_UID as I2. This gives it
1739 the same INSN_CUID for value tracking. Our fake I1 will
1740 never appear in the insn stream so giving it the same INSN_UID
1741 as I2 will not cause a problem. */
1742
1743 subst_prev_insn = i1
1744 = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
1745 XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX,
1746 NULL_RTX);
1747
1748 SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
1749 SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
1750 SET_DEST (PATTERN (i1)));
1751 }
1752 }
1753 #endif
1754
1755 /* Verify that I2 and I1 are valid for combining. */
1756 if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
1757 || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
1758 {
1759 undo_all ();
1760 return 0;
1761 }
1762
1763 /* Record whether I2DEST is used in I2SRC and similarly for the other
1764 cases. Knowing this will help in register status updating below. */
1765 i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
1766 i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
1767 i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
1768
1769 /* See if I1 directly feeds into I3. It does if I1DEST is not used
1770 in I2SRC. */
1771 i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
1772
1773 /* Ensure that I3's pattern can be the destination of combines. */
1774 if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
1775 i1 && i2dest_in_i1src && i1_feeds_i3,
1776 &i3dest_killed))
1777 {
1778 undo_all ();
1779 return 0;
1780 }
1781
1782 /* See if any of the insns is a MULT operation. Unless one is, we will
1783 reject a combination that is, since it must be slower. Be conservative
1784 here. */
1785 if (GET_CODE (i2src) == MULT
1786 || (i1 != 0 && GET_CODE (i1src) == MULT)
1787 || (GET_CODE (PATTERN (i3)) == SET
1788 && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
1789 have_mult = 1;
1790
1791 /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1792 We used to do this EXCEPT in one case: I3 has a post-inc in an
1793 output operand. However, that exception can give rise to insns like
1794 mov r3,(r3)+
1795 which is a famous insn on the PDP-11 where the value of r3 used as the
1796 source was model-dependent. Avoid this sort of thing. */
1797
1798 #if 0
1799 if (!(GET_CODE (PATTERN (i3)) == SET
1800 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1801 && GET_CODE (SET_DEST (PATTERN (i3))) == MEM
1802 && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
1803 || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
1804 /* It's not the exception. */
1805 #endif
1806 #ifdef AUTO_INC_DEC
1807 for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
1808 if (REG_NOTE_KIND (link) == REG_INC
1809 && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
1810 || (i1 != 0
1811 && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
1812 {
1813 undo_all ();
1814 return 0;
1815 }
1816 #endif
1817
1818 /* See if the SETs in I1 or I2 need to be kept around in the merged
1819 instruction: whenever the value set there is still needed past I3.
1820 For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1821
1822 For the SET in I1, we have two cases: If I1 and I2 independently
1823 feed into I3, the set in I1 needs to be kept around if I1DEST dies
1824 or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
1825 in I1 needs to be kept around unless I1DEST dies or is set in either
1826 I2 or I3. We can distinguish these cases by seeing if I2SRC mentions
1827 I1DEST. If so, we know I1 feeds into I2. */
1828
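 /* For example (hypothetical), if I2 is (set (reg 100) ...) and
 (reg 100) neither dies nor is set in I3, the value is still needed
 afterward, so ADDED_SETS_2 below will be nonzero and I2's SET will
 reappear in the new PARALLEL. */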
1829 added_sets_2 = ! dead_or_set_p (i3, i2dest);
1830
1831 added_sets_1
1832 = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
1833 : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
1834
1835 /* If the set in I2 needs to be kept around, we must make a copy of
1836 PATTERN (I2), so that when we substitute I1SRC for I1DEST in
1837 PATTERN (I2), we are only substituting for the original I1DEST, not into
1838 an already-substituted copy. This also prevents making self-referential
1839 rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1840 I2DEST. */
1841
1842 i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
1843 ? gen_rtx_SET (VOIDmode, i2dest, i2src)
1844 : PATTERN (i2));
1845
1846 if (added_sets_2)
1847 i2pat = copy_rtx (i2pat);
1848
1849 combine_merges++;
1850
1851 /* Substitute in the latest insn for the regs set by the earlier ones. */
1852
1853 maxreg = max_reg_num ();
1854
1855 subst_insn = i3;
1856
1857 /* It is possible that the source of I2 or I1 may be performing an
1858 unneeded operation, such as a ZERO_EXTEND of something that is known
1859 to have the high part zero. Handle that case by letting subst look at
1860 the innermost one of them.
1861
1862 Another way to do this would be to have a function that tries to
1863 simplify a single insn instead of merging two or more insns. We don't
1864 do this because of the potential of infinite loops and because
1865 of the potential extra memory required. However, doing it the way
1866 we are is a bit of a kludge and doesn't catch all cases.
1867
1868 But only do this if -fexpensive-optimizations since it slows things down
1869 and doesn't usually win. */
1870
1871 if (flag_expensive_optimizations)
1872 {
1873 /* Pass pc_rtx so no substitutions are done, just simplifications.
1874 The cases that we are interested in here do not involve the few
1875 cases where is_replaced is checked. */
1876 if (i1)
1877 {
1878 subst_low_cuid = INSN_CUID (i1);
1879 i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1880 }
1881 else
1882 {
1883 subst_low_cuid = INSN_CUID (i2);
1884 i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1885 }
1886
1887 undobuf.previous_undos = undobuf.undos;
1888 }
1889
1890 #ifndef HAVE_cc0
1891 /* Many machines that don't use CC0 have insns that can both perform an
1892 arithmetic operation and set the condition code. These operations will
1893 be represented as a PARALLEL with the first element of the vector
1894 being a COMPARE of an arithmetic operation with the constant zero.
1895 The second element of the vector will set some pseudo to the result
1896 of the same arithmetic operation. If we simplify the COMPARE, we won't
1897 match such a pattern and so will generate an extra insn. Here we test
1898 for this case, where both the comparison and the operation result are
1899 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1900 I2SRC. Later we will make the PARALLEL that contains I2. */
1901
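 /* Sketch of the shape involved (register numbers hypothetical):
 I2: (set (reg 100) (plus:SI (reg 99) (const_int -1)))
 I3: (set (reg:CC 24) (compare:CC (reg 100) (const_int 0)))
 Substituting I2SRC here turns I3's source into a COMPARE of the
 PLUS against zero, and the PARALLEL built later re-adds the SET
 of (reg 100). */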
1902 if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1903 && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1904 && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1905 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1906 {
1907 #ifdef EXTRA_CC_MODES
1908 rtx *cc_use;
1909 enum machine_mode compare_mode;
1910 #endif
1911
1912 newpat = PATTERN (i3);
1913 SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1914
1915 i2_is_used = 1;
1916
1917 #ifdef EXTRA_CC_MODES
1918 /* See if a COMPARE with the operand we substituted in should be done
1919 with the mode that is currently being used. If not, do the same
1920 processing we do in `subst' for a SET; namely, if the destination
1921 is used only once, try to replace it with a register of the proper
1922 mode and also replace the COMPARE. */
1923 if (undobuf.other_insn == 0
1924 && (cc_use = find_single_use (SET_DEST (newpat), i3,
1925 &undobuf.other_insn))
1926 && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1927 i2src, const0_rtx))
1928 != GET_MODE (SET_DEST (newpat))))
1929 {
1930 unsigned int regno = REGNO (SET_DEST (newpat));
1931 rtx new_dest = gen_rtx_REG (compare_mode, regno);
1932
1933 if (regno < FIRST_PSEUDO_REGISTER
1934 || (REG_N_SETS (regno) == 1 && ! added_sets_2
1935 && ! REG_USERVAR_P (SET_DEST (newpat))))
1936 {
1937 if (regno >= FIRST_PSEUDO_REGISTER)
1938 SUBST (regno_reg_rtx[regno], new_dest);
1939
1940 SUBST (SET_DEST (newpat), new_dest);
1941 SUBST (XEXP (*cc_use, 0), new_dest);
1942 SUBST (SET_SRC (newpat),
1943 gen_rtx_combine (COMPARE, compare_mode,
1944 i2src, const0_rtx));
1945 }
1946 else
1947 undobuf.other_insn = 0;
1948 }
1949 #endif
1950 }
1951 else
1952 #endif
1953 {
1954 n_occurrences = 0; /* `subst' counts here */
1955
1956 /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1957 need to make a unique copy of I2SRC each time we substitute it
1958 to avoid self-referential rtl. */
1959
1960 subst_low_cuid = INSN_CUID (i2);
1961 newpat = subst (PATTERN (i3), i2dest, i2src, 0,
1962 ! i1_feeds_i3 && i1dest_in_i1src);
1963 undobuf.previous_undos = undobuf.undos;
1964
1965 /* Record whether i2's body now appears within i3's body. */
1966 i2_is_used = n_occurrences;
1967 }
1968
1969 /* If we already got a failure, don't try to do more. Otherwise,
1970 try to substitute in I1 if we have it. */
1971
1972 if (i1 && GET_CODE (newpat) != CLOBBER)
1973 {
1974 /* Before we can do this substitution, we must redo the test done
1975 above (see detailed comments there) that ensures that I1DEST
1976 isn't mentioned in any SETs in NEWPAT that are field assignments. */
1977
1978 if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
1979 0, NULL_PTR))
1980 {
1981 undo_all ();
1982 return 0;
1983 }
1984
1985 n_occurrences = 0;
1986 subst_low_cuid = INSN_CUID (i1);
1987 newpat = subst (newpat, i1dest, i1src, 0, 0);
1988 undobuf.previous_undos = undobuf.undos;
1989 }
1990
1991 /* Fail if an autoincrement side-effect has been duplicated. Be careful
1992 to count all the ways that I2SRC and I1SRC can be used. */
1993 if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
1994 && i2_is_used + added_sets_2 > 1)
1995 || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
1996 && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
1997 > 1))
1998 /* Fail if we tried to make a new register (we used to abort, but there's
1999 really no reason to). */
2000 || max_reg_num () != maxreg
2001 /* Fail if we couldn't do something and have a CLOBBER. */
2002 || GET_CODE (newpat) == CLOBBER
2003 /* Fail if this new pattern is a MULT and we didn't have one before
2004 at the outer level. */
2005 || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
2006 && ! have_mult))
2007 {
2008 undo_all ();
2009 return 0;
2010 }
2011
2012 /* If the actions of the earlier insns must be kept
2013 in addition to substituting them into the latest one,
2014 we must make a new PARALLEL for the latest insn
2015 to hold the additional SETs. */
2016
2017 if (added_sets_1 || added_sets_2)
2018 {
2019 combine_extras++;
2020
2021 if (GET_CODE (newpat) == PARALLEL)
2022 {
2023 rtvec old = XVEC (newpat, 0);
2024 total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
2025 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
2026 bcopy ((char *) &old->elem[0], (char *) XVEC (newpat, 0)->elem,
2027 sizeof (old->elem[0]) * old->num_elem);
2028 }
2029 else
2030 {
2031 rtx old = newpat;
2032 total_sets = 1 + added_sets_1 + added_sets_2;
2033 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
2034 XVECEXP (newpat, 0, 0) = old;
2035 }
2036
2037 if (added_sets_1)
2038 XVECEXP (newpat, 0, --total_sets)
2039 = (GET_CODE (PATTERN (i1)) == PARALLEL
2040 ? gen_rtx_SET (VOIDmode, i1dest, i1src) : PATTERN (i1));
2041
2042 if (added_sets_2)
2043 {
2044 /* If there is no I1, use I2's body as is. We used to also not do
2045 the subst call below if I2 was substituted into I3,
2046 but that could lose a simplification. */
2047 if (i1 == 0)
2048 XVECEXP (newpat, 0, --total_sets) = i2pat;
2049 else
2050 /* See comment where i2pat is assigned. */
2051 XVECEXP (newpat, 0, --total_sets)
2052 = subst (i2pat, i1dest, i1src, 0, 0);
2053 }
2054 }
2055
2056 /* We come here when we are replacing a destination in I2 with the
2057 destination of I3. */
2058 validate_replacement:
2059
2060 /* Note which hard regs this insn has as inputs. */
2061 mark_used_regs_combine (newpat);
2062
2063 /* Is the result of combination a valid instruction? */
2064 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2065
2066 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
2067 the second SET's destination is a register that is unused. In that case,
2068 we just need the first SET. This can occur when simplifying a divmod
2069 insn. We *must* test for this case here because the code below that
2070 splits two independent SETs doesn't handle this case correctly when it
2071 updates the register status. Also check the case where the first
2072 SET's destination is unused. That would not cause incorrect code, but
2073 does cause an unneeded insn to remain. */
2074
2075 if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
2076 && XVECLEN (newpat, 0) == 2
2077 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2078 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2079 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
2080 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
2081 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
2082 && asm_noperands (newpat) < 0)
2083 {
2084 newpat = XVECEXP (newpat, 0, 0);
2085 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2086 }
2087
2088 else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
2089 && XVECLEN (newpat, 0) == 2
2090 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2091 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2092 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
2093 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
2094 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
2095 && asm_noperands (newpat) < 0)
2096 {
2097 newpat = XVECEXP (newpat, 0, 1);
2098 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2099 }
2100
2101 /* If we were combining three insns and the result is a simple SET
2102 with no ASM_OPERANDS that wasn't recognized, try to split it into two
2103 insns. There are two ways to do this. It can be split using a
2104 machine-specific method (like when you have an addition of a large
2105 constant) or by combine in the function find_split_point. */
2106
2107 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
2108 && asm_noperands (newpat) < 0)
2109 {
2110 rtx m_split, *split;
2111 rtx ni2dest = i2dest;
2112
2113 /* See if the MD file can split NEWPAT. If it can't, see if letting it
2114 use I2DEST as a scratch register will help. In the latter case,
2115 convert I2DEST to the mode of the source of NEWPAT if we can. */
2116
2117 m_split = split_insns (newpat, i3);
2118
2119 /* We can only use I2DEST as a scratch reg if it doesn't overlap any
2120 inputs of NEWPAT. */
2121
2122 /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
2123 possible to try that as a scratch reg. This would require adding
2124 more code to make it work though. */
2125
2126 if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
2127 {
2128 /* If I2DEST is a hard register or the only use of a pseudo,
2129 we can change its mode. */
2130 if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
2131 && GET_MODE (SET_DEST (newpat)) != VOIDmode
2132 && GET_CODE (i2dest) == REG
2133 && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
2134 || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
2135 && ! REG_USERVAR_P (i2dest))))
2136 ni2dest = gen_rtx_REG (GET_MODE (SET_DEST (newpat)),
2137 REGNO (i2dest));
2138
2139 m_split = split_insns (gen_rtx_PARALLEL
2140 (VOIDmode,
2141 gen_rtvec (2, newpat,
2142 gen_rtx_CLOBBER (VOIDmode,
2143 ni2dest))),
2144 i3);
2145 }
2146
2147 if (m_split && GET_CODE (m_split) != SEQUENCE)
2148 {
2149 insn_code_number = recog_for_combine (&m_split, i3, &new_i3_notes);
2150 if (insn_code_number >= 0)
2151 newpat = m_split;
2152 }
2153 else if (m_split && GET_CODE (m_split) == SEQUENCE
2154 && XVECLEN (m_split, 0) == 2
2155 && (next_real_insn (i2) == i3
2156 || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
2157 INSN_CUID (i2))))
2158 {
2159 rtx i2set, i3set;
2160 rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
2161 newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
2162
2163 i3set = single_set (XVECEXP (m_split, 0, 1));
2164 i2set = single_set (XVECEXP (m_split, 0, 0));
2165
2166 /* In case we changed the mode of I2DEST, replace it in the
2167 pseudo-register table here. We can't do it above in case this
2168 code doesn't get executed and we do a split the other way. */
2169
2170 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2171 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
2172
2173 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2174
2175 /* If I2 or I3 has multiple SETs, we won't know how to track
2176 register status, so don't use these insns. If I2's destination
2177 is used between I2 and I3, we also can't use these insns. */
2178
2179 if (i2_code_number >= 0 && i2set && i3set
2180 && (next_real_insn (i2) == i3
2181 || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
2182 insn_code_number = recog_for_combine (&newi3pat, i3,
2183 &new_i3_notes);
2184 if (insn_code_number >= 0)
2185 newpat = newi3pat;
2186
2187 /* It is possible that both insns now set the destination of I3.
2188 If so, we must show an extra use of it. */
2189
2190 if (insn_code_number >= 0)
2191 {
2192 rtx new_i3_dest = SET_DEST (i3set);
2193 rtx new_i2_dest = SET_DEST (i2set);
2194
2195 while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
2196 || GET_CODE (new_i3_dest) == STRICT_LOW_PART
2197 || GET_CODE (new_i3_dest) == SUBREG)
2198 new_i3_dest = XEXP (new_i3_dest, 0);
2199
2200 while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
2201 || GET_CODE (new_i2_dest) == STRICT_LOW_PART
2202 || GET_CODE (new_i2_dest) == SUBREG)
2203 new_i2_dest = XEXP (new_i2_dest, 0);
2204
2205 if (GET_CODE (new_i3_dest) == REG
2206 && GET_CODE (new_i2_dest) == REG
2207 && REGNO (new_i3_dest) == REGNO (new_i2_dest))
2208 REG_N_SETS (REGNO (new_i2_dest))++;
2209 }
2210 }
2211
2212 /* If we can split it and use I2DEST, go ahead and see if that
2213 helps things be recognized. Verify that none of the registers
2214 are set between I2 and I3. */
2215 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
2216 #ifdef HAVE_cc0
2217 && GET_CODE (i2dest) == REG
2218 #endif
2219 /* We need I2DEST in the proper mode. If it is a hard register
2220 or the only use of a pseudo, we can change its mode. */
2221 && (GET_MODE (*split) == GET_MODE (i2dest)
2222 || GET_MODE (*split) == VOIDmode
2223 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
2224 || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
2225 && ! REG_USERVAR_P (i2dest)))
2226 && (next_real_insn (i2) == i3
2227 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
2228 /* We can't overwrite I2DEST if its value is still used by
2229 NEWPAT. */
2230 && ! reg_referenced_p (i2dest, newpat))
2231 {
2232 rtx newdest = i2dest;
2233 enum rtx_code split_code = GET_CODE (*split);
2234 enum machine_mode split_mode = GET_MODE (*split);
2235
2236 /* Get NEWDEST as a register in the proper mode. We have already
2237 validated that we can do this. */
2238 if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
2239 {
2240 newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
2241
2242 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2243 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
2244 }
2245
2246 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
2247 an ASHIFT. This can occur if it was inside a PLUS and hence
2248 appeared to be a memory address. This is a kludge. */
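 /* For instance, (mult (reg 100) (const_int 8)) becomes
 (ashift (reg 100) (const_int 3)); exact_log2 yields the shift
 count, or -1 if the constant is not a power of 2. */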
2249 if (split_code == MULT
2250 && GET_CODE (XEXP (*split, 1)) == CONST_INT
2251 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
2252 {
2253 SUBST (*split, gen_rtx_combine (ASHIFT, split_mode,
2254 XEXP (*split, 0), GEN_INT (i)));
2255 /* Update split_code because we may not have a multiply
2256 anymore. */
2257 split_code = GET_CODE (*split);
2258 }
2259
2260 #ifdef INSN_SCHEDULING
2261 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
2262 be written as a ZERO_EXTEND. */
2263 if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
2264 SUBST (*split, gen_rtx_combine (ZERO_EXTEND, split_mode,
2265 XEXP (*split, 0)));
2266 #endif
2267
2268 newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
2269 SUBST (*split, newdest);
2270 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2271
2272 /* If the split point was a MULT and we didn't have one before,
2273 don't use one now. */
2274 if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
2275 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2276 }
2277 }
2278
2279 /* Check for a case where we loaded from memory in a narrow mode and
2280 then sign extended it, but we need both registers. In that case,
2281 we have a PARALLEL with both loads from the same memory location.
2282 We can split this into a load from memory followed by a register-register
2283 copy. This saves at least one insn, more if register allocation can
2284 eliminate the copy.
2285
2286 We cannot do this if the destination of the second assignment is
2287 a register that we have already assumed is zero-extended. Similarly
2288 for a SUBREG of such a register. */
2289
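 /* Illustrative shape of NEWPAT in this case (registers invented):
 (parallel [(set (reg:SI 100) (sign_extend:SI (mem:QI (reg 99))))
 (set (reg:QI 101) (mem:QI (reg 99)))])
 which we split into the extending load followed by a
 register-register copy from the low part of (reg 100). */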
2290 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2291 && GET_CODE (newpat) == PARALLEL
2292 && XVECLEN (newpat, 0) == 2
2293 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2294 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
2295 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2296 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2297 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
2298 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2299 INSN_CUID (i2))
2300 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2301 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2302 && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
2303 (GET_CODE (temp) == REG
2304 && reg_nonzero_bits[REGNO (temp)] != 0
2305 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2306 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2307 && (reg_nonzero_bits[REGNO (temp)]
2308 != GET_MODE_MASK (word_mode))))
2309 && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
2310 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
2311 (GET_CODE (temp) == REG
2312 && reg_nonzero_bits[REGNO (temp)] != 0
2313 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2314 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2315 && (reg_nonzero_bits[REGNO (temp)]
2316 != GET_MODE_MASK (word_mode)))))
2317 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2318 SET_SRC (XVECEXP (newpat, 0, 1)))
2319 && ! find_reg_note (i3, REG_UNUSED,
2320 SET_DEST (XVECEXP (newpat, 0, 0))))
2321 {
2322 rtx ni2dest;
2323
2324 newi2pat = XVECEXP (newpat, 0, 0);
2325 ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
2326 newpat = XVECEXP (newpat, 0, 1);
2327 SUBST (SET_SRC (newpat),
2328 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
2329 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2330
2331 if (i2_code_number >= 0)
2332 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2333
2334 if (insn_code_number >= 0)
2335 {
2336 rtx insn;
2337 rtx link;
2338
2339 /* If we will be able to accept this, we have made a change to the
2340 destination of I3. This can invalidate a LOG_LINKS pointing
2341 to I3. No other part of combine.c makes such a transformation.
2342
2343 The new I3 will have a destination that was previously the
2344 destination of I1 or I2 and which was used in I2 or I3. Call
2345 distribute_links to make a LOG_LINK from the next use of
2346 that destination. */
2347
2348 PATTERN (i3) = newpat;
2349 distribute_links (gen_rtx_INSN_LIST (VOIDmode, i3, NULL_RTX));
2350
2351 /* I3 now uses what used to be its destination and which is
2352 now I2's destination. That means we need a LOG_LINK from
2353 I3 to I2. But we used to have one, so we still will.
2354
2355 However, some later insn might be using I2's dest and have
2356 a LOG_LINK pointing at I3. We must remove this link.
2357 The simplest way to remove the link is to point it at I1,
2358 which we know will be a NOTE. */
2359
2360 for (insn = NEXT_INSN (i3);
2361 insn && (this_basic_block == n_basic_blocks - 1
2362 || insn != BLOCK_HEAD (this_basic_block + 1));
2363 insn = NEXT_INSN (insn))
2364 {
2365 if (INSN_P (insn) && reg_referenced_p (ni2dest, PATTERN (insn)))
2366 {
2367 for (link = LOG_LINKS (insn); link;
2368 link = XEXP (link, 1))
2369 if (XEXP (link, 0) == i3)
2370 XEXP (link, 0) = i1;
2371
2372 break;
2373 }
2374 }
2375 }
2376 }
2377
2378 /* Similarly, check for a case where we have a PARALLEL of two independent
2379 SETs but we started with three insns. In this case, we can do the sets
2380 as two separate insns. This case occurs when some SET allows two
2381 other insns to combine, but the destination of that SET is still live. */
2382
2383 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2384 && GET_CODE (newpat) == PARALLEL
2385 && XVECLEN (newpat, 0) == 2
2386 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2387 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
2388 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
2389 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2390 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2391 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2392 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2393 INSN_CUID (i2))
2394 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
2395 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
2396 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
2397 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2398 XVECEXP (newpat, 0, 0))
2399 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
2400 XVECEXP (newpat, 0, 1))
2401 && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0)))
2402 && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1)))))
2403 {
2404 /* Normally, it doesn't matter which of the two is done first,
2405 but it does if one references cc0. In that case, it has to
2406 be first. */
2407 #ifdef HAVE_cc0
2408 if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
2409 {
2410 newi2pat = XVECEXP (newpat, 0, 0);
2411 newpat = XVECEXP (newpat, 0, 1);
2412 }
2413 else
2414 #endif
2415 {
2416 newi2pat = XVECEXP (newpat, 0, 1);
2417 newpat = XVECEXP (newpat, 0, 0);
2418 }
2419
2420 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2421
2422 if (i2_code_number >= 0)
2423 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2424 }
2425
2426 /* If it still isn't recognized, fail and change things back the way they
2427 were. */
2428 if ((insn_code_number < 0
2429 /* Is the result a reasonable ASM_OPERANDS? */
2430 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
2431 {
2432 undo_all ();
2433 return 0;
2434 }
2435
2436 /* If we had to change another insn, make sure it is valid also. */
2437 if (undobuf.other_insn)
2438 {
2439 rtx other_pat = PATTERN (undobuf.other_insn);
2440 rtx new_other_notes;
2441 rtx note, next;
2442
2443 CLEAR_HARD_REG_SET (newpat_used_regs);
2444
2445 other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
2446 &new_other_notes);
2447
2448 if (other_code_number < 0 && ! check_asm_operands (other_pat))
2449 {
2450 undo_all ();
2451 return 0;
2452 }
2453
2454 PATTERN (undobuf.other_insn) = other_pat;
2455
2456 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
2457 are still valid. Then add any non-duplicate notes added by
2458 recog_for_combine. */
2459 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
2460 {
2461 next = XEXP (note, 1);
2462
2463 if (REG_NOTE_KIND (note) == REG_UNUSED
2464 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
2465 {
2466 if (GET_CODE (XEXP (note, 0)) == REG)
2467 REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
2468
2469 remove_note (undobuf.other_insn, note);
2470 }
2471 }
2472
2473 for (note = new_other_notes; note; note = XEXP (note, 1))
2474 if (GET_CODE (XEXP (note, 0)) == REG)
2475 REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
2476
2477 distribute_notes (new_other_notes, undobuf.other_insn,
2478 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
2479 }
2480 #ifdef HAVE_cc0
2481 /* If the new I2 sets CC0 and I3 uses CC0, they must be adjacent;
2482 fail if any other insn comes between them. */
2483 {
2484 rtx p = prev_nonnote_insn (i3);
2485 if (p && p != i2 && GET_CODE (p) == INSN && newi2pat
2486 && sets_cc0_p (newi2pat))
2487 {
2488 undo_all ();
2489 return 0;
2490 }
2491 }
2492 #endif
2493
2494 /* We now know that we can do this combination. Merge the insns and
2495 update the status of registers and LOG_LINKS. */
2496
2497 {
2498 rtx i3notes, i2notes, i1notes = 0;
2499 rtx i3links, i2links, i1links = 0;
2500 rtx midnotes = 0;
2501 unsigned int regno;
2502 /* Compute which registers we expect to eliminate. newi2pat may be setting
2503 either i3dest or i2dest, so we must check it. Also, i1dest may be the
2504 same as i3dest, in which case newi2pat may be setting i1dest. */
2505 rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
2506 || i2dest_in_i2src || i2dest_in_i1src
2507 ? 0 : i2dest);
2508 rtx elim_i1 = (i1 == 0 || i1dest_in_i1src
2509 || (newi2pat && reg_set_p (i1dest, newi2pat))
2510 ? 0 : i1dest);
2511
2512 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
2513 clear them. */
2514 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
2515 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
2516 if (i1)
2517 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
2518
2519 /* Ensure that we do not have something that should not be shared but
2520 occurs multiple times in the new insns. Check this by first
2521 resetting all the `used' flags and then copying anything that is shared. */
2522
2523 reset_used_flags (i3notes);
2524 reset_used_flags (i2notes);
2525 reset_used_flags (i1notes);
2526 reset_used_flags (newpat);
2527 reset_used_flags (newi2pat);
2528 if (undobuf.other_insn)
2529 reset_used_flags (PATTERN (undobuf.other_insn));
2530
2531 i3notes = copy_rtx_if_shared (i3notes);
2532 i2notes = copy_rtx_if_shared (i2notes);
2533 i1notes = copy_rtx_if_shared (i1notes);
2534 newpat = copy_rtx_if_shared (newpat);
2535 newi2pat = copy_rtx_if_shared (newi2pat);
2536 if (undobuf.other_insn)
2537 PATTERN (undobuf.other_insn) = copy_rtx_if_shared (PATTERN (undobuf.other_insn));
2538
2539 INSN_CODE (i3) = insn_code_number;
2540 PATTERN (i3) = newpat;
2541 if (undobuf.other_insn)
2542 INSN_CODE (undobuf.other_insn) = other_code_number;
2543
2544 /* We had one special case above where I2 had more than one set and
2545 we replaced a destination of one of those sets with the destination
2546 of I3. In that case, we have to update LOG_LINKS of insns later
2547 in this basic block. Note that this (expensive) case is rare.
2548
2549 Also, in this case, we must pretend that all REG_NOTEs for I2
2550 actually came from I3, so that REG_UNUSED notes from I2 will be
2551 properly handled. */
2552
2553 if (i3_subst_into_i2 && GET_CODE (PATTERN (i2)) == PARALLEL)
2554 {
2555 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
2556 if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
2557 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
2558 && ! find_reg_note (i2, REG_UNUSED,
2559 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
2560 for (temp = NEXT_INSN (i2);
2561 temp && (this_basic_block == n_basic_blocks - 1
2562 || BLOCK_HEAD (this_basic_block + 1) != temp);
2563 temp = NEXT_INSN (temp))
2564 if (temp != i3 && INSN_P (temp))
2565 for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
2566 if (XEXP (link, 0) == i2)
2567 XEXP (link, 0) = i3;
2568
2569 if (i3notes)
2570 {
2571 rtx link = i3notes;
2572 while (XEXP (link, 1))
2573 link = XEXP (link, 1);
2574 XEXP (link, 1) = i2notes;
2575 }
2576 else
2577 i3notes = i2notes;
2578 i2notes = 0;
2579 }
2580
2581 LOG_LINKS (i3) = 0;
2582 REG_NOTES (i3) = 0;
2583 LOG_LINKS (i2) = 0;
2584 REG_NOTES (i2) = 0;
2585
2586 if (newi2pat)
2587 {
2588 INSN_CODE (i2) = i2_code_number;
2589 PATTERN (i2) = newi2pat;
2590 }
2591 else
2592 {
2593 PUT_CODE (i2, NOTE);
2594 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
2595 NOTE_SOURCE_FILE (i2) = 0;
2596 }
2597
2598 if (i1)
2599 {
2600 LOG_LINKS (i1) = 0;
2601 REG_NOTES (i1) = 0;
2602 PUT_CODE (i1, NOTE);
2603 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2604 NOTE_SOURCE_FILE (i1) = 0;
2605 }
2606
2607 /* Get death notes for everything that is now used in either I3 or
2608 I2 and used to die in a previous insn. If we built two new
2609 patterns, move from I1 to I2 then I2 to I3 so that we get the
2610 proper movement on registers that I2 modifies. */
2611
2612 if (newi2pat)
2613 {
2614 move_deaths (newi2pat, NULL_RTX, INSN_CUID (i1), i2, &midnotes);
2615 move_deaths (newpat, newi2pat, INSN_CUID (i1), i3, &midnotes);
2616 }
2617 else
2618 move_deaths (newpat, NULL_RTX, i1 ? INSN_CUID (i1) : INSN_CUID (i2),
2619 i3, &midnotes);
2620
2621 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
2622 if (i3notes)
2623 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
2624 elim_i2, elim_i1);
2625 if (i2notes)
2626 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
2627 elim_i2, elim_i1);
2628 if (i1notes)
2629 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
2630 elim_i2, elim_i1);
2631 if (midnotes)
2632 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2633 elim_i2, elim_i1);
2634
2635 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
2636 know these are REG_UNUSED and want them to go to the desired insn,
2637 so we always pass it as i3. We have not counted the notes in
2638 reg_n_deaths yet, so we need to do so now. */
2639
2640 if (newi2pat && new_i2_notes)
2641 {
2642 for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2643 if (GET_CODE (XEXP (temp, 0)) == REG)
2644 REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
2645
2646 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2647 }
2648
2649 if (new_i3_notes)
2650 {
2651 for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2652 if (GET_CODE (XEXP (temp, 0)) == REG)
2653 REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
2654
2655 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
2656 }
2657
2658 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
2659 put a REG_DEAD note for it somewhere. If NEWI2PAT exists and sets
2660 I3DEST, the death must be somewhere before I2, not I3. If we passed I3
2661 in that case, it might delete I2. Similarly for I2 and I1.
2662 Show an additional death due to the REG_DEAD note we make here. If
2663 we discard it in distribute_notes, we will decrement it again. */
2664
2665 if (i3dest_killed)
2666 {
2667 if (GET_CODE (i3dest_killed) == REG)
2668 REG_N_DEATHS (REGNO (i3dest_killed))++;
2669
2670 if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
2671 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2672 NULL_RTX),
2673 NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1);
2674 else
2675 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2676 NULL_RTX),
2677 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2678 elim_i2, elim_i1);
2679 }
2680
2681 if (i2dest_in_i2src)
2682 {
2683 if (GET_CODE (i2dest) == REG)
2684 REG_N_DEATHS (REGNO (i2dest))++;
2685
2686 if (newi2pat && reg_set_p (i2dest, newi2pat))
2687 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
2688 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2689 else
2690 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
2691 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2692 NULL_RTX, NULL_RTX);
2693 }
2694
2695 if (i1dest_in_i1src)
2696 {
2697 if (GET_CODE (i1dest) == REG)
2698 REG_N_DEATHS (REGNO (i1dest))++;
2699
2700 if (newi2pat && reg_set_p (i1dest, newi2pat))
2701 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
2702 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2703 else
2704 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
2705 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2706 NULL_RTX, NULL_RTX);
2707 }
2708
2709 distribute_links (i3links);
2710 distribute_links (i2links);
2711 distribute_links (i1links);
2712
2713 if (GET_CODE (i2dest) == REG)
2714 {
2715 rtx link;
2716 rtx i2_insn = 0, i2_val = 0, set;
2717
2718 /* The insn that used to set this register doesn't exist, and
2719 this life of the register may not exist either. See if one of
2720 I3's links points to an insn that sets I2DEST. If it does,
2721 that is now the last known value for I2DEST. If we don't update
2722 this and I2 set the register to a value that depended on its old
2723 contents, we will get confused. If this insn is used, things
2724 will be set correctly in combine_instructions. */
2725
2726 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2727 if ((set = single_set (XEXP (link, 0))) != 0
2728 && rtx_equal_p (i2dest, SET_DEST (set)))
2729 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2730
2731 record_value_for_reg (i2dest, i2_insn, i2_val);
2732
2733 /* If the reg formerly set in I2 died only once and that was in I3,
2734 zero its use count so it won't make `reload' do any work. */
2735 if (! added_sets_2
2736 && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
2737 && ! i2dest_in_i2src)
2738 {
2739 regno = REGNO (i2dest);
2740 REG_N_SETS (regno)--;
2741 }
2742 }
2743
2744 if (i1 && GET_CODE (i1dest) == REG)
2745 {
2746 rtx link;
2747 rtx i1_insn = 0, i1_val = 0, set;
2748
2749 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2750 if ((set = single_set (XEXP (link, 0))) != 0
2751 && rtx_equal_p (i1dest, SET_DEST (set)))
2752 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2753
2754 record_value_for_reg (i1dest, i1_insn, i1_val);
2755
2756 regno = REGNO (i1dest);
2757 if (! added_sets_1 && ! i1dest_in_i1src)
2758 REG_N_SETS (regno)--;
2759 }
2760
2761 /* Update reg_nonzero_bits et al for any changes that may have been made
2762 to this insn. The order of set_nonzero_bits_and_sign_copies() calls
2763 is important, because newi2pat can affect the nonzero_bits of newpat. */
2764 if (newi2pat)
2765 note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);
2766 note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);
2767
2768 /* Set new_direct_jump_p if a new return or simple jump instruction
2769 has been created.
2770
2771 If I3 is now an unconditional jump, ensure that it has a
2772 BARRIER following it since it may have initially been a
2773 conditional jump. It may also be the last nonnote insn. */
2774
2775 if (GET_CODE (newpat) == RETURN || any_uncondjump_p (i3))
2776 {
2777 *new_direct_jump_p = 1;
2778
2779 if ((temp = next_nonnote_insn (i3)) == NULL_RTX
2780 || GET_CODE (temp) != BARRIER)
2781 emit_barrier_after (i3);
2782 }
2783 }
2784
2785 combine_successes++;
2786 undo_commit ();
2787
2788 /* Clear this here, so that subsequent get_last_value calls are not
2789 affected. */
2790 subst_prev_insn = NULL_RTX;
2791
2792 if (added_links_insn
2793 && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
2794 && INSN_CUID (added_links_insn) < INSN_CUID (i3))
2795 return added_links_insn;
2796 else
2797 return newi2pat ? i2 : i3;
2798 }
2799 \f
2800 /* Undo all the modifications recorded in undobuf. */
2801
2802 static void
2803 undo_all ()
2804 {
2805 struct undo *undo, *next;
2806
2807 for (undo = undobuf.undos; undo; undo = next)
2808 {
2809 next = undo->next;
2810 if (undo->is_int)
2811 *undo->where.i = undo->old_contents.i;
2812 else
2813 *undo->where.r = undo->old_contents.r;
2814
2815 undo->next = undobuf.frees;
2816 undobuf.frees = undo;
2817 }
2818
2819 undobuf.undos = undobuf.previous_undos = 0;
2820
2821 /* Clear this here, so that subsequent get_last_value calls are not
2822 affected. */
2823 subst_prev_insn = NULL_RTX;
2824 }
2825
2826 /* We've committed to accepting the changes we made. Move all
2827 of the undos to the free list. */
2828
2829 static void
2830 undo_commit ()
2831 {
2832 struct undo *undo, *next;
2833
2834 for (undo = undobuf.undos; undo; undo = next)
2835 {
2836 next = undo->next;
2837 undo->next = undobuf.frees;
2838 undobuf.frees = undo;
2839 }
2840 undobuf.undos = undobuf.previous_undos = 0;
2841 }
2842
2843 \f
2844 /* Find the innermost point within the rtx at LOC, possibly LOC itself,
2845 where we have an arithmetic expression and return that point. LOC will
2846 be inside INSN.
2847
2848 try_combine will call this function to see if an insn can be split into
2849 two insns. */
2850
2851 static rtx *
2852 find_split_point (loc, insn)
2853 rtx *loc;
2854 rtx insn;
2855 {
2856 rtx x = *loc;
2857 enum rtx_code code = GET_CODE (x);
2858 rtx *split;
2859 unsigned HOST_WIDE_INT len = 0;
2860 HOST_WIDE_INT pos = 0;
2861 int unsignedp = 0;
2862 rtx inner = NULL_RTX;
2863
2864 /* First special-case some codes. */
2865 switch (code)
2866 {
2867 case SUBREG:
2868 #ifdef INSN_SCHEDULING
2869 /* If we are making a paradoxical SUBREG invalid, it becomes a split
2870 point. */
2871 if (GET_CODE (SUBREG_REG (x)) == MEM)
2872 return loc;
2873 #endif
2874 return find_split_point (&SUBREG_REG (x), insn);
2875
2876 case MEM:
2877 #ifdef HAVE_lo_sum
2878 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2879 using LO_SUM and HIGH. */
2880 if (GET_CODE (XEXP (x, 0)) == CONST
2881 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2882 {
2883 SUBST (XEXP (x, 0),
2884 gen_rtx_combine (LO_SUM, Pmode,
2885 gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
2886 XEXP (x, 0)));
2887 return &XEXP (XEXP (x, 0), 0);
2888 }
2889 #endif
2890
2891 /* If we have a PLUS whose second operand is a constant and the
2892 address is not valid, perhaps we can split it up using
2893 the machine-specific way to split large constants. We use
2894 the first pseudo-reg (one of the virtual regs) as a placeholder;
2895 it will not remain in the result. */
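 /* Hypothetical sketch: for (mem (plus (reg 99) (const_int 100000)))
 with an out-of-range offset, the backend's splitter might produce
 (set (reg P) (const_int 100000))
 (set (reg P) (plus (reg P) (reg 99)))
 and after substituting the first source into the second, the
 constant becomes the split point. */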
2896 if (GET_CODE (XEXP (x, 0)) == PLUS
2897 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2898 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2899 {
2900 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
2901 rtx seq = split_insns (gen_rtx_SET (VOIDmode, reg, XEXP (x, 0)),
2902 subst_insn);
2903
2904 /* This should have produced two insns, each of which sets our
2905 placeholder. If the source of the second is a valid address,
2906 we can put both sources together and make a split point
2907 in the middle. */
2908
2909 if (seq && XVECLEN (seq, 0) == 2
2910 && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
2911 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
2912 && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
2913 && ! reg_mentioned_p (reg,
2914 SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
2915 && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
2916 && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
2917 && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
2918 && memory_address_p (GET_MODE (x),
2919 SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
2920 {
2921 rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
2922 rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));
2923
2924 /* Replace the placeholder in SRC2 with SRC1. If we can
2925 find where in SRC2 it was placed, that can become our
2926 split point and we can replace this address with SRC2.
2927 Just try two obvious places. */
2928
2929 src2 = replace_rtx (src2, reg, src1);
2930 split = 0;
2931 if (XEXP (src2, 0) == src1)
2932 split = &XEXP (src2, 0);
2933 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2934 && XEXP (XEXP (src2, 0), 0) == src1)
2935 split = &XEXP (XEXP (src2, 0), 0);
2936
2937 if (split)
2938 {
2939 SUBST (XEXP (x, 0), src2);
2940 return split;
2941 }
2942 }
2943
2944 /* If that didn't work, perhaps the first operand is complex and
2945 needs to be computed separately, so make a split point there.
2946 This will occur on machines that just support REG + CONST
2947 and have a constant moved through some previous computation. */
2948
2949 else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
2950 && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
2951 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
2952 == 'o')))
2953 return &XEXP (XEXP (x, 0), 0);
2954 }
2955 break;
2956
2957 case SET:
2958 #ifdef HAVE_cc0
2959 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
2960 ZERO_EXTRACT, the most likely reason why this doesn't match is that
2961 we need to put the operand into a register. So split at that
2962 point. */
2963
2964 if (SET_DEST (x) == cc0_rtx
2965 && GET_CODE (SET_SRC (x)) != COMPARE
2966 && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
2967 && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
2968 && ! (GET_CODE (SET_SRC (x)) == SUBREG
2969 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
2970 return &SET_SRC (x);
2971 #endif
2972
2973 /* See if we can split SET_SRC as it stands. */
2974 split = find_split_point (&SET_SRC (x), insn);
2975 if (split && split != &SET_SRC (x))
2976 return split;
2977
2978 /* See if we can split SET_DEST as it stands. */
2979 split = find_split_point (&SET_DEST (x), insn);
2980 if (split && split != &SET_DEST (x))
2981 return split;
2982
2983 /* See if this is a bitfield assignment with everything constant. If
2984 so, this is an IOR of an AND, so split it into that. */
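 /* E.g. (set (zero_extract X (const_int 4) (const_int 8)) (const_int 5))
 becomes, with MASK == 15, POS == 8 and SImode X (all hypothetical),
 (set X (ior (and X (const_int -3841)) (const_int 1280))). */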
2985 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2986 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2987 <= HOST_BITS_PER_WIDE_INT)
2988 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
2989 && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
2990 && GET_CODE (SET_SRC (x)) == CONST_INT
2991 && ((INTVAL (XEXP (SET_DEST (x), 1))
2992 + INTVAL (XEXP (SET_DEST (x), 2)))
2993 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
2994 && ! side_effects_p (XEXP (SET_DEST (x), 0)))
2995 {
2996 HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2));
2997 unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1));
2998 unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x));
2999 rtx dest = XEXP (SET_DEST (x), 0);
3000 enum machine_mode mode = GET_MODE (dest);
3001 unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
3002
3003 if (BITS_BIG_ENDIAN)
3004 pos = GET_MODE_BITSIZE (mode) - len - pos;
3005
3006 if (src == mask)
3007 SUBST (SET_SRC (x),
3008 gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
3009 else
3010 SUBST (SET_SRC (x),
3011 gen_binary (IOR, mode,
3012 gen_binary (AND, mode, dest,
3013 GEN_INT (~(mask << pos)
3014 & GET_MODE_MASK (mode))),
3015 GEN_INT (src << pos)));
3016
3017 SUBST (SET_DEST (x), dest);
3018
3019 split = find_split_point (&SET_SRC (x), insn);
3020 if (split && split != &SET_SRC (x))
3021 return split;
3022 }
3023
3024 /* Otherwise, see if this is an operation that we can split into two.
3025 If so, try to split that. */
3026 code = GET_CODE (SET_SRC (x));
3027
3028 switch (code)
3029 {
3030 case AND:
3031 /* If we are AND'ing with a large constant that is only a single
3032 bit and the result is only being used in a context where we
3033 need to know if it is zero or non-zero, replace it with a bit
3034 extraction. This will avoid the large constant, which might
3035 have taken more than one insn to make. If the constant were
3036 not a valid argument to the AND but took only one insn to make,
3037 this is no worse, but if it took more than one insn, it will
3038 be better. */
3039
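 /* For instance (hypothetical), (and:SI (reg 99) (const_int 4096))
 whose single use is a comparison against zero becomes a one-bit
 ZERO_EXTRACT at bit 12, avoiding the wide constant. */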
3040 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3041 && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
3042 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
3043 && GET_CODE (SET_DEST (x)) == REG
3044 && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
3045 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
3046 && XEXP (*split, 0) == SET_DEST (x)
3047 && XEXP (*split, 1) == const0_rtx)
3048 {
3049 rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
3050 XEXP (SET_SRC (x), 0),
3051 pos, NULL_RTX, 1, 1, 0, 0);
3052 if (extraction != 0)
3053 {
3054 SUBST (SET_SRC (x), extraction);
3055 return find_split_point (loc, insn);
3056 }
3057 }
3058 break;
3059
3060 case NE:
3061 /* If STORE_FLAG_VALUE is -1 and this is (NE X 0) where only one bit
3062 of X is known to be on, this can be converted into a NEG of a shift. */
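 /* E.g. if only bit 3 of X can be nonzero, (ne X (const_int 0))
 becomes (neg (lshiftrt X (const_int 3))), which is -1 when the
 bit is set and 0 otherwise, matching STORE_FLAG_VALUE == -1. */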
3063 if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
3064 && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
3065 && 1 <= (pos = exact_log2
3066 (nonzero_bits (XEXP (SET_SRC (x), 0),
3067 GET_MODE (XEXP (SET_SRC (x), 0))))))
3068 {
3069 enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));
3070
3071 SUBST (SET_SRC (x),
3072 gen_rtx_combine (NEG, mode,
3073 gen_rtx_combine (LSHIFTRT, mode,
3074 XEXP (SET_SRC (x), 0),
3075 GEN_INT (pos))));
3076
3077 split = find_split_point (&SET_SRC (x), insn);
3078 if (split && split != &SET_SRC (x))
3079 return split;
3080 }
3081 break;
3082
3083 case SIGN_EXTEND:
3084 inner = XEXP (SET_SRC (x), 0);
3085
3086 /* We can't optimize if either mode is a partial integer
3087 mode as we don't know how many bits are significant
3088 in those modes. */
3089 if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
3090 || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
3091 break;
3092
3093 pos = 0;
3094 len = GET_MODE_BITSIZE (GET_MODE (inner));
3095 unsignedp = 0;
3096 break;
3097
3098 case SIGN_EXTRACT:
3099 case ZERO_EXTRACT:
3100 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3101 && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
3102 {
3103 inner = XEXP (SET_SRC (x), 0);
3104 len = INTVAL (XEXP (SET_SRC (x), 1));
3105 pos = INTVAL (XEXP (SET_SRC (x), 2));
3106
3107 if (BITS_BIG_ENDIAN)
3108 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
3109 unsignedp = (code == ZERO_EXTRACT);
3110 }
3111 break;
3112
3113 default:
3114 break;
3115 }
3116
3117 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
3118 {
3119 enum machine_mode mode = GET_MODE (SET_SRC (x));
3120
3121 /* For unsigned, we have a choice of a shift followed by an
3122 AND or two shifts. Use two shifts for field sizes where the
3123 constant might be too large. We assume here that we can
3124 always at least get 8-bit constants in an AND insn, which is
3125 true for every current RISC. */
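/* E.g., in SImode an unsigned 4-bit field at bit 2 becomes
   (and (lshiftrt X (const_int 2)) (const_int 15)), while an
   unsigned 12-bit field at bit 2 becomes
   (lshiftrt (ashift X (const_int 18)) (const_int 20)), since
   (const_int 4095) might not be a valid AND operand. */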
3126
3127 if (unsignedp && len <= 8)
3128 {
3129 SUBST (SET_SRC (x),
3130 gen_rtx_combine
3131 (AND, mode,
3132 gen_rtx_combine (LSHIFTRT, mode,
3133 gen_lowpart_for_combine (mode, inner),
3134 GEN_INT (pos)),
3135 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
3136
3137 split = find_split_point (&SET_SRC (x), insn);
3138 if (split && split != &SET_SRC (x))
3139 return split;
3140 }
3141 else
3142 {
3143 SUBST (SET_SRC (x),
3144 gen_rtx_combine
3145 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
3146 gen_rtx_combine (ASHIFT, mode,
3147 gen_lowpart_for_combine (mode, inner),
3148 GEN_INT (GET_MODE_BITSIZE (mode)
3149 - len - pos)),
3150 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
3151
3152 split = find_split_point (&SET_SRC (x), insn);
3153 if (split && split != &SET_SRC (x))
3154 return split;
3155 }
3156 }
3157
3158 /* See if this is a simple operation with a constant as the second
3159 operand. It might be that this constant is out of range and hence
3160 could be used as a split point. */
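/* E.g., on a target whose add-immediate insn accepts only small
   constants (an assumption for illustration), the constant in
   (plus X (const_int 100000)) is a natural split point: it can be
   loaded into a register by a separate insn. */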
3161 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
3162 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
3163 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
3164 && CONSTANT_P (XEXP (SET_SRC (x), 1))
3165 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
3166 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
3167 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
3168 == 'o'))))
3169 return &XEXP (SET_SRC (x), 1);
3170
3171 /* Finally, see if this is a simple operation with its first operand
3172 not in a register. The operation might require this operand in a
3173 register, so return it as a split point. We can always do this
3174 because if the first operand were another operation, we would have
3175 already found it as a split point. */
3176 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
3177 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
3178 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
3179 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
3180 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
3181 return &XEXP (SET_SRC (x), 0);
3182
3183 return 0;
3184
3185 case AND:
3186 case IOR:
3187 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
3188 it is better to write this as (not (ior A B)) so we can split it.
3189 Similarly, (ior (not A) (not B)) is rewritten as (not (and A B)). */
3190 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
3191 {
3192 SUBST (*loc,
3193 gen_rtx_combine (NOT, GET_MODE (x),
3194 gen_rtx_combine (code == IOR ? AND : IOR,
3195 GET_MODE (x),
3196 XEXP (XEXP (x, 0), 0),
3197 XEXP (XEXP (x, 1), 0))));
3198 return find_split_point (loc, insn);
3199 }
3200
3201 /* Many RISC machines have a large set of logical insns. If the
3202 second operand is a NOT, put it first so we will try to split the
3203 other operand first. */
3204 if (GET_CODE (XEXP (x, 1)) == NOT)
3205 {
3206 rtx tem = XEXP (x, 0);
3207 SUBST (XEXP (x, 0), XEXP (x, 1));
3208 SUBST (XEXP (x, 1), tem);
3209 }
3210 break;
3211
3212 default:
3213 break;
3214 }
3215
3216 /* Otherwise, select our actions depending on our rtx class. */
3217 switch (GET_RTX_CLASS (code))
3218 {
3219 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
3220 case '3':
3221 split = find_split_point (&XEXP (x, 2), insn);
3222 if (split)
3223 return split;
3224 /* ... fall through ... */
3225 case '2':
3226 case 'c':
3227 case '<':
3228 split = find_split_point (&XEXP (x, 1), insn);
3229 if (split)
3230 return split;
3231 /* ... fall through ... */
3232 case '1':
3233 /* Some machines have (and (shift ...) ...) insns. If X is not
3234 an AND, but XEXP (X, 0) is, use it as our split point. */
3235 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
3236 return &XEXP (x, 0);
3237
3238 split = find_split_point (&XEXP (x, 0), insn);
3239 if (split)
3240 return split;
3241 return loc;
3242 }
3243
3244 /* Otherwise, we don't have a split point. */
3245 return 0;
3246 }
3247 \f
3248 /* Throughout X, replace FROM with TO, and return the result.
3249 The result is TO if X is FROM;
3250 otherwise the result is X, but its contents may have been modified.
3251 If they were modified, a record was made in undobuf so that
3252 undo_all will (among other things) return X to its original state.
3253
3254 If the number of changes necessary is too large to record for undoing,
3255 the excess changes are not made, so the result is invalid.
3256 The changes already made can still be undone.
3257 undobuf.num_undo is incremented for such changes, so by testing that,
3258 the caller can tell whether the result is valid.
3259
3260 `n_occurrences' is incremented each time FROM is replaced.
3261
3262 IN_DEST is non-zero if we are processing the SET_DEST of a SET.
3263
3264 UNIQUE_COPY is non-zero if each substitution must be unique. We do this
3265 by copying if `n_occurrences' is non-zero. */
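/* For instance, substituting FROM = (reg 60),
   TO = (plus (reg 58) (const_int 4)) throughout
   (set (reg 61) (mult (reg 60) (reg 59))) yields
   (set (reg 61) (mult (plus (reg 58) (const_int 4)) (reg 59))),
   which the caller then tries to recognize as a single insn.
   (The register numbers are made up for illustration.) */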
3266
3267 static rtx
3268 subst (x, from, to, in_dest, unique_copy)
3269 register rtx x, from, to;
3270 int in_dest;
3271 int unique_copy;
3272 {
3273 register enum rtx_code code = GET_CODE (x);
3274 enum machine_mode op0_mode = VOIDmode;
3275 register const char *fmt;
3276 register int len, i;
3277 rtx new;
3278
3279 /* Two expressions are equal if they are identical copies of a shared
3280 RTX or if they are both registers with the same register number
3281 and mode. */
3282
3283 #define COMBINE_RTX_EQUAL_P(X,Y) \
3284 ((X) == (Y) \
3285 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
3286 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
3287
3288 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
3289 {
3290 n_occurrences++;
3291 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
3292 }
3293
3294 /* If X and FROM are the same register but different modes, they will
3295 not have been seen as equal above. However, flow.c will make a
3296 LOG_LINKS entry for that case. If we do nothing, we will try to
3297 rerecognize our original insn and, when it succeeds, we will
3298 delete the feeding insn, which is incorrect.
3299
3300 So force this insn not to match in this (rare) case. */
3301 if (! in_dest && code == REG && GET_CODE (from) == REG
3302 && REGNO (x) == REGNO (from))
3303 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
3304
3305 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
3306 of which may contain things that can be combined. */
3307 if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
3308 return x;
3309
3310 /* It is possible to have a subexpression appear twice in the insn.
3311 Suppose that FROM is a register that appears within TO.
3312 Then, after that subexpression has been scanned once by `subst',
3313 the second time it is scanned, TO may be found. If we were
3314 to scan TO here, we would find FROM within it and create a
3315 self-referential rtl structure, which is completely wrong. */
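/* Concretely, if FROM = (reg 65) and TO = (plus (reg 65) (const_int 1)),
   then once TO has been installed we must return it unchanged here;
   scanning inside it would replace the inner (reg 65) with TO itself,
   leaving an rtx that contains itself. (The register number is made
   up for illustration.) */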
3316 if (COMBINE_RTX_EQUAL_P (x, to))
3317 return to;
3318
3319 /* Parallel asm_operands need special attention because all of the
3320 inputs are shared across the arms. Furthermore, unsharing the
3321 rtl results in recognition failures. Failure to handle this case
3322 specially can result in circular rtl.
3323
3324 Solve this by doing a normal pass across the first entry of the
3325 parallel, and only processing the SET_DESTs of the subsequent
3326 entries. Ug. */
3327
3328 if (code == PARALLEL
3329 && GET_CODE (XVECEXP (x, 0, 0)) == SET
3330 && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
3331 {
3332 new = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy);
3333
3334 /* If this substitution failed, this whole thing fails. */
3335 if (GET_CODE (new) == CLOBBER
3336 && XEXP (new, 0) == const0_rtx)
3337 return new;
3338
3339 SUBST (XVECEXP (x, 0, 0), new);
3340
3341 for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
3342 {
3343 rtx dest = SET_DEST (XVECEXP (x, 0, i));
3344
3345 if (GET_CODE (dest) != REG
3346 && GET_CODE (dest) != CC0
3347 && GET_CODE (dest) != PC)
3348 {
3349 new = subst (dest, from, to, 0, unique_copy);
3350
3351 /* If this substitution failed, this whole thing fails. */
3352 if (GET_CODE (new) == CLOBBER
3353 && XEXP (new, 0) == const0_rtx)
3354 return new;
3355
3356 SUBST (SET_DEST (XVECEXP (x, 0, i)), new);
3357 }
3358 }
3359 }
3360 else
3361 {
3362 len = GET_RTX_LENGTH (code);
3363 fmt = GET_RTX_FORMAT (code);
3364
3365 /* We don't need to process a SET_DEST that is a register, CC0,
3366 or PC, so set up to skip this common case. All other cases
3367 where we want to suppress replacing something inside a
3368 SET_SRC are handled via the IN_DEST operand. */
3369 if (code == SET
3370 && (GET_CODE (SET_DEST (x)) == REG
3371 || GET_CODE (SET_DEST (x)) == CC0
3372 || GET_CODE (SET_DEST (x)) == PC))
3373 fmt = "ie";
3374
3375 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
3376 constant. */
3377 if (fmt[0] == 'e')
3378 op0_mode = GET_MODE (XEXP (x, 0));
3379
3380 for (i = 0; i < len; i++)
3381 {
3382 if (fmt[i] == 'E')
3383 {
3384 register int j;
3385 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3386 {
3387 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
3388 {
3389 new = (unique_copy && n_occurrences
3390 ? copy_rtx (to) : to);
3391 n_occurrences++;
3392 }
3393 else
3394 {
3395 new = subst (XVECEXP (x, i, j), from, to, 0,
3396 unique_copy);
3397
3398 /* If this substitution failed, this whole thing
3399 fails. */
3400 if (GET_CODE (new) == CLOBBER
3401 && XEXP (new, 0) == const0_rtx)
3402 return new;
3403 }
3404
3405 SUBST (XVECEXP (x, i, j), new);
3406 }
3407 }
3408 else if (fmt[i] == 'e')
3409 {
3410 if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
3411 {
3412 /* In general, don't install a subreg involving two
3413 modes that are not tieable. It can worsen register
3414 allocation, and can even make invalid reload
3415 insns, since the reg inside may need to be copied
3416 from in the outside mode, and that may be invalid
3417 if it is an fp reg copied in integer mode.
3418
3419 We allow two exceptions to this: it is valid if
3420 it is inside another SUBREG and the mode of that
3421 SUBREG and the mode of the inside of TO are
3422 tieable, and it is valid if X is a SET that copies
3423 FROM to CC0. */
3424
3425 if (GET_CODE (to) == SUBREG
3426 && ! MODES_TIEABLE_P (GET_MODE (to),
3427 GET_MODE (SUBREG_REG (to)))
3428 && ! (code == SUBREG
3429 && MODES_TIEABLE_P (GET_MODE (x),
3430 GET_MODE (SUBREG_REG (to))))
3431 #ifdef HAVE_cc0
3432 && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
3433 #endif
3434 )
3435 return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
3436
3437 #ifdef CLASS_CANNOT_CHANGE_MODE
3438 if (code == SUBREG
3439 && GET_CODE (to) == REG
3440 && REGNO (to) < FIRST_PSEUDO_REGISTER
3441 && (TEST_HARD_REG_BIT
3442 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
3443 REGNO (to)))
3444 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (to),
3445 GET_MODE (x)))
3446 return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
3447 #endif
3448
3449 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
3450 n_occurrences++;
3451 }
3452 else
3453 /* If we are in a SET_DEST, suppress most cases unless we
3454 have gone inside a MEM, in which case we want to
3455 simplify the address. We assume here that things that
3456 are actually part of the destination have their inner
3457 parts in the first expression. This is true for SUBREG,
3458 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
3459 things aside from REG and MEM that should appear in a
3460 SET_DEST. */
3461 new = subst (XEXP (x, i), from, to,
3462 (((in_dest
3463 && (code == SUBREG || code == STRICT_LOW_PART
3464 || code == ZERO_EXTRACT))
3465 || code == SET)
3466 && i == 0), unique_copy);
3467
3468 /* If we found that we will have to reject this combination,
3469 indicate that by returning the CLOBBER ourselves, rather than
3470 an expression containing it. This will speed things up as
3471 well as prevent accidents where two CLOBBERs are considered
3472 to be equal, thus producing an incorrect simplification. */
3473
3474 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
3475 return new;
3476
3477 SUBST (XEXP (x, i), new);
3478 }
3479 }
3480 }
3481
3482 /* Try to simplify X. If the simplification changed the code, it is likely
3483 that further simplification will help, so loop, but limit the number
3484 of repetitions that will be performed. */
3485
3486 for (i = 0; i < 4; i++)
3487 {
3488 /* If X is sufficiently simple, don't bother trying to do anything
3489 with it. */
3490 if (code != CONST_INT && code != REG && code != CLOBBER)
3491 x = combine_simplify_rtx (x, op0_mode, i == 3, in_dest);
3492
3493 if (GET_CODE (x) == code)
3494 break;
3495
3496 code = GET_CODE (x);
3497
3498 /* We no longer know the original mode of operand 0 since we
3499 have changed the form of X. */
3500 op0_mode = VOIDmode;
3501 }
3502
3503 return x;
3504 }
3505 \f
3506 /* Simplify X, a piece of RTL. We just operate on the expression at the
3507 outer level; call `subst' to simplify recursively. Return the new
3508 expression.
3509
3510 OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this
3511 will be the last iteration, even if an expression with a code different
3512 from X is returned; IN_DEST is nonzero if we are inside a SET_DEST. */
3513
3514 static rtx
3515 combine_simplify_rtx (x, op0_mode, last, in_dest)
3516 rtx x;
3517 enum machine_mode op0_mode;
3518 int last;
3519 int in_dest;
3520 {
3521 enum rtx_code code = GET_CODE (x);
3522 enum machine_mode mode = GET_MODE (x);
3523 rtx temp;
3524 int i;
3525
3526 /* If this is a commutative operation, put a constant last and a complex
3527 expression first. We don't need to do this for comparisons here. */
3528 if (GET_RTX_CLASS (code) == 'c'
3529 && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
3530 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
3531 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
3532 || (GET_CODE (XEXP (x, 0)) == SUBREG
3533 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
3534 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
3535 {
3536 temp = XEXP (x, 0);
3537 SUBST (XEXP (x, 0), XEXP (x, 1));
3538 SUBST (XEXP (x, 1), temp);
3539 }
3540
3541 /* If this is a PLUS, MINUS, or MULT, and the first operand is the
3542 sign extension of a PLUS with a constant, reverse the order of the sign
3543 extension and the addition. Note that this is not the same as the original
3544 code, but overflow is undefined for signed values. Also note that the
3545 PLUS will have been partially moved "inside" the sign-extension, so that
3546 the first operand of X will really look like:
3547 (ashiftrt (plus (ashift A C4) C5) C4).
3548 We convert this to
3549 (plus (ashiftrt (ashift A C4) C4) (ashiftrt C5 C4))
3550 and replace the first operand of X with that expression. Later parts
3551 of this function may simplify the expression further.
3552
3553 For example, if we start with (mult (sign_extend (plus A C1)) C2),
3554 we swap the SIGN_EXTEND and PLUS. Later code will apply the
3555 distributive law to produce (plus (mult (sign_extend A) C2) C3), where C3 is C1 * C2.
3556
3557 We do this to simplify address expressions. */
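/* Numerical check in SImode with C4 = 24 and C5 = (5 << 24):
   (ashiftrt (plus (ashift A 24) C5) 24) and
   (plus (ashiftrt (ashift A 24) 24) 5) agree whenever the signed
   addition does not overflow, and signed overflow is undefined
   anyway. */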
3558
3559 if ((code == PLUS || code == MINUS || code == MULT)
3560 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3561 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
3562 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
3563 && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
3564 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3565 && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
3566 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3567 && (temp = simplify_binary_operation (ASHIFTRT, mode,
3568 XEXP (XEXP (XEXP (x, 0), 0), 1),
3569 XEXP (XEXP (x, 0), 1))) != 0)
3570 {
3571 rtx new
3572 = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3573 XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
3574 INTVAL (XEXP (XEXP (x, 0), 1)));
3575
3576 new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
3577 INTVAL (XEXP (XEXP (x, 0), 1)));
3578
3579 SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
3580 }
3581
3582 /* If this is a simple operation applied to an IF_THEN_ELSE, try
3583 applying it to the arms of the IF_THEN_ELSE. This often simplifies
3584 things. Check for cases where both arms are testing the same
3585 condition.
3586
3587 Don't do anything if all operands are very simple. */
3588
3589 if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c'
3590 || GET_RTX_CLASS (code) == '<')
3591 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3592 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3593 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3594 == 'o')))
3595 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'
3596 && ! (GET_CODE (XEXP (x, 1)) == SUBREG
3597 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1))))
3598 == 'o')))))
3599 || (GET_RTX_CLASS (code) == '1'
3600 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3601 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3602 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3603 == 'o'))))))
3604 {
3605 rtx cond, true, false;
3606
3607 cond = if_then_else_cond (x, &true, &false);
3608 if (cond != 0
3609 /* If everything is a comparison, what we have is highly unlikely
3610 to be simpler, so don't use it. */
3611 && ! (GET_RTX_CLASS (code) == '<'
3612 && (GET_RTX_CLASS (GET_CODE (true)) == '<'
3613 || GET_RTX_CLASS (GET_CODE (false)) == '<')))
3614 {
3615 rtx cop1 = const0_rtx;
3616 enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
3617
3618 if (cond_code == NE && GET_RTX_CLASS (GET_CODE (cond)) == '<')
3619 return x;
3620
3621 /* Simplify the alternative arms; this may collapse the true and
3622 false arms to store-flag values. */
3623 true = subst (true, pc_rtx, pc_rtx, 0, 0);
3624 false = subst (false, pc_rtx, pc_rtx, 0, 0);
3625
3626 /* If true and false are not general_operands, an if_then_else
3627 is unlikely to be simpler. */
3628 if (general_operand (true, VOIDmode)
3629 && general_operand (false, VOIDmode))
3630 {
3631 /* Restarting if we generate a store-flag expression will cause
3632 us to loop. Just drop through in this case. */
3633
3634 /* If the result values are STORE_FLAG_VALUE and zero, we can
3635 just make the comparison operation. */
3636 if (true == const_true_rtx && false == const0_rtx)
3637 x = gen_binary (cond_code, mode, cond, cop1);
3638 else if (true == const0_rtx && false == const_true_rtx)
3639 x = gen_binary (reverse_condition (cond_code),
3640 mode, cond, cop1);
3641
3642 /* Likewise, we can make the negate of a comparison operation
3643 if the result values are - STORE_FLAG_VALUE and zero. */
3644 else if (GET_CODE (true) == CONST_INT
3645 && INTVAL (true) == - STORE_FLAG_VALUE
3646 && false == const0_rtx)
3647 x = gen_unary (NEG, mode, mode,
3648 gen_binary (cond_code, mode, cond, cop1));
3649 else if (GET_CODE (false) == CONST_INT
3650 && INTVAL (false) == - STORE_FLAG_VALUE
3651 && true == const0_rtx)
3652 x = gen_unary (NEG, mode, mode,
3653 gen_binary (reverse_condition (cond_code),
3654 mode, cond, cop1));
3655 else
3656 return gen_rtx_IF_THEN_ELSE (mode,
3657 gen_binary (cond_code, VOIDmode,
3658 cond, cop1),
3659 true, false);
3660
3661 code = GET_CODE (x);
3662 op0_mode = VOIDmode;
3663 }
3664 }
3665 }
3666
3667 /* Try to fold this expression in case we have constants that weren't
3668 present before. */
3669 temp = 0;
3670 switch (GET_RTX_CLASS (code))
3671 {
3672 case '1':
3673 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
3674 break;
3675 case '<':
3676 {
3677 enum machine_mode cmp_mode = GET_MODE (XEXP (x, 0));
3678 if (cmp_mode == VOIDmode)
3679 cmp_mode = GET_MODE (XEXP (x, 1));
3680 temp = simplify_relational_operation (code, cmp_mode,
3681 XEXP (x, 0), XEXP (x, 1));
3682 }
3683 #ifdef FLOAT_STORE_FLAG_VALUE
3684 if (temp != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3685 {
3686 if (temp == const0_rtx)
3687 temp = CONST0_RTX (mode);
3688 else
3689 temp = immed_real_const_1 (FLOAT_STORE_FLAG_VALUE (mode), mode);
3690 }
3691 #endif
3692 break;
3693 case 'c':
3694 case '2':
3695 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
3696 break;
3697 case 'b':
3698 case '3':
3699 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
3700 XEXP (x, 1), XEXP (x, 2));
3701 break;
3702 }
3703
3704 if (temp)
3705 x = temp, code = GET_CODE (temp);
3706
3707 /* First see if we can apply the inverse distributive law. */
3708 if (code == PLUS || code == MINUS
3709 || code == AND || code == IOR || code == XOR)
3710 {
3711 x = apply_distributive_law (x);
3712 code = GET_CODE (x);
3713 }
3714
3715 /* If CODE is an associative operation not otherwise handled, see if we
3716 can associate some operands. This can win if they are constants or
3717 if they are logically related (i.e. (a & b) & a). */
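/* E.g., (plus (plus X (const_int 4)) (const_int 8)) reassociates to
   (plus X (const_int 12)); for MINUS the two inner constants are
   combined with PLUS, so (minus (minus X (const_int 4)) (const_int 8))
   becomes (minus X (const_int 12)). */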
3718 if ((code == PLUS || code == MINUS
3719 || code == MULT || code == AND || code == IOR || code == XOR
3720 || code == DIV || code == UDIV
3721 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
3722 && INTEGRAL_MODE_P (mode))
3723 {
3724 if (GET_CODE (XEXP (x, 0)) == code)
3725 {
3726 rtx other = XEXP (XEXP (x, 0), 0);
3727 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
3728 rtx inner_op1 = XEXP (x, 1);
3729 rtx inner;
3730
3731 /* Make sure we pass the constant operand if any as the second
3732 one if this is a commutative operation. */
3733 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
3734 {
3735 rtx tem = inner_op0;
3736 inner_op0 = inner_op1;
3737 inner_op1 = tem;
3738 }
3739 inner = simplify_binary_operation (code == MINUS ? PLUS
3740 : code == DIV ? MULT
3741 : code == UDIV ? MULT
3742 : code,
3743 mode, inner_op0, inner_op1);
3744
3745 /* For commutative operations, try the other pair if that one
3746 didn't simplify. */
3747 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
3748 {
3749 other = XEXP (XEXP (x, 0), 1);
3750 inner = simplify_binary_operation (code, mode,
3751 XEXP (XEXP (x, 0), 0),
3752 XEXP (x, 1));
3753 }
3754
3755 if (inner)
3756 return gen_binary (code, mode, other, inner);
3757 }
3758 }
3759
3760 /* A little bit of algebraic simplification here. */
3761 switch (code)
3762 {
3763 case MEM:
3764 /* Ensure that our address has any ASHIFTs converted to MULT in case
3765 address-recognizing predicates are called later. */
3766 temp = make_compound_operation (XEXP (x, 0), MEM);
3767 SUBST (XEXP (x, 0), temp);
3768 break;
3769
3770 case SUBREG:
3771 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
3772 is paradoxical. If we can't do that safely, then it becomes
3773 something nonsensical so that this combination won't take place. */
3774
3775 if (GET_CODE (SUBREG_REG (x)) == MEM
3776 && (GET_MODE_SIZE (mode)
3777 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
3778 {
3779 rtx inner = SUBREG_REG (x);
3780 int endian_offset = 0;
3781 /* Don't change the mode of the MEM
3782 if that would change the meaning of the address. */
3783 if (MEM_VOLATILE_P (SUBREG_REG (x))
3784 || mode_dependent_address_p (XEXP (inner, 0)))
3785 return gen_rtx_CLOBBER (mode, const0_rtx);
3786
3787 if (BYTES_BIG_ENDIAN)
3788 {
3789 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
3790 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
3791 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
3792 endian_offset -= (UNITS_PER_WORD
3793 - GET_MODE_SIZE (GET_MODE (inner)));
3794 }
3795 /* Note if the plus_constant doesn't make a valid address
3796 then this combination won't be accepted. */
3797 x = gen_rtx_MEM (mode,
3798 plus_constant (XEXP (inner, 0),
3799 (SUBREG_WORD (x) * UNITS_PER_WORD
3800 + endian_offset)));
3801 MEM_COPY_ATTRIBUTES (x, inner);
3802 return x;
3803 }
3804
3805 /* If we are in a SET_DEST, these other cases can't apply. */
3806 if (in_dest)
3807 return x;
3808
3809 /* Changing mode twice with SUBREG => just change it once,
3810 or not at all if changing back to starting mode. */
3811 if (GET_CODE (SUBREG_REG (x)) == SUBREG)
3812 {
3813 if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
3814 && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
3815 return SUBREG_REG (SUBREG_REG (x));
3816
3817 SUBST_INT (SUBREG_WORD (x),
3818 SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
3819 SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
3820 }
3821
3822 /* SUBREG of a hard register => just change the register number
3823 and/or mode. If the hard register is not valid in that mode,
3824 suppress this combination. If the hard register is the stack,
3825 frame, or argument pointer, leave this as a SUBREG. */
3826
3827 if (GET_CODE (SUBREG_REG (x)) == REG
3828 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
3829 && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM
3830 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3831 && REGNO (SUBREG_REG (x)) != HARD_FRAME_POINTER_REGNUM
3832 #endif
3833 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3834 && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
3835 #endif
3836 && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
3837 {
3838 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
3839 mode))
3840 return gen_rtx_REG (mode,
3841 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
3842 else
3843 return gen_rtx_CLOBBER (mode, const0_rtx);
3844 }
3845
3846 /* For a constant, try to pick up the part we want. Handle a full
3847 word and low-order part. Only do this if we are narrowing
3848 the constant; if it is being widened, we have no idea what
3849 the extra bits will have been set to. */
3850
3851 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
3852 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3853 && GET_MODE_SIZE (op0_mode) > UNITS_PER_WORD
3854 && GET_MODE_CLASS (mode) == MODE_INT)
3855 {
3856 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
3857 0, op0_mode);
3858 if (temp)
3859 return temp;
3860 }
3861
3862 /* If we want a subreg of a constant, at offset 0,
3863 take the low bits. On a little-endian machine, that's
3864 always valid. On a big-endian machine, it's valid
3865 only if the constant's mode fits in one word. Note that we
3866 cannot use subreg_lowpart_p since SUBREG_REG may be VOIDmode. */
3867 if (CONSTANT_P (SUBREG_REG (x))
3868 && ((GET_MODE_SIZE (op0_mode) <= UNITS_PER_WORD
3869 || ! WORDS_BIG_ENDIAN)
3870 ? SUBREG_WORD (x) == 0
3871 : (SUBREG_WORD (x)
3872 == ((GET_MODE_SIZE (op0_mode)
3873 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
3874 / UNITS_PER_WORD)))
3875 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (op0_mode)
3876 && (! WORDS_BIG_ENDIAN
3877 || GET_MODE_BITSIZE (op0_mode) <= BITS_PER_WORD))
3878 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3879
3880 /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
3881 since we are saying that the high bits don't matter. */
3882 if (CONSTANT_P (SUBREG_REG (x)) && GET_MODE (SUBREG_REG (x)) == VOIDmode
3883 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (op0_mode))
3884 {
3885 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD
3886 && (WORDS_BIG_ENDIAN || SUBREG_WORD (x) != 0))
3887 return operand_subword (SUBREG_REG (x), SUBREG_WORD (x), 0, mode);
3888 return SUBREG_REG (x);
3889 }
3890
3891 /* Note that we cannot do any narrowing for non-constants since
3892 we might have been counting on using the fact that some bits were
3893 zero. We now do this in the SET. */
3894
3895 break;
3896
3897 case NOT:
3898 /* (not (plus X -1)) can become (neg X). */
3899 if (GET_CODE (XEXP (x, 0)) == PLUS
3900 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
3901 return gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
3902
3903 /* Similarly, (not (neg X)) is (plus X -1). */
3904 if (GET_CODE (XEXP (x, 0)) == NEG)
3905 return gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0),
3906 constm1_rtx);
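/* Both rewrites follow from the two's-complement identity
   ~Y == -Y - 1: hence ~(X + (-1)) == -X and ~(-X) == X + (-1). */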
3907
3908 /* (not (xor X C)) for C constant is (xor X D) with D = ~C. */
3909 if (GET_CODE (XEXP (x, 0)) == XOR
3910 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3911 && (temp = simplify_unary_operation (NOT, mode,
3912 XEXP (XEXP (x, 0), 1),
3913 mode)) != 0)
3914 return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp);
3915
3916 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
3917 other than 1, but that is not valid. We could do a similar
3918 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3919 but this doesn't seem common enough to bother with. */
3920 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3921 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3922 return gen_rtx_ROTATE (mode, gen_unary (NOT, mode, mode, const1_rtx),
3923 XEXP (XEXP (x, 0), 1));
3924
3925 if (GET_CODE (XEXP (x, 0)) == SUBREG
3926 && subreg_lowpart_p (XEXP (x, 0))
3927 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3928 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3929 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3930 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3931 {
3932 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3933
3934 x = gen_rtx_ROTATE (inner_mode,
3935 gen_unary (NOT, inner_mode, inner_mode,
3936 const1_rtx),
3937 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
3938 return gen_lowpart_for_combine (mode, x);
3939 }
3940
3941 /* If STORE_FLAG_VALUE is -1, (not (comparison foo bar)) can be done by
3942 reversing the comparison code if valid. */
3943 if (STORE_FLAG_VALUE == -1
3944 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3945 && reversible_comparison_p (XEXP (x, 0)))
3946 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3947 mode, XEXP (XEXP (x, 0), 0),
3948 XEXP (XEXP (x, 0), 1));
3949
3950 /* (ashiftrt foo C) where C is the number of bits in FOO minus 1
3951 is (lt foo (const_int 0)) if STORE_FLAG_VALUE is -1, so we can
3952 perform the above simplification. */
3953
3954 if (STORE_FLAG_VALUE == -1
3955 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3957 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3958 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
3959 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
3960
3961 /* Apply De Morgan's laws to reduce the number of patterns for machines
3962 with negating logical insns (and-not, nand, etc.). If result has
3963 only one NOT, put it first, since that is how the patterns are
3964 coded. */
3965
3966 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3967 {
3968 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3969 enum machine_mode op_mode;
3970
3971 op_mode = GET_MODE (in1);
3972 in1 = gen_unary (NOT, op_mode, op_mode, in1);
3973
3974 op_mode = GET_MODE (in2);
3975 if (op_mode == VOIDmode)
3976 op_mode = mode;
3977 in2 = gen_unary (NOT, op_mode, op_mode, in2);
3978
3979 if (GET_CODE (in2) == NOT && GET_CODE (in1) != NOT)
3980 {
3981 rtx tem = in2;
3982 in2 = in1; in1 = tem;
3983 }
3984
3985 return gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3986 mode, in1, in2);
3987 }
3988 break;
3989
3990 case NEG:
3991 /* (neg (plus X 1)) can become (not X). */
3992 if (GET_CODE (XEXP (x, 0)) == PLUS
3993 && XEXP (XEXP (x, 0), 1) == const1_rtx)
3994 return gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
3995
3996 /* Similarly, (neg (not X)) is (plus X 1). */
3997 if (GET_CODE (XEXP (x, 0)) == NOT)
3998 return plus_constant (XEXP (XEXP (x, 0), 0), 1);
3999
4000 /* (neg (minus X Y)) can become (minus Y X). */
4001 if (GET_CODE (XEXP (x, 0)) == MINUS
4002 && (! FLOAT_MODE_P (mode)
4003 /* x-y != -(y-x) with IEEE floating point. */
4004 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4005 || flag_fast_math))
4006 return gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
4007 XEXP (XEXP (x, 0), 0));
4008
4009 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
4010 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
4011 && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
4012 return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
4013
4014 /* NEG commutes with ASHIFT since it is multiplication. Only do this
4015 if we can then eliminate the NEG (e.g.,
4016 if the operand is a constant). */
4017
4018 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
4019 {
4020 temp = simplify_unary_operation (NEG, mode,
4021 XEXP (XEXP (x, 0), 0), mode);
4022 if (temp)
4023 {
4024 SUBST (XEXP (XEXP (x, 0), 0), temp);
4025 return XEXP (x, 0);
4026 }
4027 }
4028
4029 temp = expand_compound_operation (XEXP (x, 0));
4030
4031 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
4032 replaced by (lshiftrt X C). This will convert
4033 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
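/* E.g., in SImode (ashiftrt X 31) evaluates to 0 or -1 according to
   the sign bit, so its negation, 0 or 1, is exactly (lshiftrt X 31). */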
4034
4035 if (GET_CODE (temp) == ASHIFTRT
4036 && GET_CODE (XEXP (temp, 1)) == CONST_INT
4037 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
4038 return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
4039 INTVAL (XEXP (temp, 1)));
4040
4041 /* If X has only a single bit that might be nonzero, say, bit I, convert
4042 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
4043 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
4044 (sign_extract X 1 Y). But only do this if TEMP isn't a register
4045 or a SUBREG of one since we'd be making the expression more
4046 complex if it was just a register. */
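/* E.g., if only bit 0 of TEMP can be nonzero in SImode (I = 0, so
   C - I = 31), (ashiftrt (ashift TEMP 31) 31) produces 0 or -1,
   which is (neg TEMP) for TEMP in {0, 1}. */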
4047
4048 if (GET_CODE (temp) != REG
4049 && ! (GET_CODE (temp) == SUBREG
4050 && GET_CODE (SUBREG_REG (temp)) == REG)
4051 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
4052 {
4053 rtx temp1 = simplify_shift_const
4054 (NULL_RTX, ASHIFTRT, mode,
4055 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
4056 GET_MODE_BITSIZE (mode) - 1 - i),
4057 GET_MODE_BITSIZE (mode) - 1 - i);
4058
4059 /* If all we did was surround TEMP with the two shifts, we
4060 haven't improved anything, so don't use it. Otherwise,
4061 we are better off with TEMP1. */
4062 if (GET_CODE (temp1) != ASHIFTRT
4063 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
4064 || XEXP (XEXP (temp1, 0), 0) != temp)
4065 return temp1;
4066 }
4067 break;
4068
4069 case TRUNCATE:
4070 /* We can't handle truncation to a partial integer mode here
4071 because we don't know the real bitsize of the partial
4072 integer mode. */
4073 if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
4074 break;
4075
4076 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4077 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
4078 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
4079 SUBST (XEXP (x, 0),
4080 force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
4081 GET_MODE_MASK (mode), NULL_RTX, 0));
4082
4083 /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI. */
4084 if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4085 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4086 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
4087 return XEXP (XEXP (x, 0), 0);
4088
4089 /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
4090 (OP:SI foo:SI) if OP is NEG or ABS. */
4091 if ((GET_CODE (XEXP (x, 0)) == ABS
4092 || GET_CODE (XEXP (x, 0)) == NEG)
4093 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SIGN_EXTEND
4094 || GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND)
4095 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
4096 return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
4097 XEXP (XEXP (XEXP (x, 0), 0), 0));
4098
4099 /* (truncate:SI (subreg:DI (truncate:SI X) 0)) is
4100 (truncate:SI x). */
4101 if (GET_CODE (XEXP (x, 0)) == SUBREG
4102 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == TRUNCATE
4103 && subreg_lowpart_p (XEXP (x, 0)))
4104 return SUBREG_REG (XEXP (x, 0));
4105
4106 /* If we know that the value is already truncated, we can
4107 replace the TRUNCATE with a SUBREG if TRULY_NOOP_TRUNCATION
4108 is nonzero for the corresponding modes. But don't do this
4109 for an (LSHIFTRT (MULT ...)) since this will cause problems
4110 with the umulXi3_highpart patterns. */
4111 if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
4112 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
4113 && num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4114 >= GET_MODE_BITSIZE (mode) + 1
4115 && ! (GET_CODE (XEXP (x, 0)) == LSHIFTRT
4116 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT))
4117 return gen_lowpart_for_combine (mode, XEXP (x, 0));
4118
4119 /* A truncate of a comparison can be replaced with a subreg if
4120 STORE_FLAG_VALUE permits. This is like the previous test,
4121 but it works even if the comparison is done in a mode larger
4122 than HOST_BITS_PER_WIDE_INT. */
4123 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4124 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4125 && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0)
4126 return gen_lowpart_for_combine (mode, XEXP (x, 0));
4127
4128 /* Similarly, a truncate of a register whose value is a
4129 comparison can be replaced with a subreg if STORE_FLAG_VALUE
4130 permits. */
4131 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4132 && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
4133 && (temp = get_last_value (XEXP (x, 0)))
4134 && GET_RTX_CLASS (GET_CODE (temp)) == '<')
4135 return gen_lowpart_for_combine (mode, XEXP (x, 0));
4136
4137 break;
4138
4139 case FLOAT_TRUNCATE:
4140 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
4141 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
4142 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
4143 return XEXP (XEXP (x, 0), 0);
4144
4145 /* (float_truncate:SF (OP:DF (float_extend:DF foo:sf))) is
4146 (OP:SF foo:SF) if OP is NEG or ABS. */
4147 if ((GET_CODE (XEXP (x, 0)) == ABS
4148 || GET_CODE (XEXP (x, 0)) == NEG)
4149 && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
4150 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
4151 return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
4152 XEXP (XEXP (XEXP (x, 0), 0), 0));
4153
4154 /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
4155 is (float_truncate:SF x). */
4156 if (GET_CODE (XEXP (x, 0)) == SUBREG
4157 && subreg_lowpart_p (XEXP (x, 0))
4158 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
4159 return SUBREG_REG (XEXP (x, 0));
4160 break;
4161
4162 #ifdef HAVE_cc0
4163 case COMPARE:
4164 /* Convert (compare FOO (const_int 0)) to FOO. This is only valid
4165 when using cc0 (hence the #ifdef); otherwise we must leave it as a
4166 COMPARE so we can distinguish it from a register-register copy. */
4167 if (XEXP (x, 1) == const0_rtx)
4168 return XEXP (x, 0);
4169
4170 /* In IEEE floating point, x-0 is not the same as x. */
4171 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4172 || ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
4173 || flag_fast_math)
4174 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
4175 return XEXP (x, 0);
4176 break;
4177 #endif
4178
4179 case CONST:
4180 /* (const (const X)) can become (const X). Do it this way rather than
4181 returning the inner CONST since CONST can be shared with a
4182 REG_EQUAL note. */
4183 if (GET_CODE (XEXP (x, 0)) == CONST)
4184 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4185 break;
4186
4187 #ifdef HAVE_lo_sum
4188 case LO_SUM:
4189 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
4190 can add in an offset. find_split_point will split this address up
4191 again if it doesn't match. */
4192 if (GET_CODE (XEXP (x, 0)) == HIGH
4193 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
4194 return XEXP (x, 1);
4195 break;
4196 #endif
4197
4198 case PLUS:
4199 /* If we have (plus (plus (A const) B)), associate it so that CONST is
4200 outermost. That's because that's the way indexed addresses are
4201 supposed to appear. This code used to check many more cases, but
4202 they are now checked elsewhere. */
4203 if (GET_CODE (XEXP (x, 0)) == PLUS
4204 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
4205 return gen_binary (PLUS, mode,
4206 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
4207 XEXP (x, 1)),
4208 XEXP (XEXP (x, 0), 1));
4209
4210 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
4211 when c is (const_int (pow2 / 2)) is a sign extension of a
4212 bit-field and can be replaced by either a sign_extend or a
4213 sign_extract. The `and' may be a zero_extend and the two
4214 <c>, -<c> constants may be reversed. */
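/* E.g., in SImode the classic byte sign-extension idiom
   (plus (xor (and X (const_int 255)) (const_int 128)) (const_int -128))
   matches with i = 7 and becomes
   (ashiftrt (ashift X (const_int 24)) (const_int 24)). */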
4215 if (GET_CODE (XEXP (x, 0)) == XOR
4216 && GET_CODE (XEXP (x, 1)) == CONST_INT
4217 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4218 && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1))
4219 && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
4220 || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
4221 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4222 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
4223 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
4224 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
4225 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
4226 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
4227 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
4228 == (unsigned int) i + 1))))
4229 return simplify_shift_const
4230 (NULL_RTX, ASHIFTRT, mode,
4231 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4232 XEXP (XEXP (XEXP (x, 0), 0), 0),
4233 GET_MODE_BITSIZE (mode) - (i + 1)),
4234 GET_MODE_BITSIZE (mode) - (i + 1));
4235
4236 /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
4237 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
4238 is 1. This produces better code than the alternative immediately
4239 below. */
4240 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4241 && reversible_comparison_p (XEXP (x, 0))
4242 && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
4243 || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx)))
4244 return
4245 gen_unary (NEG, mode, mode,
4246 gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
4247 mode, XEXP (XEXP (x, 0), 0),
4248 XEXP (XEXP (x, 0), 1)));
4249
4250 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
4251 can become (ashiftrt (ashift (xor x 1) C) C) where C is
4252 the bitsize of the mode - 1. This allows simplification of
4253 "a = (b & 8) == 0;" */
4254 if (XEXP (x, 1) == constm1_rtx
4255 && GET_CODE (XEXP (x, 0)) != REG
4256 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
4257 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
4258 && nonzero_bits (XEXP (x, 0), mode) == 1)
4259 return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
4260 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4261 gen_rtx_combine (XOR, mode,
4262 XEXP (x, 0), const1_rtx),
4263 GET_MODE_BITSIZE (mode) - 1),
4264 GET_MODE_BITSIZE (mode) - 1);
4265
4266 /* If we are adding two things that have no bits in common, convert
4267 the addition into an IOR. This will often be further simplified,
4268 for example in cases like ((a & 1) + (a & 2)), which can
4269 become a & 3. */
4270
4271 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4272 && (nonzero_bits (XEXP (x, 0), mode)
4273 & nonzero_bits (XEXP (x, 1), mode)) == 0)
4274 {
4275 /* Try to simplify the expression further. */
4276 rtx tor = gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
4277 temp = combine_simplify_rtx (tor, mode, last, in_dest);
4278
4279 /* If we could, great. If not, do not go ahead with the IOR
4280 replacement, since PLUS appears in many special purpose
4281 address arithmetic instructions. */
4282 if (GET_CODE (temp) != CLOBBER && temp != tor)
4283 return temp;
4284 }
4285 break;
4286
4287 case MINUS:
4288 /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
4289 by reversing the comparison code if valid. */
4290 if (STORE_FLAG_VALUE == 1
4291 && XEXP (x, 0) == const1_rtx
4292 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
4293 && reversible_comparison_p (XEXP (x, 1)))
4294 return gen_binary (reverse_condition (GET_CODE (XEXP (x, 1))), mode,
4295 XEXP (XEXP (x, 1), 0),
4296 XEXP (XEXP (x, 1), 1));
4297
4298 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
4299 (and <foo> (const_int pow2-1)) */
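/* E.g., (minus X (and X (const_int -8))) strips from X the part
   rounded down to a multiple of 8, leaving (and X (const_int 7)). */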
4300 if (GET_CODE (XEXP (x, 1)) == AND
4301 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
4302 && exact_log2 (-INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
4303 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
4304 return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
4305 -INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
4306
4307 /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
4308 integers. */
4309 if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
4310 return gen_binary (MINUS, mode,
4311 gen_binary (MINUS, mode, XEXP (x, 0),
4312 XEXP (XEXP (x, 1), 0)),
4313 XEXP (XEXP (x, 1), 1));
4314 break;
4315
4316 case MULT:
4317 /* If we have (mult (plus A B) C), apply the distributive law and then
4318 the inverse distributive law to see if things simplify. This
4319 occurs mostly in addresses, often when unrolling loops. */
4320
4321 if (GET_CODE (XEXP (x, 0)) == PLUS)
4322 {
4323 x = apply_distributive_law
4324 (gen_binary (PLUS, mode,
4325 gen_binary (MULT, mode,
4326 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4327 gen_binary (MULT, mode,
4328 XEXP (XEXP (x, 0), 1),
4329 copy_rtx (XEXP (x, 1)))));
4330
4331 if (GET_CODE (x) != MULT)
4332 return x;
4333 }
4334 break;
4335
4336 case UDIV:
4337 /* If this is a divide by a power of two, treat it as a shift if
4338 its first operand is a shift. */
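/* E.g., (udiv (lshiftrt X (const_int 2)) (const_int 8)) becomes a
   further logical right shift by exact_log2 (8) = 3, which
   simplify_shift_const merges into (lshiftrt X (const_int 5)). */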
4339 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4340 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
4341 && (GET_CODE (XEXP (x, 0)) == ASHIFT
4342 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
4343 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
4344 || GET_CODE (XEXP (x, 0)) == ROTATE
4345 || GET_CODE (XEXP (x, 0)) == ROTATERT))
4346 return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
4347 break;
4348
4349 case EQ: case NE:
4350 case GT: case GTU: case GE: case GEU:
4351 case LT: case LTU: case LE: case LEU:
4352 /* If the first operand is a condition code, we can't do anything
4353 with it. */
4354 if (GET_CODE (XEXP (x, 0)) == COMPARE
4355 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
4356 #ifdef HAVE_cc0
4357 && XEXP (x, 0) != cc0_rtx
4358 #endif
4359 ))
4360 {
4361 rtx op0 = XEXP (x, 0);
4362 rtx op1 = XEXP (x, 1);
4363 enum rtx_code new_code;
4364
4365 if (GET_CODE (op0) == COMPARE)
4366 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4367
4368 /* Simplify our comparison, if possible. */
4369 new_code = simplify_comparison (code, &op0, &op1);
4370
4371 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
4372 if only the low-order bit is possibly nonzero in X (such as when
4373 X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
4374 (xor X 1) or (minus 1 X); we use the former. Finally, if X is
4375 known to be either 0 or -1, NE becomes a NEG and EQ becomes
4376 (plus X 1).
4377
4378 Remove any ZERO_EXTRACT we made when thinking this was a
4379 comparison. It may now be simpler to use, e.g., an AND. If a
4380 ZERO_EXTRACT is indeed appropriate, it will be placed back by
4381 the call to make_compound_operation in the SET case. */
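/* E.g., if X is (zero_extract Y (const_int 1) (const_int 3)), only
   the low-order bit of X can be set, so with STORE_FLAG_VALUE of 1,
   (ne X (const_int 0)) is X itself and (eq X (const_int 0)) is
   (xor X (const_int 1)). */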
4382
4383 if (STORE_FLAG_VALUE == 1
4384 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4385 && op1 == const0_rtx
4386 && mode == GET_MODE (op0)
4387 && nonzero_bits (op0, mode) == 1)
4388 return gen_lowpart_for_combine (mode,
4389 expand_compound_operation (op0));
4390
4391 else if (STORE_FLAG_VALUE == 1
4392 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4393 && op1 == const0_rtx
4394 && mode == GET_MODE (op0)
4395 && (num_sign_bit_copies (op0, mode)
4396 == GET_MODE_BITSIZE (mode)))
4397 {
4398 op0 = expand_compound_operation (op0);
4399 return gen_unary (NEG, mode, mode,
4400 gen_lowpart_for_combine (mode, op0));
4401 }
4402
4403 else if (STORE_FLAG_VALUE == 1
4404 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4405 && op1 == const0_rtx
4406 && mode == GET_MODE (op0)
4407 && nonzero_bits (op0, mode) == 1)
4408 {
4409 op0 = expand_compound_operation (op0);
4410 return gen_binary (XOR, mode,
4411 gen_lowpart_for_combine (mode, op0),
4412 const1_rtx);
4413 }
4414
4415 else if (STORE_FLAG_VALUE == 1
4416 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4417 && op1 == const0_rtx
4418 && mode == GET_MODE (op0)
4419 && (num_sign_bit_copies (op0, mode)
4420 == GET_MODE_BITSIZE (mode)))
4421 {
4422 op0 = expand_compound_operation (op0);
4423 return plus_constant (gen_lowpart_for_combine (mode, op0), 1);
4424 }
4425
4426 /* If STORE_FLAG_VALUE is -1, we have cases similar to
4427 those above. */
4428 if (STORE_FLAG_VALUE == -1
4429 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4430 && op1 == const0_rtx
4431 && (num_sign_bit_copies (op0, mode)
4432 == GET_MODE_BITSIZE (mode)))
4433 return gen_lowpart_for_combine (mode,
4434 expand_compound_operation (op0));
4435
4436 else if (STORE_FLAG_VALUE == -1
4437 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4438 && op1 == const0_rtx
4439 && mode == GET_MODE (op0)
4440 && nonzero_bits (op0, mode) == 1)
4441 {
4442 op0 = expand_compound_operation (op0);
4443 return gen_unary (NEG, mode, mode,
4444 gen_lowpart_for_combine (mode, op0));
4445 }
4446
4447 else if (STORE_FLAG_VALUE == -1
4448 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4449 && op1 == const0_rtx
4450 && mode == GET_MODE (op0)
4451 && (num_sign_bit_copies (op0, mode)
4452 == GET_MODE_BITSIZE (mode)))
4453 {
4454 op0 = expand_compound_operation (op0);
4455 return gen_unary (NOT, mode, mode,
4456 gen_lowpart_for_combine (mode, op0));
4457 }
4458
4459 /* If X is 0/1, (eq X 0) is X-1. */
4460 else if (STORE_FLAG_VALUE == -1
4461 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4462 && op1 == const0_rtx
4463 && mode == GET_MODE (op0)
4464 && nonzero_bits (op0, mode) == 1)
4465 {
4466 op0 = expand_compound_operation (op0);
4467 return plus_constant (gen_lowpart_for_combine (mode, op0), -1);
4468 }
4469
4470 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
4471 one bit that might be nonzero, we can convert (ne x 0) to
4472 (ashift x c) where C puts the bit in the sign bit. Remove any
4473 AND with STORE_FLAG_VALUE when we are done, since we are only
4474 going to test the sign bit. */
4475 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4476 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4477 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
4478 == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
4479 && op1 == const0_rtx
4480 && mode == GET_MODE (op0)
4481 && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
4482 {
4483 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
4484 expand_compound_operation (op0),
4485 GET_MODE_BITSIZE (mode) - 1 - i);
4486 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
4487 return XEXP (x, 0);
4488 else
4489 return x;
4490 }
4491
4492 /* If the code changed, return a whole new comparison. */
4493 if (new_code != code)
4494 return gen_rtx_combine (new_code, mode, op0, op1);
4495
4496 /* Otherwise, keep this operation, but maybe change its operands.
4497 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
4498 SUBST (XEXP (x, 0), op0);
4499 SUBST (XEXP (x, 1), op1);
4500 }
4501 break;
4502
4503 case IF_THEN_ELSE:
4504 return simplify_if_then_else (x);
4505
4506 case ZERO_EXTRACT:
4507 case SIGN_EXTRACT:
4508 case ZERO_EXTEND:
4509 case SIGN_EXTEND:
4510 /* If we are processing SET_DEST, we are done. */
4511 if (in_dest)
4512 return x;
4513
4514 return expand_compound_operation (x);
4515
4516 case SET:
4517 return simplify_set (x);
4518
4519 case AND:
4520 case IOR:
4521 case XOR:
4522 return simplify_logical (x, last);
4523
4524 case ABS:
4525 /* (abs (neg <foo>)) -> (abs <foo>) */
4526 if (GET_CODE (XEXP (x, 0)) == NEG)
4527 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4528
4529 /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS),
4530 do nothing. */
4531 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
4532 break;
4533
4534 /* If operand is something known to be positive, ignore the ABS. */
4535 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4536 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4537 <= HOST_BITS_PER_WIDE_INT)
4538 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4539 & ((HOST_WIDE_INT) 1
4540 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4541 == 0)))
4542 return XEXP (x, 0);
4543
4544 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
4545 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
4546 return gen_rtx_combine (NEG, mode, XEXP (x, 0));
4547
4548 break;
4549
4550 case FFS:
4551 /* (ffs (*_extend <X>)) = (ffs <X>) */
4552 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4553 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4554 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4555 break;
4556
4557 case FLOAT:
4558 /* (float (sign_extend <X>)) = (float <X>). */
4559 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4560 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4561 break;
4562
4563 case ASHIFT:
4564 case LSHIFTRT:
4565 case ASHIFTRT:
4566 case ROTATE:
4567 case ROTATERT:
4568 /* If this is a shift by a constant amount, simplify it. */
4569 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4570 return simplify_shift_const (x, code, mode, XEXP (x, 0),
4571 INTVAL (XEXP (x, 1)));
4572
4573 #ifdef SHIFT_COUNT_TRUNCATED
4574 else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG)
4575 SUBST (XEXP (x, 1),
4576 force_to_mode (XEXP (x, 1), GET_MODE (x),
4577 ((HOST_WIDE_INT) 1
4578 << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
4579 - 1,
4580 NULL_RTX, 0));
4581 #endif
4582
4583 break;
4584
4585 case VEC_SELECT:
4586 {
4587 rtx op0 = XEXP (x, 0);
4588 rtx op1 = XEXP (x, 1);
4589 int len;
4590
4591 if (GET_CODE (op1) != PARALLEL)
4592 abort ();
4593 len = XVECLEN (op1, 0);
4594 if (len == 1
4595 && GET_CODE (XVECEXP (op1, 0, 0)) == CONST_INT
4596 && GET_CODE (op0) == VEC_CONCAT)
4597 {
4598 int offset = INTVAL (XVECEXP (op1, 0, 0)) * GET_MODE_SIZE (GET_MODE (x));
4599
4600 /* Try to find the element in the VEC_CONCAT. */
4601 for (;;)
4602 {
4603 if (GET_MODE (op0) == GET_MODE (x))
4604 return op0;
4605 if (GET_CODE (op0) == VEC_CONCAT)
4606 {
4607 HOST_WIDE_INT op0_size = GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)));
4608 if (offset < op0_size)
4609 op0 = XEXP (op0, 0);
4610 else
4611 {
4612 offset -= op0_size;
4613 op0 = XEXP (op0, 1);
4614 }
4615 }
4616 else
4617 break;
4618 }
4619 }
4620 }
4621
4622 break;
4623
4624 default:
4625 break;
4626 }
4627
4628 return x;
4629 }
4630 \f
4631 /* Simplify X, an IF_THEN_ELSE expression. Return the new expression. */
4632
4633 static rtx
4634 simplify_if_then_else (x)
4635 rtx x;
4636 {
4637 enum machine_mode mode = GET_MODE (x);
4638 rtx cond = XEXP (x, 0);
4639 rtx true = XEXP (x, 1);
4640 rtx false = XEXP (x, 2);
4641 enum rtx_code true_code = GET_CODE (cond);
4642 int comparison_p = GET_RTX_CLASS (true_code) == '<';
4643 rtx temp;
4644 int i;
4645
4646 /* Simplify storing of the truth value. */
4647 if (comparison_p && true == const_true_rtx && false == const0_rtx)
4648 return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1));
4649
4650 /* Also when the truth value has to be reversed. */
4651 if (comparison_p && reversible_comparison_p (cond)
4652 && true == const0_rtx && false == const_true_rtx)
4653 return gen_binary (reverse_condition (true_code),
4654 mode, XEXP (cond, 0), XEXP (cond, 1));
4655
4656 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
4657 in it is being compared against certain values. Get the true and false
4658 comparisons and see if that says anything about the value of each arm. */
4659
4660 if (comparison_p && reversible_comparison_p (cond)
4661 && GET_CODE (XEXP (cond, 0)) == REG)
4662 {
4663 HOST_WIDE_INT nzb;
4664 rtx from = XEXP (cond, 0);
4665 enum rtx_code false_code = reverse_condition (true_code);
4666 rtx true_val = XEXP (cond, 1);
4667 rtx false_val = true_val;
4668 int swapped = 0;
4669
4670 /* If FALSE_CODE is EQ, swap the codes and arms. */
4671
4672 if (false_code == EQ)
4673 {
4674 swapped = 1, true_code = EQ, false_code = NE;
4675 temp = true, true = false, false = temp;
4676 }
4677
4678 /* If we are comparing against zero and the expression being tested has
4679 only a single bit that might be nonzero, that is its value when it is
4680 not equal to zero. Similarly if it is known to be -1 or 0. */
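/* E.g., for (if_then_else (eq R (const_int 0)) A B) where only bit 2
   of register R can be nonzero, R is known to be 0 within arm A and
   4 within arm B, and known_cond can fold each arm accordingly. */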
4681
4682 if (true_code == EQ && true_val == const0_rtx
4683 && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
4684 false_code = EQ, false_val = GEN_INT (nzb);
4685 else if (true_code == EQ && true_val == const0_rtx
4686 && (num_sign_bit_copies (from, GET_MODE (from))
4687 == GET_MODE_BITSIZE (GET_MODE (from))))
4688 false_code = EQ, false_val = constm1_rtx;
4689
4690 /* Now simplify an arm if we know the value of the register in the
4691 branch and it is used in the arm. Be careful due to the potential
4692 of locally-shared RTL. */
4693
4694 if (reg_mentioned_p (from, true))
4695 true = subst (known_cond (copy_rtx (true), true_code, from, true_val),
4696 pc_rtx, pc_rtx, 0, 0);
4697 if (reg_mentioned_p (from, false))
4698 false = subst (known_cond (copy_rtx (false), false_code,
4699 from, false_val),
4700 pc_rtx, pc_rtx, 0, 0);
4701
4702 SUBST (XEXP (x, 1), swapped ? false : true);
4703 SUBST (XEXP (x, 2), swapped ? true : false);
4704
4705 true = XEXP (x, 1), false = XEXP (x, 2), true_code = GET_CODE (cond);
4706 }
4707
/* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
reversed, do so to avoid needing two sets of patterns for
subtract-and-branch insns. Similarly, reverse if we have a constant in
the true arm, if the false arm is the same as the first operand of the
comparison, or if the false arm is more complicated than the true arm. */
4713
4714 if (comparison_p && reversible_comparison_p (cond)
4715 && (true == pc_rtx
4716 || (CONSTANT_P (true)
4717 && GET_CODE (false) != CONST_INT && false != pc_rtx)
4718 || true == const0_rtx
4719 || (GET_RTX_CLASS (GET_CODE (true)) == 'o'
4720 && GET_RTX_CLASS (GET_CODE (false)) != 'o')
4721 || (GET_CODE (true) == SUBREG
4722 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true))) == 'o'
4723 && GET_RTX_CLASS (GET_CODE (false)) != 'o')
4724 || reg_mentioned_p (true, false)
4725 || rtx_equal_p (false, XEXP (cond, 0))))
4726 {
4727 true_code = reverse_condition (true_code);
4728 SUBST (XEXP (x, 0),
4729 gen_binary (true_code, GET_MODE (cond), XEXP (cond, 0),
4730 XEXP (cond, 1)));
4731
4732 SUBST (XEXP (x, 1), false);
4733 SUBST (XEXP (x, 2), true);
4734
4735 temp = true, true = false, false = temp, cond = XEXP (x, 0);
4736
4737 /* It is possible that the conditional has been simplified out. */
4738 true_code = GET_CODE (cond);
4739 comparison_p = GET_RTX_CLASS (true_code) == '<';
4740 }
4741
4742 /* If the two arms are identical, we don't need the comparison. */
4743
4744 if (rtx_equal_p (true, false) && ! side_effects_p (cond))
4745 return true;
4746
4747 /* Convert a == b ? b : a to "a". */
4748 if (true_code == EQ && ! side_effects_p (cond)
4749 && (! FLOAT_MODE_P (mode) || flag_fast_math)
4750 && rtx_equal_p (XEXP (cond, 0), false)
4751 && rtx_equal_p (XEXP (cond, 1), true))
4752 return false;
4753 else if (true_code == NE && ! side_effects_p (cond)
4754 && (! FLOAT_MODE_P (mode) || flag_fast_math)
4755 && rtx_equal_p (XEXP (cond, 0), true)
4756 && rtx_equal_p (XEXP (cond, 1), false))
4757 return true;
4758
4759 /* Look for cases where we have (abs x) or (neg (abs X)). */
4760
4761 if (GET_MODE_CLASS (mode) == MODE_INT
4762 && GET_CODE (false) == NEG
4763 && rtx_equal_p (true, XEXP (false, 0))
4764 && comparison_p
4765 && rtx_equal_p (true, XEXP (cond, 0))
4766 && ! side_effects_p (true))
4767 switch (true_code)
4768 {
4769 case GT:
4770 case GE:
4771 return gen_unary (ABS, mode, mode, true);
4772 case LT:
4773 case LE:
4774 return gen_unary (NEG, mode, mode, gen_unary (ABS, mode, mode, true));
4775 default:
4776 break;
4777 }
4778
4779 /* Look for MIN or MAX. */
4780
4781 if ((! FLOAT_MODE_P (mode) || flag_fast_math)
4782 && comparison_p
4783 && rtx_equal_p (XEXP (cond, 0), true)
4784 && rtx_equal_p (XEXP (cond, 1), false)
4785 && ! side_effects_p (cond))
4786 switch (true_code)
4787 {
4788 case GE:
4789 case GT:
4790 return gen_binary (SMAX, mode, true, false);
4791 case LE:
4792 case LT:
4793 return gen_binary (SMIN, mode, true, false);
4794 case GEU:
4795 case GTU:
4796 return gen_binary (UMAX, mode, true, false);
4797 case LEU:
4798 case LTU:
4799 return gen_binary (UMIN, mode, true, false);
4800 default:
4801 break;
4802 }
4803
/* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
second operand is zero, this can be done as (OP Z (mult COND C2)) where
C2 = C1 * STORE_FLAG_VALUE; for example, with STORE_FLAG_VALUE == 1,
(if_then_else (ne A 0) (plus Z (const_int 4)) Z) becomes
(plus Z (mult (ne A 0) (const_int 4))). Similarly if OP has an outer
ZERO_EXTEND or SIGN_EXTEND, as long as Z is already extended (so we don't
destroy it). We can do this kind of thing in some cases when
STORE_FLAG_VALUE is neither 1 nor -1, but it isn't worth checking for. */
4810
4811 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
4812 && comparison_p && mode != VOIDmode && ! side_effects_p (x))
4813 {
4814 rtx t = make_compound_operation (true, SET);
4815 rtx f = make_compound_operation (false, SET);
4816 rtx cond_op0 = XEXP (cond, 0);
4817 rtx cond_op1 = XEXP (cond, 1);
4818 enum rtx_code op = NIL, extend_op = NIL;
4819 enum machine_mode m = mode;
4820 rtx z = 0, c1 = NULL_RTX;
4821
4822 if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
4823 || GET_CODE (t) == IOR || GET_CODE (t) == XOR
4824 || GET_CODE (t) == ASHIFT
4825 || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
4826 && rtx_equal_p (XEXP (t, 0), f))
4827 c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
4828
4829 /* If an identity-zero op is commutative, check whether there
4830 would be a match if we swapped the operands. */
4831 else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
4832 || GET_CODE (t) == XOR)
4833 && rtx_equal_p (XEXP (t, 1), f))
4834 c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
4835 else if (GET_CODE (t) == SIGN_EXTEND
4836 && (GET_CODE (XEXP (t, 0)) == PLUS
4837 || GET_CODE (XEXP (t, 0)) == MINUS
4838 || GET_CODE (XEXP (t, 0)) == IOR
4839 || GET_CODE (XEXP (t, 0)) == XOR
4840 || GET_CODE (XEXP (t, 0)) == ASHIFT
4841 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4842 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4843 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4844 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4845 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4846 && (num_sign_bit_copies (f, GET_MODE (f))
4847 > (GET_MODE_BITSIZE (mode)
4848 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
4849 {
4850 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4851 extend_op = SIGN_EXTEND;
4852 m = GET_MODE (XEXP (t, 0));
4853 }
4854 else if (GET_CODE (t) == SIGN_EXTEND
4855 && (GET_CODE (XEXP (t, 0)) == PLUS
4856 || GET_CODE (XEXP (t, 0)) == IOR
4857 || GET_CODE (XEXP (t, 0)) == XOR)
4858 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4859 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4860 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4861 && (num_sign_bit_copies (f, GET_MODE (f))
4862 > (GET_MODE_BITSIZE (mode)
4863 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
4864 {
4865 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4866 extend_op = SIGN_EXTEND;
4867 m = GET_MODE (XEXP (t, 0));
4868 }
4869 else if (GET_CODE (t) == ZERO_EXTEND
4870 && (GET_CODE (XEXP (t, 0)) == PLUS
4871 || GET_CODE (XEXP (t, 0)) == MINUS
4872 || GET_CODE (XEXP (t, 0)) == IOR
4873 || GET_CODE (XEXP (t, 0)) == XOR
4874 || GET_CODE (XEXP (t, 0)) == ASHIFT
4875 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4876 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4877 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4878 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4879 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4880 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4881 && ((nonzero_bits (f, GET_MODE (f))
4882 & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
4883 == 0))
4884 {
4885 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4886 extend_op = ZERO_EXTEND;
4887 m = GET_MODE (XEXP (t, 0));
4888 }
4889 else if (GET_CODE (t) == ZERO_EXTEND
4890 && (GET_CODE (XEXP (t, 0)) == PLUS
4891 || GET_CODE (XEXP (t, 0)) == IOR
4892 || GET_CODE (XEXP (t, 0)) == XOR)
4893 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4894 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4895 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4896 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4897 && ((nonzero_bits (f, GET_MODE (f))
4898 & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
4899 == 0))
4900 {
4901 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4902 extend_op = ZERO_EXTEND;
4903 m = GET_MODE (XEXP (t, 0));
4904 }
4905
4906 if (z)
4907 {
4908 temp = subst (gen_binary (true_code, m, cond_op0, cond_op1),
4909 pc_rtx, pc_rtx, 0, 0);
4910 temp = gen_binary (MULT, m, temp,
4911 gen_binary (MULT, m, c1, const_true_rtx));
4912 temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
4913 temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
4914
4915 if (extend_op != NIL)
4916 temp = gen_unary (extend_op, mode, m, temp);
4917
4918 return temp;
4919 }
4920 }
4921
/* If we have (if_then_else (ne A 0) C1 0), and either A is known to be 0
or 1 and C1 is a single bit, or A is known to be 0 or -1 and C1 is the
negation of a single bit, we can convert this operation to a shift (see
the standalone sketch after this function). We can actually do this more
generally, but it doesn't seem worth it. */
4926
4927 if (true_code == NE && XEXP (cond, 1) == const0_rtx
4928 && false == const0_rtx && GET_CODE (true) == CONST_INT
4929 && ((1 == nonzero_bits (XEXP (cond, 0), mode)
4930 && (i = exact_log2 (INTVAL (true))) >= 0)
4931 || ((num_sign_bit_copies (XEXP (cond, 0), mode)
4932 == GET_MODE_BITSIZE (mode))
4933 && (i = exact_log2 (-INTVAL (true))) >= 0)))
4934 return
4935 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4936 gen_lowpart_for_combine (mode, XEXP (cond, 0)), i);
4937
4938 return x;
4939 }
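/* A minimal standalone sketch (not part of the compiler) of the final
   transformation in simplify_if_then_else: when A is known to be 0 or 1
   and C1 is a single bit (1 << I), (ne A 0) ? C1 : 0 collapses to a
   left shift.  The helper name is hypothetical. */

static unsigned int
example_ne_to_shift (a, i)
     unsigned int a;		/* Known to be 0 or 1.  */
     int i;
{
  /* Both A << I and (A != 0 ? 1 << I : 0) yield 0 when A is 0 and
     1 << I when A is 1.  */
  return a << i;
}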
4940 \f
4941 /* Simplify X, a SET expression. Return the new expression. */
4942
4943 static rtx
4944 simplify_set (x)
4945 rtx x;
4946 {
4947 rtx src = SET_SRC (x);
4948 rtx dest = SET_DEST (x);
4949 enum machine_mode mode
4950 = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
4951 rtx other_insn;
4952 rtx *cc_use;
4953
4954 /* (set (pc) (return)) gets written as (return). */
4955 if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
4956 return src;
4957
4958 /* Now that we know for sure which bits of SRC we are using, see if we can
4959 simplify the expression for the object knowing that we only need the
4960 low-order bits. */
4961
4962 if (GET_MODE_CLASS (mode) == MODE_INT)
4963 {
4964 src = force_to_mode (src, mode, ~(HOST_WIDE_INT) 0, NULL_RTX, 0);
4965 SUBST (SET_SRC (x), src);
4966 }
4967
4968 /* If we are setting CC0 or if the source is a COMPARE, look for the use of
4969 the comparison result and try to simplify it unless we already have used
4970 undobuf.other_insn. */
4971 if ((GET_CODE (src) == COMPARE
4972 #ifdef HAVE_cc0
4973 || dest == cc0_rtx
4974 #endif
4975 )
4976 && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
4977 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
4978 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
4979 && rtx_equal_p (XEXP (*cc_use, 0), dest))
4980 {
4981 enum rtx_code old_code = GET_CODE (*cc_use);
4982 enum rtx_code new_code;
4983 rtx op0, op1;
4984 int other_changed = 0;
4985 enum machine_mode compare_mode = GET_MODE (dest);
4986
4987 if (GET_CODE (src) == COMPARE)
4988 op0 = XEXP (src, 0), op1 = XEXP (src, 1);
4989 else
4990 op0 = src, op1 = const0_rtx;
4991
4992 /* Simplify our comparison, if possible. */
4993 new_code = simplify_comparison (old_code, &op0, &op1);
4994
4995 #ifdef EXTRA_CC_MODES
4996 /* If this machine has CC modes other than CCmode, check to see if we
4997 need to use a different CC mode here. */
4998 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
4999 #endif /* EXTRA_CC_MODES */
5000
5001 #if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
5002 /* If the mode changed, we have to change SET_DEST, the mode in the
5003 compare, and the mode in the place SET_DEST is used. If SET_DEST is
5004 a hard register, just build new versions with the proper mode. If it
is a pseudo, we lose unless it is the only time we set the pseudo, in
5006 which case we can safely change its mode. */
5007 if (compare_mode != GET_MODE (dest))
5008 {
5009 unsigned int regno = REGNO (dest);
5010 rtx new_dest = gen_rtx_REG (compare_mode, regno);
5011
5012 if (regno < FIRST_PSEUDO_REGISTER
5013 || (REG_N_SETS (regno) == 1 && ! REG_USERVAR_P (dest)))
5014 {
5015 if (regno >= FIRST_PSEUDO_REGISTER)
5016 SUBST (regno_reg_rtx[regno], new_dest);
5017
5018 SUBST (SET_DEST (x), new_dest);
5019 SUBST (XEXP (*cc_use, 0), new_dest);
5020 other_changed = 1;
5021
5022 dest = new_dest;
5023 }
5024 }
5025 #endif
5026
5027 /* If the code changed, we have to build a new comparison in
5028 undobuf.other_insn. */
5029 if (new_code != old_code)
5030 {
5031 unsigned HOST_WIDE_INT mask;
5032
5033 SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
5034 dest, const0_rtx));
5035
5036 /* If the only change we made was to change an EQ into an NE or
5037 vice versa, OP0 has only one bit that might be nonzero, and OP1
5038 is zero, check if changing the user of the condition code will
5039 produce a valid insn. If it won't, we can keep the original code
5040 in that insn by surrounding our operation with an XOR. */
5041
5042 if (((old_code == NE && new_code == EQ)
5043 || (old_code == EQ && new_code == NE))
5044 && ! other_changed && op1 == const0_rtx
5045 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
5046 && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
5047 {
5048 rtx pat = PATTERN (other_insn), note = 0;
5049
5050 if ((recog_for_combine (&pat, other_insn, &note) < 0
5051 && ! check_asm_operands (pat)))
5052 {
5053 PUT_CODE (*cc_use, old_code);
5054 other_insn = 0;
5055
5056 op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask));
5057 }
5058 }
5059
5060 other_changed = 1;
5061 }
5062
5063 if (other_changed)
5064 undobuf.other_insn = other_insn;
5065
5066 #ifdef HAVE_cc0
5067 /* If we are now comparing against zero, change our source if
5068 needed. If we do not use cc0, we always have a COMPARE. */
5069 if (op1 == const0_rtx && dest == cc0_rtx)
5070 {
5071 SUBST (SET_SRC (x), op0);
5072 src = op0;
5073 }
5074 else
5075 #endif
5076
5077 /* Otherwise, if we didn't previously have a COMPARE in the
5078 correct mode, we need one. */
5079 if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
5080 {
5081 SUBST (SET_SRC (x),
5082 gen_rtx_combine (COMPARE, compare_mode, op0, op1));
5083 src = SET_SRC (x);
5084 }
5085 else
5086 {
5087 /* Otherwise, update the COMPARE if needed. */
5088 SUBST (XEXP (src, 0), op0);
5089 SUBST (XEXP (src, 1), op1);
5090 }
5091 }
5092 else
5093 {
5094 /* Get SET_SRC in a form where we have placed back any
5095 compound expressions. Then do the checks below. */
5096 src = make_compound_operation (src, SET);
5097 SUBST (SET_SRC (x), src);
5098 }
5099
5100 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
5101 and X being a REG or (subreg (reg)), we may be able to convert this to
5102 (set (subreg:m2 x) (op)).
5103
5104 We can always do this if M1 is narrower than M2 because that means that
5105 we only care about the low bits of the result.
5106
5107 However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
5108 perform a narrower operation than requested since the high-order bits will
be undefined. On machines where it is defined, this transformation is safe
5110 as long as M1 and M2 have the same number of words. */
5111
5112 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
5113 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o'
5114 && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
5115 / UNITS_PER_WORD)
5116 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5117 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
5118 #ifndef WORD_REGISTER_OPERATIONS
5119 && (GET_MODE_SIZE (GET_MODE (src))
5120 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
5121 #endif
5122 #ifdef CLASS_CANNOT_CHANGE_MODE
5123 && ! (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER
5124 && (TEST_HARD_REG_BIT
5125 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
5126 REGNO (dest)))
5127 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (src),
5128 GET_MODE (SUBREG_REG (src))))
5129 #endif
5130 && (GET_CODE (dest) == REG
5131 || (GET_CODE (dest) == SUBREG
5132 && GET_CODE (SUBREG_REG (dest)) == REG)))
5133 {
5134 SUBST (SET_DEST (x),
5135 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)),
5136 dest));
5137 SUBST (SET_SRC (x), SUBREG_REG (src));
5138
5139 src = SET_SRC (x), dest = SET_DEST (x);
5140 }
5141
5142 #ifdef LOAD_EXTEND_OP
5143 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
5144 would require a paradoxical subreg. Replace the subreg with a
5145 zero_extend to avoid the reload that would otherwise be required. */
5146
5147 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
5148 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
5149 && SUBREG_WORD (src) == 0
5150 && (GET_MODE_SIZE (GET_MODE (src))
5151 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
5152 && GET_CODE (SUBREG_REG (src)) == MEM)
5153 {
5154 SUBST (SET_SRC (x),
5155 gen_rtx_combine (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
5156 GET_MODE (src), XEXP (src, 0)));
5157
5158 src = SET_SRC (x);
5159 }
5160 #endif
5161
5162 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
5163 are comparing an item known to be 0 or -1 against 0, use a logical
5164 operation instead. Check for one of the arms being an IOR of the other
5165 arm with some value. We compute three terms to be IOR'ed together. In
5166 practice, at most two will be nonzero. Then we do the IOR's. */
5167
5168 if (GET_CODE (dest) != PC
5169 && GET_CODE (src) == IF_THEN_ELSE
5170 && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
5171 && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
5172 && XEXP (XEXP (src, 0), 1) == const0_rtx
5173 && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
5174 #ifdef HAVE_conditional_move
5175 && ! can_conditionally_move_p (GET_MODE (src))
5176 #endif
5177 && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
5178 GET_MODE (XEXP (XEXP (src, 0), 0)))
5179 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
5180 && ! side_effects_p (src))
5181 {
5182 rtx true = (GET_CODE (XEXP (src, 0)) == NE
5183 ? XEXP (src, 1) : XEXP (src, 2));
5184 rtx false = (GET_CODE (XEXP (src, 0)) == NE
5185 ? XEXP (src, 2) : XEXP (src, 1));
5186 rtx term1 = const0_rtx, term2, term3;
5187
5188 if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false))
5189 term1 = false, true = XEXP (true, 1), false = const0_rtx;
5190 else if (GET_CODE (true) == IOR
5191 && rtx_equal_p (XEXP (true, 1), false))
5192 term1 = false, true = XEXP (true, 0), false = const0_rtx;
5193 else if (GET_CODE (false) == IOR
5194 && rtx_equal_p (XEXP (false, 0), true))
5195 term1 = true, false = XEXP (false, 1), true = const0_rtx;
5196 else if (GET_CODE (false) == IOR
5197 && rtx_equal_p (XEXP (false, 1), true))
5198 term1 = true, false = XEXP (false, 0), true = const0_rtx;
5199
5200 term2 = gen_binary (AND, GET_MODE (src), XEXP (XEXP (src, 0), 0), true);
5201 term3 = gen_binary (AND, GET_MODE (src),
5202 gen_unary (NOT, GET_MODE (src), GET_MODE (src),
5203 XEXP (XEXP (src, 0), 0)),
5204 false);
5205
5206 SUBST (SET_SRC (x),
5207 gen_binary (IOR, GET_MODE (src),
5208 gen_binary (IOR, GET_MODE (src), term1, term2),
5209 term3));
5210
5211 src = SET_SRC (x);
5212 }
5213
5214 #ifdef HAVE_conditional_arithmetic
5215 /* If we have conditional arithmetic and the operand of a SET is
5216 a conditional expression, replace this with an IF_THEN_ELSE.
5217 We can either have a conditional expression or a MULT of that expression
5218 with a constant. */
5219 if ((GET_RTX_CLASS (GET_CODE (src)) == '1'
5220 || GET_RTX_CLASS (GET_CODE (src)) == '2'
5221 || GET_RTX_CLASS (GET_CODE (src)) == 'c')
5222 && (GET_RTX_CLASS (GET_CODE (XEXP (src, 0))) == '<'
5223 || (GET_CODE (XEXP (src, 0)) == MULT
5224 && GET_RTX_CLASS (GET_CODE (XEXP (XEXP (src, 0), 0))) == '<'
5225 && GET_CODE (XEXP (XEXP (src, 0), 1)) == CONST_INT)))
5226 {
5227 rtx cond = XEXP (src, 0);
5228 rtx true_val = const1_rtx;
5229 rtx false_arm, true_arm;
5230
5231 if (GET_CODE (cond) == MULT)
5232 {
5233 true_val = XEXP (cond, 1);
5234 cond = XEXP (cond, 0);
5235 }
5236
5237 if (GET_RTX_CLASS (GET_CODE (src)) == '1')
5238 {
5239 true_arm = gen_unary (GET_CODE (src), GET_MODE (src),
5240 GET_MODE (XEXP (src, 0)), true_val);
5241 false_arm = gen_unary (GET_CODE (src), GET_MODE (src),
5242 GET_MODE (XEXP (src, 0)), const0_rtx);
5243 }
5244 else
5245 {
5246 true_arm = gen_binary (GET_CODE (src), GET_MODE (src),
5247 true_val, XEXP (src, 1));
5248 false_arm = gen_binary (GET_CODE (src), GET_MODE (src),
5249 const0_rtx, XEXP (src, 1));
5250 }
5251
5252 /* Canonicalize if true_arm is the simpler one. */
5253 if (GET_RTX_CLASS (GET_CODE (true_arm)) == 'o'
5254 && GET_RTX_CLASS (GET_CODE (false_arm)) != 'o'
5255 && reversible_comparison_p (cond))
5256 {
5257 rtx temp = true_arm;
5258
5259 true_arm = false_arm;
5260 false_arm = temp;
5261
5262 cond = gen_rtx_combine (reverse_condition (GET_CODE (cond)),
5263 GET_MODE (cond), XEXP (cond, 0),
5264 XEXP (cond, 1));
5265 }
5266
5267 src = gen_rtx_combine (IF_THEN_ELSE, GET_MODE (src),
5268 gen_rtx_combine (GET_CODE (cond), VOIDmode,
5269 XEXP (cond, 0),
5270 XEXP (cond, 1)),
5271 true_arm, false_arm);
5272 SUBST (SET_SRC (x), src);
5273 }
5274 #endif
5275
5276 /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
5277 whole thing fail. */
5278 if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
5279 return src;
5280 else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
5281 return dest;
5282 else
5283 /* Convert this into a field assignment operation, if possible. */
5284 return make_field_assignment (x);
5285 }
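/* A minimal standalone sketch (not part of the compiler) of the
   IF_THEN_ELSE elimination above: when A is known to be 0 or -1 it is
   itself a bit mask, so (ne A 0) ? T : F needs no conditional move.
   The helper name is hypothetical. */

static int
example_cond_as_mask (a, t, f)
     int a;			/* Known to be 0 or -1.  */
     int t, f;
{
  /* (a & t) selects T when A is -1; (~a & f) selects F when A is 0.
     The IOR merges the two terms, at most one of which is nonzero.  */
  return (a & t) | (~a & f);
}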
5286 \f
/* Simplify X, an AND, IOR, or XOR operation, and return the simplified
5288 result. LAST is nonzero if this is the last retry. */
5289
5290 static rtx
5291 simplify_logical (x, last)
5292 rtx x;
5293 int last;
5294 {
5295 enum machine_mode mode = GET_MODE (x);
5296 rtx op0 = XEXP (x, 0);
5297 rtx op1 = XEXP (x, 1);
5298
5299 switch (GET_CODE (x))
5300 {
5301 case AND:
5302 /* Convert (A ^ B) & A to A & (~B) since the latter is often a single
5303 insn (and may simplify more). */
5304 if (GET_CODE (op0) == XOR
5305 && rtx_equal_p (XEXP (op0, 0), op1)
5306 && ! side_effects_p (op1))
5307 x = gen_binary (AND, mode,
5308 gen_unary (NOT, mode, mode, XEXP (op0, 1)), op1);
5309
5310 if (GET_CODE (op0) == XOR
5311 && rtx_equal_p (XEXP (op0, 1), op1)
5312 && ! side_effects_p (op1))
5313 x = gen_binary (AND, mode,
5314 gen_unary (NOT, mode, mode, XEXP (op0, 0)), op1);
5315
5316 /* Similarly for (~(A ^ B)) & A. */
5317 if (GET_CODE (op0) == NOT
5318 && GET_CODE (XEXP (op0, 0)) == XOR
5319 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
5320 && ! side_effects_p (op1))
5321 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);
5322
5323 if (GET_CODE (op0) == NOT
5324 && GET_CODE (XEXP (op0, 0)) == XOR
5325 && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
5326 && ! side_effects_p (op1))
5327 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
5328
5329 /* We can call simplify_and_const_int only if we don't lose
5330 any (sign) bits when converting INTVAL (op1) to
5331 "unsigned HOST_WIDE_INT". */
5332 if (GET_CODE (op1) == CONST_INT
5333 && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5334 || INTVAL (op1) > 0))
5335 {
5336 x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
5337
/* If we have (ior (and X C1) C2) and the next restart would be
the last, simplify this by making C1 as small as possible
and then exit. */
5341 if (last
5342 && GET_CODE (x) == IOR && GET_CODE (op0) == AND
5343 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5344 && GET_CODE (op1) == CONST_INT)
5345 return gen_binary (IOR, mode,
5346 gen_binary (AND, mode, XEXP (op0, 0),
5347 GEN_INT (INTVAL (XEXP (op0, 1))
5348 & ~INTVAL (op1))), op1);
5349
5350 if (GET_CODE (x) != AND)
5351 return x;
5352
5353 if (GET_RTX_CLASS (GET_CODE (x)) == 'c'
5354 || GET_RTX_CLASS (GET_CODE (x)) == '2')
5355 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
5356 }
5357
5358 /* Convert (A | B) & A to A. */
5359 if (GET_CODE (op0) == IOR
5360 && (rtx_equal_p (XEXP (op0, 0), op1)
5361 || rtx_equal_p (XEXP (op0, 1), op1))
5362 && ! side_effects_p (XEXP (op0, 0))
5363 && ! side_effects_p (XEXP (op0, 1)))
5364 return op1;
5365
5366 /* In the following group of tests (and those in case IOR below),
5367 we start with some combination of logical operations and apply
5368 the distributive law followed by the inverse distributive law.
5369 Most of the time, this results in no change. However, if some of
5370 the operands are the same or inverses of each other, simplifications
5371 will result.
5372
5373 For example, (and (ior A B) (not B)) can occur as the result of
5374 expanding a bit field assignment. When we apply the distributive
law to this, we get (ior (and A (not B)) (and B (not B))),
which then simplifies to (and A (not B)).
5377
5378 If we have (and (ior A B) C), apply the distributive law and then
5379 the inverse distributive law to see if things simplify. */
5380
5381 if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
5382 {
5383 x = apply_distributive_law
5384 (gen_binary (GET_CODE (op0), mode,
5385 gen_binary (AND, mode, XEXP (op0, 0), op1),
5386 gen_binary (AND, mode, XEXP (op0, 1),
5387 copy_rtx (op1))));
5388 if (GET_CODE (x) != AND)
5389 return x;
5390 }
5391
5392 if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
5393 return apply_distributive_law
5394 (gen_binary (GET_CODE (op1), mode,
5395 gen_binary (AND, mode, XEXP (op1, 0), op0),
5396 gen_binary (AND, mode, XEXP (op1, 1),
5397 copy_rtx (op0))));
5398
5399 /* Similarly, taking advantage of the fact that
5400 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
5401
5402 if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR)
5403 return apply_distributive_law
5404 (gen_binary (XOR, mode,
5405 gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)),
5406 gen_binary (IOR, mode, copy_rtx (XEXP (op0, 0)),
5407 XEXP (op1, 1))));
5408
5409 else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR)
5410 return apply_distributive_law
5411 (gen_binary (XOR, mode,
5412 gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)),
5413 gen_binary (IOR, mode, copy_rtx (XEXP (op1, 0)), XEXP (op0, 1))));
5414 break;
5415
5416 case IOR:
5417 /* (ior A C) is C if all bits of A that might be nonzero are on in C. */
5418 if (GET_CODE (op1) == CONST_INT
5419 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5420 && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
5421 return op1;
5422
5423 /* Convert (A & B) | A to A. */
5424 if (GET_CODE (op0) == AND
5425 && (rtx_equal_p (XEXP (op0, 0), op1)
5426 || rtx_equal_p (XEXP (op0, 1), op1))
5427 && ! side_effects_p (XEXP (op0, 0))
5428 && ! side_effects_p (XEXP (op0, 1)))
5429 return op1;
5430
5431 /* If we have (ior (and A B) C), apply the distributive law and then
5432 the inverse distributive law to see if things simplify. */
5433
5434 if (GET_CODE (op0) == AND)
5435 {
5436 x = apply_distributive_law
5437 (gen_binary (AND, mode,
5438 gen_binary (IOR, mode, XEXP (op0, 0), op1),
5439 gen_binary (IOR, mode, XEXP (op0, 1),
5440 copy_rtx (op1))));
5441
5442 if (GET_CODE (x) != IOR)
5443 return x;
5444 }
5445
5446 if (GET_CODE (op1) == AND)
5447 {
5448 x = apply_distributive_law
5449 (gen_binary (AND, mode,
5450 gen_binary (IOR, mode, XEXP (op1, 0), op0),
5451 gen_binary (IOR, mode, XEXP (op1, 1),
5452 copy_rtx (op0))));
5453
5454 if (GET_CODE (x) != IOR)
5455 return x;
5456 }
5457
5458 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
5459 mode size to (rotate A CX). */
5460
5461 if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
5462 || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
5463 && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
5464 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5465 && GET_CODE (XEXP (op1, 1)) == CONST_INT
5466 && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
5467 == GET_MODE_BITSIZE (mode)))
5468 return gen_rtx_ROTATE (mode, XEXP (op0, 0),
5469 (GET_CODE (op0) == ASHIFT
5470 ? XEXP (op0, 1) : XEXP (op1, 1)));
5471
5472 /* If OP0 is (ashiftrt (plus ...) C), it might actually be
5473 a (sign_extend (plus ...)). If so, OP1 is a CONST_INT, and the PLUS
5474 does not affect any of the bits in OP1, it can really be done
5475 as a PLUS and we can associate. We do this by seeing if OP1
5476 can be safely shifted left C bits. */
5477 if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
5478 && GET_CODE (XEXP (op0, 0)) == PLUS
5479 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
5480 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5481 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
5482 {
5483 int count = INTVAL (XEXP (op0, 1));
5484 HOST_WIDE_INT mask = INTVAL (op1) << count;
5485
5486 if (mask >> count == INTVAL (op1)
5487 && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
5488 {
5489 SUBST (XEXP (XEXP (op0, 0), 1),
5490 GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
5491 return op0;
5492 }
5493 }
5494 break;
5495
5496 case XOR:
/* If we are XORing two things that have no bits in common,
convert them into an IOR. This helps to detect rotations encoded
this way and may enable other simplifications. */
5500
5501 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5502 && (nonzero_bits (op0, mode)
5503 & nonzero_bits (op1, mode)) == 0)
5504 return (gen_binary (IOR, mode, op0, op1));
5505
5506 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
5507 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
5508 (NOT y). */
5509 {
5510 int num_negated = 0;
5511
5512 if (GET_CODE (op0) == NOT)
5513 num_negated++, op0 = XEXP (op0, 0);
5514 if (GET_CODE (op1) == NOT)
5515 num_negated++, op1 = XEXP (op1, 0);
5516
5517 if (num_negated == 2)
5518 {
5519 SUBST (XEXP (x, 0), op0);
5520 SUBST (XEXP (x, 1), op1);
5521 }
5522 else if (num_negated == 1)
5523 return gen_unary (NOT, mode, mode, gen_binary (XOR, mode, op0, op1));
5524 }
5525
5526 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
5527 correspond to a machine insn or result in further simplifications
5528 if B is a constant. */
5529
5530 if (GET_CODE (op0) == AND
5531 && rtx_equal_p (XEXP (op0, 1), op1)
5532 && ! side_effects_p (op1))
5533 return gen_binary (AND, mode,
5534 gen_unary (NOT, mode, mode, XEXP (op0, 0)),
5535 op1);
5536
5537 else if (GET_CODE (op0) == AND
5538 && rtx_equal_p (XEXP (op0, 0), op1)
5539 && ! side_effects_p (op1))
5540 return gen_binary (AND, mode,
5541 gen_unary (NOT, mode, mode, XEXP (op0, 1)),
5542 op1);
5543
5544 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
5545 comparison if STORE_FLAG_VALUE is 1. */
5546 if (STORE_FLAG_VALUE == 1
5547 && op1 == const1_rtx
5548 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5549 && reversible_comparison_p (op0))
5550 return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
5551 mode, XEXP (op0, 0), XEXP (op0, 1));
5552
5553 /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
5554 is (lt foo (const_int 0)), so we can perform the above
5555 simplification if STORE_FLAG_VALUE is 1. */
5556
5557 if (STORE_FLAG_VALUE == 1
5558 && op1 == const1_rtx
5559 && GET_CODE (op0) == LSHIFTRT
5560 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5561 && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
5562 return gen_rtx_combine (GE, mode, XEXP (op0, 0), const0_rtx);
5563
5564 /* (xor (comparison foo bar) (const_int sign-bit))
5565 when STORE_FLAG_VALUE is the sign bit. */
5566 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5567 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
5568 == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
5569 && op1 == const_true_rtx
5570 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5571 && reversible_comparison_p (op0))
5572 return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
5573 mode, XEXP (op0, 0), XEXP (op0, 1));
5574
5575 break;
5576
5577 default:
5578 abort ();
5579 }
5580
5581 return x;
5582 }
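/* Minimal standalone sketches (not part of the compiler) of three of
   the identities simplify_logical relies on; the helper names and the
   32-bit assumption in the rotate are ours. */

static int
example_xor_and (a, b)
     int a, b;
{
  /* (A ^ B) & A == A & ~B: any bit of A also set in B is cleared by
     the XOR.  */
  return a & ~b;
}

static unsigned int
example_rotate_left (x, c)
     unsigned int x;
     int c;
{
  /* (ior (ashift X C) (lshiftrt X 32-C)) is a left rotate by C,
     assuming a 32-bit X and 0 < C < 32.  */
  return (x << c) | (x >> (32 - c));
}

static int
example_disjoint_xor (a, b)
     int a, b;
{
  /* When (A & B) == 0, A ^ B == A | B: XOR and IOR differ only on
     bits set in both operands.  */
  return a | b;
}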
5583 \f
5584 /* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
5585 operations" because they can be replaced with two more basic operations.
5586 ZERO_EXTEND is also considered "compound" because it can be replaced with
5587 an AND operation, which is simpler, though only one operation.
5588
5589 The function expand_compound_operation is called with an rtx expression
5590 and will convert it to the appropriate shifts and AND operations,
5591 simplifying at each stage.
5592
5593 The function make_compound_operation is called to convert an expression
5594 consisting of shifts and ANDs into the equivalent compound expression.
5595 It is the inverse of this function, loosely speaking. */
5596
5597 static rtx
5598 expand_compound_operation (x)
5599 rtx x;
5600 {
5601 unsigned HOST_WIDE_INT pos = 0, len;
5602 int unsignedp = 0;
5603 unsigned int modewidth;
5604 rtx tem;
5605
5606 switch (GET_CODE (x))
5607 {
5608 case ZERO_EXTEND:
5609 unsignedp = 1;
5610 case SIGN_EXTEND:
5611 /* We can't necessarily use a const_int for a multiword mode;
5612 it depends on implicitly extending the value.
5613 Since we don't know the right way to extend it,
5614 we can't tell whether the implicit way is right.
5615
5616 Even for a mode that is no wider than a const_int,
5617 we can't win, because we need to sign extend one of its bits through
5618 the rest of it, and we don't know which bit. */
5619 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
5620 return x;
5621
5622 /* Return if (subreg:MODE FROM 0) is not a safe replacement for
5623 (zero_extend:MODE FROM) or (sign_extend:MODE FROM). It is for any MEM
5624 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
5625 reloaded. If not for that, MEM's would very rarely be safe.
5626
5627 Reject MODEs bigger than a word, because we might not be able
5628 to reference a two-register group starting with an arbitrary register
5629 (and currently gen_lowpart might crash for a SUBREG). */
5630
5631 if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
5632 return x;
5633
5634 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
5635 /* If the inner object has VOIDmode (the only way this can happen
is if it is an ASM_OPERANDS), we can't do anything since we don't
5637 know how much masking to do. */
5638 if (len == 0)
5639 return x;
5640
5641 break;
5642
5643 case ZERO_EXTRACT:
5644 unsignedp = 1;
5645 case SIGN_EXTRACT:
5646 /* If the operand is a CLOBBER, just return it. */
5647 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
5648 return XEXP (x, 0);
5649
5650 if (GET_CODE (XEXP (x, 1)) != CONST_INT
5651 || GET_CODE (XEXP (x, 2)) != CONST_INT
5652 || GET_MODE (XEXP (x, 0)) == VOIDmode)
5653 return x;
5654
5655 len = INTVAL (XEXP (x, 1));
5656 pos = INTVAL (XEXP (x, 2));
5657
5658 /* If this goes outside the object being extracted, replace the object
5659 with a (use (mem ...)) construct that only combine understands
5660 and is used only for this purpose. */
5661 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
5662 SUBST (XEXP (x, 0), gen_rtx_USE (GET_MODE (x), XEXP (x, 0)));
5663
5664 if (BITS_BIG_ENDIAN)
5665 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
5666
5667 break;
5668
5669 default:
5670 return x;
5671 }
5672 /* Convert sign extension to zero extension, if we know that the high
5673 bit is not set, as this is easier to optimize. It will be converted
back to the cheaper alternative in make_extraction. */
5675 if (GET_CODE (x) == SIGN_EXTEND
5676 && (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5677 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
5678 & ~(((unsigned HOST_WIDE_INT)
5679 GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5680 >> 1))
5681 == 0)))
5682 {
5683 rtx temp = gen_rtx_ZERO_EXTEND (GET_MODE (x), XEXP (x, 0));
5684 return expand_compound_operation (temp);
5685 }
5686
5687 /* We can optimize some special cases of ZERO_EXTEND. */
5688 if (GET_CODE (x) == ZERO_EXTEND)
5689 {
5690 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
5691 know that the last value didn't have any inappropriate bits
5692 set. */
5693 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5694 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5695 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5696 && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
5697 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5698 return XEXP (XEXP (x, 0), 0);
5699
5700 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5701 if (GET_CODE (XEXP (x, 0)) == SUBREG
5702 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5703 && subreg_lowpart_p (XEXP (x, 0))
5704 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5705 && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
5706 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5707 return SUBREG_REG (XEXP (x, 0));
5708
5709 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
5710 is a comparison and STORE_FLAG_VALUE permits. This is like
5711 the first case, but it works even when GET_MODE (x) is larger
5712 than HOST_WIDE_INT. */
5713 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5714 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5715 && GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) == '<'
5716 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5717 <= HOST_BITS_PER_WIDE_INT)
5718 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5719 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5720 return XEXP (XEXP (x, 0), 0);
5721
5722 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5723 if (GET_CODE (XEXP (x, 0)) == SUBREG
5724 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5725 && subreg_lowpart_p (XEXP (x, 0))
5726 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == '<'
5727 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5728 <= HOST_BITS_PER_WIDE_INT)
5729 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5730 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5731 return SUBREG_REG (XEXP (x, 0));
5732
5733 }
5734
5735 /* If we reach here, we want to return a pair of shifts. The inner
5736 shift is a left shift of BITSIZE - POS - LEN bits. The outer
5737 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
5738 logical depending on the value of UNSIGNEDP.
5739
5740 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
5741 converted into an AND of a shift.
5742
5743 We must check for the case where the left shift would have a negative
5744 count. This can happen in a case like (x >> 31) & 255 on machines
5745 that can't shift by a constant. On those machines, we would first
5746 combine the shift with the AND to produce a variable-position
extraction. Then the constant of 31 would be substituted in to produce
such a position. */
5749
5750 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
if (modewidth >= pos + len)
5752 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
5753 GET_MODE (x),
5754 simplify_shift_const (NULL_RTX, ASHIFT,
5755 GET_MODE (x),
5756 XEXP (x, 0),
5757 modewidth - pos - len),
5758 modewidth - len);
5759
5760 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
5761 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
5762 simplify_shift_const (NULL_RTX, LSHIFTRT,
5763 GET_MODE (x),
5764 XEXP (x, 0), pos),
5765 ((HOST_WIDE_INT) 1 << len) - 1);
5766 else
5767 /* Any other cases we can't handle. */
5768 return x;
5769
5770 /* If we couldn't do this for some reason, return the original
5771 expression. */
5772 if (GET_CODE (tem) == CLOBBER)
5773 return x;
5774
5775 return tem;
5776 }
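/* A minimal standalone sketch (not part of the compiler) of the pair
   of shifts expand_compound_operation returns, for a 32-bit value.
   The helper names and the assumption of an arithmetic `>>' on signed
   int are ours. */

static int
example_sign_extract (x, pos, len)
     int x, pos, len;
{
  /* The left shift moves the LEN-bit field at POS to the top of the
     word; the arithmetic right shift brings it back down, replicating
     the field's sign bit.  Assumes 0 < LEN and POS + LEN <= 32.  */
  return (x << (32 - pos - len)) >> (32 - len);
}

static unsigned int
example_zero_extract (x, pos, len)
     unsigned int x;
     int pos, len;
{
  /* The unsigned case is a logical shift and an AND mask, matching
     the conversion of ZERO_EXTEND into an AND.  Assumes LEN < 32.  */
  return (x >> pos) & (((unsigned int) 1 << len) - 1);
}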
5777 \f
5778 /* X is a SET which contains an assignment of one object into
5779 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
5780 or certain SUBREGS). If possible, convert it into a series of
5781 logical operations.
5782
5783 We half-heartedly support variable positions, but do not at all
5784 support variable lengths. */
5785
5786 static rtx
5787 expand_field_assignment (x)
5788 rtx x;
5789 {
5790 rtx inner;
5791 rtx pos; /* Always counts from low bit. */
5792 int len;
5793 rtx mask;
5794 enum machine_mode compute_mode;
5795
5796 /* Loop until we find something we can't simplify. */
5797 while (1)
5798 {
5799 if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
5800 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
5801 {
5802 inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
5803 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
5804 pos = GEN_INT (BITS_PER_WORD * SUBREG_WORD (XEXP (SET_DEST (x), 0)));
5805 }
5806 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
5807 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
5808 {
5809 inner = XEXP (SET_DEST (x), 0);
5810 len = INTVAL (XEXP (SET_DEST (x), 1));
5811 pos = XEXP (SET_DEST (x), 2);
5812
5813 /* If the position is constant and spans the width of INNER,
5814 surround INNER with a USE to indicate this. */
5815 if (GET_CODE (pos) == CONST_INT
5816 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
5817 inner = gen_rtx_USE (GET_MODE (SET_DEST (x)), inner);
5818
5819 if (BITS_BIG_ENDIAN)
5820 {
5821 if (GET_CODE (pos) == CONST_INT)
5822 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
5823 - INTVAL (pos));
5824 else if (GET_CODE (pos) == MINUS
5825 && GET_CODE (XEXP (pos, 1)) == CONST_INT
5826 && (INTVAL (XEXP (pos, 1))
5827 == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
5828 /* If position is ADJUST - X, new position is X. */
5829 pos = XEXP (pos, 0);
5830 else
5831 pos = gen_binary (MINUS, GET_MODE (pos),
5832 GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
5833 - len),
5834 pos);
5835 }
5836 }
5837
5838 /* A SUBREG between two modes that occupy the same numbers of words
5839 can be done by moving the SUBREG to the source. */
5840 else if (GET_CODE (SET_DEST (x)) == SUBREG
5841 /* We need SUBREGs to compute nonzero_bits properly. */
5842 && nonzero_sign_valid
5843 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
5844 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
5845 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
5846 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
5847 {
5848 x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
5849 gen_lowpart_for_combine
5850 (GET_MODE (SUBREG_REG (SET_DEST (x))),
5851 SET_SRC (x)));
5852 continue;
5853 }
5854 else
5855 break;
5856
5857 while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
5858 inner = SUBREG_REG (inner);
5859
5860 compute_mode = GET_MODE (inner);
5861
5862 /* Don't attempt bitwise arithmetic on non-integral modes. */
5863 if (! INTEGRAL_MODE_P (compute_mode))
5864 {
5865 enum machine_mode imode;
5866
5867 /* Something is probably seriously wrong if this matches. */
5868 if (! FLOAT_MODE_P (compute_mode))
5869 break;
5870
5871 /* Try to find an integral mode to pun with. */
5872 imode = mode_for_size (GET_MODE_BITSIZE (compute_mode), MODE_INT, 0);
5873 if (imode == BLKmode)
5874 break;
5875
5876 compute_mode = imode;
5877 inner = gen_lowpart_for_combine (imode, inner);
5878 }
5879
5880 /* Compute a mask of LEN bits, if we can do this on the host machine. */
5881 if (len < HOST_BITS_PER_WIDE_INT)
5882 mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
5883 else
5884 break;
5885
5886 /* Now compute the equivalent expression. Make a copy of INNER
5887 for the SET_DEST in case it is a MEM into which we will substitute;
5888 we don't want shared RTL in that case. */
5889 x = gen_rtx_SET
5890 (VOIDmode, copy_rtx (inner),
5891 gen_binary (IOR, compute_mode,
5892 gen_binary (AND, compute_mode,
5893 gen_unary (NOT, compute_mode,
5894 compute_mode,
5895 gen_binary (ASHIFT,
5896 compute_mode,
5897 mask, pos)),
5898 inner),
5899 gen_binary (ASHIFT, compute_mode,
5900 gen_binary (AND, compute_mode,
5901 gen_lowpart_for_combine
5902 (compute_mode, SET_SRC (x)),
5903 mask),
5904 pos)));
5905 }
5906
5907 return x;
5908 }
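/* A minimal standalone sketch (not part of the compiler) of the
   IOR/AND/ASHIFT form expand_field_assignment builds: store the low
   LEN bits of SRC into INNER at bit position POS.  The helper name is
   hypothetical; assumes LEN < 32 and POS + LEN <= 32. */

static unsigned int
example_field_assign (inner, src, pos, len)
     unsigned int inner, src;
     int pos, len;
{
  unsigned int mask = ((unsigned int) 1 << len) - 1;

  /* Clear the field, (and (not (ashift MASK POS)) INNER), then IOR in
     the new value, (ashift (and SRC MASK) POS).  */
  return (inner & ~(mask << pos)) | ((src & mask) << pos);
}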
5909 \f
5910 /* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero,
5911 it is an RTX that represents a variable starting position; otherwise,
5912 POS is the (constant) starting bit position (counted from the LSB).
5913
5914 INNER may be a USE. This will occur when we started with a bitfield
5915 that went outside the boundary of the object in memory, which is
5916 allowed on most machines. To isolate this case, we produce a USE
5917 whose mode is wide enough and surround the MEM with it. The only
5918 code that understands the USE is this routine. If it is not removed,
5919 it will cause the resulting insn not to match.
5920
5921 UNSIGNEDP is non-zero for an unsigned reference and zero for a
5922 signed reference.
5923
5924 IN_DEST is non-zero if this is a reference in the destination of a
5925 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
5926 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
5927 be used.
5928
5929 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
5930 ZERO_EXTRACT should be built even for bits starting at bit 0.
5931
5932 MODE is the desired mode of the result (if IN_DEST == 0).
5933
5934 The result is an RTX for the extraction or NULL_RTX if the target
5935 can't handle it. */
5936
5937 static rtx
5938 make_extraction (mode, inner, pos, pos_rtx, len,
5939 unsignedp, in_dest, in_compare)
5940 enum machine_mode mode;
5941 rtx inner;
5942 HOST_WIDE_INT pos;
5943 rtx pos_rtx;
5944 unsigned HOST_WIDE_INT len;
5945 int unsignedp;
5946 int in_dest, in_compare;
5947 {
5948 /* This mode describes the size of the storage area
5949 to fetch the overall value from. Within that, we
5950 ignore the POS lowest bits, etc. */
5951 enum machine_mode is_mode = GET_MODE (inner);
5952 enum machine_mode inner_mode;
5953 enum machine_mode wanted_inner_mode = byte_mode;
5954 enum machine_mode wanted_inner_reg_mode = word_mode;
5955 enum machine_mode pos_mode = word_mode;
5956 enum machine_mode extraction_mode = word_mode;
5957 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
5958 int spans_byte = 0;
5959 rtx new = 0;
5960 rtx orig_pos_rtx = pos_rtx;
5961 HOST_WIDE_INT orig_pos;
5962
5963 /* Get some information about INNER and get the innermost object. */
5964 if (GET_CODE (inner) == USE)
5965 /* (use:SI (mem:QI foo)) stands for (mem:SI foo). */
5966 /* We don't need to adjust the position because we set up the USE
5967 to pretend that it was a full-word object. */
5968 spans_byte = 1, inner = XEXP (inner, 0);
5969 else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
5970 {
5971 /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
5972 consider just the QI as the memory to extract from.
5973 The subreg adds or removes high bits; its mode is
5974 irrelevant to the meaning of this extraction,
5975 since POS and LEN count from the lsb. */
5976 if (GET_CODE (SUBREG_REG (inner)) == MEM)
5977 is_mode = GET_MODE (SUBREG_REG (inner));
5978 inner = SUBREG_REG (inner);
5979 }
5980
5981 inner_mode = GET_MODE (inner);
5982
5983 if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
5984 pos = INTVAL (pos_rtx), pos_rtx = 0;
5985
5986 /* See if this can be done without an extraction. We never can if the
5987 width of the field is not the same as that of some integer mode. For
5988 registers, we can only avoid the extraction if the position is at the
5989 low-order bit and this is either not in the destination or we have the
5990 appropriate STRICT_LOW_PART operation available.
5991
5992 For MEM, we can avoid an extract if the field starts on an appropriate
5993 boundary and we can change the mode of the memory reference. However,
5994 we cannot directly access the MEM if we have a USE and the underlying
5995 MEM is not TMODE. This combination means that MEM was being used in a
5996 context where bits outside its mode were being referenced; that is only
5997 valid in bit-field insns. */
5998
5999 if (tmode != BLKmode
6000 && ! (spans_byte && inner_mode != tmode)
6001 && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
6002 && GET_CODE (inner) != MEM
6003 && (! in_dest
6004 || (GET_CODE (inner) == REG
6005 && (movstrict_optab->handlers[(int) tmode].insn_code
6006 != CODE_FOR_nothing))))
6007 || (GET_CODE (inner) == MEM && pos_rtx == 0
6008 && (pos
6009 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
6010 : BITS_PER_UNIT)) == 0
6011 /* We can't do this if we are widening INNER_MODE (it
6012 may not be aligned, for one thing). */
6013 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
6014 && (inner_mode == tmode
6015 || (! mode_dependent_address_p (XEXP (inner, 0))
6016 && ! MEM_VOLATILE_P (inner))))))
6017 {
6018 /* If INNER is a MEM, make a new MEM that encompasses just the desired
6019 field. If the original and current mode are the same, we need not
adjust the offset. Otherwise, we do so if bytes are big-endian.
6021
6022 If INNER is not a MEM, get a piece consisting of just the field
6023 of interest (in this case POS % BITS_PER_WORD must be 0). */
6024
6025 if (GET_CODE (inner) == MEM)
6026 {
6027 int offset;
6028 /* POS counts from lsb, but make OFFSET count in memory order. */
6029 if (BYTES_BIG_ENDIAN)
6030 offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
6031 else
6032 offset = pos / BITS_PER_UNIT;
6033
6034 new = gen_rtx_MEM (tmode, plus_constant (XEXP (inner, 0), offset));
6035 MEM_COPY_ATTRIBUTES (new, inner);
6036 }
6037 else if (GET_CODE (inner) == REG)
6038 {
6039 /* We can't call gen_lowpart_for_combine here since we always want
6040 a SUBREG and it would sometimes return a new hard register. */
6041 if (tmode != inner_mode)
6042 new = gen_rtx_SUBREG (tmode, inner,
6043 (WORDS_BIG_ENDIAN
6044 && (GET_MODE_SIZE (inner_mode)
6045 > UNITS_PER_WORD)
6046 ? (((GET_MODE_SIZE (inner_mode)
6047 - GET_MODE_SIZE (tmode))
6048 / UNITS_PER_WORD)
6049 - pos / BITS_PER_WORD)
6050 : pos / BITS_PER_WORD));
6051 else
6052 new = inner;
6053 }
6054 else
6055 new = force_to_mode (inner, tmode,
6056 len >= HOST_BITS_PER_WIDE_INT
6057 ? ~(unsigned HOST_WIDE_INT) 0
6058 : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
6059 NULL_RTX, 0);
6060
6061 /* If this extraction is going into the destination of a SET,
6062 make a STRICT_LOW_PART unless we made a MEM. */
6063
6064 if (in_dest)
6065 return (GET_CODE (new) == MEM ? new
6066 : (GET_CODE (new) != SUBREG
6067 ? gen_rtx_CLOBBER (tmode, const0_rtx)
6068 : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
6069
6070 if (mode == tmode)
6071 return new;
6072
6073 /* If we know that no extraneous bits are set, and that the high
6074 bit is not set, convert the extraction to the cheaper of
sign and zero extension, which are equivalent in these cases. */
6076 if (flag_expensive_optimizations
6077 && (GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
6078 && ((nonzero_bits (new, tmode)
6079 & ~(((unsigned HOST_WIDE_INT)
6080 GET_MODE_MASK (tmode))
6081 >> 1))
6082 == 0)))
6083 {
6084 rtx temp = gen_rtx_ZERO_EXTEND (mode, new);
6085 rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new);
6086
6087 /* Prefer ZERO_EXTENSION, since it gives more information to
6088 backends. */
6089 if (rtx_cost (temp, SET) <= rtx_cost (temp1, SET))
6090 return temp;
6091 return temp1;
6092 }
6093
6094 /* Otherwise, sign- or zero-extend unless we already are in the
6095 proper mode. */
6096
6097 return (gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
6098 mode, new));
6099 }
6100
6101 /* Unless this is a COMPARE or we have a funny memory reference,
6102 don't do anything with zero-extending field extracts starting at
6103 the low-order bit since they are simple AND operations. */
6104 if (pos_rtx == 0 && pos == 0 && ! in_dest
6105 && ! in_compare && ! spans_byte && unsignedp)
6106 return 0;
6107
6108 /* Unless we are allowed to span bytes or INNER is not MEM, reject this if
6109 we would be spanning bytes or if the position is not a constant and the
6110 length is not 1. In all other cases, we would only be going outside
6111 our object in cases when an original shift would have been
6112 undefined. */
6113 if (! spans_byte && GET_CODE (inner) == MEM
6114 && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
6115 || (pos_rtx != 0 && len != 1)))
6116 return 0;
6117
6118 /* Get the mode to use should INNER not be a MEM, the mode for the position,
6119 and the mode for the result. */
6120 #ifdef HAVE_insv
6121 if (in_dest)
6122 {
6123 wanted_inner_reg_mode
6124 = insn_data[(int) CODE_FOR_insv].operand[0].mode;
6125 if (wanted_inner_reg_mode == VOIDmode)
6126 wanted_inner_reg_mode = word_mode;
6127
6128 pos_mode = insn_data[(int) CODE_FOR_insv].operand[2].mode;
6129 if (pos_mode == VOIDmode)
6130 pos_mode = word_mode;
6131
6132 extraction_mode = insn_data[(int) CODE_FOR_insv].operand[3].mode;
6133 if (extraction_mode == VOIDmode)
6134 extraction_mode = word_mode;
6135 }
6136 #endif
6137
6138 #ifdef HAVE_extzv
6139 if (! in_dest && unsignedp)
6140 {
6141 wanted_inner_reg_mode
6142 = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
6143 if (wanted_inner_reg_mode == VOIDmode)
6144 wanted_inner_reg_mode = word_mode;
6145
6146 pos_mode = insn_data[(int) CODE_FOR_extzv].operand[3].mode;
6147 if (pos_mode == VOIDmode)
6148 pos_mode = word_mode;
6149
6150 extraction_mode = insn_data[(int) CODE_FOR_extzv].operand[0].mode;
6151 if (extraction_mode == VOIDmode)
6152 extraction_mode = word_mode;
6153 }
6154 #endif
6155
6156 #ifdef HAVE_extv
6157 if (! in_dest && ! unsignedp)
6158 {
6159 wanted_inner_reg_mode
6160 = insn_data[(int) CODE_FOR_extv].operand[1].mode;
6161 if (wanted_inner_reg_mode == VOIDmode)
6162 wanted_inner_reg_mode = word_mode;
6163
6164 pos_mode = insn_data[(int) CODE_FOR_extv].operand[3].mode;
6165 if (pos_mode == VOIDmode)
6166 pos_mode = word_mode;
6167
6168 extraction_mode = insn_data[(int) CODE_FOR_extv].operand[0].mode;
6169 if (extraction_mode == VOIDmode)
6170 extraction_mode = word_mode;
6171 }
6172 #endif
6173
6174 /* Never narrow an object, since that might not be safe. */
6175
6176 if (mode != VOIDmode
6177 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
6178 extraction_mode = mode;
6179
6180 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
6181 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
6182 pos_mode = GET_MODE (pos_rtx);
6183
6184 /* If this is not from memory, the desired mode is wanted_inner_reg_mode;
6185 if we have to change the mode of memory and cannot, the desired mode is
6186 EXTRACTION_MODE. */
6187 if (GET_CODE (inner) != MEM)
6188 wanted_inner_mode = wanted_inner_reg_mode;
6189 else if (inner_mode != wanted_inner_mode
6190 && (mode_dependent_address_p (XEXP (inner, 0))
6191 || MEM_VOLATILE_P (inner)))
6192 wanted_inner_mode = extraction_mode;
6193
6194 orig_pos = pos;
6195
6196 if (BITS_BIG_ENDIAN)
6197 {
6198 /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
6199 BITS_BIG_ENDIAN style. If position is constant, compute new
6200 position. Otherwise, build subtraction.
6201 Note that POS is relative to the mode of the original argument.
6202 If it's a MEM we need to recompute POS relative to that.
6203 However, if we're extracting from (or inserting into) a register,
6204 we want to recompute POS relative to wanted_inner_mode. */
6205 int width = (GET_CODE (inner) == MEM
6206 ? GET_MODE_BITSIZE (is_mode)
6207 : GET_MODE_BITSIZE (wanted_inner_mode));
6208
6209 if (pos_rtx == 0)
6210 pos = width - len - pos;
6211 else
6212 pos_rtx
6213 = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
6214 GEN_INT (width - len), pos_rtx);
6215 /* POS may be less than 0 now, but we check for that below.
6216 Note that it can only be less than 0 if GET_CODE (inner) != MEM. */
6217 }
6218
6219 /* If INNER has a wider mode, make it smaller. If this is a constant
6220 extract, try to adjust the byte to point to the byte containing
6221 the value. */
6222 if (wanted_inner_mode != VOIDmode
6223 && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
6224 && ((GET_CODE (inner) == MEM
6225 && (inner_mode == wanted_inner_mode
6226 || (! mode_dependent_address_p (XEXP (inner, 0))
6227 && ! MEM_VOLATILE_P (inner))))))
6228 {
6229 int offset = 0;
6230
6231 /* The computations below will be correct if the machine is big
6232 endian in both bits and bytes or little endian in bits and bytes.
6233 If it is mixed, we must adjust. */
6234
6235 /* If bytes are big endian and we had a paradoxical SUBREG, we must
6236 adjust OFFSET to compensate. */
6237 if (BYTES_BIG_ENDIAN
6238 && ! spans_byte
6239 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
6240 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
6241
6242 /* If this is a constant position, we can move to the desired byte. */
6243 if (pos_rtx == 0)
6244 {
6245 offset += pos / BITS_PER_UNIT;
6246 pos %= GET_MODE_BITSIZE (wanted_inner_mode);
6247 }
6248
6249 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
6250 && ! spans_byte
6251 && is_mode != wanted_inner_mode)
6252 offset = (GET_MODE_SIZE (is_mode)
6253 - GET_MODE_SIZE (wanted_inner_mode) - offset);
6254
6255 if (offset != 0 || inner_mode != wanted_inner_mode)
6256 {
6257 rtx newmem = gen_rtx_MEM (wanted_inner_mode,
6258 plus_constant (XEXP (inner, 0), offset));
6259
6260 MEM_COPY_ATTRIBUTES (newmem, inner);
6261 inner = newmem;
6262 }
6263 }
6264
6265 /* If INNER is not memory, we can always get it into the proper mode. If we
6266 are changing its mode, POS must be a constant and smaller than the size
6267 of the new mode. */
6268 else if (GET_CODE (inner) != MEM)
6269 {
6270 if (GET_MODE (inner) != wanted_inner_mode
6271 && (pos_rtx != 0
6272 || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
6273 return 0;
6274
6275 inner = force_to_mode (inner, wanted_inner_mode,
6276 pos_rtx
6277 || len + orig_pos >= HOST_BITS_PER_WIDE_INT
6278 ? ~(unsigned HOST_WIDE_INT) 0
6279 : ((((unsigned HOST_WIDE_INT) 1 << len) - 1)
6280 << orig_pos),
6281 NULL_RTX, 0);
6282 }
6283
6284 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
6285 have to zero extend. Otherwise, we can just use a SUBREG. */
6286 if (pos_rtx != 0
6287 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
6288 {
6289 rtx temp = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
6290
6291 /* If we know that no extraneous bits are set, and that the high
6292 bit is not set, convert the extraction to the cheaper of either
6293 SIGN_EXTEND or ZERO_EXTEND, which are equivalent in these
6294 cases. */
6295 if (flag_expensive_optimizations
6296 && (GET_MODE_BITSIZE (GET_MODE (pos_rtx)) <= HOST_BITS_PER_WIDE_INT
6297 && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx))
6298 & ~(((unsigned HOST_WIDE_INT)
6299 GET_MODE_MASK (GET_MODE (pos_rtx)))
6300 >> 1))
6301 == 0)))
6302 {
6303 rtx temp1 = gen_rtx_SIGN_EXTEND (pos_mode, pos_rtx);
6304
6305 /* Prefer ZERO_EXTEND, since it gives more information to
6306 backends. */
6307 if (rtx_cost (temp1, SET) < rtx_cost (temp, SET))
6308 temp = temp1;
6309 }
6310 pos_rtx = temp;
6311 }
6312 else if (pos_rtx != 0
6313 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
6314 pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
6315
6316 /* Make POS_RTX unless we already have it and it is correct. If we don't
6317 have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
6318 be a CONST_INT. */
6319 if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
6320 pos_rtx = orig_pos_rtx;
6321
6322 else if (pos_rtx == 0)
6323 pos_rtx = GEN_INT (pos);
6324
6325 /* Make the required operation. See if we can reuse an existing rtx. */
6326 new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
6327 extraction_mode, inner, GEN_INT (len), pos_rtx);
6328 if (! in_dest)
6329 new = gen_lowpart_for_combine (mode, new);
6330
6331 return new;
6332 }
6333 \f
6334 /* See if X contains an ASHIFT of COUNT or more bits that can be commuted
6335 with any other operations in X. Return X without that shift if so. */
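/* For example, with COUNT == 3, (plus (ashift X (const_int 3))
(const_int 8)) becomes (plus X (const_int 1)): the shift is stripped
and the constant is shifted right to match. */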
6336
6337 static rtx
6338 extract_left_shift (x, count)
6339 rtx x;
6340 int count;
6341 {
6342 enum rtx_code code = GET_CODE (x);
6343 enum machine_mode mode = GET_MODE (x);
6344 rtx tem;
6345
6346 switch (code)
6347 {
6348 case ASHIFT:
6349 /* This is the shift itself. If it is wide enough, we will return
6350 either the value being shifted (if the shift count is equal to
6351 COUNT) or a shift by the difference. */
6352 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6353 && INTVAL (XEXP (x, 1)) >= count)
6354 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
6355 INTVAL (XEXP (x, 1)) - count);
6356 break;
6357
6358 case NEG: case NOT:
6359 if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
6360 return gen_unary (code, mode, mode, tem);
6361
6362 break;
6363
6364 case PLUS: case IOR: case XOR: case AND:
6365 /* If we can safely shift this constant and we find the inner shift,
6366 make a new operation. */
6367 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6368 && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0
6369 && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
6370 return gen_binary (code, mode, tem,
6371 GEN_INT (INTVAL (XEXP (x, 1)) >> count));
6372
6373 break;
6374
6375 default:
6376 break;
6377 }
6378
6379 return 0;
6380 }
6381 \f
6382 /* Look at the expression rooted at X. Look for expressions
6383 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
6384 Form these expressions.
6385
6386 Return the new rtx, usually just X.
6387
6388 Also, for machines like the Vax that don't have logical shift insns,
6389 try to convert logical to arithmetic shift operations in cases where
6390 they are equivalent. This undoes the canonicalizations to logical
6391 shifts done elsewhere.
6392
6393 We try, as much as possible, to re-use rtl expressions to save memory.
6394
6395 IN_CODE says what kind of expression we are processing. Normally, it is
6396 SET. In a memory address (inside a MEM, PLUS or MINUS, the latter two
6397 being kludges), it is MEM. When processing the arguments of a comparison
6398 or a COMPARE against zero, it is COMPARE. */
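/* For example, (and (lshiftrt X (const_int 4)) (const_int 255)) may be
rewritten here as (zero_extract X (const_int 8) (const_int 4)) when
that form suits the target. */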
6399
6400 static rtx
6401 make_compound_operation (x, in_code)
6402 rtx x;
6403 enum rtx_code in_code;
6404 {
6405 enum rtx_code code = GET_CODE (x);
6406 enum machine_mode mode = GET_MODE (x);
6407 int mode_width = GET_MODE_BITSIZE (mode);
6408 rtx rhs, lhs;
6409 enum rtx_code next_code;
6410 int i;
6411 rtx new = 0;
6412 rtx tem;
6413 const char *fmt;
6414
6415 /* Select the code to be used in recursive calls. Once we are inside an
6416 address, we stay there. If we have a comparison, set to COMPARE,
6417 but once inside, go back to our default of SET. */
6418
6419 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
6420 : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
6421 && XEXP (x, 1) == const0_rtx) ? COMPARE
6422 : in_code == COMPARE ? SET : in_code);
6423
6424 /* Process depending on the code of this operation. If NEW is set
6425 non-zero, it will be returned. */
6426
6427 switch (code)
6428 {
6429 case ASHIFT:
6430 /* Convert shifts by constants into multiplications if inside
6431 an address. */
6432 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
6433 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
6434 && INTVAL (XEXP (x, 1)) >= 0)
6435 {
6436 new = make_compound_operation (XEXP (x, 0), next_code);
6437 new = gen_rtx_combine (MULT, mode, new,
6438 GEN_INT ((HOST_WIDE_INT) 1
6439 << INTVAL (XEXP (x, 1))));
6440 }
6441 break;
6442
6443 case AND:
6444 /* If the second operand is not a constant, we can't do anything
6445 with it. */
6446 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6447 break;
6448
6449 /* If the constant is a power of two minus one and the first operand
6450 is a logical right shift, make an extraction. */
6451 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6452 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6453 {
6454 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
6455 new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
6456 0, in_code == COMPARE);
6457 }
6458
6459 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
6460 else if (GET_CODE (XEXP (x, 0)) == SUBREG
6461 && subreg_lowpart_p (XEXP (x, 0))
6462 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
6463 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6464 {
6465 new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
6466 next_code);
6467 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
6468 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
6469 0, in_code == COMPARE);
6470 }
6471 /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)). */
6472 else if ((GET_CODE (XEXP (x, 0)) == XOR
6473 || GET_CODE (XEXP (x, 0)) == IOR)
6474 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
6475 && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
6476 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6477 {
6478 /* Apply the distributive law, and then try to make extractions. */
6479 new = gen_rtx_combine (GET_CODE (XEXP (x, 0)), mode,
6480 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
6481 XEXP (x, 1)),
6482 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
6483 XEXP (x, 1)));
6484 new = make_compound_operation (new, in_code);
6485 }
6486
6487 /* If we have (and (rotate X C) M) and C is at least as large as the
6488 number of bits in M, this is an extraction. */
6489
6490 else if (GET_CODE (XEXP (x, 0)) == ROTATE
6491 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6492 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
6493 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
6494 {
6495 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
6496 new = make_extraction (mode, new,
6497 (GET_MODE_BITSIZE (mode)
6498 - INTVAL (XEXP (XEXP (x, 0), 1))),
6499 NULL_RTX, i, 1, 0, in_code == COMPARE);
6500 }
6501
6502 /* On machines without logical shifts, if the operand of the AND is
6503 a logical shift and our mask turns off all the propagated sign
6504 bits, we can replace the logical shift with an arithmetic shift. */
6505 else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6506 && (lshr_optab->handlers[(int) mode].insn_code
6507 == CODE_FOR_nothing)
6508 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
6509 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6510 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6511 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
6512 && mode_width <= HOST_BITS_PER_WIDE_INT)
6513 {
6514 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
6515
6516 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
6517 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
6518 SUBST (XEXP (x, 0),
6519 gen_rtx_combine (ASHIFTRT, mode,
6520 make_compound_operation (XEXP (XEXP (x, 0), 0),
6521 next_code),
6522 XEXP (XEXP (x, 0), 1)));
6523 }
6524
6525 /* If the constant is one less than a power of two, this might be
6526 representable by an extraction even if no shift is present.
6527 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
6528 we are in a COMPARE. */
6529 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6530 new = make_extraction (mode,
6531 make_compound_operation (XEXP (x, 0),
6532 next_code),
6533 0, NULL_RTX, i, 1, 0, in_code == COMPARE);
6534
6535 /* If we are in a comparison and this is an AND with a power of two,
6536 convert this into the appropriate bit extract. */
6537 else if (in_code == COMPARE
6538 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
6539 new = make_extraction (mode,
6540 make_compound_operation (XEXP (x, 0),
6541 next_code),
6542 i, NULL_RTX, 1, 1, 0, 1);
6543
6544 break;
6545
6546 case LSHIFTRT:
6547 /* If the sign bit is known to be zero, replace this with an
6548 arithmetic shift. */
6549 if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6550 && lshr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
6551 && mode_width <= HOST_BITS_PER_WIDE_INT
6552 && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
6553 {
6554 new = gen_rtx_combine (ASHIFTRT, mode,
6555 make_compound_operation (XEXP (x, 0),
6556 next_code),
6557 XEXP (x, 1));
6558 break;
6559 }
6560
6561 /* ... fall through ... */
6562
6563 case ASHIFTRT:
6564 lhs = XEXP (x, 0);
6565 rhs = XEXP (x, 1);
6566
6567 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
6568 this is a SIGN_EXTRACT. */
6569 if (GET_CODE (rhs) == CONST_INT
6570 && GET_CODE (lhs) == ASHIFT
6571 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
6572 && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
6573 {
6574 new = make_compound_operation (XEXP (lhs, 0), next_code);
6575 new = make_extraction (mode, new,
6576 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
6577 NULL_RTX, mode_width - INTVAL (rhs),
6578 code == LSHIFTRT, 0, in_code == COMPARE);
6579 break;
6580 }
6581
6582 /* See if we have operations between an ASHIFTRT and an ASHIFT.
6583 If so, try to merge the shifts into a SIGN_EXTEND. We could
6584 also do this for some cases of SIGN_EXTRACT, but it doesn't
6585 seem worth the effort; the case checked for occurs on Alpha. */
6586
6587 if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o'
6588 && ! (GET_CODE (lhs) == SUBREG
6589 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o'))
6590 && GET_CODE (rhs) == CONST_INT
6591 && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
6592 && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
6593 new = make_extraction (mode, make_compound_operation (new, next_code),
6594 0, NULL_RTX, mode_width - INTVAL (rhs),
6595 code == LSHIFTRT, 0, in_code == COMPARE);
6596
6597 break;
6598
6599 case SUBREG:
6600 /* Call ourselves recursively on the inner expression. If we are
6601 narrowing the object and it ends up with a different RTL code
6602 from the one it originally had, do this SUBREG as a force_to_mode. */
6603
6604 tem = make_compound_operation (SUBREG_REG (x), in_code);
6605 if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
6606 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
6607 && subreg_lowpart_p (x))
6608 {
6609 rtx newer = force_to_mode (tem, mode, ~(HOST_WIDE_INT) 0,
6610 NULL_RTX, 0);
6611
6612 /* If we have something other than a SUBREG, we might have
6613 done an expansion, so rerun ourselves. */
6614 if (GET_CODE (newer) != SUBREG)
6615 newer = make_compound_operation (newer, in_code);
6616
6617 return newer;
6618 }
6619
6620 /* If this is a paradoxical subreg, and the new code is a sign or
6621 zero extension, omit the subreg and widen the extension. If it
6622 is a regular subreg, we can still get rid of the subreg by not
6623 widening so much, or in fact removing the extension entirely. */
6624 if ((GET_CODE (tem) == SIGN_EXTEND
6625 || GET_CODE (tem) == ZERO_EXTEND)
6626 && subreg_lowpart_p (x))
6627 {
6628 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (tem))
6629 || (GET_MODE_SIZE (mode) >
6630 GET_MODE_SIZE (GET_MODE (XEXP (tem, 0)))))
6631 tem = gen_rtx_combine (GET_CODE (tem), mode, XEXP (tem, 0));
6632 else
6633 tem = gen_lowpart_for_combine (mode, XEXP (tem, 0));
6634 return tem;
6635 }
6636 break;
6637
6638 default:
6639 break;
6640 }
6641
6642 if (new)
6643 {
6644 x = gen_lowpart_for_combine (mode, new);
6645 code = GET_CODE (x);
6646 }
6647
6648 /* Now recursively process each operand of this operation. */
6649 fmt = GET_RTX_FORMAT (code);
6650 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6651 if (fmt[i] == 'e')
6652 {
6653 new = make_compound_operation (XEXP (x, i), next_code);
6654 SUBST (XEXP (x, i), new);
6655 }
6656
6657 return x;
6658 }
6659 \f
6660 /* Given M, see if it is a value that would select a field of bits
6661 within an item, but not the entire word. Return -1 if not.
6662 Otherwise, return the starting position of the field, where 0 is the
6663 low-order bit.
6664
6665 *PLEN is set to the length of the field. */
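/* For example, M == 0x0ff0 selects an 8-bit field starting at bit 4,
so we return 4 and set *PLEN to 8; M == 0x0ff1 has no such contiguous
field, so we return -1. */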
6666
6667 static int
6668 get_pos_from_mask (m, plen)
6669 unsigned HOST_WIDE_INT m;
6670 unsigned HOST_WIDE_INT *plen;
6671 {
6672 /* Get the bit number of the first 1 bit from the right, -1 if none. */
6673 int pos = exact_log2 (m & -m);
6674 int len;
6675
6676 if (pos < 0)
6677 return -1;
6678
6679 /* Now shift off the low-order zero bits and see if we have a power of
6680 two minus 1. */
6681 len = exact_log2 ((m >> pos) + 1);
6682
6683 if (len <= 0)
6684 return -1;
6685
6686 *plen = len;
6687 return pos;
6688 }
6689 \f
6690 /* See if X can be simplified knowing that we will only refer to it in
6691 MODE and will only refer to those bits that are nonzero in MASK.
6692 If other bits are being computed or if masking operations are done
6693 that select a superset of the bits in MASK, they can sometimes be
6694 ignored.
6695
6696 Return a possibly simplified expression, but always convert X to
6697 MODE. If X is a CONST_INT, AND the CONST_INT with MASK.
6698
6699 Also, if REG is non-zero and X is a register equal in value to REG,
6700 replace X with REG.
6701
6702 If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
6703 are all off in X. This is used when X will be complemented, by either
6704 NOT, NEG, or XOR. */
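/* For example, with MASK == 0x0f, (and X (const_int 0xff)) can be
simplified to just X, since the AND only affects bits that MASK says
will never be looked at. */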
6705
6706 static rtx
6707 force_to_mode (x, mode, mask, reg, just_select)
6708 rtx x;
6709 enum machine_mode mode;
6710 unsigned HOST_WIDE_INT mask;
6711 rtx reg;
6712 int just_select;
6713 {
6714 enum rtx_code code = GET_CODE (x);
6715 int next_select = just_select || code == XOR || code == NOT || code == NEG;
6716 enum machine_mode op_mode;
6717 unsigned HOST_WIDE_INT fuller_mask, nonzero;
6718 rtx op0, op1, temp;
6719
6720 /* If this is a CALL or ASM_OPERANDS, don't do anything. Some of the
6721 code below will do the wrong thing since the mode of such an
6722 expression is VOIDmode.
6723
6724 Also do nothing if X is a CLOBBER; this can happen if X was
6725 the return value from a call to gen_lowpart_for_combine. */
6726 if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
6727 return x;
6728
6729 /* We want to perform the operation in its present mode unless we know
6730 that the operation is valid in MODE, in which case we do the operation
6731 in MODE. */
6732 op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
6733 && code_to_optab[(int) code] != 0
6734 && (code_to_optab[(int) code]->handlers[(int) mode].insn_code
6735 != CODE_FOR_nothing))
6736 ? mode : GET_MODE (x));
6737
6738 /* It is not valid to do a right-shift in a narrower mode
6739 than the one it came in with. */
6740 if ((code == LSHIFTRT || code == ASHIFTRT)
6741 && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
6742 op_mode = GET_MODE (x);
6743
6744 /* Truncate MASK to fit OP_MODE. */
6745 if (op_mode)
6746 mask &= GET_MODE_MASK (op_mode);
6747
6748 /* When we have an arithmetic operation, or a shift whose count we
6749 do not know, we need to assume that all bits up to the highest-order
6750 bit in MASK will be needed. This is how we form such a mask. */
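/* For example, MASK == 0x14 gives FULLER_MASK == 0x1f, since a carry
out of any lower bit can affect the bits selected by MASK. */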
6751 if (op_mode)
6752 fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
6753 ? GET_MODE_MASK (op_mode)
6754 : (((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1))
6755 - 1));
6756 else
6757 fuller_mask = ~(HOST_WIDE_INT) 0;
6758
6759 /* Determine what bits of X are guaranteed to be (non)zero. */
6760 nonzero = nonzero_bits (x, mode);
6761
6762 /* If none of the bits in X are needed, return a zero. */
6763 if (! just_select && (nonzero & mask) == 0)
6764 return const0_rtx;
6765
6766 /* If X is a CONST_INT, return a new one. Do this here since the
6767 test below will fail. */
6768 if (GET_CODE (x) == CONST_INT)
6769 {
6770 HOST_WIDE_INT cval = INTVAL (x) & mask;
6771 int width = GET_MODE_BITSIZE (mode);
6772
6773 /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
6774 number, sign extend it. */
6775 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
6776 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6777 cval |= (HOST_WIDE_INT) -1 << width;
6778
6779 return GEN_INT (cval);
6780 }
6781
6782 /* If X is narrower than MODE and we want all the bits in X's mode, just
6783 get X in the proper mode. */
6784 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
6785 && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0)
6786 return gen_lowpart_for_combine (mode, x);
6787
6788 /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
6789 MASK are already known to be zero in X, we need not do anything. */
6790 if (GET_MODE (x) == mode && code != SUBREG && (~mask & nonzero) == 0)
6791 return x;
6792
6793 switch (code)
6794 {
6795 case CLOBBER:
6796 /* If X is a (clobber (const_int)), return it since we know we are
6797 generating something that won't match. */
6798 return x;
6799
6800 case USE:
6801 /* X is a (use (mem ..)) that was made from a bit-field extraction that
6802 spanned the boundary of the MEM. If we are now masking so it is
6803 within that boundary, we don't need the USE any more. */
6804 if (! BITS_BIG_ENDIAN
6805 && (mask & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
6806 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6807 break;
6808
6809 case SIGN_EXTEND:
6810 case ZERO_EXTEND:
6811 case ZERO_EXTRACT:
6812 case SIGN_EXTRACT:
6813 x = expand_compound_operation (x);
6814 if (GET_CODE (x) != code)
6815 return force_to_mode (x, mode, mask, reg, next_select);
6816 break;
6817
6818 case REG:
6819 if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
6820 || rtx_equal_p (reg, get_last_value (x))))
6821 x = reg;
6822 break;
6823
6824 case SUBREG:
6825 if (subreg_lowpart_p (x)
6826 /* We can ignore the effect of this SUBREG if it narrows the mode or
6827 if the constant masks to zero all the bits the mode doesn't
6828 have. */
6829 && ((GET_MODE_SIZE (GET_MODE (x))
6830 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6831 || (0 == (mask
6832 & GET_MODE_MASK (GET_MODE (x))
6833 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
6834 return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
6835 break;
6836
6837 case AND:
6838 /* If this is an AND with a constant, convert it into an AND
6839 whose constant is the AND of that constant with MASK. If it
6840 remains an AND of MASK, delete it since it is redundant. */
6841
6842 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
6843 {
6844 x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
6845 mask & INTVAL (XEXP (x, 1)));
6846
6847 /* If X is still an AND, see if it is an AND with a mask that
6848 is just some low-order bits. If so, and it is MASK, we don't
6849 need it. */
6850
6851 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
6852 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) == mask)
6853 x = XEXP (x, 0);
6854
6855 /* If it remains an AND, try making another AND with the bits
6856 in the mode mask that aren't in MASK turned on. If the
6857 constant in the AND is wide enough, this might make a
6858 cheaper constant. */
6859
6860 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
6861 && GET_MODE_MASK (GET_MODE (x)) != mask
6862 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
6863 {
6864 HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
6865 | (GET_MODE_MASK (GET_MODE (x)) & ~mask));
6866 int width = GET_MODE_BITSIZE (GET_MODE (x));
6867 rtx y;
6868
6869 /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
6870 number, sign extend it. */
6871 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
6872 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6873 cval |= (HOST_WIDE_INT) -1 << width;
6874
6875 y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval));
6876 if (rtx_cost (y, SET) < rtx_cost (x, SET))
6877 x = y;
6878 }
6879
6880 break;
6881 }
6882
6883 goto binop;
6884
6885 case PLUS:
6886 /* In (and (plus FOO C1) M), if M is a mask that just turns off
6887 low-order bits (as in an alignment operation) and FOO is already
6888 aligned to that boundary, mask C1 to that boundary as well.
6889 This may eliminate that PLUS and, later, the AND. */
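/* For example, if MASK is ~7 and FOO is known to have its low three
bits zero, (plus FOO (const_int 9)) can be narrowed to
(plus FOO (const_int 8)), since bit 0 of the constant cannot survive
the masking. */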
6890
6891 {
6892 unsigned int width = GET_MODE_BITSIZE (mode);
6893 unsigned HOST_WIDE_INT smask = mask;
6894
6895 /* If MODE is narrower than HOST_WIDE_INT and mask is a negative
6896 number, sign extend it. */
6897
6898 if (width < HOST_BITS_PER_WIDE_INT
6899 && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6900 smask |= (HOST_WIDE_INT) -1 << width;
6901
6902 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6903 && exact_log2 (- smask) >= 0)
6904 {
6905 #ifdef STACK_BIAS
6906 if (STACK_BIAS
6907 && (XEXP (x, 0) == stack_pointer_rtx
6908 || XEXP (x, 0) == frame_pointer_rtx))
6909 {
6910 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
6911 unsigned HOST_WIDE_INT sp_mask = GET_MODE_MASK (mode);
6912
6913 sp_mask &= ~(sp_alignment - 1);
6914 if ((sp_mask & ~smask) == 0
6915 && ((INTVAL (XEXP (x, 1)) - STACK_BIAS) & ~smask) != 0)
6916 return force_to_mode (plus_constant (XEXP (x, 0),
6917 ((INTVAL (XEXP (x, 1)) -
6918 STACK_BIAS) & smask)
6919 + STACK_BIAS),
6920 mode, smask, reg, next_select);
6921 }
6922 #endif
6923 if ((nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0
6924 && (INTVAL (XEXP (x, 1)) & ~smask) != 0)
6925 return force_to_mode (plus_constant (XEXP (x, 0),
6926 (INTVAL (XEXP (x, 1))
6927 & smask)),
6928 mode, smask, reg, next_select);
6929 }
6930 }
6931
6932 /* ... fall through ... */
6933
6934 case MULT:
6935 /* For PLUS, MINUS and MULT, we need any bits less significant than the
6936 most significant bit in MASK since carries from those bits will
6937 affect the bits we are interested in. */
6938 mask = fuller_mask;
6939 goto binop;
6940
6941 case MINUS:
6942 /* If X is (minus C Y) where C's least set bit is larger than any bit
6943 in the mask, then we may replace with (neg Y). */
6944 if (GET_CODE (XEXP (x, 0)) == CONST_INT
6945 && (((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 0))
6946 & -INTVAL (XEXP (x, 0))))
6947 > mask))
6948 {
6949 x = gen_unary (NEG, GET_MODE (x), GET_MODE (x), XEXP (x, 1));
6950 return force_to_mode (x, mode, mask, reg, next_select);
6951 }
6952
6953 /* Similarly, if C contains every bit in the mask, then we may
6954 replace with (not Y). */
6955 if (GET_CODE (XEXP (x, 0)) == CONST_INT
6956 && ((INTVAL (XEXP (x, 0)) | (HOST_WIDE_INT) mask)
6957 == INTVAL (XEXP (x, 0))))
6958 {
6959 x = gen_unary (NOT, GET_MODE (x), GET_MODE (x), XEXP (x, 1));
6960 return force_to_mode (x, mode, mask, reg, next_select);
6961 }
6962
6963 mask = fuller_mask;
6964 goto binop;
6965
6966 case IOR:
6967 case XOR:
6968 /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
6969 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
6970 operation which may be a bitfield extraction. Ensure that the
6971 constant we form is not wider than the mode of X. */
6972
6973 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6974 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6975 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6976 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
6977 && GET_CODE (XEXP (x, 1)) == CONST_INT
6978 && ((INTVAL (XEXP (XEXP (x, 0), 1))
6979 + floor_log2 (INTVAL (XEXP (x, 1))))
6980 < GET_MODE_BITSIZE (GET_MODE (x)))
6981 && (INTVAL (XEXP (x, 1))
6982 & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
6983 {
6984 temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
6985 << INTVAL (XEXP (XEXP (x, 0), 1)));
6986 temp = gen_binary (GET_CODE (x), GET_MODE (x),
6987 XEXP (XEXP (x, 0), 0), temp);
6988 x = gen_binary (LSHIFTRT, GET_MODE (x), temp,
6989 XEXP (XEXP (x, 0), 1));
6990 return force_to_mode (x, mode, mask, reg, next_select);
6991 }
6992
6993 binop:
6994 /* For most binary operations, just propagate into the operation and
6995 change the mode if we have an operation of that mode. */
6996
6997 op0 = gen_lowpart_for_combine (op_mode,
6998 force_to_mode (XEXP (x, 0), mode, mask,
6999 reg, next_select));
7000 op1 = gen_lowpart_for_combine (op_mode,
7001 force_to_mode (XEXP (x, 1), mode, mask,
7002 reg, next_select));
7003
7004 /* If OP1 is a CONST_INT and X is an IOR or XOR, clear bits outside
7005 MASK since OP1 might have been sign-extended but we never want
7006 to turn on extra bits, since combine might have previously relied
7007 on them being off. */
7008 if (GET_CODE (op1) == CONST_INT && (code == IOR || code == XOR)
7009 && (INTVAL (op1) & mask) != 0)
7010 op1 = GEN_INT (INTVAL (op1) & mask);
7011
7012 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
7013 x = gen_binary (code, op_mode, op0, op1);
7014 break;
7015
7016 case ASHIFT:
7017 /* For left shifts, do the same, but just for the first operand.
7018 However, we cannot do anything with shifts where we cannot
7019 guarantee that the counts are smaller than the size of the mode
7020 because such a count will have a different meaning in a
7021 wider mode. */
7022
7023 if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
7024 && INTVAL (XEXP (x, 1)) >= 0
7025 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
7026 && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
7027 && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
7028 < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
7029 break;
7030
7031 /* If the shift count is a constant and we can do arithmetic in
7032 the mode of the shift, refine which bits we need. Otherwise, use the
7033 conservative form of the mask. */
7034 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7035 && INTVAL (XEXP (x, 1)) >= 0
7036 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
7037 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
7038 mask >>= INTVAL (XEXP (x, 1));
7039 else
7040 mask = fuller_mask;
7041
7042 op0 = gen_lowpart_for_combine (op_mode,
7043 force_to_mode (XEXP (x, 0), op_mode,
7044 mask, reg, next_select));
7045
7046 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
7047 x = gen_binary (code, op_mode, op0, XEXP (x, 1));
7048 break;
7049
7050 case LSHIFTRT:
7051 /* Here we can only do something if the shift count is a constant,
7052 the shift constant is valid for the host, and we can do arithmetic
7053 in OP_MODE. */
7054
7055 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7056 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
7057 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
7058 {
7059 rtx inner = XEXP (x, 0);
7060 unsigned HOST_WIDE_INT inner_mask;
7061
7062 /* Select the mask of the bits we need for the shift operand. */
7063 inner_mask = mask << INTVAL (XEXP (x, 1));
7064
7065 /* We can only change the mode of the shift if we can do arithmetic
7066 in the mode of the shift and INNER_MASK is no wider than the
7067 width of OP_MODE. */
7068 if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
7069 || (inner_mask & ~GET_MODE_MASK (op_mode)) != 0)
7070 op_mode = GET_MODE (x);
7071
7072 inner = force_to_mode (inner, op_mode, inner_mask, reg, next_select);
7073
7074 if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
7075 x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
7076 }
7077
7078 /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
7079 shift and AND produces only copies of the sign bit (C2 is one less
7080 than a power of two), we can do this with just a shift. */
7081
7082 if (GET_CODE (x) == LSHIFTRT
7083 && GET_CODE (XEXP (x, 1)) == CONST_INT
7084 /* The shift puts one of the sign bit copies in the least significant
7085 bit. */
7086 && ((INTVAL (XEXP (x, 1))
7087 + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
7088 >= GET_MODE_BITSIZE (GET_MODE (x)))
7089 && exact_log2 (mask + 1) >= 0
7090 /* Number of bits left after the shift must be more than the mask
7091 needs. */
7092 && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1))
7093 <= GET_MODE_BITSIZE (GET_MODE (x)))
7094 /* Must be more sign bit copies than the mask needs. */
7095 && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
7096 >= exact_log2 (mask + 1)))
7097 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
7098 GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
7099 - exact_log2 (mask + 1)));
7100
7101 goto shiftrt;
7102
7103 case ASHIFTRT:
7104 /* If we are just looking for the sign bit, we don't need this shift at
7105 all, even if it has a variable count. */
7106 if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
7107 && (mask == ((unsigned HOST_WIDE_INT) 1
7108 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
7109 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
7110
7111 /* If this is a shift by a constant, get a mask that contains those bits
7112 that are not copies of the sign bit. We then have two cases: If
7113 MASK only includes those bits, this can be a logical shift, which may
7114 allow simplifications. If MASK is a single-bit field not within
7115 those bits, we are requesting a copy of the sign bit and hence can
7116 shift the sign bit to the appropriate location. */
7117
7118 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
7119 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
7120 {
7121 int i = -1;
7122
7123 /* If the considered data is wider than HOST_WIDE_INT, we can't
7124 represent a mask for all its bits in a single scalar.
7125 But we only care about the lower bits, so calculate these. */
7126
7127 if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
7128 {
7129 nonzero = ~(HOST_WIDE_INT) 0;
7130
7131 /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7132 is the number of bits a full-width mask would have set.
7133 We need only shift if these are fewer than nonzero can
7134 hold. If not, we must keep all bits set in nonzero. */
7135
7136 if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7137 < HOST_BITS_PER_WIDE_INT)
7138 nonzero >>= INTVAL (XEXP (x, 1))
7139 + HOST_BITS_PER_WIDE_INT
7140 - GET_MODE_BITSIZE (GET_MODE (x));
7141 }
7142 else
7143 {
7144 nonzero = GET_MODE_MASK (GET_MODE (x));
7145 nonzero >>= INTVAL (XEXP (x, 1));
7146 }
7147
7148 if ((mask & ~nonzero) == 0
7149 || (i = exact_log2 (mask)) >= 0)
7150 {
7151 x = simplify_shift_const
7152 (x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
7153 i < 0 ? INTVAL (XEXP (x, 1))
7154 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);
7155
7156 if (GET_CODE (x) != ASHIFTRT)
7157 return force_to_mode (x, mode, mask, reg, next_select);
7158 }
7159 }
7160
7161 /* If MASK is 1, convert this to a LSHIFTRT. This can be done
7162 even if the shift count isn't a constant. */
7163 if (mask == 1)
7164 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));
7165
7166 shiftrt:
7167
7168 /* If this is a zero- or sign-extension operation that just affects bits
7169 we don't care about, remove it. Be sure the call above returned
7170 something that is still a shift. */
7171
7172 if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
7173 && GET_CODE (XEXP (x, 1)) == CONST_INT
7174 && INTVAL (XEXP (x, 1)) >= 0
7175 && (INTVAL (XEXP (x, 1))
7176 <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
7177 && GET_CODE (XEXP (x, 0)) == ASHIFT
7178 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7179 && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
7180 return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
7181 reg, next_select);
7182
7183 break;
7184
7185 case ROTATE:
7186 case ROTATERT:
7187 /* If the shift count is constant and we can do computations
7188 in the mode of X, compute where the bits we care about are.
7189 Otherwise, we can't do anything. Don't change the mode of
7190 the shift or propagate MODE into the shift, though. */
7191 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7192 && INTVAL (XEXP (x, 1)) >= 0)
7193 {
7194 temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
7195 GET_MODE (x), GEN_INT (mask),
7196 XEXP (x, 1));
7197 if (temp && GET_CODE (temp) == CONST_INT)
7198 SUBST (XEXP (x, 0),
7199 force_to_mode (XEXP (x, 0), GET_MODE (x),
7200 INTVAL (temp), reg, next_select));
7201 }
7202 break;
7203
7204 case NEG:
7205 /* If we just want the low-order bit, the NEG isn't needed since it
7206 won't change the low-order bit. */
7207 if (mask == 1)
7208 return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);
7209
7210 /* We need any bits less significant than the most significant bit in
7211 MASK since carries from those bits will affect the bits we are
7212 interested in. */
7213 mask = fuller_mask;
7214 goto unop;
7215
7216 case NOT:
7217 /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
7218 same as the XOR case above. Ensure that the constant we form is not
7219 wider than the mode of X. */
7220
7221 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
7222 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7223 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
7224 && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
7225 < GET_MODE_BITSIZE (GET_MODE (x)))
7226 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
7227 {
7228 temp = GEN_INT (mask << INTVAL (XEXP (XEXP (x, 0), 1)));
7229 temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
7230 x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));
7231
7232 return force_to_mode (x, mode, mask, reg, next_select);
7233 }
7234
7235 /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
7236 use the full mask inside the NOT. */
7237 mask = fuller_mask;
7238
7239 unop:
7240 op0 = gen_lowpart_for_combine (op_mode,
7241 force_to_mode (XEXP (x, 0), mode, mask,
7242 reg, next_select));
7243 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
7244 x = gen_unary (code, op_mode, op_mode, op0);
7245 break;
7246
7247 case NE:
7248 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
7249 in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
7250 and that bit is equal to STORE_FLAG_VALUE. */
7251 if ((mask & ~STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
7252 && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
7253 && nonzero_bits (XEXP (x, 0), mode) == STORE_FLAG_VALUE)
7254 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
7255
7256 break;
7257
7258 case IF_THEN_ELSE:
7259 /* We have no way of knowing if the IF_THEN_ELSE can itself be
7260 written in a narrower mode. We play it safe and do not do so. */
7261
7262 SUBST (XEXP (x, 1),
7263 gen_lowpart_for_combine (GET_MODE (x),
7264 force_to_mode (XEXP (x, 1), mode,
7265 mask, reg, next_select)));
7266 SUBST (XEXP (x, 2),
7267 gen_lowpart_for_combine (GET_MODE (x),
7268 force_to_mode (XEXP (x, 2), mode,
7269 mask, reg, next_select)));
7270 break;
7271
7272 default:
7273 break;
7274 }
7275
7276 /* Ensure we return a value of the proper mode. */
7277 return gen_lowpart_for_combine (mode, x);
7278 }
7279 \f
7280 /* Return nonzero if X is an expression that has one of two values depending on
7281 whether some other value is zero or nonzero. In that case, we return the
7282 value that is being tested, *PTRUE is set to the value if the rtx being
7283 returned has a nonzero value, and *PFALSE is set to the other alternative.
7284
7285 If we return zero, we set *PTRUE and *PFALSE to X. */
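/* For example, for X == (ne A (const_int 0)) we return A, with *PTRUE
set to const_true_rtx and *PFALSE set to const0_rtx. */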
7286
7287 static rtx
7288 if_then_else_cond (x, ptrue, pfalse)
7289 rtx x;
7290 rtx *ptrue, *pfalse;
7291 {
7292 enum machine_mode mode = GET_MODE (x);
7293 enum rtx_code code = GET_CODE (x);
7294 rtx cond0, cond1, true0, true1, false0, false1;
7295 unsigned HOST_WIDE_INT nz;
7296
7297 /* If we are comparing a value against zero, we are done. */
7298 if ((code == NE || code == EQ)
7299 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 0)
7300 {
7301 *ptrue = (code == NE) ? const_true_rtx : const0_rtx;
7302 *pfalse = (code == NE) ? const0_rtx : const_true_rtx;
7303 return XEXP (x, 0);
7304 }
7305
7306 /* If this is a unary operation whose operand has one of two values, apply
7307 our opcode to compute those values. */
7308 else if (GET_RTX_CLASS (code) == '1'
7309 && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
7310 {
7311 *ptrue = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), true0);
7312 *pfalse = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), false0);
7313 return cond0;
7314 }
7315
7316 /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
7317 make can't possibly match and would suppress other optimizations. */
7318 else if (code == COMPARE)
7319 ;
7320
7321 /* If this is a binary operation, see if either side has only one of two
7322 values. If either one does or if both do and they are conditional on
7323 the same value, compute the new true and false values. */
7324 else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2'
7325 || GET_RTX_CLASS (code) == '<')
7326 {
7327 cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
7328 cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);
7329
7330 if ((cond0 != 0 || cond1 != 0)
7331 && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
7332 {
7333 /* If if_then_else_cond returned zero, then true/false are the
7334 same rtl. We must copy one of them to prevent invalid rtl
7335 sharing. */
7336 if (cond0 == 0)
7337 true0 = copy_rtx (true0);
7338 else if (cond1 == 0)
7339 true1 = copy_rtx (true1);
7340
7341 *ptrue = gen_binary (code, mode, true0, true1);
7342 *pfalse = gen_binary (code, mode, false0, false1);
7343 return cond0 ? cond0 : cond1;
7344 }
7345
7346 /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
7347 operands is zero when the other is non-zero, and vice-versa,
7348 and STORE_FLAG_VALUE is 1 or -1. */
7349
7350 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7351 && (code == PLUS || code == IOR || code == XOR || code == MINUS
7352 || code == UMAX)
7353 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
7354 {
7355 rtx op0 = XEXP (XEXP (x, 0), 1);
7356 rtx op1 = XEXP (XEXP (x, 1), 1);
7357
7358 cond0 = XEXP (XEXP (x, 0), 0);
7359 cond1 = XEXP (XEXP (x, 1), 0);
7360
7361 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
7362 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
7363 && reversible_comparison_p (cond1)
7364 && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
7365 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
7366 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
7367 || ((swap_condition (GET_CODE (cond0))
7368 == reverse_condition (GET_CODE (cond1)))
7369 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
7370 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
7371 && ! side_effects_p (x))
7372 {
7373 *ptrue = gen_binary (MULT, mode, op0, const_true_rtx);
7374 *pfalse = gen_binary (MULT, mode,
7375 (code == MINUS
7376 ? gen_unary (NEG, mode, mode, op1) : op1),
7377 const_true_rtx);
7378 return cond0;
7379 }
7380 }
7381
7382 /* Similarly for MULT, AND and UMIN, except that for these the result
7383 is always zero. */
7384 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7385 && (code == MULT || code == AND || code == UMIN)
7386 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
7387 {
7388 cond0 = XEXP (XEXP (x, 0), 0);
7389 cond1 = XEXP (XEXP (x, 1), 0);
7390
7391 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
7392 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
7393 && reversible_comparison_p (cond1)
7394 && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
7395 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
7396 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
7397 || ((swap_condition (GET_CODE (cond0))
7398 == reverse_condition (GET_CODE (cond1)))
7399 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
7400 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
7401 && ! side_effects_p (x))
7402 {
7403 *ptrue = *pfalse = const0_rtx;
7404 return cond0;
7405 }
7406 }
7407 }
7408
7409 else if (code == IF_THEN_ELSE)
7410 {
7411 /* If we have IF_THEN_ELSE already, extract the condition and
7412 canonicalize it if it is NE or EQ. */
7413 cond0 = XEXP (x, 0);
7414 *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
7415 if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
7416 return XEXP (cond0, 0);
7417 else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
7418 {
7419 *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
7420 return XEXP (cond0, 0);
7421 }
7422 else
7423 return cond0;
7424 }
7425
7426 /* If X is a normal SUBREG with both inner and outer modes integral,
7427 we can narrow both the true and false values of the inner expression,
7428 if there is a condition. */
7429 else if (code == SUBREG && GET_MODE_CLASS (mode) == MODE_INT
7430 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
7431 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
7432 && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
7433 &true0, &false0)))
7434 {
7435 if ((GET_CODE (SUBREG_REG (x)) == REG
7436 || GET_CODE (SUBREG_REG (x)) == MEM
7437 || CONSTANT_P (SUBREG_REG (x)))
7438 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD
7439 && (WORDS_BIG_ENDIAN || SUBREG_WORD (x) != 0))
7440 {
7441 true0 = operand_subword (true0, SUBREG_WORD (x), 0, mode);
7442 false0 = operand_subword (false0, SUBREG_WORD (x), 0, mode);
7443 }
7444 *ptrue = force_to_mode (true0, mode, ~(HOST_WIDE_INT) 0, NULL_RTX, 0);
7445 *pfalse
7446 = force_to_mode (false0, mode, ~(HOST_WIDE_INT) 0, NULL_RTX, 0);
7447
7448 return cond0;
7449 }
7450
7451 /* If X is a constant, this isn't special and will cause confusion
7452 if we treat it as such. Likewise if it is equivalent to a constant. */
7453 else if (CONSTANT_P (x)
7454 || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
7455 ;
7456
7457 /* If we're in BImode, canonicalize on 0 and STORE_FLAG_VALUE, as that
7458 will be least confusing to the rest of the compiler. */
7459 else if (mode == BImode)
7460 {
7461 *ptrue = GEN_INT (STORE_FLAG_VALUE), *pfalse = const0_rtx;
7462 return x;
7463 }
7464
7465 /* If X is known to be either 0 or -1, those are the true and
7466 false values when testing X. */
7467 else if (x == constm1_rtx || x == const0_rtx
7468 || (mode != VOIDmode
7469 && num_sign_bit_copies (x, mode) == GET_MODE_BITSIZE (mode)))
7470 {
7471 *ptrue = constm1_rtx, *pfalse = const0_rtx;
7472 return x;
7473 }
7474
7475 /* Likewise for 0 or a single bit. */
7476 else if (mode != VOIDmode
7477 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7478 && exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
7479 {
7480 *ptrue = GEN_INT (nz), *pfalse = const0_rtx;
7481 return x;
7482 }
7483
7484 /* Otherwise fail; show no condition with true and false values the same. */
7485 *ptrue = *pfalse = x;
7486 return 0;
7487 }
7488 \f
7489 /* Return the value of expression X given the fact that condition COND
7490 is known to be true when applied to REG as its first operand and VAL
7491 as its second. X is known to not be shared and so can be modified in
7492 place.
7493
7494 We only handle the simplest cases, and specifically those cases that
7495 arise with IF_THEN_ELSE expressions. */
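/* For example, if COND is GE, REG is R, and VAL is (const_int 0), an
(abs R) within X simplifies to R; under LT it would become (neg R). */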
7496
7497 static rtx
7498 known_cond (x, cond, reg, val)
7499 rtx x;
7500 enum rtx_code cond;
7501 rtx reg, val;
7502 {
7503 enum rtx_code code = GET_CODE (x);
7504 rtx temp;
7505 const char *fmt;
7506 int i, j;
7507
7508 if (side_effects_p (x))
7509 return x;
7510
7511 if (cond == EQ && rtx_equal_p (x, reg))
7512 return val;
7513
7514 /* If X is (abs REG) and we know something about REG's relationship
7515 with zero, we may be able to simplify this. */
7516
7517 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
7518 switch (cond)
7519 {
7520 case GE: case GT: case EQ:
7521 return XEXP (x, 0);
7522 case LT: case LE:
7523 return gen_unary (NEG, GET_MODE (XEXP (x, 0)), GET_MODE (XEXP (x, 0)),
7524 XEXP (x, 0));
7525 default:
7526 break;
7527 }
7528
7529 /* The only other cases we handle are MIN, MAX, and comparisons if the
7530 operands are the same as REG and VAL. */
7531
7532 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
7533 {
7534 if (rtx_equal_p (XEXP (x, 0), val))
7535 cond = swap_condition (cond), temp = val, val = reg, reg = temp;
7536
7537 if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
7538 {
7539 if (GET_RTX_CLASS (code) == '<')
7540 {
7541 if (comparison_dominates_p (cond, code))
7542 return const_true_rtx;
7543
7544 code = reverse_condition (code);
7545 if (code != UNKNOWN
7546 && comparison_dominates_p (cond, code))
7547 return const0_rtx;
7548 else
7549 return x;
7550 }
7551 else if (code == SMAX || code == SMIN
7552 || code == UMIN || code == UMAX)
7553 {
7554 int unsignedp = (code == UMIN || code == UMAX);
7555
7556 if (code == SMAX || code == UMAX)
7557 cond = reverse_condition (cond);
7558
7559 switch (cond)
7560 {
7561 case GE: case GT:
7562 return unsignedp ? x : XEXP (x, 1);
7563 case LE: case LT:
7564 return unsignedp ? x : XEXP (x, 0);
7565 case GEU: case GTU:
7566 return unsignedp ? XEXP (x, 1) : x;
7567 case LEU: case LTU:
7568 return unsignedp ? XEXP (x, 0) : x;
7569 default:
7570 break;
7571 }
7572 }
7573 }
7574 }
7575
7576 fmt = GET_RTX_FORMAT (code);
7577 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7578 {
7579 if (fmt[i] == 'e')
7580 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
7581 else if (fmt[i] == 'E')
7582 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7583 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
7584 cond, reg, val));
7585 }
7586
7587 return x;
7588 }
7589 \f
7590 /* See if X and Y are equal for the purposes of seeing if we can rewrite an
7591 assignment as a field assignment. */
7592
7593 static int
7594 rtx_equal_for_field_assignment_p (x, y)
7595 rtx x;
7596 rtx y;
7597 {
7598 if (x == y || rtx_equal_p (x, y))
7599 return 1;
7600
7601 if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
7602 return 0;
7603
7604 /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
7605 Note that all SUBREGs of MEM are paradoxical; otherwise they
7606 would have been rewritten. */
7607 if (GET_CODE (x) == MEM && GET_CODE (y) == SUBREG
7608 && GET_CODE (SUBREG_REG (y)) == MEM
7609 && rtx_equal_p (SUBREG_REG (y),
7610 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (y)), x)))
7611 return 1;
7612
7613 if (GET_CODE (y) == MEM && GET_CODE (x) == SUBREG
7614 && GET_CODE (SUBREG_REG (x)) == MEM
7615 && rtx_equal_p (SUBREG_REG (x),
7616 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (x)), y)))
7617 return 1;
7618
7619 /* We used to see if get_last_value of X and Y were the same but that's
7620 not correct. In one direction, we'll cause the assignment to have
7621 the wrong destination and in the other, we'll import a register into
7622 this insn that might already have been dead. So fail if none of the
7623 above cases are true. */
7624 return 0;
7625 }
7626 \f
7627 /* See if X, a SET operation, can be rewritten as a bit-field assignment.
7628 Return that assignment if so.
7629
7630 We only handle the most common cases. */
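/* For example, (set DEST (ior (ashift (const_int 1) POS) DEST)) sets a
single bit, and may become something like
(set (zero_extract DEST (const_int 1) POS) (const_int 1)). */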
7631
7632 static rtx
7633 make_field_assignment (x)
7634 rtx x;
7635 {
7636 rtx dest = SET_DEST (x);
7637 rtx src = SET_SRC (x);
7638 rtx assign;
7639 rtx rhs, lhs;
7640 HOST_WIDE_INT c1;
7641 HOST_WIDE_INT pos;
7642 unsigned HOST_WIDE_INT len;
7643 rtx other;
7644 enum machine_mode mode;
7645
7646 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
7647 a clear of a one-bit field. We will have changed it to
7648 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
7649 for a SUBREG. */
7650
7651 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
7652 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
7653 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
7654 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7655 {
7656 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
7657 1, 1, 1, 0);
7658 if (assign != 0)
7659 return gen_rtx_SET (VOIDmode, assign, const0_rtx);
7660 return x;
7661 }
7662
7663 else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
7664 && subreg_lowpart_p (XEXP (src, 0))
7665 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
7666 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
7667 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
7668 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
7669 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7670 {
7671 assign = make_extraction (VOIDmode, dest, 0,
7672 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
7673 1, 1, 1, 0);
7674 if (assign != 0)
7675 return gen_rtx_SET (VOIDmode, assign, const0_rtx);
7676 return x;
7677 }
7678
7679 /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
7680 one-bit field. */
7681 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
7682 && XEXP (XEXP (src, 0), 0) == const1_rtx
7683 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7684 {
7685 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
7686 1, 1, 1, 0);
7687 if (assign != 0)
7688 return gen_rtx_SET (VOIDmode, assign, const1_rtx);
7689 return x;
7690 }
7691
7692 /* The other case we handle is assignments into a constant-position
7693 field. They look like (ior/xor (and DEST C1) OTHER). If C1 represents
7694 a mask that has all one bits except for a group of zero bits and
7695 OTHER is known to have zeros where C1 has ones, this is such an
7696 assignment. Compute the position and length from C1. Shift OTHER
7697 to the appropriate position, force it to the required mode, and
7698 make the extraction. Check for the AND in both operands. */
7699
7700 if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
7701 return x;
7702
7703 rhs = expand_compound_operation (XEXP (src, 0));
7704 lhs = expand_compound_operation (XEXP (src, 1));
7705
7706 if (GET_CODE (rhs) == AND
7707 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
7708 && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
7709 c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
7710 else if (GET_CODE (lhs) == AND
7711 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
7712 && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
7713 c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
7714 else
7715 return x;
7716
7717 pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
7718 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
7719 || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
7720 || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
7721 return x;
7722
7723 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
7724 if (assign == 0)
7725 return x;
7726
7727 /* The mode to use for the source is the mode of the assignment, or of
7728 what is inside a possible STRICT_LOW_PART. */
7729 mode = (GET_CODE (assign) == STRICT_LOW_PART
7730 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
7731
7732 /* Shift OTHER right POS places and make it the source, restricting it
7733 to the proper length and mode. */
7734
7735 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
7736 GET_MODE (src), other, pos),
7737 mode,
7738 GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
7739 ? ~(unsigned HOST_WIDE_INT) 0
7740 : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
7741 dest, 0);
7742
7743 return gen_rtx_combine (SET, VOIDmode, assign, src);
7744 }
7745 \f
7746 /* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
7747 if so. */
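/* For example, (ior (and A C) (and B C)) becomes (and (ior A B) C);
the recursive call below may then distribute the inner IOR further. */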
7748
7749 static rtx
7750 apply_distributive_law (x)
7751 rtx x;
7752 {
7753 enum rtx_code code = GET_CODE (x);
7754 rtx lhs, rhs, other;
7755 rtx tem;
7756 enum rtx_code inner_code;
7757
7758 /* Distributivity is not true for floating point.
7759 It can change the value. So don't do it.
7760 -- rms and moshier@world.std.com. */
7761 if (FLOAT_MODE_P (GET_MODE (x)))
7762 return x;
7763
7764 /* The outer operation can only be one of the following: */
7765 if (code != IOR && code != AND && code != XOR
7766 && code != PLUS && code != MINUS)
7767 return x;
7768
7769 lhs = XEXP (x, 0), rhs = XEXP (x, 1);
7770
7771 /* If either operand is a primitive we can't do anything, so get out
7772 fast. */
7773 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
7774 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
7775 return x;
7776
7777 lhs = expand_compound_operation (lhs);
7778 rhs = expand_compound_operation (rhs);
7779 inner_code = GET_CODE (lhs);
7780 if (inner_code != GET_CODE (rhs))
7781 return x;
7782
7783 /* See if the inner and outer operations distribute. */
7784 switch (inner_code)
7785 {
7786 case LSHIFTRT:
7787 case ASHIFTRT:
7788 case AND:
7789 case IOR:
7790 /* These all distribute except over PLUS and MINUS. */
7791 if (code == PLUS || code == MINUS)
7792 return x;
7793 break;
7794
7795 case MULT:
7796 if (code != PLUS && code != MINUS)
7797 return x;
7798 break;
7799
7800 case ASHIFT:
7801 /* This is also a multiply, so it distributes over everything. */
7802 break;
7803
7804 case SUBREG:
7805 /* Non-paradoxical SUBREGs distribute over all operations, provided
7806 the inner modes and word numbers are the same, this is an extraction
7807 of a low-order part, we don't convert an fp operation to int or
7808 vice versa, and we would not be converting a single-word
7809 operation into a multi-word operation. The latter test is not
7810 required, but it prevents generating unneeded multi-word operations.
7811 Some of the previous tests are redundant given the latter test, but
7812 are retained because they are required for correctness.
7813
7814 We produce the result slightly differently in this case. */
7815
7816 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
7817 || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
7818 || ! subreg_lowpart_p (lhs)
7819 || (GET_MODE_CLASS (GET_MODE (lhs))
7820 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
7821 || (GET_MODE_SIZE (GET_MODE (lhs))
7822 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
7823 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
7824 return x;
7825
7826 tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
7827 SUBREG_REG (lhs), SUBREG_REG (rhs));
7828 return gen_lowpart_for_combine (GET_MODE (x), tem);
7829
7830 default:
7831 return x;
7832 }
7833
7834 /* Set LHS and RHS to the inner operands (A and B in the example
7835 above) and set OTHER to the common operand (C in the example).
7836 There is only one way to do this unless the inner operation is
7837 commutative. */
7838 if (GET_RTX_CLASS (inner_code) == 'c'
7839 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
7840 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
7841 else if (GET_RTX_CLASS (inner_code) == 'c'
7842 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
7843 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
7844 else if (GET_RTX_CLASS (inner_code) == 'c'
7845 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
7846 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
7847 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
7848 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
7849 else
7850 return x;
7851
7852 /* Form the new inner operation, seeing if it simplifies first. */
7853 tem = gen_binary (code, GET_MODE (x), lhs, rhs);
7854
7855 /* There is one exception to the general way of distributing:
7856 (a | c) ^ (b | c) -> (a ^ b) & (~c) */
7857 if (code == XOR && inner_code == IOR)
7858 {
7859 inner_code = AND;
7860 other = gen_unary (NOT, GET_MODE (x), GET_MODE (x), other);
7861 }
7862
7863 /* We may be able to continue distributing the result, so call
7864 ourselves recursively on the inner operation before forming the
7865 outer operation, which we return. */
7866 return gen_binary (inner_code, GET_MODE (x),
7867 apply_distributive_law (tem), other);
7868 }
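/* As a concrete illustration of the rewrite performed above (an
   illustrative aside, not part of the original logic): an outer AND
   over two inner IORs sharing the operand C,

       (and (ior A C) (ior B C))  ==>  (ior (and A B) C)

   which on plain integers is the Boolean identity
   (a | c) & (b | c) == (a & b) | c.  A minimal sketch, with a
   hypothetical helper name, kept disabled like the other
   illustrative fragments in this file:  */
#if 0
static int
apply_distributive_law_example (a, b, c)
     int a, b, c;
{
  /* Both sides compute the same bits, so this always returns 1.  */
  return ((a | c) & (b | c)) == ((a & b) | c);
}
#endif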
7869 \f
7870 /* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
7871 in MODE.
7872
7873 Return an equivalent form, if different from X. Otherwise, return X. If
7874 X is zero, we always construct the equivalent form. */
7875
7876 static rtx
7877 simplify_and_const_int (x, mode, varop, constop)
7878 rtx x;
7879 enum machine_mode mode;
7880 rtx varop;
7881 unsigned HOST_WIDE_INT constop;
7882 {
7883 unsigned HOST_WIDE_INT nonzero;
7884 int i;
7885
7886 /* Simplify VAROP knowing that we will be only looking at some of the
7887 bits in it. */
7888 varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);
7889
7890 /* If VAROP is a CLOBBER, we will fail so return it; if it is a
7891 CONST_INT, we are done. */
7892 if (GET_CODE (varop) == CLOBBER || GET_CODE (varop) == CONST_INT)
7893 return varop;
7894
7895 /* See what bits may be nonzero in VAROP. Unlike the general case of
7896 a call to nonzero_bits, here we don't care about bits outside
7897 MODE. */
7898
7899 nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
7900 nonzero = trunc_int_for_mode (nonzero, mode);
7901
7902 /* Turn off all bits in the constant that are known to already be zero.
7903 Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
7904 which is tested below. */
7905
7906 constop &= nonzero;
7907
7908 /* If we don't have any bits left, return zero. */
7909 if (constop == 0)
7910 return const0_rtx;
7911
7912 /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
7913 a power of two, we can replace this with an ASHIFT. */
7914 if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
7915 && (i = exact_log2 (constop)) >= 0)
7916 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
7917
7918 /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
7919 or XOR, then try to apply the distributive law. This may eliminate
7920 operations if either branch can be simplified because of the AND.
7921 It may also make some cases more complex, but those cases probably
7922 won't match a pattern either with or without this. */
7923
7924 if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
7925 return
7926 gen_lowpart_for_combine
7927 (mode,
7928 apply_distributive_law
7929 (gen_binary (GET_CODE (varop), GET_MODE (varop),
7930 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7931 XEXP (varop, 0), constop),
7932 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7933 XEXP (varop, 1), constop))));
7934
7935 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
7936 if we already had one (just check for the simplest cases). */
7937 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
7938 && GET_MODE (XEXP (x, 0)) == mode
7939 && SUBREG_REG (XEXP (x, 0)) == varop)
7940 varop = XEXP (x, 0);
7941 else
7942 varop = gen_lowpart_for_combine (mode, varop);
7943
7944 /* If we can't make the SUBREG, try to return what we were given. */
7945 if (GET_CODE (varop) == CLOBBER)
7946 return x ? x : varop;
7947
7948 /* If we are only masking insignificant bits, return VAROP. */
7949 if (constop == nonzero)
7950 x = varop;
7951
7952 /* Otherwise, return an AND. See how much, if any, of X we can use. */
7953 else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
7954 x = gen_binary (AND, mode, varop, GEN_INT (constop));
7955
7956 else
7957 {
7958 if (GET_CODE (XEXP (x, 1)) != CONST_INT
7959 || (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) != constop)
7960 SUBST (XEXP (x, 1), GEN_INT (constop));
7961
7962 SUBST (XEXP (x, 0), varop);
7963 }
7964
7965 return x;
7966 }
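/* Worked example of the CONSTOP/NONZERO interplay above (illustrative
   numbers): assume VAROP is a zero-extended QImode value, so its
   nonzero_bits in MODE is 0xff.  For CONSTOP == 0x1ff we get
   CONSTOP & NONZERO == 0xff == NONZERO, the AND masks nothing
   significant, and VAROP itself is returned.  For CONSTOP == 0x101
   the constant is first narrowed to 0x001.  For CONSTOP == 0x100 no
   nonzero bits survive and const0_rtx is returned.  */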
7967 \f
7968 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
7969 We don't let nonzero_bits recur into num_sign_bit_copies, because that
7970 is less useful. We can't allow both, because that results in exponential
7971 run time recursion. There is a nullstone testcase that triggered
7972 this. This macro avoids accidental uses of num_sign_bit_copies. */
7973 #define num_sign_bit_copies()
7974
7975 /* Given an expression, X, compute which bits in X can be non-zero.
7976 We don't care about bits outside of those defined in MODE.
7977
7978 For most X this is simply GET_MODE_MASK (MODE), but if X is
7979 a shift, AND, or zero_extract, we can do better. */
7980
7981 static unsigned HOST_WIDE_INT
7982 nonzero_bits (x, mode)
7983 rtx x;
7984 enum machine_mode mode;
7985 {
7986 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
7987 unsigned HOST_WIDE_INT inner_nz;
7988 enum rtx_code code;
7989 unsigned int mode_width = GET_MODE_BITSIZE (mode);
7990 rtx tem;
7991
7992 /* For floating-point values, assume all bits are needed. */
7993 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
7994 return nonzero;
7995
7996 /* If X is wider than MODE, use its mode instead. */
7997 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
7998 {
7999 mode = GET_MODE (x);
8000 nonzero = GET_MODE_MASK (mode);
8001 mode_width = GET_MODE_BITSIZE (mode);
8002 }
8003
8004 if (mode_width > HOST_BITS_PER_WIDE_INT)
8005 /* Our only callers in this case look for single bit values. So
8006 just return the mode mask. Those tests will then be false. */
8007 return nonzero;
8008
8009 #ifndef WORD_REGISTER_OPERATIONS
8010 /* If MODE is wider than X, but both are a single word for both the host
8011 and target machines, we can compute this from which bits of the
8012 object might be nonzero in its own mode, taking into account the fact
8013 that on many CISC machines, accessing an object in a wider mode
8014 causes the high-order bits to become undefined. So they are
8015 not known to be zero. */
8016
8017 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
8018 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
8019 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
8020 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
8021 {
8022 nonzero &= nonzero_bits (x, GET_MODE (x));
8023 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
8024 return nonzero;
8025 }
8026 #endif
8027
8028 code = GET_CODE (x);
8029 switch (code)
8030 {
8031 case REG:
8032 #ifdef POINTERS_EXTEND_UNSIGNED
8033 /* If pointers extend unsigned and this is a pointer in Pmode, say that
8034 all the bits above ptr_mode are known to be zero. */
8035 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
8036 && REG_POINTER (x))
8037 nonzero &= GET_MODE_MASK (ptr_mode);
8038 #endif
8039
8040 #ifdef STACK_BOUNDARY
8041 /* If this is the stack pointer, we may know something about its
8042 alignment. If PUSH_ROUNDING is defined, it is possible for the
8043 stack to be momentarily aligned only to that amount, so we pick
8044 the least alignment. */
8045
8046 /* We can't check for arg_pointer_rtx here, because it is not
8047 guaranteed to have as much alignment as the stack pointer.
8048 In particular, in the Irix6 n64 ABI, the stack has 128 bit
8049 alignment but the argument pointer has only 64 bit alignment. */
8050
8051 if ((x == frame_pointer_rtx
8052 || x == stack_pointer_rtx
8053 || x == hard_frame_pointer_rtx
8054 || (REGNO (x) >= FIRST_VIRTUAL_REGISTER
8055 && REGNO (x) <= LAST_VIRTUAL_REGISTER))
8056 #ifdef STACK_BIAS
8057 && !STACK_BIAS
8058 #endif
8059 )
8060 {
8061 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
8062
8063 #ifdef PUSH_ROUNDING
8064 if (REGNO (x) == STACK_POINTER_REGNUM && PUSH_ARGS)
8065 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
8066 #endif
8067
8068 /* We must return here, otherwise we may get a worse result from
8069 one of the choices below. There is nothing useful below as
8070 far as the stack pointer is concerned. */
8071 return nonzero & ~(sp_alignment - 1);
8072 }
8073 #endif
8074
8075 /* If X is a register whose nonzero bits value is current, use it.
8076 Otherwise, if X is a register whose value we can find, use that
8077 value. Otherwise, use the previously-computed global nonzero bits
8078 for this register. */
8079
8080 if (reg_last_set_value[REGNO (x)] != 0
8081 && reg_last_set_mode[REGNO (x)] == mode
8082 && (reg_last_set_label[REGNO (x)] == label_tick
8083 || (REGNO (x) >= FIRST_PSEUDO_REGISTER
8084 && REG_N_SETS (REGNO (x)) == 1
8085 && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start,
8086 REGNO (x))))
8087 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
8088 return reg_last_set_nonzero_bits[REGNO (x)];
8089
8090 tem = get_last_value (x);
8091
8092 if (tem)
8093 {
8094 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
8095 /* If X is narrower than MODE and TEM is a non-negative
8096 constant that would appear negative in the mode of X,
8097 sign-extend it for use in reg_nonzero_bits because some
8098 machines (maybe most) will actually do the sign-extension
8099 and this is the conservative approach.
8100
8101 ??? For 2.5, try to tighten up the MD files in this regard
8102 instead of this kludge. */
8103
8104 if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
8105 && GET_CODE (tem) == CONST_INT
8106 && INTVAL (tem) > 0
8107 && 0 != (INTVAL (tem)
8108 & ((HOST_WIDE_INT) 1
8109 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
8110 tem = GEN_INT (INTVAL (tem)
8111 | ((HOST_WIDE_INT) (-1)
8112 << GET_MODE_BITSIZE (GET_MODE (x))));
8113 #endif
8114 return nonzero_bits (tem, mode);
8115 }
8116 else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
8117 return reg_nonzero_bits[REGNO (x)] & nonzero;
8118 else
8119 return nonzero;
8120
8121 case CONST_INT:
8122 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
8123 /* If X would appear negative in MODE, sign-extend the value. */
8124 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
8125 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
8126 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
8127 #endif
8128
8129 return INTVAL (x);
8130
8131 case MEM:
8132 #ifdef LOAD_EXTEND_OP
8133 /* In many, if not most, RISC machines, reading a byte from memory
8134 zeros the rest of the register. Noticing that fact saves a lot
8135 of extra zero-extends. */
8136 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
8137 nonzero &= GET_MODE_MASK (GET_MODE (x));
8138 #endif
8139 break;
8140
8141 case EQ: case NE:
8142 case GT: case GTU:
8143 case LT: case LTU:
8144 case GE: case GEU:
8145 case LE: case LEU:
8146
8147 /* If this produces an integer result, we know which bits are set.
8148 Code here used to clear bits outside the mode of X, but that is
8149 now done above. */
8150
8151 if (GET_MODE_CLASS (mode) == MODE_INT
8152 && mode_width <= HOST_BITS_PER_WIDE_INT)
8153 nonzero = STORE_FLAG_VALUE;
8154 break;
8155
8156 case NEG:
8157 #if 0
8158 /* Disabled to avoid exponential mutual recursion between nonzero_bits
8159 and num_sign_bit_copies. */
8160 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
8161 == GET_MODE_BITSIZE (GET_MODE (x)))
8162 nonzero = 1;
8163 #endif
8164
8165 if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width)
8166 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
8167 break;
8168
8169 case ABS:
8170 #if 0
8171 /* Disabled to avoid exponential mutual recursion between nonzero_bits
8172 and num_sign_bit_copies. */
8173 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
8174 == GET_MODE_BITSIZE (GET_MODE (x)))
8175 nonzero = 1;
8176 #endif
8177 break;
8178
8179 case TRUNCATE:
8180 nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
8181 break;
8182
8183 case ZERO_EXTEND:
8184 nonzero &= nonzero_bits (XEXP (x, 0), mode);
8185 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
8186 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
8187 break;
8188
8189 case SIGN_EXTEND:
8190 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
8191 Otherwise, show that all the bits in the outer mode but not in
8192 the inner mode may be non-zero. */
8193 inner_nz = nonzero_bits (XEXP (x, 0), mode);
8194 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
8195 {
8196 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
8197 if (inner_nz
8198 & (((HOST_WIDE_INT) 1
8199 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
8200 inner_nz |= (GET_MODE_MASK (mode)
8201 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
8202 }
8203
8204 nonzero &= inner_nz;
8205 break;
8206
8207 case AND:
8208 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
8209 & nonzero_bits (XEXP (x, 1), mode));
8210 break;
8211
8212 case XOR: case IOR:
8213 case UMIN: case UMAX: case SMIN: case SMAX:
8214 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
8215 | nonzero_bits (XEXP (x, 1), mode));
8216 break;
8217
8218 case PLUS: case MINUS:
8219 case MULT:
8220 case DIV: case UDIV:
8221 case MOD: case UMOD:
8222 /* We can apply the rules of arithmetic to compute the number of
8223 high- and low-order zero bits of these operations. We start by
8224 computing the width (position of the highest-order non-zero bit)
8225 and the number of low-order zero bits for each value. */
8226 {
8227 unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
8228 unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
8229 int width0 = floor_log2 (nz0) + 1;
8230 int width1 = floor_log2 (nz1) + 1;
8231 int low0 = floor_log2 (nz0 & -nz0);
8232 int low1 = floor_log2 (nz1 & -nz1);
8233 HOST_WIDE_INT op0_maybe_minusp
8234 = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
8235 HOST_WIDE_INT op1_maybe_minusp
8236 = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
8237 unsigned int result_width = mode_width;
8238 int result_low = 0;
8239
8240 switch (code)
8241 {
8242 case PLUS:
8243 #ifdef STACK_BIAS
8244 if (STACK_BIAS
8245 && (XEXP (x, 0) == stack_pointer_rtx
8246 || XEXP (x, 0) == frame_pointer_rtx)
8247 && GET_CODE (XEXP (x, 1)) == CONST_INT)
8248 {
8249 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
8250
8251 nz0 = (GET_MODE_MASK (mode) & ~(sp_alignment - 1));
8252 nz1 = INTVAL (XEXP (x, 1)) - STACK_BIAS;
8253 width0 = floor_log2 (nz0) + 1;
8254 width1 = floor_log2 (nz1) + 1;
8255 low0 = floor_log2 (nz0 & -nz0);
8256 low1 = floor_log2 (nz1 & -nz1);
8257 }
8258 #endif
8259 result_width = MAX (width0, width1) + 1;
8260 result_low = MIN (low0, low1);
8261 break;
8262 case MINUS:
8263 result_low = MIN (low0, low1);
8264 break;
8265 case MULT:
8266 result_width = width0 + width1;
8267 result_low = low0 + low1;
8268 break;
8269 case DIV:
8270 if (! op0_maybe_minusp && ! op1_maybe_minusp)
8271 result_width = width0;
8272 break;
8273 case UDIV:
8274 result_width = width0;
8275 break;
8276 case MOD:
8277 if (! op0_maybe_minusp && ! op1_maybe_minusp)
8278 result_width = MIN (width0, width1);
8279 result_low = MIN (low0, low1);
8280 break;
8281 case UMOD:
8282 result_width = MIN (width0, width1);
8283 result_low = MIN (low0, low1);
8284 break;
8285 default:
8286 abort ();
8287 }
8288
8289 if (result_width < mode_width)
8290 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
8291
8292 if (result_low > 0)
8293 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
8294 }
8295 break;
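      /* A worked instance of the bookkeeping above (illustrative
         numbers): if NZ0 == 0x0c and NZ1 == 0x30, then WIDTH0 == 4,
         WIDTH1 == 6, LOW0 == 2 and LOW1 == 4.  For PLUS this gives
         RESULT_WIDTH == 7 and RESULT_LOW == 2, narrowing NONZERO to
         0x7c: the sum is at most 0x30 + 0x0c == 0x3c and stays a
         multiple of 4.  */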
8296
8297 case ZERO_EXTRACT:
8298 if (GET_CODE (XEXP (x, 1)) == CONST_INT
8299 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
8300 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
8301 break;
8302
8303 case SUBREG:
8304 /* If this is a SUBREG formed for a promoted variable that has
8305 been zero-extended, we know that at least the high-order bits
8306 are zero, though others might be too. */
8307
8308 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
8309 nonzero = (GET_MODE_MASK (GET_MODE (x))
8310 & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));
8311
8312 /* If the inner mode is a single word for both the host and target
8313 machines, we can compute this from which bits of the inner
8314 object might be nonzero. */
8315 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
8316 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
8317 <= HOST_BITS_PER_WIDE_INT))
8318 {
8319 nonzero &= nonzero_bits (SUBREG_REG (x), mode);
8320
8321 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
8322 /* If this is a typical RISC machine, we only have to worry
8323 about the way loads are extended. */
8324 if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
8325 ? (((nonzero
8326 & (((unsigned HOST_WIDE_INT) 1
8327 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
8328 != 0))
8329 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
8330 #endif
8331 {
8332 /* On many CISC machines, accessing an object in a wider mode
8333 causes the high-order bits to become undefined. So they are
8334 not known to be zero. */
8335 if (GET_MODE_SIZE (GET_MODE (x))
8336 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8337 nonzero |= (GET_MODE_MASK (GET_MODE (x))
8338 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
8339 }
8340 }
8341 break;
8342
8343 case ASHIFTRT:
8344 case LSHIFTRT:
8345 case ASHIFT:
8346 case ROTATE:
8347 /* The nonzero bits are in two classes: any bits within MODE
8348 that aren't in GET_MODE (x) are always significant. The rest of the
8349 nonzero bits are those that are significant in the operand of
8350 the shift when shifted the appropriate number of bits. This
8351 shows that high-order bits are cleared by the right shift and
8352 low-order bits by left shifts. */
8353 if (GET_CODE (XEXP (x, 1)) == CONST_INT
8354 && INTVAL (XEXP (x, 1)) >= 0
8355 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
8356 {
8357 enum machine_mode inner_mode = GET_MODE (x);
8358 unsigned int width = GET_MODE_BITSIZE (inner_mode);
8359 int count = INTVAL (XEXP (x, 1));
8360 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
8361 unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
8362 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
8363 unsigned HOST_WIDE_INT outer = 0;
8364
8365 if (mode_width > width)
8366 outer = (op_nonzero & nonzero & ~mode_mask);
8367
8368 if (code == LSHIFTRT)
8369 inner >>= count;
8370 else if (code == ASHIFTRT)
8371 {
8372 inner >>= count;
8373
8374 /* If the sign bit may have been nonzero before the shift, we
8375 need to mark all the places it could have been copied to
8376 by the shift as possibly nonzero. */
8377 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
8378 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
8379 }
8380 else if (code == ASHIFT)
8381 inner <<= count;
8382 else
8383 inner = ((inner << (count % width)
8384 | (inner >> (width - (count % width)))) & mode_mask);
8385
8386 nonzero &= (outer | inner);
8387 }
8388 break;
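      /* For example (illustrative): in (lshiftrt:SI X 28) the inner
         nonzero bits are shifted right by 28, so at most the
         low-order four bits of the result can be set and NONZERO is
         narrowed to 0x0f.  */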
8389
8390 case FFS:
8391 /* This is at most the number of bits in the mode. */
8392 nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
8393 break;
8394
8395 case IF_THEN_ELSE:
8396 nonzero &= (nonzero_bits (XEXP (x, 1), mode)
8397 | nonzero_bits (XEXP (x, 2), mode));
8398 break;
8399
8400 default:
8401 break;
8402 }
8403
8404 return nonzero;
8405 }
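/* Usage sketch (illustrative): nonzero_bits of
   (and:SI (reg:SI R) (const_int 0x3c)) intersects the register's
   known bits with 0x3c, and nonzero_bits of
   (zero_extend:SI (reg:QI R)) is at most 0xff; this is what lets
   callers such as simplify_and_const_int above discard redundant
   maskings.  */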
8406
8407 /* See the macro definition above. */
8408 #undef num_sign_bit_copies
8409 \f
8410 /* Return the number of bits at the high-order end of X that are known to
8411 be equal to the sign bit. X will be used in mode MODE; if MODE is
8412 VOIDmode, X will be used in its own mode. The returned value will always
8413 be between 1 and the number of bits in MODE. */
8414
8415 static unsigned int
8416 num_sign_bit_copies (x, mode)
8417 rtx x;
8418 enum machine_mode mode;
8419 {
8420 enum rtx_code code = GET_CODE (x);
8421 unsigned int bitwidth;
8422 int num0, num1, result;
8423 unsigned HOST_WIDE_INT nonzero;
8424 rtx tem;
8425
8426 /* If we weren't given a mode, use the mode of X. If the mode is still
8427 VOIDmode, we don't know anything. Likewise if one of the modes is
8428 floating-point. */
8429
8430 if (mode == VOIDmode)
8431 mode = GET_MODE (x);
8432
8433 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
8434 return 1;
8435
8436 bitwidth = GET_MODE_BITSIZE (mode);
8437
8438 /* For a smaller object, just ignore the high bits. */
8439 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
8440 {
8441 num0 = num_sign_bit_copies (x, GET_MODE (x));
8442 return MAX (1,
8443 num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
8444 }
8445
8446 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
8447 {
8448 #ifndef WORD_REGISTER_OPERATIONS
8449 /* If this machine does not do all register operations on the entire
8450 register and MODE is wider than the mode of X, we can say nothing
8451 at all about the high-order bits. */
8452 return 1;
8453 #else
8454 /* Likewise on machines that do, if the mode of the object is smaller
8455 than a word and loads of that size don't sign extend, we can say
8456 nothing about the high order bits. */
8457 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
8458 #ifdef LOAD_EXTEND_OP
8459 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
8460 #endif
8461 )
8462 return 1;
8463 #endif
8464 }
8465
8466 switch (code)
8467 {
8468 case REG:
8469
8470 #ifdef POINTERS_EXTEND_UNSIGNED
8471 /* If pointers extend signed and this is a pointer in Pmode, say that
8472 all the bits above ptr_mode are known to be sign bit copies. */
8473 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
8474 && REG_POINTER (x))
8475 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
8476 #endif
8477
8478 if (reg_last_set_value[REGNO (x)] != 0
8479 && reg_last_set_mode[REGNO (x)] == mode
8480 && (reg_last_set_label[REGNO (x)] == label_tick
8481 || (REGNO (x) >= FIRST_PSEUDO_REGISTER
8482 && REG_N_SETS (REGNO (x)) == 1
8483 && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start,
8484 REGNO (x))))
8485 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
8486 return reg_last_set_sign_bit_copies[REGNO (x)];
8487
8488 tem = get_last_value (x);
8489 if (tem != 0)
8490 return num_sign_bit_copies (tem, mode);
8491
8492 if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0)
8493 return reg_sign_bit_copies[REGNO (x)];
8494 break;
8495
8496 case MEM:
8497 #ifdef LOAD_EXTEND_OP
8498 /* Some RISC machines sign-extend all loads smaller than a word. */
8499 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
8500 return MAX (1, ((int) bitwidth
8501 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
8502 #endif
8503 break;
8504
8505 case CONST_INT:
8506 /* If the constant is negative, take its 1's complement and remask.
8507 Then see how many zero bits we have. */
8508 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
8509 if (bitwidth <= HOST_BITS_PER_WIDE_INT
8510 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8511 nonzero = (~nonzero) & GET_MODE_MASK (mode);
8512
8513 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
8514
8515 case SUBREG:
8516 /* If this is a SUBREG for a promoted object that is sign-extended
8517 and we are looking at it in a wider mode, we know that at least the
8518 high-order bits are known to be sign bit copies. */
8519
8520 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
8521 {
8522 num0 = num_sign_bit_copies (SUBREG_REG (x), mode);
8523 return MAX ((int) bitwidth
8524 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
8525 num0);
8526 }
8527
8528 /* For a smaller object, just ignore the high bits. */
8529 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
8530 {
8531 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
8532 return MAX (1, (num0
8533 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
8534 - bitwidth)));
8535 }
8536
8537 #ifdef WORD_REGISTER_OPERATIONS
8538 #ifdef LOAD_EXTEND_OP
8539 /* For paradoxical SUBREGs on machines where all register operations
8540 affect the entire register, just look inside. Note that we are
8541 passing MODE to the recursive call, so the number of sign bit copies
8542 will remain relative to that mode, not the inner mode. */
8543
8544 /* This works only if loads sign extend. Otherwise, if we get a
8545 reload for the inner part, it may be loaded from the stack, and
8546 then we lose all sign bit copies that existed before the store
8547 to the stack. */
8548
8549 if ((GET_MODE_SIZE (GET_MODE (x))
8550 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8551 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND)
8552 return num_sign_bit_copies (SUBREG_REG (x), mode);
8553 #endif
8554 #endif
8555 break;
8556
8557 case SIGN_EXTRACT:
8558 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
8559 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
8560 break;
8561
8562 case SIGN_EXTEND:
8563 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
8564 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
8565
8566 case TRUNCATE:
8567 /* For a smaller object, just ignore the high bits. */
8568 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
8569 return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
8570 - bitwidth)));
8571
8572 case NOT:
8573 return num_sign_bit_copies (XEXP (x, 0), mode);
8574
8575 case ROTATE: case ROTATERT:
8576 /* If we are rotating left by a number of bits less than the number
8577 of sign bit copies, we can just subtract that amount from the
8578 number. */
8579 if (GET_CODE (XEXP (x, 1)) == CONST_INT
8580 && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
8581 {
8582 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8583 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
8584 : (int) bitwidth - INTVAL (XEXP (x, 1))));
8585 }
8586 break;
8587
8588 case NEG:
8589 /* In general, this subtracts one sign bit copy. But if the value
8590 is known to be positive, the number of sign bit copies is the
8591 same as that of the input. Finally, if the input has just one bit
8592 that might be nonzero, all the bits are copies of the sign bit. */
8593 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8594 if (bitwidth > HOST_BITS_PER_WIDE_INT)
8595 return num0 > 1 ? num0 - 1 : 1;
8596
8597 nonzero = nonzero_bits (XEXP (x, 0), mode);
8598 if (nonzero == 1)
8599 return bitwidth;
8600
8601 if (num0 > 1
8602 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
8603 num0--;
8604
8605 return num0;
8606
8607 case IOR: case AND: case XOR:
8608 case SMIN: case SMAX: case UMIN: case UMAX:
8609 /* Logical operations will preserve the number of sign-bit copies.
8610 MIN and MAX operations always return one of the operands. */
8611 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8612 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8613 return MIN (num0, num1);
8614
8615 case PLUS: case MINUS:
8616 /* For addition and subtraction, we can have a 1-bit carry. However,
8617 if we are subtracting 1 from a positive number, there will not
8618 be such a carry. Furthermore, if the positive number is known to
8619 be 0 or 1, we know the result is either -1 or 0. */
8620
8621 if (code == PLUS && XEXP (x, 1) == constm1_rtx
8622 && bitwidth <= HOST_BITS_PER_WIDE_INT)
8623 {
8624 nonzero = nonzero_bits (XEXP (x, 0), mode);
8625 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
8626 return (nonzero == 1 || nonzero == 0 ? bitwidth
8627 : bitwidth - floor_log2 (nonzero) - 1);
8628 }
8629
8630 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8631 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8632 return MAX (1, MIN (num0, num1) - 1);
8633
8634 case MULT:
8635 /* The number of bits of the product is the sum of the number of
8636 bits of both terms. However, unless one of the terms is known
8637 to be positive, we must allow for an additional bit since negating
8638 a negative number can remove one sign bit copy. */
8639
8640 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8641 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8642
8643 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
8644 if (result > 0
8645 && (bitwidth > HOST_BITS_PER_WIDE_INT
8646 || (((nonzero_bits (XEXP (x, 0), mode)
8647 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8648 && ((nonzero_bits (XEXP (x, 1), mode)
8649 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
8650 result--;
8651
8652 return MAX (1, result);
8653
8654 case UDIV:
8655 /* The result must be <= the first operand. If the first operand
8656 has the high bit set, we know nothing about the number of sign
8657 bit copies. */
8658 if (bitwidth > HOST_BITS_PER_WIDE_INT)
8659 return 1;
8660 else if ((nonzero_bits (XEXP (x, 0), mode)
8661 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8662 return 1;
8663 else
8664 return num_sign_bit_copies (XEXP (x, 0), mode);
8665
8666 case UMOD:
8667 /* The result must be <= the second operand. */
8668 return num_sign_bit_copies (XEXP (x, 1), mode);
8669
8670 case DIV:
8671 /* Similar to unsigned division, except that we have to worry about
8672 the case where the divisor is negative, in which case we have
8673 to add 1. */
8674 result = num_sign_bit_copies (XEXP (x, 0), mode);
8675 if (result > 1
8676 && (bitwidth > HOST_BITS_PER_WIDE_INT
8677 || (nonzero_bits (XEXP (x, 1), mode)
8678 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
8679 result--;
8680
8681 return result;
8682
8683 case MOD:
8684 result = num_sign_bit_copies (XEXP (x, 1), mode);
8685 if (result > 1
8686 && (bitwidth > HOST_BITS_PER_WIDE_INT
8687 || (nonzero_bits (XEXP (x, 1), mode)
8688 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
8689 result--;
8690
8691 return result;
8692
8693 case ASHIFTRT:
8694 /* Shifts by a constant add to the number of bits equal to the
8695 sign bit. */
8696 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8697 if (GET_CODE (XEXP (x, 1)) == CONST_INT
8698 && INTVAL (XEXP (x, 1)) > 0)
8699 num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));
8700
8701 return num0;
8702
8703 case ASHIFT:
8704 /* Left shifts destroy copies. */
8705 if (GET_CODE (XEXP (x, 1)) != CONST_INT
8706 || INTVAL (XEXP (x, 1)) < 0
8707 || INTVAL (XEXP (x, 1)) >= bitwidth)
8708 return 1;
8709
8710 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8711 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
8712
8713 case IF_THEN_ELSE:
8714 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
8715 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
8716 return MIN (num0, num1);
8717
8718 case EQ: case NE: case GE: case GT: case LE: case LT:
8719 case GEU: case GTU: case LEU: case LTU:
8720 if (STORE_FLAG_VALUE == -1)
8721 return bitwidth;
8722 break;
8723
8724 default:
8725 break;
8726 }
8727
8728 /* If we haven't been able to figure it out by one of the above rules,
8729 see if some of the high-order bits are known to be zero. If so,
8730 count those bits and return one less than that amount. If we can't
8731 safely compute the mask for this mode, always return BITWIDTH. */
8732
8733 if (bitwidth > HOST_BITS_PER_WIDE_INT)
8734 return 1;
8735
8736 nonzero = nonzero_bits (x, mode);
8737 return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
8738 ? 1 : bitwidth - floor_log2 (nonzero) - 1);
8739 }
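/* Two small examples of the rules above (illustrative): in SImode,
   (sign_extend:SI (reg:QI R)) has at least 32 - 8 + 1 == 25 sign bit
   copies, and (ashiftrt:SI X (const_int 31)) is known to be -1 or 0,
   so all 32 bits are copies of the sign bit.  */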
8740 \f
8741 /* Return the number of "extended" bits there are in X, when interpreted
8742 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
8743 unsigned quantities, this is the number of high-order zero bits.
8744 For signed quantities, this is the number of copies of the sign bit
8745 minus 1. In both cases, this function returns the number of "spare"
8746 bits. For example, if two quantities for which this function returns
8747 at least 1 are added, the addition is known not to overflow.
8748
8749 This function will always return 0 unless called during combine, which
8750 implies that it must be called from a define_split. */
8751
8752 unsigned int
8753 extended_count (x, mode, unsignedp)
8754 rtx x;
8755 enum machine_mode mode;
8756 int unsignedp;
8757 {
8758 if (nonzero_sign_valid == 0)
8759 return 0;
8760
8761 return (unsignedp
8762 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8763 ? (GET_MODE_BITSIZE (mode) - 1
8764 - floor_log2 (nonzero_bits (x, mode)))
8765 : 0)
8766 : num_sign_bit_copies (x, mode) - 1);
8767 }
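/* For example (illustrative): a QImode value zero-extended into
   SImode has nonzero_bits 0xff, so the unsigned count is
   31 - floor_log2 (0xff) == 24 spare bits; the signed count for the
   corresponding sign_extend is likewise 25 - 1 == 24.  */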
8768 \f
8769 /* This function is called from `simplify_shift_const' to merge two
8770 outer operations. Specifically, we have already found that we need
8771 to perform operation *POP0 with constant *PCONST0 at the outermost
8772 position. We would now like to also perform OP1 with constant CONST1
8773 (with *POP0 being done last).
8774
8775 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
8776 the resulting operation. *PCOMP_P is set to 1 if we would need to
8777 complement the innermost operand, otherwise it is unchanged.
8778
8779 MODE is the mode in which the operation will be done. No bits outside
8780 the width of this mode matter. It is assumed that the width of this mode
8781 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
8782
8783 If *POP0 or OP1 is NIL, no operation is required. Only NEG, PLUS,
8784 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
8785 result is simply *PCONST0.
8786
8787 If the resulting operation cannot be expressed as one operation, we
8788 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
8789
8790 static int
8791 merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
8792 enum rtx_code *pop0;
8793 HOST_WIDE_INT *pconst0;
8794 enum rtx_code op1;
8795 HOST_WIDE_INT const1;
8796 enum machine_mode mode;
8797 int *pcomp_p;
8798 {
8799 enum rtx_code op0 = *pop0;
8800 HOST_WIDE_INT const0 = *pconst0;
8801
8802 const0 &= GET_MODE_MASK (mode);
8803 const1 &= GET_MODE_MASK (mode);
8804
8805 /* If OP0 is an AND, clear unimportant bits in CONST1. */
8806 if (op0 == AND)
8807 const1 &= const0;
8808
8809 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
8810 if OP0 is SET. */
8811
8812 if (op1 == NIL || op0 == SET)
8813 return 1;
8814
8815 else if (op0 == NIL)
8816 op0 = op1, const0 = const1;
8817
8818 else if (op0 == op1)
8819 {
8820 switch (op0)
8821 {
8822 case AND:
8823 const0 &= const1;
8824 break;
8825 case IOR:
8826 const0 |= const1;
8827 break;
8828 case XOR:
8829 const0 ^= const1;
8830 break;
8831 case PLUS:
8832 const0 += const1;
8833 break;
8834 case NEG:
8835 op0 = NIL;
8836 break;
8837 default:
8838 break;
8839 }
8840 }
8841
8842 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
8843 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
8844 return 0;
8845
8846 /* If the two constants aren't the same, we can't do anything. The
8847 remaining six cases can all be done. */
8848 else if (const0 != const1)
8849 return 0;
8850
8851 else
8852 switch (op0)
8853 {
8854 case IOR:
8855 if (op1 == AND)
8856 /* (a & b) | b == b */
8857 op0 = SET;
8858 else /* op1 == XOR */
8859 /* (a ^ b) | b == a | b */
8860 {;}
8861 break;
8862
8863 case XOR:
8864 if (op1 == AND)
8865 /* (a & b) ^ b == (~a) & b */
8866 op0 = AND, *pcomp_p = 1;
8867 else /* op1 == IOR */
8868 /* (a | b) ^ b == a & ~b */
8869 op0 = AND, *pconst0 = ~const0;
8870 break;
8871
8872 case AND:
8873 if (op1 == IOR)
8874 /* (a | b) & b == b */
8875 op0 = SET;
8876 else /* op1 == XOR */
8877 /* (a ^ b) & b == (~a) & b */
8878 *pcomp_p = 1;
8879 break;
8880 default:
8881 break;
8882 }
8883
8884 /* Check for NO-OP cases. */
8885 const0 &= GET_MODE_MASK (mode);
8886 if (const0 == 0
8887 && (op0 == IOR || op0 == XOR || op0 == PLUS))
8888 op0 = NIL;
8889 else if (const0 == 0 && op0 == AND)
8890 op0 = SET;
8891 else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
8892 && op0 == AND)
8893 op0 = NIL;
8894
8895 /* ??? Slightly redundant with the above mask, but not entirely.
8896 Moving this above means we'd have to sign-extend the mode mask
8897 for the final test. */
8898 const0 = trunc_int_for_mode (const0, mode);
8899
8900 *pop0 = op0;
8901 *pconst0 = const0;
8902
8903 return 1;
8904 }
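/* A self-contained check of the pairwise identities used above,
   sketched on plain integers (a hypothetical helper, kept disabled
   like the other illustrative fragments in this file):  */
#if 0
static int
merge_outer_ops_example (a, b)
     int a, b;
{
  return ((a & b) | b) == b             /* IOR after AND is SET.  */
         && ((a ^ b) & b) == (~a & b)   /* AND after XOR complements.  */
         && ((a | b) ^ b) == (a & ~b);  /* XOR after IOR masks.  */
}
#endif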
8905 \f
8906 /* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
8907 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
8908 that we started with.
8909
8910 The shift is normally computed in the widest mode we find in VAROP, as
8911 long as it isn't a different number of words than RESULT_MODE. Exceptions
8912 are ASHIFTRT and ROTATE, which are always done in their original mode. */
8913
8914 static rtx
8915 simplify_shift_const (x, code, result_mode, varop, input_count)
8916 rtx x;
8917 enum rtx_code code;
8918 enum machine_mode result_mode;
8919 rtx varop;
8920 int input_count;
8921 {
8922 enum rtx_code orig_code = code;
8923 int orig_count = input_count;
8924 unsigned int count;
8925 int signed_count;
8926 enum machine_mode mode = result_mode;
8927 enum machine_mode shift_mode, tmode;
8928 unsigned int mode_words
8929 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
8930 /* We form (outer_op (code varop count) (outer_const)). */
8931 enum rtx_code outer_op = NIL;
8932 HOST_WIDE_INT outer_const = 0;
8933 rtx const_rtx;
8934 int complement_p = 0;
8935 rtx new;
8936
8937 /* If we were given an invalid count, don't do anything except exactly
8938 what was requested. */
8939
8940 if (input_count < 0 || input_count > (int) GET_MODE_BITSIZE (mode))
8941 {
8942 if (x)
8943 return x;
8944
8945 return gen_rtx_fmt_ee (code, mode, varop, GEN_INT (input_count));
8946 }
8947
8948 count = input_count;
8949
8950 /* Make sure to truncate the "natural" shift on the way in. We don't
8951 want to do this inside the loop as it makes it more difficult to
8952 combine shifts. */
8953 #ifdef SHIFT_COUNT_TRUNCATED
8954 if (SHIFT_COUNT_TRUNCATED)
8955 count %= GET_MODE_BITSIZE (mode);
8956 #endif
8957
8958 /* Unless one of the branches of the `if' in this loop does a `continue',
8959 we will `break' the loop after the `if'. */
8960
8961 while (count != 0)
8962 {
8963 /* If we have an operand of (clobber (const_int 0)), just return that
8964 value. */
8965 if (GET_CODE (varop) == CLOBBER)
8966 return varop;
8967
8968 /* If we discovered we had to complement VAROP, leave. Making a NOT
8969 here would cause an infinite loop. */
8970 if (complement_p)
8971 break;
8972
8973 /* Convert ROTATERT to ROTATE. */
8974 if (code == ROTATERT)
8975 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
8976
8977 /* We need to determine what mode we will do the shift in. If the
8978 shift is a right shift or a ROTATE, we must always do it in the mode
8979 it was originally done in. Otherwise, we can do it in MODE, the
8980 widest mode encountered. */
8981 shift_mode
8982 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
8983 ? result_mode : mode);
8984
8985 /* Handle cases where the count is greater than the size of the mode
8986 minus 1. For ASHIFT, use the size minus one as the count (this can
8987 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
8988 take the count modulo the size. For other shifts, the result is
8989 zero.
8990
8991 Since these shifts are being produced by the compiler by combining
8992 multiple operations, each of which are defined, we know what the
8993 result is supposed to be. */
8994
8995 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
8996 {
8997 if (code == ASHIFTRT)
8998 count = GET_MODE_BITSIZE (shift_mode) - 1;
8999 else if (code == ROTATE || code == ROTATERT)
9000 count %= GET_MODE_BITSIZE (shift_mode);
9001 else
9002 {
9003 /* We can't simply return zero because there may be an
9004 outer op. */
9005 varop = const0_rtx;
9006 count = 0;
9007 break;
9008 }
9009 }
9010
9011 /* An arithmetic right shift of a quantity known to be -1 or 0
9012 is a no-op. */
9013 if (code == ASHIFTRT
9014 && (num_sign_bit_copies (varop, shift_mode)
9015 == GET_MODE_BITSIZE (shift_mode)))
9016 {
9017 count = 0;
9018 break;
9019 }
9020
9021 /* If we are doing an arithmetic right shift and discarding all but
9022 the sign bit copies, this is equivalent to doing a shift by the
9023 bitsize minus one. Convert it into that shift because it will often
9024 allow other simplifications. */
9025
9026 if (code == ASHIFTRT
9027 && (count + num_sign_bit_copies (varop, shift_mode)
9028 >= GET_MODE_BITSIZE (shift_mode)))
9029 count = GET_MODE_BITSIZE (shift_mode) - 1;
9030
9031 /* We simplify the tests below and elsewhere by converting
9032 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
9033 `make_compound_operation' will convert it to an ASHIFTRT for
9034 those machines (such as the VAX) that don't have an LSHIFTRT. */
9035 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
9036 && code == ASHIFTRT
9037 && ((nonzero_bits (varop, shift_mode)
9038 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
9039 == 0))
9040 code = LSHIFTRT;
9041
9042 switch (GET_CODE (varop))
9043 {
9044 case SIGN_EXTEND:
9045 case ZERO_EXTEND:
9046 case SIGN_EXTRACT:
9047 case ZERO_EXTRACT:
9048 new = expand_compound_operation (varop);
9049 if (new != varop)
9050 {
9051 varop = new;
9052 continue;
9053 }
9054 break;
9055
9056 case MEM:
9057 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
9058 minus the width of a smaller mode, we can do this with a
9059 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
9060 if ((code == ASHIFTRT || code == LSHIFTRT)
9061 && ! mode_dependent_address_p (XEXP (varop, 0))
9062 && ! MEM_VOLATILE_P (varop)
9063 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
9064 MODE_INT, 1)) != BLKmode)
9065 {
9066 if (BYTES_BIG_ENDIAN)
9067 new = gen_rtx_MEM (tmode, XEXP (varop, 0));
9068 else
9069 new = gen_rtx_MEM (tmode,
9070 plus_constant (XEXP (varop, 0),
9071 count / BITS_PER_UNIT));
9072
9073 MEM_COPY_ATTRIBUTES (new, varop);
9074 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
9075 : ZERO_EXTEND, mode, new);
9076 count = 0;
9077 continue;
9078 }
9079 break;
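          /* Concrete instance (illustrative): with
             GET_MODE_BITSIZE (mode) == 32 and COUNT == 24, TMODE is
             the 8-bit integer mode, so (lshiftrt:SI (mem:SI A) 24)
             becomes (zero_extend:SI (mem:QI A)) on a big-endian
             target and (zero_extend:SI (mem:QI A+3)) on a
             little-endian one, since that is where the high-order
             byte lives.  */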
9080
9081 case USE:
9082 /* Similar to the case above, except that we can only do this if
9083 the resulting mode is the same as that of the underlying
9084 MEM and adjust the address depending on the *bits* endianness
9085 because of the way that bit-field extract insns are defined. */
9086 if ((code == ASHIFTRT || code == LSHIFTRT)
9087 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
9088 MODE_INT, 1)) != BLKmode
9089 && tmode == GET_MODE (XEXP (varop, 0)))
9090 {
9091 if (BITS_BIG_ENDIAN)
9092 new = XEXP (varop, 0);
9093 else
9094 {
9095 new = copy_rtx (XEXP (varop, 0));
9096 SUBST (XEXP (new, 0),
9097 plus_constant (XEXP (new, 0),
9098 count / BITS_PER_UNIT));
9099 }
9100
9101 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
9102 : ZERO_EXTEND, mode, new);
9103 count = 0;
9104 continue;
9105 }
9106 break;
9107
9108 case SUBREG:
9109 /* If VAROP is a SUBREG, strip it as long as the inner operand has
9110 the same number of words as what we've seen so far. Then store
9111 the widest mode in MODE. */
9112 if (subreg_lowpart_p (varop)
9113 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9114 > GET_MODE_SIZE (GET_MODE (varop)))
9115 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9116 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
9117 == mode_words))
9118 {
9119 varop = SUBREG_REG (varop);
9120 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
9121 mode = GET_MODE (varop);
9122 continue;
9123 }
9124 break;
9125
9126 case MULT:
9127 /* Some machines use MULT instead of ASHIFT because MULT
9128 is cheaper. But it is still better on those machines to
9129 merge two shifts into one. */
9130 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9131 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9132 {
9133 varop
9134 = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
9135 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
9136 continue;
9137 }
9138 break;
9139
9140 case UDIV:
9141 /* Similar, for when divides are cheaper. */
9142 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9143 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9144 {
9145 varop
9146 = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
9147 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
9148 continue;
9149 }
9150 break;
9151
9152 case ASHIFTRT:
9153 /* If we are extracting just the sign bit of an arithmetic right
9154 shift, that shift is not needed. */
9155 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
9156 {
9157 varop = XEXP (varop, 0);
9158 continue;
9159 }
9160
9161 /* ... fall through ... */
9162
9163 case LSHIFTRT:
9164 case ASHIFT:
9165 case ROTATE:
9166 /* Here we have two nested shifts. The result is usually the
9167 AND of a new shift with a mask. We compute the result below. */
9168 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9169 && INTVAL (XEXP (varop, 1)) >= 0
9170 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
9171 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9172 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
9173 {
9174 enum rtx_code first_code = GET_CODE (varop);
9175 unsigned int first_count = INTVAL (XEXP (varop, 1));
9176 unsigned HOST_WIDE_INT mask;
9177 rtx mask_rtx;
9178
9179 /* We have one common special case. We can't do any merging if
9180 the inner code is an ASHIFTRT of a smaller mode. However, if
9181 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
9182 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
9183 we can convert it to
9184 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
9185 This simplifies certain SIGN_EXTEND operations. */
9186 if (code == ASHIFT && first_code == ASHIFTRT
9187 && (GET_MODE_BITSIZE (result_mode)
9188 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
9189 {
9190 /* C3 has the low-order C1 bits zero. */
9191
9192 mask = (GET_MODE_MASK (mode)
9193 & ~(((HOST_WIDE_INT) 1 << first_count) - 1));
9194
9195 varop = simplify_and_const_int (NULL_RTX, result_mode,
9196 XEXP (varop, 0), mask);
9197 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
9198 varop, count);
9199 count = first_count;
9200 code = ASHIFTRT;
9201 continue;
9202 }
9203
9204 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
9205 than C1 high-order bits equal to the sign bit, we can convert
9206 this to either an ASHIFT or an ASHIFTRT depending on the
9207 two counts.
9208
9209 We cannot do this if VAROP's mode is not SHIFT_MODE. */
9210
9211 if (code == ASHIFTRT && first_code == ASHIFT
9212 && GET_MODE (varop) == shift_mode
9213 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
9214 > first_count))
9215 {
9216 varop = XEXP (varop, 0);
9217
9218 signed_count = count - first_count;
9219 if (signed_count < 0)
9220 count = -signed_count, code = ASHIFT;
9221 else
9222 count = signed_count;
9223
9224 continue;
9225 }
9226
9227 /* There are some cases we can't do. If CODE is ASHIFTRT,
9228 we can only do this if FIRST_CODE is also ASHIFTRT.
9229
9230 We can't do the case when CODE is ROTATE and FIRST_CODE is
9231 ASHIFTRT.
9232
9233 If the mode of this shift is not the mode of the outer shift,
9234 we can't do this if either shift is a right shift or ROTATE.
9235
9236 Finally, we can't do any of these if the mode is too wide
9237 unless the codes are the same.
9238
9239 Handle the case where the shift codes are the same
9240 first. */
9241
9242 if (code == first_code)
9243 {
9244 if (GET_MODE (varop) != result_mode
9245 && (code == ASHIFTRT || code == LSHIFTRT
9246 || code == ROTATE))
9247 break;
9248
9249 count += first_count;
9250 varop = XEXP (varop, 0);
9251 continue;
9252 }
9253
9254 if (code == ASHIFTRT
9255 || (code == ROTATE && first_code == ASHIFTRT)
9256 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
9257 || (GET_MODE (varop) != result_mode
9258 && (first_code == ASHIFTRT || first_code == LSHIFTRT
9259 || first_code == ROTATE
9260 || code == ROTATE)))
9261 break;
9262
9263 /* To compute the mask to apply after the shift, shift the
9264 nonzero bits of the inner shift the same way the
9265 outer shift will. */
9266
9267 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
9268
9269 mask_rtx
9270 = simplify_binary_operation (code, result_mode, mask_rtx,
9271 GEN_INT (count));
9272
9273 /* Give up if we can't compute an outer operation to use. */
9274 if (mask_rtx == 0
9275 || GET_CODE (mask_rtx) != CONST_INT
9276 || ! merge_outer_ops (&outer_op, &outer_const, AND,
9277 INTVAL (mask_rtx),
9278 result_mode, &complement_p))
9279 break;
9280
9281 /* If the shifts are in the same direction, we add the
9282 counts. Otherwise, we subtract them. */
9283 signed_count = count;
9284 if ((code == ASHIFTRT || code == LSHIFTRT)
9285 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
9286 signed_count += first_count;
9287 else
9288 signed_count -= first_count;
9289
9290 /* If SIGNED_COUNT is positive, the new shift is usually CODE,
9291 except in the two cases below, in which case it is
9292 FIRST_CODE. If SIGNED_COUNT is negative, FIRST_CODE should
9293 always be used. */
9294 if (signed_count > 0
9295 && ((first_code == ROTATE && code == ASHIFT)
9296 || (first_code == ASHIFTRT && code == LSHIFTRT)))
9297 code = first_code, count = signed_count;
9298 else if (signed_count < 0)
9299 code = first_code, count = -signed_count;
9300 else
9301 count = signed_count;
9302
9303 varop = XEXP (varop, 0);
9304 continue;
9305 }
9306
9307 /* If we have (A << B << C) for any shift, we can convert this to
9308 (A << C << B). This wins if A is a constant. Only try this if
9309 B is not a constant. */
9310
9311 else if (GET_CODE (varop) == code
9312 && GET_CODE (XEXP (varop, 1)) != CONST_INT
9313 && 0 != (new
9314 = simplify_binary_operation (code, mode,
9315 XEXP (varop, 0),
9316 GEN_INT (count))))
9317 {
9318 varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
9319 count = 0;
9320 continue;
9321 }
9322 break;
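          /* Example of merging two shifts as above (illustrative): in
             (lshiftrt:SI (lshiftrt:SI X 2) 3) the directions agree,
             so the counts add and the result is (lshiftrt:SI X 5).
             The mask from the inner shift's nonzero bits,
             0x3fffffff >> 3 == 0x07ffffff, covers every bit the
             merged shift can leave set, so the recorded outer AND is
             later discarded by simplify_and_const_int.  */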
9323
9324 case NOT:
9325 /* Make this fit the case below. */
9326 varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
9327 GEN_INT (GET_MODE_MASK (mode)));
9328 continue;
9329
9330 case IOR:
9331 case AND:
9332 case XOR:
9333 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
9334 with C the size of VAROP - 1 and the shift is logical if
9335 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9336 we have an (le X 0) operation. If we have an arithmetic shift
9337 and STORE_FLAG_VALUE is 1 or we have a logical shift with
9338 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
9339
9340 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
9341 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
9342 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9343 && (code == LSHIFTRT || code == ASHIFTRT)
9344 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
9345 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9346 {
9347 count = 0;
9348 varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
9349 const0_rtx);
9350
9351 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9352 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
9353
9354 continue;
9355 }
9356
9357 /* If we have (shift (logical)), move the logical to the outside
9358 to allow it to possibly combine with another logical and the
9359 shift to combine with another shift. This also canonicalizes to
9360 what a ZERO_EXTRACT looks like. Also, some machines have
9361 (and (shift)) insns. */
9362
9363 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9364 && (new = simplify_binary_operation (code, result_mode,
9365 XEXP (varop, 1),
9366 GEN_INT (count))) != 0
9367 && GET_CODE (new) == CONST_INT
9368 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
9369 INTVAL (new), result_mode, &complement_p))
9370 {
9371 varop = XEXP (varop, 0);
9372 continue;
9373 }
9374
9375 /* If we can't do that, try to simplify the shift in each arm of the
9376 logical expression, make a new logical expression, and apply
9377 the inverse distributive law. */
9378 {
9379 rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
9380 XEXP (varop, 0), count);
9381 rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
9382 XEXP (varop, 1), count);
9383
9384 varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs);
9385 varop = apply_distributive_law (varop);
9386
9387 count = 0;
9388 }
9389 break;
9390
9391 case EQ:
9392 /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
9393 says that the sign bit can be tested, FOO has mode MODE, C is
9394 GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
9395 that may be nonzero. */
9396 if (code == LSHIFTRT
9397 && XEXP (varop, 1) == const0_rtx
9398 && GET_MODE (XEXP (varop, 0)) == result_mode
9399 && count == GET_MODE_BITSIZE (result_mode) - 1
9400 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9401 && ((STORE_FLAG_VALUE
9402 & ((HOST_WIDE_INT) 1
9403 << (GET_MODE_BITSIZE (result_mode) - 1))))
9404 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9405 && merge_outer_ops (&outer_op, &outer_const, XOR,
9406 (HOST_WIDE_INT) 1, result_mode,
9407 &complement_p))
9408 {
9409 varop = XEXP (varop, 0);
9410 count = 0;
9411 continue;
9412 }
9413 break;
9414
9415 case NEG:
9416 /* (lshiftrt (neg A) C), where A is either 0 or 1 and C is one less
9417 than the number of bits in the mode, is equivalent to A. */
9418 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
9419 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
9420 {
9421 varop = XEXP (varop, 0);
9422 count = 0;
9423 continue;
9424 }
9425
9426 /* NEG commutes with ASHIFT since it is multiplication. Move the
9427 NEG outside to allow shifts to combine. */
9428 if (code == ASHIFT
9429 && merge_outer_ops (&outer_op, &outer_const, NEG,
9430 (HOST_WIDE_INT) 0, result_mode,
9431 &complement_p))
9432 {
9433 varop = XEXP (varop, 0);
9434 continue;
9435 }
9436 break;
9437
9438 case PLUS:
9439 /* (lshiftrt (plus A -1) C), where A is either 0 or 1 and C
9440 is one less than the number of bits in the mode, is
9441 equivalent to (xor A 1). */
9442 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
9443 && XEXP (varop, 1) == constm1_rtx
9444 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9445 && merge_outer_ops (&outer_op, &outer_const, XOR,
9446 (HOST_WIDE_INT) 1, result_mode,
9447 &complement_p))
9448 {
9449 count = 0;
9450 varop = XEXP (varop, 0);
9451 continue;
9452 }
9453
9454 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
9455 that might be nonzero in BAR are those being shifted out and those
9456 bits are known zero in FOO, we can replace the PLUS with FOO.
9457 Similarly in the other operand order. This code occurs when
9458 we are computing the size of a variable-size array. */
9459
9460 if ((code == ASHIFTRT || code == LSHIFTRT)
9461 && count < HOST_BITS_PER_WIDE_INT
9462 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
9463 && (nonzero_bits (XEXP (varop, 1), result_mode)
9464 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
9465 {
9466 varop = XEXP (varop, 0);
9467 continue;
9468 }
9469 else if ((code == ASHIFTRT || code == LSHIFTRT)
9470 && count < HOST_BITS_PER_WIDE_INT
9471 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9472 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9473 >> count)
9474 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9475 & nonzero_bits (XEXP (varop, 1),
9476 result_mode)))
9477 {
9478 varop = XEXP (varop, 1);
9479 continue;
9480 }
9481
9482 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
9483 if (code == ASHIFT
9484 && GET_CODE (XEXP (varop, 1)) == CONST_INT
9485 && (new = simplify_binary_operation (ASHIFT, result_mode,
9486 XEXP (varop, 1),
9487 GEN_INT (count))) != 0
9488 && GET_CODE (new) == CONST_INT
9489 && merge_outer_ops (&outer_op, &outer_const, PLUS,
9490 INTVAL (new), result_mode, &complement_p))
9491 {
9492 varop = XEXP (varop, 0);
9493 continue;
9494 }
9495 break;
9496
9497 case MINUS:
9498 /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
9499 with C the size of VAROP - 1 and the shift is logical if
9500 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9501 we have a (gt X 0) operation. If the shift is arithmetic with
9502 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
9503 we have a (neg (gt X 0)) operation. */
9504
9505 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9506 && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
9507 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
9508 && (code == LSHIFTRT || code == ASHIFTRT)
9509 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9510 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
9511 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9512 {
9513 count = 0;
9514 varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
9515 const0_rtx);
9516
9517 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9518 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
9519
9520 continue;
9521 }
9522 break;
9523
9524 case TRUNCATE:
9525 /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
9526 if the truncate does not affect the value. */
9527 if (code == LSHIFTRT
9528 && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
9529 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9530 && (INTVAL (XEXP (XEXP (varop, 0), 1))
9531 >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
9532 - GET_MODE_BITSIZE (GET_MODE (varop)))))
9533 {
9534 rtx varop_inner = XEXP (varop, 0);
9535
9536 varop_inner
9537 = gen_rtx_combine (LSHIFTRT, GET_MODE (varop_inner),
9538 XEXP (varop_inner, 0),
9539 GEN_INT (count
9540 + INTVAL (XEXP (varop_inner, 1))));
9541 varop = gen_rtx_combine (TRUNCATE, GET_MODE (varop),
9542 varop_inner);
9543 count = 0;
9544 continue;
9545 }
9546 break;
9547
9548 default:
9549 break;
9550 }
9551
9552 break;
9553 }
9554
9555 /* We need to determine what mode to do the shift in. If the shift is
9556 a right shift or ROTATE, we must always do it in the mode it was
9557 originally done in. Otherwise, we can do it in MODE, the widest mode
9558 encountered. The code we care about is that of the shift that will
9559 actually be done, not the shift that was originally requested. */
9560 shift_mode
9561 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
9562 ? result_mode : mode);
9563
9564 /* We have now finished analyzing the shift. The result should be
9565 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
9566 OUTER_OP is non-NIL, it is an operation that needs to be applied
9567 to the result of the shift. OUTER_CONST is the relevant constant,
9568 but we must turn off all bits turned off in the shift.
9569
9570 If we were passed a value for X, see if we can use any pieces of
9571 it. If not, make a new rtx. */
9572
9573 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
9574 && GET_CODE (XEXP (x, 1)) == CONST_INT
9575 && INTVAL (XEXP (x, 1)) == count)
9576 const_rtx = XEXP (x, 1);
9577 else
9578 const_rtx = GEN_INT (count);
9579
9580 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
9581 && GET_MODE (XEXP (x, 0)) == shift_mode
9582 && SUBREG_REG (XEXP (x, 0)) == varop)
9583 varop = XEXP (x, 0);
9584 else if (GET_MODE (varop) != shift_mode)
9585 varop = gen_lowpart_for_combine (shift_mode, varop);
9586
9587 /* If we can't make the SUBREG, try to return what we were given. */
9588 if (GET_CODE (varop) == CLOBBER)
9589 return x ? x : varop;
9590
9591 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
9592 if (new != 0)
9593 x = new;
9594 else
9595 {
9596 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
9597 x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
9598
9599 SUBST (XEXP (x, 0), varop);
9600 SUBST (XEXP (x, 1), const_rtx);
9601 }
9602
9603 /* If we have an outer operation and we just made a shift, it is
9604 possible that we could have simplified the shift were it not
9605 for the outer operation. So try to do the simplification
9606 recursively. */
9607
9608 if (outer_op != NIL && GET_CODE (x) == code
9609 && GET_CODE (XEXP (x, 1)) == CONST_INT)
9610 x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
9611 INTVAL (XEXP (x, 1)));
9612
9613 /* If we were doing a LSHIFTRT in a wider mode than it was originally,
9614 turn off all the bits that the shift would have turned off. */
9615 if (orig_code == LSHIFTRT && result_mode != shift_mode)
9616 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
9617 GET_MODE_MASK (result_mode) >> orig_count);
9618
9619 /* Do the remainder of the processing in RESULT_MODE. */
9620 x = gen_lowpart_for_combine (result_mode, x);
9621
9622 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
9623 operation. */
9624 if (complement_p)
9625 x = gen_unary (NOT, result_mode, result_mode, x);
9626
9627 if (outer_op != NIL)
9628 {
9629 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
9630 outer_const = trunc_int_for_mode (outer_const, result_mode);
9631
9632 if (outer_op == AND)
9633 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
9634 else if (outer_op == SET)
9635 /* This means that we have determined that the result is
9636 equivalent to a constant. This should be rare. */
9637 x = GEN_INT (outer_const);
9638 else if (GET_RTX_CLASS (outer_op) == '1')
9639 x = gen_unary (outer_op, result_mode, result_mode, x);
9640 else
9641 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
9642 }
9643
9644 return x;
9645 }
9646 \f
9647 /* Like recog, but we receive the address of a pointer to a new pattern.
9648 We try to match the rtx that the pointer points to.
9649 If that fails, we may try to modify or replace the pattern,
9650 storing the replacement into the same pointer object.
9651
9652 Modifications include deletion or addition of CLOBBERs.
9653
9654 PNOTES is a pointer to a location where any REG_UNUSED notes added for
9655 the CLOBBERs are placed.
9656
9657 The value is the final insn code from the pattern ultimately matched,
9658 or -1. */
9659
9660 static int
9661 recog_for_combine (pnewpat, insn, pnotes)
9662 rtx *pnewpat;
9663 rtx insn;
9664 rtx *pnotes;
9665 {
9666 register rtx pat = *pnewpat;
9667 int insn_code_number;
9668 int num_clobbers_to_add = 0;
9669 int i;
9670 rtx notes = 0;
9671 rtx old_notes;
9672
9673 /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
9674 we use to indicate that something didn't match. If we find such a
9675 thing, force rejection. */
9676 if (GET_CODE (pat) == PARALLEL)
9677 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
9678 if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
9679 && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
9680 return -1;
9681
9682 /* Remove the old notes prior to trying to recognize the new pattern. */
9683 old_notes = REG_NOTES (insn);
9684 REG_NOTES (insn) = 0;
9685
9686 /* Is the result of combination a valid instruction? */
9687 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
9688
9689 /* If it isn't, there is the possibility that we previously had an insn
9690 that clobbered some register as a side effect, but the combined
9691 insn doesn't need to do that. So try once more without the clobbers
9692 unless this represents an ASM insn. */
9693
9694 if (insn_code_number < 0 && ! check_asm_operands (pat)
9695 && GET_CODE (pat) == PARALLEL)
9696 {
9697 int pos;
9698
9699 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
9700 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
9701 {
9702 if (i != pos)
9703 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
9704 pos++;
9705 }
9706
9707 SUBST_INT (XVECLEN (pat, 0), pos);
9708
9709 if (pos == 1)
9710 pat = XVECEXP (pat, 0, 0);
9711
9712 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
9713 }
9714
9715 REG_NOTES (insn) = old_notes;
9716
9717 /* If we had any clobbers to add, make a new pattern that contains
9718 them. Then check to make sure that all of them are dead. */
9719 if (num_clobbers_to_add)
9720 {
9721 rtx newpat = gen_rtx_PARALLEL (VOIDmode,
9722 rtvec_alloc (GET_CODE (pat) == PARALLEL
9723 ? (XVECLEN (pat, 0)
9724 + num_clobbers_to_add)
9725 : num_clobbers_to_add + 1));
9726
9727 if (GET_CODE (pat) == PARALLEL)
9728 for (i = 0; i < XVECLEN (pat, 0); i++)
9729 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
9730 else
9731 XVECEXP (newpat, 0, 0) = pat;
9732
9733 add_clobbers (newpat, insn_code_number);
9734
9735 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
9736 i < XVECLEN (newpat, 0); i++)
9737 {
9738 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
9739 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
9740 return -1;
9741 notes = gen_rtx_EXPR_LIST (REG_UNUSED,
9742 XEXP (XVECEXP (newpat, 0, i), 0), notes);
9743 }
9744 pat = newpat;
9745 }
9746
9747 *pnewpat = pat;
9748 *pnotes = notes;
9749
9750 return insn_code_number;
9751 }
9752 \f
9753 /* Like gen_lowpart but for use by combine. In combine it is not possible
9754 to create any new pseudoregs. However, it is safe to create
9755 invalid memory addresses, because combine will try to recognize
9756 them and all they will do is make the combine attempt fail.
9757
9758 If for some reason this cannot do its job, an rtx
9759 (clobber (const_int 0)) is returned.
9760 An insn containing that will not be recognized. */
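/* For example (illustration only): asking for the QImode low part of
   (reg:SI 65) yields (subreg:QI (reg:SI 65) 0), while a request we
   cannot honor comes back as (clobber (const_int 0)).  */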
9761
9762 #undef gen_lowpart
9763
9764 static rtx
9765 gen_lowpart_for_combine (mode, x)
9766 enum machine_mode mode;
9767 register rtx x;
9768 {
9769 rtx result;
9770
9771 if (GET_MODE (x) == mode)
9772 return x;
9773
9774 /* We can only support MODE being wider than a word if X is a
9775 constant integer or has a mode the same size. */
9776
9777 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
9778 && ! ((GET_MODE (x) == VOIDmode
9779 && (GET_CODE (x) == CONST_INT
9780 || GET_CODE (x) == CONST_DOUBLE))
9781 || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
9782 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
9783
9784 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
9785 won't know what to do. So we will strip off the SUBREG here and
9786 process normally. */
9787 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
9788 {
9789 x = SUBREG_REG (x);
9790 if (GET_MODE (x) == mode)
9791 return x;
9792 }
9793
9794 result = gen_lowpart_common (mode, x);
9795 #ifdef CLASS_CANNOT_CHANGE_MODE
9796 if (result != 0
9797 && GET_CODE (result) == SUBREG
9798 && GET_CODE (SUBREG_REG (result)) == REG
9799 && REGNO (SUBREG_REG (result)) >= FIRST_PSEUDO_REGISTER
9800 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (result),
9801 GET_MODE (SUBREG_REG (result))))
9802 REG_CHANGES_MODE (REGNO (SUBREG_REG (result))) = 1;
9803 #endif
9804
9805 if (result)
9806 return result;
9807
9808 if (GET_CODE (x) == MEM)
9809 {
9810 register int offset = 0;
9811 rtx new;
9812
9813 /* Refuse to work on a volatile memory ref or one with a mode-dependent
9814 address. */
9815 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
9816 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
9817
9818 /* If we want to refer to something bigger than the original memref,
9819 generate a perverse subreg instead. That will force a reload
9820 of the original memref X. */
9821 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
9822 return gen_rtx_SUBREG (mode, x, 0);
9823
9824 if (WORDS_BIG_ENDIAN)
9825 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
9826 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
9827
9828 if (BYTES_BIG_ENDIAN)
9829 {
9830 /* Adjust the address so that the address-after-the-data is
9831 unchanged. */
9832 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
9833 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
9834 }
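/* Worked instance (illustration, assuming a big-endian 32-bit
   target): for the SImode low part of a DImode MEM, the word
   offset is MAX (8, 4) - MAX (4, 4) = 4 and the byte adjustment
   is MIN (4, 4) - MIN (4, 8) = 0, so we address 4 bytes past X --
   where the low-order word of a big-endian DImode value lives.  */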
9835 new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset));
9836 MEM_COPY_ATTRIBUTES (new, x);
9837 return new;
9838 }
9839
9840 /* If X is a comparison operator, rewrite it in a new mode. This
9841 probably won't match, but may allow further simplifications. */
9842 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
9843 return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
9844
9845 /* If we couldn't simplify X any other way, just enclose it in a
9846 SUBREG. Normally, this SUBREG won't match, but some patterns may
9847 include an explicit SUBREG or we may simplify it further in combine. */
9848 else
9849 {
9850 int word = 0;
9851
9852 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
9853 word = ((GET_MODE_SIZE (GET_MODE (x))
9854 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
9855 / UNITS_PER_WORD);
9856 return gen_rtx_SUBREG (mode, x, word);
9857 }
9858 }
9859 \f
9860 /* Make an rtx expression. This is a subset of gen_rtx and only supports
9861 expressions of 1, 2, or 3 operands, each of which are rtx expressions.
9862
9863 If the identical expression was previously in the insn (in the undobuf),
9864 it will be returned. Only if it is not found will a new expression
9865 be made. */
9866
9867 /*VARARGS2*/
9868 static rtx
9869 gen_rtx_combine VPARAMS ((enum rtx_code code, enum machine_mode mode, ...))
9870 {
9871 #ifndef ANSI_PROTOTYPES
9872 enum rtx_code code;
9873 enum machine_mode mode;
9874 #endif
9875 va_list p;
9876 int n_args;
9877 rtx args[3];
9878 int j;
9879 const char *fmt;
9880 rtx rt;
9881 struct undo *undo;
9882
9883 VA_START (p, mode);
9884
9885 #ifndef ANSI_PROTOTYPES
9886 code = va_arg (p, enum rtx_code);
9887 mode = va_arg (p, enum machine_mode);
9888 #endif
9889
9890 n_args = GET_RTX_LENGTH (code);
9891 fmt = GET_RTX_FORMAT (code);
9892
9893 if (n_args == 0 || n_args > 3)
9894 abort ();
9895
9896 /* Get each arg and verify that it is supposed to be an expression. */
9897 for (j = 0; j < n_args; j++)
9898 {
9899 if (*fmt++ != 'e')
9900 abort ();
9901
9902 args[j] = va_arg (p, rtx);
9903 }
9904
9905 va_end (p);
9906
9907 /* See if this is in undobuf. Be sure we don't use objects that came
9908 from another insn; this could produce circular rtl structures. */
9909
9910 for (undo = undobuf.undos; undo != undobuf.previous_undos; undo = undo->next)
9911 if (!undo->is_int
9912 && GET_CODE (undo->old_contents.r) == code
9913 && GET_MODE (undo->old_contents.r) == mode)
9914 {
9915 for (j = 0; j < n_args; j++)
9916 if (XEXP (undo->old_contents.r, j) != args[j])
9917 break;
9918
9919 if (j == n_args)
9920 return undo->old_contents.r;
9921 }
9922
9923 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
9924 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
9925 rt = rtx_alloc (code);
9926 PUT_MODE (rt, mode);
9927 XEXP (rt, 0) = args[0];
9928 if (n_args > 1)
9929 {
9930 XEXP (rt, 1) = args[1];
9931 if (n_args > 2)
9932 XEXP (rt, 2) = args[2];
9933 }
9934 return rt;
9935 }
9936
9937 /* These routines make binary and unary operations by first seeing if they
9938 fold; if not, a new expression is allocated. */
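/* For example (illustration only), gen_binary (PLUS, SImode,
   GEN_INT (2), GEN_INT (3)) folds to (const_int 5), while adding a
   REG and a constant allocates (plus:SI (reg) (const_int C)) with
   the constant canonicalized into the second operand.  */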
9939
9940 static rtx
9941 gen_binary (code, mode, op0, op1)
9942 enum rtx_code code;
9943 enum machine_mode mode;
9944 rtx op0, op1;
9945 {
9946 rtx result;
9947 rtx tem;
9948
9949 if (GET_RTX_CLASS (code) == 'c'
9950 && (GET_CODE (op0) == CONST_INT
9951 || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)))
9952 tem = op0, op0 = op1, op1 = tem;
9953
9954 if (GET_RTX_CLASS (code) == '<')
9955 {
9956 enum machine_mode op_mode = GET_MODE (op0);
9957
9958 /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get
9959 just (REL_OP X Y). */
9960 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
9961 {
9962 op1 = XEXP (op0, 1);
9963 op0 = XEXP (op0, 0);
9964 op_mode = GET_MODE (op0);
9965 }
9966
9967 if (op_mode == VOIDmode)
9968 op_mode = GET_MODE (op1);
9969 result = simplify_relational_operation (code, op_mode, op0, op1);
9970 }
9971 else
9972 result = simplify_binary_operation (code, mode, op0, op1);
9973
9974 if (result)
9975 return result;
9976
9977 /* Put complex operands first and constants second. */
9978 if (GET_RTX_CLASS (code) == 'c'
9979 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
9980 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
9981 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
9982 || (GET_CODE (op0) == SUBREG
9983 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
9984 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
9985 return gen_rtx_combine (code, mode, op1, op0);
9986
9987 /* If we are turning off bits already known off in OP0, we need not do
9988 an AND. */
9989 else if (code == AND && GET_CODE (op1) == CONST_INT
9990 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
9991 && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
9992 return op0;
9993
9994 return gen_rtx_combine (code, mode, op0, op1);
9995 }
9996
9997 static rtx
9998 gen_unary (code, mode, op0_mode, op0)
9999 enum rtx_code code;
10000 enum machine_mode mode, op0_mode;
10001 rtx op0;
10002 {
10003 rtx result = simplify_unary_operation (code, mode, op0, op0_mode);
10004
10005 if (result)
10006 return result;
10007
10008 return gen_rtx_combine (code, mode, op0);
10009 }
10010 \f
10011 /* Simplify a comparison between *POP0 and *POP1 where CODE is the
10012 comparison code that will be tested.
10013
10014 The result is a possibly different comparison code to use. *POP0 and
10015 *POP1 may be updated.
10016
10017 It is possible that we might detect that a comparison is either always
10018 true or always false. However, we do not perform general constant
10019 folding in combine, so this knowledge isn't useful. Such tautologies
10020 should have been detected earlier. Hence we ignore all such cases. */
10021
10022 static enum rtx_code
10023 simplify_comparison (code, pop0, pop1)
10024 enum rtx_code code;
10025 rtx *pop0;
10026 rtx *pop1;
10027 {
10028 rtx op0 = *pop0;
10029 rtx op1 = *pop1;
10030 rtx tem, tem1;
10031 int i;
10032 enum machine_mode mode, tmode;
10033
10034 /* Try a few ways of applying the same transformation to both operands. */
10035 while (1)
10036 {
10037 #ifndef WORD_REGISTER_OPERATIONS
10038 /* The test below this one won't handle SIGN_EXTENDs on these machines,
10039 so check specially. */
10040 if (code != GTU && code != GEU && code != LTU && code != LEU
10041 && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
10042 && GET_CODE (XEXP (op0, 0)) == ASHIFT
10043 && GET_CODE (XEXP (op1, 0)) == ASHIFT
10044 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
10045 && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
10046 && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
10047 == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
10048 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10049 && GET_CODE (XEXP (op1, 1)) == CONST_INT
10050 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10051 && GET_CODE (XEXP (XEXP (op1, 0), 1)) == CONST_INT
10052 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (op1, 1))
10053 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op0, 0), 1))
10054 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op1, 0), 1))
10055 && (INTVAL (XEXP (op0, 1))
10056 == (GET_MODE_BITSIZE (GET_MODE (op0))
10057 - (GET_MODE_BITSIZE
10058 (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
10059 {
10060 op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
10061 op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
10062 }
10063 #endif
10064
10065 /* If both operands are the same constant shift, see if we can ignore the
10066 shift. We can if the shift is a rotate or if the bits shifted out of
10067 this shift are known to be zero for both inputs and if the type of
10068 comparison is compatible with the shift. */
10069 if (GET_CODE (op0) == GET_CODE (op1)
10070 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
10071 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
10072 || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
10073 && (code != GT && code != LT && code != GE && code != LE))
10074 || (GET_CODE (op0) == ASHIFTRT
10075 && (code != GTU && code != LTU
10076 && code != GEU && code != LEU)))
10077 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10078 && INTVAL (XEXP (op0, 1)) >= 0
10079 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10080 && XEXP (op0, 1) == XEXP (op1, 1))
10081 {
10082 enum machine_mode mode = GET_MODE (op0);
10083 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
10084 int shift_count = INTVAL (XEXP (op0, 1));
10085
10086 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
10087 mask &= (mask >> shift_count) << shift_count;
10088 else if (GET_CODE (op0) == ASHIFT)
10089 mask = (mask & (mask << shift_count)) >> shift_count;
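/* E.g. (illustration, SImode, shift count 8): for LSHIFTRT the mask
   becomes 0xffffff00 -- the low 8 bits are the ones discarded --
   while for ASHIFT it becomes 0x00ffffff, since there the high 8
   bits are lost.  */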
10090
10091 if ((nonzero_bits (XEXP (op0, 0), mode) & ~mask) == 0
10092 && (nonzero_bits (XEXP (op1, 0), mode) & ~mask) == 0)
10093 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
10094 else
10095 break;
10096 }
10097
10098 /* If both operands are AND's of a paradoxical SUBREG by constant, the
10099 SUBREGs are of the same mode, and, in both cases, the AND would
10100 be redundant if the comparison was done in the narrower mode,
10101 do the comparison in the narrower mode (e.g., we are AND'ing with 1
10102 and the operand's possibly nonzero bits are 0xffffff01; in that case
10103 if we only care about QImode, we don't need the AND). This case
10104 occurs if the output mode of an scc insn is not SImode and
10105 STORE_FLAG_VALUE == 1 (e.g., the 386).
10106
10107 Similarly, check for a case where the AND's are ZERO_EXTEND
10108 operations from some narrower mode even though a SUBREG is not
10109 present. */
10110
10111 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
10112 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10113 && GET_CODE (XEXP (op1, 1)) == CONST_INT)
10114 {
10115 rtx inner_op0 = XEXP (op0, 0);
10116 rtx inner_op1 = XEXP (op1, 0);
10117 HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
10118 HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
10119 int changed = 0;
10120
10121 if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
10122 && (GET_MODE_SIZE (GET_MODE (inner_op0))
10123 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
10124 && (GET_MODE (SUBREG_REG (inner_op0))
10125 == GET_MODE (SUBREG_REG (inner_op1)))
10126 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0)))
10127 <= HOST_BITS_PER_WIDE_INT)
10128 && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
10129 GET_MODE (SUBREG_REG (inner_op0)))))
10130 && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
10131 GET_MODE (SUBREG_REG (inner_op1))))))
10132 {
10133 op0 = SUBREG_REG (inner_op0);
10134 op1 = SUBREG_REG (inner_op1);
10135
10136 /* The resulting comparison is always unsigned since we masked
10137 off the original sign bit. */
10138 code = unsigned_condition (code);
10139
10140 changed = 1;
10141 }
10142
10143 else if (c0 == c1)
10144 for (tmode = GET_CLASS_NARROWEST_MODE
10145 (GET_MODE_CLASS (GET_MODE (op0)));
10146 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
10147 if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
10148 {
10149 op0 = gen_lowpart_for_combine (tmode, inner_op0);
10150 op1 = gen_lowpart_for_combine (tmode, inner_op1);
10151 code = unsigned_condition (code);
10152 changed = 1;
10153 break;
10154 }
10155
10156 if (! changed)
10157 break;
10158 }
10159
10160 /* If both operands are NOT, we can strip off the outer operation
10161 and adjust the comparison code for swapped operands; similarly for
10162 NEG, except that this must be an equality comparison. */
10163 else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
10164 || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
10165 && (code == EQ || code == NE)))
10166 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
10167
10168 else
10169 break;
10170 }
10171
10172 /* If the first operand is a constant, swap the operands and adjust the
10173 comparison code appropriately, but don't do this if the second operand
10174 is already a constant integer. */
10175 if (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
10176 {
10177 tem = op0, op0 = op1, op1 = tem;
10178 code = swap_condition (code);
10179 }
10180
10181 /* We now enter a loop during which we will try to simplify the comparison.
10182 For the most part, we are only concerned with comparisons with zero,
10183 though some things may really be comparisons with zero that do not
10184 start out looking that way. */
10185
10186 while (GET_CODE (op1) == CONST_INT)
10187 {
10188 enum machine_mode mode = GET_MODE (op0);
10189 unsigned int mode_width = GET_MODE_BITSIZE (mode);
10190 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
10191 int equality_comparison_p;
10192 int sign_bit_comparison_p;
10193 int unsigned_comparison_p;
10194 HOST_WIDE_INT const_op;
10195
10196 /* We only want to handle integral modes. This catches VOIDmode,
10197 CCmode, and the floating-point modes. An exception is that we
10198 can handle VOIDmode if OP0 is a COMPARE or a comparison
10199 operation. */
10200
10201 if (GET_MODE_CLASS (mode) != MODE_INT
10202 && ! (mode == VOIDmode
10203 && (GET_CODE (op0) == COMPARE
10204 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
10205 break;
10206
10207 /* Get the constant we are comparing against and turn off all bits
10208 not on in our mode. */
10209 const_op = trunc_int_for_mode (INTVAL (op1), mode);
10210
10211 /* If we are comparing against a constant power of two and the value
10212 being compared can only have that single bit nonzero (e.g., it was
10213 `and'ed with that bit), we can replace this with a comparison
10214 with zero. */
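/* E.g. (illustration): if OP0 came from (and X 8), nonzero_bits
   reports that OP0 is either 0 or 8, so (eq OP0 8) is the same
   test as (ne OP0 0).  */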
10215 if (const_op
10216 && (code == EQ || code == NE || code == GE || code == GEU
10217 || code == LT || code == LTU)
10218 && mode_width <= HOST_BITS_PER_WIDE_INT
10219 && exact_log2 (const_op) >= 0
10220 && nonzero_bits (op0, mode) == (unsigned HOST_WIDE_INT) const_op)
10221 {
10222 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
10223 op1 = const0_rtx, const_op = 0;
10224 }
10225
10226 /* Similarly, if we are comparing a value known to be either -1 or
10227 0 with -1, change it to the opposite comparison against zero. */
10228
10229 if (const_op == -1
10230 && (code == EQ || code == NE || code == GT || code == LE
10231 || code == GEU || code == LTU)
10232 && num_sign_bit_copies (op0, mode) == mode_width)
10233 {
10234 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
10235 op1 = const0_rtx, const_op = 0;
10236 }
10237
10238 /* Do some canonicalizations based on the comparison code. We prefer
10239 comparisons against zero and then prefer equality comparisons.
10240 If we can reduce the size of a constant, we will do that too. */
10241
10242 switch (code)
10243 {
10244 case LT:
10245 /* < C is equivalent to <= (C - 1) */
10246 if (const_op > 0)
10247 {
10248 const_op -= 1;
10249 op1 = GEN_INT (const_op);
10250 code = LE;
10251 /* ... fall through to LE case below. */
10252 }
10253 else
10254 break;
10255
10256 case LE:
10257 /* <= C is equivalent to < (C + 1); we do this for C < 0. */
10258 if (const_op < 0)
10259 {
10260 const_op += 1;
10261 op1 = GEN_INT (const_op);
10262 code = LT;
10263 }
10264
10265 /* If we are doing a <= 0 comparison on a value known to have
10266 a zero sign bit, we can replace this with == 0. */
10267 else if (const_op == 0
10268 && mode_width <= HOST_BITS_PER_WIDE_INT
10269 && (nonzero_bits (op0, mode)
10270 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
10271 code = EQ;
10272 break;
10273
10274 case GE:
10275 /* >= C is equivalent to > (C - 1). */
10276 if (const_op > 0)
10277 {
10278 const_op -= 1;
10279 op1 = GEN_INT (const_op);
10280 code = GT;
10281 /* ... fall through to GT below. */
10282 }
10283 else
10284 break;
10285
10286 case GT:
10287 /* > C is equivalent to >= (C + 1); we do this for C < 0. */
10288 if (const_op < 0)
10289 {
10290 const_op += 1;
10291 op1 = GEN_INT (const_op);
10292 code = GE;
10293 }
10294
10295 /* If we are doing a > 0 comparison on a value known to have
10296 a zero sign bit, we can replace this with != 0. */
10297 else if (const_op == 0
10298 && mode_width <= HOST_BITS_PER_WIDE_INT
10299 && (nonzero_bits (op0, mode)
10300 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
10301 code = NE;
10302 break;
10303
10304 case LTU:
10305 /* < C is equivalent to <= (C - 1). */
10306 if (const_op > 0)
10307 {
10308 const_op -= 1;
10309 op1 = GEN_INT (const_op);
10310 code = LEU;
10311 /* ... fall through ... */
10312 }
10313
10314 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
10315 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10316 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
10317 {
10318 const_op = 0, op1 = const0_rtx;
10319 code = GE;
10320 break;
10321 }
10322 else
10323 break;
10324
10325 case LEU:
10326 /* unsigned <= 0 is equivalent to == 0 */
10327 if (const_op == 0)
10328 code = EQ;
10329
10330 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
10331 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10332 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
10333 {
10334 const_op = 0, op1 = const0_rtx;
10335 code = GE;
10336 }
10337 break;
10338
10339 case GEU:
10340 /* >= C is equivalent to > (C - 1). */
10341 if (const_op > 1)
10342 {
10343 const_op -= 1;
10344 op1 = GEN_INT (const_op);
10345 code = GTU;
10346 /* ... fall through ... */
10347 }
10348
10349 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
10350 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10351 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
10352 {
10353 const_op = 0, op1 = const0_rtx;
10354 code = LT;
10355 break;
10356 }
10357 else
10358 break;
10359
10360 case GTU:
10361 /* unsigned > 0 is equivalent to != 0 */
10362 if (const_op == 0)
10363 code = NE;
10364
10365 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
10366 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10367 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
10368 {
10369 const_op = 0, op1 = const0_rtx;
10370 code = LT;
10371 }
10372 break;
10373
10374 default:
10375 break;
10376 }
10377
10378 /* Compute some predicates to simplify code below. */
10379
10380 equality_comparison_p = (code == EQ || code == NE);
10381 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
10382 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
10383 || code == GEU);
10384
10385 /* If this is a sign bit comparison and we can do arithmetic in
10386 MODE, say that we will only be needing the sign bit of OP0. */
10387 if (sign_bit_comparison_p
10388 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10389 op0 = force_to_mode (op0, mode,
10390 ((HOST_WIDE_INT) 1
10391 << (GET_MODE_BITSIZE (mode) - 1)),
10392 NULL_RTX, 0);
10393
10394 /* Now try cases based on the opcode of OP0. If none of the cases
10395 does a "continue", we exit this loop immediately after the
10396 switch. */
10397
10398 switch (GET_CODE (op0))
10399 {
10400 case ZERO_EXTRACT:
10401 /* If we are extracting a single bit from a variable position in
10402 a constant that has only a single bit set and are comparing it
10403 with zero, we can convert this into an equality comparison
10404 between the position and the location of the single bit. */
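/* Illustration (a sketch): with (const_int 8), whose only set bit
   is bit 3, (eq (zero_extract (const_int 8) 1 POS) 0) holds exactly
   when POS is not 3, so we can test (ne POS 3) instead, modulo the
   BITS_BIG_ENDIAN renumbering handled below.  */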
10405
10406 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
10407 && XEXP (op0, 1) == const1_rtx
10408 && equality_comparison_p && const_op == 0
10409 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
10410 {
10411 if (BITS_BIG_ENDIAN)
10412 {
10413 #ifdef HAVE_extzv
10414 mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
10415 if (mode == VOIDmode)
10416 mode = word_mode;
10417 i = (GET_MODE_BITSIZE (mode) - 1 - i);
10418 #else
10419 i = BITS_PER_WORD - 1 - i;
10420 #endif
10421 }
10422
10423 op0 = XEXP (op0, 2);
10424 op1 = GEN_INT (i);
10425 const_op = i;
10426
10427 /* Result is nonzero iff shift count is equal to I. */
10428 code = reverse_condition (code);
10429 continue;
10430 }
10431
10432 /* ... fall through ... */
10433
10434 case SIGN_EXTRACT:
10435 tem = expand_compound_operation (op0);
10436 if (tem != op0)
10437 {
10438 op0 = tem;
10439 continue;
10440 }
10441 break;
10442
10443 case NOT:
10444 /* If testing for equality, we can take the NOT of the constant. */
10445 if (equality_comparison_p
10446 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
10447 {
10448 op0 = XEXP (op0, 0);
10449 op1 = tem;
10450 continue;
10451 }
10452
10453 /* If just looking at the sign bit, reverse the sense of the
10454 comparison. */
10455 if (sign_bit_comparison_p)
10456 {
10457 op0 = XEXP (op0, 0);
10458 code = (code == GE ? LT : GE);
10459 continue;
10460 }
10461 break;
10462
10463 case NEG:
10464 /* If testing for equality, we can take the NEG of the constant. */
10465 if (equality_comparison_p
10466 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
10467 {
10468 op0 = XEXP (op0, 0);
10469 op1 = tem;
10470 continue;
10471 }
10472
10473 /* The remaining cases only apply to comparisons with zero. */
10474 if (const_op != 0)
10475 break;
10476
10477 /* When X is ABS or is known positive,
10478 (neg X) is < 0 if and only if X != 0. */
10479
10480 if (sign_bit_comparison_p
10481 && (GET_CODE (XEXP (op0, 0)) == ABS
10482 || (mode_width <= HOST_BITS_PER_WIDE_INT
10483 && (nonzero_bits (XEXP (op0, 0), mode)
10484 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
10485 {
10486 op0 = XEXP (op0, 0);
10487 code = (code == LT ? NE : EQ);
10488 continue;
10489 }
10490
10491 /* If we have NEG of something whose two high-order bits are the
10492 same, we know that "(-a) < 0" is equivalent to "a > 0". */
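/* The two-copies test guards against overflow (illustration): in
   SImode with a == 0x80000000, (neg a) is again 0x80000000 -- only
   one sign-bit copy -- and there "(-a) < 0" holds although "a > 0"
   does not.  */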
10493 if (num_sign_bit_copies (op0, mode) >= 2)
10494 {
10495 op0 = XEXP (op0, 0);
10496 code = swap_condition (code);
10497 continue;
10498 }
10499 break;
10500
10501 case ROTATE:
10502 /* If we are testing equality and our count is a constant, we
10503 can perform the inverse operation on our RHS. */
10504 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
10505 && (tem = simplify_binary_operation (ROTATERT, mode,
10506 op1, XEXP (op0, 1))) != 0)
10507 {
10508 op0 = XEXP (op0, 0);
10509 op1 = tem;
10510 continue;
10511 }
10512
10513 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
10514 a particular bit. Convert it to an AND of a constant of that
10515 bit. This will be converted into a ZERO_EXTRACT. */
10516 if (const_op == 0 && sign_bit_comparison_p
10517 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10518 && mode_width <= HOST_BITS_PER_WIDE_INT)
10519 {
10520 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10521 ((HOST_WIDE_INT) 1
10522 << (mode_width - 1
10523 - INTVAL (XEXP (op0, 1)))));
10524 code = (code == LT ? NE : EQ);
10525 continue;
10526 }
10527
10528 /* Fall through. */
10529
10530 case ABS:
10531 /* ABS is ignorable inside an equality comparison with zero. */
10532 if (const_op == 0 && equality_comparison_p)
10533 {
10534 op0 = XEXP (op0, 0);
10535 continue;
10536 }
10537 break;
10538
10539 case SIGN_EXTEND:
10540 /* Can simplify (compare (zero/sign_extend FOO) CONST)
10541 to (compare FOO CONST) if CONST fits in FOO's mode and we
10542 are either testing inequality or have an unsigned comparison
10543 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
10544 if (! unsigned_comparison_p
10545 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10546 <= HOST_BITS_PER_WIDE_INT)
10547 && ((unsigned HOST_WIDE_INT) const_op
10548 < (((unsigned HOST_WIDE_INT) 1
10549 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
10550 {
10551 op0 = XEXP (op0, 0);
10552 continue;
10553 }
10554 break;
10555
10556 case SUBREG:
10557 /* Check for the case where we are comparing A - C1 with C2,
10558 both constants are smaller than 1/2 the maximum positive
10559 value in MODE, and the comparison is equality or unsigned.
10560 In that case, if A is either zero-extended to MODE or has
10561 sufficient sign bits so that the high-order bit in MODE
10562 is a copy of the sign in the inner mode, we can prove that it is
10563 safe to do the operation in the wider mode. This simplifies
10564 many range checks. */
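/* Illustration (a sketch): a range check such as "x >= 10 && x <= 15"
   is often emitted as (leu (subreg:QI (plus:SI X -10) 0) 5); when X
   is known to fit the narrower mode as described, the comparison can
   safely be done on (plus:SI X -10) directly.  */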
10565
10566 if (mode_width <= HOST_BITS_PER_WIDE_INT
10567 && subreg_lowpart_p (op0)
10568 && GET_CODE (SUBREG_REG (op0)) == PLUS
10569 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
10570 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
10571 && (-INTVAL (XEXP (SUBREG_REG (op0), 1))
10572 < (HOST_WIDE_INT) (GET_MODE_MASK (mode) / 2))
10573 && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
10574 && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
10575 GET_MODE (SUBREG_REG (op0)))
10576 & ~GET_MODE_MASK (mode))
10577 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
10578 GET_MODE (SUBREG_REG (op0)))
10579 > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
10580 - GET_MODE_BITSIZE (mode)))))
10581 {
10582 op0 = SUBREG_REG (op0);
10583 continue;
10584 }
10585
10586 /* If the inner mode is narrower and we are extracting the low part,
10587 we can treat the SUBREG as if it were a ZERO_EXTEND. */
10588 if (subreg_lowpart_p (op0)
10589 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
10590 /* Fall through */ ;
10591 else
10592 break;
10593
10594 /* ... fall through ... */
10595
10596 case ZERO_EXTEND:
10597 if ((unsigned_comparison_p || equality_comparison_p)
10598 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10599 <= HOST_BITS_PER_WIDE_INT)
10600 && ((unsigned HOST_WIDE_INT) const_op
10601 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
10602 {
10603 op0 = XEXP (op0, 0);
10604 continue;
10605 }
10606 break;
10607
10608 case PLUS:
10609 /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
10610 this for equality comparisons due to pathological cases involving
10611 overflows. */
10612 if (equality_comparison_p
10613 && 0 != (tem = simplify_binary_operation (MINUS, mode,
10614 op1, XEXP (op0, 1))))
10615 {
10616 op0 = XEXP (op0, 0);
10617 op1 = tem;
10618 continue;
10619 }
10620
10621 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
10622 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
10623 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
10624 {
10625 op0 = XEXP (XEXP (op0, 0), 0);
10626 code = (code == LT ? EQ : NE);
10627 continue;
10628 }
10629 break;
10630
10631 case MINUS:
10632 /* We used to optimize signed comparisons against zero, but that
10633 was incorrect. Unsigned comparisons against zero (GTU, LEU)
10634 arrive here as equality comparisons, or (GEU, LTU) are
10635 optimized away. No need to special-case them. */
10636
10637 /* (eq (minus A B) C) -> (eq A (plus B C)) or
10638 (eq B (minus A C)), whichever simplifies. We can only do
10639 this for equality comparisons due to pathological cases involving
10640 overflows. */
10641 if (equality_comparison_p
10642 && 0 != (tem = simplify_binary_operation (PLUS, mode,
10643 XEXP (op0, 1), op1)))
10644 {
10645 op0 = XEXP (op0, 0);
10646 op1 = tem;
10647 continue;
10648 }
10649
10650 if (equality_comparison_p
10651 && 0 != (tem = simplify_binary_operation (MINUS, mode,
10652 XEXP (op0, 0), op1)))
10653 {
10654 op0 = XEXP (op0, 1);
10655 op1 = tem;
10656 continue;
10657 }
10658
10659 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
10660 of bits in X minus 1, is one iff X > 0. */
10661 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
10662 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10663 && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
10664 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10665 {
10666 op0 = XEXP (op0, 1);
10667 code = (code == GE ? LE : GT);
10668 continue;
10669 }
10670 break;
10671
10672 case XOR:
10673 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
10674 if C is zero or B is a constant. */
10675 if (equality_comparison_p
10676 && 0 != (tem = simplify_binary_operation (XOR, mode,
10677 XEXP (op0, 1), op1)))
10678 {
10679 op0 = XEXP (op0, 0);
10680 op1 = tem;
10681 continue;
10682 }
10683 break;
10684
10685 case EQ: case NE:
10686 case LT: case LTU: case LE: case LEU:
10687 case GT: case GTU: case GE: case GEU:
10688 /* We can't do anything if OP0 is a condition code value, rather
10689 than an actual data value. */
10690 if (const_op != 0
10691 #ifdef HAVE_cc0
10692 || XEXP (op0, 0) == cc0_rtx
10693 #endif
10694 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
10695 break;
10696
10697 /* Get the two operands being compared. */
10698 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
10699 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
10700 else
10701 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
10702
10703 /* Check for the cases where we simply want the result of the
10704 earlier test or the opposite of that result. */
10705 if (code == NE
10706 || (code == EQ && reversible_comparison_p (op0))
10707 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
10708 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10709 && (STORE_FLAG_VALUE
10710 & (((HOST_WIDE_INT) 1
10711 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
10712 && (code == LT
10713 || (code == GE && reversible_comparison_p (op0)))))
10714 {
10715 code = (code == LT || code == NE
10716 ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
10717 op0 = tem, op1 = tem1;
10718 continue;
10719 }
10720 break;
10721
10722 case IOR:
10723 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
10724 iff X <= 0. */
10725 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
10726 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
10727 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10728 {
10729 op0 = XEXP (op0, 1);
10730 code = (code == GE ? GT : LE);
10731 continue;
10732 }
10733 break;
10734
10735 case AND:
10736 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
10737 will be converted to a ZERO_EXTRACT later. */
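/* Both forms test bit X of Y (illustration): with Y == 10 (binary
   1010) and X == 1, (and (ashift 1 1) Y) is 2 and
   (and (lshiftrt Y 1) 1) is 1 -- nonzero together or zero
   together.  */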
10738 if (const_op == 0 && equality_comparison_p
10739 && GET_CODE (XEXP (op0, 0)) == ASHIFT
10740 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
10741 {
10742 op0 = simplify_and_const_int
10743 (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
10744 XEXP (op0, 1),
10745 XEXP (XEXP (op0, 0), 1)),
10746 (HOST_WIDE_INT) 1);
10747 continue;
10748 }
10749
10750 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
10751 zero and X is a comparison and C1 and C2 describe only bits set
10752 in STORE_FLAG_VALUE, we can compare with X. */
10753 if (const_op == 0 && equality_comparison_p
10754 && mode_width <= HOST_BITS_PER_WIDE_INT
10755 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10756 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
10757 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10758 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
10759 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
10760 {
10761 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10762 << INTVAL (XEXP (XEXP (op0, 0), 1)));
10763 if ((~STORE_FLAG_VALUE & mask) == 0
10764 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
10765 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
10766 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
10767 {
10768 op0 = XEXP (XEXP (op0, 0), 0);
10769 continue;
10770 }
10771 }
10772
10773 /* If we are doing an equality comparison of an AND of a bit equal
10774 to the sign bit, replace this with a LT or GE comparison of
10775 the underlying value. */
10776 if (equality_comparison_p
10777 && const_op == 0
10778 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10779 && mode_width <= HOST_BITS_PER_WIDE_INT
10780 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10781 == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
10782 {
10783 op0 = XEXP (op0, 0);
10784 code = (code == EQ ? GE : LT);
10785 continue;
10786 }
10787
10788 /* If this AND operation is really a ZERO_EXTEND from a narrower
10789 mode, the constant fits within that mode, and this is either an
10790 equality or unsigned comparison, try to do this comparison in
10791 the narrower mode. */
10792 if ((equality_comparison_p || unsigned_comparison_p)
10793 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10794 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
10795 & GET_MODE_MASK (mode))
10796 + 1)) >= 0
10797 && const_op >> i == 0
10798 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
10799 {
10800 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
10801 continue;
10802 }
10803
10804 /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1 fits
10805 in both M1 and M2 and the SUBREG is either paradoxical or
10806 represents the low part, permute the SUBREG and the AND and
10807 try again. */
10808 if (GET_CODE (XEXP (op0, 0)) == SUBREG
10809 && (0
10810 #ifdef WORD_REGISTER_OPERATIONS
10811 || ((mode_width
10812 > (GET_MODE_BITSIZE
10813 (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
10814 && mode_width <= BITS_PER_WORD)
10815 #endif
10816 || ((mode_width
10817 <= (GET_MODE_BITSIZE
10818 (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
10819 && subreg_lowpart_p (XEXP (op0, 0))))
10820 #ifndef WORD_REGISTER_OPERATIONS
10821 /* It is unsafe to commute the AND into the SUBREG if the SUBREG
10822 is paradoxical and WORD_REGISTER_OPERATIONS is not defined.
10823 As originally written the upper bits have a defined value
10824 due to the AND operation. However, if we commute the AND
10825 inside the SUBREG then they no longer have defined values
10826 and the meaning of the code has been changed. */
10827 && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)))
10828 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
10829 #endif
10830 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10831 && mode_width <= HOST_BITS_PER_WIDE_INT
10832 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
10833 <= HOST_BITS_PER_WIDE_INT)
10834 && (INTVAL (XEXP (op0, 1)) & ~mask) == 0
10835 && 0 == (~GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
10836 & INTVAL (XEXP (op0, 1)))
10837 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1)) != mask
10838 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
10839 != GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
10840
10841 {
10842 op0
10843 = gen_lowpart_for_combine
10844 (mode,
10845 gen_binary (AND, GET_MODE (SUBREG_REG (XEXP (op0, 0))),
10846 SUBREG_REG (XEXP (op0, 0)), XEXP (op0, 1)));
10847 continue;
10848 }
10849
10850 /* Convert (ne (and (lshiftrt (not X)) 1) 0) to
10851 (eq (and (lshiftrt X) 1) 0). */
10852 if (const_op == 0 && equality_comparison_p
10853 && XEXP (op0, 1) == const1_rtx
10854 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
10855 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == NOT)
10856 {
10857 op0 = simplify_and_const_int
10858 (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
10859 XEXP (XEXP (XEXP (op0, 0), 0), 0),
10860 XEXP (XEXP (op0, 0), 1)),
10861 (HOST_WIDE_INT) 1);
10862 code = (code == NE ? EQ : NE);
10863 continue;
10864 }
10865 break;
10866
10867 case ASHIFT:
10868 /* If we have (compare (ashift FOO N) (const_int C)) and
10869 the high order N bits of FOO (N+1 if an inequality comparison)
10870 are known to be zero, we can do this by comparing FOO with C
10871 shifted right N bits so long as the low-order N bits of C are
10872 zero. */
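/* E.g. (illustration, SImode): (eq (ashift X 8) 0x1200), with the
   top 8 bits of X known zero and the low 8 bits of the constant
   zero, becomes (eq X 0x12).  */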
10873 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10874 && INTVAL (XEXP (op0, 1)) >= 0
10875 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
10876 < HOST_BITS_PER_WIDE_INT)
10877 && ((const_op
10878 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
10879 && mode_width <= HOST_BITS_PER_WIDE_INT
10880 && (nonzero_bits (XEXP (op0, 0), mode)
10881 & ~(mask >> (INTVAL (XEXP (op0, 1))
10882 + ! equality_comparison_p))) == 0)
10883 {
10884 /* We must perform a logical shift, not an arithmetic one,
10885 as we want the top N bits of C to be zero. */
10886 unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode);
10887
10888 temp >>= INTVAL (XEXP (op0, 1));
10889 op1 = GEN_INT (trunc_int_for_mode (temp, mode));
10890 op0 = XEXP (op0, 0);
10891 continue;
10892 }
10893
10894 /* If we are doing a sign bit comparison, it means we are testing
10895 a particular bit. Convert it to the appropriate AND. */
10896 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
10897 && mode_width <= HOST_BITS_PER_WIDE_INT)
10898 {
10899 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10900 ((HOST_WIDE_INT) 1
10901 << (mode_width - 1
10902 - INTVAL (XEXP (op0, 1)))));
10903 code = (code == LT ? NE : EQ);
10904 continue;
10905 }
10906
10907 /* If this is an equality comparison with zero and we are shifting
10908 the low bit to the sign bit, we can convert this to an AND of the
10909 low-order bit. */
10910 if (const_op == 0 && equality_comparison_p
10911 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10912 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
10913 {
10914 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10915 (HOST_WIDE_INT) 1);
10916 continue;
10917 }
10918 break;
10919
10920 case ASHIFTRT:
10921 /* If this is an equality comparison with zero, we can do this
10922 as a logical shift, which might be much simpler. */
10923 if (equality_comparison_p && const_op == 0
10924 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
10925 {
10926 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
10927 XEXP (op0, 0),
10928 INTVAL (XEXP (op0, 1)));
10929 continue;
10930 }
10931
10932 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
10933 do the comparison in a narrower mode. */
10934 if (! unsigned_comparison_p
10935 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10936 && GET_CODE (XEXP (op0, 0)) == ASHIFT
10937 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
10938 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
10939 MODE_INT, 1)) != BLKmode
10940 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
10941 || ((unsigned HOST_WIDE_INT) -const_op
10942 <= GET_MODE_MASK (tmode))))
10943 {
10944 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
10945 continue;
10946 }
10947
10948 /* Likewise if OP0 is a PLUS of a sign extension with a
10949 constant, which is usually represented with the PLUS
10950 between the shifts. */
10951 if (! unsigned_comparison_p
10952 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10953 && GET_CODE (XEXP (op0, 0)) == PLUS
10954 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10955 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT
10956 && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1)
10957 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
10958 MODE_INT, 1)) != BLKmode
10959 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
10960 || ((unsigned HOST_WIDE_INT) -const_op
10961 <= GET_MODE_MASK (tmode))))
10962 {
10963 rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0);
10964 rtx add_const = XEXP (XEXP (op0, 0), 1);
10965 rtx new_const = gen_binary (ASHIFTRT, GET_MODE (op0), add_const,
10966 XEXP (op0, 1));
10967
10968 op0 = gen_binary (PLUS, tmode,
10969 gen_lowpart_for_combine (tmode, inner),
10970 new_const);
10971 continue;
10972 }
10973
10974 /* ... fall through ... */
10975 case LSHIFTRT:
10976 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
10977 the low order N bits of FOO are known to be zero, we can do this
10978 by comparing FOO with C shifted left N bits so long as no
10979 overflow occurs. */
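/* E.g. (illustration, SImode): (eq (lshiftrt X 4) 7), when the low
   4 bits of X are known zero, becomes (eq X 0x70); the floor_log2
   test above ensures the shifted constant does not overflow.  */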
10980 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10981 && INTVAL (XEXP (op0, 1)) >= 0
10982 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10983 && mode_width <= HOST_BITS_PER_WIDE_INT
10984 && (nonzero_bits (XEXP (op0, 0), mode)
10985 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
10986 && (const_op == 0
10987 || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
10988 < mode_width)))
10989 {
10990 const_op <<= INTVAL (XEXP (op0, 1));
10991 op1 = GEN_INT (const_op);
10992 op0 = XEXP (op0, 0);
10993 continue;
10994 }
10995
10996 /* If we are using this shift to extract just the sign bit, we
10997 can replace this with an LT or GE comparison. */
10998 if (const_op == 0
10999 && (equality_comparison_p || sign_bit_comparison_p)
11000 && GET_CODE (XEXP (op0, 1)) == CONST_INT
11001 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
11002 {
11003 op0 = XEXP (op0, 0);
11004 code = (code == NE || code == GT ? LT : GE);
11005 continue;
11006 }
11007 break;
11008
11009 default:
11010 break;
11011 }
11012
11013 break;
11014 }
11015
11016 /* Now make any compound operations involved in this comparison. Then,
11017 check for an outermost SUBREG on OP0 that is not doing anything or is
11018 paradoxical. The latter case can only occur when it is known that the
11019 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
11020 We can never remove a SUBREG for a non-equality comparison because the
11021 sign bit is in a different place in the underlying object. */
11022
11023 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
11024 op1 = make_compound_operation (op1, SET);
11025
11026 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
11027 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
11028 && (code == NE || code == EQ)
11029 && ((GET_MODE_SIZE (GET_MODE (op0))
11030 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
11031 {
11032 op0 = SUBREG_REG (op0);
11033 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
11034 }
11035
11036 else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
11037 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
11038 && (code == NE || code == EQ)
11039 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
11040 <= HOST_BITS_PER_WIDE_INT)
11041 && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
11042 & ~GET_MODE_MASK (GET_MODE (op0))) == 0
11043 && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
11044 op1),
11045 (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
11046 & ~GET_MODE_MASK (GET_MODE (op0))) == 0))
11047 op0 = SUBREG_REG (op0), op1 = tem;
11048
11049 /* We now do the opposite procedure: Some machines don't have compare
11050 insns in all modes. If OP0's mode is an integer mode smaller than a
11051 word and we can't do a compare in that mode, see if there is a larger
11052 mode for which we can do the compare. There are a number of cases in
11053 which we can use the wider mode. */
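/* E.g. (illustration): comparing two QImode values on a machine
   with no QImode compare insn -- if both operands are known to be
   zero-extended (or suitably sign-extended) in SImode, the same
   comparison can be issued in SImode instead.  */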
11054
11055 mode = GET_MODE (op0);
11056 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
11057 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
11058 && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
11059 for (tmode = GET_MODE_WIDER_MODE (mode);
11060 (tmode != VOIDmode
11061 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
11062 tmode = GET_MODE_WIDER_MODE (tmode))
11063 if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
11064 {
11065 /* If the only nonzero bits in OP0 and OP1 are those in the
11066 narrower mode and this is an equality or unsigned comparison,
11067 we can use the wider mode. Similarly for sign-extended
11068 values, in which case it is true for all comparisons. */
11069 if (((code == EQ || code == NE
11070 || code == GEU || code == GTU || code == LEU || code == LTU)
11071 && (nonzero_bits (op0, tmode) & ~GET_MODE_MASK (mode)) == 0
11072 && (nonzero_bits (op1, tmode) & ~GET_MODE_MASK (mode)) == 0)
11073 || ((num_sign_bit_copies (op0, tmode)
11074 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
11075 && (num_sign_bit_copies (op1, tmode)
11076 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
11077 {
11078 /* If OP0 is an AND and we don't have an AND in MODE either,
11079 make a new AND in the proper mode. */
11080 if (GET_CODE (op0) == AND
11081 && (and_optab->handlers[(int) mode].insn_code
11082 == CODE_FOR_nothing))
11083 op0 = gen_binary (AND, tmode,
11084 gen_lowpart_for_combine (tmode,
11085 XEXP (op0, 0)),
11086 gen_lowpart_for_combine (tmode,
11087 XEXP (op0, 1)));
11088
11089 op0 = gen_lowpart_for_combine (tmode, op0);
11090 op1 = gen_lowpart_for_combine (tmode, op1);
11091 break;
11092 }
11093
11094 /* If this is a test for negative, we can make an explicit
11095 test of the sign bit. */
11096
11097 if (op1 == const0_rtx && (code == LT || code == GE)
11098 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
11099 {
11100 op0 = gen_binary (AND, tmode,
11101 gen_lowpart_for_combine (tmode, op0),
11102 GEN_INT ((HOST_WIDE_INT) 1
11103 << (GET_MODE_BITSIZE (mode) - 1)));
11104 code = (code == LT) ? NE : EQ;
11105 break;
11106 }
11107 }
11108
11109 #ifdef CANONICALIZE_COMPARISON
11110 /* If this machine only supports a subset of valid comparisons, see if we
11111 can convert an unsupported one into a supported one. */
11112 CANONICALIZE_COMPARISON (code, op0, op1);
11113 #endif
11114
11115 *pop0 = op0;
11116 *pop1 = op1;
11117
11118 return code;
11119 }
11120 \f
11121 /* Return 1 if we know that X, a comparison operation, is not operating
11122 on a floating-point value or is EQ or NE, meaning that we can safely
11123 reverse it. */
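/* E.g. (illustration): with IEEE floats, (lt X Y) must not be
   rewritten as its reverse (ge X Y), since both are false when X or
   Y is a NaN; only EQ/NE and the explicitly unordered codes are
   always safe there.  */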
11124
11125 static int
11126 reversible_comparison_p (x)
11127 rtx x;
11128 {
11129 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
11130 || flag_fast_math
11131 || GET_CODE (x) == NE || GET_CODE (x) == EQ
11132 || GET_CODE (x) == UNORDERED || GET_CODE (x) == ORDERED)
11133 return 1;
11134
11135 switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
11136 {
11137 case MODE_INT:
11138 case MODE_PARTIAL_INT:
11139 case MODE_COMPLEX_INT:
11140 return 1;
11141
11142 case MODE_CC:
11143 /* If the mode of the condition codes tells us that this is safe,
11144 we need look no further. */
11145 if (REVERSIBLE_CC_MODE (GET_MODE (XEXP (x, 0))))
11146 return 1;
11147
11148 /* Otherwise try to find where the condition codes were last set and
11149 use that. */
11150 x = get_last_value (XEXP (x, 0));
11151 return (x && GET_CODE (x) == COMPARE
11152 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0))));
11153
11154 default:
11155 return 0;
11156 }
11157 }
11158 \f
11159 /* Utility function for the following routine. Called when X is part of a
11160 value being stored into reg_last_set_value. Sets reg_last_set_table_tick
11161 for each register mentioned. Similar to mention_regs in cse.c. */
11162
11163 static void
11164 update_table_tick (x)
11165 rtx x;
11166 {
11167 register enum rtx_code code = GET_CODE (x);
11168 register const char *fmt = GET_RTX_FORMAT (code);
11169 register int i;
11170
11171 if (code == REG)
11172 {
11173 unsigned int regno = REGNO (x);
11174 unsigned int endregno
11175 = regno + (regno < FIRST_PSEUDO_REGISTER
11176 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11177 unsigned int r;
11178
11179 for (r = regno; r < endregno; r++)
11180 reg_last_set_table_tick[r] = label_tick;
11181
11182 return;
11183 }
11184
11185 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11186 /* Note that we can't have an "E" in values stored; see
11187 get_last_value_validate. */
11188 if (fmt[i] == 'e')
11189 update_table_tick (XEXP (x, i));
11190 }
11191
11192 /* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
11193 are saying that the register is clobbered and we no longer know its
11194 value. If INSN is zero, don't update reg_last_set; this is only permitted
11195 with VALUE also zero and is used to invalidate the register. */
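/* A bookkeeping sketch with hypothetical register numbers: after
   processing (set (reg:SI 65) (plus:SI (reg:SI 66) (const_int 4))),
   reg_last_set_value[65] holds the PLUS and update_table_tick ticks
   reg 66.  If reg 66 is then set again in this block, it is marked
   invalid, and get_last_value_validate will refuse reg 65's recorded
   value because it mentions reg 66.  */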
11196
11197 static void
11198 record_value_for_reg (reg, insn, value)
11199 rtx reg;
11200 rtx insn;
11201 rtx value;
11202 {
11203 unsigned int regno = REGNO (reg);
11204 unsigned int endregno
11205 = regno + (regno < FIRST_PSEUDO_REGISTER
11206 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
11207 unsigned int i;
11208
11209 /* If VALUE contains REG and we have a previous value for REG, substitute
11210 the previous value. */
11211 if (value && insn && reg_overlap_mentioned_p (reg, value))
11212 {
11213 rtx tem;
11214
11215 /* Set things up so get_last_value is allowed to see anything set up to
11216 our insn. */
11217 subst_low_cuid = INSN_CUID (insn);
11218 tem = get_last_value (reg);
11219
11220 /* If TEM is simply a binary operation with two CLOBBERs as operands,
11221 it isn't going to be useful and will take a lot of time to process,
11222 so just use the CLOBBER. */
11223
11224 if (tem)
11225 {
11226 if ((GET_RTX_CLASS (GET_CODE (tem)) == '2'
11227 || GET_RTX_CLASS (GET_CODE (tem)) == 'c')
11228 && GET_CODE (XEXP (tem, 0)) == CLOBBER
11229 && GET_CODE (XEXP (tem, 1)) == CLOBBER)
11230 tem = XEXP (tem, 0);
11231
11232 value = replace_rtx (copy_rtx (value), reg, tem);
11233 }
11234 }
11235
11236 /* For each register modified, show we don't know its value, that
11237 we don't know about its bitwise content, that its value has been
11238 updated, and that we don't know the location of the death of the
11239 register. */
11240 for (i = regno; i < endregno; i++)
11241 {
11242 if (insn)
11243 reg_last_set[i] = insn;
11244
11245 reg_last_set_value[i] = 0;
11246 reg_last_set_mode[i] = 0;
11247 reg_last_set_nonzero_bits[i] = 0;
11248 reg_last_set_sign_bit_copies[i] = 0;
11249 reg_last_death[i] = 0;
11250 }
11251
11252 /* Mark registers that are being referenced in this value. */
11253 if (value)
11254 update_table_tick (value);
11255
11256 /* Now update the status of each register being set.
11257 If someone is using this register in this block, set this register
11258 to invalid since we will get confused between the two lives in this
11259 basic block. This makes using this register always invalid. In cse, we
11260 scan the table to invalidate all entries using this register, but this
11261 is too much work for us. */
11262
11263 for (i = regno; i < endregno; i++)
11264 {
11265 reg_last_set_label[i] = label_tick;
11266 if (value && reg_last_set_table_tick[i] == label_tick)
11267 reg_last_set_invalid[i] = 1;
11268 else
11269 reg_last_set_invalid[i] = 0;
11270 }
11271
11272 /* The value being assigned might refer to X (like in "x++;"). In that
11273 case, we must replace it with (clobber (const_int 0)) to prevent
11274 infinite loops. */
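  /* A sketch with a hypothetical register number: for
     (set (reg:SI 65) (plus:SI (reg:SI 65) (const_int 1)))
     with no previous value recorded for reg 65, the PLUS still mentions
     reg 65 itself, so validation fails and the self-reference is
     replaced by (clobber (const_int 0)) instead of expanding forever.  */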
11275 if (value && ! get_last_value_validate (&value, insn,
11276 reg_last_set_label[regno], 0))
11277 {
11278 value = copy_rtx (value);
11279 if (! get_last_value_validate (&value, insn,
11280 reg_last_set_label[regno], 1))
11281 value = 0;
11282 }
11283
11284 /* For the main register being modified, update the value, the mode, the
11285 nonzero bits, and the number of sign bit copies. */
11286
11287 reg_last_set_value[regno] = value;
11288
11289 if (value)
11290 {
11291 subst_low_cuid = INSN_CUID (insn);
11292 reg_last_set_mode[regno] = GET_MODE (reg);
11293 reg_last_set_nonzero_bits[regno] = nonzero_bits (value, GET_MODE (reg));
11294 reg_last_set_sign_bit_copies[regno]
11295 = num_sign_bit_copies (value, GET_MODE (reg));
11296 }
11297 }
11298
11299 /* Called via note_stores from record_dead_and_set_regs to handle one
11300 SET or CLOBBER in an insn. DATA is the instruction in which the
11301 set is occurring. */
11302
11303 static void
11304 record_dead_and_set_regs_1 (dest, setter, data)
11305 rtx dest, setter;
11306 void *data;
11307 {
11308 rtx record_dead_insn = (rtx) data;
11309
11310 if (GET_CODE (dest) == SUBREG)
11311 dest = SUBREG_REG (dest);
11312
11313 if (GET_CODE (dest) == REG)
11314 {
11315 /* If we are setting the whole register, we know its value. Otherwise
11316 show that we don't know the value. We can handle SUBREG in
11317 some cases. */
11318 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
11319 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
11320 else if (GET_CODE (setter) == SET
11321 && GET_CODE (SET_DEST (setter)) == SUBREG
11322 && SUBREG_REG (SET_DEST (setter)) == dest
11323 && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
11324 && subreg_lowpart_p (SET_DEST (setter)))
11325 record_value_for_reg (dest, record_dead_insn,
11326 gen_lowpart_for_combine (GET_MODE (dest),
11327 SET_SRC (setter)));
11328 else
11329 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
11330 }
11331 else if (GET_CODE (dest) == MEM
11332 /* Ignore pushes, they clobber nothing. */
11333 && ! push_operand (dest, GET_MODE (dest)))
11334 mem_last_set = INSN_CUID (record_dead_insn);
11335 }
11336
11337 /* Update the records of when each REG was most recently set or killed
11338 for the things done by INSN. This is the last thing done in processing
11339 INSN in the combiner loop.
11340
11341 We update reg_last_set, reg_last_set_value, reg_last_set_mode,
11342 reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
11343 and also the similar information mem_last_set (which insn most recently
11344 modified memory) and last_call_cuid (which insn was the most recent
11345 subroutine call). */
11346
11347 static void
11348 record_dead_and_set_regs (insn)
11349 rtx insn;
11350 {
11351 register rtx link;
11352 unsigned int i;
11353
11354 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
11355 {
11356 if (REG_NOTE_KIND (link) == REG_DEAD
11357 && GET_CODE (XEXP (link, 0)) == REG)
11358 {
11359 unsigned int regno = REGNO (XEXP (link, 0));
11360 unsigned int endregno
11361 = regno + (regno < FIRST_PSEUDO_REGISTER
11362 ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
11363 : 1);
11364
11365 for (i = regno; i < endregno; i++)
11366 reg_last_death[i] = insn;
11367 }
11368 else if (REG_NOTE_KIND (link) == REG_INC)
11369 record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
11370 }
11371
11372 if (GET_CODE (insn) == CALL_INSN)
11373 {
11374 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
11375 if (call_used_regs[i])
11376 {
11377 reg_last_set_value[i] = 0;
11378 reg_last_set_mode[i] = 0;
11379 reg_last_set_nonzero_bits[i] = 0;
11380 reg_last_set_sign_bit_copies[i] = 0;
11381 reg_last_death[i] = 0;
11382 }
11383
11384 last_call_cuid = mem_last_set = INSN_CUID (insn);
11385 }
11386
11387 note_stores (PATTERN (insn), record_dead_and_set_regs_1, insn);
11388 }
11389
11390 /* If a SUBREG has the promoted bit set, it is in fact a property of the
11391 register present in the SUBREG, so for each such SUBREG go back and
11392 adjust nonzero and sign bit information of the registers that are
11393 known to have some zero/sign bits set.
11394
11395 This is needed because when combine blows the SUBREGs away, the
11396 information on zero/sign bits is lost and further combines can be
11397 missed because of that. */
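/* A sketch with a hypothetical register number: if
   (subreg:QI (reg:SI 70) 0) is marked SUBREG_PROMOTED_UNSIGNED_P,
   reg 70 is known to hold a zero-extended QImode value, so
   reg_last_set_nonzero_bits[70] can be narrowed with
   GET_MODE_MASK (QImode), i.e. to the low 8 bits.  */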
11398
11399 static void
11400 record_promoted_value (insn, subreg)
11401 rtx insn;
11402 rtx subreg;
11403 {
11404 rtx links, set;
11405 unsigned int regno = REGNO (SUBREG_REG (subreg));
11406 enum machine_mode mode = GET_MODE (subreg);
11407
11408 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
11409 return;
11410
11411 for (links = LOG_LINKS (insn); links;)
11412 {
11413 insn = XEXP (links, 0);
11414 set = single_set (insn);
11415
11416 if (! set || GET_CODE (SET_DEST (set)) != REG
11417 || REGNO (SET_DEST (set)) != regno
11418 || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg)))
11419 {
11420 links = XEXP (links, 1);
11421 continue;
11422 }
11423
11424 if (reg_last_set[regno] == insn)
11425 {
11426 if (SUBREG_PROMOTED_UNSIGNED_P (subreg))
11427 reg_last_set_nonzero_bits[regno] &= GET_MODE_MASK (mode);
11428 }
11429
11430 if (GET_CODE (SET_SRC (set)) == REG)
11431 {
11432 regno = REGNO (SET_SRC (set));
11433 links = LOG_LINKS (insn);
11434 }
11435 else
11436 break;
11437 }
11438 }
11439
11440 /* Scan X for promoted SUBREGs. For each one found,
11441 note what it implies to the registers used in it. */
11442
11443 static void
11444 check_promoted_subreg (insn, x)
11445 rtx insn;
11446 rtx x;
11447 {
11448 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
11449 && GET_CODE (SUBREG_REG (x)) == REG)
11450 record_promoted_value (insn, x);
11451 else
11452 {
11453 const char *format = GET_RTX_FORMAT (GET_CODE (x));
11454 int i, j;
11455
11456 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
11457 switch (format[i])
11458 {
11459 case 'e':
11460 check_promoted_subreg (insn, XEXP (x, i));
11461 break;
11462 case 'V':
11463 case 'E':
11464 if (XVEC (x, i) != 0)
11465 for (j = 0; j < XVECLEN (x, i); j++)
11466 check_promoted_subreg (insn, XVECEXP (x, i, j));
11467 break;
11468 }
11469 }
11470 }
11471 \f
11472 /* Utility routine for the following function. Verify that all the registers
11473 mentioned in *LOC are valid when *LOC was part of a value set when
11474 label_tick == TICK. Return 0 if some are not.
11475
11476 If REPLACE is non-zero, replace the invalid reference with
11477 (clobber (const_int 0)) and return 1. This replacement is useful because
11478 we often can get useful information about the form of a value (e.g., if
11479 it was produced by a shift that always produces -1 or 0) even though
11480 we don't know exactly what registers it was produced from. */
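/* A sketch with a hypothetical register number: suppose the recorded
   value (plus:SI (reg:SI 66) (const_int 4)) is validated after reg 66
   has been set again.  With REPLACE == 0 the validation simply fails;
   with REPLACE != 0 the stale reference is rewritten, leaving
   (plus:SI (clobber (const_int 0)) (const_int 4)), which still records
   the overall form of the value.  */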
11481
11482 static int
11483 get_last_value_validate (loc, insn, tick, replace)
11484 rtx *loc;
11485 rtx insn;
11486 int tick;
11487 int replace;
11488 {
11489 rtx x = *loc;
11490 const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
11491 int len = GET_RTX_LENGTH (GET_CODE (x));
11492 int i;
11493
11494 if (GET_CODE (x) == REG)
11495 {
11496 unsigned int regno = REGNO (x);
11497 unsigned int endregno
11498 = regno + (regno < FIRST_PSEUDO_REGISTER
11499 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11500 unsigned int j;
11501
11502 for (j = regno; j < endregno; j++)
11503 if (reg_last_set_invalid[j]
11504 /* If this is a pseudo-register that was only set once and not
11505 live at the beginning of the function, it is always valid. */
11506 || (! (regno >= FIRST_PSEUDO_REGISTER
11507 && REG_N_SETS (regno) == 1
11508 && (! REGNO_REG_SET_P
11509 (BASIC_BLOCK (0)->global_live_at_start, regno)))
11510 && reg_last_set_label[j] > tick))
11511 {
11512 if (replace)
11513 *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
11514 return replace;
11515 }
11516
11517 return 1;
11518 }
11519 /* If this is a memory reference, make sure that there were
11520 no stores after it that might have clobbered the value. We don't
11521 have alias info, so we assume any store invalidates it. */
11522 else if (GET_CODE (x) == MEM && ! RTX_UNCHANGING_P (x)
11523 && INSN_CUID (insn) <= mem_last_set)
11524 {
11525 if (replace)
11526 *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
11527 return replace;
11528 }
11529
11530 for (i = 0; i < len; i++)
11531 if ((fmt[i] == 'e'
11532 && get_last_value_validate (&XEXP (x, i), insn, tick, replace) == 0)
11533 /* Don't bother with these. They shouldn't occur anyway. */
11534 || fmt[i] == 'E')
11535 return 0;
11536
11537 /* If we haven't found a reason for it to be invalid, it is valid. */
11538 return 1;
11539 }
11540
11541 /* Get the last value assigned to X, if known. Some registers
11542 in the value may be replaced with (clobber (const_int 0)) if their value
is no longer known reliably.  */
11544
11545 static rtx
11546 get_last_value (x)
11547 rtx x;
11548 {
11549 unsigned int regno;
11550 rtx value;
11551
11552 /* If this is a non-paradoxical SUBREG, get the value of its operand and
11553 then convert it to the desired mode. If this is a paradoxical SUBREG,
11554 we cannot predict what values the "extra" bits might have. */
11555 if (GET_CODE (x) == SUBREG
11556 && subreg_lowpart_p (x)
11557 && (GET_MODE_SIZE (GET_MODE (x))
11558 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
11559 && (value = get_last_value (SUBREG_REG (x))) != 0)
11560 return gen_lowpart_for_combine (GET_MODE (x), value);
11561
11562 if (GET_CODE (x) != REG)
11563 return 0;
11564
11565 regno = REGNO (x);
11566 value = reg_last_set_value[regno];
11567
/* If we don't have a value, or if it isn't for this basic block and
   it's either a hard register, set more than once, or live at the
   beginning of the function, return 0.

   Because if it's not live at the beginning of the function then the reg
   is always set before being used (it is never used without being set).
   And, if it's set only once, and it's always set before use, then all
   uses must have the same last value, even if it's not from this basic
   block.  */
11577
11578 if (value == 0
11579 || (reg_last_set_label[regno] != label_tick
11580 && (regno < FIRST_PSEUDO_REGISTER
11581 || REG_N_SETS (regno) != 1
11582 || (REGNO_REG_SET_P
11583 (BASIC_BLOCK (0)->global_live_at_start, regno)))))
11584 return 0;
11585
11586 /* If the value was set in a later insn than the ones we are processing,
11587 we can't use it even if the register was only set once. */
11588 if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
11589 return 0;
11590
11591 /* If the value has all its registers valid, return it. */
11592 if (get_last_value_validate (&value, reg_last_set[regno],
11593 reg_last_set_label[regno], 0))
11594 return value;
11595
11596 /* Otherwise, make a copy and replace any invalid register with
11597 (clobber (const_int 0)). If that fails for some reason, return 0. */
11598
11599 value = copy_rtx (value);
11600 if (get_last_value_validate (&value, reg_last_set[regno],
11601 reg_last_set_label[regno], 1))
11602 return value;
11603
11604 return 0;
11605 }
11606 \f
11607 /* Return nonzero if expression X refers to a REG or to memory
11608 that is set in an instruction more recent than FROM_CUID. */
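/* A sketch with hypothetical cuids and register number: if FROM_CUID is
   10 and reg 65 was last set by an insn with cuid 15, a use of reg 65
   moved back to cuid 10 would cross that set and see a different value,
   so we return 1 and the combination is rejected.  */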
11609
11610 static int
11611 use_crosses_set_p (x, from_cuid)
11612 register rtx x;
11613 int from_cuid;
11614 {
11615 register const char *fmt;
11616 register int i;
11617 register enum rtx_code code = GET_CODE (x);
11618
11619 if (code == REG)
11620 {
11621 unsigned int regno = REGNO (x);
11622 unsigned endreg = regno + (regno < FIRST_PSEUDO_REGISTER
11623 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11624
11625 #ifdef PUSH_ROUNDING
11626 /* Don't allow uses of the stack pointer to be moved,
11627 because we don't know whether the move crosses a push insn. */
11628 if (regno == STACK_POINTER_REGNUM && PUSH_ARGS)
11629 return 1;
11630 #endif
11631 for (; regno < endreg; regno++)
11632 if (reg_last_set[regno]
11633 && INSN_CUID (reg_last_set[regno]) > from_cuid)
11634 return 1;
11635 return 0;
11636 }
11637
11638 if (code == MEM && mem_last_set > from_cuid)
11639 return 1;
11640
11641 fmt = GET_RTX_FORMAT (code);
11642
11643 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11644 {
11645 if (fmt[i] == 'E')
11646 {
11647 register int j;
11648 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
11649 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
11650 return 1;
11651 }
11652 else if (fmt[i] == 'e'
11653 && use_crosses_set_p (XEXP (x, i), from_cuid))
11654 return 1;
11655 }
11656 return 0;
11657 }
11658 \f
11659 /* Define three variables used for communication between the following
11660 routines. */
11661
11662 static unsigned int reg_dead_regno, reg_dead_endregno;
11663 static int reg_dead_flag;
11664
11665 /* Function called via note_stores from reg_dead_at_p.
11666
11667 If DEST is within [reg_dead_regno, reg_dead_endregno), set
reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET.  */
11669
11670 static void
11671 reg_dead_at_p_1 (dest, x, data)
11672 rtx dest;
11673 rtx x;
11674 void *data ATTRIBUTE_UNUSED;
11675 {
11676 unsigned int regno, endregno;
11677
11678 if (GET_CODE (dest) != REG)
11679 return;
11680
11681 regno = REGNO (dest);
11682 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
11683 ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
11684
11685 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
11686 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
11687 }
11688
11689 /* Return non-zero if REG is known to be dead at INSN.
11690
11691 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
11692 referencing REG, it is dead. If we hit a SET referencing REG, it is
11693 live. Otherwise, see if it is live or dead at the start of the basic
11694 block we are in. Hard regs marked as being live in NEWPAT_USED_REGS
11695 must be assumed to be always live. */
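/* A sketch: scanning backwards, a CLOBBER of REG answers "dead"
   (reg_dead_flag == 1), a SET of REG answers "live" (reg_dead_flag ==
   -1), and hitting a CODE_LABEL or BARRIER falls through to the basic
   block's global_live_at_start information.  */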
11696
11697 static int
11698 reg_dead_at_p (reg, insn)
11699 rtx reg;
11700 rtx insn;
11701 {
11702 int block;
11703 unsigned int i;
11704
11705 /* Set variables for reg_dead_at_p_1. */
11706 reg_dead_regno = REGNO (reg);
11707 reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
11708 ? HARD_REGNO_NREGS (reg_dead_regno,
11709 GET_MODE (reg))
11710 : 1);
11711
11712 reg_dead_flag = 0;
11713
11714 /* Check that reg isn't mentioned in NEWPAT_USED_REGS. */
11715 if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
11716 {
11717 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
11718 if (TEST_HARD_REG_BIT (newpat_used_regs, i))
11719 return 0;
11720 }
11721
11722 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
11723 beginning of function. */
11724 for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER;
11725 insn = prev_nonnote_insn (insn))
11726 {
11727 note_stores (PATTERN (insn), reg_dead_at_p_1, NULL);
11728 if (reg_dead_flag)
11729 return reg_dead_flag == 1 ? 1 : 0;
11730
11731 if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
11732 return 1;
11733 }
11734
11735 /* Get the basic block number that we were in. */
11736 if (insn == 0)
11737 block = 0;
11738 else
11739 {
11740 for (block = 0; block < n_basic_blocks; block++)
11741 if (insn == BLOCK_HEAD (block))
11742 break;
11743
11744 if (block == n_basic_blocks)
11745 return 0;
11746 }
11747
11748 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
11749 if (REGNO_REG_SET_P (BASIC_BLOCK (block)->global_live_at_start, i))
11750 return 0;
11751
11752 return 1;
11753 }
11754 \f
11755 /* Note hard registers in X that are used. This code is similar to
11756 that in flow.c, but much simpler since we don't care about pseudos. */
11757
11758 static void
11759 mark_used_regs_combine (x)
11760 rtx x;
11761 {
11762 RTX_CODE code = GET_CODE (x);
11763 unsigned int regno;
11764 int i;
11765
11766 switch (code)
11767 {
11768 case LABEL_REF:
11769 case SYMBOL_REF:
11770 case CONST_INT:
11771 case CONST:
11772 case CONST_DOUBLE:
11773 case PC:
11774 case ADDR_VEC:
11775 case ADDR_DIFF_VEC:
11776 case ASM_INPUT:
11777 #ifdef HAVE_cc0
11778 /* CC0 must die in the insn after it is set, so we don't need to take
11779 special note of it here. */
11780 case CC0:
11781 #endif
11782 return;
11783
11784 case CLOBBER:
11785 /* If we are clobbering a MEM, mark any hard registers inside the
11786 address as used. */
11787 if (GET_CODE (XEXP (x, 0)) == MEM)
11788 mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
11789 return;
11790
11791 case REG:
11792 regno = REGNO (x);
11793 /* A hard reg in a wide mode may really be multiple registers.
11794 If so, mark all of them just like the first. */
11795 if (regno < FIRST_PSEUDO_REGISTER)
11796 {
11797 unsigned int endregno, r;
11798
/* None of this applies to the stack, frame or arg pointers.  */
11800 if (regno == STACK_POINTER_REGNUM
11801 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
11802 || regno == HARD_FRAME_POINTER_REGNUM
11803 #endif
11804 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
11805 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
11806 #endif
11807 || regno == FRAME_POINTER_REGNUM)
11808 return;
11809
11810 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
11811 for (r = regno; r < endregno; r++)
11812 SET_HARD_REG_BIT (newpat_used_regs, r);
11813 }
11814 return;
11815
11816 case SET:
11817 {
11818 /* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
11819 the address. */
11820 register rtx testreg = SET_DEST (x);
11821
11822 while (GET_CODE (testreg) == SUBREG
11823 || GET_CODE (testreg) == ZERO_EXTRACT
11824 || GET_CODE (testreg) == SIGN_EXTRACT
11825 || GET_CODE (testreg) == STRICT_LOW_PART)
11826 testreg = XEXP (testreg, 0);
11827
11828 if (GET_CODE (testreg) == MEM)
11829 mark_used_regs_combine (XEXP (testreg, 0));
11830
11831 mark_used_regs_combine (SET_SRC (x));
11832 }
11833 return;
11834
11835 default:
11836 break;
11837 }
11838
11839 /* Recursively scan the operands of this expression. */
11840
11841 {
11842 register const char *fmt = GET_RTX_FORMAT (code);
11843
11844 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11845 {
11846 if (fmt[i] == 'e')
11847 mark_used_regs_combine (XEXP (x, i));
11848 else if (fmt[i] == 'E')
11849 {
11850 register int j;
11851
11852 for (j = 0; j < XVECLEN (x, i); j++)
11853 mark_used_regs_combine (XVECEXP (x, i, j));
11854 }
11855 }
11856 }
11857 }
11858 \f
11859 /* Remove register number REGNO from the dead registers list of INSN.
11860
11861 Return the note used to record the death, if there was one. */
11862
11863 rtx
11864 remove_death (regno, insn)
11865 unsigned int regno;
11866 rtx insn;
11867 {
11868 register rtx note = find_regno_note (insn, REG_DEAD, regno);
11869
11870 if (note)
11871 {
11872 REG_N_DEATHS (regno)--;
11873 remove_note (insn, note);
11874 }
11875
11876 return note;
11877 }
11878
11879 /* For each register (hardware or pseudo) used within expression X, if its
11880 death is in an instruction with cuid between FROM_CUID (inclusive) and
11881 TO_INSN (exclusive), put a REG_DEAD note for that register in the
11882 list headed by PNOTES.
11883
That said, don't move registers killed by MAYBE_KILL_INSN.
11885
11886 This is done when X is being merged by combination into TO_INSN. These
11887 notes will then be distributed as needed. */
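/* A sketch with hypothetical cuids: if reg 65's recorded death is at an
   insn with cuid 12, FROM_CUID is 10, and TO_INSN has cuid 20, the
   REG_DEAD note is removed from the dying insn and queued on *PNOTES so
   that distribute_notes can later place it near the combined insn.  */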
11888
11889 static void
11890 move_deaths (x, maybe_kill_insn, from_cuid, to_insn, pnotes)
11891 rtx x;
11892 rtx maybe_kill_insn;
11893 int from_cuid;
11894 rtx to_insn;
11895 rtx *pnotes;
11896 {
11897 register const char *fmt;
11898 register int len, i;
11899 register enum rtx_code code = GET_CODE (x);
11900
11901 if (code == REG)
11902 {
11903 unsigned int regno = REGNO (x);
11904 register rtx where_dead = reg_last_death[regno];
11905 register rtx before_dead, after_dead;
11906
/* Don't move the register if it gets killed in between FROM_CUID and TO_INSN.  */
11908 if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
11909 && ! reg_referenced_p (x, maybe_kill_insn))
11910 return;
11911
11912 /* WHERE_DEAD could be a USE insn made by combine, so first we
11913 make sure that we have insns with valid INSN_CUID values. */
11914 before_dead = where_dead;
11915 while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
11916 before_dead = PREV_INSN (before_dead);
11917
11918 after_dead = where_dead;
11919 while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
11920 after_dead = NEXT_INSN (after_dead);
11921
11922 if (before_dead && after_dead
11923 && INSN_CUID (before_dead) >= from_cuid
11924 && (INSN_CUID (after_dead) < INSN_CUID (to_insn)
11925 || (where_dead != after_dead
11926 && INSN_CUID (after_dead) == INSN_CUID (to_insn))))
11927 {
11928 rtx note = remove_death (regno, where_dead);
11929
11930 /* It is possible for the call above to return 0. This can occur
11931 when reg_last_death points to I2 or I1 that we combined with.
11932 In that case make a new note.
11933
11934 We must also check for the case where X is a hard register
11935 and NOTE is a death note for a range of hard registers
11936 including X. In that case, we must put REG_DEAD notes for
11937 the remaining registers in place of NOTE. */
11938
11939 if (note != 0 && regno < FIRST_PSEUDO_REGISTER
11940 && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
11941 > GET_MODE_SIZE (GET_MODE (x))))
11942 {
11943 unsigned int deadregno = REGNO (XEXP (note, 0));
11944 unsigned int deadend
11945 = (deadregno + HARD_REGNO_NREGS (deadregno,
11946 GET_MODE (XEXP (note, 0))));
11947 unsigned int ourend
11948 = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
11949 unsigned int i;
11950
11951 for (i = deadregno; i < deadend; i++)
11952 if (i < regno || i >= ourend)
11953 REG_NOTES (where_dead)
11954 = gen_rtx_EXPR_LIST (REG_DEAD,
11955 gen_rtx_REG (reg_raw_mode[i], i),
11956 REG_NOTES (where_dead));
11957 }
11958
11959 /* If we didn't find any note, or if we found a REG_DEAD note that
11960 covers only part of the given reg, and we have a multi-reg hard
11961 register, then to be safe we must check for REG_DEAD notes
11962 for each register other than the first. They could have
11963 their own REG_DEAD notes lying around. */
11964 else if ((note == 0
11965 || (note != 0
11966 && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
11967 < GET_MODE_SIZE (GET_MODE (x)))))
11968 && regno < FIRST_PSEUDO_REGISTER
11969 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
11970 {
11971 unsigned int ourend
11972 = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
11973 unsigned int i, offset;
11974 rtx oldnotes = 0;
11975
11976 if (note)
11977 offset = HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0)));
11978 else
11979 offset = 1;
11980
11981 for (i = regno + offset; i < ourend; i++)
11982 move_deaths (gen_rtx_REG (reg_raw_mode[i], i),
11983 maybe_kill_insn, from_cuid, to_insn, &oldnotes);
11984 }
11985
11986 if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
11987 {
11988 XEXP (note, 1) = *pnotes;
11989 *pnotes = note;
11990 }
11991 else
11992 *pnotes = gen_rtx_EXPR_LIST (REG_DEAD, x, *pnotes);
11993
11994 REG_N_DEATHS (regno)++;
11995 }
11996
11997 return;
11998 }
11999
12000 else if (GET_CODE (x) == SET)
12001 {
12002 rtx dest = SET_DEST (x);
12003
12004 move_deaths (SET_SRC (x), maybe_kill_insn, from_cuid, to_insn, pnotes);
12005
/* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
   that accesses one word of a multi-word item, some piece of every
   register in the expression is used by this insn, so remove any
   old death.  */
12010
12011 if (GET_CODE (dest) == ZERO_EXTRACT
12012 || GET_CODE (dest) == STRICT_LOW_PART
12013 || (GET_CODE (dest) == SUBREG
12014 && (((GET_MODE_SIZE (GET_MODE (dest))
12015 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
12016 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
12017 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
12018 {
12019 move_deaths (dest, maybe_kill_insn, from_cuid, to_insn, pnotes);
12020 return;
12021 }
12022
12023 /* If this is some other SUBREG, we know it replaces the entire
12024 value, so use that as the destination. */
12025 if (GET_CODE (dest) == SUBREG)
12026 dest = SUBREG_REG (dest);
12027
12028 /* If this is a MEM, adjust deaths of anything used in the address.
12029 For a REG (the only other possibility), the entire value is
12030 being replaced so the old value is not used in this insn. */
12031
12032 if (GET_CODE (dest) == MEM)
12033 move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid,
12034 to_insn, pnotes);
12035 return;
12036 }
12037
12038 else if (GET_CODE (x) == CLOBBER)
12039 return;
12040
12041 len = GET_RTX_LENGTH (code);
12042 fmt = GET_RTX_FORMAT (code);
12043
12044 for (i = 0; i < len; i++)
12045 {
12046 if (fmt[i] == 'E')
12047 {
12048 register int j;
12049 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
12050 move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid,
12051 to_insn, pnotes);
12052 }
12053 else if (fmt[i] == 'e')
12054 move_deaths (XEXP (x, i), maybe_kill_insn, from_cuid, to_insn, pnotes);
12055 }
12056 }
12057 \f
12058 /* Return 1 if X is the target of a bit-field assignment in BODY, the
12059 pattern of an insn. X must be a REG. */
12060
12061 static int
12062 reg_bitfield_target_p (x, body)
12063 rtx x;
12064 rtx body;
12065 {
12066 int i;
12067
12068 if (GET_CODE (body) == SET)
12069 {
12070 rtx dest = SET_DEST (body);
12071 rtx target;
12072 unsigned int regno, tregno, endregno, endtregno;
12073
12074 if (GET_CODE (dest) == ZERO_EXTRACT)
12075 target = XEXP (dest, 0);
12076 else if (GET_CODE (dest) == STRICT_LOW_PART)
12077 target = SUBREG_REG (XEXP (dest, 0));
12078 else
12079 return 0;
12080
12081 if (GET_CODE (target) == SUBREG)
12082 target = SUBREG_REG (target);
12083
12084 if (GET_CODE (target) != REG)
12085 return 0;
12086
12087 tregno = REGNO (target), regno = REGNO (x);
12088 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
12089 return target == x;
12090
12091 endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
12092 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
12093
12094 return endregno > tregno && regno < endtregno;
12095 }
12096
12097 else if (GET_CODE (body) == PARALLEL)
12098 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
12099 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
12100 return 1;
12101
12102 return 0;
12103 }
12104 \f
12105 /* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
as appropriate.  I3 and I2 are the insns resulting from combining the
insns that included FROM (I2 may be zero).
12108
12109 ELIM_I2 and ELIM_I1 are either zero or registers that we know will
12110 not need REG_DEAD notes because they are being substituted for. This
12111 saves searching in the most common cases.
12112
12113 Each note in the list is either ignored or placed on some insns, depending
12114 on the type of note. */
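/* A sketch with a hypothetical register number: a REG_DEAD note for
   reg 65 coming from a combined insn is placed on I3 if I3 still uses
   reg 65, or on I2 if I2 is adjacent to I3 and uses it; failing both,
   the code below searches backwards for an earlier use, possibly
   turning the note into REG_UNUSED or dropping it entirely.  */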
12115
12116 static void
12117 distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
12118 rtx notes;
12119 rtx from_insn;
12120 rtx i3, i2;
12121 rtx elim_i2, elim_i1;
12122 {
12123 rtx note, next_note;
12124 rtx tem;
12125
12126 for (note = notes; note; note = next_note)
12127 {
12128 rtx place = 0, place2 = 0;
12129
12130 /* If this NOTE references a pseudo register, ensure it references
12131 the latest copy of that register. */
12132 if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
12133 && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
12134 XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
12135
12136 next_note = XEXP (note, 1);
12137 switch (REG_NOTE_KIND (note))
12138 {
12139 case REG_BR_PROB:
12140 case REG_EXEC_COUNT:
12141 /* Doesn't matter much where we put this, as long as it's somewhere.
12142 It is preferable to keep these notes on branches, which is most
12143 likely to be i3. */
12144 place = i3;
12145 break;
12146
12147 case REG_EH_REGION:
12148 case REG_EH_RETHROW:
12149 case REG_NORETURN:
12150 /* These notes must remain with the call. It should not be
12151 possible for both I2 and I3 to be a call. */
12152 if (GET_CODE (i3) == CALL_INSN)
12153 place = i3;
12154 else if (i2 && GET_CODE (i2) == CALL_INSN)
12155 place = i2;
12156 else
12157 abort ();
12158 break;
12159
12160 case REG_UNUSED:
12161 /* Any clobbers for i3 may still exist, and so we must process
12162 REG_UNUSED notes from that insn.
12163
12164 Any clobbers from i2 or i1 can only exist if they were added by
12165 recog_for_combine. In that case, recog_for_combine created the
12166 necessary REG_UNUSED notes. Trying to keep any original
12167 REG_UNUSED notes from these insns can cause incorrect output
12168 if it is for the same register as the original i3 dest.
12169 In that case, we will notice that the register is set in i3,
12170 and then add a REG_UNUSED note for the destination of i3, which
12171 is wrong. However, it is possible to have REG_UNUSED notes from
i2 or i1 for registers that were both used and clobbered, so
12173 we keep notes from i2 or i1 if they will turn into REG_DEAD
12174 notes. */
12175
12176 /* If this register is set or clobbered in I3, put the note there
12177 unless there is one already. */
12178 if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
12179 {
12180 if (from_insn != i3)
12181 break;
12182
12183 if (! (GET_CODE (XEXP (note, 0)) == REG
12184 ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
12185 : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
12186 place = i3;
12187 }
12188 /* Otherwise, if this register is used by I3, then this register
12189 now dies here, so we must put a REG_DEAD note here unless there
12190 is one already. */
12191 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
12192 && ! (GET_CODE (XEXP (note, 0)) == REG
12193 ? find_regno_note (i3, REG_DEAD,
12194 REGNO (XEXP (note, 0)))
12195 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
12196 {
12197 PUT_REG_NOTE_KIND (note, REG_DEAD);
12198 place = i3;
12199 }
12200 break;
12201
12202 case REG_EQUAL:
12203 case REG_EQUIV:
12204 case REG_NOALIAS:
12205 /* These notes say something about results of an insn. We can
12206 only support them if they used to be on I3 in which case they
12207 remain on I3. Otherwise they are ignored.
12208
12209 If the note refers to an expression that is not a constant, we
12210 must also ignore the note since we cannot tell whether the
12211 equivalence is still true. It might be possible to do
12212 slightly better than this (we only have a problem if I2DEST
12213 or I1DEST is present in the expression), but it doesn't
12214 seem worth the trouble. */
12215
12216 if (from_insn == i3
12217 && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
12218 place = i3;
12219 break;
12220
12221 case REG_INC:
12222 case REG_NO_CONFLICT:
12223 /* These notes say something about how a register is used. They must
12224 be present on any use of the register in I2 or I3. */
12225 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
12226 place = i3;
12227
12228 if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
12229 {
12230 if (place)
12231 place2 = i2;
12232 else
12233 place = i2;
12234 }
12235 break;
12236
12237 case REG_LABEL:
12238 /* This can show up in several ways -- either directly in the
12239 pattern, or hidden off in the constant pool with (or without?)
12240 a REG_EQUAL note. */
12241 /* ??? Ignore the without-reg_equal-note problem for now. */
12242 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))
12243 || ((tem = find_reg_note (i3, REG_EQUAL, NULL_RTX))
12244 && GET_CODE (XEXP (tem, 0)) == LABEL_REF
12245 && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0)))
12246 place = i3;
12247
12248 if (i2
12249 && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2))
12250 || ((tem = find_reg_note (i2, REG_EQUAL, NULL_RTX))
12251 && GET_CODE (XEXP (tem, 0)) == LABEL_REF
12252 && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0))))
12253 {
12254 if (place)
12255 place2 = i2;
12256 else
12257 place = i2;
12258 }
12259 break;
12260
12261 case REG_NONNEG:
12262 case REG_WAS_0:
12263 /* These notes say something about the value of a register prior
12264 to the execution of an insn. It is too much trouble to see
12265 if the note is still correct in all situations. It is better
12266 to simply delete it. */
12267 break;
12268
12269 case REG_RETVAL:
12270 /* If the insn previously containing this note still exists,
12271 put it back where it was. Otherwise move it to the previous
12272 insn. Adjust the corresponding REG_LIBCALL note. */
12273 if (GET_CODE (from_insn) != NOTE)
12274 place = from_insn;
12275 else
12276 {
12277 tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
12278 place = prev_real_insn (from_insn);
12279 if (tem && place)
12280 XEXP (tem, 0) = place;
12281 /* If we're deleting the last remaining instruction of a
12282 libcall sequence, don't add the notes. */
12283 else if (XEXP (note, 0) == from_insn)
12284 tem = place = 0;
12285 }
12286 break;
12287
12288 case REG_LIBCALL:
12289 /* This is handled similarly to REG_RETVAL. */
12290 if (GET_CODE (from_insn) != NOTE)
12291 place = from_insn;
12292 else
12293 {
12294 tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
12295 place = next_real_insn (from_insn);
12296 if (tem && place)
12297 XEXP (tem, 0) = place;
12298 /* If we're deleting the last remaining instruction of a
12299 libcall sequence, don't add the notes. */
12300 else if (XEXP (note, 0) == from_insn)
12301 tem = place = 0;
12302 }
12303 break;
12304
12305 case REG_DEAD:
12306 /* If the register is used as an input in I3, it dies there.
12307 Similarly for I2, if it is non-zero and adjacent to I3.
12308
12309 If the register is not used as an input in either I3 or I2
12310 and it is not one of the registers we were supposed to eliminate,
12311 there are two possibilities. We might have a non-adjacent I2
12312 or we might have somehow eliminated an additional register
12313 from a computation. For example, we might have had A & B where
12314 we discover that B will always be zero. In this case we will
12315 eliminate the reference to A.
12316
12317 In both cases, we must search to see if we can find a previous
12318 use of A and put the death note there. */
12319
12320 if (from_insn
12321 && GET_CODE (from_insn) == CALL_INSN
12322 && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
12323 place = from_insn;
12324 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
12325 place = i3;
12326 else if (i2 != 0 && next_nonnote_insn (i2) == i3
12327 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
12328 place = i2;
12329
12330 if (rtx_equal_p (XEXP (note, 0), elim_i2)
12331 || rtx_equal_p (XEXP (note, 0), elim_i1))
12332 break;
12333
12334 if (place == 0)
12335 {
12336 basic_block bb = BASIC_BLOCK (this_basic_block);
12337
12338 for (tem = PREV_INSN (i3); place == 0; tem = PREV_INSN (tem))
12339 {
12340 if (! INSN_P (tem))
12341 {
12342 if (tem == bb->head)
12343 break;
12344 continue;
12345 }
12346
12347 /* If the register is being set at TEM, see if that is all
12348 TEM is doing. If so, delete TEM. Otherwise, make this
12349 into a REG_UNUSED note instead. */
12350 if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
12351 {
12352 rtx set = single_set (tem);
12353 rtx inner_dest = 0;
12354 #ifdef HAVE_cc0
12355 rtx cc0_setter = NULL_RTX;
12356 #endif
12357
12358 if (set != 0)
12359 for (inner_dest = SET_DEST (set);
12360 (GET_CODE (inner_dest) == STRICT_LOW_PART
12361 || GET_CODE (inner_dest) == SUBREG
12362 || GET_CODE (inner_dest) == ZERO_EXTRACT);
12363 inner_dest = XEXP (inner_dest, 0))
12364 ;
12365
12366 /* Verify that it was the set, and not a clobber that
12367 modified the register.
12368
12369 CC0 targets must be careful to maintain setter/user
12370 pairs. If we cannot delete the setter due to side
12371 effects, mark the user with an UNUSED note instead
12372 of deleting it. */
12373
12374 if (set != 0 && ! side_effects_p (SET_SRC (set))
12375 && rtx_equal_p (XEXP (note, 0), inner_dest)
12376 #ifdef HAVE_cc0
12377 && (! reg_mentioned_p (cc0_rtx, SET_SRC (set))
12378 || ((cc0_setter = prev_cc0_setter (tem)) != NULL
12379 && sets_cc0_p (PATTERN (cc0_setter)) > 0))
12380 #endif
12381 )
12382 {
12383 /* Move the notes and links of TEM elsewhere.
12384 This might delete other dead insns recursively.
12385 First set the pattern to something that won't use
12386 any register. */
12387
12388 PATTERN (tem) = pc_rtx;
12389
12390 distribute_notes (REG_NOTES (tem), tem, tem,
12391 NULL_RTX, NULL_RTX, NULL_RTX);
12392 distribute_links (LOG_LINKS (tem));
12393
12394 PUT_CODE (tem, NOTE);
12395 NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
12396 NOTE_SOURCE_FILE (tem) = 0;
12397
12398 #ifdef HAVE_cc0
12399 /* Delete the setter too. */
12400 if (cc0_setter)
12401 {
12402 PATTERN (cc0_setter) = pc_rtx;
12403
12404 distribute_notes (REG_NOTES (cc0_setter),
12405 cc0_setter, cc0_setter,
12406 NULL_RTX, NULL_RTX, NULL_RTX);
12407 distribute_links (LOG_LINKS (cc0_setter));
12408
12409 PUT_CODE (cc0_setter, NOTE);
12410 NOTE_LINE_NUMBER (cc0_setter)
12411 = NOTE_INSN_DELETED;
12412 NOTE_SOURCE_FILE (cc0_setter) = 0;
12413 }
12414 #endif
12415 }
12416 /* If the register is both set and used here, put the
12417 REG_DEAD note here, but place a REG_UNUSED note
12418 here too unless there already is one. */
12419 else if (reg_referenced_p (XEXP (note, 0),
12420 PATTERN (tem)))
12421 {
12422 place = tem;
12423
12424 if (! find_regno_note (tem, REG_UNUSED,
12425 REGNO (XEXP (note, 0))))
12426 REG_NOTES (tem)
12427 = gen_rtx_EXPR_LIST (REG_UNUSED, XEXP (note, 0),
12428 REG_NOTES (tem));
12429 }
12430 else
12431 {
12432 PUT_REG_NOTE_KIND (note, REG_UNUSED);
12433
12434 /* If there isn't already a REG_UNUSED note, put one
12435 here. */
12436 if (! find_regno_note (tem, REG_UNUSED,
12437 REGNO (XEXP (note, 0))))
12438 place = tem;
12439 break;
12440 }
12441 }
12442 else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
12443 || (GET_CODE (tem) == CALL_INSN
12444 && find_reg_fusage (tem, USE, XEXP (note, 0))))
12445 {
12446 place = tem;
12447
12448 /* If we are doing a 3->2 combination, and we have a
12449 register which formerly died in i3 and was not used
12450 by i2, which now no longer dies in i3 and is used in
12451 i2 but does not die in i2, and place is between i2
12452 and i3, then we may need to move a link from place to
12453 i2. */
12454 if (i2 && INSN_UID (place) <= max_uid_cuid
12455 && INSN_CUID (place) > INSN_CUID (i2)
12456 && from_insn
12457 && INSN_CUID (from_insn) > INSN_CUID (i2)
12458 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
12459 {
12460 rtx links = LOG_LINKS (place);
12461 LOG_LINKS (place) = 0;
12462 distribute_links (links);
12463 }
12464 break;
12465 }
12466
12467 if (tem == bb->head)
12468 break;
12469 }
12470
12471 /* We haven't found an insn for the death note and it
12472 is still a REG_DEAD note, but we have hit the beginning
12473 of the block. If the existing life info says the reg
12474 was dead, there's nothing left to do. Otherwise, we'll
12475 need to do a global life update after combine. */
12476 if (REG_NOTE_KIND (note) == REG_DEAD && place == 0
12477 && REGNO_REG_SET_P (bb->global_live_at_start,
12478 REGNO (XEXP (note, 0))))
12479 {
12480 SET_BIT (refresh_blocks, this_basic_block);
12481 need_refresh = 1;
12482 }
12483 }
12484
/* If the register is set or already dead at PLACE, we needn't do
   anything with this note if it is still a REG_DEAD note.
   We can get here if the register is set at all, not only if it is
   totally replaced, which is what `dead_or_set_p' checks, so we must
   also check for it being set partially.  */
12490
12491 if (place && REG_NOTE_KIND (note) == REG_DEAD)
12492 {
12493 unsigned int regno = REGNO (XEXP (note, 0));
12494
12495 if (dead_or_set_p (place, XEXP (note, 0))
12496 || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
12497 {
12498 /* Unless the register previously died in PLACE, clear
12499 reg_last_death. [I no longer understand why this is
12500 being done.] */
12501 if (reg_last_death[regno] != place)
12502 reg_last_death[regno] = 0;
12503 place = 0;
12504 }
12505 else
12506 reg_last_death[regno] = place;
12507
12508 /* If this is a death note for a hard reg that is occupying
12509 multiple registers, ensure that we are still using all
12510 parts of the object. If we find a piece of the object
12511 that is unused, we must arrange for an appropriate REG_DEAD
12512 note to be added for it. However, we can't just emit a USE
12513 and tag the note to it, since the register might actually
be dead; so we recurse, and the recursive call then finds
12515 the previous insn that used this register. */
12516
12517 if (place && regno < FIRST_PSEUDO_REGISTER
12518 && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
12519 {
12520 unsigned int endregno
12521 = regno + HARD_REGNO_NREGS (regno,
12522 GET_MODE (XEXP (note, 0)));
12523 int all_used = 1;
12524 unsigned int i;
12525
12526 for (i = regno; i < endregno; i++)
12527 if ((! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
12528 && ! find_regno_fusage (place, USE, i))
12529 || dead_or_set_regno_p (place, i))
12530 all_used = 0;
12531
12532 if (! all_used)
12533 {
12534 /* Put only REG_DEAD notes for pieces that are
12535 not already dead or set. */
12536
12537 for (i = regno; i < endregno;
12538 i += HARD_REGNO_NREGS (i, reg_raw_mode[i]))
12539 {
12540 rtx piece = gen_rtx_REG (reg_raw_mode[i], i);
12541 basic_block bb = BASIC_BLOCK (this_basic_block);
12542
12543 if (! dead_or_set_p (place, piece)
12544 && ! reg_bitfield_target_p (piece,
12545 PATTERN (place)))
12546 {
12547 rtx new_note
12548 = gen_rtx_EXPR_LIST (REG_DEAD, piece, NULL_RTX);
12549
12550 distribute_notes (new_note, place, place,
12551 NULL_RTX, NULL_RTX, NULL_RTX);
12552 }
12553 else if (! refers_to_regno_p (i, i + 1,
12554 PATTERN (place), 0)
12555 && ! find_regno_fusage (place, USE, i))
12556 for (tem = PREV_INSN (place); ;
12557 tem = PREV_INSN (tem))
12558 {
12559 if (! INSN_P (tem))
12560 {
12561 if (tem == bb->head)
12562 {
12563 SET_BIT (refresh_blocks,
12564 this_basic_block);
12565 need_refresh = 1;
12566 break;
12567 }
12568 continue;
12569 }
12570 if (dead_or_set_p (tem, piece)
12571 || reg_bitfield_target_p (piece,
12572 PATTERN (tem)))
12573 {
12574 REG_NOTES (tem)
12575 = gen_rtx_EXPR_LIST (REG_UNUSED, piece,
12576 REG_NOTES (tem));
12577 break;
12578 }
12579 }
12580
12581 }
12582
12583 place = 0;
12584 }
12585 }
12586 }
12587 break;
12588
12589 default:
12590 /* Any other notes should not be present at this point in the
12591 compilation. */
12592 abort ();
12593 }
12594
12595 if (place)
12596 {
12597 XEXP (note, 1) = REG_NOTES (place);
12598 REG_NOTES (place) = note;
12599 }
12600 else if ((REG_NOTE_KIND (note) == REG_DEAD
12601 || REG_NOTE_KIND (note) == REG_UNUSED)
12602 && GET_CODE (XEXP (note, 0)) == REG)
12603 REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
12604
12605 if (place2)
12606 {
12607 if ((REG_NOTE_KIND (note) == REG_DEAD
12608 || REG_NOTE_KIND (note) == REG_UNUSED)
12609 && GET_CODE (XEXP (note, 0)) == REG)
12610 REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
12611
12612 REG_NOTES (place2) = gen_rtx_fmt_ee (GET_CODE (note),
12613 REG_NOTE_KIND (note),
12614 XEXP (note, 0),
12615 REG_NOTES (place2));
12616 }
12617 }
12618 }
12619 \f
12620 /* Similarly to above, distribute the LOG_LINKS that used to be present on
12621 I3, I2, and I1 to new locations. This is also called in one case to
12622 add a link pointing at I3 when I3's destination is changed. */
12623
12624 static void
12625 distribute_links (links)
12626 rtx links;
12627 {
12628 rtx link, next_link;
12629
12630 for (link = links; link; link = next_link)
12631 {
12632 rtx place = 0;
12633 rtx insn;
12634 rtx set, reg;
12635
12636 next_link = XEXP (link, 1);
12637
12638 /* If the insn that this link points to is a NOTE or isn't a single
12639 set, ignore it. In the latter case, it isn't clear what we
12640 can do other than ignore the link, since we can't tell which
12641 register it was for. Such links wouldn't be used by combine
12642 anyway.
12643
12644 It is not possible for the destination of the target of the link to
have been changed by combine.  The only way this could happen is if we
12646 replace I3, I2, and I1 by I3 and I2. But in that case the
12647 destination of I2 also remains unchanged. */
12648
12649 if (GET_CODE (XEXP (link, 0)) == NOTE
12650 || (set = single_set (XEXP (link, 0))) == 0)
12651 continue;
12652
12653 reg = SET_DEST (set);
12654 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
12655 || GET_CODE (reg) == SIGN_EXTRACT
12656 || GET_CODE (reg) == STRICT_LOW_PART)
12657 reg = XEXP (reg, 0);
12658
12659 /* A LOG_LINK is defined as being placed on the first insn that uses
12660 a register and points to the insn that sets the register. Start
12661 searching at the next insn after the target of the link and stop
12662 when we reach a set of the register or the end of the basic block.
12663
12664 Note that this correctly handles the link that used to point from
12665 I3 to I2. Also note that not much searching is typically done here
12666 since most links don't point very far away. */
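     /* A sketch with hypothetical insns: if insn A sets reg 65 and
	insn B is the first later insn in this block to reference
	reg 65, the link ends up on B pointing back at A.  */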
12667
12668 for (insn = NEXT_INSN (XEXP (link, 0));
12669 (insn && (this_basic_block == n_basic_blocks - 1
12670 || BLOCK_HEAD (this_basic_block + 1) != insn));
12671 insn = NEXT_INSN (insn))
12672 if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
12673 {
12674 if (reg_referenced_p (reg, PATTERN (insn)))
12675 place = insn;
12676 break;
12677 }
12678 else if (GET_CODE (insn) == CALL_INSN
12679 && find_reg_fusage (insn, USE, reg))
12680 {
12681 place = insn;
12682 break;
12683 }
12684
12685 /* If we found a place to put the link, place it there unless there
12686 is already a link to the same insn as LINK at that point. */
12687
12688 if (place)
12689 {
12690 rtx link2;
12691
12692 for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
12693 if (XEXP (link2, 0) == XEXP (link, 0))
12694 break;
12695
12696 if (link2 == 0)
12697 {
12698 XEXP (link, 1) = LOG_LINKS (place);
12699 LOG_LINKS (place) = link;
12700
12701 /* Set added_links_insn to the earliest insn we added a
12702 link to. */
12703 if (added_links_insn == 0
12704 || INSN_CUID (added_links_insn) > INSN_CUID (place))
12705 added_links_insn = place;
12706 }
12707 }
12708 }
12709 }
12710 \f
12711 /* Compute INSN_CUID for INSN, which is an insn made by combine. */
12712
12713 static int
12714 insn_cuid (insn)
12715 rtx insn;
12716 {
12717 while (insn != 0 && INSN_UID (insn) > max_uid_cuid
12718 && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE)
12719 insn = NEXT_INSN (insn);
12720
12721 if (INSN_UID (insn) > max_uid_cuid)
12722 abort ();
12723
12724 return INSN_CUID (insn);
12725 }
12726 \f
12727 void
12728 dump_combine_stats (file)
12729 FILE *file;
12730 {
12731 fnotice
12732 (file,
12733 ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
12734 combine_attempts, combine_merges, combine_extras, combine_successes);
12735 }
12736
12737 void
12738 dump_combine_total_stats (file)
12739 FILE *file;
12740 {
12741 fnotice
12742 (file,
12743 "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
12744 total_attempts, total_merges, total_extras, total_successes);
12745 }