1 /* Common subexpression elimination for GNU compiler.
2 Copyright (C) 1987, 88, 89, 92, 93, 1994 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21 #include "config.h"
22 /* Must precede rtl.h for FFS. */
23 #include <stdio.h>
24
25 #include "rtl.h"
26 #include "regs.h"
27 #include "hard-reg-set.h"
28 #include "flags.h"
29 #include "real.h"
30 #include "insn-config.h"
31 #include "recog.h"
32
33 #include <setjmp.h>
34
35 /* The basic idea of common subexpression elimination is to go
36 through the code, keeping a record of expressions that would
37 have the same value at the current scan point, and replacing
38 expressions encountered with the cheapest equivalent expression.
39
40 It is too complicated to keep track of the different possibilities
41 when control paths merge; so, at each label, we forget all that is
42 known and start fresh. This can be described as processing each
43 basic block separately. Note, however, that these are not quite
44 the same as the basic blocks found by a later pass and used for
45 data flow analysis and register packing. We do not need to start fresh
46 after a conditional jump instruction if there is no label there.
47
48 We use two data structures to record the equivalent expressions:
49 a hash table for most expressions, and several vectors together
50 with "quantity numbers" to record equivalent (pseudo) registers.
51
52 The use of the special data structure for registers is desirable
53 because it is faster. It is possible because register references
54 contain a fairly small number, the register number, taken from
55 a contiguously allocated series, and two register references are
56 identical if they have the same number. General expressions
57 do not have any such thing, so the only way to retrieve the
58 information recorded on an expression other than a register
59 is to keep it in a hash table.
60
61 Registers and "quantity numbers":
62
63 At the start of each basic block, all of the (hardware and pseudo)
64 registers used in the function are given distinct quantity
65 numbers to indicate their contents. During scan, when the code
66 copies one register into another, we copy the quantity number.
67 When a register is loaded in any other way, we allocate a new
68 quantity number to describe the value generated by this operation.
69 `reg_qty' records what quantity a register is currently thought
70 of as containing.
71
72 All real quantity numbers are greater than or equal to `max_reg'.
73 If register N has not been assigned a quantity, reg_qty[N] will equal N.
74
75 Quantity numbers below `max_reg' do not exist and none of the `qty_...'
76 variables should be referenced with an index below `max_reg'.
77
78 We also maintain a bidirectional chain of registers for each
79 quantity number. `qty_first_reg', `qty_last_reg',
80 `reg_next_eqv' and `reg_prev_eqv' hold these chains.
81
82 The first register in a chain is the one whose lifespan is least local.
83 Among equals, it is the one that was seen first.
84 We replace any equivalent register with that one.
85
86 If two registers have the same quantity number, then REG expressions
87 with `qty_mode' must be in the hash table for both registers and must
88 be in the same class.
89
90 The converse is not true. Since hard registers may be referenced in
91 any mode, two REG expressions might be equivalent in the hash table
92 but not have the same quantity number if the quantity of one of the
93 registers does not have the same mode as those expressions.
94
95 Constants and quantity numbers:
96
97 When a quantity has a known constant value, that value is stored
98 in the appropriate element of qty_const. This is in addition to
99 putting the constant in the hash table as is usual for non-regs.
100
101 Whether a reg or a constant is preferred is determined by the configuration
102 macro CONST_COSTS and will often depend on the constant value. In any
103 event, expressions containing constants can be simplified by fold_rtx.
104
105 When a quantity has a known nearly constant value (such as an address
106 of a stack slot), that value is stored in the appropriate element
107 of qty_const.
108
109 Integer constants don't have a machine mode. However, cse
110 determines the intended machine mode from the destination
111 of the instruction that moves the constant. The machine mode
112 is recorded in the hash table along with the actual RTL
113 constant expression so that different modes are kept separate.
114
115 Other expressions:
116
117 To record known equivalences among expressions in general
118 we use a hash table called `table'. It has a fixed number of buckets
119 that contain chains of `struct table_elt' elements for expressions.
120 These chains connect the elements whose expressions have the same
121 hash codes.
122
123 Other chains through the same elements connect the elements which
124 currently have equivalent values.
125
126 Register references in an expression are canonicalized before hashing
127 the expression. This is done using `reg_qty' and `qty_first_reg'.
128 The hash code of a register reference is computed using the quantity
129 number, not the register number.
130
131 When the value of an expression changes, it is necessary to remove from the
132 hash table not just that expression but all expressions whose values
133 could be different as a result.
134
135 1. If the value changing is in memory, except in special cases
136 ANYTHING referring to memory could be changed. That is because
137 nobody knows where a pointer does not point.
138 The function `invalidate_memory' removes what is necessary.
139
140 The special cases are when the address is constant or is
141 a constant plus a fixed register such as the frame pointer
142 or a static chain pointer. When such addresses are stored in,
143 we can tell exactly which other such addresses must be invalidated
144 due to overlap. `invalidate' does this.
145 All expressions that refer to non-constant
146 memory addresses are also invalidated. `invalidate_memory' does this.
147
148 2. If the value changing is a register, all expressions
149 containing references to that register, and only those,
150 must be removed.
151
152 Because searching the entire hash table for expressions that contain
153 a register is very slow, we try to figure out when it isn't necessary.
154 Precisely, this is necessary only when expressions have been
155 entered in the hash table using this register, and then the value has
156 changed, and then another expression wants to be added to refer to
157 the register's new value. This sequence of circumstances is rare
158 within any one basic block.
159
160 The vectors `reg_tick' and `reg_in_table' are used to detect this case.
161 reg_tick[i] is incremented whenever a value is stored in register i.
162 reg_in_table[i] holds -1 if no references to register i have been
163 entered in the table; otherwise, it contains the value reg_tick[i] had
164 when the references were entered. If we want to enter a reference
165 and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
166 Until we want to enter a new entry, the mere fact that the two vectors
167 don't match causes the entries to be ignored if anyone tries to match them.
168
169 Registers themselves are entered in the hash table as well as in
170 the equivalent-register chains. However, the vectors `reg_tick'
171 and `reg_in_table' do not apply to expressions which are simple
172 register references. These expressions are removed from the table
173 immediately when they become invalid, and this can be done even if
174 we do not immediately search for all the expressions that refer to
175 the register.
176
177 A CLOBBER rtx in an instruction invalidates its operand for further
178 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
179 invalidates everything that resides in memory.
180
181 Related expressions:
182
183 Constant expressions that differ only by an additive integer
184 are called related. When a constant expression is put in
185 the table, the related expression with no constant term
186 is also entered. These are made to point at each other
187 so that it is possible to find out if there exists any
188 register equivalent to an expression related to a given expression. */
189
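/* Added illustration (not part of the original source): a minimal,
   self-contained sketch of the quantity-number scheme described above.
   The `sketch_' names are hypothetical stand-ins for the real `reg_qty'
   and `next_qty' declared below; the real code also maintains the
   per-quantity register chains.  Kept under `#if 0' so it is never
   compiled.  */
#if 0
static int sketch_reg_qty[100];	/* quantity currently held by each reg */
static int sketch_next_qty;	/* next unused quantity number */

static void
sketch_copy_reg (dest, src)
     int dest, src;
{
  /* A register-to-register copy: DEST now holds the same value as SRC,
     so it shares SRC's quantity number.  */
  sketch_reg_qty[dest] = sketch_reg_qty[src];
}

static void
sketch_load_reg (dest)
     int dest;
{
  /* Any other store into DEST: allocate a fresh quantity to describe
     the new, unknown value.  */
  sketch_reg_qty[dest] = sketch_next_qty++;
}
#endif
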
190 /* One plus largest register number used in this function. */
191
192 static int max_reg;
193
194 /* Length of vectors indexed by quantity number.
195 We know in advance we will not need a quantity number this big. */
196
197 static int max_qty;
198
199 /* Next quantity number to be allocated.
200 This is 1 + the largest number needed so far. */
201
202 static int next_qty;
203
204 /* Indexed by quantity number, gives the first (or last) (pseudo) register
205 in the chain of registers that currently contain this quantity. */
206
207 static int *qty_first_reg;
208 static int *qty_last_reg;
209
211 /* Indexed by quantity number, gives the mode of the quantity. */
211
212 static enum machine_mode *qty_mode;
213
214 /* Indexed by quantity number, gives the rtx of the constant value of the
215 quantity, or zero if it does not have a known value.
216 A sum of the frame pointer (or arg pointer) plus a constant
217 can also be entered here. */
218
219 static rtx *qty_const;
220
221 /* Indexed by qty number, gives the insn that stored the constant value
222 recorded in `qty_const'. */
223
224 static rtx *qty_const_insn;
225
226 /* The next three variables are used to track when a comparison between a
227 quantity and some constant or register has been passed. In that case, we
228 know the results of the comparison in case we see it again. These variables
229 record a comparison that is known to be true. */
230
231 /* Indexed by qty number, gives the rtx code of a comparison with a known
232 result involving this quantity. If none, it is UNKNOWN. */
233 static enum rtx_code *qty_comparison_code;
234
235 /* Indexed by qty number, gives the constant being compared against in a
236 comparison of known result. If no such comparison, it is undefined.
237 If the comparison is not with a constant, it is zero. */
238
239 static rtx *qty_comparison_const;
240
241 /* Indexed by qty number, gives the quantity being compared against in a
242 comparison of known result. If no such comparison, it is undefined.
243 If the comparison is not with a register, it is -1. */
244
245 static int *qty_comparison_qty;
246
247 #ifdef HAVE_cc0
248 /* For machines that have a CC0, we do not record its value in the hash
249 table since its use is guaranteed to be the insn immediately following
250 its definition and any other insn is presumed to invalidate it.
251
252 Instead, we store below the value last assigned to CC0. If it should
253 happen to be a constant, it is stored in preference to the actual
254 assigned value. In case it is a constant, we store the mode in which
255 the constant should be interpreted. */
256
257 static rtx prev_insn_cc0;
258 static enum machine_mode prev_insn_cc0_mode;
259 #endif
260
261 /* Previous actual insn. 0 if at first insn of basic block. */
262
263 static rtx prev_insn;
264
265 /* Insn being scanned. */
266
267 static rtx this_insn;
268
269 /* Indexed by (pseudo) register number, gives the quantity number
270 of the register's current contents. */
271
272 static int *reg_qty;
273
274 /* Indexed by (pseudo) register number, gives the number of the next (or
275 previous) (pseudo) register in the chain of registers sharing the same
276 value.
277
278 Or -1 if this register is at the end of the chain.
279
280 If reg_qty[N] == N, reg_next_eqv[N] is undefined. */
281
282 static int *reg_next_eqv;
283 static int *reg_prev_eqv;
284
285 /* Indexed by (pseudo) register number, gives the number of times
286 that register has been altered in the current basic block. */
287
288 static int *reg_tick;
289
290 /* Indexed by (pseudo) register number, gives the reg_tick value at which
291 rtx's containing this register are valid in the hash table.
292 If this does not equal the current reg_tick value, such expressions
293 existing in the hash table are invalid.
294 If this is -1, no expressions containing this register have been
295 entered in the table. */
296
297 static int *reg_in_table;
298
299 /* A HARD_REG_SET containing all the hard registers for which there is
300 currently a REG expression in the hash table. Note the difference
301 from the above variables, which indicate if the REG is mentioned in some
302 expression in the table. */
303
304 static HARD_REG_SET hard_regs_in_table;
305
306 /* A HARD_REG_SET containing all the hard registers that are invalidated
307 by a CALL_INSN. */
308
309 static HARD_REG_SET regs_invalidated_by_call;
310
311 /* Two vectors of ints:
312 one of max_reg elements each containing -1; the other of max_reg + 500
313 elements (an approximation of max_qty), where element i contains i.
314 These are used to initialize various other vectors fast. */
315
316 static int *all_minus_one;
317 static int *consec_ints;
318
319 /* CUID of insn that starts the basic block currently being cse-processed. */
320
321 static int cse_basic_block_start;
322
323 /* CUID of insn that ends the basic block currently being cse-processed. */
324
325 static int cse_basic_block_end;
326
327 /* Vector mapping INSN_UIDs to cuids.
328 The cuids are like uids but always increase monotonically.
329 We use them to see whether a reg is used outside a given basic block. */
330
331 static int *uid_cuid;
332
333 /* Highest UID in UID_CUID. */
334 static int max_uid;
335
336 /* Get the cuid of an insn. */
337
338 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
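/* Added note: unlike uids, which can end up out of textual order after
   earlier passes shuffle insns, the cuids are assigned in insn-stream
   order at the start of each cse pass, so INSN_CUID (A) < INSN_CUID (B)
   exactly when A precedes B in the stream.  */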
339
340 /* Nonzero if cse has altered conditional jump insns
341 in such a way that jump optimization should be redone. */
342
343 static int cse_jumps_altered;
344
345 /* canon_hash stores 1 in do_not_record
346 if it notices a reference to CC0, PC, or some other volatile
347 subexpression. */
348
349 static int do_not_record;
350
351 #ifdef LOAD_EXTEND_OP
352
353 /* Scratch rtl used when looking for load-extended copy of a MEM. */
354 static rtx memory_extend_rtx;
355 #endif
356
357 /* canon_hash stores 1 in hash_arg_in_memory
358 if it notices a reference to memory within the expression being hashed. */
359
360 static int hash_arg_in_memory;
361
362 /* canon_hash stores 1 in hash_arg_in_struct
363 if it notices a reference to memory that's part of a structure. */
364
365 static int hash_arg_in_struct;
366
367 /* The hash table contains buckets which are chains of `struct table_elt's,
368 each recording one expression's information.
369 That expression is in the `exp' field.
370
371 Those elements with the same hash code are chained in both directions
372 through the `next_same_hash' and `prev_same_hash' fields.
373
374 Each set of expressions with equivalent values
375 are on a two-way chain through the `next_same_value'
376 and `prev_same_value' fields, and all point with
377 the `first_same_value' field at the first element in
378 that chain. The chain is in order of increasing cost.
379 Each element's cost value is in its `cost' field.
380
381 The `in_memory' field is nonzero for elements that
382 involve any reference to memory. These elements are removed
383 whenever a write is done to an unidentified location in memory.
384 To be safe, we assume that a memory address is unidentified unless
385 the address is either a symbol constant or a constant plus
386 the frame pointer or argument pointer.
387
388 The `in_struct' field is nonzero for elements that
389 involve any reference to memory inside a structure or array.
390
391 The `related_value' field is used to connect related expressions
392 (that differ by adding an integer).
393 The related expressions are chained in a circular fashion.
394 `related_value' is zero for expressions for which this
395 chain is not useful.
396
397 The `cost' field stores the cost of this element's expression.
398
399 The `is_const' flag is set if the element is a constant (including
400 a fixed address).
401
402 The `flag' field is used as a temporary during some search routines.
403
404 The `mode' field is usually the same as GET_MODE (`exp'), but
405 if `exp' is a CONST_INT and has no machine mode then the `mode'
406 field is the mode it was being used as. Each constant is
407 recorded separately for each mode it is used with. */
408
409
410 struct table_elt
411 {
412 rtx exp;
413 struct table_elt *next_same_hash;
414 struct table_elt *prev_same_hash;
415 struct table_elt *next_same_value;
416 struct table_elt *prev_same_value;
417 struct table_elt *first_same_value;
418 struct table_elt *related_value;
419 int cost;
420 enum machine_mode mode;
421 char in_memory;
422 char in_struct;
423 char is_const;
424 char flag;
425 };
426
427 /* We don't want a lot of buckets, because we rarely have very many
428 things stored in the hash table, and a lot of buckets slows
429 down a lot of loops that happen frequently. */
430 #define NBUCKETS 31
431
432 /* Compute hash code of X in mode M. The case where X is a pseudo
433 register is special (hard registers may require `do_not_record' to be set). */
434
435 #define HASH(X, M) \
436 (GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER \
437 ? (((unsigned) REG << 7) + (unsigned) reg_qty[REGNO (X)]) % NBUCKETS \
438 : canon_hash (X, M) % NBUCKETS)
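/* Added illustration (hypothetical REG rtxs, not in the original
   source): for pseudo registers the bucket depends only on the quantity
   number, so two pseudos known to hold the same value hash to the same
   bucket regardless of their register numbers.  */
#if 0
static void
sketch_hash_example ()
{
  rtx a = gen_rtx (REG, SImode, FIRST_PSEUDO_REGISTER + 5);
  rtx b = gen_rtx (REG, SImode, FIRST_PSEUDO_REGISTER + 9);

  /* Suppose a copy insn made B equivalent to A ... */
  reg_qty[REGNO (b)] = reg_qty[REGNO (a)];

  /* ... then HASH (a, SImode) == HASH (b, SImode).  */
}
#endif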
439
440 /* Determine whether register number N is considered a fixed register for CSE.
441 It is desirable to replace other regs with fixed regs, to reduce need for
442 non-fixed hard regs.
443 A reg wins if it is either the frame pointer or designated as fixed,
444 but not if it is an overlapping register. */
445 #ifdef OVERLAPPING_REGNO_P
446 #define FIXED_REGNO_P(N) \
447 (((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
448 || fixed_regs[N] || global_regs[N]) \
449 && ! OVERLAPPING_REGNO_P ((N)))
450 #else
451 #define FIXED_REGNO_P(N) \
452 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
453 || fixed_regs[N] || global_regs[N])
454 #endif
455
456 /* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
457 hard registers and pointers into the frame are the cheapest with a cost
458 of 0. Next come pseudos with a cost of 1 and other hard registers with
459 a cost of 2. Aside from these special cases, call `rtx_cost'. */
460
461 #define CHEAP_REGNO(N) \
462 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
463 || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM \
464 || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
465 || ((N) < FIRST_PSEUDO_REGISTER \
466 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
467
468 /* A register is cheap if it is a user variable assigned to a hard register,
469 or if its register number always corresponds to a cheap register. */
470
471 #define CHEAP_REG(N) \
472 ((REG_USERVAR_P (N) && REGNO (N) < FIRST_PSEUDO_REGISTER) \
473 || CHEAP_REGNO (REGNO (N)))
474
475 #define COST(X) \
476 (GET_CODE (X) == REG \
477 ? (CHEAP_REG (X) ? 0 \
478 : REGNO (X) >= FIRST_PSEUDO_REGISTER ? 1 \
479 : 2) \
480 : rtx_cost (X, SET) * 2)
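/* Added examples of the resulting costs: the frame pointer costs 0, an
   ordinary pseudo costs 1, a non-fixed hard register costs 2, and any
   other rtx costs twice `rtx_cost'; e.g. a MULT by a constant that is
   not a power of 2 contributes COSTS_N_INSNS (5) = 18 to `rtx_cost',
   so at least 36 here (plus the doubled operand costs).  */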
481
482 /* Determine if the quantity number for register N represents a valid index
483 into the `qty_...' variables. */
484
485 #define REGNO_QTY_VALID_P(N) (reg_qty[N] != (N))
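/* Added note: this works because `new_basic_block' initializes reg_qty
   so that reg_qty[N] == N for every register; real quantities are all
   >= max_reg, hence never equal to N, so a register has been assigned a
   quantity exactly when its entry no longer equals its own number.  */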
486
487 static struct table_elt *table[NBUCKETS];
488
489 /* Chain of `struct table_elt's made so far for this function
490 but currently removed from the table. */
491
492 static struct table_elt *free_element_chain;
493
494 /* Number of `struct table_elt' structures made so far for this function. */
495
496 static int n_elements_made;
497
498 /* Maximum value `n_elements_made' has had so far in this compilation
499 for functions previously processed. */
500
501 static int max_elements_made;
502
503 /* Surviving equivalence class when two equivalence classes are merged
504 by recording the effects of a jump in the last insn. Zero if the
505 last insn was not a conditional jump. */
506
507 static struct table_elt *last_jump_equiv_class;
508
509 /* Set to the cost of a constant pool reference if one was found for a
510 symbolic constant. If this was found, it means we should try to
511 convert constants into constant pool entries if they don't fit in
512 the insn. */
513
514 static int constant_pool_entries_cost;
515
516 /* Bits describing what kind of values in memory must be invalidated
517 for a particular instruction. If none of the bits is set,
518 no memory refs need to be invalidated. Each bit is more powerful
519 than the preceding ones, and if a bit is set then the preceding
520 bits are also set.
521
522 Here is how the bits are set:
523 Pushing onto the stack invalidates only the stack pointer,
524 writing at a fixed address invalidates only variable addresses,
525 writing in a structure element at variable address
526 invalidates all but scalar variables,
527 and writing in anything else at variable address invalidates everything. */
528
529 struct write_data
530 {
531 int sp : 1; /* Invalidate stack pointer. */
532 int var : 1; /* Invalidate variable addresses. */
533 int nonscalar : 1; /* Invalidate all but scalar variables. */
534 int all : 1; /* Invalidate all memory refs. */
535 };
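
/* Added examples, restating the rule above: a push through the stack
   pointer sets only `sp'; a store at a fixed address (e.g. through a
   SYMBOL_REF) sets `var' (and hence `sp'); a store into a structure
   element at a varying address sets `nonscalar' as well; and a store
   through an arbitrary computed pointer sets all four bits.  */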
536
537 /* Define maximum length of a branch path. */
538
539 #define PATHLENGTH 10
540
541 /* This data describes a block that will be processed by cse_basic_block. */
542
543 struct cse_basic_block_data {
544 /* Lowest CUID value of insns in block. */
545 int low_cuid;
546 /* Highest CUID value of insns in block. */
547 int high_cuid;
548 /* Total number of SETs in block. */
549 int nsets;
550 /* Last insn in the block. */
551 rtx last;
552 /* Size of current branch path, if any. */
553 int path_size;
554 /* Current branch path, indicating which branches will be taken. */
555 struct branch_path {
556 /* The branch insn. */
557 rtx branch;
558 /* Whether it should be taken or not. AROUND is the same as TAKEN
559 except that it is used when the destination label is not preceded
560 by a BARRIER. */
561 enum taken {TAKEN, NOT_TAKEN, AROUND} status;
562 } path[PATHLENGTH];
563 };
564
565 /* Nonzero if X has the form (PLUS frame-pointer integer). We check for
566 virtual regs here because the simplify_*_operation routines are called
567 by integrate.c, which is called before virtual register instantiation. */
568
569 #define FIXED_BASE_PLUS_P(X) \
570 ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
571 || (X) == arg_pointer_rtx \
572 || (X) == virtual_stack_vars_rtx \
573 || (X) == virtual_incoming_args_rtx \
574 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
575 && (XEXP (X, 0) == frame_pointer_rtx \
576 || XEXP (X, 0) == hard_frame_pointer_rtx \
577 || XEXP (X, 0) == arg_pointer_rtx \
578 || XEXP (X, 0) == virtual_stack_vars_rtx \
579 || XEXP (X, 0) == virtual_incoming_args_rtx)))
580
581 /* Similar, but also allows reference to the stack pointer.
582
583 This used to include FIXED_BASE_PLUS_P; however, we can't assume that
584 arg_pointer_rtx by itself is nonzero, because on at least one machine,
585 the i960, the arg pointer is zero when it is unused. */
586
587 #define NONZERO_BASE_PLUS_P(X) \
588 ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
589 || (X) == virtual_stack_vars_rtx \
590 || (X) == virtual_incoming_args_rtx \
591 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
592 && (XEXP (X, 0) == frame_pointer_rtx \
593 || XEXP (X, 0) == hard_frame_pointer_rtx \
594 || XEXP (X, 0) == arg_pointer_rtx \
595 || XEXP (X, 0) == virtual_stack_vars_rtx \
596 || XEXP (X, 0) == virtual_incoming_args_rtx)) \
597 || (X) == stack_pointer_rtx \
598 || (X) == virtual_stack_dynamic_rtx \
599 || (X) == virtual_outgoing_args_rtx \
600 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
601 && (XEXP (X, 0) == stack_pointer_rtx \
602 || XEXP (X, 0) == virtual_stack_dynamic_rtx \
603 || XEXP (X, 0) == virtual_outgoing_args_rtx)))
604
605 static void new_basic_block PROTO((void));
606 static void make_new_qty PROTO((int));
607 static void make_regs_eqv PROTO((int, int));
608 static void delete_reg_equiv PROTO((int));
609 static int mention_regs PROTO((rtx));
610 static int insert_regs PROTO((rtx, struct table_elt *, int));
611 static void free_element PROTO((struct table_elt *));
612 static void remove_from_table PROTO((struct table_elt *, unsigned));
613 static struct table_elt *get_element PROTO((void));
614 static struct table_elt *lookup PROTO((rtx, unsigned, enum machine_mode)),
615 *lookup_for_remove PROTO((rtx, unsigned, enum machine_mode));
616 static rtx lookup_as_function PROTO((rtx, enum rtx_code));
617 static struct table_elt *insert PROTO((rtx, struct table_elt *, unsigned,
618 enum machine_mode));
619 static void merge_equiv_classes PROTO((struct table_elt *,
620 struct table_elt *));
621 static void invalidate PROTO((rtx));
622 static void remove_invalid_refs PROTO((int));
623 static void rehash_using_reg PROTO((rtx));
624 static void invalidate_memory PROTO((struct write_data *));
625 static void invalidate_for_call PROTO((void));
626 static rtx use_related_value PROTO((rtx, struct table_elt *));
627 static unsigned canon_hash PROTO((rtx, enum machine_mode));
628 static unsigned safe_hash PROTO((rtx, enum machine_mode));
629 static int exp_equiv_p PROTO((rtx, rtx, int, int));
630 static void set_nonvarying_address_components PROTO((rtx, int, rtx *,
631 HOST_WIDE_INT *,
632 HOST_WIDE_INT *));
633 static int refers_to_p PROTO((rtx, rtx));
634 static int refers_to_mem_p PROTO((rtx, rtx, HOST_WIDE_INT,
635 HOST_WIDE_INT));
636 static int cse_rtx_addr_varies_p PROTO((rtx));
637 static rtx canon_reg PROTO((rtx, rtx));
638 static void find_best_addr PROTO((rtx, rtx *));
639 static enum rtx_code find_comparison_args PROTO((enum rtx_code, rtx *, rtx *,
640 enum machine_mode *,
641 enum machine_mode *));
642 static rtx cse_gen_binary PROTO((enum rtx_code, enum machine_mode,
643 rtx, rtx));
644 static rtx simplify_plus_minus PROTO((enum rtx_code, enum machine_mode,
645 rtx, rtx));
646 static rtx fold_rtx PROTO((rtx, rtx));
647 static rtx equiv_constant PROTO((rtx));
648 static void record_jump_equiv PROTO((rtx, int));
649 static void record_jump_cond PROTO((enum rtx_code, enum machine_mode,
650 rtx, rtx, int));
651 static void cse_insn PROTO((rtx, int));
652 static void note_mem_written PROTO((rtx, struct write_data *));
653 static void invalidate_from_clobbers PROTO((struct write_data *, rtx));
654 static rtx cse_process_notes PROTO((rtx, rtx));
655 static void cse_around_loop PROTO((rtx));
656 static void invalidate_skipped_set PROTO((rtx, rtx));
657 static void invalidate_skipped_block PROTO((rtx));
658 static void cse_check_loop_start PROTO((rtx, rtx));
659 static void cse_set_around_loop PROTO((rtx, rtx, rtx));
660 static rtx cse_basic_block PROTO((rtx, rtx, struct branch_path *, int));
661 static void count_reg_usage PROTO((rtx, int *, rtx, int));
662
663 extern int rtx_equal_function_value_matters;
664 \f
665 /* Return an estimate of the cost of computing rtx X.
666 One use is in cse, to decide which expression to keep in the hash table.
667 Another is in rtl generation, to pick the cheapest way to multiply.
668 Other uses like the latter are expected in the future. */
669
670 /* Return the right cost to give to an operation
671 to make the cost of the corresponding register-to-register instruction
672 N times that of a fast register-to-register instruction. */
673
674 #define COSTS_N_INSNS(N) ((N) * 4 - 2)
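/* Added arithmetic: COSTS_N_INSNS (N) == N*4 - 2, so COSTS_N_INSNS (1)
   == 2 and COSTS_N_INSNS (5) == 18.  With the default costs of an
   operation's two register operands added back in by the recursion in
   rtx_cost below, the total comes to roughly 4*N, i.e. N times the
   cost of a fast register-to-register insn.  */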
675
676 int
677 rtx_cost (x, outer_code)
678 rtx x;
679 enum rtx_code outer_code;
680 {
681 register int i, j;
682 register enum rtx_code code;
683 register char *fmt;
684 register int total;
685
686 if (x == 0)
687 return 0;
688
689 /* Compute the default costs of certain things.
690 Note that RTX_COSTS can override the defaults. */
691
692 code = GET_CODE (x);
693 switch (code)
694 {
695 case MULT:
696 /* Count multiplication by 2**n as a shift,
697 because if we are considering it, we would output it as a shift. */
698 if (GET_CODE (XEXP (x, 1)) == CONST_INT
699 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
700 total = 2;
701 else
702 total = COSTS_N_INSNS (5);
703 break;
704 case DIV:
705 case UDIV:
706 case MOD:
707 case UMOD:
708 total = COSTS_N_INSNS (7);
709 break;
710 case USE:
711 /* Used in loop.c and combine.c as a marker. */
712 total = 0;
713 break;
714 case ASM_OPERANDS:
715 /* We don't want these to be used in substitutions because
716 we have no way of validating the resulting insn. So assign
717 anything containing an ASM_OPERANDS a very high cost. */
718 total = 1000;
719 break;
720 default:
721 total = 2;
722 }
723
724 switch (code)
725 {
726 case REG:
727 return ! CHEAP_REG (x);
728
729 case SUBREG:
730 /* If we can't tie these modes, make this expensive. The larger
731 the mode, the more expensive it is. */
732 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
733 return COSTS_N_INSNS (2
734 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
735 return 2;
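      /* Added note: RTX_COSTS and CONST_COSTS are target macros that
         expand into additional `case' labels (with their own `return's)
         for this switch, which is why they can follow an unconditional
         return.  */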
736 #ifdef RTX_COSTS
737 RTX_COSTS (x, code, outer_code);
738 #endif
739 CONST_COSTS (x, code, outer_code);
740 }
741
742 /* Sum the costs of the sub-rtx's, plus cost of this operation,
743 which is already in total. */
744
745 fmt = GET_RTX_FORMAT (code);
746 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
747 if (fmt[i] == 'e')
748 total += rtx_cost (XEXP (x, i), code);
749 else if (fmt[i] == 'E')
750 for (j = 0; j < XVECLEN (x, i); j++)
751 total += rtx_cost (XVECEXP (x, i, j), code);
752
753 return total;
754 }
755 \f
756 /* Clear the hash table and initialize each register with its own quantity,
757 for a new basic block. */
758
759 static void
760 new_basic_block ()
761 {
762 register int i;
763
764 next_qty = max_reg;
765
766 bzero ((char *) reg_tick, max_reg * sizeof (int));
767
768 bcopy ((char *) all_minus_one, (char *) reg_in_table,
769 max_reg * sizeof (int));
770 bcopy ((char *) consec_ints, (char *) reg_qty, max_reg * sizeof (int));
771 CLEAR_HARD_REG_SET (hard_regs_in_table);
772
773 /* The per-quantity values used to be initialized here, but it is
774 much faster to initialize each as it is made in `make_new_qty'. */
775
776 for (i = 0; i < NBUCKETS; i++)
777 {
778 register struct table_elt *this, *next;
779 for (this = table[i]; this; this = next)
780 {
781 next = this->next_same_hash;
782 free_element (this);
783 }
784 }
785
786 bzero ((char *) table, sizeof table);
787
788 prev_insn = 0;
789
790 #ifdef HAVE_cc0
791 prev_insn_cc0 = 0;
792 #endif
793 }
794
795 /* Say that register REG contains a quantity not previously held in any
796 register, and initialize that quantity. */
797
798 static void
799 make_new_qty (reg)
800 register int reg;
801 {
802 register int q;
803
804 if (next_qty >= max_qty)
805 abort ();
806
807 q = reg_qty[reg] = next_qty++;
808 qty_first_reg[q] = reg;
809 qty_last_reg[q] = reg;
810 qty_const[q] = qty_const_insn[q] = 0;
811 qty_comparison_code[q] = UNKNOWN;
812
813 reg_next_eqv[reg] = reg_prev_eqv[reg] = -1;
814 }
815
816 /* Make reg NEW equivalent to reg OLD.
817 OLD is not changing; NEW is. */
818
819 static void
820 make_regs_eqv (new, old)
821 register int new, old;
822 {
823 register int lastr, firstr;
824 register int q = reg_qty[old];
825
826 /* Nothing should become eqv until it has a "non-invalid" qty number. */
827 if (! REGNO_QTY_VALID_P (old))
828 abort ();
829
830 reg_qty[new] = q;
831 firstr = qty_first_reg[q];
832 lastr = qty_last_reg[q];
833
834 /* Prefer fixed hard registers to anything. Prefer pseudo regs to other
835 hard regs. Among pseudos, if NEW will live longer than any other reg
836 of the same qty, and that is beyond the current basic block,
837 make it the new canonical replacement for this qty. */
838 if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
839 /* Certain fixed registers might be of the class NO_REGS. This means
840 that not only can they not be allocated by the compiler, but
841 they cannot be used in substitutions or canonicalizations
842 either. */
843 && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
844 && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
845 || (new >= FIRST_PSEUDO_REGISTER
846 && (firstr < FIRST_PSEUDO_REGISTER
847 || ((uid_cuid[regno_last_uid[new]] > cse_basic_block_end
848 || (uid_cuid[regno_first_uid[new]]
849 < cse_basic_block_start))
850 && (uid_cuid[regno_last_uid[new]]
851 > uid_cuid[regno_last_uid[firstr]]))))))
852 {
853 reg_prev_eqv[firstr] = new;
854 reg_next_eqv[new] = firstr;
855 reg_prev_eqv[new] = -1;
856 qty_first_reg[q] = new;
857 }
858 else
859 {
860 /* If NEW is a hard reg (known to be non-fixed), insert at end.
861 Otherwise, insert before any non-fixed hard regs that are at the
862 end. Registers of class NO_REGS cannot be used as an
863 equivalent for anything. */
864 while (lastr < FIRST_PSEUDO_REGISTER && reg_prev_eqv[lastr] >= 0
865 && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
866 && new >= FIRST_PSEUDO_REGISTER)
867 lastr = reg_prev_eqv[lastr];
868 reg_next_eqv[new] = reg_next_eqv[lastr];
869 if (reg_next_eqv[lastr] >= 0)
870 reg_prev_eqv[reg_next_eqv[lastr]] = new;
871 else
872 qty_last_reg[q] = new;
873 reg_next_eqv[lastr] = new;
874 reg_prev_eqv[new] = lastr;
875 }
876 }
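
/* Added illustration: the chain for a quantity is kept in the order in
   which replacements should be preferred.  E.g. if the frame pointer,
   pseudo 105, and non-fixed hard reg 3 all share quantity Q, the chain
   runs  fp -> 105 -> r3  (fixed hard regs first, then pseudos, then
   other hard regs), and qty_first_reg[Q] names the canonical
   replacement.  */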
877
878 /* Remove REG from its equivalence class. */
879
880 static void
881 delete_reg_equiv (reg)
882 register int reg;
883 {
884 register int q = reg_qty[reg];
885 register int p, n;
886
887 /* If invalid, do nothing. */
888 if (q == reg)
889 return;
890
891 p = reg_prev_eqv[reg];
892 n = reg_next_eqv[reg];
893
894 if (n != -1)
895 reg_prev_eqv[n] = p;
896 else
897 qty_last_reg[q] = p;
898 if (p != -1)
899 reg_next_eqv[p] = n;
900 else
901 qty_first_reg[q] = n;
902
903 reg_qty[reg] = reg;
904 }
905
906 /* Remove any invalid expressions from the hash table
907 that refer to any of the registers contained in expression X.
908
909 Make sure that newly inserted references to those registers
910 as subexpressions will be considered valid.
911
912 mention_regs is not called when a register itself
913 is being stored in the table.
914
915 Return 1 if we have done something that may have changed the hash code
916 of X. */
917
918 static int
919 mention_regs (x)
920 rtx x;
921 {
922 register enum rtx_code code;
923 register int i, j;
924 register char *fmt;
925 register int changed = 0;
926
927 if (x == 0)
928 return 0;
929
930 code = GET_CODE (x);
931 if (code == REG)
932 {
933 register int regno = REGNO (x);
934 register int endregno
935 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
936 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
937 int i;
938
939 for (i = regno; i < endregno; i++)
940 {
941 if (reg_in_table[i] >= 0 && reg_in_table[i] != reg_tick[i])
942 remove_invalid_refs (i);
943
944 reg_in_table[i] = reg_tick[i];
945 }
946
947 return 0;
948 }
949
950 /* If X is a comparison or a COMPARE and either operand is a register
951 that does not have a quantity, give it one. This is so that a later
952 call to record_jump_equiv won't cause X to be assigned a different
953 hash code and not found in the table after that call.
954
955 It is not necessary to do this here, since rehash_using_reg can
956 fix up the table later, but doing this here eliminates the need to
957 call that expensive function in the most common case where the only
958 use of the register is in the comparison. */
959
960 if (code == COMPARE || GET_RTX_CLASS (code) == '<')
961 {
962 if (GET_CODE (XEXP (x, 0)) == REG
963 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
964 if (insert_regs (XEXP (x, 0), NULL_PTR, 0))
965 {
966 rehash_using_reg (XEXP (x, 0));
967 changed = 1;
968 }
969
970 if (GET_CODE (XEXP (x, 1)) == REG
971 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
972 if (insert_regs (XEXP (x, 1), NULL_PTR, 0))
973 {
974 rehash_using_reg (XEXP (x, 1));
975 changed = 1;
976 }
977 }
978
979 fmt = GET_RTX_FORMAT (code);
980 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
981 if (fmt[i] == 'e')
982 changed |= mention_regs (XEXP (x, i));
983 else if (fmt[i] == 'E')
984 for (j = 0; j < XVECLEN (x, i); j++)
985 changed |= mention_regs (XVECEXP (x, i, j));
986
987 return changed;
988 }
989
990 /* Update the register quantities for inserting X into the hash table
991 with a value equivalent to CLASSP.
992 (If the class does not contain a REG, it is irrelevant.)
993 If MODIFIED is nonzero, X is a destination; it is being modified.
994 Note that delete_reg_equiv should be called on a register
995 before insert_regs is done on that register with MODIFIED != 0.
996
997 Nonzero value means that elements of reg_qty have changed
998 so X's hash code may be different. */
999
1000 static int
1001 insert_regs (x, classp, modified)
1002 rtx x;
1003 struct table_elt *classp;
1004 int modified;
1005 {
1006 if (GET_CODE (x) == REG)
1007 {
1008 register int regno = REGNO (x);
1009
1010 /* If REGNO is in the equivalence table already but is of the
1011 wrong mode for that equivalence, don't do anything here. */
1012
1013 if (REGNO_QTY_VALID_P (regno)
1014 && qty_mode[reg_qty[regno]] != GET_MODE (x))
1015 return 0;
1016
1017 if (modified || ! REGNO_QTY_VALID_P (regno))
1018 {
1019 if (classp)
1020 for (classp = classp->first_same_value;
1021 classp != 0;
1022 classp = classp->next_same_value)
1023 if (GET_CODE (classp->exp) == REG
1024 && GET_MODE (classp->exp) == GET_MODE (x))
1025 {
1026 make_regs_eqv (regno, REGNO (classp->exp));
1027 return 1;
1028 }
1029
1030 make_new_qty (regno);
1031 qty_mode[reg_qty[regno]] = GET_MODE (x);
1032 return 1;
1033 }
1034
1035 return 0;
1036 }
1037
1038 /* If X is a SUBREG, we will likely be inserting the inner register in the
1039 table. If that register doesn't have an assigned quantity number at
1040 this point but does later, the insertion that we will be doing now will
1041 not be accessible because its hash code will have changed. So assign
1042 a quantity number now. */
1043
1044 else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1045 && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1046 {
1047 insert_regs (SUBREG_REG (x), NULL_PTR, 0);
1048 mention_regs (SUBREG_REG (x));
1049 return 1;
1050 }
1051 else
1052 return mention_regs (x);
1053 }
1054 \f
1055 /* Look in or update the hash table. */
1056
1057 /* Put the element ELT on the list of free elements. */
1058
1059 static void
1060 free_element (elt)
1061 struct table_elt *elt;
1062 {
1063 elt->next_same_hash = free_element_chain;
1064 free_element_chain = elt;
1065 }
1066
1067 /* Return an element that is free for use. */
1068
1069 static struct table_elt *
1070 get_element ()
1071 {
1072 struct table_elt *elt = free_element_chain;
1073 if (elt)
1074 {
1075 free_element_chain = elt->next_same_hash;
1076 return elt;
1077 }
1078 n_elements_made++;
1079 return (struct table_elt *) oballoc (sizeof (struct table_elt));
1080 }
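
/* Added note: together, `free_element' and `get_element' implement a
   simple free list, so table elements discarded at basic-block
   boundaries are recycled instead of growing the obstack on every
   allocation.  */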
1081
1082 /* Remove table element ELT from use in the table.
1083 HASH is its hash code, made using the HASH macro.
1084 It's an argument because often that is known in advance
1085 and we save much time not recomputing it. */
1086
1087 static void
1088 remove_from_table (elt, hash)
1089 register struct table_elt *elt;
1090 unsigned hash;
1091 {
1092 if (elt == 0)
1093 return;
1094
1095 /* Mark this element as removed. See cse_insn. */
1096 elt->first_same_value = 0;
1097
1098 /* Remove the table element from its equivalence class. */
1099
1100 {
1101 register struct table_elt *prev = elt->prev_same_value;
1102 register struct table_elt *next = elt->next_same_value;
1103
1104 if (next) next->prev_same_value = prev;
1105
1106 if (prev)
1107 prev->next_same_value = next;
1108 else
1109 {
1110 register struct table_elt *newfirst = next;
1111 while (next)
1112 {
1113 next->first_same_value = newfirst;
1114 next = next->next_same_value;
1115 }
1116 }
1117 }
1118
1119 /* Remove the table element from its hash bucket. */
1120
1121 {
1122 register struct table_elt *prev = elt->prev_same_hash;
1123 register struct table_elt *next = elt->next_same_hash;
1124
1125 if (next) next->prev_same_hash = prev;
1126
1127 if (prev)
1128 prev->next_same_hash = next;
1129 else if (table[hash] == elt)
1130 table[hash] = next;
1131 else
1132 {
1133 /* This entry is not in the proper hash bucket. This can happen
1134 when two classes were merged by `merge_equiv_classes'. Search
1135 for the hash bucket that it heads. This happens only very
1136 rarely, so the cost is acceptable. */
1137 for (hash = 0; hash < NBUCKETS; hash++)
1138 if (table[hash] == elt)
1139 table[hash] = next;
1140 }
1141 }
1142
1143 /* Remove the table element from its related-value circular chain. */
1144
1145 if (elt->related_value != 0 && elt->related_value != elt)
1146 {
1147 register struct table_elt *p = elt->related_value;
1148 while (p->related_value != elt)
1149 p = p->related_value;
1150 p->related_value = elt->related_value;
1151 if (p->related_value == p)
1152 p->related_value = 0;
1153 }
1154
1155 free_element (elt);
1156 }
1157
1158 /* Look up X in the hash table and return its table element,
1159 or 0 if X is not in the table.
1160
1161 MODE is the machine-mode of X, or if X is an integer constant
1162 with VOIDmode then MODE is the mode with which X will be used.
1163
1164 Here we are satisfied to find an expression whose tree structure
1165 looks like X. */
1166
1167 static struct table_elt *
1168 lookup (x, hash, mode)
1169 rtx x;
1170 unsigned hash;
1171 enum machine_mode mode;
1172 {
1173 register struct table_elt *p;
1174
1175 for (p = table[hash]; p; p = p->next_same_hash)
1176 if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
1177 || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
1178 return p;
1179
1180 return 0;
1181 }
1182
1183 /* Like `lookup' but don't care whether the table element uses invalid regs.
1184 Also ignore discrepancies in the machine mode of a register. */
1185
1186 static struct table_elt *
1187 lookup_for_remove (x, hash, mode)
1188 rtx x;
1189 unsigned hash;
1190 enum machine_mode mode;
1191 {
1192 register struct table_elt *p;
1193
1194 if (GET_CODE (x) == REG)
1195 {
1196 int regno = REGNO (x);
1197 /* Don't check the machine mode when comparing registers;
1198 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1199 for (p = table[hash]; p; p = p->next_same_hash)
1200 if (GET_CODE (p->exp) == REG
1201 && REGNO (p->exp) == regno)
1202 return p;
1203 }
1204 else
1205 {
1206 for (p = table[hash]; p; p = p->next_same_hash)
1207 if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
1208 return p;
1209 }
1210
1211 return 0;
1212 }
1213
1214 /* Look for an expression equivalent to X and with code CODE.
1215 If one is found, return that expression. */
1216
1217 static rtx
1218 lookup_as_function (x, code)
1219 rtx x;
1220 enum rtx_code code;
1221 {
1222 register struct table_elt *p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS,
1223 GET_MODE (x));
1224 if (p == 0)
1225 return 0;
1226
1227 for (p = p->first_same_value; p; p = p->next_same_value)
1228 {
1229 if (GET_CODE (p->exp) == code
1230 /* Make sure this is a valid entry in the table. */
1231 && exp_equiv_p (p->exp, p->exp, 1, 0))
1232 return p->exp;
1233 }
1234
1235 return 0;
1236 }
1237
1238 /* Insert X in the hash table, assuming HASH is its hash code
1239 and CLASSP is an element of the class it should go in
1240 (or 0 if a new class should be made).
1241 It is inserted at the proper position to keep the class in
1242 the order cheapest first.
1243
1244 MODE is the machine-mode of X, or if X is an integer constant
1245 with VOIDmode then MODE is the mode with which X will be used.
1246
1247 For elements of equal cheapness, the most recent one
1248 goes in front, except that the first element in the list
1249 remains first unless a cheaper element is added. The order of
1250 pseudo-registers does not matter, as canon_reg will be called to
1251 find the cheapest when a register is retrieved from the table.
1252
1253 The in_memory field in the hash table element is set to 0.
1254 The caller must set it nonzero if appropriate.
1255
1256 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1257 and if insert_regs returns a nonzero value
1258 you must then recompute its hash code before calling here.
1259
1260 If necessary, update table showing constant values of quantities. */
1261
1262 #define CHEAPER(X,Y) ((X)->cost < (Y)->cost)
1263
1264 static struct table_elt *
1265 insert (x, classp, hash, mode)
1266 register rtx x;
1267 register struct table_elt *classp;
1268 unsigned hash;
1269 enum machine_mode mode;
1270 {
1271 register struct table_elt *elt;
1272
1273 /* If X is a register and we haven't made a quantity for it,
1274 something is wrong. */
1275 if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
1276 abort ();
1277
1278 /* If X is a hard register, show it is being put in the table. */
1279 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
1280 {
1281 int regno = REGNO (x);
1282 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1283 int i;
1284
1285 for (i = regno; i < endregno; i++)
1286 SET_HARD_REG_BIT (hard_regs_in_table, i);
1287 }
1288
1289
1290 /* Put an element for X into the right hash bucket. */
1291
1292 elt = get_element ();
1293 elt->exp = x;
1294 elt->cost = COST (x);
1295 elt->next_same_value = 0;
1296 elt->prev_same_value = 0;
1297 elt->next_same_hash = table[hash];
1298 elt->prev_same_hash = 0;
1299 elt->related_value = 0;
1300 elt->in_memory = 0;
1301 elt->mode = mode;
1302 elt->is_const = (CONSTANT_P (x)
1303 /* GNU C++ takes advantage of this for `this'
1304 (and other const values). */
1305 || (RTX_UNCHANGING_P (x)
1306 && GET_CODE (x) == REG
1307 && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1308 || FIXED_BASE_PLUS_P (x));
1309
1310 if (table[hash])
1311 table[hash]->prev_same_hash = elt;
1312 table[hash] = elt;
1313
1314 /* Put it into the proper value-class. */
1315 if (classp)
1316 {
1317 classp = classp->first_same_value;
1318 if (CHEAPER (elt, classp))
1319 /* Insert at the head of the class. */
1320 {
1321 register struct table_elt *p;
1322 elt->next_same_value = classp;
1323 classp->prev_same_value = elt;
1324 elt->first_same_value = elt;
1325
1326 for (p = classp; p; p = p->next_same_value)
1327 p->first_same_value = elt;
1328 }
1329 else
1330 {
1331 /* Insert not at head of the class. */
1332 /* Put it after the last element cheaper than X. */
1333 register struct table_elt *p, *next;
1334 for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1335 p = next);
1336 /* Put it after P and before NEXT. */
1337 elt->next_same_value = next;
1338 if (next)
1339 next->prev_same_value = elt;
1340 elt->prev_same_value = p;
1341 p->next_same_value = elt;
1342 elt->first_same_value = classp;
1343 }
1344 }
1345 else
1346 elt->first_same_value = elt;
1347
1348 /* If this is a constant being set equivalent to a register or a register
1349 being set equivalent to a constant, note the constant equivalence.
1350
1351 If this is a constant, it cannot be equivalent to a different constant,
1352 and a constant is the only thing that can be cheaper than a register. So
1353 we know the register is the head of the class (before the constant was
1354 inserted).
1355
1356 If this is a register that is not already known equivalent to a
1357 constant, we must check the entire class.
1358
1359 If this is a register that is already known equivalent to a constant,
1360 update `qty_const_insn' to show that `this_insn' is the latest
1361 insn making that quantity equivalent to the constant. */
1362
1363 if (elt->is_const && classp && GET_CODE (classp->exp) == REG)
1364 {
1365 qty_const[reg_qty[REGNO (classp->exp)]]
1366 = gen_lowpart_if_possible (qty_mode[reg_qty[REGNO (classp->exp)]], x);
1367 qty_const_insn[reg_qty[REGNO (classp->exp)]] = this_insn;
1368 }
1369
1370 else if (GET_CODE (x) == REG && classp && ! qty_const[reg_qty[REGNO (x)]])
1371 {
1372 register struct table_elt *p;
1373
1374 for (p = classp; p != 0; p = p->next_same_value)
1375 {
1376 if (p->is_const)
1377 {
1378 qty_const[reg_qty[REGNO (x)]]
1379 = gen_lowpart_if_possible (GET_MODE (x), p->exp);
1380 qty_const_insn[reg_qty[REGNO (x)]] = this_insn;
1381 break;
1382 }
1383 }
1384 }
1385
1386 else if (GET_CODE (x) == REG && qty_const[reg_qty[REGNO (x)]]
1387 && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]])
1388 qty_const_insn[reg_qty[REGNO (x)]] = this_insn;
1389
1390 /* If this is a constant with symbolic value,
1391 and it has a term with an explicit integer value,
1392 link it up with related expressions. */
1393 if (GET_CODE (x) == CONST)
1394 {
1395 rtx subexp = get_related_value (x);
1396 unsigned subhash;
1397 struct table_elt *subelt, *subelt_prev;
1398
1399 if (subexp != 0)
1400 {
1401 /* Get the integer-free subexpression in the hash table. */
1402 subhash = safe_hash (subexp, mode) % NBUCKETS;
1403 subelt = lookup (subexp, subhash, mode);
1404 if (subelt == 0)
1405 subelt = insert (subexp, NULL_PTR, subhash, mode);
1406 /* Initialize SUBELT's circular chain if it has none. */
1407 if (subelt->related_value == 0)
1408 subelt->related_value = subelt;
1409 /* Find the element in the circular chain that precedes SUBELT. */
1410 subelt_prev = subelt;
1411 while (subelt_prev->related_value != subelt)
1412 subelt_prev = subelt_prev->related_value;
1413 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1414 This way the element that follows SUBELT is the oldest one. */
1415 elt->related_value = subelt_prev->related_value;
1416 subelt_prev->related_value = elt;
1417 }
1418 }
1419
1420 return elt;
1421 }
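
/* Added illustration of the related-value ring built above: if
   (const (plus X 4)) is entered and later (const (plus X 8)), the
   integer-free term X is entered too, and the three elements form a
   circular chain  X -> (X+4) -> (X+8) -> X  (each new element is
   spliced in just before X, so the element after X stays the oldest).
   `use_related_value' below walks this ring.  */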
1422 \f
1423 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1424 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1425 the two classes equivalent.
1426
1427 CLASS1 will be the surviving class; CLASS2 should not be used after this
1428 call.
1429
1430 Any invalid entries in CLASS2 will not be copied. */
1431
1432 static void
1433 merge_equiv_classes (class1, class2)
1434 struct table_elt *class1, *class2;
1435 {
1436 struct table_elt *elt, *next, *new;
1437
1438 /* Ensure we start with the head of the classes. */
1439 class1 = class1->first_same_value;
1440 class2 = class2->first_same_value;
1441
1442 /* If they were already equal, forget it. */
1443 if (class1 == class2)
1444 return;
1445
1446 for (elt = class2; elt; elt = next)
1447 {
1448 unsigned hash;
1449 rtx exp = elt->exp;
1450 enum machine_mode mode = elt->mode;
1451
1452 next = elt->next_same_value;
1453
1454 /* Remove old entry, make a new one in CLASS1's class.
1455 Don't do this for invalid entries as we cannot find their
1456 hash code (it also isn't necessary). */
1457 if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
1458 {
1459 hash_arg_in_memory = 0;
1460 hash_arg_in_struct = 0;
1461 hash = HASH (exp, mode);
1462
1463 if (GET_CODE (exp) == REG)
1464 delete_reg_equiv (REGNO (exp));
1465
1466 remove_from_table (elt, hash);
1467
1468 if (insert_regs (exp, class1, 0))
1469 hash = HASH (exp, mode);
1470 new = insert (exp, class1, hash, mode);
1471 new->in_memory = hash_arg_in_memory;
1472 new->in_struct = hash_arg_in_struct;
1473 }
1474 }
1475 }
1476 \f
1477 /* Remove from the hash table, or mark as invalid,
1478 all expressions whose values could be altered by storing in X.
1479 X is a register, a subreg, or a memory reference with nonvarying address
1480 (because, when a memory reference with a varying address is stored in,
1481 all memory references are removed by invalidate_memory
1482 so specific invalidation is superfluous).
1483
1484 A nonvarying address may be just a register or just
1485 a symbol reference, or it may be either of those plus
1486 a numeric offset. */
1487
1488 static void
1489 invalidate (x)
1490 rtx x;
1491 {
1492 register int i;
1493 register struct table_elt *p;
1494 rtx base;
1495 HOST_WIDE_INT start, end;
1496
1497 /* If X is a register, dependencies on its contents
1498 are recorded through the qty number mechanism.
1499 Just change the qty number of the register,
1500 mark it as invalid for expressions that refer to it,
1501 and remove it itself. */
1502
1503 if (GET_CODE (x) == REG)
1504 {
1505 register int regno = REGNO (x);
1506 register unsigned hash = HASH (x, GET_MODE (x));
1507
1508 /* Remove REGNO from any quantity list it might be on and indicate
1509 that its value might have changed. If it is a pseudo, remove its
1510 entry from the hash table.
1511
1512 For a hard register, we do the first two actions above for any
1513 additional hard registers corresponding to X. Then, if any of these
1514 registers are in the table, we must remove any REG entries that
1515 overlap these registers. */
1516
1517 delete_reg_equiv (regno);
1518 reg_tick[regno]++;
1519
1520 if (regno >= FIRST_PSEUDO_REGISTER)
1521 remove_from_table (lookup_for_remove (x, hash, GET_MODE (x)), hash);
1522 else
1523 {
1524 HOST_WIDE_INT in_table
1525 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1526 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1527 int tregno, tendregno;
1528 register struct table_elt *p, *next;
1529
1530 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1531
1532 for (i = regno + 1; i < endregno; i++)
1533 {
1534 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, i);
1535 CLEAR_HARD_REG_BIT (hard_regs_in_table, i);
1536 delete_reg_equiv (i);
1537 reg_tick[i]++;
1538 }
1539
1540 if (in_table)
1541 for (hash = 0; hash < NBUCKETS; hash++)
1542 for (p = table[hash]; p; p = next)
1543 {
1544 next = p->next_same_hash;
1545
1546 if (GET_CODE (p->exp) != REG
1547 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1548 continue;
1549
1550 tregno = REGNO (p->exp);
1551 tendregno
1552 = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1553 if (tendregno > regno && tregno < endregno)
1554 remove_from_table (p, hash);
1555 }
1556 }
1557
1558 return;
1559 }
1560
1561 if (GET_CODE (x) == SUBREG)
1562 {
1563 if (GET_CODE (SUBREG_REG (x)) != REG)
1564 abort ();
1565 invalidate (SUBREG_REG (x));
1566 return;
1567 }
1568
1569 /* X is not a register; it must be a memory reference with
1570 a nonvarying address. Remove all hash table elements
1571 that refer to overlapping pieces of memory. */
1572
1573 if (GET_CODE (x) != MEM)
1574 abort ();
1575
1576 set_nonvarying_address_components (XEXP (x, 0), GET_MODE_SIZE (GET_MODE (x)),
1577 &base, &start, &end);
1578
1579 for (i = 0; i < NBUCKETS; i++)
1580 {
1581 register struct table_elt *next;
1582 for (p = table[i]; p; p = next)
1583 {
1584 next = p->next_same_hash;
1585 if (refers_to_mem_p (p->exp, base, start, end))
1586 remove_from_table (p, i);
1587 }
1588 }
1589 }
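
/* Added note: the hard-register test in `invalidate' above,
   `tendregno > regno && tregno < endregno', is the usual half-open
   interval check: [tregno, tendregno) overlaps [regno, endregno)
   exactly when each interval starts before the other one ends.  */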
1590
1591 /* Remove all expressions that refer to register REGNO,
1592 since they are already invalid, and we are about to
1593 mark that register valid again and don't want the old
1594 expressions to reappear as valid. */
1595
1596 static void
1597 remove_invalid_refs (regno)
1598 int regno;
1599 {
1600 register int i;
1601 register struct table_elt *p, *next;
1602
1603 for (i = 0; i < NBUCKETS; i++)
1604 for (p = table[i]; p; p = next)
1605 {
1606 next = p->next_same_hash;
1607 if (GET_CODE (p->exp) != REG
1608 && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
1609 remove_from_table (p, i);
1610 }
1611 }
1612 \f
1613 /* Recompute the hash codes of any valid entries in the hash table that
1614 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1615
1616 This is called when we make a jump equivalence. */
1617
1618 static void
1619 rehash_using_reg (x)
1620 rtx x;
1621 {
1622 int i;
1623 struct table_elt *p, *next;
1624 unsigned hash;
1625
1626 if (GET_CODE (x) == SUBREG)
1627 x = SUBREG_REG (x);
1628
1629 /* If X is not a register or if the register is known not to be in any
1630 valid entries in the table, we have no work to do. */
1631
1632 if (GET_CODE (x) != REG
1633 || reg_in_table[REGNO (x)] < 0
1634 || reg_in_table[REGNO (x)] != reg_tick[REGNO (x)])
1635 return;
1636
1637 /* Scan all hash chains looking for valid entries that mention X.
1638 If we find one and it is in the wrong hash chain, move it. We can skip
1639 objects that are registers, since they are handled specially. */
1640
1641 for (i = 0; i < NBUCKETS; i++)
1642 for (p = table[i]; p; p = next)
1643 {
1644 next = p->next_same_hash;
1645 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
1646 && exp_equiv_p (p->exp, p->exp, 1, 0)
1647 && i != (hash = safe_hash (p->exp, p->mode) % NBUCKETS))
1648 {
1649 if (p->next_same_hash)
1650 p->next_same_hash->prev_same_hash = p->prev_same_hash;
1651
1652 if (p->prev_same_hash)
1653 p->prev_same_hash->next_same_hash = p->next_same_hash;
1654 else
1655 table[i] = p->next_same_hash;
1656
1657 p->next_same_hash = table[hash];
1658 p->prev_same_hash = 0;
1659 if (table[hash])
1660 table[hash]->prev_same_hash = p;
1661 table[hash] = p;
1662 }
1663 }
1664 }
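/* A minimal standalone sketch of the chain surgery above: unlink P from
   its doubly linked bucket and push it on the front of the new bucket.
   `struct elt' is a simplified stand-in for struct table_elt.  */
#if 0
struct elt { struct elt *next_same_hash, *prev_same_hash; };

static void
move_to_bucket (p, old_bucket, new_bucket)
     struct elt *p, **old_bucket, **new_bucket;
{
  if (p->next_same_hash)
    p->next_same_hash->prev_same_hash = p->prev_same_hash;
  if (p->prev_same_hash)
    p->prev_same_hash->next_same_hash = p->next_same_hash;
  else
    *old_bucket = p->next_same_hash;

  p->next_same_hash = *new_bucket;
  p->prev_same_hash = 0;
  if (*new_bucket)
    (*new_bucket)->prev_same_hash = p;
  *new_bucket = p;
}
#endif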
1665 \f
1666 /* Remove from the hash table all expressions that reference memory,
1667 or some of them as specified by *WRITES. */
1668
1669 static void
1670 invalidate_memory (writes)
1671 struct write_data *writes;
1672 {
1673 register int i;
1674 register struct table_elt *p, *next;
1675 int all = writes->all;
1676 int nonscalar = writes->nonscalar;
1677
1678 for (i = 0; i < NBUCKETS; i++)
1679 for (p = table[i]; p; p = next)
1680 {
1681 next = p->next_same_hash;
1682 if (p->in_memory
1683 && (all
1684 || (nonscalar && p->in_struct)
1685 || cse_rtx_addr_varies_p (p->exp)))
1686 remove_from_table (p, i);
1687 }
1688 }
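/* Sketch of the removal predicate above, with the table_elt fields
   flattened into plain ints: a memory entry dies if all of memory was
   clobbered, if an aggregate store may alias a structure member, or if
   the entry's address can vary.  */
#if 0
static int
mem_entry_dies_p (in_memory, in_struct, addr_varies, all, nonscalar)
     int in_memory, in_struct, addr_varies, all, nonscalar;
{
  return in_memory
	 && (all || (nonscalar && in_struct) || addr_varies);
}
#endif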
1689 \f
1690 /* Remove from the hash table any expression that is a call-clobbered
1691 register. Also update the TICK values of such registers. */
1692
1693 static void
1694 invalidate_for_call ()
1695 {
1696 int regno, endregno;
1697 int i;
1698 unsigned hash;
1699 struct table_elt *p, *next;
1700 int in_table = 0;
1701
1702 /* Go through all the hard registers. For each that is clobbered in
1703 a CALL_INSN, remove the register from quantity chains and update
1704 reg_tick if defined. Also see if any of these registers is currently
1705 in the table. */
1706
1707 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1708 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1709 {
1710 delete_reg_equiv (regno);
1711 if (reg_tick[regno] >= 0)
1712 reg_tick[regno]++;
1713
1714 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1715 }
1716
1717 /* In the case where we have no call-clobbered hard registers in the
1718 table, we are done. Otherwise, scan the table and remove any
1719 entry that overlaps a call-clobbered register. */
1720
1721 if (in_table)
1722 for (hash = 0; hash < NBUCKETS; hash++)
1723 for (p = table[hash]; p; p = next)
1724 {
1725 next = p->next_same_hash;
1726
1727 if (GET_CODE (p->exp) != REG
1728 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1729 continue;
1730
1731 regno = REGNO (p->exp);
1732 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
1733
1734 for (i = regno; i < endregno; i++)
1735 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1736 {
1737 remove_from_table (p, hash);
1738 break;
1739 }
1740 }
1741 }
1742 \f
1743 /* Given an expression X of type CONST,
1744 and ELT which is its table entry (or 0 if it
1745 is not in the hash table),
1746 return an alternate expression for X as a register plus integer.
1747 If none can be found, return 0. */
1748
1749 static rtx
1750 use_related_value (x, elt)
1751 rtx x;
1752 struct table_elt *elt;
1753 {
1754 register struct table_elt *relt = 0;
1755 register struct table_elt *p, *q;
1756 HOST_WIDE_INT offset;
1757
1758 /* First, is there anything related known?
1759 If we have a table element, we can tell from that.
1760 Otherwise, must look it up. */
1761
1762 if (elt != 0 && elt->related_value != 0)
1763 relt = elt;
1764 else if (elt == 0 && GET_CODE (x) == CONST)
1765 {
1766 rtx subexp = get_related_value (x);
1767 if (subexp != 0)
1768 relt = lookup (subexp,
1769 safe_hash (subexp, GET_MODE (subexp)) % NBUCKETS,
1770 GET_MODE (subexp));
1771 }
1772
1773 if (relt == 0)
1774 return 0;
1775
1776 /* Search all related table entries for one that has an
1777 equivalent register. */
1778
1779 p = relt;
1780 while (1)
1781 {
1782 /* This loop is strange in that it is executed in two different cases.
1783 The first is when X is already in the table. Then it is searching
1784 the RELATED_VALUE list of X's class (RELT). The second case is when
1785 X is not in the table. Then RELT points to a class for the related
1786 value.
1787
1788 Ensure that, whatever case we are in, we ignore classes that have
1789 the same value as X. */
1790
1791 if (rtx_equal_p (x, p->exp))
1792 q = 0;
1793 else
1794 for (q = p->first_same_value; q; q = q->next_same_value)
1795 if (GET_CODE (q->exp) == REG)
1796 break;
1797
1798 if (q)
1799 break;
1800
1801 p = p->related_value;
1802
1803 /* We went all the way around, so there is nothing to be found.
1804 Alternatively, perhaps RELT was in the table for some other reason
1805 and it has no related values recorded. */
1806 if (p == relt || p == 0)
1807 break;
1808 }
1809
1810 if (q == 0)
1811 return 0;
1812
1813 offset = (get_integer_term (x) - get_integer_term (p->exp));
1814 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
1815 return plus_constant (q->exp, offset);
1816 }
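/* Worked example (hypothetical register numbers): if X is
   (const (plus (symbol_ref "tbl") (const_int 12))) and the table
   records that (reg 70) holds (const (plus (symbol_ref "tbl")
   (const_int 4))), then OFFSET is 12 - 4 == 8 and the result is
   (plus (reg 70) (const_int 8)).  */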
1817 \f
1818 /* Hash an rtx. We are careful to make sure the value is never negative.
1819 Equivalent registers hash identically.
1820 MODE is used in hashing for CONST_INTs only;
1821 otherwise the mode of X is used.
1822
1823 Store 1 in do_not_record if any subexpression is volatile.
1824
1825 Store 1 in hash_arg_in_memory if X contains a MEM rtx
1826 which does not have the RTX_UNCHANGING_P bit set.
1827 In this case, also store 1 in hash_arg_in_struct
1828 if there is a MEM rtx which has the MEM_IN_STRUCT_P bit set.
1829
1830 Note that cse_insn knows that the hash code of a MEM expression
1831 is just (int) MEM plus the hash code of the address. */
1832
1833 static unsigned
1834 canon_hash (x, mode)
1835 rtx x;
1836 enum machine_mode mode;
1837 {
1838 register int i, j;
1839 register unsigned hash = 0;
1840 register enum rtx_code code;
1841 register char *fmt;
1842
1843 /* repeat is used to turn tail-recursion into iteration. */
1844 repeat:
1845 if (x == 0)
1846 return hash;
1847
1848 code = GET_CODE (x);
1849 switch (code)
1850 {
1851 case REG:
1852 {
1853 register int regno = REGNO (x);
1854
1855 /* On some machines, we can't record any non-fixed hard register,
1856 because extending its life will cause reload problems. We
1857 consider ap, fp, and sp to be fixed for this purpose.
1858 On all machines, we can't record any global registers. */
1859
1860 if (regno < FIRST_PSEUDO_REGISTER
1861 && (global_regs[regno]
1862 #ifdef SMALL_REGISTER_CLASSES
1863 || (! fixed_regs[regno]
1864 && regno != FRAME_POINTER_REGNUM
1865 && regno != HARD_FRAME_POINTER_REGNUM
1866 && regno != ARG_POINTER_REGNUM
1867 && regno != STACK_POINTER_REGNUM)
1868 #endif
1869 ))
1870 {
1871 do_not_record = 1;
1872 return 0;
1873 }
1874 hash += ((unsigned) REG << 7) + (unsigned) reg_qty[regno];
1875 return hash;
1876 }
1877
1878 case CONST_INT:
1879 {
1880 unsigned HOST_WIDE_INT tem = INTVAL (x);
1881 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
1882 return hash;
1883 }
1884
1885 case CONST_DOUBLE:
1886 /* This is like the general case, except that it only counts
1887 the integers representing the constant. */
1888 hash += (unsigned) code + (unsigned) GET_MODE (x);
1889 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
1890 {
1891 unsigned tem = XINT (x, i);
1892 hash += tem;
1893 }
1894 return hash;
1895
1896 /* Assume there is only one rtx object for any given label. */
1897 case LABEL_REF:
1898 hash
1899 += ((unsigned) LABEL_REF << 7) + (unsigned HOST_WIDE_INT) XEXP (x, 0);
1900 return hash;
1901
1902 case SYMBOL_REF:
1903 hash
1904 += ((unsigned) SYMBOL_REF << 7) + (unsigned HOST_WIDE_INT) XEXP (x, 0);
1905 return hash;
1906
1907 case MEM:
1908 if (MEM_VOLATILE_P (x))
1909 {
1910 do_not_record = 1;
1911 return 0;
1912 }
1913 if (! RTX_UNCHANGING_P (x))
1914 {
1915 hash_arg_in_memory = 1;
1916 if (MEM_IN_STRUCT_P (x)) hash_arg_in_struct = 1;
1917 }
1918 /* Now that we have already found this special case,
1919 might as well speed it up as much as possible. */
1920 hash += (unsigned) MEM;
1921 x = XEXP (x, 0);
1922 goto repeat;
1923
1924 case PRE_DEC:
1925 case PRE_INC:
1926 case POST_DEC:
1927 case POST_INC:
1928 case PC:
1929 case CC0:
1930 case CALL:
1931 case UNSPEC_VOLATILE:
1932 do_not_record = 1;
1933 return 0;
1934
1935 case ASM_OPERANDS:
1936 if (MEM_VOLATILE_P (x))
1937 {
1938 do_not_record = 1;
1939 return 0;
1940 }
1941 }
1942
1943 i = GET_RTX_LENGTH (code) - 1;
1944 hash += (unsigned) code + (unsigned) GET_MODE (x);
1945 fmt = GET_RTX_FORMAT (code);
1946 for (; i >= 0; i--)
1947 {
1948 if (fmt[i] == 'e')
1949 {
1950 rtx tem = XEXP (x, i);
1951 rtx tem1;
1952
1953 /* If the operand is a REG that is equivalent to a constant, hash
1954 as if we were hashing the constant, since we will be comparing
1955 that way. */
1956 if (tem != 0 && GET_CODE (tem) == REG
1957 && REGNO_QTY_VALID_P (REGNO (tem))
1958 && qty_mode[reg_qty[REGNO (tem)]] == GET_MODE (tem)
1959 && (tem1 = qty_const[reg_qty[REGNO (tem)]]) != 0
1960 && CONSTANT_P (tem1))
1961 tem = tem1;
1962
1963 /* If we are about to do the last recursive call
1964 needed at this level, change it into iteration.
1965 This function is called enough to be worth it. */
1966 if (i == 0)
1967 {
1968 x = tem;
1969 goto repeat;
1970 }
1971 hash += canon_hash (tem, 0);
1972 }
1973 else if (fmt[i] == 'E')
1974 for (j = 0; j < XVECLEN (x, i); j++)
1975 hash += canon_hash (XVECEXP (x, i, j), 0);
1976 else if (fmt[i] == 's')
1977 {
1978 register unsigned char *p = (unsigned char *) XSTR (x, i);
1979 if (p)
1980 while (*p)
1981 hash += *p++;
1982 }
1983 else if (fmt[i] == 'i')
1984 {
1985 register unsigned tem = XINT (x, i);
1986 hash += tem;
1987 }
1988 else
1989 abort ();
1990 }
1991 return hash;
1992 }
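/* A standalone sketch (simplified stand-in types, not compiler code) of
   the hashing scheme above: fold in a node's code and mode, hash all
   operands but the last recursively, and turn the final recursive call
   into iteration, as canon_hash does with `goto repeat'.  */
#if 0
struct node { int code, mode, nops; struct node *op[3]; };

static unsigned
sketch_hash (x)
     struct node *x;
{
  unsigned hash = 0;
  int i;

 repeat:
  if (x == 0)
    return hash;
  hash += ((unsigned) x->code << 7) + (unsigned) x->mode;
  if (x->nops == 0)
    return hash;
  for (i = 0; i < x->nops - 1; i++)
    hash += sketch_hash (x->op[i]);
  x = x->op[x->nops - 1];
  goto repeat;
}
#endif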
1993
1994 /* Like canon_hash but with no side effects. */
1995
1996 static unsigned
1997 safe_hash (x, mode)
1998 rtx x;
1999 enum machine_mode mode;
2000 {
2001 int save_do_not_record = do_not_record;
2002 int save_hash_arg_in_memory = hash_arg_in_memory;
2003 int save_hash_arg_in_struct = hash_arg_in_struct;
2004 unsigned hash = canon_hash (x, mode);
2005 hash_arg_in_memory = save_hash_arg_in_memory;
2006 hash_arg_in_struct = save_hash_arg_in_struct;
2007 do_not_record = save_do_not_record;
2008 return hash;
2009 }
2010 \f
2011 /* Return 1 iff X and Y would canonicalize into the same thing,
2012 without actually constructing the canonicalization of either one.
2013 If VALIDATE is nonzero,
2014 we assume X is an expression being processed from the rtl
2015 and Y was found in the hash table. We check register refs
2016 in Y for being marked as valid.
2017
2018 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2019 that is known to be in the register. Ordinarily, we don't allow them
2020 to match, because letting them match would cause unpredictable results
2021 in all the places that search a hash table chain for an equivalent
2022 for a given value. A possible equivalent that has different structure
2023 has its hash code computed from different data. Whether the hash code
2024 is the same as that of the given value is pure luck. */
2025
2026 static int
2027 exp_equiv_p (x, y, validate, equal_values)
2028 rtx x, y;
2029 int validate;
2030 int equal_values;
2031 {
2032 register int i, j;
2033 register enum rtx_code code;
2034 register char *fmt;
2035
2036 /* Note: it is incorrect to assume an expression is equivalent to itself
2037 if VALIDATE is nonzero. */
2038 if (x == y && !validate)
2039 return 1;
2040 if (x == 0 || y == 0)
2041 return x == y;
2042
2043 code = GET_CODE (x);
2044 if (code != GET_CODE (y))
2045 {
2046 if (!equal_values)
2047 return 0;
2048
2049 /* If X is a constant and Y is a register or vice versa, they may be
2050 equivalent. We only have to validate if Y is a register. */
2051 if (CONSTANT_P (x) && GET_CODE (y) == REG
2052 && REGNO_QTY_VALID_P (REGNO (y))
2053 && GET_MODE (y) == qty_mode[reg_qty[REGNO (y)]]
2054 && rtx_equal_p (x, qty_const[reg_qty[REGNO (y)]])
2055 && (! validate || reg_in_table[REGNO (y)] == reg_tick[REGNO (y)]))
2056 return 1;
2057
2058 if (CONSTANT_P (y) && code == REG
2059 && REGNO_QTY_VALID_P (REGNO (x))
2060 && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]]
2061 && rtx_equal_p (y, qty_const[reg_qty[REGNO (x)]]))
2062 return 1;
2063
2064 return 0;
2065 }
2066
2067 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2068 if (GET_MODE (x) != GET_MODE (y))
2069 return 0;
2070
2071 switch (code)
2072 {
2073 case PC:
2074 case CC0:
2075 return x == y;
2076
2077 case CONST_INT:
2078 return INTVAL (x) == INTVAL (y);
2079
2080 case LABEL_REF:
2081 case SYMBOL_REF:
2082 return XEXP (x, 0) == XEXP (y, 0);
2083
2084 case REG:
2085 {
2086 int regno = REGNO (y);
2087 int endregno
2088 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2089 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2090 int i;
2091
2092 /* If the quantities are not the same, the expressions are not
2093 equivalent. If they are and we are not to validate, they
2094 are equivalent. Otherwise, ensure all regs are up-to-date. */
2095
2096 if (reg_qty[REGNO (x)] != reg_qty[regno])
2097 return 0;
2098
2099 if (! validate)
2100 return 1;
2101
2102 for (i = regno; i < endregno; i++)
2103 if (reg_in_table[i] != reg_tick[i])
2104 return 0;
2105
2106 return 1;
2107 }
2108
2109 /* For commutative operations, check both orders. */
2110 case PLUS:
2111 case MULT:
2112 case AND:
2113 case IOR:
2114 case XOR:
2115 case NE:
2116 case EQ:
2117 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2118 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2119 validate, equal_values))
2120 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2121 validate, equal_values)
2122 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2123 validate, equal_values)));
2124 }
2125
2126 /* Compare the elements. If any pair of corresponding elements
2127 fails to match, return 0 for the whole thing. */
2128
2129 fmt = GET_RTX_FORMAT (code);
2130 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2131 {
2132 switch (fmt[i])
2133 {
2134 case 'e':
2135 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2136 return 0;
2137 break;
2138
2139 case 'E':
2140 if (XVECLEN (x, i) != XVECLEN (y, i))
2141 return 0;
2142 for (j = 0; j < XVECLEN (x, i); j++)
2143 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2144 validate, equal_values))
2145 return 0;
2146 break;
2147
2148 case 's':
2149 if (strcmp (XSTR (x, i), XSTR (y, i)))
2150 return 0;
2151 break;
2152
2153 case 'i':
2154 if (XINT (x, i) != XINT (y, i))
2155 return 0;
2156 break;
2157
2158 case 'w':
2159 if (XWINT (x, i) != XWINT (y, i))
2160 return 0;
2161 break;
2162
2163 case '0':
2164 break;
2165
2166 default:
2167 abort ();
2168 }
2169 }
2170
2171 return 1;
2172 }
2173 \f
2174 /* Return 1 iff any subexpression of X matches Y.
2175 Here we do not require that X or Y be valid (for registers referred to)
2176 for being in the hash table. */
2177
2178 static int
2179 refers_to_p (x, y)
2180 rtx x, y;
2181 {
2182 register int i;
2183 register enum rtx_code code;
2184 register char *fmt;
2185
2186 repeat:
2187 if (x == y)
2188 return 1;
2189 if (x == 0 || y == 0)
2190 return 0;
2191
2192 code = GET_CODE (x);
2193 /* If X as a whole has the same code as Y, they may match.
2194 If so, return 1. */
2195 if (code == GET_CODE (y))
2196 {
2197 if (exp_equiv_p (x, y, 0, 1))
2198 return 1;
2199 }
2200
2201 /* X does not match, so try its subexpressions. */
2202
2203 fmt = GET_RTX_FORMAT (code);
2204 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2205 if (fmt[i] == 'e')
2206 {
2207 if (i == 0)
2208 {
2209 x = XEXP (x, 0);
2210 goto repeat;
2211 }
2212 else
2213 if (refers_to_p (XEXP (x, i), y))
2214 return 1;
2215 }
2216 else if (fmt[i] == 'E')
2217 {
2218 int j;
2219 for (j = 0; j < XVECLEN (x, i); j++)
2220 if (refers_to_p (XVECEXP (x, i, j), y))
2221 return 1;
2222 }
2223
2224 return 0;
2225 }
2226 \f
2227 /* Given ADDR and SIZE (a memory address, and the size of the memory reference),
2228 set PBASE, PSTART, and PEND which correspond to the base of the address,
2229 the starting offset, and ending offset respectively.
2230
2231 ADDR is known to be a nonvarying address.
2232
2233 cse_address_varies_p returns zero for nonvarying addresses. */
2234
2235 static void
2236 set_nonvarying_address_components (addr, size, pbase, pstart, pend)
2237 rtx addr;
2238 int size;
2239 rtx *pbase;
2240 HOST_WIDE_INT *pstart, *pend;
2241 {
2242 rtx base;
2243 int start, end;
2244
2245 base = addr;
2246 start = 0;
2247 end = 0;
2248
2249 /* Registers with nonvarying addresses usually have constant equivalents;
2250 but the frame pointer register is also a possible base. */
2251 if (GET_CODE (base) == REG
2252 && qty_const != 0
2253 && REGNO_QTY_VALID_P (REGNO (base))
2254 && qty_mode[reg_qty[REGNO (base)]] == GET_MODE (base)
2255 && qty_const[reg_qty[REGNO (base)]] != 0)
2256 base = qty_const[reg_qty[REGNO (base)]];
2257 else if (GET_CODE (base) == PLUS
2258 && GET_CODE (XEXP (base, 1)) == CONST_INT
2259 && GET_CODE (XEXP (base, 0)) == REG
2260 && qty_const != 0
2261 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
2262 && (qty_mode[reg_qty[REGNO (XEXP (base, 0))]]
2263 == GET_MODE (XEXP (base, 0)))
2264 && qty_const[reg_qty[REGNO (XEXP (base, 0))]])
2265 {
2266 start = INTVAL (XEXP (base, 1));
2267 base = qty_const[reg_qty[REGNO (XEXP (base, 0))]];
2268 }
2269
2270 /* By definition, operand1 of a LO_SUM is the associated constant
2271 address. Use the associated constant address as the base instead. */
2272 if (GET_CODE (base) == LO_SUM)
2273 base = XEXP (base, 1);
2274
2275 /* Strip off CONST. */
2276 if (GET_CODE (base) == CONST)
2277 base = XEXP (base, 0);
2278
2279 if (GET_CODE (base) == PLUS
2280 && GET_CODE (XEXP (base, 1)) == CONST_INT)
2281 {
2282 start += INTVAL (XEXP (base, 1));
2283 base = XEXP (base, 0);
2284 }
2285
2286 end = start + size;
2287
2288 /* Set the return values. */
2289 *pbase = base;
2290 *pstart = start;
2291 *pend = end;
2292 }
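/* Worked example: for ADDR = (plus (reg fp) (const_int 8)) and
   SIZE = 4, when fp has no known constant equivalent the PLUS is
   peeled off below the qty_const checks, giving BASE = (reg fp),
   START = 8, END = 12.  */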
2293
2294 /* Return 1 iff any subexpression of X refers to memory
2295 at an address of BASE plus some offset
2296 such that any of the bytes' offsets fall between START (inclusive)
2297 and END (exclusive).
2298
2299 The value is undefined if X is a varying address (as determined by
2300 cse_rtx_addr_varies_p). This function is not used in such cases.
2301
2302 When used in the cse pass, `qty_const' is nonzero, and it is used
2303 to treat an address that is a register with a known constant value
2304 as if it were that constant value.
2305 In the loop pass, `qty_const' is zero, so this is not done. */
2306
2307 static int
2308 refers_to_mem_p (x, base, start, end)
2309 rtx x, base;
2310 HOST_WIDE_INT start, end;
2311 {
2312 register HOST_WIDE_INT i;
2313 register enum rtx_code code;
2314 register char *fmt;
2315
2316 if (GET_CODE (base) == CONST_INT)
2317 {
2318 start += INTVAL (base);
2319 end += INTVAL (base);
2320 base = const0_rtx;
2321 }
2322
2323 repeat:
2324 if (x == 0)
2325 return 0;
2326
2327 code = GET_CODE (x);
2328 if (code == MEM)
2329 {
2330 register rtx addr = XEXP (x, 0); /* Get the address. */
2331 rtx mybase;
2332 HOST_WIDE_INT mystart, myend;
2333
2334 set_nonvarying_address_components (addr, GET_MODE_SIZE (GET_MODE (x)),
2335 &mybase, &mystart, &myend);
2336
2337
2338 /* refers_to_mem_p is never called with varying addresses.
2339 If the base addresses are not equal, there is no chance
2340 of the memory addresses conflicting. */
2341 if (! rtx_equal_p (mybase, base))
2342 return 0;
2343
2344 return myend > start && mystart < end;
2345 }
2346
2347 /* X does not match, so try its subexpressions. */
2348
2349 fmt = GET_RTX_FORMAT (code);
2350 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2351 if (fmt[i] == 'e')
2352 {
2353 if (i == 0)
2354 {
2355 x = XEXP (x, 0);
2356 goto repeat;
2357 }
2358 else
2359 if (refers_to_mem_p (XEXP (x, i), base, start, end))
2360 return 1;
2361 }
2362 else if (fmt[i] == 'E')
2363 {
2364 int j;
2365 for (j = 0; j < XVECLEN (x, i); j++)
2366 if (refers_to_mem_p (XVECEXP (x, i, j), base, start, end))
2367 return 1;
2368 }
2369
2370 return 0;
2371 }
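/* The conflict test above is a plain half-open interval intersection.
   E.g. a 4-byte store at [8, 12) and a 4-byte load at [10, 14) from the
   same BASE conflict because 12 > 10 and 8 < 14; a load at [12, 16)
   does not, since the intervals only touch.  */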
2372
2373 /* Nonzero if X refers to memory at a varying address;
2374 except that a register which has at the moment a known constant value
2375 isn't considered variable. */
2376
2377 static int
2378 cse_rtx_addr_varies_p (x)
2379 rtx x;
2380 {
2381 /* We need not check for X and the equivalence class being of the same
2382 mode because if X is equivalent to a constant in some mode, it
2383 doesn't vary in any mode. */
2384
2385 if (GET_CODE (x) == MEM
2386 && GET_CODE (XEXP (x, 0)) == REG
2387 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2388 && GET_MODE (XEXP (x, 0)) == qty_mode[reg_qty[REGNO (XEXP (x, 0))]]
2389 && qty_const[reg_qty[REGNO (XEXP (x, 0))]] != 0)
2390 return 0;
2391
2392 if (GET_CODE (x) == MEM
2393 && GET_CODE (XEXP (x, 0)) == PLUS
2394 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2395 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2396 && REGNO_QTY_VALID_P (REGNO (XEXP (XEXP (x, 0), 0)))
2397 && (GET_MODE (XEXP (XEXP (x, 0), 0))
2398 == qty_mode[reg_qty[REGNO (XEXP (XEXP (x, 0), 0))]])
2399 && qty_const[reg_qty[REGNO (XEXP (XEXP (x, 0), 0))]])
2400 return 0;
2401
2402 return rtx_addr_varies_p (x);
2403 }
2404 \f
2405 /* Canonicalize an expression:
2406 replace each register reference inside it
2407 with the "oldest" equivalent register.
2408
2409 If INSN is non-zero and we are replacing a pseudo with a hard register
2410 or vice versa, validate_change is used to ensure that INSN remains valid
2411 after we make our substitution. The calls are made with IN_GROUP non-zero
2412 so apply_change_group must be called upon the outermost return from this
2413 function (unless INSN is zero). The result of apply_change_group can
2414 generally be discarded since the changes we are making are optional. */
2415
2416 static rtx
2417 canon_reg (x, insn)
2418 rtx x;
2419 rtx insn;
2420 {
2421 register int i;
2422 register enum rtx_code code;
2423 register char *fmt;
2424
2425 if (x == 0)
2426 return x;
2427
2428 code = GET_CODE (x);
2429 switch (code)
2430 {
2431 case PC:
2432 case CC0:
2433 case CONST:
2434 case CONST_INT:
2435 case CONST_DOUBLE:
2436 case SYMBOL_REF:
2437 case LABEL_REF:
2438 case ADDR_VEC:
2439 case ADDR_DIFF_VEC:
2440 return x;
2441
2442 case REG:
2443 {
2444 register int first;
2445
2446 /* Never replace a hard reg, because hard regs can appear
2447 in more than one machine mode, and we must preserve the mode
2448 of each occurrence. Also, some hard regs appear in
2449 MEMs that are shared and mustn't be altered. Don't try to
2450 replace any reg that maps to a reg of class NO_REGS. */
2451 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2452 || ! REGNO_QTY_VALID_P (REGNO (x)))
2453 return x;
2454
2455 first = qty_first_reg[reg_qty[REGNO (x)]];
2456 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2457 : REGNO_REG_CLASS (first) == NO_REGS ? x
2458 : gen_rtx (REG, qty_mode[reg_qty[REGNO (x)]], first));
2459 }
2460 }
2461
2462 fmt = GET_RTX_FORMAT (code);
2463 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2464 {
2465 register int j;
2466
2467 if (fmt[i] == 'e')
2468 {
2469 rtx new = canon_reg (XEXP (x, i), insn);
2470
2471 /* If replacing pseudo with hard reg or vice versa, ensure the
2472 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2473 if (insn != 0 && new != 0
2474 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2475 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2476 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2477 || insn_n_dups[recog_memoized (insn)] > 0))
2478 validate_change (insn, &XEXP (x, i), new, 1);
2479 else
2480 XEXP (x, i) = new;
2481 }
2482 else if (fmt[i] == 'E')
2483 for (j = 0; j < XVECLEN (x, i); j++)
2484 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2485 }
2486
2487 return x;
2488 }
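/* A minimal sketch of the call protocol described above, with a
   hypothetical caller: canon_reg queues its risky substitutions via
   validate_change (..., 1), and the outermost caller commits or backs
   them out as a group.  */
#if 0
static void
canonicalize_operand (insn, loc)
     rtx insn;
     rtx *loc;
{
  *loc = canon_reg (*loc, insn);
  /* The substitutions are optional, so the group result may be
     ignored; invalid changes are simply backed out.  */
  apply_change_group ();
}
#endif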
2489 \f
2490 /* LOC is a location with INSN that is an operand address (the contents of
2491 a MEM). Find the best equivalent address to use that is valid for this
2492 insn.
2493
2494 On most CISC machines, complicated address modes are costly, and rtx_cost
2495 is a good approximation for that cost. However, most RISC machines have
2496 only a few (usually only one) memory reference formats. If an address is
2497 valid at all, it is often just as cheap as any other address. Hence, for
2498 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2499 costs of various addresses. For two addresses of equal cost, choose the one
2500 with the highest `rtx_cost' value as that has the potential of eliminating
2501 the most insns. For equal costs, we choose the first in the equivalence
2502 class. Note that we ignore the fact that pseudo registers are cheaper
2503 than hard registers here because we would also prefer the pseudo registers.
2504 */
2505
2506 static void
2507 find_best_addr (insn, loc)
2508 rtx insn;
2509 rtx *loc;
2510 {
2511 struct table_elt *elt, *p;
2512 rtx addr = *loc;
2513 int our_cost;
2514 int found_better = 1;
2515 int save_do_not_record = do_not_record;
2516 int save_hash_arg_in_memory = hash_arg_in_memory;
2517 int save_hash_arg_in_struct = hash_arg_in_struct;
2518 int addr_volatile;
2519 int regno;
2520 unsigned hash;
2521
2522 /* Do not try to replace constant addresses or addresses of local and
2523 argument slots. These MEM expressions are made only once and inserted
2524 in many instructions, as well as being used to control symbol table
2525 output. It is not safe to clobber them.
2526
2527 There are some uncommon cases where the address is already in a register
2528 for some reason, but we cannot take advantage of that because we have
2529 no easy way to unshare the MEM. In addition, looking up all stack
2530 addresses is costly. */
2531 if ((GET_CODE (addr) == PLUS
2532 && GET_CODE (XEXP (addr, 0)) == REG
2533 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2534 && (regno = REGNO (XEXP (addr, 0)),
2535 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2536 || regno == ARG_POINTER_REGNUM))
2537 || (GET_CODE (addr) == REG
2538 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2539 || regno == HARD_FRAME_POINTER_REGNUM
2540 || regno == ARG_POINTER_REGNUM))
2541 || CONSTANT_ADDRESS_P (addr))
2542 return;
2543
2544 /* If this address is not simply a register, try to fold it. This will
2545 sometimes simplify the expression. Many simplifications
2546 will not be valid, but some, usually applying the associative rule, will
2547 be valid and produce better code. */
2548 if (GET_CODE (addr) != REG
2549 && validate_change (insn, loc, fold_rtx (addr, insn), 0))
2550 addr = *loc;
2551
2552 /* If this address is not in the hash table, we can't look for equivalences
2553 of the whole address. Also, ignore if volatile. */
2554
2555 do_not_record = 0;
2556 hash = HASH (addr, Pmode);
2557 addr_volatile = do_not_record;
2558 do_not_record = save_do_not_record;
2559 hash_arg_in_memory = save_hash_arg_in_memory;
2560 hash_arg_in_struct = save_hash_arg_in_struct;
2561
2562 if (addr_volatile)
2563 return;
2564
2565 elt = lookup (addr, hash, Pmode);
2566
2567 #ifndef ADDRESS_COST
2568 if (elt)
2569 {
2570 our_cost = elt->cost;
2571
2572 /* Find the lowest cost below ours that works. */
2573 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2574 if (elt->cost < our_cost
2575 && (GET_CODE (elt->exp) == REG
2576 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2577 && validate_change (insn, loc,
2578 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2579 return;
2580 }
2581 #else
2582
2583 if (elt)
2584 {
2585 /* We need to find the best (under the criteria documented above) entry
2586 in the class that is valid. We use the `flag' field to indicate
2587 choices that were invalid and iterate until we can't find a better
2588 one that hasn't already been tried. */
2589
2590 for (p = elt->first_same_value; p; p = p->next_same_value)
2591 p->flag = 0;
2592
2593 while (found_better)
2594 {
2595 int best_addr_cost = ADDRESS_COST (*loc);
2596 int best_rtx_cost = (elt->cost + 1) >> 1;
2597 struct table_elt *best_elt = elt;
2598
2599 found_better = 0;
2600 for (p = elt->first_same_value; p; p = p->next_same_value)
2601 if (! p->flag
2602 && (GET_CODE (p->exp) == REG
2603 || exp_equiv_p (p->exp, p->exp, 1, 0))
2604 && (ADDRESS_COST (p->exp) < best_addr_cost
2605 || (ADDRESS_COST (p->exp) == best_addr_cost
2606 && (p->cost + 1) >> 1 > best_rtx_cost)))
2607 {
2608 found_better = 1;
2609 best_addr_cost = ADDRESS_COST (p->exp);
2610 best_rtx_cost = (p->cost + 1) >> 1;
2611 best_elt = p;
2612 }
2613
2614 if (found_better)
2615 {
2616 if (validate_change (insn, loc,
2617 canon_reg (copy_rtx (best_elt->exp),
2618 NULL_RTX), 0))
2619 return;
2620 else
2621 best_elt->flag = 1;
2622 }
2623 }
2624 }
2625
2626 /* If the address is a binary operation with the first operand a register
2627 and the second a constant, do the same as above, but looking for
2628 equivalences of the register. Then try to simplify before checking for
2629 the best address to use. This catches a few cases: First is when we
2630 have REG+const and the register is another REG+const. We can often merge
2631 the constants and eliminate one insn and one register. It may also be
2632 that a machine has a cheap REG+REG+const. Finally, this improves the
2633 code on the Alpha for unaligned byte stores. */
2634
2635 if (flag_expensive_optimizations
2636 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
2637 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
2638 && GET_CODE (XEXP (*loc, 0)) == REG
2639 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
2640 {
2641 rtx c = XEXP (*loc, 1);
2642
2643 do_not_record = 0;
2644 hash = HASH (XEXP (*loc, 0), Pmode);
2645 do_not_record = save_do_not_record;
2646 hash_arg_in_memory = save_hash_arg_in_memory;
2647 hash_arg_in_struct = save_hash_arg_in_struct;
2648
2649 elt = lookup (XEXP (*loc, 0), hash, Pmode);
2650 if (elt == 0)
2651 return;
2652
2653 /* We need to find the best (under the criteria documented above) entry
2654 in the class that is valid. We use the `flag' field to indicate
2655 choices that were invalid and iterate until we can't find a better
2656 one that hasn't already been tried. */
2657
2658 for (p = elt->first_same_value; p; p = p->next_same_value)
2659 p->flag = 0;
2660
2661 while (found_better)
2662 {
2663 int best_addr_cost = ADDRESS_COST (*loc);
2664 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2665 struct table_elt *best_elt = elt;
2666 rtx best_rtx = *loc;
2667 int count;
2668
2669 /* This is at worst an O(n^2) algorithm, so limit our search
2670 to the first 32 elements on the list. This avoids trouble
2671 compiling code with very long basic blocks that can easily
2672 call cse_gen_binary so many times that we run out of memory. */
2673
2674 found_better = 0;
2675 for (p = elt->first_same_value, count = 0;
2676 p && count < 32;
2677 p = p->next_same_value, count++)
2678 if (! p->flag
2679 && (GET_CODE (p->exp) == REG
2680 || exp_equiv_p (p->exp, p->exp, 1, 0)))
2681 {
2682 rtx new = cse_gen_binary (GET_CODE (*loc), Pmode, p->exp, c);
2683
2684 if ((ADDRESS_COST (new) < best_addr_cost
2685 || (ADDRESS_COST (new) == best_addr_cost
2686 && (COST (new) + 1) >> 1 > best_rtx_cost)))
2687 {
2688 found_better = 1;
2689 best_addr_cost = ADDRESS_COST (new);
2690 best_rtx_cost = (COST (new) + 1) >> 1;
2691 best_elt = p;
2692 best_rtx = new;
2693 }
2694 }
2695
2696 if (found_better)
2697 {
2698 if (validate_change (insn, loc,
2699 canon_reg (copy_rtx (best_rtx),
2700 NULL_RTX), 0))
2701 return;
2702 else
2703 best_elt->flag = 1;
2704 }
2705 }
2706 }
2707 #endif
2708 }
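/* Sketch of the selection rule documented above, with the costs as
   plain ints: prefer the lower ADDRESS_COST; on ties, prefer the higher
   rtx cost, since replacing a more complex address can eliminate more
   insns.  */
#if 0
static int
better_addr_p (new_addr_cost, new_rtx_cost, best_addr_cost, best_rtx_cost)
     int new_addr_cost, new_rtx_cost, best_addr_cost, best_rtx_cost;
{
  return (new_addr_cost < best_addr_cost
	  || (new_addr_cost == best_addr_cost
	      && new_rtx_cost > best_rtx_cost));
}
#endif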
2709 \f
2710 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
2711 operation (EQ, NE, GT, etc.), follow it back through the hash table and
2712 what values are being compared.
2713
2714 *PARG1 and *PARG2 are updated to contain the rtx representing the values
2715 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
2716 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2717 compared to produce cc0.
2718
2719 The return value is the comparison operator and is either the given
2720 CODE or the code corresponding to the inverse of the comparison. */
2721
2722 static enum rtx_code
2723 find_comparison_args (code, parg1, parg2, pmode1, pmode2)
2724 enum rtx_code code;
2725 rtx *parg1, *parg2;
2726 enum machine_mode *pmode1, *pmode2;
2727 {
2728 rtx arg1, arg2;
2729
2730 arg1 = *parg1, arg2 = *parg2;
2731
2732 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
2733
2734 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2735 {
2736 /* Set non-zero when we find something of interest. */
2737 rtx x = 0;
2738 int reverse_code = 0;
2739 struct table_elt *p = 0;
2740
2741 /* If arg1 is a COMPARE, extract the comparison arguments from it.
2742 On machines with CC0, this is the only case that can occur, since
2743 fold_rtx will return the COMPARE or item being compared with zero
2744 when given CC0. */
2745
2746 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2747 x = arg1;
2748
2749 /* If ARG1 is a comparison operator and CODE is testing for
2750 STORE_FLAG_VALUE, get the inner arguments. */
2751
2752 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
2753 {
2754 if (code == NE
2755 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2756 && code == LT && STORE_FLAG_VALUE == -1)
2757 #ifdef FLOAT_STORE_FLAG_VALUE
2758 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2759 && FLOAT_STORE_FLAG_VALUE < 0)
2760 #endif
2761 )
2762 x = arg1;
2763 else if (code == EQ
2764 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2765 && code == GE && STORE_FLAG_VALUE == -1)
2766 #ifdef FLOAT_STORE_FLAG_VALUE
2767 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2768 && FLOAT_STORE_FLAG_VALUE < 0)
2769 #endif
2770 )
2771 x = arg1, reverse_code = 1;
2772 }
2773
2774 /* ??? We could also check for
2775
2776 (ne (and (eq (...) (const_int 1))) (const_int 0))
2777
2778 and related forms, but let's wait until we see them occurring. */
2779
2780 if (x == 0)
2781 /* Look up ARG1 in the hash table and see if it has an equivalence
2782 that lets us see what is being compared. */
2783 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) % NBUCKETS,
2784 GET_MODE (arg1));
2785 if (p) p = p->first_same_value;
2786
2787 for (; p; p = p->next_same_value)
2788 {
2789 enum machine_mode inner_mode = GET_MODE (p->exp);
2790
2791 /* If the entry isn't valid, skip it. */
2792 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
2793 continue;
2794
2795 if (GET_CODE (p->exp) == COMPARE
2796 /* Another possibility is that this machine has a compare insn
2797 that includes the comparison code. In that case, ARG1 would
2798 be equivalent to a comparison operation that would set ARG1 to
2799 either STORE_FLAG_VALUE or zero. If this is an NE operation,
2800 ORIG_CODE is the actual comparison being done; if it is an EQ,
2801 we must reverse ORIG_CODE. On machine with a negative value
2802 for STORE_FLAG_VALUE, also look at LT and GE operations. */
2803 || ((code == NE
2804 || (code == LT
2805 && GET_MODE_CLASS (inner_mode) == MODE_INT
2806 && (GET_MODE_BITSIZE (inner_mode)
2807 <= HOST_BITS_PER_WIDE_INT)
2808 && (STORE_FLAG_VALUE
2809 & ((HOST_WIDE_INT) 1
2810 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2811 #ifdef FLOAT_STORE_FLAG_VALUE
2812 || (code == LT
2813 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2814 && FLOAT_STORE_FLAG_VALUE < 0)
2815 #endif
2816 )
2817 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
2818 {
2819 x = p->exp;
2820 break;
2821 }
2822 else if ((code == EQ
2823 || (code == GE
2824 && GET_MODE_CLASS (inner_mode) == MODE_INT
2825 && (GET_MODE_BITSIZE (inner_mode)
2826 <= HOST_BITS_PER_WIDE_INT)
2827 && (STORE_FLAG_VALUE
2828 & ((HOST_WIDE_INT) 1
2829 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2830 #ifdef FLOAT_STORE_FLAG_VALUE
2831 || (code == GE
2832 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2833 && FLOAT_STORE_FLAG_VALUE < 0)
2834 #endif
2835 )
2836 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
2837 {
2838 reverse_code = 1;
2839 x = p->exp;
2840 break;
2841 }
2842
2843 /* If this is fp + constant, the equivalent is a better operand since
2844 it may let us predict the value of the comparison. */
2845 else if (NONZERO_BASE_PLUS_P (p->exp))
2846 {
2847 arg1 = p->exp;
2848 continue;
2849 }
2850 }
2851
2852 /* If we didn't find a useful equivalence for ARG1, we are done.
2853 Otherwise, set up for the next iteration. */
2854 if (x == 0)
2855 break;
2856
2857 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
2858 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
2859 code = GET_CODE (x);
2860
2861 if (reverse_code)
2862 code = reverse_condition (code);
2863 }
2864
2865 /* Return our results. Return the modes from before fold_rtx
2866 because fold_rtx might produce const_int, and then it's too late. */
2867 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
2868 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
2869
2870 return code;
2871 }
2872 \f
2873 /* Try to simplify a unary operation CODE whose output mode is to be
2874 MODE with input operand OP whose mode was originally OP_MODE.
2875 Return zero if no simplification can be made. */
2876
2877 rtx
2878 simplify_unary_operation (code, mode, op, op_mode)
2879 enum rtx_code code;
2880 enum machine_mode mode;
2881 rtx op;
2882 enum machine_mode op_mode;
2883 {
2884 register int width = GET_MODE_BITSIZE (mode);
2885
2886 /* The order of these tests is critical so that, for example, we don't
2887 check the wrong mode (input vs. output) for a conversion operation,
2888 such as FIX. At some point, this should be simplified. */
2889
2890 #if !defined(REAL_IS_NOT_DOUBLE) || defined(REAL_ARITHMETIC)
2891
2892 if (code == FLOAT && GET_MODE (op) == VOIDmode
2893 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
2894 {
2895 HOST_WIDE_INT hv, lv;
2896 REAL_VALUE_TYPE d;
2897
2898 if (GET_CODE (op) == CONST_INT)
2899 lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
2900 else
2901 lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
2902
2903 #ifdef REAL_ARITHMETIC
2904 REAL_VALUE_FROM_INT (d, lv, hv);
2905 #else
2906 if (hv < 0)
2907 {
2908 d = (double) (~ hv);
2909 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
2910 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
2911 d += (double) (unsigned HOST_WIDE_INT) (~ lv);
2912 d = (- d - 1.0);
2913 }
2914 else
2915 {
2916 d = (double) hv;
2917 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
2918 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
2919 d += (double) (unsigned HOST_WIDE_INT) lv;
2920 }
2921 #endif /* REAL_ARITHMETIC */
2922
2923 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
2924 }
2925 else if (code == UNSIGNED_FLOAT && GET_MODE (op) == VOIDmode
2926 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
2927 {
2928 HOST_WIDE_INT hv, lv;
2929 REAL_VALUE_TYPE d;
2930
2931 if (GET_CODE (op) == CONST_INT)
2932 lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
2933 else
2934 lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
2935
2936 if (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT * 2)
2937 ;
2938 else
2939 hv = 0, lv &= GET_MODE_MASK (op_mode);
2940
2941 #ifdef REAL_ARITHMETIC
2942 REAL_VALUE_FROM_UNSIGNED_INT (d, lv, hv);
2943 #else
2944
2945 d = (double) hv;
2946 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
2947 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
2948 d += (double) (unsigned HOST_WIDE_INT) lv;
2949 #endif /* REAL_ARITHMETIC */
2950
2951 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
2952 }
2953 #endif
2954
2955 if (GET_CODE (op) == CONST_INT
2956 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
2957 {
2958 register HOST_WIDE_INT arg0 = INTVAL (op);
2959 register HOST_WIDE_INT val;
2960
2961 switch (code)
2962 {
2963 case NOT:
2964 val = ~ arg0;
2965 break;
2966
2967 case NEG:
2968 val = - arg0;
2969 break;
2970
2971 case ABS:
2972 val = (arg0 >= 0 ? arg0 : - arg0);
2973 break;
2974
2975 case FFS:
2976 /* Don't use ffs here. Instead, get low order bit and then its
2977 number. If arg0 is zero, this will return 0, as desired. */
2978 arg0 &= GET_MODE_MASK (mode);
2979 val = exact_log2 (arg0 & (- arg0)) + 1;
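/* E.g. for arg0 == 12 (binary 1100): arg0 & -arg0 == 4,
   exact_log2 (4) == 2, so val == 3, the 1-based position
   of the lowest set bit.  */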
2980 break;
2981
2982 case TRUNCATE:
2983 val = arg0;
2984 break;
2985
2986 case ZERO_EXTEND:
2987 if (op_mode == VOIDmode)
2988 op_mode = mode;
2989 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
2990 {
2991 /* If we were really extending the mode,
2992 we would have to distinguish between zero-extension
2993 and sign-extension. */
2994 if (width != GET_MODE_BITSIZE (op_mode))
2995 abort ();
2996 val = arg0;
2997 }
2998 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
2999 val = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3000 else
3001 return 0;
3002 break;
3003
3004 case SIGN_EXTEND:
3005 if (op_mode == VOIDmode)
3006 op_mode = mode;
3007 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
3008 {
3009 /* If we were really extending the mode,
3010 we would have to distinguish between zero-extension
3011 and sign-extension. */
3012 if (width != GET_MODE_BITSIZE (op_mode))
3013 abort ();
3014 val = arg0;
3015 }
3016 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3017 {
3018 val
3019 = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3020 if (val
3021 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (op_mode) - 1)))
3022 val -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3023 }
3024 else
3025 return 0;
3026 break;
3027
3028 case SQRT:
3029 return 0;
3030
3031 default:
3032 abort ();
3033 }
3034
3035 /* Clear the bits that don't belong in our mode,
3036 unless they and our sign bit are all one.
3037 So we get either a reasonable negative value or a reasonable
3038 unsigned value for this mode. */
3039 if (width < HOST_BITS_PER_WIDE_INT
3040 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3041 != ((HOST_WIDE_INT) (-1) << (width - 1))))
3042 val &= ((HOST_WIDE_INT) 1 << width) - 1;
3043
3044 return GEN_INT (val);
3045 }
3046
3047 /* We can do some operations on integer CONST_DOUBLEs. Also allow
3048 for a DImode operation on a CONST_INT. */
3049 else if (GET_MODE (op) == VOIDmode && width == HOST_BITS_PER_INT * 2
3050 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3051 {
3052 HOST_WIDE_INT l1, h1, lv, hv;
3053
3054 if (GET_CODE (op) == CONST_DOUBLE)
3055 l1 = CONST_DOUBLE_LOW (op), h1 = CONST_DOUBLE_HIGH (op);
3056 else
3057 l1 = INTVAL (op), h1 = l1 < 0 ? -1 : 0;
3058
3059 switch (code)
3060 {
3061 case NOT:
3062 lv = ~ l1;
3063 hv = ~ h1;
3064 break;
3065
3066 case NEG:
3067 neg_double (l1, h1, &lv, &hv);
3068 break;
3069
3070 case ABS:
3071 if (h1 < 0)
3072 neg_double (l1, h1, &lv, &hv);
3073 else
3074 lv = l1, hv = h1;
3075 break;
3076
3077 case FFS:
3078 hv = 0;
3079 if (l1 == 0)
3080 lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & (-h1)) + 1;
3081 else
3082 lv = exact_log2 (l1 & (-l1)) + 1;
3083 break;
3084
3085 case TRUNCATE:
3086 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3087 return GEN_INT (l1 & GET_MODE_MASK (mode));
3088 else
3089 return 0;
3090 break;
3091
3092 case ZERO_EXTEND:
3093 if (op_mode == VOIDmode
3094 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3095 return 0;
3096
3097 hv = 0;
3098 lv = l1 & GET_MODE_MASK (op_mode);
3099 break;
3100
3101 case SIGN_EXTEND:
3102 if (op_mode == VOIDmode
3103 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3104 return 0;
3105 else
3106 {
3107 lv = l1 & GET_MODE_MASK (op_mode);
3108 if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
3109 && (lv & ((HOST_WIDE_INT) 1
3110 << (GET_MODE_BITSIZE (op_mode) - 1))) != 0)
3111 lv -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3112
3113 hv = (lv < 0) ? ~ (HOST_WIDE_INT) 0 : 0;
3114 }
3115 break;
3116
3117 case SQRT:
3118 return 0;
3119
3120 default:
3121 return 0;
3122 }
3123
3124 return immed_double_const (lv, hv, mode);
3125 }
3126
3127 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3128 else if (GET_CODE (op) == CONST_DOUBLE
3129 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3130 {
3131 REAL_VALUE_TYPE d;
3132 jmp_buf handler;
3133 rtx x;
3134
3135 if (setjmp (handler))
3136 /* There used to be a warning here, but that is inadvisable.
3137 People may want to cause traps, and the natural way
3138 to do it should not get a warning. */
3139 return 0;
3140
3141 set_float_handler (handler);
3142
3143 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3144
3145 switch (code)
3146 {
3147 case NEG:
3148 d = REAL_VALUE_NEGATE (d);
3149 break;
3150
3151 case ABS:
3152 if (REAL_VALUE_NEGATIVE (d))
3153 d = REAL_VALUE_NEGATE (d);
3154 break;
3155
3156 case FLOAT_TRUNCATE:
3157 d = real_value_truncate (mode, d);
3158 break;
3159
3160 case FLOAT_EXTEND:
3161 /* All this does is change the mode. */
3162 break;
3163
3164 case FIX:
3165 d = REAL_VALUE_RNDZINT (d);
3166 break;
3167
3168 case UNSIGNED_FIX:
3169 d = REAL_VALUE_UNSIGNED_RNDZINT (d);
3170 break;
3171
3172 case SQRT:
3173 return 0;
3174
3175 default:
3176 abort ();
3177 }
3178
3179 x = CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3180 set_float_handler (NULL_PTR);
3181 return x;
3182 }
3183 else if (GET_CODE (op) == CONST_DOUBLE && GET_MODE_CLASS (mode) == MODE_INT
3184 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
3185 {
3186 REAL_VALUE_TYPE d;
3187 jmp_buf handler;
3188 HOST_WIDE_INT val;
3189
3190 if (setjmp (handler))
3191 return 0;
3192
3193 set_float_handler (handler);
3194
3195 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3196
3197 switch (code)
3198 {
3199 case FIX:
3200 val = REAL_VALUE_FIX (d);
3201 break;
3202
3203 case UNSIGNED_FIX:
3204 val = REAL_VALUE_UNSIGNED_FIX (d);
3205 break;
3206
3207 default:
3208 abort ();
3209 }
3210
3211 set_float_handler (NULL_PTR);
3212
3213 /* Clear the bits that don't belong in our mode,
3214 unless they and our sign bit are all one.
3215 So we get either a reasonable negative value or a reasonable
3216 unsigned value for this mode. */
3217 if (width < HOST_BITS_PER_WIDE_INT
3218 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3219 != ((HOST_WIDE_INT) (-1) << (width - 1))))
3220 val &= ((HOST_WIDE_INT) 1 << width) - 1;
3221
3222 return GEN_INT (val);
3223 }
3224 #endif
3225 /* This was formerly used only for non-IEEE float.
3226 eggert@twinsun.com says it is safe for IEEE also. */
3227 else
3228 {
3229 /* There are some simplifications we can do even if the operands
3230 aren't constant. */
3231 switch (code)
3232 {
3233 case NEG:
3234 case NOT:
3235 /* (not (not X)) == X, similarly for NEG. */
3236 if (GET_CODE (op) == code)
3237 return XEXP (op, 0);
3238 break;
3239
3240 case SIGN_EXTEND:
3241 /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2))))
3242 becomes just the MINUS if its mode is MODE. This allows
3243 folding switch statements on machines using casesi (such as
3244 the Vax). */
3245 if (GET_CODE (op) == TRUNCATE
3246 && GET_MODE (XEXP (op, 0)) == mode
3247 && GET_CODE (XEXP (op, 0)) == MINUS
3248 && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF
3249 && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF)
3250 return XEXP (op, 0);
3251 break;
3252 }
3253
3254 return 0;
3255 }
3256 }
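/* A standalone sketch of the SIGN_EXTEND folding above for operand
   widths below the host word size: mask ARG0 to the operand mode, then
   subtract 2**OP_WIDTH if the sign bit of that mode is set.  E.g. with
   OP_WIDTH == 8, 0xFF folds to -1 and 0x7F stays 127.  */
#if 0
static HOST_WIDE_INT
sketch_sign_extend (arg0, op_width)
     HOST_WIDE_INT arg0;
     int op_width;
{
  HOST_WIDE_INT val = arg0 & ~((HOST_WIDE_INT) (-1) << op_width);
  if (val & ((HOST_WIDE_INT) 1 << (op_width - 1)))
    val -= (HOST_WIDE_INT) 1 << op_width;
  return val;
}
#endif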
3257 \f
3258 /* Simplify a binary operation CODE with result mode MODE, operating on OP0
3259 and OP1. Return 0 if no simplification is possible.
3260
3261 Don't use this for relational operations such as EQ or LT.
3262 Use simplify_relational_operation instead. */
3263
3264 rtx
3265 simplify_binary_operation (code, mode, op0, op1)
3266 enum rtx_code code;
3267 enum machine_mode mode;
3268 rtx op0, op1;
3269 {
3270 register HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
3271 HOST_WIDE_INT val;
3272 int width = GET_MODE_BITSIZE (mode);
3273 rtx tem;
3274
3275 /* Relational operations don't work here. We must know the mode
3276 of the operands in order to do the comparison correctly.
3277 Assuming a full word can give incorrect results.
3278 Consider comparing 128 with -128 in QImode. */
3279
3280 if (GET_RTX_CLASS (code) == '<')
3281 abort ();
3282
3283 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3284 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3285 && GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
3286 && mode == GET_MODE (op0) && mode == GET_MODE (op1))
3287 {
3288 REAL_VALUE_TYPE f0, f1, value;
3289 jmp_buf handler;
3290
3291 if (setjmp (handler))
3292 return 0;
3293
3294 set_float_handler (handler);
3295
3296 REAL_VALUE_FROM_CONST_DOUBLE (f0, op0);
3297 REAL_VALUE_FROM_CONST_DOUBLE (f1, op1);
3298 f0 = real_value_truncate (mode, f0);
3299 f1 = real_value_truncate (mode, f1);
3300
3301 #ifdef REAL_ARITHMETIC
3302 REAL_ARITHMETIC (value, rtx_to_tree_code (code), f0, f1);
3303 #else
3304 switch (code)
3305 {
3306 case PLUS:
3307 value = f0 + f1;
3308 break;
3309 case MINUS:
3310 value = f0 - f1;
3311 break;
3312 case MULT:
3313 value = f0 * f1;
3314 break;
3315 case DIV:
3316 #ifndef REAL_INFINITY
3317 if (f1 == 0)
3318 return 0;
3319 #endif
3320 value = f0 / f1;
3321 break;
3322 case SMIN:
3323 value = MIN (f0, f1);
3324 break;
3325 case SMAX:
3326 value = MAX (f0, f1);
3327 break;
3328 default:
3329 abort ();
3330 }
3331 #endif
3332
3333 value = real_value_truncate (mode, value);
3334 set_float_handler (NULL_PTR);
3335 return CONST_DOUBLE_FROM_REAL_VALUE (value, mode);
3336 }
3337 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3338
3339 /* We can fold some multi-word operations. */
3340 if (GET_MODE_CLASS (mode) == MODE_INT
3341 && width == HOST_BITS_PER_WIDE_INT * 2
3342 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
3343 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
3344 {
3345 HOST_WIDE_INT l1, l2, h1, h2, lv, hv;
3346
3347 if (GET_CODE (op0) == CONST_DOUBLE)
3348 l1 = CONST_DOUBLE_LOW (op0), h1 = CONST_DOUBLE_HIGH (op0);
3349 else
3350 l1 = INTVAL (op0), h1 = l1 < 0 ? -1 : 0;
3351
3352 if (GET_CODE (op1) == CONST_DOUBLE)
3353 l2 = CONST_DOUBLE_LOW (op1), h2 = CONST_DOUBLE_HIGH (op1);
3354 else
3355 l2 = INTVAL (op1), h2 = l2 < 0 ? -1 : 0;
3356
3357 switch (code)
3358 {
3359 case MINUS:
3360 /* A - B == A + (-B). */
3361 neg_double (l2, h2, &lv, &hv);
3362 l2 = lv, h2 = hv;
3363
3364 /* ... fall through ... */
3365
3366 case PLUS:
3367 add_double (l1, h1, l2, h2, &lv, &hv);
3368 break;
3369
3370 case MULT:
3371 mul_double (l1, h1, l2, h2, &lv, &hv);
3372 break;
3373
3374 case DIV: case MOD: case UDIV: case UMOD:
3375 /* We'd need to include tree.h to do this and it doesn't seem worth
3376 it. */
3377 return 0;
3378
3379 case AND:
3380 lv = l1 & l2, hv = h1 & h2;
3381 break;
3382
3383 case IOR:
3384 lv = l1 | l2, hv = h1 | h2;
3385 break;
3386
3387 case XOR:
3388 lv = l1 ^ l2, hv = h1 ^ h2;
3389 break;
3390
3391 case SMIN:
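/* Signed double-word comparison: the high words decide unless they
   are equal, in which case the low words compare as unsigned.  The
   SMAX/UMIN/UMAX cases below follow the same pattern.  */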
3392 if (h1 < h2
3393 || (h1 == h2
3394 && ((unsigned HOST_WIDE_INT) l1
3395 < (unsigned HOST_WIDE_INT) l2)))
3396 lv = l1, hv = h1;
3397 else
3398 lv = l2, hv = h2;
3399 break;
3400
3401 case SMAX:
3402 if (h1 > h2
3403 || (h1 == h2
3404 && ((unsigned HOST_WIDE_INT) l1
3405 > (unsigned HOST_WIDE_INT) l2)))
3406 lv = l1, hv = h1;
3407 else
3408 lv = l2, hv = h2;
3409 break;
3410
3411 case UMIN:
3412 if ((unsigned HOST_WIDE_INT) h1 < (unsigned HOST_WIDE_INT) h2
3413 || (h1 == h2
3414 && ((unsigned HOST_WIDE_INT) l1
3415 < (unsigned HOST_WIDE_INT) l2)))
3416 lv = l1, hv = h1;
3417 else
3418 lv = l2, hv = h2;
3419 break;
3420
3421 case UMAX:
3422 if ((unsigned HOST_WIDE_INT) h1 > (unsigned HOST_WIDE_INT) h2
3423 || (h1 == h2
3424 && ((unsigned HOST_WIDE_INT) l1
3425 > (unsigned HOST_WIDE_INT) l2)))
3426 lv = l1, hv = h1;
3427 else
3428 lv = l2, hv = h2;
3429 break;
3430
3431 case LSHIFTRT: case ASHIFTRT:
3432 case ASHIFT:
3433 case ROTATE: case ROTATERT:
3434 #ifdef SHIFT_COUNT_TRUNCATED
3435 if (SHIFT_COUNT_TRUNCATED)
3436 l2 &= (GET_MODE_BITSIZE (mode) - 1), h2 = 0;
3437 #endif
3438
3439 if (h2 != 0 || l2 < 0 || l2 >= GET_MODE_BITSIZE (mode))
3440 return 0;
3441
3442 if (code == LSHIFTRT || code == ASHIFTRT)
3443 rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
3444 code == ASHIFTRT);
3445 else if (code == ASHIFT)
3446 lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv, 1);
3447 else if (code == ROTATE)
3448 lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3449 else /* code == ROTATERT */
3450 rrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3451 break;
3452
3453 default:
3454 return 0;
3455 }
3456
3457 return immed_double_const (lv, hv, mode);
3458 }
3459
3460 if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT
3461 || width > HOST_BITS_PER_WIDE_INT || width == 0)
3462 {
3463 /* Even if we can't compute a constant result,
3464 there are some cases worth simplifying. */
3465
3466 switch (code)
3467 {
3468 case PLUS:
3469 /* In IEEE floating point, x+0 is not the same as x. Similarly
3470 for the other optimizations below. */
3471 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3472 && FLOAT_MODE_P (mode) && ! flag_fast_math)
3473 break;
3474
3475 if (op1 == CONST0_RTX (mode))
3476 return op0;
3477
3478 /* ((-a) + b) -> (b - a) and similarly for (a + (-b)) */
3479 if (GET_CODE (op0) == NEG)
3480 return cse_gen_binary (MINUS, mode, op1, XEXP (op0, 0));
3481 else if (GET_CODE (op1) == NEG)
3482 return cse_gen_binary (MINUS, mode, op0, XEXP (op1, 0));
3483
3484 /* Handle both-operands-constant cases. We can only add
3485 CONST_INTs to constants since the sum of relocatable symbols
3486 can't be handled by most assemblers. Don't add CONST_INT
3487 to CONST_INT since overflow won't be computed properly if wider
3488 than HOST_BITS_PER_WIDE_INT. */
3489
3490 if (CONSTANT_P (op0) && GET_MODE (op0) != VOIDmode
3491 && GET_CODE (op1) == CONST_INT)
3492 return plus_constant (op0, INTVAL (op1));
3493 else if (CONSTANT_P (op1) && GET_MODE (op1) != VOIDmode
3494 && GET_CODE (op0) == CONST_INT)
3495 return plus_constant (op1, INTVAL (op0));
3496
3497 See if this is something like X * C + X or vice versa or
3498 if the multiplication is written as a shift. If so, we can
3499 distribute and make a new multiply, shift, or maybe just
3500 have X (for example, (mult X 2) plus (neg X)). But don't make a
3501 real multiply if we didn't have one before. */
3502
3503 if (! FLOAT_MODE_P (mode))
3504 {
3505 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3506 rtx lhs = op0, rhs = op1;
3507 int had_mult = 0;
3508
3509 if (GET_CODE (lhs) == NEG)
3510 coeff0 = -1, lhs = XEXP (lhs, 0);
3511 else if (GET_CODE (lhs) == MULT
3512 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3513 {
3514 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3515 had_mult = 1;
3516 }
3517 else if (GET_CODE (lhs) == ASHIFT
3518 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3519 && INTVAL (XEXP (lhs, 1)) >= 0
3520 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3521 {
3522 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3523 lhs = XEXP (lhs, 0);
3524 }
3525
3526 if (GET_CODE (rhs) == NEG)
3527 coeff1 = -1, rhs = XEXP (rhs, 0);
3528 else if (GET_CODE (rhs) == MULT
3529 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3530 {
3531 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3532 had_mult = 1;
3533 }
3534 else if (GET_CODE (rhs) == ASHIFT
3535 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3536 && INTVAL (XEXP (rhs, 1)) >= 0
3537 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3538 {
3539 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3540 rhs = XEXP (rhs, 0);
3541 }
3542
3543 if (rtx_equal_p (lhs, rhs))
3544 {
3545 tem = cse_gen_binary (MULT, mode, lhs,
3546 GEN_INT (coeff0 + coeff1));
3547 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3548 }
3549 }
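/* Worked example: for (plus (mult X 3) X), coeff0 == 3 and
   coeff1 == 1, so the result is (mult X 4).  A sum such as
   (plus (ashift X 2) (neg X)) would give (mult X 3), but is
   rejected because neither term was a real multiply.  */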
3550
3551 /* If one of the operands is a PLUS or a MINUS, see if we can
3552 simplify this by the associative law.
3553 Don't use the associative law for floating point.
3554 The inaccuracy makes it nonassociative,
3555 and subtle programs can break if operations are associated. */
3556
3557 if (INTEGRAL_MODE_P (mode)
3558 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3559 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3560 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3561 return tem;
3562 break;
3563
3564 case COMPARE:
3565 #ifdef HAVE_cc0
3566 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3567 using cc0, in which case we want to leave it as a COMPARE
3568 so we can distinguish it from a register-register-copy.
3569
3570 In IEEE floating point, x-0 is not the same as x. */
3571
3572 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3573 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3574 && op1 == CONST0_RTX (mode))
3575 return op0;
3576 #else
3577 /* Do nothing here. */
3578 #endif
3579 break;
3580
3581 case MINUS:
3582 /* None of these optimizations can be done for IEEE
3583 floating point. */
3584 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3585 && FLOAT_MODE_P (mode) && ! flag_fast_math)
3586 break;
3587
3588 /* We can't assume x-x is 0 even with non-IEEE floating point,
3589 but since it is zero except in very strange circumstances, we
3590 will treat it as zero with -ffast-math. */
3591 if (rtx_equal_p (op0, op1)
3592 && ! side_effects_p (op0)
3593 && (! FLOAT_MODE_P (mode) || flag_fast_math))
3594 return CONST0_RTX (mode);
3595
3596 /* Change subtraction from zero into negation. */
3597 if (op0 == CONST0_RTX (mode))
3598 return gen_rtx (NEG, mode, op1);
3599
3600 /* (-1 - a) is ~a. */
3601 if (op0 == constm1_rtx)
3602 return gen_rtx (NOT, mode, op1);
3603
3604 /* Subtracting 0 has no effect. */
3605 if (op1 == CONST0_RTX (mode))
3606 return op0;
3607
3608 /* See if this is something like X * C - X or vice versa or
3609 if the multiplication is written as a shift. If so, we can
3610 distribute and make a new multiply, shift, or maybe just
3611 have X (if C is 2 in the example above). But don't make a
3612 real multiply if we didn't have one before. */
3613
3614 if (! FLOAT_MODE_P (mode))
3615 {
3616 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3617 rtx lhs = op0, rhs = op1;
3618 int had_mult = 0;
3619
3620 if (GET_CODE (lhs) == NEG)
3621 coeff0 = -1, lhs = XEXP (lhs, 0);
3622 else if (GET_CODE (lhs) == MULT
3623 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3624 {
3625 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3626 had_mult = 1;
3627 }
3628 else if (GET_CODE (lhs) == ASHIFT
3629 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3630 && INTVAL (XEXP (lhs, 1)) >= 0
3631 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3632 {
3633 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3634 lhs = XEXP (lhs, 0);
3635 }
3636
3637 if (GET_CODE (rhs) == NEG)
3638 coeff1 = -1, rhs = XEXP (rhs, 0);
3639 else if (GET_CODE (rhs) == MULT
3640 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3641 {
3642 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3643 had_mult = 1;
3644 }
3645 else if (GET_CODE (rhs) == ASHIFT
3646 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3647 && INTVAL (XEXP (rhs, 1)) >= 0
3648 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3649 {
3650 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3651 rhs = XEXP (rhs, 0);
3652 }
3653
3654 if (rtx_equal_p (lhs, rhs))
3655 {
3656 tem = cse_gen_binary (MULT, mode, lhs,
3657 GEN_INT (coeff0 - coeff1));
3658 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3659 }
3660 }
3661
3662 /* (a - (-b)) -> (a + b). */
3663 if (GET_CODE (op1) == NEG)
3664 return cse_gen_binary (PLUS, mode, op0, XEXP (op1, 0));
3665
3666 /* If one of the operands is a PLUS or a MINUS, see if we can
3667 simplify this by the associative law.
3668 Don't use the associative law for floating point.
3669 The inaccuracy makes it nonassociative,
3670 and subtle programs can break if operations are associated. */
3671
3672 if (INTEGRAL_MODE_P (mode)
3673 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3674 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3675 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3676 return tem;
3677
3678 /* Don't let a relocatable value get a negative coeff. */
3679 if (GET_CODE (op1) == CONST_INT && GET_MODE (op0) != VOIDmode)
3680 return plus_constant (op0, - INTVAL (op1));
3681 break;
3682
3683 case MULT:
3684 if (op1 == constm1_rtx)
3685 {
3686 tem = simplify_unary_operation (NEG, mode, op0, mode);
3687
3688 return tem ? tem : gen_rtx (NEG, mode, op0);
3689 }
3690
3691 /* In IEEE floating point, x*0 is not always 0. */
3692 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3693 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3694 && op1 == CONST0_RTX (mode)
3695 && ! side_effects_p (op0))
3696 return op1;
3697
3698 /* In IEEE floating point, x*1 is not equivalent to x for NaNs.
3699 However, ANSI says we can drop signals,
3700 so we can do this anyway. */
3701 if (op1 == CONST1_RTX (mode))
3702 return op0;
3703
3704 /* Convert multiply by constant power of two into shift unless
3705 we are still generating RTL. This test is a kludge. */
3706 if (GET_CODE (op1) == CONST_INT
3707 && (val = exact_log2 (INTVAL (op1))) >= 0
3708 && ! rtx_equal_function_value_matters)
3709 return gen_rtx (ASHIFT, mode, op0, GEN_INT (val));
3710
3711 if (GET_CODE (op1) == CONST_DOUBLE
3712 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT)
3713 {
3714 REAL_VALUE_TYPE d;
3715 jmp_buf handler;
3716 int op1is2, op1ism1;
3717
3718 if (setjmp (handler))
3719 return 0;
3720
3721 set_float_handler (handler);
3722 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
3723 op1is2 = REAL_VALUES_EQUAL (d, dconst2);
3724 op1ism1 = REAL_VALUES_EQUAL (d, dconstm1);
3725 set_float_handler (NULL_PTR);
3726
3727 /* x*2 is x+x, and x*(-1) is -x. */
3728 if (op1is2 && GET_MODE (op0) == mode)
3729 return gen_rtx (PLUS, mode, op0, copy_rtx (op0));
3730
3731 else if (op1ism1 && GET_MODE (op0) == mode)
3732 return gen_rtx (NEG, mode, op0);
3733 }
3734 break;
3735
3736 case IOR:
3737 if (op1 == const0_rtx)
3738 return op0;
3739 if (GET_CODE (op1) == CONST_INT
3740 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3741 return op1;
3742 if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3743 return op0;
3744 /* A | (~A) -> -1 */
3745 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
3746 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
3747 && ! side_effects_p (op0)
3748 && GET_MODE_CLASS (mode) != MODE_CC)
3749 return constm1_rtx;
3750 break;
3751
3752 case XOR:
3753 if (op1 == const0_rtx)
3754 return op0;
3755 if (GET_CODE (op1) == CONST_INT
3756 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3757 return gen_rtx (NOT, mode, op0);
3758 if (op0 == op1 && ! side_effects_p (op0)
3759 && GET_MODE_CLASS (mode) != MODE_CC)
3760 return const0_rtx;
3761 break;
3762
3763 case AND:
3764 if (op1 == const0_rtx && ! side_effects_p (op0))
3765 return const0_rtx;
3766 if (GET_CODE (op1) == CONST_INT
3767 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3768 return op0;
3769 if (op0 == op1 && ! side_effects_p (op0)
3770 && GET_MODE_CLASS (mode) != MODE_CC)
3771 return op0;
3772 /* A & (~A) -> 0 */
3773 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
3774 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
3775 && ! side_effects_p (op0)
3776 && GET_MODE_CLASS (mode) != MODE_CC)
3777 return const0_rtx;
3778 break;
3779
3780 case UDIV:
3781 /* Convert divide by power of two into shift (divide by 1 handled
3782 below). */
3783 if (GET_CODE (op1) == CONST_INT
3784 && (arg1 = exact_log2 (INTVAL (op1))) > 0)
3785 return gen_rtx (LSHIFTRT, mode, op0, GEN_INT (arg1));
3786
3787 /* ... fall through ... */
3788
3789 case DIV:
3790 if (op1 == CONST1_RTX (mode))
3791 return op0;
3792
3793 /* In IEEE floating point, 0/x is not always 0. */
3794 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3795 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3796 && op0 == CONST0_RTX (mode)
3797 && ! side_effects_p (op1))
3798 return op0;
3799
3800 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3801 /* Change division by a constant into multiplication. Only do
3802 this with -ffast-math until an expert says it is safe in
3803 general. */
3804 else if (GET_CODE (op1) == CONST_DOUBLE
3805 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT
3806 && op1 != CONST0_RTX (mode)
3807 && flag_fast_math)
3808 {
3809 REAL_VALUE_TYPE d;
3810 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
3811
3812 if (! REAL_VALUES_EQUAL (d, dconst0))
3813 {
3814 #if defined (REAL_ARITHMETIC)
3815 REAL_ARITHMETIC (d, rtx_to_tree_code (DIV), dconst1, d);
3816 return gen_rtx (MULT, mode, op0,
3817 CONST_DOUBLE_FROM_REAL_VALUE (d, mode));
3818 #else
3819 return gen_rtx (MULT, mode, op0,
3820 CONST_DOUBLE_FROM_REAL_VALUE (1./d, mode));
3821 #endif
3822 }
3823 }
3824 #endif
3825 break;
3826
3827 case UMOD:
3828 /* Handle modulus by power of two (mod with 1 handled below). */
3829 if (GET_CODE (op1) == CONST_INT
3830 && exact_log2 (INTVAL (op1)) > 0)
3831 return gen_rtx (AND, mode, op0, GEN_INT (INTVAL (op1) - 1));
3832
3833 /* ... fall through ... */
3834
3835 case MOD:
3836 if ((op0 == const0_rtx || op1 == const1_rtx)
3837 && ! side_effects_p (op0) && ! side_effects_p (op1))
3838 return const0_rtx;
3839 break;
3840
3841 case ROTATERT:
3842 case ROTATE:
3843 /* Rotating ~0 always results in ~0. */
3844 if (GET_CODE (op0) == CONST_INT && width <= HOST_BITS_PER_WIDE_INT
3845 && INTVAL (op0) == GET_MODE_MASK (mode)
3846 && ! side_effects_p (op1))
3847 return op0;
3848
3849 /* ... fall through ... */
3850
3851 case ASHIFT:
3852 case ASHIFTRT:
3853 case LSHIFTRT:
3854 if (op1 == const0_rtx)
3855 return op0;
3856 if (op0 == const0_rtx && ! side_effects_p (op1))
3857 return op0;
3858 break;
3859
3860 case SMIN:
3861 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
3862 && INTVAL (op1) == (HOST_WIDE_INT) 1 << (width - 1)
3863 && ! side_effects_p (op0))
3864 return op1;
3865 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3866 return op0;
3867 break;
3868
3869 case SMAX:
3870 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
3871 && (INTVAL (op1)
3872 == (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode) >> 1)
3873 && ! side_effects_p (op0))
3874 return op1;
3875 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3876 return op0;
3877 break;
3878
3879 case UMIN:
3880 if (op1 == const0_rtx && ! side_effects_p (op0))
3881 return op1;
3882 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3883 return op0;
3884 break;
3885
3886 case UMAX:
3887 if (op1 == constm1_rtx && ! side_effects_p (op0))
3888 return op1;
3889 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3890 return op0;
3891 break;
3892
3893 default:
3894 abort ();
3895 }
3896
3897 return 0;
3898 }
3899
3900 /* Get the integer argument values in two forms:
3901 zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S. */
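/* For instance (hypothetical values), with an 8-bit MODE and
   op0 == (const_int 255), ARG0 becomes 255 (zero-extended) while
   ARG0S becomes -1 (sign-extended). */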
3902
3903 arg0 = INTVAL (op0);
3904 arg1 = INTVAL (op1);
3905
3906 if (width < HOST_BITS_PER_WIDE_INT)
3907 {
3908 arg0 &= ((HOST_WIDE_INT) 1 << width) - 1;
3909 arg1 &= ((HOST_WIDE_INT) 1 << width) - 1;
3910
3911 arg0s = arg0;
3912 if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1)))
3913 arg0s |= ((HOST_WIDE_INT) (-1) << width);
3914
3915 arg1s = arg1;
3916 if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1)))
3917 arg1s |= ((HOST_WIDE_INT) (-1) << width);
3918 }
3919 else
3920 {
3921 arg0s = arg0;
3922 arg1s = arg1;
3923 }
3924
3925 /* Compute the value of the arithmetic. */
3926
3927 switch (code)
3928 {
3929 case PLUS:
3930 val = arg0s + arg1s;
3931 break;
3932
3933 case MINUS:
3934 val = arg0s - arg1s;
3935 break;
3936
3937 case MULT:
3938 val = arg0s * arg1s;
3939 break;
3940
3941 case DIV:
3942 if (arg1s == 0)
3943 return 0;
3944 val = arg0s / arg1s;
3945 break;
3946
3947 case MOD:
3948 if (arg1s == 0)
3949 return 0;
3950 val = arg0s % arg1s;
3951 break;
3952
3953 case UDIV:
3954 if (arg1 == 0)
3955 return 0;
3956 val = (unsigned HOST_WIDE_INT) arg0 / arg1;
3957 break;
3958
3959 case UMOD:
3960 if (arg1 == 0)
3961 return 0;
3962 val = (unsigned HOST_WIDE_INT) arg0 % arg1;
3963 break;
3964
3965 case AND:
3966 val = arg0 & arg1;
3967 break;
3968
3969 case IOR:
3970 val = arg0 | arg1;
3971 break;
3972
3973 case XOR:
3974 val = arg0 ^ arg1;
3975 break;
3976
3977 case LSHIFTRT:
3978 /* If shift count is undefined, don't fold it; let the machine do
3979 what it wants. But truncate it if the machine will do that. */
3980 if (arg1 < 0)
3981 return 0;
3982
3983 #ifdef SHIFT_COUNT_TRUNCATED
3984 if (SHIFT_COUNT_TRUNCATED)
3985 arg1 %= width;
3986 #endif
3987
3988 val = ((unsigned HOST_WIDE_INT) arg0) >> arg1;
3989 break;
3990
3991 case ASHIFT:
3992 if (arg1 < 0)
3993 return 0;
3994
3995 #ifdef SHIFT_COUNT_TRUNCATED
3996 if (SHIFT_COUNT_TRUNCATED)
3997 arg1 %= width;
3998 #endif
3999
4000 val = ((unsigned HOST_WIDE_INT) arg0) << arg1;
4001 break;
4002
4003 case ASHIFTRT:
4004 if (arg1 < 0)
4005 return 0;
4006
4007 #ifdef SHIFT_COUNT_TRUNCATED
4008 if (SHIFT_COUNT_TRUNCATED)
4009 arg1 %= width;
4010 #endif
4011
4012 val = arg0s >> arg1;
4013
4014 /* The bootstrap compiler may not have sign-extended the right shift.
4015 Manually extend the sign to ensure the bootstrap cc matches gcc. */
4016 if (arg0s < 0 && arg1 > 0)
4017 val |= ((HOST_WIDE_INT) -1) << (HOST_BITS_PER_WIDE_INT - arg1);
4018
4019 break;
4020
4021 case ROTATERT:
4022 if (arg1 < 0)
4023 return 0;
4024
4025 arg1 %= width;
4026 val = ((((unsigned HOST_WIDE_INT) arg0) << (width - arg1))
4027 | (((unsigned HOST_WIDE_INT) arg0) >> arg1));
4028 break;
4029
4030 case ROTATE:
4031 if (arg1 < 0)
4032 return 0;
4033
4034 arg1 %= width;
4035 val = ((((unsigned HOST_WIDE_INT) arg0) << arg1)
4036 | (((unsigned HOST_WIDE_INT) arg0) >> (width - arg1)));
4037 break;
4038
4039 case COMPARE:
4040 /* Do nothing here. */
4041 return 0;
4042
4043 case SMIN:
4044 val = arg0s <= arg1s ? arg0s : arg1s;
4045 break;
4046
4047 case UMIN:
4048 val = ((unsigned HOST_WIDE_INT) arg0
4049 <= (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4050 break;
4051
4052 case SMAX:
4053 val = arg0s > arg1s ? arg0s : arg1s;
4054 break;
4055
4056 case UMAX:
4057 val = ((unsigned HOST_WIDE_INT) arg0
4058 > (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4059 break;
4060
4061 default:
4062 abort ();
4063 }
4064
4065 /* Clear the bits that don't belong in our mode, unless they and our sign
4066 bit are all one. So we get either a reasonable negative value or a
4067 reasonable unsigned value for this mode. */
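/* E.g. (hypothetical values), for an 8-bit mode: VAL == -1 keeps its
   high bits, since they all match the sign bit, while VAL == 0x1ff is
   masked down to 0xff. */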
4068 if (width < HOST_BITS_PER_WIDE_INT
4069 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4070 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4071 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4072
4073 return GEN_INT (val);
4074 }
4075 \f
4076 /* Simplify a PLUS or MINUS, at least one of whose operands may be another
4077 PLUS or MINUS.
4078
4079 Rather than test for specific cases, we do this by a brute-force method
4080 and do all possible simplifications until no more changes occur. Then
4081 we rebuild the operation. */
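/* For example (a sketch with hypothetical operands):
   (minus (plus a b) (minus a c)) expands to the list a, b, -a, c;
   the a and -a cancel, and the result is rebuilt as (plus b c). */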
4082
4083 static rtx
4084 simplify_plus_minus (code, mode, op0, op1)
4085 enum rtx_code code;
4086 enum machine_mode mode;
4087 rtx op0, op1;
4088 {
4089 rtx ops[8];
4090 int negs[8];
4091 rtx result, tem;
4092 int n_ops = 2, input_ops = 2, input_consts = 0, n_consts = 0;
4093 int first = 1, negate = 0, changed;
4094 int i, j;
4095
4096 bzero ((char *) ops, sizeof ops);
4097
4098 /* Set up the two operands and then expand them until nothing has been
4099 changed. If we run out of room in our array, give up; this should
4100 almost never happen. */
4101
4102 ops[0] = op0, ops[1] = op1, negs[0] = 0, negs[1] = (code == MINUS);
4103
4104 changed = 1;
4105 while (changed)
4106 {
4107 changed = 0;
4108
4109 for (i = 0; i < n_ops; i++)
4110 switch (GET_CODE (ops[i]))
4111 {
4112 case PLUS:
4113 case MINUS:
4114 if (n_ops == 7)
4115 return 0;
4116
4117 ops[n_ops] = XEXP (ops[i], 1);
4118 negs[n_ops++] = GET_CODE (ops[i]) == MINUS ? !negs[i] : negs[i];
4119 ops[i] = XEXP (ops[i], 0);
4120 input_ops++;
4121 changed = 1;
4122 break;
4123
4124 case NEG:
4125 ops[i] = XEXP (ops[i], 0);
4126 negs[i] = ! negs[i];
4127 changed = 1;
4128 break;
4129
4130 case CONST:
4131 ops[i] = XEXP (ops[i], 0);
4132 input_consts++;
4133 changed = 1;
4134 break;
4135
4136 case NOT:
4137 /* ~a -> (-a - 1) */
4138 if (n_ops != 7)
4139 {
4140 ops[n_ops] = constm1_rtx;
4141 negs[n_ops++] = negs[i];
4142 ops[i] = XEXP (ops[i], 0);
4143 negs[i] = ! negs[i];
4144 changed = 1;
4145 }
4146 break;
4147
4148 case CONST_INT:
4149 if (negs[i])
4150 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0, changed = 1;
4151 break;
4152 }
4153 }
4154
4155 /* If we only have two operands, we can't do anything. */
4156 if (n_ops <= 2)
4157 return 0;
4158
4159 /* Now simplify each pair of operands until nothing changes. The first
4160 time through just simplify constants against each other. */
4161
4162 changed = 1;
4163 while (changed)
4164 {
4165 changed = first;
4166
4167 for (i = 0; i < n_ops - 1; i++)
4168 for (j = i + 1; j < n_ops; j++)
4169 if (ops[i] != 0 && ops[j] != 0
4170 && (! first || (CONSTANT_P (ops[i]) && CONSTANT_P (ops[j]))))
4171 {
4172 rtx lhs = ops[i], rhs = ops[j];
4173 enum rtx_code ncode = PLUS;
4174
4175 if (negs[i] && ! negs[j])
4176 lhs = ops[j], rhs = ops[i], ncode = MINUS;
4177 else if (! negs[i] && negs[j])
4178 ncode = MINUS;
4179
4180 tem = simplify_binary_operation (ncode, mode, lhs, rhs);
4181 if (tem)
4182 {
4183 ops[i] = tem, ops[j] = 0;
4184 negs[i] = negs[i] && negs[j];
4185 if (GET_CODE (tem) == NEG)
4186 ops[i] = XEXP (tem, 0), negs[i] = ! negs[i];
4187
4188 if (GET_CODE (ops[i]) == CONST_INT && negs[i])
4189 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0;
4190 changed = 1;
4191 }
4192 }
4193
4194 first = 0;
4195 }
4196
4197 /* Pack all the operands to the lower-numbered entries and give up if
4198 we didn't reduce the number of operands we had. Make sure we
4199 count a CONST as two operands. If we have the same number of
4200 operands, but have made more CONSTs than we had, this is also
4201 an improvement, so accept it. */
4202
4203 for (i = 0, j = 0; j < n_ops; j++)
4204 if (ops[j] != 0)
4205 {
4206 ops[i] = ops[j], negs[i++] = negs[j];
4207 if (GET_CODE (ops[j]) == CONST)
4208 n_consts++;
4209 }
4210
4211 if (i + n_consts > input_ops
4212 || (i + n_consts == input_ops && n_consts <= input_consts))
4213 return 0;
4214
4215 n_ops = i;
4216
4217 /* If we have a CONST_INT, put it last. */
4218 for (i = 0; i < n_ops - 1; i++)
4219 if (GET_CODE (ops[i]) == CONST_INT)
4220 {
4221 tem = ops[n_ops - 1], ops[n_ops - 1] = ops[i], ops[i] = tem;
4222 j = negs[n_ops - 1], negs[n_ops - 1] = negs[i], negs[i] = j;
4223 }
4224
4225 /* Put a non-negated operand first. If there aren't any, make all
4226 operands positive and negate the whole thing later. */
4227 for (i = 0; i < n_ops && negs[i]; i++)
4228 ;
4229
4230 if (i == n_ops)
4231 {
4232 for (i = 0; i < n_ops; i++)
4233 negs[i] = 0;
4234 negate = 1;
4235 }
4236 else if (i != 0)
4237 {
4238 tem = ops[0], ops[0] = ops[i], ops[i] = tem;
4239 j = negs[0], negs[0] = negs[i], negs[i] = j;
4240 }
4241
4242 /* Now make the result by performing the requested operations. */
4243 result = ops[0];
4244 for (i = 1; i < n_ops; i++)
4245 result = cse_gen_binary (negs[i] ? MINUS : PLUS, mode, result, ops[i]);
4246
4247 return negate ? gen_rtx (NEG, mode, result) : result;
4248 }
4249 \f
4250 /* Make a binary operation by properly ordering the operands and
4251 seeing if the expression folds. */
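/* E.g. (a sketch): cse_gen_binary (PLUS, SImode, (const_int 4), reg)
   first swaps the operands so the constant comes second, then, when no
   folding applies, forms (plus reg (const_int 4)) via plus_constant. */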
4252
4253 static rtx
4254 cse_gen_binary (code, mode, op0, op1)
4255 enum rtx_code code;
4256 enum machine_mode mode;
4257 rtx op0, op1;
4258 {
4259 rtx tem;
4260
4261 /* Put complex operands first and constants second if commutative. */
4262 if (GET_RTX_CLASS (code) == 'c'
4263 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
4264 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
4265 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
4266 || (GET_CODE (op0) == SUBREG
4267 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
4268 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
4269 tem = op0, op0 = op1, op1 = tem;
4270
4271 /* If this simplifies, do it. */
4272 tem = simplify_binary_operation (code, mode, op0, op1);
4273
4274 if (tem)
4275 return tem;
4276
4277 /* Handle addition and subtraction of CONST_INT specially. Otherwise,
4278 just form the operation. */
4279
4280 if (code == PLUS && GET_CODE (op1) == CONST_INT
4281 && GET_MODE (op0) != VOIDmode)
4282 return plus_constant (op0, INTVAL (op1));
4283 else if (code == MINUS && GET_CODE (op1) == CONST_INT
4284 && GET_MODE (op0) != VOIDmode)
4285 return plus_constant (op0, - INTVAL (op1));
4286 else
4287 return gen_rtx (code, mode, op0, op1);
4288 }
4289 \f
4290 /* Like simplify_binary_operation except used for relational operators.
4291 MODE is the mode of the operands, not that of the result. If MODE
4292 is VOIDmode, both operands must also be VOIDmode and we compare the
4293 operands in "infinite precision".
4294
4295 If no simplification is possible, this function returns zero. Otherwise,
4296 it returns either const_true_rtx or const0_rtx. */
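/* E.g. (hypothetical operands): (GT (const_int 3) (const_int 5))
   folds to const0_rtx, and (LE (const_int 3) (const_int 5)) to
   const_true_rtx. */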
4297
4298 rtx
4299 simplify_relational_operation (code, mode, op0, op1)
4300 enum rtx_code code;
4301 enum machine_mode mode;
4302 rtx op0, op1;
4303 {
4304 int equal, op0lt, op0ltu, op1lt, op1ltu;
4305 rtx tem;
4306
4307 /* If op0 is a compare, extract the comparison arguments from it. */
4308 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
4309 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4310
4311 /* We can't simplify MODE_CC values since we don't know what the
4312 actual comparison is. */
4313 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC
4314 #ifdef HAVE_cc0
4315 || op0 == cc0_rtx
4316 #endif
4317 )
4318 return 0;
4319
4320 /* For integer comparisons of A and B maybe we can simplify A - B and can
4321 then simplify a comparison of that with zero. If A and B are both either
4322 a register or a CONST_INT, this can't help; testing for these cases will
4323 prevent infinite recursion here and speed things up.
4324
4325 If CODE is an unsigned comparison, we can only do this if A - B is a
4326 constant integer, and then we have to compare that integer with zero as a
4327 signed comparison. Note that this will give the incorrect result from
4328 comparisons that overflow. Since these are undefined, this is probably
4329 OK. If it causes a problem, we can check for A or B being an address
4330 (fp + const or SYMBOL_REF) and only do it in that case. */
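  /* For example (a sketch): comparing (plus (reg X) (const_int 1))
     against (reg X) folds their difference to (const_int 1), and the
     recursive comparison of that with zero makes (GT a+1 a) come out as
     const_true_rtx for a signed comparison. */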
4331
4332 if (INTEGRAL_MODE_P (mode) && op1 != const0_rtx
4333 && ! ((GET_CODE (op0) == REG || GET_CODE (op0) == CONST_INT)
4334 && (GET_CODE (op1) == REG || GET_CODE (op1) == CONST_INT))
4335 && 0 != (tem = simplify_binary_operation (MINUS, mode, op0, op1))
4336 && (GET_CODE (tem) == CONST_INT
4337 || (code != GTU && code != GEU &&
4338 code != LTU && code != LEU)))
4339 return simplify_relational_operation (signed_condition (code),
4340 mode, tem, const0_rtx);
4341
4342 /* For non-IEEE floating-point, if the two operands are equal, we know the
4343 result. */
4344 if (rtx_equal_p (op0, op1)
4345 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4346 || ! FLOAT_MODE_P (GET_MODE (op0)) || flag_fast_math))
4347 equal = 1, op0lt = 0, op0ltu = 0, op1lt = 0, op1ltu = 0;
4348
4349 /* If the operands are floating-point constants, see if we can fold
4350 the result. */
4351 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4352 else if (GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
4353 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
4354 {
4355 REAL_VALUE_TYPE d0, d1;
4356 jmp_buf handler;
4357
4358 if (setjmp (handler))
4359 return 0;
4360
4361 set_float_handler (handler);
4362 REAL_VALUE_FROM_CONST_DOUBLE (d0, op0);
4363 REAL_VALUE_FROM_CONST_DOUBLE (d1, op1);
4364 equal = REAL_VALUES_EQUAL (d0, d1);
4365 op0lt = op0ltu = REAL_VALUES_LESS (d0, d1);
4366 op1lt = op1ltu = REAL_VALUES_LESS (d1, d0);
4367 set_float_handler (NULL_PTR);
4368 }
4369 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4370
4371 /* Otherwise, see if the operands are both integers. */
4372 else if ((GET_MODE_CLASS (mode) == MODE_INT || mode == VOIDmode)
4373 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
4374 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
4375 {
4376 int width = GET_MODE_BITSIZE (mode);
4377 HOST_WIDE_INT l0s, h0s, l1s, h1s;
4378 unsigned HOST_WIDE_INT l0u, h0u, l1u, h1u;
4379
4380 /* Get the two words comprising each integer constant. */
4381 if (GET_CODE (op0) == CONST_DOUBLE)
4382 {
4383 l0u = l0s = CONST_DOUBLE_LOW (op0);
4384 h0u = h0s = CONST_DOUBLE_HIGH (op0);
4385 }
4386 else
4387 {
4388 l0u = l0s = INTVAL (op0);
4389 h0u = 0, h0s = l0s < 0 ? -1 : 0;
4390 }
4391
4392 if (GET_CODE (op1) == CONST_DOUBLE)
4393 {
4394 l1u = l1s = CONST_DOUBLE_LOW (op1);
4395 h1u = h1s = CONST_DOUBLE_HIGH (op1);
4396 }
4397 else
4398 {
4399 l1u = l1s = INTVAL (op1);
4400 h1u = 0, h1s = l1s < 0 ? -1 : 0;
4401 }
4402
4403 /* If WIDTH is nonzero and smaller than HOST_BITS_PER_WIDE_INT,
4404 we have to sign or zero-extend the values. */
4405 if (width != 0 && width <= HOST_BITS_PER_WIDE_INT)
4406 h0u = h1u = 0, h0s = l0s < 0 ? -1 : 0, h1s = l1s < 0 ? -1 : 0;
4407
4408 if (width != 0 && width < HOST_BITS_PER_WIDE_INT)
4409 {
4410 l0u &= ((HOST_WIDE_INT) 1 << width) - 1;
4411 l1u &= ((HOST_WIDE_INT) 1 << width) - 1;
4412
4413 if (l0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4414 l0s |= ((HOST_WIDE_INT) (-1) << width);
4415
4416 if (l1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4417 l1s |= ((HOST_WIDE_INT) (-1) << width);
4418 }
4419
4420 equal = (h0u == h1u && l0u == l1u);
4421 op0lt = (h0s < h1s || (h0s == h1s && l0s < l1s));
4422 op1lt = (h1s < h0s || (h1s == h0s && l1s < l0s));
4423 op0ltu = (h0u < h1u || (h0u == h1u && l0u < l1u));
4424 op1ltu = (h1u < h0u || (h1u == h0u && l1u < l0u));
4425 }
4426
4427 /* Otherwise, there are some code-specific tests we can make. */
4428 else
4429 {
4430 switch (code)
4431 {
4432 case EQ:
4433 /* References to the frame plus a constant or labels cannot
4434 be zero, but a SYMBOL_REF can due to #pragma weak. */
4435 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4436 || GET_CODE (op0) == LABEL_REF)
4437 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4438 /* On some machines, the ap reg can be 0 sometimes. */
4439 && op0 != arg_pointer_rtx
4440 #endif
4441 )
4442 return const0_rtx;
4443 break;
4444
4445 case NE:
4446 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4447 || GET_CODE (op0) == LABEL_REF)
4448 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4449 && op0 != arg_pointer_rtx
4450 #endif
4451 )
4452 return const_true_rtx;
4453 break;
4454
4455 case GEU:
4456 /* Unsigned values are never negative. */
4457 if (op1 == const0_rtx)
4458 return const_true_rtx;
4459 break;
4460
4461 case LTU:
4462 if (op1 == const0_rtx)
4463 return const0_rtx;
4464 break;
4465
4466 case LEU:
4467 /* Unsigned values are never greater than the largest
4468 unsigned value. */
4469 if (GET_CODE (op1) == CONST_INT
4470 && INTVAL (op1) == GET_MODE_MASK (mode)
4471 && INTEGRAL_MODE_P (mode))
4472 return const_true_rtx;
4473 break;
4474
4475 case GTU:
4476 if (GET_CODE (op1) == CONST_INT
4477 && INTVAL (op1) == GET_MODE_MASK (mode)
4478 && INTEGRAL_MODE_P (mode))
4479 return const0_rtx;
4480 break;
4481 }
4482
4483 return 0;
4484 }
4485
4486 /* If we reach here, EQUAL, OP0LT, OP0LTU, OP1LT, and OP1LTU are set
4487 as appropriate. */
4488 switch (code)
4489 {
4490 case EQ:
4491 return equal ? const_true_rtx : const0_rtx;
4492 case NE:
4493 return ! equal ? const_true_rtx : const0_rtx;
4494 case LT:
4495 return op0lt ? const_true_rtx : const0_rtx;
4496 case GT:
4497 return op1lt ? const_true_rtx : const0_rtx;
4498 case LTU:
4499 return op0ltu ? const_true_rtx : const0_rtx;
4500 case GTU:
4501 return op1ltu ? const_true_rtx : const0_rtx;
4502 case LE:
4503 return equal || op0lt ? const_true_rtx : const0_rtx;
4504 case GE:
4505 return equal || op1lt ? const_true_rtx : const0_rtx;
4506 case LEU:
4507 return equal || op0ltu ? const_true_rtx : const0_rtx;
4508 case GEU:
4509 return equal || op1ltu ? const_true_rtx : const0_rtx;
4510 }
4511
4512 abort ();
4513 }
4514 \f
4515 /* Simplify CODE, an operation with result mode MODE and three operands,
4516 OP0, OP1, and OP2. OP0_MODE was the mode of OP0 before it became
4517 a constant. Return 0 if no simplification is possible. */
4518
4519 rtx
4520 simplify_ternary_operation (code, mode, op0_mode, op0, op1, op2)
4521 enum rtx_code code;
4522 enum machine_mode mode, op0_mode;
4523 rtx op0, op1, op2;
4524 {
4525 int width = GET_MODE_BITSIZE (mode);
4526
4527 /* VOIDmode means "infinite" precision. */
4528 if (width == 0)
4529 width = HOST_BITS_PER_WIDE_INT;
4530
4531 switch (code)
4532 {
4533 case SIGN_EXTRACT:
4534 case ZERO_EXTRACT:
4535 if (GET_CODE (op0) == CONST_INT
4536 && GET_CODE (op1) == CONST_INT
4537 && GET_CODE (op2) == CONST_INT
4538 && INTVAL (op1) + INTVAL (op2) <= GET_MODE_BITSIZE (op0_mode)
4539 && width <= HOST_BITS_PER_WIDE_INT)
4540 {
4541 /* Extracting a bit-field from a constant */
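	  /* E.g. (hypothetical values, little-endian bit numbering):
	     extracting 4 bits at position 2 from (const_int 0x5c) shifts
	     right by 2 and masks, giving (const_int 7); SIGN_EXTRACT would
	     further sign-extend if the topmost extracted bit were set. */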
4542 HOST_WIDE_INT val = INTVAL (op0);
4543
4544 #if BITS_BIG_ENDIAN
4545 val >>= (GET_MODE_BITSIZE (op0_mode) - INTVAL (op2) - INTVAL (op1));
4546 #else
4547 val >>= INTVAL (op2);
4548 #endif
4549 if (HOST_BITS_PER_WIDE_INT != INTVAL (op1))
4550 {
4551 /* First zero-extend. */
4552 val &= ((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1;
4553 /* If desired, propagate sign bit. */
4554 if (code == SIGN_EXTRACT
4555 && (val & ((HOST_WIDE_INT) 1 << (INTVAL (op1) - 1))))
4556 val |= ~ (((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1);
4557 }
4558
4559 /* Clear the bits that don't belong in our mode,
4560 unless they and our sign bit are all one.
4561 So we get either a reasonable negative value or a reasonable
4562 unsigned value for this mode. */
4563 if (width < HOST_BITS_PER_WIDE_INT
4564 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4565 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4566 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4567
4568 return GEN_INT (val);
4569 }
4570 break;
4571
4572 case IF_THEN_ELSE:
4573 if (GET_CODE (op0) == CONST_INT)
4574 return op0 != const0_rtx ? op1 : op2;
4575 break;
4576
4577 default:
4578 abort ();
4579 }
4580
4581 return 0;
4582 }
4583 \f
4584 /* If X is a nontrivial arithmetic operation on an argument
4585 for which a constant value can be determined, return
4586 the result of operating on that value, as a constant.
4587 Otherwise, return X, possibly with one or more operands
4588 modified by recursive calls to this function.
4589
4590 If X is a register whose contents are known, we do NOT
4591 return those contents here. equiv_constant is called to
4592 perform that task.
4593
4594 INSN is the insn that we may be modifying. If it is 0, make a copy
4595 of X before modifying it. */
4596
4597 static rtx
4598 fold_rtx (x, insn)
4599 rtx x;
4600 rtx insn;
4601 {
4602 register enum rtx_code code;
4603 register enum machine_mode mode;
4604 register char *fmt;
4605 register int i;
4606 rtx new = 0;
4607 int copied = 0;
4608 int must_swap = 0;
4609
4610 /* Folded equivalents of first two operands of X. */
4611 rtx folded_arg0;
4612 rtx folded_arg1;
4613
4614 /* Constant equivalents of first three operands of X;
4615 0 when no such equivalent is known. */
4616 rtx const_arg0;
4617 rtx const_arg1;
4618 rtx const_arg2;
4619
4620 /* The mode of the first operand of X. We need this for sign and zero
4621 extends. */
4622 enum machine_mode mode_arg0;
4623
4624 if (x == 0)
4625 return x;
4626
4627 mode = GET_MODE (x);
4628 code = GET_CODE (x);
4629 switch (code)
4630 {
4631 case CONST:
4632 case CONST_INT:
4633 case CONST_DOUBLE:
4634 case SYMBOL_REF:
4635 case LABEL_REF:
4636 case REG:
4637 /* No use simplifying an EXPR_LIST
4638 since they are used only for lists of args
4639 in a function call's REG_EQUAL note. */
4640 case EXPR_LIST:
4641 return x;
4642
4643 #ifdef HAVE_cc0
4644 case CC0:
4645 return prev_insn_cc0;
4646 #endif
4647
4648 case PC:
4649 /* If the next insn is a CODE_LABEL followed by a jump table,
4650 PC's value is a LABEL_REF pointing to that label. That
4651 lets us fold switch statements on the Vax. */
4652 if (insn && GET_CODE (insn) == JUMP_INSN)
4653 {
4654 rtx next = next_nonnote_insn (insn);
4655
4656 if (next && GET_CODE (next) == CODE_LABEL
4657 && NEXT_INSN (next) != 0
4658 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
4659 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
4660 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
4661 return gen_rtx (LABEL_REF, Pmode, next);
4662 }
4663 break;
4664
4665 case SUBREG:
4666 /* See if we previously assigned a constant value to this SUBREG. */
4667 if ((new = lookup_as_function (x, CONST_INT)) != 0
4668 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
4669 return new;
4670
4671 /* If this is a paradoxical SUBREG, we have no idea what value the
4672 extra bits would have. However, if the operand is equivalent
4673 to a SUBREG whose operand is the same as our mode, and all the
4674 modes are within a word, we can just use the inner operand
4675 because these SUBREGs just say how to treat the register.
4676
4677 Similarly if we find an integer constant. */
4678
4679 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4680 {
4681 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
4682 struct table_elt *elt;
4683
4684 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
4685 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
4686 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
4687 imode)) != 0)
4688 for (elt = elt->first_same_value;
4689 elt; elt = elt->next_same_value)
4690 {
4691 if (CONSTANT_P (elt->exp)
4692 && GET_MODE (elt->exp) == VOIDmode)
4693 return elt->exp;
4694
4695 if (GET_CODE (elt->exp) == SUBREG
4696 && GET_MODE (SUBREG_REG (elt->exp)) == mode
4697 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
4698 return copy_rtx (SUBREG_REG (elt->exp));
4699 }
4700
4701 return x;
4702 }
4703
4704 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
4705 We might be able to if the SUBREG is extracting a single word in an
4706 integral mode or extracting the low part. */
4707
4708 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
4709 const_arg0 = equiv_constant (folded_arg0);
4710 if (const_arg0)
4711 folded_arg0 = const_arg0;
4712
4713 if (folded_arg0 != SUBREG_REG (x))
4714 {
4715 new = 0;
4716
4717 if (GET_MODE_CLASS (mode) == MODE_INT
4718 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
4719 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
4720 new = operand_subword (folded_arg0, SUBREG_WORD (x), 0,
4721 GET_MODE (SUBREG_REG (x)));
4722 if (new == 0 && subreg_lowpart_p (x))
4723 new = gen_lowpart_if_possible (mode, folded_arg0);
4724 if (new)
4725 return new;
4726 }
4727
4728 /* If this is a narrowing SUBREG and our operand is a REG, see if
4729 we can find an equivalence for REG that is an arithmetic operation
4730 in a wider mode where both operands are paradoxical SUBREGs
4731 from objects of our result mode. In that case, we couldn't report
4732 an equivalent value for that operation, since we don't know what the
4733 extra bits will be. But we can find an equivalence for this SUBREG
4734 by folding that operation in the narrow mode. This allows us to
4735 fold arithmetic in narrow modes when the machine only supports
4736 word-sized arithmetic.
4737
4738 Also look for a case where we have a SUBREG whose operand is the
4739 same as our result. If both modes are smaller than a word, we
4740 are simply interpreting a register in different modes and we
4741 can use the inner value. */
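/* A sketch (hypothetical registers and values): when folding
   (subreg:QI (reg:SI R) 0), if R is known equivalent to
   (plus:SI (subreg:SI (reg:QI A) 0) (const_int 12)) and A is known
   equivalent to (const_int 30), we redo the addition in QImode and
   get (const_int 42). */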
4742
4743 if (GET_CODE (folded_arg0) == REG
4744 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
4745 && subreg_lowpart_p (x))
4746 {
4747 struct table_elt *elt;
4748
4749 /* We can use HASH here since we know that canon_hash won't be
4750 called. */
4751 elt = lookup (folded_arg0,
4752 HASH (folded_arg0, GET_MODE (folded_arg0)),
4753 GET_MODE (folded_arg0));
4754
4755 if (elt)
4756 elt = elt->first_same_value;
4757
4758 for (; elt; elt = elt->next_same_value)
4759 {
4760 enum rtx_code eltcode = GET_CODE (elt->exp);
4761
4762 /* Just check for unary and binary operations. */
4763 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
4764 && GET_CODE (elt->exp) != SIGN_EXTEND
4765 && GET_CODE (elt->exp) != ZERO_EXTEND
4766 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
4767 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
4768 {
4769 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
4770
4771 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
4772 op0 = fold_rtx (op0, NULL_RTX);
4773
4774 op0 = equiv_constant (op0);
4775 if (op0)
4776 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
4777 op0, mode);
4778 }
4779 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
4780 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
4781 && eltcode != DIV && eltcode != MOD
4782 && eltcode != UDIV && eltcode != UMOD
4783 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
4784 && eltcode != ROTATE && eltcode != ROTATERT
4785 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
4786 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
4787 == mode))
4788 || CONSTANT_P (XEXP (elt->exp, 0)))
4789 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
4790 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
4791 == mode))
4792 || CONSTANT_P (XEXP (elt->exp, 1))))
4793 {
4794 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
4795 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
4796
4797 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
4798 op0 = fold_rtx (op0, NULL_RTX);
4799
4800 if (op0)
4801 op0 = equiv_constant (op0);
4802
4803 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
4804 op1 = fold_rtx (op1, NULL_RTX);
4805
4806 if (op1)
4807 op1 = equiv_constant (op1);
4808
4809 /* If we are looking for the low SImode part of
4810 (ashift:DI c (const_int 32)), it doesn't work
4811 to compute that in SImode, because a 32-bit shift
4812 in SImode is unpredictable. We know the value is 0. */
4813 if (op0 && op1
4814 && GET_CODE (elt->exp) == ASHIFT
4815 && GET_CODE (op1) == CONST_INT
4816 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
4817 {
4818 if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
4819
4820 /* If the count fits in the inner mode's width,
4821 but exceeds the outer mode's width,
4822 the value will get truncated to 0
4823 by the subreg. */
4824 new = const0_rtx;
4825 else
4826 /* If the count exceeds even the inner mode's width,
4827 don't fold this expression. */
4828 new = 0;
4829 }
4830 else if (op0 && op1)
4831 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
4832 op0, op1);
4833 }
4834
4835 else if (GET_CODE (elt->exp) == SUBREG
4836 && GET_MODE (SUBREG_REG (elt->exp)) == mode
4837 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
4838 <= UNITS_PER_WORD)
4839 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
4840 new = copy_rtx (SUBREG_REG (elt->exp));
4841
4842 if (new)
4843 return new;
4844 }
4845 }
4846
4847 return x;
4848
4849 case NOT:
4850 case NEG:
4851 /* If we have (NOT Y), see if Y is known to be (NOT Z).
4852 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
4853 new = lookup_as_function (XEXP (x, 0), code);
4854 if (new)
4855 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
4856 break;
4857
4858 case MEM:
4859 /* If we are not actually processing an insn, don't try to find the
4860 best address. Not only don't we care, but we could modify the
4861 MEM in an invalid way since we have no insn to validate against. */
4862 if (insn != 0)
4863 find_best_addr (insn, &XEXP (x, 0));
4864
4865 {
4866 /* Even if we don't fold in the insn itself,
4867 we can safely do so here, in hopes of getting a constant. */
4868 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
4869 rtx base = 0;
4870 HOST_WIDE_INT offset = 0;
4871
4872 if (GET_CODE (addr) == REG
4873 && REGNO_QTY_VALID_P (REGNO (addr))
4874 && GET_MODE (addr) == qty_mode[reg_qty[REGNO (addr)]]
4875 && qty_const[reg_qty[REGNO (addr)]] != 0)
4876 addr = qty_const[reg_qty[REGNO (addr)]];
4877
4878 /* If address is constant, split it into a base and integer offset. */
4879 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
4880 base = addr;
4881 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
4882 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
4883 {
4884 base = XEXP (XEXP (addr, 0), 0);
4885 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
4886 }
4887 else if (GET_CODE (addr) == LO_SUM
4888 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
4889 base = XEXP (addr, 1);
4890
4891 /* If this is a constant pool reference, we can fold it into its
4892 constant to allow better value tracking. */
4893 if (base && GET_CODE (base) == SYMBOL_REF
4894 && CONSTANT_POOL_ADDRESS_P (base))
4895 {
4896 rtx constant = get_pool_constant (base);
4897 enum machine_mode const_mode = get_pool_mode (base);
4898 rtx new;
4899
4900 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
4901 constant_pool_entries_cost = COST (constant);
4902
4903 /* If we are loading the full constant, we have an equivalence. */
4904 if (offset == 0 && mode == const_mode)
4905 return constant;
4906
4907 /* If this actually isn't a constant (weird!), we can't do
4908 anything. Otherwise, handle the two most common cases:
4909 extracting a word from a multi-word constant, and extracting
4910 the low-order bits. Other cases don't seem common enough to
4911 worry about. */
4912 if (! CONSTANT_P (constant))
4913 return x;
4914
4915 if (GET_MODE_CLASS (mode) == MODE_INT
4916 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
4917 && offset % UNITS_PER_WORD == 0
4918 && (new = operand_subword (constant,
4919 offset / UNITS_PER_WORD,
4920 0, const_mode)) != 0)
4921 return new;
4922
4923 if (((BYTES_BIG_ENDIAN
4924 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
4925 || (! BYTES_BIG_ENDIAN && offset == 0))
4926 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
4927 return new;
4928 }
4929
4930 /* If this is a reference to a label at a known position in a jump
4931 table, we also know its value. */
4932 if (base && GET_CODE (base) == LABEL_REF)
4933 {
4934 rtx label = XEXP (base, 0);
4935 rtx table_insn = NEXT_INSN (label);
4936
4937 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
4938 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
4939 {
4940 rtx table = PATTERN (table_insn);
4941
4942 if (offset >= 0
4943 && (offset / GET_MODE_SIZE (GET_MODE (table))
4944 < XVECLEN (table, 0)))
4945 return XVECEXP (table, 0,
4946 offset / GET_MODE_SIZE (GET_MODE (table)));
4947 }
4948 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
4949 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
4950 {
4951 rtx table = PATTERN (table_insn);
4952
4953 if (offset >= 0
4954 && (offset / GET_MODE_SIZE (GET_MODE (table))
4955 < XVECLEN (table, 1)))
4956 {
4957 offset /= GET_MODE_SIZE (GET_MODE (table));
4958 new = gen_rtx (MINUS, Pmode, XVECEXP (table, 1, offset),
4959 XEXP (table, 0));
4960
4961 if (GET_MODE (table) != Pmode)
4962 new = gen_rtx (TRUNCATE, GET_MODE (table), new);
4963
4964 return new;
4965 }
4966 }
4967 }
4968
4969 return x;
4970 }
4971 }
4972
4973 const_arg0 = 0;
4974 const_arg1 = 0;
4975 const_arg2 = 0;
4976 mode_arg0 = VOIDmode;
4977
4978 /* Try folding our operands.
4979 Then see which ones have constant values known. */
4980
4981 fmt = GET_RTX_FORMAT (code);
4982 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4983 if (fmt[i] == 'e')
4984 {
4985 rtx arg = XEXP (x, i);
4986 rtx folded_arg = arg, const_arg = 0;
4987 enum machine_mode mode_arg = GET_MODE (arg);
4988 rtx cheap_arg, expensive_arg;
4989 rtx replacements[2];
4990 int j;
4991
4992 /* Most arguments are cheap, so handle them specially. */
4993 switch (GET_CODE (arg))
4994 {
4995 case REG:
4996 /* This is the same as calling equiv_constant; it is duplicated
4997 here for speed. */
4998 if (REGNO_QTY_VALID_P (REGNO (arg))
4999 && qty_const[reg_qty[REGNO (arg)]] != 0
5000 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != REG
5001 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != PLUS)
5002 const_arg
5003 = gen_lowpart_if_possible (GET_MODE (arg),
5004 qty_const[reg_qty[REGNO (arg)]]);
5005 break;
5006
5007 case CONST:
5008 case CONST_INT:
5009 case SYMBOL_REF:
5010 case LABEL_REF:
5011 case CONST_DOUBLE:
5012 const_arg = arg;
5013 break;
5014
5015 #ifdef HAVE_cc0
5016 case CC0:
5017 folded_arg = prev_insn_cc0;
5018 mode_arg = prev_insn_cc0_mode;
5019 const_arg = equiv_constant (folded_arg);
5020 break;
5021 #endif
5022
5023 default:
5024 folded_arg = fold_rtx (arg, insn);
5025 const_arg = equiv_constant (folded_arg);
5026 }
5027
5028 /* For the first three operands, see if the operand
5029 is constant or equivalent to a constant. */
5030 switch (i)
5031 {
5032 case 0:
5033 folded_arg0 = folded_arg;
5034 const_arg0 = const_arg;
5035 mode_arg0 = mode_arg;
5036 break;
5037 case 1:
5038 folded_arg1 = folded_arg;
5039 const_arg1 = const_arg;
5040 break;
5041 case 2:
5042 const_arg2 = const_arg;
5043 break;
5044 }
5045
5046 /* Pick the least expensive of the folded argument and an
5047 equivalent constant argument. */
5048 if (const_arg == 0 || const_arg == folded_arg
5049 || COST (const_arg) > COST (folded_arg))
5050 cheap_arg = folded_arg, expensive_arg = const_arg;
5051 else
5052 cheap_arg = const_arg, expensive_arg = folded_arg;
5053
5054 /* Try to replace the operand with the cheapest of the two
5055 possibilities. If it doesn't work and this is either of the first
5056 two operands of a commutative operation, try swapping them.
5057 If THAT fails, try the more expensive, provided it is cheaper
5058 than what is already there. */
5059
5060 if (cheap_arg == XEXP (x, i))
5061 continue;
5062
5063 if (insn == 0 && ! copied)
5064 {
5065 x = copy_rtx (x);
5066 copied = 1;
5067 }
5068
5069 replacements[0] = cheap_arg, replacements[1] = expensive_arg;
5070 for (j = 0;
5071 j < 2 && replacements[j]
5072 && COST (replacements[j]) < COST (XEXP (x, i));
5073 j++)
5074 {
5075 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
5076 break;
5077
5078 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c')
5079 {
5080 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
5081 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
5082
5083 if (apply_change_group ())
5084 {
5085 /* Swap them back to be invalid so that this loop can
5086 continue and flag them to be swapped back later. */
5087 rtx tem;
5088
5089 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
5090 XEXP (x, 1) = tem;
5091 must_swap = 1;
5092 break;
5093 }
5094 }
5095 }
5096 }
5097
5098 else if (fmt[i] == 'E')
5099 /* Don't try to fold inside of a vector of expressions.
5100 Doing nothing is harmless. */
5101 ;
5102
5103 /* If a commutative operation, place a constant integer as the second
5104 operand unless the first operand is also a constant integer. Otherwise,
5105 place any constant second unless the first operand is also a constant. */
5106
5107 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
5108 {
5109 if (must_swap || (const_arg0
5110 && (const_arg1 == 0
5111 || (GET_CODE (const_arg0) == CONST_INT
5112 && GET_CODE (const_arg1) != CONST_INT))))
5113 {
5114 register rtx tem = XEXP (x, 0);
5115
5116 if (insn == 0 && ! copied)
5117 {
5118 x = copy_rtx (x);
5119 copied = 1;
5120 }
5121
5122 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
5123 validate_change (insn, &XEXP (x, 1), tem, 1);
5124 if (apply_change_group ())
5125 {
5126 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
5127 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
5128 }
5129 }
5130 }
5131
5132 /* If X is an arithmetic operation, see if we can simplify it. */
5133
5134 switch (GET_RTX_CLASS (code))
5135 {
5136 case '1':
5137 /* We can't simplify extension ops unless we know the original mode. */
5138 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
5139 && mode_arg0 == VOIDmode)
5140 break;
5141 new = simplify_unary_operation (code, mode,
5142 const_arg0 ? const_arg0 : folded_arg0,
5143 mode_arg0);
5144 break;
5145
5146 case '<':
5147 /* See what items are actually being compared and set FOLDED_ARG[01]
5148 to those values and CODE to the actual comparison code. If any are
5149 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
5150 do anything if both operands are already known to be constant. */
5151
5152 if (const_arg0 == 0 || const_arg1 == 0)
5153 {
5154 struct table_elt *p0, *p1;
5155 rtx true = const_true_rtx, false = const0_rtx;
5156 enum machine_mode mode_arg1;
5157
5158 #ifdef FLOAT_STORE_FLAG_VALUE
5159 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5160 {
5161 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5162 mode);
5163 false = CONST0_RTX (mode);
5164 }
5165 #endif
5166
5167 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
5168 &mode_arg0, &mode_arg1);
5169 const_arg0 = equiv_constant (folded_arg0);
5170 const_arg1 = equiv_constant (folded_arg1);
5171
5172 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
5173 what kinds of things are being compared, so we can't do
5174 anything with this comparison. */
5175
5176 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
5177 break;
5178
5179 /* If we do not now have two constants being compared, see if we
5180 can nevertheless deduce some things about the comparison. */
5181 if (const_arg0 == 0 || const_arg1 == 0)
5182 {
5183 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or non-explicit
5184 constant? These aren't zero, but we don't know their sign. */
5185 if (const_arg1 == const0_rtx
5186 && (NONZERO_BASE_PLUS_P (folded_arg0)
5187 #if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
5188 come out as 0. */
5189 || GET_CODE (folded_arg0) == SYMBOL_REF
5190 #endif
5191 || GET_CODE (folded_arg0) == LABEL_REF
5192 || GET_CODE (folded_arg0) == CONST))
5193 {
5194 if (code == EQ)
5195 return false;
5196 else if (code == NE)
5197 return true;
5198 }
5199
5200 /* See if the two operands are the same. We don't do this
5201 for IEEE floating-point, since we can't assume x == x
5202 when x might be a NaN. */
5203
5204 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5205 || ! FLOAT_MODE_P (mode_arg0) || flag_fast_math)
5206 && (folded_arg0 == folded_arg1
5207 || (GET_CODE (folded_arg0) == REG
5208 && GET_CODE (folded_arg1) == REG
5209 && (reg_qty[REGNO (folded_arg0)]
5210 == reg_qty[REGNO (folded_arg1)]))
5211 || ((p0 = lookup (folded_arg0,
5212 (safe_hash (folded_arg0, mode_arg0)
5213 % NBUCKETS), mode_arg0))
5214 && (p1 = lookup (folded_arg1,
5215 (safe_hash (folded_arg1, mode_arg0)
5216 % NBUCKETS), mode_arg0))
5217 && p0->first_same_value == p1->first_same_value)))
5218 return ((code == EQ || code == LE || code == GE
5219 || code == LEU || code == GEU)
5220 ? true : false);
5221
5222 /* If FOLDED_ARG0 is a register, see if the comparison we are
5223 doing now is either the same as we did before or the reverse
5224 (we only check the reverse if not floating-point). */
5225 else if (GET_CODE (folded_arg0) == REG)
5226 {
5227 int qty = reg_qty[REGNO (folded_arg0)];
5228
5229 if (REGNO_QTY_VALID_P (REGNO (folded_arg0))
5230 && (comparison_dominates_p (qty_comparison_code[qty], code)
5231 || (comparison_dominates_p (qty_comparison_code[qty],
5232 reverse_condition (code))
5233 && ! FLOAT_MODE_P (mode_arg0)))
5234 && (rtx_equal_p (qty_comparison_const[qty], folded_arg1)
5235 || (const_arg1
5236 && rtx_equal_p (qty_comparison_const[qty],
5237 const_arg1))
5238 || (GET_CODE (folded_arg1) == REG
5239 && (reg_qty[REGNO (folded_arg1)]
5240 == qty_comparison_qty[qty]))))
5241 return (comparison_dominates_p (qty_comparison_code[qty],
5242 code)
5243 ? true : false);
5244 }
5245 }
5246 }
5247
5248 /* If we are comparing against zero, see if the first operand is
5249 equivalent to an IOR with a constant. If so, we may be able to
5250 determine the result of this comparison. */
5251
5252 if (const_arg1 == const0_rtx)
5253 {
5254 rtx y = lookup_as_function (folded_arg0, IOR);
5255 rtx inner_const;
5256
5257 if (y != 0
5258 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
5259 && GET_CODE (inner_const) == CONST_INT
5260 && INTVAL (inner_const) != 0)
5261 {
5262 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
5263 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
5264 && (INTVAL (inner_const)
5265 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
5266 rtx true = const_true_rtx, false = const0_rtx;
5267
5268 #ifdef FLOAT_STORE_FLAG_VALUE
5269 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5270 {
5271 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5272 mode);
5273 false = CONST0_RTX (mode);
5274 }
5275 #endif
5276
5277 switch (code)
5278 {
5279 case EQ:
5280 return false;
5281 case NE:
5282 return true;
5283 case LT: case LE:
5284 if (has_sign)
5285 return true;
5286 break;
5287 case GT: case GE:
5288 if (has_sign)
5289 return false;
5290 break;
5291 }
5292 }
5293 }
5294
5295 new = simplify_relational_operation (code, mode_arg0,
5296 const_arg0 ? const_arg0 : folded_arg0,
5297 const_arg1 ? const_arg1 : folded_arg1);
5298 #ifdef FLOAT_STORE_FLAG_VALUE
5299 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
5300 new = ((new == const0_rtx) ? CONST0_RTX (mode)
5301 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE, mode));
5302 #endif
5303 break;
5304
5305 case '2':
5306 case 'c':
5307 switch (code)
5308 {
5309 case PLUS:
5310 /* If the second operand is a LABEL_REF, see if the first is a MINUS
5311 with that LABEL_REF as its second operand. If so, the result is
5312 the first operand of that MINUS. This handles switches with an
5313 ADDR_DIFF_VEC table. */
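	  /* E.g. (a sketch): if FOLDED_ARG0 is known equivalent to
	     (minus Z (label_ref L)) and CONST_ARG1 is (label_ref L), the
	     sum folds directly to Z. */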
5314 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
5315 {
5316 rtx y = lookup_as_function (folded_arg0, MINUS);
5317
5318 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5319 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
5320 return XEXP (y, 0);
5321 }
5322
5323 /* If second operand is a register equivalent to a negative
5324 CONST_INT, see if we can find a register equivalent to the
5325 positive constant. Make a MINUS if so. Don't do this for
5326 a negative constant since we might then alternate between
5327 choosing positive and negative constants. Having the positive
5328 constant previously-used is the more common case. */
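	  /* A sketch (hypothetical operands): for (plus X Y) with Y known
	     equivalent to (const_int -4), if some register Z is known
	     equivalent to (const_int 4), we rewrite the sum as (minus X Z). */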
5329 if (const_arg1 && GET_CODE (const_arg1) == CONST_INT
5330 && INTVAL (const_arg1) < 0 && GET_CODE (folded_arg1) == REG)
5331 {
5332 rtx new_const = GEN_INT (- INTVAL (const_arg1));
5333 struct table_elt *p
5334 = lookup (new_const, safe_hash (new_const, mode) % NBUCKETS,
5335 mode);
5336
5337 if (p)
5338 for (p = p->first_same_value; p; p = p->next_same_value)
5339 if (GET_CODE (p->exp) == REG)
5340 return cse_gen_binary (MINUS, mode, folded_arg0,
5341 canon_reg (p->exp, NULL_RTX));
5342 }
5343 goto from_plus;
5344
5345 case MINUS:
5346 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
5347 If so, produce (PLUS Z C2-C). */
5348 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
5349 {
5350 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
5351 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
5352 return fold_rtx (plus_constant (copy_rtx (y),
5353 -INTVAL (const_arg1)),
5354 NULL_RTX);
5355 }
5356
5357 /* ... fall through ... */
5358
5359 from_plus:
5360 case SMIN: case SMAX: case UMIN: case UMAX:
5361 case IOR: case AND: case XOR:
5362 case MULT: case DIV: case UDIV:
5363 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
5364 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
5365 is known to be of similar form, we may be able to replace the
5366 operation with a combined operation. This may eliminate the
5367 intermediate operation if every use is simplified in this way.
5368 Note that the similar optimization done by combine.c only works
5369 if the intermediate operation's result has only one reference. */
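	  /* Two sketches (hypothetical registers): if R is known equivalent
	     to (ashift X (const_int 3)), then (ashift R (const_int 2))
	     becomes (ashift X (const_int 5)); if R is equivalent to
	     (plus X (const_int 4)), then (plus R (const_int 8)) becomes
	     (plus X (const_int 12)). */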
5370
5371 if (GET_CODE (folded_arg0) == REG
5372 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
5373 {
5374 int is_shift
5375 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
5376 rtx y = lookup_as_function (folded_arg0, code);
5377 rtx inner_const;
5378 enum rtx_code associate_code;
5379 rtx new_const;
5380
5381 if (y == 0
5382 || 0 == (inner_const
5383 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
5384 || GET_CODE (inner_const) != CONST_INT
5385 /* If we have compiled a statement like
5386 "if (x == (x & mask1))", and now are looking at
5387 "x & mask2", we will have a case where the first operand
5388 of Y is the same as our first operand. Unless we detect
5389 this case, an infinite loop will result. */
5390 || XEXP (y, 0) == folded_arg0)
5391 break;
5392
5393 /* Don't associate these operations if they are a PLUS with the
5394 same constant and it is a power of two. These might be doable
5395 with a pre- or post-increment. Similarly for two subtracts of
5396 identical powers of two with post decrement. */
5397
5398 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
5399 && (0
5400 #if defined(HAVE_PRE_INCREMENT) || defined(HAVE_POST_INCREMENT)
5401 || exact_log2 (INTVAL (const_arg1)) >= 0
5402 #endif
5403 #if defined(HAVE_PRE_DECREMENT) || defined(HAVE_POST_DECREMENT)
5404 || exact_log2 (- INTVAL (const_arg1)) >= 0
5405 #endif
5406 ))
5407 break;
5408
5409 /* Compute the code used to compose the constants. For example,
5410 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
5411
5412 associate_code
5413 = (code == MULT || code == DIV || code == UDIV ? MULT
5414 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
5415
5416 new_const = simplify_binary_operation (associate_code, mode,
5417 const_arg1, inner_const);
5418
5419 if (new_const == 0)
5420 break;
5421
5422 /* If we are associating shift operations, don't let this
5423 produce a shift of the size of the object or larger.
5424 This could occur when we follow a sign-extend by a right
5425 shift on a machine that does a sign-extend as a pair
5426 of shifts. */
5427
5428 if (is_shift && GET_CODE (new_const) == CONST_INT
5429 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
5430 {
5431 /* As an exception, we can turn an ASHIFTRT of this
5432 form into a shift of the number of bits - 1. */
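 /* (Illustrative: in SImode, (ashiftrt X (const_int 40)) yields the
 same value as (ashiftrt X (const_int 31)), since an arithmetic
 right shift only replicates the sign bit past that point.)  */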
5433 if (code == ASHIFTRT)
5434 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
5435 else
5436 break;
5437 }
5438
5439 y = copy_rtx (XEXP (y, 0));
5440
5441 /* If Y contains our first operand (the most common way this
5442 can happen is if Y is a MEM), we would go into an infinite
5443 loop if we tried to fold it. So don't in that case. */
5444
5445 if (! reg_mentioned_p (folded_arg0, y))
5446 y = fold_rtx (y, insn);
5447
5448 return cse_gen_binary (code, mode, y, new_const);
5449 }
5450 }
5451
5452 new = simplify_binary_operation (code, mode,
5453 const_arg0 ? const_arg0 : folded_arg0,
5454 const_arg1 ? const_arg1 : folded_arg1);
5455 break;
5456
5457 case 'o':
5458 /* (lo_sum (high X) X) is simply X. */
5459 if (code == LO_SUM && const_arg0 != 0
5460 && GET_CODE (const_arg0) == HIGH
5461 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
5462 return const_arg1;
5463 break;
5464
5465 case '3':
5466 case 'b':
5467 new = simplify_ternary_operation (code, mode, mode_arg0,
5468 const_arg0 ? const_arg0 : folded_arg0,
5469 const_arg1 ? const_arg1 : folded_arg1,
5470 const_arg2 ? const_arg2 : XEXP (x, 2));
5471 break;
5472 }
5473
5474 return new ? new : x;
5475 }
5476 \f
5477 /* Return a constant value currently equivalent to X.
5478 Return 0 if we don't know one. */
5479
5480 static rtx
5481 equiv_constant (x)
5482 rtx x;
5483 {
5484 if (GET_CODE (x) == REG
5485 && REGNO_QTY_VALID_P (REGNO (x))
5486 && qty_const[reg_qty[REGNO (x)]])
5487 x = gen_lowpart_if_possible (GET_MODE (x), qty_const[reg_qty[REGNO (x)]]);
5488
5489 if (x != 0 && CONSTANT_P (x))
5490 return x;
5491
5492 /* If X is a MEM, try to fold it outside the context of any insn to see if
5493 it might be equivalent to a constant. That handles the case where it
5494 is a constant-pool reference. Then try to look it up in the hash table
5495 in case it is something whose value we have seen before. */
5496
5497 if (GET_CODE (x) == MEM)
5498 {
5499 struct table_elt *elt;
5500
5501 x = fold_rtx (x, NULL_RTX);
5502 if (CONSTANT_P (x))
5503 return x;
5504
5505 elt = lookup (x, safe_hash (x, GET_MODE (x)) % NBUCKETS, GET_MODE (x));
5506 if (elt == 0)
5507 return 0;
5508
5509 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
5510 if (elt->is_const && CONSTANT_P (elt->exp))
5511 return elt->exp;
5512 }
5513
5514 return 0;
5515 }
5516 \f
5517 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
5518 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
5519 least-significant part of X.
5520 MODE specifies how big a part of X to return.
5521
5522 If the requested operation cannot be done, 0 is returned.
5523
5524 This is similar to gen_lowpart in emit-rtl.c. */
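 /* (Illustrative: the QImode low part of (mem:SI A) is (mem:QI A) on
 a little-endian target; on a big-endian target the address is
 offset so that the same low-order byte is referenced.)  */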
5525
5526 rtx
5527 gen_lowpart_if_possible (mode, x)
5528 enum machine_mode mode;
5529 register rtx x;
5530 {
5531 rtx result = gen_lowpart_common (mode, x);
5532
5533 if (result)
5534 return result;
5535 else if (GET_CODE (x) == MEM)
5536 {
5537 /* This is the only other case we handle. */
5538 register int offset = 0;
5539 rtx new;
5540
5541 #if WORDS_BIG_ENDIAN
5542 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
5543 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
5544 #endif
5545 #if BYTES_BIG_ENDIAN
5546 /* Adjust the address so that the address-after-the-data
5547 is unchanged. */
5548 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
5549 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
5550 #endif
5551 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
5552 if (! memory_address_p (mode, XEXP (new, 0)))
5553 return 0;
5554 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
5555 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
5556 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
5557 return new;
5558 }
5559 else
5560 return 0;
5561 }
5562 \f
5563 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
5564 branch. It will be zero if not.
5565
5566 In certain cases, this can cause us to add an equivalence. For example,
5567 if we are following the taken case of
5568 if (i == 2)
5569 we can add the fact that `i' and `2' are now equivalent.
5570
5571 In any case, we can record that this comparison was passed. If the same
5572 comparison is seen later, we will know its value. */
5573
5574 static void
5575 record_jump_equiv (insn, taken)
5576 rtx insn;
5577 int taken;
5578 {
5579 int cond_known_true;
5580 rtx op0, op1;
5581 enum machine_mode mode, mode0, mode1;
5582 int reversed_nonequality = 0;
5583 enum rtx_code code;
5584
5585 /* Ensure this is the right kind of insn. */
5586 if (! condjump_p (insn) || simplejump_p (insn))
5587 return;
5588
5589 /* See if this jump condition is known true or false. */
5590 if (taken)
5591 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 2) == pc_rtx);
5592 else
5593 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx);
5594
5595 /* Get the type of comparison being done and the operands being compared.
5596 If we had to reverse a non-equality condition, record that fact so we
5597 know that it isn't valid for floating-point. */
5598 code = GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 0));
5599 op0 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 0), insn);
5600 op1 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 1), insn);
5601
5602 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
5603 if (! cond_known_true)
5604 {
5605 reversed_nonequality = (code != EQ && code != NE);
5606 code = reverse_condition (code);
5607 }
5608
5609 /* The mode is the mode of the non-constant. */
5610 mode = mode0;
5611 if (mode1 != VOIDmode)
5612 mode = mode1;
5613
5614 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
5615 }
5616
5617 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
5618 REVERSED_NONEQUALITY is nonzero if CODE had to be reversed.
5619 Make any useful entries we can with that information. Called from
5620 above function and called recursively. */
5621
5622 static void
5623 record_jump_cond (code, mode, op0, op1, reversed_nonequality)
5624 enum rtx_code code;
5625 enum machine_mode mode;
5626 rtx op0, op1;
5627 int reversed_nonequality;
5628 {
5629 unsigned op0_hash, op1_hash;
5630 int op0_in_memory, op0_in_struct, op1_in_memory, op1_in_struct;
5631 struct table_elt *op0_elt, *op1_elt;
5632
5633 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
5634 we know that they are also equal in the smaller mode (this is also
5635 true for all smaller modes whether or not there is a SUBREG, but
5636 it is not worth testing for when there is no SUBREG). */
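 /* (Illustrative: if (subreg:DI (reg:SI 100) 0) is known equal to X,
 then (reg:SI 100) equals the SImode low part of X as well.)  */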
5637
5638 /* Note that GET_MODE (op0) may not equal MODE. */
5639 if (code == EQ && GET_CODE (op0) == SUBREG
5640 && (GET_MODE_SIZE (GET_MODE (op0))
5641 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
5642 {
5643 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
5644 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
5645
5646 record_jump_cond (code, mode, SUBREG_REG (op0),
5647 tem ? tem : gen_rtx (SUBREG, inner_mode, op1, 0),
5648 reversed_nonequality);
5649 }
5650
5651 if (code == EQ && GET_CODE (op1) == SUBREG
5652 && (GET_MODE_SIZE (GET_MODE (op1))
5653 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
5654 {
5655 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
5656 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
5657
5658 record_jump_cond (code, mode, SUBREG_REG (op1),
5659 tem ? tem : gen_rtx (SUBREG, inner_mode, op0, 0),
5660 reversed_nonequality);
5661 }
5662
5663 /* Similarly, if this is an NE comparison, and either is a SUBREG
5664 making a smaller mode, we know the whole thing is also NE. */
5665
5666 /* Note that GET_MODE (op0) may not equal MODE;
5667 if we test MODE instead, we can get an infinite recursion
5668 alternating between two modes each wider than MODE. */
5669
5670 if (code == NE && GET_CODE (op0) == SUBREG
5671 && subreg_lowpart_p (op0)
5672 && (GET_MODE_SIZE (GET_MODE (op0))
5673 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
5674 {
5675 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
5676 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
5677
5678 record_jump_cond (code, mode, SUBREG_REG (op0),
5679 tem ? tem : gen_rtx (SUBREG, inner_mode, op1, 0),
5680 reversed_nonequality);
5681 }
5682
5683 if (code == NE && GET_CODE (op1) == SUBREG
5684 && subreg_lowpart_p (op1)
5685 && (GET_MODE_SIZE (GET_MODE (op1))
5686 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
5687 {
5688 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
5689 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
5690
5691 record_jump_cond (code, mode, SUBREG_REG (op1),
5692 tem ? tem : gen_rtx (SUBREG, inner_mode, op0, 0),
5693 reversed_nonequality);
5694 }
5695
5696 /* Hash both operands. */
5697
5698 do_not_record = 0;
5699 hash_arg_in_memory = 0;
5700 hash_arg_in_struct = 0;
5701 op0_hash = HASH (op0, mode);
5702 op0_in_memory = hash_arg_in_memory;
5703 op0_in_struct = hash_arg_in_struct;
5704
5705 if (do_not_record)
5706 return;
5707
5708 do_not_record = 0;
5709 hash_arg_in_memory = 0;
5710 hash_arg_in_struct = 0;
5711 op1_hash = HASH (op1, mode);
5712 op1_in_memory = hash_arg_in_memory;
5713 op1_in_struct = hash_arg_in_struct;
5714
5715 if (do_not_record)
5716 return;
5717
5718 /* Look up both operands. */
5719 op0_elt = lookup (op0, op0_hash, mode);
5720 op1_elt = lookup (op1, op1_hash, mode);
5721
5722 /* If we aren't setting two things equal, all we can do is save this
5723 comparison. Similarly if this is floating-point. In the latter
5724 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
5725 If we record the equality, we might inadvertently delete code
5726 whose intent was to change -0 to +0. */
5727
5728 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
5729 {
5730 /* If we reversed a floating-point comparison, if OP0 is not a
5731 register, or if OP1 is neither a register nor a constant, we can't
5732 do anything. */
5733
5734 if (GET_CODE (op1) != REG)
5735 op1 = equiv_constant (op1);
5736
5737 if ((reversed_nonequality && FLOAT_MODE_P (mode))
5738 || GET_CODE (op0) != REG || op1 == 0)
5739 return;
5740
5741 /* Put OP0 in the hash table if it isn't already. This gives it a
5742 new quantity number. */
5743 if (op0_elt == 0)
5744 {
5745 if (insert_regs (op0, NULL_PTR, 0))
5746 {
5747 rehash_using_reg (op0);
5748 op0_hash = HASH (op0, mode);
5749
5750 /* If OP0 is contained in OP1, this changes its hash code
5751 as well. Faster to rehash than to check, except
5752 for the simple case of a constant. */
5753 if (! CONSTANT_P (op1))
5754 op1_hash = HASH (op1, mode);
5755 }
5756
5757 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
5758 op0_elt->in_memory = op0_in_memory;
5759 op0_elt->in_struct = op0_in_struct;
5760 }
5761
5762 qty_comparison_code[reg_qty[REGNO (op0)]] = code;
5763 if (GET_CODE (op1) == REG)
5764 {
5765 /* Look it up again--in case op0 and op1 are the same. */
5766 op1_elt = lookup (op1, op1_hash, mode);
5767
5768 /* Put OP1 in the hash table so it gets a new quantity number. */
5769 if (op1_elt == 0)
5770 {
5771 if (insert_regs (op1, NULL_PTR, 0))
5772 {
5773 rehash_using_reg (op1);
5774 op1_hash = HASH (op1, mode);
5775 }
5776
5777 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
5778 op1_elt->in_memory = op1_in_memory;
5779 op1_elt->in_struct = op1_in_struct;
5780 }
5781
5782 qty_comparison_qty[reg_qty[REGNO (op0)]] = reg_qty[REGNO (op1)];
5783 qty_comparison_const[reg_qty[REGNO (op0)]] = 0;
5784 }
5785 else
5786 {
5787 qty_comparison_qty[reg_qty[REGNO (op0)]] = -1;
5788 qty_comparison_const[reg_qty[REGNO (op0)]] = op1;
5789 }
5790
5791 return;
5792 }
5793
5794 /* If either side is still missing an equivalence, make it now,
5795 then merge the equivalences. */
5796
5797 if (op0_elt == 0)
5798 {
5799 if (insert_regs (op0, NULL_PTR, 0))
5800 {
5801 rehash_using_reg (op0);
5802 op0_hash = HASH (op0, mode);
5803 }
5804
5805 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
5806 op0_elt->in_memory = op0_in_memory;
5807 op0_elt->in_struct = op0_in_struct;
5808 }
5809
5810 if (op1_elt == 0)
5811 {
5812 if (insert_regs (op1, NULL_PTR, 0))
5813 {
5814 rehash_using_reg (op1);
5815 op1_hash = HASH (op1, mode);
5816 }
5817
5818 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
5819 op1_elt->in_memory = op1_in_memory;
5820 op1_elt->in_struct = op1_in_struct;
5821 }
5822
5823 merge_equiv_classes (op0_elt, op1_elt);
5824 last_jump_equiv_class = op0_elt;
5825 }
5826 \f
5827 /* CSE processing for one instruction.
5828 First simplify sources and addresses of all assignments
5829 in the instruction, using previously-computed equivalent values.
5830 Then install the new sources and destinations in the table
5831 of available values.
5832
5833 If IN_LIBCALL_BLOCK is nonzero, don't record any equivalence made in
5834 the insn. */
5835
5836 /* Data on one SET contained in the instruction. */
5837
5838 struct set
5839 {
5840 /* The SET rtx itself. */
5841 rtx rtl;
5842 /* The SET_SRC of the rtx (the original value, if it is changing). */
5843 rtx src;
5844 /* The hash-table element for the SET_SRC of the SET. */
5845 struct table_elt *src_elt;
5846 /* Hash value for the SET_SRC. */
5847 unsigned src_hash;
5848 /* Hash value for the SET_DEST. */
5849 unsigned dest_hash;
5850 /* The SET_DEST, with SUBREG, etc., stripped. */
5851 rtx inner_dest;
5852 /* Place where the pointer to the INNER_DEST was found. */
5853 rtx *inner_dest_loc;
5854 /* Nonzero if the SET_SRC is in memory. */
5855 char src_in_memory;
5856 /* Nonzero if the SET_SRC is in a structure. */
5857 char src_in_struct;
5858 /* Nonzero if the SET_SRC contains something
5859 whose value cannot be predicted and understood. */
5860 char src_volatile;
5861 /* Original machine mode, in case it becomes a CONST_INT. */
5862 enum machine_mode mode;
5863 /* A constant equivalent for SET_SRC, if any. */
5864 rtx src_const;
5865 /* Hash value of constant equivalent for SET_SRC. */
5866 unsigned src_const_hash;
5867 /* Table entry for constant equivalent for SET_SRC, if any. */
5868 struct table_elt *src_const_elt;
5869 };
5870
5871 static void
5872 cse_insn (insn, in_libcall_block)
5873 rtx insn;
5874 int in_libcall_block;
5875 {
5876 register rtx x = PATTERN (insn);
5877 register int i;
5878 rtx tem;
5879 register int n_sets = 0;
5880
5881 /* Records what this insn does to set CC0. */
5882 rtx this_insn_cc0 = 0;
5883 enum machine_mode this_insn_cc0_mode;
5884 struct write_data writes_memory;
5885 static struct write_data init = {0, 0, 0, 0};
5886
5887 rtx src_eqv = 0;
5888 struct table_elt *src_eqv_elt = 0;
5889 int src_eqv_volatile;
5890 int src_eqv_in_memory;
5891 int src_eqv_in_struct;
5892 unsigned src_eqv_hash;
5893
5894 struct set *sets;
5895
5896 this_insn = insn;
5897 writes_memory = init;
5898
5899 /* Find all the SETs and CLOBBERs in this instruction.
5900 Record all the SETs in the array `sets' and count them.
5901 Also determine whether there is a CLOBBER that invalidates
5902 all memory references, or all references at varying addresses. */
5903
5904 if (GET_CODE (insn) == CALL_INSN)
5905 {
5906 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
5907 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
5908 invalidate (SET_DEST (XEXP (tem, 0)));
5909 }
5910
5911 if (GET_CODE (x) == SET)
5912 {
5913 sets = (struct set *) alloca (sizeof (struct set));
5914 sets[0].rtl = x;
5915
5916 /* Ignore SETs that are unconditional jumps.
5917 They never need cse processing, so this does not hurt.
5918 The reason is not efficiency but rather
5919 so that we can test at the end for instructions
5920 that have been simplified to unconditional jumps
5921 and not be misled by unchanged instructions
5922 that were unconditional jumps to begin with. */
5923 if (SET_DEST (x) == pc_rtx
5924 && GET_CODE (SET_SRC (x)) == LABEL_REF)
5925 ;
5926
5927 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
5928 The hard function value register is used only once, to copy to
5929 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
5930 Ensure we invalidate the destination register. On the 80386 no
5931 other code would invalidate it since it is a fixed_reg.
5932 We need not check the return of apply_change_group; see canon_reg. */
5933
5934 else if (GET_CODE (SET_SRC (x)) == CALL)
5935 {
5936 canon_reg (SET_SRC (x), insn);
5937 apply_change_group ();
5938 fold_rtx (SET_SRC (x), insn);
5939 invalidate (SET_DEST (x));
5940 }
5941 else
5942 n_sets = 1;
5943 }
5944 else if (GET_CODE (x) == PARALLEL)
5945 {
5946 register int lim = XVECLEN (x, 0);
5947
5948 sets = (struct set *) alloca (lim * sizeof (struct set));
5949
5950 /* Find all regs explicitly clobbered in this insn,
5951 and ensure they are not replaced with any other regs
5952 elsewhere in this insn.
5953 When a reg that is clobbered is also used for input,
5954 we should presume that that is for a reason,
5955 and we should not substitute some other register
5956 which is not supposed to be clobbered.
5957 Therefore, this loop cannot be merged into the one below
5958 because a CALL may precede a CLOBBER and refer to the
5959 value clobbered. We must not let a canonicalization do
5960 anything in that case. */
5961 for (i = 0; i < lim; i++)
5962 {
5963 register rtx y = XVECEXP (x, 0, i);
5964 if (GET_CODE (y) == CLOBBER)
5965 {
5966 rtx clobbered = XEXP (y, 0);
5967
5968 if (GET_CODE (clobbered) == REG
5969 || GET_CODE (clobbered) == SUBREG)
5970 invalidate (clobbered);
5971 else if (GET_CODE (clobbered) == STRICT_LOW_PART
5972 || GET_CODE (clobbered) == ZERO_EXTRACT)
5973 invalidate (XEXP (clobbered, 0));
5974 }
5975 }
5976
5977 for (i = 0; i < lim; i++)
5978 {
5979 register rtx y = XVECEXP (x, 0, i);
5980 if (GET_CODE (y) == SET)
5981 {
5982 /* As above, we ignore unconditional jumps and call-insns and
5983 ignore the result of apply_change_group. */
5984 if (GET_CODE (SET_SRC (y)) == CALL)
5985 {
5986 canon_reg (SET_SRC (y), insn);
5987 apply_change_group ();
5988 fold_rtx (SET_SRC (y), insn);
5989 invalidate (SET_DEST (y));
5990 }
5991 else if (SET_DEST (y) == pc_rtx
5992 && GET_CODE (SET_SRC (y)) == LABEL_REF)
5993 ;
5994 else
5995 sets[n_sets++].rtl = y;
5996 }
5997 else if (GET_CODE (y) == CLOBBER)
5998 {
5999 /* If we clobber memory, take note of that,
6000 and canon the address.
6001 This does nothing when a register is clobbered
6002 because we have already invalidated the reg. */
6003 if (GET_CODE (XEXP (y, 0)) == MEM)
6004 {
6005 canon_reg (XEXP (y, 0), NULL_RTX);
6006 note_mem_written (XEXP (y, 0), &writes_memory);
6007 }
6008 }
6009 else if (GET_CODE (y) == USE
6010 && ! (GET_CODE (XEXP (y, 0)) == REG
6011 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
6012 canon_reg (y, NULL_RTX);
6013 else if (GET_CODE (y) == CALL)
6014 {
6015 /* The result of apply_change_group can be ignored; see
6016 canon_reg. */
6017 canon_reg (y, insn);
6018 apply_change_group ();
6019 fold_rtx (y, insn);
6020 }
6021 }
6022 }
6023 else if (GET_CODE (x) == CLOBBER)
6024 {
6025 if (GET_CODE (XEXP (x, 0)) == MEM)
6026 {
6027 canon_reg (XEXP (x, 0), NULL_RTX);
6028 note_mem_written (XEXP (x, 0), &writes_memory);
6029 }
6030 }
6031
6032 /* Canonicalize a USE of a pseudo register or memory location. */
6033 else if (GET_CODE (x) == USE
6034 && ! (GET_CODE (XEXP (x, 0)) == REG
6035 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
6036 canon_reg (XEXP (x, 0), NULL_RTX);
6037 else if (GET_CODE (x) == CALL)
6038 {
6039 /* The result of apply_change_group can be ignored; see canon_reg. */
6040 canon_reg (x, insn);
6041 apply_change_group ();
6042 fold_rtx (x, insn);
6043 }
6044
6045 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
6046 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
6047 is handled specially for this case, and if it isn't set, then there will
6048 be no equivalence for the destination. */
6049 if (n_sets == 1 && REG_NOTES (insn) != 0
6050 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
6051 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
6052 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
6053 src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
6054
6055 /* Canonicalize sources and addresses of destinations.
6056 We do this in a separate pass to avoid problems when a MATCH_DUP is
6057 present in the insn pattern. In that case, we want to ensure that
6058 we don't break the duplicate nature of the pattern. So we will replace
6059 both operands at the same time. Otherwise, we would fail to find an
6060 equivalent substitution in the loop calling validate_change below.
6061
6062 We used to suppress canonicalization of DEST if it appears in SRC,
6063 but we don't do this any more. */
6064
6065 for (i = 0; i < n_sets; i++)
6066 {
6067 rtx dest = SET_DEST (sets[i].rtl);
6068 rtx src = SET_SRC (sets[i].rtl);
6069 rtx new = canon_reg (src, insn);
6070
6071 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
6072 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
6073 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
6074 || insn_n_dups[recog_memoized (insn)] > 0)
6075 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
6076 else
6077 SET_SRC (sets[i].rtl) = new;
6078
6079 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
6080 {
6081 validate_change (insn, &XEXP (dest, 1),
6082 canon_reg (XEXP (dest, 1), insn), 1);
6083 validate_change (insn, &XEXP (dest, 2),
6084 canon_reg (XEXP (dest, 2), insn), 1);
6085 }
6086
6087 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
6088 || GET_CODE (dest) == ZERO_EXTRACT
6089 || GET_CODE (dest) == SIGN_EXTRACT)
6090 dest = XEXP (dest, 0);
6091
6092 if (GET_CODE (dest) == MEM)
6093 canon_reg (dest, insn);
6094 }
6095
6096 /* Now that we have done all the replacements, we can apply the change
6097 group and see if they all work. Note that this will cause some
6098 canonicalizations that would have worked individually not to be applied
6099 because some other canonicalization didn't work, but this should not
6100 occur often.
6101
6102 The result of apply_change_group can be ignored; see canon_reg. */
6103
6104 apply_change_group ();
6105
6106 /* Set sets[i].src_elt to the class each source belongs to.
6107 Detect assignments from or to volatile things
6108 and set sets[i] to zero so they will be ignored
6109 in the rest of this function.
6110
6111 Nothing in this loop changes the hash table or the register chains. */
6112
6113 for (i = 0; i < n_sets; i++)
6114 {
6115 register rtx src, dest;
6116 register rtx src_folded;
6117 register struct table_elt *elt = 0, *p;
6118 enum machine_mode mode;
6119 rtx src_eqv_here;
6120 rtx src_const = 0;
6121 rtx src_related = 0;
6122 struct table_elt *src_const_elt = 0;
6123 int src_cost = 10000, src_eqv_cost = 10000, src_folded_cost = 10000;
6124 int src_related_cost = 10000, src_elt_cost = 10000;
6125 /* Set non-zero if we need to call force_const_mem on the
6126 contents of src_folded before using it. */
6127 int src_folded_force_flag = 0;
6128
6129 dest = SET_DEST (sets[i].rtl);
6130 src = SET_SRC (sets[i].rtl);
6131
6132 /* If SRC is a constant that has no machine mode,
6133 hash it with the destination's machine mode.
6134 This way we can keep different modes separate. */
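 /* (E.g., (const_int 5) stored into an SImode register is hashed in
 SImode, separately from the same constant stored in DImode.)  */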
6135
6136 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
6137 sets[i].mode = mode;
6138
6139 if (src_eqv)
6140 {
6141 enum machine_mode eqvmode = mode;
6142 if (GET_CODE (dest) == STRICT_LOW_PART)
6143 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
6144 do_not_record = 0;
6145 hash_arg_in_memory = 0;
6146 hash_arg_in_struct = 0;
6147 src_eqv = fold_rtx (src_eqv, insn);
6148 src_eqv_hash = HASH (src_eqv, eqvmode);
6149
6150 /* Find the equivalence class for the equivalent expression. */
6151
6152 if (!do_not_record)
6153 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
6154
6155 src_eqv_volatile = do_not_record;
6156 src_eqv_in_memory = hash_arg_in_memory;
6157 src_eqv_in_struct = hash_arg_in_struct;
6158 }
6159
6160 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
6161 value of the INNER register, not the destination. So it is not
6162 a legal substitution for the source. But save it for later. */
6163 if (GET_CODE (dest) == STRICT_LOW_PART)
6164 src_eqv_here = 0;
6165 else
6166 src_eqv_here = src_eqv;
6167
6168 /* Simplify any foldable subexpressions in SRC. Then get the fully-
6169 simplified result, which may not necessarily be valid. */
6170 src_folded = fold_rtx (src, insn);
6171
6172 /* If storing a constant in a bitfield, pre-truncate the constant
6173 so we will be able to record it later. */
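 /* (Illustrative: (const_int 0x1ff) stored into an 8-bit field is
 recorded as (const_int 0xff), the value actually left in the
 field.)  */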
6174 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
6175 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
6176 {
6177 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
6178
6179 if (GET_CODE (src) == CONST_INT
6180 && GET_CODE (width) == CONST_INT
6181 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
6182 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
6183 src_folded
6184 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
6185 << INTVAL (width)) - 1));
6186 }
6187
6188 /* Compute SRC's hash code, and also notice if it
6189 should not be recorded at all. In that case,
6190 prevent any further processing of this assignment. */
6191 do_not_record = 0;
6192 hash_arg_in_memory = 0;
6193 hash_arg_in_struct = 0;
6194
6195 sets[i].src = src;
6196 sets[i].src_hash = HASH (src, mode);
6197 sets[i].src_volatile = do_not_record;
6198 sets[i].src_in_memory = hash_arg_in_memory;
6199 sets[i].src_in_struct = hash_arg_in_struct;
6200
6201 #if 0
6202 /* It is no longer clear why we used to do this, but it doesn't
6203 appear to still be needed. So let's try without it since this
6204 code hurts cse'ing widened ops. */
6205 /* If source is a perverse subreg (such as QI treated as an SI),
6206 treat it as volatile. It may do the work of an SI in one context
6207 where the extra bits are not being used, but cannot replace an SI
6208 in general. */
6209 if (GET_CODE (src) == SUBREG
6210 && (GET_MODE_SIZE (GET_MODE (src))
6211 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
6212 sets[i].src_volatile = 1;
6213 #endif
6214
6215 /* Locate all possible equivalent forms for SRC. Try to replace
6216 SRC in the insn with each cheaper equivalent.
6217
6218 We have the following types of equivalents: SRC itself, a folded
6219 version, a value given in a REG_EQUAL note, or a value related
6220 to a constant.
6221
6222 Each of these equivalents may be part of an additional class
6223 of equivalents (if more than one is in the table, they must be in
6224 the same class; we check for this).
6225
6226 If the source is volatile, we don't do any table lookups.
6227
6228 We note any constant equivalent for possible later use in a
6229 REG_NOTE. */
6230
6231 if (!sets[i].src_volatile)
6232 elt = lookup (src, sets[i].src_hash, mode);
6233
6234 sets[i].src_elt = elt;
6235
6236 if (elt && src_eqv_here && src_eqv_elt)
6237 {
6238 if (elt->first_same_value != src_eqv_elt->first_same_value)
6239 {
6240 /* The REG_EQUAL is indicating that two formerly distinct
6241 classes are now equivalent. So merge them. */
6242 merge_equiv_classes (elt, src_eqv_elt);
6243 src_eqv_hash = HASH (src_eqv, elt->mode);
6244 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
6245 }
6246
6247 src_eqv_here = 0;
6248 }
6249
6250 else if (src_eqv_elt)
6251 elt = src_eqv_elt;
6252
6253 /* Try to find a constant somewhere and record it in `src_const'.
6254 Record its table element, if any, in `src_const_elt'. Look in
6255 any known equivalences first. (If the constant is not in the
6256 table, also set `sets[i].src_const_hash'). */
6257 if (elt)
6258 for (p = elt->first_same_value; p; p = p->next_same_value)
6259 if (p->is_const)
6260 {
6261 src_const = p->exp;
6262 src_const_elt = elt;
6263 break;
6264 }
6265
6266 if (src_const == 0
6267 && (CONSTANT_P (src_folded)
6268 /* Consider (minus (label_ref L1) (label_ref L2)) as
6269 "constant" here so we will record it. This allows us
6270 to fold switch statements when an ADDR_DIFF_VEC is used. */
6271 || (GET_CODE (src_folded) == MINUS
6272 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
6273 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
6274 src_const = src_folded, src_const_elt = elt;
6275 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
6276 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
6277
6278 /* If we don't know if the constant is in the table, get its
6279 hash code and look it up. */
6280 if (src_const && src_const_elt == 0)
6281 {
6282 sets[i].src_const_hash = HASH (src_const, mode);
6283 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
6284 }
6285
6286 sets[i].src_const = src_const;
6287 sets[i].src_const_elt = src_const_elt;
6288
6289 /* If the constant and our source are both in the table, mark them as
6290 equivalent. Otherwise, if a constant is in the table but the source
6291 isn't, set ELT to it. */
6292 if (src_const_elt && elt
6293 && src_const_elt->first_same_value != elt->first_same_value)
6294 merge_equiv_classes (elt, src_const_elt);
6295 else if (src_const_elt && elt == 0)
6296 elt = src_const_elt;
6297
6298 /* See if there is a register linearly related to a constant
6299 equivalent of SRC. */
6300 if (src_const
6301 && (GET_CODE (src_const) == CONST
6302 || (src_const_elt && src_const_elt->related_value != 0)))
6303 {
6304 src_related = use_related_value (src_const, src_const_elt);
6305 if (src_related)
6306 {
6307 struct table_elt *src_related_elt
6308 = lookup (src_related, HASH (src_related, mode), mode);
6309 if (src_related_elt && elt)
6310 {
6311 if (elt->first_same_value
6312 != src_related_elt->first_same_value)
6313 /* This can occur when we previously saw a CONST
6314 involving a SYMBOL_REF and then see the SYMBOL_REF
6315 twice. Merge the involved classes. */
6316 merge_equiv_classes (elt, src_related_elt);
6317
6318 src_related = 0;
6319 src_related_elt = 0;
6320 }
6321 else if (src_related_elt && elt == 0)
6322 elt = src_related_elt;
6323 }
6324 }
6325
6326 /* See if we have a CONST_INT that is already in a register in a
6327 wider mode. */
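 /* (Illustrative: if (reg:DI 100) is known to hold (const_int 5), an
 SImode use of that constant can become
 (subreg:SI (reg:DI 100) 0) via gen_lowpart_if_possible.)  */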
6328
6329 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
6330 && GET_MODE_CLASS (mode) == MODE_INT
6331 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
6332 {
6333 enum machine_mode wider_mode;
6334
6335 for (wider_mode = GET_MODE_WIDER_MODE (mode);
6336 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
6337 && src_related == 0;
6338 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
6339 {
6340 struct table_elt *const_elt
6341 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
6342
6343 if (const_elt == 0)
6344 continue;
6345
6346 for (const_elt = const_elt->first_same_value;
6347 const_elt; const_elt = const_elt->next_same_value)
6348 if (GET_CODE (const_elt->exp) == REG)
6349 {
6350 src_related = gen_lowpart_if_possible (mode,
6351 const_elt->exp);
6352 break;
6353 }
6354 }
6355 }
6356
6357 /* Another possibility is that we have an AND with a constant in
6358 a mode narrower than a word. If so, it might have been generated
6359 as part of an "if" which would narrow the AND. If we already
6360 have done the AND in a wider mode, we can use a SUBREG of that
6361 value. */
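 /* (Illustrative: if (and:SI Y (const_int 15)) is already available
 in a register, where Y is the SImode counterpart of our operand,
 an (and:HI ... (const_int 15)) can reuse its low part.)  */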
6362
6363 if (flag_expensive_optimizations && ! src_related
6364 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
6365 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6366 {
6367 enum machine_mode tmode;
6368 rtx new_and = gen_rtx (AND, VOIDmode, NULL_RTX, XEXP (src, 1));
6369
6370 for (tmode = GET_MODE_WIDER_MODE (mode);
6371 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6372 tmode = GET_MODE_WIDER_MODE (tmode))
6373 {
6374 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
6375 struct table_elt *larger_elt;
6376
6377 if (inner)
6378 {
6379 PUT_MODE (new_and, tmode);
6380 XEXP (new_and, 0) = inner;
6381 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
6382 if (larger_elt == 0)
6383 continue;
6384
6385 for (larger_elt = larger_elt->first_same_value;
6386 larger_elt; larger_elt = larger_elt->next_same_value)
6387 if (GET_CODE (larger_elt->exp) == REG)
6388 {
6389 src_related
6390 = gen_lowpart_if_possible (mode, larger_elt->exp);
6391 break;
6392 }
6393
6394 if (src_related)
6395 break;
6396 }
6397 }
6398 }
6399
6400 #ifdef LOAD_EXTEND_OP
6401 /* See if a MEM has already been loaded with a widening operation;
6402 if it has, we can use a subreg of that. Many CISC machines
6403 also have such operations, but this is only likely to be
6404 beneficial on these machines. */
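 /* (Illustrative: if (zero_extend:SI (mem:QI A)) was previously
 loaded into a register and LOAD_EXTEND_OP (QImode) is ZERO_EXTEND,
 a later (mem:QI A) can use the low part of that register.)  */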
6405
6406 if (flag_expensive_optimizations && src_related == 0
6407 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6408 && GET_MODE_CLASS (mode) == MODE_INT
6409 && GET_CODE (src) == MEM && ! do_not_record
6410 && LOAD_EXTEND_OP (mode) != NIL)
6411 {
6412 enum machine_mode tmode;
6413
6414 /* Set what we are trying to extend and the operation it might
6415 have been extended with. */
6416 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
6417 XEXP (memory_extend_rtx, 0) = src;
6418
6419 for (tmode = GET_MODE_WIDER_MODE (mode);
6420 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6421 tmode = GET_MODE_WIDER_MODE (tmode))
6422 {
6423 struct table_elt *larger_elt;
6424
6425 PUT_MODE (memory_extend_rtx, tmode);
6426 larger_elt = lookup (memory_extend_rtx,
6427 HASH (memory_extend_rtx, tmode), tmode);
6428 if (larger_elt == 0)
6429 continue;
6430
6431 for (larger_elt = larger_elt->first_same_value;
6432 larger_elt; larger_elt = larger_elt->next_same_value)
6433 if (GET_CODE (larger_elt->exp) == REG)
6434 {
6435 src_related = gen_lowpart_if_possible (mode,
6436 larger_elt->exp);
6437 break;
6438 }
6439
6440 if (src_related)
6441 break;
6442 }
6443 }
6444 #endif /* LOAD_EXTEND_OP */
6445
6446 if (src == src_folded)
6447 src_folded = 0;
6448
6449 /* At this point, ELT, if non-zero, points to a class of expressions
6450 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
6451 and SRC_RELATED, if non-zero, each contain additional equivalent
6452 expressions. Prune these latter expressions by deleting expressions
6453 already in the equivalence class.
6454
6455 Check for an equivalent identical to the destination. If found,
6456 this is the preferred equivalent since it will likely lead to
6457 elimination of the insn. Indicate this by placing it in
6458 `src_related'. */
6459
6460 if (elt) elt = elt->first_same_value;
6461 for (p = elt; p; p = p->next_same_value)
6462 {
6463 enum rtx_code code = GET_CODE (p->exp);
6464
6465 /* If the expression is not valid, ignore it. Then we do not
6466 have to check for validity below. In most cases, we can use
6467 `rtx_equal_p', since canonicalization has already been done. */
6468 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
6469 continue;
6470
6471 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
6472 src = 0;
6473 else if (src_folded && GET_CODE (src_folded) == code
6474 && rtx_equal_p (src_folded, p->exp))
6475 src_folded = 0;
6476 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
6477 && rtx_equal_p (src_eqv_here, p->exp))
6478 src_eqv_here = 0;
6479 else if (src_related && GET_CODE (src_related) == code
6480 && rtx_equal_p (src_related, p->exp))
6481 src_related = 0;
6482
6483 /* If this is the same as the destination of the insn, we want
6484 to prefer it. Copy it to src_related. The code below will
6485 then give it a negative cost. */
6486 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
6487 src_related = dest;
6488
6489 }
6490
6491 /* Find the cheapest valid equivalent, trying all the available
6492 possibilities. Prefer items not in the hash table to ones
6493 that are when they are equal cost. Note that we can never
6494 worsen an insn as the current contents will also succeed.
6495 If we find an equivalent identical to the destination, use it as best,
6496 since this insn will probably be eliminated in that case. */
6497 if (src)
6498 {
6499 if (rtx_equal_p (src, dest))
6500 src_cost = -1;
6501 else
6502 src_cost = COST (src);
6503 }
6504
6505 if (src_eqv_here)
6506 {
6507 if (rtx_equal_p (src_eqv_here, dest))
6508 src_eqv_cost = -1;
6509 else
6510 src_eqv_cost = COST (src_eqv_here);
6511 }
6512
6513 if (src_folded)
6514 {
6515 if (rtx_equal_p (src_folded, dest))
6516 src_folded_cost = -1;
6517 else
6518 src_folded_cost = COST (src_folded);
6519 }
6520
6521 if (src_related)
6522 {
6523 if (rtx_equal_p (src_related, dest))
6524 src_related_cost = -1;
6525 else
6526 src_related_cost = COST (src_related);
6527 }
6528
6529 /* If this was an indirect jump insn, a known label will really be
6530 cheaper even though it looks more expensive. */
6531 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
6532 src_folded = src_const, src_folded_cost = -1;
6533
6534 /* Terminate loop when replacement made. This must terminate since
6535 the current contents will be tested and will always be valid. */
6536 while (1)
6537 {
6538 rtx trial;
6539
6540 /* Skip invalid entries. */
6541 while (elt && GET_CODE (elt->exp) != REG
6542 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6543 elt = elt->next_same_value;
6544
6545 if (elt) src_elt_cost = elt->cost;
6546
6547 /* Find cheapest and skip it for the next time. For items
6548 of equal cost, use this order:
6549 src_folded, src, src_eqv, src_related and hash table entry. */
6550 if (src_folded_cost <= src_cost
6551 && src_folded_cost <= src_eqv_cost
6552 && src_folded_cost <= src_related_cost
6553 && src_folded_cost <= src_elt_cost)
6554 {
6555 trial = src_folded, src_folded_cost = 10000;
6556 if (src_folded_force_flag)
6557 trial = force_const_mem (mode, trial);
6558 }
6559 else if (src_cost <= src_eqv_cost
6560 && src_cost <= src_related_cost
6561 && src_cost <= src_elt_cost)
6562 trial = src, src_cost = 10000;
6563 else if (src_eqv_cost <= src_related_cost
6564 && src_eqv_cost <= src_elt_cost)
6565 trial = copy_rtx (src_eqv_here), src_eqv_cost = 10000;
6566 else if (src_related_cost <= src_elt_cost)
6567 trial = copy_rtx (src_related), src_related_cost = 10000;
6568 else
6569 {
6570 trial = copy_rtx (elt->exp);
6571 elt = elt->next_same_value;
6572 src_elt_cost = 10000;
6573 }
6574
6575 /* We don't normally have an insn matching (set (pc) (pc)), so
6576 check for this separately here. We will delete such an
6577 insn below.
6578
6579 Tablejump insns contain a USE of the table, so simply replacing
6580 the operand with the constant won't match. This is simply an
6581 unconditional branch, however, and is therefore valid. Just
6582 insert the substitution here and we will delete and re-emit
6583 the insn later. */
6584
6585 if (n_sets == 1 && dest == pc_rtx
6586 && (trial == pc_rtx
6587 || (GET_CODE (trial) == LABEL_REF
6588 && ! condjump_p (insn))))
6589 {
6590 /* If TRIAL is a label in front of a jump table, we are
6591 really falling through the switch (this is how casesi
6592 insns work), so we must branch around the table. */
6593 if (GET_CODE (trial) == CODE_LABEL
6594 && NEXT_INSN (trial) != 0
6595 && GET_CODE (NEXT_INSN (trial)) == JUMP_INSN
6596 && (GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_DIFF_VEC
6597 || GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_VEC))
6598
6599 trial = gen_rtx (LABEL_REF, Pmode, get_label_after (trial));
6600
6601 SET_SRC (sets[i].rtl) = trial;
6602 cse_jumps_altered = 1;
6603 break;
6604 }
6605
6606 /* Look for a substitution that makes a valid insn. */
6607 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
6608 {
6609 /* The result of apply_change_group can be ignored; see
6610 canon_reg. */
6611
6612 validate_change (insn, &SET_SRC (sets[i].rtl),
6613 canon_reg (SET_SRC (sets[i].rtl), insn),
6614 1);
6615 apply_change_group ();
6616 break;
6617 }
6618
6619 /* If we previously found constant pool entries for
6620 constants and this is a constant, try making a
6621 pool entry. Put it in src_folded unless we already have done
6622 this since that is where it likely came from. */
6623
6624 else if (constant_pool_entries_cost
6625 && CONSTANT_P (trial)
6626 && (src_folded == 0 || GET_CODE (src_folded) != MEM)
6627 && GET_MODE_CLASS (mode) != MODE_CC)
6628 {
6629 src_folded_force_flag = 1;
6630 src_folded = trial;
6631 src_folded_cost = constant_pool_entries_cost;
6632 }
6633 }
6634
6635 src = SET_SRC (sets[i].rtl);
6636
6637 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
6638 However, there is an important exception: If both are registers
6639 that are not the head of their equivalence class, replace SET_SRC
6640 with the head of the class. If we do not do this, we will have
6641 both registers live over a portion of the basic block. This way,
6642 their lifetimes will likely abut instead of overlapping. */
6643 if (GET_CODE (dest) == REG
6644 && REGNO_QTY_VALID_P (REGNO (dest))
6645 && qty_mode[reg_qty[REGNO (dest)]] == GET_MODE (dest)
6646 && qty_first_reg[reg_qty[REGNO (dest)]] != REGNO (dest)
6647 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
6648 /* Don't do this if the original insn had a hard reg as
6649 SET_SRC. */
6650 && (GET_CODE (sets[i].src) != REG
6651 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER))
6652 /* We can't call canon_reg here because it won't do anything if
6653 SRC is a hard register. */
6654 {
6655 int first = qty_first_reg[reg_qty[REGNO (src)]];
6656
6657 src = SET_SRC (sets[i].rtl)
6658 = first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
6659 : gen_rtx (REG, GET_MODE (src), first);
6660
6661 /* If we had a constant that is cheaper than what we are now
6662 setting SRC to, use that constant. We ignored it when we
6663 thought we could make this into a no-op. */
6664 if (src_const && COST (src_const) < COST (src)
6665 && validate_change (insn, &SET_SRC (sets[i].rtl), src_const, 0))
6666 src = src_const;
6667 }
6668
6669 /* If we made a change, recompute SRC values. */
6670 if (src != sets[i].src)
6671 {
6672 do_not_record = 0;
6673 hash_arg_in_memory = 0;
6674 hash_arg_in_struct = 0;
6675 sets[i].src = src;
6676 sets[i].src_hash = HASH (src, mode);
6677 sets[i].src_volatile = do_not_record;
6678 sets[i].src_in_memory = hash_arg_in_memory;
6679 sets[i].src_in_struct = hash_arg_in_struct;
6680 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
6681 }
6682
6683 /* If this is a single SET, we are setting a register, and we have an
6684 equivalent constant, we want to add a REG_NOTE. We don't want
6685 to write a REG_EQUAL note for a constant pseudo since verifying that
6686 that pseudo hasn't been eliminated is a pain. Such a note also
6687 won't help anything. */
6688 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
6689 && GET_CODE (src_const) != REG)
6690 {
6691 tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6692
6693 /* Record the actual constant value in a REG_EQUAL note, making
6694 a new one if one does not already exist. */
6695 if (tem)
6696 XEXP (tem, 0) = src_const;
6697 else
6698 REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_EQUAL,
6699 src_const, REG_NOTES (insn));
6700
6701 /* If storing a constant value in a register that
6702 previously held the constant value 0,
6703 record this fact with a REG_WAS_0 note on this insn.
6704
6705 Note that the *register* is required to have previously held 0,
6706 not just any register in the quantity and we must point to the
6707 insn that set that register to zero.
6708
6709 Rather than track each register individually, we just see if
6710 the last set for this quantity was for this register. */
6711
6712 if (REGNO_QTY_VALID_P (REGNO (dest))
6713 && qty_const[reg_qty[REGNO (dest)]] == const0_rtx)
6714 {
6715 /* See if we previously had a REG_WAS_0 note. */
6716 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
6717 rtx const_insn = qty_const_insn[reg_qty[REGNO (dest)]];
6718
6719 if ((tem = single_set (const_insn)) != 0
6720 && rtx_equal_p (SET_DEST (tem), dest))
6721 {
6722 if (note)
6723 XEXP (note, 0) = const_insn;
6724 else
6725 REG_NOTES (insn) = gen_rtx (INSN_LIST, REG_WAS_0,
6726 const_insn, REG_NOTES (insn));
6727 }
6728 }
6729 }
6730
6731 /* Now deal with the destination. */
6732 do_not_record = 0;
6733 sets[i].inner_dest_loc = &SET_DEST (sets[i].rtl);
6734
6735 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
6736 to the MEM or REG within it. */
6737 while (GET_CODE (dest) == SIGN_EXTRACT
6738 || GET_CODE (dest) == ZERO_EXTRACT
6739 || GET_CODE (dest) == SUBREG
6740 || GET_CODE (dest) == STRICT_LOW_PART)
6741 {
6742 sets[i].inner_dest_loc = &XEXP (dest, 0);
6743 dest = XEXP (dest, 0);
6744 }
6745
6746 sets[i].inner_dest = dest;
6747
6748 if (GET_CODE (dest) == MEM)
6749 {
6750 dest = fold_rtx (dest, insn);
6751
6752 /* Decide whether we invalidate everything in memory,
6753 or just things at non-fixed places.
6754 Writing a large aggregate must invalidate everything
6755 because we don't know how long it is. */
6756 note_mem_written (dest, &writes_memory);
6757 }
6758
6759 /* Compute the hash code of the destination now,
6760 before the effects of this instruction are recorded,
6761 since the register values used in the address computation
6762 are those before this instruction. */
6763 sets[i].dest_hash = HASH (dest, mode);
6764
6765 /* Don't enter a bit-field in the hash table
6766 because the value in it after the store
6767 may not equal what was stored, due to truncation. */
6768
6769 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
6770 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
6771 {
6772 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
6773
6774 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
6775 && GET_CODE (width) == CONST_INT
6776 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
6777 && ! (INTVAL (src_const)
6778 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
6779 /* Exception: if the value is constant,
6780 and it won't be truncated, record it. */
6781 ;
6782 else
6783 {
6784 /* This is chosen so that the destination will be invalidated
6785 but no new value will be recorded.
6786 We must invalidate because sometimes constant
6787 values can be recorded for bitfields. */
6788 sets[i].src_elt = 0;
6789 sets[i].src_volatile = 1;
6790 src_eqv = 0;
6791 src_eqv_elt = 0;
6792 }
6793 }
6794
6795 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
6796 the insn. */
6797 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
6798 {
6799 PUT_CODE (insn, NOTE);
6800 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
6801 NOTE_SOURCE_FILE (insn) = 0;
6802 cse_jumps_altered = 1;
6803 /* One less use of the label this insn used to jump to. */
6804 --LABEL_NUSES (JUMP_LABEL (insn));
6805 /* No more processing for this set. */
6806 sets[i].rtl = 0;
6807 }
6808
6809 /* If this SET is now setting PC to a label, we know it used to
6810 be a conditional or computed branch. So we see if we can follow
6811 it. If it was a computed branch, delete it and re-emit. */
6812 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
6813 {
6814 rtx p;
6815
6816 /* If this is not in the format for a simple branch and
6817 we are the only SET in it, re-emit it. */
6818 if (! simplejump_p (insn) && n_sets == 1)
6819 {
6820 rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
6821 JUMP_LABEL (new) = XEXP (src, 0);
6822 LABEL_NUSES (XEXP (src, 0))++;
6823 delete_insn (insn);
6824 insn = new;
6825 }
6826 else
6827 /* Otherwise, force rerecognition, since it probably had
6828 a different pattern before.
6829 This shouldn't really be necessary, since whatever
6830 changed the source value above should have done this.
6831 Until the right place is found, might as well do this here. */
6832 INSN_CODE (insn) = -1;
6833
6834 /* Now that we've converted this jump to an unconditional jump,
6835 there is dead code after it. Delete the dead code until we
6836 reach a BARRIER, the end of the function, or a label. Do
6837 not delete NOTEs except for NOTE_INSN_DELETED since later
6838 phases assume these notes are retained. */
6839
6840 p = insn;
6841
6842 while (NEXT_INSN (p) != 0
6843 && GET_CODE (NEXT_INSN (p)) != BARRIER
6844 && GET_CODE (NEXT_INSN (p)) != CODE_LABEL)
6845 {
6846 if (GET_CODE (NEXT_INSN (p)) != NOTE
6847 || NOTE_LINE_NUMBER (NEXT_INSN (p)) == NOTE_INSN_DELETED)
6848 delete_insn (NEXT_INSN (p));
6849 else
6850 p = NEXT_INSN (p);
6851 }
6852
6853 /* If we don't have a BARRIER immediately after INSN, put one there.
6854 Much code assumes that there are no NOTEs between a JUMP_INSN and
6855 BARRIER. */
6856
6857 if (NEXT_INSN (insn) == 0
6858 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
6859 emit_barrier_after (insn);
6860
6861 /* We might have two BARRIERs separated by notes. Delete the second
6862 one if so. */
6863
6864 if (p != insn && NEXT_INSN (p) != 0
6865 && GET_CODE (NEXT_INSN (p)) == BARRIER)
6866 delete_insn (NEXT_INSN (p));
6867
6868 cse_jumps_altered = 1;
6869 sets[i].rtl = 0;
6870 }
6871
6872 /* If destination is volatile, invalidate it and then do no further
6873 processing for this assignment. */
6874
6875 else if (do_not_record)
6876 {
6877 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
6878 || GET_CODE (dest) == MEM)
6879 invalidate (dest);
6880 else if (GET_CODE (dest) == STRICT_LOW_PART
6881 || GET_CODE (dest) == ZERO_EXTRACT)
6882 invalidate (XEXP (dest, 0));
6883 sets[i].rtl = 0;
6884 }
6885
6886 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
6887 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
6888
6889 #ifdef HAVE_cc0
6890 /* If setting CC0, record what it was set to, or a constant, if it
6891 is equivalent to a constant. If it is being set to a floating-point
6892 value, make a COMPARE with the appropriate constant of 0. If we
6893 don't do this, later code can interpret this as a test against
6894 const0_rtx, which can cause problems if we try to put it into an
6895 insn as a floating-point operand. */
6896 if (dest == cc0_rtx)
6897 {
6898 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
6899 this_insn_cc0_mode = mode;
6900 if (FLOAT_MODE_P (mode))
6901 this_insn_cc0 = gen_rtx (COMPARE, VOIDmode, this_insn_cc0,
6902 CONST0_RTX (mode));
6903 }
6904 #endif
6905 }
6906
6907 /* Now enter all non-volatile source expressions in the hash table
6908 if they are not already present.
6909 Record their equivalence classes in src_elt.
6910 This way we can insert the corresponding destinations into
6911 the same classes even if the actual sources are no longer in them
6912 (having been invalidated). */
6913
6914 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
6915 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
6916 {
6917 register struct table_elt *elt;
6918 register struct table_elt *classp = sets[0].src_elt;
6919 rtx dest = SET_DEST (sets[0].rtl);
6920 enum machine_mode eqvmode = GET_MODE (dest);
6921
6922 if (GET_CODE (dest) == STRICT_LOW_PART)
6923 {
6924 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
6925 classp = 0;
6926 }
6927 if (insert_regs (src_eqv, classp, 0))
6928 src_eqv_hash = HASH (src_eqv, eqvmode);
6929 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
6930 elt->in_memory = src_eqv_in_memory;
6931 elt->in_struct = src_eqv_in_struct;
6932 src_eqv_elt = elt;
6933
6934 /* Check to see if src_eqv_elt is the same as a set source which
6935 does not yet have an elt, and if so set the elt of the set source
6936 to src_eqv_elt. */
6937 for (i = 0; i < n_sets; i++)
6938 if (sets[i].rtl && sets[i].src_elt == 0
6939 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
6940 sets[i].src_elt = src_eqv_elt;
6941 }
6942
6943 for (i = 0; i < n_sets; i++)
6944 if (sets[i].rtl && ! sets[i].src_volatile
6945 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
6946 {
6947 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
6948 {
6949 /* REG_EQUAL in setting a STRICT_LOW_PART
6950 gives an equivalent for the entire destination register,
6951 not just for the subreg being stored in now.
6952 This is a more interesting equivalence, so we arrange later
6953 to treat the entire reg as the destination. */
6954 sets[i].src_elt = src_eqv_elt;
6955 sets[i].src_hash = src_eqv_hash;
6956 }
6957 else
6958 {
6959 /* Insert source and constant equivalent into hash table, if not
6960 already present. */
6961 register struct table_elt *classp = src_eqv_elt;
6962 register rtx src = sets[i].src;
6963 register rtx dest = SET_DEST (sets[i].rtl);
6964 enum machine_mode mode
6965 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
6966
6967 if (sets[i].src_elt == 0)
6968 {
6969 register struct table_elt *elt;
6970
6971 /* Note that these insert_regs calls cannot remove
6972 any of the src_elt's, because they would have failed to
6973 match if not still valid. */
6974 if (insert_regs (src, classp, 0))
6975 sets[i].src_hash = HASH (src, mode);
6976 elt = insert (src, classp, sets[i].src_hash, mode);
6977 elt->in_memory = sets[i].src_in_memory;
6978 elt->in_struct = sets[i].src_in_struct;
6979 sets[i].src_elt = classp = elt;
6980 }
6981
6982 if (sets[i].src_const && sets[i].src_const_elt == 0
6983 && src != sets[i].src_const
6984 && ! rtx_equal_p (sets[i].src_const, src))
6985 sets[i].src_elt = insert (sets[i].src_const, classp,
6986 sets[i].src_const_hash, mode);
6987 }
6988 }
6989 else if (sets[i].src_elt == 0)
6990 /* If we did not insert the source into the hash table (e.g., it was
6991 volatile), note the equivalence class for the REG_EQUAL value, if any,
6992 so that the destination goes into that class. */
6993 sets[i].src_elt = src_eqv_elt;
6994
6995 invalidate_from_clobbers (&writes_memory, x);
6996
6997 /* Some registers are invalidated by subroutine calls. Memory is
6998 invalidated by non-constant calls. */
6999
7000 if (GET_CODE (insn) == CALL_INSN)
7001 {
7002 static struct write_data everything = {0, 1, 1, 1};
7003
7004 if (! CONST_CALL_P (insn))
7005 invalidate_memory (&everything);
7006 invalidate_for_call ();
7007 }
7008
7009 /* Now invalidate everything set by this instruction.
7010 If a SUBREG or other funny destination is being set,
7011 sets[i].rtl is still nonzero, so here we invalidate the reg
7012 a part of which is being set. */
7013
7014 for (i = 0; i < n_sets; i++)
7015 if (sets[i].rtl)
7016 {
7017 register rtx dest = sets[i].inner_dest;
7018
7019 /* Needed for registers to remove the register from its
7020 previous quantity's chain.
7021 Needed for memory if this is a nonvarying address, unless
7022 we have just done an invalidate_memory that covers even those. */
7023 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7024 || (! writes_memory.all && ! cse_rtx_addr_varies_p (dest)))
7025 invalidate (dest);
7026 else if (GET_CODE (dest) == STRICT_LOW_PART
7027 || GET_CODE (dest) == ZERO_EXTRACT)
7028 invalidate (XEXP (dest, 0));
7029 }
7030
7031 /* Make sure registers mentioned in destinations
7032 are safe for use in an expression to be inserted.
7033 This removes from the hash table
7034 any invalid entry that refers to one of these registers.
7035
7036 We don't care about the return value from mention_regs because
7037 we are going to hash the SET_DEST values unconditionally. */
7038
7039 for (i = 0; i < n_sets; i++)
7040 if (sets[i].rtl && GET_CODE (SET_DEST (sets[i].rtl)) != REG)
7041 mention_regs (SET_DEST (sets[i].rtl));
7042
7043 /* We may have just removed some of the src_elt's from the hash table.
7044 So replace each one with the current head of the same class. */
7045
7046 for (i = 0; i < n_sets; i++)
7047 if (sets[i].rtl)
7048 {
7049 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
7050 /* If elt was removed, find current head of same class,
7051 or 0 if nothing remains of that class. */
7052 {
7053 register struct table_elt *elt = sets[i].src_elt;
7054
7055 while (elt && elt->prev_same_value)
7056 elt = elt->prev_same_value;
7057
7058 while (elt && elt->first_same_value == 0)
7059 elt = elt->next_same_value;
7060 sets[i].src_elt = elt ? elt->first_same_value : 0;
7061 }
7062 }
7063
7064 /* Now insert the destinations into their equivalence classes. */
7065
7066 for (i = 0; i < n_sets; i++)
7067 if (sets[i].rtl)
7068 {
7069 register rtx dest = SET_DEST (sets[i].rtl);
7070 register struct table_elt *elt;
7071
7072 /* Don't record value if we are not supposed to risk allocating
7073 floating-point values in registers that might be wider than
7074 memory. */
7075 if ((flag_float_store
7076 && GET_CODE (dest) == MEM
7077 && FLOAT_MODE_P (GET_MODE (dest)))
7078 /* Don't record values of destinations set inside a libcall block
7079 since we might delete the libcall. Things should have been set
7080 up so we won't want to reuse such a value, but we play it safe
7081 here. */
7082 || in_libcall_block
7083 /* If we didn't put a REG_EQUAL value or a source into the hash
7084 table, there is no point in recording DEST. */
7085 || sets[i].src_elt == 0)
7086 continue;
7087
7088 /* STRICT_LOW_PART isn't part of the value BEING set,
7089 and neither is the SUBREG inside it.
7090 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
7091 if (GET_CODE (dest) == STRICT_LOW_PART)
7092 dest = SUBREG_REG (XEXP (dest, 0));
7093
7094 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
7095 /* Registers must also be inserted into chains for quantities. */
7096 if (insert_regs (dest, sets[i].src_elt, 1))
7097 /* If `insert_regs' changes something, the hash code must be
7098 recalculated. */
7099 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
7100
7101 elt = insert (dest, sets[i].src_elt,
7102 sets[i].dest_hash, GET_MODE (dest));
7103 elt->in_memory = GET_CODE (sets[i].inner_dest) == MEM;
7104 if (elt->in_memory)
7105 {
7106 /* This implicitly assumes a whole struct
7107 need not have MEM_IN_STRUCT_P.
7108 But a whole struct is *supposed* to have MEM_IN_STRUCT_P. */
7109 elt->in_struct = (MEM_IN_STRUCT_P (sets[i].inner_dest)
7110 || sets[i].inner_dest != SET_DEST (sets[i].rtl));
7111 }
7112
7113 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
7114 narrower than M2, and both M1 and M2 are the same number of words,
7115 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
7116 make that equivalence as well.
7117
7118 However, BAR may have equivalences for which gen_lowpart_if_possible
7119 will produce a simpler value than gen_lowpart_if_possible applied to
7120 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
7121 BAR's equivalences. If we don't get a simplified form, make
7122 the SUBREG. It will not be used in an equivalence, but will
7123 cause two similar assignments to be detected.
7124
7125 Note the loop below will find SUBREG_REG (DEST) since we have
7126 already entered SRC and DEST of the SET in the table. */
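
/* For illustration, a hypothetical case with M1 = SImode and
   M2 = HImode (one word each, SImode no narrower than HImode):
   recording (set (subreg:SI (reg:HI 100) 0) (reg:SI 101)) lets us
   also record (set (reg:HI 100) (subreg:HI (reg:SI 101) 0)), so a
   later HImode computation of the low part of (reg:SI 101) can be
   detected as a duplicate of (reg:HI 100). */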
7127
7128 if (GET_CODE (dest) == SUBREG
7129 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
7130 / UNITS_PER_WORD)
7131 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
7132 && (GET_MODE_SIZE (GET_MODE (dest))
7133 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7134 && sets[i].src_elt != 0)
7135 {
7136 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
7137 struct table_elt *elt, *classp = 0;
7138
7139 for (elt = sets[i].src_elt->first_same_value; elt;
7140 elt = elt->next_same_value)
7141 {
7142 rtx new_src = 0;
7143 unsigned src_hash;
7144 struct table_elt *src_elt;
7145
7146 /* Ignore invalid entries. */
7147 if (GET_CODE (elt->exp) != REG
7148 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
7149 continue;
7150
7151 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
7152 if (new_src == 0)
7153 new_src = gen_rtx (SUBREG, new_mode, elt->exp, 0);
7154
7155 src_hash = HASH (new_src, new_mode);
7156 src_elt = lookup (new_src, src_hash, new_mode);
7157
7158 /* Put the new source in the hash table if it isn't
7159 there already. */
7160 if (src_elt == 0)
7161 {
7162 if (insert_regs (new_src, classp, 0))
7163 src_hash = HASH (new_src, new_mode);
7164 src_elt = insert (new_src, classp, src_hash, new_mode);
7165 src_elt->in_memory = elt->in_memory;
7166 src_elt->in_struct = elt->in_struct;
7167 }
7168 else if (classp && classp != src_elt->first_same_value)
7169 /* Show that two things that we've seen before are
7170 actually the same. */
7171 merge_equiv_classes (src_elt, classp);
7172
7173 classp = src_elt->first_same_value;
7174 }
7175 }
7176 }
7177
7178 /* Special handling for (set REG0 REG1)
7179 where REG0 is the "cheapest", cheaper than REG1.
7180 After cse, REG1 will probably not be used in the sequel,
7181 so (if easily done) change this insn to (set REG1 REG0) and
7182 replace REG1 with REG0 in the previous insn that computed their value.
7183 Then REG1 will become a dead store and won't cloud the situation
7184 for later optimizations.
7185
7186 Do not make this change if REG1 is a hard register, because it will
7187 then be used in the sequel and we may be changing a two-operand insn
7188 into a three-operand insn.
7189
7190 Also do not do this if we are operating on a copy of INSN. */
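
/* For illustration, with hypothetical pseudos 100 (REG0, cheapest)
   and 101 (REG1), the sequence

   (set (reg:SI 101) (plus:SI ...))
   (set (reg:SI 100) (reg:SI 101))

   is rewritten as

   (set (reg:SI 100) (plus:SI ...))
   (set (reg:SI 101) (reg:SI 100))

   so that if (reg:SI 101) is unused in the sequel, the copy is a
   dead store that later passes can delete. */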
7191
7192 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
7193 && NEXT_INSN (PREV_INSN (insn)) == insn
7194 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
7195 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
7196 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl)))
7197 && (qty_first_reg[reg_qty[REGNO (SET_SRC (sets[0].rtl))]]
7198 == REGNO (SET_DEST (sets[0].rtl))))
7199 {
7200 rtx prev = PREV_INSN (insn);
7201 while (prev && GET_CODE (prev) == NOTE)
7202 prev = PREV_INSN (prev);
7203
7204 if (prev && GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SET
7205 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl))
7206 {
7207 rtx dest = SET_DEST (sets[0].rtl);
7208 rtx note = find_reg_note (prev, REG_EQUIV, NULL_RTX);
7209
7210 validate_change (prev, & SET_DEST (PATTERN (prev)), dest, 1);
7211 validate_change (insn, & SET_DEST (sets[0].rtl),
7212 SET_SRC (sets[0].rtl), 1);
7213 validate_change (insn, & SET_SRC (sets[0].rtl), dest, 1);
7214 apply_change_group ();
7215
7216 /* If REG1 was equivalent to a constant, REG0 is not. */
7217 if (note)
7218 PUT_REG_NOTE_KIND (note, REG_EQUAL);
7219
7220 /* If there was a REG_WAS_0 note on PREV, remove it. Move
7221 any REG_WAS_0 note on INSN to PREV. */
7222 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
7223 if (note)
7224 remove_note (prev, note);
7225
7226 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
7227 if (note)
7228 {
7229 remove_note (insn, note);
7230 XEXP (note, 1) = REG_NOTES (prev);
7231 REG_NOTES (prev) = note;
7232 }
7233 }
7234 }
7235
7236 /* If this is a conditional jump insn, record any known equivalences due to
7237 the condition being tested. */
7238
7239 last_jump_equiv_class = 0;
7240 if (GET_CODE (insn) == JUMP_INSN
7241 && n_sets == 1 && GET_CODE (x) == SET
7242 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
7243 record_jump_equiv (insn, 0);
7244
7245 #ifdef HAVE_cc0
7246 /* If the previous insn set CC0 and this insn no longer references CC0,
7247 delete the previous insn. Here we use the fact that nothing expects CC0
7248 to be valid over an insn, which is true until the final pass. */
7249 if (prev_insn && GET_CODE (prev_insn) == INSN
7250 && (tem = single_set (prev_insn)) != 0
7251 && SET_DEST (tem) == cc0_rtx
7252 && ! reg_mentioned_p (cc0_rtx, x))
7253 {
7254 PUT_CODE (prev_insn, NOTE);
7255 NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
7256 NOTE_SOURCE_FILE (prev_insn) = 0;
7257 }
7258
7259 prev_insn_cc0 = this_insn_cc0;
7260 prev_insn_cc0_mode = this_insn_cc0_mode;
7261 #endif
7262
7263 prev_insn = insn;
7264 }
7265 \f
7266 /* Store 1 in *WRITES_PTR for those categories of memory ref
7267 that must be invalidated when the expression WRITTEN is stored in.
7268 If WRITTEN is null, say everything must be invalidated. */
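
/* A sketch of the classification done below, assuming the fields of
   struct write_data are ordered {sp, var, nonscalar, all}:

   (mem:SI (pre_dec (reg sp)))    -> sp only (a push or pop)
   (mem:BLK ...)                  -> everything
   (mem:SI (symbol_ref "x"))      -> var only (fixed scalar address)
   (mem:SI (plus (reg 100) ...))  -> var and nonscalar; a varying sum
                                     wider than QImode spares scalars
   (mem:QI (reg 100))             -> var, nonscalar and all, since a
                                     character reference may alias
                                     anything. */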
7269
7270 static void
7271 note_mem_written (written, writes_ptr)
7272 rtx written;
7273 struct write_data *writes_ptr;
7274 {
7275 static struct write_data everything = {0, 1, 1, 1};
7276
7277 if (written == 0)
7278 *writes_ptr = everything;
7279 else if (GET_CODE (written) == MEM)
7280 {
7281 /* Pushing or popping the stack invalidates just the stack pointer. */
7282 rtx addr = XEXP (written, 0);
7283 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
7284 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
7285 && GET_CODE (XEXP (addr, 0)) == REG
7286 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
7287 {
7288 writes_ptr->sp = 1;
7289 return;
7290 }
7291 else if (GET_MODE (written) == BLKmode)
7292 *writes_ptr = everything;
7293 /* (mem (scratch)) means clobber everything. */
7294 else if (GET_CODE (addr) == SCRATCH)
7295 *writes_ptr = everything;
7296 else if (cse_rtx_addr_varies_p (written))
7297 {
7298 /* A varying address that is a sum indicates an array element,
7299 and that's just as good as a structure element
7300 in implying that we need not invalidate scalar variables.
7301 However, we must allow QImode aliasing of scalars, because the
7302 ANSI C standard allows character pointers to alias anything. */
7303 if (! ((MEM_IN_STRUCT_P (written)
7304 || GET_CODE (XEXP (written, 0)) == PLUS)
7305 && GET_MODE (written) != QImode))
7306 writes_ptr->all = 1;
7307 writes_ptr->nonscalar = 1;
7308 }
7309 writes_ptr->var = 1;
7310 }
7311 }
7312
7313 /* Perform invalidation on the basis of everything about an insn
7314 except for invalidating the actual places that are SET in it.
7315 This includes the places CLOBBERed, and anything that might
7316 alias with something that is SET or CLOBBERed.
7317
7318 W points to the writes_memory for this insn, a struct write_data
7319 saying which kinds of memory references must be invalidated.
7320 X is the pattern of the insn. */
7321
7322 static void
7323 invalidate_from_clobbers (w, x)
7324 struct write_data *w;
7325 rtx x;
7326 {
7327 /* If W->var is not set, W specifies no action.
7328 If W->all is set, this step gets all memory refs
7329 so they can be ignored in the rest of this function. */
7330 if (w->var)
7331 invalidate_memory (w);
7332
7333 if (w->sp)
7334 {
7335 if (reg_tick[STACK_POINTER_REGNUM] >= 0)
7336 reg_tick[STACK_POINTER_REGNUM]++;
7337
7338 /* This should be *very* rare. */
7339 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
7340 invalidate (stack_pointer_rtx);
7341 }
7342
7343 if (GET_CODE (x) == CLOBBER)
7344 {
7345 rtx ref = XEXP (x, 0);
7346 if (ref)
7347 {
7348 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
7349 || (GET_CODE (ref) == MEM && ! w->all))
7350 invalidate (ref);
7351 else if (GET_CODE (ref) == STRICT_LOW_PART
7352 || GET_CODE (ref) == ZERO_EXTRACT)
7353 invalidate (XEXP (ref, 0));
7354 }
7355 }
7356 else if (GET_CODE (x) == PARALLEL)
7357 {
7358 register int i;
7359 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
7360 {
7361 register rtx y = XVECEXP (x, 0, i);
7362 if (GET_CODE (y) == CLOBBER)
7363 {
7364 rtx ref = XEXP (y, 0);
7365 if (ref)
7366 {
7367 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
7368 || (GET_CODE (ref) == MEM && !w->all))
7369 invalidate (ref);
7370 else if (GET_CODE (ref) == STRICT_LOW_PART
7371 || GET_CODE (ref) == ZERO_EXTRACT)
7372 invalidate (XEXP (ref, 0));
7373 }
7374 }
7375 }
7376 }
7377 }
7378 \f
7379 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
7380 and replace any registers in them with either an equivalent constant
7381 or the canonical form of the register. If we are inside an address,
7382 only do this if the address remains valid.
7383
7384 OBJECT is 0 except when within a MEM in which case it is the MEM.
7385
7386 Return the replacement for X. */
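
/* For illustration, a hypothetical note

   (expr_list:REG_EQUAL (plus:SI (reg:SI 100) (const_int 4)) ...)

   where (reg:SI 100) is currently equivalent to (const_int 8) has the
   constant substituted, leaving (plus:SI (const_int 8) (const_int 4))
   for later folding; a register with no constant or register
   equivalence is merely canonicalized. */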
7387
7388 static rtx
7389 cse_process_notes (x, object)
7390 rtx x;
7391 rtx object;
7392 {
7393 enum rtx_code code = GET_CODE (x);
7394 char *fmt = GET_RTX_FORMAT (code);
7395 int i;
7396
7397 switch (code)
7398 {
7399 case CONST_INT:
7400 case CONST:
7401 case SYMBOL_REF:
7402 case LABEL_REF:
7403 case CONST_DOUBLE:
7404 case PC:
7405 case CC0:
7406 case LO_SUM:
7407 return x;
7408
7409 case MEM:
7410 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
7411 return x;
7412
7413 case EXPR_LIST:
7414 case INSN_LIST:
7415 if (REG_NOTE_KIND (x) == REG_EQUAL)
7416 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
7417 if (XEXP (x, 1))
7418 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
7419 return x;
7420
7421 case SIGN_EXTEND:
7422 case ZERO_EXTEND:
7423 {
7424 rtx new = cse_process_notes (XEXP (x, 0), object);
7425 /* We don't substitute VOIDmode constants into these rtx,
7426 since they would impede folding. */
7427 if (GET_MODE (new) != VOIDmode)
7428 validate_change (object, &XEXP (x, 0), new, 0);
7429 return x;
7430 }
7431
7432 case REG:
7433 i = reg_qty[REGNO (x)];
7434
7435 /* Return a constant or a constant register. */
7436 if (REGNO_QTY_VALID_P (REGNO (x))
7437 && qty_const[i] != 0
7438 && (CONSTANT_P (qty_const[i])
7439 || GET_CODE (qty_const[i]) == REG))
7440 {
7441 rtx new = gen_lowpart_if_possible (GET_MODE (x), qty_const[i]);
7442 if (new)
7443 return new;
7444 }
7445
7446 /* Otherwise, canonicalize this register. */
7447 return canon_reg (x, NULL_RTX);
7448 }
7449
7450 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7451 if (fmt[i] == 'e')
7452 validate_change (object, &XEXP (x, i),
7453 cse_process_notes (XEXP (x, i), object), 0);
7454
7455 return x;
7456 }
7457 \f
7458 /* Find common subexpressions between the end test of a loop and the beginning
7459 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
7460
7461 Often we have a loop where an expression in the exit test is used
7462 in the body of the loop. For example "while (*p) *q++ = *p++;".
7463 Because of the way we duplicate the loop exit test in front of the loop,
7464 however, we don't detect that common subexpression. This will be caught
7465 when global cse is implemented, but this is quite a common case.
7466
7467 This function handles the most common cases of these common expressions.
7468 It is called after we have processed the basic block ending with the
7469 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
7470 jumps to a label used only once. */
7471
7472 static void
7473 cse_around_loop (loop_start)
7474 rtx loop_start;
7475 {
7476 rtx insn;
7477 int i;
7478 struct table_elt *p;
7479
7480 /* If the jump at the end of the loop doesn't go to the start, we don't
7481 do anything. */
7482 for (insn = PREV_INSN (loop_start);
7483 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
7484 insn = PREV_INSN (insn))
7485 ;
7486
7487 if (insn == 0
7488 || GET_CODE (insn) != NOTE
7489 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
7490 return;
7491
7492 /* If the last insn of the loop (the end test) was an NE comparison,
7493 we will interpret it as an EQ comparison, since we fell through
7494 the loop. Any equivalences resulting from that comparison are
7495 therefore not valid and must be invalidated. */
7496 if (last_jump_equiv_class)
7497 for (p = last_jump_equiv_class->first_same_value; p;
7498 p = p->next_same_value)
7499 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
7500 || GET_CODE (p->exp) == SUBREG)
7501 invalidate (p->exp);
7502 else if (GET_CODE (p->exp) == STRICT_LOW_PART
7503 || GET_CODE (p->exp) == ZERO_EXTRACT)
7504 invalidate (XEXP (p->exp, 0));
7505
7506 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
7507 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
7508
7509 The only thing we do with SET_DEST is invalidate entries, so we
7510 can safely process each SET in order. It is slightly less efficient
7511 to do so, but we only want to handle the most common cases. */
7512
7513 for (insn = NEXT_INSN (loop_start);
7514 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
7515 && ! (GET_CODE (insn) == NOTE
7516 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
7517 insn = NEXT_INSN (insn))
7518 {
7519 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
7520 && (GET_CODE (PATTERN (insn)) == SET
7521 || GET_CODE (PATTERN (insn)) == CLOBBER))
7522 cse_set_around_loop (PATTERN (insn), insn, loop_start);
7523 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
7524 && GET_CODE (PATTERN (insn)) == PARALLEL)
7525 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7526 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
7527 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
7528 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
7529 loop_start);
7530 }
7531 }
7532 \f
7533 /* Variable used for communications between the next two routines. */
7534
7535 static struct write_data skipped_writes_memory;
7536
7537 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
7538 since they are done elsewhere. This function is called via note_stores. */
7539
7540 static void
7541 invalidate_skipped_set (dest, set)
7542 rtx set;
7543 rtx dest;
7544 {
7545 if (GET_CODE (set) == CLOBBER
7546 #ifdef HAVE_cc0
7547 || dest == cc0_rtx
7548 #endif
7549 || dest == pc_rtx)
7550 return;
7551
7552 if (GET_CODE (dest) == MEM)
7553 note_mem_written (dest, &skipped_writes_memory);
7554
7555 /* There are times when an address can appear varying and be a PLUS
7556 during this scan when it would be a fixed address were we to know
7557 the proper equivalences. So promote "nonscalar" to be "all". */
7558 if (skipped_writes_memory.nonscalar)
7559 skipped_writes_memory.all = 1;
7560
7561 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7562 || (! skipped_writes_memory.all && ! cse_rtx_addr_varies_p (dest)))
7563 invalidate (dest);
7564 else if (GET_CODE (dest) == STRICT_LOW_PART
7565 || GET_CODE (dest) == ZERO_EXTRACT)
7566 invalidate (XEXP (dest, 0));
7567 }
7568
7569 /* Invalidate all insns from START up to the end of the function or the
7570 next label. This is called when we wish to CSE around a block that is
7571 conditionally executed. */
7572
7573 static void
7574 invalidate_skipped_block (start)
7575 rtx start;
7576 {
7577 rtx insn;
7578 static struct write_data init = {0, 0, 0, 0};
7579 static struct write_data everything = {0, 1, 1, 1};
7580
7581 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
7582 insn = NEXT_INSN (insn))
7583 {
7584 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
7585 continue;
7586
7587 skipped_writes_memory = init;
7588
7589 if (GET_CODE (insn) == CALL_INSN)
7590 {
7591 invalidate_for_call ();
7592 skipped_writes_memory = everything;
7593 }
7594
7595 note_stores (PATTERN (insn), invalidate_skipped_set);
7596 invalidate_from_clobbers (&skipped_writes_memory, PATTERN (insn));
7597 }
7598 }
7599 \f
7600 /* Used for communication between the following two routines; contains a
7601 value to be checked for modification. */
7602
7603 static rtx cse_check_loop_start_value;
7604
7605 /* If modifying X will modify the value in CSE_CHECK_LOOP_START_VALUE,
7606 indicate that fact by setting CSE_CHECK_LOOP_START_VALUE to 0. */
7607
7608 static void
7609 cse_check_loop_start (x, set)
7610 rtx x;
7611 rtx set;
7612 {
7613 if (cse_check_loop_start_value == 0
7614 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
7615 return;
7616
7617 if ((GET_CODE (x) == MEM && GET_CODE (cse_check_loop_start_value) == MEM)
7618 || reg_overlap_mentioned_p (x, cse_check_loop_start_value))
7619 cse_check_loop_start_value = 0;
7620 }
7621
7622 /* X is a SET or CLOBBER contained in INSN that was found near the start of
7623 a loop that starts with the label at LOOP_START.
7624
7625 If X is a SET, we see if its SET_SRC is currently in our hash table.
7626 If so, we see if it has a value equal to some register used only in the
7627 loop exit code (as marked by jump.c).
7628
7629 If those two conditions are true, we search backwards from the start of
7630 the loop to see if that same value was loaded into a register that still
7631 retains its value at the start of the loop.
7632
7633 If so, we insert an insn after the load to copy the destination of that
7634 load into the equivalent register and (try to) replace our SET_SRC with that
7635 register.
7636
7637 In any event, we invalidate whatever this SET or CLOBBER modifies. */
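
/* For illustration, with hypothetical registers:

   (set (reg:SI 101) (mem:SI (reg:SI 99)))   ; P, just before the loop
   ; ... loop starts here ...
   (set (reg:SI 102) (mem:SI (reg:SI 99)))   ; INSN, near the loop start

   If some (reg:SI 103) is marked REG_LOOP_TEST_P, is equivalent to
   (mem:SI (reg:SI 99)) and is cheaper than that SET_SRC, we emit
   (set (reg:SI 103) (reg:SI 101)) after P and change INSN into
   (set (reg:SI 102) (reg:SI 103)). */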
7638
7639 static void
7640 cse_set_around_loop (x, insn, loop_start)
7641 rtx x;
7642 rtx insn;
7643 rtx loop_start;
7644 {
7645 struct table_elt *src_elt;
7646 static struct write_data init = {0, 0, 0, 0};
7647 struct write_data writes_memory;
7648
7649 writes_memory = init;
7650
7651 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
7652 are setting PC or CC0 or whose SET_SRC is already a register. */
7653 if (GET_CODE (x) == SET
7654 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
7655 && GET_CODE (SET_SRC (x)) != REG)
7656 {
7657 src_elt = lookup (SET_SRC (x),
7658 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
7659 GET_MODE (SET_DEST (x)));
7660
7661 if (src_elt)
7662 for (src_elt = src_elt->first_same_value; src_elt;
7663 src_elt = src_elt->next_same_value)
7664 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
7665 && COST (src_elt->exp) < COST (SET_SRC (x)))
7666 {
7667 rtx p, set;
7668
7669 /* Look for an insn in front of LOOP_START that sets
7670 something in the desired mode to SET_SRC (x) before we hit
7671 a label or CALL_INSN. */
7672
7673 for (p = prev_nonnote_insn (loop_start);
7674 p && GET_CODE (p) != CALL_INSN
7675 && GET_CODE (p) != CODE_LABEL;
7676 p = prev_nonnote_insn (p))
7677 if ((set = single_set (p)) != 0
7678 && GET_CODE (SET_DEST (set)) == REG
7679 && GET_MODE (SET_DEST (set)) == src_elt->mode
7680 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
7681 {
7682 /* We now have to ensure that nothing between P
7683 and LOOP_START modified anything referenced in
7684 SET_SRC (x). We know that nothing within the loop
7685 can modify it, or we would have invalidated it in
7686 the hash table. */
7687 rtx q;
7688
7689 cse_check_loop_start_value = SET_SRC (x);
7690 for (q = p; q != loop_start; q = NEXT_INSN (q))
7691 if (GET_RTX_CLASS (GET_CODE (q)) == 'i')
7692 note_stores (PATTERN (q), cse_check_loop_start);
7693
7694 /* If nothing was changed and we can replace our
7695 SET_SRC, add an insn after P to copy its destination
7696 to what we will be replacing SET_SRC with. */
7697 if (cse_check_loop_start_value
7698 && validate_change (insn, &SET_SRC (x),
7699 src_elt->exp, 0))
7700 emit_insn_after (gen_move_insn (src_elt->exp,
7701 SET_DEST (set)),
7702 p);
7703 break;
7704 }
7705 }
7706 }
7707
7708 /* Now invalidate anything modified by X. */
7709 note_mem_written (SET_DEST (x), &writes_memory);
7710
7711 if (writes_memory.var)
7712 invalidate_memory (&writes_memory);
7713
7714 /* See comment on similar code in cse_insn for explanation of these tests. */
7715 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
7716 || (GET_CODE (SET_DEST (x)) == MEM && ! writes_memory.all
7717 && ! cse_rtx_addr_varies_p (SET_DEST (x))))
7718 invalidate (SET_DEST (x));
7719 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
7720 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
7721 invalidate (XEXP (SET_DEST (x), 0));
7722 }
7723 \f
7724 /* Find the end of INSN's basic block and record in DATA its range,
7725 the total number of SETs in all the insns of the block, the last insn of the
7726 block, and the branch path.
7727
7728 The branch path indicates which branches should be followed. If a non-zero
7729 path size is specified, the block should be rescanned and a different set
7730 of branches will be taken. The branch path is only used if
7731 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
7732
7733 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
7734 used to describe the block. It is filled in with the information about
7735 the current block. The incoming structure's branch path, if any, is used
7736 to construct the output branch path. */
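
/* For illustration: a TAKEN entry means the scan continued at the
   jump target, extending the block past the branch; an AROUND entry
   means the branch skipped a block whose stores must be invalidated.
   A path such as ((B1, TAKEN), (B2, AROUND)) recorded in DATA replays
   those decisions when the block is rescanned. */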
7737
7738 void
7739 cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
7740 rtx insn;
7741 struct cse_basic_block_data *data;
7742 int follow_jumps;
7743 int after_loop;
7744 int skip_blocks;
7745 {
7746 rtx p = insn, q;
7747 int nsets = 0;
7748 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
7749 rtx next = GET_RTX_CLASS (GET_CODE (insn)) == 'i' ? insn : next_real_insn (insn);
7750 int path_size = data->path_size;
7751 int path_entry = 0;
7752 int i;
7753
7754 /* Update the previous branch path, if any. If the last branch was
7755 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
7756 shorten the path by one and look at the previous branch. We know that
7757 at least one branch must have been taken if PATH_SIZE is non-zero. */
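
/* E.g., a previous path ((B1, TAKEN), (B2, TAKEN)) becomes
   ((B1, TAKEN), (B2, NOT_TAKEN)) here; once B2 is NOT_TAKEN the path
   is shortened and B1 is marked NOT_TAKEN instead, so successive
   rescans enumerate the alternatives like counting down in binary. */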
7758 while (path_size > 0)
7759 {
7760 if (data->path[path_size - 1].status != NOT_TAKEN)
7761 {
7762 data->path[path_size - 1].status = NOT_TAKEN;
7763 break;
7764 }
7765 else
7766 path_size--;
7767 }
7768
7769 /* Scan to end of this basic block. */
7770 while (p && GET_CODE (p) != CODE_LABEL)
7771 {
7772 /* Don't cse out the end of a loop. This makes a difference
7773 only for the unusual loops that always execute at least once;
7774 all other loops have labels there so we will stop in any case.
7775 Cse'ing out the end of the loop is dangerous because it
7776 might cause an invariant expression inside the loop
7777 to be reused after the end of the loop. This would make it
7778 hard to move the expression out of the loop in loop.c,
7779 especially if it is one of several equivalent expressions
7780 and loop.c would like to eliminate it.
7781
7782 If we are running after loop.c has finished, we can ignore
7783 the NOTE_INSN_LOOP_END. */
7784
7785 if (! after_loop && GET_CODE (p) == NOTE
7786 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
7787 break;
7788
7789 /* Don't cse over a call to setjmp; on some machines (e.g., VAX)
7790 the regs restored by the longjmp come from
7791 a later time than the setjmp. */
7792 if (GET_CODE (p) == NOTE
7793 && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
7794 break;
7795
7796 /* A PARALLEL can have lots of SETs in it,
7797 especially if it is really an ASM_OPERANDS. */
7798 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
7799 && GET_CODE (PATTERN (p)) == PARALLEL)
7800 nsets += XVECLEN (PATTERN (p), 0);
7801 else if (GET_CODE (p) != NOTE)
7802 nsets += 1;
7803
7804 /* Ignore insns made by CSE; they cannot affect the boundaries of
7805 the basic block. */
7806
7807 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
7808 high_cuid = INSN_CUID (p);
7809 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
7810 low_cuid = INSN_CUID (p);
7811
7812 /* See if this insn is in our branch path. If it is and we are to
7813 take it, do so. */
7814 if (path_entry < path_size && data->path[path_entry].branch == p)
7815 {
7816 if (data->path[path_entry].status != NOT_TAKEN)
7817 p = JUMP_LABEL (p);
7818
7819 /* Point to next entry in path, if any. */
7820 path_entry++;
7821 }
7822
7823 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
7824 was specified, we haven't reached our maximum path length, there are
7825 insns following the target of the jump, this is the only use of the
7826 jump label, and the target label is preceded by a BARRIER.
7827
7828 Alternatively, we can follow the jump if it branches around a
7829 block of code and there are no other branches into the block.
7830 In this case invalidate_skipped_block will be called to invalidate any
7831 registers set in the block when following the jump. */
7832
7833 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
7834 && GET_CODE (p) == JUMP_INSN
7835 && GET_CODE (PATTERN (p)) == SET
7836 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
7837 && LABEL_NUSES (JUMP_LABEL (p)) == 1
7838 && NEXT_INSN (JUMP_LABEL (p)) != 0)
7839 {
7840 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
7841 if ((GET_CODE (q) != NOTE
7842 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
7843 || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
7844 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
7845 break;
7846
7847 /* If we ran into a BARRIER, this code is an extension of the
7848 basic block when the branch is taken. */
7849 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
7850 {
7851 /* Don't allow ourselves to keep walking around an
7852 always-executed loop. */
7853 if (next_real_insn (q) == next)
7854 {
7855 p = NEXT_INSN (p);
7856 continue;
7857 }
7858
7859 /* Similarly, don't put a branch in our path more than once. */
7860 for (i = 0; i < path_entry; i++)
7861 if (data->path[i].branch == p)
7862 break;
7863
7864 if (i != path_entry)
7865 break;
7866
7867 data->path[path_entry].branch = p;
7868 data->path[path_entry++].status = TAKEN;
7869
7870 /* This branch now ends our path. It was possible that we
7871 didn't see this branch the last time around (when the
7872 insn in front of the target was a JUMP_INSN that was
7873 turned into a no-op). */
7874 path_size = path_entry;
7875
7876 p = JUMP_LABEL (p);
7877 /* Mark block so we won't scan it again later. */
7878 PUT_MODE (NEXT_INSN (p), QImode);
7879 }
7880 /* Detect a branch around a block of code. */
7881 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
7882 {
7883 register rtx tmp;
7884
7885 if (next_real_insn (q) == next)
7886 {
7887 p = NEXT_INSN (p);
7888 continue;
7889 }
7890
7891 for (i = 0; i < path_entry; i++)
7892 if (data->path[i].branch == p)
7893 break;
7894
7895 if (i != path_entry)
7896 break;
7897
7898 /* This is no_labels_between_p (p, q) with an added check for
7899 reaching the end of a function (in case Q precedes P). */
7900 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
7901 if (GET_CODE (tmp) == CODE_LABEL)
7902 break;
7903
7904 if (tmp == q)
7905 {
7906 data->path[path_entry].branch = p;
7907 data->path[path_entry++].status = AROUND;
7908
7909 path_size = path_entry;
7910
7911 p = JUMP_LABEL (p);
7912 /* Mark block so we won't scan it again later. */
7913 PUT_MODE (NEXT_INSN (p), QImode);
7914 }
7915 }
7916 }
7917 p = NEXT_INSN (p);
7918 }
7919
7920 data->low_cuid = low_cuid;
7921 data->high_cuid = high_cuid;
7922 data->nsets = nsets;
7923 data->last = p;
7924
7925 /* If none of the jumps in the path were taken, set our path length to zero
7926 so a rescan won't be done. */
7927 for (i = path_size - 1; i >= 0; i--)
7928 if (data->path[i].status != NOT_TAKEN)
7929 break;
7930
7931 if (i == -1)
7932 data->path_size = 0;
7933 else
7934 data->path_size = path_size;
7935
7936 /* End the current branch path. */
7937 data->path[path_size].branch = 0;
7938 }
7939 \f
7940 /* Perform cse on the instructions of a function.
7941 F is the first instruction.
7942 NREGS is one plus the highest pseudo-reg number used in the function.
7943
7944 AFTER_LOOP is 1 if this is the cse call done after loop optimization
7945 (only if -frerun-cse-after-loop).
7946
7947 Returns 1 if jump_optimize should be redone due to simplifications
7948 in conditional jump instructions. */
7949
7950 int
7951 cse_main (f, nregs, after_loop, file)
7952 rtx f;
7953 int nregs;
7954 int after_loop;
7955 FILE *file;
7956 {
7957 struct cse_basic_block_data val;
7958 register rtx insn = f;
7959 register int i;
7960
7961 cse_jumps_altered = 0;
7962 constant_pool_entries_cost = 0;
7963 val.path_size = 0;
7964
7965 init_recog ();
7966
7967 max_reg = nregs;
7968
7969 all_minus_one = (int *) alloca (nregs * sizeof (int));
7970 consec_ints = (int *) alloca (nregs * sizeof (int));
7971
7972 for (i = 0; i < nregs; i++)
7973 {
7974 all_minus_one[i] = -1;
7975 consec_ints[i] = i;
7976 }
7977
7978 reg_next_eqv = (int *) alloca (nregs * sizeof (int));
7979 reg_prev_eqv = (int *) alloca (nregs * sizeof (int));
7980 reg_qty = (int *) alloca (nregs * sizeof (int));
7981 reg_in_table = (int *) alloca (nregs * sizeof (int));
7982 reg_tick = (int *) alloca (nregs * sizeof (int));
7983
7984 #ifdef LOAD_EXTEND_OP
7985
7986 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
7987 and change the code and mode as appropriate. */
7988 memory_extend_rtx = gen_rtx (ZERO_EXTEND, VOIDmode, 0);
7989 #endif
7990
7991 /* Discard all the free elements of the previous function
7992 since they are allocated on the temporary obstack. */
7993 bzero ((char *) table, sizeof table);
7994 free_element_chain = 0;
7995 n_elements_made = 0;
7996
7997 /* Find the largest uid. */
7998
7999 max_uid = get_max_uid ();
8000 uid_cuid = (int *) alloca ((max_uid + 1) * sizeof (int));
8001 bzero ((char *) uid_cuid, (max_uid + 1) * sizeof (int));
8002
8003 /* Compute the mapping from uids to cuids.
8004 CUIDs are numbers assigned to insns, like uids,
8005 except that cuids increase monotonically through the code.
8006 Don't assign cuids to line-number NOTEs, so that the distance in cuids
8007 between two insns is not affected by -g. */
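
/* For illustration: a hypothetical sequence of an insn (uid 5), a
   line-number note (uid 6) and an insn (uid 9) gets cuids 1, 1, 2,
   so the cuid distance between the two insns is the same whether or
   not -g emitted the note. */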
8008
8009 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
8010 {
8011 if (GET_CODE (insn) != NOTE
8012 || NOTE_LINE_NUMBER (insn) < 0)
8013 INSN_CUID (insn) = ++i;
8014 else
8015 /* Give a line number note the same cuid as preceding insn. */
8016 INSN_CUID (insn) = i;
8017 }
8018
8019 /* Initialize which registers are clobbered by calls. */
8020
8021 CLEAR_HARD_REG_SET (regs_invalidated_by_call);
8022
8023 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8024 if ((call_used_regs[i]
8025 /* Used to check !fixed_regs[i] here, but that isn't safe;
8026 fixed regs are still call-clobbered, and sched can get
8027 confused if they can "live across calls".
8028
8029 The frame pointer is always preserved across calls. The arg
8030 pointer is if it is fixed. The stack pointer usually is, unless
8031 RETURN_POPS_ARGS, in which case an explicit CLOBBER
8032 will be present. If we are generating PIC code, the PIC offset
8033 table register is preserved across calls. */
8034
8035 && i != STACK_POINTER_REGNUM
8036 && i != FRAME_POINTER_REGNUM
8037 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
8038 && i != HARD_FRAME_POINTER_REGNUM
8039 #endif
8040 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
8041 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
8042 #endif
8043 #if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
8044 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
8045 #endif
8046 )
8047 || global_regs[i])
8048 SET_HARD_REG_BIT (regs_invalidated_by_call, i);
8049
8050 /* Loop over basic blocks.
8051 Compute the maximum number of qty's needed for each basic block
8052 (which is 2 for each SET). */
8053 insn = f;
8054 while (insn)
8055 {
8056 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
8057 flag_cse_skip_blocks);
8058
8059 /* If this basic block was already processed or has no sets, skip it. */
8060 if (val.nsets == 0 || GET_MODE (insn) == QImode)
8061 {
8062 PUT_MODE (insn, VOIDmode);
8063 insn = (val.last ? NEXT_INSN (val.last) : 0);
8064 val.path_size = 0;
8065 continue;
8066 }
8067
8068 cse_basic_block_start = val.low_cuid;
8069 cse_basic_block_end = val.high_cuid;
8070 max_qty = val.nsets * 2;
8071
8072 if (file)
8073 fprintf (file, ";; Processing block from %d to %d, %d sets.\n",
8074 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
8075 val.nsets);
8076
8077 /* Make MAX_QTY bigger to give us room to optimize
8078 past the end of this basic block, if that should prove useful. */
8079 if (max_qty < 500)
8080 max_qty = 500;
8081
8082 max_qty += max_reg;
8083
8084 /* If this basic block is being extended by following certain jumps,
8085 (see `cse_end_of_basic_block'), we reprocess the code from the start.
8086 Otherwise, we start after this basic block. */
8087 if (val.path_size > 0)
8088 cse_basic_block (insn, val.last, val.path, 0);
8089 else
8090 {
8091 int old_cse_jumps_altered = cse_jumps_altered;
8092 rtx temp;
8093
8094 /* When cse changes a conditional jump to an unconditional
8095 jump, we want to reprocess the block, since it will give
8096 us a new branch path to investigate. */
8097 cse_jumps_altered = 0;
8098 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
8099 if (cse_jumps_altered == 0
8100 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
8101 insn = temp;
8102
8103 cse_jumps_altered |= old_cse_jumps_altered;
8104 }
8105
8106 #ifdef USE_C_ALLOCA
8107 alloca (0);
8108 #endif
8109 }
8110
8111 /* Tell refers_to_mem_p that qty_const info is not available. */
8112 qty_const = 0;
8113
8114 if (max_elements_made < n_elements_made)
8115 max_elements_made = n_elements_made;
8116
8117 return cse_jumps_altered;
8118 }
8119
8120 /* Process a single basic block. FROM and TO are the limits of the basic
8121 block. NEXT_BRANCH points to the branch path when following jumps or
8122 a null path when not following jumps.
8123
8124 AROUND_LOOP is non-zero if we are to try to cse around to the start of a
8125 loop. This is true when we are being called for the last time on a
8126 block and this CSE pass is before loop.c. */
8127
8128 static rtx
8129 cse_basic_block (from, to, next_branch, around_loop)
8130 register rtx from, to;
8131 struct branch_path *next_branch;
8132 int around_loop;
8133 {
8134 register rtx insn;
8135 int to_usage = 0;
8136 int in_libcall_block = 0;
8137
8138 /* Each of these arrays is undefined before max_reg, so only allocate
8139 the space actually needed and adjust the start below. */
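
/* A sketch of the biasing done below, with hypothetical values
   max_reg == 100 and max_qty == 600: each alloca provides 500
   elements, and subtracting max_reg from the pointer makes
   qty_first_reg[100] the first valid element, since indices below
   max_reg are never used. */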
8140
8141 qty_first_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8142 qty_last_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8143 qty_mode = (enum machine_mode *) alloca ((max_qty - max_reg) * sizeof (enum machine_mode));
8144 qty_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8145 qty_const_insn = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8146 qty_comparison_code
8147 = (enum rtx_code *) alloca ((max_qty - max_reg) * sizeof (enum rtx_code));
8148 qty_comparison_qty = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8149 qty_comparison_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8150
8151 qty_first_reg -= max_reg;
8152 qty_last_reg -= max_reg;
8153 qty_mode -= max_reg;
8154 qty_const -= max_reg;
8155 qty_const_insn -= max_reg;
8156 qty_comparison_code -= max_reg;
8157 qty_comparison_qty -= max_reg;
8158 qty_comparison_const -= max_reg;
8159
8160 new_basic_block ();
8161
8162 /* TO might be a label. If so, protect it from being deleted. */
8163 if (to != 0 && GET_CODE (to) == CODE_LABEL)
8164 ++LABEL_NUSES (to);
8165
8166 for (insn = from; insn != to; insn = NEXT_INSN (insn))
8167 {
8168 register enum rtx_code code;
8169
8170 /* See if this is a branch that is part of the path. If so, and it is
8171 to be taken, do so. */
8172 if (next_branch->branch == insn)
8173 {
8174 enum taken status = next_branch++->status;
8175 if (status != NOT_TAKEN)
8176 {
8177 if (status == TAKEN)
8178 record_jump_equiv (insn, 1);
8179 else
8180 invalidate_skipped_block (NEXT_INSN (insn));
8181
8182 /* Set the last insn as the jump insn; it doesn't affect cc0.
8183 Then follow this branch. */
8184 #ifdef HAVE_cc0
8185 prev_insn_cc0 = 0;
8186 #endif
8187 prev_insn = insn;
8188 insn = JUMP_LABEL (insn);
8189 continue;
8190 }
8191 }
8192
8193 code = GET_CODE (insn);
8194 if (GET_MODE (insn) == QImode)
8195 PUT_MODE (insn, VOIDmode);
8196
8197 if (GET_RTX_CLASS (code) == 'i')
8198 {
8199 /* Process notes first so we have all notes in canonical forms when
8200 looking for duplicate operations. */
8201
8202 if (REG_NOTES (insn))
8203 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
8204
8205 /* Track when we are inside a LIBCALL block. Inside such a block,
8206 we do not want to record destinations. The last insn of a
8207 LIBCALL block is not considered to be part of the block, since
8208 its destination is the result of the block and hence should be
8209 recorded. */
8210
8211 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
8212 in_libcall_block = 1;
8213 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
8214 in_libcall_block = 0;
8215
8216 cse_insn (insn, in_libcall_block);
8217 }
8218
8219 /* If INSN is now an unconditional jump, skip to the end of our
8220 basic block by pretending that we just did the last insn in the
8221 basic block. If we are jumping to the end of our block, show
8222 that we can have one usage of TO. */
8223
8224 if (simplejump_p (insn))
8225 {
8226 if (to == 0)
8227 return 0;
8228
8229 if (JUMP_LABEL (insn) == to)
8230 to_usage = 1;
8231
8232 /* Maybe TO was deleted because the jump is unconditional.
8233 If so, there is nothing left in this basic block. */
8234 /* ??? Perhaps it would be smarter to set TO
8235 to whatever follows this insn,
8236 and pretend the basic block had always ended here. */
8237 if (INSN_DELETED_P (to))
8238 break;
8239
8240 insn = PREV_INSN (to);
8241 }
8242
8243 /* See if it is ok to keep on going past the label
8244 which used to end our basic block. Remember that we incremented
8245 the count of that label, so we decrement it here. If we made
8246 a jump unconditional, TO_USAGE will be one; in that case, we don't
8247 want to count the use in that jump. */
8248
8249 if (to != 0 && NEXT_INSN (insn) == to
8250 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
8251 {
8252 struct cse_basic_block_data val;
8253
8254 insn = NEXT_INSN (to);
8255
8256 if (LABEL_NUSES (to) == 0)
8257 delete_insn (to);
8258
8259 /* Find the end of the following block. Note that we won't be
8260 following branches in this case. If TO was the last insn
8261 in the function, we are done. Similarly, if we deleted the
8262 insn after TO, it must have been because it was preceded by
8263 a BARRIER. In that case, we are done with this block because it
8264 has no continuation. */
8265
8266 if (insn == 0 || INSN_DELETED_P (insn))
8267 return 0;
8268
8269 to_usage = 0;
8270 val.path_size = 0;
8271 cse_end_of_basic_block (insn, &val, 0, 0, 0);
8272
8273 /* If the tables we allocated have enough space left
8274 to handle all the SETs in the next basic block,
8275 continue through it. Otherwise, return,
8276 and that block will be scanned individually. */
8277 if (val.nsets * 2 + next_qty > max_qty)
8278 break;
8279
8280 cse_basic_block_start = val.low_cuid;
8281 cse_basic_block_end = val.high_cuid;
8282 to = val.last;
8283
8284 /* Prevent TO from being deleted if it is a label. */
8285 if (to != 0 && GET_CODE (to) == CODE_LABEL)
8286 ++LABEL_NUSES (to);
8287
8288 /* Back up so we process the first insn in the extension. */
8289 insn = PREV_INSN (insn);
8290 }
8291 }
8292
8293 if (next_qty > max_qty)
8294 abort ();
8295
8296 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
8297 the previous insn is the only insn that branches to the head of a loop,
8298 we can cse into the loop. Don't do this if we changed the jump
8299 structure of a loop unless we aren't going to be following jumps. */
8300
8301 if ((cse_jumps_altered == 0
8302 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
8303 && around_loop && to != 0
8304 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
8305 && GET_CODE (PREV_INSN (to)) == JUMP_INSN
8306 && JUMP_LABEL (PREV_INSN (to)) != 0
8307 && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
8308 cse_around_loop (JUMP_LABEL (PREV_INSN (to)));
8309
8310 return to ? NEXT_INSN (to) : 0;
8311 }
8312 \f
8313 /* Count the number of times registers are used (not set) in X.
8314 COUNTS is an array in which we accumulate the count, INCR is how much
8315 we count each register usage.
8316
8317 Don't count a usage of DEST, which is the SET_DEST of a SET which
8318 contains X in its SET_SRC. This is because such a SET does not
8319 modify the liveness of DEST. */
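
/* For illustration: in the hypothetical insn

   (set (reg:SI 100) (plus:SI (reg:SI 100) (reg:SI 101)))

   only the use of (reg:SI 101) is counted; the (reg:SI 100) inside
   SET_SRC is skipped, because deleting the insn would remove that use
   together with the set. */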
8320
8321 static void
8322 count_reg_usage (x, counts, dest, incr)
8323 rtx x;
8324 int *counts;
8325 rtx dest;
8326 int incr;
8327 {
8328 enum rtx_code code;
8329 char *fmt;
8330 int i, j;
8331
8332 if (x == 0)
8333 return;
8334
8335 switch (code = GET_CODE (x))
8336 {
8337 case REG:
8338 if (x != dest)
8339 counts[REGNO (x)] += incr;
8340 return;
8341
8342 case PC:
8343 case CC0:
8344 case CONST:
8345 case CONST_INT:
8346 case CONST_DOUBLE:
8347 case SYMBOL_REF:
8348 case LABEL_REF:
8349 case CLOBBER:
8350 return;
8351
8352 case SET:
8353 /* Unless we are setting a REG, count everything in SET_DEST. */
8354 if (GET_CODE (SET_DEST (x)) != REG)
8355 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
8356
8357 /* If SRC has side-effects, then we can't delete this insn, so the
8358 usage of SET_DEST inside SRC counts.
8359
8360 ??? Strictly-speaking, we might be preserving this insn
8361 because some other SET has side-effects, but that's hard
8362 to do and can't happen now. */
8363 count_reg_usage (SET_SRC (x), counts,
8364 side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
8365 incr);
8366 return;
8367
8368 case CALL_INSN:
8369 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
8370
8371 /* ... falls through ... */
8372 case INSN:
8373 case JUMP_INSN:
8374 count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);
8375
8376 /* Things used in a REG_EQUAL note aren't dead since loop may try to
8377 use them. */
8378
8379 count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
8380 return;
8381
8382 case EXPR_LIST:
8383 case INSN_LIST:
8384 if (REG_NOTE_KIND (x) == REG_EQUAL
8385 || GET_CODE (XEXP (x,0)) == USE)
8386 count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
8387 count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
8388 return;
8389 }
8390
8391 fmt = GET_RTX_FORMAT (code);
8392 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8393 {
8394 if (fmt[i] == 'e')
8395 count_reg_usage (XEXP (x, i), counts, dest, incr);
8396 else if (fmt[i] == 'E')
8397 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8398 count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
8399 }
8400 }
8401 \f
8402 /* Scan all the insns and delete any that are dead; i.e., they set a register
8403 that is never used or they copy a register to itself.
8404
8405 This is used to remove insns made obviously dead by cse. It improves the
8406 heuristics in loop since it won't try to move dead invariants out of loops
8407 or make givs for dead quantities. The remaining passes of the compilation
8408 are also sped up. */
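
/* For illustration: once cse has replaced every use of (reg:SI 105)
   with a cheaper equivalent, a hypothetical insn

   (set (reg:SI 105) (mem:SI (reg:SI 99)))

   has counts[105] == 0 and a side-effect-free source, so it is
   deleted and the usage count of (reg:SI 99) is decremented, possibly
   exposing further dead insns to this backward scan. */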
8409
8410 void
8411 delete_dead_from_cse (insns, nreg)
8412 rtx insns;
8413 int nreg;
8414 {
8415 int *counts = (int *) alloca (nreg * sizeof (int));
8416 rtx insn, prev;
8417 rtx tem;
8418 int i;
8419 int in_libcall = 0;
8420
8421 /* First count the number of times each register is used. */
8422 bzero ((char *) counts, sizeof (int) * nreg);
8423 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
8424 count_reg_usage (insn, counts, NULL_RTX, 1);
8425
8426 /* Go from the last insn to the first and delete insns that only set unused
8427 registers or copy a register to itself. As we delete an insn, remove
8428 usage counts for registers it uses. */
8429 for (insn = prev_real_insn (get_last_insn ()); insn; insn = prev)
8430 {
8431 int live_insn = 0;
8432
8433 prev = prev_real_insn (insn);
8434
8435 /* Don't delete any insns that are part of a libcall block.
8436 Flow or loop might get confused if we did that. Remember
8437 that we are scanning backwards. */
8438 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
8439 in_libcall = 1;
8440
8441 if (in_libcall)
8442 live_insn = 1;
8443 else if (GET_CODE (PATTERN (insn)) == SET)
8444 {
8445 if (GET_CODE (SET_DEST (PATTERN (insn))) == REG
8446 && SET_DEST (PATTERN (insn)) == SET_SRC (PATTERN (insn)))
8447 ;
8448
8449 #ifdef HAVE_cc0
8450 else if (GET_CODE (SET_DEST (PATTERN (insn))) == CC0
8451 && ! side_effects_p (SET_SRC (PATTERN (insn)))
8452 && ((tem = next_nonnote_insn (insn)) == 0
8453 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
8454 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
8455 ;
8456 #endif
8457 else if (GET_CODE (SET_DEST (PATTERN (insn))) != REG
8458 || REGNO (SET_DEST (PATTERN (insn))) < FIRST_PSEUDO_REGISTER
8459 || counts[REGNO (SET_DEST (PATTERN (insn)))] != 0
8460 || side_effects_p (SET_SRC (PATTERN (insn))))
8461 live_insn = 1;
8462 }
8463 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
8464 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
8465 {
8466 rtx elt = XVECEXP (PATTERN (insn), 0, i);
8467
8468 if (GET_CODE (elt) == SET)
8469 {
8470 if (GET_CODE (SET_DEST (elt)) == REG
8471 && SET_DEST (elt) == SET_SRC (elt))
8472 ;
8473
8474 #ifdef HAVE_cc0
8475 else if (GET_CODE (SET_DEST (elt)) == CC0
8476 && ! side_effects_p (SET_SRC (elt))
8477 && ((tem = next_nonnote_insn (insn)) == 0
8478 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
8479 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
8480 ;
8481 #endif
8482 else if (GET_CODE (SET_DEST (elt)) != REG
8483 || REGNO (SET_DEST (elt)) < FIRST_PSEUDO_REGISTER
8484 || counts[REGNO (SET_DEST (elt))] != 0
8485 || side_effects_p (SET_SRC (elt)))
8486 live_insn = 1;
8487 }
8488 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
8489 live_insn = 1;
8490 }
8491 else
8492 live_insn = 1;
8493
8494 /* If this is a dead insn, delete it and show that registers in it aren't
8495 being used. */
8496
8497 if (! live_insn)
8498 {
8499 count_reg_usage (insn, counts, NULL_RTX, -1);
8500 delete_insn (insn);
8501 }
8502
8503 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
8504 in_libcall = 0;
8505 }
8506 }