/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 88, 89, 92, 93, 94, 1995 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
/* Must precede rtl.h for FFS.  */
#include <stdio.h>

#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"

#include <setjmp.h>
/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge; so, at each label, we forget all that is
   known and start fresh.  This can be described as processing each
   basic block separately.  Note, however, that these are not quite
   the same as the basic blocks found by a later pass and used for
   data flow analysis and register packing.  We do not need to start fresh
   after a conditional jump instruction if there is no label there.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and several vectors together
   with "quantity numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

   Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `reg_qty' records what quantity a register is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to `max_reg'.
   If register N has not been assigned a quantity, reg_qty[N] will equal N.

   Quantity numbers below `max_reg' do not exist and none of the `qty_...'
   variables should be referenced with an index below `max_reg'.

   We also maintain a bidirectional chain of registers for each
   quantity number.  `qty_first_reg', `qty_last_reg',
   `reg_next_eqv' and `reg_prev_eqv' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.
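
   For illustration (the register and quantity numbers here are invented):
   if max_reg is 100, scanning (set (reg 60) (plus (reg 50) (const_int 4)))
   allocates a fresh quantity for reg 60, say 103.  A subsequent copy
   (set (reg 70) (reg 60)) merely does reg_qty[70] = 103, putting reg 60
   and reg 70 on the same chain, so either can substitute for the other.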

   If two registers have the same quantity number, REG expressions with
   the quantity's `qty_mode' must be in the hash table for both registers
   and must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number, if the quantity of one of the
   registers does not have the same mode as those expressions.

   Constants and quantity numbers

   When a quantity has a known constant value, that value is stored
   in the appropriate element of qty_const.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified, by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate element
   of qty_const.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.
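
   For example, if (const_int 4) is moved into an SImode register and
   later into a DImode register, it gets two separate hash table entries,
   one recorded with mode SImode and one with DImode.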

   Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and `qty_first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.
   The vectors `reg_tick' and `reg_in_table' are used to detect this case.
   reg_tick[i] is incremented whenever a value is stored in register i.
   reg_in_table[i] holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value reg_tick[i] had
   when the references were entered.  If we want to enter a reference
   and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
   Until we want to enter a new entry, the mere fact that the two vectors
   don't match causes the existing entries to be ignored if anything
   tries to match them.
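
   As a worked example (the numbers are illustrative): suppose reg_tick[5]
   is 2 when (plus (reg 5) (reg 6)) is entered, so reg_in_table[5] becomes 2.
   A later store into register 5 bumps reg_tick[5] to 3.  The stale entry
   stays in the table but can no longer match; it is physically removed
   only if some new expression mentioning register 5 is about to be entered.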

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, the vectors `reg_tick'
   and `reg_in_table' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

   Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
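
/* For instance, when (const (plus (symbol_ref "x") (const_int 8))) is
   entered, (symbol_ref "x") is entered as well, and the two elements are
   linked through their `related_value' fields; `use_related_value' below
   consults this chain.  (The symbol name here is only illustrative.)  */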

/* One plus largest register number used in this function.  */

static int max_reg;

/* Length of vectors indexed by quantity number.
   We know in advance we will not need a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Indexed by quantity number, gives the first (or last) (pseudo) register
   in the chain of registers that currently contain this quantity.  */

static int *qty_first_reg;
static int *qty_last_reg;
/* Indexed by quantity number, gives the mode of the quantity.  */

static enum machine_mode *qty_mode;

/* Indexed by quantity number, gives the rtx of the constant value of the
   quantity, or zero if it does not have a known value.
   A sum of the frame pointer (or arg pointer) plus a constant
   can also be entered here.  */

static rtx *qty_const;

/* Indexed by qty number, gives the insn that stored the constant value
   recorded in `qty_const'.  */

static rtx *qty_const_insn;

/* The next three variables are used to track when a comparison between a
   quantity and some constant or register has been passed.  In that case, we
   know the results of the comparison in case we see it again.  These variables
   record a comparison that is known to be true.  */

/* Indexed by qty number, gives the rtx code of a comparison with a known
   result involving this quantity.  If none, it is UNKNOWN.  */
static enum rtx_code *qty_comparison_code;

/* Indexed by qty number, gives the constant being compared against in a
   comparison of known result.  If no such comparison, it is undefined.
   If the comparison is not with a constant, it is zero.  */

static rtx *qty_comparison_const;
/* Indexed by qty number, gives the quantity being compared against in a
   comparison of known result.  If no such comparison, it is undefined.
   If the comparison is not with a register, it is -1.  */

static int *qty_comparison_qty;

#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the value last assigned to CC0.  If it should
   happen to be a constant, it is stored in preference to the actual
   assigned value.  In case it is a constant, we store the mode in which
   the constant should be interpreted.  */

static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;
#endif

/* Previous actual insn.  0 if at first insn of basic block.  */

static rtx prev_insn;

/* Insn being scanned.  */

static rtx this_insn;
/* Indexed by (pseudo) register number, gives the quantity number
   of the register's current contents.  */

static int *reg_qty;

/* Indexed by (pseudo) register number, gives the number of the next (or
   previous) (pseudo) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If reg_qty[N] == N, reg_next_eqv[N] is undefined.  */

static int *reg_next_eqv;
static int *reg_prev_eqv;

/* Indexed by (pseudo) register number, gives the number of times
   that register has been altered in the current basic block.  */

static int *reg_tick;

/* Indexed by (pseudo) register number, gives the reg_tick value at which
   rtx's containing this register are valid in the hash table.
   If this does not equal the current reg_tick value, such expressions
   existing in the hash table are invalid.
   If this is -1, no expressions containing this register have been
   entered in the table.  */

static int *reg_in_table;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* A HARD_REG_SET containing all the hard registers that are invalidated
   by a CALL_INSN.  */

static HARD_REG_SET regs_invalidated_by_call;
/* Two vectors of ints:
   one containing max_reg elements all equal to -1; the other containing
   max_reg + 500 (an approximation of max_qty) elements, where element i
   contains i.
   These are used to initialize various other vectors fast.  */

static int *all_minus_one;
static int *consec_ints;
/* CUID of insn that starts the basic block currently being cse-processed.  */

static int cse_basic_block_start;

/* CUID of insn that ends the basic block currently being cse-processed.  */

static int cse_basic_block_end;
/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
   We use them to see whether a reg is used outside a given basic block.  */

static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Nonzero if cse has altered conditional jump insns
   in such a way that jump optimization should be redone.  */

static int cse_jumps_altered;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

#ifdef LOAD_EXTEND_OP

/* Scratch rtl used when looking for load-extended copy of a MEM.  */
static rtx memory_extend_rtx;
#endif

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* canon_hash stores 1 in hash_arg_in_struct
   if it notices a reference to memory that's part of a structure.  */

static int hash_arg_in_struct;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   are on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `in_struct' field is nonzero for elements that
   involve any reference to memory inside a structure or array.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */


struct table_elt
{
  rtx exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  enum machine_mode mode;
  char in_memory;
  char in_struct;
  char is_const;
  char flag;
};

/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define NBUCKETS 31

/* Compute hash code of X in mode M.  Special-case the case where X is
   a pseudo register (hard registers may require `do_not_record' to be
   set).  */

#define HASH(X, M)  \
 (GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER  \
  ? (((unsigned) REG << 7) + (unsigned) reg_qty[REGNO (X)]) % NBUCKETS  \
  : canon_hash (X, M) % NBUCKETS)
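
/* As an illustration (the pseudo number is invented): for (reg:SI 100)
   with reg_qty[100] == 103, HASH yields
   (((unsigned) REG << 7) + 103) % NBUCKETS, so every register currently
   holding quantity 103 hashes identically; anything else goes through the
   general `canon_hash'.  */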

/* Determine whether register number N is considered a fixed register for CSE.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed,
   but not if it is an overlapping register.  */
#ifdef OVERLAPPING_REGNO_P
#define FIXED_REGNO_P(N)  \
  (((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM  \
    || fixed_regs[N] || global_regs[N])  \
   && ! OVERLAPPING_REGNO_P ((N)))
#else
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM  \
   || fixed_regs[N] || global_regs[N])
#endif
/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest, with a cost
   of 0.  Next come pseudos, with a cost of 1, and other hard registers,
   with a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N) \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM  \
   || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM  \
   || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER)  \
   || ((N) < FIRST_PSEUDO_REGISTER  \
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

/* A register is cheap if it is a user variable assigned to the register
   or if its register number always corresponds to a cheap register.  */

#define CHEAP_REG(N) \
  ((REG_USERVAR_P (N) && REGNO (N) < FIRST_PSEUDO_REGISTER)  \
   || CHEAP_REGNO (REGNO (N)))

#define COST(X)  \
  (GET_CODE (X) == REG  \
   ? (CHEAP_REG (X) ? 0  \
      : REGNO (X) >= FIRST_PSEUDO_REGISTER ? 1  \
      : 2)  \
   : rtx_cost (X, SET) * 2)

/* Determine if the quantity number for register X represents a valid index
   into the `qty_...' variables.  */

#define REGNO_QTY_VALID_P(N) (reg_qty[N] != (N))
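
/* (This works because real quantity numbers start at `max_reg' while N is
   always below `max_reg', so a register that has been given a quantity can
   never have reg_qty[N] == N; see the overview comment at the top.)  */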

static struct table_elt *table[NBUCKETS];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Number of `struct table_elt' structures made so far for this function.  */

static int n_elements_made;

/* Maximum value `n_elements_made' has had so far in this compilation
   for functions previously processed.  */

static int max_elements_made;

/* Surviving equivalence class when two equivalence classes are merged
   by recording the effects of a jump in the last insn.  Zero if the
   last insn was not a conditional jump.  */

static struct table_elt *last_jump_equiv_class;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;

/* Bits describing what kind of values in memory must be invalidated
   for a particular instruction.  If all of these bits are zero,
   no memory refs need to be invalidated.  Each bit is more powerful
   than the preceding ones, and if a bit is set then the preceding
   bits are also set.

   Here is how the bits are set:
   Pushing onto the stack invalidates only the stack pointer,
   writing at a fixed address invalidates only variable addresses,
   writing in a structure element at variable address
   invalidates all but scalar variables,
   and writing in anything else at variable address invalidates everything.  */

struct write_data
{
  int sp : 1;			/* Invalidate stack pointer.  */
  int var : 1;			/* Invalidate variable addresses.  */
  int nonscalar : 1;		/* Invalidate all but scalar variables.  */
  int all : 1;			/* Invalidate all memory refs.  */
};
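
/* For example, per the comment above: a push sets only `sp'; a store at a
   fixed address sets `var' (and `sp'); a store into a structure element at
   a varying address sets `nonscalar' as well; and a store through an
   arbitrary pointer sets all of the bits, including `all'.  */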

/* Define maximum length of a branch path.  */

#define PATHLENGTH	10

/* This data describes a block that will be processed by cse_basic_block.  */

struct cse_basic_block_data {
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path {
    /* The branch insn.  */
    rtx branch;
    /* Whether it should be taken or not.  AROUND is the same as taken
       except that it is used when the destination label is not preceded
       by a BARRIER.  */
    enum taken {TAKEN, NOT_TAKEN, AROUND} status;
  } path[PATHLENGTH];
};

/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.  */

#define FIXED_BASE_PLUS_P(X)  \
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx  \
   || (X) == arg_pointer_rtx  \
   || (X) == virtual_stack_vars_rtx  \
   || (X) == virtual_incoming_args_rtx  \
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT  \
       && (XEXP (X, 0) == frame_pointer_rtx  \
	   || XEXP (X, 0) == hard_frame_pointer_rtx  \
	   || XEXP (X, 0) == arg_pointer_rtx  \
	   || XEXP (X, 0) == virtual_stack_vars_rtx  \
	   || XEXP (X, 0) == virtual_incoming_args_rtx)))
/* Similar, but also allows reference to the stack pointer.

   This used to include FIXED_BASE_PLUS_P; however, we can't assume that
   arg_pointer_rtx by itself is nonzero, because on at least one machine,
   the i960, the arg pointer is zero when it is unused.  */

#define NONZERO_BASE_PLUS_P(X)  \
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx  \
   || (X) == virtual_stack_vars_rtx  \
   || (X) == virtual_incoming_args_rtx  \
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT  \
       && (XEXP (X, 0) == frame_pointer_rtx  \
	   || XEXP (X, 0) == hard_frame_pointer_rtx  \
	   || XEXP (X, 0) == arg_pointer_rtx  \
	   || XEXP (X, 0) == virtual_stack_vars_rtx  \
	   || XEXP (X, 0) == virtual_incoming_args_rtx))  \
   || (X) == stack_pointer_rtx  \
   || (X) == virtual_stack_dynamic_rtx  \
   || (X) == virtual_outgoing_args_rtx  \
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT  \
       && (XEXP (X, 0) == stack_pointer_rtx  \
	   || XEXP (X, 0) == virtual_stack_dynamic_rtx  \
	   || XEXP (X, 0) == virtual_outgoing_args_rtx)))

static void new_basic_block PROTO((void));
static void make_new_qty PROTO((int));
static void make_regs_eqv PROTO((int, int));
static void delete_reg_equiv PROTO((int));
static int mention_regs PROTO((rtx));
static int insert_regs PROTO((rtx, struct table_elt *, int));
static void free_element PROTO((struct table_elt *));
static void remove_from_table PROTO((struct table_elt *, unsigned));
static struct table_elt *get_element PROTO((void));
static struct table_elt *lookup PROTO((rtx, unsigned, enum machine_mode)),
       *lookup_for_remove PROTO((rtx, unsigned, enum machine_mode));
static rtx lookup_as_function PROTO((rtx, enum rtx_code));
static struct table_elt *insert PROTO((rtx, struct table_elt *, unsigned,
				       enum machine_mode));
static void merge_equiv_classes PROTO((struct table_elt *,
				       struct table_elt *));
static void invalidate PROTO((rtx, enum machine_mode));
static void remove_invalid_refs PROTO((int));
static void rehash_using_reg PROTO((rtx));
static void invalidate_memory PROTO((struct write_data *));
static void invalidate_for_call PROTO((void));
static rtx use_related_value PROTO((rtx, struct table_elt *));
static unsigned canon_hash PROTO((rtx, enum machine_mode));
static unsigned safe_hash PROTO((rtx, enum machine_mode));
static int exp_equiv_p PROTO((rtx, rtx, int, int));
static void set_nonvarying_address_components PROTO((rtx, int, rtx *,
						     HOST_WIDE_INT *,
						     HOST_WIDE_INT *));
static int refers_to_p PROTO((rtx, rtx));
static int refers_to_mem_p PROTO((rtx, rtx, HOST_WIDE_INT,
				  HOST_WIDE_INT));
static int cse_rtx_addr_varies_p PROTO((rtx));
static rtx canon_reg PROTO((rtx, rtx));
static void find_best_addr PROTO((rtx, rtx *));
static enum rtx_code find_comparison_args PROTO((enum rtx_code, rtx *, rtx *,
						 enum machine_mode *,
						 enum machine_mode *));
static rtx cse_gen_binary PROTO((enum rtx_code, enum machine_mode,
				 rtx, rtx));
static rtx simplify_plus_minus PROTO((enum rtx_code, enum machine_mode,
				      rtx, rtx));
static rtx fold_rtx PROTO((rtx, rtx));
static rtx equiv_constant PROTO((rtx));
static void record_jump_equiv PROTO((rtx, int));
static void record_jump_cond PROTO((enum rtx_code, enum machine_mode,
				    rtx, rtx, int));
static void cse_insn PROTO((rtx, int));
static void note_mem_written PROTO((rtx, struct write_data *));
static void invalidate_from_clobbers PROTO((struct write_data *, rtx));
static rtx cse_process_notes PROTO((rtx, rtx));
static void cse_around_loop PROTO((rtx));
static void invalidate_skipped_set PROTO((rtx, rtx));
static void invalidate_skipped_block PROTO((rtx));
static void cse_check_loop_start PROTO((rtx, rtx));
static void cse_set_around_loop PROTO((rtx, rtx, rtx));
static rtx cse_basic_block PROTO((rtx, rtx, struct branch_path *, int));
static void count_reg_usage PROTO((rtx, int *, rtx, int));

extern int rtx_equal_function_value_matters;
\f
/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.  */

/* Return the right cost to give to an operation
   to make the cost of the corresponding register-to-register instruction
   N times that of a fast register-to-register instruction.  */

#define COSTS_N_INSNS(N) ((N) * 4 - 2)
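
/* Thus COSTS_N_INSNS (1) == 2, the cost of one fast insn, and
   COSTS_N_INSNS (5) == 18, the default cost charged for a multiply in
   `rtx_cost' below.  */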

int
rtx_cost (x, outer_code)
     rtx x;
     enum rtx_code outer_code;
{
  register int i, j;
  register enum rtx_code code;
  register char *fmt;
  register int total;

  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that RTX_COSTS can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      /* Count multiplication by 2**n as a shift,
	 because if we are considering it, we would output it as a shift.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	total = 2;
      else
	total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker.  */
      total = 0;
      break;
    case ASM_OPERANDS:
      /* We don't want these to be used in substitutions because
	 we have no way of validating the resulting insn.  So assign
	 anything containing an ASM_OPERANDS a very high cost.  */
      total = 1000;
      break;
    default:
      total = 2;
    }

  switch (code)
    {
    case REG:
      return ! CHEAP_REG (x);

    case SUBREG:
      /* If we can't tie these modes, make this expensive.  The larger
	 the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
	return COSTS_N_INSNS (2
			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      return 2;
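
      /* Note: the target macros RTX_COSTS and CONST_COSTS below expand to
	 `case' labels and statements (typically `return's of a cost), which
	 is why they appear in the middle of this switch.  */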
#ifdef RTX_COSTS
      RTX_COSTS (x, code, outer_code);
#endif
      CONST_COSTS (x, code, outer_code);
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}
\f
/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block ()
{
  register int i;

  next_qty = max_reg;

  bzero ((char *) reg_tick, max_reg * sizeof (int));

  bcopy ((char *) all_minus_one, (char *) reg_in_table,
	 max_reg * sizeof (int));
  bcopy ((char *) consec_ints, (char *) reg_qty, max_reg * sizeof (int));
  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < NBUCKETS; i++)
    {
      register struct table_elt *this, *next;
      for (this = table[i]; this; this = next)
	{
	  next = this->next_same_hash;
	  free_element (this);
	}
    }

  bzero ((char *) table, sizeof table);

  prev_insn = 0;

#ifdef HAVE_cc0
  prev_insn_cc0 = 0;
#endif
}

/* Say that register REG contains a quantity not in any register before
   and initialize that quantity.  */

static void
make_new_qty (reg)
     register int reg;
{
  register int q;

  if (next_qty >= max_qty)
    abort ();

  q = reg_qty[reg] = next_qty++;
  qty_first_reg[q] = reg;
  qty_last_reg[q] = reg;
  qty_const[q] = qty_const_insn[q] = 0;
  qty_comparison_code[q] = UNKNOWN;

  reg_next_eqv[reg] = reg_prev_eqv[reg] = -1;
}

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (new, old)
     register int new, old;
{
  register int lastr, firstr;
  register int q = reg_qty[old];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  reg_qty[new] = q;
  firstr = qty_first_reg[q];
  lastr = qty_last_reg[q];

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[regno_last_uid[new]] > cse_basic_block_end
		       || (uid_cuid[regno_first_uid[new]]
			   < cse_basic_block_start))
		      && (uid_cuid[regno_last_uid[new]]
			  > uid_cuid[regno_last_uid[firstr]]))))))
    {
      reg_prev_eqv[firstr] = new;
      reg_next_eqv[new] = firstr;
      reg_prev_eqv[new] = -1;
      qty_first_reg[q] = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_prev_eqv[lastr] >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_prev_eqv[lastr];
      reg_next_eqv[new] = reg_next_eqv[lastr];
      if (reg_next_eqv[lastr] >= 0)
	reg_prev_eqv[reg_next_eqv[lastr]] = new;
      else
	qty_last_reg[q] = new;
      reg_next_eqv[lastr] = new;
      reg_prev_eqv[new] = lastr;
    }
}

/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (reg)
     register int reg;
{
  register int q = reg_qty[reg];
  register int p, n;

  /* If invalid, do nothing.  */
  if (q == reg)
    return;

  p = reg_prev_eqv[reg];
  n = reg_next_eqv[reg];

  if (n != -1)
    reg_prev_eqv[n] = p;
  else
    qty_last_reg[q] = p;
  if (p != -1)
    reg_next_eqv[p] = n;
  else
    qty_first_reg[q] = n;

  reg_qty[reg] = reg;
}

/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (x)
     rtx x;
{
  register enum rtx_code code;
  register int i, j;
  register char *fmt;
  register int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      register int regno = REGNO (x);
      register int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
      int i;

      for (i = regno; i < endregno; i++)
	{
	  if (reg_in_table[i] >= 0 && reg_in_table[i] != reg_tick[i])
	    remove_invalid_refs (i);

	  reg_in_table[i] = reg_tick[i];
	}

      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || GET_RTX_CLASS (code) == '<')
    {
      if (GET_CODE (XEXP (x, 0)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (GET_CODE (XEXP (x, 1)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}

/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (x, classp, modified)
     rtx x;
     struct table_elt *classp;
     int modified;
{
  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      if (REGNO_QTY_VALID_P (regno)
	  && qty_mode[reg_qty[regno]] != GET_MODE (x))
	return 0;

      if (modified || ! REGNO_QTY_VALID_P (regno))
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (GET_CODE (classp->exp) == REG
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  make_regs_eqv (regno, REGNO (classp->exp));
		  return 1;
		}

	  make_new_qty (regno);
	  qty_mode[reg_qty[regno]] = GET_MODE (x);
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL_PTR, 0);
      mention_regs (SUBREG_REG (x));
      return 1;
    }
  else
    return mention_regs (x);
}
\f
/* Look in or update the hash table.  */

/* Put the element ELT on the list of free elements.  */

static void
free_element (elt)
     struct table_elt *elt;
{
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}

/* Return an element that is free for use.  */

static struct table_elt *
get_element ()
{
  struct table_elt *elt = free_element_chain;
  if (elt)
    {
      free_element_chain = elt->next_same_hash;
      return elt;
    }
  n_elements_made++;
  return (struct table_elt *) oballoc (sizeof (struct table_elt));
}

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (elt, hash)
     register struct table_elt *elt;
     unsigned hash;
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    register struct table_elt *prev = elt->prev_same_value;
    register struct table_elt *next = elt->next_same_value;

    if (next) next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	register struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    register struct table_elt *prev = elt->prev_same_hash;
    register struct table_elt *next = elt->next_same_hash;

    if (next) next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < NBUCKETS; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      register struct table_elt *p = elt->related_value;
      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  free_element (elt);
}

/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
			    || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
      return p;

  return 0;
}

/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  if (GET_CODE (x) == REG)
    {
      int regno = REGNO (x);
      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (GET_CODE (p->exp) == REG
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
	  return p;
    }

  return 0;
}

/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (x, code)
     rtx x;
     enum rtx_code code;
{
  register struct table_elt *p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS,
					 GET_MODE (x));
  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    {
      if (GET_CODE (p->exp) == code
	  /* Make sure this is a valid entry in the table.  */
	  && exp_equiv_p (p->exp, p->exp, 1, 0))
	return p->exp;
    }

  return 0;
}

/* Insert X in the hash table, assuming HASH is its hash code
   and CLASSP is an element of the class it should go in
   (or 0 if a new class should be made).
   It is inserted at the proper position to keep the class in
   the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFIED) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */

#define CHEAPER(X,Y)   ((X)->cost < (Y)->cost)

static struct table_elt *
insert (x, classp, hash, mode)
     register rtx x;
     register struct table_elt *classp;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
    abort ();

  /* If X is a hard register, show it is being put in the table.  */
  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      int regno = REGNO (x);
      int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
      int i;

      for (i = regno; i < endregno; i++)
	SET_HARD_REG_BIT (hard_regs_in_table, i);
    }

  /* Put an element for X into the right hash bucket.  */

  elt = get_element ();
  elt->exp = x;
  elt->cost = COST (x);
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x)
		   /* GNU C++ takes advantage of this for `this'
		      (and other const values).  */
		   || (RTX_UNCHANGING_P (x)
		       && GET_CODE (x) == REG
		       && REGNO (x) >= FIRST_PSEUDO_REGISTER)
		   || FIXED_BASE_PLUS_P (x));

  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
	/* Insert at the head of the class.  */
	{
	  register struct table_elt *p;
	  elt->next_same_value = classp;
	  classp->prev_same_value = elt;
	  elt->first_same_value = elt;

	  for (p = classp; p; p = p->next_same_value)
	    p->first_same_value = elt;
	}
      else
	{
	  /* Insert not at head of the class.  */
	  /* Put it after the last element cheaper than X.  */
	  register struct table_elt *p, *next;
	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
	       p = next);
	  /* Put it after P and before NEXT.  */
	  elt->next_same_value = next;
	  if (next)
	    next->prev_same_value = elt;
	  elt->prev_same_value = p;
	  p->next_same_value = elt;
	  elt->first_same_value = classp;
	}
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to an insn,
     update `qty_const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && GET_CODE (classp->exp) == REG)
    {
      qty_const[reg_qty[REGNO (classp->exp)]]
	= gen_lowpart_if_possible (qty_mode[reg_qty[REGNO (classp->exp)]], x);
      qty_const_insn[reg_qty[REGNO (classp->exp)]] = this_insn;
    }

  else if (GET_CODE (x) == REG && classp && ! qty_const[reg_qty[REGNO (x)]])
    {
      register struct table_elt *p;

      for (p = classp; p != 0; p = p->next_same_value)
	{
	  if (p->is_const)
	    {
	      qty_const[reg_qty[REGNO (x)]]
		= gen_lowpart_if_possible (GET_MODE (x), p->exp);
	      qty_const_insn[reg_qty[REGNO (x)]] = this_insn;
	      break;
	    }
	}
    }

  else if (GET_CODE (x) == REG && qty_const[reg_qty[REGNO (x)]]
	   && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]])
    qty_const_insn[reg_qty[REGNO (x)]] = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
	{
	  /* Get the integer-free subexpression in the hash table.  */
	  subhash = safe_hash (subexp, mode) % NBUCKETS;
	  subelt = lookup (subexp, subhash, mode);
	  if (subelt == 0)
	    subelt = insert (subexp, NULL_PTR, subhash, mode);
	  /* Initialize SUBELT's circular chain if it has none.  */
	  if (subelt->related_value == 0)
	    subelt->related_value = subelt;
	  /* Find the element in the circular chain that precedes SUBELT.  */
	  subelt_prev = subelt;
	  while (subelt_prev->related_value != subelt)
	    subelt_prev = subelt_prev->related_value;
	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
	     This way the element that follows SUBELT is the oldest one.  */
	  elt->related_value = subelt_prev->related_value;
	  subelt_prev->related_value = elt;
	}
    }

  return elt;
}
\f
/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */

static void
merge_equiv_classes (class1, class2)
     struct table_elt *class1, *class2;
{
  struct table_elt *elt, *next, *new;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  for (elt = class2; elt; elt = next)
    {
      unsigned hash;
      rtx exp = elt->exp;
      enum machine_mode mode = elt->mode;

      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
	 Don't do this for invalid entries as we cannot find their
	 hash code (it also isn't necessary).  */
      if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
	{
	  hash_arg_in_memory = 0;
	  hash_arg_in_struct = 0;
	  hash = HASH (exp, mode);

	  if (GET_CODE (exp) == REG)
	    delete_reg_equiv (REGNO (exp));

	  remove_from_table (elt, hash);

	  if (insert_regs (exp, class1, 0))
	    {
	      rehash_using_reg (exp);
	      hash = HASH (exp, mode);
	    }
	  new = insert (exp, class1, hash, mode);
	  new->in_memory = hash_arg_in_memory;
	  new->in_struct = hash_arg_in_struct;
	}
    }
}
\f
/* Remove from the hash table, or mark as invalid,
   all expressions whose values could be altered by storing in X.
   X is a register, a subreg, or a memory reference with nonvarying address
   (because, when a memory reference with a varying address is stored in,
   all memory references are removed by invalidate_memory
   so specific invalidation is superfluous).
   FULL_MODE, if not VOIDmode, indicates that this much should be invalidated
   instead of just the amount indicated by the mode of X.  This is only used
   for bitfield stores into memory.

   A nonvarying address may be just a register or just
   a symbol reference, or it may be either of those plus
   a numeric offset.  */

static void
invalidate (x, full_mode)
     rtx x;
     enum machine_mode full_mode;
{
  register int i;
  register struct table_elt *p;
  rtx base;
  HOST_WIDE_INT start, end;

  /* If X is a register, dependencies on its contents
     are recorded through the qty number mechanism.
     Just change the qty number of the register,
     mark it as invalid for expressions that refer to it,
     and remove it itself.  */

  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);
      register unsigned hash = HASH (x, GET_MODE (x));

      /* Remove REGNO from any quantity list it might be on and indicate
	 that its value might have changed.  If it is a pseudo, remove its
	 entry from the hash table.

	 For a hard register, we do the first two actions above for any
	 additional hard registers corresponding to X.  Then, if any of these
	 registers are in the table, we must remove any REG entries that
	 overlap these registers.  */

      delete_reg_equiv (regno);
      reg_tick[regno]++;

      if (regno >= FIRST_PSEUDO_REGISTER)
	remove_from_table (lookup_for_remove (x, hash, GET_MODE (x)), hash);
      else
	{
	  HOST_WIDE_INT in_table
	    = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
	  int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	  int tregno, tendregno;
	  register struct table_elt *p, *next;

	  CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);

	  for (i = regno + 1; i < endregno; i++)
	    {
	      in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, i);
	      CLEAR_HARD_REG_BIT (hard_regs_in_table, i);
	      delete_reg_equiv (i);
	      reg_tick[i]++;
	    }

	  if (in_table)
	    for (hash = 0; hash < NBUCKETS; hash++)
	      for (p = table[hash]; p; p = next)
		{
		  next = p->next_same_hash;

		  if (GET_CODE (p->exp) != REG
		      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
		    continue;

		  tregno = REGNO (p->exp);
		  tendregno
		    = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
		  if (tendregno > regno && tregno < endregno)
		    remove_from_table (p, hash);
		}
	}

      return;
    }

  if (GET_CODE (x) == SUBREG)
    {
      if (GET_CODE (SUBREG_REG (x)) != REG)
	abort ();
      invalidate (SUBREG_REG (x), VOIDmode);
      return;
    }

  /* X is not a register; it must be a memory reference with
     a nonvarying address.  Remove all hash table elements
     that refer to overlapping pieces of memory.  */

  if (GET_CODE (x) != MEM)
    abort ();

  if (full_mode == VOIDmode)
    full_mode = GET_MODE (x);

  set_nonvarying_address_components (XEXP (x, 0), GET_MODE_SIZE (full_mode),
				     &base, &start, &end);

  for (i = 0; i < NBUCKETS; i++)
    {
      register struct table_elt *next;
      for (p = table[i]; p; p = next)
	{
	  next = p->next_same_hash;
	  if (refers_to_mem_p (p->exp, base, start, end))
	    remove_from_table (p, i);
	}
    }
}

/* Remove all expressions that refer to register REGNO,
   since they are already invalid, and we are about to
   mark that register valid again and don't want the old
   expressions to reappear as valid.  */

static void
remove_invalid_refs (regno)
     int regno;
{
  register int i;
  register struct table_elt *p, *next;

  for (i = 0; i < NBUCKETS; i++)
    for (p = table[i]; p; p = next)
      {
	next = p->next_same_hash;
	if (GET_CODE (p->exp) != REG
	    && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
	  remove_from_table (p, i);
      }
}
\f
1624 /* Recompute the hash codes of any valid entries in the hash table that
1625 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1626
1627 This is called when we make a jump equivalence. */
1628
1629 static void
1630 rehash_using_reg (x)
1631 rtx x;
1632 {
1633 int i;
1634 struct table_elt *p, *next;
1635 unsigned hash;
1636
1637 if (GET_CODE (x) == SUBREG)
1638 x = SUBREG_REG (x);
1639
1640 /* If X is not a register or if the register is known not to be in any
1641 valid entries in the table, we have no work to do. */
1642
1643 if (GET_CODE (x) != REG
1644 || reg_in_table[REGNO (x)] < 0
1645 || reg_in_table[REGNO (x)] != reg_tick[REGNO (x)])
1646 return;
1647
1648 /* Scan all hash chains looking for valid entries that mention X.
1649 If we find one and it is in the wrong hash chain, move it. We can skip
1650 objects that are registers, since they are handled specially. */
1651
1652 for (i = 0; i < NBUCKETS; i++)
1653 for (p = table[i]; p; p = next)
1654 {
1655 next = p->next_same_hash;
1656 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
1657 && exp_equiv_p (p->exp, p->exp, 1, 0)
1658 && i != (hash = safe_hash (p->exp, p->mode) % NBUCKETS))
1659 {
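	  /* First unlink P from the hash chain it is on now.  */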
1660 if (p->next_same_hash)
1661 p->next_same_hash->prev_same_hash = p->prev_same_hash;
1662
1663 if (p->prev_same_hash)
1664 p->prev_same_hash->next_same_hash = p->next_same_hash;
1665 else
1666 table[i] = p->next_same_hash;
1667
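	  /* Then relink P at the head of the chain where it now belongs.  */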
1668 p->next_same_hash = table[hash];
1669 p->prev_same_hash = 0;
1670 if (table[hash])
1671 table[hash]->prev_same_hash = p;
1672 table[hash] = p;
1673 }
1674 }
1675 }
1676 \f
1677 /* Remove from the hash table all expressions that reference memory,
1678 or some of them as specified by *WRITES. */
1679
1680 static void
1681 invalidate_memory (writes)
1682 struct write_data *writes;
1683 {
1684 register int i;
1685 register struct table_elt *p, *next;
1686 int all = writes->all;
1687 int nonscalar = writes->nonscalar;
1688
1689 for (i = 0; i < NBUCKETS; i++)
1690 for (p = table[i]; p; p = next)
1691 {
1692 next = p->next_same_hash;
1693 if (p->in_memory
1694 && (all
1695 || (nonscalar && p->in_struct)
1696 || cse_rtx_addr_varies_p (p->exp)))
1697 remove_from_table (p, i);
1698 }
1699 }
1700 \f
1701 /* Remove from the hash table any expressions that are call-clobbered
1702 registers. Also update their TICK values. */
1703
1704 static void
1705 invalidate_for_call ()
1706 {
1707 int regno, endregno;
1708 int i;
1709 unsigned hash;
1710 struct table_elt *p, *next;
1711 int in_table = 0;
1712
1713 /* Go through all the hard registers. For each that is clobbered in
1714 a CALL_INSN, remove the register from quantity chains and update
1715 reg_tick if defined. Also see if any of these registers is currently
1716 in the table. */
1717
1718 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1719 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1720 {
1721 delete_reg_equiv (regno);
1722 if (reg_tick[regno] >= 0)
1723 reg_tick[regno]++;
1724
1725 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
1726 }
1727
1728 /* In the case where we have no call-clobbered hard registers in the
1729 table, we are done. Otherwise, scan the table and remove any
1730 entry that overlaps a call-clobbered register. */
1731
1732 if (in_table)
1733 for (hash = 0; hash < NBUCKETS; hash++)
1734 for (p = table[hash]; p; p = next)
1735 {
1736 next = p->next_same_hash;
1737
1738 if (GET_CODE (p->exp) != REG
1739 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1740 continue;
1741
1742 regno = REGNO (p->exp);
1743 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
1744
1745 for (i = regno; i < endregno; i++)
1746 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1747 {
1748 remove_from_table (p, hash);
1749 break;
1750 }
1751 }
1752 }
1753 \f
1754 /* Given an expression X of type CONST,
1755 and ELT which is its table entry (or 0 if it
1756 is not in the hash table),
1757 return an alternate expression for X as a register plus integer.
1758 If none can be found, return 0. */
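/* Illustrative example: if X is (const (plus (symbol_ref "s") (const_int 8)))
   and the table shows (const (plus (symbol_ref "s") (const_int 4)))
   equivalent to some register R, the offset is 4 and we can return
   (plus R (const_int 4)).  */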
1759
1760 static rtx
1761 use_related_value (x, elt)
1762 rtx x;
1763 struct table_elt *elt;
1764 {
1765 register struct table_elt *relt = 0;
1766 register struct table_elt *p, *q;
1767 HOST_WIDE_INT offset;
1768
1769 /* First, is there anything related known?
1770 If we have a table element, we can tell from that.
1771 Otherwise, must look it up. */
1772
1773 if (elt != 0 && elt->related_value != 0)
1774 relt = elt;
1775 else if (elt == 0 && GET_CODE (x) == CONST)
1776 {
1777 rtx subexp = get_related_value (x);
1778 if (subexp != 0)
1779 relt = lookup (subexp,
1780 safe_hash (subexp, GET_MODE (subexp)) % NBUCKETS,
1781 GET_MODE (subexp));
1782 }
1783
1784 if (relt == 0)
1785 return 0;
1786
1787 /* Search all related table entries for one that has an
1788 equivalent register. */
1789
1790 p = relt;
1791 while (1)
1792 {
1793 /* This loop is strange in that it is executed in two different cases.
1794 The first is when X is already in the table. Then it is searching
1795 the RELATED_VALUE list of X's class (RELT). The second case is when
1796 X is not in the table. Then RELT points to a class for the related
1797 value.
1798
1799 Ensure that, whatever case we are in, we ignore classes that have
1800 the same value as X. */
1801
1802 if (rtx_equal_p (x, p->exp))
1803 q = 0;
1804 else
1805 for (q = p->first_same_value; q; q = q->next_same_value)
1806 if (GET_CODE (q->exp) == REG)
1807 break;
1808
1809 if (q)
1810 break;
1811
1812 p = p->related_value;
1813
1814 /* We went all the way around, so there is nothing to be found.
1815 Alternatively, perhaps RELT was in the table for some other reason
1816 and it has no related values recorded. */
1817 if (p == relt || p == 0)
1818 break;
1819 }
1820
1821 if (q == 0)
1822 return 0;
1823
1824 offset = (get_integer_term (x) - get_integer_term (p->exp));
1825 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
1826 return plus_constant (q->exp, offset);
1827 }
1828 \f
1829 /* Hash an rtx. We are careful to make sure the value is never negative.
1830 Equivalent registers hash identically.
1831 MODE is used in hashing for CONST_INTs only;
1832 otherwise the mode of X is used.
1833
1834 Store 1 in do_not_record if any subexpression is volatile.
1835
1836 Store 1 in hash_arg_in_memory if X contains a MEM rtx
1837 which does not have the RTX_UNCHANGING_P bit set.
1838 In this case, also store 1 in hash_arg_in_struct
1839 if there is a MEM rtx which has the MEM_IN_STRUCT_P bit set.
1840
1841 Note that cse_insn knows that the hash code of a MEM expression
1842 is just (int) MEM plus the hash code of the address. */
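/* For illustration, the hash of (mem:SI (reg 100)) comes out to
   (unsigned) MEM + ((unsigned) REG << 7) + reg_qty[100]; since the REG
   case hashes the quantity number rather than the register number,
   equivalent registers contribute identical hash codes.  */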
1843
1844 static unsigned
1845 canon_hash (x, mode)
1846 rtx x;
1847 enum machine_mode mode;
1848 {
1849 register int i, j;
1850 register unsigned hash = 0;
1851 register enum rtx_code code;
1852 register char *fmt;
1853
1854 /* repeat is used to turn tail-recursion into iteration. */
1855 repeat:
1856 if (x == 0)
1857 return hash;
1858
1859 code = GET_CODE (x);
1860 switch (code)
1861 {
1862 case REG:
1863 {
1864 register int regno = REGNO (x);
1865
1866 /* On some machines, we can't record any non-fixed hard register,
1867 because extending its life will cause reload problems. We
1868 consider ap, fp, and sp to be fixed for this purpose.
1869 On all machines, we can't record any global registers. */
1870
1871 if (regno < FIRST_PSEUDO_REGISTER
1872 && (global_regs[regno]
1873 #ifdef SMALL_REGISTER_CLASSES
1874 || (! fixed_regs[regno]
1875 && regno != FRAME_POINTER_REGNUM
1876 && regno != HARD_FRAME_POINTER_REGNUM
1877 && regno != ARG_POINTER_REGNUM
1878 && regno != STACK_POINTER_REGNUM)
1879 #endif
1880 ))
1881 {
1882 do_not_record = 1;
1883 return 0;
1884 }
1885 hash += ((unsigned) REG << 7) + (unsigned) reg_qty[regno];
1886 return hash;
1887 }
1888
1889 case CONST_INT:
1890 {
1891 unsigned HOST_WIDE_INT tem = INTVAL (x);
1892 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
1893 return hash;
1894 }
1895
1896 case CONST_DOUBLE:
1897 /* This is like the general case, except that it only counts
1898 the integers representing the constant. */
1899 hash += (unsigned) code + (unsigned) GET_MODE (x);
1900 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
1901 {
1902 unsigned tem = XINT (x, i);
1903 hash += tem;
1904 }
1905 return hash;
1906
1907 /* Assume there is only one rtx object for any given label. */
1908 case LABEL_REF:
1909 hash
1910 += ((unsigned) LABEL_REF << 7) + (unsigned HOST_WIDE_INT) XEXP (x, 0);
1911 return hash;
1912
1913 case SYMBOL_REF:
1914 hash
1915 += ((unsigned) SYMBOL_REF << 7) + (unsigned HOST_WIDE_INT) XSTR (x, 0);
1916 return hash;
1917
1918 case MEM:
1919 if (MEM_VOLATILE_P (x))
1920 {
1921 do_not_record = 1;
1922 return 0;
1923 }
1924 if (! RTX_UNCHANGING_P (x))
1925 {
1926 hash_arg_in_memory = 1;
1927 if (MEM_IN_STRUCT_P (x)) hash_arg_in_struct = 1;
1928 }
1929 /* Having already found this special case,
1930 we might as well speed it up as much as possible. */
1931 hash += (unsigned) MEM;
1932 x = XEXP (x, 0);
1933 goto repeat;
1934
1935 case PRE_DEC:
1936 case PRE_INC:
1937 case POST_DEC:
1938 case POST_INC:
1939 case PC:
1940 case CC0:
1941 case CALL:
1942 case UNSPEC_VOLATILE:
1943 do_not_record = 1;
1944 return 0;
1945
1946 case ASM_OPERANDS:
1947 if (MEM_VOLATILE_P (x))
1948 {
1949 do_not_record = 1;
1950 return 0;
1951 }
1952 }
1953
1954 i = GET_RTX_LENGTH (code) - 1;
1955 hash += (unsigned) code + (unsigned) GET_MODE (x);
1956 fmt = GET_RTX_FORMAT (code);
1957 for (; i >= 0; i--)
1958 {
1959 if (fmt[i] == 'e')
1960 {
1961 rtx tem = XEXP (x, i);
1962
1963 /* If we are about to do the last recursive call
1964 needed at this level, change it into iteration.
1965 This function is called enough to be worth it. */
1966 if (i == 0)
1967 {
1968 x = tem;
1969 goto repeat;
1970 }
1971 hash += canon_hash (tem, 0);
1972 }
1973 else if (fmt[i] == 'E')
1974 for (j = 0; j < XVECLEN (x, i); j++)
1975 hash += canon_hash (XVECEXP (x, i, j), 0);
1976 else if (fmt[i] == 's')
1977 {
1978 register unsigned char *p = (unsigned char *) XSTR (x, i);
1979 if (p)
1980 while (*p)
1981 hash += *p++;
1982 }
1983 else if (fmt[i] == 'i')
1984 {
1985 register unsigned tem = XINT (x, i);
1986 hash += tem;
1987 }
1988 else
1989 abort ();
1990 }
1991 return hash;
1992 }
1993
1994 /* Like canon_hash but with no side effects. */
1995
1996 static unsigned
1997 safe_hash (x, mode)
1998 rtx x;
1999 enum machine_mode mode;
2000 {
2001 int save_do_not_record = do_not_record;
2002 int save_hash_arg_in_memory = hash_arg_in_memory;
2003 int save_hash_arg_in_struct = hash_arg_in_struct;
2004 unsigned hash = canon_hash (x, mode);
2005 hash_arg_in_memory = save_hash_arg_in_memory;
2006 hash_arg_in_struct = save_hash_arg_in_struct;
2007 do_not_record = save_do_not_record;
2008 return hash;
2009 }
2010 \f
2011 /* Return 1 iff X and Y would canonicalize into the same thing,
2012 without actually constructing the canonicalization of either one.
2013 If VALIDATE is nonzero,
2014 we assume X is an expression being processed from the rtl
2015 and Y was found in the hash table. We check register refs
2016 in Y for being marked as valid.
2017
2018 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2019 that is known to be in the register. Ordinarily, we don't allow them
2020 to match, because letting them match would cause unpredictable results
2021 in all the places that search a hash table chain for an equivalent
2022 for a given value. A possible equivalent that has different structure
2023 has its hash code computed from different data. Whether the hash code
2024 is the same as that of the given value is pure luck. */
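/* Illustrative example: with EQUAL_VALUES nonzero, (reg:SI 100) can match
   (const_int 4) when reg 100's quantity has qty_const == (const_int 4)
   and qty_mode == SImode; with EQUAL_VALUES zero, the differing rtx
   codes make them unequal.  */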
2025
2026 static int
2027 exp_equiv_p (x, y, validate, equal_values)
2028 rtx x, y;
2029 int validate;
2030 int equal_values;
2031 {
2032 register int i, j;
2033 register enum rtx_code code;
2034 register char *fmt;
2035
2036 /* Note: it is incorrect to assume an expression is equivalent to itself
2037 if VALIDATE is nonzero. */
2038 if (x == y && !validate)
2039 return 1;
2040 if (x == 0 || y == 0)
2041 return x == y;
2042
2043 code = GET_CODE (x);
2044 if (code != GET_CODE (y))
2045 {
2046 if (!equal_values)
2047 return 0;
2048
2049 /* If X is a constant and Y is a register or vice versa, they may be
2050 equivalent. We only have to validate if Y is a register. */
2051 if (CONSTANT_P (x) && GET_CODE (y) == REG
2052 && REGNO_QTY_VALID_P (REGNO (y))
2053 && GET_MODE (y) == qty_mode[reg_qty[REGNO (y)]]
2054 && rtx_equal_p (x, qty_const[reg_qty[REGNO (y)]])
2055 && (! validate || reg_in_table[REGNO (y)] == reg_tick[REGNO (y)]))
2056 return 1;
2057
2058 if (CONSTANT_P (y) && code == REG
2059 && REGNO_QTY_VALID_P (REGNO (x))
2060 && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]]
2061 && rtx_equal_p (y, qty_const[reg_qty[REGNO (x)]]))
2062 return 1;
2063
2064 return 0;
2065 }
2066
2067 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2068 if (GET_MODE (x) != GET_MODE (y))
2069 return 0;
2070
2071 switch (code)
2072 {
2073 case PC:
2074 case CC0:
2075 return x == y;
2076
2077 case CONST_INT:
2078 return INTVAL (x) == INTVAL (y);
2079
2080 case LABEL_REF:
2081 return XEXP (x, 0) == XEXP (y, 0);
2082
2083 case SYMBOL_REF:
2084 return XSTR (x, 0) == XSTR (y, 0);
2085
2086 case REG:
2087 {
2088 int regno = REGNO (y);
2089 int endregno
2090 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2091 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2092 int i;
2093
2094 /* If the quantities are not the same, the expressions are not
2095 equivalent. If they are the same and we are not to validate, they
2096 are equivalent. Otherwise, ensure all regs are up-to-date. */
2097
2098 if (reg_qty[REGNO (x)] != reg_qty[regno])
2099 return 0;
2100
2101 if (! validate)
2102 return 1;
2103
2104 for (i = regno; i < endregno; i++)
2105 if (reg_in_table[i] != reg_tick[i])
2106 return 0;
2107
2108 return 1;
2109 }
2110
2111 /* For commutative operations, check both orders. */
2112 case PLUS:
2113 case MULT:
2114 case AND:
2115 case IOR:
2116 case XOR:
2117 case NE:
2118 case EQ:
2119 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2120 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2121 validate, equal_values))
2122 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2123 validate, equal_values)
2124 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2125 validate, equal_values)));
2126 }
2127
2128 /* Compare the elements. If any pair of corresponding elements
2129 fails to match, return 0 for the whole thing. */
2130
2131 fmt = GET_RTX_FORMAT (code);
2132 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2133 {
2134 switch (fmt[i])
2135 {
2136 case 'e':
2137 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2138 return 0;
2139 break;
2140
2141 case 'E':
2142 if (XVECLEN (x, i) != XVECLEN (y, i))
2143 return 0;
2144 for (j = 0; j < XVECLEN (x, i); j++)
2145 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2146 validate, equal_values))
2147 return 0;
2148 break;
2149
2150 case 's':
2151 if (strcmp (XSTR (x, i), XSTR (y, i)))
2152 return 0;
2153 break;
2154
2155 case 'i':
2156 if (XINT (x, i) != XINT (y, i))
2157 return 0;
2158 break;
2159
2160 case 'w':
2161 if (XWINT (x, i) != XWINT (y, i))
2162 return 0;
2163 break;
2164
2165 case '0':
2166 break;
2167
2168 default:
2169 abort ();
2170 }
2171 }
2172
2173 return 1;
2174 }
2175 \f
2176 /* Return 1 iff any subexpression of X matches Y.
2177 Here we do not require that X or Y be valid, with respect to the
2178 registers they refer to, for being in the hash table. */
2179
2180 static int
2181 refers_to_p (x, y)
2182 rtx x, y;
2183 {
2184 register int i;
2185 register enum rtx_code code;
2186 register char *fmt;
2187
2188 repeat:
2189 if (x == y)
2190 return 1;
2191 if (x == 0 || y == 0)
2192 return 0;
2193
2194 code = GET_CODE (x);
2195 /* If X as a whole has the same code as Y, they may match.
2196 If so, return 1. */
2197 if (code == GET_CODE (y))
2198 {
2199 if (exp_equiv_p (x, y, 0, 1))
2200 return 1;
2201 }
2202
2203 /* X does not match, so try its subexpressions. */
2204
2205 fmt = GET_RTX_FORMAT (code);
2206 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2207 if (fmt[i] == 'e')
2208 {
2209 if (i == 0)
2210 {
2211 x = XEXP (x, 0);
2212 goto repeat;
2213 }
2214 else
2215 if (refers_to_p (XEXP (x, i), y))
2216 return 1;
2217 }
2218 else if (fmt[i] == 'E')
2219 {
2220 int j;
2221 for (j = 0; j < XVECLEN (x, i); j++)
2222 if (refers_to_p (XVECEXP (x, i, j), y))
2223 return 1;
2224 }
2225
2226 return 0;
2227 }
2228 \f
2229 /* Given ADDR, a memory address, and SIZE, the size of the memory reference,
2230 set *PBASE, *PSTART, and *PEND to the base of the address, the
2231 starting offset, and the ending offset, respectively.
2232
2233 ADDR is known to be a nonvarying address. */
2234
2235 /* ??? Despite what the comments say, this function is in fact frequently
2236 passed varying addresses. This does not appear to cause any problems. */
2237
2238 static void
2239 set_nonvarying_address_components (addr, size, pbase, pstart, pend)
2240 rtx addr;
2241 int size;
2242 rtx *pbase;
2243 HOST_WIDE_INT *pstart, *pend;
2244 {
2245 rtx base;
2246 HOST_WIDE_INT start, end;
2247
2248 base = addr;
2249 start = 0;
2250 end = 0;
2251
2252 /* Registers with nonvarying addresses usually have constant equivalents;
2253 but the frame pointer register is also possible. */
2254 if (GET_CODE (base) == REG
2255 && qty_const != 0
2256 && REGNO_QTY_VALID_P (REGNO (base))
2257 && qty_mode[reg_qty[REGNO (base)]] == GET_MODE (base)
2258 && qty_const[reg_qty[REGNO (base)]] != 0)
2259 base = qty_const[reg_qty[REGNO (base)]];
2260 else if (GET_CODE (base) == PLUS
2261 && GET_CODE (XEXP (base, 1)) == CONST_INT
2262 && GET_CODE (XEXP (base, 0)) == REG
2263 && qty_const != 0
2264 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
2265 && (qty_mode[reg_qty[REGNO (XEXP (base, 0))]]
2266 == GET_MODE (XEXP (base, 0)))
2267 && qty_const[reg_qty[REGNO (XEXP (base, 0))]])
2268 {
2269 start = INTVAL (XEXP (base, 1));
2270 base = qty_const[reg_qty[REGNO (XEXP (base, 0))]];
2271 }
2272
2273 /* Handle everything that we can find inside an address that has been
2274 viewed as constant. */
2275
2276 while (1)
2277 {
2278 /* If no part of this switch does a "continue", the code outside
2279 will exit this loop. */
2280
2281 switch (GET_CODE (base))
2282 {
2283 case LO_SUM:
2284 /* By definition, operand1 of a LO_SUM is the associated constant
2285 address. Use that constant address as the base
2286 instead. */
2287 base = XEXP (base, 1);
2288 continue;
2289
2290 case CONST:
2291 /* Strip off CONST. */
2292 base = XEXP (base, 0);
2293 continue;
2294
2295 case PLUS:
2296 if (GET_CODE (XEXP (base, 1)) == CONST_INT)
2297 {
2298 start += INTVAL (XEXP (base, 1));
2299 base = XEXP (base, 0);
2300 continue;
2301 }
2302 break;
2303
2304 case AND:
2305 /* Handle the case of an AND which is the negative of a power of
2306 two. This is used to represent unaligned memory operations. */
2307 if (GET_CODE (XEXP (base, 1)) == CONST_INT
2308 && exact_log2 (- INTVAL (XEXP (base, 1))) > 0)
2309 {
2310 set_nonvarying_address_components (XEXP (base, 0), size,
2311 pbase, pstart, pend);
2312
2313 /* Assume the worst misalignment. START is affected, but not
2314 END, so compensate by adjusting SIZE. Don't lose any
2315 constant we already had. */
2316
2317 size = *pend - *pstart - INTVAL (XEXP (base, 1)) - 1;
2318 start += *pstart - INTVAL (XEXP (base, 1)) - 1;
2319 base = *pbase;
2320 }
2321 break;
2322 }
2323
2324 break;
2325 }
2326
2327 if (GET_CODE (base) == CONST_INT)
2328 {
2329 start += INTVAL (base);
2330 base = const0_rtx;
2331 }
2332
2333 end = start + size;
2334
2335 /* Set the return values. */
2336 *pbase = base;
2337 *pstart = start;
2338 *pend = end;
2339 }
2340
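/* Worked example (hypothetical operands): for ADDR == (plus (reg 100)
   (const_int 8)), where reg 100 is known equivalent to (symbol_ref "a"),
   and SIZE == 4, we produce *PBASE == (symbol_ref "a"), *PSTART == 8,
   and *PEND == 12.  */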
2341 /* Return 1 iff any subexpression of X refers to memory
2342 at an address of BASE plus some offset
2343 such that any of the bytes' offsets fall between START (inclusive)
2344 and END (exclusive).
2345
2346 The value is undefined if X is a varying address (as determined by
2347 cse_rtx_addr_varies_p). This function is not used in such cases.
2348
2349 When used in the cse pass, `qty_const' is nonzero, and it is used
2350 to treat an address that is a register with a known constant value
2351 as if it were that constant value.
2352 In the loop pass, `qty_const' is zero, so this is not done. */
2353
2354 static int
2355 refers_to_mem_p (x, base, start, end)
2356 rtx x, base;
2357 HOST_WIDE_INT start, end;
2358 {
2359 register HOST_WIDE_INT i;
2360 register enum rtx_code code;
2361 register char *fmt;
2362
2363 repeat:
2364 if (x == 0)
2365 return 0;
2366
2367 code = GET_CODE (x);
2368 if (code == MEM)
2369 {
2370 register rtx addr = XEXP (x, 0); /* Get the address. */
2371 rtx mybase;
2372 HOST_WIDE_INT mystart, myend;
2373
2374 set_nonvarying_address_components (addr, GET_MODE_SIZE (GET_MODE (x)),
2375 &mybase, &mystart, &myend);
2376
2378 /* refers_to_mem_p is never called with varying addresses.
2379 If the base addresses are not equal, there is no chance
2380 of the memory addresses conflicting. */
2381 if (! rtx_equal_p (mybase, base))
2382 return 0;
2383
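      /* [MYSTART, MYEND) and [START, END) are half-open byte ranges; they
	 overlap exactly when each begins before the other ends.  For
	 example, bytes 4..8 and 8..12 do not conflict.  */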
2384 return myend > start && mystart < end;
2385 }
2386
2387 /* X does not match, so try its subexpressions. */
2388
2389 fmt = GET_RTX_FORMAT (code);
2390 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2391 if (fmt[i] == 'e')
2392 {
2393 if (i == 0)
2394 {
2395 x = XEXP (x, 0);
2396 goto repeat;
2397 }
2398 else
2399 if (refers_to_mem_p (XEXP (x, i), base, start, end))
2400 return 1;
2401 }
2402 else if (fmt[i] == 'E')
2403 {
2404 int j;
2405 for (j = 0; j < XVECLEN (x, i); j++)
2406 if (refers_to_mem_p (XVECEXP (x, i, j), base, start, end))
2407 return 1;
2408 }
2409
2410 return 0;
2411 }
2412
2413 /* Nonzero if X refers to memory at a varying address,
2414 except that a register which currently has a known constant value
2415 isn't considered varying. */
2416
2417 static int
2418 cse_rtx_addr_varies_p (x)
2419 rtx x;
2420 {
2421 /* We need not check for X and the equivalence class being of the same
2422 mode because if X is equivalent to a constant in some mode, it
2423 doesn't vary in any mode. */
2424
2425 if (GET_CODE (x) == MEM
2426 && GET_CODE (XEXP (x, 0)) == REG
2427 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2428 && GET_MODE (XEXP (x, 0)) == qty_mode[reg_qty[REGNO (XEXP (x, 0))]]
2429 && qty_const[reg_qty[REGNO (XEXP (x, 0))]] != 0)
2430 return 0;
2431
2432 if (GET_CODE (x) == MEM
2433 && GET_CODE (XEXP (x, 0)) == PLUS
2434 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2435 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2436 && REGNO_QTY_VALID_P (REGNO (XEXP (XEXP (x, 0), 0)))
2437 && (GET_MODE (XEXP (XEXP (x, 0), 0))
2438 == qty_mode[reg_qty[REGNO (XEXP (XEXP (x, 0), 0))]])
2439 && qty_const[reg_qty[REGNO (XEXP (XEXP (x, 0), 0))]])
2440 return 0;
2441
2442 return rtx_addr_varies_p (x);
2443 }
2444 \f
2445 /* Canonicalize an expression:
2446 replace each register reference inside it
2447 with the "oldest" equivalent register.
2448
2449 If INSN is non-zero and we are replacing a pseudo with a hard register
2450 or vice versa, validate_change is used to ensure that INSN remains valid
2451 after we make our substitution. The calls are made with IN_GROUP non-zero
2452 so apply_change_group must be called upon the outermost return from this
2453 function (unless INSN is zero). The result of apply_change_group can
2454 generally be discarded since the changes we are making are optional. */
2455
2456 static rtx
2457 canon_reg (x, insn)
2458 rtx x;
2459 rtx insn;
2460 {
2461 register int i;
2462 register enum rtx_code code;
2463 register char *fmt;
2464
2465 if (x == 0)
2466 return x;
2467
2468 code = GET_CODE (x);
2469 switch (code)
2470 {
2471 case PC:
2472 case CC0:
2473 case CONST:
2474 case CONST_INT:
2475 case CONST_DOUBLE:
2476 case SYMBOL_REF:
2477 case LABEL_REF:
2478 case ADDR_VEC:
2479 case ADDR_DIFF_VEC:
2480 return x;
2481
2482 case REG:
2483 {
2484 register int first;
2485
2486 /* Never replace a hard reg, because hard regs can appear
2487 in more than one machine mode, and we must preserve the mode
2488 of each occurrence. Also, some hard regs appear in
2489 MEMs that are shared and mustn't be altered. Don't try to
2490 replace any reg that maps to a reg of class NO_REGS. */
2491 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2492 || ! REGNO_QTY_VALID_P (REGNO (x)))
2493 return x;
2494
2495 first = qty_first_reg[reg_qty[REGNO (x)]];
2496 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2497 : REGNO_REG_CLASS (first) == NO_REGS ? x
2498 : gen_rtx (REG, qty_mode[reg_qty[REGNO (x)]], first));
2499 }
2500 }
2501
2502 fmt = GET_RTX_FORMAT (code);
2503 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2504 {
2505 register int j;
2506
2507 if (fmt[i] == 'e')
2508 {
2509 rtx new = canon_reg (XEXP (x, i), insn);
2510
2511 /* If replacing pseudo with hard reg or vice versa, ensure the
2512 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2513 if (insn != 0 && new != 0
2514 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2515 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2516 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2517 || insn_n_dups[recog_memoized (insn)] > 0))
2518 validate_change (insn, &XEXP (x, i), new, 1);
2519 else
2520 XEXP (x, i) = new;
2521 }
2522 else if (fmt[i] == 'E')
2523 for (j = 0; j < XVECLEN (x, i); j++)
2524 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2525 }
2526
2527 return x;
2528 }
2529 \f
2530 /* LOC is a location within INSN that is an operand address (the contents of
2531 a MEM). Find the best equivalent address to use that is valid for this
2532 insn.
2533
2534 On most CISC machines, complicated address modes are costly, and rtx_cost
2535 is a good approximation for that cost. However, most RISC machines have
2536 only a few (usually only one) memory reference formats. If an address is
2537 valid at all, it is often just as cheap as any other address. Hence, for
2538 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2539 costs of various addresses. For two addresses of equal cost, choose the one
2540 with the highest `rtx_cost' value as that has the potential of eliminating
2541 the most insns. For equal costs, we choose the first in the equivalence
2542 class. Note that we ignore the fact that pseudo registers are cheaper
2543 than hard registers here because we would also prefer the pseudo registers.
2544 */
2545
2546 static void
2547 find_best_addr (insn, loc)
2548 rtx insn;
2549 rtx *loc;
2550 {
2551 struct table_elt *elt, *p;
2552 rtx addr = *loc;
2553 int our_cost;
2554 int found_better = 1;
2555 int save_do_not_record = do_not_record;
2556 int save_hash_arg_in_memory = hash_arg_in_memory;
2557 int save_hash_arg_in_struct = hash_arg_in_struct;
2558 int addr_volatile;
2559 int regno;
2560 unsigned hash;
2561
2562 /* Do not try to replace constant addresses or addresses of local and
2563 argument slots. These MEM expressions are made only once and inserted
2564 in many instructions, as well as being used to control symbol table
2565 output. It is not safe to clobber them.
2566
2567 There are some uncommon cases where the address is already in a register
2568 for some reason, but we cannot take advantage of that because we have
2569 no easy way to unshare the MEM. In addition, looking up all stack
2570 addresses is costly. */
2571 if ((GET_CODE (addr) == PLUS
2572 && GET_CODE (XEXP (addr, 0)) == REG
2573 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2574 && (regno = REGNO (XEXP (addr, 0)),
2575 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2576 || regno == ARG_POINTER_REGNUM))
2577 || (GET_CODE (addr) == REG
2578 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2579 || regno == HARD_FRAME_POINTER_REGNUM
2580 || regno == ARG_POINTER_REGNUM))
2581 || CONSTANT_ADDRESS_P (addr))
2582 return;
2583
2584 /* If this address is not simply a register, try to fold it. This will
2585 sometimes simplify the expression. Many simplifications
2586 will not be valid, but some, usually applying the associative rule, will
2587 be valid and produce better code. */
2588 if (GET_CODE (addr) != REG
2589 && validate_change (insn, loc, fold_rtx (addr, insn), 0))
2590 addr = *loc;
2591
2592 /* If this address is not in the hash table, we can't look for equivalences
2593 of the whole address. Also, ignore if volatile. */
2594
2595 do_not_record = 0;
2596 hash = HASH (addr, Pmode);
2597 addr_volatile = do_not_record;
2598 do_not_record = save_do_not_record;
2599 hash_arg_in_memory = save_hash_arg_in_memory;
2600 hash_arg_in_struct = save_hash_arg_in_struct;
2601
2602 if (addr_volatile)
2603 return;
2604
2605 elt = lookup (addr, hash, Pmode);
2606
2607 #ifndef ADDRESS_COST
2608 if (elt)
2609 {
2610 our_cost = elt->cost;
2611
2612 /* Find the lowest cost below ours that works. */
2613 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2614 if (elt->cost < our_cost
2615 && (GET_CODE (elt->exp) == REG
2616 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2617 && validate_change (insn, loc,
2618 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2619 return;
2620 }
2621 #else
2622
2623 if (elt)
2624 {
2625 /* We need to find the best (under the criteria documented above) entry
2626 in the class that is valid. We use the `flag' field to indicate
2627 choices that were invalid and iterate until we can't find a better
2628 one that hasn't already been tried. */
2629
2630 for (p = elt->first_same_value; p; p = p->next_same_value)
2631 p->flag = 0;
2632
2633 while (found_better)
2634 {
2635 int best_addr_cost = ADDRESS_COST (*loc);
2636 int best_rtx_cost = (elt->cost + 1) >> 1;
2637 struct table_elt *best_elt = elt;
2638
2639 found_better = 0;
2640 for (p = elt->first_same_value; p; p = p->next_same_value)
2641 if (! p->flag
2642 && (GET_CODE (p->exp) == REG
2643 || exp_equiv_p (p->exp, p->exp, 1, 0))
2644 && (ADDRESS_COST (p->exp) < best_addr_cost
2645 || (ADDRESS_COST (p->exp) == best_addr_cost
2646 && (p->cost + 1) >> 1 > best_rtx_cost)))
2647 {
2648 found_better = 1;
2649 best_addr_cost = ADDRESS_COST (p->exp);
2650 best_rtx_cost = (p->cost + 1) >> 1;
2651 best_elt = p;
2652 }
2653
2654 if (found_better)
2655 {
2656 if (validate_change (insn, loc,
2657 canon_reg (copy_rtx (best_elt->exp),
2658 NULL_RTX), 0))
2659 return;
2660 else
2661 best_elt->flag = 1;
2662 }
2663 }
2664 }
2665
2666 /* If the address is a binary operation with the first operand a register
2667 and the second a constant, do the same as above, but looking for
2668 equivalences of the register. Then try to simplify before checking for
2669 the best address to use. This catches a few cases: First is when we
2670 have REG+const and the register is another REG+const. We can often merge
2671 the constants and eliminate one insn and one register. It may also be
2672 that a machine has a cheap REG+REG+const. Finally, this improves the
2673 code on the Alpha for unaligned byte stores. */
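/* Illustrative example: if *LOC is (plus (reg 100) (const_int 4)) and
   reg 100 is equivalent to (plus (reg 101) (const_int 8)), the
   cse_gen_binary call below can fold the address to
   (plus (reg 101) (const_int 12)).  */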
2674
2675 if (flag_expensive_optimizations
2676 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
2677 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
2678 && GET_CODE (XEXP (*loc, 0)) == REG
2679 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
2680 {
2681 rtx c = XEXP (*loc, 1);
2682
2683 do_not_record = 0;
2684 hash = HASH (XEXP (*loc, 0), Pmode);
2685 do_not_record = save_do_not_record;
2686 hash_arg_in_memory = save_hash_arg_in_memory;
2687 hash_arg_in_struct = save_hash_arg_in_struct;
2688
2689 elt = lookup (XEXP (*loc, 0), hash, Pmode);
2690 if (elt == 0)
2691 return;
2692
2693 /* We need to find the best (under the criteria documented above) entry
2694 in the class that is valid. We use the `flag' field to indicate
2695 choices that were invalid and iterate until we can't find a better
2696 one that hasn't already been tried. */
2697
2698 for (p = elt->first_same_value; p; p = p->next_same_value)
2699 p->flag = 0;
2700
2701 while (found_better)
2702 {
2703 int best_addr_cost = ADDRESS_COST (*loc);
2704 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2705 struct table_elt *best_elt = elt;
2706 rtx best_rtx = *loc;
2707 int count;
2708
2709 /* This is in the worst case an O(n^2) algorithm, so limit our search
2710 to the first 32 elements on the list. This avoids trouble
2711 compiling code with very long basic blocks that can easily
2712 call cse_gen_binary so many times that we run out of memory. */
2713
2714 found_better = 0;
2715 for (p = elt->first_same_value, count = 0;
2716 p && count < 32;
2717 p = p->next_same_value, count++)
2718 if (! p->flag
2719 && (GET_CODE (p->exp) == REG
2720 || exp_equiv_p (p->exp, p->exp, 1, 0)))
2721 {
2722 rtx new = cse_gen_binary (GET_CODE (*loc), Pmode, p->exp, c);
2723
2724 if ((ADDRESS_COST (new) < best_addr_cost
2725 || (ADDRESS_COST (new) == best_addr_cost
2726 && (COST (new) + 1) >> 1 > best_rtx_cost)))
2727 {
2728 found_better = 1;
2729 best_addr_cost = ADDRESS_COST (new);
2730 best_rtx_cost = (COST (new) + 1) >> 1;
2731 best_elt = p;
2732 best_rtx = new;
2733 }
2734 }
2735
2736 if (found_better)
2737 {
2738 if (validate_change (insn, loc,
2739 canon_reg (copy_rtx (best_rtx),
2740 NULL_RTX), 0))
2741 return;
2742 else
2743 best_elt->flag = 1;
2744 }
2745 }
2746 }
2747 #endif
2748 }
2749 \f
2750 /* Given an operation (CODE, *PARG1, *PARG2), where CODE is a comparison
2751 operation (EQ, NE, GT, etc.), follow it back through the hash table and
2752 find what values are really being compared.
2753
2754 *PARG1 and *PARG2 are updated to contain the rtx representing the values
2755 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
2756 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2757 compared to produce cc0.
2758
2759 The return value is the comparison operator: either CODE itself,
2760 or the code corresponding to the inverse of the comparison. */
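/* For example, on a CC0 machine, if *PARG1 is (cc0), *PARG2 is
   (const_int 0), and cc0 was last set from (compare (reg 1) (reg 2)),
   we return with *PARG1 == (reg 1) and *PARG2 == (reg 2).  */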
2761
2762 static enum rtx_code
2763 find_comparison_args (code, parg1, parg2, pmode1, pmode2)
2764 enum rtx_code code;
2765 rtx *parg1, *parg2;
2766 enum machine_mode *pmode1, *pmode2;
2767 {
2768 rtx arg1, arg2;
2769
2770 arg1 = *parg1, arg2 = *parg2;
2771
2772 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
2773
2774 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2775 {
2776 /* Set non-zero when we find something of interest. */
2777 rtx x = 0;
2778 int reverse_code = 0;
2779 struct table_elt *p = 0;
2780
2781 /* If arg1 is a COMPARE, extract the comparison arguments from it.
2782 On machines with CC0, this is the only case that can occur, since
2783 fold_rtx will return the COMPARE or item being compared with zero
2784 when given CC0. */
2785
2786 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2787 x = arg1;
2788
2789 /* If ARG1 is a comparison operator and CODE is testing for
2790 STORE_FLAG_VALUE, get the inner arguments. */
2791
2792 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
2793 {
2794 if (code == NE
2795 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2796 && code == LT && STORE_FLAG_VALUE == -1)
2797 #ifdef FLOAT_STORE_FLAG_VALUE
2798 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2799 && FLOAT_STORE_FLAG_VALUE < 0)
2800 #endif
2801 )
2802 x = arg1;
2803 else if (code == EQ
2804 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2805 && code == GE && STORE_FLAG_VALUE == -1)
2806 #ifdef FLOAT_STORE_FLAG_VALUE
2807 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2808 && FLOAT_STORE_FLAG_VALUE < 0)
2809 #endif
2810 )
2811 x = arg1, reverse_code = 1;
2812 }
2813
2814 /* ??? We could also check for
2815
2816 (ne (and (eq (...) (const_int 1))) (const_int 0))
2817
2818 and related forms, but let's wait until we see them occurring. */
2819
2820 if (x == 0)
2821 /* Look up ARG1 in the hash table and see if it has an equivalence
2822 that lets us see what is being compared. */
2823 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) % NBUCKETS,
2824 GET_MODE (arg1));
2825 if (p) p = p->first_same_value;
2826
2827 for (; p; p = p->next_same_value)
2828 {
2829 enum machine_mode inner_mode = GET_MODE (p->exp);
2830
2831 /* If the entry isn't valid, skip it. */
2832 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
2833 continue;
2834
2835 if (GET_CODE (p->exp) == COMPARE
2836 /* Another possibility is that this machine has a compare insn
2837 that includes the comparison code. In that case, ARG1 would
2838 be equivalent to a comparison operation that would set ARG1 to
2839 either STORE_FLAG_VALUE or zero. If this is an NE operation,
2840 ORIG_CODE is the actual comparison being done; if it is an EQ,
2841 we must reverse ORIG_CODE. On machines with a negative value
2842 for STORE_FLAG_VALUE, also look at LT and GE operations. */
2843 || ((code == NE
2844 || (code == LT
2845 && GET_MODE_CLASS (inner_mode) == MODE_INT
2846 && (GET_MODE_BITSIZE (inner_mode)
2847 <= HOST_BITS_PER_WIDE_INT)
2848 && (STORE_FLAG_VALUE
2849 & ((HOST_WIDE_INT) 1
2850 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2851 #ifdef FLOAT_STORE_FLAG_VALUE
2852 || (code == LT
2853 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2854 && FLOAT_STORE_FLAG_VALUE < 0)
2855 #endif
2856 )
2857 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
2858 {
2859 x = p->exp;
2860 break;
2861 }
2862 else if ((code == EQ
2863 || (code == GE
2864 && GET_MODE_CLASS (inner_mode) == MODE_INT
2865 && (GET_MODE_BITSIZE (inner_mode)
2866 <= HOST_BITS_PER_WIDE_INT)
2867 && (STORE_FLAG_VALUE
2868 & ((HOST_WIDE_INT) 1
2869 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2870 #ifdef FLOAT_STORE_FLAG_VALUE
2871 || (code == GE
2872 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2873 && FLOAT_STORE_FLAG_VALUE < 0)
2874 #endif
2875 )
2876 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
2877 {
2878 reverse_code = 1;
2879 x = p->exp;
2880 break;
2881 }
2882
2883 /* If this is fp + constant, the equivalent is a better operand since
2884 it may let us predict the value of the comparison. */
2885 else if (NONZERO_BASE_PLUS_P (p->exp))
2886 {
2887 arg1 = p->exp;
2888 continue;
2889 }
2890 }
2891
2892 /* If we didn't find a useful equivalence for ARG1, we are done.
2893 Otherwise, set up for the next iteration. */
2894 if (x == 0)
2895 break;
2896
2897 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
2898 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
2899 code = GET_CODE (x);
2900
2901 if (reverse_code)
2902 code = reverse_condition (code);
2903 }
2904
2905 /* Return our results. Return the modes from before fold_rtx
2906 because fold_rtx might produce const_int, and then it's too late. */
2907 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
2908 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
2909
2910 return code;
2911 }
2912 \f
2913 /* Try to simplify a unary operation CODE whose output mode is to be
2914 MODE with input operand OP whose mode was originally OP_MODE.
2915 Return zero if no simplification can be made. */
2916
2917 rtx
2918 simplify_unary_operation (code, mode, op, op_mode)
2919 enum rtx_code code;
2920 enum machine_mode mode;
2921 rtx op;
2922 enum machine_mode op_mode;
2923 {
2924 register int width = GET_MODE_BITSIZE (mode);
2925
2926 /* The order of these tests is critical so that, for example, we don't
2927 check the wrong mode (input vs. output) for a conversion operation,
2928 such as FIX. At some point, this should be simplified. */
2929
2930 #if !defined(REAL_IS_NOT_DOUBLE) || defined(REAL_ARITHMETIC)
2931
2932 if (code == FLOAT && GET_MODE (op) == VOIDmode
2933 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
2934 {
2935 HOST_WIDE_INT hv, lv;
2936 REAL_VALUE_TYPE d;
2937
2938 if (GET_CODE (op) == CONST_INT)
2939 lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
2940 else
2941 lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
2942
2943 #ifdef REAL_ARITHMETIC
2944 REAL_VALUE_FROM_INT (d, lv, hv);
2945 #else
2946 if (hv < 0)
2947 {
2948 d = (double) (~ hv);
2949 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
2950 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
2951 d += (double) (unsigned HOST_WIDE_INT) (~ lv);
2952 d = (- d - 1.0);
2953 }
2954 else
2955 {
2956 d = (double) hv;
2957 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
2958 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
2959 d += (double) (unsigned HOST_WIDE_INT) lv;
2960 }
2961 #endif /* REAL_ARITHMETIC */
2962 d = real_value_truncate (mode, d);
2963 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
2964 }
2965 else if (code == UNSIGNED_FLOAT && GET_MODE (op) == VOIDmode
2966 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
2967 {
2968 HOST_WIDE_INT hv, lv;
2969 REAL_VALUE_TYPE d;
2970
2971 if (GET_CODE (op) == CONST_INT)
2972 lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
2973 else
2974 lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
2975
2976 if (op_mode == VOIDmode)
2977 {
2978 /* We don't know how to interpret negative-looking numbers in
2979 this case, so don't try to fold those. */
2980 if (hv < 0)
2981 return 0;
2982 }
2983 else if (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT * 2)
2984 ;
2985 else
2986 hv = 0, lv &= GET_MODE_MASK (op_mode);
2987
2988 #ifdef REAL_ARITHMETIC
2989 REAL_VALUE_FROM_UNSIGNED_INT (d, lv, hv);
2990 #else
2991
2992 d = (double) (unsigned HOST_WIDE_INT) hv;
2993 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
2994 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
2995 d += (double) (unsigned HOST_WIDE_INT) lv;
2996 #endif /* REAL_ARITHMETIC */
2997 d = real_value_truncate (mode, d);
2998 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
2999 }
3000 #endif
3001
3002 if (GET_CODE (op) == CONST_INT
3003 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
3004 {
3005 register HOST_WIDE_INT arg0 = INTVAL (op);
3006 register HOST_WIDE_INT val;
3007
3008 switch (code)
3009 {
3010 case NOT:
3011 val = ~ arg0;
3012 break;
3013
3014 case NEG:
3015 val = - arg0;
3016 break;
3017
3018 case ABS:
3019 val = (arg0 >= 0 ? arg0 : - arg0);
3020 break;
3021
3022 case FFS:
3023 /* Don't use ffs here. Instead, get the low-order bit and then its
3024 bit number. If arg0 is zero, this will return 0, as desired. */
3025 arg0 &= GET_MODE_MASK (mode);
3026 val = exact_log2 (arg0 & (- arg0)) + 1;
3027 break;
3028
3029 case TRUNCATE:
3030 val = arg0;
3031 break;
3032
3033 case ZERO_EXTEND:
3034 if (op_mode == VOIDmode)
3035 op_mode = mode;
3036 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
3037 {
3038 /* If we were really extending the mode,
3039 we would have to distinguish between zero-extension
3040 and sign-extension. */
3041 if (width != GET_MODE_BITSIZE (op_mode))
3042 abort ();
3043 val = arg0;
3044 }
3045 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3046 val = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3047 else
3048 return 0;
3049 break;
3050
3051 case SIGN_EXTEND:
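	  /* Sign extension here works by masking to the operand's width and
	     subtracting 2**width when the sign bit is set; e.g. extending
	     0x80 from an 8-bit op_mode gives 0x80 - 0x100 == -128.  */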
3052 if (op_mode == VOIDmode)
3053 op_mode = mode;
3054 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
3055 {
3056 /* If we were really extending the mode,
3057 we would have to distinguish between zero-extension
3058 and sign-extension. */
3059 if (width != GET_MODE_BITSIZE (op_mode))
3060 abort ();
3061 val = arg0;
3062 }
3063 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3064 {
3065 val
3066 = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3067 if (val
3068 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (op_mode) - 1)))
3069 val -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3070 }
3071 else
3072 return 0;
3073 break;
3074
3075 case SQRT:
3076 return 0;
3077
3078 default:
3079 abort ();
3080 }
3081
3082 /* Clear the bits that don't belong in our mode,
3083 unless they and our sign bit are all one.
3084 So we get either a reasonable negative value or a reasonable
3085 unsigned value for this mode. */
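      /* E.g. with width == 8: -1 has the sign bit and all higher bits set,
	 so it is left alone; 0x1ff does not, so it is masked down to 0xff.  */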
3086 if (width < HOST_BITS_PER_WIDE_INT
3087 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3088 != ((HOST_WIDE_INT) (-1) << (width - 1))))
3089 val &= ((HOST_WIDE_INT) 1 << width) - 1;
3090
3091 return GEN_INT (val);
3092 }
3093
3094 /* We can do some operations on integer CONST_DOUBLEs. Also allow
3095 for a DImode operation on a CONST_INT. */
3096 else if (GET_MODE (op) == VOIDmode && width <= HOST_BITS_PER_WIDE_INT * 2
3097 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3098 {
3099 HOST_WIDE_INT l1, h1, lv, hv;
3100
3101 if (GET_CODE (op) == CONST_DOUBLE)
3102 l1 = CONST_DOUBLE_LOW (op), h1 = CONST_DOUBLE_HIGH (op);
3103 else
3104 l1 = INTVAL (op), h1 = l1 < 0 ? -1 : 0;
3105
3106 switch (code)
3107 {
3108 case NOT:
3109 lv = ~ l1;
3110 hv = ~ h1;
3111 break;
3112
3113 case NEG:
3114 neg_double (l1, h1, &lv, &hv);
3115 break;
3116
3117 case ABS:
3118 if (h1 < 0)
3119 neg_double (l1, h1, &lv, &hv);
3120 else
3121 lv = l1, hv = h1;
3122 break;
3123
3124 case FFS:
3125 hv = 0;
3126 if (l1 == 0)
3127 lv = h1 == 0 ? 0 : HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & (-h1)) + 1; /* FFS of 0 is 0. */
3128 else
3129 lv = exact_log2 (l1 & (-l1)) + 1;
3130 break;
3131
3132 case TRUNCATE:
3133 /* This is just a change-of-mode, so do nothing. */
3134 lv = l1, hv = h1;
3135 break;
3136
3137 case ZERO_EXTEND:
3138 if (op_mode == VOIDmode
3139 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3140 return 0;
3141
3142 hv = 0;
3143 lv = l1 & GET_MODE_MASK (op_mode);
3144 break;
3145
3146 case SIGN_EXTEND:
3147 if (op_mode == VOIDmode
3148 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3149 return 0;
3150 else
3151 {
3152 lv = l1 & GET_MODE_MASK (op_mode);
3153 if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
3154 && (lv & ((HOST_WIDE_INT) 1
3155 << (GET_MODE_BITSIZE (op_mode) - 1))) != 0)
3156 lv -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3157
3158 hv = (lv < 0) ? ~ (HOST_WIDE_INT) 0 : 0;
3159 }
3160 break;
3161
3162 case SQRT:
3163 return 0;
3164
3165 default:
3166 return 0;
3167 }
3168
3169 return immed_double_const (lv, hv, mode);
3170 }
3171
3172 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3173 else if (GET_CODE (op) == CONST_DOUBLE
3174 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3175 {
3176 REAL_VALUE_TYPE d;
3177 jmp_buf handler;
3178 rtx x;
3179
3180 if (setjmp (handler))
3181 /* There used to be a warning here, but that is inadvisable.
3182 People may want to cause traps, and the natural way
3183 to do it should not get a warning. */
3184 return 0;
3185
3186 set_float_handler (handler);
3187
3188 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3189
3190 switch (code)
3191 {
3192 case NEG:
3193 d = REAL_VALUE_NEGATE (d);
3194 break;
3195
3196 case ABS:
3197 if (REAL_VALUE_NEGATIVE (d))
3198 d = REAL_VALUE_NEGATE (d);
3199 break;
3200
3201 case FLOAT_TRUNCATE:
3202 d = real_value_truncate (mode, d);
3203 break;
3204
3205 case FLOAT_EXTEND:
3206 /* All this does is change the mode. */
3207 break;
3208
3209 case FIX:
3210 d = REAL_VALUE_RNDZINT (d);
3211 break;
3212
3213 case UNSIGNED_FIX:
3214 d = REAL_VALUE_UNSIGNED_RNDZINT (d);
3215 break;
3216
3217 case SQRT:
3218 return 0;
3219
3220 default:
3221 abort ();
3222 }
3223
3224 x = CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3225 set_float_handler (NULL_PTR);
3226 return x;
3227 }
3228
3229 else if (GET_CODE (op) == CONST_DOUBLE
3230 && GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
3231 && GET_MODE_CLASS (mode) == MODE_INT
3232 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
3233 {
3234 REAL_VALUE_TYPE d;
3235 jmp_buf handler;
3236 HOST_WIDE_INT val;
3237
3238 if (setjmp (handler))
3239 return 0;
3240
3241 set_float_handler (handler);
3242
3243 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3244
3245 switch (code)
3246 {
3247 case FIX:
3248 val = REAL_VALUE_FIX (d);
3249 break;
3250
3251 case UNSIGNED_FIX:
3252 val = REAL_VALUE_UNSIGNED_FIX (d);
3253 break;
3254
3255 default:
3256 abort ();
3257 }
3258
3259 set_float_handler (NULL_PTR);
3260
3261 /* Clear the bits that don't belong in our mode,
3262 unless they and our sign bit are all one.
3263 So we get either a reasonable negative value or a reasonable
3264 unsigned value for this mode. */
3265 if (width < HOST_BITS_PER_WIDE_INT
3266 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3267 != ((HOST_WIDE_INT) (-1) << (width - 1))))
3268 val &= ((HOST_WIDE_INT) 1 << width) - 1;
3269
3270 /* If this would be an entire word for the target, but is not for
3271 the host, then sign-extend on the host so that the number will look
3272 the same way on the host that it would on the target.
3273
3274 For example, when building a 32 bit sparc targeted compiler hosted
3275 on a 64 bit alpha, we want the 32 bit unsigned value -1 to be
3276 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
3277 The latter confuses the sparc backend. */
3278
3279 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
3280 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
3281 val |= ((HOST_WIDE_INT) (-1) << width);
3282
3283 return GEN_INT (val);
3284 }
3285 #endif
3286 /* This was formerly used only for non-IEEE float.
3287 eggert@twinsun.com says it is safe for IEEE also. */
3288 else
3289 {
3290 /* There are some simplifications we can do even if the operands
3291 aren't constant. */
3292 switch (code)
3293 {
3294 case NEG:
3295 case NOT:
3296 /* (not (not X)) == X, similarly for NEG. */
3297 if (GET_CODE (op) == code)
3298 return XEXP (op, 0);
3299 break;
3300
3301 case SIGN_EXTEND:
3302 /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2))))
3303 becomes just the MINUS if its mode is MODE. This allows
3304 folding switch statements on machines using casesi (such as
3305 the Vax). */
3306 if (GET_CODE (op) == TRUNCATE
3307 && GET_MODE (XEXP (op, 0)) == mode
3308 && GET_CODE (XEXP (op, 0)) == MINUS
3309 && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF
3310 && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF)
3311 return XEXP (op, 0);
3312
3313 #ifdef POINTERS_EXTEND_UNSIGNED
3314 if (! POINTERS_EXTEND_UNSIGNED
3315 && mode == Pmode && GET_MODE (op) == ptr_mode
3316 && CONSTANT_P (op))
3317 return convert_memory_address (Pmode, op);
3318 #endif
3319 break;
3320
3321 #ifdef POINTERS_EXTEND_UNSIGNED
3322 case ZERO_EXTEND:
3323 if (POINTERS_EXTEND_UNSIGNED
3324 && mode == Pmode && GET_MODE (op) == ptr_mode
3325 && CONSTANT_P (op))
3326 return convert_memory_address (Pmode, op);
3327 break;
3328 #endif
3329 }
3330
3331 return 0;
3332 }
3333 }
3334 \f
3335 /* Simplify a binary operation CODE with result mode MODE, operating on OP0
3336 and OP1. Return 0 if no simplification is possible.
3337
3338 Don't use this for relational operations such as EQ or LT.
3339 Use simplify_relational_operation instead. */
3340
3341 rtx
3342 simplify_binary_operation (code, mode, op0, op1)
3343 enum rtx_code code;
3344 enum machine_mode mode;
3345 rtx op0, op1;
3346 {
3347 register HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
3348 HOST_WIDE_INT val;
3349 int width = GET_MODE_BITSIZE (mode);
3350 rtx tem;
3351
3352 /* Relational operations don't work here. We must know the mode
3353 of the operands in order to do the comparison correctly.
3354 Assuming a full word can give incorrect results.
3355 Consider comparing 128 with -128 in QImode. */
3356
3357 if (GET_RTX_CLASS (code) == '<')
3358 abort ();
3359
3360 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3361 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3362 && GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
3363 && mode == GET_MODE (op0) && mode == GET_MODE (op1))
3364 {
3365 REAL_VALUE_TYPE f0, f1, value;
3366 jmp_buf handler;
3367
3368 if (setjmp (handler))
3369 return 0;
3370
3371 set_float_handler (handler);
3372
3373 REAL_VALUE_FROM_CONST_DOUBLE (f0, op0);
3374 REAL_VALUE_FROM_CONST_DOUBLE (f1, op1);
3375 f0 = real_value_truncate (mode, f0);
3376 f1 = real_value_truncate (mode, f1);
3377
3378 #ifdef REAL_ARITHMETIC
3379 REAL_ARITHMETIC (value, rtx_to_tree_code (code), f0, f1);
3380 #else
3381 switch (code)
3382 {
3383 case PLUS:
3384 value = f0 + f1;
3385 break;
3386 case MINUS:
3387 value = f0 - f1;
3388 break;
3389 case MULT:
3390 value = f0 * f1;
3391 break;
3392 case DIV:
3393 #ifndef REAL_INFINITY
3394 if (f1 == 0)
3395 return 0;
3396 #endif
3397 value = f0 / f1;
3398 break;
3399 case SMIN:
3400 value = MIN (f0, f1);
3401 break;
3402 case SMAX:
3403 value = MAX (f0, f1);
3404 break;
3405 default:
3406 abort ();
3407 }
3408 #endif
3409
3410 value = real_value_truncate (mode, value);
3411 set_float_handler (NULL_PTR);
3412 return CONST_DOUBLE_FROM_REAL_VALUE (value, mode);
3413 }
3414 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3415
3416 /* We can fold some multi-word operations. */
3417 if (GET_MODE_CLASS (mode) == MODE_INT
3418 && width == HOST_BITS_PER_WIDE_INT * 2
3419 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
3420 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
3421 {
3422 HOST_WIDE_INT l1, l2, h1, h2, lv, hv;
3423
3424 if (GET_CODE (op0) == CONST_DOUBLE)
3425 l1 = CONST_DOUBLE_LOW (op0), h1 = CONST_DOUBLE_HIGH (op0);
3426 else
3427 l1 = INTVAL (op0), h1 = l1 < 0 ? -1 : 0;
3428
3429 if (GET_CODE (op1) == CONST_DOUBLE)
3430 l2 = CONST_DOUBLE_LOW (op1), h2 = CONST_DOUBLE_HIGH (op1);
3431 else
3432 l2 = INTVAL (op1), h2 = l2 < 0 ? -1 : 0;
3433
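      /* For the min/max cases below, a two-word value is ordered by
	 comparing the high words first (signed for SMIN/SMAX, unsigned for
	 UMIN/UMAX), falling back to an unsigned comparison of the low
	 words when the high words are equal.  */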
3434 switch (code)
3435 {
3436 case MINUS:
3437 /* A - B == A + (-B). */
3438 neg_double (l2, h2, &lv, &hv);
3439 l2 = lv, h2 = hv;
3440
3441 /* .. fall through ... */
3442
3443 case PLUS:
3444 add_double (l1, h1, l2, h2, &lv, &hv);
3445 break;
3446
3447 case MULT:
3448 mul_double (l1, h1, l2, h2, &lv, &hv);
3449 break;
3450
3451 case DIV: case MOD: case UDIV: case UMOD:
3452 /* We'd need to include tree.h to do this and it doesn't seem worth
3453 it. */
3454 return 0;
3455
3456 case AND:
3457 lv = l1 & l2, hv = h1 & h2;
3458 break;
3459
3460 case IOR:
3461 lv = l1 | l2, hv = h1 | h2;
3462 break;
3463
3464 case XOR:
3465 lv = l1 ^ l2, hv = h1 ^ h2;
3466 break;
3467
3468 case SMIN:
3469 if (h1 < h2
3470 || (h1 == h2
3471 && ((unsigned HOST_WIDE_INT) l1
3472 < (unsigned HOST_WIDE_INT) l2)))
3473 lv = l1, hv = h1;
3474 else
3475 lv = l2, hv = h2;
3476 break;
3477
3478 case SMAX:
3479 if (h1 > h2
3480 || (h1 == h2
3481 && ((unsigned HOST_WIDE_INT) l1
3482 > (unsigned HOST_WIDE_INT) l2)))
3483 lv = l1, hv = h1;
3484 else
3485 lv = l2, hv = h2;
3486 break;
3487
3488 case UMIN:
3489 if ((unsigned HOST_WIDE_INT) h1 < (unsigned HOST_WIDE_INT) h2
3490 || (h1 == h2
3491 && ((unsigned HOST_WIDE_INT) l1
3492 < (unsigned HOST_WIDE_INT) l2)))
3493 lv = l1, hv = h1;
3494 else
3495 lv = l2, hv = h2;
3496 break;
3497
3498 case UMAX:
3499 if ((unsigned HOST_WIDE_INT) h1 > (unsigned HOST_WIDE_INT) h2
3500 || (h1 == h2
3501 && ((unsigned HOST_WIDE_INT) l1
3502 > (unsigned HOST_WIDE_INT) l2)))
3503 lv = l1, hv = h1;
3504 else
3505 lv = l2, hv = h2;
3506 break;
3507
3508 case LSHIFTRT: case ASHIFTRT:
3509 case ASHIFT:
3510 case ROTATE: case ROTATERT:
3511 #ifdef SHIFT_COUNT_TRUNCATED
3512 if (SHIFT_COUNT_TRUNCATED)
3513 l2 &= (GET_MODE_BITSIZE (mode) - 1), h2 = 0;
3514 #endif
3515
3516 if (h2 != 0 || l2 < 0 || l2 >= GET_MODE_BITSIZE (mode))
3517 return 0;
3518
3519 if (code == LSHIFTRT || code == ASHIFTRT)
3520 rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
3521 code == ASHIFTRT);
3522 else if (code == ASHIFT)
3523 lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv, 1);
3524 else if (code == ROTATE)
3525 lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3526 else /* code == ROTATERT */
3527 rrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3528 break;
3529
3530 default:
3531 return 0;
3532 }
3533
3534 return immed_double_const (lv, hv, mode);
3535 }
3536
3537 if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT
3538 || width > HOST_BITS_PER_WIDE_INT || width == 0)
3539 {
3540 /* Even if we can't compute a constant result,
3541 there are some cases worth simplifying. */
3542
3543 switch (code)
3544 {
3545 case PLUS:
3546 /* In IEEE floating point, x+0 is not the same as x. Similarly
3547 for the other optimizations below. */
3548 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3549 && FLOAT_MODE_P (mode) && ! flag_fast_math)
3550 break;
3551
3552 if (op1 == CONST0_RTX (mode))
3553 return op0;
3554
3555 /* ((-a) + b) -> (b - a) and similarly for (a + (-b)) */
3556 if (GET_CODE (op0) == NEG)
3557 return cse_gen_binary (MINUS, mode, op1, XEXP (op0, 0));
3558 else if (GET_CODE (op1) == NEG)
3559 return cse_gen_binary (MINUS, mode, op0, XEXP (op1, 0));
3560
3561 /* Handle both-operands-constant cases. We can only add
3562 CONST_INTs to constants since the sum of relocatable symbols
3563 can't be handled by most assemblers. Don't add CONST_INT
3564 to CONST_INT since overflow won't be computed properly if wider
3565 than HOST_BITS_PER_WIDE_INT. */
3566
3567 if (CONSTANT_P (op0) && GET_MODE (op0) != VOIDmode
3568 && GET_CODE (op1) == CONST_INT)
3569 return plus_constant (op0, INTVAL (op1));
3570 else if (CONSTANT_P (op1) && GET_MODE (op1) != VOIDmode
3571 && GET_CODE (op0) == CONST_INT)
3572 return plus_constant (op1, INTVAL (op0));
3573
3574 /* See if this is something like X * C - X or vice versa or
3575 if the multiplication is written as a shift. If so, we can
3576 distribute and make a new multiply, shift, or maybe just
3577 have X (if C is 2 in the example above). But don't make
3578 a real multiply if we didn't have one before. */
3579
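	  /* Illustrative example: (plus (mult (reg X) (const_int 3)) (reg X))
	     yields coeff0 == 3 and coeff1 == 1 and so folds to
	     (mult (reg X) (const_int 4)); a shift such as
	     (ashift (reg X) (const_int 2)) enters the same path with
	     coeff0 == 4.  */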
3580 if (! FLOAT_MODE_P (mode))
3581 {
3582 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3583 rtx lhs = op0, rhs = op1;
3584 int had_mult = 0;
3585
3586 if (GET_CODE (lhs) == NEG)
3587 coeff0 = -1, lhs = XEXP (lhs, 0);
3588 else if (GET_CODE (lhs) == MULT
3589 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3590 {
3591 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3592 had_mult = 1;
3593 }
3594 else if (GET_CODE (lhs) == ASHIFT
3595 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3596 && INTVAL (XEXP (lhs, 1)) >= 0
3597 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3598 {
3599 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3600 lhs = XEXP (lhs, 0);
3601 }
3602
3603 if (GET_CODE (rhs) == NEG)
3604 coeff1 = -1, rhs = XEXP (rhs, 0);
3605 else if (GET_CODE (rhs) == MULT
3606 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3607 {
3608 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3609 had_mult = 1;
3610 }
3611 else if (GET_CODE (rhs) == ASHIFT
3612 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3613 && INTVAL (XEXP (rhs, 1)) >= 0
3614 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3615 {
3616 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3617 rhs = XEXP (rhs, 0);
3618 }
3619
3620 if (rtx_equal_p (lhs, rhs))
3621 {
3622 tem = cse_gen_binary (MULT, mode, lhs,
3623 GEN_INT (coeff0 + coeff1));
3624 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3625 }
3626 }
3627
3628 /* If one of the operands is a PLUS or a MINUS, see if we can
3629 simplify this by the associative law.
3630 Don't use the associative law for floating point.
3631 The inaccuracy makes it nonassociative,
3632 and subtle programs can break if operations are associated. */
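	  /* E.g. (plus:SI (plus:SI r (const_int 7)) (const_int -7))
	     collapses to just r (illustrative operands).  */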
3633
3634 if (INTEGRAL_MODE_P (mode)
3635 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3636 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3637 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3638 return tem;
3639 break;
3640
3641 case COMPARE:
3642 #ifdef HAVE_cc0
3643 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3644 using cc0, in which case we want to leave it as a COMPARE
3645 so we can distinguish it from a register-register-copy.
3646
3647 In IEEE floating point, x-0 is not the same as x. */
3648
3649 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3650 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3651 && op1 == CONST0_RTX (mode))
3652 return op0;
3653 #else
3654 /* Do nothing here. */
3655 #endif
3656 break;
3657
3658 case MINUS:
3659 /* None of these optimizations can be done for IEEE
3660 floating point. */
3661 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3662 && FLOAT_MODE_P (mode) && ! flag_fast_math)
3663 break;
3664
3665 /* We can't assume x-x is 0 even with non-IEEE floating point,
3666 but since it is zero except in very strange circumstances, we
3667 will treat it as zero with -ffast-math. */
3668 if (rtx_equal_p (op0, op1)
3669 && ! side_effects_p (op0)
3670 && (! FLOAT_MODE_P (mode) || flag_fast_math))
3671 return CONST0_RTX (mode);
3672
3673 /* Change subtraction from zero into negation. */
3674 if (op0 == CONST0_RTX (mode))
3675 return gen_rtx (NEG, mode, op1);
3676
3677 /* (-1 - a) is ~a. */
3678 if (op0 == constm1_rtx)
3679 return gen_rtx (NOT, mode, op1);
3680
3681 /* Subtracting 0 has no effect. */
3682 if (op1 == CONST0_RTX (mode))
3683 return op0;
3684
3685 /* See if this is something like X * C - X or vice versa or
3686 if the multiplication is written as a shift. If so, we can
3687 distribute and make a new multiply, shift, or maybe just
3688 have X (if C is 2 in the example above). But don't make
3689 real multiply if we didn't have one before. */
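	  /* Worked example (sample operands): (minus:SI (ashift:SI r
	     (const_int 1)) r) gives coeff0 = 2 and coeff1 = 1, so it
	     folds to (mult:SI r (const_int 1)), which simplifies to r.
	     By contrast, (r << 2) - r would yield a real
	     (mult:SI r (const_int 3)); since had_mult is 0 there, we
	     return 0 instead.  */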
3690
3691 if (! FLOAT_MODE_P (mode))
3692 {
3693 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3694 rtx lhs = op0, rhs = op1;
3695 int had_mult = 0;
3696
3697 if (GET_CODE (lhs) == NEG)
3698 coeff0 = -1, lhs = XEXP (lhs, 0);
3699 else if (GET_CODE (lhs) == MULT
3700 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3701 {
3702 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3703 had_mult = 1;
3704 }
3705 else if (GET_CODE (lhs) == ASHIFT
3706 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3707 && INTVAL (XEXP (lhs, 1)) >= 0
3708 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3709 {
3710 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3711 lhs = XEXP (lhs, 0);
3712 }
3713
3714 if (GET_CODE (rhs) == NEG)
3715 coeff1 = -1, rhs = XEXP (rhs, 0);

3716 else if (GET_CODE (rhs) == MULT
3717 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3718 {
3719 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3720 had_mult = 1;
3721 }
3722 else if (GET_CODE (rhs) == ASHIFT
3723 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3724 && INTVAL (XEXP (rhs, 1)) >= 0
3725 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3726 {
3727 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3728 rhs = XEXP (rhs, 0);
3729 }
3730
3731 if (rtx_equal_p (lhs, rhs))
3732 {
3733 tem = cse_gen_binary (MULT, mode, lhs,
3734 GEN_INT (coeff0 - coeff1));
3735 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3736 }
3737 }
3738
3739 /* (a - (-b)) -> (a + b). */
3740 if (GET_CODE (op1) == NEG)
3741 return cse_gen_binary (PLUS, mode, op0, XEXP (op1, 0));
3742
3743 /* If one of the operands is a PLUS or a MINUS, see if we can
3744 simplify this by the associative law.
3745 Don't use the associative law for floating point.
3746 The inaccuracy makes it nonassociative,
3747 and subtle programs can break if operations are associated. */
3748
3749 if (INTEGRAL_MODE_P (mode)
3750 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3751 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3752 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3753 return tem;
3754
3755 /* Don't let a relocatable value get a negative coeff. */
3756 if (GET_CODE (op1) == CONST_INT && GET_MODE (op0) != VOIDmode)
3757 return plus_constant (op0, - INTVAL (op1));
3758
3759 /* (x - (x & y)) -> (x & ~y) */
3760 if (GET_CODE (op1) == AND)
3761 {
3762 if (rtx_equal_p (op0, XEXP (op1, 0)))
3763 return cse_gen_binary (AND, mode, op0, gen_rtx (NOT, mode, XEXP (op1, 1)));
3764 if (rtx_equal_p (op0, XEXP (op1, 1)))
3765 return cse_gen_binary (AND, mode, op0, gen_rtx (NOT, mode, XEXP (op1, 0)));
3766 }
3767 break;
3768
3769 case MULT:
3770 if (op1 == constm1_rtx)
3771 {
3772 tem = simplify_unary_operation (NEG, mode, op0, mode);
3773
3774 return tem ? tem : gen_rtx (NEG, mode, op0);
3775 }
3776
3777 /* In IEEE floating point, x*0 is not always 0. */
3778 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3779 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3780 && op1 == CONST0_RTX (mode)
3781 && ! side_effects_p (op0))
3782 return op1;
3783
3784 /* In IEEE floating point, x*1 is not equivalent to x for NaNs.
3785 However, ANSI says we can drop signals,
3786 so we can do this anyway. */
3787 if (op1 == CONST1_RTX (mode))
3788 return op0;
3789
3790 /* Convert multiply by constant power of two into shift unless
3791 we are still generating RTL. This test is a kludge. */
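	  /* E.g. (mult:SI r (const_int 8)) becomes (ashift:SI r
	     (const_int 3)), since exact_log2 (8) is 3 (sample operands).  */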
3792 if (GET_CODE (op1) == CONST_INT
3793 && (val = exact_log2 (INTVAL (op1))) >= 0
3794 && ! rtx_equal_function_value_matters)
3795 return gen_rtx (ASHIFT, mode, op0, GEN_INT (val));
3796
3797 if (GET_CODE (op1) == CONST_DOUBLE
3798 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT)
3799 {
3800 REAL_VALUE_TYPE d;
3801 jmp_buf handler;
3802 int op1is2, op1ism1;
3803
3804 if (setjmp (handler))
3805 return 0;
3806
3807 set_float_handler (handler);
3808 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
3809 op1is2 = REAL_VALUES_EQUAL (d, dconst2);
3810 op1ism1 = REAL_VALUES_EQUAL (d, dconstm1);
3811 set_float_handler (NULL_PTR);
3812
3813 /* x*2 is x+x and x*(-1) is -x */
3814 if (op1is2 && GET_MODE (op0) == mode)
3815 return gen_rtx (PLUS, mode, op0, copy_rtx (op0));
3816
3817 else if (op1ism1 && GET_MODE (op0) == mode)
3818 return gen_rtx (NEG, mode, op0);
3819 }
3820 break;
3821
3822 case IOR:
3823 if (op1 == const0_rtx)
3824 return op0;
3825 if (GET_CODE (op1) == CONST_INT
3826 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3827 return op1;
3828 if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3829 return op0;
3830 /* A | (~A) -> -1 */
3831 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
3832 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
3833 && ! side_effects_p (op0)
3834 && GET_MODE_CLASS (mode) != MODE_CC)
3835 return constm1_rtx;
3836 break;
3837
3838 case XOR:
3839 if (op1 == const0_rtx)
3840 return op0;
3841 if (GET_CODE (op1) == CONST_INT
3842 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3843 return gen_rtx (NOT, mode, op0);
3844 if (op0 == op1 && ! side_effects_p (op0)
3845 && GET_MODE_CLASS (mode) != MODE_CC)
3846 return const0_rtx;
3847 break;
3848
3849 case AND:
3850 if (op1 == const0_rtx && ! side_effects_p (op0))
3851 return const0_rtx;
3852 if (GET_CODE (op1) == CONST_INT
3853 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3854 return op0;
3855 if (op0 == op1 && ! side_effects_p (op0)
3856 && GET_MODE_CLASS (mode) != MODE_CC)
3857 return op0;
3858 /* A & (~A) -> 0 */
3859 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
3860 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
3861 && ! side_effects_p (op0)
3862 && GET_MODE_CLASS (mode) != MODE_CC)
3863 return const0_rtx;
3864 break;
3865
3866 case UDIV:
3867 /* Convert divide by power of two into shift (divide by 1 handled
3868 below). */
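	  /* E.g. (udiv:SI r (const_int 16)) becomes (lshiftrt:SI r
	     (const_int 4)) (sample operands).  */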
3869 if (GET_CODE (op1) == CONST_INT
3870 && (arg1 = exact_log2 (INTVAL (op1))) > 0)
3871 return gen_rtx (LSHIFTRT, mode, op0, GEN_INT (arg1));
3872
3873 /* ... fall through ... */
3874
3875 case DIV:
3876 if (op1 == CONST1_RTX (mode))
3877 return op0;
3878
3879 /* In IEEE floating point, 0/x is not always 0. */
3880 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3881 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3882 && op0 == CONST0_RTX (mode)
3883 && ! side_effects_p (op1))
3884 return op0;
3885
3886 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3887 /* Change division by a constant into multiplication. Only do
3888 this with -ffast-math until an expert says it is safe in
3889 general. */
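	  /* Illustration (sample operands): with -ffast-math,
	     (div:DF x (const_double 4.0)) becomes
	     (mult:DF x (const_double 0.25)).  The reciprocal is exact
	     only for powers of two, hence the flag_fast_math guard.  */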
3890 else if (GET_CODE (op1) == CONST_DOUBLE
3891 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT
3892 && op1 != CONST0_RTX (mode)
3893 && flag_fast_math)
3894 {
3895 REAL_VALUE_TYPE d;
3896 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
3897
3898 if (! REAL_VALUES_EQUAL (d, dconst0))
3899 {
3900 #if defined (REAL_ARITHMETIC)
3901 REAL_ARITHMETIC (d, rtx_to_tree_code (DIV), dconst1, d);
3902 return gen_rtx (MULT, mode, op0,
3903 CONST_DOUBLE_FROM_REAL_VALUE (d, mode));
3904 #else
3905 return gen_rtx (MULT, mode, op0,
3906 CONST_DOUBLE_FROM_REAL_VALUE (1./d, mode));
3907 #endif
3908 }
3909 }
3910 #endif
3911 break;
3912
3913 case UMOD:
3914 /* Handle modulus by power of two (mod with 1 handled below). */
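	  /* E.g. (umod:SI r (const_int 8)) becomes (and:SI r
	     (const_int 7)) (sample operands).  */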
3915 if (GET_CODE (op1) == CONST_INT
3916 && exact_log2 (INTVAL (op1)) > 0)
3917 return gen_rtx (AND, mode, op0, GEN_INT (INTVAL (op1) - 1));
3918
3919 /* ... fall through ... */
3920
3921 case MOD:
3922 if ((op0 == const0_rtx || op1 == const1_rtx)
3923 && ! side_effects_p (op0) && ! side_effects_p (op1))
3924 return const0_rtx;
3925 break;
3926
3927 case ROTATERT:
3928 case ROTATE:
3929 /* Rotating ~0 always results in ~0. */
3930 if (GET_CODE (op0) == CONST_INT && width <= HOST_BITS_PER_WIDE_INT
3931 && INTVAL (op0) == GET_MODE_MASK (mode)
3932 && ! side_effects_p (op1))
3933 return op0;
3934
3935 /* ... fall through ... */
3936
3937 case ASHIFT:
3938 case ASHIFTRT:
3939 case LSHIFTRT:
3940 if (op1 == const0_rtx)
3941 return op0;
3942 if (op0 == const0_rtx && ! side_effects_p (op1))
3943 return op0;
3944 break;
3945
3946 case SMIN:
3947 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
3948 && INTVAL (op1) == (HOST_WIDE_INT) 1 << (width - 1)
3949 && ! side_effects_p (op0))
3950 return op1;
3951 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3952 return op0;
3953 break;
3954
3955 case SMAX:
3956 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
3957 && (INTVAL (op1)
3958 == (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode) >> 1)
3959 && ! side_effects_p (op0))
3960 return op1;
3961 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3962 return op0;
3963 break;
3964
3965 case UMIN:
3966 if (op1 == const0_rtx && ! side_effects_p (op0))
3967 return op1;
3968 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3969 return op0;
3970 break;
3971
3972 case UMAX:
3973 if (op1 == constm1_rtx && ! side_effects_p (op0))
3974 return op1;
3975 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3976 return op0;
3977 break;
3978
3979 default:
3980 abort ();
3981 }
3982
3983 return 0;
3984 }
3985
3986 /* Get the integer argument values in two forms:
3987 zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S. */
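  /* For instance, with width == 8 and INTVAL (op0) == 0xff (sample
     values), arg0 becomes 255 while arg0s becomes -1.  */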
3988
3989 arg0 = INTVAL (op0);
3990 arg1 = INTVAL (op1);
3991
3992 if (width < HOST_BITS_PER_WIDE_INT)
3993 {
3994 arg0 &= ((HOST_WIDE_INT) 1 << width) - 1;
3995 arg1 &= ((HOST_WIDE_INT) 1 << width) - 1;
3996
3997 arg0s = arg0;
3998 if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1)))
3999 arg0s |= ((HOST_WIDE_INT) (-1) << width);
4000
4001 arg1s = arg1;
4002 if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4003 arg1s |= ((HOST_WIDE_INT) (-1) << width);
4004 }
4005 else
4006 {
4007 arg0s = arg0;
4008 arg1s = arg1;
4009 }
4010
4011 /* Compute the value of the arithmetic. */
4012
4013 switch (code)
4014 {
4015 case PLUS:
4016 val = arg0s + arg1s;
4017 break;
4018
4019 case MINUS:
4020 val = arg0s - arg1s;
4021 break;
4022
4023 case MULT:
4024 val = arg0s * arg1s;
4025 break;
4026
4027 case DIV:
4028 if (arg1s == 0)
4029 return 0;
4030 val = arg0s / arg1s;
4031 break;
4032
4033 case MOD:
4034 if (arg1s == 0)
4035 return 0;
4036 val = arg0s % arg1s;
4037 break;
4038
4039 case UDIV:
4040 if (arg1 == 0)
4041 return 0;
4042 val = (unsigned HOST_WIDE_INT) arg0 / arg1;
4043 break;
4044
4045 case UMOD:
4046 if (arg1 == 0)
4047 return 0;
4048 val = (unsigned HOST_WIDE_INT) arg0 % arg1;
4049 break;
4050
4051 case AND:
4052 val = arg0 & arg1;
4053 break;
4054
4055 case IOR:
4056 val = arg0 | arg1;
4057 break;
4058
4059 case XOR:
4060 val = arg0 ^ arg1;
4061 break;
4062
4063 case LSHIFTRT:
4064 /* If shift count is undefined, don't fold it; let the machine do
4065 what it wants. But truncate it if the machine will do that. */
4066 if (arg1 < 0)
4067 return 0;
4068
4069 #ifdef SHIFT_COUNT_TRUNCATED
4070 if (SHIFT_COUNT_TRUNCATED)
4071 arg1 %= width;
4072 #endif
4073
4074 val = ((unsigned HOST_WIDE_INT) arg0) >> arg1;
4075 break;
4076
4077 case ASHIFT:
4078 if (arg1 < 0)
4079 return 0;
4080
4081 #ifdef SHIFT_COUNT_TRUNCATED
4082 if (SHIFT_COUNT_TRUNCATED)
4083 arg1 %= width;
4084 #endif
4085
4086 val = ((unsigned HOST_WIDE_INT) arg0) << arg1;
4087 break;
4088
4089 case ASHIFTRT:
4090 if (arg1 < 0)
4091 return 0;
4092
4093 #ifdef SHIFT_COUNT_TRUNCATED
4094 if (SHIFT_COUNT_TRUNCATED)
4095 arg1 %= width;
4096 #endif
4097
4098 val = arg0s >> arg1;
4099
4100 /* The bootstrap compiler may not have sign-extended the right shift.
4101 Manually extend the sign to ensure the bootstrap cc matches gcc. */
4102 if (arg0s < 0 && arg1 > 0)
4103 val |= ((HOST_WIDE_INT) -1) << (HOST_BITS_PER_WIDE_INT - arg1);
4104
4105 break;
4106
4107 case ROTATERT:
4108 if (arg1 < 0)
4109 return 0;
4110
4111 arg1 %= width;
4112 val = ((((unsigned HOST_WIDE_INT) arg0) << (width - arg1))
4113 | (((unsigned HOST_WIDE_INT) arg0) >> arg1));
4114 break;
4115
4116 case ROTATE:
4117 if (arg1 < 0)
4118 return 0;
4119
4120 arg1 %= width;
4121 val = ((((unsigned HOST_WIDE_INT) arg0) << arg1)
4122 | (((unsigned HOST_WIDE_INT) arg0) >> (width - arg1)));
4123 break;
4124
4125 case COMPARE:
4126 /* Do nothing here. */
4127 return 0;
4128
4129 case SMIN:
4130 val = arg0s <= arg1s ? arg0s : arg1s;
4131 break;
4132
4133 case UMIN:
4134 val = ((unsigned HOST_WIDE_INT) arg0
4135 <= (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4136 break;
4137
4138 case SMAX:
4139 val = arg0s > arg1s ? arg0s : arg1s;
4140 break;
4141
4142 case UMAX:
4143 val = ((unsigned HOST_WIDE_INT) arg0
4144 > (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4145 break;
4146
4147 default:
4148 abort ();
4149 }
4150
4151 /* Clear the bits that don't belong in our mode, unless they and our sign
4152 bit are all one. So we get either a reasonable negative value or a
4153 reasonable unsigned value for this mode. */
4154 if (width < HOST_BITS_PER_WIDE_INT
4155 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4156 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4157 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4158
4159 /* If this would be an entire word for the target, but is not for
4160 the host, then sign-extend on the host so that the number will look
4161 the same way on the host that it would on the target.
4162
4163 For example, when building a 64 bit alpha hosted 32 bit sparc
4164 targeted compiler, then we want the 32 bit unsigned value -1 to be
4165 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
4166 The latter confuses the sparc backend. */
4167
4168 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
4169 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
4170 val |= ((HOST_WIDE_INT) (-1) << width);
4171
4172 return GEN_INT (val);
4173 }
4174 \f
4175 /* Simplify a PLUS or MINUS, at least one of whose operands may be another
4176 PLUS or MINUS.
4177
4178 Rather than test for specific cases, we do this by a brute-force method
4179 and do all possible simplifications until no more changes occur. Then
4180 we rebuild the operation. */
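/* A sketch of the expansion on a hypothetical input: (minus a (plus b c))
   first becomes the operand list {a, b, c} with negation flags {0, 1, 1};
   the pairwise loop below then folds whatever pairs simplify, and the
   survivors are reassembled with PLUS and MINUS.  */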
4181
4182 static rtx
4183 simplify_plus_minus (code, mode, op0, op1)
4184 enum rtx_code code;
4185 enum machine_mode mode;
4186 rtx op0, op1;
4187 {
4188 rtx ops[8];
4189 int negs[8];
4190 rtx result, tem;
4191 int n_ops = 2, input_ops = 2, input_consts = 0, n_consts = 0;
4192 int first = 1, negate = 0, changed;
4193 int i, j;
4194
4195 bzero ((char *) ops, sizeof ops);
4196
4197 /* Set up the two operands and then expand them until nothing has been
4198 changed. If we run out of room in our array, give up; this should
4199 almost never happen. */
4200
4201 ops[0] = op0, ops[1] = op1, negs[0] = 0, negs[1] = (code == MINUS);
4202
4203 changed = 1;
4204 while (changed)
4205 {
4206 changed = 0;
4207
4208 for (i = 0; i < n_ops; i++)
4209 switch (GET_CODE (ops[i]))
4210 {
4211 case PLUS:
4212 case MINUS:
4213 if (n_ops == 7)
4214 return 0;
4215
4216 ops[n_ops] = XEXP (ops[i], 1);
4217 negs[n_ops++] = GET_CODE (ops[i]) == MINUS ? !negs[i] : negs[i];
4218 ops[i] = XEXP (ops[i], 0);
4219 input_ops++;
4220 changed = 1;
4221 break;
4222
4223 case NEG:
4224 ops[i] = XEXP (ops[i], 0);
4225 negs[i] = ! negs[i];
4226 changed = 1;
4227 break;
4228
4229 case CONST:
4230 ops[i] = XEXP (ops[i], 0);
4231 input_consts++;
4232 changed = 1;
4233 break;
4234
4235 case NOT:
4236 /* ~a -> (-a - 1) */
4237 if (n_ops != 7)
4238 {
4239 ops[n_ops] = constm1_rtx;
4240 negs[n_ops++] = negs[i];
4241 ops[i] = XEXP (ops[i], 0);
4242 negs[i] = ! negs[i];
4243 changed = 1;
4244 }
4245 break;
4246
4247 case CONST_INT:
4248 if (negs[i])
4249 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0, changed = 1;
4250 break;
4251 }
4252 }
4253
4254 /* If we only have two operands, we can't do anything. */
4255 if (n_ops <= 2)
4256 return 0;
4257
4258 /* Now simplify each pair of operands until nothing changes. The first
4259 time through just simplify constants against each other. */
4260
4261 changed = 1;
4262 while (changed)
4263 {
4264 changed = first;
4265
4266 for (i = 0; i < n_ops - 1; i++)
4267 for (j = i + 1; j < n_ops; j++)
4268 if (ops[i] != 0 && ops[j] != 0
4269 && (! first || (CONSTANT_P (ops[i]) && CONSTANT_P (ops[j]))))
4270 {
4271 rtx lhs = ops[i], rhs = ops[j];
4272 enum rtx_code ncode = PLUS;
4273
4274 if (negs[i] && ! negs[j])
4275 lhs = ops[j], rhs = ops[i], ncode = MINUS;
4276 else if (! negs[i] && negs[j])
4277 ncode = MINUS;
4278
4279 tem = simplify_binary_operation (ncode, mode, lhs, rhs);
4280 if (tem)
4281 {
4282 ops[i] = tem, ops[j] = 0;
4283 negs[i] = negs[i] && negs[j];
4284 if (GET_CODE (tem) == NEG)
4285 ops[i] = XEXP (tem, 0), negs[i] = ! negs[i];
4286
4287 if (GET_CODE (ops[i]) == CONST_INT && negs[i])
4288 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0;
4289 changed = 1;
4290 }
4291 }
4292
4293 first = 0;
4294 }
4295
4296 /* Pack all the operands to the lower-numbered entries and give up if
4297 we didn't reduce the number of operands we had. Make sure we
4298 count a CONST as two operands. If we have the same number of
4299 operands, but have made more CONSTs than we had, this is also
4300 an improvement, so accept it. */
4301
4302 for (i = 0, j = 0; j < n_ops; j++)
4303 if (ops[j] != 0)
4304 {
4305 ops[i] = ops[j], negs[i++] = negs[j];
4306 if (GET_CODE (ops[j]) == CONST)
4307 n_consts++;
4308 }
4309
4310 if (i + n_consts > input_ops
4311 || (i + n_consts == input_ops && n_consts <= input_consts))
4312 return 0;
4313
4314 n_ops = i;
4315
4316 /* If we have a CONST_INT, put it last. */
4317 for (i = 0; i < n_ops - 1; i++)
4318 if (GET_CODE (ops[i]) == CONST_INT)
4319 {
4320 tem = ops[n_ops - 1], ops[n_ops - 1] = ops[i] , ops[i] = tem;
4321 j = negs[n_ops - 1], negs[n_ops - 1] = negs[i], negs[i] = j;
4322 }
4323
4324 /* Put a non-negated operand first. If there aren't any, make all
4325 operands positive and negate the whole thing later. */
4326 for (i = 0; i < n_ops && negs[i]; i++)
4327 ;
4328
4329 if (i == n_ops)
4330 {
4331 for (i = 0; i < n_ops; i++)
4332 negs[i] = 0;
4333 negate = 1;
4334 }
4335 else if (i != 0)
4336 {
4337 tem = ops[0], ops[0] = ops[i], ops[i] = tem;
4338 j = negs[0], negs[0] = negs[i], negs[i] = j;
4339 }
4340
4341 /* Now make the result by performing the requested operations. */
4342 result = ops[0];
4343 for (i = 1; i < n_ops; i++)
4344 result = cse_gen_binary (negs[i] ? MINUS : PLUS, mode, result, ops[i]);
4345
4346 return negate ? gen_rtx (NEG, mode, result) : result;
4347 }
4348 \f
4349 /* Make a binary operation by properly ordering the operands and
4350 seeing if the expression folds. */
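/* Illustrative call (sample operands): cse_gen_binary (PLUS, SImode,
   <const_int 4>, <reg>) first swaps the operands, since PLUS is
   commutative and constants go last, and then returns
   (plus:SI <reg> (const_int 4)) via plus_constant.  */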
4351
4352 static rtx
4353 cse_gen_binary (code, mode, op0, op1)
4354 enum rtx_code code;
4355 enum machine_mode mode;
4356 rtx op0, op1;
4357 {
4358 rtx tem;
4359
4360 /* Put complex operands first and constants second if commutative. */
4361 if (GET_RTX_CLASS (code) == 'c'
4362 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
4363 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
4364 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
4365 || (GET_CODE (op0) == SUBREG
4366 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
4367 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
4368 tem = op0, op0 = op1, op1 = tem;
4369
4370 /* If this simplifies, do it. */
4371 tem = simplify_binary_operation (code, mode, op0, op1);
4372
4373 if (tem)
4374 return tem;
4375
4376 /* Handle addition and subtraction of CONST_INT specially. Otherwise,
4377 just form the operation. */
4378
4379 if (code == PLUS && GET_CODE (op1) == CONST_INT
4380 && GET_MODE (op0) != VOIDmode)
4381 return plus_constant (op0, INTVAL (op1));
4382 else if (code == MINUS && GET_CODE (op1) == CONST_INT
4383 && GET_MODE (op0) != VOIDmode)
4384 return plus_constant (op0, - INTVAL (op1));
4385 else
4386 return gen_rtx (code, mode, op0, op1);
4387 }
4388 \f
4389 /* Like simplify_binary_operation except used for relational operators.
4390 MODE is the mode of the operands, not that of the result. If MODE
4391 is VOIDmode, both operands must also be VOIDmode and we compare the
4392 operands in "infinite precision".
4393
4394 If no simplification is possible, this function returns zero. Otherwise,
4395 it returns either const_true_rtx or const0_rtx. */
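/* For instance (sample operands): comparing (const_int 5) against
   (const_int 3) yields const_true_rtx for GT and const0_rtx for LEU.  */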
4396
4397 rtx
4398 simplify_relational_operation (code, mode, op0, op1)
4399 enum rtx_code code;
4400 enum machine_mode mode;
4401 rtx op0, op1;
4402 {
4403 int equal, op0lt, op0ltu, op1lt, op1ltu;
4404 rtx tem;
4405
4406 /* If op0 is a compare, extract the comparison arguments from it. */
4407 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
4408 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4409
4410 /* We can't simplify MODE_CC values since we don't know what the
4411 actual comparison is. */
4412 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC
4413 #ifdef HAVE_cc0
4414 || op0 == cc0_rtx
4415 #endif
4416 )
4417 return 0;
4418
4419 /* For integer comparisons of A and B maybe we can simplify A - B and can
4420 then simplify a comparison of that with zero. If A and B are both either
4421 a register or a CONST_INT, this can't help; testing for these cases will
4422 prevent infinite recursion here and speed things up.
4423
4424 If CODE is an unsigned comparison, then we can never do this optimization,
4425 because it gives an incorrect result if the subtraction wraps around zero.
4426 ANSI C defines unsigned operations such that they never overflow, and
4427 thus such cases cannot be ignored. */
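  /* A concrete failure case for the unsigned codes (sample SImode
     values): 0 LTU 0xffffffff is true, but 0 - 0xffffffff wraps to 1,
     and the signed test 1 < 0 is false.  */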
4428
4429 if (INTEGRAL_MODE_P (mode) && op1 != const0_rtx
4430 && ! ((GET_CODE (op0) == REG || GET_CODE (op0) == CONST_INT)
4431 && (GET_CODE (op1) == REG || GET_CODE (op1) == CONST_INT))
4432 && 0 != (tem = simplify_binary_operation (MINUS, mode, op0, op1))
4433 && code != GTU && code != GEU && code != LTU && code != LEU)
4434 return simplify_relational_operation (signed_condition (code),
4435 mode, tem, const0_rtx);
4436
4437 /* For non-IEEE floating-point, if the two operands are equal, we know the
4438 result. */
4439 if (rtx_equal_p (op0, op1)
4440 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4441 || ! FLOAT_MODE_P (GET_MODE (op0)) || flag_fast_math))
4442 equal = 1, op0lt = 0, op0ltu = 0, op1lt = 0, op1ltu = 0;
4443
4444 /* If the operands are floating-point constants, see if we can fold
4445 the result. */
4446 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4447 else if (GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
4448 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
4449 {
4450 REAL_VALUE_TYPE d0, d1;
4451 jmp_buf handler;
4452
4453 if (setjmp (handler))
4454 return 0;
4455
4456 set_float_handler (handler);
4457 REAL_VALUE_FROM_CONST_DOUBLE (d0, op0);
4458 REAL_VALUE_FROM_CONST_DOUBLE (d1, op1);
4459 equal = REAL_VALUES_EQUAL (d0, d1);
4460 op0lt = op0ltu = REAL_VALUES_LESS (d0, d1);
4461 op1lt = op1ltu = REAL_VALUES_LESS (d1, d0);
4462 set_float_handler (NULL_PTR);
4463 }
4464 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4465
4466 /* Otherwise, see if the operands are both integers. */
4467 else if ((GET_MODE_CLASS (mode) == MODE_INT || mode == VOIDmode)
4468 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
4469 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
4470 {
4471 int width = GET_MODE_BITSIZE (mode);
4472 HOST_WIDE_INT l0s, h0s, l1s, h1s;
4473 unsigned HOST_WIDE_INT l0u, h0u, l1u, h1u;
4474
4475 /* Get the two words comprising each integer constant. */
4476 if (GET_CODE (op0) == CONST_DOUBLE)
4477 {
4478 l0u = l0s = CONST_DOUBLE_LOW (op0);
4479 h0u = h0s = CONST_DOUBLE_HIGH (op0);
4480 }
4481 else
4482 {
4483 l0u = l0s = INTVAL (op0);
4484 h0u = 0, h0s = l0s < 0 ? -1 : 0;
4485 }
4486
4487 if (GET_CODE (op1) == CONST_DOUBLE)
4488 {
4489 l1u = l1s = CONST_DOUBLE_LOW (op1);
4490 h1u = h1s = CONST_DOUBLE_HIGH (op1);
4491 }
4492 else
4493 {
4494 l1u = l1s = INTVAL (op1);
4495 h1u = 0, h1s = l1s < 0 ? -1 : 0;
4496 }
4497
4498 /* If WIDTH is nonzero and no wider than HOST_BITS_PER_WIDE_INT,
4499 we have to sign or zero-extend the values. */
4500 if (width != 0 && width <= HOST_BITS_PER_WIDE_INT)
4501 h0u = h1u = 0, h0s = l0s < 0 ? -1 : 0, h1s = l1s < 0 ? -1 : 0;
4502
4503 if (width != 0 && width < HOST_BITS_PER_WIDE_INT)
4504 {
4505 l0u &= ((HOST_WIDE_INT) 1 << width) - 1;
4506 l1u &= ((HOST_WIDE_INT) 1 << width) - 1;
4507
4508 if (l0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4509 l0s |= ((HOST_WIDE_INT) (-1) << width);
4510
4511 if (l1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4512 l1s |= ((HOST_WIDE_INT) (-1) << width);
4513 }
4514
4515 equal = (h0u == h1u && l0u == l1u);
4516 op0lt = (h0s < h1s || (h0s == h1s && l0s < l1s));
4517 op1lt = (h1s < h0s || (h1s == h0s && l1s < l0s));
4518 op0ltu = (h0u < h1u || (h0u == h1u && l0u < l1u));
4519 op1ltu = (h1u < h0u || (h1u == h0u && l1u < l0u));
4520 }
4521
4522 /* Otherwise, there are some code-specific tests we can make. */
4523 else
4524 {
4525 switch (code)
4526 {
4527 case EQ:
4528 /* References to the frame plus a constant or labels cannot
4529 be zero, but a SYMBOL_REF can due to #pragma weak. */
4530 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4531 || GET_CODE (op0) == LABEL_REF)
4532 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4533 /* On some machines, the ap reg can be 0 sometimes. */
4534 && op0 != arg_pointer_rtx
4535 #endif
4536 )
4537 return const0_rtx;
4538 break;
4539
4540 case NE:
4541 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4542 || GET_CODE (op0) == LABEL_REF)
4543 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4544 && op0 != arg_pointer_rtx
4545 #endif
4546 )
4547 return const_true_rtx;
4548 break;
4549
4550 case GEU:
4551 /* Unsigned values are never negative. */
4552 if (op1 == const0_rtx)
4553 return const_true_rtx;
4554 break;
4555
4556 case LTU:
4557 if (op1 == const0_rtx)
4558 return const0_rtx;
4559 break;
4560
4561 case LEU:
4562 /* Unsigned values are never greater than the largest
4563 unsigned value. */
4564 if (GET_CODE (op1) == CONST_INT
4565 && INTVAL (op1) == GET_MODE_MASK (mode)
4566 && INTEGRAL_MODE_P (mode))
4567 return const_true_rtx;
4568 break;
4569
4570 case GTU:
4571 if (GET_CODE (op1) == CONST_INT
4572 && INTVAL (op1) == GET_MODE_MASK (mode)
4573 && INTEGRAL_MODE_P (mode))
4574 return const0_rtx;
4575 break;
4576 }
4577
4578 return 0;
4579 }
4580
4581 /* If we reach here, EQUAL, OP0LT, OP0LTU, OP1LT, and OP1LTU are set
4582 as appropriate. */
4583 switch (code)
4584 {
4585 case EQ:
4586 return equal ? const_true_rtx : const0_rtx;
4587 case NE:
4588 return ! equal ? const_true_rtx : const0_rtx;
4589 case LT:
4590 return op0lt ? const_true_rtx : const0_rtx;
4591 case GT:
4592 return op1lt ? const_true_rtx : const0_rtx;
4593 case LTU:
4594 return op0ltu ? const_true_rtx : const0_rtx;
4595 case GTU:
4596 return op1ltu ? const_true_rtx : const0_rtx;
4597 case LE:
4598 return equal || op0lt ? const_true_rtx : const0_rtx;
4599 case GE:
4600 return equal || op1lt ? const_true_rtx : const0_rtx;
4601 case LEU:
4602 return equal || op0ltu ? const_true_rtx : const0_rtx;
4603 case GEU:
4604 return equal || op1ltu ? const_true_rtx : const0_rtx;
4605 }
4606
4607 abort ();
4608 }
4609 \f
4610 /* Simplify CODE, an operation with result mode MODE and three operands,
4611 OP0, OP1, and OP2. OP0_MODE was the mode of OP0 before it became
4612 a constant. Return 0 if no simplification is possible. */
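/* For example (sample operands, assuming BITS_BIG_ENDIAN is 0):
   (zero_extract:SI (const_int 0xab) (const_int 4) (const_int 4))
   extracts the four bits starting at bit 4 and folds to (const_int 0xa).  */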
4613
4614 rtx
4615 simplify_ternary_operation (code, mode, op0_mode, op0, op1, op2)
4616 enum rtx_code code;
4617 enum machine_mode mode, op0_mode;
4618 rtx op0, op1, op2;
4619 {
4620 int width = GET_MODE_BITSIZE (mode);
4621
4622 /* VOIDmode means "infinite" precision. */
4623 if (width == 0)
4624 width = HOST_BITS_PER_WIDE_INT;
4625
4626 switch (code)
4627 {
4628 case SIGN_EXTRACT:
4629 case ZERO_EXTRACT:
4630 if (GET_CODE (op0) == CONST_INT
4631 && GET_CODE (op1) == CONST_INT
4632 && GET_CODE (op2) == CONST_INT
4633 && INTVAL (op1) + INTVAL (op2) <= GET_MODE_BITSIZE (op0_mode)
4634 && width <= HOST_BITS_PER_WIDE_INT)
4635 {
4636 /* Extracting a bit-field from a constant */
4637 HOST_WIDE_INT val = INTVAL (op0);
4638
4639 if (BITS_BIG_ENDIAN)
4640 val >>= (GET_MODE_BITSIZE (op0_mode)
4641 - INTVAL (op2) - INTVAL (op1));
4642 else
4643 val >>= INTVAL (op2);
4644
4645 if (HOST_BITS_PER_WIDE_INT != INTVAL (op1))
4646 {
4647 /* First zero-extend. */
4648 val &= ((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1;
4649 /* If desired, propagate sign bit. */
4650 if (code == SIGN_EXTRACT
4651 && (val & ((HOST_WIDE_INT) 1 << (INTVAL (op1) - 1))))
4652 val |= ~ (((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1);
4653 }
4654
4655 /* Clear the bits that don't belong in our mode,
4656 unless they and our sign bit are all one.
4657 So we get either a reasonable negative value or a reasonable
4658 unsigned value for this mode. */
4659 if (width < HOST_BITS_PER_WIDE_INT
4660 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4661 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4662 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4663
4664 return GEN_INT (val);
4665 }
4666 break;
4667
4668 case IF_THEN_ELSE:
4669 if (GET_CODE (op0) == CONST_INT)
4670 return op0 != const0_rtx ? op1 : op2;
4671 break;
4672
4673 default:
4674 abort ();
4675 }
4676
4677 return 0;
4678 }
4679 \f
4680 /* If X is a nontrivial arithmetic operation on an argument
4681 for which a constant value can be determined, return
4682 the result of operating on that value, as a constant.
4683 Otherwise, return X, possibly with one or more operands
4684 modified by recursive calls to this function.
4685
4686 If X is a register whose contents are known, we do NOT
4687 return those contents here. equiv_constant is called to
4688 perform that task.
4689
4690 INSN is the insn that we may be modifying. If it is 0, make a copy
4691 of X before modifying it. */
4692
4693 static rtx
4694 fold_rtx (x, insn)
4695 rtx x;
4696 rtx insn;
4697 {
4698 register enum rtx_code code;
4699 register enum machine_mode mode;
4700 register char *fmt;
4701 register int i;
4702 rtx new = 0;
4703 int copied = 0;
4704 int must_swap = 0;
4705
4706 /* Folded equivalents of first two operands of X. */
4707 rtx folded_arg0;
4708 rtx folded_arg1;
4709
4710 /* Constant equivalents of first three operands of X;
4711 0 when no such equivalent is known. */
4712 rtx const_arg0;
4713 rtx const_arg1;
4714 rtx const_arg2;
4715
4716 /* The mode of the first operand of X. We need this for sign and zero
4717 extends. */
4718 enum machine_mode mode_arg0;
4719
4720 if (x == 0)
4721 return x;
4722
4723 mode = GET_MODE (x);
4724 code = GET_CODE (x);
4725 switch (code)
4726 {
4727 case CONST:
4728 case CONST_INT:
4729 case CONST_DOUBLE:
4730 case SYMBOL_REF:
4731 case LABEL_REF:
4732 case REG:
4733 /* No use simplifying an EXPR_LIST
4734 since they are used only for lists of args
4735 in a function call's REG_EQUAL note. */
4736 case EXPR_LIST:
4737 return x;
4738
4739 #ifdef HAVE_cc0
4740 case CC0:
4741 return prev_insn_cc0;
4742 #endif
4743
4744 case PC:
4745 /* If the next insn is a CODE_LABEL followed by a jump table,
4746 PC's value is a LABEL_REF pointing to that label. That
4747 lets us fold switch statements on the Vax. */
4748 if (insn && GET_CODE (insn) == JUMP_INSN)
4749 {
4750 rtx next = next_nonnote_insn (insn);
4751
4752 if (next && GET_CODE (next) == CODE_LABEL
4753 && NEXT_INSN (next) != 0
4754 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
4755 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
4756 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
4757 return gen_rtx (LABEL_REF, Pmode, next);
4758 }
4759 break;
4760
4761 case SUBREG:
4762 /* See if we previously assigned a constant value to this SUBREG. */
4763 if ((new = lookup_as_function (x, CONST_INT)) != 0
4764 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
4765 return new;
4766
4767 /* If this is a paradoxical SUBREG, we have no idea what value the
4768 extra bits would have. However, if the operand is equivalent
4769 to a SUBREG whose operand is the same as our mode, and all the
4770 modes are within a word, we can just use the inner operand
4771 because these SUBREGs just say how to treat the register.
4772
4773 Similarly if we find an integer constant. */
4774
4775 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4776 {
4777 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
4778 struct table_elt *elt;
4779
4780 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
4781 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
4782 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
4783 imode)) != 0)
4784 for (elt = elt->first_same_value;
4785 elt; elt = elt->next_same_value)
4786 {
4787 if (CONSTANT_P (elt->exp)
4788 && GET_MODE (elt->exp) == VOIDmode)
4789 return elt->exp;
4790
4791 if (GET_CODE (elt->exp) == SUBREG
4792 && GET_MODE (SUBREG_REG (elt->exp)) == mode
4793 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
4794 return copy_rtx (SUBREG_REG (elt->exp));
4795 }
4796
4797 return x;
4798 }
4799
4800 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
4801 We might be able to if the SUBREG is extracting a single word in an
4802 integral mode or extracting the low part. */
4803
4804 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
4805 const_arg0 = equiv_constant (folded_arg0);
4806 if (const_arg0)
4807 folded_arg0 = const_arg0;
4808
4809 if (folded_arg0 != SUBREG_REG (x))
4810 {
4811 new = 0;
4812
4813 if (GET_MODE_CLASS (mode) == MODE_INT
4814 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
4815 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
4816 new = operand_subword (folded_arg0, SUBREG_WORD (x), 0,
4817 GET_MODE (SUBREG_REG (x)));
4818 if (new == 0 && subreg_lowpart_p (x))
4819 new = gen_lowpart_if_possible (mode, folded_arg0);
4820 if (new)
4821 return new;
4822 }
4823
4824 /* If this is a narrowing SUBREG and our operand is a REG, see if
4825 we can find an equivalence for REG that is an arithmetic operation
4826 in a wider mode where both operands are paradoxical SUBREGs
4827 from objects of our result mode. In that case, we couldn't report
4828 an equivalent value for that operation, since we don't know what the
4829 extra bits will be. But we can find an equivalence for this SUBREG
4830 by folding that operation in the narrow mode. This allows us to
4831 fold arithmetic in narrow modes when the machine only supports
4832 word-sized arithmetic.
4833
4834 Also look for a case where we have a SUBREG whose operand is the
4835 same as our result. If both modes are smaller than a word, we
4836 are simply interpreting a register in different modes and we
4837 can use the inner value. */
4838
4839 if (GET_CODE (folded_arg0) == REG
4840 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
4841 && subreg_lowpart_p (x))
4842 {
4843 struct table_elt *elt;
4844
4845 /* We can use HASH here since we know that canon_hash won't be
4846 called. */
4847 elt = lookup (folded_arg0,
4848 HASH (folded_arg0, GET_MODE (folded_arg0)),
4849 GET_MODE (folded_arg0));
4850
4851 if (elt)
4852 elt = elt->first_same_value;
4853
4854 for (; elt; elt = elt->next_same_value)
4855 {
4856 enum rtx_code eltcode = GET_CODE (elt->exp);
4857
4858 /* Just check for unary and binary operations. */
4859 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
4860 && GET_CODE (elt->exp) != SIGN_EXTEND
4861 && GET_CODE (elt->exp) != ZERO_EXTEND
4862 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
4863 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
4864 {
4865 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
4866
4867 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
4868 op0 = fold_rtx (op0, NULL_RTX);
4869
4870 op0 = equiv_constant (op0);
4871 if (op0)
4872 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
4873 op0, mode);
4874 }
4875 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
4876 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
4877 && eltcode != DIV && eltcode != MOD
4878 && eltcode != UDIV && eltcode != UMOD
4879 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
4880 && eltcode != ROTATE && eltcode != ROTATERT
4881 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
4882 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
4883 == mode))
4884 || CONSTANT_P (XEXP (elt->exp, 0)))
4885 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
4886 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
4887 == mode))
4888 || CONSTANT_P (XEXP (elt->exp, 1))))
4889 {
4890 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
4891 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
4892
4893 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
4894 op0 = fold_rtx (op0, NULL_RTX);
4895
4896 if (op0)
4897 op0 = equiv_constant (op0);
4898
4899 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
4900 op1 = fold_rtx (op1, NULL_RTX);
4901
4902 if (op1)
4903 op1 = equiv_constant (op1);
4904
4905 /* If we are looking for the low SImode part of
4906 (ashift:DI c (const_int 32)), it doesn't work
4907 to compute that in SImode, because a 32-bit shift
4908 in SImode is unpredictable. We know the value is 0. */
4909 if (op0 && op1
4910 && GET_CODE (elt->exp) == ASHIFT
4911 && GET_CODE (op1) == CONST_INT
4912 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
4913 {
4914 if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
4915
4916 /* If the count fits in the inner mode's width,
4917 but exceeds the outer mode's width,
4918 the value will get truncated to 0
4919 by the subreg. */
4920 new = const0_rtx;
4921 else
4922 /* If the count exceeds even the inner mode's width,
4923 don't fold this expression. */
4924 new = 0;
4925 }
4926 else if (op0 && op1)
4927 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
4928 op0, op1);
4929 }
4930
4931 else if (GET_CODE (elt->exp) == SUBREG
4932 && GET_MODE (SUBREG_REG (elt->exp)) == mode
4933 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
4934 <= UNITS_PER_WORD)
4935 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
4936 new = copy_rtx (SUBREG_REG (elt->exp));
4937
4938 if (new)
4939 return new;
4940 }
4941 }
4942
4943 return x;
4944
4945 case NOT:
4946 case NEG:
4947 /* If we have (NOT Y), see if Y is known to be (NOT Z).
4948 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
4949 new = lookup_as_function (XEXP (x, 0), code);
4950 if (new)
4951 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
4952 break;
4953
4954 case MEM:
4955 /* If we are not actually processing an insn, don't try to find the
4956 best address. Not only don't we care, but we could modify the
4957 MEM in an invalid way since we have no insn to validate against. */
4958 if (insn != 0)
4959 find_best_addr (insn, &XEXP (x, 0));
4960
4961 {
4962 /* Even if we don't fold in the insn itself,
4963 we can safely do so here, in hopes of getting a constant. */
4964 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
4965 rtx base = 0;
4966 HOST_WIDE_INT offset = 0;
4967
4968 if (GET_CODE (addr) == REG
4969 && REGNO_QTY_VALID_P (REGNO (addr))
4970 && GET_MODE (addr) == qty_mode[reg_qty[REGNO (addr)]]
4971 && qty_const[reg_qty[REGNO (addr)]] != 0)
4972 addr = qty_const[reg_qty[REGNO (addr)]];
4973
4974 /* If address is constant, split it into a base and integer offset. */
4975 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
4976 base = addr;
4977 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
4978 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
4979 {
4980 base = XEXP (XEXP (addr, 0), 0);
4981 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
4982 }
4983 else if (GET_CODE (addr) == LO_SUM
4984 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
4985 base = XEXP (addr, 1);
4986
4987 /* If this is a constant pool reference, we can fold it into its
4988 constant to allow better value tracking. */
4989 if (base && GET_CODE (base) == SYMBOL_REF
4990 && CONSTANT_POOL_ADDRESS_P (base))
4991 {
4992 rtx constant = get_pool_constant (base);
4993 enum machine_mode const_mode = get_pool_mode (base);
4994 rtx new;
4995
4996 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
4997 constant_pool_entries_cost = COST (constant);
4998
4999 /* If we are loading the full constant, we have an equivalence. */
5000 if (offset == 0 && mode == const_mode)
5001 return constant;
5002
5003 /* If this actually isn't a constant (weird!), we can't do
5004 anything. Otherwise, handle the two most common cases:
5005 extracting a word from a multi-word constant, and extracting
5006 the low-order bits. Other cases don't seem common enough to
5007 worry about. */
5008 if (! CONSTANT_P (constant))
5009 return x;
5010
5011 if (GET_MODE_CLASS (mode) == MODE_INT
5012 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
5013 && offset % UNITS_PER_WORD == 0
5014 && (new = operand_subword (constant,
5015 offset / UNITS_PER_WORD,
5016 0, const_mode)) != 0)
5017 return new;
5018
5019 if (((BYTES_BIG_ENDIAN
5020 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
5021 || (! BYTES_BIG_ENDIAN && offset == 0))
5022 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
5023 return new;
5024 }
5025
5026 /* If this is a reference to a label at a known position in a jump
5027 table, we also know its value. */
5028 if (base && GET_CODE (base) == LABEL_REF)
5029 {
5030 rtx label = XEXP (base, 0);
5031 rtx table_insn = NEXT_INSN (label);
5032
5033 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5034 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
5035 {
5036 rtx table = PATTERN (table_insn);
5037
5038 if (offset >= 0
5039 && (offset / GET_MODE_SIZE (GET_MODE (table))
5040 < XVECLEN (table, 0)))
5041 return XVECEXP (table, 0,
5042 offset / GET_MODE_SIZE (GET_MODE (table)));
5043 }
5044 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5045 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
5046 {
5047 rtx table = PATTERN (table_insn);
5048
5049 if (offset >= 0
5050 && (offset / GET_MODE_SIZE (GET_MODE (table))
5051 < XVECLEN (table, 1)))
5052 {
5053 offset /= GET_MODE_SIZE (GET_MODE (table));
5054 new = gen_rtx (MINUS, Pmode, XVECEXP (table, 1, offset),
5055 XEXP (table, 0));
5056
5057 if (GET_MODE (table) != Pmode)
5058 new = gen_rtx (TRUNCATE, GET_MODE (table), new);
5059
5060 /* Indicate this is a constant. This isn't a
5061 valid form of CONST, but it will only be used
5062 to fold the next insns and then discarded, so
5063 it should be safe. */
5064 return gen_rtx (CONST, GET_MODE (new), new);
5065 }
5066 }
5067 }
5068
5069 return x;
5070 }
5071 }
5072
5073 const_arg0 = 0;
5074 const_arg1 = 0;
5075 const_arg2 = 0;
5076 mode_arg0 = VOIDmode;
5077
5078 /* Try folding our operands.
5079 Then see which ones have constant values known. */
5080
5081 fmt = GET_RTX_FORMAT (code);
5082 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5083 if (fmt[i] == 'e')
5084 {
5085 rtx arg = XEXP (x, i);
5086 rtx folded_arg = arg, const_arg = 0;
5087 enum machine_mode mode_arg = GET_MODE (arg);
5088 rtx cheap_arg, expensive_arg;
5089 rtx replacements[2];
5090 int j;
5091
5092 /* Most arguments are cheap, so handle them specially. */
5093 switch (GET_CODE (arg))
5094 {
5095 case REG:
5096 /* This is the same as calling equiv_constant; it is duplicated
5097 here for speed. */
5098 if (REGNO_QTY_VALID_P (REGNO (arg))
5099 && qty_const[reg_qty[REGNO (arg)]] != 0
5100 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != REG
5101 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != PLUS)
5102 const_arg
5103 = gen_lowpart_if_possible (GET_MODE (arg),
5104 qty_const[reg_qty[REGNO (arg)]]);
5105 break;
5106
5107 case CONST:
5108 case CONST_INT:
5109 case SYMBOL_REF:
5110 case LABEL_REF:
5111 case CONST_DOUBLE:
5112 const_arg = arg;
5113 break;
5114
5115 #ifdef HAVE_cc0
5116 case CC0:
5117 folded_arg = prev_insn_cc0;
5118 mode_arg = prev_insn_cc0_mode;
5119 const_arg = equiv_constant (folded_arg);
5120 break;
5121 #endif
5122
5123 default:
5124 folded_arg = fold_rtx (arg, insn);
5125 const_arg = equiv_constant (folded_arg);
5126 }
5127
5128 /* For the first three operands, see if the operand
5129 is constant or equivalent to a constant. */
5130 switch (i)
5131 {
5132 case 0:
5133 folded_arg0 = folded_arg;
5134 const_arg0 = const_arg;
5135 mode_arg0 = mode_arg;
5136 break;
5137 case 1:
5138 folded_arg1 = folded_arg;
5139 const_arg1 = const_arg;
5140 break;
5141 case 2:
5142 const_arg2 = const_arg;
5143 break;
5144 }
5145
5146 /* Pick the least expensive of the folded argument and an
5147 equivalent constant argument. */
5148 if (const_arg == 0 || const_arg == folded_arg
5149 || COST (const_arg) > COST (folded_arg))
5150 cheap_arg = folded_arg, expensive_arg = const_arg;
5151 else
5152 cheap_arg = const_arg, expensive_arg = folded_arg;
5153
5154 /* Try to replace the operand with the cheapest of the two
5155 possibilities. If it doesn't work and this is either of the first
5156 two operands of a commutative operation, try swapping them.
5157 If THAT fails, try the more expensive, provided it is cheaper
5158 than what is already there. */
5159
5160 if (cheap_arg == XEXP (x, i))
5161 continue;
5162
5163 if (insn == 0 && ! copied)
5164 {
5165 x = copy_rtx (x);
5166 copied = 1;
5167 }
5168
5169 replacements[0] = cheap_arg, replacements[1] = expensive_arg;
5170 for (j = 0;
5171 j < 2 && replacements[j]
5172 && COST (replacements[j]) < COST (XEXP (x, i));
5173 j++)
5174 {
5175 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
5176 break;
5177
5178 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c')
5179 {
5180 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
5181 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
5182
5183 if (apply_change_group ())
5184 {
5185 /* Swap them back to be invalid so that this loop can
5186 continue and flag them to be swapped back later. */
5187 rtx tem;
5188
5189 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
5190 XEXP (x, 1) = tem;
5191 must_swap = 1;
5192 break;
5193 }
5194 }
5195 }
5196 }
5197
5198 else if (fmt[i] == 'E')
5199 /* Don't try to fold inside of a vector of expressions.
5200 Doing nothing is harmless. */
5201 ;
5202
5203 /* If a commutative operation, place a constant integer as the second
5204 operand unless the first operand is also a constant integer. Otherwise,
5205 place any constant second unless the first operand is also a constant. */
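  /* Canonicalization example (illustrative operands): (eq:SI
     (const_int 4) (reg:SI 60)) has its operands swapped to
     (eq:SI (reg:SI 60) (const_int 4)), so later hash lookups see
     the expression in one canonical form.  */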
5206
5207 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
5208 {
5209 if (must_swap || (const_arg0
5210 && (const_arg1 == 0
5211 || (GET_CODE (const_arg0) == CONST_INT
5212 && GET_CODE (const_arg1) != CONST_INT))))
5213 {
5214 register rtx tem = XEXP (x, 0);
5215
5216 if (insn == 0 && ! copied)
5217 {
5218 x = copy_rtx (x);
5219 copied = 1;
5220 }
5221
5222 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
5223 validate_change (insn, &XEXP (x, 1), tem, 1);
5224 if (apply_change_group ())
5225 {
5226 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
5227 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
5228 }
5229 }
5230 }
5231
5232 /* If X is an arithmetic operation, see if we can simplify it. */
5233
5234 switch (GET_RTX_CLASS (code))
5235 {
5236 case '1':
5237 {
5238 int is_const = 0;
5239
5240 /* We can't simplify extension ops unless we know the
5241 original mode. */
5242 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
5243 && mode_arg0 == VOIDmode)
5244 break;
5245
5246 /* If we had a CONST, strip it off and put it back later if we
5247 fold. */
5248 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
5249 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
5250
5251 new = simplify_unary_operation (code, mode,
5252 const_arg0 ? const_arg0 : folded_arg0,
5253 mode_arg0);
5254 if (new != 0 && is_const)
5255 new = gen_rtx (CONST, mode, new);
5256 }
5257 break;
5258
5259 case '<':
5260 /* See what items are actually being compared and set FOLDED_ARG[01]
5261 to those values and CODE to the actual comparison code. If any are
5262 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
5263 do anything if both operands are already known to be constant. */
5264
5265 if (const_arg0 == 0 || const_arg1 == 0)
5266 {
5267 struct table_elt *p0, *p1;
5268 rtx true = const_true_rtx, false = const0_rtx;
5269 enum machine_mode mode_arg1;
5270
5271 #ifdef FLOAT_STORE_FLAG_VALUE
5272 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5273 {
5274 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5275 mode);
5276 false = CONST0_RTX (mode);
5277 }
5278 #endif
5279
5280 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
5281 &mode_arg0, &mode_arg1);
5282 const_arg0 = equiv_constant (folded_arg0);
5283 const_arg1 = equiv_constant (folded_arg1);
5284
5285 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
5286 what kinds of things are being compared, so we can't do
5287 anything with this comparison. */
5288
5289 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
5290 break;
5291
5292 /* If we do not now have two constants being compared, see if we
5293 can nevertheless deduce some things about the comparison. */
5294 if (const_arg0 == 0 || const_arg1 == 0)
5295 {
5296 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or non-explicit
5297 constant? These aren't zero, but we don't know their sign. */
5298 if (const_arg1 == const0_rtx
5299 && (NONZERO_BASE_PLUS_P (folded_arg0)
5300 #if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
5301 come out as 0. */
5302 || GET_CODE (folded_arg0) == SYMBOL_REF
5303 #endif
5304 || GET_CODE (folded_arg0) == LABEL_REF
5305 || GET_CODE (folded_arg0) == CONST))
5306 {
5307 if (code == EQ)
5308 return false;
5309 else if (code == NE)
5310 return true;
5311 }
5312
5313 /* See if the two operands are the same. We don't do this
5314 for IEEE floating-point since we can't assume x == x
5315 since x might be a NaN. */
5316
5317 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5318 || ! FLOAT_MODE_P (mode_arg0) || flag_fast_math)
5319 && (folded_arg0 == folded_arg1
5320 || (GET_CODE (folded_arg0) == REG
5321 && GET_CODE (folded_arg1) == REG
5322 && (reg_qty[REGNO (folded_arg0)]
5323 == reg_qty[REGNO (folded_arg1)]))
5324 || ((p0 = lookup (folded_arg0,
5325 (safe_hash (folded_arg0, mode_arg0)
5326 % NBUCKETS), mode_arg0))
5327 && (p1 = lookup (folded_arg1,
5328 (safe_hash (folded_arg1, mode_arg0)
5329 % NBUCKETS), mode_arg0))
5330 && p0->first_same_value == p1->first_same_value)))
5331 return ((code == EQ || code == LE || code == GE
5332 || code == LEU || code == GEU)
5333 ? true : false);
5334
5335 /* If FOLDED_ARG0 is a register, see if the comparison we are
5336 doing now is either the same as we did before or the reverse
5337 (we only check the reverse if not floating-point). */
5338 else if (GET_CODE (folded_arg0) == REG)
5339 {
5340 int qty = reg_qty[REGNO (folded_arg0)];
5341
5342 if (REGNO_QTY_VALID_P (REGNO (folded_arg0))
5343 && (comparison_dominates_p (qty_comparison_code[qty], code)
5344 || (comparison_dominates_p (qty_comparison_code[qty],
5345 reverse_condition (code))
5346 && ! FLOAT_MODE_P (mode_arg0)))
5347 && (rtx_equal_p (qty_comparison_const[qty], folded_arg1)
5348 || (const_arg1
5349 && rtx_equal_p (qty_comparison_const[qty],
5350 const_arg1))
5351 || (GET_CODE (folded_arg1) == REG
5352 && (reg_qty[REGNO (folded_arg1)]
5353 == qty_comparison_qty[qty]))))
5354 return (comparison_dominates_p (qty_comparison_code[qty],
5355 code)
5356 ? true : false);
5357 }
5358 }
5359 }
5360
5361 /* If we are comparing against zero, see if the first operand is
5362 equivalent to an IOR with a constant. If so, we may be able to
5363 determine the result of this comparison. */
5364
5365 if (const_arg1 == const0_rtx)
5366 {
5367 rtx y = lookup_as_function (folded_arg0, IOR);
5368 rtx inner_const;
5369
5370 if (y != 0
5371 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
5372 && GET_CODE (inner_const) == CONST_INT
5373 && INTVAL (inner_const) != 0)
5374 {
5375 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
5376 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
5377 && (INTVAL (inner_const)
5378 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
5379 rtx true = const_true_rtx, false = const0_rtx;
5380
5381 #ifdef FLOAT_STORE_FLAG_VALUE
5382 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5383 {
5384 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5385 mode);
5386 false = CONST0_RTX (mode);
5387 }
5388 #endif
5389
5390 switch (code)
5391 {
5392 case EQ:
5393 return false;
5394 case NE:
5395 return true;
5396 case LT: case LE:
5397 if (has_sign)
5398 return true;
5399 break;
5400 case GT: case GE:
5401 if (has_sign)
5402 return false;
5403 break;
5404 }
5405 }
5406 }
5407
5408 new = simplify_relational_operation (code, mode_arg0,
5409 const_arg0 ? const_arg0 : folded_arg0,
5410 const_arg1 ? const_arg1 : folded_arg1);
5411 #ifdef FLOAT_STORE_FLAG_VALUE
5412 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
5413 new = ((new == const0_rtx) ? CONST0_RTX (mode)
5414 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE, mode));
5415 #endif
5416 break;
5417
5418 case '2':
5419 case 'c':
5420 switch (code)
5421 {
5422 case PLUS:
5423 /* If the second operand is a LABEL_REF, see if the first is a MINUS
5424 with that LABEL_REF as its second operand. If so, the result is
5425 the first operand of that MINUS. This handles switches with an
5426 ADDR_DIFF_VEC table. */
5427 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
5428 {
5429 rtx y
5430 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
5431 : lookup_as_function (folded_arg0, MINUS);
5432
5433 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5434 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
5435 return XEXP (y, 0);
5436
5437 /* Now try for a CONST of a MINUS like the above. */
5438 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
5439 : lookup_as_function (folded_arg0, CONST))) != 0
5440 && GET_CODE (XEXP (y, 0)) == MINUS
5441 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5442 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
5443 return XEXP (XEXP (y, 0), 0);
5444 }
5445
5446 /* Likewise if the operands are in the other order. */
5447 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
5448 {
5449 rtx y
5450 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
5451 : lookup_as_function (folded_arg1, MINUS);
5452
5453 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5454 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
5455 return XEXP (y, 0);
5456
5457 /* Now try for a CONST of a MINUS like the above. */
5458 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
5459 : lookup_as_function (folded_arg1, CONST))) != 0
5460 && GET_CODE (XEXP (y, 0)) == MINUS
5461 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5462 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
5463 return XEXP (XEXP (y, 0), 0);
5464 }
5465
5466 /* If second operand is a register equivalent to a negative
5467 CONST_INT, see if we can find a register equivalent to the
5468 positive constant. Make a MINUS if so. Don't do this for
5469 a negative constant since we might then alternate between
5470 choosing positive and negative constants. Having the positive
5471 constant previously-used is the more common case. */
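/* For example, if some register REG2 is known to hold (const_int 6),
   then (plus REG1 (const_int -6)) can become (minus REG1 REG2).  */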
5472 if (const_arg1 && GET_CODE (const_arg1) == CONST_INT
5473 && INTVAL (const_arg1) < 0 && GET_CODE (folded_arg1) == REG)
5474 {
5475 rtx new_const = GEN_INT (- INTVAL (const_arg1));
5476 struct table_elt *p
5477 = lookup (new_const, safe_hash (new_const, mode) % NBUCKETS,
5478 mode);
5479
5480 if (p)
5481 for (p = p->first_same_value; p; p = p->next_same_value)
5482 if (GET_CODE (p->exp) == REG)
5483 return cse_gen_binary (MINUS, mode, folded_arg0,
5484 canon_reg (p->exp, NULL_RTX));
5485 }
5486 goto from_plus;
5487
5488 case MINUS:
5489 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
5490 If so, produce (PLUS Z C2-C). */
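/* For example, if Y is known to be (plus Z (const_int 10)), then
   (minus Y (const_int 4)) folds to (plus Z (const_int 6)).  */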
5491 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
5492 {
5493 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
5494 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
5495 return fold_rtx (plus_constant (copy_rtx (y),
5496 -INTVAL (const_arg1)),
5497 NULL_RTX);
5498 }
5499
5500 /* ... fall through ... */
5501
5502 from_plus:
5503 case SMIN: case SMAX: case UMIN: case UMAX:
5504 case IOR: case AND: case XOR:
5505 case MULT: case DIV: case UDIV:
5506 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
5507 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
5508 is known to be of similar form, we may be able to replace the
5509 operation with a combined operation. This may eliminate the
5510 intermediate operation if every use is simplified in this way.
5511 Note that the similar optimization done by combine.c only works
5512 if the intermediate operation's result has only one reference. */
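/* For example, if REG is known to hold (plus Z (const_int 4)), then
   (plus REG (const_int 3)) can be replaced by (plus Z (const_int 7)),
   making the insn that computed REG potentially dead.  */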
5513
5514 if (GET_CODE (folded_arg0) == REG
5515 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
5516 {
5517 int is_shift
5518 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
5519 rtx y = lookup_as_function (folded_arg0, code);
5520 rtx inner_const;
5521 enum rtx_code associate_code;
5522 rtx new_const;
5523
5524 if (y == 0
5525 || 0 == (inner_const
5526 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
5527 || GET_CODE (inner_const) != CONST_INT
5528 /* If we have compiled a statement like
5529 "if (x == (x & mask1))", and now are looking at
5530 "x & mask2", we will have a case where the first operand
5531 of Y is the same as our first operand. Unless we detect
5532 this case, an infinite loop will result. */
5533 || XEXP (y, 0) == folded_arg0)
5534 break;
5535
5536 /* Don't associate these operations if they are a PLUS with the
5537 same constant and it is a power of two. These might be doable
5538 with a pre- or post-increment. Similarly for two subtracts of
5539 identical powers of two with post-decrement. */
5540
5541 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
5542 && (0
5543 #if defined(HAVE_PRE_INCREMENT) || defined(HAVE_POST_INCREMENT)
5544 || exact_log2 (INTVAL (const_arg1)) >= 0
5545 #endif
5546 #if defined(HAVE_PRE_DECREMENT) || defined(HAVE_POST_DECREMENT)
5547 || exact_log2 (- INTVAL (const_arg1)) >= 0
5548 #endif
5549 ))
5550 break;
5551
5552 /* Compute the code used to compose the constants. For example,
5553 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
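/* Shifts likewise compose by adding their counts: folding
   (ashift REG (const_int 2)) where REG holds (ashift Z (const_int 3))
   yields (ashift Z (const_int 5)).  */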
5554
5555 associate_code
5556 = (code == MULT || code == DIV || code == UDIV ? MULT
5557 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
5558
5559 new_const = simplify_binary_operation (associate_code, mode,
5560 const_arg1, inner_const);
5561
5562 if (new_const == 0)
5563 break;
5564
5565 /* If we are associating shift operations, don't let this
5566 produce a shift of the size of the object or larger.
5567 This could occur when we follow a sign-extend by a right
5568 shift on a machine that does a sign-extend as a pair
5569 of shifts. */
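/* For example, after an SImode sign-extend done as
   (ashiftrt (ashift X (const_int 24)) (const_int 24)), folding a
   further (ashiftrt ... (const_int 24)) would compose to a count of
   48, which exceeds the 32 bits of the mode.  */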
5570
5571 if (is_shift && GET_CODE (new_const) == CONST_INT
5572 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
5573 {
5574 /* As an exception, we can turn an ASHIFTRT of this
5575 form into a shift of the number of bits - 1. */
5576 if (code == ASHIFTRT)
5577 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
5578 else
5579 break;
5580 }
5581
5582 y = copy_rtx (XEXP (y, 0));
5583
5584 /* If Y contains our first operand (the most common way this
5585 can happen is if Y is a MEM), we would go into an infinite
5586 loop if we tried to fold it. So don't in that case. */
5587
5588 if (! reg_mentioned_p (folded_arg0, y))
5589 y = fold_rtx (y, insn);
5590
5591 return cse_gen_binary (code, mode, y, new_const);
5592 }
5593 }
5594
5595 new = simplify_binary_operation (code, mode,
5596 const_arg0 ? const_arg0 : folded_arg0,
5597 const_arg1 ? const_arg1 : folded_arg1);
5598 break;
5599
5600 case 'o':
5601 /* (lo_sum (high X) X) is simply X. */
5602 if (code == LO_SUM && const_arg0 != 0
5603 && GET_CODE (const_arg0) == HIGH
5604 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
5605 return const_arg1;
5606 break;
5607
5608 case '3':
5609 case 'b':
5610 new = simplify_ternary_operation (code, mode, mode_arg0,
5611 const_arg0 ? const_arg0 : folded_arg0,
5612 const_arg1 ? const_arg1 : folded_arg1,
5613 const_arg2 ? const_arg2 : XEXP (x, 2));
5614 break;
5615 }
5616
5617 return new ? new : x;
5618 }
5619 \f
5620 /* Return a constant value currently equivalent to X.
5621 Return 0 if we don't know one. */
5622
5623 static rtx
5624 equiv_constant (x)
5625 rtx x;
5626 {
5627 if (GET_CODE (x) == REG
5628 && REGNO_QTY_VALID_P (REGNO (x))
5629 && qty_const[reg_qty[REGNO (x)]])
5630 x = gen_lowpart_if_possible (GET_MODE (x), qty_const[reg_qty[REGNO (x)]]);
5631
5632 if (x != 0 && CONSTANT_P (x))
5633 return x;
5634
5635 /* If X is a MEM, try to fold it outside the context of any insn to see if
5636 it might be equivalent to a constant. That handles the case where it
5637 is a constant-pool reference. Then try to look it up in the hash table
5638 in case it is something whose value we have seen before. */
5639
5640 if (GET_CODE (x) == MEM)
5641 {
5642 struct table_elt *elt;
5643
5644 x = fold_rtx (x, NULL_RTX);
5645 if (CONSTANT_P (x))
5646 return x;
5647
5648 elt = lookup (x, safe_hash (x, GET_MODE (x)) % NBUCKETS, GET_MODE (x));
5649 if (elt == 0)
5650 return 0;
5651
5652 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
5653 if (elt->is_const && CONSTANT_P (elt->exp))
5654 return elt->exp;
5655 }
5656
5657 return 0;
5658 }
5659 \f
5660 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
5661 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
5662 least-significant part of X.
5663 MODE specifies how big a part of X to return.
5664
5665 If the requested operation cannot be done, 0 is returned.
5666
5667 This is similar to gen_lowpart in emit-rtl.c. */
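/* For example, on a big-endian machine the least-significant QImode
   byte of an SImode MEM at address A lives at address A + 3, so for
   a MEM we build a new MEM with the address adjusted by that offset
   rather than taking a SUBREG.  */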
5668
5669 rtx
5670 gen_lowpart_if_possible (mode, x)
5671 enum machine_mode mode;
5672 register rtx x;
5673 {
5674 rtx result = gen_lowpart_common (mode, x);
5675
5676 if (result)
5677 return result;
5678 else if (GET_CODE (x) == MEM)
5679 {
5680 /* This is the only other case we handle. */
5681 register int offset = 0;
5682 rtx new;
5683
5684 if (WORDS_BIG_ENDIAN)
5685 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
5686 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
5687 if (BYTES_BIG_ENDIAN)
5688 /* Adjust the address so that the address-after-the-data is
5689 unchanged. */
5690 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
5691 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
5692 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
5693 if (! memory_address_p (mode, XEXP (new, 0)))
5694 return 0;
5695 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
5696 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
5697 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
5698 return new;
5699 }
5700 else
5701 return 0;
5702 }
5703 \f
5704 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
5705 branch. It will be zero if not.
5706
5707 In certain cases, this can cause us to add an equivalence. For example,
5708 if we are following the taken case of
5709 if (i == 2)
5710 we can add the fact that `i' and `2' are now equivalent.
5711
5712 In any case, we can record that this comparison was passed. If the same
5713 comparison is seen later, we will know its value. */
5714
5715 static void
5716 record_jump_equiv (insn, taken)
5717 rtx insn;
5718 int taken;
5719 {
5720 int cond_known_true;
5721 rtx op0, op1;
5722 enum machine_mode mode, mode0, mode1;
5723 int reversed_nonequality = 0;
5724 enum rtx_code code;
5725
5726 /* Ensure this is the right kind of insn. */
5727 if (! condjump_p (insn) || simplejump_p (insn))
5728 return;
5729
5730 /* See if this jump condition is known true or false. */
5731 if (taken)
5732 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 2) == pc_rtx);
5733 else
5734 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx);
5735
5736 /* Get the type of comparison being done and the operands being compared.
5737 If we had to reverse a non-equality condition, record that fact so we
5738 know that it isn't valid for floating-point. */
5739 code = GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 0));
5740 op0 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 0), insn);
5741 op1 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 1), insn);
5742
5743 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
5744 if (! cond_known_true)
5745 {
5746 reversed_nonequality = (code != EQ && code != NE);
5747 code = reverse_condition (code);
5748 }
5749
5750 /* The mode is the mode of the non-constant. */
5751 mode = mode0;
5752 if (mode1 != VOIDmode)
5753 mode = mode1;
5754
5755 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
5756 }
5757
5758 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
5759 REVERSED_NONEQUALITY is nonzero if CODE had to be reversed.
5760 Make any useful entries we can with that information. Called from
5761 above function and called recursively. */
5762
5763 static void
5764 record_jump_cond (code, mode, op0, op1, reversed_nonequality)
5765 enum rtx_code code;
5766 enum machine_mode mode;
5767 rtx op0, op1;
5768 int reversed_nonequality;
5769 {
5770 unsigned op0_hash, op1_hash;
5771 int op0_in_memory, op0_in_struct, op1_in_memory, op1_in_struct;
5772 struct table_elt *op0_elt, *op1_elt;
5773
5774 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
5775 we know that they are also equal in the smaller mode (this is also
5776 true for all smaller modes whether or not there is a SUBREG, but
5777 is not worth testing for with no SUBREG). */
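/* For example, if (subreg:DI (reg:SI R) 0) is known equal to OP1,
   then (reg:SI R) is equal to the SImode low part of OP1.  */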
5778
5779 /* Note that GET_MODE (op0) may not equal MODE. */
5780 if (code == EQ && GET_CODE (op0) == SUBREG
5781 && (GET_MODE_SIZE (GET_MODE (op0))
5782 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
5783 {
5784 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
5785 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
5786
5787 record_jump_cond (code, mode, SUBREG_REG (op0),
5788 tem ? tem : gen_rtx (SUBREG, inner_mode, op1, 0),
5789 reversed_nonequality);
5790 }
5791
5792 if (code == EQ && GET_CODE (op1) == SUBREG
5793 && (GET_MODE_SIZE (GET_MODE (op1))
5794 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
5795 {
5796 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
5797 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
5798
5799 record_jump_cond (code, mode, SUBREG_REG (op1),
5800 tem ? tem : gen_rtx (SUBREG, inner_mode, op0, 0),
5801 reversed_nonequality);
5802 }
5803
5804 /* Similarly, if this is an NE comparison, and either is a SUBREG
5805 making a smaller mode, we know the whole thing is also NE. */
5806
5807 /* Note that GET_MODE (op0) may not equal MODE;
5808 if we test MODE instead, we can get an infinite recursion
5809 alternating between two modes each wider than MODE. */
5810
5811 if (code == NE && GET_CODE (op0) == SUBREG
5812 && subreg_lowpart_p (op0)
5813 && (GET_MODE_SIZE (GET_MODE (op0))
5814 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
5815 {
5816 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
5817 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
5818
5819 record_jump_cond (code, mode, SUBREG_REG (op0),
5820 tem ? tem : gen_rtx (SUBREG, inner_mode, op1, 0),
5821 reversed_nonequality);
5822 }
5823
5824 if (code == NE && GET_CODE (op1) == SUBREG
5825 && subreg_lowpart_p (op1)
5826 && (GET_MODE_SIZE (GET_MODE (op1))
5827 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
5828 {
5829 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
5830 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
5831
5832 record_jump_cond (code, mode, SUBREG_REG (op1),
5833 tem ? tem : gen_rtx (SUBREG, inner_mode, op0, 0),
5834 reversed_nonequality);
5835 }
5836
5837 /* Hash both operands. */
5838
5839 do_not_record = 0;
5840 hash_arg_in_memory = 0;
5841 hash_arg_in_struct = 0;
5842 op0_hash = HASH (op0, mode);
5843 op0_in_memory = hash_arg_in_memory;
5844 op0_in_struct = hash_arg_in_struct;
5845
5846 if (do_not_record)
5847 return;
5848
5849 do_not_record = 0;
5850 hash_arg_in_memory = 0;
5851 hash_arg_in_struct = 0;
5852 op1_hash = HASH (op1, mode);
5853 op1_in_memory = hash_arg_in_memory;
5854 op1_in_struct = hash_arg_in_struct;
5855
5856 if (do_not_record)
5857 return;
5858
5859 /* Look up both operands. */
5860 op0_elt = lookup (op0, op0_hash, mode);
5861 op1_elt = lookup (op1, op1_hash, mode);
5862
5863 /* If both operands are already equivalent or if they are not in the
5864 table but are identical, do nothing. */
5865 if ((op0_elt != 0 && op1_elt != 0
5866 && op0_elt->first_same_value == op1_elt->first_same_value)
5867 || op0 == op1 || rtx_equal_p (op0, op1))
5868 return;
5869
5870 /* If we aren't setting two things equal, all we can do is save this
5871 comparison. Similarly if this is floating-point. In the latter
5872 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
5873 If we record the equality, we might inadvertently delete code
5874 whose intent was to change -0 to +0. */
5875
5876 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
5877 {
5878 /* If we reversed a floating-point comparison, if OP0 is not a
5879 register, or if OP1 is neither a register nor a constant, we can't
5880 do anything. */
5881
5882 if (GET_CODE (op1) != REG)
5883 op1 = equiv_constant (op1);
5884
5885 if ((reversed_nonequality && FLOAT_MODE_P (mode))
5886 || GET_CODE (op0) != REG || op1 == 0)
5887 return;
5888
5889 /* Put OP0 in the hash table if it isn't already. This gives it a
5890 new quantity number. */
5891 if (op0_elt == 0)
5892 {
5893 if (insert_regs (op0, NULL_PTR, 0))
5894 {
5895 rehash_using_reg (op0);
5896 op0_hash = HASH (op0, mode);
5897
5898 /* If OP0 is contained in OP1, this changes its hash code
5899 as well. Faster to rehash than to check, except
5900 for the simple case of a constant. */
5901 if (! CONSTANT_P (op1))
5902 op1_hash = HASH (op1, mode);
5903 }
5904
5905 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
5906 op0_elt->in_memory = op0_in_memory;
5907 op0_elt->in_struct = op0_in_struct;
5908 }
5909
5910 qty_comparison_code[reg_qty[REGNO (op0)]] = code;
5911 if (GET_CODE (op1) == REG)
5912 {
5913 /* Look it up again--in case op0 and op1 are the same. */
5914 op1_elt = lookup (op1, op1_hash, mode);
5915
5916 /* Put OP1 in the hash table so it gets a new quantity number. */
5917 if (op1_elt == 0)
5918 {
5919 if (insert_regs (op1, NULL_PTR, 0))
5920 {
5921 rehash_using_reg (op1);
5922 op1_hash = HASH (op1, mode);
5923 }
5924
5925 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
5926 op1_elt->in_memory = op1_in_memory;
5927 op1_elt->in_struct = op1_in_struct;
5928 }
5929
5930 qty_comparison_qty[reg_qty[REGNO (op0)]] = reg_qty[REGNO (op1)];
5931 qty_comparison_const[reg_qty[REGNO (op0)]] = 0;
5932 }
5933 else
5934 {
5935 qty_comparison_qty[reg_qty[REGNO (op0)]] = -1;
5936 qty_comparison_const[reg_qty[REGNO (op0)]] = op1;
5937 }
5938
5939 return;
5940 }
5941
5942 /* If either side is still missing an equivalence, make it now,
5943 then merge the equivalences. */
5944
5945 if (op0_elt == 0)
5946 {
5947 if (insert_regs (op0, NULL_PTR, 0))
5948 {
5949 rehash_using_reg (op0);
5950 op0_hash = HASH (op0, mode);
5951 }
5952
5953 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
5954 op0_elt->in_memory = op0_in_memory;
5955 op0_elt->in_struct = op0_in_struct;
5956 }
5957
5958 if (op1_elt == 0)
5959 {
5960 if (insert_regs (op1, NULL_PTR, 0))
5961 {
5962 rehash_using_reg (op1);
5963 op1_hash = HASH (op1, mode);
5964 }
5965
5966 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
5967 op1_elt->in_memory = op1_in_memory;
5968 op1_elt->in_struct = op1_in_struct;
5969 }
5970
5971 merge_equiv_classes (op0_elt, op1_elt);
5972 last_jump_equiv_class = op0_elt;
5973 }
5974 \f
5975 /* CSE processing for one instruction.
5976 First simplify sources and addresses of all assignments
5977 in the instruction, using previously-computed equivalent values.
5978 Then install the new sources and destinations in the table
5979 of available values.
5980
5981 If IN_LIBCALL_BLOCK is nonzero, don't record any equivalence made in
5982 the insn. */
5983
5984 /* Data on one SET contained in the instruction. */
5985
5986 struct set
5987 {
5988 /* The SET rtx itself. */
5989 rtx rtl;
5990 /* The SET_SRC of the rtx (the original value, if it is changing). */
5991 rtx src;
5992 /* The hash-table element for the SET_SRC of the SET. */
5993 struct table_elt *src_elt;
5994 /* Hash value for the SET_SRC. */
5995 unsigned src_hash;
5996 /* Hash value for the SET_DEST. */
5997 unsigned dest_hash;
5998 /* The SET_DEST, with SUBREG, etc., stripped. */
5999 rtx inner_dest;
6000 /* Place where the pointer to the INNER_DEST was found. */
6001 rtx *inner_dest_loc;
6002 /* Nonzero if the SET_SRC is in memory. */
6003 char src_in_memory;
6004 /* Nonzero if the SET_SRC is in a structure. */
6005 char src_in_struct;
6006 /* Nonzero if the SET_SRC contains something
6007 whose value cannot be predicted and understood. */
6008 char src_volatile;
6009 /* Original machine mode, in case it becomes a CONST_INT. */
6010 enum machine_mode mode;
6011 /* A constant equivalent for SET_SRC, if any. */
6012 rtx src_const;
6013 /* Hash value of constant equivalent for SET_SRC. */
6014 unsigned src_const_hash;
6015 /* Table entry for constant equivalent for SET_SRC, if any. */
6016 struct table_elt *src_const_elt;
6017 };
6018
6019 static void
6020 cse_insn (insn, in_libcall_block)
6021 rtx insn;
6022 int in_libcall_block;
6023 {
6024 register rtx x = PATTERN (insn);
6025 register int i;
6026 rtx tem;
6027 register int n_sets = 0;
6028
6029 /* Records what this insn does to set CC0. */
6030 rtx this_insn_cc0 = 0;
6031 enum machine_mode this_insn_cc0_mode;
6032 struct write_data writes_memory;
6033 static struct write_data init = {0, 0, 0, 0};
6034
6035 rtx src_eqv = 0;
6036 struct table_elt *src_eqv_elt = 0;
6037 int src_eqv_volatile;
6038 int src_eqv_in_memory;
6039 int src_eqv_in_struct;
6040 unsigned src_eqv_hash;
6041
6042 struct set *sets;
6043
6044 this_insn = insn;
6045 writes_memory = init;
6046
6047 /* Find all the SETs and CLOBBERs in this instruction.
6048 Record all the SETs in the array `set' and count them.
6049 Also determine whether there is a CLOBBER that invalidates
6050 all memory references, or all references at varying addresses. */
6051
6052 if (GET_CODE (insn) == CALL_INSN)
6053 {
6054 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6055 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
6056 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
6057 }
6058
6059 if (GET_CODE (x) == SET)
6060 {
6061 sets = (struct set *) alloca (sizeof (struct set));
6062 sets[0].rtl = x;
6063
6064 /* Ignore SETs that are unconditional jumps.
6065 They never need cse processing, so this does not hurt.
6066 The reason is not efficiency but rather
6067 so that we can test at the end for instructions
6068 that have been simplified to unconditional jumps
6069 and not be misled by unchanged instructions
6070 that were unconditional jumps to begin with. */
6071 if (SET_DEST (x) == pc_rtx
6072 && GET_CODE (SET_SRC (x)) == LABEL_REF)
6073 ;
6074
6075 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
6076 The hard function value register is used only once, to copy to
6077 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
6078 Ensure we invalidate the destination register. On the 80386 no
6079 other code would invalidate it since it is a fixed_reg.
6080 We need not check the return of apply_change_group; see canon_reg. */
6081
6082 else if (GET_CODE (SET_SRC (x)) == CALL)
6083 {
6084 canon_reg (SET_SRC (x), insn);
6085 apply_change_group ();
6086 fold_rtx (SET_SRC (x), insn);
6087 invalidate (SET_DEST (x), VOIDmode);
6088 }
6089 else
6090 n_sets = 1;
6091 }
6092 else if (GET_CODE (x) == PARALLEL)
6093 {
6094 register int lim = XVECLEN (x, 0);
6095
6096 sets = (struct set *) alloca (lim * sizeof (struct set));
6097
6098 /* Find all regs explicitly clobbered in this insn,
6099 and ensure they are not replaced with any other regs
6100 elsewhere in this insn.
6101 When a reg that is clobbered is also used for input,
6102 we should presume that that is for a reason,
6103 and we should not substitute some other register
6104 which is not supposed to be clobbered.
6105 Therefore, this loop cannot be merged into the one below
6106 because a CALL may precede a CLOBBER and refer to the
6107 value clobbered. We must not let a canonicalization do
6108 anything in that case. */
6109 for (i = 0; i < lim; i++)
6110 {
6111 register rtx y = XVECEXP (x, 0, i);
6112 if (GET_CODE (y) == CLOBBER)
6113 {
6114 rtx clobbered = XEXP (y, 0);
6115
6116 if (GET_CODE (clobbered) == REG
6117 || GET_CODE (clobbered) == SUBREG)
6118 invalidate (clobbered, VOIDmode);
6119 else if (GET_CODE (clobbered) == STRICT_LOW_PART
6120 || GET_CODE (clobbered) == ZERO_EXTRACT)
6121 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
6122 }
6123 }
6124
6125 for (i = 0; i < lim; i++)
6126 {
6127 register rtx y = XVECEXP (x, 0, i);
6128 if (GET_CODE (y) == SET)
6129 {
6130 /* As above, we ignore unconditional jumps and call-insns and
6131 ignore the result of apply_change_group. */
6132 if (GET_CODE (SET_SRC (y)) == CALL)
6133 {
6134 canon_reg (SET_SRC (y), insn);
6135 apply_change_group ();
6136 fold_rtx (SET_SRC (y), insn);
6137 invalidate (SET_DEST (y), VOIDmode);
6138 }
6139 else if (SET_DEST (y) == pc_rtx
6140 && GET_CODE (SET_SRC (y)) == LABEL_REF)
6141 ;
6142 else
6143 sets[n_sets++].rtl = y;
6144 }
6145 else if (GET_CODE (y) == CLOBBER)
6146 {
6147 /* If we clobber memory, take note of that,
6148 and canon the address.
6149 This does nothing when a register is clobbered
6150 because we have already invalidated the reg. */
6151 if (GET_CODE (XEXP (y, 0)) == MEM)
6152 {
6153 canon_reg (XEXP (y, 0), NULL_RTX);
6154 note_mem_written (XEXP (y, 0), &writes_memory);
6155 }
6156 }
6157 else if (GET_CODE (y) == USE
6158 && ! (GET_CODE (XEXP (y, 0)) == REG
6159 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
6160 canon_reg (y, NULL_RTX);
6161 else if (GET_CODE (y) == CALL)
6162 {
6163 /* The result of apply_change_group can be ignored; see
6164 canon_reg. */
6165 canon_reg (y, insn);
6166 apply_change_group ();
6167 fold_rtx (y, insn);
6168 }
6169 }
6170 }
6171 else if (GET_CODE (x) == CLOBBER)
6172 {
6173 if (GET_CODE (XEXP (x, 0)) == MEM)
6174 {
6175 canon_reg (XEXP (x, 0), NULL_RTX);
6176 note_mem_written (XEXP (x, 0), &writes_memory);
6177 }
6178 }
6179
6180 /* Canonicalize a USE of a pseudo register or memory location. */
6181 else if (GET_CODE (x) == USE
6182 && ! (GET_CODE (XEXP (x, 0)) == REG
6183 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
6184 canon_reg (XEXP (x, 0), NULL_RTX);
6185 else if (GET_CODE (x) == CALL)
6186 {
6187 /* The result of apply_change_group can be ignored; see canon_reg. */
6188 canon_reg (x, insn);
6189 apply_change_group ();
6190 fold_rtx (x, insn);
6191 }
6192
6193 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
6194 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
6195 is handled specially for this case, and if it isn't set, then there will
6196 be no equivalence for the destination. */
6197 if (n_sets == 1 && REG_NOTES (insn) != 0
6198 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
6199 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
6200 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
6201 src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
6202
6203 /* Canonicalize sources and addresses of destinations.
6204 We do this in a separate pass to avoid problems when a MATCH_DUP is
6205 present in the insn pattern. In that case, we want to ensure that
6206 we don't break the duplicate nature of the pattern. So we will replace
6207 both operands at the same time. Otherwise, we would fail to find an
6208 equivalent substitution in the loop calling validate_change below.
6209
6210 We used to suppress canonicalization of DEST if it appears in SRC,
6211 but we don't do this any more. */
6212
6213 for (i = 0; i < n_sets; i++)
6214 {
6215 rtx dest = SET_DEST (sets[i].rtl);
6216 rtx src = SET_SRC (sets[i].rtl);
6217 rtx new = canon_reg (src, insn);
6218
6219 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
6220 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
6221 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
6222 || insn_n_dups[recog_memoized (insn)] > 0)
6223 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
6224 else
6225 SET_SRC (sets[i].rtl) = new;
6226
6227 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
6228 {
6229 validate_change (insn, &XEXP (dest, 1),
6230 canon_reg (XEXP (dest, 1), insn), 1);
6231 validate_change (insn, &XEXP (dest, 2),
6232 canon_reg (XEXP (dest, 2), insn), 1);
6233 }
6234
6235 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
6236 || GET_CODE (dest) == ZERO_EXTRACT
6237 || GET_CODE (dest) == SIGN_EXTRACT)
6238 dest = XEXP (dest, 0);
6239
6240 if (GET_CODE (dest) == MEM)
6241 canon_reg (dest, insn);
6242 }
6243
6244 /* Now that we have done all the replacements, we can apply the change
6245 group and see if they all work. Note that this will cause some
6246 canonicalizations that would have worked individually not to be applied
6247 because some other canonicalization didn't work, but this should not
6248 occur often.
6249
6250 The result of apply_change_group can be ignored; see canon_reg. */
6251
6252 apply_change_group ();
6253
6254 /* Set sets[i].src_elt to the class each source belongs to.
6255 Detect assignments from or to volatile things
6256 and set sets[i].rtl to zero so they will be ignored
6257 in the rest of this function.
6258
6259 Nothing in this loop changes the hash table or the register chains. */
6260
6261 for (i = 0; i < n_sets; i++)
6262 {
6263 register rtx src, dest;
6264 register rtx src_folded;
6265 register struct table_elt *elt = 0, *p;
6266 enum machine_mode mode;
6267 rtx src_eqv_here;
6268 rtx src_const = 0;
6269 rtx src_related = 0;
6270 struct table_elt *src_const_elt = 0;
6271 int src_cost = 10000, src_eqv_cost = 10000, src_folded_cost = 10000;
6272 int src_related_cost = 10000, src_elt_cost = 10000;
6273 /* Set non-zero if we need to call force_const_mem on the
6274 contents of src_folded before using it. */
6275 int src_folded_force_flag = 0;
6276
6277 dest = SET_DEST (sets[i].rtl);
6278 src = SET_SRC (sets[i].rtl);
6279
6280 /* If SRC is a constant that has no machine mode,
6281 hash it with the destination's machine mode.
6282 This way we can keep different modes separate. */
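/* For example, (const_int 5) has VOIDmode; when it is stored into an
   SImode register we hash it as SImode, so an equivalence involving
   the same constant in HImode stays distinct.  */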
6283
6284 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
6285 sets[i].mode = mode;
6286
6287 if (src_eqv)
6288 {
6289 enum machine_mode eqvmode = mode;
6290 if (GET_CODE (dest) == STRICT_LOW_PART)
6291 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
6292 do_not_record = 0;
6293 hash_arg_in_memory = 0;
6294 hash_arg_in_struct = 0;
6295 src_eqv = fold_rtx (src_eqv, insn);
6296 src_eqv_hash = HASH (src_eqv, eqvmode);
6297
6298 /* Find the equivalence class for the equivalent expression. */
6299
6300 if (!do_not_record)
6301 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
6302
6303 src_eqv_volatile = do_not_record;
6304 src_eqv_in_memory = hash_arg_in_memory;
6305 src_eqv_in_struct = hash_arg_in_struct;
6306 }
6307
6308 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
6309 value of the INNER register, not the destination. So it is not
6310 a valid substitution for the source. But save it for later. */
6311 if (GET_CODE (dest) == STRICT_LOW_PART)
6312 src_eqv_here = 0;
6313 else
6314 src_eqv_here = src_eqv;
6315
6316 /* Simplify any foldable subexpressions in SRC. Then get the fully-
6317 simplified result, which may not necessarily be valid. */
6318 src_folded = fold_rtx (src, insn);
6319
6320 #if 0
6321 /* ??? This caused bad code to be generated for the m68k port with -O2.
6322 Suppose src is (CONST_INT -1), and that after truncation src_folded
6323 is (CONST_INT 3). Suppose src_folded is then used for src_const.
6324 At the end we will add src and src_const to the same equivalence
6325 class. We now have 3 and -1 on the same equivalence class. This
6326 causes later instructions to be mis-optimized. */
6327 /* If storing a constant in a bitfield, pre-truncate the constant
6328 so we will be able to record it later. */
6329 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
6330 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
6331 {
6332 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
6333
6334 if (GET_CODE (src) == CONST_INT
6335 && GET_CODE (width) == CONST_INT
6336 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
6337 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
6338 src_folded
6339 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
6340 << INTVAL (width)) - 1));
6341 }
6342 #endif
6343
6344 /* Compute SRC's hash code, and also notice if it
6345 should not be recorded at all. In that case,
6346 prevent any further processing of this assignment. */
6347 do_not_record = 0;
6348 hash_arg_in_memory = 0;
6349 hash_arg_in_struct = 0;
6350
6351 sets[i].src = src;
6352 sets[i].src_hash = HASH (src, mode);
6353 sets[i].src_volatile = do_not_record;
6354 sets[i].src_in_memory = hash_arg_in_memory;
6355 sets[i].src_in_struct = hash_arg_in_struct;
6356
6357 #if 0
6358 /* It is no longer clear why we used to do this, but it doesn't
6359 appear to still be needed. So let's try without it since this
6360 code hurts cse'ing widened ops. */
6361 /* If source is a perverse subreg (such as QI treated as an SI),
6362 treat it as volatile. It may do the work of an SI in one context
6363 where the extra bits are not being used, but cannot replace an SI
6364 in general. */
6365 if (GET_CODE (src) == SUBREG
6366 && (GET_MODE_SIZE (GET_MODE (src))
6367 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
6368 sets[i].src_volatile = 1;
6369 #endif
6370
6371 /* Locate all possible equivalent forms for SRC. Try to replace
6372 SRC in the insn with each cheaper equivalent.
6373
6374 We have the following types of equivalents: SRC itself, a folded
6375 version, a value given in a REG_EQUAL note, or a value related
6376 to a constant.
6377
6378 Each of these equivalents may be part of an additional class
6379 of equivalents (if more than one is in the table, they must be in
6380 the same class; we check for this).
6381
6382 If the source is volatile, we don't do any table lookups.
6383
6384 We note any constant equivalent for possible later use in a
6385 REG_NOTE. */
6386
6387 if (!sets[i].src_volatile)
6388 elt = lookup (src, sets[i].src_hash, mode);
6389
6390 sets[i].src_elt = elt;
6391
6392 if (elt && src_eqv_here && src_eqv_elt)
6393 {
6394 if (elt->first_same_value != src_eqv_elt->first_same_value)
6395 {
6396 /* The REG_EQUAL is indicating that two formerly distinct
6397 classes are now equivalent. So merge them. */
6398 merge_equiv_classes (elt, src_eqv_elt);
6399 src_eqv_hash = HASH (src_eqv, elt->mode);
6400 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
6401 }
6402
6403 src_eqv_here = 0;
6404 }
6405
6406 else if (src_eqv_elt)
6407 elt = src_eqv_elt;
6408
6409 /* Try to find a constant somewhere and record it in `src_const'.
6410 Record its table element, if any, in `src_const_elt'. Look in
6411 any known equivalences first. (If the constant is not in the
6412 table, also set `sets[i].src_const_hash'). */
6413 if (elt)
6414 for (p = elt->first_same_value; p; p = p->next_same_value)
6415 if (p->is_const)
6416 {
6417 src_const = p->exp;
6418 src_const_elt = elt;
6419 break;
6420 }
6421
6422 if (src_const == 0
6423 && (CONSTANT_P (src_folded)
6424 /* Consider (minus (label_ref L1) (label_ref L2)) as
6425 "constant" here so we will record it. This allows us
6426 to fold switch statements when an ADDR_DIFF_VEC is used. */
6427 || (GET_CODE (src_folded) == MINUS
6428 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
6429 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
6430 src_const = src_folded, src_const_elt = elt;
6431 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
6432 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
6433
6434 /* If we don't know if the constant is in the table, get its
6435 hash code and look it up. */
6436 if (src_const && src_const_elt == 0)
6437 {
6438 sets[i].src_const_hash = HASH (src_const, mode);
6439 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
6440 }
6441
6442 sets[i].src_const = src_const;
6443 sets[i].src_const_elt = src_const_elt;
6444
6445 /* If the constant and our source are both in the table, mark them as
6446 equivalent. Otherwise, if a constant is in the table but the source
6447 isn't, set ELT to it. */
6448 if (src_const_elt && elt
6449 && src_const_elt->first_same_value != elt->first_same_value)
6450 merge_equiv_classes (elt, src_const_elt);
6451 else if (src_const_elt && elt == 0)
6452 elt = src_const_elt;
6453
6454 /* See if there is a register linearly related to a constant
6455 equivalent of SRC. */
6456 if (src_const
6457 && (GET_CODE (src_const) == CONST
6458 || (src_const_elt && src_const_elt->related_value != 0)))
6459 {
6460 src_related = use_related_value (src_const, src_const_elt);
6461 if (src_related)
6462 {
6463 struct table_elt *src_related_elt
6464 = lookup (src_related, HASH (src_related, mode), mode);
6465 if (src_related_elt && elt)
6466 {
6467 if (elt->first_same_value
6468 != src_related_elt->first_same_value)
6469 /* This can occur when we previously saw a CONST
6470 involving a SYMBOL_REF and then see the SYMBOL_REF
6471 twice. Merge the involved classes. */
6472 merge_equiv_classes (elt, src_related_elt);
6473
6474 src_related = 0;
6475 src_related_elt = 0;
6476 }
6477 else if (src_related_elt && elt == 0)
6478 elt = src_related_elt;
6479 }
6480 }
6481
6482 /* See if we have a CONST_INT that is already in a register in a
6483 wider mode. */
6484
6485 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
6486 && GET_MODE_CLASS (mode) == MODE_INT
6487 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
6488 {
6489 enum machine_mode wider_mode;
6490
6491 for (wider_mode = GET_MODE_WIDER_MODE (mode);
6492 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
6493 && src_related == 0;
6494 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
6495 {
6496 struct table_elt *const_elt
6497 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
6498
6499 if (const_elt == 0)
6500 continue;
6501
6502 for (const_elt = const_elt->first_same_value;
6503 const_elt; const_elt = const_elt->next_same_value)
6504 if (GET_CODE (const_elt->exp) == REG)
6505 {
6506 src_related = gen_lowpart_if_possible (mode,
6507 const_elt->exp);
6508 break;
6509 }
6510 }
6511 }
6512
6513 /* Another possibility is that we have an AND with a constant in
6514 a mode narrower than a word. If so, it might have been generated
6515 as part of an "if" which would narrow the AND. If we already
6516 have done the AND in a wider mode, we can use a SUBREG of that
6517 value. */
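/* For example, if (reg:SI R) is already known to hold
   (and:SI X (const_int 255)), then a QImode AND of the low part of X
   with (const_int 255) can instead use the QImode low part of R.  */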
6518
6519 if (flag_expensive_optimizations && ! src_related
6520 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
6521 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6522 {
6523 enum machine_mode tmode;
6524 rtx new_and = gen_rtx (AND, VOIDmode, NULL_RTX, XEXP (src, 1));
6525
6526 for (tmode = GET_MODE_WIDER_MODE (mode);
6527 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6528 tmode = GET_MODE_WIDER_MODE (tmode))
6529 {
6530 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
6531 struct table_elt *larger_elt;
6532
6533 if (inner)
6534 {
6535 PUT_MODE (new_and, tmode);
6536 XEXP (new_and, 0) = inner;
6537 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
6538 if (larger_elt == 0)
6539 continue;
6540
6541 for (larger_elt = larger_elt->first_same_value;
6542 larger_elt; larger_elt = larger_elt->next_same_value)
6543 if (GET_CODE (larger_elt->exp) == REG)
6544 {
6545 src_related
6546 = gen_lowpart_if_possible (mode, larger_elt->exp);
6547 break;
6548 }
6549
6550 if (src_related)
6551 break;
6552 }
6553 }
6554 }
6555
6556 #ifdef LOAD_EXTEND_OP
6557 /* See if a MEM has already been loaded with a widening operation;
6558 if it has, we can use a subreg of that. Many CISC machines
6559 also have such operations, but this is only likely to be
6560 beneficial on these machines. */
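/* For example, if (reg:SI R) was loaded by
   (set (reg:SI R) (zero_extend:SI (mem:QI M))), a QImode load of M
   can use the QImode low part of R instead of re-reading memory.  */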
6561
6562 if (flag_expensive_optimizations && src_related == 0
6563 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6564 && GET_MODE_CLASS (mode) == MODE_INT
6565 && GET_CODE (src) == MEM && ! do_not_record
6566 && LOAD_EXTEND_OP (mode) != NIL)
6567 {
6568 enum machine_mode tmode;
6569
6570 /* Set what we are trying to extend and the operation it might
6571 have been extended with. */
6572 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
6573 XEXP (memory_extend_rtx, 0) = src;
6574
6575 for (tmode = GET_MODE_WIDER_MODE (mode);
6576 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6577 tmode = GET_MODE_WIDER_MODE (tmode))
6578 {
6579 struct table_elt *larger_elt;
6580
6581 PUT_MODE (memory_extend_rtx, tmode);
6582 larger_elt = lookup (memory_extend_rtx,
6583 HASH (memory_extend_rtx, tmode), tmode);
6584 if (larger_elt == 0)
6585 continue;
6586
6587 for (larger_elt = larger_elt->first_same_value;
6588 larger_elt; larger_elt = larger_elt->next_same_value)
6589 if (GET_CODE (larger_elt->exp) == REG)
6590 {
6591 src_related = gen_lowpart_if_possible (mode,
6592 larger_elt->exp);
6593 break;
6594 }
6595
6596 if (src_related)
6597 break;
6598 }
6599 }
6600 #endif /* LOAD_EXTEND_OP */
6601
6602 if (src == src_folded)
6603 src_folded = 0;
6604
6605 /* At this point, ELT, if non-zero, points to a class of expressions
6606 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
6607 and SRC_RELATED, if non-zero, each contain additional equivalent
6608 expressions. Prune these latter expressions by deleting expressions
6609 already in the equivalence class.
6610
6611 Check for an equivalent identical to the destination. If found,
6612 this is the preferred equivalent since it will likely lead to
6613 elimination of the insn. Indicate this by placing it in
6614 `src_related'. */
6615
6616 if (elt) elt = elt->first_same_value;
6617 for (p = elt; p; p = p->next_same_value)
6618 {
6619 enum rtx_code code = GET_CODE (p->exp);
6620
6621 /* If the expression is not valid, ignore it. Then we do not
6622 have to check for validity below. In most cases, we can use
6623 `rtx_equal_p', since canonicalization has already been done. */
6624 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
6625 continue;
6626
6627 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
6628 src = 0;
6629 else if (src_folded && GET_CODE (src_folded) == code
6630 && rtx_equal_p (src_folded, p->exp))
6631 src_folded = 0;
6632 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
6633 && rtx_equal_p (src_eqv_here, p->exp))
6634 src_eqv_here = 0;
6635 else if (src_related && GET_CODE (src_related) == code
6636 && rtx_equal_p (src_related, p->exp))
6637 src_related = 0;
6638
6639 /* If this is the same as the destination of the insn, we want
6640 to prefer it. Copy it to src_related. The code below will
6641 then give it a negative cost. */
6642 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
6643 src_related = dest;
6644
6645 }
6646
6647 /* Find the cheapest valid equivalent, trying all the available
6648 possibilities. Prefer items not in the hash table to ones
6649 that are when they are equal cost. Note that we can never
6650 worsen an insn as the current contents will also succeed.
6651 If we find an equivalent identical to the destination, use it as best,
6652 since this insn will probably be eliminated in that case. */
6653 if (src)
6654 {
6655 if (rtx_equal_p (src, dest))
6656 src_cost = -1;
6657 else
6658 src_cost = COST (src);
6659 }
6660
6661 if (src_eqv_here)
6662 {
6663 if (rtx_equal_p (src_eqv_here, dest))
6664 src_eqv_cost = -1;
6665 else
6666 src_eqv_cost = COST (src_eqv_here);
6667 }
6668
6669 if (src_folded)
6670 {
6671 if (rtx_equal_p (src_folded, dest))
6672 src_folded_cost = -1;
6673 else
6674 src_folded_cost = COST (src_folded);
6675 }
6676
6677 if (src_related)
6678 {
6679 if (rtx_equal_p (src_related, dest))
6680 src_related_cost = -1;
6681 else
6682 src_related_cost = COST (src_related);
6683 }
6684
6685 /* If this was an indirect jump insn, a known label will really be
6686 cheaper even though it looks more expensive. */
6687 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
6688 src_folded = src_const, src_folded_cost = -1;
6689
6690 /* Terminate loop when replacement made. This must terminate since
6691 the current contents will be tested and will always be valid. */
6692 while (1)
6693 {
6694 rtx trial;
6695
6696 /* Skip invalid entries. */
6697 while (elt && GET_CODE (elt->exp) != REG
6698 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6699 elt = elt->next_same_value;
6700
6701 if (elt) src_elt_cost = elt->cost;
6702
6703 /* Find cheapest and skip it for the next time. For items
6704 of equal cost, use this order:
6705 src_folded, src, src_eqv, src_related and hash table entry. */
6706 if (src_folded_cost <= src_cost
6707 && src_folded_cost <= src_eqv_cost
6708 && src_folded_cost <= src_related_cost
6709 && src_folded_cost <= src_elt_cost)
6710 {
6711 trial = src_folded, src_folded_cost = 10000;
6712 if (src_folded_force_flag)
6713 trial = force_const_mem (mode, trial);
6714 }
6715 else if (src_cost <= src_eqv_cost
6716 && src_cost <= src_related_cost
6717 && src_cost <= src_elt_cost)
6718 trial = src, src_cost = 10000;
6719 else if (src_eqv_cost <= src_related_cost
6720 && src_eqv_cost <= src_elt_cost)
6721 trial = copy_rtx (src_eqv_here), src_eqv_cost = 10000;
6722 else if (src_related_cost <= src_elt_cost)
6723 trial = copy_rtx (src_related), src_related_cost = 10000;
6724 else
6725 {
6726 trial = copy_rtx (elt->exp);
6727 elt = elt->next_same_value;
6728 src_elt_cost = 10000;
6729 }
6730
6731 /* We don't normally have an insn matching (set (pc) (pc)), so
6732 check for this separately here. We will delete such an
6733 insn below.
6734
6735 Tablejump insns contain a USE of the table, so simply replacing
6736 the operand with the constant won't match. This is simply an
6737 unconditional branch, however, and is therefore valid. Just
6738 insert the substitution here and we will delete and re-emit
6739 the insn later. */
6740
6741 if (n_sets == 1 && dest == pc_rtx
6742 && (trial == pc_rtx
6743 || (GET_CODE (trial) == LABEL_REF
6744 && ! condjump_p (insn))))
6745 {
6746 /* If TRIAL is a label in front of a jump table, we are
6747 really falling through the switch (this is how casesi
6748 insns work), so we must branch around the table. */
6749 if (GET_CODE (trial) == CODE_LABEL
6750 && NEXT_INSN (trial) != 0
6751 && GET_CODE (NEXT_INSN (trial)) == JUMP_INSN
6752 && (GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_DIFF_VEC
6753 || GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_VEC))
6754
6755 trial = gen_rtx (LABEL_REF, Pmode, get_label_after (trial));
6756
6757 SET_SRC (sets[i].rtl) = trial;
6758 cse_jumps_altered = 1;
6759 break;
6760 }
6761
6762 /* Look for a substitution that makes a valid insn. */
6763 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
6764 {
6765 /* The result of apply_change_group can be ignored; see
6766 canon_reg. */
6767
6768 validate_change (insn, &SET_SRC (sets[i].rtl),
6769 canon_reg (SET_SRC (sets[i].rtl), insn),
6770 1);
6771 apply_change_group ();
6772 break;
6773 }
6774
6775 /* If we previously found constant pool entries for
6776 constants and this is a constant, try making a
6777 pool entry. Put it in src_folded unless we have already done
6778 so, since that is where it likely came from. */
6779
6780 else if (constant_pool_entries_cost
6781 && CONSTANT_P (trial)
6782 && ! (GET_CODE (trial) == CONST
6783 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
6784 && (src_folded == 0
6785 || (GET_CODE (src_folded) != MEM
6786 && ! src_folded_force_flag))
6787 && GET_MODE_CLASS (mode) != MODE_CC)
6788 {
6789 src_folded_force_flag = 1;
6790 src_folded = trial;
6791 src_folded_cost = constant_pool_entries_cost;
6792 }
6793 }
6794
6795 src = SET_SRC (sets[i].rtl);
6796
6797 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
6798 However, there is an important exception: If both are registers
6799 that are not the head of their equivalence class, replace SET_SRC
6800 with the head of the class. If we do not do this, we will have
6801 both registers live over a portion of the basic block. This way,
6802 their lifetimes will likely abut instead of overlapping. */
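/* For example, if the insn is (set (reg R2) (reg R2)) and the head of
   R2's class is R1, rewrite it as (set (reg R2) (reg R1)); R1 can
   then die here instead of staying live alongside R2.  */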
6803 if (GET_CODE (dest) == REG
6804 && REGNO_QTY_VALID_P (REGNO (dest))
6805 && qty_mode[reg_qty[REGNO (dest)]] == GET_MODE (dest)
6806 && qty_first_reg[reg_qty[REGNO (dest)]] != REGNO (dest)
6807 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
6808 /* Don't do this if the original insn had a hard reg as
6809 SET_SRC. */
6810 && (GET_CODE (sets[i].src) != REG
6811 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER))
6812 /* We can't call canon_reg here because it won't do anything if
6813 SRC is a hard register. */
6814 {
6815 int first = qty_first_reg[reg_qty[REGNO (src)]];
6816
6817 src = SET_SRC (sets[i].rtl)
6818 = first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
6819 : gen_rtx (REG, GET_MODE (src), first);
6820
6821 /* If we had a constant that is cheaper than what we are now
6822 setting SRC to, use that constant. We ignored it when we
6823 thought we could make this into a no-op. */
6824 if (src_const && COST (src_const) < COST (src)
6825 && validate_change (insn, &SET_SRC (sets[i].rtl), src_const, 0))
6826 src = src_const;
6827 }
6828
6829 /* If we made a change, recompute SRC values. */
6830 if (src != sets[i].src)
6831 {
6832 do_not_record = 0;
6833 hash_arg_in_memory = 0;
6834 hash_arg_in_struct = 0;
6835 sets[i].src = src;
6836 sets[i].src_hash = HASH (src, mode);
6837 sets[i].src_volatile = do_not_record;
6838 sets[i].src_in_memory = hash_arg_in_memory;
6839 sets[i].src_in_struct = hash_arg_in_struct;
6840 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
6841 }
6842
6843 /* If this is a single SET, we are setting a register, and we have an
6844 equivalent constant, we want to add a REG_NOTE. We don't want
6845 to write a REG_EQUAL note for a constant pseudo since verifying that
6846 that pseudo hasn't been eliminated is a pain. Such a note also
6847 won't help anything. */
6848 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
6849 && GET_CODE (src_const) != REG)
6850 {
6851 tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6852
6853 /* Record the actual constant value in a REG_EQUAL note, making
6854 a new one if one does not already exist. */
6855 if (tem)
6856 XEXP (tem, 0) = src_const;
6857 else
6858 REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_EQUAL,
6859 src_const, REG_NOTES (insn));
6860
6861 /* If storing a constant value in a register that
6862 previously held the constant value 0,
6863 record this fact with a REG_WAS_0 note on this insn.
6864
6865 Note that the *register* is required to have previously held 0,
6866 not just any register in the quantity, and we must point to the
6867 insn that set that register to zero.
6868
6869 Rather than track each register individually, we just see if
6870 the last set for this quantity was for this register. */
6871
6872 if (REGNO_QTY_VALID_P (REGNO (dest))
6873 && qty_const[reg_qty[REGNO (dest)]] == const0_rtx)
6874 {
6875 /* See if we previously had a REG_WAS_0 note. */
6876 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
6877 rtx const_insn = qty_const_insn[reg_qty[REGNO (dest)]];
6878
6879 if ((tem = single_set (const_insn)) != 0
6880 && rtx_equal_p (SET_DEST (tem), dest))
6881 {
6882 if (note)
6883 XEXP (note, 0) = const_insn;
6884 else
6885 REG_NOTES (insn) = gen_rtx (INSN_LIST, REG_WAS_0,
6886 const_insn, REG_NOTES (insn));
6887 }
6888 }
6889 }
6890
6891 /* Now deal with the destination. */
6892 do_not_record = 0;
6893 sets[i].inner_dest_loc = &SET_DEST (sets[i].rtl);
6894
6895 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
6896 to the MEM or REG within it. */
6897 while (GET_CODE (dest) == SIGN_EXTRACT
6898 || GET_CODE (dest) == ZERO_EXTRACT
6899 || GET_CODE (dest) == SUBREG
6900 || GET_CODE (dest) == STRICT_LOW_PART)
6901 {
6902 sets[i].inner_dest_loc = &XEXP (dest, 0);
6903 dest = XEXP (dest, 0);
6904 }
6905
6906 sets[i].inner_dest = dest;
6907
6908 if (GET_CODE (dest) == MEM)
6909 {
6910 dest = fold_rtx (dest, insn);
6911
6912 /* Decide whether we invalidate everything in memory,
6913 or just things at non-fixed places.
6914 Writing a large aggregate must invalidate everything
6915 because we don't know how long it is. */
6916 note_mem_written (dest, &writes_memory);
6917 }
6918
6919 /* Compute the hash code of the destination now,
6920 before the effects of this instruction are recorded,
6921 since the register values used in the address computation
6922 are those before this instruction. */
6923 sets[i].dest_hash = HASH (dest, mode);
6924
6925 /* Don't enter a bit-field in the hash table
6926 because the value in it after the store
6927 may not equal what was stored, due to truncation. */
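/* For example, storing (const_int 5) into a 2-bit field leaves the
   field holding 1, not 5, so recording 5 as its value would be
   wrong.  */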
6928
6929 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
6930 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
6931 {
6932 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
6933
6934 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
6935 && GET_CODE (width) == CONST_INT
6936 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
6937 && ! (INTVAL (src_const)
6938 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
6939 /* Exception: if the value is constant,
6940 and it won't be truncated, record it. */
6941 ;
6942 else
6943 {
6944 /* This is chosen so that the destination will be invalidated
6945 but no new value will be recorded.
6946 We must invalidate because sometimes constant
6947 values can be recorded for bitfields. */
6948 sets[i].src_elt = 0;
6949 sets[i].src_volatile = 1;
6950 src_eqv = 0;
6951 src_eqv_elt = 0;
6952 }
6953 }
6954
6955 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
6956 the insn. */
6957 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
6958 {
6959 PUT_CODE (insn, NOTE);
6960 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
6961 NOTE_SOURCE_FILE (insn) = 0;
6962 cse_jumps_altered = 1;
6963 /* One less use of the label this insn used to jump to. */
6964 --LABEL_NUSES (JUMP_LABEL (insn));
6965 /* No more processing for this set. */
6966 sets[i].rtl = 0;
6967 }
6968
6969 /* If this SET is now setting PC to a label, we know it used to
6970 be a conditional or computed branch. So we see if we can follow
6971 it. If it was a computed branch, delete it and re-emit. */
6972 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
6973 {
6974 rtx p;
6975
6976 /* If this is not in the format for a simple branch and
6977 this is the only SET in it, re-emit it. */
6978 if (! simplejump_p (insn) && n_sets == 1)
6979 {
6980 rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
6981 JUMP_LABEL (new) = XEXP (src, 0);
6982 LABEL_NUSES (XEXP (src, 0))++;
6983 delete_insn (insn);
6984 insn = new;
6985 }
6986 else
6987 /* Otherwise, force rerecognition, since it probably had
6988 a different pattern before.
6989 This shouldn't really be necessary, since whatever
6990 changed the source value above should have done this.
6991 Until the right place is found, might as well do this here. */
6992 INSN_CODE (insn) = -1;
6993
6994 /* Now that we've converted this jump to an unconditional jump,
6995 there is dead code after it. Delete the dead code until we
6996 reach a BARRIER, the end of the function, or a label. Do
6997 not delete NOTEs except for NOTE_INSN_DELETED since later
6998 phases assume these notes are retained. */
6999
7000 p = insn;
7001
7002 while (NEXT_INSN (p) != 0
7003 && GET_CODE (NEXT_INSN (p)) != BARRIER
7004 && GET_CODE (NEXT_INSN (p)) != CODE_LABEL)
7005 {
7006 if (GET_CODE (NEXT_INSN (p)) != NOTE
7007 || NOTE_LINE_NUMBER (NEXT_INSN (p)) == NOTE_INSN_DELETED)
7008 delete_insn (NEXT_INSN (p));
7009 else
7010 p = NEXT_INSN (p);
7011 }
7012
7013 /* If we don't have a BARRIER immediately after INSN, put one there.
7014 Much code assumes that there are no NOTEs between a JUMP_INSN and
7015 BARRIER. */
7016
7017 if (NEXT_INSN (insn) == 0
7018 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
7019 emit_barrier_before (NEXT_INSN (insn));
7020
7021 /* We might have two BARRIERs separated by notes. Delete the second
7022 one if so. */
7023
7024 if (p != insn && NEXT_INSN (p) != 0
7025 && GET_CODE (NEXT_INSN (p)) == BARRIER)
7026 delete_insn (NEXT_INSN (p));
7027
7028 cse_jumps_altered = 1;
7029 sets[i].rtl = 0;
7030 }
7031
7032 /* If destination is volatile, invalidate it and then do no further
7033 processing for this assignment. */
7034
7035 else if (do_not_record)
7036 {
7037 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7038 || GET_CODE (dest) == MEM)
7039 invalidate (dest, VOIDmode);
7040 else if (GET_CODE (dest) == STRICT_LOW_PART
7041 || GET_CODE (dest) == ZERO_EXTRACT)
7042 invalidate (XEXP (dest, 0), GET_MODE (dest));
7043 sets[i].rtl = 0;
7044 }
7045
7046 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
7047 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
7048
7049 #ifdef HAVE_cc0
7050 /* If setting CC0, record what it was set to, or a constant, if it
7051 is equivalent to a constant. If it is being set to a floating-point
7052 value, make a COMPARE with the appropriate constant of 0. If we
7053 don't do this, later code can interpret this as a test against
7054 const0_rtx, which can cause problems if we try to put it into an
7055 insn as a floating-point operand. */
7056 if (dest == cc0_rtx)
7057 {
7058 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
7059 this_insn_cc0_mode = mode;
7060 if (FLOAT_MODE_P (mode))
7061 this_insn_cc0 = gen_rtx (COMPARE, VOIDmode, this_insn_cc0,
7062 CONST0_RTX (mode));
7063 }
7064 #endif
7065 }
7066
7067 /* Now enter all non-volatile source expressions in the hash table
7068 if they are not already present.
7069 Record their equivalence classes in src_elt.
7070 This way we can insert the corresponding destinations into
7071 the same classes even if the actual sources are no longer in them
7072 (having been invalidated). */
7073
7074 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
7075 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
7076 {
7077 register struct table_elt *elt;
7078 register struct table_elt *classp = sets[0].src_elt;
7079 rtx dest = SET_DEST (sets[0].rtl);
7080 enum machine_mode eqvmode = GET_MODE (dest);
7081
7082 if (GET_CODE (dest) == STRICT_LOW_PART)
7083 {
7084 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
7085 classp = 0;
7086 }
7087 if (insert_regs (src_eqv, classp, 0))
7088 {
7089 rehash_using_reg (src_eqv);
7090 src_eqv_hash = HASH (src_eqv, eqvmode);
7091 }
7092 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
7093 elt->in_memory = src_eqv_in_memory;
7094 elt->in_struct = src_eqv_in_struct;
7095 src_eqv_elt = elt;
7096
7097 /* Check to see if src_eqv_elt is the same as a set source which
7098 does not yet have an elt, and if so set the elt of the set source
7099 to src_eqv_elt. */
7100 for (i = 0; i < n_sets; i++)
7101 if (sets[i].rtl && sets[i].src_elt == 0
7102 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
7103 sets[i].src_elt = src_eqv_elt;
7104 }
7105
7106 for (i = 0; i < n_sets; i++)
7107 if (sets[i].rtl && ! sets[i].src_volatile
7108 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
7109 {
7110 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
7111 {
7112 /* REG_EQUAL in setting a STRICT_LOW_PART
7113 gives an equivalent for the entire destination register,
7114 not just for the subreg being stored in now.
7115 This is a more interesting equivalence, so we arrange later
7116 to treat the entire reg as the destination. */
7117 sets[i].src_elt = src_eqv_elt;
7118 sets[i].src_hash = src_eqv_hash;
7119 }
7120 else
7121 {
7122 /* Insert source and constant equivalent into hash table, if not
7123 already present. */
7124 register struct table_elt *classp = src_eqv_elt;
7125 register rtx src = sets[i].src;
7126 register rtx dest = SET_DEST (sets[i].rtl);
7127 enum machine_mode mode
7128 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
7129
7130 if (sets[i].src_elt == 0)
7131 {
7132 register struct table_elt *elt;
7133
7134 /* Note that these insert_regs calls cannot remove
7135 any of the src_elt's, because they would have failed to
7136 match if not still valid. */
7137 if (insert_regs (src, classp, 0))
7138 {
7139 rehash_using_reg (src);
7140 sets[i].src_hash = HASH (src, mode);
7141 }
7142 elt = insert (src, classp, sets[i].src_hash, mode);
7143 elt->in_memory = sets[i].src_in_memory;
7144 elt->in_struct = sets[i].src_in_struct;
7145 sets[i].src_elt = classp = elt;
7146 }
7147
7148 if (sets[i].src_const && sets[i].src_const_elt == 0
7149 && src != sets[i].src_const
7150 && ! rtx_equal_p (sets[i].src_const, src))
7151 sets[i].src_elt = insert (sets[i].src_const, classp,
7152 sets[i].src_const_hash, mode);
7153 }
7154 }
7155 else if (sets[i].src_elt == 0)
7156 /* If we did not insert the source into the hash table (e.g., it was
7157 volatile), note the equivalence class for the REG_EQUAL value, if any,
7158 so that the destination goes into that class. */
7159 sets[i].src_elt = src_eqv_elt;
7160
7161 invalidate_from_clobbers (&writes_memory, x);
7162
7163 /* Some registers are invalidated by subroutine calls. Memory is
7164 invalidated by non-constant calls. */
7165
7166 if (GET_CODE (insn) == CALL_INSN)
7167 {
7168 static struct write_data everything = {0, 1, 1, 1};
7169
7170 if (! CONST_CALL_P (insn))
7171 invalidate_memory (&everything);
7172 invalidate_for_call ();
7173 }
7174
7175 /* Now invalidate everything set by this instruction.
7176 If a SUBREG or other funny destination is being set,
7177 sets[i].rtl is still nonzero, so here we invalidate the reg
7178 a part of which is being set. */
7179
7180 for (i = 0; i < n_sets; i++)
7181 if (sets[i].rtl)
7182 {
7183 /* We can't use the inner dest, because the mode associated with
7184 a ZERO_EXTRACT is significant. */
7185 register rtx dest = SET_DEST (sets[i].rtl);
7186
7187 /* Needed for registers to remove the register from its
7188 previous quantity's chain.
7189 Needed for memory if this is a nonvarying address, unless
7190 we have just done an invalidate_memory that covers even those. */
7191 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7192 || (GET_CODE (dest) == MEM && ! writes_memory.all
7193 && ! cse_rtx_addr_varies_p (dest)))
7194 invalidate (dest, VOIDmode);
7195 else if (GET_CODE (dest) == STRICT_LOW_PART
7196 || GET_CODE (dest) == ZERO_EXTRACT)
7197 invalidate (XEXP (dest, 0), GET_MODE (dest));
7198 }
7199
7200 /* Make sure registers mentioned in destinations
7201 are safe for use in an expression to be inserted.
7202 This removes from the hash table
7203 any invalid entry that refers to one of these registers.
7204
7205 We don't care about the return value from mention_regs because
7206 we are going to hash the SET_DEST values unconditionally. */
7207
7208 for (i = 0; i < n_sets; i++)
7209 if (sets[i].rtl && GET_CODE (SET_DEST (sets[i].rtl)) != REG)
7210 mention_regs (SET_DEST (sets[i].rtl));
7211
7212 /* We may have just removed some of the src_elt's from the hash table.
7213 So replace each one with the current head of the same class. */
7214
7215 for (i = 0; i < n_sets; i++)
7216 if (sets[i].rtl)
7217 {
7218 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
7219 /* If elt was removed, find current head of same class,
7220 or 0 if nothing remains of that class. */
7221 {
7222 register struct table_elt *elt = sets[i].src_elt;
7223
7224 while (elt && elt->prev_same_value)
7225 elt = elt->prev_same_value;
7226
7227 while (elt && elt->first_same_value == 0)
7228 elt = elt->next_same_value;
7229 sets[i].src_elt = elt ? elt->first_same_value : 0;
7230 }
7231 }
7232
7233 /* Now insert the destinations into their equivalence classes. */
7234
7235 for (i = 0; i < n_sets; i++)
7236 if (sets[i].rtl)
7237 {
7238 register rtx dest = SET_DEST (sets[i].rtl);
7239 register struct table_elt *elt;
7240
7241 /* Don't record value if we are not supposed to risk allocating
7242 floating-point values in registers that might be wider than
7243 memory. */
7244 if ((flag_float_store
7245 && GET_CODE (dest) == MEM
7246 && FLOAT_MODE_P (GET_MODE (dest)))
7247 /* Don't record values of destinations set inside a libcall block
7248 since we might delete the libcall. Things should have been set
7249 up so we won't want to reuse such a value, but we play it safe
7250 here. */
7251 || in_libcall_block
7252 /* If we didn't put a REG_EQUAL value or a source into the hash
7253 table, there is no point in recording DEST. */
7254 || sets[i].src_elt == 0
7255 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
7256 or SIGN_EXTEND, don't record DEST since it can cause
7257 some tracking to be wrong.
7258
7259 ??? Think about this more later. */
7260 || (GET_CODE (dest) == SUBREG
7261 && (GET_MODE_SIZE (GET_MODE (dest))
7262 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7263 && (GET_CODE (sets[i].src) == SIGN_EXTEND
7264 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
7265 continue;
7266
7267 /* STRICT_LOW_PART isn't part of the value BEING set,
7268 and neither is the SUBREG inside it.
7269 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
7270 if (GET_CODE (dest) == STRICT_LOW_PART)
7271 dest = SUBREG_REG (XEXP (dest, 0));
7272
7273 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
7274 /* Registers must also be inserted into chains for quantities. */
7275 if (insert_regs (dest, sets[i].src_elt, 1))
7276 {
7277 /* If `insert_regs' changes something, the hash code must be
7278 recalculated. */
7279 rehash_using_reg (dest);
7280 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
7281 }
7282
7283 elt = insert (dest, sets[i].src_elt,
7284 sets[i].dest_hash, GET_MODE (dest));
7285 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
7286 && ! RTX_UNCHANGING_P (sets[i].inner_dest));
7287
7288 if (elt->in_memory)
7289 {
7290 /* This implicitly assumes a whole struct
7291 need not have MEM_IN_STRUCT_P.
7292 But a whole struct is *supposed* to have MEM_IN_STRUCT_P. */
7293 elt->in_struct = (MEM_IN_STRUCT_P (sets[i].inner_dest)
7294 || sets[i].inner_dest != SET_DEST (sets[i].rtl));
7295 }
7296
7297 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
7298 narrower than M2, and both M1 and M2 are the same number of words,
7299 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
7300 make that equivalence as well.
7301
7302 However, gen_lowpart_if_possible may produce a simpler value from
7303 one of BAR's equivalences than it does from BAR itself
7304 (e.g., if BAR was ZERO_EXTENDed from M2), so we scan all of
7305 BAR's equivalences. If we don't get a simplified form, make
7306 the SUBREG. It will not be used in an equivalence, but will
7307 cause two similar assignments to be detected.
7308
7309 Note the loop below will find SUBREG_REG (DEST) since we have
7310 already entered SRC and DEST of the SET in the table. */
7311
7312 if (GET_CODE (dest) == SUBREG
7313 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
7314 / UNITS_PER_WORD)
7315 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
7316 && (GET_MODE_SIZE (GET_MODE (dest))
7317 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7318 && sets[i].src_elt != 0)
7319 {
7320 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
7321 struct table_elt *elt, *classp = 0;
7322
7323 for (elt = sets[i].src_elt->first_same_value; elt;
7324 elt = elt->next_same_value)
7325 {
7326 rtx new_src = 0;
7327 unsigned src_hash;
7328 struct table_elt *src_elt;
7329
7330 /* Ignore invalid entries. */
7331 if (GET_CODE (elt->exp) != REG
7332 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
7333 continue;
7334
7335 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
7336 if (new_src == 0)
7337 new_src = gen_rtx (SUBREG, new_mode, elt->exp, 0);
7338
7339 src_hash = HASH (new_src, new_mode);
7340 src_elt = lookup (new_src, src_hash, new_mode);
7341
7342 /* Put the new source in the hash table if it isn't
7343 already. */
7344 if (src_elt == 0)
7345 {
7346 if (insert_regs (new_src, classp, 0))
7347 {
7348 rehash_using_reg (new_src);
7349 src_hash = HASH (new_src, new_mode);
7350 }
7351 src_elt = insert (new_src, classp, src_hash, new_mode);
7352 src_elt->in_memory = elt->in_memory;
7353 src_elt->in_struct = elt->in_struct;
7354 }
7355 else if (classp && classp != src_elt->first_same_value)
7356 /* Show that two things that we've seen before are
7357 actually the same. */
7358 merge_equiv_classes (src_elt, classp);
7359
7360 classp = src_elt->first_same_value;
7361 }
7362 }
7363 }
7364
7365 /* Special handling for (set REG0 REG1)
7366 where REG0 is the "cheapest", cheaper than REG1.
7367 After cse, REG1 will probably not be used in the sequel,
7368 so (if easily done) change this insn to (set REG1 REG0) and
7369 replace REG1 with REG0 in the previous insn that computed their value.
7370 Then REG1 will become a dead store and won't cloud the situation
7371 for later optimizations.
7372
7373 Do not make this change if REG1 is a hard register, because it will
7374 then be used in the sequel and we may be changing a two-operand insn
7375 into a three-operand insn.
7376
7377 Also do not do this if we are operating on a copy of INSN. */
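/* Illustrative sketch (the pseudo-register numbers are hypothetical):
   the transformation described above rewrites

     (set (reg:SI 101) (plus:SI (reg:SI 102) (reg:SI 103)))   ; PREV
     (set (reg:SI 100) (reg:SI 101))                          ; INSN

   as

     (set (reg:SI 100) (plus:SI (reg:SI 102) (reg:SI 103)))
     (set (reg:SI 101) (reg:SI 100))

   so that if pseudo 101 is never used again, the copy is a dead store
   that can later be deleted.  */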
7378
7379 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
7380 && NEXT_INSN (PREV_INSN (insn)) == insn
7381 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
7382 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
7383 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl)))
7384 && (qty_first_reg[reg_qty[REGNO (SET_SRC (sets[0].rtl))]]
7385 == REGNO (SET_DEST (sets[0].rtl))))
7386 {
7387 rtx prev = PREV_INSN (insn);
7388 while (prev && GET_CODE (prev) == NOTE)
7389 prev = PREV_INSN (prev);
7390
7391 if (prev && GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SET
7392 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl))
7393 {
7394 rtx dest = SET_DEST (sets[0].rtl);
7395 rtx note = find_reg_note (prev, REG_EQUIV, NULL_RTX);
7396
7397 validate_change (prev, & SET_DEST (PATTERN (prev)), dest, 1);
7398 validate_change (insn, & SET_DEST (sets[0].rtl),
7399 SET_SRC (sets[0].rtl), 1);
7400 validate_change (insn, & SET_SRC (sets[0].rtl), dest, 1);
7401 apply_change_group ();
7402
7403 /* If REG1 was equivalent to a constant, REG0 is not. */
7404 if (note)
7405 PUT_REG_NOTE_KIND (note, REG_EQUAL);
7406
7407 /* If there was a REG_WAS_0 note on PREV, remove it. Move
7408 any REG_WAS_0 note on INSN to PREV. */
7409 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
7410 if (note)
7411 remove_note (prev, note);
7412
7413 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
7414 if (note)
7415 {
7416 remove_note (insn, note);
7417 XEXP (note, 1) = REG_NOTES (prev);
7418 REG_NOTES (prev) = note;
7419 }
7420
7421 /* If INSN has a REG_EQUAL note, and this note mentions REG0,
7422 then we must delete it, because the value in REG0 has changed. */
7423 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7424 if (note && reg_mentioned_p (dest, XEXP (note, 0)))
7425 remove_note (insn, note);
7426 }
7427 }
7428
7429 /* If this is a conditional jump insn, record any known equivalences due to
7430 the condition being tested. */
7431
7432 last_jump_equiv_class = 0;
7433 if (GET_CODE (insn) == JUMP_INSN
7434 && n_sets == 1 && GET_CODE (x) == SET
7435 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
7436 record_jump_equiv (insn, 0);
7437
7438 #ifdef HAVE_cc0
7439 /* If the previous insn set CC0 and this insn no longer references CC0,
7440 delete the previous insn. Here we use the fact that nothing expects CC0
7441 to be valid over an insn, which is true until the final pass. */
7442 if (prev_insn && GET_CODE (prev_insn) == INSN
7443 && (tem = single_set (prev_insn)) != 0
7444 && SET_DEST (tem) == cc0_rtx
7445 && ! reg_mentioned_p (cc0_rtx, x))
7446 {
7447 PUT_CODE (prev_insn, NOTE);
7448 NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
7449 NOTE_SOURCE_FILE (prev_insn) = 0;
7450 }
7451
7452 prev_insn_cc0 = this_insn_cc0;
7453 prev_insn_cc0_mode = this_insn_cc0_mode;
7454 #endif
7455
7456 prev_insn = insn;
7457 }
7458 \f
7459 /* Store 1 in *WRITES_PTR for those categories of memory ref
7460 that must be invalidated when the expression WRITTEN is stored in.
7461 If WRITTEN is null, say everything must be invalidated. */
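/* Illustrative examples, assuming the member order {sp, var, nonscalar,
   all} implied by the static initializers in this file (the RTL operands
   are hypothetical):

     (mem:SI (pre_dec:SI (reg sp)))              -> sp only
     (mem:SI (plus:SI (reg fp) (const_int 8)))   -> var
     varying PLUS address (an array element)     -> var, nonscalar
     other varying, BLKmode, or (mem (scratch))  -> var, nonscalar, all  */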
7462
7463 static void
7464 note_mem_written (written, writes_ptr)
7465 rtx written;
7466 struct write_data *writes_ptr;
7467 {
7468 static struct write_data everything = {0, 1, 1, 1};
7469
7470 if (written == 0)
7471 *writes_ptr = everything;
7472 else if (GET_CODE (written) == MEM)
7473 {
7474 /* Pushing or popping the stack invalidates just the stack pointer. */
7475 rtx addr = XEXP (written, 0);
7476 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
7477 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
7478 && GET_CODE (XEXP (addr, 0)) == REG
7479 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
7480 {
7481 writes_ptr->sp = 1;
7482 return;
7483 }
7484 else if (GET_MODE (written) == BLKmode)
7485 *writes_ptr = everything;
7486 /* (mem (scratch)) means clobber everything. */
7487 else if (GET_CODE (addr) == SCRATCH)
7488 *writes_ptr = everything;
7489 else if (cse_rtx_addr_varies_p (written))
7490 {
7491 /* A varying address that is a sum indicates an array element,
7492 and that's just as good as a structure element
7493 in implying that we need not invalidate scalar variables.
7494 However, we must allow QImode aliasing of scalars, because the
7495 ANSI C standard allows character pointers to alias anything. */
7496 if (! ((MEM_IN_STRUCT_P (written)
7497 || GET_CODE (XEXP (written, 0)) == PLUS)
7498 && GET_MODE (written) != QImode))
7499 writes_ptr->all = 1;
7500 writes_ptr->nonscalar = 1;
7501 }
7502 writes_ptr->var = 1;
7503 }
7504 }
7505
7506 /* Perform invalidation on the basis of everything about an insn
7507 except for invalidating the actual places that are SET in it.
7508 This includes the places CLOBBERed, and anything that might
7509 alias with something that is SET or CLOBBERed.
7510
7511 W points to the writes_memory for this insn, a struct write_data
7512 saying which kinds of memory references must be invalidated.
7513 X is the pattern of the insn. */
7514
7515 static void
7516 invalidate_from_clobbers (w, x)
7517 struct write_data *w;
7518 rtx x;
7519 {
7520 /* If W->var is not set, W specifies no action.
7521 If W->all is set, this step gets all memory refs
7522 so they can be ignored in the rest of this function. */
7523 if (w->var)
7524 invalidate_memory (w);
7525
7526 if (w->sp)
7527 {
7528 if (reg_tick[STACK_POINTER_REGNUM] >= 0)
7529 reg_tick[STACK_POINTER_REGNUM]++;
7530
7531 /* This should be *very* rare. */
7532 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
7533 invalidate (stack_pointer_rtx, VOIDmode);
7534 }
7535
7536 if (GET_CODE (x) == CLOBBER)
7537 {
7538 rtx ref = XEXP (x, 0);
7539 if (ref)
7540 {
7541 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
7542 || (GET_CODE (ref) == MEM && ! w->all))
7543 invalidate (ref, VOIDmode);
7544 else if (GET_CODE (ref) == STRICT_LOW_PART
7545 || GET_CODE (ref) == ZERO_EXTRACT)
7546 invalidate (XEXP (ref, 0), GET_MODE (ref));
7547 }
7548 }
7549 else if (GET_CODE (x) == PARALLEL)
7550 {
7551 register int i;
7552 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
7553 {
7554 register rtx y = XVECEXP (x, 0, i);
7555 if (GET_CODE (y) == CLOBBER)
7556 {
7557 rtx ref = XEXP (y, 0);
7558 if (ref)
7559 {
7560 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
7561 || (GET_CODE (ref) == MEM && !w->all))
7562 invalidate (ref, VOIDmode);
7563 else if (GET_CODE (ref) == STRICT_LOW_PART
7564 || GET_CODE (ref) == ZERO_EXTRACT)
7565 invalidate (XEXP (ref, 0), GET_MODE (ref));
7566 }
7567 }
7568 }
7569 }
7570 }
7571 \f
7572 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
7573 and replace any registers in them with either an equivalent constant
7574 or the canonical form of the register. If we are inside an address,
7575 only do this if the address remains valid.
7576
7577 OBJECT is 0 except when within a MEM in which case it is the MEM.
7578
7579 Return the replacement for X. */
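/* Illustrative sketch (the registers and constant are hypothetical):
   if pseudo 100 is currently known to hold (const_int 7), a note

     (expr_list:REG_EQUAL (plus:SI (reg:SI 100) (reg:SI 101)) ...)

   comes back as

     (expr_list:REG_EQUAL (plus:SI (const_int 7) (reg:SI 101)) ...)

   with reg 101 replaced by the canonical register of its class, if
   that differs.  */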
7580
7581 static rtx
7582 cse_process_notes (x, object)
7583 rtx x;
7584 rtx object;
7585 {
7586 enum rtx_code code = GET_CODE (x);
7587 char *fmt = GET_RTX_FORMAT (code);
7588 int i;
7589
7590 switch (code)
7591 {
7592 case CONST_INT:
7593 case CONST:
7594 case SYMBOL_REF:
7595 case LABEL_REF:
7596 case CONST_DOUBLE:
7597 case PC:
7598 case CC0:
7599 case LO_SUM:
7600 return x;
7601
7602 case MEM:
7603 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
7604 return x;
7605
7606 case EXPR_LIST:
7607 case INSN_LIST:
7608 if (REG_NOTE_KIND (x) == REG_EQUAL)
7609 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
7610 if (XEXP (x, 1))
7611 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
7612 return x;
7613
7614 case SIGN_EXTEND:
7615 case ZERO_EXTEND:
7616 {
7617 rtx new = cse_process_notes (XEXP (x, 0), object);
7618 /* We don't substitute VOIDmode constants into these rtx,
7619 since they would impede folding. */
7620 if (GET_MODE (new) != VOIDmode)
7621 validate_change (object, &XEXP (x, 0), new, 0);
7622 return x;
7623 }
7624
7625 case REG:
7626 i = reg_qty[REGNO (x)];
7627
7628 /* Return a constant or a constant register. */
7629 if (REGNO_QTY_VALID_P (REGNO (x))
7630 && qty_const[i] != 0
7631 && (CONSTANT_P (qty_const[i])
7632 || GET_CODE (qty_const[i]) == REG))
7633 {
7634 rtx new = gen_lowpart_if_possible (GET_MODE (x), qty_const[i]);
7635 if (new)
7636 return new;
7637 }
7638
7639 /* Otherwise, canonicalize this register. */
7640 return canon_reg (x, NULL_RTX);
7641 }
7642
7643 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7644 if (fmt[i] == 'e')
7645 validate_change (object, &XEXP (x, i),
7646 cse_process_notes (XEXP (x, i), object), 0);
7647
7648 return x;
7649 }
7650 \f
7651 /* Find common subexpressions between the end test of a loop and the beginning
7652 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
7653
7654 Often we have a loop where an expression in the exit test is used
7655 in the body of the loop. For example "while (*p) *q++ = *p++;".
7656 Because of the way we duplicate the loop exit test in front of the loop,
7657 however, we don't detect that common subexpression. This will be caught
7658 when global cse is implemented, but this is quite a common case.
7659
7660 This function handles the most common cases of these common expressions.
7661 It is called after we have processed the basic block ending with the
7662 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
7663 jumps to a label used only once. */
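/* Illustrative layout (the test and labels are hypothetical):

       if (! *p) goto END;      ; duplicated exit test, ahead of the loop
     NOTE_INSN_LOOP_BEG
     TOP: ... body reads *p ...
       if (*p) goto TOP;        ; exit test at the bottom of the loop
     NOTE_INSN_LOOP_END
     END:

   The load of *p in the body repeats the load in the duplicated test,
   but the CODE_LABEL at TOP normally makes us forget that equivalence;
   the code below recovers such cases.  */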
7664
7665 static void
7666 cse_around_loop (loop_start)
7667 rtx loop_start;
7668 {
7669 rtx insn;
7670 int i;
7671 struct table_elt *p;
7672
7673 /* If the jump at the end of the loop doesn't go to the start, we don't
7674 do anything. */
7675 for (insn = PREV_INSN (loop_start);
7676 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
7677 insn = PREV_INSN (insn))
7678 ;
7679
7680 if (insn == 0
7681 || GET_CODE (insn) != NOTE
7682 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
7683 return;
7684
7685 /* If the last insn of the loop (the end test) was an NE comparison,
7686 we will interpret it as an EQ comparison, since we fell through
7687 the loop. Any equivalences resulting from that comparison are
7688 therefore not valid and must be invalidated. */
7689 if (last_jump_equiv_class)
7690 for (p = last_jump_equiv_class->first_same_value; p;
7691 p = p->next_same_value)
7692 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
7693 || (GET_CODE (p->exp) == SUBREG
7694 && GET_CODE (SUBREG_REG (p->exp)) == REG))
7695 invalidate (p->exp, VOIDmode);
7696 else if (GET_CODE (p->exp) == STRICT_LOW_PART
7697 || GET_CODE (p->exp) == ZERO_EXTRACT)
7698 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
7699
7700 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
7701 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
7702
7703 The only thing we do with SET_DEST is invalidate entries, so we
7704 can safely process each SET in order. It is slightly less efficient
7705 to do so, but we only want to handle the most common cases. */
7706
7707 for (insn = NEXT_INSN (loop_start);
7708 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
7709 && ! (GET_CODE (insn) == NOTE
7710 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
7711 insn = NEXT_INSN (insn))
7712 {
7713 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
7714 && (GET_CODE (PATTERN (insn)) == SET
7715 || GET_CODE (PATTERN (insn)) == CLOBBER))
7716 cse_set_around_loop (PATTERN (insn), insn, loop_start);
7717 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
7718 && GET_CODE (PATTERN (insn)) == PARALLEL)
7719 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7720 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
7721 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
7722 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
7723 loop_start);
7724 }
7725 }
7726 \f
7727 /* Variable used for communication between the next two routines. */
7728
7729 static struct write_data skipped_writes_memory;
7730
7731 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
7732 since they are done elsewhere. This function is called via note_stores. */
7733
7734 static void
7735 invalidate_skipped_set (dest, set)
7736 rtx dest;
7737 rtx set;
7738 {
7739 if (GET_CODE (set) == CLOBBER
7740 #ifdef HAVE_cc0
7741 || dest == cc0_rtx
7742 #endif
7743 || dest == pc_rtx)
7744 return;
7745
7746 if (GET_CODE (dest) == MEM)
7747 note_mem_written (dest, &skipped_writes_memory);
7748
7749 /* There are times when an address can appear varying and be a PLUS
7750 during this scan when it would be a fixed address were we to know
7751 the proper equivalences. So promote "nonscalar" to be "all". */
7752 if (skipped_writes_memory.nonscalar)
7753 skipped_writes_memory.all = 1;
7754
7755 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7756 || (! skipped_writes_memory.all && ! cse_rtx_addr_varies_p (dest)))
7757 invalidate (dest, VOIDmode);
7758 else if (GET_CODE (dest) == STRICT_LOW_PART
7759 || GET_CODE (dest) == ZERO_EXTRACT)
7760 invalidate (XEXP (dest, 0), GET_MODE (dest));
7761 }
7762
7763 /* Invalidate all insns from START up to the end of the function or the
7764 next label. This is called when we wish to CSE around a block that is
7765 conditionally executed. */
7766
7767 static void
7768 invalidate_skipped_block (start)
7769 rtx start;
7770 {
7771 rtx insn;
7772 static struct write_data init = {0, 0, 0, 0};
7773 static struct write_data everything = {0, 1, 1, 1};
7774
7775 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
7776 insn = NEXT_INSN (insn))
7777 {
7778 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
7779 continue;
7780
7781 skipped_writes_memory = init;
7782
7783 if (GET_CODE (insn) == CALL_INSN)
7784 {
7785 invalidate_for_call ();
7786 skipped_writes_memory = everything;
7787 }
7788
7789 note_stores (PATTERN (insn), invalidate_skipped_set);
7790 invalidate_from_clobbers (&skipped_writes_memory, PATTERN (insn));
7791 }
7792 }
7793 \f
7794 /* Used for communication between the following two routines; contains a
7795 value to be checked for modification. */
7796
7797 static rtx cse_check_loop_start_value;
7798
7799 /* If modifying X will modify the value in CSE_CHECK_LOOP_START_VALUE,
7800 indicate that fact by setting CSE_CHECK_LOOP_START_VALUE to 0. */
7801
7802 static void
7803 cse_check_loop_start (x, set)
7804 rtx x;
7805 rtx set;
7806 {
7807 if (cse_check_loop_start_value == 0
7808 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
7809 return;
7810
7811 if ((GET_CODE (x) == MEM && GET_CODE (cse_check_loop_start_value) == MEM)
7812 || reg_overlap_mentioned_p (x, cse_check_loop_start_value))
7813 cse_check_loop_start_value = 0;
7814 }
7815
7816 /* X is a SET or CLOBBER contained in INSN that was found near the start of
7817 a loop that starts with the label at LOOP_START.
7818
7819 If X is a SET, we see if its SET_SRC is currently in our hash table.
7820 If so, we see if it has a value equal to some register used only in the
7821 loop exit code (as marked by jump.c).
7822
7823 If those two conditions are true, we search backwards from the start of
7824 the loop to see if that same value was loaded into a register that still
7825 retains its value at the start of the loop.
7826
7827 If so, we insert an insn after the load to copy the destination of that
7828 load into the equivalent register and (try to) replace our SET_SRC with that
7829 register.
7830
7831 In any event, we invalidate whatever this SET or CLOBBER modifies. */
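/* Illustrative sketch (the register numbers are hypothetical).  Given,
   ahead of the loop,

     P:    (set (reg:SI 100) (mem:SI (reg:SI 101)))

   and near the start of the loop body,

     INSN: (set (reg:SI 102) (mem:SI (reg:SI 101)))

   where the memory value is known equal to a cheaper register, say
   reg 103, used only in the loop exit code, we emit

     (set (reg:SI 103) (reg:SI 100))

   just after P and rewrite INSN as (set (reg:SI 102) (reg:SI 103)),
   provided nothing between P and the loop start can change the value.  */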
7832
7833 static void
7834 cse_set_around_loop (x, insn, loop_start)
7835 rtx x;
7836 rtx insn;
7837 rtx loop_start;
7838 {
7839 struct table_elt *src_elt;
7840 static struct write_data init = {0, 0, 0, 0};
7841 struct write_data writes_memory;
7842
7843 writes_memory = init;
7844
7845 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
7846 are setting PC or CC0 or whose SET_SRC is already a register. */
7847 if (GET_CODE (x) == SET
7848 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
7849 && GET_CODE (SET_SRC (x)) != REG)
7850 {
7851 src_elt = lookup (SET_SRC (x),
7852 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
7853 GET_MODE (SET_DEST (x)));
7854
7855 if (src_elt)
7856 for (src_elt = src_elt->first_same_value; src_elt;
7857 src_elt = src_elt->next_same_value)
7858 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
7859 && COST (src_elt->exp) < COST (SET_SRC (x)))
7860 {
7861 rtx p, set;
7862
7863 /* Look for an insn in front of LOOP_START that sets
7864 something in the desired mode to SET_SRC (x) before we hit
7865 a label or CALL_INSN. */
7866
7867 for (p = prev_nonnote_insn (loop_start);
7868 p && GET_CODE (p) != CALL_INSN
7869 && GET_CODE (p) != CODE_LABEL;
7870 p = prev_nonnote_insn (p))
7871 if ((set = single_set (p)) != 0
7872 && GET_CODE (SET_DEST (set)) == REG
7873 && GET_MODE (SET_DEST (set)) == src_elt->mode
7874 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
7875 {
7876 /* We now have to ensure that nothing between P
7877 and LOOP_START modified anything referenced in
7878 SET_SRC (x). We know that nothing within the loop
7879 can modify it, or we would have invalidated it in
7880 the hash table. */
7881 rtx q;
7882
7883 cse_check_loop_start_value = SET_SRC (x);
7884 for (q = p; q != loop_start; q = NEXT_INSN (q))
7885 if (GET_RTX_CLASS (GET_CODE (q)) == 'i')
7886 note_stores (PATTERN (q), cse_check_loop_start);
7887
7888 /* If nothing was changed and we can replace our
7889 SET_SRC, add an insn after P to copy its destination
7890 to what we will be replacing SET_SRC with. */
7891 if (cse_check_loop_start_value
7892 && validate_change (insn, &SET_SRC (x),
7893 src_elt->exp, 0))
7894 emit_insn_after (gen_move_insn (src_elt->exp,
7895 SET_DEST (set)),
7896 p);
7897 break;
7898 }
7899 }
7900 }
7901
7902 /* Now invalidate anything modified by X. */
7903 note_mem_written (SET_DEST (x), &writes_memory);
7904
7905 if (writes_memory.var)
7906 invalidate_memory (&writes_memory);
7907
7908 /* See comment on similar code in cse_insn for explanation of these tests. */
7909 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
7910 || (GET_CODE (SET_DEST (x)) == MEM && ! writes_memory.all
7911 && ! cse_rtx_addr_varies_p (SET_DEST (x))))
7912 invalidate (SET_DEST (x), VOIDmode);
7913 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
7914 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
7915 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
7916 }
7917 \f
7918 /* Find the end of INSN's basic block and return its range,
7919 the total number of SETs in all the insns of the block, the last insn of the
7920 block, and the branch path.
7921
7922 The branch path indicates which branches should be followed. If a non-zero
7923 path size is specified, the block should be rescanned and a different set
7924 of branches will be taken. The branch path is only used if
7925 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
7926
7927 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
7928 used to describe the block. It is filled in with the information about
7929 the current block. The incoming structure's branch path, if any, is used
7930 to construct the output branch path. */
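/* Illustrative example (the insns are hypothetical; the statuses are
   those used below): after a scan that followed one conditional jump
   and skipped around another, the path might be

     path[0] = { branch = JUMP_A, status = TAKEN  }
     path[1] = { branch = JUMP_B, status = AROUND }

   On a rescan, the last entry whose status is not NOT_TAKEN is flipped
   to NOT_TAKEN (after trailing NOT_TAKEN entries are dropped), so
   successive calls enumerate the alternative extensions of the block.  */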
7931
7932 void
7933 cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
7934 rtx insn;
7935 struct cse_basic_block_data *data;
7936 int follow_jumps;
7937 int after_loop;
7938 int skip_blocks;
7939 {
7940 rtx p = insn, q;
7941 int nsets = 0;
7942 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
7943 rtx next = GET_RTX_CLASS (GET_CODE (insn)) == 'i' ? insn : next_real_insn (insn);
7944 int path_size = data->path_size;
7945 int path_entry = 0;
7946 int i;
7947
7948 /* Update the previous branch path, if any. If the last branch was
7949 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
7950 shorten the path by one and look at the previous branch. We know that
7951 at least one branch must have been taken if PATH_SIZE is non-zero. */
7952 while (path_size > 0)
7953 {
7954 if (data->path[path_size - 1].status != NOT_TAKEN)
7955 {
7956 data->path[path_size - 1].status = NOT_TAKEN;
7957 break;
7958 }
7959 else
7960 path_size--;
7961 }
7962
7963 /* Scan to end of this basic block. */
7964 while (p && GET_CODE (p) != CODE_LABEL)
7965 {
7966 /* Don't cse out the end of a loop. This makes a difference
7967 only for the unusual loops that always execute at least once;
7968 all other loops have labels there so we will stop in any case.
7969 Cse'ing out the end of the loop is dangerous because it
7970 might cause an invariant expression inside the loop
7971 to be reused after the end of the loop. This would make it
7972 hard to move the expression out of the loop in loop.c,
7973 especially if it is one of several equivalent expressions
7974 and loop.c would like to eliminate it.
7975
7976 If we are running after loop.c has finished, we can ignore
7977 the NOTE_INSN_LOOP_END. */
7978
7979 if (! after_loop && GET_CODE (p) == NOTE
7980 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
7981 break;
7982
7983 /* Don't cse over a call to setjmp; on some machines (e.g., the VAX)
7984 the regs restored by the longjmp come from
7985 a later time than the setjmp. */
7986 if (GET_CODE (p) == NOTE
7987 && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
7988 break;
7989
7990 /* A PARALLEL can have lots of SETs in it,
7991 especially if it is really an ASM_OPERANDS. */
7992 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
7993 && GET_CODE (PATTERN (p)) == PARALLEL)
7994 nsets += XVECLEN (PATTERN (p), 0);
7995 else if (GET_CODE (p) != NOTE)
7996 nsets += 1;
7997
7998 /* Ignore insns made by CSE; they cannot affect the boundaries of
7999 the basic block. */
8000
8001 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
8002 high_cuid = INSN_CUID (p);
8003 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
8004 low_cuid = INSN_CUID (p);
8005
8006 /* See if this insn is in our branch path. If it is and we are to
8007 take it, do so. */
8008 if (path_entry < path_size && data->path[path_entry].branch == p)
8009 {
8010 if (data->path[path_entry].status != NOT_TAKEN)
8011 p = JUMP_LABEL (p);
8012
8013 /* Point to next entry in path, if any. */
8014 path_entry++;
8015 }
8016
8017 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
8018 was specified, we haven't reached our maximum path length, there are
8019 insns following the target of the jump, this is the only use of the
8020 jump label, and the target label is preceded by a BARRIER.
8021
8022 Alternatively, we can follow the jump if it branches around a
8023 block of code and there are no other branches into the block.
8024 In this case invalidate_skipped_block will be called to invalidate any
8025 registers set in the block when following the jump. */
8026
8027 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
8028 && GET_CODE (p) == JUMP_INSN
8029 && GET_CODE (PATTERN (p)) == SET
8030 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
8031 && LABEL_NUSES (JUMP_LABEL (p)) == 1
8032 && NEXT_INSN (JUMP_LABEL (p)) != 0)
8033 {
8034 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
8035 if ((GET_CODE (q) != NOTE
8036 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
8037 || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
8038 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
8039 break;
8040
8041 /* If we ran into a BARRIER, this code is an extension of the
8042 basic block when the branch is taken. */
8043 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
8044 {
8045 /* Don't allow ourselves to keep walking around an
8046 always-executed loop. */
8047 if (next_real_insn (q) == next)
8048 {
8049 p = NEXT_INSN (p);
8050 continue;
8051 }
8052
8053 /* Similarly, don't put a branch in our path more than once. */
8054 for (i = 0; i < path_entry; i++)
8055 if (data->path[i].branch == p)
8056 break;
8057
8058 if (i != path_entry)
8059 break;
8060
8061 data->path[path_entry].branch = p;
8062 data->path[path_entry++].status = TAKEN;
8063
8064 /* This branch now ends our path. It was possible that we
8065 didn't see this branch the last time around (when the
8066 insn in front of the target was a JUMP_INSN that was
8067 turned into a no-op). */
8068 path_size = path_entry;
8069
8070 p = JUMP_LABEL (p);
8071 /* Mark block so we won't scan it again later. */
8072 PUT_MODE (NEXT_INSN (p), QImode);
8073 }
8074 /* Detect a branch around a block of code. */
8075 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
8076 {
8077 register rtx tmp;
8078
8079 if (next_real_insn (q) == next)
8080 {
8081 p = NEXT_INSN (p);
8082 continue;
8083 }
8084
8085 for (i = 0; i < path_entry; i++)
8086 if (data->path[i].branch == p)
8087 break;
8088
8089 if (i != path_entry)
8090 break;
8091
8092 /* This is no_labels_between_p (p, q) with an added check for
8093 reaching the end of a function (in case Q precedes P). */
8094 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
8095 if (GET_CODE (tmp) == CODE_LABEL)
8096 break;
8097
8098 if (tmp == q)
8099 {
8100 data->path[path_entry].branch = p;
8101 data->path[path_entry++].status = AROUND;
8102
8103 path_size = path_entry;
8104
8105 p = JUMP_LABEL (p);
8106 /* Mark block so we won't scan it again later. */
8107 PUT_MODE (NEXT_INSN (p), QImode);
8108 }
8109 }
8110 }
8111 p = NEXT_INSN (p);
8112 }
8113
8114 data->low_cuid = low_cuid;
8115 data->high_cuid = high_cuid;
8116 data->nsets = nsets;
8117 data->last = p;
8118
8119 /* If none of the jumps in the path were taken, set our path length to zero
8120 so a rescan won't be done. */
8121 for (i = path_size - 1; i >= 0; i--)
8122 if (data->path[i].status != NOT_TAKEN)
8123 break;
8124
8125 if (i == -1)
8126 data->path_size = 0;
8127 else
8128 data->path_size = path_size;
8129
8130 /* End the current branch path. */
8131 data->path[path_size].branch = 0;
8132 }
8133 \f
8134 /* Perform cse on the instructions of a function.
8135 F is the first instruction.
8136 NREGS is one plus the highest pseudo-reg number used in the function.
8137
8138 AFTER_LOOP is 1 if this is the cse call done after loop optimization
8139 (only if -frerun-cse-after-loop).
8140
8141 Returns 1 if jump_optimize should be redone due to simplifications
8142 in conditional jump instructions. */
8143
8144 int
8145 cse_main (f, nregs, after_loop, file)
8146 rtx f;
8147 int nregs;
8148 int after_loop;
8149 FILE *file;
8150 {
8151 struct cse_basic_block_data val;
8152 register rtx insn = f;
8153 register int i;
8154
8155 cse_jumps_altered = 0;
8156 constant_pool_entries_cost = 0;
8157 val.path_size = 0;
8158
8159 init_recog ();
8160
8161 max_reg = nregs;
8162
8163 all_minus_one = (int *) alloca (nregs * sizeof (int));
8164 consec_ints = (int *) alloca (nregs * sizeof (int));
8165
8166 for (i = 0; i < nregs; i++)
8167 {
8168 all_minus_one[i] = -1;
8169 consec_ints[i] = i;
8170 }
8171
8172 reg_next_eqv = (int *) alloca (nregs * sizeof (int));
8173 reg_prev_eqv = (int *) alloca (nregs * sizeof (int));
8174 reg_qty = (int *) alloca (nregs * sizeof (int));
8175 reg_in_table = (int *) alloca (nregs * sizeof (int));
8176 reg_tick = (int *) alloca (nregs * sizeof (int));
8177
8178 #ifdef LOAD_EXTEND_OP
8179
8180 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
8181 and change the code and mode as appropriate. */
8182 memory_extend_rtx = gen_rtx (ZERO_EXTEND, VOIDmode, 0);
8183 #endif
8184
8185 /* Discard all the free elements of the previous function
8186 since they are allocated in the temporary obstack. */
8187 bzero ((char *) table, sizeof table);
8188 free_element_chain = 0;
8189 n_elements_made = 0;
8190
8191 /* Find the largest uid. */
8192
8193 max_uid = get_max_uid ();
8194 uid_cuid = (int *) alloca ((max_uid + 1) * sizeof (int));
8195 bzero ((char *) uid_cuid, (max_uid + 1) * sizeof (int));
8196
8197 /* Compute the mapping from uids to cuids.
8198 CUIDs are numbers assigned to insns, like uids,
8199 except that cuids increase monotonically through the code.
8200 Don't assign cuids to line-number NOTEs, so that the distance in cuids
8201 between two insns is not affected by -g. */
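/* For example (the uids are hypothetical): insns with uids 5, 12 and 7,
   in that order, get cuids 1, 2 and 3; a line-number NOTE between the
   first two would share cuid 1 with the insn preceding it.  */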
8202
8203 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
8204 {
8205 if (GET_CODE (insn) != NOTE
8206 || NOTE_LINE_NUMBER (insn) < 0)
8207 INSN_CUID (insn) = ++i;
8208 else
8209 /* Give a line number note the same cuid as preceding insn. */
8210 INSN_CUID (insn) = i;
8211 }
8212
8213 /* Initialize which registers are clobbered by calls. */
8214
8215 CLEAR_HARD_REG_SET (regs_invalidated_by_call);
8216
8217 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8218 if ((call_used_regs[i]
8219 /* Used to check !fixed_regs[i] here, but that isn't safe;
8220 fixed regs are still call-clobbered, and sched can get
8221 confused if they can "live across calls".
8222
8223 The frame pointer is always preserved across calls. The arg
8224 pointer is preserved if it is fixed. The stack pointer usually is, unless
8225 RETURN_POPS_ARGS, in which case an explicit CLOBBER
8226 will be present. If we are generating PIC code, the PIC offset
8227 table register is preserved across calls. */
8228
8229 && i != STACK_POINTER_REGNUM
8230 && i != FRAME_POINTER_REGNUM
8231 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
8232 && i != HARD_FRAME_POINTER_REGNUM
8233 #endif
8234 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
8235 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
8236 #endif
8237 #if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
8238 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
8239 #endif
8240 )
8241 || global_regs[i])
8242 SET_HARD_REG_BIT (regs_invalidated_by_call, i);
8243
8244 /* Loop over basic blocks.
8245 Compute the maximum number of qty's needed for each basic block
8246 (which is 2 for each SET). */
8247 insn = f;
8248 while (insn)
8249 {
8250 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
8251 flag_cse_skip_blocks);
8252
8253 /* If this basic block was already processed or has no sets, skip it. */
8254 if (val.nsets == 0 || GET_MODE (insn) == QImode)
8255 {
8256 PUT_MODE (insn, VOIDmode);
8257 insn = (val.last ? NEXT_INSN (val.last) : 0);
8258 val.path_size = 0;
8259 continue;
8260 }
8261
8262 cse_basic_block_start = val.low_cuid;
8263 cse_basic_block_end = val.high_cuid;
8264 max_qty = val.nsets * 2;
8265
8266 if (file)
8267 fprintf (file, ";; Processing block from %d to %d, %d sets.\n",
8268 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
8269 val.nsets);
8270
8271 /* Make MAX_QTY bigger to give us room to optimize
8272 past the end of this basic block, if that should prove useful. */
8273 if (max_qty < 500)
8274 max_qty = 500;
8275
8276 max_qty += max_reg;
8277
8278 /* If this basic block is being extended by following certain jumps,
8279 (see `cse_end_of_basic_block'), we reprocess the code from the start.
8280 Otherwise, we start after this basic block. */
8281 if (val.path_size > 0)
8282 cse_basic_block (insn, val.last, val.path, 0);
8283 else
8284 {
8285 int old_cse_jumps_altered = cse_jumps_altered;
8286 rtx temp;
8287
8288 /* When cse changes a conditional jump to an unconditional
8289 jump, we want to reprocess the block, since it will give
8290 us a new branch path to investigate. */
8291 cse_jumps_altered = 0;
8292 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
8293 if (cse_jumps_altered == 0
8294 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
8295 insn = temp;
8296
8297 cse_jumps_altered |= old_cse_jumps_altered;
8298 }
8299
8300 #ifdef USE_C_ALLOCA
8301 alloca (0);
8302 #endif
8303 }
8304
8305 /* Tell refers_to_mem_p that qty_const info is not available. */
8306 qty_const = 0;
8307
8308 if (max_elements_made < n_elements_made)
8309 max_elements_made = n_elements_made;
8310
8311 return cse_jumps_altered;
8312 }
8313
8314 /* Process a single basic block. FROM and TO are the limits of the basic
8315 block. NEXT_BRANCH points to the branch path when following jumps or
8316 a null path when not following jumps.
8317
8318 AROUND_LOOP is non-zero if we are to try to cse around to the start of a
8319 loop. This is true when we are being called for the last time on a
8320 block and this CSE pass is before loop.c. */
8321
8322 static rtx
8323 cse_basic_block (from, to, next_branch, around_loop)
8324 register rtx from, to;
8325 struct branch_path *next_branch;
8326 int around_loop;
8327 {
8328 register rtx insn;
8329 int to_usage = 0;
8330 int in_libcall_block = 0;
8331
8332 /* Each of these arrays is undefined before max_reg, so only allocate
8333 the space actually needed and adjust the start below. */
8334
8335 qty_first_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8336 qty_last_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8337 qty_mode = (enum machine_mode *) alloca ((max_qty - max_reg) * sizeof (enum machine_mode));
8338 qty_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8339 qty_const_insn = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8340 qty_comparison_code
8341 = (enum rtx_code *) alloca ((max_qty - max_reg) * sizeof (enum rtx_code));
8342 qty_comparison_qty = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8343 qty_comparison_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8344
8345 qty_first_reg -= max_reg;
8346 qty_last_reg -= max_reg;
8347 qty_mode -= max_reg;
8348 qty_const -= max_reg;
8349 qty_const_insn -= max_reg;
8350 qty_comparison_code -= max_reg;
8351 qty_comparison_qty -= max_reg;
8352 qty_comparison_const -= max_reg;
8353
8354 new_basic_block ();
8355
8356 /* TO might be a label. If so, protect it from being deleted. */
8357 if (to != 0 && GET_CODE (to) == CODE_LABEL)
8358 ++LABEL_NUSES (to);
8359
8360 for (insn = from; insn != to; insn = NEXT_INSN (insn))
8361 {
8362 register enum rtx_code code;
8363
8364 /* See if this is a branch that is part of the path. If so, and it is
8365 to be taken, do so. */
8366 if (next_branch->branch == insn)
8367 {
8368 enum taken status = next_branch++->status;
8369 if (status != NOT_TAKEN)
8370 {
8371 if (status == TAKEN)
8372 record_jump_equiv (insn, 1);
8373 else
8374 invalidate_skipped_block (NEXT_INSN (insn));
8375
8376 /* Set the last insn as the jump insn; it doesn't affect cc0.
8377 Then follow this branch. */
8378 #ifdef HAVE_cc0
8379 prev_insn_cc0 = 0;
8380 #endif
8381 prev_insn = insn;
8382 insn = JUMP_LABEL (insn);
8383 continue;
8384 }
8385 }
8386
8387 code = GET_CODE (insn);
8388 if (GET_MODE (insn) == QImode)
8389 PUT_MODE (insn, VOIDmode);
8390
8391 if (GET_RTX_CLASS (code) == 'i')
8392 {
8393 /* Process notes first so we have all notes in canonical forms when
8394 looking for duplicate operations. */
8395
8396 if (REG_NOTES (insn))
8397 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
8398
8399 /* Track when we are inside a LIBCALL block. Inside such a block,
8400 we do not want to record destinations. The last insn of a
8401 LIBCALL block is not considered to be part of the block, since
8402 its destination is the result of the block and hence should be
8403 recorded. */
8404
8405 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
8406 in_libcall_block = 1;
8407 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
8408 in_libcall_block = 0;
8409
8410 cse_insn (insn, in_libcall_block);
8411 }
8412
8413 /* If INSN is now an unconditional jump, skip to the end of our
8414 basic block by pretending that we just did the last insn in the
8415 basic block. If we are jumping to the end of our block, show
8416 that we can have one usage of TO. */
8417
8418 if (simplejump_p (insn))
8419 {
8420 if (to == 0)
8421 return 0;
8422
8423 if (JUMP_LABEL (insn) == to)
8424 to_usage = 1;
8425
8426 /* Maybe TO was deleted because the jump is unconditional.
8427 If so, there is nothing left in this basic block. */
8428 /* ??? Perhaps it would be smarter to set TO
8429 to whatever follows this insn,
8430 and pretend the basic block had always ended here. */
8431 if (INSN_DELETED_P (to))
8432 break;
8433
8434 insn = PREV_INSN (to);
8435 }
8436
8437 /* See if it is ok to keep on going past the label
8438 which used to end our basic block. Remember that we incremented
8439 the count of that label, so we decrement it here. If we made
8440 a jump unconditional, TO_USAGE will be one; in that case, we don't
8441 want to count the use in that jump. */
8442
8443 if (to != 0 && NEXT_INSN (insn) == to
8444 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
8445 {
8446 struct cse_basic_block_data val;
8447 rtx prev;
8448
8449 insn = NEXT_INSN (to);
8450
8451 if (LABEL_NUSES (to) == 0)
8452 insn = delete_insn (to);
8453
8454 /* If TO was the last insn in the function, we are done. */
8455 if (insn == 0)
8456 return 0;
8457
8458 /* If TO was preceded by a BARRIER we are done with this block
8459 because it has no continuation. */
8460 prev = prev_nonnote_insn (to);
8461 if (prev && GET_CODE (prev) == BARRIER)
8462 return insn;
8463
8464 /* Find the end of the following block. Note that we won't be
8465 following branches in this case. */
8466 to_usage = 0;
8467 val.path_size = 0;
8468 cse_end_of_basic_block (insn, &val, 0, 0, 0);
8469
8470 /* If the tables we allocated have enough space left
8471 to handle all the SETs in the next basic block,
8472 continue through it. Otherwise, return,
8473 and that block will be scanned individually. */
8474 if (val.nsets * 2 + next_qty > max_qty)
8475 break;
8476
8477 cse_basic_block_start = val.low_cuid;
8478 cse_basic_block_end = val.high_cuid;
8479 to = val.last;
8480
8481 /* Prevent TO from being deleted if it is a label. */
8482 if (to != 0 && GET_CODE (to) == CODE_LABEL)
8483 ++LABEL_NUSES (to);
8484
8485 /* Back up so we process the first insn in the extension. */
8486 insn = PREV_INSN (insn);
8487 }
8488 }
8489
8490 if (next_qty > max_qty)
8491 abort ();
8492
8493 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
8494 the previous insn is the only insn that branches to the head of a loop,
8495 we can cse into the loop. Don't do this if we changed the jump
8496 structure of a loop unless we aren't going to be following jumps. */
8497
8498 if ((cse_jumps_altered == 0
8499 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
8500 && around_loop && to != 0
8501 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
8502 && GET_CODE (PREV_INSN (to)) == JUMP_INSN
8503 && JUMP_LABEL (PREV_INSN (to)) != 0
8504 && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
8505 cse_around_loop (JUMP_LABEL (PREV_INSN (to)));
8506
8507 return to ? NEXT_INSN (to) : 0;
8508 }
8509 \f
8510 /* Count the number of times registers are used (not set) in X.
8511 COUNTS is an array in which we accumulate the count; INCR is the amount
8512 added to the count for each register usage.
8513
8514 Don't count a usage of DEST, which is the SET_DEST of a SET which
8515 contains X in its SET_SRC. This is because such a SET does not
8516 modify the liveness of DEST. */
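/* Illustrative sketch (the register numbers are hypothetical): counting

     (set (reg:SI 100) (plus:SI (reg:SI 100) (reg:SI 101)))

   adds INCR to counts[101] only.  The destination itself is not a use,
   and the use of reg 100 inside the source is excluded (DEST is passed
   down) because deleting the insn would remove that use along with
   the set.  */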
8517
8518 static void
8519 count_reg_usage (x, counts, dest, incr)
8520 rtx x;
8521 int *counts;
8522 rtx dest;
8523 int incr;
8524 {
8525 enum rtx_code code;
8526 char *fmt;
8527 int i, j;
8528
8529 if (x == 0)
8530 return;
8531
8532 switch (code = GET_CODE (x))
8533 {
8534 case REG:
8535 if (x != dest)
8536 counts[REGNO (x)] += incr;
8537 return;
8538
8539 case PC:
8540 case CC0:
8541 case CONST:
8542 case CONST_INT:
8543 case CONST_DOUBLE:
8544 case SYMBOL_REF:
8545 case LABEL_REF:
8546 case CLOBBER:
8547 return;
8548
8549 case SET:
8550 /* Unless we are setting a REG, count everything in SET_DEST. */
8551 if (GET_CODE (SET_DEST (x)) != REG)
8552 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
8553
8554 /* If SRC has side-effects, then we can't delete this insn, so the
8555 usage of SET_DEST inside SRC counts.
8556
8557 ??? Strictly speaking, we might be preserving this insn
8558 because some other SET has side-effects, but that's hard
8559 to do and can't happen now. */
8560 count_reg_usage (SET_SRC (x), counts,
8561 side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
8562 incr);
8563 return;
8564
8565 case CALL_INSN:
8566 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
8567
8568 /* ... falls through ... */
8569 case INSN:
8570 case JUMP_INSN:
8571 count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);
8572
8573 /* Things used in a REG_EQUAL note aren't dead since loop may try to
8574 use them. */
8575
8576 count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
8577 return;
8578
8579 case EXPR_LIST:
8580 case INSN_LIST:
8581 if (REG_NOTE_KIND (x) == REG_EQUAL
8582 || GET_CODE (XEXP (x, 0)) == USE)
8583 count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
8584 count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
8585 return;
8586 }
8587
8588 fmt = GET_RTX_FORMAT (code);
8589 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8590 {
8591 if (fmt[i] == 'e')
8592 count_reg_usage (XEXP (x, i), counts, dest, incr);
8593 else if (fmt[i] == 'E')
8594 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8595 count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
8596 }
8597 }
8598 \f
8599 /* Scan all the insns and delete any that are dead; i.e., they set a register
8600 that is never used, or they copy a register to itself.
8601
8602 This is used to remove insns made obviously dead by cse. It improves the
8603 heuristics in loop since it won't try to move dead invariants out of loops
8604 or make givs for dead quantities. The remaining passes of the compilation
8605 are also sped up. */
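/* Illustrative sketch (the register numbers are hypothetical): if cse
   has left

     (set (reg:SI 100) (reg:SI 101))

   and counts[100] is zero, the insn is deleted and counts[101] is
   decremented, which may in turn make the insn that computed reg 101
   dead by the time the backward scan reaches it.  */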
8606
8607 void
8608 delete_dead_from_cse (insns, nreg)
8609 rtx insns;
8610 int nreg;
8611 {
8612 int *counts = (int *) alloca (nreg * sizeof (int));
8613 rtx insn, prev;
8614 rtx tem;
8615 int i;
8616 int in_libcall = 0;
8617
8618 /* First count the number of times each register is used. */
8619 bzero ((char *) counts, sizeof (int) * nreg);
8620 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
8621 count_reg_usage (insn, counts, NULL_RTX, 1);
8622
8623 /* Go from the last insn to the first and delete insns that only set unused
8624 registers or copy a register to itself. As we delete an insn, remove
8625 usage counts for registers it uses. */
8626 for (insn = prev_real_insn (get_last_insn ()); insn; insn = prev)
8627 {
8628 int live_insn = 0;
8629
8630 prev = prev_real_insn (insn);
8631
8632 /* Don't delete any insns that are part of a libcall block.
8633 Flow or loop might get confused if we did that. Remember
8634 that we are scanning backwards. */
8635 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
8636 in_libcall = 1;
8637
8638 if (in_libcall)
8639 live_insn = 1;
8640 else if (GET_CODE (PATTERN (insn)) == SET)
8641 {
8642 if (GET_CODE (SET_DEST (PATTERN (insn))) == REG
8643 && SET_DEST (PATTERN (insn)) == SET_SRC (PATTERN (insn)))
8644 ;
8645
8646 #ifdef HAVE_cc0
8647 else if (GET_CODE (SET_DEST (PATTERN (insn))) == CC0
8648 && ! side_effects_p (SET_SRC (PATTERN (insn)))
8649 && ((tem = next_nonnote_insn (insn)) == 0
8650 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
8651 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
8652 ;
8653 #endif
8654 else if (GET_CODE (SET_DEST (PATTERN (insn))) != REG
8655 || REGNO (SET_DEST (PATTERN (insn))) < FIRST_PSEUDO_REGISTER
8656 || counts[REGNO (SET_DEST (PATTERN (insn)))] != 0
8657 || side_effects_p (SET_SRC (PATTERN (insn))))
8658 live_insn = 1;
8659 }
8660 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
8661 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
8662 {
8663 rtx elt = XVECEXP (PATTERN (insn), 0, i);
8664
8665 if (GET_CODE (elt) == SET)
8666 {
8667 if (GET_CODE (SET_DEST (elt)) == REG
8668 && SET_DEST (elt) == SET_SRC (elt))
8669 ;
8670
8671 #ifdef HAVE_cc0
8672 else if (GET_CODE (SET_DEST (elt)) == CC0
8673 && ! side_effects_p (SET_SRC (elt))
8674 && ((tem = next_nonnote_insn (insn)) == 0
8675 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
8676 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
8677 ;
8678 #endif
8679 else if (GET_CODE (SET_DEST (elt)) != REG
8680 || REGNO (SET_DEST (elt)) < FIRST_PSEUDO_REGISTER
8681 || counts[REGNO (SET_DEST (elt))] != 0
8682 || side_effects_p (SET_SRC (elt)))
8683 live_insn = 1;
8684 }
8685 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
8686 live_insn = 1;
8687 }
8688 else
8689 live_insn = 1;
8690
8691 /* If this is a dead insn, delete it and show registers in it aren't
8692 being used. */
8693
8694 if (! live_insn)
8695 {
8696 count_reg_usage (insn, counts, NULL_RTX, -1);
8697 delete_insn (insn);
8698 }
8699
8700 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
8701 in_libcall = 0;
8702 }
8703 }