/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 88, 89, 92, 93, 94, 1995 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


#include "config.h"
/* Must precede rtl.h for FFS.  */
#include <stdio.h>

#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"

#include <setjmp.h>

/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge; so, at each label, we forget all that is
   known and start fresh.  This can be described as processing each
   basic block separately.  Note, however, that these are not quite
   the same as the basic blocks found by a later pass and used for
   data flow analysis and register packing.  We do not need to start fresh
   after a conditional jump instruction if there is no label there.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and several vectors together
   with "quantity numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `reg_qty' records what quantity a register is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to `max_reg'.
   If register N has not been assigned a quantity, reg_qty[N] will equal N.

   Quantity numbers below `max_reg' do not exist and none of the `qty_...'
   variables should be referenced with an index below `max_reg'.

   We also maintain a bidirectional chain of registers for each
   quantity number.  `qty_first_reg', `qty_last_reg',
   `reg_next_eqv' and `reg_prev_eqv' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, it must be true that
   REG expressions with the mode of `qty_mode' are in the hash table for
   both registers and are in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity number of one
   of the registers does not have the same mode as those expressions.

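   For illustration (an example added for exposition; the register
   numbers are hypothetical), suppose we scan

	(set (reg 103) (reg 101))
	(set (reg 104) (plus:SI (reg 103) (const_int 4)))

   The first insn is a register-register copy, so reg 103 simply
   receives reg 101's quantity number and joins its chain.  The second
   insn loads reg 104 some other way, so reg 104 is given a brand new
   quantity number from `next_qty'.
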
Constants and quantity numbers

   When a quantity has a known constant value, that value is stored
   in the appropriate element of qty_const.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified, by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate element
   of qty_const.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.

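   For instance (an example added for exposition), after

	(set (reg:SI 105) (const_int 4))
	(set (reg:DI 106) (const_int 4))

   the single VOIDmode rtx `(const_int 4)' ends up entered twice, once
   recorded with mode SImode and once with DImode, so the uses in the
   two modes never look equivalent to each other.
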
Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and `qty_first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   The vectors `reg_tick' and `reg_in_table' are used to detect this case.
   reg_tick[i] is incremented whenever a value is stored in register i.
   reg_in_table[i] holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value reg_tick[i] had
   when the references were entered.  If we want to enter a reference
   and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
   Until we want to enter a new entry, the mere fact that the two vectors
   don't match causes the entries to be ignored if anyone tries to match them.

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, the vectors `reg_tick'
   and `reg_in_table' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
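
/* For example (illustrative; added for exposition): once

	(set (reg 105) (const (plus (symbol_ref "tbl") (const_int 8))))

   has been processed, the CONST expression is in the table and the
   integer-free subexpression `(symbol_ref "tbl")' is entered as well,
   the two being linked on a circular `related_value' chain.  A later
   need for `(const (plus (symbol_ref "tbl") (const_int 12)))' can then
   be met by computing reg 105 plus 4 when no register holds the new
   constant directly (see `use_related_value').  */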

/* One plus largest register number used in this function.  */

static int max_reg;

/* Length of vectors indexed by quantity number.
   We know in advance we will not need a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Indexed by quantity number, gives the first (or last) (pseudo) register
   in the chain of registers that currently contain this quantity.  */

static int *qty_first_reg;
static int *qty_last_reg;

/* Indexed by quantity number, gives the mode of the quantity.  */

static enum machine_mode *qty_mode;

/* Indexed by quantity number, gives the rtx of the constant value of the
   quantity, or zero if it does not have a known value.
   A sum of the frame pointer (or arg pointer) plus a constant
   can also be entered here.  */

static rtx *qty_const;

/* Indexed by qty number, gives the insn that stored the constant value
   recorded in `qty_const'.  */

static rtx *qty_const_insn;

/* The next three variables are used to track when a comparison between a
   quantity and some constant or register has been passed.  In that case, we
   know the results of the comparison in case we see it again.  These variables
   record a comparison that is known to be true.  */

/* Indexed by qty number, gives the rtx code of a comparison with a known
   result involving this quantity.  If none, it is UNKNOWN.  */
static enum rtx_code *qty_comparison_code;

/* Indexed by qty number, gives the constant being compared against in a
   comparison of known result.  If no such comparison, it is undefined.
   If the comparison is not with a constant, it is zero.  */

static rtx *qty_comparison_const;

/* Indexed by qty number, gives the quantity being compared against in a
   comparison of known result.  If no such comparison, it is undefined.
   If the comparison is not with a register, it is -1.  */

static int *qty_comparison_qty;

#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the value last assigned to CC0.  If it should
   happen to be a constant, it is stored in preference to the actual
   assigned value.  In case it is a constant, we store the mode in which
   the constant should be interpreted.  */

static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;
#endif

/* Previous actual insn.  0 if at first insn of basic block.  */

static rtx prev_insn;

/* Insn being scanned.  */

static rtx this_insn;

/* Indexed by (pseudo) register number, gives the quantity number
   of the register's current contents.  */

static int *reg_qty;

/* Indexed by (pseudo) register number, gives the number of the next (or
   previous) (pseudo) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If reg_qty[N] == N, reg_next_eqv[N] is undefined.  */

static int *reg_next_eqv;
static int *reg_prev_eqv;

/* Indexed by (pseudo) register number, gives the number of times
   that register has been altered in the current basic block.  */

static int *reg_tick;

/* Indexed by (pseudo) register number, gives the reg_tick value at which
   rtx's containing this register are valid in the hash table.
   If this does not equal the current reg_tick value, such expressions
   existing in the hash table are invalid.
   If this is -1, no expressions containing this register have been
   entered in the table.  */

static int *reg_in_table;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* A HARD_REG_SET containing all the hard registers that are invalidated
   by a CALL_INSN.  */

static HARD_REG_SET regs_invalidated_by_call;

/* Two vectors of ints:
   one containing max_reg elements, all -1; the other containing
   max_reg + 500 (an approximation for max_qty) elements, where
   element i contains i.
   These are used to initialize various other vectors fast.  */

static int *all_minus_one;
static int *consec_ints;

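/* A minimal sketch (added for exposition) of how the two template
   vectors above are used: the per-register state is reset for each
   basic block with block copies instead of explicit loops,

	bcopy ((char *) all_minus_one, (char *) reg_in_table,
	       max_reg * sizeof (int));
	bcopy ((char *) consec_ints, (char *) reg_qty,
	       max_reg * sizeof (int));

   which leaves reg_in_table[i] == -1 and reg_qty[i] == i for every i;
   this is exactly what `new_basic_block' below does.  */
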
/* CUID of insn that starts the basic block currently being cse-processed.  */

static int cse_basic_block_start;

/* CUID of insn that ends the basic block currently being cse-processed.  */

static int cse_basic_block_end;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
   We use them to see whether a reg is used outside a given basic block.  */

static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Nonzero if cse has altered conditional jump insns
   in such a way that jump optimization should be redone.  */

static int cse_jumps_altered;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

#ifdef LOAD_EXTEND_OP

/* Scratch rtl used when looking for load-extended copy of a MEM.  */
static rtx memory_extend_rtx;
#endif

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* canon_hash stores 1 in hash_arg_in_struct
   if it notices a reference to memory that's part of a structure.  */

static int hash_arg_in_struct;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   are on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `in_struct' field is nonzero for elements that
   involve any reference to memory inside a structure or array.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */

struct table_elt
{
  rtx exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  enum machine_mode mode;
  char in_memory;
  char in_struct;
  char is_const;
  char flag;
};

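/* An illustrative picture (added for exposition): an equivalence class
   holding, say, `(const_int 0)', `(reg 58)' and `(mem (reg 59))' is
   chained through next_same_value/prev_same_value in order of
   increasing cost, and every element's first_same_value points at the
   head:

	(const_int 0)  <->  (reg 58)  <->  (mem (reg 59))

   so the cheapest known equivalent is reachable in constant time from
   any member of the class.  */
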
/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define NBUCKETS 31

/* Compute hash code of X in mode M.  Special-case the case where X is a
   pseudo register (hard registers may require `do_not_record' to be set).  */

#define HASH(X, M) \
 (GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER \
  ? (((unsigned) REG << 7) + (unsigned) reg_qty[REGNO (X)]) % NBUCKETS \
  : canon_hash (X, M) % NBUCKETS)
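
/* A note by way of example (added for exposition): because HASH uses
   the quantity number for pseudos, two pseudos currently known to hold
   the same value hash identically; e.g.

	HASH (gen_rtx (REG, SImode, 101), SImode)
	HASH (gen_rtx (REG, SImode, 103), SImode)

   yield the same bucket whenever reg_qty[101] == reg_qty[103].  This is
   what lets expressions mentioning either register be found under one
   canonical hash code, per the header comment's canonicalization rules.  */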

/* Determine whether register number N is considered a fixed register for CSE.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed,
   but not if it is an overlapping register.  */
#ifdef OVERLAPPING_REGNO_P
#define FIXED_REGNO_P(N)  \
  (((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
    || fixed_regs[N] || global_regs[N]) \
   && ! OVERLAPPING_REGNO_P ((N)))
#else
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])
#endif

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of one and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N) \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM \
   || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
   || ((N) < FIRST_PSEUDO_REGISTER \
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

/* A register is cheap if it is a user variable assigned to the register
   or if its register number always corresponds to a cheap register.  */

#define CHEAP_REG(N) \
  ((REG_USERVAR_P (N) && REGNO (N) < FIRST_PSEUDO_REGISTER) \
   || CHEAP_REGNO (REGNO (N)))

#define COST(X) \
  (GET_CODE (X) == REG \
   ? (CHEAP_REG (X) ? 0 \
      : REGNO (X) >= FIRST_PSEUDO_REGISTER ? 1 \
      : 2) \
   : rtx_cost (X, SET) * 2)

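/* Worked example (added for exposition): with the definitions above,
   COST (frame_pointer_rtx) is 0, the COST of an ordinary pseudo such as
   (reg 101) is 1, the COST of a non-fixed hard register is 2, and
   anything that is not a REG falls through to rtx_cost doubled; e.g. a
   MULT by a non-power-of-two constant starts at
   2 * COSTS_N_INSNS (5) == 36, plus twice the costs of its operands.  */
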
/* Determine if the quantity number for register X represents a valid index
   into the `qty_...' variables.  */

#define REGNO_QTY_VALID_P(N) (reg_qty[N] != (N))

static struct table_elt *table[NBUCKETS];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Number of `struct table_elt' structures made so far for this function.  */

static int n_elements_made;

/* Maximum value `n_elements_made' has had so far in this compilation
   for functions previously processed.  */

static int max_elements_made;

/* Surviving equivalence class when two equivalence classes are merged
   by recording the effects of a jump in the last insn.  Zero if the
   last insn was not a conditional jump.  */

static struct table_elt *last_jump_equiv_class;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;

/* Bits describing what kind of values in memory must be invalidated
   for a particular instruction.  If all the bits are zero,
   no memory refs need to be invalidated.  Each bit is more powerful
   than the preceding ones, and if a bit is set then the preceding
   bits are also set.

   Here is how the bits are set:
   Pushing onto the stack invalidates only the stack pointer,
   writing at a fixed address invalidates only variable addresses,
   writing in a structure element at variable address
     invalidates all but scalar variables,
   and writing in anything else at variable address invalidates everything.  */

struct write_data
{
  int sp : 1;			/* Invalidate stack pointer.  */
  int var : 1;			/* Invalidate variable addresses.  */
  int nonscalar : 1;		/* Invalidate all but scalar variables.  */
  int all : 1;			/* Invalidate all memory refs.  */
};

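/* For instance (illustrative sketch, added for exposition), a simple
   push would be summarized as

	struct write_data w = {1, 0, 0, 0};	   only sp is set

   while a store through an arbitrary pointer sets every field,
   {1, 1, 1, 1}, since a set bit implies all of the preceding bits.  */
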
/* Define maximum length of a branch path.  */

#define PATHLENGTH 10

/* This data describes a block that will be processed by cse_basic_block.  */

struct cse_basic_block_data {
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path {
    /* The branch insn.  */
    rtx branch;
    /* Whether it should be taken or not.  AROUND is the same as TAKEN,
       except that it is used when the destination label is not preceded
       by a BARRIER.  */
    enum taken {TAKEN, NOT_TAKEN, AROUND} status;
  } path[PATHLENGTH];
};

/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.  */

#define FIXED_BASE_PLUS_P(X) \
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
   || (X) == arg_pointer_rtx \
   || (X) == virtual_stack_vars_rtx \
   || (X) == virtual_incoming_args_rtx \
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx \
	   || XEXP (X, 0) == hard_frame_pointer_rtx \
	   || XEXP (X, 0) == arg_pointer_rtx \
	   || XEXP (X, 0) == virtual_stack_vars_rtx \
	   || XEXP (X, 0) == virtual_incoming_args_rtx)))

/* Similar, but also allows reference to the stack pointer.

   This used to include FIXED_BASE_PLUS_P; however, we can't assume that
   arg_pointer_rtx by itself is nonzero, because on at least one machine,
   the i960, the arg pointer is zero when it is unused.  */

#define NONZERO_BASE_PLUS_P(X) \
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
   || (X) == virtual_stack_vars_rtx \
   || (X) == virtual_incoming_args_rtx \
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx \
	   || XEXP (X, 0) == hard_frame_pointer_rtx \
	   || XEXP (X, 0) == arg_pointer_rtx \
	   || XEXP (X, 0) == virtual_stack_vars_rtx \
	   || XEXP (X, 0) == virtual_incoming_args_rtx)) \
   || (X) == stack_pointer_rtx \
   || (X) == virtual_stack_dynamic_rtx \
   || (X) == virtual_outgoing_args_rtx \
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == stack_pointer_rtx \
	   || XEXP (X, 0) == virtual_stack_dynamic_rtx \
	   || XEXP (X, 0) == virtual_outgoing_args_rtx)))

static void new_basic_block PROTO((void));
static void make_new_qty PROTO((int));
static void make_regs_eqv PROTO((int, int));
static void delete_reg_equiv PROTO((int));
static int mention_regs PROTO((rtx));
static int insert_regs PROTO((rtx, struct table_elt *, int));
static void free_element PROTO((struct table_elt *));
static void remove_from_table PROTO((struct table_elt *, unsigned));
static struct table_elt *get_element PROTO((void));
static struct table_elt *lookup PROTO((rtx, unsigned, enum machine_mode)),
       *lookup_for_remove PROTO((rtx, unsigned, enum machine_mode));
static rtx lookup_as_function PROTO((rtx, enum rtx_code));
static struct table_elt *insert PROTO((rtx, struct table_elt *, unsigned,
				       enum machine_mode));
static void merge_equiv_classes PROTO((struct table_elt *,
				       struct table_elt *));
static void invalidate PROTO((rtx, enum machine_mode));
static void remove_invalid_refs PROTO((int));
static void rehash_using_reg PROTO((rtx));
static void invalidate_memory PROTO((struct write_data *));
static void invalidate_for_call PROTO((void));
static rtx use_related_value PROTO((rtx, struct table_elt *));
static unsigned canon_hash PROTO((rtx, enum machine_mode));
static unsigned safe_hash PROTO((rtx, enum machine_mode));
static int exp_equiv_p PROTO((rtx, rtx, int, int));
static void set_nonvarying_address_components PROTO((rtx, int, rtx *,
						     HOST_WIDE_INT *,
						     HOST_WIDE_INT *));
static int refers_to_p PROTO((rtx, rtx));
static int refers_to_mem_p PROTO((rtx, rtx, HOST_WIDE_INT,
				  HOST_WIDE_INT));
static int cse_rtx_addr_varies_p PROTO((rtx));
static rtx canon_reg PROTO((rtx, rtx));
static void find_best_addr PROTO((rtx, rtx *));
static enum rtx_code find_comparison_args PROTO((enum rtx_code, rtx *, rtx *,
						 enum machine_mode *,
						 enum machine_mode *));
static rtx cse_gen_binary PROTO((enum rtx_code, enum machine_mode,
				 rtx, rtx));
static rtx simplify_plus_minus PROTO((enum rtx_code, enum machine_mode,
				      rtx, rtx));
static rtx fold_rtx PROTO((rtx, rtx));
static rtx equiv_constant PROTO((rtx));
static void record_jump_equiv PROTO((rtx, int));
static void record_jump_cond PROTO((enum rtx_code, enum machine_mode,
				    rtx, rtx, int));
static void cse_insn PROTO((rtx, int));
static void note_mem_written PROTO((rtx, struct write_data *));
static void invalidate_from_clobbers PROTO((struct write_data *, rtx));
static rtx cse_process_notes PROTO((rtx, rtx));
static void cse_around_loop PROTO((rtx));
static void invalidate_skipped_set PROTO((rtx, rtx));
static void invalidate_skipped_block PROTO((rtx));
static void cse_check_loop_start PROTO((rtx, rtx));
static void cse_set_around_loop PROTO((rtx, rtx, rtx));
static rtx cse_basic_block PROTO((rtx, rtx, struct branch_path *, int));
static void count_reg_usage PROTO((rtx, int *, rtx, int));

extern int rtx_equal_function_value_matters;
\f
/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.  */

/* Return the right cost to give to an operation
   to make the cost of the corresponding register-to-register instruction
   N times that of a fast register-to-register instruction.  */

#define COSTS_N_INSNS(N) ((N) * 4 - 2)

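/* Worked arithmetic (added for exposition): COSTS_N_INSNS (1) == 2,
   the baseline cost of one fast register-to-register insn, and each
   additional insn adds 4; so COSTS_N_INSNS (5) == 18, which is what
   rtx_cost below charges for a general multiply, and
   COSTS_N_INSNS (7) == 26 for a division.  */
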
int
rtx_cost (x, outer_code)
     rtx x;
     enum rtx_code outer_code;
{
  register int i, j;
  register enum rtx_code code;
  register char *fmt;
  register int total;

  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that RTX_COSTS can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      /* Count multiplication by 2**n as a shift,
	 because if we are considering it, we would output it as a shift.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	total = 2;
      else
	total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker.  */
      total = 0;
      break;
    case ASM_OPERANDS:
      /* We don't want these to be used in substitutions because
	 we have no way of validating the resulting insn.  So assign
	 anything containing an ASM_OPERANDS a very high cost.  */
      total = 1000;
      break;
    default:
      total = 2;
    }

  switch (code)
    {
    case REG:
      return ! CHEAP_REG (x);

    case SUBREG:
      /* If we can't tie these modes, make this expensive.  The larger
	 the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
	return COSTS_N_INSNS (2
			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      return 2;
#ifdef RTX_COSTS
      RTX_COSTS (x, code, outer_code);
#endif
      CONST_COSTS (x, code, outer_code);
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}
\f
/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block ()
{
  register int i;

  next_qty = max_reg;

  bzero ((char *) reg_tick, max_reg * sizeof (int));

  bcopy ((char *) all_minus_one, (char *) reg_in_table,
	 max_reg * sizeof (int));
  bcopy ((char *) consec_ints, (char *) reg_qty, max_reg * sizeof (int));
  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < NBUCKETS; i++)
    {
      register struct table_elt *this, *next;
      for (this = table[i]; this; this = next)
	{
	  next = this->next_same_hash;
	  free_element (this);
	}
    }

  bzero ((char *) table, sizeof table);

  prev_insn = 0;

#ifdef HAVE_cc0
  prev_insn_cc0 = 0;
#endif
}

/* Say that register REG contains a quantity not in any register before
   and initialize that quantity.  */

static void
make_new_qty (reg)
     register int reg;
{
  register int q;

  if (next_qty >= max_qty)
    abort ();

  q = reg_qty[reg] = next_qty++;
  qty_first_reg[q] = reg;
  qty_last_reg[q] = reg;
  qty_const[q] = qty_const_insn[q] = 0;
  qty_comparison_code[q] = UNKNOWN;

  reg_next_eqv[reg] = reg_prev_eqv[reg] = -1;
}

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (new, old)
     register int new, old;
{
  register int lastr, firstr;
  register int q = reg_qty[old];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  reg_qty[new] = q;
  firstr = qty_first_reg[q];
  lastr = qty_last_reg[q];

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[regno_last_uid[new]] > cse_basic_block_end
		       || (uid_cuid[regno_first_uid[new]]
			   < cse_basic_block_start))
		      && (uid_cuid[regno_last_uid[new]]
			  > uid_cuid[regno_last_uid[firstr]]))))))
    {
      reg_prev_eqv[firstr] = new;
      reg_next_eqv[new] = firstr;
      reg_prev_eqv[new] = -1;
      qty_first_reg[q] = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_prev_eqv[lastr] >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_prev_eqv[lastr];
      reg_next_eqv[new] = reg_next_eqv[lastr];
      if (reg_next_eqv[lastr] >= 0)
	reg_prev_eqv[reg_next_eqv[lastr]] = new;
      else
	qty_last_reg[q] = new;
      reg_next_eqv[lastr] = new;
      reg_prev_eqv[new] = lastr;
    }
}

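/* To illustrate the ordering policy above (example added for
   exposition): if the chain for some qty currently looks like

	fp (fixed hard reg)  ->  (reg 101)  ->  (reg 7) (other hard reg)

   then a newly equivalent pseudo is linked in after (reg 101) but
   before the non-fixed hard reg at the end, while a new non-fixed hard
   reg goes at the very end.  A pseudo can displace the head only when
   the head is a non-fixed hard reg, or when the pseudo lives beyond the
   current basic block and past the head's last use; a fixed hard reg at
   the head is never displaced.  */
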
/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (reg)
     register int reg;
{
  register int q = reg_qty[reg];
  register int p, n;

  /* If invalid, do nothing.  */
  if (q == reg)
    return;

  p = reg_prev_eqv[reg];
  n = reg_next_eqv[reg];

  if (n != -1)
    reg_prev_eqv[n] = p;
  else
    qty_last_reg[q] = p;
  if (p != -1)
    reg_next_eqv[p] = n;
  else
    qty_first_reg[q] = n;

  reg_qty[reg] = reg;
}

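/* A before/after picture (added for exposition) of the unlinking done
   above, deleting reg B from the chain A <-> B <-> C of some qty Q:

	before:  qty_first_reg[Q] = A,  A <-> B <-> C,  qty_last_reg[Q] = C
	after:   qty_first_reg[Q] = A,  A <-> C,        qty_last_reg[Q] = C

   The -1 checks handle B being at either end of the chain, in which
   case qty_first_reg or qty_last_reg is updated instead of a neighbor.
   Finally, reg_qty[B] = B marks B as having no valid quantity.  */
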
/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (x)
     rtx x;
{
  register enum rtx_code code;
  register int i, j;
  register char *fmt;
  register int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      register int regno = REGNO (x);
      register int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
      int i;

      for (i = regno; i < endregno; i++)
	{
	  if (reg_in_table[i] >= 0 && reg_in_table[i] != reg_tick[i])
	    remove_invalid_refs (i);

	  reg_in_table[i] = reg_tick[i];
	}

      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || GET_RTX_CLASS (code) == '<')
    {
      if (GET_CODE (XEXP (x, 0)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (GET_CODE (XEXP (x, 1)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}

/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (x, classp, modified)
     rtx x;
     struct table_elt *classp;
     int modified;
{
  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      if (REGNO_QTY_VALID_P (regno)
	  && qty_mode[reg_qty[regno]] != GET_MODE (x))
	return 0;

      if (modified || ! REGNO_QTY_VALID_P (regno))
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (GET_CODE (classp->exp) == REG
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  make_regs_eqv (regno, REGNO (classp->exp));
		  return 1;
		}

	  make_new_qty (regno);
	  qty_mode[reg_qty[regno]] = GET_MODE (x);
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL_PTR, 0);
      mention_regs (SUBREG_REG (x));
      return 1;
    }
  else
    return mention_regs (x);
}
\f
/* Look in or update the hash table.  */

/* Put the element ELT on the list of free elements.  */

static void
free_element (elt)
     struct table_elt *elt;
{
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}

/* Return an element that is free for use.  */

static struct table_elt *
get_element ()
{
  struct table_elt *elt = free_element_chain;
  if (elt)
    {
      free_element_chain = elt->next_same_hash;
      return elt;
    }
  n_elements_made++;
  return (struct table_elt *) oballoc (sizeof (struct table_elt));
}

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (elt, hash)
     register struct table_elt *elt;
     unsigned hash;
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    register struct table_elt *prev = elt->prev_same_value;
    register struct table_elt *next = elt->next_same_value;

    if (next) next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	register struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    register struct table_elt *prev = elt->prev_same_hash;
    register struct table_elt *next = elt->next_same_hash;

    if (next) next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < NBUCKETS; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      register struct table_elt *p = elt->related_value;
      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  free_element (elt);
}

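/* To illustrate the circular-chain step above (example added for
   exposition): removing B from the related-value ring A -> B -> C -> A
   first walks the ring to find B's predecessor A, then splices B out,
   giving A -> C -> A.  If the ring had only A -> B -> A, the splice
   would leave A pointing at itself, so A's related_value is cleared to
   mark the chain as no longer useful.  */
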
/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
			    || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
      return p;

  return 0;
}

/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  if (GET_CODE (x) == REG)
    {
      int regno = REGNO (x);
      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (GET_CODE (p->exp) == REG
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
	  return p;
    }

  return 0;
}

/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (x, code)
     rtx x;
     enum rtx_code code;
{
  register struct table_elt *p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS,
					 GET_MODE (x));
  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    {
      if (GET_CODE (p->exp) == code
	  /* Make sure this is a valid entry in the table.  */
	  && exp_equiv_p (p->exp, p->exp, 1, 0))
	return p->exp;
    }

  return 0;
}

/* Insert X in the hash table, assuming HASH is its hash code
   and CLASSP is an element of the class it should go in
   (or 0 if a new class should be made).
   It is inserted at the proper position to keep the class in
   the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */

#define CHEAPER(X,Y)   ((X)->cost < (Y)->cost)

static struct table_elt *
insert (x, classp, hash, mode)
     register rtx x;
     register struct table_elt *classp;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
    abort ();

  /* If X is a hard register, show it is being put in the table.  */
  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      int regno = REGNO (x);
      int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
      int i;

      for (i = regno; i < endregno; i++)
	SET_HARD_REG_BIT (hard_regs_in_table, i);
    }

  /* Put an element for X into the right hash bucket.  */

  elt = get_element ();
  elt->exp = x;
  elt->cost = COST (x);
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x)
		   /* GNU C++ takes advantage of this for `this'
		      (and other const values).  */
		   || (RTX_UNCHANGING_P (x)
		       && GET_CODE (x) == REG
		       && REGNO (x) >= FIRST_PSEUDO_REGISTER)
		   || FIXED_BASE_PLUS_P (x));

  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
	/* Insert at the head of the class.  */
	{
	  register struct table_elt *p;
	  elt->next_same_value = classp;
	  classp->prev_same_value = elt;
	  elt->first_same_value = elt;

	  for (p = classp; p; p = p->next_same_value)
	    p->first_same_value = elt;
	}
      else
	{
	  /* Insert not at head of the class.  */
	  /* Put it after the last element cheaper than X.  */
	  register struct table_elt *p, *next;
	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
	       p = next);
	  /* Put it after P and before NEXT.  */
	  elt->next_same_value = next;
	  if (next)
	    next->prev_same_value = elt;
	  elt->prev_same_value = p;
	  p->next_same_value = elt;
	  elt->first_same_value = classp;
	}
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to a constant,
     update `qty_const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && GET_CODE (classp->exp) == REG)
    {
      qty_const[reg_qty[REGNO (classp->exp)]]
	= gen_lowpart_if_possible (qty_mode[reg_qty[REGNO (classp->exp)]], x);
      qty_const_insn[reg_qty[REGNO (classp->exp)]] = this_insn;
    }

  else if (GET_CODE (x) == REG && classp && ! qty_const[reg_qty[REGNO (x)]])
    {
      register struct table_elt *p;

      for (p = classp; p != 0; p = p->next_same_value)
	{
	  if (p->is_const)
	    {
	      qty_const[reg_qty[REGNO (x)]]
		= gen_lowpart_if_possible (GET_MODE (x), p->exp);
	      qty_const_insn[reg_qty[REGNO (x)]] = this_insn;
	      break;
	    }
	}
    }

  else if (GET_CODE (x) == REG && qty_const[reg_qty[REGNO (x)]]
	   && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]])
    qty_const_insn[reg_qty[REGNO (x)]] = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
	{
	  /* Get the integer-free subexpression in the hash table.  */
	  subhash = safe_hash (subexp, mode) % NBUCKETS;
	  subelt = lookup (subexp, subhash, mode);
	  if (subelt == 0)
	    subelt = insert (subexp, NULL_PTR, subhash, mode);
	  /* Initialize SUBELT's circular chain if it has none.  */
	  if (subelt->related_value == 0)
	    subelt->related_value = subelt;
	  /* Find the element in the circular chain that precedes SUBELT.  */
	  subelt_prev = subelt;
	  while (subelt_prev->related_value != subelt)
	    subelt_prev = subelt_prev->related_value;
	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
	     This way the element that follows SUBELT is the oldest one.  */
	  elt->related_value = subelt_prev->related_value;
	  subelt_prev->related_value = elt;
	}
    }

  return elt;
}
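
/* A minimal sketch (added for exposition; the `sets[i]' fields are
   those of cse_insn's local bookkeeping, and this is not a call site in
   this file) of the calling protocol described in the comment before
   `insert':

	sets[i].src_hash = HASH (src, mode);
	if (insert_regs (src, classp, 0))
	  sets[i].src_hash = HASH (src, mode);	recompute: reg_qty changed
	elt = insert (src, classp, sets[i].src_hash, mode);
	elt->in_memory = sets[i].src_in_memory;
	elt->in_struct = sets[i].src_in_struct;

   Since `insert' zeroes in_memory, a caller that forgets the last two
   assignments leaves the entry immune to memory invalidation; the same
   bookkeeping is needed when entering a SET_DEST that is a MEM.  */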
\f
/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */

static void
merge_equiv_classes (class1, class2)
     struct table_elt *class1, *class2;
{
  struct table_elt *elt, *next, *new;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  for (elt = class2; elt; elt = next)
    {
      unsigned hash;
      rtx exp = elt->exp;
      enum machine_mode mode = elt->mode;

      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
	 Don't do this for invalid entries as we cannot find their
	 hash code (it also isn't necessary).  */
      if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
	{
	  hash_arg_in_memory = 0;
	  hash_arg_in_struct = 0;
	  hash = HASH (exp, mode);

	  if (GET_CODE (exp) == REG)
	    delete_reg_equiv (REGNO (exp));

	  remove_from_table (elt, hash);

	  if (insert_regs (exp, class1, 0))
	    {
	      rehash_using_reg (exp);
	      hash = HASH (exp, mode);
	    }
	  new = insert (exp, class1, hash, mode);
	  new->in_memory = hash_arg_in_memory;
	  new->in_struct = hash_arg_in_struct;
	}
    }
}
\f
/* Remove from the hash table, or mark as invalid,
   all expressions whose values could be altered by storing in X.
   X is a register, a subreg, or a memory reference with nonvarying address
   (because, when a memory reference with a varying address is stored in,
   all memory references are removed by invalidate_memory
   so specific invalidation is superfluous).
   FULL_MODE, if not VOIDmode, indicates that this much should be invalidated
   instead of just the amount indicated by the mode of X.  This is only used
   for bitfield stores into memory.

   A nonvarying address may be just a register or just
   a symbol reference, or it may be either of those plus
   a numeric offset.  */

static void
invalidate (x, full_mode)
     rtx x;
     enum machine_mode full_mode;
{
  register int i;
  register struct table_elt *p;
  rtx base;
  HOST_WIDE_INT start, end;

  /* If X is a register, dependencies on its contents
     are recorded through the qty number mechanism.
     Just change the qty number of the register,
     mark it as invalid for expressions that refer to it,
     and remove it itself.  */

  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);
      register unsigned hash = HASH (x, GET_MODE (x));

      /* Remove REGNO from any quantity list it might be on and indicate
	 that its value might have changed.  If it is a pseudo, remove its
	 entry from the hash table.

	 For a hard register, we do the first two actions above for any
	 additional hard registers corresponding to X.  Then, if any of these
	 registers are in the table, we must remove any REG entries that
	 overlap these registers.  */

      delete_reg_equiv (regno);
      reg_tick[regno]++;

      if (regno >= FIRST_PSEUDO_REGISTER)
	remove_from_table (lookup_for_remove (x, hash, GET_MODE (x)), hash);
      else
	{
	  HOST_WIDE_INT in_table
	    = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
	  int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	  int tregno, tendregno;
	  register struct table_elt *p, *next;

	  CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);

	  for (i = regno + 1; i < endregno; i++)
	    {
	      in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, i);
	      CLEAR_HARD_REG_BIT (hard_regs_in_table, i);
	      delete_reg_equiv (i);
	      reg_tick[i]++;
	    }

	  if (in_table)
	    for (hash = 0; hash < NBUCKETS; hash++)
	      for (p = table[hash]; p; p = next)
		{
		  next = p->next_same_hash;

		  if (GET_CODE (p->exp) != REG
		      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
		    continue;

		  tregno = REGNO (p->exp);
		  tendregno
		    = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
		  if (tendregno > regno && tregno < endregno)
		    remove_from_table (p, hash);
		}
	}

      return;
    }

  if (GET_CODE (x) == SUBREG)
    {
      if (GET_CODE (SUBREG_REG (x)) != REG)
	abort ();
      invalidate (SUBREG_REG (x), VOIDmode);
      return;
    }

  /* X is not a register; it must be a memory reference with
     a nonvarying address.  Remove all hash table elements
     that refer to overlapping pieces of memory.  */

  if (GET_CODE (x) != MEM)
    abort ();

  if (full_mode == VOIDmode)
    full_mode = GET_MODE (x);

  set_nonvarying_address_components (XEXP (x, 0), GET_MODE_SIZE (full_mode),
				     &base, &start, &end);

  for (i = 0; i < NBUCKETS; i++)
    {
      register struct table_elt *next;
      for (p = table[i]; p; p = next)
	{
	  next = p->next_same_hash;
	  if (refers_to_mem_p (p->exp, base, start, end))
	    remove_from_table (p, i);
	}
    }
}
1600
1601 /* Remove all expressions that refer to register REGNO,
1602 since they are already invalid, and we are about to
1603 mark that register valid again and don't want the old
1604 expressions to reappear as valid. */
1605
1606 static void
1607 remove_invalid_refs (regno)
1608 int regno;
1609 {
1610 register int i;
1611 register struct table_elt *p, *next;
1612
1613 for (i = 0; i < NBUCKETS; i++)
1614 for (p = table[i]; p; p = next)
1615 {
1616 next = p->next_same_hash;
1617 if (GET_CODE (p->exp) != REG
1618 && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
1619 remove_from_table (p, i);
1620 }
1621 }
1622 \f
1623 /* Recompute the hash codes of any valid entries in the hash table that
1624 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1625
1626 This is called when we make a jump equivalence. */
1627
1628 static void
1629 rehash_using_reg (x)
1630 rtx x;
1631 {
1632 int i;
1633 struct table_elt *p, *next;
1634 unsigned hash;
1635
1636 if (GET_CODE (x) == SUBREG)
1637 x = SUBREG_REG (x);
1638
1639 /* If X is not a register or if the register is known not to be in any
1640 valid entries in the table, we have no work to do. */
1641
1642 if (GET_CODE (x) != REG
1643 || reg_in_table[REGNO (x)] < 0
1644 || reg_in_table[REGNO (x)] != reg_tick[REGNO (x)])
1645 return;
1646
1647 /* Scan all hash chains looking for valid entries that mention X.
1648 If we find one and it is in the wrong hash chain, move it. We can skip
1649 objects that are registers, since they are handled specially. */
1650
1651 for (i = 0; i < NBUCKETS; i++)
1652 for (p = table[i]; p; p = next)
1653 {
1654 next = p->next_same_hash;
1655 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
1656 && exp_equiv_p (p->exp, p->exp, 1, 0)
1657 && i != (hash = safe_hash (p->exp, p->mode) % NBUCKETS))
1658 {
1659 if (p->next_same_hash)
1660 p->next_same_hash->prev_same_hash = p->prev_same_hash;
1661
1662 if (p->prev_same_hash)
1663 p->prev_same_hash->next_same_hash = p->next_same_hash;
1664 else
1665 table[i] = p->next_same_hash;
1666
1667 p->next_same_hash = table[hash];
1668 p->prev_same_hash = 0;
1669 if (table[hash])
1670 table[hash]->prev_same_hash = p;
1671 table[hash] = p;
1672 }
1673 }
1674 }
1675 \f
1676 /* Remove from the hash table all expressions that reference memory,
1677 or some of them as specified by *WRITES. */
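/* Per the test below, an entry is removed if it is in memory and either
   everything in memory is being invalidated (writes->all), or a nonscalar
   store may hit it (writes->nonscalar and the entry's in_struct flag), or
   its address varies and so might alias the store. E.g. a store through
   an unknown pointer sets writes->all and kills every MEM entry. */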
1678
1679 static void
1680 invalidate_memory (writes)
1681 struct write_data *writes;
1682 {
1683 register int i;
1684 register struct table_elt *p, *next;
1685 int all = writes->all;
1686 int nonscalar = writes->nonscalar;
1687
1688 for (i = 0; i < NBUCKETS; i++)
1689 for (p = table[i]; p; p = next)
1690 {
1691 next = p->next_same_hash;
1692 if (p->in_memory
1693 && (all
1694 || (nonscalar && p->in_struct)
1695 || cse_rtx_addr_varies_p (p->exp)))
1696 remove_from_table (p, i);
1697 }
1698 }
1699 \f
1700 /* Remove from the hash table any expression that is a call-clobbered
1701 register. Also update the reg_tick values of such registers. */
1702
1703 static void
1704 invalidate_for_call ()
1705 {
1706 int regno, endregno;
1707 int i;
1708 unsigned hash;
1709 struct table_elt *p, *next;
1710 int in_table = 0;
1711
1712 /* Go through all the hard registers. For each that is clobbered in
1713 a CALL_INSN, remove the register from quantity chains and update
1714 reg_tick if defined. Also see if any of these registers is currently
1715 in the table. */
1716
1717 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1718 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1719 {
1720 delete_reg_equiv (regno);
1721 if (reg_tick[regno] >= 0)
1722 reg_tick[regno]++;
1723
1724 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
1725 }
1726
1727 /* In the case where we have no call-clobbered hard registers in the
1728 table, we are done. Otherwise, scan the table and remove any
1729 entry that overlaps a call-clobbered register. */
1730
1731 if (in_table)
1732 for (hash = 0; hash < NBUCKETS; hash++)
1733 for (p = table[hash]; p; p = next)
1734 {
1735 next = p->next_same_hash;
1736
1737 if (GET_CODE (p->exp) != REG
1738 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1739 continue;
1740
1741 regno = REGNO (p->exp);
1742 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
1743
1744 for (i = regno; i < endregno; i++)
1745 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1746 {
1747 remove_from_table (p, hash);
1748 break;
1749 }
1750 }
1751 }
1752 \f
1753 /* Given an expression X of type CONST,
1754 and ELT which is its table entry (or 0 if it
1755 is not in the hash table),
1756 return an alternate expression for X as a register plus integer.
1757 If none can be found, return 0. */
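/* For example, if X is (const (plus (symbol_ref "table") (const_int 8)))
   and (symbol_ref "table") is known to be in (reg 100), we return
   (plus (reg 100) (const_int 8)). (The symbol name and register number
   are illustrative.) */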
1758
1759 static rtx
1760 use_related_value (x, elt)
1761 rtx x;
1762 struct table_elt *elt;
1763 {
1764 register struct table_elt *relt = 0;
1765 register struct table_elt *p, *q;
1766 HOST_WIDE_INT offset;
1767
1768 /* First, is there anything related known?
1769 If we have a table element, we can tell from that.
1770 Otherwise, must look it up. */
1771
1772 if (elt != 0 && elt->related_value != 0)
1773 relt = elt;
1774 else if (elt == 0 && GET_CODE (x) == CONST)
1775 {
1776 rtx subexp = get_related_value (x);
1777 if (subexp != 0)
1778 relt = lookup (subexp,
1779 safe_hash (subexp, GET_MODE (subexp)) % NBUCKETS,
1780 GET_MODE (subexp));
1781 }
1782
1783 if (relt == 0)
1784 return 0;
1785
1786 /* Search all related table entries for one that has an
1787 equivalent register. */
1788
1789 p = relt;
1790 while (1)
1791 {
1792 /* This loop is strange in that it is executed in two different cases.
1793 The first is when X is already in the table. Then it is searching
1794 the RELATED_VALUE list of X's class (RELT). The second case is when
1795 X is not in the table. Then RELT points to a class for the related
1796 value.
1797
1798 Ensure that, whatever case we are in, we ignore classes that have
1799 the same value as X. */
1800
1801 if (rtx_equal_p (x, p->exp))
1802 q = 0;
1803 else
1804 for (q = p->first_same_value; q; q = q->next_same_value)
1805 if (GET_CODE (q->exp) == REG)
1806 break;
1807
1808 if (q)
1809 break;
1810
1811 p = p->related_value;
1812
1813 /* We went all the way around, so there is nothing to be found.
1814 Alternatively, perhaps RELT was in the table for some other reason
1815 and it has no related values recorded. */
1816 if (p == relt || p == 0)
1817 break;
1818 }
1819
1820 if (q == 0)
1821 return 0;
1822
1823 offset = (get_integer_term (x) - get_integer_term (p->exp));
1824 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
1825 return plus_constant (q->exp, offset);
1826 }
1827 \f
1828 /* Hash an rtx. We are careful to make sure the value is never negative.
1829 Equivalent registers hash identically.
1830 MODE is used in hashing for CONST_INTs only;
1831 otherwise the mode of X is used.
1832
1833 Store 1 in do_not_record if any subexpression is volatile.
1834
1835 Store 1 in hash_arg_in_memory if X contains a MEM rtx
1836 which does not have the RTX_UNCHANGING_P bit set.
1837 In this case, also store 1 in hash_arg_in_struct
1838 if there is a MEM rtx which has the MEM_IN_STRUCT_P bit set.
1839
1840 Note that cse_insn knows that the hash code of a MEM expression
1841 is just (int) MEM plus the hash code of the address. */
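/* For instance, (mem:SI (plus:SI (reg:SI 100) (const_int 4))) hashes as
   (unsigned) MEM plus the hash of the PLUS address; and since a REG hashes
   by its quantity number, two MEMs whose addresses use equivalent registers
   hash alike. (Register numbers are illustrative.) */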
1842
1843 static unsigned
1844 canon_hash (x, mode)
1845 rtx x;
1846 enum machine_mode mode;
1847 {
1848 register int i, j;
1849 register unsigned hash = 0;
1850 register enum rtx_code code;
1851 register char *fmt;
1852
1853 /* repeat is used to turn tail-recursion into iteration. */
1854 repeat:
1855 if (x == 0)
1856 return hash;
1857
1858 code = GET_CODE (x);
1859 switch (code)
1860 {
1861 case REG:
1862 {
1863 register int regno = REGNO (x);
1864
1865 /* On some machines, we can't record any non-fixed hard register,
1866 because extending its life will cause reload problems. We
1867 consider ap, fp, and sp to be fixed for this purpose.
1868 On all machines, we can't record any global registers. */
1869
1870 if (regno < FIRST_PSEUDO_REGISTER
1871 && (global_regs[regno]
1872 #ifdef SMALL_REGISTER_CLASSES
1873 || (! fixed_regs[regno]
1874 && regno != FRAME_POINTER_REGNUM
1875 && regno != HARD_FRAME_POINTER_REGNUM
1876 && regno != ARG_POINTER_REGNUM
1877 && regno != STACK_POINTER_REGNUM)
1878 #endif
1879 ))
1880 {
1881 do_not_record = 1;
1882 return 0;
1883 }
1884 hash += ((unsigned) REG << 7) + (unsigned) reg_qty[regno];
1885 return hash;
1886 }
1887
1888 case CONST_INT:
1889 {
1890 unsigned HOST_WIDE_INT tem = INTVAL (x);
1891 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
1892 return hash;
1893 }
1894
1895 case CONST_DOUBLE:
1896 /* This is like the general case, except that it only counts
1897 the integers representing the constant. */
1898 hash += (unsigned) code + (unsigned) GET_MODE (x);
1899 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
1900 {
1901 unsigned tem = XINT (x, i);
1902 hash += tem;
1903 }
1904 return hash;
1905
1906 /* Assume there is only one rtx object for any given label. */
1907 case LABEL_REF:
1908 hash
1909 += ((unsigned) LABEL_REF << 7) + (unsigned HOST_WIDE_INT) XEXP (x, 0);
1910 return hash;
1911
1912 case SYMBOL_REF:
1913 hash
1914 += ((unsigned) SYMBOL_REF << 7) + (unsigned HOST_WIDE_INT) XSTR (x, 0);
1915 return hash;
1916
1917 case MEM:
1918 if (MEM_VOLATILE_P (x))
1919 {
1920 do_not_record = 1;
1921 return 0;
1922 }
1923 if (! RTX_UNCHANGING_P (x))
1924 {
1925 hash_arg_in_memory = 1;
1926 if (MEM_IN_STRUCT_P (x)) hash_arg_in_struct = 1;
1927 }
1928 /* Now that we have already found this special case,
1929 might as well speed it up as much as possible. */
1930 hash += (unsigned) MEM;
1931 x = XEXP (x, 0);
1932 goto repeat;
1933
1934 case PRE_DEC:
1935 case PRE_INC:
1936 case POST_DEC:
1937 case POST_INC:
1938 case PC:
1939 case CC0:
1940 case CALL:
1941 case UNSPEC_VOLATILE:
1942 do_not_record = 1;
1943 return 0;
1944
1945 case ASM_OPERANDS:
1946 if (MEM_VOLATILE_P (x))
1947 {
1948 do_not_record = 1;
1949 return 0;
1950 }
1951 }
1952
1953 i = GET_RTX_LENGTH (code) - 1;
1954 hash += (unsigned) code + (unsigned) GET_MODE (x);
1955 fmt = GET_RTX_FORMAT (code);
1956 for (; i >= 0; i--)
1957 {
1958 if (fmt[i] == 'e')
1959 {
1960 rtx tem = XEXP (x, i);
1961
1962 /* If we are about to do the last recursive call
1963 needed at this level, change it into iteration.
1964 This function is called enough to be worth it. */
1965 if (i == 0)
1966 {
1967 x = tem;
1968 goto repeat;
1969 }
1970 hash += canon_hash (tem, 0);
1971 }
1972 else if (fmt[i] == 'E')
1973 for (j = 0; j < XVECLEN (x, i); j++)
1974 hash += canon_hash (XVECEXP (x, i, j), 0);
1975 else if (fmt[i] == 's')
1976 {
1977 register unsigned char *p = (unsigned char *) XSTR (x, i);
1978 if (p)
1979 while (*p)
1980 hash += *p++;
1981 }
1982 else if (fmt[i] == 'i')
1983 {
1984 register unsigned tem = XINT (x, i);
1985 hash += tem;
1986 }
1987 else
1988 abort ();
1989 }
1990 return hash;
1991 }
1992
1993 /* Like canon_hash but with no side effects. */
1994
1995 static unsigned
1996 safe_hash (x, mode)
1997 rtx x;
1998 enum machine_mode mode;
1999 {
2000 int save_do_not_record = do_not_record;
2001 int save_hash_arg_in_memory = hash_arg_in_memory;
2002 int save_hash_arg_in_struct = hash_arg_in_struct;
2003 unsigned hash = canon_hash (x, mode);
2004 hash_arg_in_memory = save_hash_arg_in_memory;
2005 hash_arg_in_struct = save_hash_arg_in_struct;
2006 do_not_record = save_do_not_record;
2007 return hash;
2008 }
2009 \f
2010 /* Return 1 iff X and Y would canonicalize into the same thing,
2011 without actually constructing the canonicalization of either one.
2012 If VALIDATE is nonzero,
2013 we assume X is an expression being processed from the rtl
2014 and Y was found in the hash table. We check register refs
2015 in Y for being marked as valid.
2016
2017 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2018 that is known to be in the register. Ordinarily, we don't allow them
2019 to match, because letting them match would cause unpredictable results
2020 in all the places that search a hash table chain for an equivalent
2021 for a given value. A possible equivalent that has different structure
2022 has its hash code computed from different data. Whether the hash code
2023 is the same as that of the given value is pure luck. */
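/* For example, with EQUAL_VALUES nonzero, (const_int 4) can match
   (reg:SI 100) when reg 100's quantity is known to hold (const_int 4)
   in SImode; with EQUAL_VALUES zero the codes differ and the match fails
   at once. (The register number is illustrative.) */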
2024
2025 static int
2026 exp_equiv_p (x, y, validate, equal_values)
2027 rtx x, y;
2028 int validate;
2029 int equal_values;
2030 {
2031 register int i, j;
2032 register enum rtx_code code;
2033 register char *fmt;
2034
2035 /* Note: it is incorrect to assume an expression is equivalent to itself
2036 if VALIDATE is nonzero. */
2037 if (x == y && !validate)
2038 return 1;
2039 if (x == 0 || y == 0)
2040 return x == y;
2041
2042 code = GET_CODE (x);
2043 if (code != GET_CODE (y))
2044 {
2045 if (!equal_values)
2046 return 0;
2047
2048 /* If X is a constant and Y is a register or vice versa, they may be
2049 equivalent. We only have to validate if Y is a register. */
2050 if (CONSTANT_P (x) && GET_CODE (y) == REG
2051 && REGNO_QTY_VALID_P (REGNO (y))
2052 && GET_MODE (y) == qty_mode[reg_qty[REGNO (y)]]
2053 && rtx_equal_p (x, qty_const[reg_qty[REGNO (y)]])
2054 && (! validate || reg_in_table[REGNO (y)] == reg_tick[REGNO (y)]))
2055 return 1;
2056
2057 if (CONSTANT_P (y) && code == REG
2058 && REGNO_QTY_VALID_P (REGNO (x))
2059 && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]]
2060 && rtx_equal_p (y, qty_const[reg_qty[REGNO (x)]]))
2061 return 1;
2062
2063 return 0;
2064 }
2065
2066 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2067 if (GET_MODE (x) != GET_MODE (y))
2068 return 0;
2069
2070 switch (code)
2071 {
2072 case PC:
2073 case CC0:
2074 return x == y;
2075
2076 case CONST_INT:
2077 return INTVAL (x) == INTVAL (y);
2078
2079 case LABEL_REF:
2080 return XEXP (x, 0) == XEXP (y, 0);
2081
2082 case SYMBOL_REF:
2083 return XSTR (x, 0) == XSTR (y, 0);
2084
2085 case REG:
2086 {
2087 int regno = REGNO (y);
2088 int endregno
2089 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2090 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2091 int i;
2092
2093 /* If the quantities are not the same, the expressions are not
2094 equivalent. If they are and we are not to validate, they
2095 are equivalent. Otherwise, ensure all regs are up-to-date. */
2096
2097 if (reg_qty[REGNO (x)] != reg_qty[regno])
2098 return 0;
2099
2100 if (! validate)
2101 return 1;
2102
2103 for (i = regno; i < endregno; i++)
2104 if (reg_in_table[i] != reg_tick[i])
2105 return 0;
2106
2107 return 1;
2108 }
2109
2110 /* For commutative operations, check both orders. */
2111 case PLUS:
2112 case MULT:
2113 case AND:
2114 case IOR:
2115 case XOR:
2116 case NE:
2117 case EQ:
2118 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2119 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2120 validate, equal_values))
2121 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2122 validate, equal_values)
2123 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2124 validate, equal_values)));
2125 }
2126
2127 /* Compare the elements. If any pair of corresponding elements
2128 fails to match, return 0 for the whole thing. */
2129
2130 fmt = GET_RTX_FORMAT (code);
2131 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2132 {
2133 switch (fmt[i])
2134 {
2135 case 'e':
2136 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2137 return 0;
2138 break;
2139
2140 case 'E':
2141 if (XVECLEN (x, i) != XVECLEN (y, i))
2142 return 0;
2143 for (j = 0; j < XVECLEN (x, i); j++)
2144 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2145 validate, equal_values))
2146 return 0;
2147 break;
2148
2149 case 's':
2150 if (strcmp (XSTR (x, i), XSTR (y, i)))
2151 return 0;
2152 break;
2153
2154 case 'i':
2155 if (XINT (x, i) != XINT (y, i))
2156 return 0;
2157 break;
2158
2159 case 'w':
2160 if (XWINT (x, i) != XWINT (y, i))
2161 return 0;
2162 break;
2163
2164 case '0':
2165 break;
2166
2167 default:
2168 abort ();
2169 }
2170 }
2171
2172 return 1;
2173 }
2174 \f
2175 /* Return 1 iff any subexpression of X matches Y.
2176 Here we do not require that X or Y be valid (for registers referred to)
2177 for being in the hash table. */
2178
2179 static int
2180 refers_to_p (x, y)
2181 rtx x, y;
2182 {
2183 register int i;
2184 register enum rtx_code code;
2185 register char *fmt;
2186
2187 repeat:
2188 if (x == y)
2189 return 1;
2190 if (x == 0 || y == 0)
2191 return 0;
2192
2193 code = GET_CODE (x);
2194 /* If X as a whole has the same code as Y, they may match.
2195 If so, return 1. */
2196 if (code == GET_CODE (y))
2197 {
2198 if (exp_equiv_p (x, y, 0, 1))
2199 return 1;
2200 }
2201
2202 /* X does not match, so try its subexpressions. */
2203
2204 fmt = GET_RTX_FORMAT (code);
2205 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2206 if (fmt[i] == 'e')
2207 {
2208 if (i == 0)
2209 {
2210 x = XEXP (x, 0);
2211 goto repeat;
2212 }
2213 else
2214 if (refers_to_p (XEXP (x, i), y))
2215 return 1;
2216 }
2217 else if (fmt[i] == 'E')
2218 {
2219 int j;
2220 for (j = 0; j < XVECLEN (x, i); j++)
2221 if (refers_to_p (XVECEXP (x, i, j), y))
2222 return 1;
2223 }
2224
2225 return 0;
2226 }
2227 \f
2228 /* Given ADDR and SIZE (a memory address, and the size of the memory reference),
2229 set PBASE, PSTART, and PEND which correspond to the base of the address,
2230 the starting offset, and the ending offset, respectively.
2231
2232 ADDR is known to be a nonvarying address. */
2233
2234 /* ??? Despite what the comments say, this function is in fact frequently
2235 passed varying addresses. This does not appear to cause any problems. */
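/* For example, given ADDR (plus (reg 100) (const_int 8)) with SIZE 4,
   where reg 100's quantity has the constant equivalent (symbol_ref "x"),
   we set *PBASE to (symbol_ref "x"), *PSTART to 8, and *PEND to 12.
   (The register number and symbol are illustrative.) */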
2236
2237 static void
2238 set_nonvarying_address_components (addr, size, pbase, pstart, pend)
2239 rtx addr;
2240 int size;
2241 rtx *pbase;
2242 HOST_WIDE_INT *pstart, *pend;
2243 {
2244 rtx base;
2245 HOST_WIDE_INT start, end;
2246
2247 base = addr;
2248 start = 0;
2249 end = 0;
2250
2251 /* Registers with nonvarying addresses usually have constant equivalents;
2252 but the frame pointer register is also possible. */
2253 if (GET_CODE (base) == REG
2254 && qty_const != 0
2255 && REGNO_QTY_VALID_P (REGNO (base))
2256 && qty_mode[reg_qty[REGNO (base)]] == GET_MODE (base)
2257 && qty_const[reg_qty[REGNO (base)]] != 0)
2258 base = qty_const[reg_qty[REGNO (base)]];
2259 else if (GET_CODE (base) == PLUS
2260 && GET_CODE (XEXP (base, 1)) == CONST_INT
2261 && GET_CODE (XEXP (base, 0)) == REG
2262 && qty_const != 0
2263 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
2264 && (qty_mode[reg_qty[REGNO (XEXP (base, 0))]]
2265 == GET_MODE (XEXP (base, 0)))
2266 && qty_const[reg_qty[REGNO (XEXP (base, 0))]])
2267 {
2268 start = INTVAL (XEXP (base, 1));
2269 base = qty_const[reg_qty[REGNO (XEXP (base, 0))]];
2270 }
2271
2272 /* Handle everything that we can find inside an address that has been
2273 viewed as constant. */
2274
2275 while (1)
2276 {
2277 /* If no part of this switch does a "continue", the code outside
2278 will exit this loop. */
2279
2280 switch (GET_CODE (base))
2281 {
2282 case LO_SUM:
2283 /* By definition, operand1 of a LO_SUM is the associated constant
2284 address. Use the associated constant address as the base
2285 instead. */
2286 base = XEXP (base, 1);
2287 continue;
2288
2289 case CONST:
2290 /* Strip off CONST. */
2291 base = XEXP (base, 0);
2292 continue;
2293
2294 case PLUS:
2295 if (GET_CODE (XEXP (base, 1)) == CONST_INT)
2296 {
2297 start += INTVAL (XEXP (base, 1));
2298 base = XEXP (base, 0);
2299 continue;
2300 }
2301 break;
2302
2303 case AND:
2304 /* Handle the case of an AND which is the negative of a power of
2305 two. This is used to represent unaligned memory operations. */
2306 if (GET_CODE (XEXP (base, 1)) == CONST_INT
2307 && exact_log2 (- INTVAL (XEXP (base, 1))) > 0)
2308 {
2309 set_nonvarying_address_components (XEXP (base, 0), size,
2310 pbase, pstart, pend);
2311
2312 /* Assume the worst misalignment. START is affected, but not
2313 END, so compensate by adjusting SIZE. Don't lose any
2314 constant we already had. */
2315
2316 size = *pend - *pstart - INTVAL (XEXP (base, 1)) - 1;
2317 start += *pstart - INTVAL (XEXP (base, 1)) - 1;
2318 base = *pbase;
2319 }
2320 break;
2321 }
2322
2323 break;
2324 }
2325
2326 if (GET_CODE (base) == CONST_INT)
2327 {
2328 start += INTVAL (base);
2329 base = const0_rtx;
2330 }
2331
2332 end = start + size;
2333
2334 /* Set the return values. */
2335 *pbase = base;
2336 *pstart = start;
2337 *pend = end;
2338 }
2339
2340 /* Return 1 iff any subexpression of X refers to memory
2341 at an address of BASE plus some offset
2342 such that any of the bytes' offsets fall between START (inclusive)
2343 and END (exclusive).
2344
2345 The value is undefined if X is a varying address (as determined by
2346 cse_rtx_addr_varies_p). This function is not used in such cases.
2347
2348 When used in the cse pass, `qty_const' is nonzero, and it is used
2349 to treat an address that is a register with a known constant value
2350 as if it were that constant value.
2351 In the loop pass, `qty_const' is zero, so this is not done. */
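/* The overlap test at the end is the usual interval check: a MEM with
   components (mybase, mystart, myend) conflicts with (base, start, end)
   only if the bases are equal and myend > start && mystart < end; e.g.
   byte ranges [8,12) and [10,14) of the same base overlap, while [8,12)
   and [12,16) do not. */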
2352
2353 static int
2354 refers_to_mem_p (x, base, start, end)
2355 rtx x, base;
2356 HOST_WIDE_INT start, end;
2357 {
2358 register HOST_WIDE_INT i;
2359 register enum rtx_code code;
2360 register char *fmt;
2361
2362 repeat:
2363 if (x == 0)
2364 return 0;
2365
2366 code = GET_CODE (x);
2367 if (code == MEM)
2368 {
2369 register rtx addr = XEXP (x, 0); /* Get the address. */
2370 rtx mybase;
2371 HOST_WIDE_INT mystart, myend;
2372
2373 set_nonvarying_address_components (addr, GET_MODE_SIZE (GET_MODE (x)),
2374 &mybase, &mystart, &myend);
2375
2377 /* refers_to_mem_p is never called with varying addresses.
2378 If the base addresses are not equal, there is no chance
2379 of the memory addresses conflicting. */
2380 if (! rtx_equal_p (mybase, base))
2381 return 0;
2382
2383 return myend > start && mystart < end;
2384 }
2385
2386 /* X does not match, so try its subexpressions. */
2387
2388 fmt = GET_RTX_FORMAT (code);
2389 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2390 if (fmt[i] == 'e')
2391 {
2392 if (i == 0)
2393 {
2394 x = XEXP (x, 0);
2395 goto repeat;
2396 }
2397 else
2398 if (refers_to_mem_p (XEXP (x, i), base, start, end))
2399 return 1;
2400 }
2401 else if (fmt[i] == 'E')
2402 {
2403 int j;
2404 for (j = 0; j < XVECLEN (x, i); j++)
2405 if (refers_to_mem_p (XVECEXP (x, i, j), base, start, end))
2406 return 1;
2407 }
2408
2409 return 0;
2410 }
2411
2412 /* Nonzero if X refers to memory at a varying address;
2413 except that a register which has at the moment a known constant value
2414 isn't considered variable. */
2415
2416 static int
2417 cse_rtx_addr_varies_p (x)
2418 rtx x;
2419 {
2420 /* We need not check for X and the equivalence class being of the same
2421 mode because if X is equivalent to a constant in some mode, it
2422 doesn't vary in any mode. */
2423
2424 if (GET_CODE (x) == MEM
2425 && GET_CODE (XEXP (x, 0)) == REG
2426 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2427 && GET_MODE (XEXP (x, 0)) == qty_mode[reg_qty[REGNO (XEXP (x, 0))]]
2428 && qty_const[reg_qty[REGNO (XEXP (x, 0))]] != 0)
2429 return 0;
2430
2431 if (GET_CODE (x) == MEM
2432 && GET_CODE (XEXP (x, 0)) == PLUS
2433 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2434 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2435 && REGNO_QTY_VALID_P (REGNO (XEXP (XEXP (x, 0), 0)))
2436 && (GET_MODE (XEXP (XEXP (x, 0), 0))
2437 == qty_mode[reg_qty[REGNO (XEXP (XEXP (x, 0), 0))]])
2438 && qty_const[reg_qty[REGNO (XEXP (XEXP (x, 0), 0))]])
2439 return 0;
2440
2441 return rtx_addr_varies_p (x);
2442 }
2443 \f
2444 /* Canonicalize an expression:
2445 replace each register reference inside it
2446 with the "oldest" equivalent register.
2447
2448 If INSN is non-zero and we are replacing a pseudo with a hard register
2449 or vice versa, validate_change is used to ensure that INSN remains valid
2450 after we make our substitution. The calls are made with IN_GROUP non-zero
2451 so apply_change_group must be called upon the outermost return from this
2452 function (unless INSN is zero). The result of apply_change_group can
2453 generally be discarded since the changes we are making are optional. */
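/* For example, if (reg 105) and (reg 100) are in the same quantity and
   reg 100 is the oldest equivalent, (plus (reg 105) (const_int 4)) is
   rewritten as (plus (reg 100) (const_int 4)). (Register numbers are
   illustrative.) */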
2454
2455 static rtx
2456 canon_reg (x, insn)
2457 rtx x;
2458 rtx insn;
2459 {
2460 register int i;
2461 register enum rtx_code code;
2462 register char *fmt;
2463
2464 if (x == 0)
2465 return x;
2466
2467 code = GET_CODE (x);
2468 switch (code)
2469 {
2470 case PC:
2471 case CC0:
2472 case CONST:
2473 case CONST_INT:
2474 case CONST_DOUBLE:
2475 case SYMBOL_REF:
2476 case LABEL_REF:
2477 case ADDR_VEC:
2478 case ADDR_DIFF_VEC:
2479 return x;
2480
2481 case REG:
2482 {
2483 register int first;
2484
2485 /* Never replace a hard reg, because hard regs can appear
2486 in more than one machine mode, and we must preserve the mode
2487 of each occurrence. Also, some hard regs appear in
2488 MEMs that are shared and mustn't be altered. Don't try to
2489 replace any reg that maps to a reg of class NO_REGS. */
2490 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2491 || ! REGNO_QTY_VALID_P (REGNO (x)))
2492 return x;
2493
2494 first = qty_first_reg[reg_qty[REGNO (x)]];
2495 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2496 : REGNO_REG_CLASS (first) == NO_REGS ? x
2497 : gen_rtx (REG, qty_mode[reg_qty[REGNO (x)]], first));
2498 }
2499 }
2500
2501 fmt = GET_RTX_FORMAT (code);
2502 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2503 {
2504 register int j;
2505
2506 if (fmt[i] == 'e')
2507 {
2508 rtx new = canon_reg (XEXP (x, i), insn);
2509
2510 /* If replacing pseudo with hard reg or vice versa, ensure the
2511 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2512 if (insn != 0 && new != 0
2513 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2514 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2515 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2516 || insn_n_dups[recog_memoized (insn)] > 0))
2517 validate_change (insn, &XEXP (x, i), new, 1);
2518 else
2519 XEXP (x, i) = new;
2520 }
2521 else if (fmt[i] == 'E')
2522 for (j = 0; j < XVECLEN (x, i); j++)
2523 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2524 }
2525
2526 return x;
2527 }
2528 \f
2529 /* LOC is a location with INSN that is an operand address (the contents of
2530 a MEM). Find the best equivalent address to use that is valid for this
2531 insn.
2532
2533 On most CISC machines, complicated address modes are costly, and rtx_cost
2534 is a good approximation for that cost. However, most RISC machines have
2535 only a few (usually only one) memory reference formats. If an address is
2536 valid at all, it is often just as cheap as any other address. Hence, for
2537 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2538 costs of various addresses. For two addresses of equal cost, choose the one
2539 with the highest `rtx_cost' value as that has the potential of eliminating
2540 the most insns. For equal costs, we choose the first in the equivalence
2541 class. Note that we ignore the fact that pseudo registers are cheaper
2542 than hard registers here because we would also prefer the pseudo registers.
2543 */
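/* Sketch of the preference when ADDRESS_COST is defined: an equivalent
   (reg 101) replaces an address such as (plus (reg 100) (const_int 4))
   only if its ADDRESS_COST is lower, or equal with a larger rtx_cost,
   since replacing a bigger expression can delete more insns. (Register
   numbers are illustrative.) */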
2544
2545 static void
2546 find_best_addr (insn, loc)
2547 rtx insn;
2548 rtx *loc;
2549 {
2550 struct table_elt *elt, *p;
2551 rtx addr = *loc;
2552 int our_cost;
2553 int found_better = 1;
2554 int save_do_not_record = do_not_record;
2555 int save_hash_arg_in_memory = hash_arg_in_memory;
2556 int save_hash_arg_in_struct = hash_arg_in_struct;
2557 int addr_volatile;
2558 int regno;
2559 unsigned hash;
2560
2561 /* Do not try to replace constant addresses or addresses of local and
2562 argument slots. These MEM expressions are made only once and inserted
2563 in many instructions, as well as being used to control symbol table
2564 output. It is not safe to clobber them.
2565
2566 There are some uncommon cases where the address is already in a register
2567 for some reason, but we cannot take advantage of that because we have
2568 no easy way to unshare the MEM. In addition, looking up all stack
2569 addresses is costly. */
2570 if ((GET_CODE (addr) == PLUS
2571 && GET_CODE (XEXP (addr, 0)) == REG
2572 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2573 && (regno = REGNO (XEXP (addr, 0)),
2574 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2575 || regno == ARG_POINTER_REGNUM))
2576 || (GET_CODE (addr) == REG
2577 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2578 || regno == HARD_FRAME_POINTER_REGNUM
2579 || regno == ARG_POINTER_REGNUM))
2580 || CONSTANT_ADDRESS_P (addr))
2581 return;
2582
2583 /* If this address is not simply a register, try to fold it. This will
2584 sometimes simplify the expression. Many simplifications
2585 will not be valid, but some, usually applying the associative rule, will
2586 be valid and produce better code. */
2587 if (GET_CODE (addr) != REG
2588 && validate_change (insn, loc, fold_rtx (addr, insn), 0))
2589 addr = *loc;
2590
2591 /* If this address is not in the hash table, we can't look for equivalences
2592 of the whole address. Also, ignore if volatile. */
2593
2594 do_not_record = 0;
2595 hash = HASH (addr, Pmode);
2596 addr_volatile = do_not_record;
2597 do_not_record = save_do_not_record;
2598 hash_arg_in_memory = save_hash_arg_in_memory;
2599 hash_arg_in_struct = save_hash_arg_in_struct;
2600
2601 if (addr_volatile)
2602 return;
2603
2604 elt = lookup (addr, hash, Pmode);
2605
2606 #ifndef ADDRESS_COST
2607 if (elt)
2608 {
2609 our_cost = elt->cost;
2610
2611 /* Find the lowest cost below ours that works. */
2612 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2613 if (elt->cost < our_cost
2614 && (GET_CODE (elt->exp) == REG
2615 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2616 && validate_change (insn, loc,
2617 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2618 return;
2619 }
2620 #else
2621
2622 if (elt)
2623 {
2624 /* We need to find the best (under the criteria documented above) entry
2625 in the class that is valid. We use the `flag' field to indicate
2626 choices that were invalid and iterate until we can't find a better
2627 one that hasn't already been tried. */
2628
2629 for (p = elt->first_same_value; p; p = p->next_same_value)
2630 p->flag = 0;
2631
2632 while (found_better)
2633 {
2634 int best_addr_cost = ADDRESS_COST (*loc);
2635 int best_rtx_cost = (elt->cost + 1) >> 1;
2636 struct table_elt *best_elt = elt;
2637
2638 found_better = 0;
2639 for (p = elt->first_same_value; p; p = p->next_same_value)
2640 if (! p->flag
2641 && (GET_CODE (p->exp) == REG
2642 || exp_equiv_p (p->exp, p->exp, 1, 0))
2643 && (ADDRESS_COST (p->exp) < best_addr_cost
2644 || (ADDRESS_COST (p->exp) == best_addr_cost
2645 && (p->cost + 1) >> 1 > best_rtx_cost)))
2646 {
2647 found_better = 1;
2648 best_addr_cost = ADDRESS_COST (p->exp);
2649 best_rtx_cost = (p->cost + 1) >> 1;
2650 best_elt = p;
2651 }
2652
2653 if (found_better)
2654 {
2655 if (validate_change (insn, loc,
2656 canon_reg (copy_rtx (best_elt->exp),
2657 NULL_RTX), 0))
2658 return;
2659 else
2660 best_elt->flag = 1;
2661 }
2662 }
2663 }
2664
2665 /* If the address is a binary operation with the first operand a register
2666 and the second a constant, do the same as above, but looking for
2667 equivalences of the register. Then try to simplify before checking for
2668 the best address to use. This catches a few cases: First is when we
2669 have REG+const and the register is another REG+const. We can often merge
2670 the constants and eliminate one insn and one register. It may also be
2671 that a machine has a cheap REG+REG+const. Finally, this improves the
2672 code on the Alpha for unaligned byte stores. */
2673
2674 if (flag_expensive_optimizations
2675 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
2676 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
2677 && GET_CODE (XEXP (*loc, 0)) == REG
2678 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
2679 {
2680 rtx c = XEXP (*loc, 1);
2681
2682 do_not_record = 0;
2683 hash = HASH (XEXP (*loc, 0), Pmode);
2684 do_not_record = save_do_not_record;
2685 hash_arg_in_memory = save_hash_arg_in_memory;
2686 hash_arg_in_struct = save_hash_arg_in_struct;
2687
2688 elt = lookup (XEXP (*loc, 0), hash, Pmode);
2689 if (elt == 0)
2690 return;
2691
2692 /* We need to find the best (under the criteria documented above) entry
2693 in the class that is valid. We use the `flag' field to indicate
2694 choices that were invalid and iterate until we can't find a better
2695 one that hasn't already been tried. */
2696
2697 for (p = elt->first_same_value; p; p = p->next_same_value)
2698 p->flag = 0;
2699
2700 while (found_better)
2701 {
2702 int best_addr_cost = ADDRESS_COST (*loc);
2703 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2704 struct table_elt *best_elt = elt;
2705 rtx best_rtx = *loc;
2706 int count;
2707
2708 /* This is at worst case an O(n^2) algorithm, so limit our search
2709 to the first 32 elements on the list. This avoids trouble
2710 compiling code with very long basic blocks that can easily
2711 call cse_gen_binary so many times that we run out of memory. */
2712
2713 found_better = 0;
2714 for (p = elt->first_same_value, count = 0;
2715 p && count < 32;
2716 p = p->next_same_value, count++)
2717 if (! p->flag
2718 && (GET_CODE (p->exp) == REG
2719 || exp_equiv_p (p->exp, p->exp, 1, 0)))
2720 {
2721 rtx new = cse_gen_binary (GET_CODE (*loc), Pmode, p->exp, c);
2722
2723 if ((ADDRESS_COST (new) < best_addr_cost
2724 || (ADDRESS_COST (new) == best_addr_cost
2725 && (COST (new) + 1) >> 1 > best_rtx_cost)))
2726 {
2727 found_better = 1;
2728 best_addr_cost = ADDRESS_COST (new);
2729 best_rtx_cost = (COST (new) + 1) >> 1;
2730 best_elt = p;
2731 best_rtx = new;
2732 }
2733 }
2734
2735 if (found_better)
2736 {
2737 if (validate_change (insn, loc,
2738 canon_reg (copy_rtx (best_rtx),
2739 NULL_RTX), 0))
2740 return;
2741 else
2742 best_elt->flag = 1;
2743 }
2744 }
2745 }
2746 #endif
2747 }
2748 \f
2749 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
2750 operation (EQ, NE, GT, etc.), follow it back through the hash table and
2751 find what values are being compared.
2752
2753 *PARG1 and *PARG2 are updated to contain the rtx representing the values
2754 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
2755 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2756 compared to produce cc0.
2757
2758 The return value is the comparison operator and is either the code of
2759 the comparison being done or the code for the inverse comparison. */
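/* For example, if CODE is NE, *PARG1 is (cc0) and *PARG2 is (const_int 0),
   and the hash table shows that cc0 was last set from
   (compare (reg 100) (reg 101)), we return NE with *PARG1 = (reg 100)
   and *PARG2 = (reg 101). (Register numbers are illustrative.) */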
2760
2761 static enum rtx_code
2762 find_comparison_args (code, parg1, parg2, pmode1, pmode2)
2763 enum rtx_code code;
2764 rtx *parg1, *parg2;
2765 enum machine_mode *pmode1, *pmode2;
2766 {
2767 rtx arg1, arg2;
2768
2769 arg1 = *parg1, arg2 = *parg2;
2770
2771 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
2772
2773 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2774 {
2775 /* Set non-zero when we find something of interest. */
2776 rtx x = 0;
2777 int reverse_code = 0;
2778 struct table_elt *p = 0;
2779
2780 /* If arg1 is a COMPARE, extract the comparison arguments from it.
2781 On machines with CC0, this is the only case that can occur, since
2782 fold_rtx will return the COMPARE or item being compared with zero
2783 when given CC0. */
2784
2785 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2786 x = arg1;
2787
2788 /* If ARG1 is a comparison operator and CODE is testing for
2789 STORE_FLAG_VALUE, get the inner arguments. */
2790
2791 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
2792 {
2793 if (code == NE
2794 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2795 && code == LT && STORE_FLAG_VALUE == -1)
2796 #ifdef FLOAT_STORE_FLAG_VALUE
2797 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2798 && FLOAT_STORE_FLAG_VALUE < 0)
2799 #endif
2800 )
2801 x = arg1;
2802 else if (code == EQ
2803 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2804 && code == GE && STORE_FLAG_VALUE == -1)
2805 #ifdef FLOAT_STORE_FLAG_VALUE
2806 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2807 && FLOAT_STORE_FLAG_VALUE < 0)
2808 #endif
2809 )
2810 x = arg1, reverse_code = 1;
2811 }
2812
2813 /* ??? We could also check for
2814
2815 (ne (and (eq (...) (const_int 1))) (const_int 0))
2816
2817 and related forms, but let's wait until we see them occurring. */
2818
2819 if (x == 0)
2820 /* Look up ARG1 in the hash table and see if it has an equivalence
2821 that lets us see what is being compared. */
2822 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) % NBUCKETS,
2823 GET_MODE (arg1));
2824 if (p) p = p->first_same_value;
2825
2826 for (; p; p = p->next_same_value)
2827 {
2828 enum machine_mode inner_mode = GET_MODE (p->exp);
2829
2830 /* If the entry isn't valid, skip it. */
2831 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
2832 continue;
2833
2834 if (GET_CODE (p->exp) == COMPARE
2835 /* Another possibility is that this machine has a compare insn
2836 that includes the comparison code. In that case, ARG1 would
2837 be equivalent to a comparison operation that would set ARG1 to
2838 either STORE_FLAG_VALUE or zero. If this is an NE operation,
2839 ORIG_CODE is the actual comparison being done; if it is an EQ,
2840 we must reverse ORIG_CODE. On machines with a negative value
2841 for STORE_FLAG_VALUE, also look at LT and GE operations. */
2842 || ((code == NE
2843 || (code == LT
2844 && GET_MODE_CLASS (inner_mode) == MODE_INT
2845 && (GET_MODE_BITSIZE (inner_mode)
2846 <= HOST_BITS_PER_WIDE_INT)
2847 && (STORE_FLAG_VALUE
2848 & ((HOST_WIDE_INT) 1
2849 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2850 #ifdef FLOAT_STORE_FLAG_VALUE
2851 || (code == LT
2852 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2853 && FLOAT_STORE_FLAG_VALUE < 0)
2854 #endif
2855 )
2856 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
2857 {
2858 x = p->exp;
2859 break;
2860 }
2861 else if ((code == EQ
2862 || (code == GE
2863 && GET_MODE_CLASS (inner_mode) == MODE_INT
2864 && (GET_MODE_BITSIZE (inner_mode)
2865 <= HOST_BITS_PER_WIDE_INT)
2866 && (STORE_FLAG_VALUE
2867 & ((HOST_WIDE_INT) 1
2868 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2869 #ifdef FLOAT_STORE_FLAG_VALUE
2870 || (code == GE
2871 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2872 && FLOAT_STORE_FLAG_VALUE < 0)
2873 #endif
2874 )
2875 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
2876 {
2877 reverse_code = 1;
2878 x = p->exp;
2879 break;
2880 }
2881
2882 /* If this is fp + constant, the equivalent is a better operand since
2883 it may let us predict the value of the comparison. */
2884 else if (NONZERO_BASE_PLUS_P (p->exp))
2885 {
2886 arg1 = p->exp;
2887 continue;
2888 }
2889 }
2890
2891 /* If we didn't find a useful equivalence for ARG1, we are done.
2892 Otherwise, set up for the next iteration. */
2893 if (x == 0)
2894 break;
2895
2896 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
2897 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
2898 code = GET_CODE (x);
2899
2900 if (reverse_code)
2901 code = reverse_condition (code);
2902 }
2903
2904 /* Return our results. Return the modes from before fold_rtx
2905 because fold_rtx might produce const_int, and then it's too late. */
2906 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
2907 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
2908
2909 return code;
2910 }
2911 \f
2912 /* Try to simplify a unary operation CODE whose output mode is to be
2913 MODE with input operand OP whose mode was originally OP_MODE.
2914 Return zero if no simplification can be made. */
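/* For example, (neg:SI (const_int 5)) folds to (const_int -5), and
   (zero_extend:SI (const_int -1)) with OP_MODE QImode folds to
   (const_int 255). */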
2915
2916 rtx
2917 simplify_unary_operation (code, mode, op, op_mode)
2918 enum rtx_code code;
2919 enum machine_mode mode;
2920 rtx op;
2921 enum machine_mode op_mode;
2922 {
2923 register int width = GET_MODE_BITSIZE (mode);
2924
2925 /* The order of these tests is critical so that, for example, we don't
2926 check the wrong mode (input vs. output) for a conversion operation,
2927 such as FIX. At some point, this should be simplified. */
2928
2929 #if !defined(REAL_IS_NOT_DOUBLE) || defined(REAL_ARITHMETIC)
2930
2931 if (code == FLOAT && GET_MODE (op) == VOIDmode
2932 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
2933 {
2934 HOST_WIDE_INT hv, lv;
2935 REAL_VALUE_TYPE d;
2936
2937 if (GET_CODE (op) == CONST_INT)
2938 lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
2939 else
2940 lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
2941
2942 #ifdef REAL_ARITHMETIC
2943 REAL_VALUE_FROM_INT (d, lv, hv);
2944 #else
2945 if (hv < 0)
2946 {
2947 d = (double) (~ hv);
2948 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
2949 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
2950 d += (double) (unsigned HOST_WIDE_INT) (~ lv);
2951 d = (- d - 1.0);
2952 }
2953 else
2954 {
2955 d = (double) hv;
2956 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
2957 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
2958 d += (double) (unsigned HOST_WIDE_INT) lv;
2959 }
2960 #endif /* REAL_ARITHMETIC */
2961
2962 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
2963 }
2964 else if (code == UNSIGNED_FLOAT && GET_MODE (op) == VOIDmode
2965 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
2966 {
2967 HOST_WIDE_INT hv, lv;
2968 REAL_VALUE_TYPE d;
2969
2970 if (GET_CODE (op) == CONST_INT)
2971 lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
2972 else
2973 lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
2974
2975 if (op_mode == VOIDmode)
2976 {
2977 /* We don't know how to interpret negative-looking numbers in
2978 this case, so don't try to fold those. */
2979 if (hv < 0)
2980 return 0;
2981 }
2982 else if (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT * 2)
2983 ;
2984 else
2985 hv = 0, lv &= GET_MODE_MASK (op_mode);
2986
2987 #ifdef REAL_ARITHMETIC
2988 REAL_VALUE_FROM_UNSIGNED_INT (d, lv, hv);
2989 #else
2990
2991 d = (double) (unsigned HOST_WIDE_INT) hv;
2992 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
2993 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
2994 d += (double) (unsigned HOST_WIDE_INT) lv;
2995 #endif /* REAL_ARITHMETIC */
2996
2997 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
2998 }
2999 #endif
3000
3001 if (GET_CODE (op) == CONST_INT
3002 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
3003 {
3004 register HOST_WIDE_INT arg0 = INTVAL (op);
3005 register HOST_WIDE_INT val;
3006
3007 switch (code)
3008 {
3009 case NOT:
3010 val = ~ arg0;
3011 break;
3012
3013 case NEG:
3014 val = - arg0;
3015 break;
3016
3017 case ABS:
3018 val = (arg0 >= 0 ? arg0 : - arg0);
3019 break;
3020
3021 case FFS:
3022 /* Don't use ffs here. Instead, get low order bit and then its
3023 number. If arg0 is zero, this will return 0, as desired. */
3024 arg0 &= GET_MODE_MASK (mode);
3025 val = exact_log2 (arg0 & (- arg0)) + 1;
3026 break;
3027
3028 case TRUNCATE:
3029 val = arg0;
3030 break;
3031
3032 case ZERO_EXTEND:
3033 if (op_mode == VOIDmode)
3034 op_mode = mode;
3035 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
3036 {
3037 /* If we were really extending the mode,
3038 we would have to distinguish between zero-extension
3039 and sign-extension. */
3040 if (width != GET_MODE_BITSIZE (op_mode))
3041 abort ();
3042 val = arg0;
3043 }
3044 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3045 val = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3046 else
3047 return 0;
3048 break;
3049
3050 case SIGN_EXTEND:
3051 if (op_mode == VOIDmode)
3052 op_mode = mode;
3053 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
3054 {
3055 /* If we were really extending the mode,
3056 we would have to distinguish between zero-extension
3057 and sign-extension. */
3058 if (width != GET_MODE_BITSIZE (op_mode))
3059 abort ();
3060 val = arg0;
3061 }
3062 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3063 {
3064 val
3065 = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3066 if (val
3067 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (op_mode) - 1)))
3068 val -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3069 }
3070 else
3071 return 0;
3072 break;
3073
3074 case SQRT:
3075 return 0;
3076
3077 default:
3078 abort ();
3079 }
3080
3081 /* Clear the bits that don't belong in our mode,
3082 unless they and our sign bit are all one.
3083 So we get either a reasonable negative value or a reasonable
3084 unsigned value for this mode. */
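      /* Worked example, with width 8: val 0x17F has high bits that are
	 not all copies of bit 7, so it is masked down to 0x7F; val -1
	 (all bits set) passes the test and is left alone. */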
3085 if (width < HOST_BITS_PER_WIDE_INT
3086 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3087 != ((HOST_WIDE_INT) (-1) << (width - 1))))
3088 val &= ((HOST_WIDE_INT) 1 << width) - 1;
3089
3090 return GEN_INT (val);
3091 }
3092
3093 /* We can do some operations on integer CONST_DOUBLEs. Also allow
3094 for a DImode operation on a CONST_INT. */
3095 else if (GET_MODE (op) == VOIDmode && width <= HOST_BITS_PER_INT * 2
3096 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3097 {
3098 HOST_WIDE_INT l1, h1, lv, hv;
3099
3100 if (GET_CODE (op) == CONST_DOUBLE)
3101 l1 = CONST_DOUBLE_LOW (op), h1 = CONST_DOUBLE_HIGH (op);
3102 else
3103 l1 = INTVAL (op), h1 = l1 < 0 ? -1 : 0;
3104
3105 switch (code)
3106 {
3107 case NOT:
3108 lv = ~ l1;
3109 hv = ~ h1;
3110 break;
3111
3112 case NEG:
3113 neg_double (l1, h1, &lv, &hv);
3114 break;
3115
3116 case ABS:
3117 if (h1 < 0)
3118 neg_double (l1, h1, &lv, &hv);
3119 else
3120 lv = l1, hv = h1;
3121 break;
3122
3123 case FFS:
3124 hv = 0;
3125 if (l1 == 0)
3126 lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & (-h1)) + 1;
3127 else
3128 lv = exact_log2 (l1 & (-l1)) + 1;
3129 break;
3130
3131 case TRUNCATE:
3132 /* This is just a change-of-mode, so do nothing. */
3133 lv = l1, hv = h1;
3134 break;
3135
3136 case ZERO_EXTEND:
3137 if (op_mode == VOIDmode
3138 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3139 return 0;
3140
3141 hv = 0;
3142 lv = l1 & GET_MODE_MASK (op_mode);
3143 break;
3144
3145 case SIGN_EXTEND:
3146 if (op_mode == VOIDmode
3147 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3148 return 0;
3149 else
3150 {
3151 lv = l1 & GET_MODE_MASK (op_mode);
3152 if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
3153 && (lv & ((HOST_WIDE_INT) 1
3154 << (GET_MODE_BITSIZE (op_mode) - 1))) != 0)
3155 lv -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3156
3157 hv = (lv < 0) ? ~ (HOST_WIDE_INT) 0 : 0;
3158 }
3159 break;
3160
3161 case SQRT:
3162 return 0;
3163
3164 default:
3165 return 0;
3166 }
3167
3168 return immed_double_const (lv, hv, mode);
3169 }
3170
3171 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3172 else if (GET_CODE (op) == CONST_DOUBLE
3173 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3174 {
3175 REAL_VALUE_TYPE d;
3176 jmp_buf handler;
3177 rtx x;
3178
3179 if (setjmp (handler))
3180 /* There used to be a warning here, but that is inadvisable.
3181 People may want to cause traps, and the natural way
3182 to do it should not get a warning. */
3183 return 0;
3184
3185 set_float_handler (handler);
3186
3187 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3188
3189 switch (code)
3190 {
3191 case NEG:
3192 d = REAL_VALUE_NEGATE (d);
3193 break;
3194
3195 case ABS:
3196 if (REAL_VALUE_NEGATIVE (d))
3197 d = REAL_VALUE_NEGATE (d);
3198 break;
3199
3200 case FLOAT_TRUNCATE:
3201 d = real_value_truncate (mode, d);
3202 break;
3203
3204 case FLOAT_EXTEND:
3205 /* All this does is change the mode. */
3206 break;
3207
3208 case FIX:
3209 d = REAL_VALUE_RNDZINT (d);
3210 break;
3211
3212 case UNSIGNED_FIX:
3213 d = REAL_VALUE_UNSIGNED_RNDZINT (d);
3214 break;
3215
3216 case SQRT:
3217 return 0;
3218
3219 default:
3220 abort ();
3221 }
3222
3223 x = CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3224 set_float_handler (NULL_PTR);
3225 return x;
3226 }
3227
3228 else if (GET_CODE (op) == CONST_DOUBLE
3229 && GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
3230 && GET_MODE_CLASS (mode) == MODE_INT
3231 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
3232 {
3233 REAL_VALUE_TYPE d;
3234 jmp_buf handler;
3235 HOST_WIDE_INT val;
3236
3237 if (setjmp (handler))
3238 return 0;
3239
3240 set_float_handler (handler);
3241
3242 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3243
3244 switch (code)
3245 {
3246 case FIX:
3247 val = REAL_VALUE_FIX (d);
3248 break;
3249
3250 case UNSIGNED_FIX:
3251 val = REAL_VALUE_UNSIGNED_FIX (d);
3252 break;
3253
3254 default:
3255 abort ();
3256 }
3257
3258 set_float_handler (NULL_PTR);
3259
3260 /* Clear the bits that don't belong in our mode,
3261 unless they and our sign bit are all one.
3262 So we get either a reasonable negative value or a reasonable
3263 unsigned value for this mode. */
3264 if (width < HOST_BITS_PER_WIDE_INT
3265 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3266 != ((HOST_WIDE_INT) (-1) << (width - 1))))
3267 val &= ((HOST_WIDE_INT) 1 << width) - 1;
3268
3269 /* If this would be an entire word for the target, but is not for
3270 the host, then sign-extend on the host so that the number will look
3271 the same way on the host that it would on the target.
3272
3273 For example, when building a 64 bit alpha hosted 32 bit sparc
3274 targeted compiler, then we want the 32 bit unsigned value -1 to be
3275 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
3276 The latter confuses the sparc backend. */
3277
3278 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
3279 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
3280 val |= ((HOST_WIDE_INT) (-1) << width);
3281
3282 return GEN_INT (val);
3283 }
3284 #endif
3285 /* This was formerly used only for non-IEEE float.
3286 eggert@twinsun.com says it is safe for IEEE also. */
3287 else
3288 {
3289 /* There are some simplifications we can do even if the operands
3290 aren't constant. */
3291 switch (code)
3292 {
3293 case NEG:
3294 case NOT:
3295 /* (not (not X)) == X, similarly for NEG. */
3296 if (GET_CODE (op) == code)
3297 return XEXP (op, 0);
3298 break;
3299
3300 case SIGN_EXTEND:
3301 /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2))))
3302 becomes just the MINUS if its mode is MODE. This allows
3303 folding switch statements on machines using casesi (such as
3304 the Vax). */
3305 if (GET_CODE (op) == TRUNCATE
3306 && GET_MODE (XEXP (op, 0)) == mode
3307 && GET_CODE (XEXP (op, 0)) == MINUS
3308 && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF
3309 && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF)
3310 return XEXP (op, 0);
3311
3312 #ifdef POINTERS_EXTEND_UNSIGNED
3313 if (! POINTERS_EXTEND_UNSIGNED
3314 && mode == Pmode && GET_MODE (op) == ptr_mode
3315 && CONSTANT_P (op))
3316 return convert_memory_address (Pmode, op);
3317 #endif
3318 break;
3319
3320 #ifdef POINTERS_EXTEND_UNSIGNED
3321 case ZERO_EXTEND:
3322 if (POINTERS_EXTEND_UNSIGNED
3323 && mode == Pmode && GET_MODE (op) == ptr_mode
3324 && CONSTANT_P (op))
3325 return convert_memory_address (Pmode, op);
3326 break;
3327 #endif
3328 }
3329
3330 return 0;
3331 }
3332 }
3333 \f
3334 /* Simplify a binary operation CODE with result mode MODE, operating on OP0
3335 and OP1. Return 0 if no simplification is possible.
3336
3337 Don't use this for relational operations such as EQ or LT.
3338 Use simplify_relational_operation instead. */
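/* For example, (plus:SI (const_int 2) (const_int 3)) folds to
   (const_int 5), and (minus:SI (reg 100) (reg 100)) folds to
   (const_int 0) in integer modes. (The register number is
   illustrative.) */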
3339
3340 rtx
3341 simplify_binary_operation (code, mode, op0, op1)
3342 enum rtx_code code;
3343 enum machine_mode mode;
3344 rtx op0, op1;
3345 {
3346 register HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
3347 HOST_WIDE_INT val;
3348 int width = GET_MODE_BITSIZE (mode);
3349 rtx tem;
3350
3351 /* Relational operations don't work here. We must know the mode
3352 of the operands in order to do the comparison correctly.
3353 Assuming a full word can give incorrect results.
3354 Consider comparing 128 with -128 in QImode. */
3355
3356 if (GET_RTX_CLASS (code) == '<')
3357 abort ();
3358
3359 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3360 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3361 && GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
3362 && mode == GET_MODE (op0) && mode == GET_MODE (op1))
3363 {
3364 REAL_VALUE_TYPE f0, f1, value;
3365 jmp_buf handler;
3366
3367 if (setjmp (handler))
3368 return 0;
3369
3370 set_float_handler (handler);
3371
3372 REAL_VALUE_FROM_CONST_DOUBLE (f0, op0);
3373 REAL_VALUE_FROM_CONST_DOUBLE (f1, op1);
3374 f0 = real_value_truncate (mode, f0);
3375 f1 = real_value_truncate (mode, f1);
3376
3377 #ifdef REAL_ARITHMETIC
3378 REAL_ARITHMETIC (value, rtx_to_tree_code (code), f0, f1);
3379 #else
3380 switch (code)
3381 {
3382 case PLUS:
3383 value = f0 + f1;
3384 break;
3385 case MINUS:
3386 value = f0 - f1;
3387 break;
3388 case MULT:
3389 value = f0 * f1;
3390 break;
3391 case DIV:
3392 #ifndef REAL_INFINITY
3393 if (f1 == 0)
3394 return 0;
3395 #endif
3396 value = f0 / f1;
3397 break;
3398 case SMIN:
3399 value = MIN (f0, f1);
3400 break;
3401 case SMAX:
3402 value = MAX (f0, f1);
3403 break;
3404 default:
3405 abort ();
3406 }
3407 #endif
3408
3409 value = real_value_truncate (mode, value);
3410 set_float_handler (NULL_PTR);
3411 return CONST_DOUBLE_FROM_REAL_VALUE (value, mode);
3412 }
3413 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3414
3415 /* We can fold some multi-word operations. */
3416 if (GET_MODE_CLASS (mode) == MODE_INT
3417 && width == HOST_BITS_PER_WIDE_INT * 2
3418 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
3419 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
3420 {
3421 HOST_WIDE_INT l1, l2, h1, h2, lv, hv;
3422
3423 if (GET_CODE (op0) == CONST_DOUBLE)
3424 l1 = CONST_DOUBLE_LOW (op0), h1 = CONST_DOUBLE_HIGH (op0);
3425 else
3426 l1 = INTVAL (op0), h1 = l1 < 0 ? -1 : 0;
3427
3428 if (GET_CODE (op1) == CONST_DOUBLE)
3429 l2 = CONST_DOUBLE_LOW (op1), h2 = CONST_DOUBLE_HIGH (op1);
3430 else
3431 l2 = INTVAL (op1), h2 = l2 < 0 ? -1 : 0;
3432
3433 switch (code)
3434 {
3435 case MINUS:
3436 /* A - B == A + (-B). */
3437 neg_double (l2, h2, &lv, &hv);
3438 l2 = lv, h2 = hv;
3439
3440 /* ... fall through ... */
3441
3442 case PLUS:
3443 add_double (l1, h1, l2, h2, &lv, &hv);
3444 break;
3445
3446 case MULT:
3447 mul_double (l1, h1, l2, h2, &lv, &hv);
3448 break;
3449
3450 case DIV: case MOD: case UDIV: case UMOD:
3451 /* We'd need to include tree.h to do this and it doesn't seem worth
3452 it. */
3453 return 0;
3454
3455 case AND:
3456 lv = l1 & l2, hv = h1 & h2;
3457 break;
3458
3459 case IOR:
3460 lv = l1 | l2, hv = h1 | h2;
3461 break;
3462
3463 case XOR:
3464 lv = l1 ^ l2, hv = h1 ^ h2;
3465 break;
3466
3467 case SMIN:
3468 if (h1 < h2
3469 || (h1 == h2
3470 && ((unsigned HOST_WIDE_INT) l1
3471 < (unsigned HOST_WIDE_INT) l2)))
3472 lv = l1, hv = h1;
3473 else
3474 lv = l2, hv = h2;
3475 break;
3476
3477 case SMAX:
3478 if (h1 > h2
3479 || (h1 == h2
3480 && ((unsigned HOST_WIDE_INT) l1
3481 > (unsigned HOST_WIDE_INT) l2)))
3482 lv = l1, hv = h1;
3483 else
3484 lv = l2, hv = h2;
3485 break;
3486
3487 case UMIN:
3488 if ((unsigned HOST_WIDE_INT) h1 < (unsigned HOST_WIDE_INT) h2
3489 || (h1 == h2
3490 && ((unsigned HOST_WIDE_INT) l1
3491 < (unsigned HOST_WIDE_INT) l2)))
3492 lv = l1, hv = h1;
3493 else
3494 lv = l2, hv = h2;
3495 break;
3496
3497 case UMAX:
3498 if ((unsigned HOST_WIDE_INT) h1 > (unsigned HOST_WIDE_INT) h2
3499 || (h1 == h2
3500 && ((unsigned HOST_WIDE_INT) l1
3501 > (unsigned HOST_WIDE_INT) l2)))
3502 lv = l1, hv = h1;
3503 else
3504 lv = l2, hv = h2;
3505 break;
3506
3507 case LSHIFTRT: case ASHIFTRT:
3508 case ASHIFT:
3509 case ROTATE: case ROTATERT:
3510 #ifdef SHIFT_COUNT_TRUNCATED
3511 if (SHIFT_COUNT_TRUNCATED)
3512 l2 &= (GET_MODE_BITSIZE (mode) - 1), h2 = 0;
3513 #endif
3514
3515 if (h2 != 0 || l2 < 0 || l2 >= GET_MODE_BITSIZE (mode))
3516 return 0;
3517
3518 if (code == LSHIFTRT || code == ASHIFTRT)
3519 rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
3520 code == ASHIFTRT);
3521 else if (code == ASHIFT)
3522 lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv, 1);
3523 else if (code == ROTATE)
3524 lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3525 else /* code == ROTATERT */
3526 rrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3527 break;
3528
3529 default:
3530 return 0;
3531 }
3532
3533 return immed_double_const (lv, hv, mode);
3534 }
3535
3536 if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT
3537 || width > HOST_BITS_PER_WIDE_INT || width == 0)
3538 {
3539 /* Even if we can't compute a constant result,
3540 there are some cases worth simplifying. */
3541
3542 switch (code)
3543 {
3544 case PLUS:
3545 /* In IEEE floating point, x+0 is not the same as x. Similarly
3546 for the other optimizations below. */
3547 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3548 && FLOAT_MODE_P (mode) && ! flag_fast_math)
3549 break;
3550
3551 if (op1 == CONST0_RTX (mode))
3552 return op0;
3553
3554 /* ((-a) + b) -> (b - a) and similarly for (a + (-b)) */
3555 if (GET_CODE (op0) == NEG)
3556 return cse_gen_binary (MINUS, mode, op1, XEXP (op0, 0));
3557 else if (GET_CODE (op1) == NEG)
3558 return cse_gen_binary (MINUS, mode, op0, XEXP (op1, 0));
3559
3560 /* Handle both-operands-constant cases. We can only add
3561 CONST_INTs to constants since the sum of relocatable symbols
3562 can't be handled by most assemblers. Don't add CONST_INT
3563 to CONST_INT since overflow won't be computed properly if wider
3564 than HOST_BITS_PER_WIDE_INT. */
3565
3566 if (CONSTANT_P (op0) && GET_MODE (op0) != VOIDmode
3567 && GET_CODE (op1) == CONST_INT)
3568 return plus_constant (op0, INTVAL (op1));
3569 else if (CONSTANT_P (op1) && GET_MODE (op1) != VOIDmode
3570 && GET_CODE (op0) == CONST_INT)
3571 return plus_constant (op1, INTVAL (op0));
3572
3573 /* See if this is something like X * C - X or vice versa or
3574 if the multiplication is written as a shift. If so, we can
3575 distribute and make a new multiply, shift, or maybe just
3576 have X (if C is 2 in the example above). But don't make
3577 real multiply if we didn't have one before. */
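	  /* E.g. (plus (mult (reg 100) (const_int 3)) (reg 100)) has
	     coefficients 3 and 1 and becomes (mult (reg 100) (const_int 4));
	     an ASHIFT by 1 counts as a coefficient of 2. (Register numbers
	     are illustrative.) */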
3578
3579 if (! FLOAT_MODE_P (mode))
3580 {
3581 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3582 rtx lhs = op0, rhs = op1;
3583 int had_mult = 0;
3584
3585 if (GET_CODE (lhs) == NEG)
3586 coeff0 = -1, lhs = XEXP (lhs, 0);
3587 else if (GET_CODE (lhs) == MULT
3588 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3589 {
3590 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3591 had_mult = 1;
3592 }
3593 else if (GET_CODE (lhs) == ASHIFT
3594 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3595 && INTVAL (XEXP (lhs, 1)) >= 0
3596 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3597 {
3598 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3599 lhs = XEXP (lhs, 0);
3600 }
3601
3602 if (GET_CODE (rhs) == NEG)
3603 coeff1 = -1, rhs = XEXP (rhs, 0);
3604 else if (GET_CODE (rhs) == MULT
3605 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3606 {
3607 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3608 had_mult = 1;
3609 }
3610 else if (GET_CODE (rhs) == ASHIFT
3611 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3612 && INTVAL (XEXP (rhs, 1)) >= 0
3613 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3614 {
3615 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3616 rhs = XEXP (rhs, 0);
3617 }
3618
3619 if (rtx_equal_p (lhs, rhs))
3620 {
3621 tem = cse_gen_binary (MULT, mode, lhs,
3622 GEN_INT (coeff0 + coeff1));
3623 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3624 }
3625 }
3626
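/* For illustration, the distribution above rewrites
   (plus (mult x 3) x) as (mult x 4): the bare X contributes
   coefficient 1 and (mult x 3) contributes 3, and HAD_MULT is set
   because a real multiply was already present.  By contrast,
   (plus (ashift x 2) (neg x)) would give (mult x 3), but since no
   multiply was present and the result is still a MULT, it is
   discarded; (plus x (neg x)) folds all the way to zero, so it is
   kept.  */
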
3627 /* If one of the operands is a PLUS or a MINUS, see if we can
3628 simplify this by the associative law.
3629 Don't use the associative law for floating point.
3630 The inaccuracy makes it nonassociative,
3631 and subtle programs can break if operations are associated. */
3632
3633 if (INTEGRAL_MODE_P (mode)
3634 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3635 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3636 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3637 return tem;
3638 break;
3639
3640 case COMPARE:
3641 #ifdef HAVE_cc0
3642 /* Convert (compare FOO (const_int 0)) to FOO when we are using
3643 cc0; if we aren't, leave it as a COMPARE so we can
3644 distinguish it from a register-register copy.
3645
3646 In IEEE floating point, x-0 is not the same as x. */
3647
3648 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3649 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3650 && op1 == CONST0_RTX (mode))
3651 return op0;
3652 #else
3653 /* Do nothing here. */
3654 #endif
3655 break;
3656
3657 case MINUS:
3658 /* None of these optimizations can be done for IEEE
3659 floating point. */
3660 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3661 && FLOAT_MODE_P (mode) && ! flag_fast_math)
3662 break;
3663
3664 /* We can't assume x-x is 0 even with non-IEEE floating point,
3665 but since it is zero except in very strange circumstances, we
3666 will treat it as zero with -ffast-math. */
3667 if (rtx_equal_p (op0, op1)
3668 && ! side_effects_p (op0)
3669 && (! FLOAT_MODE_P (mode) || flag_fast_math))
3670 return CONST0_RTX (mode);
3671
3672 /* Change subtraction from zero into negation. */
3673 if (op0 == CONST0_RTX (mode))
3674 return gen_rtx (NEG, mode, op1);
3675
3676 /* (-1 - a) is ~a. */
3677 if (op0 == constm1_rtx)
3678 return gen_rtx (NOT, mode, op1);
3679
3680 /* Subtracting 0 has no effect. */
3681 if (op1 == CONST0_RTX (mode))
3682 return op0;
3683
3684 /* See if this is something like X * C - X or vice versa or
3685 if the multiplication is written as a shift. If so, we can
3686 distribute and make a new multiply, shift, or maybe just
3687 have X (if C is 2 in the example above). But don't make
3688 real multiply if we didn't have one before. */
3689
3690 if (! FLOAT_MODE_P (mode))
3691 {
3692 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3693 rtx lhs = op0, rhs = op1;
3694 int had_mult = 0;
3695
3696 if (GET_CODE (lhs) == NEG)
3697 coeff0 = -1, lhs = XEXP (lhs, 0);
3698 else if (GET_CODE (lhs) == MULT
3699 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3700 {
3701 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3702 had_mult = 1;
3703 }
3704 else if (GET_CODE (lhs) == ASHIFT
3705 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3706 && INTVAL (XEXP (lhs, 1)) >= 0
3707 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3708 {
3709 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3710 lhs = XEXP (lhs, 0);
3711 }
3712
3713 if (GET_CODE (rhs) == NEG)
3714 coeff1 = -1, rhs = XEXP (rhs, 0);
3715 else if (GET_CODE (rhs) == MULT
3716 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3717 {
3718 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3719 had_mult = 1;
3720 }
3721 else if (GET_CODE (rhs) == ASHIFT
3722 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3723 && INTVAL (XEXP (rhs, 1)) >= 0
3724 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3725 {
3726 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3727 rhs = XEXP (rhs, 0);
3728 }
3729
3730 if (rtx_equal_p (lhs, rhs))
3731 {
3732 tem = cse_gen_binary (MULT, mode, lhs,
3733 GEN_INT (coeff0 - coeff1));
3734 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3735 }
3736 }
3737
3738 /* (a - (-b)) -> (a + b). */
3739 if (GET_CODE (op1) == NEG)
3740 return cse_gen_binary (PLUS, mode, op0, XEXP (op1, 0));
3741
3742 /* If one of the operands is a PLUS or a MINUS, see if we can
3743 simplify this by the associative law.
3744 Don't use the associative law for floating point.
3745 The inaccuracy makes it nonassociative,
3746 and subtle programs can break if operations are associated. */
3747
3748 if (INTEGRAL_MODE_P (mode)
3749 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3750 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3751 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3752 return tem;
3753
3754 /* Don't let a relocatable value get a negative coeff. */
3755 if (GET_CODE (op1) == CONST_INT && GET_MODE (op0) != VOIDmode)
3756 return plus_constant (op0, - INTVAL (op1));
3757
3758 /* (x - (x & y)) -> (x & ~y) */
3759 if (GET_CODE (op1) == AND)
3760 {
3761 if (rtx_equal_p (op0, XEXP (op1, 0)))
3762 return cse_gen_binary (AND, mode, op0, gen_rtx (NOT, mode, XEXP (op1, 1)));
3763 if (rtx_equal_p (op0, XEXP (op1, 1)))
3764 return cse_gen_binary (AND, mode, op0, gen_rtx (NOT, mode, XEXP (op1, 0)));
3765 }
3766 break;
3767
3768 case MULT:
3769 if (op1 == constm1_rtx)
3770 {
3771 tem = simplify_unary_operation (NEG, mode, op0, mode);
3772
3773 return tem ? tem : gen_rtx (NEG, mode, op0);
3774 }
3775
3776 /* In IEEE floating point, x*0 is not always 0. */
3777 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3778 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3779 && op1 == CONST0_RTX (mode)
3780 && ! side_effects_p (op0))
3781 return op1;
3782
3783 /* In IEEE floating point, x*1 is not equivalent to x for nans.
3784 However, ANSI says we can drop signals,
3785 so we can do this anyway. */
3786 if (op1 == CONST1_RTX (mode))
3787 return op0;
3788
3789 /* Convert multiply by constant power of two into shift unless
3790 we are still generating RTL. This test is a kludge. */
3791 if (GET_CODE (op1) == CONST_INT
3792 && (val = exact_log2 (INTVAL (op1))) >= 0
3793 && ! rtx_equal_function_value_matters)
3794 return gen_rtx (ASHIFT, mode, op0, GEN_INT (val));
3795
3796 if (GET_CODE (op1) == CONST_DOUBLE
3797 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT)
3798 {
3799 REAL_VALUE_TYPE d;
3800 jmp_buf handler;
3801 int op1is2, op1ism1;
3802
3803 if (setjmp (handler))
3804 return 0;
3805
3806 set_float_handler (handler);
3807 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
3808 op1is2 = REAL_VALUES_EQUAL (d, dconst2);
3809 op1ism1 = REAL_VALUES_EQUAL (d, dconstm1);
3810 set_float_handler (NULL_PTR);
3811
3812 /* x*2 is x+x and x*(-1) is -x */
3813 if (op1is2 && GET_MODE (op0) == mode)
3814 return gen_rtx (PLUS, mode, op0, copy_rtx (op0));
3815
3816 else if (op1ism1 && GET_MODE (op0) == mode)
3817 return gen_rtx (NEG, mode, op0);
3818 }
3819 break;
3820
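/* For example, with op1 == (const_int 8) above, exact_log2 returns 3
   and the product is rewritten as (ashift op0 (const_int 3)); for a
   CONST_DOUBLE, REAL_VALUES_EQUAL detects 2.0 and -1.0 so that x*2.0
   becomes (plus x x) and x*-1.0 becomes (neg x).  */
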
3821 case IOR:
3822 if (op1 == const0_rtx)
3823 return op0;
3824 if (GET_CODE (op1) == CONST_INT
3825 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3826 return op1;
3827 if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3828 return op0;
3829 /* A | (~A) -> -1 */
3830 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
3831 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
3832 && ! side_effects_p (op0)
3833 && GET_MODE_CLASS (mode) != MODE_CC)
3834 return constm1_rtx;
3835 break;
3836
3837 case XOR:
3838 if (op1 == const0_rtx)
3839 return op0;
3840 if (GET_CODE (op1) == CONST_INT
3841 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3842 return gen_rtx (NOT, mode, op0);
3843 if (op0 == op1 && ! side_effects_p (op0)
3844 && GET_MODE_CLASS (mode) != MODE_CC)
3845 return const0_rtx;
3846 break;
3847
3848 case AND:
3849 if (op1 == const0_rtx && ! side_effects_p (op0))
3850 return const0_rtx;
3851 if (GET_CODE (op1) == CONST_INT
3852 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3853 return op0;
3854 if (op0 == op1 && ! side_effects_p (op0)
3855 && GET_MODE_CLASS (mode) != MODE_CC)
3856 return op0;
3857 /* A & (~A) -> 0 */
3858 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
3859 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
3860 && ! side_effects_p (op0)
3861 && GET_MODE_CLASS (mode) != MODE_CC)
3862 return const0_rtx;
3863 break;
3864
3865 case UDIV:
3866 /* Convert divide by power of two into shift (divide by 1 handled
3867 below). */
3868 if (GET_CODE (op1) == CONST_INT
3869 && (arg1 = exact_log2 (INTVAL (op1))) > 0)
3870 return gen_rtx (LSHIFTRT, mode, op0, GEN_INT (arg1));
3871
3872 /* ... fall through ... */
3873
3874 case DIV:
3875 if (op1 == CONST1_RTX (mode))
3876 return op0;
3877
3878 /* In IEEE floating point, 0/x is not always 0. */
3879 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3880 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3881 && op0 == CONST0_RTX (mode)
3882 && ! side_effects_p (op1))
3883 return op0;
3884
3885 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3886 /* Change division by a constant into multiplication. Only do
3887 this with -ffast-math until an expert says it is safe in
3888 general. */
3889 else if (GET_CODE (op1) == CONST_DOUBLE
3890 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT
3891 && op1 != CONST0_RTX (mode)
3892 && flag_fast_math)
3893 {
3894 REAL_VALUE_TYPE d;
3895 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
3896
3897 if (! REAL_VALUES_EQUAL (d, dconst0))
3898 {
3899 #if defined (REAL_ARITHMETIC)
3900 REAL_ARITHMETIC (d, rtx_to_tree_code (DIV), dconst1, d);
3901 return gen_rtx (MULT, mode, op0,
3902 CONST_DOUBLE_FROM_REAL_VALUE (d, mode));
3903 #else
3904 return gen_rtx (MULT, mode, op0,
3905 CONST_DOUBLE_FROM_REAL_VALUE (1./d, mode));
3906 #endif
3907 }
3908 }
3909 #endif
3910 break;
3911
3912 case UMOD:
3913 /* Handle modulus by power of two (mod with 1 handled below). */
3914 if (GET_CODE (op1) == CONST_INT
3915 && exact_log2 (INTVAL (op1)) > 0)
3916 return gen_rtx (AND, mode, op0, GEN_INT (INTVAL (op1) - 1));
3917
3918 /* ... fall through ... */
3919
3920 case MOD:
3921 if ((op0 == const0_rtx || op1 == const1_rtx)
3922 && ! side_effects_p (op0) && ! side_effects_p (op1))
3923 return const0_rtx;
3924 break;
3925
3926 case ROTATERT:
3927 case ROTATE:
3928 /* Rotating ~0 always results in ~0. */
3929 if (GET_CODE (op0) == CONST_INT && width <= HOST_BITS_PER_WIDE_INT
3930 && INTVAL (op0) == GET_MODE_MASK (mode)
3931 && ! side_effects_p (op1))
3932 return op0;
3933
3934 /* ... fall through ... */
3935
3936 case ASHIFT:
3937 case ASHIFTRT:
3938 case LSHIFTRT:
3939 if (op1 == const0_rtx)
3940 return op0;
3941 if (op0 == const0_rtx && ! side_effects_p (op1))
3942 return op0;
3943 break;
3944
3945 case SMIN:
3946 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
3947 && INTVAL (op1) == (HOST_WIDE_INT) 1 << (width - 1)
3948 && ! side_effects_p (op0))
3949 return op1;
3950 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3951 return op0;
3952 break;
3953
3954 case SMAX:
3955 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
3956 && (INTVAL (op1)
3957 == (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode) >> 1)
3958 && ! side_effects_p (op0))
3959 return op1;
3960 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3961 return op0;
3962 break;
3963
3964 case UMIN:
3965 if (op1 == const0_rtx && ! side_effects_p (op0))
3966 return op1;
3967 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3968 return op0;
3969 break;
3970
3971 case UMAX:
3972 if (op1 == constm1_rtx && ! side_effects_p (op0))
3973 return op1;
3974 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3975 return op0;
3976 break;
3977
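/* The MIN/MAX cases above use the identities SMIN (x, s_min) == s_min,
   SMAX (x, s_max) == s_max, UMIN (x, 0) == 0 and UMAX (x, ~0) == ~0,
   where s_min and s_max denote the extreme signed values of MODE; X
   must be free of side effects for the operand to be dropped.  */
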
3978 default:
3979 abort ();
3980 }
3981
3982 return 0;
3983 }
3984
3985 /* Get the integer argument values in two forms:
3986 zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S. */
3987
3988 arg0 = INTVAL (op0);
3989 arg1 = INTVAL (op1);
3990
3991 if (width < HOST_BITS_PER_WIDE_INT)
3992 {
3993 arg0 &= ((HOST_WIDE_INT) 1 << width) - 1;
3994 arg1 &= ((HOST_WIDE_INT) 1 << width) - 1;
3995
3996 arg0s = arg0;
3997 if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1)))
3998 arg0s |= ((HOST_WIDE_INT) (-1) << width);
3999
4000 arg1s = arg1;
4001 if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4002 arg1s |= ((HOST_WIDE_INT) (-1) << width);
4003 }
4004 else
4005 {
4006 arg0s = arg0;
4007 arg1s = arg1;
4008 }
4009
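/* For example, with width == 8 and INTVAL (op0) == -16, the
   zero-extended form is arg0 == 0xf0 (240) while the sign-extended
   form is arg0s == -16; unsigned operators below such as UDIV, UMOD
   and the logicals use ARG0/ARG1, while the signed ones use
   ARG0S/ARG1S.  */
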
4010 /* Compute the value of the arithmetic. */
4011
4012 switch (code)
4013 {
4014 case PLUS:
4015 val = arg0s + arg1s;
4016 break;
4017
4018 case MINUS:
4019 val = arg0s - arg1s;
4020 break;
4021
4022 case MULT:
4023 val = arg0s * arg1s;
4024 break;
4025
4026 case DIV:
4027 if (arg1s == 0)
4028 return 0;
4029 val = arg0s / arg1s;
4030 break;
4031
4032 case MOD:
4033 if (arg1s == 0)
4034 return 0;
4035 val = arg0s % arg1s;
4036 break;
4037
4038 case UDIV:
4039 if (arg1 == 0)
4040 return 0;
4041 val = (unsigned HOST_WIDE_INT) arg0 / arg1;
4042 break;
4043
4044 case UMOD:
4045 if (arg1 == 0)
4046 return 0;
4047 val = (unsigned HOST_WIDE_INT) arg0 % arg1;
4048 break;
4049
4050 case AND:
4051 val = arg0 & arg1;
4052 break;
4053
4054 case IOR:
4055 val = arg0 | arg1;
4056 break;
4057
4058 case XOR:
4059 val = arg0 ^ arg1;
4060 break;
4061
4062 case LSHIFTRT:
4063 /* If shift count is undefined, don't fold it; let the machine do
4064 what it wants. But truncate it if the machine will do that. */
4065 if (arg1 < 0)
4066 return 0;
4067
4068 #ifdef SHIFT_COUNT_TRUNCATED
4069 if (SHIFT_COUNT_TRUNCATED)
4070 arg1 %= width;
4071 #endif
4072
4073 val = ((unsigned HOST_WIDE_INT) arg0) >> arg1;
4074 break;
4075
4076 case ASHIFT:
4077 if (arg1 < 0)
4078 return 0;
4079
4080 #ifdef SHIFT_COUNT_TRUNCATED
4081 if (SHIFT_COUNT_TRUNCATED)
4082 arg1 %= width;
4083 #endif
4084
4085 val = ((unsigned HOST_WIDE_INT) arg0) << arg1;
4086 break;
4087
4088 case ASHIFTRT:
4089 if (arg1 < 0)
4090 return 0;
4091
4092 #ifdef SHIFT_COUNT_TRUNCATED
4093 if (SHIFT_COUNT_TRUNCATED)
4094 arg1 %= width;
4095 #endif
4096
4097 val = arg0s >> arg1;
4098
4099 /* Bootstrap compiler may not have sign extended the right shift.
4100 Manually extend the sign to ensure the bootstrap cc matches gcc. */
4101 if (arg0s < 0 && arg1 > 0)
4102 val |= ((HOST_WIDE_INT) -1) << (HOST_BITS_PER_WIDE_INT - arg1);
4103
4104 break;
4105
4106 case ROTATERT:
4107 if (arg1 < 0)
4108 return 0;
4109
4110 arg1 %= width;
4111 val = ((((unsigned HOST_WIDE_INT) arg0) << (width - arg1))
4112 | (((unsigned HOST_WIDE_INT) arg0) >> arg1));
4113 break;
4114
4115 case ROTATE:
4116 if (arg1 < 0)
4117 return 0;
4118
4119 arg1 %= width;
4120 val = ((((unsigned HOST_WIDE_INT) arg0) << arg1)
4121 | (((unsigned HOST_WIDE_INT) arg0) >> (width - arg1)));
4122 break;
4123
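/* For illustration, after the reduction ARG1 %= WIDTH the rotates are
   the usual two-shift combination; a minimal sketch with hypothetical
   names:

       n %= width;
       left = (x << n) | (x >> (width - n));

   e.g. rotating the 8-bit value 0x81 left by 1 gives 0x03.  (The
   sketch, like the code above, assumes 0 < n < width so that neither
   shift count reaches the type width.)  */
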
4124 case COMPARE:
4125 /* Do nothing here. */
4126 return 0;
4127
4128 case SMIN:
4129 val = arg0s <= arg1s ? arg0s : arg1s;
4130 break;
4131
4132 case UMIN:
4133 val = ((unsigned HOST_WIDE_INT) arg0
4134 <= (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4135 break;
4136
4137 case SMAX:
4138 val = arg0s > arg1s ? arg0s : arg1s;
4139 break;
4140
4141 case UMAX:
4142 val = ((unsigned HOST_WIDE_INT) arg0
4143 > (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4144 break;
4145
4146 default:
4147 abort ();
4148 }
4149
4150 /* Clear the bits that don't belong in our mode, unless they and our sign
4151 bit are all one. So we get either a reasonable negative value or a
4152 reasonable unsigned value for this mode. */
4153 if (width < HOST_BITS_PER_WIDE_INT
4154 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4155 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4156 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4157
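/* For example, with width == 8: val == 0x1f0 has bits above the sign
   bit that do not all match it, so it is masked down to 0xf0, while
   val == -16 already has all those bits equal to its sign bit and is
   left alone as a reasonable negative value.  */
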
4158 /* If this would be an entire word for the target, but is not for
4159 the host, then sign-extend on the host so that the number will look
4160 the same way on the host that it would on the target.
4161
4162 For example, when building a 32 bit sparc targeted compiler hosted
4163 on a 64 bit alpha, we want the 32 bit unsigned value -1 to be
4164 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
4165 The latter confuses the sparc backend. */
4166
4167 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
4168 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
4169 val |= ((HOST_WIDE_INT) (-1) << width);
4170
4171 return GEN_INT (val);
4172 }
4173 \f
4174 /* Simplify a PLUS or MINUS, at least one of whose operands may be another
4175 PLUS or MINUS.
4176
4177 Rather than test for specific cases, we do this by a brute-force method
4178 and do all possible simplifications until no more changes occur. Then
4179 we rebuild the operation. */
4180
4181 static rtx
4182 simplify_plus_minus (code, mode, op0, op1)
4183 enum rtx_code code;
4184 enum machine_mode mode;
4185 rtx op0, op1;
4186 {
4187 rtx ops[8];
4188 int negs[8];
4189 rtx result, tem;
4190 int n_ops = 2, input_ops = 2, input_consts = 0, n_consts = 0;
4191 int first = 1, negate = 0, changed;
4192 int i, j;
4193
4194 bzero ((char *) ops, sizeof ops);
4195
4196 /* Set up the two operands and then expand them until nothing has been
4197 changed. If we run out of room in our array, give up; this should
4198 almost never happen. */
4199
4200 ops[0] = op0, ops[1] = op1, negs[0] = 0, negs[1] = (code == MINUS);
4201
4202 changed = 1;
4203 while (changed)
4204 {
4205 changed = 0;
4206
4207 for (i = 0; i < n_ops; i++)
4208 switch (GET_CODE (ops[i]))
4209 {
4210 case PLUS:
4211 case MINUS:
4212 if (n_ops == 7)
4213 return 0;
4214
4215 ops[n_ops] = XEXP (ops[i], 1);
4216 negs[n_ops++] = GET_CODE (ops[i]) == MINUS ? !negs[i] : negs[i];
4217 ops[i] = XEXP (ops[i], 0);
4218 input_ops++;
4219 changed = 1;
4220 break;
4221
4222 case NEG:
4223 ops[i] = XEXP (ops[i], 0);
4224 negs[i] = ! negs[i];
4225 changed = 1;
4226 break;
4227
4228 case CONST:
4229 ops[i] = XEXP (ops[i], 0);
4230 input_consts++;
4231 changed = 1;
4232 break;
4233
4234 case NOT:
4235 /* ~a -> (-a - 1) */
4236 if (n_ops != 7)
4237 {
4238 ops[n_ops] = constm1_rtx;
4239 negs[n_ops++] = negs[i];
4240 ops[i] = XEXP (ops[i], 0);
4241 negs[i] = ! negs[i];
4242 changed = 1;
4243 }
4244 break;
4245
4246 case CONST_INT:
4247 if (negs[i])
4248 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0, changed = 1;
4249 break;
4250 }
4251 }
4252
4253 /* If we only have two operands, we can't do anything. */
4254 if (n_ops <= 2)
4255 return 0;
4256
4257 /* Now simplify each pair of operands until nothing changes. The first
4258 time through just simplify constants against each other. */
4259
4260 changed = 1;
4261 while (changed)
4262 {
4263 changed = first;
4264
4265 for (i = 0; i < n_ops - 1; i++)
4266 for (j = i + 1; j < n_ops; j++)
4267 if (ops[i] != 0 && ops[j] != 0
4268 && (! first || (CONSTANT_P (ops[i]) && CONSTANT_P (ops[j]))))
4269 {
4270 rtx lhs = ops[i], rhs = ops[j];
4271 enum rtx_code ncode = PLUS;
4272
4273 if (negs[i] && ! negs[j])
4274 lhs = ops[j], rhs = ops[i], ncode = MINUS;
4275 else if (! negs[i] && negs[j])
4276 ncode = MINUS;
4277
4278 tem = simplify_binary_operation (ncode, mode, lhs, rhs);
4279 if (tem)
4280 {
4281 ops[i] = tem, ops[j] = 0;
4282 negs[i] = negs[i] && negs[j];
4283 if (GET_CODE (tem) == NEG)
4284 ops[i] = XEXP (tem, 0), negs[i] = ! negs[i];
4285
4286 if (GET_CODE (ops[i]) == CONST_INT && negs[i])
4287 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0;
4288 changed = 1;
4289 }
4290 }
4291
4292 first = 0;
4293 }
4294
4295 /* Pack all the operands to the lower-numbered entries and give up if
4296 we didn't reduce the number of operands we had. Make sure we
4297 count a CONST as two operands. If we have the same number of
4298 operands, but have made more CONSTs than we had, this is also
4299 an improvement, so accept it. */
4300
4301 for (i = 0, j = 0; j < n_ops; j++)
4302 if (ops[j] != 0)
4303 {
4304 ops[i] = ops[j], negs[i++] = negs[j];
4305 if (GET_CODE (ops[j]) == CONST)
4306 n_consts++;
4307 }
4308
4309 if (i + n_consts > input_ops
4310 || (i + n_consts == input_ops && n_consts <= input_consts))
4311 return 0;
4312
4313 n_ops = i;
4314
4315 /* If we have a CONST_INT, put it last. */
4316 for (i = 0; i < n_ops - 1; i++)
4317 if (GET_CODE (ops[i]) == CONST_INT)
4318 {
4319 tem = ops[n_ops - 1], ops[n_ops - 1] = ops[i] , ops[i] = tem;
4320 j = negs[n_ops - 1], negs[n_ops - 1] = negs[i], negs[i] = j;
4321 }
4322
4323 /* Put a non-negated operand first. If there aren't any, make all
4324 operands positive and negate the whole thing later. */
4325 for (i = 0; i < n_ops && negs[i]; i++)
4326 ;
4327
4328 if (i == n_ops)
4329 {
4330 for (i = 0; i < n_ops; i++)
4331 negs[i] = 0;
4332 negate = 1;
4333 }
4334 else if (i != 0)
4335 {
4336 tem = ops[0], ops[0] = ops[i], ops[i] = tem;
4337 j = negs[0], negs[0] = negs[i], negs[i] = j;
4338 }
4339
4340 /* Now make the result by performing the requested operations. */
4341 result = ops[0];
4342 for (i = 1; i < n_ops; i++)
4343 result = cse_gen_binary (negs[i] ? MINUS : PLUS, mode, result, ops[i]);
4344
4345 return negate ? gen_rtx (NEG, mode, result) : result;
4346 }
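
/* For illustration, simplify_plus_minus flattens its input into the
   OPS/NEGS arrays before recombining; e.g.

       (minus a (minus b c))   =>   ops  = { a, b, c }
                                    negs = { 0, 1, 0 }

   which represents a - b + c.  Each pass then lets
   simplify_binary_operation collapse any foldable pair in place
   (constants only on the first pass), and the result is rebuilt with
   cse_gen_binary.  */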
4347 \f
4348 /* Make a binary operation by properly ordering the operands and
4349 seeing if the expression folds. */
4350
4351 static rtx
4352 cse_gen_binary (code, mode, op0, op1)
4353 enum rtx_code code;
4354 enum machine_mode mode;
4355 rtx op0, op1;
4356 {
4357 rtx tem;
4358
4359 /* Put complex operands first and constants second if commutative. */
4360 if (GET_RTX_CLASS (code) == 'c'
4361 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
4362 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
4363 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
4364 || (GET_CODE (op0) == SUBREG
4365 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
4366 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
4367 tem = op0, op0 = op1, op1 = tem;
4368
4369 /* If this simplifies, do it. */
4370 tem = simplify_binary_operation (code, mode, op0, op1);
4371
4372 if (tem)
4373 return tem;
4374
4375 /* Handle addition and subtraction of CONST_INT specially. Otherwise,
4376 just form the operation. */
4377
4378 if (code == PLUS && GET_CODE (op1) == CONST_INT
4379 && GET_MODE (op0) != VOIDmode)
4380 return plus_constant (op0, INTVAL (op1));
4381 else if (code == MINUS && GET_CODE (op1) == CONST_INT
4382 && GET_MODE (op0) != VOIDmode)
4383 return plus_constant (op0, - INTVAL (op1));
4384 else
4385 return gen_rtx (code, mode, op0, op1);
4386 }
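
/* For example, cse_gen_binary (PLUS, SImode, GEN_INT (4), reg) first
   swaps its operands so the constant comes second; the sum of a
   register and a constant doesn't fold, so it is built with
   plus_constant, which also collapses nested sums such as
   (plus (plus reg 4) 4) into (plus reg 8).  */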
4387 \f
4388 /* Like simplify_binary_operation except used for relational operators.
4389 MODE is the mode of the operands, not that of the result. If MODE
4390 is VOIDmode, both operands must also be VOIDmode and we compare the
4391 operands in "infinite precision".
4392
4393 If no simplification is possible, this function returns zero. Otherwise,
4394 it returns either const_true_rtx or const0_rtx. */
4395
4396 rtx
4397 simplify_relational_operation (code, mode, op0, op1)
4398 enum rtx_code code;
4399 enum machine_mode mode;
4400 rtx op0, op1;
4401 {
4402 int equal, op0lt, op0ltu, op1lt, op1ltu;
4403 rtx tem;
4404
4405 /* If op0 is a compare, extract the comparison arguments from it. */
4406 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
4407 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4408
4409 /* We can't simplify MODE_CC values since we don't know what the
4410 actual comparison is. */
4411 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC
4412 #ifdef HAVE_cc0
4413 || op0 == cc0_rtx
4414 #endif
4415 )
4416 return 0;
4417
4418 /* For integer comparisons of A and B maybe we can simplify A - B and can
4419 then simplify a comparison of that with zero. If A and B are both either
4420 a register or a CONST_INT, this can't help; testing for these cases will
4421 prevent infinite recursion here and speed things up.
4422
4423 If CODE is an unsigned comparison, then we can never do this optimization,
4424 because it gives an incorrect result if the subtraction wraps around zero.
4425 ANSI C defines unsigned operations such that they never overflow, and
4426 thus such cases cannot be ignored. */
4427
4428 if (INTEGRAL_MODE_P (mode) && op1 != const0_rtx
4429 && ! ((GET_CODE (op0) == REG || GET_CODE (op0) == CONST_INT)
4430 && (GET_CODE (op1) == REG || GET_CODE (op1) == CONST_INT))
4431 && 0 != (tem = simplify_binary_operation (MINUS, mode, op0, op1))
4432 && code != GTU && code != GEU && code != LTU && code != LEU)
4433 return simplify_relational_operation (signed_condition (code),
4434 mode, tem, const0_rtx);
4435
4436 /* For non-IEEE floating-point, if the two operands are equal, we know the
4437 result. */
4438 if (rtx_equal_p (op0, op1)
4439 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4440 || ! FLOAT_MODE_P (GET_MODE (op0)) || flag_fast_math))
4441 equal = 1, op0lt = 0, op0ltu = 0, op1lt = 0, op1ltu = 0;
4442
4443 /* If the operands are floating-point constants, see if we can fold
4444 the result. */
4445 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4446 else if (GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
4447 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
4448 {
4449 REAL_VALUE_TYPE d0, d1;
4450 jmp_buf handler;
4451
4452 if (setjmp (handler))
4453 return 0;
4454
4455 set_float_handler (handler);
4456 REAL_VALUE_FROM_CONST_DOUBLE (d0, op0);
4457 REAL_VALUE_FROM_CONST_DOUBLE (d1, op1);
4458 equal = REAL_VALUES_EQUAL (d0, d1);
4459 op0lt = op0ltu = REAL_VALUES_LESS (d0, d1);
4460 op1lt = op1ltu = REAL_VALUES_LESS (d1, d0);
4461 set_float_handler (NULL_PTR);
4462 }
4463 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4464
4465 /* Otherwise, see if the operands are both integers. */
4466 else if ((GET_MODE_CLASS (mode) == MODE_INT || mode == VOIDmode)
4467 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
4468 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
4469 {
4470 int width = GET_MODE_BITSIZE (mode);
4471 HOST_WIDE_INT l0s, h0s, l1s, h1s;
4472 unsigned HOST_WIDE_INT l0u, h0u, l1u, h1u;
4473
4474 /* Get the two words comprising each integer constant. */
4475 if (GET_CODE (op0) == CONST_DOUBLE)
4476 {
4477 l0u = l0s = CONST_DOUBLE_LOW (op0);
4478 h0u = h0s = CONST_DOUBLE_HIGH (op0);
4479 }
4480 else
4481 {
4482 l0u = l0s = INTVAL (op0);
4483 h0u = 0, h0s = l0s < 0 ? -1 : 0;
4484 }
4485
4486 if (GET_CODE (op1) == CONST_DOUBLE)
4487 {
4488 l1u = l1s = CONST_DOUBLE_LOW (op1);
4489 h1u = h1s = CONST_DOUBLE_HIGH (op1);
4490 }
4491 else
4492 {
4493 l1u = l1s = INTVAL (op1);
4494 h1u = 0, h1s = l1s < 0 ? -1 : 0;
4495 }
4496
4497 /* If WIDTH is nonzero and no wider than HOST_BITS_PER_WIDE_INT,
4498 we have to sign- or zero-extend the values. */
4499 if (width != 0 && width <= HOST_BITS_PER_WIDE_INT)
4500 h0u = h1u = 0, h0s = l0s < 0 ? -1 : 0, h1s = l1s < 0 ? -1 : 0;
4501
4502 if (width != 0 && width < HOST_BITS_PER_WIDE_INT)
4503 {
4504 l0u &= ((HOST_WIDE_INT) 1 << width) - 1;
4505 l1u &= ((HOST_WIDE_INT) 1 << width) - 1;
4506
4507 if (l0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4508 l0s |= ((HOST_WIDE_INT) (-1) << width);
4509
4510 if (l1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4511 l1s |= ((HOST_WIDE_INT) (-1) << width);
4512 }
4513
4514 equal = (h0u == h1u && l0u == l1u);
4515 op0lt = (h0s < h1s || (h0s == h1s && l0s < l1s));
4516 op1lt = (h1s < h0s || (h1s == h0s && l1s < l0s));
4517 op0ltu = (h0u < h1u || (h0u == h1u && l0u < l1u));
4518 op1ltu = (h1u < h0u || (h1u == h0u && l1u < l0u));
4519 }
4520
4521 /* Otherwise, there are some code-specific tests we can make. */
4522 else
4523 {
4524 switch (code)
4525 {
4526 case EQ:
4527 /* References to the frame plus a constant or labels cannot
4528 be zero, but a SYMBOL_REF can be, due to #pragma weak. */
4529 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4530 || GET_CODE (op0) == LABEL_REF)
4531 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4532 /* On some machines, the ap reg can be 0 sometimes. */
4533 && op0 != arg_pointer_rtx
4534 #endif
4535 )
4536 return const0_rtx;
4537 break;
4538
4539 case NE:
4540 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4541 || GET_CODE (op0) == LABEL_REF)
4542 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4543 && op0 != arg_pointer_rtx
4544 #endif
4545 )
4546 return const_true_rtx;
4547 break;
4548
4549 case GEU:
4550 /* Unsigned values are never negative. */
4551 if (op1 == const0_rtx)
4552 return const_true_rtx;
4553 break;
4554
4555 case LTU:
4556 if (op1 == const0_rtx)
4557 return const0_rtx;
4558 break;
4559
4560 case LEU:
4561 /* Unsigned values are never greater than the largest
4562 unsigned value. */
4563 if (GET_CODE (op1) == CONST_INT
4564 && INTVAL (op1) == GET_MODE_MASK (mode)
4565 && INTEGRAL_MODE_P (mode))
4566 return const_true_rtx;
4567 break;
4568
4569 case GTU:
4570 if (GET_CODE (op1) == CONST_INT
4571 && INTVAL (op1) == GET_MODE_MASK (mode)
4572 && INTEGRAL_MODE_P (mode))
4573 return const0_rtx;
4574 break;
4575 }
4576
4577 return 0;
4578 }
4579
4580 /* If we reach here, EQUAL, OP0LT, OP0LTU, OP1LT, and OP1LTU are set
4581 as appropriate. */
4582 switch (code)
4583 {
4584 case EQ:
4585 return equal ? const_true_rtx : const0_rtx;
4586 case NE:
4587 return ! equal ? const_true_rtx : const0_rtx;
4588 case LT:
4589 return op0lt ? const_true_rtx : const0_rtx;
4590 case GT:
4591 return op1lt ? const_true_rtx : const0_rtx;
4592 case LTU:
4593 return op0ltu ? const_true_rtx : const0_rtx;
4594 case GTU:
4595 return op1ltu ? const_true_rtx : const0_rtx;
4596 case LE:
4597 return equal || op0lt ? const_true_rtx : const0_rtx;
4598 case GE:
4599 return equal || op1lt ? const_true_rtx : const0_rtx;
4600 case LEU:
4601 return equal || op0ltu ? const_true_rtx : const0_rtx;
4602 case GEU:
4603 return equal || op1ltu ? const_true_rtx : const0_rtx;
4604 }
4605
4606 abort ();
4607 }
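
/* For example, simplify_relational_operation (GT, SImode, GEN_INT (5),
   GEN_INT (3)) sets op1lt (3 < 5 as signed values) and GT therefore
   yields const_true_rtx; with LEU the zero-extended values are
   compared instead and the result is const0_rtx.  */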
4608 \f
4609 /* Simplify CODE, an operation with result mode MODE and three operands,
4610 OP0, OP1, and OP2. OP0_MODE was the mode of OP0 before it became
4611 a constant. Return 0 if no simplification is possible. */
4612
4613 rtx
4614 simplify_ternary_operation (code, mode, op0_mode, op0, op1, op2)
4615 enum rtx_code code;
4616 enum machine_mode mode, op0_mode;
4617 rtx op0, op1, op2;
4618 {
4619 int width = GET_MODE_BITSIZE (mode);
4620
4621 /* VOIDmode means "infinite" precision. */
4622 if (width == 0)
4623 width = HOST_BITS_PER_WIDE_INT;
4624
4625 switch (code)
4626 {
4627 case SIGN_EXTRACT:
4628 case ZERO_EXTRACT:
4629 if (GET_CODE (op0) == CONST_INT
4630 && GET_CODE (op1) == CONST_INT
4631 && GET_CODE (op2) == CONST_INT
4632 && INTVAL (op1) + INTVAL (op2) <= GET_MODE_BITSIZE (op0_mode)
4633 && width <= HOST_BITS_PER_WIDE_INT)
4634 {
4635 /* Extracting a bit-field from a constant */
4636 HOST_WIDE_INT val = INTVAL (op0);
4637
4638 if (BITS_BIG_ENDIAN)
4639 val >>= (GET_MODE_BITSIZE (op0_mode)
4640 - INTVAL (op2) - INTVAL (op1));
4641 else
4642 val >>= INTVAL (op2);
4643
4644 if (HOST_BITS_PER_WIDE_INT != INTVAL (op1))
4645 {
4646 /* First zero-extend. */
4647 val &= ((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1;
4648 /* If desired, propagate sign bit. */
4649 if (code == SIGN_EXTRACT
4650 && (val & ((HOST_WIDE_INT) 1 << (INTVAL (op1) - 1))))
4651 val |= ~ (((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1);
4652 }
4653
4654 /* Clear the bits that don't belong in our mode,
4655 unless they and our sign bit are all one.
4656 So we get either a reasonable negative value or a reasonable
4657 unsigned value for this mode. */
4658 if (width < HOST_BITS_PER_WIDE_INT
4659 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4660 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4661 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4662
4663 return GEN_INT (val);
4664 }
4665 break;
4666
4667 case IF_THEN_ELSE:
4668 if (GET_CODE (op0) == CONST_INT)
4669 return op0 != const0_rtx ? op1 : op2;
4670 break;
4671
4672 default:
4673 abort ();
4674 }
4675
4676 return 0;
4677 }
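
/* For illustration, the extraction above is just shift-and-mask; a
   minimal sketch for the little-endian bit numbering, with
   hypothetical names:

       val = (c >> pos) & (((HOST_WIDE_INT) 1 << size) - 1);
       if (code == SIGN_EXTRACT
           && (val & ((HOST_WIDE_INT) 1 << (size - 1))))
         val |= ~ (((HOST_WIDE_INT) 1 << size) - 1);

   e.g. a ZERO_EXTRACT of 4 bits at position 4 from 0xab yields 0xa.  */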
4678 \f
4679 /* If X is a nontrivial arithmetic operation on an argument
4680 for which a constant value can be determined, return
4681 the result of operating on that value, as a constant.
4682 Otherwise, return X, possibly with one or more operands
4683 modified by recursive calls to this function.
4684
4685 If X is a register whose contents are known, we do NOT
4686 return those contents here. equiv_constant is called to
4687 perform that task.
4688
4689 INSN is the insn that we may be modifying. If it is 0, make a copy
4690 of X before modifying it. */
4691
4692 static rtx
4693 fold_rtx (x, insn)
4694 rtx x;
4695 rtx insn;
4696 {
4697 register enum rtx_code code;
4698 register enum machine_mode mode;
4699 register char *fmt;
4700 register int i;
4701 rtx new = 0;
4702 int copied = 0;
4703 int must_swap = 0;
4704
4705 /* Folded equivalents of first two operands of X. */
4706 rtx folded_arg0;
4707 rtx folded_arg1;
4708
4709 /* Constant equivalents of first three operands of X;
4710 0 when no such equivalent is known. */
4711 rtx const_arg0;
4712 rtx const_arg1;
4713 rtx const_arg2;
4714
4715 /* The mode of the first operand of X. We need this for sign and zero
4716 extends. */
4717 enum machine_mode mode_arg0;
4718
4719 if (x == 0)
4720 return x;
4721
4722 mode = GET_MODE (x);
4723 code = GET_CODE (x);
4724 switch (code)
4725 {
4726 case CONST:
4727 case CONST_INT:
4728 case CONST_DOUBLE:
4729 case SYMBOL_REF:
4730 case LABEL_REF:
4731 case REG:
4732 /* No use simplifying an EXPR_LIST
4733 since they are used only for lists of args
4734 in a function call's REG_EQUAL note. */
4735 case EXPR_LIST:
4736 return x;
4737
4738 #ifdef HAVE_cc0
4739 case CC0:
4740 return prev_insn_cc0;
4741 #endif
4742
4743 case PC:
4744 /* If the next insn is a CODE_LABEL followed by a jump table,
4745 PC's value is a LABEL_REF pointing to that label. That
4746 lets us fold switch statements on the Vax. */
4747 if (insn && GET_CODE (insn) == JUMP_INSN)
4748 {
4749 rtx next = next_nonnote_insn (insn);
4750
4751 if (next && GET_CODE (next) == CODE_LABEL
4752 && NEXT_INSN (next) != 0
4753 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
4754 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
4755 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
4756 return gen_rtx (LABEL_REF, Pmode, next);
4757 }
4758 break;
4759
4760 case SUBREG:
4761 /* See if we previously assigned a constant value to this SUBREG. */
4762 if ((new = lookup_as_function (x, CONST_INT)) != 0
4763 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
4764 return new;
4765
4766 /* If this is a paradoxical SUBREG, we have no idea what value the
4767 extra bits would have. However, if the operand is equivalent
4768 to a SUBREG whose operand is the same as our mode, and all the
4769 modes are within a word, we can just use the inner operand
4770 because these SUBREGs just say how to treat the register.
4771
4772 Similarly if we find an integer constant. */
4773
4774 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4775 {
4776 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
4777 struct table_elt *elt;
4778
4779 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
4780 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
4781 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
4782 imode)) != 0)
4783 for (elt = elt->first_same_value;
4784 elt; elt = elt->next_same_value)
4785 {
4786 if (CONSTANT_P (elt->exp)
4787 && GET_MODE (elt->exp) == VOIDmode)
4788 return elt->exp;
4789
4790 if (GET_CODE (elt->exp) == SUBREG
4791 && GET_MODE (SUBREG_REG (elt->exp)) == mode
4792 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
4793 return copy_rtx (SUBREG_REG (elt->exp));
4794 }
4795
4796 return x;
4797 }
4798
4799 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
4800 We might be able to if the SUBREG is extracting a single word in an
4801 integral mode or extracting the low part. */
4802
4803 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
4804 const_arg0 = equiv_constant (folded_arg0);
4805 if (const_arg0)
4806 folded_arg0 = const_arg0;
4807
4808 if (folded_arg0 != SUBREG_REG (x))
4809 {
4810 new = 0;
4811
4812 if (GET_MODE_CLASS (mode) == MODE_INT
4813 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
4814 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
4815 new = operand_subword (folded_arg0, SUBREG_WORD (x), 0,
4816 GET_MODE (SUBREG_REG (x)));
4817 if (new == 0 && subreg_lowpart_p (x))
4818 new = gen_lowpart_if_possible (mode, folded_arg0);
4819 if (new)
4820 return new;
4821 }
4822
4823 /* If this is a narrowing SUBREG and our operand is a REG, see if
4824 we can find an equivalence for REG that is an arithmetic operation
4825 in a wider mode where both operands are paradoxical SUBREGs
4826 from objects of our result mode. In that case, we couldn't report
4827 an equivalent value for that operation, since we don't know what the
4828 extra bits will be. But we can find an equivalence for this SUBREG
4829 by folding that operation in the narrow mode. This allows us to
4830 fold arithmetic in narrow modes when the machine only supports
4831 word-sized arithmetic.
4832
4833 Also look for a case where we have a SUBREG whose operand is the
4834 same as our result. If both modes are smaller than a word, we
4835 are simply interpreting a register in different modes and we
4836 can use the inner value. */
4837
4838 if (GET_CODE (folded_arg0) == REG
4839 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
4840 && subreg_lowpart_p (x))
4841 {
4842 struct table_elt *elt;
4843
4844 /* We can use HASH here since we know that canon_hash won't be
4845 called. */
4846 elt = lookup (folded_arg0,
4847 HASH (folded_arg0, GET_MODE (folded_arg0)),
4848 GET_MODE (folded_arg0));
4849
4850 if (elt)
4851 elt = elt->first_same_value;
4852
4853 for (; elt; elt = elt->next_same_value)
4854 {
4855 enum rtx_code eltcode = GET_CODE (elt->exp);
4856
4857 /* Just check for unary and binary operations. */
4858 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
4859 && GET_CODE (elt->exp) != SIGN_EXTEND
4860 && GET_CODE (elt->exp) != ZERO_EXTEND
4861 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
4862 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
4863 {
4864 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
4865
4866 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
4867 op0 = fold_rtx (op0, NULL_RTX);
4868
4869 op0 = equiv_constant (op0);
4870 if (op0)
4871 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
4872 op0, mode);
4873 }
4874 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
4875 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
4876 && eltcode != DIV && eltcode != MOD
4877 && eltcode != UDIV && eltcode != UMOD
4878 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
4879 && eltcode != ROTATE && eltcode != ROTATERT
4880 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
4881 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
4882 == mode))
4883 || CONSTANT_P (XEXP (elt->exp, 0)))
4884 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
4885 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
4886 == mode))
4887 || CONSTANT_P (XEXP (elt->exp, 1))))
4888 {
4889 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
4890 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
4891
4892 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
4893 op0 = fold_rtx (op0, NULL_RTX);
4894
4895 if (op0)
4896 op0 = equiv_constant (op0);
4897
4898 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
4899 op1 = fold_rtx (op1, NULL_RTX);
4900
4901 if (op1)
4902 op1 = equiv_constant (op1);
4903
4904 /* If we are looking for the low SImode part of
4905 (ashift:DI c (const_int 32)), it doesn't work
4906 to compute that in SImode, because a 32-bit shift
4907 in SImode is unpredictable. We know the value is 0. */
4908 if (op0 && op1
4909 && GET_CODE (elt->exp) == ASHIFT
4910 && GET_CODE (op1) == CONST_INT
4911 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
4912 {
4913 if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
4915 /* If the count fits in the inner mode's width,
4916 but exceeds the outer mode's width,
4917 the value will get truncated to 0
4918 by the subreg. */
4919 new = const0_rtx;
4920 else
4921 /* If the count exceeds even the inner mode's width,
4922 don't fold this expression. */
4923 new = 0;
4924 }
4925 else if (op0 && op1)
4926 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
4927 op0, op1);
4928 }
4929
4930 else if (GET_CODE (elt->exp) == SUBREG
4931 && GET_MODE (SUBREG_REG (elt->exp)) == mode
4932 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
4933 <= UNITS_PER_WORD)
4934 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
4935 new = copy_rtx (SUBREG_REG (elt->exp));
4936
4937 if (new)
4938 return new;
4939 }
4940 }
4941
4942 return x;
4943
4944 case NOT:
4945 case NEG:
4946 /* If we have (NOT Y), see if Y is known to be (NOT Z).
4947 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
4948 new = lookup_as_function (XEXP (x, 0), code);
4949 if (new)
4950 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
4951 break;
4952
4953 case MEM:
4954 /* If we are not actually processing an insn, don't try to find the
4955 best address. Not only don't we care, but we could modify the
4956 MEM in an invalid way since we have no insn to validate against. */
4957 if (insn != 0)
4958 find_best_addr (insn, &XEXP (x, 0));
4959
4960 {
4961 /* Even if we don't fold in the insn itself,
4962 we can safely do so here, in hopes of getting a constant. */
4963 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
4964 rtx base = 0;
4965 HOST_WIDE_INT offset = 0;
4966
4967 if (GET_CODE (addr) == REG
4968 && REGNO_QTY_VALID_P (REGNO (addr))
4969 && GET_MODE (addr) == qty_mode[reg_qty[REGNO (addr)]]
4970 && qty_const[reg_qty[REGNO (addr)]] != 0)
4971 addr = qty_const[reg_qty[REGNO (addr)]];
4972
4973 /* If address is constant, split it into a base and integer offset. */
4974 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
4975 base = addr;
4976 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
4977 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
4978 {
4979 base = XEXP (XEXP (addr, 0), 0);
4980 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
4981 }
4982 else if (GET_CODE (addr) == LO_SUM
4983 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
4984 base = XEXP (addr, 1);
4985
4986 /* If this is a constant pool reference, we can fold it into its
4987 constant to allow better value tracking. */
4988 if (base && GET_CODE (base) == SYMBOL_REF
4989 && CONSTANT_POOL_ADDRESS_P (base))
4990 {
4991 rtx constant = get_pool_constant (base);
4992 enum machine_mode const_mode = get_pool_mode (base);
4993 rtx new;
4994
4995 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
4996 constant_pool_entries_cost = COST (constant);
4997
4998 /* If we are loading the full constant, we have an equivalence. */
4999 if (offset == 0 && mode == const_mode)
5000 return constant;
5001
5002 /* If this actually isn't a constant (weird!), we can't do
5003 anything. Otherwise, handle the two most common cases:
5004 extracting a word from a multi-word constant, and extracting
5005 the low-order bits. Other cases don't seem common enough to
5006 worry about. */
5007 if (! CONSTANT_P (constant))
5008 return x;
5009
5010 if (GET_MODE_CLASS (mode) == MODE_INT
5011 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
5012 && offset % UNITS_PER_WORD == 0
5013 && (new = operand_subword (constant,
5014 offset / UNITS_PER_WORD,
5015 0, const_mode)) != 0)
5016 return new;
5017
5018 if (((BYTES_BIG_ENDIAN
5019 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
5020 || (! BYTES_BIG_ENDIAN && offset == 0))
5021 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
5022 return new;
5023 }
5024
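/* For example, an address of the form
   (const (plus (symbol_ref "s") (const_int 8))) splits into
   BASE == (symbol_ref "s") and OFFSET == 8 above, which is what lets
   a load of one word of a multi-word constant-pool entry be folded
   with operand_subword.  */
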
5025 /* If this is a reference to a label at a known position in a jump
5026 table, we also know its value. */
5027 if (base && GET_CODE (base) == LABEL_REF)
5028 {
5029 rtx label = XEXP (base, 0);
5030 rtx table_insn = NEXT_INSN (label);
5031
5032 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5033 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
5034 {
5035 rtx table = PATTERN (table_insn);
5036
5037 if (offset >= 0
5038 && (offset / GET_MODE_SIZE (GET_MODE (table))
5039 < XVECLEN (table, 0)))
5040 return XVECEXP (table, 0,
5041 offset / GET_MODE_SIZE (GET_MODE (table)));
5042 }
5043 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5044 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
5045 {
5046 rtx table = PATTERN (table_insn);
5047
5048 if (offset >= 0
5049 && (offset / GET_MODE_SIZE (GET_MODE (table))
5050 < XVECLEN (table, 1)))
5051 {
5052 offset /= GET_MODE_SIZE (GET_MODE (table));
5053 new = gen_rtx (MINUS, Pmode, XVECEXP (table, 1, offset),
5054 XEXP (table, 0));
5055
5056 if (GET_MODE (table) != Pmode)
5057 new = gen_rtx (TRUNCATE, GET_MODE (table), new);
5058
5059 /* Indicate this is a constant. This isn't a
5060 valid form of CONST, but it will only be used
5061 to fold the next insns and then discarded, so
5062 it should be safe. */
5063 return gen_rtx (CONST, GET_MODE (new), new);
5064 }
5065 }
5066 }
5067
5068 return x;
5069 }
5070 }
5071
5072 const_arg0 = 0;
5073 const_arg1 = 0;
5074 const_arg2 = 0;
5075 mode_arg0 = VOIDmode;
5076
5077 /* Try folding our operands.
5078 Then see which ones have constant values known. */
5079
5080 fmt = GET_RTX_FORMAT (code);
5081 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5082 if (fmt[i] == 'e')
5083 {
5084 rtx arg = XEXP (x, i);
5085 rtx folded_arg = arg, const_arg = 0;
5086 enum machine_mode mode_arg = GET_MODE (arg);
5087 rtx cheap_arg, expensive_arg;
5088 rtx replacements[2];
5089 int j;
5090
5091 /* Most arguments are cheap, so handle them specially. */
5092 switch (GET_CODE (arg))
5093 {
5094 case REG:
5095 /* This is the same as calling equiv_constant; it is duplicated
5096 here for speed. */
5097 if (REGNO_QTY_VALID_P (REGNO (arg))
5098 && qty_const[reg_qty[REGNO (arg)]] != 0
5099 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != REG
5100 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != PLUS)
5101 const_arg
5102 = gen_lowpart_if_possible (GET_MODE (arg),
5103 qty_const[reg_qty[REGNO (arg)]]);
5104 break;
5105
5106 case CONST:
5107 case CONST_INT:
5108 case SYMBOL_REF:
5109 case LABEL_REF:
5110 case CONST_DOUBLE:
5111 const_arg = arg;
5112 break;
5113
5114 #ifdef HAVE_cc0
5115 case CC0:
5116 folded_arg = prev_insn_cc0;
5117 mode_arg = prev_insn_cc0_mode;
5118 const_arg = equiv_constant (folded_arg);
5119 break;
5120 #endif
5121
5122 default:
5123 folded_arg = fold_rtx (arg, insn);
5124 const_arg = equiv_constant (folded_arg);
5125 }
5126
5127 /* For the first three operands, see if the operand
5128 is constant or equivalent to a constant. */
5129 switch (i)
5130 {
5131 case 0:
5132 folded_arg0 = folded_arg;
5133 const_arg0 = const_arg;
5134 mode_arg0 = mode_arg;
5135 break;
5136 case 1:
5137 folded_arg1 = folded_arg;
5138 const_arg1 = const_arg;
5139 break;
5140 case 2:
5141 const_arg2 = const_arg;
5142 break;
5143 }
5144
5145 /* Pick the least expensive of the folded argument and an
5146 equivalent constant argument. */
5147 if (const_arg == 0 || const_arg == folded_arg
5148 || COST (const_arg) > COST (folded_arg))
5149 cheap_arg = folded_arg, expensive_arg = const_arg;
5150 else
5151 cheap_arg = const_arg, expensive_arg = folded_arg;
5152
5153 /* Try to replace the operand with the cheapest of the two
5154 possibilities. If it doesn't work and this is either of the first
5155 two operands of a commutative operation, try swapping them.
5156 If THAT fails, try the more expensive, provided it is cheaper
5157 than what is already there. */
5158
5159 if (cheap_arg == XEXP (x, i))
5160 continue;
5161
5162 if (insn == 0 && ! copied)
5163 {
5164 x = copy_rtx (x);
5165 copied = 1;
5166 }
5167
5168 replacements[0] = cheap_arg, replacements[1] = expensive_arg;
5169 for (j = 0;
5170 j < 2 && replacements[j]
5171 && COST (replacements[j]) < COST (XEXP (x, i));
5172 j++)
5173 {
5174 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
5175 break;
5176
5177 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c')
5178 {
5179 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
5180 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
5181
5182 if (apply_change_group ())
5183 {
5184 /* Swap them back to be invalid so that this loop can
5185 continue and flag them to be swapped back later. */
5186 rtx tem;
5187
5188 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
5189 XEXP (x, 1) = tem;
5190 must_swap = 1;
5191 break;
5192 }
5193 }
5194 }
5195 }
5196
5197 else if (fmt[i] == 'E')
5198 /* Don't try to fold inside of a vector of expressions.
5199 Doing nothing is harmless. */
5200 ;
5201
5202 /* If a commutative operation, place a constant integer as the second
5203 operand unless the first operand is also a constant integer. Otherwise,
5204 place any constant second unless the first operand is also a constant. */
5205
5206 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
5207 {
5208 if (must_swap || (const_arg0
5209 && (const_arg1 == 0
5210 || (GET_CODE (const_arg0) == CONST_INT
5211 && GET_CODE (const_arg1) != CONST_INT))))
5212 {
5213 register rtx tem = XEXP (x, 0);
5214
5215 if (insn == 0 && ! copied)
5216 {
5217 x = copy_rtx (x);
5218 copied = 1;
5219 }
5220
5221 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
5222 validate_change (insn, &XEXP (x, 1), tem, 1);
5223 if (apply_change_group ())
5224 {
5225 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
5226 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
5227 }
5228 }
5229 }
5230
5231 /* If X is an arithmetic operation, see if we can simplify it. */
5232
5233 switch (GET_RTX_CLASS (code))
5234 {
5235 case '1':
5236 {
5237 int is_const = 0;
5238
5239 /* We can't simplify extension ops unless we know the
5240 original mode. */
5241 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
5242 && mode_arg0 == VOIDmode)
5243 break;
5244
5245 /* If we had a CONST, strip it off and put it back later if we
5246 fold. */
5247 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
5248 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
5249
5250 new = simplify_unary_operation (code, mode,
5251 const_arg0 ? const_arg0 : folded_arg0,
5252 mode_arg0);
5253 if (new != 0 && is_const)
5254 new = gen_rtx (CONST, mode, new);
5255 }
5256 break;
5257
5258 case '<':
5259 /* See what items are actually being compared and set FOLDED_ARG[01]
5260 to those values and CODE to the actual comparison code. If any are
5261 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
5262 do anything if both operands are already known to be constant. */
5263
5264 if (const_arg0 == 0 || const_arg1 == 0)
5265 {
5266 struct table_elt *p0, *p1;
5267 rtx true = const_true_rtx, false = const0_rtx;
5268 enum machine_mode mode_arg1;
5269
5270 #ifdef FLOAT_STORE_FLAG_VALUE
5271 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5272 {
5273 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5274 mode);
5275 false = CONST0_RTX (mode);
5276 }
5277 #endif
5278
5279 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
5280 &mode_arg0, &mode_arg1);
5281 const_arg0 = equiv_constant (folded_arg0);
5282 const_arg1 = equiv_constant (folded_arg1);
5283
5284 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
5285 what kinds of things are being compared, so we can't do
5286 anything with this comparison. */
5287
5288 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
5289 break;
5290
5291 /* If we do not now have two constants being compared, see if we
5292 can nevertheless deduce some things about the comparison. */
5293 if (const_arg0 == 0 || const_arg1 == 0)
5294 {
5295 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or non-explicit
5296 constant? These aren't zero, but we don't know their sign. */
5297 if (const_arg1 == const0_rtx
5298 && (NONZERO_BASE_PLUS_P (folded_arg0)
5299 #if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
5300 come out as 0. */
5301 || GET_CODE (folded_arg0) == SYMBOL_REF
5302 #endif
5303 || GET_CODE (folded_arg0) == LABEL_REF
5304 || GET_CODE (folded_arg0) == CONST))
5305 {
5306 if (code == EQ)
5307 return false;
5308 else if (code == NE)
5309 return true;
5310 }
5311
5312 /* See if the two operands are the same. We don't do this
5313 for IEEE floating-point since we can't assume x == x:
5314 x might be a NaN. */
5315
5316 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5317 || ! FLOAT_MODE_P (mode_arg0) || flag_fast_math)
5318 && (folded_arg0 == folded_arg1
5319 || (GET_CODE (folded_arg0) == REG
5320 && GET_CODE (folded_arg1) == REG
5321 && (reg_qty[REGNO (folded_arg0)]
5322 == reg_qty[REGNO (folded_arg1)]))
5323 || ((p0 = lookup (folded_arg0,
5324 (safe_hash (folded_arg0, mode_arg0)
5325 % NBUCKETS), mode_arg0))
5326 && (p1 = lookup (folded_arg1,
5327 (safe_hash (folded_arg1, mode_arg0)
5328 % NBUCKETS), mode_arg0))
5329 && p0->first_same_value == p1->first_same_value)))
5330 return ((code == EQ || code == LE || code == GE
5331 || code == LEU || code == GEU)
5332 ? true : false);
5333
5334 /* If FOLDED_ARG0 is a register, see if the comparison we are
5335 doing now is either the same as we did before or the reverse
5336 (we only check the reverse if not floating-point). */
5337 else if (GET_CODE (folded_arg0) == REG)
5338 {
5339 int qty = reg_qty[REGNO (folded_arg0)];
5340
5341 if (REGNO_QTY_VALID_P (REGNO (folded_arg0))
5342 && (comparison_dominates_p (qty_comparison_code[qty], code)
5343 || (comparison_dominates_p (qty_comparison_code[qty],
5344 reverse_condition (code))
5345 && ! FLOAT_MODE_P (mode_arg0)))
5346 && (rtx_equal_p (qty_comparison_const[qty], folded_arg1)
5347 || (const_arg1
5348 && rtx_equal_p (qty_comparison_const[qty],
5349 const_arg1))
5350 || (GET_CODE (folded_arg1) == REG
5351 && (reg_qty[REGNO (folded_arg1)]
5352 == qty_comparison_qty[qty]))))
5353 return (comparison_dominates_p (qty_comparison_code[qty],
5354 code)
5355 ? true : false);
5356 }
5357 }
5358 }
5359
5360 /* If we are comparing against zero, see if the first operand is
5361 equivalent to an IOR with a constant. If so, we may be able to
5362 determine the result of this comparison. */
5363
5364 if (const_arg1 == const0_rtx)
5365 {
5366 rtx y = lookup_as_function (folded_arg0, IOR);
5367 rtx inner_const;
5368
5369 if (y != 0
5370 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
5371 && GET_CODE (inner_const) == CONST_INT
5372 && INTVAL (inner_const) != 0)
5373 {
5374 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
5375 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
5376 && (INTVAL (inner_const)
5377 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
5378 rtx true = const_true_rtx, false = const0_rtx;
5379
5380 #ifdef FLOAT_STORE_FLAG_VALUE
5381 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5382 {
5383 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5384 mode);
5385 false = CONST0_RTX (mode);
5386 }
5387 #endif
5388
5389 switch (code)
5390 {
5391 case EQ:
5392 return false;
5393 case NE:
5394 return true;
5395 case LT: case LE:
5396 if (has_sign)
5397 return true;
5398 break;
5399 case GT: case GE:
5400 if (has_sign)
5401 return false;
5402 break;
5403 }
5404 }
5405 }
5406
5407 new = simplify_relational_operation (code, mode_arg0,
5408 const_arg0 ? const_arg0 : folded_arg0,
5409 const_arg1 ? const_arg1 : folded_arg1);
5410 #ifdef FLOAT_STORE_FLAG_VALUE
5411 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
5412 new = ((new == const0_rtx) ? CONST0_RTX (mode)
5413 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE, mode));
5414 #endif
5415 break;
5416
5417 case '2':
5418 case 'c':
5419 switch (code)
5420 {
5421 case PLUS:
5422 /* If the second operand is a LABEL_REF, see if the first is a MINUS
5423 with that LABEL_REF as its second operand. If so, the result is
5424 the first operand of that MINUS. This handles switches with an
5425 ADDR_DIFF_VEC table. */
5426 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
5427 {
5428 rtx y
5429 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
5430 : lookup_as_function (folded_arg0, MINUS);
5431
5432 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5433 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
5434 return XEXP (y, 0);
5435
5436 /* Now try for a CONST of a MINUS like the above. */
5437 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
5438 : lookup_as_function (folded_arg0, CONST))) != 0
5439 && GET_CODE (XEXP (y, 0)) == MINUS
5440 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5441 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
5442 return XEXP (XEXP (y, 0), 0);
5443 }
5444
5445 /* Likewise if the operands are in the other order. */
5446 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
5447 {
5448 rtx y
5449 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
5450 : lookup_as_function (folded_arg1, MINUS);
5451
5452 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5453 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
5454 return XEXP (y, 0);
5455
5456 /* Now try for a CONST of a MINUS like the above. */
5457 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
5458 : lookup_as_function (folded_arg1, CONST))) != 0
5459 && GET_CODE (XEXP (y, 0)) == MINUS
5460 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5461 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
5462 return XEXP (XEXP (y, 0), 0);
5463 }
5464
5465 /* If second operand is a register equivalent to a negative
5466 CONST_INT, see if we can find a register equivalent to the
5467 positive constant. Make a MINUS if so. Don't do this for
5468 a negative constant since we might then alternate between
5469 choosing positive and negative constants. Having the positive
5470 constant previously used is the more common case. */
5471 if (const_arg1 && GET_CODE (const_arg1) == CONST_INT
5472 && INTVAL (const_arg1) < 0 && GET_CODE (folded_arg1) == REG)
5473 {
5474 rtx new_const = GEN_INT (- INTVAL (const_arg1));
5475 struct table_elt *p
5476 = lookup (new_const, safe_hash (new_const, mode) % NBUCKETS,
5477 mode);
5478
5479 if (p)
5480 for (p = p->first_same_value; p; p = p->next_same_value)
5481 if (GET_CODE (p->exp) == REG)
5482 return cse_gen_binary (MINUS, mode, folded_arg0,
5483 canon_reg (p->exp, NULL_RTX));
5484 }
5485 goto from_plus;
5486
5487 case MINUS:
5488 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
5489 If so, produce (PLUS Z C2-C). */
5490 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
5491 {
5492 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
5493 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
5494 return fold_rtx (plus_constant (copy_rtx (y),
5495 -INTVAL (const_arg1)),
5496 NULL_RTX);
5497 }
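/* A hypothetical instance of the rewrite above: if (reg 65) is known
   equivalent to (plus (reg 66) (const_int 7)), then
   (minus (reg 65) (const_int 5)) folds to
   (plus (reg 66) (const_int 2)). */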
5498
5499 /* ... fall through ... */
5500
5501 from_plus:
5502 case SMIN: case SMAX: case UMIN: case UMAX:
5503 case IOR: case AND: case XOR:
5504 case MULT: case DIV: case UDIV:
5505 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
5506 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
5507 is known to be of similar form, we may be able to replace the
5508 operation with a combined operation. This may eliminate the
5509 intermediate operation if every use is simplified in this way.
5510 Note that the similar optimization done by combine.c only works
5511 if the intermediate operation's result has only one reference. */
5512
5513 if (GET_CODE (folded_arg0) == REG
5514 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
5515 {
5516 int is_shift
5517 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
5518 rtx y = lookup_as_function (folded_arg0, code);
5519 rtx inner_const;
5520 enum rtx_code associate_code;
5521 rtx new_const;
5522
5523 if (y == 0
5524 || 0 == (inner_const
5525 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
5526 || GET_CODE (inner_const) != CONST_INT
5527 /* If we have compiled a statement like
5528 "if (x == (x & mask1))", and now are looking at
5529 "x & mask2", we will have a case where the first operand
5530 of Y is the same as our first operand. Unless we detect
5531 this case, an infinite loop will result. */
5532 || XEXP (y, 0) == folded_arg0)
5533 break;
5534
5535 /* Don't associate these operations if they are a PLUS with the
5536 same constant and it is a power of two. These might be doable
5537 with a pre- or post-increment. Similarly for two subtracts of
5538 identical powers of two with post-decrement. */
5539
5540 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
5541 && (0
5542 #if defined(HAVE_PRE_INCREMENT) || defined(HAVE_POST_INCREMENT)
5543 || exact_log2 (INTVAL (const_arg1)) >= 0
5544 #endif
5545 #if defined(HAVE_PRE_DECREMENT) || defined(HAVE_POST_DECREMENT)
5546 || exact_log2 (- INTVAL (const_arg1)) >= 0
5547 #endif
5548 ))
5549 break;
5550
5551 /* Compute the code used to compose the constants. For example,
5552 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
5553
5554 associate_code
5555 = (code == MULT || code == DIV || code == UDIV ? MULT
5556 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
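/* For shifts the counts add: e.g., if (reg 65) is recorded as
   (ashift (reg 66) (const_int 2)), then (ashift (reg 65) (const_int 3))
   can become (ashift (reg 66) (const_int 5)), provided the combined
   count passes the size check below. */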
5557
5558 new_const = simplify_binary_operation (associate_code, mode,
5559 const_arg1, inner_const);
5560
5561 if (new_const == 0)
5562 break;
5563
5564 /* If we are associating shift operations, don't let this
5565 produce a shift of the size of the object or larger.
5566 This could occur when we follow a sign-extend by a right
5567 shift on a machine that does a sign-extend as a pair
5568 of shifts. */
5569
5570 if (is_shift && GET_CODE (new_const) == CONST_INT
5571 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
5572 {
5573 /* As an exception, we can turn an ASHIFTRT of this
5574 form into a shift of the number of bits - 1. */
5575 if (code == ASHIFTRT)
5576 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
5577 else
5578 break;
5579 }
5580
5581 y = copy_rtx (XEXP (y, 0));
5582
5583 /* If Y contains our first operand (the most common way this
5584 can happen is if Y is a MEM), we would go into an infinite
5585 loop if we tried to fold it. So don't fold in that case. */
5586
5587 if (! reg_mentioned_p (folded_arg0, y))
5588 y = fold_rtx (y, insn);
5589
5590 return cse_gen_binary (code, mode, y, new_const);
5591 }
5592 }
5593
5594 new = simplify_binary_operation (code, mode,
5595 const_arg0 ? const_arg0 : folded_arg0,
5596 const_arg1 ? const_arg1 : folded_arg1);
5597 break;
5598
5599 case 'o':
5600 /* (lo_sum (high X) X) is simply X. */
5601 if (code == LO_SUM && const_arg0 != 0
5602 && GET_CODE (const_arg0) == HIGH
5603 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
5604 return const_arg1;
5605 break;
5606
5607 case '3':
5608 case 'b':
5609 new = simplify_ternary_operation (code, mode, mode_arg0,
5610 const_arg0 ? const_arg0 : folded_arg0,
5611 const_arg1 ? const_arg1 : folded_arg1,
5612 const_arg2 ? const_arg2 : XEXP (x, 2));
5613 break;
5614 }
5615
5616 return new ? new : x;
5617 }
5618 \f
5619 /* Return a constant value currently equivalent to X.
5620 Return 0 if we don't know one. */
5621
5622 static rtx
5623 equiv_constant (x)
5624 rtx x;
5625 {
5626 if (GET_CODE (x) == REG
5627 && REGNO_QTY_VALID_P (REGNO (x))
5628 && qty_const[reg_qty[REGNO (x)]])
5629 x = gen_lowpart_if_possible (GET_MODE (x), qty_const[reg_qty[REGNO (x)]]);
5630
5631 if (x != 0 && CONSTANT_P (x))
5632 return x;
5633
5634 /* If X is a MEM, try to fold it outside the context of any insn to see if
5635 it might be equivalent to a constant. That handles the case where it
5636 is a constant-pool reference. Then try to look it up in the hash table
5637 in case it is something whose value we have seen before. */
5638
5639 if (GET_CODE (x) == MEM)
5640 {
5641 struct table_elt *elt;
5642
5643 x = fold_rtx (x, NULL_RTX);
5644 if (CONSTANT_P (x))
5645 return x;
5646
5647 elt = lookup (x, safe_hash (x, GET_MODE (x)) % NBUCKETS, GET_MODE (x));
5648 if (elt == 0)
5649 return 0;
5650
5651 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
5652 if (elt->is_const && CONSTANT_P (elt->exp))
5653 return elt->exp;
5654 }
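/* E.g., a MEM that addresses the constant pool folds directly to the
   pooled constant; failing that, a hash-table hit whose equivalence
   class contains, say, (const_int 42) returns that constant. */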
5655
5656 return 0;
5657 }
5658 \f
5659 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
5660 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
5661 least-significant part of X.
5662 MODE specifies how big a part of X to return.
5663
5664 If the requested operation cannot be done, 0 is returned.
5665
5666 This is similar to gen_lowpart in emit-rtl.c. */
5667
5668 rtx
5669 gen_lowpart_if_possible (mode, x)
5670 enum machine_mode mode;
5671 register rtx x;
5672 {
5673 rtx result = gen_lowpart_common (mode, x);
5674
5675 if (result)
5676 return result;
5677 else if (GET_CODE (x) == MEM)
5678 {
5679 /* This is the only other case we handle. */
5680 register int offset = 0;
5681 rtx new;
5682
5683 if (WORDS_BIG_ENDIAN)
5684 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
5685 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
5686 if (BYTES_BIG_ENDIAN)
5687 /* Adjust the address so that the address-after-the-data is
5688 unchanged. */
5689 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
5690 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
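/* As a hypothetical illustration: narrowing an 8-byte DImode MEM to
   SImode on a big-endian machine with 4-byte words gives offset 4,
   so the low part is the second word; on a little-endian machine the
   offset stays 0. */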
5691 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
5692 if (! memory_address_p (mode, XEXP (new, 0)))
5693 return 0;
5694 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
5695 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
5696 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
5697 return new;
5698 }
5699 else
5700 return 0;
5701 }
5702 \f
5703 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
5704 branch. It will be zero if not.
5705
5706 In certain cases, this can cause us to add an equivalence. For example,
5707 if we are following the taken case of
5708 if (i == 2)
5709 we can add the fact that `i' and `2' are now equivalent.
5710
5711 In any case, we can record that this comparison was passed. If the same
5712 comparison is seen later, we will know its value. */
5713
5714 static void
5715 record_jump_equiv (insn, taken)
5716 rtx insn;
5717 int taken;
5718 {
5719 int cond_known_true;
5720 rtx op0, op1;
5721 enum machine_mode mode, mode0, mode1;
5722 int reversed_nonequality = 0;
5723 enum rtx_code code;
5724
5725 /* Ensure this is the right kind of insn. */
5726 if (! condjump_p (insn) || simplejump_p (insn))
5727 return;
5728
5729 /* See if this jump condition is known true or false. */
5730 if (taken)
5731 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 2) == pc_rtx);
5732 else
5733 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx);
5734
5735 /* Get the type of comparison being done and the operands being compared.
5736 If we had to reverse a non-equality condition, record that fact so we
5737 know that it isn't valid for floating-point. */
5738 code = GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 0));
5739 op0 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 0), insn);
5740 op1 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 1), insn);
5741
5742 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
5743 if (! cond_known_true)
5744 {
5745 reversed_nonequality = (code != EQ && code != NE);
5746 code = reverse_condition (code);
5747 }
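/* E.g., following the not-taken arm of "if (a < b)" records that
   (ge a b) holds; since GE is not an equality test, the reversal is
   flagged so record_jump_cond will not use it for floating-point,
   where NaNs make !(a < b) weaker than (a >= b). */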
5748
5749 /* The mode is the mode of the non-constant. */
5750 mode = mode0;
5751 if (mode1 != VOIDmode)
5752 mode = mode1;
5753
5754 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
5755 }
5756
5757 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
5758 REVERSED_NONEQUALITY is nonzero if CODE had to be reversed.
5759 Make any useful entries we can with that information. Called from
5760 above function and called recursively. */
5761
5762 static void
5763 record_jump_cond (code, mode, op0, op1, reversed_nonequality)
5764 enum rtx_code code;
5765 enum machine_mode mode;
5766 rtx op0, op1;
5767 int reversed_nonequality;
5768 {
5769 unsigned op0_hash, op1_hash;
5770 int op0_in_memory, op0_in_struct, op1_in_memory, op1_in_struct;
5771 struct table_elt *op0_elt, *op1_elt;
5772
5773 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
5774 we know that they are also equal in the smaller mode (this is also
5775 true for all smaller modes whether or not there is a SUBREG, but
5776 it is not worth testing for when there is no SUBREG). */
5777
5778 /* Note that GET_MODE (op0) may not equal MODE. */
5779 if (code == EQ && GET_CODE (op0) == SUBREG
5780 && (GET_MODE_SIZE (GET_MODE (op0))
5781 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
5782 {
5783 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
5784 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
5785
5786 record_jump_cond (code, mode, SUBREG_REG (op0),
5787 tem ? tem : gen_rtx (SUBREG, inner_mode, op1, 0),
5788 reversed_nonequality);
5789 }
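/* Concretely (as a hypothetical case): if (subreg:SI (reg:QI 65) 0)
   is known equal to (reg:SI 66), the recursion above also records
   that (reg:QI 65) equals the low QImode part of (reg:SI 66). */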
5790
5791 if (code == EQ && GET_CODE (op1) == SUBREG
5792 && (GET_MODE_SIZE (GET_MODE (op1))
5793 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
5794 {
5795 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
5796 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
5797
5798 record_jump_cond (code, mode, SUBREG_REG (op1),
5799 tem ? tem : gen_rtx (SUBREG, inner_mode, op0, 0),
5800 reversed_nonequality);
5801 }
5802
5803 /* Similarly, if this is an NE comparison, and either is a SUBREG
5804 making a smaller mode, we know the whole thing is also NE. */
5805
5806 /* Note that GET_MODE (op0) may not equal MODE;
5807 if we test MODE instead, we can get an infinite recursion
5808 alternating between two modes each wider than MODE. */
5809
5810 if (code == NE && GET_CODE (op0) == SUBREG
5811 && subreg_lowpart_p (op0)
5812 && (GET_MODE_SIZE (GET_MODE (op0))
5813 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
5814 {
5815 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
5816 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
5817
5818 record_jump_cond (code, mode, SUBREG_REG (op0),
5819 tem ? tem : gen_rtx (SUBREG, inner_mode, op1, 0),
5820 reversed_nonequality);
5821 }
5822
5823 if (code == NE && GET_CODE (op1) == SUBREG
5824 && subreg_lowpart_p (op1)
5825 && (GET_MODE_SIZE (GET_MODE (op1))
5826 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
5827 {
5828 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
5829 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
5830
5831 record_jump_cond (code, mode, SUBREG_REG (op1),
5832 tem ? tem : gen_rtx (SUBREG, inner_mode, op0, 0),
5833 reversed_nonequality);
5834 }
5835
5836 /* Hash both operands. */
5837
5838 do_not_record = 0;
5839 hash_arg_in_memory = 0;
5840 hash_arg_in_struct = 0;
5841 op0_hash = HASH (op0, mode);
5842 op0_in_memory = hash_arg_in_memory;
5843 op0_in_struct = hash_arg_in_struct;
5844
5845 if (do_not_record)
5846 return;
5847
5848 do_not_record = 0;
5849 hash_arg_in_memory = 0;
5850 hash_arg_in_struct = 0;
5851 op1_hash = HASH (op1, mode);
5852 op1_in_memory = hash_arg_in_memory;
5853 op1_in_struct = hash_arg_in_struct;
5854
5855 if (do_not_record)
5856 return;
5857
5858 /* Look up both operands. */
5859 op0_elt = lookup (op0, op0_hash, mode);
5860 op1_elt = lookup (op1, op1_hash, mode);
5861
5862 /* If both operands are already equivalent or if they are not in the
5863 table but are identical, do nothing. */
5864 if ((op0_elt != 0 && op1_elt != 0
5865 && op0_elt->first_same_value == op1_elt->first_same_value)
5866 || op0 == op1 || rtx_equal_p (op0, op1))
5867 return;
5868
5869 /* If we aren't setting two things equal all we can do is save this
5870 comparison. Similarly if this is floating-point. In the latter
5871 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
5872 If we record the equality, we might inadvertently delete code
5873 whose intent was to change -0 to +0. */
5874
5875 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
5876 {
5877 /* If we reversed a floating-point comparison, if OP0 is not a
5878 register, or if OP1 is neither a register nor a constant, we can't
5879 do anything. */
5880
5881 if (GET_CODE (op1) != REG)
5882 op1 = equiv_constant (op1);
5883
5884 if ((reversed_nonequality && FLOAT_MODE_P (mode))
5885 || GET_CODE (op0) != REG || op1 == 0)
5886 return;
5887
5888 /* Put OP0 in the hash table if it isn't already. This gives it a
5889 new quantity number. */
5890 if (op0_elt == 0)
5891 {
5892 if (insert_regs (op0, NULL_PTR, 0))
5893 {
5894 rehash_using_reg (op0);
5895 op0_hash = HASH (op0, mode);
5896
5897 /* If OP0 is contained in OP1, this changes its hash code
5898 as well. Faster to rehash than to check, except
5899 for the simple case of a constant. */
5900 if (! CONSTANT_P (op1))
5901 op1_hash = HASH (op1, mode);
5902 }
5903
5904 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
5905 op0_elt->in_memory = op0_in_memory;
5906 op0_elt->in_struct = op0_in_struct;
5907 }
5908
5909 qty_comparison_code[reg_qty[REGNO (op0)]] = code;
5910 if (GET_CODE (op1) == REG)
5911 {
5912 /* Look it up again--in case op0 and op1 are the same. */
5913 op1_elt = lookup (op1, op1_hash, mode);
5914
5915 /* Put OP1 in the hash table so it gets a new quantity number. */
5916 if (op1_elt == 0)
5917 {
5918 if (insert_regs (op1, NULL_PTR, 0))
5919 {
5920 rehash_using_reg (op1);
5921 op1_hash = HASH (op1, mode);
5922 }
5923
5924 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
5925 op1_elt->in_memory = op1_in_memory;
5926 op1_elt->in_struct = op1_in_struct;
5927 }
5928
5929 qty_comparison_qty[reg_qty[REGNO (op0)]] = reg_qty[REGNO (op1)];
5930 qty_comparison_const[reg_qty[REGNO (op0)]] = 0;
5931 }
5932 else
5933 {
5934 qty_comparison_qty[reg_qty[REGNO (op0)]] = -1;
5935 qty_comparison_const[reg_qty[REGNO (op0)]] = op1;
5936 }
5937
5938 return;
5939 }
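/* Sketch: after the taken arm of "if (x > y)", with X and Y in
   registers, the block above records GT as the comparison code for
   X's quantity and Y's quantity as its counterpart; a later fold of
   (gt x y) can then return the known result without re-evaluating
   the comparison. */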
5940
5941 /* If either side is still missing an equivalence, make it now,
5942 then merge the equivalences. */
5943
5944 if (op0_elt == 0)
5945 {
5946 if (insert_regs (op0, NULL_PTR, 0))
5947 {
5948 rehash_using_reg (op0);
5949 op0_hash = HASH (op0, mode);
5950 }
5951
5952 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
5953 op0_elt->in_memory = op0_in_memory;
5954 op0_elt->in_struct = op0_in_struct;
5955 }
5956
5957 if (op1_elt == 0)
5958 {
5959 if (insert_regs (op1, NULL_PTR, 0))
5960 {
5961 rehash_using_reg (op1);
5962 op1_hash = HASH (op1, mode);
5963 }
5964
5965 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
5966 op1_elt->in_memory = op1_in_memory;
5967 op1_elt->in_struct = op1_in_struct;
5968 }
5969
5970 merge_equiv_classes (op0_elt, op1_elt);
5971 last_jump_equiv_class = op0_elt;
5972 }
5973 \f
5974 /* CSE processing for one instruction.
5975 First simplify sources and addresses of all assignments
5976 in the instruction, using previously-computed equivalent values.
5977 Then install the new sources and destinations in the table
5978 of available values.
5979
5980 If IN_LIBCALL_BLOCK is nonzero, don't record any equivalence made in
5981 the insn. */
5982
5983 /* Data on one SET contained in the instruction. */
5984
5985 struct set
5986 {
5987 /* The SET rtx itself. */
5988 rtx rtl;
5989 /* The SET_SRC of the rtx (the original value, if it is changing). */
5990 rtx src;
5991 /* The hash-table element for the SET_SRC of the SET. */
5992 struct table_elt *src_elt;
5993 /* Hash value for the SET_SRC. */
5994 unsigned src_hash;
5995 /* Hash value for the SET_DEST. */
5996 unsigned dest_hash;
5997 /* The SET_DEST, with SUBREG, etc., stripped. */
5998 rtx inner_dest;
5999 /* Place where the pointer to the INNER_DEST was found. */
6000 rtx *inner_dest_loc;
6001 /* Nonzero if the SET_SRC is in memory. */
6002 char src_in_memory;
6003 /* Nonzero if the SET_SRC is in a structure. */
6004 char src_in_struct;
6005 /* Nonzero if the SET_SRC contains something
6006 whose value cannot be predicted and understood. */
6007 char src_volatile;
6008 /* Original machine mode, in case it becomes a CONST_INT. */
6009 enum machine_mode mode;
6010 /* A constant equivalent for SET_SRC, if any. */
6011 rtx src_const;
6012 /* Hash value of constant equivalent for SET_SRC. */
6013 unsigned src_const_hash;
6014 /* Table entry for constant equivalent for SET_SRC, if any. */
6015 struct table_elt *src_const_elt;
6016 };
6017
6018 static void
6019 cse_insn (insn, in_libcall_block)
6020 rtx insn;
6021 int in_libcall_block;
6022 {
6023 register rtx x = PATTERN (insn);
6024 register int i;
6025 rtx tem;
6026 register int n_sets = 0;
6027
6028 /* Records what this insn does to set CC0. */
6029 rtx this_insn_cc0 = 0;
6030 enum machine_mode this_insn_cc0_mode;
6031 struct write_data writes_memory;
6032 static struct write_data init = {0, 0, 0, 0};
6033
6034 rtx src_eqv = 0;
6035 struct table_elt *src_eqv_elt = 0;
6036 int src_eqv_volatile;
6037 int src_eqv_in_memory;
6038 int src_eqv_in_struct;
6039 unsigned src_eqv_hash;
6040
6041 struct set *sets;
6042
6043 this_insn = insn;
6044 writes_memory = init;
6045
6046 /* Find all the SETs and CLOBBERs in this instruction.
6047 Record all the SETs in the array `sets' and count them.
6048 Also determine whether there is a CLOBBER that invalidates
6049 all memory references, or all references at varying addresses. */
6050
6051 if (GET_CODE (insn) == CALL_INSN)
6052 {
6053 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6054 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
6055 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
6056 }
6057
6058 if (GET_CODE (x) == SET)
6059 {
6060 sets = (struct set *) alloca (sizeof (struct set));
6061 sets[0].rtl = x;
6062
6063 /* Ignore SETs that are unconditional jumps.
6064 They never need cse processing, so this does not hurt.
6065 The reason is not efficiency but rather
6066 so that we can test at the end for instructions
6067 that have been simplified to unconditional jumps
6068 and not be misled by unchanged instructions
6069 that were unconditional jumps to begin with. */
6070 if (SET_DEST (x) == pc_rtx
6071 && GET_CODE (SET_SRC (x)) == LABEL_REF)
6072 ;
6073
6074 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
6075 The hard function value register is used only once, to copy to
6076 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
6077 Ensure we invalidate the destination register. On the 80386 no
6078 other code would invalidate it since it is a fixed_reg.
6079 We need not check the return of apply_change_group; see canon_reg. */
6080
6081 else if (GET_CODE (SET_SRC (x)) == CALL)
6082 {
6083 canon_reg (SET_SRC (x), insn);
6084 apply_change_group ();
6085 fold_rtx (SET_SRC (x), insn);
6086 invalidate (SET_DEST (x), VOIDmode);
6087 }
6088 else
6089 n_sets = 1;
6090 }
6091 else if (GET_CODE (x) == PARALLEL)
6092 {
6093 register int lim = XVECLEN (x, 0);
6094
6095 sets = (struct set *) alloca (lim * sizeof (struct set));
6096
6097 /* Find all regs explicitly clobbered in this insn,
6098 and ensure they are not replaced with any other regs
6099 elsewhere in this insn.
6100 When a reg that is clobbered is also used for input,
6101 we should presume that that is for a reason,
6102 and we should not substitute some other register
6103 which is not supposed to be clobbered.
6104 Therefore, this loop cannot be merged into the one below
6105 because a CALL may precede a CLOBBER and refer to the
6106 value clobbered. We must not let a canonicalization do
6107 anything in that case. */
6108 for (i = 0; i < lim; i++)
6109 {
6110 register rtx y = XVECEXP (x, 0, i);
6111 if (GET_CODE (y) == CLOBBER)
6112 {
6113 rtx clobbered = XEXP (y, 0);
6114
6115 if (GET_CODE (clobbered) == REG
6116 || GET_CODE (clobbered) == SUBREG)
6117 invalidate (clobbered, VOIDmode);
6118 else if (GET_CODE (clobbered) == STRICT_LOW_PART
6119 || GET_CODE (clobbered) == ZERO_EXTRACT)
6120 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
6121 }
6122 }
6123
6124 for (i = 0; i < lim; i++)
6125 {
6126 register rtx y = XVECEXP (x, 0, i);
6127 if (GET_CODE (y) == SET)
6128 {
6129 /* As above, we ignore unconditional jumps and call-insns and
6130 ignore the result of apply_change_group. */
6131 if (GET_CODE (SET_SRC (y)) == CALL)
6132 {
6133 canon_reg (SET_SRC (y), insn);
6134 apply_change_group ();
6135 fold_rtx (SET_SRC (y), insn);
6136 invalidate (SET_DEST (y), VOIDmode);
6137 }
6138 else if (SET_DEST (y) == pc_rtx
6139 && GET_CODE (SET_SRC (y)) == LABEL_REF)
6140 ;
6141 else
6142 sets[n_sets++].rtl = y;
6143 }
6144 else if (GET_CODE (y) == CLOBBER)
6145 {
6146 /* If we clobber memory, take note of that,
6147 and canon the address.
6148 This does nothing when a register is clobbered
6149 because we have already invalidated the reg. */
6150 if (GET_CODE (XEXP (y, 0)) == MEM)
6151 {
6152 canon_reg (XEXP (y, 0), NULL_RTX);
6153 note_mem_written (XEXP (y, 0), &writes_memory);
6154 }
6155 }
6156 else if (GET_CODE (y) == USE
6157 && ! (GET_CODE (XEXP (y, 0)) == REG
6158 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
6159 canon_reg (y, NULL_RTX);
6160 else if (GET_CODE (y) == CALL)
6161 {
6162 /* The result of apply_change_group can be ignored; see
6163 canon_reg. */
6164 canon_reg (y, insn);
6165 apply_change_group ();
6166 fold_rtx (y, insn);
6167 }
6168 }
6169 }
6170 else if (GET_CODE (x) == CLOBBER)
6171 {
6172 if (GET_CODE (XEXP (x, 0)) == MEM)
6173 {
6174 canon_reg (XEXP (x, 0), NULL_RTX);
6175 note_mem_written (XEXP (x, 0), &writes_memory);
6176 }
6177 }
6178
6179 /* Canonicalize a USE of a pseudo register or memory location. */
6180 else if (GET_CODE (x) == USE
6181 && ! (GET_CODE (XEXP (x, 0)) == REG
6182 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
6183 canon_reg (XEXP (x, 0), NULL_RTX);
6184 else if (GET_CODE (x) == CALL)
6185 {
6186 /* The result of apply_change_group can be ignored; see canon_reg. */
6187 canon_reg (x, insn);
6188 apply_change_group ();
6189 fold_rtx (x, insn);
6190 }
6191
6192 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
6193 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
6194 is handled specially for this case, and if it isn't set, then there will
6195 be no equivalence for the destination. */
6196 if (n_sets == 1 && REG_NOTES (insn) != 0
6197 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
6198 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
6199 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
6200 src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
6201
6202 /* Canonicalize sources and addresses of destinations.
6203 We do this in a separate pass to avoid problems when a MATCH_DUP is
6204 present in the insn pattern. In that case, we want to ensure that
6205 we don't break the duplicate nature of the pattern. So we will replace
6206 both operands at the same time. Otherwise, we would fail to find an
6207 equivalent substitution in the loop calling validate_change below.
6208
6209 We used to suppress canonicalization of DEST if it appears in SRC,
6210 but we don't do this any more. */
6211
6212 for (i = 0; i < n_sets; i++)
6213 {
6214 rtx dest = SET_DEST (sets[i].rtl);
6215 rtx src = SET_SRC (sets[i].rtl);
6216 rtx new = canon_reg (src, insn);
6217
6218 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
6219 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
6220 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
6221 || insn_n_dups[recog_memoized (insn)] > 0)
6222 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
6223 else
6224 SET_SRC (sets[i].rtl) = new;
6225
6226 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
6227 {
6228 validate_change (insn, &XEXP (dest, 1),
6229 canon_reg (XEXP (dest, 1), insn), 1);
6230 validate_change (insn, &XEXP (dest, 2),
6231 canon_reg (XEXP (dest, 2), insn), 1);
6232 }
6233
6234 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
6235 || GET_CODE (dest) == ZERO_EXTRACT
6236 || GET_CODE (dest) == SIGN_EXTRACT)
6237 dest = XEXP (dest, 0);
6238
6239 if (GET_CODE (dest) == MEM)
6240 canon_reg (dest, insn);
6241 }
6242
6243 /* Now that we have done all the replacements, we can apply the change
6244 group and see if they all work. Note that this will cause some
6245 canonicalizations that would have worked individually not to be applied
6246 because some other canonicalization didn't work, but this should not
6247 occur often.
6248
6249 The result of apply_change_group can be ignored; see canon_reg. */
6250
6251 apply_change_group ();
6252
6253 /* Set sets[i].src_elt to the class each source belongs to.
6254 Detect assignments from or to volatile things
6255 and set sets[i].rtl to zero so they will be ignored
6256 in the rest of this function.
6257
6258 Nothing in this loop changes the hash table or the register chains. */
6259
6260 for (i = 0; i < n_sets; i++)
6261 {
6262 register rtx src, dest;
6263 register rtx src_folded;
6264 register struct table_elt *elt = 0, *p;
6265 enum machine_mode mode;
6266 rtx src_eqv_here;
6267 rtx src_const = 0;
6268 rtx src_related = 0;
6269 struct table_elt *src_const_elt = 0;
6270 int src_cost = 10000, src_eqv_cost = 10000, src_folded_cost = 10000;
6271 int src_related_cost = 10000, src_elt_cost = 10000;
6272 /* Set non-zero if we need to call force_const_mem on the
6273 contents of src_folded before using it. */
6274 int src_folded_force_flag = 0;
6275
6276 dest = SET_DEST (sets[i].rtl);
6277 src = SET_SRC (sets[i].rtl);
6278
6279 /* If SRC is a constant that has no machine mode,
6280 hash it with the destination's machine mode.
6281 This way we can keep different modes separate. */
6282
6283 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
6284 sets[i].mode = mode;
6285
6286 if (src_eqv)
6287 {
6288 enum machine_mode eqvmode = mode;
6289 if (GET_CODE (dest) == STRICT_LOW_PART)
6290 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
6291 do_not_record = 0;
6292 hash_arg_in_memory = 0;
6293 hash_arg_in_struct = 0;
6294 src_eqv = fold_rtx (src_eqv, insn);
6295 src_eqv_hash = HASH (src_eqv, eqvmode);
6296
6297 /* Find the equivalence class for the equivalent expression. */
6298
6299 if (!do_not_record)
6300 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
6301
6302 src_eqv_volatile = do_not_record;
6303 src_eqv_in_memory = hash_arg_in_memory;
6304 src_eqv_in_struct = hash_arg_in_struct;
6305 }
6306
6307 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
6308 value of the INNER register, not the destination. So it is not
6309 a valid substitution for the source. But save it for later. */
6310 if (GET_CODE (dest) == STRICT_LOW_PART)
6311 src_eqv_here = 0;
6312 else
6313 src_eqv_here = src_eqv;
6314
6315 /* Simplify any foldable subexpressions in SRC. Then get the fully-
6316 simplified result, which may not necessarily be valid. */
6317 src_folded = fold_rtx (src, insn);
6318
6319 /* If storing a constant in a bitfield, pre-truncate the constant
6320 so we will be able to record it later. */
6321 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
6322 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
6323 {
6324 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
6325
6326 if (GET_CODE (src) == CONST_INT
6327 && GET_CODE (width) == CONST_INT
6328 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
6329 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
6330 src_folded
6331 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
6332 << INTVAL (width)) - 1));
6333 }
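/* For example, storing (const_int 0x1f3) into a hypothetical 4-bit
   ZERO_EXTRACT destination pre-truncates SRC_FOLDED to
   (const_int 3), the value the field will actually hold after the
   store. */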
6334
6335 /* Compute SRC's hash code, and also notice if it
6336 should not be recorded at all. In that case,
6337 prevent any further processing of this assignment. */
6338 do_not_record = 0;
6339 hash_arg_in_memory = 0;
6340 hash_arg_in_struct = 0;
6341
6342 sets[i].src = src;
6343 sets[i].src_hash = HASH (src, mode);
6344 sets[i].src_volatile = do_not_record;
6345 sets[i].src_in_memory = hash_arg_in_memory;
6346 sets[i].src_in_struct = hash_arg_in_struct;
6347
6348 #if 0
6349 /* It is no longer clear why we used to do this, but it doesn't
6350 appear to still be needed. So let's try without it since this
6351 code hurts cse'ing widened ops. */
6352 /* If source is a perverse subreg (such as QI treated as an SI),
6353 treat it as volatile. It may do the work of an SI in one context
6354 where the extra bits are not being used, but cannot replace an SI
6355 in general. */
6356 if (GET_CODE (src) == SUBREG
6357 && (GET_MODE_SIZE (GET_MODE (src))
6358 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
6359 sets[i].src_volatile = 1;
6360 #endif
6361
6362 /* Locate all possible equivalent forms for SRC. Try to replace
6363 SRC in the insn with each cheaper equivalent.
6364
6365 We have the following types of equivalents: SRC itself, a folded
6366 version, a value given in a REG_EQUAL note, or a value related
6367 to a constant.
6368
6369 Each of these equivalents may be part of an additional class
6370 of equivalents (if more than one is in the table, they must be in
6371 the same class; we check for this).
6372
6373 If the source is volatile, we don't do any table lookups.
6374
6375 We note any constant equivalent for possible later use in a
6376 REG_NOTE. */
6377
6378 if (!sets[i].src_volatile)
6379 elt = lookup (src, sets[i].src_hash, mode);
6380
6381 sets[i].src_elt = elt;
6382
6383 if (elt && src_eqv_here && src_eqv_elt)
6384 {
6385 if (elt->first_same_value != src_eqv_elt->first_same_value)
6386 {
6387 /* The REG_EQUAL is indicating that two formerly distinct
6388 classes are now equivalent. So merge them. */
6389 merge_equiv_classes (elt, src_eqv_elt);
6390 src_eqv_hash = HASH (src_eqv, elt->mode);
6391 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
6392 }
6393
6394 src_eqv_here = 0;
6395 }
6396
6397 else if (src_eqv_elt)
6398 elt = src_eqv_elt;
6399
6400 /* Try to find a constant somewhere and record it in `src_const'.
6401 Record its table element, if any, in `src_const_elt'. Look in
6402 any known equivalences first. (If the constant is not in the
6403 table, also set `sets[i].src_const_hash'). */
6404 if (elt)
6405 for (p = elt->first_same_value; p; p = p->next_same_value)
6406 if (p->is_const)
6407 {
6408 src_const = p->exp;
6409 src_const_elt = elt;
6410 break;
6411 }
6412
6413 if (src_const == 0
6414 && (CONSTANT_P (src_folded)
6415 /* Consider (minus (label_ref L1) (label_ref L2)) as
6416 "constant" here so we will record it. This allows us
6417 to fold switch statements when an ADDR_DIFF_VEC is used. */
6418 || (GET_CODE (src_folded) == MINUS
6419 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
6420 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
6421 src_const = src_folded, src_const_elt = elt;
6422 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
6423 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
6424
6425 /* If we don't know if the constant is in the table, get its
6426 hash code and look it up. */
6427 if (src_const && src_const_elt == 0)
6428 {
6429 sets[i].src_const_hash = HASH (src_const, mode);
6430 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
6431 }
6432
6433 sets[i].src_const = src_const;
6434 sets[i].src_const_elt = src_const_elt;
6435
6436 /* If the constant and our source are both in the table, mark them as
6437 equivalent. Otherwise, if a constant is in the table but the source
6438 isn't, set ELT to it. */
6439 if (src_const_elt && elt
6440 && src_const_elt->first_same_value != elt->first_same_value)
6441 merge_equiv_classes (elt, src_const_elt);
6442 else if (src_const_elt && elt == 0)
6443 elt = src_const_elt;
6444
6445 /* See if there is a register linearly related to a constant
6446 equivalent of SRC. */
6447 if (src_const
6448 && (GET_CODE (src_const) == CONST
6449 || (src_const_elt && src_const_elt->related_value != 0)))
6450 {
6451 src_related = use_related_value (src_const, src_const_elt);
6452 if (src_related)
6453 {
6454 struct table_elt *src_related_elt
6455 = lookup (src_related, HASH (src_related, mode), mode);
6456 if (src_related_elt && elt)
6457 {
6458 if (elt->first_same_value
6459 != src_related_elt->first_same_value)
6460 /* This can occur when we previously saw a CONST
6461 involving a SYMBOL_REF and then see the SYMBOL_REF
6462 twice. Merge the involved classes. */
6463 merge_equiv_classes (elt, src_related_elt);
6464
6465 src_related = 0;
6466 src_related_elt = 0;
6467 }
6468 else if (src_related_elt && elt == 0)
6469 elt = src_related_elt;
6470 }
6471 }
6472
6473 /* See if we have a CONST_INT that is already in a register in a
6474 wider mode. */
6475
6476 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
6477 && GET_MODE_CLASS (mode) == MODE_INT
6478 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
6479 {
6480 enum machine_mode wider_mode;
6481
6482 for (wider_mode = GET_MODE_WIDER_MODE (mode);
6483 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
6484 && src_related == 0;
6485 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
6486 {
6487 struct table_elt *const_elt
6488 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
6489
6490 if (const_elt == 0)
6491 continue;
6492
6493 for (const_elt = const_elt->first_same_value;
6494 const_elt; const_elt = const_elt->next_same_value)
6495 if (GET_CODE (const_elt->exp) == REG)
6496 {
6497 src_related = gen_lowpart_if_possible (mode,
6498 const_elt->exp);
6499 break;
6500 }
6501 }
6502 }
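/* E.g. (hypothetically), if (reg:SI 65) already holds (const_int 7)
   and this insn needs (const_int 7) in HImode, SRC_RELATED becomes
   the low HImode part of (reg:SI 65), so a cheap register reference
   can replace the constant load. */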
6503
6504 /* Another possibility is that we have an AND with a constant in
6505 a mode narrower than a word. If so, it might have been generated
6506 as part of an "if" which would narrow the AND. If we already
6507 have done the AND in a wider mode, we can use a SUBREG of that
6508 value. */
6509
6510 if (flag_expensive_optimizations && ! src_related
6511 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
6512 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6513 {
6514 enum machine_mode tmode;
6515 rtx new_and = gen_rtx (AND, VOIDmode, NULL_RTX, XEXP (src, 1));
6516
6517 for (tmode = GET_MODE_WIDER_MODE (mode);
6518 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6519 tmode = GET_MODE_WIDER_MODE (tmode))
6520 {
6521 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
6522 struct table_elt *larger_elt;
6523
6524 if (inner)
6525 {
6526 PUT_MODE (new_and, tmode);
6527 XEXP (new_and, 0) = inner;
6528 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
6529 if (larger_elt == 0)
6530 continue;
6531
6532 for (larger_elt = larger_elt->first_same_value;
6533 larger_elt; larger_elt = larger_elt->next_same_value)
6534 if (GET_CODE (larger_elt->exp) == REG)
6535 {
6536 src_related
6537 = gen_lowpart_if_possible (mode, larger_elt->exp);
6538 break;
6539 }
6540
6541 if (src_related)
6542 break;
6543 }
6544 }
6545 }
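/* A sketch of that case: having earlier computed
   (and:SI (reg 66) (const_int 15)) into (reg 67), a new
   (and:QI (subreg:QI (reg 66) 0) (const_int 15)) can use the low
   QImode part of (reg 67) instead of redoing the AND. */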
6546
6547 #ifdef LOAD_EXTEND_OP
6548 /* See if a MEM has already been loaded with a widening operation;
6549 if it has, we can use a subreg of that. Many CISC machines
6550 also have such operations, but this is only likely to be
6551 beneficial on these machines. */
6552
6553 if (flag_expensive_optimizations && src_related == 0
6554 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6555 && GET_MODE_CLASS (mode) == MODE_INT
6556 && GET_CODE (src) == MEM && ! do_not_record
6557 && LOAD_EXTEND_OP (mode) != NIL)
6558 {
6559 enum machine_mode tmode;
6560
6561 /* Set what we are trying to extend and the operation it might
6562 have been extended with. */
6563 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
6564 XEXP (memory_extend_rtx, 0) = src;
6565
6566 for (tmode = GET_MODE_WIDER_MODE (mode);
6567 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6568 tmode = GET_MODE_WIDER_MODE (tmode))
6569 {
6570 struct table_elt *larger_elt;
6571
6572 PUT_MODE (memory_extend_rtx, tmode);
6573 larger_elt = lookup (memory_extend_rtx,
6574 HASH (memory_extend_rtx, tmode), tmode);
6575 if (larger_elt == 0)
6576 continue;
6577
6578 for (larger_elt = larger_elt->first_same_value;
6579 larger_elt; larger_elt = larger_elt->next_same_value)
6580 if (GET_CODE (larger_elt->exp) == REG)
6581 {
6582 src_related = gen_lowpart_if_possible (mode,
6583 larger_elt->exp);
6584 break;
6585 }
6586
6587 if (src_related)
6588 break;
6589 }
6590 }
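/* Illustration (hypothetical): on a machine whose QImode loads
   zero-extend, a previously recorded (zero_extend:SI (mem:QI addr))
   whose class contains (reg 68) lets this QImode load of the same
   address become the low byte of (reg 68). */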
6591 #endif /* LOAD_EXTEND_OP */
6592
6593 if (src == src_folded)
6594 src_folded = 0;
6595
6596 /* At this point, ELT, if non-zero, points to a class of expressions
6597 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
6598 and SRC_RELATED, if non-zero, each contain additional equivalent
6599 expressions. Prune these latter expressions by deleting expressions
6600 already in the equivalence class.
6601
6602 Check for an equivalent identical to the destination. If found,
6603 this is the preferred equivalent since it will likely lead to
6604 elimination of the insn. Indicate this by placing it in
6605 `src_related'. */
6606
6607 if (elt) elt = elt->first_same_value;
6608 for (p = elt; p; p = p->next_same_value)
6609 {
6610 enum rtx_code code = GET_CODE (p->exp);
6611
6612 /* If the expression is not valid, ignore it. Then we do not
6613 have to check for validity below. In most cases, we can use
6614 `rtx_equal_p', since canonicalization has already been done. */
6615 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
6616 continue;
6617
6618 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
6619 src = 0;
6620 else if (src_folded && GET_CODE (src_folded) == code
6621 && rtx_equal_p (src_folded, p->exp))
6622 src_folded = 0;
6623 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
6624 && rtx_equal_p (src_eqv_here, p->exp))
6625 src_eqv_here = 0;
6626 else if (src_related && GET_CODE (src_related) == code
6627 && rtx_equal_p (src_related, p->exp))
6628 src_related = 0;
6629
6630 /* If this is the same as the destination of the insn, we want
6631 to prefer it. Copy it to src_related. The code below will
6632 then give it a negative cost. */
6633 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
6634 src_related = dest;
6635
6636 }
6637
6638 /* Find the cheapest valid equivalent, trying all the available
6639 possibilities. Prefer items not in the hash table to ones
6640 that are, when their costs are equal. Note that we can never
6641 worsen an insn as the current contents will also succeed.
6642 If we find an equivalent identical to the destination, use it as best,
6643 since this insn will probably be eliminated in that case. */
6644 if (src)
6645 {
6646 if (rtx_equal_p (src, dest))
6647 src_cost = -1;
6648 else
6649 src_cost = COST (src);
6650 }
6651
6652 if (src_eqv_here)
6653 {
6654 if (rtx_equal_p (src_eqv_here, dest))
6655 src_eqv_cost = -1;
6656 else
6657 src_eqv_cost = COST (src_eqv_here);
6658 }
6659
6660 if (src_folded)
6661 {
6662 if (rtx_equal_p (src_folded, dest))
6663 src_folded_cost = -1;
6664 else
6665 src_folded_cost = COST (src_folded);
6666 }
6667
6668 if (src_related)
6669 {
6670 if (rtx_equal_p (src_related, dest))
6671 src_related_cost = -1;
6672 else
6673 src_related_cost = COST (src_related);
6674 }
6675
6676 /* If this was an indirect jump insn, a known label will really be
6677 cheaper even though it looks more expensive. */
6678 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
6679 src_folded = src_const, src_folded_cost = -1;
6680
6681 /* Terminate loop when replacement made. This must terminate since
6682 the current contents will be tested and will always be valid. */
6683 while (1)
6684 {
6685 rtx trial;
6686
6687 /* Skip invalid entries. */
6688 while (elt && GET_CODE (elt->exp) != REG
6689 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6690 elt = elt->next_same_value;
6691
6692 if (elt) src_elt_cost = elt->cost;
6693
6694 /* Find cheapest and skip it for the next time. For items
6695 of equal cost, use this order:
6696 src_folded, src, src_eqv, src_related and hash table entry. */
6697 if (src_folded_cost <= src_cost
6698 && src_folded_cost <= src_eqv_cost
6699 && src_folded_cost <= src_related_cost
6700 && src_folded_cost <= src_elt_cost)
6701 {
6702 trial = src_folded, src_folded_cost = 10000;
6703 if (src_folded_force_flag)
6704 trial = force_const_mem (mode, trial);
6705 }
6706 else if (src_cost <= src_eqv_cost
6707 && src_cost <= src_related_cost
6708 && src_cost <= src_elt_cost)
6709 trial = src, src_cost = 10000;
6710 else if (src_eqv_cost <= src_related_cost
6711 && src_eqv_cost <= src_elt_cost)
6712 trial = copy_rtx (src_eqv_here), src_eqv_cost = 10000;
6713 else if (src_related_cost <= src_elt_cost)
6714 trial = copy_rtx (src_related), src_related_cost = 10000;
6715 else
6716 {
6717 trial = copy_rtx (elt->exp);
6718 elt = elt->next_same_value;
6719 src_elt_cost = 10000;
6720 }
6721
6722 /* We don't normally have an insn matching (set (pc) (pc)), so
6723 check for this separately here. We will delete such an
6724 insn below.
6725
6726 Tablejump insns contain a USE of the table, so simply replacing
6727 the operand with the constant won't match. This is simply an
6728 unconditional branch, however, and is therefore valid. Just
6729 insert the substitution here and we will delete and re-emit
6730 the insn later. */
6731
6732 if (n_sets == 1 && dest == pc_rtx
6733 && (trial == pc_rtx
6734 || (GET_CODE (trial) == LABEL_REF
6735 && ! condjump_p (insn))))
6736 {
6737 /* If TRIAL is a label in front of a jump table, we are
6738 really falling through the switch (this is how casesi
6739 insns work), so we must branch around the table. */
6740 if (GET_CODE (trial) == CODE_LABEL
6741 && NEXT_INSN (trial) != 0
6742 && GET_CODE (NEXT_INSN (trial)) == JUMP_INSN
6743 && (GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_DIFF_VEC
6744 || GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_VEC))
6745
6746 trial = gen_rtx (LABEL_REF, Pmode, get_label_after (trial));
6747
6748 SET_SRC (sets[i].rtl) = trial;
6749 cse_jumps_altered = 1;
6750 break;
6751 }
6752
6753 /* Look for a substitution that makes a valid insn. */
6754 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
6755 {
6756 /* The result of apply_change_group can be ignored; see
6757 canon_reg. */
6758
6759 validate_change (insn, &SET_SRC (sets[i].rtl),
6760 canon_reg (SET_SRC (sets[i].rtl), insn),
6761 1);
6762 apply_change_group ();
6763 break;
6764 }
6765
6766 /* If we previously found constant pool entries for
6767 constants and this is a constant, try making a
6768 pool entry. Put it in src_folded unless we have already done
6769 this, since that is where it likely came from. */
6770
6771 else if (constant_pool_entries_cost
6772 && CONSTANT_P (trial)
6773 && ! (GET_CODE (trial) == CONST
6774 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
6775 && (src_folded == 0
6776 || (GET_CODE (src_folded) != MEM
6777 && ! src_folded_force_flag))
6778 && GET_MODE_CLASS (mode) != MODE_CC)
6779 {
6780 src_folded_force_flag = 1;
6781 src_folded = trial;
6782 src_folded_cost = constant_pool_entries_cost;
6783 }
6784 }
6785
6786 src = SET_SRC (sets[i].rtl);
6787
6788 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
6789 However, there is an important exception: If both are registers
6790 that are not the head of their equivalence class, replace SET_SRC
6791 with the head of the class. If we do not do this, we will have
6792 both registers live over a portion of the basic block. This way,
6793 their lifetimes will likely abut instead of overlapping. */
6794 if (GET_CODE (dest) == REG
6795 && REGNO_QTY_VALID_P (REGNO (dest))
6796 && qty_mode[reg_qty[REGNO (dest)]] == GET_MODE (dest)
6797 && qty_first_reg[reg_qty[REGNO (dest)]] != REGNO (dest)
6798 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
6799 /* Don't do this if the original insn had a hard reg as
6800 SET_SRC. */
6801 && (GET_CODE (sets[i].src) != REG
6802 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER))
6803 /* We can't call canon_reg here because it won't do anything if
6804 SRC is a hard register. */
6805 {
6806 int first = qty_first_reg[reg_qty[REGNO (src)]];
6807
6808 src = SET_SRC (sets[i].rtl)
6809 = first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
6810 : gen_rtx (REG, GET_MODE (src), first);
6811
6812 /* If we had a constant that is cheaper than what we are now
6813 setting SRC to, use that constant. We ignored it when we
6814 thought we could make this into a no-op. */
6815 if (src_const && COST (src_const) < COST (src)
6816 && validate_change (insn, &SET_SRC (sets[i].rtl), src_const, 0))
6817 src = src_const;
6818 }
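/* For instance (hypothetically), if (reg 70) and (reg 71) share a
   quantity whose first register is (reg 70), a no-op
   (set (reg 71) (reg 71)) is rewritten above to
   (set (reg 71) (reg 70)), letting the two lifetimes abut. */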
6819
6820 /* If we made a change, recompute SRC values. */
6821 if (src != sets[i].src)
6822 {
6823 do_not_record = 0;
6824 hash_arg_in_memory = 0;
6825 hash_arg_in_struct = 0;
6826 sets[i].src = src;
6827 sets[i].src_hash = HASH (src, mode);
6828 sets[i].src_volatile = do_not_record;
6829 sets[i].src_in_memory = hash_arg_in_memory;
6830 sets[i].src_in_struct = hash_arg_in_struct;
6831 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
6832 }
6833
6834 /* If this is a single SET, we are setting a register, and we have an
6835 equivalent constant, we want to add a REG_NOTE. We don't want
6836 to write a REG_EQUAL note for a constant pseudo since verifying that
6837 that pseudo hasn't been eliminated is a pain. Such a note also
6838 won't help anything. */
6839 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
6840 && GET_CODE (src_const) != REG)
6841 {
6842 tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6843
6844 /* Record the actual constant value in a REG_EQUAL note, making
6845 a new one if one does not already exist. */
6846 if (tem)
6847 XEXP (tem, 0) = src_const;
6848 else
6849 REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_EQUAL,
6850 src_const, REG_NOTES (insn));
6851
6852 /* If storing a constant value in a register that
6853 previously held the constant value 0,
6854 record this fact with a REG_WAS_0 note on this insn.
6855
6856 Note that the *register* is required to have previously held 0,
6857 not just any register in the quantity, and we must point to the
6858 insn that set that register to zero.
6859
6860 Rather than track each register individually, we just see if
6861 the last set for this quantity was for this register. */
6862
6863 if (REGNO_QTY_VALID_P (REGNO (dest))
6864 && qty_const[reg_qty[REGNO (dest)]] == const0_rtx)
6865 {
6866 /* See if we previously had a REG_WAS_0 note. */
6867 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
6868 rtx const_insn = qty_const_insn[reg_qty[REGNO (dest)]];
6869
6870 if ((tem = single_set (const_insn)) != 0
6871 && rtx_equal_p (SET_DEST (tem), dest))
6872 {
6873 if (note)
6874 XEXP (note, 0) = const_insn;
6875 else
6876 REG_NOTES (insn) = gen_rtx (INSN_LIST, REG_WAS_0,
6877 const_insn, REG_NOTES (insn));
6878 }
6879 }
6880 }
6881
6882 /* Now deal with the destination. */
6883 do_not_record = 0;
6884 sets[i].inner_dest_loc = &SET_DEST (sets[i].rtl);
6885
6886 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
6887 to the MEM or REG within it. */
6888 while (GET_CODE (dest) == SIGN_EXTRACT
6889 || GET_CODE (dest) == ZERO_EXTRACT
6890 || GET_CODE (dest) == SUBREG
6891 || GET_CODE (dest) == STRICT_LOW_PART)
6892 {
6893 sets[i].inner_dest_loc = &XEXP (dest, 0);
6894 dest = XEXP (dest, 0);
6895 }
6896
6897 sets[i].inner_dest = dest;
6898
6899 if (GET_CODE (dest) == MEM)
6900 {
6901 dest = fold_rtx (dest, insn);
6902
6903 /* Decide whether we invalidate everything in memory,
6904 or just things at non-fixed places.
6905 Writing a large aggregate must invalidate everything
6906 because we don't know how long it is. */
6907 note_mem_written (dest, &writes_memory);
6908 }
6909
6910 /* Compute the hash code of the destination now,
6911 before the effects of this instruction are recorded,
6912 since the register values used in the address computation
6913 are those before this instruction. */
6914 sets[i].dest_hash = HASH (dest, mode);
6915
6916 /* Don't enter a bit-field in the hash table
6917 because the value in it after the store
6918 may not equal what was stored, due to truncation. */
6919
6920 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
6921 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
6922 {
6923 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
6924
6925 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
6926 && GET_CODE (width) == CONST_INT
6927 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
6928 && ! (INTVAL (src_const)
6929 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
6930 /* Exception: if the value is constant,
6931 and it won't be truncated, record it. */
6932 ;
6933 else
6934 {
6935 /* This is chosen so that the destination will be invalidated
6936 but no new value will be recorded.
6937 We must invalidate because sometimes constant
6938 values can be recorded for bitfields. */
6939 sets[i].src_elt = 0;
6940 sets[i].src_volatile = 1;
6941 src_eqv = 0;
6942 src_eqv_elt = 0;
6943 }
6944 }
6945
6946 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
6947 the insn. */
6948 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
6949 {
6950 PUT_CODE (insn, NOTE);
6951 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
6952 NOTE_SOURCE_FILE (insn) = 0;
6953 cse_jumps_altered = 1;
6954 /* One less use of the label this insn used to jump to. */
6955 --LABEL_NUSES (JUMP_LABEL (insn));
6956 /* No more processing for this set. */
6957 sets[i].rtl = 0;
6958 }
6959
6960 /* If this SET is now setting PC to a label, we know it used to
6961 be a conditional or computed branch. So we see if we can follow
6962 it. If it was a computed branch, delete it and re-emit. */
6963 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
6964 {
6965 rtx p;
6966
6967 /* If this is not in the format for a simple branch and
6968 this is the only SET in it, re-emit it. */
6969 if (! simplejump_p (insn) && n_sets == 1)
6970 {
6971 rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
6972 JUMP_LABEL (new) = XEXP (src, 0);
6973 LABEL_NUSES (XEXP (src, 0))++;
6974 delete_insn (insn);
6975 insn = new;
6976 }
6977 else
6978 /* Otherwise, force rerecognition, since it probably had
6979 a different pattern before.
6980 This shouldn't really be necessary, since whatever
6981 changed the source value above should have done this.
6982 Until the right place is found, might as well do this here. */
6983 INSN_CODE (insn) = -1;
6984
6985 /* Now that we've converted this jump to an unconditional jump,
6986 there is dead code after it. Delete the dead code until we
6987 reach a BARRIER, the end of the function, or a label. Do
6988 not delete NOTEs except for NOTE_INSN_DELETED since later
6989 phases assume these notes are retained. */
6990
6991 p = insn;
6992
6993 while (NEXT_INSN (p) != 0
6994 && GET_CODE (NEXT_INSN (p)) != BARRIER
6995 && GET_CODE (NEXT_INSN (p)) != CODE_LABEL)
6996 {
6997 if (GET_CODE (NEXT_INSN (p)) != NOTE
6998 || NOTE_LINE_NUMBER (NEXT_INSN (p)) == NOTE_INSN_DELETED)
6999 delete_insn (NEXT_INSN (p));
7000 else
7001 p = NEXT_INSN (p);
7002 }
7003
7004 /* If we don't have a BARRIER immediately after INSN, put one there.
7005 Much code assumes that there are no NOTEs between a JUMP_INSN and
7006 BARRIER. */
7007
7008 if (NEXT_INSN (insn) == 0
7009 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
7010 emit_barrier_before (NEXT_INSN (insn));
7011
7012 /* We might have two BARRIERs separated by notes. Delete the second
7013 one if so. */
7014
7015 if (p != insn && NEXT_INSN (p) != 0
7016 && GET_CODE (NEXT_INSN (p)) == BARRIER)
7017 delete_insn (NEXT_INSN (p));
7018
7019 cse_jumps_altered = 1;
7020 sets[i].rtl = 0;
7021 }
7022
7023 /* If destination is volatile, invalidate it and then do no further
7024 processing for this assignment. */
7025
7026 else if (do_not_record)
7027 {
7028 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7029 || GET_CODE (dest) == MEM)
7030 invalidate (dest, VOIDmode);
7031 else if (GET_CODE (dest) == STRICT_LOW_PART
7032 || GET_CODE (dest) == ZERO_EXTRACT)
7033 invalidate (XEXP (dest, 0), GET_MODE (dest));
7034 sets[i].rtl = 0;
7035 }
7036
7037 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
7038 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
7039
7040 #ifdef HAVE_cc0
7041 /* If setting CC0, record what it was set to, or a constant, if it
7042 is equivalent to a constant. If it is being set to a floating-point
7043 value, make a COMPARE with the appropriate constant of 0. If we
7044 don't do this, later code can interpret this as a test against
7045 const0_rtx, which can cause problems if we try to put it into an
7046 insn as a floating-point operand. */
7047 if (dest == cc0_rtx)
7048 {
7049 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
7050 this_insn_cc0_mode = mode;
7051 if (FLOAT_MODE_P (mode))
7052 this_insn_cc0 = gen_rtx (COMPARE, VOIDmode, this_insn_cc0,
7053 CONST0_RTX (mode));
7054 }
7055 #endif
7056 }
7057
7058 /* Now enter all non-volatile source expressions in the hash table
7059 if they are not already present.
7060 Record their equivalence classes in src_elt.
7061 This way we can insert the corresponding destinations into
7062 the same classes even if the actual sources are no longer in them
7063 (having been invalidated). */
7064
7065 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
7066 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
7067 {
7068 register struct table_elt *elt;
7069 register struct table_elt *classp = sets[0].src_elt;
7070 rtx dest = SET_DEST (sets[0].rtl);
7071 enum machine_mode eqvmode = GET_MODE (dest);
7072
7073 if (GET_CODE (dest) == STRICT_LOW_PART)
7074 {
7075 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
7076 classp = 0;
7077 }
7078 if (insert_regs (src_eqv, classp, 0))
7079 {
7080 rehash_using_reg (src_eqv);
7081 src_eqv_hash = HASH (src_eqv, eqvmode);
7082 }
7083 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
7084 elt->in_memory = src_eqv_in_memory;
7085 elt->in_struct = src_eqv_in_struct;
7086 src_eqv_elt = elt;
7087
7088 /* Check to see if src_eqv_elt is the same as a set source which
7089 does not yet have an elt, and if so set the elt of the set source
7090 to src_eqv_elt. */
7091 for (i = 0; i < n_sets; i++)
7092 if (sets[i].rtl && sets[i].src_elt == 0
7093 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
7094 sets[i].src_elt = src_eqv_elt;
7095 }
7096
7097 for (i = 0; i < n_sets; i++)
7098 if (sets[i].rtl && ! sets[i].src_volatile
7099 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
7100 {
7101 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
7102 {
7103 /* REG_EQUAL in setting a STRICT_LOW_PART
7104 gives an equivalent for the entire destination register,
7105 not just for the subreg being stored in now.
7106 This is a more interesting equivalence, so we arrange later
7107 to treat the entire reg as the destination. */
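/* For instance (hypothetical pseudo number): an insn
   (set (strict_low_part (subreg:QI (reg:SI 65) 0)) ...) carrying a
   REG_EQUAL note describes all of (reg:SI 65), so it is the note's
   class, not just the QImode store, that we record here.  */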
7108 sets[i].src_elt = src_eqv_elt;
7109 sets[i].src_hash = src_eqv_hash;
7110 }
7111 else
7112 {
7113 /* Insert source and constant equivalent into hash table, if not
7114 already present. */
7115 register struct table_elt *classp = src_eqv_elt;
7116 register rtx src = sets[i].src;
7117 register rtx dest = SET_DEST (sets[i].rtl);
7118 enum machine_mode mode
7119 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
7120
7121 if (sets[i].src_elt == 0)
7122 {
7123 register struct table_elt *elt;
7124
7125 /* Note that these insert_regs calls cannot remove
7126 any of the src_elt's, because they would have failed to
7127 match if not still valid. */
7128 if (insert_regs (src, classp, 0))
7129 {
7130 rehash_using_reg (src);
7131 sets[i].src_hash = HASH (src, mode);
7132 }
7133 elt = insert (src, classp, sets[i].src_hash, mode);
7134 elt->in_memory = sets[i].src_in_memory;
7135 elt->in_struct = sets[i].src_in_struct;
7136 sets[i].src_elt = classp = elt;
7137 }
7138
7139 if (sets[i].src_const && sets[i].src_const_elt == 0
7140 && src != sets[i].src_const
7141 && ! rtx_equal_p (sets[i].src_const, src))
7142 sets[i].src_elt = insert (sets[i].src_const, classp,
7143 sets[i].src_const_hash, mode);
7144 }
7145 }
7146 else if (sets[i].src_elt == 0)
7147 /* If we did not insert the source into the hash table (e.g., it was
7148 volatile), note the equivalence class for the REG_EQUAL value, if any,
7149 so that the destination goes into that class. */
7150 sets[i].src_elt = src_eqv_elt;
7151
7152 invalidate_from_clobbers (&writes_memory, x);
7153
7154 /* Some registers are invalidated by subroutine calls. Memory is
7155 invalidated by non-constant calls. */
7156
7157 if (GET_CODE (insn) == CALL_INSN)
7158 {
7159 static struct write_data everything = {0, 1, 1, 1};
7160
7161 if (! CONST_CALL_P (insn))
7162 invalidate_memory (&everything);
7163 invalidate_for_call ();
7164 }
7165
7166 /* Now invalidate everything set by this instruction.
7167 If a SUBREG or other funny destination is being set,
7168 sets[i].rtl is still nonzero, so here we invalidate the reg
7169 a part of which is being set. */
7170
7171 for (i = 0; i < n_sets; i++)
7172 if (sets[i].rtl)
7173 {
7174 /* We can't use the inner dest, because the mode associated with
7175 a ZERO_EXTRACT is significant. */
7176 register rtx dest = SET_DEST (sets[i].rtl);
7177
7178 /* Needed for registers to remove the register from its
7179 previous quantity's chain.
7180 Needed for memory if this is a nonvarying address, unless
7181 we have just done an invalidate_memory that covers even those. */
7182 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7183 || (GET_CODE (dest) == MEM && ! writes_memory.all
7184 && ! cse_rtx_addr_varies_p (dest)))
7185 invalidate (dest, VOIDmode);
7186 else if (GET_CODE (dest) == STRICT_LOW_PART
7187 || GET_CODE (dest) == ZERO_EXTRACT)
7188 invalidate (XEXP (dest, 0), GET_MODE (dest));
7189 }
7190
7191 /* Make sure registers mentioned in destinations
7192 are safe for use in an expression to be inserted.
7193 This removes from the hash table
7194 any invalid entry that refers to one of these registers.
7195
7196 We don't care about the return value from mention_regs because
7197 we are going to hash the SET_DEST values unconditionally. */
7198
7199 for (i = 0; i < n_sets; i++)
7200 if (sets[i].rtl && GET_CODE (SET_DEST (sets[i].rtl)) != REG)
7201 mention_regs (SET_DEST (sets[i].rtl));
7202
7203 /* We may have just removed some of the src_elt's from the hash table.
7204 So replace each one with the current head of the same class. */
7205
7206 for (i = 0; i < n_sets; i++)
7207 if (sets[i].rtl)
7208 {
7209 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
7210 /* If elt was removed, find current head of same class,
7211 or 0 if nothing remains of that class. */
7212 {
7213 register struct table_elt *elt = sets[i].src_elt;
7214
7215 while (elt && elt->prev_same_value)
7216 elt = elt->prev_same_value;
7217
7218 while (elt && elt->first_same_value == 0)
7219 elt = elt->next_same_value;
7220 sets[i].src_elt = elt ? elt->first_same_value : 0;
7221 }
7222 }
7223
7224 /* Now insert the destinations into their equivalence classes. */
7225
7226 for (i = 0; i < n_sets; i++)
7227 if (sets[i].rtl)
7228 {
7229 register rtx dest = SET_DEST (sets[i].rtl);
7230 register struct table_elt *elt;
7231
7232 /* Don't record value if we are not supposed to risk allocating
7233 floating-point values in registers that might be wider than
7234 memory. */
7235 if ((flag_float_store
7236 && GET_CODE (dest) == MEM
7237 && FLOAT_MODE_P (GET_MODE (dest)))
7238 /* Don't record values of destinations set inside a libcall block
7239 since we might delete the libcall. Things should have been set
7240 up so we won't want to reuse such a value, but we play it safe
7241 here. */
7242 || in_libcall_block
7243 /* If we didn't put a REG_EQUAL value or a source into the hash
7244 table, there is no point in recording DEST.
7245 || sets[i].src_elt == 0
7246 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
7247 or SIGN_EXTEND, don't record DEST since it can cause
7248 some tracking to be wrong.
7249
7250 ??? Think about this more later. */
7251 || (GET_CODE (dest) == SUBREG
7252 && (GET_MODE_SIZE (GET_MODE (dest))
7253 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7254 && (GET_CODE (sets[i].src) == SIGN_EXTEND
7255 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
7256 continue;
7257
7258 /* STRICT_LOW_PART isn't part of the value BEING set,
7259 and neither is the SUBREG inside it.
7260 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
7261 if (GET_CODE (dest) == STRICT_LOW_PART)
7262 dest = SUBREG_REG (XEXP (dest, 0));
7263
7264 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
7265 /* Registers must also be inserted into chains for quantities. */
7266 if (insert_regs (dest, sets[i].src_elt, 1))
7267 {
7268 /* If `insert_regs' changes something, the hash code must be
7269 recalculated. */
7270 rehash_using_reg (dest);
7271 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
7272 }
7273
7274 elt = insert (dest, sets[i].src_elt,
7275 sets[i].dest_hash, GET_MODE (dest));
7276 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
7277 && ! RTX_UNCHANGING_P (sets[i].inner_dest));
7278
7279 if (elt->in_memory)
7280 {
7281 /* This implicitly assumes a whole struct
7282 need not have MEM_IN_STRUCT_P.
7283 But a whole struct is *supposed* to have MEM_IN_STRUCT_P. */
7284 elt->in_struct = (MEM_IN_STRUCT_P (sets[i].inner_dest)
7285 || sets[i].inner_dest != SET_DEST (sets[i].rtl));
7286 }
7287
7288 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
7289 narrower than M2, and both M1 and M2 are the same number of words,
7290 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
7291 make that equivalence as well.
7292
7293 However, applying gen_lowpart_if_possible to one of BAR's equivalences
7294 may produce a simpler value than applying it to BAR itself (e.g., if
7295 BAR was ZERO_EXTENDed from M2), so we will scan all
7296 BAR's equivalences. If we don't get a simplified form, make
7297 the SUBREG. It will not be used in an equivalence, but will
7298 cause two similar assignments to be detected.
7299
7300 Note the loop below will find SUBREG_REG (DEST) since we have
7301 already entered SRC and DEST of the SET in the table. */
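/* Hypothetical illustration, assuming 4-byte words: given
   (set (subreg:SI (reg:HI 65) 0) (reg:SI 66)), we also record
   (reg:HI 65) as equivalent to (subreg:HI (reg:SI 66) 0), since both
   modes fit in one word.  */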
7302
7303 if (GET_CODE (dest) == SUBREG
7304 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
7305 / UNITS_PER_WORD)
7306 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
7307 && (GET_MODE_SIZE (GET_MODE (dest))
7308 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7309 && sets[i].src_elt != 0)
7310 {
7311 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
7312 struct table_elt *elt, *classp = 0;
7313
7314 for (elt = sets[i].src_elt->first_same_value; elt;
7315 elt = elt->next_same_value)
7316 {
7317 rtx new_src = 0;
7318 unsigned src_hash;
7319 struct table_elt *src_elt;
7320
7321 /* Ignore invalid entries. */
7322 if (GET_CODE (elt->exp) != REG
7323 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
7324 continue;
7325
7326 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
7327 if (new_src == 0)
7328 new_src = gen_rtx (SUBREG, new_mode, elt->exp, 0);
7329
7330 src_hash = HASH (new_src, new_mode);
7331 src_elt = lookup (new_src, src_hash, new_mode);
7332
7333 /* Put the new source in the hash table if it isn't
7334 there already.  */
7335 if (src_elt == 0)
7336 {
7337 if (insert_regs (new_src, classp, 0))
7338 {
7339 rehash_using_reg (new_src);
7340 src_hash = HASH (new_src, new_mode);
7341 }
7342 src_elt = insert (new_src, classp, src_hash, new_mode);
7343 src_elt->in_memory = elt->in_memory;
7344 src_elt->in_struct = elt->in_struct;
7345 }
7346 else if (classp && classp != src_elt->first_same_value)
7347 /* Show that two things that we've seen before are
7348 actually the same. */
7349 merge_equiv_classes (src_elt, classp);
7350
7351 classp = src_elt->first_same_value;
7352 }
7353 }
7354 }
7355
7356 /* Special handling for (set REG0 REG1)
7357 where REG0 is the "cheapest", cheaper than REG1.
7358 After cse, REG1 will probably not be used in the sequel,
7359 so (if easily done) change this insn to (set REG1 REG0) and
7360 replace REG1 with REG0 in the previous insn that computed their value.
7361 Then REG1 will become a dead store and won't cloud the situation
7362 for later optimizations.
7363
7364 Do not make this change if REG1 is a hard register, because it will
7365 then be used in the sequel and we may be changing a two-operand insn
7366 into a three-operand insn.
7367
7368 Also do not do this if we are operating on a copy of INSN. */
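/* A hypothetical illustration, where (reg 100) is the cheaper register:
   (set (reg 101) (expr))
   (set (reg 100) (reg 101))
   is rewritten as
   (set (reg 100) (expr))
   (set (reg 101) (reg 100))
   leaving the copy into (reg 101) as a likely dead store.  */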
7369
7370 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
7371 && NEXT_INSN (PREV_INSN (insn)) == insn
7372 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
7373 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
7374 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl)))
7375 && (qty_first_reg[reg_qty[REGNO (SET_SRC (sets[0].rtl))]]
7376 == REGNO (SET_DEST (sets[0].rtl))))
7377 {
7378 rtx prev = PREV_INSN (insn);
7379 while (prev && GET_CODE (prev) == NOTE)
7380 prev = PREV_INSN (prev);
7381
7382 if (prev && GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SET
7383 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl))
7384 {
7385 rtx dest = SET_DEST (sets[0].rtl);
7386 rtx note = find_reg_note (prev, REG_EQUIV, NULL_RTX);
7387
7388 validate_change (prev, & SET_DEST (PATTERN (prev)), dest, 1);
7389 validate_change (insn, & SET_DEST (sets[0].rtl),
7390 SET_SRC (sets[0].rtl), 1);
7391 validate_change (insn, & SET_SRC (sets[0].rtl), dest, 1);
7392 apply_change_group ();
7393
7394 /* If REG1 was equivalent to a constant, REG0 is not. */
7395 if (note)
7396 PUT_REG_NOTE_KIND (note, REG_EQUAL);
7397
7398 /* If there was a REG_WAS_0 note on PREV, remove it. Move
7399 any REG_WAS_0 note on INSN to PREV. */
7400 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
7401 if (note)
7402 remove_note (prev, note);
7403
7404 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
7405 if (note)
7406 {
7407 remove_note (insn, note);
7408 XEXP (note, 1) = REG_NOTES (prev);
7409 REG_NOTES (prev) = note;
7410 }
7411 }
7412 }
7413
7414 /* If this is a conditional jump insn, record any known equivalences due to
7415 the condition being tested. */
7416
7417 last_jump_equiv_class = 0;
7418 if (GET_CODE (insn) == JUMP_INSN
7419 && n_sets == 1 && GET_CODE (x) == SET
7420 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
7421 record_jump_equiv (insn, 0);
7422
7423 #ifdef HAVE_cc0
7424 /* If the previous insn set CC0 and this insn no longer references CC0,
7425 delete the previous insn. Here we use the fact that nothing expects CC0
7426 to be valid over an insn, which is true until the final pass. */
7427 if (prev_insn && GET_CODE (prev_insn) == INSN
7428 && (tem = single_set (prev_insn)) != 0
7429 && SET_DEST (tem) == cc0_rtx
7430 && ! reg_mentioned_p (cc0_rtx, x))
7431 {
7432 PUT_CODE (prev_insn, NOTE);
7433 NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
7434 NOTE_SOURCE_FILE (prev_insn) = 0;
7435 }
7436
7437 prev_insn_cc0 = this_insn_cc0;
7438 prev_insn_cc0_mode = this_insn_cc0_mode;
7439 #endif
7440
7441 prev_insn = insn;
7442 }
7443 \f
7444 /* Store 1 in *WRITES_PTR for those categories of memory ref
7445 that must be invalidated when the expression WRITTEN is stored in.
7446 If WRITTEN is null, say everything must be invalidated. */
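/* For example, a push such as (set (mem:SI (pre_dec:SI (reg sp))) ...)
   marks only WRITES_PTR->sp, while a store through a varying scalar
   address marks WRITES_PTR->all and WRITES_PTR->nonscalar as well as
   WRITES_PTR->var.  */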
7447
7448 static void
7449 note_mem_written (written, writes_ptr)
7450 rtx written;
7451 struct write_data *writes_ptr;
7452 {
7453 static struct write_data everything = {0, 1, 1, 1};
7454
7455 if (written == 0)
7456 *writes_ptr = everything;
7457 else if (GET_CODE (written) == MEM)
7458 {
7459 /* Pushing or popping the stack invalidates just the stack pointer. */
7460 rtx addr = XEXP (written, 0);
7461 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
7462 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
7463 && GET_CODE (XEXP (addr, 0)) == REG
7464 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
7465 {
7466 writes_ptr->sp = 1;
7467 return;
7468 }
7469 else if (GET_MODE (written) == BLKmode)
7470 *writes_ptr = everything;
7471 /* (mem (scratch)) means clobber everything. */
7472 else if (GET_CODE (addr) == SCRATCH)
7473 *writes_ptr = everything;
7474 else if (cse_rtx_addr_varies_p (written))
7475 {
7476 /* A varying address that is a sum indicates an array element,
7477 and that's just as good as a structure element
7478 in implying that we need not invalidate scalar variables.
7479 However, we must allow QImode aliasing of scalars, because the
7480 ANSI C standard allows character pointers to alias anything. */
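/* E.g., an SImode store through (plus (reg 65) (reg 66)) (hypothetical
   pseudos) leaves scalars alone, but a QImode store through the same
   address must invalidate them, since a char * may point at anything.  */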
7481 if (! ((MEM_IN_STRUCT_P (written)
7482 || GET_CODE (XEXP (written, 0)) == PLUS)
7483 && GET_MODE (written) != QImode))
7484 writes_ptr->all = 1;
7485 writes_ptr->nonscalar = 1;
7486 }
7487 writes_ptr->var = 1;
7488 }
7489 }
7490
7491 /* Perform invalidation on the basis of everything about an insn
7492 except for invalidating the actual places that are SET in it.
7493 This includes the places CLOBBERed, and anything that might
7494 alias with something that is SET or CLOBBERed.
7495
7496 W points to the writes_memory for this insn, a struct write_data
7497 saying which kinds of memory references must be invalidated.
7498 X is the pattern of the insn. */
7499
7500 static void
7501 invalidate_from_clobbers (w, x)
7502 struct write_data *w;
7503 rtx x;
7504 {
7505 /* If W->var is not set, W specifies no action.
7506 If W->all is set, this step gets all memory refs
7507 so they can be ignored in the rest of this function. */
7508 if (w->var)
7509 invalidate_memory (w);
7510
7511 if (w->sp)
7512 {
7513 if (reg_tick[STACK_POINTER_REGNUM] >= 0)
7514 reg_tick[STACK_POINTER_REGNUM]++;
7515
7516 /* This should be *very* rare. */
7517 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
7518 invalidate (stack_pointer_rtx, VOIDmode);
7519 }
7520
7521 if (GET_CODE (x) == CLOBBER)
7522 {
7523 rtx ref = XEXP (x, 0);
7524 if (ref)
7525 {
7526 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
7527 || (GET_CODE (ref) == MEM && ! w->all))
7528 invalidate (ref, VOIDmode);
7529 else if (GET_CODE (ref) == STRICT_LOW_PART
7530 || GET_CODE (ref) == ZERO_EXTRACT)
7531 invalidate (XEXP (ref, 0), GET_MODE (ref));
7532 }
7533 }
7534 else if (GET_CODE (x) == PARALLEL)
7535 {
7536 register int i;
7537 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
7538 {
7539 register rtx y = XVECEXP (x, 0, i);
7540 if (GET_CODE (y) == CLOBBER)
7541 {
7542 rtx ref = XEXP (y, 0);
7543 if (ref)
7544 {
7545 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
7546 || (GET_CODE (ref) == MEM && !w->all))
7547 invalidate (ref, VOIDmode);
7548 else if (GET_CODE (ref) == STRICT_LOW_PART
7549 || GET_CODE (ref) == ZERO_EXTRACT)
7550 invalidate (XEXP (ref, 0), GET_MODE (ref));
7551 }
7552 }
7553 }
7554 }
7555 }
7556 \f
7557 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
7558 and replace any registers in them with either an equivalent constant
7559 or the canonical form of the register. If we are inside an address,
7560 only do this if the address remains valid.
7561
7562 OBJECT is 0 except when within a MEM in which case it is the MEM.
7563
7564 Return the replacement for X. */
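/* For instance, if (reg:SI 66) is known to hold (const_int 100), a note
   (expr_list:REG_EQUAL (plus:SI (reg:SI 66) (const_int 8)) ...) has the
   register replaced with (const_int 100); a register with no constant
   equivalent is merely canonicalized.  (Hypothetical pseudo number.)  */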
7565
7566 static rtx
7567 cse_process_notes (x, object)
7568 rtx x;
7569 rtx object;
7570 {
7571 enum rtx_code code = GET_CODE (x);
7572 char *fmt = GET_RTX_FORMAT (code);
7573 int i;
7574
7575 switch (code)
7576 {
7577 case CONST_INT:
7578 case CONST:
7579 case SYMBOL_REF:
7580 case LABEL_REF:
7581 case CONST_DOUBLE:
7582 case PC:
7583 case CC0:
7584 case LO_SUM:
7585 return x;
7586
7587 case MEM:
7588 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
7589 return x;
7590
7591 case EXPR_LIST:
7592 case INSN_LIST:
7593 if (REG_NOTE_KIND (x) == REG_EQUAL)
7594 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
7595 if (XEXP (x, 1))
7596 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
7597 return x;
7598
7599 case SIGN_EXTEND:
7600 case ZERO_EXTEND:
7601 {
7602 rtx new = cse_process_notes (XEXP (x, 0), object);
7603 /* We don't substitute VOIDmode constants into these rtx,
7604 since they would impede folding. */
7605 if (GET_MODE (new) != VOIDmode)
7606 validate_change (object, &XEXP (x, 0), new, 0);
7607 return x;
7608 }
7609
7610 case REG:
7611 i = reg_qty[REGNO (x)];
7612
7613 /* Return a constant or a constant register. */
7614 if (REGNO_QTY_VALID_P (REGNO (x))
7615 && qty_const[i] != 0
7616 && (CONSTANT_P (qty_const[i])
7617 || GET_CODE (qty_const[i]) == REG))
7618 {
7619 rtx new = gen_lowpart_if_possible (GET_MODE (x), qty_const[i]);
7620 if (new)
7621 return new;
7622 }
7623
7624 /* Otherwise, canonicalize this register. */
7625 return canon_reg (x, NULL_RTX);
7626 }
7627
7628 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7629 if (fmt[i] == 'e')
7630 validate_change (object, &XEXP (x, i),
7631 cse_process_notes (XEXP (x, i), object), 0);
7632
7633 return x;
7634 }
7635 \f
7636 /* Find common subexpressions between the end test of a loop and the beginning
7637 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
7638
7639 Often we have a loop where an expression in the exit test is used
7640 in the body of the loop. For example "while (*p) *q++ = *p++;".
7641 Because of the way we duplicate the loop exit test in front of the loop,
7642 however, we don't detect that common subexpression. This will be caught
7643 when global cse is implemented, but this is a quite common case.
7644
7645 This function handles the most common cases of these common expressions.
7646 It is called after we have processed the basic block ending with the
7647 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
7648 jumps to a label used only once. */
7649
7650 static void
7651 cse_around_loop (loop_start)
7652 rtx loop_start;
7653 {
7654 rtx insn;
7655 int i;
7656 struct table_elt *p;
7657
7658 /* If the jump at the end of the loop doesn't go to the start, we don't
7659 do anything. */
7660 for (insn = PREV_INSN (loop_start);
7661 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
7662 insn = PREV_INSN (insn))
7663 ;
7664
7665 if (insn == 0
7666 || GET_CODE (insn) != NOTE
7667 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
7668 return;
7669
7670 /* If the last insn of the loop (the end test) was an NE comparison,
7671 we will interpret it as an EQ comparison, since we fell through
7672 the loop. Any equivalences resulting from that comparison are
7673 therefore not valid and must be invalidated. */
7674 if (last_jump_equiv_class)
7675 for (p = last_jump_equiv_class->first_same_value; p;
7676 p = p->next_same_value)
7677 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
7678 || (GET_CODE (p->exp) == SUBREG
7679 && GET_CODE (SUBREG_REG (p->exp)) == REG))
7680 invalidate (p->exp, VOIDmode);
7681 else if (GET_CODE (p->exp) == STRICT_LOW_PART
7682 || GET_CODE (p->exp) == ZERO_EXTRACT)
7683 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
7684
7685 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
7686 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
7687
7688 The only thing we do with SET_DEST is invalidate entries, so we
7689 can safely process each SET in order. It is slightly less efficient
7690 to do so, but we only want to handle the most common cases. */
7691
7692 for (insn = NEXT_INSN (loop_start);
7693 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
7694 && ! (GET_CODE (insn) == NOTE
7695 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
7696 insn = NEXT_INSN (insn))
7697 {
7698 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
7699 && (GET_CODE (PATTERN (insn)) == SET
7700 || GET_CODE (PATTERN (insn)) == CLOBBER))
7701 cse_set_around_loop (PATTERN (insn), insn, loop_start);
7702 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
7703 && GET_CODE (PATTERN (insn)) == PARALLEL)
7704 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7705 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
7706 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
7707 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
7708 loop_start);
7709 }
7710 }
7711 \f
7712 /* Variable used for communications between the next two routines. */
7713
7714 static struct write_data skipped_writes_memory;
7715
7716 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
7717 since they are done elsewhere. This function is called via note_stores. */
7718
7719 static void
7720 invalidate_skipped_set (dest, set)
7721 rtx dest;
7722 rtx set;
7723 {
7724 if (GET_CODE (set) == CLOBBER
7725 #ifdef HAVE_cc0
7726 || dest == cc0_rtx
7727 #endif
7728 || dest == pc_rtx)
7729 return;
7730
7731 if (GET_CODE (dest) == MEM)
7732 note_mem_written (dest, &skipped_writes_memory);
7733
7734 /* There are times when an address can appear varying and be a PLUS
7735 during this scan when it would be a fixed address were we to know
7736 the proper equivalences. So promote "nonscalar" to be "all". */
7737 if (skipped_writes_memory.nonscalar)
7738 skipped_writes_memory.all = 1;
7739
7740 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7741 || (! skipped_writes_memory.all && ! cse_rtx_addr_varies_p (dest)))
7742 invalidate (dest, VOIDmode);
7743 else if (GET_CODE (dest) == STRICT_LOW_PART
7744 || GET_CODE (dest) == ZERO_EXTRACT)
7745 invalidate (XEXP (dest, 0), GET_MODE (dest));
7746 }
7747
7748 /* Invalidate everything set by the insns from START up to the end of
7749 the function or the next label.  This is called when we wish to CSE
7750 around a block that is conditionally executed.  */
7751
7752 static void
7753 invalidate_skipped_block (start)
7754 rtx start;
7755 {
7756 rtx insn;
7757 static struct write_data init = {0, 0, 0, 0};
7758 static struct write_data everything = {0, 1, 1, 1};
7759
7760 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
7761 insn = NEXT_INSN (insn))
7762 {
7763 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
7764 continue;
7765
7766 skipped_writes_memory = init;
7767
7768 if (GET_CODE (insn) == CALL_INSN)
7769 {
7770 invalidate_for_call ();
7771 skipped_writes_memory = everything;
7772 }
7773
7774 note_stores (PATTERN (insn), invalidate_skipped_set);
7775 invalidate_from_clobbers (&skipped_writes_memory, PATTERN (insn));
7776 }
7777 }
7778 \f
7779 /* Used for communication between the following two routines; contains a
7780 value to be checked for modification. */
7781
7782 static rtx cse_check_loop_start_value;
7783
7784 /* If modifying X will modify the value in CSE_CHECK_LOOP_START_VALUE,
7785 indicate that fact by setting CSE_CHECK_LOOP_START_VALUE to 0. */
7786
7787 static void
7788 cse_check_loop_start (x, set)
7789 rtx x;
7790 rtx set;
7791 {
7792 if (cse_check_loop_start_value == 0
7793 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
7794 return;
7795
7796 if ((GET_CODE (x) == MEM && GET_CODE (cse_check_loop_start_value) == MEM)
7797 || reg_overlap_mentioned_p (x, cse_check_loop_start_value))
7798 cse_check_loop_start_value = 0;
7799 }
7800
7801 /* X is a SET or CLOBBER contained in INSN that was found near the start of
7802 a loop that starts with the label at LOOP_START.
7803
7804 If X is a SET, we see if its SET_SRC is currently in our hash table.
7805 If so, we see if it has a value equal to some register used only in the
7806 loop exit code (as marked by jump.c).
7807
7808 If those two conditions are true, we search backwards from the start of
7809 the loop to see if that same value was loaded into a register that still
7810 retains its value at the start of the loop.
7811
7812 If so, we insert an insn after the load to copy the destination of that
7813 load into the equivalent register and (try to) replace our SET_SRC with that
7814 register.
7815
7816 In any event, we invalidate whatever this SET or CLOBBER modifies. */
7817
7818 static void
7819 cse_set_around_loop (x, insn, loop_start)
7820 rtx x;
7821 rtx insn;
7822 rtx loop_start;
7823 {
7824 struct table_elt *src_elt;
7825 static struct write_data init = {0, 0, 0, 0};
7826 struct write_data writes_memory;
7827
7828 writes_memory = init;
7829
7830 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
7831 are setting PC or CC0 or whose SET_SRC is already a register. */
7832 if (GET_CODE (x) == SET
7833 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
7834 && GET_CODE (SET_SRC (x)) != REG)
7835 {
7836 src_elt = lookup (SET_SRC (x),
7837 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
7838 GET_MODE (SET_DEST (x)));
7839
7840 if (src_elt)
7841 for (src_elt = src_elt->first_same_value; src_elt;
7842 src_elt = src_elt->next_same_value)
7843 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
7844 && COST (src_elt->exp) < COST (SET_SRC (x)))
7845 {
7846 rtx p, set;
7847
7848 /* Look for an insn in front of LOOP_START that sets
7849 something in the desired mode to SET_SRC (x) before we hit
7850 a label or CALL_INSN. */
7851
7852 for (p = prev_nonnote_insn (loop_start);
7853 p && GET_CODE (p) != CALL_INSN
7854 && GET_CODE (p) != CODE_LABEL;
7855 p = prev_nonnote_insn (p))
7856 if ((set = single_set (p)) != 0
7857 && GET_CODE (SET_DEST (set)) == REG
7858 && GET_MODE (SET_DEST (set)) == src_elt->mode
7859 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
7860 {
7861 /* We now have to ensure that nothing between P
7862 and LOOP_START modified anything referenced in
7863 SET_SRC (x). We know that nothing within the loop
7864 can modify it, or we would have invalidated it in
7865 the hash table. */
7866 rtx q;
7867
7868 cse_check_loop_start_value = SET_SRC (x);
7869 for (q = p; q != loop_start; q = NEXT_INSN (q))
7870 if (GET_RTX_CLASS (GET_CODE (q)) == 'i')
7871 note_stores (PATTERN (q), cse_check_loop_start);
7872
7873 /* If nothing was changed and we can replace our
7874 SET_SRC, add an insn after P to copy its destination
7875 to what we will be replacing SET_SRC with. */
7876 if (cse_check_loop_start_value
7877 && validate_change (insn, &SET_SRC (x),
7878 src_elt->exp, 0))
7879 emit_insn_after (gen_move_insn (src_elt->exp,
7880 SET_DEST (set)),
7881 p);
7882 break;
7883 }
7884 }
7885 }
7886
7887 /* Now invalidate anything modified by X. */
7888 note_mem_written (SET_DEST (x), &writes_memory);
7889
7890 if (writes_memory.var)
7891 invalidate_memory (&writes_memory);
7892
7893 /* See comment on similar code in cse_insn for explanation of these tests. */
7894 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
7895 || (GET_CODE (SET_DEST (x)) == MEM && ! writes_memory.all
7896 && ! cse_rtx_addr_varies_p (SET_DEST (x))))
7897 invalidate (SET_DEST (x), VOIDmode);
7898 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
7899 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
7900 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
7901 }
7902 \f
7903 /* Find the end of INSN's basic block and return its range,
7904 the total number of SETs in all the insns of the block, the last insn of the
7905 block, and the branch path.
7906
7907 The branch path indicates which branches should be followed. If a non-zero
7908 path size is specified, the block should be rescanned and a different set
7909 of branches will be taken. The branch path is only used if
7910 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
7911
7912 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
7913 used to describe the block. It is filled in with the information about
7914 the current block. The incoming structure's branch path, if any, is used
7915 to construct the output branch path. */
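/* For instance, when following jumps, a conditional branch first enters
   the path marked TAKEN (or AROUND); on a later call for the same block
   that entry is flipped to NOT_TAKEN, or the path is shortened, so
   successive rescans explore the alternative paths.  */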
7916
7917 void
7918 cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
7919 rtx insn;
7920 struct cse_basic_block_data *data;
7921 int follow_jumps;
7922 int after_loop;
7923 int skip_blocks;
7924 {
7925 rtx p = insn, q;
7926 int nsets = 0;
7927 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
7928 rtx next = GET_RTX_CLASS (GET_CODE (insn)) == 'i' ? insn : next_real_insn (insn);
7929 int path_size = data->path_size;
7930 int path_entry = 0;
7931 int i;
7932
7933 /* Update the previous branch path, if any. If the last branch was
7934 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
7935 shorten the path by one and look at the previous branch. We know that
7936 at least one branch must have been taken if PATH_SIZE is non-zero. */
7937 while (path_size > 0)
7938 {
7939 if (data->path[path_size - 1].status != NOT_TAKEN)
7940 {
7941 data->path[path_size - 1].status = NOT_TAKEN;
7942 break;
7943 }
7944 else
7945 path_size--;
7946 }
7947
7948 /* Scan to end of this basic block. */
7949 while (p && GET_CODE (p) != CODE_LABEL)
7950 {
7951 /* Don't cse out the end of a loop. This makes a difference
7952 only for the unusual loops that always execute at least once;
7953 all other loops have labels there so we will stop in any case.
7954 Cse'ing out the end of the loop is dangerous because it
7955 might cause an invariant expression inside the loop
7956 to be reused after the end of the loop. This would make it
7957 hard to move the expression out of the loop in loop.c,
7958 especially if it is one of several equivalent expressions
7959 and loop.c would like to eliminate it.
7960
7961 If we are running after loop.c has finished, we can ignore
7962 the NOTE_INSN_LOOP_END. */
7963
7964 if (! after_loop && GET_CODE (p) == NOTE
7965 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
7966 break;
7967
7968 /* Don't cse over a call to setjmp; on some machines (e.g., the VAX)
7969 the regs restored by the longjmp come from
7970 a later time than the setjmp. */
7971 if (GET_CODE (p) == NOTE
7972 && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
7973 break;
7974
7975 /* A PARALLEL can have lots of SETs in it,
7976 especially if it is really an ASM_OPERANDS. */
7977 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
7978 && GET_CODE (PATTERN (p)) == PARALLEL)
7979 nsets += XVECLEN (PATTERN (p), 0);
7980 else if (GET_CODE (p) != NOTE)
7981 nsets += 1;
7982
7983 /* Ignore insns made by CSE; they cannot affect the boundaries of
7984 the basic block. */
7985
7986 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
7987 high_cuid = INSN_CUID (p);
7988 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
7989 low_cuid = INSN_CUID (p);
7990
7991 /* See if this insn is in our branch path. If it is and we are to
7992 take it, do so. */
7993 if (path_entry < path_size && data->path[path_entry].branch == p)
7994 {
7995 if (data->path[path_entry].status != NOT_TAKEN)
7996 p = JUMP_LABEL (p);
7997
7998 /* Point to next entry in path, if any. */
7999 path_entry++;
8000 }
8001
8002 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
8003 was specified, we haven't reached our maximum path length, there are
8004 insns following the target of the jump, this is the only use of the
8005 jump label, and the target label is preceded by a BARRIER.
8006
8007 Alternatively, we can follow the jump if it branches around a
8008 block of code and there are no other branches into the block.
8009 In this case invalidate_skipped_block will be called to invalidate any
8010 registers set in the block when following the jump. */
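/* For instance, source like "if (x) y = 0;" compiles to a conditional
   branch around the store; with -fcse-skip-blocks we follow that branch
   and let invalidate_skipped_block discard whatever the skipped block
   sets.  */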
8011
8012 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
8013 && GET_CODE (p) == JUMP_INSN
8014 && GET_CODE (PATTERN (p)) == SET
8015 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
8016 && LABEL_NUSES (JUMP_LABEL (p)) == 1
8017 && NEXT_INSN (JUMP_LABEL (p)) != 0)
8018 {
8019 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
8020 if ((GET_CODE (q) != NOTE
8021 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
8022 || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
8023 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
8024 break;
8025
8026 /* If we ran into a BARRIER, this code is an extension of the
8027 basic block when the branch is taken. */
8028 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
8029 {
8030 /* Don't allow ourselves to keep walking around an
8031 always-executed loop. */
8032 if (next_real_insn (q) == next)
8033 {
8034 p = NEXT_INSN (p);
8035 continue;
8036 }
8037
8038 /* Similarly, don't put a branch in our path more than once. */
8039 for (i = 0; i < path_entry; i++)
8040 if (data->path[i].branch == p)
8041 break;
8042
8043 if (i != path_entry)
8044 break;
8045
8046 data->path[path_entry].branch = p;
8047 data->path[path_entry++].status = TAKEN;
8048
8049 /* This branch now ends our path. It was possible that we
8050 didn't see this branch the last time around (when the
8051 insn in front of the target was a JUMP_INSN that was
8052 turned into a no-op). */
8053 path_size = path_entry;
8054
8055 p = JUMP_LABEL (p);
8056 /* Mark block so we won't scan it again later. */
8057 PUT_MODE (NEXT_INSN (p), QImode);
8058 }
8059 /* Detect a branch around a block of code. */
8060 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
8061 {
8062 register rtx tmp;
8063
8064 if (next_real_insn (q) == next)
8065 {
8066 p = NEXT_INSN (p);
8067 continue;
8068 }
8069
8070 for (i = 0; i < path_entry; i++)
8071 if (data->path[i].branch == p)
8072 break;
8073
8074 if (i != path_entry)
8075 break;
8076
8077 /* This is no_labels_between_p (p, q) with an added check for
8078 reaching the end of a function (in case Q precedes P). */
8079 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
8080 if (GET_CODE (tmp) == CODE_LABEL)
8081 break;
8082
8083 if (tmp == q)
8084 {
8085 data->path[path_entry].branch = p;
8086 data->path[path_entry++].status = AROUND;
8087
8088 path_size = path_entry;
8089
8090 p = JUMP_LABEL (p);
8091 /* Mark block so we won't scan it again later. */
8092 PUT_MODE (NEXT_INSN (p), QImode);
8093 }
8094 }
8095 }
8096 p = NEXT_INSN (p);
8097 }
8098
8099 data->low_cuid = low_cuid;
8100 data->high_cuid = high_cuid;
8101 data->nsets = nsets;
8102 data->last = p;
8103
8104 /* If none of the jumps in the path were taken, set our path length to
8105 zero so that a rescan won't be done.  */
8106 for (i = path_size - 1; i >= 0; i--)
8107 if (data->path[i].status != NOT_TAKEN)
8108 break;
8109
8110 if (i == -1)
8111 data->path_size = 0;
8112 else
8113 data->path_size = path_size;
8114
8115 /* End the current branch path. */
8116 data->path[path_size].branch = 0;
8117 }
8118 \f
8119 /* Perform cse on the instructions of a function.
8120 F is the first instruction.
8121 NREGS is one plus the highest pseudo-reg number used in the function.
8122
8123 AFTER_LOOP is 1 if this is the cse call done after loop optimization
8124 (only if -frerun-cse-after-loop).
8125
8126 Returns 1 if jump_optimize should be redone due to simplifications
8127 in conditional jump instructions. */
8128
8129 int
8130 cse_main (f, nregs, after_loop, file)
8131 rtx f;
8132 int nregs;
8133 int after_loop;
8134 FILE *file;
8135 {
8136 struct cse_basic_block_data val;
8137 register rtx insn = f;
8138 register int i;
8139
8140 cse_jumps_altered = 0;
8141 constant_pool_entries_cost = 0;
8142 val.path_size = 0;
8143
8144 init_recog ();
8145
8146 max_reg = nregs;
8147
8148 all_minus_one = (int *) alloca (nregs * sizeof (int));
8149 consec_ints = (int *) alloca (nregs * sizeof (int));
8150
8151 for (i = 0; i < nregs; i++)
8152 {
8153 all_minus_one[i] = -1;
8154 consec_ints[i] = i;
8155 }
8156
8157 reg_next_eqv = (int *) alloca (nregs * sizeof (int));
8158 reg_prev_eqv = (int *) alloca (nregs * sizeof (int));
8159 reg_qty = (int *) alloca (nregs * sizeof (int));
8160 reg_in_table = (int *) alloca (nregs * sizeof (int));
8161 reg_tick = (int *) alloca (nregs * sizeof (int));
8162
8163 #ifdef LOAD_EXTEND_OP
8164
8165 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
8166 and change the code and mode as appropriate. */
8167 memory_extend_rtx = gen_rtx (ZERO_EXTEND, VOIDmode, 0);
8168 #endif
8169
8170 /* Discard all the free elements of the previous function
8171 since they are allocated in the temporary obstack.  */
8172 bzero ((char *) table, sizeof table);
8173 free_element_chain = 0;
8174 n_elements_made = 0;
8175
8176 /* Find the largest uid. */
8177
8178 max_uid = get_max_uid ();
8179 uid_cuid = (int *) alloca ((max_uid + 1) * sizeof (int));
8180 bzero ((char *) uid_cuid, (max_uid + 1) * sizeof (int));
8181
8182 /* Compute the mapping from uids to cuids.
8183 CUIDs are numbers assigned to insns, like uids,
8184 except that cuids increase monotonically through the code.
8185 Don't assign cuids to line-number NOTEs, so that the distance in cuids
8186 between two insns is not affected by -g. */
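/* For example, if the insns of a function have uids 4, 9, and 5 in
   textual order, they receive cuids 1, 2, and 3; a line-number note
   between the last two shares cuid 2 with the insn preceding it.  */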
8187
8188 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
8189 {
8190 if (GET_CODE (insn) != NOTE
8191 || NOTE_LINE_NUMBER (insn) < 0)
8192 INSN_CUID (insn) = ++i;
8193 else
8194 /* Give a line number note the same cuid as preceding insn. */
8195 INSN_CUID (insn) = i;
8196 }
8197
8198 /* Initialize which registers are clobbered by calls. */
8199
8200 CLEAR_HARD_REG_SET (regs_invalidated_by_call);
8201
8202 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8203 if ((call_used_regs[i]
8204 /* Used to check !fixed_regs[i] here, but that isn't safe;
8205 fixed regs are still call-clobbered, and sched can get
8206 confused if they can "live across calls".
8207
8208 The frame pointer is always preserved across calls. The arg
8209 pointer is if it is fixed. The stack pointer usually is, unless
8210 RETURN_POPS_ARGS, in which case an explicit CLOBBER
8211 will be present. If we are generating PIC code, the PIC offset
8212 table register is preserved across calls. */
8213
8214 && i != STACK_POINTER_REGNUM
8215 && i != FRAME_POINTER_REGNUM
8216 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
8217 && i != HARD_FRAME_POINTER_REGNUM
8218 #endif
8219 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
8220 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
8221 #endif
8222 #if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
8223 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
8224 #endif
8225 )
8226 || global_regs[i])
8227 SET_HARD_REG_BIT (regs_invalidated_by_call, i);
8228
8229 /* Loop over basic blocks.
8230 Compute the maximum number of qty's needed for each basic block
8231 (which is 2 for each SET). */
8232 insn = f;
8233 while (insn)
8234 {
8235 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
8236 flag_cse_skip_blocks);
8237
8238 /* If this basic block was already processed or has no sets, skip it. */
8239 if (val.nsets == 0 || GET_MODE (insn) == QImode)
8240 {
8241 PUT_MODE (insn, VOIDmode);
8242 insn = (val.last ? NEXT_INSN (val.last) : 0);
8243 val.path_size = 0;
8244 continue;
8245 }
8246
8247 cse_basic_block_start = val.low_cuid;
8248 cse_basic_block_end = val.high_cuid;
8249 max_qty = val.nsets * 2;
8250
8251 if (file)
8252 fprintf (file, ";; Processing block from %d to %d, %d sets.\n",
8253 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
8254 val.nsets);
8255
8256 /* Make MAX_QTY bigger to give us room to optimize
8257 past the end of this basic block, if that should prove useful. */
8258 if (max_qty < 500)
8259 max_qty = 500;
8260
8261 max_qty += max_reg;
8262
8263 /* If this basic block is being extended by following certain jumps
8264 (see `cse_end_of_basic_block'), we reprocess the code from the start.
8265 Otherwise, we start after this basic block. */
8266 if (val.path_size > 0)
8267 cse_basic_block (insn, val.last, val.path, 0);
8268 else
8269 {
8270 int old_cse_jumps_altered = cse_jumps_altered;
8271 rtx temp;
8272
8273 /* When cse changes a conditional jump to an unconditional
8274 jump, we want to reprocess the block, since it will give
8275 us a new branch path to investigate. */
8276 cse_jumps_altered = 0;
8277 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
8278 if (cse_jumps_altered == 0
8279 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
8280 insn = temp;
8281
8282 cse_jumps_altered |= old_cse_jumps_altered;
8283 }
8284
8285 #ifdef USE_C_ALLOCA
8286 alloca (0);
8287 #endif
8288 }
8289
8290 /* Tell refers_to_mem_p that qty_const info is not available. */
8291 qty_const = 0;
8292
8293 if (max_elements_made < n_elements_made)
8294 max_elements_made = n_elements_made;
8295
8296 return cse_jumps_altered;
8297 }
8298
8299 /* Process a single basic block.  FROM and TO are the limits of the basic
8300 block. NEXT_BRANCH points to the branch path when following jumps or
8301 a null path when not following jumps.
8302
8303 AROUND_LOOP is non-zero if we are to try to cse around to the start of a
8304 loop. This is true when we are being called for the last time on a
8305 block and this CSE pass is before loop.c. */
8306
8307 static rtx
8308 cse_basic_block (from, to, next_branch, around_loop)
8309 register rtx from, to;
8310 struct branch_path *next_branch;
8311 int around_loop;
8312 {
8313 register rtx insn;
8314 int to_usage = 0;
8315 int in_libcall_block = 0;
8316
8317 /* Each of these arrays is undefined before max_reg, so only allocate
8318 the space actually needed and adjust the start below. */
8319
8320 qty_first_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8321 qty_last_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8322 qty_mode = (enum machine_mode *) alloca ((max_qty - max_reg) * sizeof (enum machine_mode));
8323 qty_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8324 qty_const_insn = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8325 qty_comparison_code
8326 = (enum rtx_code *) alloca ((max_qty - max_reg) * sizeof (enum rtx_code));
8327 qty_comparison_qty = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8328 qty_comparison_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8329
8330 qty_first_reg -= max_reg;
8331 qty_last_reg -= max_reg;
8332 qty_mode -= max_reg;
8333 qty_const -= max_reg;
8334 qty_const_insn -= max_reg;
8335 qty_comparison_code -= max_reg;
8336 qty_comparison_qty -= max_reg;
8337 qty_comparison_const -= max_reg;
8338
8339 new_basic_block ();
8340
8341 /* TO might be a label. If so, protect it from being deleted. */
8342 if (to != 0 && GET_CODE (to) == CODE_LABEL)
8343 ++LABEL_NUSES (to);
8344
8345 for (insn = from; insn != to; insn = NEXT_INSN (insn))
8346 {
8347 register enum rtx_code code;
8348
8349 /* See if this is a branch that is part of the path. If so, and it is
8350 to be taken, do so. */
8351 if (next_branch->branch == insn)
8352 {
8353 enum taken status = next_branch++->status;
8354 if (status != NOT_TAKEN)
8355 {
8356 if (status == TAKEN)
8357 record_jump_equiv (insn, 1);
8358 else
8359 invalidate_skipped_block (NEXT_INSN (insn));
8360
8361 /* Set the last insn as the jump insn; it doesn't affect cc0.
8362 Then follow this branch. */
8363 #ifdef HAVE_cc0
8364 prev_insn_cc0 = 0;
8365 #endif
8366 prev_insn = insn;
8367 insn = JUMP_LABEL (insn);
8368 continue;
8369 }
8370 }
8371
8372 code = GET_CODE (insn);
8373 if (GET_MODE (insn) == QImode)
8374 PUT_MODE (insn, VOIDmode);
8375
8376 if (GET_RTX_CLASS (code) == 'i')
8377 {
8378 /* Process notes first so we have all notes in canonical forms when
8379 looking for duplicate operations. */
8380
8381 if (REG_NOTES (insn))
8382 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
8383
8384 /* Track when we are inside a LIBCALL block.  Inside such a block,
8385 we do not want to record destinations. The last insn of a
8386 LIBCALL block is not considered to be part of the block, since
8387 its destination is the result of the block and hence should be
8388 recorded. */
8389
8390 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
8391 in_libcall_block = 1;
8392 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
8393 in_libcall_block = 0;
8394
8395 cse_insn (insn, in_libcall_block);
8396 }
8397
8398 /* If INSN is now an unconditional jump, skip to the end of our
8399 basic block by pretending that we just did the last insn in the
8400 basic block. If we are jumping to the end of our block, show
8401 that we can have one usage of TO. */
8402
8403 if (simplejump_p (insn))
8404 {
8405 if (to == 0)
8406 return 0;
8407
8408 if (JUMP_LABEL (insn) == to)
8409 to_usage = 1;
8410
8411 /* Maybe TO was deleted because the jump is unconditional.
8412 If so, there is nothing left in this basic block. */
8413 /* ??? Perhaps it would be smarter to set TO
8414 to whatever follows this insn,
8415 and pretend the basic block had always ended here. */
8416 if (INSN_DELETED_P (to))
8417 break;
8418
8419 insn = PREV_INSN (to);
8420 }
8421
8422 /* See if it is ok to keep on going past the label
8423 which used to end our basic block. Remember that we incremented
8424 the count of that label, so we decrement it here. If we made
8425 a jump unconditional, TO_USAGE will be one; in that case, we don't
8426 want to count the use in that jump. */
8427
8428 if (to != 0 && NEXT_INSN (insn) == to
8429 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
8430 {
8431 struct cse_basic_block_data val;
8432
8433 insn = NEXT_INSN (to);
8434
8435 if (LABEL_NUSES (to) == 0)
8436 delete_insn (to);
8437
8438 /* Find the end of the following block. Note that we won't be
8439 following branches in this case. If TO was the last insn
8440 in the function, we are done. Similarly, if we deleted the
8441 insn after TO, it must have been because it was preceded by
8442 a BARRIER. In that case, we are done with this block because it
8443 has no continuation. */
8444
8445 if (insn == 0 || INSN_DELETED_P (insn))
8446 return 0;
8447
8448 to_usage = 0;
8449 val.path_size = 0;
8450 cse_end_of_basic_block (insn, &val, 0, 0, 0);
8451
8452 /* If the tables we allocated have enough space left
8453 to handle all the SETs in the next basic block,
8454 continue through it. Otherwise, return,
8455 and that block will be scanned individually. */
8456 if (val.nsets * 2 + next_qty > max_qty)
8457 break;
8458
8459 cse_basic_block_start = val.low_cuid;
8460 cse_basic_block_end = val.high_cuid;
8461 to = val.last;
8462
8463 /* Prevent TO from being deleted if it is a label. */
8464 if (to != 0 && GET_CODE (to) == CODE_LABEL)
8465 ++LABEL_NUSES (to);
8466
8467 /* Back up so we process the first insn in the extension. */
8468 insn = PREV_INSN (insn);
8469 }
8470 }
8471
8472 if (next_qty > max_qty)
8473 abort ();
8474
8475 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
8476 the previous insn is the only insn that branches to the head of a loop,
8477 we can cse into the loop. Don't do this if we changed the jump
8478 structure of a loop unless we aren't going to be following jumps. */
8479
8480 if ((cse_jumps_altered == 0
8481 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
8482 && around_loop && to != 0
8483 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
8484 && GET_CODE (PREV_INSN (to)) == JUMP_INSN
8485 && JUMP_LABEL (PREV_INSN (to)) != 0
8486 && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
8487 cse_around_loop (JUMP_LABEL (PREV_INSN (to)));
8488
8489 return to ? NEXT_INSN (to) : 0;
8490 }
8491 \f
8492 /* Count the number of times registers are used (not set) in X.
8493 COUNTS is an array in which we accumulate the count, INCR is how much
8494 we count each register usage.
8495
8496 Don't count a usage of DEST, which is the SET_DEST of a SET which
8497 contains X in its SET_SRC. This is because such a SET does not
8498 modify the liveness of DEST. */
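/* For example, in (set (reg 65) (plus (reg 65) (const_int 1)))
   (hypothetical pseudo) the use of (reg 65) inside the source is not
   counted, so a register that is only ever incremented still appears
   dead.  */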
8499
8500 static void
8501 count_reg_usage (x, counts, dest, incr)
8502 rtx x;
8503 int *counts;
8504 rtx dest;
8505 int incr;
8506 {
8507 enum rtx_code code;
8508 char *fmt;
8509 int i, j;
8510
8511 if (x == 0)
8512 return;
8513
8514 switch (code = GET_CODE (x))
8515 {
8516 case REG:
8517 if (x != dest)
8518 counts[REGNO (x)] += incr;
8519 return;
8520
8521 case PC:
8522 case CC0:
8523 case CONST:
8524 case CONST_INT:
8525 case CONST_DOUBLE:
8526 case SYMBOL_REF:
8527 case LABEL_REF:
8528 case CLOBBER:
8529 return;
8530
8531 case SET:
8532 /* Unless we are setting a REG, count everything in SET_DEST. */
8533 if (GET_CODE (SET_DEST (x)) != REG)
8534 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
8535
8536 /* If SRC has side-effects, then we can't delete this insn, so the
8537 usage of SET_DEST inside SRC counts.
8538
8539 ??? Strictly-speaking, we might be preserving this insn
8540 because some other SET has side-effects, but that's hard
8541 to do and can't happen now. */
8542 count_reg_usage (SET_SRC (x), counts,
8543 side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
8544 incr);
8545 return;
8546
8547 case CALL_INSN:
8548 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
8549
8550 /* ... falls through ... */
8551 case INSN:
8552 case JUMP_INSN:
8553 count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);
8554
8555 /* Things used in a REG_EQUAL note aren't dead since loop may try to
8556 use them. */
8557
8558 count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
8559 return;
8560
8561 case EXPR_LIST:
8562 case INSN_LIST:
8563 if (REG_NOTE_KIND (x) == REG_EQUAL
8564 || GET_CODE (XEXP (x,0)) == USE)
8565 count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
8566 count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
8567 return;
8568 }
8569
8570 fmt = GET_RTX_FORMAT (code);
8571 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8572 {
8573 if (fmt[i] == 'e')
8574 count_reg_usage (XEXP (x, i), counts, dest, incr);
8575 else if (fmt[i] == 'E')
8576 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8577 count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
8578 }
8579 }
8580 \f
8581 /* Scan all the insns and delete any that are dead; i.e., they store a register
8582 that is never used or they copy a register to itself.
8583
8584 This is used to remove insns made obviously dead by cse. It improves the
8585 heuristics in loop since it won't try to move dead invariants out of loops
8586 or make givs for dead quantities. The remaining passes of the compilation
8587 are also sped up. */
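/* E.g., once cse has replaced every use of a hypothetical (reg 70), the
   insn that set it has a use count of zero and is deleted here, as is a
   no-op copy such as (set (reg 71) (reg 71)).  */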
8588
8589 void
8590 delete_dead_from_cse (insns, nreg)
8591 rtx insns;
8592 int nreg;
8593 {
8594 int *counts = (int *) alloca (nreg * sizeof (int));
8595 rtx insn, prev;
8596 rtx tem;
8597 int i;
8598 int in_libcall = 0;
8599
8600 /* First count the number of times each register is used. */
8601 bzero ((char *) counts, sizeof (int) * nreg);
8602 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
8603 count_reg_usage (insn, counts, NULL_RTX, 1);
8604
8605 /* Go from the last insn to the first and delete insns that only set unused
8606 registers or copy a register to itself. As we delete an insn, remove
8607 usage counts for registers it uses. */
8608 for (insn = prev_real_insn (get_last_insn ()); insn; insn = prev)
8609 {
8610 int live_insn = 0;
8611
8612 prev = prev_real_insn (insn);
8613
8614 /* Don't delete any insns that are part of a libcall block.
8615 Flow or loop might get confused if we did that. Remember
8616 that we are scanning backwards. */
8617 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
8618 in_libcall = 1;
8619
8620 if (in_libcall)
8621 live_insn = 1;
8622 else if (GET_CODE (PATTERN (insn)) == SET)
8623 {
8624 if (GET_CODE (SET_DEST (PATTERN (insn))) == REG
8625 && SET_DEST (PATTERN (insn)) == SET_SRC (PATTERN (insn)))
8626 ;
8627
8628 #ifdef HAVE_cc0
8629 else if (GET_CODE (SET_DEST (PATTERN (insn))) == CC0
8630 && ! side_effects_p (SET_SRC (PATTERN (insn)))
8631 && ((tem = next_nonnote_insn (insn)) == 0
8632 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
8633 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
8634 ;
8635 #endif
8636 else if (GET_CODE (SET_DEST (PATTERN (insn))) != REG
8637 || REGNO (SET_DEST (PATTERN (insn))) < FIRST_PSEUDO_REGISTER
8638 || counts[REGNO (SET_DEST (PATTERN (insn)))] != 0
8639 || side_effects_p (SET_SRC (PATTERN (insn))))
8640 live_insn = 1;
8641 }
8642 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
8643 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
8644 {
8645 rtx elt = XVECEXP (PATTERN (insn), 0, i);
8646
8647 if (GET_CODE (elt) == SET)
8648 {
8649 if (GET_CODE (SET_DEST (elt)) == REG
8650 && SET_DEST (elt) == SET_SRC (elt))
8651 ;
8652
8653 #ifdef HAVE_cc0
8654 else if (GET_CODE (SET_DEST (elt)) == CC0
8655 && ! side_effects_p (SET_SRC (elt))
8656 && ((tem = next_nonnote_insn (insn)) == 0
8657 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
8658 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
8659 ;
8660 #endif
8661 else if (GET_CODE (SET_DEST (elt)) != REG
8662 || REGNO (SET_DEST (elt)) < FIRST_PSEUDO_REGISTER
8663 || counts[REGNO (SET_DEST (elt))] != 0
8664 || side_effects_p (SET_SRC (elt)))
8665 live_insn = 1;
8666 }
8667 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
8668 live_insn = 1;
8669 }
8670 else
8671 live_insn = 1;
8672
8673 /* If this is a dead insn, delete it and show registers in it aren't
8674 being used. */
8675
8676 if (! live_insn)
8677 {
8678 count_reg_usage (insn, counts, NULL_RTX, -1);
8679 delete_insn (insn);
8680 }
8681
8682 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
8683 in_libcall = 0;
8684 }
8685 }