1 /* Common subexpression elimination for GNU compiler.
2 Copyright (C) 1987, 88, 89, 92, 93, 1994 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21 #include "config.h"
22 /* Must precede rtl.h for FFS. */
23 #include <stdio.h>
24
25 #include "rtl.h"
26 #include "regs.h"
27 #include "hard-reg-set.h"
28 #include "flags.h"
29 #include "real.h"
30 #include "insn-config.h"
31 #include "recog.h"
32
33 #include <setjmp.h>
34
35 /* The basic idea of common subexpression elimination is to go
36 through the code, keeping a record of expressions that would
37 have the same value at the current scan point, and replacing
38 expressions encountered with the cheapest equivalent expression.
39
40 It is too complicated to keep track of the different possibilities
41 when control paths merge; so, at each label, we forget all that is
42 known and start fresh. This can be described as processing each
43 basic block separately. Note, however, that these are not quite
44 the same as the basic blocks found by a later pass and used for
45 data flow analysis and register packing. We do not need to start fresh
46 after a conditional jump instruction if there is no label there.
47
48 We use two data structures to record the equivalent expressions:
49 a hash table for most expressions, and several vectors together
50 with "quantity numbers" to record equivalent (pseudo) registers.
51
52 The use of the special data structure for registers is desirable
53 because it is faster. It is possible because register references
54 contain a fairly small number, the register number, taken from
55 a contiguously allocated series, and two register references are
56 identical if they have the same number. General expressions
57 do not have any such thing, so the only way to retrieve the
58 information recorded on an expression other than a register
59 is to keep it in a hash table.
60
61 Registers and "quantity numbers":
62
63 At the start of each basic block, all of the (hardware and pseudo)
64 registers used in the function are given distinct quantity
65 numbers to indicate their contents. During scan, when the code
66 copies one register into another, we copy the quantity number.
67 When a register is loaded in any other way, we allocate a new
68 quantity number to describe the value generated by this operation.
69 `reg_qty' records what quantity a register is currently thought
70 of as containing.
71
72 All real quantity numbers are greater than or equal to `max_reg'.
73 If register N has not been assigned a quantity, reg_qty[N] will equal N.
74
75 Quantity numbers below `max_reg' do not exist and none of the `qty_...'
76 variables should be referenced with an index below `max_reg'.
77
78 We also maintain a bidirectional chain of registers for each
79 quantity number. `qty_first_reg', `qty_last_reg',
80 `reg_next_eqv' and `reg_prev_eqv' hold these chains.
81
82 The first register in a chain is the one whose lifespan is least local.
83 Among equals, it is the one that was seen first.
84 We replace any equivalent register with that one.
85
86 If two registers have the same quantity number, it must be true that
87 REG expressions with the quantity's `qty_mode' are in the hash table for
88 both registers and are in the same class.
89
90 The converse is not true. Since hard registers may be referenced in
91 any mode, two REG expressions might be equivalent in the hash table
92 but not have the same quantity number if the quantity of one of the
93 registers does not have the same mode as those expressions.
94
95 Constants and quantity numbers
96
97 When a quantity has a known constant value, that value is stored
98 in the appropriate element of qty_const. This is in addition to
99 putting the constant in the hash table as is usual for non-regs.
100
101 Whether a reg or a constant is preferred is determined by the configuration
102 macro CONST_COSTS and will often depend on the constant value. In any
103 event, expressions containing constants can be simplified by fold_rtx.
104
105 When a quantity has a known nearly constant value (such as an address
106 of a stack slot), that value is stored in the appropriate element
107 of qty_const.
108
109 Integer constants don't have a machine mode. However, cse
110 determines the intended machine mode from the destination
111 of the instruction that moves the constant. The machine mode
112 is recorded in the hash table along with the actual RTL
113 constant expression so that different modes are kept separate.
114
115 Other expressions:
116
117 To record known equivalences among expressions in general
118 we use a hash table called `table'. It has a fixed number of buckets
119 that contain chains of `struct table_elt' elements for expressions.
120 These chains connect the elements whose expressions have the same
121 hash codes.
122
123 Other chains through the same elements connect the elements which
124 currently have equivalent values.
125
126 Register references in an expression are canonicalized before hashing
127 the expression. This is done using `reg_qty' and `qty_first_reg'.
128 The hash code of a register reference is computed using the quantity
129 number, not the register number.
130
131 When the value of an expression changes, it is necessary to remove from the
132 hash table not just that expression but all expressions whose values
133 could be different as a result.
134
135 1. If the value changing is in memory, except in special cases
136 ANYTHING referring to memory could be changed. That is because
137 nobody knows where a pointer does not point.
138 The function `invalidate_memory' removes what is necessary.
139
140 The special cases are when the address is constant or is
141 a constant plus a fixed register such as the frame pointer
142 or a static chain pointer. When such addresses are stored in,
143 we can tell exactly which other such addresses must be invalidated
144 due to overlap. `invalidate' does this.
145 All expressions that refer to non-constant
146 memory addresses are also invalidated. `invalidate_memory' does this.
147
148 2. If the value changing is a register, all expressions
149 containing references to that register, and only those,
150 must be removed.
151
152 Because searching the entire hash table for expressions that contain
153 a register is very slow, we try to figure out when it isn't necessary.
154 Precisely, this is necessary only when expressions have been
155 entered in the hash table using this register, and then the value has
156 changed, and then another expression wants to be added to refer to
157 the register's new value. This sequence of circumstances is rare
158 within any one basic block.
159
160 The vectors `reg_tick' and `reg_in_table' are used to detect this case.
161 reg_tick[i] is incremented whenever a value is stored in register i.
162 reg_in_table[i] holds -1 if no references to register i have been
163 entered in the table; otherwise, it contains the value reg_tick[i] had
164 when the references were entered. If we want to enter a reference
165 and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
166 Until we want to enter a new entry, the mere fact that the two vectors
167 don't match causes the entries to be ignored if anyone tries to match them.
168
169 Registers themselves are entered in the hash table as well as in
170 the equivalent-register chains. However, the vectors `reg_tick'
171 and `reg_in_table' do not apply to expressions which are simple
172 register references. These expressions are removed from the table
173 immediately when they become invalid, and this can be done even if
174 we do not immediately search for all the expressions that refer to
175 the register.
176
177 A CLOBBER rtx in an instruction invalidates its operand for further
178 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
179 invalidates everything that resides in memory.
180
181 Related expressions:
182
183 Constant expressions that differ only by an additive integer
184 are called related. When a constant expression is put in
185 the table, the related expression with no constant term
186 is also entered. These are made to point at each other
187 so that it is possible to find out if there exists any
188 register equivalent to an expression related to a given expression. */
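/* As a rough illustrative sketch of the register mechanism (not part of the
   pass itself; the register numbers are made up): given

	(set (reg 101) (reg 100))
	(set (reg 102) (plus:SI (reg 101) (const_int 4)))
	(set (reg 103) (plus:SI (reg 100) (const_int 4)))

   the copy gives reg 101 the same quantity number as reg 100, so both PLUS
   expressions are canonicalized and hashed using that quantity and meet in
   the same hash table entry; the third insn can then reuse the value already
   computed into reg 102 instead of recomputing the sum.  */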
189
190 /* One plus largest register number used in this function. */
191
192 static int max_reg;
193
194 /* Length of vectors indexed by quantity number.
195 We know in advance we will not need a quantity number this big. */
196
197 static int max_qty;
198
199 /* Next quantity number to be allocated.
200 This is 1 + the largest number needed so far. */
201
202 static int next_qty;
203
204 /* Indexed by quantity number, gives the first (or last) (pseudo) register
205 in the chain of registers that currently contain this quantity. */
206
207 static int *qty_first_reg;
208 static int *qty_last_reg;
209
210 /* Indexed by quantity number, gives the mode of the quantity. */
211
212 static enum machine_mode *qty_mode;
213
214 /* Indexed by quantity number, gives the rtx of the constant value of the
215 quantity, or zero if it does not have a known value.
216 A sum of the frame pointer (or arg pointer) plus a constant
217 can also be entered here. */
218
219 static rtx *qty_const;
220
221 /* Indexed by qty number, gives the insn that stored the constant value
222 recorded in `qty_const'. */
223
224 static rtx *qty_const_insn;
225
226 /* The next three variables are used to track when a comparison between a
227 quantity and some constant or register has been passed. In that case, we
228 know the results of the comparison in case we see it again. These variables
229 record a comparison that is known to be true. */
230
231 /* Indexed by qty number, gives the rtx code of a comparison with a known
232 result involving this quantity. If none, it is UNKNOWN. */
233 static enum rtx_code *qty_comparison_code;
234
235 /* Indexed by qty number, gives the constant being compared against in a
236 comparison of known result. If no such comparison, it is undefined.
237 If the comparison is not with a constant, it is zero. */
238
239 static rtx *qty_comparison_const;
240
241 /* Indexed by qty number, gives the quantity being compared against in a
242 comparison of known result. If no such comparison, it is undefined.
243 If the comparison is not with a register, it is -1. */
244
245 static int *qty_comparison_qty;
246
247 #ifdef HAVE_cc0
248 /* For machines that have a CC0, we do not record its value in the hash
249 table since its use is guaranteed to be the insn immediately following
250 its definition and any other insn is presumed to invalidate it.
251
252 Instead, we store below the value last assigned to CC0. If it should
253 happen to be a constant, it is stored in preference to the actual
254 assigned value. In case it is a constant, we store the mode in which
255 the constant should be interpreted. */
256
257 static rtx prev_insn_cc0;
258 static enum machine_mode prev_insn_cc0_mode;
259 #endif
260
261 /* Previous actual insn. 0 if at first insn of basic block. */
262
263 static rtx prev_insn;
264
265 /* Insn being scanned. */
266
267 static rtx this_insn;
268
269 /* Indexed by (pseudo) register number, gives the quantity number
270 of the register's current contents. */
271
272 static int *reg_qty;
273
274 /* Indexed by (pseudo) register number, gives the number of the next (or
275 previous) (pseudo) register in the chain of registers sharing the same
276 value.
277
278 Or -1 if this register is at the end of the chain.
279
280 If reg_qty[N] == N, reg_next_eqv[N] is undefined. */
281
282 static int *reg_next_eqv;
283 static int *reg_prev_eqv;
284
285 /* Indexed by (pseudo) register number, gives the number of times
286 that register has been altered in the current basic block. */
287
288 static int *reg_tick;
289
290 /* Indexed by (pseudo) register number, gives the reg_tick value at which
291 rtx's containing this register are valid in the hash table.
292 If this does not equal the current reg_tick value, such expressions
293 existing in the hash table are invalid.
294 If this is -1, no expressions containing this register have been
295 entered in the table. */
296
297 static int *reg_in_table;
298
299 /* A HARD_REG_SET containing all the hard registers for which there is
300 currently a REG expression in the hash table. Note the difference
301 from the above variables, which indicate if the REG is mentioned in some
302 expression in the table. */
303
304 static HARD_REG_SET hard_regs_in_table;
305
306 /* A HARD_REG_SET containing all the hard registers that are invalidated
307 by a CALL_INSN. */
308
309 static HARD_REG_SET regs_invalidated_by_call;
310
311 /* Two vectors of ints:
312 one containing max_reg elements, each -1; the other containing
313 max_reg + 500 (an approximation of max_qty) elements, where element i contains i.
314 These are used to initialize various other vectors fast. */
315
316 static int *all_minus_one;
317 static int *consec_ints;
318
319 /* CUID of insn that starts the basic block currently being cse-processed. */
320
321 static int cse_basic_block_start;
322
323 /* CUID of insn that ends the basic block currently being cse-processed. */
324
325 static int cse_basic_block_end;
326
327 /* Vector mapping INSN_UIDs to cuids.
328 The cuids are like uids but always increase monotonically.
329 We use them to see whether a reg is used outside a given basic block. */
330
331 static int *uid_cuid;
332
333 /* Highest UID in UID_CUID. */
334 static int max_uid;
335
336 /* Get the cuid of an insn. */
337
338 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
339
340 /* Nonzero if cse has altered conditional jump insns
341 in such a way that jump optimization should be redone. */
342
343 static int cse_jumps_altered;
344
345 /* canon_hash stores 1 in do_not_record
346 if it notices a reference to CC0, PC, or some other volatile
347 subexpression. */
348
349 static int do_not_record;
350
351 #ifdef LOAD_EXTEND_OP
352
353 /* Scratch rtl used when looking for load-extended copy of a MEM. */
354 static rtx memory_extend_rtx;
355 #endif
356
357 /* canon_hash stores 1 in hash_arg_in_memory
358 if it notices a reference to memory within the expression being hashed. */
359
360 static int hash_arg_in_memory;
361
362 /* canon_hash stores 1 in hash_arg_in_struct
363 if it notices a reference to memory that's part of a structure. */
364
365 static int hash_arg_in_struct;
366
367 /* The hash table contains buckets which are chains of `struct table_elt's,
368 each recording one expression's information.
369 That expression is in the `exp' field.
370
371 Those elements with the same hash code are chained in both directions
372 through the `next_same_hash' and `prev_same_hash' fields.
373
374 Each set of expressions with equivalent values
375 are on a two-way chain through the `next_same_value'
376 and `prev_same_value' fields, and all point with
377 the `first_same_value' field at the first element in
378 that chain. The chain is in order of increasing cost.
379 Each element's cost value is in its `cost' field.
380
381 The `in_memory' field is nonzero for elements that
382 involve any reference to memory. These elements are removed
383 whenever a write is done to an unidentified location in memory.
384 To be safe, we assume that a memory address is unidentified unless
385 the address is either a symbol constant or a constant plus
386 the frame pointer or argument pointer.
387
388 The `in_struct' field is nonzero for elements that
389 involve any reference to memory inside a structure or array.
390
391 The `related_value' field is used to connect related expressions
392 (that differ by adding an integer).
393 The related expressions are chained in a circular fashion.
394 `related_value' is zero for expressions for which this
395 chain is not useful.
396
397 The `cost' field stores the cost of this element's expression.
398
399 The `is_const' flag is set if the element is a constant (including
400 a fixed address).
401
402 The `flag' field is used as a temporary during some search routines.
403
404 The `mode' field is usually the same as GET_MODE (`exp'), but
405 if `exp' is a CONST_INT and has no machine mode then the `mode'
406 field is the mode it was being used as. Each constant is
407 recorded separately for each mode it is used with. */
408
409
410 struct table_elt
411 {
412 rtx exp;
413 struct table_elt *next_same_hash;
414 struct table_elt *prev_same_hash;
415 struct table_elt *next_same_value;
416 struct table_elt *prev_same_value;
417 struct table_elt *first_same_value;
418 struct table_elt *related_value;
419 int cost;
420 enum machine_mode mode;
421 char in_memory;
422 char in_struct;
423 char is_const;
424 char flag;
425 };
426
427 /* We don't want a lot of buckets, because we rarely have very many
428 things stored in the hash table, and a lot of buckets slows
429 down a lot of loops that happen frequently. */
430 #define NBUCKETS 31
431
432 /* Compute hash code of X in mode M. Special-case the case where X is a pseudo
433 register (hard registers may require `do_not_record' to be set). */
434
435 #define HASH(X, M) \
436 (GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER \
437 ? (((unsigned) REG << 7) + (unsigned) reg_qty[REGNO (X)]) % NBUCKETS \
438 : canon_hash (X, M) % NBUCKETS)
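/* Illustrative note on the pseudo-register case above (an observation, not
   an extra rule): since the hash uses reg_qty rather than the register
   number, two pseudos that currently share a quantity always hash into the
   same bucket, which is what lets a lookup of either register land on the
   chain holding the entry made for the other.  */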
439
440 /* Determine whether register number N is considered a fixed register for CSE.
441 It is desirable to replace other regs with fixed regs, to reduce need for
442 non-fixed hard regs.
443 A reg wins if it is either the frame pointer or designated as fixed,
444 but not if it is an overlapping register. */
445 #ifdef OVERLAPPING_REGNO_P
446 #define FIXED_REGNO_P(N) \
447 (((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
448 || fixed_regs[N] || global_regs[N]) \
449 && ! OVERLAPPING_REGNO_P ((N)))
450 #else
451 #define FIXED_REGNO_P(N) \
452 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
453 || fixed_regs[N] || global_regs[N])
454 #endif
455
456 /* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
457 hard registers and pointers into the frame are the cheapest with a cost
458 of 0. Next come pseudos with a cost of one and other hard registers with
459 a cost of 2. Aside from these special cases, call `rtx_cost'. */
460
461 #define CHEAP_REGNO(N) \
462 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
463 || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM \
464 || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
465 || ((N) < FIRST_PSEUDO_REGISTER \
466 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
467
468 /* A register is cheap if it is a user variable assigned to the register
469 or if its register number always corresponds to a cheap register. */
470
471 #define CHEAP_REG(N) \
472 ((REG_USERVAR_P (N) && REGNO (N) < FIRST_PSEUDO_REGISTER) \
473 || CHEAP_REGNO (REGNO (N)))
474
475 #define COST(X) \
476 (GET_CODE (X) == REG \
477 ? (CHEAP_REG (X) ? 0 \
478 : REGNO (X) >= FIRST_PSEUDO_REGISTER ? 1 \
479 : 2) \
480 : rtx_cost (X, SET) * 2)
481
482 /* Determine if the quantity number for register X represents a valid index
483 into the `qty_...' variables. */
484
485 #define REGNO_QTY_VALID_P(N) (reg_qty[N] != (N))
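/* For example, just after new_basic_block has run, reg_qty[N] == N for every
   register, so this test is false everywhere; make_new_qty then hands out
   quantity numbers starting at max_reg, which can never equal a register
   number, so the test becomes true once a register is assigned a quantity.  */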
486
487 static struct table_elt *table[NBUCKETS];
488
489 /* Chain of `struct table_elt's made so far for this function
490 but currently removed from the table. */
491
492 static struct table_elt *free_element_chain;
493
494 /* Number of `struct table_elt' structures made so far for this function. */
495
496 static int n_elements_made;
497
498 /* Maximum value `n_elements_made' has had so far in this compilation
499 for functions previously processed. */
500
501 static int max_elements_made;
502
503 /* Surviving equivalence class when two equivalence classes are merged
504 by recording the effects of a jump in the last insn. Zero if the
505 last insn was not a conditional jump. */
506
507 static struct table_elt *last_jump_equiv_class;
508
509 /* Set to the cost of a constant pool reference if one was found for a
510 symbolic constant. If this was found, it means we should try to
511 convert constants into constant pool entries if they don't fit in
512 the insn. */
513
514 static int constant_pool_entries_cost;
515
516 /* Bits describing what kind of values in memory must be invalidated
517 for a particular instruction. If none of these bits are set,
518 no memory refs need to be invalidated. Each bit is more powerful
519 than the preceding ones, and if a bit is set then the preceding
520 bits are also set.
521
522 Here is how the bits are set:
523 Pushing onto the stack invalidates only the stack pointer,
524 writing at a fixed address invalidates only variable addresses,
525 writing in a structure element at variable address
526 invalidates all but scalar variables,
527 and writing in anything else at variable address invalidates everything. */
528
529 struct write_data
530 {
531 int sp : 1; /* Invalidate stack pointer. */
532 int var : 1; /* Invalidate variable addresses. */
533 int nonscalar : 1; /* Invalidate all but scalar variables. */
534 int all : 1; /* Invalidate all memory refs. */
535 };
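/* For instance, following the description above, a simple push would set
   only `sp', while a store through an unknown pointer would set all four
   bits (if a bit is set, the preceding bits are set as well).  */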
536
537 /* Define maximum length of a branch path. */
538
539 #define PATHLENGTH 10
540
541 /* This data describes a block that will be processed by cse_basic_block. */
542
543 struct cse_basic_block_data {
544 /* Lowest CUID value of insns in block. */
545 int low_cuid;
546 /* Highest CUID value of insns in block. */
547 int high_cuid;
548 /* Total number of SETs in block. */
549 int nsets;
550 /* Last insn in the block. */
551 rtx last;
552 /* Size of current branch path, if any. */
553 int path_size;
554 /* Current branch path, indicating which branches will be taken. */
555 struct branch_path {
556 /* The branch insn. */
557 rtx branch;
558 /* Whether it should be taken or not. AROUND is the same as taken
559 except that it is used when the destination label is not preceded
560 by a BARRIER. */
561 enum taken {TAKEN, NOT_TAKEN, AROUND} status;
562 } path[PATHLENGTH];
563 };
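/* For example, a path_size of 3 means the block being scanned has been
   extended across three conditional branches, each recorded here along with
   the direction (TAKEN, NOT_TAKEN or AROUND) that is being assumed for it.  */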
564
565 /* Nonzero if X has the form (PLUS frame-pointer integer). We check for
566 virtual regs here because the simplify_*_operation routines are called
567 by integrate.c, which is called before virtual register instantiation. */
568
569 #define FIXED_BASE_PLUS_P(X) \
570 ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
571 || (X) == arg_pointer_rtx \
572 || (X) == virtual_stack_vars_rtx \
573 || (X) == virtual_incoming_args_rtx \
574 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
575 && (XEXP (X, 0) == frame_pointer_rtx \
576 || XEXP (X, 0) == hard_frame_pointer_rtx \
577 || XEXP (X, 0) == arg_pointer_rtx \
578 || XEXP (X, 0) == virtual_stack_vars_rtx \
579 || XEXP (X, 0) == virtual_incoming_args_rtx)))
580
581 /* Similar, but also allows reference to the stack pointer.
582
583 This used to include FIXED_BASE_PLUS_P, however, we can't assume that
584 arg_pointer_rtx by itself is nonzero, because on at least one machine,
585 the i960, the arg pointer is zero when it is unused. */
586
587 #define NONZERO_BASE_PLUS_P(X) \
588 ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
589 || (X) == virtual_stack_vars_rtx \
590 || (X) == virtual_incoming_args_rtx \
591 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
592 && (XEXP (X, 0) == frame_pointer_rtx \
593 || XEXP (X, 0) == hard_frame_pointer_rtx \
594 || XEXP (X, 0) == arg_pointer_rtx \
595 || XEXP (X, 0) == virtual_stack_vars_rtx \
596 || XEXP (X, 0) == virtual_incoming_args_rtx)) \
597 || (X) == stack_pointer_rtx \
598 || (X) == virtual_stack_dynamic_rtx \
599 || (X) == virtual_outgoing_args_rtx \
600 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
601 && (XEXP (X, 0) == stack_pointer_rtx \
602 || XEXP (X, 0) == virtual_stack_dynamic_rtx \
603 || XEXP (X, 0) == virtual_outgoing_args_rtx)))
604
605 static void new_basic_block PROTO((void));
606 static void make_new_qty PROTO((int));
607 static void make_regs_eqv PROTO((int, int));
608 static void delete_reg_equiv PROTO((int));
609 static int mention_regs PROTO((rtx));
610 static int insert_regs PROTO((rtx, struct table_elt *, int));
611 static void free_element PROTO((struct table_elt *));
612 static void remove_from_table PROTO((struct table_elt *, unsigned));
613 static struct table_elt *get_element PROTO((void));
614 static struct table_elt *lookup PROTO((rtx, unsigned, enum machine_mode)),
615 *lookup_for_remove PROTO((rtx, unsigned, enum machine_mode));
616 static rtx lookup_as_function PROTO((rtx, enum rtx_code));
617 static struct table_elt *insert PROTO((rtx, struct table_elt *, unsigned,
618 enum machine_mode));
619 static void merge_equiv_classes PROTO((struct table_elt *,
620 struct table_elt *));
621 static void invalidate PROTO((rtx, enum machine_mode));
622 static void remove_invalid_refs PROTO((int));
623 static void rehash_using_reg PROTO((rtx));
624 static void invalidate_memory PROTO((struct write_data *));
625 static void invalidate_for_call PROTO((void));
626 static rtx use_related_value PROTO((rtx, struct table_elt *));
627 static unsigned canon_hash PROTO((rtx, enum machine_mode));
628 static unsigned safe_hash PROTO((rtx, enum machine_mode));
629 static int exp_equiv_p PROTO((rtx, rtx, int, int));
630 static void set_nonvarying_address_components PROTO((rtx, int, rtx *,
631 HOST_WIDE_INT *,
632 HOST_WIDE_INT *));
633 static int refers_to_p PROTO((rtx, rtx));
634 static int refers_to_mem_p PROTO((rtx, rtx, HOST_WIDE_INT,
635 HOST_WIDE_INT));
636 static int cse_rtx_addr_varies_p PROTO((rtx));
637 static rtx canon_reg PROTO((rtx, rtx));
638 static void find_best_addr PROTO((rtx, rtx *));
639 static enum rtx_code find_comparison_args PROTO((enum rtx_code, rtx *, rtx *,
640 enum machine_mode *,
641 enum machine_mode *));
642 static rtx cse_gen_binary PROTO((enum rtx_code, enum machine_mode,
643 rtx, rtx));
644 static rtx simplify_plus_minus PROTO((enum rtx_code, enum machine_mode,
645 rtx, rtx));
646 static rtx fold_rtx PROTO((rtx, rtx));
647 static rtx equiv_constant PROTO((rtx));
648 static void record_jump_equiv PROTO((rtx, int));
649 static void record_jump_cond PROTO((enum rtx_code, enum machine_mode,
650 rtx, rtx, int));
651 static void cse_insn PROTO((rtx, int));
652 static void note_mem_written PROTO((rtx, struct write_data *));
653 static void invalidate_from_clobbers PROTO((struct write_data *, rtx));
654 static rtx cse_process_notes PROTO((rtx, rtx));
655 static void cse_around_loop PROTO((rtx));
656 static void invalidate_skipped_set PROTO((rtx, rtx));
657 static void invalidate_skipped_block PROTO((rtx));
658 static void cse_check_loop_start PROTO((rtx, rtx));
659 static void cse_set_around_loop PROTO((rtx, rtx, rtx));
660 static rtx cse_basic_block PROTO((rtx, rtx, struct branch_path *, int));
661 static void count_reg_usage PROTO((rtx, int *, rtx, int));
662
663 extern int rtx_equal_function_value_matters;
664 \f
665 /* Return an estimate of the cost of computing rtx X.
666 One use is in cse, to decide which expression to keep in the hash table.
667 Another is in rtl generation, to pick the cheapest way to multiply.
668 Other uses like the latter are expected in the future. */
669
670 /* Return the right cost to give to an operation
671 to make the cost of the corresponding register-to-register instruction
672 N times that of a fast register-to-register instruction. */
673
674 #define COSTS_N_INSNS(N) ((N) * 4 - 2)
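/* Worked out, COSTS_N_INSNS (1) == 2, which matches the default `total' used
   below for a simple operation; the default multiply cost COSTS_N_INSNS (5)
   comes to 18 and the divide cost COSTS_N_INSNS (7) to 26.  */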
675
676 int
677 rtx_cost (x, outer_code)
678 rtx x;
679 enum rtx_code outer_code;
680 {
681 register int i, j;
682 register enum rtx_code code;
683 register char *fmt;
684 register int total;
685
686 if (x == 0)
687 return 0;
688
689 /* Compute the default costs of certain things.
690 Note that RTX_COSTS can override the defaults. */
691
692 code = GET_CODE (x);
693 switch (code)
694 {
695 case MULT:
696 /* Count multiplication by 2**n as a shift,
697 because if we are considering it, we would output it as a shift. */
698 if (GET_CODE (XEXP (x, 1)) == CONST_INT
699 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
700 total = 2;
701 else
702 total = COSTS_N_INSNS (5);
703 break;
704 case DIV:
705 case UDIV:
706 case MOD:
707 case UMOD:
708 total = COSTS_N_INSNS (7);
709 break;
710 case USE:
711 /* Used in loop.c and combine.c as a marker. */
712 total = 0;
713 break;
714 case ASM_OPERANDS:
715 /* We don't want these to be used in substitutions because
716 we have no way of validating the resulting insn. So assign
717 anything containing an ASM_OPERANDS a very high cost. */
718 total = 1000;
719 break;
720 default:
721 total = 2;
722 }
723
724 switch (code)
725 {
726 case REG:
727 return ! CHEAP_REG (x);
728
729 case SUBREG:
730 /* If we can't tie these modes, make this expensive. The larger
731 the mode, the more expensive it is. */
732 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
733 return COSTS_N_INSNS (2
734 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
735 return 2;
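/* RTX_COSTS and CONST_COSTS are target-description macros that are expected
   to expand into additional `case' labels (and `return' statements) for this
   switch, so they are reached only for codes not handled above.  */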
736 #ifdef RTX_COSTS
737 RTX_COSTS (x, code, outer_code);
738 #endif
739 CONST_COSTS (x, code, outer_code);
740 }
741
742 /* Sum the costs of the sub-rtx's, plus cost of this operation,
743 which is already in total. */
744
745 fmt = GET_RTX_FORMAT (code);
746 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
747 if (fmt[i] == 'e')
748 total += rtx_cost (XEXP (x, i), code);
749 else if (fmt[i] == 'E')
750 for (j = 0; j < XVECLEN (x, i); j++)
751 total += rtx_cost (XVECEXP (x, i, j), code);
752
753 return total;
754 }
755 \f
756 /* Clear the hash table and initialize each register with its own quantity,
757 for a new basic block. */
758
759 static void
760 new_basic_block ()
761 {
762 register int i;
763
764 next_qty = max_reg;
765
766 bzero ((char *) reg_tick, max_reg * sizeof (int));
767
768 bcopy ((char *) all_minus_one, (char *) reg_in_table,
769 max_reg * sizeof (int));
770 bcopy ((char *) consec_ints, (char *) reg_qty, max_reg * sizeof (int));
771 CLEAR_HARD_REG_SET (hard_regs_in_table);
772
773 /* The per-quantity values used to be initialized here, but it is
774 much faster to initialize each as it is made in `make_new_qty'. */
775
776 for (i = 0; i < NBUCKETS; i++)
777 {
778 register struct table_elt *this, *next;
779 for (this = table[i]; this; this = next)
780 {
781 next = this->next_same_hash;
782 free_element (this);
783 }
784 }
785
786 bzero ((char *) table, sizeof table);
787
788 prev_insn = 0;
789
790 #ifdef HAVE_cc0
791 prev_insn_cc0 = 0;
792 #endif
793 }
794
795 /* Say that register REG contains a quantity that was not previously held
796 in any register, and initialize that quantity. */
797
798 static void
799 make_new_qty (reg)
800 register int reg;
801 {
802 register int q;
803
804 if (next_qty >= max_qty)
805 abort ();
806
807 q = reg_qty[reg] = next_qty++;
808 qty_first_reg[q] = reg;
809 qty_last_reg[q] = reg;
810 qty_const[q] = qty_const_insn[q] = 0;
811 qty_comparison_code[q] = UNKNOWN;
812
813 reg_next_eqv[reg] = reg_prev_eqv[reg] = -1;
814 }
815
816 /* Make reg NEW equivalent to reg OLD.
817 OLD is not changing; NEW is. */
818
819 static void
820 make_regs_eqv (new, old)
821 register int new, old;
822 {
823 register int lastr, firstr;
824 register int q = reg_qty[old];
825
826 /* Nothing should become eqv until it has a "non-invalid" qty number. */
827 if (! REGNO_QTY_VALID_P (old))
828 abort ();
829
830 reg_qty[new] = q;
831 firstr = qty_first_reg[q];
832 lastr = qty_last_reg[q];
833
834 /* Prefer fixed hard registers to anything. Prefer pseudo regs to other
835 hard regs. Among pseudos, if NEW will live longer than any other reg
836 of the same qty, and that is beyond the current basic block,
837 make it the new canonical replacement for this qty. */
838 if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
839 /* Certain fixed registers might be of the class NO_REGS. This means
840 that not only can they not be allocated by the compiler, but
841 they cannot be used in substitutions or canonicalizations
842 either. */
843 && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
844 && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
845 || (new >= FIRST_PSEUDO_REGISTER
846 && (firstr < FIRST_PSEUDO_REGISTER
847 || ((uid_cuid[regno_last_uid[new]] > cse_basic_block_end
848 || (uid_cuid[regno_first_uid[new]]
849 < cse_basic_block_start))
850 && (uid_cuid[regno_last_uid[new]]
851 > uid_cuid[regno_last_uid[firstr]]))))))
852 {
853 reg_prev_eqv[firstr] = new;
854 reg_next_eqv[new] = firstr;
855 reg_prev_eqv[new] = -1;
856 qty_first_reg[q] = new;
857 }
858 else
859 {
860 /* If NEW is a hard reg (known to be non-fixed), insert at end.
861 Otherwise, insert before any non-fixed hard regs that are at the
862 end. Registers of class NO_REGS cannot be used as an
863 equivalent for anything. */
864 while (lastr < FIRST_PSEUDO_REGISTER && reg_prev_eqv[lastr] >= 0
865 && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
866 && new >= FIRST_PSEUDO_REGISTER)
867 lastr = reg_prev_eqv[lastr];
868 reg_next_eqv[new] = reg_next_eqv[lastr];
869 if (reg_next_eqv[lastr] >= 0)
870 reg_prev_eqv[reg_next_eqv[lastr]] = new;
871 else
872 qty_last_reg[q] = new;
873 reg_next_eqv[lastr] = new;
874 reg_prev_eqv[new] = lastr;
875 }
876 }
877
878 /* Remove REG from its equivalence class. */
879
880 static void
881 delete_reg_equiv (reg)
882 register int reg;
883 {
884 register int q = reg_qty[reg];
885 register int p, n;
886
887 /* If invalid, do nothing. */
888 if (q == reg)
889 return;
890
891 p = reg_prev_eqv[reg];
892 n = reg_next_eqv[reg];
893
894 if (n != -1)
895 reg_prev_eqv[n] = p;
896 else
897 qty_last_reg[q] = p;
898 if (p != -1)
899 reg_next_eqv[p] = n;
900 else
901 qty_first_reg[q] = n;
902
903 reg_qty[reg] = reg;
904 }
905
906 /* Remove any invalid expressions from the hash table
907 that refer to any of the registers contained in expression X.
908
909 Make sure that newly inserted references to those registers
910 as subexpressions will be considered valid.
911
912 mention_regs is not called when a register itself
913 is being stored in the table.
914
915 Return 1 if we have done something that may have changed the hash code
916 of X. */
917
918 static int
919 mention_regs (x)
920 rtx x;
921 {
922 register enum rtx_code code;
923 register int i, j;
924 register char *fmt;
925 register int changed = 0;
926
927 if (x == 0)
928 return 0;
929
930 code = GET_CODE (x);
931 if (code == REG)
932 {
933 register int regno = REGNO (x);
934 register int endregno
935 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
936 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
937 int i;
938
939 for (i = regno; i < endregno; i++)
940 {
941 if (reg_in_table[i] >= 0 && reg_in_table[i] != reg_tick[i])
942 remove_invalid_refs (i);
943
944 reg_in_table[i] = reg_tick[i];
945 }
946
947 return 0;
948 }
949
950 /* If X is a comparison or a COMPARE and either operand is a register
951 that does not have a quantity, give it one. This is so that a later
952 call to record_jump_equiv won't cause X to be assigned a different
953 hash code and not found in the table after that call.
954
955 It is not necessary to do this here, since rehash_using_reg can
956 fix up the table later, but doing this here eliminates the need to
957 call that expensive function in the most common case where the only
958 use of the register is in the comparison. */
959
960 if (code == COMPARE || GET_RTX_CLASS (code) == '<')
961 {
962 if (GET_CODE (XEXP (x, 0)) == REG
963 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
964 if (insert_regs (XEXP (x, 0), NULL_PTR, 0))
965 {
966 rehash_using_reg (XEXP (x, 0));
967 changed = 1;
968 }
969
970 if (GET_CODE (XEXP (x, 1)) == REG
971 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
972 if (insert_regs (XEXP (x, 1), NULL_PTR, 0))
973 {
974 rehash_using_reg (XEXP (x, 1));
975 changed = 1;
976 }
977 }
978
979 fmt = GET_RTX_FORMAT (code);
980 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
981 if (fmt[i] == 'e')
982 changed |= mention_regs (XEXP (x, i));
983 else if (fmt[i] == 'E')
984 for (j = 0; j < XVECLEN (x, i); j++)
985 changed |= mention_regs (XVECEXP (x, i, j));
986
987 return changed;
988 }
989
990 /* Update the register quantities for inserting X into the hash table
991 with a value equivalent to CLASSP.
992 (If the class does not contain a REG, it is irrelevant.)
993 If MODIFIED is nonzero, X is a destination; it is being modified.
994 Note that delete_reg_equiv should be called on a register
995 before insert_regs is done on that register with MODIFIED != 0.
996
997 Nonzero value means that elements of reg_qty have changed
998 so X's hash code may be different. */
999
1000 static int
1001 insert_regs (x, classp, modified)
1002 rtx x;
1003 struct table_elt *classp;
1004 int modified;
1005 {
1006 if (GET_CODE (x) == REG)
1007 {
1008 register int regno = REGNO (x);
1009
1010 /* If REGNO is in the equivalence table already but is of the
1011 wrong mode for that equivalence, don't do anything here. */
1012
1013 if (REGNO_QTY_VALID_P (regno)
1014 && qty_mode[reg_qty[regno]] != GET_MODE (x))
1015 return 0;
1016
1017 if (modified || ! REGNO_QTY_VALID_P (regno))
1018 {
1019 if (classp)
1020 for (classp = classp->first_same_value;
1021 classp != 0;
1022 classp = classp->next_same_value)
1023 if (GET_CODE (classp->exp) == REG
1024 && GET_MODE (classp->exp) == GET_MODE (x))
1025 {
1026 make_regs_eqv (regno, REGNO (classp->exp));
1027 return 1;
1028 }
1029
1030 make_new_qty (regno);
1031 qty_mode[reg_qty[regno]] = GET_MODE (x);
1032 return 1;
1033 }
1034
1035 return 0;
1036 }
1037
1038 /* If X is a SUBREG, we will likely be inserting the inner register in the
1039 table. If that register doesn't have an assigned quantity number at
1040 this point but does later, the insertion that we will be doing now will
1041 not be accessible because its hash code will have changed. So assign
1042 a quantity number now. */
1043
1044 else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1045 && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1046 {
1047 insert_regs (SUBREG_REG (x), NULL_PTR, 0);
1048 mention_regs (SUBREG_REG (x));
1049 return 1;
1050 }
1051 else
1052 return mention_regs (x);
1053 }
1054 \f
1055 /* Look in or update the hash table. */
1056
1057 /* Put the element ELT on the list of free elements. */
1058
1059 static void
1060 free_element (elt)
1061 struct table_elt *elt;
1062 {
1063 elt->next_same_hash = free_element_chain;
1064 free_element_chain = elt;
1065 }
1066
1067 /* Return an element that is free for use. */
1068
1069 static struct table_elt *
1070 get_element ()
1071 {
1072 struct table_elt *elt = free_element_chain;
1073 if (elt)
1074 {
1075 free_element_chain = elt->next_same_hash;
1076 return elt;
1077 }
1078 n_elements_made++;
1079 return (struct table_elt *) oballoc (sizeof (struct table_elt));
1080 }
1081
1082 /* Remove table element ELT from use in the table.
1083 HASH is its hash code, made using the HASH macro.
1084 It's an argument because often that is known in advance
1085 and we save much time not recomputing it. */
1086
1087 static void
1088 remove_from_table (elt, hash)
1089 register struct table_elt *elt;
1090 unsigned hash;
1091 {
1092 if (elt == 0)
1093 return;
1094
1095 /* Mark this element as removed. See cse_insn. */
1096 elt->first_same_value = 0;
1097
1098 /* Remove the table element from its equivalence class. */
1099
1100 {
1101 register struct table_elt *prev = elt->prev_same_value;
1102 register struct table_elt *next = elt->next_same_value;
1103
1104 if (next) next->prev_same_value = prev;
1105
1106 if (prev)
1107 prev->next_same_value = next;
1108 else
1109 {
1110 register struct table_elt *newfirst = next;
1111 while (next)
1112 {
1113 next->first_same_value = newfirst;
1114 next = next->next_same_value;
1115 }
1116 }
1117 }
1118
1119 /* Remove the table element from its hash bucket. */
1120
1121 {
1122 register struct table_elt *prev = elt->prev_same_hash;
1123 register struct table_elt *next = elt->next_same_hash;
1124
1125 if (next) next->prev_same_hash = prev;
1126
1127 if (prev)
1128 prev->next_same_hash = next;
1129 else if (table[hash] == elt)
1130 table[hash] = next;
1131 else
1132 {
1133 /* This entry is not in the proper hash bucket. This can happen
1134 when two classes were merged by `merge_equiv_classes'. Search
1135 for the hash bucket that it heads. This happens only very
1136 rarely, so the cost is acceptable. */
1137 for (hash = 0; hash < NBUCKETS; hash++)
1138 if (table[hash] == elt)
1139 table[hash] = next;
1140 }
1141 }
1142
1143 /* Remove the table element from its related-value circular chain. */
1144
1145 if (elt->related_value != 0 && elt->related_value != elt)
1146 {
1147 register struct table_elt *p = elt->related_value;
1148 while (p->related_value != elt)
1149 p = p->related_value;
1150 p->related_value = elt->related_value;
1151 if (p->related_value == p)
1152 p->related_value = 0;
1153 }
1154
1155 free_element (elt);
1156 }
1157
1158 /* Look up X in the hash table and return its table element,
1159 or 0 if X is not in the table.
1160
1161 MODE is the machine-mode of X, or if X is an integer constant
1162 with VOIDmode then MODE is the mode with which X will be used.
1163
1164 Here we are satisfied to find an expression whose tree structure
1165 looks like X. */
1166
1167 static struct table_elt *
1168 lookup (x, hash, mode)
1169 rtx x;
1170 unsigned hash;
1171 enum machine_mode mode;
1172 {
1173 register struct table_elt *p;
1174
1175 for (p = table[hash]; p; p = p->next_same_hash)
1176 if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
1177 || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
1178 return p;
1179
1180 return 0;
1181 }
1182
1183 /* Like `lookup' but don't care whether the table element uses invalid regs.
1184 Also ignore discrepancies in the machine mode of a register. */
1185
1186 static struct table_elt *
1187 lookup_for_remove (x, hash, mode)
1188 rtx x;
1189 unsigned hash;
1190 enum machine_mode mode;
1191 {
1192 register struct table_elt *p;
1193
1194 if (GET_CODE (x) == REG)
1195 {
1196 int regno = REGNO (x);
1197 /* Don't check the machine mode when comparing registers;
1198 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1199 for (p = table[hash]; p; p = p->next_same_hash)
1200 if (GET_CODE (p->exp) == REG
1201 && REGNO (p->exp) == regno)
1202 return p;
1203 }
1204 else
1205 {
1206 for (p = table[hash]; p; p = p->next_same_hash)
1207 if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
1208 return p;
1209 }
1210
1211 return 0;
1212 }
1213
1214 /* Look for an expression equivalent to X and with code CODE.
1215 If one is found, return that expression. */
1216
1217 static rtx
1218 lookup_as_function (x, code)
1219 rtx x;
1220 enum rtx_code code;
1221 {
1222 register struct table_elt *p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS,
1223 GET_MODE (x));
1224 if (p == 0)
1225 return 0;
1226
1227 for (p = p->first_same_value; p; p = p->next_same_value)
1228 {
1229 if (GET_CODE (p->exp) == code
1230 /* Make sure this is a valid entry in the table. */
1231 && exp_equiv_p (p->exp, p->exp, 1, 0))
1232 return p->exp;
1233 }
1234
1235 return 0;
1236 }
1237
1238 /* Insert X in the hash table, assuming HASH is its hash code
1239 and CLASSP is an element of the class it should go in
1240 (or 0 if a new class should be made).
1241 It is inserted at the proper position to keep the class in
1242 the order cheapest first.
1243
1244 MODE is the machine-mode of X, or if X is an integer constant
1245 with VOIDmode then MODE is the mode with which X will be used.
1246
1247 For elements of equal cheapness, the most recent one
1248 goes in front, except that the first element in the list
1249 remains first unless a cheaper element is added. The order of
1250 pseudo-registers does not matter, as canon_reg will be called to
1251 find the cheapest when a register is retrieved from the table.
1252
1253 The in_memory field in the hash table element is set to 0.
1254 The caller must set it nonzero if appropriate.
1255
1256 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1257 and if insert_regs returns a nonzero value
1258 you must then recompute its hash code before calling here.
1259
1260 If necessary, update table showing constant values of quantities. */
1261
1262 #define CHEAPER(X,Y) ((X)->cost < (Y)->cost)
1263
1264 static struct table_elt *
1265 insert (x, classp, hash, mode)
1266 register rtx x;
1267 register struct table_elt *classp;
1268 unsigned hash;
1269 enum machine_mode mode;
1270 {
1271 register struct table_elt *elt;
1272
1273 /* If X is a register and we haven't made a quantity for it,
1274 something is wrong. */
1275 if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
1276 abort ();
1277
1278 /* If X is a hard register, show it is being put in the table. */
1279 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
1280 {
1281 int regno = REGNO (x);
1282 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1283 int i;
1284
1285 for (i = regno; i < endregno; i++)
1286 SET_HARD_REG_BIT (hard_regs_in_table, i);
1287 }
1288
1289
1290 /* Put an element for X into the right hash bucket. */
1291
1292 elt = get_element ();
1293 elt->exp = x;
1294 elt->cost = COST (x);
1295 elt->next_same_value = 0;
1296 elt->prev_same_value = 0;
1297 elt->next_same_hash = table[hash];
1298 elt->prev_same_hash = 0;
1299 elt->related_value = 0;
1300 elt->in_memory = 0;
1301 elt->mode = mode;
1302 elt->is_const = (CONSTANT_P (x)
1303 /* GNU C++ takes advantage of this for `this'
1304 (and other const values). */
1305 || (RTX_UNCHANGING_P (x)
1306 && GET_CODE (x) == REG
1307 && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1308 || FIXED_BASE_PLUS_P (x));
1309
1310 if (table[hash])
1311 table[hash]->prev_same_hash = elt;
1312 table[hash] = elt;
1313
1314 /* Put it into the proper value-class. */
1315 if (classp)
1316 {
1317 classp = classp->first_same_value;
1318 if (CHEAPER (elt, classp))
1319 /* Insert at the head of the class */
1320 {
1321 register struct table_elt *p;
1322 elt->next_same_value = classp;
1323 classp->prev_same_value = elt;
1324 elt->first_same_value = elt;
1325
1326 for (p = classp; p; p = p->next_same_value)
1327 p->first_same_value = elt;
1328 }
1329 else
1330 {
1331 /* Insert not at head of the class. */
1332 /* Put it after the last element cheaper than X. */
1333 register struct table_elt *p, *next;
1334 for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1335 p = next);
1336 /* Put it after P and before NEXT. */
1337 elt->next_same_value = next;
1338 if (next)
1339 next->prev_same_value = elt;
1340 elt->prev_same_value = p;
1341 p->next_same_value = elt;
1342 elt->first_same_value = classp;
1343 }
1344 }
1345 else
1346 elt->first_same_value = elt;
1347
1348 /* If this is a constant being set equivalent to a register or a register
1349 being set equivalent to a constant, note the constant equivalence.
1350
1351 If this is a constant, it cannot be equivalent to a different constant,
1352 and a constant is the only thing that can be cheaper than a register. So
1353 we know the register is the head of the class (before the constant was
1354 inserted).
1355
1356 If this is a register that is not already known equivalent to a
1357 constant, we must check the entire class.
1358
1359 If this is a register that is already known equivalent to an insn,
1360 update `qty_const_insn' to show that `this_insn' is the latest
1361 insn making that quantity equivalent to the constant. */
1362
1363 if (elt->is_const && classp && GET_CODE (classp->exp) == REG)
1364 {
1365 qty_const[reg_qty[REGNO (classp->exp)]]
1366 = gen_lowpart_if_possible (qty_mode[reg_qty[REGNO (classp->exp)]], x);
1367 qty_const_insn[reg_qty[REGNO (classp->exp)]] = this_insn;
1368 }
1369
1370 else if (GET_CODE (x) == REG && classp && ! qty_const[reg_qty[REGNO (x)]])
1371 {
1372 register struct table_elt *p;
1373
1374 for (p = classp; p != 0; p = p->next_same_value)
1375 {
1376 if (p->is_const)
1377 {
1378 qty_const[reg_qty[REGNO (x)]]
1379 = gen_lowpart_if_possible (GET_MODE (x), p->exp);
1380 qty_const_insn[reg_qty[REGNO (x)]] = this_insn;
1381 break;
1382 }
1383 }
1384 }
1385
1386 else if (GET_CODE (x) == REG && qty_const[reg_qty[REGNO (x)]]
1387 && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]])
1388 qty_const_insn[reg_qty[REGNO (x)]] = this_insn;
1389
1390 /* If this is a constant with symbolic value,
1391 and it has a term with an explicit integer value,
1392 link it up with related expressions. */
1393 if (GET_CODE (x) == CONST)
1394 {
1395 rtx subexp = get_related_value (x);
1396 unsigned subhash;
1397 struct table_elt *subelt, *subelt_prev;
1398
1399 if (subexp != 0)
1400 {
1401 /* Get the integer-free subexpression in the hash table. */
1402 subhash = safe_hash (subexp, mode) % NBUCKETS;
1403 subelt = lookup (subexp, subhash, mode);
1404 if (subelt == 0)
1405 subelt = insert (subexp, NULL_PTR, subhash, mode);
1406 /* Initialize SUBELT's circular chain if it has none. */
1407 if (subelt->related_value == 0)
1408 subelt->related_value = subelt;
1409 /* Find the element in the circular chain that precedes SUBELT. */
1410 subelt_prev = subelt;
1411 while (subelt_prev->related_value != subelt)
1412 subelt_prev = subelt_prev->related_value;
1413 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1414 This way the element that follows SUBELT is the oldest one. */
1415 elt->related_value = subelt_prev->related_value;
1416 subelt_prev->related_value = elt;
1417 }
1418 }
1419
1420 return elt;
1421 }
1422 \f
1423 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1424 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1425 the two classes equivalent.
1426
1427 CLASS1 will be the surviving class; CLASS2 should not be used after this
1428 call.
1429
1430 Any invalid entries in CLASS2 will not be copied. */
1431
1432 static void
1433 merge_equiv_classes (class1, class2)
1434 struct table_elt *class1, *class2;
1435 {
1436 struct table_elt *elt, *next, *new;
1437
1438 /* Ensure we start with the head of the classes. */
1439 class1 = class1->first_same_value;
1440 class2 = class2->first_same_value;
1441
1442 /* If they were already equal, forget it. */
1443 if (class1 == class2)
1444 return;
1445
1446 for (elt = class2; elt; elt = next)
1447 {
1448 unsigned hash;
1449 rtx exp = elt->exp;
1450 enum machine_mode mode = elt->mode;
1451
1452 next = elt->next_same_value;
1453
1454 /* Remove old entry, make a new one in CLASS1's class.
1455 Don't do this for invalid entries as we cannot find their
1456 hash code (it also isn't necessary). */
1457 if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
1458 {
1459 hash_arg_in_memory = 0;
1460 hash_arg_in_struct = 0;
1461 hash = HASH (exp, mode);
1462
1463 if (GET_CODE (exp) == REG)
1464 delete_reg_equiv (REGNO (exp));
1465
1466 remove_from_table (elt, hash);
1467
1468 if (insert_regs (exp, class1, 0))
1469 {
1470 rehash_using_reg (exp);
1471 hash = HASH (exp, mode);
1472 }
1473 new = insert (exp, class1, hash, mode);
1474 new->in_memory = hash_arg_in_memory;
1475 new->in_struct = hash_arg_in_struct;
1476 }
1477 }
1478 }
1479 \f
1480 /* Remove from the hash table, or mark as invalid,
1481 all expressions whose values could be altered by storing in X.
1482 X is a register, a subreg, or a memory reference with nonvarying address
1483 (because, when a memory reference with a varying address is stored in,
1484 all memory references are removed by invalidate_memory
1485 so specific invalidation is superfluous).
1486 FULL_MODE, if not VOIDmode, indicates that this much should be invalidated
1487 instead of just the amount indicated by the mode of X. This is only used
1488 for bitfield stores into memory.
1489
1490 A nonvarying address may be just a register or just
1491 a symbol reference, or it may be either of those plus
1492 a numeric offset. */
1493
1494 static void
1495 invalidate (x, full_mode)
1496 rtx x;
1497 enum machine_mode full_mode;
1498 {
1499 register int i;
1500 register struct table_elt *p;
1501 rtx base;
1502 HOST_WIDE_INT start, end;
1503
1504 /* If X is a register, dependencies on its contents
1505 are recorded through the qty number mechanism.
1506 Just change the qty number of the register,
1507 mark it as invalid for expressions that refer to it,
1508 and remove it itself. */
1509
1510 if (GET_CODE (x) == REG)
1511 {
1512 register int regno = REGNO (x);
1513 register unsigned hash = HASH (x, GET_MODE (x));
1514
1515 /* Remove REGNO from any quantity list it might be on and indicate
1516 that its value might have changed. If it is a pseudo, remove its
1517 entry from the hash table.
1518
1519 For a hard register, we do the first two actions above for any
1520 additional hard registers corresponding to X. Then, if any of these
1521 registers are in the table, we must remove any REG entries that
1522 overlap these registers. */
1523
1524 delete_reg_equiv (regno);
1525 reg_tick[regno]++;
1526
1527 if (regno >= FIRST_PSEUDO_REGISTER)
1528 remove_from_table (lookup_for_remove (x, hash, GET_MODE (x)), hash);
1529 else
1530 {
1531 HOST_WIDE_INT in_table
1532 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1533 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1534 int tregno, tendregno;
1535 register struct table_elt *p, *next;
1536
1537 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1538
1539 for (i = regno + 1; i < endregno; i++)
1540 {
1541 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, i);
1542 CLEAR_HARD_REG_BIT (hard_regs_in_table, i);
1543 delete_reg_equiv (i);
1544 reg_tick[i]++;
1545 }
1546
1547 if (in_table)
1548 for (hash = 0; hash < NBUCKETS; hash++)
1549 for (p = table[hash]; p; p = next)
1550 {
1551 next = p->next_same_hash;
1552
1553 if (GET_CODE (p->exp) != REG
1554 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1555 continue;
1556
1557 tregno = REGNO (p->exp);
1558 tendregno
1559 = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1560 if (tendregno > regno && tregno < endregno)
1561 remove_from_table (p, hash);
1562 }
1563 }
1564
1565 return;
1566 }
1567
1568 if (GET_CODE (x) == SUBREG)
1569 {
1570 if (GET_CODE (SUBREG_REG (x)) != REG)
1571 abort ();
1572 invalidate (SUBREG_REG (x), VOIDmode);
1573 return;
1574 }
1575
1576 /* X is not a register; it must be a memory reference with
1577 a nonvarying address. Remove all hash table elements
1578 that refer to overlapping pieces of memory. */
1579
1580 if (GET_CODE (x) != MEM)
1581 abort ();
1582
1583 if (full_mode == VOIDmode)
1584 full_mode = GET_MODE (x);
1585
1586 set_nonvarying_address_components (XEXP (x, 0), GET_MODE_SIZE (full_mode),
1587 &base, &start, &end);
1588
1589 for (i = 0; i < NBUCKETS; i++)
1590 {
1591 register struct table_elt *next;
1592 for (p = table[i]; p; p = next)
1593 {
1594 next = p->next_same_hash;
1595 if (refers_to_mem_p (p->exp, base, start, end))
1596 remove_from_table (p, i);
1597 }
1598 }
1599 }
1600
1601 /* Remove all expressions that refer to register REGNO,
1602 since they are already invalid, and we are about to
1603 mark that register valid again and don't want the old
1604 expressions to reappear as valid. */
1605
1606 static void
1607 remove_invalid_refs (regno)
1608 int regno;
1609 {
1610 register int i;
1611 register struct table_elt *p, *next;
1612
1613 for (i = 0; i < NBUCKETS; i++)
1614 for (p = table[i]; p; p = next)
1615 {
1616 next = p->next_same_hash;
1617 if (GET_CODE (p->exp) != REG
1618 && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
1619 remove_from_table (p, i);
1620 }
1621 }
1622 \f
1623 /* Recompute the hash codes of any valid entries in the hash table that
1624 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1625
1626 This is called when we make a jump equivalence. */
1627
1628 static void
1629 rehash_using_reg (x)
1630 rtx x;
1631 {
1632 int i;
1633 struct table_elt *p, *next;
1634 unsigned hash;
1635
1636 if (GET_CODE (x) == SUBREG)
1637 x = SUBREG_REG (x);
1638
1639 /* If X is not a register or if the register is known not to be in any
1640 valid entries in the table, we have no work to do. */
1641
1642 if (GET_CODE (x) != REG
1643 || reg_in_table[REGNO (x)] < 0
1644 || reg_in_table[REGNO (x)] != reg_tick[REGNO (x)])
1645 return;
1646
1647 /* Scan all hash chains looking for valid entries that mention X.
1648 If we find one and it is in the wrong hash chain, move it. We can skip
1649 objects that are registers, since they are handled specially. */
1650
1651 for (i = 0; i < NBUCKETS; i++)
1652 for (p = table[i]; p; p = next)
1653 {
1654 next = p->next_same_hash;
1655 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
1656 && exp_equiv_p (p->exp, p->exp, 1, 0)
1657 && i != (hash = safe_hash (p->exp, p->mode) % NBUCKETS))
1658 {
1659 if (p->next_same_hash)
1660 p->next_same_hash->prev_same_hash = p->prev_same_hash;
1661
1662 if (p->prev_same_hash)
1663 p->prev_same_hash->next_same_hash = p->next_same_hash;
1664 else
1665 table[i] = p->next_same_hash;
1666
1667 p->next_same_hash = table[hash];
1668 p->prev_same_hash = 0;
1669 if (table[hash])
1670 table[hash]->prev_same_hash = p;
1671 table[hash] = p;
1672 }
1673 }
1674 }
1675 \f
1676 /* Remove from the hash table all expressions that reference memory,
1677 or some of them as specified by *WRITES. */
1678
1679 static void
1680 invalidate_memory (writes)
1681 struct write_data *writes;
1682 {
1683 register int i;
1684 register struct table_elt *p, *next;
1685 int all = writes->all;
1686 int nonscalar = writes->nonscalar;
1687
1688 for (i = 0; i < NBUCKETS; i++)
1689 for (p = table[i]; p; p = next)
1690 {
1691 next = p->next_same_hash;
1692 if (p->in_memory
1693 && (all
1694 || (nonscalar && p->in_struct)
1695 || cse_rtx_addr_varies_p (p->exp)))
1696 remove_from_table (p, i);
1697 }
1698 }
1699 \f
1700 /* Remove from the hash table any expressions that are call-clobbered
1701    registers. Also update their TICK values. */
1702
1703 static void
1704 invalidate_for_call ()
1705 {
1706 int regno, endregno;
1707 int i;
1708 unsigned hash;
1709 struct table_elt *p, *next;
1710 int in_table = 0;
1711
1712 /* Go through all the hard registers. For each that is clobbered in
1713 a CALL_INSN, remove the register from quantity chains and update
1714 reg_tick if defined. Also see if any of these registers is currently
1715 in the table. */
1716
1717 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1718 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1719 {
1720 delete_reg_equiv (regno);
1721 if (reg_tick[regno] >= 0)
1722 reg_tick[regno]++;
1723
1724 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
1725 }
1726
1727 /* In the case where we have no call-clobbered hard registers in the
1728 table, we are done. Otherwise, scan the table and remove any
1729 entry that overlaps a call-clobbered register. */
1730
1731 if (in_table)
1732 for (hash = 0; hash < NBUCKETS; hash++)
1733 for (p = table[hash]; p; p = next)
1734 {
1735 next = p->next_same_hash;
1736
1737 if (GET_CODE (p->exp) != REG
1738 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1739 continue;
1740
1741 regno = REGNO (p->exp);
1742 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
1743
1744 for (i = regno; i < endregno; i++)
1745 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1746 {
1747 remove_from_table (p, hash);
1748 break;
1749 }
1750 }
1751 }
1752 \f
1753 /* Given an expression X of type CONST,
1754 and ELT which is its table entry (or 0 if it
1755 is not in the hash table),
1756 return an alternate expression for X as a register plus integer.
1757 If none can be found, return 0. */
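/* Illustrative example (register number and symbol are hypothetical):
   if X is (const (plus (symbol_ref "tbl") (const_int 12))) and the table
   already records that (reg:SI 77) holds
   (const (plus (symbol_ref "tbl") (const_int 4))), following the
   related-value chain finds that class and we return
   (plus:SI (reg:SI 77) (const_int 8)).  */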
1758
1759 static rtx
1760 use_related_value (x, elt)
1761 rtx x;
1762 struct table_elt *elt;
1763 {
1764 register struct table_elt *relt = 0;
1765 register struct table_elt *p, *q;
1766 HOST_WIDE_INT offset;
1767
1768 /* First, is there anything related known?
1769 If we have a table element, we can tell from that.
1770 Otherwise, must look it up. */
1771
1772 if (elt != 0 && elt->related_value != 0)
1773 relt = elt;
1774 else if (elt == 0 && GET_CODE (x) == CONST)
1775 {
1776 rtx subexp = get_related_value (x);
1777 if (subexp != 0)
1778 relt = lookup (subexp,
1779 safe_hash (subexp, GET_MODE (subexp)) % NBUCKETS,
1780 GET_MODE (subexp));
1781 }
1782
1783 if (relt == 0)
1784 return 0;
1785
1786 /* Search all related table entries for one that has an
1787 equivalent register. */
1788
1789 p = relt;
1790 while (1)
1791 {
1792 /* This loop is strange in that it is executed in two different cases.
1793 The first is when X is already in the table. Then it is searching
1794 the RELATED_VALUE list of X's class (RELT). The second case is when
1795 X is not in the table. Then RELT points to a class for the related
1796 value.
1797
1798 Ensure that, whatever case we are in, we ignore classes that have
1799 the same value as X. */
1800
1801 if (rtx_equal_p (x, p->exp))
1802 q = 0;
1803 else
1804 for (q = p->first_same_value; q; q = q->next_same_value)
1805 if (GET_CODE (q->exp) == REG)
1806 break;
1807
1808 if (q)
1809 break;
1810
1811 p = p->related_value;
1812
1813 /* We went all the way around, so there is nothing to be found.
1814 Alternatively, perhaps RELT was in the table for some other reason
1815 and it has no related values recorded. */
1816 if (p == relt || p == 0)
1817 break;
1818 }
1819
1820 if (q == 0)
1821 return 0;
1822
1823 offset = (get_integer_term (x) - get_integer_term (p->exp));
1824 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
1825 return plus_constant (q->exp, offset);
1826 }
1827 \f
1828 /* Hash an rtx. We are careful to make sure the value is never negative.
1829 Equivalent registers hash identically.
1830 MODE is used in hashing for CONST_INTs only;
1831 otherwise the mode of X is used.
1832
1833 Store 1 in do_not_record if any subexpression is volatile.
1834
1835 Store 1 in hash_arg_in_memory if X contains a MEM rtx
1836 which does not have the RTX_UNCHANGING_P bit set.
1837 In this case, also store 1 in hash_arg_in_struct
1838 if there is a MEM rtx which has the MEM_IN_STRUCT_P bit set.
1839
1840 Note that cse_insn knows that the hash code of a MEM expression
1841 is just (int) MEM plus the hash code of the address. */
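/* A worked example (purely illustrative): hashing
   (plus:SI (reg:SI 65) (const_int 4)) accumulates
     (unsigned) PLUS + (unsigned) SImode        for the PLUS itself,
     ((unsigned) REG << 7) + reg_qty[65]        for the register, and
     ((unsigned) CONST_INT << 7) + 4            for the constant,
   so two registers in the same quantity class hash identically.  */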
1842
1843 static unsigned
1844 canon_hash (x, mode)
1845 rtx x;
1846 enum machine_mode mode;
1847 {
1848 register int i, j;
1849 register unsigned hash = 0;
1850 register enum rtx_code code;
1851 register char *fmt;
1852
1853 /* repeat is used to turn tail-recursion into iteration. */
1854 repeat:
1855 if (x == 0)
1856 return hash;
1857
1858 code = GET_CODE (x);
1859 switch (code)
1860 {
1861 case REG:
1862 {
1863 register int regno = REGNO (x);
1864
1865 /* On some machines, we can't record any non-fixed hard register,
1866 because extending its life will cause reload problems. We
1867 consider ap, fp, and sp to be fixed for this purpose.
1868 On all machines, we can't record any global registers. */
1869
1870 if (regno < FIRST_PSEUDO_REGISTER
1871 && (global_regs[regno]
1872 #ifdef SMALL_REGISTER_CLASSES
1873 || (! fixed_regs[regno]
1874 && regno != FRAME_POINTER_REGNUM
1875 && regno != HARD_FRAME_POINTER_REGNUM
1876 && regno != ARG_POINTER_REGNUM
1877 && regno != STACK_POINTER_REGNUM)
1878 #endif
1879 ))
1880 {
1881 do_not_record = 1;
1882 return 0;
1883 }
1884 hash += ((unsigned) REG << 7) + (unsigned) reg_qty[regno];
1885 return hash;
1886 }
1887
1888 case CONST_INT:
1889 {
1890 unsigned HOST_WIDE_INT tem = INTVAL (x);
1891 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
1892 return hash;
1893 }
1894
1895 case CONST_DOUBLE:
1896 /* This is like the general case, except that it only counts
1897 the integers representing the constant. */
1898 hash += (unsigned) code + (unsigned) GET_MODE (x);
1899 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
1900 {
1901 unsigned tem = XINT (x, i);
1902 hash += tem;
1903 }
1904 return hash;
1905
1906 /* Assume there is only one rtx object for any given label. */
1907 case LABEL_REF:
1908 hash
1909 += ((unsigned) LABEL_REF << 7) + (unsigned HOST_WIDE_INT) XEXP (x, 0);
1910 return hash;
1911
1912 case SYMBOL_REF:
1913 hash
1914 += ((unsigned) SYMBOL_REF << 7) + (unsigned HOST_WIDE_INT) XSTR (x, 0);
1915 return hash;
1916
1917 case MEM:
1918 if (MEM_VOLATILE_P (x))
1919 {
1920 do_not_record = 1;
1921 return 0;
1922 }
1923 if (! RTX_UNCHANGING_P (x))
1924 {
1925 hash_arg_in_memory = 1;
1926 if (MEM_IN_STRUCT_P (x)) hash_arg_in_struct = 1;
1927 }
1928 /* Now that we have already found this special case,
1929 might as well speed it up as much as possible. */
1930 hash += (unsigned) MEM;
1931 x = XEXP (x, 0);
1932 goto repeat;
1933
1934 case PRE_DEC:
1935 case PRE_INC:
1936 case POST_DEC:
1937 case POST_INC:
1938 case PC:
1939 case CC0:
1940 case CALL:
1941 case UNSPEC_VOLATILE:
1942 do_not_record = 1;
1943 return 0;
1944
1945 case ASM_OPERANDS:
1946 if (MEM_VOLATILE_P (x))
1947 {
1948 do_not_record = 1;
1949 return 0;
1950 }
1951 }
1952
1953 i = GET_RTX_LENGTH (code) - 1;
1954 hash += (unsigned) code + (unsigned) GET_MODE (x);
1955 fmt = GET_RTX_FORMAT (code);
1956 for (; i >= 0; i--)
1957 {
1958 if (fmt[i] == 'e')
1959 {
1960 rtx tem = XEXP (x, i);
1961
1962 /* If we are about to do the last recursive call
1963 needed at this level, change it into iteration.
1964 This function is called enough to be worth it. */
1965 if (i == 0)
1966 {
1967 x = tem;
1968 goto repeat;
1969 }
1970 hash += canon_hash (tem, 0);
1971 }
1972 else if (fmt[i] == 'E')
1973 for (j = 0; j < XVECLEN (x, i); j++)
1974 hash += canon_hash (XVECEXP (x, i, j), 0);
1975 else if (fmt[i] == 's')
1976 {
1977 register unsigned char *p = (unsigned char *) XSTR (x, i);
1978 if (p)
1979 while (*p)
1980 hash += *p++;
1981 }
1982 else if (fmt[i] == 'i')
1983 {
1984 register unsigned tem = XINT (x, i);
1985 hash += tem;
1986 }
1987 else
1988 abort ();
1989 }
1990 return hash;
1991 }
1992
1993 /* Like canon_hash but with no side effects. */
1994
1995 static unsigned
1996 safe_hash (x, mode)
1997 rtx x;
1998 enum machine_mode mode;
1999 {
2000 int save_do_not_record = do_not_record;
2001 int save_hash_arg_in_memory = hash_arg_in_memory;
2002 int save_hash_arg_in_struct = hash_arg_in_struct;
2003 unsigned hash = canon_hash (x, mode);
2004 hash_arg_in_memory = save_hash_arg_in_memory;
2005 hash_arg_in_struct = save_hash_arg_in_struct;
2006 do_not_record = save_do_not_record;
2007 return hash;
2008 }
2009 \f
2010 /* Return 1 iff X and Y would canonicalize into the same thing,
2011 without actually constructing the canonicalization of either one.
2012 If VALIDATE is nonzero,
2013 we assume X is an expression being processed from the rtl
2014 and Y was found in the hash table. We check register refs
2015 in Y for being marked as valid.
2016
2017 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2018 that is known to be in the register. Ordinarily, we don't allow them
2019 to match, because letting them match would cause unpredictable results
2020 in all the places that search a hash table chain for an equivalent
2021 for a given value. A possible equivalent that has different structure
2022 has its hash code computed from different data. Whether the hash code
2023    is the same as that of the given value is pure luck. */
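/* For instance (register number hypothetical), with EQUAL_VALUES nonzero
   (const_int 5) is considered equivalent to (reg:SI 80) when reg 80's
   quantity has qty_mode == SImode and qty_const == (const_int 5); with
   VALIDATE nonzero we additionally require reg_in_table[80] == reg_tick[80]
   so that stale table entries never match.  */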
2024
2025 static int
2026 exp_equiv_p (x, y, validate, equal_values)
2027 rtx x, y;
2028 int validate;
2029 int equal_values;
2030 {
2031 register int i, j;
2032 register enum rtx_code code;
2033 register char *fmt;
2034
2035 /* Note: it is incorrect to assume an expression is equivalent to itself
2036 if VALIDATE is nonzero. */
2037 if (x == y && !validate)
2038 return 1;
2039 if (x == 0 || y == 0)
2040 return x == y;
2041
2042 code = GET_CODE (x);
2043 if (code != GET_CODE (y))
2044 {
2045 if (!equal_values)
2046 return 0;
2047
2048 /* If X is a constant and Y is a register or vice versa, they may be
2049 equivalent. We only have to validate if Y is a register. */
2050 if (CONSTANT_P (x) && GET_CODE (y) == REG
2051 && REGNO_QTY_VALID_P (REGNO (y))
2052 && GET_MODE (y) == qty_mode[reg_qty[REGNO (y)]]
2053 && rtx_equal_p (x, qty_const[reg_qty[REGNO (y)]])
2054 && (! validate || reg_in_table[REGNO (y)] == reg_tick[REGNO (y)]))
2055 return 1;
2056
2057 if (CONSTANT_P (y) && code == REG
2058 && REGNO_QTY_VALID_P (REGNO (x))
2059 && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]]
2060 && rtx_equal_p (y, qty_const[reg_qty[REGNO (x)]]))
2061 return 1;
2062
2063 return 0;
2064 }
2065
2066 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2067 if (GET_MODE (x) != GET_MODE (y))
2068 return 0;
2069
2070 switch (code)
2071 {
2072 case PC:
2073 case CC0:
2074 return x == y;
2075
2076 case CONST_INT:
2077 return INTVAL (x) == INTVAL (y);
2078
2079 case LABEL_REF:
2080 return XEXP (x, 0) == XEXP (y, 0);
2081
2082 case SYMBOL_REF:
2083 return XSTR (x, 0) == XSTR (y, 0);
2084
2085 case REG:
2086 {
2087 int regno = REGNO (y);
2088 int endregno
2089 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2090 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2091 int i;
2092
2093 /* If the quantities are not the same, the expressions are not
2094 	 equivalent.  If they are and we are not to validate, they
2095 are equivalent. Otherwise, ensure all regs are up-to-date. */
2096
2097 if (reg_qty[REGNO (x)] != reg_qty[regno])
2098 return 0;
2099
2100 if (! validate)
2101 return 1;
2102
2103 for (i = regno; i < endregno; i++)
2104 if (reg_in_table[i] != reg_tick[i])
2105 return 0;
2106
2107 return 1;
2108 }
2109
2110 /* For commutative operations, check both orders. */
2111 case PLUS:
2112 case MULT:
2113 case AND:
2114 case IOR:
2115 case XOR:
2116 case NE:
2117 case EQ:
2118 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2119 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2120 validate, equal_values))
2121 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2122 validate, equal_values)
2123 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2124 validate, equal_values)));
2125 }
2126
2127 /* Compare the elements. If any pair of corresponding elements
2128    fails to match, return 0 for the whole thing. */
2129
2130 fmt = GET_RTX_FORMAT (code);
2131 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2132 {
2133 switch (fmt[i])
2134 {
2135 case 'e':
2136 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2137 return 0;
2138 break;
2139
2140 case 'E':
2141 if (XVECLEN (x, i) != XVECLEN (y, i))
2142 return 0;
2143 for (j = 0; j < XVECLEN (x, i); j++)
2144 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2145 validate, equal_values))
2146 return 0;
2147 break;
2148
2149 case 's':
2150 if (strcmp (XSTR (x, i), XSTR (y, i)))
2151 return 0;
2152 break;
2153
2154 case 'i':
2155 if (XINT (x, i) != XINT (y, i))
2156 return 0;
2157 break;
2158
2159 case 'w':
2160 if (XWINT (x, i) != XWINT (y, i))
2161 return 0;
2162 break;
2163
2164 case '0':
2165 break;
2166
2167 default:
2168 abort ();
2169 }
2170 }
2171
2172 return 1;
2173 }
2174 \f
2175 /* Return 1 iff any subexpression of X matches Y.
2176 Here we do not require that X or Y be valid (for registers referred to)
2177 for being in the hash table. */
2178
2179 static int
2180 refers_to_p (x, y)
2181 rtx x, y;
2182 {
2183 register int i;
2184 register enum rtx_code code;
2185 register char *fmt;
2186
2187 repeat:
2188 if (x == y)
2189 return 1;
2190 if (x == 0 || y == 0)
2191 return 0;
2192
2193 code = GET_CODE (x);
2194 /* If X as a whole has the same code as Y, they may match.
2195 If so, return 1. */
2196 if (code == GET_CODE (y))
2197 {
2198 if (exp_equiv_p (x, y, 0, 1))
2199 return 1;
2200 }
2201
2202 /* X does not match, so try its subexpressions. */
2203
2204 fmt = GET_RTX_FORMAT (code);
2205 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2206 if (fmt[i] == 'e')
2207 {
2208 if (i == 0)
2209 {
2210 x = XEXP (x, 0);
2211 goto repeat;
2212 }
2213 else
2214 if (refers_to_p (XEXP (x, i), y))
2215 return 1;
2216 }
2217 else if (fmt[i] == 'E')
2218 {
2219 int j;
2220 for (j = 0; j < XVECLEN (x, i); j++)
2221 if (refers_to_p (XVECEXP (x, i, j), y))
2222 return 1;
2223 }
2224
2225 return 0;
2226 }
2227 \f
2228 /* Given ADDR and SIZE (a memory address, and the size of the memory reference),
2229 set PBASE, PSTART, and PEND which correspond to the base of the address,
2230 the starting offset, and ending offset respectively.
2231
2232 ADDR is known to be a nonvarying address. */
2233
2234 /* ??? Despite what the comments say, this function is in fact frequently
2235 passed varying addresses. This does not appear to cause any problems. */
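/* Example (symbol name hypothetical): for ADDR
   (plus (symbol_ref "buf") (const_int 8)) and SIZE 4, the loop strips the
   constant term, giving *PBASE = (symbol_ref "buf"), *PSTART = 8 and
   *PEND = 12.  A register base whose quantity has a known constant value
   is first replaced by that constant, so such addresses decompose the
   same way during the cse pass.  */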
2236
2237 static void
2238 set_nonvarying_address_components (addr, size, pbase, pstart, pend)
2239 rtx addr;
2240 int size;
2241 rtx *pbase;
2242 HOST_WIDE_INT *pstart, *pend;
2243 {
2244 rtx base;
2245 HOST_WIDE_INT start, end;
2246
2247 base = addr;
2248 start = 0;
2249 end = 0;
2250
2251 /* Registers with nonvarying addresses usually have constant equivalents;
2252 but the frame pointer register is also possible. */
2253 if (GET_CODE (base) == REG
2254 && qty_const != 0
2255 && REGNO_QTY_VALID_P (REGNO (base))
2256 && qty_mode[reg_qty[REGNO (base)]] == GET_MODE (base)
2257 && qty_const[reg_qty[REGNO (base)]] != 0)
2258 base = qty_const[reg_qty[REGNO (base)]];
2259 else if (GET_CODE (base) == PLUS
2260 && GET_CODE (XEXP (base, 1)) == CONST_INT
2261 && GET_CODE (XEXP (base, 0)) == REG
2262 && qty_const != 0
2263 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
2264 && (qty_mode[reg_qty[REGNO (XEXP (base, 0))]]
2265 == GET_MODE (XEXP (base, 0)))
2266 && qty_const[reg_qty[REGNO (XEXP (base, 0))]])
2267 {
2268 start = INTVAL (XEXP (base, 1));
2269 base = qty_const[reg_qty[REGNO (XEXP (base, 0))]];
2270 }
2271
2272 /* Handle everything that we can find inside an address that has been
2273 viewed as constant. */
2274
2275 while (1)
2276 {
2277 /* If no part of this switch does a "continue", the code outside
2278 will exit this loop. */
2279
2280 switch (GET_CODE (base))
2281 {
2282 case LO_SUM:
2283 /* By definition, operand1 of a LO_SUM is the associated constant
2284 address. Use the associated constant address as the base
2285 instead. */
2286 base = XEXP (base, 1);
2287 continue;
2288
2289 case CONST:
2290 /* Strip off CONST. */
2291 base = XEXP (base, 0);
2292 continue;
2293
2294 case PLUS:
2295 if (GET_CODE (XEXP (base, 1)) == CONST_INT)
2296 {
2297 start += INTVAL (XEXP (base, 1));
2298 base = XEXP (base, 0);
2299 continue;
2300 }
2301 break;
2302
2303 case AND:
2304 /* Handle the case of an AND which is the negative of a power of
2305 two. This is used to represent unaligned memory operations. */
2306 if (GET_CODE (XEXP (base, 1)) == CONST_INT
2307 && exact_log2 (- INTVAL (XEXP (base, 1))) > 0)
2308 {
2309 set_nonvarying_address_components (XEXP (base, 0), size,
2310 pbase, pstart, pend);
2311
2312 /* Assume the worst misalignment. START is affected, but not
2313 	     END, so compensate by adjusting SIZE.  Don't lose any
2314 constant we already had. */
2315
2316 size = *pend - *pstart - INTVAL (XEXP (base, 1)) - 1;
2317 start += *pstart - INTVAL (XEXP (base, 1)) - 1;
2318 base = *pbase;
2319 }
2320 break;
2321 }
2322
2323 break;
2324 }
2325
2326 if (GET_CODE (base) == CONST_INT)
2327 {
2328 start += INTVAL (base);
2329 base = const0_rtx;
2330 }
2331
2332 end = start + size;
2333
2334 /* Set the return values. */
2335 *pbase = base;
2336 *pstart = start;
2337 *pend = end;
2338 }
2339
2340 /* Return 1 iff any subexpression of X refers to memory
2341 at an address of BASE plus some offset
2342 such that any of the bytes' offsets fall between START (inclusive)
2343 and END (exclusive).
2344
2345 The value is undefined if X is a varying address (as determined by
2346 cse_rtx_addr_varies_p). This function is not used in such cases.
2347
2348 When used in the cse pass, `qty_const' is nonzero, and it is used
2349 to treat an address that is a register with a known constant value
2350 as if it were that constant value.
2351 In the loop pass, `qty_const' is zero, so this is not done. */
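/* The overlap test below reduces to interval intersection: after the MEM
   inside X is decomposed the same way, it conflicts iff its base is
   rtx_equal_p to BASE and its byte range intersects [START, END).  For
   example, with equal bases a 4-byte reference at offset 8 (bytes 8..11)
   overlaps the range [10, 14) but not [12, 16).  */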
2352
2353 static int
2354 refers_to_mem_p (x, base, start, end)
2355 rtx x, base;
2356 HOST_WIDE_INT start, end;
2357 {
2358 register HOST_WIDE_INT i;
2359 register enum rtx_code code;
2360 register char *fmt;
2361
2362 repeat:
2363 if (x == 0)
2364 return 0;
2365
2366 code = GET_CODE (x);
2367 if (code == MEM)
2368 {
2369 register rtx addr = XEXP (x, 0); /* Get the address. */
2370 rtx mybase;
2371 HOST_WIDE_INT mystart, myend;
2372
2373 set_nonvarying_address_components (addr, GET_MODE_SIZE (GET_MODE (x)),
2374 &mybase, &mystart, &myend);
2375
2376
2377 /* refers_to_mem_p is never called with varying addresses.
2378 If the base addresses are not equal, there is no chance
2379 of the memory addresses conflicting. */
2380 if (! rtx_equal_p (mybase, base))
2381 return 0;
2382
2383 return myend > start && mystart < end;
2384 }
2385
2386 /* X does not match, so try its subexpressions. */
2387
2388 fmt = GET_RTX_FORMAT (code);
2389 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2390 if (fmt[i] == 'e')
2391 {
2392 if (i == 0)
2393 {
2394 x = XEXP (x, 0);
2395 goto repeat;
2396 }
2397 else
2398 if (refers_to_mem_p (XEXP (x, i), base, start, end))
2399 return 1;
2400 }
2401 else if (fmt[i] == 'E')
2402 {
2403 int j;
2404 for (j = 0; j < XVECLEN (x, i); j++)
2405 if (refers_to_mem_p (XVECEXP (x, i, j), base, start, end))
2406 return 1;
2407 }
2408
2409 return 0;
2410 }
2411
2412 /* Nonzero if X refers to memory at a varying address;
2413 except that a register which has at the moment a known constant value
2414 isn't considered variable. */
2415
2416 static int
2417 cse_rtx_addr_varies_p (x)
2418 rtx x;
2419 {
2420 /* We need not check for X and the equivalence class being of the same
2421 mode because if X is equivalent to a constant in some mode, it
2422 doesn't vary in any mode. */
2423
2424 if (GET_CODE (x) == MEM
2425 && GET_CODE (XEXP (x, 0)) == REG
2426 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2427 && GET_MODE (XEXP (x, 0)) == qty_mode[reg_qty[REGNO (XEXP (x, 0))]]
2428 && qty_const[reg_qty[REGNO (XEXP (x, 0))]] != 0)
2429 return 0;
2430
2431 if (GET_CODE (x) == MEM
2432 && GET_CODE (XEXP (x, 0)) == PLUS
2433 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2434 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2435 && REGNO_QTY_VALID_P (REGNO (XEXP (XEXP (x, 0), 0)))
2436 && (GET_MODE (XEXP (XEXP (x, 0), 0))
2437 == qty_mode[reg_qty[REGNO (XEXP (XEXP (x, 0), 0))]])
2438 && qty_const[reg_qty[REGNO (XEXP (XEXP (x, 0), 0))]])
2439 return 0;
2440
2441 return rtx_addr_varies_p (x);
2442 }
2443 \f
2444 /* Canonicalize an expression:
2445 replace each register reference inside it
2446 with the "oldest" equivalent register.
2447
2448 If INSN is non-zero and we are replacing a pseudo with a hard register
2449 or vice versa, validate_change is used to ensure that INSN remains valid
2450 after we make our substitution. The calls are made with IN_GROUP non-zero
2451 so apply_change_group must be called upon the outermost return from this
2452 function (unless INSN is zero). The result of apply_change_group can
2453 generally be discarded since the changes we are making are optional. */
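/* Sketch of the effect (register numbers hypothetical): if pseudos 101
   and 105 are in the same quantity and 101 is the oldest (qty_first_reg),
   every (reg:SI 105) inside X becomes (reg:SI 101).  Hard registers are
   never replaced, and when a pseudo would be swapped for a hard register
   (or vice versa) with INSN nonzero, the change goes through
   validate_change so INSN stays recognizable.  */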
2454
2455 static rtx
2456 canon_reg (x, insn)
2457 rtx x;
2458 rtx insn;
2459 {
2460 register int i;
2461 register enum rtx_code code;
2462 register char *fmt;
2463
2464 if (x == 0)
2465 return x;
2466
2467 code = GET_CODE (x);
2468 switch (code)
2469 {
2470 case PC:
2471 case CC0:
2472 case CONST:
2473 case CONST_INT:
2474 case CONST_DOUBLE:
2475 case SYMBOL_REF:
2476 case LABEL_REF:
2477 case ADDR_VEC:
2478 case ADDR_DIFF_VEC:
2479 return x;
2480
2481 case REG:
2482 {
2483 register int first;
2484
2485 /* Never replace a hard reg, because hard regs can appear
2486 in more than one machine mode, and we must preserve the mode
2487 of each occurrence. Also, some hard regs appear in
2488 MEMs that are shared and mustn't be altered. Don't try to
2489 replace any reg that maps to a reg of class NO_REGS. */
2490 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2491 || ! REGNO_QTY_VALID_P (REGNO (x)))
2492 return x;
2493
2494 first = qty_first_reg[reg_qty[REGNO (x)]];
2495 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2496 : REGNO_REG_CLASS (first) == NO_REGS ? x
2497 : gen_rtx (REG, qty_mode[reg_qty[REGNO (x)]], first));
2498 }
2499 }
2500
2501 fmt = GET_RTX_FORMAT (code);
2502 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2503 {
2504 register int j;
2505
2506 if (fmt[i] == 'e')
2507 {
2508 rtx new = canon_reg (XEXP (x, i), insn);
2509
2510 /* If replacing pseudo with hard reg or vice versa, ensure the
2511 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2512 if (insn != 0 && new != 0
2513 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2514 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2515 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2516 || insn_n_dups[recog_memoized (insn)] > 0))
2517 validate_change (insn, &XEXP (x, i), new, 1);
2518 else
2519 XEXP (x, i) = new;
2520 }
2521 else if (fmt[i] == 'E')
2522 for (j = 0; j < XVECLEN (x, i); j++)
2523 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2524 }
2525
2526 return x;
2527 }
2528 \f
2529 /* LOC is a location with INSN that is an operand address (the contents of
2530 a MEM). Find the best equivalent address to use that is valid for this
2531 insn.
2532
2533 On most CISC machines, complicated address modes are costly, and rtx_cost
2534 is a good approximation for that cost. However, most RISC machines have
2535 only a few (usually only one) memory reference formats. If an address is
2536 valid at all, it is often just as cheap as any other address. Hence, for
2537 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2538 costs of various addresses. For two addresses of equal cost, choose the one
2539 with the highest `rtx_cost' value as that has the potential of eliminating
2540 the most insns. For equal costs, we choose the first in the equivalence
2541 class. Note that we ignore the fact that pseudo registers are cheaper
2542 than hard registers here because we would also prefer the pseudo registers.
2543 */
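/* Illustration (register numbers hypothetical): suppose *LOC is (reg 70)
   and its class also contains (plus (reg 65) (const_int 4)).  On a target
   whose ADDRESS_COST rates both addresses equally, the PLUS is preferred
   because of its higher rtx_cost: once the address no longer uses reg 70,
   the insn that computed it may become dead.  */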
2544
2545 static void
2546 find_best_addr (insn, loc)
2547 rtx insn;
2548 rtx *loc;
2549 {
2550 struct table_elt *elt, *p;
2551 rtx addr = *loc;
2552 int our_cost;
2553 int found_better = 1;
2554 int save_do_not_record = do_not_record;
2555 int save_hash_arg_in_memory = hash_arg_in_memory;
2556 int save_hash_arg_in_struct = hash_arg_in_struct;
2557 int addr_volatile;
2558 int regno;
2559 unsigned hash;
2560
2561 /* Do not try to replace constant addresses or addresses of local and
2562 argument slots. These MEM expressions are made only once and inserted
2563 in many instructions, as well as being used to control symbol table
2564 output. It is not safe to clobber them.
2565
2566 There are some uncommon cases where the address is already in a register
2567 for some reason, but we cannot take advantage of that because we have
2568 no easy way to unshare the MEM. In addition, looking up all stack
2569 addresses is costly. */
2570 if ((GET_CODE (addr) == PLUS
2571 && GET_CODE (XEXP (addr, 0)) == REG
2572 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2573 && (regno = REGNO (XEXP (addr, 0)),
2574 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2575 || regno == ARG_POINTER_REGNUM))
2576 || (GET_CODE (addr) == REG
2577 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2578 || regno == HARD_FRAME_POINTER_REGNUM
2579 || regno == ARG_POINTER_REGNUM))
2580 || CONSTANT_ADDRESS_P (addr))
2581 return;
2582
2583 /* If this address is not simply a register, try to fold it. This will
2584 sometimes simplify the expression. Many simplifications
2585 will not be valid, but some, usually applying the associative rule, will
2586 be valid and produce better code. */
2587 if (GET_CODE (addr) != REG
2588 && validate_change (insn, loc, fold_rtx (addr, insn), 0))
2589 addr = *loc;
2590
2591 /* If this address is not in the hash table, we can't look for equivalences
2592 of the whole address. Also, ignore if volatile. */
2593
2594 do_not_record = 0;
2595 hash = HASH (addr, Pmode);
2596 addr_volatile = do_not_record;
2597 do_not_record = save_do_not_record;
2598 hash_arg_in_memory = save_hash_arg_in_memory;
2599 hash_arg_in_struct = save_hash_arg_in_struct;
2600
2601 if (addr_volatile)
2602 return;
2603
2604 elt = lookup (addr, hash, Pmode);
2605
2606 #ifndef ADDRESS_COST
2607 if (elt)
2608 {
2609 our_cost = elt->cost;
2610
2611 /* Find the lowest cost below ours that works. */
2612 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2613 if (elt->cost < our_cost
2614 && (GET_CODE (elt->exp) == REG
2615 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2616 && validate_change (insn, loc,
2617 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2618 return;
2619 }
2620 #else
2621
2622 if (elt)
2623 {
2624 /* We need to find the best (under the criteria documented above) entry
2625 in the class that is valid. We use the `flag' field to indicate
2626 choices that were invalid and iterate until we can't find a better
2627 one that hasn't already been tried. */
2628
2629 for (p = elt->first_same_value; p; p = p->next_same_value)
2630 p->flag = 0;
2631
2632 while (found_better)
2633 {
2634 int best_addr_cost = ADDRESS_COST (*loc);
2635 int best_rtx_cost = (elt->cost + 1) >> 1;
2636 struct table_elt *best_elt = elt;
2637
2638 found_better = 0;
2639 for (p = elt->first_same_value; p; p = p->next_same_value)
2640 if (! p->flag
2641 && (GET_CODE (p->exp) == REG
2642 || exp_equiv_p (p->exp, p->exp, 1, 0))
2643 && (ADDRESS_COST (p->exp) < best_addr_cost
2644 || (ADDRESS_COST (p->exp) == best_addr_cost
2645 && (p->cost + 1) >> 1 > best_rtx_cost)))
2646 {
2647 found_better = 1;
2648 best_addr_cost = ADDRESS_COST (p->exp);
2649 best_rtx_cost = (p->cost + 1) >> 1;
2650 best_elt = p;
2651 }
2652
2653 if (found_better)
2654 {
2655 if (validate_change (insn, loc,
2656 canon_reg (copy_rtx (best_elt->exp),
2657 NULL_RTX), 0))
2658 return;
2659 else
2660 best_elt->flag = 1;
2661 }
2662 }
2663 }
2664
2665 /* If the address is a binary operation with the first operand a register
2666 and the second a constant, do the same as above, but looking for
2667 equivalences of the register. Then try to simplify before checking for
2668 the best address to use. This catches a few cases: First is when we
2669 have REG+const and the register is another REG+const. We can often merge
2670 the constants and eliminate one insn and one register. It may also be
2671 that a machine has a cheap REG+REG+const. Finally, this improves the
2672 code on the Alpha for unaligned byte stores. */
2673
2674 if (flag_expensive_optimizations
2675 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
2676 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
2677 && GET_CODE (XEXP (*loc, 0)) == REG
2678 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
2679 {
2680 rtx c = XEXP (*loc, 1);
2681
2682 do_not_record = 0;
2683 hash = HASH (XEXP (*loc, 0), Pmode);
2684 do_not_record = save_do_not_record;
2685 hash_arg_in_memory = save_hash_arg_in_memory;
2686 hash_arg_in_struct = save_hash_arg_in_struct;
2687
2688 elt = lookup (XEXP (*loc, 0), hash, Pmode);
2689 if (elt == 0)
2690 return;
2691
2692 /* We need to find the best (under the criteria documented above) entry
2693 in the class that is valid. We use the `flag' field to indicate
2694 choices that were invalid and iterate until we can't find a better
2695 one that hasn't already been tried. */
2696
2697 for (p = elt->first_same_value; p; p = p->next_same_value)
2698 p->flag = 0;
2699
2700 while (found_better)
2701 {
2702 int best_addr_cost = ADDRESS_COST (*loc);
2703 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2704 struct table_elt *best_elt = elt;
2705 rtx best_rtx = *loc;
2706 int count;
2707
2708 /* This is at worst case an O(n^2) algorithm, so limit our search
2709 to the first 32 elements on the list. This avoids trouble
2710 compiling code with very long basic blocks that can easily
2711 call cse_gen_binary so many times that we run out of memory. */
2712
2713 found_better = 0;
2714 for (p = elt->first_same_value, count = 0;
2715 p && count < 32;
2716 p = p->next_same_value, count++)
2717 if (! p->flag
2718 && (GET_CODE (p->exp) == REG
2719 || exp_equiv_p (p->exp, p->exp, 1, 0)))
2720 {
2721 rtx new = cse_gen_binary (GET_CODE (*loc), Pmode, p->exp, c);
2722
2723 if ((ADDRESS_COST (new) < best_addr_cost
2724 || (ADDRESS_COST (new) == best_addr_cost
2725 && (COST (new) + 1) >> 1 > best_rtx_cost)))
2726 {
2727 found_better = 1;
2728 best_addr_cost = ADDRESS_COST (new);
2729 best_rtx_cost = (COST (new) + 1) >> 1;
2730 best_elt = p;
2731 best_rtx = new;
2732 }
2733 }
2734
2735 if (found_better)
2736 {
2737 if (validate_change (insn, loc,
2738 canon_reg (copy_rtx (best_rtx),
2739 NULL_RTX), 0))
2740 return;
2741 else
2742 best_elt->flag = 1;
2743 }
2744 }
2745 }
2746 #endif
2747 }
2748 \f
2749 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
2750 operation (EQ, NE, GT, etc.), follow it back through the hash table and
2751    find what values are being compared.
2752
2753 *PARG1 and *PARG2 are updated to contain the rtx representing the values
2754 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
2755 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2756 compared to produce cc0.
2757
2758 The return value is the comparison operator and is either the code of
2759 A or the code corresponding to the inverse of the comparison. */
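/* Example (register numbers hypothetical): if ARG1 is a pseudo whose
   equivalence class contains (gt:SI (reg 66) (reg 67)) (a flag value
   stored by an earlier comparison), then for CODE == NE we return GT
   with *PARG1 = (reg 66) and *PARG2 = (reg 67); for CODE == EQ the
   comparison is reversed, yielding LE.  */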
2760
2761 static enum rtx_code
2762 find_comparison_args (code, parg1, parg2, pmode1, pmode2)
2763 enum rtx_code code;
2764 rtx *parg1, *parg2;
2765 enum machine_mode *pmode1, *pmode2;
2766 {
2767 rtx arg1, arg2;
2768
2769 arg1 = *parg1, arg2 = *parg2;
2770
2771 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
2772
2773 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2774 {
2775 /* Set non-zero when we find something of interest. */
2776 rtx x = 0;
2777 int reverse_code = 0;
2778 struct table_elt *p = 0;
2779
2780 /* If arg1 is a COMPARE, extract the comparison arguments from it.
2781 On machines with CC0, this is the only case that can occur, since
2782 fold_rtx will return the COMPARE or item being compared with zero
2783 when given CC0. */
2784
2785 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2786 x = arg1;
2787
2788 /* If ARG1 is a comparison operator and CODE is testing for
2789 STORE_FLAG_VALUE, get the inner arguments. */
2790
2791 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
2792 {
2793 if (code == NE
2794 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2795 && code == LT && STORE_FLAG_VALUE == -1)
2796 #ifdef FLOAT_STORE_FLAG_VALUE
2797 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2798 && FLOAT_STORE_FLAG_VALUE < 0)
2799 #endif
2800 )
2801 x = arg1;
2802 else if (code == EQ
2803 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2804 && code == GE && STORE_FLAG_VALUE == -1)
2805 #ifdef FLOAT_STORE_FLAG_VALUE
2806 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2807 && FLOAT_STORE_FLAG_VALUE < 0)
2808 #endif
2809 )
2810 x = arg1, reverse_code = 1;
2811 }
2812
2813 /* ??? We could also check for
2814
2815 (ne (and (eq (...) (const_int 1))) (const_int 0))
2816
2817 and related forms, but let's wait until we see them occurring. */
2818
2819 if (x == 0)
2820 /* Look up ARG1 in the hash table and see if it has an equivalence
2821 that lets us see what is being compared. */
2822 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) % NBUCKETS,
2823 GET_MODE (arg1));
2824 if (p) p = p->first_same_value;
2825
2826 for (; p; p = p->next_same_value)
2827 {
2828 enum machine_mode inner_mode = GET_MODE (p->exp);
2829
2830 /* If the entry isn't valid, skip it. */
2831 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
2832 continue;
2833
2834 if (GET_CODE (p->exp) == COMPARE
2835 /* Another possibility is that this machine has a compare insn
2836 that includes the comparison code. In that case, ARG1 would
2837 be equivalent to a comparison operation that would set ARG1 to
2838 either STORE_FLAG_VALUE or zero. If this is an NE operation,
2839 ORIG_CODE is the actual comparison being done; if it is an EQ,
2840 	     we must reverse ORIG_CODE.  On machines with a negative value
2841 for STORE_FLAG_VALUE, also look at LT and GE operations. */
2842 || ((code == NE
2843 || (code == LT
2844 && GET_MODE_CLASS (inner_mode) == MODE_INT
2845 && (GET_MODE_BITSIZE (inner_mode)
2846 <= HOST_BITS_PER_WIDE_INT)
2847 && (STORE_FLAG_VALUE
2848 & ((HOST_WIDE_INT) 1
2849 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2850 #ifdef FLOAT_STORE_FLAG_VALUE
2851 || (code == LT
2852 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2853 && FLOAT_STORE_FLAG_VALUE < 0)
2854 #endif
2855 )
2856 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
2857 {
2858 x = p->exp;
2859 break;
2860 }
2861 else if ((code == EQ
2862 || (code == GE
2863 && GET_MODE_CLASS (inner_mode) == MODE_INT
2864 && (GET_MODE_BITSIZE (inner_mode)
2865 <= HOST_BITS_PER_WIDE_INT)
2866 && (STORE_FLAG_VALUE
2867 & ((HOST_WIDE_INT) 1
2868 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2869 #ifdef FLOAT_STORE_FLAG_VALUE
2870 || (code == GE
2871 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2872 && FLOAT_STORE_FLAG_VALUE < 0)
2873 #endif
2874 )
2875 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
2876 {
2877 reverse_code = 1;
2878 x = p->exp;
2879 break;
2880 }
2881
2882 /* If this is fp + constant, the equivalent is a better operand since
2883 it may let us predict the value of the comparison. */
2884 else if (NONZERO_BASE_PLUS_P (p->exp))
2885 {
2886 arg1 = p->exp;
2887 continue;
2888 }
2889 }
2890
2891 /* If we didn't find a useful equivalence for ARG1, we are done.
2892 Otherwise, set up for the next iteration. */
2893 if (x == 0)
2894 break;
2895
2896 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
2897 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
2898 code = GET_CODE (x);
2899
2900 if (reverse_code)
2901 code = reverse_condition (code);
2902 }
2903
2904 /* Return our results. Return the modes from before fold_rtx
2905 because fold_rtx might produce const_int, and then it's too late. */
2906 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
2907 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
2908
2909 return code;
2910 }
2911 \f
2912 /* Try to simplify a unary operation CODE whose output mode is to be
2913 MODE with input operand OP whose mode was originally OP_MODE.
2914 Return zero if no simplification can be made. */
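/* A couple of illustrative foldings handled below: (neg:SI (const_int 7))
   becomes (const_int -7); (zero_extend:SI (const_int -1)) with OP_MODE
   QImode becomes (const_int 255); and even without constant operands
   (not (not X)) and (neg (neg X)) simplify to X.  */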
2915
2916 rtx
2917 simplify_unary_operation (code, mode, op, op_mode)
2918 enum rtx_code code;
2919 enum machine_mode mode;
2920 rtx op;
2921 enum machine_mode op_mode;
2922 {
2923 register int width = GET_MODE_BITSIZE (mode);
2924
2925 /* The order of these tests is critical so that, for example, we don't
2926 check the wrong mode (input vs. output) for a conversion operation,
2927 such as FIX. At some point, this should be simplified. */
2928
2929 #if !defined(REAL_IS_NOT_DOUBLE) || defined(REAL_ARITHMETIC)
2930
2931 if (code == FLOAT && GET_MODE (op) == VOIDmode
2932 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
2933 {
2934 HOST_WIDE_INT hv, lv;
2935 REAL_VALUE_TYPE d;
2936
2937 if (GET_CODE (op) == CONST_INT)
2938 lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
2939 else
2940 lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
2941
2942 #ifdef REAL_ARITHMETIC
2943 REAL_VALUE_FROM_INT (d, lv, hv);
2944 #else
2945 if (hv < 0)
2946 {
2947 d = (double) (~ hv);
2948 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
2949 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
2950 d += (double) (unsigned HOST_WIDE_INT) (~ lv);
2951 d = (- d - 1.0);
2952 }
2953 else
2954 {
2955 d = (double) hv;
2956 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
2957 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
2958 d += (double) (unsigned HOST_WIDE_INT) lv;
2959 }
2960 #endif /* REAL_ARITHMETIC */
2961
2962 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
2963 }
2964 else if (code == UNSIGNED_FLOAT && GET_MODE (op) == VOIDmode
2965 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
2966 {
2967 HOST_WIDE_INT hv, lv;
2968 REAL_VALUE_TYPE d;
2969
2970 if (GET_CODE (op) == CONST_INT)
2971 lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
2972 else
2973 lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
2974
2975 if (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT * 2)
2976 ;
2977 else
2978 hv = 0, lv &= GET_MODE_MASK (op_mode);
2979
2980 #ifdef REAL_ARITHMETIC
2981 REAL_VALUE_FROM_UNSIGNED_INT (d, lv, hv);
2982 #else
2983
2984 d = (double) (unsigned HOST_WIDE_INT) hv;
2985 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
2986 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
2987 d += (double) (unsigned HOST_WIDE_INT) lv;
2988 #endif /* REAL_ARITHMETIC */
2989
2990 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
2991 }
2992 #endif
2993
2994 if (GET_CODE (op) == CONST_INT
2995 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
2996 {
2997 register HOST_WIDE_INT arg0 = INTVAL (op);
2998 register HOST_WIDE_INT val;
2999
3000 switch (code)
3001 {
3002 case NOT:
3003 val = ~ arg0;
3004 break;
3005
3006 case NEG:
3007 val = - arg0;
3008 break;
3009
3010 case ABS:
3011 val = (arg0 >= 0 ? arg0 : - arg0);
3012 break;
3013
3014 case FFS:
3015 /* Don't use ffs here. Instead, get low order bit and then its
3016 number. If arg0 is zero, this will return 0, as desired. */
3017 arg0 &= GET_MODE_MASK (mode);
3018 val = exact_log2 (arg0 & (- arg0)) + 1;
3019 break;
3020
3021 case TRUNCATE:
3022 val = arg0;
3023 break;
3024
3025 case ZERO_EXTEND:
3026 if (op_mode == VOIDmode)
3027 op_mode = mode;
3028 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
3029 {
3030 /* If we were really extending the mode,
3031 we would have to distinguish between zero-extension
3032 and sign-extension. */
3033 if (width != GET_MODE_BITSIZE (op_mode))
3034 abort ();
3035 val = arg0;
3036 }
3037 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3038 val = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3039 else
3040 return 0;
3041 break;
3042
3043 case SIGN_EXTEND:
3044 if (op_mode == VOIDmode)
3045 op_mode = mode;
3046 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
3047 {
3048 /* If we were really extending the mode,
3049 we would have to distinguish between zero-extension
3050 and sign-extension. */
3051 if (width != GET_MODE_BITSIZE (op_mode))
3052 abort ();
3053 val = arg0;
3054 }
3055 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3056 {
3057 val
3058 = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3059 if (val
3060 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (op_mode) - 1)))
3061 val -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3062 }
3063 else
3064 return 0;
3065 break;
3066
3067 case SQRT:
3068 return 0;
3069
3070 default:
3071 abort ();
3072 }
3073
3074 /* Clear the bits that don't belong in our mode,
3075 unless they and our sign bit are all one.
3076 So we get either a reasonable negative value or a reasonable
3077 unsigned value for this mode. */
3078 if (width < HOST_BITS_PER_WIDE_INT
3079 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3080 != ((HOST_WIDE_INT) (-1) << (width - 1))))
3081 	val &= ((HOST_WIDE_INT) 1 << width) - 1;
3082
3083 return GEN_INT (val);
3084 }
3085
3086 /* We can do some operations on integer CONST_DOUBLEs. Also allow
3087 for a DImode operation on a CONST_INT. */
3088 else if (GET_MODE (op) == VOIDmode && width <= HOST_BITS_PER_INT * 2
3089 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3090 {
3091 HOST_WIDE_INT l1, h1, lv, hv;
3092
3093 if (GET_CODE (op) == CONST_DOUBLE)
3094 l1 = CONST_DOUBLE_LOW (op), h1 = CONST_DOUBLE_HIGH (op);
3095 else
3096 l1 = INTVAL (op), h1 = l1 < 0 ? -1 : 0;
3097
3098 switch (code)
3099 {
3100 case NOT:
3101 lv = ~ l1;
3102 hv = ~ h1;
3103 break;
3104
3105 case NEG:
3106 neg_double (l1, h1, &lv, &hv);
3107 break;
3108
3109 case ABS:
3110 if (h1 < 0)
3111 neg_double (l1, h1, &lv, &hv);
3112 else
3113 lv = l1, hv = h1;
3114 break;
3115
3116 case FFS:
3117 hv = 0;
3118 if (l1 == 0)
3119 lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & (-h1)) + 1;
3120 else
3121 lv = exact_log2 (l1 & (-l1)) + 1;
3122 break;
3123
3124 case TRUNCATE:
3125 /* This is just a change-of-mode, so do nothing. */
3126 break;
3127
3128 case ZERO_EXTEND:
3129 if (op_mode == VOIDmode
3130 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3131 return 0;
3132
3133 hv = 0;
3134 lv = l1 & GET_MODE_MASK (op_mode);
3135 break;
3136
3137 case SIGN_EXTEND:
3138 if (op_mode == VOIDmode
3139 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3140 return 0;
3141 else
3142 {
3143 lv = l1 & GET_MODE_MASK (op_mode);
3144 if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
3145 && (lv & ((HOST_WIDE_INT) 1
3146 << (GET_MODE_BITSIZE (op_mode) - 1))) != 0)
3147 lv -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3148
3149 hv = (lv < 0) ? ~ (HOST_WIDE_INT) 0 : 0;
3150 }
3151 break;
3152
3153 case SQRT:
3154 return 0;
3155
3156 default:
3157 return 0;
3158 }
3159
3160 return immed_double_const (lv, hv, mode);
3161 }
3162
3163 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3164 else if (GET_CODE (op) == CONST_DOUBLE
3165 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3166 {
3167 REAL_VALUE_TYPE d;
3168 jmp_buf handler;
3169 rtx x;
3170
3171 if (setjmp (handler))
3172 /* There used to be a warning here, but that is inadvisable.
3173 People may want to cause traps, and the natural way
3174 to do it should not get a warning. */
3175 return 0;
3176
3177 set_float_handler (handler);
3178
3179 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3180
3181 switch (code)
3182 {
3183 case NEG:
3184 d = REAL_VALUE_NEGATE (d);
3185 break;
3186
3187 case ABS:
3188 if (REAL_VALUE_NEGATIVE (d))
3189 d = REAL_VALUE_NEGATE (d);
3190 break;
3191
3192 case FLOAT_TRUNCATE:
3193 d = real_value_truncate (mode, d);
3194 break;
3195
3196 case FLOAT_EXTEND:
3197 /* All this does is change the mode. */
3198 break;
3199
3200 case FIX:
3201 d = REAL_VALUE_RNDZINT (d);
3202 break;
3203
3204 case UNSIGNED_FIX:
3205 d = REAL_VALUE_UNSIGNED_RNDZINT (d);
3206 break;
3207
3208 case SQRT:
3209 return 0;
3210
3211 default:
3212 abort ();
3213 }
3214
3215 x = CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3216 set_float_handler (NULL_PTR);
3217 return x;
3218 }
3219
3220 else if (GET_CODE (op) == CONST_DOUBLE
3221 && GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
3222 && GET_MODE_CLASS (mode) == MODE_INT
3223 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
3224 {
3225 REAL_VALUE_TYPE d;
3226 jmp_buf handler;
3227 HOST_WIDE_INT val;
3228
3229 if (setjmp (handler))
3230 return 0;
3231
3232 set_float_handler (handler);
3233
3234 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3235
3236 switch (code)
3237 {
3238 case FIX:
3239 val = REAL_VALUE_FIX (d);
3240 break;
3241
3242 case UNSIGNED_FIX:
3243 val = REAL_VALUE_UNSIGNED_FIX (d);
3244 break;
3245
3246 default:
3247 abort ();
3248 }
3249
3250 set_float_handler (NULL_PTR);
3251
3252 /* Clear the bits that don't belong in our mode,
3253 unless they and our sign bit are all one.
3254 So we get either a reasonable negative value or a reasonable
3255 unsigned value for this mode. */
3256 if (width < HOST_BITS_PER_WIDE_INT
3257 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3258 != ((HOST_WIDE_INT) (-1) << (width - 1))))
3259 val &= ((HOST_WIDE_INT) 1 << width) - 1;
3260
3261 return GEN_INT (val);
3262 }
3263 #endif
3264 /* This was formerly used only for non-IEEE float.
3265 eggert@twinsun.com says it is safe for IEEE also. */
3266 else
3267 {
3268 /* There are some simplifications we can do even if the operands
3269 aren't constant. */
3270 switch (code)
3271 {
3272 case NEG:
3273 case NOT:
3274 /* (not (not X)) == X, similarly for NEG. */
3275 if (GET_CODE (op) == code)
3276 return XEXP (op, 0);
3277 break;
3278
3279 case SIGN_EXTEND:
3280 /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2))))
3281 becomes just the MINUS if its mode is MODE. This allows
3282 folding switch statements on machines using casesi (such as
3283 the Vax). */
3284 if (GET_CODE (op) == TRUNCATE
3285 && GET_MODE (XEXP (op, 0)) == mode
3286 && GET_CODE (XEXP (op, 0)) == MINUS
3287 && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF
3288 && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF)
3289 return XEXP (op, 0);
3290 break;
3291 }
3292
3293 return 0;
3294 }
3295 }
3296 \f
3297 /* Simplify a binary operation CODE with result mode MODE, operating on OP0
3298 and OP1. Return 0 if no simplification is possible.
3299
3300 Don't use this for relational operations such as EQ or LT.
3301 Use simplify_relational_operation instead. */
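/* Two simplifications from the non-constant cases below: for integer
   modes (plus:SI X (const_int 0)) folds to X, and
   (minus:SI (const_int 0) X) becomes (neg:SI X).  For IEEE floating
   modes these foldings are suppressed unless -ffast-math is in
   effect.  */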
3302
3303 rtx
3304 simplify_binary_operation (code, mode, op0, op1)
3305 enum rtx_code code;
3306 enum machine_mode mode;
3307 rtx op0, op1;
3308 {
3309 register HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
3310 HOST_WIDE_INT val;
3311 int width = GET_MODE_BITSIZE (mode);
3312 rtx tem;
3313
3314 /* Relational operations don't work here. We must know the mode
3315 of the operands in order to do the comparison correctly.
3316 Assuming a full word can give incorrect results.
3317 Consider comparing 128 with -128 in QImode. */
3318
3319 if (GET_RTX_CLASS (code) == '<')
3320 abort ();
3321
3322 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3323 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3324 && GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
3325 && mode == GET_MODE (op0) && mode == GET_MODE (op1))
3326 {
3327 REAL_VALUE_TYPE f0, f1, value;
3328 jmp_buf handler;
3329
3330 if (setjmp (handler))
3331 return 0;
3332
3333 set_float_handler (handler);
3334
3335 REAL_VALUE_FROM_CONST_DOUBLE (f0, op0);
3336 REAL_VALUE_FROM_CONST_DOUBLE (f1, op1);
3337 f0 = real_value_truncate (mode, f0);
3338 f1 = real_value_truncate (mode, f1);
3339
3340 #ifdef REAL_ARITHMETIC
3341 REAL_ARITHMETIC (value, rtx_to_tree_code (code), f0, f1);
3342 #else
3343 switch (code)
3344 {
3345 case PLUS:
3346 value = f0 + f1;
3347 break;
3348 case MINUS:
3349 value = f0 - f1;
3350 break;
3351 case MULT:
3352 value = f0 * f1;
3353 break;
3354 case DIV:
3355 #ifndef REAL_INFINITY
3356 if (f1 == 0)
3357 return 0;
3358 #endif
3359 value = f0 / f1;
3360 break;
3361 case SMIN:
3362 value = MIN (f0, f1);
3363 break;
3364 case SMAX:
3365 value = MAX (f0, f1);
3366 break;
3367 default:
3368 abort ();
3369 }
3370 #endif
3371
3372 value = real_value_truncate (mode, value);
3373 set_float_handler (NULL_PTR);
3374 return CONST_DOUBLE_FROM_REAL_VALUE (value, mode);
3375 }
3376 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3377
3378 /* We can fold some multi-word operations. */
3379 if (GET_MODE_CLASS (mode) == MODE_INT
3380 && width == HOST_BITS_PER_WIDE_INT * 2
3381 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
3382 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
3383 {
3384 HOST_WIDE_INT l1, l2, h1, h2, lv, hv;
3385
3386 if (GET_CODE (op0) == CONST_DOUBLE)
3387 l1 = CONST_DOUBLE_LOW (op0), h1 = CONST_DOUBLE_HIGH (op0);
3388 else
3389 l1 = INTVAL (op0), h1 = l1 < 0 ? -1 : 0;
3390
3391 if (GET_CODE (op1) == CONST_DOUBLE)
3392 l2 = CONST_DOUBLE_LOW (op1), h2 = CONST_DOUBLE_HIGH (op1);
3393 else
3394 l2 = INTVAL (op1), h2 = l2 < 0 ? -1 : 0;
3395
3396 switch (code)
3397 {
3398 case MINUS:
3399 /* A - B == A + (-B). */
3400 neg_double (l2, h2, &lv, &hv);
3401 l2 = lv, h2 = hv;
3402
3403 /* .. fall through ... */
3404
3405 case PLUS:
3406 add_double (l1, h1, l2, h2, &lv, &hv);
3407 break;
3408
3409 case MULT:
3410 mul_double (l1, h1, l2, h2, &lv, &hv);
3411 break;
3412
3413 case DIV: case MOD: case UDIV: case UMOD:
3414 /* We'd need to include tree.h to do this and it doesn't seem worth
3415 it. */
3416 return 0;
3417
3418 case AND:
3419 lv = l1 & l2, hv = h1 & h2;
3420 break;
3421
3422 case IOR:
3423 lv = l1 | l2, hv = h1 | h2;
3424 break;
3425
3426 case XOR:
3427 lv = l1 ^ l2, hv = h1 ^ h2;
3428 break;
3429
3430 case SMIN:
3431 if (h1 < h2
3432 || (h1 == h2
3433 && ((unsigned HOST_WIDE_INT) l1
3434 < (unsigned HOST_WIDE_INT) l2)))
3435 lv = l1, hv = h1;
3436 else
3437 lv = l2, hv = h2;
3438 break;
3439
3440 case SMAX:
3441 if (h1 > h2
3442 || (h1 == h2
3443 && ((unsigned HOST_WIDE_INT) l1
3444 > (unsigned HOST_WIDE_INT) l2)))
3445 lv = l1, hv = h1;
3446 else
3447 lv = l2, hv = h2;
3448 break;
3449
3450 case UMIN:
3451 if ((unsigned HOST_WIDE_INT) h1 < (unsigned HOST_WIDE_INT) h2
3452 || (h1 == h2
3453 && ((unsigned HOST_WIDE_INT) l1
3454 < (unsigned HOST_WIDE_INT) l2)))
3455 lv = l1, hv = h1;
3456 else
3457 lv = l2, hv = h2;
3458 break;
3459
3460 case UMAX:
3461 if ((unsigned HOST_WIDE_INT) h1 > (unsigned HOST_WIDE_INT) h2
3462 || (h1 == h2
3463 && ((unsigned HOST_WIDE_INT) l1
3464 > (unsigned HOST_WIDE_INT) l2)))
3465 lv = l1, hv = h1;
3466 else
3467 lv = l2, hv = h2;
3468 break;
3469
3470 case LSHIFTRT: case ASHIFTRT:
3471 case ASHIFT:
3472 case ROTATE: case ROTATERT:
3473 #ifdef SHIFT_COUNT_TRUNCATED
3474 if (SHIFT_COUNT_TRUNCATED)
3475 l2 &= (GET_MODE_BITSIZE (mode) - 1), h2 = 0;
3476 #endif
3477
3478 if (h2 != 0 || l2 < 0 || l2 >= GET_MODE_BITSIZE (mode))
3479 return 0;
3480
3481 if (code == LSHIFTRT || code == ASHIFTRT)
3482 rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
3483 code == ASHIFTRT);
3484 else if (code == ASHIFT)
3485 lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv, 1);
3486 else if (code == ROTATE)
3487 lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3488 else /* code == ROTATERT */
3489 rrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3490 break;
3491
3492 default:
3493 return 0;
3494 }
3495
3496 return immed_double_const (lv, hv, mode);
3497 }
3498
3499 if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT
3500 || width > HOST_BITS_PER_WIDE_INT || width == 0)
3501 {
3502 /* Even if we can't compute a constant result,
3503 there are some cases worth simplifying. */
3504
3505 switch (code)
3506 {
3507 case PLUS:
3508 /* In IEEE floating point, x+0 is not the same as x. Similarly
3509 for the other optimizations below. */
3510 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3511 && FLOAT_MODE_P (mode) && ! flag_fast_math)
3512 break;
3513
3514 if (op1 == CONST0_RTX (mode))
3515 return op0;
3516
3517 /* ((-a) + b) -> (b - a) and similarly for (a + (-b)) */
3518 if (GET_CODE (op0) == NEG)
3519 return cse_gen_binary (MINUS, mode, op1, XEXP (op0, 0));
3520 else if (GET_CODE (op1) == NEG)
3521 return cse_gen_binary (MINUS, mode, op0, XEXP (op1, 0));
3522
3523 /* Handle both-operands-constant cases. We can only add
3524 CONST_INTs to constants since the sum of relocatable symbols
3525 can't be handled by most assemblers. Don't add CONST_INT
3526 to CONST_INT since overflow won't be computed properly if wider
3527 than HOST_BITS_PER_WIDE_INT. */
3528
3529 if (CONSTANT_P (op0) && GET_MODE (op0) != VOIDmode
3530 && GET_CODE (op1) == CONST_INT)
3531 return plus_constant (op0, INTVAL (op1));
3532 else if (CONSTANT_P (op1) && GET_MODE (op1) != VOIDmode
3533 && GET_CODE (op0) == CONST_INT)
3534 return plus_constant (op1, INTVAL (op0));
3535
3536 /* See if this is something like X * C - X or vice versa or
3537 if the multiplication is written as a shift. If so, we can
3538 distribute and make a new multiply, shift, or maybe just
3539 	 have X (if C is 2 in the example above).  But don't make a
3540 real multiply if we didn't have one before. */
3541
3542 if (! FLOAT_MODE_P (mode))
3543 {
3544 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3545 rtx lhs = op0, rhs = op1;
3546 int had_mult = 0;
3547
3548 if (GET_CODE (lhs) == NEG)
3549 coeff0 = -1, lhs = XEXP (lhs, 0);
3550 else if (GET_CODE (lhs) == MULT
3551 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3552 {
3553 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3554 had_mult = 1;
3555 }
3556 else if (GET_CODE (lhs) == ASHIFT
3557 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3558 && INTVAL (XEXP (lhs, 1)) >= 0
3559 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3560 {
3561 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3562 lhs = XEXP (lhs, 0);
3563 }
3564
3565 if (GET_CODE (rhs) == NEG)
3566 coeff1 = -1, rhs = XEXP (rhs, 0);
3567 else if (GET_CODE (rhs) == MULT
3568 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3569 {
3570 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3571 had_mult = 1;
3572 }
3573 else if (GET_CODE (rhs) == ASHIFT
3574 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3575 && INTVAL (XEXP (rhs, 1)) >= 0
3576 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3577 {
3578 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3579 rhs = XEXP (rhs, 0);
3580 }
3581
3582 if (rtx_equal_p (lhs, rhs))
3583 {
3584 tem = cse_gen_binary (MULT, mode, lhs,
3585 GEN_INT (coeff0 + coeff1));
3586 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3587 }
3588 }
3589
3590 /* If one of the operands is a PLUS or a MINUS, see if we can
3591 simplify this by the associative law.
3592 Don't use the associative law for floating point.
3593 The inaccuracy makes it nonassociative,
3594 and subtle programs can break if operations are associated. */
3595
3596 if (INTEGRAL_MODE_P (mode)
3597 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3598 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3599 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3600 return tem;
3601 break;
3602
3603 case COMPARE:
3604 #ifdef HAVE_cc0
3605 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3606 using cc0, in which case we want to leave it as a COMPARE
3607 so we can distinguish it from a register-register-copy.
3608
3609 In IEEE floating point, x-0 is not the same as x. */
3610
3611 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3612 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3613 && op1 == CONST0_RTX (mode))
3614 return op0;
3615 #else
3616 /* Do nothing here. */
3617 #endif
3618 break;
3619
3620 case MINUS:
3621 /* None of these optimizations can be done for IEEE
3622 floating point. */
3623 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3624 && FLOAT_MODE_P (mode) && ! flag_fast_math)
3625 break;
3626
3627 /* We can't assume x-x is 0 even with non-IEEE floating point,
3628 but since it is zero except in very strange circumstances, we
3629 will treat it as zero with -ffast-math. */
3630 if (rtx_equal_p (op0, op1)
3631 && ! side_effects_p (op0)
3632 && (! FLOAT_MODE_P (mode) || flag_fast_math))
3633 return CONST0_RTX (mode);
3634
3635 /* Change subtraction from zero into negation. */
3636 if (op0 == CONST0_RTX (mode))
3637 return gen_rtx (NEG, mode, op1);
3638
3639 /* (-1 - a) is ~a. */
3640 if (op0 == constm1_rtx)
3641 return gen_rtx (NOT, mode, op1);
3642
3643 /* Subtracting 0 has no effect. */
3644 if (op1 == CONST0_RTX (mode))
3645 return op0;
3646
3647 /* See if this is something like X * C - X or vice versa or
3648 if the multiplication is written as a shift. If so, we can
3649 distribute and make a new multiply, shift, or maybe just
3650 have X (if C is 2 in the example above). But don't make a
3651 real multiply if we didn't have one before. */
3652
3653 if (! FLOAT_MODE_P (mode))
3654 {
3655 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3656 rtx lhs = op0, rhs = op1;
3657 int had_mult = 0;
3658
3659 if (GET_CODE (lhs) == NEG)
3660 coeff0 = -1, lhs = XEXP (lhs, 0);
3661 else if (GET_CODE (lhs) == MULT
3662 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3663 {
3664 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3665 had_mult = 1;
3666 }
3667 else if (GET_CODE (lhs) == ASHIFT
3668 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3669 && INTVAL (XEXP (lhs, 1)) >= 0
3670 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3671 {
3672 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3673 lhs = XEXP (lhs, 0);
3674 }
3675
3676 if (GET_CODE (rhs) == NEG)
3677 coeff1 = -1, rhs = XEXP (rhs, 0);
3678 else if (GET_CODE (rhs) == MULT
3679 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3680 {
3681 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3682 had_mult = 1;
3683 }
3684 else if (GET_CODE (rhs) == ASHIFT
3685 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3686 && INTVAL (XEXP (rhs, 1)) >= 0
3687 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3688 {
3689 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3690 rhs = XEXP (rhs, 0);
3691 }
3692
3693 if (rtx_equal_p (lhs, rhs))
3694 {
3695 tem = cse_gen_binary (MULT, mode, lhs,
3696 GEN_INT (coeff0 - coeff1));
3697 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3698 }
3699 }
3700
3701 /* (a - (-b)) -> (a + b). */
3702 if (GET_CODE (op1) == NEG)
3703 return cse_gen_binary (PLUS, mode, op0, XEXP (op1, 0));
3704
3705 /* If one of the operands is a PLUS or a MINUS, see if we can
3706 simplify this by the associative law.
3707 Don't use the associative law for floating point.
3708 The inaccuracy makes it nonassociative,
3709 and subtle programs can break if operations are associated. */
3710
3711 if (INTEGRAL_MODE_P (mode)
3712 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3713 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3714 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3715 return tem;
3716
3717 /* Don't let a relocatable value get a negative coeff. */
3718 if (GET_CODE (op1) == CONST_INT && GET_MODE (op0) != VOIDmode)
3719 return plus_constant (op0, - INTVAL (op1));
3720 break;
3721
3722 case MULT:
3723 if (op1 == constm1_rtx)
3724 {
3725 tem = simplify_unary_operation (NEG, mode, op0, mode);
3726
3727 return tem ? tem : gen_rtx (NEG, mode, op0);
3728 }
3729
3730 /* In IEEE floating point, x*0 is not always 0. */
3731 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3732 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3733 && op1 == CONST0_RTX (mode)
3734 && ! side_effects_p (op0))
3735 return op1;
3736
3737 /* In IEEE floating point, x*1 is not equivalent to x for NaNs.
3738 However, ANSI says we can drop signals,
3739 so we can do this anyway. */
3740 if (op1 == CONST1_RTX (mode))
3741 return op0;
3742
3743 /* Convert multiply by constant power of two into shift unless
3744 we are still generating RTL. This test is a kludge. */
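/* For example, (mult r (const_int 8)) becomes (ashift r (const_int 3)). */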
3745 if (GET_CODE (op1) == CONST_INT
3746 && (val = exact_log2 (INTVAL (op1))) >= 0
3747 && ! rtx_equal_function_value_matters)
3748 return gen_rtx (ASHIFT, mode, op0, GEN_INT (val));
3749
3750 if (GET_CODE (op1) == CONST_DOUBLE
3751 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT)
3752 {
3753 REAL_VALUE_TYPE d;
3754 jmp_buf handler;
3755 int op1is2, op1ism1;
3756
3757 if (setjmp (handler))
3758 return 0;
3759
3760 set_float_handler (handler);
3761 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
3762 op1is2 = REAL_VALUES_EQUAL (d, dconst2);
3763 op1ism1 = REAL_VALUES_EQUAL (d, dconstm1);
3764 set_float_handler (NULL_PTR);
3765
3766 /* x*2 is x+x and x*(-1) is -x */
3767 if (op1is2 && GET_MODE (op0) == mode)
3768 return gen_rtx (PLUS, mode, op0, copy_rtx (op0));
3769
3770 else if (op1ism1 && GET_MODE (op0) == mode)
3771 return gen_rtx (NEG, mode, op0);
3772 }
3773 break;
3774
3775 case IOR:
3776 if (op1 == const0_rtx)
3777 return op0;
3778 if (GET_CODE (op1) == CONST_INT
3779 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3780 return op1;
3781 if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3782 return op0;
3783 /* A | (~A) -> -1 */
3784 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
3785 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
3786 && ! side_effects_p (op0)
3787 && GET_MODE_CLASS (mode) != MODE_CC)
3788 return constm1_rtx;
3789 break;
3790
3791 case XOR:
3792 if (op1 == const0_rtx)
3793 return op0;
3794 if (GET_CODE (op1) == CONST_INT
3795 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3796 return gen_rtx (NOT, mode, op0);
3797 if (op0 == op1 && ! side_effects_p (op0)
3798 && GET_MODE_CLASS (mode) != MODE_CC)
3799 return const0_rtx;
3800 break;
3801
3802 case AND:
3803 if (op1 == const0_rtx && ! side_effects_p (op0))
3804 return const0_rtx;
3805 if (GET_CODE (op1) == CONST_INT
3806 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3807 return op0;
3808 if (op0 == op1 && ! side_effects_p (op0)
3809 && GET_MODE_CLASS (mode) != MODE_CC)
3810 return op0;
3811 /* A & (~A) -> 0 */
3812 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
3813 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
3814 && ! side_effects_p (op0)
3815 && GET_MODE_CLASS (mode) != MODE_CC)
3816 return const0_rtx;
3817 break;
3818
3819 case UDIV:
3820 /* Convert divide by power of two into shift (divide by 1 handled
3821 below). */
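/* For example, (udiv x (const_int 8)) becomes (lshiftrt x (const_int 3)). */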
3822 if (GET_CODE (op1) == CONST_INT
3823 && (arg1 = exact_log2 (INTVAL (op1))) > 0)
3824 return gen_rtx (LSHIFTRT, mode, op0, GEN_INT (arg1));
3825
3826 /* ... fall through ... */
3827
3828 case DIV:
3829 if (op1 == CONST1_RTX (mode))
3830 return op0;
3831
3832 /* In IEEE floating point, 0/x is not always 0. */
3833 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3834 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3835 && op0 == CONST0_RTX (mode)
3836 && ! side_effects_p (op1))
3837 return op0;
3838
3839 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3840 /* Change division by a constant into multiplication. Only do
3841 this with -ffast-math until an expert says it is safe in
3842 general. */
3843 else if (GET_CODE (op1) == CONST_DOUBLE
3844 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT
3845 && op1 != CONST0_RTX (mode)
3846 && flag_fast_math)
3847 {
3848 REAL_VALUE_TYPE d;
3849 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
3850
3851 if (! REAL_VALUES_EQUAL (d, dconst0))
3852 {
3853 #if defined (REAL_ARITHMETIC)
3854 REAL_ARITHMETIC (d, rtx_to_tree_code (DIV), dconst1, d);
3855 return gen_rtx (MULT, mode, op0,
3856 CONST_DOUBLE_FROM_REAL_VALUE (d, mode));
3857 #else
3858 return gen_rtx (MULT, mode, op0,
3859 CONST_DOUBLE_FROM_REAL_VALUE (1./d, mode));
3860 #endif
3861 }
3862 }
3863 #endif
3864 break;
3865
3866 case UMOD:
3867 /* Handle modulus by power of two (mod with 1 handled below). */
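/* For example, (umod x (const_int 8)) becomes (and x (const_int 7)). */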
3868 if (GET_CODE (op1) == CONST_INT
3869 && exact_log2 (INTVAL (op1)) > 0)
3870 return gen_rtx (AND, mode, op0, GEN_INT (INTVAL (op1) - 1));
3871
3872 /* ... fall through ... */
3873
3874 case MOD:
3875 if ((op0 == const0_rtx || op1 == const1_rtx)
3876 && ! side_effects_p (op0) && ! side_effects_p (op1))
3877 return const0_rtx;
3878 break;
3879
3880 case ROTATERT:
3881 case ROTATE:
3882 /* Rotating ~0 always results in ~0. */
3883 if (GET_CODE (op0) == CONST_INT && width <= HOST_BITS_PER_WIDE_INT
3884 && INTVAL (op0) == GET_MODE_MASK (mode)
3885 && ! side_effects_p (op1))
3886 return op0;
3887
3888 /* ... fall through ... */
3889
3890 case ASHIFT:
3891 case ASHIFTRT:
3892 case LSHIFTRT:
3893 if (op1 == const0_rtx)
3894 return op0;
3895 if (op0 == const0_rtx && ! side_effects_p (op1))
3896 return op0;
3897 break;
3898
3899 case SMIN:
3900 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
3901 && INTVAL (op1) == (HOST_WIDE_INT) 1 << (width -1)
3902 && ! side_effects_p (op0))
3903 return op1;
3904 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3905 return op0;
3906 break;
3907
3908 case SMAX:
3909 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
3910 && (INTVAL (op1)
3911 == (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode) >> 1)
3912 && ! side_effects_p (op0))
3913 return op1;
3914 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3915 return op0;
3916 break;
3917
3918 case UMIN:
3919 if (op1 == const0_rtx && ! side_effects_p (op0))
3920 return op1;
3921 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3922 return op0;
3923 break;
3924
3925 case UMAX:
3926 if (op1 == constm1_rtx && ! side_effects_p (op0))
3927 return op1;
3928 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3929 return op0;
3930 break;
3931
3932 default:
3933 abort ();
3934 }
3935
3936 return 0;
3937 }
3938
3939 /* Get the integer argument values in two forms:
3940 zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S. */
3941
3942 arg0 = INTVAL (op0);
3943 arg1 = INTVAL (op1);
3944
3945 if (width < HOST_BITS_PER_WIDE_INT)
3946 {
3947 arg0 &= ((HOST_WIDE_INT) 1 << width) - 1;
3948 arg1 &= ((HOST_WIDE_INT) 1 << width) - 1;
3949
3950 arg0s = arg0;
3951 if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1)))
3952 arg0s |= ((HOST_WIDE_INT) (-1) << width);
3953
3954 arg1s = arg1;
3955 if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1)))
3956 arg1s |= ((HOST_WIDE_INT) (-1) << width);
3957 }
3958 else
3959 {
3960 arg0s = arg0;
3961 arg1s = arg1;
3962 }
3963
3964 /* Compute the value of the arithmetic. */
3965
3966 switch (code)
3967 {
3968 case PLUS:
3969 val = arg0s + arg1s;
3970 break;
3971
3972 case MINUS:
3973 val = arg0s - arg1s;
3974 break;
3975
3976 case MULT:
3977 val = arg0s * arg1s;
3978 break;
3979
3980 case DIV:
3981 if (arg1s == 0)
3982 return 0;
3983 val = arg0s / arg1s;
3984 break;
3985
3986 case MOD:
3987 if (arg1s == 0)
3988 return 0;
3989 val = arg0s % arg1s;
3990 break;
3991
3992 case UDIV:
3993 if (arg1 == 0)
3994 return 0;
3995 val = (unsigned HOST_WIDE_INT) arg0 / arg1;
3996 break;
3997
3998 case UMOD:
3999 if (arg1 == 0)
4000 return 0;
4001 val = (unsigned HOST_WIDE_INT) arg0 % arg1;
4002 break;
4003
4004 case AND:
4005 val = arg0 & arg1;
4006 break;
4007
4008 case IOR:
4009 val = arg0 | arg1;
4010 break;
4011
4012 case XOR:
4013 val = arg0 ^ arg1;
4014 break;
4015
4016 case LSHIFTRT:
4017 /* If shift count is undefined, don't fold it; let the machine do
4018 what it wants. But truncate it if the machine will do that. */
4019 if (arg1 < 0)
4020 return 0;
4021
4022 #ifdef SHIFT_COUNT_TRUNCATED
4023 if (SHIFT_COUNT_TRUNCATED)
4024 arg1 %= width;
4025 #endif
4026
4027 val = ((unsigned HOST_WIDE_INT) arg0) >> arg1;
4028 break;
4029
4030 case ASHIFT:
4031 if (arg1 < 0)
4032 return 0;
4033
4034 #ifdef SHIFT_COUNT_TRUNCATED
4035 if (SHIFT_COUNT_TRUNCATED)
4036 arg1 %= width;
4037 #endif
4038
4039 val = ((unsigned HOST_WIDE_INT) arg0) << arg1;
4040 break;
4041
4042 case ASHIFTRT:
4043 if (arg1 < 0)
4044 return 0;
4045
4046 #ifdef SHIFT_COUNT_TRUNCATED
4047 if (SHIFT_COUNT_TRUNCATED)
4048 arg1 %= width;
4049 #endif
4050
4051 val = arg0s >> arg1;
4052
4053 /* Bootstrap compiler may not have sign extended the right shift.
4054 Manually extend the sign to ensure bootstrap cc matches gcc. */
4055 if (arg0s < 0 && arg1 > 0)
4056 val |= ((HOST_WIDE_INT) -1) << (HOST_BITS_PER_WIDE_INT - arg1);
4057
4058 break;
4059
4060 case ROTATERT:
4061 if (arg1 < 0)
4062 return 0;
4063
4064 arg1 %= width;
4065 val = ((((unsigned HOST_WIDE_INT) arg0) << (width - arg1))
4066 | (((unsigned HOST_WIDE_INT) arg0) >> arg1));
4067 break;
4068
4069 case ROTATE:
4070 if (arg1 < 0)
4071 return 0;
4072
4073 arg1 %= width;
4074 val = ((((unsigned HOST_WIDE_INT) arg0) << arg1)
4075 | (((unsigned HOST_WIDE_INT) arg0) >> (width - arg1)));
4076 break;
4077
4078 case COMPARE:
4079 /* Do nothing here. */
4080 return 0;
4081
4082 case SMIN:
4083 val = arg0s <= arg1s ? arg0s : arg1s;
4084 break;
4085
4086 case UMIN:
4087 val = ((unsigned HOST_WIDE_INT) arg0
4088 <= (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4089 break;
4090
4091 case SMAX:
4092 val = arg0s > arg1s ? arg0s : arg1s;
4093 break;
4094
4095 case UMAX:
4096 val = ((unsigned HOST_WIDE_INT) arg0
4097 > (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4098 break;
4099
4100 default:
4101 abort ();
4102 }
4103
4104 /* Clear the bits that don't belong in our mode, unless they and our sign
4105 bit are all one. So we get either a reasonable negative value or a
4106 reasonable unsigned value for this mode. */
4107 if (width < HOST_BITS_PER_WIDE_INT
4108 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4109 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4110 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4111
4112 return GEN_INT (val);
4113 }
4114 \f
4115 /* Simplify a PLUS or MINUS, at least one of whose operands may be another
4116 PLUS or MINUS.
4117
4118 Rather than test for specific cases, we do this by a brute-force method
4119 and do all possible simplifications until no more changes occur. Then
4120 we rebuild the operation. */
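/* For example, (plus (plus a (const_int 3)) (minus b (const_int 7))) is
expanded into the operand list a, b, 3, -7; the two constants combine
to -4 and the result is rebuilt as (plus (plus a b) (const_int -4)). */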
4121
4122 static rtx
4123 simplify_plus_minus (code, mode, op0, op1)
4124 enum rtx_code code;
4125 enum machine_mode mode;
4126 rtx op0, op1;
4127 {
4128 rtx ops[8];
4129 int negs[8];
4130 rtx result, tem;
4131 int n_ops = 2, input_ops = 2, input_consts = 0, n_consts = 0;
4132 int first = 1, negate = 0, changed;
4133 int i, j;
4134
4135 bzero ((char *) ops, sizeof ops);
4136
4137 /* Set up the two operands and then expand them until nothing has been
4138 changed. If we run out of room in our array, give up; this should
4139 almost never happen. */
4140
4141 ops[0] = op0, ops[1] = op1, negs[0] = 0, negs[1] = (code == MINUS);
4142
4143 changed = 1;
4144 while (changed)
4145 {
4146 changed = 0;
4147
4148 for (i = 0; i < n_ops; i++)
4149 switch (GET_CODE (ops[i]))
4150 {
4151 case PLUS:
4152 case MINUS:
4153 if (n_ops == 7)
4154 return 0;
4155
4156 ops[n_ops] = XEXP (ops[i], 1);
4157 negs[n_ops++] = GET_CODE (ops[i]) == MINUS ? !negs[i] : negs[i];
4158 ops[i] = XEXP (ops[i], 0);
4159 input_ops++;
4160 changed = 1;
4161 break;
4162
4163 case NEG:
4164 ops[i] = XEXP (ops[i], 0);
4165 negs[i] = ! negs[i];
4166 changed = 1;
4167 break;
4168
4169 case CONST:
4170 ops[i] = XEXP (ops[i], 0);
4171 input_consts++;
4172 changed = 1;
4173 break;
4174
4175 case NOT:
4176 /* ~a -> (-a - 1) */
4177 if (n_ops != 7)
4178 {
4179 ops[n_ops] = constm1_rtx;
4180 negs[n_ops++] = negs[i];
4181 ops[i] = XEXP (ops[i], 0);
4182 negs[i] = ! negs[i];
4183 changed = 1;
4184 }
4185 break;
4186
4187 case CONST_INT:
4188 if (negs[i])
4189 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0, changed = 1;
4190 break;
4191 }
4192 }
4193
4194 /* If we only have two operands, we can't do anything. */
4195 if (n_ops <= 2)
4196 return 0;
4197
4198 /* Now simplify each pair of operands until nothing changes. The first
4199 time through just simplify constants against each other. */
4200
4201 changed = 1;
4202 while (changed)
4203 {
4204 changed = first;
4205
4206 for (i = 0; i < n_ops - 1; i++)
4207 for (j = i + 1; j < n_ops; j++)
4208 if (ops[i] != 0 && ops[j] != 0
4209 && (! first || (CONSTANT_P (ops[i]) && CONSTANT_P (ops[j]))))
4210 {
4211 rtx lhs = ops[i], rhs = ops[j];
4212 enum rtx_code ncode = PLUS;
4213
4214 if (negs[i] && ! negs[j])
4215 lhs = ops[j], rhs = ops[i], ncode = MINUS;
4216 else if (! negs[i] && negs[j])
4217 ncode = MINUS;
4218
4219 tem = simplify_binary_operation (ncode, mode, lhs, rhs);
4220 if (tem)
4221 {
4222 ops[i] = tem, ops[j] = 0;
4223 negs[i] = negs[i] && negs[j];
4224 if (GET_CODE (tem) == NEG)
4225 ops[i] = XEXP (tem, 0), negs[i] = ! negs[i];
4226
4227 if (GET_CODE (ops[i]) == CONST_INT && negs[i])
4228 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0;
4229 changed = 1;
4230 }
4231 }
4232
4233 first = 0;
4234 }
4235
4236 /* Pack all the operands to the lower-numbered entries and give up if
4237 we didn't reduce the number of operands we had. Make sure we
4238 count a CONST as two operands. If we have the same number of
4239 operands, but have made more CONSTs than we had, this is also
4240 an improvement, so accept it. */
4241
4242 for (i = 0, j = 0; j < n_ops; j++)
4243 if (ops[j] != 0)
4244 {
4245 ops[i] = ops[j], negs[i++] = negs[j];
4246 if (GET_CODE (ops[j]) == CONST)
4247 n_consts++;
4248 }
4249
4250 if (i + n_consts > input_ops
4251 || (i + n_consts == input_ops && n_consts <= input_consts))
4252 return 0;
4253
4254 n_ops = i;
4255
4256 /* If we have a CONST_INT, put it last. */
4257 for (i = 0; i < n_ops - 1; i++)
4258 if (GET_CODE (ops[i]) == CONST_INT)
4259 {
4260 tem = ops[n_ops - 1], ops[n_ops - 1] = ops[i] , ops[i] = tem;
4261 j = negs[n_ops - 1], negs[n_ops - 1] = negs[i], negs[i] = j;
4262 }
4263
4264 /* Put a non-negated operand first. If there aren't any, make all
4265 operands positive and negate the whole thing later. */
4266 for (i = 0; i < n_ops && negs[i]; i++)
4267 ;
4268
4269 if (i == n_ops)
4270 {
4271 for (i = 0; i < n_ops; i++)
4272 negs[i] = 0;
4273 negate = 1;
4274 }
4275 else if (i != 0)
4276 {
4277 tem = ops[0], ops[0] = ops[i], ops[i] = tem;
4278 j = negs[0], negs[0] = negs[i], negs[i] = j;
4279 }
4280
4281 /* Now make the result by performing the requested operations. */
4282 result = ops[0];
4283 for (i = 1; i < n_ops; i++)
4284 result = cse_gen_binary (negs[i] ? MINUS : PLUS, mode, result, ops[i]);
4285
4286 return negate ? gen_rtx (NEG, mode, result) : result;
4287 }
4288 \f
4289 /* Make a binary operation by properly ordering the operands and
4290 seeing if the expression folds. */
4291
4292 static rtx
4293 cse_gen_binary (code, mode, op0, op1)
4294 enum rtx_code code;
4295 enum machine_mode mode;
4296 rtx op0, op1;
4297 {
4298 rtx tem;
4299
4300 /* Put complex operands first and constants second if commutative. */
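/* For example, (plus (const_int 4) (reg r)) is reordered here so that it
reaches the folders as (plus (reg r) (const_int 4)). */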
4301 if (GET_RTX_CLASS (code) == 'c'
4302 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
4303 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
4304 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
4305 || (GET_CODE (op0) == SUBREG
4306 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
4307 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
4308 tem = op0, op0 = op1, op1 = tem;
4309
4310 /* If this simplifies, do it. */
4311 tem = simplify_binary_operation (code, mode, op0, op1);
4312
4313 if (tem)
4314 return tem;
4315
4316 /* Handle addition and subtraction of CONST_INT specially. Otherwise,
4317 just form the operation. */
4318
4319 if (code == PLUS && GET_CODE (op1) == CONST_INT
4320 && GET_MODE (op0) != VOIDmode)
4321 return plus_constant (op0, INTVAL (op1));
4322 else if (code == MINUS && GET_CODE (op1) == CONST_INT
4323 && GET_MODE (op0) != VOIDmode)
4324 return plus_constant (op0, - INTVAL (op1));
4325 else
4326 return gen_rtx (code, mode, op0, op1);
4327 }
4328 \f
4329 /* Like simplify_binary_operation except used for relational operators.
4330 MODE is the mode of the operands, not that of the result. If MODE
4331 is VOIDmode, both operands must also be VOIDmode and we compare the
4332 operands in "infinite precision".
4333
4334 If no simplification is possible, this function returns zero. Otherwise,
4335 it returns either const_true_rtx or const0_rtx. */
4336
4337 rtx
4338 simplify_relational_operation (code, mode, op0, op1)
4339 enum rtx_code code;
4340 enum machine_mode mode;
4341 rtx op0, op1;
4342 {
4343 int equal, op0lt, op0ltu, op1lt, op1ltu;
4344 rtx tem;
4345
4346 /* If op0 is a compare, extract the comparison arguments from it. */
4347 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
4348 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4349
4350 /* We can't simplify MODE_CC values since we don't know what the
4351 actual comparison is. */
4352 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC
4353 #ifdef HAVE_cc0
4354 || op0 == cc0_rtx
4355 #endif
4356 )
4357 return 0;
4358
4359 /* For integer comparisons of A and B maybe we can simplify A - B and can
4360 then simplify a comparison of that with zero. If A and B are both either
4361 a register or a CONST_INT, this can't help; testing for these cases will
4362 prevent infinite recursion here and speed things up.
4363
4364 If CODE is an unsigned comparison, we can only do this if A - B is a
4365 constant integer, and then we have to compare that integer with zero as a
4366 signed comparison. Note that this will give the incorrect result from
4367 comparisons that overflow. Since these are undefined, this is probably
4368 OK. If it causes a problem, we can check for A or B being an address
4369 (fp + const or SYMBOL_REF) and only do it in that case. */
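/* For example, (gt (plus r (const_int 4)) r) reduces to a signed comparison
of (const_int 4) against zero and folds to const_true_rtx (overflow aside,
as noted above). */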
4370
4371 if (INTEGRAL_MODE_P (mode) && op1 != const0_rtx
4372 && ! ((GET_CODE (op0) == REG || GET_CODE (op0) == CONST_INT)
4373 && (GET_CODE (op1) == REG || GET_CODE (op1) == CONST_INT))
4374 && 0 != (tem = simplify_binary_operation (MINUS, mode, op0, op1))
4375 && (GET_CODE (tem) == CONST_INT
4376 || (code != GTU && code != GEU &&
4377 code != LTU && code != LEU)))
4378 return simplify_relational_operation (signed_condition (code),
4379 mode, tem, const0_rtx);
4380
4381 /* For non-IEEE floating-point, if the two operands are equal, we know the
4382 result. */
4383 if (rtx_equal_p (op0, op1)
4384 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4385 || ! FLOAT_MODE_P (GET_MODE (op0)) || flag_fast_math))
4386 equal = 1, op0lt = 0, op0ltu = 0, op1lt = 0, op1ltu = 0;
4387
4388 /* If the operands are floating-point constants, see if we can fold
4389 the result. */
4390 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4391 else if (GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
4392 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
4393 {
4394 REAL_VALUE_TYPE d0, d1;
4395 jmp_buf handler;
4396
4397 if (setjmp (handler))
4398 return 0;
4399
4400 set_float_handler (handler);
4401 REAL_VALUE_FROM_CONST_DOUBLE (d0, op0);
4402 REAL_VALUE_FROM_CONST_DOUBLE (d1, op1);
4403 equal = REAL_VALUES_EQUAL (d0, d1);
4404 op0lt = op0ltu = REAL_VALUES_LESS (d0, d1);
4405 op1lt = op1ltu = REAL_VALUES_LESS (d1, d0);
4406 set_float_handler (NULL_PTR);
4407 }
4408 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4409
4410 /* Otherwise, see if the operands are both integers. */
4411 else if ((GET_MODE_CLASS (mode) == MODE_INT || mode == VOIDmode)
4412 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
4413 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
4414 {
4415 int width = GET_MODE_BITSIZE (mode);
4416 HOST_WIDE_INT l0s, h0s, l1s, h1s;
4417 unsigned HOST_WIDE_INT l0u, h0u, l1u, h1u;
4418
4419 /* Get the two words comprising each integer constant. */
4420 if (GET_CODE (op0) == CONST_DOUBLE)
4421 {
4422 l0u = l0s = CONST_DOUBLE_LOW (op0);
4423 h0u = h0s = CONST_DOUBLE_HIGH (op0);
4424 }
4425 else
4426 {
4427 l0u = l0s = INTVAL (op0);
4428 h0u = 0, h0s = l0s < 0 ? -1 : 0;
4429 }
4430
4431 if (GET_CODE (op1) == CONST_DOUBLE)
4432 {
4433 l1u = l1s = CONST_DOUBLE_LOW (op1);
4434 h1u = h1s = CONST_DOUBLE_HIGH (op1);
4435 }
4436 else
4437 {
4438 l1u = l1s = INTVAL (op1);
4439 h1u = 0, h1s = l1s < 0 ? -1 : 0;
4440 }
4441
4442 /* If WIDTH is nonzero and smaller than HOST_BITS_PER_WIDE_INT,
4443 we have to sign or zero-extend the values. */
4444 if (width != 0 && width <= HOST_BITS_PER_WIDE_INT)
4445 h0u = h1u = 0, h0s = l0s < 0 ? -1 : 0, h1s = l1s < 0 ? -1 : 0;
4446
4447 if (width != 0 && width < HOST_BITS_PER_WIDE_INT)
4448 {
4449 l0u &= ((HOST_WIDE_INT) 1 << width) - 1;
4450 l1u &= ((HOST_WIDE_INT) 1 << width) - 1;
4451
4452 if (l0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4453 l0s |= ((HOST_WIDE_INT) (-1) << width);
4454
4455 if (l1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4456 l1s |= ((HOST_WIDE_INT) (-1) << width);
4457 }
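/* With width 8 (QImode), for instance, an operand of 0xff is treated as 255
by the unsigned orderings below and as -1 by the signed ones. */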
4458
4459 equal = (h0u == h1u && l0u == l1u);
4460 op0lt = (h0s < h1s || (h0s == h1s && l0s < l1s));
4461 op1lt = (h1s < h0s || (h1s == h0s && l1s < l0s));
4462 op0ltu = (h0u < h1u || (h0u == h1u && l0u < l1u));
4463 op1ltu = (h1u < h0u || (h1u == h0u && l1u < l0u));
4464 }
4465
4466 /* Otherwise, there are some code-specific tests we can make. */
4467 else
4468 {
4469 switch (code)
4470 {
4471 case EQ:
4472 /* References to the frame plus a constant or labels cannot
4473 be zero, but a SYMBOL_REF can due to #pragma weak. */
4474 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4475 || GET_CODE (op0) == LABEL_REF)
4476 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4477 /* On some machines, the ap reg can be 0 sometimes. */
4478 && op0 != arg_pointer_rtx
4479 #endif
4480 )
4481 return const0_rtx;
4482 break;
4483
4484 case NE:
4485 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4486 || GET_CODE (op0) == LABEL_REF)
4487 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4488 && op0 != arg_pointer_rtx
4489 #endif
4490 )
4491 return const_true_rtx;
4492 break;
4493
4494 case GEU:
4495 /* Unsigned values are never negative. */
4496 if (op1 == const0_rtx)
4497 return const_true_rtx;
4498 break;
4499
4500 case LTU:
4501 if (op1 == const0_rtx)
4502 return const0_rtx;
4503 break;
4504
4505 case LEU:
4506 /* Unsigned values are never greater than the largest
4507 unsigned value. */
4508 if (GET_CODE (op1) == CONST_INT
4509 && INTVAL (op1) == GET_MODE_MASK (mode)
4510 && INTEGRAL_MODE_P (mode))
4511 return const_true_rtx;
4512 break;
4513
4514 case GTU:
4515 if (GET_CODE (op1) == CONST_INT
4516 && INTVAL (op1) == GET_MODE_MASK (mode)
4517 && INTEGRAL_MODE_P (mode))
4518 return const0_rtx;
4519 break;
4520 }
4521
4522 return 0;
4523 }
4524
4525 /* If we reach here, EQUAL, OP0LT, OP0LTU, OP1LT, and OP1LTU are set
4526 as appropriate. */
4527 switch (code)
4528 {
4529 case EQ:
4530 return equal ? const_true_rtx : const0_rtx;
4531 case NE:
4532 return ! equal ? const_true_rtx : const0_rtx;
4533 case LT:
4534 return op0lt ? const_true_rtx : const0_rtx;
4535 case GT:
4536 return op1lt ? const_true_rtx : const0_rtx;
4537 case LTU:
4538 return op0ltu ? const_true_rtx : const0_rtx;
4539 case GTU:
4540 return op1ltu ? const_true_rtx : const0_rtx;
4541 case LE:
4542 return equal || op0lt ? const_true_rtx : const0_rtx;
4543 case GE:
4544 return equal || op1lt ? const_true_rtx : const0_rtx;
4545 case LEU:
4546 return equal || op0ltu ? const_true_rtx : const0_rtx;
4547 case GEU:
4548 return equal || op1ltu ? const_true_rtx : const0_rtx;
4549 }
4550
4551 abort ();
4552 }
4553 \f
4554 /* Simplify CODE, an operation with result mode MODE and three operands,
4555 OP0, OP1, and OP2. OP0_MODE was the mode of OP0 before it became
4556 a constant. Return 0 if no simplification is possible. */
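/* For example, (if_then_else (const_int 1) A B) folds to A and
(if_then_else (const_int 0) A B) folds to B; nothing is folded when
the condition is not a CONST_INT. */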
4557
4558 rtx
4559 simplify_ternary_operation (code, mode, op0_mode, op0, op1, op2)
4560 enum rtx_code code;
4561 enum machine_mode mode, op0_mode;
4562 rtx op0, op1, op2;
4563 {
4564 int width = GET_MODE_BITSIZE (mode);
4565
4566 /* VOIDmode means "infinite" precision. */
4567 if (width == 0)
4568 width = HOST_BITS_PER_WIDE_INT;
4569
4570 switch (code)
4571 {
4572 case SIGN_EXTRACT:
4573 case ZERO_EXTRACT:
4574 if (GET_CODE (op0) == CONST_INT
4575 && GET_CODE (op1) == CONST_INT
4576 && GET_CODE (op2) == CONST_INT
4577 && INTVAL (op1) + INTVAL (op2) <= GET_MODE_BITSIZE (op0_mode)
4578 && width <= HOST_BITS_PER_WIDE_INT)
4579 {
4580 /* Extracting a bit-field from a constant */
4581 HOST_WIDE_INT val = INTVAL (op0);
4582
4583 #if BITS_BIG_ENDIAN
4584 val >>= (GET_MODE_BITSIZE (op0_mode) - INTVAL (op2) - INTVAL (op1));
4585 #else
4586 val >>= INTVAL (op2);
4587 #endif
4588 if (HOST_BITS_PER_WIDE_INT != INTVAL (op1))
4589 {
4590 /* First zero-extend. */
4591 val &= ((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1;
4592 /* If desired, propagate sign bit. */
4593 if (code == SIGN_EXTRACT
4594 && (val & ((HOST_WIDE_INT) 1 << (INTVAL (op1) - 1))))
4595 val |= ~ (((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1);
4596 }
4597
4598 /* Clear the bits that don't belong in our mode,
4599 unless they and our sign bit are all one.
4600 So we get either a reasonable negative value or a reasonable
4601 unsigned value for this mode. */
4602 if (width < HOST_BITS_PER_WIDE_INT
4603 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4604 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4605 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4606
4607 return GEN_INT (val);
4608 }
4609 break;
4610
4611 case IF_THEN_ELSE:
4612 if (GET_CODE (op0) == CONST_INT)
4613 return op0 != const0_rtx ? op1 : op2;
4614 break;
4615
4616 default:
4617 abort ();
4618 }
4619
4620 return 0;
4621 }
4622 \f
4623 /* If X is a nontrivial arithmetic operation on an argument
4624 for which a constant value can be determined, return
4625 the result of operating on that value, as a constant.
4626 Otherwise, return X, possibly with one or more operands
4627 modified by recursive calls to this function.
4628
4629 If X is a register whose contents are known, we do NOT
4630 return those contents here. equiv_constant is called to
4631 perform that task.
4632
4633 INSN is the insn that we may be modifying. If it is 0, make a copy
4634 of X before modifying it. */
4635
4636 static rtx
4637 fold_rtx (x, insn)
4638 rtx x;
4639 rtx insn;
4640 {
4641 register enum rtx_code code;
4642 register enum machine_mode mode;
4643 register char *fmt;
4644 register int i;
4645 rtx new = 0;
4646 int copied = 0;
4647 int must_swap = 0;
4648
4649 /* Folded equivalents of first two operands of X. */
4650 rtx folded_arg0;
4651 rtx folded_arg1;
4652
4653 /* Constant equivalents of first three operands of X;
4654 0 when no such equivalent is known. */
4655 rtx const_arg0;
4656 rtx const_arg1;
4657 rtx const_arg2;
4658
4659 /* The mode of the first operand of X. We need this for sign and zero
4660 extends. */
4661 enum machine_mode mode_arg0;
4662
4663 if (x == 0)
4664 return x;
4665
4666 mode = GET_MODE (x);
4667 code = GET_CODE (x);
4668 switch (code)
4669 {
4670 case CONST:
4671 case CONST_INT:
4672 case CONST_DOUBLE:
4673 case SYMBOL_REF:
4674 case LABEL_REF:
4675 case REG:
4676 /* No use simplifying an EXPR_LIST
4677 since they are used only for lists of args
4678 in a function call's REG_EQUAL note. */
4679 case EXPR_LIST:
4680 return x;
4681
4682 #ifdef HAVE_cc0
4683 case CC0:
4684 return prev_insn_cc0;
4685 #endif
4686
4687 case PC:
4688 /* If the next insn is a CODE_LABEL followed by a jump table,
4689 PC's value is a LABEL_REF pointing to that label. That
4690 lets us fold switch statements on the VAX. */
4691 if (insn && GET_CODE (insn) == JUMP_INSN)
4692 {
4693 rtx next = next_nonnote_insn (insn);
4694
4695 if (next && GET_CODE (next) == CODE_LABEL
4696 && NEXT_INSN (next) != 0
4697 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
4698 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
4699 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
4700 return gen_rtx (LABEL_REF, Pmode, next);
4701 }
4702 break;
4703
4704 case SUBREG:
4705 /* See if we previously assigned a constant value to this SUBREG. */
4706 if ((new = lookup_as_function (x, CONST_INT)) != 0
4707 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
4708 return new;
4709
4710 /* If this is a paradoxical SUBREG, we have no idea what value the
4711 extra bits would have. However, if the operand is equivalent
4712 to a SUBREG whose operand is the same as our mode, and all the
4713 modes are within a word, we can just use the inner operand
4714 because these SUBREGs just say how to treat the register.
4715
4716 Similarly if we find an integer constant. */
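/* For instance, if (subreg:SI (reg:QI N) 0) is paradoxical and the QImode
register is known to be equivalent to (const_int 5), the lookup below
returns (const_int 5) for the whole SUBREG (assuming SImode fits in a
word). */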
4717
4718 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4719 {
4720 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
4721 struct table_elt *elt;
4722
4723 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
4724 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
4725 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
4726 imode)) != 0)
4727 for (elt = elt->first_same_value;
4728 elt; elt = elt->next_same_value)
4729 {
4730 if (CONSTANT_P (elt->exp)
4731 && GET_MODE (elt->exp) == VOIDmode)
4732 return elt->exp;
4733
4734 if (GET_CODE (elt->exp) == SUBREG
4735 && GET_MODE (SUBREG_REG (elt->exp)) == mode
4736 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
4737 return copy_rtx (SUBREG_REG (elt->exp));
4738 }
4739
4740 return x;
4741 }
4742
4743 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
4744 We might be able to if the SUBREG is extracting a single word in an
4745 integral mode or extracting the low part. */
4746
4747 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
4748 const_arg0 = equiv_constant (folded_arg0);
4749 if (const_arg0)
4750 folded_arg0 = const_arg0;
4751
4752 if (folded_arg0 != SUBREG_REG (x))
4753 {
4754 new = 0;
4755
4756 if (GET_MODE_CLASS (mode) == MODE_INT
4757 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
4758 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
4759 new = operand_subword (folded_arg0, SUBREG_WORD (x), 0,
4760 GET_MODE (SUBREG_REG (x)));
4761 if (new == 0 && subreg_lowpart_p (x))
4762 new = gen_lowpart_if_possible (mode, folded_arg0);
4763 if (new)
4764 return new;
4765 }
4766
4767 /* If this is a narrowing SUBREG and our operand is a REG, see if
4768 we can find an equivalence for REG that is an arithmetic operation
4769 in a wider mode where both operands are paradoxical SUBREGs
4770 from objects of our result mode. In that case, we couldn't report
4771 an equivalent value for that operation, since we don't know what the
4772 extra bits will be. But we can find an equivalence for this SUBREG
4773 by folding that operation in the narrow mode. This allows us to
4774 fold arithmetic in narrow modes when the machine only supports
4775 word-sized arithmetic.
4776
4777 Also look for a case where we have a SUBREG whose operand is the
4778 same as our result. If both modes are smaller than a word, we
4779 are simply interpreting a register in different modes and we
4780 can use the inner value. */
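/* Roughly: if (reg:SI r) is known to be (plus:SI (subreg:SI (reg:HI a) 0)
(const_int 12)) and the HImode value of a is itself known to be a constant,
the PLUS can be redone directly in HImode and (subreg:HI (reg:SI r) 0)
folds to the narrow constant. */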
4781
4782 if (GET_CODE (folded_arg0) == REG
4783 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
4784 && subreg_lowpart_p (x))
4785 {
4786 struct table_elt *elt;
4787
4788 /* We can use HASH here since we know that canon_hash won't be
4789 called. */
4790 elt = lookup (folded_arg0,
4791 HASH (folded_arg0, GET_MODE (folded_arg0)),
4792 GET_MODE (folded_arg0));
4793
4794 if (elt)
4795 elt = elt->first_same_value;
4796
4797 for (; elt; elt = elt->next_same_value)
4798 {
4799 enum rtx_code eltcode = GET_CODE (elt->exp);
4800
4801 /* Just check for unary and binary operations. */
4802 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
4803 && GET_CODE (elt->exp) != SIGN_EXTEND
4804 && GET_CODE (elt->exp) != ZERO_EXTEND
4805 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
4806 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
4807 {
4808 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
4809
4810 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
4811 op0 = fold_rtx (op0, NULL_RTX);
4812
4813 op0 = equiv_constant (op0);
4814 if (op0)
4815 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
4816 op0, mode);
4817 }
4818 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
4819 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
4820 && eltcode != DIV && eltcode != MOD
4821 && eltcode != UDIV && eltcode != UMOD
4822 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
4823 && eltcode != ROTATE && eltcode != ROTATERT
4824 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
4825 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
4826 == mode))
4827 || CONSTANT_P (XEXP (elt->exp, 0)))
4828 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
4829 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
4830 == mode))
4831 || CONSTANT_P (XEXP (elt->exp, 1))))
4832 {
4833 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
4834 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
4835
4836 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
4837 op0 = fold_rtx (op0, NULL_RTX);
4838
4839 if (op0)
4840 op0 = equiv_constant (op0);
4841
4842 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
4843 op1 = fold_rtx (op1, NULL_RTX);
4844
4845 if (op1)
4846 op1 = equiv_constant (op1);
4847
4848 /* If we are looking for the low SImode part of
4849 (ashift:DI c (const_int 32)), it doesn't work
4850 to compute that in SImode, because a 32-bit shift
4851 in SImode is unpredictable. We know the value is 0. */
4852 if (op0 && op1
4853 && GET_CODE (elt->exp) == ASHIFT
4854 && GET_CODE (op1) == CONST_INT
4855 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
4856 {
4857 if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
4858
4859 /* If the count fits in the inner mode's width,
4860 but exceeds the outer mode's width,
4861 the value will get truncated to 0
4862 by the subreg. */
4863 new = const0_rtx;
4864 else
4865 /* If the count exceeds even the inner mode's width,
4866 don't fold this expression. */
4867 new = 0;
4868 }
4869 else if (op0 && op1)
4870 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
4871 op0, op1);
4872 }
4873
4874 else if (GET_CODE (elt->exp) == SUBREG
4875 && GET_MODE (SUBREG_REG (elt->exp)) == mode
4876 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
4877 <= UNITS_PER_WORD)
4878 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
4879 new = copy_rtx (SUBREG_REG (elt->exp));
4880
4881 if (new)
4882 return new;
4883 }
4884 }
4885
4886 return x;
4887
4888 case NOT:
4889 case NEG:
4890 /* If we have (NOT Y), see if Y is known to be (NOT Z).
4891 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
4892 new = lookup_as_function (XEXP (x, 0), code);
4893 if (new)
4894 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
4895 break;
4896
4897 case MEM:
4898 /* If we are not actually processing an insn, don't try to find the
4899 best address. Not only don't we care, but we could modify the
4900 MEM in an invalid way since we have no insn to validate against. */
4901 if (insn != 0)
4902 find_best_addr (insn, &XEXP (x, 0));
4903
4904 {
4905 /* Even if we don't fold in the insn itself,
4906 we can safely do so here, in hopes of getting a constant. */
4907 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
4908 rtx base = 0;
4909 HOST_WIDE_INT offset = 0;
4910
4911 if (GET_CODE (addr) == REG
4912 && REGNO_QTY_VALID_P (REGNO (addr))
4913 && GET_MODE (addr) == qty_mode[reg_qty[REGNO (addr)]]
4914 && qty_const[reg_qty[REGNO (addr)]] != 0)
4915 addr = qty_const[reg_qty[REGNO (addr)]];
4916
4917 /* If address is constant, split it into a base and integer offset. */
4918 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
4919 base = addr;
4920 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
4921 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
4922 {
4923 base = XEXP (XEXP (addr, 0), 0);
4924 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
4925 }
4926 else if (GET_CODE (addr) == LO_SUM
4927 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
4928 base = XEXP (addr, 1);
4929
4930 /* If this is a constant pool reference, we can fold it into its
4931 constant to allow better value tracking. */
4932 if (base && GET_CODE (base) == SYMBOL_REF
4933 && CONSTANT_POOL_ADDRESS_P (base))
4934 {
4935 rtx constant = get_pool_constant (base);
4936 enum machine_mode const_mode = get_pool_mode (base);
4937 rtx new;
4938
4939 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
4940 constant_pool_entries_cost = COST (constant);
4941
4942 /* If we are loading the full constant, we have an equivalence. */
4943 if (offset == 0 && mode == const_mode)
4944 return constant;
4945
4946 /* If this actually isn't a constant (weird!), we can't do
4947 anything. Otherwise, handle the two most common cases:
4948 extracting a word from a multi-word constant, and extracting
4949 the low-order bits. Other cases don't seem common enough to
4950 worry about. */
4951 if (! CONSTANT_P (constant))
4952 return x;
4953
4954 if (GET_MODE_CLASS (mode) == MODE_INT
4955 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
4956 && offset % UNITS_PER_WORD == 0
4957 && (new = operand_subword (constant,
4958 offset / UNITS_PER_WORD,
4959 0, const_mode)) != 0)
4960 return new;
4961
4962 if (((BYTES_BIG_ENDIAN
4963 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
4964 || (! BYTES_BIG_ENDIAN && offset == 0))
4965 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
4966 return new;
4967 }
4968
4969 /* If this is a reference to a label at a known position in a jump
4970 table, we also know its value. */
4971 if (base && GET_CODE (base) == LABEL_REF)
4972 {
4973 rtx label = XEXP (base, 0);
4974 rtx table_insn = NEXT_INSN (label);
4975
4976 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
4977 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
4978 {
4979 rtx table = PATTERN (table_insn);
4980
4981 if (offset >= 0
4982 && (offset / GET_MODE_SIZE (GET_MODE (table))
4983 < XVECLEN (table, 0)))
4984 return XVECEXP (table, 0,
4985 offset / GET_MODE_SIZE (GET_MODE (table)));
4986 }
4987 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
4988 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
4989 {
4990 rtx table = PATTERN (table_insn);
4991
4992 if (offset >= 0
4993 && (offset / GET_MODE_SIZE (GET_MODE (table))
4994 < XVECLEN (table, 1)))
4995 {
4996 offset /= GET_MODE_SIZE (GET_MODE (table));
4997 new = gen_rtx (MINUS, Pmode, XVECEXP (table, 1, offset),
4998 XEXP (table, 0));
4999
5000 if (GET_MODE (table) != Pmode)
5001 new = gen_rtx (TRUNCATE, GET_MODE (table), new);
5002
5003 /* Indicate this is a constant. This isn't a
5004 valid form of CONST, but it will only be used
5005 to fold the next insns and then discarded, so
5006 it should be safe. */
5007 return gen_rtx (CONST, GET_MODE (new), new);
5008 }
5009 }
5010 }
5011
5012 return x;
5013 }
5014 }
5015
5016 const_arg0 = 0;
5017 const_arg1 = 0;
5018 const_arg2 = 0;
5019 mode_arg0 = VOIDmode;
5020
5021 /* Try folding our operands.
5022 Then see which ones have constant values known. */
5023
5024 fmt = GET_RTX_FORMAT (code);
5025 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5026 if (fmt[i] == 'e')
5027 {
5028 rtx arg = XEXP (x, i);
5029 rtx folded_arg = arg, const_arg = 0;
5030 enum machine_mode mode_arg = GET_MODE (arg);
5031 rtx cheap_arg, expensive_arg;
5032 rtx replacements[2];
5033 int j;
5034
5035 /* Most arguments are cheap, so handle them specially. */
5036 switch (GET_CODE (arg))
5037 {
5038 case REG:
5039 /* This is the same as calling equiv_constant; it is duplicated
5040 here for speed. */
5041 if (REGNO_QTY_VALID_P (REGNO (arg))
5042 && qty_const[reg_qty[REGNO (arg)]] != 0
5043 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != REG
5044 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != PLUS)
5045 const_arg
5046 = gen_lowpart_if_possible (GET_MODE (arg),
5047 qty_const[reg_qty[REGNO (arg)]]);
5048 break;
5049
5050 case CONST:
5051 case CONST_INT:
5052 case SYMBOL_REF:
5053 case LABEL_REF:
5054 case CONST_DOUBLE:
5055 const_arg = arg;
5056 break;
5057
5058 #ifdef HAVE_cc0
5059 case CC0:
5060 folded_arg = prev_insn_cc0;
5061 mode_arg = prev_insn_cc0_mode;
5062 const_arg = equiv_constant (folded_arg);
5063 break;
5064 #endif
5065
5066 default:
5067 folded_arg = fold_rtx (arg, insn);
5068 const_arg = equiv_constant (folded_arg);
5069 }
5070
5071 /* For the first three operands, see if the operand
5072 is constant or equivalent to a constant. */
5073 switch (i)
5074 {
5075 case 0:
5076 folded_arg0 = folded_arg;
5077 const_arg0 = const_arg;
5078 mode_arg0 = mode_arg;
5079 break;
5080 case 1:
5081 folded_arg1 = folded_arg;
5082 const_arg1 = const_arg;
5083 break;
5084 case 2:
5085 const_arg2 = const_arg;
5086 break;
5087 }
5088
5089 /* Pick the least expensive of the folded argument and an
5090 equivalent constant argument. */
5091 if (const_arg == 0 || const_arg == folded_arg
5092 || COST (const_arg) > COST (folded_arg))
5093 cheap_arg = folded_arg, expensive_arg = const_arg;
5094 else
5095 cheap_arg = const_arg, expensive_arg = folded_arg;
5096
5097 /* Try to replace the operand with the cheapest of the two
5098 possibilities. If it doesn't work and this is either of the first
5099 two operands of a commutative operation, try swapping them.
5100 If THAT fails, try the more expensive, provided it is cheaper
5101 than what is already there. */
5102
5103 if (cheap_arg == XEXP (x, i))
5104 continue;
5105
5106 if (insn == 0 && ! copied)
5107 {
5108 x = copy_rtx (x);
5109 copied = 1;
5110 }
5111
5112 replacements[0] = cheap_arg, replacements[1] = expensive_arg;
5113 for (j = 0;
5114 j < 2 && replacements[j]
5115 && COST (replacements[j]) < COST (XEXP (x, i));
5116 j++)
5117 {
5118 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
5119 break;
5120
5121 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c')
5122 {
5123 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
5124 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
5125
5126 if (apply_change_group ())
5127 {
5128 /* Swap them back to be invalid so that this loop can
5129 continue and flag them to be swapped back later. */
5130 rtx tem;
5131
5132 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
5133 XEXP (x, 1) = tem;
5134 must_swap = 1;
5135 break;
5136 }
5137 }
5138 }
5139 }
5140
5141 else if (fmt[i] == 'E')
5142 /* Don't try to fold inside of a vector of expressions.
5143 Doing nothing is harmless. */
5144 ;
5145
5146 /* If a commutative operation, place a constant integer as the second
5147 operand unless the first operand is also a constant integer. Otherwise,
5148 place any constant second unless the first operand is also a constant. */
5149
5150 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
5151 {
5152 if (must_swap || (const_arg0
5153 && (const_arg1 == 0
5154 || (GET_CODE (const_arg0) == CONST_INT
5155 && GET_CODE (const_arg1) != CONST_INT))))
5156 {
5157 register rtx tem = XEXP (x, 0);
5158
5159 if (insn == 0 && ! copied)
5160 {
5161 x = copy_rtx (x);
5162 copied = 1;
5163 }
5164
5165 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
5166 validate_change (insn, &XEXP (x, 1), tem, 1);
5167 if (apply_change_group ())
5168 {
5169 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
5170 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
5171 }
5172 }
5173 }
5174
5175 /* If X is an arithmetic operation, see if we can simplify it. */
5176
5177 switch (GET_RTX_CLASS (code))
5178 {
5179 case '1':
5180 {
5181 int is_const = 0;
5182
5183 /* We can't simplify extension ops unless we know the
5184 original mode. */
5185 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
5186 && mode_arg0 == VOIDmode)
5187 break;
5188
5189 /* If we had a CONST, strip it off and put it back later if we
5190 fold. */
5191 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
5192 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
5193
5194 new = simplify_unary_operation (code, mode,
5195 const_arg0 ? const_arg0 : folded_arg0,
5196 mode_arg0);
5197 if (new != 0 && is_const)
5198 new = gen_rtx (CONST, mode, new);
5199 }
5200 break;
5201
5202 case '<':
5203 /* See what items are actually being compared and set FOLDED_ARG[01]
5204 to those values and CODE to the actual comparison code. If any are
5205 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
5206 do anything if both operands are already known to be constant. */
5207
5208 if (const_arg0 == 0 || const_arg1 == 0)
5209 {
5210 struct table_elt *p0, *p1;
5211 rtx true = const_true_rtx, false = const0_rtx;
5212 enum machine_mode mode_arg1;
5213
5214 #ifdef FLOAT_STORE_FLAG_VALUE
5215 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5216 {
5217 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5218 mode);
5219 false = CONST0_RTX (mode);
5220 }
5221 #endif
5222
5223 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
5224 &mode_arg0, &mode_arg1);
5225 const_arg0 = equiv_constant (folded_arg0);
5226 const_arg1 = equiv_constant (folded_arg1);
5227
5228 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
5229 what kinds of things are being compared, so we can't do
5230 anything with this comparison. */
5231
5232 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
5233 break;
5234
5235 /* If we do not now have two constants being compared, see if we
5236 can nevertheless deduce some things about the comparison. */
5237 if (const_arg0 == 0 || const_arg1 == 0)
5238 {
5239 /* Is FOLDED_ARG0 the frame pointer plus a constant? Or a non-explicit
5240 constant? These aren't zero, but we don't know their sign. */
5241 if (const_arg1 == const0_rtx
5242 && (NONZERO_BASE_PLUS_P (folded_arg0)
5243 #if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
5244 come out as 0. */
5245 || GET_CODE (folded_arg0) == SYMBOL_REF
5246 #endif
5247 || GET_CODE (folded_arg0) == LABEL_REF
5248 || GET_CODE (folded_arg0) == CONST))
5249 {
5250 if (code == EQ)
5251 return false;
5252 else if (code == NE)
5253 return true;
5254 }
5255
5256 /* See if the two operands are the same. We don't do this
5257 for IEEE floating-point since we can't assume x == x
5258 because x might be a NaN. */
5259
5260 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5261 || ! FLOAT_MODE_P (mode_arg0) || flag_fast_math)
5262 && (folded_arg0 == folded_arg1
5263 || (GET_CODE (folded_arg0) == REG
5264 && GET_CODE (folded_arg1) == REG
5265 && (reg_qty[REGNO (folded_arg0)]
5266 == reg_qty[REGNO (folded_arg1)]))
5267 || ((p0 = lookup (folded_arg0,
5268 (safe_hash (folded_arg0, mode_arg0)
5269 % NBUCKETS), mode_arg0))
5270 && (p1 = lookup (folded_arg1,
5271 (safe_hash (folded_arg1, mode_arg0)
5272 % NBUCKETS), mode_arg0))
5273 && p0->first_same_value == p1->first_same_value)))
5274 return ((code == EQ || code == LE || code == GE
5275 || code == LEU || code == GEU)
5276 ? true : false);
5277
5278 /* If FOLDED_ARG0 is a register, see if the comparison we are
5279 doing now is either the same as we did before or the reverse
5280 (we only check the reverse if not floating-point). */
5281 else if (GET_CODE (folded_arg0) == REG)
5282 {
5283 int qty = reg_qty[REGNO (folded_arg0)];
5284
5285 if (REGNO_QTY_VALID_P (REGNO (folded_arg0))
5286 && (comparison_dominates_p (qty_comparison_code[qty], code)
5287 || (comparison_dominates_p (qty_comparison_code[qty],
5288 reverse_condition (code))
5289 && ! FLOAT_MODE_P (mode_arg0)))
5290 && (rtx_equal_p (qty_comparison_const[qty], folded_arg1)
5291 || (const_arg1
5292 && rtx_equal_p (qty_comparison_const[qty],
5293 const_arg1))
5294 || (GET_CODE (folded_arg1) == REG
5295 && (reg_qty[REGNO (folded_arg1)]
5296 == qty_comparison_qty[qty]))))
5297 return (comparison_dominates_p (qty_comparison_code[qty],
5298 code)
5299 ? true : false);
5300 }
5301 }
5302 }
5303
5304 /* If we are comparing against zero, see if the first operand is
5305 equivalent to an IOR with a constant. If so, we may be able to
5306 determine the result of this comparison. */
5307
5308 if (const_arg1 == const0_rtx)
5309 {
5310 rtx y = lookup_as_function (folded_arg0, IOR);
5311 rtx inner_const;
5312
5313 if (y != 0
5314 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
5315 && GET_CODE (inner_const) == CONST_INT
5316 && INTVAL (inner_const) != 0)
5317 {
5318 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
5319 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
5320 && (INTVAL (inner_const)
5321 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
5322 rtx true = const_true_rtx, false = const0_rtx;
5323
5324 #ifdef FLOAT_STORE_FLAG_VALUE
5325 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5326 {
5327 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5328 mode);
5329 false = CONST0_RTX (mode);
5330 }
5331 #endif
5332
5333 switch (code)
5334 {
5335 case EQ:
5336 return false;
5337 case NE:
5338 return true;
5339 case LT: case LE:
5340 if (has_sign)
5341 return true;
5342 break;
5343 case GT: case GE:
5344 if (has_sign)
5345 return false;
5346 break;
5347 }
5348 }
5349 }
5350
5351 new = simplify_relational_operation (code, mode_arg0,
5352 const_arg0 ? const_arg0 : folded_arg0,
5353 const_arg1 ? const_arg1 : folded_arg1);
5354 #ifdef FLOAT_STORE_FLAG_VALUE
5355 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
5356 new = ((new == const0_rtx) ? CONST0_RTX (mode)
5357 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE, mode));
5358 #endif
5359 break;
5360
5361 case '2':
5362 case 'c':
5363 switch (code)
5364 {
5365 case PLUS:
5366 /* If the second operand is a LABEL_REF, see if the first is a MINUS
5367 with that LABEL_REF as its second operand. If so, the result is
5368 the first operand of that MINUS. This handles switches with an
5369 ADDR_DIFF_VEC table. */
5370 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
5371 {
5372 rtx y
5373 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
5374 : lookup_as_function (folded_arg0, MINUS);
5375
5376 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5377 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
5378 return XEXP (y, 0);
5379
5380 /* Now try for a CONST of a MINUS like the above. */
5381 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
5382 : lookup_as_function (folded_arg0, CONST))) != 0
5383 && GET_CODE (XEXP (y, 0)) == MINUS
5384 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5385 && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg1, 0))
5386 return XEXP (XEXP (y, 0), 0);
5387 }
5388
5389 /* Likewise if the operands are in the other order. */
5390 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
5391 {
5392 rtx y
5393 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
5394 : lookup_as_function (folded_arg1, MINUS);
5395
5396 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5397 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
5398 return XEXP (y, 0);
5399
5400 /* Now try for a CONST of a MINUS like the above. */
5401 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
5402 : lookup_as_function (folded_arg1, CONST))) != 0
5403 && GET_CODE (XEXP (y, 0)) == MINUS
5404 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5405 && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg0, 0))
5406 return XEXP (XEXP (y, 0), 0);
5407 }
5408
5409 /* If second operand is a register equivalent to a negative
5410 CONST_INT, see if we can find a register equivalent to the
5411 positive constant. Make a MINUS if so. Don't do this for
5412 a negative constant since we might then alternate between
5413 choosing positive and negative constants. Having the positive
5414 constant previously-used is the more common case. */
5415 if (const_arg1 && GET_CODE (const_arg1) == CONST_INT
5416 && INTVAL (const_arg1) < 0 && GET_CODE (folded_arg1) == REG)
5417 {
5418 rtx new_const = GEN_INT (- INTVAL (const_arg1));
5419 struct table_elt *p
5420 = lookup (new_const, safe_hash (new_const, mode) % NBUCKETS,
5421 mode);
5422
5423 if (p)
5424 for (p = p->first_same_value; p; p = p->next_same_value)
5425 if (GET_CODE (p->exp) == REG)
5426 return cse_gen_binary (MINUS, mode, folded_arg0,
5427 canon_reg (p->exp, NULL_RTX));
5428 }
5429 goto from_plus;
5430
5431 case MINUS:
5432 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
5433 If so, produce (PLUS Z C2-C). */
5434 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
5435 {
5436 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
5437 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
5438 return fold_rtx (plus_constant (copy_rtx (y),
5439 -INTVAL (const_arg1)),
5440 NULL_RTX);
5441 }
5442
5443 /* ... fall through ... */
5444
5445 from_plus:
5446 case SMIN: case SMAX: case UMIN: case UMAX:
5447 case IOR: case AND: case XOR:
5448 case MULT: case DIV: case UDIV:
5449 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
5450 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
5451 is known to be of similar form, we may be able to replace the
5452 operation with a combined operation. This may eliminate the
5453 intermediate operation if every use is simplified in this way.
5454 Note that the similar optimization done by combine.c only works
5455 if the intermediate operation's result has only one reference. */
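	  /* For example, if REG is known to hold (mult X (const_int 4)) and
	     we are folding (mult REG (const_int 2)), the constants combine
	     and the result is (mult X (const_int 8)), bypassing the
	     intermediate register entirely.  */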
5456
5457 if (GET_CODE (folded_arg0) == REG
5458 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
5459 {
5460 int is_shift
5461 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
5462 rtx y = lookup_as_function (folded_arg0, code);
5463 rtx inner_const;
5464 enum rtx_code associate_code;
5465 rtx new_const;
5466
5467 if (y == 0
5468 || 0 == (inner_const
5469 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
5470 || GET_CODE (inner_const) != CONST_INT
5471 /* If we have compiled a statement like
5472 "if (x == (x & mask1))", and now are looking at
5473 "x & mask2", we will have a case where the first operand
5474 of Y is the same as our first operand. Unless we detect
5475 this case, an infinite loop will result. */
5476 || XEXP (y, 0) == folded_arg0)
5477 break;
5478
5479 /* Don't associate these operations if they are a PLUS with the
5480 same constant and it is a power of two. These might be doable
5481 with a pre- or post-increment. Similarly for two subtracts of
5482 	     identical powers of two with post-decrement.  */
5483
5484 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
5485 && (0
5486 #if defined(HAVE_PRE_INCREMENT) || defined(HAVE_POST_INCREMENT)
5487 || exact_log2 (INTVAL (const_arg1)) >= 0
5488 #endif
5489 #if defined(HAVE_PRE_DECREMENT) || defined(HAVE_POST_DECREMENT)
5490 || exact_log2 (- INTVAL (const_arg1)) >= 0
5491 #endif
5492 ))
5493 break;
5494
5495 /* Compute the code used to compose the constants. For example,
5496 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
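	    /* Likewise, shifting by C1 and then by C2 shifts by C1 + C2, so
	       for the shift codes (and for PLUS and MINUS) the constants are
	       combined with PLUS rather than with the operation itself.  */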
5497
5498 associate_code
5499 = (code == MULT || code == DIV || code == UDIV ? MULT
5500 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
5501
5502 new_const = simplify_binary_operation (associate_code, mode,
5503 const_arg1, inner_const);
5504
5505 if (new_const == 0)
5506 break;
5507
5508 /* If we are associating shift operations, don't let this
5509 produce a shift of the size of the object or larger.
5510 This could occur when we follow a sign-extend by a right
5511 shift on a machine that does a sign-extend as a pair
5512 of shifts. */
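	    /* For example, if REG is known to be (ashiftrt Y (const_int 24))
	       (the second half of a sign-extension done as a shift pair) and
	       we are folding (ashiftrt REG (const_int 8)) in SImode, the
	       combined count would be 32.  For ASHIFTRT we use 31 instead,
	       which still yields just the sign bit; other shift codes are
	       simply not combined.  */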
5513
5514 if (is_shift && GET_CODE (new_const) == CONST_INT
5515 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
5516 {
5517 /* As an exception, we can turn an ASHIFTRT of this
5518 form into a shift of the number of bits - 1. */
5519 if (code == ASHIFTRT)
5520 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
5521 else
5522 break;
5523 }
5524
5525 y = copy_rtx (XEXP (y, 0));
5526
5527 /* If Y contains our first operand (the most common way this
5528 	     can happen is if Y is a MEM), we would go into an infinite
5529 loop if we tried to fold it. So don't in that case. */
5530
5531 if (! reg_mentioned_p (folded_arg0, y))
5532 y = fold_rtx (y, insn);
5533
5534 return cse_gen_binary (code, mode, y, new_const);
5535 }
5536 }
5537
5538 new = simplify_binary_operation (code, mode,
5539 const_arg0 ? const_arg0 : folded_arg0,
5540 const_arg1 ? const_arg1 : folded_arg1);
5541 break;
5542
5543 case 'o':
5544 /* (lo_sum (high X) X) is simply X. */
5545 if (code == LO_SUM && const_arg0 != 0
5546 && GET_CODE (const_arg0) == HIGH
5547 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
5548 return const_arg1;
5549 break;
5550
5551 case '3':
5552 case 'b':
5553 new = simplify_ternary_operation (code, mode, mode_arg0,
5554 const_arg0 ? const_arg0 : folded_arg0,
5555 const_arg1 ? const_arg1 : folded_arg1,
5556 const_arg2 ? const_arg2 : XEXP (x, 2));
5557 break;
5558 }
5559
5560 return new ? new : x;
5561 }
5562 \f
5563 /* Return a constant value currently equivalent to X.
5564 Return 0 if we don't know one. */
5565
5566 static rtx
5567 equiv_constant (x)
5568 rtx x;
5569 {
5570 if (GET_CODE (x) == REG
5571 && REGNO_QTY_VALID_P (REGNO (x))
5572 && qty_const[reg_qty[REGNO (x)]])
5573 x = gen_lowpart_if_possible (GET_MODE (x), qty_const[reg_qty[REGNO (x)]]);
5574
5575 if (x != 0 && CONSTANT_P (x))
5576 return x;
5577
5578 /* If X is a MEM, try to fold it outside the context of any insn to see if
5579 it might be equivalent to a constant. That handles the case where it
5580 is a constant-pool reference. Then try to look it up in the hash table
5581 in case it is something whose value we have seen before. */
5582
5583 if (GET_CODE (x) == MEM)
5584 {
5585 struct table_elt *elt;
5586
5587 x = fold_rtx (x, NULL_RTX);
5588 if (CONSTANT_P (x))
5589 return x;
5590
5591 elt = lookup (x, safe_hash (x, GET_MODE (x)) % NBUCKETS, GET_MODE (x));
5592 if (elt == 0)
5593 return 0;
5594
5595 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
5596 if (elt->is_const && CONSTANT_P (elt->exp))
5597 return elt->exp;
5598 }
5599
5600 return 0;
5601 }
5602 \f
5603 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
5604 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
5605 least-significant part of X.
5606 MODE specifies how big a part of X to return.
5607
5608 If the requested operation cannot be done, 0 is returned.
5609
5610 This is similar to gen_lowpart in emit-rtl.c. */
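/* For example (assuming 4-byte words), asking for the QImode low part of an
   SImode MEM yields a QImode MEM at the original address plus 3 on a
   big-endian target, or plus 0 on a little-endian one, provided the adjusted
   address is valid; otherwise 0 is returned.  */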
5611
5612 rtx
5613 gen_lowpart_if_possible (mode, x)
5614 enum machine_mode mode;
5615 register rtx x;
5616 {
5617 rtx result = gen_lowpart_common (mode, x);
5618
5619 if (result)
5620 return result;
5621 else if (GET_CODE (x) == MEM)
5622 {
5623 /* This is the only other case we handle. */
5624 register int offset = 0;
5625 rtx new;
5626
5627 #if WORDS_BIG_ENDIAN
5628 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
5629 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
5630 #endif
5631 #if BYTES_BIG_ENDIAN
5632 /* Adjust the address so that the address-after-the-data
5633 is unchanged. */
5634 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
5635 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
5636 #endif
5637 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
5638 if (! memory_address_p (mode, XEXP (new, 0)))
5639 return 0;
5640 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
5641 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
5642 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
5643 return new;
5644 }
5645 else
5646 return 0;
5647 }
5648 \f
5649 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
5650 branch. It will be zero if not.
5651
5652 In certain cases, this can cause us to add an equivalence. For example,
5653 if we are following the taken case of
5654 if (i == 2)
5655 	we can add the fact that `i' and `2' are now equivalent.
5656
5657 In any case, we can record that this comparison was passed. If the same
5658 comparison is seen later, we will know its value. */
5659
5660 static void
5661 record_jump_equiv (insn, taken)
5662 rtx insn;
5663 int taken;
5664 {
5665 int cond_known_true;
5666 rtx op0, op1;
5667 enum machine_mode mode, mode0, mode1;
5668 int reversed_nonequality = 0;
5669 enum rtx_code code;
5670
5671 /* Ensure this is the right kind of insn. */
5672 if (! condjump_p (insn) || simplejump_p (insn))
5673 return;
5674
5675 /* See if this jump condition is known true or false. */
5676 if (taken)
5677 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 2) == pc_rtx);
5678 else
5679 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx);
5680
5681 /* Get the type of comparison being done and the operands being compared.
5682 If we had to reverse a non-equality condition, record that fact so we
5683 know that it isn't valid for floating-point. */
5684 code = GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 0));
5685 op0 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 0), insn);
5686 op1 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 1), insn);
5687
5688 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
5689 if (! cond_known_true)
5690 {
5691 reversed_nonequality = (code != EQ && code != NE);
5692 code = reverse_condition (code);
5693 }
5694
5695 /* The mode is the mode of the non-constant. */
5696 mode = mode0;
5697 if (mode1 != VOIDmode)
5698 mode = mode1;
5699
5700 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
5701 }
5702
5703 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
5704 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
5705 Make any useful entries we can with that information. Called from
5706 above function and called recursively. */
5707
5708 static void
5709 record_jump_cond (code, mode, op0, op1, reversed_nonequality)
5710 enum rtx_code code;
5711 enum machine_mode mode;
5712 rtx op0, op1;
5713 int reversed_nonequality;
5714 {
5715 unsigned op0_hash, op1_hash;
5716 int op0_in_memory, op0_in_struct, op1_in_memory, op1_in_struct;
5717 struct table_elt *op0_elt, *op1_elt;
5718
5719 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
5720 we know that they are also equal in the smaller mode (this is also
5721 true for all smaller modes whether or not there is a SUBREG, but
5722 	     is not worth testing for with no SUBREG).  */
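  /* For example, if (subreg:SI (reg:HI 65) 0) is known equal to (reg:SI 66),
     then (reg:HI 65) is also equal to the HImode low part of (reg:SI 66);
     that narrower equivalence is recorded by the recursive call below.  */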
5723
5724 /* Note that GET_MODE (op0) may not equal MODE. */
5725 if (code == EQ && GET_CODE (op0) == SUBREG
5726 && (GET_MODE_SIZE (GET_MODE (op0))
5727 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
5728 {
5729 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
5730 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
5731
5732 record_jump_cond (code, mode, SUBREG_REG (op0),
5733 tem ? tem : gen_rtx (SUBREG, inner_mode, op1, 0),
5734 reversed_nonequality);
5735 }
5736
5737 if (code == EQ && GET_CODE (op1) == SUBREG
5738 && (GET_MODE_SIZE (GET_MODE (op1))
5739 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
5740 {
5741 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
5742 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
5743
5744 record_jump_cond (code, mode, SUBREG_REG (op1),
5745 tem ? tem : gen_rtx (SUBREG, inner_mode, op0, 0),
5746 reversed_nonequality);
5747 }
5748
5749 /* Similarly, if this is an NE comparison, and either is a SUBREG
5750 making a smaller mode, we know the whole thing is also NE. */
5751
5752 /* Note that GET_MODE (op0) may not equal MODE;
5753 if we test MODE instead, we can get an infinite recursion
5754 alternating between two modes each wider than MODE. */
5755
5756 if (code == NE && GET_CODE (op0) == SUBREG
5757 && subreg_lowpart_p (op0)
5758 && (GET_MODE_SIZE (GET_MODE (op0))
5759 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
5760 {
5761 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
5762 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
5763
5764 record_jump_cond (code, mode, SUBREG_REG (op0),
5765 tem ? tem : gen_rtx (SUBREG, inner_mode, op1, 0),
5766 reversed_nonequality);
5767 }
5768
5769 if (code == NE && GET_CODE (op1) == SUBREG
5770 && subreg_lowpart_p (op1)
5771 && (GET_MODE_SIZE (GET_MODE (op1))
5772 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
5773 {
5774 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
5775 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
5776
5777 record_jump_cond (code, mode, SUBREG_REG (op1),
5778 tem ? tem : gen_rtx (SUBREG, inner_mode, op0, 0),
5779 reversed_nonequality);
5780 }
5781
5782 /* Hash both operands. */
5783
5784 do_not_record = 0;
5785 hash_arg_in_memory = 0;
5786 hash_arg_in_struct = 0;
5787 op0_hash = HASH (op0, mode);
5788 op0_in_memory = hash_arg_in_memory;
5789 op0_in_struct = hash_arg_in_struct;
5790
5791 if (do_not_record)
5792 return;
5793
5794 do_not_record = 0;
5795 hash_arg_in_memory = 0;
5796 hash_arg_in_struct = 0;
5797 op1_hash = HASH (op1, mode);
5798 op1_in_memory = hash_arg_in_memory;
5799 op1_in_struct = hash_arg_in_struct;
5800
5801 if (do_not_record)
5802 return;
5803
5804 /* Look up both operands. */
5805 op0_elt = lookup (op0, op0_hash, mode);
5806 op1_elt = lookup (op1, op1_hash, mode);
5807
5808 /* If both operands are already equivalent or if they are not in the
5809 table but are identical, do nothing. */
5810 if ((op0_elt != 0 && op1_elt != 0
5811 && op0_elt->first_same_value == op1_elt->first_same_value)
5812 || op0 == op1 || rtx_equal_p (op0, op1))
5813 return;
5814
5815 /* If we aren't setting two things equal all we can do is save this
5816 comparison. Similarly if this is floating-point. In the latter
5817 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
5818 If we record the equality, we might inadvertently delete code
5819 whose intent was to change -0 to +0. */
5820
5821 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
5822 {
5823 /* If we reversed a floating-point comparison, if OP0 is not a
5824 	 register, or if OP1 is neither a register nor a constant, we can't
5825 do anything. */
5826
5827 if (GET_CODE (op1) != REG)
5828 op1 = equiv_constant (op1);
5829
5830 if ((reversed_nonequality && FLOAT_MODE_P (mode))
5831 || GET_CODE (op0) != REG || op1 == 0)
5832 return;
5833
5834 /* Put OP0 in the hash table if it isn't already. This gives it a
5835 new quantity number. */
5836 if (op0_elt == 0)
5837 {
5838 if (insert_regs (op0, NULL_PTR, 0))
5839 {
5840 rehash_using_reg (op0);
5841 op0_hash = HASH (op0, mode);
5842
5843 /* If OP0 is contained in OP1, this changes its hash code
5844 as well. Faster to rehash than to check, except
5845 for the simple case of a constant. */
5846 if (! CONSTANT_P (op1))
5847 	      op1_hash = HASH (op1, mode);
5848 }
5849
5850 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
5851 op0_elt->in_memory = op0_in_memory;
5852 op0_elt->in_struct = op0_in_struct;
5853 }
5854
5855 qty_comparison_code[reg_qty[REGNO (op0)]] = code;
5856 if (GET_CODE (op1) == REG)
5857 {
5858 /* Look it up again--in case op0 and op1 are the same. */
5859 op1_elt = lookup (op1, op1_hash, mode);
5860
5861 /* Put OP1 in the hash table so it gets a new quantity number. */
5862 if (op1_elt == 0)
5863 {
5864 if (insert_regs (op1, NULL_PTR, 0))
5865 {
5866 rehash_using_reg (op1);
5867 op1_hash = HASH (op1, mode);
5868 }
5869
5870 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
5871 op1_elt->in_memory = op1_in_memory;
5872 op1_elt->in_struct = op1_in_struct;
5873 }
5874
5875 qty_comparison_qty[reg_qty[REGNO (op0)]] = reg_qty[REGNO (op1)];
5876 qty_comparison_const[reg_qty[REGNO (op0)]] = 0;
5877 }
5878 else
5879 {
5880 qty_comparison_qty[reg_qty[REGNO (op0)]] = -1;
5881 qty_comparison_const[reg_qty[REGNO (op0)]] = op1;
5882 }
5883
5884 return;
5885 }
5886
5887 /* If either side is still missing an equivalence, make it now,
5888 then merge the equivalences. */
5889
5890 if (op0_elt == 0)
5891 {
5892 if (insert_regs (op0, NULL_PTR, 0))
5893 {
5894 rehash_using_reg (op0);
5895 op0_hash = HASH (op0, mode);
5896 }
5897
5898 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
5899 op0_elt->in_memory = op0_in_memory;
5900 op0_elt->in_struct = op0_in_struct;
5901 }
5902
5903 if (op1_elt == 0)
5904 {
5905 if (insert_regs (op1, NULL_PTR, 0))
5906 {
5907 rehash_using_reg (op1);
5908 op1_hash = HASH (op1, mode);
5909 }
5910
5911 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
5912 op1_elt->in_memory = op1_in_memory;
5913 op1_elt->in_struct = op1_in_struct;
5914 }
5915
5916 merge_equiv_classes (op0_elt, op1_elt);
5917 last_jump_equiv_class = op0_elt;
5918 }
5919 \f
5920 /* CSE processing for one instruction.
5921 First simplify sources and addresses of all assignments
5922    in the instruction, using previously-computed equivalent values.
5923 Then install the new sources and destinations in the table
5924 of available values.
5925
5926 If IN_LIBCALL_BLOCK is nonzero, don't record any equivalence made in
5927 the insn. */
5928
5929 /* Data on one SET contained in the instruction. */
5930
5931 struct set
5932 {
5933 /* The SET rtx itself. */
5934 rtx rtl;
5935 /* The SET_SRC of the rtx (the original value, if it is changing). */
5936 rtx src;
5937 /* The hash-table element for the SET_SRC of the SET. */
5938 struct table_elt *src_elt;
5939 /* Hash value for the SET_SRC. */
5940 unsigned src_hash;
5941 /* Hash value for the SET_DEST. */
5942 unsigned dest_hash;
5943 /* The SET_DEST, with SUBREG, etc., stripped. */
5944 rtx inner_dest;
5945 /* Place where the pointer to the INNER_DEST was found. */
5946 rtx *inner_dest_loc;
5947 /* Nonzero if the SET_SRC is in memory. */
5948 char src_in_memory;
5949 /* Nonzero if the SET_SRC is in a structure. */
5950 char src_in_struct;
5951 /* Nonzero if the SET_SRC contains something
5952 whose value cannot be predicted and understood. */
5953 char src_volatile;
5954 /* Original machine mode, in case it becomes a CONST_INT. */
5955 enum machine_mode mode;
5956 /* A constant equivalent for SET_SRC, if any. */
5957 rtx src_const;
5958 /* Hash value of constant equivalent for SET_SRC. */
5959 unsigned src_const_hash;
5960 /* Table entry for constant equivalent for SET_SRC, if any. */
5961 struct table_elt *src_const_elt;
5962 };
5963
5964 static void
5965 cse_insn (insn, in_libcall_block)
5966 rtx insn;
5967 int in_libcall_block;
5968 {
5969 register rtx x = PATTERN (insn);
5970 register int i;
5971 rtx tem;
5972 register int n_sets = 0;
5973
5974 /* Records what this insn does to set CC0. */
5975 rtx this_insn_cc0 = 0;
5976 enum machine_mode this_insn_cc0_mode;
5977 struct write_data writes_memory;
5978 static struct write_data init = {0, 0, 0, 0};
5979
5980 rtx src_eqv = 0;
5981 struct table_elt *src_eqv_elt = 0;
5982 int src_eqv_volatile;
5983 int src_eqv_in_memory;
5984 int src_eqv_in_struct;
5985 unsigned src_eqv_hash;
5986
5987 struct set *sets;
5988
5989 this_insn = insn;
5990 writes_memory = init;
5991
5992 /* Find all the SETs and CLOBBERs in this instruction.
5993 Record all the SETs in the array `set' and count them.
5994 Also determine whether there is a CLOBBER that invalidates
5995 all memory references, or all references at varying addresses. */
5996
5997 if (GET_CODE (insn) == CALL_INSN)
5998 {
5999 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6000 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
6001 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
6002 }
6003
6004 if (GET_CODE (x) == SET)
6005 {
6006 sets = (struct set *) alloca (sizeof (struct set));
6007 sets[0].rtl = x;
6008
6009 /* Ignore SETs that are unconditional jumps.
6010 They never need cse processing, so this does not hurt.
6011 The reason is not efficiency but rather
6012 so that we can test at the end for instructions
6013 that have been simplified to unconditional jumps
6014 and not be misled by unchanged instructions
6015 that were unconditional jumps to begin with. */
6016 if (SET_DEST (x) == pc_rtx
6017 && GET_CODE (SET_SRC (x)) == LABEL_REF)
6018 ;
6019
6020 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
6021 The hard function value register is used only once, to copy to
6022 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
6023 Ensure we invalidate the destination register. On the 80386 no
6024 other code would invalidate it since it is a fixed_reg.
6025 We need not check the return of apply_change_group; see canon_reg. */
6026
6027 else if (GET_CODE (SET_SRC (x)) == CALL)
6028 {
6029 canon_reg (SET_SRC (x), insn);
6030 apply_change_group ();
6031 fold_rtx (SET_SRC (x), insn);
6032 invalidate (SET_DEST (x), VOIDmode);
6033 }
6034 else
6035 n_sets = 1;
6036 }
6037 else if (GET_CODE (x) == PARALLEL)
6038 {
6039 register int lim = XVECLEN (x, 0);
6040
6041 sets = (struct set *) alloca (lim * sizeof (struct set));
6042
6043 /* Find all regs explicitly clobbered in this insn,
6044 and ensure they are not replaced with any other regs
6045 elsewhere in this insn.
6046 When a reg that is clobbered is also used for input,
6047 we should presume that that is for a reason,
6048 and we should not substitute some other register
6049 which is not supposed to be clobbered.
6050 Therefore, this loop cannot be merged into the one below
6051 because a CALL may precede a CLOBBER and refer to the
6052 value clobbered. We must not let a canonicalization do
6053 anything in that case. */
6054 for (i = 0; i < lim; i++)
6055 {
6056 register rtx y = XVECEXP (x, 0, i);
6057 if (GET_CODE (y) == CLOBBER)
6058 {
6059 rtx clobbered = XEXP (y, 0);
6060
6061 if (GET_CODE (clobbered) == REG
6062 || GET_CODE (clobbered) == SUBREG)
6063 invalidate (clobbered, VOIDmode);
6064 else if (GET_CODE (clobbered) == STRICT_LOW_PART
6065 || GET_CODE (clobbered) == ZERO_EXTRACT)
6066 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
6067 }
6068 }
6069
6070 for (i = 0; i < lim; i++)
6071 {
6072 register rtx y = XVECEXP (x, 0, i);
6073 if (GET_CODE (y) == SET)
6074 {
6075 /* As above, we ignore unconditional jumps and call-insns and
6076 ignore the result of apply_change_group. */
6077 if (GET_CODE (SET_SRC (y)) == CALL)
6078 {
6079 canon_reg (SET_SRC (y), insn);
6080 apply_change_group ();
6081 fold_rtx (SET_SRC (y), insn);
6082 invalidate (SET_DEST (y), VOIDmode);
6083 }
6084 else if (SET_DEST (y) == pc_rtx
6085 && GET_CODE (SET_SRC (y)) == LABEL_REF)
6086 ;
6087 else
6088 sets[n_sets++].rtl = y;
6089 }
6090 else if (GET_CODE (y) == CLOBBER)
6091 {
6092 /* If we clobber memory, take note of that,
6093 and canon the address.
6094 This does nothing when a register is clobbered
6095 because we have already invalidated the reg. */
6096 if (GET_CODE (XEXP (y, 0)) == MEM)
6097 {
6098 canon_reg (XEXP (y, 0), NULL_RTX);
6099 note_mem_written (XEXP (y, 0), &writes_memory);
6100 }
6101 }
6102 else if (GET_CODE (y) == USE
6103 && ! (GET_CODE (XEXP (y, 0)) == REG
6104 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
6105 canon_reg (y, NULL_RTX);
6106 else if (GET_CODE (y) == CALL)
6107 {
6108 /* The result of apply_change_group can be ignored; see
6109 canon_reg. */
6110 canon_reg (y, insn);
6111 apply_change_group ();
6112 fold_rtx (y, insn);
6113 }
6114 }
6115 }
6116 else if (GET_CODE (x) == CLOBBER)
6117 {
6118 if (GET_CODE (XEXP (x, 0)) == MEM)
6119 {
6120 canon_reg (XEXP (x, 0), NULL_RTX);
6121 note_mem_written (XEXP (x, 0), &writes_memory);
6122 }
6123 }
6124
6125 /* Canonicalize a USE of a pseudo register or memory location. */
6126 else if (GET_CODE (x) == USE
6127 && ! (GET_CODE (XEXP (x, 0)) == REG
6128 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
6129 canon_reg (XEXP (x, 0), NULL_RTX);
6130 else if (GET_CODE (x) == CALL)
6131 {
6132 /* The result of apply_change_group can be ignored; see canon_reg. */
6133 canon_reg (x, insn);
6134 apply_change_group ();
6135 fold_rtx (x, insn);
6136 }
6137
6138 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
6139 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
6140 is handled specially for this case, and if it isn't set, then there will
6141    be no equivalence for the destination.  */
6142 if (n_sets == 1 && REG_NOTES (insn) != 0
6143 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
6144 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
6145 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
6146 src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
6147
6148 /* Canonicalize sources and addresses of destinations.
6149 We do this in a separate pass to avoid problems when a MATCH_DUP is
6150 present in the insn pattern. In that case, we want to ensure that
6151 we don't break the duplicate nature of the pattern. So we will replace
6152 both operands at the same time. Otherwise, we would fail to find an
6153 equivalent substitution in the loop calling validate_change below.
6154
6155 We used to suppress canonicalization of DEST if it appears in SRC,
6156 but we don't do this any more. */
6157
6158 for (i = 0; i < n_sets; i++)
6159 {
6160 rtx dest = SET_DEST (sets[i].rtl);
6161 rtx src = SET_SRC (sets[i].rtl);
6162 rtx new = canon_reg (src, insn);
6163
6164 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
6165 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
6166 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
6167 || insn_n_dups[recog_memoized (insn)] > 0)
6168 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
6169 else
6170 SET_SRC (sets[i].rtl) = new;
6171
6172 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
6173 {
6174 validate_change (insn, &XEXP (dest, 1),
6175 canon_reg (XEXP (dest, 1), insn), 1);
6176 validate_change (insn, &XEXP (dest, 2),
6177 canon_reg (XEXP (dest, 2), insn), 1);
6178 }
6179
6180 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
6181 || GET_CODE (dest) == ZERO_EXTRACT
6182 || GET_CODE (dest) == SIGN_EXTRACT)
6183 dest = XEXP (dest, 0);
6184
6185 if (GET_CODE (dest) == MEM)
6186 canon_reg (dest, insn);
6187 }
6188
6189 /* Now that we have done all the replacements, we can apply the change
6190 group and see if they all work. Note that this will cause some
6191 canonicalizations that would have worked individually not to be applied
6192 because some other canonicalization didn't work, but this should not
6193 occur often.
6194
6195 The result of apply_change_group can be ignored; see canon_reg. */
6196
6197 apply_change_group ();
6198
6199 /* Set sets[i].src_elt to the class each source belongs to.
6200 Detect assignments from or to volatile things
6201      and set sets[i] to zero so they will be ignored
6202 in the rest of this function.
6203
6204 Nothing in this loop changes the hash table or the register chains. */
6205
6206 for (i = 0; i < n_sets; i++)
6207 {
6208 register rtx src, dest;
6209 register rtx src_folded;
6210 register struct table_elt *elt = 0, *p;
6211 enum machine_mode mode;
6212 rtx src_eqv_here;
6213 rtx src_const = 0;
6214 rtx src_related = 0;
6215 struct table_elt *src_const_elt = 0;
6216 int src_cost = 10000, src_eqv_cost = 10000, src_folded_cost = 10000;
6217 int src_related_cost = 10000, src_elt_cost = 10000;
6218 	/* Set non-zero if we need to call force_const_mem on the
6219 contents of src_folded before using it. */
6220 int src_folded_force_flag = 0;
6221
6222 dest = SET_DEST (sets[i].rtl);
6223 src = SET_SRC (sets[i].rtl);
6224
6225 /* If SRC is a constant that has no machine mode,
6226 hash it with the destination's machine mode.
6227 This way we can keep different modes separate. */
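      /* For example, (const_int 5) carries no mode of its own; when stored
	 into an SImode register it is hashed as SImode, and when stored into
	 a DImode register it is hashed as DImode, so the two uses land in
	 separate equivalence classes.  */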
6228
6229 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
6230 sets[i].mode = mode;
6231
6232 if (src_eqv)
6233 {
6234 enum machine_mode eqvmode = mode;
6235 if (GET_CODE (dest) == STRICT_LOW_PART)
6236 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
6237 do_not_record = 0;
6238 hash_arg_in_memory = 0;
6239 hash_arg_in_struct = 0;
6240 src_eqv = fold_rtx (src_eqv, insn);
6241 src_eqv_hash = HASH (src_eqv, eqvmode);
6242
6243 /* Find the equivalence class for the equivalent expression. */
6244
6245 if (!do_not_record)
6246 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
6247
6248 src_eqv_volatile = do_not_record;
6249 src_eqv_in_memory = hash_arg_in_memory;
6250 src_eqv_in_struct = hash_arg_in_struct;
6251 }
6252
6253 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
6254 value of the INNER register, not the destination. So it is not
6255 a legal substitution for the source. But save it for later. */
6256 if (GET_CODE (dest) == STRICT_LOW_PART)
6257 src_eqv_here = 0;
6258 else
6259 src_eqv_here = src_eqv;
6260
6261       /* Simplify any foldable subexpressions in SRC.  Then get the fully-
6262 simplified result, which may not necessarily be valid. */
6263 src_folded = fold_rtx (src, insn);
6264
6265 /* If storing a constant in a bitfield, pre-truncate the constant
6266 so we will be able to record it later. */
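      /* For instance, storing (const_int 0x1ff) into an 8-bit-wide
	 ZERO_EXTRACT leaves only 0xff in the field, so src_folded becomes
	 (const_int 255) here.  */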
6267 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
6268 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
6269 {
6270 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
6271
6272 if (GET_CODE (src) == CONST_INT
6273 && GET_CODE (width) == CONST_INT
6274 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
6275 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
6276 src_folded
6277 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
6278 << INTVAL (width)) - 1));
6279 }
6280
6281 /* Compute SRC's hash code, and also notice if it
6282 should not be recorded at all. In that case,
6283 prevent any further processing of this assignment. */
6284 do_not_record = 0;
6285 hash_arg_in_memory = 0;
6286 hash_arg_in_struct = 0;
6287
6288 sets[i].src = src;
6289 sets[i].src_hash = HASH (src, mode);
6290 sets[i].src_volatile = do_not_record;
6291 sets[i].src_in_memory = hash_arg_in_memory;
6292 sets[i].src_in_struct = hash_arg_in_struct;
6293
6294 #if 0
6295 /* It is no longer clear why we used to do this, but it doesn't
6296 appear to still be needed. So let's try without it since this
6297 code hurts cse'ing widened ops. */
6298 /* If source is a perverse subreg (such as QI treated as an SI),
6299 treat it as volatile. It may do the work of an SI in one context
6300 where the extra bits are not being used, but cannot replace an SI
6301 in general. */
6302 if (GET_CODE (src) == SUBREG
6303 && (GET_MODE_SIZE (GET_MODE (src))
6304 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
6305 sets[i].src_volatile = 1;
6306 #endif
6307
6308 /* Locate all possible equivalent forms for SRC. Try to replace
6309 SRC in the insn with each cheaper equivalent.
6310
6311 We have the following types of equivalents: SRC itself, a folded
6312 version, a value given in a REG_EQUAL note, or a value related
6313 to a constant.
6314
6315 Each of these equivalents may be part of an additional class
6316 of equivalents (if more than one is in the table, they must be in
6317 the same class; we check for this).
6318
6319 If the source is volatile, we don't do any table lookups.
6320
6321 We note any constant equivalent for possible later use in a
6322 REG_NOTE. */
6323
6324 if (!sets[i].src_volatile)
6325 elt = lookup (src, sets[i].src_hash, mode);
6326
6327 sets[i].src_elt = elt;
6328
6329 if (elt && src_eqv_here && src_eqv_elt)
6330 {
6331 if (elt->first_same_value != src_eqv_elt->first_same_value)
6332 {
6333 /* The REG_EQUAL is indicating that two formerly distinct
6334 classes are now equivalent. So merge them. */
6335 merge_equiv_classes (elt, src_eqv_elt);
6336 src_eqv_hash = HASH (src_eqv, elt->mode);
6337 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
6338 }
6339
6340 src_eqv_here = 0;
6341 }
6342
6343 else if (src_eqv_elt)
6344 elt = src_eqv_elt;
6345
6346 /* Try to find a constant somewhere and record it in `src_const'.
6347 Record its table element, if any, in `src_const_elt'. Look in
6348 any known equivalences first. (If the constant is not in the
6349 table, also set `sets[i].src_const_hash'). */
6350 if (elt)
6351 for (p = elt->first_same_value; p; p = p->next_same_value)
6352 if (p->is_const)
6353 {
6354 src_const = p->exp;
6355 src_const_elt = elt;
6356 break;
6357 }
6358
6359 if (src_const == 0
6360 && (CONSTANT_P (src_folded)
6361 /* Consider (minus (label_ref L1) (label_ref L2)) as
6362 "constant" here so we will record it. This allows us
6363 to fold switch statements when an ADDR_DIFF_VEC is used. */
6364 || (GET_CODE (src_folded) == MINUS
6365 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
6366 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
6367 src_const = src_folded, src_const_elt = elt;
6368 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
6369 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
6370
6371 /* If we don't know if the constant is in the table, get its
6372 hash code and look it up. */
6373 if (src_const && src_const_elt == 0)
6374 {
6375 sets[i].src_const_hash = HASH (src_const, mode);
6376 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
6377 }
6378
6379 sets[i].src_const = src_const;
6380 sets[i].src_const_elt = src_const_elt;
6381
6382 /* If the constant and our source are both in the table, mark them as
6383 equivalent. Otherwise, if a constant is in the table but the source
6384 isn't, set ELT to it. */
6385 if (src_const_elt && elt
6386 && src_const_elt->first_same_value != elt->first_same_value)
6387 merge_equiv_classes (elt, src_const_elt);
6388 else if (src_const_elt && elt == 0)
6389 elt = src_const_elt;
6390
6391 /* See if there is a register linearly related to a constant
6392 equivalent of SRC. */
6393 if (src_const
6394 && (GET_CODE (src_const) == CONST
6395 || (src_const_elt && src_const_elt->related_value != 0)))
6396 {
6397 src_related = use_related_value (src_const, src_const_elt);
6398 if (src_related)
6399 {
6400 struct table_elt *src_related_elt
6401 = lookup (src_related, HASH (src_related, mode), mode);
6402 if (src_related_elt && elt)
6403 {
6404 if (elt->first_same_value
6405 != src_related_elt->first_same_value)
6406 /* This can occur when we previously saw a CONST
6407 involving a SYMBOL_REF and then see the SYMBOL_REF
6408 twice. Merge the involved classes. */
6409 merge_equiv_classes (elt, src_related_elt);
6410
6411 src_related = 0;
6412 src_related_elt = 0;
6413 }
6414 else if (src_related_elt && elt == 0)
6415 elt = src_related_elt;
6416 }
6417 }
6418
6419 /* See if we have a CONST_INT that is already in a register in a
6420 wider mode. */
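      /* For example, if (reg:SI 99) is already known to hold (const_int 7)
	 and this set needs (const_int 7) in HImode, the low part of that
	 register (typically (subreg:HI (reg:SI 99) 0)) becomes a candidate
	 source.  */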
6421
6422 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
6423 && GET_MODE_CLASS (mode) == MODE_INT
6424 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
6425 {
6426 enum machine_mode wider_mode;
6427
6428 for (wider_mode = GET_MODE_WIDER_MODE (mode);
6429 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
6430 && src_related == 0;
6431 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
6432 {
6433 struct table_elt *const_elt
6434 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
6435
6436 if (const_elt == 0)
6437 continue;
6438
6439 for (const_elt = const_elt->first_same_value;
6440 const_elt; const_elt = const_elt->next_same_value)
6441 if (GET_CODE (const_elt->exp) == REG)
6442 {
6443 src_related = gen_lowpart_if_possible (mode,
6444 const_elt->exp);
6445 break;
6446 }
6447 }
6448 }
6449
6450 /* Another possibility is that we have an AND with a constant in
6451 a mode narrower than a word. If so, it might have been generated
6452 as part of an "if" which would narrow the AND. If we already
6453 have done the AND in a wider mode, we can use a SUBREG of that
6454 value. */
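      /* For instance, if this insn computes
	 (and:QI (subreg:QI (reg:SI 99) 0) (const_int 255)) and we have
	 already seen (set (reg:SI 100) (and:SI (reg:SI 99) (const_int 255))),
	 then (subreg:QI (reg:SI 100) 0) can be used as the source instead.  */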
6455
6456 if (flag_expensive_optimizations && ! src_related
6457 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
6458 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6459 {
6460 enum machine_mode tmode;
6461 rtx new_and = gen_rtx (AND, VOIDmode, NULL_RTX, XEXP (src, 1));
6462
6463 for (tmode = GET_MODE_WIDER_MODE (mode);
6464 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6465 tmode = GET_MODE_WIDER_MODE (tmode))
6466 {
6467 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
6468 struct table_elt *larger_elt;
6469
6470 if (inner)
6471 {
6472 PUT_MODE (new_and, tmode);
6473 XEXP (new_and, 0) = inner;
6474 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
6475 if (larger_elt == 0)
6476 continue;
6477
6478 for (larger_elt = larger_elt->first_same_value;
6479 larger_elt; larger_elt = larger_elt->next_same_value)
6480 if (GET_CODE (larger_elt->exp) == REG)
6481 {
6482 src_related
6483 = gen_lowpart_if_possible (mode, larger_elt->exp);
6484 break;
6485 }
6486
6487 if (src_related)
6488 break;
6489 }
6490 }
6491 }
6492
6493 #ifdef LOAD_EXTEND_OP
6494 /* See if a MEM has already been loaded with a widening operation;
6495 if it has, we can use a subreg of that. Many CISC machines
6496 also have such operations, but this is only likely to be
6497 	 beneficial on these machines.  */
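      /* For example, on a machine whose QImode loads zero-extend, if
	 (reg:SI 100) is known equivalent to (zero_extend:SI (mem:QI A)), a
	 later QImode read of the same memory can use
	 (subreg:QI (reg:SI 100) 0) instead of reloading it.  */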
6498
6499 if (flag_expensive_optimizations && src_related == 0
6500 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6501 && GET_MODE_CLASS (mode) == MODE_INT
6502 && GET_CODE (src) == MEM && ! do_not_record
6503 && LOAD_EXTEND_OP (mode) != NIL)
6504 {
6505 enum machine_mode tmode;
6506
6507 /* Set what we are trying to extend and the operation it might
6508 have been extended with. */
6509 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
6510 XEXP (memory_extend_rtx, 0) = src;
6511
6512 for (tmode = GET_MODE_WIDER_MODE (mode);
6513 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6514 tmode = GET_MODE_WIDER_MODE (tmode))
6515 {
6516 struct table_elt *larger_elt;
6517
6518 PUT_MODE (memory_extend_rtx, tmode);
6519 larger_elt = lookup (memory_extend_rtx,
6520 HASH (memory_extend_rtx, tmode), tmode);
6521 if (larger_elt == 0)
6522 continue;
6523
6524 for (larger_elt = larger_elt->first_same_value;
6525 larger_elt; larger_elt = larger_elt->next_same_value)
6526 if (GET_CODE (larger_elt->exp) == REG)
6527 {
6528 src_related = gen_lowpart_if_possible (mode,
6529 larger_elt->exp);
6530 break;
6531 }
6532
6533 if (src_related)
6534 break;
6535 }
6536 }
6537 #endif /* LOAD_EXTEND_OP */
6538
6539 if (src == src_folded)
6540 src_folded = 0;
6541
6542 /* At this point, ELT, if non-zero, points to a class of expressions
6543 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
6544 and SRC_RELATED, if non-zero, each contain additional equivalent
6545 expressions. Prune these latter expressions by deleting expressions
6546 already in the equivalence class.
6547
6548 Check for an equivalent identical to the destination. If found,
6549 this is the preferred equivalent since it will likely lead to
6550 elimination of the insn. Indicate this by placing it in
6551 `src_related'. */
6552
6553 if (elt) elt = elt->first_same_value;
6554 for (p = elt; p; p = p->next_same_value)
6555 {
6556 enum rtx_code code = GET_CODE (p->exp);
6557
6558 /* If the expression is not valid, ignore it. Then we do not
6559 have to check for validity below. In most cases, we can use
6560 `rtx_equal_p', since canonicalization has already been done. */
6561 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
6562 continue;
6563
6564 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
6565 src = 0;
6566 else if (src_folded && GET_CODE (src_folded) == code
6567 && rtx_equal_p (src_folded, p->exp))
6568 src_folded = 0;
6569 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
6570 && rtx_equal_p (src_eqv_here, p->exp))
6571 src_eqv_here = 0;
6572 else if (src_related && GET_CODE (src_related) == code
6573 && rtx_equal_p (src_related, p->exp))
6574 src_related = 0;
6575
6576 	  /* If this is the same as the destination of the insn, we want
6577 to prefer it. Copy it to src_related. The code below will
6578 then give it a negative cost. */
6579 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
6580 src_related = dest;
6581
6582 }
6583
6584 /* Find the cheapest valid equivalent, trying all the available
6585 possibilities. Prefer items not in the hash table to ones
6586 that are when they are equal cost. Note that we can never
6587 worsen an insn as the current contents will also succeed.
6588 If we find an equivalent identical to the destination, use it as best,
6589 since this insn will probably be eliminated in that case. */
6590 if (src)
6591 {
6592 if (rtx_equal_p (src, dest))
6593 src_cost = -1;
6594 else
6595 src_cost = COST (src);
6596 }
6597
6598 if (src_eqv_here)
6599 {
6600 if (rtx_equal_p (src_eqv_here, dest))
6601 src_eqv_cost = -1;
6602 else
6603 src_eqv_cost = COST (src_eqv_here);
6604 }
6605
6606 if (src_folded)
6607 {
6608 if (rtx_equal_p (src_folded, dest))
6609 src_folded_cost = -1;
6610 else
6611 src_folded_cost = COST (src_folded);
6612 }
6613
6614 if (src_related)
6615 {
6616 if (rtx_equal_p (src_related, dest))
6617 src_related_cost = -1;
6618 else
6619 src_related_cost = COST (src_related);
6620 }
6621
6622 /* If this was an indirect jump insn, a known label will really be
6623 cheaper even though it looks more expensive. */
6624 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
6625 src_folded = src_const, src_folded_cost = -1;
6626
6627 /* Terminate loop when replacement made. This must terminate since
6628 the current contents will be tested and will always be valid. */
6629 while (1)
6630 {
6631 rtx trial;
6632
6633 /* Skip invalid entries. */
6634 while (elt && GET_CODE (elt->exp) != REG
6635 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6636 elt = elt->next_same_value;
6637
6638 if (elt) src_elt_cost = elt->cost;
6639
6640 /* Find cheapest and skip it for the next time. For items
6641 of equal cost, use this order:
6642 src_folded, src, src_eqv, src_related and hash table entry. */
6643 if (src_folded_cost <= src_cost
6644 && src_folded_cost <= src_eqv_cost
6645 && src_folded_cost <= src_related_cost
6646 && src_folded_cost <= src_elt_cost)
6647 {
6648 trial = src_folded, src_folded_cost = 10000;
6649 if (src_folded_force_flag)
6650 trial = force_const_mem (mode, trial);
6651 }
6652 else if (src_cost <= src_eqv_cost
6653 && src_cost <= src_related_cost
6654 && src_cost <= src_elt_cost)
6655 trial = src, src_cost = 10000;
6656 else if (src_eqv_cost <= src_related_cost
6657 && src_eqv_cost <= src_elt_cost)
6658 trial = copy_rtx (src_eqv_here), src_eqv_cost = 10000;
6659 else if (src_related_cost <= src_elt_cost)
6660 trial = copy_rtx (src_related), src_related_cost = 10000;
6661 else
6662 {
6663 trial = copy_rtx (elt->exp);
6664 elt = elt->next_same_value;
6665 src_elt_cost = 10000;
6666 }
6667
6668 /* We don't normally have an insn matching (set (pc) (pc)), so
6669 check for this separately here. We will delete such an
6670 insn below.
6671
6672 Tablejump insns contain a USE of the table, so simply replacing
6673 the operand with the constant won't match. This is simply an
6674 unconditional branch, however, and is therefore valid. Just
6675 insert the substitution here and we will delete and re-emit
6676 the insn later. */
6677
6678 if (n_sets == 1 && dest == pc_rtx
6679 && (trial == pc_rtx
6680 || (GET_CODE (trial) == LABEL_REF
6681 && ! condjump_p (insn))))
6682 {
6683 /* If TRIAL is a label in front of a jump table, we are
6684 really falling through the switch (this is how casesi
6685 insns work), so we must branch around the table. */
6686 if (GET_CODE (trial) == CODE_LABEL
6687 && NEXT_INSN (trial) != 0
6688 && GET_CODE (NEXT_INSN (trial)) == JUMP_INSN
6689 && (GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_DIFF_VEC
6690 || GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_VEC))
6691
6692 trial = gen_rtx (LABEL_REF, Pmode, get_label_after (trial));
6693
6694 SET_SRC (sets[i].rtl) = trial;
6695 cse_jumps_altered = 1;
6696 break;
6697 }
6698
6699 /* Look for a substitution that makes a valid insn. */
6700 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
6701 {
6702 /* The result of apply_change_group can be ignored; see
6703 canon_reg. */
6704
6705 validate_change (insn, &SET_SRC (sets[i].rtl),
6706 canon_reg (SET_SRC (sets[i].rtl), insn),
6707 1);
6708 apply_change_group ();
6709 break;
6710 }
6711
6712 /* If we previously found constant pool entries for
6713 constants and this is a constant, try making a
6714 pool entry. Put it in src_folded unless we already have done
6715 this since that is where it likely came from. */
6716
6717 else if (constant_pool_entries_cost
6718 && CONSTANT_P (trial)
6719 && ! (GET_CODE (trial) == CONST
6720 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
6721 && (src_folded == 0
6722 || (GET_CODE (src_folded) != MEM
6723 && ! src_folded_force_flag))
6724 && GET_MODE_CLASS (mode) != MODE_CC)
6725 {
6726 src_folded_force_flag = 1;
6727 src_folded = trial;
6728 src_folded_cost = constant_pool_entries_cost;
6729 }
6730 }
6731
6732 src = SET_SRC (sets[i].rtl);
6733
6734 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
6735 However, there is an important exception: If both are registers
6736 that are not the head of their equivalence class, replace SET_SRC
6737 with the head of the class. If we do not do this, we will have
6738 both registers live over a portion of the basic block. This way,
6739 their lifetimes will likely abut instead of overlapping. */
6740 if (GET_CODE (dest) == REG
6741 && REGNO_QTY_VALID_P (REGNO (dest))
6742 && qty_mode[reg_qty[REGNO (dest)]] == GET_MODE (dest)
6743 && qty_first_reg[reg_qty[REGNO (dest)]] != REGNO (dest)
6744 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
6745 /* Don't do this if the original insn had a hard reg as
6746 SET_SRC. */
6747 && (GET_CODE (sets[i].src) != REG
6748 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER))
6749 /* We can't call canon_reg here because it won't do anything if
6750 SRC is a hard register. */
6751 {
6752 int first = qty_first_reg[reg_qty[REGNO (src)]];
6753
6754 src = SET_SRC (sets[i].rtl)
6755 = first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
6756 : gen_rtx (REG, GET_MODE (src), first);
6757
6758 /* If we had a constant that is cheaper than what we are now
6759 setting SRC to, use that constant. We ignored it when we
6760 thought we could make this into a no-op. */
6761 if (src_const && COST (src_const) < COST (src)
6762 && validate_change (insn, &SET_SRC (sets[i].rtl), src_const, 0))
6763 src = src_const;
6764 }
6765
6766 /* If we made a change, recompute SRC values. */
6767 if (src != sets[i].src)
6768 {
6769 do_not_record = 0;
6770 hash_arg_in_memory = 0;
6771 hash_arg_in_struct = 0;
6772 sets[i].src = src;
6773 sets[i].src_hash = HASH (src, mode);
6774 sets[i].src_volatile = do_not_record;
6775 sets[i].src_in_memory = hash_arg_in_memory;
6776 sets[i].src_in_struct = hash_arg_in_struct;
6777 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
6778 }
6779
6780 /* If this is a single SET, we are setting a register, and we have an
6781 equivalent constant, we want to add a REG_NOTE. We don't want
6782 to write a REG_EQUAL note for a constant pseudo since verifying that
6783 that pseudo hasn't been eliminated is a pain. Such a note also
6784 won't help anything. */
6785 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
6786 && GET_CODE (src_const) != REG)
6787 {
6788 tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6789
6790 /* Record the actual constant value in a REG_EQUAL note, making
6791 a new one if one does not already exist. */
6792 if (tem)
6793 XEXP (tem, 0) = src_const;
6794 else
6795 REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_EQUAL,
6796 src_const, REG_NOTES (insn));
6797
6798 /* If storing a constant value in a register that
6799 previously held the constant value 0,
6800 record this fact with a REG_WAS_0 note on this insn.
6801
6802 Note that the *register* is required to have previously held 0,
6803 not just any register in the quantity and we must point to the
6804 insn that set that register to zero.
6805
6806 Rather than track each register individually, we just see if
6807 the last set for this quantity was for this register. */
6808
6809 if (REGNO_QTY_VALID_P (REGNO (dest))
6810 && qty_const[reg_qty[REGNO (dest)]] == const0_rtx)
6811 {
6812 /* See if we previously had a REG_WAS_0 note. */
6813 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
6814 rtx const_insn = qty_const_insn[reg_qty[REGNO (dest)]];
6815
6816 if ((tem = single_set (const_insn)) != 0
6817 && rtx_equal_p (SET_DEST (tem), dest))
6818 {
6819 if (note)
6820 XEXP (note, 0) = const_insn;
6821 else
6822 REG_NOTES (insn) = gen_rtx (INSN_LIST, REG_WAS_0,
6823 const_insn, REG_NOTES (insn));
6824 }
6825 }
6826 }
6827
6828 /* Now deal with the destination. */
6829 do_not_record = 0;
6830 sets[i].inner_dest_loc = &SET_DEST (sets[0].rtl);
6831
6832 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
6833 to the MEM or REG within it. */
6834 while (GET_CODE (dest) == SIGN_EXTRACT
6835 || GET_CODE (dest) == ZERO_EXTRACT
6836 || GET_CODE (dest) == SUBREG
6837 || GET_CODE (dest) == STRICT_LOW_PART)
6838 {
6839 sets[i].inner_dest_loc = &XEXP (dest, 0);
6840 dest = XEXP (dest, 0);
6841 }
6842
6843 sets[i].inner_dest = dest;
6844
6845 if (GET_CODE (dest) == MEM)
6846 {
6847 dest = fold_rtx (dest, insn);
6848
6849 /* Decide whether we invalidate everything in memory,
6850 or just things at non-fixed places.
6851 Writing a large aggregate must invalidate everything
6852 because we don't know how long it is. */
6853 note_mem_written (dest, &writes_memory);
6854 }
6855
6856 /* Compute the hash code of the destination now,
6857 before the effects of this instruction are recorded,
6858 since the register values used in the address computation
6859 are those before this instruction. */
6860 sets[i].dest_hash = HASH (dest, mode);
6861
6862 /* Don't enter a bit-field in the hash table
6863 because the value in it after the store
6864 may not equal what was stored, due to truncation. */
6865
6866 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
6867 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
6868 {
6869 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
6870
6871 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
6872 && GET_CODE (width) == CONST_INT
6873 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
6874 && ! (INTVAL (src_const)
6875 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
6876 /* Exception: if the value is constant,
6877 and it won't be truncated, record it. */
6878 ;
6879 else
6880 {
6881 /* This is chosen so that the destination will be invalidated
6882 but no new value will be recorded.
6883 We must invalidate because sometimes constant
6884 values can be recorded for bitfields. */
6885 sets[i].src_elt = 0;
6886 sets[i].src_volatile = 1;
6887 src_eqv = 0;
6888 src_eqv_elt = 0;
6889 }
6890 }
6891
6892 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
6893 the insn. */
6894 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
6895 {
6896 PUT_CODE (insn, NOTE);
6897 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
6898 NOTE_SOURCE_FILE (insn) = 0;
6899 cse_jumps_altered = 1;
6900 /* One less use of the label this insn used to jump to. */
6901 --LABEL_NUSES (JUMP_LABEL (insn));
6902 /* No more processing for this set. */
6903 sets[i].rtl = 0;
6904 }
6905
6906 /* If this SET is now setting PC to a label, we know it used to
6907 be a conditional or computed branch. So we see if we can follow
6908 it. If it was a computed branch, delete it and re-emit. */
6909 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
6910 {
6911 rtx p;
6912
6913 /* If this is not in the format for a simple branch and
6914 we are the only SET in it, re-emit it. */
6915 if (! simplejump_p (insn) && n_sets == 1)
6916 {
6917 rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
6918 JUMP_LABEL (new) = XEXP (src, 0);
6919 LABEL_NUSES (XEXP (src, 0))++;
6920 delete_insn (insn);
6921 insn = new;
6922 }
6923 else
6924 /* Otherwise, force rerecognition, since it probably had
6925 a different pattern before.
6926 This shouldn't really be necessary, since whatever
6927 changed the source value above should have done this.
6928 Until the right place is found, might as well do this here. */
6929 INSN_CODE (insn) = -1;
6930
6931 /* Now that we've converted this jump to an unconditional jump,
6932 there is dead code after it. Delete the dead code until we
6933 reach a BARRIER, the end of the function, or a label. Do
6934 not delete NOTEs except for NOTE_INSN_DELETED since later
6935 phases assume these notes are retained. */
6936
6937 p = insn;
6938
6939 while (NEXT_INSN (p) != 0
6940 && GET_CODE (NEXT_INSN (p)) != BARRIER
6941 && GET_CODE (NEXT_INSN (p)) != CODE_LABEL)
6942 {
6943 if (GET_CODE (NEXT_INSN (p)) != NOTE
6944 || NOTE_LINE_NUMBER (NEXT_INSN (p)) == NOTE_INSN_DELETED)
6945 delete_insn (NEXT_INSN (p));
6946 else
6947 p = NEXT_INSN (p);
6948 }
6949
6950 /* If we don't have a BARRIER immediately after INSN, put one there.
6951 Much code assumes that there are no NOTEs between a JUMP_INSN and
6952 BARRIER. */
6953
6954 if (NEXT_INSN (insn) == 0
6955 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
6956 emit_barrier_after (insn);
6957
6958 /* We might have two BARRIERs separated by notes. Delete the second
6959 one if so. */
6960
6961 if (p != insn && NEXT_INSN (p) != 0
6962 && GET_CODE (NEXT_INSN (p)) == BARRIER)
6963 delete_insn (NEXT_INSN (p));
6964
6965 cse_jumps_altered = 1;
6966 sets[i].rtl = 0;
6967 }
6968
6969 /* If destination is volatile, invalidate it and then do no further
6970 processing for this assignment. */
6971
6972 else if (do_not_record)
6973 {
6974 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
6975 || GET_CODE (dest) == MEM)
6976 invalidate (dest, VOIDmode);
6977 else if (GET_CODE (dest) == STRICT_LOW_PART
6978 || GET_CODE (dest) == ZERO_EXTRACT)
6979 invalidate (XEXP (dest, 0), GET_MODE (dest));
6980 sets[i].rtl = 0;
6981 }
6982
6983 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
6984 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
6985
6986 #ifdef HAVE_cc0
6987 /* If setting CC0, record what it was set to, or a constant, if it
6988 is equivalent to a constant. If it is being set to a floating-point
6989 value, make a COMPARE with the appropriate constant of 0. If we
6990 don't do this, later code can interpret this as a test against
6991 const0_rtx, which can cause problems if we try to put it into an
6992 insn as a floating-point operand. */
6993 if (dest == cc0_rtx)
6994 {
6995 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
6996 this_insn_cc0_mode = mode;
6997 if (FLOAT_MODE_P (mode))
6998 this_insn_cc0 = gen_rtx (COMPARE, VOIDmode, this_insn_cc0,
6999 CONST0_RTX (mode));
7000 }
7001 #endif
7002 }
7003
7004 /* Now enter all non-volatile source expressions in the hash table
7005 if they are not already present.
7006 Record their equivalence classes in src_elt.
7007 This way we can insert the corresponding destinations into
7008 the same classes even if the actual sources are no longer in them
7009 (having been invalidated). */
7010
7011 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
7012 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
7013 {
7014 register struct table_elt *elt;
7015 register struct table_elt *classp = sets[0].src_elt;
7016 rtx dest = SET_DEST (sets[0].rtl);
7017 enum machine_mode eqvmode = GET_MODE (dest);
7018
7019 if (GET_CODE (dest) == STRICT_LOW_PART)
7020 {
7021 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
7022 classp = 0;
7023 }
7024 if (insert_regs (src_eqv, classp, 0))
7025 {
7026 rehash_using_reg (src_eqv);
7027 src_eqv_hash = HASH (src_eqv, eqvmode);
7028 }
7029 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
7030 elt->in_memory = src_eqv_in_memory;
7031 elt->in_struct = src_eqv_in_struct;
7032 src_eqv_elt = elt;
7033
7034 /* Check to see if src_eqv_elt is the same as a set source which
7035 does not yet have an elt, and if so set the elt of the set source
7036 to src_eqv_elt. */
7037 for (i = 0; i < n_sets; i++)
7038 if (sets[i].rtl && sets[i].src_elt == 0
7039 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
7040 sets[i].src_elt = src_eqv_elt;
7041 }
7042
7043 for (i = 0; i < n_sets; i++)
7044 if (sets[i].rtl && ! sets[i].src_volatile
7045 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
7046 {
7047 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
7048 {
7049 /* REG_EQUAL in setting a STRICT_LOW_PART
7050 gives an equivalent for the entire destination register,
7051 not just for the subreg being stored in now.
7052 This is a more interesting equivalence, so we arrange later
7053 to treat the entire reg as the destination. */
7054 sets[i].src_elt = src_eqv_elt;
7055 sets[i].src_hash = src_eqv_hash;
7056 }
7057 else
7058 {
7059 /* Insert source and constant equivalent into hash table, if not
7060 already present. */
7061 register struct table_elt *classp = src_eqv_elt;
7062 register rtx src = sets[i].src;
7063 register rtx dest = SET_DEST (sets[i].rtl);
7064 enum machine_mode mode
7065 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
7066
7067 if (sets[i].src_elt == 0)
7068 {
7069 register struct table_elt *elt;
7070
7071 /* Note that these insert_regs calls cannot remove
7072 any of the src_elt's, because they would have failed to
7073 match if not still valid. */
7074 if (insert_regs (src, classp, 0))
7075 {
7076 rehash_using_reg (src);
7077 sets[i].src_hash = HASH (src, mode);
7078 }
7079 elt = insert (src, classp, sets[i].src_hash, mode);
7080 elt->in_memory = sets[i].src_in_memory;
7081 elt->in_struct = sets[i].src_in_struct;
7082 sets[i].src_elt = classp = elt;
7083 }
7084
7085 if (sets[i].src_const && sets[i].src_const_elt == 0
7086 && src != sets[i].src_const
7087 && ! rtx_equal_p (sets[i].src_const, src))
7088 sets[i].src_elt = insert (sets[i].src_const, classp,
7089 sets[i].src_const_hash, mode);
7090 }
7091 }
7092 else if (sets[i].src_elt == 0)
7093 /* If we did not insert the source into the hash table (e.g., it was
7094 volatile), note the equivalence class for the REG_EQUAL value, if any,
7095 so that the destination goes into that class. */
7096 sets[i].src_elt = src_eqv_elt;
7097
7098 invalidate_from_clobbers (&writes_memory, x);
7099
7100 /* Some registers are invalidated by subroutine calls. Memory is
7101 invalidated by non-constant calls. */
7102
7103 if (GET_CODE (insn) == CALL_INSN)
7104 {
7105 static struct write_data everything = {0, 1, 1, 1};
7106
7107 if (! CONST_CALL_P (insn))
7108 invalidate_memory (&everything);
7109 invalidate_for_call ();
7110 }
7111
7112 /* Now invalidate everything set by this instruction.
7113 If a SUBREG or other funny destination is being set,
7114 sets[i].rtl is still nonzero, so here we invalidate the reg
7115 a part of which is being set. */
7116
7117 for (i = 0; i < n_sets; i++)
7118 if (sets[i].rtl)
7119 {
7120 /* We can't use the inner dest, because the mode associated with
7121 a ZERO_EXTRACT is significant. */
7122 register rtx dest = SET_DEST (sets[i].rtl);
7123
7124 /* Needed for registers to remove the register from its
7125 previous quantity's chain.
7126 Needed for memory if this is a nonvarying address, unless
7127 we have just done an invalidate_memory that covers even those. */
7128 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7129 || (GET_CODE (dest) == MEM && ! writes_memory.all
7130 && ! cse_rtx_addr_varies_p (dest)))
7131 invalidate (dest, VOIDmode);
7132 else if (GET_CODE (dest) == STRICT_LOW_PART
7133 || GET_CODE (dest) == ZERO_EXTRACT)
7134 invalidate (XEXP (dest, 0), GET_MODE (dest));
7135 }
7136
7137 /* Make sure registers mentioned in destinations
7138 are safe for use in an expression to be inserted.
7139 This removes from the hash table
7140 any invalid entry that refers to one of these registers.
7141
7142 We don't care about the return value from mention_regs because
7143 we are going to hash the SET_DEST values unconditionally. */
7144
7145 for (i = 0; i < n_sets; i++)
7146 if (sets[i].rtl && GET_CODE (SET_DEST (sets[i].rtl)) != REG)
7147 mention_regs (SET_DEST (sets[i].rtl));
7148
7149 /* We may have just removed some of the src_elt's from the hash table.
7150 So replace each one with the current head of the same class. */
7151
7152 for (i = 0; i < n_sets; i++)
7153 if (sets[i].rtl)
7154 {
7155 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
7156 /* If elt was removed, find current head of same class,
7157 or 0 if nothing remains of that class. */
7158 {
7159 register struct table_elt *elt = sets[i].src_elt;
7160
7161 while (elt && elt->prev_same_value)
7162 elt = elt->prev_same_value;
7163
7164 while (elt && elt->first_same_value == 0)
7165 elt = elt->next_same_value;
7166 sets[i].src_elt = elt ? elt->first_same_value : 0;
7167 }
7168 }
7169
7170 /* Now insert the destinations into their equivalence classes. */
7171
7172 for (i = 0; i < n_sets; i++)
7173 if (sets[i].rtl)
7174 {
7175 register rtx dest = SET_DEST (sets[i].rtl);
7176 register struct table_elt *elt;
7177
7178 /* Don't record value if we are not supposed to risk allocating
7179 floating-point values in registers that might be wider than
7180 memory. */
7181 if ((flag_float_store
7182 && GET_CODE (dest) == MEM
7183 && FLOAT_MODE_P (GET_MODE (dest)))
7184 /* Don't record values of destinations set inside a libcall block
7185 since we might delete the libcall. Things should have been set
7186 up so we won't want to reuse such a value, but we play it safe
7187 here. */
7188 || in_libcall_block
7189 /* If we didn't put a REG_EQUAL value or a source into the hash
7190 	 table, there is no point in recording DEST. */
7191 || sets[i].src_elt == 0)
7192 continue;
7193
7194 /* STRICT_LOW_PART isn't part of the value BEING set,
7195 and neither is the SUBREG inside it.
7196 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
7197 if (GET_CODE (dest) == STRICT_LOW_PART)
7198 dest = SUBREG_REG (XEXP (dest, 0));
7199
7200 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
7201 /* Registers must also be inserted into chains for quantities. */
7202 if (insert_regs (dest, sets[i].src_elt, 1))
7203 {
7204 /* If `insert_regs' changes something, the hash code must be
7205 recalculated. */
7206 rehash_using_reg (dest);
7207 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
7208 }
7209
7210 elt = insert (dest, sets[i].src_elt,
7211 sets[i].dest_hash, GET_MODE (dest));
7212 elt->in_memory = GET_CODE (sets[i].inner_dest) == MEM;
7213 if (elt->in_memory)
7214 {
7215 /* This implicitly assumes a whole struct
7216 need not have MEM_IN_STRUCT_P.
7217 But a whole struct is *supposed* to have MEM_IN_STRUCT_P. */
7218 elt->in_struct = (MEM_IN_STRUCT_P (sets[i].inner_dest)
7219 || sets[i].inner_dest != SET_DEST (sets[i].rtl));
7220 }
7221
7222 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
7223 narrower than M2, and both M1 and M2 are the same number of words,
7224 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
7225 make that equivalence as well.
7226
7227 	 However, BAR may have equivalences that gen_lowpart_if_possible
7228 	 can simplify further than it can simplify BAR itself (e.g., if BAR
7229 	 was ZERO_EXTENDed from M2), so we will scan all
7230 BAR's equivalences. If we don't get a simplified form, make
7231 the SUBREG. It will not be used in an equivalence, but will
7232 cause two similar assignments to be detected.
7233
7234 Note the loop below will find SUBREG_REG (DEST) since we have
7235 already entered SRC and DEST of the SET in the table. */
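      /* As a hypothetical illustration, on a 32-bit target where SImode
	 and HImode each occupy one word,
	     (set (subreg:SI (reg:HI 100) 0) (reg:SI 101))
	 also lets us record
	     (set (reg:HI 100) (subreg:HI (reg:SI 101) 0)),
	 so a later use of (reg:HI 100) can be recognized as the low part
	 of (reg:SI 101).  The modes and register numbers here are only
	 illustrative.  */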
7236
7237 if (GET_CODE (dest) == SUBREG
7238 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
7239 / UNITS_PER_WORD)
7240 	      == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
7241 && (GET_MODE_SIZE (GET_MODE (dest))
7242 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7243 && sets[i].src_elt != 0)
7244 {
7245 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
7246 struct table_elt *elt, *classp = 0;
7247
7248 for (elt = sets[i].src_elt->first_same_value; elt;
7249 elt = elt->next_same_value)
7250 {
7251 rtx new_src = 0;
7252 unsigned src_hash;
7253 struct table_elt *src_elt;
7254
7255 /* Ignore invalid entries. */
7256 if (GET_CODE (elt->exp) != REG
7257 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
7258 continue;
7259
7260 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
7261 if (new_src == 0)
7262 new_src = gen_rtx (SUBREG, new_mode, elt->exp, 0);
7263
7264 src_hash = HASH (new_src, new_mode);
7265 src_elt = lookup (new_src, src_hash, new_mode);
7266
7267 		  /* Put the new source in the hash table if it isn't
7268 		     already there.  */
7269 if (src_elt == 0)
7270 {
7271 if (insert_regs (new_src, classp, 0))
7272 {
7273 rehash_using_reg (new_src);
7274 src_hash = HASH (new_src, new_mode);
7275 }
7276 src_elt = insert (new_src, classp, src_hash, new_mode);
7277 src_elt->in_memory = elt->in_memory;
7278 src_elt->in_struct = elt->in_struct;
7279 }
7280 else if (classp && classp != src_elt->first_same_value)
7281 /* Show that two things that we've seen before are
7282 actually the same. */
7283 merge_equiv_classes (src_elt, classp);
7284
7285 classp = src_elt->first_same_value;
7286 }
7287 }
7288 }
7289
7290 /* Special handling for (set REG0 REG1)
7291 where REG0 is the "cheapest", cheaper than REG1.
7292 After cse, REG1 will probably not be used in the sequel,
7293 so (if easily done) change this insn to (set REG1 REG0) and
7294 replace REG1 with REG0 in the previous insn that computed their value.
7295 Then REG1 will become a dead store and won't cloud the situation
7296 for later optimizations.
7297
7298 Do not make this change if REG1 is a hard register, because it will
7299 then be used in the sequel and we may be changing a two-operand insn
7300 into a three-operand insn.
7301
7302 Also do not do this if we are operating on a copy of INSN. */
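  /* A hypothetical example: given
	 insn A: (set (reg:SI 200) (plus:SI (reg:SI 300) (const_int 4)))
	 insn B: (set (reg:SI 100) (reg:SI 200))
     where (reg:SI 100) is the cheaper register, we rewrite insn A to set
     (reg:SI 100) and insn B to (set (reg:SI 200) (reg:SI 100)), so insn B
     becomes a likely dead store.  Register numbers are illustrative.  */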
7303
7304 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
7305 && NEXT_INSN (PREV_INSN (insn)) == insn
7306 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
7307 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
7308 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl)))
7309 && (qty_first_reg[reg_qty[REGNO (SET_SRC (sets[0].rtl))]]
7310 == REGNO (SET_DEST (sets[0].rtl))))
7311 {
7312 rtx prev = PREV_INSN (insn);
7313 while (prev && GET_CODE (prev) == NOTE)
7314 prev = PREV_INSN (prev);
7315
7316 if (prev && GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SET
7317 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl))
7318 {
7319 rtx dest = SET_DEST (sets[0].rtl);
7320 rtx note = find_reg_note (prev, REG_EQUIV, NULL_RTX);
7321
7322 validate_change (prev, & SET_DEST (PATTERN (prev)), dest, 1);
7323 validate_change (insn, & SET_DEST (sets[0].rtl),
7324 SET_SRC (sets[0].rtl), 1);
7325 validate_change (insn, & SET_SRC (sets[0].rtl), dest, 1);
7326 apply_change_group ();
7327
7328 /* If REG1 was equivalent to a constant, REG0 is not. */
7329 if (note)
7330 PUT_REG_NOTE_KIND (note, REG_EQUAL);
7331
7332 /* If there was a REG_WAS_0 note on PREV, remove it. Move
7333 any REG_WAS_0 note on INSN to PREV. */
7334 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
7335 if (note)
7336 remove_note (prev, note);
7337
7338 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
7339 if (note)
7340 {
7341 remove_note (insn, note);
7342 XEXP (note, 1) = REG_NOTES (prev);
7343 REG_NOTES (prev) = note;
7344 }
7345 }
7346 }
7347
7348 /* If this is a conditional jump insn, record any known equivalences due to
7349 the condition being tested. */
7350
7351 last_jump_equiv_class = 0;
7352 if (GET_CODE (insn) == JUMP_INSN
7353 && n_sets == 1 && GET_CODE (x) == SET
7354 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
7355 record_jump_equiv (insn, 0);
7356
7357 #ifdef HAVE_cc0
7358 /* If the previous insn set CC0 and this insn no longer references CC0,
7359 delete the previous insn. Here we use the fact that nothing expects CC0
7360 to be valid over an insn, which is true until the final pass. */
7361 if (prev_insn && GET_CODE (prev_insn) == INSN
7362 && (tem = single_set (prev_insn)) != 0
7363 && SET_DEST (tem) == cc0_rtx
7364 && ! reg_mentioned_p (cc0_rtx, x))
7365 {
7366 PUT_CODE (prev_insn, NOTE);
7367 NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
7368 NOTE_SOURCE_FILE (prev_insn) = 0;
7369 }
7370
7371 prev_insn_cc0 = this_insn_cc0;
7372 prev_insn_cc0_mode = this_insn_cc0_mode;
7373 #endif
7374
7375 prev_insn = insn;
7376 }
7377 \f
7378 /* Store 1 in *WRITES_PTR for those categories of memory ref
7379 that must be invalidated when the expression WRITTEN is stored in.
7380 If WRITTEN is null, say everything must be invalidated. */
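/* As a hypothetical illustration: a push through
   (mem:SI (pre_dec:SI (reg:SI sp))) marks only the `sp' flag, while a
   store through (mem:SI (reg:SI 100)) with a varying, non-structure
   address marks `var', `nonscalar' and `all'.  The modes and register
   numbers here are illustrative.  */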
7381
7382 static void
7383 note_mem_written (written, writes_ptr)
7384 rtx written;
7385 struct write_data *writes_ptr;
7386 {
7387 static struct write_data everything = {0, 1, 1, 1};
7388
7389 if (written == 0)
7390 *writes_ptr = everything;
7391 else if (GET_CODE (written) == MEM)
7392 {
7393 /* Pushing or popping the stack invalidates just the stack pointer. */
7394 rtx addr = XEXP (written, 0);
7395 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
7396 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
7397 && GET_CODE (XEXP (addr, 0)) == REG
7398 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
7399 {
7400 writes_ptr->sp = 1;
7401 return;
7402 }
7403 else if (GET_MODE (written) == BLKmode)
7404 *writes_ptr = everything;
7405 /* (mem (scratch)) means clobber everything. */
7406 else if (GET_CODE (addr) == SCRATCH)
7407 *writes_ptr = everything;
7408 else if (cse_rtx_addr_varies_p (written))
7409 {
7410 /* A varying address that is a sum indicates an array element,
7411 and that's just as good as a structure element
7412 in implying that we need not invalidate scalar variables.
7413 However, we must allow QImode aliasing of scalars, because the
7414 ANSI C standard allows character pointers to alias anything. */
7415 if (! ((MEM_IN_STRUCT_P (written)
7416 || GET_CODE (XEXP (written, 0)) == PLUS)
7417 && GET_MODE (written) != QImode))
7418 writes_ptr->all = 1;
7419 writes_ptr->nonscalar = 1;
7420 }
7421 writes_ptr->var = 1;
7422 }
7423 }
7424
7425 /* Perform invalidation on the basis of everything about an insn
7426 except for invalidating the actual places that are SET in it.
7427 This includes the places CLOBBERed, and anything that might
7428 alias with something that is SET or CLOBBERed.
7429
7430 W points to the writes_memory for this insn, a struct write_data
7431 saying which kinds of memory references must be invalidated.
7432 X is the pattern of the insn. */
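/* For example, an insn whose pattern is
     (parallel [(set ...) (clobber (reg:CC 17))])
   causes any hash table entries that mention (reg:CC 17) to be removed
   here; the places actually SET are invalidated elsewhere.  The register
   number is illustrative.  */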
7433
7434 static void
7435 invalidate_from_clobbers (w, x)
7436 struct write_data *w;
7437 rtx x;
7438 {
7439 /* If W->var is not set, W specifies no action.
7440 If W->all is set, this step gets all memory refs
7441 so they can be ignored in the rest of this function. */
7442 if (w->var)
7443 invalidate_memory (w);
7444
7445 if (w->sp)
7446 {
7447 if (reg_tick[STACK_POINTER_REGNUM] >= 0)
7448 reg_tick[STACK_POINTER_REGNUM]++;
7449
7450 /* This should be *very* rare. */
7451 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
7452 invalidate (stack_pointer_rtx, VOIDmode);
7453 }
7454
7455 if (GET_CODE (x) == CLOBBER)
7456 {
7457 rtx ref = XEXP (x, 0);
7458 if (ref)
7459 {
7460 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
7461 || (GET_CODE (ref) == MEM && ! w->all))
7462 invalidate (ref, VOIDmode);
7463 else if (GET_CODE (ref) == STRICT_LOW_PART
7464 || GET_CODE (ref) == ZERO_EXTRACT)
7465 invalidate (XEXP (ref, 0), GET_MODE (ref));
7466 }
7467 }
7468 else if (GET_CODE (x) == PARALLEL)
7469 {
7470 register int i;
7471 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
7472 {
7473 register rtx y = XVECEXP (x, 0, i);
7474 if (GET_CODE (y) == CLOBBER)
7475 {
7476 rtx ref = XEXP (y, 0);
7477 if (ref)
7478 {
7479 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
7480 || (GET_CODE (ref) == MEM && !w->all))
7481 invalidate (ref, VOIDmode);
7482 else if (GET_CODE (ref) == STRICT_LOW_PART
7483 || GET_CODE (ref) == ZERO_EXTRACT)
7484 invalidate (XEXP (ref, 0), GET_MODE (ref));
7485 }
7486 }
7487 }
7488 }
7489 }
7490 \f
7491 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
7492 and replace any registers in them with either an equivalent constant
7493 or the canonical form of the register. If we are inside an address,
7494 only do this if the address remains valid.
7495
7496 OBJECT is 0 except when within a MEM in which case it is the MEM.
7497
7498 Return the replacement for X. */
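/* As a hypothetical example: if (reg:SI 100) is currently known to equal
   (const_int 4), a note
     (expr_list:REG_EQUAL (plus:SI (reg:SI 100) (reg:SI 101)) ...)
   has (const_int 4) substituted for (reg:SI 100) below, and (reg:SI 101)
   is replaced by its canonical equivalent register.  Register numbers
   are illustrative.  */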
7499
7500 static rtx
7501 cse_process_notes (x, object)
7502 rtx x;
7503 rtx object;
7504 {
7505 enum rtx_code code = GET_CODE (x);
7506 char *fmt = GET_RTX_FORMAT (code);
7507 int i;
7508
7509 switch (code)
7510 {
7511 case CONST_INT:
7512 case CONST:
7513 case SYMBOL_REF:
7514 case LABEL_REF:
7515 case CONST_DOUBLE:
7516 case PC:
7517 case CC0:
7518 case LO_SUM:
7519 return x;
7520
7521 case MEM:
7522 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
7523 return x;
7524
7525 case EXPR_LIST:
7526 case INSN_LIST:
7527 if (REG_NOTE_KIND (x) == REG_EQUAL)
7528 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
7529 if (XEXP (x, 1))
7530 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
7531 return x;
7532
7533 case SIGN_EXTEND:
7534 case ZERO_EXTEND:
7535 {
7536 rtx new = cse_process_notes (XEXP (x, 0), object);
7537 /* We don't substitute VOIDmode constants into these rtx,
7538 since they would impede folding. */
7539 if (GET_MODE (new) != VOIDmode)
7540 validate_change (object, &XEXP (x, 0), new, 0);
7541 return x;
7542 }
7543
7544 case REG:
7545 i = reg_qty[REGNO (x)];
7546
7547 /* Return a constant or a constant register. */
7548 if (REGNO_QTY_VALID_P (REGNO (x))
7549 && qty_const[i] != 0
7550 && (CONSTANT_P (qty_const[i])
7551 || GET_CODE (qty_const[i]) == REG))
7552 {
7553 rtx new = gen_lowpart_if_possible (GET_MODE (x), qty_const[i]);
7554 if (new)
7555 return new;
7556 }
7557
7558 /* Otherwise, canonicalize this register. */
7559 return canon_reg (x, NULL_RTX);
7560 }
7561
7562 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7563 if (fmt[i] == 'e')
7564 validate_change (object, &XEXP (x, i),
7565 cse_process_notes (XEXP (x, i), object), 0);
7566
7567 return x;
7568 }
7569 \f
7570 /* Find common subexpressions between the end test of a loop and the beginning
7571 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
7572
7573 Often we have a loop where an expression in the exit test is used
7574 in the body of the loop. For example "while (*p) *q++ = *p++;".
7575 Because of the way we duplicate the loop exit test in front of the loop,
7576 however, we don't detect that common subexpression. This will be caught
7577    when global cse is implemented, but this is quite a common case.
7578
7579 This function handles the most common cases of these common expressions.
7580 It is called after we have processed the basic block ending with the
7581 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
7582 jumps to a label used only once. */
7583
7584 static void
7585 cse_around_loop (loop_start)
7586 rtx loop_start;
7587 {
7588 rtx insn;
7589 int i;
7590 struct table_elt *p;
7591
7592 /* If the jump at the end of the loop doesn't go to the start, we don't
7593 do anything. */
7594 for (insn = PREV_INSN (loop_start);
7595 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
7596 insn = PREV_INSN (insn))
7597 ;
7598
7599 if (insn == 0
7600 || GET_CODE (insn) != NOTE
7601 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
7602 return;
7603
7604 /* If the last insn of the loop (the end test) was an NE comparison,
7605 we will interpret it as an EQ comparison, since we fell through
7606 the loop. Any equivalences resulting from that comparison are
7607 therefore not valid and must be invalidated. */
7608 if (last_jump_equiv_class)
7609 for (p = last_jump_equiv_class->first_same_value; p;
7610 p = p->next_same_value)
7611 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
7612 || (GET_CODE (p->exp) == SUBREG
7613 && GET_CODE (SUBREG_REG (p->exp)) == REG))
7614 invalidate (p->exp, VOIDmode);
7615 else if (GET_CODE (p->exp) == STRICT_LOW_PART
7616 || GET_CODE (p->exp) == ZERO_EXTRACT)
7617 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
7618
7619 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
7620 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
7621
7622 The only thing we do with SET_DEST is invalidate entries, so we
7623 can safely process each SET in order. It is slightly less efficient
7624 to do so, but we only want to handle the most common cases. */
7625
7626 for (insn = NEXT_INSN (loop_start);
7627 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
7628 && ! (GET_CODE (insn) == NOTE
7629 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
7630 insn = NEXT_INSN (insn))
7631 {
7632 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
7633 && (GET_CODE (PATTERN (insn)) == SET
7634 || GET_CODE (PATTERN (insn)) == CLOBBER))
7635 cse_set_around_loop (PATTERN (insn), insn, loop_start);
7636 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
7637 && GET_CODE (PATTERN (insn)) == PARALLEL)
7638 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7639 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
7640 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
7641 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
7642 loop_start);
7643 }
7644 }
7645 \f
7646 /* Variable used for communications between the next two routines. */
7647
7648 static struct write_data skipped_writes_memory;
7649
7650 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
7651 since they are done elsewhere. This function is called via note_stores. */
7652
7653 static void
7654 invalidate_skipped_set (dest, set)
7655 rtx set;
7656 rtx dest;
7657 {
7658 if (GET_CODE (set) == CLOBBER
7659 #ifdef HAVE_cc0
7660 || dest == cc0_rtx
7661 #endif
7662 || dest == pc_rtx)
7663 return;
7664
7665 if (GET_CODE (dest) == MEM)
7666 note_mem_written (dest, &skipped_writes_memory);
7667
7668 /* There are times when an address can appear varying and be a PLUS
7669 during this scan when it would be a fixed address were we to know
7670 the proper equivalences. So promote "nonscalar" to be "all". */
7671 if (skipped_writes_memory.nonscalar)
7672 skipped_writes_memory.all = 1;
7673
7674 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7675 || (! skipped_writes_memory.all && ! cse_rtx_addr_varies_p (dest)))
7676 invalidate (dest, VOIDmode);
7677 else if (GET_CODE (dest) == STRICT_LOW_PART
7678 || GET_CODE (dest) == ZERO_EXTRACT)
7679 invalidate (XEXP (dest, 0), GET_MODE (dest));
7680 }
7681
7682 /* Invalidate all insns from START up to the end of the function or the
7683    next label.  This is called when we wish to CSE around a block that is
7684 conditionally executed. */
7685
7686 static void
7687 invalidate_skipped_block (start)
7688 rtx start;
7689 {
7690 rtx insn;
7691 static struct write_data init = {0, 0, 0, 0};
7692 static struct write_data everything = {0, 1, 1, 1};
7693
7694 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
7695 insn = NEXT_INSN (insn))
7696 {
7697 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
7698 continue;
7699
7700 skipped_writes_memory = init;
7701
7702 if (GET_CODE (insn) == CALL_INSN)
7703 {
7704 invalidate_for_call ();
7705 skipped_writes_memory = everything;
7706 }
7707
7708 note_stores (PATTERN (insn), invalidate_skipped_set);
7709 invalidate_from_clobbers (&skipped_writes_memory, PATTERN (insn));
7710 }
7711 }
7712 \f
7713 /* Used for communication between the following two routines; contains a
7714 value to be checked for modification. */
7715
7716 static rtx cse_check_loop_start_value;
7717
7718 /* If modifying X will modify the value in CSE_CHECK_LOOP_START_VALUE,
7719 indicate that fact by setting CSE_CHECK_LOOP_START_VALUE to 0. */
7720
7721 static void
7722 cse_check_loop_start (x, set)
7723 rtx x;
7724 rtx set;
7725 {
7726 if (cse_check_loop_start_value == 0
7727 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
7728 return;
7729
7730 if ((GET_CODE (x) == MEM && GET_CODE (cse_check_loop_start_value) == MEM)
7731 || reg_overlap_mentioned_p (x, cse_check_loop_start_value))
7732 cse_check_loop_start_value = 0;
7733 }
7734
7735 /* X is a SET or CLOBBER contained in INSN that was found near the start of
7736 a loop that starts with the label at LOOP_START.
7737
7738 If X is a SET, we see if its SET_SRC is currently in our hash table.
7739 If so, we see if it has a value equal to some register used only in the
7740 loop exit code (as marked by jump.c).
7741
7742 If those two conditions are true, we search backwards from the start of
7743 the loop to see if that same value was loaded into a register that still
7744 retains its value at the start of the loop.
7745
7746 If so, we insert an insn after the load to copy the destination of that
7747 load into the equivalent register and (try to) replace our SET_SRC with that
7748 register.
7749
7750 In any event, we invalidate whatever this SET or CLOBBER modifies. */
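/* As a hypothetical sketch: if SET_SRC (X) is known to equal
   (reg:SI 120), a register used only in the loop exit test, and an insn
   P before the loop computes the same value into (reg:SI 130), then,
   provided nothing between P and the loop start changes that value,
   (set (reg:SI 120) (reg:SI 130)) is emitted after P and SET_SRC (X) is
   replaced by (reg:SI 120).  Register numbers are illustrative.  */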
7751
7752 static void
7753 cse_set_around_loop (x, insn, loop_start)
7754 rtx x;
7755 rtx insn;
7756 rtx loop_start;
7757 {
7758 struct table_elt *src_elt;
7759 static struct write_data init = {0, 0, 0, 0};
7760 struct write_data writes_memory;
7761
7762 writes_memory = init;
7763
7764 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
7765 are setting PC or CC0 or whose SET_SRC is already a register. */
7766 if (GET_CODE (x) == SET
7767 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
7768 && GET_CODE (SET_SRC (x)) != REG)
7769 {
7770 src_elt = lookup (SET_SRC (x),
7771 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
7772 GET_MODE (SET_DEST (x)));
7773
7774 if (src_elt)
7775 for (src_elt = src_elt->first_same_value; src_elt;
7776 src_elt = src_elt->next_same_value)
7777 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
7778 && COST (src_elt->exp) < COST (SET_SRC (x)))
7779 {
7780 rtx p, set;
7781
7782 /* Look for an insn in front of LOOP_START that sets
7783 something in the desired mode to SET_SRC (x) before we hit
7784 a label or CALL_INSN. */
7785
7786 for (p = prev_nonnote_insn (loop_start);
7787 p && GET_CODE (p) != CALL_INSN
7788 && GET_CODE (p) != CODE_LABEL;
7789 p = prev_nonnote_insn (p))
7790 if ((set = single_set (p)) != 0
7791 && GET_CODE (SET_DEST (set)) == REG
7792 && GET_MODE (SET_DEST (set)) == src_elt->mode
7793 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
7794 {
7795 /* We now have to ensure that nothing between P
7796 and LOOP_START modified anything referenced in
7797 SET_SRC (x). We know that nothing within the loop
7798 can modify it, or we would have invalidated it in
7799 the hash table. */
7800 rtx q;
7801
7802 cse_check_loop_start_value = SET_SRC (x);
7803 for (q = p; q != loop_start; q = NEXT_INSN (q))
7804 if (GET_RTX_CLASS (GET_CODE (q)) == 'i')
7805 note_stores (PATTERN (q), cse_check_loop_start);
7806
7807 /* If nothing was changed and we can replace our
7808 SET_SRC, add an insn after P to copy its destination
7809 to what we will be replacing SET_SRC with. */
7810 if (cse_check_loop_start_value
7811 && validate_change (insn, &SET_SRC (x),
7812 src_elt->exp, 0))
7813 emit_insn_after (gen_move_insn (src_elt->exp,
7814 SET_DEST (set)),
7815 p);
7816 break;
7817 }
7818 }
7819 }
7820
7821 /* Now invalidate anything modified by X. */
7822 note_mem_written (SET_DEST (x), &writes_memory);
7823
7824 if (writes_memory.var)
7825 invalidate_memory (&writes_memory);
7826
7827 /* See comment on similar code in cse_insn for explanation of these tests. */
7828 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
7829 || (GET_CODE (SET_DEST (x)) == MEM && ! writes_memory.all
7830 && ! cse_rtx_addr_varies_p (SET_DEST (x))))
7831 invalidate (SET_DEST (x), VOIDmode);
7832 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
7833 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
7834 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
7835 }
7836 \f
7837 /* Find the end of INSN's basic block and return its range,
7838 the total number of SETs in all the insns of the block, the last insn of the
7839 block, and the branch path.
7840
7841 The branch path indicates which branches should be followed. If a non-zero
7842 path size is specified, the block should be rescanned and a different set
7843 of branches will be taken. The branch path is only used if
7844 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
7845
7846 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
7847 used to describe the block. It is filled in with the information about
7848 the current block. The incoming structure's branch path, if any, is used
7849 to construct the output branch path. */
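/* For instance, if the block previously ended by following a conditional
   jump recorded as TAKEN, a rescan flips that last entry to NOT_TAKEN so
   the fall-through code is examined instead; once every entry is
   NOT_TAKEN the path size is reset to zero and no further rescan is
   requested.  */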
7850
7851 void
7852 cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
7853 rtx insn;
7854 struct cse_basic_block_data *data;
7855 int follow_jumps;
7856 int after_loop;
7857 int skip_blocks;
7858 {
7859 rtx p = insn, q;
7860 int nsets = 0;
7861 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
7862 rtx next = GET_RTX_CLASS (GET_CODE (insn)) == 'i' ? insn : next_real_insn (insn);
7863 int path_size = data->path_size;
7864 int path_entry = 0;
7865 int i;
7866
7867 /* Update the previous branch path, if any. If the last branch was
7868 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
7869 shorten the path by one and look at the previous branch. We know that
7870 at least one branch must have been taken if PATH_SIZE is non-zero. */
7871 while (path_size > 0)
7872 {
7873 if (data->path[path_size - 1].status != NOT_TAKEN)
7874 {
7875 data->path[path_size - 1].status = NOT_TAKEN;
7876 break;
7877 }
7878 else
7879 path_size--;
7880 }
7881
7882 /* Scan to end of this basic block. */
7883 while (p && GET_CODE (p) != CODE_LABEL)
7884 {
7885 /* Don't cse out the end of a loop. This makes a difference
7886 only for the unusual loops that always execute at least once;
7887 all other loops have labels there so we will stop in any case.
7888 Cse'ing out the end of the loop is dangerous because it
7889 might cause an invariant expression inside the loop
7890 to be reused after the end of the loop. This would make it
7891 hard to move the expression out of the loop in loop.c,
7892 especially if it is one of several equivalent expressions
7893 and loop.c would like to eliminate it.
7894
7895 If we are running after loop.c has finished, we can ignore
7896 the NOTE_INSN_LOOP_END. */
7897
7898 if (! after_loop && GET_CODE (p) == NOTE
7899 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
7900 break;
7901
7902       /* Don't cse over a call to setjmp; on some machines (e.g., the vax)
7903 the regs restored by the longjmp come from
7904 a later time than the setjmp. */
7905 if (GET_CODE (p) == NOTE
7906 && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
7907 break;
7908
7909 /* A PARALLEL can have lots of SETs in it,
7910 especially if it is really an ASM_OPERANDS. */
7911 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
7912 && GET_CODE (PATTERN (p)) == PARALLEL)
7913 nsets += XVECLEN (PATTERN (p), 0);
7914 else if (GET_CODE (p) != NOTE)
7915 nsets += 1;
7916
7917 /* Ignore insns made by CSE; they cannot affect the boundaries of
7918 the basic block. */
7919
7920 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
7921 high_cuid = INSN_CUID (p);
7922 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
7923 low_cuid = INSN_CUID (p);
7924
7925 /* See if this insn is in our branch path. If it is and we are to
7926 take it, do so. */
7927 if (path_entry < path_size && data->path[path_entry].branch == p)
7928 {
7929 if (data->path[path_entry].status != NOT_TAKEN)
7930 p = JUMP_LABEL (p);
7931
7932 /* Point to next entry in path, if any. */
7933 path_entry++;
7934 }
7935
7936 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
7937 was specified, we haven't reached our maximum path length, there are
7938 insns following the target of the jump, this is the only use of the
7939 jump label, and the target label is preceded by a BARRIER.
7940
7941 Alternatively, we can follow the jump if it branches around a
7942 block of code and there are no other branches into the block.
7943 In this case invalidate_skipped_block will be called to invalidate any
7944 registers set in the block when following the jump. */
7945
7946 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
7947 && GET_CODE (p) == JUMP_INSN
7948 && GET_CODE (PATTERN (p)) == SET
7949 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
7950 && LABEL_NUSES (JUMP_LABEL (p)) == 1
7951 && NEXT_INSN (JUMP_LABEL (p)) != 0)
7952 {
7953 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
7954 if ((GET_CODE (q) != NOTE
7955 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
7956 || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
7957 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
7958 break;
7959
7960 /* If we ran into a BARRIER, this code is an extension of the
7961 basic block when the branch is taken. */
7962 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
7963 {
7964 	      /* Don't allow ourselves to keep walking around an
7965 always-executed loop. */
7966 if (next_real_insn (q) == next)
7967 {
7968 p = NEXT_INSN (p);
7969 continue;
7970 }
7971
7972 /* Similarly, don't put a branch in our path more than once. */
7973 for (i = 0; i < path_entry; i++)
7974 if (data->path[i].branch == p)
7975 break;
7976
7977 if (i != path_entry)
7978 break;
7979
7980 data->path[path_entry].branch = p;
7981 data->path[path_entry++].status = TAKEN;
7982
7983 /* This branch now ends our path. It was possible that we
7984 didn't see this branch the last time around (when the
7985 insn in front of the target was a JUMP_INSN that was
7986 turned into a no-op). */
7987 path_size = path_entry;
7988
7989 p = JUMP_LABEL (p);
7990 /* Mark block so we won't scan it again later. */
7991 PUT_MODE (NEXT_INSN (p), QImode);
7992 }
7993 /* Detect a branch around a block of code. */
7994 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
7995 {
7996 register rtx tmp;
7997
7998 if (next_real_insn (q) == next)
7999 {
8000 p = NEXT_INSN (p);
8001 continue;
8002 }
8003
8004 for (i = 0; i < path_entry; i++)
8005 if (data->path[i].branch == p)
8006 break;
8007
8008 if (i != path_entry)
8009 break;
8010
8011 /* This is no_labels_between_p (p, q) with an added check for
8012 reaching the end of a function (in case Q precedes P). */
8013 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
8014 if (GET_CODE (tmp) == CODE_LABEL)
8015 break;
8016
8017 if (tmp == q)
8018 {
8019 data->path[path_entry].branch = p;
8020 data->path[path_entry++].status = AROUND;
8021
8022 path_size = path_entry;
8023
8024 p = JUMP_LABEL (p);
8025 /* Mark block so we won't scan it again later. */
8026 PUT_MODE (NEXT_INSN (p), QImode);
8027 }
8028 }
8029 }
8030 p = NEXT_INSN (p);
8031 }
8032
8033 data->low_cuid = low_cuid;
8034 data->high_cuid = high_cuid;
8035 data->nsets = nsets;
8036 data->last = p;
8037
8038   /* If no jump in the path is taken, set our path length to zero
8039      so a rescan won't be done.  */
8040 for (i = path_size - 1; i >= 0; i--)
8041 if (data->path[i].status != NOT_TAKEN)
8042 break;
8043
8044 if (i == -1)
8045 data->path_size = 0;
8046 else
8047 data->path_size = path_size;
8048
8049 /* End the current branch path. */
8050 data->path[path_size].branch = 0;
8051 }
8052 \f
8053 /* Perform cse on the instructions of a function.
8054 F is the first instruction.
8055 NREGS is one plus the highest pseudo-reg number used in the instruction.
8056
8057 AFTER_LOOP is 1 if this is the cse call done after loop optimization
8058 (only if -frerun-cse-after-loop).
8059
8060 Returns 1 if jump_optimize should be redone due to simplifications
8061 in conditional jump instructions. */
8062
8063 int
8064 cse_main (f, nregs, after_loop, file)
8065 rtx f;
8066 int nregs;
8067 int after_loop;
8068 FILE *file;
8069 {
8070 struct cse_basic_block_data val;
8071 register rtx insn = f;
8072 register int i;
8073
8074 cse_jumps_altered = 0;
8075 constant_pool_entries_cost = 0;
8076 val.path_size = 0;
8077
8078 init_recog ();
8079
8080 max_reg = nregs;
8081
8082 all_minus_one = (int *) alloca (nregs * sizeof (int));
8083 consec_ints = (int *) alloca (nregs * sizeof (int));
8084
8085 for (i = 0; i < nregs; i++)
8086 {
8087 all_minus_one[i] = -1;
8088 consec_ints[i] = i;
8089 }
8090
8091 reg_next_eqv = (int *) alloca (nregs * sizeof (int));
8092 reg_prev_eqv = (int *) alloca (nregs * sizeof (int));
8093 reg_qty = (int *) alloca (nregs * sizeof (int));
8094 reg_in_table = (int *) alloca (nregs * sizeof (int));
8095 reg_tick = (int *) alloca (nregs * sizeof (int));
8096
8097 #ifdef LOAD_EXTEND_OP
8098
8099 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
8100 and change the code and mode as appropriate. */
8101 memory_extend_rtx = gen_rtx (ZERO_EXTEND, VOIDmode, 0);
8102 #endif
8103
8104 /* Discard all the free elements of the previous function
8105      since they are allocated in the temporary obstack. */
8106 bzero ((char *) table, sizeof table);
8107 free_element_chain = 0;
8108 n_elements_made = 0;
8109
8110 /* Find the largest uid. */
8111
8112 max_uid = get_max_uid ();
8113 uid_cuid = (int *) alloca ((max_uid + 1) * sizeof (int));
8114 bzero ((char *) uid_cuid, (max_uid + 1) * sizeof (int));
8115
8116 /* Compute the mapping from uids to cuids.
8117 CUIDs are numbers assigned to insns, like uids,
8118 except that cuids increase monotonically through the code.
8119 Don't assign cuids to line-number NOTEs, so that the distance in cuids
8120 between two insns is not affected by -g. */
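/* For example (hypothetical uids): if insn 12 is followed by a
   line-number note and then insn 13, the cuids assigned below might be
   5, 5 and 6 respectively; the note shares the cuid of the preceding
   insn, so cuid distances are unchanged by -g.  */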
8121
8122 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
8123 {
8124 if (GET_CODE (insn) != NOTE
8125 || NOTE_LINE_NUMBER (insn) < 0)
8126 INSN_CUID (insn) = ++i;
8127 else
8128 /* Give a line number note the same cuid as preceding insn. */
8129 INSN_CUID (insn) = i;
8130 }
8131
8132 /* Initialize which registers are clobbered by calls. */
8133
8134 CLEAR_HARD_REG_SET (regs_invalidated_by_call);
8135
8136 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8137 if ((call_used_regs[i]
8138 /* Used to check !fixed_regs[i] here, but that isn't safe;
8139 fixed regs are still call-clobbered, and sched can get
8140 confused if they can "live across calls".
8141
8142 	 The frame pointer is always preserved across calls.  The arg
8143 	 pointer is preserved if it is fixed.  The stack pointer usually is, unless
8144 RETURN_POPS_ARGS, in which case an explicit CLOBBER
8145 will be present. If we are generating PIC code, the PIC offset
8146 table register is preserved across calls. */
8147
8148 && i != STACK_POINTER_REGNUM
8149 && i != FRAME_POINTER_REGNUM
8150 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
8151 && i != HARD_FRAME_POINTER_REGNUM
8152 #endif
8153 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
8154 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
8155 #endif
8156 #if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
8157 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
8158 #endif
8159 )
8160 || global_regs[i])
8161 SET_HARD_REG_BIT (regs_invalidated_by_call, i);
8162
8163 /* Loop over basic blocks.
8164 Compute the maximum number of qty's needed for each basic block
8165 (which is 2 for each SET). */
8166 insn = f;
8167 while (insn)
8168 {
8169 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
8170 flag_cse_skip_blocks);
8171
8172 /* If this basic block was already processed or has no sets, skip it. */
8173 if (val.nsets == 0 || GET_MODE (insn) == QImode)
8174 {
8175 PUT_MODE (insn, VOIDmode);
8176 insn = (val.last ? NEXT_INSN (val.last) : 0);
8177 val.path_size = 0;
8178 continue;
8179 }
8180
8181 cse_basic_block_start = val.low_cuid;
8182 cse_basic_block_end = val.high_cuid;
8183 max_qty = val.nsets * 2;
8184
8185 if (file)
8186 fprintf (file, ";; Processing block from %d to %d, %d sets.\n",
8187 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
8188 val.nsets);
8189
8190 /* Make MAX_QTY bigger to give us room to optimize
8191 past the end of this basic block, if that should prove useful. */
8192 if (max_qty < 500)
8193 max_qty = 500;
8194
8195 max_qty += max_reg;
8196
8197 /* If this basic block is being extended by following certain jumps,
8198 (see `cse_end_of_basic_block'), we reprocess the code from the start.
8199 Otherwise, we start after this basic block. */
8200 if (val.path_size > 0)
8201 cse_basic_block (insn, val.last, val.path, 0);
8202 else
8203 {
8204 int old_cse_jumps_altered = cse_jumps_altered;
8205 rtx temp;
8206
8207 /* When cse changes a conditional jump to an unconditional
8208 jump, we want to reprocess the block, since it will give
8209 us a new branch path to investigate. */
8210 cse_jumps_altered = 0;
8211 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
8212 if (cse_jumps_altered == 0
8213 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
8214 insn = temp;
8215
8216 cse_jumps_altered |= old_cse_jumps_altered;
8217 }
8218
8219 #ifdef USE_C_ALLOCA
8220 alloca (0);
8221 #endif
8222 }
8223
8224 /* Tell refers_to_mem_p that qty_const info is not available. */
8225 qty_const = 0;
8226
8227 if (max_elements_made < n_elements_made)
8228 max_elements_made = n_elements_made;
8229
8230 return cse_jumps_altered;
8231 }
8232
8233 /* Process a single basic block.  FROM and TO are the limits of the basic
8234 block. NEXT_BRANCH points to the branch path when following jumps or
8235 a null path when not following jumps.
8236
8237 AROUND_LOOP is non-zero if we are to try to cse around to the start of a
8238 loop. This is true when we are being called for the last time on a
8239 block and this CSE pass is before loop.c. */
8240
8241 static rtx
8242 cse_basic_block (from, to, next_branch, around_loop)
8243 register rtx from, to;
8244 struct branch_path *next_branch;
8245 int around_loop;
8246 {
8247 register rtx insn;
8248 int to_usage = 0;
8249 int in_libcall_block = 0;
8250
8251 /* Each of these arrays is undefined before max_reg, so only allocate
8252 the space actually needed and adjust the start below. */
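  /* For example, with max_reg == 100 and max_qty == 700, each array gets
     600 elements, and the pointer adjustments below make index 100 refer
     to element 0, so the valid indices run from max_reg to max_qty - 1.
     The numbers are illustrative.  */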
8253
8254 qty_first_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8255 qty_last_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8256   qty_mode = (enum machine_mode *) alloca ((max_qty - max_reg) * sizeof (enum machine_mode));
8257 qty_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8258 qty_const_insn = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8259 qty_comparison_code
8260 = (enum rtx_code *) alloca ((max_qty - max_reg) * sizeof (enum rtx_code));
8261 qty_comparison_qty = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8262 qty_comparison_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8263
8264 qty_first_reg -= max_reg;
8265 qty_last_reg -= max_reg;
8266 qty_mode -= max_reg;
8267 qty_const -= max_reg;
8268 qty_const_insn -= max_reg;
8269 qty_comparison_code -= max_reg;
8270 qty_comparison_qty -= max_reg;
8271 qty_comparison_const -= max_reg;
8272
8273 new_basic_block ();
8274
8275 /* TO might be a label. If so, protect it from being deleted. */
8276 if (to != 0 && GET_CODE (to) == CODE_LABEL)
8277 ++LABEL_NUSES (to);
8278
8279 for (insn = from; insn != to; insn = NEXT_INSN (insn))
8280 {
8281 register enum rtx_code code;
8282
8283 /* See if this is a branch that is part of the path. If so, and it is
8284 to be taken, do so. */
8285 if (next_branch->branch == insn)
8286 {
8287 enum taken status = next_branch++->status;
8288 if (status != NOT_TAKEN)
8289 {
8290 if (status == TAKEN)
8291 record_jump_equiv (insn, 1);
8292 else
8293 invalidate_skipped_block (NEXT_INSN (insn));
8294
8295 /* Set the last insn as the jump insn; it doesn't affect cc0.
8296 Then follow this branch. */
8297 #ifdef HAVE_cc0
8298 prev_insn_cc0 = 0;
8299 #endif
8300 prev_insn = insn;
8301 insn = JUMP_LABEL (insn);
8302 continue;
8303 }
8304 }
8305
8306 code = GET_CODE (insn);
8307 if (GET_MODE (insn) == QImode)
8308 PUT_MODE (insn, VOIDmode);
8309
8310 if (GET_RTX_CLASS (code) == 'i')
8311 {
8312 /* Process notes first so we have all notes in canonical forms when
8313 looking for duplicate operations. */
8314
8315 if (REG_NOTES (insn))
8316 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
8317
8318 	  /* Track when we are inside a LIBCALL block.  Inside such a block,
8319 we do not want to record destinations. The last insn of a
8320 LIBCALL block is not considered to be part of the block, since
8321 its destination is the result of the block and hence should be
8322 recorded. */
8323
8324 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
8325 in_libcall_block = 1;
8326 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
8327 in_libcall_block = 0;
8328
8329 cse_insn (insn, in_libcall_block);
8330 }
8331
8332 /* If INSN is now an unconditional jump, skip to the end of our
8333 basic block by pretending that we just did the last insn in the
8334 basic block. If we are jumping to the end of our block, show
8335 that we can have one usage of TO. */
8336
8337 if (simplejump_p (insn))
8338 {
8339 if (to == 0)
8340 return 0;
8341
8342 if (JUMP_LABEL (insn) == to)
8343 to_usage = 1;
8344
8345 /* Maybe TO was deleted because the jump is unconditional.
8346 If so, there is nothing left in this basic block. */
8347 /* ??? Perhaps it would be smarter to set TO
8348 to whatever follows this insn,
8349 and pretend the basic block had always ended here. */
8350 if (INSN_DELETED_P (to))
8351 break;
8352
8353 insn = PREV_INSN (to);
8354 }
8355
8356 /* See if it is ok to keep on going past the label
8357 which used to end our basic block. Remember that we incremented
8358 the count of that label, so we decrement it here. If we made
8359 a jump unconditional, TO_USAGE will be one; in that case, we don't
8360 want to count the use in that jump. */
8361
8362 if (to != 0 && NEXT_INSN (insn) == to
8363 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
8364 {
8365 struct cse_basic_block_data val;
8366
8367 insn = NEXT_INSN (to);
8368
8369 if (LABEL_NUSES (to) == 0)
8370 delete_insn (to);
8371
8372 /* Find the end of the following block. Note that we won't be
8373 following branches in this case. If TO was the last insn
8374 in the function, we are done. Similarly, if we deleted the
8375 insn after TO, it must have been because it was preceded by
8376 a BARRIER. In that case, we are done with this block because it
8377 has no continuation. */
8378
8379 if (insn == 0 || INSN_DELETED_P (insn))
8380 return 0;
8381
8382 to_usage = 0;
8383 val.path_size = 0;
8384 cse_end_of_basic_block (insn, &val, 0, 0, 0);
8385
8386 /* If the tables we allocated have enough space left
8387 to handle all the SETs in the next basic block,
8388 continue through it. Otherwise, return,
8389 and that block will be scanned individually. */
8390 if (val.nsets * 2 + next_qty > max_qty)
8391 break;
8392
8393 cse_basic_block_start = val.low_cuid;
8394 cse_basic_block_end = val.high_cuid;
8395 to = val.last;
8396
8397 /* Prevent TO from being deleted if it is a label. */
8398 if (to != 0 && GET_CODE (to) == CODE_LABEL)
8399 ++LABEL_NUSES (to);
8400
8401 /* Back up so we process the first insn in the extension. */
8402 insn = PREV_INSN (insn);
8403 }
8404 }
8405
8406 if (next_qty > max_qty)
8407 abort ();
8408
8409 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
8410 the previous insn is the only insn that branches to the head of a loop,
8411 we can cse into the loop. Don't do this if we changed the jump
8412 structure of a loop unless we aren't going to be following jumps. */
8413
8414 if ((cse_jumps_altered == 0
8415 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
8416 && around_loop && to != 0
8417 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
8418 && GET_CODE (PREV_INSN (to)) == JUMP_INSN
8419 && JUMP_LABEL (PREV_INSN (to)) != 0
8420 && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
8421 cse_around_loop (JUMP_LABEL (PREV_INSN (to)));
8422
8423 return to ? NEXT_INSN (to) : 0;
8424 }
8425 \f
8426 /* Count the number of times registers are used (not set) in X.
8427 COUNTS is an array in which we accumulate the count, INCR is how much
8428 we count each register usage.
8429
8430 Don't count a usage of DEST, which is the SET_DEST of a SET which
8431 contains X in its SET_SRC. This is because such a SET does not
8432 modify the liveness of DEST. */
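/* For instance, counting
     (set (reg:SI 100) (plus:SI (reg:SI 100) (reg:SI 101)))
   with INCR == 1 adds one use for register 101 but none for register
   100, whose appearance inside the SET_SRC of its own SET does not keep
   it alive.  Register numbers are illustrative.  */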
8433
8434 static void
8435 count_reg_usage (x, counts, dest, incr)
8436 rtx x;
8437 int *counts;
8438 rtx dest;
8439 int incr;
8440 {
8441 enum rtx_code code;
8442 char *fmt;
8443 int i, j;
8444
8445 if (x == 0)
8446 return;
8447
8448 switch (code = GET_CODE (x))
8449 {
8450 case REG:
8451 if (x != dest)
8452 counts[REGNO (x)] += incr;
8453 return;
8454
8455 case PC:
8456 case CC0:
8457 case CONST:
8458 case CONST_INT:
8459 case CONST_DOUBLE:
8460 case SYMBOL_REF:
8461 case LABEL_REF:
8462 case CLOBBER:
8463 return;
8464
8465 case SET:
8466 /* Unless we are setting a REG, count everything in SET_DEST. */
8467 if (GET_CODE (SET_DEST (x)) != REG)
8468 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
8469
8470 /* If SRC has side-effects, then we can't delete this insn, so the
8471 usage of SET_DEST inside SRC counts.
8472
8473 ??? Strictly-speaking, we might be preserving this insn
8474 because some other SET has side-effects, but that's hard
8475 to do and can't happen now. */
8476 count_reg_usage (SET_SRC (x), counts,
8477 side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
8478 incr);
8479 return;
8480
8481 case CALL_INSN:
8482 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
8483
8484 /* ... falls through ... */
8485 case INSN:
8486 case JUMP_INSN:
8487 count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);
8488
8489 /* Things used in a REG_EQUAL note aren't dead since loop may try to
8490 use them. */
8491
8492 count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
8493 return;
8494
8495 case EXPR_LIST:
8496 case INSN_LIST:
8497 if (REG_NOTE_KIND (x) == REG_EQUAL
8498 || GET_CODE (XEXP (x,0)) == USE)
8499 count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
8500 count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
8501 return;
8502 }
8503
8504 fmt = GET_RTX_FORMAT (code);
8505 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8506 {
8507 if (fmt[i] == 'e')
8508 count_reg_usage (XEXP (x, i), counts, dest, incr);
8509 else if (fmt[i] == 'E')
8510 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8511 count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
8512 }
8513 }
8514 \f
8515 /* Scan all the insns and delete any that are dead; i.e., they store a register
8516 that is never used or they copy a register to itself.
8517
8518 This is used to remove insns made obviously dead by cse. It improves the
8519 heuristics in loop since it won't try to move dead invariants out of loops
8520 or make givs for dead quantities. The remaining passes of the compilation
8521 are also sped up. */
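/* For example, once cse has replaced every use of (reg:SI 150), an insn
   such as (set (reg:SI 150) (plus:SI (reg:SI 151) (const_int 1))) has a
   zero use count for register 150 and is deleted here; the deletion also
   decrements the count for register 151, possibly exposing its setter as
   dead on a later iteration of the backward scan.  Register numbers are
   illustrative.  */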
8522
8523 void
8524 delete_dead_from_cse (insns, nreg)
8525 rtx insns;
8526 int nreg;
8527 {
8528 int *counts = (int *) alloca (nreg * sizeof (int));
8529 rtx insn, prev;
8530 rtx tem;
8531 int i;
8532 int in_libcall = 0;
8533
8534 /* First count the number of times each register is used. */
8535 bzero ((char *) counts, sizeof (int) * nreg);
8536 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
8537 count_reg_usage (insn, counts, NULL_RTX, 1);
8538
8539 /* Go from the last insn to the first and delete insns that only set unused
8540 registers or copy a register to itself. As we delete an insn, remove
8541 usage counts for registers it uses. */
8542 for (insn = prev_real_insn (get_last_insn ()); insn; insn = prev)
8543 {
8544 int live_insn = 0;
8545
8546 prev = prev_real_insn (insn);
8547
8548 /* Don't delete any insns that are part of a libcall block.
8549 Flow or loop might get confused if we did that. Remember
8550 that we are scanning backwards. */
8551 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
8552 in_libcall = 1;
8553
8554 if (in_libcall)
8555 live_insn = 1;
8556 else if (GET_CODE (PATTERN (insn)) == SET)
8557 {
8558 if (GET_CODE (SET_DEST (PATTERN (insn))) == REG
8559 && SET_DEST (PATTERN (insn)) == SET_SRC (PATTERN (insn)))
8560 ;
8561
8562 #ifdef HAVE_cc0
8563 else if (GET_CODE (SET_DEST (PATTERN (insn))) == CC0
8564 && ! side_effects_p (SET_SRC (PATTERN (insn)))
8565 && ((tem = next_nonnote_insn (insn)) == 0
8566 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
8567 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
8568 ;
8569 #endif
8570 else if (GET_CODE (SET_DEST (PATTERN (insn))) != REG
8571 || REGNO (SET_DEST (PATTERN (insn))) < FIRST_PSEUDO_REGISTER
8572 || counts[REGNO (SET_DEST (PATTERN (insn)))] != 0
8573 || side_effects_p (SET_SRC (PATTERN (insn))))
8574 live_insn = 1;
8575 }
8576 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
8577 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
8578 {
8579 rtx elt = XVECEXP (PATTERN (insn), 0, i);
8580
8581 if (GET_CODE (elt) == SET)
8582 {
8583 if (GET_CODE (SET_DEST (elt)) == REG
8584 && SET_DEST (elt) == SET_SRC (elt))
8585 ;
8586
8587 #ifdef HAVE_cc0
8588 else if (GET_CODE (SET_DEST (elt)) == CC0
8589 && ! side_effects_p (SET_SRC (elt))
8590 && ((tem = next_nonnote_insn (insn)) == 0
8591 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
8592 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
8593 ;
8594 #endif
8595 else if (GET_CODE (SET_DEST (elt)) != REG
8596 || REGNO (SET_DEST (elt)) < FIRST_PSEUDO_REGISTER
8597 || counts[REGNO (SET_DEST (elt))] != 0
8598 || side_effects_p (SET_SRC (elt)))
8599 live_insn = 1;
8600 }
8601 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
8602 live_insn = 1;
8603 }
8604 else
8605 live_insn = 1;
8606
8607 /* If this is a dead insn, delete it and show registers in it aren't
8608 being used. */
8609
8610 if (! live_insn)
8611 {
8612 count_reg_usage (insn, counts, NULL_RTX, -1);
8613 delete_insn (insn);
8614 }
8615
8616 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
8617 in_libcall = 0;
8618 }
8619 }