/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 88, 89, 92, 93, 94, 1995 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING. If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA. */


#include "config.h"
/* Must precede rtl.h for FFS. */
#include <stdio.h>

#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"

#include <setjmp.h>

/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge; so, at each label, we forget all that is
   known and start fresh. This can be described as processing each
   basic block separately. Note, however, that these are not quite
   the same as the basic blocks found by a later pass and used for
   data flow analysis and register packing. We do not need to start fresh
   after a conditional jump instruction if there is no label there.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and several vectors together
   with "quantity numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster. It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number. General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

   Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents. During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `reg_qty' records what quantity a register is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to `max_reg'.
   If register N has not been assigned a quantity, reg_qty[N] will equal N.

   Quantity numbers below `max_reg' do not exist and none of the `qty_...'
   variables should be referenced with an index below `max_reg'.
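
   For example (an illustrative sketch; the register numbers are invented):
   after scanning a copy such as

       (set (reg 70) (reg 65))

   the two registers share one quantity, so reg_qty[65] == reg_qty[70],
   while a register that has never been assigned a quantity, say 80,
   still satisfies reg_qty[80] == 80.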

   We also maintain a bidirectional chain of registers for each
   quantity number. `qty_first_reg', `qty_last_reg',
   `reg_next_eqv' and `reg_prev_eqv' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, it must be true that
   REG expressions with the quantity's mode (`qty_mode') are in the hash
   table for both registers and are in the same class.

   The converse is not true. Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity of one of the
   registers does not have the same mode as those expressions.

   Constants and quantity numbers:

   When a quantity has a known constant value, that value is stored
   in the appropriate element of qty_const. This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value. In any
   event, expressions containing constants can be simplified, by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate element
   of qty_const.

   Integer constants don't have a machine mode. However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant. The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.
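
   For instance (an illustrative sketch): after

       (set (reg:SI 70) (const_int 4))
       (set (reg:QI 71) (const_int 4))

   the single rtx (const_int 4) is entered twice, once recorded with
   SImode and once with QImode, and the two entries are not treated
   as equivalent.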

   Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'. It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression. This is done using `reg_qty' and `qty_first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

   1. If the value changing is in memory, except in special cases
   ANYTHING referring to memory could be changed. That is because
   nobody knows where a pointer does not point.
   The function `invalidate_memory' removes what is necessary.

   The special cases are when the address is constant or is
   a constant plus a fixed register such as the frame pointer
   or a static chain pointer. When such addresses are stored in,
   we can tell exactly which other such addresses must be invalidated
   due to overlap. `invalidate' does this.
   All expressions that refer to non-constant
   memory addresses are also invalidated. `invalidate_memory' does this.

   2. If the value changing is a register, all expressions
   containing references to that register, and only those,
   must be removed.
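
   For example (a sketch with invented register numbers): if the table
   holds an entry for (plus (reg 65) (reg 66)) and the insn being scanned
   stores into (reg 65), that entry, and every other expression mentioning
   register 65, must go, while entries mentioning only register 66 may stay.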

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value. This sequence of circumstances is rare
   within any one basic block.

   The vectors `reg_tick' and `reg_in_table' are used to detect this case.
   reg_tick[i] is incremented whenever a value is stored in register i.
   reg_in_table[i] holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value reg_tick[i] had
   when the references were entered. If we want to enter a reference
   and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
   Until we want to enter a new entry, the mere fact that the two vectors
   don't match causes the entries to be ignored if anyone tries to match them.
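
   Schematically, the staleness check looks like this (a sketch; the
   real code appears in mention_regs below):

       if (reg_in_table[i] >= 0 && reg_in_table[i] != reg_tick[i])
         remove_invalid_refs (i);
       reg_in_table[i] = reg_tick[i];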

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains. However, the vectors `reg_tick'
   and `reg_in_table' do not apply to expressions which are simple
   register references. These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

   Related expressions:

   Constant expressions that differ only by an additive integer
   are called related. When a constant expression is put in
   the table, the related expression with no constant term
   is also entered. These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression. */
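
/* For example (illustrative only): entering
   (const (plus (symbol_ref "x") (const_int 8))) also enters
   (symbol_ref "x"), and the two table elements are linked through
   their `related_value' fields, so a register already known to hold
   `x' can be found and used to compute `x + 8' cheaply. */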

/* One plus largest register number used in this function. */

static int max_reg;

/* Length of vectors indexed by quantity number.
   We know in advance we will not need a quantity number this big. */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far. */

static int next_qty;

/* Indexed by quantity number, gives the first (or last) (pseudo) register
   in the chain of registers that currently contain this quantity. */

static int *qty_first_reg;
static int *qty_last_reg;

/* Indexed by quantity number, gives the mode of the quantity. */

static enum machine_mode *qty_mode;

/* Indexed by quantity number, gives the rtx of the constant value of the
   quantity, or zero if it does not have a known value.
   A sum of the frame pointer (or arg pointer) plus a constant
   can also be entered here. */

static rtx *qty_const;

/* Indexed by qty number, gives the insn that stored the constant value
   recorded in `qty_const'. */

static rtx *qty_const_insn;

/* The next three variables are used to track when a comparison between a
   quantity and some constant or register has been passed. In that case, we
   know the results of the comparison in case we see it again. These variables
   record a comparison that is known to be true. */

/* Indexed by qty number, gives the rtx code of a comparison with a known
   result involving this quantity. If none, it is UNKNOWN. */
static enum rtx_code *qty_comparison_code;

/* Indexed by qty number, gives the constant being compared against in a
   comparison of known result. If no such comparison, it is undefined.
   If the comparison is not with a constant, it is zero. */

static rtx *qty_comparison_const;

/* Indexed by qty number, gives the quantity being compared against in a
   comparison of known result. If no such comparison, it is undefined.
   If the comparison is not with a register, it is -1. */

static int *qty_comparison_qty;

#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the value last assigned to CC0. If it should
   happen to be a constant, it is stored in preference to the actual
   assigned value. In case it is a constant, we store the mode in which
   the constant should be interpreted. */

static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;
#endif

/* Previous actual insn. 0 if at first insn of basic block. */

static rtx prev_insn;

/* Insn being scanned. */

static rtx this_insn;

/* Indexed by (pseudo) register number, gives the quantity number
   of the register's current contents. */

static int *reg_qty;

/* Indexed by (pseudo) register number, gives the number of the next (or
   previous) (pseudo) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If reg_qty[N] == N, reg_next_eqv[N] is undefined. */

static int *reg_next_eqv;
static int *reg_prev_eqv;

/* Indexed by (pseudo) register number, gives the number of times
   that register has been altered in the current basic block. */

static int *reg_tick;

/* Indexed by (pseudo) register number, gives the reg_tick value at which
   rtx's containing this register are valid in the hash table.
   If this does not equal the current reg_tick value, such expressions
   existing in the hash table are invalid.
   If this is -1, no expressions containing this register have been
   entered in the table. */

static int *reg_in_table;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table. Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table. */

static HARD_REG_SET hard_regs_in_table;

/* A HARD_REG_SET containing all the hard registers that are invalidated
   by a CALL_INSN. */

static HARD_REG_SET regs_invalidated_by_call;

/* Two vectors of ints:
   one with max_reg elements, each containing -1; the other with
   max_reg + 500 (an approximation for max_qty) elements, where
   element i contains i.
   These are used to initialize various other vectors fast. */

static int *all_minus_one;
static int *consec_ints;

/* CUID of insn that starts the basic block currently being cse-processed. */

static int cse_basic_block_start;

/* CUID of insn that ends the basic block currently being cse-processed. */

static int cse_basic_block_end;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   We use them to see whether a reg is used outside a given basic block. */

static int *uid_cuid;

/* Highest UID in UID_CUID. */
static int max_uid;

/* Get the cuid of an insn. */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Nonzero if cse has altered conditional jump insns
   in such a way that jump optimization should be redone. */

static int cse_jumps_altered;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression. */

static int do_not_record;

#ifdef LOAD_EXTEND_OP

/* Scratch rtl used when looking for load-extended copy of a MEM. */
static rtx memory_extend_rtx;
#endif

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed. */

static int hash_arg_in_memory;

/* canon_hash stores 1 in hash_arg_in_struct
   if it notices a reference to memory that's part of a structure. */

static int hash_arg_in_struct;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   is on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain. The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory. These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `in_struct' field is nonzero for elements that
   involve any reference to memory inside a structure or array.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as. Each constant is
   recorded separately for each mode it is used with. */


struct table_elt
{
  rtx exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  enum machine_mode mode;
  char in_memory;
  char in_struct;
  char is_const;
  char flag;
};

/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently. */
#define NBUCKETS 31

/* Compute hash code of X in mode M. Special-case the case where X is a
   pseudo register (hard registers may require `do_not_record' to be set). */

#define HASH(X, M) \
 (GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER \
  ? (((unsigned) REG << 7) + (unsigned) reg_qty[REGNO (X)]) % NBUCKETS \
  : canon_hash (X, M) % NBUCKETS)
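
/* Illustrative note (not part of the pass): for pseudo registers the
   hash depends only on the quantity number, so two pseudos currently
   known to hold the same value land in the same bucket even though
   their register numbers differ; that is, for pseudos R1 and R2 with
   reg_qty[REGNO (R1)] == reg_qty[REGNO (R2)], we get
   HASH (R1, M) == HASH (R2, M). */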

/* Determine whether register number N is considered a fixed register for CSE.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed,
   but not if it is an overlapping register. */
#ifdef OVERLAPPING_REGNO_P
#define FIXED_REGNO_P(N) \
  (((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
    || fixed_regs[N] || global_regs[N]) \
   && ! OVERLAPPING_REGNO_P ((N)))
#else
#define FIXED_REGNO_P(N) \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])
#endif

/* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0. Next come pseudos with a cost of 1 and other hard registers with
   a cost of 2. Aside from these special cases, call `rtx_cost'. */

#define CHEAP_REGNO(N) \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM \
   || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
   || ((N) < FIRST_PSEUDO_REGISTER \
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

/* A register is cheap if it is a user variable assigned to the register
   or if its register number always corresponds to a cheap register. */

#define CHEAP_REG(N) \
  ((REG_USERVAR_P (N) && REGNO (N) < FIRST_PSEUDO_REGISTER) \
   || CHEAP_REGNO (REGNO (N)))

#define COST(X) \
  (GET_CODE (X) == REG \
   ? (CHEAP_REG (X) ? 0 \
      : REGNO (X) >= FIRST_PSEUDO_REGISTER ? 1 \
      : 2) \
   : rtx_cost (X, SET) * 2)

/* Determine if the quantity number for register X represents a valid index
   into the `qty_...' variables. */

#define REGNO_QTY_VALID_P(N) (reg_qty[N] != (N))

static struct table_elt *table[NBUCKETS];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table. */

static struct table_elt *free_element_chain;

/* Number of `struct table_elt' structures made so far for this function. */

static int n_elements_made;

/* Maximum value `n_elements_made' has had so far in this compilation
   for functions previously processed. */

static int max_elements_made;

/* Surviving equivalence class when two equivalence classes are merged
   by recording the effects of a jump in the last insn. Zero if the
   last insn was not a conditional jump. */

static struct table_elt *last_jump_equiv_class;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant. If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn. */

static int constant_pool_entries_cost;

/* Bits describing what kind of values in memory must be invalidated
   for a particular instruction. If all the bits are zero,
   no memory refs need to be invalidated. Each bit is more powerful
   than the preceding ones, and if a bit is set then the preceding
   bits are also set.

   Here is how the bits are set:
   Pushing onto the stack invalidates only the stack pointer,
   writing at a fixed address invalidates only variable addresses,
   writing in a structure element at variable address
   invalidates all but scalar variables,
   and writing in anything else at variable address invalidates everything. */

struct write_data
{
  int sp : 1;			/* Invalidate stack pointer. */
  int var : 1;			/* Invalidate variable addresses. */
  int nonscalar : 1;		/* Invalidate all but scalar variables. */
  int all : 1;			/* Invalidate all memory refs. */
};
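
/* For example (illustrative only): a push through the stack pointer,
   such as (set (mem (pre_dec (reg sp))) (reg 65)), sets only `sp';
   a store at a fixed address sets `var' (and, per the convention
   above, `sp'); a store into a structure element at a variable
   address sets `nonscalar' as well; and a store through an arbitrary
   pointer sets `all'. */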

/* Define maximum length of a branch path. */

#define PATHLENGTH 10

/* This data describes a block that will be processed by cse_basic_block. */

struct cse_basic_block_data {
  /* Lowest CUID value of insns in block. */
  int low_cuid;
  /* Highest CUID value of insns in block. */
  int high_cuid;
  /* Total number of SETs in block. */
  int nsets;
  /* Last insn in the block. */
  rtx last;
  /* Size of current branch path, if any. */
  int path_size;
  /* Current branch path, indicating which branches will be taken. */
  struct branch_path {
    /* The branch insn. */
    rtx branch;
    /* Whether it should be taken or not. AROUND is the same as TAKEN
       except that it is used when the destination label is not preceded
       by a BARRIER. */
    enum taken {TAKEN, NOT_TAKEN, AROUND} status;
  } path[PATHLENGTH];
};

/* Nonzero if X has the form (PLUS frame-pointer integer). We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation. */

#define FIXED_BASE_PLUS_P(X) \
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
   || (X) == arg_pointer_rtx \
   || (X) == virtual_stack_vars_rtx \
   || (X) == virtual_incoming_args_rtx \
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx \
	   || XEXP (X, 0) == hard_frame_pointer_rtx \
	   || XEXP (X, 0) == arg_pointer_rtx \
	   || XEXP (X, 0) == virtual_stack_vars_rtx \
	   || XEXP (X, 0) == virtual_incoming_args_rtx)))

/* Similar, but also allows reference to the stack pointer.

   This used to include FIXED_BASE_PLUS_P, however, we can't assume that
   arg_pointer_rtx by itself is nonzero, because on at least one machine,
   the i960, the arg pointer is zero when it is unused. */

#define NONZERO_BASE_PLUS_P(X) \
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
   || (X) == virtual_stack_vars_rtx \
   || (X) == virtual_incoming_args_rtx \
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx \
	   || XEXP (X, 0) == hard_frame_pointer_rtx \
	   || XEXP (X, 0) == arg_pointer_rtx \
	   || XEXP (X, 0) == virtual_stack_vars_rtx \
	   || XEXP (X, 0) == virtual_incoming_args_rtx)) \
   || (X) == stack_pointer_rtx \
   || (X) == virtual_stack_dynamic_rtx \
   || (X) == virtual_outgoing_args_rtx \
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == stack_pointer_rtx \
	   || XEXP (X, 0) == virtual_stack_dynamic_rtx \
	   || XEXP (X, 0) == virtual_outgoing_args_rtx)))

static void new_basic_block PROTO((void));
static void make_new_qty PROTO((int));
static void make_regs_eqv PROTO((int, int));
static void delete_reg_equiv PROTO((int));
static int mention_regs PROTO((rtx));
static int insert_regs PROTO((rtx, struct table_elt *, int));
static void free_element PROTO((struct table_elt *));
static void remove_from_table PROTO((struct table_elt *, unsigned));
static struct table_elt *get_element PROTO((void));
static struct table_elt *lookup PROTO((rtx, unsigned, enum machine_mode)),
       *lookup_for_remove PROTO((rtx, unsigned, enum machine_mode));
static rtx lookup_as_function PROTO((rtx, enum rtx_code));
static struct table_elt *insert PROTO((rtx, struct table_elt *, unsigned,
				       enum machine_mode));
static void merge_equiv_classes PROTO((struct table_elt *,
				       struct table_elt *));
static void invalidate PROTO((rtx, enum machine_mode));
static void remove_invalid_refs PROTO((int));
static void rehash_using_reg PROTO((rtx));
static void invalidate_memory PROTO((struct write_data *));
static void invalidate_for_call PROTO((void));
static rtx use_related_value PROTO((rtx, struct table_elt *));
static unsigned canon_hash PROTO((rtx, enum machine_mode));
static unsigned safe_hash PROTO((rtx, enum machine_mode));
static int exp_equiv_p PROTO((rtx, rtx, int, int));
static void set_nonvarying_address_components PROTO((rtx, int, rtx *,
						     HOST_WIDE_INT *,
						     HOST_WIDE_INT *));
static int refers_to_p PROTO((rtx, rtx));
static int refers_to_mem_p PROTO((rtx, rtx, HOST_WIDE_INT,
				  HOST_WIDE_INT));
static int cse_rtx_addr_varies_p PROTO((rtx));
static rtx canon_reg PROTO((rtx, rtx));
static void find_best_addr PROTO((rtx, rtx *));
static enum rtx_code find_comparison_args PROTO((enum rtx_code, rtx *, rtx *,
						 enum machine_mode *,
						 enum machine_mode *));
static rtx cse_gen_binary PROTO((enum rtx_code, enum machine_mode,
				 rtx, rtx));
static rtx simplify_plus_minus PROTO((enum rtx_code, enum machine_mode,
				      rtx, rtx));
static rtx fold_rtx PROTO((rtx, rtx));
static rtx equiv_constant PROTO((rtx));
static void record_jump_equiv PROTO((rtx, int));
static void record_jump_cond PROTO((enum rtx_code, enum machine_mode,
				    rtx, rtx, int));
static void cse_insn PROTO((rtx, int));
static void note_mem_written PROTO((rtx, struct write_data *));
static void invalidate_from_clobbers PROTO((struct write_data *, rtx));
static rtx cse_process_notes PROTO((rtx, rtx));
static void cse_around_loop PROTO((rtx));
static void invalidate_skipped_set PROTO((rtx, rtx));
static void invalidate_skipped_block PROTO((rtx));
static void cse_check_loop_start PROTO((rtx, rtx));
static void cse_set_around_loop PROTO((rtx, rtx, rtx));
static rtx cse_basic_block PROTO((rtx, rtx, struct branch_path *, int));
static void count_reg_usage PROTO((rtx, int *, rtx, int));

extern int rtx_equal_function_value_matters;
\f
/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future. */

/* Return the right cost to give to an operation
   to make the cost of the corresponding register-to-register instruction
   N times that of a fast register-to-register instruction. */

#define COSTS_N_INSNS(N) ((N) * 4 - 2)
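
/* Worked examples of the formula above: COSTS_N_INSNS (1) == 2,
   COSTS_N_INSNS (2) == 6, and COSTS_N_INSNS (5) == 18. */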

int
rtx_cost (x, outer_code)
     rtx x;
     enum rtx_code outer_code;
{
  register int i, j;
  register enum rtx_code code;
  register char *fmt;
  register int total;

  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that RTX_COSTS can override the defaults. */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      /* Count multiplication by 2**n as a shift,
	 because if we are considering it, we would output it as a shift. */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	total = 2;
      else
	total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker. */
      total = 0;
      break;
    case ASM_OPERANDS:
      /* We don't want these to be used in substitutions because
	 we have no way of validating the resulting insn. So assign
	 anything containing an ASM_OPERANDS a very high cost. */
      total = 1000;
      break;
    default:
      total = 2;
    }

  switch (code)
    {
    case REG:
      return ! CHEAP_REG (x);

    case SUBREG:
      /* If we can't tie these modes, make this expensive. The larger
	 the mode, the more expensive it is. */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
	return COSTS_N_INSNS (2
			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      return 2;
#ifdef RTX_COSTS
      RTX_COSTS (x, code, outer_code);
#endif
      CONST_COSTS (x, code, outer_code);
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total. */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}
\f
/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block. */

static void
new_basic_block ()
{
  register int i;

  next_qty = max_reg;

  bzero ((char *) reg_tick, max_reg * sizeof (int));

  bcopy ((char *) all_minus_one, (char *) reg_in_table,
	 max_reg * sizeof (int));
  bcopy ((char *) consec_ints, (char *) reg_qty, max_reg * sizeof (int));
  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'. */

  for (i = 0; i < NBUCKETS; i++)
    {
      register struct table_elt *this, *next;
      for (this = table[i]; this; this = next)
	{
	  next = this->next_same_hash;
	  free_element (this);
	}
    }

  bzero ((char *) table, sizeof table);

  prev_insn = 0;

#ifdef HAVE_cc0
  prev_insn_cc0 = 0;
#endif
}

/* Say that register REG contains a quantity not in any register before
   and initialize that quantity. */

static void
make_new_qty (reg)
     register int reg;
{
  register int q;

  if (next_qty >= max_qty)
    abort ();

  q = reg_qty[reg] = next_qty++;
  qty_first_reg[q] = reg;
  qty_last_reg[q] = reg;
  qty_const[q] = qty_const_insn[q] = 0;
  qty_comparison_code[q] = UNKNOWN;

  reg_next_eqv[reg] = reg_prev_eqv[reg] = -1;
}

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is. */

static void
make_regs_eqv (new, old)
     register int new, old;
{
  register int lastr, firstr;
  register int q = reg_qty[old];

  /* Nothing should become eqv until it has a "non-invalid" qty number. */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  reg_qty[new] = q;
  firstr = qty_first_reg[q];
  lastr = qty_last_reg[q];

  /* Prefer fixed hard registers to anything. Prefer pseudo regs to other
     hard regs. Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty. */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS. This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either. */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[regno_last_uid[new]] > cse_basic_block_end
		       || (uid_cuid[regno_first_uid[new]]
			   < cse_basic_block_start))
		      && (uid_cuid[regno_last_uid[new]]
			  > uid_cuid[regno_last_uid[firstr]]))))))
    {
      reg_prev_eqv[firstr] = new;
      reg_next_eqv[new] = firstr;
      reg_prev_eqv[new] = -1;
      qty_first_reg[q] = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end. Registers of class NO_REGS cannot be used as an
	 equivalent for anything. */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_prev_eqv[lastr] >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_prev_eqv[lastr];
      reg_next_eqv[new] = reg_next_eqv[lastr];
      if (reg_next_eqv[lastr] >= 0)
	reg_prev_eqv[reg_next_eqv[lastr]] = new;
      else
	qty_last_reg[q] = new;
      reg_next_eqv[lastr] = new;
      reg_prev_eqv[new] = lastr;
    }
}

/* Remove REG from its equivalence class. */

static void
delete_reg_equiv (reg)
     register int reg;
{
  register int q = reg_qty[reg];
  register int p, n;

  /* If invalid, do nothing. */
  if (q == reg)
    return;

  p = reg_prev_eqv[reg];
  n = reg_next_eqv[reg];

  if (n != -1)
    reg_prev_eqv[n] = p;
  else
    qty_last_reg[q] = p;
  if (p != -1)
    reg_next_eqv[p] = n;
  else
    qty_first_reg[q] = n;

  reg_qty[reg] = reg;
}

/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X. */

static int
mention_regs (x)
     rtx x;
{
  register enum rtx_code code;
  register int i, j;
  register char *fmt;
  register int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      register int regno = REGNO (x);
      register int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
      int i;

      for (i = regno; i < endregno; i++)
	{
	  if (reg_in_table[i] >= 0 && reg_in_table[i] != reg_tick[i])
	    remove_invalid_refs (i);

	  reg_in_table[i] = reg_tick[i];
	}

      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one. This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison. */

  if (code == COMPARE || GET_RTX_CLASS (code) == '<')
    {
      if (GET_CODE (XEXP (x, 0)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (GET_CODE (XEXP (x, 1)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}

/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different. */

static int
insert_regs (x, classp, modified)
     rtx x;
     struct table_elt *classp;
     int modified;
{
  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here. */

      if (REGNO_QTY_VALID_P (regno)
	  && qty_mode[reg_qty[regno]] != GET_MODE (x))
	return 0;

      if (modified || ! REGNO_QTY_VALID_P (regno))
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (GET_CODE (classp->exp) == REG
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  make_regs_eqv (regno, REGNO (classp->exp));
		  return 1;
		}

	  make_new_qty (regno);
	  qty_mode[reg_qty[regno]] = GET_MODE (x);
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table. If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed. So assign
     a quantity number now. */

  else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL_PTR, 0);
      mention_regs (SUBREG_REG (x));
      return 1;
    }
  else
    return mention_regs (x);
}
\f
/* Look in or update the hash table. */

/* Put the element ELT on the list of free elements. */

static void
free_element (elt)
     struct table_elt *elt;
{
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}

/* Return an element that is free for use. */

static struct table_elt *
get_element ()
{
  struct table_elt *elt = free_element_chain;
  if (elt)
    {
      free_element_chain = elt->next_same_hash;
      return elt;
    }
  n_elements_made++;
  return (struct table_elt *) oballoc (sizeof (struct table_elt));
}

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it. */

static void
remove_from_table (elt, hash)
     register struct table_elt *elt;
     unsigned hash;
{
  if (elt == 0)
    return;

  /* Mark this element as removed. See cse_insn. */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class. */

  {
    register struct table_elt *prev = elt->prev_same_value;
    register struct table_elt *next = elt->next_same_value;

    if (next) next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	register struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket. */

  {
    register struct table_elt *prev = elt->prev_same_hash;
    register struct table_elt *next = elt->next_same_hash;

    if (next) next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket. This can happen
	   when two classes were merged by `merge_equiv_classes'. Search
	   for the hash bucket that it heads. This happens only very
	   rarely, so the cost is acceptable. */
	for (hash = 0; hash < NBUCKETS; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain. */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      register struct table_elt *p = elt->related_value;
      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  free_element (elt);
}

/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X. */

static struct table_elt *
lookup (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
			    || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
      return p;

  return 0;
}

/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register. */

static struct table_elt *
lookup_for_remove (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  if (GET_CODE (x) == REG)
    {
      int regno = REGNO (x);
      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (GET_CODE (p->exp) == REG
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
	  return p;
    }

  return 0;
}

/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression. */

static rtx
lookup_as_function (x, code)
     rtx x;
     enum rtx_code code;
{
  register struct table_elt *p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS,
					 GET_MODE (x));
  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    {
      if (GET_CODE (p->exp) == code
	  /* Make sure this is a valid entry in the table. */
	  && exp_equiv_p (p->exp, p->exp, 1, 0))
	return p->exp;
    }

  return 0;
}

/* Insert X in the hash table, assuming HASH is its hash code
   and CLASSP is an element of the class it should go in
   (or 0 if a new class should be made).
   It is inserted at the proper position to keep the class in
   the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added. The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities. */

#define CHEAPER(X,Y) ((X)->cost < (Y)->cost)

static struct table_elt *
insert (x, classp, hash, mode)
     register rtx x;
     register struct table_elt *classp;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong. */
  if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
    abort ();

  /* If X is a hard register, show it is being put in the table. */
  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      int regno = REGNO (x);
      int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
      int i;

      for (i = regno; i < endregno; i++)
	SET_HARD_REG_BIT (hard_regs_in_table, i);
    }


  /* Put an element for X into the right hash bucket. */

  elt = get_element ();
  elt->exp = x;
  elt->cost = COST (x);
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x)
		   /* GNU C++ takes advantage of this for `this'
		      (and other const values). */
		   || (RTX_UNCHANGING_P (x)
		       && GET_CODE (x) == REG
		       && REGNO (x) >= FIRST_PSEUDO_REGISTER)
		   || FIXED_BASE_PLUS_P (x));

  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class. */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
	/* Insert at the head of the class. */
	{
	  register struct table_elt *p;
	  elt->next_same_value = classp;
	  classp->prev_same_value = elt;
	  elt->first_same_value = elt;

	  for (p = classp; p; p = p->next_same_value)
	    p->first_same_value = elt;
	}
      else
	{
	  /* Insert not at head of the class. */
	  /* Put it after the last element cheaper than X. */
	  register struct table_elt *p, *next;
	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
	       p = next);
	  /* Put it after P and before NEXT. */
	  elt->next_same_value = next;
	  if (next)
	    next->prev_same_value = elt;
	  elt->prev_same_value = p;
	  p->next_same_value = elt;
	  elt->first_same_value = classp;
	}
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register. So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to a constant,
     update `qty_const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant. */

  if (elt->is_const && classp && GET_CODE (classp->exp) == REG)
    {
      qty_const[reg_qty[REGNO (classp->exp)]]
	= gen_lowpart_if_possible (qty_mode[reg_qty[REGNO (classp->exp)]], x);
      qty_const_insn[reg_qty[REGNO (classp->exp)]] = this_insn;
    }

  else if (GET_CODE (x) == REG && classp && ! qty_const[reg_qty[REGNO (x)]])
    {
      register struct table_elt *p;

      for (p = classp; p != 0; p = p->next_same_value)
	{
	  if (p->is_const)
	    {
	      qty_const[reg_qty[REGNO (x)]]
		= gen_lowpart_if_possible (GET_MODE (x), p->exp);
	      qty_const_insn[reg_qty[REGNO (x)]] = this_insn;
	      break;
	    }
	}
    }

  else if (GET_CODE (x) == REG && qty_const[reg_qty[REGNO (x)]]
	   && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]])
    qty_const_insn[reg_qty[REGNO (x)]] = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions. */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
	{
	  /* Get the integer-free subexpression in the hash table. */
	  subhash = safe_hash (subexp, mode) % NBUCKETS;
	  subelt = lookup (subexp, subhash, mode);
	  if (subelt == 0)
	    subelt = insert (subexp, NULL_PTR, subhash, mode);
	  /* Initialize SUBELT's circular chain if it has none. */
	  if (subelt->related_value == 0)
	    subelt->related_value = subelt;
	  /* Find the element in the circular chain that precedes SUBELT. */
	  subelt_prev = subelt;
	  while (subelt_prev->related_value != subelt)
	    subelt_prev = subelt_prev->related_value;
	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
	     This way the element that follows SUBELT is the oldest one. */
	  elt->related_value = subelt_prev->related_value;
	  subelt_prev->related_value = elt;
	}
    }

  return elt;
}
\f
/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1. This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied. */

static void
merge_equiv_classes (class1, class2)
     struct table_elt *class1, *class2;
{
  struct table_elt *elt, *next, *new;

  /* Ensure we start with the head of the classes. */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it. */
  if (class1 == class2)
    return;

  for (elt = class2; elt; elt = next)
    {
      unsigned hash;
      rtx exp = elt->exp;
      enum machine_mode mode = elt->mode;

      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
	 Don't do this for invalid entries as we cannot find their
	 hash code (it also isn't necessary). */
      if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
	{
	  hash_arg_in_memory = 0;
	  hash_arg_in_struct = 0;
	  hash = HASH (exp, mode);

	  if (GET_CODE (exp) == REG)
	    delete_reg_equiv (REGNO (exp));

	  remove_from_table (elt, hash);

	  if (insert_regs (exp, class1, 0))
	    {
	      rehash_using_reg (exp);
	      hash = HASH (exp, mode);
	    }
	  new = insert (exp, class1, hash, mode);
	  new->in_memory = hash_arg_in_memory;
	  new->in_struct = hash_arg_in_struct;
	}
    }
}
\f
/* Remove from the hash table, or mark as invalid,
   all expressions whose values could be altered by storing in X.
   X is a register, a subreg, or a memory reference with nonvarying address
   (because, when a memory reference with a varying address is stored in,
   all memory references are removed by invalidate_memory
   so specific invalidation is superfluous).
   FULL_MODE, if not VOIDmode, indicates that this much should be invalidated
   instead of just the amount indicated by the mode of X. This is only used
   for bitfield stores into memory.

   A nonvarying address may be just a register or just
   a symbol reference, or it may be either of those plus
   a numeric offset. */

static void
invalidate (x, full_mode)
     rtx x;
     enum machine_mode full_mode;
{
  register int i;
  register struct table_elt *p;
  rtx base;
  HOST_WIDE_INT start, end;

  /* If X is a register, dependencies on its contents
     are recorded through the qty number mechanism.
     Just change the qty number of the register,
     mark it as invalid for expressions that refer to it,
     and remove it itself. */

  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);
      register unsigned hash = HASH (x, GET_MODE (x));

      /* Remove REGNO from any quantity list it might be on and indicate
	 that its value might have changed. If it is a pseudo, remove its
	 entry from the hash table.

	 For a hard register, we do the first two actions above for any
	 additional hard registers corresponding to X. Then, if any of these
	 registers are in the table, we must remove any REG entries that
	 overlap these registers. */

      delete_reg_equiv (regno);
      reg_tick[regno]++;

      if (regno >= FIRST_PSEUDO_REGISTER)
	remove_from_table (lookup_for_remove (x, hash, GET_MODE (x)), hash);
      else
	{
	  HOST_WIDE_INT in_table
	    = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
	  int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	  int tregno, tendregno;
	  register struct table_elt *p, *next;

	  CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);

	  for (i = regno + 1; i < endregno; i++)
	    {
	      in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, i);
	      CLEAR_HARD_REG_BIT (hard_regs_in_table, i);
	      delete_reg_equiv (i);
	      reg_tick[i]++;
	    }

	  if (in_table)
	    for (hash = 0; hash < NBUCKETS; hash++)
	      for (p = table[hash]; p; p = next)
		{
		  next = p->next_same_hash;

		  if (GET_CODE (p->exp) != REG
		      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
		    continue;

		  tregno = REGNO (p->exp);
		  tendregno
		    = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
		  if (tendregno > regno && tregno < endregno)
		    remove_from_table (p, hash);
		}
	}

      return;
    }

  if (GET_CODE (x) == SUBREG)
    {
      if (GET_CODE (SUBREG_REG (x)) != REG)
	abort ();
      invalidate (SUBREG_REG (x), VOIDmode);
      return;
    }

  /* X is not a register; it must be a memory reference with
     a nonvarying address. Remove all hash table elements
     that refer to overlapping pieces of memory. */

  if (GET_CODE (x) != MEM)
    abort ();

  if (full_mode == VOIDmode)
    full_mode = GET_MODE (x);

  set_nonvarying_address_components (XEXP (x, 0), GET_MODE_SIZE (full_mode),
				     &base, &start, &end);

  for (i = 0; i < NBUCKETS; i++)
    {
      register struct table_elt *next;
      for (p = table[i]; p; p = next)
	{
	  next = p->next_same_hash;
	  if (refers_to_mem_p (p->exp, base, start, end))
	    remove_from_table (p, i);
	}
    }
}

/* Remove all expressions that refer to register REGNO,
   since they are already invalid, and we are about to
   mark that register valid again and don't want the old
   expressions to reappear as valid. */

static void
remove_invalid_refs (regno)
     int regno;
{
  register int i;
  register struct table_elt *p, *next;

  for (i = 0; i < NBUCKETS; i++)
    for (p = table[i]; p; p = next)
      {
	next = p->next_same_hash;
	if (GET_CODE (p->exp) != REG
	    && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
	  remove_from_table (p, i);
      }
}
\f
1624 /* Recompute the hash codes of any valid entries in the hash table that
1625 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1626
1627 This is called when we make a jump equivalence. */
1628
1629 static void
1630 rehash_using_reg (x)
1631 rtx x;
1632 {
1633 int i;
1634 struct table_elt *p, *next;
1635 unsigned hash;
1636
1637 if (GET_CODE (x) == SUBREG)
1638 x = SUBREG_REG (x);
1639
1640 /* If X is not a register or if the register is known not to be in any
1641 valid entries in the table, we have no work to do. */
1642
1643 if (GET_CODE (x) != REG
1644 || reg_in_table[REGNO (x)] < 0
1645 || reg_in_table[REGNO (x)] != reg_tick[REGNO (x)])
1646 return;
1647
1648 /* Scan all hash chains looking for valid entries that mention X.
1649 If we find one and it is in the wrong hash chain, move it. We can skip
1650 objects that are registers, since they are handled specially. */
1651
1652 for (i = 0; i < NBUCKETS; i++)
1653 for (p = table[i]; p; p = next)
1654 {
1655 next = p->next_same_hash;
1656 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
1657 && exp_equiv_p (p->exp, p->exp, 1, 0)
1658 && i != (hash = safe_hash (p->exp, p->mode) % NBUCKETS))
1659 {
1660 if (p->next_same_hash)
1661 p->next_same_hash->prev_same_hash = p->prev_same_hash;
1662
1663 if (p->prev_same_hash)
1664 p->prev_same_hash->next_same_hash = p->next_same_hash;
1665 else
1666 table[i] = p->next_same_hash;
1667
1668 p->next_same_hash = table[hash];
1669 p->prev_same_hash = 0;
1670 if (table[hash])
1671 table[hash]->prev_same_hash = p;
1672 table[hash] = p;
1673 }
1674 }
1675 }
1676 \f
1677 /* Remove from the hash table all expressions that reference memory,
1678 or some of them as specified by *WRITES. */
1679
1680 static void
1681 invalidate_memory (writes)
1682 struct write_data *writes;
1683 {
1684 register int i;
1685 register struct table_elt *p, *next;
1686 int all = writes->all;
1687 int nonscalar = writes->nonscalar;
1688
1689 for (i = 0; i < NBUCKETS; i++)
1690 for (p = table[i]; p; p = next)
1691 {
1692 next = p->next_same_hash;
1693 if (p->in_memory
1694 && (all
1695 || (nonscalar && p->in_struct)
1696 || cse_rtx_addr_varies_p (p->exp)))
1697 remove_from_table (p, i);
1698 }
1699 }
1700 \f
1701 /* Remove from the hash table any expressions that are call-clobbered
1702 registers. Also update their TICK values. */
1703
1704 static void
1705 invalidate_for_call ()
1706 {
1707 int regno, endregno;
1708 int i;
1709 unsigned hash;
1710 struct table_elt *p, *next;
1711 int in_table = 0;
1712
1713 /* Go through all the hard registers. For each that is clobbered in
1714 a CALL_INSN, remove the register from quantity chains and update
1715 reg_tick if defined. Also see if any of these registers is currently
1716 in the table. */
1717
1718 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1719 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1720 {
1721 delete_reg_equiv (regno);
1722 if (reg_tick[regno] >= 0)
1723 reg_tick[regno]++;
1724
1725 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
1726 }
1727
1728 /* In the case where we have no call-clobbered hard registers in the
1729 table, we are done. Otherwise, scan the table and remove any
1730 entry that overlaps a call-clobbered register. */
1731
1732 if (in_table)
1733 for (hash = 0; hash < NBUCKETS; hash++)
1734 for (p = table[hash]; p; p = next)
1735 {
1736 next = p->next_same_hash;
1737
1738 if (GET_CODE (p->exp) != REG
1739 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1740 continue;
1741
1742 regno = REGNO (p->exp);
1743 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
1744
1745 for (i = regno; i < endregno; i++)
1746 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1747 {
1748 remove_from_table (p, hash);
1749 break;
1750 }
1751 }
1752 }
1753 \f
1754 /* Given an expression X of type CONST,
1755 and ELT which is its table entry (or 0 if it
1756 is not in the hash table),
1757 return an alternate expression for X as a register plus integer.
1758 If none can be found, return 0. */
1759
1760 static rtx
1761 use_related_value (x, elt)
1762 rtx x;
1763 struct table_elt *elt;
1764 {
1765 register struct table_elt *relt = 0;
1766 register struct table_elt *p, *q;
1767 HOST_WIDE_INT offset;
1768
1769 /* First, is there anything related known?
1770 If we have a table element, we can tell from that.
1771 Otherwise, must look it up. */
1772
1773 if (elt != 0 && elt->related_value != 0)
1774 relt = elt;
1775 else if (elt == 0 && GET_CODE (x) == CONST)
1776 {
1777 rtx subexp = get_related_value (x);
1778 if (subexp != 0)
1779 relt = lookup (subexp,
1780 safe_hash (subexp, GET_MODE (subexp)) % NBUCKETS,
1781 GET_MODE (subexp));
1782 }
1783
1784 if (relt == 0)
1785 return 0;
1786
1787 /* Search all related table entries for one that has an
1788 equivalent register. */
1789
1790 p = relt;
1791 while (1)
1792 {
1793 /* This loop is strange in that it is executed in two different cases.
1794 The first is when X is already in the table. Then it is searching
1795 the RELATED_VALUE list of X's class (RELT). The second case is when
1796 X is not in the table. Then RELT points to a class for the related
1797 value.
1798
1799 Ensure that, whatever case we are in, we ignore classes that have
1800 the same value as X. */
1801
1802 if (rtx_equal_p (x, p->exp))
1803 q = 0;
1804 else
1805 for (q = p->first_same_value; q; q = q->next_same_value)
1806 if (GET_CODE (q->exp) == REG)
1807 break;
1808
1809 if (q)
1810 break;
1811
1812 p = p->related_value;
1813
1814 /* We went all the way around, so there is nothing to be found.
1815 Alternatively, perhaps RELT was in the table for some other reason
1816 and it has no related values recorded. */
1817 if (p == relt || p == 0)
1818 break;
1819 }
1820
1821 if (q == 0)
1822 return 0;
1823
1824 offset = (get_integer_term (x) - get_integer_term (p->exp));
1825 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
1826 return plus_constant (q->exp, offset);
1827 }
1828 \f
1829 /* Hash an rtx. We are careful to make sure the value is never negative.
1830 Equivalent registers hash identically.
1831 MODE is used in hashing for CONST_INTs only;
1832 otherwise the mode of X is used.
1833
1834 Store 1 in do_not_record if any subexpression is volatile.
1835
1836 Store 1 in hash_arg_in_memory if X contains a MEM rtx
1837 which does not have the RTX_UNCHANGING_P bit set.
1838 In this case, also store 1 in hash_arg_in_struct
1839 if there is a MEM rtx which has the MEM_IN_STRUCT_P bit set.
1840
1841 Note that cse_insn knows that the hash code of a MEM expression
1842 is just (int) MEM plus the hash code of the address. */
1843
1844 static unsigned
1845 canon_hash (x, mode)
1846 rtx x;
1847 enum machine_mode mode;
1848 {
1849 register int i, j;
1850 register unsigned hash = 0;
1851 register enum rtx_code code;
1852 register char *fmt;
1853
1854 /* repeat is used to turn tail-recursion into iteration. */
1855 repeat:
1856 if (x == 0)
1857 return hash;
1858
1859 code = GET_CODE (x);
1860 switch (code)
1861 {
1862 case REG:
1863 {
1864 register int regno = REGNO (x);
1865
1866 /* On some machines, we can't record any non-fixed hard register,
1867 because extending its life will cause reload problems. We
1868 consider ap, fp, and sp to be fixed for this purpose.
1869 On all machines, we can't record any global registers. */
1870
1871 if (regno < FIRST_PSEUDO_REGISTER
1872 && (global_regs[regno]
1873 #ifdef SMALL_REGISTER_CLASSES
1874 || (! fixed_regs[regno]
1875 && regno != FRAME_POINTER_REGNUM
1876 && regno != HARD_FRAME_POINTER_REGNUM
1877 && regno != ARG_POINTER_REGNUM
1878 && regno != STACK_POINTER_REGNUM)
1879 #endif
1880 ))
1881 {
1882 do_not_record = 1;
1883 return 0;
1884 }
1885 hash += ((unsigned) REG << 7) + (unsigned) reg_qty[regno];
1886 return hash;
1887 }
1888
1889 case CONST_INT:
1890 {
1891 unsigned HOST_WIDE_INT tem = INTVAL (x);
1892 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
1893 return hash;
1894 }
1895
1896 case CONST_DOUBLE:
1897 /* This is like the general case, except that it only counts
1898 the integers representing the constant. */
1899 hash += (unsigned) code + (unsigned) GET_MODE (x);
1900 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
1901 {
1902 unsigned tem = XINT (x, i);
1903 hash += tem;
1904 }
1905 return hash;
1906
1907 /* Assume there is only one rtx object for any given label. */
1908 case LABEL_REF:
1909 hash
1910 += ((unsigned) LABEL_REF << 7) + (unsigned HOST_WIDE_INT) XEXP (x, 0);
1911 return hash;
1912
1913 case SYMBOL_REF:
1914 hash
1915 += ((unsigned) SYMBOL_REF << 7) + (unsigned HOST_WIDE_INT) XSTR (x, 0);
1916 return hash;
1917
1918 case MEM:
1919 if (MEM_VOLATILE_P (x))
1920 {
1921 do_not_record = 1;
1922 return 0;
1923 }
1924 if (! RTX_UNCHANGING_P (x))
1925 {
1926 hash_arg_in_memory = 1;
1927 if (MEM_IN_STRUCT_P (x)) hash_arg_in_struct = 1;
1928 }
1929 /* Now that we have already found this special case,
1930 might as well speed it up as much as possible. */
1931 hash += (unsigned) MEM;
1932 x = XEXP (x, 0);
1933 goto repeat;
1934
1935 case PRE_DEC:
1936 case PRE_INC:
1937 case POST_DEC:
1938 case POST_INC:
1939 case PC:
1940 case CC0:
1941 case CALL:
1942 case UNSPEC_VOLATILE:
1943 do_not_record = 1;
1944 return 0;
1945
1946 case ASM_OPERANDS:
1947 if (MEM_VOLATILE_P (x))
1948 {
1949 do_not_record = 1;
1950 return 0;
1951 }
1952 }
1953
1954 i = GET_RTX_LENGTH (code) - 1;
1955 hash += (unsigned) code + (unsigned) GET_MODE (x);
1956 fmt = GET_RTX_FORMAT (code);
1957 for (; i >= 0; i--)
1958 {
1959 if (fmt[i] == 'e')
1960 {
1961 rtx tem = XEXP (x, i);
1962
1963 /* If we are about to do the last recursive call
1964 needed at this level, change it into iteration.
1965 This function is called enough to be worth it. */
1966 if (i == 0)
1967 {
1968 x = tem;
1969 goto repeat;
1970 }
1971 hash += canon_hash (tem, 0);
1972 }
1973 else if (fmt[i] == 'E')
1974 for (j = 0; j < XVECLEN (x, i); j++)
1975 hash += canon_hash (XVECEXP (x, i, j), 0);
1976 else if (fmt[i] == 's')
1977 {
1978 register unsigned char *p = (unsigned char *) XSTR (x, i);
1979 if (p)
1980 while (*p)
1981 hash += *p++;
1982 }
1983 else if (fmt[i] == 'i')
1984 {
1985 register unsigned tem = XINT (x, i);
1986 hash += tem;
1987 }
1988 else
1989 abort ();
1990 }
1991 return hash;
1992 }
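
/* Illustration (a minimal sketch with a hypothetical `tree' type): the
   `goto repeat' above is tail-recursion elimination. Recurse for all
   operands but the last; for the last, overwrite X and loop.  */
#if 0
struct tree { int value; struct tree *kid[2]; };

static unsigned
tree_hash (t)
     struct tree *t;
{
  unsigned hash = 0;

 repeat:
  if (t == 0)
    return hash;

  hash += (unsigned) t->value;
  hash += tree_hash (t->kid[1]);	/* Real recursion for one child,  */
  t = t->kid[0];			/* iteration for the other.  */
  goto repeat;
}
#endif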
1993
1994 /* Like canon_hash but with no side effects. */
1995
1996 static unsigned
1997 safe_hash (x, mode)
1998 rtx x;
1999 enum machine_mode mode;
2000 {
2001 int save_do_not_record = do_not_record;
2002 int save_hash_arg_in_memory = hash_arg_in_memory;
2003 int save_hash_arg_in_struct = hash_arg_in_struct;
2004 unsigned hash = canon_hash (x, mode);
2005 hash_arg_in_memory = save_hash_arg_in_memory;
2006 hash_arg_in_struct = save_hash_arg_in_struct;
2007 do_not_record = save_do_not_record;
2008 return hash;
2009 }
2010 \f
2011 /* Return 1 iff X and Y would canonicalize into the same thing,
2012 without actually constructing the canonicalization of either one.
2013 If VALIDATE is nonzero,
2014 we assume X is an expression being processed from the rtl
2015 and Y was found in the hash table. We check register refs
2016 in Y for being marked as valid.
2017
2018 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2019 that is known to be in the register. Ordinarily, we don't allow them
2020 to match, because letting them match would cause unpredictable results
2021 in all the places that search a hash table chain for an equivalent
2022 for a given value. A possible equivalent that has different structure
2023 has its hash code computed from different data. Whether the hash code
2024 is the same as that of the given value is pure luck. */
2025
2026 static int
2027 exp_equiv_p (x, y, validate, equal_values)
2028 rtx x, y;
2029 int validate;
2030 int equal_values;
2031 {
2032 register int i, j;
2033 register enum rtx_code code;
2034 register char *fmt;
2035
2036 /* Note: it is incorrect to assume an expression is equivalent to itself
2037 if VALIDATE is nonzero. */
2038 if (x == y && !validate)
2039 return 1;
2040 if (x == 0 || y == 0)
2041 return x == y;
2042
2043 code = GET_CODE (x);
2044 if (code != GET_CODE (y))
2045 {
2046 if (!equal_values)
2047 return 0;
2048
2049 /* If X is a constant and Y is a register or vice versa, they may be
2050 equivalent. We only have to validate if Y is a register. */
2051 if (CONSTANT_P (x) && GET_CODE (y) == REG
2052 && REGNO_QTY_VALID_P (REGNO (y))
2053 && GET_MODE (y) == qty_mode[reg_qty[REGNO (y)]]
2054 && rtx_equal_p (x, qty_const[reg_qty[REGNO (y)]])
2055 && (! validate || reg_in_table[REGNO (y)] == reg_tick[REGNO (y)]))
2056 return 1;
2057
2058 if (CONSTANT_P (y) && code == REG
2059 && REGNO_QTY_VALID_P (REGNO (x))
2060 && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]]
2061 && rtx_equal_p (y, qty_const[reg_qty[REGNO (x)]]))
2062 return 1;
2063
2064 return 0;
2065 }
2066
2067 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2068 if (GET_MODE (x) != GET_MODE (y))
2069 return 0;
2070
2071 switch (code)
2072 {
2073 case PC:
2074 case CC0:
2075 return x == y;
2076
2077 case CONST_INT:
2078 return INTVAL (x) == INTVAL (y);
2079
2080 case LABEL_REF:
2081 return XEXP (x, 0) == XEXP (y, 0);
2082
2083 case SYMBOL_REF:
2084 return XSTR (x, 0) == XSTR (y, 0);
2085
2086 case REG:
2087 {
2088 int regno = REGNO (y);
2089 int endregno
2090 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2091 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2092 int i;
2093
2094 /* If the quantities are not the same, the expressions are not
2095 equivalent. If they are and we are not to validate, they
2096 are equivalent. Otherwise, ensure all regs are up-to-date. */
2097
2098 if (reg_qty[REGNO (x)] != reg_qty[regno])
2099 return 0;
2100
2101 if (! validate)
2102 return 1;
2103
2104 for (i = regno; i < endregno; i++)
2105 if (reg_in_table[i] != reg_tick[i])
2106 return 0;
2107
2108 return 1;
2109 }
2110
2111 /* For commutative operations, check both orders. */
2112 case PLUS:
2113 case MULT:
2114 case AND:
2115 case IOR:
2116 case XOR:
2117 case NE:
2118 case EQ:
2119 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2120 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2121 validate, equal_values))
2122 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2123 validate, equal_values)
2124 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2125 validate, equal_values)));
2126 }
2127
2128 /* Compare the elements. If any pair of corresponding elements
2129 fail to match, return 0 for the whole thing. */
2130
2131 fmt = GET_RTX_FORMAT (code);
2132 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2133 {
2134 switch (fmt[i])
2135 {
2136 case 'e':
2137 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2138 return 0;
2139 break;
2140
2141 case 'E':
2142 if (XVECLEN (x, i) != XVECLEN (y, i))
2143 return 0;
2144 for (j = 0; j < XVECLEN (x, i); j++)
2145 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2146 validate, equal_values))
2147 return 0;
2148 break;
2149
2150 case 's':
2151 if (strcmp (XSTR (x, i), XSTR (y, i)))
2152 return 0;
2153 break;
2154
2155 case 'i':
2156 if (XINT (x, i) != XINT (y, i))
2157 return 0;
2158 break;
2159
2160 case 'w':
2161 if (XWINT (x, i) != XWINT (y, i))
2162 return 0;
2163 break;
2164
2165 case '0':
2166 break;
2167
2168 default:
2169 abort ();
2170 }
2171 }
2172
2173 return 1;
2174 }
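
/* Illustration (a sketch only; a real version would, like the switch
   above, apply the swapped test only to codes known to be commutative):
   matching A+B against B+A means trying both operand orders.  */
#if 0
struct expr { int code; struct expr *op0, *op1; };

static int
commutative_equal_p (x, y)
     struct expr *x, *y;
{
  if (x == y)
    return 1;
  if (x == 0 || y == 0 || x->code != y->code)
    return 0;

  /* Match operands in the given order, then swapped, as above.  */
  return ((commutative_equal_p (x->op0, y->op0)
	   && commutative_equal_p (x->op1, y->op1))
	  || (commutative_equal_p (x->op0, y->op1)
	      && commutative_equal_p (x->op1, y->op0)));
}
#endif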
2175 \f
2176 /* Return 1 iff any subexpression of X matches Y.
2177 Here we do not require that X or Y be valid (for registers referred to)
2178 for being in the hash table. */
2179
2180 static int
2181 refers_to_p (x, y)
2182 rtx x, y;
2183 {
2184 register int i;
2185 register enum rtx_code code;
2186 register char *fmt;
2187
2188 repeat:
2189 if (x == y)
2190 return 1;
2191 if (x == 0 || y == 0)
2192 return 0;
2193
2194 code = GET_CODE (x);
2195 /* If X as a whole has the same code as Y, they may match.
2196 If so, return 1. */
2197 if (code == GET_CODE (y))
2198 {
2199 if (exp_equiv_p (x, y, 0, 1))
2200 return 1;
2201 }
2202
2203 /* X does not match, so try its subexpressions. */
2204
2205 fmt = GET_RTX_FORMAT (code);
2206 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2207 if (fmt[i] == 'e')
2208 {
2209 if (i == 0)
2210 {
2211 x = XEXP (x, 0);
2212 goto repeat;
2213 }
2214 else
2215 if (refers_to_p (XEXP (x, i), y))
2216 return 1;
2217 }
2218 else if (fmt[i] == 'E')
2219 {
2220 int j;
2221 for (j = 0; j < XVECLEN (x, i); j++)
2222 if (refers_to_p (XVECEXP (x, i, j), y))
2223 return 1;
2224 }
2225
2226 return 0;
2227 }
2228 \f
2229 /* Given ADDR and SIZE (a memory address, and the size of the memory reference),
2230 set PBASE, PSTART, and PEND which correspond to the base of the address,
2231 the starting offset, and the ending offset, respectively.
2232
2233 ADDR is known to be a nonvarying address. */
2234
2235 /* ??? Despite what the comments say, this function is in fact frequently
2236 passed varying addresses. This does not appear to cause any problems. */
2237
2238 static void
2239 set_nonvarying_address_components (addr, size, pbase, pstart, pend)
2240 rtx addr;
2241 int size;
2242 rtx *pbase;
2243 HOST_WIDE_INT *pstart, *pend;
2244 {
2245 rtx base;
2246 HOST_WIDE_INT start, end;
2247
2248 base = addr;
2249 start = 0;
2250 end = 0;
2251
2252 /* Registers with nonvarying addresses usually have constant equivalents;
2253 but the frame pointer register is also possible. */
2254 if (GET_CODE (base) == REG
2255 && qty_const != 0
2256 && REGNO_QTY_VALID_P (REGNO (base))
2257 && qty_mode[reg_qty[REGNO (base)]] == GET_MODE (base)
2258 && qty_const[reg_qty[REGNO (base)]] != 0)
2259 base = qty_const[reg_qty[REGNO (base)]];
2260 else if (GET_CODE (base) == PLUS
2261 && GET_CODE (XEXP (base, 1)) == CONST_INT
2262 && GET_CODE (XEXP (base, 0)) == REG
2263 && qty_const != 0
2264 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
2265 && (qty_mode[reg_qty[REGNO (XEXP (base, 0))]]
2266 == GET_MODE (XEXP (base, 0)))
2267 && qty_const[reg_qty[REGNO (XEXP (base, 0))]])
2268 {
2269 start = INTVAL (XEXP (base, 1));
2270 base = qty_const[reg_qty[REGNO (XEXP (base, 0))]];
2271 }
2272 /* This can happen as the result of virtual register instantiation,
2273 if the initial offset is too large to be a valid address. */
2274 else if (GET_CODE (base) == PLUS
2275 && GET_CODE (XEXP (base, 0)) == REG
2276 && GET_CODE (XEXP (base, 1)) == REG
2277 && qty_const != 0
2278 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
2279 && (qty_mode[reg_qty[REGNO (XEXP (base, 0))]]
2280 == GET_MODE (XEXP (base, 0)))
2281 && qty_const[reg_qty[REGNO (XEXP (base, 0))]]
2282 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 1)))
2283 && (qty_mode[reg_qty[REGNO (XEXP (base, 1))]]
2284 == GET_MODE (XEXP (base, 1)))
2285 && qty_const[reg_qty[REGNO (XEXP (base, 1))]])
2286 {
2287 rtx tem = qty_const[reg_qty[REGNO (XEXP (base, 1))]];
2288 base = qty_const[reg_qty[REGNO (XEXP (base, 0))]];
2289
2290 /* One of the two values must be a constant. */
2291 if (GET_CODE (base) != CONST_INT)
2292 {
2293 if (GET_CODE (tem) != CONST_INT)
2294 abort ();
2295 start = INTVAL (tem);
2296 }
2297 else
2298 {
2299 start = INTVAL (base);
2300 base = tem;
2301 }
2302 }
2303
2304 /* Handle everything that we can find inside an address that has been
2305 viewed as constant. */
2306
2307 while (1)
2308 {
2309 /* If no part of this switch does a "continue", the code outside
2310 will exit this loop. */
2311
2312 switch (GET_CODE (base))
2313 {
2314 case LO_SUM:
2315 /* By definition, operand1 of a LO_SUM is the associated constant
2316 address. Use the associated constant address as the base
2317 instead. */
2318 base = XEXP (base, 1);
2319 continue;
2320
2321 case CONST:
2322 /* Strip off CONST. */
2323 base = XEXP (base, 0);
2324 continue;
2325
2326 case PLUS:
2327 if (GET_CODE (XEXP (base, 1)) == CONST_INT)
2328 {
2329 start += INTVAL (XEXP (base, 1));
2330 base = XEXP (base, 0);
2331 continue;
2332 }
2333 break;
2334
2335 case AND:
2336 /* Handle the case of an AND which is the negative of a power of
2337 two. This is used to represent unaligned memory operations. */
2338 if (GET_CODE (XEXP (base, 1)) == CONST_INT
2339 && exact_log2 (- INTVAL (XEXP (base, 1))) > 0)
2340 {
2341 set_nonvarying_address_components (XEXP (base, 0), size,
2342 pbase, pstart, pend);
2343
2344 /* Assume the worst misalignment. START is affected, but not
2345 END, so compensate by adjusting SIZE. Don't lose any
2346 constant we already had. */
2347
2348 size = *pend - *pstart - INTVAL (XEXP (base, 1)) - 1;
2349 start += *pstart - INTVAL (XEXP (base, 1)) - 1;
2350 base = *pbase;
2351 }
2352 break;
2353 }
2354
2355 break;
2356 }
2357
2358 if (GET_CODE (base) == CONST_INT)
2359 {
2360 start += INTVAL (base);
2361 base = const0_rtx;
2362 }
2363
2364 end = start + size;
2365
2366 /* Set the return values. */
2367 *pbase = base;
2368 *pstart = start;
2369 *pend = end;
2370 }
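
/* For example (illustrative only): if ADDR is
   (plus (reg 100) (const_int 8)), reg 100 is known equivalent to
   (symbol_ref "tab"), and SIZE is 4, the code above yields
   *PBASE = (symbol_ref "tab"), *PSTART = 8, *PEND = 12. A bare
   constant address such as (const_int 20) decomposes to
   *PBASE = const0_rtx, *PSTART = 20, *PEND = 20 + SIZE.  */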
2371
2372 /* Return 1 iff any subexpression of X refers to memory
2373 at an address of BASE plus some offset
2374 such that any of the bytes' offsets fall between START (inclusive)
2375 and END (exclusive).
2376
2377 The value is undefined if X is a varying address (as determined by
2378 cse_rtx_addr_varies_p). This function is not used in such cases.
2379
2380 When used in the cse pass, `qty_const' is nonzero, and it is used
2381 to treat an address that is a register with a known constant value
2382 as if it were that constant value.
2383 In the loop pass, `qty_const' is zero, so this is not done. */
2384
2385 static int
2386 refers_to_mem_p (x, base, start, end)
2387 rtx x, base;
2388 HOST_WIDE_INT start, end;
2389 {
2390 register HOST_WIDE_INT i;
2391 register enum rtx_code code;
2392 register char *fmt;
2393
2394 repeat:
2395 if (x == 0)
2396 return 0;
2397
2398 code = GET_CODE (x);
2399 if (code == MEM)
2400 {
2401 register rtx addr = XEXP (x, 0); /* Get the address. */
2402 rtx mybase;
2403 HOST_WIDE_INT mystart, myend;
2404
2405 set_nonvarying_address_components (addr, GET_MODE_SIZE (GET_MODE (x)),
2406 &mybase, &mystart, &myend);
2407
2408
2409 /* refers_to_mem_p is never called with varying addresses.
2410 If the base addresses are not equal, there is no chance
2411 of the memory addresses conflicting. */
2412 if (! rtx_equal_p (mybase, base))
2413 return 0;
2414
2415 return myend > start && mystart < end;
2416 }
2417
2418 /* X does not match, so try its subexpressions. */
2419
2420 fmt = GET_RTX_FORMAT (code);
2421 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2422 if (fmt[i] == 'e')
2423 {
2424 if (i == 0)
2425 {
2426 x = XEXP (x, 0);
2427 goto repeat;
2428 }
2429 else
2430 if (refers_to_mem_p (XEXP (x, i), base, start, end))
2431 return 1;
2432 }
2433 else if (fmt[i] == 'E')
2434 {
2435 int j;
2436 for (j = 0; j < XVECLEN (x, i); j++)
2437 if (refers_to_mem_p (XVECEXP (x, i, j), base, start, end))
2438 return 1;
2439 }
2440
2441 return 0;
2442 }
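
/* Illustration (a minimal sketch): the conflict test above is the
   standard intersection of half-open byte ranges, the same test used
   for overlapping hard registers in invalidate. For example, bytes
   [4,8) and [6,10) overlap; [4,8) and [8,12) do not.  */
#if 0
static int
ranges_overlap_p (start1, end1, start2, end2)
     HOST_WIDE_INT start1, end1, start2, end2;
{
  /* Each range must begin strictly before the other ends.  */
  return end1 > start2 && start1 < end2;
}
#endif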
2443
2444 /* Nonzero if X refers to memory at a varying address,
2445 except that a register which currently has a known constant value
2446 isn't considered varying. */
2447
2448 static int
2449 cse_rtx_addr_varies_p (x)
2450 rtx x;
2451 {
2452 /* We need not check for X and the equivalence class being of the same
2453 mode because if X is equivalent to a constant in some mode, it
2454 doesn't vary in any mode. */
2455
2456 if (GET_CODE (x) == MEM
2457 && GET_CODE (XEXP (x, 0)) == REG
2458 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2459 && GET_MODE (XEXP (x, 0)) == qty_mode[reg_qty[REGNO (XEXP (x, 0))]]
2460 && qty_const[reg_qty[REGNO (XEXP (x, 0))]] != 0)
2461 return 0;
2462
2463 if (GET_CODE (x) == MEM
2464 && GET_CODE (XEXP (x, 0)) == PLUS
2465 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2466 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2467 && REGNO_QTY_VALID_P (REGNO (XEXP (XEXP (x, 0), 0)))
2468 && (GET_MODE (XEXP (XEXP (x, 0), 0))
2469 == qty_mode[reg_qty[REGNO (XEXP (XEXP (x, 0), 0))]])
2470 && qty_const[reg_qty[REGNO (XEXP (XEXP (x, 0), 0))]])
2471 return 0;
2472
2473 /* This can happen as the result of virtual register instantiation, if
2474 the initial constant is too large to be a valid address. This gives
2475 us a three instruction sequence: load a large offset into a register,
2476 load fp minus a constant into a register, then a MEM which is the
2477 sum of the two `constant' registers. */
2478 if (GET_CODE (x) == MEM
2479 && GET_CODE (XEXP (x, 0)) == PLUS
2480 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2481 && GET_CODE (XEXP (XEXP (x, 0), 1)) == REG
2482 && REGNO_QTY_VALID_P (REGNO (XEXP (XEXP (x, 0), 0)))
2483 && (GET_MODE (XEXP (XEXP (x, 0), 0))
2484 == qty_mode[reg_qty[REGNO (XEXP (XEXP (x, 0), 0))]])
2485 && qty_const[reg_qty[REGNO (XEXP (XEXP (x, 0), 0))]]
2486 && REGNO_QTY_VALID_P (REGNO (XEXP (XEXP (x, 0), 1)))
2487 && (GET_MODE (XEXP (XEXP (x, 0), 1))
2488 == qty_mode[reg_qty[REGNO (XEXP (XEXP (x, 0), 1))]])
2489 && qty_const[reg_qty[REGNO (XEXP (XEXP (x, 0), 1))]])
2490 return 0;
2491
2492 return rtx_addr_varies_p (x);
2493 }
2494 \f
2495 /* Canonicalize an expression:
2496 replace each register reference inside it
2497 with the "oldest" equivalent register.
2498
2499 If INSN is non-zero and we are replacing a pseudo with a hard register
2500 or vice versa, validate_change is used to ensure that INSN remains valid
2501 after we make our substitution. The calls are made with IN_GROUP non-zero
2502 so apply_change_group must be called upon the outermost return from this
2503 function (unless INSN is zero). The result of apply_change_group can
2504 generally be discarded since the changes we are making are optional. */
2505
2506 static rtx
2507 canon_reg (x, insn)
2508 rtx x;
2509 rtx insn;
2510 {
2511 register int i;
2512 register enum rtx_code code;
2513 register char *fmt;
2514
2515 if (x == 0)
2516 return x;
2517
2518 code = GET_CODE (x);
2519 switch (code)
2520 {
2521 case PC:
2522 case CC0:
2523 case CONST:
2524 case CONST_INT:
2525 case CONST_DOUBLE:
2526 case SYMBOL_REF:
2527 case LABEL_REF:
2528 case ADDR_VEC:
2529 case ADDR_DIFF_VEC:
2530 return x;
2531
2532 case REG:
2533 {
2534 register int first;
2535
2536 /* Never replace a hard reg, because hard regs can appear
2537 in more than one machine mode, and we must preserve the mode
2538 of each occurrence. Also, some hard regs appear in
2539 MEMs that are shared and mustn't be altered. Don't try to
2540 replace any reg that maps to a reg of class NO_REGS. */
2541 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2542 || ! REGNO_QTY_VALID_P (REGNO (x)))
2543 return x;
2544
2545 first = qty_first_reg[reg_qty[REGNO (x)]];
2546 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2547 : REGNO_REG_CLASS (first) == NO_REGS ? x
2548 : gen_rtx (REG, qty_mode[reg_qty[REGNO (x)]], first));
2549 }
2550 }
2551
2552 fmt = GET_RTX_FORMAT (code);
2553 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2554 {
2555 register int j;
2556
2557 if (fmt[i] == 'e')
2558 {
2559 rtx new = canon_reg (XEXP (x, i), insn);
2560
2561 /* If replacing pseudo with hard reg or vice versa, ensure the
2562 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2563 if (insn != 0 && new != 0
2564 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2565 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2566 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2567 || insn_n_dups[recog_memoized (insn)] > 0))
2568 validate_change (insn, &XEXP (x, i), new, 1);
2569 else
2570 XEXP (x, i) = new;
2571 }
2572 else if (fmt[i] == 'E')
2573 for (j = 0; j < XVECLEN (x, i); j++)
2574 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2575 }
2576
2577 return x;
2578 }
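
/* For example (illustrative only): if pseudos 100 and 105 are in the
   same quantity and 100 is first on the quantity's register chain,
   (plus (reg 105) (const_int 4)) canonicalizes to
   (plus (reg 100) (const_int 4)).  */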
2579 \f
2580 /* LOC is a location within INSN that is an operand address (the contents of
2581 a MEM). Find the best equivalent address to use that is valid for this
2582 insn.
2583
2584 On most CISC machines, complicated address modes are costly, and rtx_cost
2585 is a good approximation for that cost. However, most RISC machines have
2586 only a few (usually only one) memory reference formats. If an address is
2587 valid at all, it is often just as cheap as any other address. Hence, for
2588 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2589 costs of various addresses. For two addresses of equal cost, choose the one
2590 with the highest `rtx_cost' value as that has the potential of eliminating
2591 the most insns. For equal costs, we choose the first in the equivalence
2592 class. Note that we ignore the fact that pseudo registers are cheaper
2593 than hard registers here because we would also prefer the pseudo registers.
2594 */
2595
2596 static void
2597 find_best_addr (insn, loc)
2598 rtx insn;
2599 rtx *loc;
2600 {
2601 struct table_elt *elt, *p;
2602 rtx addr = *loc;
2603 int our_cost;
2604 int found_better = 1;
2605 int save_do_not_record = do_not_record;
2606 int save_hash_arg_in_memory = hash_arg_in_memory;
2607 int save_hash_arg_in_struct = hash_arg_in_struct;
2608 int addr_volatile;
2609 int regno;
2610 unsigned hash;
2611
2612 /* Do not try to replace constant addresses or addresses of local and
2613 argument slots. These MEM expressions are made only once and inserted
2614 in many instructions, as well as being used to control symbol table
2615 output. It is not safe to clobber them.
2616
2617 There are some uncommon cases where the address is already in a register
2618 for some reason, but we cannot take advantage of that because we have
2619 no easy way to unshare the MEM. In addition, looking up all stack
2620 addresses is costly. */
2621 if ((GET_CODE (addr) == PLUS
2622 && GET_CODE (XEXP (addr, 0)) == REG
2623 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2624 && (regno = REGNO (XEXP (addr, 0)),
2625 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2626 || regno == ARG_POINTER_REGNUM))
2627 || (GET_CODE (addr) == REG
2628 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2629 || regno == HARD_FRAME_POINTER_REGNUM
2630 || regno == ARG_POINTER_REGNUM))
2631 || CONSTANT_ADDRESS_P (addr))
2632 return;
2633
2634 /* If this address is not simply a register, try to fold it. This will
2635 sometimes simplify the expression. Many simplifications
2636 will not be valid, but some, usually applying the associative rule, will
2637 be valid and produce better code. */
2638 if (GET_CODE (addr) != REG
2639 && validate_change (insn, loc, fold_rtx (addr, insn), 0))
2640 addr = *loc;
2641
2642 /* If this address is not in the hash table, we can't look for equivalences
2643 of the whole address. Also, ignore if volatile. */
2644
2645 do_not_record = 0;
2646 hash = HASH (addr, Pmode);
2647 addr_volatile = do_not_record;
2648 do_not_record = save_do_not_record;
2649 hash_arg_in_memory = save_hash_arg_in_memory;
2650 hash_arg_in_struct = save_hash_arg_in_struct;
2651
2652 if (addr_volatile)
2653 return;
2654
2655 elt = lookup (addr, hash, Pmode);
2656
2657 #ifndef ADDRESS_COST
2658 if (elt)
2659 {
2660 our_cost = elt->cost;
2661
2662 /* Find the lowest cost below ours that works. */
2663 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2664 if (elt->cost < our_cost
2665 && (GET_CODE (elt->exp) == REG
2666 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2667 && validate_change (insn, loc,
2668 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2669 return;
2670 }
2671 #else
2672
2673 if (elt)
2674 {
2675 /* We need to find the best (under the criteria documented above) entry
2676 in the class that is valid. We use the `flag' field to indicate
2677 choices that were invalid and iterate until we can't find a better
2678 one that hasn't already been tried. */
2679
2680 for (p = elt->first_same_value; p; p = p->next_same_value)
2681 p->flag = 0;
2682
2683 while (found_better)
2684 {
2685 int best_addr_cost = ADDRESS_COST (*loc);
2686 int best_rtx_cost = (elt->cost + 1) >> 1;
2687 struct table_elt *best_elt = elt;
2688
2689 found_better = 0;
2690 for (p = elt->first_same_value; p; p = p->next_same_value)
2691 if (! p->flag
2692 && (GET_CODE (p->exp) == REG
2693 || exp_equiv_p (p->exp, p->exp, 1, 0))
2694 && (ADDRESS_COST (p->exp) < best_addr_cost
2695 || (ADDRESS_COST (p->exp) == best_addr_cost
2696 && (p->cost + 1) >> 1 > best_rtx_cost)))
2697 {
2698 found_better = 1;
2699 best_addr_cost = ADDRESS_COST (p->exp);
2700 best_rtx_cost = (p->cost + 1) >> 1;
2701 best_elt = p;
2702 }
2703
2704 if (found_better)
2705 {
2706 if (validate_change (insn, loc,
2707 canon_reg (copy_rtx (best_elt->exp),
2708 NULL_RTX), 0))
2709 return;
2710 else
2711 best_elt->flag = 1;
2712 }
2713 }
2714 }
2715
2716 /* If the address is a binary operation with the first operand a register
2717 and the second a constant, do the same as above, but looking for
2718 equivalences of the register. Then try to simplify before checking for
2719 the best address to use. This catches a few cases: First is when we
2720 have REG+const and the register is another REG+const. We can often merge
2721 the constants and eliminate one insn and one register. It may also be
2722 that a machine has a cheap REG+REG+const. Finally, this improves the
2723 code on the Alpha for unaligned byte stores. */
2724
2725 if (flag_expensive_optimizations
2726 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
2727 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
2728 && GET_CODE (XEXP (*loc, 0)) == REG
2729 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
2730 {
2731 rtx c = XEXP (*loc, 1);
2732
2733 do_not_record = 0;
2734 hash = HASH (XEXP (*loc, 0), Pmode);
2735 do_not_record = save_do_not_record;
2736 hash_arg_in_memory = save_hash_arg_in_memory;
2737 hash_arg_in_struct = save_hash_arg_in_struct;
2738
2739 elt = lookup (XEXP (*loc, 0), hash, Pmode);
2740 if (elt == 0)
2741 return;
2742
2743 /* We need to find the best (under the criteria documented above) entry
2744 in the class that is valid. We use the `flag' field to indicate
2745 choices that were invalid and iterate until we can't find a better
2746 one that hasn't already been tried. */
2747
2748 for (p = elt->first_same_value; p; p = p->next_same_value)
2749 p->flag = 0;
2750
2751 while (found_better)
2752 {
2753 int best_addr_cost = ADDRESS_COST (*loc);
2754 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2755 struct table_elt *best_elt = elt;
2756 rtx best_rtx = *loc;
2757 int count;
2758
2759 /* This is, in the worst case, an O(n^2) algorithm, so limit our search
2760 to the first 32 elements on the list. This avoids trouble
2761 compiling code with very long basic blocks that can easily
2762 call cse_gen_binary so many times that we run out of memory. */
2763
2764 found_better = 0;
2765 for (p = elt->first_same_value, count = 0;
2766 p && count < 32;
2767 p = p->next_same_value, count++)
2768 if (! p->flag
2769 && (GET_CODE (p->exp) == REG
2770 || exp_equiv_p (p->exp, p->exp, 1, 0)))
2771 {
2772 rtx new = cse_gen_binary (GET_CODE (*loc), Pmode, p->exp, c);
2773
2774 if ((ADDRESS_COST (new) < best_addr_cost
2775 || (ADDRESS_COST (new) == best_addr_cost
2776 && (COST (new) + 1) >> 1 > best_rtx_cost)))
2777 {
2778 found_better = 1;
2779 best_addr_cost = ADDRESS_COST (new);
2780 best_rtx_cost = (COST (new) + 1) >> 1;
2781 best_elt = p;
2782 best_rtx = new;
2783 }
2784 }
2785
2786 if (found_better)
2787 {
2788 if (validate_change (insn, loc,
2789 canon_reg (copy_rtx (best_rtx),
2790 NULL_RTX), 0))
2791 return;
2792 else
2793 best_elt->flag = 1;
2794 }
2795 }
2796 }
2797 #endif
2798 }
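
/* Illustration (a sketch only; `try_candidate' and the `cand' type are
   hypothetical stand-ins for validate_change and struct table_elt):
   both loops above follow a validate-or-mark pattern, so each
   equivalence is tried at most once.  */
#if 0
struct cand { int cost, flag; struct cand *next; };

static struct cand *
best_valid (list)
     struct cand *list;
{
  while (1)
    {
      struct cand *best = 0, *p;

      /* Find the cheapest candidate not yet tried.  */
      for (p = list; p; p = p->next)
	if (! p->flag && (best == 0 || p->cost < best->cost))
	  best = p;

      if (best == 0)
	return 0;		/* Every candidate has been tried.  */
      if (try_candidate (best))	/* Analogous to validate_change.  */
	return best;
      best->flag = 1;		/* Never retry a failed candidate.  */
    }
}
#endif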
2799 \f
2800 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
2801 operation (EQ, NE, GT, etc.), follow it back through the hash table and
2802 find what values are being compared.
2803
2804 *PARG1 and *PARG2 are updated to contain the rtx representing the values
2805 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
2806 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2807 compared to produce cc0.
2808
2809 The return value is the comparison operator and is either the code of
2810 A or the code corresponding to the inverse of the comparison. */
2811
2812 static enum rtx_code
2813 find_comparison_args (code, parg1, parg2, pmode1, pmode2)
2814 enum rtx_code code;
2815 rtx *parg1, *parg2;
2816 enum machine_mode *pmode1, *pmode2;
2817 {
2818 rtx arg1, arg2;
2819
2820 arg1 = *parg1, arg2 = *parg2;
2821
2822 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
2823
2824 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2825 {
2826 /* Set non-zero when we find something of interest. */
2827 rtx x = 0;
2828 int reverse_code = 0;
2829 struct table_elt *p = 0;
2830
2831 /* If arg1 is a COMPARE, extract the comparison arguments from it.
2832 On machines with CC0, this is the only case that can occur, since
2833 fold_rtx will return the COMPARE or item being compared with zero
2834 when given CC0. */
2835
2836 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2837 x = arg1;
2838
2839 /* If ARG1 is a comparison operator and CODE is testing for
2840 STORE_FLAG_VALUE, get the inner arguments. */
2841
2842 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
2843 {
2844 if (code == NE
2845 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2846 && code == LT && STORE_FLAG_VALUE == -1)
2847 #ifdef FLOAT_STORE_FLAG_VALUE
2848 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2849 && FLOAT_STORE_FLAG_VALUE < 0)
2850 #endif
2851 )
2852 x = arg1;
2853 else if (code == EQ
2854 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2855 && code == GE && STORE_FLAG_VALUE == -1)
2856 #ifdef FLOAT_STORE_FLAG_VALUE
2857 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2858 && FLOAT_STORE_FLAG_VALUE < 0)
2859 #endif
2860 )
2861 x = arg1, reverse_code = 1;
2862 }
2863
2864 /* ??? We could also check for
2865
2866 (ne (and (eq (...) (const_int 1))) (const_int 0))
2867
2868 and related forms, but let's wait until we see them occurring. */
2869
2870 if (x == 0)
2871 /* Look up ARG1 in the hash table and see if it has an equivalence
2872 that lets us see what is being compared. */
2873 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) % NBUCKETS,
2874 GET_MODE (arg1));
2875 if (p) p = p->first_same_value;
2876
2877 for (; p; p = p->next_same_value)
2878 {
2879 enum machine_mode inner_mode = GET_MODE (p->exp);
2880
2881 /* If the entry isn't valid, skip it. */
2882 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
2883 continue;
2884
2885 if (GET_CODE (p->exp) == COMPARE
2886 /* Another possibility is that this machine has a compare insn
2887 that includes the comparison code. In that case, ARG1 would
2888 be equivalent to a comparison operation that would set ARG1 to
2889 either STORE_FLAG_VALUE or zero. If this is an NE operation,
2890 ORIG_CODE is the actual comparison being done; if it is an EQ,
2891 we must reverse ORIG_CODE. On machines with a negative value
2892 for STORE_FLAG_VALUE, also look at LT and GE operations. */
2893 || ((code == NE
2894 || (code == LT
2895 && GET_MODE_CLASS (inner_mode) == MODE_INT
2896 && (GET_MODE_BITSIZE (inner_mode)
2897 <= HOST_BITS_PER_WIDE_INT)
2898 && (STORE_FLAG_VALUE
2899 & ((HOST_WIDE_INT) 1
2900 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2901 #ifdef FLOAT_STORE_FLAG_VALUE
2902 || (code == LT
2903 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2904 && FLOAT_STORE_FLAG_VALUE < 0)
2905 #endif
2906 )
2907 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
2908 {
2909 x = p->exp;
2910 break;
2911 }
2912 else if ((code == EQ
2913 || (code == GE
2914 && GET_MODE_CLASS (inner_mode) == MODE_INT
2915 && (GET_MODE_BITSIZE (inner_mode)
2916 <= HOST_BITS_PER_WIDE_INT)
2917 && (STORE_FLAG_VALUE
2918 & ((HOST_WIDE_INT) 1
2919 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2920 #ifdef FLOAT_STORE_FLAG_VALUE
2921 || (code == GE
2922 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2923 && FLOAT_STORE_FLAG_VALUE < 0)
2924 #endif
2925 )
2926 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
2927 {
2928 reverse_code = 1;
2929 x = p->exp;
2930 break;
2931 }
2932
2933 /* If this is fp + constant, the equivalent is a better operand since
2934 it may let us predict the value of the comparison. */
2935 else if (NONZERO_BASE_PLUS_P (p->exp))
2936 {
2937 arg1 = p->exp;
2938 continue;
2939 }
2940 }
2941
2942 /* If we didn't find a useful equivalence for ARG1, we are done.
2943 Otherwise, set up for the next iteration. */
2944 if (x == 0)
2945 break;
2946
2947 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
2948 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
2949 code = GET_CODE (x);
2950
2951 if (reverse_code)
2952 code = reverse_condition (code);
2953 }
2954
2955 /* Return our results. Return the modes from before fold_rtx
2956 because fold_rtx might produce const_int, and then it's too late. */
2957 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
2958 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
2959
2960 return code;
2961 }
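
/* For example (illustrative only): called with CODE == GT,
   *PARG1 == (compare (reg A) (reg B)) and *PARG2 == const0_rtx, the
   loop above strips the COMPARE and returns GT with *PARG1 == (reg A)
   and *PARG2 == (reg B).  */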
2962 \f
2963 /* Try to simplify a unary operation CODE whose output mode is to be
2964 MODE with input operand OP whose mode was originally OP_MODE.
2965 Return zero if no simplification can be made. */
2966
2967 rtx
2968 simplify_unary_operation (code, mode, op, op_mode)
2969 enum rtx_code code;
2970 enum machine_mode mode;
2971 rtx op;
2972 enum machine_mode op_mode;
2973 {
2974 register int width = GET_MODE_BITSIZE (mode);
2975
2976 /* The order of these tests is critical so that, for example, we don't
2977 check the wrong mode (input vs. output) for a conversion operation,
2978 such as FIX. At some point, this should be simplified. */
2979
2980 #if !defined(REAL_IS_NOT_DOUBLE) || defined(REAL_ARITHMETIC)
2981
2982 if (code == FLOAT && GET_MODE (op) == VOIDmode
2983 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
2984 {
2985 HOST_WIDE_INT hv, lv;
2986 REAL_VALUE_TYPE d;
2987
2988 if (GET_CODE (op) == CONST_INT)
2989 lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
2990 else
2991 lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
2992
2993 #ifdef REAL_ARITHMETIC
2994 REAL_VALUE_FROM_INT (d, lv, hv);
2995 #else
2996 if (hv < 0)
2997 {
2998 d = (double) (~ hv);
2999 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
3000 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
3001 d += (double) (unsigned HOST_WIDE_INT) (~ lv);
3002 d = (- d - 1.0);
3003 }
3004 else
3005 {
3006 d = (double) hv;
3007 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
3008 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
3009 d += (double) (unsigned HOST_WIDE_INT) lv;
3010 }
3011 #endif /* REAL_ARITHMETIC */
3012 d = real_value_truncate (mode, d);
3013 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3014 }
3015 else if (code == UNSIGNED_FLOAT && GET_MODE (op) == VOIDmode
3016 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3017 {
3018 HOST_WIDE_INT hv, lv;
3019 REAL_VALUE_TYPE d;
3020
3021 if (GET_CODE (op) == CONST_INT)
3022 lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
3023 else
3024 lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
3025
3026 if (op_mode == VOIDmode)
3027 {
3028 /* We don't know how to interpret negative-looking numbers in
3029 this case, so don't try to fold those. */
3030 if (hv < 0)
3031 return 0;
3032 }
3033 else if (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT * 2)
3034 ;
3035 else
3036 hv = 0, lv &= GET_MODE_MASK (op_mode);
3037
3038 #ifdef REAL_ARITHMETIC
3039 REAL_VALUE_FROM_UNSIGNED_INT (d, lv, hv);
3040 #else
3041
3042 d = (double) (unsigned HOST_WIDE_INT) hv;
3043 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
3044 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
3045 d += (double) (unsigned HOST_WIDE_INT) lv;
3046 #endif /* REAL_ARITHMETIC */
3047 d = real_value_truncate (mode, d);
3048 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3049 }
3050 #endif
3051
3052 if (GET_CODE (op) == CONST_INT
3053 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
3054 {
3055 register HOST_WIDE_INT arg0 = INTVAL (op);
3056 register HOST_WIDE_INT val;
3057
3058 switch (code)
3059 {
3060 case NOT:
3061 val = ~ arg0;
3062 break;
3063
3064 case NEG:
3065 val = - arg0;
3066 break;
3067
3068 case ABS:
3069 val = (arg0 >= 0 ? arg0 : - arg0);
3070 break;
3071
3072 case FFS:
3073 /* Don't use ffs here. Instead, get the low order bit and then its
3074 number. If arg0 is zero, this will return 0, as desired. */
3075 arg0 &= GET_MODE_MASK (mode);
3076 val = exact_log2 (arg0 & (- arg0)) + 1;
3077 break;
3078
3079 case TRUNCATE:
3080 val = arg0;
3081 break;
3082
3083 case ZERO_EXTEND:
3084 if (op_mode == VOIDmode)
3085 op_mode = mode;
3086 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
3087 {
3088 /* If we were really extending the mode,
3089 we would have to distinguish between zero-extension
3090 and sign-extension. */
3091 if (width != GET_MODE_BITSIZE (op_mode))
3092 abort ();
3093 val = arg0;
3094 }
3095 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3096 val = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3097 else
3098 return 0;
3099 break;
3100
3101 case SIGN_EXTEND:
3102 if (op_mode == VOIDmode)
3103 op_mode = mode;
3104 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
3105 {
3106 /* If we were really extending the mode,
3107 we would have to distinguish between zero-extension
3108 and sign-extension. */
3109 if (width != GET_MODE_BITSIZE (op_mode))
3110 abort ();
3111 val = arg0;
3112 }
3113 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3114 {
3115 val
3116 = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3117 if (val
3118 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (op_mode) - 1)))
3119 val -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3120 }
3121 else
3122 return 0;
3123 break;
3124
3125 case SQRT:
3126 return 0;
3127
3128 default:
3129 abort ();
3130 }
3131
3132 /* Clear the bits that don't belong in our mode,
3133 unless they and our sign bit are all one.
3134 So we get either a reasonable negative value or a reasonable
3135 unsigned value for this mode. */
3136 if (width < HOST_BITS_PER_WIDE_INT
3137 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3138 != ((HOST_WIDE_INT) (-1) << (width - 1))))
3139 val &= ((HOST_WIDE_INT) 1 << width) - 1;
3140
3141 return GEN_INT (val);
3142 }
3143
3144 /* We can do some operations on integer CONST_DOUBLEs. Also allow
3145 for a DImode operation on a CONST_INT. */
3146 else if (GET_MODE (op) == VOIDmode && width <= HOST_BITS_PER_INT * 2
3147 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3148 {
3149 HOST_WIDE_INT l1, h1, lv, hv;
3150
3151 if (GET_CODE (op) == CONST_DOUBLE)
3152 l1 = CONST_DOUBLE_LOW (op), h1 = CONST_DOUBLE_HIGH (op);
3153 else
3154 l1 = INTVAL (op), h1 = l1 < 0 ? -1 : 0;
3155
3156 switch (code)
3157 {
3158 case NOT:
3159 lv = ~ l1;
3160 hv = ~ h1;
3161 break;
3162
3163 case NEG:
3164 neg_double (l1, h1, &lv, &hv);
3165 break;
3166
3167 case ABS:
3168 if (h1 < 0)
3169 neg_double (l1, h1, &lv, &hv);
3170 else
3171 lv = l1, hv = h1;
3172 break;
3173
3174 case FFS:
3175 hv = 0;
3176 if (l1 == 0)
3177 lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & (-h1)) + 1;
3178 else
3179 lv = exact_log2 (l1 & (-l1)) + 1;
3180 break;
3181
3182 case TRUNCATE:
3183 /* This is just a change-of-mode, so do nothing. */
3184 lv = l1, hv = h1;
3185 break;
3186
3187 case ZERO_EXTEND:
3188 if (op_mode == VOIDmode
3189 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3190 return 0;
3191
3192 hv = 0;
3193 lv = l1 & GET_MODE_MASK (op_mode);
3194 break;
3195
3196 case SIGN_EXTEND:
3197 if (op_mode == VOIDmode
3198 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3199 return 0;
3200 else
3201 {
3202 lv = l1 & GET_MODE_MASK (op_mode);
3203 if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
3204 && (lv & ((HOST_WIDE_INT) 1
3205 << (GET_MODE_BITSIZE (op_mode) - 1))) != 0)
3206 lv -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3207
3208 hv = (lv < 0) ? ~ (HOST_WIDE_INT) 0 : 0;
3209 }
3210 break;
3211
3212 case SQRT:
3213 return 0;
3214
3215 default:
3216 return 0;
3217 }
3218
3219 return immed_double_const (lv, hv, mode);
3220 }
3221
3222 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3223 else if (GET_CODE (op) == CONST_DOUBLE
3224 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3225 {
3226 REAL_VALUE_TYPE d;
3227 jmp_buf handler;
3228 rtx x;
3229
3230 if (setjmp (handler))
3231 /* There used to be a warning here, but that is inadvisable.
3232 People may want to cause traps, and the natural way
3233 to do it should not get a warning. */
3234 return 0;
3235
3236 set_float_handler (handler);
3237
3238 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3239
3240 switch (code)
3241 {
3242 case NEG:
3243 d = REAL_VALUE_NEGATE (d);
3244 break;
3245
3246 case ABS:
3247 if (REAL_VALUE_NEGATIVE (d))
3248 d = REAL_VALUE_NEGATE (d);
3249 break;
3250
3251 case FLOAT_TRUNCATE:
3252 d = real_value_truncate (mode, d);
3253 break;
3254
3255 case FLOAT_EXTEND:
3256 /* All this does is change the mode. */
3257 break;
3258
3259 case FIX:
3260 d = REAL_VALUE_RNDZINT (d);
3261 break;
3262
3263 case UNSIGNED_FIX:
3264 d = REAL_VALUE_UNSIGNED_RNDZINT (d);
3265 break;
3266
3267 case SQRT:
3268 return 0;
3269
3270 default:
3271 abort ();
3272 }
3273
3274 x = CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3275 set_float_handler (NULL_PTR);
3276 return x;
3277 }
3278
3279 else if (GET_CODE (op) == CONST_DOUBLE
3280 && GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
3281 && GET_MODE_CLASS (mode) == MODE_INT
3282 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
3283 {
3284 REAL_VALUE_TYPE d;
3285 jmp_buf handler;
3286 HOST_WIDE_INT val;
3287
3288 if (setjmp (handler))
3289 return 0;
3290
3291 set_float_handler (handler);
3292
3293 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3294
3295 switch (code)
3296 {
3297 case FIX:
3298 val = REAL_VALUE_FIX (d);
3299 break;
3300
3301 case UNSIGNED_FIX:
3302 val = REAL_VALUE_UNSIGNED_FIX (d);
3303 break;
3304
3305 default:
3306 abort ();
3307 }
3308
3309 set_float_handler (NULL_PTR);
3310
3311 /* Clear the bits that don't belong in our mode,
3312 unless they and our sign bit are all one.
3313 So we get either a reasonable negative value or a reasonable
3314 unsigned value for this mode. */
3315 if (width < HOST_BITS_PER_WIDE_INT
3316 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3317 != ((HOST_WIDE_INT) (-1) << (width - 1))))
3318 val &= ((HOST_WIDE_INT) 1 << width) - 1;
3319
3320 /* If this would be an entire word for the target, but is not for
3321 the host, then sign-extend on the host so that the number will look
3322 the same way on the host that it would on the target.
3323
3324 For example, when building a 64 bit alpha hosted 32 bit sparc
3325 targeted compiler, then we want the 32 bit unsigned value -1 to be
3326 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
3327 The latter confuses the sparc backend. */
3328
3329 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
3330 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
3331 val |= ((HOST_WIDE_INT) (-1) << width);
3332
3333 return GEN_INT (val);
3334 }
3335 #endif
3336 /* This was formerly used only for non-IEEE float.
3337 eggert@twinsun.com says it is safe for IEEE also. */
3338 else
3339 {
3340 /* There are some simplifications we can do even if the operands
3341 aren't constant. */
3342 switch (code)
3343 {
3344 case NEG:
3345 case NOT:
3346 /* (not (not X)) == X, similarly for NEG. */
3347 if (GET_CODE (op) == code)
3348 return XEXP (op, 0);
3349 break;
3350
3351 case SIGN_EXTEND:
3352 /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2))))
3353 becomes just the MINUS if its mode is MODE. This allows
3354 folding switch statements on machines using casesi (such as
3355 the Vax). */
3356 if (GET_CODE (op) == TRUNCATE
3357 && GET_MODE (XEXP (op, 0)) == mode
3358 && GET_CODE (XEXP (op, 0)) == MINUS
3359 && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF
3360 && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF)
3361 return XEXP (op, 0);
3362
3363 #ifdef POINTERS_EXTEND_UNSIGNED
3364 if (! POINTERS_EXTEND_UNSIGNED
3365 && mode == Pmode && GET_MODE (op) == ptr_mode
3366 && CONSTANT_P (op))
3367 return convert_memory_address (Pmode, op);
3368 #endif
3369 break;
3370
3371 #ifdef POINTERS_EXTEND_UNSIGNED
3372 case ZERO_EXTEND:
3373 if (POINTERS_EXTEND_UNSIGNED
3374 && mode == Pmode && GET_MODE (op) == ptr_mode
3375 && CONSTANT_P (op))
3376 return convert_memory_address (Pmode, op);
3377 break;
3378 #endif
3379 }
3380
3381 return 0;
3382 }
3383 }
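
/* Illustration (a minimal sketch of the bit arithmetic used above,
   valid for WIDTH < HOST_BITS_PER_WIDE_INT as in the CONST_INT case):
   narrowing masks to the low WIDTH bits; sign extension then subtracts
   2**WIDTH when the sign bit of the narrowed value is set.  */
#if 0
static HOST_WIDE_INT
sign_extend_from (val, width)
     HOST_WIDE_INT val;
     int width;
{
  /* Keep only the low WIDTH bits; e.g. width 8 masks with 0xff.  */
  val &= ~((HOST_WIDE_INT) (-1) << width);

  /* If the sign bit survives, subtract 2**WIDTH: 0xff becomes -1,
     while 0x7f stays 127.  */
  if (val & ((HOST_WIDE_INT) 1 << (width - 1)))
    val -= (HOST_WIDE_INT) 1 << width;

  return val;
}

/* Likewise, the FFS case isolates the lowest set bit with ARG & -ARG:
   44 (binary 101100) & -44 is 4, and exact_log2 (4) + 1 is 3.  */
#endif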
3384 \f
3385 /* Simplify a binary operation CODE with result mode MODE, operating on OP0
3386 and OP1. Return 0 if no simplification is possible.
3387
3388 Don't use this for relational operations such as EQ or LT.
3389 Use simplify_relational_operation instead. */
3390
3391 rtx
3392 simplify_binary_operation (code, mode, op0, op1)
3393 enum rtx_code code;
3394 enum machine_mode mode;
3395 rtx op0, op1;
3396 {
3397 register HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
3398 HOST_WIDE_INT val;
3399 int width = GET_MODE_BITSIZE (mode);
3400 rtx tem;
3401
3402 /* Relational operations don't work here. We must know the mode
3403 of the operands in order to do the comparison correctly.
3404 Assuming a full word can give incorrect results.
3405 Consider comparing 128 with -128 in QImode. */
3406
3407 if (GET_RTX_CLASS (code) == '<')
3408 abort ();
3409
3410 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3411 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3412 && GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
3413 && mode == GET_MODE (op0) && mode == GET_MODE (op1))
3414 {
3415 REAL_VALUE_TYPE f0, f1, value;
3416 jmp_buf handler;
3417
3418 if (setjmp (handler))
3419 return 0;
3420
3421 set_float_handler (handler);
3422
3423 REAL_VALUE_FROM_CONST_DOUBLE (f0, op0);
3424 REAL_VALUE_FROM_CONST_DOUBLE (f1, op1);
3425 f0 = real_value_truncate (mode, f0);
3426 f1 = real_value_truncate (mode, f1);
3427
3428 #ifdef REAL_ARITHMETIC
3429 REAL_ARITHMETIC (value, rtx_to_tree_code (code), f0, f1);
3430 #else
3431 switch (code)
3432 {
3433 case PLUS:
3434 value = f0 + f1;
3435 break;
3436 case MINUS:
3437 value = f0 - f1;
3438 break;
3439 case MULT:
3440 value = f0 * f1;
3441 break;
3442 case DIV:
3443 #ifndef REAL_INFINITY
3444 if (f1 == 0)
3445 return 0;
3446 #endif
3447 value = f0 / f1;
3448 break;
3449 case SMIN:
3450 value = MIN (f0, f1);
3451 break;
3452 case SMAX:
3453 value = MAX (f0, f1);
3454 break;
3455 default:
3456 abort ();
3457 }
3458 #endif
3459
3460 value = real_value_truncate (mode, value);
3461 set_float_handler (NULL_PTR);
3462 return CONST_DOUBLE_FROM_REAL_VALUE (value, mode);
3463 }
3464 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3465
3466 /* We can fold some multi-word operations. */
3467 if (GET_MODE_CLASS (mode) == MODE_INT
3468 && width == HOST_BITS_PER_WIDE_INT * 2
3469 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
3470 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
3471 {
3472 HOST_WIDE_INT l1, l2, h1, h2, lv, hv;
3473
3474 if (GET_CODE (op0) == CONST_DOUBLE)
3475 l1 = CONST_DOUBLE_LOW (op0), h1 = CONST_DOUBLE_HIGH (op0);
3476 else
3477 l1 = INTVAL (op0), h1 = l1 < 0 ? -1 : 0;
3478
3479 if (GET_CODE (op1) == CONST_DOUBLE)
3480 l2 = CONST_DOUBLE_LOW (op1), h2 = CONST_DOUBLE_HIGH (op1);
3481 else
3482 l2 = INTVAL (op1), h2 = l2 < 0 ? -1 : 0;
3483
3484 switch (code)
3485 {
3486 case MINUS:
3487 /* A - B == A + (-B). */
3488 neg_double (l2, h2, &lv, &hv);
3489 l2 = lv, h2 = hv;
3490
3491 /* .. fall through ... */
3492
3493 case PLUS:
3494 add_double (l1, h1, l2, h2, &lv, &hv);
3495 break;
3496
3497 case MULT:
3498 mul_double (l1, h1, l2, h2, &lv, &hv);
3499 break;
3500
3501 case DIV: case MOD: case UDIV: case UMOD:
3502 /* We'd need to include tree.h to do this and it doesn't seem worth
3503 it. */
3504 return 0;
3505
3506 case AND:
3507 lv = l1 & l2, hv = h1 & h2;
3508 break;
3509
3510 case IOR:
3511 lv = l1 | l2, hv = h1 | h2;
3512 break;
3513
3514 case XOR:
3515 lv = l1 ^ l2, hv = h1 ^ h2;
3516 break;
3517
3518 case SMIN:
3519 if (h1 < h2
3520 || (h1 == h2
3521 && ((unsigned HOST_WIDE_INT) l1
3522 < (unsigned HOST_WIDE_INT) l2)))
3523 lv = l1, hv = h1;
3524 else
3525 lv = l2, hv = h2;
3526 break;
3527
3528 case SMAX:
3529 if (h1 > h2
3530 || (h1 == h2
3531 && ((unsigned HOST_WIDE_INT) l1
3532 > (unsigned HOST_WIDE_INT) l2)))
3533 lv = l1, hv = h1;
3534 else
3535 lv = l2, hv = h2;
3536 break;
3537
3538 case UMIN:
3539 if ((unsigned HOST_WIDE_INT) h1 < (unsigned HOST_WIDE_INT) h2
3540 || (h1 == h2
3541 && ((unsigned HOST_WIDE_INT) l1
3542 < (unsigned HOST_WIDE_INT) l2)))
3543 lv = l1, hv = h1;
3544 else
3545 lv = l2, hv = h2;
3546 break;
3547
3548 case UMAX:
3549 if ((unsigned HOST_WIDE_INT) h1 > (unsigned HOST_WIDE_INT) h2
3550 || (h1 == h2
3551 && ((unsigned HOST_WIDE_INT) l1
3552 > (unsigned HOST_WIDE_INT) l2)))
3553 lv = l1, hv = h1;
3554 else
3555 lv = l2, hv = h2;
3556 break;
3557
3558 case LSHIFTRT: case ASHIFTRT:
3559 case ASHIFT:
3560 case ROTATE: case ROTATERT:
3561 #ifdef SHIFT_COUNT_TRUNCATED
3562 if (SHIFT_COUNT_TRUNCATED)
3563 l2 &= (GET_MODE_BITSIZE (mode) - 1), h2 = 0;
3564 #endif
3565
3566 if (h2 != 0 || l2 < 0 || l2 >= GET_MODE_BITSIZE (mode))
3567 return 0;
3568
3569 if (code == LSHIFTRT || code == ASHIFTRT)
3570 rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
3571 code == ASHIFTRT);
3572 else if (code == ASHIFT)
3573 lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv, 1);
3574 else if (code == ROTATE)
3575 lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3576 else /* code == ROTATERT */
3577 rrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3578 break;
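	  /* E.g. for a 64-bit mode on a machine where SHIFT_COUNT_TRUNCATED
	     holds, a count of 70 is first masked down to 70 & 63 == 6.
	     Without truncation, a count that is negative or at least the
	     mode width is left unfolded, since its meaning is
	     machine-dependent.  */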
3579
3580 default:
3581 return 0;
3582 }
3583
3584 return immed_double_const (lv, hv, mode);
3585 }
3586
3587 if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT
3588 || width > HOST_BITS_PER_WIDE_INT || width == 0)
3589 {
3590 /* Even if we can't compute a constant result,
3591 there are some cases worth simplifying. */
3592
3593 switch (code)
3594 {
3595 case PLUS:
3596 /* In IEEE floating point, x+0 is not the same as x. Similarly
3597 for the other optimizations below. */
3598 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3599 && FLOAT_MODE_P (mode) && ! flag_fast_math)
3600 break;
3601
3602 if (op1 == CONST0_RTX (mode))
3603 return op0;
3604
3605 /* ((-a) + b) -> (b - a) and similarly for (a + (-b)) */
3606 if (GET_CODE (op0) == NEG)
3607 return cse_gen_binary (MINUS, mode, op1, XEXP (op0, 0));
3608 else if (GET_CODE (op1) == NEG)
3609 return cse_gen_binary (MINUS, mode, op0, XEXP (op1, 0));
3610
3611 /* Handle both-operands-constant cases. We can only add
3612 CONST_INTs to constants since the sum of relocatable symbols
3613 can't be handled by most assemblers. Don't add CONST_INT
3614 to CONST_INT since overflow won't be computed properly if wider
3615 than HOST_BITS_PER_WIDE_INT. */
3616
3617 if (CONSTANT_P (op0) && GET_MODE (op0) != VOIDmode
3618 && GET_CODE (op1) == CONST_INT)
3619 return plus_constant (op0, INTVAL (op1));
3620 else if (CONSTANT_P (op1) && GET_MODE (op1) != VOIDmode
3621 && GET_CODE (op0) == CONST_INT)
3622 return plus_constant (op1, INTVAL (op0));
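	  /* For instance, adding (const_int 4) to (symbol_ref:SI "x") folds
	     via plus_constant into (const:SI (plus (symbol_ref "x")
	     (const_int 4))), a form the assembler can relocate.  The mode
	     test excludes a CONST_INT op0, since CONST_INTs have VOIDmode.  */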
3623
3624 	  /* See if this is something like X * C + X or vice versa or
3625 	     if the multiplication is written as a shift.  If so, we can
3626 	     distribute and make a new multiply, shift, or maybe just
3627 	     have X (e.g. when the coefficients cancel to 1).  But don't
3628 	     make a real multiply if we didn't have one before. */
3629
3630 if (! FLOAT_MODE_P (mode))
3631 {
3632 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3633 rtx lhs = op0, rhs = op1;
3634 int had_mult = 0;
3635
3636 if (GET_CODE (lhs) == NEG)
3637 coeff0 = -1, lhs = XEXP (lhs, 0);
3638 else if (GET_CODE (lhs) == MULT
3639 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3640 {
3641 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3642 had_mult = 1;
3643 }
3644 else if (GET_CODE (lhs) == ASHIFT
3645 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3646 && INTVAL (XEXP (lhs, 1)) >= 0
3647 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3648 {
3649 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3650 lhs = XEXP (lhs, 0);
3651 }
3652
3653 if (GET_CODE (rhs) == NEG)
3654 coeff1 = -1, rhs = XEXP (rhs, 0);
3655 else if (GET_CODE (rhs) == MULT
3656 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3657 {
3658 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3659 had_mult = 1;
3660 }
3661 else if (GET_CODE (rhs) == ASHIFT
3662 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3663 && INTVAL (XEXP (rhs, 1)) >= 0
3664 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3665 {
3666 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3667 rhs = XEXP (rhs, 0);
3668 }
3669
3670 if (rtx_equal_p (lhs, rhs))
3671 {
3672 tem = cse_gen_binary (MULT, mode, lhs,
3673 GEN_INT (coeff0 + coeff1));
3674 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3675 }
3676 }
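	  /* Worked example: (plus (mult X 3) X) gives coeff0 == 3,
	     coeff1 == 1 and folds to (mult X 4), while
	     (plus (ashift X 2) (neg X)) gives coeff0 == 4, coeff1 == -1
	     and folds to (mult X 3).  The had_mult test discards a result
	     that would introduce a MULT where there was none before.  */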
3677
3678 /* If one of the operands is a PLUS or a MINUS, see if we can
3679 simplify this by the associative law.
3680 Don't use the associative law for floating point.
3681 The inaccuracy makes it nonassociative,
3682 and subtle programs can break if operations are associated. */
3683
3684 if (INTEGRAL_MODE_P (mode)
3685 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3686 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3687 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3688 return tem;
3689 break;
3690
3691 case COMPARE:
3692 #ifdef HAVE_cc0
3693       /* Convert (compare FOO (const_int 0)) to FOO, but only when
3694 	 using cc0; otherwise we want to leave it as a COMPARE so we
3695 	 can distinguish it from a register-register copy.
3696
3697 In IEEE floating point, x-0 is not the same as x. */
3698
3699 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3700 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3701 && op1 == CONST0_RTX (mode))
3702 return op0;
3703 #else
3704 /* Do nothing here. */
3705 #endif
3706 break;
3707
3708 case MINUS:
3709 /* None of these optimizations can be done for IEEE
3710 floating point. */
3711 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3712 && FLOAT_MODE_P (mode) && ! flag_fast_math)
3713 break;
3714
3715 /* We can't assume x-x is 0 even with non-IEEE floating point,
3716 but since it is zero except in very strange circumstances, we
3717 will treat it as zero with -ffast-math. */
3718 if (rtx_equal_p (op0, op1)
3719 && ! side_effects_p (op0)
3720 && (! FLOAT_MODE_P (mode) || flag_fast_math))
3721 return CONST0_RTX (mode);
3722
3723 /* Change subtraction from zero into negation. */
3724 if (op0 == CONST0_RTX (mode))
3725 return gen_rtx (NEG, mode, op1);
3726
3727 /* (-1 - a) is ~a. */
3728 if (op0 == constm1_rtx)
3729 return gen_rtx (NOT, mode, op1);
3730
3731 /* Subtracting 0 has no effect. */
3732 if (op1 == CONST0_RTX (mode))
3733 return op0;
3734
3735 /* See if this is something like X * C - X or vice versa or
3736 if the multiplication is written as a shift. If so, we can
3737 distribute and make a new multiply, shift, or maybe just
3738 	     have X (if C is 2 in the example above).  But don't make a
3739 	     real multiply if we didn't have one before. */
3740
3741 if (! FLOAT_MODE_P (mode))
3742 {
3743 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3744 rtx lhs = op0, rhs = op1;
3745 int had_mult = 0;
3746
3747 if (GET_CODE (lhs) == NEG)
3748 coeff0 = -1, lhs = XEXP (lhs, 0);
3749 else if (GET_CODE (lhs) == MULT
3750 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3751 {
3752 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3753 had_mult = 1;
3754 }
3755 else if (GET_CODE (lhs) == ASHIFT
3756 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3757 && INTVAL (XEXP (lhs, 1)) >= 0
3758 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3759 {
3760 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3761 lhs = XEXP (lhs, 0);
3762 }
3763
3764 if (GET_CODE (rhs) == NEG)
3765 	      coeff1 = -1, rhs = XEXP (rhs, 0);
3766 else if (GET_CODE (rhs) == MULT
3767 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3768 {
3769 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3770 had_mult = 1;
3771 }
3772 else if (GET_CODE (rhs) == ASHIFT
3773 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3774 && INTVAL (XEXP (rhs, 1)) >= 0
3775 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3776 {
3777 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3778 rhs = XEXP (rhs, 0);
3779 }
3780
3781 if (rtx_equal_p (lhs, rhs))
3782 {
3783 tem = cse_gen_binary (MULT, mode, lhs,
3784 GEN_INT (coeff0 - coeff1));
3785 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3786 }
3787 }
3788
3789 /* (a - (-b)) -> (a + b). */
3790 if (GET_CODE (op1) == NEG)
3791 return cse_gen_binary (PLUS, mode, op0, XEXP (op1, 0));
3792
3793 /* If one of the operands is a PLUS or a MINUS, see if we can
3794 simplify this by the associative law.
3795 Don't use the associative law for floating point.
3796 The inaccuracy makes it nonassociative,
3797 and subtle programs can break if operations are associated. */
3798
3799 if (INTEGRAL_MODE_P (mode)
3800 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3801 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3802 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3803 return tem;
3804
3805 /* Don't let a relocatable value get a negative coeff. */
3806 if (GET_CODE (op1) == CONST_INT && GET_MODE (op0) != VOIDmode)
3807 return plus_constant (op0, - INTVAL (op1));
3808
3809 /* (x - (x & y)) -> (x & ~y) */
3810 if (GET_CODE (op1) == AND)
3811 {
3812 if (rtx_equal_p (op0, XEXP (op1, 0)))
3813 return cse_gen_binary (AND, mode, op0, gen_rtx (NOT, mode, XEXP (op1, 1)));
3814 if (rtx_equal_p (op0, XEXP (op1, 1)))
3815 return cse_gen_binary (AND, mode, op0, gen_rtx (NOT, mode, XEXP (op1, 0)));
3816 }
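	  /* E.g. with x == 1100 and y == 1010 in binary: x & y == 1000, so
	     x - (x & y) == 0100 == x & ~y.  The identity holds because
	     x & y sets only bits that are also set in x, so the
	     subtraction never borrows.  */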
3817 break;
3818
3819 case MULT:
3820 if (op1 == constm1_rtx)
3821 {
3822 tem = simplify_unary_operation (NEG, mode, op0, mode);
3823
3824 return tem ? tem : gen_rtx (NEG, mode, op0);
3825 }
3826
3827 /* In IEEE floating point, x*0 is not always 0. */
3828 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3829 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3830 && op1 == CONST0_RTX (mode)
3831 && ! side_effects_p (op0))
3832 return op1;
3833
3834 	  /* In IEEE floating point, x*1 is not equivalent to x for NaNs.
3835 However, ANSI says we can drop signals,
3836 so we can do this anyway. */
3837 if (op1 == CONST1_RTX (mode))
3838 return op0;
3839
3840 /* Convert multiply by constant power of two into shift unless
3841 we are still generating RTL. This test is a kludge. */
3842 if (GET_CODE (op1) == CONST_INT
3843 && (val = exact_log2 (INTVAL (op1))) >= 0
3844 && ! rtx_equal_function_value_matters)
3845 return gen_rtx (ASHIFT, mode, op0, GEN_INT (val));
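	  /* E.g. (mult:SI X (const_int 8)) becomes
	     (ashift:SI X (const_int 3)), since exact_log2 (8) == 3.  */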
3846
3847 if (GET_CODE (op1) == CONST_DOUBLE
3848 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT)
3849 {
3850 REAL_VALUE_TYPE d;
3851 jmp_buf handler;
3852 int op1is2, op1ism1;
3853
3854 if (setjmp (handler))
3855 return 0;
3856
3857 set_float_handler (handler);
3858 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
3859 op1is2 = REAL_VALUES_EQUAL (d, dconst2);
3860 op1ism1 = REAL_VALUES_EQUAL (d, dconstm1);
3861 set_float_handler (NULL_PTR);
3862
3863 /* x*2 is x+x and x*(-1) is -x */
3864 if (op1is2 && GET_MODE (op0) == mode)
3865 return gen_rtx (PLUS, mode, op0, copy_rtx (op0));
3866
3867 else if (op1ism1 && GET_MODE (op0) == mode)
3868 return gen_rtx (NEG, mode, op0);
3869 }
3870 break;
3871
3872 case IOR:
3873 if (op1 == const0_rtx)
3874 return op0;
3875 if (GET_CODE (op1) == CONST_INT
3876 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3877 return op1;
3878 if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3879 return op0;
3880 /* A | (~A) -> -1 */
3881 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
3882 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
3883 && ! side_effects_p (op0)
3884 && GET_MODE_CLASS (mode) != MODE_CC)
3885 return constm1_rtx;
3886 break;
3887
3888 case XOR:
3889 if (op1 == const0_rtx)
3890 return op0;
3891 if (GET_CODE (op1) == CONST_INT
3892 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3893 return gen_rtx (NOT, mode, op0);
3894 if (op0 == op1 && ! side_effects_p (op0)
3895 && GET_MODE_CLASS (mode) != MODE_CC)
3896 return const0_rtx;
3897 break;
3898
3899 case AND:
3900 if (op1 == const0_rtx && ! side_effects_p (op0))
3901 return const0_rtx;
3902 if (GET_CODE (op1) == CONST_INT
3903 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3904 return op0;
3905 if (op0 == op1 && ! side_effects_p (op0)
3906 && GET_MODE_CLASS (mode) != MODE_CC)
3907 return op0;
3908 /* A & (~A) -> 0 */
3909 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
3910 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
3911 && ! side_effects_p (op0)
3912 && GET_MODE_CLASS (mode) != MODE_CC)
3913 return const0_rtx;
3914 break;
3915
3916 case UDIV:
3917 /* Convert divide by power of two into shift (divide by 1 handled
3918 below). */
3919 if (GET_CODE (op1) == CONST_INT
3920 && (arg1 = exact_log2 (INTVAL (op1))) > 0)
3921 return gen_rtx (LSHIFTRT, mode, op0, GEN_INT (arg1));
3922
3923 /* ... fall through ... */
3924
3925 case DIV:
3926 if (op1 == CONST1_RTX (mode))
3927 return op0;
3928
3929 /* In IEEE floating point, 0/x is not always 0. */
3930 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3931 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3932 && op0 == CONST0_RTX (mode)
3933 && ! side_effects_p (op1))
3934 return op0;
3935
3936 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3937 /* Change division by a constant into multiplication. Only do
3938 this with -ffast-math until an expert says it is safe in
3939 general. */
3940 else if (GET_CODE (op1) == CONST_DOUBLE
3941 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT
3942 && op1 != CONST0_RTX (mode)
3943 && flag_fast_math)
3944 {
3945 REAL_VALUE_TYPE d;
3946 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
3947
3948 if (! REAL_VALUES_EQUAL (d, dconst0))
3949 {
3950 #if defined (REAL_ARITHMETIC)
3951 REAL_ARITHMETIC (d, rtx_to_tree_code (DIV), dconst1, d);
3952 return gen_rtx (MULT, mode, op0,
3953 CONST_DOUBLE_FROM_REAL_VALUE (d, mode));
3954 #else
3955 return gen_rtx (MULT, mode, op0,
3956 CONST_DOUBLE_FROM_REAL_VALUE (1./d, mode));
3957 #endif
3958 }
3959 }
3960 #endif
3961 break;
3962
3963 case UMOD:
3964 /* Handle modulus by power of two (mod with 1 handled below). */
3965 if (GET_CODE (op1) == CONST_INT
3966 && exact_log2 (INTVAL (op1)) > 0)
3967 return gen_rtx (AND, mode, op0, GEN_INT (INTVAL (op1) - 1));
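	  /* E.g. (umod:SI X (const_int 8)) becomes
	     (and:SI X (const_int 7)): for a power of two, the remainder
	     is just the low-order bits.  */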
3968
3969 /* ... fall through ... */
3970
3971 case MOD:
3972 if ((op0 == const0_rtx || op1 == const1_rtx)
3973 && ! side_effects_p (op0) && ! side_effects_p (op1))
3974 return const0_rtx;
3975 break;
3976
3977 case ROTATERT:
3978 case ROTATE:
3979 /* Rotating ~0 always results in ~0. */
3980 if (GET_CODE (op0) == CONST_INT && width <= HOST_BITS_PER_WIDE_INT
3981 && INTVAL (op0) == GET_MODE_MASK (mode)
3982 && ! side_effects_p (op1))
3983 return op0;
3984
3985 /* ... fall through ... */
3986
3987 case ASHIFT:
3988 case ASHIFTRT:
3989 case LSHIFTRT:
3990 if (op1 == const0_rtx)
3991 return op0;
3992 if (op0 == const0_rtx && ! side_effects_p (op1))
3993 return op0;
3994 break;
3995
3996 case SMIN:
3997 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
3998 	  && INTVAL (op1) == (HOST_WIDE_INT) 1 << (width - 1)
3999 && ! side_effects_p (op0))
4000 return op1;
4001 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4002 return op0;
4003 break;
4004
4005 case SMAX:
4006 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
4007 && (INTVAL (op1)
4008 == (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode) >> 1)
4009 && ! side_effects_p (op0))
4010 return op1;
4011 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4012 return op0;
4013 break;
4014
4015 case UMIN:
4016 if (op1 == const0_rtx && ! side_effects_p (op0))
4017 return op1;
4018 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4019 return op0;
4020 break;
4021
4022 case UMAX:
4023 if (op1 == constm1_rtx && ! side_effects_p (op0))
4024 return op1;
4025 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4026 return op0;
4027 break;
4028
4029 default:
4030 abort ();
4031 }
4032
4033 return 0;
4034 }
4035
4036 /* Get the integer argument values in two forms:
4037 zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S. */
4038
4039 arg0 = INTVAL (op0);
4040 arg1 = INTVAL (op1);
4041
4042 if (width < HOST_BITS_PER_WIDE_INT)
4043 {
4044 arg0 &= ((HOST_WIDE_INT) 1 << width) - 1;
4045 arg1 &= ((HOST_WIDE_INT) 1 << width) - 1;
4046
4047 arg0s = arg0;
4048 if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4049 arg0s |= ((HOST_WIDE_INT) (-1) << width);
4050
4051 arg1s = arg1;
4052 if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4053 arg1s |= ((HOST_WIDE_INT) (-1) << width);
4054 }
4055 else
4056 {
4057 arg0s = arg0;
4058 arg1s = arg1;
4059 }
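  /* Worked example with width == 8: the byte 0xff yields arg0 == 255
     (zero-extended) and arg0s == -1 (sign-extended), so the signed and
     unsigned operations below each see the interpretation they need.  */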
4060
4061 /* Compute the value of the arithmetic. */
4062
4063 switch (code)
4064 {
4065 case PLUS:
4066 val = arg0s + arg1s;
4067 break;
4068
4069 case MINUS:
4070 val = arg0s - arg1s;
4071 break;
4072
4073 case MULT:
4074 val = arg0s * arg1s;
4075 break;
4076
4077 case DIV:
4078 if (arg1s == 0)
4079 return 0;
4080 val = arg0s / arg1s;
4081 break;
4082
4083 case MOD:
4084 if (arg1s == 0)
4085 return 0;
4086 val = arg0s % arg1s;
4087 break;
4088
4089 case UDIV:
4090 if (arg1 == 0)
4091 return 0;
4092 val = (unsigned HOST_WIDE_INT) arg0 / arg1;
4093 break;
4094
4095 case UMOD:
4096 if (arg1 == 0)
4097 return 0;
4098 val = (unsigned HOST_WIDE_INT) arg0 % arg1;
4099 break;
4100
4101 case AND:
4102 val = arg0 & arg1;
4103 break;
4104
4105 case IOR:
4106 val = arg0 | arg1;
4107 break;
4108
4109 case XOR:
4110 val = arg0 ^ arg1;
4111 break;
4112
4113 case LSHIFTRT:
4114 /* If shift count is undefined, don't fold it; let the machine do
4115 what it wants. But truncate it if the machine will do that. */
4116 if (arg1 < 0)
4117 return 0;
4118
4119 #ifdef SHIFT_COUNT_TRUNCATED
4120 if (SHIFT_COUNT_TRUNCATED)
4121 arg1 %= width;
4122 #endif
4123
4124 val = ((unsigned HOST_WIDE_INT) arg0) >> arg1;
4125 break;
4126
4127 case ASHIFT:
4128 if (arg1 < 0)
4129 return 0;
4130
4131 #ifdef SHIFT_COUNT_TRUNCATED
4132 if (SHIFT_COUNT_TRUNCATED)
4133 arg1 %= width;
4134 #endif
4135
4136 val = ((unsigned HOST_WIDE_INT) arg0) << arg1;
4137 break;
4138
4139 case ASHIFTRT:
4140 if (arg1 < 0)
4141 return 0;
4142
4143 #ifdef SHIFT_COUNT_TRUNCATED
4144 if (SHIFT_COUNT_TRUNCATED)
4145 arg1 %= width;
4146 #endif
4147
4148 val = arg0s >> arg1;
4149
4150       /* The bootstrap compiler may not have sign-extended the right shift.
4151 	 Manually extend the sign to ensure the bootstrap cc matches gcc.  */
4152 if (arg0s < 0 && arg1 > 0)
4153 val |= ((HOST_WIDE_INT) -1) << (HOST_BITS_PER_WIDE_INT - arg1);
4154
4155 break;
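      /* E.g. with a 32-bit HOST_WIDE_INT whose >> is a logical shift,
	 arg0s == -16 and arg1 == 2 give val == 0x3ffffffc; OR-ing in
	 -1 << 30 restores the high bits, yielding the arithmetic
	 result 0xfffffffc == -4.  */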
4156
4157 case ROTATERT:
4158 if (arg1 < 0)
4159 return 0;
4160
4161 arg1 %= width;
4162 val = ((((unsigned HOST_WIDE_INT) arg0) << (width - arg1))
4163 | (((unsigned HOST_WIDE_INT) arg0) >> arg1));
4164 break;
4165
4166 case ROTATE:
4167 if (arg1 < 0)
4168 return 0;
4169
4170 arg1 %= width;
4171 val = ((((unsigned HOST_WIDE_INT) arg0) << arg1)
4172 | (((unsigned HOST_WIDE_INT) arg0) >> (width - arg1)));
4173 break;
4174
4175 case COMPARE:
4176 /* Do nothing here. */
4177 return 0;
4178
4179 case SMIN:
4180 val = arg0s <= arg1s ? arg0s : arg1s;
4181 break;
4182
4183 case UMIN:
4184 val = ((unsigned HOST_WIDE_INT) arg0
4185 <= (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4186 break;
4187
4188 case SMAX:
4189 val = arg0s > arg1s ? arg0s : arg1s;
4190 break;
4191
4192 case UMAX:
4193 val = ((unsigned HOST_WIDE_INT) arg0
4194 > (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4195 break;
4196
4197 default:
4198 abort ();
4199 }
4200
4201 /* Clear the bits that don't belong in our mode, unless they and our sign
4202 bit are all one. So we get either a reasonable negative value or a
4203 reasonable unsigned value for this mode. */
4204 if (width < HOST_BITS_PER_WIDE_INT
4205 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4206 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4207 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4208
4209 /* If this would be an entire word for the target, but is not for
4210 the host, then sign-extend on the host so that the number will look
4211 the same way on the host that it would on the target.
4212
4213      For example, when building a compiler hosted on a 64-bit alpha and
4214      targeting a 32-bit sparc, we want the 32-bit unsigned value -1 to be
4215      represented as the 64-bit value -1, and not as 0x00000000ffffffff.
4216      The latter confuses the sparc backend.  */
4217
4218 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
4219 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
4220 val |= ((HOST_WIDE_INT) (-1) << width);
4221
4222 return GEN_INT (val);
4223 }
4224 \f
4225 /* Simplify a PLUS or MINUS, at least one of whose operands may be another
4226 PLUS or MINUS.
4227
4228    Rather than testing for specific cases, we do this by a brute-force method
4229 and do all possible simplifications until no more changes occur. Then
4230 we rebuild the operation. */
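/* For example, simplifying (minus (plus a b) (minus a c)) expands the
   operands to a, -a, b, and c; the pairwise pass folds a - a to 0, and
   the rebuild then produces b + c.  */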
4231
4232 static rtx
4233 simplify_plus_minus (code, mode, op0, op1)
4234 enum rtx_code code;
4235 enum machine_mode mode;
4236 rtx op0, op1;
4237 {
4238 rtx ops[8];
4239 int negs[8];
4240 rtx result, tem;
4241 int n_ops = 2, input_ops = 2, input_consts = 0, n_consts = 0;
4242 int first = 1, negate = 0, changed;
4243 int i, j;
4244
4245 bzero ((char *) ops, sizeof ops);
4246
4247 /* Set up the two operands and then expand them until nothing has been
4248 changed. If we run out of room in our array, give up; this should
4249 almost never happen. */
4250
4251 ops[0] = op0, ops[1] = op1, negs[0] = 0, negs[1] = (code == MINUS);
4252
4253 changed = 1;
4254 while (changed)
4255 {
4256 changed = 0;
4257
4258 for (i = 0; i < n_ops; i++)
4259 switch (GET_CODE (ops[i]))
4260 {
4261 case PLUS:
4262 case MINUS:
4263 if (n_ops == 7)
4264 return 0;
4265
4266 ops[n_ops] = XEXP (ops[i], 1);
4267 negs[n_ops++] = GET_CODE (ops[i]) == MINUS ? !negs[i] : negs[i];
4268 ops[i] = XEXP (ops[i], 0);
4269 input_ops++;
4270 changed = 1;
4271 break;
4272
4273 case NEG:
4274 ops[i] = XEXP (ops[i], 0);
4275 negs[i] = ! negs[i];
4276 changed = 1;
4277 break;
4278
4279 case CONST:
4280 ops[i] = XEXP (ops[i], 0);
4281 input_consts++;
4282 changed = 1;
4283 break;
4284
4285 case NOT:
4286 /* ~a -> (-a - 1) */
4287 if (n_ops != 7)
4288 {
4289 ops[n_ops] = constm1_rtx;
4290 negs[n_ops++] = negs[i];
4291 ops[i] = XEXP (ops[i], 0);
4292 negs[i] = ! negs[i];
4293 changed = 1;
4294 }
4295 break;
4296
4297 case CONST_INT:
4298 if (negs[i])
4299 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0, changed = 1;
4300 break;
4301 }
4302 }
4303
4304 /* If we only have two operands, we can't do anything. */
4305 if (n_ops <= 2)
4306 return 0;
4307
4308 /* Now simplify each pair of operands until nothing changes. The first
4309 time through just simplify constants against each other. */
4310
4311 changed = 1;
4312 while (changed)
4313 {
4314 changed = first;
4315
4316 for (i = 0; i < n_ops - 1; i++)
4317 for (j = i + 1; j < n_ops; j++)
4318 if (ops[i] != 0 && ops[j] != 0
4319 && (! first || (CONSTANT_P (ops[i]) && CONSTANT_P (ops[j]))))
4320 {
4321 rtx lhs = ops[i], rhs = ops[j];
4322 enum rtx_code ncode = PLUS;
4323
4324 if (negs[i] && ! negs[j])
4325 lhs = ops[j], rhs = ops[i], ncode = MINUS;
4326 else if (! negs[i] && negs[j])
4327 ncode = MINUS;
4328
4329 tem = simplify_binary_operation (ncode, mode, lhs, rhs);
4330 if (tem)
4331 {
4332 ops[i] = tem, ops[j] = 0;
4333 negs[i] = negs[i] && negs[j];
4334 if (GET_CODE (tem) == NEG)
4335 ops[i] = XEXP (tem, 0), negs[i] = ! negs[i];
4336
4337 if (GET_CODE (ops[i]) == CONST_INT && negs[i])
4338 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0;
4339 changed = 1;
4340 }
4341 }
4342
4343 first = 0;
4344 }
4345
4346 /* Pack all the operands to the lower-numbered entries and give up if
4347 we didn't reduce the number of operands we had. Make sure we
4348 count a CONST as two operands. If we have the same number of
4349 operands, but have made more CONSTs than we had, this is also
4350 an improvement, so accept it. */
4351
4352 for (i = 0, j = 0; j < n_ops; j++)
4353 if (ops[j] != 0)
4354 {
4355 ops[i] = ops[j], negs[i++] = negs[j];
4356 if (GET_CODE (ops[j]) == CONST)
4357 n_consts++;
4358 }
4359
4360 if (i + n_consts > input_ops
4361 || (i + n_consts == input_ops && n_consts <= input_consts))
4362 return 0;
4363
4364 n_ops = i;
4365
4366 /* If we have a CONST_INT, put it last. */
4367 for (i = 0; i < n_ops - 1; i++)
4368 if (GET_CODE (ops[i]) == CONST_INT)
4369 {
4370       tem = ops[n_ops - 1], ops[n_ops - 1] = ops[i], ops[i] = tem;
4371 j = negs[n_ops - 1], negs[n_ops - 1] = negs[i], negs[i] = j;
4372 }
4373
4374 /* Put a non-negated operand first. If there aren't any, make all
4375 operands positive and negate the whole thing later. */
4376 for (i = 0; i < n_ops && negs[i]; i++)
4377 ;
4378
4379 if (i == n_ops)
4380 {
4381 for (i = 0; i < n_ops; i++)
4382 negs[i] = 0;
4383 negate = 1;
4384 }
4385 else if (i != 0)
4386 {
4387 tem = ops[0], ops[0] = ops[i], ops[i] = tem;
4388 j = negs[0], negs[0] = negs[i], negs[i] = j;
4389 }
4390
4391 /* Now make the result by performing the requested operations. */
4392 result = ops[0];
4393 for (i = 1; i < n_ops; i++)
4394 result = cse_gen_binary (negs[i] ? MINUS : PLUS, mode, result, ops[i]);
4395
4396 return negate ? gen_rtx (NEG, mode, result) : result;
4397 }
4398 \f
4399 /* Make a binary operation by properly ordering the operands and
4400 seeing if the expression folds. */
4401
4402 static rtx
4403 cse_gen_binary (code, mode, op0, op1)
4404 enum rtx_code code;
4405 enum machine_mode mode;
4406 rtx op0, op1;
4407 {
4408 rtx tem;
4409
4410 /* Put complex operands first and constants second if commutative. */
4411 if (GET_RTX_CLASS (code) == 'c'
4412 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
4413 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
4414 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
4415 || (GET_CODE (op0) == SUBREG
4416 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
4417 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
4418 tem = op0, op0 = op1, op1 = tem;
4419
4420 /* If this simplifies, do it. */
4421 tem = simplify_binary_operation (code, mode, op0, op1);
4422
4423 if (tem)
4424 return tem;
4425
4426 /* Handle addition and subtraction of CONST_INT specially. Otherwise,
4427 just form the operation. */
4428
4429 if (code == PLUS && GET_CODE (op1) == CONST_INT
4430 && GET_MODE (op0) != VOIDmode)
4431 return plus_constant (op0, INTVAL (op1));
4432 else if (code == MINUS && GET_CODE (op1) == CONST_INT
4433 && GET_MODE (op0) != VOIDmode)
4434 return plus_constant (op0, - INTVAL (op1));
4435 else
4436 return gen_rtx (code, mode, op0, op1);
4437 }
4438 \f
4439 /* Like simplify_binary_operation except used for relational operators.
4440 MODE is the mode of the operands, not that of the result. If MODE
4441 is VOIDmode, both operands must also be VOIDmode and we compare the
4442 operands in "infinite precision".
4443
4444 If no simplification is possible, this function returns zero. Otherwise,
4445 it returns either const_true_rtx or const0_rtx. */
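/* For example, (gt:SI (const_int 5) (const_int 3)) simplifies to
   const_true_rtx, while (ltu:SI (const_int -1) (const_int 1)) simplifies
   to const0_rtx, because -1 compares as the largest unsigned value.  */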
4446
4447 rtx
4448 simplify_relational_operation (code, mode, op0, op1)
4449 enum rtx_code code;
4450 enum machine_mode mode;
4451 rtx op0, op1;
4452 {
4453 int equal, op0lt, op0ltu, op1lt, op1ltu;
4454 rtx tem;
4455
4456 /* If op0 is a compare, extract the comparison arguments from it. */
4457 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
4458 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4459
4460 /* We can't simplify MODE_CC values since we don't know what the
4461 actual comparison is. */
4462 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC
4463 #ifdef HAVE_cc0
4464 || op0 == cc0_rtx
4465 #endif
4466 )
4467 return 0;
4468
4469 /* For integer comparisons of A and B maybe we can simplify A - B and can
4470 then simplify a comparison of that with zero. If A and B are both either
4471 a register or a CONST_INT, this can't help; testing for these cases will
4472 prevent infinite recursion here and speed things up.
4473
4474 If CODE is an unsigned comparison, then we can never do this optimization,
4475 because it gives an incorrect result if the subtraction wraps around zero.
4476 ANSI C defines unsigned operations such that they never overflow, and
4477    thus such cases cannot be ignored.  */
4478
4479 if (INTEGRAL_MODE_P (mode) && op1 != const0_rtx
4480 && ! ((GET_CODE (op0) == REG || GET_CODE (op0) == CONST_INT)
4481 && (GET_CODE (op1) == REG || GET_CODE (op1) == CONST_INT))
4482 && 0 != (tem = simplify_binary_operation (MINUS, mode, op0, op1))
4483 && code != GTU && code != GEU && code != LTU && code != LEU)
4484 return simplify_relational_operation (signed_condition (code),
4485 mode, tem, const0_rtx);
4486
4487 /* For non-IEEE floating-point, if the two operands are equal, we know the
4488 result. */
4489 if (rtx_equal_p (op0, op1)
4490 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4491 || ! FLOAT_MODE_P (GET_MODE (op0)) || flag_fast_math))
4492 equal = 1, op0lt = 0, op0ltu = 0, op1lt = 0, op1ltu = 0;
4493
4494 /* If the operands are floating-point constants, see if we can fold
4495 the result. */
4496 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4497 else if (GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
4498 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
4499 {
4500 REAL_VALUE_TYPE d0, d1;
4501 jmp_buf handler;
4502
4503 if (setjmp (handler))
4504 return 0;
4505
4506 set_float_handler (handler);
4507 REAL_VALUE_FROM_CONST_DOUBLE (d0, op0);
4508 REAL_VALUE_FROM_CONST_DOUBLE (d1, op1);
4509 equal = REAL_VALUES_EQUAL (d0, d1);
4510 op0lt = op0ltu = REAL_VALUES_LESS (d0, d1);
4511 op1lt = op1ltu = REAL_VALUES_LESS (d1, d0);
4512 set_float_handler (NULL_PTR);
4513 }
4514 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4515
4516 /* Otherwise, see if the operands are both integers. */
4517 else if ((GET_MODE_CLASS (mode) == MODE_INT || mode == VOIDmode)
4518 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
4519 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
4520 {
4521 int width = GET_MODE_BITSIZE (mode);
4522 HOST_WIDE_INT l0s, h0s, l1s, h1s;
4523 unsigned HOST_WIDE_INT l0u, h0u, l1u, h1u;
4524
4525 /* Get the two words comprising each integer constant. */
4526 if (GET_CODE (op0) == CONST_DOUBLE)
4527 {
4528 l0u = l0s = CONST_DOUBLE_LOW (op0);
4529 h0u = h0s = CONST_DOUBLE_HIGH (op0);
4530 }
4531 else
4532 {
4533 l0u = l0s = INTVAL (op0);
4534 h0u = 0, h0s = l0s < 0 ? -1 : 0;
4535 }
4536
4537 if (GET_CODE (op1) == CONST_DOUBLE)
4538 {
4539 l1u = l1s = CONST_DOUBLE_LOW (op1);
4540 h1u = h1s = CONST_DOUBLE_HIGH (op1);
4541 }
4542 else
4543 {
4544 l1u = l1s = INTVAL (op1);
4545 h1u = 0, h1s = l1s < 0 ? -1 : 0;
4546 }
4547
4548 /* If WIDTH is nonzero and smaller than HOST_BITS_PER_WIDE_INT,
4549 we have to sign or zero-extend the values. */
4550 if (width != 0 && width <= HOST_BITS_PER_WIDE_INT)
4551 h0u = h1u = 0, h0s = l0s < 0 ? -1 : 0, h1s = l1s < 0 ? -1 : 0;
4552
4553 if (width != 0 && width < HOST_BITS_PER_WIDE_INT)
4554 {
4555 l0u &= ((HOST_WIDE_INT) 1 << width) - 1;
4556 l1u &= ((HOST_WIDE_INT) 1 << width) - 1;
4557
4558 if (l0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4559 l0s |= ((HOST_WIDE_INT) (-1) << width);
4560
4561 if (l1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4562 l1s |= ((HOST_WIDE_INT) (-1) << width);
4563 }
4564
4565 equal = (h0u == h1u && l0u == l1u);
4566 op0lt = (h0s < h1s || (h0s == h1s && l0s < l1s));
4567 op1lt = (h1s < h0s || (h1s == h0s && l1s < l0s));
4568 op0ltu = (h0u < h1u || (h0u == h1u && l0u < l1u));
4569 op1ltu = (h1u < h0u || (h1u == h0u && l1u < l0u));
4570 }
4571
4572 /* Otherwise, there are some code-specific tests we can make. */
4573 else
4574 {
4575 switch (code)
4576 {
4577 case EQ:
4578 /* References to the frame plus a constant or labels cannot
4579 be zero, but a SYMBOL_REF can due to #pragma weak. */
4580 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4581 || GET_CODE (op0) == LABEL_REF)
4582 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4583 /* On some machines, the ap reg can be 0 sometimes. */
4584 && op0 != arg_pointer_rtx
4585 #endif
4586 )
4587 return const0_rtx;
4588 break;
4589
4590 case NE:
4591 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4592 || GET_CODE (op0) == LABEL_REF)
4593 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4594 && op0 != arg_pointer_rtx
4595 #endif
4596 )
4597 return const_true_rtx;
4598 break;
4599
4600 case GEU:
4601 /* Unsigned values are never negative. */
4602 if (op1 == const0_rtx)
4603 return const_true_rtx;
4604 break;
4605
4606 case LTU:
4607 if (op1 == const0_rtx)
4608 return const0_rtx;
4609 break;
4610
4611 case LEU:
4612 /* Unsigned values are never greater than the largest
4613 unsigned value. */
4614 if (GET_CODE (op1) == CONST_INT
4615 && INTVAL (op1) == GET_MODE_MASK (mode)
4616 && INTEGRAL_MODE_P (mode))
4617 return const_true_rtx;
4618 break;
4619
4620 case GTU:
4621 if (GET_CODE (op1) == CONST_INT
4622 && INTVAL (op1) == GET_MODE_MASK (mode)
4623 && INTEGRAL_MODE_P (mode))
4624 return const0_rtx;
4625 break;
4626 }
4627
4628 return 0;
4629 }
4630
4631 /* If we reach here, EQUAL, OP0LT, OP0LTU, OP1LT, and OP1LTU are set
4632 as appropriate. */
4633 switch (code)
4634 {
4635 case EQ:
4636 return equal ? const_true_rtx : const0_rtx;
4637 case NE:
4638 return ! equal ? const_true_rtx : const0_rtx;
4639 case LT:
4640 return op0lt ? const_true_rtx : const0_rtx;
4641 case GT:
4642 return op1lt ? const_true_rtx : const0_rtx;
4643 case LTU:
4644 return op0ltu ? const_true_rtx : const0_rtx;
4645 case GTU:
4646 return op1ltu ? const_true_rtx : const0_rtx;
4647 case LE:
4648 return equal || op0lt ? const_true_rtx : const0_rtx;
4649 case GE:
4650 return equal || op1lt ? const_true_rtx : const0_rtx;
4651 case LEU:
4652 return equal || op0ltu ? const_true_rtx : const0_rtx;
4653 case GEU:
4654 return equal || op1ltu ? const_true_rtx : const0_rtx;
4655 }
4656
4657 abort ();
4658 }
4659 \f
4660 /* Simplify CODE, an operation with result mode MODE and three operands,
4661 OP0, OP1, and OP2. OP0_MODE was the mode of OP0 before it became
4662    a constant.  Return 0 if no simplification is possible.  */
4663
4664 rtx
4665 simplify_ternary_operation (code, mode, op0_mode, op0, op1, op2)
4666 enum rtx_code code;
4667 enum machine_mode mode, op0_mode;
4668 rtx op0, op1, op2;
4669 {
4670 int width = GET_MODE_BITSIZE (mode);
4671
4672 /* VOIDmode means "infinite" precision. */
4673 if (width == 0)
4674 width = HOST_BITS_PER_WIDE_INT;
4675
4676 switch (code)
4677 {
4678 case SIGN_EXTRACT:
4679 case ZERO_EXTRACT:
4680 if (GET_CODE (op0) == CONST_INT
4681 && GET_CODE (op1) == CONST_INT
4682 && GET_CODE (op2) == CONST_INT
4683 && INTVAL (op1) + INTVAL (op2) <= GET_MODE_BITSIZE (op0_mode)
4684 && width <= HOST_BITS_PER_WIDE_INT)
4685 {
4686 /* Extracting a bit-field from a constant */
4687 HOST_WIDE_INT val = INTVAL (op0);
4688
4689 if (BITS_BIG_ENDIAN)
4690 val >>= (GET_MODE_BITSIZE (op0_mode)
4691 - INTVAL (op2) - INTVAL (op1));
4692 else
4693 val >>= INTVAL (op2);
4694
4695 if (HOST_BITS_PER_WIDE_INT != INTVAL (op1))
4696 {
4697 /* First zero-extend. */
4698 val &= ((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1;
4699 /* If desired, propagate sign bit. */
4700 if (code == SIGN_EXTRACT
4701 && (val & ((HOST_WIDE_INT) 1 << (INTVAL (op1) - 1))))
4702 val |= ~ (((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1);
4703 }
4704
4705 /* Clear the bits that don't belong in our mode,
4706 unless they and our sign bit are all one.
4707 So we get either a reasonable negative value or a reasonable
4708 unsigned value for this mode. */
4709 if (width < HOST_BITS_PER_WIDE_INT
4710 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4711 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4712 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4713
4714 return GEN_INT (val);
4715 }
4716 break;
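      /* Worked example with BITS_BIG_ENDIAN clear: extracting 4 bits at
	 position 4 from the constant 0xab shifts right by 4 and masks,
	 giving 0xa.  ZERO_EXTRACT returns 10; SIGN_EXTRACT sees the top
	 bit of the field set, propagates it, and returns -6.  */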
4717
4718 case IF_THEN_ELSE:
4719 if (GET_CODE (op0) == CONST_INT)
4720 return op0 != const0_rtx ? op1 : op2;
4721 break;
4722
4723 default:
4724 abort ();
4725 }
4726
4727 return 0;
4728 }
4729 \f
4730 /* If X is a nontrivial arithmetic operation on an argument
4731 for which a constant value can be determined, return
4732 the result of operating on that value, as a constant.
4733 Otherwise, return X, possibly with one or more operands
4734 modified by recursive calls to this function.
4735
4736 If X is a register whose contents are known, we do NOT
4737 return those contents here. equiv_constant is called to
4738 perform that task.
4739
4740 INSN is the insn that we may be modifying. If it is 0, make a copy
4741 of X before modifying it. */
4742
4743 static rtx
4744 fold_rtx (x, insn)
4745 rtx x;
4746 rtx insn;
4747 {
4748 register enum rtx_code code;
4749 register enum machine_mode mode;
4750 register char *fmt;
4751 register int i;
4752 rtx new = 0;
4753 int copied = 0;
4754 int must_swap = 0;
4755
4756 /* Folded equivalents of first two operands of X. */
4757 rtx folded_arg0;
4758 rtx folded_arg1;
4759
4760 /* Constant equivalents of first three operands of X;
4761 0 when no such equivalent is known. */
4762 rtx const_arg0;
4763 rtx const_arg1;
4764 rtx const_arg2;
4765
4766 /* The mode of the first operand of X. We need this for sign and zero
4767 extends. */
4768 enum machine_mode mode_arg0;
4769
4770 if (x == 0)
4771 return x;
4772
4773 mode = GET_MODE (x);
4774 code = GET_CODE (x);
4775 switch (code)
4776 {
4777 case CONST:
4778 case CONST_INT:
4779 case CONST_DOUBLE:
4780 case SYMBOL_REF:
4781 case LABEL_REF:
4782 case REG:
4783 /* No use simplifying an EXPR_LIST
4784 since they are used only for lists of args
4785 in a function call's REG_EQUAL note. */
4786 case EXPR_LIST:
4787 return x;
4788
4789 #ifdef HAVE_cc0
4790 case CC0:
4791 return prev_insn_cc0;
4792 #endif
4793
4794 case PC:
4795 /* If the next insn is a CODE_LABEL followed by a jump table,
4796 PC's value is a LABEL_REF pointing to that label. That
4797 	 lets us fold switch statements on the VAX.  */
4798 if (insn && GET_CODE (insn) == JUMP_INSN)
4799 {
4800 rtx next = next_nonnote_insn (insn);
4801
4802 if (next && GET_CODE (next) == CODE_LABEL
4803 && NEXT_INSN (next) != 0
4804 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
4805 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
4806 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
4807 return gen_rtx (LABEL_REF, Pmode, next);
4808 }
4809 break;
4810
4811 case SUBREG:
4812 /* See if we previously assigned a constant value to this SUBREG. */
4813 if ((new = lookup_as_function (x, CONST_INT)) != 0
4814 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
4815 return new;
4816
4817 /* If this is a paradoxical SUBREG, we have no idea what value the
4818 extra bits would have. However, if the operand is equivalent
4819 to a SUBREG whose operand is the same as our mode, and all the
4820 modes are within a word, we can just use the inner operand
4821 because these SUBREGs just say how to treat the register.
4822
4823 Similarly if we find an integer constant. */
4824
4825 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4826 {
4827 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
4828 struct table_elt *elt;
4829
4830 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
4831 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
4832 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
4833 imode)) != 0)
4834 for (elt = elt->first_same_value;
4835 elt; elt = elt->next_same_value)
4836 {
4837 if (CONSTANT_P (elt->exp)
4838 && GET_MODE (elt->exp) == VOIDmode)
4839 return elt->exp;
4840
4841 if (GET_CODE (elt->exp) == SUBREG
4842 && GET_MODE (SUBREG_REG (elt->exp)) == mode
4843 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
4844 return copy_rtx (SUBREG_REG (elt->exp));
4845 }
4846
4847 return x;
4848 }
4849
4850 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
4851 We might be able to if the SUBREG is extracting a single word in an
4852 integral mode or extracting the low part. */
4853
4854 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
4855 const_arg0 = equiv_constant (folded_arg0);
4856 if (const_arg0)
4857 folded_arg0 = const_arg0;
4858
4859 if (folded_arg0 != SUBREG_REG (x))
4860 {
4861 new = 0;
4862
4863 if (GET_MODE_CLASS (mode) == MODE_INT
4864 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
4865 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
4866 new = operand_subword (folded_arg0, SUBREG_WORD (x), 0,
4867 GET_MODE (SUBREG_REG (x)));
4868 if (new == 0 && subreg_lowpart_p (x))
4869 new = gen_lowpart_if_possible (mode, folded_arg0);
4870 if (new)
4871 return new;
4872 }
4873
4874 /* If this is a narrowing SUBREG and our operand is a REG, see if
4875 we can find an equivalence for REG that is an arithmetic operation
4876 in a wider mode where both operands are paradoxical SUBREGs
4877 from objects of our result mode. In that case, we couldn't report
4878 an equivalent value for that operation, since we don't know what the
4879 extra bits will be. But we can find an equivalence for this SUBREG
4880        by folding that operation in the narrow mode.  This allows us to
4881 fold arithmetic in narrow modes when the machine only supports
4882 word-sized arithmetic.
4883
4884 Also look for a case where we have a SUBREG whose operand is the
4885 same as our result. If both modes are smaller than a word, we
4886 are simply interpreting a register in different modes and we
4887 can use the inner value. */
4888
4889 if (GET_CODE (folded_arg0) == REG
4890 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
4891 && subreg_lowpart_p (x))
4892 {
4893 struct table_elt *elt;
4894
4895 /* We can use HASH here since we know that canon_hash won't be
4896 called. */
4897 elt = lookup (folded_arg0,
4898 HASH (folded_arg0, GET_MODE (folded_arg0)),
4899 GET_MODE (folded_arg0));
4900
4901 if (elt)
4902 elt = elt->first_same_value;
4903
4904 for (; elt; elt = elt->next_same_value)
4905 {
4906 enum rtx_code eltcode = GET_CODE (elt->exp);
4907
4908 /* Just check for unary and binary operations. */
4909 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
4910 && GET_CODE (elt->exp) != SIGN_EXTEND
4911 && GET_CODE (elt->exp) != ZERO_EXTEND
4912 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
4913 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
4914 {
4915 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
4916
4917 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
4918 op0 = fold_rtx (op0, NULL_RTX);
4919
4920 op0 = equiv_constant (op0);
4921 if (op0)
4922 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
4923 op0, mode);
4924 }
4925 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
4926 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
4927 && eltcode != DIV && eltcode != MOD
4928 && eltcode != UDIV && eltcode != UMOD
4929 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
4930 && eltcode != ROTATE && eltcode != ROTATERT
4931 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
4932 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
4933 == mode))
4934 || CONSTANT_P (XEXP (elt->exp, 0)))
4935 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
4936 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
4937 == mode))
4938 || CONSTANT_P (XEXP (elt->exp, 1))))
4939 {
4940 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
4941 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
4942
4943 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
4944 op0 = fold_rtx (op0, NULL_RTX);
4945
4946 if (op0)
4947 op0 = equiv_constant (op0);
4948
4949 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
4950 op1 = fold_rtx (op1, NULL_RTX);
4951
4952 if (op1)
4953 op1 = equiv_constant (op1);
4954
4955 /* If we are looking for the low SImode part of
4956 (ashift:DI c (const_int 32)), it doesn't work
4957 to compute that in SImode, because a 32-bit shift
4958 in SImode is unpredictable. We know the value is 0. */
4959 if (op0 && op1
4960 && GET_CODE (elt->exp) == ASHIFT
4961 && GET_CODE (op1) == CONST_INT
4962 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
4963 {
4964 if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
4965
4966 /* If the count fits in the inner mode's width,
4967 but exceeds the outer mode's width,
4968 the value will get truncated to 0
4969 by the subreg. */
4970 new = const0_rtx;
4971 else
4972 /* If the count exceeds even the inner mode's width,
4973 don't fold this expression. */
4974 new = 0;
4975 }
4976 else if (op0 && op1)
4977 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
4978 op0, op1);
4979 }
4980
4981 else if (GET_CODE (elt->exp) == SUBREG
4982 && GET_MODE (SUBREG_REG (elt->exp)) == mode
4983 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
4984 <= UNITS_PER_WORD)
4985 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
4986 new = copy_rtx (SUBREG_REG (elt->exp));
4987
4988 if (new)
4989 return new;
4990 }
4991 }
4992
4993 return x;
4994
4995 case NOT:
4996 case NEG:
4997 /* If we have (NOT Y), see if Y is known to be (NOT Z).
4998 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
4999 new = lookup_as_function (XEXP (x, 0), code);
5000 if (new)
5001 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
5002 break;
5003
5004 case MEM:
5005 /* If we are not actually processing an insn, don't try to find the
5006 best address. Not only don't we care, but we could modify the
5007 MEM in an invalid way since we have no insn to validate against. */
5008 if (insn != 0)
5009 find_best_addr (insn, &XEXP (x, 0));
5010
5011 {
5012 /* Even if we don't fold in the insn itself,
5013 we can safely do so here, in hopes of getting a constant. */
5014 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
5015 rtx base = 0;
5016 HOST_WIDE_INT offset = 0;
5017
5018 if (GET_CODE (addr) == REG
5019 && REGNO_QTY_VALID_P (REGNO (addr))
5020 && GET_MODE (addr) == qty_mode[reg_qty[REGNO (addr)]]
5021 && qty_const[reg_qty[REGNO (addr)]] != 0)
5022 addr = qty_const[reg_qty[REGNO (addr)]];
5023
5024 /* If address is constant, split it into a base and integer offset. */
5025 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
5026 base = addr;
5027 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
5028 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
5029 {
5030 base = XEXP (XEXP (addr, 0), 0);
5031 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
5032 }
5033 else if (GET_CODE (addr) == LO_SUM
5034 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
5035 base = XEXP (addr, 1);
5036
5037 /* If this is a constant pool reference, we can fold it into its
5038 constant to allow better value tracking. */
5039 if (base && GET_CODE (base) == SYMBOL_REF
5040 && CONSTANT_POOL_ADDRESS_P (base))
5041 {
5042 rtx constant = get_pool_constant (base);
5043 enum machine_mode const_mode = get_pool_mode (base);
5044 rtx new;
5045
5046 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
5047 constant_pool_entries_cost = COST (constant);
5048
5049 /* If we are loading the full constant, we have an equivalence. */
5050 if (offset == 0 && mode == const_mode)
5051 return constant;
5052
5053 /* If this actually isn't a constant (weird!), we can't do
5054 anything. Otherwise, handle the two most common cases:
5055 extracting a word from a multi-word constant, and extracting
5056 the low-order bits. Other cases don't seem common enough to
5057 worry about. */
5058 if (! CONSTANT_P (constant))
5059 return x;
5060
5061 if (GET_MODE_CLASS (mode) == MODE_INT
5062 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
5063 && offset % UNITS_PER_WORD == 0
5064 && (new = operand_subword (constant,
5065 offset / UNITS_PER_WORD,
5066 0, const_mode)) != 0)
5067 return new;
5068
5069 if (((BYTES_BIG_ENDIAN
5070 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
5071 || (! BYTES_BIG_ENDIAN && offset == 0))
5072 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
5073 return new;
5074 }
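	    /* E.g. on a 32-bit word target, an SImode reference at offset 4
	       into a DImode pool constant folds to word 1 of that constant
	       via operand_subword, so the value can be tracked without a
	       memory reference.  */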
5075
5076 /* If this is a reference to a label at a known position in a jump
5077 table, we also know its value. */
5078 if (base && GET_CODE (base) == LABEL_REF)
5079 {
5080 rtx label = XEXP (base, 0);
5081 rtx table_insn = NEXT_INSN (label);
5082
5083 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5084 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
5085 {
5086 rtx table = PATTERN (table_insn);
5087
5088 if (offset >= 0
5089 && (offset / GET_MODE_SIZE (GET_MODE (table))
5090 < XVECLEN (table, 0)))
5091 return XVECEXP (table, 0,
5092 offset / GET_MODE_SIZE (GET_MODE (table)));
5093 }
5094 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5095 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
5096 {
5097 rtx table = PATTERN (table_insn);
5098
5099 if (offset >= 0
5100 && (offset / GET_MODE_SIZE (GET_MODE (table))
5101 < XVECLEN (table, 1)))
5102 {
5103 offset /= GET_MODE_SIZE (GET_MODE (table));
5104 new = gen_rtx (MINUS, Pmode, XVECEXP (table, 1, offset),
5105 XEXP (table, 0));
5106
5107 if (GET_MODE (table) != Pmode)
5108 new = gen_rtx (TRUNCATE, GET_MODE (table), new);
5109
5110 /* Indicate this is a constant. This isn't a
5111 valid form of CONST, but it will only be used
5112 to fold the next insns and then discarded, so
5113 it should be safe. */
5114 return gen_rtx (CONST, GET_MODE (new), new);
5115 }
5116 }
5117 }
5118
5119 return x;
5120 }
5121 }
5122
5123 const_arg0 = 0;
5124 const_arg1 = 0;
5125 const_arg2 = 0;
5126 mode_arg0 = VOIDmode;
5127
5128 /* Try folding our operands.
5129 Then see which ones have constant values known. */
5130
5131 fmt = GET_RTX_FORMAT (code);
5132 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5133 if (fmt[i] == 'e')
5134 {
5135 rtx arg = XEXP (x, i);
5136 rtx folded_arg = arg, const_arg = 0;
5137 enum machine_mode mode_arg = GET_MODE (arg);
5138 rtx cheap_arg, expensive_arg;
5139 rtx replacements[2];
5140 int j;
5141
5142 /* Most arguments are cheap, so handle them specially. */
5143 switch (GET_CODE (arg))
5144 {
5145 case REG:
5146 /* This is the same as calling equiv_constant; it is duplicated
5147 here for speed. */
5148 if (REGNO_QTY_VALID_P (REGNO (arg))
5149 && qty_const[reg_qty[REGNO (arg)]] != 0
5150 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != REG
5151 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != PLUS)
5152 const_arg
5153 = gen_lowpart_if_possible (GET_MODE (arg),
5154 qty_const[reg_qty[REGNO (arg)]]);
5155 break;
5156
5157 case CONST:
5158 case CONST_INT:
5159 case SYMBOL_REF:
5160 case LABEL_REF:
5161 case CONST_DOUBLE:
5162 const_arg = arg;
5163 break;
5164
5165 #ifdef HAVE_cc0
5166 case CC0:
5167 folded_arg = prev_insn_cc0;
5168 mode_arg = prev_insn_cc0_mode;
5169 const_arg = equiv_constant (folded_arg);
5170 break;
5171 #endif
5172
5173 default:
5174 folded_arg = fold_rtx (arg, insn);
5175 const_arg = equiv_constant (folded_arg);
5176 }
5177
5178 /* For the first three operands, see if the operand
5179 is constant or equivalent to a constant. */
5180 switch (i)
5181 {
5182 case 0:
5183 folded_arg0 = folded_arg;
5184 const_arg0 = const_arg;
5185 mode_arg0 = mode_arg;
5186 break;
5187 case 1:
5188 folded_arg1 = folded_arg;
5189 const_arg1 = const_arg;
5190 break;
5191 case 2:
5192 const_arg2 = const_arg;
5193 break;
5194 }
5195
5196 /* Pick the least expensive of the folded argument and an
5197 equivalent constant argument. */
5198 if (const_arg == 0 || const_arg == folded_arg
5199 || COST (const_arg) > COST (folded_arg))
5200 cheap_arg = folded_arg, expensive_arg = const_arg;
5201 else
5202 cheap_arg = const_arg, expensive_arg = folded_arg;
5203
5204 /* Try to replace the operand with the cheapest of the two
5205 possibilities. If it doesn't work and this is either of the first
5206 two operands of a commutative operation, try swapping them.
5207 If THAT fails, try the more expensive, provided it is cheaper
5208 than what is already there. */
5209
5210 if (cheap_arg == XEXP (x, i))
5211 continue;
5212
5213 if (insn == 0 && ! copied)
5214 {
5215 x = copy_rtx (x);
5216 copied = 1;
5217 }
5218
5219 replacements[0] = cheap_arg, replacements[1] = expensive_arg;
5220 for (j = 0;
5221 j < 2 && replacements[j]
5222 && COST (replacements[j]) < COST (XEXP (x, i));
5223 j++)
5224 {
5225 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
5226 break;
5227
5228 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c')
5229 {
5230 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
5231 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
5232
5233 if (apply_change_group ())
5234 {
5235 /* Swap them back to be invalid so that this loop can
5236 continue and flag them to be swapped back later. */
5237 rtx tem;
5238
5239 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
5240 XEXP (x, 1) = tem;
5241 must_swap = 1;
5242 break;
5243 }
5244 }
5245 }
5246 }
5247
5248 else if (fmt[i] == 'E')
5249 /* Don't try to fold inside of a vector of expressions.
5250 Doing nothing is harmless. */
5251 ;
5252
5253 /* If a commutative operation, place a constant integer as the second
5254 operand unless the first operand is also a constant integer. Otherwise,
5255 place any constant second unless the first operand is also a constant. */
5256
5257 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
5258 {
5259 if (must_swap || (const_arg0
5260 && (const_arg1 == 0
5261 || (GET_CODE (const_arg0) == CONST_INT
5262 && GET_CODE (const_arg1) != CONST_INT))))
5263 {
5264 register rtx tem = XEXP (x, 0);
5265
5266 if (insn == 0 && ! copied)
5267 {
5268 x = copy_rtx (x);
5269 copied = 1;
5270 }
5271
5272 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
5273 validate_change (insn, &XEXP (x, 1), tem, 1);
5274 if (apply_change_group ())
5275 {
5276 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
5277 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
5278 }
5279 }
5280 }
5281
5282 /* If X is an arithmetic operation, see if we can simplify it. */
5283
5284 switch (GET_RTX_CLASS (code))
5285 {
5286 case '1':
5287 {
5288 int is_const = 0;
5289
5290 /* We can't simplify extension ops unless we know the
5291 original mode. */
5292 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
5293 && mode_arg0 == VOIDmode)
5294 break;
5295
5296 /* If we had a CONST, strip it off and put it back later if we
5297 fold. */
5298 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
5299 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
5300
5301 new = simplify_unary_operation (code, mode,
5302 const_arg0 ? const_arg0 : folded_arg0,
5303 mode_arg0);
5304 if (new != 0 && is_const)
5305 new = gen_rtx (CONST, mode, new);
5306 }
5307 break;
5308
5309 case '<':
5310 /* See what items are actually being compared and set FOLDED_ARG[01]
5311 to those values and CODE to the actual comparison code. If any are
5312 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
5313 do anything if both operands are already known to be constant. */
5314
5315 if (const_arg0 == 0 || const_arg1 == 0)
5316 {
5317 struct table_elt *p0, *p1;
5318 rtx true = const_true_rtx, false = const0_rtx;
5319 enum machine_mode mode_arg1;
5320
5321 #ifdef FLOAT_STORE_FLAG_VALUE
5322 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5323 {
5324 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5325 mode);
5326 false = CONST0_RTX (mode);
5327 }
5328 #endif
5329
5330 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
5331 &mode_arg0, &mode_arg1);
5332 const_arg0 = equiv_constant (folded_arg0);
5333 const_arg1 = equiv_constant (folded_arg1);
5334
5335 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
5336 what kinds of things are being compared, so we can't do
5337 anything with this comparison. */
5338
5339 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
5340 break;
5341
5342 /* If we do not now have two constants being compared, see if we
5343 can nevertheless deduce some things about the comparison. */
5344 if (const_arg0 == 0 || const_arg1 == 0)
5345 {
5346 	      /* Is FOLDED_ARG0 the frame pointer plus a constant?  Or a
5347 		 non-explicit constant?  These aren't zero, but we don't know their sign.  */
5348 if (const_arg1 == const0_rtx
5349 && (NONZERO_BASE_PLUS_P (folded_arg0)
5350 #if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
5351 come out as 0. */
5352 || GET_CODE (folded_arg0) == SYMBOL_REF
5353 #endif
5354 || GET_CODE (folded_arg0) == LABEL_REF
5355 || GET_CODE (folded_arg0) == CONST))
5356 {
5357 if (code == EQ)
5358 return false;
5359 else if (code == NE)
5360 return true;
5361 }
5362
5363 /* See if the two operands are the same. We don't do this
5364 for IEEE floating-point since we can't assume x == x
5365 since x might be a NaN. */
5366
5367 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5368 || ! FLOAT_MODE_P (mode_arg0) || flag_fast_math)
5369 && (folded_arg0 == folded_arg1
5370 || (GET_CODE (folded_arg0) == REG
5371 && GET_CODE (folded_arg1) == REG
5372 && (reg_qty[REGNO (folded_arg0)]
5373 == reg_qty[REGNO (folded_arg1)]))
5374 || ((p0 = lookup (folded_arg0,
5375 (safe_hash (folded_arg0, mode_arg0)
5376 % NBUCKETS), mode_arg0))
5377 && (p1 = lookup (folded_arg1,
5378 (safe_hash (folded_arg1, mode_arg0)
5379 % NBUCKETS), mode_arg0))
5380 && p0->first_same_value == p1->first_same_value)))
5381 return ((code == EQ || code == LE || code == GE
5382 || code == LEU || code == GEU)
5383 ? true : false);
5384
5385 /* If FOLDED_ARG0 is a register, see if the comparison we are
5386 doing now is either the same as we did before or the reverse
5387 (we only check the reverse if not floating-point). */
5388 else if (GET_CODE (folded_arg0) == REG)
5389 {
5390 int qty = reg_qty[REGNO (folded_arg0)];
5391
5392 if (REGNO_QTY_VALID_P (REGNO (folded_arg0))
5393 && (comparison_dominates_p (qty_comparison_code[qty], code)
5394 || (comparison_dominates_p (qty_comparison_code[qty],
5395 reverse_condition (code))
5396 && ! FLOAT_MODE_P (mode_arg0)))
5397 && (rtx_equal_p (qty_comparison_const[qty], folded_arg1)
5398 || (const_arg1
5399 && rtx_equal_p (qty_comparison_const[qty],
5400 const_arg1))
5401 || (GET_CODE (folded_arg1) == REG
5402 && (reg_qty[REGNO (folded_arg1)]
5403 == qty_comparison_qty[qty]))))
5404 return (comparison_dominates_p (qty_comparison_code[qty],
5405 code)
5406 ? true : false);
5407 }
5408 }
5409 }
5410
5411 /* If we are comparing against zero, see if the first operand is
5412 equivalent to an IOR with a constant. If so, we may be able to
5413 determine the result of this comparison. */
5414
5415 if (const_arg1 == const0_rtx)
5416 {
5417 rtx y = lookup_as_function (folded_arg0, IOR);
5418 rtx inner_const;
5419
5420 if (y != 0
5421 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
5422 && GET_CODE (inner_const) == CONST_INT
5423 && INTVAL (inner_const) != 0)
5424 {
5425 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
5426 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
5427 && (INTVAL (inner_const)
5428 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
5429 rtx true = const_true_rtx, false = const0_rtx;
5430
5431 #ifdef FLOAT_STORE_FLAG_VALUE
5432 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5433 {
5434 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5435 mode);
5436 false = CONST0_RTX (mode);
5437 }
5438 #endif
5439
5440 switch (code)
5441 {
5442 case EQ:
5443 return false;
5444 case NE:
5445 return true;
5446 case LT: case LE:
5447 if (has_sign)
5448 return true;
5449 break;
5450 case GT: case GE:
5451 if (has_sign)
5452 return false;
5453 break;
5454 }
5455 }
5456 }
5457
5458 new = simplify_relational_operation (code, mode_arg0,
5459 const_arg0 ? const_arg0 : folded_arg0,
5460 const_arg1 ? const_arg1 : folded_arg1);
5461 #ifdef FLOAT_STORE_FLAG_VALUE
5462 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
5463 new = ((new == const0_rtx) ? CONST0_RTX (mode)
5464 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE, mode));
5465 #endif
5466 break;
5467
5468 case '2':
5469 case 'c':
5470 switch (code)
5471 {
5472 case PLUS:
5473 /* If the second operand is a LABEL_REF, see if the first is a MINUS
5474 with that LABEL_REF as its second operand. If so, the result is
5475 the first operand of that MINUS. This handles switches with an
5476 ADDR_DIFF_VEC table. */
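	  /* For example (illustrative): here
	     (plus (minus (reg) (label_ref L)) (label_ref L))
	     folds to just (reg).  */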
5477 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
5478 {
5479 rtx y
5480 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
5481 : lookup_as_function (folded_arg0, MINUS);
5482
5483 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5484 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
5485 return XEXP (y, 0);
5486
5487 /* Now try for a CONST of a MINUS like the above. */
5488 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
5489 : lookup_as_function (folded_arg0, CONST))) != 0
5490 && GET_CODE (XEXP (y, 0)) == MINUS
5491 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5492 && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg1, 0))
5493 return XEXP (XEXP (y, 0), 0);
5494 }
5495
5496 /* Likewise if the operands are in the other order. */
5497 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
5498 {
5499 rtx y
5500 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
5501 : lookup_as_function (folded_arg1, MINUS);
5502
5503 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5504 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
5505 return XEXP (y, 0);
5506
5507 /* Now try for a CONST of a MINUS like the above. */
5508 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
5509 : lookup_as_function (folded_arg1, CONST))) != 0
5510 && GET_CODE (XEXP (y, 0)) == MINUS
5511 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5512 && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg0, 0))
5513 return XEXP (XEXP (y, 0), 0);
5514 }
5515
5516 /* If second operand is a register equivalent to a negative
5517 CONST_INT, see if we can find a register equivalent to the
5518 positive constant. Make a MINUS if so. Don't do this for
5519 a negative constant since we might then alternate between
5520 choosing positive and negative constants. Having the positive
5521 constant previously-used is the more common case. */
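	  /* For example (illustrative): given (plus (reg 100) (reg 105))
	     where reg 105 is equivalent to (const_int -4) and some other
	     register is already known to hold (const_int 4), the sum is
	     rewritten as (minus (reg 100) <that register>).  */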
5522 if (const_arg1 && GET_CODE (const_arg1) == CONST_INT
5523 && INTVAL (const_arg1) < 0 && GET_CODE (folded_arg1) == REG)
5524 {
5525 rtx new_const = GEN_INT (- INTVAL (const_arg1));
5526 struct table_elt *p
5527 = lookup (new_const, safe_hash (new_const, mode) % NBUCKETS,
5528 mode);
5529
5530 if (p)
5531 for (p = p->first_same_value; p; p = p->next_same_value)
5532 if (GET_CODE (p->exp) == REG)
5533 return cse_gen_binary (MINUS, mode, folded_arg0,
5534 canon_reg (p->exp, NULL_RTX));
5535 }
5536 goto from_plus;
5537
5538 case MINUS:
5539 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
5540 If so, produce (PLUS Z C2-C). */
5541 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
5542 {
5543 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
5544 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
5545 return fold_rtx (plus_constant (copy_rtx (y),
5546 -INTVAL (const_arg1)),
5547 NULL_RTX);
5548 }
5549
5550 /* ... fall through ... */
5551
5552 from_plus:
5553 case SMIN: case SMAX: case UMIN: case UMAX:
5554 case IOR: case AND: case XOR:
5555 case MULT: case DIV: case UDIV:
5556 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
5557 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
5558 is known to be of similar form, we may be able to replace the
5559 operation with a combined operation. This may eliminate the
5560 intermediate operation if every use is simplified in this way.
5561 Note that the similar optimization done by combine.c only works
5562 if the intermediate operation's result has only one reference. */
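	  /* For example (illustrative): if (reg 100) is known to be
	     (ashift X 2), then (ashift (reg 100) (const_int 3)) can be
	     rewritten as (ashift X 5), the two counts being composed
	     with PLUS.  */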
5563
5564 if (GET_CODE (folded_arg0) == REG
5565 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
5566 {
5567 int is_shift
5568 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
5569 rtx y = lookup_as_function (folded_arg0, code);
5570 rtx inner_const;
5571 enum rtx_code associate_code;
5572 rtx new_const;
5573
5574 if (y == 0
5575 || 0 == (inner_const
5576 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
5577 || GET_CODE (inner_const) != CONST_INT
5578 /* If we have compiled a statement like
5579 "if (x == (x & mask1))", and now are looking at
5580 "x & mask2", we will have a case where the first operand
5581 of Y is the same as our first operand. Unless we detect
5582 this case, an infinite loop will result. */
5583 || XEXP (y, 0) == folded_arg0)
5584 break;
5585
5586 /* Don't associate these operations if they are a PLUS with the
5587 same constant and it is a power of two. These might be doable
5588 with a pre- or post-increment. Similarly for two subtracts of
5589 identical powers of two with post-decrement. */
5590
5591 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
5592 && (0
5593 #if defined(HAVE_PRE_INCREMENT) || defined(HAVE_POST_INCREMENT)
5594 || exact_log2 (INTVAL (const_arg1)) >= 0
5595 #endif
5596 #if defined(HAVE_PRE_DECREMENT) || defined(HAVE_POST_DECREMENT)
5597 || exact_log2 (- INTVAL (const_arg1)) >= 0
5598 #endif
5599 ))
5600 break;
5601
5602 /* Compute the code used to compose the constants. For example,
5603 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
5604
5605 associate_code
5606 = (code == MULT || code == DIV || code == UDIV ? MULT
5607 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
5608
5609 new_const = simplify_binary_operation (associate_code, mode,
5610 const_arg1, inner_const);
5611
5612 if (new_const == 0)
5613 break;
5614
5615 /* If we are associating shift operations, don't let this
5616 produce a shift of the size of the object or larger.
5617 This could occur when we follow a sign-extend by a right
5618 shift on a machine that does a sign-extend as a pair
5619 of shifts. */
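	      /* For example (illustrative): composing two SImode
		 (ashiftrt ... (const_int 24)) operations gives a count
		 of 48.  The pair leaves every bit a copy of the sign
		 bit, which a single shift by 31 (the mode width minus
		 one) also does, so an ASHIFTRT can be clamped rather
		 than abandoned.  */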
5620
5621 if (is_shift && GET_CODE (new_const) == CONST_INT
5622 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
5623 {
5624 /* As an exception, we can turn an ASHIFTRT of this
5625 form into a shift of the number of bits - 1. */
5626 if (code == ASHIFTRT)
5627 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
5628 else
5629 break;
5630 }
5631
5632 y = copy_rtx (XEXP (y, 0));
5633
5634 /* If Y contains our first operand (the most common way this
5635 can happen is if Y is a MEM), we would go into an infinite
5636 loop if we tried to fold it. So don't in that case. */
5637
5638 if (! reg_mentioned_p (folded_arg0, y))
5639 y = fold_rtx (y, insn);
5640
5641 return cse_gen_binary (code, mode, y, new_const);
5642 }
5643 }
5644
5645 new = simplify_binary_operation (code, mode,
5646 const_arg0 ? const_arg0 : folded_arg0,
5647 const_arg1 ? const_arg1 : folded_arg1);
5648 break;
5649
5650 case 'o':
5651 /* (lo_sum (high X) X) is simply X. */
5652 if (code == LO_SUM && const_arg0 != 0
5653 && GET_CODE (const_arg0) == HIGH
5654 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
5655 return const_arg1;
5656 break;
5657
5658 case '3':
5659 case 'b':
5660 new = simplify_ternary_operation (code, mode, mode_arg0,
5661 const_arg0 ? const_arg0 : folded_arg0,
5662 const_arg1 ? const_arg1 : folded_arg1,
5663 const_arg2 ? const_arg2 : XEXP (x, 2));
5664 break;
5665 }
5666
5667 return new ? new : x;
5668 }
5669 \f
5670 /* Return a constant value currently equivalent to X.
5671 Return 0 if we don't know one. */
5672
5673 static rtx
5674 equiv_constant (x)
5675 rtx x;
5676 {
5677 if (GET_CODE (x) == REG
5678 && REGNO_QTY_VALID_P (REGNO (x))
5679 && qty_const[reg_qty[REGNO (x)]])
5680 x = gen_lowpart_if_possible (GET_MODE (x), qty_const[reg_qty[REGNO (x)]]);
5681
5682 if (x != 0 && CONSTANT_P (x))
5683 return x;
5684
5685 /* If X is a MEM, try to fold it outside the context of any insn to see if
5686 it might be equivalent to a constant. That handles the case where it
5687 is a constant-pool reference. Then try to look it up in the hash table
5688 in case it is something whose value we have seen before. */
5689
5690 if (GET_CODE (x) == MEM)
5691 {
5692 struct table_elt *elt;
5693
5694 x = fold_rtx (x, NULL_RTX);
5695 if (CONSTANT_P (x))
5696 return x;
5697
5698 elt = lookup (x, safe_hash (x, GET_MODE (x)) % NBUCKETS, GET_MODE (x));
5699 if (elt == 0)
5700 return 0;
5701
5702 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
5703 if (elt->is_const && CONSTANT_P (elt->exp))
5704 return elt->exp;
5705 }
5706
5707 return 0;
5708 }
5709 \f
5710 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
5711 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
5712 least-significant part of X.
5713 MODE specifies how big a part of X to return.
5714
5715 If the requested operation cannot be done, 0 is returned.
5716
5717 This is similar to gen_lowpart in emit-rtl.c. */
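/* For example (illustrative, assuming a 4-byte word): asking for the
   QImode low part of a SImode MEM at address A yields (mem:QI A) on a
   little-endian machine, but (mem:QI (plus A (const_int 3))) on a
   big-endian one, so that the address after the data is unchanged.  */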
5718
5719 rtx
5720 gen_lowpart_if_possible (mode, x)
5721 enum machine_mode mode;
5722 register rtx x;
5723 {
5724 rtx result = gen_lowpart_common (mode, x);
5725
5726 if (result)
5727 return result;
5728 else if (GET_CODE (x) == MEM)
5729 {
5730 /* This is the only other case we handle. */
5731 register int offset = 0;
5732 rtx new;
5733
5734 if (WORDS_BIG_ENDIAN)
5735 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
5736 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
5737 if (BYTES_BIG_ENDIAN)
5738 /* Adjust the address so that the address-after-the-data is
5739 unchanged. */
5740 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
5741 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
5742 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
5743 if (! memory_address_p (mode, XEXP (new, 0)))
5744 return 0;
5745 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
5746 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
5747 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
5748 return new;
5749 }
5750 else
5751 return 0;
5752 }
5753 \f
5754 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
5755 branch. It will be zero if not.
5756
5757 In certain cases, this can cause us to add an equivalence. For example,
5758 if we are following the taken case of
5759 if (i == 2)
5760 we can add the fact that `i' and `2' are now equivalent.
5761
5762 In any case, we can record that this comparison was passed. If the same
5763 comparison is seen later, we will know its value. */
5764
5765 static void
5766 record_jump_equiv (insn, taken)
5767 rtx insn;
5768 int taken;
5769 {
5770 int cond_known_true;
5771 rtx op0, op1;
5772 enum machine_mode mode, mode0, mode1;
5773 int reversed_nonequality = 0;
5774 enum rtx_code code;
5775
5776 /* Ensure this is the right kind of insn. */
5777 if (! condjump_p (insn) || simplejump_p (insn))
5778 return;
5779
5780 /* See if this jump condition is known true or false. */
5781 if (taken)
5782 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 2) == pc_rtx);
5783 else
5784 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx);
5785
5786 /* Get the type of comparison being done and the operands being compared.
5787 If we had to reverse a non-equality condition, record that fact so we
5788 know that it isn't valid for floating-point. */
5789 code = GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 0));
5790 op0 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 0), insn);
5791 op1 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 1), insn);
5792
5793 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
5794 if (! cond_known_true)
5795 {
5796 reversed_nonequality = (code != EQ && code != NE);
5797 code = reverse_condition (code);
5798 }
5799
5800 /* The mode is the mode of the non-constant. */
5801 mode = mode0;
5802 if (mode1 != VOIDmode)
5803 mode = mode1;
5804
5805 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
5806 }
5807
5808 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
5809 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
5810 Make any useful entries we can with that information. Called from
5811 above function and called recursively. */
5812
5813 static void
5814 record_jump_cond (code, mode, op0, op1, reversed_nonequality)
5815 enum rtx_code code;
5816 enum machine_mode mode;
5817 rtx op0, op1;
5818 int reversed_nonequality;
5819 {
5820 unsigned op0_hash, op1_hash;
5821 int op0_in_memory, op0_in_struct, op1_in_memory, op1_in_struct;
5822 struct table_elt *op0_elt, *op1_elt;
5823
5824 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
5825 we know that they are also equal in the smaller mode (this is also
5826 true for all smaller modes whether or not there is a SUBREG, but
5827 is not worth testing for with no SUBREG). */
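  /* For example (illustrative): if (subreg:SI (reg:HI 100) 0) is known
     equal to (reg:SI 101), then (reg:HI 100) equals the HImode low
     part of reg 101, and we record that equivalence as well.  */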
5828
5829 /* Note that GET_MODE (op0) may not equal MODE. */
5830 if (code == EQ && GET_CODE (op0) == SUBREG
5831 && (GET_MODE_SIZE (GET_MODE (op0))
5832 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
5833 {
5834 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
5835 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
5836
5837 record_jump_cond (code, mode, SUBREG_REG (op0),
5838 tem ? tem : gen_rtx (SUBREG, inner_mode, op1, 0),
5839 reversed_nonequality);
5840 }
5841
5842 if (code == EQ && GET_CODE (op1) == SUBREG
5843 && (GET_MODE_SIZE (GET_MODE (op1))
5844 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
5845 {
5846 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
5847 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
5848
5849 record_jump_cond (code, mode, SUBREG_REG (op1),
5850 tem ? tem : gen_rtx (SUBREG, inner_mode, op0, 0),
5851 reversed_nonequality);
5852 }
5853
5854 /* Similarly, if this is an NE comparison, and either is a SUBREG
5855 making a smaller mode, we know the whole thing is also NE. */
5856
5857 /* Note that GET_MODE (op0) may not equal MODE;
5858 if we test MODE instead, we can get an infinite recursion
5859 alternating between two modes each wider than MODE. */
5860
5861 if (code == NE && GET_CODE (op0) == SUBREG
5862 && subreg_lowpart_p (op0)
5863 && (GET_MODE_SIZE (GET_MODE (op0))
5864 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
5865 {
5866 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
5867 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
5868
5869 record_jump_cond (code, mode, SUBREG_REG (op0),
5870 tem ? tem : gen_rtx (SUBREG, inner_mode, op1, 0),
5871 reversed_nonequality);
5872 }
5873
5874 if (code == NE && GET_CODE (op1) == SUBREG
5875 && subreg_lowpart_p (op1)
5876 && (GET_MODE_SIZE (GET_MODE (op1))
5877 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
5878 {
5879 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
5880 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
5881
5882 record_jump_cond (code, mode, SUBREG_REG (op1),
5883 tem ? tem : gen_rtx (SUBREG, inner_mode, op0, 0),
5884 reversed_nonequality);
5885 }
5886
5887 /* Hash both operands. */
5888
5889 do_not_record = 0;
5890 hash_arg_in_memory = 0;
5891 hash_arg_in_struct = 0;
5892 op0_hash = HASH (op0, mode);
5893 op0_in_memory = hash_arg_in_memory;
5894 op0_in_struct = hash_arg_in_struct;
5895
5896 if (do_not_record)
5897 return;
5898
5899 do_not_record = 0;
5900 hash_arg_in_memory = 0;
5901 hash_arg_in_struct = 0;
5902 op1_hash = HASH (op1, mode);
5903 op1_in_memory = hash_arg_in_memory;
5904 op1_in_struct = hash_arg_in_struct;
5905
5906 if (do_not_record)
5907 return;
5908
5909 /* Look up both operands. */
5910 op0_elt = lookup (op0, op0_hash, mode);
5911 op1_elt = lookup (op1, op1_hash, mode);
5912
5913 /* If both operands are already equivalent or if they are not in the
5914 table but are identical, do nothing. */
5915 if ((op0_elt != 0 && op1_elt != 0
5916 && op0_elt->first_same_value == op1_elt->first_same_value)
5917 || op0 == op1 || rtx_equal_p (op0, op1))
5918 return;
5919
5920 /* If we aren't setting two things equal, all we can do is save this
5921 comparison. Similarly if this is floating-point. In the latter
5922 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
5923 If we record the equality, we might inadvertently delete code
5924 whose intent was to change -0 to +0. */
5925
5926 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
5927 {
5928 /* If we reversed a floating-point comparison, if OP0 is not a
5929 register, or if OP1 is neither a register nor a constant, we can't
5930 do anything. */
5931
5932 if (GET_CODE (op1) != REG)
5933 op1 = equiv_constant (op1);
5934
5935 if ((reversed_nonequality && FLOAT_MODE_P (mode))
5936 || GET_CODE (op0) != REG || op1 == 0)
5937 return;
5938
5939 /* Put OP0 in the hash table if it isn't already. This gives it a
5940 new quantity number. */
5941 if (op0_elt == 0)
5942 {
5943 if (insert_regs (op0, NULL_PTR, 0))
5944 {
5945 rehash_using_reg (op0);
5946 op0_hash = HASH (op0, mode);
5947
5948 /* If OP0 is contained in OP1, this changes its hash code
5949 as well. Faster to rehash than to check, except
5950 for the simple case of a constant. */
5951 if (! CONSTANT_P (op1))
5952 op1_hash = HASH (op1, mode);
5953 }
5954
5955 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
5956 op0_elt->in_memory = op0_in_memory;
5957 op0_elt->in_struct = op0_in_struct;
5958 }
5959
5960 qty_comparison_code[reg_qty[REGNO (op0)]] = code;
5961 if (GET_CODE (op1) == REG)
5962 {
5963 /* Look it up again--in case op0 and op1 are the same. */
5964 op1_elt = lookup (op1, op1_hash, mode);
5965
5966 /* Put OP1 in the hash table so it gets a new quantity number. */
5967 if (op1_elt == 0)
5968 {
5969 if (insert_regs (op1, NULL_PTR, 0))
5970 {
5971 rehash_using_reg (op1);
5972 op1_hash = HASH (op1, mode);
5973 }
5974
5975 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
5976 op1_elt->in_memory = op1_in_memory;
5977 op1_elt->in_struct = op1_in_struct;
5978 }
5979
5980 qty_comparison_qty[reg_qty[REGNO (op0)]] = reg_qty[REGNO (op1)];
5981 qty_comparison_const[reg_qty[REGNO (op0)]] = 0;
5982 }
5983 else
5984 {
5985 qty_comparison_qty[reg_qty[REGNO (op0)]] = -1;
5986 qty_comparison_const[reg_qty[REGNO (op0)]] = op1;
5987 }
5988
5989 return;
5990 }
5991
5992 /* If either side is still missing an equivalence, make it now,
5993 then merge the equivalences. */
5994
5995 if (op0_elt == 0)
5996 {
5997 if (insert_regs (op0, NULL_PTR, 0))
5998 {
5999 rehash_using_reg (op0);
6000 op0_hash = HASH (op0, mode);
6001 }
6002
6003 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
6004 op0_elt->in_memory = op0_in_memory;
6005 op0_elt->in_struct = op0_in_struct;
6006 }
6007
6008 if (op1_elt == 0)
6009 {
6010 if (insert_regs (op1, NULL_PTR, 0))
6011 {
6012 rehash_using_reg (op1);
6013 op1_hash = HASH (op1, mode);
6014 }
6015
6016 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
6017 op1_elt->in_memory = op1_in_memory;
6018 op1_elt->in_struct = op1_in_struct;
6019 }
6020
6021 merge_equiv_classes (op0_elt, op1_elt);
6022 last_jump_equiv_class = op0_elt;
6023 }
6024 \f
6025 /* CSE processing for one instruction.
6026 First simplify sources and addresses of all assignments
6027 in the instruction, using previously-computed equivalent values.
6028 Then install the new sources and destinations in the table
6029 of available values.
6030
6031 If IN_LIBCALL_BLOCK is nonzero, don't record any equivalence made in
6032 the insn. */
6033
6034 /* Data on one SET contained in the instruction. */
6035
6036 struct set
6037 {
6038 /* The SET rtx itself. */
6039 rtx rtl;
6040 /* The SET_SRC of the rtx (the original value, if it is changing). */
6041 rtx src;
6042 /* The hash-table element for the SET_SRC of the SET. */
6043 struct table_elt *src_elt;
6044 /* Hash value for the SET_SRC. */
6045 unsigned src_hash;
6046 /* Hash value for the SET_DEST. */
6047 unsigned dest_hash;
6048 /* The SET_DEST, with SUBREG, etc., stripped. */
6049 rtx inner_dest;
6050 /* Place where the pointer to the INNER_DEST was found. */
6051 rtx *inner_dest_loc;
6052 /* Nonzero if the SET_SRC is in memory. */
6053 char src_in_memory;
6054 /* Nonzero if the SET_SRC is in a structure. */
6055 char src_in_struct;
6056 /* Nonzero if the SET_SRC contains something
6057 whose value cannot be predicted and understood. */
6058 char src_volatile;
6059 /* Original machine mode, in case it becomes a CONST_INT. */
6060 enum machine_mode mode;
6061 /* A constant equivalent for SET_SRC, if any. */
6062 rtx src_const;
6063 /* Hash value of constant equivalent for SET_SRC. */
6064 unsigned src_const_hash;
6065 /* Table entry for constant equivalent for SET_SRC, if any. */
6066 struct table_elt *src_const_elt;
6067 };
6068
6069 static void
6070 cse_insn (insn, in_libcall_block)
6071 rtx insn;
6072 int in_libcall_block;
6073 {
6074 register rtx x = PATTERN (insn);
6075 register int i;
6076 rtx tem;
6077 register int n_sets = 0;
6078
6079 /* Records what this insn does to set CC0. */
6080 rtx this_insn_cc0 = 0;
6081 enum machine_mode this_insn_cc0_mode;
6082 struct write_data writes_memory;
6083 static struct write_data init = {0, 0, 0, 0};
6084
6085 rtx src_eqv = 0;
6086 struct table_elt *src_eqv_elt = 0;
6087 int src_eqv_volatile;
6088 int src_eqv_in_memory;
6089 int src_eqv_in_struct;
6090 unsigned src_eqv_hash;
6091
6092 struct set *sets;
6093
6094 this_insn = insn;
6095 writes_memory = init;
6096
6097 /* Find all the SETs and CLOBBERs in this instruction.
6098 Record all the SETs in the array `sets' and count them.
6099 Also determine whether there is a CLOBBER that invalidates
6100 all memory references, or all references at varying addresses. */
6101
6102 if (GET_CODE (insn) == CALL_INSN)
6103 {
6104 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6105 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
6106 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
6107 }
6108
6109 if (GET_CODE (x) == SET)
6110 {
6111 sets = (struct set *) alloca (sizeof (struct set));
6112 sets[0].rtl = x;
6113
6114 /* Ignore SETs that are unconditional jumps.
6115 They never need cse processing, so this does not hurt.
6116 The reason is not efficiency but rather
6117 so that we can test at the end for instructions
6118 that have been simplified to unconditional jumps
6119 and not be misled by unchanged instructions
6120 that were unconditional jumps to begin with. */
6121 if (SET_DEST (x) == pc_rtx
6122 && GET_CODE (SET_SRC (x)) == LABEL_REF)
6123 ;
6124
6125 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
6126 The hard function value register is used only once, to copy to
6127 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
6128 Ensure we invalidate the destination register. On the 80386 no
6129 other code would invalidate it since it is a fixed_reg.
6130 We need not check the return of apply_change_group; see canon_reg. */
6131
6132 else if (GET_CODE (SET_SRC (x)) == CALL)
6133 {
6134 canon_reg (SET_SRC (x), insn);
6135 apply_change_group ();
6136 fold_rtx (SET_SRC (x), insn);
6137 invalidate (SET_DEST (x), VOIDmode);
6138 }
6139 else
6140 n_sets = 1;
6141 }
6142 else if (GET_CODE (x) == PARALLEL)
6143 {
6144 register int lim = XVECLEN (x, 0);
6145
6146 sets = (struct set *) alloca (lim * sizeof (struct set));
6147
6148 /* Find all regs explicitly clobbered in this insn,
6149 and ensure they are not replaced with any other regs
6150 elsewhere in this insn.
6151 When a reg that is clobbered is also used for input,
6152 we should presume that that is for a reason,
6153 and we should not substitute some other register
6154 which is not supposed to be clobbered.
6155 Therefore, this loop cannot be merged into the one below
6156 because a CALL may precede a CLOBBER and refer to the
6157 value clobbered. We must not let a canonicalization do
6158 anything in that case. */
6159 for (i = 0; i < lim; i++)
6160 {
6161 register rtx y = XVECEXP (x, 0, i);
6162 if (GET_CODE (y) == CLOBBER)
6163 {
6164 rtx clobbered = XEXP (y, 0);
6165
6166 if (GET_CODE (clobbered) == REG
6167 || GET_CODE (clobbered) == SUBREG)
6168 invalidate (clobbered, VOIDmode);
6169 else if (GET_CODE (clobbered) == STRICT_LOW_PART
6170 || GET_CODE (clobbered) == ZERO_EXTRACT)
6171 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
6172 }
6173 }
6174
6175 for (i = 0; i < lim; i++)
6176 {
6177 register rtx y = XVECEXP (x, 0, i);
6178 if (GET_CODE (y) == SET)
6179 {
6180 /* As above, we ignore unconditional jumps and call-insns and
6181 ignore the result of apply_change_group. */
6182 if (GET_CODE (SET_SRC (y)) == CALL)
6183 {
6184 canon_reg (SET_SRC (y), insn);
6185 apply_change_group ();
6186 fold_rtx (SET_SRC (y), insn);
6187 invalidate (SET_DEST (y), VOIDmode);
6188 }
6189 else if (SET_DEST (y) == pc_rtx
6190 && GET_CODE (SET_SRC (y)) == LABEL_REF)
6191 ;
6192 else
6193 sets[n_sets++].rtl = y;
6194 }
6195 else if (GET_CODE (y) == CLOBBER)
6196 {
6197 /* If we clobber memory, take note of that,
6198 and canon the address.
6199 This does nothing when a register is clobbered
6200 because we have already invalidated the reg. */
6201 if (GET_CODE (XEXP (y, 0)) == MEM)
6202 {
6203 canon_reg (XEXP (y, 0), NULL_RTX);
6204 note_mem_written (XEXP (y, 0), &writes_memory);
6205 }
6206 }
6207 else if (GET_CODE (y) == USE
6208 && ! (GET_CODE (XEXP (y, 0)) == REG
6209 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
6210 canon_reg (y, NULL_RTX);
6211 else if (GET_CODE (y) == CALL)
6212 {
6213 /* The result of apply_change_group can be ignored; see
6214 canon_reg. */
6215 canon_reg (y, insn);
6216 apply_change_group ();
6217 fold_rtx (y, insn);
6218 }
6219 }
6220 }
6221 else if (GET_CODE (x) == CLOBBER)
6222 {
6223 if (GET_CODE (XEXP (x, 0)) == MEM)
6224 {
6225 canon_reg (XEXP (x, 0), NULL_RTX);
6226 note_mem_written (XEXP (x, 0), &writes_memory);
6227 }
6228 }
6229
6230 /* Canonicalize a USE of a pseudo register or memory location. */
6231 else if (GET_CODE (x) == USE
6232 && ! (GET_CODE (XEXP (x, 0)) == REG
6233 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
6234 canon_reg (XEXP (x, 0), NULL_RTX);
6235 else if (GET_CODE (x) == CALL)
6236 {
6237 /* The result of apply_change_group can be ignored; see canon_reg. */
6238 canon_reg (x, insn);
6239 apply_change_group ();
6240 fold_rtx (x, insn);
6241 }
6242
6243 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
6244 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
6245 is handled specially for this case, and if it isn't set, then there will
6246 be no equivalence for the destination. */
6247 if (n_sets == 1 && REG_NOTES (insn) != 0
6248 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
6249 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
6250 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
6251 src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
6252
6253 /* Canonicalize sources and addresses of destinations.
6254 We do this in a separate pass to avoid problems when a MATCH_DUP is
6255 present in the insn pattern. In that case, we want to ensure that
6256 we don't break the duplicate nature of the pattern. So we will replace
6257 both operands at the same time. Otherwise, we would fail to find an
6258 equivalent substitution in the loop calling validate_change below.
6259
6260 We used to suppress canonicalization of DEST if it appears in SRC,
6261 but we don't do this any more. */
6262
6263 for (i = 0; i < n_sets; i++)
6264 {
6265 rtx dest = SET_DEST (sets[i].rtl);
6266 rtx src = SET_SRC (sets[i].rtl);
6267 rtx new = canon_reg (src, insn);
6268
6269 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
6270 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
6271 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
6272 || insn_n_dups[recog_memoized (insn)] > 0)
6273 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
6274 else
6275 SET_SRC (sets[i].rtl) = new;
6276
6277 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
6278 {
6279 validate_change (insn, &XEXP (dest, 1),
6280 canon_reg (XEXP (dest, 1), insn), 1);
6281 validate_change (insn, &XEXP (dest, 2),
6282 canon_reg (XEXP (dest, 2), insn), 1);
6283 }
6284
6285 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
6286 || GET_CODE (dest) == ZERO_EXTRACT
6287 || GET_CODE (dest) == SIGN_EXTRACT)
6288 dest = XEXP (dest, 0);
6289
6290 if (GET_CODE (dest) == MEM)
6291 canon_reg (dest, insn);
6292 }
6293
6294 /* Now that we have done all the replacements, we can apply the change
6295 group and see if they all work. Note that this will cause some
6296 canonicalizations that would have worked individually not to be applied
6297 because some other canonicalization didn't work, but this should not
6298 occur often.
6299
6300 The result of apply_change_group can be ignored; see canon_reg. */
6301
6302 apply_change_group ();
6303
6304 /* Set sets[i].src_elt to the class each source belongs to.
6305 Detect assignments from or to volatile things
6306 and set sets[i].rtl to zero so they will be ignored
6307 in the rest of this function.
6308
6309 Nothing in this loop changes the hash table or the register chains. */
6310
6311 for (i = 0; i < n_sets; i++)
6312 {
6313 register rtx src, dest;
6314 register rtx src_folded;
6315 register struct table_elt *elt = 0, *p;
6316 enum machine_mode mode;
6317 rtx src_eqv_here;
6318 rtx src_const = 0;
6319 rtx src_related = 0;
6320 struct table_elt *src_const_elt = 0;
6321 int src_cost = 10000, src_eqv_cost = 10000, src_folded_cost = 10000;
6322 int src_related_cost = 10000, src_elt_cost = 10000;
6323 /* Set non-zero if we need to call force_const_mem on the
6324 contents of src_folded before using it. */
6325 int src_folded_force_flag = 0;
6326
6327 dest = SET_DEST (sets[i].rtl);
6328 src = SET_SRC (sets[i].rtl);
6329
6330 /* If SRC is a constant that has no machine mode,
6331 hash it with the destination's machine mode.
6332 This way we can keep different modes separate. */
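      /* For example (illustrative): a QImode register holding
	 (const_int 1) says nothing about the bits above its low byte,
	 so it must not share a class with a SImode register that also
	 holds (const_int 1).  */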
6333
6334 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
6335 sets[i].mode = mode;
6336
6337 if (src_eqv)
6338 {
6339 enum machine_mode eqvmode = mode;
6340 if (GET_CODE (dest) == STRICT_LOW_PART)
6341 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
6342 do_not_record = 0;
6343 hash_arg_in_memory = 0;
6344 hash_arg_in_struct = 0;
6345 src_eqv = fold_rtx (src_eqv, insn);
6346 src_eqv_hash = HASH (src_eqv, eqvmode);
6347
6348 /* Find the equivalence class for the equivalent expression. */
6349
6350 if (!do_not_record)
6351 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
6352
6353 src_eqv_volatile = do_not_record;
6354 src_eqv_in_memory = hash_arg_in_memory;
6355 src_eqv_in_struct = hash_arg_in_struct;
6356 }
6357
6358 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
6359 value of the INNER register, not the destination. So it is not
6360 a valid substitution for the source. But save it for later. */
6361 if (GET_CODE (dest) == STRICT_LOW_PART)
6362 src_eqv_here = 0;
6363 else
6364 src_eqv_here = src_eqv;
6365
6366 /* Simplify any foldable subexpressions in SRC. Then get the fully-
6367 simplified result, which may not necessarily be valid. */
6368 src_folded = fold_rtx (src, insn);
6369
6370 #if 0
6371 /* ??? This caused bad code to be generated for the m68k port with -O2.
6372 Suppose src is (CONST_INT -1), and that after truncation src_folded
6373 is (CONST_INT 3). Suppose src_folded is then used for src_const.
6374 At the end we will add src and src_const to the same equivalence
6375 class. We now have 3 and -1 on the same equivalence class. This
6376 causes later instructions to be mis-optimized. */
6377 /* If storing a constant in a bitfield, pre-truncate the constant
6378 so we will be able to record it later. */
6379 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
6380 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
6381 {
6382 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
6383
6384 if (GET_CODE (src) == CONST_INT
6385 && GET_CODE (width) == CONST_INT
6386 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
6387 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
6388 src_folded
6389 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
6390 << INTVAL (width)) - 1));
6391 }
6392 #endif
6393
6394 /* Compute SRC's hash code, and also notice if it
6395 should not be recorded at all. In that case,
6396 prevent any further processing of this assignment. */
6397 do_not_record = 0;
6398 hash_arg_in_memory = 0;
6399 hash_arg_in_struct = 0;
6400
6401 sets[i].src = src;
6402 sets[i].src_hash = HASH (src, mode);
6403 sets[i].src_volatile = do_not_record;
6404 sets[i].src_in_memory = hash_arg_in_memory;
6405 sets[i].src_in_struct = hash_arg_in_struct;
6406
6407 #if 0
6408 /* It is no longer clear why we used to do this, but it doesn't
6409 appear to still be needed. So let's try without it since this
6410 code hurts cse'ing widened ops. */
6411 /* If source is a perverse subreg (such as QI treated as an SI),
6412 treat it as volatile. It may do the work of an SI in one context
6413 where the extra bits are not being used, but cannot replace an SI
6414 in general. */
6415 if (GET_CODE (src) == SUBREG
6416 && (GET_MODE_SIZE (GET_MODE (src))
6417 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
6418 sets[i].src_volatile = 1;
6419 #endif
6420
6421 /* Locate all possible equivalent forms for SRC. Try to replace
6422 SRC in the insn with each cheaper equivalent.
6423
6424 We have the following types of equivalents: SRC itself, a folded
6425 version, a value given in a REG_EQUAL note, or a value related
6426 to a constant.
6427
6428 Each of these equivalents may be part of an additional class
6429 of equivalents (if more than one is in the table, they must be in
6430 the same class; we check for this).
6431
6432 If the source is volatile, we don't do any table lookups.
6433
6434 We note any constant equivalent for possible later use in a
6435 REG_NOTE. */
6436
6437 if (!sets[i].src_volatile)
6438 elt = lookup (src, sets[i].src_hash, mode);
6439
6440 sets[i].src_elt = elt;
6441
6442 if (elt && src_eqv_here && src_eqv_elt)
6443 {
6444 if (elt->first_same_value != src_eqv_elt->first_same_value)
6445 {
6446 /* The REG_EQUAL is indicating that two formerly distinct
6447 classes are now equivalent. So merge them. */
6448 merge_equiv_classes (elt, src_eqv_elt);
6449 src_eqv_hash = HASH (src_eqv, elt->mode);
6450 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
6451 }
6452
6453 src_eqv_here = 0;
6454 }
6455
6456 else if (src_eqv_elt)
6457 elt = src_eqv_elt;
6458
6459 /* Try to find a constant somewhere and record it in `src_const'.
6460 Record its table element, if any, in `src_const_elt'. Look in
6461 any known equivalences first. (If the constant is not in the
6462 table, also set `sets[i].src_const_hash'). */
6463 if (elt)
6464 for (p = elt->first_same_value; p; p = p->next_same_value)
6465 if (p->is_const)
6466 {
6467 src_const = p->exp;
6468 src_const_elt = elt;
6469 break;
6470 }
6471
6472 if (src_const == 0
6473 && (CONSTANT_P (src_folded)
6474 /* Consider (minus (label_ref L1) (label_ref L2)) as
6475 "constant" here so we will record it. This allows us
6476 to fold switch statements when an ADDR_DIFF_VEC is used. */
6477 || (GET_CODE (src_folded) == MINUS
6478 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
6479 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
6480 src_const = src_folded, src_const_elt = elt;
6481 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
6482 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
6483
6484 /* If we don't know if the constant is in the table, get its
6485 hash code and look it up. */
6486 if (src_const && src_const_elt == 0)
6487 {
6488 sets[i].src_const_hash = HASH (src_const, mode);
6489 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
6490 }
6491
6492 sets[i].src_const = src_const;
6493 sets[i].src_const_elt = src_const_elt;
6494
6495 /* If the constant and our source are both in the table, mark them as
6496 equivalent. Otherwise, if a constant is in the table but the source
6497 isn't, set ELT to it. */
6498 if (src_const_elt && elt
6499 && src_const_elt->first_same_value != elt->first_same_value)
6500 merge_equiv_classes (elt, src_const_elt);
6501 else if (src_const_elt && elt == 0)
6502 elt = src_const_elt;
6503
6504 /* See if there is a register linearly related to a constant
6505 equivalent of SRC. */
6506 if (src_const
6507 && (GET_CODE (src_const) == CONST
6508 || (src_const_elt && src_const_elt->related_value != 0)))
6509 {
6510 src_related = use_related_value (src_const, src_const_elt);
6511 if (src_related)
6512 {
6513 struct table_elt *src_related_elt
6514 = lookup (src_related, HASH (src_related, mode), mode);
6515 if (src_related_elt && elt)
6516 {
6517 if (elt->first_same_value
6518 != src_related_elt->first_same_value)
6519 /* This can occur when we previously saw a CONST
6520 involving a SYMBOL_REF and then see the SYMBOL_REF
6521 twice. Merge the involved classes. */
6522 merge_equiv_classes (elt, src_related_elt);
6523
6524 src_related = 0;
6525 src_related_elt = 0;
6526 }
6527 else if (src_related_elt && elt == 0)
6528 elt = src_related_elt;
6529 }
6530 }
6531
6532 /* See if we have a CONST_INT that is already in a register in a
6533 wider mode. */
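      /* For example (illustrative): if (reg:SI 100) is already known
	 to hold (const_int 5) and this set needs (const_int 5) in
	 QImode, the QImode low part of reg 100 can be used instead of
	 loading the constant again.  */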
6534
6535 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
6536 && GET_MODE_CLASS (mode) == MODE_INT
6537 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
6538 {
6539 enum machine_mode wider_mode;
6540
6541 for (wider_mode = GET_MODE_WIDER_MODE (mode);
6542 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
6543 && src_related == 0;
6544 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
6545 {
6546 struct table_elt *const_elt
6547 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
6548
6549 if (const_elt == 0)
6550 continue;
6551
6552 for (const_elt = const_elt->first_same_value;
6553 const_elt; const_elt = const_elt->next_same_value)
6554 if (GET_CODE (const_elt->exp) == REG)
6555 {
6556 src_related = gen_lowpart_if_possible (mode,
6557 const_elt->exp);
6558 break;
6559 }
6560 }
6561 }
6562
6563 /* Another possibility is that we have an AND with a constant in
6564 a mode narrower than a word. If so, it might have been generated
6565 as part of an "if" which would narrow the AND. If we already
6566 have done the AND in a wider mode, we can use a SUBREG of that
6567 value. */
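      /* For example (illustrative): if this insn computes
	 (and:QI (subreg:QI (reg:SI 100) 0) (const_int 255)) and
	 (and:SI (reg:SI 100) (const_int 255)) was already computed
	 into some register, the QImode low part of that register can
	 serve as SRC_RELATED.  */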
6568
6569 if (flag_expensive_optimizations && ! src_related
6570 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
6571 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6572 {
6573 enum machine_mode tmode;
6574 rtx new_and = gen_rtx (AND, VOIDmode, NULL_RTX, XEXP (src, 1));
6575
6576 for (tmode = GET_MODE_WIDER_MODE (mode);
6577 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6578 tmode = GET_MODE_WIDER_MODE (tmode))
6579 {
6580 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
6581 struct table_elt *larger_elt;
6582
6583 if (inner)
6584 {
6585 PUT_MODE (new_and, tmode);
6586 XEXP (new_and, 0) = inner;
6587 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
6588 if (larger_elt == 0)
6589 continue;
6590
6591 for (larger_elt = larger_elt->first_same_value;
6592 larger_elt; larger_elt = larger_elt->next_same_value)
6593 if (GET_CODE (larger_elt->exp) == REG)
6594 {
6595 src_related
6596 = gen_lowpart_if_possible (mode, larger_elt->exp);
6597 break;
6598 }
6599
6600 if (src_related)
6601 break;
6602 }
6603 }
6604 }
6605
6606 #ifdef LOAD_EXTEND_OP
6607 /* See if a MEM has already been loaded with a widening operation;
6608 if it has, we can use a subreg of that. Many CISC machines
6609 also have such operations, but this is only likely to be
6610 beneficial on these machines. */
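  /* For example (illustrative): on a machine whose QImode loads
     zero-extend, if (zero_extend:SI (mem:QI A)) was already loaded
     into a register, a later QImode read of (mem:QI A) can use the
     low byte of that register instead of touching memory again.  */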
6611
6612 if (flag_expensive_optimizations && src_related == 0
6613 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6614 && GET_MODE_CLASS (mode) == MODE_INT
6615 && GET_CODE (src) == MEM && ! do_not_record
6616 && LOAD_EXTEND_OP (mode) != NIL)
6617 {
6618 enum machine_mode tmode;
6619
6620 /* Set what we are trying to extend and the operation it might
6621 have been extended with. */
6622 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
6623 XEXP (memory_extend_rtx, 0) = src;
6624
6625 for (tmode = GET_MODE_WIDER_MODE (mode);
6626 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6627 tmode = GET_MODE_WIDER_MODE (tmode))
6628 {
6629 struct table_elt *larger_elt;
6630
6631 PUT_MODE (memory_extend_rtx, tmode);
6632 larger_elt = lookup (memory_extend_rtx,
6633 HASH (memory_extend_rtx, tmode), tmode);
6634 if (larger_elt == 0)
6635 continue;
6636
6637 for (larger_elt = larger_elt->first_same_value;
6638 larger_elt; larger_elt = larger_elt->next_same_value)
6639 if (GET_CODE (larger_elt->exp) == REG)
6640 {
6641 src_related = gen_lowpart_if_possible (mode,
6642 larger_elt->exp);
6643 break;
6644 }
6645
6646 if (src_related)
6647 break;
6648 }
6649 }
6650 #endif /* LOAD_EXTEND_OP */
6651
6652 if (src == src_folded)
6653 src_folded = 0;
6654
6655 /* At this point, ELT, if non-zero, points to a class of expressions
6656 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
6657 and SRC_RELATED, if non-zero, each contain additional equivalent
6658 expressions. Prune these latter expressions by deleting expressions
6659 already in the equivalence class.
6660
6661 Check for an equivalent identical to the destination. If found,
6662 this is the preferred equivalent since it will likely lead to
6663 elimination of the insn. Indicate this by placing it in
6664 `src_related'. */
6665
6666 if (elt) elt = elt->first_same_value;
6667 for (p = elt; p; p = p->next_same_value)
6668 {
6669 enum rtx_code code = GET_CODE (p->exp);
6670
6671 /* If the expression is not valid, ignore it. Then we do not
6672 have to check for validity below. In most cases, we can use
6673 `rtx_equal_p', since canonicalization has already been done. */
6674 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
6675 continue;
6676
6677 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
6678 src = 0;
6679 else if (src_folded && GET_CODE (src_folded) == code
6680 && rtx_equal_p (src_folded, p->exp))
6681 src_folded = 0;
6682 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
6683 && rtx_equal_p (src_eqv_here, p->exp))
6684 src_eqv_here = 0;
6685 else if (src_related && GET_CODE (src_related) == code
6686 && rtx_equal_p (src_related, p->exp))
6687 src_related = 0;
6688
6689 /* If this is the same as the destination of the insn, we want
6690 to prefer it. Copy it to src_related. The code below will
6691 then give it a negative cost. */
6692 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
6693 src_related = dest;
6694
6695 }
6696
6697 /* Find the cheapest valid equivalent, trying all the available
6698 possibilities. Prefer items not in the hash table to ones
6699 that are when costs are equal. Note that we can never
6700 worsen an insn as the current contents will also succeed.
6701 If we find an equivalent identical to the destination, use it as best,
6702 since this insn will probably be eliminated in that case. */
6703 if (src)
6704 {
6705 if (rtx_equal_p (src, dest))
6706 src_cost = -1;
6707 else
6708 src_cost = COST (src);
6709 }
6710
6711 if (src_eqv_here)
6712 {
6713 if (rtx_equal_p (src_eqv_here, dest))
6714 src_eqv_cost = -1;
6715 else
6716 src_eqv_cost = COST (src_eqv_here);
6717 }
6718
6719 if (src_folded)
6720 {
6721 if (rtx_equal_p (src_folded, dest))
6722 src_folded_cost = -1;
6723 else
6724 src_folded_cost = COST (src_folded);
6725 }
6726
6727 if (src_related)
6728 {
6729 if (rtx_equal_p (src_related, dest))
6730 src_related_cost = -1;
6731 else
6732 src_related_cost = COST (src_related);
6733 }
6734
6735 /* If this was an indirect jump insn, a known label will really be
6736 cheaper even though it looks more expensive. */
6737 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
6738 src_folded = src_const, src_folded_cost = -1;
6739
6740 /* Terminate loop when replacement made. This must terminate since
6741 the current contents will be tested and will always be valid. */
6742 while (1)
6743 {
6744 rtx trial;
6745
6746 /* Skip invalid entries. */
6747 while (elt && GET_CODE (elt->exp) != REG
6748 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6749 elt = elt->next_same_value;
6750
6751 if (elt) src_elt_cost = elt->cost;
6752
6753 /* Find cheapest and skip it for the next time. For items
6754 of equal cost, use this order:
6755 src_folded, src, src_eqv, src_related and hash table entry. */
6756 if (src_folded_cost <= src_cost
6757 && src_folded_cost <= src_eqv_cost
6758 && src_folded_cost <= src_related_cost
6759 && src_folded_cost <= src_elt_cost)
6760 {
6761 trial = src_folded, src_folded_cost = 10000;
6762 if (src_folded_force_flag)
6763 trial = force_const_mem (mode, trial);
6764 }
6765 else if (src_cost <= src_eqv_cost
6766 && src_cost <= src_related_cost
6767 && src_cost <= src_elt_cost)
6768 trial = src, src_cost = 10000;
6769 else if (src_eqv_cost <= src_related_cost
6770 && src_eqv_cost <= src_elt_cost)
6771 trial = copy_rtx (src_eqv_here), src_eqv_cost = 10000;
6772 else if (src_related_cost <= src_elt_cost)
6773 trial = copy_rtx (src_related), src_related_cost = 10000;
6774 else
6775 {
6776 trial = copy_rtx (elt->exp);
6777 elt = elt->next_same_value;
6778 src_elt_cost = 10000;
6779 }
6780
6781 /* We don't normally have an insn matching (set (pc) (pc)), so
6782 check for this separately here. We will delete such an
6783 insn below.
6784
6785 Tablejump insns contain a USE of the table, so simply replacing
6786 the operand with the constant won't match. This is simply an
6787 unconditional branch, however, and is therefore valid. Just
6788 insert the substitution here and we will delete and re-emit
6789 the insn later. */
6790
6791 if (n_sets == 1 && dest == pc_rtx
6792 && (trial == pc_rtx
6793 || (GET_CODE (trial) == LABEL_REF
6794 && ! condjump_p (insn))))
6795 {
6796 /* If TRIAL is a label in front of a jump table, we are
6797 really falling through the switch (this is how casesi
6798 insns work), so we must branch around the table. */
6799 if (GET_CODE (trial) == CODE_LABEL
6800 && NEXT_INSN (trial) != 0
6801 && GET_CODE (NEXT_INSN (trial)) == JUMP_INSN
6802 && (GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_DIFF_VEC
6803 || GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_VEC))
6804
6805 trial = gen_rtx (LABEL_REF, Pmode, get_label_after (trial));
6806
6807 SET_SRC (sets[i].rtl) = trial;
6808 cse_jumps_altered = 1;
6809 break;
6810 }
6811
6812 /* Look for a substitution that makes a valid insn. */
6813 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
6814 {
6815 /* The result of apply_change_group can be ignored; see
6816 canon_reg. */
6817
6818 validate_change (insn, &SET_SRC (sets[i].rtl),
6819 canon_reg (SET_SRC (sets[i].rtl), insn),
6820 1);
6821 apply_change_group ();
6822 break;
6823 }
6824
6825 /* If we previously found constant pool entries for
6826 constants and this is a constant, try making a
6827 pool entry. Put it in src_folded unless we have already done
6828 so, since that is where it likely came from. */
6829
6830 else if (constant_pool_entries_cost
6831 && CONSTANT_P (trial)
6832 && ! (GET_CODE (trial) == CONST
6833 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
6834 && (src_folded == 0
6835 || (GET_CODE (src_folded) != MEM
6836 && ! src_folded_force_flag))
6837 && GET_MODE_CLASS (mode) != MODE_CC)
6838 {
6839 src_folded_force_flag = 1;
6840 src_folded = trial;
6841 src_folded_cost = constant_pool_entries_cost;
6842 }
6843 }
6844
6845 src = SET_SRC (sets[i].rtl);
6846
6847 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
6848 However, there is an important exception: If both are registers
6849 that are not the head of their equivalence class, replace SET_SRC
6850 with the head of the class. If we do not do this, we will have
6851 both registers live over a portion of the basic block. This way,
6852 their lifetimes will likely abut instead of overlapping. */
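  /* For example (illustrative): if regs 101 and 102 are equivalent
     with 101 at the head of the class, a no-op (set (reg 102)
     (reg 102)) is rewritten as (set (reg 102) (reg 101)), so that
     only one of the two registers need stay live afterward.  */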
6853 if (GET_CODE (dest) == REG
6854 && REGNO_QTY_VALID_P (REGNO (dest))
6855 && qty_mode[reg_qty[REGNO (dest)]] == GET_MODE (dest)
6856 && qty_first_reg[reg_qty[REGNO (dest)]] != REGNO (dest)
6857 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
6858 /* Don't do this if the original insn had a hard reg as
6859 SET_SRC. */
6860 && (GET_CODE (sets[i].src) != REG
6861 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER))
6862 /* We can't call canon_reg here because it won't do anything if
6863 SRC is a hard register. */
6864 {
6865 int first = qty_first_reg[reg_qty[REGNO (src)]];
6866
6867 src = SET_SRC (sets[i].rtl)
6868 = first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
6869 : gen_rtx (REG, GET_MODE (src), first);
6870
6871 /* If we had a constant that is cheaper than what we are now
6872 setting SRC to, use that constant. We ignored it when we
6873 thought we could make this into a no-op. */
6874 if (src_const && COST (src_const) < COST (src)
6875 && validate_change (insn, &SET_SRC (sets[i].rtl), src_const, 0))
6876 src = src_const;
6877 }
6878
6879 /* If we made a change, recompute SRC values. */
6880 if (src != sets[i].src)
6881 {
6882 do_not_record = 0;
6883 hash_arg_in_memory = 0;
6884 hash_arg_in_struct = 0;
6885 sets[i].src = src;
6886 sets[i].src_hash = HASH (src, mode);
6887 sets[i].src_volatile = do_not_record;
6888 sets[i].src_in_memory = hash_arg_in_memory;
6889 sets[i].src_in_struct = hash_arg_in_struct;
6890 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
6891 }
6892
6893 /* If this is a single SET, we are setting a register, and we have an
6894 equivalent constant, we want to add a REG_NOTE. We don't want
6895 to write a REG_EQUAL note for a constant pseudo since verifying that
6896 that pseudo hasn't been eliminated is a pain. Such a note also
6897 won't help anything. */
6898 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
6899 && GET_CODE (src_const) != REG)
6900 {
6901 tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6902
6903 /* Record the actual constant value in a REG_EQUAL note, making
6904 a new one if one does not already exist. */
6905 if (tem)
6906 XEXP (tem, 0) = src_const;
6907 else
6908 REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_EQUAL,
6909 src_const, REG_NOTES (insn));
6910
6911 /* If storing a constant value in a register that
6912 previously held the constant value 0,
6913 record this fact with a REG_WAS_0 note on this insn.
6914
6915 Note that the *register* is required to have previously held 0,
6916 not just any register in the quantity and we must point to the
6917 insn that set that register to zero.
6918
6919 Rather than track each register individually, we just see if
6920 the last set for this quantity was for this register. */
6921
6922 if (REGNO_QTY_VALID_P (REGNO (dest))
6923 && qty_const[reg_qty[REGNO (dest)]] == const0_rtx)
6924 {
6925 /* See if we previously had a REG_WAS_0 note. */
6926 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
6927 rtx const_insn = qty_const_insn[reg_qty[REGNO (dest)]];
6928
6929 if ((tem = single_set (const_insn)) != 0
6930 && rtx_equal_p (SET_DEST (tem), dest))
6931 {
6932 if (note)
6933 XEXP (note, 0) = const_insn;
6934 else
6935 REG_NOTES (insn) = gen_rtx (INSN_LIST, REG_WAS_0,
6936 const_insn, REG_NOTES (insn));
6937 }
6938 }
6939 }
6940
6941 /* Now deal with the destination. */
6942 do_not_record = 0;
6943 sets[i].inner_dest_loc = &SET_DEST (sets[i].rtl);
6944
6945 /* Look through any SIGN_EXTRACT, ZERO_EXTRACT, SUBREG or
6946 STRICT_LOW_PART to the MEM or REG within it. */
6947 while (GET_CODE (dest) == SIGN_EXTRACT
6948 || GET_CODE (dest) == ZERO_EXTRACT
6949 || GET_CODE (dest) == SUBREG
6950 || GET_CODE (dest) == STRICT_LOW_PART)
6951 {
6952 sets[i].inner_dest_loc = &XEXP (dest, 0);
6953 dest = XEXP (dest, 0);
6954 }
6955
6956 sets[i].inner_dest = dest;
6957
6958 if (GET_CODE (dest) == MEM)
6959 {
6960 dest = fold_rtx (dest, insn);
6961
6962 /* Decide whether we invalidate everything in memory,
6963 or just things at non-fixed places.
6964 Writing a large aggregate must invalidate everything
6965 because we don't know how long it is. */
6966 note_mem_written (dest, &writes_memory);
6967 }
6968
6969 /* Compute the hash code of the destination now,
6970 before the effects of this instruction are recorded,
6971 since the register values used in the address computation
6972 are those before this instruction. */
6973 sets[i].dest_hash = HASH (dest, mode);
6974
6975 /* Don't enter a bit-field in the hash table
6976 because the value in it after the store
6977 may not equal what was stored, due to truncation. */
6978
6979 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
6980 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
6981 {
6982 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
6983
6984 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
6985 && GET_CODE (width) == CONST_INT
6986 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
6987 && ! (INTVAL (src_const)
6988 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
6989 /* Exception: if the value is constant,
6990 and it won't be truncated, record it. */
6991 ;
6992 else
6993 {
6994 /* This is chosen so that the destination will be invalidated
6995 but no new value will be recorded.
6996 We must invalidate because sometimes constant
6997 values can be recorded for bitfields. */
6998 sets[i].src_elt = 0;
6999 sets[i].src_volatile = 1;
7000 src_eqv = 0;
7001 src_eqv_elt = 0;
7002 }
7003 }
7004
7005 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
7006 the insn. */
7007 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
7008 {
7009 PUT_CODE (insn, NOTE);
7010 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7011 NOTE_SOURCE_FILE (insn) = 0;
7012 cse_jumps_altered = 1;
7013 /* One less use of the label this insn used to jump to. */
7014 --LABEL_NUSES (JUMP_LABEL (insn));
7015 /* No more processing for this set. */
7016 sets[i].rtl = 0;
7017 }
7018
7019 /* If this SET is now setting PC to a label, we know it used to
7020 be a conditional or computed branch. So we see if we can follow
7021 it. If it was a computed branch, delete it and re-emit. */
7022 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
7023 {
7024 rtx p;
7025
7026 /* If this is not in the format for a simple branch and
7027 we are the only SET in it, re-emit it. */
7028 if (! simplejump_p (insn) && n_sets == 1)
7029 {
7030 rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
7031 JUMP_LABEL (new) = XEXP (src, 0);
7032 LABEL_NUSES (XEXP (src, 0))++;
7033 delete_insn (insn);
7034 insn = new;
7035 }
7036 else
7037 /* Otherwise, force rerecognition, since it probably had
7038 a different pattern before.
7039 This shouldn't really be necessary, since whatever
7040 changed the source value above should have done this.
7041 Until the right place is found, might as well do this here. */
7042 INSN_CODE (insn) = -1;
7043
7044 /* Now that we've converted this jump to an unconditional jump,
7045 there is dead code after it. Delete the dead code until we
7046 reach a BARRIER, the end of the function, or a label. Do
7047 not delete NOTEs except for NOTE_INSN_DELETED since later
7048 phases assume these notes are retained. */
7049
7050 p = insn;
7051
7052 while (NEXT_INSN (p) != 0
7053 && GET_CODE (NEXT_INSN (p)) != BARRIER
7054 && GET_CODE (NEXT_INSN (p)) != CODE_LABEL)
7055 {
7056 if (GET_CODE (NEXT_INSN (p)) != NOTE
7057 || NOTE_LINE_NUMBER (NEXT_INSN (p)) == NOTE_INSN_DELETED)
7058 delete_insn (NEXT_INSN (p));
7059 else
7060 p = NEXT_INSN (p);
7061 }
7062
7063 /* If we don't have a BARRIER immediately after INSN, put one there.
7064 Much code assumes that there are no NOTEs between a JUMP_INSN and
7065 BARRIER. */
7066
7067 if (NEXT_INSN (insn) == 0
7068 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
7069 emit_barrier_before (NEXT_INSN (insn));
7070
7071 /* We might have two BARRIERs separated by notes. Delete the second
7072 one if so. */
7073
7074 if (p != insn && NEXT_INSN (p) != 0
7075 && GET_CODE (NEXT_INSN (p)) == BARRIER)
7076 delete_insn (NEXT_INSN (p));
7077
7078 cse_jumps_altered = 1;
7079 sets[i].rtl = 0;
7080 }
7081
7082 /* If destination is volatile, invalidate it and then do no further
7083 processing for this assignment. */
7084
7085 else if (do_not_record)
7086 {
7087 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7088 || GET_CODE (dest) == MEM)
7089 invalidate (dest, VOIDmode);
7090 else if (GET_CODE (dest) == STRICT_LOW_PART
7091 || GET_CODE (dest) == ZERO_EXTRACT)
7092 invalidate (XEXP (dest, 0), GET_MODE (dest));
7093 sets[i].rtl = 0;
7094 }
7095
7096 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
7097 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
7098
7099 #ifdef HAVE_cc0
7100 /* If setting CC0, record what it was set to, or the constant it is
7101 equivalent to, if any. If it is being set to a floating-point
7102 value, make a COMPARE with the appropriate constant of 0. If we
7103 don't do this, later code can interpret this as a test against
7104 const0_rtx, which can cause problems if we try to put it into an
7105 insn as a floating-point operand. */
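/* (Illustration, schematic RTL: if (reg:SF 100) were recorded directly,
   a later test of the cc0 value might be folded as a comparison against
   the integer const0_rtx; recording
   (compare:VOIDmode (reg:SF 100) (const_double 0.0))
   keeps the zero explicitly floating-point.)  */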
7106 if (dest == cc0_rtx)
7107 {
7108 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
7109 this_insn_cc0_mode = mode;
7110 if (FLOAT_MODE_P (mode))
7111 this_insn_cc0 = gen_rtx (COMPARE, VOIDmode, this_insn_cc0,
7112 CONST0_RTX (mode));
7113 }
7114 #endif
7115 }
7116
7117 /* Now enter all non-volatile source expressions in the hash table
7118 if they are not already present.
7119 Record their equivalence classes in src_elt.
7120 This way we can insert the corresponding destinations into
7121 the same classes even if the actual sources are no longer in them
7122 (having been invalidated). */
7123
7124 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
7125 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
7126 {
7127 register struct table_elt *elt;
7128 register struct table_elt *classp = sets[0].src_elt;
7129 rtx dest = SET_DEST (sets[0].rtl);
7130 enum machine_mode eqvmode = GET_MODE (dest);
7131
7132 if (GET_CODE (dest) == STRICT_LOW_PART)
7133 {
7134 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
7135 classp = 0;
7136 }
7137 if (insert_regs (src_eqv, classp, 0))
7138 {
7139 rehash_using_reg (src_eqv);
7140 src_eqv_hash = HASH (src_eqv, eqvmode);
7141 }
7142 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
7143 elt->in_memory = src_eqv_in_memory;
7144 elt->in_struct = src_eqv_in_struct;
7145 src_eqv_elt = elt;
7146
7147 /* Check to see if src_eqv_elt is the same as a set source which
7148 does not yet have an elt, and if so set the elt of the set source
7149 to src_eqv_elt. */
7150 for (i = 0; i < n_sets; i++)
7151 if (sets[i].rtl && sets[i].src_elt == 0
7152 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
7153 sets[i].src_elt = src_eqv_elt;
7154 }
7155
7156 for (i = 0; i < n_sets; i++)
7157 if (sets[i].rtl && ! sets[i].src_volatile
7158 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
7159 {
7160 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
7161 {
7162 /* REG_EQUAL in setting a STRICT_LOW_PART
7163 gives an equivalent for the entire destination register,
7164 not just for the subreg being stored in now.
7165 This is a more interesting equivalence, so we arrange later
7166 to treat the entire reg as the destination. */
7167 sets[i].src_elt = src_eqv_elt;
7168 sets[i].src_hash = src_eqv_hash;
7169 }
7170 else
7171 {
7172 /* Insert source and constant equivalent into hash table, if not
7173 already present. */
7174 register struct table_elt *classp = src_eqv_elt;
7175 register rtx src = sets[i].src;
7176 register rtx dest = SET_DEST (sets[i].rtl);
7177 enum machine_mode mode
7178 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
7179
7180 if (sets[i].src_elt == 0)
7181 {
7182 register struct table_elt *elt;
7183
7184 /* Note that these insert_regs calls cannot remove
7185 any of the src_elt's, because they would have failed to
7186 match if not still valid. */
7187 if (insert_regs (src, classp, 0))
7188 {
7189 rehash_using_reg (src);
7190 sets[i].src_hash = HASH (src, mode);
7191 }
7192 elt = insert (src, classp, sets[i].src_hash, mode);
7193 elt->in_memory = sets[i].src_in_memory;
7194 elt->in_struct = sets[i].src_in_struct;
7195 sets[i].src_elt = classp = elt;
7196 }
7197
7198 if (sets[i].src_const && sets[i].src_const_elt == 0
7199 && src != sets[i].src_const
7200 && ! rtx_equal_p (sets[i].src_const, src))
7201 sets[i].src_elt = insert (sets[i].src_const, classp,
7202 sets[i].src_const_hash, mode);
7203 }
7204 }
7205 else if (sets[i].src_elt == 0)
7206 /* If we did not insert the source into the hash table (e.g., it was
7207 volatile), note the equivalence class for the REG_EQUAL value, if any,
7208 so that the destination goes into that class. */
7209 sets[i].src_elt = src_eqv_elt;
7210
7211 invalidate_from_clobbers (&writes_memory, x);
7212
7213 /* Some registers are invalidated by subroutine calls. Memory is
7214 invalidated by non-constant calls. */
7215
7216 if (GET_CODE (insn) == CALL_INSN)
7217 {
7218 static struct write_data everything = {0, 1, 1, 1};
7219
7220 if (! CONST_CALL_P (insn))
7221 invalidate_memory (&everything);
7222 invalidate_for_call ();
7223 }
7224
7225 /* Now invalidate everything set by this instruction.
7226 If a SUBREG or other funny destination is being set,
7227 sets[i].rtl is still nonzero, so here we invalidate the reg
7228 a part of which is being set. */
7229
7230 for (i = 0; i < n_sets; i++)
7231 if (sets[i].rtl)
7232 {
7233 /* We can't use the inner dest, because the mode associated with
7234 a ZERO_EXTRACT is significant. */
7235 register rtx dest = SET_DEST (sets[i].rtl);
7236
7237 /* Needed for registers to remove the register from its
7238 previous quantity's chain.
7239 Needed for memory if this is a nonvarying address, unless
7240 we have just done an invalidate_memory that covers even those. */
7241 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7242 || (GET_CODE (dest) == MEM && ! writes_memory.all
7243 && ! cse_rtx_addr_varies_p (dest)))
7244 invalidate (dest, VOIDmode);
7245 else if (GET_CODE (dest) == STRICT_LOW_PART
7246 || GET_CODE (dest) == ZERO_EXTRACT)
7247 invalidate (XEXP (dest, 0), GET_MODE (dest));
7248 }
7249
7250 /* Make sure registers mentioned in destinations
7251 are safe for use in an expression to be inserted.
7252 This removes from the hash table
7253 any invalid entry that refers to one of these registers.
7254
7255 We don't care about the return value from mention_regs because
7256 we are going to hash the SET_DEST values unconditionally. */
7257
7258 for (i = 0; i < n_sets; i++)
7259 if (sets[i].rtl && GET_CODE (SET_DEST (sets[i].rtl)) != REG)
7260 mention_regs (SET_DEST (sets[i].rtl));
7261
7262 /* We may have just removed some of the src_elt's from the hash table.
7263 So replace each one with the current head of the same class. */
7264
7265 for (i = 0; i < n_sets; i++)
7266 if (sets[i].rtl)
7267 {
7268 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
7269 /* If elt was removed, find current head of same class,
7270 or 0 if nothing remains of that class. */
7271 {
7272 register struct table_elt *elt = sets[i].src_elt;
7273
7274 while (elt && elt->prev_same_value)
7275 elt = elt->prev_same_value;
7276
7277 while (elt && elt->first_same_value == 0)
7278 elt = elt->next_same_value;
7279 sets[i].src_elt = elt ? elt->first_same_value : 0;
7280 }
7281 }
7282
7283 /* Now insert the destinations into their equivalence classes. */
7284
7285 for (i = 0; i < n_sets; i++)
7286 if (sets[i].rtl)
7287 {
7288 register rtx dest = SET_DEST (sets[i].rtl);
7289 register struct table_elt *elt;
7290
7291 /* Don't record value if we are not supposed to risk allocating
7292 floating-point values in registers that might be wider than
7293 memory. */
7294 if ((flag_float_store
7295 && GET_CODE (dest) == MEM
7296 && FLOAT_MODE_P (GET_MODE (dest)))
7297 /* Don't record values of destinations set inside a libcall block
7298 since we might delete the libcall. Things should have been set
7299 up so we won't want to reuse such a value, but we play it safe
7300 here. */
7301 || in_libcall_block
7302 /* If we didn't put a REG_EQUAL value or a source into the hash
7303 table, there is no point in recording DEST. */
7304 || sets[i].src_elt == 0
7305 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
7306 or SIGN_EXTEND, don't record DEST since it can cause
7307 some tracking to be wrong.
7308
7309 ??? Think about this more later. */
7310 || (GET_CODE (dest) == SUBREG
7311 && (GET_MODE_SIZE (GET_MODE (dest))
7312 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7313 && (GET_CODE (sets[i].src) == SIGN_EXTEND
7314 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
7315 continue;
7316
7317 /* STRICT_LOW_PART isn't part of the value BEING set,
7318 and neither is the SUBREG inside it.
7319 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
7320 if (GET_CODE (dest) == STRICT_LOW_PART)
7321 dest = SUBREG_REG (XEXP (dest, 0));
7322
7323 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
7324 /* Registers must also be inserted into chains for quantities. */
7325 if (insert_regs (dest, sets[i].src_elt, 1))
7326 {
7327 /* If `insert_regs' changes something, the hash code must be
7328 recalculated. */
7329 rehash_using_reg (dest);
7330 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
7331 }
7332
7333 elt = insert (dest, sets[i].src_elt,
7334 sets[i].dest_hash, GET_MODE (dest));
7335 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
7336 && ! RTX_UNCHANGING_P (sets[i].inner_dest));
7337
7338 if (elt->in_memory)
7339 {
7340 /* This implicitly assumes a whole struct
7341 need not have MEM_IN_STRUCT_P.
7342 But a whole struct is *supposed* to have MEM_IN_STRUCT_P. */
7343 elt->in_struct = (MEM_IN_STRUCT_P (sets[i].inner_dest)
7344 || sets[i].inner_dest != SET_DEST (sets[i].rtl));
7345 }
7346
7347 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
7348 narrower than M2, and both M1 and M2 are the same number of words,
7349 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
7350 make that equivalence as well.
7351
7352 However, BAR may have equivalences for which gen_lowpart_if_possible
7353 will produce a simpler value than gen_lowpart_if_possible applied to
7354 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
7355 BAR's equivalences. If we don't get a simplified form, make
7356 the SUBREG. It will not be used in an equivalence, but will
7357 cause two similar assignments to be detected.
7358
7359 Note the loop below will find SUBREG_REG (DEST) since we have
7360 already entered SRC and DEST of the SET in the table. */
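/* (Concretely, with hypothetical modes on a 32-bit target where HImode
   and SImode each occupy one word: given
       (set (subreg:SI (reg:HI 100) 0) (reg:SI 101))
   we also record
       (set (reg:HI 100) (subreg:HI (reg:SI 101) 0)),
   simplifying the SUBREG via gen_lowpart_if_possible when we can.)  */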
7361
7362 if (GET_CODE (dest) == SUBREG
7363 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
7364 / UNITS_PER_WORD)
7365 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
7366 && (GET_MODE_SIZE (GET_MODE (dest))
7367 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7368 && sets[i].src_elt != 0)
7369 {
7370 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
7371 struct table_elt *elt, *classp = 0;
7372
7373 for (elt = sets[i].src_elt->first_same_value; elt;
7374 elt = elt->next_same_value)
7375 {
7376 rtx new_src = 0;
7377 unsigned src_hash;
7378 struct table_elt *src_elt;
7379
7380 /* Ignore invalid entries. */
7381 if (GET_CODE (elt->exp) != REG
7382 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
7383 continue;
7384
7385 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
7386 if (new_src == 0)
7387 new_src = gen_rtx (SUBREG, new_mode, elt->exp, 0);
7388
7389 src_hash = HASH (new_src, new_mode);
7390 src_elt = lookup (new_src, src_hash, new_mode);
7391
7392 /* Put the new source in the hash table if it isn't
7393 there already. */
7394 if (src_elt == 0)
7395 {
7396 if (insert_regs (new_src, classp, 0))
7397 {
7398 rehash_using_reg (new_src);
7399 src_hash = HASH (new_src, new_mode);
7400 }
7401 src_elt = insert (new_src, classp, src_hash, new_mode);
7402 src_elt->in_memory = elt->in_memory;
7403 src_elt->in_struct = elt->in_struct;
7404 }
7405 else if (classp && classp != src_elt->first_same_value)
7406 /* Show that two things that we've seen before are
7407 actually the same. */
7408 merge_equiv_classes (src_elt, classp);
7409
7410 classp = src_elt->first_same_value;
7411 }
7412 }
7413 }
7414
7415 /* Special handling for (set REG0 REG1)
7416 where REG0 is the "cheapest", cheaper than REG1.
7417 After cse, REG1 will probably not be used in the sequel,
7418 so (if easily done) change this insn to (set REG1 REG0) and
7419 replace REG1 with REG0 in the previous insn that computed their value.
7420 Then REG1 will become a dead store and won't cloud the situation
7421 for later optimizations.
7422
7423 Do not make this change if REG1 is a hard register, because it will
7424 then be used in the sequel and we may be changing a two-operand insn
7425 into a three-operand insn.
7426
7427 Also do not do this if we are operating on a copy of INSN. */
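/* Schematically (hypothetical pseudo-register numbers):

       (set (reg 101) EXPR)            (set (reg 100) EXPR)
       (set (reg 100) (reg 101))  =>   (set (reg 101) (reg 100))

   after which the second insn is a dead store if (reg 101) is not
   used again.  */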
7428
7429 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
7430 && NEXT_INSN (PREV_INSN (insn)) == insn
7431 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
7432 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
7433 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl)))
7434 && (qty_first_reg[reg_qty[REGNO (SET_SRC (sets[0].rtl))]]
7435 == REGNO (SET_DEST (sets[0].rtl))))
7436 {
7437 rtx prev = PREV_INSN (insn);
7438 while (prev && GET_CODE (prev) == NOTE)
7439 prev = PREV_INSN (prev);
7440
7441 if (prev && GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SET
7442 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl))
7443 {
7444 rtx dest = SET_DEST (sets[0].rtl);
7445 rtx note = find_reg_note (prev, REG_EQUIV, NULL_RTX);
7446
7447 validate_change (prev, & SET_DEST (PATTERN (prev)), dest, 1);
7448 validate_change (insn, & SET_DEST (sets[0].rtl),
7449 SET_SRC (sets[0].rtl), 1);
7450 validate_change (insn, & SET_SRC (sets[0].rtl), dest, 1);
7451 apply_change_group ();
7452
7453 /* If REG1 was equivalent to a constant, REG0 is not. */
7454 if (note)
7455 PUT_REG_NOTE_KIND (note, REG_EQUAL);
7456
7457 /* If there was a REG_WAS_0 note on PREV, remove it. Move
7458 any REG_WAS_0 note on INSN to PREV. */
7459 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
7460 if (note)
7461 remove_note (prev, note);
7462
7463 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
7464 if (note)
7465 {
7466 remove_note (insn, note);
7467 XEXP (note, 1) = REG_NOTES (prev);
7468 REG_NOTES (prev) = note;
7469 }
7470
7471 /* If INSN has a REG_EQUAL note, and this note mentions REG0,
7472 then we must delete it, because the value in REG0 has changed. */
7473 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7474 if (note && reg_mentioned_p (dest, XEXP (note, 0)))
7475 remove_note (insn, note);
7476 }
7477 }
7478
7479 /* If this is a conditional jump insn, record any known equivalences due to
7480 the condition being tested. */
7481
7482 last_jump_equiv_class = 0;
7483 if (GET_CODE (insn) == JUMP_INSN
7484 && n_sets == 1 && GET_CODE (x) == SET
7485 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
7486 record_jump_equiv (insn, 0);
7487
7488 #ifdef HAVE_cc0
7489 /* If the previous insn set CC0 and this insn no longer references CC0,
7490 delete the previous insn. Here we use the fact that nothing expects CC0
7491 to be valid over an insn, which is true until the final pass. */
7492 if (prev_insn && GET_CODE (prev_insn) == INSN
7493 && (tem = single_set (prev_insn)) != 0
7494 && SET_DEST (tem) == cc0_rtx
7495 && ! reg_mentioned_p (cc0_rtx, x))
7496 {
7497 PUT_CODE (prev_insn, NOTE);
7498 NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
7499 NOTE_SOURCE_FILE (prev_insn) = 0;
7500 }
7501
7502 prev_insn_cc0 = this_insn_cc0;
7503 prev_insn_cc0_mode = this_insn_cc0_mode;
7504 #endif
7505
7506 prev_insn = insn;
7507 }
7508 \f
7509 /* Store 1 in *WRITES_PTR for those categories of memory ref
7510 that must be invalidated when the expression WRITTEN is stored in.
7511 If WRITTEN is null, say everything must be invalidated. */
7512
7513 static void
7514 note_mem_written (written, writes_ptr)
7515 rtx written;
7516 struct write_data *writes_ptr;
7517 {
7518 static struct write_data everything = {0, 1, 1, 1};
7519
7520 if (written == 0)
7521 *writes_ptr = everything;
7522 else if (GET_CODE (written) == MEM)
7523 {
7524 /* Pushing or popping the stack invalidates just the stack pointer. */
7525 rtx addr = XEXP (written, 0);
7526 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
7527 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
7528 && GET_CODE (XEXP (addr, 0)) == REG
7529 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
7530 {
7531 writes_ptr->sp = 1;
7532 return;
7533 }
7534 else if (GET_MODE (written) == BLKmode)
7535 *writes_ptr = everything;
7536 /* (mem (scratch)) means clobber everything. */
7537 else if (GET_CODE (addr) == SCRATCH)
7538 *writes_ptr = everything;
7539 else if (cse_rtx_addr_varies_p (written))
7540 {
7541 /* A varying address that is a sum indicates an array element,
7542 and that's just as good as a structure element
7543 in implying that we need not invalidate scalar variables.
7544 However, we must allow QImode aliasing of scalars, because the
7545 ANSI C standard allows character pointers to alias anything. */
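/* (E.g., a store through (mem:SI (plus (reg P) (reg I))) looks like
   an array element and need not invalidate scalar variables, while
   a store through (mem:QI (reg P)) must, since P might be a char *
   pointing at any object.)  */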
7546 if (! ((MEM_IN_STRUCT_P (written)
7547 || GET_CODE (XEXP (written, 0)) == PLUS)
7548 && GET_MODE (written) != QImode))
7549 writes_ptr->all = 1;
7550 writes_ptr->nonscalar = 1;
7551 }
7552 writes_ptr->var = 1;
7553 }
7554 }
7555
7556 /* Perform invalidation on the basis of everything about an insn
7557 except for invalidating the actual places that are SET in it.
7558 This includes the places CLOBBERed, and anything that might
7559 alias with something that is SET or CLOBBERed.
7560
7561 W points to the writes_memory for this insn, a struct write_data
7562 saying which kinds of memory references must be invalidated.
7563 X is the pattern of the insn. */
7564
7565 static void
7566 invalidate_from_clobbers (w, x)
7567 struct write_data *w;
7568 rtx x;
7569 {
7570 /* If W->var is not set, W specifies no action.
7571 If W->all is set, this step gets all memory refs
7572 so they can be ignored in the rest of this function. */
7573 if (w->var)
7574 invalidate_memory (w);
7575
7576 if (w->sp)
7577 {
7578 if (reg_tick[STACK_POINTER_REGNUM] >= 0)
7579 reg_tick[STACK_POINTER_REGNUM]++;
7580
7581 /* This should be *very* rare. */
7582 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
7583 invalidate (stack_pointer_rtx, VOIDmode);
7584 }
7585
7586 if (GET_CODE (x) == CLOBBER)
7587 {
7588 rtx ref = XEXP (x, 0);
7589 if (ref)
7590 {
7591 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
7592 || (GET_CODE (ref) == MEM && ! w->all))
7593 invalidate (ref, VOIDmode);
7594 else if (GET_CODE (ref) == STRICT_LOW_PART
7595 || GET_CODE (ref) == ZERO_EXTRACT)
7596 invalidate (XEXP (ref, 0), GET_MODE (ref));
7597 }
7598 }
7599 else if (GET_CODE (x) == PARALLEL)
7600 {
7601 register int i;
7602 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
7603 {
7604 register rtx y = XVECEXP (x, 0, i);
7605 if (GET_CODE (y) == CLOBBER)
7606 {
7607 rtx ref = XEXP (y, 0);
7608 if (ref)
7609 {
7610 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
7611 || (GET_CODE (ref) == MEM && ! w->all))
7612 invalidate (ref, VOIDmode);
7613 else if (GET_CODE (ref) == STRICT_LOW_PART
7614 || GET_CODE (ref) == ZERO_EXTRACT)
7615 invalidate (XEXP (ref, 0), GET_MODE (ref));
7616 }
7617 }
7618 }
7619 }
7620 }
7621 \f
7622 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
7623 and replace any registers in them with either an equivalent constant
7624 or the canonical form of the register. If we are inside an address,
7625 only do this if the address remains valid.
7626
7627 OBJECT is 0 except when within a MEM in which case it is the MEM.
7628
7629 Return the replacement for X. */
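/* (For example, with hypothetical registers, a note
       (expr_list:REG_EQUAL (plus:SI (reg:SI 100) (const_int 4)))
   might have (reg:SI 100) replaced by its canonical equivalent
   register, or by a constant the register is known to hold.)  */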
7630
7631 static rtx
7632 cse_process_notes (x, object)
7633 rtx x;
7634 rtx object;
7635 {
7636 enum rtx_code code = GET_CODE (x);
7637 char *fmt = GET_RTX_FORMAT (code);
7638 int i;
7639
7640 switch (code)
7641 {
7642 case CONST_INT:
7643 case CONST:
7644 case SYMBOL_REF:
7645 case LABEL_REF:
7646 case CONST_DOUBLE:
7647 case PC:
7648 case CC0:
7649 case LO_SUM:
7650 return x;
7651
7652 case MEM:
7653 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
7654 return x;
7655
7656 case EXPR_LIST:
7657 case INSN_LIST:
7658 if (REG_NOTE_KIND (x) == REG_EQUAL)
7659 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
7660 if (XEXP (x, 1))
7661 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
7662 return x;
7663
7664 case SIGN_EXTEND:
7665 case ZERO_EXTEND:
7666 {
7667 rtx new = cse_process_notes (XEXP (x, 0), object);
7668 /* We don't substitute VOIDmode constants into these rtx,
7669 since they would impede folding. */
7670 if (GET_MODE (new) != VOIDmode)
7671 validate_change (object, &XEXP (x, 0), new, 0);
7672 return x;
7673 }
7674
7675 case REG:
7676 i = reg_qty[REGNO (x)];
7677
7678 /* Return a constant or a constant register. */
7679 if (REGNO_QTY_VALID_P (REGNO (x))
7680 && qty_const[i] != 0
7681 && (CONSTANT_P (qty_const[i])
7682 || GET_CODE (qty_const[i]) == REG))
7683 {
7684 rtx new = gen_lowpart_if_possible (GET_MODE (x), qty_const[i]);
7685 if (new)
7686 return new;
7687 }
7688
7689 /* Otherwise, canonicalize this register. */
7690 return canon_reg (x, NULL_RTX);
7691 }
7692
7693 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7694 if (fmt[i] == 'e')
7695 validate_change (object, &XEXP (x, i),
7696 cse_process_notes (XEXP (x, i), object), 0);
7697
7698 return x;
7699 }
7700 \f
7701 /* Find common subexpressions between the end test of a loop and the beginning
7702 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
7703
7704 Often we have a loop where an expression in the exit test is used
7705 in the body of the loop. For example "while (*p) *q++ = *p++;".
7706 Because of the way we duplicate the loop exit test in front of the loop,
7707 however, we don't detect that common subexpression. This will be caught
7708 when global cse is implemented, but this is a quite common case.
7709
7710 This function handles the most common cases of these common expressions.
7711 It is called after we have processed the basic block ending with the
7712 NOTE_INSN_LOOP_END note that ends a loop, when the previous JUMP_INSN
7713 jumps to a label used only once. */
7714
7715 static void
7716 cse_around_loop (loop_start)
7717 rtx loop_start;
7718 {
7719 rtx insn;
7720 int i;
7721 struct table_elt *p;
7722
7723 /* If the jump at the end of the loop doesn't go to the start, we don't
7724 do anything. */
7725 for (insn = PREV_INSN (loop_start);
7726 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
7727 insn = PREV_INSN (insn))
7728 ;
7729
7730 if (insn == 0
7731 || GET_CODE (insn) != NOTE
7732 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
7733 return;
7734
7735 /* If the last insn of the loop (the end test) was an NE comparison,
7736 we will interpret it as an EQ comparison, since we fell through
7737 the loop. Any equivalences resulting from that comparison are
7738 therefore not valid and must be invalidated. */
7739 if (last_jump_equiv_class)
7740 for (p = last_jump_equiv_class->first_same_value; p;
7741 p = p->next_same_value)
7742 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
7743 || (GET_CODE (p->exp) == SUBREG
7744 && GET_CODE (SUBREG_REG (p->exp)) == REG))
7745 invalidate (p->exp, VOIDmode);
7746 else if (GET_CODE (p->exp) == STRICT_LOW_PART
7747 || GET_CODE (p->exp) == ZERO_EXTRACT)
7748 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
7749
7750 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
7751 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
7752
7753 The only thing we do with SET_DEST is invalidate entries, so we
7754 can safely process each SET in order. It is slightly less efficient
7755 to do so, but we only want to handle the most common cases. */
7756
7757 for (insn = NEXT_INSN (loop_start);
7758 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
7759 && ! (GET_CODE (insn) == NOTE
7760 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
7761 insn = NEXT_INSN (insn))
7762 {
7763 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
7764 && (GET_CODE (PATTERN (insn)) == SET
7765 || GET_CODE (PATTERN (insn)) == CLOBBER))
7766 cse_set_around_loop (PATTERN (insn), insn, loop_start);
7767 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
7768 && GET_CODE (PATTERN (insn)) == PARALLEL)
7769 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7770 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
7771 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
7772 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
7773 loop_start);
7774 }
7775 }
7776 \f
7777 /* Variable used for communications between the next two routines. */
7778
7779 static struct write_data skipped_writes_memory;
7780
7781 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
7782 since they are done elsewhere. This function is called via note_stores. */
7783
7784 static void
7785 invalidate_skipped_set (dest, set)
7786 rtx set;
7787 rtx dest;
7788 {
7789 if (GET_CODE (set) == CLOBBER
7790 #ifdef HAVE_cc0
7791 || dest == cc0_rtx
7792 #endif
7793 || dest == pc_rtx)
7794 return;
7795
7796 if (GET_CODE (dest) == MEM)
7797 note_mem_written (dest, &skipped_writes_memory);
7798
7799 /* There are times when an address can appear varying and be a PLUS
7800 during this scan when it would be a fixed address were we to know
7801 the proper equivalences. So promote "nonscalar" to be "all". */
7802 if (skipped_writes_memory.nonscalar)
7803 skipped_writes_memory.all = 1;
7804
7805 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7806 || (! skipped_writes_memory.all && ! cse_rtx_addr_varies_p (dest)))
7807 invalidate (dest, VOIDmode);
7808 else if (GET_CODE (dest) == STRICT_LOW_PART
7809 || GET_CODE (dest) == ZERO_EXTRACT)
7810 invalidate (XEXP (dest, 0), GET_MODE (dest));
7811 }
7812
7813 /* Invalidate all insns from START up to the end of the function or the
7814 next label. This is called when we wish to CSE around a block that is
7815 conditionally executed. */
7816
7817 static void
7818 invalidate_skipped_block (start)
7819 rtx start;
7820 {
7821 rtx insn;
7822 static struct write_data init = {0, 0, 0, 0};
7823 static struct write_data everything = {0, 1, 1, 1};
7824
7825 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
7826 insn = NEXT_INSN (insn))
7827 {
7828 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
7829 continue;
7830
7831 skipped_writes_memory = init;
7832
7833 if (GET_CODE (insn) == CALL_INSN)
7834 {
7835 invalidate_for_call ();
7836 skipped_writes_memory = everything;
7837 }
7838
7839 note_stores (PATTERN (insn), invalidate_skipped_set);
7840 invalidate_from_clobbers (&skipped_writes_memory, PATTERN (insn));
7841 }
7842 }
7843 \f
7844 /* Used for communication between the following two routines; contains a
7845 value to be checked for modification. */
7846
7847 static rtx cse_check_loop_start_value;
7848
7849 /* If modifying X will modify the value in CSE_CHECK_LOOP_START_VALUE,
7850 indicate that fact by setting CSE_CHECK_LOOP_START_VALUE to 0. */
7851
7852 static void
7853 cse_check_loop_start (x, set)
7854 rtx x;
7855 rtx set;
7856 {
7857 if (cse_check_loop_start_value == 0
7858 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
7859 return;
7860
7861 if ((GET_CODE (x) == MEM && GET_CODE (cse_check_loop_start_value) == MEM)
7862 || reg_overlap_mentioned_p (x, cse_check_loop_start_value))
7863 cse_check_loop_start_value = 0;
7864 }
7865
7866 /* X is a SET or CLOBBER contained in INSN that was found near the start of
7867 a loop that starts with the label at LOOP_START.
7868
7869 If X is a SET, we see if its SET_SRC is currently in our hash table.
7870 If so, we see if it has a value equal to some register used only in the
7871 loop exit code (as marked by jump.c).
7872
7873 If those two conditions are true, we search backwards from the start of
7874 the loop to see if that same value was loaded into a register that still
7875 retains its value at the start of the loop.
7876
7877 If so, we insert an insn after the load to copy the destination of that
7878 load into the equivalent register and (try to) replace our SET_SRC with that
7879 register.
7880
7881 In any event, we invalidate whatever this SET or CLOBBER modifies. */
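/* Schematically (hypothetical pseudo-register numbers, with
   REG_LOOP_TEST_P set on (reg 80), which is known equivalent to EXPR):

       (set (reg 70) EXPR)             (set (reg 70) EXPR)
                                       (set (reg 80) (reg 70))   <-- added
    L:                              L:
       (set (reg 90) EXPR)      =>     (set (reg 90) (reg 80))  */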
7882
7883 static void
7884 cse_set_around_loop (x, insn, loop_start)
7885 rtx x;
7886 rtx insn;
7887 rtx loop_start;
7888 {
7889 struct table_elt *src_elt;
7890 static struct write_data init = {0, 0, 0, 0};
7891 struct write_data writes_memory;
7892
7893 writes_memory = init;
7894
7895 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
7896 are setting PC or CC0 or whose SET_SRC is already a register. */
7897 if (GET_CODE (x) == SET
7898 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
7899 && GET_CODE (SET_SRC (x)) != REG)
7900 {
7901 src_elt = lookup (SET_SRC (x),
7902 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
7903 GET_MODE (SET_DEST (x)));
7904
7905 if (src_elt)
7906 for (src_elt = src_elt->first_same_value; src_elt;
7907 src_elt = src_elt->next_same_value)
7908 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
7909 && COST (src_elt->exp) < COST (SET_SRC (x)))
7910 {
7911 rtx p, set;
7912
7913 /* Look for an insn in front of LOOP_START that sets
7914 something in the desired mode to SET_SRC (x) before we hit
7915 a label or CALL_INSN. */
7916
7917 for (p = prev_nonnote_insn (loop_start);
7918 p && GET_CODE (p) != CALL_INSN
7919 && GET_CODE (p) != CODE_LABEL;
7920 p = prev_nonnote_insn (p))
7921 if ((set = single_set (p)) != 0
7922 && GET_CODE (SET_DEST (set)) == REG
7923 && GET_MODE (SET_DEST (set)) == src_elt->mode
7924 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
7925 {
7926 /* We now have to ensure that nothing between P
7927 and LOOP_START modified anything referenced in
7928 SET_SRC (x). We know that nothing within the loop
7929 can modify it, or we would have invalidated it in
7930 the hash table. */
7931 rtx q;
7932
7933 cse_check_loop_start_value = SET_SRC (x);
7934 for (q = p; q != loop_start; q = NEXT_INSN (q))
7935 if (GET_RTX_CLASS (GET_CODE (q)) == 'i')
7936 note_stores (PATTERN (q), cse_check_loop_start);
7937
7938 /* If nothing was changed and we can replace our
7939 SET_SRC, add an insn after P to copy its destination
7940 to what we will be replacing SET_SRC with. */
7941 if (cse_check_loop_start_value
7942 && validate_change (insn, &SET_SRC (x),
7943 src_elt->exp, 0))
7944 emit_insn_after (gen_move_insn (src_elt->exp,
7945 SET_DEST (set)),
7946 p);
7947 break;
7948 }
7949 }
7950 }
7951
7952 /* Now invalidate anything modified by X. */
7953 note_mem_written (SET_DEST (x), &writes_memory);
7954
7955 if (writes_memory.var)
7956 invalidate_memory (&writes_memory);
7957
7958 /* See comment on similar code in cse_insn for explanation of these tests. */
7959 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
7960 || (GET_CODE (SET_DEST (x)) == MEM && ! writes_memory.all
7961 && ! cse_rtx_addr_varies_p (SET_DEST (x))))
7962 invalidate (SET_DEST (x), VOIDmode);
7963 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
7964 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
7965 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
7966 }
7967 \f
7968 /* Find the end of INSN's basic block and return its range,
7969 the total number of SETs in all the insns of the block, the last insn of the
7970 block, and the branch path.
7971
7972 The branch path indicates which branches should be followed. If a non-zero
7973 path size is specified, the block should be rescanned and a different set
7974 of branches will be taken. The branch path is only used if
7975 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
7976
7977 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
7978 used to describe the block. It is filled in with the information about
7979 the current block. The incoming structure's branch path, if any, is used
7980 to construct the output branch path. */
7981
7982 void
7983 cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
7984 rtx insn;
7985 struct cse_basic_block_data *data;
7986 int follow_jumps;
7987 int after_loop;
7988 int skip_blocks;
7989 {
7990 rtx p = insn, q;
7991 int nsets = 0;
7992 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
7993 rtx next = GET_RTX_CLASS (GET_CODE (insn)) == 'i' ? insn : next_real_insn (insn);
7994 int path_size = data->path_size;
7995 int path_entry = 0;
7996 int i;
7997
7998 /* Update the previous branch path, if any. If the last branch was
7999 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
8000 shorten the path by one and look at the previous branch. We know that
8001 at least one branch must have been taken if PATH_SIZE is non-zero. */
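/* (Illustration, T = TAKEN, N = NOT_TAKEN: over successive rescans a
   two-branch path evolves as T T -> T N -> N -> empty, i.e. the tree
   of followed branches is explored depth-first.)  */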
8002 while (path_size > 0)
8003 {
8004 if (data->path[path_size - 1].status != NOT_TAKEN)
8005 {
8006 data->path[path_size - 1].status = NOT_TAKEN;
8007 break;
8008 }
8009 else
8010 path_size--;
8011 }
8012
8013 /* Scan to end of this basic block. */
8014 while (p && GET_CODE (p) != CODE_LABEL)
8015 {
8016 /* Don't cse out the end of a loop. This makes a difference
8017 only for the unusual loops that always execute at least once;
8018 all other loops have labels there so we will stop in any case.
8019 Cse'ing out the end of the loop is dangerous because it
8020 might cause an invariant expression inside the loop
8021 to be reused after the end of the loop. This would make it
8022 hard to move the expression out of the loop in loop.c,
8023 especially if it is one of several equivalent expressions
8024 and loop.c would like to eliminate it.
8025
8026 If we are running after loop.c has finished, we can ignore
8027 the NOTE_INSN_LOOP_END. */
8028
8029 if (! after_loop && GET_CODE (p) == NOTE
8030 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
8031 break;
8032
8033 /* Don't cse over a call to setjmp; on some machines (e.g., the VAX)
8034 the regs restored by the longjmp come from
8035 a later time than the setjmp. */
8036 if (GET_CODE (p) == NOTE
8037 && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
8038 break;
8039
8040 /* A PARALLEL can have lots of SETs in it,
8041 especially if it is really an ASM_OPERANDS. */
8042 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
8043 && GET_CODE (PATTERN (p)) == PARALLEL)
8044 nsets += XVECLEN (PATTERN (p), 0);
8045 else if (GET_CODE (p) != NOTE)
8046 nsets += 1;
8047
8048 /* Ignore insns made by CSE; they cannot affect the boundaries of
8049 the basic block. */
8050
8051 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
8052 high_cuid = INSN_CUID (p);
8053 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
8054 low_cuid = INSN_CUID (p);
8055
8056 /* See if this insn is in our branch path. If it is and we are to
8057 take it, do so. */
8058 if (path_entry < path_size && data->path[path_entry].branch == p)
8059 {
8060 if (data->path[path_entry].status != NOT_TAKEN)
8061 p = JUMP_LABEL (p);
8062
8063 /* Point to next entry in path, if any. */
8064 path_entry++;
8065 }
8066
8067 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
8068 was specified, we haven't reached our maximum path length, there are
8069 insns following the target of the jump, this is the only use of the
8070 jump label, and the target label is preceded by a BARRIER.
8071
8072 Alternatively, we can follow the jump if it branches around a
8073 block of code and there are no other branches into the block.
8074 In this case invalidate_skipped_block will be called to invalidate any
8075 registers set in the block when following the jump. */
8076
8077 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
8078 && GET_CODE (p) == JUMP_INSN
8079 && GET_CODE (PATTERN (p)) == SET
8080 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
8081 && LABEL_NUSES (JUMP_LABEL (p)) == 1
8082 && NEXT_INSN (JUMP_LABEL (p)) != 0)
8083 {
8084 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
8085 if ((GET_CODE (q) != NOTE
8086 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
8087 || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
8088 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
8089 break;
8090
8091 /* If we ran into a BARRIER, this code is an extension of the
8092 basic block when the branch is taken. */
8093 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
8094 {
8095 /* Don't allow ourselves to keep walking around an
8096 always-executed loop. */
8097 if (next_real_insn (q) == next)
8098 {
8099 p = NEXT_INSN (p);
8100 continue;
8101 }
8102
8103 /* Similarly, don't put a branch in our path more than once. */
8104 for (i = 0; i < path_entry; i++)
8105 if (data->path[i].branch == p)
8106 break;
8107
8108 if (i != path_entry)
8109 break;
8110
8111 data->path[path_entry].branch = p;
8112 data->path[path_entry++].status = TAKEN;
8113
8114 /* This branch now ends our path. It was possible that we
8115 didn't see this branch the last time around (when the
8116 insn in front of the target was a JUMP_INSN that was
8117 turned into a no-op). */
8118 path_size = path_entry;
8119
8120 p = JUMP_LABEL (p);
8121 /* Mark block so we won't scan it again later. */
8122 PUT_MODE (NEXT_INSN (p), QImode);
8123 }
8124 /* Detect a branch around a block of code. */
8125 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
8126 {
8127 register rtx tmp;
8128
8129 if (next_real_insn (q) == next)
8130 {
8131 p = NEXT_INSN (p);
8132 continue;
8133 }
8134
8135 for (i = 0; i < path_entry; i++)
8136 if (data->path[i].branch == p)
8137 break;
8138
8139 if (i != path_entry)
8140 break;
8141
8142 /* This is no_labels_between_p (p, q) with an added check for
8143 reaching the end of a function (in case Q precedes P). */
8144 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
8145 if (GET_CODE (tmp) == CODE_LABEL)
8146 break;
8147
8148 if (tmp == q)
8149 {
8150 data->path[path_entry].branch = p;
8151 data->path[path_entry++].status = AROUND;
8152
8153 path_size = path_entry;
8154
8155 p = JUMP_LABEL (p);
8156 /* Mark block so we won't scan it again later. */
8157 PUT_MODE (NEXT_INSN (p), QImode);
8158 }
8159 }
8160 }
8161 p = NEXT_INSN (p);
8162 }
8163
8164 data->low_cuid = low_cuid;
8165 data->high_cuid = high_cuid;
8166 data->nsets = nsets;
8167 data->last = p;
8168
8169 /* If none of the jumps in the path were taken, set our path length to zero
8170 so a rescan won't be done. */
8171 for (i = path_size - 1; i >= 0; i--)
8172 if (data->path[i].status != NOT_TAKEN)
8173 break;
8174
8175 if (i == -1)
8176 data->path_size = 0;
8177 else
8178 data->path_size = path_size;
8179
8180 /* End the current branch path. */
8181 data->path[path_size].branch = 0;
8182 }
8183 \f
8184 /* Perform cse on the instructions of a function.
8185 F is the first instruction.
8186 NREGS is one plus the highest pseudo-reg number used in the function.
8187
8188 AFTER_LOOP is 1 if this is the cse call done after loop optimization
8189 (only if -frerun-cse-after-loop).
8190
8191 Returns 1 if jump_optimize should be redone due to simplifications
8192 in conditional jump instructions. */
8193
8194 int
8195 cse_main (f, nregs, after_loop, file)
8196 rtx f;
8197 int nregs;
8198 int after_loop;
8199 FILE *file;
8200 {
8201 struct cse_basic_block_data val;
8202 register rtx insn = f;
8203 register int i;
8204
8205 cse_jumps_altered = 0;
8206 constant_pool_entries_cost = 0;
8207 val.path_size = 0;
8208
8209 init_recog ();
8210
8211 max_reg = nregs;
8212
8213 all_minus_one = (int *) alloca (nregs * sizeof (int));
8214 consec_ints = (int *) alloca (nregs * sizeof (int));
8215
8216 for (i = 0; i < nregs; i++)
8217 {
8218 all_minus_one[i] = -1;
8219 consec_ints[i] = i;
8220 }
8221
8222 reg_next_eqv = (int *) alloca (nregs * sizeof (int));
8223 reg_prev_eqv = (int *) alloca (nregs * sizeof (int));
8224 reg_qty = (int *) alloca (nregs * sizeof (int));
8225 reg_in_table = (int *) alloca (nregs * sizeof (int));
8226 reg_tick = (int *) alloca (nregs * sizeof (int));
8227
8228 #ifdef LOAD_EXTEND_OP
8229
8230 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
8231 and change the code and mode as appropriate. */
8232 memory_extend_rtx = gen_rtx (ZERO_EXTEND, VOIDmode, 0);
8233 #endif
8234
8235 /* Discard all the free elements of the previous function
8236 since they are allocated in the temporary obstack. */
8237 bzero ((char *) table, sizeof table);
8238 free_element_chain = 0;
8239 n_elements_made = 0;
8240
8241 /* Find the largest uid. */
8242
8243 max_uid = get_max_uid ();
8244 uid_cuid = (int *) alloca ((max_uid + 1) * sizeof (int));
8245 bzero ((char *) uid_cuid, (max_uid + 1) * sizeof (int));
8246
8247 /* Compute the mapping from uids to cuids.
8248 CUIDs are numbers assigned to insns, like uids,
8249 except that cuids increase monotonically through the code.
8250 Don't assign cuids to line-number NOTEs, so that the distance in cuids
8251 between two insns is not affected by -g. */
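/* (E.g., the sequence insn A, line-number note, insn B gets cuids
   1, 1, 2: the note shares A's cuid, so the distance from A to B
   is the same with or without -g.)  */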
8252
8253 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
8254 {
8255 if (GET_CODE (insn) != NOTE
8256 || NOTE_LINE_NUMBER (insn) < 0)
8257 INSN_CUID (insn) = ++i;
8258 else
8259 /* Give a line number note the same cuid as preceding insn. */
8260 INSN_CUID (insn) = i;
8261 }
8262
8263 /* Initialize which registers are clobbered by calls. */
8264
8265 CLEAR_HARD_REG_SET (regs_invalidated_by_call);
8266
8267 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8268 if ((call_used_regs[i]
8269 /* Used to check !fixed_regs[i] here, but that isn't safe;
8270 fixed regs are still call-clobbered, and sched can get
8271 confused if they can "live across calls".
8272
8273 The frame pointer is always preserved across calls. The arg
8274 pointer is preserved if it is fixed. The stack pointer usually is, unless
8275 RETURN_POPS_ARGS, in which case an explicit CLOBBER
8276 will be present. If we are generating PIC code, the PIC offset
8277 table register is preserved across calls. */
8278
8279 && i != STACK_POINTER_REGNUM
8280 && i != FRAME_POINTER_REGNUM
8281 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
8282 && i != HARD_FRAME_POINTER_REGNUM
8283 #endif
8284 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
8285 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
8286 #endif
8287 #if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
8288 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
8289 #endif
8290 )
8291 || global_regs[i])
8292 SET_HARD_REG_BIT (regs_invalidated_by_call, i);
8293
8294 /* Loop over basic blocks.
8295 Compute the maximum number of qty's needed for each basic block
8296 (which is 2 for each SET). */
8297 insn = f;
8298 while (insn)
8299 {
8300 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
8301 flag_cse_skip_blocks);
8302
8303 /* If this basic block was already processed or has no sets, skip it. */
8304 if (val.nsets == 0 || GET_MODE (insn) == QImode)
8305 {
8306 PUT_MODE (insn, VOIDmode);
8307 insn = (val.last ? NEXT_INSN (val.last) : 0);
8308 val.path_size = 0;
8309 continue;
8310 }
8311
8312 cse_basic_block_start = val.low_cuid;
8313 cse_basic_block_end = val.high_cuid;
8314 max_qty = val.nsets * 2;
8315
8316 if (file)
8317 fprintf (file, ";; Processing block from %d to %d, %d sets.\n",
8318 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
8319 val.nsets);
8320
8321 /* Make MAX_QTY bigger to give us room to optimize
8322 past the end of this basic block, if that should prove useful. */
8323 if (max_qty < 500)
8324 max_qty = 500;
8325
8326 max_qty += max_reg;
8327
8328 /* If this basic block is being extended by following certain jumps,
8329 (see `cse_end_of_basic_block'), we reprocess the code from the start.
8330 Otherwise, we start after this basic block. */
8331 if (val.path_size > 0)
8332 cse_basic_block (insn, val.last, val.path, 0);
8333 else
8334 {
8335 int old_cse_jumps_altered = cse_jumps_altered;
8336 rtx temp;
8337
8338 /* When cse changes a conditional jump to an unconditional
8339 jump, we want to reprocess the block, since it will give
8340 us a new branch path to investigate. */
8341 cse_jumps_altered = 0;
8342 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
8343 if (cse_jumps_altered == 0
8344 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
8345 insn = temp;
8346
8347 cse_jumps_altered |= old_cse_jumps_altered;
8348 }
8349
8350 #ifdef USE_C_ALLOCA
8351 alloca (0);
8352 #endif
8353 }
8354
8355 /* Tell refers_to_mem_p that qty_const info is not available. */
8356 qty_const = 0;
8357
8358 if (max_elements_made < n_elements_made)
8359 max_elements_made = n_elements_made;
8360
8361 return cse_jumps_altered;
8362 }
8363
8364 /* Process a single basic block. FROM and TO are the limits of the basic
8365 block. NEXT_BRANCH points to the branch path when following jumps or
8366 a null path when not following jumps.
8367
8368 AROUND_LOOP is non-zero if we are to try to cse around to the start of a
8369 loop. This is true when we are being called for the last time on a
8370 block and this CSE pass is before loop.c. */
8371
8372 static rtx
8373 cse_basic_block (from, to, next_branch, around_loop)
8374 register rtx from, to;
8375 struct branch_path *next_branch;
8376 int around_loop;
8377 {
8378 register rtx insn;
8379 int to_usage = 0;
8380 int in_libcall_block = 0;
8381
8382 /* Each of these arrays is undefined before max_reg, so only allocate
8383 the space actually needed and adjust the start below. */
8384
8385 qty_first_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8386 qty_last_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8387 qty_mode = (enum machine_mode *) alloca ((max_qty - max_reg) * sizeof (enum machine_mode));
8388 qty_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8389 qty_const_insn = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8390 qty_comparison_code
8391 = (enum rtx_code *) alloca ((max_qty - max_reg) * sizeof (enum rtx_code));
8392 qty_comparison_qty = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8393 qty_comparison_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8394
8395 qty_first_reg -= max_reg;
8396 qty_last_reg -= max_reg;
8397 qty_mode -= max_reg;
8398 qty_const -= max_reg;
8399 qty_const_insn -= max_reg;
8400 qty_comparison_code -= max_reg;
8401 qty_comparison_qty -= max_reg;
8402 qty_comparison_const -= max_reg;
8403
8404 new_basic_block ();
8405
8406 /* TO might be a label. If so, protect it from being deleted. */
8407 if (to != 0 && GET_CODE (to) == CODE_LABEL)
8408 ++LABEL_NUSES (to);
8409
8410 for (insn = from; insn != to; insn = NEXT_INSN (insn))
8411 {
8412 register enum rtx_code code;
8413
8414 /* See if this is a branch that is part of the path. If so, and it is
8415 to be taken, do so. */
8416 if (next_branch->branch == insn)
8417 {
8418 enum taken status = next_branch++->status;
8419 if (status != NOT_TAKEN)
8420 {
8421 if (status == TAKEN)
8422 record_jump_equiv (insn, 1);
8423 else
8424 invalidate_skipped_block (NEXT_INSN (insn));
8425
8426 /* Set the last insn as the jump insn; it doesn't affect cc0.
8427 Then follow this branch. */
8428 #ifdef HAVE_cc0
8429 prev_insn_cc0 = 0;
8430 #endif
8431 prev_insn = insn;
8432 insn = JUMP_LABEL (insn);
8433 continue;
8434 }
8435 }
8436
8437 code = GET_CODE (insn);
8438 if (GET_MODE (insn) == QImode)
8439 PUT_MODE (insn, VOIDmode);
8440
8441 if (GET_RTX_CLASS (code) == 'i')
8442 {
8443 /* Process notes first so we have all notes in canonical forms when
8444 looking for duplicate operations. */
8445
8446 if (REG_NOTES (insn))
8447 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
8448
8449 /* Track when we are inside a LIBCALL block. Inside such a block,
8450 we do not want to record destinations. The last insn of a
8451 LIBCALL block is not considered to be part of the block, since
8452 its destination is the result of the block and hence should be
8453 recorded. */
8454
8455 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
8456 in_libcall_block = 1;
8457 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
8458 in_libcall_block = 0;
8459
8460 cse_insn (insn, in_libcall_block);
8461 }
8462
8463 /* If INSN is now an unconditional jump, skip to the end of our
8464 basic block by pretending that we just did the last insn in the
8465 basic block. If we are jumping to the end of our block, show
8466 that we can have one usage of TO. */
8467
8468 if (simplejump_p (insn))
8469 {
8470 if (to == 0)
8471 return 0;
8472
8473 if (JUMP_LABEL (insn) == to)
8474 to_usage = 1;
8475
8476 /* Maybe TO was deleted because the jump is unconditional.
8477 If so, there is nothing left in this basic block. */
8478 /* ??? Perhaps it would be smarter to set TO
8479 to whatever follows this insn,
8480 and pretend the basic block had always ended here. */
8481 if (INSN_DELETED_P (to))
8482 break;
8483
8484 insn = PREV_INSN (to);
8485 }
8486
8487 /* See if it is ok to keep on going past the label
8488 which used to end our basic block. Remember that we incremented
8489 the count of that label, so we decrement it here. If we made
8490 a jump unconditional, TO_USAGE will be one; in that case, we don't
8491 want to count the use in that jump. */
8492
8493 if (to != 0 && NEXT_INSN (insn) == to
8494 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
8495 {
8496 struct cse_basic_block_data val;
8497 rtx prev;
8498
8499 insn = NEXT_INSN (to);
8500
8501 if (LABEL_NUSES (to) == 0)
8502 insn = delete_insn (to);
8503
8504 /* If TO was the last insn in the function, we are done. */
8505 if (insn == 0)
8506 return 0;
8507
8508 /* If TO was preceded by a BARRIER we are done with this block
8509 because it has no continuation. */
8510 prev = prev_nonnote_insn (to);
8511 if (prev && GET_CODE (prev) == BARRIER)
8512 return insn;
8513
8514 /* Find the end of the following block. Note that we won't be
8515 following branches in this case. */
8516 to_usage = 0;
8517 val.path_size = 0;
8518 cse_end_of_basic_block (insn, &val, 0, 0, 0);
8519
8520 /* If the tables we allocated have enough space left
8521 to handle all the SETs in the next basic block,
8522 continue through it. Otherwise, return,
8523 and that block will be scanned individually. */
8524 if (val.nsets * 2 + next_qty > max_qty)
8525 break;
8526
8527 cse_basic_block_start = val.low_cuid;
8528 cse_basic_block_end = val.high_cuid;
8529 to = val.last;
8530
8531 /* Prevent TO from being deleted if it is a label. */
8532 if (to != 0 && GET_CODE (to) == CODE_LABEL)
8533 ++LABEL_NUSES (to);
8534
8535 /* Back up so we process the first insn in the extension. */
8536 insn = PREV_INSN (insn);
8537 }
8538 }
8539
8540 if (next_qty > max_qty)
8541 abort ();
8542
8543 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
8544 the previous insn is the only insn that branches to the head of a loop,
8545 we can cse into the loop. Don't do this if we changed the jump
8546 structure of a loop unless we aren't going to be following jumps. */
8547
8548 if ((cse_jumps_altered == 0
8549 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
8550 && around_loop && to != 0
8551 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
8552 && GET_CODE (PREV_INSN (to)) == JUMP_INSN
8553 && JUMP_LABEL (PREV_INSN (to)) != 0
8554 && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
8555 cse_around_loop (JUMP_LABEL (PREV_INSN (to)));
8556
8557 return to ? NEXT_INSN (to) : 0;
8558 }
8559 \f
8560 /* Count the number of times registers are used (not set) in X.
8561 COUNTS is an array in which we accumulate the count, INCR is how much
8562 we count each register usage.
8563
8564 Don't count a usage of DEST, which is the SET_DEST of a SET which
8565 contains X in its SET_SRC. This is because such a SET does not
8566 modify the liveness of DEST. */
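/* (E.g., in (set (reg 100) (plus (reg 100) (reg 101))) the use of
   (reg 100) inside the source is not counted, so if nothing else
   uses (reg 100) the insn can be deleted as a dead store.)  */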
8567
8568 static void
8569 count_reg_usage (x, counts, dest, incr)
8570 rtx x;
8571 int *counts;
8572 rtx dest;
8573 int incr;
8574 {
8575 enum rtx_code code;
8576 char *fmt;
8577 int i, j;
8578
8579 if (x == 0)
8580 return;
8581
8582 switch (code = GET_CODE (x))
8583 {
8584 case REG:
8585 if (x != dest)
8586 counts[REGNO (x)] += incr;
8587 return;
8588
8589 case PC:
8590 case CC0:
8591 case CONST:
8592 case CONST_INT:
8593 case CONST_DOUBLE:
8594 case SYMBOL_REF:
8595 case LABEL_REF:
8596 case CLOBBER:
8597 return;
8598
8599 case SET:
8600 /* Unless we are setting a REG, count everything in SET_DEST. */
8601 if (GET_CODE (SET_DEST (x)) != REG)
8602 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
8603
8604 /* If SRC has side-effects, then we can't delete this insn, so the
8605 usage of SET_DEST inside SRC counts.
8606
8607 ??? Strictly speaking, we might be preserving this insn
8608 because some other SET has side-effects, but that's hard
8609 to do and can't happen now. */
8610 count_reg_usage (SET_SRC (x), counts,
8611 side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
8612 incr);
8613 return;
8614
8615 case CALL_INSN:
8616 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
8617
8618 /* ... falls through ... */
8619 case INSN:
8620 case JUMP_INSN:
8621 count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);
8622
8623 /* Things used in a REG_EQUAL note aren't dead since loop may try to
8624 use them. */
8625
8626 count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
8627 return;
8628
8629 case EXPR_LIST:
8630 case INSN_LIST:
8631 if (REG_NOTE_KIND (x) == REG_EQUAL
8632 || GET_CODE (XEXP (x, 0)) == USE
8633 count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
8634 count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
8635 return;
8636 }
8637
8638 fmt = GET_RTX_FORMAT (code);
8639 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8640 {
8641 if (fmt[i] == 'e')
8642 count_reg_usage (XEXP (x, i), counts, dest, incr);
8643 else if (fmt[i] == 'E')
8644 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8645 count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
8646 }
8647 }
8648 \f
8649 /* Scan all the insns and delete any that are dead; i.e., they store a register
8650 that is never used or they copy a register to itself.
8651
8652 This is used to remove insns made obviously dead by cse. It improves the
8653 heuristics in loop since it won't try to move dead invariants out of loops
8654 or make givs for dead quantities. The remaining passes of the compilation
8655 are also sped up. */
8656
8657 void
8658 delete_dead_from_cse (insns, nreg)
8659 rtx insns;
8660 int nreg;
8661 {
8662 int *counts = (int *) alloca (nreg * sizeof (int));
8663 rtx insn, prev;
8664 rtx tem;
8665 int i;
8666 int in_libcall = 0;
8667
8668 /* First count the number of times each register is used. */
8669 bzero ((char *) counts, sizeof (int) * nreg);
8670 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
8671 count_reg_usage (insn, counts, NULL_RTX, 1);
8672
8673 /* Go from the last insn to the first and delete insns that only set unused
8674 registers or copy a register to itself. As we delete an insn, remove
8675 usage counts for registers it uses. */
8676 for (insn = prev_real_insn (get_last_insn ()); insn; insn = prev)
8677 {
8678 int live_insn = 0;
8679
8680 prev = prev_real_insn (insn);
8681
8682 /* Don't delete any insns that are part of a libcall block.
8683 Flow or loop might get confused if we did that. Remember
8684 that we are scanning backwards. */
8685 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
8686 in_libcall = 1;
8687
8688 if (in_libcall)
8689 live_insn = 1;
8690 else if (GET_CODE (PATTERN (insn)) == SET)
8691 {
8692 if (GET_CODE (SET_DEST (PATTERN (insn))) == REG
8693 && SET_DEST (PATTERN (insn)) == SET_SRC (PATTERN (insn)))
8694 ;
8695
8696 #ifdef HAVE_cc0
8697 else if (GET_CODE (SET_DEST (PATTERN (insn))) == CC0
8698 && ! side_effects_p (SET_SRC (PATTERN (insn)))
8699 && ((tem = next_nonnote_insn (insn)) == 0
8700 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
8701 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
8702 ;
8703 #endif
8704 else if (GET_CODE (SET_DEST (PATTERN (insn))) != REG
8705 || REGNO (SET_DEST (PATTERN (insn))) < FIRST_PSEUDO_REGISTER
8706 || counts[REGNO (SET_DEST (PATTERN (insn)))] != 0
8707 || side_effects_p (SET_SRC (PATTERN (insn))))
8708 live_insn = 1;
8709 }
8710 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
8711 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
8712 {
8713 rtx elt = XVECEXP (PATTERN (insn), 0, i);
8714
8715 if (GET_CODE (elt) == SET)
8716 {
8717 if (GET_CODE (SET_DEST (elt)) == REG
8718 && SET_DEST (elt) == SET_SRC (elt))
8719 ;
8720
8721 #ifdef HAVE_cc0
8722 else if (GET_CODE (SET_DEST (elt)) == CC0
8723 && ! side_effects_p (SET_SRC (elt))
8724 && ((tem = next_nonnote_insn (insn)) == 0
8725 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
8726 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
8727 ;
8728 #endif
8729 else if (GET_CODE (SET_DEST (elt)) != REG
8730 || REGNO (SET_DEST (elt)) < FIRST_PSEUDO_REGISTER
8731 || counts[REGNO (SET_DEST (elt))] != 0
8732 || side_effects_p (SET_SRC (elt)))
8733 live_insn = 1;
8734 }
8735 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
8736 live_insn = 1;
8737 }
8738 else
8739 live_insn = 1;
8740
8741 /* If this is a dead insn, delete it and show registers in it aren't
8742 being used. */
8743
8744 if (! live_insn)
8745 {
8746 count_reg_usage (insn, counts, NULL_RTX, -1);
8747 delete_insn (insn);
8748 }
8749
8750 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
8751 in_libcall = 0;
8752 }
8753 }