1 /* Common subexpression elimination for GNU compiler.
2 Copyright (C) 1987, 88, 89, 92-6, 1997 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 #include "config.h"
23 /* Must precede rtl.h for FFS. */
24 #include <stdio.h>
25
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "flags.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "recog.h"
33
34 #include <setjmp.h>
35
36 /* The basic idea of common subexpression elimination is to go
37 through the code, keeping a record of expressions that would
38 have the same value at the current scan point, and replacing
39 expressions encountered with the cheapest equivalent expression.
40
41 It is too complicated to keep track of the different possibilities
42 when control paths merge; so, at each label, we forget all that is
43 known and start fresh. This can be described as processing each
44 basic block separately. Note, however, that these are not quite
45 the same as the basic blocks found by a later pass and used for
46 data flow analysis and register packing. We do not need to start fresh
47 after a conditional jump instruction if there is no label there.
48
49 We use two data structures to record the equivalent expressions:
50 a hash table for most expressions, and several vectors together
51 with "quantity numbers" to record equivalent (pseudo) registers.
52
53 The use of the special data structure for registers is desirable
54 because it is faster. It is possible because register references
55 contain a fairly small number, the register number, taken from
56 a contiguously allocated series, and two register references are
57 identical if they have the same number. General expressions
58 do not have any such thing, so the only way to retrieve the
59 information recorded on an expression other than a register
60 is to keep it in a hash table.
61
62 Registers and "quantity numbers":
63
64 At the start of each basic block, all of the (hardware and pseudo)
65 registers used in the function are given distinct quantity
66 numbers to indicate their contents. During scan, when the code
67 copies one register into another, we copy the quantity number.
68 When a register is loaded in any other way, we allocate a new
69 quantity number to describe the value generated by this operation.
70 `reg_qty' records what quantity a register is currently thought
71 of as containing.
72
73 All real quantity numbers are greater than or equal to `max_reg'.
74 If register N has not been assigned a quantity, reg_qty[N] will equal N.
75
76 Quantity numbers below `max_reg' do not exist and none of the `qty_...'
77 variables should be referenced with an index below `max_reg'.
78
79 We also maintain a bidirectional chain of registers for each
80 quantity number. `qty_first_reg', `qty_last_reg',
81 `reg_next_eqv' and `reg_prev_eqv' hold these chains.
82
83 The first register in a chain is the one whose lifespan is least local.
84 Among equals, it is the one that was seen first.
85 We replace any equivalent register with that one.
86
87 If two registers have the same quantity number, REG expressions with
88 the quantity's `qty_mode' must be in the hash table for both registers
89 and must be in the same class.
90
91 The converse is not true. Since hard registers may be referenced in
92 any mode, two REG expressions might be equivalent in the hash table
93 but not have the same quantity number if the mode of one register's
94 quantity differs from the mode of those expressions.
95
96 Constants and quantity numbers
97
98 When a quantity has a known constant value, that value is stored
99 in the appropriate element of qty_const. This is in addition to
100 putting the constant in the hash table as is usual for non-regs.
101
102 Whether a reg or a constant is preferred is determined by the configuration
103 macro CONST_COSTS and will often depend on the constant value. In any
104 event, expressions containing constants can be simplified by fold_rtx.
105
106 When a quantity has a known nearly constant value (such as an address
107 of a stack slot), that value is stored in the appropriate element
108 of qty_const.
109
110 Integer constants don't have a machine mode. However, cse
111 determines the intended machine mode from the destination
112 of the instruction that moves the constant. The machine mode
113 is recorded in the hash table along with the actual RTL
114 constant expression so that different modes are kept separate.
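
   For example (a hypothetical pair of insns), after

       (set (reg:SI 100) (const_int 4))
       (set (reg:HI 101) (const_int 4))

   the single (const_int 4) rtx is entered in the hash table twice,
   once recorded with SImode and once with HImode, and the two
   entries are kept distinct.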
115
116 Other expressions:
117
118 To record known equivalences among expressions in general
119 we use a hash table called `table'. It has a fixed number of buckets
120 that contain chains of `struct table_elt' elements for expressions.
121 These chains connect the elements whose expressions have the same
122 hash codes.
123
124 Other chains through the same elements connect the elements which
125 currently have equivalent values.
126
127 Register references in an expression are canonicalized before hashing
128 the expression. This is done using `reg_qty' and `qty_first_reg'.
129 The hash code of a register reference is computed using the quantity
130 number, not the register number.
131
132 When the value of an expression changes, it is necessary to remove from the
133 hash table not just that expression but all expressions whose values
134 could be different as a result.
135
136 1. If the value changing is in memory, except in special cases
137 ANYTHING referring to memory could be changed. That is because
138 nobody knows where a pointer does not point.
139 The function `invalidate_memory' removes what is necessary.
140
141 The special cases are when the address is constant or is
142 a constant plus a fixed register such as the frame pointer
143 or a static chain pointer. When such addresses are stored in,
144 we can tell exactly which other such addresses must be invalidated
145 due to overlap. `invalidate' does this.
146 All expressions that refer to non-constant
147 memory addresses are also invalidated. `invalidate_memory' does this.
148
149 2. If the value changing is a register, all expressions
150 containing references to that register, and only those,
151 must be removed.
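
   As a sketch of case 1 above (assuming 4-byte SImode): a store into
   (mem:SI (plus (reg fp) (const_int -4))) need only invalidate
   entries that can overlap those four bytes, so an entry for
   (mem:SI (plus (reg fp) (const_int -8))) survives; a store through
   an arbitrary pseudo register, by contrast, invalidates every entry
   that refers to memory.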
152
153 Because searching the entire hash table for expressions that contain
154 a register is very slow, we try to figure out when it isn't necessary.
155 Precisely, this is necessary only when expressions have been
156 entered in the hash table using this register, and then the value has
157 changed, and then another expression wants to be added to refer to
158 the register's new value. This sequence of circumstances is rare
159 within any one basic block.
160
161 The vectors `reg_tick' and `reg_in_table' are used to detect this case.
162 reg_tick[i] is incremented whenever a value is stored in register i.
163 reg_in_table[i] holds -1 if no references to register i have been
164 entered in the table; otherwise, it contains the value reg_tick[i] had
165 when the references were entered. If we want to enter a reference
166 and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
167 Until we want to enter a new entry, the mere fact that the two vectors
168 don't match causes the stale entries to be ignored by any match attempt.
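
   An illustrative sequence: an expression mentioning register 7 is
   entered while reg_tick[7] == 2, so reg_in_table[7] becomes 2. A
   later store into register 7 bumps reg_tick[7] to 3; the stale entry
   is merely ignored until some new expression mentioning register 7
   is about to be entered, at which point the old references are
   scanned out and reg_in_table[7] is set to 3 again.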
169
170 Registers themselves are entered in the hash table as well as in
171 the equivalent-register chains. However, the vectors `reg_tick'
172 and `reg_in_table' do not apply to expressions which are simple
173 register references. These expressions are removed from the table
174 immediately when they become invalid, and this can be done even if
175 we do not immediately search for all the expressions that refer to
176 the register.
177
178 A CLOBBER rtx in an instruction invalidates its operand for further
179 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
180 invalidates everything that resides in memory.
181
182 Related expressions:
183
184 Constant expressions that differ only by an additive integer
185 are called related. When a constant expression is put in
186 the table, the related expression with no constant term
187 is also entered. These are made to point at each other
188 so that it is possible to find out if there exists any
189 register equivalent to an expression related to a given expression. */
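
/* An illustration of related values (a hedged sketch): when
   (const (plus (symbol_ref "x") (const_int 8))) is entered, the
   integer-free form (symbol_ref "x") is entered as well and the two
   are linked on a circular `related_value' chain, so a register
   already known to hold `x' can be found and used to compute `x'+8
   with a simple addition. */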
190
191 /* One plus largest register number used in this function. */
192
193 static int max_reg;
194
195 /* Length of vectors indexed by quantity number.
196 We know in advance we will not need a quantity number this big. */
197
198 static int max_qty;
199
200 /* Next quantity number to be allocated.
201 This is 1 + the largest number needed so far. */
202
203 static int next_qty;
204
205 /* Indexed by quantity number, gives the first (or last) register
206 in the chain of registers that currently contain this quantity. */
207
208 static int *qty_first_reg;
209 static int *qty_last_reg;
210
211 /* Indexed by quantity number, gives the mode of the quantity. */
212
213 static enum machine_mode *qty_mode;
214
215 /* Indexed by quantity number, gives the rtx of the constant value of the
216 quantity, or zero if it does not have a known value.
217 A sum of the frame pointer (or arg pointer) plus a constant
218 can also be entered here. */
219
220 static rtx *qty_const;
221
222 /* Indexed by qty number, gives the insn that stored the constant value
223 recorded in `qty_const'. */
224
225 static rtx *qty_const_insn;
226
227 /* The next three variables are used to track when a comparison between a
228 quantity and some constant or register has been passed. In that case, we
229 know the results of the comparison in case we see it again. These variables
230 record a comparison that is known to be true. */
231
232 /* Indexed by qty number, gives the rtx code of a comparison with a known
233 result involving this quantity. If none, it is UNKNOWN. */
234 static enum rtx_code *qty_comparison_code;
235
236 /* Indexed by qty number, gives the constant being compared against in a
237 comparison of known result. If no such comparison, it is undefined.
238 If the comparison is not with a constant, it is zero. */
239
240 static rtx *qty_comparison_const;
241
242 /* Indexed by qty number, gives the quantity being compared against in a
243 comparison of known result. If no such comparison, it is undefined.
244 If the comparison is not with a register, it is -1. */
245
246 static int *qty_comparison_qty;
247
248 #ifdef HAVE_cc0
249 /* For machines that have a CC0, we do not record its value in the hash
250 table since its use is guaranteed to be the insn immediately following
251 its definition and any other insn is presumed to invalidate it.
252
253 Instead, the variables below record the value last assigned to CC0. If it should
254 happen to be a constant, it is stored in preference to the actual
255 assigned value. In case it is a constant, we store the mode in which
256 the constant should be interpreted. */
257
258 static rtx prev_insn_cc0;
259 static enum machine_mode prev_insn_cc0_mode;
260 #endif
261
262 /* Previous actual insn. 0 if at first insn of basic block. */
263
264 static rtx prev_insn;
265
266 /* Insn being scanned. */
267
268 static rtx this_insn;
269
270 /* Indexed by register number, gives the quantity number
271 of the register's current contents. */
272
273 static int *reg_qty;
274
275 /* Indexed by register number, gives the number of the next (or
276 previous) register in the chain of registers sharing the same
277 value.
278
279 Or -1 if this register is at the end of the chain.
280
281 If reg_qty[N] == N, reg_next_eqv[N] is undefined. */
282
283 static int *reg_next_eqv;
284 static int *reg_prev_eqv;
285
286 /* Indexed by register number, gives the number of times
287 that register has been altered in the current basic block. */
288
289 static int *reg_tick;
290
291 /* Indexed by register number, gives the reg_tick value at which
292 rtx's containing this register are valid in the hash table.
293 If this does not equal the current reg_tick value, such expressions
294 existing in the hash table are invalid.
295 If this is -1, no expressions containing this register have been
296 entered in the table. */
297
298 static int *reg_in_table;
299
300 /* A HARD_REG_SET containing all the hard registers for which there is
301 currently a REG expression in the hash table. Note the difference
302 from the above variables, which indicate if the REG is mentioned in some
303 expression in the table. */
304
305 static HARD_REG_SET hard_regs_in_table;
306
307 /* A HARD_REG_SET containing all the hard registers that are invalidated
308 by a CALL_INSN. */
309
310 static HARD_REG_SET regs_invalidated_by_call;
311
312 /* Two vectors of ints:
313 one holding max_reg copies of -1; the other holding max_reg + 500
314 (an approximation of max_qty) elements, where element i contains i.
315 These are used to initialize various other vectors fast. */
316
317 static int *all_minus_one;
318 static int *consec_ints;
319
320 /* CUID of insn that starts the basic block currently being cse-processed. */
321
322 static int cse_basic_block_start;
323
324 /* CUID of insn that ends the basic block currently being cse-processed. */
325
326 static int cse_basic_block_end;
327
328 /* Vector mapping INSN_UIDs to cuids.
329 The cuids are like uids but increase monotonically in insn order.
330 We use them to see whether a reg is used outside a given basic block. */
331
332 static int *uid_cuid;
333
334 /* Highest UID in UID_CUID. */
335 static int max_uid;
336
337 /* Get the cuid of an insn. */
338
339 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
340
341 /* Nonzero if cse has altered conditional jump insns
342 in such a way that jump optimization should be redone. */
343
344 static int cse_jumps_altered;
345
346 /* Nonzero if we put a LABEL_REF into the hash table. Since we may have put
347 it into an INSN without a REG_LABEL, we have to rerun jump after CSE
348 to put in the note. */
349 static int recorded_label_ref;
350
351 /* canon_hash stores 1 in do_not_record
352 if it notices a reference to CC0, PC, or some other volatile
353 subexpression. */
354
355 static int do_not_record;
356
357 #ifdef LOAD_EXTEND_OP
358
359 /* Scratch rtl used when looking for load-extended copy of a MEM. */
360 static rtx memory_extend_rtx;
361 #endif
362
363 /* canon_hash stores 1 in hash_arg_in_memory
364 if it notices a reference to memory within the expression being hashed. */
365
366 static int hash_arg_in_memory;
367
368 /* canon_hash stores 1 in hash_arg_in_struct
369 if it notices a reference to memory that's part of a structure. */
370
371 static int hash_arg_in_struct;
372
373 /* The hash table contains buckets which are chains of `struct table_elt's,
374 each recording one expression's information.
375 That expression is in the `exp' field.
376
377 Those elements with the same hash code are chained in both directions
378 through the `next_same_hash' and `prev_same_hash' fields.
379
380 Each set of expressions with equivalent values
381 are on a two-way chain through the `next_same_value'
382 and `prev_same_value' fields, and all point with
383 the `first_same_value' field at the first element in
384 that chain. The chain is in order of increasing cost.
385 Each element's cost value is in its `cost' field.
386
387 The `in_memory' field is nonzero for elements that
388 involve any reference to memory. These elements are removed
389 whenever a write is done to an unidentified location in memory.
390 To be safe, we assume that a memory address is unidentified unless
391 the address is either a symbol constant or a constant plus
392 the frame pointer or argument pointer.
393
394 The `in_struct' field is nonzero for elements that
395 involve any reference to memory inside a structure or array.
396
397 The `related_value' field is used to connect related expressions
398 (that differ by adding an integer).
399 The related expressions are chained in a circular fashion.
400 `related_value' is zero for expressions for which this
401 chain is not useful.
402
403 The `cost' field stores the cost of this element's expression.
404
405 The `is_const' flag is set if the element is a constant (including
406 a fixed address).
407
408 The `flag' field is used as a temporary during some search routines.
409
410 The `mode' field is usually the same as GET_MODE (`exp'), but
411 if `exp' is a CONST_INT and has no machine mode then the `mode'
412 field is the mode it was being used as. Each constant is
413 recorded separately for each mode it is used with. */
414
415
416 struct table_elt
417 {
418 rtx exp;
419 struct table_elt *next_same_hash;
420 struct table_elt *prev_same_hash;
421 struct table_elt *next_same_value;
422 struct table_elt *prev_same_value;
423 struct table_elt *first_same_value;
424 struct table_elt *related_value;
425 int cost;
426 enum machine_mode mode;
427 char in_memory;
428 char in_struct;
429 char is_const;
430 char flag;
431 };
432
433 /* We don't want a lot of buckets, because we rarely have very many
434 things stored in the hash table, and a lot of buckets slows
435 down a lot of loops that happen frequently. */
436 #define NBUCKETS 31
437
438 /* Compute hash code of X in mode M. Special-case the case where X is a pseudo
439 register (hard registers may require `do_not_record' to be set). */
440
441 #define HASH(X, M) \
442 (GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER \
443 ? (((unsigned) REG << 7) + (unsigned) reg_qty[REGNO (X)]) % NBUCKETS \
444 : canon_hash (X, M) % NBUCKETS)
445
446 /* Determine whether register number N is considered a fixed register for CSE.
447 It is desirable to replace other regs with fixed regs, to reduce need for
448 non-fixed hard regs.
449 A reg wins if it is either the frame pointer or designated as fixed,
450 but not if it is an overlapping register. */
451 #ifdef OVERLAPPING_REGNO_P
452 #define FIXED_REGNO_P(N) \
453 (((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
454 || fixed_regs[N] || global_regs[N]) \
455 && ! OVERLAPPING_REGNO_P ((N)))
456 #else
457 #define FIXED_REGNO_P(N) \
458 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
459 || fixed_regs[N] || global_regs[N])
460 #endif
461
462 /* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
463 hard registers and pointers into the frame are the cheapest with a cost
464 of 0. Next come pseudos with a cost of 1 and other hard registers with
465 a cost of 2. Aside from these special cases, call `rtx_cost'. */
466
467 #define CHEAP_REGNO(N) \
468 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
469 || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM \
470 || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
471 || ((N) < FIRST_PSEUDO_REGISTER \
472 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
473
474 /* A register is cheap if it is a user variable assigned to the register
475 or if its register number always corresponds to a cheap register. */
476
477 #define CHEAP_REG(N) \
478 ((REG_USERVAR_P (N) && REGNO (N) < FIRST_PSEUDO_REGISTER) \
479 || CHEAP_REGNO (REGNO (N)))
480
481 #define COST(X) \
482 (GET_CODE (X) == REG \
483 ? (CHEAP_REG (X) ? 0 \
484 : REGNO (X) >= FIRST_PSEUDO_REGISTER ? 1 \
485 : 2) \
486 : notreg_cost (X))
487
488 /* Determine if the quantity number for register X represents a valid index
489 into the `qty_...' variables. */
490
491 #define REGNO_QTY_VALID_P(N) (reg_qty[N] != (N))
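
/* For example, immediately after `new_basic_block' every reg_qty[N]
   equals N (see the `consec_ints' initialization below), so no
   register has a valid quantity until `make_new_qty' or
   `make_regs_eqv' assigns it one. */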
492
493 static struct table_elt *table[NBUCKETS];
494
495 /* Chain of `struct table_elt's made so far for this function
496 but currently removed from the table. */
497
498 static struct table_elt *free_element_chain;
499
500 /* Number of `struct table_elt' structures made so far for this function. */
501
502 static int n_elements_made;
503
504 /* Maximum value `n_elements_made' has had so far in this compilation
505 for functions previously processed. */
506
507 static int max_elements_made;
508
509 /* Surviving equivalence class when two equivalence classes are merged
510 by recording the effects of a jump in the last insn. Zero if the
511 last insn was not a conditional jump. */
512
513 static struct table_elt *last_jump_equiv_class;
514
515 /* Set to the cost of a constant pool reference if one was found for a
516 symbolic constant. If this was found, it means we should try to
517 convert constants into constant pool entries if they don't fit in
518 the insn. */
519
520 static int constant_pool_entries_cost;
521
522 /* Define maximum length of a branch path. */
523
524 #define PATHLENGTH 10
525
526 /* This data describes a block that will be processed by cse_basic_block. */
527
528 struct cse_basic_block_data {
529 /* Lowest CUID value of insns in block. */
530 int low_cuid;
531 /* Highest CUID value of insns in block. */
532 int high_cuid;
533 /* Total number of SETs in block. */
534 int nsets;
535 /* Last insn in the block. */
536 rtx last;
537 /* Size of current branch path, if any. */
538 int path_size;
539 /* Current branch path, indicating which branches will be taken. */
540 struct branch_path {
541 /* The branch insn. */
542 rtx branch;
543 /* Whether it should be taken or not. AROUND is the same as taken
544 except that it is used when the destination label is not preceded
545 by a BARRIER. */
546 enum taken {TAKEN, NOT_TAKEN, AROUND} status;
547 } path[PATHLENGTH];
548 };
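
/* For instance (hypothetical), a path of size 2 whose entries are
   {branch1, TAKEN} and {branch2, NOT_TAKEN} tells cse_basic_block to
   follow branch1 to its target and then fall through branch2. */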
549
550 /* Nonzero if X has the form (PLUS frame-pointer integer). We check for
551 virtual regs here because the simplify_*_operation routines are called
552 by integrate.c, which is called before virtual register instantiation. */
553
554 #define FIXED_BASE_PLUS_P(X) \
555 ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
556 || (X) == arg_pointer_rtx \
557 || (X) == virtual_stack_vars_rtx \
558 || (X) == virtual_incoming_args_rtx \
559 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
560 && (XEXP (X, 0) == frame_pointer_rtx \
561 || XEXP (X, 0) == hard_frame_pointer_rtx \
562 || XEXP (X, 0) == arg_pointer_rtx \
563 || XEXP (X, 0) == virtual_stack_vars_rtx \
564 || XEXP (X, 0) == virtual_incoming_args_rtx)))
565
566 /* Similar, but also allows reference to the stack pointer.
567
568 This used to include FIXED_BASE_PLUS_P; however, we can't assume that
569 arg_pointer_rtx by itself is nonzero, because on at least one machine,
570 the i960, the arg pointer is zero when it is unused. */
571
572 #define NONZERO_BASE_PLUS_P(X) \
573 ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
574 || (X) == virtual_stack_vars_rtx \
575 || (X) == virtual_incoming_args_rtx \
576 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
577 && (XEXP (X, 0) == frame_pointer_rtx \
578 || XEXP (X, 0) == hard_frame_pointer_rtx \
579 || XEXP (X, 0) == arg_pointer_rtx \
580 || XEXP (X, 0) == virtual_stack_vars_rtx \
581 || XEXP (X, 0) == virtual_incoming_args_rtx)) \
582 || (X) == stack_pointer_rtx \
583 || (X) == virtual_stack_dynamic_rtx \
584 || (X) == virtual_outgoing_args_rtx \
585 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
586 && (XEXP (X, 0) == stack_pointer_rtx \
587 || XEXP (X, 0) == virtual_stack_dynamic_rtx \
588 || XEXP (X, 0) == virtual_outgoing_args_rtx)))
589
590 static int notreg_cost PROTO((rtx));
591 static void new_basic_block PROTO((void));
592 static void make_new_qty PROTO((int));
593 static void make_regs_eqv PROTO((int, int));
594 static void delete_reg_equiv PROTO((int));
595 static int mention_regs PROTO((rtx));
596 static int insert_regs PROTO((rtx, struct table_elt *, int));
597 static void free_element PROTO((struct table_elt *));
598 static void remove_from_table PROTO((struct table_elt *, unsigned));
599 static struct table_elt *get_element PROTO((void));
600 static struct table_elt *lookup PROTO((rtx, unsigned, enum machine_mode)),
601 *lookup_for_remove PROTO((rtx, unsigned, enum machine_mode));
602 static rtx lookup_as_function PROTO((rtx, enum rtx_code));
603 static struct table_elt *insert PROTO((rtx, struct table_elt *, unsigned,
604 enum machine_mode));
605 static void merge_equiv_classes PROTO((struct table_elt *,
606 struct table_elt *));
607 static void invalidate PROTO((rtx, enum machine_mode));
608 static int cse_rtx_varies_p PROTO((rtx));
609 static void remove_invalid_refs PROTO((int));
610 static void rehash_using_reg PROTO((rtx));
611 static void invalidate_memory PROTO((void));
612 static void invalidate_for_call PROTO((void));
613 static rtx use_related_value PROTO((rtx, struct table_elt *));
614 static unsigned canon_hash PROTO((rtx, enum machine_mode));
615 static unsigned safe_hash PROTO((rtx, enum machine_mode));
616 static int exp_equiv_p PROTO((rtx, rtx, int, int));
617 static void set_nonvarying_address_components PROTO((rtx, int, rtx *,
618 HOST_WIDE_INT *,
619 HOST_WIDE_INT *));
620 static int refers_to_p PROTO((rtx, rtx));
621 static rtx canon_reg PROTO((rtx, rtx));
622 static void find_best_addr PROTO((rtx, rtx *));
623 static enum rtx_code find_comparison_args PROTO((enum rtx_code, rtx *, rtx *,
624 enum machine_mode *,
625 enum machine_mode *));
626 static rtx cse_gen_binary PROTO((enum rtx_code, enum machine_mode,
627 rtx, rtx));
628 static rtx simplify_plus_minus PROTO((enum rtx_code, enum machine_mode,
629 rtx, rtx));
630 static rtx fold_rtx PROTO((rtx, rtx));
631 static rtx equiv_constant PROTO((rtx));
632 static void record_jump_equiv PROTO((rtx, int));
633 static void record_jump_cond PROTO((enum rtx_code, enum machine_mode,
634 rtx, rtx, int));
635 static void cse_insn PROTO((rtx, int));
636 static int note_mem_written PROTO((rtx));
637 static void invalidate_from_clobbers PROTO((rtx));
638 static rtx cse_process_notes PROTO((rtx, rtx));
639 static void cse_around_loop PROTO((rtx));
640 static void invalidate_skipped_set PROTO((rtx, rtx));
641 static void invalidate_skipped_block PROTO((rtx));
642 static void cse_check_loop_start PROTO((rtx, rtx));
643 static void cse_set_around_loop PROTO((rtx, rtx, rtx));
644 static rtx cse_basic_block PROTO((rtx, rtx, struct branch_path *, int));
645 static void count_reg_usage PROTO((rtx, int *, rtx, int));
646
647 extern int rtx_equal_function_value_matters;
648 \f
649 /* Return an estimate of the cost of computing rtx X.
650 One use is in cse, to decide which expression to keep in the hash table.
651 Another is in rtl generation, to pick the cheapest way to multiply.
652 Other uses like the latter are expected in the future. */
653
654 /* Internal function, to compute cost when X is not a register; called
655 from COST macro to keep it simple. */
656
657 static int
658 notreg_cost (x)
659 rtx x;
660 {
661 return ((GET_CODE (x) == SUBREG
662 && GET_CODE (SUBREG_REG (x)) == REG
663 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
664 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
665 && (GET_MODE_SIZE (GET_MODE (x))
666 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
667 && subreg_lowpart_p (x)
668 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
669 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
670 ? (CHEAP_REG (SUBREG_REG (x)) ? 0
671 : (REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER ? 1
672 : 2))
673 : rtx_cost (x, SET) * 2);
674 }
675
676 /* Return the right cost to give to an operation
677 to make the cost of the corresponding register-to-register instruction
678 N times that of a fast register-to-register instruction. */
679
680 #define COSTS_N_INSNS(N) ((N) * 4 - 2)
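
/* So COSTS_N_INSNS (1) == 2, the same as the default cost assigned to
   a simple operation in `rtx_cost' below, and each additional insn
   adds 4: COSTS_N_INSNS (2) == 6, and division's COSTS_N_INSNS (7)
   == 26. */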
681
682 int
683 rtx_cost (x, outer_code)
684 rtx x;
685 enum rtx_code outer_code;
686 {
687 register int i, j;
688 register enum rtx_code code;
689 register char *fmt;
690 register int total;
691
692 if (x == 0)
693 return 0;
694
695 /* Compute the default costs of certain things.
696 Note that RTX_COSTS can override the defaults. */
697
698 code = GET_CODE (x);
699 switch (code)
700 {
701 case MULT:
702 /* Count multiplication by 2**n as a shift,
703 because if we are considering it, we would output it as a shift. */
704 if (GET_CODE (XEXP (x, 1)) == CONST_INT
705 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
706 total = 2;
707 else
708 total = COSTS_N_INSNS (5);
709 break;
710 case DIV:
711 case UDIV:
712 case MOD:
713 case UMOD:
714 total = COSTS_N_INSNS (7);
715 break;
716 case USE:
717 /* Used in loop.c and combine.c as a marker. */
718 total = 0;
719 break;
720 case ASM_OPERANDS:
721 /* We don't want these to be used in substitutions because
722 we have no way of validating the resulting insn. So assign
723 anything containing an ASM_OPERANDS a very high cost. */
724 total = 1000;
725 break;
726 default:
727 total = 2;
728 }
729
730 switch (code)
731 {
732 case REG:
733 return ! CHEAP_REG (x);
734
735 case SUBREG:
736 /* If we can't tie these modes, make this expensive. The larger
737 the mode, the more expensive it is. */
738 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
739 return COSTS_N_INSNS (2
740 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
741 return 2;
742 #ifdef RTX_COSTS
743 RTX_COSTS (x, code, outer_code);
744 #endif
745 CONST_COSTS (x, code, outer_code);
746 }
747
748 /* Sum the costs of the sub-rtx's, plus cost of this operation,
749 which is already in total. */
750
751 fmt = GET_RTX_FORMAT (code);
752 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
753 if (fmt[i] == 'e')
754 total += rtx_cost (XEXP (x, i), code);
755 else if (fmt[i] == 'E')
756 for (j = 0; j < XVECLEN (x, i); j++)
757 total += rtx_cost (XVECEXP (x, i, j), code);
758
759 return total;
760 }
761 \f
762 /* Clear the hash table and initialize each register with its own quantity,
763 for a new basic block. */
764
765 static void
766 new_basic_block ()
767 {
768 register int i;
769
770 next_qty = max_reg;
771
772 bzero ((char *) reg_tick, max_reg * sizeof (int));
773
774 bcopy ((char *) all_minus_one, (char *) reg_in_table,
775 max_reg * sizeof (int));
776 bcopy ((char *) consec_ints, (char *) reg_qty, max_reg * sizeof (int));
777 CLEAR_HARD_REG_SET (hard_regs_in_table);
778
779 /* The per-quantity values used to be initialized here, but it is
780 much faster to initialize each as it is made in `make_new_qty'. */
781
782 for (i = 0; i < NBUCKETS; i++)
783 {
784 register struct table_elt *this, *next;
785 for (this = table[i]; this; this = next)
786 {
787 next = this->next_same_hash;
788 free_element (this);
789 }
790 }
791
792 bzero ((char *) table, sizeof table);
793
794 prev_insn = 0;
795
796 #ifdef HAVE_cc0
797 prev_insn_cc0 = 0;
798 #endif
799 }
800
801 /* Say that register REG contains a quantity not previously held in any
802 register, and initialize that quantity. */
803
804 static void
805 make_new_qty (reg)
806 register int reg;
807 {
808 register int q;
809
810 if (next_qty >= max_qty)
811 abort ();
812
813 q = reg_qty[reg] = next_qty++;
814 qty_first_reg[q] = reg;
815 qty_last_reg[q] = reg;
816 qty_const[q] = qty_const_insn[q] = 0;
817 qty_comparison_code[q] = UNKNOWN;
818
819 reg_next_eqv[reg] = reg_prev_eqv[reg] = -1;
820 }
821
822 /* Make reg NEW equivalent to reg OLD.
823 OLD is not changing; NEW is. */
824
825 static void
826 make_regs_eqv (new, old)
827 register int new, old;
828 {
829 register int lastr, firstr;
830 register int q = reg_qty[old];
831
832 /* Nothing should become eqv until it has a "non-invalid" qty number. */
833 if (! REGNO_QTY_VALID_P (old))
834 abort ();
835
836 reg_qty[new] = q;
837 firstr = qty_first_reg[q];
838 lastr = qty_last_reg[q];
839
840 /* Prefer fixed hard registers to anything. Prefer pseudo regs to other
841 hard regs. Among pseudos, if NEW will live longer than any other reg
842 of the same qty and its lifetime extends beyond the current basic block,
843 make it the new canonical replacement for this qty. */
844 if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
845 /* Certain fixed registers might be of the class NO_REGS. This means
846 that not only can they not be allocated by the compiler, but
847 they cannot be used in substitutions or canonicalizations
848 either. */
849 && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
850 && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
851 || (new >= FIRST_PSEUDO_REGISTER
852 && (firstr < FIRST_PSEUDO_REGISTER
853 || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
854 || (uid_cuid[REGNO_FIRST_UID (new)]
855 < cse_basic_block_start))
856 && (uid_cuid[REGNO_LAST_UID (new)]
857 > uid_cuid[REGNO_LAST_UID (firstr)]))))))
858 {
859 reg_prev_eqv[firstr] = new;
860 reg_next_eqv[new] = firstr;
861 reg_prev_eqv[new] = -1;
862 qty_first_reg[q] = new;
863 }
864 else
865 {
866 /* If NEW is a hard reg (known to be non-fixed), insert at end.
867 Otherwise, insert before any non-fixed hard regs that are at the
868 end. Registers of class NO_REGS cannot be used as an
869 equivalent for anything. */
870 while (lastr < FIRST_PSEUDO_REGISTER && reg_prev_eqv[lastr] >= 0
871 && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
872 && new >= FIRST_PSEUDO_REGISTER)
873 lastr = reg_prev_eqv[lastr];
874 reg_next_eqv[new] = reg_next_eqv[lastr];
875 if (reg_next_eqv[lastr] >= 0)
876 reg_prev_eqv[reg_next_eqv[lastr]] = new;
877 else
878 qty_last_reg[q] = new;
879 reg_next_eqv[lastr] = new;
880 reg_prev_eqv[new] = lastr;
881 }
882 }
883
884 /* Remove REG from its equivalence class. */
885
886 static void
887 delete_reg_equiv (reg)
888 register int reg;
889 {
890 register int q = reg_qty[reg];
891 register int p, n;
892
893 /* If invalid, do nothing. */
894 if (q == reg)
895 return;
896
897 p = reg_prev_eqv[reg];
898 n = reg_next_eqv[reg];
899
900 if (n != -1)
901 reg_prev_eqv[n] = p;
902 else
903 qty_last_reg[q] = p;
904 if (p != -1)
905 reg_next_eqv[p] = n;
906 else
907 qty_first_reg[q] = n;
908
909 reg_qty[reg] = reg;
910 }
911
912 /* Remove any invalid expressions from the hash table
913 that refer to any of the registers contained in expression X.
914
915 Make sure that newly inserted references to those registers
916 as subexpressions will be considered valid.
917
918 mention_regs is not called when a register itself
919 is being stored in the table.
920
921 Return 1 if we have done something that may have changed the hash code
922 of X. */
923
924 static int
925 mention_regs (x)
926 rtx x;
927 {
928 register enum rtx_code code;
929 register int i, j;
930 register char *fmt;
931 register int changed = 0;
932
933 if (x == 0)
934 return 0;
935
936 code = GET_CODE (x);
937 if (code == REG)
938 {
939 register int regno = REGNO (x);
940 register int endregno
941 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
942 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
943 int i;
944
945 for (i = regno; i < endregno; i++)
946 {
947 if (reg_in_table[i] >= 0 && reg_in_table[i] != reg_tick[i])
948 remove_invalid_refs (i);
949
950 reg_in_table[i] = reg_tick[i];
951 }
952
953 return 0;
954 }
955
956 /* If X is a comparison or a COMPARE and either operand is a register
957 that does not have a quantity, give it one. This is so that a later
958 call to record_jump_equiv won't cause X to be assigned a different
959 hash code and not found in the table after that call.
960
961 It is not necessary to do this here, since rehash_using_reg can
962 fix up the table later, but doing this here eliminates the need to
963 call that expensive function in the most common case where the only
964 use of the register is in the comparison. */
965
966 if (code == COMPARE || GET_RTX_CLASS (code) == '<')
967 {
968 if (GET_CODE (XEXP (x, 0)) == REG
969 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
970 if (insert_regs (XEXP (x, 0), NULL_PTR, 0))
971 {
972 rehash_using_reg (XEXP (x, 0));
973 changed = 1;
974 }
975
976 if (GET_CODE (XEXP (x, 1)) == REG
977 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
978 if (insert_regs (XEXP (x, 1), NULL_PTR, 0))
979 {
980 rehash_using_reg (XEXP (x, 1));
981 changed = 1;
982 }
983 }
984
985 fmt = GET_RTX_FORMAT (code);
986 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
987 if (fmt[i] == 'e')
988 changed |= mention_regs (XEXP (x, i));
989 else if (fmt[i] == 'E')
990 for (j = 0; j < XVECLEN (x, i); j++)
991 changed |= mention_regs (XVECEXP (x, i, j));
992
993 return changed;
994 }
995
996 /* Update the register quantities for inserting X into the hash table
997 with a value equivalent to CLASSP.
998 (If the class does not contain a REG, it is irrelevant.)
999 If MODIFIED is nonzero, X is a destination; it is being modified.
1000 Note that delete_reg_equiv should be called on a register
1001 before insert_regs is done on that register with MODIFIED != 0.
1002
1003 Nonzero value means that elements of reg_qty have changed
1004 so X's hash code may be different. */
1005
1006 static int
1007 insert_regs (x, classp, modified)
1008 rtx x;
1009 struct table_elt *classp;
1010 int modified;
1011 {
1012 if (GET_CODE (x) == REG)
1013 {
1014 register int regno = REGNO (x);
1015
1016 /* If REGNO is in the equivalence table already but is of the
1017 wrong mode for that equivalence, don't do anything here. */
1018
1019 if (REGNO_QTY_VALID_P (regno)
1020 && qty_mode[reg_qty[regno]] != GET_MODE (x))
1021 return 0;
1022
1023 if (modified || ! REGNO_QTY_VALID_P (regno))
1024 {
1025 if (classp)
1026 for (classp = classp->first_same_value;
1027 classp != 0;
1028 classp = classp->next_same_value)
1029 if (GET_CODE (classp->exp) == REG
1030 && GET_MODE (classp->exp) == GET_MODE (x))
1031 {
1032 make_regs_eqv (regno, REGNO (classp->exp));
1033 return 1;
1034 }
1035
1036 make_new_qty (regno);
1037 qty_mode[reg_qty[regno]] = GET_MODE (x);
1038 return 1;
1039 }
1040
1041 return 0;
1042 }
1043
1044 /* If X is a SUBREG, we will likely be inserting the inner register in the
1045 table. If that register doesn't have an assigned quantity number at
1046 this point but does later, the insertion that we will be doing now will
1047 not be accessible because its hash code will have changed. So assign
1048 a quantity number now. */
1049
1050 else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1051 && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1052 {
1053 insert_regs (SUBREG_REG (x), NULL_PTR, 0);
1054 mention_regs (SUBREG_REG (x));
1055 return 1;
1056 }
1057 else
1058 return mention_regs (x);
1059 }
1060 \f
1061 /* Look in or update the hash table. */
1062
1063 /* Put the element ELT on the list of free elements. */
1064
1065 static void
1066 free_element (elt)
1067 struct table_elt *elt;
1068 {
1069 elt->next_same_hash = free_element_chain;
1070 free_element_chain = elt;
1071 }
1072
1073 /* Return an element that is free for use. */
1074
1075 static struct table_elt *
1076 get_element ()
1077 {
1078 struct table_elt *elt = free_element_chain;
1079 if (elt)
1080 {
1081 free_element_chain = elt->next_same_hash;
1082 return elt;
1083 }
1084 n_elements_made++;
1085 return (struct table_elt *) oballoc (sizeof (struct table_elt));
1086 }
1087
1088 /* Remove table element ELT from use in the table.
1089 HASH is its hash code, made using the HASH macro.
1090 It's an argument because often that is known in advance
1091 and we save much time not recomputing it. */
1092
1093 static void
1094 remove_from_table (elt, hash)
1095 register struct table_elt *elt;
1096 unsigned hash;
1097 {
1098 if (elt == 0)
1099 return;
1100
1101 /* Mark this element as removed. See cse_insn. */
1102 elt->first_same_value = 0;
1103
1104 /* Remove the table element from its equivalence class. */
1105
1106 {
1107 register struct table_elt *prev = elt->prev_same_value;
1108 register struct table_elt *next = elt->next_same_value;
1109
1110 if (next) next->prev_same_value = prev;
1111
1112 if (prev)
1113 prev->next_same_value = next;
1114 else
1115 {
1116 register struct table_elt *newfirst = next;
1117 while (next)
1118 {
1119 next->first_same_value = newfirst;
1120 next = next->next_same_value;
1121 }
1122 }
1123 }
1124
1125 /* Remove the table element from its hash bucket. */
1126
1127 {
1128 register struct table_elt *prev = elt->prev_same_hash;
1129 register struct table_elt *next = elt->next_same_hash;
1130
1131 if (next) next->prev_same_hash = prev;
1132
1133 if (prev)
1134 prev->next_same_hash = next;
1135 else if (table[hash] == elt)
1136 table[hash] = next;
1137 else
1138 {
1139 /* This entry is not in the proper hash bucket. This can happen
1140 when two classes were merged by `merge_equiv_classes'. Search
1141 for the hash bucket that it heads. This happens only very
1142 rarely, so the cost is acceptable. */
1143 for (hash = 0; hash < NBUCKETS; hash++)
1144 if (table[hash] == elt)
1145 table[hash] = next;
1146 }
1147 }
1148
1149 /* Remove the table element from its related-value circular chain. */
1150
1151 if (elt->related_value != 0 && elt->related_value != elt)
1152 {
1153 register struct table_elt *p = elt->related_value;
1154 while (p->related_value != elt)
1155 p = p->related_value;
1156 p->related_value = elt->related_value;
1157 if (p->related_value == p)
1158 p->related_value = 0;
1159 }
1160
1161 free_element (elt);
1162 }
1163
1164 /* Look up X in the hash table and return its table element,
1165 or 0 if X is not in the table.
1166
1167 MODE is the machine-mode of X, or if X is an integer constant
1168 with VOIDmode then MODE is the mode with which X will be used.
1169
1170 Here we are satisfied to find an expression whose tree structure
1171 looks like X. */
1172
1173 static struct table_elt *
1174 lookup (x, hash, mode)
1175 rtx x;
1176 unsigned hash;
1177 enum machine_mode mode;
1178 {
1179 register struct table_elt *p;
1180
1181 for (p = table[hash]; p; p = p->next_same_hash)
1182 if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
1183 || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
1184 return p;
1185
1186 return 0;
1187 }
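
/* A typical call (sketch): elt = lookup (x, HASH (x, mode), mode);
   a null result means no expression structurally like X is recorded
   for that mode. */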
1188
1189 /* Like `lookup' but don't care whether the table element uses invalid regs.
1190 Also ignore discrepancies in the machine mode of a register. */
1191
1192 static struct table_elt *
1193 lookup_for_remove (x, hash, mode)
1194 rtx x;
1195 unsigned hash;
1196 enum machine_mode mode;
1197 {
1198 register struct table_elt *p;
1199
1200 if (GET_CODE (x) == REG)
1201 {
1202 int regno = REGNO (x);
1203 /* Don't check the machine mode when comparing registers;
1204 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1205 for (p = table[hash]; p; p = p->next_same_hash)
1206 if (GET_CODE (p->exp) == REG
1207 && REGNO (p->exp) == regno)
1208 return p;
1209 }
1210 else
1211 {
1212 for (p = table[hash]; p; p = p->next_same_hash)
1213 if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
1214 return p;
1215 }
1216
1217 return 0;
1218 }
1219
1220 /* Look for an expression equivalent to X and with code CODE.
1221 If one is found, return that expression. */
1222
1223 static rtx
1224 lookup_as_function (x, code)
1225 rtx x;
1226 enum rtx_code code;
1227 {
1228 register struct table_elt *p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS,
1229 GET_MODE (x));
1230 if (p == 0)
1231 return 0;
1232
1233 for (p = p->first_same_value; p; p = p->next_same_value)
1234 {
1235 if (GET_CODE (p->exp) == code
1236 /* Make sure this is a valid entry in the table. */
1237 && exp_equiv_p (p->exp, p->exp, 1, 0))
1238 return p->exp;
1239 }
1240
1241 return 0;
1242 }
1243
1244 /* Insert X in the hash table, assuming HASH is its hash code
1245 and CLASSP is an element of the class it should go in
1246 (or 0 if a new class should be made).
1247 It is inserted at the proper position to keep the class in
1248 the order cheapest first.
1249
1250 MODE is the machine-mode of X, or if X is an integer constant
1251 with VOIDmode then MODE is the mode with which X will be used.
1252
1253 For elements of equal cheapness, the most recent one
1254 goes in front, except that the first element in the list
1255 remains first unless a cheaper element is added. The order of
1256 pseudo-registers does not matter, as canon_reg will be called to
1257 find the cheapest when a register is retrieved from the table.
1258
1259 The in_memory field in the hash table element is set to 0.
1260 The caller must set it nonzero if appropriate.
1261
1262 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1263 and if insert_regs returns a nonzero value
1264 you must then recompute its hash code before calling here.
1265
1266 If necessary, update table showing constant values of quantities. */
1267
1268 #define CHEAPER(X,Y) ((X)->cost < (Y)->cost)
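
/* So, as a sketch, a value class containing (const_int 4) and two
   pseudo registers holding it is chained with the constant first
   (a constant is the only thing that can be cheaper than a register),
   the pseudos after it, and every element's `first_same_value'
   pointing at the constant. */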
1269
1270 static struct table_elt *
1271 insert (x, classp, hash, mode)
1272 register rtx x;
1273 register struct table_elt *classp;
1274 unsigned hash;
1275 enum machine_mode mode;
1276 {
1277 register struct table_elt *elt;
1278
1279 /* If X is a register and we haven't made a quantity for it,
1280 something is wrong. */
1281 if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
1282 abort ();
1283
1284 /* If X is a hard register, show it is being put in the table. */
1285 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
1286 {
1287 int regno = REGNO (x);
1288 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1289 int i;
1290
1291 for (i = regno; i < endregno; i++)
1292 SET_HARD_REG_BIT (hard_regs_in_table, i);
1293 }
1294
1295 /* If X is a label, show we recorded it. */
1296 if (GET_CODE (x) == LABEL_REF
1297 || (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS
1298 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF))
1299 recorded_label_ref = 1;
1300
1301 /* Put an element for X into the right hash bucket. */
1302
1303 elt = get_element ();
1304 elt->exp = x;
1305 elt->cost = COST (x);
1306 elt->next_same_value = 0;
1307 elt->prev_same_value = 0;
1308 elt->next_same_hash = table[hash];
1309 elt->prev_same_hash = 0;
1310 elt->related_value = 0;
1311 elt->in_memory = 0;
1312 elt->mode = mode;
1313 elt->is_const = (CONSTANT_P (x)
1314 /* GNU C++ takes advantage of this for `this'
1315 (and other const values). */
1316 || (RTX_UNCHANGING_P (x)
1317 && GET_CODE (x) == REG
1318 && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1319 || FIXED_BASE_PLUS_P (x));
1320
1321 if (table[hash])
1322 table[hash]->prev_same_hash = elt;
1323 table[hash] = elt;
1324
1325 /* Put it into the proper value-class. */
1326 if (classp)
1327 {
1328 classp = classp->first_same_value;
1329 if (CHEAPER (elt, classp))
1330 /* Insert at the head of the class */
1331 {
1332 register struct table_elt *p;
1333 elt->next_same_value = classp;
1334 classp->prev_same_value = elt;
1335 elt->first_same_value = elt;
1336
1337 for (p = classp; p; p = p->next_same_value)
1338 p->first_same_value = elt;
1339 }
1340 else
1341 {
1342 /* Insert not at head of the class. */
1343 /* Put it after the last element cheaper than X. */
1344 register struct table_elt *p, *next;
1345 for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1346 p = next);
1347 /* Put it after P and before NEXT. */
1348 elt->next_same_value = next;
1349 if (next)
1350 next->prev_same_value = elt;
1351 elt->prev_same_value = p;
1352 p->next_same_value = elt;
1353 elt->first_same_value = classp;
1354 }
1355 }
1356 else
1357 elt->first_same_value = elt;
1358
1359 /* If this is a constant being set equivalent to a register or a register
1360 being set equivalent to a constant, note the constant equivalence.
1361
1362 If this is a constant, it cannot be equivalent to a different constant,
1363 and a constant is the only thing that can be cheaper than a register. So
1364 we know the register is the head of the class (before the constant was
1365 inserted).
1366
1367 If this is a register that is not already known equivalent to a
1368 constant, we must check the entire class.
1369
1370 If this is a register that is already known equivalent to a constant,
1371 update `qty_const_insn' to show that `this_insn' is the latest
1372 insn making that quantity equivalent to the constant. */
1373
1374 if (elt->is_const && classp && GET_CODE (classp->exp) == REG
1375 && GET_CODE (x) != REG)
1376 {
1377 qty_const[reg_qty[REGNO (classp->exp)]]
1378 = gen_lowpart_if_possible (qty_mode[reg_qty[REGNO (classp->exp)]], x);
1379 qty_const_insn[reg_qty[REGNO (classp->exp)]] = this_insn;
1380 }
1381
1382 else if (GET_CODE (x) == REG && classp && ! qty_const[reg_qty[REGNO (x)]]
1383 && ! elt->is_const)
1384 {
1385 register struct table_elt *p;
1386
1387 for (p = classp; p != 0; p = p->next_same_value)
1388 {
1389 if (p->is_const && GET_CODE (p->exp) != REG)
1390 {
1391 qty_const[reg_qty[REGNO (x)]]
1392 = gen_lowpart_if_possible (GET_MODE (x), p->exp);
1393 qty_const_insn[reg_qty[REGNO (x)]] = this_insn;
1394 break;
1395 }
1396 }
1397 }
1398
1399 else if (GET_CODE (x) == REG && qty_const[reg_qty[REGNO (x)]]
1400 && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]])
1401 qty_const_insn[reg_qty[REGNO (x)]] = this_insn;
1402
1403 /* If this is a constant with symbolic value,
1404 and it has a term with an explicit integer value,
1405 link it up with related expressions. */
1406 if (GET_CODE (x) == CONST)
1407 {
1408 rtx subexp = get_related_value (x);
1409 unsigned subhash;
1410 struct table_elt *subelt, *subelt_prev;
1411
1412 if (subexp != 0)
1413 {
1414 /* Get the integer-free subexpression in the hash table. */
1415 subhash = safe_hash (subexp, mode) % NBUCKETS;
1416 subelt = lookup (subexp, subhash, mode);
1417 if (subelt == 0)
1418 subelt = insert (subexp, NULL_PTR, subhash, mode);
1419 /* Initialize SUBELT's circular chain if it has none. */
1420 if (subelt->related_value == 0)
1421 subelt->related_value = subelt;
1422 /* Find the element in the circular chain that precedes SUBELT. */
1423 subelt_prev = subelt;
1424 while (subelt_prev->related_value != subelt)
1425 subelt_prev = subelt_prev->related_value;
1426 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1427 This way the element that follows SUBELT is the oldest one. */
1428 elt->related_value = subelt_prev->related_value;
1429 subelt_prev->related_value = elt;
1430 }
1431 }
1432
1433 return elt;
1434 }
1435 \f
1436 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1437 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1438 the two classes equivalent.
1439
1440 CLASS1 will be the surviving class; CLASS2 should not be used after this
1441 call.
1442
1443 Any invalid entries in CLASS2 will not be copied. */
1444
1445 static void
1446 merge_equiv_classes (class1, class2)
1447 struct table_elt *class1, *class2;
1448 {
1449 struct table_elt *elt, *next, *new;
1450
1451 /* Ensure we start with the head of the classes. */
1452 class1 = class1->first_same_value;
1453 class2 = class2->first_same_value;
1454
1455 /* If they were already equal, forget it. */
1456 if (class1 == class2)
1457 return;
1458
1459 for (elt = class2; elt; elt = next)
1460 {
1461 unsigned hash;
1462 rtx exp = elt->exp;
1463 enum machine_mode mode = elt->mode;
1464
1465 next = elt->next_same_value;
1466
1467 /* Remove old entry, make a new one in CLASS1's class.
1468 Don't do this for invalid entries as we cannot find their
1469 hash code (it also isn't necessary). */
1470 if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
1471 {
1472 hash_arg_in_memory = 0;
1473 hash_arg_in_struct = 0;
1474 hash = HASH (exp, mode);
1475
1476 if (GET_CODE (exp) == REG)
1477 delete_reg_equiv (REGNO (exp));
1478
1479 remove_from_table (elt, hash);
1480
1481 if (insert_regs (exp, class1, 0))
1482 {
1483 rehash_using_reg (exp);
1484 hash = HASH (exp, mode);
1485 }
1486 new = insert (exp, class1, hash, mode);
1487 new->in_memory = hash_arg_in_memory;
1488 new->in_struct = hash_arg_in_struct;
1489 }
1490 }
1491 }
1492 \f
1493 /* Remove from the hash table, or mark as invalid,
1494 all expressions whose values could be altered by storing in X.
1495 X is a register, a subreg, or a memory reference with nonvarying address
1496 (because, when a memory reference with a varying address is stored in,
1497 all memory references are removed by invalidate_memory
1498 so specific invalidation is superfluous).
1499 FULL_MODE, if not VOIDmode, indicates that this much should be invalidated
1500 instead of just the amount indicated by the mode of X. This is only used
1501 for bitfield stores into memory.
1502
1503 A nonvarying address may be just a register or just
1504 a symbol reference, or it may be either of those plus
1505 a numeric offset. */
1506
1507 static void
1508 invalidate (x, full_mode)
1509 rtx x;
1510 enum machine_mode full_mode;
1511 {
1512 register int i;
1513 register struct table_elt *p;
1514
1515 /* If X is a register, dependencies on its contents
1516 are recorded through the qty number mechanism.
1517 Just change the qty number of the register,
1518 mark it as invalid for expressions that refer to it,
1519 and remove it itself. */
1520
1521 if (GET_CODE (x) == REG)
1522 {
1523 register int regno = REGNO (x);
1524 register unsigned hash = HASH (x, GET_MODE (x));
1525
1526 /* Remove REGNO from any quantity list it might be on and indicate
1527 that its value might have changed. If it is a pseudo, remove its
1528 entry from the hash table.
1529
1530 For a hard register, we do the first two actions above for any
1531 additional hard registers corresponding to X. Then, if any of these
1532 registers are in the table, we must remove any REG entries that
1533 overlap these registers. */
1534
1535 delete_reg_equiv (regno);
1536 reg_tick[regno]++;
1537
1538 if (regno >= FIRST_PSEUDO_REGISTER)
1539 {
1540 /* Because a register can be referenced in more than one mode,
1541 we might have to remove more than one table entry. */
1542
1543 struct table_elt *elt;
1544
1545 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))) != 0)
1546 remove_from_table (elt, hash);
1547 }
1548 else
1549 {
1550 HOST_WIDE_INT in_table
1551 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1552 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1553 int tregno, tendregno;
1554 register struct table_elt *p, *next;
1555
1556 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1557
1558 for (i = regno + 1; i < endregno; i++)
1559 {
1560 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, i);
1561 CLEAR_HARD_REG_BIT (hard_regs_in_table, i);
1562 delete_reg_equiv (i);
1563 reg_tick[i]++;
1564 }
1565
1566 if (in_table)
1567 for (hash = 0; hash < NBUCKETS; hash++)
1568 for (p = table[hash]; p; p = next)
1569 {
1570 next = p->next_same_hash;
1571
1572 if (GET_CODE (p->exp) != REG
1573 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1574 continue;
1575
1576 tregno = REGNO (p->exp);
1577 tendregno
1578 = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1579 if (tendregno > regno && tregno < endregno)
1580 remove_from_table (p, hash);
1581 }
1582 }
1583
1584 return;
1585 }
1586
1587 if (GET_CODE (x) == SUBREG)
1588 {
1589 if (GET_CODE (SUBREG_REG (x)) != REG)
1590 abort ();
1591 invalidate (SUBREG_REG (x), VOIDmode);
1592 return;
1593 }
1594
1595 /* X is not a register; it must be a memory reference with
1596 a nonvarying address. Remove all hash table elements
1597 that refer to overlapping pieces of memory. */
1598
1599 if (GET_CODE (x) != MEM)
1600 abort ();
1601
1602 if (full_mode == VOIDmode)
1603 full_mode = GET_MODE (x);
1604
1605 for (i = 0; i < NBUCKETS; i++)
1606 {
1607 register struct table_elt *next;
1608 for (p = table[i]; p; p = next)
1609 {
1610 next = p->next_same_hash;
1611 /* Invalidate ASM_OPERANDS which reference memory (this is easier
1612 than checking all the aliases). */
1613 if (p->in_memory
1614 && (GET_CODE (p->exp) != MEM
1615 || true_dependence (x, full_mode, p->exp, cse_rtx_varies_p)))
1616 remove_from_table (p, i);
1617 }
1618 }
1619 }
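
/* A typical call (sketch): after recording the effect of a SET whose
   destination DEST is a register or a nonvarying MEM, callers do
   invalidate (DEST, VOIDmode); a bitfield store into memory passes
   the wider mode actually affected as FULL_MODE instead. */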
1620
1621 /* Remove all expressions that refer to register REGNO,
1622 since they are already invalid, and we are about to
1623 mark that register valid again and don't want the old
1624 expressions to reappear as valid. */
1625
1626 static void
1627 remove_invalid_refs (regno)
1628 int regno;
1629 {
1630 register int i;
1631 register struct table_elt *p, *next;
1632
1633 for (i = 0; i < NBUCKETS; i++)
1634 for (p = table[i]; p; p = next)
1635 {
1636 next = p->next_same_hash;
1637 if (GET_CODE (p->exp) != REG
1638 && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
1639 remove_from_table (p, i);
1640 }
1641 }
1642 \f
1643 /* Recompute the hash codes of any valid entries in the hash table that
1644 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1645
1646 This is called when we make a jump equivalence. */
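/* For example (pseudo numbers hypothetical): after a branch teaches us
   that (reg 65) and (reg 70) hold the same value, the two registers
   share one quantity number.  Since canon_hash hashes a REG through
   reg_qty, an entry such as (plus:SI (reg:SI 70) (const_int 4)) may
   now belong in a different bucket, so we refile it.  */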
1647
1648 static void
1649 rehash_using_reg (x)
1650 rtx x;
1651 {
1652 int i;
1653 struct table_elt *p, *next;
1654 unsigned hash;
1655
1656 if (GET_CODE (x) == SUBREG)
1657 x = SUBREG_REG (x);
1658
1659 /* If X is not a register or if the register is known not to be in any
1660 valid entries in the table, we have no work to do. */
1661
1662 if (GET_CODE (x) != REG
1663 || reg_in_table[REGNO (x)] < 0
1664 || reg_in_table[REGNO (x)] != reg_tick[REGNO (x)])
1665 return;
1666
1667 /* Scan all hash chains looking for valid entries that mention X.
1668 If we find one and it is in the wrong hash chain, move it. We can skip
1669 objects that are registers, since they are handled specially. */
1670
1671 for (i = 0; i < NBUCKETS; i++)
1672 for (p = table[i]; p; p = next)
1673 {
1674 next = p->next_same_hash;
1675 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
1676 && exp_equiv_p (p->exp, p->exp, 1, 0)
1677 && i != (hash = safe_hash (p->exp, p->mode) % NBUCKETS))
1678 {
1679 if (p->next_same_hash)
1680 p->next_same_hash->prev_same_hash = p->prev_same_hash;
1681
1682 if (p->prev_same_hash)
1683 p->prev_same_hash->next_same_hash = p->next_same_hash;
1684 else
1685 table[i] = p->next_same_hash;
1686
1687 p->next_same_hash = table[hash];
1688 p->prev_same_hash = 0;
1689 if (table[hash])
1690 table[hash]->prev_same_hash = p;
1691 table[hash] = p;
1692 }
1693 }
1694 }
1695 \f
1696 /* Remove from the hash table any expression that is a call-clobbered
1697 register. Also update the TICK values of those registers. */
1698
1699 static void
1700 invalidate_for_call ()
1701 {
1702 int regno, endregno;
1703 int i;
1704 unsigned hash;
1705 struct table_elt *p, *next;
1706 int in_table = 0;
1707
1708 /* Go through all the hard registers. For each that is clobbered in
1709 a CALL_INSN, remove the register from quantity chains and update
1710 reg_tick if defined. Also see if any of these registers is currently
1711 in the table. */
1712
1713 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1714 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1715 {
1716 delete_reg_equiv (regno);
1717 if (reg_tick[regno] >= 0)
1718 reg_tick[regno]++;
1719
1720 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
1721 }
1722
1723 /* In the case where we have no call-clobbered hard registers in the
1724 table, we are done. Otherwise, scan the table and remove any
1725 entry that overlaps a call-clobbered register. */
1726
1727 if (in_table)
1728 for (hash = 0; hash < NBUCKETS; hash++)
1729 for (p = table[hash]; p; p = next)
1730 {
1731 next = p->next_same_hash;
1732
1733 if (p->in_memory)
1734 {
1735 remove_from_table (p, hash);
1736 continue;
1737 }
1738
1739 if (GET_CODE (p->exp) != REG
1740 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1741 continue;
1742
1743 regno = REGNO (p->exp);
1744 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
1745
1746 for (i = regno; i < endregno; i++)
1747 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1748 {
1749 remove_from_table (p, hash);
1750 break;
1751 }
1752 }
1753 }
1754 \f
1755 /* Given an expression X of type CONST,
1756 and ELT which is its table entry (or 0 if it
1757 is not in the hash table),
1758 return an alternate expression for X as a register plus integer.
1759 If none can be found, return 0. */
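/* For example (rtl hypothetical): if X is
   (const (plus (symbol_ref "tbl") (const_int 8))) and the table knows
   that (symbol_ref "tbl") is currently held in (reg 65), we can return
   (plus (reg 65) (const_int 8)), normally a cheaper form of the same
   address.  */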
1760
1761 static rtx
1762 use_related_value (x, elt)
1763 rtx x;
1764 struct table_elt *elt;
1765 {
1766 register struct table_elt *relt = 0;
1767 register struct table_elt *p, *q;
1768 HOST_WIDE_INT offset;
1769
1770 /* First, is there anything related known?
1771 If we have a table element, we can tell from that.
1772 Otherwise, we must look it up. */
1773
1774 if (elt != 0 && elt->related_value != 0)
1775 relt = elt;
1776 else if (elt == 0 && GET_CODE (x) == CONST)
1777 {
1778 rtx subexp = get_related_value (x);
1779 if (subexp != 0)
1780 relt = lookup (subexp,
1781 safe_hash (subexp, GET_MODE (subexp)) % NBUCKETS,
1782 GET_MODE (subexp));
1783 }
1784
1785 if (relt == 0)
1786 return 0;
1787
1788 /* Search all related table entries for one that has an
1789 equivalent register. */
1790
1791 p = relt;
1792 while (1)
1793 {
1794 /* This loop is strange in that it is executed in two different cases.
1795 The first is when X is already in the table. Then it is searching
1796 the RELATED_VALUE list of X's class (RELT). The second case is when
1797 X is not in the table. Then RELT points to a class for the related
1798 value.
1799
1800 Ensure that, whatever case we are in, we ignore classes that have
1801 the same value as X. */
1802
1803 if (rtx_equal_p (x, p->exp))
1804 q = 0;
1805 else
1806 for (q = p->first_same_value; q; q = q->next_same_value)
1807 if (GET_CODE (q->exp) == REG)
1808 break;
1809
1810 if (q)
1811 break;
1812
1813 p = p->related_value;
1814
1815 /* We went all the way around, so there is nothing to be found.
1816 Alternatively, perhaps RELT was in the table for some other reason
1817 and it has no related values recorded. */
1818 if (p == relt || p == 0)
1819 break;
1820 }
1821
1822 if (q == 0)
1823 return 0;
1824
1825 offset = (get_integer_term (x) - get_integer_term (p->exp));
1826 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
1827 return plus_constant (q->exp, offset);
1828 }
1829 \f
1830 /* Hash an rtx. We are careful to make sure the value is never negative.
1831 Equivalent registers hash identically.
1832 MODE is used in hashing for CONST_INTs only;
1833 otherwise the mode of X is used.
1834
1835 Store 1 in do_not_record if any subexpression is volatile.
1836
1837 Store 1 in hash_arg_in_memory if X contains a MEM rtx
1838 which does not have the RTX_UNCHANGING_P bit set.
1839 In this case, also store 1 in hash_arg_in_struct
1840 if there is a MEM rtx which has the MEM_IN_STRUCT_P bit set.
1841
1842 Note that cse_insn knows that the hash code of a MEM expression
1843 is just (int) MEM plus the hash code of the address. */
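/* For example (register number hypothetical): (mem:SI (reg:SI 65))
   hashes to (unsigned) MEM plus the hash of the address, and the
   address, being a REG, hashes through its quantity number:
   ((unsigned) REG << 7) + reg_qty[65].  Thus registers known to hold
   the same value produce identical hash codes.  */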
1844
1845 static unsigned
1846 canon_hash (x, mode)
1847 rtx x;
1848 enum machine_mode mode;
1849 {
1850 register int i, j;
1851 register unsigned hash = 0;
1852 register enum rtx_code code;
1853 register char *fmt;
1854
1855 /* repeat is used to turn tail-recursion into iteration. */
1856 repeat:
1857 if (x == 0)
1858 return hash;
1859
1860 code = GET_CODE (x);
1861 switch (code)
1862 {
1863 case REG:
1864 {
1865 register int regno = REGNO (x);
1866
1867 /* On some machines, we can't record any non-fixed hard register,
1868 because extending its life will cause reload problems. We
1869 consider ap, fp, and sp to be fixed for this purpose.
1870 On all machines, we can't record any global registers. */
1871
1872 if (regno < FIRST_PSEUDO_REGISTER
1873 && (global_regs[regno]
1874 #ifdef SMALL_REGISTER_CLASSES
1875 || (SMALL_REGISTER_CLASSES
1876 && ! fixed_regs[regno]
1877 && regno != FRAME_POINTER_REGNUM
1878 && regno != HARD_FRAME_POINTER_REGNUM
1879 && regno != ARG_POINTER_REGNUM
1880 && regno != STACK_POINTER_REGNUM)
1881 #endif
1882 ))
1883 {
1884 do_not_record = 1;
1885 return 0;
1886 }
1887 hash += ((unsigned) REG << 7) + (unsigned) reg_qty[regno];
1888 return hash;
1889 }
1890
1891 case CONST_INT:
1892 {
1893 unsigned HOST_WIDE_INT tem = INTVAL (x);
1894 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
1895 return hash;
1896 }
1897
1898 case CONST_DOUBLE:
1899 /* This is like the general case, except that it only counts
1900 the integers representing the constant. */
1901 hash += (unsigned) code + (unsigned) GET_MODE (x);
1902 if (GET_MODE (x) != VOIDmode)
1903 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
1904 {
1905 unsigned tem = XINT (x, i);
1906 hash += tem;
1907 }
1908 else
1909 hash += ((unsigned) CONST_DOUBLE_LOW (x)
1910 + (unsigned) CONST_DOUBLE_HIGH (x));
1911 return hash;
1912
1913 /* Assume there is only one rtx object for any given label. */
1914 case LABEL_REF:
1915 hash
1916 += ((unsigned) LABEL_REF << 7) + (unsigned HOST_WIDE_INT) XEXP (x, 0);
1917 return hash;
1918
1919 case SYMBOL_REF:
1920 hash
1921 += ((unsigned) SYMBOL_REF << 7) + (unsigned HOST_WIDE_INT) XSTR (x, 0);
1922 return hash;
1923
1924 case MEM:
1925 if (MEM_VOLATILE_P (x))
1926 {
1927 do_not_record = 1;
1928 return 0;
1929 }
1930 if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
1931 {
1932 hash_arg_in_memory = 1;
1933 if (MEM_IN_STRUCT_P (x)) hash_arg_in_struct = 1;
1934 }
1935 /* Now that we have already found this special case,
1936 might as well speed it up as much as possible. */
1937 hash += (unsigned) MEM;
1938 x = XEXP (x, 0);
1939 goto repeat;
1940
1941 case PRE_DEC:
1942 case PRE_INC:
1943 case POST_DEC:
1944 case POST_INC:
1945 case PC:
1946 case CC0:
1947 case CALL:
1948 case UNSPEC_VOLATILE:
1949 do_not_record = 1;
1950 return 0;
1951
1952 case ASM_OPERANDS:
1953 if (MEM_VOLATILE_P (x))
1954 {
1955 do_not_record = 1;
1956 return 0;
1957 }
1958 }
1959
1960 i = GET_RTX_LENGTH (code) - 1;
1961 hash += (unsigned) code + (unsigned) GET_MODE (x);
1962 fmt = GET_RTX_FORMAT (code);
1963 for (; i >= 0; i--)
1964 {
1965 if (fmt[i] == 'e')
1966 {
1967 rtx tem = XEXP (x, i);
1968
1969 /* If we are about to do the last recursive call
1970 needed at this level, change it into iteration.
1971 This function is called enough to be worth it. */
1972 if (i == 0)
1973 {
1974 x = tem;
1975 goto repeat;
1976 }
1977 hash += canon_hash (tem, 0);
1978 }
1979 else if (fmt[i] == 'E')
1980 for (j = 0; j < XVECLEN (x, i); j++)
1981 hash += canon_hash (XVECEXP (x, i, j), 0);
1982 else if (fmt[i] == 's')
1983 {
1984 register unsigned char *p = (unsigned char *) XSTR (x, i);
1985 if (p)
1986 while (*p)
1987 hash += *p++;
1988 }
1989 else if (fmt[i] == 'i')
1990 {
1991 register unsigned tem = XINT (x, i);
1992 hash += tem;
1993 }
1994 else
1995 abort ();
1996 }
1997 return hash;
1998 }
1999
2000 /* Like canon_hash but with no side effects. */
2001
2002 static unsigned
2003 safe_hash (x, mode)
2004 rtx x;
2005 enum machine_mode mode;
2006 {
2007 int save_do_not_record = do_not_record;
2008 int save_hash_arg_in_memory = hash_arg_in_memory;
2009 int save_hash_arg_in_struct = hash_arg_in_struct;
2010 unsigned hash = canon_hash (x, mode);
2011 hash_arg_in_memory = save_hash_arg_in_memory;
2012 hash_arg_in_struct = save_hash_arg_in_struct;
2013 do_not_record = save_do_not_record;
2014 return hash;
2015 }
2016 \f
2017 /* Return 1 iff X and Y would canonicalize into the same thing,
2018 without actually constructing the canonicalization of either one.
2019 If VALIDATE is nonzero,
2020 we assume X is an expression being processed from the rtl
2021 and Y was found in the hash table. We check register refs
2022 in Y for being marked as valid.
2023
2024 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2025 that is known to be in the register. Ordinarily, we don't allow them
2026 to match, because letting them match would cause unpredictable results
2027 in all the places that search a hash table chain for an equivalent
2028 for a given value. A possible equivalent that has different structure
2029 has its hash code computed from different data. Whether the hash code
2030 is the same as that of the given value is pure luck. */
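/* For example (operands hypothetical): with EQUAL_VALUES nonzero,
   exp_equiv_p (const1_rtx, y, 1, 1) can return 1 when Y is a REG whose
   quantity is known to contain (const_int 1) in Y's mode (and Y's
   table entry is still valid); with EQUAL_VALUES zero the codes differ
   and the answer is immediately 0.  */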
2031
2032 static int
2033 exp_equiv_p (x, y, validate, equal_values)
2034 rtx x, y;
2035 int validate;
2036 int equal_values;
2037 {
2038 register int i, j;
2039 register enum rtx_code code;
2040 register char *fmt;
2041
2042 /* Note: it is incorrect to assume an expression is equivalent to itself
2043 if VALIDATE is nonzero. */
2044 if (x == y && !validate)
2045 return 1;
2046 if (x == 0 || y == 0)
2047 return x == y;
2048
2049 code = GET_CODE (x);
2050 if (code != GET_CODE (y))
2051 {
2052 if (!equal_values)
2053 return 0;
2054
2055 /* If X is a constant and Y is a register or vice versa, they may be
2056 equivalent. We only have to validate if Y is a register. */
2057 if (CONSTANT_P (x) && GET_CODE (y) == REG
2058 && REGNO_QTY_VALID_P (REGNO (y))
2059 && GET_MODE (y) == qty_mode[reg_qty[REGNO (y)]]
2060 && rtx_equal_p (x, qty_const[reg_qty[REGNO (y)]])
2061 && (! validate || reg_in_table[REGNO (y)] == reg_tick[REGNO (y)]))
2062 return 1;
2063
2064 if (CONSTANT_P (y) && code == REG
2065 && REGNO_QTY_VALID_P (REGNO (x))
2066 && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]]
2067 && rtx_equal_p (y, qty_const[reg_qty[REGNO (x)]]))
2068 return 1;
2069
2070 return 0;
2071 }
2072
2073 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2074 if (GET_MODE (x) != GET_MODE (y))
2075 return 0;
2076
2077 switch (code)
2078 {
2079 case PC:
2080 case CC0:
2081 return x == y;
2082
2083 case CONST_INT:
2084 return INTVAL (x) == INTVAL (y);
2085
2086 case LABEL_REF:
2087 return XEXP (x, 0) == XEXP (y, 0);
2088
2089 case SYMBOL_REF:
2090 return XSTR (x, 0) == XSTR (y, 0);
2091
2092 case REG:
2093 {
2094 int regno = REGNO (y);
2095 int endregno
2096 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2097 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2098 int i;
2099
2100 /* If the quantities are not the same, the expressions are not
2101 equivalent. If they are and we are not to validate, they
2102 are equivalent. Otherwise, ensure all regs are up-to-date. */
2103
2104 if (reg_qty[REGNO (x)] != reg_qty[regno])
2105 return 0;
2106
2107 if (! validate)
2108 return 1;
2109
2110 for (i = regno; i < endregno; i++)
2111 if (reg_in_table[i] != reg_tick[i])
2112 return 0;
2113
2114 return 1;
2115 }
2116
2117 /* For commutative operations, check both orders. */
2118 case PLUS:
2119 case MULT:
2120 case AND:
2121 case IOR:
2122 case XOR:
2123 case NE:
2124 case EQ:
2125 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2126 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2127 validate, equal_values))
2128 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2129 validate, equal_values)
2130 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2131 validate, equal_values)));
2132 }
2133
2134 /* Compare the elements. If any pair of corresponding elements
2135 fail to match, return 0 for the whole thing. */
2136
2137 fmt = GET_RTX_FORMAT (code);
2138 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2139 {
2140 switch (fmt[i])
2141 {
2142 case 'e':
2143 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2144 return 0;
2145 break;
2146
2147 case 'E':
2148 if (XVECLEN (x, i) != XVECLEN (y, i))
2149 return 0;
2150 for (j = 0; j < XVECLEN (x, i); j++)
2151 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2152 validate, equal_values))
2153 return 0;
2154 break;
2155
2156 case 's':
2157 if (strcmp (XSTR (x, i), XSTR (y, i)))
2158 return 0;
2159 break;
2160
2161 case 'i':
2162 if (XINT (x, i) != XINT (y, i))
2163 return 0;
2164 break;
2165
2166 case 'w':
2167 if (XWINT (x, i) != XWINT (y, i))
2168 return 0;
2169 break;
2170
2171 case '0':
2172 break;
2173
2174 default:
2175 abort ();
2176 }
2177 }
2178
2179 return 1;
2180 }
2181 \f
2182 /* Return 1 iff any subexpression of X matches Y.
2183 Here we do not require that X or Y be valid (for registers referred to)
2184 for being in the hash table. */
2185
2186 static int
2187 refers_to_p (x, y)
2188 rtx x, y;
2189 {
2190 register int i;
2191 register enum rtx_code code;
2192 register char *fmt;
2193
2194 repeat:
2195 if (x == y)
2196 return 1;
2197 if (x == 0 || y == 0)
2198 return 0;
2199
2200 code = GET_CODE (x);
2201 /* If X as a whole has the same code as Y, they may match.
2202 If so, return 1. */
2203 if (code == GET_CODE (y))
2204 {
2205 if (exp_equiv_p (x, y, 0, 1))
2206 return 1;
2207 }
2208
2209 /* X does not match, so try its subexpressions. */
2210
2211 fmt = GET_RTX_FORMAT (code);
2212 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2213 if (fmt[i] == 'e')
2214 {
2215 if (i == 0)
2216 {
2217 x = XEXP (x, 0);
2218 goto repeat;
2219 }
2220 else
2221 if (refers_to_p (XEXP (x, i), y))
2222 return 1;
2223 }
2224 else if (fmt[i] == 'E')
2225 {
2226 int j;
2227 for (j = 0; j < XVECLEN (x, i); j++)
2228 if (refers_to_p (XVECEXP (x, i, j), y))
2229 return 1;
2230 }
2231
2232 return 0;
2233 }
2234 \f
2235 /* Given ADDR and SIZE (a memory address, and the size of the memory reference),
2236 set PBASE, PSTART, and PEND, which correspond to the base of the address,
2237 the starting offset, and the ending offset respectively.
2238
2239 ADDR is known to be a nonvarying address. */
2240
2241 /* ??? Despite what the comments say, this function is in fact frequently
2242 passed varying addresses. This does not appear to cause any problems. */
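/* A worked example (frame offset hypothetical): for a 4-byte reference
   to (plus (reg fp) (const_int 8)) where the register has no known
   constant equivalent, we return *PBASE = the frame-pointer rtx,
   *PSTART = 8 and *PEND = 12; two references to the same BASE whose
   [START, END) ranges are disjoint cannot overlap in memory.  */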
2243
2244 static void
2245 set_nonvarying_address_components (addr, size, pbase, pstart, pend)
2246 rtx addr;
2247 int size;
2248 rtx *pbase;
2249 HOST_WIDE_INT *pstart, *pend;
2250 {
2251 rtx base;
2252 HOST_WIDE_INT start, end;
2253
2254 base = addr;
2255 start = 0;
2256 end = 0;
2257
2258 /* Registers with nonvarying addresses usually have constant equivalents;
2259 but the frame pointer register is also possible. */
2260 if (GET_CODE (base) == REG
2261 && qty_const != 0
2262 && REGNO_QTY_VALID_P (REGNO (base))
2263 && qty_mode[reg_qty[REGNO (base)]] == GET_MODE (base)
2264 && qty_const[reg_qty[REGNO (base)]] != 0)
2265 base = qty_const[reg_qty[REGNO (base)]];
2266 else if (GET_CODE (base) == PLUS
2267 && GET_CODE (XEXP (base, 1)) == CONST_INT
2268 && GET_CODE (XEXP (base, 0)) == REG
2269 && qty_const != 0
2270 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
2271 && (qty_mode[reg_qty[REGNO (XEXP (base, 0))]]
2272 == GET_MODE (XEXP (base, 0)))
2273 && qty_const[reg_qty[REGNO (XEXP (base, 0))]])
2274 {
2275 start = INTVAL (XEXP (base, 1));
2276 base = qty_const[reg_qty[REGNO (XEXP (base, 0))]];
2277 }
2278 /* This can happen as the result of virtual register instantiation,
2279 if the initial offset is too large to be a valid address. */
2280 else if (GET_CODE (base) == PLUS
2281 && GET_CODE (XEXP (base, 0)) == REG
2282 && GET_CODE (XEXP (base, 1)) == REG
2283 && qty_const != 0
2284 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
2285 && (qty_mode[reg_qty[REGNO (XEXP (base, 0))]]
2286 == GET_MODE (XEXP (base, 0)))
2287 && qty_const[reg_qty[REGNO (XEXP (base, 0))]]
2288 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 1)))
2289 && (qty_mode[reg_qty[REGNO (XEXP (base, 1))]]
2290 == GET_MODE (XEXP (base, 1)))
2291 && qty_const[reg_qty[REGNO (XEXP (base, 1))]])
2292 {
2293 rtx tem = qty_const[reg_qty[REGNO (XEXP (base, 1))]];
2294 base = qty_const[reg_qty[REGNO (XEXP (base, 0))]];
2295
2296 /* One of the two values must be a constant. */
2297 if (GET_CODE (base) != CONST_INT)
2298 {
2299 if (GET_CODE (tem) != CONST_INT)
2300 abort ();
2301 start = INTVAL (tem);
2302 }
2303 else
2304 {
2305 start = INTVAL (base);
2306 base = tem;
2307 }
2308 }
2309
2310 /* Handle everything that we can find inside an address that has been
2311 viewed as constant. */
2312
2313 while (1)
2314 {
2315 /* If no part of this switch does a "continue", the code outside
2316 will exit this loop. */
2317
2318 switch (GET_CODE (base))
2319 {
2320 case LO_SUM:
2321 /* By definition, operand1 of a LO_SUM is the associated constant
2322 address. Use the associated constant address as the base
2323 instead. */
2324 base = XEXP (base, 1);
2325 continue;
2326
2327 case CONST:
2328 /* Strip off CONST. */
2329 base = XEXP (base, 0);
2330 continue;
2331
2332 case PLUS:
2333 if (GET_CODE (XEXP (base, 1)) == CONST_INT)
2334 {
2335 start += INTVAL (XEXP (base, 1));
2336 base = XEXP (base, 0);
2337 continue;
2338 }
2339 break;
2340
2341 case AND:
2342 /* Handle the case of an AND which is the negative of a power of
2343 two. This is used to represent unaligned memory operations. */
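/* For instance (rtl hypothetical), an unaligned access may appear as
   (and:DI (plus:DI (reg) (reg)) (const_int -8)); exact_log2 (8) is 3,
   so the test below fires and the [START, END) window is widened to
   cover every byte the masked address could touch.  */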
2344 if (GET_CODE (XEXP (base, 1)) == CONST_INT
2345 && exact_log2 (- INTVAL (XEXP (base, 1))) > 0)
2346 {
2347 set_nonvarying_address_components (XEXP (base, 0), size,
2348 pbase, pstart, pend);
2349
2350 /* Assume the worst misalignment. START is affected, but not
2351 END, so compensate by adjusting SIZE. Don't lose any
2352 constant we already had. */
2353
2354 size = *pend - *pstart - INTVAL (XEXP (base, 1)) - 1;
2355 start += *pstart + INTVAL (XEXP (base, 1)) + 1;
2356 end += *pend;
2357 base = *pbase;
2358 }
2359 break;
2360 }
2361
2362 break;
2363 }
2364
2365 if (GET_CODE (base) == CONST_INT)
2366 {
2367 start += INTVAL (base);
2368 base = const0_rtx;
2369 }
2370
2371 end = start + size;
2372
2373 /* Set the return values. */
2374 *pbase = base;
2375 *pstart = start;
2376 *pend = end;
2377 }
2378
2379 /* Return 1 if X has a value that can vary even between two
2380 executions of the program. 0 means X can be compared reliably
2381 against certain constants or near-constants. */
2382
2383 static int
2384 cse_rtx_varies_p (x)
2385 register rtx x;
2386 {
2387 /* We need not check for X and the equivalence class being of the same
2388 mode because if X is equivalent to a constant in some mode, it
2389 doesn't vary in any mode. */
2390
2391 if (GET_CODE (x) == REG
2392 && REGNO_QTY_VALID_P (REGNO (x))
2393 && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]]
2394 && qty_const[reg_qty[REGNO (x)]] != 0)
2395 return 0;
2396
2397 if (GET_CODE (x) == PLUS
2398 && GET_CODE (XEXP (x, 1)) == CONST_INT
2399 && GET_CODE (XEXP (x, 0)) == REG
2400 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2401 && (GET_MODE (XEXP (x, 0))
2402 == qty_mode[reg_qty[REGNO (XEXP (x, 0))]])
2403 && qty_const[reg_qty[REGNO (XEXP (x, 0))]])
2404 return 0;
2405
2406 /* This can happen as the result of virtual register instantiation, if
2407 the initial constant is too large to be a valid address. This gives
2408 us a three instruction sequence, load large offset into a register,
2409 load fp minus a constant into a register, then a MEM which is the
2410 sum of the two `constant' registers. */
2411 if (GET_CODE (x) == PLUS
2412 && GET_CODE (XEXP (x, 0)) == REG
2413 && GET_CODE (XEXP (x, 1)) == REG
2414 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2415 && (GET_MODE (XEXP (x, 0))
2416 == qty_mode[reg_qty[REGNO (XEXP (x, 0))]])
2417 && qty_const[reg_qty[REGNO (XEXP (x, 0))]]
2418 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1)))
2419 && (GET_MODE (XEXP (x, 1))
2420 == qty_mode[reg_qty[REGNO (XEXP (x, 1))]])
2421 && qty_const[reg_qty[REGNO (XEXP (x, 1))]])
2422 return 0;
2423
2424 return rtx_varies_p (x);
2425 }
2426 \f
2427 /* Canonicalize an expression:
2428 replace each register reference inside it
2429 with the "oldest" equivalent register.
2430
2431 If INSN is non-zero and we are replacing a pseudo with a hard register
2432 or vice versa, validate_change is used to ensure that INSN remains valid
2433 after we make our substitution. The calls are made with IN_GROUP non-zero
2434 so apply_change_group must be called upon the outermost return from this
2435 function (unless INSN is zero). The result of apply_change_group can
2436 generally be discarded since the changes we are making are optional. */
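/* For example (pseudo numbers hypothetical): after
   (set (reg:SI 70) (reg:SI 65)) the two registers share a quantity
   whose oldest member is reg 65, so a later
   (plus:SI (reg:SI 70) (const_int 4)) is canonicalized to
   (plus:SI (reg:SI 65) (const_int 4)) and can be recognized wherever
   either register was used.  */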
2437
2438 static rtx
2439 canon_reg (x, insn)
2440 rtx x;
2441 rtx insn;
2442 {
2443 register int i;
2444 register enum rtx_code code;
2445 register char *fmt;
2446
2447 if (x == 0)
2448 return x;
2449
2450 code = GET_CODE (x);
2451 switch (code)
2452 {
2453 case PC:
2454 case CC0:
2455 case CONST:
2456 case CONST_INT:
2457 case CONST_DOUBLE:
2458 case SYMBOL_REF:
2459 case LABEL_REF:
2460 case ADDR_VEC:
2461 case ADDR_DIFF_VEC:
2462 return x;
2463
2464 case REG:
2465 {
2466 register int first;
2467
2468 /* Never replace a hard reg, because hard regs can appear
2469 in more than one machine mode, and we must preserve the mode
2470 of each occurrence. Also, some hard regs appear in
2471 MEMs that are shared and mustn't be altered. Don't try to
2472 replace any reg that maps to a reg of class NO_REGS. */
2473 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2474 || ! REGNO_QTY_VALID_P (REGNO (x)))
2475 return x;
2476
2477 first = qty_first_reg[reg_qty[REGNO (x)]];
2478 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2479 : REGNO_REG_CLASS (first) == NO_REGS ? x
2480 : gen_rtx (REG, qty_mode[reg_qty[REGNO (x)]], first));
2481 }
2482 }
2483
2484 fmt = GET_RTX_FORMAT (code);
2485 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2486 {
2487 register int j;
2488
2489 if (fmt[i] == 'e')
2490 {
2491 rtx new = canon_reg (XEXP (x, i), insn);
2492 int insn_code;
2493
2494 /* If replacing pseudo with hard reg or vice versa, ensure the
2495 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2496 if (insn != 0 && new != 0
2497 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2498 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2499 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2500 || (insn_code = recog_memoized (insn)) < 0
2501 || insn_n_dups[insn_code] > 0))
2502 validate_change (insn, &XEXP (x, i), new, 1);
2503 else
2504 XEXP (x, i) = new;
2505 }
2506 else if (fmt[i] == 'E')
2507 for (j = 0; j < XVECLEN (x, i); j++)
2508 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2509 }
2510
2511 return x;
2512 }
2513 \f
2514 /* LOC is a location within INSN that is an operand address (the contents of
2515 a MEM). Find the best equivalent address to use that is valid for this
2516 insn.
2517
2518 On most CISC machines, complicated address modes are costly, and rtx_cost
2519 is a good approximation for that cost. However, most RISC machines have
2520 only a few (usually only one) memory reference formats. If an address is
2521 valid at all, it is often just as cheap as any other address. Hence, for
2522 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2523 costs of various addresses. For two addresses of equal cost, choose the one
2524 with the highest `rtx_cost' value as that has the potential of eliminating
2525 the most insns. For equal costs, we choose the first in the equivalence
2526 class. Note that we ignore the fact that pseudo registers are cheaper
2527 than hard registers here because we would also prefer the pseudo
2528 registers. */
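/* Worked illustration of the tie-break (operands hypothetical): if
   (reg 65) and (plus (reg fp) (const_int -16)) are equivalent
   addresses with equal ADDRESS_COST, we prefer the PLUS: its rtx_cost
   is higher, and folding the computation into the address may let the
   insn that loaded reg 65 be deleted as dead.  */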
2529
2530 static void
2531 find_best_addr (insn, loc)
2532 rtx insn;
2533 rtx *loc;
2534 {
2535 struct table_elt *elt, *p;
2536 rtx addr = *loc;
2537 int our_cost;
2538 int found_better = 1;
2539 int save_do_not_record = do_not_record;
2540 int save_hash_arg_in_memory = hash_arg_in_memory;
2541 int save_hash_arg_in_struct = hash_arg_in_struct;
2542 int addr_volatile;
2543 int regno;
2544 unsigned hash;
2545
2546 /* Do not try to replace constant addresses or addresses of local and
2547 argument slots. These MEM expressions are made only once and inserted
2548 in many instructions, as well as being used to control symbol table
2549 output. It is not safe to clobber them.
2550
2551 There are some uncommon cases where the address is already in a register
2552 for some reason, but we cannot take advantage of that because we have
2553 no easy way to unshare the MEM. In addition, looking up all stack
2554 addresses is costly. */
2555 if ((GET_CODE (addr) == PLUS
2556 && GET_CODE (XEXP (addr, 0)) == REG
2557 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2558 && (regno = REGNO (XEXP (addr, 0)),
2559 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2560 || regno == ARG_POINTER_REGNUM))
2561 || (GET_CODE (addr) == REG
2562 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2563 || regno == HARD_FRAME_POINTER_REGNUM
2564 || regno == ARG_POINTER_REGNUM))
2565 || CONSTANT_ADDRESS_P (addr))
2566 return;
2567
2568 /* If this address is not simply a register, try to fold it. This will
2569 sometimes simplify the expression. Many simplifications
2570 will not be valid, but some, usually applying the associative rule, will
2571 be valid and produce better code. */
2572 if (GET_CODE (addr) != REG)
2573 {
2574 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2575
2576 if (1
2577 #ifdef ADDRESS_COST
2578 && (ADDRESS_COST (folded) < ADDRESS_COST (addr)
2579 || (ADDRESS_COST (folded) == ADDRESS_COST (addr)
2580 && rtx_cost (folded, MEM) > rtx_cost (addr, MEM)))
2581 #else
2582 && rtx_cost (folded, MEM) < rtx_cost (addr, MEM)
2583 #endif
2584 && validate_change (insn, loc, folded, 0))
2585 addr = folded;
2586 }
2587
2588 /* If this address is not in the hash table, we can't look for equivalences
2589 of the whole address. Also, ignore if volatile. */
2590
2591 do_not_record = 0;
2592 hash = HASH (addr, Pmode);
2593 addr_volatile = do_not_record;
2594 do_not_record = save_do_not_record;
2595 hash_arg_in_memory = save_hash_arg_in_memory;
2596 hash_arg_in_struct = save_hash_arg_in_struct;
2597
2598 if (addr_volatile)
2599 return;
2600
2601 elt = lookup (addr, hash, Pmode);
2602
2603 #ifndef ADDRESS_COST
2604 if (elt)
2605 {
2606 our_cost = elt->cost;
2607
2608 /* Find the lowest cost below ours that works. */
2609 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2610 if (elt->cost < our_cost
2611 && (GET_CODE (elt->exp) == REG
2612 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2613 && validate_change (insn, loc,
2614 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2615 return;
2616 }
2617 #else
2618
2619 if (elt)
2620 {
2621 /* We need to find the best (under the criteria documented above) entry
2622 in the class that is valid. We use the `flag' field to indicate
2623 choices that were invalid and iterate until we can't find a better
2624 one that hasn't already been tried. */
2625
2626 for (p = elt->first_same_value; p; p = p->next_same_value)
2627 p->flag = 0;
2628
2629 while (found_better)
2630 {
2631 int best_addr_cost = ADDRESS_COST (*loc);
2632 int best_rtx_cost = (elt->cost + 1) >> 1;
2633 struct table_elt *best_elt = elt;
2634
2635 found_better = 0;
2636 for (p = elt->first_same_value; p; p = p->next_same_value)
2637 if (! p->flag
2638 && (GET_CODE (p->exp) == REG
2639 || exp_equiv_p (p->exp, p->exp, 1, 0))
2640 && (ADDRESS_COST (p->exp) < best_addr_cost
2641 || (ADDRESS_COST (p->exp) == best_addr_cost
2642 && (p->cost + 1) >> 1 > best_rtx_cost)))
2643 {
2644 found_better = 1;
2645 best_addr_cost = ADDRESS_COST (p->exp);
2646 best_rtx_cost = (p->cost + 1) >> 1;
2647 best_elt = p;
2648 }
2649
2650 if (found_better)
2651 {
2652 if (validate_change (insn, loc,
2653 canon_reg (copy_rtx (best_elt->exp),
2654 NULL_RTX), 0))
2655 return;
2656 else
2657 best_elt->flag = 1;
2658 }
2659 }
2660 }
2661
2662 /* If the address is a binary operation with the first operand a register
2663 and the second a constant, do the same as above, but looking for
2664 equivalences of the register. Then try to simplify before checking for
2665 the best address to use. This catches a few cases: First is when we
2666 have REG+const and the register is another REG+const. We can often merge
2667 the constants and eliminate one insn and one register. It may also be
2668 that a machine has a cheap REG+REG+const. Finally, this improves the
2669 code on the Alpha for unaligned byte stores. */
2670
2671 if (flag_expensive_optimizations
2672 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
2673 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
2674 && GET_CODE (XEXP (*loc, 0)) == REG
2675 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
2676 {
2677 rtx c = XEXP (*loc, 1);
2678
2679 do_not_record = 0;
2680 hash = HASH (XEXP (*loc, 0), Pmode);
2681 do_not_record = save_do_not_record;
2682 hash_arg_in_memory = save_hash_arg_in_memory;
2683 hash_arg_in_struct = save_hash_arg_in_struct;
2684
2685 elt = lookup (XEXP (*loc, 0), hash, Pmode);
2686 if (elt == 0)
2687 return;
2688
2689 /* We need to find the best (under the criteria documented above) entry
2690 in the class that is valid. We use the `flag' field to indicate
2691 choices that were invalid and iterate until we can't find a better
2692 one that hasn't already been tried. */
2693
2694 for (p = elt->first_same_value; p; p = p->next_same_value)
2695 p->flag = 0;
2696
2697 while (found_better)
2698 {
2699 int best_addr_cost = ADDRESS_COST (*loc);
2700 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2701 struct table_elt *best_elt = elt;
2702 rtx best_rtx = *loc;
2703 int count;
2704
2705 /* This is at worst an O(n^2) algorithm, so limit our search
2706 to the first 32 elements on the list. This avoids trouble
2707 compiling code with very long basic blocks that can easily
2708 call cse_gen_binary so many times that we run out of memory. */
2709
2710 found_better = 0;
2711 for (p = elt->first_same_value, count = 0;
2712 p && count < 32;
2713 p = p->next_same_value, count++)
2714 if (! p->flag
2715 && (GET_CODE (p->exp) == REG
2716 || exp_equiv_p (p->exp, p->exp, 1, 0)))
2717 {
2718 rtx new = cse_gen_binary (GET_CODE (*loc), Pmode, p->exp, c);
2719
2720 if ((ADDRESS_COST (new) < best_addr_cost
2721 || (ADDRESS_COST (new) == best_addr_cost
2722 && (COST (new) + 1) >> 1 > best_rtx_cost)))
2723 {
2724 found_better = 1;
2725 best_addr_cost = ADDRESS_COST (new);
2726 best_rtx_cost = (COST (new) + 1) >> 1;
2727 best_elt = p;
2728 best_rtx = new;
2729 }
2730 }
2731
2732 if (found_better)
2733 {
2734 if (validate_change (insn, loc,
2735 canon_reg (copy_rtx (best_rtx),
2736 NULL_RTX), 0))
2737 return;
2738 else
2739 best_elt->flag = 1;
2740 }
2741 }
2742 }
2743 #endif
2744 }
2745 \f
2746 /* Given an operation (CODE, *PARG1, *PARG2), where CODE is a comparison
2747 operation (EQ, NE, GT, etc.), follow it back through the hash table to
2748 find what values are actually being compared.
2749
2750 *PARG1 and *PARG2 are updated to contain the rtx representing the values
2751 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
2752 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2753 compared to produce cc0.
2754
2755 The return value is the comparison operator and is either CODE itself
2756 or the code corresponding to the inverse of the comparison. */
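/* For example (operands hypothetical): given CODE = NE,
   *PARG1 = (cc0) and *PARG2 = (const_int 0), where the hash table
   knows (cc0) is equivalent to (compare (reg 65) (reg 66)), we return
   NE with *PARG1 = (reg 65) and *PARG2 = (reg 66), exposing the real
   operands of the comparison.  */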
2757
2758 static enum rtx_code
2759 find_comparison_args (code, parg1, parg2, pmode1, pmode2)
2760 enum rtx_code code;
2761 rtx *parg1, *parg2;
2762 enum machine_mode *pmode1, *pmode2;
2763 {
2764 rtx arg1, arg2;
2765
2766 arg1 = *parg1, arg2 = *parg2;
2767
2768 /* While ARG2 is the zero constant for ARG1's mode, see what ARG1 is
equivalent to. */
2769
2770 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2771 {
2772 /* Set non-zero when we find something of interest. */
2773 rtx x = 0;
2774 int reverse_code = 0;
2775 struct table_elt *p = 0;
2776
2777 /* If arg1 is a COMPARE, extract the comparison arguments from it.
2778 On machines with CC0, this is the only case that can occur, since
2779 fold_rtx will return the COMPARE or item being compared with zero
2780 when given CC0. */
2781
2782 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2783 x = arg1;
2784
2785 /* If ARG1 is a comparison operator and CODE is testing for
2786 STORE_FLAG_VALUE, get the inner arguments. */
2787
2788 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
2789 {
2790 if (code == NE
2791 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2792 && code == LT && STORE_FLAG_VALUE == -1)
2793 #ifdef FLOAT_STORE_FLAG_VALUE
2794 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2795 && FLOAT_STORE_FLAG_VALUE < 0)
2796 #endif
2797 )
2798 x = arg1;
2799 else if (code == EQ
2800 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2801 && code == GE && STORE_FLAG_VALUE == -1)
2802 #ifdef FLOAT_STORE_FLAG_VALUE
2803 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2804 && FLOAT_STORE_FLAG_VALUE < 0)
2805 #endif
2806 )
2807 x = arg1, reverse_code = 1;
2808 }
2809
2810 /* ??? We could also check for
2811
2812 (ne (and (eq (...) (const_int 1))) (const_int 0))
2813
2814 and related forms, but let's wait until we see them occurring. */
2815
2816 if (x == 0)
2817 /* Look up ARG1 in the hash table and see if it has an equivalence
2818 that lets us see what is being compared. */
2819 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) % NBUCKETS,
2820 GET_MODE (arg1));
2821 if (p) p = p->first_same_value;
2822
2823 for (; p; p = p->next_same_value)
2824 {
2825 enum machine_mode inner_mode = GET_MODE (p->exp);
2826
2827 /* If the entry isn't valid, skip it. */
2828 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
2829 continue;
2830
2831 if (GET_CODE (p->exp) == COMPARE
2832 /* Another possibility is that this machine has a compare insn
2833 that includes the comparison code. In that case, ARG1 would
2834 be equivalent to a comparison operation that would set ARG1 to
2835 either STORE_FLAG_VALUE or zero. If this is an NE operation,
2836 ORIG_CODE is the actual comparison being done; if it is an EQ,
2837 we must reverse ORIG_CODE. On machines with a negative value
2838 for STORE_FLAG_VALUE, also look at LT and GE operations. */
2839 || ((code == NE
2840 || (code == LT
2841 && GET_MODE_CLASS (inner_mode) == MODE_INT
2842 && (GET_MODE_BITSIZE (inner_mode)
2843 <= HOST_BITS_PER_WIDE_INT)
2844 && (STORE_FLAG_VALUE
2845 & ((HOST_WIDE_INT) 1
2846 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2847 #ifdef FLOAT_STORE_FLAG_VALUE
2848 || (code == LT
2849 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2850 && FLOAT_STORE_FLAG_VALUE < 0)
2851 #endif
2852 )
2853 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
2854 {
2855 x = p->exp;
2856 break;
2857 }
2858 else if ((code == EQ
2859 || (code == GE
2860 && GET_MODE_CLASS (inner_mode) == MODE_INT
2861 && (GET_MODE_BITSIZE (inner_mode)
2862 <= HOST_BITS_PER_WIDE_INT)
2863 && (STORE_FLAG_VALUE
2864 & ((HOST_WIDE_INT) 1
2865 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2866 #ifdef FLOAT_STORE_FLAG_VALUE
2867 || (code == GE
2868 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2869 && FLOAT_STORE_FLAG_VALUE < 0)
2870 #endif
2871 )
2872 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
2873 {
2874 reverse_code = 1;
2875 x = p->exp;
2876 break;
2877 }
2878
2879 /* If this is fp + constant, the equivalent is a better operand since
2880 it may let us predict the value of the comparison. */
2881 else if (NONZERO_BASE_PLUS_P (p->exp))
2882 {
2883 arg1 = p->exp;
2884 continue;
2885 }
2886 }
2887
2888 /* If we didn't find a useful equivalence for ARG1, we are done.
2889 Otherwise, set up for the next iteration. */
2890 if (x == 0)
2891 break;
2892
2893 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
2894 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
2895 code = GET_CODE (x);
2896
2897 if (reverse_code)
2898 code = reverse_condition (code);
2899 }
2900
2901 /* Return our results. Return the modes from before fold_rtx
2902 because fold_rtx might produce const_int, and then it's too late. */
2903 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
2904 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
2905
2906 return code;
2907 }
2908 \f
2909 /* Try to simplify a unary operation CODE whose output mode is to be
2910 MODE with input operand OP whose mode was originally OP_MODE.
2911 Return zero if no simplification can be made. */
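/* For example (operands chosen for illustration):
   simplify_unary_operation (NEG, SImode, GEN_INT (5), SImode) yields
   (const_int -5), and applying NOT to (not:SI (reg 65)) yields
   (reg 65) by the (not (not X)) == X rule near the end.  */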
2912
2913 rtx
2914 simplify_unary_operation (code, mode, op, op_mode)
2915 enum rtx_code code;
2916 enum machine_mode mode;
2917 rtx op;
2918 enum machine_mode op_mode;
2919 {
2920 register int width = GET_MODE_BITSIZE (mode);
2921
2922 /* The order of these tests is critical so that, for example, we don't
2923 check the wrong mode (input vs. output) for a conversion operation,
2924 such as FIX. At some point, this should be simplified. */
2925
2926 #if !defined(REAL_IS_NOT_DOUBLE) || defined(REAL_ARITHMETIC)
2927
2928 if (code == FLOAT && GET_MODE (op) == VOIDmode
2929 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
2930 {
2931 HOST_WIDE_INT hv, lv;
2932 REAL_VALUE_TYPE d;
2933
2934 if (GET_CODE (op) == CONST_INT)
2935 lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
2936 else
2937 lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
2938
2939 #ifdef REAL_ARITHMETIC
2940 REAL_VALUE_FROM_INT (d, lv, hv, mode);
2941 #else
2942 if (hv < 0)
2943 {
2944 d = (double) (~ hv);
2945 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
2946 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
2947 d += (double) (unsigned HOST_WIDE_INT) (~ lv);
2948 d = (- d - 1.0);
2949 }
2950 else
2951 {
2952 d = (double) hv;
2953 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
2954 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
2955 d += (double) (unsigned HOST_WIDE_INT) lv;
2956 }
2957 #endif /* REAL_ARITHMETIC */
2958 d = real_value_truncate (mode, d);
2959 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
2960 }
2961 else if (code == UNSIGNED_FLOAT && GET_MODE (op) == VOIDmode
2962 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
2963 {
2964 HOST_WIDE_INT hv, lv;
2965 REAL_VALUE_TYPE d;
2966
2967 if (GET_CODE (op) == CONST_INT)
2968 lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
2969 else
2970 lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
2971
2972 if (op_mode == VOIDmode)
2973 {
2974 /* We don't know how to interpret negative-looking numbers in
2975 this case, so don't try to fold those. */
2976 if (hv < 0)
2977 return 0;
2978 }
2979 else if (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT * 2)
2980 ;
2981 else
2982 hv = 0, lv &= GET_MODE_MASK (op_mode);
2983
2984 #ifdef REAL_ARITHMETIC
2985 REAL_VALUE_FROM_UNSIGNED_INT (d, lv, hv, mode);
2986 #else
2987
2988 d = (double) (unsigned HOST_WIDE_INT) hv;
2989 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
2990 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
2991 d += (double) (unsigned HOST_WIDE_INT) lv;
2992 #endif /* REAL_ARITHMETIC */
2993 d = real_value_truncate (mode, d);
2994 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
2995 }
2996 #endif
2997
2998 if (GET_CODE (op) == CONST_INT
2999 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
3000 {
3001 register HOST_WIDE_INT arg0 = INTVAL (op);
3002 register HOST_WIDE_INT val;
3003
3004 switch (code)
3005 {
3006 case NOT:
3007 val = ~ arg0;
3008 break;
3009
3010 case NEG:
3011 val = - arg0;
3012 break;
3013
3014 case ABS:
3015 val = (arg0 >= 0 ? arg0 : - arg0);
3016 break;
3017
3018 case FFS:
3019 /* Don't use ffs here. Instead, get the low-order bit and then its
3020 number. If arg0 is zero, this will return 0, as desired. */
3021 arg0 &= GET_MODE_MASK (mode);
3022 val = exact_log2 (arg0 & (- arg0)) + 1;
3023 break;
3024
3025 case TRUNCATE:
3026 val = arg0;
3027 break;
3028
3029 case ZERO_EXTEND:
3030 if (op_mode == VOIDmode)
3031 op_mode = mode;
3032 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
3033 {
3034 /* If we were really extending the mode,
3035 we would have to distinguish between zero-extension
3036 and sign-extension. */
3037 if (width != GET_MODE_BITSIZE (op_mode))
3038 abort ();
3039 val = arg0;
3040 }
3041 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3042 val = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3043 else
3044 return 0;
3045 break;
3046
3047 case SIGN_EXTEND:
3048 if (op_mode == VOIDmode)
3049 op_mode = mode;
3050 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
3051 {
3052 /* If we were really extending the mode,
3053 we would have to distinguish between zero-extension
3054 and sign-extension. */
3055 if (width != GET_MODE_BITSIZE (op_mode))
3056 abort ();
3057 val = arg0;
3058 }
3059 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3060 {
3061 val
3062 = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3063 if (val
3064 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (op_mode) - 1)))
3065 val -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3066 }
3067 else
3068 return 0;
3069 break;
3070
3071 case SQRT:
3072 return 0;
3073
3074 default:
3075 abort ();
3076 }
3077
3078 /* Clear the bits that don't belong in our mode,
3079 unless they and our sign bit are all one.
3080 So we get either a reasonable negative value or a reasonable
3081 unsigned value for this mode. */
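/* A worked instance with width 8: NOT of (const_int 1) computes
   val = -2; its excess high bits agree with the sign bit, so it is
   returned as the negative value -2 rather than as 0xfe.  */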
3082 if (width < HOST_BITS_PER_WIDE_INT
3083 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3084 != ((HOST_WIDE_INT) (-1) << (width - 1))))
3085 val &= ((HOST_WIDE_INT) 1 << width) - 1;
3086
3087 return GEN_INT (val);
3088 }
3089
3090 /* We can do some operations on integer CONST_DOUBLEs. Also allow
3091 for a DImode operation on a CONST_INT. */
3092 else if (GET_MODE (op) == VOIDmode && width <= HOST_BITS_PER_WIDE_INT * 2
3093 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3094 {
3095 HOST_WIDE_INT l1, h1, lv, hv;
3096
3097 if (GET_CODE (op) == CONST_DOUBLE)
3098 l1 = CONST_DOUBLE_LOW (op), h1 = CONST_DOUBLE_HIGH (op);
3099 else
3100 l1 = INTVAL (op), h1 = l1 < 0 ? -1 : 0;
3101
3102 switch (code)
3103 {
3104 case NOT:
3105 lv = ~ l1;
3106 hv = ~ h1;
3107 break;
3108
3109 case NEG:
3110 neg_double (l1, h1, &lv, &hv);
3111 break;
3112
3113 case ABS:
3114 if (h1 < 0)
3115 neg_double (l1, h1, &lv, &hv);
3116 else
3117 lv = l1, hv = h1;
3118 break;
3119
3120 case FFS:
3121 hv = 0;
3122 if (l1 == 0)
3123 lv = h1 == 0 ? 0 : HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & (-h1)) + 1;
3124 else
3125 lv = exact_log2 (l1 & (-l1)) + 1;
3126 break;
3127
3128 case TRUNCATE:
3129 /* This is just a change-of-mode, so do nothing. */
3130 lv = l1, hv = h1;
3131 break;
3132
3133 case ZERO_EXTEND:
3134 if (op_mode == VOIDmode
3135 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3136 return 0;
3137
3138 hv = 0;
3139 lv = l1 & GET_MODE_MASK (op_mode);
3140 break;
3141
3142 case SIGN_EXTEND:
3143 if (op_mode == VOIDmode
3144 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3145 return 0;
3146 else
3147 {
3148 lv = l1 & GET_MODE_MASK (op_mode);
3149 if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
3150 && (lv & ((HOST_WIDE_INT) 1
3151 << (GET_MODE_BITSIZE (op_mode) - 1))) != 0)
3152 lv -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3153
3154 hv = (lv < 0) ? ~ (HOST_WIDE_INT) 0 : 0;
3155 }
3156 break;
3157
3158 case SQRT:
3159 return 0;
3160
3161 default:
3162 return 0;
3163 }
3164
3165 return immed_double_const (lv, hv, mode);
3166 }
3167
3168 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3169 else if (GET_CODE (op) == CONST_DOUBLE
3170 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3171 {
3172 REAL_VALUE_TYPE d;
3173 jmp_buf handler;
3174 rtx x;
3175
3176 if (setjmp (handler))
3177 /* There used to be a warning here, but that is inadvisable.
3178 People may want to cause traps, and the natural way
3179 to do it should not get a warning. */
3180 return 0;
3181
3182 set_float_handler (handler);
3183
3184 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3185
3186 switch (code)
3187 {
3188 case NEG:
3189 d = REAL_VALUE_NEGATE (d);
3190 break;
3191
3192 case ABS:
3193 if (REAL_VALUE_NEGATIVE (d))
3194 d = REAL_VALUE_NEGATE (d);
3195 break;
3196
3197 case FLOAT_TRUNCATE:
3198 d = real_value_truncate (mode, d);
3199 break;
3200
3201 case FLOAT_EXTEND:
3202 /* All this does is change the mode. */
3203 break;
3204
3205 case FIX:
3206 d = REAL_VALUE_RNDZINT (d);
3207 break;
3208
3209 case UNSIGNED_FIX:
3210 d = REAL_VALUE_UNSIGNED_RNDZINT (d);
3211 break;
3212
3213 case SQRT:
3214 return 0;
3215
3216 default:
3217 abort ();
3218 }
3219
3220 x = CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3221 set_float_handler (NULL_PTR);
3222 return x;
3223 }
3224
3225 else if (GET_CODE (op) == CONST_DOUBLE
3226 && GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
3227 && GET_MODE_CLASS (mode) == MODE_INT
3228 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
3229 {
3230 REAL_VALUE_TYPE d;
3231 jmp_buf handler;
3232 HOST_WIDE_INT val;
3233
3234 if (setjmp (handler))
3235 return 0;
3236
3237 set_float_handler (handler);
3238
3239 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3240
3241 switch (code)
3242 {
3243 case FIX:
3244 val = REAL_VALUE_FIX (d);
3245 break;
3246
3247 case UNSIGNED_FIX:
3248 val = REAL_VALUE_UNSIGNED_FIX (d);
3249 break;
3250
3251 default:
3252 abort ();
3253 }
3254
3255 set_float_handler (NULL_PTR);
3256
3257 /* Clear the bits that don't belong in our mode,
3258 unless they and our sign bit are all one.
3259 So we get either a reasonable negative value or a reasonable
3260 unsigned value for this mode. */
3261 if (width < HOST_BITS_PER_WIDE_INT
3262 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3263 != ((HOST_WIDE_INT) (-1) << (width - 1))))
3264 val &= ((HOST_WIDE_INT) 1 << width) - 1;
3265
3266 /* If this would be an entire word for the target, but is not for
3267 the host, then sign-extend on the host so that the number will look
3268 the same on the host as it would on the target.
3269
3270 For example, when building a 64 bit alpha hosted 32 bit sparc
3271 targeted compiler, then we want the 32 bit unsigned value -1 to be
3272 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
3273 The latter confuses the sparc backend. */
3274
3275 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
3276 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
3277 val |= ((HOST_WIDE_INT) (-1) << width);
3278
3279 return GEN_INT (val);
3280 }
3281 #endif
3282 /* This was formerly used only for non-IEEE float.
3283 eggert@twinsun.com says it is safe for IEEE also. */
3284 else
3285 {
3286 /* There are some simplifications we can do even if the operands
3287 aren't constant. */
3288 switch (code)
3289 {
3290 case NEG:
3291 case NOT:
3292 /* (not (not X)) == X, similarly for NEG. */
3293 if (GET_CODE (op) == code)
3294 return XEXP (op, 0);
3295 break;
3296
3297 case SIGN_EXTEND:
3298 /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2))))
3299 becomes just the MINUS if its mode is MODE. This allows
3300 folding switch statements on machines using casesi (such as
3301 the Vax). */
3302 if (GET_CODE (op) == TRUNCATE
3303 && GET_MODE (XEXP (op, 0)) == mode
3304 && GET_CODE (XEXP (op, 0)) == MINUS
3305 && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF
3306 && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF)
3307 return XEXP (op, 0);
3308
3309 #ifdef POINTERS_EXTEND_UNSIGNED
3310 if (! POINTERS_EXTEND_UNSIGNED
3311 && mode == Pmode && GET_MODE (op) == ptr_mode
3312 && CONSTANT_P (op))
3313 return convert_memory_address (Pmode, op);
3314 #endif
3315 break;
3316
3317 #ifdef POINTERS_EXTEND_UNSIGNED
3318 case ZERO_EXTEND:
3319 if (POINTERS_EXTEND_UNSIGNED
3320 && mode == Pmode && GET_MODE (op) == ptr_mode
3321 && CONSTANT_P (op))
3322 return convert_memory_address (Pmode, op);
3323 break;
3324 #endif
3325 }
3326
3327 return 0;
3328 }
3329 }
3330 \f
3331 /* Simplify a binary operation CODE with result mode MODE, operating on OP0
3332 and OP1. Return 0 if no simplification is possible.
3333
3334 Don't use this for relational operations such as EQ or LT.
3335 Use simplify_relational_operation instead. */
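/* For example (operands hypothetical): for an integer mode,
   simplify_binary_operation (PLUS, SImode, (neg:SI (reg 65)), (reg 66))
   is rewritten below as (minus:SI (reg:SI 66) (reg:SI 65)), and adding
   (const_int 0) to X simply returns X where that is safe.  */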
3336
3337 rtx
3338 simplify_binary_operation (code, mode, op0, op1)
3339 enum rtx_code code;
3340 enum machine_mode mode;
3341 rtx op0, op1;
3342 {
3343 register HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
3344 HOST_WIDE_INT val;
3345 int width = GET_MODE_BITSIZE (mode);
3346 rtx tem;
3347
3348 /* Relational operations don't work here. We must know the mode
3349 of the operands in order to do the comparison correctly.
3350 Assuming a full word can give incorrect results.
3351 Consider comparing 128 with -128 in QImode. */
3352
3353 if (GET_RTX_CLASS (code) == '<')
3354 abort ();
3355
3356 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3357 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3358 && GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
3359 && mode == GET_MODE (op0) && mode == GET_MODE (op1))
3360 {
3361 REAL_VALUE_TYPE f0, f1, value;
3362 jmp_buf handler;
3363
3364 if (setjmp (handler))
3365 return 0;
3366
3367 set_float_handler (handler);
3368
3369 REAL_VALUE_FROM_CONST_DOUBLE (f0, op0);
3370 REAL_VALUE_FROM_CONST_DOUBLE (f1, op1);
3371 f0 = real_value_truncate (mode, f0);
3372 f1 = real_value_truncate (mode, f1);
3373
3374 #ifdef REAL_ARITHMETIC
3375 REAL_ARITHMETIC (value, rtx_to_tree_code (code), f0, f1);
3376 #else
3377 switch (code)
3378 {
3379 case PLUS:
3380 value = f0 + f1;
3381 break;
3382 case MINUS:
3383 value = f0 - f1;
3384 break;
3385 case MULT:
3386 value = f0 * f1;
3387 break;
3388 case DIV:
3389 #ifndef REAL_INFINITY
3390 if (f1 == 0)
3391 return 0;
3392 #endif
3393 value = f0 / f1;
3394 break;
3395 case SMIN:
3396 value = MIN (f0, f1);
3397 break;
3398 case SMAX:
3399 value = MAX (f0, f1);
3400 break;
3401 default:
3402 abort ();
3403 }
3404 #endif
3405
3406 value = real_value_truncate (mode, value);
3407 set_float_handler (NULL_PTR);
3408 return CONST_DOUBLE_FROM_REAL_VALUE (value, mode);
3409 }
3410 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3411
3412 /* We can fold some multi-word operations. */
3413 if (GET_MODE_CLASS (mode) == MODE_INT
3414 && width == HOST_BITS_PER_WIDE_INT * 2
3415 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
3416 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
3417 {
3418 HOST_WIDE_INT l1, l2, h1, h2, lv, hv;
3419
3420 if (GET_CODE (op0) == CONST_DOUBLE)
3421 l1 = CONST_DOUBLE_LOW (op0), h1 = CONST_DOUBLE_HIGH (op0);
3422 else
3423 l1 = INTVAL (op0), h1 = l1 < 0 ? -1 : 0;
3424
3425 if (GET_CODE (op1) == CONST_DOUBLE)
3426 l2 = CONST_DOUBLE_LOW (op1), h2 = CONST_DOUBLE_HIGH (op1);
3427 else
3428 l2 = INTVAL (op1), h2 = l2 < 0 ? -1 : 0;
3429
3430 switch (code)
3431 {
3432 case MINUS:
3433 /* A - B == A + (-B). */
3434 neg_double (l2, h2, &lv, &hv);
3435 l2 = lv, h2 = hv;
3436
3437 /* ... fall through ... */
3438
3439 case PLUS:
3440 add_double (l1, h1, l2, h2, &lv, &hv);
3441 break;
3442
3443 case MULT:
3444 mul_double (l1, h1, l2, h2, &lv, &hv);
3445 break;
3446
3447 case DIV: case MOD: case UDIV: case UMOD:
3448 /* We'd need to include tree.h to do this and it doesn't seem worth
3449 it. */
3450 return 0;
3451
3452 case AND:
3453 lv = l1 & l2, hv = h1 & h2;
3454 break;
3455
3456 case IOR:
3457 lv = l1 | l2, hv = h1 | h2;
3458 break;
3459
3460 case XOR:
3461 lv = l1 ^ l2, hv = h1 ^ h2;
3462 break;
3463
3464 case SMIN:
3465 if (h1 < h2
3466 || (h1 == h2
3467 && ((unsigned HOST_WIDE_INT) l1
3468 < (unsigned HOST_WIDE_INT) l2)))
3469 lv = l1, hv = h1;
3470 else
3471 lv = l2, hv = h2;
3472 break;
3473
3474 case SMAX:
3475 if (h1 > h2
3476 || (h1 == h2
3477 && ((unsigned HOST_WIDE_INT) l1
3478 > (unsigned HOST_WIDE_INT) l2)))
3479 lv = l1, hv = h1;
3480 else
3481 lv = l2, hv = h2;
3482 break;
3483
3484 case UMIN:
3485 if ((unsigned HOST_WIDE_INT) h1 < (unsigned HOST_WIDE_INT) h2
3486 || (h1 == h2
3487 && ((unsigned HOST_WIDE_INT) l1
3488 < (unsigned HOST_WIDE_INT) l2)))
3489 lv = l1, hv = h1;
3490 else
3491 lv = l2, hv = h2;
3492 break;
3493
3494 case UMAX:
3495 if ((unsigned HOST_WIDE_INT) h1 > (unsigned HOST_WIDE_INT) h2
3496 || (h1 == h2
3497 && ((unsigned HOST_WIDE_INT) l1
3498 > (unsigned HOST_WIDE_INT) l2)))
3499 lv = l1, hv = h1;
3500 else
3501 lv = l2, hv = h2;
3502 break;
3503
3504 case LSHIFTRT: case ASHIFTRT:
3505 case ASHIFT:
3506 case ROTATE: case ROTATERT:
3507 #ifdef SHIFT_COUNT_TRUNCATED
3508 if (SHIFT_COUNT_TRUNCATED)
3509 l2 &= (GET_MODE_BITSIZE (mode) - 1), h2 = 0;
3510 #endif
3511
3512 if (h2 != 0 || l2 < 0 || l2 >= GET_MODE_BITSIZE (mode))
3513 return 0;
3514
3515 if (code == LSHIFTRT || code == ASHIFTRT)
3516 rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
3517 code == ASHIFTRT);
3518 else if (code == ASHIFT)
3519 lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv, 1);
3520 else if (code == ROTATE)
3521 lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3522 else /* code == ROTATERT */
3523 rrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3524 break;
3525
3526 default:
3527 return 0;
3528 }
3529
3530 return immed_double_const (lv, hv, mode);
3531 }
3532
3533 if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT
3534 || width > HOST_BITS_PER_WIDE_INT || width == 0)
3535 {
3536 /* Even if we can't compute a constant result,
3537 there are some cases worth simplifying. */
3538
3539 switch (code)
3540 {
3541 case PLUS:
3542 /* In IEEE floating point, x+0 is not the same as x. Similarly
3543 for the other optimizations below. */
3544 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3545 && FLOAT_MODE_P (mode) && ! flag_fast_math)
3546 break;
3547
3548 if (op1 == CONST0_RTX (mode))
3549 return op0;
3550
3551 /* ((-a) + b) -> (b - a) and similarly for (a + (-b)) */
3552 if (GET_CODE (op0) == NEG)
3553 return cse_gen_binary (MINUS, mode, op1, XEXP (op0, 0));
3554 else if (GET_CODE (op1) == NEG)
3555 return cse_gen_binary (MINUS, mode, op0, XEXP (op1, 0));
3556
3557 /* Handle both-operands-constant cases. We can only add
3558 CONST_INTs to constants since the sum of relocatable symbols
3559 can't be handled by most assemblers. Don't add CONST_INT
3560 to CONST_INT since overflow won't be computed properly if wider
3561 than HOST_BITS_PER_WIDE_INT. */
3562
3563 if (CONSTANT_P (op0) && GET_MODE (op0) != VOIDmode
3564 && GET_CODE (op1) == CONST_INT)
3565 return plus_constant (op0, INTVAL (op1));
3566 else if (CONSTANT_P (op1) && GET_MODE (op1) != VOIDmode
3567 && GET_CODE (op0) == CONST_INT)
3568 return plus_constant (op1, INTVAL (op0));
3569
3570 /* See if this is something like X * C - X or vice versa or
3571 if the multiplication is written as a shift. If so, we can
3572 distribute and make a new multiply, shift, or maybe just
3573 have X (if C is 2 in the example above). But don't make
3574 real multiply if we didn't have one before. */
3575
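	  /* For example (an illustrative sketch): (plus (mult x 4)
	     (ashift x 1)) gives coeff0 == 4 and coeff1 == 2 and folds
	     to (mult x 6); but (plus x (ashift x 1)) is left alone,
	     since (mult x 3) would introduce a real multiply where the
	     source had none.  */
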
3576 if (! FLOAT_MODE_P (mode))
3577 {
3578 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3579 rtx lhs = op0, rhs = op1;
3580 int had_mult = 0;
3581
3582 if (GET_CODE (lhs) == NEG)
3583 coeff0 = -1, lhs = XEXP (lhs, 0);
3584 else if (GET_CODE (lhs) == MULT
3585 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3586 {
3587 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3588 had_mult = 1;
3589 }
3590 else if (GET_CODE (lhs) == ASHIFT
3591 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3592 && INTVAL (XEXP (lhs, 1)) >= 0
3593 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3594 {
3595 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3596 lhs = XEXP (lhs, 0);
3597 }
3598
3599 if (GET_CODE (rhs) == NEG)
3600 coeff1 = -1, rhs = XEXP (rhs, 0);
3601 else if (GET_CODE (rhs) == MULT
3602 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3603 {
3604 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3605 had_mult = 1;
3606 }
3607 else if (GET_CODE (rhs) == ASHIFT
3608 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3609 && INTVAL (XEXP (rhs, 1)) >= 0
3610 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3611 {
3612 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3613 rhs = XEXP (rhs, 0);
3614 }
3615
3616 if (rtx_equal_p (lhs, rhs))
3617 {
3618 tem = cse_gen_binary (MULT, mode, lhs,
3619 GEN_INT (coeff0 + coeff1));
3620 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3621 }
3622 }
3623
3624 /* If one of the operands is a PLUS or a MINUS, see if we can
3625 simplify this by the associative law.
3626 Don't use the associative law for floating point.
3627 The inaccuracy makes it nonassociative,
3628 and subtle programs can break if operations are associated. */
3629
3630 if (INTEGRAL_MODE_P (mode)
3631 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3632 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3633 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3634 return tem;
3635 break;
3636
3637 case COMPARE:
3638 #ifdef HAVE_cc0
3639 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3640 using cc0, in which case we want to leave it as a COMPARE
3641 so we can distinguish it from a register-register-copy.
3642
3643 In IEEE floating point, x-0 is not the same as x. */
3644
3645 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3646 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3647 && op1 == CONST0_RTX (mode))
3648 return op0;
3649 #else
3650 /* Do nothing here. */
3651 #endif
3652 break;
3653
3654 case MINUS:
3655 /* None of these optimizations can be done for IEEE
3656 floating point. */
3657 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3658 && FLOAT_MODE_P (mode) && ! flag_fast_math)
3659 break;
3660
3661 /* We can't assume x-x is 0 even with non-IEEE floating point,
3662 but since it is zero except in very strange circumstances, we
3663 will treat it as zero with -ffast-math. */
3664 if (rtx_equal_p (op0, op1)
3665 && ! side_effects_p (op0)
3666 && (! FLOAT_MODE_P (mode) || flag_fast_math))
3667 return CONST0_RTX (mode);
3668
3669 /* Change subtraction from zero into negation. */
3670 if (op0 == CONST0_RTX (mode))
3671 return gen_rtx (NEG, mode, op1);
3672
3673 /* (-1 - a) is ~a. */
3674 if (op0 == constm1_rtx)
3675 return gen_rtx (NOT, mode, op1);
3676
3677 /* Subtracting 0 has no effect. */
3678 if (op1 == CONST0_RTX (mode))
3679 return op0;
3680
3681 /* See if this is something like X * C - X or vice versa or
3682 if the multiplication is written as a shift. If so, we can
3683 distribute and make a new multiply, shift, or maybe just
3684 have X (if C is 2 in the example above). But don't make
3685 real multiply if we didn't have one before. */
3686
3687 if (! FLOAT_MODE_P (mode))
3688 {
3689 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3690 rtx lhs = op0, rhs = op1;
3691 int had_mult = 0;
3692
3693 if (GET_CODE (lhs) == NEG)
3694 coeff0 = -1, lhs = XEXP (lhs, 0);
3695 else if (GET_CODE (lhs) == MULT
3696 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3697 {
3698 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3699 had_mult = 1;
3700 }
3701 else if (GET_CODE (lhs) == ASHIFT
3702 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3703 && INTVAL (XEXP (lhs, 1)) >= 0
3704 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3705 {
3706 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3707 lhs = XEXP (lhs, 0);
3708 }
3709
3710 if (GET_CODE (rhs) == NEG)
3711 		  coeff1 = -1, rhs = XEXP (rhs, 0);
3712 else if (GET_CODE (rhs) == MULT
3713 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3714 {
3715 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3716 had_mult = 1;
3717 }
3718 else if (GET_CODE (rhs) == ASHIFT
3719 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3720 && INTVAL (XEXP (rhs, 1)) >= 0
3721 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3722 {
3723 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3724 rhs = XEXP (rhs, 0);
3725 }
3726
3727 if (rtx_equal_p (lhs, rhs))
3728 {
3729 tem = cse_gen_binary (MULT, mode, lhs,
3730 GEN_INT (coeff0 - coeff1));
3731 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3732 }
3733 }
3734
3735 /* (a - (-b)) -> (a + b). */
3736 if (GET_CODE (op1) == NEG)
3737 return cse_gen_binary (PLUS, mode, op0, XEXP (op1, 0));
3738
3739 /* If one of the operands is a PLUS or a MINUS, see if we can
3740 simplify this by the associative law.
3741 Don't use the associative law for floating point.
3742 The inaccuracy makes it nonassociative,
3743 and subtle programs can break if operations are associated. */
3744
3745 if (INTEGRAL_MODE_P (mode)
3746 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3747 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3748 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3749 return tem;
3750
3751 /* Don't let a relocatable value get a negative coeff. */
3752 if (GET_CODE (op1) == CONST_INT && GET_MODE (op0) != VOIDmode)
3753 return plus_constant (op0, - INTVAL (op1));
3754
3755 /* (x - (x & y)) -> (x & ~y) */
3756 if (GET_CODE (op1) == AND)
3757 {
3758 if (rtx_equal_p (op0, XEXP (op1, 0)))
3759 return cse_gen_binary (AND, mode, op0, gen_rtx (NOT, mode, XEXP (op1, 1)));
3760 if (rtx_equal_p (op0, XEXP (op1, 1)))
3761 return cse_gen_binary (AND, mode, op0, gen_rtx (NOT, mode, XEXP (op1, 0)));
3762 }
3763 break;
3764
3765 case MULT:
3766 if (op1 == constm1_rtx)
3767 {
3768 tem = simplify_unary_operation (NEG, mode, op0, mode);
3769
3770 return tem ? tem : gen_rtx (NEG, mode, op0);
3771 }
3772
3773 /* In IEEE floating point, x*0 is not always 0. */
3774 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3775 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3776 && op1 == CONST0_RTX (mode)
3777 && ! side_effects_p (op0))
3778 return op1;
3779
3780 /* In IEEE floating point, x*1 is not equivalent to x for nans.
3781 However, ANSI says we can drop signals,
3782 so we can do this anyway. */
3783 if (op1 == CONST1_RTX (mode))
3784 return op0;
3785
3786 /* Convert multiply by constant power of two into shift unless
3787 we are still generating RTL. This test is a kludge. */
3788 if (GET_CODE (op1) == CONST_INT
3789 && (val = exact_log2 (INTVAL (op1))) >= 0
3790 /* If the mode is larger than the host word size, and the
3791 uppermost bit is set, then this isn't a power of two due
3792 to implicit sign extension. */
3793 && (width <= HOST_BITS_PER_WIDE_INT
3794 || val != HOST_BITS_PER_WIDE_INT - 1)
3795 && ! rtx_equal_function_value_matters)
3796 return gen_rtx (ASHIFT, mode, op0, GEN_INT (val));
3797
3798 if (GET_CODE (op1) == CONST_DOUBLE
3799 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT)
3800 {
3801 REAL_VALUE_TYPE d;
3802 jmp_buf handler;
3803 int op1is2, op1ism1;
3804
3805 if (setjmp (handler))
3806 return 0;
3807
3808 set_float_handler (handler);
3809 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
3810 op1is2 = REAL_VALUES_EQUAL (d, dconst2);
3811 op1ism1 = REAL_VALUES_EQUAL (d, dconstm1);
3812 set_float_handler (NULL_PTR);
3813
3814 /* x*2 is x+x and x*(-1) is -x */
3815 if (op1is2 && GET_MODE (op0) == mode)
3816 return gen_rtx (PLUS, mode, op0, copy_rtx (op0));
3817
3818 else if (op1ism1 && GET_MODE (op0) == mode)
3819 return gen_rtx (NEG, mode, op0);
3820 }
3821 break;
3822
3823 case IOR:
3824 if (op1 == const0_rtx)
3825 return op0;
3826 if (GET_CODE (op1) == CONST_INT
3827 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3828 return op1;
3829 if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3830 return op0;
3831 /* A | (~A) -> -1 */
3832 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
3833 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
3834 && ! side_effects_p (op0)
3835 && GET_MODE_CLASS (mode) != MODE_CC)
3836 return constm1_rtx;
3837 break;
3838
3839 case XOR:
3840 if (op1 == const0_rtx)
3841 return op0;
3842 if (GET_CODE (op1) == CONST_INT
3843 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3844 return gen_rtx (NOT, mode, op0);
3845 if (op0 == op1 && ! side_effects_p (op0)
3846 && GET_MODE_CLASS (mode) != MODE_CC)
3847 return const0_rtx;
3848 break;
3849
3850 case AND:
3851 if (op1 == const0_rtx && ! side_effects_p (op0))
3852 return const0_rtx;
3853 if (GET_CODE (op1) == CONST_INT
3854 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3855 return op0;
3856 if (op0 == op1 && ! side_effects_p (op0)
3857 && GET_MODE_CLASS (mode) != MODE_CC)
3858 return op0;
3859 /* A & (~A) -> 0 */
3860 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
3861 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
3862 && ! side_effects_p (op0)
3863 && GET_MODE_CLASS (mode) != MODE_CC)
3864 return const0_rtx;
3865 break;
3866
3867 case UDIV:
3868 /* Convert divide by power of two into shift (divide by 1 handled
3869 below). */
3870 if (GET_CODE (op1) == CONST_INT
3871 && (arg1 = exact_log2 (INTVAL (op1))) > 0)
3872 return gen_rtx (LSHIFTRT, mode, op0, GEN_INT (arg1));
3873
3874 /* ... fall through ... */
3875
3876 case DIV:
3877 if (op1 == CONST1_RTX (mode))
3878 return op0;
3879
3880 /* In IEEE floating point, 0/x is not always 0. */
3881 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3882 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3883 && op0 == CONST0_RTX (mode)
3884 && ! side_effects_p (op1))
3885 return op0;
3886
3887 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3888 /* Change division by a constant into multiplication. Only do
3889 this with -ffast-math until an expert says it is safe in
3890 general. */
3891 else if (GET_CODE (op1) == CONST_DOUBLE
3892 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT
3893 && op1 != CONST0_RTX (mode)
3894 && flag_fast_math)
3895 {
3896 REAL_VALUE_TYPE d;
3897 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
3898
3899 if (! REAL_VALUES_EQUAL (d, dconst0))
3900 {
3901 #if defined (REAL_ARITHMETIC)
3902 REAL_ARITHMETIC (d, rtx_to_tree_code (DIV), dconst1, d);
3903 return gen_rtx (MULT, mode, op0,
3904 CONST_DOUBLE_FROM_REAL_VALUE (d, mode));
3905 #else
3906 return gen_rtx (MULT, mode, op0,
3907 CONST_DOUBLE_FROM_REAL_VALUE (1./d, mode));
3908 #endif
3909 }
3910 }
3911 #endif
3912 break;
3913
3914 case UMOD:
3915 /* Handle modulus by power of two (mod with 1 handled below). */
3916 if (GET_CODE (op1) == CONST_INT
3917 && exact_log2 (INTVAL (op1)) > 0)
3918 return gen_rtx (AND, mode, op0, GEN_INT (INTVAL (op1) - 1));
3919
3920 /* ... fall through ... */
3921
3922 case MOD:
3923 if ((op0 == const0_rtx || op1 == const1_rtx)
3924 && ! side_effects_p (op0) && ! side_effects_p (op1))
3925 return const0_rtx;
3926 break;
3927
3928 case ROTATERT:
3929 case ROTATE:
3930 /* Rotating ~0 always results in ~0. */
3931 if (GET_CODE (op0) == CONST_INT && width <= HOST_BITS_PER_WIDE_INT
3932 && INTVAL (op0) == GET_MODE_MASK (mode)
3933 && ! side_effects_p (op1))
3934 return op0;
3935
3936 /* ... fall through ... */
3937
3938 case ASHIFT:
3939 case ASHIFTRT:
3940 case LSHIFTRT:
3941 if (op1 == const0_rtx)
3942 return op0;
3943 if (op0 == const0_rtx && ! side_effects_p (op1))
3944 return op0;
3945 break;
3946
3947 case SMIN:
3948 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
3949 	  && INTVAL (op1) == (HOST_WIDE_INT) 1 << (width - 1)
3950 && ! side_effects_p (op0))
3951 return op1;
3952 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3953 return op0;
3954 break;
3955
3956 case SMAX:
3957 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
3958 && (INTVAL (op1)
3959 == (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode) >> 1)
3960 && ! side_effects_p (op0))
3961 return op1;
3962 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3963 return op0;
3964 break;
3965
3966 case UMIN:
3967 if (op1 == const0_rtx && ! side_effects_p (op0))
3968 return op1;
3969 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3970 return op0;
3971 break;
3972
3973 case UMAX:
3974 if (op1 == constm1_rtx && ! side_effects_p (op0))
3975 return op1;
3976 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3977 return op0;
3978 break;
3979
3980 default:
3981 abort ();
3982 }
3983
3984 return 0;
3985 }
3986
3987 /* Get the integer argument values in two forms:
3988 zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S. */
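
  /* For instance (an illustrative sketch): in QImode the bit pattern
     0xff gives arg0 == 255 but arg0s == -1 after the sign extension
     below has been applied.  */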
3989
3990 arg0 = INTVAL (op0);
3991 arg1 = INTVAL (op1);
3992
3993 if (width < HOST_BITS_PER_WIDE_INT)
3994 {
3995 arg0 &= ((HOST_WIDE_INT) 1 << width) - 1;
3996 arg1 &= ((HOST_WIDE_INT) 1 << width) - 1;
3997
3998 arg0s = arg0;
3999 if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4000 arg0s |= ((HOST_WIDE_INT) (-1) << width);
4001
4002 arg1s = arg1;
4003 if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4004 arg1s |= ((HOST_WIDE_INT) (-1) << width);
4005 }
4006 else
4007 {
4008 arg0s = arg0;
4009 arg1s = arg1;
4010 }
4011
4012 /* Compute the value of the arithmetic. */
4013
4014 switch (code)
4015 {
4016 case PLUS:
4017 val = arg0s + arg1s;
4018 break;
4019
4020 case MINUS:
4021 val = arg0s - arg1s;
4022 break;
4023
4024 case MULT:
4025 val = arg0s * arg1s;
4026 break;
4027
4028 case DIV:
4029 if (arg1s == 0)
4030 return 0;
4031 val = arg0s / arg1s;
4032 break;
4033
4034 case MOD:
4035 if (arg1s == 0)
4036 return 0;
4037 val = arg0s % arg1s;
4038 break;
4039
4040 case UDIV:
4041 if (arg1 == 0)
4042 return 0;
4043 val = (unsigned HOST_WIDE_INT) arg0 / arg1;
4044 break;
4045
4046 case UMOD:
4047 if (arg1 == 0)
4048 return 0;
4049 val = (unsigned HOST_WIDE_INT) arg0 % arg1;
4050 break;
4051
4052 case AND:
4053 val = arg0 & arg1;
4054 break;
4055
4056 case IOR:
4057 val = arg0 | arg1;
4058 break;
4059
4060 case XOR:
4061 val = arg0 ^ arg1;
4062 break;
4063
4064 case LSHIFTRT:
4065 /* If shift count is undefined, don't fold it; let the machine do
4066 what it wants. But truncate it if the machine will do that. */
4067 if (arg1 < 0)
4068 return 0;
4069
4070 #ifdef SHIFT_COUNT_TRUNCATED
4071 if (SHIFT_COUNT_TRUNCATED)
4072 arg1 %= width;
4073 #endif
4074
4075 val = ((unsigned HOST_WIDE_INT) arg0) >> arg1;
4076 break;
4077
4078 case ASHIFT:
4079 if (arg1 < 0)
4080 return 0;
4081
4082 #ifdef SHIFT_COUNT_TRUNCATED
4083 if (SHIFT_COUNT_TRUNCATED)
4084 arg1 %= width;
4085 #endif
4086
4087 val = ((unsigned HOST_WIDE_INT) arg0) << arg1;
4088 break;
4089
4090 case ASHIFTRT:
4091 if (arg1 < 0)
4092 return 0;
4093
4094 #ifdef SHIFT_COUNT_TRUNCATED
4095 if (SHIFT_COUNT_TRUNCATED)
4096 arg1 %= width;
4097 #endif
4098
4099 val = arg0s >> arg1;
4100
4101       /* The bootstrap compiler may not have sign extended the right shift.
4102          Manually extend the sign to ensure the bootstrap cc matches gcc. */
4103 if (arg0s < 0 && arg1 > 0)
4104 val |= ((HOST_WIDE_INT) -1) << (HOST_BITS_PER_WIDE_INT - arg1);
4105
4106 break;
4107
4108 case ROTATERT:
4109 if (arg1 < 0)
4110 return 0;
4111
4112 arg1 %= width;
4113 val = ((((unsigned HOST_WIDE_INT) arg0) << (width - arg1))
4114 | (((unsigned HOST_WIDE_INT) arg0) >> arg1));
4115 break;
4116
4117 case ROTATE:
4118 if (arg1 < 0)
4119 return 0;
4120
4121 arg1 %= width;
4122 val = ((((unsigned HOST_WIDE_INT) arg0) << arg1)
4123 | (((unsigned HOST_WIDE_INT) arg0) >> (width - arg1)));
4124 break;
4125
4126 case COMPARE:
4127 /* Do nothing here. */
4128 return 0;
4129
4130 case SMIN:
4131 val = arg0s <= arg1s ? arg0s : arg1s;
4132 break;
4133
4134 case UMIN:
4135 val = ((unsigned HOST_WIDE_INT) arg0
4136 <= (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4137 break;
4138
4139 case SMAX:
4140 val = arg0s > arg1s ? arg0s : arg1s;
4141 break;
4142
4143 case UMAX:
4144 val = ((unsigned HOST_WIDE_INT) arg0
4145 > (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4146 break;
4147
4148 default:
4149 abort ();
4150 }
4151
4152 /* Clear the bits that don't belong in our mode, unless they and our sign
4153 bit are all one. So we get either a reasonable negative value or a
4154 reasonable unsigned value for this mode. */
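  /* For instance (illustrative): with width == 8, the value 0x1ff is
     masked down to 0xff, while -1 already has all the bits above its
     sign bit set and is left untouched as a reasonable negative
     number.  */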
4155 if (width < HOST_BITS_PER_WIDE_INT
4156 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4157 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4158 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4159
4160 /* If this would be an entire word for the target, but is not for
4161 the host, then sign-extend on the host so that the number will look
4162 the same way on the host that it would on the target.
4163
4164 For example, when building a 64 bit alpha hosted 32 bit sparc
4165 targeted compiler, then we want the 32 bit unsigned value -1 to be
4166 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
4167      The latter confuses the sparc backend. */
4168
4169 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
4170 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
4171 val |= ((HOST_WIDE_INT) (-1) << width);
4172
4173 return GEN_INT (val);
4174 }
4175 \f
4176 /* Simplify a PLUS or MINUS, at least one of whose operands may be another
4177 PLUS or MINUS.
4178
4179    Rather than testing for specific cases, we do this by a brute-force method
4180 and do all possible simplifications until no more changes occur. Then
4181 we rebuild the operation. */
4182
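/* For example (an illustrative sketch): given (PLUS (PLUS a 1)
   (MINUS 5 a)) in an integral mode, the expansion loop flattens the
   operands into the list { a, 5, 1, -a }; the pairwise simplification
   pass folds 5 + 1 into 6 and cancels a against -a; and the rebuild
   step then returns plain (const_int 6).  */
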
4183 static rtx
4184 simplify_plus_minus (code, mode, op0, op1)
4185 enum rtx_code code;
4186 enum machine_mode mode;
4187 rtx op0, op1;
4188 {
4189 rtx ops[8];
4190 int negs[8];
4191 rtx result, tem;
4192 int n_ops = 2, input_ops = 2, input_consts = 0, n_consts = 0;
4193 int first = 1, negate = 0, changed;
4194 int i, j;
4195
4196 bzero ((char *) ops, sizeof ops);
4197
4198 /* Set up the two operands and then expand them until nothing has been
4199 changed. If we run out of room in our array, give up; this should
4200 almost never happen. */
4201
4202 ops[0] = op0, ops[1] = op1, negs[0] = 0, negs[1] = (code == MINUS);
4203
4204 changed = 1;
4205 while (changed)
4206 {
4207 changed = 0;
4208
4209 for (i = 0; i < n_ops; i++)
4210 switch (GET_CODE (ops[i]))
4211 {
4212 case PLUS:
4213 case MINUS:
4214 if (n_ops == 7)
4215 return 0;
4216
4217 ops[n_ops] = XEXP (ops[i], 1);
4218 negs[n_ops++] = GET_CODE (ops[i]) == MINUS ? !negs[i] : negs[i];
4219 ops[i] = XEXP (ops[i], 0);
4220 input_ops++;
4221 changed = 1;
4222 break;
4223
4224 case NEG:
4225 ops[i] = XEXP (ops[i], 0);
4226 negs[i] = ! negs[i];
4227 changed = 1;
4228 break;
4229
4230 case CONST:
4231 ops[i] = XEXP (ops[i], 0);
4232 input_consts++;
4233 changed = 1;
4234 break;
4235
4236 case NOT:
4237 /* ~a -> (-a - 1) */
4238 if (n_ops != 7)
4239 {
4240 ops[n_ops] = constm1_rtx;
4241 negs[n_ops++] = negs[i];
4242 ops[i] = XEXP (ops[i], 0);
4243 negs[i] = ! negs[i];
4244 changed = 1;
4245 }
4246 break;
4247
4248 case CONST_INT:
4249 if (negs[i])
4250 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0, changed = 1;
4251 break;
4252 }
4253 }
4254
4255 /* If we only have two operands, we can't do anything. */
4256 if (n_ops <= 2)
4257 return 0;
4258
4259 /* Now simplify each pair of operands until nothing changes. The first
4260 time through just simplify constants against each other. */
4261
4262 changed = 1;
4263 while (changed)
4264 {
4265 changed = first;
4266
4267 for (i = 0; i < n_ops - 1; i++)
4268 for (j = i + 1; j < n_ops; j++)
4269 if (ops[i] != 0 && ops[j] != 0
4270 && (! first || (CONSTANT_P (ops[i]) && CONSTANT_P (ops[j]))))
4271 {
4272 rtx lhs = ops[i], rhs = ops[j];
4273 enum rtx_code ncode = PLUS;
4274
4275 if (negs[i] && ! negs[j])
4276 lhs = ops[j], rhs = ops[i], ncode = MINUS;
4277 else if (! negs[i] && negs[j])
4278 ncode = MINUS;
4279
4280 tem = simplify_binary_operation (ncode, mode, lhs, rhs);
4281 if (tem)
4282 {
4283 ops[i] = tem, ops[j] = 0;
4284 negs[i] = negs[i] && negs[j];
4285 if (GET_CODE (tem) == NEG)
4286 ops[i] = XEXP (tem, 0), negs[i] = ! negs[i];
4287
4288 if (GET_CODE (ops[i]) == CONST_INT && negs[i])
4289 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0;
4290 changed = 1;
4291 }
4292 }
4293
4294 first = 0;
4295 }
4296
4297 /* Pack all the operands to the lower-numbered entries and give up if
4298 we didn't reduce the number of operands we had. Make sure we
4299 count a CONST as two operands. If we have the same number of
4300 operands, but have made more CONSTs than we had, this is also
4301 an improvement, so accept it. */
4302
4303 for (i = 0, j = 0; j < n_ops; j++)
4304 if (ops[j] != 0)
4305 {
4306 ops[i] = ops[j], negs[i++] = negs[j];
4307 if (GET_CODE (ops[j]) == CONST)
4308 n_consts++;
4309 }
4310
4311 if (i + n_consts > input_ops
4312 || (i + n_consts == input_ops && n_consts <= input_consts))
4313 return 0;
4314
4315 n_ops = i;
4316
4317 /* If we have a CONST_INT, put it last. */
4318 for (i = 0; i < n_ops - 1; i++)
4319 if (GET_CODE (ops[i]) == CONST_INT)
4320 {
4321 tem = ops[n_ops - 1], ops[n_ops - 1] = ops[i] , ops[i] = tem;
4322 j = negs[n_ops - 1], negs[n_ops - 1] = negs[i], negs[i] = j;
4323 }
4324
4325 /* Put a non-negated operand first. If there aren't any, make all
4326 operands positive and negate the whole thing later. */
4327 for (i = 0; i < n_ops && negs[i]; i++)
4328 ;
4329
4330 if (i == n_ops)
4331 {
4332 for (i = 0; i < n_ops; i++)
4333 negs[i] = 0;
4334 negate = 1;
4335 }
4336 else if (i != 0)
4337 {
4338 tem = ops[0], ops[0] = ops[i], ops[i] = tem;
4339 j = negs[0], negs[0] = negs[i], negs[i] = j;
4340 }
4341
4342 /* Now make the result by performing the requested operations. */
4343 result = ops[0];
4344 for (i = 1; i < n_ops; i++)
4345 result = cse_gen_binary (negs[i] ? MINUS : PLUS, mode, result, ops[i]);
4346
4347 return negate ? gen_rtx (NEG, mode, result) : result;
4348 }
4349 \f
4350 /* Make a binary operation by properly ordering the operands and
4351 seeing if the expression folds. */
4352
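/* For instance (an illustrative sketch): cse_gen_binary (PLUS, SImode,
   GEN_INT (4), reg) first swaps the operands so that the constant
   comes second, fails to fold, and then uses plus_constant to return
   (plus:SI reg (const_int 4)).  */
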
4353 static rtx
4354 cse_gen_binary (code, mode, op0, op1)
4355 enum rtx_code code;
4356 enum machine_mode mode;
4357 rtx op0, op1;
4358 {
4359 rtx tem;
4360
4361 /* Put complex operands first and constants second if commutative. */
4362 if (GET_RTX_CLASS (code) == 'c'
4363 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
4364 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
4365 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
4366 || (GET_CODE (op0) == SUBREG
4367 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
4368 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
4369 tem = op0, op0 = op1, op1 = tem;
4370
4371 /* If this simplifies, do it. */
4372 tem = simplify_binary_operation (code, mode, op0, op1);
4373
4374 if (tem)
4375 return tem;
4376
4377 /* Handle addition and subtraction of CONST_INT specially. Otherwise,
4378 just form the operation. */
4379
4380 if (code == PLUS && GET_CODE (op1) == CONST_INT
4381 && GET_MODE (op0) != VOIDmode)
4382 return plus_constant (op0, INTVAL (op1));
4383 else if (code == MINUS && GET_CODE (op1) == CONST_INT
4384 && GET_MODE (op0) != VOIDmode)
4385 return plus_constant (op0, - INTVAL (op1));
4386 else
4387 return gen_rtx (code, mode, op0, op1);
4388 }
4389 \f
4390 /* Like simplify_binary_operation except used for relational operators.
4391 MODE is the mode of the operands, not that of the result. If MODE
4392 is VOIDmode, both operands must also be VOIDmode and we compare the
4393 operands in "infinite precision".
4394
4395 If no simplification is possible, this function returns zero. Otherwise,
4396 it returns either const_true_rtx or const0_rtx. */
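
/* For instance (illustrative): simplify_relational_operation (GT,
   SImode, GEN_INT (3), GEN_INT (7)) folds to const0_rtx, while an EQ
   comparison of an integral-mode register with itself folds to
   const_true_rtx.  */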
4397
4398 rtx
4399 simplify_relational_operation (code, mode, op0, op1)
4400 enum rtx_code code;
4401 enum machine_mode mode;
4402 rtx op0, op1;
4403 {
4404 int equal, op0lt, op0ltu, op1lt, op1ltu;
4405 rtx tem;
4406
4407 /* If op0 is a compare, extract the comparison arguments from it. */
4408 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
4409 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4410
4411 /* We can't simplify MODE_CC values since we don't know what the
4412 actual comparison is. */
4413 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC
4414 #ifdef HAVE_cc0
4415 || op0 == cc0_rtx
4416 #endif
4417 )
4418 return 0;
4419
4420   /* For integer comparisons of A and B, maybe we can simplify A - B and
4421      then simplify a comparison of that with zero.  If A and B are both either
4422 a register or a CONST_INT, this can't help; testing for these cases will
4423 prevent infinite recursion here and speed things up.
4424
4425 If CODE is an unsigned comparison, then we can never do this optimization,
4426 because it gives an incorrect result if the subtraction wraps around zero.
4427 ANSI C defines unsigned operations such that they never overflow, and
4428      thus such cases cannot be ignored. */
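
  /* For example (illustrative): in a 32-bit mode, 0x20000000 LTU
     0xe0000000 is true, but the subtraction wraps to 0x40000000, and
     a signed test of that difference against zero would wrongly
     yield false.  */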
4429
4430 if (INTEGRAL_MODE_P (mode) && op1 != const0_rtx
4431 && ! ((GET_CODE (op0) == REG || GET_CODE (op0) == CONST_INT)
4432 && (GET_CODE (op1) == REG || GET_CODE (op1) == CONST_INT))
4433 && 0 != (tem = simplify_binary_operation (MINUS, mode, op0, op1))
4434 && code != GTU && code != GEU && code != LTU && code != LEU)
4435 return simplify_relational_operation (signed_condition (code),
4436 mode, tem, const0_rtx);
4437
4438 /* For non-IEEE floating-point, if the two operands are equal, we know the
4439 result. */
4440 if (rtx_equal_p (op0, op1)
4441 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4442 || ! FLOAT_MODE_P (GET_MODE (op0)) || flag_fast_math))
4443 equal = 1, op0lt = 0, op0ltu = 0, op1lt = 0, op1ltu = 0;
4444
4445 /* If the operands are floating-point constants, see if we can fold
4446 the result. */
4447 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4448 else if (GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
4449 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
4450 {
4451 REAL_VALUE_TYPE d0, d1;
4452 jmp_buf handler;
4453
4454 if (setjmp (handler))
4455 return 0;
4456
4457 set_float_handler (handler);
4458 REAL_VALUE_FROM_CONST_DOUBLE (d0, op0);
4459 REAL_VALUE_FROM_CONST_DOUBLE (d1, op1);
4460 equal = REAL_VALUES_EQUAL (d0, d1);
4461 op0lt = op0ltu = REAL_VALUES_LESS (d0, d1);
4462 op1lt = op1ltu = REAL_VALUES_LESS (d1, d0);
4463 set_float_handler (NULL_PTR);
4464 }
4465 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4466
4467 /* Otherwise, see if the operands are both integers. */
4468 else if ((GET_MODE_CLASS (mode) == MODE_INT || mode == VOIDmode)
4469 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
4470 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
4471 {
4472 int width = GET_MODE_BITSIZE (mode);
4473 HOST_WIDE_INT l0s, h0s, l1s, h1s;
4474 unsigned HOST_WIDE_INT l0u, h0u, l1u, h1u;
4475
4476 /* Get the two words comprising each integer constant. */
4477 if (GET_CODE (op0) == CONST_DOUBLE)
4478 {
4479 l0u = l0s = CONST_DOUBLE_LOW (op0);
4480 h0u = h0s = CONST_DOUBLE_HIGH (op0);
4481 }
4482 else
4483 {
4484 l0u = l0s = INTVAL (op0);
4485 h0u = h0s = l0s < 0 ? -1 : 0;
4486 }
4487
4488 if (GET_CODE (op1) == CONST_DOUBLE)
4489 {
4490 l1u = l1s = CONST_DOUBLE_LOW (op1);
4491 h1u = h1s = CONST_DOUBLE_HIGH (op1);
4492 }
4493 else
4494 {
4495 l1u = l1s = INTVAL (op1);
4496 h1u = h1s = l1s < 0 ? -1 : 0;
4497 }
4498
4499 /* If WIDTH is nonzero and smaller than HOST_BITS_PER_WIDE_INT,
4500 we have to sign or zero-extend the values. */
4501 if (width != 0 && width <= HOST_BITS_PER_WIDE_INT)
4502 h0u = h1u = 0, h0s = l0s < 0 ? -1 : 0, h1s = l1s < 0 ? -1 : 0;
4503
4504 if (width != 0 && width < HOST_BITS_PER_WIDE_INT)
4505 {
4506 l0u &= ((HOST_WIDE_INT) 1 << width) - 1;
4507 l1u &= ((HOST_WIDE_INT) 1 << width) - 1;
4508
4509 if (l0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4510 l0s |= ((HOST_WIDE_INT) (-1) << width);
4511
4512 if (l1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4513 l1s |= ((HOST_WIDE_INT) (-1) << width);
4514 }
4515
4516 equal = (h0u == h1u && l0u == l1u);
4517 op0lt = (h0s < h1s || (h0s == h1s && l0s < l1s));
4518 op1lt = (h1s < h0s || (h1s == h0s && l1s < l0s));
4519 op0ltu = (h0u < h1u || (h0u == h1u && l0u < l1u));
4520 op1ltu = (h1u < h0u || (h1u == h0u && l1u < l0u));
4521 }
4522
4523 /* Otherwise, there are some code-specific tests we can make. */
4524 else
4525 {
4526 switch (code)
4527 {
4528 case EQ:
4529 /* References to the frame plus a constant or labels cannot
4530 be zero, but a SYMBOL_REF can due to #pragma weak. */
4531 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4532 || GET_CODE (op0) == LABEL_REF)
4533 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4534 /* On some machines, the ap reg can be 0 sometimes. */
4535 && op0 != arg_pointer_rtx
4536 #endif
4537 )
4538 return const0_rtx;
4539 break;
4540
4541 case NE:
4542 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4543 || GET_CODE (op0) == LABEL_REF)
4544 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4545 && op0 != arg_pointer_rtx
4546 #endif
4547 )
4548 return const_true_rtx;
4549 break;
4550
4551 case GEU:
4552 /* Unsigned values are never negative. */
4553 if (op1 == const0_rtx)
4554 return const_true_rtx;
4555 break;
4556
4557 case LTU:
4558 if (op1 == const0_rtx)
4559 return const0_rtx;
4560 break;
4561
4562 case LEU:
4563 /* Unsigned values are never greater than the largest
4564 unsigned value. */
4565 if (GET_CODE (op1) == CONST_INT
4566 && INTVAL (op1) == GET_MODE_MASK (mode)
4567 && INTEGRAL_MODE_P (mode))
4568 return const_true_rtx;
4569 break;
4570
4571 case GTU:
4572 if (GET_CODE (op1) == CONST_INT
4573 && INTVAL (op1) == GET_MODE_MASK (mode)
4574 && INTEGRAL_MODE_P (mode))
4575 return const0_rtx;
4576 break;
4577 }
4578
4579 return 0;
4580 }
4581
4582 /* If we reach here, EQUAL, OP0LT, OP0LTU, OP1LT, and OP1LTU are set
4583 as appropriate. */
4584 switch (code)
4585 {
4586 case EQ:
4587 return equal ? const_true_rtx : const0_rtx;
4588 case NE:
4589 return ! equal ? const_true_rtx : const0_rtx;
4590 case LT:
4591 return op0lt ? const_true_rtx : const0_rtx;
4592 case GT:
4593 return op1lt ? const_true_rtx : const0_rtx;
4594 case LTU:
4595 return op0ltu ? const_true_rtx : const0_rtx;
4596 case GTU:
4597 return op1ltu ? const_true_rtx : const0_rtx;
4598 case LE:
4599 return equal || op0lt ? const_true_rtx : const0_rtx;
4600 case GE:
4601 return equal || op1lt ? const_true_rtx : const0_rtx;
4602 case LEU:
4603 return equal || op0ltu ? const_true_rtx : const0_rtx;
4604 case GEU:
4605 return equal || op1ltu ? const_true_rtx : const0_rtx;
4606 }
4607
4608 abort ();
4609 }
4610 \f
4611 /* Simplify CODE, an operation with result mode MODE and three operands,
4612 OP0, OP1, and OP2. OP0_MODE was the mode of OP0 before it became
4613    a constant.  Return 0 if no simplification is possible. */
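
/* For instance (an illustrative sketch): with BITS_BIG_ENDIAN clear,
   (zero_extract:SI (const_int 0xab) (const_int 4) (const_int 4))
   folds to (const_int 10), and an IF_THEN_ELSE whose condition is a
   nonzero CONST_INT folds to its first arm.  */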
4614
4615 rtx
4616 simplify_ternary_operation (code, mode, op0_mode, op0, op1, op2)
4617 enum rtx_code code;
4618 enum machine_mode mode, op0_mode;
4619 rtx op0, op1, op2;
4620 {
4621 int width = GET_MODE_BITSIZE (mode);
4622
4623 /* VOIDmode means "infinite" precision. */
4624 if (width == 0)
4625 width = HOST_BITS_PER_WIDE_INT;
4626
4627 switch (code)
4628 {
4629 case SIGN_EXTRACT:
4630 case ZERO_EXTRACT:
4631 if (GET_CODE (op0) == CONST_INT
4632 && GET_CODE (op1) == CONST_INT
4633 && GET_CODE (op2) == CONST_INT
4634 && INTVAL (op1) + INTVAL (op2) <= GET_MODE_BITSIZE (op0_mode)
4635 && width <= HOST_BITS_PER_WIDE_INT)
4636 {
4637 /* Extracting a bit-field from a constant */
4638 HOST_WIDE_INT val = INTVAL (op0);
4639
4640 if (BITS_BIG_ENDIAN)
4641 val >>= (GET_MODE_BITSIZE (op0_mode)
4642 - INTVAL (op2) - INTVAL (op1));
4643 else
4644 val >>= INTVAL (op2);
4645
4646 if (HOST_BITS_PER_WIDE_INT != INTVAL (op1))
4647 {
4648 /* First zero-extend. */
4649 val &= ((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1;
4650 /* If desired, propagate sign bit. */
4651 if (code == SIGN_EXTRACT
4652 && (val & ((HOST_WIDE_INT) 1 << (INTVAL (op1) - 1))))
4653 val |= ~ (((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1);
4654 }
4655
4656 /* Clear the bits that don't belong in our mode,
4657 unless they and our sign bit are all one.
4658 So we get either a reasonable negative value or a reasonable
4659 unsigned value for this mode. */
4660 if (width < HOST_BITS_PER_WIDE_INT
4661 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4662 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4663 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4664
4665 return GEN_INT (val);
4666 }
4667 break;
4668
4669 case IF_THEN_ELSE:
4670 if (GET_CODE (op0) == CONST_INT)
4671 return op0 != const0_rtx ? op1 : op2;
4672
4673       /* Convert a != b ? a : b and a == b ? b : a to "a".  */
4674 if (GET_CODE (op0) == NE && ! side_effects_p (op0)
4675 && rtx_equal_p (XEXP (op0, 0), op1)
4676 && rtx_equal_p (XEXP (op0, 1), op2))
4677 return op1;
4678 else if (GET_CODE (op0) == EQ && ! side_effects_p (op0)
4679 && rtx_equal_p (XEXP (op0, 1), op1)
4680 && rtx_equal_p (XEXP (op0, 0), op2))
4681 return op2;
4682 break;
4683
4684 default:
4685 abort ();
4686 }
4687
4688 return 0;
4689 }
4690 \f
4691 /* If X is a nontrivial arithmetic operation on an argument
4692 for which a constant value can be determined, return
4693 the result of operating on that value, as a constant.
4694 Otherwise, return X, possibly with one or more operands
4695 modified by recursive calls to this function.
4696
4697 If X is a register whose contents are known, we do NOT
4698 return those contents here. equiv_constant is called to
4699 perform that task.
4700
4701 INSN is the insn that we may be modifying. If it is 0, make a copy
4702 of X before modifying it. */
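
/* For instance (an illustrative sketch): if register r is currently
   known to hold (const_int 5), folding (plus:SI (reg r) (const_int 3))
   can yield (const_int 8); a lone REG, by contrast, is returned
   unchanged, since equiv_constant is responsible for that case.  */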
4703
4704 static rtx
4705 fold_rtx (x, insn)
4706 rtx x;
4707 rtx insn;
4708 {
4709 register enum rtx_code code;
4710 register enum machine_mode mode;
4711 register char *fmt;
4712 register int i;
4713 rtx new = 0;
4714 int copied = 0;
4715 int must_swap = 0;
4716
4717 /* Folded equivalents of first two operands of X. */
4718 rtx folded_arg0;
4719 rtx folded_arg1;
4720
4721 /* Constant equivalents of first three operands of X;
4722 0 when no such equivalent is known. */
4723 rtx const_arg0;
4724 rtx const_arg1;
4725 rtx const_arg2;
4726
4727 /* The mode of the first operand of X. We need this for sign and zero
4728 extends. */
4729 enum machine_mode mode_arg0;
4730
4731 if (x == 0)
4732 return x;
4733
4734 mode = GET_MODE (x);
4735 code = GET_CODE (x);
4736 switch (code)
4737 {
4738 case CONST:
4739 case CONST_INT:
4740 case CONST_DOUBLE:
4741 case SYMBOL_REF:
4742 case LABEL_REF:
4743 case REG:
4744 /* No use simplifying an EXPR_LIST
4745 since they are used only for lists of args
4746 in a function call's REG_EQUAL note. */
4747 case EXPR_LIST:
4748 return x;
4749
4750 #ifdef HAVE_cc0
4751 case CC0:
4752 return prev_insn_cc0;
4753 #endif
4754
4755 case PC:
4756 /* If the next insn is a CODE_LABEL followed by a jump table,
4757 PC's value is a LABEL_REF pointing to that label. That
4758 lets us fold switch statements on the Vax. */
4759 if (insn && GET_CODE (insn) == JUMP_INSN)
4760 {
4761 rtx next = next_nonnote_insn (insn);
4762
4763 if (next && GET_CODE (next) == CODE_LABEL
4764 && NEXT_INSN (next) != 0
4765 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
4766 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
4767 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
4768 return gen_rtx (LABEL_REF, Pmode, next);
4769 }
4770 break;
4771
4772 case SUBREG:
4773 /* See if we previously assigned a constant value to this SUBREG. */
4774 if ((new = lookup_as_function (x, CONST_INT)) != 0
4775 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
4776 return new;
4777
4778 /* If this is a paradoxical SUBREG, we have no idea what value the
4779 extra bits would have. However, if the operand is equivalent
4780 to a SUBREG whose operand is the same as our mode, and all the
4781 modes are within a word, we can just use the inner operand
4782 because these SUBREGs just say how to treat the register.
4783
4784 Similarly if we find an integer constant. */
4785
4786 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4787 {
4788 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
4789 struct table_elt *elt;
4790
4791 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
4792 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
4793 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
4794 imode)) != 0)
4795 for (elt = elt->first_same_value;
4796 elt; elt = elt->next_same_value)
4797 {
4798 if (CONSTANT_P (elt->exp)
4799 && GET_MODE (elt->exp) == VOIDmode)
4800 return elt->exp;
4801
4802 if (GET_CODE (elt->exp) == SUBREG
4803 && GET_MODE (SUBREG_REG (elt->exp)) == mode
4804 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
4805 return copy_rtx (SUBREG_REG (elt->exp));
4806 }
4807
4808 return x;
4809 }
4810
4811 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
4812 We might be able to if the SUBREG is extracting a single word in an
4813 integral mode or extracting the low part. */
4814
4815 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
4816 const_arg0 = equiv_constant (folded_arg0);
4817 if (const_arg0)
4818 folded_arg0 = const_arg0;
4819
4820 if (folded_arg0 != SUBREG_REG (x))
4821 {
4822 new = 0;
4823
4824 if (GET_MODE_CLASS (mode) == MODE_INT
4825 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
4826 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
4827 new = operand_subword (folded_arg0, SUBREG_WORD (x), 0,
4828 GET_MODE (SUBREG_REG (x)));
4829 if (new == 0 && subreg_lowpart_p (x))
4830 new = gen_lowpart_if_possible (mode, folded_arg0);
4831 if (new)
4832 return new;
4833 }
4834
4835 /* If this is a narrowing SUBREG and our operand is a REG, see if
4836 we can find an equivalence for REG that is an arithmetic operation
4837 in a wider mode where both operands are paradoxical SUBREGs
4838 from objects of our result mode. In that case, we couldn't report
4839 an equivalent value for that operation, since we don't know what the
4840 extra bits will be. But we can find an equivalence for this SUBREG
4841      by folding that operation in the narrow mode.  This allows us to
4842 fold arithmetic in narrow modes when the machine only supports
4843 word-sized arithmetic.
4844
4845 Also look for a case where we have a SUBREG whose operand is the
4846 same as our result. If both modes are smaller than a word, we
4847 are simply interpreting a register in different modes and we
4848 can use the inner value. */
4849
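      /* For instance (an illustrative sketch): for
	 (subreg:HI (reg:SI r) 0), if r is known equivalent to
	 (plus:SI (subreg:SI (reg:HI a) 0) (subreg:SI (reg:HI b) 0))
	 and a and b have known constant values, the PLUS can be
	 refolded directly in HImode.  */
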
4850 if (GET_CODE (folded_arg0) == REG
4851 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
4852 && subreg_lowpart_p (x))
4853 {
4854 struct table_elt *elt;
4855
4856 /* We can use HASH here since we know that canon_hash won't be
4857 called. */
4858 elt = lookup (folded_arg0,
4859 HASH (folded_arg0, GET_MODE (folded_arg0)),
4860 GET_MODE (folded_arg0));
4861
4862 if (elt)
4863 elt = elt->first_same_value;
4864
4865 for (; elt; elt = elt->next_same_value)
4866 {
4867 enum rtx_code eltcode = GET_CODE (elt->exp);
4868
4869 /* Just check for unary and binary operations. */
4870 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
4871 && GET_CODE (elt->exp) != SIGN_EXTEND
4872 && GET_CODE (elt->exp) != ZERO_EXTEND
4873 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
4874 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
4875 {
4876 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
4877
4878 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
4879 op0 = fold_rtx (op0, NULL_RTX);
4880
4881 op0 = equiv_constant (op0);
4882 if (op0)
4883 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
4884 op0, mode);
4885 }
4886 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
4887 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
4888 && eltcode != DIV && eltcode != MOD
4889 && eltcode != UDIV && eltcode != UMOD
4890 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
4891 && eltcode != ROTATE && eltcode != ROTATERT
4892 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
4893 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
4894 == mode))
4895 || CONSTANT_P (XEXP (elt->exp, 0)))
4896 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
4897 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
4898 == mode))
4899 || CONSTANT_P (XEXP (elt->exp, 1))))
4900 {
4901 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
4902 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
4903
4904 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
4905 op0 = fold_rtx (op0, NULL_RTX);
4906
4907 if (op0)
4908 op0 = equiv_constant (op0);
4909
4910 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
4911 op1 = fold_rtx (op1, NULL_RTX);
4912
4913 if (op1)
4914 op1 = equiv_constant (op1);
4915
4916 /* If we are looking for the low SImode part of
4917 (ashift:DI c (const_int 32)), it doesn't work
4918 to compute that in SImode, because a 32-bit shift
4919 in SImode is unpredictable. We know the value is 0. */
4920 if (op0 && op1
4921 && GET_CODE (elt->exp) == ASHIFT
4922 && GET_CODE (op1) == CONST_INT
4923 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
4924 {
4925 if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
4926
4927 /* If the count fits in the inner mode's width,
4928 but exceeds the outer mode's width,
4929 the value will get truncated to 0
4930 by the subreg. */
4931 new = const0_rtx;
4932 else
4933 /* If the count exceeds even the inner mode's width,
4934 don't fold this expression. */
4935 new = 0;
4936 }
4937 else if (op0 && op1)
4938 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
4939 op0, op1);
4940 }
4941
4942 else if (GET_CODE (elt->exp) == SUBREG
4943 && GET_MODE (SUBREG_REG (elt->exp)) == mode
4944 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
4945 <= UNITS_PER_WORD)
4946 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
4947 new = copy_rtx (SUBREG_REG (elt->exp));
4948
4949 if (new)
4950 return new;
4951 }
4952 }
4953
4954 return x;
4955
4956 case NOT:
4957 case NEG:
4958 /* If we have (NOT Y), see if Y is known to be (NOT Z).
4959 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
4960 new = lookup_as_function (XEXP (x, 0), code);
4961 if (new)
4962 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
4963 break;
4964
4965 case MEM:
4966 /* If we are not actually processing an insn, don't try to find the
4967 best address. Not only don't we care, but we could modify the
4968 MEM in an invalid way since we have no insn to validate against. */
4969 if (insn != 0)
4970 find_best_addr (insn, &XEXP (x, 0));
4971
4972 {
4973 /* Even if we don't fold in the insn itself,
4974 we can safely do so here, in hopes of getting a constant. */
4975 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
4976 rtx base = 0;
4977 HOST_WIDE_INT offset = 0;
4978
4979 if (GET_CODE (addr) == REG
4980 && REGNO_QTY_VALID_P (REGNO (addr))
4981 && GET_MODE (addr) == qty_mode[reg_qty[REGNO (addr)]]
4982 && qty_const[reg_qty[REGNO (addr)]] != 0)
4983 addr = qty_const[reg_qty[REGNO (addr)]];
4984
4985 /* If address is constant, split it into a base and integer offset. */
4986 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
4987 base = addr;
4988 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
4989 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
4990 {
4991 base = XEXP (XEXP (addr, 0), 0);
4992 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
4993 }
4994 else if (GET_CODE (addr) == LO_SUM
4995 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
4996 base = XEXP (addr, 1);
4997
4998 /* If this is a constant pool reference, we can fold it into its
4999 constant to allow better value tracking. */
5000 if (base && GET_CODE (base) == SYMBOL_REF
5001 && CONSTANT_POOL_ADDRESS_P (base))
5002 {
5003 rtx constant = get_pool_constant (base);
5004 enum machine_mode const_mode = get_pool_mode (base);
5005 rtx new;
5006
5007 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
5008 constant_pool_entries_cost = COST (constant);
5009
5010 /* If we are loading the full constant, we have an equivalence. */
5011 if (offset == 0 && mode == const_mode)
5012 return constant;
5013
5014 /* If this actually isn't a constant (weird!), we can't do
5015 anything. Otherwise, handle the two most common cases:
5016 extracting a word from a multi-word constant, and extracting
5017 the low-order bits. Other cases don't seem common enough to
5018 worry about. */
5019 if (! CONSTANT_P (constant))
5020 return x;
5021
5022 if (GET_MODE_CLASS (mode) == MODE_INT
5023 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
5024 && offset % UNITS_PER_WORD == 0
5025 && (new = operand_subword (constant,
5026 offset / UNITS_PER_WORD,
5027 0, const_mode)) != 0)
5028 return new;
5029
5030 if (((BYTES_BIG_ENDIAN
5031 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
5032 || (! BYTES_BIG_ENDIAN && offset == 0))
5033 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
5034 return new;
5035 }
5036
5037 /* If this is a reference to a label at a known position in a jump
5038 table, we also know its value. */
5039 if (base && GET_CODE (base) == LABEL_REF)
5040 {
5041 rtx label = XEXP (base, 0);
5042 rtx table_insn = NEXT_INSN (label);
5043
5044 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5045 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
5046 {
5047 rtx table = PATTERN (table_insn);
5048
5049 if (offset >= 0
5050 && (offset / GET_MODE_SIZE (GET_MODE (table))
5051 < XVECLEN (table, 0)))
5052 return XVECEXP (table, 0,
5053 offset / GET_MODE_SIZE (GET_MODE (table)));
5054 }
5055 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5056 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
5057 {
5058 rtx table = PATTERN (table_insn);
5059
5060 if (offset >= 0
5061 && (offset / GET_MODE_SIZE (GET_MODE (table))
5062 < XVECLEN (table, 1)))
5063 {
5064 offset /= GET_MODE_SIZE (GET_MODE (table));
5065 new = gen_rtx (MINUS, Pmode, XVECEXP (table, 1, offset),
5066 XEXP (table, 0));
5067
5068 if (GET_MODE (table) != Pmode)
5069 new = gen_rtx (TRUNCATE, GET_MODE (table), new);
5070
5071 /* Indicate this is a constant. This isn't a
5072 valid form of CONST, but it will only be used
5073 to fold the next insns and then discarded, so
5074 it should be safe. */
5075 return gen_rtx (CONST, GET_MODE (new), new);
5076 }
5077 }
5078 }
5079
5080 return x;
5081 }
5082
5083 case ASM_OPERANDS:
5084 for (i = XVECLEN (x, 3) - 1; i >= 0; i--)
5085 validate_change (insn, &XVECEXP (x, 3, i),
5086 fold_rtx (XVECEXP (x, 3, i), insn), 0);
5087 break;
5088 }
5089
5090 const_arg0 = 0;
5091 const_arg1 = 0;
5092 const_arg2 = 0;
5093 mode_arg0 = VOIDmode;
5094
5095 /* Try folding our operands.
5096 Then see which ones have constant values known. */
5097
5098 fmt = GET_RTX_FORMAT (code);
5099 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5100 if (fmt[i] == 'e')
5101 {
5102 rtx arg = XEXP (x, i);
5103 rtx folded_arg = arg, const_arg = 0;
5104 enum machine_mode mode_arg = GET_MODE (arg);
5105 rtx cheap_arg, expensive_arg;
5106 rtx replacements[2];
5107 int j;
5108
5109 /* Most arguments are cheap, so handle them specially. */
5110 switch (GET_CODE (arg))
5111 {
5112 case REG:
5113 /* This is the same as calling equiv_constant; it is duplicated
5114 here for speed. */
5115 if (REGNO_QTY_VALID_P (REGNO (arg))
5116 && qty_const[reg_qty[REGNO (arg)]] != 0
5117 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != REG
5118 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != PLUS)
5119 const_arg
5120 = gen_lowpart_if_possible (GET_MODE (arg),
5121 qty_const[reg_qty[REGNO (arg)]]);
5122 break;
5123
5124 case CONST:
5125 case CONST_INT:
5126 case SYMBOL_REF:
5127 case LABEL_REF:
5128 case CONST_DOUBLE:
5129 const_arg = arg;
5130 break;
5131
5132 #ifdef HAVE_cc0
5133 case CC0:
5134 folded_arg = prev_insn_cc0;
5135 mode_arg = prev_insn_cc0_mode;
5136 const_arg = equiv_constant (folded_arg);
5137 break;
5138 #endif
5139
5140 default:
5141 folded_arg = fold_rtx (arg, insn);
5142 const_arg = equiv_constant (folded_arg);
5143 }
5144
5145 /* For the first three operands, see if the operand
5146 is constant or equivalent to a constant. */
5147 switch (i)
5148 {
5149 case 0:
5150 folded_arg0 = folded_arg;
5151 const_arg0 = const_arg;
5152 mode_arg0 = mode_arg;
5153 break;
5154 case 1:
5155 folded_arg1 = folded_arg;
5156 const_arg1 = const_arg;
5157 break;
5158 case 2:
5159 const_arg2 = const_arg;
5160 break;
5161 }
5162
5163 /* Pick the least expensive of the folded argument and an
5164 equivalent constant argument. */
5165 if (const_arg == 0 || const_arg == folded_arg
5166 || COST (const_arg) > COST (folded_arg))
5167 cheap_arg = folded_arg, expensive_arg = const_arg;
5168 else
5169 cheap_arg = const_arg, expensive_arg = folded_arg;
5170
5171 /* Try to replace the operand with the cheapest of the two
5172 possibilities. If it doesn't work and this is either of the first
5173 two operands of a commutative operation, try swapping them.
5174 If THAT fails, try the more expensive, provided it is cheaper
5175 than what is already there. */
5176
5177 if (cheap_arg == XEXP (x, i))
5178 continue;
5179
5180 if (insn == 0 && ! copied)
5181 {
5182 x = copy_rtx (x);
5183 copied = 1;
5184 }
5185
5186 replacements[0] = cheap_arg, replacements[1] = expensive_arg;
5187 for (j = 0;
5188 j < 2 && replacements[j]
5189 && COST (replacements[j]) < COST (XEXP (x, i));
5190 j++)
5191 {
5192 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
5193 break;
5194
5195 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c')
5196 {
5197 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
5198 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
5199
5200 if (apply_change_group ())
5201 {
5202 /* Swap them back to be invalid so that this loop can
5203 continue and flag them to be swapped back later. */
5204 rtx tem;
5205
5206 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
5207 XEXP (x, 1) = tem;
5208 must_swap = 1;
5209 break;
5210 }
5211 }
5212 }
5213 }
5214
5215 else if (fmt[i] == 'E')
5216 /* Don't try to fold inside of a vector of expressions.
5217 Doing nothing is harmless. */
5218 ;
5219
5220 /* If a commutative operation, place a constant integer as the second
5221 operand unless the first operand is also a constant integer. Otherwise,
5222 place any constant second unless the first operand is also a constant. */
5223
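  /* Illustrative example: for a commutative code such as PLUS,
     (plus (const_int 2) (reg)) is rewritten here as
     (plus (reg) (const_int 2)), so later code sees the canonical
     operand order.  */
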
5224 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
5225 {
5226 if (must_swap || (const_arg0
5227 && (const_arg1 == 0
5228 || (GET_CODE (const_arg0) == CONST_INT
5229 && GET_CODE (const_arg1) != CONST_INT))))
5230 {
5231 register rtx tem = XEXP (x, 0);
5232
5233 if (insn == 0 && ! copied)
5234 {
5235 x = copy_rtx (x);
5236 copied = 1;
5237 }
5238
5239 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
5240 validate_change (insn, &XEXP (x, 1), tem, 1);
5241 if (apply_change_group ())
5242 {
5243 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
5244 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
5245 }
5246 }
5247 }
5248
5249 /* If X is an arithmetic operation, see if we can simplify it. */
5250
5251 switch (GET_RTX_CLASS (code))
5252 {
5253 case '1':
5254 {
5255 int is_const = 0;
5256
5257 /* We can't simplify extension ops unless we know the
5258 original mode. */
5259 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
5260 && mode_arg0 == VOIDmode)
5261 break;
5262
5263 /* If we had a CONST, strip it off and put it back later if we
5264 fold. */
5265 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
5266 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
5267
5268 new = simplify_unary_operation (code, mode,
5269 const_arg0 ? const_arg0 : folded_arg0,
5270 mode_arg0);
5271 if (new != 0 && is_const)
5272 new = gen_rtx (CONST, mode, new);
5273 }
5274 break;
5275
5276 case '<':
5277 /* See what items are actually being compared and set FOLDED_ARG[01]
5278 to those values and CODE to the actual comparison code. If any are
5279 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
5280 do anything if both operands are already known to be constant. */
5281
5282 if (const_arg0 == 0 || const_arg1 == 0)
5283 {
5284 struct table_elt *p0, *p1;
5285 rtx true = const_true_rtx, false = const0_rtx;
5286 enum machine_mode mode_arg1;
5287
5288 #ifdef FLOAT_STORE_FLAG_VALUE
5289 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5290 {
5291 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5292 mode);
5293 false = CONST0_RTX (mode);
5294 }
5295 #endif
5296
5297 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
5298 &mode_arg0, &mode_arg1);
5299 const_arg0 = equiv_constant (folded_arg0);
5300 const_arg1 = equiv_constant (folded_arg1);
5301
5302 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
5303 what kinds of things are being compared, so we can't do
5304 anything with this comparison. */
5305
5306 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
5307 break;
5308
5309 /* If we do not now have two constants being compared, see
5310 if we can nevertheless deduce some things about the
5311 comparison. */
5312 if (const_arg0 == 0 || const_arg1 == 0)
5313 {
5314 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or
5315 non-explicit constant? These aren't zero, but we
5316 don't know their sign. */
5317 if (const_arg1 == const0_rtx
5318 && (NONZERO_BASE_PLUS_P (folded_arg0)
5319 #if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
5320 come out as 0. */
5321 || GET_CODE (folded_arg0) == SYMBOL_REF
5322 #endif
5323 || GET_CODE (folded_arg0) == LABEL_REF
5324 || GET_CODE (folded_arg0) == CONST))
5325 {
5326 if (code == EQ)
5327 return false;
5328 else if (code == NE)
5329 return true;
5330 }
5331
5332 /* See if the two operands are the same. We don't do this
5333 	     for IEEE floating-point, since we can't assume x == x
5334 	     when x might be a NaN. */
5335
5336 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5337 || ! FLOAT_MODE_P (mode_arg0) || flag_fast_math)
5338 && (folded_arg0 == folded_arg1
5339 || (GET_CODE (folded_arg0) == REG
5340 && GET_CODE (folded_arg1) == REG
5341 && (reg_qty[REGNO (folded_arg0)]
5342 == reg_qty[REGNO (folded_arg1)]))
5343 || ((p0 = lookup (folded_arg0,
5344 (safe_hash (folded_arg0, mode_arg0)
5345 % NBUCKETS), mode_arg0))
5346 && (p1 = lookup (folded_arg1,
5347 (safe_hash (folded_arg1, mode_arg0)
5348 % NBUCKETS), mode_arg0))
5349 && p0->first_same_value == p1->first_same_value)))
5350 return ((code == EQ || code == LE || code == GE
5351 || code == LEU || code == GEU)
5352 ? true : false);
5353
5354 /* If FOLDED_ARG0 is a register, see if the comparison we are
5355 doing now is either the same as we did before or the reverse
5356 (we only check the reverse if not floating-point). */
5357 else if (GET_CODE (folded_arg0) == REG)
5358 {
5359 int qty = reg_qty[REGNO (folded_arg0)];
5360
5361 if (REGNO_QTY_VALID_P (REGNO (folded_arg0))
5362 && (comparison_dominates_p (qty_comparison_code[qty], code)
5363 || (comparison_dominates_p (qty_comparison_code[qty],
5364 reverse_condition (code))
5365 && ! FLOAT_MODE_P (mode_arg0)))
5366 && (rtx_equal_p (qty_comparison_const[qty], folded_arg1)
5367 || (const_arg1
5368 && rtx_equal_p (qty_comparison_const[qty],
5369 const_arg1))
5370 || (GET_CODE (folded_arg1) == REG
5371 && (reg_qty[REGNO (folded_arg1)]
5372 == qty_comparison_qty[qty]))))
5373 return (comparison_dominates_p (qty_comparison_code[qty],
5374 code)
5375 ? true : false);
5376 }
5377 }
5378 }
5379
5380 /* If we are comparing against zero, see if the first operand is
5381 equivalent to an IOR with a constant. If so, we may be able to
5382 determine the result of this comparison. */
5383
5384 if (const_arg1 == const0_rtx)
5385 {
5386 rtx y = lookup_as_function (folded_arg0, IOR);
5387 rtx inner_const;
5388
5389 if (y != 0
5390 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
5391 && GET_CODE (inner_const) == CONST_INT
5392 && INTVAL (inner_const) != 0)
5393 {
5394 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
5395 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
5396 && (INTVAL (inner_const)
5397 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
5398 rtx true = const_true_rtx, false = const0_rtx;
5399
5400 #ifdef FLOAT_STORE_FLAG_VALUE
5401 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5402 {
5403 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5404 mode);
5405 false = CONST0_RTX (mode);
5406 }
5407 #endif
5408
5409 switch (code)
5410 {
5411 case EQ:
5412 return false;
5413 case NE:
5414 return true;
5415 case LT: case LE:
5416 if (has_sign)
5417 return true;
5418 break;
5419 case GT: case GE:
5420 if (has_sign)
5421 return false;
5422 break;
5423 }
5424 }
5425 }
5426
5427 new = simplify_relational_operation (code, mode_arg0,
5428 const_arg0 ? const_arg0 : folded_arg0,
5429 const_arg1 ? const_arg1 : folded_arg1);
5430 #ifdef FLOAT_STORE_FLAG_VALUE
5431 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
5432 new = ((new == const0_rtx) ? CONST0_RTX (mode)
5433 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE, mode));
5434 #endif
5435 break;
5436
5437 case '2':
5438 case 'c':
5439 switch (code)
5440 {
5441 case PLUS:
5442 /* If the second operand is a LABEL_REF, see if the first is a MINUS
5443 with that LABEL_REF as its second operand. If so, the result is
5444 the first operand of that MINUS. This handles switches with an
5445 ADDR_DIFF_VEC table. */
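	  /* An illustrative sketch (labels invented): for a dispatch
	     table entry we may see
	       (plus (minus (label_ref L2) (label_ref L1)) (label_ref L1))
	     which folds to just (label_ref L2), the first operand of
	     the MINUS.  */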
5446 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
5447 {
5448 rtx y
5449 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
5450 : lookup_as_function (folded_arg0, MINUS);
5451
5452 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5453 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
5454 return XEXP (y, 0);
5455
5456 /* Now try for a CONST of a MINUS like the above. */
5457 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
5458 : lookup_as_function (folded_arg0, CONST))) != 0
5459 && GET_CODE (XEXP (y, 0)) == MINUS
5460 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5461 		  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
5462 return XEXP (XEXP (y, 0), 0);
5463 }
5464
5465 /* Likewise if the operands are in the other order. */
5466 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
5467 {
5468 rtx y
5469 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
5470 : lookup_as_function (folded_arg1, MINUS);
5471
5472 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5473 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
5474 return XEXP (y, 0);
5475
5476 /* Now try for a CONST of a MINUS like the above. */
5477 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
5478 : lookup_as_function (folded_arg1, CONST))) != 0
5479 && GET_CODE (XEXP (y, 0)) == MINUS
5480 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5481 		  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
5482 return XEXP (XEXP (y, 0), 0);
5483 }
5484
5485 /* If second operand is a register equivalent to a negative
5486 CONST_INT, see if we can find a register equivalent to the
5487 positive constant. Make a MINUS if so. Don't do this for
5488 a non-negative constant since we might then alternate between
5489 	     choosing positive and negative constants.  Having the positive
5490 constant previously-used is the more common case. Be sure
5491 the resulting constant is non-negative; if const_arg1 were
5492 the smallest negative number this would overflow: depending
5493 on the mode, this would either just be the same value (and
5494 hence not save anything) or be incorrect. */
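	  /* Sketch with invented registers: given
	     (plus:SI (reg:SI 70) (const_int -4)), if some register, say
	     (reg:SI 71), is already known to hold (const_int 4), the
	     expression becomes (minus:SI (reg:SI 70) (reg:SI 71)).  */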
5495 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
5496 && INTVAL (const_arg1) < 0
5497 && - INTVAL (const_arg1) >= 0
5498 && GET_CODE (folded_arg1) == REG)
5499 {
5500 rtx new_const = GEN_INT (- INTVAL (const_arg1));
5501 struct table_elt *p
5502 = lookup (new_const, safe_hash (new_const, mode) % NBUCKETS,
5503 mode);
5504
5505 if (p)
5506 for (p = p->first_same_value; p; p = p->next_same_value)
5507 if (GET_CODE (p->exp) == REG)
5508 return cse_gen_binary (MINUS, mode, folded_arg0,
5509 canon_reg (p->exp, NULL_RTX));
5510 }
5511 goto from_plus;
5512
5513 case MINUS:
5514 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
5515 If so, produce (PLUS Z C2-C). */
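	  /* For instance (values illustrative): if Y is known to be
	     (plus:SI (reg:SI 70) (const_int 7)), then
	     (minus Y (const_int 3)) folds to
	     (plus:SI (reg:SI 70) (const_int 4)).  */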
5516 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
5517 {
5518 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
5519 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
5520 return fold_rtx (plus_constant (copy_rtx (y),
5521 -INTVAL (const_arg1)),
5522 NULL_RTX);
5523 }
5524
5525 /* ... fall through ... */
5526
5527 from_plus:
5528 case SMIN: case SMAX: case UMIN: case UMAX:
5529 case IOR: case AND: case XOR:
5530 case MULT: case DIV: case UDIV:
5531 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
5532 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
5533 is known to be of similar form, we may be able to replace the
5534 operation with a combined operation. This may eliminate the
5535 intermediate operation if every use is simplified in this way.
5536 Note that the similar optimization done by combine.c only works
5537 if the intermediate operation's result has only one reference. */
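	  /* A concrete sketch (registers invented): if (reg:SI 71) was set
	     from (ashift:SI (reg:SI 70) (const_int 3)) and we now fold
	     (ashift:SI (reg:SI 71) (const_int 2)), the shift counts are
	     composed with PLUS (the associate_code chosen below), giving
	     (ashift:SI (reg:SI 70) (const_int 5)).  */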
5538
5539 if (GET_CODE (folded_arg0) == REG
5540 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
5541 {
5542 int is_shift
5543 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
5544 rtx y = lookup_as_function (folded_arg0, code);
5545 rtx inner_const;
5546 enum rtx_code associate_code;
5547 rtx new_const;
5548
5549 if (y == 0
5550 || 0 == (inner_const
5551 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
5552 || GET_CODE (inner_const) != CONST_INT
5553 /* If we have compiled a statement like
5554 "if (x == (x & mask1))", and now are looking at
5555 "x & mask2", we will have a case where the first operand
5556 of Y is the same as our first operand. Unless we detect
5557 this case, an infinite loop will result. */
5558 || XEXP (y, 0) == folded_arg0)
5559 break;
5560
5561 /* Don't associate these operations if they are a PLUS with the
5562 same constant and it is a power of two. These might be doable
5563 with a pre- or post-increment. Similarly for two subtracts of
5564 identical powers of two with post decrement. */
5565
5566 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
5567 && (0
5568 #if defined(HAVE_PRE_INCREMENT) || defined(HAVE_POST_INCREMENT)
5569 || exact_log2 (INTVAL (const_arg1)) >= 0
5570 #endif
5571 #if defined(HAVE_PRE_DECREMENT) || defined(HAVE_POST_DECREMENT)
5572 || exact_log2 (- INTVAL (const_arg1)) >= 0
5573 #endif
5574 ))
5575 break;
5576
5577 /* Compute the code used to compose the constants. For example,
5578 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
5579
5580 associate_code
5581 = (code == MULT || code == DIV || code == UDIV ? MULT
5582 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
5583
5584 new_const = simplify_binary_operation (associate_code, mode,
5585 const_arg1, inner_const);
5586
5587 if (new_const == 0)
5588 break;
5589
5590 /* If we are associating shift operations, don't let this
5591 produce a shift of the size of the object or larger.
5592 This could occur when we follow a sign-extend by a right
5593 shift on a machine that does a sign-extend as a pair
5594 of shifts. */
5595
5596 if (is_shift && GET_CODE (new_const) == CONST_INT
5597 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
5598 {
5599 /* As an exception, we can turn an ASHIFTRT of this
5600 form into a shift of the number of bits - 1. */
5601 if (code == ASHIFTRT)
5602 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
5603 else
5604 break;
5605 }
5606
5607 y = copy_rtx (XEXP (y, 0));
5608
5609 /* If Y contains our first operand (the most common way this
5610 		  /* If Y contains our first operand (the most common way this
		     can happen is if Y is a MEM), we would go into an infinite
5611 loop if we tried to fold it. So don't in that case. */
5612
5613 if (! reg_mentioned_p (folded_arg0, y))
5614 y = fold_rtx (y, insn);
5615
5616 return cse_gen_binary (code, mode, y, new_const);
5617 }
5618 }
5619
5620 new = simplify_binary_operation (code, mode,
5621 const_arg0 ? const_arg0 : folded_arg0,
5622 const_arg1 ? const_arg1 : folded_arg1);
5623 break;
5624
5625 case 'o':
5626 /* (lo_sum (high X) X) is simply X. */
5627 if (code == LO_SUM && const_arg0 != 0
5628 && GET_CODE (const_arg0) == HIGH
5629 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
5630 return const_arg1;
5631 break;
5632
5633 case '3':
5634 case 'b':
5635 new = simplify_ternary_operation (code, mode, mode_arg0,
5636 const_arg0 ? const_arg0 : folded_arg0,
5637 const_arg1 ? const_arg1 : folded_arg1,
5638 const_arg2 ? const_arg2 : XEXP (x, 2));
5639 break;
5640 }
5641
5642 return new ? new : x;
5643 }
5644 \f
5645 /* Return a constant value currently equivalent to X.
5646 Return 0 if we don't know one. */
5647
5648 static rtx
5649 equiv_constant (x)
5650 rtx x;
5651 {
5652 if (GET_CODE (x) == REG
5653 && REGNO_QTY_VALID_P (REGNO (x))
5654 && qty_const[reg_qty[REGNO (x)]])
5655 x = gen_lowpart_if_possible (GET_MODE (x), qty_const[reg_qty[REGNO (x)]]);
5656
5657 if (x != 0 && CONSTANT_P (x))
5658 return x;
5659
5660 /* If X is a MEM, try to fold it outside the context of any insn to see if
5661 it might be equivalent to a constant. That handles the case where it
5662 is a constant-pool reference. Then try to look it up in the hash table
5663 in case it is something whose value we have seen before. */
5664
5665 if (GET_CODE (x) == MEM)
5666 {
5667 struct table_elt *elt;
5668
5669 x = fold_rtx (x, NULL_RTX);
5670 if (CONSTANT_P (x))
5671 return x;
5672
5673 elt = lookup (x, safe_hash (x, GET_MODE (x)) % NBUCKETS, GET_MODE (x));
5674 if (elt == 0)
5675 return 0;
5676
5677 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
5678 if (elt->is_const && CONSTANT_P (elt->exp))
5679 return elt->exp;
5680 }
5681
5682 return 0;
5683 }
5684 \f
5685 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
5686 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
5687 least-significant part of X.
5688 MODE specifies how big a part of X to return.
5689
5690 If the requested operation cannot be done, 0 is returned.
5691
5692 This is similar to gen_lowpart in emit-rtl.c. */
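/* An illustrative example, assuming 4-byte words: with BYTES_BIG_ENDIAN
   and WORDS_BIG_ENDIAN both set, the QImode lowpart of
   (mem:SI (reg:SI 70)) is (mem:QI (plus:SI (reg:SI 70) (const_int 3))),
   keeping the address just past the data unchanged; on a little-endian
   machine the offset would be 0.  */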
5693
5694 rtx
5695 gen_lowpart_if_possible (mode, x)
5696 enum machine_mode mode;
5697 register rtx x;
5698 {
5699 rtx result = gen_lowpart_common (mode, x);
5700
5701 if (result)
5702 return result;
5703 else if (GET_CODE (x) == MEM)
5704 {
5705 /* This is the only other case we handle. */
5706 register int offset = 0;
5707 rtx new;
5708
5709 if (WORDS_BIG_ENDIAN)
5710 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
5711 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
5712 if (BYTES_BIG_ENDIAN)
5713 /* Adjust the address so that the address-after-the-data is
5714 unchanged. */
5715 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
5716 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
5717 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
5718 if (! memory_address_p (mode, XEXP (new, 0)))
5719 return 0;
5720 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
5721 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
5722 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
5723 return new;
5724 }
5725 else
5726 return 0;
5727 }
5728 \f
5729 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
5730 branch. It will be zero if not.
5731
5732 In certain cases, this can cause us to add an equivalence. For example,
5733 if we are following the taken case of
5734 if (i == 2)
5735 we can add the fact that `i' and '2' are now equivalent.
5736
5737 In any case, we can record that this comparison was passed. If the same
5738 comparison is seen later, we will know its value. */
5739
5740 static void
5741 record_jump_equiv (insn, taken)
5742 rtx insn;
5743 int taken;
5744 {
5745 int cond_known_true;
5746 rtx op0, op1;
5747 enum machine_mode mode, mode0, mode1;
5748 int reversed_nonequality = 0;
5749 enum rtx_code code;
5750
5751 /* Ensure this is the right kind of insn. */
5752 if (! condjump_p (insn) || simplejump_p (insn))
5753 return;
5754
5755 /* See if this jump condition is known true or false. */
5756 if (taken)
5757 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 2) == pc_rtx);
5758 else
5759 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx);
5760
5761 /* Get the type of comparison being done and the operands being compared.
5762 If we had to reverse a non-equality condition, record that fact so we
5763 know that it isn't valid for floating-point. */
5764 code = GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 0));
5765 op0 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 0), insn);
5766 op1 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 1), insn);
5767
5768 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
5769 if (! cond_known_true)
5770 {
5771 reversed_nonequality = (code != EQ && code != NE);
5772 code = reverse_condition (code);
5773 }
5774
5775 /* The mode is the mode of the non-constant. */
5776 mode = mode0;
5777 if (mode1 != VOIDmode)
5778 mode = mode1;
5779
5780 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
5781 }
5782
5783 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
5784 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
5785 Make any useful entries we can with that information. Called from
5786 above function and called recursively. */
5787
5788 static void
5789 record_jump_cond (code, mode, op0, op1, reversed_nonequality)
5790 enum rtx_code code;
5791 enum machine_mode mode;
5792 rtx op0, op1;
5793 int reversed_nonequality;
5794 {
5795 unsigned op0_hash, op1_hash;
5796 int op0_in_memory, op0_in_struct, op1_in_memory, op1_in_struct;
5797 struct table_elt *op0_elt, *op1_elt;
5798
5799 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
5800 we know that they are also equal in the smaller mode (this is also
5801 true for all smaller modes whether or not there is a SUBREG, but
5802      is not worth testing for when there is no SUBREG).  */
5803
5804 /* Note that GET_MODE (op0) may not equal MODE. */
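  /* Illustrative sketch: if OP0 is the paradoxical
     (subreg:DI (reg:SI 70) 0) and OP0 == OP1, then (reg:SI 70) must also
     equal the SImode lowpart of OP1, and we record that narrower
     equivalence by the recursive call below.  */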
5805 if (code == EQ && GET_CODE (op0) == SUBREG
5806 && (GET_MODE_SIZE (GET_MODE (op0))
5807 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
5808 {
5809 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
5810 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
5811
5812 record_jump_cond (code, mode, SUBREG_REG (op0),
5813 tem ? tem : gen_rtx (SUBREG, inner_mode, op1, 0),
5814 reversed_nonequality);
5815 }
5816
5817 if (code == EQ && GET_CODE (op1) == SUBREG
5818 && (GET_MODE_SIZE (GET_MODE (op1))
5819 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
5820 {
5821 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
5822 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
5823
5824 record_jump_cond (code, mode, SUBREG_REG (op1),
5825 tem ? tem : gen_rtx (SUBREG, inner_mode, op0, 0),
5826 reversed_nonequality);
5827 }
5828
5829 /* Similarly, if this is an NE comparison, and either is a SUBREG
5830 making a smaller mode, we know the whole thing is also NE. */
5831
5832 /* Note that GET_MODE (op0) may not equal MODE;
5833 if we test MODE instead, we can get an infinite recursion
5834 alternating between two modes each wider than MODE. */
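  /* Sketch: if OP0 is (subreg:QI (reg:SI 70) 0), the low part of
     (reg:SI 70), and it compares NE to OP1, the full SImode values must
     differ as well, since equal values would have equal low parts; so we
     record NE on the inner register too.  */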
5835
5836 if (code == NE && GET_CODE (op0) == SUBREG
5837 && subreg_lowpart_p (op0)
5838 && (GET_MODE_SIZE (GET_MODE (op0))
5839 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
5840 {
5841 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
5842 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
5843
5844 record_jump_cond (code, mode, SUBREG_REG (op0),
5845 tem ? tem : gen_rtx (SUBREG, inner_mode, op1, 0),
5846 reversed_nonequality);
5847 }
5848
5849 if (code == NE && GET_CODE (op1) == SUBREG
5850 && subreg_lowpart_p (op1)
5851 && (GET_MODE_SIZE (GET_MODE (op1))
5852 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
5853 {
5854 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
5855 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
5856
5857 record_jump_cond (code, mode, SUBREG_REG (op1),
5858 tem ? tem : gen_rtx (SUBREG, inner_mode, op0, 0),
5859 reversed_nonequality);
5860 }
5861
5862 /* Hash both operands. */
5863
5864 do_not_record = 0;
5865 hash_arg_in_memory = 0;
5866 hash_arg_in_struct = 0;
5867 op0_hash = HASH (op0, mode);
5868 op0_in_memory = hash_arg_in_memory;
5869 op0_in_struct = hash_arg_in_struct;
5870
5871 if (do_not_record)
5872 return;
5873
5874 do_not_record = 0;
5875 hash_arg_in_memory = 0;
5876 hash_arg_in_struct = 0;
5877 op1_hash = HASH (op1, mode);
5878 op1_in_memory = hash_arg_in_memory;
5879 op1_in_struct = hash_arg_in_struct;
5880
5881 if (do_not_record)
5882 return;
5883
5884 /* Look up both operands. */
5885 op0_elt = lookup (op0, op0_hash, mode);
5886 op1_elt = lookup (op1, op1_hash, mode);
5887
5888 /* If both operands are already equivalent or if they are not in the
5889 table but are identical, do nothing. */
5890 if ((op0_elt != 0 && op1_elt != 0
5891 && op0_elt->first_same_value == op1_elt->first_same_value)
5892 || op0 == op1 || rtx_equal_p (op0, op1))
5893 return;
5894
5895 /* If we aren't setting two things equal all we can do is save this
5896 comparison. Similarly if this is floating-point. In the latter
5897 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
5898 If we record the equality, we might inadvertently delete code
5899 whose intent was to change -0 to +0. */
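  /* For example: on the taken arm of `if (x == 0.0)' we must not record
     x as equivalent to 0.0, since x may be -0.0 and code whose only
     purpose is to turn -0.0 into +0.0 could then be deleted; we record
     only the comparison itself.  */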
5900
5901 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
5902 {
5903 /* If we reversed a floating-point comparison, if OP0 is not a
5904 	 register, or if OP1 is neither a register nor a constant, we can't
5905 do anything. */
5906
5907 if (GET_CODE (op1) != REG)
5908 op1 = equiv_constant (op1);
5909
5910 if ((reversed_nonequality && FLOAT_MODE_P (mode))
5911 || GET_CODE (op0) != REG || op1 == 0)
5912 return;
5913
5914 /* Put OP0 in the hash table if it isn't already. This gives it a
5915 new quantity number. */
5916 if (op0_elt == 0)
5917 {
5918 if (insert_regs (op0, NULL_PTR, 0))
5919 {
5920 rehash_using_reg (op0);
5921 op0_hash = HASH (op0, mode);
5922
5923 /* If OP0 is contained in OP1, this changes its hash code
5924 as well. Faster to rehash than to check, except
5925 for the simple case of a constant. */
5926 if (! CONSTANT_P (op1))
5927 		op1_hash = HASH (op1, mode);
5928 }
5929
5930 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
5931 op0_elt->in_memory = op0_in_memory;
5932 op0_elt->in_struct = op0_in_struct;
5933 }
5934
5935 qty_comparison_code[reg_qty[REGNO (op0)]] = code;
5936 if (GET_CODE (op1) == REG)
5937 {
5938 /* Look it up again--in case op0 and op1 are the same. */
5939 op1_elt = lookup (op1, op1_hash, mode);
5940
5941 /* Put OP1 in the hash table so it gets a new quantity number. */
5942 if (op1_elt == 0)
5943 {
5944 if (insert_regs (op1, NULL_PTR, 0))
5945 {
5946 rehash_using_reg (op1);
5947 op1_hash = HASH (op1, mode);
5948 }
5949
5950 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
5951 op1_elt->in_memory = op1_in_memory;
5952 op1_elt->in_struct = op1_in_struct;
5953 }
5954
5955 qty_comparison_qty[reg_qty[REGNO (op0)]] = reg_qty[REGNO (op1)];
5956 qty_comparison_const[reg_qty[REGNO (op0)]] = 0;
5957 }
5958 else
5959 {
5960 qty_comparison_qty[reg_qty[REGNO (op0)]] = -1;
5961 qty_comparison_const[reg_qty[REGNO (op0)]] = op1;
5962 }
5963
5964 return;
5965 }
5966
5967 /* If either side is still missing an equivalence, make it now,
5968 then merge the equivalences. */
5969
5970 if (op0_elt == 0)
5971 {
5972 if (insert_regs (op0, NULL_PTR, 0))
5973 {
5974 rehash_using_reg (op0);
5975 op0_hash = HASH (op0, mode);
5976 }
5977
5978 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
5979 op0_elt->in_memory = op0_in_memory;
5980 op0_elt->in_struct = op0_in_struct;
5981 }
5982
5983 if (op1_elt == 0)
5984 {
5985 if (insert_regs (op1, NULL_PTR, 0))
5986 {
5987 rehash_using_reg (op1);
5988 op1_hash = HASH (op1, mode);
5989 }
5990
5991 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
5992 op1_elt->in_memory = op1_in_memory;
5993 op1_elt->in_struct = op1_in_struct;
5994 }
5995
5996 merge_equiv_classes (op0_elt, op1_elt);
5997 last_jump_equiv_class = op0_elt;
5998 }
5999 \f
6000 /* CSE processing for one instruction.
6001 First simplify sources and addresses of all assignments
6002    in the instruction, using previously-computed equivalent values.
6003 Then install the new sources and destinations in the table
6004 of available values.
6005
6006 If IN_LIBCALL_BLOCK is nonzero, don't record any equivalence made in
6007 the insn. */
6008
6009 /* Data on one SET contained in the instruction. */
6010
6011 struct set
6012 {
6013 /* The SET rtx itself. */
6014 rtx rtl;
6015 /* The SET_SRC of the rtx (the original value, if it is changing). */
6016 rtx src;
6017 /* The hash-table element for the SET_SRC of the SET. */
6018 struct table_elt *src_elt;
6019 /* Hash value for the SET_SRC. */
6020 unsigned src_hash;
6021 /* Hash value for the SET_DEST. */
6022 unsigned dest_hash;
6023 /* The SET_DEST, with SUBREG, etc., stripped. */
6024 rtx inner_dest;
6025 /* Place where the pointer to the INNER_DEST was found. */
6026 rtx *inner_dest_loc;
6027 /* Nonzero if the SET_SRC is in memory. */
6028 char src_in_memory;
6029 /* Nonzero if the SET_SRC is in a structure. */
6030 char src_in_struct;
6031 /* Nonzero if the SET_SRC contains something
6032 whose value cannot be predicted and understood. */
6033 char src_volatile;
6034 /* Original machine mode, in case it becomes a CONST_INT. */
6035 enum machine_mode mode;
6036 /* A constant equivalent for SET_SRC, if any. */
6037 rtx src_const;
6038 /* Hash value of constant equivalent for SET_SRC. */
6039 unsigned src_const_hash;
6040 /* Table entry for constant equivalent for SET_SRC, if any. */
6041 struct table_elt *src_const_elt;
6042 };
6043
6044 static void
6045 cse_insn (insn, in_libcall_block)
6046 rtx insn;
6047 int in_libcall_block;
6048 {
6049 register rtx x = PATTERN (insn);
6050 register int i;
6051 rtx tem;
6052 register int n_sets = 0;
6053
6054 /* Records what this insn does to set CC0. */
6055 rtx this_insn_cc0 = 0;
6056 enum machine_mode this_insn_cc0_mode = VOIDmode;
6057
6058 rtx src_eqv = 0;
6059 struct table_elt *src_eqv_elt = 0;
6060 int src_eqv_volatile;
6061 int src_eqv_in_memory;
6062 int src_eqv_in_struct;
6063 unsigned src_eqv_hash;
6064
6065 struct set *sets;
6066
6067 this_insn = insn;
6068
6069 /* Find all the SETs and CLOBBERs in this instruction.
6070 Record all the SETs in the array `set' and count them.
6071 Also determine whether there is a CLOBBER that invalidates
6072 all memory references, or all references at varying addresses. */
6073
6074 if (GET_CODE (insn) == CALL_INSN)
6075 {
6076 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6077 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
6078 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
6079 }
6080
6081 if (GET_CODE (x) == SET)
6082 {
6083 sets = (struct set *) alloca (sizeof (struct set));
6084 sets[0].rtl = x;
6085
6086 /* Ignore SETs that are unconditional jumps.
6087 They never need cse processing, so this does not hurt.
6088 The reason is not efficiency but rather
6089 so that we can test at the end for instructions
6090 that have been simplified to unconditional jumps
6091 and not be misled by unchanged instructions
6092 that were unconditional jumps to begin with. */
6093 if (SET_DEST (x) == pc_rtx
6094 && GET_CODE (SET_SRC (x)) == LABEL_REF)
6095 ;
6096
6097 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
6098 The hard function value register is used only once, to copy to
6099 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
6100 Ensure we invalidate the destination register. On the 80386 no
6101 other code would invalidate it since it is a fixed_reg.
6102 We need not check the return of apply_change_group; see canon_reg. */
6103
6104 else if (GET_CODE (SET_SRC (x)) == CALL)
6105 {
6106 canon_reg (SET_SRC (x), insn);
6107 apply_change_group ();
6108 fold_rtx (SET_SRC (x), insn);
6109 invalidate (SET_DEST (x), VOIDmode);
6110 }
6111 else
6112 n_sets = 1;
6113 }
6114 else if (GET_CODE (x) == PARALLEL)
6115 {
6116 register int lim = XVECLEN (x, 0);
6117
6118 sets = (struct set *) alloca (lim * sizeof (struct set));
6119
6120 /* Find all regs explicitly clobbered in this insn,
6121 and ensure they are not replaced with any other regs
6122 elsewhere in this insn.
6123 When a reg that is clobbered is also used for input,
6124 we should presume that that is for a reason,
6125 and we should not substitute some other register
6126 which is not supposed to be clobbered.
6127 Therefore, this loop cannot be merged into the one below
6128 because a CALL may precede a CLOBBER and refer to the
6129 value clobbered. We must not let a canonicalization do
6130 anything in that case. */
6131 for (i = 0; i < lim; i++)
6132 {
6133 register rtx y = XVECEXP (x, 0, i);
6134 if (GET_CODE (y) == CLOBBER)
6135 {
6136 rtx clobbered = XEXP (y, 0);
6137
6138 if (GET_CODE (clobbered) == REG
6139 || GET_CODE (clobbered) == SUBREG)
6140 invalidate (clobbered, VOIDmode);
6141 else if (GET_CODE (clobbered) == STRICT_LOW_PART
6142 || GET_CODE (clobbered) == ZERO_EXTRACT)
6143 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
6144 }
6145 }
6146
6147 for (i = 0; i < lim; i++)
6148 {
6149 register rtx y = XVECEXP (x, 0, i);
6150 if (GET_CODE (y) == SET)
6151 {
6152 /* As above, we ignore unconditional jumps and call-insns and
6153 ignore the result of apply_change_group. */
6154 if (GET_CODE (SET_SRC (y)) == CALL)
6155 {
6156 canon_reg (SET_SRC (y), insn);
6157 apply_change_group ();
6158 fold_rtx (SET_SRC (y), insn);
6159 invalidate (SET_DEST (y), VOIDmode);
6160 }
6161 else if (SET_DEST (y) == pc_rtx
6162 && GET_CODE (SET_SRC (y)) == LABEL_REF)
6163 ;
6164 else
6165 sets[n_sets++].rtl = y;
6166 }
6167 else if (GET_CODE (y) == CLOBBER)
6168 {
6169 /* If we clobber memory, canon the address.
6170 This does nothing when a register is clobbered
6171 because we have already invalidated the reg. */
6172 if (GET_CODE (XEXP (y, 0)) == MEM)
6173 canon_reg (XEXP (y, 0), NULL_RTX);
6174 }
6175 else if (GET_CODE (y) == USE
6176 && ! (GET_CODE (XEXP (y, 0)) == REG
6177 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
6178 canon_reg (y, NULL_RTX);
6179 else if (GET_CODE (y) == CALL)
6180 {
6181 /* The result of apply_change_group can be ignored; see
6182 canon_reg. */
6183 canon_reg (y, insn);
6184 apply_change_group ();
6185 fold_rtx (y, insn);
6186 }
6187 }
6188 }
6189 else if (GET_CODE (x) == CLOBBER)
6190 {
6191 if (GET_CODE (XEXP (x, 0)) == MEM)
6192 canon_reg (XEXP (x, 0), NULL_RTX);
6193 }
6194
6195 /* Canonicalize a USE of a pseudo register or memory location. */
6196 else if (GET_CODE (x) == USE
6197 && ! (GET_CODE (XEXP (x, 0)) == REG
6198 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
6199 canon_reg (XEXP (x, 0), NULL_RTX);
6200 else if (GET_CODE (x) == CALL)
6201 {
6202 /* The result of apply_change_group can be ignored; see canon_reg. */
6203 canon_reg (x, insn);
6204 apply_change_group ();
6205 fold_rtx (x, insn);
6206 }
6207
6208 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
6209 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
6210 is handled specially for this case, and if it isn't set, then there will
6211 be no equivalence for the destination. */
6212 if (n_sets == 1 && REG_NOTES (insn) != 0
6213 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
6214 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
6215 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
6216 src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
6217
6218 /* Canonicalize sources and addresses of destinations.
6219 We do this in a separate pass to avoid problems when a MATCH_DUP is
6220 present in the insn pattern. In that case, we want to ensure that
6221 we don't break the duplicate nature of the pattern. So we will replace
6222 both operands at the same time. Otherwise, we would fail to find an
6223 equivalent substitution in the loop calling validate_change below.
6224
6225 We used to suppress canonicalization of DEST if it appears in SRC,
6226 but we don't do this any more. */
6227
6228 for (i = 0; i < n_sets; i++)
6229 {
6230 rtx dest = SET_DEST (sets[i].rtl);
6231 rtx src = SET_SRC (sets[i].rtl);
6232 rtx new = canon_reg (src, insn);
6233 int insn_code;
6234
6235 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
6236 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
6237 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
6238 || (insn_code = recog_memoized (insn)) < 0
6239 || insn_n_dups[insn_code] > 0)
6240 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
6241 else
6242 SET_SRC (sets[i].rtl) = new;
6243
6244 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
6245 {
6246 validate_change (insn, &XEXP (dest, 1),
6247 canon_reg (XEXP (dest, 1), insn), 1);
6248 validate_change (insn, &XEXP (dest, 2),
6249 canon_reg (XEXP (dest, 2), insn), 1);
6250 }
6251
6252 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
6253 || GET_CODE (dest) == ZERO_EXTRACT
6254 || GET_CODE (dest) == SIGN_EXTRACT)
6255 dest = XEXP (dest, 0);
6256
6257 if (GET_CODE (dest) == MEM)
6258 canon_reg (dest, insn);
6259 }
6260
6261 /* Now that we have done all the replacements, we can apply the change
6262 group and see if they all work. Note that this will cause some
6263 canonicalizations that would have worked individually not to be applied
6264 because some other canonicalization didn't work, but this should not
6265 occur often.
6266
6267 The result of apply_change_group can be ignored; see canon_reg. */
6268
6269 apply_change_group ();
6270
6271 /* Set sets[i].src_elt to the class each source belongs to.
6272 Detect assignments from or to volatile things
6273 and set set[i] to zero so they will be ignored
6274 in the rest of this function.
6275
6276 Nothing in this loop changes the hash table or the register chains. */
6277
6278 for (i = 0; i < n_sets; i++)
6279 {
6280 register rtx src, dest;
6281 register rtx src_folded;
6282 register struct table_elt *elt = 0, *p;
6283 enum machine_mode mode;
6284 rtx src_eqv_here;
6285 rtx src_const = 0;
6286 rtx src_related = 0;
6287 struct table_elt *src_const_elt = 0;
6288 int src_cost = 10000, src_eqv_cost = 10000, src_folded_cost = 10000;
6289 int src_related_cost = 10000, src_elt_cost = 10000;
6290       /* Set non-zero if we need to call force_const_mem on the
6291 contents of src_folded before using it. */
6292 int src_folded_force_flag = 0;
6293
6294 dest = SET_DEST (sets[i].rtl);
6295 src = SET_SRC (sets[i].rtl);
6296
6297 /* If SRC is a constant that has no machine mode,
6298 hash it with the destination's machine mode.
6299 This way we can keep different modes separate. */
6300
6301 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
6302 sets[i].mode = mode;
6303
6304 if (src_eqv)
6305 {
6306 enum machine_mode eqvmode = mode;
6307 if (GET_CODE (dest) == STRICT_LOW_PART)
6308 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
6309 do_not_record = 0;
6310 hash_arg_in_memory = 0;
6311 hash_arg_in_struct = 0;
6312 src_eqv = fold_rtx (src_eqv, insn);
6313 src_eqv_hash = HASH (src_eqv, eqvmode);
6314
6315 /* Find the equivalence class for the equivalent expression. */
6316
6317 if (!do_not_record)
6318 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
6319
6320 src_eqv_volatile = do_not_record;
6321 src_eqv_in_memory = hash_arg_in_memory;
6322 src_eqv_in_struct = hash_arg_in_struct;
6323 }
6324
6325 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
6326 value of the INNER register, not the destination. So it is not
6327 a valid substitution for the source. But save it for later. */
6328 if (GET_CODE (dest) == STRICT_LOW_PART)
6329 src_eqv_here = 0;
6330 else
6331 src_eqv_here = src_eqv;
6332
6333       /* Simplify any foldable subexpressions in SRC.  Then get the fully-
6334 simplified result, which may not necessarily be valid. */
6335 src_folded = fold_rtx (src, insn);
6336
6337 #if 0
6338 /* ??? This caused bad code to be generated for the m68k port with -O2.
6339 Suppose src is (CONST_INT -1), and that after truncation src_folded
6340 is (CONST_INT 3). Suppose src_folded is then used for src_const.
6341 At the end we will add src and src_const to the same equivalence
6342 class. We now have 3 and -1 on the same equivalence class. This
6343 causes later instructions to be mis-optimized. */
6344 /* If storing a constant in a bitfield, pre-truncate the constant
6345 so we will be able to record it later. */
6346 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
6347 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
6348 {
6349 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
6350
6351 if (GET_CODE (src) == CONST_INT
6352 && GET_CODE (width) == CONST_INT
6353 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
6354 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
6355 src_folded
6356 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
6357 << INTVAL (width)) - 1));
6358 }
6359 #endif
6360
6361 /* Compute SRC's hash code, and also notice if it
6362 should not be recorded at all. In that case,
6363 prevent any further processing of this assignment. */
6364 do_not_record = 0;
6365 hash_arg_in_memory = 0;
6366 hash_arg_in_struct = 0;
6367
6368 sets[i].src = src;
6369 sets[i].src_hash = HASH (src, mode);
6370 sets[i].src_volatile = do_not_record;
6371 sets[i].src_in_memory = hash_arg_in_memory;
6372 sets[i].src_in_struct = hash_arg_in_struct;
6373
6374 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
6375 a pseudo that is set more than once, do not record SRC. Using
6376 SRC as a replacement for anything else will be incorrect in that
6377 situation. Note that this usually occurs only for stack slots,
6378 	 in which case all the RTL would be referring to SRC, so we don't
6379 lose any optimization opportunities by not having SRC in the
6380 hash table. */
6381
6382 if (GET_CODE (src) == MEM
6383 && find_reg_note (insn, REG_EQUIV, src) != 0
6384 && GET_CODE (dest) == REG
6385 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
6386 && REG_N_SETS (REGNO (dest)) != 1)
6387 sets[i].src_volatile = 1;
6388
6389 #if 0
6390 /* It is no longer clear why we used to do this, but it doesn't
6391 appear to still be needed. So let's try without it since this
6392 code hurts cse'ing widened ops. */
6393 /* If source is a perverse subreg (such as QI treated as an SI),
6394 treat it as volatile. It may do the work of an SI in one context
6395 where the extra bits are not being used, but cannot replace an SI
6396 in general. */
6397 if (GET_CODE (src) == SUBREG
6398 && (GET_MODE_SIZE (GET_MODE (src))
6399 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
6400 sets[i].src_volatile = 1;
6401 #endif
6402
6403 /* Locate all possible equivalent forms for SRC. Try to replace
6404 SRC in the insn with each cheaper equivalent.
6405
6406 We have the following types of equivalents: SRC itself, a folded
6407 version, a value given in a REG_EQUAL note, or a value related
6408 to a constant.
6409
6410 Each of these equivalents may be part of an additional class
6411 of equivalents (if more than one is in the table, they must be in
6412 the same class; we check for this).
6413
6414 If the source is volatile, we don't do any table lookups.
6415
6416 We note any constant equivalent for possible later use in a
6417 REG_NOTE. */
6418
6419 if (!sets[i].src_volatile)
6420 elt = lookup (src, sets[i].src_hash, mode);
6421
6422 sets[i].src_elt = elt;
6423
6424 if (elt && src_eqv_here && src_eqv_elt)
6425 {
6426 if (elt->first_same_value != src_eqv_elt->first_same_value)
6427 {
6428 /* The REG_EQUAL is indicating that two formerly distinct
6429 classes are now equivalent. So merge them. */
6430 merge_equiv_classes (elt, src_eqv_elt);
6431 src_eqv_hash = HASH (src_eqv, elt->mode);
6432 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
6433 }
6434
6435 src_eqv_here = 0;
6436 }
6437
6438 else if (src_eqv_elt)
6439 elt = src_eqv_elt;
6440
6441 /* Try to find a constant somewhere and record it in `src_const'.
6442 Record its table element, if any, in `src_const_elt'. Look in
6443 any known equivalences first. (If the constant is not in the
6444 table, also set `sets[i].src_const_hash'). */
6445 if (elt)
6446 for (p = elt->first_same_value; p; p = p->next_same_value)
6447 if (p->is_const)
6448 {
6449 src_const = p->exp;
6450 src_const_elt = elt;
6451 break;
6452 }
6453
6454 if (src_const == 0
6455 && (CONSTANT_P (src_folded)
6456 /* Consider (minus (label_ref L1) (label_ref L2)) as
6457 "constant" here so we will record it. This allows us
6458 to fold switch statements when an ADDR_DIFF_VEC is used. */
6459 || (GET_CODE (src_folded) == MINUS
6460 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
6461 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
6462 src_const = src_folded, src_const_elt = elt;
6463 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
6464 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
6465
6466 /* If we don't know if the constant is in the table, get its
6467 hash code and look it up. */
6468 if (src_const && src_const_elt == 0)
6469 {
6470 sets[i].src_const_hash = HASH (src_const, mode);
6471 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
6472 }
6473
6474 sets[i].src_const = src_const;
6475 sets[i].src_const_elt = src_const_elt;
6476
6477 /* If the constant and our source are both in the table, mark them as
6478 equivalent. Otherwise, if a constant is in the table but the source
6479 isn't, set ELT to it. */
6480 if (src_const_elt && elt
6481 && src_const_elt->first_same_value != elt->first_same_value)
6482 merge_equiv_classes (elt, src_const_elt);
6483 else if (src_const_elt && elt == 0)
6484 elt = src_const_elt;
6485
6486 /* See if there is a register linearly related to a constant
6487 equivalent of SRC. */
6488 if (src_const
6489 && (GET_CODE (src_const) == CONST
6490 || (src_const_elt && src_const_elt->related_value != 0)))
6491 {
6492 src_related = use_related_value (src_const, src_const_elt);
6493 if (src_related)
6494 {
6495 struct table_elt *src_related_elt
6496 = lookup (src_related, HASH (src_related, mode), mode);
6497 if (src_related_elt && elt)
6498 {
6499 if (elt->first_same_value
6500 != src_related_elt->first_same_value)
6501 /* This can occur when we previously saw a CONST
6502 involving a SYMBOL_REF and then see the SYMBOL_REF
6503 twice. Merge the involved classes. */
6504 merge_equiv_classes (elt, src_related_elt);
6505
6506 src_related = 0;
6507 src_related_elt = 0;
6508 }
6509 else if (src_related_elt && elt == 0)
6510 elt = src_related_elt;
6511 }
6512 }
6513
6514 /* See if we have a CONST_INT that is already in a register in a
6515 wider mode. */
6516
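      /* Sketch with an invented register: if we need (const_int 5) in
	 HImode and (reg:SI 70) is already known to hold (const_int 5) in
	 SImode, then (subreg:HI (reg:SI 70) 0) is a usable, possibly
	 cheaper, source for this set.  */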
6517 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
6518 && GET_MODE_CLASS (mode) == MODE_INT
6519 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
6520 {
6521 enum machine_mode wider_mode;
6522
6523 for (wider_mode = GET_MODE_WIDER_MODE (mode);
6524 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
6525 && src_related == 0;
6526 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
6527 {
6528 struct table_elt *const_elt
6529 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
6530
6531 if (const_elt == 0)
6532 continue;
6533
6534 for (const_elt = const_elt->first_same_value;
6535 const_elt; const_elt = const_elt->next_same_value)
6536 if (GET_CODE (const_elt->exp) == REG)
6537 {
6538 src_related = gen_lowpart_if_possible (mode,
6539 const_elt->exp);
6540 break;
6541 }
6542 }
6543 }
6544
6545 /* Another possibility is that we have an AND with a constant in
6546 a mode narrower than a word. If so, it might have been generated
6547 as part of an "if" which would narrow the AND. If we already
6548 have done the AND in a wider mode, we can use a SUBREG of that
6549 value. */
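      /* A source-level sketch (names invented): after compiling
	 `if (x == (x & 0x0f))' the value
	 (and:SI (reg:SI 70) (const_int 15)) may already be in a register;
	 a later QImode `x & 0x0f' can then use a (subreg:QI ...) of that
	 register instead of a new AND.  */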
6550
6551 if (flag_expensive_optimizations && ! src_related
6552 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
6553 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6554 {
6555 enum machine_mode tmode;
6556 rtx new_and = gen_rtx (AND, VOIDmode, NULL_RTX, XEXP (src, 1));
6557
6558 for (tmode = GET_MODE_WIDER_MODE (mode);
6559 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6560 tmode = GET_MODE_WIDER_MODE (tmode))
6561 {
6562 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
6563 struct table_elt *larger_elt;
6564
6565 if (inner)
6566 {
6567 PUT_MODE (new_and, tmode);
6568 XEXP (new_and, 0) = inner;
6569 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
6570 if (larger_elt == 0)
6571 continue;
6572
6573 for (larger_elt = larger_elt->first_same_value;
6574 larger_elt; larger_elt = larger_elt->next_same_value)
6575 if (GET_CODE (larger_elt->exp) == REG)
6576 {
6577 src_related
6578 = gen_lowpart_if_possible (mode, larger_elt->exp);
6579 break;
6580 }
6581
6582 if (src_related)
6583 break;
6584 }
6585 }
6586 }
6587
6588 #ifdef LOAD_EXTEND_OP
6589 /* See if a MEM has already been loaded with a widening operation;
6590 if it has, we can use a subreg of that. Many CISC machines
6591 also have such operations, but this is only likely to be
6592 	 beneficial on these machines.  */
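  /* Sketch (register invented): if (reg:SI 70) was loaded as
     (zero_extend:SI (mem:HI addr)) and LOAD_EXTEND_OP (HImode) is
     ZERO_EXTEND, a later read of (mem:HI addr) can use
     (subreg:HI (reg:SI 70) 0) instead of reloading from memory.  */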
6593
6594 if (flag_expensive_optimizations && src_related == 0
6595 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6596 && GET_MODE_CLASS (mode) == MODE_INT
6597 && GET_CODE (src) == MEM && ! do_not_record
6598 && LOAD_EXTEND_OP (mode) != NIL)
6599 {
6600 enum machine_mode tmode;
6601
6602 /* Set what we are trying to extend and the operation it might
6603 have been extended with. */
6604 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
6605 XEXP (memory_extend_rtx, 0) = src;
6606
6607 for (tmode = GET_MODE_WIDER_MODE (mode);
6608 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6609 tmode = GET_MODE_WIDER_MODE (tmode))
6610 {
6611 struct table_elt *larger_elt;
6612
6613 PUT_MODE (memory_extend_rtx, tmode);
6614 larger_elt = lookup (memory_extend_rtx,
6615 HASH (memory_extend_rtx, tmode), tmode);
6616 if (larger_elt == 0)
6617 continue;
6618
6619 for (larger_elt = larger_elt->first_same_value;
6620 larger_elt; larger_elt = larger_elt->next_same_value)
6621 if (GET_CODE (larger_elt->exp) == REG)
6622 {
6623 src_related = gen_lowpart_if_possible (mode,
6624 larger_elt->exp);
6625 break;
6626 }
6627
6628 if (src_related)
6629 break;
6630 }
6631 }
6632 #endif /* LOAD_EXTEND_OP */
6633
6634 if (src == src_folded)
6635 src_folded = 0;
6636
6637 /* At this point, ELT, if non-zero, points to a class of expressions
6638 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
6639 and SRC_RELATED, if non-zero, each contain additional equivalent
6640 expressions. Prune these latter expressions by deleting expressions
6641 already in the equivalence class.
6642
6643 Check for an equivalent identical to the destination. If found,
6644 this is the preferred equivalent since it will likely lead to
6645 elimination of the insn. Indicate this by placing it in
6646 `src_related'. */
6647
6648 if (elt) elt = elt->first_same_value;
6649 for (p = elt; p; p = p->next_same_value)
6650 {
6651 enum rtx_code code = GET_CODE (p->exp);
6652
6653 /* If the expression is not valid, ignore it. Then we do not
6654 have to check for validity below. In most cases, we can use
6655 `rtx_equal_p', since canonicalization has already been done. */
6656 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
6657 continue;
6658
6659 /* Also skip paradoxical subregs, unless that's what we're
6660 looking for. */
6661 if (code == SUBREG
6662 && (GET_MODE_SIZE (GET_MODE (p->exp))
6663 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
6664 && ! (src != 0
6665 && GET_CODE (src) == SUBREG
6666 && GET_MODE (src) == GET_MODE (p->exp)
6667 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
6668 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
6669 continue;
6670
6671 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
6672 src = 0;
6673 else if (src_folded && GET_CODE (src_folded) == code
6674 && rtx_equal_p (src_folded, p->exp))
6675 src_folded = 0;
6676 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
6677 && rtx_equal_p (src_eqv_here, p->exp))
6678 src_eqv_here = 0;
6679 else if (src_related && GET_CODE (src_related) == code
6680 && rtx_equal_p (src_related, p->exp))
6681 src_related = 0;
6682
6683 	  /* If this is the same as the destination of the insn, we want
6684 to prefer it. Copy it to src_related. The code below will
6685 then give it a negative cost. */
6686 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
6687 src_related = dest;
6688
6689 }
6690
6691 /* Find the cheapest valid equivalent, trying all the available
6692 	 possibilities.  Prefer items not in the hash table to ones
6693 	 that are when their costs are equal.  Note that we can never
6694 worsen an insn as the current contents will also succeed.
6695 If we find an equivalent identical to the destination, use it as best,
6696 since this insn will probably be eliminated in that case. */
6697 if (src)
6698 {
6699 if (rtx_equal_p (src, dest))
6700 src_cost = -1;
6701 else
6702 src_cost = COST (src);
6703 }
6704
6705 if (src_eqv_here)
6706 {
6707 if (rtx_equal_p (src_eqv_here, dest))
6708 src_eqv_cost = -1;
6709 else
6710 src_eqv_cost = COST (src_eqv_here);
6711 }
6712
6713 if (src_folded)
6714 {
6715 if (rtx_equal_p (src_folded, dest))
6716 src_folded_cost = -1;
6717 else
6718 src_folded_cost = COST (src_folded);
6719 }
6720
6721 if (src_related)
6722 {
6723 if (rtx_equal_p (src_related, dest))
6724 src_related_cost = -1;
6725 else
6726 src_related_cost = COST (src_related);
6727 }
6728
6729 /* If this was an indirect jump insn, a known label will really be
6730 cheaper even though it looks more expensive. */
6731 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
6732 src_folded = src_const, src_folded_cost = -1;
6733
6734 /* Terminate loop when replacement made. This must terminate since
6735 the current contents will be tested and will always be valid. */
6736 while (1)
6737 {
6738 rtx trial;
6739
6740 /* Skip invalid entries. */
6741 while (elt && GET_CODE (elt->exp) != REG
6742 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6743 elt = elt->next_same_value;
6744
6745 /* A paradoxical subreg would be bad here: it'll be the right
6746 size, but later may be adjusted so that the upper bits aren't
6747 what we want. So reject it. */
6748 if (elt != 0
6749 && GET_CODE (elt->exp) == SUBREG
6750 && (GET_MODE_SIZE (GET_MODE (elt->exp))
6751 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
6752 /* It is okay, though, if the rtx we're trying to match
6753 will ignore any of the bits we can't predict. */
6754 && ! (src != 0
6755 && GET_CODE (src) == SUBREG
6756 && GET_MODE (src) == GET_MODE (elt->exp)
6757 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
6758 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
6759 {
6760 elt = elt->next_same_value;
6761 continue;
6762 }
6763
6764 if (elt) src_elt_cost = elt->cost;
6765
6766 /* Find cheapest and skip it for the next time. For items
6767 of equal cost, use this order:
6768 src_folded, src, src_eqv, src_related and hash table entry. */
6769 if (src_folded_cost <= src_cost
6770 && src_folded_cost <= src_eqv_cost
6771 && src_folded_cost <= src_related_cost
6772 && src_folded_cost <= src_elt_cost)
6773 {
6774 trial = src_folded, src_folded_cost = 10000;
6775 if (src_folded_force_flag)
6776 trial = force_const_mem (mode, trial);
6777 }
6778 else if (src_cost <= src_eqv_cost
6779 && src_cost <= src_related_cost
6780 && src_cost <= src_elt_cost)
6781 trial = src, src_cost = 10000;
6782 else if (src_eqv_cost <= src_related_cost
6783 && src_eqv_cost <= src_elt_cost)
6784 trial = copy_rtx (src_eqv_here), src_eqv_cost = 10000;
6785 else if (src_related_cost <= src_elt_cost)
6786 trial = copy_rtx (src_related), src_related_cost = 10000;
6787 else
6788 {
6789 trial = copy_rtx (elt->exp);
6790 elt = elt->next_same_value;
6791 src_elt_cost = 10000;
6792 }
6793
6794 /* We don't normally have an insn matching (set (pc) (pc)), so
6795 check for this separately here. We will delete such an
6796 insn below.
6797
6798 Tablejump insns contain a USE of the table, so simply replacing
6799 the operand with the constant won't match. This is simply an
6800 unconditional branch, however, and is therefore valid. Just
6801 insert the substitution here and we will delete and re-emit
6802 the insn later. */
6803
6804 if (n_sets == 1 && dest == pc_rtx
6805 && (trial == pc_rtx
6806 || (GET_CODE (trial) == LABEL_REF
6807 && ! condjump_p (insn))))
6808 {
6809 /* If TRIAL is a label in front of a jump table, we are
6810 really falling through the switch (this is how casesi
6811 insns work), so we must branch around the table. */
6812 if (GET_CODE (trial) == CODE_LABEL
6813 && NEXT_INSN (trial) != 0
6814 && GET_CODE (NEXT_INSN (trial)) == JUMP_INSN
6815 && (GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_DIFF_VEC
6816 || GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_VEC))
6817
6818 trial = gen_rtx (LABEL_REF, Pmode, get_label_after (trial));
6819
6820 SET_SRC (sets[i].rtl) = trial;
6821 cse_jumps_altered = 1;
6822 break;
6823 }
6824
6825 /* Look for a substitution that makes a valid insn. */
6826 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
6827 {
6828 /* The result of apply_change_group can be ignored; see
6829 canon_reg. */
6830
6831 validate_change (insn, &SET_SRC (sets[i].rtl),
6832 canon_reg (SET_SRC (sets[i].rtl), insn),
6833 1);
6834 apply_change_group ();
6835 break;
6836 }
6837
6838 /* If we previously found constant pool entries for
6839 constants and this is a constant, try making a
6840 pool entry. Put it in src_folded unless we already have done
6841 this since that is where it likely came from. */
6842
6843 else if (constant_pool_entries_cost
6844 && CONSTANT_P (trial)
6845 && ! (GET_CODE (trial) == CONST
6846 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
6847 && (src_folded == 0
6848 || (GET_CODE (src_folded) != MEM
6849 && ! src_folded_force_flag))
6850 && GET_MODE_CLASS (mode) != MODE_CC
6851 && mode != VOIDmode)
6852 {
6853 src_folded_force_flag = 1;
6854 src_folded = trial;
6855 src_folded_cost = constant_pool_entries_cost;
6856 }
6857 }
6858
6859 src = SET_SRC (sets[i].rtl);
6860
6861 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
6862 However, there is an important exception: If both are registers
6863 that are not the head of their equivalence class, replace SET_SRC
6864 with the head of the class. If we do not do this, we will have
6865 both registers live over a portion of the basic block. This way,
6866 their lifetimes will likely abut instead of overlapping. */
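  /* Sketch with invented pseudos: given (set (reg 71) (reg 71)) where
     reg 71's quantity has reg 70 as its first (head) register, we rewrite
     the source to reg 70, giving (set (reg 71) (reg 70)); the two
     lifetimes then abut rather than overlap.  */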
6867 if (GET_CODE (dest) == REG
6868 && REGNO_QTY_VALID_P (REGNO (dest))
6869 && qty_mode[reg_qty[REGNO (dest)]] == GET_MODE (dest)
6870 && qty_first_reg[reg_qty[REGNO (dest)]] != REGNO (dest)
6871 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
6872 /* Don't do this if the original insn had a hard reg as
6873 SET_SRC. */
6874 && (GET_CODE (sets[i].src) != REG
6875 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER))
6876 /* We can't call canon_reg here because it won't do anything if
6877 SRC is a hard register. */
6878 {
6879 int first = qty_first_reg[reg_qty[REGNO (src)]];
6880
6881 src = SET_SRC (sets[i].rtl)
6882 = first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
6883 : gen_rtx (REG, GET_MODE (src), first);
6884
6885 /* If we had a constant that is cheaper than what we are now
6886 setting SRC to, use that constant. We ignored it when we
6887 thought we could make this into a no-op. */
6888 if (src_const && COST (src_const) < COST (src)
6889 && validate_change (insn, &SET_SRC (sets[i].rtl), src_const, 0))
6890 src = src_const;
6891 }
6892
6893 /* If we made a change, recompute SRC values. */
6894 if (src != sets[i].src)
6895 {
6896 do_not_record = 0;
6897 hash_arg_in_memory = 0;
6898 hash_arg_in_struct = 0;
6899 sets[i].src = src;
6900 sets[i].src_hash = HASH (src, mode);
6901 sets[i].src_volatile = do_not_record;
6902 sets[i].src_in_memory = hash_arg_in_memory;
6903 sets[i].src_in_struct = hash_arg_in_struct;
6904 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
6905 }
6906
6907 /* If this is a single SET, we are setting a register, and we have an
6908 equivalent constant, we want to add a REG_NOTE. We don't want
6909 to write a REG_EQUAL note for a constant pseudo since verifying that
6910 that pseudo hasn't been eliminated is a pain. Such a note also
6911 won't help anything. */
6912 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
6913 && GET_CODE (src_const) != REG)
6914 {
6915 tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6916
6917 /* Record the actual constant value in a REG_EQUAL note, making
6918 a new one if one does not already exist. */
6919 if (tem)
6920 XEXP (tem, 0) = src_const;
6921 else
6922 REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_EQUAL,
6923 src_const, REG_NOTES (insn));
6924
6925 /* If storing a constant value in a register that
6926 previously held the constant value 0,
6927 record this fact with a REG_WAS_0 note on this insn.
6928
6929 Note that the *register* is required to have previously held 0,
6930 not just any register in the quantity and we must point to the
6931 insn that set that register to zero.
6932
6933 Rather than track each register individually, we just see if
6934 the last set for this quantity was for this register. */
6935
6936 if (REGNO_QTY_VALID_P (REGNO (dest))
6937 && qty_const[reg_qty[REGNO (dest)]] == const0_rtx)
6938 {
6939 /* See if we previously had a REG_WAS_0 note. */
6940 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
6941 rtx const_insn = qty_const_insn[reg_qty[REGNO (dest)]];
6942
6943 if ((tem = single_set (const_insn)) != 0
6944 && rtx_equal_p (SET_DEST (tem), dest))
6945 {
6946 if (note)
6947 XEXP (note, 0) = const_insn;
6948 else
6949 REG_NOTES (insn) = gen_rtx (INSN_LIST, REG_WAS_0,
6950 const_insn, REG_NOTES (insn));
6951 }
6952 }
6953 }
6954
6955 /* Now deal with the destination. */
6956 do_not_record = 0;
6957   sets[i].inner_dest_loc = &SET_DEST (sets[i].rtl);
6958
6959 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
6960 to the MEM or REG within it. */
6961 while (GET_CODE (dest) == SIGN_EXTRACT
6962 || GET_CODE (dest) == ZERO_EXTRACT
6963 || GET_CODE (dest) == SUBREG
6964 || GET_CODE (dest) == STRICT_LOW_PART)
6965 {
6966 sets[i].inner_dest_loc = &XEXP (dest, 0);
6967 dest = XEXP (dest, 0);
6968 }
6969
6970 sets[i].inner_dest = dest;
6971
6972 if (GET_CODE (dest) == MEM)
6973 {
6974 #ifdef PUSH_ROUNDING
6975 /* Stack pushes invalidate the stack pointer. */
6976 rtx addr = XEXP (dest, 0);
6977 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
6978 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
6979 && XEXP (addr, 0) == stack_pointer_rtx)
6980 invalidate (stack_pointer_rtx, Pmode);
6981 #endif
6982 dest = fold_rtx (dest, insn);
6983 }
6984
6985 /* Compute the hash code of the destination now,
6986 before the effects of this instruction are recorded,
6987 since the register values used in the address computation
6988 are those before this instruction. */
6989 sets[i].dest_hash = HASH (dest, mode);
6990
6991 /* Don't enter a bit-field in the hash table
6992 because the value in it after the store
6993 may not equal what was stored, due to truncation. */
6994
6995 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
6996 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
6997 {
6998 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
6999
7000 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
7001 && GET_CODE (width) == CONST_INT
7002 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
7003 && ! (INTVAL (src_const)
7004 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
7005 /* Exception: if the value is constant,
7006 and it won't be truncated, record it. */
7007 ;
7008 else
7009 {
7010 /* This is chosen so that the destination will be invalidated
7011 but no new value will be recorded.
7012 We must invalidate because sometimes constant
7013 values can be recorded for bitfields. */
7014 sets[i].src_elt = 0;
7015 sets[i].src_volatile = 1;
7016 src_eqv = 0;
7017 src_eqv_elt = 0;
7018 }
7019 }
7020
7021 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
7022 the insn. */
7023 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
7024 {
7025 PUT_CODE (insn, NOTE);
7026 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7027 NOTE_SOURCE_FILE (insn) = 0;
7028 cse_jumps_altered = 1;
7029 /* One less use of the label this insn used to jump to. */
7030 --LABEL_NUSES (JUMP_LABEL (insn));
7031 /* No more processing for this set. */
7032 sets[i].rtl = 0;
7033 }
7034
7035 /* If this SET is now setting PC to a label, we know it used to
7036 be a conditional or computed branch. So we see if we can follow
7037 it. If it was a computed branch, delete it and re-emit. */
7038 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
7039 {
7040 rtx p;
7041
7042 /* If this is not in the format for a simple branch and
7043 	     this is the only SET in it, re-emit it.  */
7044 if (! simplejump_p (insn) && n_sets == 1)
7045 {
7046 rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
7047 JUMP_LABEL (new) = XEXP (src, 0);
7048 LABEL_NUSES (XEXP (src, 0))++;
7049 delete_insn (insn);
7050 insn = new;
7051 }
7052 else
7053 /* Otherwise, force rerecognition, since it probably had
7054 a different pattern before.
7055 This shouldn't really be necessary, since whatever
7056 changed the source value above should have done this.
7057 Until the right place is found, might as well do this here. */
7058 INSN_CODE (insn) = -1;
7059
7060 /* Now that we've converted this jump to an unconditional jump,
7061 there is dead code after it. Delete the dead code until we
7062 reach a BARRIER, the end of the function, or a label. Do
7063 not delete NOTEs except for NOTE_INSN_DELETED since later
7064 phases assume these notes are retained. */
7065
7066 p = insn;
7067
7068 while (NEXT_INSN (p) != 0
7069 && GET_CODE (NEXT_INSN (p)) != BARRIER
7070 && GET_CODE (NEXT_INSN (p)) != CODE_LABEL)
7071 {
7072 if (GET_CODE (NEXT_INSN (p)) != NOTE
7073 || NOTE_LINE_NUMBER (NEXT_INSN (p)) == NOTE_INSN_DELETED)
7074 delete_insn (NEXT_INSN (p));
7075 else
7076 p = NEXT_INSN (p);
7077 }
7078
7079 /* If we don't have a BARRIER immediately after INSN, put one there.
7080 Much code assumes that there are no NOTEs between a JUMP_INSN and
7081 a BARRIER. */
7082
7083 if (NEXT_INSN (insn) == 0
7084 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
7085 emit_barrier_before (NEXT_INSN (insn));
7086
7087 /* We might have two BARRIERs separated by notes. Delete the second
7088 one if so. */
7089
7090 if (p != insn && NEXT_INSN (p) != 0
7091 && GET_CODE (NEXT_INSN (p)) == BARRIER)
7092 delete_insn (NEXT_INSN (p));
7093
7094 cse_jumps_altered = 1;
7095 sets[i].rtl = 0;
7096 }
7097
7098 /* If the destination is volatile, invalidate it and then do no further
7099 processing for this assignment. */
7100
7101 else if (do_not_record)
7102 {
7103 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7104 || GET_CODE (dest) == MEM)
7105 invalidate (dest, VOIDmode);
7106 else if (GET_CODE (dest) == STRICT_LOW_PART
7107 || GET_CODE (dest) == ZERO_EXTRACT)
7108 invalidate (XEXP (dest, 0), GET_MODE (dest));
7109 sets[i].rtl = 0;
7110 }
7111
7112 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
7113 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
7114
7115 #ifdef HAVE_cc0
7116 /* If setting CC0, record what it was set to, or a constant, if it
7117 is equivalent to a constant. If it is being set to a floating-point
7118 value, make a COMPARE with the appropriate constant of 0. If we
7119 don't do this, later code can interpret this as a test against
7120 const0_rtx, which can cause problems if we try to put it into an
7121 insn as a floating-point operand. */
7122 if (dest == cc0_rtx)
7123 {
7124 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
7125 this_insn_cc0_mode = mode;
7126 if (FLOAT_MODE_P (mode))
7127 this_insn_cc0 = gen_rtx (COMPARE, VOIDmode, this_insn_cc0,
7128 CONST0_RTX (mode));
7129 }
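	/* A sketch of the FLOAT_MODE_P case (illustrative values): after
	   (set (cc0) (reg:SF 42)), this_insn_cc0 is recorded as
	   (compare (reg:SF 42) (const_double 0.0)) rather than as the
	   bare register, so later code cannot misread it as a test
	   against const0_rtx.  */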
7130 #endif
7131 }
7132
7133 /* Now enter all non-volatile source expressions in the hash table
7134 if they are not already present.
7135 Record their equivalence classes in src_elt.
7136 This way we can insert the corresponding destinations into
7137 the same classes even if the actual sources are no longer in them
7138 (having been invalidated). */
7139
7140 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
7141 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
7142 {
7143 register struct table_elt *elt;
7144 register struct table_elt *classp = sets[0].src_elt;
7145 rtx dest = SET_DEST (sets[0].rtl);
7146 enum machine_mode eqvmode = GET_MODE (dest);
7147
7148 if (GET_CODE (dest) == STRICT_LOW_PART)
7149 {
7150 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
7151 classp = 0;
7152 }
7153 if (insert_regs (src_eqv, classp, 0))
7154 {
7155 rehash_using_reg (src_eqv);
7156 src_eqv_hash = HASH (src_eqv, eqvmode);
7157 }
7158 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
7159 elt->in_memory = src_eqv_in_memory;
7160 elt->in_struct = src_eqv_in_struct;
7161 src_eqv_elt = elt;
7162
7163 /* Check to see if src_eqv_elt is the same as a set source which
7164 does not yet have an elt, and if so set the elt of the set source
7165 to src_eqv_elt. */
7166 for (i = 0; i < n_sets; i++)
7167 if (sets[i].rtl && sets[i].src_elt == 0
7168 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
7169 sets[i].src_elt = src_eqv_elt;
7170 }
7171
7172 for (i = 0; i < n_sets; i++)
7173 if (sets[i].rtl && ! sets[i].src_volatile
7174 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
7175 {
7176 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
7177 {
7178 /* REG_EQUAL in setting a STRICT_LOW_PART
7179 gives an equivalent for the entire destination register,
7180 not just for the subreg being stored in now.
7181 This is a more interesting equivalence, so we arrange later
7182 to treat the entire reg as the destination. */
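	    /* E.g. for (set (strict_low_part (subreg:QI (reg:SI 80) 0))
	       (reg:QI 81)) with a REG_EQUAL note (illustrative pseudos),
	       the note describes all of (reg:SI 80), so the note's
	       equivalence class is the one used here.  */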
7183 sets[i].src_elt = src_eqv_elt;
7184 sets[i].src_hash = src_eqv_hash;
7185 }
7186 else
7187 {
7188 /* Insert source and constant equivalent into hash table, if not
7189 already present. */
7190 register struct table_elt *classp = src_eqv_elt;
7191 register rtx src = sets[i].src;
7192 register rtx dest = SET_DEST (sets[i].rtl);
7193 enum machine_mode mode
7194 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
7195
7196 if (sets[i].src_elt == 0)
7197 {
7198 register struct table_elt *elt;
7199
7200 /* Note that these insert_regs calls cannot remove
7201 any of the src_elt's, because they would have failed to
7202 match if not still valid. */
7203 if (insert_regs (src, classp, 0))
7204 {
7205 rehash_using_reg (src);
7206 sets[i].src_hash = HASH (src, mode);
7207 }
7208 elt = insert (src, classp, sets[i].src_hash, mode);
7209 elt->in_memory = sets[i].src_in_memory;
7210 elt->in_struct = sets[i].src_in_struct;
7211 sets[i].src_elt = classp = elt;
7212 }
7213
7214 if (sets[i].src_const && sets[i].src_const_elt == 0
7215 && src != sets[i].src_const
7216 && ! rtx_equal_p (sets[i].src_const, src))
7217 sets[i].src_elt = insert (sets[i].src_const, classp,
7218 sets[i].src_const_hash, mode);
7219 }
7220 }
7221 else if (sets[i].src_elt == 0)
7222 /* If we did not insert the source into the hash table (e.g., it was
7223 volatile), note the equivalence class for the REG_EQUAL value, if any,
7224 so that the destination goes into that class. */
7225 sets[i].src_elt = src_eqv_elt;
7226
7227 invalidate_from_clobbers (x);
7228
7229 /* Some registers are invalidated by subroutine calls. Memory is
7230 invalidated by non-constant calls. */
7231
7232 if (GET_CODE (insn) == CALL_INSN)
7233 {
7234 if (! CONST_CALL_P (insn))
7235 invalidate_memory ();
7236 invalidate_for_call ();
7237 }
7238
7239 /* Now invalidate everything set by this instruction.
7240 If a SUBREG or other funny destination is being set,
7241 sets[i].rtl is still nonzero, so here we invalidate the reg
7242 a part of which is being set. */
7243
7244 for (i = 0; i < n_sets; i++)
7245 if (sets[i].rtl)
7246 {
7247 /* We can't use the inner dest, because the mode associated with
7248 a ZERO_EXTRACT is significant. */
7249 register rtx dest = SET_DEST (sets[i].rtl);
7250
7251 /* Needed for registers to remove the register from its
7252 previous quantity's chain.
7253 Needed for memory if this is a nonvarying address, unless
7254 we have just done an invalidate_memory that covers even those. */
7255 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7256 || GET_CODE (dest) == MEM)
7257 invalidate (dest, VOIDmode);
7258 else if (GET_CODE (dest) == STRICT_LOW_PART
7259 || GET_CODE (dest) == ZERO_EXTRACT)
7260 invalidate (XEXP (dest, 0), GET_MODE (dest));
7261 }
7262
7263 /* Make sure registers mentioned in destinations
7264 are safe for use in an expression to be inserted.
7265 This removes from the hash table
7266 any invalid entry that refers to one of these registers.
7267
7268 We don't care about the return value from mention_regs because
7269 we are going to hash the SET_DEST values unconditionally. */
7270
7271 for (i = 0; i < n_sets; i++)
7272 if (sets[i].rtl && GET_CODE (SET_DEST (sets[i].rtl)) != REG)
7273 mention_regs (SET_DEST (sets[i].rtl));
7274
7275 /* We may have just removed some of the src_elt's from the hash table.
7276 So replace each one with the current head of the same class. */
7277
7278 for (i = 0; i < n_sets; i++)
7279 if (sets[i].rtl)
7280 {
7281 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
7282 /* If elt was removed, find current head of same class,
7283 or 0 if nothing remains of that class. */
7284 {
7285 register struct table_elt *elt = sets[i].src_elt;
7286
7287 while (elt && elt->prev_same_value)
7288 elt = elt->prev_same_value;
7289
7290 while (elt && elt->first_same_value == 0)
7291 elt = elt->next_same_value;
7292 sets[i].src_elt = elt ? elt->first_same_value : 0;
7293 }
7294 }
7295
7296 /* Now insert the destinations into their equivalence classes. */
7297
7298 for (i = 0; i < n_sets; i++)
7299 if (sets[i].rtl)
7300 {
7301 register rtx dest = SET_DEST (sets[i].rtl);
7302 register struct table_elt *elt;
7303
7304 /* Don't record value if we are not supposed to risk allocating
7305 floating-point values in registers that might be wider than
7306 memory. */
7307 if ((flag_float_store
7308 && GET_CODE (dest) == MEM
7309 && FLOAT_MODE_P (GET_MODE (dest)))
7310 /* Don't record BLKmode values, because we don't know the
7311 size of the object, and can't be sure that other BLKmode values
7312 have the same or smaller size. */
7313 || GET_MODE (dest) == BLKmode
7314 /* Don't record values of destinations set inside a libcall block
7315 since we might delete the libcall. Things should have been set
7316 up so we won't want to reuse such a value, but we play it safe
7317 here. */
7318 || in_libcall_block
7319 /* If we didn't put a REG_EQUAL value or a source into the hash
7320 table, there is no point in recording DEST. */
7321 || sets[i].src_elt == 0
7322 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
7323 or SIGN_EXTEND, don't record DEST since it can cause
7324 some tracking to be wrong.
7325
7326 ??? Think about this more later. */
7327 || (GET_CODE (dest) == SUBREG
7328 && (GET_MODE_SIZE (GET_MODE (dest))
7329 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7330 && (GET_CODE (sets[i].src) == SIGN_EXTEND
7331 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
7332 continue;
7333
7334 /* STRICT_LOW_PART isn't part of the value BEING set,
7335 and neither is the SUBREG inside it.
7336 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
7337 if (GET_CODE (dest) == STRICT_LOW_PART)
7338 dest = SUBREG_REG (XEXP (dest, 0));
7339
7340 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
7341 /* Registers must also be inserted into chains for quantities. */
7342 if (insert_regs (dest, sets[i].src_elt, 1))
7343 {
7344 /* If `insert_regs' changes something, the hash code must be
7345 recalculated. */
7346 rehash_using_reg (dest);
7347 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
7348 }
7349
7350 elt = insert (dest, sets[i].src_elt,
7351 sets[i].dest_hash, GET_MODE (dest));
7352 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
7353 && (! RTX_UNCHANGING_P (sets[i].inner_dest)
7354 || FIXED_BASE_PLUS_P (XEXP (sets[i].inner_dest,
7355 0))));
7356
7357 if (elt->in_memory)
7358 {
7359 /* This implicitly assumes a whole struct
7360 need not have MEM_IN_STRUCT_P.
7361 But a whole struct is *supposed* to have MEM_IN_STRUCT_P. */
7362 elt->in_struct = (MEM_IN_STRUCT_P (sets[i].inner_dest)
7363 || sets[i].inner_dest != SET_DEST (sets[i].rtl));
7364 }
7365
7366 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
7367 narrower than M2, and both M1 and M2 are the same number of words,
7368 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
7369 make that equivalence as well.
7370
7371 However, BAR may have equivalences for which gen_lowpart_if_possible
7372 produces a simpler value than it does for BAR itself (e.g., if BAR
7373 was ZERO_EXTENDed from M2), so we scan all of BAR's equivalences.
7374 If we don't get a simplified form, make
7375 the SUBREG. It will not be used in an equivalence, but will
7376 cause two similar assignments to be detected.
7377
7378 Note the loop below will find SUBREG_REG (DEST) since we have
7379 already entered SRC and DEST of the SET in the table. */
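      /* Concretely (a sketch, assuming a 32-bit-word target): given
	 (set (subreg:SI (reg:HI 100) 0) (reg:SI 101)), SImode and HImode
	 each fit in a single word, so (reg:HI 100) also becomes
	 equivalent to (subreg:HI (reg:SI 101) 0).  */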
7380
7381 if (GET_CODE (dest) == SUBREG
7382 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
7383 / UNITS_PER_WORD)
7384 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
7385 && (GET_MODE_SIZE (GET_MODE (dest))
7386 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7387 && sets[i].src_elt != 0)
7388 {
7389 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
7390 struct table_elt *elt, *classp = 0;
7391
7392 for (elt = sets[i].src_elt->first_same_value; elt;
7393 elt = elt->next_same_value)
7394 {
7395 rtx new_src = 0;
7396 unsigned src_hash;
7397 struct table_elt *src_elt;
7398
7399 /* Ignore invalid entries. */
7400 if (GET_CODE (elt->exp) != REG
7401 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
7402 continue;
7403
7404 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
7405 if (new_src == 0)
7406 new_src = gen_rtx (SUBREG, new_mode, elt->exp, 0);
7407
7408 src_hash = HASH (new_src, new_mode);
7409 src_elt = lookup (new_src, src_hash, new_mode);
7410
7411 /* Put the new source in the hash table if it isn't
7412 already there. */
7413 if (src_elt == 0)
7414 {
7415 if (insert_regs (new_src, classp, 0))
7416 {
7417 rehash_using_reg (new_src);
7418 src_hash = HASH (new_src, new_mode);
7419 }
7420 src_elt = insert (new_src, classp, src_hash, new_mode);
7421 src_elt->in_memory = elt->in_memory;
7422 src_elt->in_struct = elt->in_struct;
7423 }
7424 else if (classp && classp != src_elt->first_same_value)
7425 /* Show that two things that we've seen before are
7426 actually the same. */
7427 merge_equiv_classes (src_elt, classp);
7428
7429 classp = src_elt->first_same_value;
7430 }
7431 }
7432 }
7433
7434 /* Special handling for (set REG0 REG1)
7435 where REG0 is the "cheapest", cheaper than REG1.
7436 After cse, REG1 will probably not be used in the sequel,
7437 so (if easily done) change this insn to (set REG1 REG0) and
7438 replace REG1 with REG0 in the previous insn that computed their value.
7439 Then REG1 will become a dead store and won't cloud the situation
7440 for later optimizations.
7441
7442 Do not make this change if REG1 is a hard register, because it will
7443 then be used in the sequel and we may be changing a two-operand insn
7444 into a three-operand insn.
7445
7446 Also do not do this if we are operating on a copy of INSN. */
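   /* E.g. (illustrative pseudos):
	(set (reg:SI 101) (plus:SI (reg:SI 99) (const_int 4)))
	(set (reg:SI 100) (reg:SI 101))
      with pseudo 100 the cheaper register becomes
	(set (reg:SI 100) (plus:SI (reg:SI 99) (const_int 4)))
	(set (reg:SI 101) (reg:SI 100))
      and the second insn is now a dead store.  */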
7447
7448 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
7449 && NEXT_INSN (PREV_INSN (insn)) == insn
7450 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
7451 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
7452 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl)))
7453 && (qty_first_reg[reg_qty[REGNO (SET_SRC (sets[0].rtl))]]
7454 == REGNO (SET_DEST (sets[0].rtl))))
7455 {
7456 rtx prev = PREV_INSN (insn);
7457 while (prev && GET_CODE (prev) == NOTE)
7458 prev = PREV_INSN (prev);
7459
7460 if (prev && GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SET
7461 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl))
7462 {
7463 rtx dest = SET_DEST (sets[0].rtl);
7464 rtx note = find_reg_note (prev, REG_EQUIV, NULL_RTX);
7465
7466 validate_change (prev, & SET_DEST (PATTERN (prev)), dest, 1);
7467 validate_change (insn, & SET_DEST (sets[0].rtl),
7468 SET_SRC (sets[0].rtl), 1);
7469 validate_change (insn, & SET_SRC (sets[0].rtl), dest, 1);
7470 apply_change_group ();
7471
7472 /* If REG1 was equivalent to a constant, REG0 is not. */
7473 if (note)
7474 PUT_REG_NOTE_KIND (note, REG_EQUAL);
7475
7476 /* If there was a REG_WAS_0 note on PREV, remove it. Move
7477 any REG_WAS_0 note on INSN to PREV. */
7478 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
7479 if (note)
7480 remove_note (prev, note);
7481
7482 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
7483 if (note)
7484 {
7485 remove_note (insn, note);
7486 XEXP (note, 1) = REG_NOTES (prev);
7487 REG_NOTES (prev) = note;
7488 }
7489
7490 /* If INSN has a REG_EQUAL note, and this note mentions REG0,
7491 then we must delete it, because the value in REG0 has changed. */
7492 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7493 if (note && reg_mentioned_p (dest, XEXP (note, 0)))
7494 remove_note (insn, note);
7495 }
7496 }
7497
7498 /* If this is a conditional jump insn, record any known equivalences due to
7499 the condition being tested. */
7500
7501 last_jump_equiv_class = 0;
7502 if (GET_CODE (insn) == JUMP_INSN
7503 && n_sets == 1 && GET_CODE (x) == SET
7504 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
7505 record_jump_equiv (insn, 0);
7506
7507 #ifdef HAVE_cc0
7508 /* If the previous insn set CC0 and this insn no longer references CC0,
7509 delete the previous insn. Here we use the fact that nothing expects CC0
7510 to be valid over an insn, which is true until the final pass. */
7511 if (prev_insn && GET_CODE (prev_insn) == INSN
7512 && (tem = single_set (prev_insn)) != 0
7513 && SET_DEST (tem) == cc0_rtx
7514 && ! reg_mentioned_p (cc0_rtx, x))
7515 {
7516 PUT_CODE (prev_insn, NOTE);
7517 NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
7518 NOTE_SOURCE_FILE (prev_insn) = 0;
7519 }
7520
7521 prev_insn_cc0 = this_insn_cc0;
7522 prev_insn_cc0_mode = this_insn_cc0_mode;
7523 #endif
7524
7525 prev_insn = insn;
7526 }
7527 \f
7528 /* Remove from the hash table all expressions that reference memory. */
7529 static void
7530 invalidate_memory ()
7531 {
7532 register int i;
7533 register struct table_elt *p, *next;
7534
7535 for (i = 0; i < NBUCKETS; i++)
7536 for (p = table[i]; p; p = next)
7537 {
7538 next = p->next_same_hash;
7539 if (p->in_memory)
7540 remove_from_table (p, i);
7541 }
7542 }
7543
7544 /* XXX ??? The name of this function bears little resemblance to
7545 what this function actually does. FIXME. */
7546 static int
7547 note_mem_written (addr)
7548 register rtx addr;
7549 {
7550 /* Pushing or popping the stack invalidates just the stack pointer. */
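  /* E.g. (illustrative): for a push address such as
     (pre_dec:SI (reg:SI sp)), only the stack pointer's reg_tick is
     bumped (and the stack pointer itself invalidated if it is in the
     table); recorded memory equivalences survive.  */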
7551 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
7552 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
7553 && GET_CODE (XEXP (addr, 0)) == REG
7554 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
7555 {
7556 if (reg_tick[STACK_POINTER_REGNUM] >= 0)
7557 reg_tick[STACK_POINTER_REGNUM]++;
7558
7559 /* This should be *very* rare. */
7560 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
7561 invalidate (stack_pointer_rtx, VOIDmode);
7562 return 1;
7563 }
7564 return 0;
7565 }
7566
7567 /* Perform invalidation on the basis of everything about an insn
7568 except for invalidating the actual places that are SET in it.
7569 This includes the places CLOBBERed, and anything that might
7570 alias with something that is SET or CLOBBERed.
7571
7572 X is the pattern of the insn. */
7573
7574 static void
7575 invalidate_from_clobbers (x)
7576 rtx x;
7577 {
7578 if (GET_CODE (x) == CLOBBER)
7579 {
7580 rtx ref = XEXP (x, 0);
7581 if (ref)
7582 {
7583 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
7584 || GET_CODE (ref) == MEM)
7585 invalidate (ref, VOIDmode);
7586 else if (GET_CODE (ref) == STRICT_LOW_PART
7587 || GET_CODE (ref) == ZERO_EXTRACT)
7588 invalidate (XEXP (ref, 0), GET_MODE (ref));
7589 }
7590 }
7591 else if (GET_CODE (x) == PARALLEL)
7592 {
7593 register int i;
7594 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
7595 {
7596 register rtx y = XVECEXP (x, 0, i);
7597 if (GET_CODE (y) == CLOBBER)
7598 {
7599 rtx ref = XEXP (y, 0);
7600 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
7601 || GET_CODE (ref) == MEM)
7602 invalidate (ref, VOIDmode);
7603 else if (GET_CODE (ref) == STRICT_LOW_PART
7604 || GET_CODE (ref) == ZERO_EXTRACT)
7605 invalidate (XEXP (ref, 0), GET_MODE (ref));
7606 }
7607 }
7608 }
7609 }
7610 \f
7611 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
7612 and replace any registers in them with either an equivalent constant
7613 or the canonical form of the register. If we are inside an address,
7614 only do this if the address remains valid.
7615
7616 OBJECT is 0 except when within a MEM in which case it is the MEM.
7617
7618 Return the replacement for X. */
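/* A sketch of the effect (illustrative pseudos): in a REG_EQUAL note
   containing (plus:SI (reg:SI 100) (const_int 4)), if pseudo 100 is
   currently known to hold (const_int 8), the register is replaced by
   the constant, leaving the note in a more canonical form.  */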
7619
7620 static rtx
7621 cse_process_notes (x, object)
7622 rtx x;
7623 rtx object;
7624 {
7625 enum rtx_code code = GET_CODE (x);
7626 char *fmt = GET_RTX_FORMAT (code);
7627 int i;
7628
7629 switch (code)
7630 {
7631 case CONST_INT:
7632 case CONST:
7633 case SYMBOL_REF:
7634 case LABEL_REF:
7635 case CONST_DOUBLE:
7636 case PC:
7637 case CC0:
7638 case LO_SUM:
7639 return x;
7640
7641 case MEM:
7642 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
7643 return x;
7644
7645 case EXPR_LIST:
7646 case INSN_LIST:
7647 if (REG_NOTE_KIND (x) == REG_EQUAL)
7648 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
7649 if (XEXP (x, 1))
7650 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
7651 return x;
7652
7653 case SIGN_EXTEND:
7654 case ZERO_EXTEND:
7655 case SUBREG:
7656 {
7657 rtx new = cse_process_notes (XEXP (x, 0), object);
7658 /* We don't substitute VOIDmode constants into these rtx,
7659 since they would impede folding. */
7660 if (GET_MODE (new) != VOIDmode)
7661 validate_change (object, &XEXP (x, 0), new, 0);
7662 return x;
7663 }
7664
7665 case REG:
7666 i = reg_qty[REGNO (x)];
7667
7668 /* Return a constant or a constant register. */
7669 if (REGNO_QTY_VALID_P (REGNO (x))
7670 && qty_const[i] != 0
7671 && (CONSTANT_P (qty_const[i])
7672 || GET_CODE (qty_const[i]) == REG))
7673 {
7674 rtx new = gen_lowpart_if_possible (GET_MODE (x), qty_const[i]);
7675 if (new)
7676 return new;
7677 }
7678
7679 /* Otherwise, canonicalize this register. */
7680 return canon_reg (x, NULL_RTX);
7681 }
7682
7683 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7684 if (fmt[i] == 'e')
7685 validate_change (object, &XEXP (x, i),
7686 cse_process_notes (XEXP (x, i), object), 0);
7687
7688 return x;
7689 }
7690 \f
7691 /* Find common subexpressions between the end test of a loop and the beginning
7692 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
7693
7694 Often we have a loop where an expression in the exit test is used
7695 in the body of the loop. For example "while (*p) *q++ = *p++;".
7696 Because of the way we duplicate the loop exit test in front of the loop,
7697 however, we don't detect that common subexpression. This will be caught
7698 when global cse is implemented, but this is a quite common case.
7699
7700 This function handles the most common cases of these common expressions.
7701 It is called after we have processed the basic block ending with the
7702 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
7703 jumps to a label used only once. */
7704
7705 static void
7706 cse_around_loop (loop_start)
7707 rtx loop_start;
7708 {
7709 rtx insn;
7710 int i;
7711 struct table_elt *p;
7712
7713 /* If the jump at the end of the loop doesn't go to the start, we don't
7714 do anything. */
7715 for (insn = PREV_INSN (loop_start);
7716 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
7717 insn = PREV_INSN (insn))
7718 ;
7719
7720 if (insn == 0
7721 || GET_CODE (insn) != NOTE
7722 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
7723 return;
7724
7725 /* If the last insn of the loop (the end test) was an NE comparison,
7726 we will interpret it as an EQ comparison, since we fell through
7727 the loop. Any equivalences resulting from that comparison are
7728 therefore not valid and must be invalidated. */
7729 if (last_jump_equiv_class)
7730 for (p = last_jump_equiv_class->first_same_value; p;
7731 p = p->next_same_value)
7732 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
7733 || (GET_CODE (p->exp) == SUBREG
7734 && GET_CODE (SUBREG_REG (p->exp)) == REG))
7735 invalidate (p->exp, VOIDmode);
7736 else if (GET_CODE (p->exp) == STRICT_LOW_PART
7737 || GET_CODE (p->exp) == ZERO_EXTRACT)
7738 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
7739
7740 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
7741 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
7742
7743 The only thing we do with SET_DEST is invalidate entries, so we
7744 can safely process each SET in order. It is slightly less efficient
7745 to do so, but we only want to handle the most common cases. */
7746
7747 for (insn = NEXT_INSN (loop_start);
7748 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
7749 && ! (GET_CODE (insn) == NOTE
7750 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
7751 insn = NEXT_INSN (insn))
7752 {
7753 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
7754 && (GET_CODE (PATTERN (insn)) == SET
7755 || GET_CODE (PATTERN (insn)) == CLOBBER))
7756 cse_set_around_loop (PATTERN (insn), insn, loop_start);
7757 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
7758 && GET_CODE (PATTERN (insn)) == PARALLEL)
7759 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7760 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
7761 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
7762 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
7763 loop_start);
7764 }
7765 }
7766 \f
7767 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
7768 since they are done elsewhere. This function is called via note_stores. */
7769
7770 static void
7771 invalidate_skipped_set (dest, set)
7772 rtx set;
7773 rtx dest;
7774 {
7775 enum rtx_code code = GET_CODE (dest);
7776
7777 if (code == MEM
7778 && ! note_mem_written (dest) /* If this is not a stack push ... */
7779 /* There are times when an address can appear varying and be a PLUS
7780 during this scan when it would be a fixed address were we to know
7781 the proper equivalences. So invalidate all memory if there is
7782 a BLKmode or nonscalar memory reference or a reference to a
7783 variable address. */
7784 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
7785 || cse_rtx_varies_p (XEXP (dest, 0))))
7786 {
7787 invalidate_memory ();
7788 return;
7789 }
7790
7791 if (GET_CODE (set) == CLOBBER
7792 #ifdef HAVE_cc0
7793 || dest == cc0_rtx
7794 #endif
7795 || dest == pc_rtx)
7796 return;
7797
7798 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
7799 invalidate (XEXP (dest, 0), GET_MODE (dest));
7800 else if (code == REG || code == SUBREG || code == MEM)
7801 invalidate (dest, VOIDmode);
7802 }
7803
7804 /* Invalidate all insns from START up to the end of the function or the
7805 next label. This is called when we wish to CSE around a block that is
7806 conditionally executed. */
7807
7808 static void
7809 invalidate_skipped_block (start)
7810 rtx start;
7811 {
7812 rtx insn;
7813
7814 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
7815 insn = NEXT_INSN (insn))
7816 {
7817 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
7818 continue;
7819
7820 if (GET_CODE (insn) == CALL_INSN)
7821 {
7822 if (! CONST_CALL_P (insn))
7823 invalidate_memory ();
7824 invalidate_for_call ();
7825 }
7826
7827 note_stores (PATTERN (insn), invalidate_skipped_set);
7828 }
7829 }
7830 \f
7831 /* Used for communication between the following two routines; contains a
7832 value to be checked for modification. */
7833
7834 static rtx cse_check_loop_start_value;
7835
7836 /* If modifying X will modify the value in CSE_CHECK_LOOP_START_VALUE,
7837 indicate that fact by setting CSE_CHECK_LOOP_START_VALUE to 0. */
7838
7839 static void
7840 cse_check_loop_start (x, set)
7841 rtx x;
7842 rtx set;
7843 {
7844 if (cse_check_loop_start_value == 0
7845 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
7846 return;
7847
7848 if ((GET_CODE (x) == MEM && GET_CODE (cse_check_loop_start_value) == MEM)
7849 || reg_overlap_mentioned_p (x, cse_check_loop_start_value))
7850 cse_check_loop_start_value = 0;
7851 }
7852
7853 /* X is a SET or CLOBBER contained in INSN that was found near the start of
7854 a loop that starts with the label at LOOP_START.
7855
7856 If X is a SET, we see if its SET_SRC is currently in our hash table.
7857 If so, we see if it has a value equal to some register used only in the
7858 loop exit code (as marked by jump.c).
7859
7860 If those two conditions are true, we search backwards from the start of
7861 the loop to see if that same value was loaded into a register that still
7862 retains its value at the start of the loop.
7863
7864 If so, we insert an insn after the load to copy the destination of that
7865 load into the equivalent register and (try to) replace our SET_SRC with that
7866 register.
7867
7868 In any event, we invalidate whatever this SET or CLOBBER modifies. */
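/* A sketch (illustrative pseudos): suppose (reg:SI 103) is used only in
   the loop exit test and is known equivalent to (mem:SI (reg:SI 99)),
   and an insn P before the loop does
     (set (reg:SI 200) (mem:SI (reg:SI 99))).
   Then a body insn (set (reg:SI 300) (mem:SI (reg:SI 99))) can become
   (set (reg:SI 300) (reg:SI 103)) once we emit
   (set (reg:SI 103) (reg:SI 200)) just after P.  */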
7869
7870 static void
7871 cse_set_around_loop (x, insn, loop_start)
7872 rtx x;
7873 rtx insn;
7874 rtx loop_start;
7875 {
7876 struct table_elt *src_elt;
7877
7878 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
7879 are setting PC or CC0 or whose SET_SRC is already a register. */
7880 if (GET_CODE (x) == SET
7881 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
7882 && GET_CODE (SET_SRC (x)) != REG)
7883 {
7884 src_elt = lookup (SET_SRC (x),
7885 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
7886 GET_MODE (SET_DEST (x)));
7887
7888 if (src_elt)
7889 for (src_elt = src_elt->first_same_value; src_elt;
7890 src_elt = src_elt->next_same_value)
7891 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
7892 && COST (src_elt->exp) < COST (SET_SRC (x)))
7893 {
7894 rtx p, set;
7895
7896 /* Look for an insn in front of LOOP_START that sets
7897 something in the desired mode to SET_SRC (x) before we hit
7898 a label or CALL_INSN. */
7899
7900 for (p = prev_nonnote_insn (loop_start);
7901 p && GET_CODE (p) != CALL_INSN
7902 && GET_CODE (p) != CODE_LABEL;
7903 p = prev_nonnote_insn (p))
7904 if ((set = single_set (p)) != 0
7905 && GET_CODE (SET_DEST (set)) == REG
7906 && GET_MODE (SET_DEST (set)) == src_elt->mode
7907 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
7908 {
7909 /* We now have to ensure that nothing between P
7910 and LOOP_START modified anything referenced in
7911 SET_SRC (x). We know that nothing within the loop
7912 can modify it, or we would have invalidated it in
7913 the hash table. */
7914 rtx q;
7915
7916 cse_check_loop_start_value = SET_SRC (x);
7917 for (q = p; q != loop_start; q = NEXT_INSN (q))
7918 if (GET_RTX_CLASS (GET_CODE (q)) == 'i')
7919 note_stores (PATTERN (q), cse_check_loop_start);
7920
7921 /* If nothing was changed and we can replace our
7922 SET_SRC, add an insn after P to copy its destination
7923 to what we will be replacing SET_SRC with. */
7924 if (cse_check_loop_start_value
7925 && validate_change (insn, &SET_SRC (x),
7926 src_elt->exp, 0))
7927 emit_insn_after (gen_move_insn (src_elt->exp,
7928 SET_DEST (set)),
7929 p);
7930 break;
7931 }
7932 }
7933 }
7934
7935 /* Now invalidate anything modified by X. */
7936 note_mem_written (SET_DEST (x));
7937
7938 /* See comment on similar code in cse_insn for explanation of these tests. */
7939 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
7940 || GET_CODE (SET_DEST (x)) == MEM)
7941 invalidate (SET_DEST (x), VOIDmode);
7942 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
7943 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
7944 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
7945 }
7946 \f
7947 /* Find the end of INSN's basic block and record its range,
7948 the total number of SETs in all the insns of the block, the last insn of the
7949 block, and the branch path.
7950
7951 The branch path indicates which branches should be followed. If a non-zero
7952 path size is specified, the block should be rescanned and a different set
7953 of branches will be taken. The branch path is only used if
7954 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
7955
7956 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
7957 used to describe the block. It is filled in with the information about
7958 the current block. The incoming structure's branch path, if any, is used
7959 to construct the output branch path. */
7960
7961 void
7962 cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
7963 rtx insn;
7964 struct cse_basic_block_data *data;
7965 int follow_jumps;
7966 int after_loop;
7967 int skip_blocks;
7968 {
7969 rtx p = insn, q;
7970 int nsets = 0;
7971 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
7972 rtx next = GET_RTX_CLASS (GET_CODE (insn)) == 'i' ? insn : next_real_insn (insn);
7973 int path_size = data->path_size;
7974 int path_entry = 0;
7975 int i;
7976
7977 /* Update the previous branch path, if any. If the last branch was
7978 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
7979 shorten the path by one and look at the previous branch. We know that
7980 at least one branch must have been taken if PATH_SIZE is non-zero. */
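  /* E.g. a previous path (TAKEN, TAKEN) becomes (TAKEN, NOT_TAKEN),
     while (TAKEN, NOT_TAKEN) is shortened and flipped to (NOT_TAKEN),
     so successive calls enumerate the possible paths.  */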
7981 while (path_size > 0)
7982 {
7983 if (data->path[path_size - 1].status != NOT_TAKEN)
7984 {
7985 data->path[path_size - 1].status = NOT_TAKEN;
7986 break;
7987 }
7988 else
7989 path_size--;
7990 }
7991
7992 /* Scan to end of this basic block. */
7993 while (p && GET_CODE (p) != CODE_LABEL)
7994 {
7995 /* Don't cse out the end of a loop. This makes a difference
7996 only for the unusual loops that always execute at least once;
7997 all other loops have labels there so we will stop in any case.
7998 Cse'ing out the end of the loop is dangerous because it
7999 might cause an invariant expression inside the loop
8000 to be reused after the end of the loop. This would make it
8001 hard to move the expression out of the loop in loop.c,
8002 especially if it is one of several equivalent expressions
8003 and loop.c would like to eliminate it.
8004
8005 If we are running after loop.c has finished, we can ignore
8006 the NOTE_INSN_LOOP_END. */
8007
8008 if (! after_loop && GET_CODE (p) == NOTE
8009 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
8010 break;
8011
8012 /* Don't cse over a call to setjmp; on some machines (e.g. the VAX)
8013 the regs restored by the longjmp come from
8014 a later time than the setjmp. */
8015 if (GET_CODE (p) == NOTE
8016 && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
8017 break;
8018
8019 /* A PARALLEL can have lots of SETs in it,
8020 especially if it is really an ASM_OPERANDS. */
8021 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
8022 && GET_CODE (PATTERN (p)) == PARALLEL)
8023 nsets += XVECLEN (PATTERN (p), 0);
8024 else if (GET_CODE (p) != NOTE)
8025 nsets += 1;
8026
8027 /* Ignore insns made by CSE; they cannot affect the boundaries of
8028 the basic block. */
8029
8030 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
8031 high_cuid = INSN_CUID (p);
8032 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
8033 low_cuid = INSN_CUID (p);
8034
8035 /* See if this insn is in our branch path. If it is and we are to
8036 take it, do so. */
8037 if (path_entry < path_size && data->path[path_entry].branch == p)
8038 {
8039 if (data->path[path_entry].status != NOT_TAKEN)
8040 p = JUMP_LABEL (p);
8041
8042 /* Point to next entry in path, if any. */
8043 path_entry++;
8044 }
8045
8046 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
8047 was specified, we haven't reached our maximum path length, there are
8048 insns following the target of the jump, this is the only use of the
8049 jump label, and the target label is preceded by a BARRIER.
8050
8051 Alternatively, we can follow the jump if it branches around a
8052 block of code and there are no other branches into the block.
8053 In this case invalidate_skipped_block will be called to invalidate any
8054 registers set in the block when following the jump. */
8055
8056 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
8057 && GET_CODE (p) == JUMP_INSN
8058 && GET_CODE (PATTERN (p)) == SET
8059 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
8060 && LABEL_NUSES (JUMP_LABEL (p)) == 1
8061 && NEXT_INSN (JUMP_LABEL (p)) != 0)
8062 {
8063 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
8064 if ((GET_CODE (q) != NOTE
8065 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
8066 || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
8067 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
8068 break;
8069
8070 /* If we ran into a BARRIER, this code is an extension of the
8071 basic block when the branch is taken. */
8072 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
8073 {
8074 /* Don't allow ourselves to keep walking around an
8075 always-executed loop. */
8076 if (next_real_insn (q) == next)
8077 {
8078 p = NEXT_INSN (p);
8079 continue;
8080 }
8081
8082 /* Similarly, don't put a branch in our path more than once. */
8083 for (i = 0; i < path_entry; i++)
8084 if (data->path[i].branch == p)
8085 break;
8086
8087 if (i != path_entry)
8088 break;
8089
8090 data->path[path_entry].branch = p;
8091 data->path[path_entry++].status = TAKEN;
8092
8093 /* This branch now ends our path. It was possible that we
8094 didn't see this branch the last time around (when the
8095 insn in front of the target was a JUMP_INSN that was
8096 turned into a no-op). */
8097 path_size = path_entry;
8098
8099 p = JUMP_LABEL (p);
8100 /* Mark block so we won't scan it again later. */
8101 PUT_MODE (NEXT_INSN (p), QImode);
8102 }
8103 /* Detect a branch around a block of code. */
8104 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
8105 {
8106 register rtx tmp;
8107
8108 if (next_real_insn (q) == next)
8109 {
8110 p = NEXT_INSN (p);
8111 continue;
8112 }
8113
8114 for (i = 0; i < path_entry; i++)
8115 if (data->path[i].branch == p)
8116 break;
8117
8118 if (i != path_entry)
8119 break;
8120
8121 /* This is no_labels_between_p (p, q) with an added check for
8122 reaching the end of a function (in case Q precedes P). */
8123 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
8124 if (GET_CODE (tmp) == CODE_LABEL)
8125 break;
8126
8127 if (tmp == q)
8128 {
8129 data->path[path_entry].branch = p;
8130 data->path[path_entry++].status = AROUND;
8131
8132 path_size = path_entry;
8133
8134 p = JUMP_LABEL (p);
8135 /* Mark block so we won't scan it again later. */
8136 PUT_MODE (NEXT_INSN (p), QImode);
8137 }
8138 }
8139 }
8140 p = NEXT_INSN (p);
8141 }
8142
8143 data->low_cuid = low_cuid;
8144 data->high_cuid = high_cuid;
8145 data->nsets = nsets;
8146 data->last = p;
8147
8148 /* If no jump in the path was taken, set our path length to zero
8149 so a rescan won't be done. */
8150 for (i = path_size - 1; i >= 0; i--)
8151 if (data->path[i].status != NOT_TAKEN)
8152 break;
8153
8154 if (i == -1)
8155 data->path_size = 0;
8156 else
8157 data->path_size = path_size;
8158
8159 /* End the current branch path. */
8160 data->path[path_size].branch = 0;
8161 }
8162 \f
8163 /* Perform cse on the instructions of a function.
8164 F is the first instruction.
8165 NREGS is one plus the highest pseudo-reg number used in the function.
8166
8167 AFTER_LOOP is 1 if this is the cse call done after loop optimization
8168 (only if -frerun-cse-after-loop).
8169
8170 Returns 1 if jump_optimize should be redone due to simplifications
8171 in conditional jump instructions. */
8172
8173 int
8174 cse_main (f, nregs, after_loop, file)
8175 rtx f;
8176 int nregs;
8177 int after_loop;
8178 FILE *file;
8179 {
8180 struct cse_basic_block_data val;
8181 register rtx insn = f;
8182 register int i;
8183
8184 cse_jumps_altered = 0;
8185 recorded_label_ref = 0;
8186 constant_pool_entries_cost = 0;
8187 val.path_size = 0;
8188
8189 init_recog ();
8190 init_alias_analysis ();
8191
8192 max_reg = nregs;
8193
8194 all_minus_one = (int *) alloca (nregs * sizeof (int));
8195 consec_ints = (int *) alloca (nregs * sizeof (int));
8196
8197 for (i = 0; i < nregs; i++)
8198 {
8199 all_minus_one[i] = -1;
8200 consec_ints[i] = i;
8201 }
8202
8203 reg_next_eqv = (int *) alloca (nregs * sizeof (int));
8204 reg_prev_eqv = (int *) alloca (nregs * sizeof (int));
8205 reg_qty = (int *) alloca (nregs * sizeof (int));
8206 reg_in_table = (int *) alloca (nregs * sizeof (int));
8207 reg_tick = (int *) alloca (nregs * sizeof (int));
8208
8209 #ifdef LOAD_EXTEND_OP
8210
8211 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
8212 and change the code and mode as appropriate. */
8213 memory_extend_rtx = gen_rtx (ZERO_EXTEND, VOIDmode, NULL_RTX);
8214 #endif
8215
8216 /* Discard all the free elements of the previous function
8217 since they are allocated in the temporary obstack. */
8218 bzero ((char *) table, sizeof table);
8219 free_element_chain = 0;
8220 n_elements_made = 0;
8221
8222 /* Find the largest uid. */
8223
8224 max_uid = get_max_uid ();
8225 uid_cuid = (int *) alloca ((max_uid + 1) * sizeof (int));
8226 bzero ((char *) uid_cuid, (max_uid + 1) * sizeof (int));
8227
8228 /* Compute the mapping from uids to cuids.
8229 CUIDs are numbers assigned to insns, like uids,
8230 except that cuids increase monotonically through the code.
8231 Don't assign cuids to line-number NOTEs, so that the distance in cuids
8232 between two insns is not affected by -g. */
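  /* E.g. with insns whose uids are 5, 12 and 9 in stream order, the
     cuids assigned are 1, 2 and 3; a line-number note between two of
     them shares the cuid of the insn before it.  */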
8233
8234 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
8235 {
8236 if (GET_CODE (insn) != NOTE
8237 || NOTE_LINE_NUMBER (insn) < 0)
8238 INSN_CUID (insn) = ++i;
8239 else
8240 /* Give a line number note the same cuid as preceding insn. */
8241 INSN_CUID (insn) = i;
8242 }
8243
8244 /* Initialize which registers are clobbered by calls. */
8245
8246 CLEAR_HARD_REG_SET (regs_invalidated_by_call);
8247
8248 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8249 if ((call_used_regs[i]
8250 /* Used to check !fixed_regs[i] here, but that isn't safe;
8251 fixed regs are still call-clobbered, and sched can get
8252 confused if they can "live across calls".
8253
8254 The frame pointer is always preserved across calls. The arg
8255 pointer is if it is fixed. The stack pointer usually is, unless
8256 RETURN_POPS_ARGS, in which case an explicit CLOBBER
8257 will be present. If we are generating PIC code, the PIC offset
8258 table register is preserved across calls. */
8259
8260 && i != STACK_POINTER_REGNUM
8261 && i != FRAME_POINTER_REGNUM
8262 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
8263 && i != HARD_FRAME_POINTER_REGNUM
8264 #endif
8265 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
8266 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
8267 #endif
8268 #if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
8269 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
8270 #endif
8271 )
8272 || global_regs[i])
8273 SET_HARD_REG_BIT (regs_invalidated_by_call, i);
8274
8275 /* Loop over basic blocks.
8276 Compute the maximum number of qty's needed for each basic block
8277 (which is 2 for each SET). */
8278 insn = f;
8279 while (insn)
8280 {
8281 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
8282 flag_cse_skip_blocks);
8283
8284 /* If this basic block was already processed or has no sets, skip it. */
8285 if (val.nsets == 0 || GET_MODE (insn) == QImode)
8286 {
8287 PUT_MODE (insn, VOIDmode);
8288 insn = (val.last ? NEXT_INSN (val.last) : 0);
8289 val.path_size = 0;
8290 continue;
8291 }
8292
8293 cse_basic_block_start = val.low_cuid;
8294 cse_basic_block_end = val.high_cuid;
8295 max_qty = val.nsets * 2;
8296
8297 if (file)
8298 fprintf (file, ";; Processing block from %d to %d, %d sets.\n",
8299 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
8300 val.nsets);
8301
8302 /* Make MAX_QTY bigger to give us room to optimize
8303 past the end of this basic block, if that should prove useful. */
8304 if (max_qty < 500)
8305 max_qty = 500;
8306
8307 max_qty += max_reg;
8308
8309 /* If this basic block is being extended by following certain jumps,
8310 (see `cse_end_of_basic_block'), we reprocess the code from the start.
8311 Otherwise, we start after this basic block. */
8312 if (val.path_size > 0)
8313 cse_basic_block (insn, val.last, val.path, 0);
8314 else
8315 {
8316 int old_cse_jumps_altered = cse_jumps_altered;
8317 rtx temp;
8318
8319 /* When cse changes a conditional jump to an unconditional
8320 jump, we want to reprocess the block, since it will give
8321 us a new branch path to investigate. */
8322 cse_jumps_altered = 0;
8323 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
8324 if (cse_jumps_altered == 0
8325 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
8326 insn = temp;
8327
8328 cse_jumps_altered |= old_cse_jumps_altered;
8329 }
8330
8331 #ifdef USE_C_ALLOCA
8332 alloca (0);
8333 #endif
8334 }
8335
8336 /* Tell refers_to_mem_p that qty_const info is not available. */
8337 qty_const = 0;
8338
8339 if (max_elements_made < n_elements_made)
8340 max_elements_made = n_elements_made;
8341
8342 return cse_jumps_altered || recorded_label_ref;
8343 }
8344
8345 /* Process a single basic block. FROM and TO are the limits of the basic
8346 block. NEXT_BRANCH points to the branch path when following jumps or
8347 a null path when not following jumps.
8348
8349 AROUND_LOOP is non-zero if we are to try to cse around to the start of a
8350 loop. This is true when we are being called for the last time on a
8351 block and this CSE pass is before loop.c. */
8352
8353 static rtx
8354 cse_basic_block (from, to, next_branch, around_loop)
8355 register rtx from, to;
8356 struct branch_path *next_branch;
8357 int around_loop;
8358 {
8359 register rtx insn;
8360 int to_usage = 0;
8361 int in_libcall_block = 0;
8362
8363 /* Each of these arrays is undefined before max_reg, so only allocate
8364 the space actually needed and adjust the start below. */
8365
8366 qty_first_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8367 qty_last_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8368 qty_mode = (enum machine_mode *) alloca ((max_qty - max_reg) * sizeof (enum machine_mode));
8369 qty_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8370 qty_const_insn = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8371 qty_comparison_code
8372 = (enum rtx_code *) alloca ((max_qty - max_reg) * sizeof (enum rtx_code));
8373 qty_comparison_qty = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8374 qty_comparison_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8375
8376 qty_first_reg -= max_reg;
8377 qty_last_reg -= max_reg;
8378 qty_mode -= max_reg;
8379 qty_const -= max_reg;
8380 qty_const_insn -= max_reg;
8381 qty_comparison_code -= max_reg;
8382 qty_comparison_qty -= max_reg;
8383 qty_comparison_const -= max_reg;
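  /* After these adjustments each array is indexed directly by quantity
     number: e.g. qty_mode[max_reg] refers to slot 0 of its allocation.  */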
8384
8385 new_basic_block ();
8386
8387 /* TO might be a label. If so, protect it from being deleted. */
8388 if (to != 0 && GET_CODE (to) == CODE_LABEL)
8389 ++LABEL_NUSES (to);
8390
8391 for (insn = from; insn != to; insn = NEXT_INSN (insn))
8392 {
8393 register enum rtx_code code;
8394
8395 /* See if this is a branch that is part of the path. If so, and it is
8396 to be taken, do so. */
8397 if (next_branch->branch == insn)
8398 {
8399 enum taken status = next_branch++->status;
8400 if (status != NOT_TAKEN)
8401 {
8402 if (status == TAKEN)
8403 record_jump_equiv (insn, 1);
8404 else
8405 invalidate_skipped_block (NEXT_INSN (insn));
8406
8407 /* Set the last insn as the jump insn; it doesn't affect cc0.
8408 Then follow this branch. */
8409 #ifdef HAVE_cc0
8410 prev_insn_cc0 = 0;
8411 #endif
8412 prev_insn = insn;
8413 insn = JUMP_LABEL (insn);
8414 continue;
8415 }
8416 }
8417
8418 code = GET_CODE (insn);
8419 if (GET_MODE (insn) == QImode)
8420 PUT_MODE (insn, VOIDmode);
8421
8422 if (GET_RTX_CLASS (code) == 'i')
8423 {
8424 /* Process notes first so we have all notes in canonical forms when
8425 looking for duplicate operations. */
8426
8427 if (REG_NOTES (insn))
8428 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
8429
8430 /* Track when we are inside a LIBCALL block. Inside such a block,
8431 we do not want to record destinations. The last insn of a
8432 LIBCALL block is not considered to be part of the block, since
8433 its destination is the result of the block and hence should be
8434 recorded. */
8435
8436 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
8437 in_libcall_block = 1;
8438 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
8439 in_libcall_block = 0;
8440
8441 cse_insn (insn, in_libcall_block);
8442 }
8443
8444 /* If INSN is now an unconditional jump, skip to the end of our
8445 basic block by pretending that we just did the last insn in the
8446 basic block. If we are jumping to the end of our block, show
8447 that we can have one usage of TO. */
8448
8449 if (simplejump_p (insn))
8450 {
8451 if (to == 0)
8452 return 0;
8453
8454 if (JUMP_LABEL (insn) == to)
8455 to_usage = 1;
8456
8457 /* Maybe TO was deleted because the jump is unconditional.
8458 If so, there is nothing left in this basic block. */
8459 /* ??? Perhaps it would be smarter to set TO
8460 to whatever follows this insn,
8461 and pretend the basic block had always ended here. */
8462 if (INSN_DELETED_P (to))
8463 break;
8464
8465 insn = PREV_INSN (to);
8466 }
8467
8468 /* See if it is ok to keep on going past the label
8469 which used to end our basic block. Remember that we incremented
8470 the count of that label, so we decrement it here. If we made
8471 a jump unconditional, TO_USAGE will be one; in that case, we don't
8472 want to count the use in that jump. */
8473
8474 if (to != 0 && NEXT_INSN (insn) == to
8475 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
8476 {
8477 struct cse_basic_block_data val;
8478 rtx prev;
8479
8480 insn = NEXT_INSN (to);
8481
8482 if (LABEL_NUSES (to) == 0)
8483 insn = delete_insn (to);
8484
8485 /* If TO was the last insn in the function, we are done. */
8486 if (insn == 0)
8487 return 0;
8488
8489 /* If TO was preceded by a BARRIER we are done with this block
8490 because it has no continuation. */
8491 prev = prev_nonnote_insn (to);
8492 if (prev && GET_CODE (prev) == BARRIER)
8493 return insn;
8494
8495 /* Find the end of the following block. Note that we won't be
8496 following branches in this case. */
8497 to_usage = 0;
8498 val.path_size = 0;
8499 cse_end_of_basic_block (insn, &val, 0, 0, 0);
8500
8501 /* If the tables we allocated have enough space left
8502 to handle all the SETs in the next basic block,
8503 continue through it. Otherwise, return,
8504 and that block will be scanned individually. */
8505 if (val.nsets * 2 + next_qty > max_qty)
8506 break;
8507
8508 cse_basic_block_start = val.low_cuid;
8509 cse_basic_block_end = val.high_cuid;
8510 to = val.last;
8511
8512 /* Prevent TO from being deleted if it is a label. */
8513 if (to != 0 && GET_CODE (to) == CODE_LABEL)
8514 ++LABEL_NUSES (to);
8515
8516 /* Back up so we process the first insn in the extension. */
8517 insn = PREV_INSN (insn);
8518 }
8519 }
8520
8521 if (next_qty > max_qty)
8522 abort ();
8523
8524 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
8525 the previous insn is the only insn that branches to the head of a loop,
8526 we can cse into the loop. Don't do this if we changed the jump
8527 structure of a loop unless we aren't going to be following jumps. */
8528
8529 if ((cse_jumps_altered == 0
8530 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
8531 && around_loop && to != 0
8532 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
8533 && GET_CODE (PREV_INSN (to)) == JUMP_INSN
8534 && JUMP_LABEL (PREV_INSN (to)) != 0
8535 && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
8536 cse_around_loop (JUMP_LABEL (PREV_INSN (to)));
8537
8538 return to ? NEXT_INSN (to) : 0;
8539 }
8540 \f
8541 /* Count the number of times registers are used (not set) in X.
8542 COUNTS is an array in which we accumulate the count, INCR is how much
8543 we count each register usage.
8544
8545 Don't count a usage of DEST, which is the SET_DEST of a SET which
8546 contains X in its SET_SRC. This is because such a SET does not
8547 modify the liveness of DEST. */
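/* E.g. for (set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 1))),
   the use of pseudo 100 in the source is not counted; if nothing else
   reads pseudo 100, delete_dead_from_cse can remove the increment.  */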
8548
8549 static void
8550 count_reg_usage (x, counts, dest, incr)
8551 rtx x;
8552 int *counts;
8553 rtx dest;
8554 int incr;
8555 {
8556 enum rtx_code code;
8557 char *fmt;
8558 int i, j;
8559
8560 if (x == 0)
8561 return;
8562
8563 switch (code = GET_CODE (x))
8564 {
8565 case REG:
8566 if (x != dest)
8567 counts[REGNO (x)] += incr;
8568 return;
8569
8570 case PC:
8571 case CC0:
8572 case CONST:
8573 case CONST_INT:
8574 case CONST_DOUBLE:
8575 case SYMBOL_REF:
8576 case LABEL_REF:
8577 case CLOBBER:
8578 return;
8579
8580 case SET:
8581 /* Unless we are setting a REG, count everything in SET_DEST. */
8582 if (GET_CODE (SET_DEST (x)) != REG)
8583 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
8584
8585 /* If SRC has side-effects, then we can't delete this insn, so the
8586 usage of SET_DEST inside SRC counts.
8587
8588 ??? Strictly speaking, we might be preserving this insn
8589 because some other SET has side-effects, but that's hard
8590 to do and can't happen now. */
8591 count_reg_usage (SET_SRC (x), counts,
8592 side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
8593 incr);
8594 return;
8595
8596 case CALL_INSN:
8597 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
8598
8599 /* ... falls through ... */
8600 case INSN:
8601 case JUMP_INSN:
8602 count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);
8603
8604 /* Things used in a REG_EQUAL note aren't dead since loop may try to
8605 use them. */
8606
8607 count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
8608 return;
8609
8610 case EXPR_LIST:
8611 case INSN_LIST:
8612 if (REG_NOTE_KIND (x) == REG_EQUAL
8613 || GET_CODE (XEXP (x,0)) == USE)
8614 count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
8615 count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
8616 return;
8617 }
8618
8619 fmt = GET_RTX_FORMAT (code);
8620 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8621 {
8622 if (fmt[i] == 'e')
8623 count_reg_usage (XEXP (x, i), counts, dest, incr);
8624 else if (fmt[i] == 'E')
8625 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8626 count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
8627 }
8628 }
8629 \f
8630 /* Scan all the insns and delete any that are dead; i.e., they store a register
8631 that is never used or they copy a register to itself.
8632
8633 This is used to remove insns made obviously dead by cse. It improves the
8634 heuristics in loop since it won't try to move dead invariants out of loops
8635 or make givs for dead quantities. The remaining passes of the compilation
8636 are also sped up. */
8637
8638 void
8639 delete_dead_from_cse (insns, nreg)
8640 rtx insns;
8641 int nreg;
8642 {
8643 int *counts = (int *) alloca (nreg * sizeof (int));
8644 rtx insn, prev;
8645 rtx tem;
8646 int i;
8647 int in_libcall = 0;
8648
8649 /* First count the number of times each register is used. */
8650 bzero ((char *) counts, sizeof (int) * nreg);
8651 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
8652 count_reg_usage (insn, counts, NULL_RTX, 1);
8653
8654 /* Go from the last insn to the first and delete insns that only set unused
8655 registers or copy a register to itself. As we delete an insn, remove
8656 usage counts for registers it uses. */
8657 for (insn = prev_real_insn (get_last_insn ()); insn; insn = prev)
8658 {
8659 int live_insn = 0;
8660
8661 prev = prev_real_insn (insn);
8662
8663 /* Don't delete any insns that are part of a libcall block.
8664 Flow or loop might get confused if we did that. Remember
8665 that we are scanning backwards. */
8666 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
8667 in_libcall = 1;
8668
8669 if (in_libcall)
8670 live_insn = 1;
8671 else if (GET_CODE (PATTERN (insn)) == SET)
8672 {
8673 if (GET_CODE (SET_DEST (PATTERN (insn))) == REG
8674 && SET_DEST (PATTERN (insn)) == SET_SRC (PATTERN (insn)))
8675 ;
8676
8677 #ifdef HAVE_cc0
8678 else if (GET_CODE (SET_DEST (PATTERN (insn))) == CC0
8679 && ! side_effects_p (SET_SRC (PATTERN (insn)))
8680 && ((tem = next_nonnote_insn (insn)) == 0
8681 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
8682 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
8683 ;
8684 #endif
8685 else if (GET_CODE (SET_DEST (PATTERN (insn))) != REG
8686 || REGNO (SET_DEST (PATTERN (insn))) < FIRST_PSEUDO_REGISTER
8687 || counts[REGNO (SET_DEST (PATTERN (insn)))] != 0
8688 || side_effects_p (SET_SRC (PATTERN (insn))))
8689 live_insn = 1;
8690 }
8691 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
8692 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
8693 {
8694 rtx elt = XVECEXP (PATTERN (insn), 0, i);
8695
8696 if (GET_CODE (elt) == SET)
8697 {
8698 if (GET_CODE (SET_DEST (elt)) == REG
8699 && SET_DEST (elt) == SET_SRC (elt))
8700 ;
8701
8702 #ifdef HAVE_cc0
8703 else if (GET_CODE (SET_DEST (elt)) == CC0
8704 && ! side_effects_p (SET_SRC (elt))
8705 && ((tem = next_nonnote_insn (insn)) == 0
8706 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
8707 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
8708 ;
8709 #endif
8710 else if (GET_CODE (SET_DEST (elt)) != REG
8711 || REGNO (SET_DEST (elt)) < FIRST_PSEUDO_REGISTER
8712 || counts[REGNO (SET_DEST (elt))] != 0
8713 || side_effects_p (SET_SRC (elt)))
8714 live_insn = 1;
8715 }
8716 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
8717 live_insn = 1;
8718 }
8719 else
8720 live_insn = 1;
8721
8722 /* If this is a dead insn, delete it and show that the registers in it
8723 are no longer used. */
8724
8725 if (! live_insn)
8726 {
8727 count_reg_usage (insn, counts, NULL_RTX, -1);
8728 delete_insn (insn);
8729 }
8730
8731 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
8732 in_libcall = 0;
8733 }
8734 }