/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 88, 89, 92-7, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
/* stdio.h must precede rtl.h for FFS.  */
#include "system.h"
#include <setjmp.h>

#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "expr.h"
#include "toplev.h"
#include "output.h"

/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge; so, at each label, we forget all that is
   known and start fresh.  This can be described as processing each
   basic block separately.  Note, however, that these are not quite
   the same as the basic blocks found by a later pass and used for
   data flow analysis and register packing.  We do not need to start fresh
   after a conditional jump instruction if there is no label there.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and several vectors together
   with "quantity numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `reg_qty' records what quantity a register is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to `max_reg'.
   If register N has not been assigned a quantity, reg_qty[N] will equal N.

   Quantity numbers below `max_reg' do not exist and none of the `qty_...'
   variables should be referenced with an index below `max_reg'.

   We also maintain a bidirectional chain of registers for each
   quantity number.  `qty_first_reg', `qty_last_reg',
   `reg_next_eqv' and `reg_prev_eqv' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, REG expressions with
   the quantity's `qty_mode' must be in the hash table for both registers,
   and they must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity of one of the
   registers does not have the same mode as those expressions.

99
100 When a quantity has a known constant value, that value is stored
101 in the appropriate element of qty_const. This is in addition to
102 putting the constant in the hash table as is usual for non-regs.
103
104 Whether a reg or a constant is preferred is determined by the configuration
105 macro CONST_COSTS and will often depend on the constant value. In any
106 event, expressions containing constants can be simplified, by fold_rtx.
107
108 When a quantity has a known nearly constant value (such as an address
109 of a stack slot), that value is stored in the appropriate element
110 of qty_const.
111
112 Integer constants don't have a machine mode. However, cse
113 determines the intended machine mode from the destination
114 of the instruction that moves the constant. The machine mode
115 is recorded in the hash table along with the actual RTL
116 constant expression so that different modes are kept separate.
117
118 Other expressions:
119
120 To record known equivalences among expressions in general
121 we use a hash table called `table'. It has a fixed number of buckets
122 that contain chains of `struct table_elt' elements for expressions.
123 These chains connect the elements whose expressions have the same
124 hash codes.
125
126 Other chains through the same elements connect the elements which
127 currently have equivalent values.
128
129 Register references in an expression are canonicalized before hashing
130 the expression. This is done using `reg_qty' and `qty_first_reg'.
131 The hash code of a register reference is computed using the quantity
132 number, not the register number.
133
134 When the value of an expression changes, it is necessary to remove from the
135 hash table not just that expression but all expressions whose values
136 could be different as a result.
137
138 1. If the value changing is in memory, except in special cases
139 ANYTHING referring to memory could be changed. That is because
140 nobody knows where a pointer does not point.
141 The function `invalidate_memory' removes what is necessary.
142
143 The special cases are when the address is constant or is
144 a constant plus a fixed register such as the frame pointer
145 or a static chain pointer. When such addresses are stored in,
146 we can tell exactly which other such addresses must be invalidated
147 due to overlap. `invalidate' does this.
148 All expressions that refer to non-constant
149 memory addresses are also invalidated. `invalidate_memory' does this.
150
151 2. If the value changing is a register, all expressions
152 containing references to that register, and only those,
153 must be removed.
154
155 Because searching the entire hash table for expressions that contain
156 a register is very slow, we try to figure out when it isn't necessary.
157 Precisely, this is necessary only when expressions have been
158 entered in the hash table using this register, and then the value has
159 changed, and then another expression wants to be added to refer to
160 the register's new value. This sequence of circumstances is rare
161 within any one basic block.
162
163 The vectors `reg_tick' and `reg_in_table' are used to detect this case.
164 reg_tick[i] is incremented whenever a value is stored in register i.
165 reg_in_table[i] holds -1 if no references to register i have been
166 entered in the table; otherwise, it contains the value reg_tick[i] had
167 when the references were entered. If we want to enter a reference
168 and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
169 Until we want to enter a new entry, the mere fact that the two vectors
170 don't match makes the entries be ignored if anyone tries to match them.
171
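   A sketch of the check this implies (it mirrors the test actually made
   in `mention_regs' below; nothing in it is new):

       if (reg_in_table[i] >= 0 && reg_in_table[i] != reg_tick[i])
         remove_invalid_refs (i);
       reg_in_table[i] = reg_tick[i];

   That is, stale references are flushed lazily, only at the moment a new
   reference to register i is about to enter the table.
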
   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, the vectors `reg_tick'
   and `reg_in_table' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */

/* One plus largest register number used in this function.  */

static int max_reg;

/* One plus largest instruction UID used in this function at time of
   cse_main call.  */

static int max_insn_uid;

/* Length of vectors indexed by quantity number.
   We know in advance we will not need a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Indexed by quantity number, gives the first (or last) register
   in the chain of registers that currently contain this quantity.  */

static int *qty_first_reg;
static int *qty_last_reg;

/* Indexed by quantity number, gives the mode of the quantity.  */

static enum machine_mode *qty_mode;

/* Indexed by quantity number, gives the rtx of the constant value of the
   quantity, or zero if it does not have a known value.
   A sum of the frame pointer (or arg pointer) plus a constant
   can also be entered here.  */

static rtx *qty_const;

/* Indexed by qty number, gives the insn that stored the constant value
   recorded in `qty_const'.  */

static rtx *qty_const_insn;

/* The next three variables are used to track when a comparison between a
   quantity and some constant or register has been passed.  In that case, we
   know the results of the comparison in case we see it again.  These variables
   record a comparison that is known to be true.  */

/* Indexed by qty number, gives the rtx code of a comparison with a known
   result involving this quantity.  If none, it is UNKNOWN.  */
static enum rtx_code *qty_comparison_code;

/* Indexed by qty number, gives the constant being compared against in a
   comparison of known result.  If no such comparison, it is undefined.
   If the comparison is not with a constant, it is zero.  */

static rtx *qty_comparison_const;

/* Indexed by qty number, gives the quantity being compared against in a
   comparison of known result.  If no such comparison, it is undefined.
   If the comparison is not with a register, it is -1.  */

static int *qty_comparison_qty;

#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the value last assigned to CC0.  If it should
   happen to be a constant, it is stored in preference to the actual
   assigned value.  In case it is a constant, we store the mode in which
   the constant should be interpreted.  */

static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;
#endif

/* Previous actual insn.  0 if at first insn of basic block.  */

static rtx prev_insn;

/* Insn being scanned.  */

static rtx this_insn;

/* Indexed by register number, gives the quantity number
   of the register's current contents.  */

static int *reg_qty;

/* Indexed by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If reg_qty[N] == N, reg_next_eqv[N] is undefined.  */

static int *reg_next_eqv;
static int *reg_prev_eqv;

/* Indexed by register number, gives the number of times
   that register has been altered in the current basic block.  */

static int *reg_tick;

/* Indexed by register number, gives the reg_tick value at which
   rtx's containing this register are valid in the hash table.
   If this does not equal the current reg_tick value, such expressions
   existing in the hash table are invalid.
   If this is -1, no expressions containing this register have been
   entered in the table.  */

static int *reg_in_table;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* A HARD_REG_SET containing all the hard registers that are invalidated
   by a CALL_INSN.  */

static HARD_REG_SET regs_invalidated_by_call;

/* Two vectors of ints:
   one containing max_reg -1's; the other max_reg + 500 (an approximation
   for max_qty) elements where element i contains i.
   These are used to initialize various other vectors fast.  */

static int *all_minus_one;
static int *consec_ints;

/* CUID of insn that starts the basic block currently being cse-processed.  */

static int cse_basic_block_start;

/* CUID of insn that ends the basic block currently being cse-processed.  */

static int cse_basic_block_end;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   We use them to see whether a reg is used outside a given basic block.  */

static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Nonzero if cse has altered conditional jump insns
   in such a way that jump optimization should be redone.  */

static int cse_jumps_altered;

/* Nonzero if we put a LABEL_REF into the hash table.  Since we may have put
   it into an INSN without a REG_LABEL, we have to rerun jump after CSE
   to put in the note.  */
static int recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

#ifdef LOAD_EXTEND_OP

/* Scratch rtl used when looking for load-extended copy of a MEM.  */
static rtx memory_extend_rtx;
#endif

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* canon_hash stores 1 in hash_arg_in_struct
   if it notices a reference to memory that's part of a structure.  */

static int hash_arg_in_struct;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   are on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `in_struct' field is nonzero for elements that
   involve any reference to memory inside a structure or array.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */


struct table_elt
{
  rtx exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  enum machine_mode mode;
  char in_memory;
  char in_struct;
  char is_const;
  char flag;
};

/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define NBUCKETS 31

/* Compute hash code of X in mode M.  Special-case the case where X is a
   pseudo register (hard registers may require `do_not_record' to be set).  */

#define HASH(X, M) \
 (GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER \
  ? (((unsigned) REG << 7) + (unsigned) reg_qty[REGNO (X)]) % NBUCKETS \
  : canon_hash (X, M) % NBUCKETS)

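/* A usage sketch for HASH (this mirrors what `invalidate' does below;
   nothing here is new API):

       unsigned hash = HASH (x, GET_MODE (x));
       struct table_elt *elt = lookup_for_remove (x, hash, GET_MODE (x));

   Note that a pseudo register is hashed through its quantity number, so
   two pseudos that currently share a quantity fall into the same bucket
   even though their register numbers differ.  */
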
/* Determine whether register number N is considered a fixed register for CSE.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed,
   but not if it is an overlapping register.  */
#ifdef OVERLAPPING_REGNO_P
#define FIXED_REGNO_P(N)  \
  (((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
    || fixed_regs[N] || global_regs[N])  \
   && ! OVERLAPPING_REGNO_P ((N)))
#else
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])
#endif

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of one and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N) \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM \
   || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
   || ((N) < FIRST_PSEUDO_REGISTER \
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

/* A register is cheap if it is a user variable assigned to the register
   or if its register number always corresponds to a cheap register.  */

#define CHEAP_REG(N) \
  ((REG_USERVAR_P (N) && REGNO (N) < FIRST_PSEUDO_REGISTER) \
   || CHEAP_REGNO (REGNO (N)))

#define COST(X) \
  (GET_CODE (X) == REG \
   ? (CHEAP_REG (X) ? 0 \
      : REGNO (X) >= FIRST_PSEUDO_REGISTER ? 1 \
      : 2) \
   : notreg_cost(X))

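/* Worked examples for COST, following directly from the macros above: the
   frame pointer costs 0 (it satisfies CHEAP_REGNO); a typical pseudo
   register costs 1; other hard registers cost 2; a non-register such as
   (plus (reg) (const_int 4)) falls through to notreg_cost and ultimately
   rtx_cost.  */
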
/* Determine if the quantity number for register X represents a valid index
   into the `qty_...' variables.  */

#define REGNO_QTY_VALID_P(N) (reg_qty[N] != (N))

#ifdef ADDRESS_COST
/* The ADDRESS_COST macro does not deal with ADDRESSOF nodes.  But,
   during CSE, such nodes are present.  Using an ADDRESSOF node which
   refers to the address of a REG is a good thing because we can then
   turn (MEM (ADDRESSOF (REG))) into just plain REG.  */
#define CSE_ADDRESS_COST(RTX) \
  ((GET_CODE (RTX) == ADDRESSOF && REG_P (XEXP ((RTX), 0))) \
   ? -1 : ADDRESS_COST(RTX))
#endif

static struct table_elt *table[NBUCKETS];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Number of `struct table_elt' structures made so far for this function.  */

static int n_elements_made;

/* Maximum value `n_elements_made' has had so far in this compilation
   for functions previously processed.  */

static int max_elements_made;

/* Surviving equivalence class when two equivalence classes are merged
   by recording the effects of a jump in the last insn.  Zero if the
   last insn was not a conditional jump.  */

static struct table_elt *last_jump_equiv_class;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;

/* Define maximum length of a branch path.  */

#define PATHLENGTH 10

/* This data describes a block that will be processed by cse_basic_block.  */

struct cse_basic_block_data {
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path {
    /* The branch insn.  */
    rtx branch;
    /* Whether it should be taken or not.  AROUND is the same as taken
       except that it is used when the destination label is not preceded
       by a BARRIER.  */
    enum taken {TAKEN, NOT_TAKEN, AROUND} status;
  } path[PATHLENGTH];
};

/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.  */

#define FIXED_BASE_PLUS_P(X) \
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
   || (X) == arg_pointer_rtx \
   || (X) == virtual_stack_vars_rtx \
   || (X) == virtual_incoming_args_rtx \
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx \
           || XEXP (X, 0) == hard_frame_pointer_rtx \
           || XEXP (X, 0) == arg_pointer_rtx \
           || XEXP (X, 0) == virtual_stack_vars_rtx \
           || XEXP (X, 0) == virtual_incoming_args_rtx)) \
   || GET_CODE (X) == ADDRESSOF)

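/* For instance (this follows from the definition just above):
   FIXED_BASE_PLUS_P accepts frame_pointer_rtx itself and a sum such as
   (plus frame_pointer_rtx (const_int 8)), but rejects a sum whose first
   operand is a pseudo register, since such an address can vary.  */
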
/* Similar, but also allows reference to the stack pointer.

   This used to include FIXED_BASE_PLUS_P, however, we can't assume that
   arg_pointer_rtx by itself is nonzero, because on at least one machine,
   the i960, the arg pointer is zero when it is unused.  */

#define NONZERO_BASE_PLUS_P(X) \
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
   || (X) == virtual_stack_vars_rtx \
   || (X) == virtual_incoming_args_rtx \
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx \
           || XEXP (X, 0) == hard_frame_pointer_rtx \
           || XEXP (X, 0) == arg_pointer_rtx \
           || XEXP (X, 0) == virtual_stack_vars_rtx \
           || XEXP (X, 0) == virtual_incoming_args_rtx)) \
   || (X) == stack_pointer_rtx \
   || (X) == virtual_stack_dynamic_rtx \
   || (X) == virtual_outgoing_args_rtx \
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == stack_pointer_rtx \
           || XEXP (X, 0) == virtual_stack_dynamic_rtx \
           || XEXP (X, 0) == virtual_outgoing_args_rtx)) \
   || GET_CODE (X) == ADDRESSOF)

static int notreg_cost PROTO((rtx));
static void new_basic_block PROTO((void));
static void make_new_qty PROTO((int));
static void make_regs_eqv PROTO((int, int));
static void delete_reg_equiv PROTO((int));
static int mention_regs PROTO((rtx));
static int insert_regs PROTO((rtx, struct table_elt *, int));
static void free_element PROTO((struct table_elt *));
static void remove_from_table PROTO((struct table_elt *, unsigned));
static struct table_elt *get_element PROTO((void));
static struct table_elt *lookup PROTO((rtx, unsigned, enum machine_mode)),
       *lookup_for_remove PROTO((rtx, unsigned, enum machine_mode));
static rtx lookup_as_function PROTO((rtx, enum rtx_code));
static struct table_elt *insert PROTO((rtx, struct table_elt *, unsigned,
                                       enum machine_mode));
static void merge_equiv_classes PROTO((struct table_elt *,
                                       struct table_elt *));
static void invalidate PROTO((rtx, enum machine_mode));
static int cse_rtx_varies_p PROTO((rtx));
static void remove_invalid_refs PROTO((int));
static void remove_invalid_subreg_refs PROTO((int, int, enum machine_mode));
static void rehash_using_reg PROTO((rtx));
static void invalidate_memory PROTO((void));
static void invalidate_for_call PROTO((void));
static rtx use_related_value PROTO((rtx, struct table_elt *));
static unsigned canon_hash PROTO((rtx, enum machine_mode));
static unsigned safe_hash PROTO((rtx, enum machine_mode));
static int exp_equiv_p PROTO((rtx, rtx, int, int));
static void set_nonvarying_address_components PROTO((rtx, int, rtx *,
                                                     HOST_WIDE_INT *,
                                                     HOST_WIDE_INT *));
static int refers_to_p PROTO((rtx, rtx));
static rtx canon_reg PROTO((rtx, rtx));
static void find_best_addr PROTO((rtx, rtx *));
static enum rtx_code find_comparison_args PROTO((enum rtx_code, rtx *, rtx *,
                                                 enum machine_mode *,
                                                 enum machine_mode *));
static rtx cse_gen_binary PROTO((enum rtx_code, enum machine_mode,
                                 rtx, rtx));
static rtx simplify_plus_minus PROTO((enum rtx_code, enum machine_mode,
                                      rtx, rtx));
static rtx fold_rtx PROTO((rtx, rtx));
static rtx equiv_constant PROTO((rtx));
static void record_jump_equiv PROTO((rtx, int));
static void record_jump_cond PROTO((enum rtx_code, enum machine_mode,
                                    rtx, rtx, int));
static void cse_insn PROTO((rtx, rtx));
static int note_mem_written PROTO((rtx));
static void invalidate_from_clobbers PROTO((rtx));
static rtx cse_process_notes PROTO((rtx, rtx));
static void cse_around_loop PROTO((rtx));
static void invalidate_skipped_set PROTO((rtx, rtx));
static void invalidate_skipped_block PROTO((rtx));
static void cse_check_loop_start PROTO((rtx, rtx));
static void cse_set_around_loop PROTO((rtx, rtx, rtx));
static rtx cse_basic_block PROTO((rtx, rtx, struct branch_path *, int));
static void count_reg_usage PROTO((rtx, int *, rtx, int));

extern int rtx_equal_function_value_matters;
\f
/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.  */

/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (x)
     rtx x;
{
  return ((GET_CODE (x) == SUBREG
           && GET_CODE (SUBREG_REG (x)) == REG
           && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
           && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
           && (GET_MODE_SIZE (GET_MODE (x))
               < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
           && subreg_lowpart_p (x)
           && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
                                     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
          ? (CHEAP_REG (SUBREG_REG (x)) ? 0
             : (REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER ? 1
                : 2))
          : rtx_cost (x, SET) * 2);
}

/* Return the right cost to give to an operation
   to make the cost of the corresponding register-to-register instruction
   N times that of a fast register-to-register instruction.  */

#define COSTS_N_INSNS(N) ((N) * 4 - 2)

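/* With this scale a plain register-to-register operation has the default
   cost 2, so COSTS_N_INSNS (1) == 2, COSTS_N_INSNS (5) == 18 and
   COSTS_N_INSNS (7) == 26; the latter two are exactly the default values
   rtx_cost assigns below to multiplication and to division/modulus.  */
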
int
rtx_cost (x, outer_code)
     rtx x;
     enum rtx_code outer_code ATTRIBUTE_UNUSED;
{
  register int i, j;
  register enum rtx_code code;
  register char *fmt;
  register int total;

  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that RTX_COSTS can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      /* Count multiplication by 2**n as a shift,
         because if we are considering it, we would output it as a shift.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
        total = 2;
      else
        total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker.  */
      total = 0;
      break;
    case ASM_OPERANDS:
      /* We don't want these to be used in substitutions because
         we have no way of validating the resulting insn.  So assign
         anything containing an ASM_OPERANDS a very high cost.  */
      total = 1000;
      break;
    default:
      total = 2;
    }

  switch (code)
    {
    case REG:
      return ! CHEAP_REG (x);

    case SUBREG:
      /* If we can't tie these modes, make this expensive.  The larger
         the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
        return COSTS_N_INSNS (2
                              + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      return 2;
#ifdef RTX_COSTS
      RTX_COSTS (x, code, outer_code);
#endif
#ifdef CONST_COSTS
      CONST_COSTS (x, code, outer_code);
#endif

    default:
#ifdef DEFAULT_RTX_COSTS
      DEFAULT_RTX_COSTS (x, code, outer_code);
#endif
      break;
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
        total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}
\f
/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block ()
{
  register int i;

  next_qty = max_reg;

  bzero ((char *) reg_tick, max_reg * sizeof (int));

  bcopy ((char *) all_minus_one, (char *) reg_in_table,
         max_reg * sizeof (int));
  bcopy ((char *) consec_ints, (char *) reg_qty, max_reg * sizeof (int));
  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < NBUCKETS; i++)
    {
      register struct table_elt *this, *next;
      for (this = table[i]; this; this = next)
        {
          next = this->next_same_hash;
          free_element (this);
        }
    }

  bzero ((char *) table, sizeof table);

  prev_insn = 0;

#ifdef HAVE_cc0
  prev_insn_cc0 = 0;
#endif
}

/* Say that register REG contains a quantity not in any register before
   and initialize that quantity.  */

static void
make_new_qty (reg)
     register int reg;
{
  register int q;

  if (next_qty >= max_qty)
    abort ();

  q = reg_qty[reg] = next_qty++;
  qty_first_reg[q] = reg;
  qty_last_reg[q] = reg;
  qty_const[q] = qty_const_insn[q] = 0;
  qty_comparison_code[q] = UNKNOWN;

  reg_next_eqv[reg] = reg_prev_eqv[reg] = -1;
}

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (new, old)
     register int new, old;
{
  register int lastr, firstr;
  register int q = reg_qty[old];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  reg_qty[new] = q;
  firstr = qty_first_reg[q];
  lastr = qty_last_reg[q];

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
         that not only can they not be allocated by the compiler, but
         they cannot be used in substitutions or canonicalizations
         either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
          || (new >= FIRST_PSEUDO_REGISTER
              && (firstr < FIRST_PSEUDO_REGISTER
                  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
                       || (uid_cuid[REGNO_FIRST_UID (new)]
                           < cse_basic_block_start))
                      && (uid_cuid[REGNO_LAST_UID (new)]
                          > uid_cuid[REGNO_LAST_UID (firstr)]))))))
    {
      reg_prev_eqv[firstr] = new;
      reg_next_eqv[new] = firstr;
      reg_prev_eqv[new] = -1;
      qty_first_reg[q] = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
         Otherwise, insert before any non-fixed hard regs that are at the
         end.  Registers of class NO_REGS cannot be used as an
         equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_prev_eqv[lastr] >= 0
             && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
             && new >= FIRST_PSEUDO_REGISTER)
        lastr = reg_prev_eqv[lastr];
      reg_next_eqv[new] = reg_next_eqv[lastr];
      if (reg_next_eqv[lastr] >= 0)
        reg_prev_eqv[reg_next_eqv[lastr]] = new;
      else
        qty_last_reg[q] = new;
      reg_next_eqv[lastr] = new;
      reg_prev_eqv[new] = lastr;
    }
}

/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (reg)
     register int reg;
{
  register int q = reg_qty[reg];
  register int p, n;

  /* If invalid, do nothing.  */
  if (q == reg)
    return;

  p = reg_prev_eqv[reg];
  n = reg_next_eqv[reg];

  if (n != -1)
    reg_prev_eqv[n] = p;
  else
    qty_last_reg[q] = p;
  if (p != -1)
    reg_next_eqv[p] = n;
  else
    qty_first_reg[q] = n;

  reg_qty[reg] = reg;
}

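/* As a concrete picture of the unlinking above (register numbers purely
   illustrative): if quantity Q's chain is r1 <-> r2 <-> r3 and
   delete_reg_equiv is called for r2, then afterward reg_next_eqv[r1] == r3,
   reg_prev_eqv[r3] == r1, and reg_qty[r2] == r2 again, i.e. r2 no longer
   has a valid quantity.  */
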
/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (x)
     rtx x;
{
  register enum rtx_code code;
  register int i, j;
  register char *fmt;
  register int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      register int regno = REGNO (x);
      register int endregno
        = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
                   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
      int i;

      for (i = regno; i < endregno; i++)
        {
          if (reg_in_table[i] >= 0 && reg_in_table[i] != reg_tick[i])
            remove_invalid_refs (i);

          reg_in_table[i] = reg_tick[i];
        }

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      int i = REGNO (SUBREG_REG (x));

      if (reg_in_table[i] >= 0 && reg_in_table[i] != reg_tick[i])
        {
          /* If reg_tick has been incremented more than once since
             reg_in_table was last set, that means that the entire
             register has been set before, so discard anything memorized
             for the entire register, including all SUBREG expressions.  */
          if (reg_in_table[i] != reg_tick[i] - 1)
            remove_invalid_refs (i);
          else
            remove_invalid_subreg_refs (i, SUBREG_WORD (x), GET_MODE (x));
        }

      reg_in_table[i] = reg_tick[i];
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || GET_RTX_CLASS (code) == '<')
    {
      if (GET_CODE (XEXP (x, 0)) == REG
          && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
        if (insert_regs (XEXP (x, 0), NULL_PTR, 0))
          {
            rehash_using_reg (XEXP (x, 0));
            changed = 1;
          }

      if (GET_CODE (XEXP (x, 1)) == REG
          && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
        if (insert_regs (XEXP (x, 1), NULL_PTR, 0))
          {
            rehash_using_reg (XEXP (x, 1));
            changed = 1;
          }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
        changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}

/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (x, classp, modified)
     rtx x;
     struct table_elt *classp;
     int modified;
{
  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);

      /* If REGNO is in the equivalence table already but is of the
         wrong mode for that equivalence, don't do anything here.  */

      if (REGNO_QTY_VALID_P (regno)
          && qty_mode[reg_qty[regno]] != GET_MODE (x))
        return 0;

      if (modified || ! REGNO_QTY_VALID_P (regno))
        {
          if (classp)
            for (classp = classp->first_same_value;
                 classp != 0;
                 classp = classp->next_same_value)
              if (GET_CODE (classp->exp) == REG
                  && GET_MODE (classp->exp) == GET_MODE (x))
                {
                  make_regs_eqv (regno, REGNO (classp->exp));
                  return 1;
                }

          make_new_qty (regno);
          qty_mode[reg_qty[regno]] = GET_MODE (x);
          return 1;
        }

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
           && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      int regno = REGNO (SUBREG_REG (x));

      insert_regs (SUBREG_REG (x), NULL_PTR, 0);
      /* Mention_regs checks if REG_TICK is exactly one larger than
         REG_IN_TABLE to find out if there was only a single preceding
         invalidation - for the SUBREG - or another one, which would be
         for the full register.  Since we don't invalidate the SUBREG
         here first, we might have to bump up REG_TICK so that mention_regs
         will do the right thing.  */
      if (reg_in_table[regno] >= 0
          && reg_tick[regno] == reg_in_table[regno] + 1)
        reg_tick[regno]++;
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}
\f
/* Look in or update the hash table.  */

/* Put the element ELT on the list of free elements.  */

static void
free_element (elt)
     struct table_elt *elt;
{
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}

/* Return an element that is free for use.  */

static struct table_elt *
get_element ()
{
  struct table_elt *elt = free_element_chain;
  if (elt)
    {
      free_element_chain = elt->next_same_hash;
      return elt;
    }
  n_elements_made++;
  return (struct table_elt *) oballoc (sizeof (struct table_elt));
}

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (elt, hash)
     register struct table_elt *elt;
     unsigned hash;
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    register struct table_elt *prev = elt->prev_same_value;
    register struct table_elt *next = elt->next_same_value;

    if (next) next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
        register struct table_elt *newfirst = next;
        while (next)
          {
            next->first_same_value = newfirst;
            next = next->next_same_value;
          }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    register struct table_elt *prev = elt->prev_same_hash;
    register struct table_elt *next = elt->next_same_hash;

    if (next) next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
        /* This entry is not in the proper hash bucket.  This can happen
           when two classes were merged by `merge_equiv_classes'.  Search
           for the hash bucket that it heads.  This happens only very
           rarely, so the cost is acceptable.  */
        for (hash = 0; hash < NBUCKETS; hash++)
          if (table[hash] == elt)
            table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      register struct table_elt *p = elt->related_value;
      while (p->related_value != elt)
        p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
        p->related_value = 0;
    }

  free_element (elt);
}

/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
                            || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
      return p;

  return 0;
}

/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  if (GET_CODE (x) == REG)
    {
      int regno = REGNO (x);
      /* Don't check the machine mode when comparing registers;
         invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
        if (GET_CODE (p->exp) == REG
            && REGNO (p->exp) == regno)
          return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
        if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
          return p;
    }

  return 0;
}

/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (x, code)
     rtx x;
     enum rtx_code code;
{
  register struct table_elt *p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS,
                                         GET_MODE (x));
  /* If we are looking for a CONST_INT, the mode doesn't really matter, as
     long as we are narrowing.  So if we looked in vain for a mode narrower
     than word_mode before, look for word_mode now.  */
  if (p == 0 && code == CONST_INT
      && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
    {
      x = copy_rtx (x);
      PUT_MODE (x, word_mode);
      p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS, word_mode);
    }

  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    {
      if (GET_CODE (p->exp) == code
          /* Make sure this is a valid entry in the table.  */
          && exp_equiv_p (p->exp, p->exp, 1, 0))
        return p->exp;
    }

  return 0;
}

/* Insert X in the hash table, assuming HASH is its hash code
   and CLASSP is an element of the class it should go in
   (or 0 if a new class should be made).
   It is inserted at the proper position to keep the class in
   the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */

#define CHEAPER(X,Y) ((X)->cost < (Y)->cost)

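/* A note on the ordering CHEAPER maintains: as the comment inside `insert'
   explains, a constant is the only thing that can be cheaper than a
   register, so when a constant joins a class headed by a register it
   becomes the new head; the qty_const bookkeeping at the end of `insert'
   relies on that.  This describes existing behavior, not a new rule.  */
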
1344 static struct table_elt *
1345 insert (x, classp, hash, mode)
1346 register rtx x;
1347 register struct table_elt *classp;
1348 unsigned hash;
1349 enum machine_mode mode;
1350 {
1351 register struct table_elt *elt;
1352
1353 /* If X is a register and we haven't made a quantity for it,
1354 something is wrong. */
1355 if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
1356 abort ();
1357
1358 /* If X is a hard register, show it is being put in the table. */
1359 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
1360 {
1361 int regno = REGNO (x);
1362 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1363 int i;
1364
1365 for (i = regno; i < endregno; i++)
1366 SET_HARD_REG_BIT (hard_regs_in_table, i);
1367 }
1368
1369 /* If X is a label, show we recorded it. */
1370 if (GET_CODE (x) == LABEL_REF
1371 || (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS
1372 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF))
1373 recorded_label_ref = 1;
1374
1375 /* Put an element for X into the right hash bucket. */
1376
1377 elt = get_element ();
1378 elt->exp = x;
1379 elt->cost = COST (x);
1380 elt->next_same_value = 0;
1381 elt->prev_same_value = 0;
1382 elt->next_same_hash = table[hash];
1383 elt->prev_same_hash = 0;
1384 elt->related_value = 0;
1385 elt->in_memory = 0;
1386 elt->mode = mode;
1387 elt->is_const = (CONSTANT_P (x)
1388 /* GNU C++ takes advantage of this for `this'
1389 (and other const values). */
1390 || (RTX_UNCHANGING_P (x)
1391 && GET_CODE (x) == REG
1392 && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1393 || FIXED_BASE_PLUS_P (x));
1394
1395 if (table[hash])
1396 table[hash]->prev_same_hash = elt;
1397 table[hash] = elt;
1398
1399 /* Put it into the proper value-class. */
1400 if (classp)
1401 {
1402 classp = classp->first_same_value;
1403 if (CHEAPER (elt, classp))
1404 /* Insert at the head of the class */
1405 {
1406 register struct table_elt *p;
1407 elt->next_same_value = classp;
1408 classp->prev_same_value = elt;
1409 elt->first_same_value = elt;
1410
1411 for (p = classp; p; p = p->next_same_value)
1412 p->first_same_value = elt;
1413 }
1414 else
1415 {
1416 /* Insert not at head of the class. */
1417 /* Put it after the last element cheaper than X. */
1418 register struct table_elt *p, *next;
1419 for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1420 p = next);
1421 /* Put it after P and before NEXT. */
1422 elt->next_same_value = next;
1423 if (next)
1424 next->prev_same_value = elt;
1425 elt->prev_same_value = p;
1426 p->next_same_value = elt;
1427 elt->first_same_value = classp;
1428 }
1429 }
1430 else
1431 elt->first_same_value = elt;
1432
1433 /* If this is a constant being set equivalent to a register or a register
1434 being set equivalent to a constant, note the constant equivalence.
1435
1436 If this is a constant, it cannot be equivalent to a different constant,
1437 and a constant is the only thing that can be cheaper than a register. So
1438 we know the register is the head of the class (before the constant was
1439 inserted).
1440
1441 If this is a register that is not already known equivalent to a
1442 constant, we must check the entire class.
1443
1444 If this is a register that is already known equivalent to an insn,
1445 update `qty_const_insn' to show that `this_insn' is the latest
1446 insn making that quantity equivalent to the constant. */
1447
1448 if (elt->is_const && classp && GET_CODE (classp->exp) == REG
1449 && GET_CODE (x) != REG)
1450 {
1451 qty_const[reg_qty[REGNO (classp->exp)]]
1452 = gen_lowpart_if_possible (qty_mode[reg_qty[REGNO (classp->exp)]], x);
1453 qty_const_insn[reg_qty[REGNO (classp->exp)]] = this_insn;
1454 }
1455
1456 else if (GET_CODE (x) == REG && classp && ! qty_const[reg_qty[REGNO (x)]]
1457 && ! elt->is_const)
1458 {
1459 register struct table_elt *p;
1460
1461 for (p = classp; p != 0; p = p->next_same_value)
1462 {
1463 if (p->is_const && GET_CODE (p->exp) != REG)
1464 {
1465 qty_const[reg_qty[REGNO (x)]]
1466 = gen_lowpart_if_possible (GET_MODE (x), p->exp);
1467 qty_const_insn[reg_qty[REGNO (x)]] = this_insn;
1468 break;
1469 }
1470 }
1471 }
1472
1473 else if (GET_CODE (x) == REG && qty_const[reg_qty[REGNO (x)]]
1474 && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]])
1475 qty_const_insn[reg_qty[REGNO (x)]] = this_insn;
1476
1477 /* If this is a constant with symbolic value,
1478 and it has a term with an explicit integer value,
1479 link it up with related expressions. */
1480 if (GET_CODE (x) == CONST)
1481 {
1482 rtx subexp = get_related_value (x);
1483 unsigned subhash;
1484 struct table_elt *subelt, *subelt_prev;
1485
1486 if (subexp != 0)
1487 {
1488 /* Get the integer-free subexpression in the hash table. */
1489 subhash = safe_hash (subexp, mode) % NBUCKETS;
1490 subelt = lookup (subexp, subhash, mode);
1491 if (subelt == 0)
1492 subelt = insert (subexp, NULL_PTR, subhash, mode);
1493 /* Initialize SUBELT's circular chain if it has none. */
1494 if (subelt->related_value == 0)
1495 subelt->related_value = subelt;
1496 /* Find the element in the circular chain that precedes SUBELT. */
1497 subelt_prev = subelt;
1498 while (subelt_prev->related_value != subelt)
1499 subelt_prev = subelt_prev->related_value;
1500 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1501 This way the element that follows SUBELT is the oldest one. */
1502 elt->related_value = subelt_prev->related_value;
1503 subelt_prev->related_value = elt;
1504 }
1505 }
1506
1507 return elt;
1508 }
1509 \f
1510 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1511 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1512 the two classes equivalent.
1513
1514 CLASS1 will be the surviving class; CLASS2 should not be used after this
1515 call.
1516
1517 Any invalid entries in CLASS2 will not be copied. */
1518
1519 static void
1520 merge_equiv_classes (class1, class2)
1521 struct table_elt *class1, *class2;
1522 {
1523 struct table_elt *elt, *next, *new;
1524
1525 /* Ensure we start with the head of the classes. */
1526 class1 = class1->first_same_value;
1527 class2 = class2->first_same_value;
1528
1529 /* If they were already equal, forget it. */
1530 if (class1 == class2)
1531 return;
1532
1533 for (elt = class2; elt; elt = next)
1534 {
1535 unsigned hash;
1536 rtx exp = elt->exp;
1537 enum machine_mode mode = elt->mode;
1538
1539 next = elt->next_same_value;
1540
1541 /* Remove old entry, make a new one in CLASS1's class.
1542 Don't do this for invalid entries as we cannot find their
1543 hash code (it also isn't necessary). */
1544 if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
1545 {
1546 hash_arg_in_memory = 0;
1547 hash_arg_in_struct = 0;
1548 hash = HASH (exp, mode);
1549
1550 if (GET_CODE (exp) == REG)
1551 delete_reg_equiv (REGNO (exp));
1552
1553 remove_from_table (elt, hash);
1554
1555 if (insert_regs (exp, class1, 0))
1556 {
1557 rehash_using_reg (exp);
1558 hash = HASH (exp, mode);
1559 }
1560 new = insert (exp, class1, hash, mode);
1561 new->in_memory = hash_arg_in_memory;
1562 new->in_struct = hash_arg_in_struct;
1563 }
1564 }
1565 }
1566 \f
1567 /* Remove from the hash table, or mark as invalid,
1568 all expressions whose values could be altered by storing in X.
1569 X is a register, a subreg, or a memory reference with nonvarying address
1570 (because, when a memory reference with a varying address is stored in,
1571 all memory references are removed by invalidate_memory
1572 so specific invalidation is superfluous).
1573 FULL_MODE, if not VOIDmode, indicates that this much should be invalidated
1574 instead of just the amount indicated by the mode of X. This is only used
1575 for bitfield stores into memory.
1576
1577 A nonvarying address may be just a register or just
1578 a symbol reference, or it may be either of those plus
1579 a numeric offset. */
1580
1581 static void
1582 invalidate (x, full_mode)
1583 rtx x;
1584 enum machine_mode full_mode;
1585 {
1586 register int i;
1587 register struct table_elt *p;
1588
1589 /* If X is a register, dependencies on its contents
1590 are recorded through the qty number mechanism.
1591 Just change the qty number of the register,
1592 mark it as invalid for expressions that refer to it,
1593 and remove it itself. */
1594
1595 if (GET_CODE (x) == REG)
1596 {
1597 register int regno = REGNO (x);
1598 register unsigned hash = HASH (x, GET_MODE (x));
1599
1600 /* Remove REGNO from any quantity list it might be on and indicate
1601 that its value might have changed. If it is a pseudo, remove its
1602 entry from the hash table.
1603
1604 For a hard register, we do the first two actions above for any
1605 additional hard registers corresponding to X. Then, if any of these
1606 registers are in the table, we must remove any REG entries that
1607 overlap these registers. */
1608
1609 delete_reg_equiv (regno);
1610 reg_tick[regno]++;
1611
1612 if (regno >= FIRST_PSEUDO_REGISTER)
1613 {
1614 /* Because a register can be referenced in more than one mode,
1615 we might have to remove more than one table entry. */
1616
1617 struct table_elt *elt;
1618
1619 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1620 remove_from_table (elt, hash);
1621 }
1622 else
1623 {
1624 HOST_WIDE_INT in_table
1625 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1626 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1627 int tregno, tendregno;
1628 register struct table_elt *p, *next;
1629
1630 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1631
1632 for (i = regno + 1; i < endregno; i++)
1633 {
1634 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, i);
1635 CLEAR_HARD_REG_BIT (hard_regs_in_table, i);
1636 delete_reg_equiv (i);
1637 reg_tick[i]++;
1638 }
1639
1640 if (in_table)
1641 for (hash = 0; hash < NBUCKETS; hash++)
1642 for (p = table[hash]; p; p = next)
1643 {
1644 next = p->next_same_hash;
1645
1646 if (GET_CODE (p->exp) != REG
1647 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1648 continue;
1649
1650 tregno = REGNO (p->exp);
1651 tendregno
1652 = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1653 if (tendregno > regno && tregno < endregno)
1654 remove_from_table (p, hash);
1655 }
1656 }
1657
1658 return;
1659 }
1660
1661 if (GET_CODE (x) == SUBREG)
1662 {
1663 if (GET_CODE (SUBREG_REG (x)) != REG)
1664 abort ();
1665 invalidate (SUBREG_REG (x), VOIDmode);
1666 return;
1667 }
1668
1669 /* If X is a parallel, invalidate all of its elements. */
1670
1671 if (GET_CODE (x) == PARALLEL)
1672 {
1673 for (i = XVECLEN (x, 0) - 1; i >= 0 ; --i)
1674 invalidate (XVECEXP (x, 0, i), VOIDmode);
1675 return;
1676 }
1677
1678 /* If X is an expr_list, this is part of a disjoint return value;
1679 extract the location in question ignoring the offset. */
1680
1681 if (GET_CODE (x) == EXPR_LIST)
1682 {
1683 invalidate (XEXP (x, 0), VOIDmode);
1684 return;
1685 }
1686
1687 /* X is not a register; it must be a memory reference with
1688 a nonvarying address. Remove all hash table elements
1689 that refer to overlapping pieces of memory. */
1690
1691 if (GET_CODE (x) != MEM)
1692 abort ();
1693
1694 if (full_mode == VOIDmode)
1695 full_mode = GET_MODE (x);
1696
1697 for (i = 0; i < NBUCKETS; i++)
1698 {
1699 register struct table_elt *next;
1700 for (p = table[i]; p; p = next)
1701 {
1702 next = p->next_same_hash;
1703 /* Invalidate ASM_OPERANDS which reference memory (this is easier
1704 than checking all the aliases). */
1705 if (p->in_memory
1706 && (GET_CODE (p->exp) != MEM
1707 || true_dependence (x, full_mode, p->exp, cse_rtx_varies_p)))
1708 remove_from_table (p, i);
1709 }
1710 }
1711 }
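
/* Illustrative sketch (hypothetical helper, not from the original source):
   the hard-register overlap test used by invalidate above.  Two half-open
   ranges [REGNO, ENDREGNO) and [TREGNO, TENDREGNO) overlap exactly when
   each one starts before the other one ends.  */

static int
toy_ranges_overlap (regno, endregno, tregno, tendregno)
     int regno, endregno, tregno, tendregno;
{
  return tendregno > regno && tregno < endregno;
}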
1712
1713 /* Remove all expressions that refer to register REGNO,
1714 since they are already invalid, and we are about to
1715 mark that register valid again and don't want the old
1716 expressions to reappear as valid. */
1717
1718 static void
1719 remove_invalid_refs (regno)
1720 int regno;
1721 {
1722 register int i;
1723 register struct table_elt *p, *next;
1724
1725 for (i = 0; i < NBUCKETS; i++)
1726 for (p = table[i]; p; p = next)
1727 {
1728 next = p->next_same_hash;
1729 if (GET_CODE (p->exp) != REG
1730 && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
1731 remove_from_table (p, i);
1732 }
1733 }
1734
1735 /* Likewise for a subreg with subreg_reg REGNO, subreg_word WORD, and mode MODE. */
1736 static void
1737 remove_invalid_subreg_refs (regno, word, mode)
1738 int regno;
1739 int word;
1740 enum machine_mode mode;
1741 {
1742 register int i;
1743 register struct table_elt *p, *next;
1744 int end = word + (GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD;
1745
1746 for (i = 0; i < NBUCKETS; i++)
1747 for (p = table[i]; p; p = next)
1748 {
1749 rtx exp;
1750 next = p->next_same_hash;
1751
1752 exp = p->exp;
1753 if (GET_CODE (p->exp) != REG
1754 && (GET_CODE (exp) != SUBREG
1755 || GET_CODE (SUBREG_REG (exp)) != REG
1756 || REGNO (SUBREG_REG (exp)) != regno
1757 || (((SUBREG_WORD (exp)
1758 + (GET_MODE_SIZE (GET_MODE (exp)) - 1) / UNITS_PER_WORD)
1759 >= word)
1760 && SUBREG_WORD (exp) <= end))
1761 && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
1762 remove_from_table (p, i);
1763 }
1764 }
1765 \f
1766 /* Recompute the hash codes of any valid entries in the hash table that
1767 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1768
1769 This is called when we make a jump equivalence. */
1770
1771 static void
1772 rehash_using_reg (x)
1773 rtx x;
1774 {
1775 unsigned int i;
1776 struct table_elt *p, *next;
1777 unsigned hash;
1778
1779 if (GET_CODE (x) == SUBREG)
1780 x = SUBREG_REG (x);
1781
1782 /* If X is not a register or if the register is known not to be in any
1783 valid entries in the table, we have no work to do. */
1784
1785 if (GET_CODE (x) != REG
1786 || reg_in_table[REGNO (x)] < 0
1787 || reg_in_table[REGNO (x)] != reg_tick[REGNO (x)])
1788 return;
1789
1790 /* Scan all hash chains looking for valid entries that mention X.
1791 If we find one and it is in the wrong hash chain, move it. We can skip
1792 objects that are registers, since they are handled specially. */
1793
1794 for (i = 0; i < NBUCKETS; i++)
1795 for (p = table[i]; p; p = next)
1796 {
1797 next = p->next_same_hash;
1798 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
1799 && exp_equiv_p (p->exp, p->exp, 1, 0)
1800 && i != (hash = safe_hash (p->exp, p->mode) % NBUCKETS))
1801 {
1802 if (p->next_same_hash)
1803 p->next_same_hash->prev_same_hash = p->prev_same_hash;
1804
1805 if (p->prev_same_hash)
1806 p->prev_same_hash->next_same_hash = p->next_same_hash;
1807 else
1808 table[i] = p->next_same_hash;
1809
1810 p->next_same_hash = table[hash];
1811 p->prev_same_hash = 0;
1812 if (table[hash])
1813 table[hash]->prev_same_hash = p;
1814 table[hash] = p;
1815 }
1816 }
1817 }
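
/* Illustrative sketch (hypothetical types, not from the original source):
   the doubly linked unlink/relink done by rehash_using_reg above, moving
   node P from bucket FROM to the head of bucket TO.  */

struct toy_node { struct toy_node *next, *prev; };

static void
toy_move_node (heads, from, to, p)
     struct toy_node **heads;
     int from, to;
     struct toy_node *p;
{
  /* Unlink P from bucket FROM; P may be the bucket head.  */
  if (p->next)
    p->next->prev = p->prev;
  if (p->prev)
    p->prev->next = p->next;
  else
    heads[from] = p->next;

  /* Link P in at the head of bucket TO.  */
  p->next = heads[to];
  p->prev = 0;
  if (heads[to])
    heads[to]->prev = p;
  heads[to] = p;
}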
1818 \f
1819 /* Remove from the hash table any expression that is a call-clobbered
1820 register. Also update their TICK values. */
1821
1822 static void
1823 invalidate_for_call ()
1824 {
1825 int regno, endregno;
1826 int i;
1827 unsigned hash;
1828 struct table_elt *p, *next;
1829 int in_table = 0;
1830
1831 /* Go through all the hard registers. For each that is clobbered in
1832 a CALL_INSN, remove the register from quantity chains and update
1833 reg_tick if defined. Also see if any of these registers is currently
1834 in the table. */
1835
1836 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1837 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1838 {
1839 delete_reg_equiv (regno);
1840 if (reg_tick[regno] >= 0)
1841 reg_tick[regno]++;
1842
1843 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
1844 }
1845
1846 /* In the case where we have no call-clobbered hard registers in the
1847 table, we are done. Otherwise, scan the table and remove any
1848 entry that overlaps a call-clobbered register. */
1849
1850 if (in_table)
1851 for (hash = 0; hash < NBUCKETS; hash++)
1852 for (p = table[hash]; p; p = next)
1853 {
1854 next = p->next_same_hash;
1855
1856 if (p->in_memory)
1857 {
1858 remove_from_table (p, hash);
1859 continue;
1860 }
1861
1862 if (GET_CODE (p->exp) != REG
1863 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1864 continue;
1865
1866 regno = REGNO (p->exp);
1867 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
1868
1869 for (i = regno; i < endregno; i++)
1870 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1871 {
1872 remove_from_table (p, hash);
1873 break;
1874 }
1875 }
1876 }
1877 \f
1878 /* Given an expression X of type CONST,
1879 and ELT which is its table entry (or 0 if it
1880 is not in the hash table),
1881 return an alternate expression for X as a register plus integer.
1882 If none can be found, return 0. */
1883
1884 static rtx
1885 use_related_value (x, elt)
1886 rtx x;
1887 struct table_elt *elt;
1888 {
1889 register struct table_elt *relt = 0;
1890 register struct table_elt *p, *q;
1891 HOST_WIDE_INT offset;
1892
1893 /* First, is there anything related known?
1894 If we have a table element, we can tell from that.
1895 Otherwise, must look it up. */
1896
1897 if (elt != 0 && elt->related_value != 0)
1898 relt = elt;
1899 else if (elt == 0 && GET_CODE (x) == CONST)
1900 {
1901 rtx subexp = get_related_value (x);
1902 if (subexp != 0)
1903 relt = lookup (subexp,
1904 safe_hash (subexp, GET_MODE (subexp)) % NBUCKETS,
1905 GET_MODE (subexp));
1906 }
1907
1908 if (relt == 0)
1909 return 0;
1910
1911 /* Search all related table entries for one that has an
1912 equivalent register. */
1913
1914 p = relt;
1915 while (1)
1916 {
1917 /* This loop is strange in that it is executed in two different cases.
1918 The first is when X is already in the table. Then it is searching
1919 the RELATED_VALUE list of X's class (RELT). The second case is when
1920 X is not in the table. Then RELT points to a class for the related
1921 value.
1922
1923 Ensure that, whatever case we are in, we ignore classes that have
1924 the same value as X. */
1925
1926 if (rtx_equal_p (x, p->exp))
1927 q = 0;
1928 else
1929 for (q = p->first_same_value; q; q = q->next_same_value)
1930 if (GET_CODE (q->exp) == REG)
1931 break;
1932
1933 if (q)
1934 break;
1935
1936 p = p->related_value;
1937
1938 /* We went all the way around, so there is nothing to be found.
1939 Alternatively, perhaps RELT was in the table for some other reason
1940 and it has no related values recorded. */
1941 if (p == relt || p == 0)
1942 break;
1943 }
1944
1945 if (q == 0)
1946 return 0;
1947
1948 offset = (get_integer_term (x) - get_integer_term (p->exp));
1949 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
1950 return plus_constant (q->exp, offset);
1951 }
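
/* Illustrative sketch (hypothetical types): the circular-list walk that
   use_related_value performs above.  The traversal stops when it finds a
   match, comes back around to where it started, or falls off a node that
   is not on any ring.  */

struct toy_ring { struct toy_ring *link; int key; };

static struct toy_ring *
toy_ring_find (start, want)
     struct toy_ring *start;
     int want;
{
  struct toy_ring *p = start;

  while (1)
    {
      if (p->key == want)
	return p;

      p = p->link;

      /* Went all the way around, or START was not on a ring at all.  */
      if (p == start || p == 0)
	return 0;
    }
}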
1952 \f
1953 /* Hash an rtx. We are careful to make sure the value is never negative.
1954 Equivalent registers hash identically.
1955 MODE is used in hashing for CONST_INTs only;
1956 otherwise the mode of X is used.
1957
1958 Store 1 in do_not_record if any subexpression is volatile.
1959
1960 Store 1 in hash_arg_in_memory if X contains a MEM rtx
1961 which does not have the RTX_UNCHANGING_P bit set.
1962 In this case, also store 1 in hash_arg_in_struct
1963 if there is a MEM rtx which has the MEM_IN_STRUCT_P bit set.
1964
1965 Note that cse_insn knows that the hash code of a MEM expression
1966 is just (int) MEM plus the hash code of the address. */
1967
1968 static unsigned
1969 canon_hash (x, mode)
1970 rtx x;
1971 enum machine_mode mode;
1972 {
1973 register int i, j;
1974 register unsigned hash = 0;
1975 register enum rtx_code code;
1976 register char *fmt;
1977
1978 /* repeat is used to turn tail-recursion into iteration. */
1979 repeat:
1980 if (x == 0)
1981 return hash;
1982
1983 code = GET_CODE (x);
1984 switch (code)
1985 {
1986 case REG:
1987 {
1988 register int regno = REGNO (x);
1989
1990 /* On some machines, we can't record any non-fixed hard register,
1991 because extending its life will cause reload problems. We
1992 consider ap, fp, and sp to be fixed for this purpose.
1993 On all machines, we can't record any global registers. */
1994
1995 if (regno < FIRST_PSEUDO_REGISTER
1996 && (global_regs[regno]
1997 || (SMALL_REGISTER_CLASSES
1998 && ! fixed_regs[regno]
1999 && regno != FRAME_POINTER_REGNUM
2000 && regno != HARD_FRAME_POINTER_REGNUM
2001 && regno != ARG_POINTER_REGNUM
2002 && regno != STACK_POINTER_REGNUM)))
2003 {
2004 do_not_record = 1;
2005 return 0;
2006 }
2007 hash += ((unsigned) REG << 7) + (unsigned) reg_qty[regno];
2008 return hash;
2009 }
2010
2011 /* We handle SUBREG of a REG specially because the underlying
2012 reg changes its hash value with every value change; we don't
2013 want to have to forget unrelated subregs when one subreg changes. */
2014 case SUBREG:
2015 {
2016 if (GET_CODE (SUBREG_REG (x)) == REG)
2017 {
2018 hash += (((unsigned) SUBREG << 7)
2019 + REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
2020 return hash;
2021 }
2022 break;
2023 }
2024
2025 case CONST_INT:
2026 {
2027 unsigned HOST_WIDE_INT tem = INTVAL (x);
2028 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2029 return hash;
2030 }
2031
2032 case CONST_DOUBLE:
2033 /* This is like the general case, except that it only counts
2034 the integers representing the constant. */
2035 hash += (unsigned) code + (unsigned) GET_MODE (x);
2036 if (GET_MODE (x) != VOIDmode)
2037 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
2038 {
2039 unsigned tem = XINT (x, i);
2040 hash += tem;
2041 }
2042 else
2043 hash += ((unsigned) CONST_DOUBLE_LOW (x)
2044 + (unsigned) CONST_DOUBLE_HIGH (x));
2045 return hash;
2046
2047 /* Assume there is only one rtx object for any given label. */
2048 case LABEL_REF:
2049 hash
2050 += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2051 return hash;
2052
2053 case SYMBOL_REF:
2054 hash
2055 += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2056 return hash;
2057
2058 case MEM:
2059 if (MEM_VOLATILE_P (x))
2060 {
2061 do_not_record = 1;
2062 return 0;
2063 }
2064 if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2065 {
2066 hash_arg_in_memory = 1;
2067 if (MEM_IN_STRUCT_P (x)) hash_arg_in_struct = 1;
2068 }
2069 /* Now that we have already found this special case,
2070 might as well speed it up as much as possible. */
2071 hash += (unsigned) MEM;
2072 x = XEXP (x, 0);
2073 goto repeat;
2074
2075 case PRE_DEC:
2076 case PRE_INC:
2077 case POST_DEC:
2078 case POST_INC:
2079 case PC:
2080 case CC0:
2081 case CALL:
2082 case UNSPEC_VOLATILE:
2083 do_not_record = 1;
2084 return 0;
2085
2086 case ASM_OPERANDS:
2087 if (MEM_VOLATILE_P (x))
2088 {
2089 do_not_record = 1;
2090 return 0;
2091 }
2092 break;
2093
2094 default:
2095 break;
2096 }
2097
2098 i = GET_RTX_LENGTH (code) - 1;
2099 hash += (unsigned) code + (unsigned) GET_MODE (x);
2100 fmt = GET_RTX_FORMAT (code);
2101 for (; i >= 0; i--)
2102 {
2103 if (fmt[i] == 'e')
2104 {
2105 rtx tem = XEXP (x, i);
2106
2107 /* If we are about to do the last recursive call
2108 needed at this level, change it into iteration.
2109 This function is called enough to be worth it. */
2110 if (i == 0)
2111 {
2112 x = tem;
2113 goto repeat;
2114 }
2115 hash += canon_hash (tem, 0);
2116 }
2117 else if (fmt[i] == 'E')
2118 for (j = 0; j < XVECLEN (x, i); j++)
2119 hash += canon_hash (XVECEXP (x, i, j), 0);
2120 else if (fmt[i] == 's')
2121 {
2122 register unsigned char *p = (unsigned char *) XSTR (x, i);
2123 if (p)
2124 while (*p)
2125 hash += *p++;
2126 }
2127 else if (fmt[i] == 'i')
2128 {
2129 register unsigned tem = XINT (x, i);
2130 hash += tem;
2131 }
2132 else if (fmt[i] == '0')
2133 /* unused */;
2134 else
2135 abort ();
2136 }
2137 return hash;
2138 }
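
/* Illustrative sketch (hypothetical toy tree, not from the original
   source): the `repeat:' trick canon_hash uses above.  The last recursive
   call of the tree walk is rewritten as an assignment plus a jump, so a
   deep chain of first operands costs no stack.  */

struct toy_tree { int code; struct toy_tree *op0, *op1; };

static unsigned
toy_tree_hash (t)
     struct toy_tree *t;
{
  unsigned hash = 0;

 repeat:
  if (t == 0)
    return hash;

  hash += (unsigned) t->code;
  if (t->op1)
    hash += toy_tree_hash (t->op1);	/* genuine recursion here */
  if (t->op0)
    {
      t = t->op0;			/* final call becomes iteration */
      goto repeat;
    }
  return hash;
}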
2139
2140 /* Like canon_hash but with no side effects. */
2141
2142 static unsigned
2143 safe_hash (x, mode)
2144 rtx x;
2145 enum machine_mode mode;
2146 {
2147 int save_do_not_record = do_not_record;
2148 int save_hash_arg_in_memory = hash_arg_in_memory;
2149 int save_hash_arg_in_struct = hash_arg_in_struct;
2150 unsigned hash = canon_hash (x, mode);
2151 hash_arg_in_memory = save_hash_arg_in_memory;
2152 hash_arg_in_struct = save_hash_arg_in_struct;
2153 do_not_record = save_do_not_record;
2154 return hash;
2155 }
2156 \f
2157 /* Return 1 iff X and Y would canonicalize into the same thing,
2158 without actually constructing the canonicalization of either one.
2159 If VALIDATE is nonzero,
2160 we assume X is an expression being processed from the rtl
2161 and Y was found in the hash table. We check register refs
2162 in Y for being marked as valid.
2163
2164 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2165 that is known to be in the register. Ordinarily, we don't allow them
2166 to match, because letting them match would cause unpredictable results
2167 in all the places that search a hash table chain for an equivalent
2168 for a given value. A possible equivalent that has different structure
2169 has its hash code computed from different data. Whether the hash code
2170 is the same as that of the given value is pure luck. */
2171
2172 static int
2173 exp_equiv_p (x, y, validate, equal_values)
2174 rtx x, y;
2175 int validate;
2176 int equal_values;
2177 {
2178 register int i, j;
2179 register enum rtx_code code;
2180 register char *fmt;
2181
2182 /* Note: it is incorrect to assume an expression is equivalent to itself
2183 if VALIDATE is nonzero. */
2184 if (x == y && !validate)
2185 return 1;
2186 if (x == 0 || y == 0)
2187 return x == y;
2188
2189 code = GET_CODE (x);
2190 if (code != GET_CODE (y))
2191 {
2192 if (!equal_values)
2193 return 0;
2194
2195 /* If X is a constant and Y is a register or vice versa, they may be
2196 equivalent. We only have to validate if Y is a register. */
2197 if (CONSTANT_P (x) && GET_CODE (y) == REG
2198 && REGNO_QTY_VALID_P (REGNO (y))
2199 && GET_MODE (y) == qty_mode[reg_qty[REGNO (y)]]
2200 && rtx_equal_p (x, qty_const[reg_qty[REGNO (y)]])
2201 && (! validate || reg_in_table[REGNO (y)] == reg_tick[REGNO (y)]))
2202 return 1;
2203
2204 if (CONSTANT_P (y) && code == REG
2205 && REGNO_QTY_VALID_P (REGNO (x))
2206 && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]]
2207 && rtx_equal_p (y, qty_const[reg_qty[REGNO (x)]]))
2208 return 1;
2209
2210 return 0;
2211 }
2212
2213 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2214 if (GET_MODE (x) != GET_MODE (y))
2215 return 0;
2216
2217 switch (code)
2218 {
2219 case PC:
2220 case CC0:
2221 return x == y;
2222
2223 case CONST_INT:
2224 return INTVAL (x) == INTVAL (y);
2225
2226 case LABEL_REF:
2227 return XEXP (x, 0) == XEXP (y, 0);
2228
2229 case SYMBOL_REF:
2230 return XSTR (x, 0) == XSTR (y, 0);
2231
2232 case REG:
2233 {
2234 int regno = REGNO (y);
2235 int endregno
2236 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2237 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2238 int i;
2239
2240 /* If the quantities are not the same, the expressions are not
2241 equivalent. If they are and we are not to validate, they
2242 are equivalent. Otherwise, ensure all regs are up-to-date. */
2243
2244 if (reg_qty[REGNO (x)] != reg_qty[regno])
2245 return 0;
2246
2247 if (! validate)
2248 return 1;
2249
2250 for (i = regno; i < endregno; i++)
2251 if (reg_in_table[i] != reg_tick[i])
2252 return 0;
2253
2254 return 1;
2255 }
2256
2257 /* For commutative operations, check both orders. */
2258 case PLUS:
2259 case MULT:
2260 case AND:
2261 case IOR:
2262 case XOR:
2263 case NE:
2264 case EQ:
2265 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2266 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2267 validate, equal_values))
2268 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2269 validate, equal_values)
2270 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2271 validate, equal_values)));
2272
2273 default:
2274 break;
2275 }
2276
2277 /* Compare the elements. If any pair of corresponding elements
2278 fail to match, return 0 for the whole thing. */
2279
2280 fmt = GET_RTX_FORMAT (code);
2281 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2282 {
2283 switch (fmt[i])
2284 {
2285 case 'e':
2286 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2287 return 0;
2288 break;
2289
2290 case 'E':
2291 if (XVECLEN (x, i) != XVECLEN (y, i))
2292 return 0;
2293 for (j = 0; j < XVECLEN (x, i); j++)
2294 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2295 validate, equal_values))
2296 return 0;
2297 break;
2298
2299 case 's':
2300 if (strcmp (XSTR (x, i), XSTR (y, i)))
2301 return 0;
2302 break;
2303
2304 case 'i':
2305 if (XINT (x, i) != XINT (y, i))
2306 return 0;
2307 break;
2308
2309 case 'w':
2310 if (XWINT (x, i) != XWINT (y, i))
2311 return 0;
2312 break;
2313
2314 case '0':
2315 break;
2316
2317 default:
2318 abort ();
2319 }
2320 }
2321
2322 return 1;
2323 }
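
/* Illustrative sketch (hypothetical leaf values): the two-order check the
   commutative cases of exp_equiv_p make above.  A commutative operation
   matches if its operands match in the given order or in the swapped
   order.  */

static int
toy_commutative_equiv (x0, x1, y0, y1)
     int x0, x1, y0, y1;
{
  return (x0 == y0 && x1 == y1) || (x0 == y1 && x1 == y0);
}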
2324 \f
2325 /* Return 1 iff any subexpression of X matches Y.
2326 Here we do not require that X or Y be valid (for registers referred to)
2327 for being in the hash table. */
2328
2329 static int
2330 refers_to_p (x, y)
2331 rtx x, y;
2332 {
2333 register int i;
2334 register enum rtx_code code;
2335 register char *fmt;
2336
2337 repeat:
2338 if (x == y)
2339 return 1;
2340 if (x == 0 || y == 0)
2341 return 0;
2342
2343 code = GET_CODE (x);
2344 /* If X as a whole has the same code as Y, they may match.
2345 If so, return 1. */
2346 if (code == GET_CODE (y))
2347 {
2348 if (exp_equiv_p (x, y, 0, 1))
2349 return 1;
2350 }
2351
2352 /* X does not match, so try its subexpressions. */
2353
2354 fmt = GET_RTX_FORMAT (code);
2355 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2356 if (fmt[i] == 'e')
2357 {
2358 if (i == 0)
2359 {
2360 x = XEXP (x, 0);
2361 goto repeat;
2362 }
2363 else
2364 if (refers_to_p (XEXP (x, i), y))
2365 return 1;
2366 }
2367 else if (fmt[i] == 'E')
2368 {
2369 int j;
2370 for (j = 0; j < XVECLEN (x, i); j++)
2371 if (refers_to_p (XVECEXP (x, i, j), y))
2372 return 1;
2373 }
2374
2375 return 0;
2376 }
2377 \f
2378 /* Given ADDR and SIZE (a memory address, and the size of the memory reference),
2379 set PBASE, PSTART, and PEND which correspond to the base of the address,
2380 the starting offset, and ending offset respectively.
2381
2382 ADDR is known to be a nonvarying address. */
2383
2384 /* ??? Despite what the comments say, this function is in fact frequently
2385 passed varying addresses. This does not appear to cause any problems. */
2386
2387 static void
2388 set_nonvarying_address_components (addr, size, pbase, pstart, pend)
2389 rtx addr;
2390 int size;
2391 rtx *pbase;
2392 HOST_WIDE_INT *pstart, *pend;
2393 {
2394 rtx base;
2395 HOST_WIDE_INT start, end;
2396
2397 base = addr;
2398 start = 0;
2399 end = 0;
2400
2401 if (flag_pic && GET_CODE (base) == PLUS
2402 && XEXP (base, 0) == pic_offset_table_rtx)
2403 base = XEXP (base, 1);
2404
2405 /* Registers with nonvarying addresses usually have constant equivalents;
2406 but the frame pointer register is also possible. */
2407 if (GET_CODE (base) == REG
2408 && qty_const != 0
2409 && REGNO_QTY_VALID_P (REGNO (base))
2410 && qty_mode[reg_qty[REGNO (base)]] == GET_MODE (base)
2411 && qty_const[reg_qty[REGNO (base)]] != 0)
2412 base = qty_const[reg_qty[REGNO (base)]];
2413 else if (GET_CODE (base) == PLUS
2414 && GET_CODE (XEXP (base, 1)) == CONST_INT
2415 && GET_CODE (XEXP (base, 0)) == REG
2416 && qty_const != 0
2417 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
2418 && (qty_mode[reg_qty[REGNO (XEXP (base, 0))]]
2419 == GET_MODE (XEXP (base, 0)))
2420 && qty_const[reg_qty[REGNO (XEXP (base, 0))]])
2421 {
2422 start = INTVAL (XEXP (base, 1));
2423 base = qty_const[reg_qty[REGNO (XEXP (base, 0))]];
2424 }
2425 /* This can happen as the result of virtual register instantiation,
2426 if the initial offset is too large to be a valid address. */
2427 else if (GET_CODE (base) == PLUS
2428 && GET_CODE (XEXP (base, 0)) == REG
2429 && GET_CODE (XEXP (base, 1)) == REG
2430 && qty_const != 0
2431 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
2432 && (qty_mode[reg_qty[REGNO (XEXP (base, 0))]]
2433 == GET_MODE (XEXP (base, 0)))
2434 && qty_const[reg_qty[REGNO (XEXP (base, 0))]]
2435 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 1)))
2436 && (qty_mode[reg_qty[REGNO (XEXP (base, 1))]]
2437 == GET_MODE (XEXP (base, 1)))
2438 && qty_const[reg_qty[REGNO (XEXP (base, 1))]])
2439 {
2440 rtx tem = qty_const[reg_qty[REGNO (XEXP (base, 1))]];
2441 base = qty_const[reg_qty[REGNO (XEXP (base, 0))]];
2442
2443 /* One of the two values must be a constant. */
2444 if (GET_CODE (base) != CONST_INT)
2445 {
2446 if (GET_CODE (tem) != CONST_INT)
2447 abort ();
2448 start = INTVAL (tem);
2449 }
2450 else
2451 {
2452 start = INTVAL (base);
2453 base = tem;
2454 }
2455 }
2456
2457 /* Handle everything that we can find inside an address that has been
2458 viewed as constant. */
2459
2460 while (1)
2461 {
2462 /* If no part of this switch does a "continue", the code outside
2463 will exit this loop. */
2464
2465 switch (GET_CODE (base))
2466 {
2467 case LO_SUM:
2468 /* By definition, operand1 of a LO_SUM is the associated constant
2469 address. Use the associated constant address as the base
2470 instead. */
2471 base = XEXP (base, 1);
2472 continue;
2473
2474 case CONST:
2475 /* Strip off CONST. */
2476 base = XEXP (base, 0);
2477 continue;
2478
2479 case PLUS:
2480 if (GET_CODE (XEXP (base, 1)) == CONST_INT)
2481 {
2482 start += INTVAL (XEXP (base, 1));
2483 base = XEXP (base, 0);
2484 continue;
2485 }
2486 break;
2487
2488 case AND:
2489 /* Handle the case of an AND which is the negative of a power of
2490 two. This is used to represent unaligned memory operations. */
2491 if (GET_CODE (XEXP (base, 1)) == CONST_INT
2492 && exact_log2 (- INTVAL (XEXP (base, 1))) > 0)
2493 {
2494 set_nonvarying_address_components (XEXP (base, 0), size,
2495 pbase, pstart, pend);
2496
2497 /* Assume the worst misalignment. START is affected, but not
2498 END, so compensate by adjusting SIZE. Don't lose any
2499 constant we already had. */
2500
2501 size = *pend - *pstart - INTVAL (XEXP (base, 1)) - 1;
2502 start += *pstart + INTVAL (XEXP (base, 1)) + 1;
2503 end += *pend;
2504 base = *pbase;
2505 }
2506 break;
2507
2508 default:
2509 break;
2510 }
2511
2512 break;
2513 }
2514
2515 if (GET_CODE (base) == CONST_INT)
2516 {
2517 start += INTVAL (base);
2518 base = const0_rtx;
2519 }
2520
2521 end = start + size;
2522
2523 /* Set the return values. */
2524 *pbase = base;
2525 *pstart = start;
2526 *pend = end;
2527 }
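
/* Illustrative sketch (hypothetical helper): the worst-case widening for
   an address of the form (and X (const_int -ALIGN)) handled above, where
   ALIGN is a power of two.  Masking can only lower the start address, by
   at most ALIGN - 1 bytes, so the conservative byte range keeps END and
   moves START back; e.g. ALIGN 8, START 100, SIZE 4 gives [93, 104).  */

static void
toy_align_bounds (start, size, align, pstart, pend)
     long start, align;
     int size;
     long *pstart, *pend;
{
  *pstart = start - (align - 1);	/* mask may strip the low bits */
  *pend = start + size;			/* the end is unaffected */
}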
2528
2529 /* Return 1 if X has a value that can vary even between two
2530 executions of the program. 0 means X can be compared reliably
2531 against certain constants or near-constants. */
2532
2533 static int
2534 cse_rtx_varies_p (x)
2535 register rtx x;
2536 {
2537 /* We need not check for X and the equivalence class being of the same
2538 mode because if X is equivalent to a constant in some mode, it
2539 doesn't vary in any mode. */
2540
2541 if (GET_CODE (x) == REG
2542 && REGNO_QTY_VALID_P (REGNO (x))
2543 && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]]
2544 && qty_const[reg_qty[REGNO (x)]] != 0)
2545 return 0;
2546
2547 if (GET_CODE (x) == PLUS
2548 && GET_CODE (XEXP (x, 1)) == CONST_INT
2549 && GET_CODE (XEXP (x, 0)) == REG
2550 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2551 && (GET_MODE (XEXP (x, 0))
2552 == qty_mode[reg_qty[REGNO (XEXP (x, 0))]])
2553 && qty_const[reg_qty[REGNO (XEXP (x, 0))]])
2554 return 0;
2555
2556 /* This can happen as the result of virtual register instantiation, if
2557 the initial constant is too large to be a valid address. This gives
2558 us a three instruction sequence, load large offset into a register,
2559 load fp minus a constant into a register, then a MEM which is the
2560 sum of the two `constant' registers. */
2561 if (GET_CODE (x) == PLUS
2562 && GET_CODE (XEXP (x, 0)) == REG
2563 && GET_CODE (XEXP (x, 1)) == REG
2564 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2565 && (GET_MODE (XEXP (x, 0))
2566 == qty_mode[reg_qty[REGNO (XEXP (x, 0))]])
2567 && qty_const[reg_qty[REGNO (XEXP (x, 0))]]
2568 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1)))
2569 && (GET_MODE (XEXP (x, 1))
2570 == qty_mode[reg_qty[REGNO (XEXP (x, 1))]])
2571 && qty_const[reg_qty[REGNO (XEXP (x, 1))]])
2572 return 0;
2573
2574 return rtx_varies_p (x);
2575 }
2576 \f
2577 /* Canonicalize an expression:
2578 replace each register reference inside it
2579 with the "oldest" equivalent register.
2580
2581 If INSN is non-zero and we are replacing a pseudo with a hard register
2582 or vice versa, validate_change is used to ensure that INSN remains valid
2583 after we make our substitution. The calls are made with IN_GROUP non-zero
2584 so apply_change_group must be called upon the outermost return from this
2585 function (unless INSN is zero). The result of apply_change_group can
2586 generally be discarded since the changes we are making are optional. */
2587
2588 static rtx
2589 canon_reg (x, insn)
2590 rtx x;
2591 rtx insn;
2592 {
2593 register int i;
2594 register enum rtx_code code;
2595 register char *fmt;
2596
2597 if (x == 0)
2598 return x;
2599
2600 code = GET_CODE (x);
2601 switch (code)
2602 {
2603 case PC:
2604 case CC0:
2605 case CONST:
2606 case CONST_INT:
2607 case CONST_DOUBLE:
2608 case SYMBOL_REF:
2609 case LABEL_REF:
2610 case ADDR_VEC:
2611 case ADDR_DIFF_VEC:
2612 return x;
2613
2614 case REG:
2615 {
2616 register int first;
2617
2618 /* Never replace a hard reg, because hard regs can appear
2619 in more than one machine mode, and we must preserve the mode
2620 of each occurrence. Also, some hard regs appear in
2621 MEMs that are shared and mustn't be altered. Don't try to
2622 replace any reg that maps to a reg of class NO_REGS. */
2623 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2624 || ! REGNO_QTY_VALID_P (REGNO (x)))
2625 return x;
2626
2627 first = qty_first_reg[reg_qty[REGNO (x)]];
2628 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2629 : REGNO_REG_CLASS (first) == NO_REGS ? x
2630 : gen_rtx_REG (qty_mode[reg_qty[REGNO (x)]], first));
2631 }
2632
2633 default:
2634 break;
2635 }
2636
2637 fmt = GET_RTX_FORMAT (code);
2638 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2639 {
2640 register int j;
2641
2642 if (fmt[i] == 'e')
2643 {
2644 rtx new = canon_reg (XEXP (x, i), insn);
2645 int insn_code;
2646
2647 /* If replacing pseudo with hard reg or vice versa, ensure the
2648 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2649 if (insn != 0 && new != 0
2650 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2651 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2652 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2653 || (insn_code = recog_memoized (insn)) < 0
2654 || insn_n_dups[insn_code] > 0))
2655 validate_change (insn, &XEXP (x, i), new, 1);
2656 else
2657 XEXP (x, i) = new;
2658 }
2659 else if (fmt[i] == 'E')
2660 for (j = 0; j < XVECLEN (x, i); j++)
2661 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2662 }
2663
2664 return x;
2665 }
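
/* Illustrative sketch of the calling pattern described in the comment
   above canon_reg: substitutions are queued with IN_GROUP nonzero and
   committed (or discarded) at the outermost level.  Kept under `#if 0'
   since it is a usage fragment, not a definition.  */
#if 0
  canon_reg (PATTERN (insn), insn);	/* queues changes via validate_change */
  apply_change_group ();		/* commits only if INSN stays valid */
#endif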
2666 \f
2667 /* LOC is a location within INSN that is an operand address (the contents of
2668 a MEM). Find the best equivalent address to use that is valid for this
2669 insn.
2670
2671 On most CISC machines, complicated address modes are costly, and rtx_cost
2672 is a good approximation for that cost. However, most RISC machines have
2673 only a few (usually only one) memory reference formats. If an address is
2674 valid at all, it is often just as cheap as any other address. Hence, for
2675 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2676 costs of various addresses. For two addresses of equal cost, choose the one
2677 with the highest `rtx_cost' value as that has the potential of eliminating
2678 the most insns. For equal costs, we choose the first in the equivalence
2679 class. Note that we ignore the fact that pseudo registers are cheaper
2680 than hard registers here because we would also prefer the pseudo registers.
2681 */
2682
2683 static void
2684 find_best_addr (insn, loc)
2685 rtx insn;
2686 rtx *loc;
2687 {
2688 struct table_elt *elt;
2689 rtx addr = *loc;
2690 #ifdef ADDRESS_COST
2691 struct table_elt *p;
2692 int found_better = 1;
2693 #endif
2694 int save_do_not_record = do_not_record;
2695 int save_hash_arg_in_memory = hash_arg_in_memory;
2696 int save_hash_arg_in_struct = hash_arg_in_struct;
2697 int addr_volatile;
2698 int regno;
2699 unsigned hash;
2700
2701 /* Do not try to replace constant addresses or addresses of local and
2702 argument slots. These MEM expressions are made only once and inserted
2703 in many instructions, as well as being used to control symbol table
2704 output. It is not safe to clobber them.
2705
2706 There are some uncommon cases where the address is already in a register
2707 for some reason, but we cannot take advantage of that because we have
2708 no easy way to unshare the MEM. In addition, looking up all stack
2709 addresses is costly. */
2710 if ((GET_CODE (addr) == PLUS
2711 && GET_CODE (XEXP (addr, 0)) == REG
2712 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2713 && (regno = REGNO (XEXP (addr, 0)),
2714 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2715 || regno == ARG_POINTER_REGNUM))
2716 || (GET_CODE (addr) == REG
2717 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2718 || regno == HARD_FRAME_POINTER_REGNUM
2719 || regno == ARG_POINTER_REGNUM))
2720 || GET_CODE (addr) == ADDRESSOF
2721 || CONSTANT_ADDRESS_P (addr))
2722 return;
2723
2724 /* If this address is not simply a register, try to fold it. This will
2725 sometimes simplify the expression. Many simplifications
2726 will not be valid, but some, usually applying the associative rule, will
2727 be valid and produce better code. */
2728 if (GET_CODE (addr) != REG)
2729 {
2730 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2731
2732 if (1
2733 #ifdef ADDRESS_COST
2734 && (CSE_ADDRESS_COST (folded) < CSE_ADDRESS_COST (addr)
2735 || (CSE_ADDRESS_COST (folded) == CSE_ADDRESS_COST (addr)
2736 && rtx_cost (folded, MEM) > rtx_cost (addr, MEM)))
2737 #else
2738 && rtx_cost (folded, MEM) < rtx_cost (addr, MEM)
2739 #endif
2740 && validate_change (insn, loc, folded, 0))
2741 addr = folded;
2742 }
2743
2744 /* If this address is not in the hash table, we can't look for equivalences
2745 of the whole address. Also, ignore if volatile. */
2746
2747 do_not_record = 0;
2748 hash = HASH (addr, Pmode);
2749 addr_volatile = do_not_record;
2750 do_not_record = save_do_not_record;
2751 hash_arg_in_memory = save_hash_arg_in_memory;
2752 hash_arg_in_struct = save_hash_arg_in_struct;
2753
2754 if (addr_volatile)
2755 return;
2756
2757 elt = lookup (addr, hash, Pmode);
2758
2759 #ifndef ADDRESS_COST
2760 if (elt)
2761 {
2762 int our_cost = elt->cost;
2763
2764 /* Find the lowest cost below ours that works. */
2765 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2766 if (elt->cost < our_cost
2767 && (GET_CODE (elt->exp) == REG
2768 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2769 && validate_change (insn, loc,
2770 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2771 return;
2772 }
2773 #else
2774
2775 if (elt)
2776 {
2777 /* We need to find the best (under the criteria documented above) entry
2778 in the class that is valid. We use the `flag' field to indicate
2779 choices that were invalid and iterate until we can't find a better
2780 one that hasn't already been tried. */
2781
2782 for (p = elt->first_same_value; p; p = p->next_same_value)
2783 p->flag = 0;
2784
2785 while (found_better)
2786 {
2787 int best_addr_cost = CSE_ADDRESS_COST (*loc);
2788 int best_rtx_cost = (elt->cost + 1) >> 1;
2789 struct table_elt *best_elt = elt;
2790
2791 found_better = 0;
2792 for (p = elt->first_same_value; p; p = p->next_same_value)
2793 if (! p->flag)
2794 {
2795 if ((GET_CODE (p->exp) == REG
2796 || exp_equiv_p (p->exp, p->exp, 1, 0))
2797 && (CSE_ADDRESS_COST (p->exp) < best_addr_cost
2798 || (CSE_ADDRESS_COST (p->exp) == best_addr_cost
2799 && (p->cost + 1) >> 1 > best_rtx_cost)))
2800 {
2801 found_better = 1;
2802 best_addr_cost = CSE_ADDRESS_COST (p->exp);
2803 best_rtx_cost = (p->cost + 1) >> 1;
2804 best_elt = p;
2805 }
2806 }
2807
2808 if (found_better)
2809 {
2810 if (validate_change (insn, loc,
2811 canon_reg (copy_rtx (best_elt->exp),
2812 NULL_RTX), 0))
2813 return;
2814 else
2815 best_elt->flag = 1;
2816 }
2817 }
2818 }
2819
2820 /* If the address is a binary operation with the first operand a register
2821 and the second a constant, do the same as above, but looking for
2822 equivalences of the register. Then try to simplify before checking for
2823 the best address to use. This catches a few cases: First is when we
2824 have REG+const and the register is another REG+const. We can often merge
2825 the constants and eliminate one insn and one register. It may also be
2826 that a machine has a cheap REG+REG+const. Finally, this improves the
2827 code on the Alpha for unaligned byte stores. */
2828
2829 if (flag_expensive_optimizations
2830 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
2831 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
2832 && GET_CODE (XEXP (*loc, 0)) == REG
2833 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
2834 {
2835 rtx c = XEXP (*loc, 1);
2836
2837 do_not_record = 0;
2838 hash = HASH (XEXP (*loc, 0), Pmode);
2839 do_not_record = save_do_not_record;
2840 hash_arg_in_memory = save_hash_arg_in_memory;
2841 hash_arg_in_struct = save_hash_arg_in_struct;
2842
2843 elt = lookup (XEXP (*loc, 0), hash, Pmode);
2844 if (elt == 0)
2845 return;
2846
2847 /* We need to find the best (under the criteria documented above) entry
2848 in the class that is valid. We use the `flag' field to indicate
2849 choices that were invalid and iterate until we can't find a better
2850 one that hasn't already been tried. */
2851
2852 for (p = elt->first_same_value; p; p = p->next_same_value)
2853 p->flag = 0;
2854
2855 while (found_better)
2856 {
2857 int best_addr_cost = CSE_ADDRESS_COST (*loc);
2858 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2859 struct table_elt *best_elt = elt;
2860 rtx best_rtx = *loc;
2861 int count;
2862
2863 /* This is in the worst case an O(n^2) algorithm, so limit our search
2864 to the first 32 elements on the list. This avoids trouble
2865 compiling code with very long basic blocks that can easily
2866 call cse_gen_binary so many times that we run out of memory. */
2867
2868 found_better = 0;
2869 for (p = elt->first_same_value, count = 0;
2870 p && count < 32;
2871 p = p->next_same_value, count++)
2872 if (! p->flag
2873 && (GET_CODE (p->exp) == REG
2874 || exp_equiv_p (p->exp, p->exp, 1, 0)))
2875 {
2876 rtx new = cse_gen_binary (GET_CODE (*loc), Pmode, p->exp, c);
2877
2878 if ((CSE_ADDRESS_COST (new) < best_addr_cost
2879 || (CSE_ADDRESS_COST (new) == best_addr_cost
2880 && (COST (new) + 1) >> 1 > best_rtx_cost)))
2881 {
2882 found_better = 1;
2883 best_addr_cost = CSE_ADDRESS_COST (new);
2884 best_rtx_cost = (COST (new) + 1) >> 1;
2885 best_elt = p;
2886 best_rtx = new;
2887 }
2888 }
2889
2890 if (found_better)
2891 {
2892 if (validate_change (insn, loc,
2893 canon_reg (copy_rtx (best_rtx),
2894 NULL_RTX), 0))
2895 return;
2896 else
2897 best_elt->flag = 1;
2898 }
2899 }
2900 }
2901 #endif
2902 }
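
/* Illustrative sketch (hypothetical parameters): the two-key preference
   find_best_addr applies above when ADDRESS_COST is defined.  A lower
   address cost always wins; on a tie, the larger halved rtx cost wins,
   since replacing a more complex expression can eliminate more insns.  */

static int
toy_better_addr_p (addr_cost, full_cost, best_addr_cost, best_full_cost)
     int addr_cost, full_cost, best_addr_cost, best_full_cost;
{
  return (addr_cost < best_addr_cost
	  || (addr_cost == best_addr_cost
	      && ((full_cost + 1) >> 1) > best_full_cost));
}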
2903 \f
2904 /* Given an operation (CODE, *PARG1, *PARG2), where CODE is a comparison
2905 operation (EQ, NE, GT, etc.), follow it back through the hash table and
2906 find what values are actually being compared.
2907
2908 *PARG1 and *PARG2 are updated to contain the rtx representing the values
2909 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
2910 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2911 compared to produce cc0.
2912
2913 The return value is the comparison operator: either CODE itself or
2914 the code corresponding to the inverse of the comparison. */
2915
2916 static enum rtx_code
2917 find_comparison_args (code, parg1, parg2, pmode1, pmode2)
2918 enum rtx_code code;
2919 rtx *parg1, *parg2;
2920 enum machine_mode *pmode1, *pmode2;
2921 {
2922 rtx arg1, arg2;
2923
2924 arg1 = *parg1, arg2 = *parg2;
2925
2926 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
2927
2928 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2929 {
2930 /* Set non-zero when we find something of interest. */
2931 rtx x = 0;
2932 int reverse_code = 0;
2933 struct table_elt *p = 0;
2934
2935 /* If arg1 is a COMPARE, extract the comparison arguments from it.
2936 On machines with CC0, this is the only case that can occur, since
2937 fold_rtx will return the COMPARE or item being compared with zero
2938 when given CC0. */
2939
2940 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2941 x = arg1;
2942
2943 /* If ARG1 is a comparison operator and CODE is testing for
2944 STORE_FLAG_VALUE, get the inner arguments. */
2945
2946 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
2947 {
2948 if (code == NE
2949 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2950 && code == LT && STORE_FLAG_VALUE == -1)
2951 #ifdef FLOAT_STORE_FLAG_VALUE
2952 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2953 && FLOAT_STORE_FLAG_VALUE < 0)
2954 #endif
2955 )
2956 x = arg1;
2957 else if (code == EQ
2958 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2959 && code == GE && STORE_FLAG_VALUE == -1)
2960 #ifdef FLOAT_STORE_FLAG_VALUE
2961 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2962 && FLOAT_STORE_FLAG_VALUE < 0)
2963 #endif
2964 )
2965 x = arg1, reverse_code = 1;
2966 }
2967
2968 /* ??? We could also check for
2969
2970 (ne (and (eq (...) (const_int 1))) (const_int 0))
2971
2972 and related forms, but let's wait until we see them occurring. */
2973
2974 if (x == 0)
2975 /* Look up ARG1 in the hash table and see if it has an equivalence
2976 that lets us see what is being compared. */
2977 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) % NBUCKETS,
2978 GET_MODE (arg1));
2979 if (p) p = p->first_same_value;
2980
2981 for (; p; p = p->next_same_value)
2982 {
2983 enum machine_mode inner_mode = GET_MODE (p->exp);
2984
2985 /* If the entry isn't valid, skip it. */
2986 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
2987 continue;
2988
2989 if (GET_CODE (p->exp) == COMPARE
2990 /* Another possibility is that this machine has a compare insn
2991 that includes the comparison code. In that case, ARG1 would
2992 be equivalent to a comparison operation that would set ARG1 to
2993 either STORE_FLAG_VALUE or zero. If this is an NE operation,
2994 ORIG_CODE is the actual comparison being done; if it is an EQ,
2995 we must reverse ORIG_CODE. On machines with a negative value
2996 for STORE_FLAG_VALUE, also look at LT and GE operations. */
2997 || ((code == NE
2998 || (code == LT
2999 && GET_MODE_CLASS (inner_mode) == MODE_INT
3000 && (GET_MODE_BITSIZE (inner_mode)
3001 <= HOST_BITS_PER_WIDE_INT)
3002 && (STORE_FLAG_VALUE
3003 & ((HOST_WIDE_INT) 1
3004 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3005 #ifdef FLOAT_STORE_FLAG_VALUE
3006 || (code == LT
3007 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3008 && FLOAT_STORE_FLAG_VALUE < 0)
3009 #endif
3010 )
3011 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
3012 {
3013 x = p->exp;
3014 break;
3015 }
3016 else if ((code == EQ
3017 || (code == GE
3018 && GET_MODE_CLASS (inner_mode) == MODE_INT
3019 && (GET_MODE_BITSIZE (inner_mode)
3020 <= HOST_BITS_PER_WIDE_INT)
3021 && (STORE_FLAG_VALUE
3022 & ((HOST_WIDE_INT) 1
3023 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3024 #ifdef FLOAT_STORE_FLAG_VALUE
3025 || (code == GE
3026 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3027 && FLOAT_STORE_FLAG_VALUE < 0)
3028 #endif
3029 )
3030 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
3031 {
3032 reverse_code = 1;
3033 x = p->exp;
3034 break;
3035 }
3036
3037 /* If this is fp + constant, the equivalent is a better operand since
3038 it may let us predict the value of the comparison. */
3039 else if (NONZERO_BASE_PLUS_P (p->exp))
3040 {
3041 arg1 = p->exp;
3042 continue;
3043 }
3044 }
3045
3046 /* If we didn't find a useful equivalence for ARG1, we are done.
3047 Otherwise, set up for the next iteration. */
3048 if (x == 0)
3049 break;
3050
3051 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3052 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3053 code = GET_CODE (x);
3054
3055 if (reverse_code)
3056 code = reverse_condition (code);
3057 }
3058
3059 /* Return our results. Return the modes from before fold_rtx
3060 because fold_rtx might produce const_int, and then it's too late. */
3061 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3062 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3063
3064 return code;
3065 }
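
/* Illustrative sketch (a hypothetical subset of what reverse_condition
   does): the CODE flip applied above when the equivalence was found
   through an EQ-like rather than an NE-like form.  */

static enum rtx_code
toy_reverse_condition (code)
     enum rtx_code code;
{
  switch (code)
    {
    case EQ: return NE;
    case NE: return EQ;
    case LT: return GE;
    case GE: return LT;
    case GT: return LE;
    case LE: return GT;
    default: abort ();
    }
}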
3066 \f
3067 /* Try to simplify a unary operation CODE whose output mode is to be
3068 MODE with input operand OP whose mode was originally OP_MODE.
3069 Return zero if no simplification can be made. */
3070
3071 rtx
3072 simplify_unary_operation (code, mode, op, op_mode)
3073 enum rtx_code code;
3074 enum machine_mode mode;
3075 rtx op;
3076 enum machine_mode op_mode;
3077 {
3078 register int width = GET_MODE_BITSIZE (mode);
3079
3080 /* The order of these tests is critical so that, for example, we don't
3081 check the wrong mode (input vs. output) for a conversion operation,
3082 such as FIX. At some point, this should be simplified. */
3083
3084 #if !defined(REAL_IS_NOT_DOUBLE) || defined(REAL_ARITHMETIC)
3085
3086 if (code == FLOAT && GET_MODE (op) == VOIDmode
3087 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3088 {
3089 HOST_WIDE_INT hv, lv;
3090 REAL_VALUE_TYPE d;
3091
3092 if (GET_CODE (op) == CONST_INT)
3093 lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
3094 else
3095 lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
3096
3097 #ifdef REAL_ARITHMETIC
3098 REAL_VALUE_FROM_INT (d, lv, hv, mode);
3099 #else
3100 if (hv < 0)
3101 {
3102 d = (double) (~ hv);
3103 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
3104 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
3105 d += (double) (unsigned HOST_WIDE_INT) (~ lv);
3106 d = (- d - 1.0);
3107 }
3108 else
3109 {
3110 d = (double) hv;
3111 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
3112 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
3113 d += (double) (unsigned HOST_WIDE_INT) lv;
3114 }
3115 #endif /* REAL_ARITHMETIC */
3116 d = real_value_truncate (mode, d);
3117 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3118 }
3119 else if (code == UNSIGNED_FLOAT && GET_MODE (op) == VOIDmode
3120 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3121 {
3122 HOST_WIDE_INT hv, lv;
3123 REAL_VALUE_TYPE d;
3124
3125 if (GET_CODE (op) == CONST_INT)
3126 lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
3127 else
3128 lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
3129
3130 if (op_mode == VOIDmode)
3131 {
3132 /* We don't know how to interpret negative-looking numbers in
3133 this case, so don't try to fold those. */
3134 if (hv < 0)
3135 return 0;
3136 }
3137 else if (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT * 2)
3138 ;
3139 else
3140 hv = 0, lv &= GET_MODE_MASK (op_mode);
3141
3142 #ifdef REAL_ARITHMETIC
3143 REAL_VALUE_FROM_UNSIGNED_INT (d, lv, hv, mode);
3144 #else
3145
3146 d = (double) (unsigned HOST_WIDE_INT) hv;
3147 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
3148 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
3149 d += (double) (unsigned HOST_WIDE_INT) lv;
3150 #endif /* REAL_ARITHMETIC */
3151 d = real_value_truncate (mode, d);
3152 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3153 }
3154 #endif
3155
3156 if (GET_CODE (op) == CONST_INT
3157 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
3158 {
3159 register HOST_WIDE_INT arg0 = INTVAL (op);
3160 register HOST_WIDE_INT val;
3161
3162 switch (code)
3163 {
3164 case NOT:
3165 val = ~ arg0;
3166 break;
3167
3168 case NEG:
3169 val = - arg0;
3170 break;
3171
3172 case ABS:
3173 val = (arg0 >= 0 ? arg0 : - arg0);
3174 break;
3175
3176 case FFS:
3177 /* Don't use ffs here. Instead, get low order bit and then its
3178 number. If arg0 is zero, this will return 0, as desired. */
3179 arg0 &= GET_MODE_MASK (mode);
3180 val = exact_log2 (arg0 & (- arg0)) + 1;
3181 break;
3182
3183 case TRUNCATE:
3184 val = arg0;
3185 break;
3186
3187 case ZERO_EXTEND:
3188 if (op_mode == VOIDmode)
3189 op_mode = mode;
3190 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
3191 {
3192 /* If we were really extending the mode,
3193 we would have to distinguish between zero-extension
3194 and sign-extension. */
3195 if (width != GET_MODE_BITSIZE (op_mode))
3196 abort ();
3197 val = arg0;
3198 }
3199 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3200 val = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3201 else
3202 return 0;
3203 break;
3204
3205 case SIGN_EXTEND:
3206 if (op_mode == VOIDmode)
3207 op_mode = mode;
3208 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
3209 {
3210 /* If we were really extending the mode,
3211 we would have to distinguish between zero-extension
3212 and sign-extension. */
3213 if (width != GET_MODE_BITSIZE (op_mode))
3214 abort ();
3215 val = arg0;
3216 }
3217 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3218 {
3219 val
3220 = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3221 if (val
3222 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (op_mode) - 1)))
3223 val -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3224 }
3225 else
3226 return 0;
3227 break;
3228
3229 case SQRT:
3230 return 0;
3231
3232 default:
3233 abort ();
3234 }
3235
3236 /* Clear the bits that don't belong in our mode,
3237 unless they and our sign bit are all one.
3238 So we get either a reasonable negative value or a reasonable
3239 unsigned value for this mode. */
3240 if (width < HOST_BITS_PER_WIDE_INT
3241 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3242 != ((HOST_WIDE_INT) (-1) << (width - 1))))
3243 val &= ((HOST_WIDE_INT) 1 << width) - 1;
3244
3245 /* If this would be an entire word for the target, but is not for
3246 the host, then sign-extend on the host so that the number will look
3247 the same way on the host that it would on the target.
3248
3249 For example, when building a 64 bit alpha hosted 32 bit sparc
3250 targeted compiler, we want the 32 bit unsigned value -1 to be
3251 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
3252 The latter confuses the sparc backend. */
3253
3254 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
3255 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
3256 val |= ((HOST_WIDE_INT) (-1) << width);
3257
3258 return GEN_INT (val);
3259 }
3260
3261 /* We can do some operations on integer CONST_DOUBLEs. Also allow
3262 for a DImode operation on a CONST_INT. */
3263 else if (GET_MODE (op) == VOIDmode && width <= HOST_BITS_PER_INT * 2
3264 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3265 {
3266 HOST_WIDE_INT l1, h1, lv, hv;
3267
3268 if (GET_CODE (op) == CONST_DOUBLE)
3269 l1 = CONST_DOUBLE_LOW (op), h1 = CONST_DOUBLE_HIGH (op);
3270 else
3271 l1 = INTVAL (op), h1 = l1 < 0 ? -1 : 0;
3272
3273 switch (code)
3274 {
3275 case NOT:
3276 lv = ~ l1;
3277 hv = ~ h1;
3278 break;
3279
3280 case NEG:
3281 neg_double (l1, h1, &lv, &hv);
3282 break;
3283
3284 case ABS:
3285 if (h1 < 0)
3286 neg_double (l1, h1, &lv, &hv);
3287 else
3288 lv = l1, hv = h1;
3289 break;
3290
3291 case FFS:
3292 hv = 0;
3293 if (l1 == 0)
3294 lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & (-h1)) + 1;
3295 else
3296 lv = exact_log2 (l1 & (-l1)) + 1;
3297 break;
3298
3299 case TRUNCATE:
3300 /* This is just a change-of-mode, so do nothing. */
3301 lv = l1, hv = h1;
3302 break;
3303
3304 case ZERO_EXTEND:
3305 if (op_mode == VOIDmode
3306 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3307 return 0;
3308
3309 hv = 0;
3310 lv = l1 & GET_MODE_MASK (op_mode);
3311 break;
3312
3313 case SIGN_EXTEND:
3314 if (op_mode == VOIDmode
3315 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3316 return 0;
3317 else
3318 {
3319 lv = l1 & GET_MODE_MASK (op_mode);
3320 if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
3321 && (lv & ((HOST_WIDE_INT) 1
3322 << (GET_MODE_BITSIZE (op_mode) - 1))) != 0)
3323 lv -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3324
3325 hv = (lv < 0) ? ~ (HOST_WIDE_INT) 0 : 0;
3326 }
3327 break;
3328
3329 case SQRT:
3330 return 0;
3331
3332 default:
3333 return 0;
3334 }
3335
3336 return immed_double_const (lv, hv, mode);
3337 }
3338
3339 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3340 else if (GET_CODE (op) == CONST_DOUBLE
3341 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3342 {
3343 REAL_VALUE_TYPE d;
3344 jmp_buf handler;
3345 rtx x;
3346
3347 if (setjmp (handler))
3348 /* There used to be a warning here, but that is inadvisable.
3349 People may want to cause traps, and the natural way
3350 to do it should not get a warning. */
3351 return 0;
3352
3353 set_float_handler (handler);
3354
3355 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3356
3357 switch (code)
3358 {
3359 case NEG:
3360 d = REAL_VALUE_NEGATE (d);
3361 break;
3362
3363 case ABS:
3364 if (REAL_VALUE_NEGATIVE (d))
3365 d = REAL_VALUE_NEGATE (d);
3366 break;
3367
3368 case FLOAT_TRUNCATE:
3369 d = real_value_truncate (mode, d);
3370 break;
3371
3372 case FLOAT_EXTEND:
3373 /* All this does is change the mode. */
3374 break;
3375
3376 case FIX:
3377 d = REAL_VALUE_RNDZINT (d);
3378 break;
3379
3380 case UNSIGNED_FIX:
3381 d = REAL_VALUE_UNSIGNED_RNDZINT (d);
3382 break;
3383
3384 case SQRT:
3385 return 0;
3386
3387 default:
3388 abort ();
3389 }
3390
3391 x = CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3392 set_float_handler (NULL_PTR);
3393 return x;
3394 }
3395
3396 else if (GET_CODE (op) == CONST_DOUBLE
3397 && GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
3398 && GET_MODE_CLASS (mode) == MODE_INT
3399 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
3400 {
3401 REAL_VALUE_TYPE d;
3402 jmp_buf handler;
3403 HOST_WIDE_INT val;
3404
3405 if (setjmp (handler))
3406 return 0;
3407
3408 set_float_handler (handler);
3409
3410 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3411
3412 switch (code)
3413 {
3414 case FIX:
3415 val = REAL_VALUE_FIX (d);
3416 break;
3417
3418 case UNSIGNED_FIX:
3419 val = REAL_VALUE_UNSIGNED_FIX (d);
3420 break;
3421
3422 default:
3423 abort ();
3424 }
3425
3426 set_float_handler (NULL_PTR);
3427
3428 /* Clear the bits that don't belong in our mode,
3429 unless they and our sign bit are all one.
3430 So we get either a reasonable negative value or a reasonable
3431 unsigned value for this mode. */
3432 if (width < HOST_BITS_PER_WIDE_INT
3433 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3434 != ((HOST_WIDE_INT) (-1) << (width - 1))))
3435 val &= ((HOST_WIDE_INT) 1 << width) - 1;
3436
3437 /* If this would be an entire word for the target, but is not for
3438 the host, then sign-extend on the host so that the number will look
3439 the same way on the host that it would on the target.
3440
3441 For example, when building a 64 bit alpha hosted 32 bit sparc
3442 targeted compiler, we want the 32 bit unsigned value -1 to be
3443 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
3444 The latter confuses the sparc backend. */
3445
3446 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
3447 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
3448 val |= ((HOST_WIDE_INT) (-1) << width);
3449
3450 return GEN_INT (val);
3451 }
3452 #endif
3453 /* This was formerly used only for non-IEEE float.
3454 eggert@twinsun.com says it is safe for IEEE also. */
3455 else
3456 {
3457 /* There are some simplifications we can do even if the operands
3458 aren't constant. */
3459 switch (code)
3460 {
3461 case NEG:
3462 case NOT:
3463 /* (not (not X)) == X, similarly for NEG. */
3464 if (GET_CODE (op) == code)
3465 return XEXP (op, 0);
3466 break;
3467
3468 case SIGN_EXTEND:
3469 /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2))))
3470 becomes just the MINUS if its mode is MODE. This allows
3471 folding switch statements on machines using casesi (such as
3472 the Vax). */
3473 if (GET_CODE (op) == TRUNCATE
3474 && GET_MODE (XEXP (op, 0)) == mode
3475 && GET_CODE (XEXP (op, 0)) == MINUS
3476 && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF
3477 && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF)
3478 return XEXP (op, 0);
3479
3480 #ifdef POINTERS_EXTEND_UNSIGNED
3481 if (! POINTERS_EXTEND_UNSIGNED
3482 && mode == Pmode && GET_MODE (op) == ptr_mode
3483 && CONSTANT_P (op))
3484 return convert_memory_address (Pmode, op);
3485 #endif
3486 break;
3487
3488 #ifdef POINTERS_EXTEND_UNSIGNED
3489 case ZERO_EXTEND:
3490 if (POINTERS_EXTEND_UNSIGNED
3491 && mode == Pmode && GET_MODE (op) == ptr_mode
3492 && CONSTANT_P (op))
3493 return convert_memory_address (Pmode, op);
3494 break;
3495 #endif
3496
3497 default:
3498 break;
3499 }
3500
3501 return 0;
3502 }
3503 }
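
/* Illustrative sketch (hypothetical 8-bit inner mode): the SIGN_EXTEND
   arithmetic used in simplify_unary_operation above.  Masking keeps the
   low bits of the inner mode; if the inner sign bit is then set,
   subtracting 1 << 8 produces the negative value, e.g. 0xff -> -1.  */

static long
toy_sign_extend_8 (arg)
     long arg;
{
  long val = arg & 0xff;	/* keep the low 8 bits */

  if (val & 0x80)		/* inner sign bit set?  */
    val -= 0x100;		/* 0xff - 0x100 == -1 */
  return val;
}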
3504 \f
3505 /* Simplify a binary operation CODE with result mode MODE, operating on OP0
3506 and OP1. Return 0 if no simplification is possible.
3507
3508 Don't use this for relational operations such as EQ or LT.
3509 Use simplify_relational_operation instead. */
3510
3511 rtx
3512 simplify_binary_operation (code, mode, op0, op1)
3513 enum rtx_code code;
3514 enum machine_mode mode;
3515 rtx op0, op1;
3516 {
3517 register HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
3518 HOST_WIDE_INT val;
3519 int width = GET_MODE_BITSIZE (mode);
3520 rtx tem;
3521
3522 /* Relational operations don't work here. We must know the mode
3523 of the operands in order to do the comparison correctly.
3524 Assuming a full word can give incorrect results.
3525 Consider comparing 128 with -128 in QImode. */
3526
3527 if (GET_RTX_CLASS (code) == '<')
3528 abort ();
3529
3530 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3531 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3532 && GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
3533 && mode == GET_MODE (op0) && mode == GET_MODE (op1))
3534 {
3535 REAL_VALUE_TYPE f0, f1, value;
3536 jmp_buf handler;
3537
3538 if (setjmp (handler))
3539 return 0;
3540
3541 set_float_handler (handler);
3542
3543 REAL_VALUE_FROM_CONST_DOUBLE (f0, op0);
3544 REAL_VALUE_FROM_CONST_DOUBLE (f1, op1);
3545 f0 = real_value_truncate (mode, f0);
3546 f1 = real_value_truncate (mode, f1);
3547
3548 #ifdef REAL_ARITHMETIC
3549 #ifndef REAL_INFINITY
3550 if (code == DIV && REAL_VALUES_EQUAL (f1, dconst0))
3551 return 0;
3552 #endif
3553 REAL_ARITHMETIC (value, rtx_to_tree_code (code), f0, f1);
3554 #else
3555 switch (code)
3556 {
3557 case PLUS:
3558 value = f0 + f1;
3559 break;
3560 case MINUS:
3561 value = f0 - f1;
3562 break;
3563 case MULT:
3564 value = f0 * f1;
3565 break;
3566 case DIV:
3567 #ifndef REAL_INFINITY
3568 if (f1 == 0)
3569 return 0;
3570 #endif
3571 value = f0 / f1;
3572 break;
3573 case SMIN:
3574 value = MIN (f0, f1);
3575 break;
3576 case SMAX:
3577 value = MAX (f0, f1);
3578 break;
3579 default:
3580 abort ();
3581 }
3582 #endif
3583
3584 value = real_value_truncate (mode, value);
3585 set_float_handler (NULL_PTR);
3586 return CONST_DOUBLE_FROM_REAL_VALUE (value, mode);
3587 }
3588 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3589
3590 /* We can fold some multi-word operations. */
3591 if (GET_MODE_CLASS (mode) == MODE_INT
3592 && width == HOST_BITS_PER_WIDE_INT * 2
3593 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
3594 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
3595 {
3596 HOST_WIDE_INT l1, l2, h1, h2, lv, hv;
3597
3598 if (GET_CODE (op0) == CONST_DOUBLE)
3599 l1 = CONST_DOUBLE_LOW (op0), h1 = CONST_DOUBLE_HIGH (op0);
3600 else
3601 l1 = INTVAL (op0), h1 = l1 < 0 ? -1 : 0;
3602
3603 if (GET_CODE (op1) == CONST_DOUBLE)
3604 l2 = CONST_DOUBLE_LOW (op1), h2 = CONST_DOUBLE_HIGH (op1);
3605 else
3606 l2 = INTVAL (op1), h2 = l2 < 0 ? -1 : 0;
3607
3608 switch (code)
3609 {
3610 case MINUS:
3611 /* A - B == A + (-B). */
3612 neg_double (l2, h2, &lv, &hv);
3613 l2 = lv, h2 = hv;
3614
3615	  /* ... fall through ... */
3616
3617 case PLUS:
3618 add_double (l1, h1, l2, h2, &lv, &hv);
3619 break;
3620
3621 case MULT:
3622 mul_double (l1, h1, l2, h2, &lv, &hv);
3623 break;
3624
3625 case DIV: case MOD: case UDIV: case UMOD:
3626 /* We'd need to include tree.h to do this and it doesn't seem worth
3627 it. */
3628 return 0;
3629
3630 case AND:
3631 lv = l1 & l2, hv = h1 & h2;
3632 break;
3633
3634 case IOR:
3635 lv = l1 | l2, hv = h1 | h2;
3636 break;
3637
3638 case XOR:
3639 lv = l1 ^ l2, hv = h1 ^ h2;
3640 break;
3641
3642 case SMIN:
3643 if (h1 < h2
3644 || (h1 == h2
3645 && ((unsigned HOST_WIDE_INT) l1
3646 < (unsigned HOST_WIDE_INT) l2)))
3647 lv = l1, hv = h1;
3648 else
3649 lv = l2, hv = h2;
3650 break;
3651
3652 case SMAX:
3653 if (h1 > h2
3654 || (h1 == h2
3655 && ((unsigned HOST_WIDE_INT) l1
3656 > (unsigned HOST_WIDE_INT) l2)))
3657 lv = l1, hv = h1;
3658 else
3659 lv = l2, hv = h2;
3660 break;
3661
3662 case UMIN:
3663 if ((unsigned HOST_WIDE_INT) h1 < (unsigned HOST_WIDE_INT) h2
3664 || (h1 == h2
3665 && ((unsigned HOST_WIDE_INT) l1
3666 < (unsigned HOST_WIDE_INT) l2)))
3667 lv = l1, hv = h1;
3668 else
3669 lv = l2, hv = h2;
3670 break;
3671
3672 case UMAX:
3673 if ((unsigned HOST_WIDE_INT) h1 > (unsigned HOST_WIDE_INT) h2
3674 || (h1 == h2
3675 && ((unsigned HOST_WIDE_INT) l1
3676 > (unsigned HOST_WIDE_INT) l2)))
3677 lv = l1, hv = h1;
3678 else
3679 lv = l2, hv = h2;
3680 break;
3681
3682 case LSHIFTRT: case ASHIFTRT:
3683 case ASHIFT:
3684 case ROTATE: case ROTATERT:
3685 #ifdef SHIFT_COUNT_TRUNCATED
3686 if (SHIFT_COUNT_TRUNCATED)
3687 l2 &= (GET_MODE_BITSIZE (mode) - 1), h2 = 0;
3688 #endif
3689
3690 if (h2 != 0 || l2 < 0 || l2 >= GET_MODE_BITSIZE (mode))
3691 return 0;
3692
3693 if (code == LSHIFTRT || code == ASHIFTRT)
3694 rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
3695 code == ASHIFTRT);
3696 else if (code == ASHIFT)
3697 lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv, 1);
3698 else if (code == ROTATE)
3699 lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3700 else /* code == ROTATERT */
3701 rrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3702 break;
3703
3704 default:
3705 return 0;
3706 }
3707
3708 return immed_double_const (lv, hv, mode);
3709 }
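
      /* To make the double-word folding just above concrete: on a host
	 with a 32 bit HOST_WIDE_INT, the DImode constant 0x100000000 is
	 the pair (high 1, low 0), and adding the CONST_INT -1 (the pair
	 (-1, -1)) gives

		add_double (0, 1, -1, -1, &lv, &hv)  ==>  lv == -1, hv == 0

	 i.e. 0xffffffff, which immed_double_const then re-packs as a
	 CONST_INT or CONST_DOUBLE as appropriate for the host.  */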
3710
3711 if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT
3712 || width > HOST_BITS_PER_WIDE_INT || width == 0)
3713 {
3714 /* Even if we can't compute a constant result,
3715 there are some cases worth simplifying. */
3716
3717 switch (code)
3718 {
3719 case PLUS:
3720 /* In IEEE floating point, x+0 is not the same as x. Similarly
3721 for the other optimizations below. */
3722 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3723 && FLOAT_MODE_P (mode) && ! flag_fast_math)
3724 break;
3725
3726 if (op1 == CONST0_RTX (mode))
3727 return op0;
3728
3729 /* ((-a) + b) -> (b - a) and similarly for (a + (-b)) */
3730 if (GET_CODE (op0) == NEG)
3731 return cse_gen_binary (MINUS, mode, op1, XEXP (op0, 0));
3732 else if (GET_CODE (op1) == NEG)
3733 return cse_gen_binary (MINUS, mode, op0, XEXP (op1, 0));
3734
3735 /* Handle both-operands-constant cases. We can only add
3736 CONST_INTs to constants since the sum of relocatable symbols
3737 can't be handled by most assemblers. Don't add CONST_INT
3738 to CONST_INT since overflow won't be computed properly if wider
3739 than HOST_BITS_PER_WIDE_INT. */
3740
3741 if (CONSTANT_P (op0) && GET_MODE (op0) != VOIDmode
3742 && GET_CODE (op1) == CONST_INT)
3743 return plus_constant (op0, INTVAL (op1));
3744 else if (CONSTANT_P (op1) && GET_MODE (op1) != VOIDmode
3745 && GET_CODE (op0) == CONST_INT)
3746 return plus_constant (op1, INTVAL (op0));
3747
3748 /* See if this is something like X * C - X or vice versa or
3749 if the multiplication is written as a shift. If so, we can
3750 distribute and make a new multiply, shift, or maybe just
3751 have X (if C is 2 in the example above). But don't make
3752 real multiply if we didn't have one before. */
3753
3754 if (! FLOAT_MODE_P (mode))
3755 {
3756 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3757 rtx lhs = op0, rhs = op1;
3758 int had_mult = 0;
3759
3760 if (GET_CODE (lhs) == NEG)
3761 coeff0 = -1, lhs = XEXP (lhs, 0);
3762 else if (GET_CODE (lhs) == MULT
3763 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3764 {
3765 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3766 had_mult = 1;
3767 }
3768 else if (GET_CODE (lhs) == ASHIFT
3769 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3770 && INTVAL (XEXP (lhs, 1)) >= 0
3771 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3772 {
3773 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3774 lhs = XEXP (lhs, 0);
3775 }
3776
3777 if (GET_CODE (rhs) == NEG)
3778 coeff1 = -1, rhs = XEXP (rhs, 0);
3779 else if (GET_CODE (rhs) == MULT
3780 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3781 {
3782 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3783 had_mult = 1;
3784 }
3785 else if (GET_CODE (rhs) == ASHIFT
3786 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3787 && INTVAL (XEXP (rhs, 1)) >= 0
3788 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3789 {
3790 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3791 rhs = XEXP (rhs, 0);
3792 }
3793
3794 if (rtx_equal_p (lhs, rhs))
3795 {
3796 tem = cse_gen_binary (MULT, mode, lhs,
3797 GEN_INT (coeff0 + coeff1));
3798 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3799 }
3800 }
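
	  /* For example (REGX being a hypothetical pseudo-register),
	     (plus (mult REGX (const_int 2)) REGX) is caught here with
	     COEFF0 == 2 and COEFF1 == 1 and folds to
	     (mult REGX (const_int 3)); a NEG counts as a coefficient
	     of -1, so (plus (mult REGX (const_int 2)) (neg REGX))
	     folds to REGX itself.  */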
3801
3802 /* If one of the operands is a PLUS or a MINUS, see if we can
3803 simplify this by the associative law.
3804 Don't use the associative law for floating point.
3805 The inaccuracy makes it nonassociative,
3806 and subtle programs can break if operations are associated. */
3807
3808 if (INTEGRAL_MODE_P (mode)
3809 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3810 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3811 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3812 return tem;
3813 break;
3814
3815 case COMPARE:
3816 #ifdef HAVE_cc0
3817 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3818 using cc0, in which case we want to leave it as a COMPARE
3819 so we can distinguish it from a register-register-copy.
3820
3821 In IEEE floating point, x-0 is not the same as x. */
3822
3823 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3824 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3825 && op1 == CONST0_RTX (mode))
3826 return op0;
3827 #else
3828 /* Do nothing here. */
3829 #endif
3830 break;
3831
3832 case MINUS:
3833 /* None of these optimizations can be done for IEEE
3834 floating point. */
3835 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3836 && FLOAT_MODE_P (mode) && ! flag_fast_math)
3837 break;
3838
3839 /* We can't assume x-x is 0 even with non-IEEE floating point,
3840 but since it is zero except in very strange circumstances, we
3841 will treat it as zero with -ffast-math. */
3842 if (rtx_equal_p (op0, op1)
3843 && ! side_effects_p (op0)
3844 && (! FLOAT_MODE_P (mode) || flag_fast_math))
3845 return CONST0_RTX (mode);
3846
3847 /* Change subtraction from zero into negation. */
3848 if (op0 == CONST0_RTX (mode))
3849 return gen_rtx_NEG (mode, op1);
3850
3851 /* (-1 - a) is ~a. */
3852 if (op0 == constm1_rtx)
3853 return gen_rtx_NOT (mode, op1);
3854
3855 /* Subtracting 0 has no effect. */
3856 if (op1 == CONST0_RTX (mode))
3857 return op0;
3858
3859 /* See if this is something like X * C - X or vice versa or
3860 if the multiplication is written as a shift. If so, we can
3861 distribute and make a new multiply, shift, or maybe just
3862 have X (if C is 2 in the example above). But don't make
3863 real multiply if we didn't have one before. */
3864
3865 if (! FLOAT_MODE_P (mode))
3866 {
3867 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3868 rtx lhs = op0, rhs = op1;
3869 int had_mult = 0;
3870
3871 if (GET_CODE (lhs) == NEG)
3872 coeff0 = -1, lhs = XEXP (lhs, 0);
3873 else if (GET_CODE (lhs) == MULT
3874 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3875 {
3876 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3877 had_mult = 1;
3878 }
3879 else if (GET_CODE (lhs) == ASHIFT
3880 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3881 && INTVAL (XEXP (lhs, 1)) >= 0
3882 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3883 {
3884 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3885 lhs = XEXP (lhs, 0);
3886 }
3887
3888 if (GET_CODE (rhs) == NEG)
3889 	    coeff1 = -1, rhs = XEXP (rhs, 0);
3890 else if (GET_CODE (rhs) == MULT
3891 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3892 {
3893 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3894 had_mult = 1;
3895 }
3896 else if (GET_CODE (rhs) == ASHIFT
3897 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3898 && INTVAL (XEXP (rhs, 1)) >= 0
3899 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3900 {
3901 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3902 rhs = XEXP (rhs, 0);
3903 }
3904
3905 if (rtx_equal_p (lhs, rhs))
3906 {
3907 tem = cse_gen_binary (MULT, mode, lhs,
3908 GEN_INT (coeff0 - coeff1));
3909 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3910 }
3911 }
3912
3913 /* (a - (-b)) -> (a + b). */
3914 if (GET_CODE (op1) == NEG)
3915 return cse_gen_binary (PLUS, mode, op0, XEXP (op1, 0));
3916
3917 /* If one of the operands is a PLUS or a MINUS, see if we can
3918 simplify this by the associative law.
3919 Don't use the associative law for floating point.
3920 The inaccuracy makes it nonassociative,
3921 and subtle programs can break if operations are associated. */
3922
3923 if (INTEGRAL_MODE_P (mode)
3924 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3925 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3926 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3927 return tem;
3928
3929 /* Don't let a relocatable value get a negative coeff. */
3930 if (GET_CODE (op1) == CONST_INT && GET_MODE (op0) != VOIDmode)
3931 return plus_constant (op0, - INTVAL (op1));
3932
3933 /* (x - (x & y)) -> (x & ~y) */
3934 if (GET_CODE (op1) == AND)
3935 {
3936 if (rtx_equal_p (op0, XEXP (op1, 0)))
3937 return cse_gen_binary (AND, mode, op0, gen_rtx_NOT (mode, XEXP (op1, 1)));
3938 if (rtx_equal_p (op0, XEXP (op1, 1)))
3939 return cse_gen_binary (AND, mode, op0, gen_rtx_NOT (mode, XEXP (op1, 0)));
3940 }
3941 break;
3942
3943 case MULT:
3944 if (op1 == constm1_rtx)
3945 {
3946 tem = simplify_unary_operation (NEG, mode, op0, mode);
3947
3948 return tem ? tem : gen_rtx_NEG (mode, op0);
3949 }
3950
3951 /* In IEEE floating point, x*0 is not always 0. */
3952 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3953 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3954 && op1 == CONST0_RTX (mode)
3955 && ! side_effects_p (op0))
3956 return op1;
3957
3958 /* In IEEE floating point, x*1 is not equivalent to x for nans.
3959 However, ANSI says we can drop signals,
3960 so we can do this anyway. */
3961 if (op1 == CONST1_RTX (mode))
3962 return op0;
3963
3964 /* Convert multiply by constant power of two into shift unless
3965 we are still generating RTL. This test is a kludge. */
3966 if (GET_CODE (op1) == CONST_INT
3967 && (val = exact_log2 (INTVAL (op1))) >= 0
3968 /* If the mode is larger than the host word size, and the
3969 uppermost bit is set, then this isn't a power of two due
3970 to implicit sign extension. */
3971 && (width <= HOST_BITS_PER_WIDE_INT
3972 || val != HOST_BITS_PER_WIDE_INT - 1)
3973 && ! rtx_equal_function_value_matters)
3974 return gen_rtx_ASHIFT (mode, op0, GEN_INT (val));
3975
3976 if (GET_CODE (op1) == CONST_DOUBLE
3977 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT)
3978 {
3979 REAL_VALUE_TYPE d;
3980 jmp_buf handler;
3981 int op1is2, op1ism1;
3982
3983 if (setjmp (handler))
3984 return 0;
3985
3986 set_float_handler (handler);
3987 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
3988 op1is2 = REAL_VALUES_EQUAL (d, dconst2);
3989 op1ism1 = REAL_VALUES_EQUAL (d, dconstm1);
3990 set_float_handler (NULL_PTR);
3991
3992 /* x*2 is x+x and x*(-1) is -x */
3993 if (op1is2 && GET_MODE (op0) == mode)
3994 return gen_rtx_PLUS (mode, op0, copy_rtx (op0));
3995
3996 else if (op1ism1 && GET_MODE (op0) == mode)
3997 return gen_rtx_NEG (mode, op0);
3998 }
3999 break;
4000
4001 case IOR:
4002 if (op1 == const0_rtx)
4003 return op0;
4004 if (GET_CODE (op1) == CONST_INT
4005 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
4006 return op1;
4007 if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4008 return op0;
4009 /* A | (~A) -> -1 */
4010 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
4011 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
4012 && ! side_effects_p (op0)
4013 && GET_MODE_CLASS (mode) != MODE_CC)
4014 return constm1_rtx;
4015 break;
4016
4017 case XOR:
4018 if (op1 == const0_rtx)
4019 return op0;
4020 if (GET_CODE (op1) == CONST_INT
4021 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
4022 return gen_rtx_NOT (mode, op0);
4023 if (op0 == op1 && ! side_effects_p (op0)
4024 && GET_MODE_CLASS (mode) != MODE_CC)
4025 return const0_rtx;
4026 break;
4027
4028 case AND:
4029 if (op1 == const0_rtx && ! side_effects_p (op0))
4030 return const0_rtx;
4031 if (GET_CODE (op1) == CONST_INT
4032 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
4033 return op0;
4034 if (op0 == op1 && ! side_effects_p (op0)
4035 && GET_MODE_CLASS (mode) != MODE_CC)
4036 return op0;
4037 /* A & (~A) -> 0 */
4038 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
4039 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
4040 && ! side_effects_p (op0)
4041 && GET_MODE_CLASS (mode) != MODE_CC)
4042 return const0_rtx;
4043 break;
4044
4045 case UDIV:
4046 /* Convert divide by power of two into shift (divide by 1 handled
4047 below). */
4048 if (GET_CODE (op1) == CONST_INT
4049 && (arg1 = exact_log2 (INTVAL (op1))) > 0)
4050 return gen_rtx_LSHIFTRT (mode, op0, GEN_INT (arg1));
4051
4052 /* ... fall through ... */
4053
4054 case DIV:
4055 if (op1 == CONST1_RTX (mode))
4056 return op0;
4057
4058 /* In IEEE floating point, 0/x is not always 0. */
4059 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4060 || ! FLOAT_MODE_P (mode) || flag_fast_math)
4061 && op0 == CONST0_RTX (mode)
4062 && ! side_effects_p (op1))
4063 return op0;
4064
4065 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4066 /* Change division by a constant into multiplication. Only do
4067 this with -ffast-math until an expert says it is safe in
4068 general. */
4069 else if (GET_CODE (op1) == CONST_DOUBLE
4070 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT
4071 && op1 != CONST0_RTX (mode)
4072 && flag_fast_math)
4073 {
4074 REAL_VALUE_TYPE d;
4075 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
4076
4077 if (! REAL_VALUES_EQUAL (d, dconst0))
4078 {
4079 #if defined (REAL_ARITHMETIC)
4080 REAL_ARITHMETIC (d, rtx_to_tree_code (DIV), dconst1, d);
4081 return gen_rtx_MULT (mode, op0,
4082 CONST_DOUBLE_FROM_REAL_VALUE (d, mode));
4083 #else
4084 return gen_rtx_MULT (mode, op0,
4085 CONST_DOUBLE_FROM_REAL_VALUE (1./d, mode));
4086 #endif
4087 }
4088 }
4089 #endif
4090 break;
4091
4092 case UMOD:
4093 /* Handle modulus by power of two (mod with 1 handled below). */
4094 if (GET_CODE (op1) == CONST_INT
4095 && exact_log2 (INTVAL (op1)) > 0)
4096 return gen_rtx_AND (mode, op0, GEN_INT (INTVAL (op1) - 1));
4097
4098 /* ... fall through ... */
4099
4100 case MOD:
4101 if ((op0 == const0_rtx || op1 == const1_rtx)
4102 && ! side_effects_p (op0) && ! side_effects_p (op1))
4103 return const0_rtx;
4104 break;
4105
4106 case ROTATERT:
4107 case ROTATE:
4108 /* Rotating ~0 always results in ~0. */
4109 if (GET_CODE (op0) == CONST_INT && width <= HOST_BITS_PER_WIDE_INT
4110 && INTVAL (op0) == GET_MODE_MASK (mode)
4111 && ! side_effects_p (op1))
4112 return op0;
4113
4114 /* ... fall through ... */
4115
4116 case ASHIFT:
4117 case ASHIFTRT:
4118 case LSHIFTRT:
4119 if (op1 == const0_rtx)
4120 return op0;
4121 if (op0 == const0_rtx && ! side_effects_p (op1))
4122 return op0;
4123 break;
4124
4125 case SMIN:
4126 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
4127 	  && INTVAL (op1) == (HOST_WIDE_INT) 1 << (width - 1)
4128 && ! side_effects_p (op0))
4129 return op1;
4130 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4131 return op0;
4132 break;
4133
4134 case SMAX:
4135 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
4136 && (INTVAL (op1)
4137 == (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode) >> 1)
4138 && ! side_effects_p (op0))
4139 return op1;
4140 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4141 return op0;
4142 break;
4143
4144 case UMIN:
4145 if (op1 == const0_rtx && ! side_effects_p (op0))
4146 return op1;
4147 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4148 return op0;
4149 break;
4150
4151 case UMAX:
4152 if (op1 == constm1_rtx && ! side_effects_p (op0))
4153 return op1;
4154 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4155 return op0;
4156 break;
4157
4158 default:
4159 abort ();
4160 }
4161
4162 return 0;
4163 }
4164
4165 /* Get the integer argument values in two forms:
4166 zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S. */
4167
4168 arg0 = INTVAL (op0);
4169 arg1 = INTVAL (op1);
4170
4171 if (width < HOST_BITS_PER_WIDE_INT)
4172 {
4173 arg0 &= ((HOST_WIDE_INT) 1 << width) - 1;
4174 arg1 &= ((HOST_WIDE_INT) 1 << width) - 1;
4175
4176 arg0s = arg0;
4177 if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4178 arg0s |= ((HOST_WIDE_INT) (-1) << width);
4179
4180 arg1s = arg1;
4181 if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4182 arg1s |= ((HOST_WIDE_INT) (-1) << width);
4183 }
4184 else
4185 {
4186 arg0s = arg0;
4187 arg1s = arg1;
4188 }
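
  /* Concretely: for QImode (WIDTH 8) with op0 == (const_int -1),
     ARG0 is masked down to 255 (the zero-extended form) while ARG0S
     is re-extended to -1, so e.g. UDIV below divides 255 where DIV
     divides -1.  */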
4189
4190 /* Compute the value of the arithmetic. */
4191
4192 switch (code)
4193 {
4194 case PLUS:
4195 val = arg0s + arg1s;
4196 break;
4197
4198 case MINUS:
4199 val = arg0s - arg1s;
4200 break;
4201
4202 case MULT:
4203 val = arg0s * arg1s;
4204 break;
4205
4206 case DIV:
4207 if (arg1s == 0)
4208 return 0;
4209 val = arg0s / arg1s;
4210 break;
4211
4212 case MOD:
4213 if (arg1s == 0)
4214 return 0;
4215 val = arg0s % arg1s;
4216 break;
4217
4218 case UDIV:
4219 if (arg1 == 0)
4220 return 0;
4221 val = (unsigned HOST_WIDE_INT) arg0 / arg1;
4222 break;
4223
4224 case UMOD:
4225 if (arg1 == 0)
4226 return 0;
4227 val = (unsigned HOST_WIDE_INT) arg0 % arg1;
4228 break;
4229
4230 case AND:
4231 val = arg0 & arg1;
4232 break;
4233
4234 case IOR:
4235 val = arg0 | arg1;
4236 break;
4237
4238 case XOR:
4239 val = arg0 ^ arg1;
4240 break;
4241
4242 case LSHIFTRT:
4243 /* If shift count is undefined, don't fold it; let the machine do
4244 what it wants. But truncate it if the machine will do that. */
4245 if (arg1 < 0)
4246 return 0;
4247
4248 #ifdef SHIFT_COUNT_TRUNCATED
4249 if (SHIFT_COUNT_TRUNCATED)
4250 arg1 %= width;
4251 #endif
4252
4253 val = ((unsigned HOST_WIDE_INT) arg0) >> arg1;
4254 break;
4255
4256 case ASHIFT:
4257 if (arg1 < 0)
4258 return 0;
4259
4260 #ifdef SHIFT_COUNT_TRUNCATED
4261 if (SHIFT_COUNT_TRUNCATED)
4262 arg1 %= width;
4263 #endif
4264
4265 val = ((unsigned HOST_WIDE_INT) arg0) << arg1;
4266 break;
4267
4268 case ASHIFTRT:
4269 if (arg1 < 0)
4270 return 0;
4271
4272 #ifdef SHIFT_COUNT_TRUNCATED
4273 if (SHIFT_COUNT_TRUNCATED)
4274 arg1 %= width;
4275 #endif
4276
4277 val = arg0s >> arg1;
4278
4279 /* Bootstrap compiler may not have sign extended the right shift.
4280 	 Manually extend the sign to ensure bootstrap cc matches gcc. */
4281 if (arg0s < 0 && arg1 > 0)
4282 val |= ((HOST_WIDE_INT) -1) << (HOST_BITS_PER_WIDE_INT - arg1);
4283
4284 break;
4285
4286 case ROTATERT:
4287 if (arg1 < 0)
4288 return 0;
4289
4290 arg1 %= width;
	      /* The inner % keeps a zero count (a rotate by a multiple of
		 WIDTH) from producing an undefined shift by WIDTH when
		 WIDTH == HOST_BITS_PER_WIDE_INT.  */
4291 	  val = ((((unsigned HOST_WIDE_INT) arg0) << ((width - arg1) % width))
4292 		 | (((unsigned HOST_WIDE_INT) arg0) >> arg1));
4293 break;
4294
4295 case ROTATE:
4296 if (arg1 < 0)
4297 return 0;
4298
4299 arg1 %= width;
	      /* As in ROTATERT above, the inner % keeps a zero count from
		 producing an undefined shift by WIDTH.  */
4300 	  val = ((((unsigned HOST_WIDE_INT) arg0) << arg1)
4301 		 | (((unsigned HOST_WIDE_INT) arg0) >> ((width - arg1) % width)));
4302 break;
4303
4304 case COMPARE:
4305 /* Do nothing here. */
4306 return 0;
4307
4308 case SMIN:
4309 val = arg0s <= arg1s ? arg0s : arg1s;
4310 break;
4311
4312 case UMIN:
4313 val = ((unsigned HOST_WIDE_INT) arg0
4314 <= (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4315 break;
4316
4317 case SMAX:
4318 val = arg0s > arg1s ? arg0s : arg1s;
4319 break;
4320
4321 case UMAX:
4322 val = ((unsigned HOST_WIDE_INT) arg0
4323 > (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4324 break;
4325
4326 default:
4327 abort ();
4328 }
4329
4330 /* Clear the bits that don't belong in our mode, unless they and our sign
4331 bit are all one. So we get either a reasonable negative value or a
4332 reasonable unsigned value for this mode. */
4333 if (width < HOST_BITS_PER_WIDE_INT
4334 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4335 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4336 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4337
4338 /* If this would be an entire word for the target, but is not for
4339 the host, then sign-extend on the host so that the number will look
4340 the same way on the host that it would on the target.
4341
4342      For example, when building a 64 bit alpha hosted 32 bit sparc
4343      targeted compiler, we want the 32 bit unsigned value -1 to be
4344      represented as a 64 bit value -1, and not as 0x00000000ffffffff.
4345      The latter confuses the sparc backend. */
4346
4347 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
4348 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
4349 val |= ((HOST_WIDE_INT) (-1) << width);
4350
4351 return GEN_INT (val);
4352 }
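
/* A hypothetical call showing the final masking: in QImode,

	simplify_binary_operation (PLUS, QImode,
				   GEN_INT (100), GEN_INT (100))

   computes 200; bit 7 is set but the bits above it are not, so the
   fixup above masks the value to eight bits and, assuming
   BITS_PER_WORD is wider than QImode, the result is (const_int 200),
   a "reasonable unsigned value" for the mode.  */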
4353 \f
4354 /* Simplify a PLUS or MINUS, at least one of whose operands may be another
4355 PLUS or MINUS.
4356
4357    Rather than testing for specific cases, we do this by a brute-force method
4358 and do all possible simplifications until no more changes occur. Then
4359 we rebuild the operation. */
4360
4361 static rtx
4362 simplify_plus_minus (code, mode, op0, op1)
4363 enum rtx_code code;
4364 enum machine_mode mode;
4365 rtx op0, op1;
4366 {
4367 rtx ops[8];
4368 int negs[8];
4369 rtx result, tem;
4370 int n_ops = 2, input_ops = 2, input_consts = 0, n_consts = 0;
4371 int first = 1, negate = 0, changed;
4372 int i, j;
4373
4374 bzero ((char *) ops, sizeof ops);
4375
4376 /* Set up the two operands and then expand them until nothing has been
4377 changed. If we run out of room in our array, give up; this should
4378 almost never happen. */
4379
4380 ops[0] = op0, ops[1] = op1, negs[0] = 0, negs[1] = (code == MINUS);
4381
4382 changed = 1;
4383 while (changed)
4384 {
4385 changed = 0;
4386
4387 for (i = 0; i < n_ops; i++)
4388 switch (GET_CODE (ops[i]))
4389 {
4390 case PLUS:
4391 case MINUS:
4392 if (n_ops == 7)
4393 return 0;
4394
4395 ops[n_ops] = XEXP (ops[i], 1);
4396 negs[n_ops++] = GET_CODE (ops[i]) == MINUS ? !negs[i] : negs[i];
4397 ops[i] = XEXP (ops[i], 0);
4398 input_ops++;
4399 changed = 1;
4400 break;
4401
4402 case NEG:
4403 ops[i] = XEXP (ops[i], 0);
4404 negs[i] = ! negs[i];
4405 changed = 1;
4406 break;
4407
4408 case CONST:
4409 ops[i] = XEXP (ops[i], 0);
4410 input_consts++;
4411 changed = 1;
4412 break;
4413
4414 case NOT:
4415 /* ~a -> (-a - 1) */
4416 if (n_ops != 7)
4417 {
4418 ops[n_ops] = constm1_rtx;
4419 negs[n_ops++] = negs[i];
4420 ops[i] = XEXP (ops[i], 0);
4421 negs[i] = ! negs[i];
4422 changed = 1;
4423 }
4424 break;
4425
4426 case CONST_INT:
4427 if (negs[i])
4428 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0, changed = 1;
4429 break;
4430
4431 default:
4432 break;
4433 }
4434 }
4435
4436 /* If we only have two operands, we can't do anything. */
4437 if (n_ops <= 2)
4438 return 0;
4439
4440 /* Now simplify each pair of operands until nothing changes. The first
4441 time through just simplify constants against each other. */
4442
4443 changed = 1;
4444 while (changed)
4445 {
4446 changed = first;
4447
4448 for (i = 0; i < n_ops - 1; i++)
4449 for (j = i + 1; j < n_ops; j++)
4450 if (ops[i] != 0 && ops[j] != 0
4451 && (! first || (CONSTANT_P (ops[i]) && CONSTANT_P (ops[j]))))
4452 {
4453 rtx lhs = ops[i], rhs = ops[j];
4454 enum rtx_code ncode = PLUS;
4455
4456 if (negs[i] && ! negs[j])
4457 lhs = ops[j], rhs = ops[i], ncode = MINUS;
4458 else if (! negs[i] && negs[j])
4459 ncode = MINUS;
4460
4461 tem = simplify_binary_operation (ncode, mode, lhs, rhs);
4462 if (tem)
4463 {
4464 ops[i] = tem, ops[j] = 0;
4465 negs[i] = negs[i] && negs[j];
4466 if (GET_CODE (tem) == NEG)
4467 ops[i] = XEXP (tem, 0), negs[i] = ! negs[i];
4468
4469 if (GET_CODE (ops[i]) == CONST_INT && negs[i])
4470 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0;
4471 changed = 1;
4472 }
4473 }
4474
4475 first = 0;
4476 }
4477
4478 /* Pack all the operands to the lower-numbered entries and give up if
4479 we didn't reduce the number of operands we had. Make sure we
4480 count a CONST as two operands. If we have the same number of
4481 operands, but have made more CONSTs than we had, this is also
4482 an improvement, so accept it. */
4483
4484 for (i = 0, j = 0; j < n_ops; j++)
4485 if (ops[j] != 0)
4486 {
4487 ops[i] = ops[j], negs[i++] = negs[j];
4488 if (GET_CODE (ops[j]) == CONST)
4489 n_consts++;
4490 }
4491
4492 if (i + n_consts > input_ops
4493 || (i + n_consts == input_ops && n_consts <= input_consts))
4494 return 0;
4495
4496 n_ops = i;
4497
4498 /* If we have a CONST_INT, put it last. */
4499 for (i = 0; i < n_ops - 1; i++)
4500 if (GET_CODE (ops[i]) == CONST_INT)
4501 {
4502 	tem = ops[n_ops - 1], ops[n_ops - 1] = ops[i], ops[i] = tem;
4503 j = negs[n_ops - 1], negs[n_ops - 1] = negs[i], negs[i] = j;
4504 }
4505
4506 /* Put a non-negated operand first. If there aren't any, make all
4507 operands positive and negate the whole thing later. */
4508 for (i = 0; i < n_ops && negs[i]; i++)
4509 ;
4510
4511 if (i == n_ops)
4512 {
4513 for (i = 0; i < n_ops; i++)
4514 negs[i] = 0;
4515 negate = 1;
4516 }
4517 else if (i != 0)
4518 {
4519 tem = ops[0], ops[0] = ops[i], ops[i] = tem;
4520 j = negs[0], negs[0] = negs[i], negs[i] = j;
4521 }
4522
4523 /* Now make the result by performing the requested operations. */
4524 result = ops[0];
4525 for (i = 1; i < n_ops; i++)
4526 result = cse_gen_binary (negs[i] ? MINUS : PLUS, mode, result, ops[i]);
4527
4528 return negate ? gen_rtx_NEG (mode, result) : result;
4529 }
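
/* For example, with A, B and C distinct pseudo-registers,
   (minus (plus A B) (plus A C)) first expands to the operand list
   A, B, -A, -C; the pairwise pass folds A against -A (and the
   resulting zero into -C), and the remaining operands are rebuilt
   as (minus B C) -- fewer operands than the four we started with,
   so the simplification is accepted.  */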
4530 \f
4531 /* Make a binary operation by properly ordering the operands and
4532 seeing if the expression folds. */
4533
4534 static rtx
4535 cse_gen_binary (code, mode, op0, op1)
4536 enum rtx_code code;
4537 enum machine_mode mode;
4538 rtx op0, op1;
4539 {
4540 rtx tem;
4541
4542 /* Put complex operands first and constants second if commutative. */
4543 if (GET_RTX_CLASS (code) == 'c'
4544 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
4545 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
4546 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
4547 || (GET_CODE (op0) == SUBREG
4548 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
4549 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
4550 tem = op0, op0 = op1, op1 = tem;
4551
4552 /* If this simplifies, do it. */
4553 tem = simplify_binary_operation (code, mode, op0, op1);
4554
4555 if (tem)
4556 return tem;
4557
4558 /* Handle addition and subtraction of CONST_INT specially. Otherwise,
4559 just form the operation. */
4560
4561 if (code == PLUS && GET_CODE (op1) == CONST_INT
4562 && GET_MODE (op0) != VOIDmode)
4563 return plus_constant (op0, INTVAL (op1));
4564 else if (code == MINUS && GET_CODE (op1) == CONST_INT
4565 && GET_MODE (op0) != VOIDmode)
4566 return plus_constant (op0, - INTVAL (op1));
4567 else
4568 return gen_rtx_fmt_ee (code, mode, op0, op1);
4569 }
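
/* So, for instance, with REGX a hypothetical SImode pseudo-register,

	cse_gen_binary (PLUS, SImode, GEN_INT (4), regx)

   first swaps the operands (complex operand first, constant second),
   finds no fold, and, the CONST_INT now being OP1, returns
   plus_constant (regx, 4), i.e. (plus:SI regx (const_int 4)).  */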
4570 \f
4571 /* Like simplify_binary_operation except used for relational operators.
4572 MODE is the mode of the operands, not that of the result. If MODE
4573 is VOIDmode, both operands must also be VOIDmode and we compare the
4574 operands in "infinite precision".
4575
4576 If no simplification is possible, this function returns zero. Otherwise,
4577 it returns either const_true_rtx or const0_rtx. */
4578
4579 rtx
4580 simplify_relational_operation (code, mode, op0, op1)
4581 enum rtx_code code;
4582 enum machine_mode mode;
4583 rtx op0, op1;
4584 {
4585 int equal, op0lt, op0ltu, op1lt, op1ltu;
4586 rtx tem;
4587
4588 /* If op0 is a compare, extract the comparison arguments from it. */
4589 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
4590 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4591
4592 /* We can't simplify MODE_CC values since we don't know what the
4593 actual comparison is. */
4594 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC
4595 #ifdef HAVE_cc0
4596 || op0 == cc0_rtx
4597 #endif
4598 )
4599 return 0;
4600
4601 /* For integer comparisons of A and B maybe we can simplify A - B and can
4602 then simplify a comparison of that with zero. If A and B are both either
4603 a register or a CONST_INT, this can't help; testing for these cases will
4604 prevent infinite recursion here and speed things up.
4605
4606 If CODE is an unsigned comparison, then we can never do this optimization,
4607 because it gives an incorrect result if the subtraction wraps around zero.
4608 ANSI C defines unsigned operations such that they never overflow, and
4609    thus such cases cannot be ignored. */
4610
4611 if (INTEGRAL_MODE_P (mode) && op1 != const0_rtx
4612 && ! ((GET_CODE (op0) == REG || GET_CODE (op0) == CONST_INT)
4613 && (GET_CODE (op1) == REG || GET_CODE (op1) == CONST_INT))
4614 && 0 != (tem = simplify_binary_operation (MINUS, mode, op0, op1))
4615 && code != GTU && code != GEU && code != LTU && code != LEU)
4616 return simplify_relational_operation (signed_condition (code),
4617 mode, tem, const0_rtx);
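
  /* This lets comparisons fold even with no constant operand: e.g.
     (gt:SI (plus REGX (const_int 1)) (plus REGX (const_int 2))),
     with REGX a hypothetical pseudo, has a difference of
     (const_int -1), and the signed comparison of -1 with zero then
     yields const0_rtx.  */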
4618
4619 /* For non-IEEE floating-point, if the two operands are equal, we know the
4620 result. */
4621 if (rtx_equal_p (op0, op1)
4622 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4623 || ! FLOAT_MODE_P (GET_MODE (op0)) || flag_fast_math))
4624 equal = 1, op0lt = 0, op0ltu = 0, op1lt = 0, op1ltu = 0;
4625
4626 /* If the operands are floating-point constants, see if we can fold
4627 the result. */
4628 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4629 else if (GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
4630 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
4631 {
4632 REAL_VALUE_TYPE d0, d1;
4633 jmp_buf handler;
4634
4635 if (setjmp (handler))
4636 return 0;
4637
4638 set_float_handler (handler);
4639 REAL_VALUE_FROM_CONST_DOUBLE (d0, op0);
4640 REAL_VALUE_FROM_CONST_DOUBLE (d1, op1);
4641 equal = REAL_VALUES_EQUAL (d0, d1);
4642 op0lt = op0ltu = REAL_VALUES_LESS (d0, d1);
4643 op1lt = op1ltu = REAL_VALUES_LESS (d1, d0);
4644 set_float_handler (NULL_PTR);
4645 }
4646 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4647
4648 /* Otherwise, see if the operands are both integers. */
4649 else if ((GET_MODE_CLASS (mode) == MODE_INT || mode == VOIDmode)
4650 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
4651 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
4652 {
4653 int width = GET_MODE_BITSIZE (mode);
4654 HOST_WIDE_INT l0s, h0s, l1s, h1s;
4655 unsigned HOST_WIDE_INT l0u, h0u, l1u, h1u;
4656
4657 /* Get the two words comprising each integer constant. */
4658 if (GET_CODE (op0) == CONST_DOUBLE)
4659 {
4660 l0u = l0s = CONST_DOUBLE_LOW (op0);
4661 h0u = h0s = CONST_DOUBLE_HIGH (op0);
4662 }
4663 else
4664 {
4665 l0u = l0s = INTVAL (op0);
4666 h0u = h0s = l0s < 0 ? -1 : 0;
4667 }
4668
4669 if (GET_CODE (op1) == CONST_DOUBLE)
4670 {
4671 l1u = l1s = CONST_DOUBLE_LOW (op1);
4672 h1u = h1s = CONST_DOUBLE_HIGH (op1);
4673 }
4674 else
4675 {
4676 l1u = l1s = INTVAL (op1);
4677 h1u = h1s = l1s < 0 ? -1 : 0;
4678 }
4679
4680       /* If WIDTH is nonzero and no larger than HOST_BITS_PER_WIDE_INT,
4681 	 we have to sign or zero-extend the values. */
4682 if (width != 0 && width <= HOST_BITS_PER_WIDE_INT)
4683 h0u = h1u = 0, h0s = l0s < 0 ? -1 : 0, h1s = l1s < 0 ? -1 : 0;
4684
4685 if (width != 0 && width < HOST_BITS_PER_WIDE_INT)
4686 {
4687 l0u &= ((HOST_WIDE_INT) 1 << width) - 1;
4688 l1u &= ((HOST_WIDE_INT) 1 << width) - 1;
4689
4690 if (l0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4691 l0s |= ((HOST_WIDE_INT) (-1) << width);
4692
4693 if (l1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4694 l1s |= ((HOST_WIDE_INT) (-1) << width);
4695 }
4696
4697 equal = (h0u == h1u && l0u == l1u);
4698 op0lt = (h0s < h1s || (h0s == h1s && l0s < l1s));
4699 op1lt = (h1s < h0s || (h1s == h0s && l1s < l0s));
4700 op0ltu = (h0u < h1u || (h0u == h1u && l0u < l1u));
4701 op1ltu = (h1u < h0u || (h1u == h0u && l1u < l0u));
4702 }
4703
4704 /* Otherwise, there are some code-specific tests we can make. */
4705 else
4706 {
4707 switch (code)
4708 {
4709 case EQ:
4710 /* References to the frame plus a constant or labels cannot
4711 be zero, but a SYMBOL_REF can due to #pragma weak. */
4712 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4713 || GET_CODE (op0) == LABEL_REF)
4714 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4715 /* On some machines, the ap reg can be 0 sometimes. */
4716 && op0 != arg_pointer_rtx
4717 #endif
4718 )
4719 return const0_rtx;
4720 break;
4721
4722 case NE:
4723 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4724 || GET_CODE (op0) == LABEL_REF)
4725 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4726 && op0 != arg_pointer_rtx
4727 #endif
4728 )
4729 return const_true_rtx;
4730 break;
4731
4732 case GEU:
4733 /* Unsigned values are never negative. */
4734 if (op1 == const0_rtx)
4735 return const_true_rtx;
4736 break;
4737
4738 case LTU:
4739 if (op1 == const0_rtx)
4740 return const0_rtx;
4741 break;
4742
4743 case LEU:
4744 /* Unsigned values are never greater than the largest
4745 unsigned value. */
4746 if (GET_CODE (op1) == CONST_INT
4747 && INTVAL (op1) == GET_MODE_MASK (mode)
4748 && INTEGRAL_MODE_P (mode))
4749 return const_true_rtx;
4750 break;
4751
4752 case GTU:
4753 if (GET_CODE (op1) == CONST_INT
4754 && INTVAL (op1) == GET_MODE_MASK (mode)
4755 && INTEGRAL_MODE_P (mode))
4756 return const0_rtx;
4757 break;
4758
4759 default:
4760 break;
4761 }
4762
4763 return 0;
4764 }
4765
4766 /* If we reach here, EQUAL, OP0LT, OP0LTU, OP1LT, and OP1LTU are set
4767 as appropriate. */
4768 switch (code)
4769 {
4770 case EQ:
4771 return equal ? const_true_rtx : const0_rtx;
4772 case NE:
4773 return ! equal ? const_true_rtx : const0_rtx;
4774 case LT:
4775 return op0lt ? const_true_rtx : const0_rtx;
4776 case GT:
4777 return op1lt ? const_true_rtx : const0_rtx;
4778 case LTU:
4779 return op0ltu ? const_true_rtx : const0_rtx;
4780 case GTU:
4781 return op1ltu ? const_true_rtx : const0_rtx;
4782 case LE:
4783 return equal || op0lt ? const_true_rtx : const0_rtx;
4784 case GE:
4785 return equal || op1lt ? const_true_rtx : const0_rtx;
4786 case LEU:
4787 return equal || op0ltu ? const_true_rtx : const0_rtx;
4788 case GEU:
4789 return equal || op1ltu ? const_true_rtx : const0_rtx;
4790 default:
4791 abort ();
4792 }
4793 }
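
/* An illustration of the narrow-mode care taken above: 128 and -128
   have the same 8 bit pattern, so

	simplify_relational_operation (EQ, QImode,
				       GEN_INT (128), GEN_INT (-128))

   yields const_true_rtx, while the same call with SImode yields
   const0_rtx.  */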
4794 \f
4795 /* Simplify CODE, an operation with result mode MODE and three operands,
4796 OP0, OP1, and OP2. OP0_MODE was the mode of OP0 before it became
4797    a constant. Return 0 if no simplification is possible. */
4798
4799 rtx
4800 simplify_ternary_operation (code, mode, op0_mode, op0, op1, op2)
4801 enum rtx_code code;
4802 enum machine_mode mode, op0_mode;
4803 rtx op0, op1, op2;
4804 {
4805 int width = GET_MODE_BITSIZE (mode);
4806
4807 /* VOIDmode means "infinite" precision. */
4808 if (width == 0)
4809 width = HOST_BITS_PER_WIDE_INT;
4810
4811 switch (code)
4812 {
4813 case SIGN_EXTRACT:
4814 case ZERO_EXTRACT:
4815 if (GET_CODE (op0) == CONST_INT
4816 && GET_CODE (op1) == CONST_INT
4817 && GET_CODE (op2) == CONST_INT
4818 && INTVAL (op1) + INTVAL (op2) <= GET_MODE_BITSIZE (op0_mode)
4819 && width <= HOST_BITS_PER_WIDE_INT)
4820 {
4821 /* Extracting a bit-field from a constant */
4822 HOST_WIDE_INT val = INTVAL (op0);
4823
4824 if (BITS_BIG_ENDIAN)
4825 val >>= (GET_MODE_BITSIZE (op0_mode)
4826 - INTVAL (op2) - INTVAL (op1));
4827 else
4828 val >>= INTVAL (op2);
4829
4830 if (HOST_BITS_PER_WIDE_INT != INTVAL (op1))
4831 {
4832 /* First zero-extend. */
4833 val &= ((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1;
4834 /* If desired, propagate sign bit. */
4835 if (code == SIGN_EXTRACT
4836 && (val & ((HOST_WIDE_INT) 1 << (INTVAL (op1) - 1))))
4837 val |= ~ (((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1);
4838 }
4839
4840 /* Clear the bits that don't belong in our mode,
4841 unless they and our sign bit are all one.
4842 So we get either a reasonable negative value or a reasonable
4843 unsigned value for this mode. */
4844 if (width < HOST_BITS_PER_WIDE_INT
4845 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4846 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4847 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4848
4849 return GEN_INT (val);
4850 }
4851 break;
4852
4853 case IF_THEN_ELSE:
4854 if (GET_CODE (op0) == CONST_INT)
4855 return op0 != const0_rtx ? op1 : op2;
4856
4857 /* Convert a == b ? b : a to "a". */
4858 if (GET_CODE (op0) == NE && ! side_effects_p (op0)
4859 && rtx_equal_p (XEXP (op0, 0), op1)
4860 && rtx_equal_p (XEXP (op0, 1), op2))
4861 return op1;
4862 else if (GET_CODE (op0) == EQ && ! side_effects_p (op0)
4863 && rtx_equal_p (XEXP (op0, 1), op1)
4864 && rtx_equal_p (XEXP (op0, 0), op2))
4865 return op2;
4866 else if (GET_RTX_CLASS (GET_CODE (op0)) == '<' && ! side_effects_p (op0))
4867 {
4868 rtx temp;
4869 temp = simplify_relational_operation (GET_CODE (op0), op0_mode,
4870 XEXP (op0, 0), XEXP (op0, 1));
4871 /* See if any simplifications were possible. */
4872 if (temp == const0_rtx)
4873 return op2;
4874 else if (temp == const1_rtx)
4875 return op1;
4876 }
4877 break;
4878
4879 default:
4880 abort ();
4881 }
4882
4883 return 0;
4884 }
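
/* For instance, extracting the low nibble of 0xab as an unsigned
   bit-field (with BITS_BIG_ENDIAN zero, so OP2 counts from the low
   end):

	simplify_ternary_operation (ZERO_EXTRACT, SImode, SImode,
				    GEN_INT (0xab), GEN_INT (4),
				    GEN_INT (0))

   shifts right by 0, masks to four bits and returns (const_int 11);
   SIGN_EXTRACT of the same field propagates bit 3 and returns
   (const_int -5).  */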
4885 \f
4886 /* If X is a nontrivial arithmetic operation on an argument
4887 for which a constant value can be determined, return
4888 the result of operating on that value, as a constant.
4889 Otherwise, return X, possibly with one or more operands
4890 modified by recursive calls to this function.
4891
4892 If X is a register whose contents are known, we do NOT
4893 return those contents here. equiv_constant is called to
4894 perform that task.
4895
4896 INSN is the insn that we may be modifying. If it is 0, make a copy
4897 of X before modifying it. */
4898
4899 static rtx
4900 fold_rtx (x, insn)
4901 rtx x;
4902 rtx insn;
4903 {
4904 register enum rtx_code code;
4905 register enum machine_mode mode;
4906 register char *fmt;
4907 register int i;
4908 rtx new = 0;
4909 int copied = 0;
4910 int must_swap = 0;
4911
4912 /* Folded equivalents of first two operands of X. */
4913 rtx folded_arg0;
4914 rtx folded_arg1;
4915
4916 /* Constant equivalents of first three operands of X;
4917 0 when no such equivalent is known. */
4918 rtx const_arg0;
4919 rtx const_arg1;
4920 rtx const_arg2;
4921
4922 /* The mode of the first operand of X. We need this for sign and zero
4923 extends. */
4924 enum machine_mode mode_arg0;
4925
4926 if (x == 0)
4927 return x;
4928
4929 mode = GET_MODE (x);
4930 code = GET_CODE (x);
4931 switch (code)
4932 {
4933 case CONST:
4934 /* If the operand is a CONSTANT_P_RTX, see if what's inside it
4935 is known to be constant and replace the whole thing with a
4936 CONST_INT of either zero or one. Note that this code assumes
4937 that an insn that recognizes a CONST will also recognize a
4938 CONST_INT, but that seems to be a safe assumption. */
4939 if (GET_CODE (XEXP (x, 0)) == CONSTANT_P_RTX)
4940 {
4941 x = equiv_constant (fold_rtx (XEXP (XEXP (x, 0), 0), 0));
4942 return (x != 0 && (GET_CODE (x) == CONST_INT
4943 || GET_CODE (x) == CONST_DOUBLE)
4944 ? const1_rtx : const0_rtx);
4945 }
4946
4947 /* ... fall through ... */
4948
4949 case CONST_INT:
4950 case CONST_DOUBLE:
4951 case SYMBOL_REF:
4952 case LABEL_REF:
4953 case REG:
4954       /* No use simplifying an EXPR_LIST
4955 	 since EXPR_LISTs are used only for lists of args
4956 	 in a function call's REG_EQUAL note. */
4957 case EXPR_LIST:
4958 /* Changing anything inside an ADDRESSOF is incorrect; we don't
4959        want to make, e.g., (addressof (const_int 0)) just because
4960 the location is known to be zero. */
4961 case ADDRESSOF:
4962 return x;
4963
4964 #ifdef HAVE_cc0
4965 case CC0:
4966 return prev_insn_cc0;
4967 #endif
4968
4969 case PC:
4970 /* If the next insn is a CODE_LABEL followed by a jump table,
4971 PC's value is a LABEL_REF pointing to that label. That
4972 lets us fold switch statements on the Vax. */
4973 if (insn && GET_CODE (insn) == JUMP_INSN)
4974 {
4975 rtx next = next_nonnote_insn (insn);
4976
4977 if (next && GET_CODE (next) == CODE_LABEL
4978 && NEXT_INSN (next) != 0
4979 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
4980 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
4981 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
4982 return gen_rtx_LABEL_REF (Pmode, next);
4983 }
4984 break;
4985
4986 case SUBREG:
4987 /* See if we previously assigned a constant value to this SUBREG. */
4988 if ((new = lookup_as_function (x, CONST_INT)) != 0
4989 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
4990 return new;
4991
4992 /* If this is a paradoxical SUBREG, we have no idea what value the
4993 extra bits would have. However, if the operand is equivalent
4994 to a SUBREG whose operand is the same as our mode, and all the
4995 modes are within a word, we can just use the inner operand
4996 because these SUBREGs just say how to treat the register.
4997
4998 Similarly if we find an integer constant. */
4999
5000 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
5001 {
5002 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
5003 struct table_elt *elt;
5004
5005 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
5006 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
5007 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
5008 imode)) != 0)
5009 for (elt = elt->first_same_value;
5010 elt; elt = elt->next_same_value)
5011 {
5012 if (CONSTANT_P (elt->exp)
5013 && GET_MODE (elt->exp) == VOIDmode)
5014 return elt->exp;
5015
5016 if (GET_CODE (elt->exp) == SUBREG
5017 && GET_MODE (SUBREG_REG (elt->exp)) == mode
5018 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
5019 return copy_rtx (SUBREG_REG (elt->exp));
5020 }
5021
5022 return x;
5023 }
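
      /* For instance, (subreg:SI (reg:HI N) 0) on a 32 bit target is
	 paradoxical (N being some hypothetical pseudo).  If (reg:HI N)
	 has a known equivalent (const_int 7), the loop above returns
	 that CONST_INT: being VOIDmode, it constrains none of the
	 undefined upper 16 bits.  */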
5024
5025 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
5026 We might be able to if the SUBREG is extracting a single word in an
5027 integral mode or extracting the low part. */
5028
5029 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
5030 const_arg0 = equiv_constant (folded_arg0);
5031 if (const_arg0)
5032 folded_arg0 = const_arg0;
5033
5034 if (folded_arg0 != SUBREG_REG (x))
5035 {
5036 new = 0;
5037
5038 if (GET_MODE_CLASS (mode) == MODE_INT
5039 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
5040 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
5041 new = operand_subword (folded_arg0, SUBREG_WORD (x), 0,
5042 GET_MODE (SUBREG_REG (x)));
5043 if (new == 0 && subreg_lowpart_p (x))
5044 new = gen_lowpart_if_possible (mode, folded_arg0);
5045 if (new)
5046 return new;
5047 }
5048
5049 /* If this is a narrowing SUBREG and our operand is a REG, see if
5050 we can find an equivalence for REG that is an arithmetic operation
5051 in a wider mode where both operands are paradoxical SUBREGs
5052 from objects of our result mode. In that case, we couldn't report
5053 an equivalent value for that operation, since we don't know what the
5054 extra bits will be. But we can find an equivalence for this SUBREG
5055        by folding that operation in the narrow mode. This allows us to
5056 fold arithmetic in narrow modes when the machine only supports
5057 word-sized arithmetic.
5058
5059 Also look for a case where we have a SUBREG whose operand is the
5060 same as our result. If both modes are smaller than a word, we
5061 are simply interpreting a register in different modes and we
5062 can use the inner value. */
5063
5064 if (GET_CODE (folded_arg0) == REG
5065 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
5066 && subreg_lowpart_p (x))
5067 {
5068 struct table_elt *elt;
5069
5070 /* We can use HASH here since we know that canon_hash won't be
5071 called. */
5072 elt = lookup (folded_arg0,
5073 HASH (folded_arg0, GET_MODE (folded_arg0)),
5074 GET_MODE (folded_arg0));
5075
5076 if (elt)
5077 elt = elt->first_same_value;
5078
5079 for (; elt; elt = elt->next_same_value)
5080 {
5081 enum rtx_code eltcode = GET_CODE (elt->exp);
5082
5083 /* Just check for unary and binary operations. */
5084 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
5085 && GET_CODE (elt->exp) != SIGN_EXTEND
5086 && GET_CODE (elt->exp) != ZERO_EXTEND
5087 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
5088 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
5089 {
5090 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
5091
5092 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
5093 op0 = fold_rtx (op0, NULL_RTX);
5094
5095 op0 = equiv_constant (op0);
5096 if (op0)
5097 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
5098 op0, mode);
5099 }
5100 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
5101 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
5102 && eltcode != DIV && eltcode != MOD
5103 && eltcode != UDIV && eltcode != UMOD
5104 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
5105 && eltcode != ROTATE && eltcode != ROTATERT
5106 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
5107 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
5108 == mode))
5109 || CONSTANT_P (XEXP (elt->exp, 0)))
5110 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
5111 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
5112 == mode))
5113 || CONSTANT_P (XEXP (elt->exp, 1))))
5114 {
5115 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
5116 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
5117
5118 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
5119 op0 = fold_rtx (op0, NULL_RTX);
5120
5121 if (op0)
5122 op0 = equiv_constant (op0);
5123
5124 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
5125 op1 = fold_rtx (op1, NULL_RTX);
5126
5127 if (op1)
5128 op1 = equiv_constant (op1);
5129
5130 /* If we are looking for the low SImode part of
5131 (ashift:DI c (const_int 32)), it doesn't work
5132 to compute that in SImode, because a 32-bit shift
5133 in SImode is unpredictable. We know the value is 0. */
5134 if (op0 && op1
5135 && GET_CODE (elt->exp) == ASHIFT
5136 && GET_CODE (op1) == CONST_INT
5137 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
5138 {
5139 if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
5140
5141 /* If the count fits in the inner mode's width,
5142 but exceeds the outer mode's width,
5143 the value will get truncated to 0
5144 by the subreg. */
5145 new = const0_rtx;
5146 else
5147 /* If the count exceeds even the inner mode's width,
5148 don't fold this expression. */
5149 new = 0;
5150 }
5151 else if (op0 && op1)
5152 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
5153 op0, op1);
5154 }
5155
5156 else if (GET_CODE (elt->exp) == SUBREG
5157 && GET_MODE (SUBREG_REG (elt->exp)) == mode
5158 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
5159 <= UNITS_PER_WORD)
5160 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
5161 new = copy_rtx (SUBREG_REG (elt->exp));
5162
5163 if (new)
5164 return new;
5165 }
5166 }
5167
5168 return x;
5169
5170 case NOT:
5171 case NEG:
5172 /* If we have (NOT Y), see if Y is known to be (NOT Z).
5173 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
5174 new = lookup_as_function (XEXP (x, 0), code);
5175 if (new)
5176 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
5177 break;
5178
5179 case MEM:
5180 /* If we are not actually processing an insn, don't try to find the
5181 best address. Not only don't we care, but we could modify the
5182 MEM in an invalid way since we have no insn to validate against. */
5183 if (insn != 0)
5184 find_best_addr (insn, &XEXP (x, 0));
5185
5186 {
5187 /* Even if we don't fold in the insn itself,
5188 we can safely do so here, in hopes of getting a constant. */
5189 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
5190 rtx base = 0;
5191 HOST_WIDE_INT offset = 0;
5192
5193 if (GET_CODE (addr) == REG
5194 && REGNO_QTY_VALID_P (REGNO (addr))
5195 && GET_MODE (addr) == qty_mode[reg_qty[REGNO (addr)]]
5196 && qty_const[reg_qty[REGNO (addr)]] != 0)
5197 addr = qty_const[reg_qty[REGNO (addr)]];
5198
5199 /* If address is constant, split it into a base and integer offset. */
5200 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
5201 base = addr;
5202 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
5203 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
5204 {
5205 base = XEXP (XEXP (addr, 0), 0);
5206 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
5207 }
5208 else if (GET_CODE (addr) == LO_SUM
5209 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
5210 base = XEXP (addr, 1);
5211 else if (GET_CODE (addr) == ADDRESSOF)
5212 return change_address (x, VOIDmode, addr);
5213
5214 /* If this is a constant pool reference, we can fold it into its
5215 constant to allow better value tracking. */
5216 if (base && GET_CODE (base) == SYMBOL_REF
5217 && CONSTANT_POOL_ADDRESS_P (base))
5218 {
5219 rtx constant = get_pool_constant (base);
5220 enum machine_mode const_mode = get_pool_mode (base);
5221 rtx new;
5222
5223 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
5224 constant_pool_entries_cost = COST (constant);
5225
5226 /* If we are loading the full constant, we have an equivalence. */
5227 if (offset == 0 && mode == const_mode)
5228 return constant;
5229
5230 /* If this actually isn't a constant (weird!), we can't do
5231 anything. Otherwise, handle the two most common cases:
5232 extracting a word from a multi-word constant, and extracting
5233 the low-order bits. Other cases don't seem common enough to
5234 worry about. */
5235 if (! CONSTANT_P (constant))
5236 return x;
5237
5238 if (GET_MODE_CLASS (mode) == MODE_INT
5239 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
5240 && offset % UNITS_PER_WORD == 0
5241 && (new = operand_subword (constant,
5242 offset / UNITS_PER_WORD,
5243 0, const_mode)) != 0)
5244 return new;
5245
5246 if (((BYTES_BIG_ENDIAN
5247 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
5248 || (! BYTES_BIG_ENDIAN && offset == 0))
5249 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
5250 return new;
5251 }
5252
5253 /* If this is a reference to a label at a known position in a jump
5254 table, we also know its value. */
5255 if (base && GET_CODE (base) == LABEL_REF)
5256 {
5257 rtx label = XEXP (base, 0);
5258 rtx table_insn = NEXT_INSN (label);
5259
5260 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5261 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
5262 {
5263 rtx table = PATTERN (table_insn);
5264
5265 if (offset >= 0
5266 && (offset / GET_MODE_SIZE (GET_MODE (table))
5267 < XVECLEN (table, 0)))
5268 return XVECEXP (table, 0,
5269 offset / GET_MODE_SIZE (GET_MODE (table)));
5270 }
5271 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5272 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
5273 {
5274 rtx table = PATTERN (table_insn);
5275
5276 if (offset >= 0
5277 && (offset / GET_MODE_SIZE (GET_MODE (table))
5278 < XVECLEN (table, 1)))
5279 {
5280 offset /= GET_MODE_SIZE (GET_MODE (table));
5281 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
5282 XEXP (table, 0));
5283
5284 if (GET_MODE (table) != Pmode)
5285 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
5286
5287 /* Indicate this is a constant. This isn't a
5288 valid form of CONST, but it will only be used
5289 to fold the next insns and then discarded, so
5290 it should be safe. */
5291 return gen_rtx_CONST (GET_MODE (new), new);
5292 }
5293 }
5294 }
5295
5296 return x;
5297 }
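
      /* As an example of the constant-pool case above: a reference
	 (mem:SI (symbol_ref SYM)), where SYM (hypothetical) is the
	 address of an SImode pool entry holding (const_int 42), folds
	 straight to (const_int 42), since OFFSET is 0 and the modes
	 agree.  */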
5298
5299 case ASM_OPERANDS:
5300 for (i = XVECLEN (x, 3) - 1; i >= 0; i--)
5301 validate_change (insn, &XVECEXP (x, 3, i),
5302 fold_rtx (XVECEXP (x, 3, i), insn), 0);
5303 break;
5304
5305 default:
5306 break;
5307 }
5308
5309 const_arg0 = 0;
5310 const_arg1 = 0;
5311 const_arg2 = 0;
5312 mode_arg0 = VOIDmode;
5313
5314 /* Try folding our operands.
5315 Then see which ones have constant values known. */
5316
5317 fmt = GET_RTX_FORMAT (code);
5318 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5319 if (fmt[i] == 'e')
5320 {
5321 rtx arg = XEXP (x, i);
5322 rtx folded_arg = arg, const_arg = 0;
5323 enum machine_mode mode_arg = GET_MODE (arg);
5324 rtx cheap_arg, expensive_arg;
5325 rtx replacements[2];
5326 int j;
5327
5328 /* Most arguments are cheap, so handle them specially. */
5329 switch (GET_CODE (arg))
5330 {
5331 case REG:
5332 /* This is the same as calling equiv_constant; it is duplicated
5333 here for speed. */
5334 if (REGNO_QTY_VALID_P (REGNO (arg))
5335 && qty_const[reg_qty[REGNO (arg)]] != 0
5336 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != REG
5337 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != PLUS)
5338 const_arg
5339 = gen_lowpart_if_possible (GET_MODE (arg),
5340 qty_const[reg_qty[REGNO (arg)]]);
5341 break;
5342
5343 case CONST:
5344 case CONST_INT:
5345 case SYMBOL_REF:
5346 case LABEL_REF:
5347 case CONST_DOUBLE:
5348 const_arg = arg;
5349 break;
5350
5351 #ifdef HAVE_cc0
5352 case CC0:
5353 folded_arg = prev_insn_cc0;
5354 mode_arg = prev_insn_cc0_mode;
5355 const_arg = equiv_constant (folded_arg);
5356 break;
5357 #endif
5358
5359 default:
5360 folded_arg = fold_rtx (arg, insn);
5361 const_arg = equiv_constant (folded_arg);
5362 }
5363
5364 /* For the first three operands, see if the operand
5365 is constant or equivalent to a constant. */
5366 switch (i)
5367 {
5368 case 0:
5369 folded_arg0 = folded_arg;
5370 const_arg0 = const_arg;
5371 mode_arg0 = mode_arg;
5372 break;
5373 case 1:
5374 folded_arg1 = folded_arg;
5375 const_arg1 = const_arg;
5376 break;
5377 case 2:
5378 const_arg2 = const_arg;
5379 break;
5380 }
5381
5382 /* Pick the least expensive of the folded argument and an
5383 equivalent constant argument. */
5384 if (const_arg == 0 || const_arg == folded_arg
5385 || COST (const_arg) > COST (folded_arg))
5386 cheap_arg = folded_arg, expensive_arg = const_arg;
5387 else
5388 cheap_arg = const_arg, expensive_arg = folded_arg;
5389
5390 /* Try to replace the operand with the cheapest of the two
5391 possibilities. If it doesn't work and this is either of the first
5392 two operands of a commutative operation, try swapping them.
5393 If THAT fails, try the more expensive, provided it is cheaper
5394 than what is already there. */
5395
5396 if (cheap_arg == XEXP (x, i))
5397 continue;
5398
5399 if (insn == 0 && ! copied)
5400 {
5401 x = copy_rtx (x);
5402 copied = 1;
5403 }
5404
5405 replacements[0] = cheap_arg, replacements[1] = expensive_arg;
5406 for (j = 0;
5407 j < 2 && replacements[j]
5408 && COST (replacements[j]) < COST (XEXP (x, i));
5409 j++)
5410 {
5411 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
5412 break;
5413
5414 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c')
5415 {
5416 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
5417 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
5418
5419 if (apply_change_group ())
5420 {
5421 /* Swap them back to the original (invalid) order so that this
5422 loop can continue, and flag them to be swapped again later. */
5423 rtx tem;
5424
5425 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
5426 XEXP (x, 1) = tem;
5427 must_swap = 1;
5428 break;
5429 }
5430 }
5431 }
5432 }
5433
5434 else
5435 {
5436 if (fmt[i] == 'E')
5437 /* Don't try to fold inside of a vector of expressions.
5438 Doing nothing is harmless. */
5439 {;}
5440 }
5441
5442 /* If a commutative operation, place a constant integer as the second
5443 operand unless the first operand is also a constant integer. Otherwise,
5444 place any constant second unless the first operand is also a constant. */
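/* For example (register numbers here are purely illustrative):
   (plus (const_int 4) (reg 60)) becomes (plus (reg 60) (const_int 4)),
   and (plus (symbol_ref ("x")) (reg 60)) becomes
   (plus (reg 60) (symbol_ref ("x"))), giving equivalent expressions a
   single canonical form. */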
5445
5446 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
5447 {
5448 if (must_swap || (const_arg0
5449 && (const_arg1 == 0
5450 || (GET_CODE (const_arg0) == CONST_INT
5451 && GET_CODE (const_arg1) != CONST_INT))))
5452 {
5453 register rtx tem = XEXP (x, 0);
5454
5455 if (insn == 0 && ! copied)
5456 {
5457 x = copy_rtx (x);
5458 copied = 1;
5459 }
5460
5461 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
5462 validate_change (insn, &XEXP (x, 1), tem, 1);
5463 if (apply_change_group ())
5464 {
5465 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
5466 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
5467 }
5468 }
5469 }
5470
5471 /* If X is an arithmetic operation, see if we can simplify it. */
5472
5473 switch (GET_RTX_CLASS (code))
5474 {
5475 case '1':
5476 {
5477 int is_const = 0;
5478
5479 /* We can't simplify extension ops unless we know the
5480 original mode. */
5481 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
5482 && mode_arg0 == VOIDmode)
5483 break;
5484
5485 /* If we had a CONST, strip it off and put it back later if we
5486 fold. */
5487 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
5488 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
5489
5490 new = simplify_unary_operation (code, mode,
5491 const_arg0 ? const_arg0 : folded_arg0,
5492 mode_arg0);
5493 if (new != 0 && is_const)
5494 new = gen_rtx_CONST (mode, new);
5495 }
5496 break;
5497
5498 case '<':
5499 /* See what items are actually being compared and set FOLDED_ARG[01]
5500 to those values and CODE to the actual comparison code. If any are
5501 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
5502 do anything if both operands are already known to be constant. */
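/* For example, given (ne (reg 60) (const_int 0)) where (reg 60) is
   known to hold the result of (gt:SI (reg 61) (reg 62)), CODE becomes
   GT and the folded args become (reg 61) and (reg 62). (The register
   numbers are illustrative only.) */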
5503
5504 if (const_arg0 == 0 || const_arg1 == 0)
5505 {
5506 struct table_elt *p0, *p1;
5507 rtx true = const_true_rtx, false = const0_rtx;
5508 enum machine_mode mode_arg1;
5509
5510 #ifdef FLOAT_STORE_FLAG_VALUE
5511 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5512 {
5513 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5514 mode);
5515 false = CONST0_RTX (mode);
5516 }
5517 #endif
5518
5519 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
5520 &mode_arg0, &mode_arg1);
5521 const_arg0 = equiv_constant (folded_arg0);
5522 const_arg1 = equiv_constant (folded_arg1);
5523
5524 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
5525 what kinds of things are being compared, so we can't do
5526 anything with this comparison. */
5527
5528 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
5529 break;
5530
5531 /* If we do not now have two constants being compared, see
5532 if we can nevertheless deduce some things about the
5533 comparison. */
5534 if (const_arg0 == 0 || const_arg1 == 0)
5535 {
5536 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or
5537 non-explicit constant? These aren't zero, but we
5538 don't know their sign. */
5539 if (const_arg1 == const0_rtx
5540 && (NONZERO_BASE_PLUS_P (folded_arg0)
5541 #if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
5542 come out as 0. */
5543 || GET_CODE (folded_arg0) == SYMBOL_REF
5544 #endif
5545 || GET_CODE (folded_arg0) == LABEL_REF
5546 || GET_CODE (folded_arg0) == CONST))
5547 {
5548 if (code == EQ)
5549 return false;
5550 else if (code == NE)
5551 return true;
5552 }
5553
5554 /* See if the two operands are the same. We don't do this
5555 for IEEE floating-point since we can't assume x == x
5556 since x might be a NaN. */
5557
5558 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5559 || ! FLOAT_MODE_P (mode_arg0) || flag_fast_math)
5560 && (folded_arg0 == folded_arg1
5561 || (GET_CODE (folded_arg0) == REG
5562 && GET_CODE (folded_arg1) == REG
5563 && (reg_qty[REGNO (folded_arg0)]
5564 == reg_qty[REGNO (folded_arg1)]))
5565 || ((p0 = lookup (folded_arg0,
5566 (safe_hash (folded_arg0, mode_arg0)
5567 % NBUCKETS), mode_arg0))
5568 && (p1 = lookup (folded_arg1,
5569 (safe_hash (folded_arg1, mode_arg0)
5570 % NBUCKETS), mode_arg0))
5571 && p0->first_same_value == p1->first_same_value)))
5572 return ((code == EQ || code == LE || code == GE
5573 || code == LEU || code == GEU)
5574 ? true : false);
5575
5576 /* If FOLDED_ARG0 is a register, see if the comparison we are
5577 doing now is either the same as we did before or the reverse
5578 (we only check the reverse if not floating-point). */
5579 else if (GET_CODE (folded_arg0) == REG)
5580 {
5581 int qty = reg_qty[REGNO (folded_arg0)];
5582
5583 if (REGNO_QTY_VALID_P (REGNO (folded_arg0))
5584 && (comparison_dominates_p (qty_comparison_code[qty], code)
5585 || (comparison_dominates_p (qty_comparison_code[qty],
5586 reverse_condition (code))
5587 && ! FLOAT_MODE_P (mode_arg0)))
5588 && (rtx_equal_p (qty_comparison_const[qty], folded_arg1)
5589 || (const_arg1
5590 && rtx_equal_p (qty_comparison_const[qty],
5591 const_arg1))
5592 || (GET_CODE (folded_arg1) == REG
5593 && (reg_qty[REGNO (folded_arg1)]
5594 == qty_comparison_qty[qty]))))
5595 return (comparison_dominates_p (qty_comparison_code[qty],
5596 code)
5597 ? true : false);
5598 }
5599 }
5600 }
5601
5602 /* If we are comparing against zero, see if the first operand is
5603 equivalent to an IOR with a constant. If so, we may be able to
5604 determine the result of this comparison. */
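/* For instance, if (reg 60) is known equivalent to
   (ior (reg 61) (const_int 4)), then (reg 60) cannot be zero, so
   (eq (reg 60) (const_int 0)) folds to false and NE folds to true;
   if the known constant also has the sign bit set, the signed
   comparisons against zero are decided as well. (Illustrative
   register numbers.) */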
5605
5606 if (const_arg1 == const0_rtx)
5607 {
5608 rtx y = lookup_as_function (folded_arg0, IOR);
5609 rtx inner_const;
5610
5611 if (y != 0
5612 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
5613 && GET_CODE (inner_const) == CONST_INT
5614 && INTVAL (inner_const) != 0)
5615 {
5616 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
5617 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
5618 && (INTVAL (inner_const)
5619 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
5620 rtx true = const_true_rtx, false = const0_rtx;
5621
5622 #ifdef FLOAT_STORE_FLAG_VALUE
5623 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5624 {
5625 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5626 mode);
5627 false = CONST0_RTX (mode);
5628 }
5629 #endif
5630
5631 switch (code)
5632 {
5633 case EQ:
5634 return false;
5635 case NE:
5636 return true;
5637 case LT: case LE:
5638 if (has_sign)
5639 return true;
5640 break;
5641 case GT: case GE:
5642 if (has_sign)
5643 return false;
5644 break;
5645 default:
5646 break;
5647 }
5648 }
5649 }
5650
5651 new = simplify_relational_operation (code, mode_arg0,
5652 const_arg0 ? const_arg0 : folded_arg0,
5653 const_arg1 ? const_arg1 : folded_arg1);
5654 #ifdef FLOAT_STORE_FLAG_VALUE
5655 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
5656 new = ((new == const0_rtx) ? CONST0_RTX (mode)
5657 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE, mode));
5658 #endif
5659 break;
5660
5661 case '2':
5662 case 'c':
5663 switch (code)
5664 {
5665 case PLUS:
5666 /* If the second operand is a LABEL_REF, see if the first is a MINUS
5667 with that LABEL_REF as its second operand. If so, the result is
5668 the first operand of that MINUS. This handles switches with an
5669 ADDR_DIFF_VEC table. */
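/* Concretely, if FOLDED_ARG0 is known to be
   (minus (reg 60) (label_ref L)), then
   (plus (minus (reg 60) (label_ref L)) (label_ref L)) folds to just
   (reg 60). (The register number is illustrative.) */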
5670 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
5671 {
5672 rtx y
5673 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
5674 : lookup_as_function (folded_arg0, MINUS);
5675
5676 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5677 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
5678 return XEXP (y, 0);
5679
5680 /* Now try for a CONST of a MINUS like the above. */
5681 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
5682 : lookup_as_function (folded_arg0, CONST))) != 0
5683 && GET_CODE (XEXP (y, 0)) == MINUS
5684 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5685 && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg1, 0))
5686 return XEXP (XEXP (y, 0), 0);
5687 }
5688
5689 /* Likewise if the operands are in the other order. */
5690 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
5691 {
5692 rtx y
5693 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
5694 : lookup_as_function (folded_arg1, MINUS);
5695
5696 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5697 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
5698 return XEXP (y, 0);
5699
5700 /* Now try for a CONST of a MINUS like the above. */
5701 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
5702 : lookup_as_function (folded_arg1, CONST))) != 0
5703 && GET_CODE (XEXP (y, 0)) == MINUS
5704 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5705 && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg0, 0))
5706 return XEXP (XEXP (y, 0), 0);
5707 }
5708
5709 /* If second operand is a register equivalent to a negative
5710 CONST_INT, see if we can find a register equivalent to the
5711 positive constant. Make a MINUS if so. Don't do this for
5712 a non-negative constant since we might then alternate between
5713 choosing positive and negative constants. Having the positive
5714 constant previously used is the more common case. Be sure
5715 the resulting constant is non-negative; if const_arg1 were
5716 the smallest negative number this would overflow: depending
5717 on the mode, this would either just be the same value (and
5718 hence not save anything) or be incorrect. */
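/* For example (illustrative registers): if (reg 61) is equivalent to
   (const_int -4) and some (reg 62) is already equivalent to
   (const_int 4), then (plus (reg 60) (reg 61)) can be rewritten as
   (minus (reg 60) (reg 62)). */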
5719 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
5720 && INTVAL (const_arg1) < 0
5721 && - INTVAL (const_arg1) >= 0
5722 && GET_CODE (folded_arg1) == REG)
5723 {
5724 rtx new_const = GEN_INT (- INTVAL (const_arg1));
5725 struct table_elt *p
5726 = lookup (new_const, safe_hash (new_const, mode) % NBUCKETS,
5727 mode);
5728
5729 if (p)
5730 for (p = p->first_same_value; p; p = p->next_same_value)
5731 if (GET_CODE (p->exp) == REG)
5732 return cse_gen_binary (MINUS, mode, folded_arg0,
5733 canon_reg (p->exp, NULL_RTX));
5734 }
5735 goto from_plus;
5736
5737 case MINUS:
5738 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
5739 If so, produce (PLUS Z C2-C). */
5740 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
5741 {
5742 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
5743 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
5744 return fold_rtx (plus_constant (copy_rtx (y),
5745 -INTVAL (const_arg1)),
5746 NULL_RTX);
5747 }
5748
5749 /* ... fall through ... */
5750
5751 from_plus:
5752 case SMIN: case SMAX: case UMIN: case UMAX:
5753 case IOR: case AND: case XOR:
5754 case MULT: case DIV: case UDIV:
5755 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
5756 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
5757 is known to be of similar form, we may be able to replace the
5758 operation with a combined operation. This may eliminate the
5759 intermediate operation if every use is simplified in this way.
5760 Note that the similar optimization done by combine.c only works
5761 if the intermediate operation's result has only one reference. */
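/* A sketch of the shift case (hypothetical registers): if (reg 60) is
   known to be (ashift (reg 61) (const_int 2)), then
   (ashift (reg 60) (const_int 3)) folds to
   (ashift (reg 61) (const_int 5)), since shift counts compose with
   PLUS (see ASSOCIATE_CODE below). */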
5762
5763 if (GET_CODE (folded_arg0) == REG
5764 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
5765 {
5766 int is_shift
5767 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
5768 rtx y = lookup_as_function (folded_arg0, code);
5769 rtx inner_const;
5770 enum rtx_code associate_code;
5771 rtx new_const;
5772
5773 if (y == 0
5774 || 0 == (inner_const
5775 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
5776 || GET_CODE (inner_const) != CONST_INT
5777 /* If we have compiled a statement like
5778 "if (x == (x & mask1))", and now are looking at
5779 "x & mask2", we will have a case where the first operand
5780 of Y is the same as our first operand. Unless we detect
5781 this case, an infinite loop will result. */
5782 || XEXP (y, 0) == folded_arg0)
5783 break;
5784
5785 /* Don't associate these operations if they are a PLUS with the
5786 same constant and it is a power of two. These might be doable
5787 with a pre- or post-increment. Similarly for two subtracts of
5788 identical powers of two with post-decrement. */
5789
5790 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
5791 && ((HAVE_PRE_INCREMENT
5792 && exact_log2 (INTVAL (const_arg1)) >= 0)
5793 || (HAVE_POST_INCREMENT
5794 && exact_log2 (INTVAL (const_arg1)) >= 0)
5795 || (HAVE_PRE_DECREMENT
5796 && exact_log2 (- INTVAL (const_arg1)) >= 0)
5797 || (HAVE_POST_DECREMENT
5798 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
5799 break;
5800
5801 /* Compute the code used to compose the constants. For example,
5802 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
5803
5804 associate_code
5805 = (code == MULT || code == DIV || code == UDIV ? MULT
5806 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
5807
5808 new_const = simplify_binary_operation (associate_code, mode,
5809 const_arg1, inner_const);
5810
5811 if (new_const == 0)
5812 break;
5813
5814 /* If we are associating shift operations, don't let this
5815 produce a shift of the size of the object or larger.
5816 This could occur when we follow a sign-extend by a right
5817 shift on a machine that does a sign-extend as a pair
5818 of shifts. */
5819
5820 if (is_shift && GET_CODE (new_const) == CONST_INT
5821 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
5822 {
5823 /* As an exception, we can turn an ASHIFTRT of this
5824 form into a shift of the number of bits - 1. */
5825 if (code == ASHIFTRT)
5826 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
5827 else
5828 break;
5829 }
5830
5831 y = copy_rtx (XEXP (y, 0));
5832
5833 /* If Y contains our first operand (the most common way this
5834 can happen is if Y is a MEM), we would go into an infinite
5835 loop if we tried to fold it. So don't in that case. */
5836
5837 if (! reg_mentioned_p (folded_arg0, y))
5838 y = fold_rtx (y, insn);
5839
5840 return cse_gen_binary (code, mode, y, new_const);
5841 }
5842 break;
5843
5844 default:
5845 break;
5846 }
5847
5848 new = simplify_binary_operation (code, mode,
5849 const_arg0 ? const_arg0 : folded_arg0,
5850 const_arg1 ? const_arg1 : folded_arg1);
5851 break;
5852
5853 case 'o':
5854 /* (lo_sum (high X) X) is simply X. */
5855 if (code == LO_SUM && const_arg0 != 0
5856 && GET_CODE (const_arg0) == HIGH
5857 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
5858 return const_arg1;
5859 break;
5860
5861 case '3':
5862 case 'b':
5863 new = simplify_ternary_operation (code, mode, mode_arg0,
5864 const_arg0 ? const_arg0 : folded_arg0,
5865 const_arg1 ? const_arg1 : folded_arg1,
5866 const_arg2 ? const_arg2 : XEXP (x, 2));
5867 break;
5868 }
5869
5870 return new ? new : x;
5871 }
5872 \f
5873 /* Return a constant value currently equivalent to X.
5874 Return 0 if we don't know one. */
5875
5876 static rtx
5877 equiv_constant (x)
5878 rtx x;
5879 {
5880 if (GET_CODE (x) == REG
5881 && REGNO_QTY_VALID_P (REGNO (x))
5882 && qty_const[reg_qty[REGNO (x)]])
5883 x = gen_lowpart_if_possible (GET_MODE (x), qty_const[reg_qty[REGNO (x)]]);
5884
5885 if (x == 0 || CONSTANT_P (x))
5886 return x;
5887
5888 /* If X is a MEM, try to fold it outside the context of any insn to see if
5889 it might be equivalent to a constant. That handles the case where it
5890 is a constant-pool reference. Then try to look it up in the hash table
5891 in case it is something whose value we have seen before. */
5892
5893 if (GET_CODE (x) == MEM)
5894 {
5895 struct table_elt *elt;
5896
5897 x = fold_rtx (x, NULL_RTX);
5898 if (CONSTANT_P (x))
5899 return x;
5900
5901 elt = lookup (x, safe_hash (x, GET_MODE (x)) % NBUCKETS, GET_MODE (x));
5902 if (elt == 0)
5903 return 0;
5904
5905 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
5906 if (elt->is_const && CONSTANT_P (elt->exp))
5907 return elt->exp;
5908 }
5909
5910 return 0;
5911 }
5912 \f
5913 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
5914 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
5915 least-significant part of X.
5916 MODE specifies how big a part of X to return.
5917
5918 If the requested operation cannot be done, 0 is returned.
5919
5920 This is similar to gen_lowpart in emit-rtl.c. */
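/* For example, the QImode lowpart of an SImode MEM is a QImode MEM at
   the same address (with the offset adjusted on big-endian targets),
   while the lowpart of a constant such as (const_int 5) comes straight
   from gen_lowpart_common. */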
5921
5922 rtx
5923 gen_lowpart_if_possible (mode, x)
5924 enum machine_mode mode;
5925 register rtx x;
5926 {
5927 rtx result = gen_lowpart_common (mode, x);
5928
5929 if (result)
5930 return result;
5931 else if (GET_CODE (x) == MEM)
5932 {
5933 /* This is the only other case we handle. */
5934 register int offset = 0;
5935 rtx new;
5936
5937 if (WORDS_BIG_ENDIAN)
5938 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
5939 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
5940 if (BYTES_BIG_ENDIAN)
5941 /* Adjust the address so that the address-after-the-data is
5942 unchanged. */
5943 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
5944 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
5945 new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset));
5946 if (! memory_address_p (mode, XEXP (new, 0)))
5947 return 0;
5948 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
5949 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
5950 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
5951 return new;
5952 }
5953 else
5954 return 0;
5955 }
5956 \f
5957 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
5958 branch. It will be zero if not.
5959
5960 In certain cases, this can cause us to add an equivalence. For example,
5961 if we are following the taken case of
5962 if (i == 2)
5963 we can add the fact that `i' and `2' are now equivalent.
5964
5965 In any case, we can record that this comparison was passed. If the same
5966 comparison is seen later, we will know its value. */
5967
5968 static void
5969 record_jump_equiv (insn, taken)
5970 rtx insn;
5971 int taken;
5972 {
5973 int cond_known_true;
5974 rtx op0, op1;
5975 enum machine_mode mode, mode0, mode1;
5976 int reversed_nonequality = 0;
5977 enum rtx_code code;
5978
5979 /* Ensure this is the right kind of insn. */
5980 if (! condjump_p (insn) || simplejump_p (insn))
5981 return;
5982
5983 /* See if this jump condition is known true or false. */
5984 if (taken)
5985 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 2) == pc_rtx);
5986 else
5987 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx);
5988
5989 /* Get the type of comparison being done and the operands being compared.
5990 If we had to reverse a non-equality condition, record that fact so we
5991 know that it isn't valid for floating-point. */
5992 code = GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 0));
5993 op0 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 0), insn);
5994 op1 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 1), insn);
5995
5996 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
5997 if (! cond_known_true)
5998 {
5999 reversed_nonequality = (code != EQ && code != NE);
6000 code = reverse_condition (code);
6001 }
6002
6003 /* The mode is the mode of the non-constant. */
6004 mode = mode0;
6005 if (mode1 != VOIDmode)
6006 mode = mode1;
6007
6008 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
6009 }
6010
6011 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
6012 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
6013 Make any useful entries we can with that information. Called from
6014 above function and called recursively. */
6015
6016 static void
6017 record_jump_cond (code, mode, op0, op1, reversed_nonequality)
6018 enum rtx_code code;
6019 enum machine_mode mode;
6020 rtx op0, op1;
6021 int reversed_nonequality;
6022 {
6023 unsigned op0_hash, op1_hash;
6024 int op0_in_memory, op0_in_struct, op1_in_memory, op1_in_struct;
6025 struct table_elt *op0_elt, *op1_elt;
6026
6027 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
6028 we know that they are also equal in the smaller mode (this is also
6029 true for all smaller modes whether or not there is a SUBREG, but
6030 is not worth testing for with no SUBREG). */
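/* E.g. (modes and registers illustrative): knowing
   (subreg:DI (reg:SI 60) 0) EQ (reg:DI 61) lets us record that
   (reg:SI 60) is also EQ to the SImode lowpart of (reg:DI 61). */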
6031
6032 /* Note that GET_MODE (op0) may not equal MODE. */
6033 if (code == EQ && GET_CODE (op0) == SUBREG
6034 && (GET_MODE_SIZE (GET_MODE (op0))
6035 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
6036 {
6037 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
6038 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
6039
6040 record_jump_cond (code, mode, SUBREG_REG (op0),
6041 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
6042 reversed_nonequality);
6043 }
6044
6045 if (code == EQ && GET_CODE (op1) == SUBREG
6046 && (GET_MODE_SIZE (GET_MODE (op1))
6047 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
6048 {
6049 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
6050 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
6051
6052 record_jump_cond (code, mode, SUBREG_REG (op1),
6053 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
6054 reversed_nonequality);
6055 }
6056
6057 /* Similarly, if this is an NE comparison, and either is a SUBREG
6058 making a smaller mode, we know the whole thing is also NE. */
6059
6060 /* Note that GET_MODE (op0) may not equal MODE;
6061 if we test MODE instead, we can get an infinite recursion
6062 alternating between two modes each wider than MODE. */
6063
6064 if (code == NE && GET_CODE (op0) == SUBREG
6065 && subreg_lowpart_p (op0)
6066 && (GET_MODE_SIZE (GET_MODE (op0))
6067 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
6068 {
6069 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
6070 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
6071
6072 record_jump_cond (code, mode, SUBREG_REG (op0),
6073 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
6074 reversed_nonequality);
6075 }
6076
6077 if (code == NE && GET_CODE (op1) == SUBREG
6078 && subreg_lowpart_p (op1)
6079 && (GET_MODE_SIZE (GET_MODE (op1))
6080 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
6081 {
6082 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
6083 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
6084
6085 record_jump_cond (code, mode, SUBREG_REG (op1),
6086 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
6087 reversed_nonequality);
6088 }
6089
6090 /* Hash both operands. */
6091
6092 do_not_record = 0;
6093 hash_arg_in_memory = 0;
6094 hash_arg_in_struct = 0;
6095 op0_hash = HASH (op0, mode);
6096 op0_in_memory = hash_arg_in_memory;
6097 op0_in_struct = hash_arg_in_struct;
6098
6099 if (do_not_record)
6100 return;
6101
6102 do_not_record = 0;
6103 hash_arg_in_memory = 0;
6104 hash_arg_in_struct = 0;
6105 op1_hash = HASH (op1, mode);
6106 op1_in_memory = hash_arg_in_memory;
6107 op1_in_struct = hash_arg_in_struct;
6108
6109 if (do_not_record)
6110 return;
6111
6112 /* Look up both operands. */
6113 op0_elt = lookup (op0, op0_hash, mode);
6114 op1_elt = lookup (op1, op1_hash, mode);
6115
6116 /* If both operands are already equivalent or if they are not in the
6117 table but are identical, do nothing. */
6118 if ((op0_elt != 0 && op1_elt != 0
6119 && op0_elt->first_same_value == op1_elt->first_same_value)
6120 || op0 == op1 || rtx_equal_p (op0, op1))
6121 return;
6122
6123 /* If we aren't setting two things equal, all we can do is save this
6124 comparison. Similarly if this is floating-point. In the latter
6125 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
6126 If we record the equality, we might inadvertently delete code
6127 whose intent was to change -0 to +0. */
6128
6129 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
6130 {
6131 /* If we reversed a floating-point comparison, if OP0 is not a
6132 register, or if OP1 is neither a register nor a constant, we can't
6133 do anything. */
6134
6135 if (GET_CODE (op1) != REG)
6136 op1 = equiv_constant (op1);
6137
6138 if ((reversed_nonequality && FLOAT_MODE_P (mode))
6139 || GET_CODE (op0) != REG || op1 == 0)
6140 return;
6141
6142 /* Put OP0 in the hash table if it isn't already. This gives it a
6143 new quantity number. */
6144 if (op0_elt == 0)
6145 {
6146 if (insert_regs (op0, NULL_PTR, 0))
6147 {
6148 rehash_using_reg (op0);
6149 op0_hash = HASH (op0, mode);
6150
6151 /* If OP0 is contained in OP1, this changes its hash code
6152 as well. Faster to rehash than to check, except
6153 for the simple case of a constant. */
6154 if (! CONSTANT_P (op1))
6155 op1_hash = HASH (op1, mode);
6156 }
6157
6158 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
6159 op0_elt->in_memory = op0_in_memory;
6160 op0_elt->in_struct = op0_in_struct;
6161 }
6162
6163 qty_comparison_code[reg_qty[REGNO (op0)]] = code;
6164 if (GET_CODE (op1) == REG)
6165 {
6166 /* Look it up again--in case op0 and op1 are the same. */
6167 op1_elt = lookup (op1, op1_hash, mode);
6168
6169 /* Put OP1 in the hash table so it gets a new quantity number. */
6170 if (op1_elt == 0)
6171 {
6172 if (insert_regs (op1, NULL_PTR, 0))
6173 {
6174 rehash_using_reg (op1);
6175 op1_hash = HASH (op1, mode);
6176 }
6177
6178 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
6179 op1_elt->in_memory = op1_in_memory;
6180 op1_elt->in_struct = op1_in_struct;
6181 }
6182
6183 qty_comparison_qty[reg_qty[REGNO (op0)]] = reg_qty[REGNO (op1)];
6184 qty_comparison_const[reg_qty[REGNO (op0)]] = 0;
6185 }
6186 else
6187 {
6188 qty_comparison_qty[reg_qty[REGNO (op0)]] = -1;
6189 qty_comparison_const[reg_qty[REGNO (op0)]] = op1;
6190 }
6191
6192 return;
6193 }
6194
6195 /* If either side is still missing an equivalence, make it now,
6196 then merge the equivalences. */
6197
6198 if (op0_elt == 0)
6199 {
6200 if (insert_regs (op0, NULL_PTR, 0))
6201 {
6202 rehash_using_reg (op0);
6203 op0_hash = HASH (op0, mode);
6204 }
6205
6206 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
6207 op0_elt->in_memory = op0_in_memory;
6208 op0_elt->in_struct = op0_in_struct;
6209 }
6210
6211 if (op1_elt == 0)
6212 {
6213 if (insert_regs (op1, NULL_PTR, 0))
6214 {
6215 rehash_using_reg (op1);
6216 op1_hash = HASH (op1, mode);
6217 }
6218
6219 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
6220 op1_elt->in_memory = op1_in_memory;
6221 op1_elt->in_struct = op1_in_struct;
6222 }
6223
6224 merge_equiv_classes (op0_elt, op1_elt);
6225 last_jump_equiv_class = op0_elt;
6226 }
6227 \f
6228 /* CSE processing for one instruction.
6229 First simplify sources and addresses of all assignments
6230 in the instruction, using previously-computed equivalent values.
6231 Then install the new sources and destinations in the table
6232 of available values.
6233
6234 If LIBCALL_INSN is nonzero, don't record any equivalence made in
6235 the insn. It means that INSN is inside a libcall block. In this
6236 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
6237
6238 /* Data on one SET contained in the instruction. */
6239
6240 struct set
6241 {
6242 /* The SET rtx itself. */
6243 rtx rtl;
6244 /* The SET_SRC of the rtx (the original value, if it is changing). */
6245 rtx src;
6246 /* The hash-table element for the SET_SRC of the SET. */
6247 struct table_elt *src_elt;
6248 /* Hash value for the SET_SRC. */
6249 unsigned src_hash;
6250 /* Hash value for the SET_DEST. */
6251 unsigned dest_hash;
6252 /* The SET_DEST, with SUBREG, etc., stripped. */
6253 rtx inner_dest;
6254 /* Place where the pointer to the INNER_DEST was found. */
6255 rtx *inner_dest_loc;
6256 /* Nonzero if the SET_SRC is in memory. */
6257 char src_in_memory;
6258 /* Nonzero if the SET_SRC is in a structure. */
6259 char src_in_struct;
6260 /* Nonzero if the SET_SRC contains something
6261 whose value cannot be predicted and understood. */
6262 char src_volatile;
6263 /* Original machine mode, in case it becomes a CONST_INT. */
6264 enum machine_mode mode;
6265 /* A constant equivalent for SET_SRC, if any. */
6266 rtx src_const;
6267 /* Hash value of constant equivalent for SET_SRC. */
6268 unsigned src_const_hash;
6269 /* Table entry for constant equivalent for SET_SRC, if any. */
6270 struct table_elt *src_const_elt;
6271 };
6272
6273 static void
6274 cse_insn (insn, libcall_insn)
6275 rtx insn;
6276 rtx libcall_insn;
6277 {
6278 register rtx x = PATTERN (insn);
6279 register int i;
6280 rtx tem;
6281 register int n_sets = 0;
6282
6283 #ifdef HAVE_cc0
6284 /* Records what this insn does to set CC0. */
6285 rtx this_insn_cc0 = 0;
6286 enum machine_mode this_insn_cc0_mode = VOIDmode;
6287 #endif
6288
6289 rtx src_eqv = 0;
6290 struct table_elt *src_eqv_elt = 0;
6291 int src_eqv_volatile;
6292 int src_eqv_in_memory;
6293 int src_eqv_in_struct;
6294 unsigned src_eqv_hash;
6295
6296 struct set *sets;
6297
6298 this_insn = insn;
6299
6300 /* Find all the SETs and CLOBBERs in this instruction.
6301 Record all the SETs in the array `sets' and count them.
6302 Also determine whether there is a CLOBBER that invalidates
6303 all memory references, or all references at varying addresses. */
6304
6305 if (GET_CODE (insn) == CALL_INSN)
6306 {
6307 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6308 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
6309 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
6310 }
6311
6312 if (GET_CODE (x) == SET)
6313 {
6314 sets = (struct set *) alloca (sizeof (struct set));
6315 sets[0].rtl = x;
6316
6317 /* Ignore SETs that are unconditional jumps.
6318 They never need cse processing, so this does not hurt.
6319 The reason is not efficiency but rather
6320 so that we can test at the end for instructions
6321 that have been simplified to unconditional jumps
6322 and not be misled by unchanged instructions
6323 that were unconditional jumps to begin with. */
6324 if (SET_DEST (x) == pc_rtx
6325 && GET_CODE (SET_SRC (x)) == LABEL_REF)
6326 ;
6327
6328 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
6329 The hard function value register is used only once, to copy to
6330 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
6331 Ensure we invalidate the destination register. On the 80386 no
6332 other code would invalidate it since it is a fixed_reg.
6333 We need not check the return of apply_change_group; see canon_reg. */
6334
6335 else if (GET_CODE (SET_SRC (x)) == CALL)
6336 {
6337 canon_reg (SET_SRC (x), insn);
6338 apply_change_group ();
6339 fold_rtx (SET_SRC (x), insn);
6340 invalidate (SET_DEST (x), VOIDmode);
6341 }
6342 else
6343 n_sets = 1;
6344 }
6345 else if (GET_CODE (x) == PARALLEL)
6346 {
6347 register int lim = XVECLEN (x, 0);
6348
6349 sets = (struct set *) alloca (lim * sizeof (struct set));
6350
6351 /* Find all regs explicitly clobbered in this insn,
6352 and ensure they are not replaced with any other regs
6353 elsewhere in this insn.
6354 When a reg that is clobbered is also used for input,
6355 we should presume that that is for a reason,
6356 and we should not substitute some other register
6357 which is not supposed to be clobbered.
6358 Therefore, this loop cannot be merged into the one below
6359 because a CALL may precede a CLOBBER and refer to the
6360 value clobbered. We must not let a canonicalization do
6361 anything in that case. */
6362 for (i = 0; i < lim; i++)
6363 {
6364 register rtx y = XVECEXP (x, 0, i);
6365 if (GET_CODE (y) == CLOBBER)
6366 {
6367 rtx clobbered = XEXP (y, 0);
6368
6369 if (GET_CODE (clobbered) == REG
6370 || GET_CODE (clobbered) == SUBREG)
6371 invalidate (clobbered, VOIDmode);
6372 else if (GET_CODE (clobbered) == STRICT_LOW_PART
6373 || GET_CODE (clobbered) == ZERO_EXTRACT)
6374 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
6375 }
6376 }
6377
6378 for (i = 0; i < lim; i++)
6379 {
6380 register rtx y = XVECEXP (x, 0, i);
6381 if (GET_CODE (y) == SET)
6382 {
6383 /* As above, we ignore unconditional jumps and call-insns and
6384 ignore the result of apply_change_group. */
6385 if (GET_CODE (SET_SRC (y)) == CALL)
6386 {
6387 canon_reg (SET_SRC (y), insn);
6388 apply_change_group ();
6389 fold_rtx (SET_SRC (y), insn);
6390 invalidate (SET_DEST (y), VOIDmode);
6391 }
6392 else if (SET_DEST (y) == pc_rtx
6393 && GET_CODE (SET_SRC (y)) == LABEL_REF)
6394 ;
6395 else
6396 sets[n_sets++].rtl = y;
6397 }
6398 else if (GET_CODE (y) == CLOBBER)
6399 {
6400 /* If we clobber memory, canon the address.
6401 This does nothing when a register is clobbered
6402 because we have already invalidated the reg. */
6403 if (GET_CODE (XEXP (y, 0)) == MEM)
6404 canon_reg (XEXP (y, 0), NULL_RTX);
6405 }
6406 else if (GET_CODE (y) == USE
6407 && ! (GET_CODE (XEXP (y, 0)) == REG
6408 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
6409 canon_reg (y, NULL_RTX);
6410 else if (GET_CODE (y) == CALL)
6411 {
6412 /* The result of apply_change_group can be ignored; see
6413 canon_reg. */
6414 canon_reg (y, insn);
6415 apply_change_group ();
6416 fold_rtx (y, insn);
6417 }
6418 }
6419 }
6420 else if (GET_CODE (x) == CLOBBER)
6421 {
6422 if (GET_CODE (XEXP (x, 0)) == MEM)
6423 canon_reg (XEXP (x, 0), NULL_RTX);
6424 }
6425
6426 /* Canonicalize a USE of a pseudo register or memory location. */
6427 else if (GET_CODE (x) == USE
6428 && ! (GET_CODE (XEXP (x, 0)) == REG
6429 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
6430 canon_reg (XEXP (x, 0), NULL_RTX);
6431 else if (GET_CODE (x) == CALL)
6432 {
6433 /* The result of apply_change_group can be ignored; see canon_reg. */
6434 canon_reg (x, insn);
6435 apply_change_group ();
6436 fold_rtx (x, insn);
6437 }
6438
6439 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
6440 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
6441 is handled specially for this case, and if it isn't set, then there will
6442 be no equivalence for the destination. */
6443 if (n_sets == 1 && REG_NOTES (insn) != 0
6444 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
6445 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
6446 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
6447 src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
6448
6449 /* Canonicalize sources and addresses of destinations.
6450 We do this in a separate pass to avoid problems when a MATCH_DUP is
6451 present in the insn pattern. In that case, we want to ensure that
6452 we don't break the duplicate nature of the pattern. So we will replace
6453 both operands at the same time. Otherwise, we would fail to find an
6454 equivalent substitution in the loop calling validate_change below.
6455
6456 We used to suppress canonicalization of DEST if it appears in SRC,
6457 but we don't do this any more. */
6458
6459 for (i = 0; i < n_sets; i++)
6460 {
6461 rtx dest = SET_DEST (sets[i].rtl);
6462 rtx src = SET_SRC (sets[i].rtl);
6463 rtx new = canon_reg (src, insn);
6464 int insn_code;
6465
6466 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
6467 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
6468 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
6469 || (insn_code = recog_memoized (insn)) < 0
6470 || insn_n_dups[insn_code] > 0)
6471 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
6472 else
6473 SET_SRC (sets[i].rtl) = new;
6474
6475 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
6476 {
6477 validate_change (insn, &XEXP (dest, 1),
6478 canon_reg (XEXP (dest, 1), insn), 1);
6479 validate_change (insn, &XEXP (dest, 2),
6480 canon_reg (XEXP (dest, 2), insn), 1);
6481 }
6482
6483 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
6484 || GET_CODE (dest) == ZERO_EXTRACT
6485 || GET_CODE (dest) == SIGN_EXTRACT)
6486 dest = XEXP (dest, 0);
6487
6488 if (GET_CODE (dest) == MEM)
6489 canon_reg (dest, insn);
6490 }
6491
6492 /* Now that we have done all the replacements, we can apply the change
6493 group and see if they all work. Note that this will cause some
6494 canonicalizations that would have worked individually not to be applied
6495 because some other canonicalization didn't work, but this should not
6496 occur often.
6497
6498 The result of apply_change_group can be ignored; see canon_reg. */
6499
6500 apply_change_group ();
6501
6502 /* Set sets[i].src_elt to the class each source belongs to.
6503 Detect assignments from or to volatile things
6504 and set sets[i] to zero so they will be ignored
6505 in the rest of this function.
6506
6507 Nothing in this loop changes the hash table or the register chains. */
6508
6509 for (i = 0; i < n_sets; i++)
6510 {
6511 register rtx src, dest;
6512 register rtx src_folded;
6513 register struct table_elt *elt = 0, *p;
6514 enum machine_mode mode;
6515 rtx src_eqv_here;
6516 rtx src_const = 0;
6517 rtx src_related = 0;
6518 struct table_elt *src_const_elt = 0;
6519 int src_cost = 10000, src_eqv_cost = 10000, src_folded_cost = 10000;
6520 int src_related_cost = 10000, src_elt_cost = 10000;
6521 /* Set non-zero if we need to call force_const_mem on the
6522 contents of src_folded before using it. */
6523 int src_folded_force_flag = 0;
6524
6525 dest = SET_DEST (sets[i].rtl);
6526 src = SET_SRC (sets[i].rtl);
6527
6528 /* If SRC is a constant that has no machine mode,
6529 hash it with the destination's machine mode.
6530 This way we can keep different modes separate. */
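/* E.g. in (set (reg:SI 60) (const_int 5)) the source has VOIDmode, so
   the 5 is hashed as an SImode value and kept distinct from an HImode 5.
   (The register is illustrative.) */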
6531
6532 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
6533 sets[i].mode = mode;
6534
6535 if (src_eqv)
6536 {
6537 enum machine_mode eqvmode = mode;
6538 if (GET_CODE (dest) == STRICT_LOW_PART)
6539 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
6540 do_not_record = 0;
6541 hash_arg_in_memory = 0;
6542 hash_arg_in_struct = 0;
6543 src_eqv = fold_rtx (src_eqv, insn);
6544 src_eqv_hash = HASH (src_eqv, eqvmode);
6545
6546 /* Find the equivalence class for the equivalent expression. */
6547
6548 if (!do_not_record)
6549 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
6550
6551 src_eqv_volatile = do_not_record;
6552 src_eqv_in_memory = hash_arg_in_memory;
6553 src_eqv_in_struct = hash_arg_in_struct;
6554 }
6555
6556 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
6557 value of the INNER register, not the destination. So it is not
6558 a valid substitution for the source. But save it for later. */
6559 if (GET_CODE (dest) == STRICT_LOW_PART)
6560 src_eqv_here = 0;
6561 else
6562 src_eqv_here = src_eqv;
6563
6564 /* Simplify any foldable subexpressions in SRC. Then get the fully-
6565 simplified result, which may not necessarily be valid. */
6566 src_folded = fold_rtx (src, insn);
6567
6568 #if 0
6569 /* ??? This caused bad code to be generated for the m68k port with -O2.
6570 Suppose src is (CONST_INT -1), and that after truncation src_folded
6571 is (CONST_INT 3). Suppose src_folded is then used for src_const.
6572 At the end we will add src and src_const to the same equivalence
6573 class. We now have 3 and -1 on the same equivalence class. This
6574 causes later instructions to be mis-optimized. */
6575 /* If storing a constant in a bitfield, pre-truncate the constant
6576 so we will be able to record it later. */
6577 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
6578 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
6579 {
6580 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
6581
6582 if (GET_CODE (src) == CONST_INT
6583 && GET_CODE (width) == CONST_INT
6584 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
6585 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
6586 src_folded
6587 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
6588 << INTVAL (width)) - 1));
6589 }
6590 #endif
6591
6592 /* Compute SRC's hash code, and also notice if it
6593 should not be recorded at all. In that case,
6594 prevent any further processing of this assignment. */
6595 do_not_record = 0;
6596 hash_arg_in_memory = 0;
6597 hash_arg_in_struct = 0;
6598
6599 sets[i].src = src;
6600 sets[i].src_hash = HASH (src, mode);
6601 sets[i].src_volatile = do_not_record;
6602 sets[i].src_in_memory = hash_arg_in_memory;
6603 sets[i].src_in_struct = hash_arg_in_struct;
6604
6605 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
6606 a pseudo that is set more than once, do not record SRC. Using
6607 SRC as a replacement for anything else will be incorrect in that
6608 situation. Note that this usually occurs only for stack slots,
6609 in which case all the RTL would be referring to SRC, so we don't
6610 lose any optimization opportunities by not having SRC in the
6611 hash table. */
6612
6613 if (GET_CODE (src) == MEM
6614 && find_reg_note (insn, REG_EQUIV, src) != 0
6615 && GET_CODE (dest) == REG
6616 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
6617 && REG_N_SETS (REGNO (dest)) != 1)
6618 sets[i].src_volatile = 1;
6619
6620 #if 0
6621 /* It is no longer clear why we used to do this, but it doesn't
6622 appear to still be needed. So let's try without it since this
6623 code hurts cse'ing widened ops. */
6624 /* If source is a perverse subreg (such as QI treated as an SI),
6625 treat it as volatile. It may do the work of an SI in one context
6626 where the extra bits are not being used, but cannot replace an SI
6627 in general. */
6628 if (GET_CODE (src) == SUBREG
6629 && (GET_MODE_SIZE (GET_MODE (src))
6630 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
6631 sets[i].src_volatile = 1;
6632 #endif
6633
6634 /* Locate all possible equivalent forms for SRC. Try to replace
6635 SRC in the insn with each cheaper equivalent.
6636
6637 We have the following types of equivalents: SRC itself, a folded
6638 version, a value given in a REG_EQUAL note, or a value related
6639 to a constant.
6640
6641 Each of these equivalents may be part of an additional class
6642 of equivalents (if more than one is in the table, they must be in
6643 the same class; we check for this).
6644
6645 If the source is volatile, we don't do any table lookups.
6646
6647 We note any constant equivalent for possible later use in a
6648 REG_NOTE. */
6649
6650 if (!sets[i].src_volatile)
6651 elt = lookup (src, sets[i].src_hash, mode);
6652
6653 sets[i].src_elt = elt;
6654
6655 if (elt && src_eqv_here && src_eqv_elt)
6656 {
6657 if (elt->first_same_value != src_eqv_elt->first_same_value)
6658 {
6659 /* The REG_EQUAL is indicating that two formerly distinct
6660 classes are now equivalent. So merge them. */
6661 merge_equiv_classes (elt, src_eqv_elt);
6662 src_eqv_hash = HASH (src_eqv, elt->mode);
6663 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
6664 }
6665
6666 src_eqv_here = 0;
6667 }
6668
6669 else if (src_eqv_elt)
6670 elt = src_eqv_elt;
6671
6672 /* Try to find a constant somewhere and record it in `src_const'.
6673 Record its table element, if any, in `src_const_elt'. Look in
6674 any known equivalences first. (If the constant is not in the
6675 table, also set `sets[i].src_const_hash'). */
6676 if (elt)
6677 for (p = elt->first_same_value; p; p = p->next_same_value)
6678 if (p->is_const)
6679 {
6680 src_const = p->exp;
6681 src_const_elt = elt;
6682 break;
6683 }
6684
6685 if (src_const == 0
6686 && (CONSTANT_P (src_folded)
6687 /* Consider (minus (label_ref L1) (label_ref L2)) as
6688 "constant" here so we will record it. This allows us
6689 to fold switch statements when an ADDR_DIFF_VEC is used. */
6690 || (GET_CODE (src_folded) == MINUS
6691 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
6692 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
6693 src_const = src_folded, src_const_elt = elt;
6694 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
6695 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
6696
6697 /* If we don't know if the constant is in the table, get its
6698 hash code and look it up. */
6699 if (src_const && src_const_elt == 0)
6700 {
6701 sets[i].src_const_hash = HASH (src_const, mode);
6702 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
6703 }
6704
6705 sets[i].src_const = src_const;
6706 sets[i].src_const_elt = src_const_elt;
6707
6708 /* If the constant and our source are both in the table, mark them as
6709 equivalent. Otherwise, if a constant is in the table but the source
6710 isn't, set ELT to it. */
6711 if (src_const_elt && elt
6712 && src_const_elt->first_same_value != elt->first_same_value)
6713 merge_equiv_classes (elt, src_const_elt);
6714 else if (src_const_elt && elt == 0)
6715 elt = src_const_elt;
6716
6717 /* See if there is a register linearly related to a constant
6718 equivalent of SRC. */
6719 if (src_const
6720 && (GET_CODE (src_const) == CONST
6721 || (src_const_elt && src_const_elt->related_value != 0)))
6722 {
6723 src_related = use_related_value (src_const, src_const_elt);
6724 if (src_related)
6725 {
6726 struct table_elt *src_related_elt
6727 = lookup (src_related, HASH (src_related, mode), mode);
6728 if (src_related_elt && elt)
6729 {
6730 if (elt->first_same_value
6731 != src_related_elt->first_same_value)
6732 /* This can occur when we previously saw a CONST
6733 involving a SYMBOL_REF and then see the SYMBOL_REF
6734 twice. Merge the involved classes. */
6735 merge_equiv_classes (elt, src_related_elt);
6736
6737 src_related = 0;
6738 src_related_elt = 0;
6739 }
6740 else if (src_related_elt && elt == 0)
6741 elt = src_related_elt;
6742 }
6743 }
6744
6745 /* See if we have a CONST_INT that is already in a register in a
6746 wider mode. */
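/* For instance, if (reg:SI 60) is already known to hold (const_int 7)
   and this insn wants (const_int 7) in HImode,
   (subreg:HI (reg:SI 60) 0) may be cheaper than loading the constant
   again. (Illustrative register and modes.) */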
6747
6748 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
6749 && GET_MODE_CLASS (mode) == MODE_INT
6750 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
6751 {
6752 enum machine_mode wider_mode;
6753
6754 for (wider_mode = GET_MODE_WIDER_MODE (mode);
6755 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
6756 && src_related == 0;
6757 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
6758 {
6759 struct table_elt *const_elt
6760 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
6761
6762 if (const_elt == 0)
6763 continue;
6764
6765 for (const_elt = const_elt->first_same_value;
6766 const_elt; const_elt = const_elt->next_same_value)
6767 if (GET_CODE (const_elt->exp) == REG)
6768 {
6769 src_related = gen_lowpart_if_possible (mode,
6770 const_elt->exp);
6771 break;
6772 }
6773 }
6774 }
6775
6776 /* Another possibility is that we have an AND with a constant in
6777 a mode narrower than a word. If so, it might have been generated
6778 as part of an "if" which would narrow the AND. If we already
6779 have done the AND in a wider mode, we can use a SUBREG of that
6780 value. */
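/* Sketch (illustrative registers and modes): if SRC is
   (and:HI (subreg:HI (reg:SI 60) 0) (const_int 255)) and we earlier
   computed (set (reg:SI 61) (and:SI (reg:SI 60) (const_int 255))),
   then (subreg:HI (reg:SI 61) 0) is an equivalent worth trying. */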
6781
6782 if (flag_expensive_optimizations && ! src_related
6783 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
6784 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6785 {
6786 enum machine_mode tmode;
6787 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
6788
6789 for (tmode = GET_MODE_WIDER_MODE (mode);
6790 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6791 tmode = GET_MODE_WIDER_MODE (tmode))
6792 {
6793 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
6794 struct table_elt *larger_elt;
6795
6796 if (inner)
6797 {
6798 PUT_MODE (new_and, tmode);
6799 XEXP (new_and, 0) = inner;
6800 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
6801 if (larger_elt == 0)
6802 continue;
6803
6804 for (larger_elt = larger_elt->first_same_value;
6805 larger_elt; larger_elt = larger_elt->next_same_value)
6806 if (GET_CODE (larger_elt->exp) == REG)
6807 {
6808 src_related
6809 = gen_lowpart_if_possible (mode, larger_elt->exp);
6810 break;
6811 }
6812
6813 if (src_related)
6814 break;
6815 }
6816 }
6817 }
6818
6819 #ifdef LOAD_EXTEND_OP
6820 /* See if a MEM has already been loaded with a widening operation;
6821 if it has, we can use a subreg of that. Many CISC machines
6822 also have such operations, but this is only likely to be
6823 beneficial on these machines. */
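/* E.g. on a target whose HImode loads zero-extend, a prior
   (set (reg:SI 60) (zero_extend:SI (mem:HI addr))) means a new HImode
   load of the same address can become (subreg:HI (reg:SI 60) 0).
   (Register and address are illustrative.) */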
6824
6825 if (flag_expensive_optimizations && src_related == 0
6826 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6827 && GET_MODE_CLASS (mode) == MODE_INT
6828 && GET_CODE (src) == MEM && ! do_not_record
6829 && LOAD_EXTEND_OP (mode) != NIL)
6830 {
6831 enum machine_mode tmode;
6832
6833 /* Set what we are trying to extend and the operation it might
6834 have been extended with. */
6835 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
6836 XEXP (memory_extend_rtx, 0) = src;
6837
6838 for (tmode = GET_MODE_WIDER_MODE (mode);
6839 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6840 tmode = GET_MODE_WIDER_MODE (tmode))
6841 {
6842 struct table_elt *larger_elt;
6843
6844 PUT_MODE (memory_extend_rtx, tmode);
6845 larger_elt = lookup (memory_extend_rtx,
6846 HASH (memory_extend_rtx, tmode), tmode);
6847 if (larger_elt == 0)
6848 continue;
6849
6850 for (larger_elt = larger_elt->first_same_value;
6851 larger_elt; larger_elt = larger_elt->next_same_value)
6852 if (GET_CODE (larger_elt->exp) == REG)
6853 {
6854 src_related = gen_lowpart_if_possible (mode,
6855 larger_elt->exp);
6856 break;
6857 }
6858
6859 if (src_related)
6860 break;
6861 }
6862 }
6863 #endif /* LOAD_EXTEND_OP */
6864
6865 if (src == src_folded)
6866 src_folded = 0;
6867
6868 /* Folds of constant_p_rtx are to be preferred, since we do
6869 not wish any to live past CSE. */
6870 if (src && GET_CODE (src) == CONST
6871 && GET_CODE (XEXP (src, 0)) == CONSTANT_P_RTX)
6872 src = 0;
6873
6874 /* At this point, ELT, if non-zero, points to a class of expressions
6875 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
6876 and SRC_RELATED, if non-zero, each contain additional equivalent
6877 expressions. Prune these latter expressions by deleting expressions
6878 already in the equivalence class.
6879
6880 Check for an equivalent identical to the destination. If found,
6881 this is the preferred equivalent since it will likely lead to
6882 elimination of the insn. Indicate this by placing it in
6883 `src_related'. */
6884
6885 if (elt) elt = elt->first_same_value;
6886 for (p = elt; p; p = p->next_same_value)
6887 {
6888 enum rtx_code code = GET_CODE (p->exp);
6889
6890 /* If the expression is not valid, ignore it. Then we do not
6891 have to check for validity below. In most cases, we can use
6892 `rtx_equal_p', since canonicalization has already been done. */
6893 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
6894 continue;
6895
6896 /* Also skip paradoxical subregs, unless that's what we're
6897 looking for. */
6898 if (code == SUBREG
6899 && (GET_MODE_SIZE (GET_MODE (p->exp))
6900 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
6901 && ! (src != 0
6902 && GET_CODE (src) == SUBREG
6903 && GET_MODE (src) == GET_MODE (p->exp)
6904 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
6905 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
6906 continue;
6907
6908 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
6909 src = 0;
6910 else if (src_folded && GET_CODE (src_folded) == code
6911 && rtx_equal_p (src_folded, p->exp))
6912 src_folded = 0;
6913 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
6914 && rtx_equal_p (src_eqv_here, p->exp))
6915 src_eqv_here = 0;
6916 else if (src_related && GET_CODE (src_related) == code
6917 && rtx_equal_p (src_related, p->exp))
6918 src_related = 0;
6919
6920 /* If this is the same as the destination of the insn, we want
6921 to prefer it. Copy it to src_related. The code below will
6922 then give it a negative cost. */
6923 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
6924 src_related = dest;
6925
6926 }
6927
6928 /* Find the cheapest valid equivalent, trying all the available
6929 possibilities. Prefer items not in the hash table to ones
6930 that are when they are equal cost. Note that we can never
6931 worsen an insn as the current contents will also succeed.
6932 If we find an equivalent identical to the destination, use it as best,
6933 since this insn will probably be eliminated in that case. */
6934 if (src)
6935 {
6936 if (rtx_equal_p (src, dest))
6937 src_cost = -1;
6938 else
6939 src_cost = COST (src);
6940 }
6941
6942 if (src_eqv_here)
6943 {
6944 if (rtx_equal_p (src_eqv_here, dest))
6945 src_eqv_cost = -1;
6946 else
6947 src_eqv_cost = COST (src_eqv_here);
6948 }
6949
6950 if (src_folded)
6951 {
6952 if (rtx_equal_p (src_folded, dest))
6953 src_folded_cost = -1;
6954 else
6955 src_folded_cost = COST (src_folded);
6956 }
6957
6958 if (src_related)
6959 {
6960 if (rtx_equal_p (src_related, dest))
6961 src_related_cost = -1;
6962 else
6963 src_related_cost = COST (src_related);
6964 }
6965
6966 /* If this was an indirect jump insn, a known label will really be
6967 cheaper even though it looks more expensive. */
6968 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
6969 src_folded = src_const, src_folded_cost = -1;
6970
6971 /* Terminate loop when replacement made. This must terminate since
6972 the current contents will be tested and will always be valid. */
6973 while (1)
6974 {
6975 rtx trial, old_src;
6976
6977 /* Skip invalid entries. */
6978 while (elt && GET_CODE (elt->exp) != REG
6979 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6980 elt = elt->next_same_value;
6981
6982 /* A paradoxical subreg would be bad here: it'll be the right
6983 size, but later may be adjusted so that the upper bits aren't
6984 what we want. So reject it. */
6985 if (elt != 0
6986 && GET_CODE (elt->exp) == SUBREG
6987 && (GET_MODE_SIZE (GET_MODE (elt->exp))
6988 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
6989 /* It is okay, though, if the rtx we're trying to match
6990 will ignore any of the bits we can't predict. */
6991 && ! (src != 0
6992 && GET_CODE (src) == SUBREG
6993 && GET_MODE (src) == GET_MODE (elt->exp)
6994 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
6995 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
6996 {
6997 elt = elt->next_same_value;
6998 continue;
6999 }
7000
7001 if (elt) src_elt_cost = elt->cost;
7002
7003 /* Find cheapest and skip it for the next time. For items
7004 of equal cost, use this order:
7005 src_folded, src, src_eqv, src_related and hash table entry. */
7006 if (src_folded_cost <= src_cost
7007 && src_folded_cost <= src_eqv_cost
7008 && src_folded_cost <= src_related_cost
7009 && src_folded_cost <= src_elt_cost)
7010 {
7011 trial = src_folded, src_folded_cost = 10000;
7012 if (src_folded_force_flag)
7013 trial = force_const_mem (mode, trial);
7014 }
7015 else if (src_cost <= src_eqv_cost
7016 && src_cost <= src_related_cost
7017 && src_cost <= src_elt_cost)
7018 trial = src, src_cost = 10000;
7019 else if (src_eqv_cost <= src_related_cost
7020 && src_eqv_cost <= src_elt_cost)
7021 trial = copy_rtx (src_eqv_here), src_eqv_cost = 10000;
7022 else if (src_related_cost <= src_elt_cost)
7023 trial = copy_rtx (src_related), src_related_cost = 10000;
7024 else
7025 {
7026 trial = copy_rtx (elt->exp);
7027 elt = elt->next_same_value;
7028 src_elt_cost = 10000;
7029 }
7030
7031 /* We don't normally have an insn matching (set (pc) (pc)), so
7032 check for this separately here. We will delete such an
7033 insn below.
7034
7035 Tablejump insns contain a USE of the table, so simply replacing
7036 the operand with the constant won't match. This is simply an
7037 unconditional branch, however, and is therefore valid. Just
7038 insert the substitution here and we will delete and re-emit
7039 the insn later. */
7040
7041 /* Keep track of the original SET_SRC so that we can fix notes
7042 on libcall instructions. */
7043 old_src = SET_SRC (sets[i].rtl);
7044
7045 if (n_sets == 1 && dest == pc_rtx
7046 && (trial == pc_rtx
7047 || (GET_CODE (trial) == LABEL_REF
7048 && ! condjump_p (insn))))
7049 {
7050 /* If TRIAL is a label in front of a jump table, we are
7051 really falling through the switch (this is how casesi
7052 insns work), so we must branch around the table. */
7053 if (GET_CODE (trial) == CODE_LABEL
7054 && NEXT_INSN (trial) != 0
7055 && GET_CODE (NEXT_INSN (trial)) == JUMP_INSN
7056 && (GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_DIFF_VEC
7057 || GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_VEC))
7058
7059 trial = gen_rtx_LABEL_REF (Pmode, get_label_after (trial));
7060
7061 SET_SRC (sets[i].rtl) = trial;
7062 cse_jumps_altered = 1;
7063 break;
7064 }
7065
7066 /* Look for a substitution that makes a valid insn. */
7067 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
7068 {
7069 /* If we just made a substitution inside a libcall, then we
7070 need to make the same substitution in any notes attached
7071 to the RETVAL insn. */
7072 if (libcall_insn
7073 && (GET_CODE (old_src) == REG
7074 || GET_CODE (old_src) == SUBREG
7075 || GET_CODE (old_src) == MEM))
7076 replace_rtx (REG_NOTES (libcall_insn), old_src,
7077 canon_reg (SET_SRC (sets[i].rtl), insn));
7078
7079 /* The result of apply_change_group can be ignored; see
7080 canon_reg. */
7081
7082 validate_change (insn, &SET_SRC (sets[i].rtl),
7083 canon_reg (SET_SRC (sets[i].rtl), insn),
7084 1);
7085 apply_change_group ();
7086 break;
7087 }
7088
7089 /* If we previously found constant pool entries for
7090 constants and this is a constant, try making a
7093 pool entry. Put it in src_folded unless we have already done
7094 this, since that is where it likely came from. */
7093
7094 else if (constant_pool_entries_cost
7095 && CONSTANT_P (trial)
7096 && ! (GET_CODE (trial) == CONST
7097 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
7098 && (src_folded == 0
7099 || (GET_CODE (src_folded) != MEM
7100 && ! src_folded_force_flag))
7101 && GET_MODE_CLASS (mode) != MODE_CC
7102 && mode != VOIDmode)
7103 {
7104 src_folded_force_flag = 1;
7105 src_folded = trial;
7106 src_folded_cost = constant_pool_entries_cost;
7107 }
7108 }
7109
7110 src = SET_SRC (sets[i].rtl);
7111
7112 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
7113 However, there is an important exception: If both are registers
7114 that are not the head of their equivalence class, replace SET_SRC
7115 with the head of the class. If we do not do this, we will have
7116 both registers live over a portion of the basic block. This way,
7117 their lifetimes will likely abut instead of overlapping. */
7118 if (GET_CODE (dest) == REG
7119 && REGNO_QTY_VALID_P (REGNO (dest))
7120 && qty_mode[reg_qty[REGNO (dest)]] == GET_MODE (dest)
7121 && qty_first_reg[reg_qty[REGNO (dest)]] != REGNO (dest)
7122 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
7123 /* Don't do this if the original insn had a hard reg as
7124 SET_SRC. */
7125 && (GET_CODE (sets[i].src) != REG
7126 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER))
7127 /* We can't call canon_reg here because it won't do anything if
7128 SRC is a hard register. */
7129 {
7130 int first = qty_first_reg[reg_qty[REGNO (src)]];
7131 rtx new_src
7132 = (first >= FIRST_PSEUDO_REGISTER
7133 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
7134
7135 /* We must use validate_change even for this, because this
7136 might be a special no-op instruction, suitable only to
7137 tag notes onto. */
7138 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
7139 {
7140 src = new_src;
7141 /* If we had a constant that is cheaper than what we are now
7142 setting SRC to, use that constant. We ignored it when we
7143 thought we could make this into a no-op. */
7144 if (src_const && COST (src_const) < COST (src)
7145 && validate_change (insn, &SET_SRC (sets[i].rtl), src_const,
7146 0))
7147 src = src_const;
7148 }
7149 }
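      /* A worked example of the rewrite above (pseudo numbers
	 illustrative): given (set (reg 105) (reg 105)) where the head
	 of reg 105's equivalence class is reg 104, SET_SRC becomes
	 (reg 104), so the lifetimes of the two registers can abut
	 instead of overlapping.  */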
7150
7151 /* If we made a change, recompute SRC values. */
7152 if (src != sets[i].src)
7153 {
7154 do_not_record = 0;
7155 hash_arg_in_memory = 0;
7156 hash_arg_in_struct = 0;
7157 sets[i].src = src;
7158 sets[i].src_hash = HASH (src, mode);
7159 sets[i].src_volatile = do_not_record;
7160 sets[i].src_in_memory = hash_arg_in_memory;
7161 sets[i].src_in_struct = hash_arg_in_struct;
7162 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
7163 }
7164
7165 /* If this is a single SET, we are setting a register, and we have an
7166 equivalent constant, we want to add a REG_NOTE. We don't want
7167 to write a REG_EQUAL note for a constant pseudo since verifying that
7168 that pseudo hasn't been eliminated is a pain. Such a note also
7169 won't help anything. */
7170 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
7171 && GET_CODE (src_const) != REG)
7172 {
7173 tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7174
7175 /* Record the actual constant value in a REG_EQUAL note, making
7176 a new one if one does not already exist. */
7177 if (tem)
7178 XEXP (tem, 0) = src_const;
7179 else
7180 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL,
7181 src_const, REG_NOTES (insn));
7182
7183 /* If storing a constant value in a register that
7184 previously held the constant value 0,
7185 record this fact with a REG_WAS_0 note on this insn.
7186
7187 Note that the *register* is required to have previously held 0,
7188 not just any register in the quantity, and we must point to the
7189 insn that set that register to zero.
7190
7191 Rather than track each register individually, we just see if
7192 the last set for this quantity was for this register. */
7193
7194 if (REGNO_QTY_VALID_P (REGNO (dest))
7195 && qty_const[reg_qty[REGNO (dest)]] == const0_rtx)
7196 {
7197 /* See if we previously had a REG_WAS_0 note. */
7198 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
7199 rtx const_insn = qty_const_insn[reg_qty[REGNO (dest)]];
7200
7201 if ((tem = single_set (const_insn)) != 0
7202 && rtx_equal_p (SET_DEST (tem), dest))
7203 {
7204 if (note)
7205 XEXP (note, 0) = const_insn;
7206 else
7207 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_WAS_0,
7208 const_insn,
7209 REG_NOTES (insn));
7210 }
7211 }
7212 }
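	  /* Example of the note added above (insn and register numbers
	     illustrative): if insn I1 did (set (reg 100) (const_int 0))
	     and this insn now stores (const_int 5) in reg 100, INSN
	     gets a REG_WAS_0 note pointing at I1.  */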
7213
7214 /* Now deal with the destination. */
7215 do_not_record = 0;
7216 sets[i].inner_dest_loc = &SET_DEST (sets[i].rtl);
7217
7218 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
7219 to the MEM or REG within it. */
7220 while (GET_CODE (dest) == SIGN_EXTRACT
7221 || GET_CODE (dest) == ZERO_EXTRACT
7222 || GET_CODE (dest) == SUBREG
7223 || GET_CODE (dest) == STRICT_LOW_PART)
7224 {
7225 sets[i].inner_dest_loc = &XEXP (dest, 0);
7226 dest = XEXP (dest, 0);
7227 }
7228
7229 sets[i].inner_dest = dest;
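      /* For instance, for (set (zero_extract:SI (reg 99) ...) ...) the
	 loop above leaves inner_dest as (reg 99), while for a plain
	 (set (reg 99) ...) it is the destination itself.  (Register
	 number illustrative.)  */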
7230
7231 if (GET_CODE (dest) == MEM)
7232 {
7233 #ifdef PUSH_ROUNDING
7234 /* Stack pushes invalidate the stack pointer. */
7235 rtx addr = XEXP (dest, 0);
7236 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
7237 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
7238 && XEXP (addr, 0) == stack_pointer_rtx)
7239 invalidate (stack_pointer_rtx, Pmode);
7240 #endif
7241 dest = fold_rtx (dest, insn);
7242 }
7243
7244 /* Compute the hash code of the destination now,
7245 before the effects of this instruction are recorded,
7246 since the register values used in the address computation
7247 are those before this instruction. */
7248 sets[i].dest_hash = HASH (dest, mode);
7249
7250 /* Don't enter a bit-field in the hash table
7251 because the value in it after the store
7252 may not equal what was stored, due to truncation. */
7253
7254 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
7255 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
7256 {
7257 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
7258
7259 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
7260 && GET_CODE (width) == CONST_INT
7261 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
7262 && ! (INTVAL (src_const)
7263 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
7264 /* Exception: if the value is constant,
7265 and it won't be truncated, record it. */
7266 ;
7267 else
7268 {
7269 /* This is chosen so that the destination will be invalidated
7270 but no new value will be recorded.
7271 We must invalidate because sometimes constant
7272 values can be recorded for bitfields. */
7273 sets[i].src_elt = 0;
7274 sets[i].src_volatile = 1;
7275 src_eqv = 0;
7276 src_eqv_elt = 0;
7277 }
7278 }
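      /* Illustration: storing (const_int 3) into an 8-bit ZERO_EXTRACT
	 passes the test above, since 3 has no bits outside the field;
	 storing (const_int 300) does not, so that destination is merely
	 invalidated and no new value is recorded.  */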
7279
7280 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
7281 the insn. */
7282 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
7283 {
7284 PUT_CODE (insn, NOTE);
7285 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7286 NOTE_SOURCE_FILE (insn) = 0;
7287 cse_jumps_altered = 1;
7288 /* One less use of the label this insn used to jump to. */
7289 if (JUMP_LABEL (insn) != 0)
7290 --LABEL_NUSES (JUMP_LABEL (insn));
7291 /* No more processing for this set. */
7292 sets[i].rtl = 0;
7293 }
7294
7295 /* If this SET is now setting PC to a label, we know it used to
7296 be a conditional or computed branch. So we see if we can follow
7297 it. If it was a computed branch, delete it and re-emit. */
7298 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
7299 {
7300 rtx p;
7301
7302 /* If this is not in the format for a simple branch and
7303 this is the only SET in the insn, re-emit it. */
7304 if (! simplejump_p (insn) && n_sets == 1)
7305 {
7306 rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
7307 JUMP_LABEL (new) = XEXP (src, 0);
7308 LABEL_NUSES (XEXP (src, 0))++;
7309 delete_insn (insn);
7310 insn = new;
7311 }
7312 else
7313 /* Otherwise, force rerecognition, since it probably had
7314 a different pattern before.
7315 This shouldn't really be necessary, since whatever
7316 changed the source value above should have done this.
7317 Until the right place is found, might as well do this here. */
7318 INSN_CODE (insn) = -1;
7319
7320 /* Now that we've converted this jump to an unconditional jump,
7321 there is dead code after it. Delete the dead code until we
7322 reach a BARRIER, the end of the function, or a label. Do
7323 not delete NOTEs except for NOTE_INSN_DELETED since later
7324 phases assume these notes are retained. */
7325
7326 p = insn;
7327
7328 while (NEXT_INSN (p) != 0
7329 && GET_CODE (NEXT_INSN (p)) != BARRIER
7330 && GET_CODE (NEXT_INSN (p)) != CODE_LABEL)
7331 {
7332 if (GET_CODE (NEXT_INSN (p)) != NOTE
7333 || NOTE_LINE_NUMBER (NEXT_INSN (p)) == NOTE_INSN_DELETED)
7334 delete_insn (NEXT_INSN (p));
7335 else
7336 p = NEXT_INSN (p);
7337 }
7338
7339 /* If we don't have a BARRIER immediately after INSN, put one there.
7340 Much code assumes that there are no NOTEs between a JUMP_INSN and
7341 BARRIER. */
7342
7343 if (NEXT_INSN (insn) == 0
7344 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
7345 emit_barrier_before (NEXT_INSN (insn));
7346
7347 /* We might have two BARRIERs separated by notes. Delete the second
7348 one if so. */
7349
7350 if (p != insn && NEXT_INSN (p) != 0
7351 && GET_CODE (NEXT_INSN (p)) == BARRIER)
7352 delete_insn (NEXT_INSN (p));
7353
7354 cse_jumps_altered = 1;
7355 sets[i].rtl = 0;
7356 }
7357
7358 /* If destination is volatile, invalidate it and then do no further
7359 processing for this assignment. */
7360
7361 else if (do_not_record)
7362 {
7363 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7364 || GET_CODE (dest) == MEM)
7365 invalidate (dest, VOIDmode);
7366 else if (GET_CODE (dest) == STRICT_LOW_PART
7367 || GET_CODE (dest) == ZERO_EXTRACT)
7368 invalidate (XEXP (dest, 0), GET_MODE (dest));
7369 sets[i].rtl = 0;
7370 }
7371
7372 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
7373 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
7374
7375 #ifdef HAVE_cc0
7376 /* If setting CC0, record what it was set to, or a constant, if it
7377 is equivalent to a constant. If it is being set to a floating-point
7378 value, make a COMPARE with the appropriate constant of 0. If we
7379 don't do this, later code can interpret this as a test against
7380 const0_rtx, which can cause problems if we try to put it into an
7381 insn as a floating-point operand. */
7382 if (dest == cc0_rtx)
7383 {
7384 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
7385 this_insn_cc0_mode = mode;
7386 if (FLOAT_MODE_P (mode))
7387 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
7388 CONST0_RTX (mode));
7389 }
7390 #endif
7391 }
7392
7393 /* Now enter all non-volatile source expressions in the hash table
7394 if they are not already present.
7395 Record their equivalence classes in src_elt.
7396 This way we can insert the corresponding destinations into
7397 the same classes even if the actual sources are no longer in them
7398 (having been invalidated). */
7399
7400 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
7401 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
7402 {
7403 register struct table_elt *elt;
7404 register struct table_elt *classp = sets[0].src_elt;
7405 rtx dest = SET_DEST (sets[0].rtl);
7406 enum machine_mode eqvmode = GET_MODE (dest);
7407
7408 if (GET_CODE (dest) == STRICT_LOW_PART)
7409 {
7410 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
7411 classp = 0;
7412 }
7413 if (insert_regs (src_eqv, classp, 0))
7414 {
7415 rehash_using_reg (src_eqv);
7416 src_eqv_hash = HASH (src_eqv, eqvmode);
7417 }
7418 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
7419 elt->in_memory = src_eqv_in_memory;
7420 elt->in_struct = src_eqv_in_struct;
7421 src_eqv_elt = elt;
7422
7423 /* Check to see if src_eqv_elt is the same as a set source which
7424 does not yet have an elt, and if so set the elt of the set source
7425 to src_eqv_elt. */
7426 for (i = 0; i < n_sets; i++)
7427 if (sets[i].rtl && sets[i].src_elt == 0
7428 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
7429 sets[i].src_elt = src_eqv_elt;
7430 }
7431
7432 for (i = 0; i < n_sets; i++)
7433 if (sets[i].rtl && ! sets[i].src_volatile
7434 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
7435 {
7436 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
7437 {
7438 /* REG_EQUAL in setting a STRICT_LOW_PART
7439 gives an equivalent for the entire destination register,
7440 not just for the subreg being stored in now.
7441 This is a more interesting equivalence, so we arrange later
7442 to treat the entire reg as the destination. */
7443 sets[i].src_elt = src_eqv_elt;
7444 sets[i].src_hash = src_eqv_hash;
7445 }
7446 else
7447 {
7448 /* Insert source and constant equivalent into hash table, if not
7449 already present. */
7450 register struct table_elt *classp = src_eqv_elt;
7451 register rtx src = sets[i].src;
7452 register rtx dest = SET_DEST (sets[i].rtl);
7453 enum machine_mode mode
7454 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
7455
7456 if (sets[i].src_elt == 0)
7457 {
7458 register struct table_elt *elt;
7459
7460 /* Note that these insert_regs calls cannot remove
7461 any of the src_elt's, because they would have failed to
7462 match if not still valid. */
7463 if (insert_regs (src, classp, 0))
7464 {
7465 rehash_using_reg (src);
7466 sets[i].src_hash = HASH (src, mode);
7467 }
7468 elt = insert (src, classp, sets[i].src_hash, mode);
7469 elt->in_memory = sets[i].src_in_memory;
7470 elt->in_struct = sets[i].src_in_struct;
7471 sets[i].src_elt = classp = elt;
7472 }
7473
7474 if (sets[i].src_const && sets[i].src_const_elt == 0
7475 && src != sets[i].src_const
7476 && ! rtx_equal_p (sets[i].src_const, src))
7477 sets[i].src_elt = insert (sets[i].src_const, classp,
7478 sets[i].src_const_hash, mode);
7479 }
7480 }
7481 else if (sets[i].src_elt == 0)
7482 /* If we did not insert the source into the hash table (e.g., it was
7483 volatile), note the equivalence class for the REG_EQUAL value, if any,
7484 so that the destination goes into that class. */
7485 sets[i].src_elt = src_eqv_elt;
7486
7487 invalidate_from_clobbers (x);
7488
7489 /* Some registers are invalidated by subroutine calls. Memory is
7490 invalidated by non-constant calls. */
7491
7492 if (GET_CODE (insn) == CALL_INSN)
7493 {
7494 if (! CONST_CALL_P (insn))
7495 invalidate_memory ();
7496 invalidate_for_call ();
7497 }
7498
7499 /* Now invalidate everything set by this instruction.
7500 If a SUBREG or other funny destination is being set,
7501 sets[i].rtl is still nonzero, so here we invalidate the reg
7502 a part of which is being set. */
7503
7504 for (i = 0; i < n_sets; i++)
7505 if (sets[i].rtl)
7506 {
7507 /* We can't use the inner dest, because the mode associated with
7508 a ZERO_EXTRACT is significant. */
7509 register rtx dest = SET_DEST (sets[i].rtl);
7510
7511 /* Needed for registers to remove the register from its
7512 previous quantity's chain.
7513 Needed for memory if this is a nonvarying address, unless
7514 we have just done an invalidate_memory that covers even those. */
7515 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7516 || GET_CODE (dest) == MEM)
7517 invalidate (dest, VOIDmode);
7518 else if (GET_CODE (dest) == STRICT_LOW_PART
7519 || GET_CODE (dest) == ZERO_EXTRACT)
7520 invalidate (XEXP (dest, 0), GET_MODE (dest));
7521 }
7522
7523 /* Make sure registers mentioned in destinations
7524 are safe for use in an expression to be inserted.
7525 This removes from the hash table
7526 any invalid entry that refers to one of these registers.
7527
7528 We don't care about the return value from mention_regs because
7529 we are going to hash the SET_DEST values unconditionally. */
7530
7531 for (i = 0; i < n_sets; i++)
7532 {
7533 if (sets[i].rtl)
7534 {
7535 rtx x = SET_DEST (sets[i].rtl);
7536
7537 if (GET_CODE (x) != REG)
7538 mention_regs (x);
7539 else
7540 {
7541 /* We used to rely on all references to a register becoming
7542 inaccessible when a register changes to a new quantity,
7543 since that changes the hash code. However, that is not
7544 safe, since after NBUCKETS new quantities we get a
7545 hash 'collision' of a register with its own invalid
7546 entries. And since SUBREGs have been changed not to
7547 change their hash code with the hash code of the register,
7548 it wouldn't work any longer at all. So we have to check
7549 for any invalid references lying around now.
7550 This code is similar to the REG case in mention_regs,
7551 but it knows that reg_tick has been incremented, and
7552 it leaves reg_in_table as -1. */
7553 register int regno = REGNO (x);
7554 register int endregno
7555 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
7556 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
7557 int i;
7558
7559 for (i = regno; i < endregno; i++)
7560 {
7561 if (reg_in_table[i] >= 0)
7562 {
7563 remove_invalid_refs (i);
7564 reg_in_table[i] = -1;
7565 }
7566 }
7567 }
7568 }
7569 }
7570
7571 /* We may have just removed some of the src_elt's from the hash table.
7572 So replace each one with the current head of the same class. */
7573
7574 for (i = 0; i < n_sets; i++)
7575 if (sets[i].rtl)
7576 {
7577 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
7578 /* If elt was removed, find current head of same class,
7579 or 0 if nothing remains of that class. */
7580 {
7581 register struct table_elt *elt = sets[i].src_elt;
7582
7583 while (elt && elt->prev_same_value)
7584 elt = elt->prev_same_value;
7585
7586 while (elt && elt->first_same_value == 0)
7587 elt = elt->next_same_value;
7588 sets[i].src_elt = elt ? elt->first_same_value : 0;
7589 }
7590 }
7591
7592 /* Now insert the destinations into their equivalence classes. */
7593
7594 for (i = 0; i < n_sets; i++)
7595 if (sets[i].rtl)
7596 {
7597 register rtx dest = SET_DEST (sets[i].rtl);
7598 rtx inner_dest = sets[i].inner_dest;
7599 register struct table_elt *elt;
7600
7601 /* Don't record value if we are not supposed to risk allocating
7602 floating-point values in registers that might be wider than
7603 memory. */
7604 if ((flag_float_store
7605 && GET_CODE (dest) == MEM
7606 && FLOAT_MODE_P (GET_MODE (dest)))
7607 /* Don't record BLKmode values, because we don't know the
7608 size of it, and can't be sure that other BLKmode values
7609 have the same or smaller size. */
7610 || GET_MODE (dest) == BLKmode
7611 /* Don't record values of destinations set inside a libcall block
7612 since we might delete the libcall. Things should have been set
7613 up so we won't want to reuse such a value, but we play it safe
7614 here. */
7615 || libcall_insn
7616 /* If we didn't put a REG_EQUAL value or a source into the hash
7617 table, there is no point in recording DEST. */
7618 || sets[i].src_elt == 0
7619 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
7620 or SIGN_EXTEND, don't record DEST since it can cause
7621 some tracking to be wrong.
7622
7623 ??? Think about this more later. */
7624 || (GET_CODE (dest) == SUBREG
7625 && (GET_MODE_SIZE (GET_MODE (dest))
7626 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7627 && (GET_CODE (sets[i].src) == SIGN_EXTEND
7628 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
7629 continue;
7630
7631 /* STRICT_LOW_PART isn't part of the value BEING set,
7632 and neither is the SUBREG inside it.
7633 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
7634 if (GET_CODE (dest) == STRICT_LOW_PART)
7635 dest = SUBREG_REG (XEXP (dest, 0));
7636
7637 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
7638 /* Registers must also be inserted into chains for quantities. */
7639 if (insert_regs (dest, sets[i].src_elt, 1))
7640 {
7641 /* If `insert_regs' changes something, the hash code must be
7642 recalculated. */
7643 rehash_using_reg (dest);
7644 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
7645 }
7646
7647 if (GET_CODE (inner_dest) == MEM
7648 && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
7649 /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
7650 that (MEM (ADDRESSOF (X))) is equivalent to Y.
7651 Consider the case in which the address of the MEM is
7652 passed to a function, which alters the MEM. Then, if we
7653 later use Y instead of the MEM we'll miss the update. */
7654 elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
7655 else
7656 elt = insert (dest, sets[i].src_elt,
7657 sets[i].dest_hash, GET_MODE (dest));
7658
7659 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
7660 && (! RTX_UNCHANGING_P (sets[i].inner_dest)
7661 || FIXED_BASE_PLUS_P (XEXP (sets[i].inner_dest,
7662 0))));
7663
7664 if (elt->in_memory)
7665 {
7666 /* This implicitly assumes a whole struct
7667 need not have MEM_IN_STRUCT_P.
7668 But a whole struct is *supposed* to have MEM_IN_STRUCT_P. */
7669 elt->in_struct = (MEM_IN_STRUCT_P (sets[i].inner_dest)
7670 || sets[i].inner_dest != SET_DEST (sets[i].rtl));
7671 }
7672
7673 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
7674 narrower than M2, and both M1 and M2 are the same number of words,
7675 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
7676 make that equivalence as well.
7677
7678 However, BAR may have equivalences for which gen_lowpart_if_possible
7679 will produce a simpler value than gen_lowpart_if_possible applied to
7680 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
7681 BAR's equivalences. If we don't get a simplified form, make
7682 the SUBREG. It will not be used in an equivalence, but will
7683 cause two similar assignments to be detected.
7684
7685 Note the loop below will find SUBREG_REG (DEST) since we have
7686 already entered SRC and DEST of the SET in the table. */
7687
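      /* A sketch of the equivalence described above on a 32-bit target
	 (pseudo numbers illustrative):
	 (set (subreg:SI (reg:HI 100) 0) (reg:SI 101)) also tells us
	 that (reg:HI 100) equals the HImode low part of (reg:SI 101),
	 since SImode and HImode occupy the same single word there.  */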
7688 if (GET_CODE (dest) == SUBREG
7689 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
7690 / UNITS_PER_WORD)
7691 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
7692 && (GET_MODE_SIZE (GET_MODE (dest))
7693 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7694 && sets[i].src_elt != 0)
7695 {
7696 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
7697 struct table_elt *elt, *classp = 0;
7698
7699 for (elt = sets[i].src_elt->first_same_value; elt;
7700 elt = elt->next_same_value)
7701 {
7702 rtx new_src = 0;
7703 unsigned src_hash;
7704 struct table_elt *src_elt;
7705
7706 /* Ignore invalid entries. */
7707 if (GET_CODE (elt->exp) != REG
7708 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
7709 continue;
7710
7711 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
7712 if (new_src == 0)
7713 new_src = gen_rtx_SUBREG (new_mode, elt->exp, 0);
7714
7715 src_hash = HASH (new_src, new_mode);
7716 src_elt = lookup (new_src, src_hash, new_mode);
7717
7718 /* Put the new source in the hash table if it isn't
7719 there already. */
7720 if (src_elt == 0)
7721 {
7722 if (insert_regs (new_src, classp, 0))
7723 {
7724 rehash_using_reg (new_src);
7725 src_hash = HASH (new_src, new_mode);
7726 }
7727 src_elt = insert (new_src, classp, src_hash, new_mode);
7728 src_elt->in_memory = elt->in_memory;
7729 src_elt->in_struct = elt->in_struct;
7730 }
7731 else if (classp && classp != src_elt->first_same_value)
7732 /* Show that two things that we've seen before are
7733 actually the same. */
7734 merge_equiv_classes (src_elt, classp);
7735
7736 classp = src_elt->first_same_value;
7737 /* Ignore invalid entries. */
7738 while (classp
7739 && GET_CODE (classp->exp) != REG
7740 && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
7741 classp = classp->next_same_value;
7742 }
7743 }
7744 }
7745
7746 /* Special handling for (set REG0 REG1)
7747 where REG0 is the "cheapest", cheaper than REG1.
7748 After cse, REG1 will probably not be used in the sequel,
7749 so (if easily done) change this insn to (set REG1 REG0) and
7750 replace REG1 with REG0 in the previous insn that computed their value.
7751 Then REG1 will become a dead store and won't cloud the situation
7752 for later optimizations.
7753
7754 Do not make this change if REG1 is a hard register, because it will
7755 then be used in the sequel and we may be changing a two-operand insn
7756 into a three-operand insn.
7757
7758 Also do not do this if we are operating on a copy of INSN. */
7759
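  /* Illustrative example (pseudo numbers made up): given
     I1: (set (reg 101) (expr)) followed by I2: (set (reg 100) (reg 101))
     where reg 100 is the cheapest member of the class, we rewrite I1 to
     set reg 100 and I2 to (set (reg 101) (reg 100)), leaving I2 a
     likely-dead store.  */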
7760 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
7761 && NEXT_INSN (PREV_INSN (insn)) == insn
7762 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
7763 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
7764 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl)))
7765 && (qty_first_reg[reg_qty[REGNO (SET_SRC (sets[0].rtl))]]
7766 == REGNO (SET_DEST (sets[0].rtl))))
7767 {
7768 rtx prev = PREV_INSN (insn);
7769 while (prev && GET_CODE (prev) == NOTE)
7770 prev = PREV_INSN (prev);
7771
7772 if (prev && GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SET
7773 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl))
7774 {
7775 rtx dest = SET_DEST (sets[0].rtl);
7776 rtx note = find_reg_note (prev, REG_EQUIV, NULL_RTX);
7777
7778 validate_change (prev, & SET_DEST (PATTERN (prev)), dest, 1);
7779 validate_change (insn, & SET_DEST (sets[0].rtl),
7780 SET_SRC (sets[0].rtl), 1);
7781 validate_change (insn, & SET_SRC (sets[0].rtl), dest, 1);
7782 apply_change_group ();
7783
7784 /* If REG1 was equivalent to a constant, REG0 is not. */
7785 if (note)
7786 PUT_REG_NOTE_KIND (note, REG_EQUAL);
7787
7788 /* If there was a REG_WAS_0 note on PREV, remove it. Move
7789 any REG_WAS_0 note on INSN to PREV. */
7790 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
7791 if (note)
7792 remove_note (prev, note);
7793
7794 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
7795 if (note)
7796 {
7797 remove_note (insn, note);
7798 XEXP (note, 1) = REG_NOTES (prev);
7799 REG_NOTES (prev) = note;
7800 }
7801
7802 /* If INSN has a REG_EQUAL note, and this note mentions REG0,
7803 then we must delete it, because the value in REG0 has changed. */
7804 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7805 if (note && reg_mentioned_p (dest, XEXP (note, 0)))
7806 remove_note (insn, note);
7807 }
7808 }
7809
7810 /* If this is a conditional jump insn, record any known equivalences due to
7811 the condition being tested. */
7812
7813 last_jump_equiv_class = 0;
7814 if (GET_CODE (insn) == JUMP_INSN
7815 && n_sets == 1 && GET_CODE (x) == SET
7816 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
7817 record_jump_equiv (insn, 0);
7818
7819 #ifdef HAVE_cc0
7820 /* If the previous insn set CC0 and this insn no longer references CC0,
7821 delete the previous insn. Here we use the fact that nothing expects CC0
7822 to be valid over an insn, which is true until the final pass. */
7823 if (prev_insn && GET_CODE (prev_insn) == INSN
7824 && (tem = single_set (prev_insn)) != 0
7825 && SET_DEST (tem) == cc0_rtx
7826 && ! reg_mentioned_p (cc0_rtx, x))
7827 {
7828 PUT_CODE (prev_insn, NOTE);
7829 NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
7830 NOTE_SOURCE_FILE (prev_insn) = 0;
7831 }
7832
7833 prev_insn_cc0 = this_insn_cc0;
7834 prev_insn_cc0_mode = this_insn_cc0_mode;
7835 #endif
7836
7837 prev_insn = insn;
7838 }
7839 \f
7840 /* Remove from the hash table all expressions that reference memory. */
7841 static void
7842 invalidate_memory ()
7843 {
7844 register int i;
7845 register struct table_elt *p, *next;
7846
7847 for (i = 0; i < NBUCKETS; i++)
7848 for (p = table[i]; p; p = next)
7849 {
7850 next = p->next_same_hash;
7851 if (p->in_memory)
7852 remove_from_table (p, i);
7853 }
7854 }
7855
7856 /* XXX ??? The name of this function bears little resemblance to
7857 what this function actually does. FIXME. */
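/* What it does do: given an auto-increment stack address such as
   (pre_dec:SI (reg sp)), it bumps reg_tick for the stack pointer,
   invalidates it if it is in the hash table, and returns 1; for
   anything else it returns 0.  */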
7858 static int
7859 note_mem_written (addr)
7860 register rtx addr;
7861 {
7862 /* Pushing or popping the stack invalidates just the stack pointer. */
7863 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
7864 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
7865 && GET_CODE (XEXP (addr, 0)) == REG
7866 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
7867 {
7868 if (reg_tick[STACK_POINTER_REGNUM] >= 0)
7869 reg_tick[STACK_POINTER_REGNUM]++;
7870
7871 /* This should be *very* rare. */
7872 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
7873 invalidate (stack_pointer_rtx, VOIDmode);
7874 return 1;
7875 }
7876 return 0;
7877 }
7878
7879 /* Perform invalidation on the basis of everything about an insn
7880 except for invalidating the actual places that are SET in it.
7881 This includes the places CLOBBERed, and anything that might
7882 alias with something that is SET or CLOBBERed.
7883
7884 X is the pattern of the insn. */
7885
7886 static void
7887 invalidate_from_clobbers (x)
7888 rtx x;
7889 {
7890 if (GET_CODE (x) == CLOBBER)
7891 {
7892 rtx ref = XEXP (x, 0);
7893 if (ref)
7894 {
7895 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
7896 || GET_CODE (ref) == MEM)
7897 invalidate (ref, VOIDmode);
7898 else if (GET_CODE (ref) == STRICT_LOW_PART
7899 || GET_CODE (ref) == ZERO_EXTRACT)
7900 invalidate (XEXP (ref, 0), GET_MODE (ref));
7901 }
7902 }
7903 else if (GET_CODE (x) == PARALLEL)
7904 {
7905 register int i;
7906 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
7907 {
7908 register rtx y = XVECEXP (x, 0, i);
7909 if (GET_CODE (y) == CLOBBER)
7910 {
7911 rtx ref = XEXP (y, 0);
7912 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
7913 || GET_CODE (ref) == MEM)
7914 invalidate (ref, VOIDmode);
7915 else if (GET_CODE (ref) == STRICT_LOW_PART
7916 || GET_CODE (ref) == ZERO_EXTRACT)
7917 invalidate (XEXP (ref, 0), GET_MODE (ref));
7918 }
7919 }
7920 }
7921 }
7922 \f
7923 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
7924 and replace any registers in them with either an equivalent constant
7925 or the canonical form of the register. If we are inside an address,
7926 only do this if the address remains valid.
7927
7928 OBJECT is 0 except when within a MEM in which case it is the MEM.
7929
7930 Return the replacement for X. */
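/* For instance, a REG_EQUAL note of (plus:SI (reg 100) (const_int 4)),
   where reg 100's quantity has the known constant (const_int 8), comes
   back as (plus:SI (const_int 8) (const_int 4)).
   (Register number illustrative.)  */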
7931
7932 static rtx
7933 cse_process_notes (x, object)
7934 rtx x;
7935 rtx object;
7936 {
7937 enum rtx_code code = GET_CODE (x);
7938 char *fmt = GET_RTX_FORMAT (code);
7939 int i;
7940
7941 switch (code)
7942 {
7943 case CONST_INT:
7944 case CONST:
7945 case SYMBOL_REF:
7946 case LABEL_REF:
7947 case CONST_DOUBLE:
7948 case PC:
7949 case CC0:
7950 case LO_SUM:
7951 return x;
7952
7953 case MEM:
7954 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
7955 return x;
7956
7957 case EXPR_LIST:
7958 case INSN_LIST:
7959 if (REG_NOTE_KIND (x) == REG_EQUAL)
7960 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
7961 if (XEXP (x, 1))
7962 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
7963 return x;
7964
7965 case SIGN_EXTEND:
7966 case ZERO_EXTEND:
7967 case SUBREG:
7968 {
7969 rtx new = cse_process_notes (XEXP (x, 0), object);
7970 /* We don't substitute VOIDmode constants into these rtx,
7971 since they would impede folding. */
7972 if (GET_MODE (new) != VOIDmode)
7973 validate_change (object, &XEXP (x, 0), new, 0);
7974 return x;
7975 }
7976
7977 case REG:
7978 i = reg_qty[REGNO (x)];
7979
7980 /* Return a constant or a constant register. */
7981 if (REGNO_QTY_VALID_P (REGNO (x))
7982 && qty_const[i] != 0
7983 && (CONSTANT_P (qty_const[i])
7984 || GET_CODE (qty_const[i]) == REG))
7985 {
7986 rtx new = gen_lowpart_if_possible (GET_MODE (x), qty_const[i]);
7987 if (new)
7988 return new;
7989 }
7990
7991 /* Otherwise, canonicalize this register. */
7992 return canon_reg (x, NULL_RTX);
7993
7994 default:
7995 break;
7996 }
7997
7998 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7999 if (fmt[i] == 'e')
8000 validate_change (object, &XEXP (x, i),
8001 cse_process_notes (XEXP (x, i), object), 0);
8002
8003 return x;
8004 }
8005 \f
8006 /* Find common subexpressions between the end test of a loop and the beginning
8007 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
8008
8009 Often we have a loop where an expression in the exit test is used
8010 in the body of the loop. For example "while (*p) *q++ = *p++;".
8011 Because of the way we duplicate the loop exit test in front of the loop,
8012 however, we don't detect that common subexpression. This will be caught
8013 when global cse is implemented, but this is a quite common case.
8014
8015 This function handles the most common cases of these common expressions.
8016 It is called after we have processed the basic block ending with the
8017 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
8018 jumps to a label used only once. */
8019
8020 static void
8021 cse_around_loop (loop_start)
8022 rtx loop_start;
8023 {
8024 rtx insn;
8025 int i;
8026 struct table_elt *p;
8027
8028 /* If the jump at the end of the loop doesn't go to the start, we don't
8029 do anything. */
8030 for (insn = PREV_INSN (loop_start);
8031 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
8032 insn = PREV_INSN (insn))
8033 ;
8034
8035 if (insn == 0
8036 || GET_CODE (insn) != NOTE
8037 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
8038 return;
8039
8040 /* If the last insn of the loop (the end test) was an NE comparison,
8041 we will interpret it as an EQ comparison, since we fell through
8042 the loop. Any equivalences resulting from that comparison are
8043 therefore not valid and must be invalidated. */
8044 if (last_jump_equiv_class)
8045 for (p = last_jump_equiv_class->first_same_value; p;
8046 p = p->next_same_value)
8047 {
8048 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
8049 || (GET_CODE (p->exp) == SUBREG
8050 && GET_CODE (SUBREG_REG (p->exp)) == REG))
8051 invalidate (p->exp, VOIDmode);
8052 else if (GET_CODE (p->exp) == STRICT_LOW_PART
8053 || GET_CODE (p->exp) == ZERO_EXTRACT)
8054 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
8055 }
8056
8057 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
8058 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
8059
8060 The only thing we do with SET_DEST is invalidate entries, so we
8061 can safely process each SET in order. It is slightly less efficient
8062 to do so, but we only want to handle the most common cases.
8063
8064 The gen_move_insn call in cse_set_around_loop may create new pseudos.
8065 These pseudos won't have valid entries in any of the tables indexed
8066 by register number, such as reg_qty. We avoid out-of-range array
8067 accesses by not processing any instructions created after cse started. */
8068
8069 for (insn = NEXT_INSN (loop_start);
8070 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
8071 && INSN_UID (insn) < max_insn_uid
8072 && ! (GET_CODE (insn) == NOTE
8073 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
8074 insn = NEXT_INSN (insn))
8075 {
8076 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
8077 && (GET_CODE (PATTERN (insn)) == SET
8078 || GET_CODE (PATTERN (insn)) == CLOBBER))
8079 cse_set_around_loop (PATTERN (insn), insn, loop_start);
8080 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
8081 && GET_CODE (PATTERN (insn)) == PARALLEL)
8082 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
8083 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
8084 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
8085 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
8086 loop_start);
8087 }
8088 }
8089 \f
8090 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
8091 since they are done elsewhere. This function is called via note_stores. */
8092
8093 static void
8094 invalidate_skipped_set (dest, set)
8095 rtx set;
8096 rtx dest;
8097 {
8098 enum rtx_code code = GET_CODE (dest);
8099
8100 if (code == MEM
8101 && ! note_mem_written (dest) /* If this is not a stack push ... */
8102 /* There are times when an address can appear varying and be a PLUS
8103 during this scan when it would be a fixed address were we to know
8104 the proper equivalences. So invalidate all memory if there is
8105 a BLKmode or nonscalar memory reference or a reference to a
8106 variable address. */
8107 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
8108 || cse_rtx_varies_p (XEXP (dest, 0))))
8109 {
8110 invalidate_memory ();
8111 return;
8112 }
8113
8114 if (GET_CODE (set) == CLOBBER
8115 #ifdef HAVE_cc0
8116 || dest == cc0_rtx
8117 #endif
8118 || dest == pc_rtx)
8119 return;
8120
8121 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
8122 invalidate (XEXP (dest, 0), GET_MODE (dest));
8123 else if (code == REG || code == SUBREG || code == MEM)
8124 invalidate (dest, VOIDmode);
8125 }
8126
8127 /* Invalidate all insns from START up to the end of the function or the
8128 next label. This is called when we wish to CSE around a block that is
8129 conditionally executed. */
8130
8131 static void
8132 invalidate_skipped_block (start)
8133 rtx start;
8134 {
8135 rtx insn;
8136
8137 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
8138 insn = NEXT_INSN (insn))
8139 {
8140 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
8141 continue;
8142
8143 if (GET_CODE (insn) == CALL_INSN)
8144 {
8145 if (! CONST_CALL_P (insn))
8146 invalidate_memory ();
8147 invalidate_for_call ();
8148 }
8149
8150 invalidate_from_clobbers (PATTERN (insn));
8151 note_stores (PATTERN (insn), invalidate_skipped_set);
8152 }
8153 }
8154 \f
8155 /* Used for communication between the following two routines; contains a
8156 value to be checked for modification. */
8157
8158 static rtx cse_check_loop_start_value;
8159
8160 /* If modifying X will modify the value in CSE_CHECK_LOOP_START_VALUE,
8161 indicate that fact by setting CSE_CHECK_LOOP_START_VALUE to 0. */
8162
8163 static void
8164 cse_check_loop_start (x, set)
8165 rtx x;
8166 rtx set ATTRIBUTE_UNUSED;
8167 {
8168 if (cse_check_loop_start_value == 0
8169 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
8170 return;
8171
8172 if ((GET_CODE (x) == MEM && GET_CODE (cse_check_loop_start_value) == MEM)
8173 || reg_overlap_mentioned_p (x, cse_check_loop_start_value))
8174 cse_check_loop_start_value = 0;
8175 }
8176
8177 /* X is a SET or CLOBBER contained in INSN that was found near the start of
8178 a loop that starts with the label at LOOP_START.
8179
8180 If X is a SET, we see if its SET_SRC is currently in our hash table.
8181 If so, we see if it has a value equal to some register used only in the
8182 loop exit code (as marked by jump.c).
8183
8184 If those two conditions are true, we search backwards from the start of
8185 the loop to see if that same value was loaded into a register that still
8186 retains its value at the start of the loop.
8187
8188 If so, we insert an insn after the load to copy the destination of that
8189 load into the equivalent register and (try to) replace our SET_SRC with that
8190 register.
8191
8192 In any event, we invalidate whatever this SET or CLOBBER modifies. */
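/* Sketch of the transformation (register numbers illustrative): with P
   before the loop being (set (reg 50) (expr)), a loop-exit-test register
   (reg 60) known equal to (expr), and INSN being (set (reg 70) (expr)),
   we emit (set (reg 60) (reg 50)) after P and change INSN into
   (set (reg 70) (reg 60)).  */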
8193
8194 static void
8195 cse_set_around_loop (x, insn, loop_start)
8196 rtx x;
8197 rtx insn;
8198 rtx loop_start;
8199 {
8200 struct table_elt *src_elt;
8201
8202 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
8203 are setting PC or CC0 or whose SET_SRC is already a register. */
8204 if (GET_CODE (x) == SET
8205 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
8206 && GET_CODE (SET_SRC (x)) != REG)
8207 {
8208 src_elt = lookup (SET_SRC (x),
8209 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
8210 GET_MODE (SET_DEST (x)));
8211
8212 if (src_elt)
8213 for (src_elt = src_elt->first_same_value; src_elt;
8214 src_elt = src_elt->next_same_value)
8215 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
8216 && COST (src_elt->exp) < COST (SET_SRC (x)))
8217 {
8218 rtx p, set;
8219
8220 /* Look for an insn in front of LOOP_START that sets
8221 something in the desired mode to SET_SRC (x) before we hit
8222 a label or CALL_INSN. */
8223
8224 for (p = prev_nonnote_insn (loop_start);
8225 p && GET_CODE (p) != CALL_INSN
8226 && GET_CODE (p) != CODE_LABEL;
8227 p = prev_nonnote_insn (p))
8228 if ((set = single_set (p)) != 0
8229 && GET_CODE (SET_DEST (set)) == REG
8230 && GET_MODE (SET_DEST (set)) == src_elt->mode
8231 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
8232 {
8233 /* We now have to ensure that nothing between P
8234 and LOOP_START modified anything referenced in
8235 SET_SRC (x). We know that nothing within the loop
8236 can modify it, or we would have invalidated it in
8237 the hash table. */
8238 rtx q;
8239
8240 cse_check_loop_start_value = SET_SRC (x);
8241 for (q = p; q != loop_start; q = NEXT_INSN (q))
8242 if (GET_RTX_CLASS (GET_CODE (q)) == 'i')
8243 note_stores (PATTERN (q), cse_check_loop_start);
8244
8245 /* If nothing was changed and we can replace our
8246 SET_SRC, add an insn after P to copy its destination
8247 to what we will be replacing SET_SRC with. */
8248 if (cse_check_loop_start_value
8249 && validate_change (insn, &SET_SRC (x),
8250 src_elt->exp, 0))
8251 {
8252 /* If this creates new pseudos, this is unsafe,
8253 because the regno of new pseudo is unsuitable
8254 to index into reg_qty when cse_insn processes
8255 the new insn. Therefore, if a new pseudo was
8256 created, discard this optimization. */
8257 int nregs = max_reg_num ();
8258 rtx move
8259 = gen_move_insn (src_elt->exp, SET_DEST (set));
8260 if (nregs != max_reg_num ())
8261 {
8262 if (! validate_change (insn, &SET_SRC (x),
8263 SET_SRC (set), 0))
8264 abort ();
8265 }
8266 else
8267 emit_insn_after (move, p);
8268 }
8269 break;
8270 }
8271 }
8272 }
8273
8274 /* Now invalidate anything modified by X. */
8275 note_mem_written (SET_DEST (x));
8276
8277 /* See comment on similar code in cse_insn for explanation of these tests. */
8278 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
8279 || GET_CODE (SET_DEST (x)) == MEM)
8280 invalidate (SET_DEST (x), VOIDmode);
8281 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
8282 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
8283 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
8284 }
8285 \f
8286 /* Find the end of INSN's basic block and return its range,
8287 the total number of SETs in all the insns of the block, the last insn of the
8288 block, and the branch path.
8289
8290 The branch path indicates which branches should be followed. If a non-zero
8291 path size is specified, the block should be rescanned and a different set
8292 of branches will be taken. The branch path is only used if
8293 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
8294
8295 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
8296 used to describe the block. It is filled in with the information about
8297 the current block. The incoming structure's branch path, if any, is used
8298 to construct the output branch path. */
8299
8300 void
8301 cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
8302 rtx insn;
8303 struct cse_basic_block_data *data;
8304 int follow_jumps;
8305 int after_loop;
8306 int skip_blocks;
8307 {
8308 rtx p = insn, q;
8309 int nsets = 0;
8310 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
8311 rtx next = GET_RTX_CLASS (GET_CODE (insn)) == 'i' ? insn : next_real_insn (insn);
8312 int path_size = data->path_size;
8313 int path_entry = 0;
8314 int i;
8315
8316 /* Update the previous branch path, if any. If the last branch was
8317 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
8318 shorten the path by one and look at the previous branch. We know that
8319 at least one branch must have been taken if PATH_SIZE is non-zero. */
8320 while (path_size > 0)
8321 {
8322 if (data->path[path_size - 1].status != NOT_TAKEN)
8323 {
8324 data->path[path_size - 1].status = NOT_TAKEN;
8325 break;
8326 }
8327 else
8328 path_size--;
8329 }
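  /* E.g. a previous path of [TAKEN, TAKEN] becomes [TAKEN, NOT_TAKEN],
     and [TAKEN, NOT_TAKEN] is shortened to [NOT_TAKEN]; successive
     rescans thus enumerate the possible branch combinations.  */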
8330
8331 /* Scan to end of this basic block. */
8332 while (p && GET_CODE (p) != CODE_LABEL)
8333 {
8334 /* Don't cse out the end of a loop. This makes a difference
8335 only for the unusual loops that always execute at least once;
8336 all other loops have labels there so we will stop in any case.
8337 Cse'ing out the end of the loop is dangerous because it
8338 might cause an invariant expression inside the loop
8339 to be reused after the end of the loop. This would make it
8340 hard to move the expression out of the loop in loop.c,
8341 especially if it is one of several equivalent expressions
8342 and loop.c would like to eliminate it.
8343
8344 If we are running after loop.c has finished, we can ignore
8345 the NOTE_INSN_LOOP_END. */
8346
8347 if (! after_loop && GET_CODE (p) == NOTE
8348 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
8349 break;
8350
8351 /* Don't cse over a call to setjmp; on some machines (e.g. vax)
8352 the regs restored by the longjmp come from
8353 a later time than the setjmp. */
8354 if (GET_CODE (p) == NOTE
8355 && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
8356 break;
8357
8358 /* A PARALLEL can have lots of SETs in it,
8359 especially if it is really an ASM_OPERANDS. */
8360 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
8361 && GET_CODE (PATTERN (p)) == PARALLEL)
8362 nsets += XVECLEN (PATTERN (p), 0);
8363 else if (GET_CODE (p) != NOTE)
8364 nsets += 1;
8365
8366 /* Ignore insns made by CSE; they cannot affect the boundaries of
8367 the basic block. */
8368
8369 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
8370 high_cuid = INSN_CUID (p);
8371 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
8372 low_cuid = INSN_CUID (p);
8373
8374 /* See if this insn is in our branch path. If it is and we are to
8375 take it, do so. */
8376 if (path_entry < path_size && data->path[path_entry].branch == p)
8377 {
8378 if (data->path[path_entry].status != NOT_TAKEN)
8379 p = JUMP_LABEL (p);
8380
8381 /* Point to next entry in path, if any. */
8382 path_entry++;
8383 }
8384
8385 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
8386 was specified, we haven't reached our maximum path length, there are
8387 insns following the target of the jump, this is the only use of the
8388 jump label, and the target label is preceded by a BARRIER.
8389
8390 Alternatively, we can follow the jump if it branches around a
8391 block of code and there are no other branches into the block.
8392 In this case invalidate_skipped_block will be called to invalidate any
8393 registers set in the block when following the jump. */
8394
8395 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
8396 && GET_CODE (p) == JUMP_INSN
8397 && GET_CODE (PATTERN (p)) == SET
8398 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
8399 && JUMP_LABEL (p) != 0
8400 && LABEL_NUSES (JUMP_LABEL (p)) == 1
8401 && NEXT_INSN (JUMP_LABEL (p)) != 0)
8402 {
8403 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
8404 if ((GET_CODE (q) != NOTE
8405 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
8406 || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
8407 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
8408 break;
8409
8410 /* If we ran into a BARRIER, this code is an extension of the
8411 basic block when the branch is taken. */
8412 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
8413 {
8414 /* Don't allow ourselves to keep walking around an
8415 always-executed loop. */
8416 if (next_real_insn (q) == next)
8417 {
8418 p = NEXT_INSN (p);
8419 continue;
8420 }
8421
8422 /* Similarly, don't put a branch in our path more than once. */
8423 for (i = 0; i < path_entry; i++)
8424 if (data->path[i].branch == p)
8425 break;
8426
8427 if (i != path_entry)
8428 break;
8429
8430 data->path[path_entry].branch = p;
8431 data->path[path_entry++].status = TAKEN;
8432
8433 /* This branch now ends our path. It was possible that we
8434 didn't see this branch the last time around (when the
8435 insn in front of the target was a JUMP_INSN that was
8436 turned into a no-op). */
8437 path_size = path_entry;
8438
8439 p = JUMP_LABEL (p);
8440 /* Mark block so we won't scan it again later. */
8441 PUT_MODE (NEXT_INSN (p), QImode);
8442 }
8443 /* Detect a branch around a block of code. */
8444 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
8445 {
8446 register rtx tmp;
8447
8448 if (next_real_insn (q) == next)
8449 {
8450 p = NEXT_INSN (p);
8451 continue;
8452 }
8453
8454 for (i = 0; i < path_entry; i++)
8455 if (data->path[i].branch == p)
8456 break;
8457
8458 if (i != path_entry)
8459 break;
8460
8461 /* This is no_labels_between_p (p, q) with an added check for
8462 reaching the end of a function (in case Q precedes P). */
8463 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
8464 if (GET_CODE (tmp) == CODE_LABEL)
8465 break;
8466
8467 if (tmp == q)
8468 {
8469 data->path[path_entry].branch = p;
8470 data->path[path_entry++].status = AROUND;
8471
8472 path_size = path_entry;
8473
8474 p = JUMP_LABEL (p);
8475 /* Mark block so we won't scan it again later. */
8476 PUT_MODE (NEXT_INSN (p), QImode);
8477 }
8478 }
8479 }
8480 p = NEXT_INSN (p);
8481 }
8482
8483 data->low_cuid = low_cuid;
8484 data->high_cuid = high_cuid;
8485 data->nsets = nsets;
8486 data->last = p;
8487
8488 /* If no jump in the path is taken, set our path length to zero
8489 so a rescan won't be done. */
8490 for (i = path_size - 1; i >= 0; i--)
8491 if (data->path[i].status != NOT_TAKEN)
8492 break;
8493
8494 if (i == -1)
8495 data->path_size = 0;
8496 else
8497 data->path_size = path_size;
8498
8499 /* End the current branch path. */
8500 data->path[path_size].branch = 0;
8501 }
8502 \f
8503 /* Perform cse on the instructions of a function.
8504 F is the first instruction.
8505 NREGS is one plus the highest pseudo-reg number used in the function.
8506
8507 AFTER_LOOP is 1 if this is the cse call done after loop optimization
8508 (only if -frerun-cse-after-loop).
8509
8510 Returns 1 if jump_optimize should be redone due to simplifications
8511 in conditional jump instructions. */
8512
8513 int
8514 cse_main (f, nregs, after_loop, file)
8515 rtx f;
8516 int nregs;
8517 int after_loop;
8518 FILE *file;
8519 {
8520 struct cse_basic_block_data val;
8521 register rtx insn = f;
8522 register int i;
8523
8524 cse_jumps_altered = 0;
8525 recorded_label_ref = 0;
8526 constant_pool_entries_cost = 0;
8527 val.path_size = 0;
8528
8529 init_recog ();
8530 init_alias_analysis ();
8531
8532 max_reg = nregs;
8533
8534 max_insn_uid = get_max_uid ();
8535
8536 all_minus_one = (int *) alloca (nregs * sizeof (int));
8537 consec_ints = (int *) alloca (nregs * sizeof (int));
8538
8539 for (i = 0; i < nregs; i++)
8540 {
8541 all_minus_one[i] = -1;
8542 consec_ints[i] = i;
8543 }
8544
8545 reg_next_eqv = (int *) alloca (nregs * sizeof (int));
8546 reg_prev_eqv = (int *) alloca (nregs * sizeof (int));
8547 reg_qty = (int *) alloca (nregs * sizeof (int));
8548 reg_in_table = (int *) alloca (nregs * sizeof (int));
8549 reg_tick = (int *) alloca (nregs * sizeof (int));
8550
8551 #ifdef LOAD_EXTEND_OP
8552
8553 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
8554 and change the code and mode as appropriate. */
8555 memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
8556 #endif
8557
8558 /* Discard all the free elements of the previous function
8559 since they are allocated in the temporary obstack. */
8560 bzero ((char *) table, sizeof table);
8561 free_element_chain = 0;
8562 n_elements_made = 0;
8563
8564 /* Find the largest uid. */
8565
8566 max_uid = get_max_uid ();
8567 uid_cuid = (int *) alloca ((max_uid + 1) * sizeof (int));
8568 bzero ((char *) uid_cuid, (max_uid + 1) * sizeof (int));
8569
8570 /* Compute the mapping from uids to cuids.
8571 CUIDs are numbers assigned to insns, like uids,
8572 except that cuids increase monotonically through the code.
8573 Don't assign cuids to line-number NOTEs, so that the distance in cuids
8574 between two insns is not affected by -g. */
8575
8576 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
8577 {
8578 if (GET_CODE (insn) != NOTE
8579 || NOTE_LINE_NUMBER (insn) < 0)
8580 INSN_CUID (insn) = ++i;
8581 else
8582 /* Give a line number note the same cuid as preceding insn. */
8583 INSN_CUID (insn) = i;
8584 }
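  /* So an insn with uid 7 followed by a line-number note and then an
     insn with uid 40 yields consecutive cuids for the two insns, with
     the note sharing the first insn's cuid; compiling with -g therefore
     cannot change any cuid distance.  (Uids illustrative.)  */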
8585
8586 /* Initialize which registers are clobbered by calls. */
8587
8588 CLEAR_HARD_REG_SET (regs_invalidated_by_call);
8589
8590 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8591 if ((call_used_regs[i]
8592 /* Used to check !fixed_regs[i] here, but that isn't safe;
8593 fixed regs are still call-clobbered, and sched can get
8594 confused if they can "live across calls".
8595
8596 The frame pointer is always preserved across calls. The arg
8597 pointer is if it is fixed. The stack pointer usually is, unless
8598 RETURN_POPS_ARGS, in which case an explicit CLOBBER
8599 will be present. If we are generating PIC code, the PIC offset
8600 table register is preserved across calls. */
8601
8602 && i != STACK_POINTER_REGNUM
8603 && i != FRAME_POINTER_REGNUM
8604 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
8605 && i != HARD_FRAME_POINTER_REGNUM
8606 #endif
8607 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
8608 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
8609 #endif
8610 #if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
8611 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
8612 #endif
8613 )
8614 || global_regs[i])
8615 SET_HARD_REG_BIT (regs_invalidated_by_call, i);
8616
8617 /* Loop over basic blocks.
8618 Compute the maximum number of qty's needed for each basic block
8619 (which is 2 for each SET). */
8620 insn = f;
8621 while (insn)
8622 {
8623 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
8624 flag_cse_skip_blocks);
8625
8626 /* If this basic block was already processed or has no sets, skip it. */
8627 if (val.nsets == 0 || GET_MODE (insn) == QImode)
8628 {
8629 PUT_MODE (insn, VOIDmode);
8630 insn = (val.last ? NEXT_INSN (val.last) : 0);
8631 val.path_size = 0;
8632 continue;
8633 }
8634
8635 cse_basic_block_start = val.low_cuid;
8636 cse_basic_block_end = val.high_cuid;
8637 max_qty = val.nsets * 2;
8638
8639 if (file)
8640 fprintf (file, ";; Processing block from %d to %d, %d sets.\n",
8641 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
8642 val.nsets);
8643
8644 /* Make MAX_QTY bigger to give us room to optimize
8645 past the end of this basic block, if that should prove useful. */
8646 if (max_qty < 500)
8647 max_qty = 500;
8648
8649 max_qty += max_reg;
8650
8651 /* If this basic block is being extended by following certain jumps,
8652 (see `cse_end_of_basic_block'), we reprocess the code from the start.
8653 Otherwise, we start after this basic block. */
8654 if (val.path_size > 0)
8655 cse_basic_block (insn, val.last, val.path, 0);
8656 else
8657 {
8658 int old_cse_jumps_altered = cse_jumps_altered;
8659 rtx temp;
8660
8661 /* When cse changes a conditional jump to an unconditional
8662 jump, we want to reprocess the block, since it will give
8663 us a new branch path to investigate. */
8664 cse_jumps_altered = 0;
8665 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
8666 if (cse_jumps_altered == 0
8667 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
8668 insn = temp;
8669
8670 cse_jumps_altered |= old_cse_jumps_altered;
8671 }
8672
8673 #ifdef USE_C_ALLOCA
8674 alloca (0);
8675 #endif
8676 }
8677
8678 /* Tell refers_to_mem_p that qty_const info is not available. */
8679 qty_const = 0;
8680
8681 if (max_elements_made < n_elements_made)
8682 max_elements_made = n_elements_made;
8683
8684 return cse_jumps_altered || recorded_label_ref;
8685 }
8686
8687 /* Process a single basic block. FROM and TO are the limits of the basic
8688 block. NEXT_BRANCH points to the branch path when following jumps or
8689 a null path when not following jumps.
8690
8691 AROUND_LOOP is non-zero if we are to try to cse around to the start of a
8692 loop. This is true when we are being called for the last time on a
8693 block and this CSE pass is before loop.c. */
8694
8695 static rtx
8696 cse_basic_block (from, to, next_branch, around_loop)
8697 register rtx from, to;
8698 struct branch_path *next_branch;
8699 int around_loop;
8700 {
8701 register rtx insn;
8702 int to_usage = 0;
8703 rtx libcall_insn = NULL_RTX;
8704 int num_insns = 0;
8705
8706 /* Each of these arrays is undefined before max_reg, so only allocate
8707 the space actually needed and adjust the start below. */
8708
8709 qty_first_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8710 qty_last_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8711 qty_mode = (enum machine_mode *) alloca ((max_qty - max_reg) * sizeof (enum machine_mode));
8712 qty_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8713 qty_const_insn = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8714 qty_comparison_code
8715 = (enum rtx_code *) alloca ((max_qty - max_reg) * sizeof (enum rtx_code));
8716 qty_comparison_qty = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8717 qty_comparison_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8718
8719 qty_first_reg -= max_reg;
8720 qty_last_reg -= max_reg;
8721 qty_mode -= max_reg;
8722 qty_const -= max_reg;
8723 qty_const_insn -= max_reg;
8724 qty_comparison_code -= max_reg;
8725 qty_comparison_qty -= max_reg;
8726 qty_comparison_const -= max_reg;
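
  /* The subtractions above bias each pointer so that it can be indexed
     directly by quantity number, and quantity numbers start at max_reg:
     with max_reg == 100, qty_mode[100] refers to element 0 of the block
     just allocated.  */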

  new_basic_block ();

  /* TO might be a label.  If so, protect it from being deleted.  */
  if (to != 0 && GET_CODE (to) == CODE_LABEL)
    ++LABEL_NUSES (to);

  for (insn = from; insn != to; insn = NEXT_INSN (insn))
    {
      register enum rtx_code code = GET_CODE (insn);
      int i;
      struct table_elt *p;

      /* If we have processed 1,000 insns, flush the hash table to
         avoid extreme quadratic behavior.  We must not include NOTEs
         in the count since there may be more of them when generating
         debugging information.  If we clear the table at different
         times, code generated with -g -O might differ from code
         generated with -O but not -g.

         ??? This is a real kludge and needs to be done some other way.
         Perhaps for 2.9.  */
      if (code != NOTE && num_insns++ > 1000)
        {
          for (i = 0; i < NBUCKETS; i++)
            for (p = table[i]; p; p = table[i])
              {
                /* Note that invalidate can remove elements
                   after P in the current hash chain.  */
                if (GET_CODE (p->exp) == REG)
                  invalidate (p->exp, p->mode);
                else
                  remove_from_table (p, i);
              }

          num_insns = 0;
        }

      /* See if this is a branch that is part of the path.  If so, and it is
         to be taken, do so.  */
      if (next_branch->branch == insn)
        {
          enum taken status = next_branch++->status;
          if (status != NOT_TAKEN)
            {
              if (status == TAKEN)
                record_jump_equiv (insn, 1);
              else
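                /* The path skips the block that this branch jumps around.
                   Anything set by the skipped insns must be invalidated,
                   since that block can still run when the branch falls
                   through.  */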
                invalidate_skipped_block (NEXT_INSN (insn));

              /* Set the last insn as the jump insn; it doesn't affect cc0.
                 Then follow this branch.  */
#ifdef HAVE_cc0
              prev_insn_cc0 = 0;
#endif
              prev_insn = insn;
              insn = JUMP_LABEL (insn);
              continue;
            }
        }

      if (GET_MODE (insn) == QImode)
        PUT_MODE (insn, VOIDmode);

      if (GET_RTX_CLASS (code) == 'i')
        {
          rtx p;

          /* Process notes first so we have all notes in canonical forms when
             looking for duplicate operations.  */

          if (REG_NOTES (insn))
            REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);

          /* Track when we are inside a LIBCALL block.  Inside such a block,
             we do not want to record destinations.  The last insn of a
             LIBCALL block is not considered to be part of the block, since
             its destination is the result of the block and hence should be
             recorded.  */
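          /* (A LIBCALL block is bracketed by notes: its first insn carries
             a REG_LIBCALL note pointing at the block's last insn, and that
             last insn carries a matching REG_RETVAL note pointing back at
             the first, which is what the two lookups below rely on.)  */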

          if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
            libcall_insn = XEXP (p, 0);
          else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
            libcall_insn = NULL_RTX;

          cse_insn (insn, libcall_insn);
        }

      /* If INSN is now an unconditional jump, skip to the end of our
         basic block by pretending that we just did the last insn in the
         basic block.  If we are jumping to the end of our block, show
         that we can have one usage of TO.  */

      if (simplejump_p (insn))
        {
          if (to == 0)
            return 0;

          if (JUMP_LABEL (insn) == to)
            to_usage = 1;

          /* Maybe TO was deleted because the jump is unconditional.
             If so, there is nothing left in this basic block.  */
          /* ??? Perhaps it would be smarter to set TO
             to whatever follows this insn,
             and pretend the basic block had always ended here.  */
          if (INSN_DELETED_P (to))
            break;

          insn = PREV_INSN (to);
        }

      /* See if it is ok to keep on going past the label
         which used to end our basic block.  Remember that we incremented
         the count of that label, so we decrement it here.  If we made
         a jump unconditional, TO_USAGE will be one; in that case, we don't
         want to count the use in that jump.  */

      if (to != 0 && NEXT_INSN (insn) == to
          && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
        {
          struct cse_basic_block_data val;
          rtx prev;

          insn = NEXT_INSN (to);

          if (LABEL_NUSES (to) == 0)
            insn = delete_insn (to);

          /* If TO was the last insn in the function, we are done.  */
          if (insn == 0)
            return 0;

          /* If TO was preceded by a BARRIER we are done with this block
             because it has no continuation.  */
          prev = prev_nonnote_insn (to);
          if (prev && GET_CODE (prev) == BARRIER)
            return insn;

          /* Find the end of the following block.  Note that we won't be
             following branches in this case.  */
          to_usage = 0;
          val.path_size = 0;
          cse_end_of_basic_block (insn, &val, 0, 0, 0);

          /* If the tables we allocated have enough space left
             to handle all the SETs in the next basic block,
             continue through it.  Otherwise, return,
             and that block will be scanned individually.  */
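          /* (For instance, with the minimum allocation of 500 quantities,
             if 450 are already in use and the next block contains 30 SETs,
             that block could need up to 60 more; 450 + 60 > 500, so we
             stop here and let it be scanned on its own.)  */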
          if (val.nsets * 2 + next_qty > max_qty)
            break;

          cse_basic_block_start = val.low_cuid;
          cse_basic_block_end = val.high_cuid;
          to = val.last;

          /* Prevent TO from being deleted if it is a label.  */
          if (to != 0 && GET_CODE (to) == CODE_LABEL)
            ++LABEL_NUSES (to);

          /* Back up so we process the first insn in the extension.  */
          insn = PREV_INSN (insn);
        }
    }

  if (next_qty > max_qty)
    abort ();

  /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
     the previous insn is the only insn that branches to the head of a loop,
     we can cse into the loop.  Don't do this if we changed the jump
     structure of a loop unless we aren't going to be following jumps.  */

  if ((cse_jumps_altered == 0
       || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
      && around_loop && to != 0
      && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
      && GET_CODE (PREV_INSN (to)) == JUMP_INSN
      && JUMP_LABEL (PREV_INSN (to)) != 0
      && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
    cse_around_loop (JUMP_LABEL (PREV_INSN (to)));

  return to ? NEXT_INSN (to) : 0;
}
\f
/* Count the number of times registers are used (not set) in X.
   COUNTS is an array in which we accumulate the count; INCR is how much
   we add to the count for each register usage.

   Don't count a usage of DEST, which is the SET_DEST of a SET which
   contains X in its SET_SRC.  This is because such a SET does not
   modify the liveness of DEST.  */
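/* For example, given the increment (set (reg 100) (plus (reg 100)
   (const_int 1))), the use of (reg 100) in the source is not counted
   when DEST is (reg 100); if nothing else reads the register, its count
   stays zero and delete_trivially_dead_insns below can delete the
   increment.  */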

static void
count_reg_usage (x, counts, dest, incr)
     rtx x;
     int *counts;
     rtx dest;
     int incr;
{
  enum rtx_code code;
  char *fmt;
  int i, j;

  if (x == 0)
    return;

  switch (code = GET_CODE (x))
    {
    case REG:
      if (x != dest)
        counts[REGNO (x)] += incr;
      return;

    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
         as being used.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
        count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
      return;

    case SET:
      /* Unless we are setting a REG, count everything in SET_DEST.  */
      if (GET_CODE (SET_DEST (x)) != REG)
        count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);

      /* If SRC has side-effects, then we can't delete this insn, so the
         usage of SET_DEST inside SRC counts.

         ??? Strictly-speaking, we might be preserving this insn
         because some other SET has side-effects, but that's hard
         to do and can't happen now.  */
      count_reg_usage (SET_SRC (x), counts,
                       side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
                       incr);
      return;

    case CALL_INSN:
      count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);

      /* ... falls through ...  */
    case INSN:
    case JUMP_INSN:
      count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);

      /* Things used in a REG_EQUAL note aren't dead since loop.c may try
         to use them.  */

      count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
      return;

    case EXPR_LIST:
    case INSN_LIST:
      if (REG_NOTE_KIND (x) == REG_EQUAL
          || (REG_NOTE_KIND (x) != REG_NONNEG
              && GET_CODE (XEXP (x, 0)) == USE))
        count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
      count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
      return;

    default:
      break;
    }

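  /* Scan the operands of anything else according to its rtx format string;
     e.g. a PLUS has format "ee", two rtx operands, each of which is walked
     with the same DEST exclusion.  */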
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        count_reg_usage (XEXP (x, i), counts, dest, incr);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
    }
}
\f
/* Scan all the insns and delete any that are dead; i.e., they store a
   register that is never used or they copy a register to itself.

   This is used to remove insns made obviously dead by cse, loop or other
   optimizations.  It improves the heuristics in loop.c since it won't try
   to move dead invariants out of loops or make givs for dead quantities.
   The remaining passes of the compilation are also sped up.  */
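/* For example, (set (reg 117) (reg 117)) is always trivially dead, and
   (set (reg 117) (reg 118)) is dead once COUNTS[117] is zero, provided
   the source has no side effects; the tests in the loop below spell
   this out.  */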

void
delete_trivially_dead_insns (insns, nreg)
     rtx insns;
     int nreg;
{
  int *counts = (int *) alloca (nreg * sizeof (int));
  rtx insn, prev;
#ifdef HAVE_cc0
  rtx tem;
#endif
  int i;
  int in_libcall = 0, dead_libcall = 0;

  /* First count the number of times each register is used.  */
  bzero ((char *) counts, sizeof (int) * nreg);
  for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
    count_reg_usage (insn, counts, NULL_RTX, 1);

  /* Go from the last insn to the first and delete insns that only set unused
     registers or copy a register to itself.  As we delete an insn, remove
     usage counts for registers it uses.  */
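  /* (Scanning backwards makes the deletions cascade: in a chain like
     "a = b; c = a;" where c is never read, deleting "c = a" drops a's
     count to zero, so "a = b" is deleted too when the loop reaches it.)  */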
  for (insn = prev_real_insn (get_last_insn ()); insn; insn = prev)
    {
      int live_insn = 0;
      rtx note;

      prev = prev_real_insn (insn);

      /* Don't delete any insns that are part of a libcall block unless
         we can delete the whole libcall block.

         Flow or loop might get confused if we did that.  Remember
         that we are scanning backwards.  */
      if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
        {
          in_libcall = 1;
          live_insn = 1;
          dead_libcall = 0;

          /* See if there's a REG_EQUAL note on this insn and try to
             replace the source with the REG_EQUAL expression.

             We assume that insns with REG_RETVALs can only be reg->reg
             copies at this point.  */
          note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
          if (note)
            {
              rtx set = single_set (insn);
              if (set
                  && validate_change (insn, &SET_SRC (set), XEXP (note, 0), 0))
                {
                  remove_note (insn,
                               find_reg_note (insn, REG_RETVAL, NULL_RTX));
                  dead_libcall = 1;
                }
            }
        }
      else if (in_libcall)
        live_insn = ! dead_libcall;
      else if (GET_CODE (PATTERN (insn)) == SET)
        {
          if (GET_CODE (SET_DEST (PATTERN (insn))) == REG
              && SET_DEST (PATTERN (insn)) == SET_SRC (PATTERN (insn)))
            ;

#ifdef HAVE_cc0
          else if (GET_CODE (SET_DEST (PATTERN (insn))) == CC0
                   && ! side_effects_p (SET_SRC (PATTERN (insn)))
                   && ((tem = next_nonnote_insn (insn)) == 0
                       || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
                       || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
            ;
#endif
          else if (GET_CODE (SET_DEST (PATTERN (insn))) != REG
                   || REGNO (SET_DEST (PATTERN (insn))) < FIRST_PSEUDO_REGISTER
                   || counts[REGNO (SET_DEST (PATTERN (insn)))] != 0
                   || side_effects_p (SET_SRC (PATTERN (insn))))
            live_insn = 1;
        }
      else if (GET_CODE (PATTERN (insn)) == PARALLEL)
        for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
          {
            rtx elt = XVECEXP (PATTERN (insn), 0, i);

            if (GET_CODE (elt) == SET)
              {
                if (GET_CODE (SET_DEST (elt)) == REG
                    && SET_DEST (elt) == SET_SRC (elt))
                  ;

#ifdef HAVE_cc0
                else if (GET_CODE (SET_DEST (elt)) == CC0
                         && ! side_effects_p (SET_SRC (elt))
                         && ((tem = next_nonnote_insn (insn)) == 0
                             || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
                             || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
                  ;
#endif
                else if (GET_CODE (SET_DEST (elt)) != REG
                         || REGNO (SET_DEST (elt)) < FIRST_PSEUDO_REGISTER
                         || counts[REGNO (SET_DEST (elt))] != 0
                         || side_effects_p (SET_SRC (elt)))
                  live_insn = 1;
              }
            else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
              live_insn = 1;
          }
      else
        live_insn = 1;

      /* If this is a dead insn, delete it and show that the registers
         it uses are no longer being used.  */

      if (! live_insn)
        {
          count_reg_usage (insn, counts, NULL_RTX, -1);
          delete_insn (insn);
        }

      if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
        {
          in_libcall = 0;
          dead_libcall = 0;
        }
    }
}