* cse.c (delete_trivially_dead_insn): Don't iterate.
[gcc.git] / gcc / cse.c
1 /* Common subexpression elimination for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 /* stdio.h must precede rtl.h for FFS. */
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "regs.h"
31 #include "basic-block.h"
32 #include "flags.h"
33 #include "real.h"
34 #include "insn-config.h"
35 #include "recog.h"
36 #include "function.h"
37 #include "expr.h"
38 #include "toplev.h"
39 #include "output.h"
40 #include "ggc.h"
41 #include "timevar.h"
42 #include "except.h"
43 #include "target.h"
44 #include "params.h"
45 #include "rtlhooks-def.h"
46
47 /* The basic idea of common subexpression elimination is to go
48 through the code, keeping a record of expressions that would
49 have the same value at the current scan point, and replacing
50 expressions encountered with the cheapest equivalent expression.
51
52 It is too complicated to keep track of the different possibilities
53 when control paths merge in this code; so, at each label, we forget all
54 that is known and start fresh. This can be described as processing each
55 extended basic block separately. We have a separate pass to perform
56 global CSE.
57
58 Note CSE can turn a conditional or computed jump into a nop or
59 an unconditional jump. When this occurs we arrange to run the jump
60 optimizer after CSE to delete the unreachable code.
61
62 We use two data structures to record the equivalent expressions:
63 a hash table for most expressions, and a vector of "quantity
64 numbers" to record equivalent (pseudo) registers.
65
66 The use of the special data structure for registers is desirable
67 because it is faster. It is possible because registers references
68 contain a fairly small number, the register number, taken from
69 a contiguously allocated series, and two register references are
70 identical if they have the same number. General expressions
71 do not have any such thing, so the only way to retrieve the
72 information recorded on an expression other than a register
73 is to keep it in a hash table.
74
75 Registers and "quantity numbers":
76
77 At the start of each basic block, all of the (hardware and pseudo)
78 registers used in the function are given distinct quantity
79 numbers to indicate their contents. During scan, when the code
80 copies one register into another, we copy the quantity number.
81 When a register is loaded in any other way, we allocate a new
82 quantity number to describe the value generated by this operation.
83 `reg_qty' records what quantity a register is currently thought
84 of as containing.
85
86 All real quantity numbers are greater than or equal to zero.
87 If register N has not been assigned a quantity, reg_qty[N] will
88 equal -N - 1, which is always negative.
89
90 Quantity numbers below zero do not exist and none of the `qty_table'
91 entries should be referenced with a negative index.
92
93 We also maintain a bidirectional chain of registers for each
94 quantity number. The `qty_table` members `first_reg' and `last_reg',
95 and `reg_eqv_table' members `next' and `prev' hold these chains.
96
97 The first register in a chain is the one whose lifespan is least local.
98 Among equals, it is the one that was seen first.
99 We replace any equivalent register with that one.
100
101 If two registers have the same quantity number, it must be true that
102 REG expressions with qty_table `mode' must be in the hash table for both
103 registers and must be in the same class.
104
105 The converse is not true. Since hard registers may be referenced in
106 any mode, two REG expressions might be equivalent in the hash table
107 but not have the same quantity number if the quantity number of one
108 of the registers is not the same mode as those expressions.
109
110 Constants and quantity numbers
111
112 When a quantity has a known constant value, that value is stored
113 in the appropriate qty_table `const_rtx'. This is in addition to
114 putting the constant in the hash table as is usual for non-regs.
115
116 Whether a reg or a constant is preferred is determined by the configuration
117 macro CONST_COSTS and will often depend on the constant value. In any
118 event, expressions containing constants can be simplified, by fold_rtx.
119
120 When a quantity has a known nearly constant value (such as an address
121 of a stack slot), that value is stored in the appropriate qty_table
122 `const_rtx'.
123
124 Integer constants don't have a machine mode. However, cse
125 determines the intended machine mode from the destination
126 of the instruction that moves the constant. The machine mode
127 is recorded in the hash table along with the actual RTL
128 constant expression so that different modes are kept separate.
129
130 Other expressions:
131
132 To record known equivalences among expressions in general
133 we use a hash table called `table'. It has a fixed number of buckets
134 that contain chains of `struct table_elt' elements for expressions.
135 These chains connect the elements whose expressions have the same
136 hash codes.
137
138 Other chains through the same elements connect the elements which
139 currently have equivalent values.
140
141 Register references in an expression are canonicalized before hashing
142 the expression. This is done using `reg_qty' and qty_table `first_reg'.
143 The hash code of a register reference is computed using the quantity
144 number, not the register number.
145
146 When the value of an expression changes, it is necessary to remove from the
147 hash table not just that expression but all expressions whose values
148 could be different as a result.
149
150 1. If the value changing is in memory, except in special cases
151 ANYTHING referring to memory could be changed. That is because
152 nobody knows where a pointer does not point.
153 The function `invalidate_memory' removes what is necessary.
154
155 The special cases are when the address is constant or is
156 a constant plus a fixed register such as the frame pointer
157 or a static chain pointer. When such addresses are stored in,
158 we can tell exactly which other such addresses must be invalidated
159 due to overlap. `invalidate' does this.
160 All expressions that refer to non-constant
161 memory addresses are also invalidated. `invalidate_memory' does this.
162
163 2. If the value changing is a register, all expressions
164 containing references to that register, and only those,
165 must be removed.
166
167 Because searching the entire hash table for expressions that contain
168 a register is very slow, we try to figure out when it isn't necessary.
169 Precisely, this is necessary only when expressions have been
170 entered in the hash table using this register, and then the value has
171 changed, and then another expression wants to be added to refer to
172 the register's new value. This sequence of circumstances is rare
173 within any one basic block.
174
175 The vectors `reg_tick' and `reg_in_table' are used to detect this case.
176 reg_tick[i] is incremented whenever a value is stored in register i.
177 reg_in_table[i] holds -1 if no references to register i have been
178 entered in the table; otherwise, it contains the value reg_tick[i] had
179 when the references were entered. If we want to enter a reference
180 and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
181 Until we want to enter a new entry, the mere fact that the two vectors
182 don't match makes the entries be ignored if anyone tries to match them.
183
184 Registers themselves are entered in the hash table as well as in
185 the equivalent-register chains. However, the vectors `reg_tick'
186 and `reg_in_table' do not apply to expressions which are simple
187 register references. These expressions are removed from the table
188 immediately when they become invalid, and this can be done even if
189 we do not immediately search for all the expressions that refer to
190 the register.
191
192 A CLOBBER rtx in an instruction invalidates its operand for further
193 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
194 invalidates everything that resides in memory.
195
196 Related expressions:
197
198 Constant expressions that differ only by an additive integer
199 are called related. When a constant expression is put in
200 the table, the related expression with no constant term
201 is also entered. These are made to point at each other
202 so that it is possible to find out if there exists any
203 register equivalent to an expression related to a given expression. */
204
205 /* Length of qty_table vector. We know in advance we will not need
206 a quantity number this big. */
207
208 static int max_qty;
209
210 /* Next quantity number to be allocated.
211 This is 1 + the largest number needed so far. */
212
213 static int next_qty;
214
215 /* Per-qty information tracking.
216
217 `first_reg' and `last_reg' track the head and tail of the
218 chain of registers which currently contain this quantity.
219
220 `mode' contains the machine mode of this quantity.
221
222 `const_rtx' holds the rtx of the constant value of this
223 quantity, if known. A summations of the frame/arg pointer
224 and a constant can also be entered here. When this holds
225 a known value, `const_insn' is the insn which stored the
226 constant value.
227
228 `comparison_{code,const,qty}' are used to track when a
229 comparison between a quantity and some constant or register has
230 been passed. In such a case, we know the results of the comparison
231 in case we see it again. These members record a comparison that
232 is known to be true. `comparison_code' holds the rtx code of such
233 a comparison, else it is set to UNKNOWN and the other two
234 comparison members are undefined. `comparison_const' holds
235 the constant being compared against, or zero if the comparison
236 is not against a constant. `comparison_qty' holds the quantity
237 being compared against when the result is known. If the comparison
238 is not with a register, `comparison_qty' is -1. */
239
240 struct qty_table_elem
241 {
242 rtx const_rtx;
243 rtx const_insn;
244 rtx comparison_const;
245 int comparison_qty;
246 unsigned int first_reg, last_reg;
247 /* The sizes of these fields should match the sizes of the
248 code and mode fields of struct rtx_def (see rtl.h). */
249 ENUM_BITFIELD(rtx_code) comparison_code : 16;
250 ENUM_BITFIELD(machine_mode) mode : 8;
251 };
252
253 /* The table of all qtys, indexed by qty number. */
254 static struct qty_table_elem *qty_table;
255
256 /* Structure used to pass arguments via for_each_rtx to function
257 cse_change_cc_mode. */
258 struct change_cc_mode_args
259 {
260 rtx insn;
261 rtx newreg;
262 };
263
264 #ifdef HAVE_cc0
265 /* For machines that have a CC0, we do not record its value in the hash
266 table since its use is guaranteed to be the insn immediately following
267 its definition and any other insn is presumed to invalidate it.
268
269 Instead, we store below the value last assigned to CC0. If it should
270 happen to be a constant, it is stored in preference to the actual
271 assigned value. In case it is a constant, we store the mode in which
272 the constant should be interpreted. */
273
274 static rtx prev_insn_cc0;
275 static enum machine_mode prev_insn_cc0_mode;
276
277 /* Previous actual insn. 0 if at first insn of basic block. */
278
279 static rtx prev_insn;
280 #endif
281
282 /* Insn being scanned. */
283
284 static rtx this_insn;
285
286 /* Index by register number, gives the number of the next (or
287 previous) register in the chain of registers sharing the same
288 value.
289
290 Or -1 if this register is at the end of the chain.
291
292 If reg_qty[N] == N, reg_eqv_table[N].next is undefined. */
293
294 /* Per-register equivalence chain. */
295 struct reg_eqv_elem
296 {
297 int next, prev;
298 };
299
300 /* The table of all register equivalence chains. */
301 static struct reg_eqv_elem *reg_eqv_table;
302
303 struct cse_reg_info
304 {
305 /* Next in hash chain. */
306 struct cse_reg_info *hash_next;
307
308 /* The next cse_reg_info structure in the free or used list. */
309 struct cse_reg_info *next;
310
311 /* Search key */
312 unsigned int regno;
313
314 /* The quantity number of the register's current contents. */
315 int reg_qty;
316
317 /* The number of times the register has been altered in the current
318 basic block. */
319 int reg_tick;
320
321 /* The REG_TICK value at which rtx's containing this register are
322 valid in the hash table. If this does not equal the current
323 reg_tick value, such expressions existing in the hash table are
324 invalid. */
325 int reg_in_table;
326
327 /* The SUBREG that was set when REG_TICK was last incremented. Set
328 to -1 if the last store was to the whole register, not a subreg. */
329 unsigned int subreg_ticked;
330 };
331
332 /* We maintain a linked list of cse_reg_info instances, which is
333 partitioned into two pieces. The first part, pointed to by
334 cse_reg_info_list, is a list of those entries that are in use. The
335 second part, pointed to by cse_reg_info_list_free, is a list of
336 those entries that are not in use.
337
338 We combine these two parts into one linked list for efficiency.
339 Specifically, when we take an element from the second part and want
340 to move it to the first part, all we have to do is move the pointer
341 cse_reg_info_list_free to the next element. Also, if we wish to
342 move all elements into the second part, we just have to move the
343 pointer to the first element of the list. */
344
345 /* A linked list of cse_reg_info entries that have been allocated so
346 far. */
347 static struct cse_reg_info *cse_reg_info_list;
348
349 /* A pointer to the first unused entry in the above linked list. */
350 static struct cse_reg_info *cse_reg_info_list_free;
351
352 /* A mapping from registers to cse_reg_info data structures. */
353 #define REGHASH_SHIFT 7
354 #define REGHASH_SIZE (1 << REGHASH_SHIFT)
355 #define REGHASH_MASK (REGHASH_SIZE - 1)
356 static struct cse_reg_info *reg_hash[REGHASH_SIZE];
357
358 #define REGHASH_FN(REGNO) \
359 (((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)
360
361 /* The last lookup we did into the cse_reg_info_tree. This allows us
362 to cache repeated lookups. */
363 static unsigned int cached_regno;
364 static struct cse_reg_info *cached_cse_reg_info;
365
366 /* A HARD_REG_SET containing all the hard registers for which there is
367 currently a REG expression in the hash table. Note the difference
368 from the above variables, which indicate if the REG is mentioned in some
369 expression in the table. */
370
371 static HARD_REG_SET hard_regs_in_table;
372
373 /* CUID of insn that starts the basic block currently being cse-processed. */
374
375 static int cse_basic_block_start;
376
377 /* CUID of insn that ends the basic block currently being cse-processed. */
378
379 static int cse_basic_block_end;
380
381 /* Vector mapping INSN_UIDs to cuids.
382 The cuids are like uids but increase monotonically always.
383 We use them to see whether a reg is used outside a given basic block. */
384
385 static int *uid_cuid;
386
387 /* Highest UID in UID_CUID. */
388 static int max_uid;
389
390 /* Get the cuid of an insn. */
391
392 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
393
394 /* Nonzero if this pass has made changes, and therefore it's
395 worthwhile to run the garbage collector. */
396
397 static int cse_altered;
398
399 /* Nonzero if cse has altered conditional jump insns
400 in such a way that jump optimization should be redone. */
401
402 static int cse_jumps_altered;
403
404 /* Nonzero if we put a LABEL_REF into the hash table for an INSN without a
405 REG_LABEL, we have to rerun jump after CSE to put in the note. */
406 static int recorded_label_ref;
407
408 /* canon_hash stores 1 in do_not_record
409 if it notices a reference to CC0, PC, or some other volatile
410 subexpression. */
411
412 static int do_not_record;
413
414 /* canon_hash stores 1 in hash_arg_in_memory
415 if it notices a reference to memory within the expression being hashed. */
416
417 static int hash_arg_in_memory;
418
419 /* The hash table contains buckets which are chains of `struct table_elt's,
420 each recording one expression's information.
421 That expression is in the `exp' field.
422
423 The canon_exp field contains a canonical (from the point of view of
424 alias analysis) version of the `exp' field.
425
426 Those elements with the same hash code are chained in both directions
427 through the `next_same_hash' and `prev_same_hash' fields.
428
429 Each set of expressions with equivalent values
430 are on a two-way chain through the `next_same_value'
431 and `prev_same_value' fields, and all point with
432 the `first_same_value' field at the first element in
433 that chain. The chain is in order of increasing cost.
434 Each element's cost value is in its `cost' field.
435
436 The `in_memory' field is nonzero for elements that
437 involve any reference to memory. These elements are removed
438 whenever a write is done to an unidentified location in memory.
439 To be safe, we assume that a memory address is unidentified unless
440 the address is either a symbol constant or a constant plus
441 the frame pointer or argument pointer.
442
443 The `related_value' field is used to connect related expressions
444 (that differ by adding an integer).
445 The related expressions are chained in a circular fashion.
446 `related_value' is zero for expressions for which this
447 chain is not useful.
448
449 The `cost' field stores the cost of this element's expression.
450 The `regcost' field stores the value returned by approx_reg_cost for
451 this element's expression.
452
453 The `is_const' flag is set if the element is a constant (including
454 a fixed address).
455
456 The `flag' field is used as a temporary during some search routines.
457
458 The `mode' field is usually the same as GET_MODE (`exp'), but
459 if `exp' is a CONST_INT and has no machine mode then the `mode'
460 field is the mode it was being used as. Each constant is
461 recorded separately for each mode it is used with. */
462
463 struct table_elt
464 {
465 rtx exp;
466 rtx canon_exp;
467 struct table_elt *next_same_hash;
468 struct table_elt *prev_same_hash;
469 struct table_elt *next_same_value;
470 struct table_elt *prev_same_value;
471 struct table_elt *first_same_value;
472 struct table_elt *related_value;
473 int cost;
474 int regcost;
475 /* The size of this field should match the size
476 of the mode field of struct rtx_def (see rtl.h). */
477 ENUM_BITFIELD(machine_mode) mode : 8;
478 char in_memory;
479 char is_const;
480 char flag;
481 };
482
483 /* We don't want a lot of buckets, because we rarely have very many
484 things stored in the hash table, and a lot of buckets slows
485 down a lot of loops that happen frequently. */
486 #define HASH_SHIFT 5
487 #define HASH_SIZE (1 << HASH_SHIFT)
488 #define HASH_MASK (HASH_SIZE - 1)
489
490 /* Compute hash code of X in mode M. Special-case case where X is a pseudo
491 register (hard registers may require `do_not_record' to be set). */
492
493 #define HASH(X, M) \
494 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
495 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
496 : canon_hash (X, M)) & HASH_MASK)
497
498 /* Like HASH, but without side-effects. */
499 #define SAFE_HASH(X, M) \
500 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
501 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
502 : safe_hash (X, M)) & HASH_MASK)
503
504 /* Determine whether register number N is considered a fixed register for the
505 purpose of approximating register costs.
506 It is desirable to replace other regs with fixed regs, to reduce need for
507 non-fixed hard regs.
508 A reg wins if it is either the frame pointer or designated as fixed. */
509 #define FIXED_REGNO_P(N) \
510 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
511 || fixed_regs[N] || global_regs[N])
512
513 /* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
514 hard registers and pointers into the frame are the cheapest with a cost
515 of 0. Next come pseudos with a cost of one and other hard registers with
516 a cost of 2. Aside from these special cases, call `rtx_cost'. */
517
518 #define CHEAP_REGNO(N) \
519 (REGNO_PTR_FRAME_P(N) \
520 || (HARD_REGISTER_NUM_P (N) \
521 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
522
523 #define COST(X) (REG_P (X) ? 0 : notreg_cost (X, SET))
524 #define COST_IN(X,OUTER) (REG_P (X) ? 0 : notreg_cost (X, OUTER))
525
526 /* Get the info associated with register N. */
527
528 #define GET_CSE_REG_INFO(N) \
529 (((N) == cached_regno && cached_cse_reg_info) \
530 ? cached_cse_reg_info : get_cse_reg_info ((N)))
531
532 /* Get the number of times this register has been updated in this
533 basic block. */
534
535 #define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)
536
537 /* Get the point at which REG was recorded in the table. */
538
539 #define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)
540
541 /* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
542 SUBREG). */
543
544 #define SUBREG_TICKED(N) ((GET_CSE_REG_INFO (N))->subreg_ticked)
545
546 /* Get the quantity number for REG. */
547
548 #define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)
549
550 /* Determine if the quantity number for register X represents a valid index
551 into the qty_table. */
552
553 #define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)
554
555 static struct table_elt *table[HASH_SIZE];
556
557 /* Chain of `struct table_elt's made so far for this function
558 but currently removed from the table. */
559
560 static struct table_elt *free_element_chain;
561
562 /* Set to the cost of a constant pool reference if one was found for a
563 symbolic constant. If this was found, it means we should try to
564 convert constants into constant pool entries if they don't fit in
565 the insn. */
566
567 static int constant_pool_entries_cost;
568 static int constant_pool_entries_regcost;
569
570 /* This data describes a block that will be processed by cse_basic_block. */
571
572 struct cse_basic_block_data
573 {
574 /* Lowest CUID value of insns in block. */
575 int low_cuid;
576 /* Highest CUID value of insns in block. */
577 int high_cuid;
578 /* Total number of SETs in block. */
579 int nsets;
580 /* Last insn in the block. */
581 rtx last;
582 /* Size of current branch path, if any. */
583 int path_size;
584 /* Current branch path, indicating which branches will be taken. */
585 struct branch_path
586 {
587 /* The branch insn. */
588 rtx branch;
589 /* Whether it should be taken or not. AROUND is the same as taken
590 except that it is used when the destination label is not preceded
591 by a BARRIER. */
592 enum taken {PATH_TAKEN, PATH_NOT_TAKEN, PATH_AROUND} status;
593 } *path;
594 };
595
596 static bool fixed_base_plus_p (rtx x);
597 static int notreg_cost (rtx, enum rtx_code);
598 static int approx_reg_cost_1 (rtx *, void *);
599 static int approx_reg_cost (rtx);
600 static int preferable (int, int, int, int);
601 static void new_basic_block (void);
602 static void make_new_qty (unsigned int, enum machine_mode);
603 static void make_regs_eqv (unsigned int, unsigned int);
604 static void delete_reg_equiv (unsigned int);
605 static int mention_regs (rtx);
606 static int insert_regs (rtx, struct table_elt *, int);
607 static void remove_from_table (struct table_elt *, unsigned);
608 static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
609 static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
610 static rtx lookup_as_function (rtx, enum rtx_code);
611 static struct table_elt *insert (rtx, struct table_elt *, unsigned,
612 enum machine_mode);
613 static void merge_equiv_classes (struct table_elt *, struct table_elt *);
614 static void invalidate (rtx, enum machine_mode);
615 static int cse_rtx_varies_p (rtx, int);
616 static void remove_invalid_refs (unsigned int);
617 static void remove_invalid_subreg_refs (unsigned int, unsigned int,
618 enum machine_mode);
619 static void rehash_using_reg (rtx);
620 static void invalidate_memory (void);
621 static void invalidate_for_call (void);
622 static rtx use_related_value (rtx, struct table_elt *);
623
624 static inline unsigned canon_hash (rtx, enum machine_mode);
625 static inline unsigned safe_hash (rtx, enum machine_mode);
626 static unsigned hash_rtx_string (const char *);
627
628 static rtx canon_reg (rtx, rtx);
629 static void find_best_addr (rtx, rtx *, enum machine_mode);
630 static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
631 enum machine_mode *,
632 enum machine_mode *);
633 static rtx fold_rtx (rtx, rtx);
634 static rtx equiv_constant (rtx);
635 static void record_jump_equiv (rtx, int);
636 static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
637 int);
638 static void cse_insn (rtx, rtx);
639 static void cse_end_of_basic_block (rtx, struct cse_basic_block_data *,
640 int, int);
641 static int addr_affects_sp_p (rtx);
642 static void invalidate_from_clobbers (rtx);
643 static rtx cse_process_notes (rtx, rtx);
644 static void invalidate_skipped_set (rtx, rtx, void *);
645 static void invalidate_skipped_block (rtx);
646 static rtx cse_basic_block (rtx, rtx, struct branch_path *);
647 static void count_reg_usage (rtx, int *, int);
648 static int check_for_label_ref (rtx *, void *);
649 extern void dump_class (struct table_elt*);
650 static struct cse_reg_info * get_cse_reg_info (unsigned int);
651 static int check_dependence (rtx *, void *);
652
653 static void flush_hash_table (void);
654 static bool insn_live_p (rtx, int *);
655 static bool set_live_p (rtx, rtx, int *);
656 static bool dead_libcall_p (rtx, int *);
657 static int cse_change_cc_mode (rtx *, void *);
658 static void cse_change_cc_mode_insn (rtx, rtx);
659 static void cse_change_cc_mode_insns (rtx, rtx, rtx);
660 static enum machine_mode cse_cc_succs (basic_block, rtx, rtx, bool);
661 \f
662
663 #undef RTL_HOOKS_GEN_LOWPART
664 #define RTL_HOOKS_GEN_LOWPART gen_lowpart_if_possible
665
666 static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;
667 \f
668 /* Nonzero if X has the form (PLUS frame-pointer integer). We check for
669 virtual regs here because the simplify_*_operation routines are called
670 by integrate.c, which is called before virtual register instantiation. */
671
672 static bool
673 fixed_base_plus_p (rtx x)
674 {
675 switch (GET_CODE (x))
676 {
677 case REG:
678 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
679 return true;
680 if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
681 return true;
682 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
683 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
684 return true;
685 return false;
686
687 case PLUS:
688 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
689 return false;
690 return fixed_base_plus_p (XEXP (x, 0));
691
692 default:
693 return false;
694 }
695 }
696
697 /* Dump the expressions in the equivalence class indicated by CLASSP.
698 This function is used only for debugging. */
699 void
700 dump_class (struct table_elt *classp)
701 {
702 struct table_elt *elt;
703
704 fprintf (stderr, "Equivalence chain for ");
705 print_rtl (stderr, classp->exp);
706 fprintf (stderr, ": \n");
707
708 for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
709 {
710 print_rtl (stderr, elt->exp);
711 fprintf (stderr, "\n");
712 }
713 }
714
715 /* Subroutine of approx_reg_cost; called through for_each_rtx. */
716
717 static int
718 approx_reg_cost_1 (rtx *xp, void *data)
719 {
720 rtx x = *xp;
721 int *cost_p = data;
722
723 if (x && REG_P (x))
724 {
725 unsigned int regno = REGNO (x);
726
727 if (! CHEAP_REGNO (regno))
728 {
729 if (regno < FIRST_PSEUDO_REGISTER)
730 {
731 if (SMALL_REGISTER_CLASSES)
732 return 1;
733 *cost_p += 2;
734 }
735 else
736 *cost_p += 1;
737 }
738 }
739
740 return 0;
741 }
742
743 /* Return an estimate of the cost of the registers used in an rtx.
744 This is mostly the number of different REG expressions in the rtx;
745 however for some exceptions like fixed registers we use a cost of
746 0. If any other hard register reference occurs, return MAX_COST. */
747
748 static int
749 approx_reg_cost (rtx x)
750 {
751 int cost = 0;
752
753 if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
754 return MAX_COST;
755
756 return cost;
757 }
758
759 /* Returns a canonical version of X for the address, from the point of view,
760 that all multiplications are represented as MULT instead of the multiply
761 by a power of 2 being represented as ASHIFT. */
762
763 static rtx
764 canon_for_address (rtx x)
765 {
766 enum rtx_code code;
767 enum machine_mode mode;
768 rtx new = 0;
769 int i;
770 const char *fmt;
771
772 if (!x)
773 return x;
774
775 code = GET_CODE (x);
776 mode = GET_MODE (x);
777
778 switch (code)
779 {
780 case ASHIFT:
781 if (GET_CODE (XEXP (x, 1)) == CONST_INT
782 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode)
783 && INTVAL (XEXP (x, 1)) >= 0)
784 {
785 new = canon_for_address (XEXP (x, 0));
786 new = gen_rtx_MULT (mode, new,
787 gen_int_mode ((HOST_WIDE_INT) 1
788 << INTVAL (XEXP (x, 1)),
789 mode));
790 }
791 break;
792 default:
793 break;
794
795 }
796 if (new)
797 return new;
798
799 /* Now recursively process each operand of this operation. */
800 fmt = GET_RTX_FORMAT (code);
801 for (i = 0; i < GET_RTX_LENGTH (code); i++)
802 if (fmt[i] == 'e')
803 {
804 new = canon_for_address (XEXP (x, i));
805 XEXP (x, i) = new;
806 }
807 return x;
808 }
809
810 /* Return a negative value if an rtx A, whose costs are given by COST_A
811 and REGCOST_A, is more desirable than an rtx B.
812 Return a positive value if A is less desirable, or 0 if the two are
813 equally good. */
814 static int
815 preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
816 {
817 /* First, get rid of cases involving expressions that are entirely
818 unwanted. */
819 if (cost_a != cost_b)
820 {
821 if (cost_a == MAX_COST)
822 return 1;
823 if (cost_b == MAX_COST)
824 return -1;
825 }
826
827 /* Avoid extending lifetimes of hardregs. */
828 if (regcost_a != regcost_b)
829 {
830 if (regcost_a == MAX_COST)
831 return 1;
832 if (regcost_b == MAX_COST)
833 return -1;
834 }
835
836 /* Normal operation costs take precedence. */
837 if (cost_a != cost_b)
838 return cost_a - cost_b;
839 /* Only if these are identical consider effects on register pressure. */
840 if (regcost_a != regcost_b)
841 return regcost_a - regcost_b;
842 return 0;
843 }
844
845 /* Internal function, to compute cost when X is not a register; called
846 from COST macro to keep it simple. */
847
848 static int
849 notreg_cost (rtx x, enum rtx_code outer)
850 {
851 return ((GET_CODE (x) == SUBREG
852 && REG_P (SUBREG_REG (x))
853 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
854 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
855 && (GET_MODE_SIZE (GET_MODE (x))
856 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
857 && subreg_lowpart_p (x)
858 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
859 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
860 ? 0
861 : rtx_cost (x, outer) * 2);
862 }
863
864 \f
865 static struct cse_reg_info *
866 get_cse_reg_info (unsigned int regno)
867 {
868 struct cse_reg_info **hash_head = &reg_hash[REGHASH_FN (regno)];
869 struct cse_reg_info *p;
870
871 for (p = *hash_head; p != NULL; p = p->hash_next)
872 if (p->regno == regno)
873 break;
874
875 if (p == NULL)
876 {
877 /* Get a new cse_reg_info structure. */
878 if (cse_reg_info_list_free)
879 {
880 p = cse_reg_info_list_free;
881 cse_reg_info_list_free = p->next;
882 }
883 else
884 {
885 p = xmalloc (sizeof (struct cse_reg_info));
886 p->next = cse_reg_info_list;
887 cse_reg_info_list = p;
888 }
889
890 /* Insert into hash table. */
891 p->hash_next = *hash_head;
892 *hash_head = p;
893
894 /* Initialize it. */
895 p->reg_tick = 1;
896 p->reg_in_table = -1;
897 p->subreg_ticked = -1;
898 p->reg_qty = -regno - 1;
899 p->regno = regno;
900 }
901
902 /* Cache this lookup; we tend to be looking up information about the
903 same register several times in a row. */
904 cached_regno = regno;
905 cached_cse_reg_info = p;
906
907 return p;
908 }
909
910 /* Clear the hash table and initialize each register with its own quantity,
911 for a new basic block. */
912
913 static void
914 new_basic_block (void)
915 {
916 int i;
917
918 next_qty = 0;
919
920 /* Clear out hash table state for this pass. */
921
922 memset (reg_hash, 0, sizeof reg_hash);
923
924 cse_reg_info_list_free = cse_reg_info_list;
925
926 cached_cse_reg_info = 0;
927
928 CLEAR_HARD_REG_SET (hard_regs_in_table);
929
930 /* The per-quantity values used to be initialized here, but it is
931 much faster to initialize each as it is made in `make_new_qty'. */
932
933 for (i = 0; i < HASH_SIZE; i++)
934 {
935 struct table_elt *first;
936
937 first = table[i];
938 if (first != NULL)
939 {
940 struct table_elt *last = first;
941
942 table[i] = NULL;
943
944 while (last->next_same_hash != NULL)
945 last = last->next_same_hash;
946
947 /* Now relink this hash entire chain into
948 the free element list. */
949
950 last->next_same_hash = free_element_chain;
951 free_element_chain = first;
952 }
953 }
954
955 #ifdef HAVE_cc0
956 prev_insn = 0;
957 prev_insn_cc0 = 0;
958 #endif
959 }
960
961 /* Say that register REG contains a quantity in mode MODE not in any
962 register before and initialize that quantity. */
963
964 static void
965 make_new_qty (unsigned int reg, enum machine_mode mode)
966 {
967 int q;
968 struct qty_table_elem *ent;
969 struct reg_eqv_elem *eqv;
970
971 gcc_assert (next_qty < max_qty);
972
973 q = REG_QTY (reg) = next_qty++;
974 ent = &qty_table[q];
975 ent->first_reg = reg;
976 ent->last_reg = reg;
977 ent->mode = mode;
978 ent->const_rtx = ent->const_insn = NULL_RTX;
979 ent->comparison_code = UNKNOWN;
980
981 eqv = &reg_eqv_table[reg];
982 eqv->next = eqv->prev = -1;
983 }
984
985 /* Make reg NEW equivalent to reg OLD.
986 OLD is not changing; NEW is. */
987
988 static void
989 make_regs_eqv (unsigned int new, unsigned int old)
990 {
991 unsigned int lastr, firstr;
992 int q = REG_QTY (old);
993 struct qty_table_elem *ent;
994
995 ent = &qty_table[q];
996
997 /* Nothing should become eqv until it has a "non-invalid" qty number. */
998 gcc_assert (REGNO_QTY_VALID_P (old));
999
1000 REG_QTY (new) = q;
1001 firstr = ent->first_reg;
1002 lastr = ent->last_reg;
1003
1004 /* Prefer fixed hard registers to anything. Prefer pseudo regs to other
1005 hard regs. Among pseudos, if NEW will live longer than any other reg
1006 of the same qty, and that is beyond the current basic block,
1007 make it the new canonical replacement for this qty. */
1008 if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
1009 /* Certain fixed registers might be of the class NO_REGS. This means
1010 that not only can they not be allocated by the compiler, but
1011 they cannot be used in substitutions or canonicalizations
1012 either. */
1013 && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
1014 && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
1015 || (new >= FIRST_PSEUDO_REGISTER
1016 && (firstr < FIRST_PSEUDO_REGISTER
1017 || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
1018 || (uid_cuid[REGNO_FIRST_UID (new)]
1019 < cse_basic_block_start))
1020 && (uid_cuid[REGNO_LAST_UID (new)]
1021 > uid_cuid[REGNO_LAST_UID (firstr)]))))))
1022 {
1023 reg_eqv_table[firstr].prev = new;
1024 reg_eqv_table[new].next = firstr;
1025 reg_eqv_table[new].prev = -1;
1026 ent->first_reg = new;
1027 }
1028 else
1029 {
1030 /* If NEW is a hard reg (known to be non-fixed), insert at end.
1031 Otherwise, insert before any non-fixed hard regs that are at the
1032 end. Registers of class NO_REGS cannot be used as an
1033 equivalent for anything. */
1034 while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
1035 && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
1036 && new >= FIRST_PSEUDO_REGISTER)
1037 lastr = reg_eqv_table[lastr].prev;
1038 reg_eqv_table[new].next = reg_eqv_table[lastr].next;
1039 if (reg_eqv_table[lastr].next >= 0)
1040 reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
1041 else
1042 qty_table[q].last_reg = new;
1043 reg_eqv_table[lastr].next = new;
1044 reg_eqv_table[new].prev = lastr;
1045 }
1046 }
1047
1048 /* Remove REG from its equivalence class. */
1049
1050 static void
1051 delete_reg_equiv (unsigned int reg)
1052 {
1053 struct qty_table_elem *ent;
1054 int q = REG_QTY (reg);
1055 int p, n;
1056
1057 /* If invalid, do nothing. */
1058 if (! REGNO_QTY_VALID_P (reg))
1059 return;
1060
1061 ent = &qty_table[q];
1062
1063 p = reg_eqv_table[reg].prev;
1064 n = reg_eqv_table[reg].next;
1065
1066 if (n != -1)
1067 reg_eqv_table[n].prev = p;
1068 else
1069 ent->last_reg = p;
1070 if (p != -1)
1071 reg_eqv_table[p].next = n;
1072 else
1073 ent->first_reg = n;
1074
1075 REG_QTY (reg) = -reg - 1;
1076 }
1077
1078 /* Remove any invalid expressions from the hash table
1079 that refer to any of the registers contained in expression X.
1080
1081 Make sure that newly inserted references to those registers
1082 as subexpressions will be considered valid.
1083
1084 mention_regs is not called when a register itself
1085 is being stored in the table.
1086
1087 Return 1 if we have done something that may have changed the hash code
1088 of X. */
1089
1090 static int
1091 mention_regs (rtx x)
1092 {
1093 enum rtx_code code;
1094 int i, j;
1095 const char *fmt;
1096 int changed = 0;
1097
1098 if (x == 0)
1099 return 0;
1100
1101 code = GET_CODE (x);
1102 if (code == REG)
1103 {
1104 unsigned int regno = REGNO (x);
1105 unsigned int endregno
1106 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
1107 : hard_regno_nregs[regno][GET_MODE (x)]);
1108 unsigned int i;
1109
1110 for (i = regno; i < endregno; i++)
1111 {
1112 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1113 remove_invalid_refs (i);
1114
1115 REG_IN_TABLE (i) = REG_TICK (i);
1116 SUBREG_TICKED (i) = -1;
1117 }
1118
1119 return 0;
1120 }
1121
1122 /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
1123 pseudo if they don't use overlapping words. We handle only pseudos
1124 here for simplicity. */
1125 if (code == SUBREG && REG_P (SUBREG_REG (x))
1126 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1127 {
1128 unsigned int i = REGNO (SUBREG_REG (x));
1129
1130 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1131 {
1132 /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
1133 the last store to this register really stored into this
1134 subreg, then remove the memory of this subreg.
1135 Otherwise, remove any memory of the entire register and
1136 all its subregs from the table. */
1137 if (REG_TICK (i) - REG_IN_TABLE (i) > 1
1138 || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
1139 remove_invalid_refs (i);
1140 else
1141 remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
1142 }
1143
1144 REG_IN_TABLE (i) = REG_TICK (i);
1145 SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
1146 return 0;
1147 }
1148
1149 /* If X is a comparison or a COMPARE and either operand is a register
1150 that does not have a quantity, give it one. This is so that a later
1151 call to record_jump_equiv won't cause X to be assigned a different
1152 hash code and not found in the table after that call.
1153
1154 It is not necessary to do this here, since rehash_using_reg can
1155 fix up the table later, but doing this here eliminates the need to
1156 call that expensive function in the most common case where the only
1157 use of the register is in the comparison. */
1158
1159 if (code == COMPARE || COMPARISON_P (x))
1160 {
1161 if (REG_P (XEXP (x, 0))
1162 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
1163 if (insert_regs (XEXP (x, 0), NULL, 0))
1164 {
1165 rehash_using_reg (XEXP (x, 0));
1166 changed = 1;
1167 }
1168
1169 if (REG_P (XEXP (x, 1))
1170 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
1171 if (insert_regs (XEXP (x, 1), NULL, 0))
1172 {
1173 rehash_using_reg (XEXP (x, 1));
1174 changed = 1;
1175 }
1176 }
1177
1178 fmt = GET_RTX_FORMAT (code);
1179 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1180 if (fmt[i] == 'e')
1181 changed |= mention_regs (XEXP (x, i));
1182 else if (fmt[i] == 'E')
1183 for (j = 0; j < XVECLEN (x, i); j++)
1184 changed |= mention_regs (XVECEXP (x, i, j));
1185
1186 return changed;
1187 }
1188
1189 /* Update the register quantities for inserting X into the hash table
1190 with a value equivalent to CLASSP.
1191 (If the class does not contain a REG, it is irrelevant.)
1192 If MODIFIED is nonzero, X is a destination; it is being modified.
1193 Note that delete_reg_equiv should be called on a register
1194 before insert_regs is done on that register with MODIFIED != 0.
1195
1196 Nonzero value means that elements of reg_qty have changed
1197 so X's hash code may be different. */
1198
1199 static int
1200 insert_regs (rtx x, struct table_elt *classp, int modified)
1201 {
1202 if (REG_P (x))
1203 {
1204 unsigned int regno = REGNO (x);
1205 int qty_valid;
1206
1207 /* If REGNO is in the equivalence table already but is of the
1208 wrong mode for that equivalence, don't do anything here. */
1209
1210 qty_valid = REGNO_QTY_VALID_P (regno);
1211 if (qty_valid)
1212 {
1213 struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];
1214
1215 if (ent->mode != GET_MODE (x))
1216 return 0;
1217 }
1218
1219 if (modified || ! qty_valid)
1220 {
1221 if (classp)
1222 for (classp = classp->first_same_value;
1223 classp != 0;
1224 classp = classp->next_same_value)
1225 if (REG_P (classp->exp)
1226 && GET_MODE (classp->exp) == GET_MODE (x))
1227 {
1228 make_regs_eqv (regno, REGNO (classp->exp));
1229 return 1;
1230 }
1231
1232 /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
1233 than REG_IN_TABLE to find out if there was only a single preceding
1234 invalidation - for the SUBREG - or another one, which would be
1235 for the full register. However, if we find here that REG_TICK
1236 indicates that the register is invalid, it means that it has
1237 been invalidated in a separate operation. The SUBREG might be used
1238 now (then this is a recursive call), or we might use the full REG
1239 now and a SUBREG of it later. So bump up REG_TICK so that
1240 mention_regs will do the right thing. */
1241 if (! modified
1242 && REG_IN_TABLE (regno) >= 0
1243 && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
1244 REG_TICK (regno)++;
1245 make_new_qty (regno, GET_MODE (x));
1246 return 1;
1247 }
1248
1249 return 0;
1250 }
1251
1252 /* If X is a SUBREG, we will likely be inserting the inner register in the
1253 table. If that register doesn't have an assigned quantity number at
1254 this point but does later, the insertion that we will be doing now will
1255 not be accessible because its hash code will have changed. So assign
1256 a quantity number now. */
1257
1258 else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
1259 && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1260 {
1261 insert_regs (SUBREG_REG (x), NULL, 0);
1262 mention_regs (x);
1263 return 1;
1264 }
1265 else
1266 return mention_regs (x);
1267 }
1268 \f
1269 /* Look in or update the hash table. */
1270
1271 /* Remove table element ELT from use in the table.
1272 HASH is its hash code, made using the HASH macro.
1273 It's an argument because often that is known in advance
1274 and we save much time not recomputing it. */
1275
1276 static void
1277 remove_from_table (struct table_elt *elt, unsigned int hash)
1278 {
1279 if (elt == 0)
1280 return;
1281
1282 /* Mark this element as removed. See cse_insn. */
1283 elt->first_same_value = 0;
1284
1285 /* Remove the table element from its equivalence class. */
1286
1287 {
1288 struct table_elt *prev = elt->prev_same_value;
1289 struct table_elt *next = elt->next_same_value;
1290
1291 if (next)
1292 next->prev_same_value = prev;
1293
1294 if (prev)
1295 prev->next_same_value = next;
1296 else
1297 {
1298 struct table_elt *newfirst = next;
1299 while (next)
1300 {
1301 next->first_same_value = newfirst;
1302 next = next->next_same_value;
1303 }
1304 }
1305 }
1306
1307 /* Remove the table element from its hash bucket. */
1308
1309 {
1310 struct table_elt *prev = elt->prev_same_hash;
1311 struct table_elt *next = elt->next_same_hash;
1312
1313 if (next)
1314 next->prev_same_hash = prev;
1315
1316 if (prev)
1317 prev->next_same_hash = next;
1318 else if (table[hash] == elt)
1319 table[hash] = next;
1320 else
1321 {
1322 /* This entry is not in the proper hash bucket. This can happen
1323 when two classes were merged by `merge_equiv_classes'. Search
1324 for the hash bucket that it heads. This happens only very
1325 rarely, so the cost is acceptable. */
1326 for (hash = 0; hash < HASH_SIZE; hash++)
1327 if (table[hash] == elt)
1328 table[hash] = next;
1329 }
1330 }
1331
1332 /* Remove the table element from its related-value circular chain. */
1333
1334 if (elt->related_value != 0 && elt->related_value != elt)
1335 {
1336 struct table_elt *p = elt->related_value;
1337
1338 while (p->related_value != elt)
1339 p = p->related_value;
1340 p->related_value = elt->related_value;
1341 if (p->related_value == p)
1342 p->related_value = 0;
1343 }
1344
1345 /* Now add it to the free element chain. */
1346 elt->next_same_hash = free_element_chain;
1347 free_element_chain = elt;
1348 }
1349
1350 /* Look up X in the hash table and return its table element,
1351 or 0 if X is not in the table.
1352
1353 MODE is the machine-mode of X, or if X is an integer constant
1354 with VOIDmode then MODE is the mode with which X will be used.
1355
1356 Here we are satisfied to find an expression whose tree structure
1357 looks like X. */
1358
1359 static struct table_elt *
1360 lookup (rtx x, unsigned int hash, enum machine_mode mode)
1361 {
1362 struct table_elt *p;
1363
1364 for (p = table[hash]; p; p = p->next_same_hash)
1365 if (mode == p->mode && ((x == p->exp && REG_P (x))
1366 || exp_equiv_p (x, p->exp, !REG_P (x), false)))
1367 return p;
1368
1369 return 0;
1370 }
1371
1372 /* Like `lookup' but don't care whether the table element uses invalid regs.
1373 Also ignore discrepancies in the machine mode of a register. */
1374
1375 static struct table_elt *
1376 lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode)
1377 {
1378 struct table_elt *p;
1379
1380 if (REG_P (x))
1381 {
1382 unsigned int regno = REGNO (x);
1383
1384 /* Don't check the machine mode when comparing registers;
1385 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1386 for (p = table[hash]; p; p = p->next_same_hash)
1387 if (REG_P (p->exp)
1388 && REGNO (p->exp) == regno)
1389 return p;
1390 }
1391 else
1392 {
1393 for (p = table[hash]; p; p = p->next_same_hash)
1394 if (mode == p->mode
1395 && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
1396 return p;
1397 }
1398
1399 return 0;
1400 }
1401
1402 /* Look for an expression equivalent to X and with code CODE.
1403 If one is found, return that expression. */
1404
1405 static rtx
1406 lookup_as_function (rtx x, enum rtx_code code)
1407 {
1408 struct table_elt *p
1409 = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));
1410
1411 /* If we are looking for a CONST_INT, the mode doesn't really matter, as
1412 long as we are narrowing. So if we looked in vain for a mode narrower
1413 than word_mode before, look for word_mode now. */
1414 if (p == 0 && code == CONST_INT
1415 && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
1416 {
1417 x = copy_rtx (x);
1418 PUT_MODE (x, word_mode);
1419 p = lookup (x, SAFE_HASH (x, VOIDmode), word_mode);
1420 }
1421
1422 if (p == 0)
1423 return 0;
1424
1425 for (p = p->first_same_value; p; p = p->next_same_value)
1426 if (GET_CODE (p->exp) == code
1427 /* Make sure this is a valid entry in the table. */
1428 && exp_equiv_p (p->exp, p->exp, 1, false))
1429 return p->exp;
1430
1431 return 0;
1432 }
1433
1434 /* Insert X in the hash table, assuming HASH is its hash code
1435 and CLASSP is an element of the class it should go in
1436 (or 0 if a new class should be made).
1437 It is inserted at the proper position to keep the class in
1438 the order cheapest first.
1439
1440 MODE is the machine-mode of X, or if X is an integer constant
1441 with VOIDmode then MODE is the mode with which X will be used.
1442
1443 For elements of equal cheapness, the most recent one
1444 goes in front, except that the first element in the list
1445 remains first unless a cheaper element is added. The order of
1446 pseudo-registers does not matter, as canon_reg will be called to
1447 find the cheapest when a register is retrieved from the table.
1448
1449 The in_memory field in the hash table element is set to 0.
1450 The caller must set it nonzero if appropriate.
1451
1452 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1453 and if insert_regs returns a nonzero value
1454 you must then recompute its hash code before calling here.
1455
1456 If necessary, update table showing constant values of quantities. */
1457
1458 #define CHEAPER(X, Y) \
1459 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
1460
1461 static struct table_elt *
1462 insert (rtx x, struct table_elt *classp, unsigned int hash, enum machine_mode mode)
1463 {
1464 struct table_elt *elt;
1465
1466 /* If X is a register and we haven't made a quantity for it,
1467 something is wrong. */
1468 gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));
1469
1470 /* If X is a hard register, show it is being put in the table. */
1471 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
1472 {
1473 unsigned int regno = REGNO (x);
1474 unsigned int endregno = regno + hard_regno_nregs[regno][GET_MODE (x)];
1475 unsigned int i;
1476
1477 for (i = regno; i < endregno; i++)
1478 SET_HARD_REG_BIT (hard_regs_in_table, i);
1479 }
1480
1481 /* Put an element for X into the right hash bucket. */
1482
1483 elt = free_element_chain;
1484 if (elt)
1485 free_element_chain = elt->next_same_hash;
1486 else
1487 elt = xmalloc (sizeof (struct table_elt));
1488
1489 elt->exp = x;
1490 elt->canon_exp = NULL_RTX;
1491 elt->cost = COST (x);
1492 elt->regcost = approx_reg_cost (x);
1493 elt->next_same_value = 0;
1494 elt->prev_same_value = 0;
1495 elt->next_same_hash = table[hash];
1496 elt->prev_same_hash = 0;
1497 elt->related_value = 0;
1498 elt->in_memory = 0;
1499 elt->mode = mode;
1500 elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));
1501
1502 if (table[hash])
1503 table[hash]->prev_same_hash = elt;
1504 table[hash] = elt;
1505
1506 /* Put it into the proper value-class. */
1507 if (classp)
1508 {
1509 classp = classp->first_same_value;
1510 if (CHEAPER (elt, classp))
1511 /* Insert at the head of the class. */
1512 {
1513 struct table_elt *p;
1514 elt->next_same_value = classp;
1515 classp->prev_same_value = elt;
1516 elt->first_same_value = elt;
1517
1518 for (p = classp; p; p = p->next_same_value)
1519 p->first_same_value = elt;
1520 }
1521 else
1522 {
1523 /* Insert not at head of the class. */
1524 /* Put it after the last element cheaper than X. */
1525 struct table_elt *p, *next;
1526
1527 for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1528 p = next);
1529
1530 /* Put it after P and before NEXT. */
1531 elt->next_same_value = next;
1532 if (next)
1533 next->prev_same_value = elt;
1534
1535 elt->prev_same_value = p;
1536 p->next_same_value = elt;
1537 elt->first_same_value = classp;
1538 }
1539 }
1540 else
1541 elt->first_same_value = elt;
1542
1543 /* If this is a constant being set equivalent to a register or a register
1544 being set equivalent to a constant, note the constant equivalence.
1545
1546 If this is a constant, it cannot be equivalent to a different constant,
1547 and a constant is the only thing that can be cheaper than a register. So
1548 we know the register is the head of the class (before the constant was
1549 inserted).
1550
1551 If this is a register that is not already known equivalent to a
1552 constant, we must check the entire class.
1553
1554 If this is a register that is already known equivalent to an insn,
1555 update the qtys `const_insn' to show that `this_insn' is the latest
1556 insn making that quantity equivalent to the constant. */
1557
1558 if (elt->is_const && classp && REG_P (classp->exp)
1559 && !REG_P (x))
1560 {
1561 int exp_q = REG_QTY (REGNO (classp->exp));
1562 struct qty_table_elem *exp_ent = &qty_table[exp_q];
1563
1564 exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
1565 exp_ent->const_insn = this_insn;
1566 }
1567
1568 else if (REG_P (x)
1569 && classp
1570 && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1571 && ! elt->is_const)
1572 {
1573 struct table_elt *p;
1574
1575 for (p = classp; p != 0; p = p->next_same_value)
1576 {
1577 if (p->is_const && !REG_P (p->exp))
1578 {
1579 int x_q = REG_QTY (REGNO (x));
1580 struct qty_table_elem *x_ent = &qty_table[x_q];
1581
1582 x_ent->const_rtx
1583 = gen_lowpart (GET_MODE (x), p->exp);
1584 x_ent->const_insn = this_insn;
1585 break;
1586 }
1587 }
1588 }
1589
1590 else if (REG_P (x)
1591 && qty_table[REG_QTY (REGNO (x))].const_rtx
1592 && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1593 qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1594
1595 /* If this is a constant with symbolic value,
1596 and it has a term with an explicit integer value,
1597 link it up with related expressions. */
1598 if (GET_CODE (x) == CONST)
1599 {
1600 rtx subexp = get_related_value (x);
1601 unsigned subhash;
1602 struct table_elt *subelt, *subelt_prev;
1603
1604 if (subexp != 0)
1605 {
1606 /* Get the integer-free subexpression in the hash table. */
1607 subhash = SAFE_HASH (subexp, mode);
1608 subelt = lookup (subexp, subhash, mode);
1609 if (subelt == 0)
1610 subelt = insert (subexp, NULL, subhash, mode);
1611 /* Initialize SUBELT's circular chain if it has none. */
1612 if (subelt->related_value == 0)
1613 subelt->related_value = subelt;
1614 /* Find the element in the circular chain that precedes SUBELT. */
1615 subelt_prev = subelt;
1616 while (subelt_prev->related_value != subelt)
1617 subelt_prev = subelt_prev->related_value;
1618 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1619 This way the element that follows SUBELT is the oldest one. */
1620 elt->related_value = subelt_prev->related_value;
1621 subelt_prev->related_value = elt;
1622 }
1623 }
1624
1625 return elt;
1626 }
1627 \f
1628 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1629 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1630 the two classes equivalent.
1631
1632 CLASS1 will be the surviving class; CLASS2 should not be used after this
1633 call.
1634
1635 Any invalid entries in CLASS2 will not be copied. */
1636
1637 static void
1638 merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
1639 {
1640 struct table_elt *elt, *next, *new;
1641
1642 /* Ensure we start with the head of the classes. */
1643 class1 = class1->first_same_value;
1644 class2 = class2->first_same_value;
1645
1646 /* If they were already equal, forget it. */
1647 if (class1 == class2)
1648 return;
1649
1650 for (elt = class2; elt; elt = next)
1651 {
1652 unsigned int hash;
1653 rtx exp = elt->exp;
1654 enum machine_mode mode = elt->mode;
1655
1656 next = elt->next_same_value;
1657
1658 /* Remove old entry, make a new one in CLASS1's class.
1659 Don't do this for invalid entries as we cannot find their
1660 hash code (it also isn't necessary). */
1661 if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
1662 {
1663 bool need_rehash = false;
1664
1665 hash_arg_in_memory = 0;
1666 hash = HASH (exp, mode);
1667
1668 if (REG_P (exp))
1669 {
1670 need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
1671 delete_reg_equiv (REGNO (exp));
1672 }
1673
1674 remove_from_table (elt, hash);
1675
1676 if (insert_regs (exp, class1, 0) || need_rehash)
1677 {
1678 rehash_using_reg (exp);
1679 hash = HASH (exp, mode);
1680 }
1681 new = insert (exp, class1, hash, mode);
1682 new->in_memory = hash_arg_in_memory;
1683 }
1684 }
1685 }
1686 \f
1687 /* Flush the entire hash table. */
1688
1689 static void
1690 flush_hash_table (void)
1691 {
1692 int i;
1693 struct table_elt *p;
1694
1695 for (i = 0; i < HASH_SIZE; i++)
1696 for (p = table[i]; p; p = table[i])
1697 {
1698 /* Note that invalidate can remove elements
1699 after P in the current hash chain. */
1700 if (REG_P (p->exp))
1701 invalidate (p->exp, p->mode);
1702 else
1703 remove_from_table (p, i);
1704 }
1705 }
1706 \f
1707 /* Function called for each rtx to check whether true dependence exist. */
1708 struct check_dependence_data
1709 {
1710 enum machine_mode mode;
1711 rtx exp;
1712 rtx addr;
1713 };
1714
1715 static int
1716 check_dependence (rtx *x, void *data)
1717 {
1718 struct check_dependence_data *d = (struct check_dependence_data *) data;
1719 if (*x && MEM_P (*x))
1720 return canon_true_dependence (d->exp, d->mode, d->addr, *x,
1721 cse_rtx_varies_p);
1722 else
1723 return 0;
1724 }
1725 \f
1726 /* Remove from the hash table, or mark as invalid, all expressions whose
1727 values could be altered by storing in X. X is a register, a subreg, or
1728 a memory reference with nonvarying address (because, when a memory
1729 reference with a varying address is stored in, all memory references are
1730 removed by invalidate_memory so specific invalidation is superfluous).
1731 FULL_MODE, if not VOIDmode, indicates that this much should be
1732 invalidated instead of just the amount indicated by the mode of X. This
1733 is only used for bitfield stores into memory.
1734
1735 A nonvarying address may be just a register or just a symbol reference,
1736 or it may be either of those plus a numeric offset. */
1737
1738 static void
1739 invalidate (rtx x, enum machine_mode full_mode)
1740 {
1741 int i;
1742 struct table_elt *p;
1743 rtx addr;
1744
1745 switch (GET_CODE (x))
1746 {
1747 case REG:
1748 {
1749 /* If X is a register, dependencies on its contents are recorded
1750 through the qty number mechanism. Just change the qty number of
1751 the register, mark it as invalid for expressions that refer to it,
1752 and remove it itself. */
1753 unsigned int regno = REGNO (x);
1754 unsigned int hash = HASH (x, GET_MODE (x));
1755
1756 /* Remove REGNO from any quantity list it might be on and indicate
1757 that its value might have changed. If it is a pseudo, remove its
1758 entry from the hash table.
1759
1760 For a hard register, we do the first two actions above for any
1761 additional hard registers corresponding to X. Then, if any of these
1762 registers are in the table, we must remove any REG entries that
1763 overlap these registers. */
1764
1765 delete_reg_equiv (regno);
1766 REG_TICK (regno)++;
1767 SUBREG_TICKED (regno) = -1;
1768
1769 if (regno >= FIRST_PSEUDO_REGISTER)
1770 {
1771 /* Because a register can be referenced in more than one mode,
1772 we might have to remove more than one table entry. */
1773 struct table_elt *elt;
1774
1775 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1776 remove_from_table (elt, hash);
1777 }
1778 else
1779 {
1780 HOST_WIDE_INT in_table
1781 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1782 unsigned int endregno
1783 = regno + hard_regno_nregs[regno][GET_MODE (x)];
1784 unsigned int tregno, tendregno, rn;
1785 struct table_elt *p, *next;
1786
1787 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1788
1789 for (rn = regno + 1; rn < endregno; rn++)
1790 {
1791 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1792 CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1793 delete_reg_equiv (rn);
1794 REG_TICK (rn)++;
1795 SUBREG_TICKED (rn) = -1;
1796 }
1797
1798 if (in_table)
1799 for (hash = 0; hash < HASH_SIZE; hash++)
1800 for (p = table[hash]; p; p = next)
1801 {
1802 next = p->next_same_hash;
1803
1804 if (!REG_P (p->exp)
1805 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1806 continue;
1807
1808 tregno = REGNO (p->exp);
1809 tendregno
1810 = tregno + hard_regno_nregs[tregno][GET_MODE (p->exp)];
1811 if (tendregno > regno && tregno < endregno)
1812 remove_from_table (p, hash);
1813 }
1814 }
1815 }
1816 return;
1817
1818 case SUBREG:
1819 invalidate (SUBREG_REG (x), VOIDmode);
1820 return;
1821
1822 case PARALLEL:
1823 for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1824 invalidate (XVECEXP (x, 0, i), VOIDmode);
1825 return;
1826
1827 case EXPR_LIST:
1828 /* This is part of a disjoint return value; extract the location in
1829 question ignoring the offset. */
1830 invalidate (XEXP (x, 0), VOIDmode);
1831 return;
1832
1833 case MEM:
1834 addr = canon_rtx (get_addr (XEXP (x, 0)));
1835 /* Calculate the canonical version of X here so that
1836 true_dependence doesn't generate new RTL for X on each call. */
1837 x = canon_rtx (x);
1838
1839 /* Remove all hash table elements that refer to overlapping pieces of
1840 memory. */
1841 if (full_mode == VOIDmode)
1842 full_mode = GET_MODE (x);
1843
1844 for (i = 0; i < HASH_SIZE; i++)
1845 {
1846 struct table_elt *next;
1847
1848 for (p = table[i]; p; p = next)
1849 {
1850 next = p->next_same_hash;
1851 if (p->in_memory)
1852 {
1853 struct check_dependence_data d;
1854
1855 /* Just canonicalize the expression once;
1856 otherwise each time we call invalidate
1857 true_dependence will canonicalize the
1858 expression again. */
1859 if (!p->canon_exp)
1860 p->canon_exp = canon_rtx (p->exp);
1861 d.exp = x;
1862 d.addr = addr;
1863 d.mode = full_mode;
1864 if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1865 remove_from_table (p, i);
1866 }
1867 }
1868 }
1869 return;
1870
1871 default:
1872 gcc_unreachable ();
1873 }
1874 }
1875 \f
1876 /* Remove all expressions that refer to register REGNO,
1877 since they are already invalid, and we are about to
1878 mark that register valid again and don't want the old
1879 expressions to reappear as valid. */
1880
1881 static void
1882 remove_invalid_refs (unsigned int regno)
1883 {
1884 unsigned int i;
1885 struct table_elt *p, *next;
1886
1887 for (i = 0; i < HASH_SIZE; i++)
1888 for (p = table[i]; p; p = next)
1889 {
1890 next = p->next_same_hash;
1891 if (!REG_P (p->exp)
1892 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1893 remove_from_table (p, i);
1894 }
1895 }
1896
1897 /* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
1898 and mode MODE. */
1899 static void
1900 remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
1901 enum machine_mode mode)
1902 {
1903 unsigned int i;
1904 struct table_elt *p, *next;
1905 unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
1906
1907 for (i = 0; i < HASH_SIZE; i++)
1908 for (p = table[i]; p; p = next)
1909 {
1910 rtx exp = p->exp;
1911 next = p->next_same_hash;
1912
1913 if (!REG_P (exp)
1914 && (GET_CODE (exp) != SUBREG
1915 || !REG_P (SUBREG_REG (exp))
1916 || REGNO (SUBREG_REG (exp)) != regno
1917 || (((SUBREG_BYTE (exp)
1918 + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
1919 && SUBREG_BYTE (exp) <= end))
1920 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1921 remove_from_table (p, i);
1922 }
1923 }
1924 \f
1925 /* Recompute the hash codes of any valid entries in the hash table that
1926 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1927
1928 This is called when we make a jump equivalence. */
1929
1930 static void
1931 rehash_using_reg (rtx x)
1932 {
1933 unsigned int i;
1934 struct table_elt *p, *next;
1935 unsigned hash;
1936
1937 if (GET_CODE (x) == SUBREG)
1938 x = SUBREG_REG (x);
1939
1940 /* If X is not a register or if the register is known not to be in any
1941 valid entries in the table, we have no work to do. */
1942
1943 if (!REG_P (x)
1944 || REG_IN_TABLE (REGNO (x)) < 0
1945 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
1946 return;
1947
1948 /* Scan all hash chains looking for valid entries that mention X.
1949 If we find one and it is in the wrong hash chain, move it. */
1950
1951 for (i = 0; i < HASH_SIZE; i++)
1952 for (p = table[i]; p; p = next)
1953 {
1954 next = p->next_same_hash;
1955 if (reg_mentioned_p (x, p->exp)
1956 && exp_equiv_p (p->exp, p->exp, 1, false)
1957 && i != (hash = SAFE_HASH (p->exp, p->mode)))
1958 {
1959 if (p->next_same_hash)
1960 p->next_same_hash->prev_same_hash = p->prev_same_hash;
1961
1962 if (p->prev_same_hash)
1963 p->prev_same_hash->next_same_hash = p->next_same_hash;
1964 else
1965 table[i] = p->next_same_hash;
1966
1967 p->next_same_hash = table[hash];
1968 p->prev_same_hash = 0;
1969 if (table[hash])
1970 table[hash]->prev_same_hash = p;
1971 table[hash] = p;
1972 }
1973 }
1974 }
1975 \f
1976 /* Remove from the hash table any expression that is a call-clobbered
1977 register. Also update their TICK values. */
1978
1979 static void
1980 invalidate_for_call (void)
1981 {
1982 unsigned int regno, endregno;
1983 unsigned int i;
1984 unsigned hash;
1985 struct table_elt *p, *next;
1986 int in_table = 0;
1987
1988 /* Go through all the hard registers. For each that is clobbered in
1989 a CALL_INSN, remove the register from quantity chains and update
1990 reg_tick if defined. Also see if any of these registers is currently
1991 in the table. */
1992
1993 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1994 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1995 {
1996 delete_reg_equiv (regno);
1997 if (REG_TICK (regno) >= 0)
1998 {
1999 REG_TICK (regno)++;
2000 SUBREG_TICKED (regno) = -1;
2001 }
2002
2003 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2004 }
2005
2006 /* In the case where we have no call-clobbered hard registers in the
2007 table, we are done. Otherwise, scan the table and remove any
2008 entry that overlaps a call-clobbered register. */
2009
2010 if (in_table)
2011 for (hash = 0; hash < HASH_SIZE; hash++)
2012 for (p = table[hash]; p; p = next)
2013 {
2014 next = p->next_same_hash;
2015
2016 if (!REG_P (p->exp)
2017 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2018 continue;
2019
2020 regno = REGNO (p->exp);
2021 endregno = regno + hard_regno_nregs[regno][GET_MODE (p->exp)];
2022
2023 for (i = regno; i < endregno; i++)
2024 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2025 {
2026 remove_from_table (p, hash);
2027 break;
2028 }
2029 }
2030 }
2031 \f
2032 /* Given an expression X of type CONST,
2033 and ELT which is its table entry (or 0 if it
2034 is not in the hash table),
2035 return an alternate expression for X as a register plus integer.
2036 If none can be found, return 0. */
2037
2038 static rtx
2039 use_related_value (rtx x, struct table_elt *elt)
2040 {
2041 struct table_elt *relt = 0;
2042 struct table_elt *p, *q;
2043 HOST_WIDE_INT offset;
2044
2045 /* First, is there anything related known?
2046 If we have a table element, we can tell from that.
2047 Otherwise, must look it up. */
2048
2049 if (elt != 0 && elt->related_value != 0)
2050 relt = elt;
2051 else if (elt == 0 && GET_CODE (x) == CONST)
2052 {
2053 rtx subexp = get_related_value (x);
2054 if (subexp != 0)
2055 relt = lookup (subexp,
2056 SAFE_HASH (subexp, GET_MODE (subexp)),
2057 GET_MODE (subexp));
2058 }
2059
2060 if (relt == 0)
2061 return 0;
2062
2063 /* Search all related table entries for one that has an
2064 equivalent register. */
2065
2066 p = relt;
2067 while (1)
2068 {
2069 /* This loop is strange in that it is executed in two different cases.
2070 The first is when X is already in the table. Then it is searching
2071 the RELATED_VALUE list of X's class (RELT). The second case is when
2072 X is not in the table. Then RELT points to a class for the related
2073 value.
2074
2075 Ensure that, whatever case we are in, that we ignore classes that have
2076 the same value as X. */
2077
2078 if (rtx_equal_p (x, p->exp))
2079 q = 0;
2080 else
2081 for (q = p->first_same_value; q; q = q->next_same_value)
2082 if (REG_P (q->exp))
2083 break;
2084
2085 if (q)
2086 break;
2087
2088 p = p->related_value;
2089
2090 /* We went all the way around, so there is nothing to be found.
2091 Alternatively, perhaps RELT was in the table for some other reason
2092 and it has no related values recorded. */
2093 if (p == relt || p == 0)
2094 break;
2095 }
2096
2097 if (q == 0)
2098 return 0;
2099
2100 offset = (get_integer_term (x) - get_integer_term (p->exp));
2101 /* Note: OFFSET may be 0 if P->xexp and X are related by commutativity. */
2102 return plus_constant (q->exp, offset);
2103 }
2104 \f
2105 /* Hash a string. Just add its bytes up. */
2106 static inline unsigned
2107 hash_rtx_string (const char *ps)
2108 {
2109 unsigned hash = 0;
2110 const unsigned char *p = (const unsigned char *) ps;
2111
2112 if (p)
2113 while (*p)
2114 hash += *p++;
2115
2116 return hash;
2117 }
2118
2119 /* Hash an rtx. We are careful to make sure the value is never negative.
2120 Equivalent registers hash identically.
2121 MODE is used in hashing for CONST_INTs only;
2122 otherwise the mode of X is used.
2123
2124 Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.
2125
2126 If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
2127 a MEM rtx which does not have the RTX_UNCHANGING_P bit set.
2128
2129 Note that cse_insn knows that the hash code of a MEM expression
2130 is just (int) MEM plus the hash code of the address. */
2131
2132 unsigned
2133 hash_rtx (rtx x, enum machine_mode mode, int *do_not_record_p,
2134 int *hash_arg_in_memory_p, bool have_reg_qty)
2135 {
2136 int i, j;
2137 unsigned hash = 0;
2138 enum rtx_code code;
2139 const char *fmt;
2140
2141 /* Used to turn recursion into iteration. We can't rely on GCC's
2142 tail-recursion elimination since we need to keep accumulating values
2143 in HASH. */
2144 repeat:
2145 if (x == 0)
2146 return hash;
2147
2148 code = GET_CODE (x);
2149 switch (code)
2150 {
2151 case REG:
2152 {
2153 unsigned int regno = REGNO (x);
2154
2155 if (!reload_completed)
2156 {
2157 /* On some machines, we can't record any non-fixed hard register,
2158 because extending its life will cause reload problems. We
2159 consider ap, fp, sp, gp to be fixed for this purpose.
2160
2161 We also consider CCmode registers to be fixed for this purpose;
2162 failure to do so leads to failure to simplify 0<100 type of
2163 conditionals.
2164
2165 On all machines, we can't record any global registers.
2166 Nor should we record any register that is in a small
2167 class, as defined by CLASS_LIKELY_SPILLED_P. */
2168 bool record;
2169
2170 if (regno >= FIRST_PSEUDO_REGISTER)
2171 record = true;
2172 else if (x == frame_pointer_rtx
2173 || x == hard_frame_pointer_rtx
2174 || x == arg_pointer_rtx
2175 || x == stack_pointer_rtx
2176 || x == pic_offset_table_rtx)
2177 record = true;
2178 else if (global_regs[regno])
2179 record = false;
2180 else if (fixed_regs[regno])
2181 record = true;
2182 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2183 record = true;
2184 else if (SMALL_REGISTER_CLASSES)
2185 record = false;
2186 else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
2187 record = false;
2188 else
2189 record = true;
2190
2191 if (!record)
2192 {
2193 *do_not_record_p = 1;
2194 return 0;
2195 }
2196 }
2197
2198 hash += ((unsigned int) REG << 7);
2199 hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
2200 return hash;
2201 }
2202
2203 /* We handle SUBREG of a REG specially because the underlying
2204 reg changes its hash value with every value change; we don't
2205 want to have to forget unrelated subregs when one subreg changes. */
2206 case SUBREG:
2207 {
2208 if (REG_P (SUBREG_REG (x)))
2209 {
2210 hash += (((unsigned int) SUBREG << 7)
2211 + REGNO (SUBREG_REG (x))
2212 + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2213 return hash;
2214 }
2215 break;
2216 }
2217
2218 case CONST_INT:
2219 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
2220 + (unsigned int) INTVAL (x));
2221 return hash;
2222
2223 case CONST_DOUBLE:
2224 /* This is like the general case, except that it only counts
2225 the integers representing the constant. */
2226 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2227 if (GET_MODE (x) != VOIDmode)
2228 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2229 else
2230 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
2231 + (unsigned int) CONST_DOUBLE_HIGH (x));
2232 return hash;
2233
2234 case CONST_VECTOR:
2235 {
2236 int units;
2237 rtx elt;
2238
2239 units = CONST_VECTOR_NUNITS (x);
2240
2241 for (i = 0; i < units; ++i)
2242 {
2243 elt = CONST_VECTOR_ELT (x, i);
2244 hash += hash_rtx (elt, GET_MODE (elt), do_not_record_p,
2245 hash_arg_in_memory_p, have_reg_qty);
2246 }
2247
2248 return hash;
2249 }
2250
2251 /* Assume there is only one rtx object for any given label. */
2252 case LABEL_REF:
2253 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
2254 differences and differences between each stage's debugging dumps. */
2255 hash += (((unsigned int) LABEL_REF << 7)
2256 + CODE_LABEL_NUMBER (XEXP (x, 0)));
2257 return hash;
2258
2259 case SYMBOL_REF:
2260 {
2261 /* Don't hash on the symbol's address to avoid bootstrap differences.
2262 Different hash values may cause expressions to be recorded in
2263 different orders and thus different registers to be used in the
2264 final assembler. This also avoids differences in the dump files
2265 between various stages. */
2266 unsigned int h = 0;
2267 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
2268
2269 while (*p)
2270 h += (h << 7) + *p++; /* ??? revisit */
2271
2272 hash += ((unsigned int) SYMBOL_REF << 7) + h;
2273 return hash;
2274 }
2275
2276 case MEM:
2277 /* We don't record if marked volatile or if BLKmode since we don't
2278 know the size of the move. */
2279 if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2280 {
2281 *do_not_record_p = 1;
2282 return 0;
2283 }
2284 if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2285 *hash_arg_in_memory_p = 1;
2286
2287 /* Now that we have already found this special case,
2288 might as well speed it up as much as possible. */
2289 hash += (unsigned) MEM;
2290 x = XEXP (x, 0);
2291 goto repeat;
2292
2293 case USE:
2294 /* A USE that mentions non-volatile memory needs special
2295 handling since the MEM may be BLKmode which normally
2296 prevents an entry from being made. Pure calls are
2297 marked by a USE which mentions BLKmode memory.
2298 See calls.c:emit_call_1. */
2299 if (MEM_P (XEXP (x, 0))
2300 && ! MEM_VOLATILE_P (XEXP (x, 0)))
2301 {
2302 hash += (unsigned) USE;
2303 x = XEXP (x, 0);
2304
2305 if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2306 *hash_arg_in_memory_p = 1;
2307
2308 /* Now that we have already found this special case,
2309 might as well speed it up as much as possible. */
2310 hash += (unsigned) MEM;
2311 x = XEXP (x, 0);
2312 goto repeat;
2313 }
2314 break;
2315
2316 case PRE_DEC:
2317 case PRE_INC:
2318 case POST_DEC:
2319 case POST_INC:
2320 case PRE_MODIFY:
2321 case POST_MODIFY:
2322 case PC:
2323 case CC0:
2324 case CALL:
2325 case UNSPEC_VOLATILE:
2326 *do_not_record_p = 1;
2327 return 0;
2328
2329 case ASM_OPERANDS:
2330 if (MEM_VOLATILE_P (x))
2331 {
2332 *do_not_record_p = 1;
2333 return 0;
2334 }
2335 else
2336 {
2337 /* We don't want to take the filename and line into account. */
2338 hash += (unsigned) code + (unsigned) GET_MODE (x)
2339 + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
2340 + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2341 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2342
2343 if (ASM_OPERANDS_INPUT_LENGTH (x))
2344 {
2345 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2346 {
2347 hash += (hash_rtx (ASM_OPERANDS_INPUT (x, i),
2348 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
2349 do_not_record_p, hash_arg_in_memory_p,
2350 have_reg_qty)
2351 + hash_rtx_string
2352 (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
2353 }
2354
2355 hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2356 x = ASM_OPERANDS_INPUT (x, 0);
2357 mode = GET_MODE (x);
2358 goto repeat;
2359 }
2360
2361 return hash;
2362 }
2363 break;
2364
2365 default:
2366 break;
2367 }
2368
2369 i = GET_RTX_LENGTH (code) - 1;
2370 hash += (unsigned) code + (unsigned) GET_MODE (x);
2371 fmt = GET_RTX_FORMAT (code);
2372 for (; i >= 0; i--)
2373 {
2374 switch (fmt[i])
2375 {
2376 case 'e':
2377 /* If we are about to do the last recursive call
2378 needed at this level, change it into iteration.
2379 This function is called enough to be worth it. */
2380 if (i == 0)
2381 {
2382 x = XEXP (x, i);
2383 goto repeat;
2384 }
2385
2386 hash += hash_rtx (XEXP (x, i), 0, do_not_record_p,
2387 hash_arg_in_memory_p, have_reg_qty);
2388 break;
2389
2390 case 'E':
2391 for (j = 0; j < XVECLEN (x, i); j++)
2392 hash += hash_rtx (XVECEXP (x, i, j), 0, do_not_record_p,
2393 hash_arg_in_memory_p, have_reg_qty);
2394 break;
2395
2396 case 's':
2397 hash += hash_rtx_string (XSTR (x, i));
2398 break;
2399
2400 case 'i':
2401 hash += (unsigned int) XINT (x, i);
2402 break;
2403
2404 case '0': case 't':
2405 /* Unused. */
2406 break;
2407
2408 default:
2409 gcc_unreachable ();
2410 }
2411 }
2412
2413 return hash;
2414 }
2415
2416 /* Hash an rtx X for cse via hash_rtx.
2417 Stores 1 in do_not_record if any subexpression is volatile.
2418 Stores 1 in hash_arg_in_memory if X contains a mem rtx which
2419 does not have the RTX_UNCHANGING_P bit set. */
2420
2421 static inline unsigned
2422 canon_hash (rtx x, enum machine_mode mode)
2423 {
2424 return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
2425 }
2426
2427 /* Like canon_hash but with no side effects, i.e. do_not_record
2428 and hash_arg_in_memory are not changed. */
2429
2430 static inline unsigned
2431 safe_hash (rtx x, enum machine_mode mode)
2432 {
2433 int dummy_do_not_record;
2434 return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
2435 }
2436 \f
2437 /* Return 1 iff X and Y would canonicalize into the same thing,
2438 without actually constructing the canonicalization of either one.
2439 If VALIDATE is nonzero,
2440 we assume X is an expression being processed from the rtl
2441 and Y was found in the hash table. We check register refs
2442 in Y for being marked as valid.
2443
2444 If FOR_GCSE is true, we compare X and Y for equivalence for GCSE. */
2445
2446 int
2447 exp_equiv_p (rtx x, rtx y, int validate, bool for_gcse)
2448 {
2449 int i, j;
2450 enum rtx_code code;
2451 const char *fmt;
2452
2453 /* Note: it is incorrect to assume an expression is equivalent to itself
2454 if VALIDATE is nonzero. */
2455 if (x == y && !validate)
2456 return 1;
2457
2458 if (x == 0 || y == 0)
2459 return x == y;
2460
2461 code = GET_CODE (x);
2462 if (code != GET_CODE (y))
2463 return 0;
2464
2465 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2466 if (GET_MODE (x) != GET_MODE (y))
2467 return 0;
2468
2469 switch (code)
2470 {
2471 case PC:
2472 case CC0:
2473 case CONST_INT:
2474 return x == y;
2475
2476 case LABEL_REF:
2477 return XEXP (x, 0) == XEXP (y, 0);
2478
2479 case SYMBOL_REF:
2480 return XSTR (x, 0) == XSTR (y, 0);
2481
2482 case REG:
2483 if (for_gcse)
2484 return REGNO (x) == REGNO (y);
2485 else
2486 {
2487 unsigned int regno = REGNO (y);
2488 unsigned int i;
2489 unsigned int endregno
2490 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2491 : hard_regno_nregs[regno][GET_MODE (y)]);
2492
2493 /* If the quantities are not the same, the expressions are not
2494 equivalent. If there are and we are not to validate, they
2495 are equivalent. Otherwise, ensure all regs are up-to-date. */
2496
2497 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2498 return 0;
2499
2500 if (! validate)
2501 return 1;
2502
2503 for (i = regno; i < endregno; i++)
2504 if (REG_IN_TABLE (i) != REG_TICK (i))
2505 return 0;
2506
2507 return 1;
2508 }
2509
2510 case MEM:
2511 if (for_gcse)
2512 {
2513 /* Can't merge two expressions in different alias sets, since we
2514 can decide that the expression is transparent in a block when
2515 it isn't, due to it being set with the different alias set. */
2516 if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
2517 return 0;
2518
2519 /* A volatile mem should not be considered equivalent to any
2520 other. */
2521 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2522 return 0;
2523 }
2524 break;
2525
2526 /* For commutative operations, check both orders. */
2527 case PLUS:
2528 case MULT:
2529 case AND:
2530 case IOR:
2531 case XOR:
2532 case NE:
2533 case EQ:
2534 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
2535 validate, for_gcse)
2536 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2537 validate, for_gcse))
2538 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2539 validate, for_gcse)
2540 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2541 validate, for_gcse)));
2542
2543 case ASM_OPERANDS:
2544 /* We don't use the generic code below because we want to
2545 disregard filename and line numbers. */
2546
2547 /* A volatile asm isn't equivalent to any other. */
2548 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2549 return 0;
2550
2551 if (GET_MODE (x) != GET_MODE (y)
2552 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2553 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2554 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2555 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2556 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2557 return 0;
2558
2559 if (ASM_OPERANDS_INPUT_LENGTH (x))
2560 {
2561 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2562 if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2563 ASM_OPERANDS_INPUT (y, i),
2564 validate, for_gcse)
2565 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2566 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2567 return 0;
2568 }
2569
2570 return 1;
2571
2572 default:
2573 break;
2574 }
2575
2576 /* Compare the elements. If any pair of corresponding elements
2577 fail to match, return 0 for the whole thing. */
2578
2579 fmt = GET_RTX_FORMAT (code);
2580 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2581 {
2582 switch (fmt[i])
2583 {
2584 case 'e':
2585 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
2586 validate, for_gcse))
2587 return 0;
2588 break;
2589
2590 case 'E':
2591 if (XVECLEN (x, i) != XVECLEN (y, i))
2592 return 0;
2593 for (j = 0; j < XVECLEN (x, i); j++)
2594 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2595 validate, for_gcse))
2596 return 0;
2597 break;
2598
2599 case 's':
2600 if (strcmp (XSTR (x, i), XSTR (y, i)))
2601 return 0;
2602 break;
2603
2604 case 'i':
2605 if (XINT (x, i) != XINT (y, i))
2606 return 0;
2607 break;
2608
2609 case 'w':
2610 if (XWINT (x, i) != XWINT (y, i))
2611 return 0;
2612 break;
2613
2614 case '0':
2615 case 't':
2616 break;
2617
2618 default:
2619 gcc_unreachable ();
2620 }
2621 }
2622
2623 return 1;
2624 }
2625 \f
2626 /* Return 1 if X has a value that can vary even between two
2627 executions of the program. 0 means X can be compared reliably
2628 against certain constants or near-constants. */
2629
2630 static int
2631 cse_rtx_varies_p (rtx x, int from_alias)
2632 {
2633 /* We need not check for X and the equivalence class being of the same
2634 mode because if X is equivalent to a constant in some mode, it
2635 doesn't vary in any mode. */
2636
2637 if (REG_P (x)
2638 && REGNO_QTY_VALID_P (REGNO (x)))
2639 {
2640 int x_q = REG_QTY (REGNO (x));
2641 struct qty_table_elem *x_ent = &qty_table[x_q];
2642
2643 if (GET_MODE (x) == x_ent->mode
2644 && x_ent->const_rtx != NULL_RTX)
2645 return 0;
2646 }
2647
2648 if (GET_CODE (x) == PLUS
2649 && GET_CODE (XEXP (x, 1)) == CONST_INT
2650 && REG_P (XEXP (x, 0))
2651 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2652 {
2653 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2654 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2655
2656 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2657 && x0_ent->const_rtx != NULL_RTX)
2658 return 0;
2659 }
2660
2661 /* This can happen as the result of virtual register instantiation, if
2662 the initial constant is too large to be a valid address. This gives
2663 us a three instruction sequence, load large offset into a register,
2664 load fp minus a constant into a register, then a MEM which is the
2665 sum of the two `constant' registers. */
2666 if (GET_CODE (x) == PLUS
2667 && REG_P (XEXP (x, 0))
2668 && REG_P (XEXP (x, 1))
2669 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2670 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2671 {
2672 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2673 int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2674 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2675 struct qty_table_elem *x1_ent = &qty_table[x1_q];
2676
2677 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2678 && x0_ent->const_rtx != NULL_RTX
2679 && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2680 && x1_ent->const_rtx != NULL_RTX)
2681 return 0;
2682 }
2683
2684 return rtx_varies_p (x, from_alias);
2685 }
2686 \f
2687 /* Subroutine of canon_reg. Pass *XLOC through canon_reg, and validate
2688 the result if necessary. INSN is as for canon_reg. */
2689
2690 static void
2691 validate_canon_reg (rtx *xloc, rtx insn)
2692 {
2693 rtx new = canon_reg (*xloc, insn);
2694 int insn_code;
2695
2696 /* If replacing pseudo with hard reg or vice versa, ensure the
2697 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2698 if (insn != 0 && new != 0
2699 && REG_P (new) && REG_P (*xloc)
2700 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2701 != (REGNO (*xloc) < FIRST_PSEUDO_REGISTER))
2702 || GET_MODE (new) != GET_MODE (*xloc)
2703 || (insn_code = recog_memoized (insn)) < 0
2704 || insn_data[insn_code].n_dups > 0))
2705 validate_change (insn, xloc, new, 1);
2706 else
2707 *xloc = new;
2708 }
2709
2710 /* Canonicalize an expression:
2711 replace each register reference inside it
2712 with the "oldest" equivalent register.
2713
2714 If INSN is nonzero and we are replacing a pseudo with a hard register
2715 or vice versa, validate_change is used to ensure that INSN remains valid
2716 after we make our substitution. The calls are made with IN_GROUP nonzero
2717 so apply_change_group must be called upon the outermost return from this
2718 function (unless INSN is zero). The result of apply_change_group can
2719 generally be discarded since the changes we are making are optional. */
2720
2721 static rtx
2722 canon_reg (rtx x, rtx insn)
2723 {
2724 int i;
2725 enum rtx_code code;
2726 const char *fmt;
2727
2728 if (x == 0)
2729 return x;
2730
2731 code = GET_CODE (x);
2732 switch (code)
2733 {
2734 case PC:
2735 case CC0:
2736 case CONST:
2737 case CONST_INT:
2738 case CONST_DOUBLE:
2739 case CONST_VECTOR:
2740 case SYMBOL_REF:
2741 case LABEL_REF:
2742 case ADDR_VEC:
2743 case ADDR_DIFF_VEC:
2744 return x;
2745
2746 case REG:
2747 {
2748 int first;
2749 int q;
2750 struct qty_table_elem *ent;
2751
2752 /* Never replace a hard reg, because hard regs can appear
2753 in more than one machine mode, and we must preserve the mode
2754 of each occurrence. Also, some hard regs appear in
2755 MEMs that are shared and mustn't be altered. Don't try to
2756 replace any reg that maps to a reg of class NO_REGS. */
2757 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2758 || ! REGNO_QTY_VALID_P (REGNO (x)))
2759 return x;
2760
2761 q = REG_QTY (REGNO (x));
2762 ent = &qty_table[q];
2763 first = ent->first_reg;
2764 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2765 : REGNO_REG_CLASS (first) == NO_REGS ? x
2766 : gen_rtx_REG (ent->mode, first));
2767 }
2768
2769 default:
2770 break;
2771 }
2772
2773 fmt = GET_RTX_FORMAT (code);
2774 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2775 {
2776 int j;
2777
2778 if (fmt[i] == 'e')
2779 validate_canon_reg (&XEXP (x, i), insn);
2780 else if (fmt[i] == 'E')
2781 for (j = 0; j < XVECLEN (x, i); j++)
2782 validate_canon_reg (&XVECEXP (x, i, j), insn);
2783 }
2784
2785 return x;
2786 }
2787 \f
2788 /* LOC is a location within INSN that is an operand address (the contents of
2789 a MEM). Find the best equivalent address to use that is valid for this
2790 insn.
2791
2792 On most CISC machines, complicated address modes are costly, and rtx_cost
2793 is a good approximation for that cost. However, most RISC machines have
2794 only a few (usually only one) memory reference formats. If an address is
2795 valid at all, it is often just as cheap as any other address. Hence, for
2796 RISC machines, we use `address_cost' to compare the costs of various
2797 addresses. For two addresses of equal cost, choose the one with the
2798 highest `rtx_cost' value as that has the potential of eliminating the
2799 most insns. For equal costs, we choose the first in the equivalence
2800 class. Note that we ignore the fact that pseudo registers are cheaper than
2801 hard registers here because we would also prefer the pseudo registers. */
2802
2803 static void
2804 find_best_addr (rtx insn, rtx *loc, enum machine_mode mode)
2805 {
2806 struct table_elt *elt;
2807 rtx addr = *loc;
2808 struct table_elt *p;
2809 int found_better = 1;
2810 int save_do_not_record = do_not_record;
2811 int save_hash_arg_in_memory = hash_arg_in_memory;
2812 int addr_volatile;
2813 int regno;
2814 unsigned hash;
2815
2816 /* Do not try to replace constant addresses or addresses of local and
2817 argument slots. These MEM expressions are made only once and inserted
2818 in many instructions, as well as being used to control symbol table
2819 output. It is not safe to clobber them.
2820
2821 There are some uncommon cases where the address is already in a register
2822 for some reason, but we cannot take advantage of that because we have
2823 no easy way to unshare the MEM. In addition, looking up all stack
2824 addresses is costly. */
2825 if ((GET_CODE (addr) == PLUS
2826 && REG_P (XEXP (addr, 0))
2827 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2828 && (regno = REGNO (XEXP (addr, 0)),
2829 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2830 || regno == ARG_POINTER_REGNUM))
2831 || (REG_P (addr)
2832 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2833 || regno == HARD_FRAME_POINTER_REGNUM
2834 || regno == ARG_POINTER_REGNUM))
2835 || CONSTANT_ADDRESS_P (addr))
2836 return;
2837
2838 /* If this address is not simply a register, try to fold it. This will
2839 sometimes simplify the expression. Many simplifications
2840 will not be valid, but some, usually applying the associative rule, will
2841 be valid and produce better code. */
2842 if (!REG_P (addr))
2843 {
2844 rtx folded = fold_rtx (addr, NULL_RTX);
2845 if (folded != addr)
2846 {
2847 int addr_folded_cost = address_cost (folded, mode);
2848 int addr_cost = address_cost (addr, mode);
2849
2850 if ((addr_folded_cost < addr_cost
2851 || (addr_folded_cost == addr_cost
2852 /* ??? The rtx_cost comparison is left over from an older
2853 version of this code. It is probably no longer helpful.*/
2854 && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2855 || approx_reg_cost (folded) < approx_reg_cost (addr))))
2856 && validate_change (insn, loc, folded, 0))
2857 addr = folded;
2858 }
2859 }
2860
2861 /* If this address is not in the hash table, we can't look for equivalences
2862 of the whole address. Also, ignore if volatile. */
2863
2864 do_not_record = 0;
2865 hash = HASH (addr, Pmode);
2866 addr_volatile = do_not_record;
2867 do_not_record = save_do_not_record;
2868 hash_arg_in_memory = save_hash_arg_in_memory;
2869
2870 if (addr_volatile)
2871 return;
2872
2873 elt = lookup (addr, hash, Pmode);
2874
2875 if (elt)
2876 {
2877 /* We need to find the best (under the criteria documented above) entry
2878 in the class that is valid. We use the `flag' field to indicate
2879 choices that were invalid and iterate until we can't find a better
2880 one that hasn't already been tried. */
2881
2882 for (p = elt->first_same_value; p; p = p->next_same_value)
2883 p->flag = 0;
2884
2885 while (found_better)
2886 {
2887 int best_addr_cost = address_cost (*loc, mode);
2888 int best_rtx_cost = (elt->cost + 1) >> 1;
2889 int exp_cost;
2890 struct table_elt *best_elt = elt;
2891
2892 found_better = 0;
2893 for (p = elt->first_same_value; p; p = p->next_same_value)
2894 if (! p->flag)
2895 {
2896 if ((REG_P (p->exp)
2897 || exp_equiv_p (p->exp, p->exp, 1, false))
2898 && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
2899 || (exp_cost == best_addr_cost
2900 && ((p->cost + 1) >> 1) > best_rtx_cost)))
2901 {
2902 found_better = 1;
2903 best_addr_cost = exp_cost;
2904 best_rtx_cost = (p->cost + 1) >> 1;
2905 best_elt = p;
2906 }
2907 }
2908
2909 if (found_better)
2910 {
2911 if (validate_change (insn, loc,
2912 canon_reg (copy_rtx (best_elt->exp),
2913 NULL_RTX), 0))
2914 return;
2915 else
2916 best_elt->flag = 1;
2917 }
2918 }
2919 }
2920
2921 /* If the address is a binary operation with the first operand a register
2922 and the second a constant, do the same as above, but looking for
2923 equivalences of the register. Then try to simplify before checking for
2924 the best address to use. This catches a few cases: First is when we
2925 have REG+const and the register is another REG+const. We can often merge
2926 the constants and eliminate one insn and one register. It may also be
2927 that a machine has a cheap REG+REG+const. Finally, this improves the
2928 code on the Alpha for unaligned byte stores. */
2929
2930 if (flag_expensive_optimizations
2931 && ARITHMETIC_P (*loc)
2932 && REG_P (XEXP (*loc, 0)))
2933 {
2934 rtx op1 = XEXP (*loc, 1);
2935
2936 do_not_record = 0;
2937 hash = HASH (XEXP (*loc, 0), Pmode);
2938 do_not_record = save_do_not_record;
2939 hash_arg_in_memory = save_hash_arg_in_memory;
2940
2941 elt = lookup (XEXP (*loc, 0), hash, Pmode);
2942 if (elt == 0)
2943 return;
2944
2945 /* We need to find the best (under the criteria documented above) entry
2946 in the class that is valid. We use the `flag' field to indicate
2947 choices that were invalid and iterate until we can't find a better
2948 one that hasn't already been tried. */
2949
2950 for (p = elt->first_same_value; p; p = p->next_same_value)
2951 p->flag = 0;
2952
2953 while (found_better)
2954 {
2955 int best_addr_cost = address_cost (*loc, mode);
2956 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2957 struct table_elt *best_elt = elt;
2958 rtx best_rtx = *loc;
2959 int count;
2960
2961 /* This is at worst case an O(n^2) algorithm, so limit our search
2962 to the first 32 elements on the list. This avoids trouble
2963 compiling code with very long basic blocks that can easily
2964 call simplify_gen_binary so many times that we run out of
2965 memory. */
2966
2967 found_better = 0;
2968 for (p = elt->first_same_value, count = 0;
2969 p && count < 32;
2970 p = p->next_same_value, count++)
2971 if (! p->flag
2972 && (REG_P (p->exp)
2973 || exp_equiv_p (p->exp, p->exp, 1, false)))
2974 {
2975 rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
2976 p->exp, op1);
2977 int new_cost;
2978
2979 /* Get the canonical version of the address so we can accept
2980 more. */
2981 new = canon_for_address (new);
2982
2983 new_cost = address_cost (new, mode);
2984
2985 if (new_cost < best_addr_cost
2986 || (new_cost == best_addr_cost
2987 && (COST (new) + 1) >> 1 > best_rtx_cost))
2988 {
2989 found_better = 1;
2990 best_addr_cost = new_cost;
2991 best_rtx_cost = (COST (new) + 1) >> 1;
2992 best_elt = p;
2993 best_rtx = new;
2994 }
2995 }
2996
2997 if (found_better)
2998 {
2999 if (validate_change (insn, loc,
3000 canon_reg (copy_rtx (best_rtx),
3001 NULL_RTX), 0))
3002 return;
3003 else
3004 best_elt->flag = 1;
3005 }
3006 }
3007 }
3008 }
3009 \f
3010 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
3011 operation (EQ, NE, GT, etc.), follow it back through the hash table and
3012 what values are being compared.
3013
3014 *PARG1 and *PARG2 are updated to contain the rtx representing the values
3015 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
3016 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3017 compared to produce cc0.
3018
3019 The return value is the comparison operator and is either the code of
3020 A or the code corresponding to the inverse of the comparison. */
3021
3022 static enum rtx_code
3023 find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
3024 enum machine_mode *pmode1, enum machine_mode *pmode2)
3025 {
3026 rtx arg1, arg2;
3027
3028 arg1 = *parg1, arg2 = *parg2;
3029
3030 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
3031
3032 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
3033 {
3034 /* Set nonzero when we find something of interest. */
3035 rtx x = 0;
3036 int reverse_code = 0;
3037 struct table_elt *p = 0;
3038
3039 /* If arg1 is a COMPARE, extract the comparison arguments from it.
3040 On machines with CC0, this is the only case that can occur, since
3041 fold_rtx will return the COMPARE or item being compared with zero
3042 when given CC0. */
3043
3044 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3045 x = arg1;
3046
3047 /* If ARG1 is a comparison operator and CODE is testing for
3048 STORE_FLAG_VALUE, get the inner arguments. */
3049
3050 else if (COMPARISON_P (arg1))
3051 {
3052 #ifdef FLOAT_STORE_FLAG_VALUE
3053 REAL_VALUE_TYPE fsfv;
3054 #endif
3055
3056 if (code == NE
3057 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3058 && code == LT && STORE_FLAG_VALUE == -1)
3059 #ifdef FLOAT_STORE_FLAG_VALUE
3060 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3061 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3062 REAL_VALUE_NEGATIVE (fsfv)))
3063 #endif
3064 )
3065 x = arg1;
3066 else if (code == EQ
3067 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3068 && code == GE && STORE_FLAG_VALUE == -1)
3069 #ifdef FLOAT_STORE_FLAG_VALUE
3070 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3071 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3072 REAL_VALUE_NEGATIVE (fsfv)))
3073 #endif
3074 )
3075 x = arg1, reverse_code = 1;
3076 }
3077
3078 /* ??? We could also check for
3079
3080 (ne (and (eq (...) (const_int 1))) (const_int 0))
3081
3082 and related forms, but let's wait until we see them occurring. */
3083
3084 if (x == 0)
3085 /* Look up ARG1 in the hash table and see if it has an equivalence
3086 that lets us see what is being compared. */
3087 p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
3088 if (p)
3089 {
3090 p = p->first_same_value;
3091
3092 /* If what we compare is already known to be constant, that is as
3093 good as it gets.
3094 We need to break the loop in this case, because otherwise we
3095 can have an infinite loop when looking at a reg that is known
3096 to be a constant which is the same as a comparison of a reg
3097 against zero which appears later in the insn stream, which in
3098 turn is constant and the same as the comparison of the first reg
3099 against zero... */
3100 if (p->is_const)
3101 break;
3102 }
3103
3104 for (; p; p = p->next_same_value)
3105 {
3106 enum machine_mode inner_mode = GET_MODE (p->exp);
3107 #ifdef FLOAT_STORE_FLAG_VALUE
3108 REAL_VALUE_TYPE fsfv;
3109 #endif
3110
3111 /* If the entry isn't valid, skip it. */
3112 if (! exp_equiv_p (p->exp, p->exp, 1, false))
3113 continue;
3114
3115 if (GET_CODE (p->exp) == COMPARE
3116 /* Another possibility is that this machine has a compare insn
3117 that includes the comparison code. In that case, ARG1 would
3118 be equivalent to a comparison operation that would set ARG1 to
3119 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3120 ORIG_CODE is the actual comparison being done; if it is an EQ,
3121 we must reverse ORIG_CODE. On machine with a negative value
3122 for STORE_FLAG_VALUE, also look at LT and GE operations. */
3123 || ((code == NE
3124 || (code == LT
3125 && GET_MODE_CLASS (inner_mode) == MODE_INT
3126 && (GET_MODE_BITSIZE (inner_mode)
3127 <= HOST_BITS_PER_WIDE_INT)
3128 && (STORE_FLAG_VALUE
3129 & ((HOST_WIDE_INT) 1
3130 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3131 #ifdef FLOAT_STORE_FLAG_VALUE
3132 || (code == LT
3133 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3134 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3135 REAL_VALUE_NEGATIVE (fsfv)))
3136 #endif
3137 )
3138 && COMPARISON_P (p->exp)))
3139 {
3140 x = p->exp;
3141 break;
3142 }
3143 else if ((code == EQ
3144 || (code == GE
3145 && GET_MODE_CLASS (inner_mode) == MODE_INT
3146 && (GET_MODE_BITSIZE (inner_mode)
3147 <= HOST_BITS_PER_WIDE_INT)
3148 && (STORE_FLAG_VALUE
3149 & ((HOST_WIDE_INT) 1
3150 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3151 #ifdef FLOAT_STORE_FLAG_VALUE
3152 || (code == GE
3153 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3154 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3155 REAL_VALUE_NEGATIVE (fsfv)))
3156 #endif
3157 )
3158 && COMPARISON_P (p->exp))
3159 {
3160 reverse_code = 1;
3161 x = p->exp;
3162 break;
3163 }
3164
3165 /* If this non-trapping address, e.g. fp + constant, the
3166 equivalent is a better operand since it may let us predict
3167 the value of the comparison. */
3168 else if (!rtx_addr_can_trap_p (p->exp))
3169 {
3170 arg1 = p->exp;
3171 continue;
3172 }
3173 }
3174
3175 /* If we didn't find a useful equivalence for ARG1, we are done.
3176 Otherwise, set up for the next iteration. */
3177 if (x == 0)
3178 break;
3179
3180 /* If we need to reverse the comparison, make sure that that is
3181 possible -- we can't necessarily infer the value of GE from LT
3182 with floating-point operands. */
3183 if (reverse_code)
3184 {
3185 enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3186 if (reversed == UNKNOWN)
3187 break;
3188 else
3189 code = reversed;
3190 }
3191 else if (COMPARISON_P (x))
3192 code = GET_CODE (x);
3193 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3194 }
3195
3196 /* Return our results. Return the modes from before fold_rtx
3197 because fold_rtx might produce const_int, and then it's too late. */
3198 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3199 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3200
3201 return code;
3202 }
3203 \f
3204 /* If X is a nontrivial arithmetic operation on an argument
3205 for which a constant value can be determined, return
3206 the result of operating on that value, as a constant.
3207 Otherwise, return X, possibly with one or more operands
3208 modified by recursive calls to this function.
3209
3210 If X is a register whose contents are known, we do NOT
3211 return those contents here. equiv_constant is called to
3212 perform that task.
3213
3214 INSN is the insn that we may be modifying. If it is 0, make a copy
3215 of X before modifying it. */
3216
3217 static rtx
3218 fold_rtx (rtx x, rtx insn)
3219 {
3220 enum rtx_code code;
3221 enum machine_mode mode;
3222 const char *fmt;
3223 int i;
3224 rtx new = 0;
3225 int copied = 0;
3226 int must_swap = 0;
3227
3228 /* Folded equivalents of first two operands of X. */
3229 rtx folded_arg0;
3230 rtx folded_arg1;
3231
3232 /* Constant equivalents of first three operands of X;
3233 0 when no such equivalent is known. */
3234 rtx const_arg0;
3235 rtx const_arg1;
3236 rtx const_arg2;
3237
3238 /* The mode of the first operand of X. We need this for sign and zero
3239 extends. */
3240 enum machine_mode mode_arg0;
3241
3242 if (x == 0)
3243 return x;
3244
3245 mode = GET_MODE (x);
3246 code = GET_CODE (x);
3247 switch (code)
3248 {
3249 case CONST:
3250 case CONST_INT:
3251 case CONST_DOUBLE:
3252 case CONST_VECTOR:
3253 case SYMBOL_REF:
3254 case LABEL_REF:
3255 case REG:
3256 case PC:
3257 /* No use simplifying an EXPR_LIST
3258 since they are used only for lists of args
3259 in a function call's REG_EQUAL note. */
3260 case EXPR_LIST:
3261 return x;
3262
3263 #ifdef HAVE_cc0
3264 case CC0:
3265 return prev_insn_cc0;
3266 #endif
3267
3268 case SUBREG:
3269 /* See if we previously assigned a constant value to this SUBREG. */
3270 if ((new = lookup_as_function (x, CONST_INT)) != 0
3271 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3272 return new;
3273
3274 /* If this is a paradoxical SUBREG, we have no idea what value the
3275 extra bits would have. However, if the operand is equivalent
3276 to a SUBREG whose operand is the same as our mode, and all the
3277 modes are within a word, we can just use the inner operand
3278 because these SUBREGs just say how to treat the register.
3279
3280 Similarly if we find an integer constant. */
3281
3282 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3283 {
3284 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3285 struct table_elt *elt;
3286
3287 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3288 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3289 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3290 imode)) != 0)
3291 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3292 {
3293 if (CONSTANT_P (elt->exp)
3294 && GET_MODE (elt->exp) == VOIDmode)
3295 return elt->exp;
3296
3297 if (GET_CODE (elt->exp) == SUBREG
3298 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3299 && exp_equiv_p (elt->exp, elt->exp, 1, false))
3300 return copy_rtx (SUBREG_REG (elt->exp));
3301 }
3302
3303 return x;
3304 }
3305
3306 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
3307 We might be able to if the SUBREG is extracting a single word in an
3308 integral mode or extracting the low part. */
3309
3310 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3311 const_arg0 = equiv_constant (folded_arg0);
3312 if (const_arg0)
3313 folded_arg0 = const_arg0;
3314
3315 if (folded_arg0 != SUBREG_REG (x))
3316 {
3317 new = simplify_subreg (mode, folded_arg0,
3318 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3319 if (new)
3320 return new;
3321 }
3322
3323 if (REG_P (folded_arg0)
3324 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0)))
3325 {
3326 struct table_elt *elt;
3327
3328 elt = lookup (folded_arg0,
3329 HASH (folded_arg0, GET_MODE (folded_arg0)),
3330 GET_MODE (folded_arg0));
3331
3332 if (elt)
3333 elt = elt->first_same_value;
3334
3335 if (subreg_lowpart_p (x))
3336 /* If this is a narrowing SUBREG and our operand is a REG, see
3337 if we can find an equivalence for REG that is an arithmetic
3338 operation in a wider mode where both operands are paradoxical
3339 SUBREGs from objects of our result mode. In that case, we
3340 couldn-t report an equivalent value for that operation, since we
3341 don't know what the extra bits will be. But we can find an
3342 equivalence for this SUBREG by folding that operation in the
3343 narrow mode. This allows us to fold arithmetic in narrow modes
3344 when the machine only supports word-sized arithmetic.
3345
3346 Also look for a case where we have a SUBREG whose operand
3347 is the same as our result. If both modes are smaller
3348 than a word, we are simply interpreting a register in
3349 different modes and we can use the inner value. */
3350
3351 for (; elt; elt = elt->next_same_value)
3352 {
3353 enum rtx_code eltcode = GET_CODE (elt->exp);
3354
3355 /* Just check for unary and binary operations. */
3356 if (UNARY_P (elt->exp)
3357 && eltcode != SIGN_EXTEND
3358 && eltcode != ZERO_EXTEND
3359 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3360 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode
3361 && (GET_MODE_CLASS (mode)
3362 == GET_MODE_CLASS (GET_MODE (XEXP (elt->exp, 0)))))
3363 {
3364 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3365
3366 if (!REG_P (op0) && ! CONSTANT_P (op0))
3367 op0 = fold_rtx (op0, NULL_RTX);
3368
3369 op0 = equiv_constant (op0);
3370 if (op0)
3371 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3372 op0, mode);
3373 }
3374 else if (ARITHMETIC_P (elt->exp)
3375 && eltcode != DIV && eltcode != MOD
3376 && eltcode != UDIV && eltcode != UMOD
3377 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3378 && eltcode != ROTATE && eltcode != ROTATERT
3379 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3380 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3381 == mode))
3382 || CONSTANT_P (XEXP (elt->exp, 0)))
3383 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3384 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3385 == mode))
3386 || CONSTANT_P (XEXP (elt->exp, 1))))
3387 {
3388 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3389 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3390
3391 if (op0 && !REG_P (op0) && ! CONSTANT_P (op0))
3392 op0 = fold_rtx (op0, NULL_RTX);
3393
3394 if (op0)
3395 op0 = equiv_constant (op0);
3396
3397 if (op1 && !REG_P (op1) && ! CONSTANT_P (op1))
3398 op1 = fold_rtx (op1, NULL_RTX);
3399
3400 if (op1)
3401 op1 = equiv_constant (op1);
3402
3403 /* If we are looking for the low SImode part of
3404 (ashift:DI c (const_int 32)), it doesn't work
3405 to compute that in SImode, because a 32-bit shift
3406 in SImode is unpredictable. We know the value is 0. */
3407 if (op0 && op1
3408 && GET_CODE (elt->exp) == ASHIFT
3409 && GET_CODE (op1) == CONST_INT
3410 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3411 {
3412 if (INTVAL (op1)
3413 < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3414 /* If the count fits in the inner mode's width,
3415 but exceeds the outer mode's width,
3416 the value will get truncated to 0
3417 by the subreg. */
3418 new = CONST0_RTX (mode);
3419 else
3420 /* If the count exceeds even the inner mode's width,
3421 don't fold this expression. */
3422 new = 0;
3423 }
3424 else if (op0 && op1)
3425 new = simplify_binary_operation (GET_CODE (elt->exp), mode, op0, op1);
3426 }
3427
3428 else if (GET_CODE (elt->exp) == SUBREG
3429 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3430 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3431 <= UNITS_PER_WORD)
3432 && exp_equiv_p (elt->exp, elt->exp, 1, false))
3433 new = copy_rtx (SUBREG_REG (elt->exp));
3434
3435 if (new)
3436 return new;
3437 }
3438 else
3439 /* A SUBREG resulting from a zero extension may fold to zero if
3440 it extracts higher bits than the ZERO_EXTEND's source bits.
3441 FIXME: if combine tried to, er, combine these instructions,
3442 this transformation may be moved to simplify_subreg. */
3443 for (; elt; elt = elt->next_same_value)
3444 {
3445 if (GET_CODE (elt->exp) == ZERO_EXTEND
3446 && subreg_lsb (x)
3447 >= GET_MODE_BITSIZE (GET_MODE (XEXP (elt->exp, 0))))
3448 return CONST0_RTX (mode);
3449 }
3450 }
3451
3452 return x;
3453
3454 case NOT:
3455 case NEG:
3456 /* If we have (NOT Y), see if Y is known to be (NOT Z).
3457 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
3458 new = lookup_as_function (XEXP (x, 0), code);
3459 if (new)
3460 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3461 break;
3462
3463 case MEM:
3464 /* If we are not actually processing an insn, don't try to find the
3465 best address. Not only don't we care, but we could modify the
3466 MEM in an invalid way since we have no insn to validate against. */
3467 if (insn != 0)
3468 find_best_addr (insn, &XEXP (x, 0), GET_MODE (x));
3469
3470 {
3471 /* Even if we don't fold in the insn itself,
3472 we can safely do so here, in hopes of getting a constant. */
3473 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3474 rtx base = 0;
3475 HOST_WIDE_INT offset = 0;
3476
3477 if (REG_P (addr)
3478 && REGNO_QTY_VALID_P (REGNO (addr)))
3479 {
3480 int addr_q = REG_QTY (REGNO (addr));
3481 struct qty_table_elem *addr_ent = &qty_table[addr_q];
3482
3483 if (GET_MODE (addr) == addr_ent->mode
3484 && addr_ent->const_rtx != NULL_RTX)
3485 addr = addr_ent->const_rtx;
3486 }
3487
3488 /* If address is constant, split it into a base and integer offset. */
3489 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3490 base = addr;
3491 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3492 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3493 {
3494 base = XEXP (XEXP (addr, 0), 0);
3495 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3496 }
3497 else if (GET_CODE (addr) == LO_SUM
3498 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3499 base = XEXP (addr, 1);
3500
3501 /* If this is a constant pool reference, we can fold it into its
3502 constant to allow better value tracking. */
3503 if (base && GET_CODE (base) == SYMBOL_REF
3504 && CONSTANT_POOL_ADDRESS_P (base))
3505 {
3506 rtx constant = get_pool_constant (base);
3507 enum machine_mode const_mode = get_pool_mode (base);
3508 rtx new;
3509
3510 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3511 {
3512 constant_pool_entries_cost = COST (constant);
3513 constant_pool_entries_regcost = approx_reg_cost (constant);
3514 }
3515
3516 /* If we are loading the full constant, we have an equivalence. */
3517 if (offset == 0 && mode == const_mode)
3518 return constant;
3519
3520 /* If this actually isn't a constant (weird!), we can't do
3521 anything. Otherwise, handle the two most common cases:
3522 extracting a word from a multi-word constant, and extracting
3523 the low-order bits. Other cases don't seem common enough to
3524 worry about. */
3525 if (! CONSTANT_P (constant))
3526 return x;
3527
3528 if (GET_MODE_CLASS (mode) == MODE_INT
3529 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3530 && offset % UNITS_PER_WORD == 0
3531 && (new = operand_subword (constant,
3532 offset / UNITS_PER_WORD,
3533 0, const_mode)) != 0)
3534 return new;
3535
3536 if (((BYTES_BIG_ENDIAN
3537 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3538 || (! BYTES_BIG_ENDIAN && offset == 0))
3539 && (new = gen_lowpart (mode, constant)) != 0)
3540 return new;
3541 }
3542
3543 /* If this is a reference to a label at a known position in a jump
3544 table, we also know its value. */
3545 if (base && GET_CODE (base) == LABEL_REF)
3546 {
3547 rtx label = XEXP (base, 0);
3548 rtx table_insn = NEXT_INSN (label);
3549
3550 if (table_insn && JUMP_P (table_insn)
3551 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3552 {
3553 rtx table = PATTERN (table_insn);
3554
3555 if (offset >= 0
3556 && (offset / GET_MODE_SIZE (GET_MODE (table))
3557 < XVECLEN (table, 0)))
3558 return XVECEXP (table, 0,
3559 offset / GET_MODE_SIZE (GET_MODE (table)));
3560 }
3561 if (table_insn && JUMP_P (table_insn)
3562 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3563 {
3564 rtx table = PATTERN (table_insn);
3565
3566 if (offset >= 0
3567 && (offset / GET_MODE_SIZE (GET_MODE (table))
3568 < XVECLEN (table, 1)))
3569 {
3570 offset /= GET_MODE_SIZE (GET_MODE (table));
3571 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3572 XEXP (table, 0));
3573
3574 if (GET_MODE (table) != Pmode)
3575 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3576
3577 /* Indicate this is a constant. This isn't a
3578 valid form of CONST, but it will only be used
3579 to fold the next insns and then discarded, so
3580 it should be safe.
3581
3582 Note this expression must be explicitly discarded,
3583 by cse_insn, else it may end up in a REG_EQUAL note
3584 and "escape" to cause problems elsewhere. */
3585 return gen_rtx_CONST (GET_MODE (new), new);
3586 }
3587 }
3588 }
3589
3590 return x;
3591 }
3592
3593 #ifdef NO_FUNCTION_CSE
3594 case CALL:
3595 if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3596 return x;
3597 break;
3598 #endif
3599
3600 case ASM_OPERANDS:
3601 if (insn)
3602 {
3603 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3604 validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3605 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3606 }
3607 break;
3608
3609 default:
3610 break;
3611 }
3612
3613 const_arg0 = 0;
3614 const_arg1 = 0;
3615 const_arg2 = 0;
3616 mode_arg0 = VOIDmode;
3617
3618 /* Try folding our operands.
3619 Then see which ones have constant values known. */
3620
3621 fmt = GET_RTX_FORMAT (code);
3622 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3623 if (fmt[i] == 'e')
3624 {
3625 rtx arg = XEXP (x, i);
3626 rtx folded_arg = arg, const_arg = 0;
3627 enum machine_mode mode_arg = GET_MODE (arg);
3628 rtx cheap_arg, expensive_arg;
3629 rtx replacements[2];
3630 int j;
3631 int old_cost = COST_IN (XEXP (x, i), code);
3632
3633 /* Most arguments are cheap, so handle them specially. */
3634 switch (GET_CODE (arg))
3635 {
3636 case REG:
3637 /* This is the same as calling equiv_constant; it is duplicated
3638 here for speed. */
3639 if (REGNO_QTY_VALID_P (REGNO (arg)))
3640 {
3641 int arg_q = REG_QTY (REGNO (arg));
3642 struct qty_table_elem *arg_ent = &qty_table[arg_q];
3643
3644 if (arg_ent->const_rtx != NULL_RTX
3645 && !REG_P (arg_ent->const_rtx)
3646 && GET_CODE (arg_ent->const_rtx) != PLUS)
3647 const_arg
3648 = gen_lowpart (GET_MODE (arg),
3649 arg_ent->const_rtx);
3650 }
3651 break;
3652
3653 case CONST:
3654 case CONST_INT:
3655 case SYMBOL_REF:
3656 case LABEL_REF:
3657 case CONST_DOUBLE:
3658 case CONST_VECTOR:
3659 const_arg = arg;
3660 break;
3661
3662 #ifdef HAVE_cc0
3663 case CC0:
3664 folded_arg = prev_insn_cc0;
3665 mode_arg = prev_insn_cc0_mode;
3666 const_arg = equiv_constant (folded_arg);
3667 break;
3668 #endif
3669
3670 default:
3671 folded_arg = fold_rtx (arg, insn);
3672 const_arg = equiv_constant (folded_arg);
3673 }
3674
3675 /* For the first three operands, see if the operand
3676 is constant or equivalent to a constant. */
3677 switch (i)
3678 {
3679 case 0:
3680 folded_arg0 = folded_arg;
3681 const_arg0 = const_arg;
3682 mode_arg0 = mode_arg;
3683 break;
3684 case 1:
3685 folded_arg1 = folded_arg;
3686 const_arg1 = const_arg;
3687 break;
3688 case 2:
3689 const_arg2 = const_arg;
3690 break;
3691 }
3692
3693 /* Pick the least expensive of the folded argument and an
3694 equivalent constant argument. */
3695 if (const_arg == 0 || const_arg == folded_arg
3696 || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
3697 cheap_arg = folded_arg, expensive_arg = const_arg;
3698 else
3699 cheap_arg = const_arg, expensive_arg = folded_arg;
3700
3701 /* Try to replace the operand with the cheapest of the two
3702 possibilities. If it doesn't work and this is either of the first
3703 two operands of a commutative operation, try swapping them.
3704 If THAT fails, try the more expensive, provided it is cheaper
3705 than what is already there. */
3706
3707 if (cheap_arg == XEXP (x, i))
3708 continue;
3709
3710 if (insn == 0 && ! copied)
3711 {
3712 x = copy_rtx (x);
3713 copied = 1;
3714 }
3715
3716 /* Order the replacements from cheapest to most expensive. */
3717 replacements[0] = cheap_arg;
3718 replacements[1] = expensive_arg;
3719
3720 for (j = 0; j < 2 && replacements[j]; j++)
3721 {
3722 int new_cost = COST_IN (replacements[j], code);
3723
3724 /* Stop if what existed before was cheaper. Prefer constants
3725 in the case of a tie. */
3726 if (new_cost > old_cost
3727 || (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3728 break;
3729
3730 /* It's not safe to substitute the operand of a conversion
3731 operator with a constant, as the conversion's identity
3732 depends upon the mode of it's operand. This optimization
3733 is handled by the call to simplify_unary_operation. */
3734 if (GET_RTX_CLASS (code) == RTX_UNARY
3735 && GET_MODE (replacements[j]) != mode_arg0
3736 && (code == ZERO_EXTEND
3737 || code == SIGN_EXTEND
3738 || code == TRUNCATE
3739 || code == FLOAT_TRUNCATE
3740 || code == FLOAT_EXTEND
3741 || code == FLOAT
3742 || code == FIX
3743 || code == UNSIGNED_FLOAT
3744 || code == UNSIGNED_FIX))
3745 continue;
3746
3747 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3748 break;
3749
3750 if (GET_RTX_CLASS (code) == RTX_COMM_COMPARE
3751 || GET_RTX_CLASS (code) == RTX_COMM_ARITH)
3752 {
3753 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3754 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3755
3756 if (apply_change_group ())
3757 {
3758 /* Swap them back to be invalid so that this loop can
3759 continue and flag them to be swapped back later. */
3760 rtx tem;
3761
3762 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3763 XEXP (x, 1) = tem;
3764 must_swap = 1;
3765 break;
3766 }
3767 }
3768 }
3769 }
3770
3771 else
3772 {
3773 if (fmt[i] == 'E')
3774 /* Don't try to fold inside of a vector of expressions.
3775 Doing nothing is harmless. */
3776 {;}
3777 }
3778
3779 /* If a commutative operation, place a constant integer as the second
3780 operand unless the first operand is also a constant integer. Otherwise,
3781 place any constant second unless the first operand is also a constant. */
3782
3783 if (COMMUTATIVE_P (x))
3784 {
3785 if (must_swap
3786 || swap_commutative_operands_p (const_arg0 ? const_arg0
3787 : XEXP (x, 0),
3788 const_arg1 ? const_arg1
3789 : XEXP (x, 1)))
3790 {
3791 rtx tem = XEXP (x, 0);
3792
3793 if (insn == 0 && ! copied)
3794 {
3795 x = copy_rtx (x);
3796 copied = 1;
3797 }
3798
3799 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3800 validate_change (insn, &XEXP (x, 1), tem, 1);
3801 if (apply_change_group ())
3802 {
3803 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3804 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3805 }
3806 }
3807 }
3808
3809 /* If X is an arithmetic operation, see if we can simplify it. */
3810
3811 switch (GET_RTX_CLASS (code))
3812 {
3813 case RTX_UNARY:
3814 {
3815 int is_const = 0;
3816
3817 /* We can't simplify extension ops unless we know the
3818 original mode. */
3819 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3820 && mode_arg0 == VOIDmode)
3821 break;
3822
3823 /* If we had a CONST, strip it off and put it back later if we
3824 fold. */
3825 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3826 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3827
3828 new = simplify_unary_operation (code, mode,
3829 const_arg0 ? const_arg0 : folded_arg0,
3830 mode_arg0);
3831 /* NEG of PLUS could be converted into MINUS, but that causes
3832 expressions of the form
3833 (CONST (MINUS (CONST_INT) (SYMBOL_REF)))
3834 which many ports mistakenly treat as LEGITIMATE_CONSTANT_P.
3835 FIXME: those ports should be fixed. */
3836 if (new != 0 && is_const
3837 && GET_CODE (new) == PLUS
3838 && (GET_CODE (XEXP (new, 0)) == SYMBOL_REF
3839 || GET_CODE (XEXP (new, 0)) == LABEL_REF)
3840 && GET_CODE (XEXP (new, 1)) == CONST_INT)
3841 new = gen_rtx_CONST (mode, new);
3842 }
3843 break;
3844
3845 case RTX_COMPARE:
3846 case RTX_COMM_COMPARE:
3847 /* See what items are actually being compared and set FOLDED_ARG[01]
3848 to those values and CODE to the actual comparison code. If any are
3849 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3850 do anything if both operands are already known to be constant. */
3851
3852 /* ??? Vector mode comparisons are not supported yet. */
3853 if (VECTOR_MODE_P (mode))
3854 break;
3855
3856 if (const_arg0 == 0 || const_arg1 == 0)
3857 {
3858 struct table_elt *p0, *p1;
3859 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3860 enum machine_mode mode_arg1;
3861
3862 #ifdef FLOAT_STORE_FLAG_VALUE
3863 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3864 {
3865 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3866 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3867 false_rtx = CONST0_RTX (mode);
3868 }
3869 #endif
3870
3871 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3872 &mode_arg0, &mode_arg1);
3873
3874 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3875 what kinds of things are being compared, so we can't do
3876 anything with this comparison. */
3877
3878 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3879 break;
3880
3881 const_arg0 = equiv_constant (folded_arg0);
3882 const_arg1 = equiv_constant (folded_arg1);
3883
3884 /* If we do not now have two constants being compared, see
3885 if we can nevertheless deduce some things about the
3886 comparison. */
3887 if (const_arg0 == 0 || const_arg1 == 0)
3888 {
3889 /* Some addresses are known to be nonzero. We don't know
3890 their sign, but equality comparisons are known. */
3891 if (const_arg1 == const0_rtx
3892 && nonzero_address_p (folded_arg0))
3893 {
3894 if (code == EQ)
3895 return false_rtx;
3896 else if (code == NE)
3897 return true_rtx;
3898 }
3899
3900 /* See if the two operands are the same. */
3901
3902 if (folded_arg0 == folded_arg1
3903 || (REG_P (folded_arg0)
3904 && REG_P (folded_arg1)
3905 && (REG_QTY (REGNO (folded_arg0))
3906 == REG_QTY (REGNO (folded_arg1))))
3907 || ((p0 = lookup (folded_arg0,
3908 SAFE_HASH (folded_arg0, mode_arg0),
3909 mode_arg0))
3910 && (p1 = lookup (folded_arg1,
3911 SAFE_HASH (folded_arg1, mode_arg0),
3912 mode_arg0))
3913 && p0->first_same_value == p1->first_same_value))
3914 {
3915 /* Sadly two equal NaNs are not equivalent. */
3916 if (!HONOR_NANS (mode_arg0))
3917 return ((code == EQ || code == LE || code == GE
3918 || code == LEU || code == GEU || code == UNEQ
3919 || code == UNLE || code == UNGE
3920 || code == ORDERED)
3921 ? true_rtx : false_rtx);
3922 /* Take care for the FP compares we can resolve. */
3923 if (code == UNEQ || code == UNLE || code == UNGE)
3924 return true_rtx;
3925 if (code == LTGT || code == LT || code == GT)
3926 return false_rtx;
3927 }
3928
3929 /* If FOLDED_ARG0 is a register, see if the comparison we are
3930 doing now is either the same as we did before or the reverse
3931 (we only check the reverse if not floating-point). */
3932 else if (REG_P (folded_arg0))
3933 {
3934 int qty = REG_QTY (REGNO (folded_arg0));
3935
3936 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3937 {
3938 struct qty_table_elem *ent = &qty_table[qty];
3939
3940 if ((comparison_dominates_p (ent->comparison_code, code)
3941 || (! FLOAT_MODE_P (mode_arg0)
3942 && comparison_dominates_p (ent->comparison_code,
3943 reverse_condition (code))))
3944 && (rtx_equal_p (ent->comparison_const, folded_arg1)
3945 || (const_arg1
3946 && rtx_equal_p (ent->comparison_const,
3947 const_arg1))
3948 || (REG_P (folded_arg1)
3949 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
3950 return (comparison_dominates_p (ent->comparison_code, code)
3951 ? true_rtx : false_rtx);
3952 }
3953 }
3954 }
3955 }
3956
3957 /* If we are comparing against zero, see if the first operand is
3958 equivalent to an IOR with a constant. If so, we may be able to
3959 determine the result of this comparison. */
3960
3961 if (const_arg1 == const0_rtx)
3962 {
3963 rtx y = lookup_as_function (folded_arg0, IOR);
3964 rtx inner_const;
3965
3966 if (y != 0
3967 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
3968 && GET_CODE (inner_const) == CONST_INT
3969 && INTVAL (inner_const) != 0)
3970 {
3971 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
3972 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
3973 && (INTVAL (inner_const)
3974 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
3975 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3976
3977 #ifdef FLOAT_STORE_FLAG_VALUE
3978 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3979 {
3980 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3981 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3982 false_rtx = CONST0_RTX (mode);
3983 }
3984 #endif
3985
3986 switch (code)
3987 {
3988 case EQ:
3989 return false_rtx;
3990 case NE:
3991 return true_rtx;
3992 case LT: case LE:
3993 if (has_sign)
3994 return true_rtx;
3995 break;
3996 case GT: case GE:
3997 if (has_sign)
3998 return false_rtx;
3999 break;
4000 default:
4001 break;
4002 }
4003 }
4004 }
4005
4006 {
4007 rtx op0 = const_arg0 ? const_arg0 : folded_arg0;
4008 rtx op1 = const_arg1 ? const_arg1 : folded_arg1;
4009 new = simplify_relational_operation (code, mode, mode_arg0, op0, op1);
4010 }
4011 break;
4012
4013 case RTX_BIN_ARITH:
4014 case RTX_COMM_ARITH:
4015 switch (code)
4016 {
4017 case PLUS:
4018 /* If the second operand is a LABEL_REF, see if the first is a MINUS
4019 with that LABEL_REF as its second operand. If so, the result is
4020 the first operand of that MINUS. This handles switches with an
4021 ADDR_DIFF_VEC table. */
4022 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4023 {
4024 rtx y
4025 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
4026 : lookup_as_function (folded_arg0, MINUS);
4027
4028 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4029 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4030 return XEXP (y, 0);
4031
4032 /* Now try for a CONST of a MINUS like the above. */
4033 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
4034 : lookup_as_function (folded_arg0, CONST))) != 0
4035 && GET_CODE (XEXP (y, 0)) == MINUS
4036 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4037 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
4038 return XEXP (XEXP (y, 0), 0);
4039 }
4040
4041 /* Likewise if the operands are in the other order. */
4042 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
4043 {
4044 rtx y
4045 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
4046 : lookup_as_function (folded_arg1, MINUS);
4047
4048 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4049 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
4050 return XEXP (y, 0);
4051
4052 /* Now try for a CONST of a MINUS like the above. */
4053 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
4054 : lookup_as_function (folded_arg1, CONST))) != 0
4055 && GET_CODE (XEXP (y, 0)) == MINUS
4056 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4057 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
4058 return XEXP (XEXP (y, 0), 0);
4059 }
4060
4061 /* If second operand is a register equivalent to a negative
4062 CONST_INT, see if we can find a register equivalent to the
4063 positive constant. Make a MINUS if so. Don't do this for
4064 a non-negative constant since we might then alternate between
4065 choosing positive and negative constants. Having the positive
4066 constant previously-used is the more common case. Be sure
4067 the resulting constant is non-negative; if const_arg1 were
4068 the smallest negative number this would overflow: depending
4069 on the mode, this would either just be the same value (and
4070 hence not save anything) or be incorrect. */
4071 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4072 && INTVAL (const_arg1) < 0
4073 /* This used to test
4074
4075 -INTVAL (const_arg1) >= 0
4076
4077 But The Sun V5.0 compilers mis-compiled that test. So
4078 instead we test for the problematic value in a more direct
4079 manner and hope the Sun compilers get it correct. */
4080 && INTVAL (const_arg1) !=
4081 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
4082 && REG_P (folded_arg1))
4083 {
4084 rtx new_const = GEN_INT (-INTVAL (const_arg1));
4085 struct table_elt *p
4086 = lookup (new_const, SAFE_HASH (new_const, mode), mode);
4087
4088 if (p)
4089 for (p = p->first_same_value; p; p = p->next_same_value)
4090 if (REG_P (p->exp))
4091 return simplify_gen_binary (MINUS, mode, folded_arg0,
4092 canon_reg (p->exp, NULL_RTX));
4093 }
4094 goto from_plus;
4095
4096 case MINUS:
4097 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4098 If so, produce (PLUS Z C2-C). */
4099 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4100 {
4101 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4102 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
4103 return fold_rtx (plus_constant (copy_rtx (y),
4104 -INTVAL (const_arg1)),
4105 NULL_RTX);
4106 }
4107
4108 /* Fall through. */
4109
4110 from_plus:
4111 case SMIN: case SMAX: case UMIN: case UMAX:
4112 case IOR: case AND: case XOR:
4113 case MULT:
4114 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
4115 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4116 is known to be of similar form, we may be able to replace the
4117 operation with a combined operation. This may eliminate the
4118 intermediate operation if every use is simplified in this way.
4119 Note that the similar optimization done by combine.c only works
4120 if the intermediate operation's result has only one reference. */
4121
4122 if (REG_P (folded_arg0)
4123 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4124 {
4125 int is_shift
4126 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4127 rtx y = lookup_as_function (folded_arg0, code);
4128 rtx inner_const;
4129 enum rtx_code associate_code;
4130 rtx new_const;
4131
4132 if (y == 0
4133 || 0 == (inner_const
4134 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4135 || GET_CODE (inner_const) != CONST_INT
4136 /* If we have compiled a statement like
4137 "if (x == (x & mask1))", and now are looking at
4138 "x & mask2", we will have a case where the first operand
4139 of Y is the same as our first operand. Unless we detect
4140 this case, an infinite loop will result. */
4141 || XEXP (y, 0) == folded_arg0)
4142 break;
4143
4144 /* Don't associate these operations if they are a PLUS with the
4145 same constant and it is a power of two. These might be doable
4146 with a pre- or post-increment. Similarly for two subtracts of
4147 identical powers of two with post decrement. */
4148
4149 if (code == PLUS && const_arg1 == inner_const
4150 && ((HAVE_PRE_INCREMENT
4151 && exact_log2 (INTVAL (const_arg1)) >= 0)
4152 || (HAVE_POST_INCREMENT
4153 && exact_log2 (INTVAL (const_arg1)) >= 0)
4154 || (HAVE_PRE_DECREMENT
4155 && exact_log2 (- INTVAL (const_arg1)) >= 0)
4156 || (HAVE_POST_DECREMENT
4157 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4158 break;
4159
4160 /* Compute the code used to compose the constants. For example,
4161 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS. */
4162
4163 associate_code = (is_shift || code == MINUS ? PLUS : code);
4164
4165 new_const = simplify_binary_operation (associate_code, mode,
4166 const_arg1, inner_const);
4167
4168 if (new_const == 0)
4169 break;
4170
4171 /* If we are associating shift operations, don't let this
4172 produce a shift of the size of the object or larger.
4173 This could occur when we follow a sign-extend by a right
4174 shift on a machine that does a sign-extend as a pair
4175 of shifts. */
4176
4177 if (is_shift && GET_CODE (new_const) == CONST_INT
4178 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4179 {
4180 /* As an exception, we can turn an ASHIFTRT of this
4181 form into a shift of the number of bits - 1. */
4182 if (code == ASHIFTRT)
4183 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4184 else
4185 break;
4186 }
4187
4188 y = copy_rtx (XEXP (y, 0));
4189
4190 /* If Y contains our first operand (the most common way this
4191 can happen is if Y is a MEM), we would do into an infinite
4192 loop if we tried to fold it. So don't in that case. */
4193
4194 if (! reg_mentioned_p (folded_arg0, y))
4195 y = fold_rtx (y, insn);
4196
4197 return simplify_gen_binary (code, mode, y, new_const);
4198 }
4199 break;
4200
4201 case DIV: case UDIV:
4202 /* ??? The associative optimization performed immediately above is
4203 also possible for DIV and UDIV using associate_code of MULT.
4204 However, we would need extra code to verify that the
4205 multiplication does not overflow, that is, there is no overflow
4206 in the calculation of new_const. */
4207 break;
4208
4209 default:
4210 break;
4211 }
4212
4213 new = simplify_binary_operation (code, mode,
4214 const_arg0 ? const_arg0 : folded_arg0,
4215 const_arg1 ? const_arg1 : folded_arg1);
4216 break;
4217
4218 case RTX_OBJ:
4219 /* (lo_sum (high X) X) is simply X. */
4220 if (code == LO_SUM && const_arg0 != 0
4221 && GET_CODE (const_arg0) == HIGH
4222 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4223 return const_arg1;
4224 break;
4225
4226 case RTX_TERNARY:
4227 case RTX_BITFIELD_OPS:
4228 new = simplify_ternary_operation (code, mode, mode_arg0,
4229 const_arg0 ? const_arg0 : folded_arg0,
4230 const_arg1 ? const_arg1 : folded_arg1,
4231 const_arg2 ? const_arg2 : XEXP (x, 2));
4232 break;
4233
4234 default:
4235 break;
4236 }
4237
4238 return new ? new : x;
4239 }
4240 \f
4241 /* Return a constant value currently equivalent to X.
4242 Return 0 if we don't know one. */
4243
4244 static rtx
4245 equiv_constant (rtx x)
4246 {
4247 if (REG_P (x)
4248 && REGNO_QTY_VALID_P (REGNO (x)))
4249 {
4250 int x_q = REG_QTY (REGNO (x));
4251 struct qty_table_elem *x_ent = &qty_table[x_q];
4252
4253 if (x_ent->const_rtx)
4254 x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
4255 }
4256
4257 if (x == 0 || CONSTANT_P (x))
4258 return x;
4259
4260 /* If X is a MEM, try to fold it outside the context of any insn to see if
4261 it might be equivalent to a constant. That handles the case where it
4262 is a constant-pool reference. Then try to look it up in the hash table
4263 in case it is something whose value we have seen before. */
4264
4265 if (MEM_P (x))
4266 {
4267 struct table_elt *elt;
4268
4269 x = fold_rtx (x, NULL_RTX);
4270 if (CONSTANT_P (x))
4271 return x;
4272
4273 elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
4274 if (elt == 0)
4275 return 0;
4276
4277 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4278 if (elt->is_const && CONSTANT_P (elt->exp))
4279 return elt->exp;
4280 }
4281
4282 return 0;
4283 }
4284 \f
4285 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4286 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4287 least-significant part of X.
4288 MODE specifies how big a part of X to return.
4289
4290 If the requested operation cannot be done, 0 is returned.
4291
4292 This is similar to gen_lowpart_general in emit-rtl.c. */
4293
4294 rtx
4295 gen_lowpart_if_possible (enum machine_mode mode, rtx x)
4296 {
4297 rtx result = gen_lowpart_common (mode, x);
4298
4299 if (result)
4300 return result;
4301 else if (MEM_P (x))
4302 {
4303 /* This is the only other case we handle. */
4304 int offset = 0;
4305 rtx new;
4306
4307 if (WORDS_BIG_ENDIAN)
4308 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4309 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4310 if (BYTES_BIG_ENDIAN)
4311 /* Adjust the address so that the address-after-the-data is
4312 unchanged. */
4313 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4314 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
4315
4316 new = adjust_address_nv (x, mode, offset);
4317 if (! memory_address_p (mode, XEXP (new, 0)))
4318 return 0;
4319
4320 return new;
4321 }
4322 else
4323 return 0;
4324 }
4325 \f
4326 /* Given INSN, a jump insn, PATH_TAKEN indicates if we are following the "taken"
4327 branch. It will be zero if not.
4328
4329 In certain cases, this can cause us to add an equivalence. For example,
4330 if we are following the taken case of
4331 if (i == 2)
4332 we can add the fact that `i' and '2' are now equivalent.
4333
4334 In any case, we can record that this comparison was passed. If the same
4335 comparison is seen later, we will know its value. */
4336
4337 static void
4338 record_jump_equiv (rtx insn, int taken)
4339 {
4340 int cond_known_true;
4341 rtx op0, op1;
4342 rtx set;
4343 enum machine_mode mode, mode0, mode1;
4344 int reversed_nonequality = 0;
4345 enum rtx_code code;
4346
4347 /* Ensure this is the right kind of insn. */
4348 if (! any_condjump_p (insn))
4349 return;
4350 set = pc_set (insn);
4351
4352 /* See if this jump condition is known true or false. */
4353 if (taken)
4354 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
4355 else
4356 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
4357
4358 /* Get the type of comparison being done and the operands being compared.
4359 If we had to reverse a non-equality condition, record that fact so we
4360 know that it isn't valid for floating-point. */
4361 code = GET_CODE (XEXP (SET_SRC (set), 0));
4362 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4363 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4364
4365 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4366 if (! cond_known_true)
4367 {
4368 code = reversed_comparison_code_parts (code, op0, op1, insn);
4369
4370 /* Don't remember if we can't find the inverse. */
4371 if (code == UNKNOWN)
4372 return;
4373 }
4374
4375 /* The mode is the mode of the non-constant. */
4376 mode = mode0;
4377 if (mode1 != VOIDmode)
4378 mode = mode1;
4379
4380 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4381 }
4382
4383 /* Yet another form of subreg creation. In this case, we want something in
4384 MODE, and we should assume OP has MODE iff it is naturally modeless. */
4385
4386 static rtx
4387 record_jump_cond_subreg (enum machine_mode mode, rtx op)
4388 {
4389 enum machine_mode op_mode = GET_MODE (op);
4390 if (op_mode == mode || op_mode == VOIDmode)
4391 return op;
4392 return lowpart_subreg (mode, op, op_mode);
4393 }
4394
4395 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4396 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4397 Make any useful entries we can with that information. Called from
4398 above function and called recursively. */
4399
4400 static void
4401 record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
4402 rtx op1, int reversed_nonequality)
4403 {
4404 unsigned op0_hash, op1_hash;
4405 int op0_in_memory, op1_in_memory;
4406 struct table_elt *op0_elt, *op1_elt;
4407
4408 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4409 we know that they are also equal in the smaller mode (this is also
4410 true for all smaller modes whether or not there is a SUBREG, but
4411 is not worth testing for with no SUBREG). */
4412
4413 /* Note that GET_MODE (op0) may not equal MODE. */
4414 if (code == EQ && GET_CODE (op0) == SUBREG
4415 && (GET_MODE_SIZE (GET_MODE (op0))
4416 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4417 {
4418 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4419 rtx tem = record_jump_cond_subreg (inner_mode, op1);
4420 if (tem)
4421 record_jump_cond (code, mode, SUBREG_REG (op0), tem,
4422 reversed_nonequality);
4423 }
4424
4425 if (code == EQ && GET_CODE (op1) == SUBREG
4426 && (GET_MODE_SIZE (GET_MODE (op1))
4427 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4428 {
4429 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4430 rtx tem = record_jump_cond_subreg (inner_mode, op0);
4431 if (tem)
4432 record_jump_cond (code, mode, SUBREG_REG (op1), tem,
4433 reversed_nonequality);
4434 }
4435
4436 /* Similarly, if this is an NE comparison, and either is a SUBREG
4437 making a smaller mode, we know the whole thing is also NE. */
4438
4439 /* Note that GET_MODE (op0) may not equal MODE;
4440 if we test MODE instead, we can get an infinite recursion
4441 alternating between two modes each wider than MODE. */
4442
4443 if (code == NE && GET_CODE (op0) == SUBREG
4444 && subreg_lowpart_p (op0)
4445 && (GET_MODE_SIZE (GET_MODE (op0))
4446 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4447 {
4448 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4449 rtx tem = record_jump_cond_subreg (inner_mode, op1);
4450 if (tem)
4451 record_jump_cond (code, mode, SUBREG_REG (op0), tem,
4452 reversed_nonequality);
4453 }
4454
4455 if (code == NE && GET_CODE (op1) == SUBREG
4456 && subreg_lowpart_p (op1)
4457 && (GET_MODE_SIZE (GET_MODE (op1))
4458 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4459 {
4460 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4461 rtx tem = record_jump_cond_subreg (inner_mode, op0);
4462 if (tem)
4463 record_jump_cond (code, mode, SUBREG_REG (op1), tem,
4464 reversed_nonequality);
4465 }
4466
4467 /* Hash both operands. */
4468
4469 do_not_record = 0;
4470 hash_arg_in_memory = 0;
4471 op0_hash = HASH (op0, mode);
4472 op0_in_memory = hash_arg_in_memory;
4473
4474 if (do_not_record)
4475 return;
4476
4477 do_not_record = 0;
4478 hash_arg_in_memory = 0;
4479 op1_hash = HASH (op1, mode);
4480 op1_in_memory = hash_arg_in_memory;
4481
4482 if (do_not_record)
4483 return;
4484
4485 /* Look up both operands. */
4486 op0_elt = lookup (op0, op0_hash, mode);
4487 op1_elt = lookup (op1, op1_hash, mode);
4488
4489 /* If both operands are already equivalent or if they are not in the
4490 table but are identical, do nothing. */
4491 if ((op0_elt != 0 && op1_elt != 0
4492 && op0_elt->first_same_value == op1_elt->first_same_value)
4493 || op0 == op1 || rtx_equal_p (op0, op1))
4494 return;
4495
4496 /* If we aren't setting two things equal all we can do is save this
4497 comparison. Similarly if this is floating-point. In the latter
4498 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4499 If we record the equality, we might inadvertently delete code
4500 whose intent was to change -0 to +0. */
4501
4502 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4503 {
4504 struct qty_table_elem *ent;
4505 int qty;
4506
4507 /* If we reversed a floating-point comparison, if OP0 is not a
4508 register, or if OP1 is neither a register or constant, we can't
4509 do anything. */
4510
4511 if (!REG_P (op1))
4512 op1 = equiv_constant (op1);
4513
4514 if ((reversed_nonequality && FLOAT_MODE_P (mode))
4515 || !REG_P (op0) || op1 == 0)
4516 return;
4517
4518 /* Put OP0 in the hash table if it isn't already. This gives it a
4519 new quantity number. */
4520 if (op0_elt == 0)
4521 {
4522 if (insert_regs (op0, NULL, 0))
4523 {
4524 rehash_using_reg (op0);
4525 op0_hash = HASH (op0, mode);
4526
4527 /* If OP0 is contained in OP1, this changes its hash code
4528 as well. Faster to rehash than to check, except
4529 for the simple case of a constant. */
4530 if (! CONSTANT_P (op1))
4531 op1_hash = HASH (op1,mode);
4532 }
4533
4534 op0_elt = insert (op0, NULL, op0_hash, mode);
4535 op0_elt->in_memory = op0_in_memory;
4536 }
4537
4538 qty = REG_QTY (REGNO (op0));
4539 ent = &qty_table[qty];
4540
4541 ent->comparison_code = code;
4542 if (REG_P (op1))
4543 {
4544 /* Look it up again--in case op0 and op1 are the same. */
4545 op1_elt = lookup (op1, op1_hash, mode);
4546
4547 /* Put OP1 in the hash table so it gets a new quantity number. */
4548 if (op1_elt == 0)
4549 {
4550 if (insert_regs (op1, NULL, 0))
4551 {
4552 rehash_using_reg (op1);
4553 op1_hash = HASH (op1, mode);
4554 }
4555
4556 op1_elt = insert (op1, NULL, op1_hash, mode);
4557 op1_elt->in_memory = op1_in_memory;
4558 }
4559
4560 ent->comparison_const = NULL_RTX;
4561 ent->comparison_qty = REG_QTY (REGNO (op1));
4562 }
4563 else
4564 {
4565 ent->comparison_const = op1;
4566 ent->comparison_qty = -1;
4567 }
4568
4569 return;
4570 }
4571
4572 /* If either side is still missing an equivalence, make it now,
4573 then merge the equivalences. */
4574
4575 if (op0_elt == 0)
4576 {
4577 if (insert_regs (op0, NULL, 0))
4578 {
4579 rehash_using_reg (op0);
4580 op0_hash = HASH (op0, mode);
4581 }
4582
4583 op0_elt = insert (op0, NULL, op0_hash, mode);
4584 op0_elt->in_memory = op0_in_memory;
4585 }
4586
4587 if (op1_elt == 0)
4588 {
4589 if (insert_regs (op1, NULL, 0))
4590 {
4591 rehash_using_reg (op1);
4592 op1_hash = HASH (op1, mode);
4593 }
4594
4595 op1_elt = insert (op1, NULL, op1_hash, mode);
4596 op1_elt->in_memory = op1_in_memory;
4597 }
4598
4599 merge_equiv_classes (op0_elt, op1_elt);
4600 }
4601 \f
4602 /* CSE processing for one instruction.
4603 First simplify sources and addresses of all assignments
4604 in the instruction, using previously-computed equivalents values.
4605 Then install the new sources and destinations in the table
4606 of available values.
4607
4608 If LIBCALL_INSN is nonzero, don't record any equivalence made in
4609 the insn. It means that INSN is inside libcall block. In this
4610 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
4611
4612 /* Data on one SET contained in the instruction. */
4613
4614 struct set
4615 {
4616 /* The SET rtx itself. */
4617 rtx rtl;
4618 /* The SET_SRC of the rtx (the original value, if it is changing). */
4619 rtx src;
4620 /* The hash-table element for the SET_SRC of the SET. */
4621 struct table_elt *src_elt;
4622 /* Hash value for the SET_SRC. */
4623 unsigned src_hash;
4624 /* Hash value for the SET_DEST. */
4625 unsigned dest_hash;
4626 /* The SET_DEST, with SUBREG, etc., stripped. */
4627 rtx inner_dest;
4628 /* Nonzero if the SET_SRC is in memory. */
4629 char src_in_memory;
4630 /* Nonzero if the SET_SRC contains something
4631 whose value cannot be predicted and understood. */
4632 char src_volatile;
4633 /* Original machine mode, in case it becomes a CONST_INT.
4634 The size of this field should match the size of the mode
4635 field of struct rtx_def (see rtl.h). */
4636 ENUM_BITFIELD(machine_mode) mode : 8;
4637 /* A constant equivalent for SET_SRC, if any. */
4638 rtx src_const;
4639 /* Original SET_SRC value used for libcall notes. */
4640 rtx orig_src;
4641 /* Hash value of constant equivalent for SET_SRC. */
4642 unsigned src_const_hash;
4643 /* Table entry for constant equivalent for SET_SRC, if any. */
4644 struct table_elt *src_const_elt;
4645 };
4646
4647 static void
4648 cse_insn (rtx insn, rtx libcall_insn)
4649 {
4650 rtx x = PATTERN (insn);
4651 int i;
4652 rtx tem;
4653 int n_sets = 0;
4654
4655 #ifdef HAVE_cc0
4656 /* Records what this insn does to set CC0. */
4657 rtx this_insn_cc0 = 0;
4658 enum machine_mode this_insn_cc0_mode = VOIDmode;
4659 #endif
4660
4661 rtx src_eqv = 0;
4662 struct table_elt *src_eqv_elt = 0;
4663 int src_eqv_volatile = 0;
4664 int src_eqv_in_memory = 0;
4665 unsigned src_eqv_hash = 0;
4666
4667 struct set *sets = (struct set *) 0;
4668
4669 this_insn = insn;
4670
4671 /* Find all the SETs and CLOBBERs in this instruction.
4672 Record all the SETs in the array `set' and count them.
4673 Also determine whether there is a CLOBBER that invalidates
4674 all memory references, or all references at varying addresses. */
4675
4676 if (CALL_P (insn))
4677 {
4678 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4679 {
4680 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4681 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4682 XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4683 }
4684 }
4685
4686 if (GET_CODE (x) == SET)
4687 {
4688 sets = alloca (sizeof (struct set));
4689 sets[0].rtl = x;
4690
4691 /* Ignore SETs that are unconditional jumps.
4692 They never need cse processing, so this does not hurt.
4693 The reason is not efficiency but rather
4694 so that we can test at the end for instructions
4695 that have been simplified to unconditional jumps
4696 and not be misled by unchanged instructions
4697 that were unconditional jumps to begin with. */
4698 if (SET_DEST (x) == pc_rtx
4699 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4700 ;
4701
4702 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4703 The hard function value register is used only once, to copy to
4704 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4705 Ensure we invalidate the destination register. On the 80386 no
4706 other code would invalidate it since it is a fixed_reg.
4707 We need not check the return of apply_change_group; see canon_reg. */
4708
4709 else if (GET_CODE (SET_SRC (x)) == CALL)
4710 {
4711 canon_reg (SET_SRC (x), insn);
4712 apply_change_group ();
4713 fold_rtx (SET_SRC (x), insn);
4714 invalidate (SET_DEST (x), VOIDmode);
4715 }
4716 else
4717 n_sets = 1;
4718 }
4719 else if (GET_CODE (x) == PARALLEL)
4720 {
4721 int lim = XVECLEN (x, 0);
4722
4723 sets = alloca (lim * sizeof (struct set));
4724
4725 /* Find all regs explicitly clobbered in this insn,
4726 and ensure they are not replaced with any other regs
4727 elsewhere in this insn.
4728 When a reg that is clobbered is also used for input,
4729 we should presume that that is for a reason,
4730 and we should not substitute some other register
4731 which is not supposed to be clobbered.
4732 Therefore, this loop cannot be merged into the one below
4733 because a CALL may precede a CLOBBER and refer to the
4734 value clobbered. We must not let a canonicalization do
4735 anything in that case. */
4736 for (i = 0; i < lim; i++)
4737 {
4738 rtx y = XVECEXP (x, 0, i);
4739 if (GET_CODE (y) == CLOBBER)
4740 {
4741 rtx clobbered = XEXP (y, 0);
4742
4743 if (REG_P (clobbered)
4744 || GET_CODE (clobbered) == SUBREG)
4745 invalidate (clobbered, VOIDmode);
4746 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4747 || GET_CODE (clobbered) == ZERO_EXTRACT)
4748 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4749 }
4750 }
4751
4752 for (i = 0; i < lim; i++)
4753 {
4754 rtx y = XVECEXP (x, 0, i);
4755 if (GET_CODE (y) == SET)
4756 {
4757 /* As above, we ignore unconditional jumps and call-insns and
4758 ignore the result of apply_change_group. */
4759 if (GET_CODE (SET_SRC (y)) == CALL)
4760 {
4761 canon_reg (SET_SRC (y), insn);
4762 apply_change_group ();
4763 fold_rtx (SET_SRC (y), insn);
4764 invalidate (SET_DEST (y), VOIDmode);
4765 }
4766 else if (SET_DEST (y) == pc_rtx
4767 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4768 ;
4769 else
4770 sets[n_sets++].rtl = y;
4771 }
4772 else if (GET_CODE (y) == CLOBBER)
4773 {
4774 /* If we clobber memory, canon the address.
4775 This does nothing when a register is clobbered
4776 because we have already invalidated the reg. */
4777 if (MEM_P (XEXP (y, 0)))
4778 canon_reg (XEXP (y, 0), NULL_RTX);
4779 }
4780 else if (GET_CODE (y) == USE
4781 && ! (REG_P (XEXP (y, 0))
4782 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4783 canon_reg (y, NULL_RTX);
4784 else if (GET_CODE (y) == CALL)
4785 {
4786 /* The result of apply_change_group can be ignored; see
4787 canon_reg. */
4788 canon_reg (y, insn);
4789 apply_change_group ();
4790 fold_rtx (y, insn);
4791 }
4792 }
4793 }
4794 else if (GET_CODE (x) == CLOBBER)
4795 {
4796 if (MEM_P (XEXP (x, 0)))
4797 canon_reg (XEXP (x, 0), NULL_RTX);
4798 }
4799
4800 /* Canonicalize a USE of a pseudo register or memory location. */
4801 else if (GET_CODE (x) == USE
4802 && ! (REG_P (XEXP (x, 0))
4803 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4804 canon_reg (XEXP (x, 0), NULL_RTX);
4805 else if (GET_CODE (x) == CALL)
4806 {
4807 /* The result of apply_change_group can be ignored; see canon_reg. */
4808 canon_reg (x, insn);
4809 apply_change_group ();
4810 fold_rtx (x, insn);
4811 }
4812
4813 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4814 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4815 is handled specially for this case, and if it isn't set, then there will
4816 be no equivalence for the destination. */
4817 if (n_sets == 1 && REG_NOTES (insn) != 0
4818 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4819 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4820 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4821 {
4822 src_eqv = fold_rtx (canon_reg (XEXP (tem, 0), NULL_RTX), insn);
4823 XEXP (tem, 0) = src_eqv;
4824 }
4825
4826 /* Canonicalize sources and addresses of destinations.
4827 We do this in a separate pass to avoid problems when a MATCH_DUP is
4828 present in the insn pattern. In that case, we want to ensure that
4829 we don't break the duplicate nature of the pattern. So we will replace
4830 both operands at the same time. Otherwise, we would fail to find an
4831 equivalent substitution in the loop calling validate_change below.
4832
4833 We used to suppress canonicalization of DEST if it appears in SRC,
4834 but we don't do this any more. */
4835
4836 for (i = 0; i < n_sets; i++)
4837 {
4838 rtx dest = SET_DEST (sets[i].rtl);
4839 rtx src = SET_SRC (sets[i].rtl);
4840 rtx new = canon_reg (src, insn);
4841 int insn_code;
4842
4843 sets[i].orig_src = src;
4844 if ((REG_P (new) && REG_P (src)
4845 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4846 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4847 || (insn_code = recog_memoized (insn)) < 0
4848 || insn_data[insn_code].n_dups > 0)
4849 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4850 else
4851 SET_SRC (sets[i].rtl) = new;
4852
4853 if (GET_CODE (dest) == ZERO_EXTRACT)
4854 {
4855 validate_change (insn, &XEXP (dest, 1),
4856 canon_reg (XEXP (dest, 1), insn), 1);
4857 validate_change (insn, &XEXP (dest, 2),
4858 canon_reg (XEXP (dest, 2), insn), 1);
4859 }
4860
4861 while (GET_CODE (dest) == SUBREG
4862 || GET_CODE (dest) == ZERO_EXTRACT
4863 || GET_CODE (dest) == STRICT_LOW_PART)
4864 dest = XEXP (dest, 0);
4865
4866 if (MEM_P (dest))
4867 canon_reg (dest, insn);
4868 }
4869
4870 /* Now that we have done all the replacements, we can apply the change
4871 group and see if they all work. Note that this will cause some
4872 canonicalizations that would have worked individually not to be applied
4873 because some other canonicalization didn't work, but this should not
4874 occur often.
4875
4876 The result of apply_change_group can be ignored; see canon_reg. */
4877
4878 apply_change_group ();
4879
4880 /* Set sets[i].src_elt to the class each source belongs to.
4881 Detect assignments from or to volatile things
4882 and set set[i] to zero so they will be ignored
4883 in the rest of this function.
4884
4885 Nothing in this loop changes the hash table or the register chains. */
4886
4887 for (i = 0; i < n_sets; i++)
4888 {
4889 rtx src, dest;
4890 rtx src_folded;
4891 struct table_elt *elt = 0, *p;
4892 enum machine_mode mode;
4893 rtx src_eqv_here;
4894 rtx src_const = 0;
4895 rtx src_related = 0;
4896 struct table_elt *src_const_elt = 0;
4897 int src_cost = MAX_COST;
4898 int src_eqv_cost = MAX_COST;
4899 int src_folded_cost = MAX_COST;
4900 int src_related_cost = MAX_COST;
4901 int src_elt_cost = MAX_COST;
4902 int src_regcost = MAX_COST;
4903 int src_eqv_regcost = MAX_COST;
4904 int src_folded_regcost = MAX_COST;
4905 int src_related_regcost = MAX_COST;
4906 int src_elt_regcost = MAX_COST;
4907 /* Set nonzero if we need to call force_const_mem on with the
4908 contents of src_folded before using it. */
4909 int src_folded_force_flag = 0;
4910
4911 dest = SET_DEST (sets[i].rtl);
4912 src = SET_SRC (sets[i].rtl);
4913
4914 /* If SRC is a constant that has no machine mode,
4915 hash it with the destination's machine mode.
4916 This way we can keep different modes separate. */
4917
4918 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4919 sets[i].mode = mode;
4920
4921 if (src_eqv)
4922 {
4923 enum machine_mode eqvmode = mode;
4924 if (GET_CODE (dest) == STRICT_LOW_PART)
4925 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4926 do_not_record = 0;
4927 hash_arg_in_memory = 0;
4928 src_eqv_hash = HASH (src_eqv, eqvmode);
4929
4930 /* Find the equivalence class for the equivalent expression. */
4931
4932 if (!do_not_record)
4933 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
4934
4935 src_eqv_volatile = do_not_record;
4936 src_eqv_in_memory = hash_arg_in_memory;
4937 }
4938
4939 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4940 value of the INNER register, not the destination. So it is not
4941 a valid substitution for the source. But save it for later. */
4942 if (GET_CODE (dest) == STRICT_LOW_PART)
4943 src_eqv_here = 0;
4944 else
4945 src_eqv_here = src_eqv;
4946
4947 /* Simplify and foldable subexpressions in SRC. Then get the fully-
4948 simplified result, which may not necessarily be valid. */
4949 src_folded = fold_rtx (src, insn);
4950
4951 #if 0
4952 /* ??? This caused bad code to be generated for the m68k port with -O2.
4953 Suppose src is (CONST_INT -1), and that after truncation src_folded
4954 is (CONST_INT 3). Suppose src_folded is then used for src_const.
4955 At the end we will add src and src_const to the same equivalence
4956 class. We now have 3 and -1 on the same equivalence class. This
4957 causes later instructions to be mis-optimized. */
4958 /* If storing a constant in a bitfield, pre-truncate the constant
4959 so we will be able to record it later. */
4960 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
4961 {
4962 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
4963
4964 if (GET_CODE (src) == CONST_INT
4965 && GET_CODE (width) == CONST_INT
4966 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
4967 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
4968 src_folded
4969 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
4970 << INTVAL (width)) - 1));
4971 }
4972 #endif
4973
4974 /* Compute SRC's hash code, and also notice if it
4975 should not be recorded at all. In that case,
4976 prevent any further processing of this assignment. */
4977 do_not_record = 0;
4978 hash_arg_in_memory = 0;
4979
4980 sets[i].src = src;
4981 sets[i].src_hash = HASH (src, mode);
4982 sets[i].src_volatile = do_not_record;
4983 sets[i].src_in_memory = hash_arg_in_memory;
4984
4985 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
4986 a pseudo, do not record SRC. Using SRC as a replacement for
4987 anything else will be incorrect in that situation. Note that
4988 this usually occurs only for stack slots, in which case all the
4989 RTL would be referring to SRC, so we don't lose any optimization
4990 opportunities by not having SRC in the hash table. */
4991
4992 if (MEM_P (src)
4993 && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
4994 && REG_P (dest)
4995 && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
4996 sets[i].src_volatile = 1;
4997
4998 #if 0
4999 /* It is no longer clear why we used to do this, but it doesn't
5000 appear to still be needed. So let's try without it since this
5001 code hurts cse'ing widened ops. */
5002 /* If source is a paradoxical subreg (such as QI treated as an SI),
5003 treat it as volatile. It may do the work of an SI in one context
5004 where the extra bits are not being used, but cannot replace an SI
5005 in general. */
5006 if (GET_CODE (src) == SUBREG
5007 && (GET_MODE_SIZE (GET_MODE (src))
5008 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5009 sets[i].src_volatile = 1;
5010 #endif
5011
5012 /* Locate all possible equivalent forms for SRC. Try to replace
5013 SRC in the insn with each cheaper equivalent.
5014
5015 We have the following types of equivalents: SRC itself, a folded
5016 version, a value given in a REG_EQUAL note, or a value related
5017 to a constant.
5018
5019 Each of these equivalents may be part of an additional class
5020 of equivalents (if more than one is in the table, they must be in
5021 the same class; we check for this).
5022
5023 If the source is volatile, we don't do any table lookups.
5024
5025 We note any constant equivalent for possible later use in a
5026 REG_NOTE. */
5027
5028 if (!sets[i].src_volatile)
5029 elt = lookup (src, sets[i].src_hash, mode);
5030
5031 sets[i].src_elt = elt;
5032
5033 if (elt && src_eqv_here && src_eqv_elt)
5034 {
5035 if (elt->first_same_value != src_eqv_elt->first_same_value)
5036 {
5037 /* The REG_EQUAL is indicating that two formerly distinct
5038 classes are now equivalent. So merge them. */
5039 merge_equiv_classes (elt, src_eqv_elt);
5040 src_eqv_hash = HASH (src_eqv, elt->mode);
5041 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
5042 }
5043
5044 src_eqv_here = 0;
5045 }
5046
5047 else if (src_eqv_elt)
5048 elt = src_eqv_elt;
5049
5050 /* Try to find a constant somewhere and record it in `src_const'.
5051 Record its table element, if any, in `src_const_elt'. Look in
5052 any known equivalences first. (If the constant is not in the
5053 table, also set `sets[i].src_const_hash'). */
5054 if (elt)
5055 for (p = elt->first_same_value; p; p = p->next_same_value)
5056 if (p->is_const)
5057 {
5058 src_const = p->exp;
5059 src_const_elt = elt;
5060 break;
5061 }
5062
5063 if (src_const == 0
5064 && (CONSTANT_P (src_folded)
5065 /* Consider (minus (label_ref L1) (label_ref L2)) as
5066 "constant" here so we will record it. This allows us
5067 to fold switch statements when an ADDR_DIFF_VEC is used. */
5068 || (GET_CODE (src_folded) == MINUS
5069 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5070 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5071 src_const = src_folded, src_const_elt = elt;
5072 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5073 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5074
5075 /* If we don't know if the constant is in the table, get its
5076 hash code and look it up. */
5077 if (src_const && src_const_elt == 0)
5078 {
5079 sets[i].src_const_hash = HASH (src_const, mode);
5080 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
5081 }
5082
5083 sets[i].src_const = src_const;
5084 sets[i].src_const_elt = src_const_elt;
5085
5086 /* If the constant and our source are both in the table, mark them as
5087 equivalent. Otherwise, if a constant is in the table but the source
5088 isn't, set ELT to it. */
5089 if (src_const_elt && elt
5090 && src_const_elt->first_same_value != elt->first_same_value)
5091 merge_equiv_classes (elt, src_const_elt);
5092 else if (src_const_elt && elt == 0)
5093 elt = src_const_elt;
5094
5095 /* See if there is a register linearly related to a constant
5096 equivalent of SRC. */
5097 if (src_const
5098 && (GET_CODE (src_const) == CONST
5099 || (src_const_elt && src_const_elt->related_value != 0)))
5100 {
5101 src_related = use_related_value (src_const, src_const_elt);
5102 if (src_related)
5103 {
5104 struct table_elt *src_related_elt
5105 = lookup (src_related, HASH (src_related, mode), mode);
5106 if (src_related_elt && elt)
5107 {
5108 if (elt->first_same_value
5109 != src_related_elt->first_same_value)
5110 /* This can occur when we previously saw a CONST
5111 involving a SYMBOL_REF and then see the SYMBOL_REF
5112 twice. Merge the involved classes. */
5113 merge_equiv_classes (elt, src_related_elt);
5114
5115 src_related = 0;
5116 src_related_elt = 0;
5117 }
5118 else if (src_related_elt && elt == 0)
5119 elt = src_related_elt;
5120 }
5121 }
5122
5123 /* See if we have a CONST_INT that is already in a register in a
5124 wider mode. */
5125
5126 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5127 && GET_MODE_CLASS (mode) == MODE_INT
5128 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5129 {
5130 enum machine_mode wider_mode;
5131
5132 for (wider_mode = GET_MODE_WIDER_MODE (mode);
5133 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5134 && src_related == 0;
5135 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5136 {
5137 struct table_elt *const_elt
5138 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5139
5140 if (const_elt == 0)
5141 continue;
5142
5143 for (const_elt = const_elt->first_same_value;
5144 const_elt; const_elt = const_elt->next_same_value)
5145 if (REG_P (const_elt->exp))
5146 {
5147 src_related = gen_lowpart (mode,
5148 const_elt->exp);
5149 break;
5150 }
5151 }
5152 }
5153
5154 /* Another possibility is that we have an AND with a constant in
5155 a mode narrower than a word. If so, it might have been generated
5156 as part of an "if" which would narrow the AND. If we already
5157 have done the AND in a wider mode, we can use a SUBREG of that
5158 value. */
5159
5160 if (flag_expensive_optimizations && ! src_related
5161 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5162 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5163 {
5164 enum machine_mode tmode;
5165 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5166
5167 for (tmode = GET_MODE_WIDER_MODE (mode);
5168 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5169 tmode = GET_MODE_WIDER_MODE (tmode))
5170 {
5171 rtx inner = gen_lowpart (tmode, XEXP (src, 0));
5172 struct table_elt *larger_elt;
5173
5174 if (inner)
5175 {
5176 PUT_MODE (new_and, tmode);
5177 XEXP (new_and, 0) = inner;
5178 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5179 if (larger_elt == 0)
5180 continue;
5181
5182 for (larger_elt = larger_elt->first_same_value;
5183 larger_elt; larger_elt = larger_elt->next_same_value)
5184 if (REG_P (larger_elt->exp))
5185 {
5186 src_related
5187 = gen_lowpart (mode, larger_elt->exp);
5188 break;
5189 }
5190
5191 if (src_related)
5192 break;
5193 }
5194 }
5195 }
5196
5197 #ifdef LOAD_EXTEND_OP
5198 /* See if a MEM has already been loaded with a widening operation;
5199 if it has, we can use a subreg of that. Many CISC machines
5200 also have such operations, but this is only likely to be
5201 beneficial on these machines. */
5202
5203 if (flag_expensive_optimizations && src_related == 0
5204 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5205 && GET_MODE_CLASS (mode) == MODE_INT
5206 && MEM_P (src) && ! do_not_record
5207 && LOAD_EXTEND_OP (mode) != UNKNOWN)
5208 {
5209 struct rtx_def memory_extend_buf;
5210 rtx memory_extend_rtx = &memory_extend_buf;
5211 enum machine_mode tmode;
5212
5213 /* Set what we are trying to extend and the operation it might
5214 have been extended with. */
5215 memset (memory_extend_rtx, 0, sizeof(*memory_extend_rtx));
5216 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5217 XEXP (memory_extend_rtx, 0) = src;
5218
5219 for (tmode = GET_MODE_WIDER_MODE (mode);
5220 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5221 tmode = GET_MODE_WIDER_MODE (tmode))
5222 {
5223 struct table_elt *larger_elt;
5224
5225 PUT_MODE (memory_extend_rtx, tmode);
5226 larger_elt = lookup (memory_extend_rtx,
5227 HASH (memory_extend_rtx, tmode), tmode);
5228 if (larger_elt == 0)
5229 continue;
5230
5231 for (larger_elt = larger_elt->first_same_value;
5232 larger_elt; larger_elt = larger_elt->next_same_value)
5233 if (REG_P (larger_elt->exp))
5234 {
5235 src_related = gen_lowpart (mode,
5236 larger_elt->exp);
5237 break;
5238 }
5239
5240 if (src_related)
5241 break;
5242 }
5243 }
5244 #endif /* LOAD_EXTEND_OP */
5245
5246 if (src == src_folded)
5247 src_folded = 0;
5248
5249 /* At this point, ELT, if nonzero, points to a class of expressions
5250 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5251 and SRC_RELATED, if nonzero, each contain additional equivalent
5252 expressions. Prune these latter expressions by deleting expressions
5253 already in the equivalence class.
5254
5255 Check for an equivalent identical to the destination. If found,
5256 this is the preferred equivalent since it will likely lead to
5257 elimination of the insn. Indicate this by placing it in
5258 `src_related'. */
5259
5260 if (elt)
5261 elt = elt->first_same_value;
5262 for (p = elt; p; p = p->next_same_value)
5263 {
5264 enum rtx_code code = GET_CODE (p->exp);
5265
5266 /* If the expression is not valid, ignore it. Then we do not
5267 have to check for validity below. In most cases, we can use
5268 `rtx_equal_p', since canonicalization has already been done. */
5269 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
5270 continue;
5271
5272 /* Also skip paradoxical subregs, unless that's what we're
5273 looking for. */
5274 if (code == SUBREG
5275 && (GET_MODE_SIZE (GET_MODE (p->exp))
5276 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5277 && ! (src != 0
5278 && GET_CODE (src) == SUBREG
5279 && GET_MODE (src) == GET_MODE (p->exp)
5280 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5281 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5282 continue;
5283
5284 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5285 src = 0;
5286 else if (src_folded && GET_CODE (src_folded) == code
5287 && rtx_equal_p (src_folded, p->exp))
5288 src_folded = 0;
5289 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5290 && rtx_equal_p (src_eqv_here, p->exp))
5291 src_eqv_here = 0;
5292 else if (src_related && GET_CODE (src_related) == code
5293 && rtx_equal_p (src_related, p->exp))
5294 src_related = 0;
5295
5296 /* This is the same as the destination of the insns, we want
5297 to prefer it. Copy it to src_related. The code below will
5298 then give it a negative cost. */
5299 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5300 src_related = dest;
5301 }
5302
5303 /* Find the cheapest valid equivalent, trying all the available
5304 possibilities. Prefer items not in the hash table to ones
5305 that are when they are equal cost. Note that we can never
5306 worsen an insn as the current contents will also succeed.
5307 If we find an equivalent identical to the destination, use it as best,
5308 since this insn will probably be eliminated in that case. */
5309 if (src)
5310 {
5311 if (rtx_equal_p (src, dest))
5312 src_cost = src_regcost = -1;
5313 else
5314 {
5315 src_cost = COST (src);
5316 src_regcost = approx_reg_cost (src);
5317 }
5318 }
5319
5320 if (src_eqv_here)
5321 {
5322 if (rtx_equal_p (src_eqv_here, dest))
5323 src_eqv_cost = src_eqv_regcost = -1;
5324 else
5325 {
5326 src_eqv_cost = COST (src_eqv_here);
5327 src_eqv_regcost = approx_reg_cost (src_eqv_here);
5328 }
5329 }
5330
5331 if (src_folded)
5332 {
5333 if (rtx_equal_p (src_folded, dest))
5334 src_folded_cost = src_folded_regcost = -1;
5335 else
5336 {
5337 src_folded_cost = COST (src_folded);
5338 src_folded_regcost = approx_reg_cost (src_folded);
5339 }
5340 }
5341
5342 if (src_related)
5343 {
5344 if (rtx_equal_p (src_related, dest))
5345 src_related_cost = src_related_regcost = -1;
5346 else
5347 {
5348 src_related_cost = COST (src_related);
5349 src_related_regcost = approx_reg_cost (src_related);
5350 }
5351 }
5352
5353 /* If this was an indirect jump insn, a known label will really be
5354 cheaper even though it looks more expensive. */
5355 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5356 src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5357
5358 /* Terminate loop when replacement made. This must terminate since
5359 the current contents will be tested and will always be valid. */
5360 while (1)
5361 {
5362 rtx trial;
5363
5364 /* Skip invalid entries. */
5365 while (elt && !REG_P (elt->exp)
5366 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
5367 elt = elt->next_same_value;
5368
5369 /* A paradoxical subreg would be bad here: it'll be the right
5370 size, but later may be adjusted so that the upper bits aren't
5371 what we want. So reject it. */
5372 if (elt != 0
5373 && GET_CODE (elt->exp) == SUBREG
5374 && (GET_MODE_SIZE (GET_MODE (elt->exp))
5375 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5376 /* It is okay, though, if the rtx we're trying to match
5377 will ignore any of the bits we can't predict. */
5378 && ! (src != 0
5379 && GET_CODE (src) == SUBREG
5380 && GET_MODE (src) == GET_MODE (elt->exp)
5381 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5382 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5383 {
5384 elt = elt->next_same_value;
5385 continue;
5386 }
5387
5388 if (elt)
5389 {
5390 src_elt_cost = elt->cost;
5391 src_elt_regcost = elt->regcost;
5392 }
5393
5394 /* Find cheapest and skip it for the next time. For items
5395 of equal cost, use this order:
5396 src_folded, src, src_eqv, src_related and hash table entry. */
5397 if (src_folded
5398 && preferable (src_folded_cost, src_folded_regcost,
5399 src_cost, src_regcost) <= 0
5400 && preferable (src_folded_cost, src_folded_regcost,
5401 src_eqv_cost, src_eqv_regcost) <= 0
5402 && preferable (src_folded_cost, src_folded_regcost,
5403 src_related_cost, src_related_regcost) <= 0
5404 && preferable (src_folded_cost, src_folded_regcost,
5405 src_elt_cost, src_elt_regcost) <= 0)
5406 {
5407 trial = src_folded, src_folded_cost = MAX_COST;
5408 if (src_folded_force_flag)
5409 {
5410 rtx forced = force_const_mem (mode, trial);
5411 if (forced)
5412 trial = forced;
5413 }
5414 }
5415 else if (src
5416 && preferable (src_cost, src_regcost,
5417 src_eqv_cost, src_eqv_regcost) <= 0
5418 && preferable (src_cost, src_regcost,
5419 src_related_cost, src_related_regcost) <= 0
5420 && preferable (src_cost, src_regcost,
5421 src_elt_cost, src_elt_regcost) <= 0)
5422 trial = src, src_cost = MAX_COST;
5423 else if (src_eqv_here
5424 && preferable (src_eqv_cost, src_eqv_regcost,
5425 src_related_cost, src_related_regcost) <= 0
5426 && preferable (src_eqv_cost, src_eqv_regcost,
5427 src_elt_cost, src_elt_regcost) <= 0)
5428 trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
5429 else if (src_related
5430 && preferable (src_related_cost, src_related_regcost,
5431 src_elt_cost, src_elt_regcost) <= 0)
5432 trial = copy_rtx (src_related), src_related_cost = MAX_COST;
5433 else
5434 {
5435 trial = copy_rtx (elt->exp);
5436 elt = elt->next_same_value;
5437 src_elt_cost = MAX_COST;
5438 }
5439
5440 /* We don't normally have an insn matching (set (pc) (pc)), so
5441 check for this separately here. We will delete such an
5442 insn below.
5443
5444 For other cases such as a table jump or conditional jump
5445 where we know the ultimate target, go ahead and replace the
5446 operand. While that may not make a valid insn, we will
5447 reemit the jump below (and also insert any necessary
5448 barriers). */
5449 if (n_sets == 1 && dest == pc_rtx
5450 && (trial == pc_rtx
5451 || (GET_CODE (trial) == LABEL_REF
5452 && ! condjump_p (insn))))
5453 {
5454 /* Don't substitute non-local labels, this confuses CFG. */
5455 if (GET_CODE (trial) == LABEL_REF
5456 && LABEL_REF_NONLOCAL_P (trial))
5457 continue;
5458
5459 SET_SRC (sets[i].rtl) = trial;
5460 cse_jumps_altered = 1;
5461 break;
5462 }
5463
5464 /* Look for a substitution that makes a valid insn. */
5465 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5466 {
5467 rtx new = canon_reg (SET_SRC (sets[i].rtl), insn);
5468
5469 /* If we just made a substitution inside a libcall, then we
5470 need to make the same substitution in any notes attached
5471 to the RETVAL insn. */
5472 if (libcall_insn
5473 && (REG_P (sets[i].orig_src)
5474 || GET_CODE (sets[i].orig_src) == SUBREG
5475 || MEM_P (sets[i].orig_src)))
5476 {
5477 rtx note = find_reg_equal_equiv_note (libcall_insn);
5478 if (note != 0)
5479 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0),
5480 sets[i].orig_src,
5481 copy_rtx (new));
5482 }
5483
5484 /* The result of apply_change_group can be ignored; see
5485 canon_reg. */
5486
5487 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
5488 apply_change_group ();
5489 break;
5490 }
5491
5492 /* If we previously found constant pool entries for
5493 constants and this is a constant, try making a
5494 pool entry. Put it in src_folded unless we already have done
5495 this since that is where it likely came from. */
5496
5497 else if (constant_pool_entries_cost
5498 && CONSTANT_P (trial)
5499 /* Reject cases that will abort in decode_rtx_const.
5500 On the alpha when simplifying a switch, we get
5501 (const (truncate (minus (label_ref) (label_ref)))). */
5502 && ! (GET_CODE (trial) == CONST
5503 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
5504 /* Likewise on IA-64, except without the truncate. */
5505 && ! (GET_CODE (trial) == CONST
5506 && GET_CODE (XEXP (trial, 0)) == MINUS
5507 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5508 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)
5509 && (src_folded == 0
5510 || (!MEM_P (src_folded)
5511 && ! src_folded_force_flag))
5512 && GET_MODE_CLASS (mode) != MODE_CC
5513 && mode != VOIDmode)
5514 {
5515 src_folded_force_flag = 1;
5516 src_folded = trial;
5517 src_folded_cost = constant_pool_entries_cost;
5518 src_folded_regcost = constant_pool_entries_regcost;
5519 }
5520 }
5521
5522 src = SET_SRC (sets[i].rtl);
5523
5524 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5525 However, there is an important exception: If both are registers
5526 that are not the head of their equivalence class, replace SET_SRC
5527 with the head of the class. If we do not do this, we will have
5528 both registers live over a portion of the basic block. This way,
5529 their lifetimes will likely abut instead of overlapping. */
5530 if (REG_P (dest)
5531 && REGNO_QTY_VALID_P (REGNO (dest)))
5532 {
5533 int dest_q = REG_QTY (REGNO (dest));
5534 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5535
5536 if (dest_ent->mode == GET_MODE (dest)
5537 && dest_ent->first_reg != REGNO (dest)
5538 && REG_P (src) && REGNO (src) == REGNO (dest)
5539 /* Don't do this if the original insn had a hard reg as
5540 SET_SRC or SET_DEST. */
5541 && (!REG_P (sets[i].src)
5542 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5543 && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5544 /* We can't call canon_reg here because it won't do anything if
5545 SRC is a hard register. */
5546 {
5547 int src_q = REG_QTY (REGNO (src));
5548 struct qty_table_elem *src_ent = &qty_table[src_q];
5549 int first = src_ent->first_reg;
5550 rtx new_src
5551 = (first >= FIRST_PSEUDO_REGISTER
5552 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5553
5554 /* We must use validate-change even for this, because this
5555 might be a special no-op instruction, suitable only to
5556 tag notes onto. */
5557 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5558 {
5559 src = new_src;
5560 /* If we had a constant that is cheaper than what we are now
5561 setting SRC to, use that constant. We ignored it when we
5562 thought we could make this into a no-op. */
5563 if (src_const && COST (src_const) < COST (src)
5564 && validate_change (insn, &SET_SRC (sets[i].rtl),
5565 src_const, 0))
5566 src = src_const;
5567 }
5568 }
5569 }
5570
5571 /* If we made a change, recompute SRC values. */
5572 if (src != sets[i].src)
5573 {
5574 cse_altered = 1;
5575 do_not_record = 0;
5576 hash_arg_in_memory = 0;
5577 sets[i].src = src;
5578 sets[i].src_hash = HASH (src, mode);
5579 sets[i].src_volatile = do_not_record;
5580 sets[i].src_in_memory = hash_arg_in_memory;
5581 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5582 }
5583
5584 /* If this is a single SET, we are setting a register, and we have an
5585 equivalent constant, we want to add a REG_NOTE. We don't want
5586 to write a REG_EQUAL note for a constant pseudo since verifying that
5587 that pseudo hasn't been eliminated is a pain. Such a note also
5588 won't help anything.
5589
5590 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5591 which can be created for a reference to a compile time computable
5592 entry in a jump table. */
5593
5594 if (n_sets == 1 && src_const && REG_P (dest)
5595 && !REG_P (src_const)
5596 && ! (GET_CODE (src_const) == CONST
5597 && GET_CODE (XEXP (src_const, 0)) == MINUS
5598 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5599 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5600 {
5601 /* We only want a REG_EQUAL note if src_const != src. */
5602 if (! rtx_equal_p (src, src_const))
5603 {
5604 /* Make sure that the rtx is not shared. */
5605 src_const = copy_rtx (src_const);
5606
5607 /* Record the actual constant value in a REG_EQUAL note,
5608 making a new one if one does not already exist. */
5609 set_unique_reg_note (insn, REG_EQUAL, src_const);
5610 }
5611 }
5612
5613 /* Now deal with the destination. */
5614 do_not_record = 0;
5615
5616 /* Look within any ZERO_EXTRACT to the MEM or REG within it. */
5617 while (GET_CODE (dest) == SUBREG
5618 || GET_CODE (dest) == ZERO_EXTRACT
5619 || GET_CODE (dest) == STRICT_LOW_PART)
5620 dest = XEXP (dest, 0);
5621
5622 sets[i].inner_dest = dest;
5623
5624 if (MEM_P (dest))
5625 {
5626 #ifdef PUSH_ROUNDING
5627 /* Stack pushes invalidate the stack pointer. */
5628 rtx addr = XEXP (dest, 0);
5629 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
5630 && XEXP (addr, 0) == stack_pointer_rtx)
5631 invalidate (stack_pointer_rtx, Pmode);
5632 #endif
5633 dest = fold_rtx (dest, insn);
5634 }
5635
5636 /* Compute the hash code of the destination now,
5637 before the effects of this instruction are recorded,
5638 since the register values used in the address computation
5639 are those before this instruction. */
5640 sets[i].dest_hash = HASH (dest, mode);
5641
5642 /* Don't enter a bit-field in the hash table
5643 because the value in it after the store
5644 may not equal what was stored, due to truncation. */
5645
5646 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
5647 {
5648 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5649
5650 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5651 && GET_CODE (width) == CONST_INT
5652 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5653 && ! (INTVAL (src_const)
5654 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5655 /* Exception: if the value is constant,
5656 and it won't be truncated, record it. */
5657 ;
5658 else
5659 {
5660 /* This is chosen so that the destination will be invalidated
5661 but no new value will be recorded.
5662 We must invalidate because sometimes constant
5663 values can be recorded for bitfields. */
5664 sets[i].src_elt = 0;
5665 sets[i].src_volatile = 1;
5666 src_eqv = 0;
5667 src_eqv_elt = 0;
5668 }
5669 }
5670
5671 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5672 the insn. */
5673 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5674 {
5675 /* One less use of the label this insn used to jump to. */
5676 delete_insn (insn);
5677 cse_jumps_altered = 1;
5678 /* No more processing for this set. */
5679 sets[i].rtl = 0;
5680 }
5681
5682 /* If this SET is now setting PC to a label, we know it used to
5683 be a conditional or computed branch. */
5684 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF
5685 && !LABEL_REF_NONLOCAL_P (src))
5686 {
5687 /* Now emit a BARRIER after the unconditional jump. */
5688 if (NEXT_INSN (insn) == 0
5689 || !BARRIER_P (NEXT_INSN (insn)))
5690 emit_barrier_after (insn);
5691
5692 /* We reemit the jump in as many cases as possible just in
5693 case the form of an unconditional jump is significantly
5694 different than a computed jump or conditional jump.
5695
5696 If this insn has multiple sets, then reemitting the
5697 jump is nontrivial. So instead we just force rerecognition
5698 and hope for the best. */
5699 if (n_sets == 1)
5700 {
5701 rtx new, note;
5702
5703 new = emit_jump_insn_after (gen_jump (XEXP (src, 0)), insn);
5704 JUMP_LABEL (new) = XEXP (src, 0);
5705 LABEL_NUSES (XEXP (src, 0))++;
5706
5707 /* Make sure to copy over REG_NON_LOCAL_GOTO. */
5708 note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
5709 if (note)
5710 {
5711 XEXP (note, 1) = NULL_RTX;
5712 REG_NOTES (new) = note;
5713 }
5714
5715 delete_insn (insn);
5716 insn = new;
5717
5718 /* Now emit a BARRIER after the unconditional jump. */
5719 if (NEXT_INSN (insn) == 0
5720 || !BARRIER_P (NEXT_INSN (insn)))
5721 emit_barrier_after (insn);
5722 }
5723 else
5724 INSN_CODE (insn) = -1;
5725
5726 /* Do not bother deleting any unreachable code,
5727 let jump/flow do that. */
5728
5729 cse_jumps_altered = 1;
5730 sets[i].rtl = 0;
5731 }
5732
5733 /* If destination is volatile, invalidate it and then do no further
5734 processing for this assignment. */
5735
5736 else if (do_not_record)
5737 {
5738 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5739 invalidate (dest, VOIDmode);
5740 else if (MEM_P (dest))
5741 invalidate (dest, VOIDmode);
5742 else if (GET_CODE (dest) == STRICT_LOW_PART
5743 || GET_CODE (dest) == ZERO_EXTRACT)
5744 invalidate (XEXP (dest, 0), GET_MODE (dest));
5745 sets[i].rtl = 0;
5746 }
5747
5748 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5749 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5750
5751 #ifdef HAVE_cc0
5752 /* If setting CC0, record what it was set to, or a constant, if it
5753 is equivalent to a constant. If it is being set to a floating-point
5754 value, make a COMPARE with the appropriate constant of 0. If we
5755 don't do this, later code can interpret this as a test against
5756 const0_rtx, which can cause problems if we try to put it into an
5757 insn as a floating-point operand. */
5758 if (dest == cc0_rtx)
5759 {
5760 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5761 this_insn_cc0_mode = mode;
5762 if (FLOAT_MODE_P (mode))
5763 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5764 CONST0_RTX (mode));
5765 }
5766 #endif
5767 }
5768
5769 /* Now enter all non-volatile source expressions in the hash table
5770 if they are not already present.
5771 Record their equivalence classes in src_elt.
5772 This way we can insert the corresponding destinations into
5773 the same classes even if the actual sources are no longer in them
5774 (having been invalidated). */
5775
5776 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5777 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5778 {
5779 struct table_elt *elt;
5780 struct table_elt *classp = sets[0].src_elt;
5781 rtx dest = SET_DEST (sets[0].rtl);
5782 enum machine_mode eqvmode = GET_MODE (dest);
5783
5784 if (GET_CODE (dest) == STRICT_LOW_PART)
5785 {
5786 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5787 classp = 0;
5788 }
5789 if (insert_regs (src_eqv, classp, 0))
5790 {
5791 rehash_using_reg (src_eqv);
5792 src_eqv_hash = HASH (src_eqv, eqvmode);
5793 }
5794 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5795 elt->in_memory = src_eqv_in_memory;
5796 src_eqv_elt = elt;
5797
5798 /* Check to see if src_eqv_elt is the same as a set source which
5799 does not yet have an elt, and if so set the elt of the set source
5800 to src_eqv_elt. */
5801 for (i = 0; i < n_sets; i++)
5802 if (sets[i].rtl && sets[i].src_elt == 0
5803 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5804 sets[i].src_elt = src_eqv_elt;
5805 }
5806
5807 for (i = 0; i < n_sets; i++)
5808 if (sets[i].rtl && ! sets[i].src_volatile
5809 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5810 {
5811 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5812 {
5813 /* REG_EQUAL in setting a STRICT_LOW_PART
5814 gives an equivalent for the entire destination register,
5815 not just for the subreg being stored in now.
5816 This is a more interesting equivalence, so we arrange later
5817 to treat the entire reg as the destination. */
5818 sets[i].src_elt = src_eqv_elt;
5819 sets[i].src_hash = src_eqv_hash;
5820 }
5821 else
5822 {
5823 /* Insert source and constant equivalent into hash table, if not
5824 already present. */
5825 struct table_elt *classp = src_eqv_elt;
5826 rtx src = sets[i].src;
5827 rtx dest = SET_DEST (sets[i].rtl);
5828 enum machine_mode mode
5829 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5830
5831 /* It's possible that we have a source value known to be
5832 constant but don't have a REG_EQUAL note on the insn.
5833 Lack of a note will mean src_eqv_elt will be NULL. This
5834 can happen where we've generated a SUBREG to access a
5835 CONST_INT that is already in a register in a wider mode.
5836 Ensure that the source expression is put in the proper
5837 constant class. */
5838 if (!classp)
5839 classp = sets[i].src_const_elt;
5840
5841 if (sets[i].src_elt == 0)
5842 {
5843 /* Don't put a hard register source into the table if this is
5844 the last insn of a libcall. In this case, we only need
5845 to put src_eqv_elt in src_elt. */
5846 if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5847 {
5848 struct table_elt *elt;
5849
5850 /* Note that these insert_regs calls cannot remove
5851 any of the src_elt's, because they would have failed to
5852 match if not still valid. */
5853 if (insert_regs (src, classp, 0))
5854 {
5855 rehash_using_reg (src);
5856 sets[i].src_hash = HASH (src, mode);
5857 }
5858 elt = insert (src, classp, sets[i].src_hash, mode);
5859 elt->in_memory = sets[i].src_in_memory;
5860 sets[i].src_elt = classp = elt;
5861 }
5862 else
5863 sets[i].src_elt = classp;
5864 }
5865 if (sets[i].src_const && sets[i].src_const_elt == 0
5866 && src != sets[i].src_const
5867 && ! rtx_equal_p (sets[i].src_const, src))
5868 sets[i].src_elt = insert (sets[i].src_const, classp,
5869 sets[i].src_const_hash, mode);
5870 }
5871 }
5872 else if (sets[i].src_elt == 0)
5873 /* If we did not insert the source into the hash table (e.g., it was
5874 volatile), note the equivalence class for the REG_EQUAL value, if any,
5875 so that the destination goes into that class. */
5876 sets[i].src_elt = src_eqv_elt;
5877
5878 invalidate_from_clobbers (x);
5879
5880 /* Some registers are invalidated by subroutine calls. Memory is
5881 invalidated by non-constant calls. */
5882
5883 if (CALL_P (insn))
5884 {
5885 if (! CONST_OR_PURE_CALL_P (insn))
5886 invalidate_memory ();
5887 invalidate_for_call ();
5888 }
5889
5890 /* Now invalidate everything set by this instruction.
5891 If a SUBREG or other funny destination is being set,
5892 sets[i].rtl is still nonzero, so here we invalidate the reg
5893 a part of which is being set. */
5894
5895 for (i = 0; i < n_sets; i++)
5896 if (sets[i].rtl)
5897 {
5898 /* We can't use the inner dest, because the mode associated with
5899 a ZERO_EXTRACT is significant. */
5900 rtx dest = SET_DEST (sets[i].rtl);
5901
5902 /* Needed for registers to remove the register from its
5903 previous quantity's chain.
5904 Needed for memory if this is a nonvarying address, unless
5905 we have just done an invalidate_memory that covers even those. */
5906 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5907 invalidate (dest, VOIDmode);
5908 else if (MEM_P (dest))
5909 invalidate (dest, VOIDmode);
5910 else if (GET_CODE (dest) == STRICT_LOW_PART
5911 || GET_CODE (dest) == ZERO_EXTRACT)
5912 invalidate (XEXP (dest, 0), GET_MODE (dest));
5913 }
5914
5915 /* A volatile ASM invalidates everything. */
5916 if (NONJUMP_INSN_P (insn)
5917 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5918 && MEM_VOLATILE_P (PATTERN (insn)))
5919 flush_hash_table ();
5920
5921 /* Make sure registers mentioned in destinations
5922 are safe for use in an expression to be inserted.
5923 This removes from the hash table
5924 any invalid entry that refers to one of these registers.
5925
5926 We don't care about the return value from mention_regs because
5927 we are going to hash the SET_DEST values unconditionally. */
5928
5929 for (i = 0; i < n_sets; i++)
5930 {
5931 if (sets[i].rtl)
5932 {
5933 rtx x = SET_DEST (sets[i].rtl);
5934
5935 if (!REG_P (x))
5936 mention_regs (x);
5937 else
5938 {
5939 /* We used to rely on all references to a register becoming
5940 inaccessible when a register changes to a new quantity,
5941 since that changes the hash code. However, that is not
5942 safe, since after HASH_SIZE new quantities we get a
5943 hash 'collision' of a register with its own invalid
5944 entries. And since SUBREGs have been changed not to
5945 change their hash code with the hash code of the register,
5946 it wouldn't work any longer at all. So we have to check
5947 for any invalid references lying around now.
5948 This code is similar to the REG case in mention_regs,
5949 but it knows that reg_tick has been incremented, and
5950 it leaves reg_in_table as -1 . */
5951 unsigned int regno = REGNO (x);
5952 unsigned int endregno
5953 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
5954 : hard_regno_nregs[regno][GET_MODE (x)]);
5955 unsigned int i;
5956
5957 for (i = regno; i < endregno; i++)
5958 {
5959 if (REG_IN_TABLE (i) >= 0)
5960 {
5961 remove_invalid_refs (i);
5962 REG_IN_TABLE (i) = -1;
5963 }
5964 }
5965 }
5966 }
5967 }
5968
5969 /* We may have just removed some of the src_elt's from the hash table.
5970 So replace each one with the current head of the same class. */
5971
5972 for (i = 0; i < n_sets; i++)
5973 if (sets[i].rtl)
5974 {
5975 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
5976 /* If elt was removed, find current head of same class,
5977 or 0 if nothing remains of that class. */
5978 {
5979 struct table_elt *elt = sets[i].src_elt;
5980
5981 while (elt && elt->prev_same_value)
5982 elt = elt->prev_same_value;
5983
5984 while (elt && elt->first_same_value == 0)
5985 elt = elt->next_same_value;
5986 sets[i].src_elt = elt ? elt->first_same_value : 0;
5987 }
5988 }
5989
5990 /* Now insert the destinations into their equivalence classes. */
5991
5992 for (i = 0; i < n_sets; i++)
5993 if (sets[i].rtl)
5994 {
5995 rtx dest = SET_DEST (sets[i].rtl);
5996 struct table_elt *elt;
5997
5998 /* Don't record value if we are not supposed to risk allocating
5999 floating-point values in registers that might be wider than
6000 memory. */
6001 if ((flag_float_store
6002 && MEM_P (dest)
6003 && FLOAT_MODE_P (GET_MODE (dest)))
6004 /* Don't record BLKmode values, because we don't know the
6005 size of it, and can't be sure that other BLKmode values
6006 have the same or smaller size. */
6007 || GET_MODE (dest) == BLKmode
6008 /* Don't record values of destinations set inside a libcall block
6009 since we might delete the libcall. Things should have been set
6010 up so we won't want to reuse such a value, but we play it safe
6011 here. */
6012 || libcall_insn
6013 /* If we didn't put a REG_EQUAL value or a source into the hash
6014 table, there is no point is recording DEST. */
6015 || sets[i].src_elt == 0
6016 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
6017 or SIGN_EXTEND, don't record DEST since it can cause
6018 some tracking to be wrong.
6019
6020 ??? Think about this more later. */
6021 || (GET_CODE (dest) == SUBREG
6022 && (GET_MODE_SIZE (GET_MODE (dest))
6023 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6024 && (GET_CODE (sets[i].src) == SIGN_EXTEND
6025 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
6026 continue;
6027
6028 /* STRICT_LOW_PART isn't part of the value BEING set,
6029 and neither is the SUBREG inside it.
6030 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
6031 if (GET_CODE (dest) == STRICT_LOW_PART)
6032 dest = SUBREG_REG (XEXP (dest, 0));
6033
6034 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
6035 /* Registers must also be inserted into chains for quantities. */
6036 if (insert_regs (dest, sets[i].src_elt, 1))
6037 {
6038 /* If `insert_regs' changes something, the hash code must be
6039 recalculated. */
6040 rehash_using_reg (dest);
6041 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6042 }
6043
6044 elt = insert (dest, sets[i].src_elt,
6045 sets[i].dest_hash, GET_MODE (dest));
6046
6047 elt->in_memory = (MEM_P (sets[i].inner_dest)
6048 && !MEM_READONLY_P (sets[i].inner_dest));
6049
6050 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6051 narrower than M2, and both M1 and M2 are the same number of words,
6052 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6053 make that equivalence as well.
6054
6055 However, BAR may have equivalences for which gen_lowpart
6056 will produce a simpler value than gen_lowpart applied to
6057 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6058 BAR's equivalences. If we don't get a simplified form, make
6059 the SUBREG. It will not be used in an equivalence, but will
6060 cause two similar assignments to be detected.
6061
6062 Note the loop below will find SUBREG_REG (DEST) since we have
6063 already entered SRC and DEST of the SET in the table. */
6064
6065 if (GET_CODE (dest) == SUBREG
6066 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6067 / UNITS_PER_WORD)
6068 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
6069 && (GET_MODE_SIZE (GET_MODE (dest))
6070 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6071 && sets[i].src_elt != 0)
6072 {
6073 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6074 struct table_elt *elt, *classp = 0;
6075
6076 for (elt = sets[i].src_elt->first_same_value; elt;
6077 elt = elt->next_same_value)
6078 {
6079 rtx new_src = 0;
6080 unsigned src_hash;
6081 struct table_elt *src_elt;
6082 int byte = 0;
6083
6084 /* Ignore invalid entries. */
6085 if (!REG_P (elt->exp)
6086 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
6087 continue;
6088
6089 /* We may have already been playing subreg games. If the
6090 mode is already correct for the destination, use it. */
6091 if (GET_MODE (elt->exp) == new_mode)
6092 new_src = elt->exp;
6093 else
6094 {
6095 /* Calculate big endian correction for the SUBREG_BYTE.
6096 We have already checked that M1 (GET_MODE (dest))
6097 is not narrower than M2 (new_mode). */
6098 if (BYTES_BIG_ENDIAN)
6099 byte = (GET_MODE_SIZE (GET_MODE (dest))
6100 - GET_MODE_SIZE (new_mode));
6101
6102 new_src = simplify_gen_subreg (new_mode, elt->exp,
6103 GET_MODE (dest), byte);
6104 }
6105
6106 /* The call to simplify_gen_subreg fails if the value
6107 is VOIDmode, yet we can't do any simplification, e.g.
6108 for EXPR_LISTs denoting function call results.
6109 It is invalid to construct a SUBREG with a VOIDmode
6110 SUBREG_REG, hence a zero new_src means we can't do
6111 this substitution. */
6112 if (! new_src)
6113 continue;
6114
6115 src_hash = HASH (new_src, new_mode);
6116 src_elt = lookup (new_src, src_hash, new_mode);
6117
6118 /* Put the new source in the hash table is if isn't
6119 already. */
6120 if (src_elt == 0)
6121 {
6122 if (insert_regs (new_src, classp, 0))
6123 {
6124 rehash_using_reg (new_src);
6125 src_hash = HASH (new_src, new_mode);
6126 }
6127 src_elt = insert (new_src, classp, src_hash, new_mode);
6128 src_elt->in_memory = elt->in_memory;
6129 }
6130 else if (classp && classp != src_elt->first_same_value)
6131 /* Show that two things that we've seen before are
6132 actually the same. */
6133 merge_equiv_classes (src_elt, classp);
6134
6135 classp = src_elt->first_same_value;
6136 /* Ignore invalid entries. */
6137 while (classp
6138 && !REG_P (classp->exp)
6139 && ! exp_equiv_p (classp->exp, classp->exp, 1, false))
6140 classp = classp->next_same_value;
6141 }
6142 }
6143 }
6144
6145 /* Special handling for (set REG0 REG1) where REG0 is the
6146 "cheapest", cheaper than REG1. After cse, REG1 will probably not
6147 be used in the sequel, so (if easily done) change this insn to
6148 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6149 that computed their value. Then REG1 will become a dead store
6150 and won't cloud the situation for later optimizations.
6151
6152 Do not make this change if REG1 is a hard register, because it will
6153 then be used in the sequel and we may be changing a two-operand insn
6154 into a three-operand insn.
6155
6156 Also do not do this if we are operating on a copy of INSN.
6157
6158 Also don't do this if INSN ends a libcall; this would cause an unrelated
6159 register to be set in the middle of a libcall, and we then get bad code
6160 if the libcall is deleted. */
6161
6162 if (n_sets == 1 && sets[0].rtl && REG_P (SET_DEST (sets[0].rtl))
6163 && NEXT_INSN (PREV_INSN (insn)) == insn
6164 && REG_P (SET_SRC (sets[0].rtl))
6165 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6166 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
6167 {
6168 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6169 struct qty_table_elem *src_ent = &qty_table[src_q];
6170
6171 if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6172 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6173 {
6174 rtx prev = insn;
6175 /* Scan for the previous nonnote insn, but stop at a basic
6176 block boundary. */
6177 do
6178 {
6179 prev = PREV_INSN (prev);
6180 }
6181 while (prev && NOTE_P (prev)
6182 && NOTE_LINE_NUMBER (prev) != NOTE_INSN_BASIC_BLOCK);
6183
6184 /* Do not swap the registers around if the previous instruction
6185 attaches a REG_EQUIV note to REG1.
6186
6187 ??? It's not entirely clear whether we can transfer a REG_EQUIV
6188 from the pseudo that originally shadowed an incoming argument
6189 to another register. Some uses of REG_EQUIV might rely on it
6190 being attached to REG1 rather than REG2.
6191
6192 This section previously turned the REG_EQUIV into a REG_EQUAL
6193 note. We cannot do that because REG_EQUIV may provide an
6194 uninitialized stack slot when REG_PARM_STACK_SPACE is used. */
6195
6196 if (prev != 0 && NONJUMP_INSN_P (prev)
6197 && GET_CODE (PATTERN (prev)) == SET
6198 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6199 && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
6200 {
6201 rtx dest = SET_DEST (sets[0].rtl);
6202 rtx src = SET_SRC (sets[0].rtl);
6203 rtx note;
6204
6205 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6206 validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6207 validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6208 apply_change_group ();
6209
6210 /* If INSN has a REG_EQUAL note, and this note mentions
6211 REG0, then we must delete it, because the value in
6212 REG0 has changed. If the note's value is REG1, we must
6213 also delete it because that is now this insn's dest. */
6214 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6215 if (note != 0
6216 && (reg_mentioned_p (dest, XEXP (note, 0))
6217 || rtx_equal_p (src, XEXP (note, 0))))
6218 remove_note (insn, note);
6219 }
6220 }
6221 }
6222
6223 /* If this is a conditional jump insn, record any known equivalences due to
6224 the condition being tested. */
6225
6226 if (JUMP_P (insn)
6227 && n_sets == 1 && GET_CODE (x) == SET
6228 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6229 record_jump_equiv (insn, 0);
6230
6231 #ifdef HAVE_cc0
6232 /* If the previous insn set CC0 and this insn no longer references CC0,
6233 delete the previous insn. Here we use the fact that nothing expects CC0
6234 to be valid over an insn, which is true until the final pass. */
6235 if (prev_insn && NONJUMP_INSN_P (prev_insn)
6236 && (tem = single_set (prev_insn)) != 0
6237 && SET_DEST (tem) == cc0_rtx
6238 && ! reg_mentioned_p (cc0_rtx, x))
6239 delete_insn (prev_insn);
6240
6241 prev_insn_cc0 = this_insn_cc0;
6242 prev_insn_cc0_mode = this_insn_cc0_mode;
6243 prev_insn = insn;
6244 #endif
6245 }
6246 \f
6247 /* Remove from the hash table all expressions that reference memory. */
6248
6249 static void
6250 invalidate_memory (void)
6251 {
6252 int i;
6253 struct table_elt *p, *next;
6254
6255 for (i = 0; i < HASH_SIZE; i++)
6256 for (p = table[i]; p; p = next)
6257 {
6258 next = p->next_same_hash;
6259 if (p->in_memory)
6260 remove_from_table (p, i);
6261 }
6262 }
6263
6264 /* If ADDR is an address that implicitly affects the stack pointer, return
6265 1 and update the register tables to show the effect. Else, return 0. */
6266
6267 static int
6268 addr_affects_sp_p (rtx addr)
6269 {
6270 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
6271 && REG_P (XEXP (addr, 0))
6272 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6273 {
6274 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6275 {
6276 REG_TICK (STACK_POINTER_REGNUM)++;
6277 /* Is it possible to use a subreg of SP? */
6278 SUBREG_TICKED (STACK_POINTER_REGNUM) = -1;
6279 }
6280
6281 /* This should be *very* rare. */
6282 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6283 invalidate (stack_pointer_rtx, VOIDmode);
6284
6285 return 1;
6286 }
6287
6288 return 0;
6289 }
6290
6291 /* Perform invalidation on the basis of everything about an insn
6292 except for invalidating the actual places that are SET in it.
6293 This includes the places CLOBBERed, and anything that might
6294 alias with something that is SET or CLOBBERed.
6295
6296 X is the pattern of the insn. */
6297
6298 static void
6299 invalidate_from_clobbers (rtx x)
6300 {
6301 if (GET_CODE (x) == CLOBBER)
6302 {
6303 rtx ref = XEXP (x, 0);
6304 if (ref)
6305 {
6306 if (REG_P (ref) || GET_CODE (ref) == SUBREG
6307 || MEM_P (ref))
6308 invalidate (ref, VOIDmode);
6309 else if (GET_CODE (ref) == STRICT_LOW_PART
6310 || GET_CODE (ref) == ZERO_EXTRACT)
6311 invalidate (XEXP (ref, 0), GET_MODE (ref));
6312 }
6313 }
6314 else if (GET_CODE (x) == PARALLEL)
6315 {
6316 int i;
6317 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6318 {
6319 rtx y = XVECEXP (x, 0, i);
6320 if (GET_CODE (y) == CLOBBER)
6321 {
6322 rtx ref = XEXP (y, 0);
6323 if (REG_P (ref) || GET_CODE (ref) == SUBREG
6324 || MEM_P (ref))
6325 invalidate (ref, VOIDmode);
6326 else if (GET_CODE (ref) == STRICT_LOW_PART
6327 || GET_CODE (ref) == ZERO_EXTRACT)
6328 invalidate (XEXP (ref, 0), GET_MODE (ref));
6329 }
6330 }
6331 }
6332 }
6333 \f
6334 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6335 and replace any registers in them with either an equivalent constant
6336 or the canonical form of the register. If we are inside an address,
6337 only do this if the address remains valid.
6338
6339 OBJECT is 0 except when within a MEM in which case it is the MEM.
6340
6341 Return the replacement for X. */
6342
6343 static rtx
6344 cse_process_notes (rtx x, rtx object)
6345 {
6346 enum rtx_code code = GET_CODE (x);
6347 const char *fmt = GET_RTX_FORMAT (code);
6348 int i;
6349
6350 switch (code)
6351 {
6352 case CONST_INT:
6353 case CONST:
6354 case SYMBOL_REF:
6355 case LABEL_REF:
6356 case CONST_DOUBLE:
6357 case CONST_VECTOR:
6358 case PC:
6359 case CC0:
6360 case LO_SUM:
6361 return x;
6362
6363 case MEM:
6364 validate_change (x, &XEXP (x, 0),
6365 cse_process_notes (XEXP (x, 0), x), 0);
6366 return x;
6367
6368 case EXPR_LIST:
6369 case INSN_LIST:
6370 if (REG_NOTE_KIND (x) == REG_EQUAL)
6371 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6372 if (XEXP (x, 1))
6373 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6374 return x;
6375
6376 case SIGN_EXTEND:
6377 case ZERO_EXTEND:
6378 case SUBREG:
6379 {
6380 rtx new = cse_process_notes (XEXP (x, 0), object);
6381 /* We don't substitute VOIDmode constants into these rtx,
6382 since they would impede folding. */
6383 if (GET_MODE (new) != VOIDmode)
6384 validate_change (object, &XEXP (x, 0), new, 0);
6385 return x;
6386 }
6387
6388 case REG:
6389 i = REG_QTY (REGNO (x));
6390
6391 /* Return a constant or a constant register. */
6392 if (REGNO_QTY_VALID_P (REGNO (x)))
6393 {
6394 struct qty_table_elem *ent = &qty_table[i];
6395
6396 if (ent->const_rtx != NULL_RTX
6397 && (CONSTANT_P (ent->const_rtx)
6398 || REG_P (ent->const_rtx)))
6399 {
6400 rtx new = gen_lowpart (GET_MODE (x), ent->const_rtx);
6401 if (new)
6402 return new;
6403 }
6404 }
6405
6406 /* Otherwise, canonicalize this register. */
6407 return canon_reg (x, NULL_RTX);
6408
6409 default:
6410 break;
6411 }
6412
6413 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6414 if (fmt[i] == 'e')
6415 validate_change (object, &XEXP (x, i),
6416 cse_process_notes (XEXP (x, i), object), 0);
6417
6418 return x;
6419 }
6420 \f
6421 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
6422 since they are done elsewhere. This function is called via note_stores. */
6423
6424 static void
6425 invalidate_skipped_set (rtx dest, rtx set, void *data ATTRIBUTE_UNUSED)
6426 {
6427 enum rtx_code code = GET_CODE (dest);
6428
6429 if (code == MEM
6430 && ! addr_affects_sp_p (dest) /* If this is not a stack push ... */
6431 /* There are times when an address can appear varying and be a PLUS
6432 during this scan when it would be a fixed address were we to know
6433 the proper equivalences. So invalidate all memory if there is
6434 a BLKmode or nonscalar memory reference or a reference to a
6435 variable address. */
6436 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6437 || cse_rtx_varies_p (XEXP (dest, 0), 0)))
6438 {
6439 invalidate_memory ();
6440 return;
6441 }
6442
6443 if (GET_CODE (set) == CLOBBER
6444 || CC0_P (dest)
6445 || dest == pc_rtx)
6446 return;
6447
6448 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6449 invalidate (XEXP (dest, 0), GET_MODE (dest));
6450 else if (code == REG || code == SUBREG || code == MEM)
6451 invalidate (dest, VOIDmode);
6452 }
6453
6454 /* Invalidate all insns from START up to the end of the function or the
6455 next label. This called when we wish to CSE around a block that is
6456 conditionally executed. */
6457
6458 static void
6459 invalidate_skipped_block (rtx start)
6460 {
6461 rtx insn;
6462
6463 for (insn = start; insn && !LABEL_P (insn);
6464 insn = NEXT_INSN (insn))
6465 {
6466 if (! INSN_P (insn))
6467 continue;
6468
6469 if (CALL_P (insn))
6470 {
6471 if (! CONST_OR_PURE_CALL_P (insn))
6472 invalidate_memory ();
6473 invalidate_for_call ();
6474 }
6475
6476 invalidate_from_clobbers (PATTERN (insn));
6477 note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6478 }
6479 }
6480 \f
6481 /* Find the end of INSN's basic block and return its range,
6482 the total number of SETs in all the insns of the block, the last insn of the
6483 block, and the branch path.
6484
6485 The branch path indicates which branches should be followed. If a nonzero
6486 path size is specified, the block should be rescanned and a different set
6487 of branches will be taken. The branch path is only used if
6488 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is nonzero.
6489
6490 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6491 used to describe the block. It is filled in with the information about
6492 the current block. The incoming structure's branch path, if any, is used
6493 to construct the output branch path. */
6494
6495 static void
6496 cse_end_of_basic_block (rtx insn, struct cse_basic_block_data *data,
6497 int follow_jumps, int skip_blocks)
6498 {
6499 rtx p = insn, q;
6500 int nsets = 0;
6501 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6502 rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
6503 int path_size = data->path_size;
6504 int path_entry = 0;
6505 int i;
6506
6507 /* Update the previous branch path, if any. If the last branch was
6508 previously PATH_TAKEN, mark it PATH_NOT_TAKEN.
6509 If it was previously PATH_NOT_TAKEN,
6510 shorten the path by one and look at the previous branch. We know that
6511 at least one branch must have been taken if PATH_SIZE is nonzero. */
6512 while (path_size > 0)
6513 {
6514 if (data->path[path_size - 1].status != PATH_NOT_TAKEN)
6515 {
6516 data->path[path_size - 1].status = PATH_NOT_TAKEN;
6517 break;
6518 }
6519 else
6520 path_size--;
6521 }
6522
6523 /* If the first instruction is marked with QImode, that means we've
6524 already processed this block. Our caller will look at DATA->LAST
6525 to figure out where to go next. We want to return the next block
6526 in the instruction stream, not some branched-to block somewhere
6527 else. We accomplish this by pretending our called forbid us to
6528 follow jumps, or skip blocks. */
6529 if (GET_MODE (insn) == QImode)
6530 follow_jumps = skip_blocks = 0;
6531
6532 /* Scan to end of this basic block. */
6533 while (p && !LABEL_P (p))
6534 {
6535 /* Don't cse over a call to setjmp; on some machines (eg VAX)
6536 the regs restored by the longjmp come from
6537 a later time than the setjmp. */
6538 if (PREV_INSN (p) && CALL_P (PREV_INSN (p))
6539 && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
6540 break;
6541
6542 /* A PARALLEL can have lots of SETs in it,
6543 especially if it is really an ASM_OPERANDS. */
6544 if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6545 nsets += XVECLEN (PATTERN (p), 0);
6546 else if (!NOTE_P (p))
6547 nsets += 1;
6548
6549 /* Ignore insns made by CSE; they cannot affect the boundaries of
6550 the basic block. */
6551
6552 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6553 high_cuid = INSN_CUID (p);
6554 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6555 low_cuid = INSN_CUID (p);
6556
6557 /* See if this insn is in our branch path. If it is and we are to
6558 take it, do so. */
6559 if (path_entry < path_size && data->path[path_entry].branch == p)
6560 {
6561 if (data->path[path_entry].status != PATH_NOT_TAKEN)
6562 p = JUMP_LABEL (p);
6563
6564 /* Point to next entry in path, if any. */
6565 path_entry++;
6566 }
6567
6568 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6569 was specified, we haven't reached our maximum path length, there are
6570 insns following the target of the jump, this is the only use of the
6571 jump label, and the target label is preceded by a BARRIER.
6572
6573 Alternatively, we can follow the jump if it branches around a
6574 block of code and there are no other branches into the block.
6575 In this case invalidate_skipped_block will be called to invalidate any
6576 registers set in the block when following the jump. */
6577
6578 else if ((follow_jumps || skip_blocks) && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH) - 1
6579 && JUMP_P (p)
6580 && GET_CODE (PATTERN (p)) == SET
6581 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6582 && JUMP_LABEL (p) != 0
6583 && LABEL_NUSES (JUMP_LABEL (p)) == 1
6584 && NEXT_INSN (JUMP_LABEL (p)) != 0)
6585 {
6586 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6587 if ((!NOTE_P (q)
6588 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
6589 || (PREV_INSN (q) && CALL_P (PREV_INSN (q))
6590 && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
6591 && (!LABEL_P (q) || LABEL_NUSES (q) != 0))
6592 break;
6593
6594 /* If we ran into a BARRIER, this code is an extension of the
6595 basic block when the branch is taken. */
6596 if (follow_jumps && q != 0 && BARRIER_P (q))
6597 {
6598 /* Don't allow ourself to keep walking around an
6599 always-executed loop. */
6600 if (next_real_insn (q) == next)
6601 {
6602 p = NEXT_INSN (p);
6603 continue;
6604 }
6605
6606 /* Similarly, don't put a branch in our path more than once. */
6607 for (i = 0; i < path_entry; i++)
6608 if (data->path[i].branch == p)
6609 break;
6610
6611 if (i != path_entry)
6612 break;
6613
6614 data->path[path_entry].branch = p;
6615 data->path[path_entry++].status = PATH_TAKEN;
6616
6617 /* This branch now ends our path. It was possible that we
6618 didn't see this branch the last time around (when the
6619 insn in front of the target was a JUMP_INSN that was
6620 turned into a no-op). */
6621 path_size = path_entry;
6622
6623 p = JUMP_LABEL (p);
6624 /* Mark block so we won't scan it again later. */
6625 PUT_MODE (NEXT_INSN (p), QImode);
6626 }
6627 /* Detect a branch around a block of code. */
6628 else if (skip_blocks && q != 0 && !LABEL_P (q))
6629 {
6630 rtx tmp;
6631
6632 if (next_real_insn (q) == next)
6633 {
6634 p = NEXT_INSN (p);
6635 continue;
6636 }
6637
6638 for (i = 0; i < path_entry; i++)
6639 if (data->path[i].branch == p)
6640 break;
6641
6642 if (i != path_entry)
6643 break;
6644
6645 /* This is no_labels_between_p (p, q) with an added check for
6646 reaching the end of a function (in case Q precedes P). */
6647 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6648 if (LABEL_P (tmp))
6649 break;
6650
6651 if (tmp == q)
6652 {
6653 data->path[path_entry].branch = p;
6654 data->path[path_entry++].status = PATH_AROUND;
6655
6656 path_size = path_entry;
6657
6658 p = JUMP_LABEL (p);
6659 /* Mark block so we won't scan it again later. */
6660 PUT_MODE (NEXT_INSN (p), QImode);
6661 }
6662 }
6663 }
6664 p = NEXT_INSN (p);
6665 }
6666
6667 data->low_cuid = low_cuid;
6668 data->high_cuid = high_cuid;
6669 data->nsets = nsets;
6670 data->last = p;
6671
6672 /* If all jumps in the path are not taken, set our path length to zero
6673 so a rescan won't be done. */
6674 for (i = path_size - 1; i >= 0; i--)
6675 if (data->path[i].status != PATH_NOT_TAKEN)
6676 break;
6677
6678 if (i == -1)
6679 data->path_size = 0;
6680 else
6681 data->path_size = path_size;
6682
6683 /* End the current branch path. */
6684 data->path[path_size].branch = 0;
6685 }
6686 \f
6687 /* Perform cse on the instructions of a function.
6688 F is the first instruction.
6689 NREGS is one plus the highest pseudo-reg number used in the instruction.
6690
6691 Returns 1 if jump_optimize should be redone due to simplifications
6692 in conditional jump instructions. */
6693
6694 int
6695 cse_main (rtx f, int nregs, FILE *file)
6696 {
6697 struct cse_basic_block_data val;
6698 rtx insn = f;
6699 int i;
6700
6701 val.path = xmalloc (sizeof (struct branch_path)
6702 * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6703
6704 cse_jumps_altered = 0;
6705 recorded_label_ref = 0;
6706 constant_pool_entries_cost = 0;
6707 constant_pool_entries_regcost = 0;
6708 val.path_size = 0;
6709 rtl_hooks = cse_rtl_hooks;
6710
6711 init_recog ();
6712 init_alias_analysis ();
6713
6714 reg_eqv_table = xmalloc (nregs * sizeof (struct reg_eqv_elem));
6715
6716 /* Find the largest uid. */
6717
6718 max_uid = get_max_uid ();
6719 uid_cuid = xcalloc (max_uid + 1, sizeof (int));
6720
6721 /* Compute the mapping from uids to cuids.
6722 CUIDs are numbers assigned to insns, like uids,
6723 except that cuids increase monotonically through the code.
6724 Don't assign cuids to line-number NOTEs, so that the distance in cuids
6725 between two insns is not affected by -g. */
6726
6727 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
6728 {
6729 if (!NOTE_P (insn)
6730 || NOTE_LINE_NUMBER (insn) < 0)
6731 INSN_CUID (insn) = ++i;
6732 else
6733 /* Give a line number note the same cuid as preceding insn. */
6734 INSN_CUID (insn) = i;
6735 }
6736
6737 /* Loop over basic blocks.
6738 Compute the maximum number of qty's needed for each basic block
6739 (which is 2 for each SET). */
6740 insn = f;
6741 while (insn)
6742 {
6743 cse_altered = 0;
6744 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps,
6745 flag_cse_skip_blocks);
6746
6747 /* If this basic block was already processed or has no sets, skip it. */
6748 if (val.nsets == 0 || GET_MODE (insn) == QImode)
6749 {
6750 PUT_MODE (insn, VOIDmode);
6751 insn = (val.last ? NEXT_INSN (val.last) : 0);
6752 val.path_size = 0;
6753 continue;
6754 }
6755
6756 cse_basic_block_start = val.low_cuid;
6757 cse_basic_block_end = val.high_cuid;
6758 max_qty = val.nsets * 2;
6759
6760 if (file)
6761 fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
6762 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
6763 val.nsets);
6764
6765 /* Make MAX_QTY bigger to give us room to optimize
6766 past the end of this basic block, if that should prove useful. */
6767 if (max_qty < 500)
6768 max_qty = 500;
6769
6770 /* If this basic block is being extended by following certain jumps,
6771 (see `cse_end_of_basic_block'), we reprocess the code from the start.
6772 Otherwise, we start after this basic block. */
6773 if (val.path_size > 0)
6774 cse_basic_block (insn, val.last, val.path);
6775 else
6776 {
6777 int old_cse_jumps_altered = cse_jumps_altered;
6778 rtx temp;
6779
6780 /* When cse changes a conditional jump to an unconditional
6781 jump, we want to reprocess the block, since it will give
6782 us a new branch path to investigate. */
6783 cse_jumps_altered = 0;
6784 temp = cse_basic_block (insn, val.last, val.path);
6785 if (cse_jumps_altered == 0
6786 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
6787 insn = temp;
6788
6789 cse_jumps_altered |= old_cse_jumps_altered;
6790 }
6791
6792 if (cse_altered)
6793 ggc_collect ();
6794
6795 #ifdef USE_C_ALLOCA
6796 alloca (0);
6797 #endif
6798 }
6799
6800 /* Clean up. */
6801 end_alias_analysis ();
6802 free (uid_cuid);
6803 free (reg_eqv_table);
6804 free (val.path);
6805 rtl_hooks = general_rtl_hooks;
6806
6807 return cse_jumps_altered || recorded_label_ref;
6808 }
6809
6810 /* Process a single basic block. FROM and TO and the limits of the basic
6811 block. NEXT_BRANCH points to the branch path when following jumps or
6812 a null path when not following jumps. */
6813
6814 static rtx
6815 cse_basic_block (rtx from, rtx to, struct branch_path *next_branch)
6816 {
6817 rtx insn;
6818 int to_usage = 0;
6819 rtx libcall_insn = NULL_RTX;
6820 int num_insns = 0;
6821 int no_conflict = 0;
6822
6823 /* Allocate the space needed by qty_table. */
6824 qty_table = xmalloc (max_qty * sizeof (struct qty_table_elem));
6825
6826 new_basic_block ();
6827
6828 /* TO might be a label. If so, protect it from being deleted. */
6829 if (to != 0 && LABEL_P (to))
6830 ++LABEL_NUSES (to);
6831
6832 for (insn = from; insn != to; insn = NEXT_INSN (insn))
6833 {
6834 enum rtx_code code = GET_CODE (insn);
6835
6836 /* If we have processed 1,000 insns, flush the hash table to
6837 avoid extreme quadratic behavior. We must not include NOTEs
6838 in the count since there may be more of them when generating
6839 debugging information. If we clear the table at different
6840 times, code generated with -g -O might be different than code
6841 generated with -O but not -g.
6842
6843 ??? This is a real kludge and needs to be done some other way.
6844 Perhaps for 2.9. */
6845 if (code != NOTE && num_insns++ > 1000)
6846 {
6847 flush_hash_table ();
6848 num_insns = 0;
6849 }
6850
6851 /* See if this is a branch that is part of the path. If so, and it is
6852 to be taken, do so. */
6853 if (next_branch->branch == insn)
6854 {
6855 enum taken status = next_branch++->status;
6856 if (status != PATH_NOT_TAKEN)
6857 {
6858 if (status == PATH_TAKEN)
6859 record_jump_equiv (insn, 1);
6860 else
6861 invalidate_skipped_block (NEXT_INSN (insn));
6862
6863 /* Set the last insn as the jump insn; it doesn't affect cc0.
6864 Then follow this branch. */
6865 #ifdef HAVE_cc0
6866 prev_insn_cc0 = 0;
6867 prev_insn = insn;
6868 #endif
6869 insn = JUMP_LABEL (insn);
6870 continue;
6871 }
6872 }
6873
6874 if (GET_MODE (insn) == QImode)
6875 PUT_MODE (insn, VOIDmode);
6876
6877 if (GET_RTX_CLASS (code) == RTX_INSN)
6878 {
6879 rtx p;
6880
6881 /* Process notes first so we have all notes in canonical forms when
6882 looking for duplicate operations. */
6883
6884 if (REG_NOTES (insn))
6885 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
6886
6887 /* Track when we are inside in LIBCALL block. Inside such a block,
6888 we do not want to record destinations. The last insn of a
6889 LIBCALL block is not considered to be part of the block, since
6890 its destination is the result of the block and hence should be
6891 recorded. */
6892
6893 if (REG_NOTES (insn) != 0)
6894 {
6895 if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
6896 libcall_insn = XEXP (p, 0);
6897 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
6898 {
6899 /* Keep libcall_insn for the last SET insn of a no-conflict
6900 block to prevent changing the destination. */
6901 if (! no_conflict)
6902 libcall_insn = 0;
6903 else
6904 no_conflict = -1;
6905 }
6906 else if (find_reg_note (insn, REG_NO_CONFLICT, NULL_RTX))
6907 no_conflict = 1;
6908 }
6909
6910 cse_insn (insn, libcall_insn);
6911
6912 if (no_conflict == -1)
6913 {
6914 libcall_insn = 0;
6915 no_conflict = 0;
6916 }
6917
6918 /* If we haven't already found an insn where we added a LABEL_REF,
6919 check this one. */
6920 if (NONJUMP_INSN_P (insn) && ! recorded_label_ref
6921 && for_each_rtx (&PATTERN (insn), check_for_label_ref,
6922 (void *) insn))
6923 recorded_label_ref = 1;
6924 }
6925
6926 /* If INSN is now an unconditional jump, skip to the end of our
6927 basic block by pretending that we just did the last insn in the
6928 basic block. If we are jumping to the end of our block, show
6929 that we can have one usage of TO. */
6930
6931 if (any_uncondjump_p (insn))
6932 {
6933 if (to == 0)
6934 {
6935 free (qty_table);
6936 return 0;
6937 }
6938
6939 if (JUMP_LABEL (insn) == to)
6940 to_usage = 1;
6941
6942 /* Maybe TO was deleted because the jump is unconditional.
6943 If so, there is nothing left in this basic block. */
6944 /* ??? Perhaps it would be smarter to set TO
6945 to whatever follows this insn,
6946 and pretend the basic block had always ended here. */
6947 if (INSN_DELETED_P (to))
6948 break;
6949
6950 insn = PREV_INSN (to);
6951 }
6952
6953 /* See if it is ok to keep on going past the label
6954 which used to end our basic block. Remember that we incremented
6955 the count of that label, so we decrement it here. If we made
6956 a jump unconditional, TO_USAGE will be one; in that case, we don't
6957 want to count the use in that jump. */
6958
6959 if (to != 0 && NEXT_INSN (insn) == to
6960 && LABEL_P (to) && --LABEL_NUSES (to) == to_usage)
6961 {
6962 struct cse_basic_block_data val;
6963 rtx prev;
6964
6965 insn = NEXT_INSN (to);
6966
6967 /* If TO was the last insn in the function, we are done. */
6968 if (insn == 0)
6969 {
6970 free (qty_table);
6971 return 0;
6972 }
6973
6974 /* If TO was preceded by a BARRIER we are done with this block
6975 because it has no continuation. */
6976 prev = prev_nonnote_insn (to);
6977 if (prev && BARRIER_P (prev))
6978 {
6979 free (qty_table);
6980 return insn;
6981 }
6982
6983 /* Find the end of the following block. Note that we won't be
6984 following branches in this case. */
6985 to_usage = 0;
6986 val.path_size = 0;
6987 val.path = xmalloc (sizeof (struct branch_path)
6988 * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6989 cse_end_of_basic_block (insn, &val, 0, 0);
6990 free (val.path);
6991
6992 /* If the tables we allocated have enough space left
6993 to handle all the SETs in the next basic block,
6994 continue through it. Otherwise, return,
6995 and that block will be scanned individually. */
6996 if (val.nsets * 2 + next_qty > max_qty)
6997 break;
6998
6999 cse_basic_block_start = val.low_cuid;
7000 cse_basic_block_end = val.high_cuid;
7001 to = val.last;
7002
7003 /* Prevent TO from being deleted if it is a label. */
7004 if (to != 0 && LABEL_P (to))
7005 ++LABEL_NUSES (to);
7006
7007 /* Back up so we process the first insn in the extension. */
7008 insn = PREV_INSN (insn);
7009 }
7010 }
7011
7012 gcc_assert (next_qty <= max_qty);
7013
7014 free (qty_table);
7015
7016 return to ? NEXT_INSN (to) : 0;
7017 }
7018 \f
7019 /* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
7020 there isn't a REG_LABEL note. Return one if so. DATA is the insn. */
7021
7022 static int
7023 check_for_label_ref (rtx *rtl, void *data)
7024 {
7025 rtx insn = (rtx) data;
7026
7027 /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
7028 we must rerun jump since it needs to place the note. If this is a
7029 LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
7030 since no REG_LABEL will be added. */
7031 return (GET_CODE (*rtl) == LABEL_REF
7032 && ! LABEL_REF_NONLOCAL_P (*rtl)
7033 && LABEL_P (XEXP (*rtl, 0))
7034 && INSN_UID (XEXP (*rtl, 0)) != 0
7035 && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
7036 }
7037 \f
7038 /* Count the number of times registers are used (not set) in X.
7039 COUNTS is an array in which we accumulate the count, INCR is how much
7040 we count each register usage. */
7041
7042 static void
7043 count_reg_usage (rtx x, int *counts, int incr)
7044 {
7045 enum rtx_code code;
7046 rtx note;
7047 const char *fmt;
7048 int i, j;
7049
7050 if (x == 0)
7051 return;
7052
7053 switch (code = GET_CODE (x))
7054 {
7055 case REG:
7056 counts[REGNO (x)] += incr;
7057 return;
7058
7059 case PC:
7060 case CC0:
7061 case CONST:
7062 case CONST_INT:
7063 case CONST_DOUBLE:
7064 case CONST_VECTOR:
7065 case SYMBOL_REF:
7066 case LABEL_REF:
7067 return;
7068
7069 case CLOBBER:
7070 /* If we are clobbering a MEM, mark any registers inside the address
7071 as being used. */
7072 if (MEM_P (XEXP (x, 0)))
7073 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, incr);
7074 return;
7075
7076 case SET:
7077 /* Unless we are setting a REG, count everything in SET_DEST. */
7078 if (!REG_P (SET_DEST (x)))
7079 count_reg_usage (SET_DEST (x), counts, incr);
7080 count_reg_usage (SET_SRC (x), counts, incr);
7081 return;
7082
7083 case CALL_INSN:
7084 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, incr);
7085 /* Fall through. */
7086
7087 case INSN:
7088 case JUMP_INSN:
7089 count_reg_usage (PATTERN (x), counts, incr);
7090
7091 /* Things used in a REG_EQUAL note aren't dead since loop may try to
7092 use them. */
7093
7094 note = find_reg_equal_equiv_note (x);
7095 if (note)
7096 {
7097 rtx eqv = XEXP (note, 0);
7098
7099 if (GET_CODE (eqv) == EXPR_LIST)
7100 /* This REG_EQUAL note describes the result of a function call.
7101 Process all the arguments. */
7102 do
7103 {
7104 count_reg_usage (XEXP (eqv, 0), counts, incr);
7105 eqv = XEXP (eqv, 1);
7106 }
7107 while (eqv && GET_CODE (eqv) == EXPR_LIST);
7108 else
7109 count_reg_usage (eqv, counts, incr);
7110 }
7111 return;
7112
7113 case EXPR_LIST:
7114 if (REG_NOTE_KIND (x) == REG_EQUAL
7115 || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE)
7116 /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
7117 involving registers in the address. */
7118 || GET_CODE (XEXP (x, 0)) == CLOBBER)
7119 count_reg_usage (XEXP (x, 0), counts, incr);
7120
7121 count_reg_usage (XEXP (x, 1), counts, incr);
7122 return;
7123
7124 case ASM_OPERANDS:
7125 /* Iterate over just the inputs, not the constraints as well. */
7126 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
7127 count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, incr);
7128 return;
7129
7130 case INSN_LIST:
7131 gcc_unreachable ();
7132
7133 default:
7134 break;
7135 }
7136
7137 fmt = GET_RTX_FORMAT (code);
7138 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7139 {
7140 if (fmt[i] == 'e')
7141 count_reg_usage (XEXP (x, i), counts, incr);
7142 else if (fmt[i] == 'E')
7143 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7144 count_reg_usage (XVECEXP (x, i, j), counts, incr);
7145 }
7146 }
7147 \f
7148 /* Return true if set is live. */
7149 static bool
7150 set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0. */
7151 int *counts)
7152 {
7153 #ifdef HAVE_cc0
7154 rtx tem;
7155 #endif
7156
7157 if (set_noop_p (set))
7158 ;
7159
7160 #ifdef HAVE_cc0
7161 else if (GET_CODE (SET_DEST (set)) == CC0
7162 && !side_effects_p (SET_SRC (set))
7163 && ((tem = next_nonnote_insn (insn)) == 0
7164 || !INSN_P (tem)
7165 || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
7166 return false;
7167 #endif
7168 else if (!REG_P (SET_DEST (set))
7169 || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
7170 || counts[REGNO (SET_DEST (set))] != 0
7171 || side_effects_p (SET_SRC (set)))
7172 return true;
7173 return false;
7174 }
7175
7176 /* Return true if insn is live. */
7177
7178 static bool
7179 insn_live_p (rtx insn, int *counts)
7180 {
7181 int i;
7182 if (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
7183 return true;
7184 else if (GET_CODE (PATTERN (insn)) == SET)
7185 return set_live_p (PATTERN (insn), insn, counts);
7186 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7187 {
7188 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7189 {
7190 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7191
7192 if (GET_CODE (elt) == SET)
7193 {
7194 if (set_live_p (elt, insn, counts))
7195 return true;
7196 }
7197 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7198 return true;
7199 }
7200 return false;
7201 }
7202 else
7203 return true;
7204 }
7205
7206 /* Return true if libcall is dead as a whole. */
7207
7208 static bool
7209 dead_libcall_p (rtx insn, int *counts)
7210 {
7211 rtx note, set, new;
7212
7213 /* See if there's a REG_EQUAL note on this insn and try to
7214 replace the source with the REG_EQUAL expression.
7215
7216 We assume that insns with REG_RETVALs can only be reg->reg
7217 copies at this point. */
7218 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7219 if (!note)
7220 return false;
7221
7222 set = single_set (insn);
7223 if (!set)
7224 return false;
7225
7226 new = simplify_rtx (XEXP (note, 0));
7227 if (!new)
7228 new = XEXP (note, 0);
7229
7230 /* While changing insn, we must update the counts accordingly. */
7231 count_reg_usage (insn, counts, -1);
7232
7233 if (validate_change (insn, &SET_SRC (set), new, 0))
7234 {
7235 count_reg_usage (insn, counts, 1);
7236 remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7237 remove_note (insn, note);
7238 return true;
7239 }
7240
7241 if (CONSTANT_P (new))
7242 {
7243 new = force_const_mem (GET_MODE (SET_DEST (set)), new);
7244 if (new && validate_change (insn, &SET_SRC (set), new, 0))
7245 {
7246 count_reg_usage (insn, counts, 1);
7247 remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7248 remove_note (insn, note);
7249 return true;
7250 }
7251 }
7252
7253 count_reg_usage (insn, counts, 1);
7254 return false;
7255 }
7256
7257 /* Scan all the insns and delete any that are dead; i.e., they store a register
7258 that is never used or they copy a register to itself.
7259
7260 This is used to remove insns made obviously dead by cse, loop or other
7261 optimizations. It improves the heuristics in loop since it won't try to
7262 move dead invariants out of loops or make givs for dead quantities. The
7263 remaining passes of the compilation are also sped up. */
7264
7265 int
7266 delete_trivially_dead_insns (rtx insns, int nreg)
7267 {
7268 int *counts;
7269 rtx insn, prev;
7270 int in_libcall = 0, dead_libcall = 0;
7271 int ndead = 0;
7272
7273 timevar_push (TV_DELETE_TRIVIALLY_DEAD);
7274 /* First count the number of times each register is used. */
7275 counts = xcalloc (nreg, sizeof (int));
7276 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
7277 count_reg_usage (insn, counts, 1);
7278
7279 /* Go from the last insn to the first and delete insns that only set unused
7280 registers or copy a register to itself. As we delete an insn, remove
7281 usage counts for registers it uses.
7282
7283 The first jump optimization pass may leave a real insn as the last
7284 insn in the function. We must not skip that insn or we may end
7285 up deleting code that is not really dead. */
7286 insn = get_last_insn ();
7287 if (! INSN_P (insn))
7288 insn = prev_real_insn (insn);
7289
7290 for (; insn; insn = prev)
7291 {
7292 int live_insn = 0;
7293
7294 prev = prev_real_insn (insn);
7295
7296 /* Don't delete any insns that are part of a libcall block unless
7297 we can delete the whole libcall block.
7298
7299 Flow or loop might get confused if we did that. Remember
7300 that we are scanning backwards. */
7301 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7302 {
7303 in_libcall = 1;
7304 live_insn = 1;
7305 dead_libcall = dead_libcall_p (insn, counts);
7306 }
7307 else if (in_libcall)
7308 live_insn = ! dead_libcall;
7309 else
7310 live_insn = insn_live_p (insn, counts);
7311
7312 /* If this is a dead insn, delete it and show registers in it aren't
7313 being used. */
7314
7315 if (! live_insn)
7316 {
7317 count_reg_usage (insn, counts, -1);
7318 delete_insn_and_edges (insn);
7319 ndead++;
7320 }
7321
7322 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7323 {
7324 in_libcall = 0;
7325 dead_libcall = 0;
7326 }
7327 }
7328
7329 if (dump_file && ndead)
7330 fprintf (dump_file, "Deleted %i trivially dead insns\n",
7331 ndead);
7332 /* Clean up. */
7333 free (counts);
7334 timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
7335 return ndead;
7336 }
7337
7338 /* This function is called via for_each_rtx. The argument, NEWREG, is
7339 a condition code register with the desired mode. If we are looking
7340 at the same register in a different mode, replace it with
7341 NEWREG. */
7342
7343 static int
7344 cse_change_cc_mode (rtx *loc, void *data)
7345 {
7346 struct change_cc_mode_args* args = (struct change_cc_mode_args*)data;
7347
7348 if (*loc
7349 && REG_P (*loc)
7350 && REGNO (*loc) == REGNO (args->newreg)
7351 && GET_MODE (*loc) != GET_MODE (args->newreg))
7352 {
7353 validate_change (args->insn, loc, args->newreg, 1);
7354
7355 return -1;
7356 }
7357 return 0;
7358 }
7359
7360 /* Change the mode of any reference to the register REGNO (NEWREG) to
7361 GET_MODE (NEWREG) in INSN. */
7362
7363 static void
7364 cse_change_cc_mode_insn (rtx insn, rtx newreg)
7365 {
7366 struct change_cc_mode_args args;
7367 int success;
7368
7369 if (!INSN_P (insn))
7370 return;
7371
7372 args.insn = insn;
7373 args.newreg = newreg;
7374
7375 for_each_rtx (&PATTERN (insn), cse_change_cc_mode, &args);
7376 for_each_rtx (&REG_NOTES (insn), cse_change_cc_mode, &args);
7377
7378 /* If the following assertion was triggered, there is most probably
7379 something wrong with the cc_modes_compatible back end function.
7380 CC modes only can be considered compatible if the insn - with the mode
7381 replaced by any of the compatible modes - can still be recognized. */
7382 success = apply_change_group ();
7383 gcc_assert (success);
7384 }
7385
7386 /* Change the mode of any reference to the register REGNO (NEWREG) to
7387 GET_MODE (NEWREG), starting at START. Stop before END. Stop at
7388 any instruction which modifies NEWREG. */
7389
7390 static void
7391 cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
7392 {
7393 rtx insn;
7394
7395 for (insn = start; insn != end; insn = NEXT_INSN (insn))
7396 {
7397 if (! INSN_P (insn))
7398 continue;
7399
7400 if (reg_set_p (newreg, insn))
7401 return;
7402
7403 cse_change_cc_mode_insn (insn, newreg);
7404 }
7405 }
7406
7407 /* BB is a basic block which finishes with CC_REG as a condition code
7408 register which is set to CC_SRC. Look through the successors of BB
7409 to find blocks which have a single predecessor (i.e., this one),
7410 and look through those blocks for an assignment to CC_REG which is
7411 equivalent to CC_SRC. CAN_CHANGE_MODE indicates whether we are
7412 permitted to change the mode of CC_SRC to a compatible mode. This
7413 returns VOIDmode if no equivalent assignments were found.
7414 Otherwise it returns the mode which CC_SRC should wind up with.
7415
7416 The main complexity in this function is handling the mode issues.
7417 We may have more than one duplicate which we can eliminate, and we
7418 try to find a mode which will work for multiple duplicates. */
7419
7420 static enum machine_mode
7421 cse_cc_succs (basic_block bb, rtx cc_reg, rtx cc_src, bool can_change_mode)
7422 {
7423 bool found_equiv;
7424 enum machine_mode mode;
7425 unsigned int insn_count;
7426 edge e;
7427 rtx insns[2];
7428 enum machine_mode modes[2];
7429 rtx last_insns[2];
7430 unsigned int i;
7431 rtx newreg;
7432 edge_iterator ei;
7433
7434 /* We expect to have two successors. Look at both before picking
7435 the final mode for the comparison. If we have more successors
7436 (i.e., some sort of table jump, although that seems unlikely),
7437 then we require all beyond the first two to use the same
7438 mode. */
7439
7440 found_equiv = false;
7441 mode = GET_MODE (cc_src);
7442 insn_count = 0;
7443 FOR_EACH_EDGE (e, ei, bb->succs)
7444 {
7445 rtx insn;
7446 rtx end;
7447
7448 if (e->flags & EDGE_COMPLEX)
7449 continue;
7450
7451 if (EDGE_COUNT (e->dest->preds) != 1
7452 || e->dest == EXIT_BLOCK_PTR)
7453 continue;
7454
7455 end = NEXT_INSN (BB_END (e->dest));
7456 for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
7457 {
7458 rtx set;
7459
7460 if (! INSN_P (insn))
7461 continue;
7462
7463 /* If CC_SRC is modified, we have to stop looking for
7464 something which uses it. */
7465 if (modified_in_p (cc_src, insn))
7466 break;
7467
7468 /* Check whether INSN sets CC_REG to CC_SRC. */
7469 set = single_set (insn);
7470 if (set
7471 && REG_P (SET_DEST (set))
7472 && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7473 {
7474 bool found;
7475 enum machine_mode set_mode;
7476 enum machine_mode comp_mode;
7477
7478 found = false;
7479 set_mode = GET_MODE (SET_SRC (set));
7480 comp_mode = set_mode;
7481 if (rtx_equal_p (cc_src, SET_SRC (set)))
7482 found = true;
7483 else if (GET_CODE (cc_src) == COMPARE
7484 && GET_CODE (SET_SRC (set)) == COMPARE
7485 && mode != set_mode
7486 && rtx_equal_p (XEXP (cc_src, 0),
7487 XEXP (SET_SRC (set), 0))
7488 && rtx_equal_p (XEXP (cc_src, 1),
7489 XEXP (SET_SRC (set), 1)))
7490
7491 {
7492 comp_mode = targetm.cc_modes_compatible (mode, set_mode);
7493 if (comp_mode != VOIDmode
7494 && (can_change_mode || comp_mode == mode))
7495 found = true;
7496 }
7497
7498 if (found)
7499 {
7500 found_equiv = true;
7501 if (insn_count < ARRAY_SIZE (insns))
7502 {
7503 insns[insn_count] = insn;
7504 modes[insn_count] = set_mode;
7505 last_insns[insn_count] = end;
7506 ++insn_count;
7507
7508 if (mode != comp_mode)
7509 {
7510 gcc_assert (can_change_mode);
7511 mode = comp_mode;
7512
7513 /* The modified insn will be re-recognized later. */
7514 PUT_MODE (cc_src, mode);
7515 }
7516 }
7517 else
7518 {
7519 if (set_mode != mode)
7520 {
7521 /* We found a matching expression in the
7522 wrong mode, but we don't have room to
7523 store it in the array. Punt. This case
7524 should be rare. */
7525 break;
7526 }
7527 /* INSN sets CC_REG to a value equal to CC_SRC
7528 with the right mode. We can simply delete
7529 it. */
7530 delete_insn (insn);
7531 }
7532
7533 /* We found an instruction to delete. Keep looking,
7534 in the hopes of finding a three-way jump. */
7535 continue;
7536 }
7537
7538 /* We found an instruction which sets the condition
7539 code, so don't look any farther. */
7540 break;
7541 }
7542
7543 /* If INSN sets CC_REG in some other way, don't look any
7544 farther. */
7545 if (reg_set_p (cc_reg, insn))
7546 break;
7547 }
7548
7549 /* If we fell off the bottom of the block, we can keep looking
7550 through successors. We pass CAN_CHANGE_MODE as false because
7551 we aren't prepared to handle compatibility between the
7552 further blocks and this block. */
7553 if (insn == end)
7554 {
7555 enum machine_mode submode;
7556
7557 submode = cse_cc_succs (e->dest, cc_reg, cc_src, false);
7558 if (submode != VOIDmode)
7559 {
7560 gcc_assert (submode == mode);
7561 found_equiv = true;
7562 can_change_mode = false;
7563 }
7564 }
7565 }
7566
7567 if (! found_equiv)
7568 return VOIDmode;
7569
7570 /* Now INSN_COUNT is the number of instructions we found which set
7571 CC_REG to a value equivalent to CC_SRC. The instructions are in
7572 INSNS. The modes used by those instructions are in MODES. */
7573
7574 newreg = NULL_RTX;
7575 for (i = 0; i < insn_count; ++i)
7576 {
7577 if (modes[i] != mode)
7578 {
7579 /* We need to change the mode of CC_REG in INSNS[i] and
7580 subsequent instructions. */
7581 if (! newreg)
7582 {
7583 if (GET_MODE (cc_reg) == mode)
7584 newreg = cc_reg;
7585 else
7586 newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7587 }
7588 cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
7589 newreg);
7590 }
7591
7592 delete_insn (insns[i]);
7593 }
7594
7595 return mode;
7596 }
7597
7598 /* If we have a fixed condition code register (or two), walk through
7599 the instructions and try to eliminate duplicate assignments. */
7600
7601 void
7602 cse_condition_code_reg (void)
7603 {
7604 unsigned int cc_regno_1;
7605 unsigned int cc_regno_2;
7606 rtx cc_reg_1;
7607 rtx cc_reg_2;
7608 basic_block bb;
7609
7610 if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
7611 return;
7612
7613 cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
7614 if (cc_regno_2 != INVALID_REGNUM)
7615 cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
7616 else
7617 cc_reg_2 = NULL_RTX;
7618
7619 FOR_EACH_BB (bb)
7620 {
7621 rtx last_insn;
7622 rtx cc_reg;
7623 rtx insn;
7624 rtx cc_src_insn;
7625 rtx cc_src;
7626 enum machine_mode mode;
7627 enum machine_mode orig_mode;
7628
7629 /* Look for blocks which end with a conditional jump based on a
7630 condition code register. Then look for the instruction which
7631 sets the condition code register. Then look through the
7632 successor blocks for instructions which set the condition
7633 code register to the same value. There are other possible
7634 uses of the condition code register, but these are by far the
7635 most common and the ones which we are most likely to be able
7636 to optimize. */
7637
7638 last_insn = BB_END (bb);
7639 if (!JUMP_P (last_insn))
7640 continue;
7641
7642 if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
7643 cc_reg = cc_reg_1;
7644 else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
7645 cc_reg = cc_reg_2;
7646 else
7647 continue;
7648
7649 cc_src_insn = NULL_RTX;
7650 cc_src = NULL_RTX;
7651 for (insn = PREV_INSN (last_insn);
7652 insn && insn != PREV_INSN (BB_HEAD (bb));
7653 insn = PREV_INSN (insn))
7654 {
7655 rtx set;
7656
7657 if (! INSN_P (insn))
7658 continue;
7659 set = single_set (insn);
7660 if (set
7661 && REG_P (SET_DEST (set))
7662 && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7663 {
7664 cc_src_insn = insn;
7665 cc_src = SET_SRC (set);
7666 break;
7667 }
7668 else if (reg_set_p (cc_reg, insn))
7669 break;
7670 }
7671
7672 if (! cc_src_insn)
7673 continue;
7674
7675 if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
7676 continue;
7677
7678 /* Now CC_REG is a condition code register used for a
7679 conditional jump at the end of the block, and CC_SRC, in
7680 CC_SRC_INSN, is the value to which that condition code
7681 register is set, and CC_SRC is still meaningful at the end of
7682 the basic block. */
7683
7684 orig_mode = GET_MODE (cc_src);
7685 mode = cse_cc_succs (bb, cc_reg, cc_src, true);
7686 if (mode != VOIDmode)
7687 {
7688 gcc_assert (mode == GET_MODE (cc_src));
7689 if (mode != orig_mode)
7690 {
7691 rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7692
7693 cse_change_cc_mode_insn (cc_src_insn, newreg);
7694
7695 /* Do the same in the following insns that use the
7696 current value of CC_REG within BB. */
7697 cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
7698 NEXT_INSN (last_insn),
7699 newreg);
7700 }
7701 }
7702 }
7703 }