gcc/cse.c
1 /* Common subexpression elimination for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 /* stdio.h must precede rtl.h for FFS. */
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "regs.h"
31 #include "basic-block.h"
32 #include "flags.h"
33 #include "real.h"
34 #include "insn-config.h"
35 #include "recog.h"
36 #include "function.h"
37 #include "expr.h"
38 #include "toplev.h"
39 #include "output.h"
40 #include "ggc.h"
41 #include "timevar.h"
42 #include "except.h"
43 #include "target.h"
44 #include "params.h"
45 #include "rtlhooks-def.h"
46 #include "tree-pass.h"
47 #include "df.h"
48 #include "dbgcnt.h"
49
50 /* The basic idea of common subexpression elimination is to go
51 through the code, keeping a record of expressions that would
52 have the same value at the current scan point, and replacing
53 expressions encountered with the cheapest equivalent expression.
54
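For example (an illustrative sequence with made-up register numbers, not
taken from real code), given

    (set (reg:SI 100) (plus:SI (reg:SI 1) (reg:SI 2)))
    ...
    (set (reg:SI 200) (plus:SI (reg:SI 1) (reg:SI 2)))

within one extended basic block, the second PLUS can be replaced by
(reg:SI 100), provided neither reg 1 nor reg 2 is assigned to in between.
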
55 It is too complicated to keep track of the different possibilities
56 when control paths merge in this code; so, at each label, we forget all
57 that is known and start fresh. This can be described as processing each
58 extended basic block separately. We have a separate pass to perform
59 global CSE.
60
61 Note CSE can turn a conditional or computed jump into a nop or
62 an unconditional jump. When this occurs we arrange to run the jump
63 optimizer after CSE to delete the unreachable code.
64
65 We use two data structures to record the equivalent expressions:
66 a hash table for most expressions, and a vector of "quantity
67 numbers" to record equivalent (pseudo) registers.
68
69 The use of the special data structure for registers is desirable
70 because it is faster. It is possible because register references
71 contain a fairly small number, the register number, taken from
72 a contiguously allocated series, and two register references are
73 identical if they have the same number. General expressions
74 do not have any such thing, so the only way to retrieve the
75 information recorded on an expression other than a register
76 is to keep it in a hash table.
77
78 Registers and "quantity numbers":
79
80 At the start of each basic block, all of the (hardware and pseudo)
81 registers used in the function are given distinct quantity
82 numbers to indicate their contents. During scan, when the code
83 copies one register into another, we copy the quantity number.
84 When a register is loaded in any other way, we allocate a new
85 quantity number to describe the value generated by this operation.
86 `REG_QTY (N)' records what quantity register N is currently thought
87 of as containing.
88
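For example (hypothetical pseudo registers), after

    (set (reg:SI 101) (reg:SI 100))

register 101 shares the quantity number of register 100, whereas

    (set (reg:SI 102) (plus:SI (reg:SI 100) (const_int 4)))

allocates a fresh quantity number for register 102.
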
89 All real quantity numbers are greater than or equal to zero.
90 If register N has not been assigned a quantity, `REG_QTY (N)' will
91 equal -N - 1, which is always negative.
92
93 Quantity numbers below zero do not exist and none of the `qty_table'
94 entries should be referenced with a negative index.
95
96 We also maintain a bidirectional chain of registers for each
97 quantity number. The `qty_table' members `first_reg' and `last_reg',
98 and `reg_eqv_table' members `next' and `prev' hold these chains.
99
100 The first register in a chain is the one whose lifespan is least local.
101 Among equals, it is the one that was seen first.
102 We replace any equivalent register with that one.
103
104 If two registers have the same quantity number, then REG expressions
105 with the qty_table `mode' must be in the hash table for both
106 registers and must be in the same class.
107
108 The converse is not true. Since hard registers may be referenced in
109 any mode, two REG expressions might be equivalent in the hash table
110 but not have the same quantity number if the quantity of one of the
111 registers does not have the same mode as those expressions.
112
113 Constants and quantity numbers:
114
115 When a quantity has a known constant value, that value is stored
116 in the appropriate qty_table `const_rtx'. This is in addition to
117 putting the constant in the hash table as is usual for non-regs.
118
119 Whether a reg or a constant is preferred is determined by the configuration
120 macro CONST_COSTS and will often depend on the constant value. In any
121 event, expressions containing constants can be simplified by fold_rtx.
122
123 When a quantity has a known nearly constant value (such as an address
124 of a stack slot), that value is stored in the appropriate qty_table
125 `const_rtx'.
126
127 Integer constants don't have a machine mode. However, cse
128 determines the intended machine mode from the destination
129 of the instruction that moves the constant. The machine mode
130 is recorded in the hash table along with the actual RTL
131 constant expression so that different modes are kept separate.
132
133 Other expressions:
134
135 To record known equivalences among expressions in general
136 we use a hash table called `table'. It has a fixed number of buckets
137 that contain chains of `struct table_elt' elements for expressions.
138 These chains connect the elements whose expressions have the same
139 hash codes.
140
141 Other chains through the same elements connect the elements which
142 currently have equivalent values.
143
144 Register references in an expression are canonicalized before hashing
145 the expression. This is done using `reg_qty' and qty_table `first_reg'.
146 The hash code of a register reference is computed using the quantity
147 number, not the register number.
148
149 When the value of an expression changes, it is necessary to remove from the
150 hash table not just that expression but all expressions whose values
151 could be different as a result.
152
153 1. If the value changing is in memory, then except in special cases
154 ANYTHING referring to memory could be changed. That is because
155 nobody knows where a pointer does not point.
156 The function `invalidate_memory' removes what is necessary.
157
158 The special cases are when the address is constant or is
159 a constant plus a fixed register such as the frame pointer
160 or a static chain pointer. When such addresses are stored in,
161 we can tell exactly which other such addresses must be invalidated
162 due to overlap. `invalidate' does this.
163 All expressions that refer to non-constant
164 memory addresses are also invalidated. `invalidate_memory' does this.
165
166 2. If the value changing is a register, all expressions
167 containing references to that register, and only those,
168 must be removed.
169
170 Because searching the entire hash table for expressions that contain
171 a register is very slow, we try to figure out when it isn't necessary.
172 Precisely, this is necessary only when expressions have been
173 entered in the hash table using this register, and then the value has
174 changed, and then another expression wants to be added to refer to
175 the register's new value. This sequence of circumstances is rare
176 within any one basic block.
177
178 `REG_TICK' and `REG_IN_TABLE', accessors for members of
179 cse_reg_info, are used to detect this case. REG_TICK (i) is
180 incremented whenever a value is stored in register i.
181 REG_IN_TABLE (i) holds -1 if no references to register i have been
182 entered in the table; otherwise, it contains the value REG_TICK (i)
183 had when the references were entered. If we want to enter a
184 reference and REG_IN_TABLE (i) != REG_TICK (i), we must scan and
185 remove old references. Until we want to enter a new entry, the
186 mere fact that the two vectors don't match causes the entries to be
187 ignored if anyone tries to match them.
188
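For instance (illustrative numbers only), if REG_TICK (i) is 3 but
REG_IN_TABLE (i) is 2, the expressions mentioning register i that are
still in the table were entered before the latest store to it; they are
ignored by lookups and are flushed (by remove_invalid_refs) before any
new reference to register i is entered.
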
189 Registers themselves are entered in the hash table as well as in
190 the equivalent-register chains. However, `REG_TICK' and
191 `REG_IN_TABLE' do not apply to expressions which are simple
192 register references. These expressions are removed from the table
193 immediately when they become invalid, and this can be done even if
194 we do not immediately search for all the expressions that refer to
195 the register.
196
197 A CLOBBER rtx in an instruction invalidates its operand for further
198 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
199 invalidates everything that resides in memory.
200
201 Related expressions:
202
203 Constant expressions that differ only by an additive integer
204 are called related. When a constant expression is put in
205 the table, the related expression with no constant term
206 is also entered. These are made to point at each other
207 so that it is possible to find out if there exists any
208 register equivalent to an expression related to a given expression. */
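
/* For example (an illustrative pair), (const (plus (symbol_ref "x")
   (const_int 8))) and (symbol_ref "x") are related: entering the first
   also enters the second, and the two are linked through the
   `related_value' chain so that a register known to hold one of them
   can be used to compute the other (see use_related_value).  */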
209
210 /* Length of qty_table vector. We know in advance we will not need
211 a quantity number this big. */
212
213 static int max_qty;
214
215 /* Next quantity number to be allocated.
216 This is 1 + the largest number needed so far. */
217
218 static int next_qty;
219
220 /* Per-qty information tracking.
221
222 `first_reg' and `last_reg' track the head and tail of the
223 chain of registers which currently contain this quantity.
224
225 `mode' contains the machine mode of this quantity.
226
227 `const_rtx' holds the rtx of the constant value of this
228 quantity, if known. A sum of the frame/arg pointer
229 and a constant can also be entered here. When this holds
230 a known value, `const_insn' is the insn which stored the
231 constant value.
232
233 `comparison_{code,const,qty}' are used to track when a
234 comparison between a quantity and some constant or register has
235 been passed. In such a case, we know the results of the comparison
236 in case we see it again. These members record a comparison that
237 is known to be true. `comparison_code' holds the rtx code of such
238 a comparison, else it is set to UNKNOWN and the other two
239 comparison members are undefined. `comparison_const' holds
240 the constant being compared against, or zero if the comparison
241 is not against a constant. `comparison_qty' holds the quantity
242 being compared against when the result is known. If the comparison
243 is not with a register, `comparison_qty' is -1. */
244
245 struct qty_table_elem
246 {
247 rtx const_rtx;
248 rtx const_insn;
249 rtx comparison_const;
250 int comparison_qty;
251 unsigned int first_reg, last_reg;
252 /* The sizes of these fields should match the sizes of the
253 code and mode fields of struct rtx_def (see rtl.h). */
254 ENUM_BITFIELD(rtx_code) comparison_code : 16;
255 ENUM_BITFIELD(machine_mode) mode : 8;
256 };
257
258 /* The table of all qtys, indexed by qty number. */
259 static struct qty_table_elem *qty_table;
260
261 /* Structure used to pass arguments via for_each_rtx to function
262 cse_change_cc_mode. */
263 struct change_cc_mode_args
264 {
265 rtx insn;
266 rtx newreg;
267 };
268
269 #ifdef HAVE_cc0
270 /* For machines that have a CC0, we do not record its value in the hash
271 table since its use is guaranteed to be the insn immediately following
272 its definition and any other insn is presumed to invalidate it.
273
274 Instead, we store below the current and last value assigned to CC0.
275 If it should happen to be a constant, it is stored in preference
276 to the actual assigned value. In case it is a constant, we store
277 the mode in which the constant should be interpreted. */
278
279 static rtx this_insn_cc0, prev_insn_cc0;
280 static enum machine_mode this_insn_cc0_mode, prev_insn_cc0_mode;
281 #endif
282
283 /* Insn being scanned. */
284
285 static rtx this_insn;
286 static bool optimize_this_for_speed_p;
287
288 /* Indexed by register number, gives the number of the next (or
289 previous) register in the chain of registers sharing the same
290 value.
291
292 Or -1 if this register is at the end of the chain.
293
294 If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is undefined. */
295
296 /* Per-register equivalence chain. */
297 struct reg_eqv_elem
298 {
299 int next, prev;
300 };
301
302 /* The table of all register equivalence chains. */
303 static struct reg_eqv_elem *reg_eqv_table;
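
/* For instance (made-up numbers), if hard reg 5 and pseudos 100 and 101
   currently share one quantity, the chain for that quantity might run
   5 -> 100 -> 101: reg_eqv_table[5].prev == -1, reg_eqv_table[5].next
   == 100, reg_eqv_table[101].next == -1, and the corresponding qty_table
   entry has first_reg == 5 and last_reg == 101.  */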
304
305 struct cse_reg_info
306 {
307 /* The timestamp at which this register is initialized. */
308 unsigned int timestamp;
309
310 /* The quantity number of the register's current contents. */
311 int reg_qty;
312
313 /* The number of times the register has been altered in the current
314 basic block. */
315 int reg_tick;
316
317 /* The REG_TICK value at which rtx's containing this register are
318 valid in the hash table. If this does not equal the current
319 reg_tick value, such expressions existing in the hash table are
320 invalid. */
321 int reg_in_table;
322
323 /* The SUBREG that was set when REG_TICK was last incremented. Set
324 to -1 if the last store was to the whole register, not a subreg. */
325 unsigned int subreg_ticked;
326 };
327
328 /* A table of cse_reg_info indexed by register numbers. */
329 static struct cse_reg_info *cse_reg_info_table;
330
331 /* The size of the above table. */
332 static unsigned int cse_reg_info_table_size;
333
334 /* The index of the first entry that has not been initialized. */
335 static unsigned int cse_reg_info_table_first_uninitialized;
336
337 /* The timestamp at the beginning of the current run of
338 cse_extended_basic_block. We increment this variable at the
339 beginning of each such run. The timestamp field of a
340 cse_reg_info entry matches the value of this variable if and only
341 if the entry has been initialized during the current run of
342 cse_extended_basic_block. */
343 static unsigned int cse_reg_info_timestamp;
344
345 /* A HARD_REG_SET containing all the hard registers for which there is
346 currently a REG expression in the hash table. Note the difference
347 from the above variables, which indicate if the REG is mentioned in some
348 expression in the table. */
349
350 static HARD_REG_SET hard_regs_in_table;
351
352 /* True if CSE has altered the CFG. */
353 static bool cse_cfg_altered;
354
355 /* True if CSE has altered conditional jump insns in such a way
356 that jump optimization should be redone. */
357 static bool cse_jumps_altered;
358
359 /* True if we put a LABEL_REF into the hash table for an INSN
360 without a REG_LABEL_OPERAND note; in that case we have to rerun
361 jump after CSE to add the note. */
362 static bool recorded_label_ref;
363
364 /* canon_hash stores 1 in do_not_record
365 if it notices a reference to CC0, PC, or some other volatile
366 subexpression. */
367
368 static int do_not_record;
369
370 /* canon_hash stores 1 in hash_arg_in_memory
371 if it notices a reference to memory within the expression being hashed. */
372
373 static int hash_arg_in_memory;
374
375 /* The hash table contains buckets which are chains of `struct table_elt's,
376 each recording one expression's information.
377 That expression is in the `exp' field.
378
379 The canon_exp field contains a canonical (from the point of view of
380 alias analysis) version of the `exp' field.
381
382 Those elements with the same hash code are chained in both directions
383 through the `next_same_hash' and `prev_same_hash' fields.
384
385 Each set of expressions with equivalent values
386 are on a two-way chain through the `next_same_value'
387 and `prev_same_value' fields, and all point with
388 the `first_same_value' field at the first element in
389 that chain. The chain is in order of increasing cost.
390 Each element's cost value is in its `cost' field.
391
392 The `in_memory' field is nonzero for elements that
393 involve any reference to memory. These elements are removed
394 whenever a write is done to an unidentified location in memory.
395 To be safe, we assume that a memory address is unidentified unless
396 the address is either a symbol constant or a constant plus
397 the frame pointer or argument pointer.
398
399 The `related_value' field is used to connect related expressions
400 (that differ by adding an integer).
401 The related expressions are chained in a circular fashion.
402 `related_value' is zero for expressions for which this
403 chain is not useful.
404
405 The `cost' field stores the cost of this element's expression.
406 The `regcost' field stores the value returned by approx_reg_cost for
407 this element's expression.
408
409 The `is_const' flag is set if the element is a constant (including
410 a fixed address).
411
412 The `flag' field is used as a temporary during some search routines.
413
414 The `mode' field is usually the same as GET_MODE (`exp'), but
415 if `exp' is a CONST_INT and has no machine mode then the `mode'
416 field is the mode it was being used as. Each constant is
417 recorded separately for each mode it is used with. */
418
419 struct table_elt
420 {
421 rtx exp;
422 rtx canon_exp;
423 struct table_elt *next_same_hash;
424 struct table_elt *prev_same_hash;
425 struct table_elt *next_same_value;
426 struct table_elt *prev_same_value;
427 struct table_elt *first_same_value;
428 struct table_elt *related_value;
429 int cost;
430 int regcost;
431 /* The size of this field should match the size
432 of the mode field of struct rtx_def (see rtl.h). */
433 ENUM_BITFIELD(machine_mode) mode : 8;
434 char in_memory;
435 char is_const;
436 char flag;
437 };
438
439 /* We don't want a lot of buckets, because we rarely have very many
440 things stored in the hash table, and a lot of buckets slows
441 down a lot of loops that happen frequently. */
442 #define HASH_SHIFT 5
443 #define HASH_SIZE (1 << HASH_SHIFT)
444 #define HASH_MASK (HASH_SIZE - 1)
445
446 /* Compute hash code of X in mode M. Special-case the case where X is a
447 pseudo register (hard registers may require `do_not_record' to be set). */
448
449 #define HASH(X, M) \
450 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
451 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
452 : canon_hash (X, M)) & HASH_MASK)
453
454 /* Like HASH, but without side-effects. */
455 #define SAFE_HASH(X, M) \
456 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
457 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
458 : safe_hash (X, M)) & HASH_MASK)
459
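/* A minimal usage sketch (not compiled in; X and MODE stand for whatever
   rtx and machine mode the caller has in hand).  HASH may set
   `do_not_record' and `hash_arg_in_memory' as side effects via
   canon_hash; SAFE_HASH does not.  */
#if 0
  {
    unsigned int h = HASH (x, mode);
    struct table_elt *elt = lookup (x, h, mode);
  }
#endif
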
460 /* Determine whether register number N is considered a fixed register for the
461 purpose of approximating register costs.
462 It is desirable to replace other regs with fixed regs, to reduce need for
463 non-fixed hard regs.
464 A reg wins if it is either the frame pointer or designated as fixed. */
465 #define FIXED_REGNO_P(N) \
466 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
467 || fixed_regs[N] || global_regs[N])
468
469 /* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
470 hard registers and pointers into the frame are the cheapest with a cost
471 of 0. Next come pseudos with a cost of 1 and other hard registers with
472 a cost of 2. Aside from these special cases, call `rtx_cost'. */
473
474 #define CHEAP_REGNO(N) \
475 (REGNO_PTR_FRAME_P(N) \
476 || (HARD_REGISTER_NUM_P (N) \
477 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
478
479 #define COST(X) (REG_P (X) ? 0 : notreg_cost (X, SET))
480 #define COST_IN(X,OUTER) (REG_P (X) ? 0 : notreg_cost (X, OUTER))
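
/* For example, `insert' fills a new element's `cost' with COST (x) and
   its `regcost' with approx_reg_cost (x); `preferable' (and the CHEAPER
   macro further down) then order the members of a value class by these
   two numbers, cheapest first.  */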
481
482 /* Get the number of times this register has been updated in this
483 basic block. */
484
485 #define REG_TICK(N) (get_cse_reg_info (N)->reg_tick)
486
487 /* Get the point at which REG was recorded in the table. */
488
489 #define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table)
490
491 /* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
492 SUBREG). */
493
494 #define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked)
495
496 /* Get the quantity number for REG. */
497
498 #define REG_QTY(N) (get_cse_reg_info (N)->reg_qty)
499
500 /* Determine if the quantity number for register X represents a valid index
501 into the qty_table. */
502
503 #define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)
504
505 static struct table_elt *table[HASH_SIZE];
506
507 /* Chain of `struct table_elt's made so far for this function
508 but currently removed from the table. */
509
510 static struct table_elt *free_element_chain;
511
512 /* Set to the cost of a constant pool reference if one was found for a
513 symbolic constant. If this was found, it means we should try to
514 convert constants into constant pool entries if they don't fit in
515 the insn. */
516
517 static int constant_pool_entries_cost;
518 static int constant_pool_entries_regcost;
519
520 /* This data describes a block that will be processed by
521 cse_extended_basic_block. */
522
523 struct cse_basic_block_data
524 {
525 /* Total number of SETs in block. */
526 int nsets;
527 /* Size of current branch path, if any. */
528 int path_size;
529 /* Current path, indicating which basic_blocks will be processed. */
530 struct branch_path
531 {
532 /* The basic block for this path entry. */
533 basic_block bb;
534 } *path;
535 };
536
537
538 /* Pointers to the live in/live out bitmaps for the boundaries of the
539 current EBB. */
540 static bitmap cse_ebb_live_in, cse_ebb_live_out;
541
542 /* A simple bitmap to track which basic blocks have already been
543 visited as part of a previously processed extended basic block. */
544 static sbitmap cse_visited_basic_blocks;
545
546 static bool fixed_base_plus_p (rtx x);
547 static int notreg_cost (rtx, enum rtx_code);
548 static int approx_reg_cost_1 (rtx *, void *);
549 static int approx_reg_cost (rtx);
550 static int preferable (int, int, int, int);
551 static void new_basic_block (void);
552 static void make_new_qty (unsigned int, enum machine_mode);
553 static void make_regs_eqv (unsigned int, unsigned int);
554 static void delete_reg_equiv (unsigned int);
555 static int mention_regs (rtx);
556 static int insert_regs (rtx, struct table_elt *, int);
557 static void remove_from_table (struct table_elt *, unsigned);
558 static void remove_pseudo_from_table (rtx, unsigned);
559 static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
560 static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
561 static rtx lookup_as_function (rtx, enum rtx_code);
562 static struct table_elt *insert (rtx, struct table_elt *, unsigned,
563 enum machine_mode);
564 static void merge_equiv_classes (struct table_elt *, struct table_elt *);
565 static void invalidate (rtx, enum machine_mode);
566 static bool cse_rtx_varies_p (const_rtx, bool);
567 static void remove_invalid_refs (unsigned int);
568 static void remove_invalid_subreg_refs (unsigned int, unsigned int,
569 enum machine_mode);
570 static void rehash_using_reg (rtx);
571 static void invalidate_memory (void);
572 static void invalidate_for_call (void);
573 static rtx use_related_value (rtx, struct table_elt *);
574
575 static inline unsigned canon_hash (rtx, enum machine_mode);
576 static inline unsigned safe_hash (rtx, enum machine_mode);
577 static inline unsigned hash_rtx_string (const char *);
578
579 static rtx canon_reg (rtx, rtx);
580 static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
581 enum machine_mode *,
582 enum machine_mode *);
583 static rtx fold_rtx (rtx, rtx);
584 static rtx equiv_constant (rtx);
585 static void record_jump_equiv (rtx, bool);
586 static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
587 int);
588 static void cse_insn (rtx);
589 static void cse_prescan_path (struct cse_basic_block_data *);
590 static void invalidate_from_clobbers (rtx);
591 static rtx cse_process_notes (rtx, rtx, bool *);
592 static void cse_extended_basic_block (struct cse_basic_block_data *);
593 static void count_reg_usage (rtx, int *, rtx, int);
594 static int check_for_label_ref (rtx *, void *);
595 extern void dump_class (struct table_elt*);
596 static void get_cse_reg_info_1 (unsigned int regno);
597 static struct cse_reg_info * get_cse_reg_info (unsigned int regno);
598 static int check_dependence (rtx *, void *);
599
600 static void flush_hash_table (void);
601 static bool insn_live_p (rtx, int *);
602 static bool set_live_p (rtx, rtx, int *);
603 static int cse_change_cc_mode (rtx *, void *);
604 static void cse_change_cc_mode_insn (rtx, rtx);
605 static void cse_change_cc_mode_insns (rtx, rtx, rtx);
606 static enum machine_mode cse_cc_succs (basic_block, rtx, rtx, bool);
607 \f
608
609 #undef RTL_HOOKS_GEN_LOWPART
610 #define RTL_HOOKS_GEN_LOWPART gen_lowpart_if_possible
611
612 static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;
613 \f
614 /* Nonzero if X has the form (PLUS frame-pointer integer). We check for
615 virtual regs here because the simplify_*_operation routines are called
616 by integrate.c, which is called before virtual register instantiation. */
617
618 static bool
619 fixed_base_plus_p (rtx x)
620 {
621 switch (GET_CODE (x))
622 {
623 case REG:
624 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
625 return true;
626 if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
627 return true;
628 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
629 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
630 return true;
631 return false;
632
633 case PLUS:
634 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
635 return false;
636 return fixed_base_plus_p (XEXP (x, 0));
637
638 default:
639 return false;
640 }
641 }
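
/* For example, a PLUS of the frame pointer and a CONST_INT satisfies
   this predicate, as does the frame pointer by itself, whereas a PLUS
   of an ordinary pseudo and a CONST_INT does not.  (Illustrative forms
   only.)  */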
642
643 /* Dump the expressions in the equivalence class indicated by CLASSP.
644 This function is used only for debugging. */
645 void
646 dump_class (struct table_elt *classp)
647 {
648 struct table_elt *elt;
649
650 fprintf (stderr, "Equivalence chain for ");
651 print_rtl (stderr, classp->exp);
652 fprintf (stderr, ": \n");
653
654 for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
655 {
656 print_rtl (stderr, elt->exp);
657 fprintf (stderr, "\n");
658 }
659 }
660
661 /* Subroutine of approx_reg_cost; called through for_each_rtx. */
662
663 static int
664 approx_reg_cost_1 (rtx *xp, void *data)
665 {
666 rtx x = *xp;
667 int *cost_p = (int *) data;
668
669 if (x && REG_P (x))
670 {
671 unsigned int regno = REGNO (x);
672
673 if (! CHEAP_REGNO (regno))
674 {
675 if (regno < FIRST_PSEUDO_REGISTER)
676 {
677 if (SMALL_REGISTER_CLASSES)
678 return 1;
679 *cost_p += 2;
680 }
681 else
682 *cost_p += 1;
683 }
684 }
685
686 return 0;
687 }
688
689 /* Return an estimate of the cost of the registers used in an rtx.
690 This is mostly the number of different REG expressions in the rtx;
691 however, for some exceptions like fixed registers we use a cost of 0.
692 With SMALL_REGISTER_CLASSES, any other hard register reference returns MAX_COST. */
693
694 static int
695 approx_reg_cost (rtx x)
696 {
697 int cost = 0;
698
699 if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
700 return MAX_COST;
701
702 return cost;
703 }
704
705 /* Return a negative value if an rtx A, whose costs are given by COST_A
706 and REGCOST_A, is more desirable than an rtx B whose costs are COST_B
707 and REGCOST_B. Return a positive value if A is less desirable, or 0
708 if the two are equally good. */
709 static int
710 preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
711 {
712 /* First, get rid of cases involving expressions that are entirely
713 unwanted. */
714 if (cost_a != cost_b)
715 {
716 if (cost_a == MAX_COST)
717 return 1;
718 if (cost_b == MAX_COST)
719 return -1;
720 }
721
722 /* Avoid extending lifetimes of hardregs. */
723 if (regcost_a != regcost_b)
724 {
725 if (regcost_a == MAX_COST)
726 return 1;
727 if (regcost_b == MAX_COST)
728 return -1;
729 }
730
731 /* Normal operation costs take precedence. */
732 if (cost_a != cost_b)
733 return cost_a - cost_b;
734 /* Only if these are identical consider effects on register pressure. */
735 if (regcost_a != regcost_b)
736 return regcost_a - regcost_b;
737 return 0;
738 }
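
/* Illustrative numbers: with (cost, regcost) pairs (4, 1) vs. (4, 2) the
   first is preferable on register pressure alone; (4, 9) vs. (5, 0) still
   prefers the first, because operation cost takes precedence; and an
   expression with MAX_COST in either field loses to one without.  */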
739
740 /* Internal function, to compute cost when X is not a register; called
741 from COST macro to keep it simple. */
742
743 static int
744 notreg_cost (rtx x, enum rtx_code outer)
745 {
746 return ((GET_CODE (x) == SUBREG
747 && REG_P (SUBREG_REG (x))
748 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
749 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
750 && (GET_MODE_SIZE (GET_MODE (x))
751 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
752 && subreg_lowpart_p (x)
753 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
754 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
755 ? 0
756 : rtx_cost (x, outer, optimize_this_for_speed_p) * 2);
757 }
758
759 \f
760 /* Initialize CSE_REG_INFO_TABLE. */
761
762 static void
763 init_cse_reg_info (unsigned int nregs)
764 {
765 /* Do we need to grow the table? */
766 if (nregs > cse_reg_info_table_size)
767 {
768 unsigned int new_size;
769
770 if (cse_reg_info_table_size < 2048)
771 {
772 /* Compute a new size that is a power of 2 and no smaller
773 than the larger of NREGS and 64. */
774 new_size = (cse_reg_info_table_size
775 ? cse_reg_info_table_size : 64);
776
777 while (new_size < nregs)
778 new_size *= 2;
779 }
780 else
781 {
782 /* If we need a big table, allocate just enough to hold
783 NREGS registers. */
784 new_size = nregs;
785 }
786
787 /* Reallocate the table with NEW_SIZE entries. */
788 if (cse_reg_info_table)
789 free (cse_reg_info_table);
790 cse_reg_info_table = XNEWVEC (struct cse_reg_info, new_size);
791 cse_reg_info_table_size = new_size;
792 cse_reg_info_table_first_uninitialized = 0;
793 }
794
795 /* Do we have all of the first NREGS entries initialized? */
796 if (cse_reg_info_table_first_uninitialized < nregs)
797 {
798 unsigned int old_timestamp = cse_reg_info_timestamp - 1;
799 unsigned int i;
800
801 /* Put the old timestamp on newly allocated entries so that they
802 will all be considered out of date. We do not touch those
803 entries beyond the first NREGS so as not to touch more virtual
804 memory than necessary. */
805 for (i = cse_reg_info_table_first_uninitialized; i < nregs; i++)
806 cse_reg_info_table[i].timestamp = old_timestamp;
807
808 cse_reg_info_table_first_uninitialized = nregs;
809 }
810 }
811
812 /* Given REGNO, initialize the cse_reg_info entry for REGNO. */
813
814 static void
815 get_cse_reg_info_1 (unsigned int regno)
816 {
817 /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this
818 entry will be considered to have been initialized. */
819 cse_reg_info_table[regno].timestamp = cse_reg_info_timestamp;
820
821 /* Initialize the rest of the entry. */
822 cse_reg_info_table[regno].reg_tick = 1;
823 cse_reg_info_table[regno].reg_in_table = -1;
824 cse_reg_info_table[regno].subreg_ticked = -1;
825 cse_reg_info_table[regno].reg_qty = -regno - 1;
826 }
827
828 /* Find a cse_reg_info entry for REGNO. */
829
830 static inline struct cse_reg_info *
831 get_cse_reg_info (unsigned int regno)
832 {
833 struct cse_reg_info *p = &cse_reg_info_table[regno];
834
835 /* If this entry has not been initialized, go ahead and initialize
836 it. */
837 if (p->timestamp != cse_reg_info_timestamp)
838 get_cse_reg_info_1 (regno);
839
840 return p;
841 }
842
843 /* Clear the hash table and initialize each register with its own quantity,
844 for a new basic block. */
845
846 static void
847 new_basic_block (void)
848 {
849 int i;
850
851 next_qty = 0;
852
853 /* Invalidate cse_reg_info_table. */
854 cse_reg_info_timestamp++;
855
856 /* Clear out hash table state for this pass. */
857 CLEAR_HARD_REG_SET (hard_regs_in_table);
858
859 /* The per-quantity values used to be initialized here, but it is
860 much faster to initialize each as it is made in `make_new_qty'. */
861
862 for (i = 0; i < HASH_SIZE; i++)
863 {
864 struct table_elt *first;
865
866 first = table[i];
867 if (first != NULL)
868 {
869 struct table_elt *last = first;
870
871 table[i] = NULL;
872
873 while (last->next_same_hash != NULL)
874 last = last->next_same_hash;
875
876 /* Now relink this entire hash chain into
877 the free element list. */
878
879 last->next_same_hash = free_element_chain;
880 free_element_chain = first;
881 }
882 }
883
884 #ifdef HAVE_cc0
885 prev_insn_cc0 = 0;
886 #endif
887 }
888
889 /* Say that register REG contains a quantity in mode MODE not in any
890 register before and initialize that quantity. */
891
892 static void
893 make_new_qty (unsigned int reg, enum machine_mode mode)
894 {
895 int q;
896 struct qty_table_elem *ent;
897 struct reg_eqv_elem *eqv;
898
899 gcc_assert (next_qty < max_qty);
900
901 q = REG_QTY (reg) = next_qty++;
902 ent = &qty_table[q];
903 ent->first_reg = reg;
904 ent->last_reg = reg;
905 ent->mode = mode;
906 ent->const_rtx = ent->const_insn = NULL_RTX;
907 ent->comparison_code = UNKNOWN;
908
909 eqv = &reg_eqv_table[reg];
910 eqv->next = eqv->prev = -1;
911 }
912
913 /* Make reg NEW_REG equivalent to reg OLD_REG.
914 OLD_REG is not changing; NEW_REG is. */
915
916 static void
917 make_regs_eqv (unsigned int new_reg, unsigned int old_reg)
918 {
919 unsigned int lastr, firstr;
920 int q = REG_QTY (old_reg);
921 struct qty_table_elem *ent;
922
923 ent = &qty_table[q];
924
925 /* Nothing should become eqv until it has a "non-invalid" qty number. */
926 gcc_assert (REGNO_QTY_VALID_P (old_reg));
927
928 REG_QTY (new_reg) = q;
929 firstr = ent->first_reg;
930 lastr = ent->last_reg;
931
932 /* Prefer fixed hard registers to anything. Prefer pseudo regs to other
933 hard regs. Among pseudos, if NEW will live longer than any other reg
934 of the same qty, and that is beyond the current basic block,
935 make it the new canonical replacement for this qty. */
936 if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
937 /* Certain fixed registers might be of the class NO_REGS. This means
938 that not only can they not be allocated by the compiler, but
939 they cannot be used in substitutions or canonicalizations
940 either. */
941 && (new_reg >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new_reg) != NO_REGS)
942 && ((new_reg < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new_reg))
943 || (new_reg >= FIRST_PSEUDO_REGISTER
944 && (firstr < FIRST_PSEUDO_REGISTER
945 || (bitmap_bit_p (cse_ebb_live_out, new_reg)
946 && !bitmap_bit_p (cse_ebb_live_out, firstr))
947 || (bitmap_bit_p (cse_ebb_live_in, new_reg)
948 && !bitmap_bit_p (cse_ebb_live_in, firstr))))))
949 {
950 reg_eqv_table[firstr].prev = new_reg;
951 reg_eqv_table[new_reg].next = firstr;
952 reg_eqv_table[new_reg].prev = -1;
953 ent->first_reg = new_reg;
954 }
955 else
956 {
957 /* If NEW is a hard reg (known to be non-fixed), insert at end.
958 Otherwise, insert before any non-fixed hard regs that are at the
959 end. Registers of class NO_REGS cannot be used as an
960 equivalent for anything. */
961 while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
962 && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
963 && new_reg >= FIRST_PSEUDO_REGISTER)
964 lastr = reg_eqv_table[lastr].prev;
965 reg_eqv_table[new_reg].next = reg_eqv_table[lastr].next;
966 if (reg_eqv_table[lastr].next >= 0)
967 reg_eqv_table[reg_eqv_table[lastr].next].prev = new_reg;
968 else
969 qty_table[q].last_reg = new_reg;
970 reg_eqv_table[lastr].next = new_reg;
971 reg_eqv_table[new_reg].prev = lastr;
972 }
973 }
974
975 /* Remove REG from its equivalence class. */
976
977 static void
978 delete_reg_equiv (unsigned int reg)
979 {
980 struct qty_table_elem *ent;
981 int q = REG_QTY (reg);
982 int p, n;
983
984 /* If invalid, do nothing. */
985 if (! REGNO_QTY_VALID_P (reg))
986 return;
987
988 ent = &qty_table[q];
989
990 p = reg_eqv_table[reg].prev;
991 n = reg_eqv_table[reg].next;
992
993 if (n != -1)
994 reg_eqv_table[n].prev = p;
995 else
996 ent->last_reg = p;
997 if (p != -1)
998 reg_eqv_table[p].next = n;
999 else
1000 ent->first_reg = n;
1001
1002 REG_QTY (reg) = -reg - 1;
1003 }
1004
1005 /* Remove any invalid expressions from the hash table
1006 that refer to any of the registers contained in expression X.
1007
1008 Make sure that newly inserted references to those registers
1009 as subexpressions will be considered valid.
1010
1011 mention_regs is not called when a register itself
1012 is being stored in the table.
1013
1014 Return 1 if we have done something that may have changed the hash code
1015 of X. */
1016
1017 static int
1018 mention_regs (rtx x)
1019 {
1020 enum rtx_code code;
1021 int i, j;
1022 const char *fmt;
1023 int changed = 0;
1024
1025 if (x == 0)
1026 return 0;
1027
1028 code = GET_CODE (x);
1029 if (code == REG)
1030 {
1031 unsigned int regno = REGNO (x);
1032 unsigned int endregno = END_REGNO (x);
1033 unsigned int i;
1034
1035 for (i = regno; i < endregno; i++)
1036 {
1037 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1038 remove_invalid_refs (i);
1039
1040 REG_IN_TABLE (i) = REG_TICK (i);
1041 SUBREG_TICKED (i) = -1;
1042 }
1043
1044 return 0;
1045 }
1046
1047 /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
1048 pseudo if they don't use overlapping words. We handle only pseudos
1049 here for simplicity. */
1050 if (code == SUBREG && REG_P (SUBREG_REG (x))
1051 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1052 {
1053 unsigned int i = REGNO (SUBREG_REG (x));
1054
1055 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1056 {
1057 /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
1058 the last store to this register really stored into this
1059 subreg, then remove the memory of this subreg.
1060 Otherwise, remove any memory of the entire register and
1061 all its subregs from the table. */
1062 if (REG_TICK (i) - REG_IN_TABLE (i) > 1
1063 || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
1064 remove_invalid_refs (i);
1065 else
1066 remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
1067 }
1068
1069 REG_IN_TABLE (i) = REG_TICK (i);
1070 SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
1071 return 0;
1072 }
1073
1074 /* If X is a comparison or a COMPARE and either operand is a register
1075 that does not have a quantity, give it one. This is so that a later
1076 call to record_jump_equiv won't cause X to be assigned a different
1077 hash code and not found in the table after that call.
1078
1079 It is not necessary to do this here, since rehash_using_reg can
1080 fix up the table later, but doing this here eliminates the need to
1081 call that expensive function in the most common case where the only
1082 use of the register is in the comparison. */
1083
1084 if (code == COMPARE || COMPARISON_P (x))
1085 {
1086 if (REG_P (XEXP (x, 0))
1087 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
1088 if (insert_regs (XEXP (x, 0), NULL, 0))
1089 {
1090 rehash_using_reg (XEXP (x, 0));
1091 changed = 1;
1092 }
1093
1094 if (REG_P (XEXP (x, 1))
1095 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
1096 if (insert_regs (XEXP (x, 1), NULL, 0))
1097 {
1098 rehash_using_reg (XEXP (x, 1));
1099 changed = 1;
1100 }
1101 }
1102
1103 fmt = GET_RTX_FORMAT (code);
1104 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1105 if (fmt[i] == 'e')
1106 changed |= mention_regs (XEXP (x, i));
1107 else if (fmt[i] == 'E')
1108 for (j = 0; j < XVECLEN (x, i); j++)
1109 changed |= mention_regs (XVECEXP (x, i, j));
1110
1111 return changed;
1112 }
1113
1114 /* Update the register quantities for inserting X into the hash table
1115 with a value equivalent to CLASSP.
1116 (If the class does not contain a REG, it is irrelevant.)
1117 If MODIFIED is nonzero, X is a destination; it is being modified.
1118 Note that delete_reg_equiv should be called on a register
1119 before insert_regs is done on that register with MODIFIED != 0.
1120
1121 Nonzero value means that elements of reg_qty have changed
1122 so X's hash code may be different. */
1123
1124 static int
1125 insert_regs (rtx x, struct table_elt *classp, int modified)
1126 {
1127 if (REG_P (x))
1128 {
1129 unsigned int regno = REGNO (x);
1130 int qty_valid;
1131
1132 /* If REGNO is in the equivalence table already but is of the
1133 wrong mode for that equivalence, don't do anything here. */
1134
1135 qty_valid = REGNO_QTY_VALID_P (regno);
1136 if (qty_valid)
1137 {
1138 struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];
1139
1140 if (ent->mode != GET_MODE (x))
1141 return 0;
1142 }
1143
1144 if (modified || ! qty_valid)
1145 {
1146 if (classp)
1147 for (classp = classp->first_same_value;
1148 classp != 0;
1149 classp = classp->next_same_value)
1150 if (REG_P (classp->exp)
1151 && GET_MODE (classp->exp) == GET_MODE (x))
1152 {
1153 unsigned c_regno = REGNO (classp->exp);
1154
1155 gcc_assert (REGNO_QTY_VALID_P (c_regno));
1156
1157 /* Suppose that 5 is a hard reg and 100 and 101 are
1158 pseudos. Consider
1159
1160 (set (reg:si 100) (reg:si 5))
1161 (set (reg:si 5) (reg:si 100))
1162 (set (reg:di 101) (reg:di 5))
1163
1164 We would now set REG_QTY (101) = REG_QTY (5), but the
1165 entry for 5 is in SImode. When we use this later in
1166 copy propagation, we get the register in the wrong mode. */
1167 if (qty_table[REG_QTY (c_regno)].mode != GET_MODE (x))
1168 continue;
1169
1170 make_regs_eqv (regno, c_regno);
1171 return 1;
1172 }
1173
1174 /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
1175 than REG_IN_TABLE to find out if there was only a single preceding
1176 invalidation - for the SUBREG - or another one, which would be
1177 for the full register. However, if we find here that REG_TICK
1178 indicates that the register is invalid, it means that it has
1179 been invalidated in a separate operation. The SUBREG might be used
1180 now (then this is a recursive call), or we might use the full REG
1181 now and a SUBREG of it later. So bump up REG_TICK so that
1182 mention_regs will do the right thing. */
1183 if (! modified
1184 && REG_IN_TABLE (regno) >= 0
1185 && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
1186 REG_TICK (regno)++;
1187 make_new_qty (regno, GET_MODE (x));
1188 return 1;
1189 }
1190
1191 return 0;
1192 }
1193
1194 /* If X is a SUBREG, we will likely be inserting the inner register in the
1195 table. If that register doesn't have an assigned quantity number at
1196 this point but does later, the insertion that we will be doing now will
1197 not be accessible because its hash code will have changed. So assign
1198 a quantity number now. */
1199
1200 else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
1201 && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1202 {
1203 insert_regs (SUBREG_REG (x), NULL, 0);
1204 mention_regs (x);
1205 return 1;
1206 }
1207 else
1208 return mention_regs (x);
1209 }
1210 \f
1211 /* Look in or update the hash table. */
1212
1213 /* Remove table element ELT from use in the table.
1214 HASH is its hash code, made using the HASH macro.
1215 It's an argument because often that is known in advance
1216 and we save much time not recomputing it. */
1217
1218 static void
1219 remove_from_table (struct table_elt *elt, unsigned int hash)
1220 {
1221 if (elt == 0)
1222 return;
1223
1224 /* Mark this element as removed. See cse_insn. */
1225 elt->first_same_value = 0;
1226
1227 /* Remove the table element from its equivalence class. */
1228
1229 {
1230 struct table_elt *prev = elt->prev_same_value;
1231 struct table_elt *next = elt->next_same_value;
1232
1233 if (next)
1234 next->prev_same_value = prev;
1235
1236 if (prev)
1237 prev->next_same_value = next;
1238 else
1239 {
1240 struct table_elt *newfirst = next;
1241 while (next)
1242 {
1243 next->first_same_value = newfirst;
1244 next = next->next_same_value;
1245 }
1246 }
1247 }
1248
1249 /* Remove the table element from its hash bucket. */
1250
1251 {
1252 struct table_elt *prev = elt->prev_same_hash;
1253 struct table_elt *next = elt->next_same_hash;
1254
1255 if (next)
1256 next->prev_same_hash = prev;
1257
1258 if (prev)
1259 prev->next_same_hash = next;
1260 else if (table[hash] == elt)
1261 table[hash] = next;
1262 else
1263 {
1264 /* This entry is not in the proper hash bucket. This can happen
1265 when two classes were merged by `merge_equiv_classes'. Search
1266 for the hash bucket that it heads. This happens only very
1267 rarely, so the cost is acceptable. */
1268 for (hash = 0; hash < HASH_SIZE; hash++)
1269 if (table[hash] == elt)
1270 table[hash] = next;
1271 }
1272 }
1273
1274 /* Remove the table element from its related-value circular chain. */
1275
1276 if (elt->related_value != 0 && elt->related_value != elt)
1277 {
1278 struct table_elt *p = elt->related_value;
1279
1280 while (p->related_value != elt)
1281 p = p->related_value;
1282 p->related_value = elt->related_value;
1283 if (p->related_value == p)
1284 p->related_value = 0;
1285 }
1286
1287 /* Now add it to the free element chain. */
1288 elt->next_same_hash = free_element_chain;
1289 free_element_chain = elt;
1290 }
1291
1292 /* Same as above, but X is a pseudo-register. */
1293
1294 static void
1295 remove_pseudo_from_table (rtx x, unsigned int hash)
1296 {
1297 struct table_elt *elt;
1298
1299 /* Because a pseudo-register can be referenced in more than one
1300 mode, we might have to remove more than one table entry. */
1301 while ((elt = lookup_for_remove (x, hash, VOIDmode)))
1302 remove_from_table (elt, hash);
1303 }
1304
1305 /* Look up X in the hash table and return its table element,
1306 or 0 if X is not in the table.
1307
1308 MODE is the machine-mode of X, or if X is an integer constant
1309 with VOIDmode then MODE is the mode with which X will be used.
1310
1311 Here we are satisfied to find an expression whose tree structure
1312 looks like X. */
1313
1314 static struct table_elt *
1315 lookup (rtx x, unsigned int hash, enum machine_mode mode)
1316 {
1317 struct table_elt *p;
1318
1319 for (p = table[hash]; p; p = p->next_same_hash)
1320 if (mode == p->mode && ((x == p->exp && REG_P (x))
1321 || exp_equiv_p (x, p->exp, !REG_P (x), false)))
1322 return p;
1323
1324 return 0;
1325 }
1326
1327 /* Like `lookup' but don't care whether the table element uses invalid regs.
1328 Also ignore discrepancies in the machine mode of a register. */
1329
1330 static struct table_elt *
1331 lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode)
1332 {
1333 struct table_elt *p;
1334
1335 if (REG_P (x))
1336 {
1337 unsigned int regno = REGNO (x);
1338
1339 /* Don't check the machine mode when comparing registers;
1340 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1341 for (p = table[hash]; p; p = p->next_same_hash)
1342 if (REG_P (p->exp)
1343 && REGNO (p->exp) == regno)
1344 return p;
1345 }
1346 else
1347 {
1348 for (p = table[hash]; p; p = p->next_same_hash)
1349 if (mode == p->mode
1350 && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
1351 return p;
1352 }
1353
1354 return 0;
1355 }
1356
1357 /* Look for an expression equivalent to X and with code CODE.
1358 If one is found, return that expression. */
1359
1360 static rtx
1361 lookup_as_function (rtx x, enum rtx_code code)
1362 {
1363 struct table_elt *p
1364 = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));
1365
1366 /* If we are looking for a CONST_INT, the mode doesn't really matter, as
1367 long as we are narrowing. So if we looked in vain for a mode narrower
1368 than word_mode before, look for word_mode now. */
1369 if (p == 0 && code == CONST_INT
1370 && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
1371 {
1372 x = copy_rtx (x);
1373 PUT_MODE (x, word_mode);
1374 p = lookup (x, SAFE_HASH (x, VOIDmode), word_mode);
1375 }
1376
1377 if (p == 0)
1378 return 0;
1379
1380 for (p = p->first_same_value; p; p = p->next_same_value)
1381 if (GET_CODE (p->exp) == code
1382 /* Make sure this is a valid entry in the table. */
1383 && exp_equiv_p (p->exp, p->exp, 1, false))
1384 return p->exp;
1385
1386 return 0;
1387 }
1388
1389 /* Insert X in the hash table, assuming HASH is its hash code
1390 and CLASSP is an element of the class it should go in
1391 (or 0 if a new class should be made).
1392 It is inserted at the proper position to keep the class in
1393 the order cheapest first.
1394
1395 MODE is the machine-mode of X, or if X is an integer constant
1396 with VOIDmode then MODE is the mode with which X will be used.
1397
1398 For elements of equal cheapness, the most recent one
1399 goes in front, except that the first element in the list
1400 remains first unless a cheaper element is added. The order of
1401 pseudo-registers does not matter, as canon_reg will be called to
1402 find the cheapest when a register is retrieved from the table.
1403
1404 The in_memory field in the hash table element is set to 0.
1405 The caller must set it nonzero if appropriate.
1406
1407 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1408 and if insert_regs returns a nonzero value
1409 you must then recompute its hash code before calling here.
1410
1411 If necessary, update table showing constant values of quantities. */
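
/* A minimal sketch of that calling sequence (X, CLASSP, HASH and MODE
   stand for the caller's values; compare the real use in
   merge_equiv_classes further down):

	if (insert_regs (x, classp, 0))
	  {
	    rehash_using_reg (x);
	    hash = HASH (x, mode);
	  }
	elt = insert (x, classp, hash, mode);
	elt->in_memory = hash_arg_in_memory;  */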
1412
1413 #define CHEAPER(X, Y) \
1414 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
1415
1416 static struct table_elt *
1417 insert (rtx x, struct table_elt *classp, unsigned int hash, enum machine_mode mode)
1418 {
1419 struct table_elt *elt;
1420
1421 /* If X is a register and we haven't made a quantity for it,
1422 something is wrong. */
1423 gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));
1424
1425 /* If X is a hard register, show it is being put in the table. */
1426 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
1427 add_to_hard_reg_set (&hard_regs_in_table, GET_MODE (x), REGNO (x));
1428
1429 /* Put an element for X into the right hash bucket. */
1430
1431 elt = free_element_chain;
1432 if (elt)
1433 free_element_chain = elt->next_same_hash;
1434 else
1435 elt = XNEW (struct table_elt);
1436
1437 elt->exp = x;
1438 elt->canon_exp = NULL_RTX;
1439 elt->cost = COST (x);
1440 elt->regcost = approx_reg_cost (x);
1441 elt->next_same_value = 0;
1442 elt->prev_same_value = 0;
1443 elt->next_same_hash = table[hash];
1444 elt->prev_same_hash = 0;
1445 elt->related_value = 0;
1446 elt->in_memory = 0;
1447 elt->mode = mode;
1448 elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));
1449
1450 if (table[hash])
1451 table[hash]->prev_same_hash = elt;
1452 table[hash] = elt;
1453
1454 /* Put it into the proper value-class. */
1455 if (classp)
1456 {
1457 classp = classp->first_same_value;
1458 if (CHEAPER (elt, classp))
1459 /* Insert at the head of the class. */
1460 {
1461 struct table_elt *p;
1462 elt->next_same_value = classp;
1463 classp->prev_same_value = elt;
1464 elt->first_same_value = elt;
1465
1466 for (p = classp; p; p = p->next_same_value)
1467 p->first_same_value = elt;
1468 }
1469 else
1470 {
1471 /* Insert not at head of the class. */
1472 /* Put it after the last element cheaper than X. */
1473 struct table_elt *p, *next;
1474
1475 for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1476 p = next);
1477
1478 /* Put it after P and before NEXT. */
1479 elt->next_same_value = next;
1480 if (next)
1481 next->prev_same_value = elt;
1482
1483 elt->prev_same_value = p;
1484 p->next_same_value = elt;
1485 elt->first_same_value = classp;
1486 }
1487 }
1488 else
1489 elt->first_same_value = elt;
1490
1491 /* If this is a constant being set equivalent to a register or a register
1492 being set equivalent to a constant, note the constant equivalence.
1493
1494 If this is a constant, it cannot be equivalent to a different constant,
1495 and a constant is the only thing that can be cheaper than a register. So
1496 we know the register is the head of the class (before the constant was
1497 inserted).
1498
1499 If this is a register that is not already known equivalent to a
1500 constant, we must check the entire class.
1501
1502 If this is a register that is already known equivalent to a constant,
1503 update the qty's `const_insn' to show that `this_insn' is the latest
1504 insn making that quantity equivalent to the constant. */
1505
1506 if (elt->is_const && classp && REG_P (classp->exp)
1507 && !REG_P (x))
1508 {
1509 int exp_q = REG_QTY (REGNO (classp->exp));
1510 struct qty_table_elem *exp_ent = &qty_table[exp_q];
1511
1512 exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
1513 exp_ent->const_insn = this_insn;
1514 }
1515
1516 else if (REG_P (x)
1517 && classp
1518 && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1519 && ! elt->is_const)
1520 {
1521 struct table_elt *p;
1522
1523 for (p = classp; p != 0; p = p->next_same_value)
1524 {
1525 if (p->is_const && !REG_P (p->exp))
1526 {
1527 int x_q = REG_QTY (REGNO (x));
1528 struct qty_table_elem *x_ent = &qty_table[x_q];
1529
1530 x_ent->const_rtx
1531 = gen_lowpart (GET_MODE (x), p->exp);
1532 x_ent->const_insn = this_insn;
1533 break;
1534 }
1535 }
1536 }
1537
1538 else if (REG_P (x)
1539 && qty_table[REG_QTY (REGNO (x))].const_rtx
1540 && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1541 qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1542
1543 /* If this is a constant with symbolic value,
1544 and it has a term with an explicit integer value,
1545 link it up with related expressions. */
1546 if (GET_CODE (x) == CONST)
1547 {
1548 rtx subexp = get_related_value (x);
1549 unsigned subhash;
1550 struct table_elt *subelt, *subelt_prev;
1551
1552 if (subexp != 0)
1553 {
1554 /* Get the integer-free subexpression in the hash table. */
1555 subhash = SAFE_HASH (subexp, mode);
1556 subelt = lookup (subexp, subhash, mode);
1557 if (subelt == 0)
1558 subelt = insert (subexp, NULL, subhash, mode);
1559 /* Initialize SUBELT's circular chain if it has none. */
1560 if (subelt->related_value == 0)
1561 subelt->related_value = subelt;
1562 /* Find the element in the circular chain that precedes SUBELT. */
1563 subelt_prev = subelt;
1564 while (subelt_prev->related_value != subelt)
1565 subelt_prev = subelt_prev->related_value;
1566 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1567 This way the element that follows SUBELT is the oldest one. */
1568 elt->related_value = subelt_prev->related_value;
1569 subelt_prev->related_value = elt;
1570 }
1571 }
1572
1573 return elt;
1574 }
1575 \f
1576 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1577 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1578 the two classes equivalent.
1579
1580 CLASS1 will be the surviving class; CLASS2 should not be used after this
1581 call.
1582
1583 Any invalid entries in CLASS2 will not be copied. */
1584
1585 static void
1586 merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
1587 {
1588 struct table_elt *elt, *next, *new_elt;
1589
1590 /* Ensure we start with the head of the classes. */
1591 class1 = class1->first_same_value;
1592 class2 = class2->first_same_value;
1593
1594 /* If they were already equal, forget it. */
1595 if (class1 == class2)
1596 return;
1597
1598 for (elt = class2; elt; elt = next)
1599 {
1600 unsigned int hash;
1601 rtx exp = elt->exp;
1602 enum machine_mode mode = elt->mode;
1603
1604 next = elt->next_same_value;
1605
1606 /* Remove old entry, make a new one in CLASS1's class.
1607 Don't do this for invalid entries as we cannot find their
1608 hash code (it also isn't necessary). */
1609 if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
1610 {
1611 bool need_rehash = false;
1612
1613 hash_arg_in_memory = 0;
1614 hash = HASH (exp, mode);
1615
1616 if (REG_P (exp))
1617 {
1618 need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
1619 delete_reg_equiv (REGNO (exp));
1620 }
1621
1622 if (REG_P (exp) && REGNO (exp) >= FIRST_PSEUDO_REGISTER)
1623 remove_pseudo_from_table (exp, hash);
1624 else
1625 remove_from_table (elt, hash);
1626
1627 if (insert_regs (exp, class1, 0) || need_rehash)
1628 {
1629 rehash_using_reg (exp);
1630 hash = HASH (exp, mode);
1631 }
1632 new_elt = insert (exp, class1, hash, mode);
1633 new_elt->in_memory = hash_arg_in_memory;
1634 }
1635 }
1636 }
1637 \f
1638 /* Flush the entire hash table. */
1639
1640 static void
1641 flush_hash_table (void)
1642 {
1643 int i;
1644 struct table_elt *p;
1645
1646 for (i = 0; i < HASH_SIZE; i++)
1647 for (p = table[i]; p; p = table[i])
1648 {
1649 /* Note that invalidate can remove elements
1650 after P in the current hash chain. */
1651 if (REG_P (p->exp))
1652 invalidate (p->exp, VOIDmode);
1653 else
1654 remove_from_table (p, i);
1655 }
1656 }
1657 \f
1658 /* Function called for each rtx to check whether a true dependence exists. */
1659 struct check_dependence_data
1660 {
1661 enum machine_mode mode;
1662 rtx exp;
1663 rtx addr;
1664 };
1665
1666 static int
1667 check_dependence (rtx *x, void *data)
1668 {
1669 struct check_dependence_data *d = (struct check_dependence_data *) data;
1670 if (*x && MEM_P (*x))
1671 return canon_true_dependence (d->exp, d->mode, d->addr, *x,
1672 cse_rtx_varies_p);
1673 else
1674 return 0;
1675 }
1676 \f
1677 /* Remove from the hash table, or mark as invalid, all expressions whose
1678 values could be altered by storing in X. X is a register, a subreg, or
1679 a memory reference with nonvarying address (because, when a memory
1680 reference with a varying address is stored in, all memory references are
1681 removed by invalidate_memory so specific invalidation is superfluous).
1682 FULL_MODE, if not VOIDmode, indicates that this much should be
1683 invalidated instead of just the amount indicated by the mode of X. This
1684 is only used for bitfield stores into memory.
1685
1686 A nonvarying address may be just a register or just a symbol reference,
1687 or it may be either of those plus a numeric offset. */
1688
1689 static void
1690 invalidate (rtx x, enum machine_mode full_mode)
1691 {
1692 int i;
1693 struct table_elt *p;
1694 rtx addr;
1695
1696 switch (GET_CODE (x))
1697 {
1698 case REG:
1699 {
1700 /* If X is a register, dependencies on its contents are recorded
1701 through the qty number mechanism. Just change the qty number of
1702 the register, mark it as invalid for expressions that refer to it,
1703 and remove it itself. */
1704 unsigned int regno = REGNO (x);
1705 unsigned int hash = HASH (x, GET_MODE (x));
1706
1707 /* Remove REGNO from any quantity list it might be on and indicate
1708 that its value might have changed. If it is a pseudo, remove its
1709 entry from the hash table.
1710
1711 For a hard register, we do the first two actions above for any
1712 additional hard registers corresponding to X. Then, if any of these
1713 registers are in the table, we must remove any REG entries that
1714 overlap these registers. */
1715
1716 delete_reg_equiv (regno);
1717 REG_TICK (regno)++;
1718 SUBREG_TICKED (regno) = -1;
1719
1720 if (regno >= FIRST_PSEUDO_REGISTER)
1721 remove_pseudo_from_table (x, hash);
1722 else
1723 {
1724 HOST_WIDE_INT in_table
1725 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1726 unsigned int endregno = END_HARD_REGNO (x);
1727 unsigned int tregno, tendregno, rn;
1728 struct table_elt *p, *next;
1729
1730 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1731
1732 for (rn = regno + 1; rn < endregno; rn++)
1733 {
1734 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1735 CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1736 delete_reg_equiv (rn);
1737 REG_TICK (rn)++;
1738 SUBREG_TICKED (rn) = -1;
1739 }
1740
1741 if (in_table)
1742 for (hash = 0; hash < HASH_SIZE; hash++)
1743 for (p = table[hash]; p; p = next)
1744 {
1745 next = p->next_same_hash;
1746
1747 if (!REG_P (p->exp)
1748 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1749 continue;
1750
1751 tregno = REGNO (p->exp);
1752 tendregno = END_HARD_REGNO (p->exp);
1753 if (tendregno > regno && tregno < endregno)
1754 remove_from_table (p, hash);
1755 }
1756 }
1757 }
1758 return;
1759
1760 case SUBREG:
1761 invalidate (SUBREG_REG (x), VOIDmode);
1762 return;
1763
1764 case PARALLEL:
1765 for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1766 invalidate (XVECEXP (x, 0, i), VOIDmode);
1767 return;
1768
1769 case EXPR_LIST:
1770 /* This is part of a disjoint return value; extract the location in
1771 question ignoring the offset. */
1772 invalidate (XEXP (x, 0), VOIDmode);
1773 return;
1774
1775 case MEM:
1776 addr = canon_rtx (get_addr (XEXP (x, 0)));
1777 /* Calculate the canonical version of X here so that
1778 true_dependence doesn't generate new RTL for X on each call. */
1779 x = canon_rtx (x);
1780
1781 /* Remove all hash table elements that refer to overlapping pieces of
1782 memory. */
1783 if (full_mode == VOIDmode)
1784 full_mode = GET_MODE (x);
1785
1786 for (i = 0; i < HASH_SIZE; i++)
1787 {
1788 struct table_elt *next;
1789
1790 for (p = table[i]; p; p = next)
1791 {
1792 next = p->next_same_hash;
1793 if (p->in_memory)
1794 {
1795 struct check_dependence_data d;
1796
1797 /* Just canonicalize the expression once;
1798 otherwise each time we call invalidate
1799 true_dependence will canonicalize the
1800 expression again. */
1801 if (!p->canon_exp)
1802 p->canon_exp = canon_rtx (p->exp);
1803 d.exp = x;
1804 d.addr = addr;
1805 d.mode = full_mode;
1806 if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1807 remove_from_table (p, i);
1808 }
1809 }
1810 }
1811 return;
1812
1813 default:
1814 gcc_unreachable ();
1815 }
1816 }
1817 \f
1818 /* Remove all expressions that refer to register REGNO,
1819 since they are already invalid, and we are about to
1820 mark that register valid again and don't want the old
1821 expressions to reappear as valid. */
1822
1823 static void
1824 remove_invalid_refs (unsigned int regno)
1825 {
1826 unsigned int i;
1827 struct table_elt *p, *next;
1828
1829 for (i = 0; i < HASH_SIZE; i++)
1830 for (p = table[i]; p; p = next)
1831 {
1832 next = p->next_same_hash;
1833 if (!REG_P (p->exp)
1834 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1835 remove_from_table (p, i);
1836 }
1837 }
1838
1839 /* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
1840 and mode MODE. */
1841 static void
1842 remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
1843 enum machine_mode mode)
1844 {
1845 unsigned int i;
1846 struct table_elt *p, *next;
1847 unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
1848
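/* Remove every entry that mentions REGNO, except the plain REG itself and
   any SUBREG of REGNO whose bytes lie entirely outside [OFFSET, END].  */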
1849 for (i = 0; i < HASH_SIZE; i++)
1850 for (p = table[i]; p; p = next)
1851 {
1852 rtx exp = p->exp;
1853 next = p->next_same_hash;
1854
1855 if (!REG_P (exp)
1856 && (GET_CODE (exp) != SUBREG
1857 || !REG_P (SUBREG_REG (exp))
1858 || REGNO (SUBREG_REG (exp)) != regno
1859 || (((SUBREG_BYTE (exp)
1860 + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
1861 && SUBREG_BYTE (exp) <= end))
1862 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1863 remove_from_table (p, i);
1864 }
1865 }
1866 \f
1867 /* Recompute the hash codes of any valid entries in the hash table that
1868 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1869
1870 This is called when we make a jump equivalence. */
1871
1872 static void
1873 rehash_using_reg (rtx x)
1874 {
1875 unsigned int i;
1876 struct table_elt *p, *next;
1877 unsigned hash;
1878
1879 if (GET_CODE (x) == SUBREG)
1880 x = SUBREG_REG (x);
1881
1882 /* If X is not a register or if the register is known not to be in any
1883 valid entries in the table, we have no work to do. */
1884
1885 if (!REG_P (x)
1886 || REG_IN_TABLE (REGNO (x)) < 0
1887 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
1888 return;
1889
1890 /* Scan all hash chains looking for valid entries that mention X.
1891 If we find one and it is in the wrong hash chain, move it. */
1892
1893 for (i = 0; i < HASH_SIZE; i++)
1894 for (p = table[i]; p; p = next)
1895 {
1896 next = p->next_same_hash;
1897 if (reg_mentioned_p (x, p->exp)
1898 && exp_equiv_p (p->exp, p->exp, 1, false)
1899 && i != (hash = SAFE_HASH (p->exp, p->mode)))
1900 {
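/* Unlink P from hash chain I and push it onto the front of the chain
   for its recomputed hash code.  */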
1901 if (p->next_same_hash)
1902 p->next_same_hash->prev_same_hash = p->prev_same_hash;
1903
1904 if (p->prev_same_hash)
1905 p->prev_same_hash->next_same_hash = p->next_same_hash;
1906 else
1907 table[i] = p->next_same_hash;
1908
1909 p->next_same_hash = table[hash];
1910 p->prev_same_hash = 0;
1911 if (table[hash])
1912 table[hash]->prev_same_hash = p;
1913 table[hash] = p;
1914 }
1915 }
1916 }
1917 \f
1918 /* Remove from the hash table any expressions that are call-clobbered
1919 registers. Also update their TICK values. */
1920
1921 static void
1922 invalidate_for_call (void)
1923 {
1924 unsigned int regno, endregno;
1925 unsigned int i;
1926 unsigned hash;
1927 struct table_elt *p, *next;
1928 int in_table = 0;
1929
1930 /* Go through all the hard registers. For each that is clobbered in
1931 a CALL_INSN, remove the register from quantity chains and update
1932 reg_tick if defined. Also see if any of these registers is currently
1933 in the table. */
1934
1935 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1936 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1937 {
1938 delete_reg_equiv (regno);
1939 if (REG_TICK (regno) >= 0)
1940 {
1941 REG_TICK (regno)++;
1942 SUBREG_TICKED (regno) = -1;
1943 }
1944
1945 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
1946 }
1947
1948 /* In the case where we have no call-clobbered hard registers in the
1949 table, we are done. Otherwise, scan the table and remove any
1950 entry that overlaps a call-clobbered register. */
1951
1952 if (in_table)
1953 for (hash = 0; hash < HASH_SIZE; hash++)
1954 for (p = table[hash]; p; p = next)
1955 {
1956 next = p->next_same_hash;
1957
1958 if (!REG_P (p->exp)
1959 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1960 continue;
1961
1962 regno = REGNO (p->exp);
1963 endregno = END_HARD_REGNO (p->exp);
1964
1965 for (i = regno; i < endregno; i++)
1966 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1967 {
1968 remove_from_table (p, hash);
1969 break;
1970 }
1971 }
1972 }
1973 \f
1974 /* Given an expression X of type CONST,
1975 and ELT which is its table entry (or 0 if it
1976 is not in the hash table),
1977 return an alternate expression for X as a register plus integer.
1978 If none can be found, return 0. */
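/* For instance, if X is (const (plus (symbol_ref "s") (const_int 12))) and
   some register REG is known equivalent to
   (const (plus (symbol_ref "s") (const_int 4))), the result is
   (plus REG (const_int 8)).  */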
1979
1980 static rtx
1981 use_related_value (rtx x, struct table_elt *elt)
1982 {
1983 struct table_elt *relt = 0;
1984 struct table_elt *p, *q;
1985 HOST_WIDE_INT offset;
1986
1987 /* First, is there anything related known?
1988 If we have a table element, we can tell from that.
1989 Otherwise, we must look it up. */
1990
1991 if (elt != 0 && elt->related_value != 0)
1992 relt = elt;
1993 else if (elt == 0 && GET_CODE (x) == CONST)
1994 {
1995 rtx subexp = get_related_value (x);
1996 if (subexp != 0)
1997 relt = lookup (subexp,
1998 SAFE_HASH (subexp, GET_MODE (subexp)),
1999 GET_MODE (subexp));
2000 }
2001
2002 if (relt == 0)
2003 return 0;
2004
2005 /* Search all related table entries for one that has an
2006 equivalent register. */
2007
2008 p = relt;
2009 while (1)
2010 {
2011 /* This loop is strange in that it is executed in two different cases.
2012 The first is when X is already in the table. Then it is searching
2013 the RELATED_VALUE list of X's class (RELT). The second case is when
2014 X is not in the table. Then RELT points to a class for the related
2015 value.
2016
2017 Ensure that, whatever case we are in, we ignore classes that have
2018 the same value as X. */
2019
2020 if (rtx_equal_p (x, p->exp))
2021 q = 0;
2022 else
2023 for (q = p->first_same_value; q; q = q->next_same_value)
2024 if (REG_P (q->exp))
2025 break;
2026
2027 if (q)
2028 break;
2029
2030 p = p->related_value;
2031
2032 /* We went all the way around, so there is nothing to be found.
2033 Alternatively, perhaps RELT was in the table for some other reason
2034 and it has no related values recorded. */
2035 if (p == relt || p == 0)
2036 break;
2037 }
2038
2039 if (q == 0)
2040 return 0;
2041
2042 offset = (get_integer_term (x) - get_integer_term (p->exp));
2043 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
2044 return plus_constant (q->exp, offset);
2045 }
2046 \f
2047
2048 /* Hash a string. Just add its bytes up. */
2049 static inline unsigned
2050 hash_rtx_string (const char *ps)
2051 {
2052 unsigned hash = 0;
2053 const unsigned char *p = (const unsigned char *) ps;
2054
2055 if (p)
2056 while (*p)
2057 hash += *p++;
2058
2059 return hash;
2060 }
2061
2062 /* Same as hash_rtx, but call CB on each rtx if it is not NULL.
2063 When the callback returns true, we continue with the new rtx. */
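/* When CB is null this computes exactly the same hash as hash_rtx, which
   is implemented as a thin wrapper around this function.  */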
2064
2065 unsigned
2066 hash_rtx_cb (const_rtx x, enum machine_mode mode,
2067 int *do_not_record_p, int *hash_arg_in_memory_p,
2068 bool have_reg_qty, hash_rtx_callback_function cb)
2069 {
2070 int i, j;
2071 unsigned hash = 0;
2072 enum rtx_code code;
2073 const char *fmt;
2074 enum machine_mode newmode;
2075 rtx newx;
2076
2077 /* Used to turn recursion into iteration. We can't rely on GCC's
2078 tail-recursion elimination since we need to keep accumulating values
2079 in HASH. */
2080 repeat:
2081 if (x == 0)
2082 return hash;
2083
2084 /* Invoke the callback first. */
2085 if (cb != NULL
2086 && ((*cb) (x, mode, &newx, &newmode)))
2087 {
2088 hash += hash_rtx_cb (newx, newmode, do_not_record_p,
2089 hash_arg_in_memory_p, have_reg_qty, cb);
2090 return hash;
2091 }
2092
2093 code = GET_CODE (x);
2094 switch (code)
2095 {
2096 case REG:
2097 {
2098 unsigned int regno = REGNO (x);
2099
2100 if (do_not_record_p && !reload_completed)
2101 {
2102 /* On some machines, we can't record any non-fixed hard register,
2103 because extending its life will cause reload problems. We
2104 consider ap, fp, sp, gp to be fixed for this purpose.
2105
2106 We also consider CCmode registers to be fixed for this purpose;
2107 failure to do so leads to failure to simplify 0<100 type of
2108 conditionals.
2109
2110 On all machines, we can't record any global registers.
2111 Nor should we record any register that is in a small
2112 class, as defined by CLASS_LIKELY_SPILLED_P. */
2113 bool record;
2114
2115 if (regno >= FIRST_PSEUDO_REGISTER)
2116 record = true;
2117 else if (x == frame_pointer_rtx
2118 || x == hard_frame_pointer_rtx
2119 || x == arg_pointer_rtx
2120 || x == stack_pointer_rtx
2121 || x == pic_offset_table_rtx)
2122 record = true;
2123 else if (global_regs[regno])
2124 record = false;
2125 else if (fixed_regs[regno])
2126 record = true;
2127 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2128 record = true;
2129 else if (SMALL_REGISTER_CLASSES)
2130 record = false;
2131 else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
2132 record = false;
2133 else
2134 record = true;
2135
2136 if (!record)
2137 {
2138 *do_not_record_p = 1;
2139 return 0;
2140 }
2141 }
2142
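/* Hash on the register's quantity number when we have one, so that
   registers currently known to hold the same value hash identically.  */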
2143 hash += ((unsigned int) REG << 7);
2144 hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
2145 return hash;
2146 }
2147
2148 /* We handle SUBREG of a REG specially because the underlying
2149 reg changes its hash value with every value change; we don't
2150 want to have to forget unrelated subregs when one subreg changes. */
2151 case SUBREG:
2152 {
2153 if (REG_P (SUBREG_REG (x)))
2154 {
2155 hash += (((unsigned int) SUBREG << 7)
2156 + REGNO (SUBREG_REG (x))
2157 + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2158 return hash;
2159 }
2160 break;
2161 }
2162
2163 case CONST_INT:
2164 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
2165 + (unsigned int) INTVAL (x));
2166 return hash;
2167
2168 case CONST_DOUBLE:
2169 /* This is like the general case, except that it only counts
2170 the integers representing the constant. */
2171 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2172 if (GET_MODE (x) != VOIDmode)
2173 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2174 else
2175 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
2176 + (unsigned int) CONST_DOUBLE_HIGH (x));
2177 return hash;
2178
2179 case CONST_FIXED:
2180 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2181 hash += fixed_hash (CONST_FIXED_VALUE (x));
2182 return hash;
2183
2184 case CONST_VECTOR:
2185 {
2186 int units;
2187 rtx elt;
2188
2189 units = CONST_VECTOR_NUNITS (x);
2190
2191 for (i = 0; i < units; ++i)
2192 {
2193 elt = CONST_VECTOR_ELT (x, i);
2194 hash += hash_rtx_cb (elt, GET_MODE (elt),
2195 do_not_record_p, hash_arg_in_memory_p,
2196 have_reg_qty, cb);
2197 }
2198
2199 return hash;
2200 }
2201
2202 /* Assume there is only one rtx object for any given label. */
2203 case LABEL_REF:
2204 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
2205 differences and differences between each stage's debugging dumps. */
2206 hash += (((unsigned int) LABEL_REF << 7)
2207 + CODE_LABEL_NUMBER (XEXP (x, 0)));
2208 return hash;
2209
2210 case SYMBOL_REF:
2211 {
2212 /* Don't hash on the symbol's address to avoid bootstrap differences.
2213 Different hash values may cause expressions to be recorded in
2214 different orders and thus different registers to be used in the
2215 final assembler. This also avoids differences in the dump files
2216 between various stages. */
2217 unsigned int h = 0;
2218 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
2219
2220 while (*p)
2221 h += (h << 7) + *p++; /* ??? revisit */
2222
2223 hash += ((unsigned int) SYMBOL_REF << 7) + h;
2224 return hash;
2225 }
2226
2227 case MEM:
2228 /* We don't record if marked volatile or if BLKmode since we don't
2229 know the size of the move. */
2230 if (do_not_record_p && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2231 {
2232 *do_not_record_p = 1;
2233 return 0;
2234 }
2235 if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2236 *hash_arg_in_memory_p = 1;
2237
2238 /* Now that we have already found this special case,
2239 might as well speed it up as much as possible. */
2240 hash += (unsigned) MEM;
2241 x = XEXP (x, 0);
2242 goto repeat;
2243
2244 case USE:
2245 /* A USE that mentions non-volatile memory needs special
2246 handling since the MEM may be BLKmode which normally
2247 prevents an entry from being made. Pure calls are
2248 marked by a USE which mentions BLKmode memory.
2249 See calls.c:emit_call_1. */
2250 if (MEM_P (XEXP (x, 0))
2251 && ! MEM_VOLATILE_P (XEXP (x, 0)))
2252 {
2253 hash += (unsigned) USE;
2254 x = XEXP (x, 0);
2255
2256 if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2257 *hash_arg_in_memory_p = 1;
2258
2259 /* Now that we have already found this special case,
2260 might as well speed it up as much as possible. */
2261 hash += (unsigned) MEM;
2262 x = XEXP (x, 0);
2263 goto repeat;
2264 }
2265 break;
2266
2267 case PRE_DEC:
2268 case PRE_INC:
2269 case POST_DEC:
2270 case POST_INC:
2271 case PRE_MODIFY:
2272 case POST_MODIFY:
2273 case PC:
2274 case CC0:
2275 case CALL:
2276 case UNSPEC_VOLATILE:
2277 if (do_not_record_p) {
2278 *do_not_record_p = 1;
2279 return 0;
2280 }
2281 else
2282 return hash;
2283 break;
2284
2285 case ASM_OPERANDS:
2286 if (do_not_record_p && MEM_VOLATILE_P (x))
2287 {
2288 *do_not_record_p = 1;
2289 return 0;
2290 }
2291 else
2292 {
2293 /* We don't want to take the filename and line into account. */
2294 hash += (unsigned) code + (unsigned) GET_MODE (x)
2295 + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
2296 + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2297 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2298
2299 if (ASM_OPERANDS_INPUT_LENGTH (x))
2300 {
2301 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2302 {
2303 hash += (hash_rtx_cb (ASM_OPERANDS_INPUT (x, i),
2304 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
2305 do_not_record_p, hash_arg_in_memory_p,
2306 have_reg_qty, cb)
2307 + hash_rtx_string
2308 (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
2309 }
2310
2311 hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2312 x = ASM_OPERANDS_INPUT (x, 0);
2313 mode = GET_MODE (x);
2314 goto repeat;
2315 }
2316
2317 return hash;
2318 }
2319 break;
2320
2321 default:
2322 break;
2323 }
2324
2325 i = GET_RTX_LENGTH (code) - 1;
2326 hash += (unsigned) code + (unsigned) GET_MODE (x);
2327 fmt = GET_RTX_FORMAT (code);
2328 for (; i >= 0; i--)
2329 {
2330 switch (fmt[i])
2331 {
2332 case 'e':
2333 /* If we are about to do the last recursive call
2334 needed at this level, change it into iteration.
2335 This function is called enough to be worth it. */
2336 if (i == 0)
2337 {
2338 x = XEXP (x, i);
2339 goto repeat;
2340 }
2341
2342 hash += hash_rtx_cb (XEXP (x, i), 0, do_not_record_p,
2343 hash_arg_in_memory_p,
2344 have_reg_qty, cb);
2345 break;
2346
2347 case 'E':
2348 for (j = 0; j < XVECLEN (x, i); j++)
2349 hash += hash_rtx_cb (XVECEXP (x, i, j), 0, do_not_record_p,
2350 hash_arg_in_memory_p,
2351 have_reg_qty, cb);
2352 break;
2353
2354 case 's':
2355 hash += hash_rtx_string (XSTR (x, i));
2356 break;
2357
2358 case 'i':
2359 hash += (unsigned int) XINT (x, i);
2360 break;
2361
2362 case '0': case 't':
2363 /* Unused. */
2364 break;
2365
2366 default:
2367 gcc_unreachable ();
2368 }
2369 }
2370
2371 return hash;
2372 }
2373
2374 /* Hash an rtx. We are careful to make sure the value is never negative.
2375 Equivalent registers hash identically.
2376 MODE is used in hashing for CONST_INTs only;
2377 otherwise the mode of X is used.
2378
2379 Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.
2380
2381 If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
2382 a MEM rtx which does not have the RTX_UNCHANGING_P bit set.
2383
2384 Note that cse_insn knows that the hash code of a MEM expression
2385 is just (int) MEM plus the hash code of the address. */
2386
2387 unsigned
2388 hash_rtx (const_rtx x, enum machine_mode mode, int *do_not_record_p,
2389 int *hash_arg_in_memory_p, bool have_reg_qty)
2390 {
2391 return hash_rtx_cb (x, mode, do_not_record_p,
2392 hash_arg_in_memory_p, have_reg_qty, NULL);
2393 }
2394
2395 /* Hash an rtx X for cse via hash_rtx.
2396 Stores 1 in do_not_record if any subexpression is volatile.
2397 Stores 1 in hash_arg_in_memory if X contains a mem rtx which
2398 does not have the RTX_UNCHANGING_P bit set. */
2399
2400 static inline unsigned
2401 canon_hash (rtx x, enum machine_mode mode)
2402 {
2403 return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
2404 }
2405
2406 /* Like canon_hash but with no side effects, i.e. do_not_record
2407 and hash_arg_in_memory are not changed. */
2408
2409 static inline unsigned
2410 safe_hash (rtx x, enum machine_mode mode)
2411 {
2412 int dummy_do_not_record;
2413 return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
2414 }
2415 \f
2416 /* Return 1 iff X and Y would canonicalize into the same thing,
2417 without actually constructing the canonicalization of either one.
2418 If VALIDATE is nonzero,
2419 we assume X is an expression being processed from the rtl
2420 and Y was found in the hash table. We check register refs
2421 in Y for being marked as valid.
2422
2423 If FOR_GCSE is true, we compare X and Y for equivalence for GCSE. */
2424
2425 int
2426 exp_equiv_p (const_rtx x, const_rtx y, int validate, bool for_gcse)
2427 {
2428 int i, j;
2429 enum rtx_code code;
2430 const char *fmt;
2431
2432 /* Note: it is incorrect to assume an expression is equivalent to itself
2433 if VALIDATE is nonzero. */
2434 if (x == y && !validate)
2435 return 1;
2436
2437 if (x == 0 || y == 0)
2438 return x == y;
2439
2440 code = GET_CODE (x);
2441 if (code != GET_CODE (y))
2442 return 0;
2443
2444 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2445 if (GET_MODE (x) != GET_MODE (y))
2446 return 0;
2447
2448 switch (code)
2449 {
2450 case PC:
2451 case CC0:
2452 case CONST_INT:
2453 case CONST_DOUBLE:
2454 case CONST_FIXED:
2455 return x == y;
2456
2457 case LABEL_REF:
2458 return XEXP (x, 0) == XEXP (y, 0);
2459
2460 case SYMBOL_REF:
2461 return XSTR (x, 0) == XSTR (y, 0);
2462
2463 case REG:
2464 if (for_gcse)
2465 return REGNO (x) == REGNO (y);
2466 else
2467 {
2468 unsigned int regno = REGNO (y);
2469 unsigned int i;
2470 unsigned int endregno = END_REGNO (y);
2471
2472 /* If the quantities are not the same, the expressions are not
2473 equivalent. If they are and we are not to validate, they
2474 are equivalent. Otherwise, ensure all regs are up-to-date. */
2475
2476 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2477 return 0;
2478
2479 if (! validate)
2480 return 1;
2481
2482 for (i = regno; i < endregno; i++)
2483 if (REG_IN_TABLE (i) != REG_TICK (i))
2484 return 0;
2485
2486 return 1;
2487 }
2488
2489 case MEM:
2490 if (for_gcse)
2491 {
2492 /* A volatile mem should not be considered equivalent to any
2493 other. */
2494 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2495 return 0;
2496
2497 /* Can't merge two expressions in different alias sets, since we
2498 can decide that the expression is transparent in a block when
2499 it isn't, due to it being set with a different alias set.
2500
2501 Also, can't merge two expressions with different MEM_ATTRS.
2502 They could e.g. be two different entities allocated into the
2503 same space on the stack (see e.g. PR25130). In that case, the
2504 MEM addresses can be the same, even though the two MEMs are
2505 absolutely not equivalent.
2506
2507 But since all MEM attributes really should be the same for
2508 equivalent MEMs, we just use the invariant that MEMs that have
2509 the same attributes share the same mem_attrs data structure. */
2510 if (MEM_ATTRS (x) != MEM_ATTRS (y))
2511 return 0;
2512 }
2513 break;
2514
2515 /* For commutative operations, check both orders. */
2516 case PLUS:
2517 case MULT:
2518 case AND:
2519 case IOR:
2520 case XOR:
2521 case NE:
2522 case EQ:
2523 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
2524 validate, for_gcse)
2525 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2526 validate, for_gcse))
2527 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2528 validate, for_gcse)
2529 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2530 validate, for_gcse)));
2531
2532 case ASM_OPERANDS:
2533 /* We don't use the generic code below because we want to
2534 disregard filename and line numbers. */
2535
2536 /* A volatile asm isn't equivalent to any other. */
2537 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2538 return 0;
2539
2540 if (GET_MODE (x) != GET_MODE (y)
2541 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2542 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2543 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2544 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2545 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2546 return 0;
2547
2548 if (ASM_OPERANDS_INPUT_LENGTH (x))
2549 {
2550 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2551 if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2552 ASM_OPERANDS_INPUT (y, i),
2553 validate, for_gcse)
2554 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2555 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2556 return 0;
2557 }
2558
2559 return 1;
2560
2561 default:
2562 break;
2563 }
2564
2565 /* Compare the elements. If any pair of corresponding elements
2566 fail to match, return 0 for the whole thing. */
2567
2568 fmt = GET_RTX_FORMAT (code);
2569 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2570 {
2571 switch (fmt[i])
2572 {
2573 case 'e':
2574 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
2575 validate, for_gcse))
2576 return 0;
2577 break;
2578
2579 case 'E':
2580 if (XVECLEN (x, i) != XVECLEN (y, i))
2581 return 0;
2582 for (j = 0; j < XVECLEN (x, i); j++)
2583 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2584 validate, for_gcse))
2585 return 0;
2586 break;
2587
2588 case 's':
2589 if (strcmp (XSTR (x, i), XSTR (y, i)))
2590 return 0;
2591 break;
2592
2593 case 'i':
2594 if (XINT (x, i) != XINT (y, i))
2595 return 0;
2596 break;
2597
2598 case 'w':
2599 if (XWINT (x, i) != XWINT (y, i))
2600 return 0;
2601 break;
2602
2603 case '0':
2604 case 't':
2605 break;
2606
2607 default:
2608 gcc_unreachable ();
2609 }
2610 }
2611
2612 return 1;
2613 }
2614 \f
2615 /* Return 1 if X has a value that can vary even between two
2616 executions of the program. 0 means X can be compared reliably
2617 against certain constants or near-constants. */
2618
2619 static bool
2620 cse_rtx_varies_p (const_rtx x, bool from_alias)
2621 {
2622 /* We need not check for X and the equivalence class being of the same
2623 mode because if X is equivalent to a constant in some mode, it
2624 doesn't vary in any mode. */
2625
2626 if (REG_P (x)
2627 && REGNO_QTY_VALID_P (REGNO (x)))
2628 {
2629 int x_q = REG_QTY (REGNO (x));
2630 struct qty_table_elem *x_ent = &qty_table[x_q];
2631
2632 if (GET_MODE (x) == x_ent->mode
2633 && x_ent->const_rtx != NULL_RTX)
2634 return 0;
2635 }
2636
2637 if (GET_CODE (x) == PLUS
2638 && GET_CODE (XEXP (x, 1)) == CONST_INT
2639 && REG_P (XEXP (x, 0))
2640 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2641 {
2642 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2643 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2644
2645 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2646 && x0_ent->const_rtx != NULL_RTX)
2647 return 0;
2648 }
2649
2650 /* This can happen as the result of virtual register instantiation, if
2651 the initial constant is too large to be a valid address. This gives
2652 us a three-instruction sequence: load a large offset into a register,
2653 load fp minus a constant into a register, then a MEM which is the
2654 sum of the two `constant' registers. */
2655 if (GET_CODE (x) == PLUS
2656 && REG_P (XEXP (x, 0))
2657 && REG_P (XEXP (x, 1))
2658 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2659 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2660 {
2661 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2662 int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2663 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2664 struct qty_table_elem *x1_ent = &qty_table[x1_q];
2665
2666 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2667 && x0_ent->const_rtx != NULL_RTX
2668 && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2669 && x1_ent->const_rtx != NULL_RTX)
2670 return 0;
2671 }
2672
2673 return rtx_varies_p (x, from_alias);
2674 }
2675 \f
2676 /* Subroutine of canon_reg. Pass *XLOC through canon_reg, and validate
2677 the result if necessary. INSN is as for canon_reg. */
2678
2679 static void
2680 validate_canon_reg (rtx *xloc, rtx insn)
2681 {
2682 if (*xloc)
2683 {
2684 rtx new_rtx = canon_reg (*xloc, insn);
2685
2686 /* If replacing pseudo with hard reg or vice versa, ensure the
2687 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2688 gcc_assert (insn && new_rtx);
2689 validate_change (insn, xloc, new_rtx, 1);
2690 }
2691 }
2692
2693 /* Canonicalize an expression:
2694 replace each register reference inside it
2695 with the "oldest" equivalent register.
2696
2697 If INSN is nonzero validate_change is used to ensure that INSN remains valid
2698 after we make our substitution. The calls are made with IN_GROUP nonzero
2699 so apply_change_group must be called upon the outermost return from this
2700 function (unless INSN is zero). The result of apply_change_group can
2701 generally be discarded since the changes we are making are optional. */
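/* For example, after a copy from (reg 101) to (reg 105) the two pseudos
   share one quantity, so a later use of (reg 105) is rewritten here as
   (reg 101), the first (oldest) register of that quantity.  */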
2702
2703 static rtx
2704 canon_reg (rtx x, rtx insn)
2705 {
2706 int i;
2707 enum rtx_code code;
2708 const char *fmt;
2709
2710 if (x == 0)
2711 return x;
2712
2713 code = GET_CODE (x);
2714 switch (code)
2715 {
2716 case PC:
2717 case CC0:
2718 case CONST:
2719 case CONST_INT:
2720 case CONST_DOUBLE:
2721 case CONST_FIXED:
2722 case CONST_VECTOR:
2723 case SYMBOL_REF:
2724 case LABEL_REF:
2725 case ADDR_VEC:
2726 case ADDR_DIFF_VEC:
2727 return x;
2728
2729 case REG:
2730 {
2731 int first;
2732 int q;
2733 struct qty_table_elem *ent;
2734
2735 /* Never replace a hard reg, because hard regs can appear
2736 in more than one machine mode, and we must preserve the mode
2737 of each occurrence. Also, some hard regs appear in
2738 MEMs that are shared and mustn't be altered. Don't try to
2739 replace any reg that maps to a reg of class NO_REGS. */
2740 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2741 || ! REGNO_QTY_VALID_P (REGNO (x)))
2742 return x;
2743
2744 q = REG_QTY (REGNO (x));
2745 ent = &qty_table[q];
2746 first = ent->first_reg;
2747 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2748 : REGNO_REG_CLASS (first) == NO_REGS ? x
2749 : gen_rtx_REG (ent->mode, first));
2750 }
2751
2752 default:
2753 break;
2754 }
2755
2756 fmt = GET_RTX_FORMAT (code);
2757 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2758 {
2759 int j;
2760
2761 if (fmt[i] == 'e')
2762 validate_canon_reg (&XEXP (x, i), insn);
2763 else if (fmt[i] == 'E')
2764 for (j = 0; j < XVECLEN (x, i); j++)
2765 validate_canon_reg (&XVECEXP (x, i, j), insn);
2766 }
2767
2768 return x;
2769 }
2770 \f
2771 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
2772 operation (EQ, NE, GT, etc.), follow it back through the hash table and
2773 find what values are being compared.
2774
2775 *PARG1 and *PARG2 are updated to contain the rtx representing the values
2776 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
2777 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2778 compared to produce cc0.
2779
2780 The return value is the comparison operator and is either the code of
2781 the comparison found or the code corresponding to its inverse. */
2782
2783 static enum rtx_code
2784 find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
2785 enum machine_mode *pmode1, enum machine_mode *pmode2)
2786 {
2787 rtx arg1, arg2;
2788
2789 arg1 = *parg1, arg2 = *parg2;
2790
2791 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
2792
2793 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2794 {
2795 /* Set nonzero when we find something of interest. */
2796 rtx x = 0;
2797 int reverse_code = 0;
2798 struct table_elt *p = 0;
2799
2800 /* If arg1 is a COMPARE, extract the comparison arguments from it.
2801 On machines with CC0, this is the only case that can occur, since
2802 fold_rtx will return the COMPARE or item being compared with zero
2803 when given CC0. */
2804
2805 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2806 x = arg1;
2807
2808 /* If ARG1 is a comparison operator and CODE is testing for
2809 STORE_FLAG_VALUE, get the inner arguments. */
2810
2811 else if (COMPARISON_P (arg1))
2812 {
2813 #ifdef FLOAT_STORE_FLAG_VALUE
2814 REAL_VALUE_TYPE fsfv;
2815 #endif
2816
2817 if (code == NE
2818 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2819 && code == LT && STORE_FLAG_VALUE == -1)
2820 #ifdef FLOAT_STORE_FLAG_VALUE
2821 || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
2822 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
2823 REAL_VALUE_NEGATIVE (fsfv)))
2824 #endif
2825 )
2826 x = arg1;
2827 else if (code == EQ
2828 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2829 && code == GE && STORE_FLAG_VALUE == -1)
2830 #ifdef FLOAT_STORE_FLAG_VALUE
2831 || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
2832 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
2833 REAL_VALUE_NEGATIVE (fsfv)))
2834 #endif
2835 )
2836 x = arg1, reverse_code = 1;
2837 }
2838
2839 /* ??? We could also check for
2840
2841 (ne (and (eq (...) (const_int 1))) (const_int 0))
2842
2843 and related forms, but let's wait until we see them occurring. */
2844
2845 if (x == 0)
2846 /* Look up ARG1 in the hash table and see if it has an equivalence
2847 that lets us see what is being compared. */
2848 p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
2849 if (p)
2850 {
2851 p = p->first_same_value;
2852
2853 /* If what we compare is already known to be constant, that is as
2854 good as it gets.
2855 We need to break the loop in this case, because otherwise we
2856 can have an infinite loop when looking at a reg that is known
2857 to be a constant which is the same as a comparison of a reg
2858 against zero which appears later in the insn stream, which in
2859 turn is constant and the same as the comparison of the first reg
2860 against zero... */
2861 if (p->is_const)
2862 break;
2863 }
2864
2865 for (; p; p = p->next_same_value)
2866 {
2867 enum machine_mode inner_mode = GET_MODE (p->exp);
2868 #ifdef FLOAT_STORE_FLAG_VALUE
2869 REAL_VALUE_TYPE fsfv;
2870 #endif
2871
2872 /* If the entry isn't valid, skip it. */
2873 if (! exp_equiv_p (p->exp, p->exp, 1, false))
2874 continue;
2875
2876 if (GET_CODE (p->exp) == COMPARE
2877 /* Another possibility is that this machine has a compare insn
2878 that includes the comparison code. In that case, ARG1 would
2879 be equivalent to a comparison operation that would set ARG1 to
2880 either STORE_FLAG_VALUE or zero. If this is an NE operation,
2881 ORIG_CODE is the actual comparison being done; if it is an EQ,
2882 we must reverse ORIG_CODE. On machines with a negative value
2883 for STORE_FLAG_VALUE, also look at LT and GE operations. */
2884 || ((code == NE
2885 || (code == LT
2886 && GET_MODE_CLASS (inner_mode) == MODE_INT
2887 && (GET_MODE_BITSIZE (inner_mode)
2888 <= HOST_BITS_PER_WIDE_INT)
2889 && (STORE_FLAG_VALUE
2890 & ((HOST_WIDE_INT) 1
2891 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2892 #ifdef FLOAT_STORE_FLAG_VALUE
2893 || (code == LT
2894 && SCALAR_FLOAT_MODE_P (inner_mode)
2895 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
2896 REAL_VALUE_NEGATIVE (fsfv)))
2897 #endif
2898 )
2899 && COMPARISON_P (p->exp)))
2900 {
2901 x = p->exp;
2902 break;
2903 }
2904 else if ((code == EQ
2905 || (code == GE
2906 && GET_MODE_CLASS (inner_mode) == MODE_INT
2907 && (GET_MODE_BITSIZE (inner_mode)
2908 <= HOST_BITS_PER_WIDE_INT)
2909 && (STORE_FLAG_VALUE
2910 & ((HOST_WIDE_INT) 1
2911 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2912 #ifdef FLOAT_STORE_FLAG_VALUE
2913 || (code == GE
2914 && SCALAR_FLOAT_MODE_P (inner_mode)
2915 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
2916 REAL_VALUE_NEGATIVE (fsfv)))
2917 #endif
2918 )
2919 && COMPARISON_P (p->exp))
2920 {
2921 reverse_code = 1;
2922 x = p->exp;
2923 break;
2924 }
2925
2926 /* If this is a non-trapping address, e.g. fp + constant, the
2927 equivalent is a better operand since it may let us predict
2928 the value of the comparison. */
2929 else if (!rtx_addr_can_trap_p (p->exp))
2930 {
2931 arg1 = p->exp;
2932 continue;
2933 }
2934 }
2935
2936 /* If we didn't find a useful equivalence for ARG1, we are done.
2937 Otherwise, set up for the next iteration. */
2938 if (x == 0)
2939 break;
2940
2941 /* If we need to reverse the comparison, make sure that is
2942 possible -- we can't necessarily infer the value of GE from LT
2943 with floating-point operands. */
2944 if (reverse_code)
2945 {
2946 enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
2947 if (reversed == UNKNOWN)
2948 break;
2949 else
2950 code = reversed;
2951 }
2952 else if (COMPARISON_P (x))
2953 code = GET_CODE (x);
2954 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
2955 }
2956
2957 /* Return our results. Return the modes from before fold_rtx
2958 because fold_rtx might produce const_int, and then it's too late. */
2959 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
2960 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
2961
2962 return code;
2963 }
2964 \f
2965 /* If X is a nontrivial arithmetic operation on an argument for which
2966 a constant value can be determined, return the result of operating
2967 on that value, as a constant. Otherwise, return X, possibly with
2968 one or more operands changed to a forward-propagated constant.
2969
2970 If X is a register whose contents are known, we do NOT return
2971 those contents here; equiv_constant is called to perform that task.
2972 For SUBREGs and MEMs, we do that both here and in equiv_constant.
2973
2974 INSN is the insn that we may be modifying. If it is 0, make a copy
2975 of X before modifying it. */
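/* As a simple example, folding (plus:SI (reg N) (const_int 2)) when
   (reg N) is known to hold (const_int 3) yields (const_int 5).  */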
2976
2977 static rtx
2978 fold_rtx (rtx x, rtx insn)
2979 {
2980 enum rtx_code code;
2981 enum machine_mode mode;
2982 const char *fmt;
2983 int i;
2984 rtx new_rtx = 0;
2985 int changed = 0;
2986
2987 /* Operands of X. */
2988 rtx folded_arg0;
2989 rtx folded_arg1;
2990
2991 /* Constant equivalents of first three operands of X;
2992 0 when no such equivalent is known. */
2993 rtx const_arg0;
2994 rtx const_arg1;
2995 rtx const_arg2;
2996
2997 /* The mode of the first operand of X. We need this for sign and zero
2998 extends. */
2999 enum machine_mode mode_arg0;
3000
3001 if (x == 0)
3002 return x;
3003
3004 /* Try to perform some initial simplifications on X. */
3005 code = GET_CODE (x);
3006 switch (code)
3007 {
3008 case MEM:
3009 case SUBREG:
3010 if ((new_rtx = equiv_constant (x)) != NULL_RTX)
3011 return new_rtx;
3012 return x;
3013
3014 case CONST:
3015 case CONST_INT:
3016 case CONST_DOUBLE:
3017 case CONST_FIXED:
3018 case CONST_VECTOR:
3019 case SYMBOL_REF:
3020 case LABEL_REF:
3021 case REG:
3022 case PC:
3023 /* No use simplifying an EXPR_LIST
3024 since they are used only for lists of args
3025 in a function call's REG_EQUAL note. */
3026 case EXPR_LIST:
3027 return x;
3028
3029 #ifdef HAVE_cc0
3030 case CC0:
3031 return prev_insn_cc0;
3032 #endif
3033
3034 case ASM_OPERANDS:
3035 if (insn)
3036 {
3037 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3038 validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3039 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3040 }
3041 return x;
3042
3043 #ifdef NO_FUNCTION_CSE
3044 case CALL:
3045 if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3046 return x;
3047 break;
3048 #endif
3049
3050 /* Anything else goes through the loop below. */
3051 default:
3052 break;
3053 }
3054
3055 mode = GET_MODE (x);
3056 const_arg0 = 0;
3057 const_arg1 = 0;
3058 const_arg2 = 0;
3059 mode_arg0 = VOIDmode;
3060
3061 /* Try folding our operands.
3062 Then see which ones have constant values known. */
3063
3064 fmt = GET_RTX_FORMAT (code);
3065 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3066 if (fmt[i] == 'e')
3067 {
3068 rtx folded_arg = XEXP (x, i), const_arg;
3069 enum machine_mode mode_arg = GET_MODE (folded_arg);
3070
3071 switch (GET_CODE (folded_arg))
3072 {
3073 case MEM:
3074 case REG:
3075 case SUBREG:
3076 const_arg = equiv_constant (folded_arg);
3077 break;
3078
3079 case CONST:
3080 case CONST_INT:
3081 case SYMBOL_REF:
3082 case LABEL_REF:
3083 case CONST_DOUBLE:
3084 case CONST_FIXED:
3085 case CONST_VECTOR:
3086 const_arg = folded_arg;
3087 break;
3088
3089 #ifdef HAVE_cc0
3090 case CC0:
3091 folded_arg = prev_insn_cc0;
3092 mode_arg = prev_insn_cc0_mode;
3093 const_arg = equiv_constant (folded_arg);
3094 break;
3095 #endif
3096
3097 default:
3098 folded_arg = fold_rtx (folded_arg, insn);
3099 const_arg = equiv_constant (folded_arg);
3100 break;
3101 }
3102
3103 /* For the first three operands, see if the operand
3104 is constant or equivalent to a constant. */
3105 switch (i)
3106 {
3107 case 0:
3108 folded_arg0 = folded_arg;
3109 const_arg0 = const_arg;
3110 mode_arg0 = mode_arg;
3111 break;
3112 case 1:
3113 folded_arg1 = folded_arg;
3114 const_arg1 = const_arg;
3115 break;
3116 case 2:
3117 const_arg2 = const_arg;
3118 break;
3119 }
3120
3121 /* Pick the least expensive of the argument and an equivalent constant
3122 argument. */
3123 if (const_arg != 0
3124 && const_arg != folded_arg
3125 && COST_IN (const_arg, code) <= COST_IN (folded_arg, code)
3126
3127 /* It's not safe to substitute the operand of a conversion
3128 operator with a constant, as the conversion's identity
3129 depends upon the mode of its operand. This optimization
3130 is handled by the call to simplify_unary_operation. */
3131 && (GET_RTX_CLASS (code) != RTX_UNARY
3132 || GET_MODE (const_arg) == mode_arg0
3133 || (code != ZERO_EXTEND
3134 && code != SIGN_EXTEND
3135 && code != TRUNCATE
3136 && code != FLOAT_TRUNCATE
3137 && code != FLOAT_EXTEND
3138 && code != FLOAT
3139 && code != FIX
3140 && code != UNSIGNED_FLOAT
3141 && code != UNSIGNED_FIX)))
3142 folded_arg = const_arg;
3143
3144 if (folded_arg == XEXP (x, i))
3145 continue;
3146
3147 if (insn == NULL_RTX && !changed)
3148 x = copy_rtx (x);
3149 changed = 1;
3150 validate_unshare_change (insn, &XEXP (x, i), folded_arg, 1);
3151 }
3152
3153 if (changed)
3154 {
3155 /* Canonicalize X if necessary, and keep const_argN and folded_argN
3156 consistent with the order in X. */
3157 if (canonicalize_change_group (insn, x))
3158 {
3159 rtx tem;
3160 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3161 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3162 }
3163
3164 apply_change_group ();
3165 }
3166
3167 /* If X is an arithmetic operation, see if we can simplify it. */
3168
3169 switch (GET_RTX_CLASS (code))
3170 {
3171 case RTX_UNARY:
3172 {
3173 int is_const = 0;
3174
3175 /* We can't simplify extension ops unless we know the
3176 original mode. */
3177 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3178 && mode_arg0 == VOIDmode)
3179 break;
3180
3181 /* If we had a CONST, strip it off and put it back later if we
3182 fold. */
3183 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3184 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3185
3186 new_rtx = simplify_unary_operation (code, mode,
3187 const_arg0 ? const_arg0 : folded_arg0,
3188 mode_arg0);
3189 /* NEG of PLUS could be converted into MINUS, but that causes
3190 expressions of the form
3191 (CONST (MINUS (CONST_INT) (SYMBOL_REF)))
3192 which many ports mistakenly treat as LEGITIMATE_CONSTANT_P.
3193 FIXME: those ports should be fixed. */
3194 if (new_rtx != 0 && is_const
3195 && GET_CODE (new_rtx) == PLUS
3196 && (GET_CODE (XEXP (new_rtx, 0)) == SYMBOL_REF
3197 || GET_CODE (XEXP (new_rtx, 0)) == LABEL_REF)
3198 && GET_CODE (XEXP (new_rtx, 1)) == CONST_INT)
3199 new_rtx = gen_rtx_CONST (mode, new_rtx);
3200 }
3201 break;
3202
3203 case RTX_COMPARE:
3204 case RTX_COMM_COMPARE:
3205 /* See what items are actually being compared and set FOLDED_ARG[01]
3206 to those values and CODE to the actual comparison code. If any are
3207 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3208 do anything if both operands are already known to be constant. */
3209
3210 /* ??? Vector mode comparisons are not supported yet. */
3211 if (VECTOR_MODE_P (mode))
3212 break;
3213
3214 if (const_arg0 == 0 || const_arg1 == 0)
3215 {
3216 struct table_elt *p0, *p1;
3217 rtx true_rtx, false_rtx;
3218 enum machine_mode mode_arg1;
3219
3220 if (SCALAR_FLOAT_MODE_P (mode))
3221 {
3222 #ifdef FLOAT_STORE_FLAG_VALUE
3223 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3224 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3225 #else
3226 true_rtx = NULL_RTX;
3227 #endif
3228 false_rtx = CONST0_RTX (mode);
3229 }
3230 else
3231 {
3232 true_rtx = const_true_rtx;
3233 false_rtx = const0_rtx;
3234 }
3235
3236 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3237 &mode_arg0, &mode_arg1);
3238
3239 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3240 what kinds of things are being compared, so we can't do
3241 anything with this comparison. */
3242
3243 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3244 break;
3245
3246 const_arg0 = equiv_constant (folded_arg0);
3247 const_arg1 = equiv_constant (folded_arg1);
3248
3249 /* If we do not now have two constants being compared, see
3250 if we can nevertheless deduce some things about the
3251 comparison. */
3252 if (const_arg0 == 0 || const_arg1 == 0)
3253 {
3254 if (const_arg1 != NULL)
3255 {
3256 rtx cheapest_simplification;
3257 int cheapest_cost;
3258 rtx simp_result;
3259 struct table_elt *p;
3260
3261 /* See if we can find an equivalent of folded_arg0
3262 that gets us a cheaper expression, possibly a
3263 constant through simplifications. */
3264 p = lookup (folded_arg0, SAFE_HASH (folded_arg0, mode_arg0),
3265 mode_arg0);
3266
3267 if (p != NULL)
3268 {
3269 cheapest_simplification = x;
3270 cheapest_cost = COST (x);
3271
3272 for (p = p->first_same_value; p != NULL; p = p->next_same_value)
3273 {
3274 int cost;
3275
3276 /* If the entry isn't valid, skip it. */
3277 if (! exp_equiv_p (p->exp, p->exp, 1, false))
3278 continue;
3279
3280 /* Try to simplify using this equivalence. */
3281 simp_result
3282 = simplify_relational_operation (code, mode,
3283 mode_arg0,
3284 p->exp,
3285 const_arg1);
3286
3287 if (simp_result == NULL)
3288 continue;
3289
3290 cost = COST (simp_result);
3291 if (cost < cheapest_cost)
3292 {
3293 cheapest_cost = cost;
3294 cheapest_simplification = simp_result;
3295 }
3296 }
3297
3298 /* If we have a cheaper expression now, use that
3299 and try folding it further, from the top. */
3300 if (cheapest_simplification != x)
3301 return fold_rtx (copy_rtx (cheapest_simplification),
3302 insn);
3303 }
3304 }
3305
3306 /* See if the two operands are the same. */
3307
3308 if ((REG_P (folded_arg0)
3309 && REG_P (folded_arg1)
3310 && (REG_QTY (REGNO (folded_arg0))
3311 == REG_QTY (REGNO (folded_arg1))))
3312 || ((p0 = lookup (folded_arg0,
3313 SAFE_HASH (folded_arg0, mode_arg0),
3314 mode_arg0))
3315 && (p1 = lookup (folded_arg1,
3316 SAFE_HASH (folded_arg1, mode_arg0),
3317 mode_arg0))
3318 && p0->first_same_value == p1->first_same_value))
3319 folded_arg1 = folded_arg0;
3320
3321 /* If FOLDED_ARG0 is a register, see if the comparison we are
3322 doing now is either the same as we did before or the reverse
3323 (we only check the reverse if not floating-point). */
3324 else if (REG_P (folded_arg0))
3325 {
3326 int qty = REG_QTY (REGNO (folded_arg0));
3327
3328 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3329 {
3330 struct qty_table_elem *ent = &qty_table[qty];
3331
3332 if ((comparison_dominates_p (ent->comparison_code, code)
3333 || (! FLOAT_MODE_P (mode_arg0)
3334 && comparison_dominates_p (ent->comparison_code,
3335 reverse_condition (code))))
3336 && (rtx_equal_p (ent->comparison_const, folded_arg1)
3337 || (const_arg1
3338 && rtx_equal_p (ent->comparison_const,
3339 const_arg1))
3340 || (REG_P (folded_arg1)
3341 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
3342 {
3343 if (comparison_dominates_p (ent->comparison_code, code))
3344 {
3345 if (true_rtx)
3346 return true_rtx;
3347 else
3348 break;
3349 }
3350 else
3351 return false_rtx;
3352 }
3353 }
3354 }
3355 }
3356 }
3357
3358 /* If we are comparing against zero, see if the first operand is
3359 equivalent to an IOR with a constant. If so, we may be able to
3360 determine the result of this comparison. */
3361 if (const_arg1 == const0_rtx && !const_arg0)
3362 {
3363 rtx y = lookup_as_function (folded_arg0, IOR);
3364 rtx inner_const;
3365
3366 if (y != 0
3367 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
3368 && GET_CODE (inner_const) == CONST_INT
3369 && INTVAL (inner_const) != 0)
3370 folded_arg0 = gen_rtx_IOR (mode_arg0, XEXP (y, 0), inner_const);
3371 }
3372
3373 {
3374 rtx op0 = const_arg0 ? const_arg0 : folded_arg0;
3375 rtx op1 = const_arg1 ? const_arg1 : folded_arg1;
3376 new_rtx = simplify_relational_operation (code, mode, mode_arg0, op0, op1);
3377 }
3378 break;
3379
3380 case RTX_BIN_ARITH:
3381 case RTX_COMM_ARITH:
3382 switch (code)
3383 {
3384 case PLUS:
3385 /* If the second operand is a LABEL_REF, see if the first is a MINUS
3386 with that LABEL_REF as its second operand. If so, the result is
3387 the first operand of that MINUS. This handles switches with an
3388 ADDR_DIFF_VEC table. */
3389 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
3390 {
3391 rtx y
3392 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
3393 : lookup_as_function (folded_arg0, MINUS);
3394
3395 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3396 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
3397 return XEXP (y, 0);
3398
3399 /* Now try for a CONST of a MINUS like the above. */
3400 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
3401 : lookup_as_function (folded_arg0, CONST))) != 0
3402 && GET_CODE (XEXP (y, 0)) == MINUS
3403 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3404 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
3405 return XEXP (XEXP (y, 0), 0);
3406 }
3407
3408 /* Likewise if the operands are in the other order. */
3409 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
3410 {
3411 rtx y
3412 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
3413 : lookup_as_function (folded_arg1, MINUS);
3414
3415 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3416 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
3417 return XEXP (y, 0);
3418
3419 /* Now try for a CONST of a MINUS like the above. */
3420 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
3421 : lookup_as_function (folded_arg1, CONST))) != 0
3422 && GET_CODE (XEXP (y, 0)) == MINUS
3423 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3424 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
3425 return XEXP (XEXP (y, 0), 0);
3426 }
3427
3428 /* If second operand is a register equivalent to a negative
3429 CONST_INT, see if we can find a register equivalent to the
3430 positive constant. Make a MINUS if so. Don't do this for
3431 a non-negative constant since we might then alternate between
3432 choosing positive and negative constants. Having the positive
3433 constant previously-used is the more common case. Be sure
3434 the resulting constant is non-negative; if const_arg1 were
3435 the smallest negative number this would overflow: depending
3436 on the mode, this would either just be the same value (and
3437 hence not save anything) or be incorrect. */
3438 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
3439 && INTVAL (const_arg1) < 0
3440 /* This used to test
3441
3442 -INTVAL (const_arg1) >= 0
3443
3444 But the Sun V5.0 compilers mis-compiled that test. So
3445 instead we test for the problematic value in a more direct
3446 manner and hope the Sun compilers get it correct. */
3447 && INTVAL (const_arg1) !=
3448 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
3449 && REG_P (folded_arg1))
3450 {
3451 rtx new_const = GEN_INT (-INTVAL (const_arg1));
3452 struct table_elt *p
3453 = lookup (new_const, SAFE_HASH (new_const, mode), mode);
3454
3455 if (p)
3456 for (p = p->first_same_value; p; p = p->next_same_value)
3457 if (REG_P (p->exp))
3458 return simplify_gen_binary (MINUS, mode, folded_arg0,
3459 canon_reg (p->exp, NULL_RTX));
3460 }
3461 goto from_plus;
3462
3463 case MINUS:
3464 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
3465 If so, produce (PLUS Z C2-C). */
3466 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
3467 {
3468 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
3469 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
3470 return fold_rtx (plus_constant (copy_rtx (y),
3471 -INTVAL (const_arg1)),
3472 NULL_RTX);
3473 }
3474
3475 /* Fall through. */
3476
3477 from_plus:
3478 case SMIN: case SMAX: case UMIN: case UMAX:
3479 case IOR: case AND: case XOR:
3480 case MULT:
3481 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
3482 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
3483 is known to be of similar form, we may be able to replace the
3484 operation with a combined operation. This may eliminate the
3485 intermediate operation if every use is simplified in this way.
3486 Note that the similar optimization done by combine.c only works
3487 if the intermediate operation's result has only one reference. */
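/* E.g. if (reg R) is known to be (plus X (const_int 4)), then
   (plus (reg R) (const_int 9)) becomes (plus X (const_int 13));
   shift counts compose by addition in the same way.  */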
3488
3489 if (REG_P (folded_arg0)
3490 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
3491 {
3492 int is_shift
3493 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
3494 rtx y, inner_const, new_const;
3495 enum rtx_code associate_code;
3496
3497 if (is_shift
3498 && (INTVAL (const_arg1) >= GET_MODE_BITSIZE (mode)
3499 || INTVAL (const_arg1) < 0))
3500 {
3501 if (SHIFT_COUNT_TRUNCATED)
3502 const_arg1 = GEN_INT (INTVAL (const_arg1)
3503 & (GET_MODE_BITSIZE (mode) - 1));
3504 else
3505 break;
3506 }
3507
3508 y = lookup_as_function (folded_arg0, code);
3509 if (y == 0)
3510 break;
3511
3512 /* If we have compiled a statement like
3513 "if (x == (x & mask1))", and now are looking at
3514 "x & mask2", we will have a case where the first operand
3515 of Y is the same as our first operand. Unless we detect
3516 this case, an infinite loop will result. */
3517 if (XEXP (y, 0) == folded_arg0)
3518 break;
3519
3520 inner_const = equiv_constant (fold_rtx (XEXP (y, 1), 0));
3521 if (!inner_const || GET_CODE (inner_const) != CONST_INT)
3522 break;
3523
3524 /* Don't associate these operations if they are a PLUS with the
3525 same constant and it is a power of two. These might be doable
3526 with a pre- or post-increment. Similarly for two subtracts of
3527 identical powers of two with post decrement. */
3528
3529 if (code == PLUS && const_arg1 == inner_const
3530 && ((HAVE_PRE_INCREMENT
3531 && exact_log2 (INTVAL (const_arg1)) >= 0)
3532 || (HAVE_POST_INCREMENT
3533 && exact_log2 (INTVAL (const_arg1)) >= 0)
3534 || (HAVE_PRE_DECREMENT
3535 && exact_log2 (- INTVAL (const_arg1)) >= 0)
3536 || (HAVE_POST_DECREMENT
3537 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
3538 break;
3539
3540 /* ??? Vector mode shifts by scalar
3541 shift operand are not supported yet. */
3542 if (is_shift && VECTOR_MODE_P (mode))
3543 break;
3544
3545 if (is_shift
3546 && (INTVAL (inner_const) >= GET_MODE_BITSIZE (mode)
3547 || INTVAL (inner_const) < 0))
3548 {
3549 if (SHIFT_COUNT_TRUNCATED)
3550 inner_const = GEN_INT (INTVAL (inner_const)
3551 & (GET_MODE_BITSIZE (mode) - 1));
3552 else
3553 break;
3554 }
3555
3556 /* Compute the code used to compose the constants. For example,
3557 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS. */
3558
3559 associate_code = (is_shift || code == MINUS ? PLUS : code);
3560
3561 new_const = simplify_binary_operation (associate_code, mode,
3562 const_arg1, inner_const);
3563
3564 if (new_const == 0)
3565 break;
3566
3567 /* If we are associating shift operations, don't let this
3568 produce a shift of the size of the object or larger.
3569 This could occur when we follow a sign-extend by a right
3570 shift on a machine that does a sign-extend as a pair
3571 of shifts. */
3572
3573 if (is_shift
3574 && GET_CODE (new_const) == CONST_INT
3575 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
3576 {
3577 /* As an exception, we can turn an ASHIFTRT of this
3578 form into a shift of the number of bits - 1. */
3579 if (code == ASHIFTRT)
3580 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
3581 else if (!side_effects_p (XEXP (y, 0)))
3582 return CONST0_RTX (mode);
3583 else
3584 break;
3585 }
3586
3587 y = copy_rtx (XEXP (y, 0));
3588
3589 /* If Y contains our first operand (the most common way this
3590 can happen is if Y is a MEM), we would go into an infinite
3591 loop if we tried to fold it. So don't fold it in that case. */
3592
3593 if (! reg_mentioned_p (folded_arg0, y))
3594 y = fold_rtx (y, insn);
3595
3596 return simplify_gen_binary (code, mode, y, new_const);
3597 }
3598 break;
3599
3600 case DIV: case UDIV:
3601 /* ??? The associative optimization performed immediately above is
3602 also possible for DIV and UDIV using associate_code of MULT.
3603 However, we would need extra code to verify that the
3604 multiplication does not overflow, that is, there is no overflow
3605 in the calculation of new_const. */
3606 break;
3607
3608 default:
3609 break;
3610 }
3611
3612 new_rtx = simplify_binary_operation (code, mode,
3613 const_arg0 ? const_arg0 : folded_arg0,
3614 const_arg1 ? const_arg1 : folded_arg1);
3615 break;
3616
3617 case RTX_OBJ:
3618 /* (lo_sum (high X) X) is simply X. */
3619 if (code == LO_SUM && const_arg0 != 0
3620 && GET_CODE (const_arg0) == HIGH
3621 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
3622 return const_arg1;
3623 break;
3624
3625 case RTX_TERNARY:
3626 case RTX_BITFIELD_OPS:
3627 new_rtx = simplify_ternary_operation (code, mode, mode_arg0,
3628 const_arg0 ? const_arg0 : folded_arg0,
3629 const_arg1 ? const_arg1 : folded_arg1,
3630 const_arg2 ? const_arg2 : XEXP (x, 2));
3631 break;
3632
3633 default:
3634 break;
3635 }
3636
3637 return new_rtx ? new_rtx : x;
3638 }
3639 \f
3640 /* Return a constant value currently equivalent to X.
3641 Return 0 if we don't know one. */
3642
3643 static rtx
3644 equiv_constant (rtx x)
3645 {
3646 if (REG_P (x)
3647 && REGNO_QTY_VALID_P (REGNO (x)))
3648 {
3649 int x_q = REG_QTY (REGNO (x));
3650 struct qty_table_elem *x_ent = &qty_table[x_q];
3651
3652 if (x_ent->const_rtx)
3653 x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
3654 }
3655
3656 if (x == 0 || CONSTANT_P (x))
3657 return x;
3658
3659 if (GET_CODE (x) == SUBREG)
3660 {
3661 rtx new_rtx;
3662
3663 /* See if we previously assigned a constant value to this SUBREG. */
3664 if ((new_rtx = lookup_as_function (x, CONST_INT)) != 0
3665 || (new_rtx = lookup_as_function (x, CONST_DOUBLE)) != 0
3666 || (new_rtx = lookup_as_function (x, CONST_FIXED)) != 0)
3667 return new_rtx;
3668
3669 if (REG_P (SUBREG_REG (x))
3670 && (new_rtx = equiv_constant (SUBREG_REG (x))) != 0)
3671 return simplify_subreg (GET_MODE (x), SUBREG_REG (x),
3672 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3673
3674 return 0;
3675 }
3676
3677 /* If X is a MEM, see if it is a constant-pool reference, or look it up in
3678 the hash table in case its value was seen before. */
3679
3680 if (MEM_P (x))
3681 {
3682 struct table_elt *elt;
3683
3684 x = avoid_constant_pool_reference (x);
3685 if (CONSTANT_P (x))
3686 return x;
3687
3688 elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
3689 if (elt == 0)
3690 return 0;
3691
3692 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3693 if (elt->is_const && CONSTANT_P (elt->exp))
3694 return elt->exp;
3695 }
3696
3697 return 0;
3698 }
3699 \f
3700 /* Given INSN, a jump insn, TAKEN indicates if we are following the
3701 "taken" branch.
3702
3703 In certain cases, this can cause us to add an equivalence. For example,
3704 if we are following the taken case of
3705 if (i == 2)
3706 we can add the fact that `i' and `2' are now equivalent.
3707
3708 In any case, we can record that this comparison was passed. If the same
3709 comparison is seen later, we will know its value. */
3710
3711 static void
3712 record_jump_equiv (rtx insn, bool taken)
3713 {
3714 int cond_known_true;
3715 rtx op0, op1;
3716 rtx set;
3717 enum machine_mode mode, mode0, mode1;
3718 int reversed_nonequality = 0;
3719 enum rtx_code code;
3720
3721 /* Ensure this is the right kind of insn. */
3722 gcc_assert (any_condjump_p (insn));
3723
3724 set = pc_set (insn);
3725
3726 /* See if this jump condition is known true or false. */
3727 if (taken)
3728 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
3729 else
3730 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
3731
3732 /* Get the type of comparison being done and the operands being compared.
3733 If we had to reverse a non-equality condition, record that fact so we
3734 know that it isn't valid for floating-point. */
3735 code = GET_CODE (XEXP (SET_SRC (set), 0));
3736 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
3737 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
3738
3739 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
3740 if (! cond_known_true)
3741 {
3742 code = reversed_comparison_code_parts (code, op0, op1, insn);
3743
3744 /* Don't remember if we can't find the inverse. */
3745 if (code == UNKNOWN)
3746 return;
3747 }
3748
3749 /* The mode is the mode of the non-constant. */
3750 mode = mode0;
3751 if (mode1 != VOIDmode)
3752 mode = mode1;
3753
3754 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
3755 }
3756
3757 /* Yet another form of subreg creation. In this case, we want something in
3758 MODE, and we assume OP already has MODE if it is naturally modeless (VOIDmode). */
3759
3760 static rtx
3761 record_jump_cond_subreg (enum machine_mode mode, rtx op)
3762 {
3763 enum machine_mode op_mode = GET_MODE (op);
3764 if (op_mode == mode || op_mode == VOIDmode)
3765 return op;
3766 return lowpart_subreg (mode, op, op_mode);
3767 }
3768
3769 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
3770 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
3771 Make any useful entries we can with that information. Called from
3772 above function and called recursively. */
3773
3774 static void
3775 record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
3776 rtx op1, int reversed_nonequality)
3777 {
3778 unsigned op0_hash, op1_hash;
3779 int op0_in_memory, op1_in_memory;
3780 struct table_elt *op0_elt, *op1_elt;
3781
3782 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
3783 we know that they are also equal in the smaller mode (this is also
3784 true for all smaller modes whether or not there is a SUBREG, but
3785 it is not worth testing for when there is no SUBREG). */
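 /* For instance, if (subreg:DI (reg:SI X) 0) is known equal to (reg:DI Y),
    we also record that (reg:SI X) is equal to the SImode lowpart of Y.  */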
3786
3787 /* Note that GET_MODE (op0) may not equal MODE. */
3788 if (code == EQ && GET_CODE (op0) == SUBREG
3789 && (GET_MODE_SIZE (GET_MODE (op0))
3790 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
3791 {
3792 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
3793 rtx tem = record_jump_cond_subreg (inner_mode, op1);
3794 if (tem)
3795 record_jump_cond (code, mode, SUBREG_REG (op0), tem,
3796 reversed_nonequality);
3797 }
3798
3799 if (code == EQ && GET_CODE (op1) == SUBREG
3800 && (GET_MODE_SIZE (GET_MODE (op1))
3801 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
3802 {
3803 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
3804 rtx tem = record_jump_cond_subreg (inner_mode, op0);
3805 if (tem)
3806 record_jump_cond (code, mode, SUBREG_REG (op1), tem,
3807 reversed_nonequality);
3808 }
3809
3810 /* Similarly, if this is an NE comparison, and either is a SUBREG
3811 making a smaller mode, we know the whole thing is also NE. */
3812
3813 /* Note that GET_MODE (op0) may not equal MODE;
3814 if we test MODE instead, we can get an infinite recursion
3815 alternating between two modes each wider than MODE. */
3816
3817 if (code == NE && GET_CODE (op0) == SUBREG
3818 && subreg_lowpart_p (op0)
3819 && (GET_MODE_SIZE (GET_MODE (op0))
3820 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
3821 {
3822 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
3823 rtx tem = record_jump_cond_subreg (inner_mode, op1);
3824 if (tem)
3825 record_jump_cond (code, mode, SUBREG_REG (op0), tem,
3826 reversed_nonequality);
3827 }
3828
3829 if (code == NE && GET_CODE (op1) == SUBREG
3830 && subreg_lowpart_p (op1)
3831 && (GET_MODE_SIZE (GET_MODE (op1))
3832 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
3833 {
3834 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
3835 rtx tem = record_jump_cond_subreg (inner_mode, op0);
3836 if (tem)
3837 record_jump_cond (code, mode, SUBREG_REG (op1), tem,
3838 reversed_nonequality);
3839 }
3840
3841 /* Hash both operands. */
3842
3843 do_not_record = 0;
3844 hash_arg_in_memory = 0;
3845 op0_hash = HASH (op0, mode);
3846 op0_in_memory = hash_arg_in_memory;
3847
3848 if (do_not_record)
3849 return;
3850
3851 do_not_record = 0;
3852 hash_arg_in_memory = 0;
3853 op1_hash = HASH (op1, mode);
3854 op1_in_memory = hash_arg_in_memory;
3855
3856 if (do_not_record)
3857 return;
3858
3859 /* Look up both operands. */
3860 op0_elt = lookup (op0, op0_hash, mode);
3861 op1_elt = lookup (op1, op1_hash, mode);
3862
3863 /* If both operands are already equivalent or if they are not in the
3864 table but are identical, do nothing. */
3865 if ((op0_elt != 0 && op1_elt != 0
3866 && op0_elt->first_same_value == op1_elt->first_same_value)
3867 || op0 == op1 || rtx_equal_p (op0, op1))
3868 return;
3869
3870 /* If we aren't setting two things equal, all we can do is save this
3871 comparison. Similarly if this is floating-point. In the latter
3872 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
3873 If we record the equality, we might inadvertently delete code
3874 whose intent was to change -0 to +0. */
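 /* E.g. in "if (x == 0.0) x = 0.0;" the store changes -0.0 to +0.0, so
    it must not be deleted just because x compared equal to 0.0.  */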
3875
3876 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
3877 {
3878 struct qty_table_elem *ent;
3879 int qty;
3880
3881 /* If we reversed a floating-point comparison, if OP0 is not a
3882 register, or if OP1 is neither a register nor a constant, we can't
3883 do anything. */
3884
3885 if (!REG_P (op1))
3886 op1 = equiv_constant (op1);
3887
3888 if ((reversed_nonequality && FLOAT_MODE_P (mode))
3889 || !REG_P (op0) || op1 == 0)
3890 return;
3891
3892 /* Put OP0 in the hash table if it isn't already. This gives it a
3893 new quantity number. */
3894 if (op0_elt == 0)
3895 {
3896 if (insert_regs (op0, NULL, 0))
3897 {
3898 rehash_using_reg (op0);
3899 op0_hash = HASH (op0, mode);
3900
3901 /* If OP0 is contained in OP1, this changes its hash code
3902 as well. Faster to rehash than to check, except
3903 for the simple case of a constant. */
3904 if (! CONSTANT_P (op1))
3905 op1_hash = HASH (op1, mode);
3906 }
3907
3908 op0_elt = insert (op0, NULL, op0_hash, mode);
3909 op0_elt->in_memory = op0_in_memory;
3910 }
3911
3912 qty = REG_QTY (REGNO (op0));
3913 ent = &qty_table[qty];
3914
3915 ent->comparison_code = code;
3916 if (REG_P (op1))
3917 {
3918 /* Look it up again--in case op0 and op1 are the same. */
3919 op1_elt = lookup (op1, op1_hash, mode);
3920
3921 /* Put OP1 in the hash table so it gets a new quantity number. */
3922 if (op1_elt == 0)
3923 {
3924 if (insert_regs (op1, NULL, 0))
3925 {
3926 rehash_using_reg (op1);
3927 op1_hash = HASH (op1, mode);
3928 }
3929
3930 op1_elt = insert (op1, NULL, op1_hash, mode);
3931 op1_elt->in_memory = op1_in_memory;
3932 }
3933
3934 ent->comparison_const = NULL_RTX;
3935 ent->comparison_qty = REG_QTY (REGNO (op1));
3936 }
3937 else
3938 {
3939 ent->comparison_const = op1;
3940 ent->comparison_qty = -1;
3941 }
3942
3943 return;
3944 }
3945
3946 /* If either side is still missing an equivalence, make it now,
3947 then merge the equivalences. */
3948
3949 if (op0_elt == 0)
3950 {
3951 if (insert_regs (op0, NULL, 0))
3952 {
3953 rehash_using_reg (op0);
3954 op0_hash = HASH (op0, mode);
3955 }
3956
3957 op0_elt = insert (op0, NULL, op0_hash, mode);
3958 op0_elt->in_memory = op0_in_memory;
3959 }
3960
3961 if (op1_elt == 0)
3962 {
3963 if (insert_regs (op1, NULL, 0))
3964 {
3965 rehash_using_reg (op1);
3966 op1_hash = HASH (op1, mode);
3967 }
3968
3969 op1_elt = insert (op1, NULL, op1_hash, mode);
3970 op1_elt->in_memory = op1_in_memory;
3971 }
3972
3973 merge_equiv_classes (op0_elt, op1_elt);
3974 }
3975 \f
3976 /* CSE processing for one instruction.
3977 First simplify sources and addresses of all assignments
3978 in the instruction, using previously-computed equivalent values.
3979 Then install the new sources and destinations in the table
3980 of available values. */
3981
3982 /* Data on one SET contained in the instruction. */
3983
3984 struct set
3985 {
3986 /* The SET rtx itself. */
3987 rtx rtl;
3988 /* The SET_SRC of the rtx (the original value, if it is changing). */
3989 rtx src;
3990 /* The hash-table element for the SET_SRC of the SET. */
3991 struct table_elt *src_elt;
3992 /* Hash value for the SET_SRC. */
3993 unsigned src_hash;
3994 /* Hash value for the SET_DEST. */
3995 unsigned dest_hash;
3996 /* The SET_DEST, with SUBREG, etc., stripped. */
3997 rtx inner_dest;
3998 /* Nonzero if the SET_SRC is in memory. */
3999 char src_in_memory;
4000 /* Nonzero if the SET_SRC contains something
4001 whose value cannot be predicted and understood. */
4002 char src_volatile;
4003 /* Original machine mode, in case it becomes a CONST_INT.
4004 The size of this field should match the size of the mode
4005 field of struct rtx_def (see rtl.h). */
4006 ENUM_BITFIELD(machine_mode) mode : 8;
4007 /* A constant equivalent for SET_SRC, if any. */
4008 rtx src_const;
4009 /* Hash value of constant equivalent for SET_SRC. */
4010 unsigned src_const_hash;
4011 /* Table entry for constant equivalent for SET_SRC, if any. */
4012 struct table_elt *src_const_elt;
4013 /* Table entry for the destination address. */
4014 struct table_elt *dest_addr_elt;
4015 };
4016
4017 static void
4018 cse_insn (rtx insn)
4019 {
4020 rtx x = PATTERN (insn);
4021 int i;
4022 rtx tem;
4023 int n_sets = 0;
4024
4025 rtx src_eqv = 0;
4026 struct table_elt *src_eqv_elt = 0;
4027 int src_eqv_volatile = 0;
4028 int src_eqv_in_memory = 0;
4029 unsigned src_eqv_hash = 0;
4030
4031 struct set *sets = (struct set *) 0;
4032
4033 this_insn = insn;
4034 #ifdef HAVE_cc0
4035 /* Records what this insn does to set CC0. */
4036 this_insn_cc0 = 0;
4037 this_insn_cc0_mode = VOIDmode;
4038 #endif
4039
4040 /* Find all the SETs and CLOBBERs in this instruction.
4041 Record all the SETs in the array `sets' and count them.
4042 Also determine whether there is a CLOBBER that invalidates
4043 all memory references, or all references at varying addresses. */
4044
4045 if (CALL_P (insn))
4046 {
4047 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4048 {
4049 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4050 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4051 XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4052 }
4053 }
4054
4055 if (GET_CODE (x) == SET)
4056 {
4057 sets = XALLOCA (struct set);
4058 sets[0].rtl = x;
4059
4060 /* Ignore SETs that are unconditional jumps.
4061 They never need cse processing, so this does not hurt.
4062 The reason is not efficiency but rather
4063 so that we can test at the end for instructions
4064 that have been simplified to unconditional jumps
4065 and not be misled by unchanged instructions
4066 that were unconditional jumps to begin with. */
4067 if (SET_DEST (x) == pc_rtx
4068 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4069 ;
4070
4071 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4072 The hard function value register is used only once, to copy to
4073 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4074 Ensure we invalidate the destination register. On the 80386 no
4075 other code would invalidate it since it is a fixed_reg.
4076 We need not check the return of apply_change_group; see canon_reg. */
4077
4078 else if (GET_CODE (SET_SRC (x)) == CALL)
4079 {
4080 canon_reg (SET_SRC (x), insn);
4081 apply_change_group ();
4082 fold_rtx (SET_SRC (x), insn);
4083 invalidate (SET_DEST (x), VOIDmode);
4084 }
4085 else
4086 n_sets = 1;
4087 }
4088 else if (GET_CODE (x) == PARALLEL)
4089 {
4090 int lim = XVECLEN (x, 0);
4091
4092 sets = XALLOCAVEC (struct set, lim);
4093
4094 /* Find all regs explicitly clobbered in this insn,
4095 and ensure they are not replaced with any other regs
4096 elsewhere in this insn.
4097 When a reg that is clobbered is also used for input,
4098 we should presume that that is for a reason,
4099 and we should not substitute some other register
4100 which is not supposed to be clobbered.
4101 Therefore, this loop cannot be merged into the one below
4102 because a CALL may precede a CLOBBER and refer to the
4103 value clobbered. We must not let a canonicalization do
4104 anything in that case. */
4105 for (i = 0; i < lim; i++)
4106 {
4107 rtx y = XVECEXP (x, 0, i);
4108 if (GET_CODE (y) == CLOBBER)
4109 {
4110 rtx clobbered = XEXP (y, 0);
4111
4112 if (REG_P (clobbered)
4113 || GET_CODE (clobbered) == SUBREG)
4114 invalidate (clobbered, VOIDmode);
4115 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4116 || GET_CODE (clobbered) == ZERO_EXTRACT)
4117 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4118 }
4119 }
4120
4121 for (i = 0; i < lim; i++)
4122 {
4123 rtx y = XVECEXP (x, 0, i);
4124 if (GET_CODE (y) == SET)
4125 {
4126 /* As above, we ignore unconditional jumps and call-insns and
4127 ignore the result of apply_change_group. */
4128 if (GET_CODE (SET_SRC (y)) == CALL)
4129 {
4130 canon_reg (SET_SRC (y), insn);
4131 apply_change_group ();
4132 fold_rtx (SET_SRC (y), insn);
4133 invalidate (SET_DEST (y), VOIDmode);
4134 }
4135 else if (SET_DEST (y) == pc_rtx
4136 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4137 ;
4138 else
4139 sets[n_sets++].rtl = y;
4140 }
4141 else if (GET_CODE (y) == CLOBBER)
4142 {
4143 /* If we clobber memory, canon the address.
4144 This does nothing when a register is clobbered
4145 because we have already invalidated the reg. */
4146 if (MEM_P (XEXP (y, 0)))
4147 canon_reg (XEXP (y, 0), insn);
4148 }
4149 else if (GET_CODE (y) == USE
4150 && ! (REG_P (XEXP (y, 0))
4151 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4152 canon_reg (y, insn);
4153 else if (GET_CODE (y) == CALL)
4154 {
4155 /* The result of apply_change_group can be ignored; see
4156 canon_reg. */
4157 canon_reg (y, insn);
4158 apply_change_group ();
4159 fold_rtx (y, insn);
4160 }
4161 }
4162 }
4163 else if (GET_CODE (x) == CLOBBER)
4164 {
4165 if (MEM_P (XEXP (x, 0)))
4166 canon_reg (XEXP (x, 0), insn);
4167 }
4168
4169 /* Canonicalize a USE of a pseudo register or memory location. */
4170 else if (GET_CODE (x) == USE
4171 && ! (REG_P (XEXP (x, 0))
4172 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4173 canon_reg (XEXP (x, 0), insn);
4174 else if (GET_CODE (x) == CALL)
4175 {
4176 /* The result of apply_change_group can be ignored; see canon_reg. */
4177 canon_reg (x, insn);
4178 apply_change_group ();
4179 fold_rtx (x, insn);
4180 }
4181
4182 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4183 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4184 is handled specially for this case, and if it isn't set, then there will
4185 be no equivalence for the destination. */
4186 if (n_sets == 1 && REG_NOTES (insn) != 0
4187 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4188 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4189 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4190 {
4191 /* The result of apply_change_group can be ignored; see canon_reg. */
4192 canon_reg (XEXP (tem, 0), insn);
4193 apply_change_group ();
4194 src_eqv = fold_rtx (XEXP (tem, 0), insn);
4195 XEXP (tem, 0) = copy_rtx (src_eqv);
4196 df_notes_rescan (insn);
4197 }
4198
4199 /* Canonicalize sources and addresses of destinations.
4200 We do this in a separate pass to avoid problems when a MATCH_DUP is
4201 present in the insn pattern. In that case, we want to ensure that
4202 we don't break the duplicate nature of the pattern. So we will replace
4203 both operands at the same time. Otherwise, we would fail to find an
4204 equivalent substitution in the loop calling validate_change below.
4205
4206 We used to suppress canonicalization of DEST if it appears in SRC,
4207 but we don't do this any more. */
4208
4209 for (i = 0; i < n_sets; i++)
4210 {
4211 rtx dest = SET_DEST (sets[i].rtl);
4212 rtx src = SET_SRC (sets[i].rtl);
4213 rtx new_rtx = canon_reg (src, insn);
4214
4215 validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);
4216
4217 if (GET_CODE (dest) == ZERO_EXTRACT)
4218 {
4219 validate_change (insn, &XEXP (dest, 1),
4220 canon_reg (XEXP (dest, 1), insn), 1);
4221 validate_change (insn, &XEXP (dest, 2),
4222 canon_reg (XEXP (dest, 2), insn), 1);
4223 }
4224
4225 while (GET_CODE (dest) == SUBREG
4226 || GET_CODE (dest) == ZERO_EXTRACT
4227 || GET_CODE (dest) == STRICT_LOW_PART)
4228 dest = XEXP (dest, 0);
4229
4230 if (MEM_P (dest))
4231 canon_reg (dest, insn);
4232 }
4233
4234 /* Now that we have done all the replacements, we can apply the change
4235 group and see if they all work. Note that this will cause some
4236 canonicalizations that would have worked individually not to be applied
4237 because some other canonicalization didn't work, but this should not
4238 occur often.
4239
4240 The result of apply_change_group can be ignored; see canon_reg. */
4241
4242 apply_change_group ();
4243
4244 /* Set sets[i].src_elt to the class each source belongs to.
4245 Detect assignments from or to volatile things
4246 and set sets[i].rtl to zero so they will be ignored
4247 in the rest of this function.
4248
4249 Nothing in this loop changes the hash table or the register chains. */
4250
4251 for (i = 0; i < n_sets; i++)
4252 {
4253 rtx src, dest;
4254 rtx src_folded;
4255 struct table_elt *elt = 0, *p;
4256 enum machine_mode mode;
4257 rtx src_eqv_here;
4258 rtx src_const = 0;
4259 rtx src_related = 0;
4260 struct table_elt *src_const_elt = 0;
4261 int src_cost = MAX_COST;
4262 int src_eqv_cost = MAX_COST;
4263 int src_folded_cost = MAX_COST;
4264 int src_related_cost = MAX_COST;
4265 int src_elt_cost = MAX_COST;
4266 int src_regcost = MAX_COST;
4267 int src_eqv_regcost = MAX_COST;
4268 int src_folded_regcost = MAX_COST;
4269 int src_related_regcost = MAX_COST;
4270 int src_elt_regcost = MAX_COST;
4271 /* Set nonzero if we need to call force_const_mem on the
4272 contents of src_folded before using it. */
4273 int src_folded_force_flag = 0;
4274
4275 dest = SET_DEST (sets[i].rtl);
4276 src = SET_SRC (sets[i].rtl);
4277
4278 /* If SRC is a constant that has no machine mode,
4279 hash it with the destination's machine mode.
4280 This way we can keep different modes separate. */
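 /* A CONST_INT, for example, has VOIDmode; hashing it with the
    destination's mode keeps an SImode use and a DImode use of the same
    constant in separate equivalence classes.  */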
4281
4282 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4283 sets[i].mode = mode;
4284
4285 if (src_eqv)
4286 {
4287 enum machine_mode eqvmode = mode;
4288 if (GET_CODE (dest) == STRICT_LOW_PART)
4289 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4290 do_not_record = 0;
4291 hash_arg_in_memory = 0;
4292 src_eqv_hash = HASH (src_eqv, eqvmode);
4293
4294 /* Find the equivalence class for the equivalent expression. */
4295
4296 if (!do_not_record)
4297 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
4298
4299 src_eqv_volatile = do_not_record;
4300 src_eqv_in_memory = hash_arg_in_memory;
4301 }
4302
4303 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4304 value of the INNER register, not the destination. So it is not
4305 a valid substitution for the source. But save it for later. */
4306 if (GET_CODE (dest) == STRICT_LOW_PART)
4307 src_eqv_here = 0;
4308 else
4309 src_eqv_here = src_eqv;
4310
4311 /* Simplify any foldable subexpressions in SRC. Then get the fully-
4312 simplified result, which may not necessarily be valid. */
4313 src_folded = fold_rtx (src, insn);
4314
4315 #if 0
4316 /* ??? This caused bad code to be generated for the m68k port with -O2.
4317 Suppose src is (CONST_INT -1), and that after truncation src_folded
4318 is (CONST_INT 3). Suppose src_folded is then used for src_const.
4319 At the end we will add src and src_const to the same equivalence
4320 class. We now have 3 and -1 on the same equivalence class. This
4321 causes later instructions to be mis-optimized. */
4322 /* If storing a constant in a bitfield, pre-truncate the constant
4323 so we will be able to record it later. */
4324 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
4325 {
4326 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
4327
4328 if (GET_CODE (src) == CONST_INT
4329 && GET_CODE (width) == CONST_INT
4330 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
4331 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
4332 src_folded
4333 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
4334 << INTVAL (width)) - 1));
4335 }
4336 #endif
4337
4338 /* Compute SRC's hash code, and also notice if it
4339 should not be recorded at all. In that case,
4340 prevent any further processing of this assignment. */
4341 do_not_record = 0;
4342 hash_arg_in_memory = 0;
4343
4344 sets[i].src = src;
4345 sets[i].src_hash = HASH (src, mode);
4346 sets[i].src_volatile = do_not_record;
4347 sets[i].src_in_memory = hash_arg_in_memory;
4348
4349 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
4350 a pseudo, do not record SRC. Using SRC as a replacement for
4351 anything else will be incorrect in that situation. Note that
4352 this usually occurs only for stack slots, in which case all the
4353 RTL would be referring to SRC, so we don't lose any optimization
4354 opportunities by not having SRC in the hash table. */
4355
4356 if (MEM_P (src)
4357 && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
4358 && REG_P (dest)
4359 && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
4360 sets[i].src_volatile = 1;
4361
4362 #if 0
4363 /* It is no longer clear why we used to do this, but it doesn't
4364 appear to still be needed. So let's try without it since this
4365 code hurts cse'ing widened ops. */
4366 /* If source is a paradoxical subreg (such as QI treated as an SI),
4367 treat it as volatile. It may do the work of an SI in one context
4368 where the extra bits are not being used, but cannot replace an SI
4369 in general. */
4370 if (GET_CODE (src) == SUBREG
4371 && (GET_MODE_SIZE (GET_MODE (src))
4372 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
4373 sets[i].src_volatile = 1;
4374 #endif
4375
4376 /* Locate all possible equivalent forms for SRC. Try to replace
4377 SRC in the insn with each cheaper equivalent.
4378
4379 We have the following types of equivalents: SRC itself, a folded
4380 version, a value given in a REG_EQUAL note, or a value related
4381 to a constant.
4382
4383 Each of these equivalents may be part of an additional class
4384 of equivalents (if more than one is in the table, they must be in
4385 the same class; we check for this).
4386
4387 If the source is volatile, we don't do any table lookups.
4388
4389 We note any constant equivalent for possible later use in a
4390 REG_NOTE. */
4391
4392 if (!sets[i].src_volatile)
4393 elt = lookup (src, sets[i].src_hash, mode);
4394
4395 sets[i].src_elt = elt;
4396
4397 if (elt && src_eqv_here && src_eqv_elt)
4398 {
4399 if (elt->first_same_value != src_eqv_elt->first_same_value)
4400 {
4401 /* The REG_EQUAL is indicating that two formerly distinct
4402 classes are now equivalent. So merge them. */
4403 merge_equiv_classes (elt, src_eqv_elt);
4404 src_eqv_hash = HASH (src_eqv, elt->mode);
4405 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
4406 }
4407
4408 src_eqv_here = 0;
4409 }
4410
4411 else if (src_eqv_elt)
4412 elt = src_eqv_elt;
4413
4414 /* Try to find a constant somewhere and record it in `src_const'.
4415 Record its table element, if any, in `src_const_elt'. Look in
4416 any known equivalences first. (If the constant is not in the
4417 table, also set `sets[i].src_const_hash'). */
4418 if (elt)
4419 for (p = elt->first_same_value; p; p = p->next_same_value)
4420 if (p->is_const)
4421 {
4422 src_const = p->exp;
4423 src_const_elt = elt;
4424 break;
4425 }
4426
4427 if (src_const == 0
4428 && (CONSTANT_P (src_folded)
4429 /* Consider (minus (label_ref L1) (label_ref L2)) as
4430 "constant" here so we will record it. This allows us
4431 to fold switch statements when an ADDR_DIFF_VEC is used. */
4432 || (GET_CODE (src_folded) == MINUS
4433 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
4434 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
4435 src_const = src_folded, src_const_elt = elt;
4436 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
4437 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
4438
4439 /* If we don't know if the constant is in the table, get its
4440 hash code and look it up. */
4441 if (src_const && src_const_elt == 0)
4442 {
4443 sets[i].src_const_hash = HASH (src_const, mode);
4444 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
4445 }
4446
4447 sets[i].src_const = src_const;
4448 sets[i].src_const_elt = src_const_elt;
4449
4450 /* If the constant and our source are both in the table, mark them as
4451 equivalent. Otherwise, if a constant is in the table but the source
4452 isn't, set ELT to it. */
4453 if (src_const_elt && elt
4454 && src_const_elt->first_same_value != elt->first_same_value)
4455 merge_equiv_classes (elt, src_const_elt);
4456 else if (src_const_elt && elt == 0)
4457 elt = src_const_elt;
4458
4459 /* See if there is a register linearly related to a constant
4460 equivalent of SRC. */
4461 if (src_const
4462 && (GET_CODE (src_const) == CONST
4463 || (src_const_elt && src_const_elt->related_value != 0)))
4464 {
4465 src_related = use_related_value (src_const, src_const_elt);
4466 if (src_related)
4467 {
4468 struct table_elt *src_related_elt
4469 = lookup (src_related, HASH (src_related, mode), mode);
4470 if (src_related_elt && elt)
4471 {
4472 if (elt->first_same_value
4473 != src_related_elt->first_same_value)
4474 /* This can occur when we previously saw a CONST
4475 involving a SYMBOL_REF and then see the SYMBOL_REF
4476 twice. Merge the involved classes. */
4477 merge_equiv_classes (elt, src_related_elt);
4478
4479 src_related = 0;
4480 src_related_elt = 0;
4481 }
4482 else if (src_related_elt && elt == 0)
4483 elt = src_related_elt;
4484 }
4485 }
4486
4487 /* See if we have a CONST_INT that is already in a register in a
4488 wider mode. */
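 /* For example, if (reg:SI R) is already known to hold (const_int 7) and
    we need that constant in HImode, the HImode lowpart of R can be used
    instead of loading the constant again.  */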
4489
4490 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
4491 && GET_MODE_CLASS (mode) == MODE_INT
4492 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
4493 {
4494 enum machine_mode wider_mode;
4495
4496 for (wider_mode = GET_MODE_WIDER_MODE (mode);
4497 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
4498 && src_related == 0;
4499 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
4500 {
4501 struct table_elt *const_elt
4502 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
4503
4504 if (const_elt == 0)
4505 continue;
4506
4507 for (const_elt = const_elt->first_same_value;
4508 const_elt; const_elt = const_elt->next_same_value)
4509 if (REG_P (const_elt->exp))
4510 {
4511 src_related = gen_lowpart (mode, const_elt->exp);
4512 break;
4513 }
4514 }
4515 }
4516
4517 /* Another possibility is that we have an AND with a constant in
4518 a mode narrower than a word. If so, it might have been generated
4519 as part of an "if" which would narrow the AND. If we already
4520 have done the AND in a wider mode, we can use a SUBREG of that
4521 value. */
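 /* E.g. if this insn wants (and:QI X (const_int 15)) and the same AND has
    already been computed in SImode into a register, the QImode lowpart of
    that register holds the value we need.  */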
4522
4523 if (flag_expensive_optimizations && ! src_related
4524 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
4525 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
4526 {
4527 enum machine_mode tmode;
4528 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
4529
4530 for (tmode = GET_MODE_WIDER_MODE (mode);
4531 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
4532 tmode = GET_MODE_WIDER_MODE (tmode))
4533 {
4534 rtx inner = gen_lowpart (tmode, XEXP (src, 0));
4535 struct table_elt *larger_elt;
4536
4537 if (inner)
4538 {
4539 PUT_MODE (new_and, tmode);
4540 XEXP (new_and, 0) = inner;
4541 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
4542 if (larger_elt == 0)
4543 continue;
4544
4545 for (larger_elt = larger_elt->first_same_value;
4546 larger_elt; larger_elt = larger_elt->next_same_value)
4547 if (REG_P (larger_elt->exp))
4548 {
4549 src_related
4550 = gen_lowpart (mode, larger_elt->exp);
4551 break;
4552 }
4553
4554 if (src_related)
4555 break;
4556 }
4557 }
4558 }
4559
4560 #ifdef LOAD_EXTEND_OP
4561 /* See if a MEM has already been loaded with a widening operation;
4562 if it has, we can use a subreg of that. Many CISC machines
4563 also have such operations, but this is only likely to be
4564 beneficial on these machines. */
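 /* For instance, on a target whose QImode loads zero-extend, if
    (zero_extend:SI (mem:QI A)) is already available in a register, a
    QImode read of A can use the low byte of that register instead.  */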
4565
4566 if (flag_expensive_optimizations && src_related == 0
4567 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
4568 && GET_MODE_CLASS (mode) == MODE_INT
4569 && MEM_P (src) && ! do_not_record
4570 && LOAD_EXTEND_OP (mode) != UNKNOWN)
4571 {
4572 struct rtx_def memory_extend_buf;
4573 rtx memory_extend_rtx = &memory_extend_buf;
4574 enum machine_mode tmode;
4575
4576 /* Set what we are trying to extend and the operation it might
4577 have been extended with. */
4578 memset (memory_extend_rtx, 0, sizeof(*memory_extend_rtx));
4579 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
4580 XEXP (memory_extend_rtx, 0) = src;
4581
4582 for (tmode = GET_MODE_WIDER_MODE (mode);
4583 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
4584 tmode = GET_MODE_WIDER_MODE (tmode))
4585 {
4586 struct table_elt *larger_elt;
4587
4588 PUT_MODE (memory_extend_rtx, tmode);
4589 larger_elt = lookup (memory_extend_rtx,
4590 HASH (memory_extend_rtx, tmode), tmode);
4591 if (larger_elt == 0)
4592 continue;
4593
4594 for (larger_elt = larger_elt->first_same_value;
4595 larger_elt; larger_elt = larger_elt->next_same_value)
4596 if (REG_P (larger_elt->exp))
4597 {
4598 src_related = gen_lowpart (mode, larger_elt->exp);
4599 break;
4600 }
4601
4602 if (src_related)
4603 break;
4604 }
4605 }
4606 #endif /* LOAD_EXTEND_OP */
4607
4608 if (src == src_folded)
4609 src_folded = 0;
4610
4611 /* At this point, ELT, if nonzero, points to a class of expressions
4612 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
4613 and SRC_RELATED, if nonzero, each contain additional equivalent
4614 expressions. Prune these latter expressions by deleting expressions
4615 already in the equivalence class.
4616
4617 Check for an equivalent identical to the destination. If found,
4618 this is the preferred equivalent since it will likely lead to
4619 elimination of the insn. Indicate this by placing it in
4620 `src_related'. */
4621
4622 if (elt)
4623 elt = elt->first_same_value;
4624 for (p = elt; p; p = p->next_same_value)
4625 {
4626 enum rtx_code code = GET_CODE (p->exp);
4627
4628 /* If the expression is not valid, ignore it. Then we do not
4629 have to check for validity below. In most cases, we can use
4630 `rtx_equal_p', since canonicalization has already been done. */
4631 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
4632 continue;
4633
4634 /* Also skip paradoxical subregs, unless that's what we're
4635 looking for. */
4636 if (code == SUBREG
4637 && (GET_MODE_SIZE (GET_MODE (p->exp))
4638 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
4639 && ! (src != 0
4640 && GET_CODE (src) == SUBREG
4641 && GET_MODE (src) == GET_MODE (p->exp)
4642 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
4643 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
4644 continue;
4645
4646 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
4647 src = 0;
4648 else if (src_folded && GET_CODE (src_folded) == code
4649 && rtx_equal_p (src_folded, p->exp))
4650 src_folded = 0;
4651 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
4652 && rtx_equal_p (src_eqv_here, p->exp))
4653 src_eqv_here = 0;
4654 else if (src_related && GET_CODE (src_related) == code
4655 && rtx_equal_p (src_related, p->exp))
4656 src_related = 0;
4657
4658 /* If this is the same as the destination of the insn, we want
4659 to prefer it. Copy it to src_related. The code below will
4660 then give it a negative cost. */
4661 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
4662 src_related = dest;
4663 }
4664
4665 /* Find the cheapest valid equivalent, trying all the available
4666 possibilities. Prefer items not in the hash table to ones
4667 that are when they are equal cost. Note that we can never
4668 worsen an insn as the current contents will also succeed.
4669 If we find an equivalent identical to the destination, use it as best,
4670 since this insn will probably be eliminated in that case. */
4671 if (src)
4672 {
4673 if (rtx_equal_p (src, dest))
4674 src_cost = src_regcost = -1;
4675 else
4676 {
4677 src_cost = COST (src);
4678 src_regcost = approx_reg_cost (src);
4679 }
4680 }
4681
4682 if (src_eqv_here)
4683 {
4684 if (rtx_equal_p (src_eqv_here, dest))
4685 src_eqv_cost = src_eqv_regcost = -1;
4686 else
4687 {
4688 src_eqv_cost = COST (src_eqv_here);
4689 src_eqv_regcost = approx_reg_cost (src_eqv_here);
4690 }
4691 }
4692
4693 if (src_folded)
4694 {
4695 if (rtx_equal_p (src_folded, dest))
4696 src_folded_cost = src_folded_regcost = -1;
4697 else
4698 {
4699 src_folded_cost = COST (src_folded);
4700 src_folded_regcost = approx_reg_cost (src_folded);
4701 }
4702 }
4703
4704 if (src_related)
4705 {
4706 if (rtx_equal_p (src_related, dest))
4707 src_related_cost = src_related_regcost = -1;
4708 else
4709 {
4710 src_related_cost = COST (src_related);
4711 src_related_regcost = approx_reg_cost (src_related);
4712 }
4713 }
4714
4715 /* If this was an indirect jump insn, a known label will really be
4716 cheaper even though it looks more expensive. */
4717 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
4718 src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
4719
4720 /* Terminate loop when replacement made. This must terminate since
4721 the current contents will be tested and will always be valid. */
4722 while (1)
4723 {
4724 rtx trial;
4725
4726 /* Skip invalid entries. */
4727 while (elt && !REG_P (elt->exp)
4728 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
4729 elt = elt->next_same_value;
4730
4731 /* A paradoxical subreg would be bad here: it'll be the right
4732 size, but later may be adjusted so that the upper bits aren't
4733 what we want. So reject it. */
4734 if (elt != 0
4735 && GET_CODE (elt->exp) == SUBREG
4736 && (GET_MODE_SIZE (GET_MODE (elt->exp))
4737 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
4738 /* It is okay, though, if the rtx we're trying to match
4739 will ignore any of the bits we can't predict. */
4740 && ! (src != 0
4741 && GET_CODE (src) == SUBREG
4742 && GET_MODE (src) == GET_MODE (elt->exp)
4743 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
4744 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
4745 {
4746 elt = elt->next_same_value;
4747 continue;
4748 }
4749
4750 if (elt)
4751 {
4752 src_elt_cost = elt->cost;
4753 src_elt_regcost = elt->regcost;
4754 }
4755
4756 /* Find the cheapest one and skip it for next time. For items
4757 of equal cost, use this order:
4758 src_folded, src, src_eqv, src_related and hash table entry. */
4759 if (src_folded
4760 && preferable (src_folded_cost, src_folded_regcost,
4761 src_cost, src_regcost) <= 0
4762 && preferable (src_folded_cost, src_folded_regcost,
4763 src_eqv_cost, src_eqv_regcost) <= 0
4764 && preferable (src_folded_cost, src_folded_regcost,
4765 src_related_cost, src_related_regcost) <= 0
4766 && preferable (src_folded_cost, src_folded_regcost,
4767 src_elt_cost, src_elt_regcost) <= 0)
4768 {
4769 trial = src_folded, src_folded_cost = MAX_COST;
4770 if (src_folded_force_flag)
4771 {
4772 rtx forced = force_const_mem (mode, trial);
4773 if (forced)
4774 trial = forced;
4775 }
4776 }
4777 else if (src
4778 && preferable (src_cost, src_regcost,
4779 src_eqv_cost, src_eqv_regcost) <= 0
4780 && preferable (src_cost, src_regcost,
4781 src_related_cost, src_related_regcost) <= 0
4782 && preferable (src_cost, src_regcost,
4783 src_elt_cost, src_elt_regcost) <= 0)
4784 trial = src, src_cost = MAX_COST;
4785 else if (src_eqv_here
4786 && preferable (src_eqv_cost, src_eqv_regcost,
4787 src_related_cost, src_related_regcost) <= 0
4788 && preferable (src_eqv_cost, src_eqv_regcost,
4789 src_elt_cost, src_elt_regcost) <= 0)
4790 trial = src_eqv_here, src_eqv_cost = MAX_COST;
4791 else if (src_related
4792 && preferable (src_related_cost, src_related_regcost,
4793 src_elt_cost, src_elt_regcost) <= 0)
4794 trial = src_related, src_related_cost = MAX_COST;
4795 else
4796 {
4797 trial = elt->exp;
4798 elt = elt->next_same_value;
4799 src_elt_cost = MAX_COST;
4800 }
4801
4802 /* Avoid creation of overlapping memory moves. */
4803 if (MEM_P (trial) && MEM_P (SET_DEST (sets[i].rtl)))
4804 {
4805 rtx src, dest;
4806
4807 /* BLKmode moves are not handled by cse anyway. */
4808 if (GET_MODE (trial) == BLKmode)
4809 break;
4810
4811 src = canon_rtx (trial);
4812 dest = canon_rtx (SET_DEST (sets[i].rtl));
4813
4814 if (!MEM_P (src) || !MEM_P (dest)
4815 || !nonoverlapping_memrefs_p (src, dest))
4816 break;
4817 }
4818
4819 /* We don't normally have an insn matching (set (pc) (pc)), so
4820 check for this separately here. We will delete such an
4821 insn below.
4822
4823 For other cases such as a table jump or conditional jump
4824 where we know the ultimate target, go ahead and replace the
4825 operand. While that may not make a valid insn, we will
4826 reemit the jump below (and also insert any necessary
4827 barriers). */
4828 if (n_sets == 1 && dest == pc_rtx
4829 && (trial == pc_rtx
4830 || (GET_CODE (trial) == LABEL_REF
4831 && ! condjump_p (insn))))
4832 {
4833 /* Don't substitute non-local labels, this confuses CFG. */
4834 if (GET_CODE (trial) == LABEL_REF
4835 && LABEL_REF_NONLOCAL_P (trial))
4836 continue;
4837
4838 SET_SRC (sets[i].rtl) = trial;
4839 cse_jumps_altered = true;
4840 break;
4841 }
4842
4843 /* Reject certain invalid forms of CONST that we create. */
4844 else if (CONSTANT_P (trial)
4845 && GET_CODE (trial) == CONST
4846 /* Reject cases that will cause decode_rtx_const to
4847 die. On the alpha when simplifying a switch, we
4848 get (const (truncate (minus (label_ref)
4849 (label_ref)))). */
4850 && (GET_CODE (XEXP (trial, 0)) == TRUNCATE
4851 /* Likewise on IA-64, except without the
4852 truncate. */
4853 || (GET_CODE (XEXP (trial, 0)) == MINUS
4854 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
4855 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)))
4856 /* Do nothing for this case. */
4857 ;
4858
4859 /* Look for a substitution that makes a valid insn. */
4860 else if (validate_unshare_change
4861 (insn, &SET_SRC (sets[i].rtl), trial, 0))
4862 {
4863 rtx new_rtx = canon_reg (SET_SRC (sets[i].rtl), insn);
4864
4865 /* The result of apply_change_group can be ignored; see
4866 canon_reg. */
4867
4868 validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);
4869 apply_change_group ();
4870
4871 break;
4872 }
4873
4874 /* If we previously found constant pool entries for
4875 constants and this is a constant, try making a
4876 pool entry. Put it in src_folded unless we have already done
4877 this, since that is where it likely came from. */
4878
4879 else if (constant_pool_entries_cost
4880 && CONSTANT_P (trial)
4881 && (src_folded == 0
4882 || (!MEM_P (src_folded)
4883 && ! src_folded_force_flag))
4884 && GET_MODE_CLASS (mode) != MODE_CC
4885 && mode != VOIDmode)
4886 {
4887 src_folded_force_flag = 1;
4888 src_folded = trial;
4889 src_folded_cost = constant_pool_entries_cost;
4890 src_folded_regcost = constant_pool_entries_regcost;
4891 }
4892 }
4893
4894 src = SET_SRC (sets[i].rtl);
4895
4896 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
4897 However, there is an important exception: If both are registers
4898 that are not the head of their equivalence class, replace SET_SRC
4899 with the head of the class. If we do not do this, we will have
4900 both registers live over a portion of the basic block. This way,
4901 their lifetimes will likely abut instead of overlapping. */
4902 if (REG_P (dest)
4903 && REGNO_QTY_VALID_P (REGNO (dest)))
4904 {
4905 int dest_q = REG_QTY (REGNO (dest));
4906 struct qty_table_elem *dest_ent = &qty_table[dest_q];
4907
4908 if (dest_ent->mode == GET_MODE (dest)
4909 && dest_ent->first_reg != REGNO (dest)
4910 && REG_P (src) && REGNO (src) == REGNO (dest)
4911 /* Don't do this if the original insn had a hard reg as
4912 SET_SRC or SET_DEST. */
4913 && (!REG_P (sets[i].src)
4914 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
4915 && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
4916 /* We can't call canon_reg here because it won't do anything if
4917 SRC is a hard register. */
4918 {
4919 int src_q = REG_QTY (REGNO (src));
4920 struct qty_table_elem *src_ent = &qty_table[src_q];
4921 int first = src_ent->first_reg;
4922 rtx new_src
4923 = (first >= FIRST_PSEUDO_REGISTER
4924 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
4925
4926 /* We must use validate_change even for this, because this
4927 might be a special no-op instruction, suitable only to
4928 tag notes onto. */
4929 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
4930 {
4931 src = new_src;
4932 /* If we had a constant that is cheaper than what we are now
4933 setting SRC to, use that constant. We ignored it when we
4934 thought we could make this into a no-op. */
4935 if (src_const && COST (src_const) < COST (src)
4936 && validate_change (insn, &SET_SRC (sets[i].rtl),
4937 src_const, 0))
4938 src = src_const;
4939 }
4940 }
4941 }
4942
4943 /* If we made a change, recompute SRC values. */
4944 if (src != sets[i].src)
4945 {
4946 do_not_record = 0;
4947 hash_arg_in_memory = 0;
4948 sets[i].src = src;
4949 sets[i].src_hash = HASH (src, mode);
4950 sets[i].src_volatile = do_not_record;
4951 sets[i].src_in_memory = hash_arg_in_memory;
4952 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
4953 }
4954
4955 /* If this is a single SET, we are setting a register, and we have an
4956 equivalent constant, we want to add a REG_NOTE. We don't want
4957 to write a REG_EQUAL note for a constant pseudo since verifying that
4958 that pseudo hasn't been eliminated is a pain. Such a note also
4959 won't help anything.
4960
4961 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
4962 which can be created for a reference to a compile time computable
4963 entry in a jump table. */
4964
4965 if (n_sets == 1 && src_const && REG_P (dest)
4966 && !REG_P (src_const)
4967 && ! (GET_CODE (src_const) == CONST
4968 && GET_CODE (XEXP (src_const, 0)) == MINUS
4969 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
4970 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
4971 {
4972 /* We only want a REG_EQUAL note if src_const != src. */
4973 if (! rtx_equal_p (src, src_const))
4974 {
4975 /* Make sure that the rtx is not shared. */
4976 src_const = copy_rtx (src_const);
4977
4978 /* Record the actual constant value in a REG_EQUAL note,
4979 making a new one if one does not already exist. */
4980 set_unique_reg_note (insn, REG_EQUAL, src_const);
4981 df_notes_rescan (insn);
4982 }
4983 }
4984
4985 /* Now deal with the destination. */
4986 do_not_record = 0;
4987
4988 /* Look within any ZERO_EXTRACT to the MEM or REG within it. */
4989 while (GET_CODE (dest) == SUBREG
4990 || GET_CODE (dest) == ZERO_EXTRACT
4991 || GET_CODE (dest) == STRICT_LOW_PART)
4992 dest = XEXP (dest, 0);
4993
4994 sets[i].inner_dest = dest;
4995
4996 if (MEM_P (dest))
4997 {
4998 #ifdef PUSH_ROUNDING
4999 /* Stack pushes invalidate the stack pointer. */
5000 rtx addr = XEXP (dest, 0);
5001 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
5002 && XEXP (addr, 0) == stack_pointer_rtx)
5003 invalidate (stack_pointer_rtx, VOIDmode);
5004 #endif
5005 dest = fold_rtx (dest, insn);
5006 }
5007
5008 /* Compute the hash code of the destination now,
5009 before the effects of this instruction are recorded,
5010 since the register values used in the address computation
5011 are those before this instruction. */
5012 sets[i].dest_hash = HASH (dest, mode);
5013
5014 /* Don't enter a bit-field in the hash table
5015 because the value in it after the store
5016 may not equal what was stored, due to truncation. */
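 /* For example, storing (const_int 257) into an 8-bit field leaves the
    value 1 in the field, not 257.  */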
5017
5018 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
5019 {
5020 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5021
5022 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5023 && GET_CODE (width) == CONST_INT
5024 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5025 && ! (INTVAL (src_const)
5026 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5027 /* Exception: if the value is constant,
5028 and it won't be truncated, record it. */
5029 ;
5030 else
5031 {
5032 /* This is chosen so that the destination will be invalidated
5033 but no new value will be recorded.
5034 We must invalidate because sometimes constant
5035 values can be recorded for bitfields. */
5036 sets[i].src_elt = 0;
5037 sets[i].src_volatile = 1;
5038 src_eqv = 0;
5039 src_eqv_elt = 0;
5040 }
5041 }
5042
5043 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5044 the insn. */
5045 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5046 {
5047 /* One less use of the label this insn used to jump to. */
5048 delete_insn_and_edges (insn);
5049 cse_jumps_altered = true;
5050 /* No more processing for this set. */
5051 sets[i].rtl = 0;
5052 }
5053
5054 /* If this SET is now setting PC to a label, we know it used to
5055 be a conditional or computed branch. */
5056 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF
5057 && !LABEL_REF_NONLOCAL_P (src))
5058 {
5059 /* We reemit the jump in as many cases as possible just in
5060 case the form of an unconditional jump is significantly
5061 different from that of a computed jump or conditional jump.
5062
5063 If this insn has multiple sets, then reemitting the
5064 jump is nontrivial. So instead we just force rerecognition
5065 and hope for the best. */
5066 if (n_sets == 1)
5067 {
5068 rtx new_rtx, note;
5069
5070 new_rtx = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
5071 JUMP_LABEL (new_rtx) = XEXP (src, 0);
5072 LABEL_NUSES (XEXP (src, 0))++;
5073
5074 /* Make sure to copy over REG_NON_LOCAL_GOTO. */
5075 note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
5076 if (note)
5077 {
5078 XEXP (note, 1) = NULL_RTX;
5079 REG_NOTES (new_rtx) = note;
5080 }
5081
5082 delete_insn_and_edges (insn);
5083 insn = new_rtx;
5084 }
5085 else
5086 INSN_CODE (insn) = -1;
5087
5088 /* Do not bother deleting any unreachable code; let jump do it. */
5089 cse_jumps_altered = true;
5090 sets[i].rtl = 0;
5091 }
5092
5093 /* If destination is volatile, invalidate it and then do no further
5094 processing for this assignment. */
5095
5096 else if (do_not_record)
5097 {
5098 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5099 invalidate (dest, VOIDmode);
5100 else if (MEM_P (dest))
5101 invalidate (dest, VOIDmode);
5102 else if (GET_CODE (dest) == STRICT_LOW_PART
5103 || GET_CODE (dest) == ZERO_EXTRACT)
5104 invalidate (XEXP (dest, 0), GET_MODE (dest));
5105 sets[i].rtl = 0;
5106 }
5107
5108 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5109 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5110
5111 #ifdef HAVE_cc0
5112 /* If setting CC0, record what it was set to, or a constant, if it
5113 is equivalent to a constant. If it is being set to a floating-point
5114 value, make a COMPARE with the appropriate constant of 0. If we
5115 don't do this, later code can interpret this as a test against
5116 const0_rtx, which can cause problems if we try to put it into an
5117 insn as a floating-point operand. */
5118 if (dest == cc0_rtx)
5119 {
5120 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5121 this_insn_cc0_mode = mode;
5122 if (FLOAT_MODE_P (mode))
5123 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5124 CONST0_RTX (mode));
5125 }
5126 #endif
5127 }
5128
5129 /* Now enter all non-volatile source expressions in the hash table
5130 if they are not already present.
5131 Record their equivalence classes in src_elt.
5132 This way we can insert the corresponding destinations into
5133 the same classes even if the actual sources are no longer in them
5134 (having been invalidated). */
5135
5136 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5137 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5138 {
5139 struct table_elt *elt;
5140 struct table_elt *classp = sets[0].src_elt;
5141 rtx dest = SET_DEST (sets[0].rtl);
5142 enum machine_mode eqvmode = GET_MODE (dest);
5143
5144 if (GET_CODE (dest) == STRICT_LOW_PART)
5145 {
5146 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5147 classp = 0;
5148 }
5149 if (insert_regs (src_eqv, classp, 0))
5150 {
5151 rehash_using_reg (src_eqv);
5152 src_eqv_hash = HASH (src_eqv, eqvmode);
5153 }
5154 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5155 elt->in_memory = src_eqv_in_memory;
5156 src_eqv_elt = elt;
5157
5158 /* Check to see if src_eqv_elt is the same as a set source which
5159 does not yet have an elt, and if so set the elt of the set source
5160 to src_eqv_elt. */
5161 for (i = 0; i < n_sets; i++)
5162 if (sets[i].rtl && sets[i].src_elt == 0
5163 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5164 sets[i].src_elt = src_eqv_elt;
5165 }
5166
5167 for (i = 0; i < n_sets; i++)
5168 if (sets[i].rtl && ! sets[i].src_volatile
5169 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5170 {
5171 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5172 {
5173 /* REG_EQUAL in setting a STRICT_LOW_PART
5174 gives an equivalent for the entire destination register,
5175 not just for the subreg being stored in now.
5176 This is a more interesting equivalence, so we arrange later
5177 to treat the entire reg as the destination. */
5178 sets[i].src_elt = src_eqv_elt;
5179 sets[i].src_hash = src_eqv_hash;
5180 }
5181 else
5182 {
5183 /* Insert source and constant equivalent into hash table, if not
5184 already present. */
5185 struct table_elt *classp = src_eqv_elt;
5186 rtx src = sets[i].src;
5187 rtx dest = SET_DEST (sets[i].rtl);
5188 enum machine_mode mode
5189 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5190
5191 /* It's possible that we have a source value known to be
5192 constant but don't have a REG_EQUAL note on the insn.
5193 Lack of a note will mean src_eqv_elt will be NULL. This
5194 can happen where we've generated a SUBREG to access a
5195 CONST_INT that is already in a register in a wider mode.
5196 Ensure that the source expression is put in the proper
5197 constant class. */
5198 if (!classp)
5199 classp = sets[i].src_const_elt;
5200
5201 if (sets[i].src_elt == 0)
5202 {
5203 struct table_elt *elt;
5204
5205 /* Note that these insert_regs calls cannot remove
5206 any of the src_elt's, because they would have failed to
5207 match if not still valid. */
5208 if (insert_regs (src, classp, 0))
5209 {
5210 rehash_using_reg (src);
5211 sets[i].src_hash = HASH (src, mode);
5212 }
5213 elt = insert (src, classp, sets[i].src_hash, mode);
5214 elt->in_memory = sets[i].src_in_memory;
5215 sets[i].src_elt = classp = elt;
5216 }
5217 if (sets[i].src_const && sets[i].src_const_elt == 0
5218 && src != sets[i].src_const
5219 && ! rtx_equal_p (sets[i].src_const, src))
5220 sets[i].src_elt = insert (sets[i].src_const, classp,
5221 sets[i].src_const_hash, mode);
5222 }
5223 }
5224 else if (sets[i].src_elt == 0)
5225 /* If we did not insert the source into the hash table (e.g., it was
5226 volatile), note the equivalence class for the REG_EQUAL value, if any,
5227 so that the destination goes into that class. */
5228 sets[i].src_elt = src_eqv_elt;
5229
5230 /* Record destination addresses in the hash table. This allows us to
5231 check if they are invalidated by other sets. */
5232 for (i = 0; i < n_sets; i++)
5233 {
5234 if (sets[i].rtl)
5235 {
5236 rtx x = sets[i].inner_dest;
5237 struct table_elt *elt;
5238 enum machine_mode mode;
5239 unsigned hash;
5240
5241 if (MEM_P (x))
5242 {
5243 x = XEXP (x, 0);
5244 mode = GET_MODE (x);
5245 hash = HASH (x, mode);
5246 elt = lookup (x, hash, mode);
5247 if (!elt)
5248 {
5249 if (insert_regs (x, NULL, 0))
5250 {
5251 rtx dest = SET_DEST (sets[i].rtl);
5252
5253 rehash_using_reg (x);
5254 hash = HASH (x, mode);
5255 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5256 }
5257 elt = insert (x, NULL, hash, mode);
5258 }
5259
5260 sets[i].dest_addr_elt = elt;
5261 }
5262 else
5263 sets[i].dest_addr_elt = NULL;
5264 }
5265 }
5266
5267 invalidate_from_clobbers (x);
5268
5269 /* Some registers are invalidated by subroutine calls. Memory is
5270 invalidated by non-constant calls. */
5271
5272 if (CALL_P (insn))
5273 {
5274 if (!(RTL_CONST_OR_PURE_CALL_P (insn)))
5275 invalidate_memory ();
5276 invalidate_for_call ();
5277 }
5278
5279 /* Now invalidate everything set by this instruction.
5280 If a SUBREG or other funny destination is being set,
5281 sets[i].rtl is still nonzero, so here we invalidate the reg
5282 a part of which is being set. */
5283
5284 for (i = 0; i < n_sets; i++)
5285 if (sets[i].rtl)
5286 {
5287 /* We can't use the inner dest, because the mode associated with
5288 a ZERO_EXTRACT is significant. */
5289 rtx dest = SET_DEST (sets[i].rtl);
5290
5291 /* Needed for registers to remove the register from its
5292 previous quantity's chain.
5293 Needed for memory if this is a nonvarying address, unless
5294 we have just done an invalidate_memory that covers even those. */
5295 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5296 invalidate (dest, VOIDmode);
5297 else if (MEM_P (dest))
5298 invalidate (dest, VOIDmode);
5299 else if (GET_CODE (dest) == STRICT_LOW_PART
5300 || GET_CODE (dest) == ZERO_EXTRACT)
5301 invalidate (XEXP (dest, 0), GET_MODE (dest));
5302 }
5303
5304 /* A volatile ASM invalidates everything. */
5305 if (NONJUMP_INSN_P (insn)
5306 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5307 && MEM_VOLATILE_P (PATTERN (insn)))
5308 flush_hash_table ();
5309
5310 /* Don't cse over a call to setjmp; on some machines (e.g. VAX)
5311 the regs restored by the longjmp come from a later time
5312 than the setjmp. */
5313 if (CALL_P (insn) && find_reg_note (insn, REG_SETJMP, NULL))
5314 {
5315 flush_hash_table ();
5316 goto done;
5317 }
5318
5319 /* Make sure registers mentioned in destinations
5320 are safe for use in an expression to be inserted.
5321 This removes from the hash table
5322 any invalid entry that refers to one of these registers.
5323
5324 We don't care about the return value from mention_regs because
5325 we are going to hash the SET_DEST values unconditionally. */
5326
5327 for (i = 0; i < n_sets; i++)
5328 {
5329 if (sets[i].rtl)
5330 {
5331 rtx x = SET_DEST (sets[i].rtl);
5332
5333 if (!REG_P (x))
5334 mention_regs (x);
5335 else
5336 {
5337 /* We used to rely on all references to a register becoming
5338 inaccessible when a register changes to a new quantity,
5339 since that changes the hash code. However, that is not
5340 safe, since after HASH_SIZE new quantities we get a
5341 hash 'collision' of a register with its own invalid
5342 entries. And since SUBREGs have been changed not to
5343 change their hash code with the hash code of the register,
5344 it wouldn't work any longer at all. So we have to check
5345 for any invalid references lying around now.
5346 This code is similar to the REG case in mention_regs,
5347 but it knows that reg_tick has been incremented, and
5348 it leaves reg_in_table as -1 . */
5349 unsigned int regno = REGNO (x);
5350 unsigned int endregno = END_REGNO (x);
5351 unsigned int i;
5352
5353 for (i = regno; i < endregno; i++)
5354 {
5355 if (REG_IN_TABLE (i) >= 0)
5356 {
5357 remove_invalid_refs (i);
5358 REG_IN_TABLE (i) = -1;
5359 }
5360 }
5361 }
5362 }
5363 }
5364
5365 /* We may have just removed some of the src_elt's from the hash table.
5366 So replace each one with the current head of the same class.
5367 Also check if destination addresses have been removed. */
5368
5369 for (i = 0; i < n_sets; i++)
5370 if (sets[i].rtl)
5371 {
5372 if (sets[i].dest_addr_elt
5373 && sets[i].dest_addr_elt->first_same_value == 0)
5374 {
5375 /* The elt was removed, which means this destination is not
5376 valid after this instruction. */
5377 sets[i].rtl = NULL_RTX;
5378 }
5379 else if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
5380 /* If elt was removed, find current head of same class,
5381 or 0 if nothing remains of that class. */
5382 {
5383 struct table_elt *elt = sets[i].src_elt;
5384
5385 while (elt && elt->prev_same_value)
5386 elt = elt->prev_same_value;
5387
5388 while (elt && elt->first_same_value == 0)
5389 elt = elt->next_same_value;
5390 sets[i].src_elt = elt ? elt->first_same_value : 0;
5391 }
5392 }
5393
5394 /* Now insert the destinations into their equivalence classes. */
5395
5396 for (i = 0; i < n_sets; i++)
5397 if (sets[i].rtl)
5398 {
5399 rtx dest = SET_DEST (sets[i].rtl);
5400 struct table_elt *elt;
5401
5402 /* Don't record value if we are not supposed to risk allocating
5403 floating-point values in registers that might be wider than
5404 memory. */
5405 if ((flag_float_store
5406 && MEM_P (dest)
5407 && FLOAT_MODE_P (GET_MODE (dest)))
5408 /* Don't record BLKmode values, because we don't know their
5409 size, and can't be sure that other BLKmode values
5410 have the same or smaller size. */
5411 || GET_MODE (dest) == BLKmode
5412 /* If we didn't put a REG_EQUAL value or a source into the hash
5413 table, there is no point in recording DEST.
5414 || sets[i].src_elt == 0
5415 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
5416 or SIGN_EXTEND, don't record DEST since it can cause
5417 some tracking to be wrong.
5418
5419 ??? Think about this more later. */
5420 || (GET_CODE (dest) == SUBREG
5421 && (GET_MODE_SIZE (GET_MODE (dest))
5422 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
5423 && (GET_CODE (sets[i].src) == SIGN_EXTEND
5424 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
5425 continue;
5426
5427 /* STRICT_LOW_PART isn't part of the value BEING set,
5428 and neither is the SUBREG inside it.
5429 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
5430 if (GET_CODE (dest) == STRICT_LOW_PART)
5431 dest = SUBREG_REG (XEXP (dest, 0));
5432
5433 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5434 /* Registers must also be inserted into chains for quantities. */
5435 if (insert_regs (dest, sets[i].src_elt, 1))
5436 {
5437 /* If `insert_regs' changes something, the hash code must be
5438 recalculated. */
5439 rehash_using_reg (dest);
5440 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5441 }
5442
5443 elt = insert (dest, sets[i].src_elt,
5444 sets[i].dest_hash, GET_MODE (dest));
5445
5446 elt->in_memory = (MEM_P (sets[i].inner_dest)
5447 && !MEM_READONLY_P (sets[i].inner_dest));
5448
5449 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
5450 narrower than M2, and both M1 and M2 are the same number of words,
5451 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
5452 make that equivalence as well.
5453
5454 However, BAR may have equivalences for which gen_lowpart
5455 produces a simpler value than gen_lowpart applied to
5456 BAR itself (e.g., if BAR was ZERO_EXTENDed from M2), so we scan all
5457 of BAR's equivalences. If we don't get a simplified form, make
5458 the SUBREG. It will not be used in an equivalence, but will
5459 cause two similar assignments to be detected.
5460
5461 Note the loop below will find SUBREG_REG (DEST) since we have
5462 already entered SRC and DEST of the SET in the table. */
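/* A hypothetical illustration (pseudo-register numbers invented,
   assuming a 32-bit word target): given

       (set (subreg:SI (reg:HI 100) 0) (reg:SI 101))

   M1 is SImode and M2 is HImode; SImode is not narrower than HImode
   and both fit in one word, so we also want the equivalence

       (set (reg:HI 100) (subreg:HI (reg:SI 101) 0))

   which the loop below records by scanning the equivalence class of
   the source for an HImode form, creating the SUBREG if needed.  */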
5463
5464 if (GET_CODE (dest) == SUBREG
5465 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
5466 / UNITS_PER_WORD)
5467 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
5468 && (GET_MODE_SIZE (GET_MODE (dest))
5469 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
5470 && sets[i].src_elt != 0)
5471 {
5472 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
5473 struct table_elt *elt, *classp = 0;
5474
5475 for (elt = sets[i].src_elt->first_same_value; elt;
5476 elt = elt->next_same_value)
5477 {
5478 rtx new_src = 0;
5479 unsigned src_hash;
5480 struct table_elt *src_elt;
5481 int byte = 0;
5482
5483 /* Ignore invalid entries. */
5484 if (!REG_P (elt->exp)
5485 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
5486 continue;
5487
5488 /* We may have already been playing subreg games. If the
5489 mode is already correct for the destination, use it. */
5490 if (GET_MODE (elt->exp) == new_mode)
5491 new_src = elt->exp;
5492 else
5493 {
5494 /* Calculate big endian correction for the SUBREG_BYTE.
5495 We have already checked that M1 (GET_MODE (dest))
5496 is not narrower than M2 (new_mode). */
5497 if (BYTES_BIG_ENDIAN)
5498 byte = (GET_MODE_SIZE (GET_MODE (dest))
5499 - GET_MODE_SIZE (new_mode));
5500
5501 new_src = simplify_gen_subreg (new_mode, elt->exp,
5502 GET_MODE (dest), byte);
5503 }
5504
5505 /* The call to simplify_gen_subreg fails if the value
5506 is VOIDmode and we can't do any simplification, e.g.
5507 for EXPR_LISTs denoting function call results.
5508 It is invalid to construct a SUBREG with a VOIDmode
5509 SUBREG_REG, hence a zero new_src means we can't do
5510 this substitution. */
5511 if (! new_src)
5512 continue;
5513
5514 src_hash = HASH (new_src, new_mode);
5515 src_elt = lookup (new_src, src_hash, new_mode);
5516
5517 /* Put the new source in the hash table if it isn't
5518 already. */
5519 if (src_elt == 0)
5520 {
5521 if (insert_regs (new_src, classp, 0))
5522 {
5523 rehash_using_reg (new_src);
5524 src_hash = HASH (new_src, new_mode);
5525 }
5526 src_elt = insert (new_src, classp, src_hash, new_mode);
5527 src_elt->in_memory = elt->in_memory;
5528 }
5529 else if (classp && classp != src_elt->first_same_value)
5530 /* Show that two things that we've seen before are
5531 actually the same. */
5532 merge_equiv_classes (src_elt, classp);
5533
5534 classp = src_elt->first_same_value;
5535 /* Ignore invalid entries. */
5536 while (classp
5537 && !REG_P (classp->exp)
5538 && ! exp_equiv_p (classp->exp, classp->exp, 1, false))
5539 classp = classp->next_same_value;
5540 }
5541 }
5542 }
5543
5544 /* Special handling for (set REG0 REG1) where REG0 is the
5545 "cheapest", cheaper than REG1. After cse, REG1 will probably not
5546 be used in the sequel, so (if easily done) change this insn to
5547 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
5548 that computed their value. Then REG1 will become a dead store
5549 and won't cloud the situation for later optimizations.
5550
5551 Do not make this change if REG1 is a hard register, because it will
5552 then be used in the sequel and we may be changing a two-operand insn
5553 into a three-operand insn.
5554
5555 Also do not do this if we are operating on a copy of INSN. */
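/* For illustration only (pseudo-register numbers are invented): if
   (reg 200) is cheaper than pseudo (reg 300) and the stream is

       (set (reg 300) (plus:SI (reg 400) (const_int 1)))
       (set (reg 200) (reg 300))

   we change it to

       (set (reg 200) (plus:SI (reg 400) (const_int 1)))
       (set (reg 300) (reg 200))

   so the copy usually becomes a dead store that
   delete_trivially_dead_insns can remove.  */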
5556
5557 if (n_sets == 1 && sets[0].rtl && REG_P (SET_DEST (sets[0].rtl))
5558 && NEXT_INSN (PREV_INSN (insn)) == insn
5559 && REG_P (SET_SRC (sets[0].rtl))
5560 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
5561 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
5562 {
5563 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
5564 struct qty_table_elem *src_ent = &qty_table[src_q];
5565
5566 if (src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
5567 {
5568 /* Scan for the previous nonnote insn, but stop at a basic
5569 block boundary. */
5570 rtx prev = insn;
5571 rtx bb_head = BB_HEAD (BLOCK_FOR_INSN (insn));
5572 do
5573 {
5574 prev = PREV_INSN (prev);
5575 }
5576 while (prev != bb_head && NOTE_P (prev));
5577
5578 /* Do not swap the registers around if the previous instruction
5579 attaches a REG_EQUIV note to REG1.
5580
5581 ??? It's not entirely clear whether we can transfer a REG_EQUIV
5582 from the pseudo that originally shadowed an incoming argument
5583 to another register. Some uses of REG_EQUIV might rely on it
5584 being attached to REG1 rather than REG2.
5585
5586 This section previously turned the REG_EQUIV into a REG_EQUAL
5587 note. We cannot do that because REG_EQUIV may provide an
5588 uninitialized stack slot when REG_PARM_STACK_SPACE is used. */
5589 if (NONJUMP_INSN_P (prev)
5590 && GET_CODE (PATTERN (prev)) == SET
5591 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
5592 && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
5593 {
5594 rtx dest = SET_DEST (sets[0].rtl);
5595 rtx src = SET_SRC (sets[0].rtl);
5596 rtx note;
5597
5598 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
5599 validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
5600 validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
5601 apply_change_group ();
5602
5603 /* If INSN has a REG_EQUAL note, and this note mentions
5604 REG0, then we must delete it, because the value in
5605 REG0 has changed. If the note's value is REG1, we must
5606 also delete it because that is now this insn's dest. */
5607 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5608 if (note != 0
5609 && (reg_mentioned_p (dest, XEXP (note, 0))
5610 || rtx_equal_p (src, XEXP (note, 0))))
5611 remove_note (insn, note);
5612 }
5613 }
5614 }
5615
5616 done:;
5617 }
5618 \f
5619 /* Remove from the hash table all expressions that reference memory. */
5620
5621 static void
5622 invalidate_memory (void)
5623 {
5624 int i;
5625 struct table_elt *p, *next;
5626
5627 for (i = 0; i < HASH_SIZE; i++)
5628 for (p = table[i]; p; p = next)
5629 {
5630 next = p->next_same_hash;
5631 if (p->in_memory)
5632 remove_from_table (p, i);
5633 }
5634 }
5635
5636 /* Perform invalidation on the basis of everything about an insn
5637 except for invalidating the actual places that are SET in it.
5638 This includes the places CLOBBERed, and anything that might
5639 alias with something that is SET or CLOBBERed.
5640
5641 X is the pattern of the insn. */
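/* For example (register numbers are hypothetical), for the pattern

       (parallel [(set (reg:SI 100) (plus:SI (reg:SI 101) (reg:SI 102)))
                  (clobber (reg:CC 17))])

   this function invalidates anything the hash table knows about the
   clobbered register 17; the SET destination itself is invalidated
   separately by cse_insn.  */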
5642
5643 static void
5644 invalidate_from_clobbers (rtx x)
5645 {
5646 if (GET_CODE (x) == CLOBBER)
5647 {
5648 rtx ref = XEXP (x, 0);
5649 if (ref)
5650 {
5651 if (REG_P (ref) || GET_CODE (ref) == SUBREG
5652 || MEM_P (ref))
5653 invalidate (ref, VOIDmode);
5654 else if (GET_CODE (ref) == STRICT_LOW_PART
5655 || GET_CODE (ref) == ZERO_EXTRACT)
5656 invalidate (XEXP (ref, 0), GET_MODE (ref));
5657 }
5658 }
5659 else if (GET_CODE (x) == PARALLEL)
5660 {
5661 int i;
5662 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5663 {
5664 rtx y = XVECEXP (x, 0, i);
5665 if (GET_CODE (y) == CLOBBER)
5666 {
5667 rtx ref = XEXP (y, 0);
5668 if (REG_P (ref) || GET_CODE (ref) == SUBREG
5669 || MEM_P (ref))
5670 invalidate (ref, VOIDmode);
5671 else if (GET_CODE (ref) == STRICT_LOW_PART
5672 || GET_CODE (ref) == ZERO_EXTRACT)
5673 invalidate (XEXP (ref, 0), GET_MODE (ref));
5674 }
5675 }
5676 }
5677 }
5678 \f
5679 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
5680 and replace any registers in them with either an equivalent constant
5681 or the canonical form of the register. If we are inside an address,
5682 only do this if the address remains valid.
5683
5684 OBJECT is 0 except when within a MEM in which case it is the MEM.
5685
5686 Return the replacement for X. */
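/* A hypothetical example: if the quantity table says (reg:SI 100)
   currently holds (const_int 42), a REG_EQUAL note of
   (plus:SI (reg:SI 100) (reg:SI 101)) has the constant substituted for
   reg 100 and reg 101 replaced by its canonical equivalent register,
   keeping notes in a canonical form for later duplicate detection.  */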
5687
5688 static rtx
5689 cse_process_notes_1 (rtx x, rtx object, bool *changed)
5690 {
5691 enum rtx_code code = GET_CODE (x);
5692 const char *fmt = GET_RTX_FORMAT (code);
5693 int i;
5694
5695 switch (code)
5696 {
5697 case CONST_INT:
5698 case CONST:
5699 case SYMBOL_REF:
5700 case LABEL_REF:
5701 case CONST_DOUBLE:
5702 case CONST_FIXED:
5703 case CONST_VECTOR:
5704 case PC:
5705 case CC0:
5706 case LO_SUM:
5707 return x;
5708
5709 case MEM:
5710 validate_change (x, &XEXP (x, 0),
5711 cse_process_notes (XEXP (x, 0), x, changed), 0);
5712 return x;
5713
5714 case EXPR_LIST:
5715 case INSN_LIST:
5716 if (REG_NOTE_KIND (x) == REG_EQUAL)
5717 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX, changed);
5718 if (XEXP (x, 1))
5719 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX, changed);
5720 return x;
5721
5722 case SIGN_EXTEND:
5723 case ZERO_EXTEND:
5724 case SUBREG:
5725 {
5726 rtx new_rtx = cse_process_notes (XEXP (x, 0), object, changed);
5727 /* We don't substitute VOIDmode constants into these rtx,
5728 since they would impede folding. */
5729 if (GET_MODE (new_rtx) != VOIDmode)
5730 validate_change (object, &XEXP (x, 0), new_rtx, 0);
5731 return x;
5732 }
5733
5734 case REG:
5735 i = REG_QTY (REGNO (x));
5736
5737 /* Return a constant or a constant register. */
5738 if (REGNO_QTY_VALID_P (REGNO (x)))
5739 {
5740 struct qty_table_elem *ent = &qty_table[i];
5741
5742 if (ent->const_rtx != NULL_RTX
5743 && (CONSTANT_P (ent->const_rtx)
5744 || REG_P (ent->const_rtx)))
5745 {
5746 rtx new_rtx = gen_lowpart (GET_MODE (x), ent->const_rtx);
5747 if (new_rtx)
5748 return copy_rtx (new_rtx);
5749 }
5750 }
5751
5752 /* Otherwise, canonicalize this register. */
5753 return canon_reg (x, NULL_RTX);
5754
5755 default:
5756 break;
5757 }
5758
5759 for (i = 0; i < GET_RTX_LENGTH (code); i++)
5760 if (fmt[i] == 'e')
5761 validate_change (object, &XEXP (x, i),
5762 cse_process_notes (XEXP (x, i), object, changed), 0);
5763
5764 return x;
5765 }
5766
5767 static rtx
5768 cse_process_notes (rtx x, rtx object, bool *changed)
5769 {
5770 rtx new_rtx = cse_process_notes_1 (x, object, changed);
5771 if (new_rtx != x)
5772 *changed = true;
5773 return new_rtx;
5774 }
5775
5776 \f
5777 /* Find a path in the CFG, starting with FIRST_BB to perform CSE on.
5778
5779 DATA is a pointer to a struct cse_basic_block_data, that is used to
5780 describe the path.
5781 It is filled with a queue of basic blocks, starting with FIRST_BB
5782 and following a trace through the CFG.
5783
5784 If all paths starting at FIRST_BB have been followed, or no new path
5785 starting at FIRST_BB can be constructed, this function returns FALSE.
5786 Otherwise, DATA->path is filled and the function returns TRUE indicating
5787 that a path to follow was found.
5788
5789 If FOLLOW_JUMPS is false, the maximum path length is 1 and the only
5790 block in the path will be FIRST_BB. */
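/* A sketch of how successive calls enumerate paths (block numbers are
   hypothetical): if block 2 ends in a conditional jump whose branch
   edge leads to block 5 and whose fallthru edge leads to block 3, each
   with a single predecessor, a first call may build the path
   [2, 5, ...] along the branch edge; the next call backtracks and
   builds [2, 3, ...] along the fallthru edge; once no new path
   starting at block 2 can be constructed, the call returns FALSE.  */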
5791
5792 static bool
5793 cse_find_path (basic_block first_bb, struct cse_basic_block_data *data,
5794 int follow_jumps)
5795 {
5796 basic_block bb;
5797 edge e;
5798 int path_size;
5799
5800 SET_BIT (cse_visited_basic_blocks, first_bb->index);
5801
5802 /* See if there is a previous path. */
5803 path_size = data->path_size;
5804
5805 /* There is a previous path. Make sure it started with FIRST_BB. */
5806 if (path_size)
5807 gcc_assert (data->path[0].bb == first_bb);
5808
5809 /* There was only one basic block in the last path. Clear the path and
5810 return, so that paths starting at another basic block can be tried. */
5811 if (path_size == 1)
5812 {
5813 path_size = 0;
5814 goto done;
5815 }
5816
5817 /* If the path was empty from the beginning, construct a new path. */
5818 if (path_size == 0)
5819 data->path[path_size++].bb = first_bb;
5820 else
5821 {
5822 /* Otherwise, path_size must be equal to or greater than 2, because
5823 a previous path exists that is at least two basic blocks long.
5824
5825 Update the previous branch path, if any. If the last branch was
5826 previously along the branch edge, take the fallthrough edge now. */
5827 while (path_size >= 2)
5828 {
5829 basic_block last_bb_in_path, previous_bb_in_path;
5830 edge e;
5831
5832 --path_size;
5833 last_bb_in_path = data->path[path_size].bb;
5834 previous_bb_in_path = data->path[path_size - 1].bb;
5835
5836 /* If we previously followed a path along the branch edge, try
5837 the fallthru edge now. */
5838 if (EDGE_COUNT (previous_bb_in_path->succs) == 2
5839 && any_condjump_p (BB_END (previous_bb_in_path))
5840 && (e = find_edge (previous_bb_in_path, last_bb_in_path))
5841 && e == BRANCH_EDGE (previous_bb_in_path))
5842 {
5843 bb = FALLTHRU_EDGE (previous_bb_in_path)->dest;
5844 if (bb != EXIT_BLOCK_PTR
5845 && single_pred_p (bb)
5846 /* We used to assert here that we would only see blocks
5847 that we have not visited yet. But we may end up
5848 visiting basic blocks twice if the CFG has changed
5849 in this run of cse_main, because when the CFG changes
5850 the topological sort of the CFG also changes. A basic
5851 block that previously had more than one predecessor
5852 may now have a single predecessor, and become part of
5853 a path that starts at another basic block.
5854
5855 We still want to visit each basic block only once, so
5856 halt the path here if we have already visited BB. */
5857 && !TEST_BIT (cse_visited_basic_blocks, bb->index))
5858 {
5859 SET_BIT (cse_visited_basic_blocks, bb->index);
5860 data->path[path_size++].bb = bb;
5861 break;
5862 }
5863 }
5864
5865 data->path[path_size].bb = NULL;
5866 }
5867
5868 /* If only one block remains in the path, bail. */
5869 if (path_size == 1)
5870 {
5871 path_size = 0;
5872 goto done;
5873 }
5874 }
5875
5876 /* Extend the path if possible. */
5877 if (follow_jumps)
5878 {
5879 bb = data->path[path_size - 1].bb;
5880 while (bb && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH))
5881 {
5882 if (single_succ_p (bb))
5883 e = single_succ_edge (bb);
5884 else if (EDGE_COUNT (bb->succs) == 2
5885 && any_condjump_p (BB_END (bb)))
5886 {
5887 /* First try to follow the branch. If that doesn't lead
5888 to a useful path, follow the fallthru edge. */
5889 e = BRANCH_EDGE (bb);
5890 if (!single_pred_p (e->dest))
5891 e = FALLTHRU_EDGE (bb);
5892 }
5893 else
5894 e = NULL;
5895
5896 if (e && e->dest != EXIT_BLOCK_PTR
5897 && single_pred_p (e->dest)
5898 /* Avoid visiting basic blocks twice. The large comment
5899 above explains why this can happen. */
5900 && !TEST_BIT (cse_visited_basic_blocks, e->dest->index))
5901 {
5902 basic_block bb2 = e->dest;
5903 SET_BIT (cse_visited_basic_blocks, bb2->index);
5904 data->path[path_size++].bb = bb2;
5905 bb = bb2;
5906 }
5907 else
5908 bb = NULL;
5909 }
5910 }
5911
5912 done:
5913 data->path_size = path_size;
5914 return path_size != 0;
5915 }
5916 \f
5917 /* Dump the path in DATA to file F. NSETS is the number of sets
5918 in the path. */
5919
5920 static void
5921 cse_dump_path (struct cse_basic_block_data *data, int nsets, FILE *f)
5922 {
5923 int path_entry;
5924
5925 fprintf (f, ";; Following path with %d sets: ", nsets);
5926 for (path_entry = 0; path_entry < data->path_size; path_entry++)
5927 fprintf (f, "%d ", (data->path[path_entry].bb)->index);
5928 fputc ('\n', f);
5929 fflush (f);
5930 }
5931
5932 \f
5933 /* Return true if BB has exception handling successor edges. */
5934
5935 static bool
5936 have_eh_succ_edges (basic_block bb)
5937 {
5938 edge e;
5939 edge_iterator ei;
5940
5941 FOR_EACH_EDGE (e, ei, bb->succs)
5942 if (e->flags & EDGE_EH)
5943 return true;
5944
5945 return false;
5946 }
5947
5948 \f
5949 /* Scan to the end of the path described by DATA. Store an estimate of
5950 the total number of SETs of all insns in the path in DATA->nsets. */
5951
5952 static void
5953 cse_prescan_path (struct cse_basic_block_data *data)
5954 {
5955 int nsets = 0;
5956 int path_size = data->path_size;
5957 int path_entry;
5958
5959 /* Scan to end of each basic block in the path. */
5960 for (path_entry = 0; path_entry < path_size; path_entry++)
5961 {
5962 basic_block bb;
5963 rtx insn;
5964
5965 bb = data->path[path_entry].bb;
5966
5967 FOR_BB_INSNS (bb, insn)
5968 {
5969 if (!INSN_P (insn))
5970 continue;
5971
5972 /* A PARALLEL can have lots of SETs in it,
5973 especially if it is really an ASM_OPERANDS. */
5974 if (GET_CODE (PATTERN (insn)) == PARALLEL)
5975 nsets += XVECLEN (PATTERN (insn), 0);
5976 else
5977 nsets += 1;
5978 }
5979 }
5980
5981 data->nsets = nsets;
5982 }
5983 \f
5984 /* Process a single extended basic block described by EBB_DATA. */
5985
5986 static void
5987 cse_extended_basic_block (struct cse_basic_block_data *ebb_data)
5988 {
5989 int path_size = ebb_data->path_size;
5990 int path_entry;
5991 int num_insns = 0;
5992
5993 /* Allocate the space needed by qty_table. */
5994 qty_table = XNEWVEC (struct qty_table_elem, max_qty);
5995
5996 new_basic_block ();
5997 cse_ebb_live_in = df_get_live_in (ebb_data->path[0].bb);
5998 cse_ebb_live_out = df_get_live_out (ebb_data->path[path_size - 1].bb);
5999 for (path_entry = 0; path_entry < path_size; path_entry++)
6000 {
6001 basic_block bb;
6002 rtx insn;
6003
6004 bb = ebb_data->path[path_entry].bb;
6005
6006 /* Invalidate recorded information for eh regs if there is an EH
6007 edge pointing to that bb. */
6008 if (bb_has_eh_pred (bb))
6009 {
6010 struct df_ref **def_rec;
6011
6012 for (def_rec = df_get_artificial_defs (bb->index); *def_rec; def_rec++)
6013 {
6014 struct df_ref *def = *def_rec;
6015 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
6016 invalidate (DF_REF_REG (def), GET_MODE (DF_REF_REG (def)));
6017 }
6018 }
6019
6020 FOR_BB_INSNS (bb, insn)
6021 {
6022 optimize_this_for_speed_p = optimize_bb_for_speed_p (bb);
6023 /* If we have processed too many insns (see PARAM_MAX_CSE_INSNS), flush
6024 the hash table to avoid extreme quadratic behavior. We must not include NOTEs
6025 in the count since there may be more of them when generating
6026 debugging information. If we clear the table at different
6027 times, code generated with -g -O might be different than code
6028 generated with -O but not -g.
6029
6030 FIXME: This is a real kludge and needs to be done some other
6031 way. */
6032 if (INSN_P (insn)
6033 && num_insns++ > PARAM_VALUE (PARAM_MAX_CSE_INSNS))
6034 {
6035 flush_hash_table ();
6036 num_insns = 0;
6037 }
6038
6039 if (INSN_P (insn))
6040 {
6041 /* Process notes first so we have all notes in canonical forms
6042 when looking for duplicate operations. */
6043 if (REG_NOTES (insn))
6044 {
6045 bool changed = false;
6046 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn),
6047 NULL_RTX, &changed);
6048 if (changed)
6049 df_notes_rescan (insn);
6050 }
6051
6052 cse_insn (insn);
6053
6054 /* If we haven't already found an insn where we added a LABEL_REF,
6055 check this one. */
6056 if (INSN_P (insn) && !recorded_label_ref
6057 && for_each_rtx (&PATTERN (insn), check_for_label_ref,
6058 (void *) insn))
6059 recorded_label_ref = true;
6060
6061 #ifdef HAVE_cc0
6062 /* If the previous insn set CC0 and this insn no longer
6063 references CC0, delete the previous insn. Here we use the
6064 fact that nothing expects CC0 to be valid over an insn,
6065 which is true until the final pass. */
6066 {
6067 rtx prev_insn, tem;
6068
6069 prev_insn = PREV_INSN (insn);
6070 if (prev_insn && NONJUMP_INSN_P (prev_insn)
6071 && (tem = single_set (prev_insn)) != 0
6072 && SET_DEST (tem) == cc0_rtx
6073 && ! reg_mentioned_p (cc0_rtx, PATTERN (insn)))
6074 delete_insn (prev_insn);
6075 }
6076
6077 /* If this insn is not the last insn in the basic block,
6078 it will be PREV_INSN (insn) in the next iteration. If
6079 we recorded any CC0-related information for this insn,
6080 remember it. */
6081 if (insn != BB_END (bb))
6082 {
6083 prev_insn_cc0 = this_insn_cc0;
6084 prev_insn_cc0_mode = this_insn_cc0_mode;
6085 }
6086 #endif
6087 }
6088 }
6089
6090 /* With non-call exceptions, we are not always able to update
6091 the CFG properly inside cse_insn. So clean up possibly
6092 redundant EH edges here. */
6093 if (flag_non_call_exceptions && have_eh_succ_edges (bb))
6094 cse_cfg_altered |= purge_dead_edges (bb);
6095
6096 /* If we changed a conditional jump, we may have terminated
6097 the path we are following. Check that by verifying that
6098 the edge we would take still exists. If the edge does
6099 not exist anymore, purge the remainder of the path.
6100 Note that this will cause us to return to the caller. */
6101 if (path_entry < path_size - 1)
6102 {
6103 basic_block next_bb = ebb_data->path[path_entry + 1].bb;
6104 if (!find_edge (bb, next_bb))
6105 {
6106 do
6107 {
6108 path_size--;
6109
6110 /* If we truncate the path, we must also reset the
6111 visited bit on the remaining blocks in the path,
6112 or we will never visit them at all. */
6113 RESET_BIT (cse_visited_basic_blocks,
6114 ebb_data->path[path_size].bb->index);
6115 ebb_data->path[path_size].bb = NULL;
6116 }
6117 while (path_size - 1 != path_entry);
6118 ebb_data->path_size = path_size;
6119 }
6120 }
6121
6122 /* If this is a conditional jump insn, record any known
6123 equivalences due to the condition being tested. */
6124 insn = BB_END (bb);
6125 if (path_entry < path_size - 1
6126 && JUMP_P (insn)
6127 && single_set (insn)
6128 && any_condjump_p (insn))
6129 {
6130 basic_block next_bb = ebb_data->path[path_entry + 1].bb;
6131 bool taken = (next_bb == BRANCH_EDGE (bb)->dest);
6132 record_jump_equiv (insn, taken);
6133 }
6134
6135 #ifdef HAVE_cc0
6136 /* Clear the CC0-tracking state; it can't provide
6137 useful information across basic block boundaries. */
6138 prev_insn_cc0 = 0;
6139 #endif
6140 }
6141
6142 gcc_assert (next_qty <= max_qty);
6143
6144 free (qty_table);
6145 }
6146
6147 \f
6148 /* Perform cse on the instructions of a function.
6149 F is the first instruction.
6150 NREGS is one plus the highest pseudo-reg number used in the function.
6151
6152 Return 2 if jump optimizations should be redone due to simplifications
6153 in conditional jump instructions.
6154 Return 1 if the CFG should be cleaned up because it has been modified.
6155 Return 0 otherwise. */
6156
6157 int
6158 cse_main (rtx f ATTRIBUTE_UNUSED, int nregs)
6159 {
6160 struct cse_basic_block_data ebb_data;
6161 basic_block bb;
6162 int *rc_order = XNEWVEC (int, last_basic_block);
6163 int i, n_blocks;
6164
6165 df_set_flags (DF_LR_RUN_DCE);
6166 df_analyze ();
6167 df_set_flags (DF_DEFER_INSN_RESCAN);
6168
6169 reg_scan (get_insns (), max_reg_num ());
6170 init_cse_reg_info (nregs);
6171
6172 ebb_data.path = XNEWVEC (struct branch_path,
6173 PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6174
6175 cse_cfg_altered = false;
6176 cse_jumps_altered = false;
6177 recorded_label_ref = false;
6178 constant_pool_entries_cost = 0;
6179 constant_pool_entries_regcost = 0;
6180 ebb_data.path_size = 0;
6181 ebb_data.nsets = 0;
6182 rtl_hooks = cse_rtl_hooks;
6183
6184 init_recog ();
6185 init_alias_analysis ();
6186
6187 reg_eqv_table = XNEWVEC (struct reg_eqv_elem, nregs);
6188
6189 /* Set up the table of already visited basic blocks. */
6190 cse_visited_basic_blocks = sbitmap_alloc (last_basic_block);
6191 sbitmap_zero (cse_visited_basic_blocks);
6192
6193 /* Loop over basic blocks in reverse completion order (RPO),
6194 excluding the ENTRY and EXIT blocks. */
6195 n_blocks = pre_and_rev_post_order_compute (NULL, rc_order, false);
6196 i = 0;
6197 while (i < n_blocks)
6198 {
6199 /* Find the first block in the RPO queue that we have not yet
6200 processed before. */
6201 do
6202 {
6203 bb = BASIC_BLOCK (rc_order[i++]);
6204 }
6205 while (TEST_BIT (cse_visited_basic_blocks, bb->index)
6206 && i < n_blocks);
6207
6208 /* Find all paths starting with BB, and process them. */
6209 while (cse_find_path (bb, &ebb_data, flag_cse_follow_jumps))
6210 {
6211 /* Pre-scan the path. */
6212 cse_prescan_path (&ebb_data);
6213
6214 /* If this path has no sets, skip it. */
6215 if (ebb_data.nsets == 0)
6216 continue;
6217
6218 /* Get a reasonable estimate for the maximum number of qty's
6219 needed for this path. For this, we take the number of sets
6220 and multiply that by MAX_RECOG_OPERANDS. */
6221 max_qty = ebb_data.nsets * MAX_RECOG_OPERANDS;
6222
6223 /* Dump the path we're about to process. */
6224 if (dump_file)
6225 cse_dump_path (&ebb_data, ebb_data.nsets, dump_file);
6226
6227 cse_extended_basic_block (&ebb_data);
6228 }
6229 }
6230
6231 /* Clean up. */
6232 end_alias_analysis ();
6233 free (reg_eqv_table);
6234 free (ebb_data.path);
6235 sbitmap_free (cse_visited_basic_blocks);
6236 free (rc_order);
6237 rtl_hooks = general_rtl_hooks;
6238
6239 if (cse_jumps_altered || recorded_label_ref)
6240 return 2;
6241 else if (cse_cfg_altered)
6242 return 1;
6243 else
6244 return 0;
6245 }
6246 \f
6247 /* Called via for_each_rtx to see if an insn is using a LABEL_REF for
6248 which there isn't a REG_LABEL_OPERAND note.
6249 Return one if so. DATA is the insn. */
6250
6251 static int
6252 check_for_label_ref (rtx *rtl, void *data)
6253 {
6254 rtx insn = (rtx) data;
6255
6256 /* If this insn uses a LABEL_REF and there isn't a REG_LABEL_OPERAND
6257 note for it, we must rerun jump since it needs to place the note. If
6258 this is a LABEL_REF for a CODE_LABEL that isn't in the insn chain,
6259 don't do this since no REG_LABEL_OPERAND will be added. */
6260 return (GET_CODE (*rtl) == LABEL_REF
6261 && ! LABEL_REF_NONLOCAL_P (*rtl)
6262 && (!JUMP_P (insn)
6263 || !label_is_jump_target_p (XEXP (*rtl, 0), insn))
6264 && LABEL_P (XEXP (*rtl, 0))
6265 && INSN_UID (XEXP (*rtl, 0)) != 0
6266 && ! find_reg_note (insn, REG_LABEL_OPERAND, XEXP (*rtl, 0)));
6267 }
6268 \f
6269 /* Count the number of times registers are used (not set) in X.
6270 COUNTS is an array in which we accumulate the count, INCR is how much
6271 we count each register usage.
6272
6273 Don't count a usage of DEST, which is the SET_DEST of a SET which
6274 contains X in its SET_SRC. This is because such a SET does not
6275 modify the liveness of DEST.
6276 DEST is set to pc_rtx for a trapping insn, which means that we must count
6277 uses of a SET_DEST regardless because the insn can't be deleted here. */
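/* For instance, given the hypothetical insn

       (set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 1)))

   the use of (reg:SI 100) in the source is not counted, since it only
   feeds the register's own new value; if nothing else uses reg 100 its
   count stays zero and the insn can still be treated as dead.  */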
6278
6279 static void
6280 count_reg_usage (rtx x, int *counts, rtx dest, int incr)
6281 {
6282 enum rtx_code code;
6283 rtx note;
6284 const char *fmt;
6285 int i, j;
6286
6287 if (x == 0)
6288 return;
6289
6290 switch (code = GET_CODE (x))
6291 {
6292 case REG:
6293 if (x != dest)
6294 counts[REGNO (x)] += incr;
6295 return;
6296
6297 case PC:
6298 case CC0:
6299 case CONST:
6300 case CONST_INT:
6301 case CONST_DOUBLE:
6302 case CONST_FIXED:
6303 case CONST_VECTOR:
6304 case SYMBOL_REF:
6305 case LABEL_REF:
6306 return;
6307
6308 case CLOBBER:
6309 /* If we are clobbering a MEM, mark any registers inside the address
6310 as being used. */
6311 if (MEM_P (XEXP (x, 0)))
6312 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
6313 return;
6314
6315 case SET:
6316 /* Unless we are setting a REG, count everything in SET_DEST. */
6317 if (!REG_P (SET_DEST (x)))
6318 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
6319 count_reg_usage (SET_SRC (x), counts,
6320 dest ? dest : SET_DEST (x),
6321 incr);
6322 return;
6323
6324 case CALL_INSN:
6325 case INSN:
6326 case JUMP_INSN:
6327 /* We expect dest to be NULL_RTX here. If the insn may trap, mark
6328 this fact by setting DEST to pc_rtx. */
6329 if (flag_non_call_exceptions && may_trap_p (PATTERN (x)))
6330 dest = pc_rtx;
6331 if (code == CALL_INSN)
6332 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, dest, incr);
6333 count_reg_usage (PATTERN (x), counts, dest, incr);
6334
6335 /* Things used in a REG_EQUAL note aren't dead since the loop pass may try to
6336 use them. */
6337
6338 note = find_reg_equal_equiv_note (x);
6339 if (note)
6340 {
6341 rtx eqv = XEXP (note, 0);
6342
6343 if (GET_CODE (eqv) == EXPR_LIST)
6344 /* This REG_EQUAL note describes the result of a function call.
6345 Process all the arguments. */
6346 do
6347 {
6348 count_reg_usage (XEXP (eqv, 0), counts, dest, incr);
6349 eqv = XEXP (eqv, 1);
6350 }
6351 while (eqv && GET_CODE (eqv) == EXPR_LIST);
6352 else
6353 count_reg_usage (eqv, counts, dest, incr);
6354 }
6355 return;
6356
6357 case EXPR_LIST:
6358 if (REG_NOTE_KIND (x) == REG_EQUAL
6359 || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE)
6360 /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
6361 involving registers in the address. */
6362 || GET_CODE (XEXP (x, 0)) == CLOBBER)
6363 count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
6364
6365 count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
6366 return;
6367
6368 case ASM_OPERANDS:
6369 /* If the asm is volatile, then this insn cannot be deleted,
6370 and so the inputs *must* be live. */
6371 if (MEM_VOLATILE_P (x))
6372 dest = NULL_RTX;
6373 /* Iterate over just the inputs, not the constraints as well. */
6374 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
6375 count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, dest, incr);
6376 return;
6377
6378 case INSN_LIST:
6379 gcc_unreachable ();
6380
6381 default:
6382 break;
6383 }
6384
6385 fmt = GET_RTX_FORMAT (code);
6386 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6387 {
6388 if (fmt[i] == 'e')
6389 count_reg_usage (XEXP (x, i), counts, dest, incr);
6390 else if (fmt[i] == 'E')
6391 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6392 count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
6393 }
6394 }
6395 \f
6396 /* Return true if SET is live. */
6397 static bool
6398 set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0. */
6399 int *counts)
6400 {
6401 #ifdef HAVE_cc0
6402 rtx tem;
6403 #endif
6404
6405 if (set_noop_p (set))
6406 ;
6407
6408 #ifdef HAVE_cc0
6409 else if (GET_CODE (SET_DEST (set)) == CC0
6410 && !side_effects_p (SET_SRC (set))
6411 && ((tem = next_nonnote_insn (insn)) == 0
6412 || !INSN_P (tem)
6413 || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
6414 return false;
6415 #endif
6416 else if (!REG_P (SET_DEST (set))
6417 || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
6418 || counts[REGNO (SET_DEST (set))] != 0
6419 || side_effects_p (SET_SRC (set)))
6420 return true;
6421 return false;
6422 }
6423
6424 /* Return true if INSN is live. */
6425
6426 static bool
6427 insn_live_p (rtx insn, int *counts)
6428 {
6429 int i;
6430 if (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
6431 return true;
6432 else if (GET_CODE (PATTERN (insn)) == SET)
6433 return set_live_p (PATTERN (insn), insn, counts);
6434 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
6435 {
6436 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6437 {
6438 rtx elt = XVECEXP (PATTERN (insn), 0, i);
6439
6440 if (GET_CODE (elt) == SET)
6441 {
6442 if (set_live_p (elt, insn, counts))
6443 return true;
6444 }
6445 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
6446 return true;
6447 }
6448 return false;
6449 }
6450 else
6451 return true;
6452 }
6453
6454 /* Scan all the insns and delete any that are dead; i.e., they store a register
6455 that is never used or they copy a register to itself.
6456
6457 This is used to remove insns made obviously dead by cse, loop or other
6458 optimizations. It improves the heuristics in loop since it won't try to
6459 move dead invariants out of loops or make givs for dead quantities. The
6460 remaining passes of the compilation are also sped up. */
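/* For example (pseudo-register numbers invented), in

       (set (reg:SI 150) (plus:SI (reg:SI 151) (const_int 8)))
       ...
       (set (reg:SI 152) (reg:SI 152))

   the self-copy is a no-op and is deleted; if reg 150 is never used,
   the first insn only sets an unused register and is deleted as well,
   and the usage count of reg 151 is decremented so that the insn
   computing it may in turn become trivially dead.  */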
6461
6462 int
6463 delete_trivially_dead_insns (rtx insns, int nreg)
6464 {
6465 int *counts;
6466 rtx insn, prev;
6467 int ndead = 0;
6468
6469 timevar_push (TV_DELETE_TRIVIALLY_DEAD);
6470 /* First count the number of times each register is used. */
6471 counts = XCNEWVEC (int, nreg);
6472 for (insn = insns; insn; insn = NEXT_INSN (insn))
6473 if (INSN_P (insn))
6474 count_reg_usage (insn, counts, NULL_RTX, 1);
6475
6476 /* Go from the last insn to the first and delete insns that only set unused
6477 registers or copy a register to itself. As we delete an insn, remove
6478 usage counts for registers it uses.
6479
6480 The first jump optimization pass may leave a real insn as the last
6481 insn in the function. We must not skip that insn or we may end
6482 up deleting code that is not really dead. */
6483 for (insn = get_last_insn (); insn; insn = prev)
6484 {
6485 int live_insn = 0;
6486
6487 prev = PREV_INSN (insn);
6488 if (!INSN_P (insn))
6489 continue;
6490
6491 live_insn = insn_live_p (insn, counts);
6492
6493 /* If this is a dead insn, delete it and show registers in it aren't
6494 being used. */
6495
6496 if (! live_insn && dbg_cnt (delete_trivial_dead))
6497 {
6498 count_reg_usage (insn, counts, NULL_RTX, -1);
6499 delete_insn_and_edges (insn);
6500 ndead++;
6501 }
6502 }
6503
6504 if (dump_file && ndead)
6505 fprintf (dump_file, "Deleted %i trivially dead insns\n",
6506 ndead);
6507 /* Clean up. */
6508 free (counts);
6509 timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
6510 return ndead;
6511 }
6512
6513 /* This function is called via for_each_rtx. The argument, NEWREG, is
6514 a condition code register with the desired mode. If we are looking
6515 at the same register in a different mode, replace it with
6516 NEWREG. */
6517
6518 static int
6519 cse_change_cc_mode (rtx *loc, void *data)
6520 {
6521 struct change_cc_mode_args* args = (struct change_cc_mode_args*)data;
6522
6523 if (*loc
6524 && REG_P (*loc)
6525 && REGNO (*loc) == REGNO (args->newreg)
6526 && GET_MODE (*loc) != GET_MODE (args->newreg))
6527 {
6528 validate_change (args->insn, loc, args->newreg, 1);
6529
6530 return -1;
6531 }
6532 return 0;
6533 }
6534
6535 /* Change the mode of any reference to the register REGNO (NEWREG) to
6536 GET_MODE (NEWREG) in INSN. */
6537
6538 static void
6539 cse_change_cc_mode_insn (rtx insn, rtx newreg)
6540 {
6541 struct change_cc_mode_args args;
6542 int success;
6543
6544 if (!INSN_P (insn))
6545 return;
6546
6547 args.insn = insn;
6548 args.newreg = newreg;
6549
6550 for_each_rtx (&PATTERN (insn), cse_change_cc_mode, &args);
6551 for_each_rtx (&REG_NOTES (insn), cse_change_cc_mode, &args);
6552
6553 /* If the following assertion fails, there is most probably
6554 something wrong with the cc_modes_compatible back end function.
6555 CC modes can only be considered compatible if the insn, with the mode
6556 replaced by any of the compatible modes, can still be recognized. */
6557 success = apply_change_group ();
6558 gcc_assert (success);
6559 }
6560
6561 /* Change the mode of any reference to the register REGNO (NEWREG) to
6562 GET_MODE (NEWREG), starting at START. Stop before END. Stop at
6563 any instruction which modifies NEWREG. */
6564
6565 static void
6566 cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
6567 {
6568 rtx insn;
6569
6570 for (insn = start; insn != end; insn = NEXT_INSN (insn))
6571 {
6572 if (! INSN_P (insn))
6573 continue;
6574
6575 if (reg_set_p (newreg, insn))
6576 return;
6577
6578 cse_change_cc_mode_insn (insn, newreg);
6579 }
6580 }
6581
6582 /* BB is a basic block which finishes with CC_REG as a condition code
6583 register which is set to CC_SRC. Look through the successors of BB
6584 to find blocks which have a single predecessor (i.e., this one),
6585 and look through those blocks for an assignment to CC_REG which is
6586 equivalent to CC_SRC. CAN_CHANGE_MODE indicates whether we are
6587 permitted to change the mode of CC_SRC to a compatible mode. This
6588 returns VOIDmode if no equivalent assignments were found.
6589 Otherwise it returns the mode which CC_SRC should wind up with.
6590
6591 The main complexity in this function is handling the mode issues.
6592 We may have more than one duplicate which we can eliminate, and we
6593 try to find a mode which will work for multiple duplicates. */
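/* An illustrative example (register numbers and modes are
   hypothetical): if BB ends with

       (set (reg:CCZ 17) (compare:CCZ (reg:SI 100) (const_int 0)))

   followed by a conditional jump on (reg:CCZ 17), and a successor
   block having BB as its only predecessor starts with

       (set (reg:CC 17) (compare:CC (reg:SI 100) (const_int 0)))

   with reg 100 unchanged in between, the second comparison is a
   duplicate.  If targetm.cc_modes_compatible reports a mode that works
   for both (say CCmode), the first comparison is rewritten in that
   mode, the duplicate is deleted, and later uses of the register in
   the successor are switched to the new mode.  */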
6594
6595 static enum machine_mode
6596 cse_cc_succs (basic_block bb, rtx cc_reg, rtx cc_src, bool can_change_mode)
6597 {
6598 bool found_equiv;
6599 enum machine_mode mode;
6600 unsigned int insn_count;
6601 edge e;
6602 rtx insns[2];
6603 enum machine_mode modes[2];
6604 rtx last_insns[2];
6605 unsigned int i;
6606 rtx newreg;
6607 edge_iterator ei;
6608
6609 /* We expect to have two successors. Look at both before picking
6610 the final mode for the comparison. If we have more successors
6611 (i.e., some sort of table jump, although that seems unlikely),
6612 then we require all beyond the first two to use the same
6613 mode. */
6614
6615 found_equiv = false;
6616 mode = GET_MODE (cc_src);
6617 insn_count = 0;
6618 FOR_EACH_EDGE (e, ei, bb->succs)
6619 {
6620 rtx insn;
6621 rtx end;
6622
6623 if (e->flags & EDGE_COMPLEX)
6624 continue;
6625
6626 if (EDGE_COUNT (e->dest->preds) != 1
6627 || e->dest == EXIT_BLOCK_PTR)
6628 continue;
6629
6630 end = NEXT_INSN (BB_END (e->dest));
6631 for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
6632 {
6633 rtx set;
6634
6635 if (! INSN_P (insn))
6636 continue;
6637
6638 /* If CC_SRC is modified, we have to stop looking for
6639 something which uses it. */
6640 if (modified_in_p (cc_src, insn))
6641 break;
6642
6643 /* Check whether INSN sets CC_REG to CC_SRC. */
6644 set = single_set (insn);
6645 if (set
6646 && REG_P (SET_DEST (set))
6647 && REGNO (SET_DEST (set)) == REGNO (cc_reg))
6648 {
6649 bool found;
6650 enum machine_mode set_mode;
6651 enum machine_mode comp_mode;
6652
6653 found = false;
6654 set_mode = GET_MODE (SET_SRC (set));
6655 comp_mode = set_mode;
6656 if (rtx_equal_p (cc_src, SET_SRC (set)))
6657 found = true;
6658 else if (GET_CODE (cc_src) == COMPARE
6659 && GET_CODE (SET_SRC (set)) == COMPARE
6660 && mode != set_mode
6661 && rtx_equal_p (XEXP (cc_src, 0),
6662 XEXP (SET_SRC (set), 0))
6663 && rtx_equal_p (XEXP (cc_src, 1),
6664 XEXP (SET_SRC (set), 1)))
6665
6666 {
6667 comp_mode = targetm.cc_modes_compatible (mode, set_mode);
6668 if (comp_mode != VOIDmode
6669 && (can_change_mode || comp_mode == mode))
6670 found = true;
6671 }
6672
6673 if (found)
6674 {
6675 found_equiv = true;
6676 if (insn_count < ARRAY_SIZE (insns))
6677 {
6678 insns[insn_count] = insn;
6679 modes[insn_count] = set_mode;
6680 last_insns[insn_count] = end;
6681 ++insn_count;
6682
6683 if (mode != comp_mode)
6684 {
6685 gcc_assert (can_change_mode);
6686 mode = comp_mode;
6687
6688 /* The modified insn will be re-recognized later. */
6689 PUT_MODE (cc_src, mode);
6690 }
6691 }
6692 else
6693 {
6694 if (set_mode != mode)
6695 {
6696 /* We found a matching expression in the
6697 wrong mode, but we don't have room to
6698 store it in the array. Punt. This case
6699 should be rare. */
6700 break;
6701 }
6702 /* INSN sets CC_REG to a value equal to CC_SRC
6703 with the right mode. We can simply delete
6704 it. */
6705 delete_insn (insn);
6706 }
6707
6708 /* We found an instruction to delete. Keep looking,
6709 in the hopes of finding a three-way jump. */
6710 continue;
6711 }
6712
6713 /* We found an instruction which sets the condition
6714 code, so don't look any farther. */
6715 break;
6716 }
6717
6718 /* If INSN sets CC_REG in some other way, don't look any
6719 farther. */
6720 if (reg_set_p (cc_reg, insn))
6721 break;
6722 }
6723
6724 /* If we fell off the bottom of the block, we can keep looking
6725 through successors. We pass CAN_CHANGE_MODE as false because
6726 we aren't prepared to handle compatibility between the
6727 further blocks and this block. */
6728 if (insn == end)
6729 {
6730 enum machine_mode submode;
6731
6732 submode = cse_cc_succs (e->dest, cc_reg, cc_src, false);
6733 if (submode != VOIDmode)
6734 {
6735 gcc_assert (submode == mode);
6736 found_equiv = true;
6737 can_change_mode = false;
6738 }
6739 }
6740 }
6741
6742 if (! found_equiv)
6743 return VOIDmode;
6744
6745 /* Now INSN_COUNT is the number of instructions we found which set
6746 CC_REG to a value equivalent to CC_SRC. The instructions are in
6747 INSNS. The modes used by those instructions are in MODES. */
6748
6749 newreg = NULL_RTX;
6750 for (i = 0; i < insn_count; ++i)
6751 {
6752 if (modes[i] != mode)
6753 {
6754 /* We need to change the mode of CC_REG in INSNS[i] and
6755 subsequent instructions. */
6756 if (! newreg)
6757 {
6758 if (GET_MODE (cc_reg) == mode)
6759 newreg = cc_reg;
6760 else
6761 newreg = gen_rtx_REG (mode, REGNO (cc_reg));
6762 }
6763 cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
6764 newreg);
6765 }
6766
6767 delete_insn_and_edges (insns[i]);
6768 }
6769
6770 return mode;
6771 }
6772
6773 /* If we have a fixed condition code register (or two), walk through
6774 the instructions and try to eliminate duplicate assignments. */
6775
6776 static void
6777 cse_condition_code_reg (void)
6778 {
6779 unsigned int cc_regno_1;
6780 unsigned int cc_regno_2;
6781 rtx cc_reg_1;
6782 rtx cc_reg_2;
6783 basic_block bb;
6784
6785 if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
6786 return;
6787
6788 cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
6789 if (cc_regno_2 != INVALID_REGNUM)
6790 cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
6791 else
6792 cc_reg_2 = NULL_RTX;
6793
6794 FOR_EACH_BB (bb)
6795 {
6796 rtx last_insn;
6797 rtx cc_reg;
6798 rtx insn;
6799 rtx cc_src_insn;
6800 rtx cc_src;
6801 enum machine_mode mode;
6802 enum machine_mode orig_mode;
6803
6804 /* Look for blocks which end with a conditional jump based on a
6805 condition code register. Then look for the instruction which
6806 sets the condition code register. Then look through the
6807 successor blocks for instructions which set the condition
6808 code register to the same value. There are other possible
6809 uses of the condition code register, but these are by far the
6810 most common and the ones which we are most likely to be able
6811 to optimize. */
6812
6813 last_insn = BB_END (bb);
6814 if (!JUMP_P (last_insn))
6815 continue;
6816
6817 if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
6818 cc_reg = cc_reg_1;
6819 else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
6820 cc_reg = cc_reg_2;
6821 else
6822 continue;
6823
6824 cc_src_insn = NULL_RTX;
6825 cc_src = NULL_RTX;
6826 for (insn = PREV_INSN (last_insn);
6827 insn && insn != PREV_INSN (BB_HEAD (bb));
6828 insn = PREV_INSN (insn))
6829 {
6830 rtx set;
6831
6832 if (! INSN_P (insn))
6833 continue;
6834 set = single_set (insn);
6835 if (set
6836 && REG_P (SET_DEST (set))
6837 && REGNO (SET_DEST (set)) == REGNO (cc_reg))
6838 {
6839 cc_src_insn = insn;
6840 cc_src = SET_SRC (set);
6841 break;
6842 }
6843 else if (reg_set_p (cc_reg, insn))
6844 break;
6845 }
6846
6847 if (! cc_src_insn)
6848 continue;
6849
6850 if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
6851 continue;
6852
6853 /* Now CC_REG is a condition code register used for a
6854 conditional jump at the end of the block, and CC_SRC, in
6855 CC_SRC_INSN, is the value to which that condition code
6856 register is set, and CC_SRC is still meaningful at the end of
6857 the basic block. */
6858
6859 orig_mode = GET_MODE (cc_src);
6860 mode = cse_cc_succs (bb, cc_reg, cc_src, true);
6861 if (mode != VOIDmode)
6862 {
6863 gcc_assert (mode == GET_MODE (cc_src));
6864 if (mode != orig_mode)
6865 {
6866 rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));
6867
6868 cse_change_cc_mode_insn (cc_src_insn, newreg);
6869
6870 /* Do the same in the following insns that use the
6871 current value of CC_REG within BB. */
6872 cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
6873 NEXT_INSN (last_insn),
6874 newreg);
6875 }
6876 }
6877 }
6878 }
6879 \f
6880
6881 /* Perform common subexpression elimination. Nonzero value from
6882 `cse_main' means that jumps were simplified and some code may now
6883 be unreachable, so do jump optimization again. */
6884 static bool
6885 gate_handle_cse (void)
6886 {
6887 return optimize > 0;
6888 }
6889
6890 static unsigned int
6891 rest_of_handle_cse (void)
6892 {
6893 int tem;
6894
6895 if (dump_file)
6896 dump_flow_info (dump_file, dump_flags);
6897
6898 tem = cse_main (get_insns (), max_reg_num ());
6899
6900 /* If we are not running more CSE passes, then we are no longer
6901 expecting CSE to be run. But always rerun it in a cheap mode. */
6902 cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;
6903
6904 if (tem == 2)
6905 {
6906 timevar_push (TV_JUMP);
6907 rebuild_jump_labels (get_insns ());
6908 cleanup_cfg (0);
6909 timevar_pop (TV_JUMP);
6910 }
6911 else if (tem == 1 || optimize > 1)
6912 cleanup_cfg (0);
6913
6914 return 0;
6915 }
6916
6917 struct rtl_opt_pass pass_cse =
6918 {
6919 {
6920 RTL_PASS,
6921 "cse1", /* name */
6922 gate_handle_cse, /* gate */
6923 rest_of_handle_cse, /* execute */
6924 NULL, /* sub */
6925 NULL, /* next */
6926 0, /* static_pass_number */
6927 TV_CSE, /* tv_id */
6928 0, /* properties_required */
6929 0, /* properties_provided */
6930 0, /* properties_destroyed */
6931 0, /* todo_flags_start */
6932 TODO_df_finish | TODO_verify_rtl_sharing |
6933 TODO_dump_func |
6934 TODO_ggc_collect |
6935 TODO_verify_flow, /* todo_flags_finish */
6936 }
6937 };
6938
6939
6940 static bool
6941 gate_handle_cse2 (void)
6942 {
6943 return optimize > 0 && flag_rerun_cse_after_loop;
6944 }
6945
6946 /* Run second CSE pass after loop optimizations. */
6947 static unsigned int
6948 rest_of_handle_cse2 (void)
6949 {
6950 int tem;
6951
6952 if (dump_file)
6953 dump_flow_info (dump_file, dump_flags);
6954
6955 tem = cse_main (get_insns (), max_reg_num ());
6956
6957 /* Run a pass to eliminate duplicated assignments to condition code
6958 registers. We have to run this after bypass_jumps, because it
6959 makes it harder for that pass to determine whether a jump can be
6960 bypassed safely. */
6961 cse_condition_code_reg ();
6962
6963 delete_trivially_dead_insns (get_insns (), max_reg_num ());
6964
6965 if (tem == 2)
6966 {
6967 timevar_push (TV_JUMP);
6968 rebuild_jump_labels (get_insns ());
6969 cleanup_cfg (0);
6970 timevar_pop (TV_JUMP);
6971 }
6972 else if (tem == 1)
6973 cleanup_cfg (0);
6974
6975 cse_not_expected = 1;
6976 return 0;
6977 }
6978
6979
6980 struct rtl_opt_pass pass_cse2 =
6981 {
6982 {
6983 RTL_PASS,
6984 "cse2", /* name */
6985 gate_handle_cse2, /* gate */
6986 rest_of_handle_cse2, /* execute */
6987 NULL, /* sub */
6988 NULL, /* next */
6989 0, /* static_pass_number */
6990 TV_CSE2, /* tv_id */
6991 0, /* properties_required */
6992 0, /* properties_provided */
6993 0, /* properties_destroyed */
6994 0, /* todo_flags_start */
6995 TODO_df_finish | TODO_verify_rtl_sharing |
6996 TODO_dump_func |
6997 TODO_ggc_collect |
6998 TODO_verify_flow /* todo_flags_finish */
6999 }
7000 };
7001