1 /* Common subexpression elimination for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 /* stdio.h must precede rtl.h for FFS. */
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "regs.h"
31 #include "basic-block.h"
32 #include "flags.h"
33 #include "real.h"
34 #include "insn-config.h"
35 #include "recog.h"
36 #include "function.h"
37 #include "expr.h"
38 #include "toplev.h"
39 #include "output.h"
40 #include "ggc.h"
41 #include "timevar.h"
42 #include "except.h"
43 #include "target.h"
44 #include "params.h"
45 #include "rtlhooks-def.h"
46
47 /* The basic idea of common subexpression elimination is to go
48 through the code, keeping a record of expressions that would
49 have the same value at the current scan point, and replacing
50 expressions encountered with the cheapest equivalent expression.
51
52 It is too complicated to keep track of the different possibilities
53 when control paths merge in this code; so, at each label, we forget all
54 that is known and start fresh. This can be described as processing each
55 extended basic block separately. We have a separate pass to perform
56 global CSE.
57
58 Note CSE can turn a conditional or computed jump into a nop or
59 an unconditional jump. When this occurs we arrange to run the jump
60 optimizer after CSE to delete the unreachable code.
61
62 We use two data structures to record the equivalent expressions:
63 a hash table for most expressions, and a vector of "quantity
64 numbers" to record equivalent (pseudo) registers.
65
66 The use of the special data structure for registers is desirable
67 because it is faster. It is possible because register references
68 contain a fairly small number, the register number, taken from
69 a contiguously allocated series, and two register references are
70 identical if they have the same number. General expressions
71 do not have any such thing, so the only way to retrieve the
72 information recorded on an expression other than a register
73 is to keep it in a hash table.
74
75 Registers and "quantity numbers":
76
77 At the start of each basic block, all of the (hardware and pseudo)
78 registers used in the function are given distinct quantity
79 numbers to indicate their contents. During scan, when the code
80 copies one register into another, we copy the quantity number.
81 When a register is loaded in any other way, we allocate a new
82 quantity number to describe the value generated by this operation.
83 `reg_qty' records what quantity a register is currently thought
84 of as containing.
85
86 All real quantity numbers are greater than or equal to zero.
87 If register N has not been assigned a quantity, reg_qty[N] will
88 equal -N - 1, which is always negative.
89
90 Quantity numbers below zero do not exist and none of the `qty_table'
91 entries should be referenced with a negative index.
92
93 We also maintain a bidirectional chain of registers for each
94 quantity number. The `qty_table` members `first_reg' and `last_reg',
95 and `reg_eqv_table' members `next' and `prev' hold these chains.
96
97 The first register in a chain is the one whose lifespan is least local.
98 Among equals, it is the one that was seen first.
99 We replace any equivalent register with that one.
100
101 If two registers have the same quantity number, then REG expressions
102 for both registers, in the mode recorded in qty_table `mode', must be
103 in the hash table and must be in the same equivalence class.
104
105 The converse is not true. Since hard registers may be referenced in
106 any mode, two REG expressions might be equivalent in the hash table
107 but not have the same quantity number if the mode of the quantity
108 of one of the registers differs from the mode of those expressions.
109
110 Constants and quantity numbers
111
112 When a quantity has a known constant value, that value is stored
113 in the appropriate qty_table `const_rtx'. This is in addition to
114 putting the constant in the hash table as is usual for non-regs.
115
116 Whether a reg or a constant is preferred is determined by the configuration
117 macro CONST_COSTS and will often depend on the constant value. In any
118 event, expressions containing constants can be simplified by fold_rtx.
119
120 When a quantity has a known nearly constant value (such as an address
121 of a stack slot), that value is stored in the appropriate qty_table
122 `const_rtx'.
123
124 Integer constants don't have a machine mode. However, cse
125 determines the intended machine mode from the destination
126 of the instruction that moves the constant. The machine mode
127 is recorded in the hash table along with the actual RTL
128 constant expression so that different modes are kept separate.
129
130 Other expressions:
131
132 To record known equivalences among expressions in general
133 we use a hash table called `table'. It has a fixed number of buckets
134 that contain chains of `struct table_elt' elements for expressions.
135 These chains connect the elements whose expressions have the same
136 hash codes.
137
138 Other chains through the same elements connect the elements which
139 currently have equivalent values.
140
141 Register references in an expression are canonicalized before hashing
142 the expression. This is done using `reg_qty' and qty_table `first_reg'.
143 The hash code of a register reference is computed using the quantity
144 number, not the register number.
145
146 When the value of an expression changes, it is necessary to remove from the
147 hash table not just that expression but all expressions whose values
148 could be different as a result.
149
150 1. If the value changing is in memory, except in special cases
151 ANYTHING referring to memory could be changed. That is because
152 nobody knows where a pointer does not point.
153 The function `invalidate_memory' removes what is necessary.
154
155 The special cases are when the address is constant or is
156 a constant plus a fixed register such as the frame pointer
157 or a static chain pointer. When such addresses are stored in,
158 we can tell exactly which other such addresses must be invalidated
159 due to overlap. `invalidate' does this.
160 All expressions that refer to non-constant
161 memory addresses are also invalidated. `invalidate_memory' does this.
162
163 2. If the value changing is a register, all expressions
164 containing references to that register, and only those,
165 must be removed.
166
167 Because searching the entire hash table for expressions that contain
168 a register is very slow, we try to figure out when it isn't necessary.
169 Precisely, this is necessary only when expressions have been
170 entered in the hash table using this register, and then the value has
171 changed, and then another expression wants to be added to refer to
172 the register's new value. This sequence of circumstances is rare
173 within any one basic block.
174
175 The vectors `reg_tick' and `reg_in_table' are used to detect this case.
176 reg_tick[i] is incremented whenever a value is stored in register i.
177 reg_in_table[i] holds -1 if no references to register i have been
178 entered in the table; otherwise, it contains the value reg_tick[i] had
179 when the references were entered. If we want to enter a reference
180 and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
181 Until we want to enter a new entry, the mere fact that the two vectors
182 don't match causes such entries to be ignored if anyone tries to match them.
183
184 Registers themselves are entered in the hash table as well as in
185 the equivalent-register chains. However, the vectors `reg_tick'
186 and `reg_in_table' do not apply to expressions which are simple
187 register references. These expressions are removed from the table
188 immediately when they become invalid, and this can be done even if
189 we do not immediately search for all the expressions that refer to
190 the register.
191
192 A CLOBBER rtx in an instruction invalidates its operand for further
193 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
194 invalidates everything that resides in memory.
195
196 Related expressions:
197
198 Constant expressions that differ only by an additive integer
199 are called related. When a constant expression is put in
200 the table, the related expression with no constant term
201 is also entered. These are made to point at each other
202 so that it is possible to find out if there exists any
203 register equivalent to an expression related to a given expression. */
204
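/* A worked example of the scheme above (illustrative only; register
   numbers are invented).  After scanning

	(set (reg 101) (reg 100))
	(set (reg 102) (plus (reg 100) (const_int 4)))

   the copy gives reg 101 the same quantity number as reg 100, so a later
   (plus (reg 101) (const_int 4)) canonicalizes to the expression already
   in the hash table and can be replaced by (reg 102).  */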
205 /* One plus largest register number used in this function. */
206
207 static int max_reg;
208
209 /* One plus largest instruction UID used in this function at time of
210 cse_main call. */
211
212 static int max_insn_uid;
213
214 /* Length of qty_table vector. We know in advance we will not need
215 a quantity number this big. */
216
217 static int max_qty;
218
219 /* Next quantity number to be allocated.
220 This is 1 + the largest number needed so far. */
221
222 static int next_qty;
223
224 /* Per-qty information tracking.
225
226 `first_reg' and `last_reg' track the head and tail of the
227 chain of registers which currently contain this quantity.
228
229 `mode' contains the machine mode of this quantity.
230
231 `const_rtx' holds the rtx of the constant value of this
232 quantity, if known. A summation of the frame/arg pointer
233 and a constant can also be entered here. When this holds
234 a known value, `const_insn' is the insn which stored the
235 constant value.
236
237 `comparison_{code,const,qty}' are used to track when a
238 comparison between a quantity and some constant or register has
239 been passed. In such a case, we know the results of the comparison
240 in case we see it again. These members record a comparison that
241 is known to be true. `comparison_code' holds the rtx code of such
242 a comparison, else it is set to UNKNOWN and the other two
243 comparison members are undefined. `comparison_const' holds
244 the constant being compared against, or zero if the comparison
245 is not against a constant. `comparison_qty' holds the quantity
246 being compared against when the result is known. If the comparison
247 is not with a register, `comparison_qty' is -1. */
248
249 struct qty_table_elem
250 {
251 rtx const_rtx;
252 rtx const_insn;
253 rtx comparison_const;
254 int comparison_qty;
255 unsigned int first_reg, last_reg;
256 /* The sizes of these fields should match the sizes of the
257 code and mode fields of struct rtx_def (see rtl.h). */
258 ENUM_BITFIELD(rtx_code) comparison_code : 16;
259 ENUM_BITFIELD(machine_mode) mode : 8;
260 };
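/* Illustrative use of the comparison members above (register number
   invented): after following the taken arm of a branch on
   (gt (reg 100) (const_int 0)), the quantity of reg 100 records
   comparison_code = GT, comparison_const = const0_rtx and
   comparison_qty = -1, so a repetition of the same test can be folded
   to "true".  */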
261
262 /* The table of all qtys, indexed by qty number. */
263 static struct qty_table_elem *qty_table;
264
265 /* Structure used to pass arguments via for_each_rtx to function
266 cse_change_cc_mode. */
267 struct change_cc_mode_args
268 {
269 rtx insn;
270 rtx newreg;
271 };
272
273 #ifdef HAVE_cc0
274 /* For machines that have a CC0, we do not record its value in the hash
275 table since its use is guaranteed to be the insn immediately following
276 its definition and any other insn is presumed to invalidate it.
277
278 Instead, we store below the value last assigned to CC0. If it should
279 happen to be a constant, it is stored in preference to the actual
280 assigned value. In case it is a constant, we store the mode in which
281 the constant should be interpreted. */
282
283 static rtx prev_insn_cc0;
284 static enum machine_mode prev_insn_cc0_mode;
285
286 /* Previous actual insn. 0 if at first insn of basic block. */
287
288 static rtx prev_insn;
289 #endif
290
291 /* Insn being scanned. */
292
293 static rtx this_insn;
294
295 /* Index by register number, gives the number of the next (or
296 previous) register in the chain of registers sharing the same
297 value.
298
299 Or -1 if this register is at the end of the chain.
300
301 If reg_qty[N] == N, reg_eqv_table[N].next is undefined. */
302
303 /* Per-register equivalence chain. */
304 struct reg_eqv_elem
305 {
306 int next, prev;
307 };
308
309 /* The table of all register equivalence chains. */
310 static struct reg_eqv_elem *reg_eqv_table;
311
312 struct cse_reg_info
313 {
314 /* Next in hash chain. */
315 struct cse_reg_info *hash_next;
316
317 /* The next cse_reg_info structure in the free or used list. */
318 struct cse_reg_info *next;
319
320 /* Search key */
321 unsigned int regno;
322
323 /* The quantity number of the register's current contents. */
324 int reg_qty;
325
326 /* The number of times the register has been altered in the current
327 basic block. */
328 int reg_tick;
329
330 /* The REG_TICK value at which rtx's containing this register are
331 valid in the hash table. If this does not equal the current
332 reg_tick value, such expressions existing in the hash table are
333 invalid. */
334 int reg_in_table;
335
336 /* The SUBREG that was set when REG_TICK was last incremented. Set
337 to -1 if the last store was to the whole register, not a subreg. */
338 unsigned int subreg_ticked;
339 };
340
341 /* A free list of cse_reg_info entries. */
342 static struct cse_reg_info *cse_reg_info_free_list;
343
344 /* A used list of cse_reg_info entries. */
345 static struct cse_reg_info *cse_reg_info_used_list;
346 static struct cse_reg_info *cse_reg_info_used_list_end;
347
348 /* A mapping from registers to cse_reg_info data structures. */
349 #define REGHASH_SHIFT 7
350 #define REGHASH_SIZE (1 << REGHASH_SHIFT)
351 #define REGHASH_MASK (REGHASH_SIZE - 1)
352 static struct cse_reg_info *reg_hash[REGHASH_SIZE];
353
354 #define REGHASH_FN(REGNO) \
355 (((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)
356
357 /* The last lookup we did into the reg_hash table. This allows us
358 to cache repeated lookups. */
359 static unsigned int cached_regno;
360 static struct cse_reg_info *cached_cse_reg_info;
361
362 /* A HARD_REG_SET containing all the hard registers for which there is
363 currently a REG expression in the hash table. Note the difference
364 from the above variables, which indicate if the REG is mentioned in some
365 expression in the table. */
366
367 static HARD_REG_SET hard_regs_in_table;
368
369 /* CUID of insn that starts the basic block currently being cse-processed. */
370
371 static int cse_basic_block_start;
372
373 /* CUID of insn that ends the basic block currently being cse-processed. */
374
375 static int cse_basic_block_end;
376
377 /* Vector mapping INSN_UIDs to cuids.
378 The cuids are like uids but always increase monotonically.
379 We use them to see whether a reg is used outside a given basic block. */
380
381 static int *uid_cuid;
382
383 /* Highest UID in UID_CUID. */
384 static int max_uid;
385
386 /* Get the cuid of an insn. */
387
388 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
389
390 /* Nonzero if this pass has made changes, and therefore it's
391 worthwhile to run the garbage collector. */
392
393 static int cse_altered;
394
395 /* Nonzero if cse has altered conditional jump insns
396 in such a way that jump optimization should be redone. */
397
398 static int cse_jumps_altered;
399
400 /* Nonzero if we have put a LABEL_REF into the hash table for an INSN
401 without a REG_LABEL note; if so, we must rerun jump after CSE to put in the note. */
402 static int recorded_label_ref;
403
404 /* canon_hash stores 1 in do_not_record
405 if it notices a reference to CC0, PC, or some other volatile
406 subexpression. */
407
408 static int do_not_record;
409
410 /* canon_hash stores 1 in hash_arg_in_memory
411 if it notices a reference to memory within the expression being hashed. */
412
413 static int hash_arg_in_memory;
414
415 /* The hash table contains buckets which are chains of `struct table_elt's,
416 each recording one expression's information.
417 That expression is in the `exp' field.
418
419 The canon_exp field contains a canonical (from the point of view of
420 alias analysis) version of the `exp' field.
421
422 Those elements with the same hash code are chained in both directions
423 through the `next_same_hash' and `prev_same_hash' fields.
424
425 Each set of expressions with equivalent values
426 are on a two-way chain through the `next_same_value'
427 and `prev_same_value' fields, and all point with
428 the `first_same_value' field at the first element in
429 that chain. The chain is in order of increasing cost.
430 Each element's cost value is in its `cost' field.
431
432 The `in_memory' field is nonzero for elements that
433 involve any reference to memory. These elements are removed
434 whenever a write is done to an unidentified location in memory.
435 To be safe, we assume that a memory address is unidentified unless
436 the address is either a symbol constant or a constant plus
437 the frame pointer or argument pointer.
438
439 The `related_value' field is used to connect related expressions
440 (that differ by adding an integer).
441 The related expressions are chained in a circular fashion.
442 `related_value' is zero for expressions for which this
443 chain is not useful.
444
445 The `cost' field stores the cost of this element's expression.
446 The `regcost' field stores the value returned by approx_reg_cost for
447 this element's expression.
448
449 The `is_const' flag is set if the element is a constant (including
450 a fixed address).
451
452 The `flag' field is used as a temporary during some search routines.
453
454 The `mode' field is usually the same as GET_MODE (`exp'), but
455 if `exp' is a CONST_INT and has no machine mode then the `mode'
456 field is the mode it was being used as. Each constant is
457 recorded separately for each mode it is used with. */
458
459 struct table_elt
460 {
461 rtx exp;
462 rtx canon_exp;
463 struct table_elt *next_same_hash;
464 struct table_elt *prev_same_hash;
465 struct table_elt *next_same_value;
466 struct table_elt *prev_same_value;
467 struct table_elt *first_same_value;
468 struct table_elt *related_value;
469 int cost;
470 int regcost;
471 /* The size of this field should match the size
472 of the mode field of struct rtx_def (see rtl.h). */
473 ENUM_BITFIELD(machine_mode) mode : 8;
474 char in_memory;
475 char is_const;
476 char flag;
477 };
478
479 /* We don't want a lot of buckets, because we rarely have very many
480 things stored in the hash table, and a lot of buckets slows
481 down a lot of loops that happen frequently. */
482 #define HASH_SHIFT 5
483 #define HASH_SIZE (1 << HASH_SHIFT)
484 #define HASH_MASK (HASH_SIZE - 1)
485
486 /* Compute hash code of X in mode M. Special-case the case where X is a pseudo
487 register (hard registers may require `do_not_record' to be set). */
488
489 #define HASH(X, M) \
490 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
491 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
492 : canon_hash (X, M)) & HASH_MASK)
493
494 /* Like HASH, but without side-effects. */
495 #define SAFE_HASH(X, M) \
496 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
497 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
498 : safe_hash (X, M)) & HASH_MASK)
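/* A minimal sketch of how these macros combine with `lookup' below
   (hypothetical helper, not part of the pass; SAFE_HASH is used so
   that no do_not_record side effects occur):  */
#if 0
static struct table_elt *
lookup_sketch (rtx x, enum machine_mode mode)
{
  unsigned int hash = SAFE_HASH (x, mode);
  return lookup (x, hash, mode);
}
#endif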
499
500 /* Determine whether register number N is considered a fixed register for the
501 purpose of approximating register costs.
502 It is desirable to replace other regs with fixed regs, to reduce need for
503 non-fixed hard regs.
504 A reg wins if it is either the frame pointer or designated as fixed. */
505 #define FIXED_REGNO_P(N) \
506 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
507 || fixed_regs[N] || global_regs[N])
508
509 /* Compute cost of X, as stored in the `cost' field of a table_elt. All
510 registers are given a cost of 0 here; the finer ranking of fixed hard
511 regs, pseudos and other hard regs (0, 1 and 2 respectively) is computed
512 by approx_reg_cost and stored in `regcost'. Otherwise, call `rtx_cost'. */
513
514 #define CHEAP_REGNO(N) \
515 (REGNO_PTR_FRAME_P(N) \
516 || (HARD_REGISTER_NUM_P (N) \
517 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
518
519 #define COST(X) (REG_P (X) ? 0 : notreg_cost (X, SET))
520 #define COST_IN(X,OUTER) (REG_P (X) ? 0 : notreg_cost (X, OUTER))
521
522 /* Get the info associated with register N. */
523
524 #define GET_CSE_REG_INFO(N) \
525 (((N) == cached_regno && cached_cse_reg_info) \
526 ? cached_cse_reg_info : get_cse_reg_info ((N)))
527
528 /* Get the number of times this register has been updated in this
529 basic block. */
530
531 #define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)
532
533 /* Get the point at which REG was recorded in the table. */
534
535 #define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)
536
537 /* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
538 SUBREG). */
539
540 #define SUBREG_TICKED(N) ((GET_CSE_REG_INFO (N))->subreg_ticked)
541
542 /* Get the quantity number for REG. */
543
544 #define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)
545
546 /* Determine if the quantity number for register X represents a valid index
547 into the qty_table. */
548
549 #define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)
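/* Sketch of what the quantity mechanism buys us (hypothetical helper,
   assuming only the macros above): two registers are known to hold the
   same value exactly when both have valid quantities and those
   quantities are equal.  */
#if 0
static bool
regs_known_equiv_p (unsigned int a, unsigned int b)
{
  return (REGNO_QTY_VALID_P (a) && REGNO_QTY_VALID_P (b)
	  && REG_QTY (a) == REG_QTY (b));
}
#endif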
550
551 static struct table_elt *table[HASH_SIZE];
552
553 /* Chain of `struct table_elt's made so far for this function
554 but currently removed from the table. */
555
556 static struct table_elt *free_element_chain;
557
558 /* Number of `struct table_elt' structures made so far for this function. */
559
560 static int n_elements_made;
561
562 /* Maximum value `n_elements_made' has had so far in this compilation
563 for functions previously processed. */
564
565 static int max_elements_made;
566
567 /* Set to the cost of a constant pool reference if one was found for a
568 symbolic constant. If this was found, it means we should try to
569 convert constants into constant pool entries if they don't fit in
570 the insn. */
571
572 static int constant_pool_entries_cost;
573 static int constant_pool_entries_regcost;
574
575 /* This data describes a block that will be processed by cse_basic_block. */
576
577 struct cse_basic_block_data
578 {
579 /* Lowest CUID value of insns in block. */
580 int low_cuid;
581 /* Highest CUID value of insns in block. */
582 int high_cuid;
583 /* Total number of SETs in block. */
584 int nsets;
585 /* Last insn in the block. */
586 rtx last;
587 /* Size of current branch path, if any. */
588 int path_size;
589 /* Current branch path, indicating which branches will be taken. */
590 struct branch_path
591 {
592 /* The branch insn. */
593 rtx branch;
594 /* Whether it should be taken or not. AROUND is the same as taken
595 except that it is used when the destination label is not preceded
596 by a BARRIER. */
597 enum taken {PATH_TAKEN, PATH_NOT_TAKEN, PATH_AROUND} status;
598 } *path;
599 };
600
601 static bool fixed_base_plus_p (rtx x);
602 static int notreg_cost (rtx, enum rtx_code);
603 static int approx_reg_cost_1 (rtx *, void *);
604 static int approx_reg_cost (rtx);
605 static int preferable (int, int, int, int);
606 static void new_basic_block (void);
607 static void make_new_qty (unsigned int, enum machine_mode);
608 static void make_regs_eqv (unsigned int, unsigned int);
609 static void delete_reg_equiv (unsigned int);
610 static int mention_regs (rtx);
611 static int insert_regs (rtx, struct table_elt *, int);
612 static void remove_from_table (struct table_elt *, unsigned);
613 static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
614 static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
615 static rtx lookup_as_function (rtx, enum rtx_code);
616 static struct table_elt *insert (rtx, struct table_elt *, unsigned,
617 enum machine_mode);
618 static void merge_equiv_classes (struct table_elt *, struct table_elt *);
619 static void invalidate (rtx, enum machine_mode);
620 static int cse_rtx_varies_p (rtx, int);
621 static void remove_invalid_refs (unsigned int);
622 static void remove_invalid_subreg_refs (unsigned int, unsigned int,
623 enum machine_mode);
624 static void rehash_using_reg (rtx);
625 static void invalidate_memory (void);
626 static void invalidate_for_call (void);
627 static rtx use_related_value (rtx, struct table_elt *);
628
629 static inline unsigned canon_hash (rtx, enum machine_mode);
630 static inline unsigned safe_hash (rtx, enum machine_mode);
631 static unsigned hash_rtx_string (const char *);
632
633 static rtx canon_reg (rtx, rtx);
634 static void find_best_addr (rtx, rtx *, enum machine_mode);
635 static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
636 enum machine_mode *,
637 enum machine_mode *);
638 static rtx fold_rtx (rtx, rtx);
639 static rtx equiv_constant (rtx);
640 static void record_jump_equiv (rtx, int);
641 static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
642 int);
643 static void cse_insn (rtx, rtx);
644 static void cse_end_of_basic_block (rtx, struct cse_basic_block_data *,
645 int, int);
646 static int addr_affects_sp_p (rtx);
647 static void invalidate_from_clobbers (rtx);
648 static rtx cse_process_notes (rtx, rtx);
649 static void invalidate_skipped_set (rtx, rtx, void *);
650 static void invalidate_skipped_block (rtx);
651 static rtx cse_basic_block (rtx, rtx, struct branch_path *);
652 static void count_reg_usage (rtx, int *, int);
653 static int check_for_label_ref (rtx *, void *);
654 extern void dump_class (struct table_elt*);
655 static struct cse_reg_info * get_cse_reg_info (unsigned int);
656 static int check_dependence (rtx *, void *);
657
658 static void flush_hash_table (void);
659 static bool insn_live_p (rtx, int *);
660 static bool set_live_p (rtx, rtx, int *);
661 static bool dead_libcall_p (rtx, int *);
662 static int cse_change_cc_mode (rtx *, void *);
663 static void cse_change_cc_mode_insn (rtx, rtx);
664 static void cse_change_cc_mode_insns (rtx, rtx, rtx);
665 static enum machine_mode cse_cc_succs (basic_block, rtx, rtx, bool);
666 \f
667
668 #undef RTL_HOOKS_GEN_LOWPART
669 #define RTL_HOOKS_GEN_LOWPART gen_lowpart_if_possible
670
671 static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;
672 \f
673 /* Nonzero if X has the form (PLUS frame-pointer integer). We check for
674 virtual regs here because the simplify_*_operation routines are called
675 by integrate.c, which is called before virtual register instantiation. */
676
677 static bool
678 fixed_base_plus_p (rtx x)
679 {
680 switch (GET_CODE (x))
681 {
682 case REG:
683 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
684 return true;
685 if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
686 return true;
687 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
688 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
689 return true;
690 return false;
691
692 case PLUS:
693 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
694 return false;
695 return fixed_base_plus_p (XEXP (x, 0));
696
697 default:
698 return false;
699 }
700 }
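/* Examples: the function above accepts (reg fp),
   (plus (reg fp) (const_int 8)) and nested forms such as
   (plus (plus (reg fp) (const_int 8)) (const_int 4)); a PLUS whose
   second operand is not a CONST_INT is rejected.  */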
701
702 /* Dump the expressions in the equivalence class indicated by CLASSP.
703 This function is used only for debugging. */
704 void
705 dump_class (struct table_elt *classp)
706 {
707 struct table_elt *elt;
708
709 fprintf (stderr, "Equivalence chain for ");
710 print_rtl (stderr, classp->exp);
711 fprintf (stderr, ": \n");
712
713 for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
714 {
715 print_rtl (stderr, elt->exp);
716 fprintf (stderr, "\n");
717 }
718 }
719
720 /* Subroutine of approx_reg_cost; called through for_each_rtx. */
721
722 static int
723 approx_reg_cost_1 (rtx *xp, void *data)
724 {
725 rtx x = *xp;
726 int *cost_p = data;
727
728 if (x && REG_P (x))
729 {
730 unsigned int regno = REGNO (x);
731
732 if (! CHEAP_REGNO (regno))
733 {
734 if (regno < FIRST_PSEUDO_REGISTER)
735 {
736 if (SMALL_REGISTER_CLASSES)
737 return 1;
738 *cost_p += 2;
739 }
740 else
741 *cost_p += 1;
742 }
743 }
744
745 return 0;
746 }
747
748 /* Return an estimate of the cost of the registers used in an rtx.
749 This is mostly the number of different REG expressions in the rtx;
750 however for some exceptions like fixed registers we use a cost of
751 0. If any other hard register reference occurs, return MAX_COST. */
752
753 static int
754 approx_reg_cost (rtx x)
755 {
756 int cost = 0;
757
758 if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
759 return MAX_COST;
760
761 return cost;
762 }
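/* Examples (illustrative): (plus (reg 100) (reg 101)) with two pseudos
   costs 2; replacing one operand with the frame pointer drops the
   estimate to 1; any non-fixed hard register yields MAX_COST when
   SMALL_REGISTER_CLASSES is set.  */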
763
764 /* Returns a canonical version of X for use in an address, from the point
765 of view that all multiplications are represented as MULT instead of a
766 multiply by a power of 2 being represented as ASHIFT. */
767
768 static rtx
769 canon_for_address (rtx x)
770 {
771 enum rtx_code code;
772 enum machine_mode mode;
773 rtx new = 0;
774 int i;
775 const char *fmt;
776
777 if (!x)
778 return x;
779
780 code = GET_CODE (x);
781 mode = GET_MODE (x);
782
783 switch (code)
784 {
785 case ASHIFT:
786 if (GET_CODE (XEXP (x, 1)) == CONST_INT
787 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode)
788 && INTVAL (XEXP (x, 1)) >= 0)
789 {
790 new = canon_for_address (XEXP (x, 0));
791 new = gen_rtx_MULT (mode, new,
792 gen_int_mode ((HOST_WIDE_INT) 1
793 << INTVAL (XEXP (x, 1)),
794 mode));
795 }
796 break;
797 default:
798 break;
799
800 }
801 if (new)
802 return new;
803
804 /* Now recursively process each operand of this operation. */
805 fmt = GET_RTX_FORMAT (code);
806 for (i = 0; i < GET_RTX_LENGTH (code); i++)
807 if (fmt[i] == 'e')
808 {
809 new = canon_for_address (XEXP (x, i));
810 XEXP (x, i) = new;
811 }
812 return x;
813 }
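/* Example: canon_for_address rewrites
	(plus (ashift (reg 100) (const_int 2)) (reg 101))
   into
	(plus (mult (reg 100) (const_int 4)) (reg 101))
   so that address costs are always measured on the MULT form.  */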
814
815 /* Return a negative value if an rtx A, whose costs are given by COST_A
816 and REGCOST_A, is more desirable than an rtx B.
817 Return a positive value if A is less desirable, or 0 if the two are
818 equally good. */
819 static int
820 preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
821 {
822 /* First, get rid of cases involving expressions that are entirely
823 unwanted. */
824 if (cost_a != cost_b)
825 {
826 if (cost_a == MAX_COST)
827 return 1;
828 if (cost_b == MAX_COST)
829 return -1;
830 }
831
832 /* Avoid extending lifetimes of hardregs. */
833 if (regcost_a != regcost_b)
834 {
835 if (regcost_a == MAX_COST)
836 return 1;
837 if (regcost_b == MAX_COST)
838 return -1;
839 }
840
841 /* Normal operation costs take precedence. */
842 if (cost_a != cost_b)
843 return cost_a - cost_b;
844 /* Only if these are identical consider effects on register pressure. */
845 if (regcost_a != regcost_b)
846 return regcost_a - regcost_b;
847 return 0;
848 }
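/* Usage sketch (hypothetical names): a caller choosing between two
   table elements A and B compares the paired costs, e.g.

	if (preferable (a->cost, a->regcost, b->cost, b->regcost) < 0)
	  ... A is the better replacement ...

   which is what the CHEAPER macro further down wraps up.  */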
849
850 /* Internal function, to compute cost when X is not a register; called
851 from COST macro to keep it simple. */
852
853 static int
854 notreg_cost (rtx x, enum rtx_code outer)
855 {
856 return ((GET_CODE (x) == SUBREG
857 && REG_P (SUBREG_REG (x))
858 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
859 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
860 && (GET_MODE_SIZE (GET_MODE (x))
861 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
862 && subreg_lowpart_p (x)
863 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
864 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
865 ? 0
866 : rtx_cost (x, outer) * 2);
867 }
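/* Example: on a typical 64-bit target, (subreg:SI (reg:DI 100) 0) is a
   truly no-op truncation, so the SUBREG is costed at 0 here rather than
   being charged via rtx_cost.  */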
868
869 \f
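/* Find the cse_reg_info entry for register REGNO, creating and
   initializing it if none exists yet, and cache the result for
   back-to-back queries of the same register.  */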
870 static struct cse_reg_info *
871 get_cse_reg_info (unsigned int regno)
872 {
873 struct cse_reg_info **hash_head = &reg_hash[REGHASH_FN (regno)];
874 struct cse_reg_info *p;
875
876 for (p = *hash_head; p != NULL; p = p->hash_next)
877 if (p->regno == regno)
878 break;
879
880 if (p == NULL)
881 {
882 /* Get a new cse_reg_info structure. */
883 if (cse_reg_info_free_list)
884 {
885 p = cse_reg_info_free_list;
886 cse_reg_info_free_list = p->next;
887 }
888 else
889 p = xmalloc (sizeof (struct cse_reg_info));
890
891 /* Insert into hash table. */
892 p->hash_next = *hash_head;
893 *hash_head = p;
894
895 /* Initialize it. */
896 p->reg_tick = 1;
897 p->reg_in_table = -1;
898 p->subreg_ticked = -1;
899 p->reg_qty = -regno - 1;
900 p->regno = regno;
901 p->next = cse_reg_info_used_list;
902 cse_reg_info_used_list = p;
903 if (!cse_reg_info_used_list_end)
904 cse_reg_info_used_list_end = p;
905 }
906
907 /* Cache this lookup; we tend to be looking up information about the
908 same register several times in a row. */
909 cached_regno = regno;
910 cached_cse_reg_info = p;
911
912 return p;
913 }
914
915 /* Clear the hash table and initialize each register with its own quantity,
916 for a new basic block. */
917
918 static void
919 new_basic_block (void)
920 {
921 int i;
922
923 next_qty = 0;
924
925 /* Clear out hash table state for this pass. */
926
927 memset (reg_hash, 0, sizeof reg_hash);
928
929 if (cse_reg_info_used_list)
930 {
931 cse_reg_info_used_list_end->next = cse_reg_info_free_list;
932 cse_reg_info_free_list = cse_reg_info_used_list;
933 cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
934 }
935 cached_cse_reg_info = 0;
936
937 CLEAR_HARD_REG_SET (hard_regs_in_table);
938
939 /* The per-quantity values used to be initialized here, but it is
940 much faster to initialize each as it is made in `make_new_qty'. */
941
942 for (i = 0; i < HASH_SIZE; i++)
943 {
944 struct table_elt *first;
945
946 first = table[i];
947 if (first != NULL)
948 {
949 struct table_elt *last = first;
950
951 table[i] = NULL;
952
953 while (last->next_same_hash != NULL)
954 last = last->next_same_hash;
955
956 /* Now relink this entire hash chain into
957 the free element list. */
958
959 last->next_same_hash = free_element_chain;
960 free_element_chain = first;
961 }
962 }
963
964 #ifdef HAVE_cc0
965 prev_insn = 0;
966 prev_insn_cc0 = 0;
967 #endif
968 }
969
970 /* Say that register REG contains a quantity in mode MODE not in any
971 register before and initialize that quantity. */
972
973 static void
974 make_new_qty (unsigned int reg, enum machine_mode mode)
975 {
976 int q;
977 struct qty_table_elem *ent;
978 struct reg_eqv_elem *eqv;
979
980 gcc_assert (next_qty < max_qty);
981
982 q = REG_QTY (reg) = next_qty++;
983 ent = &qty_table[q];
984 ent->first_reg = reg;
985 ent->last_reg = reg;
986 ent->mode = mode;
987 ent->const_rtx = ent->const_insn = NULL_RTX;
988 ent->comparison_code = UNKNOWN;
989
990 eqv = &reg_eqv_table[reg];
991 eqv->next = eqv->prev = -1;
992 }
993
994 /* Make reg NEW equivalent to reg OLD.
995 OLD is not changing; NEW is. */
996
997 static void
998 make_regs_eqv (unsigned int new, unsigned int old)
999 {
1000 unsigned int lastr, firstr;
1001 int q = REG_QTY (old);
1002 struct qty_table_elem *ent;
1003
1004 ent = &qty_table[q];
1005
1006 /* Nothing should become eqv until it has a "non-invalid" qty number. */
1007 gcc_assert (REGNO_QTY_VALID_P (old));
1008
1009 REG_QTY (new) = q;
1010 firstr = ent->first_reg;
1011 lastr = ent->last_reg;
1012
1013 /* Prefer fixed hard registers to anything. Prefer pseudo regs to other
1014 hard regs. Among pseudos, if NEW will live longer than any other reg
1015 of the same qty, and that is beyond the current basic block,
1016 make it the new canonical replacement for this qty. */
1017 if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
1018 /* Certain fixed registers might be of the class NO_REGS. This means
1019 that not only can they not be allocated by the compiler, but
1020 they cannot be used in substitutions or canonicalizations
1021 either. */
1022 && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
1023 && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
1024 || (new >= FIRST_PSEUDO_REGISTER
1025 && (firstr < FIRST_PSEUDO_REGISTER
1026 || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
1027 || (uid_cuid[REGNO_FIRST_UID (new)]
1028 < cse_basic_block_start))
1029 && (uid_cuid[REGNO_LAST_UID (new)]
1030 > uid_cuid[REGNO_LAST_UID (firstr)]))))))
1031 {
1032 reg_eqv_table[firstr].prev = new;
1033 reg_eqv_table[new].next = firstr;
1034 reg_eqv_table[new].prev = -1;
1035 ent->first_reg = new;
1036 }
1037 else
1038 {
1039 /* If NEW is a hard reg (known to be non-fixed), insert at end.
1040 Otherwise, insert before any non-fixed hard regs that are at the
1041 end. Registers of class NO_REGS cannot be used as an
1042 equivalent for anything. */
1043 while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
1044 && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
1045 && new >= FIRST_PSEUDO_REGISTER)
1046 lastr = reg_eqv_table[lastr].prev;
1047 reg_eqv_table[new].next = reg_eqv_table[lastr].next;
1048 if (reg_eqv_table[lastr].next >= 0)
1049 reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
1050 else
1051 qty_table[q].last_reg = new;
1052 reg_eqv_table[lastr].next = new;
1053 reg_eqv_table[new].prev = lastr;
1054 }
1055 }
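/* Worked example (register numbers invented): suppose a quantity's
   chain is currently  frame-pointer -> reg 100.  Calling make_regs_eqv
   for pseudo reg 101 whose lifetime ends inside this block links it in
   after reg 100; the fixed hard register stays first and remains the
   canonical replacement for the quantity.  */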
1056
1057 /* Remove REG from its equivalence class. */
1058
1059 static void
1060 delete_reg_equiv (unsigned int reg)
1061 {
1062 struct qty_table_elem *ent;
1063 int q = REG_QTY (reg);
1064 int p, n;
1065
1066 /* If invalid, do nothing. */
1067 if (! REGNO_QTY_VALID_P (reg))
1068 return;
1069
1070 ent = &qty_table[q];
1071
1072 p = reg_eqv_table[reg].prev;
1073 n = reg_eqv_table[reg].next;
1074
1075 if (n != -1)
1076 reg_eqv_table[n].prev = p;
1077 else
1078 ent->last_reg = p;
1079 if (p != -1)
1080 reg_eqv_table[p].next = n;
1081 else
1082 ent->first_reg = n;
1083
1084 REG_QTY (reg) = -reg - 1;
1085 }
1086
1087 /* Remove any invalid expressions from the hash table
1088 that refer to any of the registers contained in expression X.
1089
1090 Make sure that newly inserted references to those registers
1091 as subexpressions will be considered valid.
1092
1093 mention_regs is not called when a register itself
1094 is being stored in the table.
1095
1096 Return 1 if we have done something that may have changed the hash code
1097 of X. */
1098
1099 static int
1100 mention_regs (rtx x)
1101 {
1102 enum rtx_code code;
1103 int i, j;
1104 const char *fmt;
1105 int changed = 0;
1106
1107 if (x == 0)
1108 return 0;
1109
1110 code = GET_CODE (x);
1111 if (code == REG)
1112 {
1113 unsigned int regno = REGNO (x);
1114 unsigned int endregno
1115 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
1116 : hard_regno_nregs[regno][GET_MODE (x)]);
1117 unsigned int i;
1118
1119 for (i = regno; i < endregno; i++)
1120 {
1121 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1122 remove_invalid_refs (i);
1123
1124 REG_IN_TABLE (i) = REG_TICK (i);
1125 SUBREG_TICKED (i) = -1;
1126 }
1127
1128 return 0;
1129 }
1130
1131 /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
1132 pseudo if they don't use overlapping words. We handle only pseudos
1133 here for simplicity. */
1134 if (code == SUBREG && REG_P (SUBREG_REG (x))
1135 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1136 {
1137 unsigned int i = REGNO (SUBREG_REG (x));
1138
1139 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1140 {
1141 /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
1142 the last store to this register really stored into this
1143 subreg, then remove the memory of this subreg.
1144 Otherwise, remove any memory of the entire register and
1145 all its subregs from the table. */
1146 if (REG_TICK (i) - REG_IN_TABLE (i) > 1
1147 || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
1148 remove_invalid_refs (i);
1149 else
1150 remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
1151 }
1152
1153 REG_IN_TABLE (i) = REG_TICK (i);
1154 SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
1155 return 0;
1156 }
1157
1158 /* If X is a comparison or a COMPARE and either operand is a register
1159 that does not have a quantity, give it one. This is so that a later
1160 call to record_jump_equiv won't cause X to be assigned a different
1161 hash code and not found in the table after that call.
1162
1163 It is not necessary to do this here, since rehash_using_reg can
1164 fix up the table later, but doing this here eliminates the need to
1165 call that expensive function in the most common case where the only
1166 use of the register is in the comparison. */
1167
1168 if (code == COMPARE || COMPARISON_P (x))
1169 {
1170 if (REG_P (XEXP (x, 0))
1171 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
1172 if (insert_regs (XEXP (x, 0), NULL, 0))
1173 {
1174 rehash_using_reg (XEXP (x, 0));
1175 changed = 1;
1176 }
1177
1178 if (REG_P (XEXP (x, 1))
1179 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
1180 if (insert_regs (XEXP (x, 1), NULL, 0))
1181 {
1182 rehash_using_reg (XEXP (x, 1));
1183 changed = 1;
1184 }
1185 }
1186
1187 fmt = GET_RTX_FORMAT (code);
1188 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1189 if (fmt[i] == 'e')
1190 changed |= mention_regs (XEXP (x, i));
1191 else if (fmt[i] == 'E')
1192 for (j = 0; j < XVECLEN (x, i); j++)
1193 changed |= mention_regs (XVECEXP (x, i, j));
1194
1195 return changed;
1196 }
1197
1198 /* Update the register quantities for inserting X into the hash table
1199 with a value equivalent to CLASSP.
1200 (If the class does not contain a REG, it is irrelevant.)
1201 If MODIFIED is nonzero, X is a destination; it is being modified.
1202 Note that delete_reg_equiv should be called on a register
1203 before insert_regs is done on that register with MODIFIED != 0.
1204
1205 A nonzero return value means that elements of reg_qty have changed
1206 so X's hash code may be different. */
1207
1208 static int
1209 insert_regs (rtx x, struct table_elt *classp, int modified)
1210 {
1211 if (REG_P (x))
1212 {
1213 unsigned int regno = REGNO (x);
1214 int qty_valid;
1215
1216 /* If REGNO is in the equivalence table already but is of the
1217 wrong mode for that equivalence, don't do anything here. */
1218
1219 qty_valid = REGNO_QTY_VALID_P (regno);
1220 if (qty_valid)
1221 {
1222 struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];
1223
1224 if (ent->mode != GET_MODE (x))
1225 return 0;
1226 }
1227
1228 if (modified || ! qty_valid)
1229 {
1230 if (classp)
1231 for (classp = classp->first_same_value;
1232 classp != 0;
1233 classp = classp->next_same_value)
1234 if (REG_P (classp->exp)
1235 && GET_MODE (classp->exp) == GET_MODE (x))
1236 {
1237 make_regs_eqv (regno, REGNO (classp->exp));
1238 return 1;
1239 }
1240
1241 /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
1242 than REG_IN_TABLE to find out if there was only a single preceding
1243 invalidation - for the SUBREG - or another one, which would be
1244 for the full register. However, if we find here that REG_TICK
1245 indicates that the register is invalid, it means that it has
1246 been invalidated in a separate operation. The SUBREG might be used
1247 now (then this is a recursive call), or we might use the full REG
1248 now and a SUBREG of it later. So bump up REG_TICK so that
1249 mention_regs will do the right thing. */
1250 if (! modified
1251 && REG_IN_TABLE (regno) >= 0
1252 && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
1253 REG_TICK (regno)++;
1254 make_new_qty (regno, GET_MODE (x));
1255 return 1;
1256 }
1257
1258 return 0;
1259 }
1260
1261 /* If X is a SUBREG, we will likely be inserting the inner register in the
1262 table. If that register doesn't have an assigned quantity number at
1263 this point but does later, the insertion that we will be doing now will
1264 not be accessible because its hash code will have changed. So assign
1265 a quantity number now. */
1266
1267 else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
1268 && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1269 {
1270 insert_regs (SUBREG_REG (x), NULL, 0);
1271 mention_regs (x);
1272 return 1;
1273 }
1274 else
1275 return mention_regs (x);
1276 }
1277 \f
1278 /* Look in or update the hash table. */
1279
1280 /* Remove table element ELT from use in the table.
1281 HASH is its hash code, made using the HASH macro.
1282 It's an argument because often that is known in advance
1283 and we save much time not recomputing it. */
1284
1285 static void
1286 remove_from_table (struct table_elt *elt, unsigned int hash)
1287 {
1288 if (elt == 0)
1289 return;
1290
1291 /* Mark this element as removed. See cse_insn. */
1292 elt->first_same_value = 0;
1293
1294 /* Remove the table element from its equivalence class. */
1295
1296 {
1297 struct table_elt *prev = elt->prev_same_value;
1298 struct table_elt *next = elt->next_same_value;
1299
1300 if (next)
1301 next->prev_same_value = prev;
1302
1303 if (prev)
1304 prev->next_same_value = next;
1305 else
1306 {
1307 struct table_elt *newfirst = next;
1308 while (next)
1309 {
1310 next->first_same_value = newfirst;
1311 next = next->next_same_value;
1312 }
1313 }
1314 }
1315
1316 /* Remove the table element from its hash bucket. */
1317
1318 {
1319 struct table_elt *prev = elt->prev_same_hash;
1320 struct table_elt *next = elt->next_same_hash;
1321
1322 if (next)
1323 next->prev_same_hash = prev;
1324
1325 if (prev)
1326 prev->next_same_hash = next;
1327 else if (table[hash] == elt)
1328 table[hash] = next;
1329 else
1330 {
1331 /* This entry is not in the proper hash bucket. This can happen
1332 when two classes were merged by `merge_equiv_classes'. Search
1333 for the hash bucket that it heads. This happens only very
1334 rarely, so the cost is acceptable. */
1335 for (hash = 0; hash < HASH_SIZE; hash++)
1336 if (table[hash] == elt)
1337 table[hash] = next;
1338 }
1339 }
1340
1341 /* Remove the table element from its related-value circular chain. */
1342
1343 if (elt->related_value != 0 && elt->related_value != elt)
1344 {
1345 struct table_elt *p = elt->related_value;
1346
1347 while (p->related_value != elt)
1348 p = p->related_value;
1349 p->related_value = elt->related_value;
1350 if (p->related_value == p)
1351 p->related_value = 0;
1352 }
1353
1354 /* Now add it to the free element chain. */
1355 elt->next_same_hash = free_element_chain;
1356 free_element_chain = elt;
1357 }
1358
1359 /* Look up X in the hash table and return its table element,
1360 or 0 if X is not in the table.
1361
1362 MODE is the machine-mode of X, or if X is an integer constant
1363 with VOIDmode then MODE is the mode with which X will be used.
1364
1365 Here we are satisfied to find an expression whose tree structure
1366 looks like X. */
1367
1368 static struct table_elt *
1369 lookup (rtx x, unsigned int hash, enum machine_mode mode)
1370 {
1371 struct table_elt *p;
1372
1373 for (p = table[hash]; p; p = p->next_same_hash)
1374 if (mode == p->mode && ((x == p->exp && REG_P (x))
1375 || exp_equiv_p (x, p->exp, !REG_P (x), false)))
1376 return p;
1377
1378 return 0;
1379 }
1380
1381 /* Like `lookup' but don't care whether the table element uses invalid regs.
1382 Also ignore discrepancies in the machine mode of a register. */
1383
1384 static struct table_elt *
1385 lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode)
1386 {
1387 struct table_elt *p;
1388
1389 if (REG_P (x))
1390 {
1391 unsigned int regno = REGNO (x);
1392
1393 /* Don't check the machine mode when comparing registers;
1394 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1395 for (p = table[hash]; p; p = p->next_same_hash)
1396 if (REG_P (p->exp)
1397 && REGNO (p->exp) == regno)
1398 return p;
1399 }
1400 else
1401 {
1402 for (p = table[hash]; p; p = p->next_same_hash)
1403 if (mode == p->mode
1404 && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
1405 return p;
1406 }
1407
1408 return 0;
1409 }
1410
1411 /* Look for an expression equivalent to X and with code CODE.
1412 If one is found, return that expression. */
1413
1414 static rtx
1415 lookup_as_function (rtx x, enum rtx_code code)
1416 {
1417 struct table_elt *p
1418 = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));
1419
1420 /* If we are looking for a CONST_INT, the mode doesn't really matter, as
1421 long as we are narrowing. So if we looked in vain for a mode narrower
1422 than word_mode before, look for word_mode now. */
1423 if (p == 0 && code == CONST_INT
1424 && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
1425 {
1426 x = copy_rtx (x);
1427 PUT_MODE (x, word_mode);
1428 p = lookup (x, SAFE_HASH (x, VOIDmode), word_mode);
1429 }
1430
1431 if (p == 0)
1432 return 0;
1433
1434 for (p = p->first_same_value; p; p = p->next_same_value)
1435 if (GET_CODE (p->exp) == code
1436 /* Make sure this is a valid entry in the table. */
1437 && exp_equiv_p (p->exp, p->exp, 1, false))
1438 return p->exp;
1439
1440 return 0;
1441 }
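/* Example: lookup_as_function (x, PLUS) returns a valid (plus ...)
   expression recorded as equivalent to X, if the table holds one;
   callers such as fold_rtx use this to see through a value to the
   arithmetic behind it.  */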
1442
1443 /* Insert X in the hash table, assuming HASH is its hash code
1444 and CLASSP is an element of the class it should go in
1445 (or 0 if a new class should be made).
1446 It is inserted at the proper position to keep the class in
1447 the order cheapest first.
1448
1449 MODE is the machine-mode of X, or if X is an integer constant
1450 with VOIDmode then MODE is the mode with which X will be used.
1451
1452 For elements of equal cheapness, the most recent one
1453 goes in front, except that the first element in the list
1454 remains first unless a cheaper element is added. The order of
1455 pseudo-registers does not matter, as canon_reg will be called to
1456 find the cheapest when a register is retrieved from the table.
1457
1458 The in_memory field in the hash table element is set to 0.
1459 The caller must set it nonzero if appropriate.
1460
1461 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1462 and if insert_regs returns a nonzero value
1463 you must then recompute its hash code before calling here.
1464
1465 If necessary, update table showing constant values of quantities. */
1466
1467 #define CHEAPER(X, Y) \
1468 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
1469
1470 static struct table_elt *
1471 insert (rtx x, struct table_elt *classp, unsigned int hash, enum machine_mode mode)
1472 {
1473 struct table_elt *elt;
1474
1475 /* If X is a register and we haven't made a quantity for it,
1476 something is wrong. */
1477 gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));
1478
1479 /* If X is a hard register, show it is being put in the table. */
1480 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
1481 {
1482 unsigned int regno = REGNO (x);
1483 unsigned int endregno = regno + hard_regno_nregs[regno][GET_MODE (x)];
1484 unsigned int i;
1485
1486 for (i = regno; i < endregno; i++)
1487 SET_HARD_REG_BIT (hard_regs_in_table, i);
1488 }
1489
1490 /* Put an element for X into the right hash bucket. */
1491
1492 elt = free_element_chain;
1493 if (elt)
1494 free_element_chain = elt->next_same_hash;
1495 else
1496 {
1497 n_elements_made++;
1498 elt = xmalloc (sizeof (struct table_elt));
1499 }
1500
1501 elt->exp = x;
1502 elt->canon_exp = NULL_RTX;
1503 elt->cost = COST (x);
1504 elt->regcost = approx_reg_cost (x);
1505 elt->next_same_value = 0;
1506 elt->prev_same_value = 0;
1507 elt->next_same_hash = table[hash];
1508 elt->prev_same_hash = 0;
1509 elt->related_value = 0;
1510 elt->in_memory = 0;
1511 elt->mode = mode;
1512 elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));
1513
1514 if (table[hash])
1515 table[hash]->prev_same_hash = elt;
1516 table[hash] = elt;
1517
1518 /* Put it into the proper value-class. */
1519 if (classp)
1520 {
1521 classp = classp->first_same_value;
1522 if (CHEAPER (elt, classp))
1523 /* Insert at the head of the class. */
1524 {
1525 struct table_elt *p;
1526 elt->next_same_value = classp;
1527 classp->prev_same_value = elt;
1528 elt->first_same_value = elt;
1529
1530 for (p = classp; p; p = p->next_same_value)
1531 p->first_same_value = elt;
1532 }
1533 else
1534 {
1535 /* Insert not at head of the class. */
1536 /* Put it after the last element cheaper than X. */
1537 struct table_elt *p, *next;
1538
1539 for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1540 p = next);
1541
1542 /* Put it after P and before NEXT. */
1543 elt->next_same_value = next;
1544 if (next)
1545 next->prev_same_value = elt;
1546
1547 elt->prev_same_value = p;
1548 p->next_same_value = elt;
1549 elt->first_same_value = classp;
1550 }
1551 }
1552 else
1553 elt->first_same_value = elt;
1554
1555 /* If this is a constant being set equivalent to a register or a register
1556 being set equivalent to a constant, note the constant equivalence.
1557
1558 If this is a constant, it cannot be equivalent to a different constant,
1559 and a constant is the only thing that can be cheaper than a register. So
1560 we know the register is the head of the class (before the constant was
1561 inserted).
1562
1563 If this is a register that is not already known equivalent to a
1564 constant, we must check the entire class.
1565
1566 If this is a register that is already known equivalent to an insn,
1567 update the qtys `const_insn' to show that `this_insn' is the latest
1568 insn making that quantity equivalent to the constant. */
1569
1570 if (elt->is_const && classp && REG_P (classp->exp)
1571 && !REG_P (x))
1572 {
1573 int exp_q = REG_QTY (REGNO (classp->exp));
1574 struct qty_table_elem *exp_ent = &qty_table[exp_q];
1575
1576 exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
1577 exp_ent->const_insn = this_insn;
1578 }
1579
1580 else if (REG_P (x)
1581 && classp
1582 && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1583 && ! elt->is_const)
1584 {
1585 struct table_elt *p;
1586
1587 for (p = classp; p != 0; p = p->next_same_value)
1588 {
1589 if (p->is_const && !REG_P (p->exp))
1590 {
1591 int x_q = REG_QTY (REGNO (x));
1592 struct qty_table_elem *x_ent = &qty_table[x_q];
1593
1594 x_ent->const_rtx
1595 = gen_lowpart (GET_MODE (x), p->exp);
1596 x_ent->const_insn = this_insn;
1597 break;
1598 }
1599 }
1600 }
1601
1602 else if (REG_P (x)
1603 && qty_table[REG_QTY (REGNO (x))].const_rtx
1604 && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1605 qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1606
1607 /* If this is a constant with symbolic value,
1608 and it has a term with an explicit integer value,
1609 link it up with related expressions. */
1610 if (GET_CODE (x) == CONST)
1611 {
1612 rtx subexp = get_related_value (x);
1613 unsigned subhash;
1614 struct table_elt *subelt, *subelt_prev;
1615
1616 if (subexp != 0)
1617 {
1618 /* Get the integer-free subexpression in the hash table. */
1619 subhash = SAFE_HASH (subexp, mode);
1620 subelt = lookup (subexp, subhash, mode);
1621 if (subelt == 0)
1622 subelt = insert (subexp, NULL, subhash, mode);
1623 /* Initialize SUBELT's circular chain if it has none. */
1624 if (subelt->related_value == 0)
1625 subelt->related_value = subelt;
1626 /* Find the element in the circular chain that precedes SUBELT. */
1627 subelt_prev = subelt;
1628 while (subelt_prev->related_value != subelt)
1629 subelt_prev = subelt_prev->related_value;
1630 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1631 This way the element that follows SUBELT is the oldest one. */
1632 elt->related_value = subelt_prev->related_value;
1633 subelt_prev->related_value = elt;
1634 }
1635 }
1636
1637 return elt;
1638 }
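/* Example (costs are target-dependent, so illustrative only):
   inserting (const_int 4) into a class headed by pseudo (reg 100)
   places the constant at the head, since a constant is the only thing
   that can be cheaper than a register, and the quantity of reg 100
   then records const_rtx = (const_int 4) as described above.  */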
1639 \f
1640 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1641 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1642 the two classes equivalent.
1643
1644 CLASS1 will be the surviving class; CLASS2 should not be used after this
1645 call.
1646
1647 Any invalid entries in CLASS2 will not be copied. */
1648
1649 static void
1650 merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
1651 {
1652 struct table_elt *elt, *next, *new;
1653
1654 /* Ensure we start with the head of the classes. */
1655 class1 = class1->first_same_value;
1656 class2 = class2->first_same_value;
1657
1658 /* If they were already equal, forget it. */
1659 if (class1 == class2)
1660 return;
1661
1662 for (elt = class2; elt; elt = next)
1663 {
1664 unsigned int hash;
1665 rtx exp = elt->exp;
1666 enum machine_mode mode = elt->mode;
1667
1668 next = elt->next_same_value;
1669
1670 /* Remove old entry, make a new one in CLASS1's class.
1671 Don't do this for invalid entries as we cannot find their
1672 hash code (it also isn't necessary). */
1673 if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
1674 {
1675 bool need_rehash = false;
1676
1677 hash_arg_in_memory = 0;
1678 hash = HASH (exp, mode);
1679
1680 if (REG_P (exp))
1681 {
1682 need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
1683 delete_reg_equiv (REGNO (exp));
1684 }
1685
1686 remove_from_table (elt, hash);
1687
1688 if (insert_regs (exp, class1, 0) || need_rehash)
1689 {
1690 rehash_using_reg (exp);
1691 hash = HASH (exp, mode);
1692 }
1693 new = insert (exp, class1, hash, mode);
1694 new->in_memory = hash_arg_in_memory;
1695 }
1696 }
1697 }
1698 \f
1699 /* Flush the entire hash table. */
1700
1701 static void
1702 flush_hash_table (void)
1703 {
1704 int i;
1705 struct table_elt *p;
1706
1707 for (i = 0; i < HASH_SIZE; i++)
1708 for (p = table[i]; p; p = table[i])
1709 {
1710 /* Note that invalidate can remove elements
1711 after P in the current hash chain. */
1712 if (REG_P (p->exp))
1713 invalidate (p->exp, p->mode);
1714 else
1715 remove_from_table (p, i);
1716 }
1717 }
1718 \f
1719 /* Function called for each rtx to check whether a true dependence exists. */
1720 struct check_dependence_data
1721 {
1722 enum machine_mode mode;
1723 rtx exp;
1724 rtx addr;
1725 };
1726
1727 static int
1728 check_dependence (rtx *x, void *data)
1729 {
1730 struct check_dependence_data *d = (struct check_dependence_data *) data;
1731 if (*x && MEM_P (*x))
1732 return canon_true_dependence (d->exp, d->mode, d->addr, *x,
1733 cse_rtx_varies_p);
1734 else
1735 return 0;
1736 }
1737 \f
1738 /* Remove from the hash table, or mark as invalid, all expressions whose
1739 values could be altered by storing in X. X is a register, a subreg, or
1740 a memory reference with nonvarying address (because, when a memory
1741 reference with a varying address is stored in, all memory references are
1742 removed by invalidate_memory so specific invalidation is superfluous).
1743 FULL_MODE, if not VOIDmode, indicates that this much should be
1744 invalidated instead of just the amount indicated by the mode of X. This
1745 is only used for bitfield stores into memory.
1746
1747 A nonvarying address may be just a register or just a symbol reference,
1748 or it may be either of those plus a numeric offset. */
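
/* Illustrative examples of nonvarying addresses in this sense (a sketch,
   not an exhaustive list):
	(reg fp), (symbol_ref "x"),
	(plus (reg fp) (const_int 8)), (plus (symbol_ref "x") (const_int 4)). */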
1749
1750 static void
1751 invalidate (rtx x, enum machine_mode full_mode)
1752 {
1753 int i;
1754 struct table_elt *p;
1755 rtx addr;
1756
1757 switch (GET_CODE (x))
1758 {
1759 case REG:
1760 {
1761 /* If X is a register, dependencies on its contents are recorded
1762 through the qty number mechanism. Just change the qty number of
1763 the register, mark it as invalid for expressions that refer to it,
1764 and remove the register itself. */
1765 unsigned int regno = REGNO (x);
1766 unsigned int hash = HASH (x, GET_MODE (x));
1767
1768 /* Remove REGNO from any quantity list it might be on and indicate
1769 that its value might have changed. If it is a pseudo, remove its
1770 entry from the hash table.
1771
1772 For a hard register, we do the first two actions above for any
1773 additional hard registers corresponding to X. Then, if any of these
1774 registers are in the table, we must remove any REG entries that
1775 overlap these registers. */
1776
1777 delete_reg_equiv (regno);
1778 REG_TICK (regno)++;
1779 SUBREG_TICKED (regno) = -1;
1780
1781 if (regno >= FIRST_PSEUDO_REGISTER)
1782 {
1783 /* Because a register can be referenced in more than one mode,
1784 we might have to remove more than one table entry. */
1785 struct table_elt *elt;
1786
1787 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1788 remove_from_table (elt, hash);
1789 }
1790 else
1791 {
1792 HOST_WIDE_INT in_table
1793 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1794 unsigned int endregno
1795 = regno + hard_regno_nregs[regno][GET_MODE (x)];
1796 unsigned int tregno, tendregno, rn;
1797 struct table_elt *p, *next;
1798
1799 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1800
1801 for (rn = regno + 1; rn < endregno; rn++)
1802 {
1803 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1804 CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1805 delete_reg_equiv (rn);
1806 REG_TICK (rn)++;
1807 SUBREG_TICKED (rn) = -1;
1808 }
1809
1810 if (in_table)
1811 for (hash = 0; hash < HASH_SIZE; hash++)
1812 for (p = table[hash]; p; p = next)
1813 {
1814 next = p->next_same_hash;
1815
1816 if (!REG_P (p->exp)
1817 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1818 continue;
1819
1820 tregno = REGNO (p->exp);
1821 tendregno
1822 = tregno + hard_regno_nregs[tregno][GET_MODE (p->exp)];
1823 if (tendregno > regno && tregno < endregno)
1824 remove_from_table (p, hash);
1825 }
1826 }
1827 }
1828 return;
1829
1830 case SUBREG:
1831 invalidate (SUBREG_REG (x), VOIDmode);
1832 return;
1833
1834 case PARALLEL:
1835 for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1836 invalidate (XVECEXP (x, 0, i), VOIDmode);
1837 return;
1838
1839 case EXPR_LIST:
1840 /* This is part of a disjoint return value; extract the location in
1841 question, ignoring the offset. */
1842 invalidate (XEXP (x, 0), VOIDmode);
1843 return;
1844
1845 case MEM:
1846 addr = canon_rtx (get_addr (XEXP (x, 0)));
1847 /* Calculate the canonical version of X here so that
1848 true_dependence doesn't generate new RTL for X on each call. */
1849 x = canon_rtx (x);
1850
1851 /* Remove all hash table elements that refer to overlapping pieces of
1852 memory. */
1853 if (full_mode == VOIDmode)
1854 full_mode = GET_MODE (x);
1855
1856 for (i = 0; i < HASH_SIZE; i++)
1857 {
1858 struct table_elt *next;
1859
1860 for (p = table[i]; p; p = next)
1861 {
1862 next = p->next_same_hash;
1863 if (p->in_memory)
1864 {
1865 struct check_dependence_data d;
1866
1867 /* Just canonicalize the expression once;
1868 otherwise each time we call invalidate
1869 true_dependence will canonicalize the
1870 expression again. */
1871 if (!p->canon_exp)
1872 p->canon_exp = canon_rtx (p->exp);
1873 d.exp = x;
1874 d.addr = addr;
1875 d.mode = full_mode;
1876 if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1877 remove_from_table (p, i);
1878 }
1879 }
1880 }
1881 return;
1882
1883 default:
1884 gcc_unreachable ();
1885 }
1886 }
1887 \f
1888 /* Remove all expressions that refer to register REGNO,
1889 since they are already invalid, and we are about to
1890 mark that register valid again and don't want the old
1891 expressions to reappear as valid. */
1892
1893 static void
1894 remove_invalid_refs (unsigned int regno)
1895 {
1896 unsigned int i;
1897 struct table_elt *p, *next;
1898
1899 for (i = 0; i < HASH_SIZE; i++)
1900 for (p = table[i]; p; p = next)
1901 {
1902 next = p->next_same_hash;
1903 if (!REG_P (p->exp)
1904 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1905 remove_from_table (p, i);
1906 }
1907 }
1908
1909 /* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
1910 and mode MODE. */
1911 static void
1912 remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
1913 enum machine_mode mode)
1914 {
1915 unsigned int i;
1916 struct table_elt *p, *next;
1917 unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
1918
1919 for (i = 0; i < HASH_SIZE; i++)
1920 for (p = table[i]; p; p = next)
1921 {
1922 rtx exp = p->exp;
1923 next = p->next_same_hash;
1924
1925 if (!REG_P (exp)
1926 && (GET_CODE (exp) != SUBREG
1927 || !REG_P (SUBREG_REG (exp))
1928 || REGNO (SUBREG_REG (exp)) != regno
1929 || (((SUBREG_BYTE (exp)
1930 + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
1931 && SUBREG_BYTE (exp) <= end))
1932 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1933 remove_from_table (p, i);
1934 }
1935 }
1936 \f
1937 /* Recompute the hash codes of any valid entries in the hash table that
1938 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1939
1940 This is called when we make a jump equivalence. */
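
/* A hypothetical sketch of why this is needed: after the taken branch of
	(set (pc) (if_then_else (eq (reg 100) (const_int 0)) ...))
   we record (reg 100) as equivalent to (const_int 0). Since registers
   hash by their quantity number (see hash_rtx), every valid table entry
   mentioning (reg 100) may now belong in a different hash chain and must
   be moved. */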
1941
1942 static void
1943 rehash_using_reg (rtx x)
1944 {
1945 unsigned int i;
1946 struct table_elt *p, *next;
1947 unsigned hash;
1948
1949 if (GET_CODE (x) == SUBREG)
1950 x = SUBREG_REG (x);
1951
1952 /* If X is not a register or if the register is known not to be in any
1953 valid entries in the table, we have no work to do. */
1954
1955 if (!REG_P (x)
1956 || REG_IN_TABLE (REGNO (x)) < 0
1957 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
1958 return;
1959
1960 /* Scan all hash chains looking for valid entries that mention X.
1961 If we find one and it is in the wrong hash chain, move it. */
1962
1963 for (i = 0; i < HASH_SIZE; i++)
1964 for (p = table[i]; p; p = next)
1965 {
1966 next = p->next_same_hash;
1967 if (reg_mentioned_p (x, p->exp)
1968 && exp_equiv_p (p->exp, p->exp, 1, false)
1969 && i != (hash = SAFE_HASH (p->exp, p->mode)))
1970 {
1971 if (p->next_same_hash)
1972 p->next_same_hash->prev_same_hash = p->prev_same_hash;
1973
1974 if (p->prev_same_hash)
1975 p->prev_same_hash->next_same_hash = p->next_same_hash;
1976 else
1977 table[i] = p->next_same_hash;
1978
1979 p->next_same_hash = table[hash];
1980 p->prev_same_hash = 0;
1981 if (table[hash])
1982 table[hash]->prev_same_hash = p;
1983 table[hash] = p;
1984 }
1985 }
1986 }
1987 \f
1988 /* Remove from the hash table any expressions that are call-clobbered
1989 registers. Also update their TICK values. */
1990
1991 static void
1992 invalidate_for_call (void)
1993 {
1994 unsigned int regno, endregno;
1995 unsigned int i;
1996 unsigned hash;
1997 struct table_elt *p, *next;
1998 int in_table = 0;
1999
2000 /* Go through all the hard registers. For each that is clobbered in
2001 a CALL_INSN, remove the register from quantity chains and update
2002 reg_tick if defined. Also see if any of these registers is currently
2003 in the table. */
2004
2005 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2006 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2007 {
2008 delete_reg_equiv (regno);
2009 if (REG_TICK (regno) >= 0)
2010 {
2011 REG_TICK (regno)++;
2012 SUBREG_TICKED (regno) = -1;
2013 }
2014
2015 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2016 }
2017
2018 /* In the case where we have no call-clobbered hard registers in the
2019 table, we are done. Otherwise, scan the table and remove any
2020 entry that overlaps a call-clobbered register. */
2021
2022 if (in_table)
2023 for (hash = 0; hash < HASH_SIZE; hash++)
2024 for (p = table[hash]; p; p = next)
2025 {
2026 next = p->next_same_hash;
2027
2028 if (!REG_P (p->exp)
2029 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2030 continue;
2031
2032 regno = REGNO (p->exp);
2033 endregno = regno + hard_regno_nregs[regno][GET_MODE (p->exp)];
2034
2035 for (i = regno; i < endregno; i++)
2036 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2037 {
2038 remove_from_table (p, hash);
2039 break;
2040 }
2041 }
2042 }
2043 \f
2044 /* Given an expression X of type CONST,
2045 and ELT which is its table entry (or 0 if it
2046 is not in the hash table),
2047 return an alternate expression for X as a register plus integer.
2048 If none can be found, return 0. */
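
/* A sketch with hypothetical pseudo numbers: if
	X = (const (plus (symbol_ref "s") (const_int 8)))
   and the table shows (reg 100) is equivalent to (symbol_ref "s"), the
   related-value chain lets us return
	(plus (reg 100) (const_int 8)). */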
2049
2050 static rtx
2051 use_related_value (rtx x, struct table_elt *elt)
2052 {
2053 struct table_elt *relt = 0;
2054 struct table_elt *p, *q;
2055 HOST_WIDE_INT offset;
2056
2057 /* First, is there anything related known?
2058 If we have a table element, we can tell from that.
2059 Otherwise, we must look it up. */
2060
2061 if (elt != 0 && elt->related_value != 0)
2062 relt = elt;
2063 else if (elt == 0 && GET_CODE (x) == CONST)
2064 {
2065 rtx subexp = get_related_value (x);
2066 if (subexp != 0)
2067 relt = lookup (subexp,
2068 SAFE_HASH (subexp, GET_MODE (subexp)),
2069 GET_MODE (subexp));
2070 }
2071
2072 if (relt == 0)
2073 return 0;
2074
2075 /* Search all related table entries for one that has an
2076 equivalent register. */
2077
2078 p = relt;
2079 while (1)
2080 {
2081 /* This loop is strange in that it is executed in two different cases.
2082 The first is when X is already in the table. Then it is searching
2083 the RELATED_VALUE list of X's class (RELT). The second case is when
2084 X is not in the table. Then RELT points to a class for the related
2085 value.
2086
2087 Ensure that, whatever case we are in, we ignore classes that have
2088 the same value as X. */
2089
2090 if (rtx_equal_p (x, p->exp))
2091 q = 0;
2092 else
2093 for (q = p->first_same_value; q; q = q->next_same_value)
2094 if (REG_P (q->exp))
2095 break;
2096
2097 if (q)
2098 break;
2099
2100 p = p->related_value;
2101
2102 /* We went all the way around, so there is nothing to be found.
2103 Alternatively, perhaps RELT was in the table for some other reason
2104 and it has no related values recorded. */
2105 if (p == relt || p == 0)
2106 break;
2107 }
2108
2109 if (q == 0)
2110 return 0;
2111
2112 offset = (get_integer_term (x) - get_integer_term (p->exp));
2113 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
2114 return plus_constant (q->exp, offset);
2115 }
2116 \f
2117 /* Hash a string. Just add its bytes up. */
2118 static inline unsigned
2119 hash_rtx_string (const char *ps)
2120 {
2121 unsigned hash = 0;
2122 const unsigned char *p = (const unsigned char *) ps;
2123
2124 if (p)
2125 while (*p)
2126 hash += *p++;
2127
2128 return hash;
2129 }
2130
2131 /* Hash an rtx. We are careful to make sure the value is never negative.
2132 Equivalent registers hash identically.
2133 MODE is used in hashing for CONST_INTs only;
2134 otherwise the mode of X is used.
2135
2136 Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.
2137
2138 If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
2139 a MEM rtx for which MEM_READONLY_P is false.
2140
2141 Note that cse_insn knows that the hash code of a MEM expression
2142 is just (int) MEM plus the hash code of the address. */
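
/* A hypothetical sketch: with HAVE_REG_QTY true, two pseudos currently in
   the same quantity hash identically, so
	(plus:SI (reg 100) (const_int 4))
   and
	(plus:SI (reg 101) (const_int 4))
   land in the same hash chain whenever reg 100 and reg 101 are known to
   hold the same value. */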
2143
2144 unsigned
2145 hash_rtx (rtx x, enum machine_mode mode, int *do_not_record_p,
2146 int *hash_arg_in_memory_p, bool have_reg_qty)
2147 {
2148 int i, j;
2149 unsigned hash = 0;
2150 enum rtx_code code;
2151 const char *fmt;
2152
2153 /* Used to turn recursion into iteration. We can't rely on GCC's
2154 tail-recursion elimination since we need to keep accumulating values
2155 in HASH. */
2156 repeat:
2157 if (x == 0)
2158 return hash;
2159
2160 code = GET_CODE (x);
2161 switch (code)
2162 {
2163 case REG:
2164 {
2165 unsigned int regno = REGNO (x);
2166
2167 if (!reload_completed)
2168 {
2169 /* On some machines, we can't record any non-fixed hard register,
2170 because extending its life will cause reload problems. We
2171 consider ap, fp, sp, gp to be fixed for this purpose.
2172
2173 We also consider CCmode registers to be fixed for this purpose;
2174 failure to do so leads to failure to simplify 0<100 type of
2175 conditionals.
2176
2177 On all machines, we can't record any global registers.
2178 Nor should we record any register that is in a small
2179 class, as defined by CLASS_LIKELY_SPILLED_P. */
2180 bool record;
2181
2182 if (regno >= FIRST_PSEUDO_REGISTER)
2183 record = true;
2184 else if (x == frame_pointer_rtx
2185 || x == hard_frame_pointer_rtx
2186 || x == arg_pointer_rtx
2187 || x == stack_pointer_rtx
2188 || x == pic_offset_table_rtx)
2189 record = true;
2190 else if (global_regs[regno])
2191 record = false;
2192 else if (fixed_regs[regno])
2193 record = true;
2194 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2195 record = true;
2196 else if (SMALL_REGISTER_CLASSES)
2197 record = false;
2198 else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
2199 record = false;
2200 else
2201 record = true;
2202
2203 if (!record)
2204 {
2205 *do_not_record_p = 1;
2206 return 0;
2207 }
2208 }
2209
2210 hash += ((unsigned int) REG << 7);
2211 hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
2212 return hash;
2213 }
2214
2215 /* We handle SUBREG of a REG specially because the underlying
2216 reg changes its hash value with every value change; we don't
2217 want to have to forget unrelated subregs when one subreg changes. */
2218 case SUBREG:
2219 {
2220 if (REG_P (SUBREG_REG (x)))
2221 {
2222 hash += (((unsigned int) SUBREG << 7)
2223 + REGNO (SUBREG_REG (x))
2224 + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2225 return hash;
2226 }
2227 break;
2228 }
2229
2230 case CONST_INT:
2231 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
2232 + (unsigned int) INTVAL (x));
2233 return hash;
2234
2235 case CONST_DOUBLE:
2236 /* This is like the general case, except that it only counts
2237 the integers representing the constant. */
2238 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2239 if (GET_MODE (x) != VOIDmode)
2240 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2241 else
2242 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
2243 + (unsigned int) CONST_DOUBLE_HIGH (x));
2244 return hash;
2245
2246 case CONST_VECTOR:
2247 {
2248 int units;
2249 rtx elt;
2250
2251 units = CONST_VECTOR_NUNITS (x);
2252
2253 for (i = 0; i < units; ++i)
2254 {
2255 elt = CONST_VECTOR_ELT (x, i);
2256 hash += hash_rtx (elt, GET_MODE (elt), do_not_record_p,
2257 hash_arg_in_memory_p, have_reg_qty);
2258 }
2259
2260 return hash;
2261 }
2262
2263 /* Assume there is only one rtx object for any given label. */
2264 case LABEL_REF:
2265 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
2266 differences and differences between each stage's debugging dumps. */
2267 hash += (((unsigned int) LABEL_REF << 7)
2268 + CODE_LABEL_NUMBER (XEXP (x, 0)));
2269 return hash;
2270
2271 case SYMBOL_REF:
2272 {
2273 /* Don't hash on the symbol's address to avoid bootstrap differences.
2274 Different hash values may cause expressions to be recorded in
2275 different orders and thus different registers to be used in the
2276 final assembler. This also avoids differences in the dump files
2277 between various stages. */
2278 unsigned int h = 0;
2279 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
2280
2281 while (*p)
2282 h += (h << 7) + *p++; /* ??? revisit */
2283
2284 hash += ((unsigned int) SYMBOL_REF << 7) + h;
2285 return hash;
2286 }
2287
2288 case MEM:
2289 /* We don't record if marked volatile or if BLKmode since we don't
2290 know the size of the move. */
2291 if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2292 {
2293 *do_not_record_p = 1;
2294 return 0;
2295 }
2296 if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2297 *hash_arg_in_memory_p = 1;
2298
2299 /* Now that we have already found this special case,
2300 might as well speed it up as much as possible. */
2301 hash += (unsigned) MEM;
2302 x = XEXP (x, 0);
2303 goto repeat;
2304
2305 case USE:
2306 /* A USE that mentions non-volatile memory needs special
2307 handling since the MEM may be BLKmode which normally
2308 prevents an entry from being made. Pure calls are
2309 marked by a USE which mentions BLKmode memory.
2310 See calls.c:emit_call_1. */
2311 if (MEM_P (XEXP (x, 0))
2312 && ! MEM_VOLATILE_P (XEXP (x, 0)))
2313 {
2314 hash += (unsigned) USE;
2315 x = XEXP (x, 0);
2316
2317 if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2318 *hash_arg_in_memory_p = 1;
2319
2320 /* Now that we have already found this special case,
2321 might as well speed it up as much as possible. */
2322 hash += (unsigned) MEM;
2323 x = XEXP (x, 0);
2324 goto repeat;
2325 }
2326 break;
2327
2328 case PRE_DEC:
2329 case PRE_INC:
2330 case POST_DEC:
2331 case POST_INC:
2332 case PRE_MODIFY:
2333 case POST_MODIFY:
2334 case PC:
2335 case CC0:
2336 case CALL:
2337 case UNSPEC_VOLATILE:
2338 *do_not_record_p = 1;
2339 return 0;
2340
2341 case ASM_OPERANDS:
2342 if (MEM_VOLATILE_P (x))
2343 {
2344 *do_not_record_p = 1;
2345 return 0;
2346 }
2347 else
2348 {
2349 /* We don't want to take the filename and line into account. */
2350 hash += (unsigned) code + (unsigned) GET_MODE (x)
2351 + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
2352 + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2353 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2354
2355 if (ASM_OPERANDS_INPUT_LENGTH (x))
2356 {
2357 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2358 {
2359 hash += (hash_rtx (ASM_OPERANDS_INPUT (x, i),
2360 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
2361 do_not_record_p, hash_arg_in_memory_p,
2362 have_reg_qty)
2363 + hash_rtx_string
2364 (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
2365 }
2366
2367 hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2368 x = ASM_OPERANDS_INPUT (x, 0);
2369 mode = GET_MODE (x);
2370 goto repeat;
2371 }
2372
2373 return hash;
2374 }
2375 break;
2376
2377 default:
2378 break;
2379 }
2380
2381 i = GET_RTX_LENGTH (code) - 1;
2382 hash += (unsigned) code + (unsigned) GET_MODE (x);
2383 fmt = GET_RTX_FORMAT (code);
2384 for (; i >= 0; i--)
2385 {
2386 switch (fmt[i])
2387 {
2388 case 'e':
2389 /* If we are about to do the last recursive call
2390 needed at this level, change it into iteration.
2391 This function is called enough to be worth it. */
2392 if (i == 0)
2393 {
2394 x = XEXP (x, i);
2395 goto repeat;
2396 }
2397
2398 hash += hash_rtx (XEXP (x, i), 0, do_not_record_p,
2399 hash_arg_in_memory_p, have_reg_qty);
2400 break;
2401
2402 case 'E':
2403 for (j = 0; j < XVECLEN (x, i); j++)
2404 hash += hash_rtx (XVECEXP (x, i, j), 0, do_not_record_p,
2405 hash_arg_in_memory_p, have_reg_qty);
2406 break;
2407
2408 case 's':
2409 hash += hash_rtx_string (XSTR (x, i));
2410 break;
2411
2412 case 'i':
2413 hash += (unsigned int) XINT (x, i);
2414 break;
2415
2416 case '0': case 't':
2417 /* Unused. */
2418 break;
2419
2420 default:
2421 gcc_unreachable ();
2422 }
2423 }
2424
2425 return hash;
2426 }
2427
2428 /* Hash an rtx X for cse via hash_rtx.
2429 Stores 1 in do_not_record if any subexpression is volatile.
2430 Stores 1 in hash_arg_in_memory if X contains a MEM rtx for which
2431 MEM_READONLY_P is false. */
2432
2433 static inline unsigned
2434 canon_hash (rtx x, enum machine_mode mode)
2435 {
2436 return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
2437 }
2438
2439 /* Like canon_hash but with no side effects, i.e. do_not_record
2440 and hash_arg_in_memory are not changed. */
2441
2442 static inline unsigned
2443 safe_hash (rtx x, enum machine_mode mode)
2444 {
2445 int dummy_do_not_record;
2446 return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
2447 }
2448 \f
2449 /* Return 1 iff X and Y would canonicalize into the same thing,
2450 without actually constructing the canonicalization of either one.
2451 If VALIDATE is nonzero,
2452 we assume X is an expression being processed from the rtl
2453 and Y was found in the hash table. We check register refs
2454 in Y for being marked as valid.
2455
2456 If FOR_GCSE is true, we compare X and Y for equivalence for GCSE. */
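
/* For example (an illustrative sketch),
	(plus:SI (reg 100) (reg 101)) and (plus:SI (reg 101) (reg 100))
   compare equal here because PLUS is commutative, while
	(mult:SI (reg 100) (reg 101)) and (mult:HI (reg 100) (reg 101))
   do not, because the modes differ. */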
2457
2458 int
2459 exp_equiv_p (rtx x, rtx y, int validate, bool for_gcse)
2460 {
2461 int i, j;
2462 enum rtx_code code;
2463 const char *fmt;
2464
2465 /* Note: it is incorrect to assume an expression is equivalent to itself
2466 if VALIDATE is nonzero. */
2467 if (x == y && !validate)
2468 return 1;
2469
2470 if (x == 0 || y == 0)
2471 return x == y;
2472
2473 code = GET_CODE (x);
2474 if (code != GET_CODE (y))
2475 return 0;
2476
2477 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2478 if (GET_MODE (x) != GET_MODE (y))
2479 return 0;
2480
2481 switch (code)
2482 {
2483 case PC:
2484 case CC0:
2485 case CONST_INT:
2486 return x == y;
2487
2488 case LABEL_REF:
2489 return XEXP (x, 0) == XEXP (y, 0);
2490
2491 case SYMBOL_REF:
2492 return XSTR (x, 0) == XSTR (y, 0);
2493
2494 case REG:
2495 if (for_gcse)
2496 return REGNO (x) == REGNO (y);
2497 else
2498 {
2499 unsigned int regno = REGNO (y);
2500 unsigned int i;
2501 unsigned int endregno
2502 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2503 : hard_regno_nregs[regno][GET_MODE (y)]);
2504
2505 /* If the quantities are not the same, the expressions are not
2506 equivalent. If they are and we are not to validate, they
2507 are equivalent. Otherwise, ensure all regs are up-to-date. */
2508
2509 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2510 return 0;
2511
2512 if (! validate)
2513 return 1;
2514
2515 for (i = regno; i < endregno; i++)
2516 if (REG_IN_TABLE (i) != REG_TICK (i))
2517 return 0;
2518
2519 return 1;
2520 }
2521
2522 case MEM:
2523 if (for_gcse)
2524 {
2525 /* Can't merge two expressions in different alias sets, since we
2526 can decide that the expression is transparent in a block when
2527 it isn't, due to it being set with the different alias set. */
2528 if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
2529 return 0;
2530
2531 /* A volatile mem should not be considered equivalent to any
2532 other. */
2533 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2534 return 0;
2535 }
2536 break;
2537
2538 /* For commutative operations, check both orders. */
2539 case PLUS:
2540 case MULT:
2541 case AND:
2542 case IOR:
2543 case XOR:
2544 case NE:
2545 case EQ:
2546 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
2547 validate, for_gcse)
2548 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2549 validate, for_gcse))
2550 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2551 validate, for_gcse)
2552 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2553 validate, for_gcse)));
2554
2555 case ASM_OPERANDS:
2556 /* We don't use the generic code below because we want to
2557 disregard filename and line numbers. */
2558
2559 /* A volatile asm isn't equivalent to any other. */
2560 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2561 return 0;
2562
2563 if (GET_MODE (x) != GET_MODE (y)
2564 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2565 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2566 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2567 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2568 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2569 return 0;
2570
2571 if (ASM_OPERANDS_INPUT_LENGTH (x))
2572 {
2573 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2574 if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2575 ASM_OPERANDS_INPUT (y, i),
2576 validate, for_gcse)
2577 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2578 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2579 return 0;
2580 }
2581
2582 return 1;
2583
2584 default:
2585 break;
2586 }
2587
2588 /* Compare the elements. If any pair of corresponding elements
2589 fail to match, return 0 for the whole thing. */
2590
2591 fmt = GET_RTX_FORMAT (code);
2592 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2593 {
2594 switch (fmt[i])
2595 {
2596 case 'e':
2597 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
2598 validate, for_gcse))
2599 return 0;
2600 break;
2601
2602 case 'E':
2603 if (XVECLEN (x, i) != XVECLEN (y, i))
2604 return 0;
2605 for (j = 0; j < XVECLEN (x, i); j++)
2606 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2607 validate, for_gcse))
2608 return 0;
2609 break;
2610
2611 case 's':
2612 if (strcmp (XSTR (x, i), XSTR (y, i)))
2613 return 0;
2614 break;
2615
2616 case 'i':
2617 if (XINT (x, i) != XINT (y, i))
2618 return 0;
2619 break;
2620
2621 case 'w':
2622 if (XWINT (x, i) != XWINT (y, i))
2623 return 0;
2624 break;
2625
2626 case '0':
2627 case 't':
2628 break;
2629
2630 default:
2631 gcc_unreachable ();
2632 }
2633 }
2634
2635 return 1;
2636 }
2637 \f
2638 /* Return 1 if X has a value that can vary even between two
2639 executions of the program. 0 means X can be compared reliably
2640 against certain constants or near-constants. */
2641
2642 static int
2643 cse_rtx_varies_p (rtx x, int from_alias)
2644 {
2645 /* We need not check for X and the equivalence class being of the same
2646 mode because if X is equivalent to a constant in some mode, it
2647 doesn't vary in any mode. */
2648
2649 if (REG_P (x)
2650 && REGNO_QTY_VALID_P (REGNO (x)))
2651 {
2652 int x_q = REG_QTY (REGNO (x));
2653 struct qty_table_elem *x_ent = &qty_table[x_q];
2654
2655 if (GET_MODE (x) == x_ent->mode
2656 && x_ent->const_rtx != NULL_RTX)
2657 return 0;
2658 }
2659
2660 if (GET_CODE (x) == PLUS
2661 && GET_CODE (XEXP (x, 1)) == CONST_INT
2662 && REG_P (XEXP (x, 0))
2663 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2664 {
2665 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2666 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2667
2668 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2669 && x0_ent->const_rtx != NULL_RTX)
2670 return 0;
2671 }
2672
2673 /* This can happen as the result of virtual register instantiation, if
2674 the initial constant is too large to be a valid address. This gives
2675 us a three instruction sequence, load large offset into a register,
2676 load fp minus a constant into a register, then a MEM which is the
2677 sum of the two `constant' registers. */
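/* A sketch of that sequence (illustrative pseudo numbers):
	(set (reg 100) (const_int 65536))
	(set (reg 101) (plus (reg fp) (const_int -8)))
	... (mem (plus (reg 100) (reg 101))) ... */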
2678 if (GET_CODE (x) == PLUS
2679 && REG_P (XEXP (x, 0))
2680 && REG_P (XEXP (x, 1))
2681 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2682 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2683 {
2684 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2685 int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2686 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2687 struct qty_table_elem *x1_ent = &qty_table[x1_q];
2688
2689 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2690 && x0_ent->const_rtx != NULL_RTX
2691 && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2692 && x1_ent->const_rtx != NULL_RTX)
2693 return 0;
2694 }
2695
2696 return rtx_varies_p (x, from_alias);
2697 }
2698 \f
2699 /* Subroutine of canon_reg. Pass *XLOC through canon_reg, and validate
2700 the result if necessary. INSN is as for canon_reg. */
2701
2702 static void
2703 validate_canon_reg (rtx *xloc, rtx insn)
2704 {
2705 rtx new = canon_reg (*xloc, insn);
2706 int insn_code;
2707
2708 /* If replacing pseudo with hard reg or vice versa, ensure the
2709 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2710 if (insn != 0 && new != 0
2711 && REG_P (new) && REG_P (*xloc)
2712 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2713 != (REGNO (*xloc) < FIRST_PSEUDO_REGISTER))
2714 || GET_MODE (new) != GET_MODE (*xloc)
2715 || (insn_code = recog_memoized (insn)) < 0
2716 || insn_data[insn_code].n_dups > 0))
2717 validate_change (insn, xloc, new, 1);
2718 else
2719 *xloc = new;
2720 }
2721
2722 /* Canonicalize an expression:
2723 replace each register reference inside it
2724 with the "oldest" equivalent register.
2725
2726 If INSN is nonzero and we are replacing a pseudo with a hard register
2727 or vice versa, validate_change is used to ensure that INSN remains valid
2728 after we make our substitution. The calls are made with IN_GROUP nonzero
2729 so apply_change_group must be called upon the outermost return from this
2730 function (unless INSN is zero). The result of apply_change_group can
2731 generally be discarded since the changes we are making are optional. */
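
/* An illustrative sketch (hypothetical pseudo numbers): if regs 100, 105
   and 107 are all in one quantity and 100 is the quantity's first_reg,
   canonicalizing (plus:SI (reg 105) (reg 107)) yields
	(plus:SI (reg 100) (reg 100)). */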
2732
2733 static rtx
2734 canon_reg (rtx x, rtx insn)
2735 {
2736 int i;
2737 enum rtx_code code;
2738 const char *fmt;
2739
2740 if (x == 0)
2741 return x;
2742
2743 code = GET_CODE (x);
2744 switch (code)
2745 {
2746 case PC:
2747 case CC0:
2748 case CONST:
2749 case CONST_INT:
2750 case CONST_DOUBLE:
2751 case CONST_VECTOR:
2752 case SYMBOL_REF:
2753 case LABEL_REF:
2754 case ADDR_VEC:
2755 case ADDR_DIFF_VEC:
2756 return x;
2757
2758 case REG:
2759 {
2760 int first;
2761 int q;
2762 struct qty_table_elem *ent;
2763
2764 /* Never replace a hard reg, because hard regs can appear
2765 in more than one machine mode, and we must preserve the mode
2766 of each occurrence. Also, some hard regs appear in
2767 MEMs that are shared and mustn't be altered. Don't try to
2768 replace any reg that maps to a reg of class NO_REGS. */
2769 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2770 || ! REGNO_QTY_VALID_P (REGNO (x)))
2771 return x;
2772
2773 q = REG_QTY (REGNO (x));
2774 ent = &qty_table[q];
2775 first = ent->first_reg;
2776 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2777 : REGNO_REG_CLASS (first) == NO_REGS ? x
2778 : gen_rtx_REG (ent->mode, first));
2779 }
2780
2781 default:
2782 break;
2783 }
2784
2785 fmt = GET_RTX_FORMAT (code);
2786 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2787 {
2788 int j;
2789
2790 if (fmt[i] == 'e')
2791 validate_canon_reg (&XEXP (x, i), insn);
2792 else if (fmt[i] == 'E')
2793 for (j = 0; j < XVECLEN (x, i); j++)
2794 validate_canon_reg (&XVECEXP (x, i, j), insn);
2795 }
2796
2797 return x;
2798 }
2799 \f
2800 /* LOC is a location within INSN that is an operand address (the contents of
2801 a MEM). Find the best equivalent address to use that is valid for this
2802 insn.
2803
2804 On most CISC machines, complicated address modes are costly, and rtx_cost
2805 is a good approximation for that cost. However, most RISC machines have
2806 only a few (usually only one) memory reference formats. If an address is
2807 valid at all, it is often just as cheap as any other address. Hence, for
2808 RISC machines, we use `address_cost' to compare the costs of various
2809 addresses. For two addresses of equal cost, choose the one with the
2810 higher `rtx_cost' value, as that has the potential of eliminating the
2811 most insns. If both costs are equal, we choose the first in the equivalence
2812 class. Note that we ignore the fact that pseudo registers are cheaper than
2813 hard registers here because we would also prefer the pseudo registers. */
2814
2815 static void
2816 find_best_addr (rtx insn, rtx *loc, enum machine_mode mode)
2817 {
2818 struct table_elt *elt;
2819 rtx addr = *loc;
2820 struct table_elt *p;
2821 int found_better = 1;
2822 int save_do_not_record = do_not_record;
2823 int save_hash_arg_in_memory = hash_arg_in_memory;
2824 int addr_volatile;
2825 int regno;
2826 unsigned hash;
2827
2828 /* Do not try to replace constant addresses or addresses of local and
2829 argument slots. These MEM expressions are made only once and inserted
2830 in many instructions, as well as being used to control symbol table
2831 output. It is not safe to clobber them.
2832
2833 There are some uncommon cases where the address is already in a register
2834 for some reason, but we cannot take advantage of that because we have
2835 no easy way to unshare the MEM. In addition, looking up all stack
2836 addresses is costly. */
2837 if ((GET_CODE (addr) == PLUS
2838 && REG_P (XEXP (addr, 0))
2839 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2840 && (regno = REGNO (XEXP (addr, 0)),
2841 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2842 || regno == ARG_POINTER_REGNUM))
2843 || (REG_P (addr)
2844 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2845 || regno == HARD_FRAME_POINTER_REGNUM
2846 || regno == ARG_POINTER_REGNUM))
2847 || CONSTANT_ADDRESS_P (addr))
2848 return;
2849
2850 /* If this address is not simply a register, try to fold it. This will
2851 sometimes simplify the expression. Many simplifications
2852 will not be valid, but some, usually applying the associative rule, will
2853 be valid and produce better code. */
2854 if (!REG_P (addr))
2855 {
2856 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2857 int addr_folded_cost = address_cost (folded, mode);
2858 int addr_cost = address_cost (addr, mode);
2859
2860 if ((addr_folded_cost < addr_cost
2861 || (addr_folded_cost == addr_cost
2862 /* ??? The rtx_cost comparison is left over from an older
2863 version of this code. It is probably no longer helpful. */
2864 && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2865 || approx_reg_cost (folded) < approx_reg_cost (addr))))
2866 && validate_change (insn, loc, folded, 0))
2867 addr = folded;
2868 }
2869
2870 /* If this address is not in the hash table, we can't look for equivalences
2871 of the whole address. Also, ignore if volatile. */
2872
2873 do_not_record = 0;
2874 hash = HASH (addr, Pmode);
2875 addr_volatile = do_not_record;
2876 do_not_record = save_do_not_record;
2877 hash_arg_in_memory = save_hash_arg_in_memory;
2878
2879 if (addr_volatile)
2880 return;
2881
2882 elt = lookup (addr, hash, Pmode);
2883
2884 if (elt)
2885 {
2886 /* We need to find the best (under the criteria documented above) entry
2887 in the class that is valid. We use the `flag' field to indicate
2888 choices that were invalid and iterate until we can't find a better
2889 one that hasn't already been tried. */
2890
2891 for (p = elt->first_same_value; p; p = p->next_same_value)
2892 p->flag = 0;
2893
2894 while (found_better)
2895 {
2896 int best_addr_cost = address_cost (*loc, mode);
2897 int best_rtx_cost = (elt->cost + 1) >> 1;
2898 int exp_cost;
2899 struct table_elt *best_elt = elt;
2900
2901 found_better = 0;
2902 for (p = elt->first_same_value; p; p = p->next_same_value)
2903 if (! p->flag)
2904 {
2905 if ((REG_P (p->exp)
2906 || exp_equiv_p (p->exp, p->exp, 1, false))
2907 && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
2908 || (exp_cost == best_addr_cost
2909 && ((p->cost + 1) >> 1) > best_rtx_cost)))
2910 {
2911 found_better = 1;
2912 best_addr_cost = exp_cost;
2913 best_rtx_cost = (p->cost + 1) >> 1;
2914 best_elt = p;
2915 }
2916 }
2917
2918 if (found_better)
2919 {
2920 if (validate_change (insn, loc,
2921 canon_reg (copy_rtx (best_elt->exp),
2922 NULL_RTX), 0))
2923 return;
2924 else
2925 best_elt->flag = 1;
2926 }
2927 }
2928 }
2929
2930 /* If the address is a binary operation with the first operand a register
2931 and the second a constant, do the same as above, but looking for
2932 equivalences of the register. Then try to simplify before checking for
2933 the best address to use. This catches a few cases: First is when we
2934 have REG+const and the register is another REG+const. We can often merge
2935 the constants and eliminate one insn and one register. It may also be
2936 that a machine has a cheap REG+REG+const. Finally, this improves the
2937 code on the Alpha for unaligned byte stores. */
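
/* An illustrative sketch of the REG+const case (hypothetical pseudos):
   if (reg 101) is known equivalent to (plus (reg 100) (const_int 8)),
   the address (plus (reg 101) (const_int 4)) can be rewritten as
	(plus (reg 100) (const_int 12)),
   often saving an insn and a register. */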
2938
2939 if (flag_expensive_optimizations
2940 && ARITHMETIC_P (*loc)
2941 && REG_P (XEXP (*loc, 0)))
2942 {
2943 rtx op1 = XEXP (*loc, 1);
2944
2945 do_not_record = 0;
2946 hash = HASH (XEXP (*loc, 0), Pmode);
2947 do_not_record = save_do_not_record;
2948 hash_arg_in_memory = save_hash_arg_in_memory;
2949
2950 elt = lookup (XEXP (*loc, 0), hash, Pmode);
2951 if (elt == 0)
2952 return;
2953
2954 /* We need to find the best (under the criteria documented above) entry
2955 in the class that is valid. We use the `flag' field to indicate
2956 choices that were invalid and iterate until we can't find a better
2957 one that hasn't already been tried. */
2958
2959 for (p = elt->first_same_value; p; p = p->next_same_value)
2960 p->flag = 0;
2961
2962 while (found_better)
2963 {
2964 int best_addr_cost = address_cost (*loc, mode);
2965 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2966 struct table_elt *best_elt = elt;
2967 rtx best_rtx = *loc;
2968 int count;
2969
2970 /* In the worst case this is an O(n^2) algorithm, so limit our search
2971 to the first 32 elements on the list. This avoids trouble
2972 compiling code with very long basic blocks that can easily
2973 call simplify_gen_binary so many times that we run out of
2974 memory. */
2975
2976 found_better = 0;
2977 for (p = elt->first_same_value, count = 0;
2978 p && count < 32;
2979 p = p->next_same_value, count++)
2980 if (! p->flag
2981 && (REG_P (p->exp)
2982 || exp_equiv_p (p->exp, p->exp, 1, false)))
2983 {
2984 rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
2985 p->exp, op1);
2986 int new_cost;
2987
2988 /* Get the canonical version of the address so we can accept
2989 more. */
2990 new = canon_for_address (new);
2991
2992 new_cost = address_cost (new, mode);
2993
2994 if (new_cost < best_addr_cost
2995 || (new_cost == best_addr_cost
2996 && (COST (new) + 1) >> 1 > best_rtx_cost))
2997 {
2998 found_better = 1;
2999 best_addr_cost = new_cost;
3000 best_rtx_cost = (COST (new) + 1) >> 1;
3001 best_elt = p;
3002 best_rtx = new;
3003 }
3004 }
3005
3006 if (found_better)
3007 {
3008 if (validate_change (insn, loc,
3009 canon_reg (copy_rtx (best_rtx),
3010 NULL_RTX), 0))
3011 return;
3012 else
3013 best_elt->flag = 1;
3014 }
3015 }
3016 }
3017 }
3018 \f
3019 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
3020 operation (EQ, NE, GT, etc.), follow it back through the hash table and
3021 what values are being compared.
3022
3023 *PARG1 and *PARG2 are updated to contain the rtx representing the values
3024 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
3025 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3026 compared to produce cc0.
3027
3028 The return value is the comparison operator: either CODE itself or the
3029 code corresponding to the inverse of the comparison. */
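
/* A hypothetical sketch: if *PARG1 is (cc0), *PARG2 is (const_int 0), and
   the table shows cc0 was last set from
	(compare (reg 100) (reg 101)),
   then on return *PARG1 is (reg 100), *PARG2 is (reg 101), and the
   returned code still reflects the original test. */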
3030
3031 static enum rtx_code
3032 find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
3033 enum machine_mode *pmode1, enum machine_mode *pmode2)
3034 {
3035 rtx arg1, arg2;
3036
3037 arg1 = *parg1, arg2 = *parg2;
3038
3039 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
3040
3041 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
3042 {
3043 /* Set nonzero when we find something of interest. */
3044 rtx x = 0;
3045 int reverse_code = 0;
3046 struct table_elt *p = 0;
3047
3048 /* If arg1 is a COMPARE, extract the comparison arguments from it.
3049 On machines with CC0, this is the only case that can occur, since
3050 fold_rtx will return the COMPARE or item being compared with zero
3051 when given CC0. */
3052
3053 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3054 x = arg1;
3055
3056 /* If ARG1 is a comparison operator and CODE is testing for
3057 STORE_FLAG_VALUE, get the inner arguments. */
3058
3059 else if (COMPARISON_P (arg1))
3060 {
3061 #ifdef FLOAT_STORE_FLAG_VALUE
3062 REAL_VALUE_TYPE fsfv;
3063 #endif
3064
3065 if (code == NE
3066 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3067 && code == LT && STORE_FLAG_VALUE == -1)
3068 #ifdef FLOAT_STORE_FLAG_VALUE
3069 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3070 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3071 REAL_VALUE_NEGATIVE (fsfv)))
3072 #endif
3073 )
3074 x = arg1;
3075 else if (code == EQ
3076 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3077 && code == GE && STORE_FLAG_VALUE == -1)
3078 #ifdef FLOAT_STORE_FLAG_VALUE
3079 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3080 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3081 REAL_VALUE_NEGATIVE (fsfv)))
3082 #endif
3083 )
3084 x = arg1, reverse_code = 1;
3085 }
3086
3087 /* ??? We could also check for
3088
3089 (ne (and (eq (...) (const_int 1))) (const_int 0))
3090
3091 and related forms, but let's wait until we see them occurring. */
3092
3093 if (x == 0)
3094 /* Look up ARG1 in the hash table and see if it has an equivalence
3095 that lets us see what is being compared. */
3096 p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
3097 if (p)
3098 {
3099 p = p->first_same_value;
3100
3101 /* If what we compare is already known to be constant, that is as
3102 good as it gets.
3103 We need to break the loop in this case, because otherwise we
3104 can have an infinite loop when looking at a reg that is known
3105 to be a constant which is the same as a comparison of a reg
3106 against zero which appears later in the insn stream, which in
3107 turn is constant and the same as the comparison of the first reg
3108 against zero... */
3109 if (p->is_const)
3110 break;
3111 }
3112
3113 for (; p; p = p->next_same_value)
3114 {
3115 enum machine_mode inner_mode = GET_MODE (p->exp);
3116 #ifdef FLOAT_STORE_FLAG_VALUE
3117 REAL_VALUE_TYPE fsfv;
3118 #endif
3119
3120 /* If the entry isn't valid, skip it. */
3121 if (! exp_equiv_p (p->exp, p->exp, 1, false))
3122 continue;
3123
3124 if (GET_CODE (p->exp) == COMPARE
3125 /* Another possibility is that this machine has a compare insn
3126 that includes the comparison code. In that case, ARG1 would
3127 be equivalent to a comparison operation that would set ARG1 to
3128 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3129 ORIG_CODE is the actual comparison being done; if it is an EQ,
3130 we must reverse ORIG_CODE. On machines with a negative value
3131 for STORE_FLAG_VALUE, also look at LT and GE operations. */
3132 || ((code == NE
3133 || (code == LT
3134 && GET_MODE_CLASS (inner_mode) == MODE_INT
3135 && (GET_MODE_BITSIZE (inner_mode)
3136 <= HOST_BITS_PER_WIDE_INT)
3137 && (STORE_FLAG_VALUE
3138 & ((HOST_WIDE_INT) 1
3139 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3140 #ifdef FLOAT_STORE_FLAG_VALUE
3141 || (code == LT
3142 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3143 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3144 REAL_VALUE_NEGATIVE (fsfv)))
3145 #endif
3146 )
3147 && COMPARISON_P (p->exp)))
3148 {
3149 x = p->exp;
3150 break;
3151 }
3152 else if ((code == EQ
3153 || (code == GE
3154 && GET_MODE_CLASS (inner_mode) == MODE_INT
3155 && (GET_MODE_BITSIZE (inner_mode)
3156 <= HOST_BITS_PER_WIDE_INT)
3157 && (STORE_FLAG_VALUE
3158 & ((HOST_WIDE_INT) 1
3159 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3160 #ifdef FLOAT_STORE_FLAG_VALUE
3161 || (code == GE
3162 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3163 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3164 REAL_VALUE_NEGATIVE (fsfv)))
3165 #endif
3166 )
3167 && COMPARISON_P (p->exp))
3168 {
3169 reverse_code = 1;
3170 x = p->exp;
3171 break;
3172 }
3173
3174 /* If this is a non-trapping address, e.g. fp + constant, the
3175 equivalent is a better operand since it may let us predict
3176 the value of the comparison. */
3177 else if (!rtx_addr_can_trap_p (p->exp))
3178 {
3179 arg1 = p->exp;
3180 continue;
3181 }
3182 }
3183
3184 /* If we didn't find a useful equivalence for ARG1, we are done.
3185 Otherwise, set up for the next iteration. */
3186 if (x == 0)
3187 break;
3188
3189 /* If we need to reverse the comparison, make sure that that is
3190 possible -- we can't necessarily infer the value of GE from LT
3191 with floating-point operands. */
3192 if (reverse_code)
3193 {
3194 enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3195 if (reversed == UNKNOWN)
3196 break;
3197 else
3198 code = reversed;
3199 }
3200 else if (COMPARISON_P (x))
3201 code = GET_CODE (x);
3202 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3203 }
3204
3205 /* Return our results. Return the modes from before fold_rtx
3206 because fold_rtx might produce const_int, and then it's too late. */
3207 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3208 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3209
3210 return code;
3211 }
3212 \f
3213 /* If X is a nontrivial arithmetic operation on an argument
3214 for which a constant value can be determined, return
3215 the result of operating on that value, as a constant.
3216 Otherwise, return X, possibly with one or more operands
3217 modified by recursive calls to this function.
3218
3219 If X is a register whose contents are known, we do NOT
3220 return those contents here. equiv_constant is called to
3221 perform that task.
3222
3223 INSN is the insn that we may be modifying. If it is 0, make a copy
3224 of X before modifying it. */
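
/* An illustrative sketch: if (reg 100) is known equivalent to
   (const_int 8), then folding
	(plus:SI (reg 100) (const_int 4))
   yields (const_int 12), while folding (reg 100) by itself returns
   (reg 100) unchanged -- equiv_constant handles that case. */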
3225
3226 static rtx
3227 fold_rtx (rtx x, rtx insn)
3228 {
3229 enum rtx_code code;
3230 enum machine_mode mode;
3231 const char *fmt;
3232 int i;
3233 rtx new = 0;
3234 int copied = 0;
3235 int must_swap = 0;
3236
3237 /* Folded equivalents of first two operands of X. */
3238 rtx folded_arg0;
3239 rtx folded_arg1;
3240
3241 /* Constant equivalents of first three operands of X;
3242 0 when no such equivalent is known. */
3243 rtx const_arg0;
3244 rtx const_arg1;
3245 rtx const_arg2;
3246
3247 /* The mode of the first operand of X. We need this for sign and zero
3248 extends. */
3249 enum machine_mode mode_arg0;
3250
3251 if (x == 0)
3252 return x;
3253
3254 mode = GET_MODE (x);
3255 code = GET_CODE (x);
3256 switch (code)
3257 {
3258 case CONST:
3259 case CONST_INT:
3260 case CONST_DOUBLE:
3261 case CONST_VECTOR:
3262 case SYMBOL_REF:
3263 case LABEL_REF:
3264 case REG:
3265 /* No use simplifying an EXPR_LIST
3266 since EXPR_LISTs are used only for lists of args
3267 in a function call's REG_EQUAL note. */
3268 case EXPR_LIST:
3269 return x;
3270
3271 #ifdef HAVE_cc0
3272 case CC0:
3273 return prev_insn_cc0;
3274 #endif
3275
3276 case PC:
3277 /* If the next insn is a CODE_LABEL followed by a jump table,
3278 PC's value is a LABEL_REF pointing to that label. That
3279 lets us fold switch statements on the VAX. */
3280 {
3281 rtx next;
3282 if (insn && tablejump_p (insn, &next, NULL))
3283 return gen_rtx_LABEL_REF (Pmode, next);
3284 }
3285 break;
3286
3287 case SUBREG:
3288 /* See if we previously assigned a constant value to this SUBREG. */
3289 if ((new = lookup_as_function (x, CONST_INT)) != 0
3290 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3291 return new;
3292
3293 /* If this is a paradoxical SUBREG, we have no idea what value the
3294 extra bits would have. However, if the operand is equivalent
3295 to a SUBREG whose operand is the same as our mode, and all the
3296 modes are within a word, we can just use the inner operand
3297 because these SUBREGs just say how to treat the register.
3298
3299 Similarly if we find an integer constant. */
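/* A sketch (hypothetical pseudos): for (subreg:SI (reg:HI 100) 0),
   if (reg:HI 100) is known equivalent to (subreg:HI (reg:SI 101) 0),
   both modes fit in a word, so we can simply use (reg:SI 101). */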
3300
3301 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3302 {
3303 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3304 struct table_elt *elt;
3305
3306 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3307 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3308 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3309 imode)) != 0)
3310 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3311 {
3312 if (CONSTANT_P (elt->exp)
3313 && GET_MODE (elt->exp) == VOIDmode)
3314 return elt->exp;
3315
3316 if (GET_CODE (elt->exp) == SUBREG
3317 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3318 && exp_equiv_p (elt->exp, elt->exp, 1, false))
3319 return copy_rtx (SUBREG_REG (elt->exp));
3320 }
3321
3322 return x;
3323 }
3324
3325 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
3326 We might be able to if the SUBREG is extracting a single word in an
3327 integral mode or extracting the low part. */
3328
3329 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3330 const_arg0 = equiv_constant (folded_arg0);
3331 if (const_arg0)
3332 folded_arg0 = const_arg0;
3333
3334 if (folded_arg0 != SUBREG_REG (x))
3335 {
3336 new = simplify_subreg (mode, folded_arg0,
3337 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3338 if (new)
3339 return new;
3340 }
3341
3342 if (REG_P (folded_arg0)
3343 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0)))
3344 {
3345 struct table_elt *elt;
3346
3347 elt = lookup (folded_arg0,
3348 HASH (folded_arg0, GET_MODE (folded_arg0)),
3349 GET_MODE (folded_arg0));
3350
3351 if (elt)
3352 elt = elt->first_same_value;
3353
3354 if (subreg_lowpart_p (x))
3355 /* If this is a narrowing SUBREG and our operand is a REG, see
3356 if we can find an equivalence for REG that is an arithmetic
3357 operation in a wider mode where both operands are paradoxical
3358 SUBREGs from objects of our result mode. In that case, we
3359 couldn't report an equivalent value for that operation, since we
3360 don't know what the extra bits will be. But we can find an
3361 equivalence for this SUBREG by folding that operation in the
3362 narrow mode. This allows us to fold arithmetic in narrow modes
3363 when the machine only supports word-sized arithmetic.
3364
3365 Also look for a case where we have a SUBREG whose operand
3366 is the same as our result. If both modes are smaller
3367 than a word, we are simply interpreting a register in
3368 different modes and we can use the inner value. */
3369
3370 for (; elt; elt = elt->next_same_value)
3371 {
3372 enum rtx_code eltcode = GET_CODE (elt->exp);
3373
3374 /* Just check for unary and binary operations. */
3375 if (UNARY_P (elt->exp)
3376 && eltcode != SIGN_EXTEND
3377 && eltcode != ZERO_EXTEND
3378 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3379 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode
3380 && (GET_MODE_CLASS (mode)
3381 == GET_MODE_CLASS (GET_MODE (XEXP (elt->exp, 0)))))
3382 {
3383 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3384
3385 if (!REG_P (op0) && ! CONSTANT_P (op0))
3386 op0 = fold_rtx (op0, NULL_RTX);
3387
3388 op0 = equiv_constant (op0);
3389 if (op0)
3390 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3391 op0, mode);
3392 }
3393 else if (ARITHMETIC_P (elt->exp)
3394 && eltcode != DIV && eltcode != MOD
3395 && eltcode != UDIV && eltcode != UMOD
3396 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3397 && eltcode != ROTATE && eltcode != ROTATERT
3398 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3399 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3400 == mode))
3401 || CONSTANT_P (XEXP (elt->exp, 0)))
3402 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3403 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3404 == mode))
3405 || CONSTANT_P (XEXP (elt->exp, 1))))
3406 {
3407 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3408 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3409
3410 if (op0 && !REG_P (op0) && ! CONSTANT_P (op0))
3411 op0 = fold_rtx (op0, NULL_RTX);
3412
3413 if (op0)
3414 op0 = equiv_constant (op0);
3415
3416 if (op1 && !REG_P (op1) && ! CONSTANT_P (op1))
3417 op1 = fold_rtx (op1, NULL_RTX);
3418
3419 if (op1)
3420 op1 = equiv_constant (op1);
3421
3422 /* If we are looking for the low SImode part of
3423 (ashift:DI c (const_int 32)), it doesn't work
3424 to compute that in SImode, because a 32-bit shift
3425 in SImode is unpredictable. We know the value is 0. */
3426 if (op0 && op1
3427 && GET_CODE (elt->exp) == ASHIFT
3428 && GET_CODE (op1) == CONST_INT
3429 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3430 {
3431 if (INTVAL (op1)
3432 < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3433 /* If the count fits in the inner mode's width,
3434 but exceeds the outer mode's width,
3435 the value will get truncated to 0
3436 by the subreg. */
3437 new = CONST0_RTX (mode);
3438 else
3439 /* If the count exceeds even the inner mode's width,
3440 don't fold this expression. */
3441 new = 0;
3442 }
3443 else if (op0 && op1)
3444 new = simplify_binary_operation (GET_CODE (elt->exp), mode, op0, op1);
3445 }
3446
3447 else if (GET_CODE (elt->exp) == SUBREG
3448 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3449 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3450 <= UNITS_PER_WORD)
3451 && exp_equiv_p (elt->exp, elt->exp, 1, false))
3452 new = copy_rtx (SUBREG_REG (elt->exp));
3453
3454 if (new)
3455 return new;
3456 }
3457 else
3458 /* A SUBREG resulting from a zero extension may fold to zero if
3459 it extracts higher bits than the ZERO_EXTEND's source bits.
3460 FIXME: if combine tried to, er, combine these instructions,
3461 this transformation may be moved to simplify_subreg. */
3462 for (; elt; elt = elt->next_same_value)
3463 {
3464 if (GET_CODE (elt->exp) == ZERO_EXTEND
3465 && subreg_lsb (x)
3466 >= GET_MODE_BITSIZE (GET_MODE (XEXP (elt->exp, 0))))
3467 return CONST0_RTX (mode);
3468 }
3469 }
3470
3471 return x;
3472
3473 case NOT:
3474 case NEG:
3475 /* If we have (NOT Y), see if Y is known to be (NOT Z).
3476 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
3477 new = lookup_as_function (XEXP (x, 0), code);
3478 if (new)
3479 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3480 break;
3481
3482 case MEM:
3483 /* If we are not actually processing an insn, don't try to find the
3484 best address. Not only don't we care, but we could modify the
3485 MEM in an invalid way since we have no insn to validate against. */
3486 if (insn != 0)
3487 find_best_addr (insn, &XEXP (x, 0), GET_MODE (x));
3488
3489 {
3490 /* Even if we don't fold in the insn itself,
3491 we can safely do so here, in hopes of getting a constant. */
3492 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3493 rtx base = 0;
3494 HOST_WIDE_INT offset = 0;
3495
3496 if (REG_P (addr)
3497 && REGNO_QTY_VALID_P (REGNO (addr)))
3498 {
3499 int addr_q = REG_QTY (REGNO (addr));
3500 struct qty_table_elem *addr_ent = &qty_table[addr_q];
3501
3502 if (GET_MODE (addr) == addr_ent->mode
3503 && addr_ent->const_rtx != NULL_RTX)
3504 addr = addr_ent->const_rtx;
3505 }
3506
3507 /* If the address is constant, split it into a base and an integer offset. */
3508 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3509 base = addr;
3510 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3511 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3512 {
3513 base = XEXP (XEXP (addr, 0), 0);
3514 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3515 }
3516 else if (GET_CODE (addr) == LO_SUM
3517 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3518 base = XEXP (addr, 1);
3519
3520 /* If this is a constant pool reference, we can fold it into its
3521 constant to allow better value tracking. */
3522 if (base && GET_CODE (base) == SYMBOL_REF
3523 && CONSTANT_POOL_ADDRESS_P (base))
3524 {
3525 rtx constant = get_pool_constant (base);
3526 enum machine_mode const_mode = get_pool_mode (base);
3527 rtx new;
3528
3529 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3530 {
3531 constant_pool_entries_cost = COST (constant);
3532 constant_pool_entries_regcost = approx_reg_cost (constant);
3533 }
3534
3535 /* If we are loading the full constant, we have an equivalence. */
3536 if (offset == 0 && mode == const_mode)
3537 return constant;
3538
3539 /* If this actually isn't a constant (weird!), we can't do
3540 anything. Otherwise, handle the two most common cases:
3541 extracting a word from a multi-word constant, and extracting
3542 the low-order bits. Other cases don't seem common enough to
3543 worry about. */
3544 if (! CONSTANT_P (constant))
3545 return x;
3546
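/* For example (an illustrative case): on a 32-bit target, an SImode
   load at offset 4 from a DImode pool constant can be folded by
   operand_subword into word 1 of that constant, leaving no memory
   reference at all.  */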
3547 if (GET_MODE_CLASS (mode) == MODE_INT
3548 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3549 && offset % UNITS_PER_WORD == 0
3550 && (new = operand_subword (constant,
3551 offset / UNITS_PER_WORD,
3552 0, const_mode)) != 0)
3553 return new;
3554
3555 if (((BYTES_BIG_ENDIAN
3556 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3557 || (! BYTES_BIG_ENDIAN && offset == 0))
3558 && (new = gen_lowpart (mode, constant)) != 0)
3559 return new;
3560 }
3561
3562 /* If this is a reference to a label at a known position in a jump
3563 table, we also know its value. */
3564 if (base && GET_CODE (base) == LABEL_REF)
3565 {
3566 rtx label = XEXP (base, 0);
3567 rtx table_insn = NEXT_INSN (label);
3568
3569 if (table_insn && JUMP_P (table_insn)
3570 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3571 {
3572 rtx table = PATTERN (table_insn);
3573
3574 if (offset >= 0
3575 && (offset / GET_MODE_SIZE (GET_MODE (table))
3576 < XVECLEN (table, 0)))
3577 return XVECEXP (table, 0,
3578 offset / GET_MODE_SIZE (GET_MODE (table)));
3579 }
3580 if (table_insn && JUMP_P (table_insn)
3581 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3582 {
3583 rtx table = PATTERN (table_insn);
3584
3585 if (offset >= 0
3586 && (offset / GET_MODE_SIZE (GET_MODE (table))
3587 < XVECLEN (table, 1)))
3588 {
3589 offset /= GET_MODE_SIZE (GET_MODE (table));
3590 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3591 XEXP (table, 0));
3592
3593 if (GET_MODE (table) != Pmode)
3594 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3595
3596 /* Indicate this is a constant. This isn't a
3597 valid form of CONST, but it will only be used
3598 to fold the next insns and then discarded, so
3599 it should be safe.
3600
3601 Note this expression must be explicitly discarded,
3602 by cse_insn, else it may end up in a REG_EQUAL note
3603 and "escape" to cause problems elsewhere. */
3604 return gen_rtx_CONST (GET_MODE (new), new);
3605 }
3606 }
3607 }
3608
3609 return x;
3610 }
3611
3612 #ifdef NO_FUNCTION_CSE
3613 case CALL:
3614 if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3615 return x;
3616 break;
3617 #endif
3618
3619 case ASM_OPERANDS:
3620 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3621 validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3622 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3623 break;
3624
3625 default:
3626 break;
3627 }
3628
3629 const_arg0 = 0;
3630 const_arg1 = 0;
3631 const_arg2 = 0;
3632 mode_arg0 = VOIDmode;
3633
3634 /* Try folding our operands.
3635 Then see which ones have constant values known. */
3636
3637 fmt = GET_RTX_FORMAT (code);
3638 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3639 if (fmt[i] == 'e')
3640 {
3641 rtx arg = XEXP (x, i);
3642 rtx folded_arg = arg, const_arg = 0;
3643 enum machine_mode mode_arg = GET_MODE (arg);
3644 rtx cheap_arg, expensive_arg;
3645 rtx replacements[2];
3646 int j;
3647 int old_cost = COST_IN (XEXP (x, i), code);
3648
3649 /* Most arguments are cheap, so handle them specially. */
3650 switch (GET_CODE (arg))
3651 {
3652 case REG:
3653 /* This is the same as calling equiv_constant; it is duplicated
3654 here for speed. */
3655 if (REGNO_QTY_VALID_P (REGNO (arg)))
3656 {
3657 int arg_q = REG_QTY (REGNO (arg));
3658 struct qty_table_elem *arg_ent = &qty_table[arg_q];
3659
3660 if (arg_ent->const_rtx != NULL_RTX
3661 && !REG_P (arg_ent->const_rtx)
3662 && GET_CODE (arg_ent->const_rtx) != PLUS)
3663 const_arg
3664 = gen_lowpart (GET_MODE (arg),
3665 arg_ent->const_rtx);
3666 }
3667 break;
3668
3669 case CONST:
3670 case CONST_INT:
3671 case SYMBOL_REF:
3672 case LABEL_REF:
3673 case CONST_DOUBLE:
3674 case CONST_VECTOR:
3675 const_arg = arg;
3676 break;
3677
3678 #ifdef HAVE_cc0
3679 case CC0:
3680 folded_arg = prev_insn_cc0;
3681 mode_arg = prev_insn_cc0_mode;
3682 const_arg = equiv_constant (folded_arg);
3683 break;
3684 #endif
3685
3686 default:
3687 folded_arg = fold_rtx (arg, insn);
3688 const_arg = equiv_constant (folded_arg);
3689 }
3690
3691 /* For the first three operands, see if the operand
3692 is constant or equivalent to a constant. */
3693 switch (i)
3694 {
3695 case 0:
3696 folded_arg0 = folded_arg;
3697 const_arg0 = const_arg;
3698 mode_arg0 = mode_arg;
3699 break;
3700 case 1:
3701 folded_arg1 = folded_arg;
3702 const_arg1 = const_arg;
3703 break;
3704 case 2:
3705 const_arg2 = const_arg;
3706 break;
3707 }
3708
3709 /* Pick the least expensive of the folded argument and an
3710 equivalent constant argument. */
3711 if (const_arg == 0 || const_arg == folded_arg
3712 || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
3713 cheap_arg = folded_arg, expensive_arg = const_arg;
3714 else
3715 cheap_arg = const_arg, expensive_arg = folded_arg;
3716
3717 /* Try to replace the operand with the cheapest of the two
3718 possibilities. If it doesn't work and this is either of the first
3719 two operands of a commutative operation, try swapping them.
3720 If THAT fails, try the more expensive, provided it is cheaper
3721 than what is already there. */
3722
3723 if (cheap_arg == XEXP (x, i))
3724 continue;
3725
3726 if (insn == 0 && ! copied)
3727 {
3728 x = copy_rtx (x);
3729 copied = 1;
3730 }
3731
3732 /* Order the replacements from cheapest to most expensive. */
3733 replacements[0] = cheap_arg;
3734 replacements[1] = expensive_arg;
3735
3736 for (j = 0; j < 2 && replacements[j]; j++)
3737 {
3738 int new_cost = COST_IN (replacements[j], code);
3739
3740 /* Stop if what existed before was cheaper. Prefer constants
3741 in the case of a tie. */
3742 if (new_cost > old_cost
3743 || (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3744 break;
3745
3746 /* It's not safe to substitute the operand of a conversion
3747 operator with a constant, as the conversion's identity
3748 depends upon the mode of its operand. This optimization
3749 is handled by the call to simplify_unary_operation. */
3750 if (GET_RTX_CLASS (code) == RTX_UNARY
3751 && GET_MODE (replacements[j]) != mode_arg0
3752 && (code == ZERO_EXTEND
3753 || code == SIGN_EXTEND
3754 || code == TRUNCATE
3755 || code == FLOAT_TRUNCATE
3756 || code == FLOAT_EXTEND
3757 || code == FLOAT
3758 || code == FIX
3759 || code == UNSIGNED_FLOAT
3760 || code == UNSIGNED_FIX))
3761 continue;
3762
3763 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3764 break;
3765
3766 if (GET_RTX_CLASS (code) == RTX_COMM_COMPARE
3767 || GET_RTX_CLASS (code) == RTX_COMM_ARITH)
3768 {
3769 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3770 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3771
3772 if (apply_change_group ())
3773 {
3774 /* Swap them back to be invalid so that this loop can
3775 continue and flag them to be swapped back later. */
3776 rtx tem;
3777
3778 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3779 XEXP (x, 1) = tem;
3780 must_swap = 1;
3781 break;
3782 }
3783 }
3784 }
3785 }
3786
3787 else
3788 {
3789 if (fmt[i] == 'E')
3790 /* Don't try to fold inside of a vector of expressions.
3791 Doing nothing is harmless. */
3792 {;}
3793 }
3794
3795 /* If this is a commutative operation, place a constant integer as the second
3796 operand unless the first operand is also a constant integer. Otherwise,
3797 place any constant second unless the first operand is also a constant. */
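/* For example, (plus:SI (const_int 4) (reg:SI 60)) is rewritten
   here as (plus:SI (reg:SI 60) (const_int 4)), the canonical
   operand order.  */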
3798
3799 if (COMMUTATIVE_P (x))
3800 {
3801 if (must_swap
3802 || swap_commutative_operands_p (const_arg0 ? const_arg0
3803 : XEXP (x, 0),
3804 const_arg1 ? const_arg1
3805 : XEXP (x, 1)))
3806 {
3807 rtx tem = XEXP (x, 0);
3808
3809 if (insn == 0 && ! copied)
3810 {
3811 x = copy_rtx (x);
3812 copied = 1;
3813 }
3814
3815 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3816 validate_change (insn, &XEXP (x, 1), tem, 1);
3817 if (apply_change_group ())
3818 {
3819 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3820 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3821 }
3822 }
3823 }
3824
3825 /* If X is an arithmetic operation, see if we can simplify it. */
3826
3827 switch (GET_RTX_CLASS (code))
3828 {
3829 case RTX_UNARY:
3830 {
3831 int is_const = 0;
3832
3833 /* We can't simplify extension ops unless we know the
3834 original mode. */
3835 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3836 && mode_arg0 == VOIDmode)
3837 break;
3838
3839 /* If we had a CONST, strip it off and put it back later if we
3840 fold. */
3841 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3842 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3843
3844 new = simplify_unary_operation (code, mode,
3845 const_arg0 ? const_arg0 : folded_arg0,
3846 mode_arg0);
3847 /* NEG of PLUS could be converted into MINUS, but that causes
3848 expressions of the form
3849 (CONST (MINUS (CONST_INT) (SYMBOL_REF)))
3850 which many ports mistakenly treat as LEGITIMATE_CONSTANT_P.
3851 FIXME: those ports should be fixed. */
3852 if (new != 0 && is_const
3853 && GET_CODE (new) == PLUS
3854 && (GET_CODE (XEXP (new, 0)) == SYMBOL_REF
3855 || GET_CODE (XEXP (new, 0)) == LABEL_REF)
3856 && GET_CODE (XEXP (new, 1)) == CONST_INT)
3857 new = gen_rtx_CONST (mode, new);
3858 }
3859 break;
3860
3861 case RTX_COMPARE:
3862 case RTX_COMM_COMPARE:
3863 /* See what items are actually being compared and set FOLDED_ARG[01]
3864 to those values and CODE to the actual comparison code. If any are
3865 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3866 do anything if both operands are already known to be constant. */
3867
3868 if (const_arg0 == 0 || const_arg1 == 0)
3869 {
3870 struct table_elt *p0, *p1;
3871 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3872 enum machine_mode mode_arg1;
3873
3874 #ifdef FLOAT_STORE_FLAG_VALUE
3875 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3876 {
3877 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3878 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3879 false_rtx = CONST0_RTX (mode);
3880 }
3881 #endif
3882
3883 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3884 &mode_arg0, &mode_arg1);
3885 const_arg0 = equiv_constant (folded_arg0);
3886 const_arg1 = equiv_constant (folded_arg1);
3887
3888 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3889 what kinds of things are being compared, so we can't do
3890 anything with this comparison. */
3891
3892 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3893 break;
3894
3895 /* If we do not now have two constants being compared, see
3896 if we can nevertheless deduce some things about the
3897 comparison. */
3898 if (const_arg0 == 0 || const_arg1 == 0)
3899 {
3900 /* Some addresses are known to be nonzero. We don't know
3901 their sign, but equality comparisons are known. */
3902 if (const_arg1 == const0_rtx
3903 && nonzero_address_p (folded_arg0))
3904 {
3905 if (code == EQ)
3906 return false_rtx;
3907 else if (code == NE)
3908 return true_rtx;
3909 }
3910
3911 /* See if the two operands are the same. */
3912
3913 if (folded_arg0 == folded_arg1
3914 || (REG_P (folded_arg0)
3915 && REG_P (folded_arg1)
3916 && (REG_QTY (REGNO (folded_arg0))
3917 == REG_QTY (REGNO (folded_arg1))))
3918 || ((p0 = lookup (folded_arg0,
3919 SAFE_HASH (folded_arg0, mode_arg0),
3920 mode_arg0))
3921 && (p1 = lookup (folded_arg1,
3922 SAFE_HASH (folded_arg1, mode_arg0),
3923 mode_arg0))
3924 && p0->first_same_value == p1->first_same_value))
3925 {
3926 /* Sadly two equal NaNs are not equivalent. */
3927 if (!HONOR_NANS (mode_arg0))
3928 return ((code == EQ || code == LE || code == GE
3929 || code == LEU || code == GEU || code == UNEQ
3930 || code == UNLE || code == UNGE
3931 || code == ORDERED)
3932 ? true_rtx : false_rtx);
3933 /* Handle the FP comparisons we can still resolve. */
3934 if (code == UNEQ || code == UNLE || code == UNGE)
3935 return true_rtx;
3936 if (code == LTGT || code == LT || code == GT)
3937 return false_rtx;
3938 }
3939
3940 /* If FOLDED_ARG0 is a register, see if the comparison we are
3941 doing now is either the same as we did before or the reverse
3942 (we only check the reverse if not floating-point). */
3943 else if (REG_P (folded_arg0))
3944 {
3945 int qty = REG_QTY (REGNO (folded_arg0));
3946
3947 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3948 {
3949 struct qty_table_elem *ent = &qty_table[qty];
3950
3951 if ((comparison_dominates_p (ent->comparison_code, code)
3952 || (! FLOAT_MODE_P (mode_arg0)
3953 && comparison_dominates_p (ent->comparison_code,
3954 reverse_condition (code))))
3955 && (rtx_equal_p (ent->comparison_const, folded_arg1)
3956 || (const_arg1
3957 && rtx_equal_p (ent->comparison_const,
3958 const_arg1))
3959 || (REG_P (folded_arg1)
3960 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
3961 return (comparison_dominates_p (ent->comparison_code, code)
3962 ? true_rtx : false_rtx);
3963 }
3964 }
3965 }
3966 }
3967
3968 /* If we are comparing against zero, see if the first operand is
3969 equivalent to an IOR with a constant. If so, we may be able to
3970 determine the result of this comparison. */
3971
3972 if (const_arg1 == const0_rtx)
3973 {
3974 rtx y = lookup_as_function (folded_arg0, IOR);
3975 rtx inner_const;
3976
3977 if (y != 0
3978 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
3979 && GET_CODE (inner_const) == CONST_INT
3980 && INTVAL (inner_const) != 0)
3981 {
3982 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
3983 int has_sign = (HOST_BITS_PER_WIDE_INT > sign_bitnum
3984 && (INTVAL (inner_const)
3985 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
3986 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3987
3988 #ifdef FLOAT_STORE_FLAG_VALUE
3989 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3990 {
3991 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3992 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3993 false_rtx = CONST0_RTX (mode);
3994 }
3995 #endif
3996
3997 switch (code)
3998 {
3999 case EQ:
4000 return false_rtx;
4001 case NE:
4002 return true_rtx;
4003 case LT: case LE:
4004 if (has_sign)
4005 return true_rtx;
4006 break;
4007 case GT: case GE:
4008 if (has_sign)
4009 return false_rtx;
4010 break;
4011 default:
4012 break;
4013 }
4014 }
4015 }
4016
4017 {
4018 rtx op0 = const_arg0 ? const_arg0 : folded_arg0;
4019 rtx op1 = const_arg1 ? const_arg1 : folded_arg1;
4020 new = simplify_relational_operation (code, mode, mode_arg0, op0, op1);
4021 }
4022 break;
4023
4024 case RTX_BIN_ARITH:
4025 case RTX_COMM_ARITH:
4026 switch (code)
4027 {
4028 case PLUS:
4029 /* If the second operand is a LABEL_REF, see if the first is a MINUS
4030 with that LABEL_REF as its second operand. If so, the result is
4031 the first operand of that MINUS. This handles switches with an
4032 ADDR_DIFF_VEC table. */
4033 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4034 {
4035 rtx y
4036 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
4037 : lookup_as_function (folded_arg0, MINUS);
4038
4039 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4040 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4041 return XEXP (y, 0);
4042
4043 /* Now try for a CONST of a MINUS like the above. */
4044 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
4045 : lookup_as_function (folded_arg0, CONST))) != 0
4046 && GET_CODE (XEXP (y, 0)) == MINUS
4047 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4048 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
4049 return XEXP (XEXP (y, 0), 0);
4050 }
4051
4052 /* Likewise if the operands are in the other order. */
4053 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
4054 {
4055 rtx y
4056 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
4057 : lookup_as_function (folded_arg1, MINUS);
4058
4059 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4060 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
4061 return XEXP (y, 0);
4062
4063 /* Now try for a CONST of a MINUS like the above. */
4064 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
4065 : lookup_as_function (folded_arg1, CONST))) != 0
4066 && GET_CODE (XEXP (y, 0)) == MINUS
4067 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4068 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
4069 return XEXP (XEXP (y, 0), 0);
4070 }
4071
4072 /* If second operand is a register equivalent to a negative
4073 CONST_INT, see if we can find a register equivalent to the
4074 positive constant. Make a MINUS if so. Don't do this for
4075 a non-negative constant since we might then alternate between
4076 choosing positive and negative constants. Having the positive
4077 constant previously-used is the more common case. Be sure
4078 the resulting constant is non-negative; if const_arg1 were
4079 the smallest negative number, this would overflow: depending
4080 on the mode, this would either just be the same value (and
4081 hence not save anything) or be incorrect. */
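/* For example (illustrative): given (plus:SI X (const_int -4)),
   if some register R is already known to hold (const_int 4),
   we rewrite the sum as (minus:SI X R).  */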
4082 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4083 && INTVAL (const_arg1) < 0
4084 /* This used to test
4085
4086 -INTVAL (const_arg1) >= 0
4087
4088 But the Sun V5.0 compilers mis-compiled that test. So
4089 instead we test for the problematic value in a more direct
4090 manner and hope the Sun compilers get it correct. */
4091 && INTVAL (const_arg1) !=
4092 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
4093 && REG_P (folded_arg1))
4094 {
4095 rtx new_const = GEN_INT (-INTVAL (const_arg1));
4096 struct table_elt *p
4097 = lookup (new_const, SAFE_HASH (new_const, mode), mode);
4098
4099 if (p)
4100 for (p = p->first_same_value; p; p = p->next_same_value)
4101 if (REG_P (p->exp))
4102 return simplify_gen_binary (MINUS, mode, folded_arg0,
4103 canon_reg (p->exp, NULL_RTX));
4104 }
4105 goto from_plus;
4106
4107 case MINUS:
4108 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4109 If so, produce (PLUS Z C2-C). */
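/* For example, if Y is known to be (plus:SI Z (const_int 7))
   and C is 3, the result is (plus:SI Z (const_int 4)).  */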
4110 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4111 {
4112 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4113 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
4114 return fold_rtx (plus_constant (copy_rtx (y),
4115 -INTVAL (const_arg1)),
4116 NULL_RTX);
4117 }
4118
4119 /* Fall through. */
4120
4121 from_plus:
4122 case SMIN: case SMAX: case UMIN: case UMAX:
4123 case IOR: case AND: case XOR:
4124 case MULT:
4125 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
4126 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4127 is known to be of similar form, we may be able to replace the
4128 operation with a combined operation. This may eliminate the
4129 intermediate operation if every use is simplified in this way.
4130 Note that the similar optimization done by combine.c only works
4131 if the intermediate operation's result has only one reference. */
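/* For example (illustrative): if REG is known to hold
   (ashift:SI X (const_int 2)), then (ashift:SI REG (const_int 3))
   becomes (ashift:SI X (const_int 5)); the two shift counts are
   composed with PLUS below.  */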
4132
4133 if (REG_P (folded_arg0)
4134 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4135 {
4136 int is_shift
4137 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4138 rtx y = lookup_as_function (folded_arg0, code);
4139 rtx inner_const;
4140 enum rtx_code associate_code;
4141 rtx new_const;
4142
4143 if (y == 0
4144 || 0 == (inner_const
4145 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4146 || GET_CODE (inner_const) != CONST_INT
4147 /* If we have compiled a statement like
4148 "if (x == (x & mask1))", and now are looking at
4149 "x & mask2", we will have a case where the first operand
4150 of Y is the same as our first operand. Unless we detect
4151 this case, an infinite loop will result. */
4152 || XEXP (y, 0) == folded_arg0)
4153 break;
4154
4155 /* Don't associate these operations if they are a PLUS with the
4156 same constant and it is a power of two. These might be doable
4157 with a pre- or post-increment. Similarly for two subtracts of
4158 identical powers of two with a pre- or post-decrement.
4159
4160 if (code == PLUS && const_arg1 == inner_const
4161 && ((HAVE_PRE_INCREMENT
4162 && exact_log2 (INTVAL (const_arg1)) >= 0)
4163 || (HAVE_POST_INCREMENT
4164 && exact_log2 (INTVAL (const_arg1)) >= 0)
4165 || (HAVE_PRE_DECREMENT
4166 && exact_log2 (- INTVAL (const_arg1)) >= 0)
4167 || (HAVE_POST_DECREMENT
4168 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4169 break;
4170
4171 /* Compute the code used to compose the constants. For example,
4172 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS. */
4173
4174 associate_code = (is_shift || code == MINUS ? PLUS : code);
4175
4176 new_const = simplify_binary_operation (associate_code, mode,
4177 const_arg1, inner_const);
4178
4179 if (new_const == 0)
4180 break;
4181
4182 /* If we are associating shift operations, don't let this
4183 produce a shift of the size of the object or larger.
4184 This could occur when we follow a sign-extend by a right
4185 shift on a machine that does a sign-extend as a pair
4186 of shifts. */
4187
4188 if (is_shift && GET_CODE (new_const) == CONST_INT
4189 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4190 {
4191 /* As an exception, we can turn an ASHIFTRT of this
4192 form into a shift of the number of bits - 1. */
4193 if (code == ASHIFTRT)
4194 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4195 else
4196 break;
4197 }
4198
4199 y = copy_rtx (XEXP (y, 0));
4200
4201 /* If Y contains our first operand (the most common way this
4202 can happen is if Y is a MEM), we would go into an infinite
4203 loop if we tried to fold it. So don't fold it in that case. */
4204
4205 if (! reg_mentioned_p (folded_arg0, y))
4206 y = fold_rtx (y, insn);
4207
4208 return simplify_gen_binary (code, mode, y, new_const);
4209 }
4210 break;
4211
4212 case DIV: case UDIV:
4213 /* ??? The associative optimization performed immediately above is
4214 also possible for DIV and UDIV using associate_code of MULT.
4215 However, we would need extra code to verify that the
4216 multiplication does not overflow, that is, there is no overflow
4217 in the calculation of new_const. */
4218 break;
4219
4220 default:
4221 break;
4222 }
4223
4224 new = simplify_binary_operation (code, mode,
4225 const_arg0 ? const_arg0 : folded_arg0,
4226 const_arg1 ? const_arg1 : folded_arg1);
4227 break;
4228
4229 case RTX_OBJ:
4230 /* (lo_sum (high X) X) is simply X. */
4231 if (code == LO_SUM && const_arg0 != 0
4232 && GET_CODE (const_arg0) == HIGH
4233 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4234 return const_arg1;
4235 break;
4236
4237 case RTX_TERNARY:
4238 case RTX_BITFIELD_OPS:
4239 new = simplify_ternary_operation (code, mode, mode_arg0,
4240 const_arg0 ? const_arg0 : folded_arg0,
4241 const_arg1 ? const_arg1 : folded_arg1,
4242 const_arg2 ? const_arg2 : XEXP (x, 2));
4243 break;
4244
4245 default:
4246 break;
4247 }
4248
4249 return new ? new : x;
4250 }
4251 \f
4252 /* Return a constant value currently equivalent to X.
4253 Return 0 if we don't know one. */
4254
4255 static rtx
4256 equiv_constant (rtx x)
4257 {
4258 if (REG_P (x)
4259 && REGNO_QTY_VALID_P (REGNO (x)))
4260 {
4261 int x_q = REG_QTY (REGNO (x));
4262 struct qty_table_elem *x_ent = &qty_table[x_q];
4263
4264 if (x_ent->const_rtx)
4265 x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
4266 }
4267
4268 if (x == 0 || CONSTANT_P (x))
4269 return x;
4270
4271 /* If X is a MEM, try to fold it outside the context of any insn to see if
4272 it might be equivalent to a constant. That handles the case where it
4273 is a constant-pool reference. Then try to look it up in the hash table
4274 in case it is something whose value we have seen before. */
4275
4276 if (MEM_P (x))
4277 {
4278 struct table_elt *elt;
4279
4280 x = fold_rtx (x, NULL_RTX);
4281 if (CONSTANT_P (x))
4282 return x;
4283
4284 elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
4285 if (elt == 0)
4286 return 0;
4287
4288 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4289 if (elt->is_const && CONSTANT_P (elt->exp))
4290 return elt->exp;
4291 }
4292
4293 return 0;
4294 }
4295 \f
4296 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4297 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4298 least-significant part of X.
4299 MODE specifies how big a part of X to return.
4300
4301 If the requested operation cannot be done, 0 is returned.
4302
4303 This is similar to gen_lowpart_general in emit-rtl.c. */
4304
4305 rtx
4306 gen_lowpart_if_possible (enum machine_mode mode, rtx x)
4307 {
4308 rtx result = gen_lowpart_common (mode, x);
4309
4310 if (result)
4311 return result;
4312 else if (MEM_P (x))
4313 {
4314 /* This is the only other case we handle. */
4315 int offset = 0;
4316 rtx new;
4317
4318 if (WORDS_BIG_ENDIAN)
4319 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4320 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4321 if (BYTES_BIG_ENDIAN)
4322 /* Adjust the address so that the address-after-the-data is
4323 unchanged. */
4324 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4325 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
4326
4327 new = adjust_address_nv (x, mode, offset);
4328 if (! memory_address_p (mode, XEXP (new, 0)))
4329 return 0;
4330
4331 return new;
4332 }
4333 else
4334 return 0;
4335 }
4336 \f
4337 /* Given INSN, a jump insn, TAKEN indicates whether we are following the
4338 "taken" branch; it will be zero if not.
4339
4340 In certain cases, this can cause us to add an equivalence. For example,
4341 if we are following the taken case of
4342 if (i == 2)
4343 we can add the fact that `i' and `2' are now equivalent.
4344
4345 In any case, we can record that this comparison was passed. If the same
4346 comparison is seen later, we will know its value. */
4347
4348 static void
4349 record_jump_equiv (rtx insn, int taken)
4350 {
4351 int cond_known_true;
4352 rtx op0, op1;
4353 rtx set;
4354 enum machine_mode mode, mode0, mode1;
4355 int reversed_nonequality = 0;
4356 enum rtx_code code;
4357
4358 /* Ensure this is the right kind of insn. */
4359 if (! any_condjump_p (insn))
4360 return;
4361 set = pc_set (insn);
4362
4363 /* See if this jump condition is known true or false. */
4364 if (taken)
4365 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
4366 else
4367 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
4368
4369 /* Get the type of comparison being done and the operands being compared.
4370 If we had to reverse a non-equality condition, record that fact so we
4371 know that it isn't valid for floating-point. */
4372 code = GET_CODE (XEXP (SET_SRC (set), 0));
4373 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4374 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4375
4376 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4377 if (! cond_known_true)
4378 {
4379 code = reversed_comparison_code_parts (code, op0, op1, insn);
4380
4381 /* Don't remember if we can't find the inverse. */
4382 if (code == UNKNOWN)
4383 return;
4384 }
4385
4386 /* The mode is the mode of the non-constant. */
4387 mode = mode0;
4388 if (mode1 != VOIDmode)
4389 mode = mode1;
4390
4391 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4392 }
4393
4394 /* Yet another form of subreg creation. In this case, we want something in
4395 MODE, and we should assume OP has MODE iff it is naturally modeless. */
4396
4397 static rtx
4398 record_jump_cond_subreg (enum machine_mode mode, rtx op)
4399 {
4400 enum machine_mode op_mode = GET_MODE (op);
4401 if (op_mode == mode || op_mode == VOIDmode)
4402 return op;
4403 return lowpart_subreg (mode, op, op_mode);
4404 }
4405
4406 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4407 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4408 Make any useful entries we can with that information. Called from
4409 above function and called recursively. */
4410
4411 static void
4412 record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
4413 rtx op1, int reversed_nonequality)
4414 {
4415 unsigned op0_hash, op1_hash;
4416 int op0_in_memory, op1_in_memory;
4417 struct table_elt *op0_elt, *op1_elt;
4418
4419 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4420 we know that they are also equal in the smaller mode (this is also
4421 true for all smaller modes whether or not there is a SUBREG, but
4422 is not worth testing for with no SUBREG). */
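/* For example (illustrative): if (subreg:DI (reg:SI R) 0) is known
   equal to a DImode value V, we also record that (reg:SI R) is
   equal to the SImode low part of V.  */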
4423
4424 /* Note that GET_MODE (op0) may not equal MODE. */
4425 if (code == EQ && GET_CODE (op0) == SUBREG
4426 && (GET_MODE_SIZE (GET_MODE (op0))
4427 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4428 {
4429 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4430 rtx tem = record_jump_cond_subreg (inner_mode, op1);
4431 if (tem)
4432 record_jump_cond (code, mode, SUBREG_REG (op0), tem,
4433 reversed_nonequality);
4434 }
4435
4436 if (code == EQ && GET_CODE (op1) == SUBREG
4437 && (GET_MODE_SIZE (GET_MODE (op1))
4438 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4439 {
4440 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4441 rtx tem = record_jump_cond_subreg (inner_mode, op0);
4442 if (tem)
4443 record_jump_cond (code, mode, SUBREG_REG (op1), tem,
4444 reversed_nonequality);
4445 }
4446
4447 /* Similarly, if this is an NE comparison, and either is a SUBREG
4448 making a smaller mode, we know the whole thing is also NE. */
4449
4450 /* Note that GET_MODE (op0) may not equal MODE;
4451 if we test MODE instead, we can get an infinite recursion
4452 alternating between two modes each wider than MODE. */
4453
4454 if (code == NE && GET_CODE (op0) == SUBREG
4455 && subreg_lowpart_p (op0)
4456 && (GET_MODE_SIZE (GET_MODE (op0))
4457 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4458 {
4459 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4460 rtx tem = record_jump_cond_subreg (inner_mode, op1);
4461 if (tem)
4462 record_jump_cond (code, mode, SUBREG_REG (op0), tem,
4463 reversed_nonequality);
4464 }
4465
4466 if (code == NE && GET_CODE (op1) == SUBREG
4467 && subreg_lowpart_p (op1)
4468 && (GET_MODE_SIZE (GET_MODE (op1))
4469 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4470 {
4471 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4472 rtx tem = record_jump_cond_subreg (inner_mode, op0);
4473 if (tem)
4474 record_jump_cond (code, mode, SUBREG_REG (op1), tem,
4475 reversed_nonequality);
4476 }
4477
4478 /* Hash both operands. */
4479
4480 do_not_record = 0;
4481 hash_arg_in_memory = 0;
4482 op0_hash = HASH (op0, mode);
4483 op0_in_memory = hash_arg_in_memory;
4484
4485 if (do_not_record)
4486 return;
4487
4488 do_not_record = 0;
4489 hash_arg_in_memory = 0;
4490 op1_hash = HASH (op1, mode);
4491 op1_in_memory = hash_arg_in_memory;
4492
4493 if (do_not_record)
4494 return;
4495
4496 /* Look up both operands. */
4497 op0_elt = lookup (op0, op0_hash, mode);
4498 op1_elt = lookup (op1, op1_hash, mode);
4499
4500 /* If both operands are already equivalent or if they are not in the
4501 table but are identical, do nothing. */
4502 if ((op0_elt != 0 && op1_elt != 0
4503 && op0_elt->first_same_value == op1_elt->first_same_value)
4504 || op0 == op1 || rtx_equal_p (op0, op1))
4505 return;
4506
4507 /* If we aren't setting two things equal, all we can do is save this
4508 comparison. Similarly if this is floating-point. In the latter
4509 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4510 If we record the equality, we might inadvertently delete code
4511 whose intent was to change -0 to +0. */
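/* For example, in "if (x == 0.0) x = 0.0;" the variable may hold
   -0.0 on entry; recording the equivalence would let the store be
   deleted even though it changes the sign of the value.  */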
4512
4513 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4514 {
4515 struct qty_table_elem *ent;
4516 int qty;
4517
4518 /* If we reversed a floating-point comparison, if OP0 is not a
4519 register, or if OP1 is neither a register nor a constant, we can't
4520 do anything. */
4521
4522 if (!REG_P (op1))
4523 op1 = equiv_constant (op1);
4524
4525 if ((reversed_nonequality && FLOAT_MODE_P (mode))
4526 || !REG_P (op0) || op1 == 0)
4527 return;
4528
4529 /* Put OP0 in the hash table if it isn't already. This gives it a
4530 new quantity number. */
4531 if (op0_elt == 0)
4532 {
4533 if (insert_regs (op0, NULL, 0))
4534 {
4535 rehash_using_reg (op0);
4536 op0_hash = HASH (op0, mode);
4537
4538 /* If OP0 is contained in OP1, this changes its hash code
4539 as well. Faster to rehash than to check, except
4540 for the simple case of a constant. */
4541 if (! CONSTANT_P (op1))
4542 op1_hash = HASH (op1, mode);
4543 }
4544
4545 op0_elt = insert (op0, NULL, op0_hash, mode);
4546 op0_elt->in_memory = op0_in_memory;
4547 }
4548
4549 qty = REG_QTY (REGNO (op0));
4550 ent = &qty_table[qty];
4551
4552 ent->comparison_code = code;
4553 if (REG_P (op1))
4554 {
4555 /* Look it up again--in case op0 and op1 are the same. */
4556 op1_elt = lookup (op1, op1_hash, mode);
4557
4558 /* Put OP1 in the hash table so it gets a new quantity number. */
4559 if (op1_elt == 0)
4560 {
4561 if (insert_regs (op1, NULL, 0))
4562 {
4563 rehash_using_reg (op1);
4564 op1_hash = HASH (op1, mode);
4565 }
4566
4567 op1_elt = insert (op1, NULL, op1_hash, mode);
4568 op1_elt->in_memory = op1_in_memory;
4569 }
4570
4571 ent->comparison_const = NULL_RTX;
4572 ent->comparison_qty = REG_QTY (REGNO (op1));
4573 }
4574 else
4575 {
4576 ent->comparison_const = op1;
4577 ent->comparison_qty = -1;
4578 }
4579
4580 return;
4581 }
4582
4583 /* If either side is still missing an equivalence, make it now,
4584 then merge the equivalences. */
4585
4586 if (op0_elt == 0)
4587 {
4588 if (insert_regs (op0, NULL, 0))
4589 {
4590 rehash_using_reg (op0);
4591 op0_hash = HASH (op0, mode);
4592 }
4593
4594 op0_elt = insert (op0, NULL, op0_hash, mode);
4595 op0_elt->in_memory = op0_in_memory;
4596 }
4597
4598 if (op1_elt == 0)
4599 {
4600 if (insert_regs (op1, NULL, 0))
4601 {
4602 rehash_using_reg (op1);
4603 op1_hash = HASH (op1, mode);
4604 }
4605
4606 op1_elt = insert (op1, NULL, op1_hash, mode);
4607 op1_elt->in_memory = op1_in_memory;
4608 }
4609
4610 merge_equiv_classes (op0_elt, op1_elt);
4611 }
4612 \f
4613 /* CSE processing for one instruction.
4614 First simplify sources and addresses of all assignments
4615 in the instruction, using previously-computed equivalent values.
4616 Then install the new sources and destinations in the table
4617 of available values.
4618
4619 If LIBCALL_INSN is nonzero, don't record any equivalence made in
4620 the insn. It means that INSN is inside a libcall block. In this
4621 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
4622
4623 /* Data on one SET contained in the instruction. */
4624
4625 struct set
4626 {
4627 /* The SET rtx itself. */
4628 rtx rtl;
4629 /* The SET_SRC of the rtx (the original value, if it is changing). */
4630 rtx src;
4631 /* The hash-table element for the SET_SRC of the SET. */
4632 struct table_elt *src_elt;
4633 /* Hash value for the SET_SRC. */
4634 unsigned src_hash;
4635 /* Hash value for the SET_DEST. */
4636 unsigned dest_hash;
4637 /* The SET_DEST, with SUBREG, etc., stripped. */
4638 rtx inner_dest;
4639 /* Nonzero if the SET_SRC is in memory. */
4640 char src_in_memory;
4641 /* Nonzero if the SET_SRC contains something
4642 whose value cannot be predicted and understood. */
4643 char src_volatile;
4644 /* Original machine mode, in case it becomes a CONST_INT.
4645 The size of this field should match the size of the mode
4646 field of struct rtx_def (see rtl.h). */
4647 ENUM_BITFIELD(machine_mode) mode : 8;
4648 /* A constant equivalent for SET_SRC, if any. */
4649 rtx src_const;
4650 /* Original SET_SRC value used for libcall notes. */
4651 rtx orig_src;
4652 /* Hash value of constant equivalent for SET_SRC. */
4653 unsigned src_const_hash;
4654 /* Table entry for constant equivalent for SET_SRC, if any. */
4655 struct table_elt *src_const_elt;
4656 };
4657
4658 static void
4659 cse_insn (rtx insn, rtx libcall_insn)
4660 {
4661 rtx x = PATTERN (insn);
4662 int i;
4663 rtx tem;
4664 int n_sets = 0;
4665
4666 #ifdef HAVE_cc0
4667 /* Records what this insn does to set CC0. */
4668 rtx this_insn_cc0 = 0;
4669 enum machine_mode this_insn_cc0_mode = VOIDmode;
4670 #endif
4671
4672 rtx src_eqv = 0;
4673 struct table_elt *src_eqv_elt = 0;
4674 int src_eqv_volatile = 0;
4675 int src_eqv_in_memory = 0;
4676 unsigned src_eqv_hash = 0;
4677
4678 struct set *sets = (struct set *) 0;
4679
4680 this_insn = insn;
4681
4682 /* Find all the SETs and CLOBBERs in this instruction.
4683 Record all the SETs in the array `sets' and count them.
4684 Also determine whether there is a CLOBBER that invalidates
4685 all memory references, or all references at varying addresses. */
4686
4687 if (CALL_P (insn))
4688 {
4689 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4690 {
4691 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4692 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4693 XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4694 }
4695 }
4696
4697 if (GET_CODE (x) == SET)
4698 {
4699 sets = alloca (sizeof (struct set));
4700 sets[0].rtl = x;
4701
4702 /* Ignore SETs that are unconditional jumps.
4703 They never need cse processing, so this does not hurt.
4704 The reason is not efficiency but rather
4705 so that we can test at the end for instructions
4706 that have been simplified to unconditional jumps
4707 and not be misled by unchanged instructions
4708 that were unconditional jumps to begin with. */
4709 if (SET_DEST (x) == pc_rtx
4710 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4711 ;
4712
4713 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4714 The hard function value register is used only once, to copy to
4715 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4716 Ensure we invalidate the destination register. On the 80386 no
4717 other code would invalidate it since it is a fixed_reg.
4718 We need not check the return of apply_change_group; see canon_reg. */
4719
4720 else if (GET_CODE (SET_SRC (x)) == CALL)
4721 {
4722 canon_reg (SET_SRC (x), insn);
4723 apply_change_group ();
4724 fold_rtx (SET_SRC (x), insn);
4725 invalidate (SET_DEST (x), VOIDmode);
4726 }
4727 else
4728 n_sets = 1;
4729 }
4730 else if (GET_CODE (x) == PARALLEL)
4731 {
4732 int lim = XVECLEN (x, 0);
4733
4734 sets = alloca (lim * sizeof (struct set));
4735
4736 /* Find all regs explicitly clobbered in this insn,
4737 and ensure they are not replaced with any other regs
4738 elsewhere in this insn.
4739 When a reg that is clobbered is also used for input,
4740 we should presume that that is for a reason,
4741 and we should not substitute some other register
4742 which is not supposed to be clobbered.
4743 Therefore, this loop cannot be merged into the one below
4744 because a CALL may precede a CLOBBER and refer to the
4745 value clobbered. We must not let a canonicalization do
4746 anything in that case. */
4747 for (i = 0; i < lim; i++)
4748 {
4749 rtx y = XVECEXP (x, 0, i);
4750 if (GET_CODE (y) == CLOBBER)
4751 {
4752 rtx clobbered = XEXP (y, 0);
4753
4754 if (REG_P (clobbered)
4755 || GET_CODE (clobbered) == SUBREG)
4756 invalidate (clobbered, VOIDmode);
4757 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4758 || GET_CODE (clobbered) == ZERO_EXTRACT)
4759 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4760 }
4761 }
4762
4763 for (i = 0; i < lim; i++)
4764 {
4765 rtx y = XVECEXP (x, 0, i);
4766 if (GET_CODE (y) == SET)
4767 {
4768 /* As above, we ignore unconditional jumps and call-insns and
4769 ignore the result of apply_change_group. */
4770 if (GET_CODE (SET_SRC (y)) == CALL)
4771 {
4772 canon_reg (SET_SRC (y), insn);
4773 apply_change_group ();
4774 fold_rtx (SET_SRC (y), insn);
4775 invalidate (SET_DEST (y), VOIDmode);
4776 }
4777 else if (SET_DEST (y) == pc_rtx
4778 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4779 ;
4780 else
4781 sets[n_sets++].rtl = y;
4782 }
4783 else if (GET_CODE (y) == CLOBBER)
4784 {
4785 /* If we clobber memory, canon the address.
4786 This does nothing when a register is clobbered
4787 because we have already invalidated the reg. */
4788 if (MEM_P (XEXP (y, 0)))
4789 canon_reg (XEXP (y, 0), NULL_RTX);
4790 }
4791 else if (GET_CODE (y) == USE
4792 && ! (REG_P (XEXP (y, 0))
4793 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4794 canon_reg (y, NULL_RTX);
4795 else if (GET_CODE (y) == CALL)
4796 {
4797 /* The result of apply_change_group can be ignored; see
4798 canon_reg. */
4799 canon_reg (y, insn);
4800 apply_change_group ();
4801 fold_rtx (y, insn);
4802 }
4803 }
4804 }
4805 else if (GET_CODE (x) == CLOBBER)
4806 {
4807 if (MEM_P (XEXP (x, 0)))
4808 canon_reg (XEXP (x, 0), NULL_RTX);
4809 }
4810
4811 /* Canonicalize a USE of a pseudo register or memory location. */
4812 else if (GET_CODE (x) == USE
4813 && ! (REG_P (XEXP (x, 0))
4814 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4815 canon_reg (XEXP (x, 0), NULL_RTX);
4816 else if (GET_CODE (x) == CALL)
4817 {
4818 /* The result of apply_change_group can be ignored; see canon_reg. */
4819 canon_reg (x, insn);
4820 apply_change_group ();
4821 fold_rtx (x, insn);
4822 }
4823
4824 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4825 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4826 is handled specially for this case, and if it isn't set, then there will
4827 be no equivalence for the destination. */
4828 if (n_sets == 1 && REG_NOTES (insn) != 0
4829 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4830 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4831 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4832 {
4833 src_eqv = fold_rtx (canon_reg (XEXP (tem, 0), NULL_RTX), insn);
4834 XEXP (tem, 0) = src_eqv;
4835 }
4836
4837 /* Canonicalize sources and addresses of destinations.
4838 We do this in a separate pass to avoid problems when a MATCH_DUP is
4839 present in the insn pattern. In that case, we want to ensure that
4840 we don't break the duplicate nature of the pattern. So we will replace
4841 both operands at the same time. Otherwise, we would fail to find an
4842 equivalent substitution in the loop calling validate_change below.
4843
4844 We used to suppress canonicalization of DEST if it appears in SRC,
4845 but we don't do this any more. */
4846
4847 for (i = 0; i < n_sets; i++)
4848 {
4849 rtx dest = SET_DEST (sets[i].rtl);
4850 rtx src = SET_SRC (sets[i].rtl);
4851 rtx new = canon_reg (src, insn);
4852 int insn_code;
4853
4854 sets[i].orig_src = src;
4855 if ((REG_P (new) && REG_P (src)
4856 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4857 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4858 || (insn_code = recog_memoized (insn)) < 0
4859 || insn_data[insn_code].n_dups > 0)
4860 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4861 else
4862 SET_SRC (sets[i].rtl) = new;
4863
4864 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
4865 {
4866 validate_change (insn, &XEXP (dest, 1),
4867 canon_reg (XEXP (dest, 1), insn), 1);
4868 validate_change (insn, &XEXP (dest, 2),
4869 canon_reg (XEXP (dest, 2), insn), 1);
4870 }
4871
4872 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
4873 || GET_CODE (dest) == ZERO_EXTRACT
4874 || GET_CODE (dest) == SIGN_EXTRACT)
4875 dest = XEXP (dest, 0);
4876
4877 if (MEM_P (dest))
4878 canon_reg (dest, insn);
4879 }
4880
4881 /* Now that we have done all the replacements, we can apply the change
4882 group and see if they all work. Note that this will cause some
4883 canonicalizations that would have worked individually not to be applied
4884 because some other canonicalization didn't work, but this should not
4885 occur often.
4886
4887 The result of apply_change_group can be ignored; see canon_reg. */
4888
4889 apply_change_group ();
4890
4891 /* Set sets[i].src_elt to the class each source belongs to.
4892 Detect assignments from or to volatile things
4893 and set sets[i] to zero so they will be ignored
4894 in the rest of this function.
4895
4896 Nothing in this loop changes the hash table or the register chains. */
4897
4898 for (i = 0; i < n_sets; i++)
4899 {
4900 rtx src, dest;
4901 rtx src_folded;
4902 struct table_elt *elt = 0, *p;
4903 enum machine_mode mode;
4904 rtx src_eqv_here;
4905 rtx src_const = 0;
4906 rtx src_related = 0;
4907 struct table_elt *src_const_elt = 0;
4908 int src_cost = MAX_COST;
4909 int src_eqv_cost = MAX_COST;
4910 int src_folded_cost = MAX_COST;
4911 int src_related_cost = MAX_COST;
4912 int src_elt_cost = MAX_COST;
4913 int src_regcost = MAX_COST;
4914 int src_eqv_regcost = MAX_COST;
4915 int src_folded_regcost = MAX_COST;
4916 int src_related_regcost = MAX_COST;
4917 int src_elt_regcost = MAX_COST;
4918 /* Set nonzero if we need to call force_const_mem on the
4919 contents of src_folded before using it. */
4920 int src_folded_force_flag = 0;
4921
4922 dest = SET_DEST (sets[i].rtl);
4923 src = SET_SRC (sets[i].rtl);
4924
4925 /* If SRC is a constant that has no machine mode,
4926 hash it with the destination's machine mode.
4927 This way we can keep different modes separate. */
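/* For example, (const_int 5) has VOIDmode; stored into an SImode
   register it is hashed as SImode, so the same bits stored into a
   DImode register land in a different equivalence class.  */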
4928
4929 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4930 sets[i].mode = mode;
4931
4932 if (src_eqv)
4933 {
4934 enum machine_mode eqvmode = mode;
4935 if (GET_CODE (dest) == STRICT_LOW_PART)
4936 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4937 do_not_record = 0;
4938 hash_arg_in_memory = 0;
4939 src_eqv_hash = HASH (src_eqv, eqvmode);
4940
4941 /* Find the equivalence class for the equivalent expression. */
4942
4943 if (!do_not_record)
4944 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
4945
4946 src_eqv_volatile = do_not_record;
4947 src_eqv_in_memory = hash_arg_in_memory;
4948 }
4949
4950 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4951 value of the INNER register, not the destination. So it is not
4952 a valid substitution for the source. But save it for later. */
4953 if (GET_CODE (dest) == STRICT_LOW_PART)
4954 src_eqv_here = 0;
4955 else
4956 src_eqv_here = src_eqv;
4957
4958 /* Simplify any foldable subexpressions in SRC. Then get the fully-
4959 simplified result, which may not necessarily be valid. */
4960 src_folded = fold_rtx (src, insn);
4961
4962 #if 0
4963 /* ??? This caused bad code to be generated for the m68k port with -O2.
4964 Suppose src is (CONST_INT -1), and that after truncation src_folded
4965 is (CONST_INT 3). Suppose src_folded is then used for src_const.
4966 At the end we will add src and src_const to the same equivalence
4967 class. We now have 3 and -1 on the same equivalence class. This
4968 causes later instructions to be mis-optimized. */
4969 /* If storing a constant in a bitfield, pre-truncate the constant
4970 so we will be able to record it later. */
4971 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
4972 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
4973 {
4974 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
4975
4976 if (GET_CODE (src) == CONST_INT
4977 && GET_CODE (width) == CONST_INT
4978 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
4979 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
4980 src_folded
4981 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
4982 << INTVAL (width)) - 1));
4983 }
4984 #endif
4985
4986 /* Compute SRC's hash code, and also notice if it
4987 should not be recorded at all. In that case,
4988 prevent any further processing of this assignment. */
4989 do_not_record = 0;
4990 hash_arg_in_memory = 0;
4991
4992 sets[i].src = src;
4993 sets[i].src_hash = HASH (src, mode);
4994 sets[i].src_volatile = do_not_record;
4995 sets[i].src_in_memory = hash_arg_in_memory;
4996
4997 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
4998 a pseudo, do not record SRC. Using SRC as a replacement for
4999 anything else will be incorrect in that situation. Note that
5000 this usually occurs only for stack slots, in which case all the
5001 RTL would be referring to SRC, so we don't lose any optimization
5002 opportunities by not having SRC in the hash table. */
5003
5004 if (MEM_P (src)
5005 && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
5006 && REG_P (dest)
5007 && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5008 sets[i].src_volatile = 1;
5009
5010 #if 0
5011 /* It is no longer clear why we used to do this, but it doesn't
5012 appear to still be needed. So let's try without it since this
5013 code hurts cse'ing widened ops. */
5014 /* If source is a paradoxical subreg (such as QI treated as an SI),
5015 treat it as volatile. It may do the work of an SI in one context
5016 where the extra bits are not being used, but cannot replace an SI
5017 in general. */
5018 if (GET_CODE (src) == SUBREG
5019 && (GET_MODE_SIZE (GET_MODE (src))
5020 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5021 sets[i].src_volatile = 1;
5022 #endif
5023
5024 /* Locate all possible equivalent forms for SRC. Try to replace
5025 SRC in the insn with each cheaper equivalent.
5026
5027 We have the following types of equivalents: SRC itself, a folded
5028 version, a value given in a REG_EQUAL note, or a value related
5029 to a constant.
5030
5031 Each of these equivalents may be part of an additional class
5032 of equivalents (if more than one is in the table, they must be in
5033 the same class; we check for this).
5034
5035 If the source is volatile, we don't do any table lookups.
5036
5037 We note any constant equivalent for possible later use in a
5038 REG_NOTE. */
5039
5040 if (!sets[i].src_volatile)
5041 elt = lookup (src, sets[i].src_hash, mode);
5042
5043 sets[i].src_elt = elt;
5044
5045 if (elt && src_eqv_here && src_eqv_elt)
5046 {
5047 if (elt->first_same_value != src_eqv_elt->first_same_value)
5048 {
5049 /* The REG_EQUAL is indicating that two formerly distinct
5050 classes are now equivalent. So merge them. */
5051 merge_equiv_classes (elt, src_eqv_elt);
5052 src_eqv_hash = HASH (src_eqv, elt->mode);
5053 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
5054 }
5055
5056 src_eqv_here = 0;
5057 }
5058
5059 else if (src_eqv_elt)
5060 elt = src_eqv_elt;
5061
5062 /* Try to find a constant somewhere and record it in `src_const'.
5063 Record its table element, if any, in `src_const_elt'. Look in
5064 any known equivalences first. (If the constant is not in the
5065 table, also set `sets[i].src_const_hash'). */
5066 if (elt)
5067 for (p = elt->first_same_value; p; p = p->next_same_value)
5068 if (p->is_const)
5069 {
5070 src_const = p->exp;
5071 src_const_elt = elt;
5072 break;
5073 }
5074
5075 if (src_const == 0
5076 && (CONSTANT_P (src_folded)
5077 /* Consider (minus (label_ref L1) (label_ref L2)) as
5078 "constant" here so we will record it. This allows us
5079 to fold switch statements when an ADDR_DIFF_VEC is used. */
5080 || (GET_CODE (src_folded) == MINUS
5081 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5082 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5083 src_const = src_folded, src_const_elt = elt;
5084 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5085 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5086
5087 /* If we don't know if the constant is in the table, get its
5088 hash code and look it up. */
5089 if (src_const && src_const_elt == 0)
5090 {
5091 sets[i].src_const_hash = HASH (src_const, mode);
5092 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
5093 }
5094
5095 sets[i].src_const = src_const;
5096 sets[i].src_const_elt = src_const_elt;
5097
5098 /* If the constant and our source are both in the table, mark them as
5099 equivalent. Otherwise, if a constant is in the table but the source
5100 isn't, set ELT to it. */
5101 if (src_const_elt && elt
5102 && src_const_elt->first_same_value != elt->first_same_value)
5103 merge_equiv_classes (elt, src_const_elt);
5104 else if (src_const_elt && elt == 0)
5105 elt = src_const_elt;
5106
5107 /* See if there is a register linearly related to a constant
5108 equivalent of SRC. */
5109 if (src_const
5110 && (GET_CODE (src_const) == CONST
5111 || (src_const_elt && src_const_elt->related_value != 0)))
5112 {
5113 src_related = use_related_value (src_const, src_const_elt);
5114 if (src_related)
5115 {
5116 struct table_elt *src_related_elt
5117 = lookup (src_related, HASH (src_related, mode), mode);
5118 if (src_related_elt && elt)
5119 {
5120 if (elt->first_same_value
5121 != src_related_elt->first_same_value)
5122 /* This can occur when we previously saw a CONST
5123 involving a SYMBOL_REF and then see the SYMBOL_REF
5124 twice. Merge the involved classes. */
5125 merge_equiv_classes (elt, src_related_elt);
5126
5127 src_related = 0;
5128 src_related_elt = 0;
5129 }
5130 else if (src_related_elt && elt == 0)
5131 elt = src_related_elt;
5132 }
5133 }
5134
5135 /* See if we have a CONST_INT that is already in a register in a
5136 wider mode. */
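/* For example (illustrative): if (reg:DI R) is known to hold
   (const_int 5) and we now need (const_int 5) in SImode, the low
   SImode part of R is a cheap way to materialize it.  */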
5137
5138 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5139 && GET_MODE_CLASS (mode) == MODE_INT
5140 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5141 {
5142 enum machine_mode wider_mode;
5143
5144 for (wider_mode = GET_MODE_WIDER_MODE (mode);
5145 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5146 && src_related == 0;
5147 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5148 {
5149 struct table_elt *const_elt
5150 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5151
5152 if (const_elt == 0)
5153 continue;
5154
5155 for (const_elt = const_elt->first_same_value;
5156 const_elt; const_elt = const_elt->next_same_value)
5157 if (REG_P (const_elt->exp))
5158 {
5159 src_related = gen_lowpart (mode,
5160 const_elt->exp);
5161 break;
5162 }
5163 }
5164 }
5165
5166 /* Another possibility is that we have an AND with a constant in
5167 a mode narrower than a word. If so, it might have been generated
5168 as part of an "if" which would narrow the AND. If we already
5169 have done the AND in a wider mode, we can use a SUBREG of that
5170 value. */
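/* For example (illustrative): if (reg:SI R) is known to hold
   (and:SI X (const_int 15)), a QImode AND of the same low bits
   can use the QImode low part of R instead of redoing the AND.  */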
5171
5172 if (flag_expensive_optimizations && ! src_related
5173 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5174 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5175 {
5176 enum machine_mode tmode;
5177 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5178
5179 for (tmode = GET_MODE_WIDER_MODE (mode);
5180 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5181 tmode = GET_MODE_WIDER_MODE (tmode))
5182 {
5183 rtx inner = gen_lowpart (tmode, XEXP (src, 0));
5184 struct table_elt *larger_elt;
5185
5186 if (inner)
5187 {
5188 PUT_MODE (new_and, tmode);
5189 XEXP (new_and, 0) = inner;
5190 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5191 if (larger_elt == 0)
5192 continue;
5193
5194 for (larger_elt = larger_elt->first_same_value;
5195 larger_elt; larger_elt = larger_elt->next_same_value)
5196 if (REG_P (larger_elt->exp))
5197 {
5198 src_related
5199 = gen_lowpart (mode, larger_elt->exp);
5200 break;
5201 }
5202
5203 if (src_related)
5204 break;
5205 }
5206 }
5207 }
5208
5209 #ifdef LOAD_EXTEND_OP
5210 /* See if a MEM has already been loaded with a widening operation;
5211 if it has, we can use a subreg of that. Many CISC machines
5212 also have such operations, but this is only likely to be
5213 beneficial on these machines. */
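/* For example (illustrative): on a target whose LOAD_EXTEND_OP is
   ZERO_EXTEND, if (reg:SI R) was loaded as
   (zero_extend:SI (mem:QI ADDR)), a later (mem:QI ADDR) can be
   replaced by the low byte of R, avoiding a second load.  */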
5214
5215 if (flag_expensive_optimizations && src_related == 0
5216 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5217 && GET_MODE_CLASS (mode) == MODE_INT
5218 && MEM_P (src) && ! do_not_record
5219 && LOAD_EXTEND_OP (mode) != UNKNOWN)
5220 {
5221 struct rtx_def memory_extend_buf;
5222 rtx memory_extend_rtx = &memory_extend_buf;
5223 enum machine_mode tmode;
5224
5225 /* Set what we are trying to extend and the operation it might
5226 have been extended with. */
5227 memset (memory_extend_rtx, 0, sizeof(*memory_extend_rtx));
5228 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5229 XEXP (memory_extend_rtx, 0) = src;
5230
5231 for (tmode = GET_MODE_WIDER_MODE (mode);
5232 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5233 tmode = GET_MODE_WIDER_MODE (tmode))
5234 {
5235 struct table_elt *larger_elt;
5236
5237 PUT_MODE (memory_extend_rtx, tmode);
5238 larger_elt = lookup (memory_extend_rtx,
5239 HASH (memory_extend_rtx, tmode), tmode);
5240 if (larger_elt == 0)
5241 continue;
5242
5243 for (larger_elt = larger_elt->first_same_value;
5244 larger_elt; larger_elt = larger_elt->next_same_value)
5245 if (REG_P (larger_elt->exp))
5246 {
5247 src_related = gen_lowpart (mode,
5248 larger_elt->exp);
5249 break;
5250 }
5251
5252 if (src_related)
5253 break;
5254 }
5255 }
5256 #endif /* LOAD_EXTEND_OP */
5257
5258 if (src == src_folded)
5259 src_folded = 0;
5260
5261 /* At this point, ELT, if nonzero, points to a class of expressions
5262 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5263 and SRC_RELATED, if nonzero, each contain additional equivalent
5264 expressions. Prune these latter expressions by deleting expressions
5265 already in the equivalence class.
5266
5267 Check for an equivalent identical to the destination. If found,
5268 this is the preferred equivalent since it will likely lead to
5269 elimination of the insn. Indicate this by placing it in
5270 `src_related'. */
5271
5272 if (elt)
5273 elt = elt->first_same_value;
5274 for (p = elt; p; p = p->next_same_value)
5275 {
5276 enum rtx_code code = GET_CODE (p->exp);
5277
5278 /* If the expression is not valid, ignore it. Then we do not
5279 have to check for validity below. In most cases, we can use
5280 `rtx_equal_p', since canonicalization has already been done. */
5281 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
5282 continue;
5283
5284 /* Also skip paradoxical subregs, unless that's what we're
5285 looking for. */
5286 if (code == SUBREG
5287 && (GET_MODE_SIZE (GET_MODE (p->exp))
5288 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5289 && ! (src != 0
5290 && GET_CODE (src) == SUBREG
5291 && GET_MODE (src) == GET_MODE (p->exp)
5292 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5293 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5294 continue;
5295
5296 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5297 src = 0;
5298 else if (src_folded && GET_CODE (src_folded) == code
5299 && rtx_equal_p (src_folded, p->exp))
5300 src_folded = 0;
5301 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5302 && rtx_equal_p (src_eqv_here, p->exp))
5303 src_eqv_here = 0;
5304 else if (src_related && GET_CODE (src_related) == code
5305 && rtx_equal_p (src_related, p->exp))
5306 src_related = 0;
5307
5308 /* If the expression is the same as the destination of the insn,
5309 we want to prefer it. Copy it to src_related. The code below
5310 will then give it a negative cost. */
5311 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5312 src_related = dest;
5313 }
5314
5315 /* Find the cheapest valid equivalent, trying all the available
5316 possibilities. Prefer items not in the hash table to ones
5317 that are when they are equal cost. Note that we can never
5318 worsen an insn as the current contents will also succeed.
5319 If we find an equivalent identical to the destination, use it as best,
5320 since this insn will probably be eliminated in that case. */
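/* Cost convention used below: a cost of -1 marks the preferred
   "identical to the destination" candidate, and a candidate is
   retired by raising its cost to MAX_COST once it has been tried. */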
5321 if (src)
5322 {
5323 if (rtx_equal_p (src, dest))
5324 src_cost = src_regcost = -1;
5325 else
5326 {
5327 src_cost = COST (src);
5328 src_regcost = approx_reg_cost (src);
5329 }
5330 }
5331
5332 if (src_eqv_here)
5333 {
5334 if (rtx_equal_p (src_eqv_here, dest))
5335 src_eqv_cost = src_eqv_regcost = -1;
5336 else
5337 {
5338 src_eqv_cost = COST (src_eqv_here);
5339 src_eqv_regcost = approx_reg_cost (src_eqv_here);
5340 }
5341 }
5342
5343 if (src_folded)
5344 {
5345 if (rtx_equal_p (src_folded, dest))
5346 src_folded_cost = src_folded_regcost = -1;
5347 else
5348 {
5349 src_folded_cost = COST (src_folded);
5350 src_folded_regcost = approx_reg_cost (src_folded);
5351 }
5352 }
5353
5354 if (src_related)
5355 {
5356 if (rtx_equal_p (src_related, dest))
5357 src_related_cost = src_related_regcost = -1;
5358 else
5359 {
5360 src_related_cost = COST (src_related);
5361 src_related_regcost = approx_reg_cost (src_related);
5362 }
5363 }
5364
5365 /* If this was an indirect jump insn, a known label will really be
5366 cheaper even though it looks more expensive. */
5367 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5368 src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5369
5370 /* Terminate the loop when a replacement is made. It must terminate
5371 since the current contents will be tested and will always be valid. */
5372 while (1)
5373 {
5374 rtx trial;
5375
5376 /* Skip invalid entries. */
5377 while (elt && !REG_P (elt->exp)
5378 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
5379 elt = elt->next_same_value;
5380
5381 /* A paradoxical subreg would be bad here: it'll be the right
5382 size, but later may be adjusted so that the upper bits aren't
5383 what we want. So reject it. */
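/* For instance, on a 64-bit target (subreg:DI (reg:SI 103) 0) is
   paradoxical: its upper 32 bits are undefined and may later change,
   so it is not a safe equivalent. (Hypothetical RTL.) */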
5384 if (elt != 0
5385 && GET_CODE (elt->exp) == SUBREG
5386 && (GET_MODE_SIZE (GET_MODE (elt->exp))
5387 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5388 /* It is okay, though, if the rtx we're trying to match
5389 will ignore any of the bits we can't predict. */
5390 && ! (src != 0
5391 && GET_CODE (src) == SUBREG
5392 && GET_MODE (src) == GET_MODE (elt->exp)
5393 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5394 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5395 {
5396 elt = elt->next_same_value;
5397 continue;
5398 }
5399
5400 if (elt)
5401 {
5402 src_elt_cost = elt->cost;
5403 src_elt_regcost = elt->regcost;
5404 }
5405
5406 /* Find the cheapest candidate and skip it for the next time. For
5407 items of equal cost, use this order: src_folded, src, src_eqv,
5408 src_related and the hash table entry. */
5409 if (src_folded
5410 && preferable (src_folded_cost, src_folded_regcost,
5411 src_cost, src_regcost) <= 0
5412 && preferable (src_folded_cost, src_folded_regcost,
5413 src_eqv_cost, src_eqv_regcost) <= 0
5414 && preferable (src_folded_cost, src_folded_regcost,
5415 src_related_cost, src_related_regcost) <= 0
5416 && preferable (src_folded_cost, src_folded_regcost,
5417 src_elt_cost, src_elt_regcost) <= 0)
5418 {
5419 trial = src_folded, src_folded_cost = MAX_COST;
5420 if (src_folded_force_flag)
5421 {
5422 rtx forced = force_const_mem (mode, trial);
5423 if (forced)
5424 trial = forced;
5425 }
5426 }
5427 else if (src
5428 && preferable (src_cost, src_regcost,
5429 src_eqv_cost, src_eqv_regcost) <= 0
5430 && preferable (src_cost, src_regcost,
5431 src_related_cost, src_related_regcost) <= 0
5432 && preferable (src_cost, src_regcost,
5433 src_elt_cost, src_elt_regcost) <= 0)
5434 trial = src, src_cost = MAX_COST;
5435 else if (src_eqv_here
5436 && preferable (src_eqv_cost, src_eqv_regcost,
5437 src_related_cost, src_related_regcost) <= 0
5438 && preferable (src_eqv_cost, src_eqv_regcost,
5439 src_elt_cost, src_elt_regcost) <= 0)
5440 trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
5441 else if (src_related
5442 && preferable (src_related_cost, src_related_regcost,
5443 src_elt_cost, src_elt_regcost) <= 0)
5444 trial = copy_rtx (src_related), src_related_cost = MAX_COST;
5445 else
5446 {
5447 trial = copy_rtx (elt->exp);
5448 elt = elt->next_same_value;
5449 src_elt_cost = MAX_COST;
5450 }
5451
5452 /* We don't normally have an insn matching (set (pc) (pc)), so
5453 check for this separately here. We will delete such an
5454 insn below.
5455
5456 For other cases such as a table jump or conditional jump
5457 where we know the ultimate target, go ahead and replace the
5458 operand. While that may not make a valid insn, we will
5459 reemit the jump below (and also insert any necessary
5460 barriers). */
5461 if (n_sets == 1 && dest == pc_rtx
5462 && (trial == pc_rtx
5463 || (GET_CODE (trial) == LABEL_REF
5464 && ! condjump_p (insn))))
5465 {
5466 /* Don't substitute non-local labels, this confuses CFG. */
5467 if (GET_CODE (trial) == LABEL_REF
5468 && LABEL_REF_NONLOCAL_P (trial))
5469 continue;
5470
5471 SET_SRC (sets[i].rtl) = trial;
5472 cse_jumps_altered = 1;
5473 break;
5474 }
5475
5476 /* Look for a substitution that makes a valid insn. */
5477 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5478 {
5479 rtx new = canon_reg (SET_SRC (sets[i].rtl), insn);
5480
5481 /* If we just made a substitution inside a libcall, then we
5482 need to make the same substitution in any notes attached
5483 to the RETVAL insn. */
5484 if (libcall_insn
5485 && (REG_P (sets[i].orig_src)
5486 || GET_CODE (sets[i].orig_src) == SUBREG
5487 || MEM_P (sets[i].orig_src)))
5488 {
5489 rtx note = find_reg_equal_equiv_note (libcall_insn);
5490 if (note != 0)
5491 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0),
5492 sets[i].orig_src,
5493 copy_rtx (new));
5494 }
5495
5496 /* The result of apply_change_group can be ignored; see
5497 canon_reg. */
5498
5499 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
5500 apply_change_group ();
5501 break;
5502 }
5503
5504 /* If we previously found constant pool entries for
5505 constants and this is a constant, try making a
5506 pool entry. Put it in src_folded unless we have already done
5507 this, since that is where it likely came from. */
5508
5509 else if (constant_pool_entries_cost
5510 && CONSTANT_P (trial)
5511 /* Reject cases that will abort in decode_rtx_const.
5512 On the alpha when simplifying a switch, we get
5513 (const (truncate (minus (label_ref) (label_ref)))). */
5514 && ! (GET_CODE (trial) == CONST
5515 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
5516 /* Likewise on IA-64, except without the truncate. */
5517 && ! (GET_CODE (trial) == CONST
5518 && GET_CODE (XEXP (trial, 0)) == MINUS
5519 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5520 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)
5521 && (src_folded == 0
5522 || (!MEM_P (src_folded)
5523 && ! src_folded_force_flag))
5524 && GET_MODE_CLASS (mode) != MODE_CC
5525 && mode != VOIDmode)
5526 {
5527 src_folded_force_flag = 1;
5528 src_folded = trial;
5529 src_folded_cost = constant_pool_entries_cost;
5530 src_folded_regcost = constant_pool_entries_regcost;
5531 }
5532 }
5533
5534 src = SET_SRC (sets[i].rtl);
5535
5536 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5537 However, there is an important exception: If both are registers
5538 that are not the head of their equivalence class, replace SET_SRC
5539 with the head of the class. If we do not do this, we will have
5540 both registers live over a portion of the basic block. This way,
5541 their lifetimes will likely abut instead of overlapping. */
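/* A sketch with invented pseudos: for (set (reg 105) (reg 105))
   where the equivalence class of reg 105 is headed by reg 104,
   rewrite the insn as (set (reg 105) (reg 104)), turning it into a
   plain copy from the class head. */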
5542 if (REG_P (dest)
5543 && REGNO_QTY_VALID_P (REGNO (dest)))
5544 {
5545 int dest_q = REG_QTY (REGNO (dest));
5546 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5547
5548 if (dest_ent->mode == GET_MODE (dest)
5549 && dest_ent->first_reg != REGNO (dest)
5550 && REG_P (src) && REGNO (src) == REGNO (dest)
5551 /* Don't do this if the original insn had a hard reg as
5552 SET_SRC or SET_DEST. */
5553 && (!REG_P (sets[i].src)
5554 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5555 && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5556 /* We can't call canon_reg here because it won't do anything if
5557 SRC is a hard register. */
5558 {
5559 int src_q = REG_QTY (REGNO (src));
5560 struct qty_table_elem *src_ent = &qty_table[src_q];
5561 int first = src_ent->first_reg;
5562 rtx new_src
5563 = (first >= FIRST_PSEUDO_REGISTER
5564 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5565
5566 /* We must use validate_change even for this, because this
5567 might be a special no-op instruction, suitable only to
5568 tag notes onto. */
5569 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5570 {
5571 src = new_src;
5572 /* If we had a constant that is cheaper than what we are now
5573 setting SRC to, use that constant. We ignored it when we
5574 thought we could make this into a no-op. */
5575 if (src_const && COST (src_const) < COST (src)
5576 && validate_change (insn, &SET_SRC (sets[i].rtl),
5577 src_const, 0))
5578 src = src_const;
5579 }
5580 }
5581 }
5582
5583 /* If we made a change, recompute SRC values. */
5584 if (src != sets[i].src)
5585 {
5586 cse_altered = 1;
5587 do_not_record = 0;
5588 hash_arg_in_memory = 0;
5589 sets[i].src = src;
5590 sets[i].src_hash = HASH (src, mode);
5591 sets[i].src_volatile = do_not_record;
5592 sets[i].src_in_memory = hash_arg_in_memory;
5593 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5594 }
5595
5596 /* If this is a single SET, we are setting a register, and we have an
5597 equivalent constant, we want to add a REG_NOTE. We don't want
5598 to write a REG_EQUAL note for a constant pseudo since verifying that
5599 that pseudo hasn't been eliminated is a pain. Such a note also
5600 won't help anything.
5601
5602 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5603 which can be created for a reference to a compile time computable
5604 entry in a jump table. */
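/* For example (invented operands), if this insn is
       (set (reg:SI 106) (plus:SI (reg:SI 107) (const_int 4)))
   and the source is known to equal (const_int 12), the insn gains
   an (expr_list:REG_EQUAL (const_int 12) ...) note. */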
5605
5606 if (n_sets == 1 && src_const && REG_P (dest)
5607 && !REG_P (src_const)
5608 && ! (GET_CODE (src_const) == CONST
5609 && GET_CODE (XEXP (src_const, 0)) == MINUS
5610 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5611 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5612 {
5613 /* We only want a REG_EQUAL note if src_const != src. */
5614 if (! rtx_equal_p (src, src_const))
5615 {
5616 /* Make sure that the rtx is not shared. */
5617 src_const = copy_rtx (src_const);
5618
5619 /* Record the actual constant value in a REG_EQUAL note,
5620 making a new one if one does not already exist. */
5621 set_unique_reg_note (insn, REG_EQUAL, src_const);
5622 }
5623 }
5624
5625 /* Now deal with the destination. */
5626 do_not_record = 0;
5627
5628 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
5629 to the MEM or REG within it. */
5630 while (GET_CODE (dest) == SIGN_EXTRACT
5631 || GET_CODE (dest) == ZERO_EXTRACT
5632 || GET_CODE (dest) == SUBREG
5633 || GET_CODE (dest) == STRICT_LOW_PART)
5634 dest = XEXP (dest, 0);
5635
5636 sets[i].inner_dest = dest;
5637
5638 if (MEM_P (dest))
5639 {
5640 #ifdef PUSH_ROUNDING
5641 /* Stack pushes invalidate the stack pointer. */
5642 rtx addr = XEXP (dest, 0);
5643 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
5644 && XEXP (addr, 0) == stack_pointer_rtx)
5645 invalidate (stack_pointer_rtx, Pmode);
5646 #endif
5647 dest = fold_rtx (dest, insn);
5648 }
5649
5650 /* Compute the hash code of the destination now,
5651 before the effects of this instruction are recorded,
5652 since the register values used in the address computation
5653 are those before this instruction. */
5654 sets[i].dest_hash = HASH (dest, mode);
5655
5656 /* Don't enter a bit-field in the hash table
5657 because the value in it after the store
5658 may not equal what was stored, due to truncation. */
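/* For instance, storing (const_int 0x13) in a 4-bit field leaves
   only 0x3 behind, so recording the constant would be wrong. The
   test below accepts a constant only if no bits outside the field
   width are set. (Illustrative values.) */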
5659
5660 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5661 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5662 {
5663 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5664
5665 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5666 && GET_CODE (width) == CONST_INT
5667 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5668 && ! (INTVAL (src_const)
5669 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5670 /* Exception: if the value is constant,
5671 and it won't be truncated, record it. */
5672 ;
5673 else
5674 {
5675 /* This is chosen so that the destination will be invalidated
5676 but no new value will be recorded.
5677 We must invalidate because sometimes constant
5678 values can be recorded for bitfields. */
5679 sets[i].src_elt = 0;
5680 sets[i].src_volatile = 1;
5681 src_eqv = 0;
5682 src_eqv_elt = 0;
5683 }
5684 }
5685
5686 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5687 the insn. */
5688 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5689 {
5690 /* One less use of the label this insn used to jump to. */
5691 delete_insn (insn);
5692 cse_jumps_altered = 1;
5693 /* No more processing for this set. */
5694 sets[i].rtl = 0;
5695 }
5696
5697 /* If this SET is now setting PC to a label, we know it used to
5698 be a conditional or computed branch. */
5699 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF
5700 && !LABEL_REF_NONLOCAL_P (src))
5701 {
5702 /* Now emit a BARRIER after the unconditional jump. */
5703 if (NEXT_INSN (insn) == 0
5704 || !BARRIER_P (NEXT_INSN (insn)))
5705 emit_barrier_after (insn);
5706
5707 /* We reemit the jump in as many cases as possible just in
5708 case the form of an unconditional jump is significantly
5709 different from that of a computed jump or conditional jump.
5710
5711 If this insn has multiple sets, then reemitting the
5712 jump is nontrivial. So instead we just force rerecognition
5713 and hope for the best. */
5714 if (n_sets == 1)
5715 {
5716 rtx new, note;
5717
5718 new = emit_jump_insn_after (gen_jump (XEXP (src, 0)), insn);
5719 JUMP_LABEL (new) = XEXP (src, 0);
5720 LABEL_NUSES (XEXP (src, 0))++;
5721
5722 /* Make sure to copy over REG_NON_LOCAL_GOTO. */
5723 note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
5724 if (note)
5725 {
5726 XEXP (note, 1) = NULL_RTX;
5727 REG_NOTES (new) = note;
5728 }
5729
5730 delete_insn (insn);
5731 insn = new;
5732
5733 /* Now emit a BARRIER after the unconditional jump. */
5734 if (NEXT_INSN (insn) == 0
5735 || !BARRIER_P (NEXT_INSN (insn)))
5736 emit_barrier_after (insn);
5737 }
5738 else
5739 INSN_CODE (insn) = -1;
5740
5741 /* Do not bother deleting any unreachable code,
5742 let jump/flow do that. */
5743
5744 cse_jumps_altered = 1;
5745 sets[i].rtl = 0;
5746 }
5747
5748 /* If destination is volatile, invalidate it and then do no further
5749 processing for this assignment. */
5750
5751 else if (do_not_record)
5752 {
5753 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5754 invalidate (dest, VOIDmode);
5755 else if (MEM_P (dest))
5756 invalidate (dest, VOIDmode);
5757 else if (GET_CODE (dest) == STRICT_LOW_PART
5758 || GET_CODE (dest) == ZERO_EXTRACT)
5759 invalidate (XEXP (dest, 0), GET_MODE (dest));
5760 sets[i].rtl = 0;
5761 }
5762
5763 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5764 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5765
5766 #ifdef HAVE_cc0
5767 /* If setting CC0, record what it was set to, or a constant, if it
5768 is equivalent to a constant. If it is being set to a floating-point
5769 value, make a COMPARE with the appropriate constant of 0. If we
5770 don't do this, later code can interpret this as a test against
5771 const0_rtx, which can cause problems if we try to put it into an
5772 insn as a floating-point operand. */
5773 if (dest == cc0_rtx)
5774 {
5775 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5776 this_insn_cc0_mode = mode;
5777 if (FLOAT_MODE_P (mode))
5778 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5779 CONST0_RTX (mode));
5780 }
5781 #endif
5782 }
5783
5784 /* Now enter all non-volatile source expressions in the hash table
5785 if they are not already present.
5786 Record their equivalence classes in src_elt.
5787 This way we can insert the corresponding destinations into
5788 the same classes even if the actual sources are no longer in them
5789 (having been invalidated). */
5790
5791 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5792 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5793 {
5794 struct table_elt *elt;
5795 struct table_elt *classp = sets[0].src_elt;
5796 rtx dest = SET_DEST (sets[0].rtl);
5797 enum machine_mode eqvmode = GET_MODE (dest);
5798
5799 if (GET_CODE (dest) == STRICT_LOW_PART)
5800 {
5801 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5802 classp = 0;
5803 }
5804 if (insert_regs (src_eqv, classp, 0))
5805 {
5806 rehash_using_reg (src_eqv);
5807 src_eqv_hash = HASH (src_eqv, eqvmode);
5808 }
5809 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5810 elt->in_memory = src_eqv_in_memory;
5811 src_eqv_elt = elt;
5812
5813 /* Check to see if src_eqv_elt is the same as a set source which
5814 does not yet have an elt, and if so set the elt of the set source
5815 to src_eqv_elt. */
5816 for (i = 0; i < n_sets; i++)
5817 if (sets[i].rtl && sets[i].src_elt == 0
5818 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5819 sets[i].src_elt = src_eqv_elt;
5820 }
5821
5822 for (i = 0; i < n_sets; i++)
5823 if (sets[i].rtl && ! sets[i].src_volatile
5824 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5825 {
5826 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5827 {
5828 /* REG_EQUAL in setting a STRICT_LOW_PART
5829 gives an equivalent for the entire destination register,
5830 not just for the subreg being stored in now.
5831 This is a more interesting equivalence, so we arrange later
5832 to treat the entire reg as the destination. */
5833 sets[i].src_elt = src_eqv_elt;
5834 sets[i].src_hash = src_eqv_hash;
5835 }
5836 else
5837 {
5838 /* Insert source and constant equivalent into hash table, if not
5839 already present. */
5840 struct table_elt *classp = src_eqv_elt;
5841 rtx src = sets[i].src;
5842 rtx dest = SET_DEST (sets[i].rtl);
5843 enum machine_mode mode
5844 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5845
5846 /* It's possible that we have a source value known to be
5847 constant but don't have a REG_EQUAL note on the insn.
5848 Lack of a note will mean src_eqv_elt will be NULL. This
5849 can happen where we've generated a SUBREG to access a
5850 CONST_INT that is already in a register in a wider mode.
5851 Ensure that the source expression is put in the proper
5852 constant class. */
5853 if (!classp)
5854 classp = sets[i].src_const_elt;
5855
5856 if (sets[i].src_elt == 0)
5857 {
5858 /* Don't put a hard register source into the table if this is
5859 the last insn of a libcall. In this case, we only need
5860 to put src_eqv_elt in src_elt. */
5861 if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5862 {
5863 struct table_elt *elt;
5864
5865 /* Note that these insert_regs calls cannot remove
5866 any of the src_elt's, because they would have failed to
5867 match if not still valid. */
5868 if (insert_regs (src, classp, 0))
5869 {
5870 rehash_using_reg (src);
5871 sets[i].src_hash = HASH (src, mode);
5872 }
5873 elt = insert (src, classp, sets[i].src_hash, mode);
5874 elt->in_memory = sets[i].src_in_memory;
5875 sets[i].src_elt = classp = elt;
5876 }
5877 else
5878 sets[i].src_elt = classp;
5879 }
5880 if (sets[i].src_const && sets[i].src_const_elt == 0
5881 && src != sets[i].src_const
5882 && ! rtx_equal_p (sets[i].src_const, src))
5883 sets[i].src_elt = insert (sets[i].src_const, classp,
5884 sets[i].src_const_hash, mode);
5885 }
5886 }
5887 else if (sets[i].src_elt == 0)
5888 /* If we did not insert the source into the hash table (e.g., it was
5889 volatile), note the equivalence class for the REG_EQUAL value, if any,
5890 so that the destination goes into that class. */
5891 sets[i].src_elt = src_eqv_elt;
5892
5893 invalidate_from_clobbers (x);
5894
5895 /* Some registers are invalidated by subroutine calls. Memory is
5896 invalidated by non-constant calls. */
5897
5898 if (CALL_P (insn))
5899 {
5900 if (! CONST_OR_PURE_CALL_P (insn))
5901 invalidate_memory ();
5902 invalidate_for_call ();
5903 }
5904
5905 /* Now invalidate everything set by this instruction.
5906 If a SUBREG or other funny destination is being set,
5907 sets[i].rtl is still nonzero, so here we invalidate the reg
5908 a part of which is being set. */
5909
5910 for (i = 0; i < n_sets; i++)
5911 if (sets[i].rtl)
5912 {
5913 /* We can't use the inner dest, because the mode associated with
5914 a ZERO_EXTRACT is significant. */
5915 rtx dest = SET_DEST (sets[i].rtl);
5916
5917 /* Needed for registers to remove the register from its
5918 previous quantity's chain.
5919 Needed for memory if this is a nonvarying address, unless
5920 we have just done an invalidate_memory that covers even those. */
5921 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5922 invalidate (dest, VOIDmode);
5923 else if (MEM_P (dest))
5924 invalidate (dest, VOIDmode);
5925 else if (GET_CODE (dest) == STRICT_LOW_PART
5926 || GET_CODE (dest) == ZERO_EXTRACT)
5927 invalidate (XEXP (dest, 0), GET_MODE (dest));
5928 }
5929
5930 /* A volatile ASM invalidates everything. */
5931 if (NONJUMP_INSN_P (insn)
5932 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5933 && MEM_VOLATILE_P (PATTERN (insn)))
5934 flush_hash_table ();
5935
5936 /* Make sure registers mentioned in destinations
5937 are safe for use in an expression to be inserted.
5938 This removes from the hash table
5939 any invalid entry that refers to one of these registers.
5940
5941 We don't care about the return value from mention_regs because
5942 we are going to hash the SET_DEST values unconditionally. */
5943
5944 for (i = 0; i < n_sets; i++)
5945 {
5946 if (sets[i].rtl)
5947 {
5948 rtx x = SET_DEST (sets[i].rtl);
5949
5950 if (!REG_P (x))
5951 mention_regs (x);
5952 else
5953 {
5954 /* We used to rely on all references to a register becoming
5955 inaccessible when a register changes to a new quantity,
5956 since that changes the hash code. However, that is not
5957 safe, since after HASH_SIZE new quantities we get a
5958 hash 'collision' of a register with its own invalid
5959 entries. And since SUBREGs have been changed not to
5960 change their hash code with the hash code of the register,
5961 it wouldn't work any longer at all. So we have to check
5962 for any invalid references lying around now.
5963 This code is similar to the REG case in mention_regs,
5964 but it knows that reg_tick has been incremented, and
5965 it leaves reg_in_table as -1. */
5966 unsigned int regno = REGNO (x);
5967 unsigned int endregno
5968 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
5969 : hard_regno_nregs[regno][GET_MODE (x)]);
5970 unsigned int i;
5971
5972 for (i = regno; i < endregno; i++)
5973 {
5974 if (REG_IN_TABLE (i) >= 0)
5975 {
5976 remove_invalid_refs (i);
5977 REG_IN_TABLE (i) = -1;
5978 }
5979 }
5980 }
5981 }
5982 }
5983
5984 /* We may have just removed some of the src_elt's from the hash table.
5985 So replace each one with the current head of the same class. */
5986
5987 for (i = 0; i < n_sets; i++)
5988 if (sets[i].rtl)
5989 {
5990 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
5991 /* If elt was removed, find current head of same class,
5992 or 0 if nothing remains of that class. */
5993 {
5994 struct table_elt *elt = sets[i].src_elt;
5995
5996 while (elt && elt->prev_same_value)
5997 elt = elt->prev_same_value;
5998
5999 while (elt && elt->first_same_value == 0)
6000 elt = elt->next_same_value;
6001 sets[i].src_elt = elt ? elt->first_same_value : 0;
6002 }
6003 }
6004
6005 /* Now insert the destinations into their equivalence classes. */
6006
6007 for (i = 0; i < n_sets; i++)
6008 if (sets[i].rtl)
6009 {
6010 rtx dest = SET_DEST (sets[i].rtl);
6011 struct table_elt *elt;
6012
6013 /* Don't record value if we are not supposed to risk allocating
6014 floating-point values in registers that might be wider than
6015 memory. */
6016 if ((flag_float_store
6017 && MEM_P (dest)
6018 && FLOAT_MODE_P (GET_MODE (dest)))
6019 /* Don't record BLKmode values, because we don't know their
6020 size, and can't be sure that other BLKmode values
6021 have the same or smaller size. */
6022 || GET_MODE (dest) == BLKmode
6023 /* Don't record values of destinations set inside a libcall block
6024 since we might delete the libcall. Things should have been set
6025 up so we won't want to reuse such a value, but we play it safe
6026 here. */
6027 || libcall_insn
6028 /* If we didn't put a REG_EQUAL value or a source into the hash
6029 table, there is no point in recording DEST. */
6030 || sets[i].src_elt == 0
6031 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
6032 or SIGN_EXTEND, don't record DEST since it can cause
6033 some tracking to be wrong.
6034
6035 ??? Think about this more later. */
6036 || (GET_CODE (dest) == SUBREG
6037 && (GET_MODE_SIZE (GET_MODE (dest))
6038 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6039 && (GET_CODE (sets[i].src) == SIGN_EXTEND
6040 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
6041 continue;
6042
6043 /* STRICT_LOW_PART isn't part of the value BEING set,
6044 and neither is the SUBREG inside it.
6045 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
6046 if (GET_CODE (dest) == STRICT_LOW_PART)
6047 dest = SUBREG_REG (XEXP (dest, 0));
6048
6049 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
6050 /* Registers must also be inserted into chains for quantities. */
6051 if (insert_regs (dest, sets[i].src_elt, 1))
6052 {
6053 /* If `insert_regs' changes something, the hash code must be
6054 recalculated. */
6055 rehash_using_reg (dest);
6056 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6057 }
6058
6059 elt = insert (dest, sets[i].src_elt,
6060 sets[i].dest_hash, GET_MODE (dest));
6061
6062 elt->in_memory = (MEM_P (sets[i].inner_dest)
6063 && !MEM_READONLY_P (sets[i].inner_dest));
6064
6065 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6066 narrower than M2, and both M1 and M2 are the same number of words,
6067 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6068 make that equivalence as well.
6069
6070 However, BAR may have equivalences for which gen_lowpart
6071 will produce a simpler value than gen_lowpart applied to
6072 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6073 BAR's equivalences. If we don't get a simplified form, make
6074 the SUBREG. It will not be used in an equivalence, but will
6075 cause two similar assignments to be detected.
6076
6077 Note the loop below will find SUBREG_REG (DEST) since we have
6078 already entered SRC and DEST of the SET in the table. */
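/* Concretely (invented registers, 32-bit words): from
       (set (subreg:SI (reg:HI 110) 0) (reg:SI 111))
   we can also record (reg:HI 110) as equivalent to
   (subreg:HI (reg:SI 111) 0), the low part of the source. */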
6079
6080 if (GET_CODE (dest) == SUBREG
6081 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6082 / UNITS_PER_WORD)
6083 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
6084 && (GET_MODE_SIZE (GET_MODE (dest))
6085 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6086 && sets[i].src_elt != 0)
6087 {
6088 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6089 struct table_elt *elt, *classp = 0;
6090
6091 for (elt = sets[i].src_elt->first_same_value; elt;
6092 elt = elt->next_same_value)
6093 {
6094 rtx new_src = 0;
6095 unsigned src_hash;
6096 struct table_elt *src_elt;
6097 int byte = 0;
6098
6099 /* Ignore invalid entries. */
6100 if (!REG_P (elt->exp)
6101 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
6102 continue;
6103
6104 /* We may have already been playing subreg games. If the
6105 mode is already correct for the destination, use it. */
6106 if (GET_MODE (elt->exp) == new_mode)
6107 new_src = elt->exp;
6108 else
6109 {
6110 /* Calculate big endian correction for the SUBREG_BYTE.
6111 We have already checked that M1 (GET_MODE (dest))
6112 is not narrower than M2 (new_mode). */
6113 if (BYTES_BIG_ENDIAN)
6114 byte = (GET_MODE_SIZE (GET_MODE (dest))
6115 - GET_MODE_SIZE (new_mode));
6116
6117 new_src = simplify_gen_subreg (new_mode, elt->exp,
6118 GET_MODE (dest), byte);
6119 }
6120
6121 /* The call to simplify_gen_subreg fails if the value
6122 is VOIDmode, yet we can't do any simplification, e.g.
6123 for EXPR_LISTs denoting function call results.
6124 It is invalid to construct a SUBREG with a VOIDmode
6125 SUBREG_REG, hence a zero new_src means we can't do
6126 this substitution. */
6127 if (! new_src)
6128 continue;
6129
6130 src_hash = HASH (new_src, new_mode);
6131 src_elt = lookup (new_src, src_hash, new_mode);
6132
6133 /* Put the new source in the hash table if it isn't
6134 there already. */
6135 if (src_elt == 0)
6136 {
6137 if (insert_regs (new_src, classp, 0))
6138 {
6139 rehash_using_reg (new_src);
6140 src_hash = HASH (new_src, new_mode);
6141 }
6142 src_elt = insert (new_src, classp, src_hash, new_mode);
6143 src_elt->in_memory = elt->in_memory;
6144 }
6145 else if (classp && classp != src_elt->first_same_value)
6146 /* Show that two things that we've seen before are
6147 actually the same. */
6148 merge_equiv_classes (src_elt, classp);
6149
6150 classp = src_elt->first_same_value;
6151 /* Ignore invalid entries. */
6152 while (classp
6153 && !REG_P (classp->exp)
6154 && ! exp_equiv_p (classp->exp, classp->exp, 1, false))
6155 classp = classp->next_same_value;
6156 }
6157 }
6158 }
6159
6160 /* Special handling for (set REG0 REG1) where REG0 is the
6161 "cheapest", cheaper than REG1. After cse, REG1 will probably not
6162 be used in the sequel, so (if easily done) change this insn to
6163 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6164 that computed their value. Then REG1 will become a dead store
6165 and won't cloud the situation for later optimizations.
6166
6167 Do not make this change if REG1 is a hard register, because it will
6168 then be used in the sequel and we may be changing a two-operand insn
6169 into a three-operand insn.
6170
6171 Also do not do this if we are operating on a copy of INSN.
6172
6173 Also don't do this if INSN ends a libcall; this would cause an unrelated
6174 register to be set in the middle of a libcall, and we then get bad code
6175 if the libcall is deleted. */
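/* Illustration (invented pseudos), where reg 112 is the cheap
   class head:
       (set (reg 113) <expr>)          (set (reg 112) <expr>)
       (set (reg 112) (reg 113))  -->  (set (reg 113) (reg 112))
   so the store into reg 113 can later die. */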
6176
6177 if (n_sets == 1 && sets[0].rtl && REG_P (SET_DEST (sets[0].rtl))
6178 && NEXT_INSN (PREV_INSN (insn)) == insn
6179 && REG_P (SET_SRC (sets[0].rtl))
6180 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6181 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
6182 {
6183 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6184 struct qty_table_elem *src_ent = &qty_table[src_q];
6185
6186 if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6187 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6188 {
6189 rtx prev = insn;
6190 /* Scan for the previous nonnote insn, but stop at a basic
6191 block boundary. */
6192 do
6193 {
6194 prev = PREV_INSN (prev);
6195 }
6196 while (prev && NOTE_P (prev)
6197 && NOTE_LINE_NUMBER (prev) != NOTE_INSN_BASIC_BLOCK);
6198
6199 /* Do not swap the registers around if the previous instruction
6200 attaches a REG_EQUIV note to REG1.
6201
6202 ??? It's not entirely clear whether we can transfer a REG_EQUIV
6203 from the pseudo that originally shadowed an incoming argument
6204 to another register. Some uses of REG_EQUIV might rely on it
6205 being attached to REG1 rather than REG2.
6206
6207 This section previously turned the REG_EQUIV into a REG_EQUAL
6208 note. We cannot do that because REG_EQUIV may provide an
6209 uninitialized stack slot when REG_PARM_STACK_SPACE is used. */
6210
6211 if (prev != 0 && NONJUMP_INSN_P (prev)
6212 && GET_CODE (PATTERN (prev)) == SET
6213 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6214 && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
6215 {
6216 rtx dest = SET_DEST (sets[0].rtl);
6217 rtx src = SET_SRC (sets[0].rtl);
6218 rtx note;
6219
6220 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6221 validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6222 validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6223 apply_change_group ();
6224
6225 /* If INSN has a REG_EQUAL note, and this note mentions
6226 REG0, then we must delete it, because the value in
6227 REG0 has changed. If the note's value is REG1, we must
6228 also delete it because that is now this insn's dest. */
6229 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6230 if (note != 0
6231 && (reg_mentioned_p (dest, XEXP (note, 0))
6232 || rtx_equal_p (src, XEXP (note, 0))))
6233 remove_note (insn, note);
6234 }
6235 }
6236 }
6237
6238 /* If this is a conditional jump insn, record any known equivalences due to
6239 the condition being tested. */
6240
6241 if (JUMP_P (insn)
6242 && n_sets == 1 && GET_CODE (x) == SET
6243 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6244 record_jump_equiv (insn, 0);
6245
6246 #ifdef HAVE_cc0
6247 /* If the previous insn set CC0 and this insn no longer references CC0,
6248 delete the previous insn. Here we use the fact that nothing expects CC0
6249 to be valid over an insn, which is true until the final pass. */
6250 if (prev_insn && NONJUMP_INSN_P (prev_insn)
6251 && (tem = single_set (prev_insn)) != 0
6252 && SET_DEST (tem) == cc0_rtx
6253 && ! reg_mentioned_p (cc0_rtx, x))
6254 delete_insn (prev_insn);
6255
6256 prev_insn_cc0 = this_insn_cc0;
6257 prev_insn_cc0_mode = this_insn_cc0_mode;
6258 prev_insn = insn;
6259 #endif
6260 }
6261 \f
6262 /* Remove from the hash table all expressions that reference memory. */
6263
6264 static void
6265 invalidate_memory (void)
6266 {
6267 int i;
6268 struct table_elt *p, *next;
6269
6270 for (i = 0; i < HASH_SIZE; i++)
6271 for (p = table[i]; p; p = next)
6272 {
6273 next = p->next_same_hash;
6274 if (p->in_memory)
6275 remove_from_table (p, i);
6276 }
6277 }
6278
6279 /* If ADDR is an address that implicitly affects the stack pointer, return
6280 1 and update the register tables to show the effect. Else, return 0. */
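/* Such addresses are the auto-modify forms, e.g. the address
   (pre_dec:SI (reg:SI sp)) of a push, or (post_inc:SI (reg:SI sp))
   of a pop. (Illustrative modes.) */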
6281
6282 static int
6283 addr_affects_sp_p (rtx addr)
6284 {
6285 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
6286 && REG_P (XEXP (addr, 0))
6287 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6288 {
6289 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6290 {
6291 REG_TICK (STACK_POINTER_REGNUM)++;
6292 /* Is it possible to use a subreg of SP? */
6293 SUBREG_TICKED (STACK_POINTER_REGNUM) = -1;
6294 }
6295
6296 /* This should be *very* rare. */
6297 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6298 invalidate (stack_pointer_rtx, VOIDmode);
6299
6300 return 1;
6301 }
6302
6303 return 0;
6304 }
6305
6306 /* Perform invalidation on the basis of everything about an insn
6307 except for invalidating the actual places that are SET in it.
6308 This includes the places CLOBBERed, and anything that might
6309 alias with something that is SET or CLOBBERed.
6310
6311 X is the pattern of the insn. */
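/* E.g. a pattern such as (invented operands)
       (parallel [(set ...)
                  (clobber (reg:SI 114))
                  (clobber (mem:BLK (scratch)))])
   has its two CLOBBERed places invalidated here; the SET itself
   is handled by the caller. */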
6312
6313 static void
6314 invalidate_from_clobbers (rtx x)
6315 {
6316 if (GET_CODE (x) == CLOBBER)
6317 {
6318 rtx ref = XEXP (x, 0);
6319 if (ref)
6320 {
6321 if (REG_P (ref) || GET_CODE (ref) == SUBREG
6322 || MEM_P (ref))
6323 invalidate (ref, VOIDmode);
6324 else if (GET_CODE (ref) == STRICT_LOW_PART
6325 || GET_CODE (ref) == ZERO_EXTRACT)
6326 invalidate (XEXP (ref, 0), GET_MODE (ref));
6327 }
6328 }
6329 else if (GET_CODE (x) == PARALLEL)
6330 {
6331 int i;
6332 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6333 {
6334 rtx y = XVECEXP (x, 0, i);
6335 if (GET_CODE (y) == CLOBBER)
6336 {
6337 rtx ref = XEXP (y, 0);
6338 if (REG_P (ref) || GET_CODE (ref) == SUBREG
6339 || MEM_P (ref))
6340 invalidate (ref, VOIDmode);
6341 else if (GET_CODE (ref) == STRICT_LOW_PART
6342 || GET_CODE (ref) == ZERO_EXTRACT)
6343 invalidate (XEXP (ref, 0), GET_MODE (ref));
6344 }
6345 }
6346 }
6347 }
6348 \f
6349 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6350 and replace any registers in them with either an equivalent constant
6351 or the canonical form of the register. If we are inside an address,
6352 only do this if the address remains valid.
6353
6354 OBJECT is 0 except when within a MEM in which case it is the MEM.
6355
6356 Return the replacement for X. */
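/* A hypothetical example: in a REG_EQUAL note containing
       (plus:SI (reg:SI 108) (const_int 4))
   where reg 108 is known to hold (const_int 8), the register is
   replaced, leaving (plus:SI (const_int 8) (const_int 4)) for
   later passes to fold. */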
6357
6358 static rtx
6359 cse_process_notes (rtx x, rtx object)
6360 {
6361 enum rtx_code code = GET_CODE (x);
6362 const char *fmt = GET_RTX_FORMAT (code);
6363 int i;
6364
6365 switch (code)
6366 {
6367 case CONST_INT:
6368 case CONST:
6369 case SYMBOL_REF:
6370 case LABEL_REF:
6371 case CONST_DOUBLE:
6372 case CONST_VECTOR:
6373 case PC:
6374 case CC0:
6375 case LO_SUM:
6376 return x;
6377
6378 case MEM:
6379 validate_change (x, &XEXP (x, 0),
6380 cse_process_notes (XEXP (x, 0), x), 0);
6381 return x;
6382
6383 case EXPR_LIST:
6384 case INSN_LIST:
6385 if (REG_NOTE_KIND (x) == REG_EQUAL)
6386 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6387 if (XEXP (x, 1))
6388 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6389 return x;
6390
6391 case SIGN_EXTEND:
6392 case ZERO_EXTEND:
6393 case SUBREG:
6394 {
6395 rtx new = cse_process_notes (XEXP (x, 0), object);
6396 /* We don't substitute VOIDmode constants into these rtx,
6397 since they would impede folding. */
6398 if (GET_MODE (new) != VOIDmode)
6399 validate_change (object, &XEXP (x, 0), new, 0);
6400 return x;
6401 }
6402
6403 case REG:
6404 i = REG_QTY (REGNO (x));
6405
6406 /* Return a constant or a constant register. */
6407 if (REGNO_QTY_VALID_P (REGNO (x)))
6408 {
6409 struct qty_table_elem *ent = &qty_table[i];
6410
6411 if (ent->const_rtx != NULL_RTX
6412 && (CONSTANT_P (ent->const_rtx)
6413 || REG_P (ent->const_rtx)))
6414 {
6415 rtx new = gen_lowpart (GET_MODE (x), ent->const_rtx);
6416 if (new)
6417 return new;
6418 }
6419 }
6420
6421 /* Otherwise, canonicalize this register. */
6422 return canon_reg (x, NULL_RTX);
6423
6424 default:
6425 break;
6426 }
6427
6428 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6429 if (fmt[i] == 'e')
6430 validate_change (object, &XEXP (x, i),
6431 cse_process_notes (XEXP (x, i), object), 0);
6432
6433 return x;
6434 }
6435 \f
6436 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
6437 since they are done elsewhere. This function is called via note_stores. */
6438
6439 static void
6440 invalidate_skipped_set (rtx dest, rtx set, void *data ATTRIBUTE_UNUSED)
6441 {
6442 enum rtx_code code = GET_CODE (dest);
6443
6444 if (code == MEM
6445 && ! addr_affects_sp_p (dest) /* If this is not a stack push ... */
6446 /* There are times when an address can appear varying and be a PLUS
6447 during this scan when it would be a fixed address were we to know
6448 the proper equivalences. So invalidate all memory if there is
6449 a BLKmode or nonscalar memory reference or a reference to a
6450 variable address. */
6451 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6452 || cse_rtx_varies_p (XEXP (dest, 0), 0)))
6453 {
6454 invalidate_memory ();
6455 return;
6456 }
6457
6458 if (GET_CODE (set) == CLOBBER
6459 || CC0_P (dest)
6460 || dest == pc_rtx)
6461 return;
6462
6463 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6464 invalidate (XEXP (dest, 0), GET_MODE (dest));
6465 else if (code == REG || code == SUBREG || code == MEM)
6466 invalidate (dest, VOIDmode);
6467 }
6468
6469 /* Invalidate all insns from START up to the end of the function or the
6470 next label. This is called when we wish to CSE around a block that is
6471 conditionally executed. */
6472
6473 static void
6474 invalidate_skipped_block (rtx start)
6475 {
6476 rtx insn;
6477
6478 for (insn = start; insn && !LABEL_P (insn);
6479 insn = NEXT_INSN (insn))
6480 {
6481 if (! INSN_P (insn))
6482 continue;
6483
6484 if (CALL_P (insn))
6485 {
6486 if (! CONST_OR_PURE_CALL_P (insn))
6487 invalidate_memory ();
6488 invalidate_for_call ();
6489 }
6490
6491 invalidate_from_clobbers (PATTERN (insn));
6492 note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6493 }
6494 }
6495 \f
6496 /* Find the end of INSN's basic block and return its range,
6497 the total number of SETs in all the insns of the block, the last insn of the
6498 block, and the branch path.
6499
6500 The branch path indicates which branches should be followed. If a nonzero
6501 path size is specified, the block should be rescanned and a different set
6502 of branches will be taken. The branch path is only used if
6503 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is nonzero.
6504
6505 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6506 used to describe the block. It is filled in with the information about
6507 the current block. The incoming structure's branch path, if any, is used
6508 to construct the output branch path. */
6509
6510 static void
6511 cse_end_of_basic_block (rtx insn, struct cse_basic_block_data *data,
6512 int follow_jumps, int skip_blocks)
6513 {
6514 rtx p = insn, q;
6515 int nsets = 0;
6516 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6517 rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
6518 int path_size = data->path_size;
6519 int path_entry = 0;
6520 int i;
6521
6522 /* Update the previous branch path, if any. If the last branch was
6523 previously PATH_TAKEN, mark it PATH_NOT_TAKEN.
6524 If it was previously PATH_NOT_TAKEN,
6525 shorten the path by one and look at the previous branch. We know that
6526 at least one branch must have been taken if PATH_SIZE is nonzero. */
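/* For example, a previous path whose statuses were
   [PATH_TAKEN, PATH_TAKEN] is revisited as
   [PATH_TAKEN, PATH_NOT_TAKEN] on the next rescan, then as
   [PATH_NOT_TAKEN], and finally becomes empty, ending rescans. */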
6527 while (path_size > 0)
6528 {
6529 if (data->path[path_size - 1].status != PATH_NOT_TAKEN)
6530 {
6531 data->path[path_size - 1].status = PATH_NOT_TAKEN;
6532 break;
6533 }
6534 else
6535 path_size--;
6536 }
6537
6538 /* If the first instruction is marked with QImode, that means we've
6539 already processed this block. Our caller will look at DATA->LAST
6540 to figure out where to go next. We want to return the next block
6541 in the instruction stream, not some branched-to block somewhere
6542 else. We accomplish this by pretending our caller forbade us to
6543 follow jumps or skip blocks. */
6544 if (GET_MODE (insn) == QImode)
6545 follow_jumps = skip_blocks = 0;
6546
6547 /* Scan to end of this basic block. */
6548 while (p && !LABEL_P (p))
6549 {
6550 /* Don't cse over a call to setjmp; on some machines (eg VAX)
6551 the regs restored by the longjmp come from
6552 a later time than the setjmp. */
6553 if (PREV_INSN (p) && CALL_P (PREV_INSN (p))
6554 && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
6555 break;
6556
6557 /* A PARALLEL can have lots of SETs in it,
6558 especially if it is really an ASM_OPERANDS. */
6559 if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6560 nsets += XVECLEN (PATTERN (p), 0);
6561 else if (!NOTE_P (p))
6562 nsets += 1;
6563
6564 /* Ignore insns made by CSE; they cannot affect the boundaries of
6565 the basic block. */
6566
6567 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6568 high_cuid = INSN_CUID (p);
6569 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6570 low_cuid = INSN_CUID (p);
6571
6572 /* See if this insn is in our branch path. If it is and we are to
6573 take it, do so. */
6574 if (path_entry < path_size && data->path[path_entry].branch == p)
6575 {
6576 if (data->path[path_entry].status != PATH_NOT_TAKEN)
6577 p = JUMP_LABEL (p);
6578
6579 /* Point to next entry in path, if any. */
6580 path_entry++;
6581 }
6582
6583 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6584 was specified, we haven't reached our maximum path length, there are
6585 insns following the target of the jump, this is the only use of the
6586 jump label, and the target label is preceded by a BARRIER.
6587
6588 Alternatively, we can follow the jump if it branches around a
6589 block of code and there are no other branches into the block.
6590 In this case invalidate_skipped_block will be called to invalidate any
6591 registers set in the block when following the jump. */
6592
6593 else if ((follow_jumps || skip_blocks) && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH) - 1
6594 && JUMP_P (p)
6595 && GET_CODE (PATTERN (p)) == SET
6596 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6597 && JUMP_LABEL (p) != 0
6598 && LABEL_NUSES (JUMP_LABEL (p)) == 1
6599 && NEXT_INSN (JUMP_LABEL (p)) != 0)
6600 {
6601 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6602 if ((!NOTE_P (q)
6603 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
6604 || (PREV_INSN (q) && CALL_P (PREV_INSN (q))
6605 && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
6606 && (!LABEL_P (q) || LABEL_NUSES (q) != 0))
6607 break;
6608
6609 /* If we ran into a BARRIER, this code is an extension of the
6610 basic block when the branch is taken. */
6611 if (follow_jumps && q != 0 && BARRIER_P (q))
6612 {
6613 /* Don't allow ourselves to keep walking around an
6614 always-executed loop. */
6615 if (next_real_insn (q) == next)
6616 {
6617 p = NEXT_INSN (p);
6618 continue;
6619 }
6620
6621 /* Similarly, don't put a branch in our path more than once. */
6622 for (i = 0; i < path_entry; i++)
6623 if (data->path[i].branch == p)
6624 break;
6625
6626 if (i != path_entry)
6627 break;
6628
6629 data->path[path_entry].branch = p;
6630 data->path[path_entry++].status = PATH_TAKEN;
6631
6632 /* This branch now ends our path. It was possible that we
6633 didn't see this branch the last time around (when the
6634 insn in front of the target was a JUMP_INSN that was
6635 turned into a no-op). */
6636 path_size = path_entry;
6637
6638 p = JUMP_LABEL (p);
6639 /* Mark block so we won't scan it again later. */
6640 PUT_MODE (NEXT_INSN (p), QImode);
6641 }
6642 /* Detect a branch around a block of code. */
6643 else if (skip_blocks && q != 0 && !LABEL_P (q))
6644 {
6645 rtx tmp;
6646
6647 if (next_real_insn (q) == next)
6648 {
6649 p = NEXT_INSN (p);
6650 continue;
6651 }
6652
6653 for (i = 0; i < path_entry; i++)
6654 if (data->path[i].branch == p)
6655 break;
6656
6657 if (i != path_entry)
6658 break;
6659
6660 /* This is no_labels_between_p (p, q) with an added check for
6661 reaching the end of a function (in case Q precedes P). */
6662 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6663 if (LABEL_P (tmp))
6664 break;
6665
6666 if (tmp == q)
6667 {
6668 data->path[path_entry].branch = p;
6669 data->path[path_entry++].status = PATH_AROUND;
6670
6671 path_size = path_entry;
6672
6673 p = JUMP_LABEL (p);
6674 /* Mark block so we won't scan it again later. */
6675 PUT_MODE (NEXT_INSN (p), QImode);
6676 }
6677 }
6678 }
6679 p = NEXT_INSN (p);
6680 }
6681
6682 data->low_cuid = low_cuid;
6683 data->high_cuid = high_cuid;
6684 data->nsets = nsets;
6685 data->last = p;
6686
6687 /* If none of the jumps in the path was taken, set our path length to zero
6688 so a rescan won't be done. */
6689 for (i = path_size - 1; i >= 0; i--)
6690 if (data->path[i].status != PATH_NOT_TAKEN)
6691 break;
6692
6693 if (i == -1)
6694 data->path_size = 0;
6695 else
6696 data->path_size = path_size;
6697
6698 /* End the current branch path. */
6699 data->path[path_size].branch = 0;
6700 }
6701 \f
6702 /* Perform cse on the instructions of a function.
6703 F is the first instruction.
6704 NREGS is one plus the highest pseudo-reg number used in the function.
6705
6706 Returns 1 if jump_optimize should be redone due to simplifications
6707 in conditional jump instructions. */
6708
6709 int
6710 cse_main (rtx f, int nregs, FILE *file)
6711 {
6712 struct cse_basic_block_data val;
6713 rtx insn = f;
6714 int i;
6715
6716 val.path = xmalloc (sizeof (struct branch_path)
6717 * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6718
6719 cse_jumps_altered = 0;
6720 recorded_label_ref = 0;
6721 constant_pool_entries_cost = 0;
6722 constant_pool_entries_regcost = 0;
6723 val.path_size = 0;
6724 rtl_hooks = cse_rtl_hooks;
6725
6726 init_recog ();
6727 init_alias_analysis ();
6728
6729 max_reg = nregs;
6730
6731 max_insn_uid = get_max_uid ();
6732
6733 reg_eqv_table = xmalloc (nregs * sizeof (struct reg_eqv_elem));
6734
6735 /* Reset the counter indicating how many elements have been made
6736 thus far. */
6737 n_elements_made = 0;
6738
6739 /* Find the largest uid. */
6740
6741 max_uid = get_max_uid ();
6742 uid_cuid = xcalloc (max_uid + 1, sizeof (int));
6743
6744 /* Compute the mapping from uids to cuids.
6745 CUIDs are numbers assigned to insns, like uids,
6746 except that cuids increase monotonically through the code.
6747 Don't assign cuids to line-number NOTEs, so that the distance in cuids
6748 between two insns is not affected by -g. */
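/* For instance, insns with uids 42, 7 and 19, appearing in that
   order, receive cuids 1, 2 and 3; an intervening line-number note
   keeps the cuid of the preceding insn. (Uids invented.) */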
6749
6750 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
6751 {
6752 if (!NOTE_P (insn)
6753 || NOTE_LINE_NUMBER (insn) < 0)
6754 INSN_CUID (insn) = ++i;
6755 else
6756 /* Give a line number note the same cuid as preceding insn. */
6757 INSN_CUID (insn) = i;
6758 }
6759
6760 /* Loop over basic blocks.
6761 Compute the maximum number of qty's needed for each basic block
6762 (which is 2 for each SET). */
6763 insn = f;
6764 while (insn)
6765 {
6766 cse_altered = 0;
6767 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps,
6768 flag_cse_skip_blocks);
6769
6770 /* If this basic block was already processed or has no sets, skip it. */
6771 if (val.nsets == 0 || GET_MODE (insn) == QImode)
6772 {
6773 PUT_MODE (insn, VOIDmode);
6774 insn = (val.last ? NEXT_INSN (val.last) : 0);
6775 val.path_size = 0;
6776 continue;
6777 }
6778
6779 cse_basic_block_start = val.low_cuid;
6780 cse_basic_block_end = val.high_cuid;
6781 max_qty = val.nsets * 2;
6782
6783 if (file)
6784 fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
6785 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
6786 val.nsets);
6787
6788 /* Make MAX_QTY bigger to give us room to optimize
6789 past the end of this basic block, if that should prove useful. */
6790 if (max_qty < 500)
6791 max_qty = 500;
6792
6793 /* If this basic block is being extended by following certain jumps,
6794 (see `cse_end_of_basic_block'), we reprocess the code from the start.
6795 Otherwise, we start after this basic block. */
6796 if (val.path_size > 0)
6797 cse_basic_block (insn, val.last, val.path);
6798 else
6799 {
6800 int old_cse_jumps_altered = cse_jumps_altered;
6801 rtx temp;
6802
6803 /* When cse changes a conditional jump to an unconditional
6804 jump, we want to reprocess the block, since it will give
6805 us a new branch path to investigate. */
6806 cse_jumps_altered = 0;
6807 temp = cse_basic_block (insn, val.last, val.path);
6808 if (cse_jumps_altered == 0
6809 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
6810 insn = temp;
6811
6812 cse_jumps_altered |= old_cse_jumps_altered;
6813 }
6814
6815 if (cse_altered)
6816 ggc_collect ();
6817
6818 #ifdef USE_C_ALLOCA
6819 alloca (0);
6820 #endif
6821 }
6822
6823 if (max_elements_made < n_elements_made)
6824 max_elements_made = n_elements_made;
6825
6826 /* Clean up. */
6827 end_alias_analysis ();
6828 free (uid_cuid);
6829 free (reg_eqv_table);
6830 free (val.path);
6831 rtl_hooks = general_rtl_hooks;
6832
6833 return cse_jumps_altered || recorded_label_ref;
6834 }
6835
6836 /* Process a single basic block. FROM and TO are the limits of the basic
6837 block. NEXT_BRANCH points to the branch path when following jumps or
6838 a null path when not following jumps. */
6839
6840 static rtx
6841 cse_basic_block (rtx from, rtx to, struct branch_path *next_branch)
6842 {
6843 rtx insn;
6844 int to_usage = 0;
6845 rtx libcall_insn = NULL_RTX;
6846 int num_insns = 0;
6847 int no_conflict = 0;
6848
6849 /* Allocate the space needed by qty_table. */
6850 qty_table = xmalloc (max_qty * sizeof (struct qty_table_elem));
6851
6852 new_basic_block ();
6853
6854 /* TO might be a label. If so, protect it from being deleted. */
6855 if (to != 0 && LABEL_P (to))
6856 ++LABEL_NUSES (to);
6857
6858 for (insn = from; insn != to; insn = NEXT_INSN (insn))
6859 {
6860 enum rtx_code code = GET_CODE (insn);
6861
6862 /* If we have processed 1,000 insns, flush the hash table to
6863 avoid extreme quadratic behavior. We must not include NOTEs
6864 in the count since there may be more of them when generating
6865 debugging information. If we clear the table at different
6866 times, code generated with -g -O might be different from code
6867 generated with -O but not -g.
6868
6869 ??? This is a real kludge and needs to be done some other way.
6870 Perhaps for 2.9. */
6871 if (code != NOTE && num_insns++ > 1000)
6872 {
6873 flush_hash_table ();
6874 num_insns = 0;
6875 }
6876
6877 /* See if this is a branch that is part of the path. If so, and it is
6878 to be taken, do so. */
6879 if (next_branch->branch == insn)
6880 {
6881 enum taken status = next_branch++->status;
6882 if (status != PATH_NOT_TAKEN)
6883 {
6884 if (status == PATH_TAKEN)
6885 record_jump_equiv (insn, 1);
6886 else
6887 invalidate_skipped_block (NEXT_INSN (insn));
6888
6889 /* Set the last insn as the jump insn; it doesn't affect cc0.
6890 Then follow this branch. */
6891 #ifdef HAVE_cc0
6892 prev_insn_cc0 = 0;
6893 prev_insn = insn;
6894 #endif
6895 insn = JUMP_LABEL (insn);
6896 continue;
6897 }
6898 }
6899
6900 if (GET_MODE (insn) == QImode)
6901 PUT_MODE (insn, VOIDmode);
6902
6903 if (GET_RTX_CLASS (code) == RTX_INSN)
6904 {
6905 rtx p;
6906
6907 /* Process notes first so we have all notes in canonical forms when
6908 looking for duplicate operations. */
6909
6910 if (REG_NOTES (insn))
6911 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
6912
6913 /* Track when we are inside a LIBCALL block. Inside such a block,
6914 we do not want to record destinations. The last insn of a
6915 LIBCALL block is not considered to be part of the block, since
6916 its destination is the result of the block and hence should be
6917 recorded. */
6918
6919 if (REG_NOTES (insn) != 0)
6920 {
6921 if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
6922 libcall_insn = XEXP (p, 0);
6923 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
6924 {
6925 /* Keep libcall_insn for the last SET insn of a no-conflict
6926 block to prevent changing the destination. */
6927 if (! no_conflict)
6928 libcall_insn = 0;
6929 else
6930 no_conflict = -1;
6931 }
6932 else if (find_reg_note (insn, REG_NO_CONFLICT, NULL_RTX))
6933 no_conflict = 1;
6934 }
6935
6936 cse_insn (insn, libcall_insn);
6937
6938 if (no_conflict == -1)
6939 {
6940 libcall_insn = 0;
6941 no_conflict = 0;
6942 }
6943
6944 /* If we haven't already found an insn where we added a LABEL_REF,
6945 check this one. */
6946 if (NONJUMP_INSN_P (insn) && ! recorded_label_ref
6947 && for_each_rtx (&PATTERN (insn), check_for_label_ref,
6948 (void *) insn))
6949 recorded_label_ref = 1;
6950 }
6951
6952 /* If INSN is now an unconditional jump, skip to the end of our
6953 basic block by pretending that we just did the last insn in the
6954 basic block. If we are jumping to the end of our block, show
6955 that we can have one usage of TO. */
6956
6957 if (any_uncondjump_p (insn))
6958 {
6959 if (to == 0)
6960 {
6961 free (qty_table);
6962 return 0;
6963 }
6964
6965 if (JUMP_LABEL (insn) == to)
6966 to_usage = 1;
6967
6968 /* Maybe TO was deleted because the jump is unconditional.
6969 If so, there is nothing left in this basic block. */
6970 /* ??? Perhaps it would be smarter to set TO
6971 to whatever follows this insn,
6972 and pretend the basic block had always ended here. */
6973 if (INSN_DELETED_P (to))
6974 break;
6975
6976 insn = PREV_INSN (to);
6977 }
6978
6979 /* See if it is ok to keep on going past the label
6980 which used to end our basic block. Remember that we incremented
6981 the count of that label, so we decrement it here. If we made
6982 a jump unconditional, TO_USAGE will be one; in that case, we don't
6983 want to count the use in that jump. */
6984
6985 if (to != 0 && NEXT_INSN (insn) == to
6986 && LABEL_P (to) && --LABEL_NUSES (to) == to_usage)
6987 {
6988 struct cse_basic_block_data val;
6989 rtx prev;
6990
6991 insn = NEXT_INSN (to);
6992
6993 /* If TO was the last insn in the function, we are done. */
6994 if (insn == 0)
6995 {
6996 free (qty_table);
6997 return 0;
6998 }
6999
7000 /* If TO was preceded by a BARRIER we are done with this block
7001 because it has no continuation. */
7002 prev = prev_nonnote_insn (to);
7003 if (prev && BARRIER_P (prev))
7004 {
7005 free (qty_table);
7006 return insn;
7007 }
7008
7009 /* Find the end of the following block. Note that we won't be
7010 following branches in this case. */
7011 to_usage = 0;
7012 val.path_size = 0;
7013 val.path = xmalloc (sizeof (struct branch_path)
7014 * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
7015 cse_end_of_basic_block (insn, &val, 0, 0);
7016 free (val.path);
7017
7018 /* If the tables we allocated have enough space left
7019 to handle all the SETs in the next basic block,
7020 continue through it. Otherwise, return,
7021 and that block will be scanned individually. */
7022 if (val.nsets * 2 + next_qty > max_qty)
7023 break;
7024
7025 cse_basic_block_start = val.low_cuid;
7026 cse_basic_block_end = val.high_cuid;
7027 to = val.last;
7028
7029 /* Prevent TO from being deleted if it is a label. */
7030 if (to != 0 && LABEL_P (to))
7031 ++LABEL_NUSES (to);
7032
7033 /* Back up so we process the first insn in the extension. */
7034 insn = PREV_INSN (insn);
7035 }
7036 }
7037
7038 gcc_assert (next_qty <= max_qty);
7039
7040 free (qty_table);
7041
7042 return to ? NEXT_INSN (to) : 0;
7043 }
7044 \f
7045 /* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
7046    there isn't a REG_LABEL note.  Return 1 if so.  DATA is the insn.  */
7047
7048 static int
7049 check_for_label_ref (rtx *rtl, void *data)
7050 {
7051 rtx insn = (rtx) data;
7052
7053 /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
7054 we must rerun jump since it needs to place the note. If this is a
7055 LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
7056 since no REG_LABEL will be added. */
7057 return (GET_CODE (*rtl) == LABEL_REF
7058 && ! LABEL_REF_NONLOCAL_P (*rtl)
7059 && LABEL_P (XEXP (*rtl, 0))
7060 && INSN_UID (XEXP (*rtl, 0)) != 0
7061 && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
7062 }
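
/* A sketch of how this predicate is used (it matches the call in the
   scan loop above; illustrative only):

	if (NONJUMP_INSN_P (insn)
	    && for_each_rtx (&PATTERN (insn), check_for_label_ref,
			     (void *) insn))
	  recorded_label_ref = 1;

   for_each_rtx stops at the first subexpression for which the callback
   returns nonzero, so the whole call is nonzero exactly when such a
   LABEL_REF occurs in the pattern.  */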
7063 \f
7064 /* Count the number of times registers are used (not set) in X.
7065    COUNTS is an array in which we accumulate the count; INCR is the
7066    amount by which to increment the count for each register usage.  */
7067
7068 static void
7069 count_reg_usage (rtx x, int *counts, int incr)
7070 {
7071 enum rtx_code code;
7072 rtx note;
7073 const char *fmt;
7074 int i, j;
7075
7076 if (x == 0)
7077 return;
7078
7079 switch (code = GET_CODE (x))
7080 {
7081 case REG:
7082 counts[REGNO (x)] += incr;
7083 return;
7084
7085 case PC:
7086 case CC0:
7087 case CONST:
7088 case CONST_INT:
7089 case CONST_DOUBLE:
7090 case CONST_VECTOR:
7091 case SYMBOL_REF:
7092 case LABEL_REF:
7093 return;
7094
7095 case CLOBBER:
7096 /* If we are clobbering a MEM, mark any registers inside the address
7097 as being used. */
7098 if (MEM_P (XEXP (x, 0)))
7099 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, incr);
7100 return;
7101
7102 case SET:
7103 /* Unless we are setting a REG, count everything in SET_DEST. */
7104 if (!REG_P (SET_DEST (x)))
7105 count_reg_usage (SET_DEST (x), counts, incr);
7106 count_reg_usage (SET_SRC (x), counts, incr);
7107 return;
7108
7109 case CALL_INSN:
7110 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, incr);
7111 /* Fall through. */
7112
7113 case INSN:
7114 case JUMP_INSN:
7115 count_reg_usage (PATTERN (x), counts, incr);
7116
7117     /* Things used in a REG_EQUAL note aren't dead, since the loop pass
7118        may try to use them.  */
7119
7120 note = find_reg_equal_equiv_note (x);
7121 if (note)
7122 {
7123 rtx eqv = XEXP (note, 0);
7124
7125 if (GET_CODE (eqv) == EXPR_LIST)
7126 /* This REG_EQUAL note describes the result of a function call.
7127 Process all the arguments. */
7128 do
7129 {
7130 count_reg_usage (XEXP (eqv, 0), counts, incr);
7131 eqv = XEXP (eqv, 1);
7132 }
7133 while (eqv && GET_CODE (eqv) == EXPR_LIST);
7134 else
7135 count_reg_usage (eqv, counts, incr);
7136 }
7137 return;
7138
7139 case EXPR_LIST:
7140 if (REG_NOTE_KIND (x) == REG_EQUAL
7141 	|| (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x, 0)) == USE)
7142 /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
7143 involving registers in the address. */
7144 || GET_CODE (XEXP (x, 0)) == CLOBBER)
7145 count_reg_usage (XEXP (x, 0), counts, incr);
7146
7147 count_reg_usage (XEXP (x, 1), counts, incr);
7148 return;
7149
7150 case ASM_OPERANDS:
7151 /* Iterate over just the inputs, not the constraints as well. */
7152 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
7153 count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, incr);
7154 return;
7155
7156 case INSN_LIST:
7157 gcc_unreachable ();
7158
7159 default:
7160 break;
7161 }
7162
7163 fmt = GET_RTX_FORMAT (code);
7164 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7165 {
7166 if (fmt[i] == 'e')
7167 count_reg_usage (XEXP (x, i), counts, incr);
7168 else if (fmt[i] == 'E')
7169 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7170 count_reg_usage (XVECEXP (x, i, j), counts, incr);
7171 }
7172 }
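
/* A minimal usage sketch for count_reg_usage (illustrative only; it
   mirrors what delete_trivially_dead_insns below does):

	int *counts = xcalloc (max_reg_num (), sizeof (int));
	rtx insn;

	for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	  if (INSN_P (insn))
	    count_reg_usage (insn, counts, 1);

   Calling it again with INCR == -1 undoes a previous count, which is
   how the deletion loop below keeps the counts consistent while it
   removes insns.  */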
7173 \f
7174 /* Return true if SET is live.  */
7175 static bool
7176 set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0. */
7177 int *counts)
7178 {
7179 #ifdef HAVE_cc0
7180 rtx tem;
7181 #endif
7182
7183 if (set_noop_p (set))
7184 ;
7185
7186 #ifdef HAVE_cc0
7187 else if (GET_CODE (SET_DEST (set)) == CC0
7188 && !side_effects_p (SET_SRC (set))
7189 && ((tem = next_nonnote_insn (insn)) == 0
7190 || !INSN_P (tem)
7191 || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
7192 return false;
7193 #endif
7194 else if (!REG_P (SET_DEST (set))
7195 || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
7196 || counts[REGNO (SET_DEST (set))] != 0
7197 || side_effects_p (SET_SRC (set)))
7198 return true;
7199 return false;
7200 }
7201
7202 /* Return true if INSN is live.  */
7203
7204 static bool
7205 insn_live_p (rtx insn, int *counts)
7206 {
7207 int i;
7208 if (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
7209 return true;
7210 else if (GET_CODE (PATTERN (insn)) == SET)
7211 return set_live_p (PATTERN (insn), insn, counts);
7212 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7213 {
7214 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7215 {
7216 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7217
7218 if (GET_CODE (elt) == SET)
7219 {
7220 if (set_live_p (elt, insn, counts))
7221 return true;
7222 }
7223 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7224 return true;
7225 }
7226 return false;
7227 }
7228 else
7229 return true;
7230 }
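
/* For illustration (schematic rtl, not code from this pass): assuming
   pseudo 200 has a usage count of zero,

	(set (reg:SI 200) (plus:SI (reg:SI 100) (const_int 4)))

   is not live -- it only sets an unused pseudo and its source has no
   side effects -- whereas

	(set (mem:SI (reg:SI 100)) (reg:SI 101))

   is always live, because its destination is not a register.  */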
7231
7232 /* Return true if the libcall block ending at INSN is dead as a whole.  */
7233
7234 static bool
7235 dead_libcall_p (rtx insn, int *counts)
7236 {
7237 rtx note, set, new;
7238
7239 /* See if there's a REG_EQUAL note on this insn and try to
7240 replace the source with the REG_EQUAL expression.
7241
7242 We assume that insns with REG_RETVALs can only be reg->reg
7243 copies at this point. */
7244 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7245 if (!note)
7246 return false;
7247
7248 set = single_set (insn);
7249 if (!set)
7250 return false;
7251
7252 new = simplify_rtx (XEXP (note, 0));
7253 if (!new)
7254 new = XEXP (note, 0);
7255
7256 /* While changing insn, we must update the counts accordingly. */
7257 count_reg_usage (insn, counts, -1);
7258
7259 if (validate_change (insn, &SET_SRC (set), new, 0))
7260 {
7261 count_reg_usage (insn, counts, 1);
7262 remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7263 remove_note (insn, note);
7264 return true;
7265 }
7266
7267 if (CONSTANT_P (new))
7268 {
7269 new = force_const_mem (GET_MODE (SET_DEST (set)), new);
7270 if (new && validate_change (insn, &SET_SRC (set), new, 0))
7271 {
7272 count_reg_usage (insn, counts, 1);
7273 remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7274 remove_note (insn, note);
7275 return true;
7276 }
7277 }
7278
7279 count_reg_usage (insn, counts, 1);
7280 return false;
7281 }
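
/* For reference, the schematic shape of a libcall block as this code
   expects it (insn UIDs are made up):

	(insn 10 ...)   first insn, carries a REG_LIBCALL note
			pointing at insn 12
	(insn 11 ...)   body of the block
	(insn 12 ...)   last insn, carries a REG_RETVAL note pointing
			back at insn 10 and a REG_EQUAL note describing
			the computed value

   dead_libcall_p is called on the REG_RETVAL insn.  If the REG_EQUAL
   value can be substituted for that insn's source, the body of the
   block becomes trivially dead and is removed by the caller below.  */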
7282
7283 /* Scan all the insns and delete any that are dead; i.e., any that set a
7284    register that is never subsequently used or that copy a register to
7285    itself.
7286 
7287    This is used to remove insns made obviously dead by cse, loop or other
7288    optimizations.  It improves the heuristics in the loop pass, since it won't try to move dead invariants out of loops or make givs for dead quantities.  The
7289 remaining passes of the compilation are also sped up. */
7290
7291 int
7292 delete_trivially_dead_insns (rtx insns, int nreg)
7293 {
7294 int *counts;
7295 rtx insn, prev;
7296 int in_libcall = 0, dead_libcall = 0;
7297 int ndead = 0, nlastdead, niterations = 0;
7298
7299 timevar_push (TV_DELETE_TRIVIALLY_DEAD);
7300 /* First count the number of times each register is used. */
7301 counts = xcalloc (nreg, sizeof (int));
7302 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
7303 count_reg_usage (insn, counts, 1);
7304
7305 do
7306 {
7307 nlastdead = ndead;
7308 niterations++;
7309 /* Go from the last insn to the first and delete insns that only set unused
7310 registers or copy a register to itself. As we delete an insn, remove
7311 usage counts for registers it uses.
7312
7313 The first jump optimization pass may leave a real insn as the last
7314 insn in the function. We must not skip that insn or we may end
7315 up deleting code that is not really dead. */
7316 insn = get_last_insn ();
7317 if (! INSN_P (insn))
7318 insn = prev_real_insn (insn);
7319
7320 for (; insn; insn = prev)
7321 {
7322 int live_insn = 0;
7323
7324 prev = prev_real_insn (insn);
7325
7326 /* Don't delete any insns that are part of a libcall block unless
7327 we can delete the whole libcall block.
7328
7329 Flow or loop might get confused if we did that. Remember
7330 that we are scanning backwards. */
7331 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7332 {
7333 in_libcall = 1;
7334 live_insn = 1;
7335 dead_libcall = dead_libcall_p (insn, counts);
7336 }
7337 else if (in_libcall)
7338 live_insn = ! dead_libcall;
7339 else
7340 live_insn = insn_live_p (insn, counts);
7341
7342 	  /* If this is a dead insn, delete it and decrement the usage counts
7343 	     of the registers it references.  */
7344
7345 if (! live_insn)
7346 {
7347 count_reg_usage (insn, counts, -1);
7348 delete_insn_and_edges (insn);
7349 ndead++;
7350 }
7351
7352 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7353 {
7354 in_libcall = 0;
7355 dead_libcall = 0;
7356 }
7357 }
7358 }
7359 while (ndead != nlastdead);
7360
7361 if (dump_file && ndead)
7362 fprintf (dump_file, "Deleted %i trivially dead insns; %i iterations\n",
7363 ndead, niterations);
7364 /* Clean up. */
7365 free (counts);
7366 timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
7367 return ndead;
7368 }
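
/* A typical call, as the rtl passes make it (sketch):

	delete_trivially_dead_insns (get_insns (), max_reg_num ());

   The return value is the number of insns deleted; callers can use it
   to decide whether further cleanup passes are worthwhile.  */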
7369
7370 /* This function is called via for_each_rtx.  DATA points to a
7371    change_cc_mode_args structure holding the insn being changed and
7372    NEWREG, a condition code register with the desired mode.  If *LOC is
7373    the same register in a different mode, replace it with NEWREG.  */
7374
7375 static int
7376 cse_change_cc_mode (rtx *loc, void *data)
7377 {
7378   struct change_cc_mode_args *args = (struct change_cc_mode_args *) data;
7379
7380 if (*loc
7381 && REG_P (*loc)
7382 && REGNO (*loc) == REGNO (args->newreg)
7383 && GET_MODE (*loc) != GET_MODE (args->newreg))
7384 {
7385 validate_change (args->insn, loc, args->newreg, 1);
7386
7387 return -1;
7388 }
7389 return 0;
7390 }
7391
7392 /* Change the mode of any reference to the register REGNO (NEWREG) to
7393 GET_MODE (NEWREG) in INSN. */
7394
7395 static void
7396 cse_change_cc_mode_insn (rtx insn, rtx newreg)
7397 {
7398 struct change_cc_mode_args args;
7399 int success;
7400
7401 if (!INSN_P (insn))
7402 return;
7403
7404 args.insn = insn;
7405 args.newreg = newreg;
7406
7407 for_each_rtx (&PATTERN (insn), cse_change_cc_mode, &args);
7408 for_each_rtx (&REG_NOTES (insn), cse_change_cc_mode, &args);
7409
7410   /* If the following assertion fails, there is most likely something
7411      wrong with the cc_modes_compatible back end hook.  CC modes can be
7412      considered compatible only if the insn, with its mode replaced by
7413      any of the compatible modes, can still be recognized.  */
7414 success = apply_change_group ();
7415 gcc_assert (success);
7416 }
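
/* To make the assertion above concrete: a hypothetical (illustrative,
   not any real target's) cc_modes_compatible hook could look like

	static enum machine_mode
	example_cc_modes_compatible (enum machine_mode m1,
				     enum machine_mode m2)
	{
	  if (m1 == m2)
	    return m1;
	  return VOIDmode;
	}

   A target whose condition code modes overlap, say a zero-flag-only
   mode whose tests are a subset of a full-flags mode, would instead
   return the mode that satisfies both uses.  */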
7417
7418 /* Change the mode of any reference to the register REGNO (NEWREG) to
7419 GET_MODE (NEWREG), starting at START. Stop before END. Stop at
7420 any instruction which modifies NEWREG. */
7421
7422 static void
7423 cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
7424 {
7425 rtx insn;
7426
7427 for (insn = start; insn != end; insn = NEXT_INSN (insn))
7428 {
7429 if (! INSN_P (insn))
7430 continue;
7431
7432 if (reg_set_p (newreg, insn))
7433 return;
7434
7435 cse_change_cc_mode_insn (insn, newreg);
7436 }
7437 }
7438
7439 /* BB is a basic block which finishes with CC_REG as a condition code
7440 register which is set to CC_SRC. Look through the successors of BB
7441 to find blocks which have a single predecessor (i.e., this one),
7442 and look through those blocks for an assignment to CC_REG which is
7443 equivalent to CC_SRC. CAN_CHANGE_MODE indicates whether we are
7444 permitted to change the mode of CC_SRC to a compatible mode. This
7445 returns VOIDmode if no equivalent assignments were found.
7446 Otherwise it returns the mode which CC_SRC should wind up with.
7447
7448 The main complexity in this function is handling the mode issues.
7449 We may have more than one duplicate which we can eliminate, and we
7450 try to find a mode which will work for multiple duplicates. */
7451
7452 static enum machine_mode
7453 cse_cc_succs (basic_block bb, rtx cc_reg, rtx cc_src, bool can_change_mode)
7454 {
7455 bool found_equiv;
7456 enum machine_mode mode;
7457 unsigned int insn_count;
7458 edge e;
7459 rtx insns[2];
7460 enum machine_mode modes[2];
7461 rtx last_insns[2];
7462 unsigned int i;
7463 rtx newreg;
7464 edge_iterator ei;
7465
7466 /* We expect to have two successors. Look at both before picking
7467 the final mode for the comparison. If we have more successors
7468 (i.e., some sort of table jump, although that seems unlikely),
7469 then we require all beyond the first two to use the same
7470 mode. */
7471
7472 found_equiv = false;
7473 mode = GET_MODE (cc_src);
7474 insn_count = 0;
7475 FOR_EACH_EDGE (e, ei, bb->succs)
7476 {
7477 rtx insn;
7478 rtx end;
7479
7480 if (e->flags & EDGE_COMPLEX)
7481 continue;
7482
7483 if (EDGE_COUNT (e->dest->preds) != 1
7484 || e->dest == EXIT_BLOCK_PTR)
7485 continue;
7486
7487 end = NEXT_INSN (BB_END (e->dest));
7488 for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
7489 {
7490 rtx set;
7491
7492 if (! INSN_P (insn))
7493 continue;
7494
7495 /* If CC_SRC is modified, we have to stop looking for
7496 something which uses it. */
7497 if (modified_in_p (cc_src, insn))
7498 break;
7499
7500 /* Check whether INSN sets CC_REG to CC_SRC. */
7501 set = single_set (insn);
7502 if (set
7503 && REG_P (SET_DEST (set))
7504 && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7505 {
7506 bool found;
7507 enum machine_mode set_mode;
7508 enum machine_mode comp_mode;
7509
7510 found = false;
7511 set_mode = GET_MODE (SET_SRC (set));
7512 comp_mode = set_mode;
7513 if (rtx_equal_p (cc_src, SET_SRC (set)))
7514 found = true;
7515 else if (GET_CODE (cc_src) == COMPARE
7516 && GET_CODE (SET_SRC (set)) == COMPARE
7517 && mode != set_mode
7518 && rtx_equal_p (XEXP (cc_src, 0),
7519 XEXP (SET_SRC (set), 0))
7520 && rtx_equal_p (XEXP (cc_src, 1),
7521 XEXP (SET_SRC (set), 1)))
7523 {
7524 comp_mode = targetm.cc_modes_compatible (mode, set_mode);
7525 if (comp_mode != VOIDmode
7526 && (can_change_mode || comp_mode == mode))
7527 found = true;
7528 }
7529
7530 if (found)
7531 {
7532 found_equiv = true;
7533 if (insn_count < ARRAY_SIZE (insns))
7534 {
7535 insns[insn_count] = insn;
7536 modes[insn_count] = set_mode;
7537 last_insns[insn_count] = end;
7538 ++insn_count;
7539
7540 if (mode != comp_mode)
7541 {
7542 gcc_assert (can_change_mode);
7543 mode = comp_mode;
7544
7545 /* The modified insn will be re-recognized later. */
7546 PUT_MODE (cc_src, mode);
7547 }
7548 }
7549 else
7550 {
7551 if (set_mode != mode)
7552 {
7553 /* We found a matching expression in the
7554 wrong mode, but we don't have room to
7555 store it in the array. Punt. This case
7556 should be rare. */
7557 break;
7558 }
7559 /* INSN sets CC_REG to a value equal to CC_SRC
7560 with the right mode. We can simply delete
7561 it. */
7562 delete_insn (insn);
7563 }
7564
7565 /* We found an instruction to delete. Keep looking,
7566 in the hopes of finding a three-way jump. */
7567 continue;
7568 }
7569
7570 /* We found an instruction which sets the condition
7571 code, so don't look any farther. */
7572 break;
7573 }
7574
7575 /* If INSN sets CC_REG in some other way, don't look any
7576 farther. */
7577 if (reg_set_p (cc_reg, insn))
7578 break;
7579 }
7580
7581 /* If we fell off the bottom of the block, we can keep looking
7582 through successors. We pass CAN_CHANGE_MODE as false because
7583 we aren't prepared to handle compatibility between the
7584 further blocks and this block. */
7585 if (insn == end)
7586 {
7587 enum machine_mode submode;
7588
7589 submode = cse_cc_succs (e->dest, cc_reg, cc_src, false);
7590 if (submode != VOIDmode)
7591 {
7592 gcc_assert (submode == mode);
7593 found_equiv = true;
7594 can_change_mode = false;
7595 }
7596 }
7597 }
7598
7599 if (! found_equiv)
7600 return VOIDmode;
7601
7602 /* Now INSN_COUNT is the number of instructions we found which set
7603 CC_REG to a value equivalent to CC_SRC. The instructions are in
7604 INSNS. The modes used by those instructions are in MODES. */
7605
7606 newreg = NULL_RTX;
7607 for (i = 0; i < insn_count; ++i)
7608 {
7609 if (modes[i] != mode)
7610 {
7611 /* We need to change the mode of CC_REG in INSNS[i] and
7612 subsequent instructions. */
7613 if (! newreg)
7614 {
7615 if (GET_MODE (cc_reg) == mode)
7616 newreg = cc_reg;
7617 else
7618 newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7619 }
7620 cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
7621 newreg);
7622 }
7623
7624 delete_insn (insns[i]);
7625 }
7626
7627 return mode;
7628 }
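
/* Schematic example of what cse_cc_succs eliminates (modes and
   register numbers are made up; assume each successor has BB as its
   only predecessor):

	BB:     (set (reg:CC flags) (compare:CC (reg 100) (reg 101)))
		... conditional jump on the flags register ...
	succ 1: (set (reg:CC flags) (compare:CC (reg 100) (reg 101)))
	succ 2: (set (reg:CCZ flags) (compare:CCZ (reg 100) (reg 101)))

   The comparison in succ 1 is equal in the same mode and is simply
   deleted.  The one in succ 2 matches in a different mode, so
   targetm.cc_modes_compatible is asked for a mode that satisfies both
   uses; if one exists, the references are rewritten with
   cse_change_cc_mode_insns before that duplicate is deleted too.  */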
7629
7630 /* If we have a fixed condition code register (or two), walk through
7631 the instructions and try to eliminate duplicate assignments. */
7632
7633 void
7634 cse_condition_code_reg (void)
7635 {
7636 unsigned int cc_regno_1;
7637 unsigned int cc_regno_2;
7638 rtx cc_reg_1;
7639 rtx cc_reg_2;
7640 basic_block bb;
7641
7642 if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
7643 return;
7644
7645 cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
7646 if (cc_regno_2 != INVALID_REGNUM)
7647 cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
7648 else
7649 cc_reg_2 = NULL_RTX;
7650
7651 FOR_EACH_BB (bb)
7652 {
7653 rtx last_insn;
7654 rtx cc_reg;
7655 rtx insn;
7656 rtx cc_src_insn;
7657 rtx cc_src;
7658 enum machine_mode mode;
7659 enum machine_mode orig_mode;
7660
7661 /* Look for blocks which end with a conditional jump based on a
7662 condition code register. Then look for the instruction which
7663 sets the condition code register. Then look through the
7664 successor blocks for instructions which set the condition
7665 code register to the same value. There are other possible
7666 uses of the condition code register, but these are by far the
7667 most common and the ones which we are most likely to be able
7668 to optimize. */
7669
7670 last_insn = BB_END (bb);
7671 if (!JUMP_P (last_insn))
7672 continue;
7673
7674 if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
7675 cc_reg = cc_reg_1;
7676 else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
7677 cc_reg = cc_reg_2;
7678 else
7679 continue;
7680
7681 cc_src_insn = NULL_RTX;
7682 cc_src = NULL_RTX;
7683 for (insn = PREV_INSN (last_insn);
7684 insn && insn != PREV_INSN (BB_HEAD (bb));
7685 insn = PREV_INSN (insn))
7686 {
7687 rtx set;
7688
7689 if (! INSN_P (insn))
7690 continue;
7691 set = single_set (insn);
7692 if (set
7693 && REG_P (SET_DEST (set))
7694 && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7695 {
7696 cc_src_insn = insn;
7697 cc_src = SET_SRC (set);
7698 break;
7699 }
7700 else if (reg_set_p (cc_reg, insn))
7701 break;
7702 }
7703
7704 if (! cc_src_insn)
7705 continue;
7706
7707 if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
7708 continue;
7709
7710 /* Now CC_REG is a condition code register used for a
7711 conditional jump at the end of the block, and CC_SRC, in
7712 CC_SRC_INSN, is the value to which that condition code
7713 	    register is set; CC_SRC is still meaningful at the end of
7714 the basic block. */
7715
7716 orig_mode = GET_MODE (cc_src);
7717 mode = cse_cc_succs (bb, cc_reg, cc_src, true);
7718 if (mode != VOIDmode)
7719 {
7720 gcc_assert (mode == GET_MODE (cc_src));
7721 if (mode != orig_mode)
7722 {
7723 rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7724
7725 cse_change_cc_mode_insn (cc_src_insn, newreg);
7726
7727 /* Do the same in the following insns that use the
7728 current value of CC_REG within BB. */
7729 cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
7730 NEXT_INSN (last_insn),
7731 newreg);
7732 }
7733 }
7734 }
7735 }
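
/* cse_condition_code_reg does nothing unless the target provides the
   fixed_condition_code_regs hook.  A hypothetical target with a single
   fixed flags register (FLAGS_REGNUM is a made-up hard register
   number) might define it as:

	static bool
	example_fixed_condition_code_regs (unsigned int *p1,
					   unsigned int *p2)
	{
	  *p1 = FLAGS_REGNUM;
	  *p2 = INVALID_REGNUM;
	  return true;
	}
*/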