/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
/* stdio.h must precede rtl.h for FFS.  */
#include "system.h"

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "timevar.h"
/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.

   Note CSE can turn a conditional or computed jump into a nop or
   an unconditional jump.  When this occurs we arrange to run the jump
   optimizer after CSE to delete the unreachable code.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and a vector of "quantity
   numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `reg_qty' records what quantity a register is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to `max_reg'.
   If register N has not been assigned a quantity, reg_qty[N] will equal N.

   Quantity numbers below `max_reg' do not exist and none of the `qty_table'
   entries should be referenced with an index below `max_reg'.

   We also maintain a bidirectional chain of registers for each
   quantity number.  The `qty_table' members `first_reg' and `last_reg',
   and `reg_eqv_table' members `next' and `prev' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, REG expressions
   with the qty_table `mode' must be in the hash table for both
   registers and must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity of one of
   the registers does not have the same mode as those expressions.
Constants and quantity numbers:

   When a quantity has a known constant value, that value is stored
   in the appropriate qty_table `const_rtx'.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified, by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate qty_table
   `const_rtx'.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.

Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and qty_table `first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.
   The vectors `reg_tick' and `reg_in_table' are used to detect this case.
   reg_tick[i] is incremented whenever a value is stored in register i.
   reg_in_table[i] holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value reg_tick[i] had
   when the references were entered.  If we want to enter a reference
   and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
   Until we want to enter a new entry, the mere fact that the two vectors
   don't match causes the entries to be ignored if anyone tries to match them.

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, the vectors `reg_tick'
   and `reg_in_table' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */

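/* For illustration only, not part of the compiler: a minimal standalone
   sketch of the quantity-number scheme described above.  Registers start
   out mapped to themselves (reg_qty[N] == N means "no quantity yet");
   copying a register propagates its quantity, while any other store
   allocates a fresh one.  The toy_* names and sizes are hypothetical.  */
#if 0
#include <stdio.h>

#define TOY_NREGS 8              /* Stand-in for max_reg.  */

static int toy_reg_qty[TOY_NREGS];  /* Stand-in for reg_qty.  */
static int toy_next_qty;            /* Stand-in for next_qty.  */

static void
toy_new_block (void)
{
  int i;
  /* reg_qty[N] == N marks register N as having no quantity.  */
  for (i = 0; i < TOY_NREGS; i++)
    toy_reg_qty[i] = i;
  toy_next_qty = TOY_NREGS;      /* Real quantities start at max_reg.  */
}

static void
toy_set_reg (int reg)            /* reg = <some computation>  */
{
  toy_reg_qty[reg] = toy_next_qty++;
}

static void
toy_copy_reg (int dst, int src)  /* dst = src  */
{
  toy_reg_qty[dst] = toy_reg_qty[src];
}

int
main (void)
{
  toy_new_block ();
  toy_set_reg (1);               /* r1 = x   */
  toy_copy_reg (2, 1);           /* r2 = r1  */
  /* r1 and r2 now share a quantity, so they are known equivalent.  */
  printf ("qty(r1)=%d qty(r2)=%d equivalent=%d\n",
	  toy_reg_qty[1], toy_reg_qty[2],
	  toy_reg_qty[1] == toy_reg_qty[2]);
  return 0;
}
#endif
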
/* One plus largest register number used in this function.  */

static int max_reg;

/* One plus largest instruction UID used in this function at time of
   cse_main call.  */

static int max_insn_uid;

/* Length of qty_table vector.  We know in advance we will not need
   a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Per-qty information tracking.

   `first_reg' and `last_reg' track the head and tail of the
   chain of registers which currently contain this quantity.

   `mode' contains the machine mode of this quantity.

   `const_rtx' holds the rtx of the constant value of this
   quantity, if known.  A summation of the frame/arg pointer
   and a constant can also be entered here.  When this holds
   a known value, `const_insn' is the insn which stored the
   constant value.

   `comparison_{code,const,qty}' are used to track when a
   comparison between a quantity and some constant or register has
   been passed.  In such a case, we know the results of the comparison
   in case we see it again.  These members record a comparison that
   is known to be true.  `comparison_code' holds the rtx code of such
   a comparison, else it is set to UNKNOWN and the other two
   comparison members are undefined.  `comparison_const' holds
   the constant being compared against, or zero if the comparison
   is not against a constant.  `comparison_qty' holds the quantity
   being compared against when the result is known.  If the comparison
   is not with a register, `comparison_qty' is -1.  */

struct qty_table_elem
{
  rtx const_rtx;
  rtx const_insn;
  rtx comparison_const;
  int comparison_qty;
  unsigned int first_reg, last_reg;
  enum machine_mode mode;
  enum rtx_code comparison_code;
};

/* The table of all qtys, indexed by qty number.  */
static struct qty_table_elem *qty_table;

#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the value last assigned to CC0.  If it should
   happen to be a constant, it is stored in preference to the actual
   assigned value.  In case it is a constant, we store the mode in which
   the constant should be interpreted.  */

static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;
#endif

/* Previous actual insn.  0 if at first insn of basic block.  */

static rtx prev_insn;

/* Insn being scanned.  */

static rtx this_insn;

/* Index by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If reg_qty[N] == N, reg_eqv_table[N].next is undefined.  */

/* Per-register equivalence chain.  */
struct reg_eqv_elem
{
  int next, prev;
};

/* The table of all register equivalence chains.  */
static struct reg_eqv_elem *reg_eqv_table;

struct cse_reg_info
{
  /* Next in hash chain.  */
  struct cse_reg_info *hash_next;

  /* The next cse_reg_info structure in the free or used list.  */
  struct cse_reg_info *next;

  /* Search key.  */
  unsigned int regno;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The SUBREG that was set when REG_TICK was last incremented.  Set
     to -1 if the last store was to the whole register, not a subreg.  */
  unsigned int subreg_ticked;
};

/* A free list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_free_list;

/* A used list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_used_list;
static struct cse_reg_info *cse_reg_info_used_list_end;

/* A mapping from registers to cse_reg_info data structures.  */
#define REGHASH_SHIFT 7
#define REGHASH_SIZE (1 << REGHASH_SHIFT)
#define REGHASH_MASK (REGHASH_SIZE - 1)
static struct cse_reg_info *reg_hash[REGHASH_SIZE];

#define REGHASH_FN(REGNO) \
	(((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)

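/* For illustration only: REGHASH_FN folds the high bits of the register
   number into the low bits before masking, so register numbers that
   differ only above bit REGHASH_SHIFT still spread across buckets.  A
   standalone sketch with hypothetical register numbers:  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned int i;
  for (i = 0; i < 4; i++)
    {
      unsigned int regno = i * 128;      /* Multiples of REGHASH_SIZE.  */
      unsigned int bucket = (regno ^ (regno >> 7)) & 127;  /* REGHASH_FN.  */
      /* Without the fold, all of these would land in bucket 0;
	 with it they map to buckets 0, 1, 2, 3.  */
      printf ("regno %u -> bucket %u\n", regno, bucket);
    }
  return 0;
}
#endif
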
/* The last lookup we did into the reg_hash table.  This allows us
   to cache repeated lookups.  */
static unsigned int cached_regno;
static struct cse_reg_info *cached_cse_reg_info;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* CUID of insn that starts the basic block currently being cse-processed.  */

static int cse_basic_block_start;

/* CUID of insn that ends the basic block currently being cse-processed.  */

static int cse_basic_block_end;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
   We use them to see whether a reg is used outside a given basic block.  */

static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Nonzero if this pass has made changes, and therefore it's
   worthwhile to run the garbage collector.  */

static int cse_altered;

/* Nonzero if cse has altered conditional jump insns
   in such a way that jump optimization should be redone.  */

static int cse_jumps_altered;

/* Nonzero if we put a LABEL_REF into the hash table for an INSN without a
   REG_LABEL note; if so, we have to rerun jump after CSE to put in the
   note.  */
static int recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

#ifdef LOAD_EXTEND_OP

/* Scratch rtl used when looking for load-extended copy of a MEM.  */
static rtx memory_extend_rtx;
#endif

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   The canon_exp field contains a canonical (from the point of view of
   alias analysis) version of the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   are on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.
   The `regcost' field stores the value returned by approx_reg_cost for
   this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */

struct table_elt
{
  rtx exp;
  rtx canon_exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  int regcost;
  enum machine_mode mode;
  char in_memory;
  char is_const;
  char flag;
};

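/* For illustration only: each table_elt sits on two independent intrusive
   doubly-linked lists at once, one linking elements with the same hash
   code and one linking elements with the same value, so an element can be
   unlinked from one chain without disturbing the other.  A minimal
   standalone model of that layout (field names abbreviated):  */
#if 0
struct toy_elt
{
  int id;
  struct toy_elt *next_hash, *prev_hash;    /* same-hash chain  */
  struct toy_elt *next_value, *prev_value;  /* same-value chain  */
};

/* Unlink E from its same-value chain; the same-hash chain that E also
   belongs to is left untouched.  */
static void
toy_unlink_value (struct toy_elt *e)
{
  if (e->next_value)
    e->next_value->prev_value = e->prev_value;
  if (e->prev_value)
    e->prev_value->next_value = e->next_value;
  e->next_value = e->prev_value = 0;
}
#endif
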
/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define HASH_SHIFT 5
#define HASH_SIZE (1 << HASH_SHIFT)
#define HASH_MASK (HASH_SIZE - 1)

/* Compute hash code of X in mode M.  Special-case the case where X is
   a pseudo register (hard registers may require `do_not_record' to be
   set).  */

#define HASH(X, M)	\
 ((GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
   ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
   : canon_hash (X, M)) & HASH_MASK)

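/* For illustration only: because HASH_SIZE is a power of two, reducing a
   hash code to a bucket index is a single AND with HASH_MASK instead of
   a modulo operation.  E.g., for any unsigned hash value:  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned int h = 0xdeadbeefu;   /* An arbitrary hash code.  */
  assert ((h & 31) == (h % 32));  /* HASH_MASK == HASH_SIZE - 1.  */
  return 0;
}
#endif
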
/* Determine whether register number N is considered a fixed register for the
   purpose of approximating register costs.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed.  */
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of one and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N) \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM	\
   || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM		\
   || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER)	\
   || ((N) < FIRST_PSEUDO_REGISTER					\
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

#define COST(X) (GET_CODE (X) == REG ? 0 : notreg_cost (X, SET))
#define COST_IN(X,OUTER) (GET_CODE (X) == REG ? 0 : notreg_cost (X, OUTER))

/* Get the info associated with register N.  */

#define GET_CSE_REG_INFO(N)			\
  (((N) == cached_regno && cached_cse_reg_info)	\
   ? cached_cse_reg_info : get_cse_reg_info ((N)))

/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)

/* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
   SUBREG).  */

#define SUBREG_TICKED(N) ((GET_CSE_REG_INFO (N))->subreg_ticked)

/* Get the quantity number for REG.  */

#define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)

/* Determine if the quantity number for register X represents a valid index
   into the qty_table.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) != (int) (N))

static struct table_elt *table[HASH_SIZE];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Number of `struct table_elt' structures made so far for this function.  */

static int n_elements_made;

/* Maximum value `n_elements_made' has had so far in this compilation
   for functions previously processed.  */

static int max_elements_made;

/* Surviving equivalence class when two equivalence classes are merged
   by recording the effects of a jump in the last insn.  Zero if the
   last insn was not a conditional jump.  */

static struct table_elt *last_jump_equiv_class;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;

/* Define maximum length of a branch path.  */

#define PATHLENGTH 10

/* This data describes a block that will be processed by cse_basic_block.  */

struct cse_basic_block_data
{
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path
    {
      /* The branch insn.  */
      rtx branch;
      /* Whether it should be taken or not.  AROUND is the same as taken
	 except that it is used when the destination label is not preceded
	 by a BARRIER.  */
      enum taken {TAKEN, NOT_TAKEN, AROUND} status;
    } path[PATHLENGTH];
};

/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.

   ?!? FIXED_BASE_PLUS_P and NONZERO_BASE_PLUS_P need to move into
   a header file so that their definitions can be shared with the
   simplification routines in simplify-rtx.c.  Until then, do not
   change these macros without also changing the copy in simplify-rtx.c.  */

#define FIXED_BASE_PLUS_P(X)					\
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx	\
   || ((X) == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])\
   || (X) == virtual_stack_vars_rtx				\
   || (X) == virtual_incoming_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx			\
	   || XEXP (X, 0) == hard_frame_pointer_rtx		\
	   || (XEXP (X, 0) == arg_pointer_rtx			\
	       && fixed_regs[ARG_POINTER_REGNUM])		\
	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
	   || XEXP (X, 0) == virtual_incoming_args_rtx))	\
   || GET_CODE (X) == ADDRESSOF)

/* Similar, but also allows reference to the stack pointer.

   This used to include FIXED_BASE_PLUS_P, however, we can't assume that
   arg_pointer_rtx by itself is nonzero, because on at least one machine,
   the i960, the arg pointer is zero when it is unused.  */

#define NONZERO_BASE_PLUS_P(X)					\
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx	\
   || (X) == virtual_stack_vars_rtx				\
   || (X) == virtual_incoming_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx			\
	   || XEXP (X, 0) == hard_frame_pointer_rtx		\
	   || (XEXP (X, 0) == arg_pointer_rtx			\
	       && fixed_regs[ARG_POINTER_REGNUM])		\
	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
	   || XEXP (X, 0) == virtual_incoming_args_rtx))	\
   || (X) == stack_pointer_rtx					\
   || (X) == virtual_stack_dynamic_rtx				\
   || (X) == virtual_outgoing_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == stack_pointer_rtx			\
	   || XEXP (X, 0) == virtual_stack_dynamic_rtx		\
	   || XEXP (X, 0) == virtual_outgoing_args_rtx))	\
   || GET_CODE (X) == ADDRESSOF)

static int notreg_cost PARAMS ((rtx, enum rtx_code));
static int approx_reg_cost_1 PARAMS ((rtx *, void *));
static int approx_reg_cost PARAMS ((rtx));
static int preferrable PARAMS ((int, int, int, int));
static void new_basic_block PARAMS ((void));
static void make_new_qty PARAMS ((unsigned int, enum machine_mode));
static void make_regs_eqv PARAMS ((unsigned int, unsigned int));
static void delete_reg_equiv PARAMS ((unsigned int));
static int mention_regs PARAMS ((rtx));
static int insert_regs PARAMS ((rtx, struct table_elt *, int));
static void remove_from_table PARAMS ((struct table_elt *, unsigned));
static struct table_elt *lookup PARAMS ((rtx, unsigned, enum machine_mode)),
       *lookup_for_remove PARAMS ((rtx, unsigned, enum machine_mode));
static rtx lookup_as_function PARAMS ((rtx, enum rtx_code));
static struct table_elt *insert PARAMS ((rtx, struct table_elt *, unsigned,
					 enum machine_mode));
static void merge_equiv_classes PARAMS ((struct table_elt *,
					 struct table_elt *));
static void invalidate PARAMS ((rtx, enum machine_mode));
static int cse_rtx_varies_p PARAMS ((rtx, int));
static void remove_invalid_refs PARAMS ((unsigned int));
static void remove_invalid_subreg_refs PARAMS ((unsigned int, unsigned int,
						enum machine_mode));
static void rehash_using_reg PARAMS ((rtx));
static void invalidate_memory PARAMS ((void));
static void invalidate_for_call PARAMS ((void));
static rtx use_related_value PARAMS ((rtx, struct table_elt *));
static unsigned canon_hash PARAMS ((rtx, enum machine_mode));
static unsigned canon_hash_string PARAMS ((const char *));
static unsigned safe_hash PARAMS ((rtx, enum machine_mode));
static int exp_equiv_p PARAMS ((rtx, rtx, int, int));
static rtx canon_reg PARAMS ((rtx, rtx));
static void find_best_addr PARAMS ((rtx, rtx *, enum machine_mode));
static enum rtx_code find_comparison_args PARAMS ((enum rtx_code, rtx *, rtx *,
						   enum machine_mode *,
						   enum machine_mode *));
static rtx fold_rtx PARAMS ((rtx, rtx));
static rtx equiv_constant PARAMS ((rtx));
static void record_jump_equiv PARAMS ((rtx, int));
static void record_jump_cond PARAMS ((enum rtx_code, enum machine_mode,
				      rtx, rtx, int));
static void cse_insn PARAMS ((rtx, rtx));
static int addr_affects_sp_p PARAMS ((rtx));
static void invalidate_from_clobbers PARAMS ((rtx));
static rtx cse_process_notes PARAMS ((rtx, rtx));
static void cse_around_loop PARAMS ((rtx));
static void invalidate_skipped_set PARAMS ((rtx, rtx, void *));
static void invalidate_skipped_block PARAMS ((rtx));
static void cse_check_loop_start PARAMS ((rtx, rtx, void *));
static void cse_set_around_loop PARAMS ((rtx, rtx, rtx));
static rtx cse_basic_block PARAMS ((rtx, rtx, struct branch_path *, int));
static void count_reg_usage PARAMS ((rtx, int *, rtx, int));
static int check_for_label_ref PARAMS ((rtx *, void *));
extern void dump_class PARAMS ((struct table_elt*));
static struct cse_reg_info * get_cse_reg_info PARAMS ((unsigned int));
static int check_dependence PARAMS ((rtx *, void *));

static void flush_hash_table PARAMS ((void));
static bool insn_live_p PARAMS ((rtx, int *));
static bool set_live_p PARAMS ((rtx, rtx, int *));
static bool dead_libcall_p PARAMS ((rtx, int *));
\f
/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
void
dump_class (classp)
     struct table_elt *classp;
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}

/* Subroutine of approx_reg_cost; called through for_each_rtx.  */

static int
approx_reg_cost_1 (xp, data)
     rtx *xp;
     void *data;
{
  rtx x = *xp;
  int *cost_p = data;

  if (x && GET_CODE (x) == REG)
    {
      unsigned int regno = REGNO (x);

      if (! CHEAP_REGNO (regno))
	{
	  if (regno < FIRST_PSEUDO_REGISTER)
	    {
	      if (SMALL_REGISTER_CLASSES)
		return 1;
	      *cost_p += 2;
	    }
	  else
	    *cost_p += 1;
	}
    }

  return 0;
}

/* Return an estimate of the cost of the registers used in an rtx.
   This is mostly the number of different REG expressions in the rtx;
   however for some exceptions like fixed registers we use a cost of
   0.  If any other hard register reference occurs, return MAX_COST.  */

static int
approx_reg_cost (x)
     rtx x;
{
  int cost = 0;

  if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
    return MAX_COST;

  return cost;
}

/* Return a negative value if an rtx A, whose costs are given by COST_A
   and REGCOST_A, is more desirable than an rtx B.
   Return a positive value if A is less desirable, or 0 if the two are
   equally good.  */
static int
preferrable (cost_a, regcost_a, cost_b, regcost_b)
     int cost_a, regcost_a, cost_b, regcost_b;
{
  /* First, get rid of cases involving expressions that are entirely
     unwanted.  */
  if (cost_a != cost_b)
    {
      if (cost_a == MAX_COST)
	return 1;
      if (cost_b == MAX_COST)
	return -1;
    }

  /* Avoid extending lifetimes of hardregs.  */
  if (regcost_a != regcost_b)
    {
      if (regcost_a == MAX_COST)
	return 1;
      if (regcost_b == MAX_COST)
	return -1;
    }

  /* Normal operation costs take precedence.  */
  if (cost_a != cost_b)
    return cost_a - cost_b;
  /* Only if these are identical consider effects on register pressure.  */
  if (regcost_a != regcost_b)
    return regcost_a - regcost_b;
  return 0;
}

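/* For illustration only: preferrable implements a lexicographic order on
   (cost, regcost) with MAX_COST saturating either axis, so an element is
   "cheaper" exactly when preferrable (...) < 0 (which is what the CHEAPER
   macro later in this file encodes).  Some hypothetical inputs:  */
#if 0
#include <assert.h>

int
main (void)
{
  assert (preferrable (4, 9, 5, 0) < 0);        /* Lower rtx cost wins outright.  */
  assert (preferrable (4, 2, 4, 9) < 0);        /* Equal rtx cost; regcost breaks the tie.  */
  assert (preferrable (1, MAX_COST, 9, 0) > 0); /* A saturated regcost always loses.  */
  return 0;
}
#endif
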
/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (x, outer)
     rtx x;
     enum rtx_code outer;
{
  return ((GET_CODE (x) == SUBREG
	   && GET_CODE (SUBREG_REG (x)) == REG
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
	   && (GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	   && subreg_lowpart_p (x)
	   && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
				     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
	  ? 0
	  : rtx_cost (x, outer) * 2);
}

/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.  */

int
rtx_cost (x, outer_code)
     rtx x;
     enum rtx_code outer_code ATTRIBUTE_UNUSED;
{
  int i, j;
  enum rtx_code code;
  const char *fmt;
  int total;

  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that RTX_COSTS can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker.  */
      total = 0;
      break;
    default:
      total = COSTS_N_INSNS (1);
    }

  switch (code)
    {
    case REG:
      return 0;

    case SUBREG:
      /* If we can't tie these modes, make this expensive.  The larger
	 the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
	return COSTS_N_INSNS (2
			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      break;

#ifdef RTX_COSTS
      RTX_COSTS (x, code, outer_code);
#endif
#ifdef CONST_COSTS
      CONST_COSTS (x, code, outer_code);
#endif

    default:
#ifdef DEFAULT_RTX_COSTS
      DEFAULT_RTX_COSTS (x, code, outer_code);
#endif
      break;
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}
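
/* For illustration only: absent target overrides, the cost of an rtx is
   the cost of its own code plus the costs of its sub-rtxs.  E.g. for
   (plus:SI (reg:SI 1) (mult:SI (reg:SI 2) (reg:SI 3))) the recursion
   yields

     rtx_cost (PLUS) = COSTS_N_INSNS (1)    default for PLUS
		     + rtx_cost (REG)       = 0
		     + rtx_cost (MULT)
     rtx_cost (MULT) = COSTS_N_INSNS (5)    MULT default above
		     + 0 + 0                two registers

   so registers are free and the total is dominated by the multiply.  */
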
\f
/* Return cost of address expression X.
   Expect that X is a properly formed address reference.  */

int
address_cost (x, mode)
     rtx x;
     enum machine_mode mode;
{
  /* The ADDRESS_COST macro does not deal with ADDRESSOF nodes.  But,
     during CSE, such nodes are present.  Using an ADDRESSOF node which
     refers to the address of a REG is a good thing because we can then
     turn (MEM (ADDRESSOF (REG))) into just plain REG.  */

  if (GET_CODE (x) == ADDRESSOF && REG_P (XEXP ((x), 0)))
    return -1;

  /* We may be asked for cost of various unusual addresses, such as operands
     of push instruction.  It is not worthwhile to complicate the
     ADDRESS_COST macro to handle such cases.  */

  if (!memory_address_p (mode, x))
    return 1000;
#ifdef ADDRESS_COST
  return ADDRESS_COST (x);
#else
  return rtx_cost (x, MEM);
#endif
}

\f
/* Find, or create and initialize, the cse_reg_info entry for REGNO.  */

static struct cse_reg_info *
get_cse_reg_info (regno)
     unsigned int regno;
{
  struct cse_reg_info **hash_head = &reg_hash[REGHASH_FN (regno)];
  struct cse_reg_info *p;

  for (p = *hash_head; p != NULL; p = p->hash_next)
    if (p->regno == regno)
      break;

  if (p == NULL)
    {
      /* Get a new cse_reg_info structure.  */
      if (cse_reg_info_free_list)
	{
	  p = cse_reg_info_free_list;
	  cse_reg_info_free_list = p->next;
	}
      else
	p = (struct cse_reg_info *) xmalloc (sizeof (struct cse_reg_info));

      /* Insert into hash table.  */
      p->hash_next = *hash_head;
      *hash_head = p;

      /* Initialize it.  */
      p->reg_tick = 1;
      p->reg_in_table = -1;
      p->subreg_ticked = -1;
      p->reg_qty = regno;
      p->regno = regno;
      p->next = cse_reg_info_used_list;
      cse_reg_info_used_list = p;
      if (!cse_reg_info_used_list_end)
	cse_reg_info_used_list_end = p;
    }

  /* Cache this lookup; we tend to be looking up information about the
     same register several times in a row.  */
  cached_regno = regno;
  cached_cse_reg_info = p;

  return p;
}

/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block ()
{
  int i;

  next_qty = max_reg;

  /* Clear out hash table state for this pass.  */

  memset ((char *) reg_hash, 0, sizeof reg_hash);

  if (cse_reg_info_used_list)
    {
      cse_reg_info_used_list_end->next = cse_reg_info_free_list;
      cse_reg_info_free_list = cse_reg_info_used_list;
      cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
    }
  cached_cse_reg_info = 0;

  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < HASH_SIZE; i++)
    {
      struct table_elt *first;

      first = table[i];
      if (first != NULL)
	{
	  struct table_elt *last = first;

	  table[i] = NULL;

	  while (last->next_same_hash != NULL)
	    last = last->next_same_hash;

	  /* Now relink this entire hash chain into
	     the free element list.  */

	  last->next_same_hash = free_element_chain;
	  free_element_chain = first;
	}
    }

  prev_insn = 0;

#ifdef HAVE_cc0
  prev_insn_cc0 = 0;
#endif
}

/* Say that register REG contains a quantity in mode MODE not in any
   register before and initialize that quantity.  */

static void
make_new_qty (reg, mode)
     unsigned int reg;
     enum machine_mode mode;
{
  int q;
  struct qty_table_elem *ent;
  struct reg_eqv_elem *eqv;

  if (next_qty >= max_qty)
    abort ();

  q = REG_QTY (reg) = next_qty++;
  ent = &qty_table[q];
  ent->first_reg = reg;
  ent->last_reg = reg;
  ent->mode = mode;
  ent->const_rtx = ent->const_insn = NULL_RTX;
  ent->comparison_code = UNKNOWN;

  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (new, old)
     unsigned int new, old;
{
  unsigned int lastr, firstr;
  int q = REG_QTY (old);
  struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  REG_QTY (new) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
		       || (uid_cuid[REGNO_FIRST_UID (new)]
			   < cse_basic_block_start))
		      && (uid_cuid[REGNO_LAST_UID (new)]
			  > uid_cuid[REGNO_LAST_UID (firstr)]))))))
    {
      reg_eqv_table[firstr].prev = new;
      reg_eqv_table[new].next = firstr;
      reg_eqv_table[new].prev = -1;
      ent->first_reg = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_eqv_table[lastr].prev;
      reg_eqv_table[new].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
	reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
      else
	qty_table[q].last_reg = new;
      reg_eqv_table[lastr].next = new;
      reg_eqv_table[new].prev = lastr;
    }
}

/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (reg)
     unsigned int reg;
{
  struct qty_table_elem *ent;
  int q = REG_QTY (reg);
  int p, n;

  /* If invalid, do nothing.  */
  if (q == (int) reg)
    return;

  ent = &qty_table[q];

  p = reg_eqv_table[reg].prev;
  n = reg_eqv_table[reg].next;

  if (n != -1)
    reg_eqv_table[n].prev = p;
  else
    ent->last_reg = p;
  if (p != -1)
    reg_eqv_table[p].next = n;
  else
    ent->first_reg = n;

  REG_QTY (reg) = reg;
}

/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (x)
     rtx x;
{
  enum rtx_code code;
  int i, j;
  const char *fmt;
  int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
      unsigned int i;

      for (i = regno; i < endregno; i++)
	{
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	  SUBREG_TICKED (i) = -1;
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      unsigned int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
	     the last store to this register really stored into this
	     subreg, then remove the memory of this subreg.
	     Otherwise, remove any memory of the entire register and
	     all its subregs from the table.  */
	  if (REG_TICK (i) - REG_IN_TABLE (i) > 1
	      || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || GET_RTX_CLASS (code) == '<')
    {
      if (GET_CODE (XEXP (x, 0)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (GET_CODE (XEXP (x, 1)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}

/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (x, classp, modified)
     rtx x;
     struct table_elt *classp;
     int modified;
{
  if (GET_CODE (x) == REG)
    {
      unsigned int regno = REGNO (x);
      int qty_valid;

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      qty_valid = REGNO_QTY_VALID_P (regno);
      if (qty_valid)
	{
	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];

	  if (ent->mode != GET_MODE (x))
	    return 0;
	}

      if (modified || ! qty_valid)
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (GET_CODE (classp->exp) == REG
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  make_regs_eqv (regno, REGNO (classp->exp));
		  return 1;
		}

	  /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
	     than REG_IN_TABLE to find out if there was only a single preceding
	     invalidation - for the SUBREG - or another one, which would be
	     for the full register.  However, if we find here that REG_TICK
	     indicates that the register is invalid, it means that it has
	     been invalidated in a separate operation.  The SUBREG might be used
	     now (then this is a recursive call), or we might use the full REG
	     now and a SUBREG of it later.  So bump up REG_TICK so that
	     mention_regs will do the right thing.  */
	  if (! modified
	      && REG_IN_TABLE (regno) >= 0
	      && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	    REG_TICK (regno)++;
	  make_new_qty (regno, GET_MODE (x));
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL, 0);
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}
\f
/* Look in or update the hash table.  */

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (elt, hash)
     struct table_elt *elt;
     unsigned hash;
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    struct table_elt *prev = elt->prev_same_value;
    struct table_elt *next = elt->next_same_value;

    if (next)
      next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    struct table_elt *prev = elt->prev_same_hash;
    struct table_elt *next = elt->next_same_hash;

    if (next)
      next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < HASH_SIZE; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      struct table_elt *p = elt->related_value;

      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  /* Now add it to the free element chain.  */
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}

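/* For illustration only: the related-value chain above is singly linked
   and circular, so deleting an element means walking all the way around
   to find its predecessor.  A standalone sketch of that step; mirroring
   the code above, a survivor that would point only at itself is given no
   chain at all (next == 0):  */
#if 0
struct toy_node { struct toy_node *next; };

static void
toy_circular_remove (struct toy_node *e)
{
  struct toy_node *p = e->next;
  while (p->next != e)          /* Find E's predecessor.  */
    p = p->next;
  p->next = e->next;            /* Splice E out.  */
  if (p->next == p)             /* Chain is now just P itself.  */
    p->next = 0;
}
#endif
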
/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
			    || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
      return p;

  return 0;
}

/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  struct table_elt *p;

  if (GET_CODE (x) == REG)
    {
      unsigned int regno = REGNO (x);

      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (GET_CODE (p->exp) == REG
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
	  return p;
    }

  return 0;
}

/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (x, code)
     rtx x;
     enum rtx_code code;
{
  struct table_elt *p
    = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, GET_MODE (x));

  /* If we are looking for a CONST_INT, the mode doesn't really matter, as
     long as we are narrowing.  So if we looked in vain for a mode narrower
     than word_mode before, look for word_mode now.  */
  if (p == 0 && code == CONST_INT
      && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
    {
      x = copy_rtx (x);
      PUT_MODE (x, word_mode);
      p = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, word_mode);
    }

  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    if (GET_CODE (p->exp) == code
	/* Make sure this is a valid entry in the table.  */
	&& exp_equiv_p (p->exp, p->exp, 1, 0))
      return p->exp;

  return 0;
}

/* Insert X in the hash table, assuming HASH is its hash code
   and CLASSP is an element of the class it should go in
   (or 0 if a new class should be made).
   It is inserted at the proper position to keep the class in
   the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */

#define CHEAPER(X, Y) \
 (preferrable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)

static struct table_elt *
insert (x, classp, hash, mode)
     rtx x;
     struct table_elt *classp;
     unsigned hash;
     enum machine_mode mode;
{
  struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
    abort ();

  /* If X is a hard register, show it is being put in the table.  */
  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
      unsigned int i;

      for (i = regno; i < endregno; i++)
	SET_HARD_REG_BIT (hard_regs_in_table, i);
    }

  /* Put an element for X into the right hash bucket.  */

  elt = free_element_chain;
  if (elt)
    free_element_chain = elt->next_same_hash;
  else
    {
      n_elements_made++;
      elt = (struct table_elt *) xmalloc (sizeof (struct table_elt));
    }

  elt->exp = x;
  elt->canon_exp = NULL_RTX;
  elt->cost = COST (x);
  elt->regcost = approx_reg_cost (x);
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x)
		   /* GNU C++ takes advantage of this for `this'
		      (and other const values).  */
		   || (GET_CODE (x) == REG
		       && RTX_UNCHANGING_P (x)
		       && REGNO (x) >= FIRST_PSEUDO_REGISTER)
		   || FIXED_BASE_PLUS_P (x));

  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
	/* Insert at the head of the class.  */
	{
	  struct table_elt *p;
	  elt->next_same_value = classp;
	  classp->prev_same_value = elt;
	  elt->first_same_value = elt;

	  for (p = classp; p; p = p->next_same_value)
	    p->first_same_value = elt;
	}
      else
	{
	  /* Insert not at head of the class.  */
	  /* Put it after the last element cheaper than X.  */
	  struct table_elt *p, *next;

	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
	       p = next)
	    ;

	  /* Put it after P and before NEXT.  */
	  elt->next_same_value = next;
	  if (next)
	    next->prev_same_value = elt;

	  elt->prev_same_value = p;
	  p->next_same_value = elt;
	  elt->first_same_value = classp;
	}
    }
  else
    elt->first_same_value = elt;

1657 /* If this is a constant being set equivalent to a register or a register
1658 being set equivalent to a constant, note the constant equivalence.
1659
1660 If this is a constant, it cannot be equivalent to a different constant,
1661 and a constant is the only thing that can be cheaper than a register. So
1662 we know the register is the head of the class (before the constant was
1663 inserted).
1664
1665 If this is a register that is not already known equivalent to a
1666 constant, we must check the entire class.
1667
1668 If this is a register that is already known equivalent to a constant,
1669 update the qty's `const_insn' to show that `this_insn' is the latest
1670 insn making that quantity equivalent to the constant. */
1671
1672 if (elt->is_const && classp && GET_CODE (classp->exp) == REG
1673 && GET_CODE (x) != REG)
1674 {
1675 int exp_q = REG_QTY (REGNO (classp->exp));
1676 struct qty_table_elem *exp_ent = &qty_table[exp_q];
1677
1678 exp_ent->const_rtx = gen_lowpart_if_possible (exp_ent->mode, x);
1679 exp_ent->const_insn = this_insn;
1680 }
1681
1682 else if (GET_CODE (x) == REG
1683 && classp
1684 && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1685 && ! elt->is_const)
1686 {
1687 struct table_elt *p;
1688
1689 for (p = classp; p != 0; p = p->next_same_value)
1690 {
1691 if (p->is_const && GET_CODE (p->exp) != REG)
1692 {
1693 int x_q = REG_QTY (REGNO (x));
1694 struct qty_table_elem *x_ent = &qty_table[x_q];
1695
1696 x_ent->const_rtx
1697 = gen_lowpart_if_possible (GET_MODE (x), p->exp);
1698 x_ent->const_insn = this_insn;
1699 break;
1700 }
1701 }
1702 }
1703
1704 else if (GET_CODE (x) == REG
1705 && qty_table[REG_QTY (REGNO (x))].const_rtx
1706 && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1707 qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1708
1709 /* If this is a constant with symbolic value,
1710 and it has a term with an explicit integer value,
1711 link it up with related expressions. */
1712 if (GET_CODE (x) == CONST)
1713 {
1714 rtx subexp = get_related_value (x);
1715 unsigned subhash;
1716 struct table_elt *subelt, *subelt_prev;
1717
1718 if (subexp != 0)
1719 {
1720 /* Get the integer-free subexpression in the hash table. */
1721 subhash = safe_hash (subexp, mode) & HASH_MASK;
1722 subelt = lookup (subexp, subhash, mode);
1723 if (subelt == 0)
1724 subelt = insert (subexp, NULL, subhash, mode);
1725 /* Initialize SUBELT's circular chain if it has none. */
1726 if (subelt->related_value == 0)
1727 subelt->related_value = subelt;
1728 /* Find the element in the circular chain that precedes SUBELT. */
1729 subelt_prev = subelt;
1730 while (subelt_prev->related_value != subelt)
1731 subelt_prev = subelt_prev->related_value;
1732 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1733 This way the element that follows SUBELT is the oldest one. */
1734 elt->related_value = subelt_prev->related_value;
1735 subelt_prev->related_value = elt;
1736 }
1737 }
1738
1739 return elt;
1740 }
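
/* An illustrative walk-through of the related-value chain maintained
   above (a sketch, not part of the pass itself): suppose the constants
   "s" + 8, "s" + 4 and "s" + 12 -- i.e. rtx's of the form
   (const (plus (symbol_ref "s") (const_int N))) -- are inserted in that
   order.  The integer-free subexpression (symbol_ref "s") gets a table
   entry SUBELT whose chain initially points at itself; each new element
   is then spliced in just before SUBELT, leaving the circle

       subelt -> "s"+8 -> "s"+4 -> "s"+12 -> subelt

   so the oldest entry follows SUBELT, and walking `related_value' from
   any member visits every "s" + N currently in the table.  */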
1741 \f
1742 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1743 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1744 the two classes equivalent.
1745
1746 CLASS1 will be the surviving class; CLASS2 should not be used after this
1747 call.
1748
1749 Any invalid entries in CLASS2 will not be copied. */
1750
1751 static void
1752 merge_equiv_classes (class1, class2)
1753 struct table_elt *class1, *class2;
1754 {
1755 struct table_elt *elt, *next, *new;
1756
1757 /* Ensure we start with the head of the classes. */
1758 class1 = class1->first_same_value;
1759 class2 = class2->first_same_value;
1760
1761 /* If they were already equal, forget it. */
1762 if (class1 == class2)
1763 return;
1764
1765 for (elt = class2; elt; elt = next)
1766 {
1767 unsigned int hash;
1768 rtx exp = elt->exp;
1769 enum machine_mode mode = elt->mode;
1770
1771 next = elt->next_same_value;
1772
1773 /* Remove old entry, make a new one in CLASS1's class.
1774 Don't do this for invalid entries as we cannot find their
1775 hash code (it also isn't necessary). */
1776 if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
1777 {
1778 hash_arg_in_memory = 0;
1779 hash = HASH (exp, mode);
1780
1781 if (GET_CODE (exp) == REG)
1782 delete_reg_equiv (REGNO (exp));
1783
1784 remove_from_table (elt, hash);
1785
1786 if (insert_regs (exp, class1, 0))
1787 {
1788 rehash_using_reg (exp);
1789 hash = HASH (exp, mode);
1790 }
1791 new = insert (exp, class1, hash, mode);
1792 new->in_memory = hash_arg_in_memory;
1793 }
1794 }
1795 }
1796 \f
1797 /* Flush the entire hash table. */
1798
1799 static void
1800 flush_hash_table ()
1801 {
1802 int i;
1803 struct table_elt *p;
1804
1805 for (i = 0; i < HASH_SIZE; i++)
1806 for (p = table[i]; p; p = table[i])
1807 {
1808 /* Note that invalidate can remove elements
1809 after P in the current hash chain. */
1810 if (GET_CODE (p->exp) == REG)
1811 invalidate (p->exp, p->mode);
1812 else
1813 remove_from_table (p, i);
1814 }
1815 }
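
/* The loop above deliberately re-reads table[i] rather than following a
   saved next pointer: invalidate (for REG entries) and remove_from_table
   both unlink elements from the chain, possibly several at once, so any
   pointer captured before the call could be stale.  */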
1816 \f
1817 /* Function called for each rtx to check whether a true dependence exists. */
1818 struct check_dependence_data
1819 {
1820 enum machine_mode mode;
1821 rtx exp;
1822 };
1823
1824 static int
1825 check_dependence (x, data)
1826 rtx *x;
1827 void *data;
1828 {
1829 struct check_dependence_data *d = (struct check_dependence_data *) data;
1830 if (*x && GET_CODE (*x) == MEM)
1831 return true_dependence (d->exp, d->mode, *x, cse_rtx_varies_p);
1832 else
1833 return 0;
1834 }
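
/* A minimal sketch of how this callback is driven (mirroring the MEM
   case of invalidate below): D carries the canonicalized MEM being
   stored into and the mode to invalidate, and for_each_rtx applies the
   check to every sub-rtx of a table entry's canonical expression:

       struct check_dependence_data d;
       d.exp = x;
       d.mode = full_mode;
       if (for_each_rtx (&p->canon_exp, check_dependence, &d))
         remove_from_table (p, i);

   A nonzero result for any sub-rtx removes the entry.  */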
1835 \f
1836 /* Remove from the hash table, or mark as invalid, all expressions whose
1837 values could be altered by storing in X. X is a register, a subreg, or
1838 a memory reference with nonvarying address (because, when a memory
1839 reference with a varying address is stored in, all memory references are
1840 removed by invalidate_memory so specific invalidation is superfluous).
1841 FULL_MODE, if not VOIDmode, indicates that this much should be
1842 invalidated instead of just the amount indicated by the mode of X. This
1843 is only used for bitfield stores into memory.
1844
1845 A nonvarying address may be just a register or just a symbol reference,
1846 or it may be either of those plus a numeric offset. */
1847
1848 static void
1849 invalidate (x, full_mode)
1850 rtx x;
1851 enum machine_mode full_mode;
1852 {
1853 int i;
1854 struct table_elt *p;
1855
1856 switch (GET_CODE (x))
1857 {
1858 case REG:
1859 {
1860 /* If X is a register, dependencies on its contents are recorded
1861 through the qty number mechanism. Just change the qty number of
1862 the register, mark it as invalid for expressions that refer to it,
1863 and remove it itself. */
1864 unsigned int regno = REGNO (x);
1865 unsigned int hash = HASH (x, GET_MODE (x));
1866
1867 /* Remove REGNO from any quantity list it might be on and indicate
1868 that its value might have changed. If it is a pseudo, remove its
1869 entry from the hash table.
1870
1871 For a hard register, we do the first two actions above for any
1872 additional hard registers corresponding to X. Then, if any of these
1873 registers are in the table, we must remove any REG entries that
1874 overlap these registers. */
1875
1876 delete_reg_equiv (regno);
1877 REG_TICK (regno)++;
1878 SUBREG_TICKED (regno) = -1;
1879
1880 if (regno >= FIRST_PSEUDO_REGISTER)
1881 {
1882 /* Because a register can be referenced in more than one mode,
1883 we might have to remove more than one table entry. */
1884 struct table_elt *elt;
1885
1886 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1887 remove_from_table (elt, hash);
1888 }
1889 else
1890 {
1891 HOST_WIDE_INT in_table
1892 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1893 unsigned int endregno
1894 = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1895 unsigned int tregno, tendregno, rn;
1896 struct table_elt *p, *next;
1897
1898 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1899
1900 for (rn = regno + 1; rn < endregno; rn++)
1901 {
1902 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1903 CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1904 delete_reg_equiv (rn);
1905 REG_TICK (rn)++;
1906 SUBREG_TICKED (rn) = -1;
1907 }
1908
1909 if (in_table)
1910 for (hash = 0; hash < HASH_SIZE; hash++)
1911 for (p = table[hash]; p; p = next)
1912 {
1913 next = p->next_same_hash;
1914
1915 if (GET_CODE (p->exp) != REG
1916 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1917 continue;
1918
1919 tregno = REGNO (p->exp);
1920 tendregno
1921 = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1922 if (tendregno > regno && tregno < endregno)
1923 remove_from_table (p, hash);
1924 }
1925 }
1926 }
1927 return;
1928
1929 case SUBREG:
1930 invalidate (SUBREG_REG (x), VOIDmode);
1931 return;
1932
1933 case PARALLEL:
1934 for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1935 invalidate (XVECEXP (x, 0, i), VOIDmode);
1936 return;
1937
1938 case EXPR_LIST:
1939 /* This is part of a disjoint return value; extract the location in
1940 question ignoring the offset. */
1941 invalidate (XEXP (x, 0), VOIDmode);
1942 return;
1943
1944 case MEM:
1945 /* Calculate the canonical version of X here so that
1946 true_dependence doesn't generate new RTL for X on each call. */
1947 x = canon_rtx (x);
1948
1949 /* Remove all hash table elements that refer to overlapping pieces of
1950 memory. */
1951 if (full_mode == VOIDmode)
1952 full_mode = GET_MODE (x);
1953
1954 for (i = 0; i < HASH_SIZE; i++)
1955 {
1956 struct table_elt *next;
1957
1958 for (p = table[i]; p; p = next)
1959 {
1960 next = p->next_same_hash;
1961 if (p->in_memory)
1962 {
1963 struct check_dependence_data d;
1964
1965 /* Just canonicalize the expression once;
1966 otherwise each time we call invalidate
1967 true_dependence will canonicalize the
1968 expression again. */
1969 if (!p->canon_exp)
1970 p->canon_exp = canon_rtx (p->exp);
1971 d.exp = x;
1972 d.mode = full_mode;
1973 if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1974 remove_from_table (p, i);
1975 }
1976 }
1977 }
1978 return;
1979
1980 default:
1981 abort ();
1982 }
1983 }
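
/* A worked example of the hard-register case above (illustrative): on a
   target where (reg:DI 0) occupies hard registers 0 and 1, calling
   invalidate ((reg:DI 0), VOIDmode) bumps REG_TICK and clears the
   hard_regs_in_table bit for both registers, then sweeps the table and
   removes any remaining REG entry, such as (reg:SI 1), whose hard
   registers overlap the range [0, 2).  */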
1984 \f
1985 /* Remove all expressions that refer to register REGNO,
1986 since they are already invalid, and we are about to
1987 mark that register valid again and don't want the old
1988 expressions to reappear as valid. */
1989
1990 static void
1991 remove_invalid_refs (regno)
1992 unsigned int regno;
1993 {
1994 unsigned int i;
1995 struct table_elt *p, *next;
1996
1997 for (i = 0; i < HASH_SIZE; i++)
1998 for (p = table[i]; p; p = next)
1999 {
2000 next = p->next_same_hash;
2001 if (GET_CODE (p->exp) != REG
2002 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
2003 remove_from_table (p, i);
2004 }
2005 }
2006
2007 /* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
2008 and mode MODE. */
2009 static void
2010 remove_invalid_subreg_refs (regno, offset, mode)
2011 unsigned int regno;
2012 unsigned int offset;
2013 enum machine_mode mode;
2014 {
2015 unsigned int i;
2016 struct table_elt *p, *next;
2017 unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
2018
2019 for (i = 0; i < HASH_SIZE; i++)
2020 for (p = table[i]; p; p = next)
2021 {
2022 rtx exp = p->exp;
2023 next = p->next_same_hash;
2024
2025 if (GET_CODE (exp) != REG
2026 && (GET_CODE (exp) != SUBREG
2027 || GET_CODE (SUBREG_REG (exp)) != REG
2028 || REGNO (SUBREG_REG (exp)) != regno
2029 || (((SUBREG_BYTE (exp)
2030 + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
2031 && SUBREG_BYTE (exp) <= end))
2032 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
2033 remove_from_table (p, i);
2034 }
2035 }
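
/* Example of the overlap test above (illustrative): with REGNO = 100,
   OFFSET = 4 and MODE = SImode, END is 7, i.e. bytes 4..7 of register
   100 went stale.  (subreg:HI (reg:DI 100) 6) covers bytes 6..7 and is
   removed; (subreg:HI (reg:DI 100) 0) covers bytes 0..1 and survives.
   Any other expression mentioning register 100 is removed outright.  */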
2036 \f
2037 /* Recompute the hash codes of any valid entries in the hash table that
2038 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
2039
2040 This is called when we make a jump equivalence. */
2041
2042 static void
2043 rehash_using_reg (x)
2044 rtx x;
2045 {
2046 unsigned int i;
2047 struct table_elt *p, *next;
2048 unsigned hash;
2049
2050 if (GET_CODE (x) == SUBREG)
2051 x = SUBREG_REG (x);
2052
2053 /* If X is not a register or if the register is known not to be in any
2054 valid entries in the table, we have no work to do. */
2055
2056 if (GET_CODE (x) != REG
2057 || REG_IN_TABLE (REGNO (x)) < 0
2058 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
2059 return;
2060
2061 /* Scan all hash chains looking for valid entries that mention X.
2062 If we find one and it is in the wrong hash chain, move it. We can skip
2063 objects that are registers, since they are handled specially. */
2064
2065 for (i = 0; i < HASH_SIZE; i++)
2066 for (p = table[i]; p; p = next)
2067 {
2068 next = p->next_same_hash;
2069 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
2070 && exp_equiv_p (p->exp, p->exp, 1, 0)
2071 && i != (hash = safe_hash (p->exp, p->mode) & HASH_MASK))
2072 {
2073 if (p->next_same_hash)
2074 p->next_same_hash->prev_same_hash = p->prev_same_hash;
2075
2076 if (p->prev_same_hash)
2077 p->prev_same_hash->next_same_hash = p->next_same_hash;
2078 else
2079 table[i] = p->next_same_hash;
2080
2081 p->next_same_hash = table[hash];
2082 p->prev_same_hash = 0;
2083 if (table[hash])
2084 table[hash]->prev_same_hash = p;
2085 table[hash] = p;
2086 }
2087 }
2088 }
2089 \f
2090 /* Remove from the hash table any expression that is a call-clobbered
2091 register. Also update the TICK values of those registers. */
2092
2093 static void
2094 invalidate_for_call ()
2095 {
2096 unsigned int regno, endregno;
2097 unsigned int i;
2098 unsigned hash;
2099 struct table_elt *p, *next;
2100 int in_table = 0;
2101
2102 /* Go through all the hard registers. For each that is clobbered in
2103 a CALL_INSN, remove the register from quantity chains and update
2104 reg_tick if defined. Also see if any of these registers is currently
2105 in the table. */
2106
2107 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2108 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2109 {
2110 delete_reg_equiv (regno);
2111 if (REG_TICK (regno) >= 0)
2112 {
2113 REG_TICK (regno)++;
2114 SUBREG_TICKED (regno) = -1;
2115 }
2116
2117 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2118 }
2119
2120 /* In the case where we have no call-clobbered hard registers in the
2121 table, we are done. Otherwise, scan the table and remove any
2122 entry that overlaps a call-clobbered register. */
2123
2124 if (in_table)
2125 for (hash = 0; hash < HASH_SIZE; hash++)
2126 for (p = table[hash]; p; p = next)
2127 {
2128 next = p->next_same_hash;
2129
2130 if (GET_CODE (p->exp) != REG
2131 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2132 continue;
2133
2134 regno = REGNO (p->exp);
2135 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
2136
2137 for (i = regno; i < endregno; i++)
2138 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2139 {
2140 remove_from_table (p, hash);
2141 break;
2142 }
2143 }
2144 }
2145 \f
2146 /* Given an expression X of type CONST,
2147 and ELT which is its table entry (or 0 if it
2148 is not in the hash table),
2149 return an alternate expression for X as a register plus integer.
2150 If none can be found, return 0. */
2151
2152 static rtx
2153 use_related_value (x, elt)
2154 rtx x;
2155 struct table_elt *elt;
2156 {
2157 struct table_elt *relt = 0;
2158 struct table_elt *p, *q;
2159 HOST_WIDE_INT offset;
2160
2161 /* First, is there anything related known?
2162 If we have a table element, we can tell from that.
2163 Otherwise, must look it up. */
2164
2165 if (elt != 0 && elt->related_value != 0)
2166 relt = elt;
2167 else if (elt == 0 && GET_CODE (x) == CONST)
2168 {
2169 rtx subexp = get_related_value (x);
2170 if (subexp != 0)
2171 relt = lookup (subexp,
2172 safe_hash (subexp, GET_MODE (subexp)) & HASH_MASK,
2173 GET_MODE (subexp));
2174 }
2175
2176 if (relt == 0)
2177 return 0;
2178
2179 /* Search all related table entries for one that has an
2180 equivalent register. */
2181
2182 p = relt;
2183 while (1)
2184 {
2185 /* This loop is strange in that it is executed in two different cases.
2186 The first is when X is already in the table. Then it is searching
2187 the RELATED_VALUE list of X's class (RELT). The second case is when
2188 X is not in the table. Then RELT points to a class for the related
2189 value.
2190
2191 Ensure that, whatever case we are in, we ignore classes that have
2192 the same value as X. */
2193
2194 if (rtx_equal_p (x, p->exp))
2195 q = 0;
2196 else
2197 for (q = p->first_same_value; q; q = q->next_same_value)
2198 if (GET_CODE (q->exp) == REG)
2199 break;
2200
2201 if (q)
2202 break;
2203
2204 p = p->related_value;
2205
2206 /* We went all the way around, so there is nothing to be found.
2207 Alternatively, perhaps RELT was in the table for some other reason
2208 and it has no related values recorded. */
2209 if (p == relt || p == 0)
2210 break;
2211 }
2212
2213 if (q == 0)
2214 return 0;
2215
2216 offset = (get_integer_term (x) - get_integer_term (p->exp));
2217 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
2218 return plus_constant (q->exp, offset);
2219 }
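
/* A worked example (illustrative): suppose X is
   (const (plus (symbol_ref "s") (const_int 12))) and the table holds
   (const (plus (symbol_ref "s") (const_int 4))) in a class whose head
   is also equivalent to (reg 100).  Following the related-value chain
   finds that class, Q->exp is (reg 100), OFFSET is 12 - 4 = 8, and the
   result is (plus (reg 100) (const_int 8)).  */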
2220 \f
2221 /* Hash a string. Just add its bytes up. */
2222 static inline unsigned
2223 canon_hash_string (ps)
2224 const char *ps;
2225 {
2226 unsigned hash = 0;
2227 const unsigned char *p = (const unsigned char *) ps;
2228
2229 if (p)
2230 while (*p)
2231 hash += *p++;
2232
2233 return hash;
2234 }
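
/* Because the bytes are merely summed, permutations of the same bytes
   collide ("ab" hashes like "ba"); that is acceptable here since the
   hash only needs to be consistent, not collision-free.  */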
2235
2236 /* Hash an rtx. We are careful to make sure the value is never negative.
2237 Equivalent registers hash identically.
2238 MODE is used in hashing for CONST_INTs only;
2239 otherwise the mode of X is used.
2240
2241 Store 1 in do_not_record if any subexpression is volatile.
2242
2243 Store 1 in hash_arg_in_memory if X contains a MEM rtx
2244 which does not have the RTX_UNCHANGING_P bit set.
2245
2246 Note that cse_insn knows that the hash code of a MEM expression
2247 is just (int) MEM plus the hash code of the address. */
2248
2249 static unsigned
2250 canon_hash (x, mode)
2251 rtx x;
2252 enum machine_mode mode;
2253 {
2254 int i, j;
2255 unsigned hash = 0;
2256 enum rtx_code code;
2257 const char *fmt;
2258
2259 /* repeat is used to turn tail-recursion into iteration. */
2260 repeat:
2261 if (x == 0)
2262 return hash;
2263
2264 code = GET_CODE (x);
2265 switch (code)
2266 {
2267 case REG:
2268 {
2269 unsigned int regno = REGNO (x);
2270 bool record;
2271
2272 /* On some machines, we can't record any non-fixed hard register,
2273 because extending its life will cause reload problems. We
2274 consider ap, fp, sp, gp to be fixed for this purpose.
2275
2276 We also consider CCmode registers to be fixed for this purpose;
2277 failure to do so leads to failure to simplify 0<100 type of
2278 conditionals.
2279
2280 On all machines, we can't record any global registers.
2281 Nor should we record any register that is in a small
2282 class, as defined by CLASS_LIKELY_SPILLED_P. */
2283
2284 if (regno >= FIRST_PSEUDO_REGISTER)
2285 record = true;
2286 else if (x == frame_pointer_rtx
2287 || x == hard_frame_pointer_rtx
2288 || x == arg_pointer_rtx
2289 || x == stack_pointer_rtx
2290 || x == pic_offset_table_rtx)
2291 record = true;
2292 else if (global_regs[regno])
2293 record = false;
2294 else if (fixed_regs[regno])
2295 record = true;
2296 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2297 record = true;
2298 else if (SMALL_REGISTER_CLASSES)
2299 record = false;
2300 else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
2301 record = false;
2302 else
2303 record = true;
2304
2305 if (!record)
2306 {
2307 do_not_record = 1;
2308 return 0;
2309 }
2310
2311 hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
2312 return hash;
2313 }
2314
2315 /* We handle SUBREG of a REG specially because the underlying
2316 reg changes its hash value with every value change; we don't
2317 want to have to forget unrelated subregs when one subreg changes. */
2318 case SUBREG:
2319 {
2320 if (GET_CODE (SUBREG_REG (x)) == REG)
2321 {
2322 hash += (((unsigned) SUBREG << 7)
2323 + REGNO (SUBREG_REG (x))
2324 + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2325 return hash;
2326 }
2327 break;
2328 }
2329
2330 case CONST_INT:
2331 {
2332 unsigned HOST_WIDE_INT tem = INTVAL (x);
2333 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2334 return hash;
2335 }
2336
2337 case CONST_DOUBLE:
2338 /* This is like the general case, except that it only counts
2339 the integers representing the constant. */
2340 hash += (unsigned) code + (unsigned) GET_MODE (x);
2341 if (GET_MODE (x) != VOIDmode)
2342 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2343 else
2344 hash += ((unsigned) CONST_DOUBLE_LOW (x)
2345 + (unsigned) CONST_DOUBLE_HIGH (x));
2346 return hash;
2347
2348 case CONST_VECTOR:
2349 {
2350 int units;
2351 rtx elt;
2352
2353 units = CONST_VECTOR_NUNITS (x);
2354
2355 for (i = 0; i < units; ++i)
2356 {
2357 elt = CONST_VECTOR_ELT (x, i);
2358 hash += canon_hash (elt, GET_MODE (elt));
2359 }
2360
2361 return hash;
2362 }
2363
2364 /* Assume there is only one rtx object for any given label. */
2365 case LABEL_REF:
2366 hash += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2367 return hash;
2368
2369 case SYMBOL_REF:
2370 hash += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2371 return hash;
2372
2373 case MEM:
2374 /* We don't record if marked volatile or if BLKmode since we don't
2375 know the size of the move. */
2376 if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2377 {
2378 do_not_record = 1;
2379 return 0;
2380 }
2381 if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2382 {
2383 hash_arg_in_memory = 1;
2384 }
2385 /* Now that we have already found this special case,
2386 might as well speed it up as much as possible. */
2387 hash += (unsigned) MEM;
2388 x = XEXP (x, 0);
2389 goto repeat;
2390
2391 case USE:
2392 /* A USE that mentions non-volatile memory needs special
2393 handling since the MEM may be BLKmode which normally
2394 prevents an entry from being made. Pure calls are
2395 marked by a USE which mentions BLKmode memory. */
2396 if (GET_CODE (XEXP (x, 0)) == MEM
2397 && ! MEM_VOLATILE_P (XEXP (x, 0)))
2398 {
2399 hash += (unsigned) USE;
2400 x = XEXP (x, 0);
2401
2402 if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2403 hash_arg_in_memory = 1;
2404
2405 /* Now that we have already found this special case,
2406 might as well speed it up as much as possible. */
2407 hash += (unsigned) MEM;
2408 x = XEXP (x, 0);
2409 goto repeat;
2410 }
2411 break;
2412
2413 case PRE_DEC:
2414 case PRE_INC:
2415 case POST_DEC:
2416 case POST_INC:
2417 case PRE_MODIFY:
2418 case POST_MODIFY:
2419 case PC:
2420 case CC0:
2421 case CALL:
2422 case UNSPEC_VOLATILE:
2423 do_not_record = 1;
2424 return 0;
2425
2426 case ASM_OPERANDS:
2427 if (MEM_VOLATILE_P (x))
2428 {
2429 do_not_record = 1;
2430 return 0;
2431 }
2432 else
2433 {
2434 /* We don't want to take the filename and line into account. */
2435 hash += (unsigned) code + (unsigned) GET_MODE (x)
2436 + canon_hash_string (ASM_OPERANDS_TEMPLATE (x))
2437 + canon_hash_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2438 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2439
2440 if (ASM_OPERANDS_INPUT_LENGTH (x))
2441 {
2442 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2443 {
2444 hash += (canon_hash (ASM_OPERANDS_INPUT (x, i),
2445 GET_MODE (ASM_OPERANDS_INPUT (x, i)))
2446 + canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT
2447 (x, i)));
2448 }
2449
2450 hash += canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2451 x = ASM_OPERANDS_INPUT (x, 0);
2452 mode = GET_MODE (x);
2453 goto repeat;
2454 }
2455
2456 return hash;
2457 }
2458 break;
2459
2460 default:
2461 break;
2462 }
2463
2464 i = GET_RTX_LENGTH (code) - 1;
2465 hash += (unsigned) code + (unsigned) GET_MODE (x);
2466 fmt = GET_RTX_FORMAT (code);
2467 for (; i >= 0; i--)
2468 {
2469 if (fmt[i] == 'e')
2470 {
2471 rtx tem = XEXP (x, i);
2472
2473 /* If we are about to do the last recursive call
2474 needed at this level, change it into iteration.
2475 This function is called enough to be worth it. */
2476 if (i == 0)
2477 {
2478 x = tem;
2479 goto repeat;
2480 }
2481 hash += canon_hash (tem, 0);
2482 }
2483 else if (fmt[i] == 'E')
2484 for (j = 0; j < XVECLEN (x, i); j++)
2485 hash += canon_hash (XVECEXP (x, i, j), 0);
2486 else if (fmt[i] == 's')
2487 hash += canon_hash_string (XSTR (x, i));
2488 else if (fmt[i] == 'i')
2489 {
2490 unsigned tem = XINT (x, i);
2491 hash += tem;
2492 }
2493 else if (fmt[i] == '0' || fmt[i] == 't')
2494 /* Unused. */
2495 ;
2496 else
2497 abort ();
2498 }
2499 return hash;
2500 }
2501
2502 /* Like canon_hash but with no side effects. */
2503
2504 static unsigned
2505 safe_hash (x, mode)
2506 rtx x;
2507 enum machine_mode mode;
2508 {
2509 int save_do_not_record = do_not_record;
2510 int save_hash_arg_in_memory = hash_arg_in_memory;
2511 unsigned hash = canon_hash (x, mode);
2512 hash_arg_in_memory = save_hash_arg_in_memory;
2513 do_not_record = save_do_not_record;
2514 return hash;
2515 }
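
/* A minimal usage sketch: callers use safe_hash when they only want a
   bucket index and must not disturb the do_not_record and
   hash_arg_in_memory flags being maintained for the insn currently
   under scan, e.g.

       subhash = safe_hash (subexp, mode) & HASH_MASK;
       subelt = lookup (subexp, subhash, mode);

   whereas the HASH macro is used when those side effects are wanted.  */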
2516 \f
2517 /* Return 1 iff X and Y would canonicalize into the same thing,
2518 without actually constructing the canonicalization of either one.
2519 If VALIDATE is nonzero,
2520 we assume X is an expression being processed from the rtl
2521 and Y was found in the hash table. We check register refs
2522 in Y for being marked as valid.
2523
2524 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2525 that is known to be in the register. Ordinarily, we don't allow them
2526 to match, because letting them match would cause unpredictable results
2527 in all the places that search a hash table chain for an equivalent
2528 for a given value. A possible equivalent that has different structure
2529 has its hash code computed from different data. Whether the hash code
2530 is the same as that of the given value is pure luck. */
2531
2532 static int
2533 exp_equiv_p (x, y, validate, equal_values)
2534 rtx x, y;
2535 int validate;
2536 int equal_values;
2537 {
2538 int i, j;
2539 enum rtx_code code;
2540 const char *fmt;
2541
2542 /* Note: it is incorrect to assume an expression is equivalent to itself
2543 if VALIDATE is nonzero. */
2544 if (x == y && !validate)
2545 return 1;
2546 if (x == 0 || y == 0)
2547 return x == y;
2548
2549 code = GET_CODE (x);
2550 if (code != GET_CODE (y))
2551 {
2552 if (!equal_values)
2553 return 0;
2554
2555 /* If X is a constant and Y is a register or vice versa, they may be
2556 equivalent. We only have to validate if Y is a register. */
2557 if (CONSTANT_P (x) && GET_CODE (y) == REG
2558 && REGNO_QTY_VALID_P (REGNO (y)))
2559 {
2560 int y_q = REG_QTY (REGNO (y));
2561 struct qty_table_elem *y_ent = &qty_table[y_q];
2562
2563 if (GET_MODE (y) == y_ent->mode
2564 && rtx_equal_p (x, y_ent->const_rtx)
2565 && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
2566 return 1;
2567 }
2568
2569 if (CONSTANT_P (y) && code == REG
2570 && REGNO_QTY_VALID_P (REGNO (x)))
2571 {
2572 int x_q = REG_QTY (REGNO (x));
2573 struct qty_table_elem *x_ent = &qty_table[x_q];
2574
2575 if (GET_MODE (x) == x_ent->mode
2576 && rtx_equal_p (y, x_ent->const_rtx))
2577 return 1;
2578 }
2579
2580 return 0;
2581 }
2582
2583 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2584 if (GET_MODE (x) != GET_MODE (y))
2585 return 0;
2586
2587 switch (code)
2588 {
2589 case PC:
2590 case CC0:
2591 case CONST_INT:
2592 return x == y;
2593
2594 case LABEL_REF:
2595 return XEXP (x, 0) == XEXP (y, 0);
2596
2597 case SYMBOL_REF:
2598 return XSTR (x, 0) == XSTR (y, 0);
2599
2600 case REG:
2601 {
2602 unsigned int regno = REGNO (y);
2603 unsigned int endregno
2604 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2605 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2606 unsigned int i;
2607
2608 /* If the quantities are not the same, the expressions are not
2609 equivalent. If they are and we are not to validate, they
2610 are equivalent. Otherwise, ensure all regs are up-to-date. */
2611
2612 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2613 return 0;
2614
2615 if (! validate)
2616 return 1;
2617
2618 for (i = regno; i < endregno; i++)
2619 if (REG_IN_TABLE (i) != REG_TICK (i))
2620 return 0;
2621
2622 return 1;
2623 }
2624
2625 /* For commutative operations, check both orders. */
2626 case PLUS:
2627 case MULT:
2628 case AND:
2629 case IOR:
2630 case XOR:
2631 case NE:
2632 case EQ:
2633 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2634 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2635 validate, equal_values))
2636 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2637 validate, equal_values)
2638 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2639 validate, equal_values)));
2640
2641 case ASM_OPERANDS:
2642 /* We don't use the generic code below because we want to
2643 disregard filename and line numbers. */
2644
2645 /* A volatile asm isn't equivalent to any other. */
2646 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2647 return 0;
2648
2649 if (GET_MODE (x) != GET_MODE (y)
2650 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2651 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2652 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2653 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2654 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2655 return 0;
2656
2657 if (ASM_OPERANDS_INPUT_LENGTH (x))
2658 {
2659 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2660 if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2661 ASM_OPERANDS_INPUT (y, i),
2662 validate, equal_values)
2663 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2664 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2665 return 0;
2666 }
2667
2668 return 1;
2669
2670 default:
2671 break;
2672 }
2673
2674 /* Compare the elements. If any pair of corresponding elements
2675 fails to match, return 0 for the whole thing. */
2676
2677 fmt = GET_RTX_FORMAT (code);
2678 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2679 {
2680 switch (fmt[i])
2681 {
2682 case 'e':
2683 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2684 return 0;
2685 break;
2686
2687 case 'E':
2688 if (XVECLEN (x, i) != XVECLEN (y, i))
2689 return 0;
2690 for (j = 0; j < XVECLEN (x, i); j++)
2691 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2692 validate, equal_values))
2693 return 0;
2694 break;
2695
2696 case 's':
2697 if (strcmp (XSTR (x, i), XSTR (y, i)))
2698 return 0;
2699 break;
2700
2701 case 'i':
2702 if (XINT (x, i) != XINT (y, i))
2703 return 0;
2704 break;
2705
2706 case 'w':
2707 if (XWINT (x, i) != XWINT (y, i))
2708 return 0;
2709 break;
2710
2711 case '0':
2712 case 't':
2713 break;
2714
2715 default:
2716 abort ();
2717 }
2718 }
2719
2720 return 1;
2721 }
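
/* An example of the EQUAL_VALUES case (illustrative): if (reg:SI 100)
   currently lies in a quantity whose recorded constant is (const_int 7),
   then exp_equiv_p ((const_int 7), (reg:SI 100), 0, 1) returns 1 even
   though the two rtx's have different codes and hash differently.  */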
2722 \f
2723 /* Return 1 if X has a value that can vary even between two
2724 executions of the program. 0 means X can be compared reliably
2725 against certain constants or near-constants. */
2726
2727 static int
2728 cse_rtx_varies_p (x, from_alias)
2729 rtx x;
2730 int from_alias;
2731 {
2732 /* We need not check for X and the equivalence class being of the same
2733 mode because if X is equivalent to a constant in some mode, it
2734 doesn't vary in any mode. */
2735
2736 if (GET_CODE (x) == REG
2737 && REGNO_QTY_VALID_P (REGNO (x)))
2738 {
2739 int x_q = REG_QTY (REGNO (x));
2740 struct qty_table_elem *x_ent = &qty_table[x_q];
2741
2742 if (GET_MODE (x) == x_ent->mode
2743 && x_ent->const_rtx != NULL_RTX)
2744 return 0;
2745 }
2746
2747 if (GET_CODE (x) == PLUS
2748 && GET_CODE (XEXP (x, 1)) == CONST_INT
2749 && GET_CODE (XEXP (x, 0)) == REG
2750 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2751 {
2752 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2753 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2754
2755 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2756 && x0_ent->const_rtx != NULL_RTX)
2757 return 0;
2758 }
2759
2760 /* This can happen as the result of virtual register instantiation, if
2761 the initial constant is too large to be a valid address. This gives
2762 us a three instruction sequence, load large offset into a register,
2763 load fp minus a constant into a register, then a MEM which is the
2764 sum of the two `constant' registers. */
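  /* An illustrative shape of that sequence:

	(set (reg A) (const_int 69632))
	(set (reg B) (plus (reg fp) (const_int -8)))
	... (mem (plus (reg A) (reg B))) ...

     Both A and B then have recorded constant equivalents, so their sum
     does not vary either.  */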
2765 if (GET_CODE (x) == PLUS
2766 && GET_CODE (XEXP (x, 0)) == REG
2767 && GET_CODE (XEXP (x, 1)) == REG
2768 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2769 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2770 {
2771 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2772 int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2773 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2774 struct qty_table_elem *x1_ent = &qty_table[x1_q];
2775
2776 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2777 && x0_ent->const_rtx != NULL_RTX
2778 && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2779 && x1_ent->const_rtx != NULL_RTX)
2780 return 0;
2781 }
2782
2783 return rtx_varies_p (x, from_alias);
2784 }
2785 \f
2786 /* Canonicalize an expression:
2787 replace each register reference inside it
2788 with the "oldest" equivalent register.
2789
2790 If INSN is nonzero and we are replacing a pseudo with a hard register
2791 or vice versa, validate_change is used to ensure that INSN remains valid
2792 after we make our substitution. The calls are made with IN_GROUP nonzero
2793 so apply_change_group must be called upon the outermost return from this
2794 function (unless INSN is zero). The result of apply_change_group can
2795 generally be discarded since the changes we are making are optional. */
2796
2797 static rtx
2798 canon_reg (x, insn)
2799 rtx x;
2800 rtx insn;
2801 {
2802 int i;
2803 enum rtx_code code;
2804 const char *fmt;
2805
2806 if (x == 0)
2807 return x;
2808
2809 code = GET_CODE (x);
2810 switch (code)
2811 {
2812 case PC:
2813 case CC0:
2814 case CONST:
2815 case CONST_INT:
2816 case CONST_DOUBLE:
2817 case CONST_VECTOR:
2818 case SYMBOL_REF:
2819 case LABEL_REF:
2820 case ADDR_VEC:
2821 case ADDR_DIFF_VEC:
2822 return x;
2823
2824 case REG:
2825 {
2826 int first;
2827 int q;
2828 struct qty_table_elem *ent;
2829
2830 /* Never replace a hard reg, because hard regs can appear
2831 in more than one machine mode, and we must preserve the mode
2832 of each occurrence. Also, some hard regs appear in
2833 MEMs that are shared and mustn't be altered. Don't try to
2834 replace any reg that maps to a reg of class NO_REGS. */
2835 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2836 || ! REGNO_QTY_VALID_P (REGNO (x)))
2837 return x;
2838
2839 q = REG_QTY (REGNO (x));
2840 ent = &qty_table[q];
2841 first = ent->first_reg;
2842 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2843 : REGNO_REG_CLASS (first) == NO_REGS ? x
2844 : gen_rtx_REG (ent->mode, first));
2845 }
2846
2847 default:
2848 break;
2849 }
2850
2851 fmt = GET_RTX_FORMAT (code);
2852 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2853 {
2854 int j;
2855
2856 if (fmt[i] == 'e')
2857 {
2858 rtx new = canon_reg (XEXP (x, i), insn);
2859 int insn_code;
2860
2861 /* If replacing pseudo with hard reg or vice versa, ensure the
2862 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2863 if (insn != 0 && new != 0
2864 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2865 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2866 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2867 || (insn_code = recog_memoized (insn)) < 0
2868 || insn_data[insn_code].n_dups > 0))
2869 validate_change (insn, &XEXP (x, i), new, 1);
2870 else
2871 XEXP (x, i) = new;
2872 }
2873 else if (fmt[i] == 'E')
2874 for (j = 0; j < XVECLEN (x, i); j++)
2875 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2876 }
2877
2878 return x;
2879 }
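
/* A worked example (illustrative): if pseudos 100 and 105 share a
   quantity whose first_reg is 100, canon_reg rewrites
   (plus:SI (reg 105) (const_int 4)) as (plus:SI (reg 100) (const_int 4)).
   The replacement goes through validate_change only when INSN is given
   and the change might make it invalid, e.g. when a pseudo replaces a
   hard register or vice versa, or the insn has MATCH_DUPs; otherwise
   the rtl is edited in place.  */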
2880 \f
2881 /* LOC is a location within INSN that is an operand address (the contents of
2882 a MEM). Find the best equivalent address to use that is valid for this
2883 insn.
2884
2885 On most CISC machines, complicated address modes are costly, and rtx_cost
2886 is a good approximation for that cost. However, most RISC machines have
2887 only a few (usually only one) memory reference formats. If an address is
2888 valid at all, it is often just as cheap as any other address. Hence, for
2889 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2890 costs of various addresses. For two addresses of equal cost, choose the one
2891 with the highest `rtx_cost' value as that has the potential of eliminating
2892 the most insns. For equal costs, we choose the first in the equivalence
2893 class. Note that we ignore the fact that pseudo registers are cheaper
2894 than hard registers here because we would also prefer the pseudo registers.
2895 */
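
/* For instance (illustrative): if (reg 100) and (plus (reg fp) (const_int 8))
   are equivalent and ADDRESS_COST rates them the same, the PLUS form is
   preferred; folding it directly into the MEM can leave the insn that
   loaded (reg 100) dead, so it can be eliminated.  */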
2896
2897 static void
2898 find_best_addr (insn, loc, mode)
2899 rtx insn;
2900 rtx *loc;
2901 enum machine_mode mode;
2902 {
2903 struct table_elt *elt;
2904 rtx addr = *loc;
2905 #ifdef ADDRESS_COST
2906 struct table_elt *p;
2907 int found_better = 1;
2908 #endif
2909 int save_do_not_record = do_not_record;
2910 int save_hash_arg_in_memory = hash_arg_in_memory;
2911 int addr_volatile;
2912 int regno;
2913 unsigned hash;
2914
2915 /* Do not try to replace constant addresses or addresses of local and
2916 argument slots. These MEM expressions are made only once and inserted
2917 in many instructions, as well as being used to control symbol table
2918 output. It is not safe to clobber them.
2919
2920 There are some uncommon cases where the address is already in a register
2921 for some reason, but we cannot take advantage of that because we have
2922 no easy way to unshare the MEM. In addition, looking up all stack
2923 addresses is costly. */
2924 if ((GET_CODE (addr) == PLUS
2925 && GET_CODE (XEXP (addr, 0)) == REG
2926 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2927 && (regno = REGNO (XEXP (addr, 0)),
2928 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2929 || regno == ARG_POINTER_REGNUM))
2930 || (GET_CODE (addr) == REG
2931 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2932 || regno == HARD_FRAME_POINTER_REGNUM
2933 || regno == ARG_POINTER_REGNUM))
2934 || GET_CODE (addr) == ADDRESSOF
2935 || CONSTANT_ADDRESS_P (addr))
2936 return;
2937
2938 /* If this address is not simply a register, try to fold it. This will
2939 sometimes simplify the expression. Many simplifications
2940 will not be valid, but some, usually applying the associative rule, will
2941 be valid and produce better code. */
2942 if (GET_CODE (addr) != REG)
2943 {
2944 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2945 int addr_folded_cost = address_cost (folded, mode);
2946 int addr_cost = address_cost (addr, mode);
2947
2948 if ((addr_folded_cost < addr_cost
2949 || (addr_folded_cost == addr_cost
2950 /* ??? The rtx_cost comparison is left over from an older
2951 version of this code. It is probably no longer helpful. */
2952 && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2953 || approx_reg_cost (folded) < approx_reg_cost (addr))))
2954 && validate_change (insn, loc, folded, 0))
2955 addr = folded;
2956 }
2957
2958 /* If this address is not in the hash table, we can't look for equivalences
2959 of the whole address. Also, ignore if volatile. */
2960
2961 do_not_record = 0;
2962 hash = HASH (addr, Pmode);
2963 addr_volatile = do_not_record;
2964 do_not_record = save_do_not_record;
2965 hash_arg_in_memory = save_hash_arg_in_memory;
2966
2967 if (addr_volatile)
2968 return;
2969
2970 elt = lookup (addr, hash, Pmode);
2971
2972 #ifndef ADDRESS_COST
2973 if (elt)
2974 {
2975 int our_cost = elt->cost;
2976
2977 /* Find the lowest cost below ours that works. */
2978 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2979 if (elt->cost < our_cost
2980 && (GET_CODE (elt->exp) == REG
2981 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2982 && validate_change (insn, loc,
2983 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2984 return;
2985 }
2986 #else
2987
2988 if (elt)
2989 {
2990 /* We need to find the best (under the criteria documented above) entry
2991 in the class that is valid. We use the `flag' field to indicate
2992 choices that were invalid and iterate until we can't find a better
2993 one that hasn't already been tried. */
2994
2995 for (p = elt->first_same_value; p; p = p->next_same_value)
2996 p->flag = 0;
2997
2998 while (found_better)
2999 {
3000 int best_addr_cost = address_cost (*loc, mode);
3001 int best_rtx_cost = (elt->cost + 1) >> 1;
3002 int exp_cost;
3003 struct table_elt *best_elt = elt;
3004
3005 found_better = 0;
3006 for (p = elt->first_same_value; p; p = p->next_same_value)
3007 if (! p->flag)
3008 {
3009 if ((GET_CODE (p->exp) == REG
3010 || exp_equiv_p (p->exp, p->exp, 1, 0))
3011 && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
3012 || (exp_cost == best_addr_cost
3013 && ((p->cost + 1) >> 1) > best_rtx_cost)))
3014 {
3015 found_better = 1;
3016 best_addr_cost = exp_cost;
3017 best_rtx_cost = (p->cost + 1) >> 1;
3018 best_elt = p;
3019 }
3020 }
3021
3022 if (found_better)
3023 {
3024 if (validate_change (insn, loc,
3025 canon_reg (copy_rtx (best_elt->exp),
3026 NULL_RTX), 0))
3027 return;
3028 else
3029 best_elt->flag = 1;
3030 }
3031 }
3032 }
3033
3034 /* If the address is a binary operation with the first operand a register
3035 and the second a constant, do the same as above, but looking for
3036 equivalences of the register. Then try to simplify before checking for
3037 the best address to use. This catches a few cases: First is when we
3038 have REG+const and the register is another REG+const. We can often merge
3039 the constants and eliminate one insn and one register. It may also be
3040 that a machine has a cheap REG+REG+const. Finally, this improves the
3041 code on the Alpha for unaligned byte stores. */
3042
3043 if (flag_expensive_optimizations
3044 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
3045 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
3046 && GET_CODE (XEXP (*loc, 0)) == REG
3047 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
3048 {
3049 rtx c = XEXP (*loc, 1);
3050
3051 do_not_record = 0;
3052 hash = HASH (XEXP (*loc, 0), Pmode);
3053 do_not_record = save_do_not_record;
3054 hash_arg_in_memory = save_hash_arg_in_memory;
3055
3056 elt = lookup (XEXP (*loc, 0), hash, Pmode);
3057 if (elt == 0)
3058 return;
3059
3060 /* We need to find the best (under the criteria documented above) entry
3061 in the class that is valid. We use the `flag' field to indicate
3062 choices that were invalid and iterate until we can't find a better
3063 one that hasn't already been tried. */
3064
3065 for (p = elt->first_same_value; p; p = p->next_same_value)
3066 p->flag = 0;
3067
3068 while (found_better)
3069 {
3070 int best_addr_cost = address_cost (*loc, mode);
3071 int best_rtx_cost = (COST (*loc) + 1) >> 1;
3072 struct table_elt *best_elt = elt;
3073 rtx best_rtx = *loc;
3074 int count;
3075
3076 /* In the worst case this is an O(n^2) algorithm, so limit our search
3077 to the first 32 elements on the list. This avoids trouble
3078 compiling code with very long basic blocks that can easily
3079 call simplify_gen_binary so many times that we run out of
3080 memory. */
3081
3082 found_better = 0;
3083 for (p = elt->first_same_value, count = 0;
3084 p && count < 32;
3085 p = p->next_same_value, count++)
3086 if (! p->flag
3087 && (GET_CODE (p->exp) == REG
3088 || exp_equiv_p (p->exp, p->exp, 1, 0)))
3089 {
3090 rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
3091 p->exp, c);
3092 int new_cost;
3093 new_cost = address_cost (new, mode);
3094
3095 if (new_cost < best_addr_cost
3096 || (new_cost == best_addr_cost
3097 && (COST (new) + 1) >> 1 > best_rtx_cost))
3098 {
3099 found_better = 1;
3100 best_addr_cost = new_cost;
3101 best_rtx_cost = (COST (new) + 1) >> 1;
3102 best_elt = p;
3103 best_rtx = new;
3104 }
3105 }
3106
3107 if (found_better)
3108 {
3109 if (validate_change (insn, loc,
3110 canon_reg (copy_rtx (best_rtx),
3111 NULL_RTX), 0))
3112 return;
3113 else
3114 best_elt->flag = 1;
3115 }
3116 }
3117 }
3118 #endif
3119 }
3120 \f
3121 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
3122 operation (EQ, NE, GT, etc.), follow it back through the hash table and
3123 find what values are being compared.
3124
3125 *PARG1 and *PARG2 are updated to contain the rtx representing the values
3126 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
3127 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3128 compared to produce cc0.
3129
3130 The return value is the comparison operator and is either CODE itself
3131 or the code corresponding to the inverse of the comparison. */
3132
3133 static enum rtx_code
3134 find_comparison_args (code, parg1, parg2, pmode1, pmode2)
3135 enum rtx_code code;
3136 rtx *parg1, *parg2;
3137 enum machine_mode *pmode1, *pmode2;
3138 {
3139 rtx arg1, arg2;
3140
3141 arg1 = *parg1, arg2 = *parg2;
3142
3143 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
3144
3145 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
3146 {
3147 /* Set nonzero when we find something of interest. */
3148 rtx x = 0;
3149 int reverse_code = 0;
3150 struct table_elt *p = 0;
3151
3152 /* If arg1 is a COMPARE, extract the comparison arguments from it.
3153 On machines with CC0, this is the only case that can occur, since
3154 fold_rtx will return the COMPARE or item being compared with zero
3155 when given CC0. */
3156
3157 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3158 x = arg1;
3159
3160 /* If ARG1 is a comparison operator and CODE is testing for
3161 STORE_FLAG_VALUE, get the inner arguments. */
3162
3163 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
3164 {
3165 #ifdef FLOAT_STORE_FLAG_VALUE
3166 REAL_VALUE_TYPE fsfv;
3167 #endif
3168
3169 if (code == NE
3170 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3171 && code == LT && STORE_FLAG_VALUE == -1)
3172 #ifdef FLOAT_STORE_FLAG_VALUE
3173 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3174 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3175 REAL_VALUE_NEGATIVE (fsfv)))
3176 #endif
3177 )
3178 x = arg1;
3179 else if (code == EQ
3180 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3181 && code == GE && STORE_FLAG_VALUE == -1)
3182 #ifdef FLOAT_STORE_FLAG_VALUE
3183 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3184 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3185 REAL_VALUE_NEGATIVE (fsfv)))
3186 #endif
3187 )
3188 x = arg1, reverse_code = 1;
3189 }
3190
3191 /* ??? We could also check for
3192
3193 (ne (and (eq (...) (const_int 1))) (const_int 0))
3194
3195 and related forms, but let's wait until we see them occurring. */
3196
3197 if (x == 0)
3198 /* Look up ARG1 in the hash table and see if it has an equivalence
3199 that lets us see what is being compared. */
3200 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) & HASH_MASK,
3201 GET_MODE (arg1));
3202 if (p)
3203 {
3204 p = p->first_same_value;
3205
3206 /* If what we compare is already known to be constant, that is as
3207 good as it gets.
3208 We need to break the loop in this case, because otherwise we
3209 can have an infinite loop when looking at a reg that is known
3210 to be a constant which is the same as a comparison of a reg
3211 against zero which appears later in the insn stream, which in
3212 turn is constant and the same as the comparison of the first reg
3213 against zero... */
3214 if (p->is_const)
3215 break;
3216 }
3217
3218 for (; p; p = p->next_same_value)
3219 {
3220 enum machine_mode inner_mode = GET_MODE (p->exp);
3221 #ifdef FLOAT_STORE_FLAG_VALUE
3222 REAL_VALUE_TYPE fsfv;
3223 #endif
3224
3225 /* If the entry isn't valid, skip it. */
3226 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
3227 continue;
3228
3229 if (GET_CODE (p->exp) == COMPARE
3230 /* Another possibility is that this machine has a compare insn
3231 that includes the comparison code. In that case, ARG1 would
3232 be equivalent to a comparison operation that would set ARG1 to
3233 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3234 ORIG_CODE is the actual comparison being done; if it is an EQ,
3235 we must reverse ORIG_CODE. On machine with a negative value
3236 for STORE_FLAG_VALUE, also look at LT and GE operations. */
3237 || ((code == NE
3238 || (code == LT
3239 && GET_MODE_CLASS (inner_mode) == MODE_INT
3240 && (GET_MODE_BITSIZE (inner_mode)
3241 <= HOST_BITS_PER_WIDE_INT)
3242 && (STORE_FLAG_VALUE
3243 & ((HOST_WIDE_INT) 1
3244 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3245 #ifdef FLOAT_STORE_FLAG_VALUE
3246 || (code == LT
3247 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3248 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3249 REAL_VALUE_NEGATIVE (fsfv)))
3250 #endif
3251 )
3252 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
3253 {
3254 x = p->exp;
3255 break;
3256 }
3257 else if ((code == EQ
3258 || (code == GE
3259 && GET_MODE_CLASS (inner_mode) == MODE_INT
3260 && (GET_MODE_BITSIZE (inner_mode)
3261 <= HOST_BITS_PER_WIDE_INT)
3262 && (STORE_FLAG_VALUE
3263 & ((HOST_WIDE_INT) 1
3264 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3265 #ifdef FLOAT_STORE_FLAG_VALUE
3266 || (code == GE
3267 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3268 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3269 REAL_VALUE_NEGATIVE (fsfv)))
3270 #endif
3271 )
3272 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
3273 {
3274 reverse_code = 1;
3275 x = p->exp;
3276 break;
3277 }
3278
3279 /* If this is fp + constant, the equivalent is a better operand since
3280 it may let us predict the value of the comparison. */
3281 else if (NONZERO_BASE_PLUS_P (p->exp))
3282 {
3283 arg1 = p->exp;
3284 continue;
3285 }
3286 }
3287
3288 /* If we didn't find a useful equivalence for ARG1, we are done.
3289 Otherwise, set up for the next iteration. */
3290 if (x == 0)
3291 break;
3292
3293 /* If we need to reverse the comparison, make sure that is
3294 possible -- we can't necessarily infer the value of GE from LT
3295 with floating-point operands. */
3296 if (reverse_code)
3297 {
3298 enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3299 if (reversed == UNKNOWN)
3300 break;
3301 else
3302 code = reversed;
3303 }
3304 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3305 code = GET_CODE (x);
3306 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3307 }
3308
3309 /* Return our results. Return the modes from before fold_rtx
3310 because fold_rtx might produce const_int, and then it's too late. */
3311 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3312 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3313
3314 return code;
3315 }
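
/* A worked sketch (illustrative) on a CC0 machine: given

       (set (cc0) (compare (reg 100) (const_int 0)))
       (set (pc) (if_then_else (ne (cc0) (const_int 0)) ...))

   the caller passes CODE = NE with *PARG1 equal to what cc0 was set
   from; the loop above strips the COMPARE, leaving *PARG1 = (reg 100)
   and *PARG2 = (const_int 0), and returns NE, so the caller can reason
   about the underlying comparison directly.  */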
3316 \f
3317 /* If X is a nontrivial arithmetic operation on an argument
3318 for which a constant value can be determined, return
3319 the result of operating on that value, as a constant.
3320 Otherwise, return X, possibly with one or more operands
3321 modified by recursive calls to this function.
3322
3323 If X is a register whose contents are known, we do NOT
3324 return those contents here. equiv_constant is called to
3325 perform that task.
3326
3327 INSN is the insn that we may be modifying. If it is 0, make a copy
3328 of X before modifying it. */
3329
3330 static rtx
3331 fold_rtx (x, insn)
3332 rtx x;
3333 rtx insn;
3334 {
3335 enum rtx_code code;
3336 enum machine_mode mode;
3337 const char *fmt;
3338 int i;
3339 rtx new = 0;
3340 int copied = 0;
3341 int must_swap = 0;
3342
3343 /* Folded equivalents of first two operands of X. */
3344 rtx folded_arg0;
3345 rtx folded_arg1;
3346
3347 /* Constant equivalents of first three operands of X;
3348 0 when no such equivalent is known. */
3349 rtx const_arg0;
3350 rtx const_arg1;
3351 rtx const_arg2;
3352
3353 /* The mode of the first operand of X. We need this for sign and zero
3354 extends. */
3355 enum machine_mode mode_arg0;
3356
3357 if (x == 0)
3358 return x;
3359
3360 mode = GET_MODE (x);
3361 code = GET_CODE (x);
3362 switch (code)
3363 {
3364 case CONST:
3365 case CONST_INT:
3366 case CONST_DOUBLE:
3367 case CONST_VECTOR:
3368 case SYMBOL_REF:
3369 case LABEL_REF:
3370 case REG:
3371 /* No use simplifying an EXPR_LIST
3372 since they are used only for lists of args
3373 in a function call's REG_EQUAL note. */
3374 case EXPR_LIST:
3375 /* Changing anything inside an ADDRESSOF is incorrect; we don't
3376 want to (e.g.,) make (addressof (const_int 0)) just because
3377 the location is known to be zero. */
3378 case ADDRESSOF:
3379 return x;
3380
3381 #ifdef HAVE_cc0
3382 case CC0:
3383 return prev_insn_cc0;
3384 #endif
3385
3386 case PC:
3387 /* If the next insn is a CODE_LABEL followed by a jump table,
3388 PC's value is a LABEL_REF pointing to that label. That
3389 lets us fold switch statements on the VAX. */
3390 if (insn && GET_CODE (insn) == JUMP_INSN)
3391 {
3392 rtx next = next_nonnote_insn (insn);
3393
3394 if (next && GET_CODE (next) == CODE_LABEL
3395 && NEXT_INSN (next) != 0
3396 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
3397 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
3398 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
3399 return gen_rtx_LABEL_REF (Pmode, next);
3400 }
3401 break;
3402
3403 case SUBREG:
3404 /* See if we previously assigned a constant value to this SUBREG. */
3405 if ((new = lookup_as_function (x, CONST_INT)) != 0
3406 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3407 return new;
3408
3409 /* If this is a paradoxical SUBREG, we have no idea what value the
3410 extra bits would have. However, if the operand is equivalent
3411 to a SUBREG whose operand is the same as our mode, and all the
3412 modes are within a word, we can just use the inner operand
3413 because these SUBREGs just say how to treat the register.
3414
3415 Similarly if we find an integer constant. */
3416
3417 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3418 {
3419 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3420 struct table_elt *elt;
3421
3422 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3423 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3424 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3425 imode)) != 0)
3426 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3427 {
3428 if (CONSTANT_P (elt->exp)
3429 && GET_MODE (elt->exp) == VOIDmode)
3430 return elt->exp;
3431
3432 if (GET_CODE (elt->exp) == SUBREG
3433 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3434 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3435 return copy_rtx (SUBREG_REG (elt->exp));
3436 }
3437
3438 return x;
3439 }
3440
3441 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
3442 We might be able to if the SUBREG is extracting a single word in an
3443 integral mode or extracting the low part. */
3444
3445 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3446 const_arg0 = equiv_constant (folded_arg0);
3447 if (const_arg0)
3448 folded_arg0 = const_arg0;
3449
3450 if (folded_arg0 != SUBREG_REG (x))
3451 {
3452 new = simplify_subreg (mode, folded_arg0,
3453 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3454 if (new)
3455 return new;
3456 }
3457
3458 /* If this is a narrowing SUBREG and our operand is a REG, see if
3459 we can find an equivalence for REG that is an arithmetic operation
3460 in a wider mode where both operands are paradoxical SUBREGs
3461 from objects of our result mode. In that case, we couldn't report
3462 an equivalent value for that operation, since we don't know what the
3463 extra bits will be. But we can find an equivalence for this SUBREG
3464 by folding that operation in the narrow mode. This allows us to
3465 fold arithmetic in narrow modes when the machine only supports
3466 word-sized arithmetic.
3467
3468 Also look for a case where we have a SUBREG whose operand is the
3469 same as our result. If both modes are smaller than a word, we
3470 are simply interpreting a register in different modes and we
3471 can use the inner value. */
3472
3473 if (GET_CODE (folded_arg0) == REG
3474 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
3475 && subreg_lowpart_p (x))
3476 {
3477 struct table_elt *elt;
3478
3479 /* We can use HASH here since we know that canon_hash won't be
3480 called. */
3481 elt = lookup (folded_arg0,
3482 HASH (folded_arg0, GET_MODE (folded_arg0)),
3483 GET_MODE (folded_arg0));
3484
3485 if (elt)
3486 elt = elt->first_same_value;
3487
3488 for (; elt; elt = elt->next_same_value)
3489 {
3490 enum rtx_code eltcode = GET_CODE (elt->exp);
3491
3492 /* Just check for unary and binary operations. */
3493 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
3494 && GET_CODE (elt->exp) != SIGN_EXTEND
3495 && GET_CODE (elt->exp) != ZERO_EXTEND
3496 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3497 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode
3498 && (GET_MODE_CLASS (mode)
3499 == GET_MODE_CLASS (GET_MODE (XEXP (elt->exp, 0)))))
3500 {
3501 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3502
3503 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3504 op0 = fold_rtx (op0, NULL_RTX);
3505
3506 op0 = equiv_constant (op0);
3507 if (op0)
3508 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3509 op0, mode);
3510 }
3511 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
3512 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
3513 && eltcode != DIV && eltcode != MOD
3514 && eltcode != UDIV && eltcode != UMOD
3515 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3516 && eltcode != ROTATE && eltcode != ROTATERT
3517 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3518 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3519 == mode))
3520 || CONSTANT_P (XEXP (elt->exp, 0)))
3521 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3522 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3523 == mode))
3524 || CONSTANT_P (XEXP (elt->exp, 1))))
3525 {
3526 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3527 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3528
3529 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3530 op0 = fold_rtx (op0, NULL_RTX);
3531
3532 if (op0)
3533 op0 = equiv_constant (op0);
3534
3535 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
3536 op1 = fold_rtx (op1, NULL_RTX);
3537
3538 if (op1)
3539 op1 = equiv_constant (op1);
3540
3541 /* If we are looking for the low SImode part of
3542 (ashift:DI c (const_int 32)), it doesn't work
3543 to compute that in SImode, because a 32-bit shift
3544 in SImode is unpredictable. We know the value is 0. */
3545 if (op0 && op1
3546 && GET_CODE (elt->exp) == ASHIFT
3547 && GET_CODE (op1) == CONST_INT
3548 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3549 {
3550 if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3551
3552 /* If the count fits in the inner mode's width,
3553 but exceeds the outer mode's width,
3554 the value will get truncated to 0
3555 by the subreg. */
3556 new = const0_rtx;
3557 else
3558 /* If the count exceeds even the inner mode's width,
3559 don't fold this expression. */
3560 new = 0;
3561 }
3562 else if (op0 && op1)
3563 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
3564 op0, op1);
3565 }
3566
3567 else if (GET_CODE (elt->exp) == SUBREG
3568 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3569 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3570 <= UNITS_PER_WORD)
3571 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3572 new = copy_rtx (SUBREG_REG (elt->exp));
3573
3574 if (new)
3575 return new;
3576 }
3577 }
3578
3579 return x;
3580
3581 case NOT:
3582 case NEG:
3583 /* If we have (NOT Y), see if Y is known to be (NOT Z).
3584 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
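      /* Illustration (hypothetical registers): if (reg:SI 100) is known
	 to be (not:SI (reg:SI 99)), then (not:SI (reg:SI 100)) folds to
	 (reg:SI 99); similarly, if Y is known to be (neg:SI Z),
	 (neg:SI Y) folds to Z.  */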
3585 new = lookup_as_function (XEXP (x, 0), code);
3586 if (new)
3587 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3588 break;
3589
3590 case MEM:
3591 /* If we are not actually processing an insn, don't try to find the
3592 best address. Not only don't we care, but we could modify the
3593 MEM in an invalid way since we have no insn to validate against. */
3594 if (insn != 0)
3595 find_best_addr (insn, &XEXP (x, 0), GET_MODE (x));
3596
3597 {
3598 /* Even if we don't fold in the insn itself,
3599 we can safely do so here, in hopes of getting a constant. */
3600 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3601 rtx base = 0;
3602 HOST_WIDE_INT offset = 0;
3603
3604 if (GET_CODE (addr) == REG
3605 && REGNO_QTY_VALID_P (REGNO (addr)))
3606 {
3607 int addr_q = REG_QTY (REGNO (addr));
3608 struct qty_table_elem *addr_ent = &qty_table[addr_q];
3609
3610 if (GET_MODE (addr) == addr_ent->mode
3611 && addr_ent->const_rtx != NULL_RTX)
3612 addr = addr_ent->const_rtx;
3613 }
3614
3615 /* If address is constant, split it into a base and integer offset. */
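	/* Illustration (the symbol name is hypothetical): the address
	   (const:SI (plus:SI (symbol_ref:SI ("x")) (const_int 4)))
	   splits into base (symbol_ref:SI ("x")) and offset 4.  */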
3616 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3617 base = addr;
3618 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3619 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3620 {
3621 base = XEXP (XEXP (addr, 0), 0);
3622 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3623 }
3624 else if (GET_CODE (addr) == LO_SUM
3625 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3626 base = XEXP (addr, 1);
3627 else if (GET_CODE (addr) == ADDRESSOF)
3628 return change_address (x, VOIDmode, addr);
3629
3630 /* If this is a constant pool reference, we can fold it into its
3631 constant to allow better value tracking. */
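	/* Sketch (pool label and value are hypothetical): a reference
	   such as (mem:SI (symbol_ref ".LC0")), where .LC0 addresses a
	   pool entry holding (const_int 42) in SImode, folds to
	   (const_int 42) when the full entry is loaded.  */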
3632 if (base && GET_CODE (base) == SYMBOL_REF
3633 && CONSTANT_POOL_ADDRESS_P (base))
3634 {
3635 rtx constant = get_pool_constant (base);
3636 enum machine_mode const_mode = get_pool_mode (base);
3637 rtx new;
3638
3639 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3640 constant_pool_entries_cost = COST (constant);
3641
3642 /* If we are loading the full constant, we have an equivalence. */
3643 if (offset == 0 && mode == const_mode)
3644 return constant;
3645
3646 /* If this actually isn't a constant (weird!), we can't do
3647 anything. Otherwise, handle the two most common cases:
3648 extracting a word from a multi-word constant, and extracting
3649 the low-order bits. Other cases don't seem common enough to
3650 worry about. */
3651 if (! CONSTANT_P (constant))
3652 return x;
3653
3654 if (GET_MODE_CLASS (mode) == MODE_INT
3655 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3656 && offset % UNITS_PER_WORD == 0
3657 && (new = operand_subword (constant,
3658 offset / UNITS_PER_WORD,
3659 0, const_mode)) != 0)
3660 return new;
3661
3662 if (((BYTES_BIG_ENDIAN
3663 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3664 || (! BYTES_BIG_ENDIAN && offset == 0))
3665 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
3666 return new;
3667 }
3668
3669 /* If this is a reference to a label at a known position in a jump
3670 table, we also know its value. */
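	/* Sketch (offsets hypothetical): with base (label_ref L), where L
	   is immediately followed by an ADDR_VEC of SImode entries, an
	   offset of 8 selects element 8 / 4 == 2 of the vector.  */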
3671 if (base && GET_CODE (base) == LABEL_REF)
3672 {
3673 rtx label = XEXP (base, 0);
3674 rtx table_insn = NEXT_INSN (label);
3675
3676 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3677 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3678 {
3679 rtx table = PATTERN (table_insn);
3680
3681 if (offset >= 0
3682 && (offset / GET_MODE_SIZE (GET_MODE (table))
3683 < XVECLEN (table, 0)))
3684 return XVECEXP (table, 0,
3685 offset / GET_MODE_SIZE (GET_MODE (table)));
3686 }
3687 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3688 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3689 {
3690 rtx table = PATTERN (table_insn);
3691
3692 if (offset >= 0
3693 && (offset / GET_MODE_SIZE (GET_MODE (table))
3694 < XVECLEN (table, 1)))
3695 {
3696 offset /= GET_MODE_SIZE (GET_MODE (table));
3697 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3698 XEXP (table, 0));
3699
3700 if (GET_MODE (table) != Pmode)
3701 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3702
3703 /* Indicate this is a constant. This isn't a
3704 valid form of CONST, but it will only be used
3705 to fold the next insns and then discarded, so
3706 it should be safe.
3707
3708 Note this expression must be explicitly discarded,
3709 by cse_insn, else it may end up in a REG_EQUAL note
3710 and "escape" to cause problems elsewhere. */
3711 return gen_rtx_CONST (GET_MODE (new), new);
3712 }
3713 }
3714 }
3715
3716 return x;
3717 }
3718
3719 #ifdef NO_FUNCTION_CSE
3720 case CALL:
3721 if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3722 return x;
3723 break;
3724 #endif
3725
3726 case ASM_OPERANDS:
3727 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3728 validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3729 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3730 break;
3731
3732 default:
3733 break;
3734 }
3735
3736 const_arg0 = 0;
3737 const_arg1 = 0;
3738 const_arg2 = 0;
3739 mode_arg0 = VOIDmode;
3740
3741 /* Try folding our operands.
3742 Then see which ones have constant values known. */
3743
3744 fmt = GET_RTX_FORMAT (code);
3745 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3746 if (fmt[i] == 'e')
3747 {
3748 rtx arg = XEXP (x, i);
3749 rtx folded_arg = arg, const_arg = 0;
3750 enum machine_mode mode_arg = GET_MODE (arg);
3751 rtx cheap_arg, expensive_arg;
3752 rtx replacements[2];
3753 int j;
3754 int old_cost = COST_IN (XEXP (x, i), code);
3755
3756 /* Most arguments are cheap, so handle them specially. */
3757 switch (GET_CODE (arg))
3758 {
3759 case REG:
3760 /* This is the same as calling equiv_constant; it is duplicated
3761 here for speed. */
3762 if (REGNO_QTY_VALID_P (REGNO (arg)))
3763 {
3764 int arg_q = REG_QTY (REGNO (arg));
3765 struct qty_table_elem *arg_ent = &qty_table[arg_q];
3766
3767 if (arg_ent->const_rtx != NULL_RTX
3768 && GET_CODE (arg_ent->const_rtx) != REG
3769 && GET_CODE (arg_ent->const_rtx) != PLUS)
3770 const_arg
3771 = gen_lowpart_if_possible (GET_MODE (arg),
3772 arg_ent->const_rtx);
3773 }
3774 break;
3775
3776 case CONST:
3777 case CONST_INT:
3778 case SYMBOL_REF:
3779 case LABEL_REF:
3780 case CONST_DOUBLE:
3781 case CONST_VECTOR:
3782 const_arg = arg;
3783 break;
3784
3785 #ifdef HAVE_cc0
3786 case CC0:
3787 folded_arg = prev_insn_cc0;
3788 mode_arg = prev_insn_cc0_mode;
3789 const_arg = equiv_constant (folded_arg);
3790 break;
3791 #endif
3792
3793 default:
3794 folded_arg = fold_rtx (arg, insn);
3795 const_arg = equiv_constant (folded_arg);
3796 }
3797
3798 /* For the first three operands, see if the operand
3799 is constant or equivalent to a constant. */
3800 switch (i)
3801 {
3802 case 0:
3803 folded_arg0 = folded_arg;
3804 const_arg0 = const_arg;
3805 mode_arg0 = mode_arg;
3806 break;
3807 case 1:
3808 folded_arg1 = folded_arg;
3809 const_arg1 = const_arg;
3810 break;
3811 case 2:
3812 const_arg2 = const_arg;
3813 break;
3814 }
3815
3816 /* Pick the least expensive of the folded argument and an
3817 equivalent constant argument. */
3818 if (const_arg == 0 || const_arg == folded_arg
3819 || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
3820 cheap_arg = folded_arg, expensive_arg = const_arg;
3821 else
3822 cheap_arg = const_arg, expensive_arg = folded_arg;
3823
3824 /* Try to replace the operand with the cheapest of the two
3825 possibilities. If it doesn't work and this is either of the first
3826 two operands of a commutative operation, try swapping them.
3827 If THAT fails, try the more expensive, provided it is cheaper
3828 than what is already there. */
3829
3830 if (cheap_arg == XEXP (x, i))
3831 continue;
3832
3833 if (insn == 0 && ! copied)
3834 {
3835 x = copy_rtx (x);
3836 copied = 1;
3837 }
3838
3839 /* Order the replacements from cheapest to most expensive. */
3840 replacements[0] = cheap_arg;
3841 replacements[1] = expensive_arg;
3842
3843 for (j = 0; j < 2 && replacements[j]; j++)
3844 {
3845 int new_cost = COST_IN (replacements[j], code);
3846
3847 /* Stop if what existed before was cheaper. Prefer constants
3848 in the case of a tie. */
3849 if (new_cost > old_cost
3850 || (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3851 break;
3852
3853 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3854 break;
3855
3856 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c'
3857 || code == LTGT || code == UNEQ || code == ORDERED
3858 || code == UNORDERED)
3859 {
3860 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3861 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3862
3863 if (apply_change_group ())
3864 {
3865 /* Swap them back to be invalid so that this loop can
3866 continue and flag them to be swapped back later. */
3867 rtx tem;
3868
3869 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3870 XEXP (x, 1) = tem;
3871 must_swap = 1;
3872 break;
3873 }
3874 }
3875 }
3876 }
3877
3878 else
3879 {
3880 if (fmt[i] == 'E')
3881 /* Don't try to fold inside of a vector of expressions.
3882 Doing nothing is harmless. */
3883 {;}
3884 }
3885
3886 /* If a commutative operation, place a constant integer as the second
3887 operand unless the first operand is also a constant integer. Otherwise,
3888 place any constant second unless the first operand is also a constant. */
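   E.g. (hypothetical register): (plus:SI (const_int 4) (reg:SI 100))
   is rewritten as (plus:SI (reg:SI 100) (const_int 4)).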
3889
3890 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c'
3891 || code == LTGT || code == UNEQ || code == ORDERED
3892 || code == UNORDERED)
3893 {
3894 if (must_swap || (const_arg0
3895 && (const_arg1 == 0
3896 || (GET_CODE (const_arg0) == CONST_INT
3897 && GET_CODE (const_arg1) != CONST_INT))))
3898 {
3899 rtx tem = XEXP (x, 0);
3900
3901 if (insn == 0 && ! copied)
3902 {
3903 x = copy_rtx (x);
3904 copied = 1;
3905 }
3906
3907 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3908 validate_change (insn, &XEXP (x, 1), tem, 1);
3909 if (apply_change_group ())
3910 {
3911 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3912 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3913 }
3914 }
3915 }
3916
3917 /* If X is an arithmetic operation, see if we can simplify it. */
3918
3919 switch (GET_RTX_CLASS (code))
3920 {
3921 case '1':
3922 {
3923 int is_const = 0;
3924
3925 /* We can't simplify extension ops unless we know the
3926 original mode. */
3927 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3928 && mode_arg0 == VOIDmode)
3929 break;
3930
3931 /* If we had a CONST, strip it off and put it back later if we
3932 fold. */
3933 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3934 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3935
3936 new = simplify_unary_operation (code, mode,
3937 const_arg0 ? const_arg0 : folded_arg0,
3938 mode_arg0);
3939 if (new != 0 && is_const)
3940 new = gen_rtx_CONST (mode, new);
3941 }
3942 break;
3943
3944 case '<':
3945 /* See what items are actually being compared and set FOLDED_ARG[01]
3946 to those values and CODE to the actual comparison code. If any are
3947 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3948 do anything if both operands are already known to be constant. */
3949
3950 if (const_arg0 == 0 || const_arg1 == 0)
3951 {
3952 struct table_elt *p0, *p1;
3953 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3954 enum machine_mode mode_arg1;
3955
3956 #ifdef FLOAT_STORE_FLAG_VALUE
3957 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3958 {
3959 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3960 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3961 false_rtx = CONST0_RTX (mode);
3962 }
3963 #endif
3964
3965 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3966 &mode_arg0, &mode_arg1);
3967 const_arg0 = equiv_constant (folded_arg0);
3968 const_arg1 = equiv_constant (folded_arg1);
3969
3970 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3971 what kinds of things are being compared, so we can't do
3972 anything with this comparison. */
3973
3974 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3975 break;
3976
3977 /* If we do not now have two constants being compared, see
3978 if we can nevertheless deduce some things about the
3979 comparison. */
3980 if (const_arg0 == 0 || const_arg1 == 0)
3981 {
3982 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or
3983 non-explicit constant? These aren't zero, but we
3984 don't know their sign. */
3985 if (const_arg1 == const0_rtx
3986 && (NONZERO_BASE_PLUS_P (folded_arg0)
3987 #if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
3988 come out as 0. */
3989 || GET_CODE (folded_arg0) == SYMBOL_REF
3990 #endif
3991 || GET_CODE (folded_arg0) == LABEL_REF
3992 || GET_CODE (folded_arg0) == CONST))
3993 {
3994 if (code == EQ)
3995 return false_rtx;
3996 else if (code == NE)
3997 return true_rtx;
3998 }
3999
4000 /* See if the two operands are the same. */
4001
4002 if (folded_arg0 == folded_arg1
4003 || (GET_CODE (folded_arg0) == REG
4004 && GET_CODE (folded_arg1) == REG
4005 && (REG_QTY (REGNO (folded_arg0))
4006 == REG_QTY (REGNO (folded_arg1))))
4007 || ((p0 = lookup (folded_arg0,
4008 (safe_hash (folded_arg0, mode_arg0)
4009 & HASH_MASK), mode_arg0))
4010 && (p1 = lookup (folded_arg1,
4011 (safe_hash (folded_arg1, mode_arg0)
4012 & HASH_MASK), mode_arg0))
4013 && p0->first_same_value == p1->first_same_value))
4014 {
4015 /* Sadly two equal NaNs are not equivalent. */
4016 if (!HONOR_NANS (mode_arg0))
4017 return ((code == EQ || code == LE || code == GE
4018 || code == LEU || code == GEU || code == UNEQ
4019 || code == UNLE || code == UNGE
4020 || code == ORDERED)
4021 ? true_rtx : false_rtx);
4022 		 /* Handle the FP comparisons we can still resolve.  */
4023 if (code == UNEQ || code == UNLE || code == UNGE)
4024 return true_rtx;
4025 if (code == LTGT || code == LT || code == GT)
4026 return false_rtx;
4027 }
4028
4029 /* If FOLDED_ARG0 is a register, see if the comparison we are
4030 doing now is either the same as we did before or the reverse
4031 (we only check the reverse if not floating-point). */
4032 else if (GET_CODE (folded_arg0) == REG)
4033 {
4034 int qty = REG_QTY (REGNO (folded_arg0));
4035
4036 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
4037 {
4038 struct qty_table_elem *ent = &qty_table[qty];
4039
4040 if ((comparison_dominates_p (ent->comparison_code, code)
4041 || (! FLOAT_MODE_P (mode_arg0)
4042 && comparison_dominates_p (ent->comparison_code,
4043 reverse_condition (code))))
4044 && (rtx_equal_p (ent->comparison_const, folded_arg1)
4045 || (const_arg1
4046 && rtx_equal_p (ent->comparison_const,
4047 const_arg1))
4048 || (GET_CODE (folded_arg1) == REG
4049 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
4050 return (comparison_dominates_p (ent->comparison_code, code)
4051 ? true_rtx : false_rtx);
4052 }
4053 }
4054 }
4055 }
4056
4057 /* If we are comparing against zero, see if the first operand is
4058 equivalent to an IOR with a constant. If so, we may be able to
4059 determine the result of this comparison. */
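      /* Illustration (hypothetical register): if the first operand is
	 known equivalent to (ior:SI (reg:SI 99) (const_int 4)), it cannot
	 be zero, so EQ against zero is false and NE is true; if instead
	 the IOR'd constant has the sign bit set, the value is known
	 negative, so LT/LE resolve to true and GT/GE to false.  */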
4060
4061 if (const_arg1 == const0_rtx)
4062 {
4063 rtx y = lookup_as_function (folded_arg0, IOR);
4064 rtx inner_const;
4065
4066 if (y != 0
4067 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
4068 && GET_CODE (inner_const) == CONST_INT
4069 && INTVAL (inner_const) != 0)
4070 {
4071 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
4072 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
4073 && (INTVAL (inner_const)
4074 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
4075 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
4076
4077 #ifdef FLOAT_STORE_FLAG_VALUE
4078 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
4079 {
4080 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
4081 (FLOAT_STORE_FLAG_VALUE (mode), mode));
4082 false_rtx = CONST0_RTX (mode);
4083 }
4084 #endif
4085
4086 switch (code)
4087 {
4088 case EQ:
4089 return false_rtx;
4090 case NE:
4091 return true_rtx;
4092 case LT: case LE:
4093 if (has_sign)
4094 return true_rtx;
4095 break;
4096 case GT: case GE:
4097 if (has_sign)
4098 return false_rtx;
4099 break;
4100 default:
4101 break;
4102 }
4103 }
4104 }
4105
4106 new = simplify_relational_operation (code,
4107 (mode_arg0 != VOIDmode
4108 ? mode_arg0
4109 : (GET_MODE (const_arg0
4110 ? const_arg0
4111 : folded_arg0)
4112 != VOIDmode)
4113 ? GET_MODE (const_arg0
4114 ? const_arg0
4115 : folded_arg0)
4116 : GET_MODE (const_arg1
4117 ? const_arg1
4118 : folded_arg1)),
4119 const_arg0 ? const_arg0 : folded_arg0,
4120 const_arg1 ? const_arg1 : folded_arg1);
4121 #ifdef FLOAT_STORE_FLAG_VALUE
4122 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
4123 {
4124 if (new == const0_rtx)
4125 new = CONST0_RTX (mode);
4126 else
4127 new = (CONST_DOUBLE_FROM_REAL_VALUE
4128 (FLOAT_STORE_FLAG_VALUE (mode), mode));
4129 }
4130 #endif
4131 break;
4132
4133 case '2':
4134 case 'c':
4135 switch (code)
4136 {
4137 case PLUS:
4138 /* If the second operand is a LABEL_REF, see if the first is a MINUS
4139 with that LABEL_REF as its second operand. If so, the result is
4140 the first operand of that MINUS. This handles switches with an
4141 ADDR_DIFF_VEC table. */
4142 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4143 {
4144 rtx y
4145 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
4146 : lookup_as_function (folded_arg0, MINUS);
4147
4148 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4149 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4150 return XEXP (y, 0);
4151
4152 /* Now try for a CONST of a MINUS like the above. */
4153 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
4154 : lookup_as_function (folded_arg0, CONST))) != 0
4155 && GET_CODE (XEXP (y, 0)) == MINUS
4156 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4157 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
4158 return XEXP (XEXP (y, 0), 0);
4159 }
4160
4161 /* Likewise if the operands are in the other order. */
4162 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
4163 {
4164 rtx y
4165 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
4166 : lookup_as_function (folded_arg1, MINUS);
4167
4168 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4169 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
4170 return XEXP (y, 0);
4171
4172 /* Now try for a CONST of a MINUS like the above. */
4173 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
4174 : lookup_as_function (folded_arg1, CONST))) != 0
4175 && GET_CODE (XEXP (y, 0)) == MINUS
4176 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4177 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
4178 return XEXP (XEXP (y, 0), 0);
4179 }
4180
4181 /* If second operand is a register equivalent to a negative
4182 CONST_INT, see if we can find a register equivalent to the
4183 positive constant. Make a MINUS if so. Don't do this for
4184 a non-negative constant since we might then alternate between
4185 choosing positive and negative constants. Having the positive
4186 constant previously-used is the more common case. Be sure
4187 the resulting constant is non-negative; if const_arg1 were
4188 the smallest negative number this would overflow: depending
4189 on the mode, this would either just be the same value (and
4190 hence not save anything) or be incorrect. */
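	  /* Sketch (hypothetical registers): given
	     (plus:SI (reg:SI 100) (reg:SI 102)) where (reg:SI 102) is
	     known to hold (const_int -4), if some (reg:SI 101) is known
	     to hold (const_int 4), we can produce
	     (minus:SI (reg:SI 100) (reg:SI 101)).  */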
4191 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4192 && INTVAL (const_arg1) < 0
4193 /* This used to test
4194
4195 -INTVAL (const_arg1) >= 0
4196
4197 		 But the Sun V5.0 compilers mis-compiled that test. So
4198 instead we test for the problematic value in a more direct
4199 manner and hope the Sun compilers get it correct. */
4200 && INTVAL (const_arg1) !=
4201 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
4202 && GET_CODE (folded_arg1) == REG)
4203 {
4204 rtx new_const = GEN_INT (-INTVAL (const_arg1));
4205 struct table_elt *p
4206 = lookup (new_const, safe_hash (new_const, mode) & HASH_MASK,
4207 mode);
4208
4209 if (p)
4210 for (p = p->first_same_value; p; p = p->next_same_value)
4211 if (GET_CODE (p->exp) == REG)
4212 return simplify_gen_binary (MINUS, mode, folded_arg0,
4213 canon_reg (p->exp, NULL_RTX));
4214 }
4215 goto from_plus;
4216
4217 case MINUS:
4218 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4219 If so, produce (PLUS Z C2-C). */
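	  /* E.g. (hypothetical values): with Y known to be
	     (plus:SI (reg:SI 100) (const_int 12)) and C == 4, the result
	     is (plus:SI (reg:SI 100) (const_int 8)).  */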
4220 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4221 {
4222 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4223 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
4224 return fold_rtx (plus_constant (copy_rtx (y),
4225 -INTVAL (const_arg1)),
4226 NULL_RTX);
4227 }
4228
4229 /* Fall through. */
4230
4231 from_plus:
4232 case SMIN: case SMAX: case UMIN: case UMAX:
4233 case IOR: case AND: case XOR:
4234 case MULT:
4235 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
4236 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4237 is known to be of similar form, we may be able to replace the
4238 operation with a combined operation. This may eliminate the
4239 intermediate operation if every use is simplified in this way.
4240 Note that the similar optimization done by combine.c only works
4241 if the intermediate operation's result has only one reference. */
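	   Sketch (hypothetical registers): if (reg:SI 100) is known to be
	   (ashift:SI (reg:SI 99) (const_int 2)), then
	   (ashift:SI (reg:SI 100) (const_int 3)) can be rewritten as
	   (ashift:SI (reg:SI 99) (const_int 5)), composing the two shift
	   counts with PLUS.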
4242
4243 if (GET_CODE (folded_arg0) == REG
4244 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4245 {
4246 int is_shift
4247 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4248 rtx y = lookup_as_function (folded_arg0, code);
4249 rtx inner_const;
4250 enum rtx_code associate_code;
4251 rtx new_const;
4252
4253 if (y == 0
4254 || 0 == (inner_const
4255 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4256 || GET_CODE (inner_const) != CONST_INT
4257 /* If we have compiled a statement like
4258 "if (x == (x & mask1))", and now are looking at
4259 "x & mask2", we will have a case where the first operand
4260 of Y is the same as our first operand. Unless we detect
4261 this case, an infinite loop will result. */
4262 || XEXP (y, 0) == folded_arg0)
4263 break;
4264
4265 /* Don't associate these operations if they are a PLUS with the
4266 same constant and it is a power of two. These might be doable
4267 with a pre- or post-increment. Similarly for two subtracts of
4268 identical powers of two with post decrement. */
4269
4270 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
4271 && ((HAVE_PRE_INCREMENT
4272 && exact_log2 (INTVAL (const_arg1)) >= 0)
4273 || (HAVE_POST_INCREMENT
4274 && exact_log2 (INTVAL (const_arg1)) >= 0)
4275 || (HAVE_PRE_DECREMENT
4276 && exact_log2 (- INTVAL (const_arg1)) >= 0)
4277 || (HAVE_POST_DECREMENT
4278 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4279 break;
4280
4281 /* Compute the code used to compose the constants. For example,
4282 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS. */
4283
4284 associate_code = (is_shift || code == MINUS ? PLUS : code);
4285
4286 new_const = simplify_binary_operation (associate_code, mode,
4287 const_arg1, inner_const);
4288
4289 if (new_const == 0)
4290 break;
4291
4292 /* If we are associating shift operations, don't let this
4293 produce a shift of the size of the object or larger.
4294 This could occur when we follow a sign-extend by a right
4295 shift on a machine that does a sign-extend as a pair
4296 of shifts. */
4297
4298 if (is_shift && GET_CODE (new_const) == CONST_INT
4299 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4300 {
4301 /* As an exception, we can turn an ASHIFTRT of this
4302 form into a shift of the number of bits - 1. */
4303 if (code == ASHIFTRT)
4304 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4305 else
4306 break;
4307 }
4308
4309 y = copy_rtx (XEXP (y, 0));
4310
4311 /* If Y contains our first operand (the most common way this
4312 		 can happen is if Y is a MEM), we would go into an infinite
4313 loop if we tried to fold it. So don't in that case. */
4314
4315 if (! reg_mentioned_p (folded_arg0, y))
4316 y = fold_rtx (y, insn);
4317
4318 return simplify_gen_binary (code, mode, y, new_const);
4319 }
4320 break;
4321
4322 case DIV: case UDIV:
4323 /* ??? The associative optimization performed immediately above is
4324 also possible for DIV and UDIV using associate_code of MULT.
4325 However, we would need extra code to verify that the
4326 multiplication does not overflow, that is, there is no overflow
4327 in the calculation of new_const. */
4328 break;
4329
4330 default:
4331 break;
4332 }
4333
4334 new = simplify_binary_operation (code, mode,
4335 const_arg0 ? const_arg0 : folded_arg0,
4336 const_arg1 ? const_arg1 : folded_arg1);
4337 break;
4338
4339 case 'o':
4340 /* (lo_sum (high X) X) is simply X. */
4341 if (code == LO_SUM && const_arg0 != 0
4342 && GET_CODE (const_arg0) == HIGH
4343 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4344 return const_arg1;
4345 break;
4346
4347 case '3':
4348 case 'b':
4349 new = simplify_ternary_operation (code, mode, mode_arg0,
4350 const_arg0 ? const_arg0 : folded_arg0,
4351 const_arg1 ? const_arg1 : folded_arg1,
4352 const_arg2 ? const_arg2 : XEXP (x, 2));
4353 break;
4354
4355 case 'x':
4356 /* Always eliminate CONSTANT_P_RTX at this stage. */
4357 if (code == CONSTANT_P_RTX)
4358 return (const_arg0 ? const1_rtx : const0_rtx);
4359 break;
4360 }
4361
4362 return new ? new : x;
4363 }
4364 \f
4365 /* Return a constant value currently equivalent to X.
4366 Return 0 if we don't know one. */
4367
4368 static rtx
4369 equiv_constant (x)
4370 rtx x;
4371 {
4372 if (GET_CODE (x) == REG
4373 && REGNO_QTY_VALID_P (REGNO (x)))
4374 {
4375 int x_q = REG_QTY (REGNO (x));
4376 struct qty_table_elem *x_ent = &qty_table[x_q];
4377
4378 if (x_ent->const_rtx)
4379 x = gen_lowpart_if_possible (GET_MODE (x), x_ent->const_rtx);
4380 }
4381
4382 if (x == 0 || CONSTANT_P (x))
4383 return x;
4384
4385 /* If X is a MEM, try to fold it outside the context of any insn to see if
4386 it might be equivalent to a constant. That handles the case where it
4387 is a constant-pool reference. Then try to look it up in the hash table
4388 in case it is something whose value we have seen before. */
4389
4390 if (GET_CODE (x) == MEM)
4391 {
4392 struct table_elt *elt;
4393
4394 x = fold_rtx (x, NULL_RTX);
4395 if (CONSTANT_P (x))
4396 return x;
4397
4398 elt = lookup (x, safe_hash (x, GET_MODE (x)) & HASH_MASK, GET_MODE (x));
4399 if (elt == 0)
4400 return 0;
4401
4402 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4403 if (elt->is_const && CONSTANT_P (elt->exp))
4404 return elt->exp;
4405 }
4406
4407 return 0;
4408 }
4409 \f
4410 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4411 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4412 least-significant part of X.
4413 MODE specifies how big a part of X to return.
4414
4415 If the requested operation cannot be done, 0 is returned.
4416
4417 This is similar to gen_lowpart in emit-rtl.c. */
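 Usage sketch (hypothetical operands): requesting the QImode low part
 of a pseudo (reg:SI 100) yields (subreg:QI (reg:SI 100) 0) via
 gen_lowpart_common on a typical little-endian target; for a MEM the
 address is byte-adjusted for endianness instead, and 0 is returned
 if the adjusted address is not valid.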
4418
4419 rtx
4420 gen_lowpart_if_possible (mode, x)
4421 enum machine_mode mode;
4422 rtx x;
4423 {
4424 rtx result = gen_lowpart_common (mode, x);
4425
4426 if (result)
4427 return result;
4428 else if (GET_CODE (x) == MEM)
4429 {
4430 /* This is the only other case we handle. */
4431 int offset = 0;
4432 rtx new;
4433
4434 if (WORDS_BIG_ENDIAN)
4435 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4436 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4437 if (BYTES_BIG_ENDIAN)
4438 /* Adjust the address so that the address-after-the-data is
4439 unchanged. */
4440 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4441 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
4442
4443 new = adjust_address_nv (x, mode, offset);
4444 if (! memory_address_p (mode, XEXP (new, 0)))
4445 return 0;
4446
4447 return new;
4448 }
4449 else
4450 return 0;
4451 }
4452 \f
4453 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4454 branch. It will be zero if not.
4455
4456 In certain cases, this can cause us to add an equivalence. For example,
4457 if we are following the taken case of
4458 if (i == 2)
4459 we can add the fact that `i' and `2' are now equivalent.
4460
4461 In any case, we can record that this comparison was passed. If the same
4462 comparison is seen later, we will know its value. */
4463
4464 static void
4465 record_jump_equiv (insn, taken)
4466 rtx insn;
4467 int taken;
4468 {
4469 int cond_known_true;
4470 rtx op0, op1;
4471 rtx set;
4472 enum machine_mode mode, mode0, mode1;
4473 int reversed_nonequality = 0;
4474 enum rtx_code code;
4475
4476 /* Ensure this is the right kind of insn. */
4477 if (! any_condjump_p (insn))
4478 return;
4479 set = pc_set (insn);
4480
4481 /* See if this jump condition is known true or false. */
4482 if (taken)
4483 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
4484 else
4485 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
4486
4487 /* Get the type of comparison being done and the operands being compared.
4488 If we had to reverse a non-equality condition, record that fact so we
4489 know that it isn't valid for floating-point. */
4490 code = GET_CODE (XEXP (SET_SRC (set), 0));
4491 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4492 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4493
4494 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4495 if (! cond_known_true)
4496 {
4497 code = reversed_comparison_code_parts (code, op0, op1, insn);
4498
4499 /* Don't remember if we can't find the inverse. */
4500 if (code == UNKNOWN)
4501 return;
4502 }
4503
4504 /* The mode is the mode of the non-constant. */
4505 mode = mode0;
4506 if (mode1 != VOIDmode)
4507 mode = mode1;
4508
4509 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4510 }
4511
4512 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4513 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4514 Make any useful entries we can with that information. Called from
4515 above function and called recursively. */
4516
4517 static void
4518 record_jump_cond (code, mode, op0, op1, reversed_nonequality)
4519 enum rtx_code code;
4520 enum machine_mode mode;
4521 rtx op0, op1;
4522 int reversed_nonequality;
4523 {
4524 unsigned op0_hash, op1_hash;
4525 int op0_in_memory, op1_in_memory;
4526 struct table_elt *op0_elt, *op1_elt;
4527
4528 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4529 we know that they are also equal in the smaller mode (this is also
4530 true for all smaller modes whether or not there is a SUBREG, but
4531 is not worth testing for with no SUBREG). */
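 /* Sketch (hypothetical registers): from knowing
    (subreg:DI (reg:SI 100) 0) == (reg:DI 101), we may also record
    that (reg:SI 100) equals the SImode low part of (reg:DI 101).  */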
4532
4533 /* Note that GET_MODE (op0) may not equal MODE. */
4534 if (code == EQ && GET_CODE (op0) == SUBREG
4535 && (GET_MODE_SIZE (GET_MODE (op0))
4536 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4537 {
4538 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4539 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4540
4541 record_jump_cond (code, mode, SUBREG_REG (op0),
4542 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4543 reversed_nonequality);
4544 }
4545
4546 if (code == EQ && GET_CODE (op1) == SUBREG
4547 && (GET_MODE_SIZE (GET_MODE (op1))
4548 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4549 {
4550 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4551 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4552
4553 record_jump_cond (code, mode, SUBREG_REG (op1),
4554 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4555 reversed_nonequality);
4556 }
4557
4558 /* Similarly, if this is an NE comparison, and either is a SUBREG
4559 making a smaller mode, we know the whole thing is also NE. */
4560
4561 /* Note that GET_MODE (op0) may not equal MODE;
4562 if we test MODE instead, we can get an infinite recursion
4563 alternating between two modes each wider than MODE. */
4564
4565 if (code == NE && GET_CODE (op0) == SUBREG
4566 && subreg_lowpart_p (op0)
4567 && (GET_MODE_SIZE (GET_MODE (op0))
4568 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4569 {
4570 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4571 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4572
4573 record_jump_cond (code, mode, SUBREG_REG (op0),
4574 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4575 reversed_nonequality);
4576 }
4577
4578 if (code == NE && GET_CODE (op1) == SUBREG
4579 && subreg_lowpart_p (op1)
4580 && (GET_MODE_SIZE (GET_MODE (op1))
4581 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4582 {
4583 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4584 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4585
4586 record_jump_cond (code, mode, SUBREG_REG (op1),
4587 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4588 reversed_nonequality);
4589 }
4590
4591 /* Hash both operands. */
4592
4593 do_not_record = 0;
4594 hash_arg_in_memory = 0;
4595 op0_hash = HASH (op0, mode);
4596 op0_in_memory = hash_arg_in_memory;
4597
4598 if (do_not_record)
4599 return;
4600
4601 do_not_record = 0;
4602 hash_arg_in_memory = 0;
4603 op1_hash = HASH (op1, mode);
4604 op1_in_memory = hash_arg_in_memory;
4605
4606 if (do_not_record)
4607 return;
4608
4609 /* Look up both operands. */
4610 op0_elt = lookup (op0, op0_hash, mode);
4611 op1_elt = lookup (op1, op1_hash, mode);
4612
4613 /* If both operands are already equivalent or if they are not in the
4614 table but are identical, do nothing. */
4615 if ((op0_elt != 0 && op1_elt != 0
4616 && op0_elt->first_same_value == op1_elt->first_same_value)
4617 || op0 == op1 || rtx_equal_p (op0, op1))
4618 return;
4619
4620 /* If we aren't setting two things equal, all we can do is save this
4621 comparison. Similarly if this is floating-point. In the latter
4622 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4623 If we record the equality, we might inadvertently delete code
4624 whose intent was to change -0 to +0. */
4625
4626 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4627 {
4628 struct qty_table_elem *ent;
4629 int qty;
4630
4631 /* If we reversed a floating-point comparison, if OP0 is not a
4632 register, or if OP1 is neither a register nor a constant, we can't
4633 do anything. */
4634
4635 if (GET_CODE (op1) != REG)
4636 op1 = equiv_constant (op1);
4637
4638 if ((reversed_nonequality && FLOAT_MODE_P (mode))
4639 || GET_CODE (op0) != REG || op1 == 0)
4640 return;
4641
4642 /* Put OP0 in the hash table if it isn't already. This gives it a
4643 new quantity number. */
4644 if (op0_elt == 0)
4645 {
4646 if (insert_regs (op0, NULL, 0))
4647 {
4648 rehash_using_reg (op0);
4649 op0_hash = HASH (op0, mode);
4650
4651 /* If OP0 is contained in OP1, this changes its hash code
4652 as well. Faster to rehash than to check, except
4653 for the simple case of a constant. */
4654 if (! CONSTANT_P (op1))
4655 	    op1_hash = HASH (op1, mode);
4656 }
4657
4658 op0_elt = insert (op0, NULL, op0_hash, mode);
4659 op0_elt->in_memory = op0_in_memory;
4660 }
4661
4662 qty = REG_QTY (REGNO (op0));
4663 ent = &qty_table[qty];
4664
4665 ent->comparison_code = code;
4666 if (GET_CODE (op1) == REG)
4667 {
4668 /* Look it up again--in case op0 and op1 are the same. */
4669 op1_elt = lookup (op1, op1_hash, mode);
4670
4671 /* Put OP1 in the hash table so it gets a new quantity number. */
4672 if (op1_elt == 0)
4673 {
4674 if (insert_regs (op1, NULL, 0))
4675 {
4676 rehash_using_reg (op1);
4677 op1_hash = HASH (op1, mode);
4678 }
4679
4680 op1_elt = insert (op1, NULL, op1_hash, mode);
4681 op1_elt->in_memory = op1_in_memory;
4682 }
4683
4684 ent->comparison_const = NULL_RTX;
4685 ent->comparison_qty = REG_QTY (REGNO (op1));
4686 }
4687 else
4688 {
4689 ent->comparison_const = op1;
4690 ent->comparison_qty = -1;
4691 }
4692
4693 return;
4694 }
4695
4696 /* If either side is still missing an equivalence, make it now,
4697 then merge the equivalences. */
4698
4699 if (op0_elt == 0)
4700 {
4701 if (insert_regs (op0, NULL, 0))
4702 {
4703 rehash_using_reg (op0);
4704 op0_hash = HASH (op0, mode);
4705 }
4706
4707 op0_elt = insert (op0, NULL, op0_hash, mode);
4708 op0_elt->in_memory = op0_in_memory;
4709 }
4710
4711 if (op1_elt == 0)
4712 {
4713 if (insert_regs (op1, NULL, 0))
4714 {
4715 rehash_using_reg (op1);
4716 op1_hash = HASH (op1, mode);
4717 }
4718
4719 op1_elt = insert (op1, NULL, op1_hash, mode);
4720 op1_elt->in_memory = op1_in_memory;
4721 }
4722
4723 merge_equiv_classes (op0_elt, op1_elt);
4724 last_jump_equiv_class = op0_elt;
4725 }
4726 \f
4727 /* CSE processing for one instruction.
4728 First simplify sources and addresses of all assignments
4729 in the instruction, using previously-computed equivalent values.
4730 Then install the new sources and destinations in the table
4731 of available values.
4732
4733 If LIBCALL_INSN is nonzero, don't record any equivalence made in
4734 the insn. It means that INSN is inside a libcall block. In this
4735 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
4736
4737 /* Data on one SET contained in the instruction. */
4738
4739 struct set
4740 {
4741 /* The SET rtx itself. */
4742 rtx rtl;
4743 /* The SET_SRC of the rtx (the original value, if it is changing). */
4744 rtx src;
4745 /* The hash-table element for the SET_SRC of the SET. */
4746 struct table_elt *src_elt;
4747 /* Hash value for the SET_SRC. */
4748 unsigned src_hash;
4749 /* Hash value for the SET_DEST. */
4750 unsigned dest_hash;
4751 /* The SET_DEST, with SUBREG, etc., stripped. */
4752 rtx inner_dest;
4753 /* Nonzero if the SET_SRC is in memory. */
4754 char src_in_memory;
4755 /* Nonzero if the SET_SRC contains something
4756 whose value cannot be predicted and understood. */
4757 char src_volatile;
4758 /* Original machine mode, in case it becomes a CONST_INT. */
4759 enum machine_mode mode;
4760 /* A constant equivalent for SET_SRC, if any. */
4761 rtx src_const;
4762 /* Original SET_SRC value used for libcall notes. */
4763 rtx orig_src;
4764 /* Hash value of constant equivalent for SET_SRC. */
4765 unsigned src_const_hash;
4766 /* Table entry for constant equivalent for SET_SRC, if any. */
4767 struct table_elt *src_const_elt;
4768 };
4769
4770 static void
4771 cse_insn (insn, libcall_insn)
4772 rtx insn;
4773 rtx libcall_insn;
4774 {
4775 rtx x = PATTERN (insn);
4776 int i;
4777 rtx tem;
4778 int n_sets = 0;
4779
4780 #ifdef HAVE_cc0
4781 /* Records what this insn does to set CC0. */
4782 rtx this_insn_cc0 = 0;
4783 enum machine_mode this_insn_cc0_mode = VOIDmode;
4784 #endif
4785
4786 rtx src_eqv = 0;
4787 struct table_elt *src_eqv_elt = 0;
4788 int src_eqv_volatile = 0;
4789 int src_eqv_in_memory = 0;
4790 unsigned src_eqv_hash = 0;
4791
4792 struct set *sets = (struct set *) 0;
4793
4794 this_insn = insn;
4795
4796 /* Find all the SETs and CLOBBERs in this instruction.
4797 Record all the SETs in the array `set' and count them.
4798 Also determine whether there is a CLOBBER that invalidates
4799 all memory references, or all references at varying addresses. */
4800
4801 if (GET_CODE (insn) == CALL_INSN)
4802 {
4803 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4804 {
4805 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4806 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4807 XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4808 }
4809 }
4810
4811 if (GET_CODE (x) == SET)
4812 {
4813 sets = (struct set *) alloca (sizeof (struct set));
4814 sets[0].rtl = x;
4815
4816 /* Ignore SETs that are unconditional jumps.
4817 They never need cse processing, so this does not hurt.
4818 The reason is not efficiency but rather
4819 so that we can test at the end for instructions
4820 that have been simplified to unconditional jumps
4821 and not be misled by unchanged instructions
4822 that were unconditional jumps to begin with. */
4823 if (SET_DEST (x) == pc_rtx
4824 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4825 ;
4826
4827 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4828 The hard function value register is used only once, to copy to
4829 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4830 Ensure we invalidate the destination register. On the 80386 no
4831 other code would invalidate it since it is a fixed_reg.
4832 We need not check the return of apply_change_group; see canon_reg. */
4833
4834 else if (GET_CODE (SET_SRC (x)) == CALL)
4835 {
4836 canon_reg (SET_SRC (x), insn);
4837 apply_change_group ();
4838 fold_rtx (SET_SRC (x), insn);
4839 invalidate (SET_DEST (x), VOIDmode);
4840 }
4841 else
4842 n_sets = 1;
4843 }
4844 else if (GET_CODE (x) == PARALLEL)
4845 {
4846 int lim = XVECLEN (x, 0);
4847
4848 sets = (struct set *) alloca (lim * sizeof (struct set));
4849
4850 /* Find all regs explicitly clobbered in this insn,
4851 and ensure they are not replaced with any other regs
4852 elsewhere in this insn.
4853 When a reg that is clobbered is also used for input,
4854 we should presume that that is for a reason,
4855 and we should not substitute some other register
4856 which is not supposed to be clobbered.
4857 Therefore, this loop cannot be merged into the one below
4858 because a CALL may precede a CLOBBER and refer to the
4859 value clobbered. We must not let a canonicalization do
4860 anything in that case. */
4861 for (i = 0; i < lim; i++)
4862 {
4863 rtx y = XVECEXP (x, 0, i);
4864 if (GET_CODE (y) == CLOBBER)
4865 {
4866 rtx clobbered = XEXP (y, 0);
4867
4868 if (GET_CODE (clobbered) == REG
4869 || GET_CODE (clobbered) == SUBREG)
4870 invalidate (clobbered, VOIDmode);
4871 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4872 || GET_CODE (clobbered) == ZERO_EXTRACT)
4873 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4874 }
4875 }
4876
4877 for (i = 0; i < lim; i++)
4878 {
4879 rtx y = XVECEXP (x, 0, i);
4880 if (GET_CODE (y) == SET)
4881 {
4882 /* As above, we ignore unconditional jumps and call-insns and
4883 ignore the result of apply_change_group. */
4884 if (GET_CODE (SET_SRC (y)) == CALL)
4885 {
4886 canon_reg (SET_SRC (y), insn);
4887 apply_change_group ();
4888 fold_rtx (SET_SRC (y), insn);
4889 invalidate (SET_DEST (y), VOIDmode);
4890 }
4891 else if (SET_DEST (y) == pc_rtx
4892 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4893 ;
4894 else
4895 sets[n_sets++].rtl = y;
4896 }
4897 else if (GET_CODE (y) == CLOBBER)
4898 {
4899 /* If we clobber memory, canon the address.
4900 This does nothing when a register is clobbered
4901 because we have already invalidated the reg. */
4902 if (GET_CODE (XEXP (y, 0)) == MEM)
4903 canon_reg (XEXP (y, 0), NULL_RTX);
4904 }
4905 else if (GET_CODE (y) == USE
4906 && ! (GET_CODE (XEXP (y, 0)) == REG
4907 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4908 canon_reg (y, NULL_RTX);
4909 else if (GET_CODE (y) == CALL)
4910 {
4911 /* The result of apply_change_group can be ignored; see
4912 canon_reg. */
4913 canon_reg (y, insn);
4914 apply_change_group ();
4915 fold_rtx (y, insn);
4916 }
4917 }
4918 }
4919 else if (GET_CODE (x) == CLOBBER)
4920 {
4921 if (GET_CODE (XEXP (x, 0)) == MEM)
4922 canon_reg (XEXP (x, 0), NULL_RTX);
4923 }
4924
4925 /* Canonicalize a USE of a pseudo register or memory location. */
4926 else if (GET_CODE (x) == USE
4927 && ! (GET_CODE (XEXP (x, 0)) == REG
4928 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4929 canon_reg (XEXP (x, 0), NULL_RTX);
4930 else if (GET_CODE (x) == CALL)
4931 {
4932 /* The result of apply_change_group can be ignored; see canon_reg. */
4933 canon_reg (x, insn);
4934 apply_change_group ();
4935 fold_rtx (x, insn);
4936 }
4937
4938 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4939 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4940 is handled specially for this case, and if it isn't set, then there will
4941 be no equivalence for the destination. */
4942 if (n_sets == 1 && REG_NOTES (insn) != 0
4943 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4944 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4945 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4946 {
4947 src_eqv = fold_rtx (canon_reg (XEXP (tem, 0), NULL_RTX), insn);
4948 XEXP (tem, 0) = src_eqv;
4949 }
4950
4951 /* Canonicalize sources and addresses of destinations.
4952 We do this in a separate pass to avoid problems when a MATCH_DUP is
4953 present in the insn pattern. In that case, we want to ensure that
4954 we don't break the duplicate nature of the pattern. So we will replace
4955 both operands at the same time. Otherwise, we would fail to find an
4956 equivalent substitution in the loop calling validate_change below.
4957
4958 We used to suppress canonicalization of DEST if it appears in SRC,
4959 but we don't do this any more. */
4960
4961 for (i = 0; i < n_sets; i++)
4962 {
4963 rtx dest = SET_DEST (sets[i].rtl);
4964 rtx src = SET_SRC (sets[i].rtl);
4965 rtx new = canon_reg (src, insn);
4966 int insn_code;
4967
4968 sets[i].orig_src = src;
4969 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
4970 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4971 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4972 || (insn_code = recog_memoized (insn)) < 0
4973 || insn_data[insn_code].n_dups > 0)
4974 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4975 else
4976 SET_SRC (sets[i].rtl) = new;
4977
4978 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
4979 {
4980 validate_change (insn, &XEXP (dest, 1),
4981 canon_reg (XEXP (dest, 1), insn), 1);
4982 validate_change (insn, &XEXP (dest, 2),
4983 canon_reg (XEXP (dest, 2), insn), 1);
4984 }
4985
4986 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
4987 || GET_CODE (dest) == ZERO_EXTRACT
4988 || GET_CODE (dest) == SIGN_EXTRACT)
4989 dest = XEXP (dest, 0);
4990
4991 if (GET_CODE (dest) == MEM)
4992 canon_reg (dest, insn);
4993 }
4994
4995 /* Now that we have done all the replacements, we can apply the change
4996 group and see if they all work. Note that this will cause some
4997 canonicalizations that would have worked individually not to be applied
4998 because some other canonicalization didn't work, but this should not
4999 occur often.
5000
5001 The result of apply_change_group can be ignored; see canon_reg. */
5002
5003 apply_change_group ();
5004
5005 /* Set sets[i].src_elt to the class each source belongs to.
5006 Detect assignments from or to volatile things
5007 and set sets[i] to zero so they will be ignored
5008 in the rest of this function.
5009
5010 Nothing in this loop changes the hash table or the register chains. */
5011
5012 for (i = 0; i < n_sets; i++)
5013 {
5014 rtx src, dest;
5015 rtx src_folded;
5016 struct table_elt *elt = 0, *p;
5017 enum machine_mode mode;
5018 rtx src_eqv_here;
5019 rtx src_const = 0;
5020 rtx src_related = 0;
5021 struct table_elt *src_const_elt = 0;
5022 int src_cost = MAX_COST;
5023 int src_eqv_cost = MAX_COST;
5024 int src_folded_cost = MAX_COST;
5025 int src_related_cost = MAX_COST;
5026 int src_elt_cost = MAX_COST;
5027 int src_regcost = MAX_COST;
5028 int src_eqv_regcost = MAX_COST;
5029 int src_folded_regcost = MAX_COST;
5030 int src_related_regcost = MAX_COST;
5031 int src_elt_regcost = MAX_COST;
5032 /* Set nonzero if we need to call force_const_mem on the
5033 contents of src_folded before using it. */
5034 int src_folded_force_flag = 0;
5035
5036 dest = SET_DEST (sets[i].rtl);
5037 src = SET_SRC (sets[i].rtl);
5038
5039 /* If SRC is a constant that has no machine mode,
5040 hash it with the destination's machine mode.
5041 This way we can keep different modes separate. */
5042
5043 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5044 sets[i].mode = mode;
5045
5046 if (src_eqv)
5047 {
5048 enum machine_mode eqvmode = mode;
5049 if (GET_CODE (dest) == STRICT_LOW_PART)
5050 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5051 do_not_record = 0;
5052 hash_arg_in_memory = 0;
5053 src_eqv_hash = HASH (src_eqv, eqvmode);
5054
5055 /* Find the equivalence class for the equivalent expression. */
5056
5057 if (!do_not_record)
5058 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
5059
5060 src_eqv_volatile = do_not_record;
5061 src_eqv_in_memory = hash_arg_in_memory;
5062 }
5063
5064 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
5065 value of the INNER register, not the destination. So it is not
5066 a valid substitution for the source. But save it for later. */
5067 if (GET_CODE (dest) == STRICT_LOW_PART)
5068 src_eqv_here = 0;
5069 else
5070 src_eqv_here = src_eqv;
5071
5072 /* Simplify any foldable subexpressions in SRC. Then get the fully-
5073 simplified result, which may not necessarily be valid. */
5074 src_folded = fold_rtx (src, insn);
5075
5076 #if 0
5077 /* ??? This caused bad code to be generated for the m68k port with -O2.
5078 Suppose src is (CONST_INT -1), and that after truncation src_folded
5079 is (CONST_INT 3). Suppose src_folded is then used for src_const.
5080 At the end we will add src and src_const to the same equivalence
5081 class. We now have 3 and -1 on the same equivalence class. This
5082 causes later instructions to be mis-optimized. */
5083 /* If storing a constant in a bitfield, pre-truncate the constant
5084 so we will be able to record it later. */
5085 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5086 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5087 {
5088 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5089
5090 if (GET_CODE (src) == CONST_INT
5091 && GET_CODE (width) == CONST_INT
5092 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5093 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5094 src_folded
5095 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
5096 << INTVAL (width)) - 1));
5097 }
5098 #endif
5099
5100 /* Compute SRC's hash code, and also notice if it
5101 should not be recorded at all. In that case,
5102 prevent any further processing of this assignment. */
5103 do_not_record = 0;
5104 hash_arg_in_memory = 0;
5105
5106 sets[i].src = src;
5107 sets[i].src_hash = HASH (src, mode);
5108 sets[i].src_volatile = do_not_record;
5109 sets[i].src_in_memory = hash_arg_in_memory;
5110
5111 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
5112 a pseudo, do not record SRC. Using SRC as a replacement for
5113 anything else will be incorrect in that situation. Note that
5114 this usually occurs only for stack slots, in which case all the
5115 RTL would be referring to SRC, so we don't lose any optimization
5116 opportunities by not having SRC in the hash table. */
5117
5118 if (GET_CODE (src) == MEM
5119 && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
5120 && GET_CODE (dest) == REG
5121 && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5122 sets[i].src_volatile = 1;
5123
5124 #if 0
5125 /* It is no longer clear why we used to do this, but it doesn't
5126 appear to still be needed. So let's try without it since this
5127 code hurts cse'ing widened ops. */
5128 /* If source is a perverse subreg (such as QI treated as an SI),
5129 treat it as volatile. It may do the work of an SI in one context
5130 where the extra bits are not being used, but cannot replace an SI
5131 in general. */
5132 if (GET_CODE (src) == SUBREG
5133 && (GET_MODE_SIZE (GET_MODE (src))
5134 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5135 sets[i].src_volatile = 1;
5136 #endif
5137
5138 /* Locate all possible equivalent forms for SRC. Try to replace
5139 SRC in the insn with each cheaper equivalent.
5140
5141 We have the following types of equivalents: SRC itself, a folded
5142 version, a value given in a REG_EQUAL note, or a value related
5143 to a constant.
5144
5145 Each of these equivalents may be part of an additional class
5146 of equivalents (if more than one is in the table, they must be in
5147 the same class; we check for this).
5148
5149 If the source is volatile, we don't do any table lookups.
5150
5151 We note any constant equivalent for possible later use in a
5152 REG_NOTE. */
5153
5154 if (!sets[i].src_volatile)
5155 elt = lookup (src, sets[i].src_hash, mode);
5156
5157 sets[i].src_elt = elt;
5158
5159 if (elt && src_eqv_here && src_eqv_elt)
5160 {
5161 if (elt->first_same_value != src_eqv_elt->first_same_value)
5162 {
5163 /* The REG_EQUAL is indicating that two formerly distinct
5164 classes are now equivalent. So merge them. */
5165 merge_equiv_classes (elt, src_eqv_elt);
5166 src_eqv_hash = HASH (src_eqv, elt->mode);
5167 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
5168 }
5169
5170 src_eqv_here = 0;
5171 }
5172
5173 else if (src_eqv_elt)
5174 elt = src_eqv_elt;
5175
5176 /* Try to find a constant somewhere and record it in `src_const'.
5177 Record its table element, if any, in `src_const_elt'. Look in
5178 any known equivalences first. (If the constant is not in the
5179 table, also set `sets[i].src_const_hash'). */
5180 if (elt)
5181 for (p = elt->first_same_value; p; p = p->next_same_value)
5182 if (p->is_const)
5183 {
5184 src_const = p->exp;
5185 src_const_elt = elt;
5186 break;
5187 }
5188
5189 if (src_const == 0
5190 && (CONSTANT_P (src_folded)
5191 /* Consider (minus (label_ref L1) (label_ref L2)) as
5192 "constant" here so we will record it. This allows us
5193 to fold switch statements when an ADDR_DIFF_VEC is used. */
5194 || (GET_CODE (src_folded) == MINUS
5195 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5196 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5197 src_const = src_folded, src_const_elt = elt;
5198 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5199 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5200
5201 /* If we don't know if the constant is in the table, get its
5202 hash code and look it up. */
5203 if (src_const && src_const_elt == 0)
5204 {
5205 sets[i].src_const_hash = HASH (src_const, mode);
5206 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
5207 }
5208
5209 sets[i].src_const = src_const;
5210 sets[i].src_const_elt = src_const_elt;
5211
5212 /* If the constant and our source are both in the table, mark them as
5213 equivalent. Otherwise, if a constant is in the table but the source
5214 isn't, set ELT to it. */
5215 if (src_const_elt && elt
5216 && src_const_elt->first_same_value != elt->first_same_value)
5217 merge_equiv_classes (elt, src_const_elt);
5218 else if (src_const_elt && elt == 0)
5219 elt = src_const_elt;
5220
5221 /* See if there is a register linearly related to a constant
5222 equivalent of SRC. */
5223 if (src_const
5224 && (GET_CODE (src_const) == CONST
5225 || (src_const_elt && src_const_elt->related_value != 0)))
5226 {
5227 src_related = use_related_value (src_const, src_const_elt);
5228 if (src_related)
5229 {
5230 struct table_elt *src_related_elt
5231 = lookup (src_related, HASH (src_related, mode), mode);
5232 if (src_related_elt && elt)
5233 {
5234 if (elt->first_same_value
5235 != src_related_elt->first_same_value)
5236 /* This can occur when we previously saw a CONST
5237 involving a SYMBOL_REF and then see the SYMBOL_REF
5238 twice. Merge the involved classes. */
5239 merge_equiv_classes (elt, src_related_elt);
5240
5241 src_related = 0;
5242 src_related_elt = 0;
5243 }
5244 else if (src_related_elt && elt == 0)
5245 elt = src_related_elt;
5246 }
5247 }
5248
5249 /* See if we have a CONST_INT that is already in a register in a
5250 wider mode. */
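/* For example, if (const_int 3) was already loaded into an SImode
   register and this insn wants it in HImode, we may be able to use
   the low part of that register instead of loading the constant
   again.  (The modes here are purely illustrative.)  */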
5251
5252 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5253 && GET_MODE_CLASS (mode) == MODE_INT
5254 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5255 {
5256 enum machine_mode wider_mode;
5257
5258 for (wider_mode = GET_MODE_WIDER_MODE (mode);
5259 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5260 && src_related == 0;
5261 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5262 {
5263 struct table_elt *const_elt
5264 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5265
5266 if (const_elt == 0)
5267 continue;
5268
5269 for (const_elt = const_elt->first_same_value;
5270 const_elt; const_elt = const_elt->next_same_value)
5271 if (GET_CODE (const_elt->exp) == REG)
5272 {
5273 src_related = gen_lowpart_if_possible (mode,
5274 const_elt->exp);
5275 break;
5276 }
5277 }
5278 }
5279
5280 /* Another possibility is that we have an AND with a constant in
5281 a mode narrower than a word. If so, it might have been generated
5282 as part of an "if" which would narrow the AND. If we already
5283 have done the AND in a wider mode, we can use a SUBREG of that
5284 value. */
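/* E.g., if we want (and:QI X (const_int 15)) and have already
   computed (and:SI X' (const_int 15)), where X' is X viewed in
   SImode, the low part of the register holding that wider result
   can serve as the QImode value.  (Modes are illustrative.)  */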
5285
5286 if (flag_expensive_optimizations && ! src_related
5287 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5288 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5289 {
5290 enum machine_mode tmode;
5291 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5292
5293 for (tmode = GET_MODE_WIDER_MODE (mode);
5294 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5295 tmode = GET_MODE_WIDER_MODE (tmode))
5296 {
5297 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
5298 struct table_elt *larger_elt;
5299
5300 if (inner)
5301 {
5302 PUT_MODE (new_and, tmode);
5303 XEXP (new_and, 0) = inner;
5304 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5305 if (larger_elt == 0)
5306 continue;
5307
5308 for (larger_elt = larger_elt->first_same_value;
5309 larger_elt; larger_elt = larger_elt->next_same_value)
5310 if (GET_CODE (larger_elt->exp) == REG)
5311 {
5312 src_related
5313 = gen_lowpart_if_possible (mode, larger_elt->exp);
5314 break;
5315 }
5316
5317 if (src_related)
5318 break;
5319 }
5320 }
5321 }
5322
5323 #ifdef LOAD_EXTEND_OP
5324 /* See if a MEM has already been loaded with a widening operation;
5325 if it has, we can use a subreg of that. Many CISC machines
5326 also have such operations, but this is only likely to be
5327 beneficial on these machines. */
5328
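/* For instance, on a machine whose byte loads always zero-extend,
   a prior (zero_extend:SI (mem:QI ...)) means the SImode register
   holding that result also supplies the QImode value of the MEM.  */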
5329 if (flag_expensive_optimizations && src_related == 0
5330 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5331 && GET_MODE_CLASS (mode) == MODE_INT
5332 && GET_CODE (src) == MEM && ! do_not_record
5333 && LOAD_EXTEND_OP (mode) != NIL)
5334 {
5335 enum machine_mode tmode;
5336
5337 /* Set what we are trying to extend and the operation it might
5338 have been extended with. */
5339 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5340 XEXP (memory_extend_rtx, 0) = src;
5341
5342 for (tmode = GET_MODE_WIDER_MODE (mode);
5343 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5344 tmode = GET_MODE_WIDER_MODE (tmode))
5345 {
5346 struct table_elt *larger_elt;
5347
5348 PUT_MODE (memory_extend_rtx, tmode);
5349 larger_elt = lookup (memory_extend_rtx,
5350 HASH (memory_extend_rtx, tmode), tmode);
5351 if (larger_elt == 0)
5352 continue;
5353
5354 for (larger_elt = larger_elt->first_same_value;
5355 larger_elt; larger_elt = larger_elt->next_same_value)
5356 if (GET_CODE (larger_elt->exp) == REG)
5357 {
5358 src_related = gen_lowpart_if_possible (mode,
5359 larger_elt->exp);
5360 break;
5361 }
5362
5363 if (src_related)
5364 break;
5365 }
5366 }
5367 #endif /* LOAD_EXTEND_OP */
5368
5369 if (src == src_folded)
5370 src_folded = 0;
5371
5372 /* At this point, ELT, if nonzero, points to a class of expressions
5373 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5374 and SRC_RELATED, if nonzero, each contain additional equivalent
5375 expressions. Prune these latter expressions by deleting expressions
5376 already in the equivalence class.
5377
5378 Check for an equivalent identical to the destination. If found,
5379 this is the preferred equivalent since it will likely lead to
5380 elimination of the insn. Indicate this by placing it in
5381 `src_related'. */
5382
5383 if (elt)
5384 elt = elt->first_same_value;
5385 for (p = elt; p; p = p->next_same_value)
5386 {
5387 enum rtx_code code = GET_CODE (p->exp);
5388
5389 /* If the expression is not valid, ignore it. Then we do not
5390 have to check for validity below. In most cases, we can use
5391 `rtx_equal_p', since canonicalization has already been done. */
5392 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
5393 continue;
5394
5395 /* Also skip paradoxical subregs, unless that's what we're
5396 looking for. */
5397 if (code == SUBREG
5398 && (GET_MODE_SIZE (GET_MODE (p->exp))
5399 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5400 && ! (src != 0
5401 && GET_CODE (src) == SUBREG
5402 && GET_MODE (src) == GET_MODE (p->exp)
5403 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5404 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5405 continue;
5406
5407 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5408 src = 0;
5409 else if (src_folded && GET_CODE (src_folded) == code
5410 && rtx_equal_p (src_folded, p->exp))
5411 src_folded = 0;
5412 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5413 && rtx_equal_p (src_eqv_here, p->exp))
5414 src_eqv_here = 0;
5415 else if (src_related && GET_CODE (src_related) == code
5416 && rtx_equal_p (src_related, p->exp))
5417 src_related = 0;
5418
5419 /* If this is the same as the destination of the insn, we want
5420 to prefer it. Copy it to src_related; the code below will
5421 then give it a negative cost. */
5422 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5423 src_related = dest;
5424 }
5425
5426 /* Find the cheapest valid equivalent, trying all the available
5427 possibilities. Prefer items not in the hash table to ones
5428 that are when they are equal cost. Note that we can never
5429 worsen an insn as the current contents will also succeed.
5430 If we find an equivalent identical to the destination, use it as best,
5431 since this insn will probably be eliminated in that case. */
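/* A cost of -1 below marks an equivalent identical to the
   destination; preferrable () always ranks such a candidate first.  */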
5432 if (src)
5433 {
5434 if (rtx_equal_p (src, dest))
5435 src_cost = src_regcost = -1;
5436 else
5437 {
5438 src_cost = COST (src);
5439 src_regcost = approx_reg_cost (src);
5440 }
5441 }
5442
5443 if (src_eqv_here)
5444 {
5445 if (rtx_equal_p (src_eqv_here, dest))
5446 src_eqv_cost = src_eqv_regcost = -1;
5447 else
5448 {
5449 src_eqv_cost = COST (src_eqv_here);
5450 src_eqv_regcost = approx_reg_cost (src_eqv_here);
5451 }
5452 }
5453
5454 if (src_folded)
5455 {
5456 if (rtx_equal_p (src_folded, dest))
5457 src_folded_cost = src_folded_regcost = -1;
5458 else
5459 {
5460 src_folded_cost = COST (src_folded);
5461 src_folded_regcost = approx_reg_cost (src_folded);
5462 }
5463 }
5464
5465 if (src_related)
5466 {
5467 if (rtx_equal_p (src_related, dest))
5468 src_related_cost = src_related_regcost = -1;
5469 else
5470 {
5471 src_related_cost = COST (src_related);
5472 src_related_regcost = approx_reg_cost (src_related);
5473 }
5474 }
5475
5476 /* If this was an indirect jump insn, a known label will really be
5477 cheaper even though it looks more expensive. */
5478 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5479 src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5480
5481 /* Terminate loop when replacement made. This must terminate since
5482 the current contents will be tested and will always be valid. */
5483 while (1)
5484 {
5485 rtx trial;
5486
5487 /* Skip invalid entries. */
5488 while (elt && GET_CODE (elt->exp) != REG
5489 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
5490 elt = elt->next_same_value;
5491
5492 /* A paradoxical subreg would be bad here: it'll be the right
5493 size, but later may be adjusted so that the upper bits aren't
5494 what we want. So reject it. */
5495 if (elt != 0
5496 && GET_CODE (elt->exp) == SUBREG
5497 && (GET_MODE_SIZE (GET_MODE (elt->exp))
5498 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5499 /* It is okay, though, if the rtx we're trying to match
5500 will ignore any of the bits we can't predict. */
5501 && ! (src != 0
5502 && GET_CODE (src) == SUBREG
5503 && GET_MODE (src) == GET_MODE (elt->exp)
5504 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5505 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5506 {
5507 elt = elt->next_same_value;
5508 continue;
5509 }
5510
5511 if (elt)
5512 {
5513 src_elt_cost = elt->cost;
5514 src_elt_regcost = elt->regcost;
5515 }
5516
5517 /* Find cheapest and skip it for the next time. For items
5518 of equal cost, use this order:
5519 src_folded, src, src_eqv, src_related and hash table entry. */
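/* Each candidate is retired by setting its cost to MAX_COST once it
   has been tried, so successive iterations fall through to the
   next-cheapest alternative until a substitution succeeds.  */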
5520 if (src_folded
5521 && preferrable (src_folded_cost, src_folded_regcost,
5522 src_cost, src_regcost) <= 0
5523 && preferrable (src_folded_cost, src_folded_regcost,
5524 src_eqv_cost, src_eqv_regcost) <= 0
5525 && preferrable (src_folded_cost, src_folded_regcost,
5526 src_related_cost, src_related_regcost) <= 0
5527 && preferrable (src_folded_cost, src_folded_regcost,
5528 src_elt_cost, src_elt_regcost) <= 0)
5529 {
5530 trial = src_folded, src_folded_cost = MAX_COST;
5531 if (src_folded_force_flag)
5532 trial = force_const_mem (mode, trial);
5533 }
5534 else if (src
5535 && preferrable (src_cost, src_regcost,
5536 src_eqv_cost, src_eqv_regcost) <= 0
5537 && preferrable (src_cost, src_regcost,
5538 src_related_cost, src_related_regcost) <= 0
5539 && preferrable (src_cost, src_regcost,
5540 src_elt_cost, src_elt_regcost) <= 0)
5541 trial = src, src_cost = MAX_COST;
5542 else if (src_eqv_here
5543 && preferrable (src_eqv_cost, src_eqv_regcost,
5544 src_related_cost, src_related_regcost) <= 0
5545 && preferrable (src_eqv_cost, src_eqv_regcost,
5546 src_elt_cost, src_elt_regcost) <= 0)
5547 trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
5548 else if (src_related
5549 && preferrable (src_related_cost, src_related_regcost,
5550 src_elt_cost, src_elt_regcost) <= 0)
5551 trial = copy_rtx (src_related), src_related_cost = MAX_COST;
5552 else
5553 {
5554 trial = copy_rtx (elt->exp);
5555 elt = elt->next_same_value;
5556 src_elt_cost = MAX_COST;
5557 }
5558
5559 /* We don't normally have an insn matching (set (pc) (pc)), so
5560 check for this separately here. We will delete such an
5561 insn below.
5562
5563 For other cases such as a table jump or conditional jump
5564 where we know the ultimate target, go ahead and replace the
5565 operand. While that may not make a valid insn, we will
5566 reemit the jump below (and also insert any necessary
5567 barriers). */
5568 if (n_sets == 1 && dest == pc_rtx
5569 && (trial == pc_rtx
5570 || (GET_CODE (trial) == LABEL_REF
5571 && ! condjump_p (insn))))
5572 {
5573 SET_SRC (sets[i].rtl) = trial;
5574 cse_jumps_altered = 1;
5575 break;
5576 }
5577
5578 /* Look for a substitution that makes a valid insn. */
5579 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5580 {
5581 /* If we just made a substitution inside a libcall, then we
5582 need to make the same substitution in any notes attached
5583 to the RETVAL insn. */
5584 if (libcall_insn
5585 && (GET_CODE (sets[i].orig_src) == REG
5586 || GET_CODE (sets[i].orig_src) == SUBREG
5587 || GET_CODE (sets[i].orig_src) == MEM))
5588 replace_rtx (REG_NOTES (libcall_insn), sets[i].orig_src,
5589 canon_reg (SET_SRC (sets[i].rtl), insn));
5590
5591 /* The result of apply_change_group can be ignored; see
5592 canon_reg. */
5593
5594 validate_change (insn, &SET_SRC (sets[i].rtl),
5595 canon_reg (SET_SRC (sets[i].rtl), insn),
5596 1);
5597 apply_change_group ();
5598 break;
5599 }
5600
5601 /* If we previously found constant pool entries for
5602 constants and this is a constant, try making a
5603 pool entry. Put it in src_folded unless we have already done
5604 so, since that is where it likely came from. */
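/* src_folded_force_flag records that force_const_mem must wrap the
   constant before it can be substituted; that happens when this
   candidate is eventually chosen as "trial" above.  */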
5605
5606 else if (constant_pool_entries_cost
5607 && CONSTANT_P (trial)
5608 /* Reject cases that will abort in decode_rtx_const.
5609 On the alpha when simplifying a switch, we get
5610 (const (truncate (minus (label_ref) (label_ref)))). */
5611 && ! (GET_CODE (trial) == CONST
5612 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
5613 /* Likewise on IA-64, except without the truncate. */
5614 && ! (GET_CODE (trial) == CONST
5615 && GET_CODE (XEXP (trial, 0)) == MINUS
5616 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5617 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)
5618 && (src_folded == 0
5619 || (GET_CODE (src_folded) != MEM
5620 && ! src_folded_force_flag))
5621 && GET_MODE_CLASS (mode) != MODE_CC
5622 && mode != VOIDmode)
5623 {
5624 src_folded_force_flag = 1;
5625 src_folded = trial;
5626 src_folded_cost = constant_pool_entries_cost;
5627 }
5628 }
5629
5630 src = SET_SRC (sets[i].rtl);
5631
5632 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5633 However, there is an important exception: If both are registers
5634 that are not the head of their equivalence class, replace SET_SRC
5635 with the head of the class. If we do not do this, we will have
5636 both registers live over a portion of the basic block. This way,
5637 their lifetimes will likely abut instead of overlapping. */
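/* E.g., if r108 is the head of the class containing r108 and r109,
   rewrite (set (reg 109) (reg 109)) as (set (reg 109) (reg 108));
   the register numbers here are purely illustrative.  */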
5638 if (GET_CODE (dest) == REG
5639 && REGNO_QTY_VALID_P (REGNO (dest)))
5640 {
5641 int dest_q = REG_QTY (REGNO (dest));
5642 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5643
5644 if (dest_ent->mode == GET_MODE (dest)
5645 && dest_ent->first_reg != REGNO (dest)
5646 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
5647 /* Don't do this if the original insn had a hard reg as
5648 SET_SRC or SET_DEST. */
5649 && (GET_CODE (sets[i].src) != REG
5650 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5651 && (GET_CODE (dest) != REG || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5652 /* We can't call canon_reg here because it won't do anything if
5653 SRC is a hard register. */
5654 {
5655 int src_q = REG_QTY (REGNO (src));
5656 struct qty_table_elem *src_ent = &qty_table[src_q];
5657 int first = src_ent->first_reg;
5658 rtx new_src
5659 = (first >= FIRST_PSEUDO_REGISTER
5660 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5661
5662 /* We must use validate_change even for this, because this
5663 might be a special no-op instruction, suitable only to
5664 tag notes onto. */
5665 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5666 {
5667 src = new_src;
5668 /* If we had a constant that is cheaper than what we are now
5669 setting SRC to, use that constant. We ignored it when we
5670 thought we could make this into a no-op. */
5671 if (src_const && COST (src_const) < COST (src)
5672 && validate_change (insn, &SET_SRC (sets[i].rtl),
5673 src_const, 0))
5674 src = src_const;
5675 }
5676 }
5677 }
5678
5679 /* If we made a change, recompute SRC values. */
5680 if (src != sets[i].src)
5681 {
5682 cse_altered = 1;
5683 do_not_record = 0;
5684 hash_arg_in_memory = 0;
5685 sets[i].src = src;
5686 sets[i].src_hash = HASH (src, mode);
5687 sets[i].src_volatile = do_not_record;
5688 sets[i].src_in_memory = hash_arg_in_memory;
5689 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5690 }
5691
5692 /* If this is a single SET, we are setting a register, and we have an
5693 equivalent constant, we want to add a REG_NOTE. We don't want
5694 to write a REG_EQUAL note for a constant pseudo since verifying that
5695 that pseudo hasn't been eliminated is a pain. Such a note also
5696 won't help anything.
5697
5698 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5699 which can be created for a reference to a compile time computable
5700 entry in a jump table. */
5701
5702 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
5703 && GET_CODE (src_const) != REG
5704 && ! (GET_CODE (src_const) == CONST
5705 && GET_CODE (XEXP (src_const, 0)) == MINUS
5706 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5707 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5708 {
5709 /* Make sure that the rtx is not shared with any other insn. */
5710 src_const = copy_rtx (src_const);
5711
5712 /* Record the actual constant value in a REG_EQUAL note, making
5713 a new one if one does not already exist. */
5714 set_unique_reg_note (insn, REG_EQUAL, src_const);
5715
5716 /* If storing a constant value in a register that
5717 previously held the constant value 0,
5718 record this fact with a REG_WAS_0 note on this insn.
5719
5720 Note that the *register* is required to have previously held 0,
5721 not just any register in the quantity, and we must point to the
5722 insn that set that register to zero.
5723
5724 Rather than track each register individually, we just see if
5725 the last set for this quantity was for this register. */
5726
5727 if (REGNO_QTY_VALID_P (REGNO (dest)))
5728 {
5729 int dest_q = REG_QTY (REGNO (dest));
5730 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5731
5732 if (dest_ent->const_rtx == const0_rtx)
5733 {
5734 /* See if we previously had a REG_WAS_0 note. */
5735 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
5736 rtx const_insn = dest_ent->const_insn;
5737
5738 if ((tem = single_set (const_insn)) != 0
5739 && rtx_equal_p (SET_DEST (tem), dest))
5740 {
5741 if (note)
5742 XEXP (note, 0) = const_insn;
5743 else
5744 REG_NOTES (insn)
5745 = gen_rtx_INSN_LIST (REG_WAS_0, const_insn,
5746 REG_NOTES (insn));
5747 }
5748 }
5749 }
5750 }
5751
5752 /* Now deal with the destination. */
5753 do_not_record = 0;
5754
5755 /* Look through any SIGN_EXTRACT, ZERO_EXTRACT, SUBREG or
5756 STRICT_LOW_PART to the MEM or REG within it. */
5757 while (GET_CODE (dest) == SIGN_EXTRACT
5758 || GET_CODE (dest) == ZERO_EXTRACT
5759 || GET_CODE (dest) == SUBREG
5760 || GET_CODE (dest) == STRICT_LOW_PART)
5761 dest = XEXP (dest, 0);
5762
5763 sets[i].inner_dest = dest;
5764
5765 if (GET_CODE (dest) == MEM)
5766 {
5767 #ifdef PUSH_ROUNDING
5768 /* Stack pushes invalidate the stack pointer. */
5769 rtx addr = XEXP (dest, 0);
5770 if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
5771 && XEXP (addr, 0) == stack_pointer_rtx)
5772 invalidate (stack_pointer_rtx, Pmode);
5773 #endif
5774 dest = fold_rtx (dest, insn);
5775 }
5776
5777 /* Compute the hash code of the destination now,
5778 before the effects of this instruction are recorded,
5779 since the register values used in the address computation
5780 are those before this instruction. */
5781 sets[i].dest_hash = HASH (dest, mode);
5782
5783 /* Don't enter a bit-field in the hash table
5784 because the value in it after the store
5785 may not equal what was stored, due to truncation. */
5786
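/* E.g., storing the constant 5 in a 2-bit field leaves the field
   holding 1, so recording an equivalence to 5 would be wrong; the
   test below accepts the constant only if no set bit lies at or
   above the field width.  */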
5787 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5788 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5789 {
5790 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5791
5792 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5793 && GET_CODE (width) == CONST_INT
5794 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5795 && ! (INTVAL (src_const)
5796 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5797 /* Exception: if the value is constant,
5798 and it won't be truncated, record it. */
5799 ;
5800 else
5801 {
5802 /* This is chosen so that the destination will be invalidated
5803 but no new value will be recorded.
5804 We must invalidate because sometimes constant
5805 values can be recorded for bitfields. */
5806 sets[i].src_elt = 0;
5807 sets[i].src_volatile = 1;
5808 src_eqv = 0;
5809 src_eqv_elt = 0;
5810 }
5811 }
5812
5813 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5814 the insn. */
5815 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5816 {
5817 /* One less use of the label this insn used to jump to. */
5818 delete_insn (insn);
5819 cse_jumps_altered = 1;
5820 /* No more processing for this set. */
5821 sets[i].rtl = 0;
5822 }
5823
5824 /* If this SET is now setting PC to a label, we know it used to
5825 be a conditional or computed branch. */
5826 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
5827 {
5828 /* Now emit a BARRIER after the unconditional jump. */
5829 if (NEXT_INSN (insn) == 0
5830 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5831 emit_barrier_after (insn);
5832
5833 /* We reemit the jump in as many cases as possible just in
5834 case the form of an unconditional jump is significantly
5835 different from that of a computed jump or conditional jump.
5836
5837 If this insn has multiple sets, then reemitting the
5838 jump is nontrivial. So instead we just force rerecognition
5839 and hope for the best. */
5840 if (n_sets == 1)
5841 {
5842 rtx new = emit_jump_insn_after (gen_jump (XEXP (src, 0)), insn);
5843
5844 JUMP_LABEL (new) = XEXP (src, 0);
5845 LABEL_NUSES (XEXP (src, 0))++;
5846 delete_insn (insn);
5847 insn = new;
5848
5849 /* Now emit a BARRIER after the unconditional jump. */
5850 if (NEXT_INSN (insn) == 0
5851 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5852 emit_barrier_after (insn);
5853 }
5854 else
5855 INSN_CODE (insn) = -1;
5856
5857 never_reached_warning (insn, NULL);
5858
5859 /* Do not bother deleting any unreachable code,
5860 let jump/flow do that. */
5861
5862 cse_jumps_altered = 1;
5863 sets[i].rtl = 0;
5864 }
5865
5866 /* If destination is volatile, invalidate it and then do no further
5867 processing for this assignment. */
5868
5869 else if (do_not_record)
5870 {
5871 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5872 invalidate (dest, VOIDmode);
5873 else if (GET_CODE (dest) == MEM)
5874 {
5875 /* Outgoing arguments for a libcall don't
5876 affect any recorded expressions. */
5877 if (! libcall_insn || insn == libcall_insn)
5878 invalidate (dest, VOIDmode);
5879 }
5880 else if (GET_CODE (dest) == STRICT_LOW_PART
5881 || GET_CODE (dest) == ZERO_EXTRACT)
5882 invalidate (XEXP (dest, 0), GET_MODE (dest));
5883 sets[i].rtl = 0;
5884 }
5885
5886 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5887 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5888
5889 #ifdef HAVE_cc0
5890 /* If setting CC0, record what it was set to, or a constant, if it
5891 is equivalent to a constant. If it is being set to a floating-point
5892 value, make a COMPARE with the appropriate constant of 0. If we
5893 don't do this, later code can interpret this as a test against
5894 const0_rtx, which can cause problems if we try to put it into an
5895 insn as a floating-point operand. */
5896 if (dest == cc0_rtx)
5897 {
5898 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5899 this_insn_cc0_mode = mode;
5900 if (FLOAT_MODE_P (mode))
5901 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5902 CONST0_RTX (mode));
5903 }
5904 #endif
5905 }
5906
5907 /* Now enter all non-volatile source expressions in the hash table
5908 if they are not already present.
5909 Record their equivalence classes in src_elt.
5910 This way we can insert the corresponding destinations into
5911 the same classes even if the actual sources are no longer in them
5912 (having been invalidated). */
5913
5914 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5915 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5916 {
5917 struct table_elt *elt;
5918 struct table_elt *classp = sets[0].src_elt;
5919 rtx dest = SET_DEST (sets[0].rtl);
5920 enum machine_mode eqvmode = GET_MODE (dest);
5921
5922 if (GET_CODE (dest) == STRICT_LOW_PART)
5923 {
5924 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5925 classp = 0;
5926 }
5927 if (insert_regs (src_eqv, classp, 0))
5928 {
5929 rehash_using_reg (src_eqv);
5930 src_eqv_hash = HASH (src_eqv, eqvmode);
5931 }
5932 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5933 elt->in_memory = src_eqv_in_memory;
5934 src_eqv_elt = elt;
5935
5936 /* Check to see if src_eqv_elt is the same as a set source which
5937 does not yet have an elt, and if so set the elt of the set source
5938 to src_eqv_elt. */
5939 for (i = 0; i < n_sets; i++)
5940 if (sets[i].rtl && sets[i].src_elt == 0
5941 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5942 sets[i].src_elt = src_eqv_elt;
5943 }
5944
5945 for (i = 0; i < n_sets; i++)
5946 if (sets[i].rtl && ! sets[i].src_volatile
5947 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5948 {
5949 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5950 {
5951 /* REG_EQUAL in setting a STRICT_LOW_PART
5952 gives an equivalent for the entire destination register,
5953 not just for the subreg being stored in now.
5954 This is a more interesting equivalence, so we arrange later
5955 to treat the entire reg as the destination. */
5956 sets[i].src_elt = src_eqv_elt;
5957 sets[i].src_hash = src_eqv_hash;
5958 }
5959 else
5960 {
5961 /* Insert source and constant equivalent into hash table, if not
5962 already present. */
5963 struct table_elt *classp = src_eqv_elt;
5964 rtx src = sets[i].src;
5965 rtx dest = SET_DEST (sets[i].rtl);
5966 enum machine_mode mode
5967 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5968
5969 if (sets[i].src_elt == 0)
5970 {
5971 /* Don't put a hard register source into the table if this is
5972 the last insn of a libcall. In this case, we only need
5973 to put src_eqv_elt in src_elt. */
5974 if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5975 {
5976 struct table_elt *elt;
5977
5978 /* Note that these insert_regs calls cannot remove
5979 any of the src_elt's, because they would have failed to
5980 match if not still valid. */
5981 if (insert_regs (src, classp, 0))
5982 {
5983 rehash_using_reg (src);
5984 sets[i].src_hash = HASH (src, mode);
5985 }
5986 elt = insert (src, classp, sets[i].src_hash, mode);
5987 elt->in_memory = sets[i].src_in_memory;
5988 sets[i].src_elt = classp = elt;
5989 }
5990 else
5991 sets[i].src_elt = classp;
5992 }
5993 if (sets[i].src_const && sets[i].src_const_elt == 0
5994 && src != sets[i].src_const
5995 && ! rtx_equal_p (sets[i].src_const, src))
5996 sets[i].src_elt = insert (sets[i].src_const, classp,
5997 sets[i].src_const_hash, mode);
5998 }
5999 }
6000 else if (sets[i].src_elt == 0)
6001 /* If we did not insert the source into the hash table (e.g., it was
6002 volatile), note the equivalence class for the REG_EQUAL value, if any,
6003 so that the destination goes into that class. */
6004 sets[i].src_elt = src_eqv_elt;
6005
6006 invalidate_from_clobbers (x);
6007
6008 /* Some registers are invalidated by subroutine calls. Memory is
6009 invalidated by non-constant calls. */
6010
6011 if (GET_CODE (insn) == CALL_INSN)
6012 {
6013 if (! CONST_OR_PURE_CALL_P (insn))
6014 invalidate_memory ();
6015 invalidate_for_call ();
6016 }
6017
6018 /* Now invalidate everything set by this instruction.
6019 If a SUBREG or other funny destination is being set,
6020 sets[i].rtl is still nonzero, so here we invalidate the reg
6021 a part of which is being set. */
6022
6023 for (i = 0; i < n_sets; i++)
6024 if (sets[i].rtl)
6025 {
6026 /* We can't use the inner dest, because the mode associated with
6027 a ZERO_EXTRACT is significant. */
6028 rtx dest = SET_DEST (sets[i].rtl);
6029
6030 /* Needed for registers to remove the register from its
6031 previous quantity's chain.
6032 Needed for memory if this is a nonvarying address, unless
6033 we have just done an invalidate_memory that covers even those. */
6034 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
6035 invalidate (dest, VOIDmode);
6036 else if (GET_CODE (dest) == MEM)
6037 {
6038 /* Outgoing arguments for a libcall don't
6039 affect any recorded expressions. */
6040 if (! libcall_insn || insn == libcall_insn)
6041 invalidate (dest, VOIDmode);
6042 }
6043 else if (GET_CODE (dest) == STRICT_LOW_PART
6044 || GET_CODE (dest) == ZERO_EXTRACT)
6045 invalidate (XEXP (dest, 0), GET_MODE (dest));
6046 }
6047
6048 /* A volatile ASM invalidates everything. */
6049 if (GET_CODE (insn) == INSN
6050 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
6051 && MEM_VOLATILE_P (PATTERN (insn)))
6052 flush_hash_table ();
6053
6054 /* Make sure registers mentioned in destinations
6055 are safe for use in an expression to be inserted.
6056 This removes from the hash table
6057 any invalid entry that refers to one of these registers.
6058
6059 We don't care about the return value from mention_regs because
6060 we are going to hash the SET_DEST values unconditionally. */
6061
6062 for (i = 0; i < n_sets; i++)
6063 {
6064 if (sets[i].rtl)
6065 {
6066 rtx x = SET_DEST (sets[i].rtl);
6067
6068 if (GET_CODE (x) != REG)
6069 mention_regs (x);
6070 else
6071 {
6072 /* We used to rely on all references to a register becoming
6073 inaccessible when a register changes to a new quantity,
6074 since that changes the hash code. However, that is not
6075 safe, since after HASH_SIZE new quantities we get a
6076 hash 'collision' of a register with its own invalid
6077 entries. And since SUBREGs have been changed not to
6078 change their hash code with the hash code of the register,
6079 it wouldn't work any longer at all. So we have to check
6080 for any invalid references lying around now.
6081 This code is similar to the REG case in mention_regs,
6082 but it knows that reg_tick has been incremented, and
6083 it leaves reg_in_table as -1. */
6084 unsigned int regno = REGNO (x);
6085 unsigned int endregno
6086 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
6087 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
6088 unsigned int i;
6089
6090 for (i = regno; i < endregno; i++)
6091 {
6092 if (REG_IN_TABLE (i) >= 0)
6093 {
6094 remove_invalid_refs (i);
6095 REG_IN_TABLE (i) = -1;
6096 }
6097 }
6098 }
6099 }
6100 }
6101
6102 /* We may have just removed some of the src_elt's from the hash table.
6103 So replace each one with the current head of the same class. */
6104
6105 for (i = 0; i < n_sets; i++)
6106 if (sets[i].rtl)
6107 {
6108 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
6109 /* If elt was removed, find current head of same class,
6110 or 0 if nothing remains of that class. */
6111 {
6112 struct table_elt *elt = sets[i].src_elt;
6113
6114 while (elt && elt->prev_same_value)
6115 elt = elt->prev_same_value;
6116
6117 while (elt && elt->first_same_value == 0)
6118 elt = elt->next_same_value;
6119 sets[i].src_elt = elt ? elt->first_same_value : 0;
6120 }
6121 }
6122
6123 /* Now insert the destinations into their equivalence classes. */
6124
6125 for (i = 0; i < n_sets; i++)
6126 if (sets[i].rtl)
6127 {
6128 rtx dest = SET_DEST (sets[i].rtl);
6129 rtx inner_dest = sets[i].inner_dest;
6130 struct table_elt *elt;
6131
6132 /* Don't record value if we are not supposed to risk allocating
6133 floating-point values in registers that might be wider than
6134 memory. */
6135 if ((flag_float_store
6136 && GET_CODE (dest) == MEM
6137 && FLOAT_MODE_P (GET_MODE (dest)))
6138 /* Don't record BLKmode values, because we don't know the
6139 size of it, and can't be sure that other BLKmode values
6140 have the same or smaller size. */
6141 || GET_MODE (dest) == BLKmode
6142 /* Don't record values of destinations set inside a libcall block
6143 since we might delete the libcall. Things should have been set
6144 up so we won't want to reuse such a value, but we play it safe
6145 here. */
6146 || libcall_insn
6147 /* If we didn't put a REG_EQUAL value or a source into the hash
6148 table, there is no point in recording DEST. */
6149 || sets[i].src_elt == 0
6150 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
6151 or SIGN_EXTEND, don't record DEST since it can cause
6152 some tracking to be wrong.
6153
6154 ??? Think about this more later. */
6155 || (GET_CODE (dest) == SUBREG
6156 && (GET_MODE_SIZE (GET_MODE (dest))
6157 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6158 && (GET_CODE (sets[i].src) == SIGN_EXTEND
6159 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
6160 continue;
6161
6162 /* STRICT_LOW_PART isn't part of the value BEING set,
6163 and neither is the SUBREG inside it.
6164 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
6165 if (GET_CODE (dest) == STRICT_LOW_PART)
6166 dest = SUBREG_REG (XEXP (dest, 0));
6167
6168 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
6169 /* Registers must also be inserted into chains for quantities. */
6170 if (insert_regs (dest, sets[i].src_elt, 1))
6171 {
6172 /* If `insert_regs' changes something, the hash code must be
6173 recalculated. */
6174 rehash_using_reg (dest);
6175 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6176 }
6177
6178 if (GET_CODE (inner_dest) == MEM
6179 && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
6180 /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
6181 that (MEM (ADDRESSOF (X))) is equivalent to Y.
6182 Consider the case in which the address of the MEM is
6183 passed to a function, which alters the MEM. Then, if we
6184 later use Y instead of the MEM we'll miss the update. */
6185 elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
6186 else
6187 elt = insert (dest, sets[i].src_elt,
6188 sets[i].dest_hash, GET_MODE (dest));
6189
6190 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
6191 && (! RTX_UNCHANGING_P (sets[i].inner_dest)
6192 || FIXED_BASE_PLUS_P (XEXP (sets[i].inner_dest,
6193 0))));
6194
6195 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6196 narrower than M2, and both M1 and M2 are the same number of words,
6197 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6198 make that equivalence as well.
6199
6200 However, BAR may have equivalences for which gen_lowpart_if_possible
6201 will produce a simpler value than gen_lowpart_if_possible applied to
6202 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6203 BAR's equivalences. If we don't get a simplified form, make
6204 the SUBREG. It will not be used in an equivalence, but will
6205 cause two similar assignments to be detected.
6206
6207 Note the loop below will find SUBREG_REG (DEST) since we have
6208 already entered SRC and DEST of the SET in the table. */
6209
6210 if (GET_CODE (dest) == SUBREG
6211 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6212 / UNITS_PER_WORD)
6213 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
6214 && (GET_MODE_SIZE (GET_MODE (dest))
6215 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6216 && sets[i].src_elt != 0)
6217 {
6218 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6219 struct table_elt *elt, *classp = 0;
6220
6221 for (elt = sets[i].src_elt->first_same_value; elt;
6222 elt = elt->next_same_value)
6223 {
6224 rtx new_src = 0;
6225 unsigned src_hash;
6226 struct table_elt *src_elt;
6227 int byte = 0;
6228
6229 /* Ignore invalid entries. */
6230 if (GET_CODE (elt->exp) != REG
6231 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6232 continue;
6233
6234 /* We may have already been playing subreg games. If the
6235 mode is already correct for the destination, use it. */
6236 if (GET_MODE (elt->exp) == new_mode)
6237 new_src = elt->exp;
6238 else
6239 {
6240 /* Calculate big endian correction for the SUBREG_BYTE.
6241 We have already checked that M1 (GET_MODE (dest))
6242 is not narrower than M2 (new_mode). */
6243 if (BYTES_BIG_ENDIAN)
6244 byte = (GET_MODE_SIZE (GET_MODE (dest))
6245 - GET_MODE_SIZE (new_mode));
6246
6247 new_src = simplify_gen_subreg (new_mode, elt->exp,
6248 GET_MODE (dest), byte);
6249 }
6250
6251 /* The call to simplify_gen_subreg fails if the value
6252 is VOIDmode, yet we can't do any simplification, e.g.
6253 for EXPR_LISTs denoting function call results.
6254 It is invalid to construct a SUBREG with a VOIDmode
6255 SUBREG_REG, hence a zero new_src means we can't do
6256 this substitution. */
6257 if (! new_src)
6258 continue;
6259
6260 src_hash = HASH (new_src, new_mode);
6261 src_elt = lookup (new_src, src_hash, new_mode);
6262
6263 /* Put the new source in the hash table if it isn't there
6264 already. */
6265 if (src_elt == 0)
6266 {
6267 if (insert_regs (new_src, classp, 0))
6268 {
6269 rehash_using_reg (new_src);
6270 src_hash = HASH (new_src, new_mode);
6271 }
6272 src_elt = insert (new_src, classp, src_hash, new_mode);
6273 src_elt->in_memory = elt->in_memory;
6274 }
6275 else if (classp && classp != src_elt->first_same_value)
6276 /* Show that two things that we've seen before are
6277 actually the same. */
6278 merge_equiv_classes (src_elt, classp);
6279
6280 classp = src_elt->first_same_value;
6281 /* Ignore invalid entries. */
6282 while (classp
6283 && GET_CODE (classp->exp) != REG
6284 && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
6285 classp = classp->next_same_value;
6286 }
6287 }
6288 }
6289
6290 /* Special handling for (set REG0 REG1) where REG0 is the
6291 "cheapest", cheaper than REG1. After cse, REG1 will probably not
6292 be used in the sequel, so (if easily done) change this insn to
6293 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6294 that computed their value. Then REG1 will become a dead store
6295 and won't cloud the situation for later optimizations.
6296
6297 Do not make this change if REG1 is a hard register, because it will
6298 then be used in the sequel and we may be changing a two-operand insn
6299 into a three-operand insn.
6300
6301 Also do not do this if we are operating on a copy of INSN.
6302
6303 Also don't do this if INSN ends a libcall; this would cause an unrelated
6304 register to be set in the middle of a libcall, and we then get bad code
6305 if the libcall is deleted. */
6306
6307 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
6308 && NEXT_INSN (PREV_INSN (insn)) == insn
6309 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
6310 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6311 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
6312 {
6313 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6314 struct qty_table_elem *src_ent = &qty_table[src_q];
6315
6316 if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6317 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6318 {
6319 rtx prev = prev_nonnote_insn (insn);
6320
6321 /* Do not swap the registers around if the previous instruction
6322 attaches a REG_EQUIV note to REG1.
6323
6324 ??? It's not entirely clear whether we can transfer a REG_EQUIV
6325 from the pseudo that originally shadowed an incoming argument
6326 to another register. Some uses of REG_EQUIV might rely on it
6327 being attached to REG1 rather than REG2.
6328
6329 This section previously turned the REG_EQUIV into a REG_EQUAL
6330 note. We cannot do that because REG_EQUIV may provide an
6331 uninitialized stack slot when REG_PARM_STACK_SPACE is used. */
6332
6333 if (prev != 0 && GET_CODE (prev) == INSN
6334 && GET_CODE (PATTERN (prev)) == SET
6335 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6336 && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
6337 {
6338 rtx dest = SET_DEST (sets[0].rtl);
6339 rtx src = SET_SRC (sets[0].rtl);
6340 rtx note;
6341
6342 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6343 validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6344 validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6345 apply_change_group ();
6346
6347 /* If there was a REG_WAS_0 note on PREV, remove it. Move
6348 any REG_WAS_0 note on INSN to PREV. */
6349 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
6350 if (note)
6351 remove_note (prev, note);
6352
6353 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
6354 if (note)
6355 {
6356 remove_note (insn, note);
6357 XEXP (note, 1) = REG_NOTES (prev);
6358 REG_NOTES (prev) = note;
6359 }
6360
6361 /* If INSN has a REG_EQUAL note, and this note mentions
6362 REG0, then we must delete it, because the value in
6363 REG0 has changed. If the note's value is REG1, we must
6364 also delete it because that is now this insn's dest. */
6365 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6366 if (note != 0
6367 && (reg_mentioned_p (dest, XEXP (note, 0))
6368 || rtx_equal_p (src, XEXP (note, 0))))
6369 remove_note (insn, note);
6370 }
6371 }
6372 }
6373
6374 /* If this is a conditional jump insn, record any known equivalences due to
6375 the condition being tested. */
6376
6377 last_jump_equiv_class = 0;
6378 if (GET_CODE (insn) == JUMP_INSN
6379 && n_sets == 1 && GET_CODE (x) == SET
6380 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6381 record_jump_equiv (insn, 0);
6382
6383 #ifdef HAVE_cc0
6384 /* If the previous insn set CC0 and this insn no longer references CC0,
6385 delete the previous insn. Here we use the fact that nothing expects CC0
6386 to be valid over an insn, which is true until the final pass. */
6387 if (prev_insn && GET_CODE (prev_insn) == INSN
6388 && (tem = single_set (prev_insn)) != 0
6389 && SET_DEST (tem) == cc0_rtx
6390 && ! reg_mentioned_p (cc0_rtx, x))
6391 delete_insn (prev_insn);
6392
6393 prev_insn_cc0 = this_insn_cc0;
6394 prev_insn_cc0_mode = this_insn_cc0_mode;
6395 #endif
6396
6397 prev_insn = insn;
6398 }
6399 \f
6400 /* Remove from the hash table all expressions that reference memory. */
6401
6402 static void
6403 invalidate_memory ()
6404 {
6405 int i;
6406 struct table_elt *p, *next;
6407
6408 for (i = 0; i < HASH_SIZE; i++)
6409 for (p = table[i]; p; p = next)
6410 {
6411 next = p->next_same_hash;
6412 if (p->in_memory)
6413 remove_from_table (p, i);
6414 }
6415 }
6416
6417 /* If ADDR is an address that implicitly affects the stack pointer, return
6418 1 and update the register tables to show the effect. Else, return 0. */
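/* The rtx class 'a' comprises the auto-increment address forms
   (PRE_INC, PRE_DEC, POST_INC, POST_DEC and relatives), which are
   the only addresses that modify a register as a side effect.  */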
6419
6420 static int
6421 addr_affects_sp_p (addr)
6422 rtx addr;
6423 {
6424 if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
6425 && GET_CODE (XEXP (addr, 0)) == REG
6426 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6427 {
6428 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6429 {
6430 REG_TICK (STACK_POINTER_REGNUM)++;
6431 /* Is it possible to use a subreg of SP? */
6432 SUBREG_TICKED (STACK_POINTER_REGNUM) = -1;
6433 }
6434
6435 /* This should be *very* rare. */
6436 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6437 invalidate (stack_pointer_rtx, VOIDmode);
6438
6439 return 1;
6440 }
6441
6442 return 0;
6443 }
6444
6445 /* Perform invalidation on the basis of everything about an insn
6446 except for invalidating the actual places that are SET in it.
6447 This includes the places CLOBBERed, and anything that might
6448 alias with something that is SET or CLOBBERed.
6449
6450 X is the pattern of the insn. */
6451
6452 static void
6453 invalidate_from_clobbers (x)
6454 rtx x;
6455 {
6456 if (GET_CODE (x) == CLOBBER)
6457 {
6458 rtx ref = XEXP (x, 0);
6459 if (ref)
6460 {
6461 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6462 || GET_CODE (ref) == MEM)
6463 invalidate (ref, VOIDmode);
6464 else if (GET_CODE (ref) == STRICT_LOW_PART
6465 || GET_CODE (ref) == ZERO_EXTRACT)
6466 invalidate (XEXP (ref, 0), GET_MODE (ref));
6467 }
6468 }
6469 else if (GET_CODE (x) == PARALLEL)
6470 {
6471 int i;
6472 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6473 {
6474 rtx y = XVECEXP (x, 0, i);
6475 if (GET_CODE (y) == CLOBBER)
6476 {
6477 rtx ref = XEXP (y, 0);
6478 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6479 || GET_CODE (ref) == MEM)
6480 invalidate (ref, VOIDmode);
6481 else if (GET_CODE (ref) == STRICT_LOW_PART
6482 || GET_CODE (ref) == ZERO_EXTRACT)
6483 invalidate (XEXP (ref, 0), GET_MODE (ref));
6484 }
6485 }
6486 }
6487 }
6488 \f
6489 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6490 and replace any registers in them with either an equivalent constant
6491 or the canonical form of the register. If we are inside an address,
6492 only do this if the address remains valid.
6493
6494 OBJECT is 0 except when within a MEM in which case it is the MEM.
6495
6496 Return the replacement for X. */
6497
6498 static rtx
6499 cse_process_notes (x, object)
6500 rtx x;
6501 rtx object;
6502 {
6503 enum rtx_code code = GET_CODE (x);
6504 const char *fmt = GET_RTX_FORMAT (code);
6505 int i;
6506
6507 switch (code)
6508 {
6509 case CONST_INT:
6510 case CONST:
6511 case SYMBOL_REF:
6512 case LABEL_REF:
6513 case CONST_DOUBLE:
6514 case CONST_VECTOR:
6515 case PC:
6516 case CC0:
6517 case LO_SUM:
6518 return x;
6519
6520 case MEM:
6521 validate_change (x, &XEXP (x, 0),
6522 cse_process_notes (XEXP (x, 0), x), 0);
6523 return x;
6524
6525 case EXPR_LIST:
6526 case INSN_LIST:
6527 if (REG_NOTE_KIND (x) == REG_EQUAL)
6528 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6529 if (XEXP (x, 1))
6530 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6531 return x;
6532
6533 case SIGN_EXTEND:
6534 case ZERO_EXTEND:
6535 case SUBREG:
6536 {
6537 rtx new = cse_process_notes (XEXP (x, 0), object);
6538 /* We don't substitute VOIDmode constants into these rtx,
6539 since they would impede folding. */
6540 if (GET_MODE (new) != VOIDmode)
6541 validate_change (object, &XEXP (x, 0), new, 0);
6542 return x;
6543 }
6544
6545 case REG:
6546 i = REG_QTY (REGNO (x));
6547
6548 /* Return a constant or a constant register. */
6549 if (REGNO_QTY_VALID_P (REGNO (x)))
6550 {
6551 struct qty_table_elem *ent = &qty_table[i];
6552
6553 if (ent->const_rtx != NULL_RTX
6554 && (CONSTANT_P (ent->const_rtx)
6555 || GET_CODE (ent->const_rtx) == REG))
6556 {
6557 rtx new = gen_lowpart_if_possible (GET_MODE (x), ent->const_rtx);
6558 if (new)
6559 return new;
6560 }
6561 }
6562
6563 /* Otherwise, canonicalize this register. */
6564 return canon_reg (x, NULL_RTX);
6565
6566 default:
6567 break;
6568 }
6569
6570 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6571 if (fmt[i] == 'e')
6572 validate_change (object, &XEXP (x, i),
6573 cse_process_notes (XEXP (x, i), object), 0);
6574
6575 return x;
6576 }
6577 \f
6578 /* Find common subexpressions between the end test of a loop and the beginning
6579 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
6580
6581 Often we have a loop where an expression in the exit test is used
6582 in the body of the loop. For example "while (*p) *q++ = *p++;".
6583 Because of the way we duplicate the loop exit test in front of the loop,
6584 however, we don't detect that common subexpression. This will be caught
6585 when global cse is implemented, but this is a quite common case.
6586
6587 This function handles the most common cases of these common expressions.
6588 It is called after we have processed the basic block ending with the
6589 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
6590 jumps to a label used only once. */
6591
6592 static void
6593 cse_around_loop (loop_start)
6594 rtx loop_start;
6595 {
6596 rtx insn;
6597 int i;
6598 struct table_elt *p;
6599
6600 /* If the jump at the end of the loop doesn't go to the start, we don't
6601 do anything. */
6602 for (insn = PREV_INSN (loop_start);
6603 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
6604 insn = PREV_INSN (insn))
6605 ;
6606
6607 if (insn == 0
6608 || GET_CODE (insn) != NOTE
6609 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
6610 return;
6611
6612 /* If the last insn of the loop (the end test) was an NE comparison,
6613 we will interpret it as an EQ comparison, since we fell through
6614 the loop. Any equivalences resulting from that comparison are
6615 therefore not valid and must be invalidated. */
6616 if (last_jump_equiv_class)
6617 for (p = last_jump_equiv_class->first_same_value; p;
6618 p = p->next_same_value)
6619 {
6620 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
6621 || (GET_CODE (p->exp) == SUBREG
6622 && GET_CODE (SUBREG_REG (p->exp)) == REG))
6623 invalidate (p->exp, VOIDmode);
6624 else if (GET_CODE (p->exp) == STRICT_LOW_PART
6625 || GET_CODE (p->exp) == ZERO_EXTRACT)
6626 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
6627 }
6628
6629 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
6630 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
6631
6632 The only thing we do with SET_DEST is invalidate entries, so we
6633 can safely process each SET in order. It is slightly less efficient
6634 to do so, but we only want to handle the most common cases.
6635
6636 The gen_move_insn call in cse_set_around_loop may create new pseudos.
6637 These pseudos won't have valid entries in any of the tables indexed
6638 by register number, such as reg_qty. We avoid out-of-range array
6639 accesses by not processing any instructions created after cse started. */
6640
6641 for (insn = NEXT_INSN (loop_start);
6642 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
6643 && INSN_UID (insn) < max_insn_uid
6644 && ! (GET_CODE (insn) == NOTE
6645 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
6646 insn = NEXT_INSN (insn))
6647 {
6648 if (INSN_P (insn)
6649 && (GET_CODE (PATTERN (insn)) == SET
6650 || GET_CODE (PATTERN (insn)) == CLOBBER))
6651 cse_set_around_loop (PATTERN (insn), insn, loop_start);
6652 else if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == PARALLEL)
6653 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6654 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
6655 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
6656 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
6657 loop_start);
6658 }
6659 }
6660 \f
6661 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
6662 since they are done elsewhere. This function is called via note_stores. */
6663
6664 static void
6665 invalidate_skipped_set (dest, set, data)
6666 rtx set;
6667 rtx dest;
6668 void *data ATTRIBUTE_UNUSED;
6669 {
6670 enum rtx_code code = GET_CODE (dest);
6671
6672 if (code == MEM
6673 && ! addr_affects_sp_p (dest) /* If this is not a stack push ... */
6674 /* There are times when an address can appear varying and be a PLUS
6675 during this scan when it would be a fixed address were we to know
6676 the proper equivalences. So invalidate all memory if there is
6677 a BLKmode or nonscalar memory reference or a reference to a
6678 variable address. */
6679 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6680 || cse_rtx_varies_p (XEXP (dest, 0), 0)))
6681 {
6682 invalidate_memory ();
6683 return;
6684 }
6685
6686 if (GET_CODE (set) == CLOBBER
6687 #ifdef HAVE_cc0
6688 || dest == cc0_rtx
6689 #endif
6690 || dest == pc_rtx)
6691 return;
6692
6693 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6694 invalidate (XEXP (dest, 0), GET_MODE (dest));
6695 else if (code == REG || code == SUBREG || code == MEM)
6696 invalidate (dest, VOIDmode);
6697 }
6698
6699 /* Invalidate all insns from START up to the end of the function or the
6700 next label. This is called when we wish to CSE around a block that is
6701 conditionally executed. */
6702
6703 static void
6704 invalidate_skipped_block (start)
6705 rtx start;
6706 {
6707 rtx insn;
6708
6709 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
6710 insn = NEXT_INSN (insn))
6711 {
6712 if (! INSN_P (insn))
6713 continue;
6714
6715 if (GET_CODE (insn) == CALL_INSN)
6716 {
6717 if (! CONST_OR_PURE_CALL_P (insn))
6718 invalidate_memory ();
6719 invalidate_for_call ();
6720 }
6721
6722 invalidate_from_clobbers (PATTERN (insn));
6723 note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6724 }
6725 }
6726 \f
6727 /* If modifying X will modify the value in *DATA (which is really an
6728 `rtx *'), indicate that fact by setting the pointed to value to
6729 NULL_RTX. */
6730
6731 static void
6732 cse_check_loop_start (x, set, data)
6733 rtx x;
6734 rtx set ATTRIBUTE_UNUSED;
6735 void *data;
6736 {
6737 rtx *cse_check_loop_start_value = (rtx *) data;
6738
6739 if (*cse_check_loop_start_value == NULL_RTX
6740 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
6741 return;
6742
6743 if ((GET_CODE (x) == MEM && GET_CODE (*cse_check_loop_start_value) == MEM)
6744 || reg_overlap_mentioned_p (x, *cse_check_loop_start_value))
6745 *cse_check_loop_start_value = NULL_RTX;
6746 }
6747
6748 /* X is a SET or CLOBBER contained in INSN that was found near the start of
6749 a loop that starts with the label at LOOP_START.
6750
6751 If X is a SET, we see if its SET_SRC is currently in our hash table.
6752 If so, we see if it has a value equal to some register used only in the
6753 loop exit code (as marked by jump.c).
6754
6755 If those two conditions are true, we search backwards from the start of
6756 the loop to see if that same value was loaded into a register that still
6757 retains its value at the start of the loop.
6758
6759 If so, we insert an insn after the load to copy the destination of that
6760 load into the equivalent register and (try to) replace our SET_SRC with that
6761 register.
6762
6763 In any event, we invalidate whatever this SET or CLOBBER modifies. */
6764
6765 static void
6766 cse_set_around_loop (x, insn, loop_start)
6767 rtx x;
6768 rtx insn;
6769 rtx loop_start;
6770 {
6771 struct table_elt *src_elt;
6772
6773 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
6774 are setting PC or CC0 or whose SET_SRC is already a register. */
6775 if (GET_CODE (x) == SET
6776 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
6777 && GET_CODE (SET_SRC (x)) != REG)
6778 {
6779 src_elt = lookup (SET_SRC (x),
6780 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
6781 GET_MODE (SET_DEST (x)));
6782
6783 if (src_elt)
6784 for (src_elt = src_elt->first_same_value; src_elt;
6785 src_elt = src_elt->next_same_value)
6786 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
6787 && COST (src_elt->exp) < COST (SET_SRC (x)))
6788 {
6789 rtx p, set;
6790
6791 /* Look for an insn in front of LOOP_START that sets
6792 something in the desired mode to SET_SRC (x) before we hit
6793 a label or CALL_INSN. */
6794
6795 for (p = prev_nonnote_insn (loop_start);
6796 p && GET_CODE (p) != CALL_INSN
6797 && GET_CODE (p) != CODE_LABEL;
6798 p = prev_nonnote_insn (p))
6799 if ((set = single_set (p)) != 0
6800 && GET_CODE (SET_DEST (set)) == REG
6801 && GET_MODE (SET_DEST (set)) == src_elt->mode
6802 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
6803 {
6804 /* We now have to ensure that nothing between P
6805 and LOOP_START modified anything referenced in
6806 SET_SRC (x). We know that nothing within the loop
6807 can modify it, or we would have invalidated it in
6808 the hash table. */
6809 rtx q;
6810 rtx cse_check_loop_start_value = SET_SRC (x);
6811 for (q = p; q != loop_start; q = NEXT_INSN (q))
6812 if (INSN_P (q))
6813 note_stores (PATTERN (q),
6814 cse_check_loop_start,
6815 &cse_check_loop_start_value);
6816
6817 /* If nothing was changed and we can replace our
6818 SET_SRC, add an insn after P to copy its destination
6819 to what we will be replacing SET_SRC with. */
6820 if (cse_check_loop_start_value
6821 && validate_change (insn, &SET_SRC (x),
6822 src_elt->exp, 0))
6823 {
6824 /* If this creates new pseudos, this is unsafe,
6825 because the regno of a new pseudo is unsuitable
6826 to index into reg_qty when cse_insn processes
6827 the new insn. Therefore, if a new pseudo was
6828 created, discard this optimization. */
6829 int nregs = max_reg_num ();
6830 rtx move
6831 = gen_move_insn (src_elt->exp, SET_DEST (set));
6832 if (nregs != max_reg_num ())
6833 {
6834 if (! validate_change (insn, &SET_SRC (x),
6835 SET_SRC (set), 0))
6836 abort ();
6837 }
6838 else
6839 emit_insn_after (move, p);
6840 }
6841 break;
6842 }
6843 }
6844 }
6845
6846 /* Deal with the destination of X affecting the stack pointer. */
6847 addr_affects_sp_p (SET_DEST (x));
6848
6849 /* See comment on similar code in cse_insn for explanation of these
6850 tests. */
6851 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
6852 || GET_CODE (SET_DEST (x)) == MEM)
6853 invalidate (SET_DEST (x), VOIDmode);
6854 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
6855 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
6856 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
6857 }
6858 \f
6859 /* Find the end of INSN's basic block and return its range,
6860 the total number of SETs in all the insns of the block, the last insn of the
6861 block, and the branch path.
6862
6863 The branch path indicates which branches should be followed. If a nonzero
6864 path size is specified, the block should be rescanned and a different set
6865 of branches will be taken. The branch path is only used if
6866 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is nonzero.
6867
6868 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6869 used to describe the block. It is filled in with the information about
6870 the current block. The incoming structure's branch path, if any, is used
6871 to construct the output branch path. */
6872
6873 void
6874 cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
6875 rtx insn;
6876 struct cse_basic_block_data *data;
6877 int follow_jumps;
6878 int after_loop;
6879 int skip_blocks;
6880 {
6881 rtx p = insn, q;
6882 int nsets = 0;
6883 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6884 rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
6885 int path_size = data->path_size;
6886 int path_entry = 0;
6887 int i;
6888
6889 /* Update the previous branch path, if any. If the last branch was
6890 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
6891 shorten the path by one and look at the previous branch. We know that
6892 at least one branch must have been taken if PATH_SIZE is nonzero. */
6893 while (path_size > 0)
6894 {
6895 if (data->path[path_size - 1].status != NOT_TAKEN)
6896 {
6897 data->path[path_size - 1].status = NOT_TAKEN;
6898 break;
6899 }
6900 else
6901 path_size--;
6902 }
6903
6904 /* If the first instruction is marked with QImode, that means we've
6905 already processed this block. Our caller will look at DATA->LAST
6906 to figure out where to go next. We want to return the next block
6907 in the instruction stream, not some branched-to block somewhere
6908 else. We accomplish this by pretending our caller forbade us to
6909 follow jumps or skip blocks. */
6910 if (GET_MODE (insn) == QImode)
6911 follow_jumps = skip_blocks = 0;
6912
6913 /* Scan to end of this basic block. */
6914 while (p && GET_CODE (p) != CODE_LABEL)
6915 {
6916 /* Don't cse out the end of a loop. This makes a difference
6917 only for the unusual loops that always execute at least once;
6918 all other loops have labels there so we will stop in any case.
6919 Cse'ing out the end of the loop is dangerous because it
6920 might cause an invariant expression inside the loop
6921 to be reused after the end of the loop. This would make it
6922 hard to move the expression out of the loop in loop.c,
6923 especially if it is one of several equivalent expressions
6924 and loop.c would like to eliminate it.
6925
6926 If we are running after loop.c has finished, we can ignore
6927 the NOTE_INSN_LOOP_END. */
6928
6929 if (! after_loop && GET_CODE (p) == NOTE
6930 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
6931 break;
6932
6933 /* Don't cse over a call to setjmp; on some machines (e.g. VAX)
6934 the regs restored by the longjmp come from
6935 a later time than the setjmp. */
6936 if (PREV_INSN (p) && GET_CODE (PREV_INSN (p)) == CALL_INSN
6937 && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
6938 break;
6939
6940 /* A PARALLEL can have lots of SETs in it,
6941 especially if it is really an ASM_OPERANDS. */
6942 if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6943 nsets += XVECLEN (PATTERN (p), 0);
6944 else if (GET_CODE (p) != NOTE)
6945 nsets += 1;
6946
6947 /* Ignore insns made by CSE; they cannot affect the boundaries of
6948 the basic block. */
6949
6950 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6951 high_cuid = INSN_CUID (p);
6952 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6953 low_cuid = INSN_CUID (p);
6954
6955 /* See if this insn is in our branch path. If it is and we are to
6956 take it, do so. */
6957 if (path_entry < path_size && data->path[path_entry].branch == p)
6958 {
6959 if (data->path[path_entry].status != NOT_TAKEN)
6960 p = JUMP_LABEL (p);
6961
6962 /* Point to next entry in path, if any. */
6963 path_entry++;
6964 }
6965
6966 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6967 was specified, we haven't reached our maximum path length, there are
6968 insns following the target of the jump, this is the only use of the
6969 jump label, and the target label is preceded by a BARRIER.
6970
6971 Alternatively, we can follow the jump if it branches around a
6972 block of code and there are no other branches into the block.
6973 In this case invalidate_skipped_block will be called to invalidate any
6974 registers set in the block when following the jump. */
6975
6976 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
6977 && GET_CODE (p) == JUMP_INSN
6978 && GET_CODE (PATTERN (p)) == SET
6979 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6980 && JUMP_LABEL (p) != 0
6981 && LABEL_NUSES (JUMP_LABEL (p)) == 1
6982 && NEXT_INSN (JUMP_LABEL (p)) != 0)
6983 {
6984 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6985 if ((GET_CODE (q) != NOTE
6986 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
6987 || (PREV_INSN (q) && GET_CODE (PREV_INSN (q)) == CALL_INSN
6988 && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
6989 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
6990 break;
6991
6992 /* If we ran into a BARRIER, this code is an extension of the
6993 basic block when the branch is taken. */
6994 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
6995 {
6996 /* Don't allow ourselves to keep walking around an
6997 always-executed loop. */
6998 if (next_real_insn (q) == next)
6999 {
7000 p = NEXT_INSN (p);
7001 continue;
7002 }
7003
7004 /* Similarly, don't put a branch in our path more than once. */
7005 for (i = 0; i < path_entry; i++)
7006 if (data->path[i].branch == p)
7007 break;
7008
7009 if (i != path_entry)
7010 break;
7011
7012 data->path[path_entry].branch = p;
7013 data->path[path_entry++].status = TAKEN;
7014
7015 /* This branch now ends our path. It was possible that we
7016 didn't see this branch the last time around (when the
7017 insn in front of the target was a JUMP_INSN that was
7018 turned into a no-op). */
7019 path_size = path_entry;
7020
7021 p = JUMP_LABEL (p);
7022 /* Mark block so we won't scan it again later. */
7023 PUT_MODE (NEXT_INSN (p), QImode);
7024 }
7025 /* Detect a branch around a block of code. */
7026 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
7027 {
7028 rtx tmp;
7029
7030 if (next_real_insn (q) == next)
7031 {
7032 p = NEXT_INSN (p);
7033 continue;
7034 }
7035
7036 for (i = 0; i < path_entry; i++)
7037 if (data->path[i].branch == p)
7038 break;
7039
7040 if (i != path_entry)
7041 break;
7042
7043 /* This is no_labels_between_p (p, q) with an added check for
7044 reaching the end of a function (in case Q precedes P). */
7045 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
7046 if (GET_CODE (tmp) == CODE_LABEL)
7047 break;
7048
7049 if (tmp == q)
7050 {
7051 data->path[path_entry].branch = p;
7052 data->path[path_entry++].status = AROUND;
7053
7054 path_size = path_entry;
7055
7056 p = JUMP_LABEL (p);
7057 /* Mark block so we won't scan it again later. */
7058 PUT_MODE (NEXT_INSN (p), QImode);
7059 }
7060 }
7061 }
7062 p = NEXT_INSN (p);
7063 }
7064
7065 data->low_cuid = low_cuid;
7066 data->high_cuid = high_cuid;
7067 data->nsets = nsets;
7068 data->last = p;
7069
7070 /* If none of the jumps in the path were taken, set our path length to zero
7071 so a rescan won't be done. */
7072 for (i = path_size - 1; i >= 0; i--)
7073 if (data->path[i].status != NOT_TAKEN)
7074 break;
7075
7076 if (i == -1)
7077 data->path_size = 0;
7078 else
7079 data->path_size = path_size;
7080
7081 /* End the current branch path. */
7082 data->path[path_size].branch = 0;
7083 }
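/* The path-update loop at the top of cse_end_of_basic_block walks the
   taken/not-taken combinations much like counting down in binary: flip
   the last non-NOT_TAKEN entry, dropping trailing NOT_TAKEN entries
   first. The disabled standalone sketch below (a hypothetical flag
   array; it omits the greedy re-extension the real scan performs)
   shows the enumeration order for a two-branch path. */
#if 0
#include <stdio.h>

static int
next_path (flags, size)
     int *flags;
     int size;
{
  while (size > 0)
    {
      if (flags[size - 1] != 0)		/* Entry was TAKEN ...  */
	{
	  flags[size - 1] = 0;		/* ... so mark it NOT_TAKEN.  */
	  return size;
	}
      size--;				/* Trailing NOT_TAKEN: shorten.  */
    }
  return 0;				/* Every combination tried.  */
}

int
main ()
{
  int flags[2] = { 1, 1 };		/* Both branches TAKEN at first.  */
  int size = 2;

  while (size > 0)
    {
      printf ("%d %d (size %d)\n", flags[0], flags[1], size);
      size = next_path (flags, size);
    }
  /* Prints: 1 1 (size 2), then 1 0 (size 2), then 0 0 (size 1).  */
  return 0;
}
#endif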
7084 \f
7085 /* Perform cse on the instructions of a function.
7086 F is the first instruction.
7087 NREGS is one plus the highest pseudo-reg number used in the function.
7088
7089 AFTER_LOOP is 1 if this is the cse call done after loop optimization
7090 (only if -frerun-cse-after-loop).
7091
7092 Returns 1 if jump_optimize should be redone due to simplifications
7093 in conditional jump instructions. */
7094
7095 int
7096 cse_main (f, nregs, after_loop, file)
7097 rtx f;
7098 int nregs;
7099 int after_loop;
7100 FILE *file;
7101 {
7102 struct cse_basic_block_data val;
7103 rtx insn = f;
7104 int i;
7105
7106 cse_jumps_altered = 0;
7107 recorded_label_ref = 0;
7108 constant_pool_entries_cost = 0;
7109 val.path_size = 0;
7110
7111 init_recog ();
7112 init_alias_analysis ();
7113
7114 max_reg = nregs;
7115
7116 max_insn_uid = get_max_uid ();
7117
7118 reg_eqv_table = (struct reg_eqv_elem *)
7119 xmalloc (nregs * sizeof (struct reg_eqv_elem));
7120
7121 #ifdef LOAD_EXTEND_OP
7122
7123 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
7124 and change the code and mode as appropriate. */
7125 memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
7126 #endif
7127
7128 /* Reset the counter indicating how many elements have been made
7129 thus far. */
7130 n_elements_made = 0;
7131
7132 /* Find the largest uid. */
7133
7134 max_uid = get_max_uid ();
7135 uid_cuid = (int *) xcalloc (max_uid + 1, sizeof (int));
7136
7137 /* Compute the mapping from uids to cuids.
7138 CUIDs are numbers assigned to insns, like uids,
7139 except that cuids increase monotonically through the code.
7140 Don't assign cuids to line-number NOTEs, so that the distance in cuids
7141 between two insns is not affected by -g. */
7142
7143 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
7144 {
7145 if (GET_CODE (insn) != NOTE
7146 || NOTE_LINE_NUMBER (insn) < 0)
7147 INSN_CUID (insn) = ++i;
7148 else
7149 /* Give a line number note the same cuid as the preceding insn. */
7150 INSN_CUID (insn) = i;
7151 }
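/* A worked instance of the mapping just built, under a hypothetical
   stream in which insns 1, 2 and 5 are line-number notes (disabled
   standalone sketch): */
#if 0
#include <stdio.h>

int
main ()
{
  /* 1 marks a line-number NOTE, 0 a real insn (made-up stream).  */
  int is_line_note[6] = { 0, 1, 1, 0, 0, 1 };
  int cuid[6], i, n = 0;

  for (i = 0; i < 6; i++)
    cuid[i] = is_line_note[i] ? n : ++n;

  for (i = 0; i < 6; i++)
    printf ("insn %d -> cuid %d\n", i, cuid[i]);
  /* Prints cuids 1 1 1 2 3 3: the cuid distance between the real insns
     is the same whether or not -g emitted the notes.  */
  return 0;
}
#endif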
7152
7153 ggc_push_context ();
7154
7155 /* Loop over basic blocks.
7156 Compute the maximum number of qty's needed for each basic block
7157 (which is 2 for each SET). */
7158 insn = f;
7159 while (insn)
7160 {
7161 cse_altered = 0;
7162 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
7163 flag_cse_skip_blocks);
7164
7165 /* If this basic block was already processed or has no sets, skip it. */
7166 if (val.nsets == 0 || GET_MODE (insn) == QImode)
7167 {
7168 PUT_MODE (insn, VOIDmode);
7169 insn = (val.last ? NEXT_INSN (val.last) : 0);
7170 val.path_size = 0;
7171 continue;
7172 }
7173
7174 cse_basic_block_start = val.low_cuid;
7175 cse_basic_block_end = val.high_cuid;
7176 max_qty = val.nsets * 2;
7177
7178 if (file)
7179 fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
7180 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
7181 val.nsets);
7182
7183 /* Make MAX_QTY bigger to give us room to optimize
7184 past the end of this basic block, if that should prove useful. */
7185 if (max_qty < 500)
7186 max_qty = 500;
7187
7188 max_qty += max_reg;
7189
7190 /* If this basic block is being extended by following certain jumps,
7191 (see `cse_end_of_basic_block'), we reprocess the code from the start.
7192 Otherwise, we start after this basic block. */
7193 if (val.path_size > 0)
7194 cse_basic_block (insn, val.last, val.path, 0);
7195 else
7196 {
7197 int old_cse_jumps_altered = cse_jumps_altered;
7198 rtx temp;
7199
7200 /* When cse changes a conditional jump to an unconditional
7201 jump, we want to reprocess the block, since it will give
7202 us a new branch path to investigate. */
7203 cse_jumps_altered = 0;
7204 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
7205 if (cse_jumps_altered == 0
7206 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7207 insn = temp;
7208
7209 cse_jumps_altered |= old_cse_jumps_altered;
7210 }
7211
7212 if (cse_altered)
7213 ggc_collect ();
7214
7215 #ifdef USE_C_ALLOCA
7216 alloca (0);
7217 #endif
7218 }
7219
7220 ggc_pop_context ();
7221
7222 if (max_elements_made < n_elements_made)
7223 max_elements_made = n_elements_made;
7224
7225 /* Clean up. */
7226 end_alias_analysis ();
7227 free (uid_cuid);
7228 free (reg_eqv_table);
7229
7230 return cse_jumps_altered || recorded_label_ref;
7231 }
7232
7233 /* Process a single basic block. FROM and TO are the limits of the basic
7234 block. NEXT_BRANCH points to the branch path when following jumps or
7235 a null path when not following jumps.
7236
7237 AROUND_LOOP is nonzero if we are to try to cse around to the start of a
7238 loop. This is true when we are being called for the last time on a
7239 block and this CSE pass is before loop.c. */
7240
7241 static rtx
7242 cse_basic_block (from, to, next_branch, around_loop)
7243 rtx from, to;
7244 struct branch_path *next_branch;
7245 int around_loop;
7246 {
7247 rtx insn;
7248 int to_usage = 0;
7249 rtx libcall_insn = NULL_RTX;
7250 int num_insns = 0;
7251
7252 /* This array is undefined before max_reg, so only allocate
7253 the space actually needed and adjust the start. */
7254
7255 qty_table
7256 = (struct qty_table_elem *) xmalloc ((max_qty - max_reg)
7257 * sizeof (struct qty_table_elem));
7258 qty_table -= max_reg;
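/* An aside on the bias just applied: qty_table[q] is now valid exactly
   for q in [max_reg, max_qty), without allocating the never-used slots
   below max_reg. A disabled sketch of the idiom with hypothetical
   bounds; note that the bias must be undone before freeing, as the
   free (qty_table + max_reg) calls below do. */
#if 0
{
  int lo = 10, hi = 15;
  int *buf = (int *) xmalloc ((hi - lo) * sizeof (int));
  int *biased = buf - lo;	/* biased[10] aliases buf[0].  */

  biased[lo] = 42;		/* First valid index is LO, not 0.  */
  free (biased + lo);		/* Recover the original pointer.  */
}
#endif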
7259
7260 new_basic_block ();
7261
7262 /* TO might be a label. If so, protect it from being deleted. */
7263 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7264 ++LABEL_NUSES (to);
7265
7266 for (insn = from; insn != to; insn = NEXT_INSN (insn))
7267 {
7268 enum rtx_code code = GET_CODE (insn);
7269
7270 /* If we have processed 1,000 insns, flush the hash table to
7271 avoid extreme quadratic behavior. We must not include NOTEs
7272 in the count since there may be more of them when generating
7273 debugging information. If we clear the table at different
7274 times, code generated with -g -O might be different from code
7275 generated with -O but not -g.
7276
7277 ??? This is a real kludge and needs to be done some other way.
7278 Perhaps for 2.9. */
7279 if (code != NOTE && num_insns++ > 1000)
7280 {
7281 flush_hash_table ();
7282 num_insns = 0;
7283 }
7284
7285 /* See if this is a branch that is part of the path. If so, and it is
7286 to be taken, do so. */
7287 if (next_branch->branch == insn)
7288 {
7289 enum taken status = next_branch++->status;
7290 if (status != NOT_TAKEN)
7291 {
7292 if (status == TAKEN)
7293 record_jump_equiv (insn, 1);
7294 else
7295 invalidate_skipped_block (NEXT_INSN (insn));
7296
7297 /* Set the last insn as the jump insn; it doesn't affect cc0.
7298 Then follow this branch. */
7299 #ifdef HAVE_cc0
7300 prev_insn_cc0 = 0;
7301 #endif
7302 prev_insn = insn;
7303 insn = JUMP_LABEL (insn);
7304 continue;
7305 }
7306 }
7307
7308 if (GET_MODE (insn) == QImode)
7309 PUT_MODE (insn, VOIDmode);
7310
7311 if (GET_RTX_CLASS (code) == 'i')
7312 {
7313 rtx p;
7314
7315 /* Process notes first so we have all notes in canonical forms when
7316 looking for duplicate operations. */
7317
7318 if (REG_NOTES (insn))
7319 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
7320
7321 /* Track when we are inside a LIBCALL block. Inside such a block,
7322 we do not want to record destinations. The last insn of a
7323 LIBCALL block is not considered to be part of the block, since
7324 its destination is the result of the block and hence should be
7325 recorded. */
7326
7327 if (REG_NOTES (insn) != 0)
7328 {
7329 if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
7330 libcall_insn = XEXP (p, 0);
7331 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7332 libcall_insn = 0;
7333 }
7334
7335 cse_insn (insn, libcall_insn);
7336
7337 /* If we haven't already found an insn where we added a LABEL_REF,
7338 check this one. */
7339 if (GET_CODE (insn) == INSN && ! recorded_label_ref
7340 && for_each_rtx (&PATTERN (insn), check_for_label_ref,
7341 (void *) insn))
7342 recorded_label_ref = 1;
7343 }
7344
7345 /* If INSN is now an unconditional jump, skip to the end of our
7346 basic block by pretending that we just did the last insn in the
7347 basic block. If we are jumping to the end of our block, show
7348 that we can have one usage of TO. */
7349
7350 if (any_uncondjump_p (insn))
7351 {
7352 if (to == 0)
7353 {
7354 free (qty_table + max_reg);
7355 return 0;
7356 }
7357
7358 if (JUMP_LABEL (insn) == to)
7359 to_usage = 1;
7360
7361 /* Maybe TO was deleted because the jump is unconditional.
7362 If so, there is nothing left in this basic block. */
7363 /* ??? Perhaps it would be smarter to set TO
7364 to whatever follows this insn,
7365 and pretend the basic block had always ended here. */
7366 if (INSN_DELETED_P (to))
7367 break;
7368
7369 insn = PREV_INSN (to);
7370 }
7371
7372 /* See if it is ok to keep on going past the label
7373 which used to end our basic block. Remember that we incremented
7374 the count of that label, so we decrement it here. If we made
7375 a jump unconditional, TO_USAGE will be one; in that case, we don't
7376 want to count the use in that jump. */
7377
7378 if (to != 0 && NEXT_INSN (insn) == to
7379 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
7380 {
7381 struct cse_basic_block_data val;
7382 rtx prev;
7383
7384 insn = NEXT_INSN (to);
7385
7386 /* If TO was the last insn in the function, we are done. */
7387 if (insn == 0)
7388 {
7389 free (qty_table + max_reg);
7390 return 0;
7391 }
7392
7393 /* If TO was preceded by a BARRIER we are done with this block
7394 because it has no continuation. */
7395 prev = prev_nonnote_insn (to);
7396 if (prev && GET_CODE (prev) == BARRIER)
7397 {
7398 free (qty_table + max_reg);
7399 return insn;
7400 }
7401
7402 /* Find the end of the following block. Note that we won't be
7403 following branches in this case. */
7404 to_usage = 0;
7405 val.path_size = 0;
7406 cse_end_of_basic_block (insn, &val, 0, 0, 0);
7407
7408 /* If the tables we allocated have enough space left
7409 to handle all the SETs in the next basic block,
7410 continue through it. Otherwise, return,
7411 and that block will be scanned individually. */
7412 if (val.nsets * 2 + next_qty > max_qty)
7413 break;
7414
7415 cse_basic_block_start = val.low_cuid;
7416 cse_basic_block_end = val.high_cuid;
7417 to = val.last;
7418
7419 /* Prevent TO from being deleted if it is a label. */
7420 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7421 ++LABEL_NUSES (to);
7422
7423 /* Back up so we process the first insn in the extension. */
7424 insn = PREV_INSN (insn);
7425 }
7426 }
7427
7428 if (next_qty > max_qty)
7429 abort ();
7430
7431 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
7432 the previous insn is the only insn that branches to the head of a loop,
7433 we can cse into the loop. Don't do this if we changed the jump
7434 structure of a loop unless we aren't going to be following jumps. */
7435
7436 insn = prev_nonnote_insn (to);
7437 if ((cse_jumps_altered == 0
7438 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7439 && around_loop && to != 0
7440 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
7441 && GET_CODE (insn) == JUMP_INSN
7442 && JUMP_LABEL (insn) != 0
7443 && LABEL_NUSES (JUMP_LABEL (insn)) == 1)
7444 cse_around_loop (JUMP_LABEL (insn));
7445
7446 free (qty_table + max_reg);
7447
7448 return to ? NEXT_INSN (to) : 0;
7449 }
7450 \f
7451 /* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
7452 there isn't a REG_LABEL note. Return one if so. DATA is the insn. */
7453
7454 static int
7455 check_for_label_ref (rtl, data)
7456 rtx *rtl;
7457 void *data;
7458 {
7459 rtx insn = (rtx) data;
7460
7461 /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
7462 we must rerun jump since it needs to place the note. If this is a
7463 LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
7464 since no REG_LABEL will be added. */
7465 return (GET_CODE (*rtl) == LABEL_REF
7466 && ! LABEL_REF_NONLOCAL_P (*rtl)
7467 && LABEL_P (XEXP (*rtl, 0))
7468 && INSN_UID (XEXP (*rtl, 0)) != 0
7469 && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
7470 }
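/* check_for_label_ref follows the usual for_each_rtx contract: the
   walker passes a pointer to each subexpression plus the DATA pointer,
   and a nonzero return stops the walk and becomes for_each_rtx's own
   return value. A disabled sketch of another callback written to the
   same contract (illustrative only, not used by this pass): */
#if 0
static int
mentions_symbol_ref (rtl, data)
     rtx *rtl;
     void *data ATTRIBUTE_UNUSED;
{
  /* Returning nonzero halts the traversal at the first SYMBOL_REF.  */
  return GET_CODE (*rtl) == SYMBOL_REF;
}

/* Typical use:
   if (for_each_rtx (&PATTERN (insn), mentions_symbol_ref, NULL))
     ...  */
#endif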
7471 \f
7472 /* Count the number of times registers are used (not set) in X.
7473 COUNTS is an array in which we accumulate the count, INCR is how much
7474 we count each register usage.
7475
7476 Don't count a usage of DEST, which is the SET_DEST of a SET which
7477 contains X in its SET_SRC. This is because such a SET does not
7478 modify the liveness of DEST. */
7479
7480 static void
7481 count_reg_usage (x, counts, dest, incr)
7482 rtx x;
7483 int *counts;
7484 rtx dest;
7485 int incr;
7486 {
7487 enum rtx_code code;
7488 const char *fmt;
7489 int i, j;
7490
7491 if (x == 0)
7492 return;
7493
7494 switch (code = GET_CODE (x))
7495 {
7496 case REG:
7497 if (x != dest)
7498 counts[REGNO (x)] += incr;
7499 return;
7500
7501 case PC:
7502 case CC0:
7503 case CONST:
7504 case CONST_INT:
7505 case CONST_DOUBLE:
7506 case CONST_VECTOR:
7507 case SYMBOL_REF:
7508 case LABEL_REF:
7509 return;
7510
7511 case CLOBBER:
7512 /* If we are clobbering a MEM, mark any registers inside the address
7513 as being used. */
7514 if (GET_CODE (XEXP (x, 0)) == MEM)
7515 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
7516 return;
7517
7518 case SET:
7519 /* Unless we are setting a REG, count everything in SET_DEST. */
7520 if (GET_CODE (SET_DEST (x)) != REG)
7521 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
7522
7523 /* If SRC has side-effects, then we can't delete this insn, so the
7524 usage of SET_DEST inside SRC counts.
7525
7526 ??? Strictly speaking, we might be preserving this insn
7527 because some other SET has side-effects, but that's hard
7528 to do and can't happen now. */
7529 count_reg_usage (SET_SRC (x), counts,
7530 side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
7531 incr);
7532 return;
7533
7534 case CALL_INSN:
7535 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
7536 /* Fall through. */
7537
7538 case INSN:
7539 case JUMP_INSN:
7540 count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);
7541
7542 /* Things used in a REG_EQUAL note aren't dead since loop.c may try to
7543 use them. */
7544
7545 count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
7546 return;
7547
7548 case EXPR_LIST:
7549 case INSN_LIST:
7550 if (REG_NOTE_KIND (x) == REG_EQUAL
7551 || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x, 0)) == USE))
7552 count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
7553 count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
7554 return;
7555
7556 default:
7557 break;
7558 }
7559
7560 fmt = GET_RTX_FORMAT (code);
7561 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7562 {
7563 if (fmt[i] == 'e')
7564 count_reg_usage (XEXP (x, i), counts, dest, incr);
7565 else if (fmt[i] == 'E')
7566 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7567 count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
7568 }
7569 }
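/* So, for instance, in the self-increment

	(set (reg 70) (plus (reg 70) (const_int 1)))

   the (reg 70) in the source is normally the very same shared rtx as
   the destination, so the x != dest test above skips it: a register
   kept alive only by its own increment keeps a zero count, and
   delete_trivially_dead_insns below can remove the insn. */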
7570 \f
7571 /* Return true if set is live. */
7572 static bool
7573 set_live_p (set, insn, counts)
7574 rtx set;
7575 rtx insn ATTRIBUTE_UNUSED; /* Only used with HAVE_cc0. */
7576 int *counts;
7577 {
7578 #ifdef HAVE_cc0
7579 rtx tem;
7580 #endif
7581
7582 if (set_noop_p (set))
7583 ;
7584
7585 #ifdef HAVE_cc0
7586 else if (GET_CODE (SET_DEST (set)) == CC0
7587 && !side_effects_p (SET_SRC (set))
7588 && ((tem = next_nonnote_insn (insn)) == 0
7589 || !INSN_P (tem)
7590 || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
7591 return false;
7592 #endif
7593 else if (GET_CODE (SET_DEST (set)) != REG
7594 || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
7595 || counts[REGNO (SET_DEST (set))] != 0
7596 || side_effects_p (SET_SRC (set))
7597 /* An ADDRESSOF expression can turn into a use of the
7598 internal arg pointer, so always consider the
7599 internal arg pointer live. If it is truly dead,
7600 flow will delete the initializing insn. */
7601 || (SET_DEST (set) == current_function_internal_arg_pointer))
7602 return true;
7603 return false;
7604 }
7605
7606 /* Return true if insn is live. */
7607
7608 static bool
7609 insn_live_p (insn, counts)
7610 rtx insn;
7611 int *counts;
7612 {
7613 int i;
7614 if (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
7615 return true;
7616 else if (GET_CODE (PATTERN (insn)) == SET)
7617 return set_live_p (PATTERN (insn), insn, counts);
7618 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7619 {
7620 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7621 {
7622 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7623
7624 if (GET_CODE (elt) == SET)
7625 {
7626 if (set_live_p (elt, insn, counts))
7627 return true;
7628 }
7629 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7630 return true;
7631 }
7632 return false;
7633 }
7634 else
7635 return true;
7636 }
7637
7638 /* Return true if libcall is dead as a whole. */
7639
7640 static bool
7641 dead_libcall_p (insn, counts)
7642 rtx insn;
7643 int *counts;
7644 {
7645 rtx note;
7646 /* See if there's a REG_EQUAL note on this insn and try to
7647 replace the source with the REG_EQUAL expression.
7648
7649 We assume that insns with REG_RETVALs can only be reg->reg
7650 copies at this point. */
7651 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7652 if (note)
7653 {
7654 rtx set = single_set (insn);
7655 rtx new = simplify_rtx (XEXP (note, 0));
7656
7657 if (!new)
7658 new = XEXP (note, 0);
7659
7660 /* While changing insn, we must update the counts accordingly. */
7661 count_reg_usage (insn, counts, NULL_RTX, -1);
7662
7663 if (set && validate_change (insn, &SET_SRC (set), new, 0))
7664 {
7665 count_reg_usage (insn, counts, NULL_RTX, 1);
7666 remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7667 remove_note (insn, note);
7668 return true;
7669 }
7670 count_reg_usage (insn, counts, NULL_RTX, 1);
7671 }
7672 return false;
7673 }
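/* The notes dead_libcall_p leans on tie a libcall block together
   roughly as follows (sketch; the insn numbers are hypothetical):

	(insn 10 (set (reg 80) ...))	; REG_LIBCALL note --> insn 12
	(insn 11 ...)
	(insn 12 (set (reg 81) (reg 80)))
					; REG_RETVAL note --> insn 10
					; REG_EQUAL note = the value

   Folding the REG_EQUAL value into insn 12's source and dropping both
   notes dissolves the block, after which the earlier insns of the
   block are deleted with it. */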
7674
7675 /* Scan all the insns and delete any that are dead; i.e., they store a register
7676 that is never used or they copy a register to itself.
7677
7678 This is used to remove insns made obviously dead by cse, loop or other
7679 optimizations. It improves the heuristics in loop since it won't try to
7680 move dead invariants out of loops or make givs for dead quantities. The
7681 remaining passes of the compilation are also sped up. */
7682
7683 int
7684 delete_trivially_dead_insns (insns, nreg)
7685 rtx insns;
7686 int nreg;
7687 {
7688 int *counts;
7689 rtx insn, prev;
7690 int in_libcall = 0, dead_libcall = 0;
7691 int ndead = 0, nlastdead, niterations = 0;
7692
7693 timevar_push (TV_DELETE_TRIVIALLY_DEAD);
7694 /* First count the number of times each register is used. */
7695 counts = (int *) xcalloc (nreg, sizeof (int));
7696 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
7697 count_reg_usage (insn, counts, NULL_RTX, 1);
7698
7699 do
7700 {
7701 nlastdead = ndead;
7702 niterations++;
7703 /* Go from the last insn to the first and delete insns that only set unused
7704 registers or copy a register to itself. As we delete an insn, remove
7705 usage counts for registers it uses.
7706
7707 The first jump optimization pass may leave a real insn as the last
7708 insn in the function. We must not skip that insn or we may end
7709 up deleting code that is not really dead. */
7710 insn = get_last_insn ();
7711 if (! INSN_P (insn))
7712 insn = prev_real_insn (insn);
7713
7714 for (; insn; insn = prev)
7715 {
7716 int live_insn = 0;
7717
7718 prev = prev_real_insn (insn);
7719
7720 /* Don't delete any insns that are part of a libcall block unless
7721 we can delete the whole libcall block.
7722
7723 Flow or loop might get confused if we did that. Remember
7724 that we are scanning backwards. */
7725 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7726 {
7727 in_libcall = 1;
7728 live_insn = 1;
7729 dead_libcall = dead_libcall_p (insn, counts);
7730 }
7731 else if (in_libcall)
7732 live_insn = ! dead_libcall;
7733 else
7734 live_insn = insn_live_p (insn, counts);
7735
7736 /* If this is a dead insn, delete it and show registers in it aren't
7737 being used. */
7738
7739 if (! live_insn)
7740 {
7741 count_reg_usage (insn, counts, NULL_RTX, -1);
7742 delete_insn_and_edges (insn);
7743 ndead++;
7744 }
7745
7746 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7747 {
7748 in_libcall = 0;
7749 dead_libcall = 0;
7750 }
7751 }
7752 }
7753 while (ndead != nlastdead);
7754
7755 if (rtl_dump_file && ndead)
7756 fprintf (rtl_dump_file, "Deleted %i trivially dead insns; %i iterations\n",
7757 ndead, niterations);
7758 /* Clean up. */
7759 free (counts);
7760 timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
7761 return ndead;
7762 }
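/* The backward sweep above converges quickly because deleting an insn
   immediately uncovers the deadness of the insns feeding it. The
   disabled standalone sketch below models a three-insn chain in which
   only the last register is unused; one backward pass kills all three,
   and the do-while merely confirms the fixpoint (all data are
   hypothetical). */
#if 0
#include <stdio.h>

int
main ()
{
  /* Insn i sets register set_reg[i] and uses use_reg[i] (-1: none).  */
  int set_reg[3] = { 0, 1, 2 };
  int use_reg[3] = { -1, 0, 1 };
  int counts[3] = { 1, 1, 0 };	/* Use counts; register 2 is unused.  */
  int dead[3] = { 0, 0, 0 };
  int i, ndead = 0, nlast;

  do
    {
      nlast = ndead;
      for (i = 2; i >= 0; i--)		/* Backwards, as the pass scans.  */
	if (!dead[i] && counts[set_reg[i]] == 0)
	  {
	    dead[i] = 1;
	    ndead++;
	    if (use_reg[i] >= 0)
	      counts[use_reg[i]]--;	/* Uses in a dead insn vanish.  */
	  }
    }
  while (ndead != nlast);

  printf ("%d dead insns\n", ndead);	/* Prints: 3 dead insns.  */
  return 0;
}
#endif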