1 /* Common subexpression elimination for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 #include "config.h"
23 /* stdio.h must precede rtl.h for FFS. */
24 #include "system.h"
25
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "flags.h"
32 #include "real.h"
33 #include "insn-config.h"
34 #include "recog.h"
35 #include "function.h"
36 #include "expr.h"
37 #include "toplev.h"
38 #include "output.h"
39 #include "ggc.h"
40
41 /* The basic idea of common subexpression elimination is to go
42 through the code, keeping a record of expressions that would
43 have the same value at the current scan point, and replacing
44 expressions encountered with the cheapest equivalent expression.
45
46 It is too complicated to keep track of the different possibilities
47 when control paths merge in this code; so, at each label, we forget all
48 that is known and start fresh. This can be described as processing each
49 extended basic block separately. We have a separate pass to perform
50 global CSE.
51
52 Note CSE can turn a conditional or computed jump into a nop or
53 an unconditional jump. When this occurs we arrange to run the jump
54 optimizer after CSE to delete the unreachable code.
55
56 We use two data structures to record the equivalent expressions:
57 a hash table for most expressions, and a vector of "quantity
58 numbers" to record equivalent (pseudo) registers.
59
60 The use of the special data structure for registers is desirable
61 because it is faster. It is possible because register references
62 contain a fairly small number, the register number, taken from
63 a contiguously allocated series, and two register references are
64 identical if they have the same number. General expressions
65 do not have any such thing, so the only way to retrieve the
66 information recorded on an expression other than a register
67 is to keep it in a hash table.
68
69 Registers and "quantity numbers":
70
71 At the start of each basic block, all of the (hardware and pseudo)
72 registers used in the function are given distinct quantity
73 numbers to indicate their contents. During scan, when the code
74 copies one register into another, we copy the quantity number.
75 When a register is loaded in any other way, we allocate a new
76 quantity number to describe the value generated by this operation.
77 `reg_qty' records what quantity a register is currently thought
78 of as containing.
79
80 All real quantity numbers are greater than or equal to `max_reg'.
81 If register N has not been assigned a quantity, reg_qty[N] will equal N.
82
83 Quantity numbers below `max_reg' do not exist and none of the `qty_table'
84 entries should be referenced with an index below `max_reg'.
85
86 We also maintain a bidirectional chain of registers for each
87 quantity number. The `qty_table' members `first_reg' and `last_reg',
88 and `reg_eqv_table' members `next' and `prev' hold these chains.
89
90 The first register in a chain is the one whose lifespan is least local.
91 Among equals, it is the one that was seen first.
92 We replace any equivalent register with that one.
93
94 If two registers have the same quantity number, then REG expressions
95 with the qty_table `mode' must be in the hash table for both
96 registers and must be in the same class.
97
98 The converse is not true. Since hard registers may be referenced in
99 any mode, two REG expressions might be equivalent in the hash table
100 but not have the same quantity number if the quantity of one of the
101 registers does not have the same mode as those expressions.
102
103 Constants and quantity numbers:
104
105 When a quantity has a known constant value, that value is stored
106 in the appropriate qty_table `const_rtx'. This is in addition to
107 putting the constant in the hash table as is usual for non-regs.
108
109 Whether a reg or a constant is preferred is determined by the configuration
110 macro CONST_COSTS and will often depend on the constant value. In any
111 event, expressions containing constants can be simplified by fold_rtx.
112
113 When a quantity has a known nearly constant value (such as an address
114 of a stack slot), that value is stored in the appropriate qty_table
115 `const_rtx'.
116
117 Integer constants don't have a machine mode. However, cse
118 determines the intended machine mode from the destination
119 of the instruction that moves the constant. The machine mode
120 is recorded in the hash table along with the actual RTL
121 constant expression so that different modes are kept separate.
122
123 Other expressions:
124
125 To record known equivalences among expressions in general
126 we use a hash table called `table'. It has a fixed number of buckets
127 that contain chains of `struct table_elt' elements for expressions.
128 These chains connect the elements whose expressions have the same
129 hash codes.
130
131 Other chains through the same elements connect the elements which
132 currently have equivalent values.
133
134 Register references in an expression are canonicalized before hashing
135 the expression. This is done using `reg_qty' and qty_table `first_reg'.
136 The hash code of a register reference is computed using the quantity
137 number, not the register number.
138
139 When the value of an expression changes, it is necessary to remove from the
140 hash table not just that expression but all expressions whose values
141 could be different as a result.
142
143 1. If the value changing is in memory, except in special cases
144 ANYTHING referring to memory could be changed. That is because
145 nobody knows where a pointer does not point.
146 The function `invalidate_memory' removes what is necessary.
147
148 The special cases are when the address is constant or is
149 a constant plus a fixed register such as the frame pointer
150 or a static chain pointer. When such addresses are stored in,
151 we can tell exactly which other such addresses must be invalidated
152 due to overlap. `invalidate' does this.
153 All expressions that refer to non-constant
154 memory addresses are also invalidated. `invalidate_memory' does this.
155
156 2. If the value changing is a register, all expressions
157 containing references to that register, and only those,
158 must be removed.
159
160 Because searching the entire hash table for expressions that contain
161 a register is very slow, we try to figure out when it isn't necessary.
162 Precisely, this is necessary only when expressions have been
163 entered in the hash table using this register, and then the value has
164 changed, and then another expression wants to be added to refer to
165 the register's new value. This sequence of circumstances is rare
166 within any one basic block.
167
168 The vectors `reg_tick' and `reg_in_table' are used to detect this case.
169 reg_tick[i] is incremented whenever a value is stored in register i.
170 reg_in_table[i] holds -1 if no references to register i have been
171 entered in the table; otherwise, it contains the value reg_tick[i] had
172 when the references were entered. If we want to enter a reference
173 and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
174 Until we want to enter a new entry, the mere fact that the two vectors
175 don't match causes the old entries to be ignored if anyone tries to match them.
176
177 Registers themselves are entered in the hash table as well as in
178 the equivalent-register chains. However, the vectors `reg_tick'
179 and `reg_in_table' do not apply to expressions which are simple
180 register references. These expressions are removed from the table
181 immediately when they become invalid, and this can be done even if
182 we do not immediately search for all the expressions that refer to
183 the register.
184
185 A CLOBBER rtx in an instruction invalidates its operand for further
186 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
187 invalidates everything that resides in memory.
188
189 Related expressions:
190
191 Constant expressions that differ only by an additive integer
192 are called related. When a constant expression is put in
193 the table, the related expression with no constant term
194 is also entered. These are made to point at each other
195 so that it is possible to find out if there exists any
196 register equivalent to an expression related to a given expression. */
197
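/* As a concrete illustration of the quantity-number scheme described
   above (an illustrative sketch only, not part of the algorithm itself):
   after scanning

       (set (reg:SI 101) (reg:SI 100))

   reg 101 simply receives reg 100's quantity number, so the two become
   interchangeable, whereas after

       (set (reg:SI 102) (plus:SI (reg:SI 100) (const_int 4)))

   reg 102 is given a brand-new quantity number and the PLUS expression
   is entered in the hash table as being equivalent to that quantity.  */
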
198 /* One plus largest register number used in this function. */
199
200 static int max_reg;
201
202 /* One plus largest instruction UID used in this function at time of
203 cse_main call. */
204
205 static int max_insn_uid;
206
207 /* Length of qty_table vector. We know in advance we will not need
208 a quantity number this big. */
209
210 static int max_qty;
211
212 /* Next quantity number to be allocated.
213 This is 1 + the largest number needed so far. */
214
215 static int next_qty;
216
217 /* Per-qty information tracking.
218
219 `first_reg' and `last_reg' track the head and tail of the
220 chain of registers which currently contain this quantity.
221
222 `mode' contains the machine mode of this quantity.
223
224 `const_rtx' holds the rtx of the constant value of this
225 quantity, if known. A sum of the frame/arg pointer
226 and a constant can also be entered here. When this holds
227 a known value, `const_insn' is the insn which stored the
228 constant value.
229
230 `comparison_{code,const,qty}' are used to track when a
231 comparison between a quantity and some constant or register has
232 been passed. In such a case, we know the results of the comparison
233 in case we see it again. These members record a comparison that
234 is known to be true. `comparison_code' holds the rtx code of such
235 a comparison, else it is set to UNKNOWN and the other two
236 comparison members are undefined. `comparison_const' holds
237 the constant being compared against, or zero if the comparison
238 is not against a constant. `comparison_qty' holds the quantity
239 being compared against when the result is known. If the comparison
240 is not with a register, `comparison_qty' is -1. */
241
242 struct qty_table_elem
243 {
244 rtx const_rtx;
245 rtx const_insn;
246 rtx comparison_const;
247 int comparison_qty;
248 unsigned int first_reg, last_reg;
249 enum machine_mode mode;
250 enum rtx_code comparison_code;
251 };
252
253 /* The table of all qtys, indexed by qty number. */
254 static struct qty_table_elem *qty_table;
255
256 #ifdef HAVE_cc0
257 /* For machines that have a CC0, we do not record its value in the hash
258 table since its use is guaranteed to be the insn immediately following
259 its definition and any other insn is presumed to invalidate it.
260
261 Instead, we store below the value last assigned to CC0. If it should
262 happen to be a constant, it is stored in preference to the actual
263 assigned value. In case it is a constant, we store the mode in which
264 the constant should be interpreted. */
265
266 static rtx prev_insn_cc0;
267 static enum machine_mode prev_insn_cc0_mode;
268 #endif
269
270 /* Previous actual insn. 0 if at first insn of basic block. */
271
272 static rtx prev_insn;
273
274 /* Insn being scanned. */
275
276 static rtx this_insn;
277
278 /* Indexed by register number; gives the number of the next (or
279 previous) register in the chain of registers sharing the same
280 value.
281
282 Or -1 if this register is at the end of the chain.
283
284 If reg_qty[N] == N, reg_eqv_table[N].next is undefined. */
285
286 /* Per-register equivalence chain. */
287 struct reg_eqv_elem
288 {
289 int next, prev;
290 };
291
292 /* The table of all register equivalence chains. */
293 static struct reg_eqv_elem *reg_eqv_table;
294
295 struct cse_reg_info
296 {
297 /* Next in hash chain. */
298 struct cse_reg_info *hash_next;
299
300 /* The next cse_reg_info structure in the free or used list. */
301 struct cse_reg_info *next;
302
303 /* Search key */
304 unsigned int regno;
305
306 /* The quantity number of the register's current contents. */
307 int reg_qty;
308
309 /* The number of times the register has been altered in the current
310 basic block. */
311 int reg_tick;
312
313 /* The REG_TICK value at which rtx's containing this register are
314 valid in the hash table. If this does not equal the current
315 reg_tick value, such expressions existing in the hash table are
316 invalid. */
317 int reg_in_table;
318 };
319
320 /* A free list of cse_reg_info entries. */
321 static struct cse_reg_info *cse_reg_info_free_list;
322
323 /* A used list of cse_reg_info entries. */
324 static struct cse_reg_info *cse_reg_info_used_list;
325 static struct cse_reg_info *cse_reg_info_used_list_end;
326
327 /* A mapping from registers to cse_reg_info data structures. */
328 #define REGHASH_SHIFT 7
329 #define REGHASH_SIZE (1 << REGHASH_SHIFT)
330 #define REGHASH_MASK (REGHASH_SIZE - 1)
331 static struct cse_reg_info *reg_hash[REGHASH_SIZE];
332
333 #define REGHASH_FN(REGNO) \
334 (((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)
335
336 /* The last lookup we did into the cse_reg_info hash table. This allows us
337 to cache repeated lookups. */
338 static unsigned int cached_regno;
339 static struct cse_reg_info *cached_cse_reg_info;
340
341 /* A HARD_REG_SET containing all the hard registers for which there is
342 currently a REG expression in the hash table. Note the difference
343 from the above variables, which indicate if the REG is mentioned in some
344 expression in the table. */
345
346 static HARD_REG_SET hard_regs_in_table;
347
348 /* CUID of insn that starts the basic block currently being cse-processed. */
349
350 static int cse_basic_block_start;
351
352 /* CUID of insn that ends the basic block currently being cse-processed. */
353
354 static int cse_basic_block_end;
355
356 /* Vector mapping INSN_UIDs to cuids.
357 The cuids are like uids but always increase monotonically.
358 We use them to see whether a reg is used outside a given basic block. */
359
360 static int *uid_cuid;
361
362 /* Highest UID in UID_CUID. */
363 static int max_uid;
364
365 /* Get the cuid of an insn. */
366
367 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
368
369 /* Nonzero if this pass has made changes, and therefore it's
370 worthwhile to run the garbage collector. */
371
372 static int cse_altered;
373
374 /* Nonzero if cse has altered conditional jump insns
375 in such a way that jump optimization should be redone. */
376
377 static int cse_jumps_altered;
378
379 /* Nonzero if we put a LABEL_REF into the hash table for an INSN without a
380 REG_LABEL note; if so, we have to rerun jump after CSE to put in the note. */
381 static int recorded_label_ref;
382
383 /* canon_hash stores 1 in do_not_record
384 if it notices a reference to CC0, PC, or some other volatile
385 subexpression. */
386
387 static int do_not_record;
388
389 #ifdef LOAD_EXTEND_OP
390
391 /* Scratch rtl used when looking for load-extended copy of a MEM. */
392 static rtx memory_extend_rtx;
393 #endif
394
395 /* canon_hash stores 1 in hash_arg_in_memory
396 if it notices a reference to memory within the expression being hashed. */
397
398 static int hash_arg_in_memory;
399
400 /* The hash table contains buckets which are chains of `struct table_elt's,
401 each recording one expression's information.
402 That expression is in the `exp' field.
403
404 The canon_exp field contains a canonical (from the point of view of
405 alias analysis) version of the `exp' field.
406
407 Those elements with the same hash code are chained in both directions
408 through the `next_same_hash' and `prev_same_hash' fields.
409
410 Each set of expressions with equivalent values
411 are on a two-way chain through the `next_same_value'
412 and `prev_same_value' fields, and all point with
413 the `first_same_value' field at the first element in
414 that chain. The chain is in order of increasing cost.
415 Each element's cost value is in its `cost' field.
416
417 The `in_memory' field is nonzero for elements that
418 involve any reference to memory. These elements are removed
419 whenever a write is done to an unidentified location in memory.
420 To be safe, we assume that a memory address is unidentified unless
421 the address is either a symbol constant or a constant plus
422 the frame pointer or argument pointer.
423
424 The `related_value' field is used to connect related expressions
425 (that differ by adding an integer).
426 The related expressions are chained in a circular fashion.
427 `related_value' is zero for expressions for which this
428 chain is not useful.
429
430 The `cost' field stores the cost of this element's expression.
431 The `regcost' field stores the value returned by approx_reg_cost for
432 this element's expression.
433
434 The `is_const' flag is set if the element is a constant (including
435 a fixed address).
436
437 The `flag' field is used as a temporary during some search routines.
438
439 The `mode' field is usually the same as GET_MODE (`exp'), but
440 if `exp' is a CONST_INT and has no machine mode then the `mode'
441 field is the mode it was being used as. Each constant is
442 recorded separately for each mode it is used with. */
443
444 struct table_elt
445 {
446 rtx exp;
447 rtx canon_exp;
448 struct table_elt *next_same_hash;
449 struct table_elt *prev_same_hash;
450 struct table_elt *next_same_value;
451 struct table_elt *prev_same_value;
452 struct table_elt *first_same_value;
453 struct table_elt *related_value;
454 int cost;
455 int regcost;
456 enum machine_mode mode;
457 char in_memory;
458 char is_const;
459 char flag;
460 };
461
462 /* We don't want a lot of buckets, because we rarely have very many
463 things stored in the hash table, and a lot of buckets slows
464 down a lot of loops that happen frequently. */
465 #define HASH_SHIFT 5
466 #define HASH_SIZE (1 << HASH_SHIFT)
467 #define HASH_MASK (HASH_SIZE - 1)
468
469 /* Compute hash code of X in mode M. Special-case the case where X is a pseudo
470 register (hard registers may require `do_not_record' to be set). */
471
472 #define HASH(X, M) \
473 ((GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER \
474 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
475 : canon_hash (X, M)) & HASH_MASK)
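
/* A minimal sketch (illustrative only, not compiled) of how the HASH
   macro above is typically combined with `lookup' later in this file.
   canon_hash sets `do_not_record' and `hash_arg_in_memory' as side
   effects, so both flags are cleared first; x, mode, hash and elt here
   stand for whatever the caller is working with.  */
#if 0
  do_not_record = 0;
  hash_arg_in_memory = 0;
  hash = HASH (x, mode);            /* bucket index, 0 .. HASH_SIZE - 1 */
  elt = lookup (x, hash, mode);     /* 0 if nothing equivalent is known */
#endif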
476
477 /* Determine whether register number N is considered a fixed register for the
478 purpose of approximating register costs.
479 It is desirable to replace other regs with fixed regs, to reduce need for
480 non-fixed hard regs.
481 A reg wins if it is either the frame pointer or designated as fixed. */
482 #define FIXED_REGNO_P(N) \
483 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
484 || fixed_regs[N] || global_regs[N])
485
486 /* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
487 hard registers and pointers into the frame are the cheapest with a cost
488 of 0. Next come pseudos with a cost of 1 and other hard registers with
489 a cost of 2. Aside from these special cases, call `rtx_cost'. */
490
491 #define CHEAP_REGNO(N) \
492 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
493 || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM \
494 || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
495 || ((N) < FIRST_PSEUDO_REGISTER \
496 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
497
498 #define COST(X) (GET_CODE (X) == REG ? 0 : notreg_cost (X, SET))
499 #define COST_IN(X,OUTER) (GET_CODE (X) == REG ? 0 : notreg_cost (X, OUTER))
500
501 /* Get the info associated with register N. */
502
503 #define GET_CSE_REG_INFO(N) \
504 (((N) == cached_regno && cached_cse_reg_info) \
505 ? cached_cse_reg_info : get_cse_reg_info ((N)))
506
507 /* Get the number of times this register has been updated in this
508 basic block. */
509
510 #define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)
511
512 /* Get the point at which REG was recorded in the table. */
513
514 #define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)
515
516 /* Get the quantity number for REG. */
517
518 #define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)
519
520 /* Determine if the quantity number for register X represents a valid index
521 into the qty_table. */
522
523 #define REGNO_QTY_VALID_P(N) (REG_QTY (N) != (int) (N))
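
/* Illustrative note: immediately after `new_basic_block', every register
   N still has REG_QTY (N) == N, so REGNO_QTY_VALID_P (N) is false until
   make_new_qty or make_regs_eqv assigns N a real quantity. Real quantity
   numbers start at `max_reg', so they never collide with a register
   number.  */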
524
525 static struct table_elt *table[HASH_SIZE];
526
527 /* Chain of `struct table_elt's made so far for this function
528 but currently removed from the table. */
529
530 static struct table_elt *free_element_chain;
531
532 /* Number of `struct table_elt' structures made so far for this function. */
533
534 static int n_elements_made;
535
536 /* Maximum value `n_elements_made' has had so far in this compilation
537 for functions previously processed. */
538
539 static int max_elements_made;
540
541 /* Surviving equivalence class when two equivalence classes are merged
542 by recording the effects of a jump in the last insn. Zero if the
543 last insn was not a conditional jump. */
544
545 static struct table_elt *last_jump_equiv_class;
546
547 /* Set to the cost of a constant pool reference if one was found for a
548 symbolic constant. If this was found, it means we should try to
549 convert constants into constant pool entries if they don't fit in
550 the insn. */
551
552 static int constant_pool_entries_cost;
553
554 /* Define maximum length of a branch path. */
555
556 #define PATHLENGTH 10
557
558 /* This data describes a block that will be processed by cse_basic_block. */
559
560 struct cse_basic_block_data
561 {
562 /* Lowest CUID value of insns in block. */
563 int low_cuid;
564 /* Highest CUID value of insns in block. */
565 int high_cuid;
566 /* Total number of SETs in block. */
567 int nsets;
568 /* Last insn in the block. */
569 rtx last;
570 /* Size of current branch path, if any. */
571 int path_size;
572 /* Current branch path, indicating which branches will be taken. */
573 struct branch_path
574 {
575 /* The branch insn. */
576 rtx branch;
577 /* Whether it should be taken or not. AROUND is the same as taken
578 except that it is used when the destination label is not preceded
579 by a BARRIER. */
580 enum taken {TAKEN, NOT_TAKEN, AROUND} status;
581 } path[PATHLENGTH];
582 };
583
584 /* Nonzero if X has the form (PLUS frame-pointer integer). We check for
585 virtual regs here because the simplify_*_operation routines are called
586 by integrate.c, which is called before virtual register instantiation.
587
588 ?!? FIXED_BASE_PLUS_P and NONZERO_BASE_PLUS_P need to move into
589 a header file so that their definitions can be shared with the
590 simplification routines in simplify-rtx.c. Until then, do not
591 change these macros without also changing the copy in simplify-rtx.c. */
592
593 #define FIXED_BASE_PLUS_P(X) \
594 ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
595 || ((X) == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])\
596 || (X) == virtual_stack_vars_rtx \
597 || (X) == virtual_incoming_args_rtx \
598 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
599 && (XEXP (X, 0) == frame_pointer_rtx \
600 || XEXP (X, 0) == hard_frame_pointer_rtx \
601 || ((X) == arg_pointer_rtx \
602 && fixed_regs[ARG_POINTER_REGNUM]) \
603 || XEXP (X, 0) == virtual_stack_vars_rtx \
604 || XEXP (X, 0) == virtual_incoming_args_rtx)) \
605 || GET_CODE (X) == ADDRESSOF)
606
607 /* Similar, but also allows reference to the stack pointer.
608
609 This used to include FIXED_BASE_PLUS_P; however, we can't assume that
610 arg_pointer_rtx by itself is nonzero, because on at least one machine,
611 the i960, the arg pointer is zero when it is unused. */
612
613 #define NONZERO_BASE_PLUS_P(X) \
614 ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
615 || (X) == virtual_stack_vars_rtx \
616 || (X) == virtual_incoming_args_rtx \
617 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
618 && (XEXP (X, 0) == frame_pointer_rtx \
619 || XEXP (X, 0) == hard_frame_pointer_rtx \
620 || ((X) == arg_pointer_rtx \
621 && fixed_regs[ARG_POINTER_REGNUM]) \
622 || XEXP (X, 0) == virtual_stack_vars_rtx \
623 || XEXP (X, 0) == virtual_incoming_args_rtx)) \
624 || (X) == stack_pointer_rtx \
625 || (X) == virtual_stack_dynamic_rtx \
626 || (X) == virtual_outgoing_args_rtx \
627 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
628 && (XEXP (X, 0) == stack_pointer_rtx \
629 || XEXP (X, 0) == virtual_stack_dynamic_rtx \
630 || XEXP (X, 0) == virtual_outgoing_args_rtx)) \
631 || GET_CODE (X) == ADDRESSOF)
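
/* Illustrative note (not an exhaustive list): FIXED_BASE_PLUS_P accepts
   frame_pointer_rtx itself as well as a PLUS of frame_pointer_rtx (or
   hard_frame_pointer_rtx, or the virtual frame registers) and a
   CONST_INT, e.g. an rtx of the form

       (plus (reg frame) (const_int 8))

   NONZERO_BASE_PLUS_P additionally accepts the stack pointer and
   stack-pointer-plus-constant forms: their exact value is not fixed,
   but they are known to be nonzero.  */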
632
633 static int notreg_cost PARAMS ((rtx, enum rtx_code));
634 static int approx_reg_cost_1 PARAMS ((rtx *, void *));
635 static int approx_reg_cost PARAMS ((rtx));
636 static int preferrable PARAMS ((int, int, int, int));
637 static void new_basic_block PARAMS ((void));
638 static void make_new_qty PARAMS ((unsigned int, enum machine_mode));
639 static void make_regs_eqv PARAMS ((unsigned int, unsigned int));
640 static void delete_reg_equiv PARAMS ((unsigned int));
641 static int mention_regs PARAMS ((rtx));
642 static int insert_regs PARAMS ((rtx, struct table_elt *, int));
643 static void remove_from_table PARAMS ((struct table_elt *, unsigned));
644 static struct table_elt *lookup PARAMS ((rtx, unsigned, enum machine_mode)),
645 *lookup_for_remove PARAMS ((rtx, unsigned, enum machine_mode));
646 static rtx lookup_as_function PARAMS ((rtx, enum rtx_code));
647 static struct table_elt *insert PARAMS ((rtx, struct table_elt *, unsigned,
648 enum machine_mode));
649 static void merge_equiv_classes PARAMS ((struct table_elt *,
650 struct table_elt *));
651 static void invalidate PARAMS ((rtx, enum machine_mode));
652 static int cse_rtx_varies_p PARAMS ((rtx, int));
653 static void remove_invalid_refs PARAMS ((unsigned int));
654 static void remove_invalid_subreg_refs PARAMS ((unsigned int, unsigned int,
655 enum machine_mode));
656 static void rehash_using_reg PARAMS ((rtx));
657 static void invalidate_memory PARAMS ((void));
658 static void invalidate_for_call PARAMS ((void));
659 static rtx use_related_value PARAMS ((rtx, struct table_elt *));
660 static unsigned canon_hash PARAMS ((rtx, enum machine_mode));
661 static unsigned canon_hash_string PARAMS ((const char *));
662 static unsigned safe_hash PARAMS ((rtx, enum machine_mode));
663 static int exp_equiv_p PARAMS ((rtx, rtx, int, int));
664 static rtx canon_reg PARAMS ((rtx, rtx));
665 static void find_best_addr PARAMS ((rtx, rtx *, enum machine_mode));
666 static enum rtx_code find_comparison_args PARAMS ((enum rtx_code, rtx *, rtx *,
667 enum machine_mode *,
668 enum machine_mode *));
669 static rtx fold_rtx PARAMS ((rtx, rtx));
670 static rtx equiv_constant PARAMS ((rtx));
671 static void record_jump_equiv PARAMS ((rtx, int));
672 static void record_jump_cond PARAMS ((enum rtx_code, enum machine_mode,
673 rtx, rtx, int));
674 static void cse_insn PARAMS ((rtx, rtx));
675 static int addr_affects_sp_p PARAMS ((rtx));
676 static void invalidate_from_clobbers PARAMS ((rtx));
677 static rtx cse_process_notes PARAMS ((rtx, rtx));
678 static void cse_around_loop PARAMS ((rtx));
679 static void invalidate_skipped_set PARAMS ((rtx, rtx, void *));
680 static void invalidate_skipped_block PARAMS ((rtx));
681 static void cse_check_loop_start PARAMS ((rtx, rtx, void *));
682 static void cse_set_around_loop PARAMS ((rtx, rtx, rtx));
683 static rtx cse_basic_block PARAMS ((rtx, rtx, struct branch_path *, int));
684 static void count_reg_usage PARAMS ((rtx, int *, rtx, int));
685 static int check_for_label_ref PARAMS ((rtx *, void *));
686 extern void dump_class PARAMS ((struct table_elt*));
687 static struct cse_reg_info * get_cse_reg_info PARAMS ((unsigned int));
688 static int check_dependence PARAMS ((rtx *, void *));
689
690 static void flush_hash_table PARAMS ((void));
691 static bool insn_live_p PARAMS ((rtx, int *));
692 static bool set_live_p PARAMS ((rtx, rtx, int *));
693 static bool dead_libcall_p PARAMS ((rtx));
694 \f
695 /* Dump the expressions in the equivalence class indicated by CLASSP.
696 This function is used only for debugging. */
697 void
698 dump_class (classp)
699 struct table_elt *classp;
700 {
701 struct table_elt *elt;
702
703 fprintf (stderr, "Equivalence chain for ");
704 print_rtl (stderr, classp->exp);
705 fprintf (stderr, ": \n");
706
707 for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
708 {
709 print_rtl (stderr, elt->exp);
710 fprintf (stderr, "\n");
711 }
712 }
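
/* Illustrative note: dump_class is declared extern (see the prototypes
   above) so it stays callable by hand from a debugger, e.g.
   "call dump_class (elt)" in gdb while stopped inside this pass.  */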
713
714 /* Subroutine of approx_reg_cost; called through for_each_rtx. */
715
716 static int
717 approx_reg_cost_1 (xp, data)
718 rtx *xp;
719 void *data;
720 {
721 rtx x = *xp;
722 regset set = (regset) data;
723
724 if (x && GET_CODE (x) == REG)
725 SET_REGNO_REG_SET (set, REGNO (x));
726 return 0;
727 }
728
729 /* Return an estimate of the cost of the registers used in an rtx.
730 This is mostly the number of different REG expressions in the rtx;
731 however, for some exceptions like fixed registers we use a cost of 0.
732 If any other hard reg occurs and SMALL_REGISTER_CLASSES, return MAX_COST. */
733
734 static int
735 approx_reg_cost (x)
736 rtx x;
737 {
738 regset_head set;
739 int i;
740 int cost = 0;
741 int hardregs = 0;
742
743 INIT_REG_SET (&set);
744 for_each_rtx (&x, approx_reg_cost_1, (void *)&set);
745
746 EXECUTE_IF_SET_IN_REG_SET
747 (&set, 0, i,
748 {
749 if (! CHEAP_REGNO (i))
750 {
751 if (i < FIRST_PSEUDO_REGISTER)
752 hardregs++;
753
754 cost += i < FIRST_PSEUDO_REGISTER ? 2 : 1;
755 }
756 });
757
758 CLEAR_REG_SET (&set);
759 return hardregs && SMALL_REGISTER_CLASSES ? MAX_COST : cost;
760 }
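
/* Worked example (illustrative): for (plus:SI (reg 100) (reg 101)),
   where 100 and 101 are ordinary pseudos, the loop above counts each
   distinct pseudo once, giving a cost of 2. If the rtx also mentioned a
   non-fixed hard register, the result would be MAX_COST on a
   SMALL_REGISTER_CLASSES target (hardregs != 0), or 2 more otherwise.  */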
761
762 /* Return a negative value if an rtx A, whose costs are given by COST_A
763 and REGCOST_A, is more desirable than an rtx B whose costs are given
764 by COST_B and REGCOST_B. Return a positive value if A is less
765 desirable, or 0 if the two are equally good. */
766 static int
767 preferrable (cost_a, regcost_a, cost_b, regcost_b)
768 int cost_a, regcost_a, cost_b, regcost_b;
769 {
770 /* First, get rid of cases involving expressions that are entirely
771 unwanted. */
772 if (cost_a != cost_b)
773 {
774 if (cost_a == MAX_COST)
775 return 1;
776 if (cost_b == MAX_COST)
777 return -1;
778 }
779
780 /* Avoid extending lifetimes of hardregs. */
781 if (regcost_a != regcost_b)
782 {
783 if (regcost_a == MAX_COST)
784 return 1;
785 if (regcost_b == MAX_COST)
786 return -1;
787 }
788
789 /* Normal operation costs take precedence. */
790 if (cost_a != cost_b)
791 return cost_a - cost_b;
792 /* Only if these are identical consider effects on register pressure. */
793 if (regcost_a != regcost_b)
794 return regcost_a - regcost_b;
795 return 0;
796 }
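
/* Worked example (illustrative): preferrable (4, MAX_COST, 4, 2) returns
   a positive value: the plain costs are equal, so control falls through
   to the register-cost test, where regcost_a == MAX_COST makes A the
   less desirable expression. Conversely, preferrable (2, 0, 6, 0)
   returns a negative value purely from cost_a - cost_b.  */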
797
798 /* Internal function, to compute cost when X is not a register; called
799 from COST macro to keep it simple. */
800
801 static int
802 notreg_cost (x, outer)
803 rtx x;
804 enum rtx_code outer;
805 {
806 return ((GET_CODE (x) == SUBREG
807 && GET_CODE (SUBREG_REG (x)) == REG
808 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
809 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
810 && (GET_MODE_SIZE (GET_MODE (x))
811 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
812 && subreg_lowpart_p (x)
813 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
814 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
815 ? 0
816 : rtx_cost (x, outer) * 2);
817 }
818
819 /* Return an estimate of the cost of computing rtx X.
820 One use is in cse, to decide which expression to keep in the hash table.
821 Another is in rtl generation, to pick the cheapest way to multiply.
822 Other uses like the latter are expected in the future. */
823
824 int
825 rtx_cost (x, outer_code)
826 rtx x;
827 enum rtx_code outer_code ATTRIBUTE_UNUSED;
828 {
829 register int i, j;
830 register enum rtx_code code;
831 register const char *fmt;
832 register int total;
833
834 if (x == 0)
835 return 0;
836
837 /* Compute the default costs of certain things.
838 Note that RTX_COSTS can override the defaults. */
839
840 code = GET_CODE (x);
841 switch (code)
842 {
843 case MULT:
844 /* Count multiplication by 2**n as a shift,
845 because if we are considering it, we would output it as a shift. */
846 if (GET_CODE (XEXP (x, 1)) == CONST_INT
847 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
848 total = 2;
849 else
850 total = COSTS_N_INSNS (5);
851 break;
852 case DIV:
853 case UDIV:
854 case MOD:
855 case UMOD:
856 total = COSTS_N_INSNS (7);
857 break;
858 case USE:
859 /* Used in loop.c and combine.c as a marker. */
860 total = 0;
861 break;
862 default:
863 total = COSTS_N_INSNS (1);
864 }
865
866 switch (code)
867 {
868 case REG:
869 return 0;
870
871 case SUBREG:
872 /* If we can't tie these modes, make this expensive. The larger
873 the mode, the more expensive it is. */
874 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
875 return COSTS_N_INSNS (2
876 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
877 break;
878
879 #ifdef RTX_COSTS
880 RTX_COSTS (x, code, outer_code);
881 #endif
882 #ifdef CONST_COSTS
883 CONST_COSTS (x, code, outer_code);
884 #endif
885
886 default:
887 #ifdef DEFAULT_RTX_COSTS
888 DEFAULT_RTX_COSTS (x, code, outer_code);
889 #endif
890 break;
891 }
892
893 /* Sum the costs of the sub-rtx's, plus cost of this operation,
894 which is already in total. */
895
896 fmt = GET_RTX_FORMAT (code);
897 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
898 if (fmt[i] == 'e')
899 total += rtx_cost (XEXP (x, i), code);
900 else if (fmt[i] == 'E')
901 for (j = 0; j < XVECLEN (x, i); j++)
902 total += rtx_cost (XVECEXP (x, i, j), code);
903
904 return total;
905 }
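
/* Illustrative example: for (mult:SI (reg 100) (const_int 8)) the first
   switch above treats the multiplication by a power of two as a shift,
   so the base cost is 2 rather than COSTS_N_INSNS (5); the operand
   costs (and any target RTX_COSTS/CONST_COSTS overrides) are then added
   on top.  */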
906 \f
907 /* Return the cost of address expression X.
908 Expect that X is a properly formed address reference. */
909
910 int
911 address_cost (x, mode)
912 rtx x;
913 enum machine_mode mode;
914 {
915 /* The ADDRESS_COST macro does not deal with ADDRESSOF nodes. But,
916 during CSE, such nodes are present. Using an ADDRESSOF node which
917 refers to the address of a REG is a good thing because we can then
918 turn (MEM (ADDRESSOF (REG))) into just plain REG. */
919
920 if (GET_CODE (x) == ADDRESSOF && REG_P (XEXP ((x), 0)))
921 return -1;
922
923 /* We may be asked for the cost of various unusual addresses, such as the
924 operands of a push instruction. It is not worthwhile to complicate the
925 writing of the ADDRESS_COST macro for such cases. */
926
927 if (!memory_address_p (mode, x))
928 return 1000;
929 #ifdef ADDRESS_COST
930 return ADDRESS_COST (x);
931 #else
932 return rtx_cost (x, MEM);
933 #endif
934 }
935
936 \f
937 static struct cse_reg_info *
938 get_cse_reg_info (regno)
939 unsigned int regno;
940 {
941 struct cse_reg_info **hash_head = &reg_hash[REGHASH_FN (regno)];
942 struct cse_reg_info *p;
943
944 for (p = *hash_head; p != NULL; p = p->hash_next)
945 if (p->regno == regno)
946 break;
947
948 if (p == NULL)
949 {
950 /* Get a new cse_reg_info structure. */
951 if (cse_reg_info_free_list)
952 {
953 p = cse_reg_info_free_list;
954 cse_reg_info_free_list = p->next;
955 }
956 else
957 p = (struct cse_reg_info *) xmalloc (sizeof (struct cse_reg_info));
958
959 /* Insert into hash table. */
960 p->hash_next = *hash_head;
961 *hash_head = p;
962
963 /* Initialize it. */
964 p->reg_tick = 1;
965 p->reg_in_table = -1;
966 p->reg_qty = regno;
967 p->regno = regno;
968 p->next = cse_reg_info_used_list;
969 cse_reg_info_used_list = p;
970 if (!cse_reg_info_used_list_end)
971 cse_reg_info_used_list_end = p;
972 }
973
974 /* Cache this lookup; we tend to be looking up information about the
975 same register several times in a row. */
976 cached_regno = regno;
977 cached_cse_reg_info = p;
978
979 return p;
980 }
981
982 /* Clear the hash table and initialize each register with its own quantity,
983 for a new basic block. */
984
985 static void
986 new_basic_block ()
987 {
988 register int i;
989
990 next_qty = max_reg;
991
992 /* Clear out hash table state for this pass. */
993
994 memset ((char *) reg_hash, 0, sizeof reg_hash);
995
996 if (cse_reg_info_used_list)
997 {
998 cse_reg_info_used_list_end->next = cse_reg_info_free_list;
999 cse_reg_info_free_list = cse_reg_info_used_list;
1000 cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
1001 }
1002 cached_cse_reg_info = 0;
1003
1004 CLEAR_HARD_REG_SET (hard_regs_in_table);
1005
1006 /* The per-quantity values used to be initialized here, but it is
1007 much faster to initialize each as it is made in `make_new_qty'. */
1008
1009 for (i = 0; i < HASH_SIZE; i++)
1010 {
1011 struct table_elt *first;
1012
1013 first = table[i];
1014 if (first != NULL)
1015 {
1016 struct table_elt *last = first;
1017
1018 table[i] = NULL;
1019
1020 while (last->next_same_hash != NULL)
1021 last = last->next_same_hash;
1022
1023 /* Now relink this entire hash chain into
1024 the free element list. */
1025
1026 last->next_same_hash = free_element_chain;
1027 free_element_chain = first;
1028 }
1029 }
1030
1031 prev_insn = 0;
1032
1033 #ifdef HAVE_cc0
1034 prev_insn_cc0 = 0;
1035 #endif
1036 }
1037
1038 /* Say that register REG contains a quantity in mode MODE that was not
1039 previously held in any register, and initialize that quantity. */
1040
1041 static void
1042 make_new_qty (reg, mode)
1043 unsigned int reg;
1044 enum machine_mode mode;
1045 {
1046 register int q;
1047 register struct qty_table_elem *ent;
1048 register struct reg_eqv_elem *eqv;
1049
1050 if (next_qty >= max_qty)
1051 abort ();
1052
1053 q = REG_QTY (reg) = next_qty++;
1054 ent = &qty_table[q];
1055 ent->first_reg = reg;
1056 ent->last_reg = reg;
1057 ent->mode = mode;
1058 ent->const_rtx = ent->const_insn = NULL_RTX;
1059 ent->comparison_code = UNKNOWN;
1060
1061 eqv = &reg_eqv_table[reg];
1062 eqv->next = eqv->prev = -1;
1063 }
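
/* Illustrative note: after make_new_qty (reg, mode), REG_QTY (reg) is
   the freshly allocated quantity q (so REGNO_QTY_VALID_P (reg) is now
   true), qty_table[q].first_reg == qty_table[q].last_reg == reg, and reg
   sits alone in its equivalence chain (reg_eqv_table[reg].next and
   .prev are both -1).  */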
1064
1065 /* Make reg NEW equivalent to reg OLD.
1066 OLD is not changing; NEW is. */
1067
1068 static void
1069 make_regs_eqv (new, old)
1070 unsigned int new, old;
1071 {
1072 unsigned int lastr, firstr;
1073 int q = REG_QTY (old);
1074 struct qty_table_elem *ent;
1075
1076 ent = &qty_table[q];
1077
1078 /* Nothing should become eqv until it has a "non-invalid" qty number. */
1079 if (! REGNO_QTY_VALID_P (old))
1080 abort ();
1081
1082 REG_QTY (new) = q;
1083 firstr = ent->first_reg;
1084 lastr = ent->last_reg;
1085
1086 /* Prefer fixed hard registers to anything. Prefer pseudo regs to other
1087 hard regs. Among pseudos, if NEW will live longer than any other reg
1088 of the same qty, and that is beyond the current basic block,
1089 make it the new canonical replacement for this qty. */
1090 if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
1091 /* Certain fixed registers might be of the class NO_REGS. This means
1092 that not only can they not be allocated by the compiler, but
1093 they cannot be used in substitutions or canonicalizations
1094 either. */
1095 && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
1096 && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
1097 || (new >= FIRST_PSEUDO_REGISTER
1098 && (firstr < FIRST_PSEUDO_REGISTER
1099 || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
1100 || (uid_cuid[REGNO_FIRST_UID (new)]
1101 < cse_basic_block_start))
1102 && (uid_cuid[REGNO_LAST_UID (new)]
1103 > uid_cuid[REGNO_LAST_UID (firstr)]))))))
1104 {
1105 reg_eqv_table[firstr].prev = new;
1106 reg_eqv_table[new].next = firstr;
1107 reg_eqv_table[new].prev = -1;
1108 ent->first_reg = new;
1109 }
1110 else
1111 {
1112 /* If NEW is a hard reg (known to be non-fixed), insert at end.
1113 Otherwise, insert before any non-fixed hard regs that are at the
1114 end. Registers of class NO_REGS cannot be used as an
1115 equivalent for anything. */
1116 while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
1117 && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
1118 && new >= FIRST_PSEUDO_REGISTER)
1119 lastr = reg_eqv_table[lastr].prev;
1120 reg_eqv_table[new].next = reg_eqv_table[lastr].next;
1121 if (reg_eqv_table[lastr].next >= 0)
1122 reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
1123 else
1124 qty_table[q].last_reg = new;
1125 reg_eqv_table[lastr].next = new;
1126 reg_eqv_table[new].prev = lastr;
1127 }
1128 }
1129
1130 /* Remove REG from its equivalence class. */
1131
1132 static void
1133 delete_reg_equiv (reg)
1134 unsigned int reg;
1135 {
1136 register struct qty_table_elem *ent;
1137 register int q = REG_QTY (reg);
1138 register int p, n;
1139
1140 /* If invalid, do nothing. */
1141 if (q == (int) reg)
1142 return;
1143
1144 ent = &qty_table[q];
1145
1146 p = reg_eqv_table[reg].prev;
1147 n = reg_eqv_table[reg].next;
1148
1149 if (n != -1)
1150 reg_eqv_table[n].prev = p;
1151 else
1152 ent->last_reg = p;
1153 if (p != -1)
1154 reg_eqv_table[p].next = n;
1155 else
1156 ent->first_reg = n;
1157
1158 REG_QTY (reg) = reg;
1159 }
1160
1161 /* Remove any invalid expressions from the hash table
1162 that refer to any of the registers contained in expression X.
1163
1164 Make sure that newly inserted references to those registers
1165 as subexpressions will be considered valid.
1166
1167 mention_regs is not called when a register itself
1168 is being stored in the table.
1169
1170 Return 1 if we have done something that may have changed the hash code
1171 of X. */
1172
1173 static int
1174 mention_regs (x)
1175 rtx x;
1176 {
1177 register enum rtx_code code;
1178 register int i, j;
1179 register const char *fmt;
1180 register int changed = 0;
1181
1182 if (x == 0)
1183 return 0;
1184
1185 code = GET_CODE (x);
1186 if (code == REG)
1187 {
1188 unsigned int regno = REGNO (x);
1189 unsigned int endregno
1190 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
1191 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
1192 unsigned int i;
1193
1194 for (i = regno; i < endregno; i++)
1195 {
1196 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1197 remove_invalid_refs (i);
1198
1199 REG_IN_TABLE (i) = REG_TICK (i);
1200 }
1201
1202 return 0;
1203 }
1204
1205 /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
1206 pseudo if they don't use overlapping words. We handle only pseudos
1207 here for simplicity. */
1208 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1209 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1210 {
1211 unsigned int i = REGNO (SUBREG_REG (x));
1212
1213 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1214 {
1215 /* If reg_tick has been incremented more than once since
1216 reg_in_table was last set, that means that the entire
1217 register has been set before, so discard anything memorized
1218 for the entire register, including all SUBREG expressions. */
1219 if (REG_IN_TABLE (i) != REG_TICK (i) - 1)
1220 remove_invalid_refs (i);
1221 else
1222 remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
1223 }
1224
1225 REG_IN_TABLE (i) = REG_TICK (i);
1226 return 0;
1227 }
1228
1229 /* If X is a comparison or a COMPARE and either operand is a register
1230 that does not have a quantity, give it one. This is so that a later
1231 call to record_jump_equiv won't cause X to be assigned a different
1232 hash code and not found in the table after that call.
1233
1234 It is not necessary to do this here, since rehash_using_reg can
1235 fix up the table later, but doing this here eliminates the need to
1236 call that expensive function in the most common case where the only
1237 use of the register is in the comparison. */
1238
1239 if (code == COMPARE || GET_RTX_CLASS (code) == '<')
1240 {
1241 if (GET_CODE (XEXP (x, 0)) == REG
1242 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
1243 if (insert_regs (XEXP (x, 0), NULL, 0))
1244 {
1245 rehash_using_reg (XEXP (x, 0));
1246 changed = 1;
1247 }
1248
1249 if (GET_CODE (XEXP (x, 1)) == REG
1250 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
1251 if (insert_regs (XEXP (x, 1), NULL, 0))
1252 {
1253 rehash_using_reg (XEXP (x, 1));
1254 changed = 1;
1255 }
1256 }
1257
1258 fmt = GET_RTX_FORMAT (code);
1259 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1260 if (fmt[i] == 'e')
1261 changed |= mention_regs (XEXP (x, i));
1262 else if (fmt[i] == 'E')
1263 for (j = 0; j < XVECLEN (x, i); j++)
1264 changed |= mention_regs (XVECEXP (x, i, j));
1265
1266 return changed;
1267 }
1268
1269 /* Update the register quantities for inserting X into the hash table
1270 with a value equivalent to CLASSP.
1271 (If the class does not contain a REG, it is irrelevant.)
1272 If MODIFIED is nonzero, X is a destination; it is being modified.
1273 Note that delete_reg_equiv should be called on a register
1274 before insert_regs is done on that register with MODIFIED != 0.
1275
1276 Nonzero value means that elements of reg_qty have changed
1277 so X's hash code may be different. */
1278
1279 static int
1280 insert_regs (x, classp, modified)
1281 rtx x;
1282 struct table_elt *classp;
1283 int modified;
1284 {
1285 if (GET_CODE (x) == REG)
1286 {
1287 unsigned int regno = REGNO (x);
1288 int qty_valid;
1289
1290 /* If REGNO is in the equivalence table already but is of the
1291 wrong mode for that equivalence, don't do anything here. */
1292
1293 qty_valid = REGNO_QTY_VALID_P (regno);
1294 if (qty_valid)
1295 {
1296 struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];
1297
1298 if (ent->mode != GET_MODE (x))
1299 return 0;
1300 }
1301
1302 if (modified || ! qty_valid)
1303 {
1304 if (classp)
1305 for (classp = classp->first_same_value;
1306 classp != 0;
1307 classp = classp->next_same_value)
1308 if (GET_CODE (classp->exp) == REG
1309 && GET_MODE (classp->exp) == GET_MODE (x))
1310 {
1311 make_regs_eqv (regno, REGNO (classp->exp));
1312 return 1;
1313 }
1314
1315 /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
1316 than REG_IN_TABLE to find out if there was only a single preceding
1317 invalidation - for the SUBREG - or another one, which would be
1318 for the full register. However, if we find here that REG_TICK
1319 indicates that the register is invalid, it means that it has
1320 been invalidated in a separate operation. The SUBREG might be used
1321 now (then this is a recursive call), or we might use the full REG
1322 now and a SUBREG of it later. So bump up REG_TICK so that
1323 mention_regs will do the right thing. */
1324 if (! modified
1325 && REG_IN_TABLE (regno) >= 0
1326 && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
1327 REG_TICK (regno)++;
1328 make_new_qty (regno, GET_MODE (x));
1329 return 1;
1330 }
1331
1332 return 0;
1333 }
1334
1335 /* If X is a SUBREG, we will likely be inserting the inner register in the
1336 table. If that register doesn't have an assigned quantity number at
1337 this point but does later, the insertion that we will be doing now will
1338 not be accessible because its hash code will have changed. So assign
1339 a quantity number now. */
1340
1341 else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1342 && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1343 {
1344 insert_regs (SUBREG_REG (x), NULL, 0);
1345 mention_regs (x);
1346 return 1;
1347 }
1348 else
1349 return mention_regs (x);
1350 }
1351 \f
1352 /* Look in or update the hash table. */
1353
1354 /* Remove table element ELT from use in the table.
1355 HASH is its hash code, made using the HASH macro.
1356 It's an argument because often that is known in advance
1357 and we save much time not recomputing it. */
1358
1359 static void
1360 remove_from_table (elt, hash)
1361 register struct table_elt *elt;
1362 unsigned hash;
1363 {
1364 if (elt == 0)
1365 return;
1366
1367 /* Mark this element as removed. See cse_insn. */
1368 elt->first_same_value = 0;
1369
1370 /* Remove the table element from its equivalence class. */
1371
1372 {
1373 register struct table_elt *prev = elt->prev_same_value;
1374 register struct table_elt *next = elt->next_same_value;
1375
1376 if (next)
1377 next->prev_same_value = prev;
1378
1379 if (prev)
1380 prev->next_same_value = next;
1381 else
1382 {
1383 register struct table_elt *newfirst = next;
1384 while (next)
1385 {
1386 next->first_same_value = newfirst;
1387 next = next->next_same_value;
1388 }
1389 }
1390 }
1391
1392 /* Remove the table element from its hash bucket. */
1393
1394 {
1395 register struct table_elt *prev = elt->prev_same_hash;
1396 register struct table_elt *next = elt->next_same_hash;
1397
1398 if (next)
1399 next->prev_same_hash = prev;
1400
1401 if (prev)
1402 prev->next_same_hash = next;
1403 else if (table[hash] == elt)
1404 table[hash] = next;
1405 else
1406 {
1407 /* This entry is not in the proper hash bucket. This can happen
1408 when two classes were merged by `merge_equiv_classes'. Search
1409 for the hash bucket that it heads. This happens only very
1410 rarely, so the cost is acceptable. */
1411 for (hash = 0; hash < HASH_SIZE; hash++)
1412 if (table[hash] == elt)
1413 table[hash] = next;
1414 }
1415 }
1416
1417 /* Remove the table element from its related-value circular chain. */
1418
1419 if (elt->related_value != 0 && elt->related_value != elt)
1420 {
1421 register struct table_elt *p = elt->related_value;
1422
1423 while (p->related_value != elt)
1424 p = p->related_value;
1425 p->related_value = elt->related_value;
1426 if (p->related_value == p)
1427 p->related_value = 0;
1428 }
1429
1430 /* Now add it to the free element chain. */
1431 elt->next_same_hash = free_element_chain;
1432 free_element_chain = elt;
1433 }
1434
1435 /* Look up X in the hash table and return its table element,
1436 or 0 if X is not in the table.
1437
1438 MODE is the machine-mode of X, or if X is an integer constant
1439 with VOIDmode then MODE is the mode with which X will be used.
1440
1441 Here we are satisfied to find an expression whose tree structure
1442 looks like X. */
1443
1444 static struct table_elt *
1445 lookup (x, hash, mode)
1446 rtx x;
1447 unsigned hash;
1448 enum machine_mode mode;
1449 {
1450 register struct table_elt *p;
1451
1452 for (p = table[hash]; p; p = p->next_same_hash)
1453 if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
1454 || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
1455 return p;
1456
1457 return 0;
1458 }
1459
1460 /* Like `lookup' but don't care whether the table element uses invalid regs.
1461 Also ignore discrepancies in the machine mode of a register. */
1462
1463 static struct table_elt *
1464 lookup_for_remove (x, hash, mode)
1465 rtx x;
1466 unsigned hash;
1467 enum machine_mode mode;
1468 {
1469 register struct table_elt *p;
1470
1471 if (GET_CODE (x) == REG)
1472 {
1473 unsigned int regno = REGNO (x);
1474
1475 /* Don't check the machine mode when comparing registers;
1476 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1477 for (p = table[hash]; p; p = p->next_same_hash)
1478 if (GET_CODE (p->exp) == REG
1479 && REGNO (p->exp) == regno)
1480 return p;
1481 }
1482 else
1483 {
1484 for (p = table[hash]; p; p = p->next_same_hash)
1485 if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
1486 return p;
1487 }
1488
1489 return 0;
1490 }
1491
1492 /* Look for an expression equivalent to X and with code CODE.
1493 If one is found, return that expression. */
1494
1495 static rtx
1496 lookup_as_function (x, code)
1497 rtx x;
1498 enum rtx_code code;
1499 {
1500 register struct table_elt *p
1501 = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, GET_MODE (x));
1502
1503 /* If we are looking for a CONST_INT, the mode doesn't really matter, as
1504 long as we are narrowing. So if we looked in vain for a mode narrower
1505 than word_mode before, look for word_mode now. */
1506 if (p == 0 && code == CONST_INT
1507 && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
1508 {
1509 x = copy_rtx (x);
1510 PUT_MODE (x, word_mode);
1511 p = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, word_mode);
1512 }
1513
1514 if (p == 0)
1515 return 0;
1516
1517 for (p = p->first_same_value; p; p = p->next_same_value)
1518 if (GET_CODE (p->exp) == code
1519 /* Make sure this is a valid entry in the table. */
1520 && exp_equiv_p (p->exp, p->exp, 1, 0))
1521 return p->exp;
1522
1523 return 0;
1524 }
1525
1526 /* Insert X in the hash table, assuming HASH is its hash code
1527 and CLASSP is an element of the class it should go in
1528 (or 0 if a new class should be made).
1529 It is inserted at the proper position to keep the class in
1530 the order cheapest first.
1531
1532 MODE is the machine-mode of X, or if X is an integer constant
1533 with VOIDmode then MODE is the mode with which X will be used.
1534
1535 For elements of equal cheapness, the most recent one
1536 goes in front, except that the first element in the list
1537 remains first unless a cheaper element is added. The order of
1538 pseudo-registers does not matter, as canon_reg will be called to
1539 find the cheapest when a register is retrieved from the table.
1540
1541 The in_memory field in the hash table element is set to 0.
1542 The caller must set it nonzero if appropriate.
1543
1544 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1545 and if insert_regs returns a nonzero value
1546 you must then recompute its hash code before calling here.
1547
1548 If necessary, update table showing constant values of quantities. */
1549
1550 #define CHEAPER(X, Y) \
1551 (preferrable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
1552
1553 static struct table_elt *
1554 insert (x, classp, hash, mode)
1555 register rtx x;
1556 register struct table_elt *classp;
1557 unsigned hash;
1558 enum machine_mode mode;
1559 {
1560 register struct table_elt *elt;
1561
1562 /* If X is a register and we haven't made a quantity for it,
1563 something is wrong. */
1564 if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
1565 abort ();
1566
1567 /* If X is a hard register, show it is being put in the table. */
1568 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
1569 {
1570 unsigned int regno = REGNO (x);
1571 unsigned int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1572 unsigned int i;
1573
1574 for (i = regno; i < endregno; i++)
1575 SET_HARD_REG_BIT (hard_regs_in_table, i);
1576 }
1577
1578 /* Put an element for X into the right hash bucket. */
1579
1580 elt = free_element_chain;
1581 if (elt)
1582 free_element_chain = elt->next_same_hash;
1583 else
1584 {
1585 n_elements_made++;
1586 elt = (struct table_elt *) xmalloc (sizeof (struct table_elt));
1587 }
1588
1589 elt->exp = x;
1590 elt->canon_exp = NULL_RTX;
1591 elt->cost = COST (x);
1592 elt->regcost = approx_reg_cost (x);
1593 elt->next_same_value = 0;
1594 elt->prev_same_value = 0;
1595 elt->next_same_hash = table[hash];
1596 elt->prev_same_hash = 0;
1597 elt->related_value = 0;
1598 elt->in_memory = 0;
1599 elt->mode = mode;
1600 elt->is_const = (CONSTANT_P (x)
1601 /* GNU C++ takes advantage of this for `this'
1602 (and other const values). */
1603 || (RTX_UNCHANGING_P (x)
1604 && GET_CODE (x) == REG
1605 && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1606 || FIXED_BASE_PLUS_P (x));
1607
1608 if (table[hash])
1609 table[hash]->prev_same_hash = elt;
1610 table[hash] = elt;
1611
1612 /* Put it into the proper value-class. */
1613 if (classp)
1614 {
1615 classp = classp->first_same_value;
1616 if (CHEAPER (elt, classp))
1617 /* Insert at the head of the class */
1618 {
1619 register struct table_elt *p;
1620 elt->next_same_value = classp;
1621 classp->prev_same_value = elt;
1622 elt->first_same_value = elt;
1623
1624 for (p = classp; p; p = p->next_same_value)
1625 p->first_same_value = elt;
1626 }
1627 else
1628 {
1629 /* Insert not at head of the class. */
1630 /* Put it after the last element cheaper than X. */
1631 register struct table_elt *p, *next;
1632
1633 for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1634 p = next);
1635
1636 /* Put it after P and before NEXT. */
1637 elt->next_same_value = next;
1638 if (next)
1639 next->prev_same_value = elt;
1640
1641 elt->prev_same_value = p;
1642 p->next_same_value = elt;
1643 elt->first_same_value = classp;
1644 }
1645 }
1646 else
1647 elt->first_same_value = elt;
1648
1649 /* If this is a constant being set equivalent to a register or a register
1650 being set equivalent to a constant, note the constant equivalence.
1651
1652 If this is a constant, it cannot be equivalent to a different constant,
1653 and a constant is the only thing that can be cheaper than a register. So
1654 we know the register is the head of the class (before the constant was
1655 inserted).
1656
1657 If this is a register that is not already known equivalent to a
1658 constant, we must check the entire class.
1659
1660 If this is a register that is already known equivalent to an insn,
1661 update the qtys `const_insn' to show that `this_insn' is the latest
1662 insn making that quantity equivalent to the constant. */
1663
1664 if (elt->is_const && classp && GET_CODE (classp->exp) == REG
1665 && GET_CODE (x) != REG)
1666 {
1667 int exp_q = REG_QTY (REGNO (classp->exp));
1668 struct qty_table_elem *exp_ent = &qty_table[exp_q];
1669
1670 exp_ent->const_rtx = gen_lowpart_if_possible (exp_ent->mode, x);
1671 exp_ent->const_insn = this_insn;
1672 }
1673
1674 else if (GET_CODE (x) == REG
1675 && classp
1676 && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1677 && ! elt->is_const)
1678 {
1679 register struct table_elt *p;
1680
1681 for (p = classp; p != 0; p = p->next_same_value)
1682 {
1683 if (p->is_const && GET_CODE (p->exp) != REG)
1684 {
1685 int x_q = REG_QTY (REGNO (x));
1686 struct qty_table_elem *x_ent = &qty_table[x_q];
1687
1688 x_ent->const_rtx
1689 = gen_lowpart_if_possible (GET_MODE (x), p->exp);
1690 x_ent->const_insn = this_insn;
1691 break;
1692 }
1693 }
1694 }
1695
1696 else if (GET_CODE (x) == REG
1697 && qty_table[REG_QTY (REGNO (x))].const_rtx
1698 && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1699 qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1700
1701 /* If this is a constant with symbolic value,
1702 and it has a term with an explicit integer value,
1703 link it up with related expressions. */
1704 if (GET_CODE (x) == CONST)
1705 {
1706 rtx subexp = get_related_value (x);
1707 unsigned subhash;
1708 struct table_elt *subelt, *subelt_prev;
1709
1710 if (subexp != 0)
1711 {
1712 /* Get the integer-free subexpression in the hash table. */
1713 subhash = safe_hash (subexp, mode) & HASH_MASK;
1714 subelt = lookup (subexp, subhash, mode);
1715 if (subelt == 0)
1716 subelt = insert (subexp, NULL, subhash, mode);
1717 /* Initialize SUBELT's circular chain if it has none. */
1718 if (subelt->related_value == 0)
1719 subelt->related_value = subelt;
1720 /* Find the element in the circular chain that precedes SUBELT. */
1721 subelt_prev = subelt;
1722 while (subelt_prev->related_value != subelt)
1723 subelt_prev = subelt_prev->related_value;
1724 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1725 This way the element that follows SUBELT is the oldest one. */
1726 elt->related_value = subelt_prev->related_value;
1727 subelt_prev->related_value = elt;
1728 }
1729 }
1730
1731 return elt;
1732 }
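/* A hedged illustration, not from the original sources, of the
   related-value ring built near the end of insert above: if the table
   already holds (const (plus (symbol_ref "s") (const_int 4))) and we now
   insert (const (plus (symbol_ref "s") (const_int 8))), both share the
   integer-free subexpression (symbol_ref "s"), so their elements are
   linked into a single circular list through that subexpression's table
   element.  use_related_value, defined below, walks this ring looking for
   a class that contains an equivalent register.  */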
1733 \f
1734 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1735 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1736 the two classes equivalent.
1737
1738 CLASS1 will be the surviving class; CLASS2 should not be used after this
1739 call.
1740
1741 Any invalid entries in CLASS2 will not be copied. */
1742
1743 static void
1744 merge_equiv_classes (class1, class2)
1745 struct table_elt *class1, *class2;
1746 {
1747 struct table_elt *elt, *next, *new;
1748
1749 /* Ensure we start with the head of the classes. */
1750 class1 = class1->first_same_value;
1751 class2 = class2->first_same_value;
1752
1753 /* If they were already equal, forget it. */
1754 if (class1 == class2)
1755 return;
1756
1757 for (elt = class2; elt; elt = next)
1758 {
1759 unsigned int hash;
1760 rtx exp = elt->exp;
1761 enum machine_mode mode = elt->mode;
1762
1763 next = elt->next_same_value;
1764
1765 /* Remove old entry, make a new one in CLASS1's class.
1766 Don't do this for invalid entries as we cannot find their
1767 hash code (it also isn't necessary). */
1768 if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
1769 {
1770 hash_arg_in_memory = 0;
1771 hash = HASH (exp, mode);
1772
1773 if (GET_CODE (exp) == REG)
1774 delete_reg_equiv (REGNO (exp));
1775
1776 remove_from_table (elt, hash);
1777
1778 if (insert_regs (exp, class1, 0))
1779 {
1780 rehash_using_reg (exp);
1781 hash = HASH (exp, mode);
1782 }
1783 new = insert (exp, class1, hash, mode);
1784 new->in_memory = hash_arg_in_memory;
1785 }
1786 }
1787 }
1788 \f
1789 /* Flush the entire hash table. */
1790
1791 static void
1792 flush_hash_table ()
1793 {
1794 int i;
1795 struct table_elt *p;
1796
1797 for (i = 0; i < HASH_SIZE; i++)
1798 for (p = table[i]; p; p = table[i])
1799 {
1800 /* Note that invalidate can remove elements
1801 after P in the current hash chain. */
1802 if (GET_CODE (p->exp) == REG)
1803 invalidate (p->exp, p->mode);
1804 else
1805 remove_from_table (p, i);
1806 }
1807 }
1808 \f
1809 /* Function called for each rtx to check whether a true dependence exists. */
1810 struct check_dependence_data
1811 {
1812 enum machine_mode mode;
1813 rtx exp;
1814 };
1815
1816 static int
1817 check_dependence (x, data)
1818 rtx *x;
1819 void *data;
1820 {
1821 struct check_dependence_data *d = (struct check_dependence_data *) data;
1822 if (*x && GET_CODE (*x) == MEM)
1823 return true_dependence (d->exp, d->mode, *x, cse_rtx_varies_p);
1824 else
1825 return 0;
1826 }
1827 \f
1828 /* Remove from the hash table, or mark as invalid, all expressions whose
1829 values could be altered by storing in X. X is a register, a subreg, or
1830 a memory reference with nonvarying address (because, when a memory
1831 reference with a varying address is stored in, all memory references are
1832 removed by invalidate_memory so specific invalidation is superfluous).
1833 FULL_MODE, if not VOIDmode, indicates that this much should be
1834 invalidated instead of just the amount indicated by the mode of X. This
1835 is only used for bitfield stores into memory.
1836
1837 A nonvarying address may be just a register or just a symbol reference,
1838 or it may be either of those plus a numeric offset. */
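/* Some illustrative nonvarying addresses in the sense described above
   (examples only, not taken from the original comment):
       (reg fp)
       (symbol_ref "x")
       (plus (reg fp) (const_int 8))
   An address such as (plus (reg 100) (reg 101)) is normally varying, and
   stores through it are handled by invalidate_memory instead.  */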
1839
1840 static void
1841 invalidate (x, full_mode)
1842 rtx x;
1843 enum machine_mode full_mode;
1844 {
1845 register int i;
1846 register struct table_elt *p;
1847
1848 switch (GET_CODE (x))
1849 {
1850 case REG:
1851 {
1852 /* If X is a register, dependencies on its contents are recorded
1853 through the qty number mechanism. Just change the qty number of
1854 the register, mark it as invalid for expressions that refer to it,
1855 and remove it itself. */
1856 unsigned int regno = REGNO (x);
1857 unsigned int hash = HASH (x, GET_MODE (x));
1858
1859 /* Remove REGNO from any quantity list it might be on and indicate
1860 that its value might have changed. If it is a pseudo, remove its
1861 entry from the hash table.
1862
1863 For a hard register, we do the first two actions above for any
1864 additional hard registers corresponding to X. Then, if any of these
1865 registers are in the table, we must remove any REG entries that
1866 overlap these registers. */
1867
1868 delete_reg_equiv (regno);
1869 REG_TICK (regno)++;
1870
1871 if (regno >= FIRST_PSEUDO_REGISTER)
1872 {
1873 /* Because a register can be referenced in more than one mode,
1874 we might have to remove more than one table entry. */
1875 struct table_elt *elt;
1876
1877 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1878 remove_from_table (elt, hash);
1879 }
1880 else
1881 {
1882 HOST_WIDE_INT in_table
1883 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1884 unsigned int endregno
1885 = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1886 unsigned int tregno, tendregno, rn;
1887 register struct table_elt *p, *next;
1888
1889 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1890
1891 for (rn = regno + 1; rn < endregno; rn++)
1892 {
1893 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1894 CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1895 delete_reg_equiv (rn);
1896 REG_TICK (rn)++;
1897 }
1898
1899 if (in_table)
1900 for (hash = 0; hash < HASH_SIZE; hash++)
1901 for (p = table[hash]; p; p = next)
1902 {
1903 next = p->next_same_hash;
1904
1905 if (GET_CODE (p->exp) != REG
1906 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1907 continue;
1908
1909 tregno = REGNO (p->exp);
1910 tendregno
1911 = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1912 if (tendregno > regno && tregno < endregno)
1913 remove_from_table (p, hash);
1914 }
1915 }
1916 }
1917 return;
1918
1919 case SUBREG:
1920 invalidate (SUBREG_REG (x), VOIDmode);
1921 return;
1922
1923 case PARALLEL:
1924 for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1925 invalidate (XVECEXP (x, 0, i), VOIDmode);
1926 return;
1927
1928 case EXPR_LIST:
1929 /* This is part of a disjoint return value; extract the location in
1930 question ignoring the offset. */
1931 invalidate (XEXP (x, 0), VOIDmode);
1932 return;
1933
1934 case MEM:
1935 /* Calculate the canonical version of X here so that
1936 true_dependence doesn't generate new RTL for X on each call. */
1937 x = canon_rtx (x);
1938
1939 /* Remove all hash table elements that refer to overlapping pieces of
1940 memory. */
1941 if (full_mode == VOIDmode)
1942 full_mode = GET_MODE (x);
1943
1944 for (i = 0; i < HASH_SIZE; i++)
1945 {
1946 register struct table_elt *next;
1947
1948 for (p = table[i]; p; p = next)
1949 {
1950 next = p->next_same_hash;
1951 if (p->in_memory)
1952 {
1953 struct check_dependence_data d;
1954
1955 /* Just canonicalize the expression once;
1956 otherwise each time we call invalidate
1957 true_dependence will canonicalize the
1958 expression again. */
1959 if (!p->canon_exp)
1960 p->canon_exp = canon_rtx (p->exp);
1961 d.exp = x;
1962 d.mode = full_mode;
1963 if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1964 remove_from_table (p, i);
1965 }
1966 }
1967 }
1968 return;
1969
1970 default:
1971 abort ();
1972 }
1973 }
1974 \f
1975 /* Remove all expressions that refer to register REGNO,
1976 since they are already invalid, and we are about to
1977 mark that register valid again and don't want the old
1978 expressions to reappear as valid. */
1979
1980 static void
1981 remove_invalid_refs (regno)
1982 unsigned int regno;
1983 {
1984 unsigned int i;
1985 struct table_elt *p, *next;
1986
1987 for (i = 0; i < HASH_SIZE; i++)
1988 for (p = table[i]; p; p = next)
1989 {
1990 next = p->next_same_hash;
1991 if (GET_CODE (p->exp) != REG
1992 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx*)0))
1993 remove_from_table (p, i);
1994 }
1995 }
1996
1997 /* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
1998 and mode MODE. */
1999 static void
2000 remove_invalid_subreg_refs (regno, offset, mode)
2001 unsigned int regno;
2002 unsigned int offset;
2003 enum machine_mode mode;
2004 {
2005 unsigned int i;
2006 struct table_elt *p, *next;
2007 unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
2008
2009 for (i = 0; i < HASH_SIZE; i++)
2010 for (p = table[i]; p; p = next)
2011 {
2012 rtx exp = p->exp;
2013 next = p->next_same_hash;
2014
2015 if (GET_CODE (exp) != REG
2016 && (GET_CODE (exp) != SUBREG
2017 || GET_CODE (SUBREG_REG (exp)) != REG
2018 || REGNO (SUBREG_REG (exp)) != regno
2019 || (((SUBREG_BYTE (exp)
2020 + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
2021 && SUBREG_BYTE (exp) <= end))
2022 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx*)0))
2023 remove_from_table (p, i);
2024 }
2025 }
2026 \f
2027 /* Recompute the hash codes of any valid entries in the hash table that
2028 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
2029
2030 This is called when we make a jump equivalence. */
2031
2032 static void
2033 rehash_using_reg (x)
2034 rtx x;
2035 {
2036 unsigned int i;
2037 struct table_elt *p, *next;
2038 unsigned hash;
2039
2040 if (GET_CODE (x) == SUBREG)
2041 x = SUBREG_REG (x);
2042
2043 /* If X is not a register or if the register is known not to be in any
2044 valid entries in the table, we have no work to do. */
2045
2046 if (GET_CODE (x) != REG
2047 || REG_IN_TABLE (REGNO (x)) < 0
2048 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
2049 return;
2050
2051 /* Scan all hash chains looking for valid entries that mention X.
2052 If we find one and it is in the wrong hash chain, move it. We can skip
2053 objects that are registers, since they are handled specially. */
2054
2055 for (i = 0; i < HASH_SIZE; i++)
2056 for (p = table[i]; p; p = next)
2057 {
2058 next = p->next_same_hash;
2059 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
2060 && exp_equiv_p (p->exp, p->exp, 1, 0)
2061 && i != (hash = safe_hash (p->exp, p->mode) & HASH_MASK))
2062 {
2063 if (p->next_same_hash)
2064 p->next_same_hash->prev_same_hash = p->prev_same_hash;
2065
2066 if (p->prev_same_hash)
2067 p->prev_same_hash->next_same_hash = p->next_same_hash;
2068 else
2069 table[i] = p->next_same_hash;
2070
2071 p->next_same_hash = table[hash];
2072 p->prev_same_hash = 0;
2073 if (table[hash])
2074 table[hash]->prev_same_hash = p;
2075 table[hash] = p;
2076 }
2077 }
2078 }
2079 \f
2080 /* Remove from the hash table any expression that is a call-clobbered
2081 register. Also update their TICK values. */
2082
2083 static void
2084 invalidate_for_call ()
2085 {
2086 unsigned int regno, endregno;
2087 unsigned int i;
2088 unsigned hash;
2089 struct table_elt *p, *next;
2090 int in_table = 0;
2091
2092 /* Go through all the hard registers. For each that is clobbered in
2093 a CALL_INSN, remove the register from quantity chains and update
2094 reg_tick if defined. Also see if any of these registers is currently
2095 in the table. */
2096
2097 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2098 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2099 {
2100 delete_reg_equiv (regno);
2101 if (REG_TICK (regno) >= 0)
2102 REG_TICK (regno)++;
2103
2104 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2105 }
2106
2107 /* In the case where we have no call-clobbered hard registers in the
2108 table, we are done. Otherwise, scan the table and remove any
2109 entry that overlaps a call-clobbered register. */
2110
2111 if (in_table)
2112 for (hash = 0; hash < HASH_SIZE; hash++)
2113 for (p = table[hash]; p; p = next)
2114 {
2115 next = p->next_same_hash;
2116
2117 if (GET_CODE (p->exp) != REG
2118 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2119 continue;
2120
2121 regno = REGNO (p->exp);
2122 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
2123
2124 for (i = regno; i < endregno; i++)
2125 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2126 {
2127 remove_from_table (p, hash);
2128 break;
2129 }
2130 }
2131 }
2132 \f
2133 /* Given an expression X of type CONST,
2134 and ELT which is its table entry (or 0 if it
2135 is not in the hash table),
2136 return an alternate expression for X as a register plus integer.
2137 If none can be found, return 0. */
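/* A hedged example, for illustration only: suppose X is
   (const (plus (symbol_ref "s") (const_int 12))) and the table already
   records that (const (plus (symbol_ref "s") (const_int 4))) is equivalent
   to (reg 100).  The related-value ring built by insert links the two
   CONSTs, so the loop below finds the class containing the register and
   returns (plus (reg 100) (const_int 8)), the register plus the
   difference of the two integer terms.  */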
2138
2139 static rtx
2140 use_related_value (x, elt)
2141 rtx x;
2142 struct table_elt *elt;
2143 {
2144 register struct table_elt *relt = 0;
2145 register struct table_elt *p, *q;
2146 HOST_WIDE_INT offset;
2147
2148 /* First, is there anything related known?
2149 If we have a table element, we can tell from that.
2150 Otherwise, must look it up. */
2151
2152 if (elt != 0 && elt->related_value != 0)
2153 relt = elt;
2154 else if (elt == 0 && GET_CODE (x) == CONST)
2155 {
2156 rtx subexp = get_related_value (x);
2157 if (subexp != 0)
2158 relt = lookup (subexp,
2159 safe_hash (subexp, GET_MODE (subexp)) & HASH_MASK,
2160 GET_MODE (subexp));
2161 }
2162
2163 if (relt == 0)
2164 return 0;
2165
2166 /* Search all related table entries for one that has an
2167 equivalent register. */
2168
2169 p = relt;
2170 while (1)
2171 {
2172 /* This loop is strange in that it is executed in two different cases.
2173 The first is when X is already in the table. Then it is searching
2174 the RELATED_VALUE list of X's class (RELT). The second case is when
2175 X is not in the table. Then RELT points to a class for the related
2176 value.
2177
2178 Ensure that, whatever case we are in, we ignore classes that have
2179 the same value as X. */
2180
2181 if (rtx_equal_p (x, p->exp))
2182 q = 0;
2183 else
2184 for (q = p->first_same_value; q; q = q->next_same_value)
2185 if (GET_CODE (q->exp) == REG)
2186 break;
2187
2188 if (q)
2189 break;
2190
2191 p = p->related_value;
2192
2193 /* We went all the way around, so there is nothing to be found.
2194 Alternatively, perhaps RELT was in the table for some other reason
2195 and it has no related values recorded. */
2196 if (p == relt || p == 0)
2197 break;
2198 }
2199
2200 if (q == 0)
2201 return 0;
2202
2203 offset = (get_integer_term (x) - get_integer_term (p->exp));
2204 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
2205 return plus_constant (q->exp, offset);
2206 }
2207 \f
2208 /* Hash a string. Just add its bytes up. */
2209 static inline unsigned
2210 canon_hash_string (ps)
2211 const char *ps;
2212 {
2213 unsigned hash = 0;
2214 const unsigned char *p = (const unsigned char *)ps;
2215
2216 if (p)
2217 while (*p)
2218 hash += *p++;
2219
2220 return hash;
2221 }
2222
2223 /* Hash an rtx. We are careful to make sure the value is never negative.
2224 Equivalent registers hash identically.
2225 MODE is used in hashing for CONST_INTs only;
2226 otherwise the mode of X is used.
2227
2228 Store 1 in do_not_record if any subexpression is volatile.
2229
2230 Store 1 in hash_arg_in_memory if X contains a MEM rtx
2231 which does not have the RTX_UNCHANGING_P bit set.
2232
2233 Note that cse_insn knows that the hash code of a MEM expression
2234 is just (int) MEM plus the hash code of the address. */
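/* Illustrative note, not part of the original comment: because the REG
   case below hashes the quantity number rather than the register number,

       hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);

   two pseudos currently known to hold the same value hash into the same
   bucket, which is what lets lookup find an existing class for a copy.
   Likewise the MEM case adds (unsigned) MEM and then restarts on the
   address, giving the "(int) MEM plus the hash code of the address"
   identity mentioned above.  */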
2235
2236 static unsigned
2237 canon_hash (x, mode)
2238 rtx x;
2239 enum machine_mode mode;
2240 {
2241 register int i, j;
2242 register unsigned hash = 0;
2243 register enum rtx_code code;
2244 register const char *fmt;
2245
2246 /* repeat is used to turn tail-recursion into iteration. */
2247 repeat:
2248 if (x == 0)
2249 return hash;
2250
2251 code = GET_CODE (x);
2252 switch (code)
2253 {
2254 case REG:
2255 {
2256 unsigned int regno = REGNO (x);
2257
2258 /* On some machines, we can't record any non-fixed hard register,
2259 because extending its life will cause reload problems. We
2260 consider ap, fp, and sp to be fixed for this purpose.
2261
2262 We also consider CCmode registers to be fixed for this purpose;
2263 failure to do so leads to failure to simplify 0<100 type of
2264 conditionals.
2265
2266 On all machines, we can't record any global registers.
2267 Nor should we record any register that is in a small
2268 class, as defined by CLASS_LIKELY_SPILLED_P. */
2269
2270 if (regno < FIRST_PSEUDO_REGISTER
2271 && (global_regs[regno]
2272 || CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno))
2273 || (SMALL_REGISTER_CLASSES
2274 && ! fixed_regs[regno]
2275 && regno != FRAME_POINTER_REGNUM
2276 && regno != HARD_FRAME_POINTER_REGNUM
2277 && regno != ARG_POINTER_REGNUM
2278 && regno != STACK_POINTER_REGNUM
2279 && GET_MODE_CLASS (GET_MODE (x)) != MODE_CC)))
2280 {
2281 do_not_record = 1;
2282 return 0;
2283 }
2284
2285 hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
2286 return hash;
2287 }
2288
2289 /* We handle SUBREG of a REG specially because the underlying
2290 reg changes its hash value with every value change; we don't
2291 want to have to forget unrelated subregs when one subreg changes. */
2292 case SUBREG:
2293 {
2294 if (GET_CODE (SUBREG_REG (x)) == REG)
2295 {
2296 hash += (((unsigned) SUBREG << 7)
2297 + REGNO (SUBREG_REG (x))
2298 + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2299 return hash;
2300 }
2301 break;
2302 }
2303
2304 case CONST_INT:
2305 {
2306 unsigned HOST_WIDE_INT tem = INTVAL (x);
2307 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2308 return hash;
2309 }
2310
2311 case CONST_DOUBLE:
2312 /* This is like the general case, except that it only counts
2313 the integers representing the constant. */
2314 hash += (unsigned) code + (unsigned) GET_MODE (x);
2315 if (GET_MODE (x) != VOIDmode)
2316 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
2317 {
2318 unsigned HOST_WIDE_INT tem = XWINT (x, i);
2319 hash += tem;
2320 }
2321 else
2322 hash += ((unsigned) CONST_DOUBLE_LOW (x)
2323 + (unsigned) CONST_DOUBLE_HIGH (x));
2324 return hash;
2325
2326 /* Assume there is only one rtx object for any given label. */
2327 case LABEL_REF:
2328 hash += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2329 return hash;
2330
2331 case SYMBOL_REF:
2332 hash += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2333 return hash;
2334
2335 case MEM:
2336 /* We don't record if marked volatile or if BLKmode since we don't
2337 know the size of the move. */
2338 if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2339 {
2340 do_not_record = 1;
2341 return 0;
2342 }
2343 if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2344 {
2345 hash_arg_in_memory = 1;
2346 }
2347 /* Now that we have already found this special case,
2348 might as well speed it up as much as possible. */
2349 hash += (unsigned) MEM;
2350 x = XEXP (x, 0);
2351 goto repeat;
2352
2353 case USE:
2354 /* A USE that mentions non-volatile memory needs special
2355 handling since the MEM may be BLKmode which normally
2356 prevents an entry from being made. Pure calls are
2357 marked by a USE which mentions BLKmode memory. */
2358 if (GET_CODE (XEXP (x, 0)) == MEM
2359 && ! MEM_VOLATILE_P (XEXP (x, 0)))
2360 {
2361 hash += (unsigned)USE;
2362 x = XEXP (x, 0);
2363
2364 if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2365 hash_arg_in_memory = 1;
2366
2367 /* Now that we have already found this special case,
2368 might as well speed it up as much as possible. */
2369 hash += (unsigned) MEM;
2370 x = XEXP (x, 0);
2371 goto repeat;
2372 }
2373 break;
2374
2375 case PRE_DEC:
2376 case PRE_INC:
2377 case POST_DEC:
2378 case POST_INC:
2379 case PRE_MODIFY:
2380 case POST_MODIFY:
2381 case PC:
2382 case CC0:
2383 case CALL:
2384 case UNSPEC_VOLATILE:
2385 do_not_record = 1;
2386 return 0;
2387
2388 case ASM_OPERANDS:
2389 if (MEM_VOLATILE_P (x))
2390 {
2391 do_not_record = 1;
2392 return 0;
2393 }
2394 else
2395 {
2396 /* We don't want to take the filename and line into account. */
2397 hash += (unsigned) code + (unsigned) GET_MODE (x)
2398 + canon_hash_string (ASM_OPERANDS_TEMPLATE (x))
2399 + canon_hash_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2400 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2401
2402 if (ASM_OPERANDS_INPUT_LENGTH (x))
2403 {
2404 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2405 {
2406 hash += (canon_hash (ASM_OPERANDS_INPUT (x, i),
2407 GET_MODE (ASM_OPERANDS_INPUT (x, i)))
2408 + canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT
2409 (x, i)));
2410 }
2411
2412 hash += canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2413 x = ASM_OPERANDS_INPUT (x, 0);
2414 mode = GET_MODE (x);
2415 goto repeat;
2416 }
2417
2418 return hash;
2419 }
2420 break;
2421
2422 default:
2423 break;
2424 }
2425
2426 i = GET_RTX_LENGTH (code) - 1;
2427 hash += (unsigned) code + (unsigned) GET_MODE (x);
2428 fmt = GET_RTX_FORMAT (code);
2429 for (; i >= 0; i--)
2430 {
2431 if (fmt[i] == 'e')
2432 {
2433 rtx tem = XEXP (x, i);
2434
2435 /* If we are about to do the last recursive call
2436 needed at this level, change it into iteration.
2437 This function is called enough to be worth it. */
2438 if (i == 0)
2439 {
2440 x = tem;
2441 goto repeat;
2442 }
2443 hash += canon_hash (tem, 0);
2444 }
2445 else if (fmt[i] == 'E')
2446 for (j = 0; j < XVECLEN (x, i); j++)
2447 hash += canon_hash (XVECEXP (x, i, j), 0);
2448 else if (fmt[i] == 's')
2449 hash += canon_hash_string (XSTR (x, i));
2450 else if (fmt[i] == 'i')
2451 {
2452 register unsigned tem = XINT (x, i);
2453 hash += tem;
2454 }
2455 else if (fmt[i] == '0' || fmt[i] == 't')
2456 /* Unused. */
2457 ;
2458 else
2459 abort ();
2460 }
2461 return hash;
2462 }
2463
2464 /* Like canon_hash but with no side effects. */
2465
2466 static unsigned
2467 safe_hash (x, mode)
2468 rtx x;
2469 enum machine_mode mode;
2470 {
2471 int save_do_not_record = do_not_record;
2472 int save_hash_arg_in_memory = hash_arg_in_memory;
2473 unsigned hash = canon_hash (x, mode);
2474 hash_arg_in_memory = save_hash_arg_in_memory;
2475 do_not_record = save_do_not_record;
2476 return hash;
2477 }
2478 \f
2479 /* Return 1 iff X and Y would canonicalize into the same thing,
2480 without actually constructing the canonicalization of either one.
2481 If VALIDATE is nonzero,
2482 we assume X is an expression being processed from the rtl
2483 and Y was found in the hash table. We check register refs
2484 in Y for being marked as valid.
2485
2486 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2487 that is known to be in the register. Ordinarily, we don't allow them
2488 to match, because letting them match would cause unpredictable results
2489 in all the places that search a hash table chain for an equivalent
2490 for a given value. A possible equivalent that has different structure
2491 has its hash code computed from different data. Whether the hash code
2492 is the same as that of the given value is pure luck. */
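/* A hedged example of the EQUAL_VALUES case, illustration only:
   exp_equiv_p ((const_int 4), (reg 100), 1, 1) returns 1 when the
   quantity of reg 100 records (const_int 4) as its constant value in the
   matching mode and, because VALIDATE is nonzero, the register's table
   entry is still valid.  With EQUAL_VALUES zero, the differing rtx codes
   make the call return 0 immediately.  */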
2493
2494 static int
2495 exp_equiv_p (x, y, validate, equal_values)
2496 rtx x, y;
2497 int validate;
2498 int equal_values;
2499 {
2500 register int i, j;
2501 register enum rtx_code code;
2502 register const char *fmt;
2503
2504 /* Note: it is incorrect to assume an expression is equivalent to itself
2505 if VALIDATE is nonzero. */
2506 if (x == y && !validate)
2507 return 1;
2508 if (x == 0 || y == 0)
2509 return x == y;
2510
2511 code = GET_CODE (x);
2512 if (code != GET_CODE (y))
2513 {
2514 if (!equal_values)
2515 return 0;
2516
2517 /* If X is a constant and Y is a register or vice versa, they may be
2518 equivalent. We only have to validate if Y is a register. */
2519 if (CONSTANT_P (x) && GET_CODE (y) == REG
2520 && REGNO_QTY_VALID_P (REGNO (y)))
2521 {
2522 int y_q = REG_QTY (REGNO (y));
2523 struct qty_table_elem *y_ent = &qty_table[y_q];
2524
2525 if (GET_MODE (y) == y_ent->mode
2526 && rtx_equal_p (x, y_ent->const_rtx)
2527 && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
2528 return 1;
2529 }
2530
2531 if (CONSTANT_P (y) && code == REG
2532 && REGNO_QTY_VALID_P (REGNO (x)))
2533 {
2534 int x_q = REG_QTY (REGNO (x));
2535 struct qty_table_elem *x_ent = &qty_table[x_q];
2536
2537 if (GET_MODE (x) == x_ent->mode
2538 && rtx_equal_p (y, x_ent->const_rtx))
2539 return 1;
2540 }
2541
2542 return 0;
2543 }
2544
2545 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2546 if (GET_MODE (x) != GET_MODE (y))
2547 return 0;
2548
2549 switch (code)
2550 {
2551 case PC:
2552 case CC0:
2553 case CONST_INT:
2554 return x == y;
2555
2556 case LABEL_REF:
2557 return XEXP (x, 0) == XEXP (y, 0);
2558
2559 case SYMBOL_REF:
2560 return XSTR (x, 0) == XSTR (y, 0);
2561
2562 case REG:
2563 {
2564 unsigned int regno = REGNO (y);
2565 unsigned int endregno
2566 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2567 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2568 unsigned int i;
2569
2570 /* If the quantities are not the same, the expressions are not
2571 equivalent. If they are and we are not to validate, they
2572 are equivalent. Otherwise, ensure all regs are up-to-date. */
2573
2574 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2575 return 0;
2576
2577 if (! validate)
2578 return 1;
2579
2580 for (i = regno; i < endregno; i++)
2581 if (REG_IN_TABLE (i) != REG_TICK (i))
2582 return 0;
2583
2584 return 1;
2585 }
2586
2587 /* For commutative operations, check both orders. */
2588 case PLUS:
2589 case MULT:
2590 case AND:
2591 case IOR:
2592 case XOR:
2593 case NE:
2594 case EQ:
2595 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2596 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2597 validate, equal_values))
2598 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2599 validate, equal_values)
2600 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2601 validate, equal_values)));
2602
2603 case ASM_OPERANDS:
2604 /* We don't use the generic code below because we want to
2605 disregard filename and line numbers. */
2606
2607 /* A volatile asm isn't equivalent to any other. */
2608 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2609 return 0;
2610
2611 if (GET_MODE (x) != GET_MODE (y)
2612 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2613 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2614 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2615 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2616 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2617 return 0;
2618
2619 if (ASM_OPERANDS_INPUT_LENGTH (x))
2620 {
2621 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2622 if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2623 ASM_OPERANDS_INPUT (y, i),
2624 validate, equal_values)
2625 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2626 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2627 return 0;
2628 }
2629
2630 return 1;
2631
2632 default:
2633 break;
2634 }
2635
2636 /* Compare the elements. If any pair of corresponding elements
2637 fails to match, return 0 for the whole thing. */
2638
2639 fmt = GET_RTX_FORMAT (code);
2640 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2641 {
2642 switch (fmt[i])
2643 {
2644 case 'e':
2645 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2646 return 0;
2647 break;
2648
2649 case 'E':
2650 if (XVECLEN (x, i) != XVECLEN (y, i))
2651 return 0;
2652 for (j = 0; j < XVECLEN (x, i); j++)
2653 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2654 validate, equal_values))
2655 return 0;
2656 break;
2657
2658 case 's':
2659 if (strcmp (XSTR (x, i), XSTR (y, i)))
2660 return 0;
2661 break;
2662
2663 case 'i':
2664 if (XINT (x, i) != XINT (y, i))
2665 return 0;
2666 break;
2667
2668 case 'w':
2669 if (XWINT (x, i) != XWINT (y, i))
2670 return 0;
2671 break;
2672
2673 case '0':
2674 case 't':
2675 break;
2676
2677 default:
2678 abort ();
2679 }
2680 }
2681
2682 return 1;
2683 }
2684 \f
2685 /* Return 1 if X has a value that can vary even between two
2686 executions of the program. 0 means X can be compared reliably
2687 against certain constants or near-constants. */
2688
2689 static int
2690 cse_rtx_varies_p (x, from_alias)
2691 register rtx x;
2692 int from_alias;
2693 {
2694 /* We need not check for X and the equivalence class being of the same
2695 mode because if X is equivalent to a constant in some mode, it
2696 doesn't vary in any mode. */
2697
2698 if (GET_CODE (x) == REG
2699 && REGNO_QTY_VALID_P (REGNO (x)))
2700 {
2701 int x_q = REG_QTY (REGNO (x));
2702 struct qty_table_elem *x_ent = &qty_table[x_q];
2703
2704 if (GET_MODE (x) == x_ent->mode
2705 && x_ent->const_rtx != NULL_RTX)
2706 return 0;
2707 }
2708
2709 if (GET_CODE (x) == PLUS
2710 && GET_CODE (XEXP (x, 1)) == CONST_INT
2711 && GET_CODE (XEXP (x, 0)) == REG
2712 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2713 {
2714 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2715 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2716
2717 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2718 && x0_ent->const_rtx != NULL_RTX)
2719 return 0;
2720 }
2721
2722 /* This can happen as the result of virtual register instantiation, if
2723 the initial constant is too large to be a valid address. This gives
2724 us a three instruction sequence, load large offset into a register,
2725 load fp minus a constant into a register, then a MEM which is the
2726 sum of the two `constant' registers. */
2727 if (GET_CODE (x) == PLUS
2728 && GET_CODE (XEXP (x, 0)) == REG
2729 && GET_CODE (XEXP (x, 1)) == REG
2730 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2731 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2732 {
2733 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2734 int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2735 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2736 struct qty_table_elem *x1_ent = &qty_table[x1_q];
2737
2738 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2739 && x0_ent->const_rtx != NULL_RTX
2740 && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2741 && x1_ent->const_rtx != NULL_RTX)
2742 return 0;
2743 }
2744
2745 return rtx_varies_p (x, from_alias);
2746 }
2747 \f
2748 /* Canonicalize an expression:
2749 replace each register reference inside it
2750 with the "oldest" equivalent register.
2751
2752 If INSN is non-zero and we are replacing a pseudo with a hard register
2753 or vice versa, validate_change is used to ensure that INSN remains valid
2754 after we make our substitution. The calls are made with IN_GROUP non-zero
2755 so apply_change_group must be called upon the outermost return from this
2756 function (unless INSN is zero). The result of apply_change_group can
2757 generally be discarded since the changes we are making are optional. */
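/* Illustrative sketch, not from the original sources: if reg 105 and
   reg 106 were both copied from reg 100 earlier in this extended basic
   block, their quantities record reg 100 as the oldest equivalent
   register, so canon_reg rewrites

       (plus:SI (reg 105) (reg 106))  into  (plus:SI (reg 100) (reg 100))

   making the two uses hash and compare as the same expression.  The
   register numbers here are invented for the example.  */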
2758
2759 static rtx
2760 canon_reg (x, insn)
2761 rtx x;
2762 rtx insn;
2763 {
2764 register int i;
2765 register enum rtx_code code;
2766 register const char *fmt;
2767
2768 if (x == 0)
2769 return x;
2770
2771 code = GET_CODE (x);
2772 switch (code)
2773 {
2774 case PC:
2775 case CC0:
2776 case CONST:
2777 case CONST_INT:
2778 case CONST_DOUBLE:
2779 case SYMBOL_REF:
2780 case LABEL_REF:
2781 case ADDR_VEC:
2782 case ADDR_DIFF_VEC:
2783 return x;
2784
2785 case REG:
2786 {
2787 register int first;
2788 register int q;
2789 register struct qty_table_elem *ent;
2790
2791 /* Never replace a hard reg, because hard regs can appear
2792 in more than one machine mode, and we must preserve the mode
2793 of each occurrence. Also, some hard regs appear in
2794 MEMs that are shared and mustn't be altered. Don't try to
2795 replace any reg that maps to a reg of class NO_REGS. */
2796 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2797 || ! REGNO_QTY_VALID_P (REGNO (x)))
2798 return x;
2799
2800 q = REG_QTY (REGNO (x));
2801 ent = &qty_table[q];
2802 first = ent->first_reg;
2803 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2804 : REGNO_REG_CLASS (first) == NO_REGS ? x
2805 : gen_rtx_REG (ent->mode, first));
2806 }
2807
2808 default:
2809 break;
2810 }
2811
2812 fmt = GET_RTX_FORMAT (code);
2813 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2814 {
2815 register int j;
2816
2817 if (fmt[i] == 'e')
2818 {
2819 rtx new = canon_reg (XEXP (x, i), insn);
2820 int insn_code;
2821
2822 /* If replacing pseudo with hard reg or vice versa, ensure the
2823 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2824 if (insn != 0 && new != 0
2825 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2826 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2827 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2828 || (insn_code = recog_memoized (insn)) < 0
2829 || insn_data[insn_code].n_dups > 0))
2830 validate_change (insn, &XEXP (x, i), new, 1);
2831 else
2832 XEXP (x, i) = new;
2833 }
2834 else if (fmt[i] == 'E')
2835 for (j = 0; j < XVECLEN (x, i); j++)
2836 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2837 }
2838
2839 return x;
2840 }
2841 \f
2842 /* LOC is a location within INSN that is an operand address (the contents of
2843 a MEM). Find the best equivalent address to use that is valid for this
2844 insn.
2845
2846 On most CISC machines, complicated address modes are costly, and rtx_cost
2847 is a good approximation for that cost. However, most RISC machines have
2848 only a few (usually only one) memory reference formats. If an address is
2849 valid at all, it is often just as cheap as any other address. Hence, for
2850 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2851 costs of various addresses. For two addresses of equal cost, choose the one
2852 with the highest `rtx_cost' value as that has the potential of eliminating
2853 the most insns. For equal costs, we choose the first in the equivalence
2854 class. Note that we ignore the fact that pseudo registers are cheaper
2855 than hard registers here because we would also prefer the pseudo registers.
2856 */
2857
2858 static void
2859 find_best_addr (insn, loc, mode)
2860 rtx insn;
2861 rtx *loc;
2862 enum machine_mode mode;
2863 {
2864 struct table_elt *elt;
2865 rtx addr = *loc;
2866 #ifdef ADDRESS_COST
2867 struct table_elt *p;
2868 int found_better = 1;
2869 #endif
2870 int save_do_not_record = do_not_record;
2871 int save_hash_arg_in_memory = hash_arg_in_memory;
2872 int addr_volatile;
2873 int regno;
2874 unsigned hash;
2875
2876 /* Do not try to replace constant addresses or addresses of local and
2877 argument slots. These MEM expressions are made only once and inserted
2878 in many instructions, as well as being used to control symbol table
2879 output. It is not safe to clobber them.
2880
2881 There are some uncommon cases where the address is already in a register
2882 for some reason, but we cannot take advantage of that because we have
2883 no easy way to unshare the MEM. In addition, looking up all stack
2884 addresses is costly. */
2885 if ((GET_CODE (addr) == PLUS
2886 && GET_CODE (XEXP (addr, 0)) == REG
2887 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2888 && (regno = REGNO (XEXP (addr, 0)),
2889 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2890 || regno == ARG_POINTER_REGNUM))
2891 || (GET_CODE (addr) == REG
2892 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2893 || regno == HARD_FRAME_POINTER_REGNUM
2894 || regno == ARG_POINTER_REGNUM))
2895 || GET_CODE (addr) == ADDRESSOF
2896 || CONSTANT_ADDRESS_P (addr))
2897 return;
2898
2899 /* If this address is not simply a register, try to fold it. This will
2900 sometimes simplify the expression. Many simplifications
2901 will not be valid, but some, usually applying the associative rule, will
2902 be valid and produce better code. */
2903 if (GET_CODE (addr) != REG)
2904 {
2905 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2906 int addr_folded_cost = address_cost (folded, mode);
2907 int addr_cost = address_cost (addr, mode);
2908
2909 if ((addr_folded_cost < addr_cost
2910 || (addr_folded_cost == addr_cost
2911 /* ??? The rtx_cost comparison is left over from an older
2912 version of this code. It is probably no longer helpful. */
2913 && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2914 || approx_reg_cost (folded) < approx_reg_cost (addr))))
2915 && validate_change (insn, loc, folded, 0))
2916 addr = folded;
2917 }
2918
2919 /* If this address is not in the hash table, we can't look for equivalences
2920 of the whole address. Also, ignore if volatile. */
2921
2922 do_not_record = 0;
2923 hash = HASH (addr, Pmode);
2924 addr_volatile = do_not_record;
2925 do_not_record = save_do_not_record;
2926 hash_arg_in_memory = save_hash_arg_in_memory;
2927
2928 if (addr_volatile)
2929 return;
2930
2931 elt = lookup (addr, hash, Pmode);
2932
2933 #ifndef ADDRESS_COST
2934 if (elt)
2935 {
2936 int our_cost = elt->cost;
2937
2938 /* Find the lowest cost below ours that works. */
2939 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2940 if (elt->cost < our_cost
2941 && (GET_CODE (elt->exp) == REG
2942 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2943 && validate_change (insn, loc,
2944 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2945 return;
2946 }
2947 #else
2948
2949 if (elt)
2950 {
2951 /* We need to find the best (under the criteria documented above) entry
2952 in the class that is valid. We use the `flag' field to indicate
2953 choices that were invalid and iterate until we can't find a better
2954 one that hasn't already been tried. */
2955
2956 for (p = elt->first_same_value; p; p = p->next_same_value)
2957 p->flag = 0;
2958
2959 while (found_better)
2960 {
2961 int best_addr_cost = address_cost (*loc, mode);
2962 int best_rtx_cost = (elt->cost + 1) >> 1;
2963 int exp_cost;
2964 struct table_elt *best_elt = elt;
2965
2966 found_better = 0;
2967 for (p = elt->first_same_value; p; p = p->next_same_value)
2968 if (! p->flag)
2969 {
2970 if ((GET_CODE (p->exp) == REG
2971 || exp_equiv_p (p->exp, p->exp, 1, 0))
2972 && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
2973 || (exp_cost == best_addr_cost
2974 && ((p->cost + 1) >> 1) > best_rtx_cost)))
2975 {
2976 found_better = 1;
2977 best_addr_cost = exp_cost;
2978 best_rtx_cost = (p->cost + 1) >> 1;
2979 best_elt = p;
2980 }
2981 }
2982
2983 if (found_better)
2984 {
2985 if (validate_change (insn, loc,
2986 canon_reg (copy_rtx (best_elt->exp),
2987 NULL_RTX), 0))
2988 return;
2989 else
2990 best_elt->flag = 1;
2991 }
2992 }
2993 }
2994
2995 /* If the address is a binary operation with the first operand a register
2996 and the second a constant, do the same as above, but looking for
2997 equivalences of the register. Then try to simplify before checking for
2998 the best address to use. This catches a few cases: First is when we
2999 have REG+const and the register is another REG+const. We can often merge
3000 the constants and eliminate one insn and one register. It may also be
3001 that a machine has a cheap REG+REG+const. Finally, this improves the
3002 code on the Alpha for unaligned byte stores. */
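/* A hedged example of the case described above, illustration only: if
   *LOC is (plus (reg 102) (const_int 8)) and reg 102 is known equivalent
   to (plus (reg 101) (const_int 4)), the simplify_gen_binary call below
   can produce (plus (reg 101) (const_int 12)), which may be a cheaper
   address and may later allow the insn that set reg 102 to be deleted.
   The register numbers are invented for the example.  */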
3003
3004 if (flag_expensive_optimizations
3005 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
3006 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
3007 && GET_CODE (XEXP (*loc, 0)) == REG
3008 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
3009 {
3010 rtx c = XEXP (*loc, 1);
3011
3012 do_not_record = 0;
3013 hash = HASH (XEXP (*loc, 0), Pmode);
3014 do_not_record = save_do_not_record;
3015 hash_arg_in_memory = save_hash_arg_in_memory;
3016
3017 elt = lookup (XEXP (*loc, 0), hash, Pmode);
3018 if (elt == 0)
3019 return;
3020
3021 /* We need to find the best (under the criteria documented above) entry
3022 in the class that is valid. We use the `flag' field to indicate
3023 choices that were invalid and iterate until we can't find a better
3024 one that hasn't already been tried. */
3025
3026 for (p = elt->first_same_value; p; p = p->next_same_value)
3027 p->flag = 0;
3028
3029 while (found_better)
3030 {
3031 int best_addr_cost = address_cost (*loc, mode);
3032 int best_rtx_cost = (COST (*loc) + 1) >> 1;
3033 struct table_elt *best_elt = elt;
3034 rtx best_rtx = *loc;
3035 int count;
3036
3037 /* This is at worst an O(n^2) algorithm, so limit our search
3038 to the first 32 elements on the list. This avoids trouble
3039 compiling code with very long basic blocks that can easily
3040 call simplify_gen_binary so many times that we run out of
3041 memory. */
3042
3043 found_better = 0;
3044 for (p = elt->first_same_value, count = 0;
3045 p && count < 32;
3046 p = p->next_same_value, count++)
3047 if (! p->flag
3048 && (GET_CODE (p->exp) == REG
3049 || exp_equiv_p (p->exp, p->exp, 1, 0)))
3050 {
3051 rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
3052 p->exp, c);
3053 int new_cost;
3054 new_cost = address_cost (new, mode);
3055
3056 if (new_cost < best_addr_cost
3057 || (new_cost == best_addr_cost
3058 && (COST (new) + 1) >> 1 > best_rtx_cost))
3059 {
3060 found_better = 1;
3061 best_addr_cost = new_cost;
3062 best_rtx_cost = (COST (new) + 1) >> 1;
3063 best_elt = p;
3064 best_rtx = new;
3065 }
3066 }
3067
3068 if (found_better)
3069 {
3070 if (validate_change (insn, loc,
3071 canon_reg (copy_rtx (best_rtx),
3072 NULL_RTX), 0))
3073 return;
3074 else
3075 best_elt->flag = 1;
3076 }
3077 }
3078 }
3079 #endif
3080 }
3081 \f
3082 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
3083 operation (EQ, NE, GT, etc.), follow it back through the hash table to see
3084 what values are being compared.
3085
3086 *PARG1 and *PARG2 are updated to contain the rtx representing the values
3087 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
3088 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3089 compared to produce cc0.
3090
3091 The return value is the comparison operator and is either the code of the
3092 comparison found or the code corresponding to the inverse of the comparison. */
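/* A hedged illustration, not part of the original comment: on a CC0
   machine, fold_rtx turns a use of (cc0) into the comparison last stored
   there, say (compare (reg 100) (const_int 7)); given that COMPARE and
   (const_int 0), the loop below strips the COMPARE, so *PARG1 becomes
   (reg 100), *PARG2 becomes (const_int 7), and the code returned for an
   EQ test is still EQ.  The particular operands are made up for the
   example.  */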
3093
3094 static enum rtx_code
3095 find_comparison_args (code, parg1, parg2, pmode1, pmode2)
3096 enum rtx_code code;
3097 rtx *parg1, *parg2;
3098 enum machine_mode *pmode1, *pmode2;
3099 {
3100 rtx arg1, arg2;
3101
3102 arg1 = *parg1, arg2 = *parg2;
3103
3104 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
3105
3106 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
3107 {
3108 /* Set non-zero when we find something of interest. */
3109 rtx x = 0;
3110 int reverse_code = 0;
3111 struct table_elt *p = 0;
3112
3113 /* If arg1 is a COMPARE, extract the comparison arguments from it.
3114 On machines with CC0, this is the only case that can occur, since
3115 fold_rtx will return the COMPARE or item being compared with zero
3116 when given CC0. */
3117
3118 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3119 x = arg1;
3120
3121 /* If ARG1 is a comparison operator and CODE is testing for
3122 STORE_FLAG_VALUE, get the inner arguments. */
3123
3124 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
3125 {
3126 if (code == NE
3127 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3128 && code == LT && STORE_FLAG_VALUE == -1)
3129 #ifdef FLOAT_STORE_FLAG_VALUE
3130 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3131 && (REAL_VALUE_NEGATIVE
3132 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3133 #endif
3134 )
3135 x = arg1;
3136 else if (code == EQ
3137 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3138 && code == GE && STORE_FLAG_VALUE == -1)
3139 #ifdef FLOAT_STORE_FLAG_VALUE
3140 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3141 && (REAL_VALUE_NEGATIVE
3142 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3143 #endif
3144 )
3145 x = arg1, reverse_code = 1;
3146 }
3147
3148 /* ??? We could also check for
3149
3150 (ne (and (eq (...) (const_int 1))) (const_int 0))
3151
3152 and related forms, but let's wait until we see them occurring. */
3153
3154 if (x == 0)
3155 /* Look up ARG1 in the hash table and see if it has an equivalence
3156 that lets us see what is being compared. */
3157 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) & HASH_MASK,
3158 GET_MODE (arg1));
3159 if (p)
3160 {
3161 p = p->first_same_value;
3162
3163 /* If what we compare is already known to be constant, that is as
3164 good as it gets.
3165 We need to break the loop in this case, because otherwise we
3166 can have an infinite loop when looking at a reg that is known
3167 to be a constant which is the same as a comparison of a reg
3168 against zero which appears later in the insn stream, which in
3169 turn is constant and the same as the comparison of the first reg
3170 against zero... */
3171 if (p->is_const)
3172 break;
3173 }
3174
3175 for (; p; p = p->next_same_value)
3176 {
3177 enum machine_mode inner_mode = GET_MODE (p->exp);
3178
3179 /* If the entry isn't valid, skip it. */
3180 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
3181 continue;
3182
3183 if (GET_CODE (p->exp) == COMPARE
3184 /* Another possibility is that this machine has a compare insn
3185 that includes the comparison code. In that case, ARG1 would
3186 be equivalent to a comparison operation that would set ARG1 to
3187 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3188 ORIG_CODE is the actual comparison being done; if it is an EQ,
3189 we must reverse ORIG_CODE. On machines with a negative value
3190 for STORE_FLAG_VALUE, also look at LT and GE operations. */
3191 || ((code == NE
3192 || (code == LT
3193 && GET_MODE_CLASS (inner_mode) == MODE_INT
3194 && (GET_MODE_BITSIZE (inner_mode)
3195 <= HOST_BITS_PER_WIDE_INT)
3196 && (STORE_FLAG_VALUE
3197 & ((HOST_WIDE_INT) 1
3198 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3199 #ifdef FLOAT_STORE_FLAG_VALUE
3200 || (code == LT
3201 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3202 && (REAL_VALUE_NEGATIVE
3203 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3204 #endif
3205 )
3206 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
3207 {
3208 x = p->exp;
3209 break;
3210 }
3211 else if ((code == EQ
3212 || (code == GE
3213 && GET_MODE_CLASS (inner_mode) == MODE_INT
3214 && (GET_MODE_BITSIZE (inner_mode)
3215 <= HOST_BITS_PER_WIDE_INT)
3216 && (STORE_FLAG_VALUE
3217 & ((HOST_WIDE_INT) 1
3218 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3219 #ifdef FLOAT_STORE_FLAG_VALUE
3220 || (code == GE
3221 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3222 && (REAL_VALUE_NEGATIVE
3223 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3224 #endif
3225 )
3226 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
3227 {
3228 reverse_code = 1;
3229 x = p->exp;
3230 break;
3231 }
3232
3233 /* If this is fp + constant, the equivalent is a better operand since
3234 it may let us predict the value of the comparison. */
3235 else if (NONZERO_BASE_PLUS_P (p->exp))
3236 {
3237 arg1 = p->exp;
3238 continue;
3239 }
3240 }
3241
3242 /* If we didn't find a useful equivalence for ARG1, we are done.
3243 Otherwise, set up for the next iteration. */
3244 if (x == 0)
3245 break;
3246
3247 /* If we need to reverse the comparison, make sure that that is
3248 possible -- we can't necessarily infer the value of GE from LT
3249 with floating-point operands. */
3250 if (reverse_code)
3251 {
3252 enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3253 if (reversed == UNKNOWN)
3254 break;
3255 else code = reversed;
3256 }
3257 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3258 code = GET_CODE (x);
3259 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3260 }
3261
3262 /* Return our results. Return the modes from before fold_rtx
3263 because fold_rtx might produce const_int, and then it's too late. */
3264 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3265 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3266
3267 return code;
3268 }
3269 \f
3270 /* If X is a nontrivial arithmetic operation on an argument
3271 for which a constant value can be determined, return
3272 the result of operating on that value, as a constant.
3273 Otherwise, return X, possibly with one or more operands
3274 modified by recursive calls to this function.
3275
3276 If X is a register whose contents are known, we do NOT
3277 return those contents here. equiv_constant is called to
3278 perform that task.
3279
3280 INSN is the insn that we may be modifying. If it is 0, make a copy
3281 of X before modifying it. */
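/* A hedged example, illustration only: for (plus:SI (reg 100) (const_int 3))
   where reg 100 has a known constant equivalent of (const_int 4), the
   arithmetic handling later in this function can fold the operation and
   return (const_int 7).  When nothing is known about the operands, the
   expression comes back essentially unchanged, with at most its operands
   folded recursively as described above.  */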
3282
3283 static rtx
3284 fold_rtx (x, insn)
3285 rtx x;
3286 rtx insn;
3287 {
3288 register enum rtx_code code;
3289 register enum machine_mode mode;
3290 register const char *fmt;
3291 register int i;
3292 rtx new = 0;
3293 int copied = 0;
3294 int must_swap = 0;
3295
3296 /* Folded equivalents of first two operands of X. */
3297 rtx folded_arg0;
3298 rtx folded_arg1;
3299
3300 /* Constant equivalents of first three operands of X;
3301 0 when no such equivalent is known. */
3302 rtx const_arg0;
3303 rtx const_arg1;
3304 rtx const_arg2;
3305
3306 /* The mode of the first operand of X. We need this for sign and zero
3307 extends. */
3308 enum machine_mode mode_arg0;
3309
3310 if (x == 0)
3311 return x;
3312
3313 mode = GET_MODE (x);
3314 code = GET_CODE (x);
3315 switch (code)
3316 {
3317 case CONST:
3318 case CONST_INT:
3319 case CONST_DOUBLE:
3320 case SYMBOL_REF:
3321 case LABEL_REF:
3322 case REG:
3323 /* No use simplifying an EXPR_LIST
3324 since they are used only for lists of args
3325 in a function call's REG_EQUAL note. */
3326 case EXPR_LIST:
3327 /* Changing anything inside an ADDRESSOF is incorrect; we don't
3328 want to (e.g.,) make (addressof (const_int 0)) just because
3329 the location is known to be zero. */
3330 case ADDRESSOF:
3331 return x;
3332
3333 #ifdef HAVE_cc0
3334 case CC0:
3335 return prev_insn_cc0;
3336 #endif
3337
3338 case PC:
3339 /* If the next insn is a CODE_LABEL followed by a jump table,
3340 PC's value is a LABEL_REF pointing to that label. That
3341 lets us fold switch statements on the VAX. */
3342 if (insn && GET_CODE (insn) == JUMP_INSN)
3343 {
3344 rtx next = next_nonnote_insn (insn);
3345
3346 if (next && GET_CODE (next) == CODE_LABEL
3347 && NEXT_INSN (next) != 0
3348 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
3349 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
3350 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
3351 return gen_rtx_LABEL_REF (Pmode, next);
3352 }
3353 break;
3354
3355 case SUBREG:
3356 /* See if we previously assigned a constant value to this SUBREG. */
3357 if ((new = lookup_as_function (x, CONST_INT)) != 0
3358 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3359 return new;
3360
3361 /* If this is a paradoxical SUBREG, we have no idea what value the
3362 extra bits would have. However, if the operand is equivalent
3363 to a SUBREG whose operand has the same mode as ours, and all the
3364 modes are within a word, we can just use the inner operand
3365 because these SUBREGs just say how to treat the register.
3366
3367 Similarly if we find an integer constant. */
3368
3369 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3370 {
3371 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3372 struct table_elt *elt;
3373
3374 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3375 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3376 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3377 imode)) != 0)
3378 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3379 {
3380 if (CONSTANT_P (elt->exp)
3381 && GET_MODE (elt->exp) == VOIDmode)
3382 return elt->exp;
3383
3384 if (GET_CODE (elt->exp) == SUBREG
3385 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3386 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3387 return copy_rtx (SUBREG_REG (elt->exp));
3388 }
3389
3390 return x;
3391 }
3392
3393 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
3394 We might be able to if the SUBREG is extracting a single word in an
3395 integral mode or extracting the low part. */
3396
3397 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3398 const_arg0 = equiv_constant (folded_arg0);
3399 if (const_arg0)
3400 folded_arg0 = const_arg0;
3401
3402 if (folded_arg0 != SUBREG_REG (x))
3403 {
3404 new = simplify_subreg (mode, folded_arg0,
3405 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3406 if (new)
3407 return new;
3408 }
3409
3410 /* If this is a narrowing SUBREG and our operand is a REG, see if
3411 we can find an equivalence for REG that is an arithmetic operation
3412 in a wider mode where both operands are paradoxical SUBREGs
3413 from objects of our result mode. In that case, we couldn't report
3414 an equivalent value for that operation, since we don't know what the
3415 extra bits will be. But we can find an equivalence for this SUBREG
3416 by folding that operation in the narrow mode. This allows us to
3417 fold arithmetic in narrow modes when the machine only supports
3418 word-sized arithmetic.
3419
3420 Also look for a case where we have a SUBREG whose operand is the
3421 same as our result. If both modes are smaller than a word, we
3422 are simply interpreting a register in different modes and we
3423 can use the inner value. */
3424
3425 if (GET_CODE (folded_arg0) == REG
3426 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
3427 && subreg_lowpart_p (x))
3428 {
3429 struct table_elt *elt;
3430
3431 /* We can use HASH here since we know that canon_hash won't be
3432 called. */
3433 elt = lookup (folded_arg0,
3434 HASH (folded_arg0, GET_MODE (folded_arg0)),
3435 GET_MODE (folded_arg0));
3436
3437 if (elt)
3438 elt = elt->first_same_value;
3439
3440 for (; elt; elt = elt->next_same_value)
3441 {
3442 enum rtx_code eltcode = GET_CODE (elt->exp);
3443
3444 /* Just check for unary and binary operations. */
3445 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
3446 && GET_CODE (elt->exp) != SIGN_EXTEND
3447 && GET_CODE (elt->exp) != ZERO_EXTEND
3448 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3449 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
3450 {
3451 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3452
3453 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3454 op0 = fold_rtx (op0, NULL_RTX);
3455
3456 op0 = equiv_constant (op0);
3457 if (op0)
3458 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3459 op0, mode);
3460 }
3461 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
3462 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
3463 && eltcode != DIV && eltcode != MOD
3464 && eltcode != UDIV && eltcode != UMOD
3465 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3466 && eltcode != ROTATE && eltcode != ROTATERT
3467 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3468 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3469 == mode))
3470 || CONSTANT_P (XEXP (elt->exp, 0)))
3471 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3472 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3473 == mode))
3474 || CONSTANT_P (XEXP (elt->exp, 1))))
3475 {
3476 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3477 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3478
3479 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3480 op0 = fold_rtx (op0, NULL_RTX);
3481
3482 if (op0)
3483 op0 = equiv_constant (op0);
3484
3485 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
3486 op1 = fold_rtx (op1, NULL_RTX);
3487
3488 if (op1)
3489 op1 = equiv_constant (op1);
3490
3491 /* If we are looking for the low SImode part of
3492 (ashift:DI c (const_int 32)), it doesn't work
3493 to compute that in SImode, because a 32-bit shift
3494 in SImode is unpredictable. We know the value is 0. */
3495 if (op0 && op1
3496 && GET_CODE (elt->exp) == ASHIFT
3497 && GET_CODE (op1) == CONST_INT
3498 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3499 {
3500 if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3501
3502 /* If the count fits in the inner mode's width,
3503 but exceeds the outer mode's width,
3504 the value will get truncated to 0
3505 by the subreg. */
3506 new = const0_rtx;
3507 else
3508 /* If the count exceeds even the inner mode's width,
3509 don't fold this expression. */
3510 new = 0;
3511 }
3512 else if (op0 && op1)
3513 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
3514 op0, op1);
3515 }
3516
3517 else if (GET_CODE (elt->exp) == SUBREG
3518 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3519 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3520 <= UNITS_PER_WORD)
3521 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3522 new = copy_rtx (SUBREG_REG (elt->exp));
3523
3524 if (new)
3525 return new;
3526 }
3527 }
3528
3529 return x;
3530
3531 case NOT:
3532 case NEG:
3533 /* If we have (NOT Y), see if Y is known to be (NOT Z).
3534 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
3535 new = lookup_as_function (XEXP (x, 0), code);
3536 if (new)
3537 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3538 break;
3539
3540 case MEM:
3541 /* If we are not actually processing an insn, don't try to find the
3542 best address. Not only don't we care, but we could modify the
3543 MEM in an invalid way since we have no insn to validate against. */
3544 if (insn != 0)
3545 find_best_addr (insn, &XEXP (x, 0), GET_MODE (x));
3546
3547 {
3548 /* Even if we don't fold in the insn itself,
3549 we can safely do so here, in hopes of getting a constant. */
3550 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3551 rtx base = 0;
3552 HOST_WIDE_INT offset = 0;
3553
3554 if (GET_CODE (addr) == REG
3555 && REGNO_QTY_VALID_P (REGNO (addr)))
3556 {
3557 int addr_q = REG_QTY (REGNO (addr));
3558 struct qty_table_elem *addr_ent = &qty_table[addr_q];
3559
3560 if (GET_MODE (addr) == addr_ent->mode
3561 && addr_ent->const_rtx != NULL_RTX)
3562 addr = addr_ent->const_rtx;
3563 }
3564
3565 /* If address is constant, split it into a base and integer offset. */
3566 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3567 base = addr;
3568 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3569 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3570 {
3571 base = XEXP (XEXP (addr, 0), 0);
3572 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3573 }
3574 else if (GET_CODE (addr) == LO_SUM
3575 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3576 base = XEXP (addr, 1);
3577 else if (GET_CODE (addr) == ADDRESSOF)
3578 return change_address (x, VOIDmode, addr);
3579
3580 /* If this is a constant pool reference, we can fold it into its
3581 constant to allow better value tracking. */
3582 if (base && GET_CODE (base) == SYMBOL_REF
3583 && CONSTANT_POOL_ADDRESS_P (base))
3584 {
3585 rtx constant = get_pool_constant (base);
3586 enum machine_mode const_mode = get_pool_mode (base);
3587 rtx new;
3588
3589 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3590 constant_pool_entries_cost = COST (constant);
3591
3592 /* If we are loading the full constant, we have an equivalence. */
3593 if (offset == 0 && mode == const_mode)
3594 return constant;
3595
3596 /* If this actually isn't a constant (weird!), we can't do
3597 anything. Otherwise, handle the two most common cases:
3598 extracting a word from a multi-word constant, and extracting
3599 the low-order bits. Other cases don't seem common enough to
3600 worry about. */
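 /* For example (hypothetical): reading the second SImode word of a
 DImode constant-pool entry on a 32-bit target, or reading the
 QImode low-order byte of an SImode entry on a little-endian
 target. */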
3601 if (! CONSTANT_P (constant))
3602 return x;
3603
3604 if (GET_MODE_CLASS (mode) == MODE_INT
3605 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3606 && offset % UNITS_PER_WORD == 0
3607 && (new = operand_subword (constant,
3608 offset / UNITS_PER_WORD,
3609 0, const_mode)) != 0)
3610 return new;
3611
3612 if (((BYTES_BIG_ENDIAN
3613 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3614 || (! BYTES_BIG_ENDIAN && offset == 0))
3615 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
3616 return new;
3617 }
3618
3619 /* If this is a reference to a label at a known position in a jump
3620 table, we also know its value. */
3621 if (base && GET_CODE (base) == LABEL_REF)
3622 {
3623 rtx label = XEXP (base, 0);
3624 rtx table_insn = NEXT_INSN (label);
3625
3626 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3627 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3628 {
3629 rtx table = PATTERN (table_insn);
3630
3631 if (offset >= 0
3632 && (offset / GET_MODE_SIZE (GET_MODE (table))
3633 < XVECLEN (table, 0)))
3634 return XVECEXP (table, 0,
3635 offset / GET_MODE_SIZE (GET_MODE (table)));
3636 }
3637 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3638 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3639 {
3640 rtx table = PATTERN (table_insn);
3641
3642 if (offset >= 0
3643 && (offset / GET_MODE_SIZE (GET_MODE (table))
3644 < XVECLEN (table, 1)))
3645 {
3646 offset /= GET_MODE_SIZE (GET_MODE (table));
3647 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3648 XEXP (table, 0));
3649
3650 if (GET_MODE (table) != Pmode)
3651 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3652
3653 /* Indicate this is a constant. This isn't a
3654 valid form of CONST, but it will only be used
3655 to fold the next insns and then discarded, so
3656 it should be safe.
3657
3658 Note this expression must be explicitly discarded,
3659 by cse_insn, else it may end up in a REG_EQUAL note
3660 and "escape" to cause problems elsewhere. */
3661 return gen_rtx_CONST (GET_MODE (new), new);
3662 }
3663 }
3664 }
3665
3666 return x;
3667 }
3668
3669 #ifdef NO_FUNCTION_CSE
3670 case CALL:
3671 if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3672 return x;
3673 break;
3674 #endif
3675
3676 case ASM_OPERANDS:
3677 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3678 validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3679 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3680 break;
3681
3682 default:
3683 break;
3684 }
3685
3686 const_arg0 = 0;
3687 const_arg1 = 0;
3688 const_arg2 = 0;
3689 mode_arg0 = VOIDmode;
3690
3691 /* Try folding our operands.
3692 Then see which ones have constant values known. */
3693
3694 fmt = GET_RTX_FORMAT (code);
3695 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3696 if (fmt[i] == 'e')
3697 {
3698 rtx arg = XEXP (x, i);
3699 rtx folded_arg = arg, const_arg = 0;
3700 enum machine_mode mode_arg = GET_MODE (arg);
3701 rtx cheap_arg, expensive_arg;
3702 rtx replacements[2];
3703 int j;
3704
3705 /* Most arguments are cheap, so handle them specially. */
3706 switch (GET_CODE (arg))
3707 {
3708 case REG:
3709 /* This is the same as calling equiv_constant; it is duplicated
3710 here for speed. */
3711 if (REGNO_QTY_VALID_P (REGNO (arg)))
3712 {
3713 int arg_q = REG_QTY (REGNO (arg));
3714 struct qty_table_elem *arg_ent = &qty_table[arg_q];
3715
3716 if (arg_ent->const_rtx != NULL_RTX
3717 && GET_CODE (arg_ent->const_rtx) != REG
3718 && GET_CODE (arg_ent->const_rtx) != PLUS)
3719 const_arg
3720 = gen_lowpart_if_possible (GET_MODE (arg),
3721 arg_ent->const_rtx);
3722 }
3723 break;
3724
3725 case CONST:
3726 case CONST_INT:
3727 case SYMBOL_REF:
3728 case LABEL_REF:
3729 case CONST_DOUBLE:
3730 const_arg = arg;
3731 break;
3732
3733 #ifdef HAVE_cc0
3734 case CC0:
3735 folded_arg = prev_insn_cc0;
3736 mode_arg = prev_insn_cc0_mode;
3737 const_arg = equiv_constant (folded_arg);
3738 break;
3739 #endif
3740
3741 default:
3742 folded_arg = fold_rtx (arg, insn);
3743 const_arg = equiv_constant (folded_arg);
3744 }
3745
3746 /* For the first three operands, see if the operand
3747 is constant or equivalent to a constant. */
3748 switch (i)
3749 {
3750 case 0:
3751 folded_arg0 = folded_arg;
3752 const_arg0 = const_arg;
3753 mode_arg0 = mode_arg;
3754 break;
3755 case 1:
3756 folded_arg1 = folded_arg;
3757 const_arg1 = const_arg;
3758 break;
3759 case 2:
3760 const_arg2 = const_arg;
3761 break;
3762 }
3763
3764 /* Pick the least expensive of the folded argument and an
3765 equivalent constant argument. */
3766 if (const_arg == 0 || const_arg == folded_arg
3767 || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
3768 cheap_arg = folded_arg, expensive_arg = const_arg;
3769 else
3770 cheap_arg = const_arg, expensive_arg = folded_arg;
3771
3772 /* Try to replace the operand with the cheapest of the two
3773 possibilities. If it doesn't work and this is either of the first
3774 two operands of a commutative operation, try swapping them.
3775 If THAT fails, try the more expensive, provided it is cheaper
3776 than what is already there. */
3777
3778 if (cheap_arg == XEXP (x, i))
3779 continue;
3780
3781 if (insn == 0 && ! copied)
3782 {
3783 x = copy_rtx (x);
3784 copied = 1;
3785 }
3786
3787 /* Order the replacements from cheapest to most expensive. */
3788 replacements[0] = cheap_arg;
3789 replacements[1] = expensive_arg;
3790
3791 for (j = 0; j < 2 && replacements[j]; j++)
3792 {
3793 int old_cost = COST_IN (XEXP (x, i), code);
3794 int new_cost = COST_IN (replacements[j], code);
3795
3796 /* Stop if what existed before was cheaper. Prefer constants
3797 in the case of a tie. */
3798 if (new_cost > old_cost
3799 || (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3800 break;
3801
3802 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3803 break;
3804
3805 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c'
3806 || code == LTGT || code == UNEQ || code == ORDERED
3807 || code == UNORDERED)
3808 {
3809 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3810 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3811
3812 if (apply_change_group ())
3813 {
3814 /* Swap them back to be invalid so that this loop can
3815 continue and flag them to be swapped back later. */
3816 rtx tem;
3817
3818 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3819 XEXP (x, 1) = tem;
3820 must_swap = 1;
3821 break;
3822 }
3823 }
3824 }
3825 }
3826
3827 else
3828 {
3829 if (fmt[i] == 'E')
3830 /* Don't try to fold inside of a vector of expressions.
3831 Doing nothing is harmless. */
3832 {;}
3833 }
3834
3835 /* If a commutative operation, place a constant integer as the second
3836 operand unless the first operand is also a constant integer. Otherwise,
3837 place any constant second unless the first operand is also a constant. */
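 /* E.g. (illustrative): (plus:SI (const_int 4) (reg:SI 60)) is
 canonicalized here to (plus:SI (reg:SI 60) (const_int 4)). */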
3838
3839 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c'
3840 || code == LTGT || code == UNEQ || code == ORDERED
3841 || code == UNORDERED)
3842 {
3843 if (must_swap || (const_arg0
3844 && (const_arg1 == 0
3845 || (GET_CODE (const_arg0) == CONST_INT
3846 && GET_CODE (const_arg1) != CONST_INT))))
3847 {
3848 register rtx tem = XEXP (x, 0);
3849
3850 if (insn == 0 && ! copied)
3851 {
3852 x = copy_rtx (x);
3853 copied = 1;
3854 }
3855
3856 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3857 validate_change (insn, &XEXP (x, 1), tem, 1);
3858 if (apply_change_group ())
3859 {
3860 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3861 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3862 }
3863 }
3864 }
3865
3866 /* If X is an arithmetic operation, see if we can simplify it. */
3867
3868 switch (GET_RTX_CLASS (code))
3869 {
3870 case '1':
3871 {
3872 int is_const = 0;
3873
3874 /* We can't simplify extension ops unless we know the
3875 original mode. */
3876 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3877 && mode_arg0 == VOIDmode)
3878 break;
3879
3880 /* If we had a CONST, strip it off and put it back later if we
3881 fold. */
3882 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3883 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3884
3885 new = simplify_unary_operation (code, mode,
3886 const_arg0 ? const_arg0 : folded_arg0,
3887 mode_arg0);
3888 if (new != 0 && is_const)
3889 new = gen_rtx_CONST (mode, new);
3890 }
3891 break;
3892
3893 case '<':
3894 /* See what items are actually being compared and set FOLDED_ARG[01]
3895 to those values and CODE to the actual comparison code. If any are
3896 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3897 do anything if both operands are already known to be constant. */
3898
3899 if (const_arg0 == 0 || const_arg1 == 0)
3900 {
3901 struct table_elt *p0, *p1;
3902 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3903 enum machine_mode mode_arg1;
3904
3905 #ifdef FLOAT_STORE_FLAG_VALUE
3906 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3907 {
3908 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3909 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3910 false_rtx = CONST0_RTX (mode);
3911 }
3912 #endif
3913
3914 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3915 &mode_arg0, &mode_arg1);
3916 const_arg0 = equiv_constant (folded_arg0);
3917 const_arg1 = equiv_constant (folded_arg1);
3918
3919 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3920 what kinds of things are being compared, so we can't do
3921 anything with this comparison. */
3922
3923 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3924 break;
3925
3926 /* If we do not now have two constants being compared, see
3927 if we can nevertheless deduce some things about the
3928 comparison. */
3929 if (const_arg0 == 0 || const_arg1 == 0)
3930 {
3931 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or
3932 non-explicit constant? These aren't zero, but we
3933 don't know their sign. */
3934 if (const_arg1 == const0_rtx
3935 && (NONZERO_BASE_PLUS_P (folded_arg0)
3936 #if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
3937 come out as 0. */
3938 || GET_CODE (folded_arg0) == SYMBOL_REF
3939 #endif
3940 || GET_CODE (folded_arg0) == LABEL_REF
3941 || GET_CODE (folded_arg0) == CONST))
3942 {
3943 if (code == EQ)
3944 return false_rtx;
3945 else if (code == NE)
3946 return true_rtx;
3947 }
3948
3949 /* See if the two operands are the same. */
3950
3951 if (folded_arg0 == folded_arg1
3952 || (GET_CODE (folded_arg0) == REG
3953 && GET_CODE (folded_arg1) == REG
3954 && (REG_QTY (REGNO (folded_arg0))
3955 == REG_QTY (REGNO (folded_arg1))))
3956 || ((p0 = lookup (folded_arg0,
3957 (safe_hash (folded_arg0, mode_arg0)
3958 & HASH_MASK), mode_arg0))
3959 && (p1 = lookup (folded_arg1,
3960 (safe_hash (folded_arg1, mode_arg0)
3961 & HASH_MASK), mode_arg0))
3962 && p0->first_same_value == p1->first_same_value))
3963 {
3964 /* Sadly two equal NaNs are not equivalent. */
3965 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3966 || ! FLOAT_MODE_P (mode_arg0)
3967 || flag_unsafe_math_optimizations)
3968 return ((code == EQ || code == LE || code == GE
3969 || code == LEU || code == GEU || code == UNEQ
3970 || code == UNLE || code == UNGE || code == ORDERED)
3971 ? true_rtx : false_rtx);
3972 /* Take care for the FP compares we can resolve. */
3973 if (code == UNEQ || code == UNLE || code == UNGE)
3974 return true_rtx;
3975 if (code == LTGT || code == LT || code == GT)
3976 return false_rtx;
3977 }
3978
3979 /* If FOLDED_ARG0 is a register, see if the comparison we are
3980 doing now is either the same as we did before or the reverse
3981 (we only check the reverse if not floating-point). */
3982 else if (GET_CODE (folded_arg0) == REG)
3983 {
3984 int qty = REG_QTY (REGNO (folded_arg0));
3985
3986 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3987 {
3988 struct qty_table_elem *ent = &qty_table[qty];
3989
3990 if ((comparison_dominates_p (ent->comparison_code, code)
3991 || (! FLOAT_MODE_P (mode_arg0)
3992 && comparison_dominates_p (ent->comparison_code,
3993 reverse_condition (code))))
3994 && (rtx_equal_p (ent->comparison_const, folded_arg1)
3995 || (const_arg1
3996 && rtx_equal_p (ent->comparison_const,
3997 const_arg1))
3998 || (GET_CODE (folded_arg1) == REG
3999 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
4000 return (comparison_dominates_p (ent->comparison_code, code)
4001 ? true_rtx : false_rtx);
4002 }
4003 }
4004 }
4005 }
4006
4007 /* If we are comparing against zero, see if the first operand is
4008 equivalent to an IOR with a constant. If so, we may be able to
4009 determine the result of this comparison. */
4010
4011 if (const_arg1 == const0_rtx)
4012 {
4013 rtx y = lookup_as_function (folded_arg0, IOR);
4014 rtx inner_const;
4015
4016 if (y != 0
4017 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
4018 && GET_CODE (inner_const) == CONST_INT
4019 && INTVAL (inner_const) != 0)
4020 {
4021 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
4022 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
4023 && (INTVAL (inner_const)
4024 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
4025 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
4026
4027 #ifdef FLOAT_STORE_FLAG_VALUE
4028 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
4029 {
4030 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
4031 (FLOAT_STORE_FLAG_VALUE (mode), mode));
4032 false_rtx = CONST0_RTX (mode);
4033 }
4034 #endif
4035
4036 switch (code)
4037 {
4038 case EQ:
4039 return false_rtx;
4040 case NE:
4041 return true_rtx;
4042 case LT: case LE:
4043 if (has_sign)
4044 return true_rtx;
4045 break;
4046 case GT: case GE:
4047 if (has_sign)
4048 return false_rtx;
4049 break;
4050 default:
4051 break;
4052 }
4053 }
4054 }
4055
4056 new = simplify_relational_operation (code,
4057 (mode_arg0 != VOIDmode
4058 ? mode_arg0
4059 : (GET_MODE (const_arg0
4060 ? const_arg0
4061 : folded_arg0)
4062 != VOIDmode)
4063 ? GET_MODE (const_arg0
4064 ? const_arg0
4065 : folded_arg0)
4066 : GET_MODE (const_arg1
4067 ? const_arg1
4068 : folded_arg1)),
4069 const_arg0 ? const_arg0 : folded_arg0,
4070 const_arg1 ? const_arg1 : folded_arg1);
4071 #ifdef FLOAT_STORE_FLAG_VALUE
4072 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
4073 {
4074 if (new == const0_rtx)
4075 new = CONST0_RTX (mode);
4076 else
4077 new = (CONST_DOUBLE_FROM_REAL_VALUE
4078 (FLOAT_STORE_FLAG_VALUE (mode), mode));
4079 }
4080 #endif
4081 break;
4082
4083 case '2':
4084 case 'c':
4085 switch (code)
4086 {
4087 case PLUS:
4088 /* If the second operand is a LABEL_REF, see if the first is a MINUS
4089 with that LABEL_REF as its second operand. If so, the result is
4090 the first operand of that MINUS. This handles switches with an
4091 ADDR_DIFF_VEC table. */
4092 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4093 {
4094 rtx y
4095 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
4096 : lookup_as_function (folded_arg0, MINUS);
4097
4098 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4099 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4100 return XEXP (y, 0);
4101
4102 /* Now try for a CONST of a MINUS like the above. */
4103 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
4104 : lookup_as_function (folded_arg0, CONST))) != 0
4105 && GET_CODE (XEXP (y, 0)) == MINUS
4106 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4107 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
4108 return XEXP (XEXP (y, 0), 0);
4109 }
4110
4111 /* Likewise if the operands are in the other order. */
4112 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
4113 {
4114 rtx y
4115 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
4116 : lookup_as_function (folded_arg1, MINUS);
4117
4118 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4119 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
4120 return XEXP (y, 0);
4121
4122 /* Now try for a CONST of a MINUS like the above. */
4123 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
4124 : lookup_as_function (folded_arg1, CONST))) != 0
4125 && GET_CODE (XEXP (y, 0)) == MINUS
4126 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4127 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
4128 return XEXP (XEXP (y, 0), 0);
4129 }
4130
4131 /* If second operand is a register equivalent to a negative
4132 CONST_INT, see if we can find a register equivalent to the
4133 positive constant. Make a MINUS if so. Don't do this for
4134 a non-negative constant since we might then alternate between
4135 choosing positive and negative constants. Having the positive
4136 constant previously-used is the more common case. Be sure
4137 the resulting constant is non-negative; if const_arg1 were
4138 the smallest negative number this would overflow: depending
4139 on the mode, this would either just be the same value (and
4140 hence not save anything) or be incorrect. */
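 /* Hypothetical example: for (plus:SI (reg:SI 60) (reg:SI 61)) where
 (reg:SI 61) is known to hold (const_int -4), if some (reg:SI 62)
 is known to hold (const_int 4), the expression can be rewritten
 as (minus:SI (reg:SI 60) (reg:SI 62)). */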
4141 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4142 && INTVAL (const_arg1) < 0
4143 /* This used to test
4144
4145 -INTVAL (const_arg1) >= 0
4146
4147 But the Sun V5.0 compilers mis-compiled that test. So
4148 instead we test for the problematic value in a more direct
4149 manner and hope the Sun compilers get it correct. */
4150 && INTVAL (const_arg1) !=
4151 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
4152 && GET_CODE (folded_arg1) == REG)
4153 {
4154 rtx new_const = GEN_INT (-INTVAL (const_arg1));
4155 struct table_elt *p
4156 = lookup (new_const, safe_hash (new_const, mode) & HASH_MASK,
4157 mode);
4158
4159 if (p)
4160 for (p = p->first_same_value; p; p = p->next_same_value)
4161 if (GET_CODE (p->exp) == REG)
4162 return simplify_gen_binary (MINUS, mode, folded_arg0,
4163 canon_reg (p->exp, NULL_RTX));
4164 }
4165 goto from_plus;
4166
4167 case MINUS:
4168 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4169 If so, produce (PLUS Z C2-C). */
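 /* E.g. (illustrative): (minus:SI Y (const_int 3)) with Y known to
 be (plus:SI Z (const_int 8)) folds to (plus:SI Z (const_int 5)). */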
4170 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4171 {
4172 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4173 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
4174 return fold_rtx (plus_constant (copy_rtx (y),
4175 -INTVAL (const_arg1)),
4176 NULL_RTX);
4177 }
4178
4179 /* Fall through. */
4180
4181 from_plus:
4182 case SMIN: case SMAX: case UMIN: case UMAX:
4183 case IOR: case AND: case XOR:
4184 case MULT: case DIV: case UDIV:
4185 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
4186 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4187 is known to be of similar form, we may be able to replace the
4188 operation with a combined operation. This may eliminate the
4189 intermediate operation if every use is simplified in this way.
4190 Note that the similar optimization done by combine.c only works
4191 if the intermediate operation's result has only one reference. */
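 /* Hypothetical instance: if (reg:SI 70) is known to be
 (ashift:SI (reg:SI 71) (const_int 2)), then
 (ashift:SI (reg:SI 70) (const_int 3)) can be replaced by
 (ashift:SI (reg:SI 71) (const_int 5)), so the intermediate
 register may become unused. */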
4192
4193 if (GET_CODE (folded_arg0) == REG
4194 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4195 {
4196 int is_shift
4197 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4198 rtx y = lookup_as_function (folded_arg0, code);
4199 rtx inner_const;
4200 enum rtx_code associate_code;
4201 rtx new_const;
4202
4203 if (y == 0
4204 || 0 == (inner_const
4205 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4206 || GET_CODE (inner_const) != CONST_INT
4207 /* If we have compiled a statement like
4208 "if (x == (x & mask1))", and now are looking at
4209 "x & mask2", we will have a case where the first operand
4210 of Y is the same as our first operand. Unless we detect
4211 this case, an infinite loop will result. */
4212 || XEXP (y, 0) == folded_arg0)
4213 break;
4214
4215 /* Don't associate these operations if they are a PLUS with the
4216 same constant and it is a power of two. These might be doable
4217 with a pre- or post-increment. Similarly for two subtracts of
4218 identical powers of two with post decrement. */
4219
4220 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
4221 && ((HAVE_PRE_INCREMENT
4222 && exact_log2 (INTVAL (const_arg1)) >= 0)
4223 || (HAVE_POST_INCREMENT
4224 && exact_log2 (INTVAL (const_arg1)) >= 0)
4225 || (HAVE_PRE_DECREMENT
4226 && exact_log2 (- INTVAL (const_arg1)) >= 0)
4227 || (HAVE_POST_DECREMENT
4228 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4229 break;
4230
4231 /* Compute the code used to compose the constants. For example,
4232 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
4233
4234 associate_code
4235 = (code == MULT || code == DIV || code == UDIV ? MULT
4236 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
4237
4238 new_const = simplify_binary_operation (associate_code, mode,
4239 const_arg1, inner_const);
4240
4241 if (new_const == 0)
4242 break;
4243
4244 /* If we are associating shift operations, don't let this
4245 produce a shift of the size of the object or larger.
4246 This could occur when we follow a sign-extend by a right
4247 shift on a machine that does a sign-extend as a pair
4248 of shifts. */
4249
4250 if (is_shift && GET_CODE (new_const) == CONST_INT
4251 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4252 {
4253 /* As an exception, we can turn an ASHIFTRT of this
4254 form into a shift of the number of bits - 1. */
4255 if (code == ASHIFTRT)
4256 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4257 else
4258 break;
4259 }
4260
4261 y = copy_rtx (XEXP (y, 0));
4262
4263 /* If Y contains our first operand (the most common way this
4264 can happen is if Y is a MEM), we would go into an infinite
4265 loop if we tried to fold it. So don't in that case. */
4266
4267 if (! reg_mentioned_p (folded_arg0, y))
4268 y = fold_rtx (y, insn);
4269
4270 return simplify_gen_binary (code, mode, y, new_const);
4271 }
4272 break;
4273
4274 default:
4275 break;
4276 }
4277
4278 new = simplify_binary_operation (code, mode,
4279 const_arg0 ? const_arg0 : folded_arg0,
4280 const_arg1 ? const_arg1 : folded_arg1);
4281 break;
4282
4283 case 'o':
4284 /* (lo_sum (high X) X) is simply X. */
4285 if (code == LO_SUM && const_arg0 != 0
4286 && GET_CODE (const_arg0) == HIGH
4287 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4288 return const_arg1;
4289 break;
4290
4291 case '3':
4292 case 'b':
4293 new = simplify_ternary_operation (code, mode, mode_arg0,
4294 const_arg0 ? const_arg0 : folded_arg0,
4295 const_arg1 ? const_arg1 : folded_arg1,
4296 const_arg2 ? const_arg2 : XEXP (x, 2));
4297 break;
4298
4299 case 'x':
4300 /* Always eliminate CONSTANT_P_RTX at this stage. */
4301 if (code == CONSTANT_P_RTX)
4302 return (const_arg0 ? const1_rtx : const0_rtx);
4303 break;
4304 }
4305
4306 return new ? new : x;
4307 }
4308 \f
4309 /* Return a constant value currently equivalent to X.
4310 Return 0 if we don't know one. */
4311
4312 static rtx
4313 equiv_constant (x)
4314 rtx x;
4315 {
4316 if (GET_CODE (x) == REG
4317 && REGNO_QTY_VALID_P (REGNO (x)))
4318 {
4319 int x_q = REG_QTY (REGNO (x));
4320 struct qty_table_elem *x_ent = &qty_table[x_q];
4321
4322 if (x_ent->const_rtx)
4323 x = gen_lowpart_if_possible (GET_MODE (x), x_ent->const_rtx);
4324 }
4325
4326 if (x == 0 || CONSTANT_P (x))
4327 return x;
4328
4329 /* If X is a MEM, try to fold it outside the context of any insn to see if
4330 it might be equivalent to a constant. That handles the case where it
4331 is a constant-pool reference. Then try to look it up in the hash table
4332 in case it is something whose value we have seen before. */
4333
4334 if (GET_CODE (x) == MEM)
4335 {
4336 struct table_elt *elt;
4337
4338 x = fold_rtx (x, NULL_RTX);
4339 if (CONSTANT_P (x))
4340 return x;
4341
4342 elt = lookup (x, safe_hash (x, GET_MODE (x)) & HASH_MASK, GET_MODE (x));
4343 if (elt == 0)
4344 return 0;
4345
4346 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4347 if (elt->is_const && CONSTANT_P (elt->exp))
4348 return elt->exp;
4349 }
4350
4351 return 0;
4352 }
4353 \f
4354 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4355 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4356 least-significant part of X.
4357 MODE specifies how big a part of X to return.
4358
4359 If the requested operation cannot be done, 0 is returned.
4360
4361 This is similar to gen_lowpart in emit-rtl.c. */
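/* Illustrative use (hypothetical register number): on a little-endian
 target, gen_lowpart_if_possible (QImode, (reg:SI 80)) would typically
 yield (subreg:QI (reg:SI 80) 0); for a MEM the address is adjusted
 instead, and 0 is returned if the adjusted address is not valid. */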
4362
4363 rtx
4364 gen_lowpart_if_possible (mode, x)
4365 enum machine_mode mode;
4366 register rtx x;
4367 {
4368 rtx result = gen_lowpart_common (mode, x);
4369
4370 if (result)
4371 return result;
4372 else if (GET_CODE (x) == MEM)
4373 {
4374 /* This is the only other case we handle. */
4375 register int offset = 0;
4376 rtx new;
4377
4378 if (WORDS_BIG_ENDIAN)
4379 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4380 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4381 if (BYTES_BIG_ENDIAN)
4382 /* Adjust the address so that the address-after-the-data is
4383 unchanged. */
4384 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4385 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
4386
4387 new = adjust_address_nv (x, mode, offset);
4388 if (! memory_address_p (mode, XEXP (new, 0)))
4389 return 0;
4390
4391 return new;
4392 }
4393 else
4394 return 0;
4395 }
4396 \f
4397 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4398 branch. It will be zero if not.
4399
4400 In certain cases, this can cause us to add an equivalence. For example,
4401 if we are following the taken case of
4402 if (i == 2)
4403 we can add the fact that `i' and '2' are now equivalent.
4404
4405 In any case, we can record that this comparison was passed. If the same
4406 comparison is seen later, we will know its value. */
4407
4408 static void
4409 record_jump_equiv (insn, taken)
4410 rtx insn;
4411 int taken;
4412 {
4413 int cond_known_true;
4414 rtx op0, op1;
4415 rtx set;
4416 enum machine_mode mode, mode0, mode1;
4417 int reversed_nonequality = 0;
4418 enum rtx_code code;
4419
4420 /* Ensure this is the right kind of insn. */
4421 if (! any_condjump_p (insn))
4422 return;
4423 set = pc_set (insn);
4424
4425 /* See if this jump condition is known true or false. */
4426 if (taken)
4427 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
4428 else
4429 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
4430
4431 /* Get the type of comparison being done and the operands being compared.
4432 If we had to reverse a non-equality condition, record that fact so we
4433 know that it isn't valid for floating-point. */
4434 code = GET_CODE (XEXP (SET_SRC (set), 0));
4435 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4436 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4437
4438 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4439 if (! cond_known_true)
4440 {
4441 code = reversed_comparison_code_parts (code, op0, op1, insn);
4442
4443 /* Don't remember if we can't find the inverse. */
4444 if (code == UNKNOWN)
4445 return;
4446 }
4447
4448 /* The mode is the mode of the non-constant. */
4449 mode = mode0;
4450 if (mode1 != VOIDmode)
4451 mode = mode1;
4452
4453 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4454 }
4455
4456 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4457 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4458 Make any useful entries we can with that information. Called from
4459 above function and called recursively. */
4460
4461 static void
4462 record_jump_cond (code, mode, op0, op1, reversed_nonequality)
4463 enum rtx_code code;
4464 enum machine_mode mode;
4465 rtx op0, op1;
4466 int reversed_nonequality;
4467 {
4468 unsigned op0_hash, op1_hash;
4469 int op0_in_memory, op1_in_memory;
4470 struct table_elt *op0_elt, *op1_elt;
4471
4472 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4473 we know that they are also equal in the smaller mode (this is also
4474 true for all smaller modes whether or not there is a SUBREG, but
4475 is not worth testing for with no SUBREG). */
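 /* Illustration (hypothetical, little-endian): from knowing
 (subreg:SI (reg:HI 90) 0) == (reg:SI 91), where the SUBREG is
 paradoxical, we can also record
 (reg:HI 90) == (subreg:HI (reg:SI 91) 0). */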
4476
4477 /* Note that GET_MODE (op0) may not equal MODE. */
4478 if (code == EQ && GET_CODE (op0) == SUBREG
4479 && (GET_MODE_SIZE (GET_MODE (op0))
4480 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4481 {
4482 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4483 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4484
4485 record_jump_cond (code, mode, SUBREG_REG (op0),
4486 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4487 reversed_nonequality);
4488 }
4489
4490 if (code == EQ && GET_CODE (op1) == SUBREG
4491 && (GET_MODE_SIZE (GET_MODE (op1))
4492 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4493 {
4494 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4495 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4496
4497 record_jump_cond (code, mode, SUBREG_REG (op1),
4498 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4499 reversed_nonequality);
4500 }
4501
4502 /* Similarly, if this is an NE comparison, and either is a SUBREG
4503 making a smaller mode, we know the whole thing is also NE. */
4504
4505 /* Note that GET_MODE (op0) may not equal MODE;
4506 if we test MODE instead, we can get an infinite recursion
4507 alternating between two modes each wider than MODE. */
4508
4509 if (code == NE && GET_CODE (op0) == SUBREG
4510 && subreg_lowpart_p (op0)
4511 && (GET_MODE_SIZE (GET_MODE (op0))
4512 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4513 {
4514 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4515 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4516
4517 record_jump_cond (code, mode, SUBREG_REG (op0),
4518 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4519 reversed_nonequality);
4520 }
4521
4522 if (code == NE && GET_CODE (op1) == SUBREG
4523 && subreg_lowpart_p (op1)
4524 && (GET_MODE_SIZE (GET_MODE (op1))
4525 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4526 {
4527 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4528 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4529
4530 record_jump_cond (code, mode, SUBREG_REG (op1),
4531 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4532 reversed_nonequality);
4533 }
4534
4535 /* Hash both operands. */
4536
4537 do_not_record = 0;
4538 hash_arg_in_memory = 0;
4539 op0_hash = HASH (op0, mode);
4540 op0_in_memory = hash_arg_in_memory;
4541
4542 if (do_not_record)
4543 return;
4544
4545 do_not_record = 0;
4546 hash_arg_in_memory = 0;
4547 op1_hash = HASH (op1, mode);
4548 op1_in_memory = hash_arg_in_memory;
4549
4550 if (do_not_record)
4551 return;
4552
4553 /* Look up both operands. */
4554 op0_elt = lookup (op0, op0_hash, mode);
4555 op1_elt = lookup (op1, op1_hash, mode);
4556
4557 /* If both operands are already equivalent or if they are not in the
4558 table but are identical, do nothing. */
4559 if ((op0_elt != 0 && op1_elt != 0
4560 && op0_elt->first_same_value == op1_elt->first_same_value)
4561 || op0 == op1 || rtx_equal_p (op0, op1))
4562 return;
4563
4564 /* If we aren't setting two things equal all we can do is save this
4565 comparison. Similarly if this is floating-point. In the latter
4566 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4567 If we record the equality, we might inadvertently delete code
4568 whose intent was to change -0 to +0. */
4569
4570 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4571 {
4572 struct qty_table_elem *ent;
4573 int qty;
4574
4575 /* If we reversed a floating-point comparison, if OP0 is not a
4576 register, or if OP1 is neither a register nor a constant, we can't
4577 do anything. */
4578
4579 if (GET_CODE (op1) != REG)
4580 op1 = equiv_constant (op1);
4581
4582 if ((reversed_nonequality && FLOAT_MODE_P (mode))
4583 || GET_CODE (op0) != REG || op1 == 0)
4584 return;
4585
4586 /* Put OP0 in the hash table if it isn't already. This gives it a
4587 new quantity number. */
4588 if (op0_elt == 0)
4589 {
4590 if (insert_regs (op0, NULL, 0))
4591 {
4592 rehash_using_reg (op0);
4593 op0_hash = HASH (op0, mode);
4594
4595 /* If OP0 is contained in OP1, this changes its hash code
4596 as well. Faster to rehash than to check, except
4597 for the simple case of a constant. */
4598 if (! CONSTANT_P (op1))
4599 op1_hash = HASH (op1,mode);
4600 }
4601
4602 op0_elt = insert (op0, NULL, op0_hash, mode);
4603 op0_elt->in_memory = op0_in_memory;
4604 }
4605
4606 qty = REG_QTY (REGNO (op0));
4607 ent = &qty_table[qty];
4608
4609 ent->comparison_code = code;
4610 if (GET_CODE (op1) == REG)
4611 {
4612 /* Look it up again--in case op0 and op1 are the same. */
4613 op1_elt = lookup (op1, op1_hash, mode);
4614
4615 /* Put OP1 in the hash table so it gets a new quantity number. */
4616 if (op1_elt == 0)
4617 {
4618 if (insert_regs (op1, NULL, 0))
4619 {
4620 rehash_using_reg (op1);
4621 op1_hash = HASH (op1, mode);
4622 }
4623
4624 op1_elt = insert (op1, NULL, op1_hash, mode);
4625 op1_elt->in_memory = op1_in_memory;
4626 }
4627
4628 ent->comparison_const = NULL_RTX;
4629 ent->comparison_qty = REG_QTY (REGNO (op1));
4630 }
4631 else
4632 {
4633 ent->comparison_const = op1;
4634 ent->comparison_qty = -1;
4635 }
4636
4637 return;
4638 }
4639
4640 /* If either side is still missing an equivalence, make it now,
4641 then merge the equivalences. */
4642
4643 if (op0_elt == 0)
4644 {
4645 if (insert_regs (op0, NULL, 0))
4646 {
4647 rehash_using_reg (op0);
4648 op0_hash = HASH (op0, mode);
4649 }
4650
4651 op0_elt = insert (op0, NULL, op0_hash, mode);
4652 op0_elt->in_memory = op0_in_memory;
4653 }
4654
4655 if (op1_elt == 0)
4656 {
4657 if (insert_regs (op1, NULL, 0))
4658 {
4659 rehash_using_reg (op1);
4660 op1_hash = HASH (op1, mode);
4661 }
4662
4663 op1_elt = insert (op1, NULL, op1_hash, mode);
4664 op1_elt->in_memory = op1_in_memory;
4665 }
4666
4667 merge_equiv_classes (op0_elt, op1_elt);
4668 last_jump_equiv_class = op0_elt;
4669 }
4670 \f
4671 /* CSE processing for one instruction.
4672 First simplify sources and addresses of all assignments
4673 in the instruction, using previously-computed equivalent values.
4674 Then install the new sources and destinations in the table
4675 of available values.
4676
4677 If LIBCALL_INSN is nonzero, don't record any equivalence made in
4678 the insn. It means that INSN is inside libcall block. In this
4679 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
4680
4681 /* Data on one SET contained in the instruction. */
4682
4683 struct set
4684 {
4685 /* The SET rtx itself. */
4686 rtx rtl;
4687 /* The SET_SRC of the rtx (the original value, if it is changing). */
4688 rtx src;
4689 /* The hash-table element for the SET_SRC of the SET. */
4690 struct table_elt *src_elt;
4691 /* Hash value for the SET_SRC. */
4692 unsigned src_hash;
4693 /* Hash value for the SET_DEST. */
4694 unsigned dest_hash;
4695 /* The SET_DEST, with SUBREG, etc., stripped. */
4696 rtx inner_dest;
4697 /* Nonzero if the SET_SRC is in memory. */
4698 char src_in_memory;
4699 /* Nonzero if the SET_SRC contains something
4700 whose value cannot be predicted and understood. */
4701 char src_volatile;
4702 /* Original machine mode, in case it becomes a CONST_INT. */
4703 enum machine_mode mode;
4704 /* A constant equivalent for SET_SRC, if any. */
4705 rtx src_const;
4706 /* Original SET_SRC value used for libcall notes. */
4707 rtx orig_src;
4708 /* Hash value of constant equivalent for SET_SRC. */
4709 unsigned src_const_hash;
4710 /* Table entry for constant equivalent for SET_SRC, if any. */
4711 struct table_elt *src_const_elt;
4712 };
4713
4714 static void
4715 cse_insn (insn, libcall_insn)
4716 rtx insn;
4717 rtx libcall_insn;
4718 {
4719 register rtx x = PATTERN (insn);
4720 register int i;
4721 rtx tem;
4722 register int n_sets = 0;
4723
4724 #ifdef HAVE_cc0
4725 /* Records what this insn does to set CC0. */
4726 rtx this_insn_cc0 = 0;
4727 enum machine_mode this_insn_cc0_mode = VOIDmode;
4728 #endif
4729
4730 rtx src_eqv = 0;
4731 struct table_elt *src_eqv_elt = 0;
4732 int src_eqv_volatile = 0;
4733 int src_eqv_in_memory = 0;
4734 unsigned src_eqv_hash = 0;
4735
4736 struct set *sets = (struct set *) 0;
4737
4738 this_insn = insn;
4739
4740 /* Find all the SETs and CLOBBERs in this instruction.
4741 Record all the SETs in the array `set' and count them.
4742 Also determine whether there is a CLOBBER that invalidates
4743 all memory references, or all references at varying addresses. */
4744
4745 if (GET_CODE (insn) == CALL_INSN)
4746 {
4747 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4748 {
4749 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4750 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4751 XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4752 }
4753 }
4754
4755 if (GET_CODE (x) == SET)
4756 {
4757 sets = (struct set *) alloca (sizeof (struct set));
4758 sets[0].rtl = x;
4759
4760 /* Ignore SETs that are unconditional jumps.
4761 They never need cse processing, so this does not hurt.
4762 The reason is not efficiency but rather
4763 so that we can test at the end for instructions
4764 that have been simplified to unconditional jumps
4765 and not be misled by unchanged instructions
4766 that were unconditional jumps to begin with. */
4767 if (SET_DEST (x) == pc_rtx
4768 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4769 ;
4770
4771 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4772 The hard function value register is used only once, to copy to
4773 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4774 Ensure we invalidate the destination register. On the 80386 no
4775 other code would invalidate it since it is a fixed_reg.
4776 We need not check the return of apply_change_group; see canon_reg. */
4777
4778 else if (GET_CODE (SET_SRC (x)) == CALL)
4779 {
4780 canon_reg (SET_SRC (x), insn);
4781 apply_change_group ();
4782 fold_rtx (SET_SRC (x), insn);
4783 invalidate (SET_DEST (x), VOIDmode);
4784 }
4785 else
4786 n_sets = 1;
4787 }
4788 else if (GET_CODE (x) == PARALLEL)
4789 {
4790 register int lim = XVECLEN (x, 0);
4791
4792 sets = (struct set *) alloca (lim * sizeof (struct set));
4793
4794 /* Find all regs explicitly clobbered in this insn,
4795 and ensure they are not replaced with any other regs
4796 elsewhere in this insn.
4797 When a reg that is clobbered is also used for input,
4798 we should presume that that is for a reason,
4799 and we should not substitute some other register
4800 which is not supposed to be clobbered.
4801 Therefore, this loop cannot be merged into the one below
4802 because a CALL may precede a CLOBBER and refer to the
4803 value clobbered. We must not let a canonicalization do
4804 anything in that case. */
4805 for (i = 0; i < lim; i++)
4806 {
4807 register rtx y = XVECEXP (x, 0, i);
4808 if (GET_CODE (y) == CLOBBER)
4809 {
4810 rtx clobbered = XEXP (y, 0);
4811
4812 if (GET_CODE (clobbered) == REG
4813 || GET_CODE (clobbered) == SUBREG)
4814 invalidate (clobbered, VOIDmode);
4815 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4816 || GET_CODE (clobbered) == ZERO_EXTRACT)
4817 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4818 }
4819 }
4820
4821 for (i = 0; i < lim; i++)
4822 {
4823 register rtx y = XVECEXP (x, 0, i);
4824 if (GET_CODE (y) == SET)
4825 {
4826 /* As above, we ignore unconditional jumps and call-insns and
4827 ignore the result of apply_change_group. */
4828 if (GET_CODE (SET_SRC (y)) == CALL)
4829 {
4830 canon_reg (SET_SRC (y), insn);
4831 apply_change_group ();
4832 fold_rtx (SET_SRC (y), insn);
4833 invalidate (SET_DEST (y), VOIDmode);
4834 }
4835 else if (SET_DEST (y) == pc_rtx
4836 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4837 ;
4838 else
4839 sets[n_sets++].rtl = y;
4840 }
4841 else if (GET_CODE (y) == CLOBBER)
4842 {
4843 /* If we clobber memory, canon the address.
4844 This does nothing when a register is clobbered
4845 because we have already invalidated the reg. */
4846 if (GET_CODE (XEXP (y, 0)) == MEM)
4847 canon_reg (XEXP (y, 0), NULL_RTX);
4848 }
4849 else if (GET_CODE (y) == USE
4850 && ! (GET_CODE (XEXP (y, 0)) == REG
4851 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4852 canon_reg (y, NULL_RTX);
4853 else if (GET_CODE (y) == CALL)
4854 {
4855 /* The result of apply_change_group can be ignored; see
4856 canon_reg. */
4857 canon_reg (y, insn);
4858 apply_change_group ();
4859 fold_rtx (y, insn);
4860 }
4861 }
4862 }
4863 else if (GET_CODE (x) == CLOBBER)
4864 {
4865 if (GET_CODE (XEXP (x, 0)) == MEM)
4866 canon_reg (XEXP (x, 0), NULL_RTX);
4867 }
4868
4869 /* Canonicalize a USE of a pseudo register or memory location. */
4870 else if (GET_CODE (x) == USE
4871 && ! (GET_CODE (XEXP (x, 0)) == REG
4872 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4873 canon_reg (XEXP (x, 0), NULL_RTX);
4874 else if (GET_CODE (x) == CALL)
4875 {
4876 /* The result of apply_change_group can be ignored; see canon_reg. */
4877 canon_reg (x, insn);
4878 apply_change_group ();
4879 fold_rtx (x, insn);
4880 }
4881
4882 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4883 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4884 is handled specially for this case, and if it isn't set, then there will
4885 be no equivalence for the destination. */
4886 if (n_sets == 1 && REG_NOTES (insn) != 0
4887 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4888 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4889 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4890 src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
4891
4892 /* Canonicalize sources and addresses of destinations.
4893 We do this in a separate pass to avoid problems when a MATCH_DUP is
4894 present in the insn pattern. In that case, we want to ensure that
4895 we don't break the duplicate nature of the pattern. So we will replace
4896 both operands at the same time. Otherwise, we would fail to find an
4897 equivalent substitution in the loop calling validate_change below.
4898
4899 We used to suppress canonicalization of DEST if it appears in SRC,
4900 but we don't do this any more. */
4901
4902 for (i = 0; i < n_sets; i++)
4903 {
4904 rtx dest = SET_DEST (sets[i].rtl);
4905 rtx src = SET_SRC (sets[i].rtl);
4906 rtx new = canon_reg (src, insn);
4907 int insn_code;
4908
4909 sets[i].orig_src = src;
4910 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
4911 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4912 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4913 || (insn_code = recog_memoized (insn)) < 0
4914 || insn_data[insn_code].n_dups > 0)
4915 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4916 else
4917 SET_SRC (sets[i].rtl) = new;
4918
4919 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
4920 {
4921 validate_change (insn, &XEXP (dest, 1),
4922 canon_reg (XEXP (dest, 1), insn), 1);
4923 validate_change (insn, &XEXP (dest, 2),
4924 canon_reg (XEXP (dest, 2), insn), 1);
4925 }
4926
4927 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
4928 || GET_CODE (dest) == ZERO_EXTRACT
4929 || GET_CODE (dest) == SIGN_EXTRACT)
4930 dest = XEXP (dest, 0);
4931
4932 if (GET_CODE (dest) == MEM)
4933 canon_reg (dest, insn);
4934 }
4935
4936 /* Now that we have done all the replacements, we can apply the change
4937 group and see if they all work. Note that this will cause some
4938 canonicalizations that would have worked individually not to be applied
4939 because some other canonicalization didn't work, but this should not
4940 occur often.
4941
4942 The result of apply_change_group can be ignored; see canon_reg. */
4943
4944 apply_change_group ();
4945
4946 /* Set sets[i].src_elt to the class each source belongs to.
4947 Detect assignments from or to volatile things
4948 and set sets[i] to zero so they will be ignored
4949 in the rest of this function.
4950
4951 Nothing in this loop changes the hash table or the register chains. */
4952
4953 for (i = 0; i < n_sets; i++)
4954 {
4955 register rtx src, dest;
4956 register rtx src_folded;
4957 register struct table_elt *elt = 0, *p;
4958 enum machine_mode mode;
4959 rtx src_eqv_here;
4960 rtx src_const = 0;
4961 rtx src_related = 0;
4962 struct table_elt *src_const_elt = 0;
4963 int src_cost = MAX_COST;
4964 int src_eqv_cost = MAX_COST;
4965 int src_folded_cost = MAX_COST;
4966 int src_related_cost = MAX_COST;
4967 int src_elt_cost = MAX_COST;
4968 int src_regcost = MAX_COST;
4969 int src_eqv_regcost = MAX_COST;
4970 int src_folded_regcost = MAX_COST;
4971 int src_related_regcost = MAX_COST;
4972 int src_elt_regcost = MAX_COST;
4973 /* Set non-zero if we need to call force_const_mem on the
4974 contents of src_folded before using it. */
4975 int src_folded_force_flag = 0;
4976
4977 dest = SET_DEST (sets[i].rtl);
4978 src = SET_SRC (sets[i].rtl);
4979
4980 /* If SRC is a constant that has no machine mode,
4981 hash it with the destination's machine mode.
4982 This way we can keep different modes separate. */
4983
4984 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4985 sets[i].mode = mode;
4986
4987 if (src_eqv)
4988 {
4989 enum machine_mode eqvmode = mode;
4990 if (GET_CODE (dest) == STRICT_LOW_PART)
4991 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4992 do_not_record = 0;
4993 hash_arg_in_memory = 0;
4994 src_eqv = fold_rtx (src_eqv, insn);
4995 src_eqv_hash = HASH (src_eqv, eqvmode);
4996
4997 /* Find the equivalence class for the equivalent expression. */
4998
4999 if (!do_not_record)
5000 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
5001
5002 src_eqv_volatile = do_not_record;
5003 src_eqv_in_memory = hash_arg_in_memory;
5004 }
5005
5006 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
5007 value of the INNER register, not the destination. So it is not
5008 a valid substitution for the source. But save it for later. */
5009 if (GET_CODE (dest) == STRICT_LOW_PART)
5010 src_eqv_here = 0;
5011 else
5012 src_eqv_here = src_eqv;
5013
5014 /* Simplify any foldable subexpressions in SRC. Then get the fully-
5015 simplified result, which may not necessarily be valid. */
5016 src_folded = fold_rtx (src, insn);
5017
5018 #if 0
5019 /* ??? This caused bad code to be generated for the m68k port with -O2.
5020 Suppose src is (CONST_INT -1), and that after truncation src_folded
5021 is (CONST_INT 3). Suppose src_folded is then used for src_const.
5022 At the end we will add src and src_const to the same equivalence
5023 class. We now have 3 and -1 on the same equivalence class. This
5024 causes later instructions to be mis-optimized. */
5025 /* If storing a constant in a bitfield, pre-truncate the constant
5026 so we will be able to record it later. */
5027 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5028 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5029 {
5030 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5031
5032 if (GET_CODE (src) == CONST_INT
5033 && GET_CODE (width) == CONST_INT
5034 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5035 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5036 src_folded
5037 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
5038 << INTVAL (width)) - 1));
5039 }
5040 #endif
5041
5042 /* Compute SRC's hash code, and also notice if it
5043 should not be recorded at all. In that case,
5044 prevent any further processing of this assignment. */
5045 do_not_record = 0;
5046 hash_arg_in_memory = 0;
5047
5048 sets[i].src = src;
5049 sets[i].src_hash = HASH (src, mode);
5050 sets[i].src_volatile = do_not_record;
5051 sets[i].src_in_memory = hash_arg_in_memory;
5052
5053 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
5054 a pseudo, do not record SRC. Using SRC as a replacement for
5055 anything else will be incorrect in that situation. Note that
5056 this usually occurs only for stack slots, in which case all the
5057 RTL would be referring to SRC, so we don't lose any optimization
5058 opportunities by not having SRC in the hash table. */
5059
5060 if (GET_CODE (src) == MEM
5061 && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
5062 && GET_CODE (dest) == REG
5063 && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5064 sets[i].src_volatile = 1;
5065
5066 #if 0
5067 /* It is no longer clear why we used to do this, but it doesn't
5068 appear to still be needed. So let's try without it since this
5069 code hurts cse'ing widened ops. */
5070 /* If source is a perverse subreg (such as QI treated as an SI),
5071 treat it as volatile. It may do the work of an SI in one context
5072 where the extra bits are not being used, but cannot replace an SI
5073 in general. */
5074 if (GET_CODE (src) == SUBREG
5075 && (GET_MODE_SIZE (GET_MODE (src))
5076 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5077 sets[i].src_volatile = 1;
5078 #endif
5079
5080 /* Locate all possible equivalent forms for SRC. Try to replace
5081 SRC in the insn with each cheaper equivalent.
5082
5083 We have the following types of equivalents: SRC itself, a folded
5084 version, a value given in a REG_EQUAL note, or a value related
5085 to a constant.
5086
5087 Each of these equivalents may be part of an additional class
5088 of equivalents (if more than one is in the table, they must be in
5089 the same class; we check for this).
5090
5091 If the source is volatile, we don't do any table lookups.
5092
5093 We note any constant equivalent for possible later use in a
5094 REG_NOTE. */
5095
5096 if (!sets[i].src_volatile)
5097 elt = lookup (src, sets[i].src_hash, mode);
5098
5099 sets[i].src_elt = elt;
5100
5101 if (elt && src_eqv_here && src_eqv_elt)
5102 {
5103 if (elt->first_same_value != src_eqv_elt->first_same_value)
5104 {
5105 /* The REG_EQUAL is indicating that two formerly distinct
5106 classes are now equivalent. So merge them. */
5107 merge_equiv_classes (elt, src_eqv_elt);
5108 src_eqv_hash = HASH (src_eqv, elt->mode);
5109 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
5110 }
5111
5112 src_eqv_here = 0;
5113 }
5114
5115 else if (src_eqv_elt)
5116 elt = src_eqv_elt;
5117
5118 /* Try to find a constant somewhere and record it in `src_const'.
5119 Record its table element, if any, in `src_const_elt'. Look in
5120 any known equivalences first. (If the constant is not in the
5121 table, also set `sets[i].src_const_hash'). */
5122 if (elt)
5123 for (p = elt->first_same_value; p; p = p->next_same_value)
5124 if (p->is_const)
5125 {
5126 src_const = p->exp;
5127 src_const_elt = elt;
5128 break;
5129 }
5130
5131 if (src_const == 0
5132 && (CONSTANT_P (src_folded)
5133 /* Consider (minus (label_ref L1) (label_ref L2)) as
5134 "constant" here so we will record it. This allows us
5135 to fold switch statements when an ADDR_DIFF_VEC is used. */
5136 || (GET_CODE (src_folded) == MINUS
5137 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5138 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5139 src_const = src_folded, src_const_elt = elt;
5140 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5141 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5142
5143 /* If we don't know if the constant is in the table, get its
5144 hash code and look it up. */
5145 if (src_const && src_const_elt == 0)
5146 {
5147 sets[i].src_const_hash = HASH (src_const, mode);
5148 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
5149 }
5150
5151 sets[i].src_const = src_const;
5152 sets[i].src_const_elt = src_const_elt;
5153
5154 /* If the constant and our source are both in the table, mark them as
5155 equivalent. Otherwise, if a constant is in the table but the source
5156 isn't, set ELT to it. */
5157 if (src_const_elt && elt
5158 && src_const_elt->first_same_value != elt->first_same_value)
5159 merge_equiv_classes (elt, src_const_elt);
5160 else if (src_const_elt && elt == 0)
5161 elt = src_const_elt;
5162
5163 /* See if there is a register linearly related to a constant
5164 equivalent of SRC. */
5165 if (src_const
5166 && (GET_CODE (src_const) == CONST
5167 || (src_const_elt && src_const_elt->related_value != 0)))
5168 {
5169 src_related = use_related_value (src_const, src_const_elt);
5170 if (src_related)
5171 {
5172 struct table_elt *src_related_elt
5173 = lookup (src_related, HASH (src_related, mode), mode);
5174 if (src_related_elt && elt)
5175 {
5176 if (elt->first_same_value
5177 != src_related_elt->first_same_value)
5178 /* This can occur when we previously saw a CONST
5179 involving a SYMBOL_REF and then see the SYMBOL_REF
5180 twice. Merge the involved classes. */
5181 merge_equiv_classes (elt, src_related_elt);
5182
5183 src_related = 0;
5184 src_related_elt = 0;
5185 }
5186 else if (src_related_elt && elt == 0)
5187 elt = src_related_elt;
5188 }
5189 }
5190
5191 /* See if we have a CONST_INT that is already in a register in a
5192 wider mode. */
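 /* E.g. (illustrative): if (reg:SI 95) is already known to hold
 (const_int 5) and this set needs (const_int 5) in HImode, the
 HImode low part of (reg:SI 95) may be cheaper than loading the
 constant again. */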
5193
5194 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5195 && GET_MODE_CLASS (mode) == MODE_INT
5196 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5197 {
5198 enum machine_mode wider_mode;
5199
5200 for (wider_mode = GET_MODE_WIDER_MODE (mode);
5201 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5202 && src_related == 0;
5203 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5204 {
5205 struct table_elt *const_elt
5206 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5207
5208 if (const_elt == 0)
5209 continue;
5210
5211 for (const_elt = const_elt->first_same_value;
5212 const_elt; const_elt = const_elt->next_same_value)
5213 if (GET_CODE (const_elt->exp) == REG)
5214 {
5215 src_related = gen_lowpart_if_possible (mode,
5216 const_elt->exp);
5217 break;
5218 }
5219 }
5220 }
5221
5222 /* Another possibility is that we have an AND with a constant in
5223 a mode narrower than a word. If so, it might have been generated
5224 as part of an "if" which would narrow the AND. If we already
5225 have done the AND in a wider mode, we can use a SUBREG of that
5226 value. */
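	 /* That is, if the same AND has already been done in a wider mode,
	    the low part of that result is the value we want here.  */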
5227
5228 if (flag_expensive_optimizations && ! src_related
5229 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5230 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5231 {
5232 enum machine_mode tmode;
5233 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5234
5235 for (tmode = GET_MODE_WIDER_MODE (mode);
5236 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5237 tmode = GET_MODE_WIDER_MODE (tmode))
5238 {
5239 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
5240 struct table_elt *larger_elt;
5241
5242 if (inner)
5243 {
5244 PUT_MODE (new_and, tmode);
5245 XEXP (new_and, 0) = inner;
5246 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5247 if (larger_elt == 0)
5248 continue;
5249
5250 for (larger_elt = larger_elt->first_same_value;
5251 larger_elt; larger_elt = larger_elt->next_same_value)
5252 if (GET_CODE (larger_elt->exp) == REG)
5253 {
5254 src_related
5255 = gen_lowpart_if_possible (mode, larger_elt->exp);
5256 break;
5257 }
5258
5259 if (src_related)
5260 break;
5261 }
5262 }
5263 }
5264
5265 #ifdef LOAD_EXTEND_OP
5266 /* See if a MEM has already been loaded with a widening operation;
5267 if it has, we can use a subreg of that. Many CISC machines
5268 also have such operations, but this is only likely to be
 5269 	 beneficial on these machines.  */
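	 /* For example, on a target whose QImode loads zero extend, a
	    previous (zero_extend:SI (mem:QI ...)) still held in a register
	    lets us take the low part of that register instead of reloading
	    the MEM.  */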
5270
5271 if (flag_expensive_optimizations && src_related == 0
5272 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5273 && GET_MODE_CLASS (mode) == MODE_INT
5274 && GET_CODE (src) == MEM && ! do_not_record
5275 && LOAD_EXTEND_OP (mode) != NIL)
5276 {
5277 enum machine_mode tmode;
5278
5279 /* Set what we are trying to extend and the operation it might
5280 have been extended with. */
5281 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5282 XEXP (memory_extend_rtx, 0) = src;
5283
5284 for (tmode = GET_MODE_WIDER_MODE (mode);
5285 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5286 tmode = GET_MODE_WIDER_MODE (tmode))
5287 {
5288 struct table_elt *larger_elt;
5289
5290 PUT_MODE (memory_extend_rtx, tmode);
5291 larger_elt = lookup (memory_extend_rtx,
5292 HASH (memory_extend_rtx, tmode), tmode);
5293 if (larger_elt == 0)
5294 continue;
5295
5296 for (larger_elt = larger_elt->first_same_value;
5297 larger_elt; larger_elt = larger_elt->next_same_value)
5298 if (GET_CODE (larger_elt->exp) == REG)
5299 {
5300 src_related = gen_lowpart_if_possible (mode,
5301 larger_elt->exp);
5302 break;
5303 }
5304
5305 if (src_related)
5306 break;
5307 }
5308 }
5309 #endif /* LOAD_EXTEND_OP */
5310
5311 if (src == src_folded)
5312 src_folded = 0;
5313
5314 /* At this point, ELT, if non-zero, points to a class of expressions
5315 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5316 and SRC_RELATED, if non-zero, each contain additional equivalent
5317 expressions. Prune these latter expressions by deleting expressions
5318 already in the equivalence class.
5319
5320 Check for an equivalent identical to the destination. If found,
5321 this is the preferred equivalent since it will likely lead to
5322 elimination of the insn. Indicate this by placing it in
5323 `src_related'. */
5324
5325 if (elt)
5326 elt = elt->first_same_value;
5327 for (p = elt; p; p = p->next_same_value)
5328 {
5329 enum rtx_code code = GET_CODE (p->exp);
5330
5331 /* If the expression is not valid, ignore it. Then we do not
5332 have to check for validity below. In most cases, we can use
5333 `rtx_equal_p', since canonicalization has already been done. */
5334 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
5335 continue;
5336
5337 /* Also skip paradoxical subregs, unless that's what we're
5338 looking for. */
5339 if (code == SUBREG
5340 && (GET_MODE_SIZE (GET_MODE (p->exp))
5341 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5342 && ! (src != 0
5343 && GET_CODE (src) == SUBREG
5344 && GET_MODE (src) == GET_MODE (p->exp)
5345 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5346 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5347 continue;
5348
5349 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5350 src = 0;
5351 else if (src_folded && GET_CODE (src_folded) == code
5352 && rtx_equal_p (src_folded, p->exp))
5353 src_folded = 0;
5354 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5355 && rtx_equal_p (src_eqv_here, p->exp))
5356 src_eqv_here = 0;
5357 else if (src_related && GET_CODE (src_related) == code
5358 && rtx_equal_p (src_related, p->exp))
5359 src_related = 0;
5360
 5361 	  /* If this is the same as the destination of the insn, we want
 5362 	     to prefer it.  Copy it to src_related.  The code below will
 5363 	     then give it a negative cost.  */
5364 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5365 src_related = dest;
5366 }
5367
5368 /* Find the cheapest valid equivalent, trying all the available
5369 possibilities. Prefer items not in the hash table to ones
5370 that are when they are equal cost. Note that we can never
5371 worsen an insn as the current contents will also succeed.
5372 If we find an equivalent identical to the destination, use it as best,
5373 since this insn will probably be eliminated in that case. */
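	 /* A cost of -1 marks an alternative identical to the destination;
	    it beats every other cost, so the preferrable tests below will
	    always select it and the insn can then become a no-op.  */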
5374 if (src)
5375 {
5376 if (rtx_equal_p (src, dest))
5377 src_cost = src_regcost = -1;
5378 else
5379 {
5380 src_cost = COST (src);
5381 src_regcost = approx_reg_cost (src);
5382 }
5383 }
5384
5385 if (src_eqv_here)
5386 {
5387 if (rtx_equal_p (src_eqv_here, dest))
5388 src_eqv_cost = src_eqv_regcost = -1;
5389 else
5390 {
5391 src_eqv_cost = COST (src_eqv_here);
5392 src_eqv_regcost = approx_reg_cost (src_eqv_here);
5393 }
5394 }
5395
5396 if (src_folded)
5397 {
5398 if (rtx_equal_p (src_folded, dest))
5399 src_folded_cost = src_folded_regcost = -1;
5400 else
5401 {
5402 src_folded_cost = COST (src_folded);
5403 src_folded_regcost = approx_reg_cost (src_folded);
5404 }
5405 }
5406
5407 if (src_related)
5408 {
5409 if (rtx_equal_p (src_related, dest))
5410 src_related_cost = src_related_regcost = -1;
5411 else
5412 {
5413 src_related_cost = COST (src_related);
5414 src_related_regcost = approx_reg_cost (src_related);
5415 }
5416 }
5417
5418 /* If this was an indirect jump insn, a known label will really be
5419 cheaper even though it looks more expensive. */
5420 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5421 src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5422
5423 /* Terminate loop when replacement made. This must terminate since
5424 the current contents will be tested and will always be valid. */
5425 while (1)
5426 {
5427 rtx trial;
5428
5429 /* Skip invalid entries. */
5430 while (elt && GET_CODE (elt->exp) != REG
5431 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
5432 elt = elt->next_same_value;
5433
5434 /* A paradoxical subreg would be bad here: it'll be the right
5435 size, but later may be adjusted so that the upper bits aren't
5436 what we want. So reject it. */
5437 if (elt != 0
5438 && GET_CODE (elt->exp) == SUBREG
5439 && (GET_MODE_SIZE (GET_MODE (elt->exp))
5440 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5441 /* It is okay, though, if the rtx we're trying to match
5442 will ignore any of the bits we can't predict. */
5443 && ! (src != 0
5444 && GET_CODE (src) == SUBREG
5445 && GET_MODE (src) == GET_MODE (elt->exp)
5446 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5447 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5448 {
5449 elt = elt->next_same_value;
5450 continue;
5451 }
5452
5453 if (elt)
5454 {
5455 src_elt_cost = elt->cost;
5456 src_elt_regcost = elt->regcost;
5457 }
5458
5459 /* Find cheapest and skip it for the next time. For items
5460 of equal cost, use this order:
5461 src_folded, src, src_eqv, src_related and hash table entry. */
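	  /* Once a candidate is picked, its cost is reset to MAX_COST so
	     that, if the substitution fails to validate, the next iteration
	     moves on to the next cheapest alternative.  */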
5462 if (src_folded
5463 && preferrable (src_folded_cost, src_folded_regcost,
5464 src_cost, src_regcost) <= 0
5465 && preferrable (src_folded_cost, src_folded_regcost,
5466 src_eqv_cost, src_eqv_regcost) <= 0
5467 && preferrable (src_folded_cost, src_folded_regcost,
5468 src_related_cost, src_related_regcost) <= 0
5469 && preferrable (src_folded_cost, src_folded_regcost,
5470 src_elt_cost, src_elt_regcost) <= 0)
5471 {
5472 trial = src_folded, src_folded_cost = MAX_COST;
5473 if (src_folded_force_flag)
5474 trial = force_const_mem (mode, trial);
5475 }
5476 else if (src
5477 && preferrable (src_cost, src_regcost,
5478 src_eqv_cost, src_eqv_regcost) <= 0
5479 && preferrable (src_cost, src_regcost,
5480 src_related_cost, src_related_regcost) <= 0
5481 && preferrable (src_cost, src_regcost,
5482 src_elt_cost, src_elt_regcost) <= 0)
5483 trial = src, src_cost = MAX_COST;
5484 else if (src_eqv_here
5485 && preferrable (src_eqv_cost, src_eqv_regcost,
5486 src_related_cost, src_related_regcost) <= 0
5487 && preferrable (src_eqv_cost, src_eqv_regcost,
5488 src_elt_cost, src_elt_regcost) <= 0)
5489 trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
5490 else if (src_related
5491 && preferrable (src_related_cost, src_related_regcost,
5492 src_elt_cost, src_elt_regcost) <= 0)
5493 trial = copy_rtx (src_related), src_related_cost = MAX_COST;
5494 else
5495 {
5496 trial = copy_rtx (elt->exp);
5497 elt = elt->next_same_value;
5498 src_elt_cost = MAX_COST;
5499 }
5500
5501 /* We don't normally have an insn matching (set (pc) (pc)), so
5502 check for this separately here. We will delete such an
5503 insn below.
5504
5505 For other cases such as a table jump or conditional jump
5506 where we know the ultimate target, go ahead and replace the
5507 operand. While that may not make a valid insn, we will
5508 reemit the jump below (and also insert any necessary
5509 barriers). */
5510 if (n_sets == 1 && dest == pc_rtx
5511 && (trial == pc_rtx
5512 || (GET_CODE (trial) == LABEL_REF
5513 && ! condjump_p (insn))))
5514 {
5515 SET_SRC (sets[i].rtl) = trial;
5516 cse_jumps_altered = 1;
5517 break;
5518 }
5519
5520 /* Look for a substitution that makes a valid insn. */
5521 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5522 {
5523 /* If we just made a substitution inside a libcall, then we
5524 need to make the same substitution in any notes attached
5525 to the RETVAL insn. */
5526 if (libcall_insn
5527 && (GET_CODE (sets[i].orig_src) == REG
5528 || GET_CODE (sets[i].orig_src) == SUBREG
5529 || GET_CODE (sets[i].orig_src) == MEM))
5530 replace_rtx (REG_NOTES (libcall_insn), sets[i].orig_src,
5531 canon_reg (SET_SRC (sets[i].rtl), insn));
5532
5533 /* The result of apply_change_group can be ignored; see
5534 canon_reg. */
5535
5536 validate_change (insn, &SET_SRC (sets[i].rtl),
5537 canon_reg (SET_SRC (sets[i].rtl), insn),
5538 1);
5539 apply_change_group ();
5540 break;
5541 }
5542
5543 /* If we previously found constant pool entries for
5544 constants and this is a constant, try making a
 5545 	     pool entry.  Put it in src_folded unless we have already done
 5546 	     this, since that is where it likely came from.  */
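	  /* The pool entry is costed at constant_pool_entries_cost; if it
	     still wins on a later iteration, force_const_mem above turns it
	     into the actual memory reference.  */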
5547
5548 else if (constant_pool_entries_cost
5549 && CONSTANT_P (trial)
5550 /* Reject cases that will abort in decode_rtx_const.
5551 On the alpha when simplifying a switch, we get
5552 (const (truncate (minus (label_ref) (label_ref)))). */
5553 && ! (GET_CODE (trial) == CONST
5554 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
5555 /* Likewise on IA-64, except without the truncate. */
5556 && ! (GET_CODE (trial) == CONST
5557 && GET_CODE (XEXP (trial, 0)) == MINUS
5558 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5559 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)
5560 && (src_folded == 0
5561 || (GET_CODE (src_folded) != MEM
5562 && ! src_folded_force_flag))
5563 && GET_MODE_CLASS (mode) != MODE_CC
5564 && mode != VOIDmode)
5565 {
5566 src_folded_force_flag = 1;
5567 src_folded = trial;
5568 src_folded_cost = constant_pool_entries_cost;
5569 }
5570 }
5571
5572 src = SET_SRC (sets[i].rtl);
5573
5574 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5575 However, there is an important exception: If both are registers
5576 that are not the head of their equivalence class, replace SET_SRC
5577 with the head of the class. If we do not do this, we will have
5578 both registers live over a portion of the basic block. This way,
5579 their lifetimes will likely abut instead of overlapping. */
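      /* For example, if this insn has become (set (reg 101) (reg 101)) and
	 (reg 100) heads the equivalence class, rewrite it as
	 (set (reg 101) (reg 100)); the two registers' lifetimes then tend
	 to abut rather than overlap.  */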
5580 if (GET_CODE (dest) == REG
5581 && REGNO_QTY_VALID_P (REGNO (dest)))
5582 {
5583 int dest_q = REG_QTY (REGNO (dest));
5584 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5585
5586 if (dest_ent->mode == GET_MODE (dest)
5587 && dest_ent->first_reg != REGNO (dest)
5588 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
5589 /* Don't do this if the original insn had a hard reg as
5590 SET_SRC or SET_DEST. */
5591 && (GET_CODE (sets[i].src) != REG
5592 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5593 && (GET_CODE (dest) != REG || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5594 /* We can't call canon_reg here because it won't do anything if
5595 SRC is a hard register. */
5596 {
5597 int src_q = REG_QTY (REGNO (src));
5598 struct qty_table_elem *src_ent = &qty_table[src_q];
5599 int first = src_ent->first_reg;
5600 rtx new_src
5601 = (first >= FIRST_PSEUDO_REGISTER
5602 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5603
 5604 	      /* We must use validate_change even for this, because this
5605 might be a special no-op instruction, suitable only to
5606 tag notes onto. */
5607 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5608 {
5609 src = new_src;
5610 /* If we had a constant that is cheaper than what we are now
5611 setting SRC to, use that constant. We ignored it when we
5612 thought we could make this into a no-op. */
5613 if (src_const && COST (src_const) < COST (src)
5614 && validate_change (insn, &SET_SRC (sets[i].rtl),
5615 src_const, 0))
5616 src = src_const;
5617 }
5618 }
5619 }
5620
5621 /* If we made a change, recompute SRC values. */
5622 if (src != sets[i].src)
5623 {
5624 cse_altered = 1;
5625 do_not_record = 0;
5626 hash_arg_in_memory = 0;
5627 sets[i].src = src;
5628 sets[i].src_hash = HASH (src, mode);
5629 sets[i].src_volatile = do_not_record;
5630 sets[i].src_in_memory = hash_arg_in_memory;
5631 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5632 }
5633
5634 /* If this is a single SET, we are setting a register, and we have an
5635 equivalent constant, we want to add a REG_NOTE. We don't want
5636 to write a REG_EQUAL note for a constant pseudo since verifying that
5637 that pseudo hasn't been eliminated is a pain. Such a note also
5638 won't help anything.
5639
5640 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5641 which can be created for a reference to a compile time computable
5642 entry in a jump table. */
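      /* For example, if the source folded to (const_int 12), a REG_EQUAL
	 note recording (const_int 12) tells later passes the value now in
	 the destination register.  */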
5643
5644 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
5645 && GET_CODE (src_const) != REG
5646 && ! (GET_CODE (src_const) == CONST
5647 && GET_CODE (XEXP (src_const, 0)) == MINUS
5648 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5649 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5650 {
5651 tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5652
5653 /* Make sure that the rtx is not shared with any other insn. */
5654 src_const = copy_rtx (src_const);
5655
5656 /* Record the actual constant value in a REG_EQUAL note, making
5657 a new one if one does not already exist. */
5658 if (tem)
5659 XEXP (tem, 0) = src_const;
5660 else
5661 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL,
5662 src_const, REG_NOTES (insn));
5663
5664 /* If storing a constant value in a register that
5665 previously held the constant value 0,
5666 record this fact with a REG_WAS_0 note on this insn.
5667
5668 Note that the *register* is required to have previously held 0,
 5669 	 not just any register in the quantity, and we must point to the
5670 insn that set that register to zero.
5671
5672 Rather than track each register individually, we just see if
5673 the last set for this quantity was for this register. */
5674
5675 if (REGNO_QTY_VALID_P (REGNO (dest)))
5676 {
5677 int dest_q = REG_QTY (REGNO (dest));
5678 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5679
5680 if (dest_ent->const_rtx == const0_rtx)
5681 {
5682 /* See if we previously had a REG_WAS_0 note. */
5683 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
5684 rtx const_insn = dest_ent->const_insn;
5685
5686 if ((tem = single_set (const_insn)) != 0
5687 && rtx_equal_p (SET_DEST (tem), dest))
5688 {
5689 if (note)
5690 XEXP (note, 0) = const_insn;
5691 else
5692 REG_NOTES (insn)
5693 = gen_rtx_INSN_LIST (REG_WAS_0, const_insn,
5694 REG_NOTES (insn));
5695 }
5696 }
5697 }
5698 }
5699
5700 /* Now deal with the destination. */
5701 do_not_record = 0;
5702
5703 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
5704 to the MEM or REG within it. */
5705 while (GET_CODE (dest) == SIGN_EXTRACT
5706 || GET_CODE (dest) == ZERO_EXTRACT
5707 || GET_CODE (dest) == SUBREG
5708 || GET_CODE (dest) == STRICT_LOW_PART)
5709 dest = XEXP (dest, 0);
5710
5711 sets[i].inner_dest = dest;
5712
5713 if (GET_CODE (dest) == MEM)
5714 {
5715 #ifdef PUSH_ROUNDING
5716 /* Stack pushes invalidate the stack pointer. */
5717 rtx addr = XEXP (dest, 0);
5718 if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
5719 && XEXP (addr, 0) == stack_pointer_rtx)
5720 invalidate (stack_pointer_rtx, Pmode);
5721 #endif
5722 dest = fold_rtx (dest, insn);
5723 }
5724
5725 /* Compute the hash code of the destination now,
5726 before the effects of this instruction are recorded,
5727 since the register values used in the address computation
5728 are those before this instruction. */
5729 sets[i].dest_hash = HASH (dest, mode);
5730
5731 /* Don't enter a bit-field in the hash table
5732 because the value in it after the store
5733 may not equal what was stored, due to truncation. */
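	  /* For example, storing (const_int 0x1ff) into an 8-bit field
	     leaves only 0xff behind, so the stored rtx would not describe
	     the field's contents.  */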
5734
5735 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5736 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5737 {
5738 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5739
5740 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5741 && GET_CODE (width) == CONST_INT
5742 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5743 && ! (INTVAL (src_const)
5744 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5745 /* Exception: if the value is constant,
5746 and it won't be truncated, record it. */
5747 ;
5748 else
5749 {
5750 /* This is chosen so that the destination will be invalidated
5751 but no new value will be recorded.
5752 We must invalidate because sometimes constant
5753 values can be recorded for bitfields. */
5754 sets[i].src_elt = 0;
5755 sets[i].src_volatile = 1;
5756 src_eqv = 0;
5757 src_eqv_elt = 0;
5758 }
5759 }
5760
5761 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5762 the insn. */
5763 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5764 {
5765 /* One less use of the label this insn used to jump to. */
5766 if (JUMP_LABEL (insn) != 0)
5767 --LABEL_NUSES (JUMP_LABEL (insn));
5768 PUT_CODE (insn, NOTE);
5769 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
5770 NOTE_SOURCE_FILE (insn) = 0;
5771 cse_jumps_altered = 1;
5772 /* No more processing for this set. */
5773 sets[i].rtl = 0;
5774 }
5775
5776 /* If this SET is now setting PC to a label, we know it used to
5777 be a conditional or computed branch. */
5778 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
5779 {
5780 /* We reemit the jump in as many cases as possible just in
5781 case the form of an unconditional jump is significantly
5782 different than a computed jump or conditional jump.
5783
5784 If this insn has multiple sets, then reemitting the
5785 jump is nontrivial. So instead we just force rerecognition
5786 and hope for the best. */
5787 if (n_sets == 1)
5788 {
5789 rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
5790 JUMP_LABEL (new) = XEXP (src, 0);
5791 LABEL_NUSES (XEXP (src, 0))++;
5792 insn = new;
5793 }
5794 else
5795 INSN_CODE (insn) = -1;
5796
5797 never_reached_warning (insn);
5798
5799 /* Now emit a BARRIER after the unconditional jump. Do not bother
 5800 	     deleting any unreachable code; let jump/flow do that.  */
5801 if (NEXT_INSN (insn) != 0
5802 && GET_CODE (NEXT_INSN (insn)) != BARRIER)
5803 emit_barrier_after (insn);
5804
5805 cse_jumps_altered = 1;
5806 sets[i].rtl = 0;
5807 }
5808
5809 /* If destination is volatile, invalidate it and then do no further
5810 processing for this assignment. */
5811
5812 else if (do_not_record)
5813 {
5814 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5815 invalidate (dest, VOIDmode);
5816 else if (GET_CODE (dest) == MEM)
5817 {
5818 /* Outgoing arguments for a libcall don't
5819 affect any recorded expressions. */
5820 if (! libcall_insn || insn == libcall_insn)
5821 invalidate (dest, VOIDmode);
5822 }
5823 else if (GET_CODE (dest) == STRICT_LOW_PART
5824 || GET_CODE (dest) == ZERO_EXTRACT)
5825 invalidate (XEXP (dest, 0), GET_MODE (dest));
5826 sets[i].rtl = 0;
5827 }
5828
5829 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5830 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5831
5832 #ifdef HAVE_cc0
5833 /* If setting CC0, record what it was set to, or a constant, if it
5834 is equivalent to a constant. If it is being set to a floating-point
5835 value, make a COMPARE with the appropriate constant of 0. If we
5836 don't do this, later code can interpret this as a test against
5837 const0_rtx, which can cause problems if we try to put it into an
5838 insn as a floating-point operand. */
5839 if (dest == cc0_rtx)
5840 {
5841 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5842 this_insn_cc0_mode = mode;
5843 if (FLOAT_MODE_P (mode))
5844 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5845 CONST0_RTX (mode));
5846 }
5847 #endif
5848 }
5849
5850 /* Now enter all non-volatile source expressions in the hash table
5851 if they are not already present.
5852 Record their equivalence classes in src_elt.
5853 This way we can insert the corresponding destinations into
5854 the same classes even if the actual sources are no longer in them
5855 (having been invalidated). */
5856
5857 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5858 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5859 {
5860 register struct table_elt *elt;
5861 register struct table_elt *classp = sets[0].src_elt;
5862 rtx dest = SET_DEST (sets[0].rtl);
5863 enum machine_mode eqvmode = GET_MODE (dest);
5864
5865 if (GET_CODE (dest) == STRICT_LOW_PART)
5866 {
5867 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5868 classp = 0;
5869 }
5870 if (insert_regs (src_eqv, classp, 0))
5871 {
5872 rehash_using_reg (src_eqv);
5873 src_eqv_hash = HASH (src_eqv, eqvmode);
5874 }
5875 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5876 elt->in_memory = src_eqv_in_memory;
5877 src_eqv_elt = elt;
5878
5879 /* Check to see if src_eqv_elt is the same as a set source which
5880 does not yet have an elt, and if so set the elt of the set source
5881 to src_eqv_elt. */
5882 for (i = 0; i < n_sets; i++)
5883 if (sets[i].rtl && sets[i].src_elt == 0
5884 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5885 sets[i].src_elt = src_eqv_elt;
5886 }
5887
5888 for (i = 0; i < n_sets; i++)
5889 if (sets[i].rtl && ! sets[i].src_volatile
5890 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5891 {
5892 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5893 {
5894 /* REG_EQUAL in setting a STRICT_LOW_PART
5895 gives an equivalent for the entire destination register,
5896 not just for the subreg being stored in now.
5897 This is a more interesting equivalence, so we arrange later
5898 to treat the entire reg as the destination. */
5899 sets[i].src_elt = src_eqv_elt;
5900 sets[i].src_hash = src_eqv_hash;
5901 }
5902 else
5903 {
5904 /* Insert source and constant equivalent into hash table, if not
5905 already present. */
5906 register struct table_elt *classp = src_eqv_elt;
5907 register rtx src = sets[i].src;
5908 register rtx dest = SET_DEST (sets[i].rtl);
5909 enum machine_mode mode
5910 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5911
5912 if (sets[i].src_elt == 0)
5913 {
5914 /* Don't put a hard register source into the table if this is
5915 the last insn of a libcall. In this case, we only need
5916 to put src_eqv_elt in src_elt. */
5917 if (GET_CODE (src) != REG
5918 || REGNO (src) >= FIRST_PSEUDO_REGISTER
5919 || ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5920 {
5921 register struct table_elt *elt;
5922
5923 /* Note that these insert_regs calls cannot remove
5924 any of the src_elt's, because they would have failed to
5925 match if not still valid. */
5926 if (insert_regs (src, classp, 0))
5927 {
5928 rehash_using_reg (src);
5929 sets[i].src_hash = HASH (src, mode);
5930 }
5931 elt = insert (src, classp, sets[i].src_hash, mode);
5932 elt->in_memory = sets[i].src_in_memory;
5933 sets[i].src_elt = classp = elt;
5934 }
5935 else
5936 sets[i].src_elt = classp;
5937 }
5938 if (sets[i].src_const && sets[i].src_const_elt == 0
5939 && src != sets[i].src_const
5940 && ! rtx_equal_p (sets[i].src_const, src))
5941 sets[i].src_elt = insert (sets[i].src_const, classp,
5942 sets[i].src_const_hash, mode);
5943 }
5944 }
5945 else if (sets[i].src_elt == 0)
5946 /* If we did not insert the source into the hash table (e.g., it was
5947 volatile), note the equivalence class for the REG_EQUAL value, if any,
5948 so that the destination goes into that class. */
5949 sets[i].src_elt = src_eqv_elt;
5950
5951 invalidate_from_clobbers (x);
5952
5953 /* Some registers are invalidated by subroutine calls. Memory is
5954 invalidated by non-constant calls. */
5955
5956 if (GET_CODE (insn) == CALL_INSN)
5957 {
5958 if (! CONST_OR_PURE_CALL_P (insn))
5959 invalidate_memory ();
5960 invalidate_for_call ();
5961 }
5962
5963 /* Now invalidate everything set by this instruction.
5964 If a SUBREG or other funny destination is being set,
5965 sets[i].rtl is still nonzero, so here we invalidate the reg
5966 a part of which is being set. */
5967
5968 for (i = 0; i < n_sets; i++)
5969 if (sets[i].rtl)
5970 {
5971 /* We can't use the inner dest, because the mode associated with
5972 a ZERO_EXTRACT is significant. */
5973 register rtx dest = SET_DEST (sets[i].rtl);
5974
5975 /* Needed for registers to remove the register from its
5976 previous quantity's chain.
5977 Needed for memory if this is a nonvarying address, unless
5978 we have just done an invalidate_memory that covers even those. */
5979 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5980 invalidate (dest, VOIDmode);
5981 else if (GET_CODE (dest) == MEM)
5982 {
5983 /* Outgoing arguments for a libcall don't
5984 affect any recorded expressions. */
5985 if (! libcall_insn || insn == libcall_insn)
5986 invalidate (dest, VOIDmode);
5987 }
5988 else if (GET_CODE (dest) == STRICT_LOW_PART
5989 || GET_CODE (dest) == ZERO_EXTRACT)
5990 invalidate (XEXP (dest, 0), GET_MODE (dest));
5991 }
5992
5993 /* A volatile ASM invalidates everything. */
5994 if (GET_CODE (insn) == INSN
5995 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5996 && MEM_VOLATILE_P (PATTERN (insn)))
5997 flush_hash_table ();
5998
5999 /* Make sure registers mentioned in destinations
6000 are safe for use in an expression to be inserted.
6001 This removes from the hash table
6002 any invalid entry that refers to one of these registers.
6003
6004 We don't care about the return value from mention_regs because
6005 we are going to hash the SET_DEST values unconditionally. */
6006
6007 for (i = 0; i < n_sets; i++)
6008 {
6009 if (sets[i].rtl)
6010 {
6011 rtx x = SET_DEST (sets[i].rtl);
6012
6013 if (GET_CODE (x) != REG)
6014 mention_regs (x);
6015 else
6016 {
6017 /* We used to rely on all references to a register becoming
6018 inaccessible when a register changes to a new quantity,
6019 since that changes the hash code. However, that is not
6020 safe, since after HASH_SIZE new quantities we get a
6021 hash 'collision' of a register with its own invalid
6022 entries. And since SUBREGs have been changed not to
6023 change their hash code with the hash code of the register,
6024 it wouldn't work any longer at all. So we have to check
6025 for any invalid references lying around now.
6026 This code is similar to the REG case in mention_regs,
6027 but it knows that reg_tick has been incremented, and
 6028 		 it leaves reg_in_table as -1.  */
6029 unsigned int regno = REGNO (x);
6030 unsigned int endregno
6031 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
6032 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
6033 unsigned int i;
6034
6035 for (i = regno; i < endregno; i++)
6036 {
6037 if (REG_IN_TABLE (i) >= 0)
6038 {
6039 remove_invalid_refs (i);
6040 REG_IN_TABLE (i) = -1;
6041 }
6042 }
6043 }
6044 }
6045 }
6046
6047 /* We may have just removed some of the src_elt's from the hash table.
6048 So replace each one with the current head of the same class. */
6049
6050 for (i = 0; i < n_sets; i++)
6051 if (sets[i].rtl)
6052 {
6053 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
6054 /* If elt was removed, find current head of same class,
6055 or 0 if nothing remains of that class. */
6056 {
6057 register struct table_elt *elt = sets[i].src_elt;
6058
6059 while (elt && elt->prev_same_value)
6060 elt = elt->prev_same_value;
6061
6062 while (elt && elt->first_same_value == 0)
6063 elt = elt->next_same_value;
6064 sets[i].src_elt = elt ? elt->first_same_value : 0;
6065 }
6066 }
6067
6068 /* Now insert the destinations into their equivalence classes. */
6069
6070 for (i = 0; i < n_sets; i++)
6071 if (sets[i].rtl)
6072 {
6073 register rtx dest = SET_DEST (sets[i].rtl);
6074 rtx inner_dest = sets[i].inner_dest;
6075 register struct table_elt *elt;
6076
6077 /* Don't record value if we are not supposed to risk allocating
6078 floating-point values in registers that might be wider than
6079 memory. */
6080 if ((flag_float_store
6081 && GET_CODE (dest) == MEM
6082 && FLOAT_MODE_P (GET_MODE (dest)))
6083 /* Don't record BLKmode values, because we don't know the
6084 size of it, and can't be sure that other BLKmode values
6085 have the same or smaller size. */
6086 || GET_MODE (dest) == BLKmode
6087 /* Don't record values of destinations set inside a libcall block
6088 since we might delete the libcall. Things should have been set
6089 up so we won't want to reuse such a value, but we play it safe
6090 here. */
6091 || libcall_insn
6092 /* If we didn't put a REG_EQUAL value or a source into the hash
 6093 	       table, there is no point in recording DEST.  */
6094 || sets[i].src_elt == 0
6095 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
6096 or SIGN_EXTEND, don't record DEST since it can cause
6097 some tracking to be wrong.
6098
6099 ??? Think about this more later. */
6100 || (GET_CODE (dest) == SUBREG
6101 && (GET_MODE_SIZE (GET_MODE (dest))
6102 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6103 && (GET_CODE (sets[i].src) == SIGN_EXTEND
6104 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
6105 continue;
6106
6107 /* STRICT_LOW_PART isn't part of the value BEING set,
6108 and neither is the SUBREG inside it.
6109 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
6110 if (GET_CODE (dest) == STRICT_LOW_PART)
6111 dest = SUBREG_REG (XEXP (dest, 0));
6112
6113 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
6114 /* Registers must also be inserted into chains for quantities. */
6115 if (insert_regs (dest, sets[i].src_elt, 1))
6116 {
6117 /* If `insert_regs' changes something, the hash code must be
6118 recalculated. */
6119 rehash_using_reg (dest);
6120 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6121 }
6122
6123 if (GET_CODE (inner_dest) == MEM
6124 && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
6125 /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
6126 that (MEM (ADDRESSOF (X))) is equivalent to Y.
6127 Consider the case in which the address of the MEM is
6128 passed to a function, which alters the MEM. Then, if we
6129 later use Y instead of the MEM we'll miss the update. */
6130 elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
6131 else
6132 elt = insert (dest, sets[i].src_elt,
6133 sets[i].dest_hash, GET_MODE (dest));
6134
6135 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
6136 && (! RTX_UNCHANGING_P (sets[i].inner_dest)
6137 || FIXED_BASE_PLUS_P (XEXP (sets[i].inner_dest,
6138 0))));
6139
6140 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6141 narrower than M2, and both M1 and M2 are the same number of words,
6142 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6143 make that equivalence as well.
6144
 6145 	 However, BAR may have equivalences for which gen_lowpart_if_possible
 6146 	 will produce a simpler value than it produces for BAR itself
 6147 	 (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all of
 6148 	 BAR's equivalences.  If we don't get a simplified form, make
6149 the SUBREG. It will not be used in an equivalence, but will
6150 cause two similar assignments to be detected.
6151
6152 Note the loop below will find SUBREG_REG (DEST) since we have
6153 already entered SRC and DEST of the SET in the table. */
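	  /* For example, (set (subreg:SI (reg:HI 100) 0) (reg:SI 101)) on a
	     machine with 32-bit words also tells us that (reg:HI 100) holds
	     the low part of (reg:SI 101).  */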
6154
6155 if (GET_CODE (dest) == SUBREG
6156 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6157 / UNITS_PER_WORD)
6158 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
6159 && (GET_MODE_SIZE (GET_MODE (dest))
6160 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6161 && sets[i].src_elt != 0)
6162 {
6163 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6164 struct table_elt *elt, *classp = 0;
6165
6166 for (elt = sets[i].src_elt->first_same_value; elt;
6167 elt = elt->next_same_value)
6168 {
6169 rtx new_src = 0;
6170 unsigned src_hash;
6171 struct table_elt *src_elt;
6172
6173 /* Ignore invalid entries. */
6174 if (GET_CODE (elt->exp) != REG
6175 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6176 continue;
6177
6178 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
6179 if (new_src == 0)
6180 new_src = gen_rtx_SUBREG (new_mode, elt->exp, 0);
6181
6182 src_hash = HASH (new_src, new_mode);
6183 src_elt = lookup (new_src, src_hash, new_mode);
6184
 6185 	      /* Put the new source in the hash table if it isn't
 6186 		 there already.  */
6187 if (src_elt == 0)
6188 {
6189 if (insert_regs (new_src, classp, 0))
6190 {
6191 rehash_using_reg (new_src);
6192 src_hash = HASH (new_src, new_mode);
6193 }
6194 src_elt = insert (new_src, classp, src_hash, new_mode);
6195 src_elt->in_memory = elt->in_memory;
6196 }
6197 else if (classp && classp != src_elt->first_same_value)
6198 /* Show that two things that we've seen before are
6199 actually the same. */
6200 merge_equiv_classes (src_elt, classp);
6201
6202 classp = src_elt->first_same_value;
6203 /* Ignore invalid entries. */
6204 while (classp
6205 && GET_CODE (classp->exp) != REG
6206 && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
6207 classp = classp->next_same_value;
6208 }
6209 }
6210 }
6211
6212 /* Special handling for (set REG0 REG1) where REG0 is the
6213 "cheapest", cheaper than REG1. After cse, REG1 will probably not
6214 be used in the sequel, so (if easily done) change this insn to
6215 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6216 that computed their value. Then REG1 will become a dead store
6217 and won't cloud the situation for later optimizations.
6218
6219 Do not make this change if REG1 is a hard register, because it will
6220 then be used in the sequel and we may be changing a two-operand insn
6221 into a three-operand insn.
6222
6223 Also do not do this if we are operating on a copy of INSN.
6224
6225 Also don't do this if INSN ends a libcall; this would cause an unrelated
6226 register to be set in the middle of a libcall, and we then get bad code
6227 if the libcall is deleted. */
6228
6229 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
6230 && NEXT_INSN (PREV_INSN (insn)) == insn
6231 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
6232 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6233 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
6234 {
6235 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6236 struct qty_table_elem *src_ent = &qty_table[src_q];
6237
6238 if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6239 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6240 {
6241 rtx prev = prev_nonnote_insn (insn);
6242
6243 /* Do not swap the registers around if the previous instruction
6244 attaches a REG_EQUIV note to REG1.
6245
6246 ??? It's not entirely clear whether we can transfer a REG_EQUIV
6247 from the pseudo that originally shadowed an incoming argument
6248 to another register. Some uses of REG_EQUIV might rely on it
6249 being attached to REG1 rather than REG2.
6250
6251 This section previously turned the REG_EQUIV into a REG_EQUAL
6252 note. We cannot do that because REG_EQUIV may provide an
6253 uninitialised stack slot when REG_PARM_STACK_SPACE is used. */
6254
6255 if (prev != 0 && GET_CODE (prev) == INSN
6256 && GET_CODE (PATTERN (prev)) == SET
6257 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6258 && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
6259 {
6260 rtx dest = SET_DEST (sets[0].rtl);
6261 rtx src = SET_SRC (sets[0].rtl);
6262 rtx note;
6263
6264 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6265 validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6266 validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6267 apply_change_group ();
6268
6269 /* If there was a REG_WAS_0 note on PREV, remove it. Move
6270 any REG_WAS_0 note on INSN to PREV. */
6271 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
6272 if (note)
6273 remove_note (prev, note);
6274
6275 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
6276 if (note)
6277 {
6278 remove_note (insn, note);
6279 XEXP (note, 1) = REG_NOTES (prev);
6280 REG_NOTES (prev) = note;
6281 }
6282
6283 /* If INSN has a REG_EQUAL note, and this note mentions
6284 REG0, then we must delete it, because the value in
6285 REG0 has changed. If the note's value is REG1, we must
6286 also delete it because that is now this insn's dest. */
6287 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6288 if (note != 0
6289 && (reg_mentioned_p (dest, XEXP (note, 0))
6290 || rtx_equal_p (src, XEXP (note, 0))))
6291 remove_note (insn, note);
6292 }
6293 }
6294 }
6295
6296 /* If this is a conditional jump insn, record any known equivalences due to
6297 the condition being tested. */
6298
6299 last_jump_equiv_class = 0;
6300 if (GET_CODE (insn) == JUMP_INSN
6301 && n_sets == 1 && GET_CODE (x) == SET
6302 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6303 record_jump_equiv (insn, 0);
6304
6305 #ifdef HAVE_cc0
6306 /* If the previous insn set CC0 and this insn no longer references CC0,
6307 delete the previous insn. Here we use the fact that nothing expects CC0
6308 to be valid over an insn, which is true until the final pass. */
6309 if (prev_insn && GET_CODE (prev_insn) == INSN
6310 && (tem = single_set (prev_insn)) != 0
6311 && SET_DEST (tem) == cc0_rtx
6312 && ! reg_mentioned_p (cc0_rtx, x))
6313 {
6314 PUT_CODE (prev_insn, NOTE);
6315 NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
6316 NOTE_SOURCE_FILE (prev_insn) = 0;
6317 }
6318
6319 prev_insn_cc0 = this_insn_cc0;
6320 prev_insn_cc0_mode = this_insn_cc0_mode;
6321 #endif
6322
6323 prev_insn = insn;
6324 }
6325 \f
6326 /* Remove from the hash table all expressions that reference memory. */
6327
6328 static void
6329 invalidate_memory ()
6330 {
6331 register int i;
6332 register struct table_elt *p, *next;
6333
6334 for (i = 0; i < HASH_SIZE; i++)
6335 for (p = table[i]; p; p = next)
6336 {
6337 next = p->next_same_hash;
6338 if (p->in_memory)
6339 remove_from_table (p, i);
6340 }
6341 }
6342
6343 /* If ADDR is an address that implicitly affects the stack pointer, return
6344 1 and update the register tables to show the effect. Else, return 0. */
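/* Such an address is one of the autoincrement forms, e.g. the
   (pre_dec (reg sp)) produced by a stack push.  */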
6345
6346 static int
6347 addr_affects_sp_p (addr)
6348 register rtx addr;
6349 {
6350 if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
6351 && GET_CODE (XEXP (addr, 0)) == REG
6352 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6353 {
6354 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6355 REG_TICK (STACK_POINTER_REGNUM)++;
6356
6357 /* This should be *very* rare. */
6358 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6359 invalidate (stack_pointer_rtx, VOIDmode);
6360
6361 return 1;
6362 }
6363
6364 return 0;
6365 }
6366
6367 /* Perform invalidation on the basis of everything about an insn
6368 except for invalidating the actual places that are SET in it.
6369 This includes the places CLOBBERed, and anything that might
6370 alias with something that is SET or CLOBBERed.
6371
6372 X is the pattern of the insn. */
6373
6374 static void
6375 invalidate_from_clobbers (x)
6376 rtx x;
6377 {
6378 if (GET_CODE (x) == CLOBBER)
6379 {
6380 rtx ref = XEXP (x, 0);
6381 if (ref)
6382 {
6383 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6384 || GET_CODE (ref) == MEM)
6385 invalidate (ref, VOIDmode);
6386 else if (GET_CODE (ref) == STRICT_LOW_PART
6387 || GET_CODE (ref) == ZERO_EXTRACT)
6388 invalidate (XEXP (ref, 0), GET_MODE (ref));
6389 }
6390 }
6391 else if (GET_CODE (x) == PARALLEL)
6392 {
6393 register int i;
6394 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6395 {
6396 register rtx y = XVECEXP (x, 0, i);
6397 if (GET_CODE (y) == CLOBBER)
6398 {
6399 rtx ref = XEXP (y, 0);
6400 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6401 || GET_CODE (ref) == MEM)
6402 invalidate (ref, VOIDmode);
6403 else if (GET_CODE (ref) == STRICT_LOW_PART
6404 || GET_CODE (ref) == ZERO_EXTRACT)
6405 invalidate (XEXP (ref, 0), GET_MODE (ref));
6406 }
6407 }
6408 }
6409 }
6410 \f
6411 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6412 and replace any registers in them with either an equivalent constant
6413 or the canonical form of the register. If we are inside an address,
6414 only do this if the address remains valid.
6415
6416 OBJECT is 0 except when within a MEM in which case it is the MEM.
6417
6418 Return the replacement for X. */
6419
6420 static rtx
6421 cse_process_notes (x, object)
6422 rtx x;
6423 rtx object;
6424 {
6425 enum rtx_code code = GET_CODE (x);
6426 const char *fmt = GET_RTX_FORMAT (code);
6427 int i;
6428
6429 switch (code)
6430 {
6431 case CONST_INT:
6432 case CONST:
6433 case SYMBOL_REF:
6434 case LABEL_REF:
6435 case CONST_DOUBLE:
6436 case PC:
6437 case CC0:
6438 case LO_SUM:
6439 return x;
6440
6441 case MEM:
6442 validate_change (x, &XEXP (x, 0),
6443 cse_process_notes (XEXP (x, 0), x), 0);
6444 return x;
6445
6446 case EXPR_LIST:
6447 case INSN_LIST:
6448 if (REG_NOTE_KIND (x) == REG_EQUAL)
6449 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6450 if (XEXP (x, 1))
6451 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6452 return x;
6453
6454 case SIGN_EXTEND:
6455 case ZERO_EXTEND:
6456 case SUBREG:
6457 {
6458 rtx new = cse_process_notes (XEXP (x, 0), object);
6459 /* We don't substitute VOIDmode constants into these rtx,
6460 since they would impede folding. */
6461 if (GET_MODE (new) != VOIDmode)
6462 validate_change (object, &XEXP (x, 0), new, 0);
6463 return x;
6464 }
6465
6466 case REG:
6467 i = REG_QTY (REGNO (x));
6468
6469 /* Return a constant or a constant register. */
6470 if (REGNO_QTY_VALID_P (REGNO (x)))
6471 {
6472 struct qty_table_elem *ent = &qty_table[i];
6473
6474 if (ent->const_rtx != NULL_RTX
6475 && (CONSTANT_P (ent->const_rtx)
6476 || GET_CODE (ent->const_rtx) == REG))
6477 {
6478 rtx new = gen_lowpart_if_possible (GET_MODE (x), ent->const_rtx);
6479 if (new)
6480 return new;
6481 }
6482 }
6483
6484 /* Otherwise, canonicalize this register. */
6485 return canon_reg (x, NULL_RTX);
6486
6487 default:
6488 break;
6489 }
6490
6491 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6492 if (fmt[i] == 'e')
6493 validate_change (object, &XEXP (x, i),
6494 cse_process_notes (XEXP (x, i), object), 0);
6495
6496 return x;
6497 }
6498 \f
6499 /* Find common subexpressions between the end test of a loop and the beginning
6500 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
6501
6502 Often we have a loop where an expression in the exit test is used
6503 in the body of the loop. For example "while (*p) *q++ = *p++;".
6504 Because of the way we duplicate the loop exit test in front of the loop,
6505 however, we don't detect that common subexpression. This will be caught
6506 when global cse is implemented, but this is a quite common case.
6507
6508 This function handles the most common cases of these common expressions.
6509 It is called after we have processed the basic block ending with the
6510 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
6511 jumps to a label used only once. */
6512
6513 static void
6514 cse_around_loop (loop_start)
6515 rtx loop_start;
6516 {
6517 rtx insn;
6518 int i;
6519 struct table_elt *p;
6520
6521 /* If the jump at the end of the loop doesn't go to the start, we don't
6522 do anything. */
6523 for (insn = PREV_INSN (loop_start);
6524 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
6525 insn = PREV_INSN (insn))
6526 ;
6527
6528 if (insn == 0
6529 || GET_CODE (insn) != NOTE
6530 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
6531 return;
6532
6533 /* If the last insn of the loop (the end test) was an NE comparison,
6534 we will interpret it as an EQ comparison, since we fell through
6535 the loop. Any equivalences resulting from that comparison are
6536 therefore not valid and must be invalidated. */
6537 if (last_jump_equiv_class)
6538 for (p = last_jump_equiv_class->first_same_value; p;
6539 p = p->next_same_value)
6540 {
6541 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
6542 || (GET_CODE (p->exp) == SUBREG
6543 && GET_CODE (SUBREG_REG (p->exp)) == REG))
6544 invalidate (p->exp, VOIDmode);
6545 else if (GET_CODE (p->exp) == STRICT_LOW_PART
6546 || GET_CODE (p->exp) == ZERO_EXTRACT)
6547 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
6548 }
6549
6550 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
6551 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
6552
6553 The only thing we do with SET_DEST is invalidate entries, so we
6554 can safely process each SET in order. It is slightly less efficient
6555 to do so, but we only want to handle the most common cases.
6556
6557 The gen_move_insn call in cse_set_around_loop may create new pseudos.
6558 These pseudos won't have valid entries in any of the tables indexed
6559 by register number, such as reg_qty. We avoid out-of-range array
6560 accesses by not processing any instructions created after cse started. */
6561
6562 for (insn = NEXT_INSN (loop_start);
6563 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
6564 && INSN_UID (insn) < max_insn_uid
6565 && ! (GET_CODE (insn) == NOTE
6566 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
6567 insn = NEXT_INSN (insn))
6568 {
6569 if (INSN_P (insn)
6570 && (GET_CODE (PATTERN (insn)) == SET
6571 || GET_CODE (PATTERN (insn)) == CLOBBER))
6572 cse_set_around_loop (PATTERN (insn), insn, loop_start);
6573 else if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == PARALLEL)
6574 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6575 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
6576 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
6577 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
6578 loop_start);
6579 }
6580 }
6581 \f
6582 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
6583 since they are done elsewhere. This function is called via note_stores. */
6584
6585 static void
6586 invalidate_skipped_set (dest, set, data)
6587 rtx set;
6588 rtx dest;
6589 void *data ATTRIBUTE_UNUSED;
6590 {
6591 enum rtx_code code = GET_CODE (dest);
6592
6593 if (code == MEM
6594 && ! addr_affects_sp_p (dest) /* If this is not a stack push ... */
6595 /* There are times when an address can appear varying and be a PLUS
6596 during this scan when it would be a fixed address were we to know
6597 the proper equivalences. So invalidate all memory if there is
6598 a BLKmode or nonscalar memory reference or a reference to a
6599 variable address. */
6600 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6601 || cse_rtx_varies_p (XEXP (dest, 0), 0)))
6602 {
6603 invalidate_memory ();
6604 return;
6605 }
6606
6607 if (GET_CODE (set) == CLOBBER
6608 #ifdef HAVE_cc0
6609 || dest == cc0_rtx
6610 #endif
6611 || dest == pc_rtx)
6612 return;
6613
6614 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6615 invalidate (XEXP (dest, 0), GET_MODE (dest));
6616 else if (code == REG || code == SUBREG || code == MEM)
6617 invalidate (dest, VOIDmode);
6618 }
6619
6620 /* Invalidate all insns from START up to the end of the function or the
 6621    next label.  This is called when we wish to CSE around a block that is
6622 conditionally executed. */
6623
6624 static void
6625 invalidate_skipped_block (start)
6626 rtx start;
6627 {
6628 rtx insn;
6629
6630 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
6631 insn = NEXT_INSN (insn))
6632 {
6633 if (! INSN_P (insn))
6634 continue;
6635
6636 if (GET_CODE (insn) == CALL_INSN)
6637 {
6638 if (! CONST_OR_PURE_CALL_P (insn))
6639 invalidate_memory ();
6640 invalidate_for_call ();
6641 }
6642
6643 invalidate_from_clobbers (PATTERN (insn));
6644 note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6645 }
6646 }
6647 \f
6648 /* If modifying X will modify the value in *DATA (which is really an
6649 `rtx *'), indicate that fact by setting the pointed to value to
6650 NULL_RTX. */
6651
6652 static void
6653 cse_check_loop_start (x, set, data)
6654 rtx x;
6655 rtx set ATTRIBUTE_UNUSED;
6656 void *data;
6657 {
6658 rtx *cse_check_loop_start_value = (rtx *) data;
6659
6660 if (*cse_check_loop_start_value == NULL_RTX
6661 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
6662 return;
6663
6664 if ((GET_CODE (x) == MEM && GET_CODE (*cse_check_loop_start_value) == MEM)
6665 || reg_overlap_mentioned_p (x, *cse_check_loop_start_value))
6666 *cse_check_loop_start_value = NULL_RTX;
6667 }
6668
6669 /* X is a SET or CLOBBER contained in INSN that was found near the start of
6670 a loop that starts with the label at LOOP_START.
6671
6672 If X is a SET, we see if its SET_SRC is currently in our hash table.
6673 If so, we see if it has a value equal to some register used only in the
6674 loop exit code (as marked by jump.c).
6675
6676 If those two conditions are true, we search backwards from the start of
6677 the loop to see if that same value was loaded into a register that still
6678 retains its value at the start of the loop.
6679
6680 If so, we insert an insn after the load to copy the destination of that
6681 load into the equivalent register and (try to) replace our SET_SRC with that
6682 register.
6683
6684 In any event, we invalidate whatever this SET or CLOBBER modifies. */
6685
6686 static void
6687 cse_set_around_loop (x, insn, loop_start)
6688 rtx x;
6689 rtx insn;
6690 rtx loop_start;
6691 {
6692 struct table_elt *src_elt;
6693
6694 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
6695 are setting PC or CC0 or whose SET_SRC is already a register. */
6696 if (GET_CODE (x) == SET
6697 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
6698 && GET_CODE (SET_SRC (x)) != REG)
6699 {
6700 src_elt = lookup (SET_SRC (x),
6701 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
6702 GET_MODE (SET_DEST (x)));
6703
6704 if (src_elt)
6705 for (src_elt = src_elt->first_same_value; src_elt;
6706 src_elt = src_elt->next_same_value)
6707 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
6708 && COST (src_elt->exp) < COST (SET_SRC (x)))
6709 {
6710 rtx p, set;
6711
6712 /* Look for an insn in front of LOOP_START that sets
6713 something in the desired mode to SET_SRC (x) before we hit
6714 a label or CALL_INSN. */
6715
6716 for (p = prev_nonnote_insn (loop_start);
6717 p && GET_CODE (p) != CALL_INSN
6718 && GET_CODE (p) != CODE_LABEL;
6719 p = prev_nonnote_insn (p))
6720 if ((set = single_set (p)) != 0
6721 && GET_CODE (SET_DEST (set)) == REG
6722 && GET_MODE (SET_DEST (set)) == src_elt->mode
6723 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
6724 {
6725 /* We now have to ensure that nothing between P
6726 and LOOP_START modified anything referenced in
6727 SET_SRC (x). We know that nothing within the loop
6728 can modify it, or we would have invalidated it in
6729 the hash table. */
6730 rtx q;
6731 rtx cse_check_loop_start_value = SET_SRC (x);
6732 for (q = p; q != loop_start; q = NEXT_INSN (q))
6733 if (INSN_P (q))
6734 note_stores (PATTERN (q),
6735 cse_check_loop_start,
6736 &cse_check_loop_start_value);
6737
6738 /* If nothing was changed and we can replace our
6739 SET_SRC, add an insn after P to copy its destination
6740 to what we will be replacing SET_SRC with. */
6741 if (cse_check_loop_start_value
6742 && validate_change (insn, &SET_SRC (x),
6743 src_elt->exp, 0))
6744 {
6745 /* If this creates new pseudos, this is unsafe,
 6746 		           because the regno of a new pseudo is unsuitable
6747 to index into reg_qty when cse_insn processes
6748 the new insn. Therefore, if a new pseudo was
6749 created, discard this optimization. */
6750 int nregs = max_reg_num ();
6751 rtx move
6752 = gen_move_insn (src_elt->exp, SET_DEST (set));
6753 if (nregs != max_reg_num ())
6754 {
6755 if (! validate_change (insn, &SET_SRC (x),
6756 SET_SRC (set), 0))
6757 abort ();
6758 }
6759 else
6760 emit_insn_after (move, p);
6761 }
6762 break;
6763 }
6764 }
6765 }
6766
6767 /* Deal with the destination of X affecting the stack pointer. */
6768 addr_affects_sp_p (SET_DEST (x));
6769
6770 /* See comment on similar code in cse_insn for explanation of these
6771 tests. */
6772 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
6773 || GET_CODE (SET_DEST (x)) == MEM)
6774 invalidate (SET_DEST (x), VOIDmode);
6775 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
6776 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
6777 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
6778 }
6779 \f
6780 /* Find the end of INSN's basic block and return its range,
6781 the total number of SETs in all the insns of the block, the last insn of the
6782 block, and the branch path.
6783
6784 The branch path indicates which branches should be followed. If a non-zero
6785 path size is specified, the block should be rescanned and a different set
6786 of branches will be taken. The branch path is only used if
6787 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
6788
6789 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6790 used to describe the block. It is filled in with the information about
6791 the current block. The incoming structure's branch path, if any, is used
6792 to construct the output branch path. */
6793
6794 void
6795 cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
6796 rtx insn;
6797 struct cse_basic_block_data *data;
6798 int follow_jumps;
6799 int after_loop;
6800 int skip_blocks;
6801 {
6802 rtx p = insn, q;
6803 int nsets = 0;
6804 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6805 rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
6806 int path_size = data->path_size;
6807 int path_entry = 0;
6808 int i;
6809
6810 /* Update the previous branch path, if any. If the last branch was
6811 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
6812 shorten the path by one and look at the previous branch. We know that
6813 at least one branch must have been taken if PATH_SIZE is non-zero. */
6814 while (path_size > 0)
6815 {
6816 if (data->path[path_size - 1].status != NOT_TAKEN)
6817 {
6818 data->path[path_size - 1].status = NOT_TAKEN;
6819 break;
6820 }
6821 else
6822 path_size--;
6823 }
6824
6825 /* If the first instruction is marked with QImode, that means we've
6826 already processed this block. Our caller will look at DATA->LAST
6827 to figure out where to go next. We want to return the next block
6828 in the instruction stream, not some branched-to block somewhere
 6829    else.  We accomplish this by pretending our caller forbade us to
 6830    follow jumps or skip blocks.  */
6831 if (GET_MODE (insn) == QImode)
6832 follow_jumps = skip_blocks = 0;
6833
6834 /* Scan to end of this basic block. */
6835 while (p && GET_CODE (p) != CODE_LABEL)
6836 {
6837 /* Don't cse out the end of a loop. This makes a difference
6838 only for the unusual loops that always execute at least once;
6839 all other loops have labels there so we will stop in any case.
6840 Cse'ing out the end of the loop is dangerous because it
6841 might cause an invariant expression inside the loop
6842 to be reused after the end of the loop. This would make it
6843 hard to move the expression out of the loop in loop.c,
6844 especially if it is one of several equivalent expressions
6845 and loop.c would like to eliminate it.
6846
6847 If we are running after loop.c has finished, we can ignore
6848 the NOTE_INSN_LOOP_END. */
6849
6850 if (! after_loop && GET_CODE (p) == NOTE
6851 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
6852 break;
6853
6854       /* Don't cse over a call to setjmp; on some machines (e.g., the VAX)
6855 the regs restored by the longjmp come from
6856 a later time than the setjmp. */
6857 if (PREV_INSN (p) && GET_CODE (PREV_INSN (p)) == CALL_INSN
6858 && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
6859 break;
6860
6861 /* A PARALLEL can have lots of SETs in it,
6862 especially if it is really an ASM_OPERANDS. */
6863 if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6864 nsets += XVECLEN (PATTERN (p), 0);
6865 else if (GET_CODE (p) != NOTE)
6866 nsets += 1;
6867
6868 /* Ignore insns made by CSE; they cannot affect the boundaries of
6869 the basic block. */
6870
6871 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6872 high_cuid = INSN_CUID (p);
6873 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6874 low_cuid = INSN_CUID (p);
6875
6876 /* See if this insn is in our branch path. If it is and we are to
6877 take it, do so. */
6878 if (path_entry < path_size && data->path[path_entry].branch == p)
6879 {
6880 if (data->path[path_entry].status != NOT_TAKEN)
6881 p = JUMP_LABEL (p);
6882
6883 /* Point to next entry in path, if any. */
6884 path_entry++;
6885 }
6886
6887 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6888 was specified, we haven't reached our maximum path length, there are
6889 insns following the target of the jump, this is the only use of the
6890 jump label, and the target label is preceded by a BARRIER.
6891
6892 Alternatively, we can follow the jump if it branches around a
6893 block of code and there are no other branches into the block.
6894 In this case invalidate_skipped_block will be called to invalidate any
6895 registers set in the block when following the jump. */
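      /* Rough picture of the two cases (illustrative RTL only).  Following
	 the jump: the pattern is
	 (set (pc) (if_then_else COND (label_ref L) (pc))), L is used
	 nowhere else, and the insn before L (ignoring notes and unused
	 labels) is a BARRIER, so the code after L is reachable only by
	 taking this branch and can extend the current path.  Branching
	 around a block: no label lies between the jump and its target, so
	 nothing else can enter the skipped insns; the branch is recorded as
	 AROUND and invalidate_skipped_block later forgets whatever the
	 skipped insns set.  */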
6896
6897 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
6898 && GET_CODE (p) == JUMP_INSN
6899 && GET_CODE (PATTERN (p)) == SET
6900 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6901 && JUMP_LABEL (p) != 0
6902 && LABEL_NUSES (JUMP_LABEL (p)) == 1
6903 && NEXT_INSN (JUMP_LABEL (p)) != 0)
6904 {
6905 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6906 if ((GET_CODE (q) != NOTE
6907 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
6908 || (PREV_INSN (q) && GET_CODE (PREV_INSN (q)) == CALL_INSN
6909 && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
6910 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
6911 break;
6912
6913 /* If we ran into a BARRIER, this code is an extension of the
6914 basic block when the branch is taken. */
6915 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
6916 {
6917              /* Don't allow ourselves to keep walking around an
6918 always-executed loop. */
6919 if (next_real_insn (q) == next)
6920 {
6921 p = NEXT_INSN (p);
6922 continue;
6923 }
6924
6925 /* Similarly, don't put a branch in our path more than once. */
6926 for (i = 0; i < path_entry; i++)
6927 if (data->path[i].branch == p)
6928 break;
6929
6930 if (i != path_entry)
6931 break;
6932
6933 data->path[path_entry].branch = p;
6934 data->path[path_entry++].status = TAKEN;
6935
6936 /* This branch now ends our path. It was possible that we
6937 didn't see this branch the last time around (when the
6938 insn in front of the target was a JUMP_INSN that was
6939 turned into a no-op). */
6940 path_size = path_entry;
6941
6942 p = JUMP_LABEL (p);
6943 /* Mark block so we won't scan it again later. */
6944 PUT_MODE (NEXT_INSN (p), QImode);
6945 }
6946 /* Detect a branch around a block of code. */
6947 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
6948 {
6949 register rtx tmp;
6950
6951 if (next_real_insn (q) == next)
6952 {
6953 p = NEXT_INSN (p);
6954 continue;
6955 }
6956
6957 for (i = 0; i < path_entry; i++)
6958 if (data->path[i].branch == p)
6959 break;
6960
6961 if (i != path_entry)
6962 break;
6963
6964 /* This is no_labels_between_p (p, q) with an added check for
6965 reaching the end of a function (in case Q precedes P). */
6966 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6967 if (GET_CODE (tmp) == CODE_LABEL)
6968 break;
6969
6970 if (tmp == q)
6971 {
6972 data->path[path_entry].branch = p;
6973 data->path[path_entry++].status = AROUND;
6974
6975 path_size = path_entry;
6976
6977 p = JUMP_LABEL (p);
6978 /* Mark block so we won't scan it again later. */
6979 PUT_MODE (NEXT_INSN (p), QImode);
6980 }
6981 }
6982 }
6983 p = NEXT_INSN (p);
6984 }
6985
6986 data->low_cuid = low_cuid;
6987 data->high_cuid = high_cuid;
6988 data->nsets = nsets;
6989 data->last = p;
6990
6991   /* If no jump in the path was taken, set our path length to zero
6992      so that a rescan won't be done. */
6993 for (i = path_size - 1; i >= 0; i--)
6994 if (data->path[i].status != NOT_TAKEN)
6995 break;
6996
6997 if (i == -1)
6998 data->path_size = 0;
6999 else
7000 data->path_size = path_size;
7001
7002 /* End the current branch path. */
7003 data->path[path_size].branch = 0;
7004 }
7005 \f
7006 /* Perform cse on the instructions of a function.
7007 F is the first instruction.
7008    NREGS is one plus the highest pseudo-reg number used in the function.
7009
7010 AFTER_LOOP is 1 if this is the cse call done after loop optimization
7011 (only if -frerun-cse-after-loop).
7012
7013 Returns 1 if jump_optimize should be redone due to simplifications
7014 in conditional jump instructions. */
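
/* Outline of the driver below, summarizing the code that follows: set up
   the per-function tables, assign cuids to the insns, then walk the insn
   stream one extended basic block at a time, using cse_end_of_basic_block
   to delimit each block and cse_basic_block to do the work, and finally
   release the tables and report whether any jumps were simplified.  */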
7015
7016 int
7017 cse_main (f, nregs, after_loop, file)
7018 rtx f;
7019 int nregs;
7020 int after_loop;
7021 FILE *file;
7022 {
7023 struct cse_basic_block_data val;
7024 register rtx insn = f;
7025 register int i;
7026
7027 cse_jumps_altered = 0;
7028 recorded_label_ref = 0;
7029 constant_pool_entries_cost = 0;
7030 val.path_size = 0;
7031
7032 init_recog ();
7033 init_alias_analysis ();
7034
7035 max_reg = nregs;
7036
7037 max_insn_uid = get_max_uid ();
7038
7039 reg_eqv_table = (struct reg_eqv_elem *)
7040 xmalloc (nregs * sizeof (struct reg_eqv_elem));
7041
7042 #ifdef LOAD_EXTEND_OP
7043
7044 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
7045 and change the code and mode as appropriate. */
7046 memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
7047 #endif
7048
7049 /* Reset the counter indicating how many elements have been made
7050 thus far. */
7051 n_elements_made = 0;
7052
7053 /* Find the largest uid. */
7054
7055 max_uid = get_max_uid ();
7056 uid_cuid = (int *) xcalloc (max_uid + 1, sizeof (int));
7057
7058 /* Compute the mapping from uids to cuids.
7059 CUIDs are numbers assigned to insns, like uids,
7060 except that cuids increase monotonically through the code.
7061 Don't assign cuids to line-number NOTEs, so that the distance in cuids
7062 between two insns is not affected by -g. */
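
  /* Worked example (hypothetical insn stream): for

	insn A, line-number NOTE, insn B, block-begin NOTE, insn C

     the cuids assigned below are 1, 1, 2, 3, 4; the line-number note
     shares the cuid of the preceding insn while everything else gets a
     fresh one, so cuid distances are identical with and without -g.  */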
7063
7064 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
7065 {
7066 if (GET_CODE (insn) != NOTE
7067 || NOTE_LINE_NUMBER (insn) < 0)
7068 INSN_CUID (insn) = ++i;
7069 else
7070         /* Give a line number note the same cuid as the preceding insn. */
7071 INSN_CUID (insn) = i;
7072 }
7073
7074 ggc_push_context ();
7075
7076 /* Loop over basic blocks.
7077 Compute the maximum number of qty's needed for each basic block
7078 (which is 2 for each SET). */
7079 insn = f;
7080 while (insn)
7081 {
7082 cse_altered = 0;
7083 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
7084 flag_cse_skip_blocks);
7085
7086 /* If this basic block was already processed or has no sets, skip it. */
7087 if (val.nsets == 0 || GET_MODE (insn) == QImode)
7088 {
7089 PUT_MODE (insn, VOIDmode);
7090 insn = (val.last ? NEXT_INSN (val.last) : 0);
7091 val.path_size = 0;
7092 continue;
7093 }
7094
7095 cse_basic_block_start = val.low_cuid;
7096 cse_basic_block_end = val.high_cuid;
7097 max_qty = val.nsets * 2;
7098
7099 if (file)
7100 fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
7101 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
7102 val.nsets);
7103
7104 /* Make MAX_QTY bigger to give us room to optimize
7105 past the end of this basic block, if that should prove useful. */
7106 if (max_qty < 500)
7107 max_qty = 500;
7108
7109 max_qty += max_reg;
7110
7111 /* If this basic block is being extended by following certain jumps,
7112 (see `cse_end_of_basic_block'), we reprocess the code from the start.
7113 Otherwise, we start after this basic block. */
7114 if (val.path_size > 0)
7115 cse_basic_block (insn, val.last, val.path, 0);
7116 else
7117 {
7118 int old_cse_jumps_altered = cse_jumps_altered;
7119 rtx temp;
7120
7121 /* When cse changes a conditional jump to an unconditional
7122 jump, we want to reprocess the block, since it will give
7123 us a new branch path to investigate. */
7124 cse_jumps_altered = 0;
7125 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
7126 if (cse_jumps_altered == 0
7127 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7128 insn = temp;
7129
7130 cse_jumps_altered |= old_cse_jumps_altered;
7131 }
7132
7133 if (cse_altered)
7134 ggc_collect ();
7135
7136 #ifdef USE_C_ALLOCA
7137 alloca (0);
7138 #endif
7139 }
7140
7141 ggc_pop_context ();
7142
7143 if (max_elements_made < n_elements_made)
7144 max_elements_made = n_elements_made;
7145
7146 /* Clean up. */
7147 end_alias_analysis ();
7148 free (uid_cuid);
7149 free (reg_eqv_table);
7150
7151 return cse_jumps_altered || recorded_label_ref;
7152 }
7153
7154 /* Process a single basic block.  FROM and TO are the limits of the basic
7155 block. NEXT_BRANCH points to the branch path when following jumps or
7156 a null path when not following jumps.
7157
7158 AROUND_LOOP is non-zero if we are to try to cse around to the start of a
7159 loop. This is true when we are being called for the last time on a
7160 block and this CSE pass is before loop.c. */
7161
7162 static rtx
7163 cse_basic_block (from, to, next_branch, around_loop)
7164 register rtx from, to;
7165 struct branch_path *next_branch;
7166 int around_loop;
7167 {
7168 register rtx insn;
7169 int to_usage = 0;
7170 rtx libcall_insn = NULL_RTX;
7171 int num_insns = 0;
7172
7173 /* This array is undefined before max_reg, so only allocate
7174 the space actually needed and adjust the start. */
7175
7176 qty_table
7177 = (struct qty_table_elem *) xmalloc ((max_qty - max_reg)
7178 * sizeof (struct qty_table_elem));
7179 qty_table -= max_reg;
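  /* Note on the adjustment above (illustrative): after subtracting MAX_REG,
     qty_table[Q] for any quantity number Q in the range [max_reg, max_qty)
     falls inside the block just allocated; indices below max_reg must
     never be used.  */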
7180
7181 new_basic_block ();
7182
7183 /* TO might be a label. If so, protect it from being deleted. */
7184 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7185 ++LABEL_NUSES (to);
7186
7187 for (insn = from; insn != to; insn = NEXT_INSN (insn))
7188 {
7189 register enum rtx_code code = GET_CODE (insn);
7190
7191 /* If we have processed 1,000 insns, flush the hash table to
7192 avoid extreme quadratic behavior. We must not include NOTEs
7193 in the count since there may be more of them when generating
7194 debugging information. If we clear the table at different
7195          times, code generated with -g -O might be different from code
7196 generated with -O but not -g.
7197
7198 ??? This is a real kludge and needs to be done some other way.
7199 Perhaps for 2.9. */
7200 if (code != NOTE && num_insns++ > 1000)
7201 {
7202 flush_hash_table ();
7203 num_insns = 0;
7204 }
7205
7206 /* See if this is a branch that is part of the path. If so, and it is
7207 to be taken, do so. */
7208 if (next_branch->branch == insn)
7209 {
7210 enum taken status = next_branch++->status;
7211 if (status != NOT_TAKEN)
7212 {
7213 if (status == TAKEN)
7214 record_jump_equiv (insn, 1);
7215 else
7216 invalidate_skipped_block (NEXT_INSN (insn));
7217
7218 /* Set the last insn as the jump insn; it doesn't affect cc0.
7219 Then follow this branch. */
7220 #ifdef HAVE_cc0
7221 prev_insn_cc0 = 0;
7222 #endif
7223 prev_insn = insn;
7224 insn = JUMP_LABEL (insn);
7225 continue;
7226 }
7227 }
7228
7229 if (GET_MODE (insn) == QImode)
7230 PUT_MODE (insn, VOIDmode);
7231
7232 if (GET_RTX_CLASS (code) == 'i')
7233 {
7234 rtx p;
7235
7236 /* Process notes first so we have all notes in canonical forms when
7237 looking for duplicate operations. */
7238
7239 if (REG_NOTES (insn))
7240 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
7241
7242           /* Track when we are inside a LIBCALL block.  Inside such a block,
7243 we do not want to record destinations. The last insn of a
7244 LIBCALL block is not considered to be part of the block, since
7245 its destination is the result of the block and hence should be
7246 recorded. */
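
	  /* Typical shape of such a block (illustrative): the first insn
	     carries a REG_LIBCALL note naming the last insn, and the last
	     insn carries a REG_RETVAL note naming the first.  The code
	     below latches the insn named by REG_LIBCALL into LIBCALL_INSN
	     and clears it again at the REG_RETVAL insn.  */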
7247
7248 if (REG_NOTES (insn) != 0)
7249 {
7250 if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
7251 libcall_insn = XEXP (p, 0);
7252 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7253 libcall_insn = 0;
7254 }
7255
7256 cse_insn (insn, libcall_insn);
7257
7258 /* If we haven't already found an insn where we added a LABEL_REF,
7259 check this one. */
7260 if (GET_CODE (insn) == INSN && ! recorded_label_ref
7261 && for_each_rtx (&PATTERN (insn), check_for_label_ref,
7262 (void *) insn))
7263 recorded_label_ref = 1;
7264 }
7265
7266 /* If INSN is now an unconditional jump, skip to the end of our
7267 basic block by pretending that we just did the last insn in the
7268 basic block. If we are jumping to the end of our block, show
7269 that we can have one usage of TO. */
7270
7271 if (any_uncondjump_p (insn))
7272 {
7273 if (to == 0)
7274 {
7275 free (qty_table + max_reg);
7276 return 0;
7277 }
7278
7279 if (JUMP_LABEL (insn) == to)
7280 to_usage = 1;
7281
7282 /* Maybe TO was deleted because the jump is unconditional.
7283 If so, there is nothing left in this basic block. */
7284 /* ??? Perhaps it would be smarter to set TO
7285 to whatever follows this insn,
7286 and pretend the basic block had always ended here. */
7287 if (INSN_DELETED_P (to))
7288 break;
7289
7290 insn = PREV_INSN (to);
7291 }
7292
7293 /* See if it is ok to keep on going past the label
7294 which used to end our basic block. Remember that we incremented
7295 the count of that label, so we decrement it here. If we made
7296 a jump unconditional, TO_USAGE will be one; in that case, we don't
7297 want to count the use in that jump. */
7298
7299 if (to != 0 && NEXT_INSN (insn) == to
7300 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
7301 {
7302 struct cse_basic_block_data val;
7303 rtx prev;
7304
7305 insn = NEXT_INSN (to);
7306
7307 /* If TO was the last insn in the function, we are done. */
7308 if (insn == 0)
7309 {
7310 free (qty_table + max_reg);
7311 return 0;
7312 }
7313
7314 /* If TO was preceded by a BARRIER we are done with this block
7315 because it has no continuation. */
7316 prev = prev_nonnote_insn (to);
7317 if (prev && GET_CODE (prev) == BARRIER)
7318 {
7319 free (qty_table + max_reg);
7320 return insn;
7321 }
7322
7323 /* Find the end of the following block. Note that we won't be
7324 following branches in this case. */
7325 to_usage = 0;
7326 val.path_size = 0;
7327 cse_end_of_basic_block (insn, &val, 0, 0, 0);
7328
7329 /* If the tables we allocated have enough space left
7330 to handle all the SETs in the next basic block,
7331 continue through it. Otherwise, return,
7332 and that block will be scanned individually. */
7333 if (val.nsets * 2 + next_qty > max_qty)
7334 break;
7335
7336 cse_basic_block_start = val.low_cuid;
7337 cse_basic_block_end = val.high_cuid;
7338 to = val.last;
7339
7340 /* Prevent TO from being deleted if it is a label. */
7341 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7342 ++LABEL_NUSES (to);
7343
7344 /* Back up so we process the first insn in the extension. */
7345 insn = PREV_INSN (insn);
7346 }
7347 }
7348
7349 if (next_qty > max_qty)
7350 abort ();
7351
7352 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
7353 the previous insn is the only insn that branches to the head of a loop,
7354 we can cse into the loop. Don't do this if we changed the jump
7355 structure of a loop unless we aren't going to be following jumps. */
7356
7357 if ((cse_jumps_altered == 0
7358 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7359 && around_loop && to != 0
7360 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
7361 && GET_CODE (PREV_INSN (to)) == JUMP_INSN
7362 && JUMP_LABEL (PREV_INSN (to)) != 0
7363 && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
7364 cse_around_loop (JUMP_LABEL (PREV_INSN (to)));
7365
7366 free (qty_table + max_reg);
7367
7368 return to ? NEXT_INSN (to) : 0;
7369 }
7370 \f
7371 /* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
7372    there isn't a REG_LABEL note.  Return one if so.  DATA is the insn. */
7373
7374 static int
7375 check_for_label_ref (rtl, data)
7376 rtx *rtl;
7377 void *data;
7378 {
7379 rtx insn = (rtx) data;
7380
7381 /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
7382 we must rerun jump since it needs to place the note. If this is a
7383 LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
7384 since no REG_LABEL will be added. */
7385 return (GET_CODE (*rtl) == LABEL_REF
7386 && INSN_UID (XEXP (*rtl, 0)) != 0
7387 && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
7388 }
7389 \f
7390 /* Count the number of times registers are used (not set) in X.
7391    COUNTS is an array in which we accumulate the count; INCR is how much
7392 we count each register usage.
7393
7394 Don't count a usage of DEST, which is the SET_DEST of a SET which
7395 contains X in its SET_SRC. This is because such a SET does not
7396 modify the liveness of DEST. */
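
/* Hypothetical example (register numbers are illustrative): for the insn
   (set (reg 100) (plus (reg 100) (reg 101))), the recursion below passes
   (reg 100) down as DEST while scanning the source, so the use of
   (reg 100) there is not counted, but counts[101] is still incremented;
   deleting the SET would not change whether (reg 100) is live.  */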
7397
7398 static void
7399 count_reg_usage (x, counts, dest, incr)
7400 rtx x;
7401 int *counts;
7402 rtx dest;
7403 int incr;
7404 {
7405 enum rtx_code code;
7406 const char *fmt;
7407 int i, j;
7408
7409 if (x == 0)
7410 return;
7411
7412 switch (code = GET_CODE (x))
7413 {
7414 case REG:
7415 if (x != dest)
7416 counts[REGNO (x)] += incr;
7417 return;
7418
7419 case PC:
7420 case CC0:
7421 case CONST:
7422 case CONST_INT:
7423 case CONST_DOUBLE:
7424 case SYMBOL_REF:
7425 case LABEL_REF:
7426 return;
7427
7428 case CLOBBER:
7429 /* If we are clobbering a MEM, mark any registers inside the address
7430 as being used. */
7431 if (GET_CODE (XEXP (x, 0)) == MEM)
7432 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
7433 return;
7434
7435 case SET:
7436 /* Unless we are setting a REG, count everything in SET_DEST. */
7437 if (GET_CODE (SET_DEST (x)) != REG)
7438 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
7439
7440 /* If SRC has side-effects, then we can't delete this insn, so the
7441 usage of SET_DEST inside SRC counts.
7442
7443 ??? Strictly-speaking, we might be preserving this insn
7444 because some other SET has side-effects, but that's hard
7445 to do and can't happen now. */
7446 count_reg_usage (SET_SRC (x), counts,
7447 side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
7448 incr);
7449 return;
7450
7451 case CALL_INSN:
7452 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
7453 /* Fall through. */
7454
7455 case INSN:
7456 case JUMP_INSN:
7457 count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);
7458
7459 /* Things used in a REG_EQUAL note aren't dead since loop may try to
7460 use them. */
7461
7462 count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
7463 return;
7464
7465 case EXPR_LIST:
7466 case INSN_LIST:
7467 if (REG_NOTE_KIND (x) == REG_EQUAL
7468 || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE))
7469 count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
7470 count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
7471 return;
7472
7473 default:
7474 break;
7475 }
7476
7477 fmt = GET_RTX_FORMAT (code);
7478 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7479 {
7480 if (fmt[i] == 'e')
7481 count_reg_usage (XEXP (x, i), counts, dest, incr);
7482 else if (fmt[i] == 'E')
7483 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7484 count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
7485 }
7486 }
7487 \f
7488 /* Return true if SET is live. */
7489 static bool
7490 set_live_p (set, insn, counts)
7491 rtx set;
7492 rtx insn ATTRIBUTE_UNUSED; /* Only used with HAVE_cc0. */
7493 int *counts;
7494 {
7495 #ifdef HAVE_cc0
7496 rtx tem;
7497 #endif
7498
7499 if (set_noop_p (set))
7500 ;
7501
7502 #ifdef HAVE_cc0
7503 else if (GET_CODE (SET_DEST (set)) == CC0
7504 && !side_effects_p (SET_SRC (set))
7505 && ((tem = next_nonnote_insn (insn)) == 0
7506 || !INSN_P (tem)
7507 || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
7508 return false;
7509 #endif
7510 else if (GET_CODE (SET_DEST (set)) != REG
7511 || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
7512 || counts[REGNO (SET_DEST (set))] != 0
7513 || side_effects_p (SET_SRC (set))
7514 /* An ADDRESSOF expression can turn into a use of the
7515 internal arg pointer, so always consider the
7516 internal arg pointer live. If it is truly dead,
7517 flow will delete the initializing insn. */
7518 || (SET_DEST (set) == current_function_internal_arg_pointer))
7519 return true;
7520 return false;
7521 }
7522
7523 /* Return true if INSN is live. */
7524
7525 static bool
7526 insn_live_p (insn, counts)
7527 rtx insn;
7528 int *counts;
7529 {
7530 int i;
7531 if (GET_CODE (PATTERN (insn)) == SET)
7532 return set_live_p (PATTERN (insn), insn, counts);
7533 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7534 {
7535 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7536 {
7537 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7538
7539 if (GET_CODE (elt) == SET)
7540 {
7541 if (set_live_p (elt, insn, counts))
7542 return true;
7543 }
7544 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7545 return true;
7546 }
7547 return false;
7548 }
7549 else
7550 return true;
7551 }
7552
7553 /* Return true if the libcall ending at INSN is dead as a whole. */
7554
7555 static bool
7556 dead_libcall_p (insn)
7557 rtx insn;
7558 {
7559 rtx note;
7560 /* See if there's a REG_EQUAL note on this insn and try to
7561 replace the source with the REG_EQUAL expression.
7562
7563 We assume that insns with REG_RETVALs can only be reg->reg
7564 copies at this point. */
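
  /* Hypothetical example (register numbers are illustrative): if INSN is
     (set (reg 110) (reg 111)) carrying a REG_EQUAL note for the value the
     libcall computes, and that value can validly replace the source, INSN
     no longer needs the rest of the libcall sequence; its REG_RETVAL note
     is removed and the remaining insns of the sequence become dead.  */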
7565 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7566 if (note)
7567 {
7568 rtx set = single_set (insn);
7569 rtx new = simplify_rtx (XEXP (note, 0));
7570
7571 if (!new)
7572 new = XEXP (note, 0);
7573
7574 if (set && validate_change (insn, &SET_SRC (set), new, 0))
7575 {
7576 remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7577 return true;
7578 }
7579 }
7580 return false;
7581 }
7582
7583 /* Scan all the insns and delete any that are dead; i.e., they store a register
7584 that is never used or they copy a register to itself.
7585
7586 This is used to remove insns made obviously dead by cse, loop or other
7587 optimizations. It improves the heuristics in loop since it won't try to
7588 move dead invariants out of loops or make givs for dead quantities. The
7589 remaining passes of the compilation are also sped up. */
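
/* Examples of what counts as trivially dead (illustrative): an insn such
   as (set (reg 200) (const_int 42)) whose destination has a use count of
   zero, or a no-op self-copy (set (reg 201) (reg 201)).  Deleting such an
   insn decrements the counts of the registers it used, which can expose
   further dead insns earlier in the backward scan.  */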
7590
7591 void
7592 delete_trivially_dead_insns (insns, nreg, preserve_basic_blocks)
7593 rtx insns;
7594 int nreg;
7595 int preserve_basic_blocks;
7596 {
7597 int *counts;
7598 rtx insn, prev;
7599 int i;
7600 int in_libcall = 0, dead_libcall = 0;
7601 basic_block bb;
7602
7603 /* First count the number of times each register is used. */
7604 counts = (int *) xcalloc (nreg, sizeof (int));
7605 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
7606 count_reg_usage (insn, counts, NULL_RTX, 1);
7607
7608 /* Go from the last insn to the first and delete insns that only set unused
7609 registers or copy a register to itself. As we delete an insn, remove
7610 usage counts for registers it uses.
7611
7612 The first jump optimization pass may leave a real insn as the last
7613 insn in the function. We must not skip that insn or we may end
7614 up deleting code that is not really dead. */
7615 insn = get_last_insn ();
7616 if (! INSN_P (insn))
7617 insn = prev_real_insn (insn);
7618
7619 if (!preserve_basic_blocks)
7620 for (; insn; insn = prev)
7621 {
7622 int live_insn = 0;
7623
7624 prev = prev_real_insn (insn);
7625
7626 /* Don't delete any insns that are part of a libcall block unless
7627 we can delete the whole libcall block.
7628
7629 Flow or loop might get confused if we did that. Remember
7630 that we are scanning backwards. */
7631 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7632 {
7633 in_libcall = 1;
7634 live_insn = 1;
7635 dead_libcall = dead_libcall_p (insn);
7636 }
7637 else if (in_libcall)
7638 live_insn = ! dead_libcall;
7639 else
7640 live_insn = insn_live_p (insn, counts);
7641
7642 /* If this is a dead insn, delete it and show registers in it aren't
7643 being used. */
7644
7645 if (! live_insn)
7646 {
7647 count_reg_usage (insn, counts, NULL_RTX, -1);
7648 delete_insn (insn);
7649 }
7650
7651 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7652 {
7653 in_libcall = 0;
7654 dead_libcall = 0;
7655 }
7656 }
7657 else
7658 for (i = 0; i < n_basic_blocks; i++)
7659 for (bb = BASIC_BLOCK (i), insn = bb->end; insn != bb->head; insn = prev)
7660 {
7661 int live_insn = 0;
7662
7663 prev = PREV_INSN (insn);
7664 if (!INSN_P (insn))
7665 continue;
7666
7667 /* Don't delete any insns that are part of a libcall block unless
7668 we can delete the whole libcall block.
7669
7670 Flow or loop might get confused if we did that. Remember
7671 that we are scanning backwards. */
7672 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7673 {
7674 in_libcall = 1;
7675 live_insn = 1;
7676 dead_libcall = dead_libcall_p (insn);
7677 }
7678 else if (in_libcall)
7679 live_insn = ! dead_libcall;
7680 else
7681 live_insn = insn_live_p (insn, counts);
7682
7683 /* If this is a dead insn, delete it and show registers in it aren't
7684 being used. */
7685
7686 if (! live_insn)
7687 {
7688 count_reg_usage (insn, counts, NULL_RTX, -1);
7689 if (insn == bb->end)
7690 bb->end = PREV_INSN (insn);
7691 flow_delete_insn (insn);
7692 }
7693
7694 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7695 {
7696 in_libcall = 0;
7697 dead_libcall = 0;
7698 }
7699 }
7700
7701 /* Clean up. */
7702 free (counts);
7703 }