/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
/* stdio.h must precede rtl.h for FFS.  */
#include "system.h"
#include <setjmp.h>

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"

/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.

   Note CSE can turn a conditional or computed jump into a nop or
   an unconditional jump.  When this occurs we arrange to run the jump
   optimizer after CSE to delete the unreachable code.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and a vector of "quantity
   numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

   Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `reg_qty' records what quantity a register is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to `max_reg'.
   If register N has not been assigned a quantity, reg_qty[N] will equal N.

   Quantity numbers below `max_reg' do not exist and none of the `qty_table'
   entries should be referenced with an index below `max_reg'.

   We also maintain a bidirectional chain of registers for each
   quantity number.  The `qty_table' members `first_reg' and `last_reg',
   and `reg_eqv_table' members `next' and `prev' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.
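
   For example (a hypothetical scan): after processing

	(set (reg 101) (reg 100))

   reg 101 receives reg 100's quantity number, both registers join the
   same equivalence chain, and later uses of reg 101 can be replaced by
   reg 100 until either register is set again.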

   If two registers have the same quantity number, REG expressions with
   the qty_table `mode' must be in the hash table for both registers and
   must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity of one
   of the registers does not have the same mode as those expressions.

   Constants and quantity numbers:

   When a quantity has a known constant value, that value is stored
   in the appropriate qty_table `const_rtx'.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate qty_table
   `const_rtx'.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.
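
   For example (hypothetical): if (const_int 4) is moved into an SImode
   register and elsewhere into a DImode register, the hash table holds
   two distinct entries for it, one recorded with mode SImode and one
   with mode DImode.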

   Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and qty_table `first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   The vectors `reg_tick' and `reg_in_table' are used to detect this case.
   reg_tick[i] is incremented whenever a value is stored in register i.
   reg_in_table[i] holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value reg_tick[i] had
   when the references were entered.  If we want to enter a reference
   and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
   Until we want to enter a new entry, the mere fact that the two vectors
   don't match causes the entries to be ignored if anyone tries to match them.
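
   As a hypothetical illustration: an expression mentioning register 6
   is entered while reg_tick[6] == 3, so reg_in_table[6] becomes 3.
   A later store into register 6 bumps reg_tick[6] to 4; from then on
   the stale entry fails to match, and it is scanned out only when a
   new expression mentioning register 6 wants to enter the table.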

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, the vectors `reg_tick'
   and `reg_in_table' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

   Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */

/* One plus largest register number used in this function.  */

static int max_reg;

/* One plus largest instruction UID used in this function at time of
   cse_main call.  */

static int max_insn_uid;

/* Length of qty_table vector.  We know in advance we will not need
   a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Per-qty information tracking.

   `first_reg' and `last_reg' track the head and tail of the
   chain of registers which currently contain this quantity.

   `mode' contains the machine mode of this quantity.

   `const_rtx' holds the rtx of the constant value of this
   quantity, if known.  A summation of the frame/arg pointer
   and a constant can also be entered here.  When this holds
   a known value, `const_insn' is the insn which stored the
   constant value.

   `comparison_{code,const,qty}' are used to track when a
   comparison between a quantity and some constant or register has
   been passed.  In such a case, we know the results of the comparison
   in case we see it again.  These members record a comparison that
   is known to be true.  `comparison_code' holds the rtx code of such
   a comparison, else it is set to UNKNOWN and the other two
   comparison members are undefined.  `comparison_const' holds
   the constant being compared against, or zero if the comparison
   is not against a constant.  `comparison_qty' holds the quantity
   being compared against when the result is known.  If the comparison
   is not with a register, `comparison_qty' is -1.  */

struct qty_table_elem
{
  rtx const_rtx;
  rtx const_insn;
  rtx comparison_const;
  int comparison_qty;
  unsigned int first_reg, last_reg;
  enum machine_mode mode;
  enum rtx_code comparison_code;
};

/* The table of all qtys, indexed by qty number.  */
static struct qty_table_elem *qty_table;

#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the value last assigned to CC0.  If it should
   happen to be a constant, it is stored in preference to the actual
   assigned value.  In case it is a constant, we store the mode in which
   the constant should be interpreted.  */

static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;
#endif

/* Previous actual insn.  0 if at first insn of basic block.  */

static rtx prev_insn;

/* Insn being scanned.  */

static rtx this_insn;

/* Indexed by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If reg_qty[N] == N, reg_eqv_table[N].next is undefined.  */

/* Per-register equivalence chain.  */
struct reg_eqv_elem
{
  int next, prev;
};

/* The table of all register equivalence chains.  */
static struct reg_eqv_elem *reg_eqv_table;

struct cse_reg_info
{
  /* Next in hash chain.  */
  struct cse_reg_info *hash_next;

  /* The next cse_reg_info structure in the free or used list.  */
  struct cse_reg_info *next;

  /* Search key.  */
  unsigned int regno;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;
};

/* A free list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_free_list;

/* A used list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_used_list;
static struct cse_reg_info *cse_reg_info_used_list_end;

/* A mapping from registers to cse_reg_info data structures.  */
#define REGHASH_SHIFT	7
#define REGHASH_SIZE	(1 << REGHASH_SHIFT)
#define REGHASH_MASK	(REGHASH_SIZE - 1)
static struct cse_reg_info *reg_hash[REGHASH_SIZE];

#define REGHASH_FN(REGNO)	\
	(((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)
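
/* As a worked example (illustrative only; nothing in the code depends
   on the value): REGHASH_FN (200) is (200 ^ (200 >> 7)) & 127
   = (200 ^ 1) & 127 = 73.  */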

/* The last lookup we did into the cse_reg_info hash table.  This
   allows us to cache repeated lookups.  */
static unsigned int cached_regno;
static struct cse_reg_info *cached_cse_reg_info;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* A HARD_REG_SET containing all the hard registers that are invalidated
   by a CALL_INSN.  */

static HARD_REG_SET regs_invalidated_by_call;

/* CUID of insn that starts the basic block currently being cse-processed.  */

static int cse_basic_block_start;

/* CUID of insn that ends the basic block currently being cse-processed.  */

static int cse_basic_block_end;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
   We use them to see whether a reg is used outside a given basic block.  */

static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Nonzero if this pass has made changes, and therefore it's
   worthwhile to run the garbage collector.  */

static int cse_altered;

/* Nonzero if cse has altered conditional jump insns
   in such a way that jump optimization should be redone.  */

static int cse_jumps_altered;

/* Nonzero if we put a LABEL_REF into the hash table.  Since we may have put
   it into an INSN without a REG_LABEL, we have to rerun jump after CSE
   to put in the note.  */
static int recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

#ifdef LOAD_EXTEND_OP

/* Scratch rtl used when looking for load-extended copy of a MEM.  */
static rtx memory_extend_rtx;
#endif

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   The `canon_exp' field contains a canonical (from the point of view of
   alias analysis) version of the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   are on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */

struct table_elt
{
  rtx exp;
  rtx canon_exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  enum machine_mode mode;
  char in_memory;
  char is_const;
  char flag;
};

/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define HASH_SHIFT	5
#define HASH_SIZE	(1 << HASH_SHIFT)
#define HASH_MASK	(HASH_SIZE - 1)

/* Compute hash code of X in mode M.  Special case where X is a pseudo
   register (hard registers may require `do_not_record' to be set).  */

#define HASH(X, M)	\
 ((GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
   ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
   : canon_hash (X, M)) & HASH_MASK)
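
/* A typical use looks like the following sketch (callers in this file
   follow this pattern with their own variables):

	unsigned hash = HASH (x, mode);
	struct table_elt *elt = lookup (x, hash, mode);  */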

/* Determine whether register number N is considered a fixed register for CSE.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed.  */
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of one and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N) \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM	\
   || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM		\
   || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER)	\
   || ((N) < FIRST_PSEUDO_REGISTER					\
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

/* A register is cheap if it is a user variable assigned to the register
   or if its register number always corresponds to a cheap register.  */

#define CHEAP_REG(N) \
  ((REG_USERVAR_P (N) && REGNO (N) < FIRST_PSEUDO_REGISTER)	\
   || CHEAP_REGNO (REGNO (N)))

#define COST(X)						\
  (GET_CODE (X) == REG					\
   ? (CHEAP_REG (X) ? 0					\
      : REGNO (X) >= FIRST_PSEUDO_REGISTER ? 1		\
      : 2)						\
   : notreg_cost (X))

/* Get the info associated with register N.  */

#define GET_CSE_REG_INFO(N)			\
  (((N) == cached_regno && cached_cse_reg_info)	\
   ? cached_cse_reg_info : get_cse_reg_info ((N)))

/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)

/* Get the quantity number for REG.  */

#define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)

/* Determine if the quantity number for register N represents a valid index
   into the qty_table.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) != (int) (N))

static struct table_elt *table[HASH_SIZE];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Number of `struct table_elt' structures made so far for this function.  */

static int n_elements_made;

/* Maximum value `n_elements_made' has had so far in this compilation
   for functions previously processed.  */

static int max_elements_made;

/* Surviving equivalence class when two equivalence classes are merged
   by recording the effects of a jump in the last insn.  Zero if the
   last insn was not a conditional jump.  */

static struct table_elt *last_jump_equiv_class;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;

/* Define maximum length of a branch path.  */

#define PATHLENGTH	10

/* This data describes a block that will be processed by cse_basic_block.  */

struct cse_basic_block_data
{
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path
    {
      /* The branch insn.  */
      rtx branch;
      /* Whether it should be taken or not.  AROUND is the same as taken
	 except that it is used when the destination label is not preceded
	 by a BARRIER.  */
      enum taken {TAKEN, NOT_TAKEN, AROUND} status;
    } path[PATHLENGTH];
};

/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.

   ?!? FIXED_BASE_PLUS_P and NONZERO_BASE_PLUS_P need to move into
   a header file so that their definitions can be shared with the
   simplification routines in simplify-rtx.c.  Until then, do not
   change these macros without also changing the copy in simplify-rtx.c.  */

#define FIXED_BASE_PLUS_P(X)					\
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx	\
   || ((X) == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]) \
   || (X) == virtual_stack_vars_rtx				\
   || (X) == virtual_incoming_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx			\
	   || XEXP (X, 0) == hard_frame_pointer_rtx		\
	   || ((X) == arg_pointer_rtx				\
	       && fixed_regs[ARG_POINTER_REGNUM])		\
	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
	   || XEXP (X, 0) == virtual_incoming_args_rtx))	\
   || GET_CODE (X) == ADDRESSOF)

/* Similar, but also allows reference to the stack pointer.

   This used to include FIXED_BASE_PLUS_P, however, we can't assume that
   arg_pointer_rtx by itself is nonzero, because on at least one machine,
   the i960, the arg pointer is zero when it is unused.  */

#define NONZERO_BASE_PLUS_P(X)					\
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx	\
   || (X) == virtual_stack_vars_rtx				\
   || (X) == virtual_incoming_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx			\
	   || XEXP (X, 0) == hard_frame_pointer_rtx		\
	   || ((X) == arg_pointer_rtx				\
	       && fixed_regs[ARG_POINTER_REGNUM])		\
	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
	   || XEXP (X, 0) == virtual_incoming_args_rtx))	\
   || (X) == stack_pointer_rtx					\
   || (X) == virtual_stack_dynamic_rtx				\
   || (X) == virtual_outgoing_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == stack_pointer_rtx			\
	   || XEXP (X, 0) == virtual_stack_dynamic_rtx		\
	   || XEXP (X, 0) == virtual_outgoing_args_rtx))	\
   || GET_CODE (X) == ADDRESSOF)

static int notreg_cost		PARAMS ((rtx));
static void new_basic_block	PARAMS ((void));
static void make_new_qty	PARAMS ((unsigned int, enum machine_mode));
static void make_regs_eqv	PARAMS ((unsigned int, unsigned int));
static void delete_reg_equiv	PARAMS ((unsigned int));
static int mention_regs	PARAMS ((rtx));
static int insert_regs		PARAMS ((rtx, struct table_elt *, int));
static void remove_from_table	PARAMS ((struct table_elt *, unsigned));
static struct table_elt *lookup	PARAMS ((rtx, unsigned, enum machine_mode)),
       *lookup_for_remove PARAMS ((rtx, unsigned, enum machine_mode));
static rtx lookup_as_function	PARAMS ((rtx, enum rtx_code));
static struct table_elt *insert PARAMS ((rtx, struct table_elt *, unsigned,
					 enum machine_mode));
static void merge_equiv_classes PARAMS ((struct table_elt *,
					 struct table_elt *));
static void invalidate		PARAMS ((rtx, enum machine_mode));
static int cse_rtx_varies_p	PARAMS ((rtx));
static void remove_invalid_refs	PARAMS ((unsigned int));
static void remove_invalid_subreg_refs	PARAMS ((unsigned int, unsigned int,
						 enum machine_mode));
static void rehash_using_reg	PARAMS ((rtx));
static void invalidate_memory	PARAMS ((void));
static void invalidate_for_call	PARAMS ((void));
static rtx use_related_value	PARAMS ((rtx, struct table_elt *));
static unsigned canon_hash	PARAMS ((rtx, enum machine_mode));
static unsigned safe_hash	PARAMS ((rtx, enum machine_mode));
static int exp_equiv_p		PARAMS ((rtx, rtx, int, int));
static rtx canon_reg		PARAMS ((rtx, rtx));
static void find_best_addr	PARAMS ((rtx, rtx *, enum machine_mode));
static enum rtx_code find_comparison_args PARAMS ((enum rtx_code, rtx *, rtx *,
						   enum machine_mode *,
						   enum machine_mode *));
static rtx fold_rtx		PARAMS ((rtx, rtx));
static rtx equiv_constant	PARAMS ((rtx));
static void record_jump_equiv	PARAMS ((rtx, int));
static void record_jump_cond	PARAMS ((enum rtx_code, enum machine_mode,
					 rtx, rtx, int));
static void cse_insn		PARAMS ((rtx, rtx));
static int addr_affects_sp_p	PARAMS ((rtx));
static void invalidate_from_clobbers PARAMS ((rtx));
static rtx cse_process_notes	PARAMS ((rtx, rtx));
static void cse_around_loop	PARAMS ((rtx));
static void invalidate_skipped_set PARAMS ((rtx, rtx, void *));
static void invalidate_skipped_block PARAMS ((rtx));
static void cse_check_loop_start PARAMS ((rtx, rtx, void *));
static void cse_set_around_loop	PARAMS ((rtx, rtx, rtx));
static rtx cse_basic_block	PARAMS ((rtx, rtx, struct branch_path *, int));
static void count_reg_usage	PARAMS ((rtx, int *, rtx, int));
extern void dump_class		PARAMS ((struct table_elt*));
static struct cse_reg_info * get_cse_reg_info PARAMS ((unsigned int));
static int check_dependence	PARAMS ((rtx *, void *));

static void flush_hash_table	PARAMS ((void));
\f
/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
void
dump_class (classp)
     struct table_elt *classp;
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}

/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (x)
     rtx x;
{
  return ((GET_CODE (x) == SUBREG
	   && GET_CODE (SUBREG_REG (x)) == REG
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
	   && (GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	   && subreg_lowpart_p (x)
	   && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
				     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
	  ? (CHEAP_REG (SUBREG_REG (x)) ? 0
	     : (REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER ? 1
		: 2))
	  : rtx_cost (x, SET) * 2);
}
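
/* A hypothetical instance of the special case above: with 32-bit SImode
   and 64-bit DImode, (subreg:SI (reg:DI 100) 0) is a no-op truncation on
   most targets, so it is priced like the inner register itself instead
   of going through rtx_cost.  */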

/* Return the right cost to give to an operation
   to make the cost of the corresponding register-to-register instruction
   N times that of a fast register-to-register instruction.  */

#define COSTS_N_INSNS(N) ((N) * 4 - 2)

/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.  */

int
rtx_cost (x, outer_code)
     rtx x;
     enum rtx_code outer_code ATTRIBUTE_UNUSED;
{
  register int i, j;
  register enum rtx_code code;
  register const char *fmt;
  register int total;

  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that RTX_COSTS can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      /* Count multiplication by 2**n as a shift,
	 because if we are considering it, we would output it as a shift.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	total = 2;
      else
	total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker.  */
      total = 0;
      break;
    case ASM_OPERANDS:
      /* We don't want these to be used in substitutions because
	 we have no way of validating the resulting insn.  So assign
	 anything containing an ASM_OPERANDS a very high cost.  */
      total = 1000;
      break;
    default:
      total = 2;
    }

  switch (code)
    {
    case REG:
      return ! CHEAP_REG (x);

    case SUBREG:
      /* If we can't tie these modes, make this expensive.  The larger
	 the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
	return COSTS_N_INSNS (2
			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      return 2;
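
      /* Note: RTX_COSTS, CONST_COSTS and DEFAULT_RTX_COSTS are expected
	 to expand to `case' labels with their own return statements,
	 which is why they may legally follow the unconditional return
	 above.  */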
#ifdef RTX_COSTS
      RTX_COSTS (x, code, outer_code);
#endif
#ifdef CONST_COSTS
      CONST_COSTS (x, code, outer_code);
#endif

    default:
#ifdef DEFAULT_RTX_COSTS
      DEFAULT_RTX_COSTS (x, code, outer_code);
#endif
      break;
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}
\f
/* Return cost of address expression X.
   Expect that X is a properly formed address reference.  */

int
address_cost (x, mode)
     rtx x;
     enum machine_mode mode;
{
  /* The ADDRESS_COST macro does not deal with ADDRESSOF nodes.  But,
     during CSE, such nodes are present.  Using an ADDRESSOF node which
     refers to the address of a REG is a good thing because we can then
     turn (MEM (ADDRESSOF (REG))) into just plain REG.  */

  if (GET_CODE (x) == ADDRESSOF && REG_P (XEXP ((x), 0)))
    return -1;

  /* We may be asked for the cost of various unusual addresses, such as
     operands of a push instruction.  It is not worthwhile to complicate
     the writing of the ADDRESS_COST macro with such cases.  */

  if (!memory_address_p (mode, x))
    return 1000;
#ifdef ADDRESS_COST
  return ADDRESS_COST (x);
#else
  return rtx_cost (x, MEM);
#endif
}
\f
static struct cse_reg_info *
get_cse_reg_info (regno)
     unsigned int regno;
{
  struct cse_reg_info **hash_head = &reg_hash[REGHASH_FN (regno)];
  struct cse_reg_info *p;

  for (p = *hash_head; p != NULL; p = p->hash_next)
    if (p->regno == regno)
      break;

  if (p == NULL)
    {
      /* Get a new cse_reg_info structure.  */
      if (cse_reg_info_free_list)
	{
	  p = cse_reg_info_free_list;
	  cse_reg_info_free_list = p->next;
	}
      else
	p = (struct cse_reg_info *) xmalloc (sizeof (struct cse_reg_info));

      /* Insert into hash table.  */
      p->hash_next = *hash_head;
      *hash_head = p;

      /* Initialize it.  */
      p->reg_tick = 1;
      p->reg_in_table = -1;
      p->reg_qty = regno;
      p->regno = regno;
      p->next = cse_reg_info_used_list;
      cse_reg_info_used_list = p;
      if (!cse_reg_info_used_list_end)
	cse_reg_info_used_list_end = p;
    }

  /* Cache this lookup; we tend to be looking up information about the
     same register several times in a row.  */
  cached_regno = regno;
  cached_cse_reg_info = p;

  return p;
}

/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block ()
{
  register int i;

  next_qty = max_reg;

  /* Clear out hash table state for this pass.  */

  bzero ((char *) reg_hash, sizeof reg_hash);

  if (cse_reg_info_used_list)
    {
      cse_reg_info_used_list_end->next = cse_reg_info_free_list;
      cse_reg_info_free_list = cse_reg_info_used_list;
      cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
    }
  cached_cse_reg_info = 0;

  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < HASH_SIZE; i++)
    {
      struct table_elt *first;

      first = table[i];
      if (first != NULL)
	{
	  struct table_elt *last = first;

	  table[i] = NULL;

	  while (last->next_same_hash != NULL)
	    last = last->next_same_hash;

	  /* Now relink this entire hash chain into
	     the free element list.  */

	  last->next_same_hash = free_element_chain;
	  free_element_chain = first;
	}
    }

  prev_insn = 0;

#ifdef HAVE_cc0
  prev_insn_cc0 = 0;
#endif
}

/* Say that register REG contains a quantity in mode MODE not in any
   register before and initialize that quantity.  */

static void
make_new_qty (reg, mode)
     unsigned int reg;
     enum machine_mode mode;
{
  register int q;
  register struct qty_table_elem *ent;
  register struct reg_eqv_elem *eqv;

  if (next_qty >= max_qty)
    abort ();

  q = REG_QTY (reg) = next_qty++;
  ent = &qty_table[q];
  ent->first_reg = reg;
  ent->last_reg = reg;
  ent->mode = mode;
  ent->const_rtx = ent->const_insn = NULL_RTX;
  ent->comparison_code = UNKNOWN;

  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (new, old)
     unsigned int new, old;
{
  unsigned int lastr, firstr;
  int q = REG_QTY (old);
  struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  REG_QTY (new) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
		       || (uid_cuid[REGNO_FIRST_UID (new)]
			   < cse_basic_block_start))
		      && (uid_cuid[REGNO_LAST_UID (new)]
			  > uid_cuid[REGNO_LAST_UID (firstr)]))))))
    {
      reg_eqv_table[firstr].prev = new;
      reg_eqv_table[new].next = firstr;
      reg_eqv_table[new].prev = -1;
      ent->first_reg = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_eqv_table[lastr].prev;
      reg_eqv_table[new].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
	reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
      else
	qty_table[q].last_reg = new;
      reg_eqv_table[lastr].next = new;
      reg_eqv_table[new].prev = lastr;
    }
}
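
/* A hypothetical illustration of the insertion rules above: if a
   quantity's chain is pseudo 100 followed by non-fixed hard reg 7, and
   pseudo 102 becomes equivalent but does not outlive reg 100, it is
   inserted before the trailing hard reg, giving the chain 100, 102, 7.  */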

/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (reg)
     unsigned int reg;
{
  register struct qty_table_elem *ent;
  register int q = REG_QTY (reg);
  register int p, n;

  /* If invalid, do nothing.  */
  if (q == (int) reg)
    return;

  ent = &qty_table[q];

  p = reg_eqv_table[reg].prev;
  n = reg_eqv_table[reg].next;

  if (n != -1)
    reg_eqv_table[n].prev = p;
  else
    ent->last_reg = p;
  if (p != -1)
    reg_eqv_table[p].next = n;
  else
    ent->first_reg = n;

  REG_QTY (reg) = reg;
}

/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (x)
     rtx x;
{
  register enum rtx_code code;
  register int i, j;
  register const char *fmt;
  register int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
      unsigned int i;

      for (i = regno; i < endregno; i++)
	{
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      unsigned int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If reg_tick has been incremented more than once since
	     reg_in_table was last set, that means that the entire
	     register has been set before, so discard anything memorized
	     for the entire register, including all SUBREG expressions.  */
	  if (REG_IN_TABLE (i) != REG_TICK (i) - 1)
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_WORD (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || GET_RTX_CLASS (code) == '<')
    {
      if (GET_CODE (XEXP (x, 0)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (GET_CODE (XEXP (x, 1)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}

/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (x, classp, modified)
     rtx x;
     struct table_elt *classp;
     int modified;
{
  if (GET_CODE (x) == REG)
    {
      unsigned int regno = REGNO (x);
      int qty_valid;

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      qty_valid = REGNO_QTY_VALID_P (regno);
      if (qty_valid)
	{
	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];

	  if (ent->mode != GET_MODE (x))
	    return 0;
	}

      if (modified || ! qty_valid)
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (GET_CODE (classp->exp) == REG
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  make_regs_eqv (regno, REGNO (classp->exp));
		  return 1;
		}

	  /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
	     than REG_IN_TABLE to find out if there was only a single preceding
	     invalidation - for the SUBREG - or another one, which would be
	     for the full register.  However, if we find here that REG_TICK
	     indicates that the register is invalid, it means that it has
	     been invalidated in a separate operation.  The SUBREG might be used
	     now (then this is a recursive call), or we might use the full REG
	     now and a SUBREG of it later.  So bump up REG_TICK so that
	     mention_regs will do the right thing.  */
	  if (! modified
	      && REG_IN_TABLE (regno) >= 0
	      && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	    REG_TICK (regno)++;
	  make_new_qty (regno, GET_MODE (x));
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL_PTR, 0);
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}
\f
/* Look in or update the hash table.  */

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (elt, hash)
     register struct table_elt *elt;
     unsigned hash;
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    register struct table_elt *prev = elt->prev_same_value;
    register struct table_elt *next = elt->next_same_value;

    if (next)
      next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	register struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    register struct table_elt *prev = elt->prev_same_hash;
    register struct table_elt *next = elt->next_same_hash;

    if (next)
      next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < HASH_SIZE; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      register struct table_elt *p = elt->related_value;

      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  /* Now add it to the free element chain.  */
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}

/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
			    || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
      return p;

  return 0;
}

/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  if (GET_CODE (x) == REG)
    {
      unsigned int regno = REGNO (x);

      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (GET_CODE (p->exp) == REG
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
	  return p;
    }

  return 0;
}

/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (x, code)
     rtx x;
     enum rtx_code code;
{
  register struct table_elt *p
    = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, GET_MODE (x));

  /* If we are looking for a CONST_INT, the mode doesn't really matter, as
     long as we are narrowing.  So if we looked in vain for a mode narrower
     than word_mode before, look for word_mode now.  */
  if (p == 0 && code == CONST_INT
      && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
    {
      x = copy_rtx (x);
      PUT_MODE (x, word_mode);
      p = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, word_mode);
    }

  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    if (GET_CODE (p->exp) == code
	/* Make sure this is a valid entry in the table.  */
	&& exp_equiv_p (p->exp, p->exp, 1, 0))
      return p->exp;

  return 0;
}
1457
1458 /* Insert X in the hash table, assuming HASH is its hash code
1459 and CLASSP is an element of the class it should go in
1460 (or 0 if a new class should be made).
1461 It is inserted at the proper position to keep the class in
1462 the order cheapest first.
1463
1464 MODE is the machine-mode of X, or if X is an integer constant
1465 with VOIDmode then MODE is the mode with which X will be used.
1466
1467 For elements of equal cheapness, the most recent one
1468 goes in front, except that the first element in the list
1469 remains first unless a cheaper element is added. The order of
1470 pseudo-registers does not matter, as canon_reg will be called to
1471 find the cheapest when a register is retrieved from the table.
1472
1473 The in_memory field in the hash table element is set to 0.
1474 The caller must set it nonzero if appropriate.
1475
1476 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1477 and if insert_regs returns a nonzero value
1478 you must then recompute its hash code before calling here.
1479
1480 If necessary, update table showing constant values of quantities. */
1481
1482 #define CHEAPER(X,Y) ((X)->cost < (Y)->cost)
1483
1484 static struct table_elt *
1485 insert (x, classp, hash, mode)
1486 register rtx x;
1487 register struct table_elt *classp;
1488 unsigned hash;
1489 enum machine_mode mode;
1490 {
1491 register struct table_elt *elt;
1492
1493 /* If X is a register and we haven't made a quantity for it,
1494 something is wrong. */
1495 if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
1496 abort ();
1497
1498 /* If X is a hard register, show it is being put in the table. */
1499 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
1500 {
1501 unsigned int regno = REGNO (x);
1502 unsigned int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1503 unsigned int i;
1504
1505 for (i = regno; i < endregno; i++)
1506 SET_HARD_REG_BIT (hard_regs_in_table, i);
1507 }
1508
1509 /* If X is a label, show we recorded it. */
1510 if (GET_CODE (x) == LABEL_REF
1511 || (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS
1512 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF))
1513 recorded_label_ref = 1;
1514
1515 /* Put an element for X into the right hash bucket. */
1516
1517 elt = free_element_chain;
1518 if (elt)
1519 free_element_chain = elt->next_same_hash;
1520 else
1521 {
1522 n_elements_made++;
1523 elt = (struct table_elt *) oballoc (sizeof (struct table_elt));
1524 }
1525
1526 elt->exp = x;
1527 elt->canon_exp = NULL_RTX;
1528 elt->cost = COST (x);
1529 elt->next_same_value = 0;
1530 elt->prev_same_value = 0;
1531 elt->next_same_hash = table[hash];
1532 elt->prev_same_hash = 0;
1533 elt->related_value = 0;
1534 elt->in_memory = 0;
1535 elt->mode = mode;
1536 elt->is_const = (CONSTANT_P (x)
1537 /* GNU C++ takes advantage of this for `this'
1538 (and other const values). */
1539 || (RTX_UNCHANGING_P (x)
1540 && GET_CODE (x) == REG
1541 && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1542 || FIXED_BASE_PLUS_P (x));
1543
1544 if (table[hash])
1545 table[hash]->prev_same_hash = elt;
1546 table[hash] = elt;
1547
1548 /* Put it into the proper value-class. */
1549 if (classp)
1550 {
1551 classp = classp->first_same_value;
1552 if (CHEAPER (elt, classp))
1553 /* Insert at the head of the class */
1554 {
1555 register struct table_elt *p;
1556 elt->next_same_value = classp;
1557 classp->prev_same_value = elt;
1558 elt->first_same_value = elt;
1559
1560 for (p = classp; p; p = p->next_same_value)
1561 p->first_same_value = elt;
1562 }
1563 else
1564 {
1565 /* Insert not at head of the class. */
1566 /* Put it after the last element cheaper than X. */
1567 register struct table_elt *p, *next;
1568
1569 for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1570 p = next);
1571
1572 /* Put it after P and before NEXT. */
1573 elt->next_same_value = next;
1574 if (next)
1575 next->prev_same_value = elt;
1576
1577 elt->prev_same_value = p;
1578 p->next_same_value = elt;
1579 elt->first_same_value = classp;
1580 }
1581 }
1582 else
1583 elt->first_same_value = elt;
1584
1585 /* If this is a constant being set equivalent to a register or a register
1586 being set equivalent to a constant, note the constant equivalence.
1587
1588 If this is a constant, it cannot be equivalent to a different constant,
1589 and a constant is the only thing that can be cheaper than a register. So
1590 we know the register is the head of the class (before the constant was
1591 inserted).
1592
1593 If this is a register that is not already known equivalent to a
1594 constant, we must check the entire class.
1595
1596 If this is a register that is already known equivalent to an insn,
1597 update the qtys `const_insn' to show that `this_insn' is the latest
1598 insn making that quantity equivalent to the constant. */
1599
1600 if (elt->is_const && classp && GET_CODE (classp->exp) == REG
1601 && GET_CODE (x) != REG)
1602 {
1603 int exp_q = REG_QTY (REGNO (classp->exp));
1604 struct qty_table_elem *exp_ent = &qty_table[exp_q];
1605
1606 exp_ent->const_rtx = gen_lowpart_if_possible (exp_ent->mode, x);
1607 exp_ent->const_insn = this_insn;
1608 }
1609
1610 else if (GET_CODE (x) == REG
1611 && classp
1612 && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1613 && ! elt->is_const)
1614 {
1615 register struct table_elt *p;
1616
1617 for (p = classp; p != 0; p = p->next_same_value)
1618 {
1619 if (p->is_const && GET_CODE (p->exp) != REG)
1620 {
1621 int x_q = REG_QTY (REGNO (x));
1622 struct qty_table_elem *x_ent = &qty_table[x_q];
1623
1624 x_ent->const_rtx
1625 = gen_lowpart_if_possible (GET_MODE (x), p->exp);
1626 x_ent->const_insn = this_insn;
1627 break;
1628 }
1629 }
1630 }
1631
1632 else if (GET_CODE (x) == REG
1633 && qty_table[REG_QTY (REGNO (x))].const_rtx
1634 && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1635 qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1636
1637 /* If this is a constant with symbolic value,
1638 and it has a term with an explicit integer value,
1639 link it up with related expressions. */
1640 if (GET_CODE (x) == CONST)
1641 {
1642 rtx subexp = get_related_value (x);
1643 unsigned subhash;
1644 struct table_elt *subelt, *subelt_prev;
1645
1646 if (subexp != 0)
1647 {
1648 /* Get the integer-free subexpression in the hash table. */
1649 subhash = safe_hash (subexp, mode) & HASH_MASK;
1650 subelt = lookup (subexp, subhash, mode);
1651 if (subelt == 0)
1652 subelt = insert (subexp, NULL_PTR, subhash, mode);
1653 /* Initialize SUBELT's circular chain if it has none. */
1654 if (subelt->related_value == 0)
1655 subelt->related_value = subelt;
1656 /* Find the element in the circular chain that precedes SUBELT. */
1657 subelt_prev = subelt;
1658 while (subelt_prev->related_value != subelt)
1659 subelt_prev = subelt_prev->related_value;
1660 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1661 This way the element that follows SUBELT is the oldest one. */
1662 elt->related_value = subelt_prev->related_value;
1663 subelt_prev->related_value = elt;
1664 }
1665 }
1666
1667 return elt;
1668 }
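/* Illustrative sketch, not part of the compiler: `related_value' forms a
   one-way circular chain, and the code above splices a new element in just
   before SUBELT so that the element following SUBELT stays the oldest.
   The standalone code below shows the same splice on a hypothetical
   `struct relnode' (the name and type are inventions for this example).  */
#if 0
#include <stddef.h>

struct relnode
{
  int id;
  struct relnode *related_value;	/* one-way circular chain, or NULL */
};

/* Link NEW_NODE into HEAD's circular chain just before HEAD.  */
static void
link_related (head, new_node)
     struct relnode *head, *new_node;
{
  struct relnode *prev = head;

  /* Initialize HEAD's chain as a singleton cycle if it has none.  */
  if (head->related_value == NULL)
    head->related_value = head;

  /* Find the element that precedes HEAD.  */
  while (prev->related_value != head)
    prev = prev->related_value;

  /* Splice NEW_NODE in between PREV and HEAD.  */
  new_node->related_value = prev->related_value;
  prev->related_value = new_node;
}
#endif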
1669 \f
1670 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1671 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1672 the two classes equivalent.
1673
1674 CLASS1 will be the surviving class; CLASS2 should not be used after this
1675 call.
1676
1677 Any invalid entries in CLASS2 will not be copied. */
1678
1679 static void
1680 merge_equiv_classes (class1, class2)
1681 struct table_elt *class1, *class2;
1682 {
1683 struct table_elt *elt, *next, *new;
1684
1685 /* Ensure we start with the head of the classes. */
1686 class1 = class1->first_same_value;
1687 class2 = class2->first_same_value;
1688
1689 /* If they were already equal, forget it. */
1690 if (class1 == class2)
1691 return;
1692
1693 for (elt = class2; elt; elt = next)
1694 {
1695 unsigned int hash;
1696 rtx exp = elt->exp;
1697 enum machine_mode mode = elt->mode;
1698
1699 next = elt->next_same_value;
1700
1701 /* Remove old entry, make a new one in CLASS1's class.
1702 Don't do this for invalid entries as we cannot find their
1703 hash code (it also isn't necessary). */
1704 if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
1705 {
1706 hash_arg_in_memory = 0;
1707 hash = HASH (exp, mode);
1708
1709 if (GET_CODE (exp) == REG)
1710 delete_reg_equiv (REGNO (exp));
1711
1712 remove_from_table (elt, hash);
1713
1714 if (insert_regs (exp, class1, 0))
1715 {
1716 rehash_using_reg (exp);
1717 hash = HASH (exp, mode);
1718 }
1719 new = insert (exp, class1, hash, mode);
1720 new->in_memory = hash_arg_in_memory;
1721 }
1722 }
1723 }
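/* Illustrative sketch, not part of the compiler: merge_equiv_classes above
   (and flush_hash_table below) must capture an element's successor before
   removing it, since removal may relink or free the element.  A generic
   form of the idiom, using a hypothetical `struct lnode' in place of
   table_elt:  */
#if 0
#include <stdlib.h>

struct lnode
{
  int key;
  struct lnode *next;
};

/* Remove every node whose key is KEY from the chain rooted at *HEADP.  */
static void
remove_matching (headp, key)
     struct lnode **headp;
     int key;
{
  struct lnode *p, *next;

  for (p = *headp; p; p = next)
    {
      next = p->next;		/* save the successor before freeing P */
      if (p->key == key)
	{
	  *headp = next;	/* HEADP points at P here, so this unlinks P */
	  free (p);
	}
      else
	headp = &p->next;	/* advance the unlink position */
    }
}
#endif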
1724 \f
1725 /* Flush the entire hash table. */
1726
1727 static void
1728 flush_hash_table ()
1729 {
1730 int i;
1731 struct table_elt *p;
1732
1733 for (i = 0; i < HASH_SIZE; i++)
1734 for (p = table[i]; p; p = table[i])
1735 {
1736 /* Note that invalidate can remove elements
1737 after P in the current hash chain. */
1738 if (GET_CODE (p->exp) == REG)
1739 invalidate (p->exp, p->mode);
1740 else
1741 remove_from_table (p, i);
1742 }
1743 }
1744 \f
/* Function called for each rtx to check whether a true dependence exists.  */
1746 struct check_dependence_data
1747 {
1748 enum machine_mode mode;
1749 rtx exp;
1750 };
1751 static int
1752 check_dependence (x, data)
1753 rtx *x;
1754 void *data;
1755 {
1756 struct check_dependence_data *d = (struct check_dependence_data *) data;
1757 if (*x && GET_CODE (*x) == MEM)
1758 return true_dependence (d->exp, d->mode, *x, cse_rtx_varies_p);
1759 else
1760 return 0;
1761 }
1762 \f
1763 /* Remove from the hash table, or mark as invalid, all expressions whose
1764 values could be altered by storing in X. X is a register, a subreg, or
1765 a memory reference with nonvarying address (because, when a memory
1766 reference with a varying address is stored in, all memory references are
1767 removed by invalidate_memory so specific invalidation is superfluous).
1768 FULL_MODE, if not VOIDmode, indicates that this much should be
1769 invalidated instead of just the amount indicated by the mode of X. This
1770 is only used for bitfield stores into memory.
1771
1772 A nonvarying address may be just a register or just a symbol reference,
1773 or it may be either of those plus a numeric offset. */
1774
1775 static void
1776 invalidate (x, full_mode)
1777 rtx x;
1778 enum machine_mode full_mode;
1779 {
1780 register int i;
1781 register struct table_elt *p;
1782
1783 switch (GET_CODE (x))
1784 {
1785 case REG:
1786 {
1787 /* If X is a register, dependencies on its contents are recorded
1788 through the qty number mechanism. Just change the qty number of
1789 the register, mark it as invalid for expressions that refer to it,
1790 and remove it itself. */
1791 unsigned int regno = REGNO (x);
1792 unsigned int hash = HASH (x, GET_MODE (x));
1793
1794 /* Remove REGNO from any quantity list it might be on and indicate
1795 that its value might have changed. If it is a pseudo, remove its
1796 entry from the hash table.
1797
1798 For a hard register, we do the first two actions above for any
1799 additional hard registers corresponding to X. Then, if any of these
1800 registers are in the table, we must remove any REG entries that
1801 overlap these registers. */
1802
1803 delete_reg_equiv (regno);
1804 REG_TICK (regno)++;
1805
1806 if (regno >= FIRST_PSEUDO_REGISTER)
1807 {
1808 /* Because a register can be referenced in more than one mode,
1809 we might have to remove more than one table entry. */
1810 struct table_elt *elt;
1811
1812 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1813 remove_from_table (elt, hash);
1814 }
1815 else
1816 {
1817 HOST_WIDE_INT in_table
1818 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1819 unsigned int endregno
1820 = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1821 unsigned int tregno, tendregno, rn;
1822 register struct table_elt *p, *next;
1823
1824 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1825
1826 for (rn = regno + 1; rn < endregno; rn++)
1827 {
1828 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1829 CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1830 delete_reg_equiv (rn);
1831 REG_TICK (rn)++;
1832 }
1833
1834 if (in_table)
1835 for (hash = 0; hash < HASH_SIZE; hash++)
1836 for (p = table[hash]; p; p = next)
1837 {
1838 next = p->next_same_hash;
1839
1840 if (GET_CODE (p->exp) != REG
1841 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1842 continue;
1843
1844 tregno = REGNO (p->exp);
1845 tendregno
1846 = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1847 if (tendregno > regno && tregno < endregno)
1848 remove_from_table (p, hash);
1849 }
1850 }
1851 }
1852 return;
1853
1854 case SUBREG:
1855 invalidate (SUBREG_REG (x), VOIDmode);
1856 return;
1857
1858 case PARALLEL:
1859 for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1860 invalidate (XVECEXP (x, 0, i), VOIDmode);
1861 return;
1862
1863 case EXPR_LIST:
1864 /* This is part of a disjoint return value; extract the location in
1865 question ignoring the offset. */
1866 invalidate (XEXP (x, 0), VOIDmode);
1867 return;
1868
1869 case MEM:
1870 /* Calculate the canonical version of X here so that
1871 true_dependence doesn't generate new RTL for X on each call. */
1872 x = canon_rtx (x);
1873
1874 /* Remove all hash table elements that refer to overlapping pieces of
1875 memory. */
1876 if (full_mode == VOIDmode)
1877 full_mode = GET_MODE (x);
1878
1879 for (i = 0; i < HASH_SIZE; i++)
1880 {
1881 register struct table_elt *next;
1882
1883 for (p = table[i]; p; p = next)
1884 {
1885 next = p->next_same_hash;
1886 if (p->in_memory)
1887 {
1888 struct check_dependence_data d;
1889
		  /* Just canonicalize the expression once;
		     otherwise, each time invalidate is called,
		     true_dependence will canonicalize the
		     expression again.  */
1894 if (!p->canon_exp)
1895 p->canon_exp = canon_rtx (p->exp);
1896 d.exp = x;
1897 d.mode = full_mode;
1898 if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1899 remove_from_table (p, i);
1900 }
1901 }
1902 }
1903 return;
1904
1905 default:
1906 abort ();
1907 }
1908 }
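/* Illustrative sketch, not part of the compiler: the hard-register test
   above (`tendregno > regno && tregno < endregno') is the standard overlap
   check for half-open ranges.  In standalone form, under the hypothetical
   name `regs_overlap_p':  */
#if 0
/* Two half-open ranges [R1, E1) and [R2, E2) overlap iff each one starts
   before the other ends.  */
static int
regs_overlap_p (r1, e1, r2, e2)
     unsigned int r1, e1, r2, e2;
{
  return e1 > r2 && r1 < e2;
}

/* For example, a value occupying hard regs 2..3 overlaps one in reg 3,
   regs_overlap_p (2, 4, 3, 4) == 1, but not one in regs 4..5,
   regs_overlap_p (2, 4, 4, 6) == 0.  */
#endif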
1909 \f
1910 /* Remove all expressions that refer to register REGNO,
1911 since they are already invalid, and we are about to
1912 mark that register valid again and don't want the old
1913 expressions to reappear as valid. */
1914
1915 static void
1916 remove_invalid_refs (regno)
1917 unsigned int regno;
1918 {
1919 unsigned int i;
1920 struct table_elt *p, *next;
1921
1922 for (i = 0; i < HASH_SIZE; i++)
1923 for (p = table[i]; p; p = next)
1924 {
1925 next = p->next_same_hash;
1926 if (GET_CODE (p->exp) != REG
1927 && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
1928 remove_from_table (p, i);
1929 }
1930 }
1931
/* Likewise for a subreg of register REGNO whose word offset is WORD and
   whose mode is MODE.  */
1933 static void
1934 remove_invalid_subreg_refs (regno, word, mode)
1935 unsigned int regno;
1936 unsigned int word;
1937 enum machine_mode mode;
1938 {
1939 unsigned int i;
1940 struct table_elt *p, *next;
1941 unsigned int end = word + (GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD;
1942
1943 for (i = 0; i < HASH_SIZE; i++)
1944 for (p = table[i]; p; p = next)
1945 {
1946 rtx exp;
1947 next = p->next_same_hash;
1948
1949 exp = p->exp;
1950 if (GET_CODE (p->exp) != REG
1951 && (GET_CODE (exp) != SUBREG
1952 || GET_CODE (SUBREG_REG (exp)) != REG
1953 || REGNO (SUBREG_REG (exp)) != regno
1954 || (((SUBREG_WORD (exp)
1955 + (GET_MODE_SIZE (GET_MODE (exp)) - 1) / UNITS_PER_WORD)
1956 >= word)
1957 && SUBREG_WORD (exp) <= end))
1958 && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
1959 remove_from_table (p, i);
1960 }
1961 }
1962 \f
1963 /* Recompute the hash codes of any valid entries in the hash table that
1964 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1965
1966 This is called when we make a jump equivalence. */
1967
1968 static void
1969 rehash_using_reg (x)
1970 rtx x;
1971 {
1972 unsigned int i;
1973 struct table_elt *p, *next;
1974 unsigned hash;
1975
1976 if (GET_CODE (x) == SUBREG)
1977 x = SUBREG_REG (x);
1978
1979 /* If X is not a register or if the register is known not to be in any
1980 valid entries in the table, we have no work to do. */
1981
1982 if (GET_CODE (x) != REG
1983 || REG_IN_TABLE (REGNO (x)) < 0
1984 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
1985 return;
1986
1987 /* Scan all hash chains looking for valid entries that mention X.
1988 If we find one and it is in the wrong hash chain, move it. We can skip
1989 objects that are registers, since they are handled specially. */
1990
1991 for (i = 0; i < HASH_SIZE; i++)
1992 for (p = table[i]; p; p = next)
1993 {
1994 next = p->next_same_hash;
1995 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
1996 && exp_equiv_p (p->exp, p->exp, 1, 0)
1997 && i != (hash = safe_hash (p->exp, p->mode) & HASH_MASK))
1998 {
1999 if (p->next_same_hash)
2000 p->next_same_hash->prev_same_hash = p->prev_same_hash;
2001
2002 if (p->prev_same_hash)
2003 p->prev_same_hash->next_same_hash = p->next_same_hash;
2004 else
2005 table[i] = p->next_same_hash;
2006
2007 p->next_same_hash = table[hash];
2008 p->prev_same_hash = 0;
2009 if (table[hash])
2010 table[hash]->prev_same_hash = p;
2011 table[hash] = p;
2012 }
2013 }
2014 }
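/* Illustrative sketch, not part of the compiler: the pointer surgery above
   unlinks an element from one doubly-linked bucket and pushes it on the
   front of another.  The same operation on a hypothetical `struct hnode':  */
#if 0
#include <stddef.h>

struct hnode
{
  struct hnode *next_same_hash, *prev_same_hash;
};

/* Move NODE from the bucket rooted at *OLDB to the front of *NEWB.  */
static void
move_bucket (oldb, newb, node)
     struct hnode **oldb, **newb, *node;
{
  /* Detach NODE from the old chain.  */
  if (node->next_same_hash)
    node->next_same_hash->prev_same_hash = node->prev_same_hash;
  if (node->prev_same_hash)
    node->prev_same_hash->next_same_hash = node->next_same_hash;
  else
    *oldb = node->next_same_hash;

  /* Push NODE on the front of the new chain.  */
  node->next_same_hash = *newb;
  node->prev_same_hash = NULL;
  if (*newb)
    (*newb)->prev_same_hash = node;
  *newb = node;
}
#endif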
2015 \f
/* Remove from the hash table any expression that is a call-clobbered
   register.  Also update the TICK values of such registers.  */
2018
2019 static void
2020 invalidate_for_call ()
2021 {
2022 unsigned int regno, endregno;
2023 unsigned int i;
2024 unsigned hash;
2025 struct table_elt *p, *next;
2026 int in_table = 0;
2027
2028 /* Go through all the hard registers. For each that is clobbered in
2029 a CALL_INSN, remove the register from quantity chains and update
2030 reg_tick if defined. Also see if any of these registers is currently
2031 in the table. */
2032
2033 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2034 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2035 {
2036 delete_reg_equiv (regno);
2037 if (REG_TICK (regno) >= 0)
2038 REG_TICK (regno)++;
2039
2040 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2041 }
2042
2043 /* In the case where we have no call-clobbered hard registers in the
2044 table, we are done. Otherwise, scan the table and remove any
2045 entry that overlaps a call-clobbered register. */
2046
2047 if (in_table)
2048 for (hash = 0; hash < HASH_SIZE; hash++)
2049 for (p = table[hash]; p; p = next)
2050 {
2051 next = p->next_same_hash;
2052
2053 if (GET_CODE (p->exp) != REG
2054 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2055 continue;
2056
2057 regno = REGNO (p->exp);
2058 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
2059
2060 for (i = regno; i < endregno; i++)
2061 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2062 {
2063 remove_from_table (p, hash);
2064 break;
2065 }
2066 }
2067 }
2068 \f
2069 /* Given an expression X of type CONST,
2070 and ELT which is its table entry (or 0 if it
2071 is not in the hash table),
2072 return an alternate expression for X as a register plus integer.
2073 If none can be found, return 0. */
2074
2075 static rtx
2076 use_related_value (x, elt)
2077 rtx x;
2078 struct table_elt *elt;
2079 {
2080 register struct table_elt *relt = 0;
2081 register struct table_elt *p, *q;
2082 HOST_WIDE_INT offset;
2083
2084 /* First, is there anything related known?
2085 If we have a table element, we can tell from that.
2086 Otherwise, must look it up. */
2087
2088 if (elt != 0 && elt->related_value != 0)
2089 relt = elt;
2090 else if (elt == 0 && GET_CODE (x) == CONST)
2091 {
2092 rtx subexp = get_related_value (x);
2093 if (subexp != 0)
2094 relt = lookup (subexp,
2095 safe_hash (subexp, GET_MODE (subexp)) & HASH_MASK,
2096 GET_MODE (subexp));
2097 }
2098
2099 if (relt == 0)
2100 return 0;
2101
2102 /* Search all related table entries for one that has an
2103 equivalent register. */
2104
2105 p = relt;
2106 while (1)
2107 {
2108 /* This loop is strange in that it is executed in two different cases.
2109 The first is when X is already in the table. Then it is searching
2110 the RELATED_VALUE list of X's class (RELT). The second case is when
2111 X is not in the table. Then RELT points to a class for the related
2112 value.
2113
	 Ensure that, whichever case we are in, we ignore classes that have
	 the same value as X.  */
2116
2117 if (rtx_equal_p (x, p->exp))
2118 q = 0;
2119 else
2120 for (q = p->first_same_value; q; q = q->next_same_value)
2121 if (GET_CODE (q->exp) == REG)
2122 break;
2123
2124 if (q)
2125 break;
2126
2127 p = p->related_value;
2128
2129 /* We went all the way around, so there is nothing to be found.
2130 Alternatively, perhaps RELT was in the table for some other reason
2131 and it has no related values recorded. */
2132 if (p == relt || p == 0)
2133 break;
2134 }
2135
2136 if (q == 0)
2137 return 0;
2138
2139 offset = (get_integer_term (x) - get_integer_term (p->exp));
  /* Note: OFFSET may be 0 if P->exp and X are related by commutativity.  */
2141 return plus_constant (q->exp, offset);
2142 }
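/* Worked example (illustrative, with made-up operands): if X is
   (const (plus (symbol_ref "s") (const_int 12))) and the table knows that
   (const (plus (symbol_ref "s") (const_int 4))) is equivalent to register
   R, then OFFSET is 12 - 4 = 8 and the result is (plus R (const_int 8)).  */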
2143 \f
2144 /* Hash an rtx. We are careful to make sure the value is never negative.
2145 Equivalent registers hash identically.
2146 MODE is used in hashing for CONST_INTs only;
2147 otherwise the mode of X is used.
2148
2149 Store 1 in do_not_record if any subexpression is volatile.
2150
2151 Store 1 in hash_arg_in_memory if X contains a MEM rtx
2152 which does not have the RTX_UNCHANGING_P bit set.
2153
2154 Note that cse_insn knows that the hash code of a MEM expression
2155 is just (int) MEM plus the hash code of the address. */
2156
2157 static unsigned
2158 canon_hash (x, mode)
2159 rtx x;
2160 enum machine_mode mode;
2161 {
2162 register int i, j;
2163 register unsigned hash = 0;
2164 register enum rtx_code code;
2165 register const char *fmt;
2166
2167 /* repeat is used to turn tail-recursion into iteration. */
2168 repeat:
2169 if (x == 0)
2170 return hash;
2171
2172 code = GET_CODE (x);
2173 switch (code)
2174 {
2175 case REG:
2176 {
2177 unsigned int regno = REGNO (x);
2178
2179 /* On some machines, we can't record any non-fixed hard register,
2180 because extending its life will cause reload problems. We
2181 consider ap, fp, and sp to be fixed for this purpose.
2182
	 We also consider CCmode registers to be fixed for this purpose;
	 failure to do so leads to a failure to simplify conditionals of
	 the form 0<100.
2186
2187 On all machines, we can't record any global registers. */
2188
2189 if (regno < FIRST_PSEUDO_REGISTER
2190 && (global_regs[regno]
2191 || (SMALL_REGISTER_CLASSES
2192 && ! fixed_regs[regno]
2193 && regno != FRAME_POINTER_REGNUM
2194 && regno != HARD_FRAME_POINTER_REGNUM
2195 && regno != ARG_POINTER_REGNUM
2196 && regno != STACK_POINTER_REGNUM
2197 && GET_MODE_CLASS (GET_MODE (x)) != MODE_CC)))
2198 {
2199 do_not_record = 1;
2200 return 0;
2201 }
2202
2203 hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
2204 return hash;
2205 }
2206
2207 /* We handle SUBREG of a REG specially because the underlying
2208 reg changes its hash value with every value change; we don't
2209 want to have to forget unrelated subregs when one subreg changes. */
2210 case SUBREG:
2211 {
2212 if (GET_CODE (SUBREG_REG (x)) == REG)
2213 {
2214 hash += (((unsigned) SUBREG << 7)
2215 + REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
2216 return hash;
2217 }
2218 break;
2219 }
2220
2221 case CONST_INT:
2222 {
2223 unsigned HOST_WIDE_INT tem = INTVAL (x);
2224 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2225 return hash;
2226 }
2227
2228 case CONST_DOUBLE:
2229 /* This is like the general case, except that it only counts
2230 the integers representing the constant. */
2231 hash += (unsigned) code + (unsigned) GET_MODE (x);
2232 if (GET_MODE (x) != VOIDmode)
2233 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
2234 {
2235 unsigned HOST_WIDE_INT tem = XWINT (x, i);
2236 hash += tem;
2237 }
2238 else
2239 hash += ((unsigned) CONST_DOUBLE_LOW (x)
2240 + (unsigned) CONST_DOUBLE_HIGH (x));
2241 return hash;
2242
2243 /* Assume there is only one rtx object for any given label. */
2244 case LABEL_REF:
2245 hash += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2246 return hash;
2247
2248 case SYMBOL_REF:
2249 hash += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2250 return hash;
2251
2252 case MEM:
2253 /* We don't record if marked volatile or if BLKmode since we don't
2254 know the size of the move. */
2255 if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2256 {
2257 do_not_record = 1;
2258 return 0;
2259 }
2260 if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2261 {
2262 hash_arg_in_memory = 1;
2263 }
2264 /* Now that we have already found this special case,
2265 might as well speed it up as much as possible. */
2266 hash += (unsigned) MEM;
2267 x = XEXP (x, 0);
2268 goto repeat;
2269
2270 case PRE_DEC:
2271 case PRE_INC:
2272 case POST_DEC:
2273 case POST_INC:
2274 case PRE_MODIFY:
2275 case POST_MODIFY:
2276 case PC:
2277 case CC0:
2278 case CALL:
2279 case UNSPEC_VOLATILE:
2280 do_not_record = 1;
2281 return 0;
2282
2283 case ASM_OPERANDS:
2284 if (MEM_VOLATILE_P (x))
2285 {
2286 do_not_record = 1;
2287 return 0;
2288 }
2289 break;
2290
2291 default:
2292 break;
2293 }
2294
2295 i = GET_RTX_LENGTH (code) - 1;
2296 hash += (unsigned) code + (unsigned) GET_MODE (x);
2297 fmt = GET_RTX_FORMAT (code);
2298 for (; i >= 0; i--)
2299 {
2300 if (fmt[i] == 'e')
2301 {
2302 rtx tem = XEXP (x, i);
2303
2304 /* If we are about to do the last recursive call
2305 needed at this level, change it into iteration.
2306 This function is called enough to be worth it. */
2307 if (i == 0)
2308 {
2309 x = tem;
2310 goto repeat;
2311 }
2312 hash += canon_hash (tem, 0);
2313 }
2314 else if (fmt[i] == 'E')
2315 for (j = 0; j < XVECLEN (x, i); j++)
2316 hash += canon_hash (XVECEXP (x, i, j), 0);
2317 else if (fmt[i] == 's')
2318 {
2319 register const unsigned char *p =
2320 (const unsigned char *) XSTR (x, i);
2321
2322 if (p)
2323 while (*p)
2324 hash += *p++;
2325 }
2326 else if (fmt[i] == 'i')
2327 {
2328 register unsigned tem = XINT (x, i);
2329 hash += tem;
2330 }
2331 else if (fmt[i] == '0' || fmt[i] == 't')
2332 /* Unused. */
2333 ;
2334 else
2335 abort ();
2336 }
2337 return hash;
2338 }
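/* Illustrative sketch, not part of the compiler: the `goto repeat' above
   turns the final recursive call into iteration.  The same transformation
   applied to a toy binary-tree hash, using a hypothetical `struct tnode':  */
#if 0
#include <stddef.h>

struct tnode
{
  unsigned int val;
  struct tnode *left, *right;
};

/* Sum-hash a binary tree.  The recursion on the left child is genuine;
   the tail call on the right child restarts the function body instead,
   just as canon_hash restarts itself for its last `e' operand.  */
static unsigned int
tree_hash (t)
     struct tnode *t;
{
  unsigned int hash = 0;

 repeat:
  if (t == NULL)
    return hash;

  hash += t->val;
  hash += tree_hash (t->left);
  t = t->right;
  goto repeat;
}
#endif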
2339
2340 /* Like canon_hash but with no side effects. */
2341
2342 static unsigned
2343 safe_hash (x, mode)
2344 rtx x;
2345 enum machine_mode mode;
2346 {
2347 int save_do_not_record = do_not_record;
2348 int save_hash_arg_in_memory = hash_arg_in_memory;
2349 unsigned hash = canon_hash (x, mode);
2350 hash_arg_in_memory = save_hash_arg_in_memory;
2351 do_not_record = save_do_not_record;
2352 return hash;
2353 }
2354 \f
2355 /* Return 1 iff X and Y would canonicalize into the same thing,
2356 without actually constructing the canonicalization of either one.
2357 If VALIDATE is nonzero,
2358 we assume X is an expression being processed from the rtl
2359 and Y was found in the hash table. We check register refs
2360 in Y for being marked as valid.
2361
2362 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2363 that is known to be in the register. Ordinarily, we don't allow them
2364 to match, because letting them match would cause unpredictable results
2365 in all the places that search a hash table chain for an equivalent
2366 for a given value. A possible equivalent that has different structure
2367 has its hash code computed from different data. Whether the hash code
2368 is the same as that of the given value is pure luck. */
2369
2370 static int
2371 exp_equiv_p (x, y, validate, equal_values)
2372 rtx x, y;
2373 int validate;
2374 int equal_values;
2375 {
2376 register int i, j;
2377 register enum rtx_code code;
2378 register const char *fmt;
2379
2380 /* Note: it is incorrect to assume an expression is equivalent to itself
2381 if VALIDATE is nonzero. */
2382 if (x == y && !validate)
2383 return 1;
2384 if (x == 0 || y == 0)
2385 return x == y;
2386
2387 code = GET_CODE (x);
2388 if (code != GET_CODE (y))
2389 {
2390 if (!equal_values)
2391 return 0;
2392
2393 /* If X is a constant and Y is a register or vice versa, they may be
2394 equivalent. We only have to validate if Y is a register. */
2395 if (CONSTANT_P (x) && GET_CODE (y) == REG
2396 && REGNO_QTY_VALID_P (REGNO (y)))
2397 {
2398 int y_q = REG_QTY (REGNO (y));
2399 struct qty_table_elem *y_ent = &qty_table[y_q];
2400
2401 if (GET_MODE (y) == y_ent->mode
2402 && rtx_equal_p (x, y_ent->const_rtx)
2403 && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
2404 return 1;
2405 }
2406
2407 if (CONSTANT_P (y) && code == REG
2408 && REGNO_QTY_VALID_P (REGNO (x)))
2409 {
2410 int x_q = REG_QTY (REGNO (x));
2411 struct qty_table_elem *x_ent = &qty_table[x_q];
2412
2413 if (GET_MODE (x) == x_ent->mode
2414 && rtx_equal_p (y, x_ent->const_rtx))
2415 return 1;
2416 }
2417
2418 return 0;
2419 }
2420
2421 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2422 if (GET_MODE (x) != GET_MODE (y))
2423 return 0;
2424
2425 switch (code)
2426 {
2427 case PC:
2428 case CC0:
2429 case CONST_INT:
2430 return x == y;
2431
2432 case LABEL_REF:
2433 return XEXP (x, 0) == XEXP (y, 0);
2434
2435 case SYMBOL_REF:
2436 return XSTR (x, 0) == XSTR (y, 0);
2437
2438 case REG:
2439 {
2440 unsigned int regno = REGNO (y);
2441 unsigned int endregno
2442 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2443 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2444 unsigned int i;
2445
	/* If the quantities are not the same, the expressions are not
	   equivalent.  If they are and we are not to validate, they
	   are equivalent.  Otherwise, ensure all regs are up-to-date.  */
2449
2450 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2451 return 0;
2452
2453 if (! validate)
2454 return 1;
2455
2456 for (i = regno; i < endregno; i++)
2457 if (REG_IN_TABLE (i) != REG_TICK (i))
2458 return 0;
2459
2460 return 1;
2461 }
2462
2463 /* For commutative operations, check both orders. */
2464 case PLUS:
2465 case MULT:
2466 case AND:
2467 case IOR:
2468 case XOR:
2469 case NE:
2470 case EQ:
2471 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2472 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2473 validate, equal_values))
2474 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2475 validate, equal_values)
2476 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2477 validate, equal_values)));
2478
2479 default:
2480 break;
2481 }
2482
  /* Compare the elements.  If any pair of corresponding elements
     fails to match, return 0 for the whole thing.  */
2485
2486 fmt = GET_RTX_FORMAT (code);
2487 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2488 {
2489 switch (fmt[i])
2490 {
2491 case 'e':
2492 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2493 return 0;
2494 break;
2495
2496 case 'E':
2497 if (XVECLEN (x, i) != XVECLEN (y, i))
2498 return 0;
2499 for (j = 0; j < XVECLEN (x, i); j++)
2500 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2501 validate, equal_values))
2502 return 0;
2503 break;
2504
2505 case 's':
2506 if (strcmp (XSTR (x, i), XSTR (y, i)))
2507 return 0;
2508 break;
2509
2510 case 'i':
2511 if (XINT (x, i) != XINT (y, i))
2512 return 0;
2513 break;
2514
2515 case 'w':
2516 if (XWINT (x, i) != XWINT (y, i))
2517 return 0;
2518 break;
2519
2520 case '0':
2521 case 't':
2522 break;
2523
2524 default:
2525 abort ();
2526 }
2527 }
2528
2529 return 1;
2530 }
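/* Illustrative sketch, not part of the compiler: the PLUS/MULT/AND/...
   case above accepts either operand order.  The same idea on a toy
   expression type (hypothetical `struct expr'; every interior node is
   treated as commutative to keep the sketch short):  */
#if 0
#include <stddef.h>

struct expr
{
  int op;			/* operator code */
  struct expr *a, *b;		/* operands; both NULL for a leaf */
  int leaf;			/* leaf value when A and B are NULL */
};

static int
equal_p (x, y)
     struct expr *x, *y;
{
  if (x == NULL || y == NULL)
    return x == y;
  if (x->op != y->op)
    return 0;
  if (x->a == NULL && x->b == NULL)
    return y->a == NULL && y->b == NULL && x->leaf == y->leaf;

  /* Accept both operand orders, as exp_equiv_p does for PLUS, MULT,
     AND, IOR, XOR, NE and EQ.  */
  return ((equal_p (x->a, y->a) && equal_p (x->b, y->b))
	  || (equal_p (x->a, y->b) && equal_p (x->b, y->a)));
}
#endif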
2531 \f
2532 /* Return 1 if X has a value that can vary even between two
2533 executions of the program. 0 means X can be compared reliably
2534 against certain constants or near-constants. */
2535
2536 static int
2537 cse_rtx_varies_p (x)
2538 register rtx x;
2539 {
2540 /* We need not check for X and the equivalence class being of the same
2541 mode because if X is equivalent to a constant in some mode, it
2542 doesn't vary in any mode. */
2543
2544 if (GET_CODE (x) == REG
2545 && REGNO_QTY_VALID_P (REGNO (x)))
2546 {
2547 int x_q = REG_QTY (REGNO (x));
2548 struct qty_table_elem *x_ent = &qty_table[x_q];
2549
2550 if (GET_MODE (x) == x_ent->mode
2551 && x_ent->const_rtx != NULL_RTX)
2552 return 0;
2553 }
2554
2555 if (GET_CODE (x) == PLUS
2556 && GET_CODE (XEXP (x, 1)) == CONST_INT
2557 && GET_CODE (XEXP (x, 0)) == REG
2558 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2559 {
2560 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2561 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2562
2563 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2564 && x0_ent->const_rtx != NULL_RTX)
2565 return 0;
2566 }
2567
  /* This can happen as the result of virtual register instantiation, if
     the initial constant is too large to be a valid address.  This gives
     us a three-instruction sequence: load the large offset into a register,
     load fp minus a constant into a register, then a MEM which is the
     sum of the two `constant' registers.  */
2573 if (GET_CODE (x) == PLUS
2574 && GET_CODE (XEXP (x, 0)) == REG
2575 && GET_CODE (XEXP (x, 1)) == REG
2576 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2577 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2578 {
2579 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2580 int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2581 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2582 struct qty_table_elem *x1_ent = &qty_table[x1_q];
2583
2584 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2585 && x0_ent->const_rtx != NULL_RTX
2586 && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2587 && x1_ent->const_rtx != NULL_RTX)
2588 return 0;
2589 }
2590
2591 return rtx_varies_p (x);
2592 }
2593 \f
2594 /* Canonicalize an expression:
2595 replace each register reference inside it
2596 with the "oldest" equivalent register.
2597
2598 If INSN is non-zero and we are replacing a pseudo with a hard register
2599 or vice versa, validate_change is used to ensure that INSN remains valid
2600 after we make our substitution. The calls are made with IN_GROUP non-zero
2601 so apply_change_group must be called upon the outermost return from this
2602 function (unless INSN is zero). The result of apply_change_group can
2603 generally be discarded since the changes we are making are optional. */
2604
2605 static rtx
2606 canon_reg (x, insn)
2607 rtx x;
2608 rtx insn;
2609 {
2610 register int i;
2611 register enum rtx_code code;
2612 register const char *fmt;
2613
2614 if (x == 0)
2615 return x;
2616
2617 code = GET_CODE (x);
2618 switch (code)
2619 {
2620 case PC:
2621 case CC0:
2622 case CONST:
2623 case CONST_INT:
2624 case CONST_DOUBLE:
2625 case SYMBOL_REF:
2626 case LABEL_REF:
2627 case ADDR_VEC:
2628 case ADDR_DIFF_VEC:
2629 return x;
2630
2631 case REG:
2632 {
2633 register int first;
2634 register int q;
2635 register struct qty_table_elem *ent;
2636
2637 /* Never replace a hard reg, because hard regs can appear
2638 in more than one machine mode, and we must preserve the mode
2639 of each occurrence. Also, some hard regs appear in
2640 MEMs that are shared and mustn't be altered. Don't try to
2641 replace any reg that maps to a reg of class NO_REGS. */
2642 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2643 || ! REGNO_QTY_VALID_P (REGNO (x)))
2644 return x;
2645
2646 q = REG_QTY (REGNO (x));
2647 ent = &qty_table[q];
2648 first = ent->first_reg;
2649 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2650 : REGNO_REG_CLASS (first) == NO_REGS ? x
2651 : gen_rtx_REG (ent->mode, first));
2652 }
2653
2654 default:
2655 break;
2656 }
2657
2658 fmt = GET_RTX_FORMAT (code);
2659 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2660 {
2661 register int j;
2662
2663 if (fmt[i] == 'e')
2664 {
2665 rtx new = canon_reg (XEXP (x, i), insn);
2666 int insn_code;
2667
2668 /* If replacing pseudo with hard reg or vice versa, ensure the
2669 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2670 if (insn != 0 && new != 0
2671 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2672 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2673 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2674 || (insn_code = recog_memoized (insn)) < 0
2675 || insn_data[insn_code].n_dups > 0))
2676 validate_change (insn, &XEXP (x, i), new, 1);
2677 else
2678 XEXP (x, i) = new;
2679 }
2680 else if (fmt[i] == 'E')
2681 for (j = 0; j < XVECLEN (x, i); j++)
2682 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2683 }
2684
2685 return x;
2686 }
2687 \f
2688 /* LOC is a location within INSN that is an operand address (the contents of
2689 a MEM). Find the best equivalent address to use that is valid for this
2690 insn.
2691
2692 On most CISC machines, complicated address modes are costly, and rtx_cost
2693 is a good approximation for that cost. However, most RISC machines have
2694 only a few (usually only one) memory reference formats. If an address is
2695 valid at all, it is often just as cheap as any other address. Hence, for
2696 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
   costs of various addresses.  For two addresses of equal cost, choose the
   one with the highest `rtx_cost' value, as that has the potential of
   eliminating the most insns.  When both the address cost and the rtx_cost
   are equal, we choose the first entry in the equivalence class.  Note that
   we ignore the fact that pseudo registers are cheaper than hard registers
   here, because we would also prefer the pseudo registers.  */
2703
2704 static void
2705 find_best_addr (insn, loc, mode)
2706 rtx insn;
2707 rtx *loc;
2708 enum machine_mode mode;
2709 {
2710 struct table_elt *elt;
2711 rtx addr = *loc;
2712 #ifdef ADDRESS_COST
2713 struct table_elt *p;
2714 int found_better = 1;
2715 #endif
2716 int save_do_not_record = do_not_record;
2717 int save_hash_arg_in_memory = hash_arg_in_memory;
2718 int addr_volatile;
2719 int regno;
2720 int folded_cost, addr_cost;
2721 unsigned hash;
2722
2723 /* Do not try to replace constant addresses or addresses of local and
2724 argument slots. These MEM expressions are made only once and inserted
2725 in many instructions, as well as being used to control symbol table
2726 output. It is not safe to clobber them.
2727
2728 There are some uncommon cases where the address is already in a register
2729 for some reason, but we cannot take advantage of that because we have
2730 no easy way to unshare the MEM. In addition, looking up all stack
2731 addresses is costly. */
2732 if ((GET_CODE (addr) == PLUS
2733 && GET_CODE (XEXP (addr, 0)) == REG
2734 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2735 && (regno = REGNO (XEXP (addr, 0)),
2736 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2737 || regno == ARG_POINTER_REGNUM))
2738 || (GET_CODE (addr) == REG
2739 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2740 || regno == HARD_FRAME_POINTER_REGNUM
2741 || regno == ARG_POINTER_REGNUM))
2742 || GET_CODE (addr) == ADDRESSOF
2743 || CONSTANT_ADDRESS_P (addr))
2744 return;
2745
2746 /* If this address is not simply a register, try to fold it. This will
2747 sometimes simplify the expression. Many simplifications
2748 will not be valid, but some, usually applying the associative rule, will
2749 be valid and produce better code. */
2750 if (GET_CODE (addr) != REG)
2751 {
2752 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2753
2754 folded_cost = address_cost (folded, mode);
2755 addr_cost = address_cost (addr, mode);
2756
      /* Use the folded form if it is strictly cheaper or if, at equal
	 address cost, it has the higher rtx_cost (the criteria documented
	 above).  */
      if ((folded_cost < addr_cost
	   || (folded_cost == addr_cost
	       && rtx_cost (folded, MEM) > rtx_cost (addr, MEM)))
	  && validate_change (insn, loc, folded, 0))
	addr = folded;
2763 }
2764
2765 /* If this address is not in the hash table, we can't look for equivalences
2766 of the whole address. Also, ignore if volatile. */
2767
2768 do_not_record = 0;
2769 hash = HASH (addr, Pmode);
2770 addr_volatile = do_not_record;
2771 do_not_record = save_do_not_record;
2772 hash_arg_in_memory = save_hash_arg_in_memory;
2773
2774 if (addr_volatile)
2775 return;
2776
2777 elt = lookup (addr, hash, Pmode);
2778
2779 #ifndef ADDRESS_COST
2780 if (elt)
2781 {
2782 int our_cost = elt->cost;
2783
2784 /* Find the lowest cost below ours that works. */
2785 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2786 if (elt->cost < our_cost
2787 && (GET_CODE (elt->exp) == REG
2788 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2789 && validate_change (insn, loc,
2790 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2791 return;
2792 }
2793 #else
2794
2795 if (elt)
2796 {
2797 /* We need to find the best (under the criteria documented above) entry
2798 in the class that is valid. We use the `flag' field to indicate
2799 choices that were invalid and iterate until we can't find a better
2800 one that hasn't already been tried. */
2801
2802 for (p = elt->first_same_value; p; p = p->next_same_value)
2803 p->flag = 0;
2804
2805 while (found_better)
2806 {
2807 int best_addr_cost = address_cost (*loc, mode);
2808 int best_rtx_cost = (elt->cost + 1) >> 1;
2809 int exp_cost;
2810 struct table_elt *best_elt = elt;
2811
2812 found_better = 0;
2813 for (p = elt->first_same_value; p; p = p->next_same_value)
2814 if (! p->flag)
2815 {
2816 if ((GET_CODE (p->exp) == REG
2817 || exp_equiv_p (p->exp, p->exp, 1, 0))
2818 && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
2819 || (exp_cost == best_addr_cost
		    && (p->cost + 1) >> 1 > best_rtx_cost)))
2821 {
2822 found_better = 1;
2823 best_addr_cost = exp_cost;
2824 best_rtx_cost = (p->cost + 1) >> 1;
2825 best_elt = p;
2826 }
2827 }
2828
2829 if (found_better)
2830 {
2831 if (validate_change (insn, loc,
2832 canon_reg (copy_rtx (best_elt->exp),
2833 NULL_RTX), 0))
2834 return;
2835 else
2836 best_elt->flag = 1;
2837 }
2838 }
2839 }
2840
2841 /* If the address is a binary operation with the first operand a register
2842 and the second a constant, do the same as above, but looking for
2843 equivalences of the register. Then try to simplify before checking for
   the best address to use.  This catches a few cases: First is when we
   have REG+const and the register is equivalent to another REG+const.  We
   can often merge
2846 the constants and eliminate one insn and one register. It may also be
2847 that a machine has a cheap REG+REG+const. Finally, this improves the
2848 code on the Alpha for unaligned byte stores. */
2849
2850 if (flag_expensive_optimizations
2851 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
2852 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
2853 && GET_CODE (XEXP (*loc, 0)) == REG
2854 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
2855 {
2856 rtx c = XEXP (*loc, 1);
2857
2858 do_not_record = 0;
2859 hash = HASH (XEXP (*loc, 0), Pmode);
2860 do_not_record = save_do_not_record;
2861 hash_arg_in_memory = save_hash_arg_in_memory;
2862
2863 elt = lookup (XEXP (*loc, 0), hash, Pmode);
2864 if (elt == 0)
2865 return;
2866
2867 /* We need to find the best (under the criteria documented above) entry
2868 in the class that is valid. We use the `flag' field to indicate
2869 choices that were invalid and iterate until we can't find a better
2870 one that hasn't already been tried. */
2871
2872 for (p = elt->first_same_value; p; p = p->next_same_value)
2873 p->flag = 0;
2874
2875 while (found_better)
2876 {
2877 int best_addr_cost = address_cost (*loc, mode);
2878 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2879 struct table_elt *best_elt = elt;
2880 rtx best_rtx = *loc;
2881 int count;
2882
	  /* This is in the worst case an O(n^2) algorithm, so limit our
	     search to the first 32 elements on the list.  This avoids
	     trouble
2885 compiling code with very long basic blocks that can easily
2886 call simplify_gen_binary so many times that we run out of
2887 memory. */
2888
2889 found_better = 0;
2890 for (p = elt->first_same_value, count = 0;
2891 p && count < 32;
2892 p = p->next_same_value, count++)
2893 if (! p->flag
2894 && (GET_CODE (p->exp) == REG
2895 || exp_equiv_p (p->exp, p->exp, 1, 0)))
2896 {
2897 rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
2898 p->exp, c);
2899 int new_cost;
2900 new_cost = address_cost (new, mode);
2901
2902 if (new_cost < best_addr_cost
2903 || (new_cost == best_addr_cost
2904 && (COST (new) + 1) >> 1 > best_rtx_cost))
2905 {
2906 found_better = 1;
2907 best_addr_cost = new_cost;
2908 best_rtx_cost = (COST (new) + 1) >> 1;
2909 best_elt = p;
2910 best_rtx = new;
2911 }
2912 }
2913
2914 if (found_better)
2915 {
2916 if (validate_change (insn, loc,
2917 canon_reg (copy_rtx (best_rtx),
2918 NULL_RTX), 0))
2919 return;
2920 else
2921 best_elt->flag = 1;
2922 }
2923 }
2924 }
2925 #endif
2926 }
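/* Illustrative sketch, not part of the compiler: both loops above share
   one shape -- repeatedly pick the cheapest candidate not yet marked, and
   if the replacement is rejected, set the candidate's `flag' and try the
   next best.  A generic skeleton, with a hypothetical `struct cand' and a
   TRY_REPLACE callback standing in for validate_change:  */
#if 0
#include <stddef.h>

struct cand
{
  int cost;
  int flag;			/* nonzero once rejected */
};

static void
pick_best (cands, n, current_cost, try_replace)
     struct cand *cands;
     int n, current_cost;
     int (*try_replace) PARAMS ((struct cand *));
{
  int found_better = 1;

  while (found_better)
    {
      int best_cost = current_cost;
      struct cand *best = NULL;
      int i;

      found_better = 0;
      for (i = 0; i < n; i++)
	if (! cands[i].flag && cands[i].cost < best_cost)
	  {
	    found_better = 1;
	    best_cost = cands[i].cost;
	    best = &cands[i];
	  }

      if (found_better)
	{
	  if ((*try_replace) (best))
	    return;		/* replacement accepted */
	  best->flag = 1;	/* rejected; never retry this one */
	}
    }
}
#endif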
2927 \f
/* Given an operation (CODE, *PARG1, *PARG2), where CODE is a comparison
   operation (EQ, NE, GT, etc.), follow it back through the hash table to
   find what values are being compared.
2931
2932 *PARG1 and *PARG2 are updated to contain the rtx representing the values
2933 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
2934 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2935 compared to produce cc0.
2936
   The return value is the comparison operator: either CODE itself or the
   code corresponding to the inverse of the comparison.  */
2939
2940 static enum rtx_code
2941 find_comparison_args (code, parg1, parg2, pmode1, pmode2)
2942 enum rtx_code code;
2943 rtx *parg1, *parg2;
2944 enum machine_mode *pmode1, *pmode2;
2945 {
2946 rtx arg1, arg2;
2947
2948 arg1 = *parg1, arg2 = *parg2;
2949
2950 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
2951
2952 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2953 {
2954 /* Set non-zero when we find something of interest. */
2955 rtx x = 0;
2956 int reverse_code = 0;
2957 struct table_elt *p = 0;
2958
2959 /* If arg1 is a COMPARE, extract the comparison arguments from it.
2960 On machines with CC0, this is the only case that can occur, since
2961 fold_rtx will return the COMPARE or item being compared with zero
2962 when given CC0. */
2963
2964 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2965 x = arg1;
2966
2967 /* If ARG1 is a comparison operator and CODE is testing for
2968 STORE_FLAG_VALUE, get the inner arguments. */
2969
2970 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
2971 {
2972 if (code == NE
2973 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2974 && code == LT && STORE_FLAG_VALUE == -1)
2975 #ifdef FLOAT_STORE_FLAG_VALUE
2976 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2977 && (REAL_VALUE_NEGATIVE
2978 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
2979 #endif
2980 )
2981 x = arg1;
2982 else if (code == EQ
2983 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2984 && code == GE && STORE_FLAG_VALUE == -1)
2985 #ifdef FLOAT_STORE_FLAG_VALUE
2986 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2987 && (REAL_VALUE_NEGATIVE
2988 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
2989 #endif
2990 )
2991 x = arg1, reverse_code = 1;
2992 }
2993
2994 /* ??? We could also check for
2995
2996 (ne (and (eq (...) (const_int 1))) (const_int 0))
2997
2998 and related forms, but let's wait until we see them occurring. */
2999
3000 if (x == 0)
3001 /* Look up ARG1 in the hash table and see if it has an equivalence
3002 that lets us see what is being compared. */
3003 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) & HASH_MASK,
3004 GET_MODE (arg1));
3005 if (p)
3006 p = p->first_same_value;
3007
3008 for (; p; p = p->next_same_value)
3009 {
3010 enum machine_mode inner_mode = GET_MODE (p->exp);
3011
3012 /* If the entry isn't valid, skip it. */
3013 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
3014 continue;
3015
3016 if (GET_CODE (p->exp) == COMPARE
3017 /* Another possibility is that this machine has a compare insn
3018 that includes the comparison code. In that case, ARG1 would
3019 be equivalent to a comparison operation that would set ARG1 to
3020 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3021 ORIG_CODE is the actual comparison being done; if it is an EQ,
		 we must reverse ORIG_CODE.  On machines with a negative
		 value for STORE_FLAG_VALUE, also look at LT and GE
		 operations.  */
3024 || ((code == NE
3025 || (code == LT
3026 && GET_MODE_CLASS (inner_mode) == MODE_INT
3027 && (GET_MODE_BITSIZE (inner_mode)
3028 <= HOST_BITS_PER_WIDE_INT)
3029 && (STORE_FLAG_VALUE
3030 & ((HOST_WIDE_INT) 1
3031 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3032 #ifdef FLOAT_STORE_FLAG_VALUE
3033 || (code == LT
3034 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3035 && (REAL_VALUE_NEGATIVE
3036 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3037 #endif
3038 )
3039 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
3040 {
3041 x = p->exp;
3042 break;
3043 }
3044 else if ((code == EQ
3045 || (code == GE
3046 && GET_MODE_CLASS (inner_mode) == MODE_INT
3047 && (GET_MODE_BITSIZE (inner_mode)
3048 <= HOST_BITS_PER_WIDE_INT)
3049 && (STORE_FLAG_VALUE
3050 & ((HOST_WIDE_INT) 1
3051 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3052 #ifdef FLOAT_STORE_FLAG_VALUE
3053 || (code == GE
3054 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3055 && (REAL_VALUE_NEGATIVE
3056 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3057 #endif
3058 )
3059 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
3060 {
3061 reverse_code = 1;
3062 x = p->exp;
3063 break;
3064 }
3065
3066 /* If this is fp + constant, the equivalent is a better operand since
3067 it may let us predict the value of the comparison. */
3068 else if (NONZERO_BASE_PLUS_P (p->exp))
3069 {
3070 arg1 = p->exp;
3071 continue;
3072 }
3073 }
3074
3075 /* If we didn't find a useful equivalence for ARG1, we are done.
3076 Otherwise, set up for the next iteration. */
3077 if (x == 0)
3078 break;
3079
3080 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3081 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3082 code = GET_CODE (x);
3083
3084 if (reverse_code)
3085 code = reverse_condition (code);
3086 }
3087
3088 /* Return our results. Return the modes from before fold_rtx
3089 because fold_rtx might produce const_int, and then it's too late. */
3090 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3091 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3092
3093 return code;
3094 }
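/* Worked example (illustrative, with made-up register numbers): suppose
   r70 was set from (gt:SI r65 r66) and STORE_FLAG_VALUE is 1.  Asked
   about (ne r70 (const_int 0)), the loop above finds the GT expression in
   r70's class, so *PARG1 and *PARG2 become r65 and r66 and GT is
   returned; asked about (eq r70 (const_int 0)), reverse_code flips the
   result to LE.  */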
3095 \f
3096 /* If X is a nontrivial arithmetic operation on an argument
3097 for which a constant value can be determined, return
3098 the result of operating on that value, as a constant.
3099 Otherwise, return X, possibly with one or more operands
3100 modified by recursive calls to this function.
3101
3102 If X is a register whose contents are known, we do NOT
3103 return those contents here. equiv_constant is called to
3104 perform that task.
3105
3106 INSN is the insn that we may be modifying. If it is 0, make a copy
3107 of X before modifying it. */
3108
3109 static rtx
3110 fold_rtx (x, insn)
3111 rtx x;
3112 rtx insn;
3113 {
3114 register enum rtx_code code;
3115 register enum machine_mode mode;
3116 register const char *fmt;
3117 register int i;
3118 rtx new = 0;
3119 int copied = 0;
3120 int must_swap = 0;
3121
3122 /* Folded equivalents of first two operands of X. */
3123 rtx folded_arg0;
3124 rtx folded_arg1;
3125
3126 /* Constant equivalents of first three operands of X;
3127 0 when no such equivalent is known. */
3128 rtx const_arg0;
3129 rtx const_arg1;
3130 rtx const_arg2;
3131
3132 /* The mode of the first operand of X. We need this for sign and zero
3133 extends. */
3134 enum machine_mode mode_arg0;
3135
3136 if (x == 0)
3137 return x;
3138
3139 mode = GET_MODE (x);
3140 code = GET_CODE (x);
3141 switch (code)
3142 {
3143 case CONST:
3144 case CONST_INT:
3145 case CONST_DOUBLE:
3146 case SYMBOL_REF:
3147 case LABEL_REF:
3148 case REG:
    /* No use simplifying an EXPR_LIST,
       since EXPR_LISTs are used only for lists of args
       in a function call's REG_EQUAL note.  */
3152 case EXPR_LIST:
3153 /* Changing anything inside an ADDRESSOF is incorrect; we don't
3154 want to (e.g.,) make (addressof (const_int 0)) just because
3155 the location is known to be zero. */
3156 case ADDRESSOF:
3157 return x;
3158
3159 #ifdef HAVE_cc0
3160 case CC0:
3161 return prev_insn_cc0;
3162 #endif
3163
3164 case PC:
3165 /* If the next insn is a CODE_LABEL followed by a jump table,
3166 PC's value is a LABEL_REF pointing to that label. That
3167 lets us fold switch statements on the Vax. */
3168 if (insn && GET_CODE (insn) == JUMP_INSN)
3169 {
3170 rtx next = next_nonnote_insn (insn);
3171
3172 if (next && GET_CODE (next) == CODE_LABEL
3173 && NEXT_INSN (next) != 0
3174 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
3175 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
3176 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
3177 return gen_rtx_LABEL_REF (Pmode, next);
3178 }
3179 break;
3180
3181 case SUBREG:
3182 /* See if we previously assigned a constant value to this SUBREG. */
3183 if ((new = lookup_as_function (x, CONST_INT)) != 0
3184 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3185 return new;
3186
3187 /* If this is a paradoxical SUBREG, we have no idea what value the
3188 extra bits would have. However, if the operand is equivalent
3189 to a SUBREG whose operand is the same as our mode, and all the
3190 modes are within a word, we can just use the inner operand
3191 because these SUBREGs just say how to treat the register.
3192
3193 Similarly if we find an integer constant. */
3194
3195 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3196 {
3197 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3198 struct table_elt *elt;
3199
3200 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3201 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3202 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3203 imode)) != 0)
3204 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3205 {
3206 if (CONSTANT_P (elt->exp)
3207 && GET_MODE (elt->exp) == VOIDmode)
3208 return elt->exp;
3209
3210 if (GET_CODE (elt->exp) == SUBREG
3211 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3212 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3213 return copy_rtx (SUBREG_REG (elt->exp));
3214 }
3215
3216 return x;
3217 }
3218
3219 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
3220 We might be able to if the SUBREG is extracting a single word in an
3221 integral mode or extracting the low part. */
3222
3223 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3224 const_arg0 = equiv_constant (folded_arg0);
3225 if (const_arg0)
3226 folded_arg0 = const_arg0;
3227
3228 if (folded_arg0 != SUBREG_REG (x))
3229 {
3230 new = 0;
3231
3232 if (GET_MODE_CLASS (mode) == MODE_INT
3233 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3234 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
3235 new = operand_subword (folded_arg0, SUBREG_WORD (x), 0,
3236 GET_MODE (SUBREG_REG (x)));
3237 if (new == 0 && subreg_lowpart_p (x))
3238 new = gen_lowpart_if_possible (mode, folded_arg0);
3239 if (new)
3240 return new;
3241 }
3242
3243 /* If this is a narrowing SUBREG and our operand is a REG, see if
3244 we can find an equivalence for REG that is an arithmetic operation
3245 in a wider mode where both operands are paradoxical SUBREGs
3246 from objects of our result mode. In that case, we couldn't report
3247 an equivalent value for that operation, since we don't know what the
3248 extra bits will be. But we can find an equivalence for this SUBREG
       by folding that operation in the narrow mode.  This allows us to
3250 fold arithmetic in narrow modes when the machine only supports
3251 word-sized arithmetic.
3252
3253 Also look for a case where we have a SUBREG whose operand is the
3254 same as our result. If both modes are smaller than a word, we
3255 are simply interpreting a register in different modes and we
3256 can use the inner value. */
3257
3258 if (GET_CODE (folded_arg0) == REG
3259 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
3260 && subreg_lowpart_p (x))
3261 {
3262 struct table_elt *elt;
3263
3264 /* We can use HASH here since we know that canon_hash won't be
3265 called. */
3266 elt = lookup (folded_arg0,
3267 HASH (folded_arg0, GET_MODE (folded_arg0)),
3268 GET_MODE (folded_arg0));
3269
3270 if (elt)
3271 elt = elt->first_same_value;
3272
3273 for (; elt; elt = elt->next_same_value)
3274 {
3275 enum rtx_code eltcode = GET_CODE (elt->exp);
3276
3277 /* Just check for unary and binary operations. */
3278 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
3279 && GET_CODE (elt->exp) != SIGN_EXTEND
3280 && GET_CODE (elt->exp) != ZERO_EXTEND
3281 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3282 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
3283 {
3284 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3285
3286 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3287 op0 = fold_rtx (op0, NULL_RTX);
3288
3289 op0 = equiv_constant (op0);
3290 if (op0)
3291 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3292 op0, mode);
3293 }
3294 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
3295 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
3296 && eltcode != DIV && eltcode != MOD
3297 && eltcode != UDIV && eltcode != UMOD
3298 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3299 && eltcode != ROTATE && eltcode != ROTATERT
3300 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3301 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3302 == mode))
3303 || CONSTANT_P (XEXP (elt->exp, 0)))
3304 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3305 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3306 == mode))
3307 || CONSTANT_P (XEXP (elt->exp, 1))))
3308 {
3309 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3310 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3311
3312 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3313 op0 = fold_rtx (op0, NULL_RTX);
3314
3315 if (op0)
3316 op0 = equiv_constant (op0);
3317
3318 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
3319 op1 = fold_rtx (op1, NULL_RTX);
3320
3321 if (op1)
3322 op1 = equiv_constant (op1);
3323
3324 /* If we are looking for the low SImode part of
3325 (ashift:DI c (const_int 32)), it doesn't work
3326 to compute that in SImode, because a 32-bit shift
3327 in SImode is unpredictable. We know the value is 0. */
3328 if (op0 && op1
3329 && GET_CODE (elt->exp) == ASHIFT
3330 && GET_CODE (op1) == CONST_INT
3331 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3332 {
3333 if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3334
3335 /* If the count fits in the inner mode's width,
3336 but exceeds the outer mode's width,
3337 the value will get truncated to 0
3338 by the subreg. */
3339 new = const0_rtx;
3340 else
3341 /* If the count exceeds even the inner mode's width,
3342 don't fold this expression. */
3343 new = 0;
3344 }
3345 else if (op0 && op1)
3346 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
3347 op0, op1);
3348 }
3349
3350 else if (GET_CODE (elt->exp) == SUBREG
3351 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3352 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3353 <= UNITS_PER_WORD)
3354 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3355 new = copy_rtx (SUBREG_REG (elt->exp));
3356
3357 if (new)
3358 return new;
3359 }
3360 }
3361
3362 return x;
3363
3364 case NOT:
3365 case NEG:
3366 /* If we have (NOT Y), see if Y is known to be (NOT Z).
3367 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
3368 new = lookup_as_function (XEXP (x, 0), code);
3369 if (new)
3370 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3371 break;
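      /* Worked example (illustrative): if r5 is known to be equivalent
	 to (not:SI r7), then (not:SI r5) folds to r7; likewise
	 (neg:SI r5) would fold to r7 if r5 were equivalent to
	 (neg:SI r7).  */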
3372
3373 case MEM:
3374 /* If we are not actually processing an insn, don't try to find the
3375 best address. Not only don't we care, but we could modify the
3376 MEM in an invalid way since we have no insn to validate against. */
3377 if (insn != 0)
3378 find_best_addr (insn, &XEXP (x, 0), GET_MODE (x));
3379
3380 {
3381 /* Even if we don't fold in the insn itself,
3382 we can safely do so here, in hopes of getting a constant. */
3383 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3384 rtx base = 0;
3385 HOST_WIDE_INT offset = 0;
3386
3387 if (GET_CODE (addr) == REG
3388 && REGNO_QTY_VALID_P (REGNO (addr)))
3389 {
3390 int addr_q = REG_QTY (REGNO (addr));
3391 struct qty_table_elem *addr_ent = &qty_table[addr_q];
3392
3393 if (GET_MODE (addr) == addr_ent->mode
3394 && addr_ent->const_rtx != NULL_RTX)
3395 addr = addr_ent->const_rtx;
3396 }
3397
3398 /* If address is constant, split it into a base and integer offset. */
3399 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3400 base = addr;
3401 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3402 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3403 {
3404 base = XEXP (XEXP (addr, 0), 0);
3405 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3406 }
3407 else if (GET_CODE (addr) == LO_SUM
3408 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3409 base = XEXP (addr, 1);
3410 else if (GET_CODE (addr) == ADDRESSOF)
3411 return change_address (x, VOIDmode, addr);
3412
3413 /* If this is a constant pool reference, we can fold it into its
3414 constant to allow better value tracking. */
3415 if (base && GET_CODE (base) == SYMBOL_REF
3416 && CONSTANT_POOL_ADDRESS_P (base))
3417 {
3418 rtx constant = get_pool_constant (base);
3419 enum machine_mode const_mode = get_pool_mode (base);
3420 rtx new;
3421
3422 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3423 constant_pool_entries_cost = COST (constant);
3424
3425 /* If we are loading the full constant, we have an equivalence. */
3426 if (offset == 0 && mode == const_mode)
3427 return constant;
3428
3429 /* If this actually isn't a constant (weird!), we can't do
3430 anything. Otherwise, handle the two most common cases:
3431 extracting a word from a multi-word constant, and extracting
3432 the low-order bits. Other cases don't seem common enough to
3433 worry about. */
3434 if (! CONSTANT_P (constant))
3435 return x;
3436
3437 if (GET_MODE_CLASS (mode) == MODE_INT
3438 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3439 && offset % UNITS_PER_WORD == 0
3440 && (new = operand_subword (constant,
3441 offset / UNITS_PER_WORD,
3442 0, const_mode)) != 0)
3443 return new;
3444
3445 if (((BYTES_BIG_ENDIAN
3446 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3447 || (! BYTES_BIG_ENDIAN && offset == 0))
3448 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
3449 return new;
3450 }
3451
3452 /* If this is a reference to a label at a known position in a jump
3453 table, we also know its value. */
3454 if (base && GET_CODE (base) == LABEL_REF)
3455 {
3456 rtx label = XEXP (base, 0);
3457 rtx table_insn = NEXT_INSN (label);
3458
3459 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3460 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3461 {
3462 rtx table = PATTERN (table_insn);
3463
3464 if (offset >= 0
3465 && (offset / GET_MODE_SIZE (GET_MODE (table))
3466 < XVECLEN (table, 0)))
3467 return XVECEXP (table, 0,
3468 offset / GET_MODE_SIZE (GET_MODE (table)));
3469 }
3470 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3471 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3472 {
3473 rtx table = PATTERN (table_insn);
3474
3475 if (offset >= 0
3476 && (offset / GET_MODE_SIZE (GET_MODE (table))
3477 < XVECLEN (table, 1)))
3478 {
3479 offset /= GET_MODE_SIZE (GET_MODE (table));
3480 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3481 XEXP (table, 0));
3482
3483 if (GET_MODE (table) != Pmode)
3484 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3485
3486 /* Indicate this is a constant. This isn't a
3487 valid form of CONST, but it will only be used
3488 to fold the next insns and then discarded, so
3489 it should be safe.
3490
3491 Note this expression must be explicitly discarded,
3492 by cse_insn, else it may end up in a REG_EQUAL note
3493 and "escape" to cause problems elsewhere. */
3494 return gen_rtx_CONST (GET_MODE (new), new);
3495 }
3496 }
3497 }
3498
3499 return x;
3500 }
3501
3502 case ASM_OPERANDS:
3503 for (i = XVECLEN (x, 3) - 1; i >= 0; i--)
3504 validate_change (insn, &XVECEXP (x, 3, i),
3505 fold_rtx (XVECEXP (x, 3, i), insn), 0);
3506 break;
3507
3508 default:
3509 break;
3510 }
3511
3512 const_arg0 = 0;
3513 const_arg1 = 0;
3514 const_arg2 = 0;
3515 mode_arg0 = VOIDmode;
3516
3517 /* Try folding our operands.
3518 Then see which ones have constant values known. */
3519
3520 fmt = GET_RTX_FORMAT (code);
3521 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3522 if (fmt[i] == 'e')
3523 {
3524 rtx arg = XEXP (x, i);
3525 rtx folded_arg = arg, const_arg = 0;
3526 enum machine_mode mode_arg = GET_MODE (arg);
3527 rtx cheap_arg, expensive_arg;
3528 rtx replacements[2];
3529 int j;
3530
3531 /* Most arguments are cheap, so handle them specially. */
3532 switch (GET_CODE (arg))
3533 {
3534 case REG:
3535 /* This is the same as calling equiv_constant; it is duplicated
3536 here for speed. */
3537 if (REGNO_QTY_VALID_P (REGNO (arg)))
3538 {
3539 int arg_q = REG_QTY (REGNO (arg));
3540 struct qty_table_elem *arg_ent = &qty_table[arg_q];
3541
3542 if (arg_ent->const_rtx != NULL_RTX
3543 && GET_CODE (arg_ent->const_rtx) != REG
3544 && GET_CODE (arg_ent->const_rtx) != PLUS)
3545 const_arg
3546 = gen_lowpart_if_possible (GET_MODE (arg),
3547 arg_ent->const_rtx);
3548 }
3549 break;
3550
3551 case CONST:
3552 case CONST_INT:
3553 case SYMBOL_REF:
3554 case LABEL_REF:
3555 case CONST_DOUBLE:
3556 const_arg = arg;
3557 break;
3558
3559 #ifdef HAVE_cc0
3560 case CC0:
3561 folded_arg = prev_insn_cc0;
3562 mode_arg = prev_insn_cc0_mode;
3563 const_arg = equiv_constant (folded_arg);
3564 break;
3565 #endif
3566
3567 default:
3568 folded_arg = fold_rtx (arg, insn);
3569 const_arg = equiv_constant (folded_arg);
3570 }
3571
3572 /* For the first three operands, see if the operand
3573 is constant or equivalent to a constant. */
3574 switch (i)
3575 {
3576 case 0:
3577 folded_arg0 = folded_arg;
3578 const_arg0 = const_arg;
3579 mode_arg0 = mode_arg;
3580 break;
3581 case 1:
3582 folded_arg1 = folded_arg;
3583 const_arg1 = const_arg;
3584 break;
3585 case 2:
3586 const_arg2 = const_arg;
3587 break;
3588 }
3589
3590 /* Pick the least expensive of the folded argument and an
3591 equivalent constant argument. */
3592 if (const_arg == 0 || const_arg == folded_arg
3593 || COST (const_arg) > COST (folded_arg))
3594 cheap_arg = folded_arg, expensive_arg = const_arg;
3595 else
3596 cheap_arg = const_arg, expensive_arg = folded_arg;
3597
3598 /* Try to replace the operand with the cheapest of the two
3599 possibilities. If it doesn't work and this is either of the first
3600 two operands of a commutative operation, try swapping them.
3601 If THAT fails, try the more expensive, provided it is cheaper
3602 than what is already there. */
3603
3604 if (cheap_arg == XEXP (x, i))
3605 continue;
3606
3607 if (insn == 0 && ! copied)
3608 {
3609 x = copy_rtx (x);
3610 copied = 1;
3611 }
3612
3613 replacements[0] = cheap_arg, replacements[1] = expensive_arg;
3614 for (j = 0;
3615 j < 2 && replacements[j]
3616 && COST (replacements[j]) < COST (XEXP (x, i));
3617 j++)
3618 {
3619 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3620 break;
3621
3622 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c')
3623 {
3624 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3625 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3626
3627 if (apply_change_group ())
3628 {
3629 /* Swap them back to be invalid so that this loop can
3630 continue and flag them to be swapped back later. */
3631 rtx tem;
3632
3633 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3634 XEXP (x, 1) = tem;
3635 must_swap = 1;
3636 break;
3637 }
3638 }
3639 }
3640 }
3641
3642 else
3643 {
3644 if (fmt[i] == 'E')
3645 /* Don't try to fold inside of a vector of expressions.
3646 Doing nothing is harmless. */
3647 {;}
3648 }
3649
3650 /* If a commutative operation, place a constant integer as the second
3651 operand unless the first operand is also a constant integer. Otherwise,
3652 place any constant second unless the first operand is also a constant. */
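/* For example, (plus (const_int 4) (reg 65)) is rewritten here as
   (plus (reg 65) (const_int 4)), so later lookups see one canonical form
   (register numbers are illustrative). */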
3653
3654 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
3655 {
3656 if (must_swap || (const_arg0
3657 && (const_arg1 == 0
3658 || (GET_CODE (const_arg0) == CONST_INT
3659 && GET_CODE (const_arg1) != CONST_INT))))
3660 {
3661 register rtx tem = XEXP (x, 0);
3662
3663 if (insn == 0 && ! copied)
3664 {
3665 x = copy_rtx (x);
3666 copied = 1;
3667 }
3668
3669 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3670 validate_change (insn, &XEXP (x, 1), tem, 1);
3671 if (apply_change_group ())
3672 {
3673 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3674 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3675 }
3676 }
3677 }
3678
3679 /* If X is an arithmetic operation, see if we can simplify it. */
3680
3681 switch (GET_RTX_CLASS (code))
3682 {
3683 case '1':
3684 {
3685 int is_const = 0;
3686
3687 /* We can't simplify extension ops unless we know the
3688 original mode. */
3689 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3690 && mode_arg0 == VOIDmode)
3691 break;
3692
3693 /* If we had a CONST, strip it off and put it back later if we
3694 fold. */
3695 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3696 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3697
3698 new = simplify_unary_operation (code, mode,
3699 const_arg0 ? const_arg0 : folded_arg0,
3700 mode_arg0);
3701 if (new != 0 && is_const)
3702 new = gen_rtx_CONST (mode, new);
3703 }
3704 break;
3705
3706 case '<':
3707 /* See what items are actually being compared and set FOLDED_ARG[01]
3708 to those values and CODE to the actual comparison code. If any are
3709 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3710 do anything if both operands are already known to be constant. */
3711
3712 if (const_arg0 == 0 || const_arg1 == 0)
3713 {
3714 struct table_elt *p0, *p1;
3715 rtx true = const_true_rtx, false = const0_rtx;
3716 enum machine_mode mode_arg1;
3717
3718 #ifdef FLOAT_STORE_FLAG_VALUE
3719 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3720 {
3721 true = (CONST_DOUBLE_FROM_REAL_VALUE
3722 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3723 false = CONST0_RTX (mode);
3724 }
3725 #endif
3726
3727 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3728 &mode_arg0, &mode_arg1);
3729 const_arg0 = equiv_constant (folded_arg0);
3730 const_arg1 = equiv_constant (folded_arg1);
3731
3732 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3733 what kinds of things are being compared, so we can't do
3734 anything with this comparison. */
3735
3736 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3737 break;
3738
3739 /* If we do not now have two constants being compared, see
3740 if we can nevertheless deduce some things about the
3741 comparison. */
3742 if (const_arg0 == 0 || const_arg1 == 0)
3743 {
3744 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or
3745 non-explicit constant? These aren't zero, but we
3746 don't know their sign. */
3747 if (const_arg1 == const0_rtx
3748 && (NONZERO_BASE_PLUS_P (folded_arg0)
3749 #if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
3750 come out as 0. */
3751 || GET_CODE (folded_arg0) == SYMBOL_REF
3752 #endif
3753 || GET_CODE (folded_arg0) == LABEL_REF
3754 || GET_CODE (folded_arg0) == CONST))
3755 {
3756 if (code == EQ)
3757 return false;
3758 else if (code == NE)
3759 return true;
3760 }
3761
3762 /* See if the two operands are the same. We don't do this
3763 for IEEE floating-point since we can't assume x == x;
3764 x might be a NaN. */
3765
3766 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3767 || ! FLOAT_MODE_P (mode_arg0) || flag_fast_math)
3768 && (folded_arg0 == folded_arg1
3769 || (GET_CODE (folded_arg0) == REG
3770 && GET_CODE (folded_arg1) == REG
3771 && (REG_QTY (REGNO (folded_arg0))
3772 == REG_QTY (REGNO (folded_arg1))))
3773 || ((p0 = lookup (folded_arg0,
3774 (safe_hash (folded_arg0, mode_arg0)
3775 & HASH_MASK), mode_arg0))
3776 && (p1 = lookup (folded_arg1,
3777 (safe_hash (folded_arg1, mode_arg0)
3778 & HASH_MASK), mode_arg0))
3779 && p0->first_same_value == p1->first_same_value)))
3780 return ((code == EQ || code == LE || code == GE
3781 || code == LEU || code == GEU)
3782 ? true : false);
3783
3784 /* If FOLDED_ARG0 is a register, see if the comparison we are
3785 doing now is either the same as we did before or the reverse
3786 (we only check the reverse if not floating-point). */
3787 else if (GET_CODE (folded_arg0) == REG)
3788 {
3789 int qty = REG_QTY (REGNO (folded_arg0));
3790
3791 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3792 {
3793 struct qty_table_elem *ent = &qty_table[qty];
3794
3795 if ((comparison_dominates_p (ent->comparison_code, code)
3796 || (! FLOAT_MODE_P (mode_arg0)
3797 && comparison_dominates_p (ent->comparison_code,
3798 reverse_condition (code))))
3799 && (rtx_equal_p (ent->comparison_const, folded_arg1)
3800 || (const_arg1
3801 && rtx_equal_p (ent->comparison_const,
3802 const_arg1))
3803 || (GET_CODE (folded_arg1) == REG
3804 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
3805 return (comparison_dominates_p (ent->comparison_code, code)
3806 ? true : false);
3807 }
3808 }
3809 }
3810 }
3811
3812 /* If we are comparing against zero, see if the first operand is
3813 equivalent to an IOR with a constant. If so, we may be able to
3814 determine the result of this comparison. */
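/* For illustration: if FOLDED_ARG0 is known to equal
   (ior (reg 66) (const_int -16)) in SImode, the value cannot be zero, so
   EQ against zero folds to false and NE to true; and since -16 has the
   sign bit set, LT/LE fold to true and GT/GE to false. */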
3815
3816 if (const_arg1 == const0_rtx)
3817 {
3818 rtx y = lookup_as_function (folded_arg0, IOR);
3819 rtx inner_const;
3820
3821 if (y != 0
3822 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
3823 && GET_CODE (inner_const) == CONST_INT
3824 && INTVAL (inner_const) != 0)
3825 {
3826 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
3827 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
3828 && (INTVAL (inner_const)
3829 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
3830 rtx true = const_true_rtx, false = const0_rtx;
3831
3832 #ifdef FLOAT_STORE_FLAG_VALUE
3833 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3834 {
3835 true = (CONST_DOUBLE_FROM_REAL_VALUE
3836 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3837 false = CONST0_RTX (mode);
3838 }
3839 #endif
3840
3841 switch (code)
3842 {
3843 case EQ:
3844 return false;
3845 case NE:
3846 return true;
3847 case LT: case LE:
3848 if (has_sign)
3849 return true;
3850 break;
3851 case GT: case GE:
3852 if (has_sign)
3853 return false;
3854 break;
3855 default:
3856 break;
3857 }
3858 }
3859 }
3860
3861 new = simplify_relational_operation (code, mode_arg0,
3862 const_arg0 ? const_arg0 : folded_arg0,
3863 const_arg1 ? const_arg1 : folded_arg1);
3864 #ifdef FLOAT_STORE_FLAG_VALUE
3865 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3866 {
3867 if (new == const0_rtx)
3868 new = CONST0_RTX (mode);
3869 else
3870 new = (CONST_DOUBLE_FROM_REAL_VALUE
3871 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3872 }
3873 #endif
3874 break;
3875
3876 case '2':
3877 case 'c':
3878 switch (code)
3879 {
3880 case PLUS:
3881 /* If the second operand is a LABEL_REF, see if the first is a MINUS
3882 with that LABEL_REF as its second operand. If so, the result is
3883 the first operand of that MINUS. This handles switches with an
3884 ADDR_DIFF_VEC table. */
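/* For illustration: if folded_arg0 is (minus (label_ref L1)
   (label_ref L2)) and const_arg1 is (label_ref L2), the sum is simply
   (label_ref L1), the first operand of the MINUS. */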
3885 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
3886 {
3887 rtx y
3888 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
3889 : lookup_as_function (folded_arg0, MINUS);
3890
3891 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3892 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
3893 return XEXP (y, 0);
3894
3895 /* Now try for a CONST of a MINUS like the above. */
3896 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
3897 : lookup_as_function (folded_arg0, CONST))) != 0
3898 && GET_CODE (XEXP (y, 0)) == MINUS
3899 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3900 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
3901 return XEXP (XEXP (y, 0), 0);
3902 }
3903
3904 /* Likewise if the operands are in the other order. */
3905 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
3906 {
3907 rtx y
3908 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
3909 : lookup_as_function (folded_arg1, MINUS);
3910
3911 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3912 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
3913 return XEXP (y, 0);
3914
3915 /* Now try for a CONST of a MINUS like the above. */
3916 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
3917 : lookup_as_function (folded_arg1, CONST))) != 0
3918 && GET_CODE (XEXP (y, 0)) == MINUS
3919 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3920 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
3921 return XEXP (XEXP (y, 0), 0);
3922 }
3923
3924 /* If second operand is a register equivalent to a negative
3925 CONST_INT, see if we can find a register equivalent to the
3926 positive constant. Make a MINUS if so. Don't do this for
3927 a non-negative constant since we might then alternate between
3928 choosing positive and negative constants. Having the positive
3929 constant previously-used is the more common case. Be sure
3930 the resulting constant is non-negative; if const_arg1 were
3931 the smallest negative number this would overflow: depending
3932 on the mode, this would either just be the same value (and
3933 hence not save anything) or be incorrect. */
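/* For illustration: if const_arg1 is (const_int -4) and some register,
   say (reg 66), is already known to hold (const_int 4), then
   (plus (reg 65) (const_int -4)) can be rewritten as
   (minus (reg 65) (reg 66)) (register numbers hypothetical). */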
3934 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
3935 && INTVAL (const_arg1) < 0
3936 /* This used to test
3937
3938 -INTVAL (const_arg1) >= 0
3939
3940 But the Sun V5.0 compilers mis-compiled that test. So
3941 instead we test for the problematic value in a more direct
3942 manner and hope the Sun compilers get it correct. */
3943 && INTVAL (const_arg1) !=
3944 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
3945 && GET_CODE (folded_arg1) == REG)
3946 {
3947 rtx new_const = GEN_INT (-INTVAL (const_arg1));
3948 struct table_elt *p
3949 = lookup (new_const, safe_hash (new_const, mode) & HASH_MASK,
3950 mode);
3951
3952 if (p)
3953 for (p = p->first_same_value; p; p = p->next_same_value)
3954 if (GET_CODE (p->exp) == REG)
3955 return simplify_gen_binary (MINUS, mode, folded_arg0,
3956 canon_reg (p->exp, NULL_RTX));
3957 }
3958 goto from_plus;
3959
3960 case MINUS:
3961 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
3962 If so, produce (PLUS Z C2-C). */
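/* For example, if Y is known to be (plus (reg 65) (const_int 12)) and
   C is 4, the result folds to (plus (reg 65) (const_int 8)). */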
3963 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
3964 {
3965 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
3966 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
3967 return fold_rtx (plus_constant (copy_rtx (y),
3968 -INTVAL (const_arg1)),
3969 NULL_RTX);
3970 }
3971
3972 /* Fall through. */
3973
3974 from_plus:
3975 case SMIN: case SMAX: case UMIN: case UMAX:
3976 case IOR: case AND: case XOR:
3977 case MULT: case DIV: case UDIV:
3978 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
3979 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
3980 is known to be of similar form, we may be able to replace the
3981 operation with a combined operation. This may eliminate the
3982 intermediate operation if every use is simplified in this way.
3983 Note that the similar optimization done by combine.c only works
3984 if the intermediate operation's result has only one reference. */
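/* For illustration: if (reg 65) is known to be
   (ashift (reg 66) (const_int 2)), then (ashift (reg 65) (const_int 3))
   can become (ashift (reg 66) (const_int 5)); shift counts compose by
   addition, as ASSOCIATE_CODE records below. */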
3985
3986 if (GET_CODE (folded_arg0) == REG
3987 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
3988 {
3989 int is_shift
3990 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
3991 rtx y = lookup_as_function (folded_arg0, code);
3992 rtx inner_const;
3993 enum rtx_code associate_code;
3994 rtx new_const;
3995
3996 if (y == 0
3997 || 0 == (inner_const
3998 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
3999 || GET_CODE (inner_const) != CONST_INT
4000 /* If we have compiled a statement like
4001 "if (x == (x & mask1))", and now are looking at
4002 "x & mask2", we will have a case where the first operand
4003 of Y is the same as our first operand. Unless we detect
4004 this case, an infinite loop will result. */
4005 || XEXP (y, 0) == folded_arg0)
4006 break;
4007
4008 /* Don't associate these operations if they are a PLUS with the
4009 same constant and it is a power of two. These might be doable
4010 with a pre- or post-increment. Similarly for two subtracts of
4011 identical powers of two with post decrement. */
4012
4013 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
4014 && ((HAVE_PRE_INCREMENT
4015 && exact_log2 (INTVAL (const_arg1)) >= 0)
4016 || (HAVE_POST_INCREMENT
4017 && exact_log2 (INTVAL (const_arg1)) >= 0)
4018 || (HAVE_PRE_DECREMENT
4019 && exact_log2 (- INTVAL (const_arg1)) >= 0)
4020 || (HAVE_POST_DECREMENT
4021 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4022 break;
4023
4024 /* Compute the code used to compose the constants. For example,
4025 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
4026
4027 associate_code
4028 = (code == MULT || code == DIV || code == UDIV ? MULT
4029 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
4030
4031 new_const = simplify_binary_operation (associate_code, mode,
4032 const_arg1, inner_const);
4033
4034 if (new_const == 0)
4035 break;
4036
4037 /* If we are associating shift operations, don't let this
4038 produce a shift of the size of the object or larger.
4039 This could occur when we follow a sign-extend by a right
4040 shift on a machine that does a sign-extend as a pair
4041 of shifts. */
4042
4043 if (is_shift && GET_CODE (new_const) == CONST_INT
4044 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4045 {
4046 /* As an exception, we can turn an ASHIFTRT of this
4047 form into a shift of the number of bits - 1. */
4048 if (code == ASHIFTRT)
4049 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4050 else
4051 break;
4052 }
4053
4054 y = copy_rtx (XEXP (y, 0));
4055
4056 /* If Y contains our first operand (the most common way this
4057 can happen is if Y is a MEM), we would go into an infinite
4058 loop if we tried to fold it. So don't in that case. */
4059
4060 if (! reg_mentioned_p (folded_arg0, y))
4061 y = fold_rtx (y, insn);
4062
4063 return simplify_gen_binary (code, mode, y, new_const);
4064 }
4065 break;
4066
4067 default:
4068 break;
4069 }
4070
4071 new = simplify_binary_operation (code, mode,
4072 const_arg0 ? const_arg0 : folded_arg0,
4073 const_arg1 ? const_arg1 : folded_arg1);
4074 break;
4075
4076 case 'o':
4077 /* (lo_sum (high X) X) is simply X. */
4078 if (code == LO_SUM && const_arg0 != 0
4079 && GET_CODE (const_arg0) == HIGH
4080 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4081 return const_arg1;
4082 break;
4083
4084 case '3':
4085 case 'b':
4086 new = simplify_ternary_operation (code, mode, mode_arg0,
4087 const_arg0 ? const_arg0 : folded_arg0,
4088 const_arg1 ? const_arg1 : folded_arg1,
4089 const_arg2 ? const_arg2 : XEXP (x, 2));
4090 break;
4091
4092 case 'x':
4093 /* Always eliminate CONSTANT_P_RTX at this stage. */
4094 if (code == CONSTANT_P_RTX)
4095 return (const_arg0 ? const1_rtx : const0_rtx);
4096 break;
4097 }
4098
4099 return new ? new : x;
4100 }
4101 \f
4102 /* Return a constant value currently equivalent to X.
4103 Return 0 if we don't know one. */
4104
4105 static rtx
4106 equiv_constant (x)
4107 rtx x;
4108 {
4109 if (GET_CODE (x) == REG
4110 && REGNO_QTY_VALID_P (REGNO (x)))
4111 {
4112 int x_q = REG_QTY (REGNO (x));
4113 struct qty_table_elem *x_ent = &qty_table[x_q];
4114
4115 if (x_ent->const_rtx)
4116 x = gen_lowpart_if_possible (GET_MODE (x), x_ent->const_rtx);
4117 }
4118
4119 if (x == 0 || CONSTANT_P (x))
4120 return x;
4121
4122 /* If X is a MEM, try to fold it outside the context of any insn to see if
4123 it might be equivalent to a constant. That handles the case where it
4124 is a constant-pool reference. Then try to look it up in the hash table
4125 in case it is something whose value we have seen before. */
4126
4127 if (GET_CODE (x) == MEM)
4128 {
4129 struct table_elt *elt;
4130
4131 x = fold_rtx (x, NULL_RTX);
4132 if (CONSTANT_P (x))
4133 return x;
4134
4135 elt = lookup (x, safe_hash (x, GET_MODE (x)) & HASH_MASK, GET_MODE (x));
4136 if (elt == 0)
4137 return 0;
4138
4139 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4140 if (elt->is_const && CONSTANT_P (elt->exp))
4141 return elt->exp;
4142 }
4143
4144 return 0;
4145 }
4146 \f
4147 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4148 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4149 least-significant part of X.
4150 MODE specifies how big a part of X to return.
4151
4152 If the requested operation cannot be done, 0 is returned.
4153
4154 This is similar to gen_lowpart in emit-rtl.c. */
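/* For illustration: asking for the SImode low part of (mem:DI addr)
   yields (mem:SI addr) on a little-endian target, or
   (mem:SI (plus addr 4)) on a fully big-endian 32-bit target, provided
   the adjusted address is valid. */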
4155
4156 rtx
4157 gen_lowpart_if_possible (mode, x)
4158 enum machine_mode mode;
4159 register rtx x;
4160 {
4161 rtx result = gen_lowpart_common (mode, x);
4162
4163 if (result)
4164 return result;
4165 else if (GET_CODE (x) == MEM)
4166 {
4167 /* This is the only other case we handle. */
4168 register int offset = 0;
4169 rtx new;
4170
4171 if (WORDS_BIG_ENDIAN)
4172 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4173 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4174 if (BYTES_BIG_ENDIAN)
4175 /* Adjust the address so that the address-after-the-data is
4176 unchanged. */
4177 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4178 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
4179 new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset));
4180 if (! memory_address_p (mode, XEXP (new, 0)))
4181 return 0;
4182 MEM_COPY_ATTRIBUTES (new, x);
4183 return new;
4184 }
4185 else
4186 return 0;
4187 }
4188 \f
4189 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4190 branch. It will be zero if not.
4191
4192 In certain cases, this can cause us to add an equivalence. For example,
4193 if we are following the taken case of
4194 if (i == 2)
4195 we can add the fact that `i' and `2' are now equivalent.
4196
4197 In any case, we can record that this comparison was passed. If the same
4198 comparison is seen later, we will know its value. */
4199
4200 static void
4201 record_jump_equiv (insn, taken)
4202 rtx insn;
4203 int taken;
4204 {
4205 int cond_known_true;
4206 rtx op0, op1;
4207 rtx set;
4208 enum machine_mode mode, mode0, mode1;
4209 int reversed_nonequality = 0;
4210 enum rtx_code code;
4211
4212 /* Ensure this is the right kind of insn. */
4213 if (! any_condjump_p (insn))
4214 return;
4215 set = pc_set (insn);
4216
4217 /* See if this jump condition is known true or false. */
4218 if (taken)
4219 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
4220 else
4221 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
4222
4223 /* Get the type of comparison being done and the operands being compared.
4224 If we had to reverse a non-equality condition, record that fact so we
4225 know that it isn't valid for floating-point. */
4226 code = GET_CODE (XEXP (SET_SRC (set), 0));
4227 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4228 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4229
4230 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4231 if (! cond_known_true)
4232 {
4233 reversed_nonequality = (code != EQ && code != NE);
4234 code = reverse_condition (code);
4235
4236 /* Don't remember if we can't find the inverse. */
4237 if (code == UNKNOWN)
4238 return;
4239 }
4240
4241 /* The mode is the mode of the non-constant. */
4242 mode = mode0;
4243 if (mode1 != VOIDmode)
4244 mode = mode1;
4245
4246 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4247 }
4248
4249 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4250 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4251 Make any useful entries we can with that information. Called from
4252 above function and called recursively. */
4253
4254 static void
4255 record_jump_cond (code, mode, op0, op1, reversed_nonequality)
4256 enum rtx_code code;
4257 enum machine_mode mode;
4258 rtx op0, op1;
4259 int reversed_nonequality;
4260 {
4261 unsigned op0_hash, op1_hash;
4262 int op0_in_memory, op1_in_memory;
4263 struct table_elt *op0_elt, *op1_elt;
4264
4265 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4266 we know that they are also equal in the smaller mode (this is also
4267 true for all smaller modes whether or not there is a SUBREG, but
4268 is not worth testing for with no SUBREG). */
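/* For illustration: from a known-true
   (eq (subreg:DI (reg:SI 65) 0) (reg:DI 66)) we can also record that
   (reg:SI 65) equals the SImode low part of (reg:DI 66). */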
4269
4270 /* Note that GET_MODE (op0) may not equal MODE. */
4271 if (code == EQ && GET_CODE (op0) == SUBREG
4272 && (GET_MODE_SIZE (GET_MODE (op0))
4273 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4274 {
4275 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4276 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4277
4278 record_jump_cond (code, mode, SUBREG_REG (op0),
4279 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4280 reversed_nonequality);
4281 }
4282
4283 if (code == EQ && GET_CODE (op1) == SUBREG
4284 && (GET_MODE_SIZE (GET_MODE (op1))
4285 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4286 {
4287 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4288 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4289
4290 record_jump_cond (code, mode, SUBREG_REG (op1),
4291 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4292 reversed_nonequality);
4293 }
4294
4295 /* Similarly, if this is an NE comparison, and either is a SUBREG
4296 making a smaller mode, we know the whole thing is also NE. */
4297
4298 /* Note that GET_MODE (op0) may not equal MODE;
4299 if we test MODE instead, we can get an infinite recursion
4300 alternating between two modes each wider than MODE. */
4301
4302 if (code == NE && GET_CODE (op0) == SUBREG
4303 && subreg_lowpart_p (op0)
4304 && (GET_MODE_SIZE (GET_MODE (op0))
4305 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4306 {
4307 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4308 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4309
4310 record_jump_cond (code, mode, SUBREG_REG (op0),
4311 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4312 reversed_nonequality);
4313 }
4314
4315 if (code == NE && GET_CODE (op1) == SUBREG
4316 && subreg_lowpart_p (op1)
4317 && (GET_MODE_SIZE (GET_MODE (op1))
4318 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4319 {
4320 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4321 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4322
4323 record_jump_cond (code, mode, SUBREG_REG (op1),
4324 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4325 reversed_nonequality);
4326 }
4327
4328 /* Hash both operands. */
4329
4330 do_not_record = 0;
4331 hash_arg_in_memory = 0;
4332 op0_hash = HASH (op0, mode);
4333 op0_in_memory = hash_arg_in_memory;
4334
4335 if (do_not_record)
4336 return;
4337
4338 do_not_record = 0;
4339 hash_arg_in_memory = 0;
4340 op1_hash = HASH (op1, mode);
4341 op1_in_memory = hash_arg_in_memory;
4342
4343 if (do_not_record)
4344 return;
4345
4346 /* Look up both operands. */
4347 op0_elt = lookup (op0, op0_hash, mode);
4348 op1_elt = lookup (op1, op1_hash, mode);
4349
4350 /* If both operands are already equivalent or if they are not in the
4351 table but are identical, do nothing. */
4352 if ((op0_elt != 0 && op1_elt != 0
4353 && op0_elt->first_same_value == op1_elt->first_same_value)
4354 || op0 == op1 || rtx_equal_p (op0, op1))
4355 return;
4356
4357 /* If we aren't setting two things equal, all we can do is save this
4358 comparison. Similarly if this is floating-point. In the latter
4359 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4360 If we record the equality, we might inadvertently delete code
4361 whose intent was to change -0 to +0. */
4362
4363 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4364 {
4365 struct qty_table_elem *ent;
4366 int qty;
4367
4368 /* If we reversed a floating-point comparison, if OP0 is not a
4369 register, or if OP1 is neither a register nor a constant, we can't
4370 do anything. */
4371
4372 if (GET_CODE (op1) != REG)
4373 op1 = equiv_constant (op1);
4374
4375 if ((reversed_nonequality && FLOAT_MODE_P (mode))
4376 || GET_CODE (op0) != REG || op1 == 0)
4377 return;
4378
4379 /* Put OP0 in the hash table if it isn't already. This gives it a
4380 new quantity number. */
4381 if (op0_elt == 0)
4382 {
4383 if (insert_regs (op0, NULL_PTR, 0))
4384 {
4385 rehash_using_reg (op0);
4386 op0_hash = HASH (op0, mode);
4387
4388 /* If OP0 is contained in OP1, this changes its hash code
4389 as well. Faster to rehash than to check, except
4390 for the simple case of a constant. */
4391 if (! CONSTANT_P (op1))
4392 op1_hash = HASH (op1, mode);
4393 }
4394
4395 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
4396 op0_elt->in_memory = op0_in_memory;
4397 }
4398
4399 qty = REG_QTY (REGNO (op0));
4400 ent = &qty_table[qty];
4401
4402 ent->comparison_code = code;
4403 if (GET_CODE (op1) == REG)
4404 {
4405 /* Look it up again--in case op0 and op1 are the same. */
4406 op1_elt = lookup (op1, op1_hash, mode);
4407
4408 /* Put OP1 in the hash table so it gets a new quantity number. */
4409 if (op1_elt == 0)
4410 {
4411 if (insert_regs (op1, NULL_PTR, 0))
4412 {
4413 rehash_using_reg (op1);
4414 op1_hash = HASH (op1, mode);
4415 }
4416
4417 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
4418 op1_elt->in_memory = op1_in_memory;
4419 }
4420
4421 ent->comparison_const = NULL_RTX;
4422 ent->comparison_qty = REG_QTY (REGNO (op1));
4423 }
4424 else
4425 {
4426 ent->comparison_const = op1;
4427 ent->comparison_qty = -1;
4428 }
4429
4430 return;
4431 }
4432
4433 /* If either side is still missing an equivalence, make it now,
4434 then merge the equivalences. */
4435
4436 if (op0_elt == 0)
4437 {
4438 if (insert_regs (op0, NULL_PTR, 0))
4439 {
4440 rehash_using_reg (op0);
4441 op0_hash = HASH (op0, mode);
4442 }
4443
4444 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
4445 op0_elt->in_memory = op0_in_memory;
4446 }
4447
4448 if (op1_elt == 0)
4449 {
4450 if (insert_regs (op1, NULL_PTR, 0))
4451 {
4452 rehash_using_reg (op1);
4453 op1_hash = HASH (op1, mode);
4454 }
4455
4456 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
4457 op1_elt->in_memory = op1_in_memory;
4458 }
4459
4460 merge_equiv_classes (op0_elt, op1_elt);
4461 last_jump_equiv_class = op0_elt;
4462 }
4463 \f
4464 /* CSE processing for one instruction.
4465 First simplify sources and addresses of all assignments
4466 in the instruction, using previously-computed equivalent values.
4467 Then install the new sources and destinations in the table
4468 of available values.
4469
4470 If LIBCALL_INSN is nonzero, don't record any equivalence made in
4471 the insn. It means that INSN is inside a libcall block. In this
4472 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
4473
4474 /* Data on one SET contained in the instruction. */
4475
4476 struct set
4477 {
4478 /* The SET rtx itself. */
4479 rtx rtl;
4480 /* The SET_SRC of the rtx (the original value, if it is changing). */
4481 rtx src;
4482 /* The hash-table element for the SET_SRC of the SET. */
4483 struct table_elt *src_elt;
4484 /* Hash value for the SET_SRC. */
4485 unsigned src_hash;
4486 /* Hash value for the SET_DEST. */
4487 unsigned dest_hash;
4488 /* The SET_DEST, with SUBREG, etc., stripped. */
4489 rtx inner_dest;
4490 /* Nonzero if the SET_SRC is in memory. */
4491 char src_in_memory;
4492 /* Nonzero if the SET_SRC contains something
4493 whose value cannot be predicted and understood. */
4494 char src_volatile;
4495 /* Original machine mode, in case it becomes a CONST_INT. */
4496 enum machine_mode mode;
4497 /* A constant equivalent for SET_SRC, if any. */
4498 rtx src_const;
4499 /* Original SET_SRC value used for libcall notes. */
4500 rtx orig_src;
4501 /* Hash value of constant equivalent for SET_SRC. */
4502 unsigned src_const_hash;
4503 /* Table entry for constant equivalent for SET_SRC, if any. */
4504 struct table_elt *src_const_elt;
4505 };
4506
4507 static void
4508 cse_insn (insn, libcall_insn)
4509 rtx insn;
4510 rtx libcall_insn;
4511 {
4512 register rtx x = PATTERN (insn);
4513 register int i;
4514 rtx tem;
4515 register int n_sets = 0;
4516
4517 #ifdef HAVE_cc0
4518 /* Records what this insn does to set CC0. */
4519 rtx this_insn_cc0 = 0;
4520 enum machine_mode this_insn_cc0_mode = VOIDmode;
4521 #endif
4522
4523 rtx src_eqv = 0;
4524 struct table_elt *src_eqv_elt = 0;
4525 int src_eqv_volatile = 0;
4526 int src_eqv_in_memory = 0;
4527 unsigned src_eqv_hash = 0;
4528
4529 struct set *sets = (struct set *) NULL_PTR;
4530
4531 this_insn = insn;
4532
4533 /* Find all the SETs and CLOBBERs in this instruction.
4534 Record all the SETs in the array `sets' and count them.
4535 Also determine whether there is a CLOBBER that invalidates
4536 all memory references, or all references at varying addresses. */
4537
4538 if (GET_CODE (insn) == CALL_INSN)
4539 {
4540 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4541 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4542 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4543 }
4544
4545 if (GET_CODE (x) == SET)
4546 {
4547 sets = (struct set *) alloca (sizeof (struct set));
4548 sets[0].rtl = x;
4549
4550 /* Ignore SETs that are unconditional jumps.
4551 They never need cse processing, so this does not hurt.
4552 The reason is not efficiency but rather
4553 so that we can test at the end for instructions
4554 that have been simplified to unconditional jumps
4555 and not be misled by unchanged instructions
4556 that were unconditional jumps to begin with. */
4557 if (SET_DEST (x) == pc_rtx
4558 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4559 ;
4560
4561 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4562 The hard function value register is used only once, to copy to
4563 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4564 Ensure we invalidate the destination register. On the 80386 no
4565 other code would invalidate it since it is a fixed_reg.
4566 We need not check the return of apply_change_group; see canon_reg. */
4567
4568 else if (GET_CODE (SET_SRC (x)) == CALL)
4569 {
4570 canon_reg (SET_SRC (x), insn);
4571 apply_change_group ();
4572 fold_rtx (SET_SRC (x), insn);
4573 invalidate (SET_DEST (x), VOIDmode);
4574 }
4575 else
4576 n_sets = 1;
4577 }
4578 else if (GET_CODE (x) == PARALLEL)
4579 {
4580 register int lim = XVECLEN (x, 0);
4581
4582 sets = (struct set *) alloca (lim * sizeof (struct set));
4583
4584 /* Find all regs explicitly clobbered in this insn,
4585 and ensure they are not replaced with any other regs
4586 elsewhere in this insn.
4587 When a reg that is clobbered is also used for input,
4588 we should presume that that is for a reason,
4589 and we should not substitute some other register
4590 which is not supposed to be clobbered.
4591 Therefore, this loop cannot be merged into the one below
4592 because a CALL may precede a CLOBBER and refer to the
4593 value clobbered. We must not let a canonicalization do
4594 anything in that case. */
4595 for (i = 0; i < lim; i++)
4596 {
4597 register rtx y = XVECEXP (x, 0, i);
4598 if (GET_CODE (y) == CLOBBER)
4599 {
4600 rtx clobbered = XEXP (y, 0);
4601
4602 if (GET_CODE (clobbered) == REG
4603 || GET_CODE (clobbered) == SUBREG)
4604 invalidate (clobbered, VOIDmode);
4605 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4606 || GET_CODE (clobbered) == ZERO_EXTRACT)
4607 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4608 }
4609 }
4610
4611 for (i = 0; i < lim; i++)
4612 {
4613 register rtx y = XVECEXP (x, 0, i);
4614 if (GET_CODE (y) == SET)
4615 {
4616 /* As above, we ignore unconditional jumps and call-insns and
4617 ignore the result of apply_change_group. */
4618 if (GET_CODE (SET_SRC (y)) == CALL)
4619 {
4620 canon_reg (SET_SRC (y), insn);
4621 apply_change_group ();
4622 fold_rtx (SET_SRC (y), insn);
4623 invalidate (SET_DEST (y), VOIDmode);
4624 }
4625 else if (SET_DEST (y) == pc_rtx
4626 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4627 ;
4628 else
4629 sets[n_sets++].rtl = y;
4630 }
4631 else if (GET_CODE (y) == CLOBBER)
4632 {
4633 /* If we clobber memory, canon the address.
4634 This does nothing when a register is clobbered
4635 because we have already invalidated the reg. */
4636 if (GET_CODE (XEXP (y, 0)) == MEM)
4637 canon_reg (XEXP (y, 0), NULL_RTX);
4638 }
4639 else if (GET_CODE (y) == USE
4640 && ! (GET_CODE (XEXP (y, 0)) == REG
4641 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4642 canon_reg (y, NULL_RTX);
4643 else if (GET_CODE (y) == CALL)
4644 {
4645 /* The result of apply_change_group can be ignored; see
4646 canon_reg. */
4647 canon_reg (y, insn);
4648 apply_change_group ();
4649 fold_rtx (y, insn);
4650 }
4651 }
4652 }
4653 else if (GET_CODE (x) == CLOBBER)
4654 {
4655 if (GET_CODE (XEXP (x, 0)) == MEM)
4656 canon_reg (XEXP (x, 0), NULL_RTX);
4657 }
4658
4659 /* Canonicalize a USE of a pseudo register or memory location. */
4660 else if (GET_CODE (x) == USE
4661 && ! (GET_CODE (XEXP (x, 0)) == REG
4662 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4663 canon_reg (XEXP (x, 0), NULL_RTX);
4664 else if (GET_CODE (x) == CALL)
4665 {
4666 /* The result of apply_change_group can be ignored; see canon_reg. */
4667 canon_reg (x, insn);
4668 apply_change_group ();
4669 fold_rtx (x, insn);
4670 }
4671
4672 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4673 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4674 is handled specially for this case, and if it isn't set, then there will
4675 be no equivalence for the destination. */
4676 if (n_sets == 1 && REG_NOTES (insn) != 0
4677 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4678 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4679 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4680 src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
4681
4682 /* Canonicalize sources and addresses of destinations.
4683 We do this in a separate pass to avoid problems when a MATCH_DUP is
4684 present in the insn pattern. In that case, we want to ensure that
4685 we don't break the duplicate nature of the pattern. So we will replace
4686 both operands at the same time. Otherwise, we would fail to find an
4687 equivalent substitution in the loop calling validate_change below.
4688
4689 We used to suppress canonicalization of DEST if it appears in SRC,
4690 but we don't do this any more. */
4691
4692 for (i = 0; i < n_sets; i++)
4693 {
4694 rtx dest = SET_DEST (sets[i].rtl);
4695 rtx src = SET_SRC (sets[i].rtl);
4696 rtx new = canon_reg (src, insn);
4697 int insn_code;
4698
4699 sets[i].orig_src = src;
4700 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
4701 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4702 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4703 || (insn_code = recog_memoized (insn)) < 0
4704 || insn_data[insn_code].n_dups > 0)
4705 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4706 else
4707 SET_SRC (sets[i].rtl) = new;
4708
4709 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
4710 {
4711 validate_change (insn, &XEXP (dest, 1),
4712 canon_reg (XEXP (dest, 1), insn), 1);
4713 validate_change (insn, &XEXP (dest, 2),
4714 canon_reg (XEXP (dest, 2), insn), 1);
4715 }
4716
4717 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
4718 || GET_CODE (dest) == ZERO_EXTRACT
4719 || GET_CODE (dest) == SIGN_EXTRACT)
4720 dest = XEXP (dest, 0);
4721
4722 if (GET_CODE (dest) == MEM)
4723 canon_reg (dest, insn);
4724 }
4725
4726 /* Now that we have done all the replacements, we can apply the change
4727 group and see if they all work. Note that this will cause some
4728 canonicalizations that would have worked individually not to be applied
4729 because some other canonicalization didn't work, but this should not
4730 occur often.
4731
4732 The result of apply_change_group can be ignored; see canon_reg. */
4733
4734 apply_change_group ();
4735
4736 /* Set sets[i].src_elt to the class each source belongs to.
4737 Detect assignments from or to volatile things
4738 and set sets[i] to zero so they will be ignored
4739 in the rest of this function.
4740
4741 Nothing in this loop changes the hash table or the register chains. */
4742
4743 for (i = 0; i < n_sets; i++)
4744 {
4745 register rtx src, dest;
4746 register rtx src_folded;
4747 register struct table_elt *elt = 0, *p;
4748 enum machine_mode mode;
4749 rtx src_eqv_here;
4750 rtx src_const = 0;
4751 rtx src_related = 0;
4752 struct table_elt *src_const_elt = 0;
4753 int src_cost = 10000, src_eqv_cost = 10000, src_folded_cost = 10000;
4754 int src_related_cost = 10000, src_elt_cost = 10000;
4755 /* Set non-zero if we need to call force_const_mem on the
4756 contents of src_folded before using it. */
4757 int src_folded_force_flag = 0;
4758
4759 dest = SET_DEST (sets[i].rtl);
4760 src = SET_SRC (sets[i].rtl);
4761
4762 /* If SRC is a constant that has no machine mode,
4763 hash it with the destination's machine mode.
4764 This way we can keep different modes separate. */
4765
4766 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4767 sets[i].mode = mode;
4768
4769 if (src_eqv)
4770 {
4771 enum machine_mode eqvmode = mode;
4772 if (GET_CODE (dest) == STRICT_LOW_PART)
4773 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4774 do_not_record = 0;
4775 hash_arg_in_memory = 0;
4776 src_eqv = fold_rtx (src_eqv, insn);
4777 src_eqv_hash = HASH (src_eqv, eqvmode);
4778
4779 /* Find the equivalence class for the equivalent expression. */
4780
4781 if (!do_not_record)
4782 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
4783
4784 src_eqv_volatile = do_not_record;
4785 src_eqv_in_memory = hash_arg_in_memory;
4786 }
4787
4788 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4789 value of the INNER register, not the destination. So it is not
4790 a valid substitution for the source. But save it for later. */
4791 if (GET_CODE (dest) == STRICT_LOW_PART)
4792 src_eqv_here = 0;
4793 else
4794 src_eqv_here = src_eqv;
4795
4796 /* Simplify any foldable subexpressions in SRC. Then get the fully-
4797 simplified result, which may not necessarily be valid. */
4798 src_folded = fold_rtx (src, insn);
4799
4800 #if 0
4801 /* ??? This caused bad code to be generated for the m68k port with -O2.
4802 Suppose src is (CONST_INT -1), and that after truncation src_folded
4803 is (CONST_INT 3). Suppose src_folded is then used for src_const.
4804 At the end we will add src and src_const to the same equivalence
4805 class. We now have 3 and -1 on the same equivalence class. This
4806 causes later instructions to be mis-optimized. */
4807 /* If storing a constant in a bitfield, pre-truncate the constant
4808 so we will be able to record it later. */
4809 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
4810 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
4811 {
4812 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
4813
4814 if (GET_CODE (src) == CONST_INT
4815 && GET_CODE (width) == CONST_INT
4816 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
4817 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
4818 src_folded
4819 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
4820 << INTVAL (width)) - 1));
4821 }
4822 #endif
4823
4824 /* Compute SRC's hash code, and also notice if it
4825 should not be recorded at all. In that case,
4826 prevent any further processing of this assignment. */
4827 do_not_record = 0;
4828 hash_arg_in_memory = 0;
4829
4830 sets[i].src = src;
4831 sets[i].src_hash = HASH (src, mode);
4832 sets[i].src_volatile = do_not_record;
4833 sets[i].src_in_memory = hash_arg_in_memory;
4834
4835 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
4836 a pseudo that is set more than once, do not record SRC. Using
4837 SRC as a replacement for anything else will be incorrect in that
4838 situation. Note that this usually occurs only for stack slots,
4839 in which case all the RTL would be referring to SRC, so we don't
4840 lose any optimization opportunities by not having SRC in the
4841 hash table. */
4842
4843 if (GET_CODE (src) == MEM
4844 && find_reg_note (insn, REG_EQUIV, src) != 0
4845 && GET_CODE (dest) == REG
4846 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
4847 && REG_N_SETS (REGNO (dest)) != 1)
4848 sets[i].src_volatile = 1;
4849
4850 #if 0
4851 /* It is no longer clear why we used to do this, but it doesn't
4852 appear to still be needed. So let's try without it since this
4853 code hurts cse'ing widened ops. */
4854 /* If source is a perverse subreg (such as QI treated as an SI),
4855 treat it as volatile. It may do the work of an SI in one context
4856 where the extra bits are not being used, but cannot replace an SI
4857 in general. */
4858 if (GET_CODE (src) == SUBREG
4859 && (GET_MODE_SIZE (GET_MODE (src))
4860 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
4861 sets[i].src_volatile = 1;
4862 #endif
4863
4864 /* Locate all possible equivalent forms for SRC. Try to replace
4865 SRC in the insn with each cheaper equivalent.
4866
4867 We have the following types of equivalents: SRC itself, a folded
4868 version, a value given in a REG_EQUAL note, or a value related
4869 to a constant.
4870
4871 Each of these equivalents may be part of an additional class
4872 of equivalents (if more than one is in the table, they must be in
4873 the same class; we check for this).
4874
4875 If the source is volatile, we don't do any table lookups.
4876
4877 We note any constant equivalent for possible later use in a
4878 REG_NOTE. */
4879
4880 if (!sets[i].src_volatile)
4881 elt = lookup (src, sets[i].src_hash, mode);
4882
4883 sets[i].src_elt = elt;
4884
4885 if (elt && src_eqv_here && src_eqv_elt)
4886 {
4887 if (elt->first_same_value != src_eqv_elt->first_same_value)
4888 {
4889 /* The REG_EQUAL is indicating that two formerly distinct
4890 classes are now equivalent. So merge them. */
4891 merge_equiv_classes (elt, src_eqv_elt);
4892 src_eqv_hash = HASH (src_eqv, elt->mode);
4893 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
4894 }
4895
4896 src_eqv_here = 0;
4897 }
4898
4899 else if (src_eqv_elt)
4900 elt = src_eqv_elt;
4901
4902 /* Try to find a constant somewhere and record it in `src_const'.
4903 Record its table element, if any, in `src_const_elt'. Look in
4904 any known equivalences first. (If the constant is not in the
4905 table, also set `sets[i].src_const_hash'). */
4906 if (elt)
4907 for (p = elt->first_same_value; p; p = p->next_same_value)
4908 if (p->is_const)
4909 {
4910 src_const = p->exp;
4911 src_const_elt = elt;
4912 break;
4913 }
4914
4915 if (src_const == 0
4916 && (CONSTANT_P (src_folded)
4917 /* Consider (minus (label_ref L1) (label_ref L2)) as
4918 "constant" here so we will record it. This allows us
4919 to fold switch statements when an ADDR_DIFF_VEC is used. */
4920 || (GET_CODE (src_folded) == MINUS
4921 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
4922 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
4923 src_const = src_folded, src_const_elt = elt;
4924 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
4925 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
4926
4927 /* If we don't know if the constant is in the table, get its
4928 hash code and look it up. */
4929 if (src_const && src_const_elt == 0)
4930 {
4931 sets[i].src_const_hash = HASH (src_const, mode);
4932 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
4933 }
4934
4935 sets[i].src_const = src_const;
4936 sets[i].src_const_elt = src_const_elt;
4937
4938 /* If the constant and our source are both in the table, mark them as
4939 equivalent. Otherwise, if a constant is in the table but the source
4940 isn't, set ELT to it. */
4941 if (src_const_elt && elt
4942 && src_const_elt->first_same_value != elt->first_same_value)
4943 merge_equiv_classes (elt, src_const_elt);
4944 else if (src_const_elt && elt == 0)
4945 elt = src_const_elt;
4946
4947 /* See if there is a register linearly related to a constant
4948 equivalent of SRC. */
4949 if (src_const
4950 && (GET_CODE (src_const) == CONST
4951 || (src_const_elt && src_const_elt->related_value != 0)))
4952 {
4953 src_related = use_related_value (src_const, src_const_elt);
4954 if (src_related)
4955 {
4956 struct table_elt *src_related_elt
4957 = lookup (src_related, HASH (src_related, mode), mode);
4958 if (src_related_elt && elt)
4959 {
4960 if (elt->first_same_value
4961 != src_related_elt->first_same_value)
4962 /* This can occur when we previously saw a CONST
4963 involving a SYMBOL_REF and then see the SYMBOL_REF
4964 twice. Merge the involved classes. */
4965 merge_equiv_classes (elt, src_related_elt);
4966
4967 src_related = 0;
4968 src_related_elt = 0;
4969 }
4970 else if (src_related_elt && elt == 0)
4971 elt = src_related_elt;
4972 }
4973 }
4974
4975 /* See if we have a CONST_INT that is already in a register in a
4976 wider mode. */
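/* For illustration: if (reg:DI 66) is already known to hold
   (const_int 5), an SImode use of 5 may be able to use
   (subreg:SI (reg:DI 66) 0) instead of loading the constant again
   (register numbers hypothetical). */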
4977
4978 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
4979 && GET_MODE_CLASS (mode) == MODE_INT
4980 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
4981 {
4982 enum machine_mode wider_mode;
4983
4984 for (wider_mode = GET_MODE_WIDER_MODE (mode);
4985 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
4986 && src_related == 0;
4987 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
4988 {
4989 struct table_elt *const_elt
4990 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
4991
4992 if (const_elt == 0)
4993 continue;
4994
4995 for (const_elt = const_elt->first_same_value;
4996 const_elt; const_elt = const_elt->next_same_value)
4997 if (GET_CODE (const_elt->exp) == REG)
4998 {
4999 src_related = gen_lowpart_if_possible (mode,
5000 const_elt->exp);
5001 break;
5002 }
5003 }
5004 }
5005
5006 /* Another possibility is that we have an AND with a constant in
5007 a mode narrower than a word. If so, it might have been generated
5008 as part of an "if" which would narrow the AND. If we already
5009 have done the AND in a wider mode, we can use a SUBREG of that
5010 value. */
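/* For illustration: if the first operand of the AND is the low part of
   (reg:DI 66) and (and:DI (reg:DI 66) (const_int 255)) is already
   available in (reg:DI 67), SRC_RELATED may become
   (subreg:SI (reg:DI 67) 0). */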
5011
5012 if (flag_expensive_optimizations && ! src_related
5013 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5014 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5015 {
5016 enum machine_mode tmode;
5017 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5018
5019 for (tmode = GET_MODE_WIDER_MODE (mode);
5020 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5021 tmode = GET_MODE_WIDER_MODE (tmode))
5022 {
5023 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
5024 struct table_elt *larger_elt;
5025
5026 if (inner)
5027 {
5028 PUT_MODE (new_and, tmode);
5029 XEXP (new_and, 0) = inner;
5030 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5031 if (larger_elt == 0)
5032 continue;
5033
5034 for (larger_elt = larger_elt->first_same_value;
5035 larger_elt; larger_elt = larger_elt->next_same_value)
5036 if (GET_CODE (larger_elt->exp) == REG)
5037 {
5038 src_related
5039 = gen_lowpart_if_possible (mode, larger_elt->exp);
5040 break;
5041 }
5042
5043 if (src_related)
5044 break;
5045 }
5046 }
5047 }
5048
5049 #ifdef LOAD_EXTEND_OP
5050 /* See if a MEM has already been loaded with a widening operation;
5051 if it has, we can use a subreg of that. Many CISC machines
5052 also have such operations, but this is only likely to be
5053 beneficial on these machines. */
5054
5055 if (flag_expensive_optimizations && src_related == 0
5056 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5057 && GET_MODE_CLASS (mode) == MODE_INT
5058 && GET_CODE (src) == MEM && ! do_not_record
5059 && LOAD_EXTEND_OP (mode) != NIL)
5060 {
5061 enum machine_mode tmode;
5062
5063 /* Set what we are trying to extend and the operation it might
5064 have been extended with. */
5065 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5066 XEXP (memory_extend_rtx, 0) = src;
5067
5068 for (tmode = GET_MODE_WIDER_MODE (mode);
5069 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5070 tmode = GET_MODE_WIDER_MODE (tmode))
5071 {
5072 struct table_elt *larger_elt;
5073
5074 PUT_MODE (memory_extend_rtx, tmode);
5075 larger_elt = lookup (memory_extend_rtx,
5076 HASH (memory_extend_rtx, tmode), tmode);
5077 if (larger_elt == 0)
5078 continue;
5079
5080 for (larger_elt = larger_elt->first_same_value;
5081 larger_elt; larger_elt = larger_elt->next_same_value)
5082 if (GET_CODE (larger_elt->exp) == REG)
5083 {
5084 src_related = gen_lowpart_if_possible (mode,
5085 larger_elt->exp);
5086 break;
5087 }
5088
5089 if (src_related)
5090 break;
5091 }
5092 }
5093 #endif /* LOAD_EXTEND_OP */
5094
5095 if (src == src_folded)
5096 src_folded = 0;
5097
5098 /* At this point, ELT, if non-zero, points to a class of expressions
5099 equivalent to the source of this SET, while SRC, SRC_EQV, SRC_FOLDED,
5100 and SRC_RELATED, if non-zero, each contain additional equivalent
5101 expressions. Prune these latter expressions by deleting expressions
5102 already in the equivalence class.
5103
5104 Check for an equivalent identical to the destination. If found,
5105 this is the preferred equivalent since it will likely lead to
5106 elimination of the insn. Indicate this by placing it in
5107 `src_related'. */
5108
5109 if (elt)
5110 elt = elt->first_same_value;
5111 for (p = elt; p; p = p->next_same_value)
5112 {
5113 enum rtx_code code = GET_CODE (p->exp);
5114
5115 /* If the expression is not valid, ignore it. Then we do not
5116 have to check for validity below. In most cases, we can use
5117 `rtx_equal_p', since canonicalization has already been done. */
5118 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
5119 continue;
5120
5121 /* Also skip paradoxical subregs, unless that's what we're
5122 looking for. */
5123 if (code == SUBREG
5124 && (GET_MODE_SIZE (GET_MODE (p->exp))
5125 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5126 && ! (src != 0
5127 && GET_CODE (src) == SUBREG
5128 && GET_MODE (src) == GET_MODE (p->exp)
5129 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5130 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5131 continue;
5132
5133 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5134 src = 0;
5135 else if (src_folded && GET_CODE (src_folded) == code
5136 && rtx_equal_p (src_folded, p->exp))
5137 src_folded = 0;
5138 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5139 && rtx_equal_p (src_eqv_here, p->exp))
5140 src_eqv_here = 0;
5141 else if (src_related && GET_CODE (src_related) == code
5142 && rtx_equal_p (src_related, p->exp))
5143 src_related = 0;
5144
5145 /* If this is the same as the destination of the insn, we want
5146 to prefer it; copy it to src_related. The code below will
5147 then give it a negative cost. */
5148 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5149 src_related = dest;
5150 }
5151
5152 /* Find the cheapest valid equivalent, trying all the available
5153 possibilities. Prefer items not in the hash table to ones
5154 that are when they are equal cost. Note that we can never
5155 worsen an insn as the current contents will also succeed.
5156 If we find an equivalent identical to the destination, use it as best,
5157 since this insn will probably be eliminated in that case. */
5158 if (src)
5159 {
5160 if (rtx_equal_p (src, dest))
5161 src_cost = -1;
5162 else
5163 src_cost = COST (src);
5164 }
5165
5166 if (src_eqv_here)
5167 {
5168 if (rtx_equal_p (src_eqv_here, dest))
5169 src_eqv_cost = -1;
5170 else
5171 src_eqv_cost = COST (src_eqv_here);
5172 }
5173
5174 if (src_folded)
5175 {
5176 if (rtx_equal_p (src_folded, dest))
5177 src_folded_cost = -1;
5178 else
5179 src_folded_cost = COST (src_folded);
5180 }
5181
5182 if (src_related)
5183 {
5184 if (rtx_equal_p (src_related, dest))
5185 src_related_cost = -1;
5186 else
5187 src_related_cost = COST (src_related);
5188 }
5189
5190 /* If this was an indirect jump insn, a known label will really be
5191 cheaper even though it looks more expensive. */
5192 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5193 src_folded = src_const, src_folded_cost = -1;
5194
5195 /* Terminate loop when replacement made. This must terminate since
5196 the current contents will be tested and will always be valid. */
5197 while (1)
5198 {
5199 rtx trial;
5200
5201 /* Skip invalid entries. */
5202 while (elt && GET_CODE (elt->exp) != REG
5203 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
5204 elt = elt->next_same_value;
5205
5206 /* A paradoxical subreg would be bad here: it'll be the right
5207 size, but later may be adjusted so that the upper bits aren't
5208 what we want. So reject it. */
5209 if (elt != 0
5210 && GET_CODE (elt->exp) == SUBREG
5211 && (GET_MODE_SIZE (GET_MODE (elt->exp))
5212 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5213 /* It is okay, though, if the rtx we're trying to match
5214 will ignore any of the bits we can't predict. */
5215 && ! (src != 0
5216 && GET_CODE (src) == SUBREG
5217 && GET_MODE (src) == GET_MODE (elt->exp)
5218 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5219 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5220 {
5221 elt = elt->next_same_value;
5222 continue;
5223 }
5224
5225 if (elt)
5226 src_elt_cost = elt->cost;
5227
5228 /* Find cheapest and skip it for the next time. For items
5229 of equal cost, use this order:
5230 src_folded, src, src_eqv, src_related and hash table entry. */
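/* Illustration of the sentinel scheme used below: once src_folded has
   been tried, its cost is bumped to 10000 so the next iteration falls
   through to src, then src_eqv, and so on, until some trial
   validates.  */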
5231 if (src_folded_cost <= src_cost
5232 && src_folded_cost <= src_eqv_cost
5233 && src_folded_cost <= src_related_cost
5234 && src_folded_cost <= src_elt_cost)
5235 {
5236 trial = src_folded, src_folded_cost = 10000;
5237 if (src_folded_force_flag)
5238 trial = force_const_mem (mode, trial);
5239 }
5240 else if (src_cost <= src_eqv_cost
5241 && src_cost <= src_related_cost
5242 && src_cost <= src_elt_cost)
5243 trial = src, src_cost = 10000;
5244 else if (src_eqv_cost <= src_related_cost
5245 && src_eqv_cost <= src_elt_cost)
5246 trial = copy_rtx (src_eqv_here), src_eqv_cost = 10000;
5247 else if (src_related_cost <= src_elt_cost)
5248 trial = copy_rtx (src_related), src_related_cost = 10000;
5249 else
5250 {
5251 trial = copy_rtx (elt->exp);
5252 elt = elt->next_same_value;
5253 src_elt_cost = 10000;
5254 }
5255
5256 /* We don't normally have an insn matching (set (pc) (pc)), so
5257 check for this separately here. We will delete such an
5258 insn below.
5259
5260 Tablejump insns contain a USE of the table, so simply replacing
5261 the operand with the constant won't match. This is simply an
5262 unconditional branch, however, and is therefore valid. Just
5263 insert the substitution here and we will delete and re-emit
5264 the insn later. */
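/* Hypothetical instance: a tablejump
   (parallel [(set (pc) (mem (plus (reg 103) (label_ref L_table))))
              (use (label_ref L_table))])
   whose index folds to a constant leaves SET_SRC as a bare LABEL_REF;
   the USE keeps a direct substitution from matching, so we rebuild
   the insn with gen_jump instead.  */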
5265
5266 if (n_sets == 1 && dest == pc_rtx
5267 && (trial == pc_rtx
5268 || (GET_CODE (trial) == LABEL_REF
5269 && ! condjump_p (insn))))
5270 {
5271 if (trial == pc_rtx)
5272 {
5273 SET_SRC (sets[i].rtl) = trial;
5274 cse_jumps_altered = 1;
5275 break;
5276 }
5277
5278 PATTERN (insn) = gen_jump (XEXP (trial, 0));
5279 INSN_CODE (insn) = -1;
5280
5281 if (NEXT_INSN (insn) != 0
5282 && GET_CODE (NEXT_INSN (insn)) != BARRIER)
5283 emit_barrier_after (insn);
5284
5285 cse_jumps_altered = 1;
5286 break;
5287 }
5288
5289 /* Look for a substitution that makes a valid insn. */
5290 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5291 {
5292 /* If we just made a substitution inside a libcall, then we
5293 need to make the same substitution in any notes attached
5294 to the RETVAL insn. */
5295 if (libcall_insn
5296 && (GET_CODE (sets[i].orig_src) == REG
5297 || GET_CODE (sets[i].orig_src) == SUBREG
5298 || GET_CODE (sets[i].orig_src) == MEM))
5299 replace_rtx (REG_NOTES (libcall_insn), sets[i].orig_src,
5300 canon_reg (SET_SRC (sets[i].rtl), insn));
5301
5302 /* The result of apply_change_group can be ignored; see
5303 canon_reg. */
5304
5305 validate_change (insn, &SET_SRC (sets[i].rtl),
5306 canon_reg (SET_SRC (sets[i].rtl), insn),
5307 1);
5308 apply_change_group ();
5309 break;
5310 }
5311
5312 /* If we previously found constant pool entries for
5313 constants and this is a constant, try making a
5314 pool entry.  Put it in src_folded unless we have already done
5315 this, since that is where it likely came from.  */
5316
5317 else if (constant_pool_entries_cost
5318 && CONSTANT_P (trial)
5319 && ! (GET_CODE (trial) == CONST
5320 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
5321 && (src_folded == 0
5322 || (GET_CODE (src_folded) != MEM
5323 && ! src_folded_force_flag))
5324 && GET_MODE_CLASS (mode) != MODE_CC
5325 && mode != VOIDmode)
5326 {
5327 src_folded_force_flag = 1;
5328 src_folded = trial;
5329 src_folded_cost = constant_pool_entries_cost;
5330 }
5331 }
5332
5333 src = SET_SRC (sets[i].rtl);
5334
5335 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5336 However, there is an important exception: If both are registers
5337 that are not the head of their equivalence class, replace SET_SRC
5338 with the head of the class. If we do not do this, we will have
5339 both registers live over a portion of the basic block. This way,
5340 their lifetimes will likely abut instead of overlapping. */
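/* Sketch with invented pseudos: if (reg 106) shares a quantity headed
   by (reg 104), then (set (reg 106) (reg 106)) is rewritten as
   (set (reg 106) (reg 104)); afterwards only (reg 104) need stay live
   across this stretch of the block.  */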
5341 if (GET_CODE (dest) == REG
5342 && REGNO_QTY_VALID_P (REGNO (dest)))
5343 {
5344 int dest_q = REG_QTY (REGNO (dest));
5345 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5346
5347 if (dest_ent->mode == GET_MODE (dest)
5348 && dest_ent->first_reg != REGNO (dest)
5349 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
5350 /* Don't do this if the original insn had a hard reg as
5351 SET_SRC or SET_DEST. */
5352 && (GET_CODE (sets[i].src) != REG
5353 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5354 && (GET_CODE (dest) != REG || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5355 /* We can't call canon_reg here because it won't do anything if
5356 SRC is a hard register. */
5357 {
5358 int src_q = REG_QTY (REGNO (src));
5359 struct qty_table_elem *src_ent = &qty_table[src_q];
5360 int first = src_ent->first_reg;
5361 rtx new_src
5362 = (first >= FIRST_PSEUDO_REGISTER
5363 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5364
5365 /* We must use validate-change even for this, because this
5366 might be a special no-op instruction, suitable only to
5367 tag notes onto. */
5368 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5369 {
5370 src = new_src;
5371 /* If we had a constant that is cheaper than what we are now
5372 setting SRC to, use that constant. We ignored it when we
5373 thought we could make this into a no-op. */
5374 if (src_const && COST (src_const) < COST (src)
5375 && validate_change (insn, &SET_SRC (sets[i].rtl),
5376 src_const, 0))
5377 src = src_const;
5378 }
5379 }
5380 }
5381
5382 /* If we made a change, recompute SRC values. */
5383 if (src != sets[i].src)
5384 {
5385 cse_altered = 1;
5386 do_not_record = 0;
5387 hash_arg_in_memory = 0;
5388 sets[i].src = src;
5389 sets[i].src_hash = HASH (src, mode);
5390 sets[i].src_volatile = do_not_record;
5391 sets[i].src_in_memory = hash_arg_in_memory;
5392 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5393 }
5394
5395 /* If this is a single SET, we are setting a register, and we have an
5396 equivalent constant, we want to add a REG_NOTE. We don't want
5397 to write a REG_EQUAL note for a constant pseudo since verifying that
5398 that pseudo hasn't been eliminated is a pain. Such a note also
5399 won't help anything.
5400
5401 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5402 which can be created for a reference to a compile time computable
5403 entry in a jump table. */
5404
5405 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
5406 && GET_CODE (src_const) != REG
5407 && ! (GET_CODE (src_const) == CONST
5408 && GET_CODE (XEXP (src_const, 0)) == MINUS
5409 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5410 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5411 {
5412 tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5413
5414 /* Make sure that the rtx is not shared with any other insn. */
5415 src_const = copy_rtx (src_const);
5416
5417 /* Record the actual constant value in a REG_EQUAL note, making
5418 a new one if one does not already exist. */
5419 if (tem)
5420 XEXP (tem, 0) = src_const;
5421 else
5422 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL,
5423 src_const, REG_NOTES (insn));
5424
5425 /* If storing a constant value in a register that
5426 previously held the constant value 0,
5427 record this fact with a REG_WAS_0 note on this insn.
5428
5429 Note that the *register* is required to have previously held 0,
5430 not just any register in the quantity, and we must point to the
5431 insn that set that register to zero.
5432
5433 Rather than track each register individually, we just see if
5434 the last set for this quantity was for this register. */
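/* Schematic example, registers invented: if insn A was
   (set (reg 107) (const_int 0)) and the current insn loads another
   constant into (reg 107), we attach (insn_list REG_WAS_0 A) here,
   but only if A's destination really was (reg 107) itself.  */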
5435
5436 if (REGNO_QTY_VALID_P (REGNO (dest)))
5437 {
5438 int dest_q = REG_QTY (REGNO (dest));
5439 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5440
5441 if (dest_ent->const_rtx == const0_rtx)
5442 {
5443 /* See if we previously had a REG_WAS_0 note. */
5444 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
5445 rtx const_insn = dest_ent->const_insn;
5446
5447 if ((tem = single_set (const_insn)) != 0
5448 && rtx_equal_p (SET_DEST (tem), dest))
5449 {
5450 if (note)
5451 XEXP (note, 0) = const_insn;
5452 else
5453 REG_NOTES (insn)
5454 = gen_rtx_INSN_LIST (REG_WAS_0, const_insn,
5455 REG_NOTES (insn));
5456 }
5457 }
5458 }
5459 }
5460
5461 /* Now deal with the destination. */
5462 do_not_record = 0;
5463
5464 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
5465 to the MEM or REG within it. */
5466 while (GET_CODE (dest) == SIGN_EXTRACT
5467 || GET_CODE (dest) == ZERO_EXTRACT
5468 || GET_CODE (dest) == SUBREG
5469 || GET_CODE (dest) == STRICT_LOW_PART)
5470 dest = XEXP (dest, 0);
5471
5472 sets[i].inner_dest = dest;
5473
5474 if (GET_CODE (dest) == MEM)
5475 {
5476 #ifdef PUSH_ROUNDING
5477 /* Stack pushes invalidate the stack pointer. */
5478 rtx addr = XEXP (dest, 0);
5479 if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
5480 && XEXP (addr, 0) == stack_pointer_rtx)
5481 invalidate (stack_pointer_rtx, Pmode);
5482 #endif
5483 dest = fold_rtx (dest, insn);
5484 }
5485
5486 /* Compute the hash code of the destination now,
5487 before the effects of this instruction are recorded,
5488 since the register values used in the address computation
5489 are those before this instruction. */
5490 sets[i].dest_hash = HASH (dest, mode);
5491
5492 /* Don't enter a bit-field in the hash table
5493 because the value in it after the store
5494 may not equal what was stored, due to truncation. */
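/* Worked example, operands invented: storing (const_int 37) into a
   4-bit ZERO_EXTRACT leaves the field holding 5, since
   37 & ~((HOST_WIDE_INT) -1 << 4) == 5; recording 37 as the field's
   value would therefore be wrong.  */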
5495
5496 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5497 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5498 {
5499 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5500
5501 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5502 && GET_CODE (width) == CONST_INT
5503 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5504 && ! (INTVAL (src_const)
5505 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5506 /* Exception: if the value is constant,
5507 and it won't be truncated, record it. */
5508 ;
5509 else
5510 {
5511 /* This is chosen so that the destination will be invalidated
5512 but no new value will be recorded.
5513 We must invalidate because sometimes constant
5514 values can be recorded for bitfields. */
5515 sets[i].src_elt = 0;
5516 sets[i].src_volatile = 1;
5517 src_eqv = 0;
5518 src_eqv_elt = 0;
5519 }
5520 }
5521
5522 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5523 the insn. */
5524 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5525 {
5526 /* One less use of the label this insn used to jump to. */
5527 if (JUMP_LABEL (insn) != 0)
5528 --LABEL_NUSES (JUMP_LABEL (insn));
5529 PUT_CODE (insn, NOTE);
5530 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
5531 NOTE_SOURCE_FILE (insn) = 0;
5532 cse_jumps_altered = 1;
5533 /* No more processing for this set. */
5534 sets[i].rtl = 0;
5535 }
5536
5537 /* If this SET is now setting PC to a label, we know it used to
5538 be a conditional or computed branch. So we see if we can follow
5539 it. If it was a computed branch, delete it and re-emit. */
5540 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
5541 {
5542 /* If this is not in the format for a simple branch and
5543 it is the only SET in the insn, re-emit it.  */
5544 if (! simplejump_p (insn) && n_sets == 1)
5545 {
5546 rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
5547 JUMP_LABEL (new) = XEXP (src, 0);
5548 LABEL_NUSES (XEXP (src, 0))++;
5549 insn = new;
5550 }
5551 else
5552 /* Otherwise, force rerecognition, since it probably had
5553 a different pattern before.
5554 This shouldn't really be necessary, since whatever
5555 changed the source value above should have done this.
5556 Until the right place is found, might as well do this here. */
5557 INSN_CODE (insn) = -1;
5558
5559 never_reached_warning (insn);
5560
5561 /* Now emit a BARRIER after the unconditional jump. Do not bother
5562 deleting any unreachable code, let jump/flow do that. */
5563 if (NEXT_INSN (insn) != 0
5564 && GET_CODE (NEXT_INSN (insn)) != BARRIER)
5565 emit_barrier_after (insn);
5566
5567 cse_jumps_altered = 1;
5568 sets[i].rtl = 0;
5569 }
5570
5571 /* If destination is volatile, invalidate it and then do no further
5572 processing for this assignment. */
5573
5574 else if (do_not_record)
5575 {
5576 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
5577 || GET_CODE (dest) == MEM)
5578 invalidate (dest, VOIDmode);
5579 else if (GET_CODE (dest) == STRICT_LOW_PART
5580 || GET_CODE (dest) == ZERO_EXTRACT)
5581 invalidate (XEXP (dest, 0), GET_MODE (dest));
5582 sets[i].rtl = 0;
5583 }
5584
5585 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5586 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5587
5588 #ifdef HAVE_cc0
5589 /* If setting CC0, record what it was set to, or a constant, if it
5590 is equivalent to a constant. If it is being set to a floating-point
5591 value, make a COMPARE with the appropriate constant of 0. If we
5592 don't do this, later code can interpret this as a test against
5593 const0_rtx, which can cause problems if we try to put it into an
5594 insn as a floating-point operand. */
5595 if (dest == cc0_rtx)
5596 {
5597 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5598 this_insn_cc0_mode = mode;
5599 if (FLOAT_MODE_P (mode))
5600 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5601 CONST0_RTX (mode));
5602 }
5603 #endif
5604 }
5605
5606 /* Now enter all non-volatile source expressions in the hash table
5607 if they are not already present.
5608 Record their equivalence classes in src_elt.
5609 This way we can insert the corresponding destinations into
5610 the same classes even if the actual sources are no longer in them
5611 (having been invalidated). */
5612
5613 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5614 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5615 {
5616 register struct table_elt *elt;
5617 register struct table_elt *classp = sets[0].src_elt;
5618 rtx dest = SET_DEST (sets[0].rtl);
5619 enum machine_mode eqvmode = GET_MODE (dest);
5620
5621 if (GET_CODE (dest) == STRICT_LOW_PART)
5622 {
5623 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5624 classp = 0;
5625 }
5626 if (insert_regs (src_eqv, classp, 0))
5627 {
5628 rehash_using_reg (src_eqv);
5629 src_eqv_hash = HASH (src_eqv, eqvmode);
5630 }
5631 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5632 elt->in_memory = src_eqv_in_memory;
5633 src_eqv_elt = elt;
5634
5635 /* Check to see if src_eqv_elt is the same as a set source which
5636 does not yet have an elt, and if so set the elt of the set source
5637 to src_eqv_elt. */
5638 for (i = 0; i < n_sets; i++)
5639 if (sets[i].rtl && sets[i].src_elt == 0
5640 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5641 sets[i].src_elt = src_eqv_elt;
5642 }
5643
5644 for (i = 0; i < n_sets; i++)
5645 if (sets[i].rtl && ! sets[i].src_volatile
5646 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5647 {
5648 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5649 {
5650 /* REG_EQUAL in setting a STRICT_LOW_PART
5651 gives an equivalent for the entire destination register,
5652 not just for the subreg being stored in now.
5653 This is a more interesting equivalence, so we arrange later
5654 to treat the entire reg as the destination. */
5655 sets[i].src_elt = src_eqv_elt;
5656 sets[i].src_hash = src_eqv_hash;
5657 }
5658 else
5659 {
5660 /* Insert source and constant equivalent into hash table, if not
5661 already present. */
5662 register struct table_elt *classp = src_eqv_elt;
5663 register rtx src = sets[i].src;
5664 register rtx dest = SET_DEST (sets[i].rtl);
5665 enum machine_mode mode
5666 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5667
5668 if (sets[i].src_elt == 0)
5669 {
5670 /* Don't put a hard register source into the table if this is
5671 the last insn of a libcall. In this case, we only need
5672 to put src_eqv_elt in src_elt. */
5673 if (GET_CODE (src) != REG
5674 || REGNO (src) >= FIRST_PSEUDO_REGISTER
5675 || ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5676 {
5677 register struct table_elt *elt;
5678
5679 /* Note that these insert_regs calls cannot remove
5680 any of the src_elt's, because they would have failed to
5681 match if not still valid. */
5682 if (insert_regs (src, classp, 0))
5683 {
5684 rehash_using_reg (src);
5685 sets[i].src_hash = HASH (src, mode);
5686 }
5687 elt = insert (src, classp, sets[i].src_hash, mode);
5688 elt->in_memory = sets[i].src_in_memory;
5689 sets[i].src_elt = classp = elt;
5690 }
5691 else
5692 sets[i].src_elt = classp;
5693 }
5694 if (sets[i].src_const && sets[i].src_const_elt == 0
5695 && src != sets[i].src_const
5696 && ! rtx_equal_p (sets[i].src_const, src))
5697 sets[i].src_elt = insert (sets[i].src_const, classp,
5698 sets[i].src_const_hash, mode);
5699 }
5700 }
5701 else if (sets[i].src_elt == 0)
5702 /* If we did not insert the source into the hash table (e.g., it was
5703 volatile), note the equivalence class for the REG_EQUAL value, if any,
5704 so that the destination goes into that class. */
5705 sets[i].src_elt = src_eqv_elt;
5706
5707 invalidate_from_clobbers (x);
5708
5709 /* Some registers are invalidated by subroutine calls. Memory is
5710 invalidated by non-constant calls. */
5711
5712 if (GET_CODE (insn) == CALL_INSN)
5713 {
5714 if (! CONST_CALL_P (insn))
5715 invalidate_memory ();
5716 invalidate_for_call ();
5717 }
5718
5719 /* Now invalidate everything set by this instruction.
5720 If a SUBREG or other funny destination is being set,
5721 sets[i].rtl is still nonzero, so here we invalidate the reg
5722 a part of which is being set. */
5723
5724 for (i = 0; i < n_sets; i++)
5725 if (sets[i].rtl)
5726 {
5727 /* We can't use the inner dest, because the mode associated with
5728 a ZERO_EXTRACT is significant. */
5729 register rtx dest = SET_DEST (sets[i].rtl);
5730
5731 /* Needed for registers to remove the register from its
5732 previous quantity's chain.
5733 Needed for memory if this is a nonvarying address, unless
5734 we have just done an invalidate_memory that covers even those. */
5735 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
5736 || GET_CODE (dest) == MEM)
5737 invalidate (dest, VOIDmode);
5738 else if (GET_CODE (dest) == STRICT_LOW_PART
5739 || GET_CODE (dest) == ZERO_EXTRACT)
5740 invalidate (XEXP (dest, 0), GET_MODE (dest));
5741 }
5742
5743 /* A volatile ASM invalidates everything. */
5744 if (GET_CODE (insn) == INSN
5745 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5746 && MEM_VOLATILE_P (PATTERN (insn)))
5747 flush_hash_table ();
5748
5749 /* Make sure registers mentioned in destinations
5750 are safe for use in an expression to be inserted.
5751 This removes from the hash table
5752 any invalid entry that refers to one of these registers.
5753
5754 We don't care about the return value from mention_regs because
5755 we are going to hash the SET_DEST values unconditionally. */
5756
5757 for (i = 0; i < n_sets; i++)
5758 {
5759 if (sets[i].rtl)
5760 {
5761 rtx x = SET_DEST (sets[i].rtl);
5762
5763 if (GET_CODE (x) != REG)
5764 mention_regs (x);
5765 else
5766 {
5767 /* We used to rely on all references to a register becoming
5768 inaccessible when a register changes to a new quantity,
5769 since that changes the hash code. However, that is not
5770 safe, since after HASH_SIZE new quantities we get a
5771 hash 'collision' of a register with its own invalid
5772 entries. And since SUBREGs have been changed not to
5773 change their hash code with the hash code of the register,
5774 it wouldn't work any longer at all. So we have to check
5775 for any invalid references lying around now.
5776 This code is similar to the REG case in mention_regs,
5777 but it knows that reg_tick has been incremented, and
5778 it leaves reg_in_table as -1.  */
5779 unsigned int regno = REGNO (x);
5780 unsigned int endregno
5781 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
5782 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
5783 unsigned int i;
5784
5785 for (i = regno; i < endregno; i++)
5786 {
5787 if (REG_IN_TABLE (i) >= 0)
5788 {
5789 remove_invalid_refs (i);
5790 REG_IN_TABLE (i) = -1;
5791 }
5792 }
5793 }
5794 }
5795 }
5796
5797 /* We may have just removed some of the src_elt's from the hash table.
5798 So replace each one with the current head of the same class. */
5799
5800 for (i = 0; i < n_sets; i++)
5801 if (sets[i].rtl)
5802 {
5803 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
5804 /* If elt was removed, find current head of same class,
5805 or 0 if nothing remains of that class. */
5806 {
5807 register struct table_elt *elt = sets[i].src_elt;
5808
5809 while (elt && elt->prev_same_value)
5810 elt = elt->prev_same_value;
5811
5812 while (elt && elt->first_same_value == 0)
5813 elt = elt->next_same_value;
5814 sets[i].src_elt = elt ? elt->first_same_value : 0;
5815 }
5816 }
5817
5818 /* Now insert the destinations into their equivalence classes. */
5819
5820 for (i = 0; i < n_sets; i++)
5821 if (sets[i].rtl)
5822 {
5823 register rtx dest = SET_DEST (sets[i].rtl);
5824 rtx inner_dest = sets[i].inner_dest;
5825 register struct table_elt *elt;
5826
5827 /* Don't record value if we are not supposed to risk allocating
5828 floating-point values in registers that might be wider than
5829 memory. */
5830 if ((flag_float_store
5831 && GET_CODE (dest) == MEM
5832 && FLOAT_MODE_P (GET_MODE (dest)))
5833 /* Don't record BLKmode values, because we don't know the
5834 size of it, and can't be sure that other BLKmode values
5835 have the same or smaller size. */
5836 || GET_MODE (dest) == BLKmode
5837 /* Don't record values of destinations set inside a libcall block
5838 since we might delete the libcall. Things should have been set
5839 up so we won't want to reuse such a value, but we play it safe
5840 here. */
5841 || libcall_insn
5842 /* If we didn't put a REG_EQUAL value or a source into the hash
5843 table, there is no point in recording DEST.  */
5844 || sets[i].src_elt == 0
5845 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
5846 or SIGN_EXTEND, don't record DEST since it can cause
5847 some tracking to be wrong.
5848
5849 ??? Think about this more later. */
5850 || (GET_CODE (dest) == SUBREG
5851 && (GET_MODE_SIZE (GET_MODE (dest))
5852 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
5853 && (GET_CODE (sets[i].src) == SIGN_EXTEND
5854 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
5855 continue;
5856
5857 /* STRICT_LOW_PART isn't part of the value BEING set,
5858 and neither is the SUBREG inside it.
5859 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
5860 if (GET_CODE (dest) == STRICT_LOW_PART)
5861 dest = SUBREG_REG (XEXP (dest, 0));
5862
5863 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5864 /* Registers must also be inserted into chains for quantities. */
5865 if (insert_regs (dest, sets[i].src_elt, 1))
5866 {
5867 /* If `insert_regs' changes something, the hash code must be
5868 recalculated. */
5869 rehash_using_reg (dest);
5870 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5871 }
5872
5873 if (GET_CODE (inner_dest) == MEM
5874 && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
5875 /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
5876 that (MEM (ADDRESSOF (X))) is equivalent to Y.
5877 Consider the case in which the address of the MEM is
5878 passed to a function, which alters the MEM. Then, if we
5879 later use Y instead of the MEM we'll miss the update. */
5880 elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
5881 else
5882 elt = insert (dest, sets[i].src_elt,
5883 sets[i].dest_hash, GET_MODE (dest));
5884
5885 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
5886 && (! RTX_UNCHANGING_P (sets[i].inner_dest)
5887 || FIXED_BASE_PLUS_P (XEXP (sets[i].inner_dest,
5888 0))));
5889
5890 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
5891 narrower than M2, and both M1 and M2 are the same number of words,
5892 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
5893 make that equivalence as well.
5894
5895 However, BAR may have equivalences for which gen_lowpart_if_possible
5896 will produce a simpler value than gen_lowpart_if_possible applied to
5897 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
5898 BAR's equivalences. If we don't get a simplified form, make
5899 the SUBREG. It will not be used in an equivalence, but will
5900 cause two similar assignments to be detected.
5901
5902 Note the loop below will find SUBREG_REG (DEST) since we have
5903 already entered SRC and DEST of the SET in the table. */
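/* Concrete shape, modes invented: from
   (set (subreg:SI (reg:HI 110) 0) (reg:SI 111)), where SImode and
   HImode occupy the same single word, we also record
   (subreg:HI (reg:SI 111) 0) as an equivalent of (reg:HI 110).  */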
5904
5905 if (GET_CODE (dest) == SUBREG
5906 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
5907 / UNITS_PER_WORD)
5908 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
5909 && (GET_MODE_SIZE (GET_MODE (dest))
5910 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
5911 && sets[i].src_elt != 0)
5912 {
5913 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
5914 struct table_elt *elt, *classp = 0;
5915
5916 for (elt = sets[i].src_elt->first_same_value; elt;
5917 elt = elt->next_same_value)
5918 {
5919 rtx new_src = 0;
5920 unsigned src_hash;
5921 struct table_elt *src_elt;
5922
5923 /* Ignore invalid entries. */
5924 if (GET_CODE (elt->exp) != REG
5925 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
5926 continue;
5927
5928 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
5929 if (new_src == 0)
5930 new_src = gen_rtx_SUBREG (new_mode, elt->exp, 0);
5931
5932 src_hash = HASH (new_src, new_mode);
5933 src_elt = lookup (new_src, src_hash, new_mode);
5934
5935 /* Put the new source in the hash table if it isn't
5936 there already.  */
5937 if (src_elt == 0)
5938 {
5939 if (insert_regs (new_src, classp, 0))
5940 {
5941 rehash_using_reg (new_src);
5942 src_hash = HASH (new_src, new_mode);
5943 }
5944 src_elt = insert (new_src, classp, src_hash, new_mode);
5945 src_elt->in_memory = elt->in_memory;
5946 }
5947 else if (classp && classp != src_elt->first_same_value)
5948 /* Show that two things that we've seen before are
5949 actually the same. */
5950 merge_equiv_classes (src_elt, classp);
5951
5952 classp = src_elt->first_same_value;
5953 /* Ignore invalid entries. */
5954 while (classp
5955 && GET_CODE (classp->exp) != REG
5956 && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
5957 classp = classp->next_same_value;
5958 }
5959 }
5960 }
5961
5962 /* Special handling for (set REG0 REG1) where REG0 is the
5963 "cheapest", cheaper than REG1. After cse, REG1 will probably not
5964 be used in the sequel, so (if easily done) change this insn to
5965 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
5966 that computed their value. Then REG1 will become a dead store
5967 and won't cloud the situation for later optimizations.
5968
5969 Do not make this change if REG1 is a hard register, because it will
5970 then be used in the sequel and we may be changing a two-operand insn
5971 into a three-operand insn.
5972
5973 Also do not do this if we are operating on a copy of INSN.
5974
5975 Also don't do this if INSN ends a libcall; this would cause an unrelated
5976 register to be set in the middle of a libcall, and we then get bad code
5977 if the libcall is deleted. */
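/* The intended rewrite, with invented pseudos where (reg 120) is the
   cheaper REG0:
     before:  (set (reg 121) X)  (set (reg 120) (reg 121))
     after:   (set (reg 120) X)  (set (reg 121) (reg 120))
   The copy into (reg 121) is then a likely-dead store.  */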
5978
5979 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
5980 && NEXT_INSN (PREV_INSN (insn)) == insn
5981 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
5982 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
5983 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
5984 {
5985 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
5986 struct qty_table_elem *src_ent = &qty_table[src_q];
5987
5988 if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
5989 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5990 {
5991 rtx prev = prev_nonnote_insn (insn);
5992
5993 if (prev != 0 && GET_CODE (prev) == INSN
5994 && GET_CODE (PATTERN (prev)) == SET
5995 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl))
5996 {
5997 rtx dest = SET_DEST (sets[0].rtl);
5998 rtx src = SET_SRC (sets[0].rtl);
5999 rtx note = find_reg_note (prev, REG_EQUIV, NULL_RTX);
6000
6001 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6002 validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6003 validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6004 apply_change_group ();
6005
6006 /* If REG1 was equivalent to a constant, REG0 is not. */
6007 if (note)
6008 PUT_REG_NOTE_KIND (note, REG_EQUAL);
6009
6010 /* If there was a REG_WAS_0 note on PREV, remove it. Move
6011 any REG_WAS_0 note on INSN to PREV. */
6012 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
6013 if (note)
6014 remove_note (prev, note);
6015
6016 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
6017 if (note)
6018 {
6019 remove_note (insn, note);
6020 XEXP (note, 1) = REG_NOTES (prev);
6021 REG_NOTES (prev) = note;
6022 }
6023
6024 /* If INSN has a REG_EQUAL note, and this note mentions
6025 REG0, then we must delete it, because the value in
6026 REG0 has changed. If the note's value is REG1, we must
6027 also delete it because that is now this insn's dest. */
6028 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6029 if (note != 0
6030 && (reg_mentioned_p (dest, XEXP (note, 0))
6031 || rtx_equal_p (src, XEXP (note, 0))))
6032 remove_note (insn, note);
6033 }
6034 }
6035 }
6036
6037 /* If this is a conditional jump insn, record any known equivalences due to
6038 the condition being tested. */
6039
6040 last_jump_equiv_class = 0;
6041 if (GET_CODE (insn) == JUMP_INSN
6042 && n_sets == 1 && GET_CODE (x) == SET
6043 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6044 record_jump_equiv (insn, 0);
6045
6046 #ifdef HAVE_cc0
6047 /* If the previous insn set CC0 and this insn no longer references CC0,
6048 delete the previous insn. Here we use the fact that nothing expects CC0
6049 to be valid over an insn, which is true until the final pass. */
6050 if (prev_insn && GET_CODE (prev_insn) == INSN
6051 && (tem = single_set (prev_insn)) != 0
6052 && SET_DEST (tem) == cc0_rtx
6053 && ! reg_mentioned_p (cc0_rtx, x))
6054 {
6055 PUT_CODE (prev_insn, NOTE);
6056 NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
6057 NOTE_SOURCE_FILE (prev_insn) = 0;
6058 }
6059
6060 prev_insn_cc0 = this_insn_cc0;
6061 prev_insn_cc0_mode = this_insn_cc0_mode;
6062 #endif
6063
6064 prev_insn = insn;
6065 }
6066 \f
6067 /* Remove from the hash table all expressions that reference memory. */
6068
6069 static void
6070 invalidate_memory ()
6071 {
6072 register int i;
6073 register struct table_elt *p, *next;
6074
6075 for (i = 0; i < HASH_SIZE; i++)
6076 for (p = table[i]; p; p = next)
6077 {
6078 next = p->next_same_hash;
6079 if (p->in_memory)
6080 remove_from_table (p, i);
6081 }
6082 }
6083
6084 /* If ADDR is an address that implicitly affects the stack pointer, return
6085 1 and update the register tables to show the effect. Else, return 0. */
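/* Typical cases, purely illustrative: autoincrement addresses such as
   (pre_dec (reg sp)) or (post_inc (reg sp)) generated for pushes and
   pops; GET_RTX_CLASS for these codes is 'a'.  */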
6086
6087 static int
6088 addr_affects_sp_p (addr)
6089 register rtx addr;
6090 {
6091 if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
6092 && GET_CODE (XEXP (addr, 0)) == REG
6093 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6094 {
6095 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6096 REG_TICK (STACK_POINTER_REGNUM)++;
6097
6098 /* This should be *very* rare. */
6099 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6100 invalidate (stack_pointer_rtx, VOIDmode);
6101
6102 return 1;
6103 }
6104
6105 return 0;
6106 }
6107
6108 /* Perform invalidation on the basis of everything about an insn
6109 except for invalidating the actual places that are SET in it.
6110 This includes the places CLOBBERed, and anything that might
6111 alias with something that is SET or CLOBBERed.
6112
6113 X is the pattern of the insn. */
6114
6115 static void
6116 invalidate_from_clobbers (x)
6117 rtx x;
6118 {
6119 if (GET_CODE (x) == CLOBBER)
6120 {
6121 rtx ref = XEXP (x, 0);
6122 if (ref)
6123 {
6124 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6125 || GET_CODE (ref) == MEM)
6126 invalidate (ref, VOIDmode);
6127 else if (GET_CODE (ref) == STRICT_LOW_PART
6128 || GET_CODE (ref) == ZERO_EXTRACT)
6129 invalidate (XEXP (ref, 0), GET_MODE (ref));
6130 }
6131 }
6132 else if (GET_CODE (x) == PARALLEL)
6133 {
6134 register int i;
6135 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6136 {
6137 register rtx y = XVECEXP (x, 0, i);
6138 if (GET_CODE (y) == CLOBBER)
6139 {
6140 rtx ref = XEXP (y, 0);
6141 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6142 || GET_CODE (ref) == MEM)
6143 invalidate (ref, VOIDmode);
6144 else if (GET_CODE (ref) == STRICT_LOW_PART
6145 || GET_CODE (ref) == ZERO_EXTRACT)
6146 invalidate (XEXP (ref, 0), GET_MODE (ref));
6147 }
6148 }
6149 }
6150 }
6151 \f
6152 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6153 and replace any registers in them with either an equivalent constant
6154 or the canonical form of the register. If we are inside an address,
6155 only do this if the address remains valid.
6156
6157 OBJECT is 0 except when within a MEM in which case it is the MEM.
6158
6159 Return the replacement for X. */
6160
6161 static rtx
6162 cse_process_notes (x, object)
6163 rtx x;
6164 rtx object;
6165 {
6166 enum rtx_code code = GET_CODE (x);
6167 const char *fmt = GET_RTX_FORMAT (code);
6168 int i;
6169
6170 switch (code)
6171 {
6172 case CONST_INT:
6173 case CONST:
6174 case SYMBOL_REF:
6175 case LABEL_REF:
6176 case CONST_DOUBLE:
6177 case PC:
6178 case CC0:
6179 case LO_SUM:
6180 return x;
6181
6182 case MEM:
6183 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
6184 return x;
6185
6186 case EXPR_LIST:
6187 case INSN_LIST:
6188 if (REG_NOTE_KIND (x) == REG_EQUAL)
6189 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6190 if (XEXP (x, 1))
6191 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6192 return x;
6193
6194 case SIGN_EXTEND:
6195 case ZERO_EXTEND:
6196 case SUBREG:
6197 {
6198 rtx new = cse_process_notes (XEXP (x, 0), object);
6199 /* We don't substitute VOIDmode constants into these rtx,
6200 since they would impede folding. */
6201 if (GET_MODE (new) != VOIDmode)
6202 validate_change (object, &XEXP (x, 0), new, 0);
6203 return x;
6204 }
6205
6206 case REG:
6207 i = REG_QTY (REGNO (x));
6208
6209 /* Return a constant or a constant register. */
6210 if (REGNO_QTY_VALID_P (REGNO (x)))
6211 {
6212 struct qty_table_elem *ent = &qty_table[i];
6213
6214 if (ent->const_rtx != NULL_RTX
6215 && (CONSTANT_P (ent->const_rtx)
6216 || GET_CODE (ent->const_rtx) == REG))
6217 {
6218 rtx new = gen_lowpart_if_possible (GET_MODE (x), ent->const_rtx);
6219 if (new)
6220 return new;
6221 }
6222 }
6223
6224 /* Otherwise, canonicalize this register. */
6225 return canon_reg (x, NULL_RTX);
6226
6227 default:
6228 break;
6229 }
6230
6231 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6232 if (fmt[i] == 'e')
6233 validate_change (object, &XEXP (x, i),
6234 cse_process_notes (XEXP (x, i), object), 0);
6235
6236 return x;
6237 }
6238 \f
6239 /* Find common subexpressions between the end test of a loop and the beginning
6240 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
6241
6242 Often we have a loop where an expression in the exit test is used
6243 in the body of the loop. For example "while (*p) *q++ = *p++;".
6244 Because of the way we duplicate the loop exit test in front of the loop,
6245 however, we don't detect that common subexpression. This will be caught
6246 when global cse is implemented, but this is a quite common case.
6247
6248 This function handles the most common cases of these common expressions.
6249 It is called after we have processed the basic block ending with the
6250 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
6251 jumps to a label used only once. */
6252
6253 static void
6254 cse_around_loop (loop_start)
6255 rtx loop_start;
6256 {
6257 rtx insn;
6258 int i;
6259 struct table_elt *p;
6260
6261 /* If the jump at the end of the loop doesn't go to the start, we don't
6262 do anything. */
6263 for (insn = PREV_INSN (loop_start);
6264 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
6265 insn = PREV_INSN (insn))
6266 ;
6267
6268 if (insn == 0
6269 || GET_CODE (insn) != NOTE
6270 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
6271 return;
6272
6273 /* If the last insn of the loop (the end test) was an NE comparison,
6274 we will interpret it as an EQ comparison, since we fell through
6275 the loop. Any equivalences resulting from that comparison are
6276 therefore not valid and must be invalidated. */
6277 if (last_jump_equiv_class)
6278 for (p = last_jump_equiv_class->first_same_value; p;
6279 p = p->next_same_value)
6280 {
6281 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
6282 || (GET_CODE (p->exp) == SUBREG
6283 && GET_CODE (SUBREG_REG (p->exp)) == REG))
6284 invalidate (p->exp, VOIDmode);
6285 else if (GET_CODE (p->exp) == STRICT_LOW_PART
6286 || GET_CODE (p->exp) == ZERO_EXTRACT)
6287 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
6288 }
6289
6290 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
6291 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
6292
6293 The only thing we do with SET_DEST is invalidate entries, so we
6294 can safely process each SET in order. It is slightly less efficient
6295 to do so, but we only want to handle the most common cases.
6296
6297 The gen_move_insn call in cse_set_around_loop may create new pseudos.
6298 These pseudos won't have valid entries in any of the tables indexed
6299 by register number, such as reg_qty. We avoid out-of-range array
6300 accesses by not processing any instructions created after cse started. */
6301
6302 for (insn = NEXT_INSN (loop_start);
6303 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
6304 && INSN_UID (insn) < max_insn_uid
6305 && ! (GET_CODE (insn) == NOTE
6306 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
6307 insn = NEXT_INSN (insn))
6308 {
6309 if (INSN_P (insn)
6310 && (GET_CODE (PATTERN (insn)) == SET
6311 || GET_CODE (PATTERN (insn)) == CLOBBER))
6312 cse_set_around_loop (PATTERN (insn), insn, loop_start);
6313 else if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == PARALLEL)
6314 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6315 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
6316 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
6317 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
6318 loop_start);
6319 }
6320 }
6321 \f
6322 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
6323 since they are done elsewhere. This function is called via note_stores. */
6324
6325 static void
6326 invalidate_skipped_set (dest, set, data)
6327 rtx dest;
6328 rtx set;
6329 void *data ATTRIBUTE_UNUSED;
6330 {
6331 enum rtx_code code = GET_CODE (dest);
6332
6333 if (code == MEM
6334 && ! addr_affects_sp_p (dest) /* If this is not a stack push ... */
6335 /* There are times when an address can appear varying and be a PLUS
6336 during this scan when it would be a fixed address were we to know
6337 the proper equivalences. So invalidate all memory if there is
6338 a BLKmode or nonscalar memory reference or a reference to a
6339 variable address. */
6340 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6341 || cse_rtx_varies_p (XEXP (dest, 0))))
6342 {
6343 invalidate_memory ();
6344 return;
6345 }
6346
6347 if (GET_CODE (set) == CLOBBER
6348 #ifdef HAVE_cc0
6349 || dest == cc0_rtx
6350 #endif
6351 || dest == pc_rtx)
6352 return;
6353
6354 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6355 invalidate (XEXP (dest, 0), GET_MODE (dest));
6356 else if (code == REG || code == SUBREG || code == MEM)
6357 invalidate (dest, VOIDmode);
6358 }
6359
6360 /* Invalidate all insns from START up to the end of the function or the
6361 next label.  This is called when we wish to CSE around a block that is
6362 conditionally executed. */
6363
6364 static void
6365 invalidate_skipped_block (start)
6366 rtx start;
6367 {
6368 rtx insn;
6369
6370 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
6371 insn = NEXT_INSN (insn))
6372 {
6373 if (! INSN_P (insn))
6374 continue;
6375
6376 if (GET_CODE (insn) == CALL_INSN)
6377 {
6378 if (! CONST_CALL_P (insn))
6379 invalidate_memory ();
6380 invalidate_for_call ();
6381 }
6382
6383 invalidate_from_clobbers (PATTERN (insn));
6384 note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6385 }
6386 }
6387 \f
6388 /* If modifying X will modify the value in *DATA (which is really an
6389 `rtx *'), indicate that fact by setting the pointed to value to
6390 NULL_RTX. */
6391
6392 static void
6393 cse_check_loop_start (x, set, data)
6394 rtx x;
6395 rtx set ATTRIBUTE_UNUSED;
6396 void *data;
6397 {
6398 rtx *cse_check_loop_start_value = (rtx *) data;
6399
6400 if (*cse_check_loop_start_value == NULL_RTX
6401 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
6402 return;
6403
6404 if ((GET_CODE (x) == MEM && GET_CODE (*cse_check_loop_start_value) == MEM)
6405 || reg_overlap_mentioned_p (x, *cse_check_loop_start_value))
6406 *cse_check_loop_start_value = NULL_RTX;
6407 }
6408
6409 /* X is a SET or CLOBBER contained in INSN that was found near the start of
6410 a loop that starts with the label at LOOP_START.
6411
6412 If X is a SET, we see if its SET_SRC is currently in our hash table.
6413 If so, we see if it has a value equal to some register used only in the
6414 loop exit code (as marked by jump.c).
6415
6416 If those two conditions are true, we search backwards from the start of
6417 the loop to see if that same value was loaded into a register that still
6418 retains its value at the start of the loop.
6419
6420 If so, we insert an insn after the load to copy the destination of that
6421 load into the equivalent register and (try to) replace our SET_SRC with that
6422 register.
6423
6424 In any event, we invalidate whatever this SET or CLOBBER modifies. */
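/* Schematic of the transformation, all operands invented, where
   (reg 132) is a loop-test register known equivalent to EXPR:
     before:  (set (reg 130) EXPR) ... loop: (set (reg 131) EXPR)
     after:   (set (reg 130) EXPR) (set (reg 132) (reg 130))
              ... loop: (set (reg 131) (reg 132))  */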
6425
6426 static void
6427 cse_set_around_loop (x, insn, loop_start)
6428 rtx x;
6429 rtx insn;
6430 rtx loop_start;
6431 {
6432 struct table_elt *src_elt;
6433
6434 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
6435 are setting PC or CC0 or whose SET_SRC is already a register. */
6436 if (GET_CODE (x) == SET
6437 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
6438 && GET_CODE (SET_SRC (x)) != REG)
6439 {
6440 src_elt = lookup (SET_SRC (x),
6441 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
6442 GET_MODE (SET_DEST (x)));
6443
6444 if (src_elt)
6445 for (src_elt = src_elt->first_same_value; src_elt;
6446 src_elt = src_elt->next_same_value)
6447 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
6448 && COST (src_elt->exp) < COST (SET_SRC (x)))
6449 {
6450 rtx p, set;
6451
6452 /* Look for an insn in front of LOOP_START that sets
6453 something in the desired mode to SET_SRC (x) before we hit
6454 a label or CALL_INSN. */
6455
6456 for (p = prev_nonnote_insn (loop_start);
6457 p && GET_CODE (p) != CALL_INSN
6458 && GET_CODE (p) != CODE_LABEL;
6459 p = prev_nonnote_insn (p))
6460 if ((set = single_set (p)) != 0
6461 && GET_CODE (SET_DEST (set)) == REG
6462 && GET_MODE (SET_DEST (set)) == src_elt->mode
6463 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
6464 {
6465 /* We now have to ensure that nothing between P
6466 and LOOP_START modified anything referenced in
6467 SET_SRC (x). We know that nothing within the loop
6468 can modify it, or we would have invalidated it in
6469 the hash table. */
6470 rtx q;
6471 rtx cse_check_loop_start_value = SET_SRC (x);
6472 for (q = p; q != loop_start; q = NEXT_INSN (q))
6473 if (INSN_P (q))
6474 note_stores (PATTERN (q),
6475 cse_check_loop_start,
6476 &cse_check_loop_start_value);
6477
6478 /* If nothing was changed and we can replace our
6479 SET_SRC, add an insn after P to copy its destination
6480 to what we will be replacing SET_SRC with. */
6481 if (cse_check_loop_start_value
6482 && validate_change (insn, &SET_SRC (x),
6483 src_elt->exp, 0))
6484 {
6485 /* If this creates new pseudos, this is unsafe,
6486 because the regno of new pseudo is unsuitable
6487 to index into reg_qty when cse_insn processes
6488 the new insn. Therefore, if a new pseudo was
6489 created, discard this optimization. */
6490 int nregs = max_reg_num ();
6491 rtx move
6492 = gen_move_insn (src_elt->exp, SET_DEST (set));
6493 if (nregs != max_reg_num ())
6494 {
6495 if (! validate_change (insn, &SET_SRC (x),
6496 SET_SRC (set), 0))
6497 abort ();
6498 }
6499 else
6500 emit_insn_after (move, p);
6501 }
6502 break;
6503 }
6504 }
6505 }
6506
6507 /* Deal with the destination of X affecting the stack pointer. */
6508 addr_affects_sp_p (SET_DEST (x));
6509
6510 /* See comment on similar code in cse_insn for explanation of these
6511 tests. */
6512 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
6513 || GET_CODE (SET_DEST (x)) == MEM)
6514 invalidate (SET_DEST (x), VOIDmode);
6515 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
6516 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
6517 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
6518 }
6519 \f
6520 /* Find the end of INSN's basic block and return its range,
6521 the total number of SETs in all the insns of the block, the last insn of the
6522 block, and the branch path.
6523
6524 The branch path indicates which branches should be followed. If a non-zero
6525 path size is specified, the block should be rescanned and a different set
6526 of branches will be taken. The branch path is only used if
6527 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
6528
6529 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6530 used to describe the block. It is filled in with the information about
6531 the current block. The incoming structure's branch path, if any, is used
6532 to construct the output branch path. */
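/* Hedged example of a branch path: for a block that followed two
   conditional jumps, data->path might hold
     { {branch = jump1, status = TAKEN}, {branch = jump2, status = TAKEN} }
   and the next call flips the final TAKEN entry to NOT_TAKEN so the
   other arm gets scanned.  */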
6533
6534 void
6535 cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
6536 rtx insn;
6537 struct cse_basic_block_data *data;
6538 int follow_jumps;
6539 int after_loop;
6540 int skip_blocks;
6541 {
6542 rtx p = insn, q;
6543 int nsets = 0;
6544 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6545 rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
6546 int path_size = data->path_size;
6547 int path_entry = 0;
6548 int i;
6549
6550 /* Update the previous branch path, if any. If the last branch was
6551 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
6552 shorten the path by one and look at the previous branch. We know that
6553 at least one branch must have been taken if PATH_SIZE is non-zero. */
6554 while (path_size > 0)
6555 {
6556 if (data->path[path_size - 1].status != NOT_TAKEN)
6557 {
6558 data->path[path_size - 1].status = NOT_TAKEN;
6559 break;
6560 }
6561 else
6562 path_size--;
6563 }
6564
6565 /* If the first instruction is marked with QImode, that means we've
6566 already processed this block. Our caller will look at DATA->LAST
6567 to figure out where to go next. We want to return the next block
6568 in the instruction stream, not some branched-to block somewhere
6569 else.  We accomplish this by pretending our caller forbade us to
6570 follow jumps or skip blocks.  */
6571 if (GET_MODE (insn) == QImode)
6572 follow_jumps = skip_blocks = 0;
6573
6574 /* Scan to end of this basic block. */
6575 while (p && GET_CODE (p) != CODE_LABEL)
6576 {
6577 /* Don't cse out the end of a loop. This makes a difference
6578 only for the unusual loops that always execute at least once;
6579 all other loops have labels there so we will stop in any case.
6580 Cse'ing out the end of the loop is dangerous because it
6581 might cause an invariant expression inside the loop
6582 to be reused after the end of the loop. This would make it
6583 hard to move the expression out of the loop in loop.c,
6584 especially if it is one of several equivalent expressions
6585 and loop.c would like to eliminate it.
6586
6587 If we are running after loop.c has finished, we can ignore
6588 the NOTE_INSN_LOOP_END. */
6589
6590 if (! after_loop && GET_CODE (p) == NOTE
6591 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
6592 break;
6593
6594 /* Don't cse over a call to setjmp; on some machines (eg vax)
6595 the regs restored by the longjmp come from
6596 a later time than the setjmp. */
6597 if (GET_CODE (p) == NOTE
6598 && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
6599 break;
6600
6601 /* A PARALLEL can have lots of SETs in it,
6602 especially if it is really an ASM_OPERANDS. */
6603 if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6604 nsets += XVECLEN (PATTERN (p), 0);
6605 else if (GET_CODE (p) != NOTE)
6606 nsets += 1;
6607
6608 /* Ignore insns made by CSE; they cannot affect the boundaries of
6609 the basic block. */
6610
6611 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6612 high_cuid = INSN_CUID (p);
6613 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6614 low_cuid = INSN_CUID (p);
6615
6616 /* See if this insn is in our branch path. If it is and we are to
6617 take it, do so. */
6618 if (path_entry < path_size && data->path[path_entry].branch == p)
6619 {
6620 if (data->path[path_entry].status != NOT_TAKEN)
6621 p = JUMP_LABEL (p);
6622
6623 /* Point to next entry in path, if any. */
6624 path_entry++;
6625 }
6626
6627 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6628 was specified, we haven't reached our maximum path length, there are
6629 insns following the target of the jump, this is the only use of the
6630 jump label, and the target label is preceded by a BARRIER.
6631
6632 Alternatively, we can follow the jump if it branches around a
6633 block of code and there are no other branches into the block.
6634 In this case invalidate_skipped_block will be called to invalidate any
6635 registers set in the block when following the jump. */
6636
6637 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
6638 && GET_CODE (p) == JUMP_INSN
6639 && GET_CODE (PATTERN (p)) == SET
6640 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6641 && JUMP_LABEL (p) != 0
6642 && LABEL_NUSES (JUMP_LABEL (p)) == 1
6643 && NEXT_INSN (JUMP_LABEL (p)) != 0)
6644 {
6645 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6646 if ((GET_CODE (q) != NOTE
6647 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
6648 || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
6649 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
6650 break;
6651
6652 /* If we ran into a BARRIER, this code is an extension of the
6653 basic block when the branch is taken. */
6654 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
6655 {
6656 /* Don't allow ourselves to keep walking around an
6657 always-executed loop. */
6658 if (next_real_insn (q) == next)
6659 {
6660 p = NEXT_INSN (p);
6661 continue;
6662 }
6663
6664 /* Similarly, don't put a branch in our path more than once. */
6665 for (i = 0; i < path_entry; i++)
6666 if (data->path[i].branch == p)
6667 break;
6668
6669 if (i != path_entry)
6670 break;
6671
6672 data->path[path_entry].branch = p;
6673 data->path[path_entry++].status = TAKEN;
6674
6675 /* This branch now ends our path. It was possible that we
6676 didn't see this branch the last time around (when the
6677 insn in front of the target was a JUMP_INSN that was
6678 turned into a no-op). */
6679 path_size = path_entry;
6680
6681 p = JUMP_LABEL (p);
6682 /* Mark block so we won't scan it again later. */
6683 PUT_MODE (NEXT_INSN (p), QImode);
6684 }
6685 /* Detect a branch around a block of code. */
6686 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
6687 {
6688 register rtx tmp;
6689
6690 if (next_real_insn (q) == next)
6691 {
6692 p = NEXT_INSN (p);
6693 continue;
6694 }
6695
6696 for (i = 0; i < path_entry; i++)
6697 if (data->path[i].branch == p)
6698 break;
6699
6700 if (i != path_entry)
6701 break;
6702
6703 /* This is no_labels_between_p (p, q) with an added check for
6704 reaching the end of a function (in case Q precedes P). */
6705 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6706 if (GET_CODE (tmp) == CODE_LABEL)
6707 break;
6708
6709 if (tmp == q)
6710 {
6711 data->path[path_entry].branch = p;
6712 data->path[path_entry++].status = AROUND;
6713
6714 path_size = path_entry;
6715
6716 p = JUMP_LABEL (p);
6717 /* Mark block so we won't scan it again later. */
6718 PUT_MODE (NEXT_INSN (p), QImode);
6719 }
6720 }
6721 }
6722 p = NEXT_INSN (p);
6723 }
6724
6725 data->low_cuid = low_cuid;
6726 data->high_cuid = high_cuid;
6727 data->nsets = nsets;
6728 data->last = p;
6729
6730 /* If no jumps in the path were taken, set our path length to zero
6731 so a rescan won't be done. */
6732 for (i = path_size - 1; i >= 0; i--)
6733 if (data->path[i].status != NOT_TAKEN)
6734 break;
6735
6736 if (i == -1)
6737 data->path_size = 0;
6738 else
6739 data->path_size = path_size;
6740
6741 /* End the current branch path. */
6742 data->path[path_size].branch = 0;
6743 }
6744 \f
6745 /* Perform cse on the instructions of a function.
6746 F is the first instruction.
6747 NREGS is one plus the highest pseudo-reg number used in the instruction.
6748
6749 AFTER_LOOP is 1 if this is the cse call done after loop optimization
6750 (only if -frerun-cse-after-loop).
6751
6752 Returns 1 if jump_optimize should be redone due to simplifications
6753 in conditional jump instructions. */
6754
6755 int
6756 cse_main (f, nregs, after_loop, file)
6757 rtx f;
6758 int nregs;
6759 int after_loop;
6760 FILE *file;
6761 {
6762 struct cse_basic_block_data val;
6763 register rtx insn = f;
6764 register int i;
6765
6766 cse_jumps_altered = 0;
6767 recorded_label_ref = 0;
6768 constant_pool_entries_cost = 0;
6769 val.path_size = 0;
6770
6771 init_recog ();
6772 init_alias_analysis ();
6773
6774 max_reg = nregs;
6775
6776 max_insn_uid = get_max_uid ();
6777
6778 reg_eqv_table = (struct reg_eqv_elem *)
6779 xmalloc (nregs * sizeof (struct reg_eqv_elem));
6780
6781 #ifdef LOAD_EXTEND_OP
6782
6783 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
6784 and change the code and mode as appropriate. */
6785 memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
6786 #endif
6787
6788 /* Discard all the free elements of the previous function
6789 since they are allocated on the temporary obstack.  */
6790 bzero ((char *) table, sizeof table);
6791 free_element_chain = 0;
6792 n_elements_made = 0;
6793
6794 /* Find the largest uid. */
6795
6796 max_uid = get_max_uid ();
6797 uid_cuid = (int *) xcalloc (max_uid + 1, sizeof (int));
6798
6799 /* Compute the mapping from uids to cuids.
6800 CUIDs are numbers assigned to insns, like uids,
6801 except that cuids increase monotonically through the code.
6802 Don't assign cuids to line-number NOTEs, so that the distance in cuids
6803 between two insns is not affected by -g. */
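/* Hypothetical numbering: insns with uids 4, 9 and 2, in stream
   order, receive cuids 1, 2 and 3, while an interleaved line-number
   note simply repeats the cuid of the insn before it.  */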
6804
6805 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
6806 {
6807 if (GET_CODE (insn) != NOTE
6808 || NOTE_LINE_NUMBER (insn) < 0)
6809 INSN_CUID (insn) = ++i;
6810 else
6811 /* Give a line number note the same cuid as preceding insn. */
6812 INSN_CUID (insn) = i;
6813 }

  /* Initialize which registers are clobbered by calls.  */

  CLEAR_HARD_REG_SET (regs_invalidated_by_call);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if ((call_used_regs[i]
         /* Used to check !fixed_regs[i] here, but that isn't safe;
            fixed regs are still call-clobbered, and sched can get
            confused if they can "live across calls".

            The frame pointer is always preserved across calls.  The arg
            pointer is preserved if it is fixed.  The stack pointer usually
            is, unless RETURN_POPS_ARGS, in which case an explicit CLOBBER
            will be present.  If we are generating PIC code, the PIC offset
            table register is preserved across calls.  */

         && i != STACK_POINTER_REGNUM
         && i != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
         && i != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
         && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
#endif
#if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
         && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
#endif
         )
        || global_regs[i])
      SET_HARD_REG_BIT (regs_invalidated_by_call, i);
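
  /* Disabled sketch of how this set is consumed: when a CALL_INSN is
     scanned, equivalences for every register in the set must be
     dropped (the real work happens elsewhere in this file, in
     invalidate_for_call).  The loop below is illustrative only.  */
#if 0
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
      invalidate (regno_reg_rtx[i], VOIDmode);
#endif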

  if (ggc_p)
    ggc_push_context ();

  /* Loop over basic blocks.
     Compute the maximum number of qty's needed for each basic block
     (which is 2 for each SET).  */
  insn = f;
  while (insn)
    {
      cse_altered = 0;
      cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
                              flag_cse_skip_blocks);

      /* If this basic block was already processed or has no sets, skip it.  */
      if (val.nsets == 0 || GET_MODE (insn) == QImode)
        {
          PUT_MODE (insn, VOIDmode);
          insn = (val.last ? NEXT_INSN (val.last) : 0);
          val.path_size = 0;
          continue;
        }

      cse_basic_block_start = val.low_cuid;
      cse_basic_block_end = val.high_cuid;
      max_qty = val.nsets * 2;

      if (file)
        fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
                 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
                 val.nsets);

      /* Make MAX_QTY bigger to give us room to optimize
         past the end of this basic block, if that should prove useful.  */
      if (max_qty < 500)
        max_qty = 500;

      max_qty += max_reg;

      /* If this basic block is being extended by following certain jumps
         (see `cse_end_of_basic_block'), we reprocess the code from the start.
         Otherwise, we start after this basic block.  */
      if (val.path_size > 0)
        cse_basic_block (insn, val.last, val.path, 0);
      else
        {
          int old_cse_jumps_altered = cse_jumps_altered;
          rtx temp;

          /* When cse changes a conditional jump to an unconditional
             jump, we want to reprocess the block, since it will give
             us a new branch path to investigate.  */
          cse_jumps_altered = 0;
          temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
          if (cse_jumps_altered == 0
              || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
            insn = temp;

          cse_jumps_altered |= old_cse_jumps_altered;
        }

      if (ggc_p && cse_altered)
        ggc_collect ();

#ifdef USE_C_ALLOCA
      alloca (0);
#endif
    }

  if (ggc_p)
    ggc_pop_context ();

  if (max_elements_made < n_elements_made)
    max_elements_made = n_elements_made;

  /* Clean up.  */
  end_alias_analysis ();
  free (uid_cuid);
  free (reg_eqv_table);

  return cse_jumps_altered || recorded_label_ref;
}
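
/* Caller-side sketch (illustrative; not the real driver code): a
   nonzero return asks the caller to rerun the jump optimizer so that
   code made unreachable by simplified jumps gets deleted.
   rerun_jump_optimizer is a hypothetical stand-in for that pass.  */
#if 0
  if (cse_main (get_insns (), max_reg_num (), 0, rtl_dump_file))
    rerun_jump_optimizer ();
#endif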

/* Process a single basic block.  FROM and TO are the limits of the basic
   block.  NEXT_BRANCH points to the branch path when following jumps or
   a null path when not following jumps.

   AROUND_LOOP is non-zero if we are to try to cse around to the start of a
   loop.  This is true when we are being called for the last time on a
   block and this CSE pass is before loop.c.  */

static rtx
cse_basic_block (from, to, next_branch, around_loop)
     register rtx from, to;
     struct branch_path *next_branch;
     int around_loop;
{
  register rtx insn;
  int to_usage = 0;
  rtx libcall_insn = NULL_RTX;
  int num_insns = 0;

  /* This array is undefined before max_reg, so only allocate
     the space actually needed and adjust the start.  */

  qty_table
    = (struct qty_table_elem *) xmalloc ((max_qty - max_reg)
                                         * sizeof (struct qty_table_elem));
  qty_table -= max_reg;

  new_basic_block ();

  /* TO might be a label.  If so, protect it from being deleted.  */
  if (to != 0 && GET_CODE (to) == CODE_LABEL)
    ++LABEL_NUSES (to);

  for (insn = from; insn != to; insn = NEXT_INSN (insn))
    {
      register enum rtx_code code = GET_CODE (insn);

      /* If we have processed 1,000 insns, flush the hash table to
         avoid extreme quadratic behavior.  We must not include NOTEs
         in the count since there may be more of them when generating
         debugging information.  If we cleared the table at different
         times, code generated with -g -O could differ from code
         generated with -O but not -g.

         ??? This is a real kludge and needs to be done some other way.
         Perhaps for 2.9.  */
      if (code != NOTE && num_insns++ > 1000)
        {
          flush_hash_table ();
          num_insns = 0;
        }
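
      /* Illustrative cost note: operations that walk the whole table,
         such as invalidation, cost time proportional to its population,
         so letting the table grow with the N insns scanned can make one
         block cost O(N^2).  Flushing every 1,000 insns bounds the table
         size by a constant, keeping the scan roughly linear.  */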

      /* See if this is a branch that is part of the path.  If so, and it is
         to be taken, do so.  */
      if (next_branch->branch == insn)
        {
          enum taken status = next_branch++->status;
          if (status != NOT_TAKEN)
            {
              if (status == TAKEN)
                record_jump_equiv (insn, 1);
              else
                invalidate_skipped_block (NEXT_INSN (insn));

              /* Set the last insn as the jump insn; it doesn't affect cc0.
                 Then follow this branch.  */
#ifdef HAVE_cc0
              prev_insn_cc0 = 0;
#endif
              prev_insn = insn;
              insn = JUMP_LABEL (insn);
              continue;
            }
        }

      if (GET_MODE (insn) == QImode)
        PUT_MODE (insn, VOIDmode);

      if (GET_RTX_CLASS (code) == 'i')
        {
          rtx p;

          /* Process notes first so we have all notes in canonical forms when
             looking for duplicate operations.  */

          if (REG_NOTES (insn))
            REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);

          /* Track when we are inside a LIBCALL block.  Inside such a block,
             we do not want to record destinations.  The last insn of a
             LIBCALL block is not considered to be part of the block, since
             its destination is the result of the block and hence should be
             recorded.  */

          if (REG_NOTES (insn) != 0)
            {
              if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
                libcall_insn = XEXP (p, 0);
              else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
                libcall_insn = 0;
            }

          cse_insn (insn, libcall_insn);
        }
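
      /* Illustrative shape of a LIBCALL block (uids hypothetical):

           insn 10: (set (reg 90) ...)       REG_LIBCALL -> insn 12
           insn 11: (set (reg 91) ...)
           insn 12: (set (reg 80) (reg 91))  REG_RETVAL -> insn 10

         Insns 10 and 11 are processed with LIBCALL_INSN nonzero, so
         their destinations are not recorded; by insn 12 it has been
         cleared again and the block's result is recorded normally.  */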

      /* If INSN is now an unconditional jump, skip to the end of our
         basic block by pretending that we just did the last insn in the
         basic block.  If we are jumping to the end of our block, show
         that we can have one usage of TO.  */

      if (any_uncondjump_p (insn))
        {
          if (to == 0)
            {
              free (qty_table + max_reg);
              return 0;
            }

          if (JUMP_LABEL (insn) == to)
            to_usage = 1;

          /* Maybe TO was deleted because the jump is unconditional.
             If so, there is nothing left in this basic block.  */
          /* ??? Perhaps it would be smarter to set TO
             to whatever follows this insn,
             and pretend the basic block had always ended here.  */
          if (INSN_DELETED_P (to))
            break;

          insn = PREV_INSN (to);
        }

      /* See if it is ok to keep on going past the label
         which used to end our basic block.  Remember that we incremented
         the count of that label, so we decrement it here.  If we made
         a jump unconditional, TO_USAGE will be one; in that case, we don't
         want to count the use in that jump.  */

      if (to != 0 && NEXT_INSN (insn) == to
          && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
        {
          struct cse_basic_block_data val;
          rtx prev;

          insn = NEXT_INSN (to);

          /* If TO was the last insn in the function, we are done.  */
          if (insn == 0)
            {
              free (qty_table + max_reg);
              return 0;
            }

          /* If TO was preceded by a BARRIER we are done with this block
             because it has no continuation.  */
          prev = prev_nonnote_insn (to);
          if (prev && GET_CODE (prev) == BARRIER)
            {
              free (qty_table + max_reg);
              return insn;
            }

          /* Find the end of the following block.  Note that we won't be
             following branches in this case.  */
          to_usage = 0;
          val.path_size = 0;
          cse_end_of_basic_block (insn, &val, 0, 0, 0);

          /* If the tables we allocated have enough space left
             to handle all the SETs in the next basic block,
             continue through it.  Otherwise, return,
             and that block will be scanned individually.  */
          if (val.nsets * 2 + next_qty > max_qty)
            break;

          cse_basic_block_start = val.low_cuid;
          cse_basic_block_end = val.high_cuid;
          to = val.last;

          /* Prevent TO from being deleted if it is a label.  */
          if (to != 0 && GET_CODE (to) == CODE_LABEL)
            ++LABEL_NUSES (to);

          /* Back up so we process the first insn in the extension.  */
          insn = PREV_INSN (insn);
        }
    }

  if (next_qty > max_qty)
    abort ();

  /* If we are running before loop.c, stopped on a NOTE_INSN_LOOP_END, and
     the previous insn is the only insn that branches to the head of a loop,
     then we can cse into the loop.  Don't do this if we changed the jump
     structure of a loop unless we aren't going to be following jumps.  */

  if ((cse_jumps_altered == 0
       || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
      && around_loop && to != 0
      && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
      && GET_CODE (PREV_INSN (to)) == JUMP_INSN
      && JUMP_LABEL (PREV_INSN (to)) != 0
      && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
    cse_around_loop (JUMP_LABEL (PREV_INSN (to)));

  free (qty_table + max_reg);

  return to ? NEXT_INSN (to) : 0;
}
\f
/* Count the number of times registers are used (not set) in X.
   COUNTS is an array in which we accumulate the count; INCR is how much
   we count each register usage.

   Don't count a usage of DEST, which is the SET_DEST of a SET which
   contains X in its SET_SRC.  This is because such a SET does not
   modify the liveness of DEST.  */

static void
count_reg_usage (x, counts, dest, incr)
     rtx x;
     int *counts;
     rtx dest;
     int incr;
{
  enum rtx_code code;
  const char *fmt;
  int i, j;

  if (x == 0)
    return;

  switch (code = GET_CODE (x))
    {
    case REG:
      if (x != dest)
        counts[REGNO (x)] += incr;
      return;

    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
         as being used.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
        count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
      return;

    case SET:
      /* Unless we are setting a REG, count everything in SET_DEST.  */
      if (GET_CODE (SET_DEST (x)) != REG)
        count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);

      /* If SRC has side-effects, then we can't delete this insn, so the
         usage of SET_DEST inside SRC counts.

         ??? Strictly speaking, we might be preserving this insn
         because some other SET has side-effects, but that's hard
         to do and can't happen now.  */
      count_reg_usage (SET_SRC (x), counts,
                       side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
                       incr);
      return;

    case CALL_INSN:
      count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
      /* Fall through.  */

    case INSN:
    case JUMP_INSN:
      count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);

      /* Things used in a REG_EQUAL note aren't dead since loop may try to
         use them.  */

      count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
      return;

    case EXPR_LIST:
    case INSN_LIST:
      if (REG_NOTE_KIND (x) == REG_EQUAL
          || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x, 0)) == USE))
        count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
      count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        count_reg_usage (XEXP (x, i), counts, dest, incr);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
    }
}
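
/* Minimal usage sketch (disabled): weight every use by +1 across the
   whole stream; afterwards COUNTS[R] == 0 means register R is never
   read.  NREG is assumed to be one plus the highest register number,
   as in delete_trivially_dead_insns below.  */
#if 0
{
  int *counts = (int *) xcalloc (nreg, sizeof (int));
  rtx insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      count_reg_usage (insn, counts, NULL_RTX, 1);

  free (counts);
}
#endif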
\f
/* Scan all the insns and delete any that are dead; i.e., insns that set
   a register that is never used or that copy a register to itself.

   This is used to remove insns made obviously dead by cse, loop or other
   optimizations.  It improves the heuristics in loop since it won't try to
   move dead invariants out of loops or make givs for dead quantities.  The
   remaining passes of the compilation are also sped up.  */

void
delete_trivially_dead_insns (insns, nreg)
     rtx insns;
     int nreg;
{
  int *counts;
  rtx insn, prev;
#ifdef HAVE_cc0
  rtx tem;
#endif
  int i;
  int in_libcall = 0, dead_libcall = 0;

  /* First count the number of times each register is used.  */
  counts = (int *) xcalloc (nreg, sizeof (int));
  for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
    count_reg_usage (insn, counts, NULL_RTX, 1);

  /* Go from the last insn to the first and delete insns that only set unused
     registers or copy a register to itself.  As we delete an insn, remove
     usage counts for registers it uses.

     The first jump optimization pass may leave a real insn as the last
     insn in the function.  We must not skip that insn or we may end
     up deleting code that is not really dead.  */
  insn = get_last_insn ();
  if (! INSN_P (insn))
    insn = prev_real_insn (insn);

  for (; insn; insn = prev)
    {
      int live_insn = 0;
      rtx note;

      prev = prev_real_insn (insn);

      /* Don't delete any insns that are part of a libcall block unless
         we can delete the whole libcall block.

         Flow or loop might get confused if we did that.  Remember
         that we are scanning backwards.  */
      if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
        {
          in_libcall = 1;
          live_insn = 1;
          dead_libcall = 0;

          /* See if there's a REG_EQUAL note on this insn and try to
             replace the source with the REG_EQUAL expression.

             We assume that insns with REG_RETVALs can only be reg->reg
             copies at this point.  */
          note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
          if (note)
            {
              rtx set = single_set (insn);
              rtx new = simplify_rtx (XEXP (note, 0));

              if (!new)
                new = XEXP (note, 0);

              if (set && validate_change (insn, &SET_SRC (set), new, 0))
                {
                  remove_note (insn,
                               find_reg_note (insn, REG_RETVAL, NULL_RTX));
                  dead_libcall = 1;
                }
            }
        }
      else if (in_libcall)
        live_insn = ! dead_libcall;
      else if (GET_CODE (PATTERN (insn)) == SET)
        {
          if ((GET_CODE (SET_DEST (PATTERN (insn))) == REG
               || GET_CODE (SET_DEST (PATTERN (insn))) == SUBREG)
              && rtx_equal_p (SET_DEST (PATTERN (insn)),
                              SET_SRC (PATTERN (insn))))
            ;
          else if (GET_CODE (SET_DEST (PATTERN (insn))) == STRICT_LOW_PART
                   && rtx_equal_p (XEXP (SET_DEST (PATTERN (insn)), 0),
                                   SET_SRC (PATTERN (insn))))
            ;

#ifdef HAVE_cc0
          else if (GET_CODE (SET_DEST (PATTERN (insn))) == CC0
                   && ! side_effects_p (SET_SRC (PATTERN (insn)))
                   && ((tem = next_nonnote_insn (insn)) == 0
                       || ! INSN_P (tem)
                       || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
            ;
#endif
          else if (GET_CODE (SET_DEST (PATTERN (insn))) != REG
                   || REGNO (SET_DEST (PATTERN (insn))) < FIRST_PSEUDO_REGISTER
                   || counts[REGNO (SET_DEST (PATTERN (insn)))] != 0
                   || side_effects_p (SET_SRC (PATTERN (insn)))
                   /* An ADDRESSOF expression can turn into a use of the
                      internal arg pointer, so always consider the
                      internal arg pointer live.  If it is truly dead,
                      flow will delete the initializing insn.  */
                   || (SET_DEST (PATTERN (insn))
                       == current_function_internal_arg_pointer))
            live_insn = 1;
        }
      else if (GET_CODE (PATTERN (insn)) == PARALLEL)
        for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
          {
            rtx elt = XVECEXP (PATTERN (insn), 0, i);

            if (GET_CODE (elt) == SET)
              {
                if ((GET_CODE (SET_DEST (elt)) == REG
                     || GET_CODE (SET_DEST (elt)) == SUBREG)
                    && rtx_equal_p (SET_DEST (elt), SET_SRC (elt)))
                  ;

#ifdef HAVE_cc0
                else if (GET_CODE (SET_DEST (elt)) == CC0
                         && ! side_effects_p (SET_SRC (elt))
                         && ((tem = next_nonnote_insn (insn)) == 0
                             || ! INSN_P (tem)
                             || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
                  ;
#endif
                else if (GET_CODE (SET_DEST (elt)) != REG
                         || REGNO (SET_DEST (elt)) < FIRST_PSEUDO_REGISTER
                         || counts[REGNO (SET_DEST (elt))] != 0
                         || side_effects_p (SET_SRC (elt))
                         /* An ADDRESSOF expression can turn into a use of the
                            internal arg pointer, so always consider the
                            internal arg pointer live.  If it is truly dead,
                            flow will delete the initializing insn.  */
                         || (SET_DEST (elt)
                             == current_function_internal_arg_pointer))
                  live_insn = 1;
              }
            else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
              live_insn = 1;
          }
      else
        live_insn = 1;

      /* If this is a dead insn, delete it and show that the registers
         it uses aren't being used.  */

      if (! live_insn)
        {
          count_reg_usage (insn, counts, NULL_RTX, -1);
          delete_insn (insn);
        }

      if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
        {
          in_libcall = 0;
          dead_libcall = 0;
        }
    }

  /* Clean up.  */
  free (counts);
}
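
/* Typical call-site sketch (illustrative; the real driver makes the
   equivalent call after cse and other RTL passes):

     delete_trivially_dead_insns (get_insns (), max_reg_num ());

   Because deleting an insn decrements the counts of the registers it
   used, the backward scan can expose further dead insns as it goes.  */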