cp-tree.h (lang_decl_flags): Rename defined_in_class to initialized_in_class.
[gcc.git] / gcc / cse.c
1 /* Common subexpression elimination for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 #include "config.h"
23 /* stdio.h must precede rtl.h for FFS. */
24 #include "system.h"
25 #include <setjmp.h>
26
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "basic-block.h"
32 #include "flags.h"
33 #include "real.h"
34 #include "insn-config.h"
35 #include "recog.h"
36 #include "function.h"
37 #include "expr.h"
38 #include "toplev.h"
39 #include "output.h"
40 #include "ggc.h"
41
42 /* The basic idea of common subexpression elimination is to go
43 through the code, keeping a record of expressions that would
44 have the same value at the current scan point, and replacing
45 expressions encountered with the cheapest equivalent expression.
46
47 It is too complicated to keep track of the different possibilities
48 when control paths merge in this code; so, at each label, we forget all
49 that is known and start fresh. This can be described as processing each
50 extended basic block separately. We have a separate pass to perform
51 global CSE.
52
53 Note CSE can turn a conditional or computed jump into a nop or
54 an unconditional jump. When this occurs we arrange to run the jump
55 optimizer after CSE to delete the unreachable code.
56
57 We use two data structures to record the equivalent expressions:
58 a hash table for most expressions, and a vector of "quantity
59 numbers" to record equivalent (pseudo) registers.
60
61 The use of the special data structure for registers is desirable
62 because it is faster. It is possible because registers references
63 contain a fairly small number, the register number, taken from
64 a contiguously allocated series, and two register references are
65 identical if they have the same number. General expressions
66 do not have any such thing, so the only way to retrieve the
67 information recorded on an expression other than a register
68 is to keep it in a hash table.
69
70 Registers and "quantity numbers":
71
72 At the start of each basic block, all of the (hardware and pseudo)
73 registers used in the function are given distinct quantity
74 numbers to indicate their contents. During scan, when the code
75 copies one register into another, we copy the quantity number.
76 When a register is loaded in any other way, we allocate a new
77 quantity number to describe the value generated by this operation.
78 `reg_qty' records what quantity a register is currently thought
79 of as containing.
80
81 All real quantity numbers are greater than or equal to `max_reg'.
82 If register N has not been assigned a quantity, reg_qty[N] will equal N.
83
84 Quantity numbers below `max_reg' do not exist and none of the `qty_table'
85 entries should be referenced with an index below `max_reg'.
86
87 We also maintain a bidirectional chain of registers for each
88 quantity number. The `qty_table` members `first_reg' and `last_reg',
89 and `reg_eqv_table' members `next' and `prev' hold these chains.
90
91 The first register in a chain is the one whose lifespan is least local.
92 Among equals, it is the one that was seen first.
93 We replace any equivalent register with that one.
94
95 If two registers have the same quantity number, it must be true that
96 REG expressions with qty_table `mode' must be in the hash table for both
97 registers and must be in the same class.
98
99 The converse is not true. Since hard registers may be referenced in
100 any mode, two REG expressions might be equivalent in the hash table
101 but not have the same quantity number if the quantity number of one
102 of the registers is not the same mode as those expressions.
103
104 Constants and quantity numbers
105
106 When a quantity has a known constant value, that value is stored
107 in the appropriate qty_table `const_rtx'. This is in addition to
108 putting the constant in the hash table as is usual for non-regs.
109
110 Whether a reg or a constant is preferred is determined by the configuration
111 macro CONST_COSTS and will often depend on the constant value. In any
112 event, expressions containing constants can be simplified, by fold_rtx.
113
114 When a quantity has a known nearly constant value (such as an address
115 of a stack slot), that value is stored in the appropriate qty_table
116 `const_rtx'.
117
118 Integer constants don't have a machine mode. However, cse
119 determines the intended machine mode from the destination
120 of the instruction that moves the constant. The machine mode
121 is recorded in the hash table along with the actual RTL
122 constant expression so that different modes are kept separate.
123
124 Other expressions:
125
126 To record known equivalences among expressions in general
127 we use a hash table called `table'. It has a fixed number of buckets
128 that contain chains of `struct table_elt' elements for expressions.
129 These chains connect the elements whose expressions have the same
130 hash codes.
131
132 Other chains through the same elements connect the elements which
133 currently have equivalent values.
134
135 Register references in an expression are canonicalized before hashing
136 the expression. This is done using `reg_qty' and qty_table `first_reg'.
137 The hash code of a register reference is computed using the quantity
138 number, not the register number.
139
140 When the value of an expression changes, it is necessary to remove from the
141 hash table not just that expression but all expressions whose values
142 could be different as a result.
143
144 1. If the value changing is in memory, except in special cases
145 ANYTHING referring to memory could be changed. That is because
146 nobody knows where a pointer does not point.
147 The function `invalidate_memory' removes what is necessary.
148
149 The special cases are when the address is constant or is
150 a constant plus a fixed register such as the frame pointer
151 or a static chain pointer. When such addresses are stored in,
152 we can tell exactly which other such addresses must be invalidated
153 due to overlap. `invalidate' does this.
154 All expressions that refer to non-constant
155 memory addresses are also invalidated. `invalidate_memory' does this.
156
157 2. If the value changing is a register, all expressions
158 containing references to that register, and only those,
159 must be removed.
160
161 Because searching the entire hash table for expressions that contain
162 a register is very slow, we try to figure out when it isn't necessary.
163 Precisely, this is necessary only when expressions have been
164 entered in the hash table using this register, and then the value has
165 changed, and then another expression wants to be added to refer to
166 the register's new value. This sequence of circumstances is rare
167 within any one basic block.
168
169 The vectors `reg_tick' and `reg_in_table' are used to detect this case.
170 reg_tick[i] is incremented whenever a value is stored in register i.
171 reg_in_table[i] holds -1 if no references to register i have been
172 entered in the table; otherwise, it contains the value reg_tick[i] had
173 when the references were entered. If we want to enter a reference
174 and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
175 Until we want to enter a new entry, the mere fact that the two vectors
176 don't match makes the entries be ignored if anyone tries to match them.
177
178 Registers themselves are entered in the hash table as well as in
179 the equivalent-register chains. However, the vectors `reg_tick'
180 and `reg_in_table' do not apply to expressions which are simple
181 register references. These expressions are removed from the table
182 immediately when they become invalid, and this can be done even if
183 we do not immediately search for all the expressions that refer to
184 the register.
185
186 A CLOBBER rtx in an instruction invalidates its operand for further
187 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
188 invalidates everything that resides in memory.
189
190 Related expressions:
191
192 Constant expressions that differ only by an additive integer
193 are called related. When a constant expression is put in
194 the table, the related expression with no constant term
195 is also entered. These are made to point at each other
196 so that it is possible to find out if there exists any
197 register equivalent to an expression related to a given expression. */
198
199 /* One plus largest register number used in this function. */
200
201 static int max_reg;
202
203 /* One plus largest instruction UID used in this function at time of
204 cse_main call. */
205
206 static int max_insn_uid;
207
208 /* Length of qty_table vector. We know in advance we will not need
209 a quantity number this big. */
210
211 static int max_qty;
212
213 /* Next quantity number to be allocated.
214 This is 1 + the largest number needed so far. */
215
216 static int next_qty;
217
218 /* Per-qty information tracking.
219
220 `first_reg' and `last_reg' track the head and tail of the
221 chain of registers which currently contain this quantity.
222
223 `mode' contains the machine mode of this quantity.
224
225 `const_rtx' holds the rtx of the constant value of this
226 quantity, if known. A summations of the frame/arg pointer
227 and a constant can also be entered here. When this holds
228 a known value, `const_insn' is the insn which stored the
229 constant value.
230
231 `comparison_{code,const,qty}' are used to track when a
232 comparison between a quantity and some constant or register has
233 been passed. In such a case, we know the results of the comparison
234 in case we see it again. These members record a comparison that
235 is known to be true. `comparison_code' holds the rtx code of such
236 a comparison, else it is set to UNKNOWN and the other two
237 comparison members are undefined. `comparison_const' holds
238 the constant being compared against, or zero if the comparison
239 is not against a constant. `comparison_qty' holds the quantity
240 being compared against when the result is known. If the comparison
241 is not with a register, `comparison_qty' is -1. */
242
243 struct qty_table_elem
244 {
245 rtx const_rtx;
246 rtx const_insn;
247 rtx comparison_const;
248 int comparison_qty;
249 unsigned int first_reg, last_reg;
250 enum machine_mode mode;
251 enum rtx_code comparison_code;
252 };
253
254 /* The table of all qtys, indexed by qty number. */
255 static struct qty_table_elem *qty_table;
256
257 #ifdef HAVE_cc0
258 /* For machines that have a CC0, we do not record its value in the hash
259 table since its use is guaranteed to be the insn immediately following
260 its definition and any other insn is presumed to invalidate it.
261
262 Instead, we store below the value last assigned to CC0. If it should
263 happen to be a constant, it is stored in preference to the actual
264 assigned value. In case it is a constant, we store the mode in which
265 the constant should be interpreted. */
266
267 static rtx prev_insn_cc0;
268 static enum machine_mode prev_insn_cc0_mode;
269 #endif
270
271 /* Previous actual insn. 0 if at first insn of basic block. */
272
273 static rtx prev_insn;
274
275 /* Insn being scanned. */
276
277 static rtx this_insn;
278
279 /* Index by register number, gives the number of the next (or
280 previous) register in the chain of registers sharing the same
281 value.
282
283 Or -1 if this register is at the end of the chain.
284
285 If reg_qty[N] == N, reg_eqv_table[N].next is undefined. */
286
287 /* Per-register equivalence chain. */
288 struct reg_eqv_elem
289 {
290 int next, prev;
291 };
292
293 /* The table of all register equivalence chains. */
294 static struct reg_eqv_elem *reg_eqv_table;
295
296 struct cse_reg_info
297 {
298 /* Next in hash chain. */
299 struct cse_reg_info *hash_next;
300
301 /* The next cse_reg_info structure in the free or used list. */
302 struct cse_reg_info *next;
303
304 /* Search key */
305 unsigned int regno;
306
307 /* The quantity number of the register's current contents. */
308 int reg_qty;
309
310 /* The number of times the register has been altered in the current
311 basic block. */
312 int reg_tick;
313
314 /* The REG_TICK value at which rtx's containing this register are
315 valid in the hash table. If this does not equal the current
316 reg_tick value, such expressions existing in the hash table are
317 invalid. */
318 int reg_in_table;
319 };
320
321 /* A free list of cse_reg_info entries. */
322 static struct cse_reg_info *cse_reg_info_free_list;
323
324 /* A used list of cse_reg_info entries. */
325 static struct cse_reg_info *cse_reg_info_used_list;
326 static struct cse_reg_info *cse_reg_info_used_list_end;
327
328 /* A mapping from registers to cse_reg_info data structures. */
329 #define REGHASH_SHIFT 7
330 #define REGHASH_SIZE (1 << REGHASH_SHIFT)
331 #define REGHASH_MASK (REGHASH_SIZE - 1)
332 static struct cse_reg_info *reg_hash[REGHASH_SIZE];
333
334 #define REGHASH_FN(REGNO) \
335 (((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)
336
337 /* The last lookup we did into the cse_reg_info_tree. This allows us
338 to cache repeated lookups. */
339 static unsigned int cached_regno;
340 static struct cse_reg_info *cached_cse_reg_info;
341
342 /* A HARD_REG_SET containing all the hard registers for which there is
343 currently a REG expression in the hash table. Note the difference
344 from the above variables, which indicate if the REG is mentioned in some
345 expression in the table. */
346
347 static HARD_REG_SET hard_regs_in_table;
348
349 /* A HARD_REG_SET containing all the hard registers that are invalidated
350 by a CALL_INSN. */
351
352 static HARD_REG_SET regs_invalidated_by_call;
353
354 /* CUID of insn that starts the basic block currently being cse-processed. */
355
356 static int cse_basic_block_start;
357
358 /* CUID of insn that ends the basic block currently being cse-processed. */
359
360 static int cse_basic_block_end;
361
362 /* Vector mapping INSN_UIDs to cuids.
363 The cuids are like uids but increase monotonically always.
364 We use them to see whether a reg is used outside a given basic block. */
365
366 static int *uid_cuid;
367
368 /* Highest UID in UID_CUID. */
369 static int max_uid;
370
371 /* Get the cuid of an insn. */
372
373 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
374
375 /* Nonzero if this pass has made changes, and therefore it's
376 worthwhile to run the garbage collector. */
377
378 static int cse_altered;
379
380 /* Nonzero if cse has altered conditional jump insns
381 in such a way that jump optimization should be redone. */
382
383 static int cse_jumps_altered;
384
385 /* Nonzero if we put a LABEL_REF into the hash table. Since we may have put
386 it into an INSN without a REG_LABEL, we have to rerun jump after CSE
387 to put in the note. */
388 static int recorded_label_ref;
389
390 /* canon_hash stores 1 in do_not_record
391 if it notices a reference to CC0, PC, or some other volatile
392 subexpression. */
393
394 static int do_not_record;
395
396 #ifdef LOAD_EXTEND_OP
397
398 /* Scratch rtl used when looking for load-extended copy of a MEM. */
399 static rtx memory_extend_rtx;
400 #endif
401
402 /* canon_hash stores 1 in hash_arg_in_memory
403 if it notices a reference to memory within the expression being hashed. */
404
405 static int hash_arg_in_memory;
406
407 /* The hash table contains buckets which are chains of `struct table_elt's,
408 each recording one expression's information.
409 That expression is in the `exp' field.
410
411 The canon_exp field contains a canonical (from the point of view of
412 alias analysis) version of the `exp' field.
413
414 Those elements with the same hash code are chained in both directions
415 through the `next_same_hash' and `prev_same_hash' fields.
416
417 Each set of expressions with equivalent values
418 are on a two-way chain through the `next_same_value'
419 and `prev_same_value' fields, and all point with
420 the `first_same_value' field at the first element in
421 that chain. The chain is in order of increasing cost.
422 Each element's cost value is in its `cost' field.
423
424 The `in_memory' field is nonzero for elements that
425 involve any reference to memory. These elements are removed
426 whenever a write is done to an unidentified location in memory.
427 To be safe, we assume that a memory address is unidentified unless
428 the address is either a symbol constant or a constant plus
429 the frame pointer or argument pointer.
430
431 The `related_value' field is used to connect related expressions
432 (that differ by adding an integer).
433 The related expressions are chained in a circular fashion.
434 `related_value' is zero for expressions for which this
435 chain is not useful.
436
437 The `cost' field stores the cost of this element's expression.
438 The `regcost' field stores the value returned by approx_reg_cost for
439 this element's expression.
440
441 The `is_const' flag is set if the element is a constant (including
442 a fixed address).
443
444 The `flag' field is used as a temporary during some search routines.
445
446 The `mode' field is usually the same as GET_MODE (`exp'), but
447 if `exp' is a CONST_INT and has no machine mode then the `mode'
448 field is the mode it was being used as. Each constant is
449 recorded separately for each mode it is used with. */
450
451 struct table_elt
452 {
453 rtx exp;
454 rtx canon_exp;
455 struct table_elt *next_same_hash;
456 struct table_elt *prev_same_hash;
457 struct table_elt *next_same_value;
458 struct table_elt *prev_same_value;
459 struct table_elt *first_same_value;
460 struct table_elt *related_value;
461 int cost;
462 int regcost;
463 enum machine_mode mode;
464 char in_memory;
465 char is_const;
466 char flag;
467 };
468
469 /* We don't want a lot of buckets, because we rarely have very many
470 things stored in the hash table, and a lot of buckets slows
471 down a lot of loops that happen frequently. */
472 #define HASH_SHIFT 5
473 #define HASH_SIZE (1 << HASH_SHIFT)
474 #define HASH_MASK (HASH_SIZE - 1)
475
476 /* Compute hash code of X in mode M. Special-case case where X is a pseudo
477 register (hard registers may require `do_not_record' to be set). */
478
479 #define HASH(X, M) \
480 ((GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER \
481 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
482 : canon_hash (X, M)) & HASH_MASK)
483
484 /* Determine whether register number N is considered a fixed register for the
485 purpose of approximating register costs.
486 It is desirable to replace other regs with fixed regs, to reduce need for
487 non-fixed hard regs.
488 A reg wins if it is either the frame pointer or designated as fixed. */
489 #define FIXED_REGNO_P(N) \
490 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
491 || fixed_regs[N] || global_regs[N])
492
493 /* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
494 hard registers and pointers into the frame are the cheapest with a cost
495 of 0. Next come pseudos with a cost of one and other hard registers with
496 a cost of 2. Aside from these special cases, call `rtx_cost'. */
497
498 #define CHEAP_REGNO(N) \
499 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
500 || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM \
501 || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
502 || ((N) < FIRST_PSEUDO_REGISTER \
503 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
504
505 #define COST(X) (GET_CODE (X) == REG ? 0 : notreg_cost (X, SET))
506 #define COST_IN(X,OUTER) (GET_CODE (X) == REG ? 0 : notreg_cost (X, OUTER))
507
508 /* Get the info associated with register N. */
509
510 #define GET_CSE_REG_INFO(N) \
511 (((N) == cached_regno && cached_cse_reg_info) \
512 ? cached_cse_reg_info : get_cse_reg_info ((N)))
513
514 /* Get the number of times this register has been updated in this
515 basic block. */
516
517 #define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)
518
519 /* Get the point at which REG was recorded in the table. */
520
521 #define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)
522
523 /* Get the quantity number for REG. */
524
525 #define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)
526
527 /* Determine if the quantity number for register X represents a valid index
528 into the qty_table. */
529
530 #define REGNO_QTY_VALID_P(N) (REG_QTY (N) != (int) (N))
531
532 static struct table_elt *table[HASH_SIZE];
533
534 /* Chain of `struct table_elt's made so far for this function
535 but currently removed from the table. */
536
537 static struct table_elt *free_element_chain;
538
539 /* Number of `struct table_elt' structures made so far for this function. */
540
541 static int n_elements_made;
542
543 /* Maximum value `n_elements_made' has had so far in this compilation
544 for functions previously processed. */
545
546 static int max_elements_made;
547
548 /* Surviving equivalence class when two equivalence classes are merged
549 by recording the effects of a jump in the last insn. Zero if the
550 last insn was not a conditional jump. */
551
552 static struct table_elt *last_jump_equiv_class;
553
554 /* Set to the cost of a constant pool reference if one was found for a
555 symbolic constant. If this was found, it means we should try to
556 convert constants into constant pool entries if they don't fit in
557 the insn. */
558
559 static int constant_pool_entries_cost;
560
561 /* Define maximum length of a branch path. */
562
563 #define PATHLENGTH 10
564
565 /* This data describes a block that will be processed by cse_basic_block. */
566
567 struct cse_basic_block_data
568 {
569 /* Lowest CUID value of insns in block. */
570 int low_cuid;
571 /* Highest CUID value of insns in block. */
572 int high_cuid;
573 /* Total number of SETs in block. */
574 int nsets;
575 /* Last insn in the block. */
576 rtx last;
577 /* Size of current branch path, if any. */
578 int path_size;
579 /* Current branch path, indicating which branches will be taken. */
580 struct branch_path
581 {
582 /* The branch insn. */
583 rtx branch;
584 /* Whether it should be taken or not. AROUND is the same as taken
585 except that it is used when the destination label is not preceded
586 by a BARRIER. */
587 enum taken {TAKEN, NOT_TAKEN, AROUND} status;
588 } path[PATHLENGTH];
589 };
590
591 /* Nonzero if X has the form (PLUS frame-pointer integer). We check for
592 virtual regs here because the simplify_*_operation routines are called
593 by integrate.c, which is called before virtual register instantiation.
594
595 ?!? FIXED_BASE_PLUS_P and NONZERO_BASE_PLUS_P need to move into
596 a header file so that their definitions can be shared with the
597 simplification routines in simplify-rtx.c. Until then, do not
598 change these macros without also changing the copy in simplify-rtx.c. */
599
600 #define FIXED_BASE_PLUS_P(X) \
601 ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
602 || ((X) == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])\
603 || (X) == virtual_stack_vars_rtx \
604 || (X) == virtual_incoming_args_rtx \
605 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
606 && (XEXP (X, 0) == frame_pointer_rtx \
607 || XEXP (X, 0) == hard_frame_pointer_rtx \
608 || ((X) == arg_pointer_rtx \
609 && fixed_regs[ARG_POINTER_REGNUM]) \
610 || XEXP (X, 0) == virtual_stack_vars_rtx \
611 || XEXP (X, 0) == virtual_incoming_args_rtx)) \
612 || GET_CODE (X) == ADDRESSOF)
613
614 /* Similar, but also allows reference to the stack pointer.
615
616 This used to include FIXED_BASE_PLUS_P, however, we can't assume that
617 arg_pointer_rtx by itself is nonzero, because on at least one machine,
618 the i960, the arg pointer is zero when it is unused. */
619
620 #define NONZERO_BASE_PLUS_P(X) \
621 ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
622 || (X) == virtual_stack_vars_rtx \
623 || (X) == virtual_incoming_args_rtx \
624 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
625 && (XEXP (X, 0) == frame_pointer_rtx \
626 || XEXP (X, 0) == hard_frame_pointer_rtx \
627 || ((X) == arg_pointer_rtx \
628 && fixed_regs[ARG_POINTER_REGNUM]) \
629 || XEXP (X, 0) == virtual_stack_vars_rtx \
630 || XEXP (X, 0) == virtual_incoming_args_rtx)) \
631 || (X) == stack_pointer_rtx \
632 || (X) == virtual_stack_dynamic_rtx \
633 || (X) == virtual_outgoing_args_rtx \
634 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
635 && (XEXP (X, 0) == stack_pointer_rtx \
636 || XEXP (X, 0) == virtual_stack_dynamic_rtx \
637 || XEXP (X, 0) == virtual_outgoing_args_rtx)) \
638 || GET_CODE (X) == ADDRESSOF)
639
640 static int notreg_cost PARAMS ((rtx, enum rtx_code));
641 static int approx_reg_cost_1 PARAMS ((rtx *, void *));
642 static int approx_reg_cost PARAMS ((rtx));
643 static int preferrable PARAMS ((int, int, int, int));
644 static void new_basic_block PARAMS ((void));
645 static void make_new_qty PARAMS ((unsigned int, enum machine_mode));
646 static void make_regs_eqv PARAMS ((unsigned int, unsigned int));
647 static void delete_reg_equiv PARAMS ((unsigned int));
648 static int mention_regs PARAMS ((rtx));
649 static int insert_regs PARAMS ((rtx, struct table_elt *, int));
650 static void remove_from_table PARAMS ((struct table_elt *, unsigned));
651 static struct table_elt *lookup PARAMS ((rtx, unsigned, enum machine_mode)),
652 *lookup_for_remove PARAMS ((rtx, unsigned, enum machine_mode));
653 static rtx lookup_as_function PARAMS ((rtx, enum rtx_code));
654 static struct table_elt *insert PARAMS ((rtx, struct table_elt *, unsigned,
655 enum machine_mode));
656 static void merge_equiv_classes PARAMS ((struct table_elt *,
657 struct table_elt *));
658 static void invalidate PARAMS ((rtx, enum machine_mode));
659 static int cse_rtx_varies_p PARAMS ((rtx, int));
660 static void remove_invalid_refs PARAMS ((unsigned int));
661 static void remove_invalid_subreg_refs PARAMS ((unsigned int, unsigned int,
662 enum machine_mode));
663 static void rehash_using_reg PARAMS ((rtx));
664 static void invalidate_memory PARAMS ((void));
665 static void invalidate_for_call PARAMS ((void));
666 static rtx use_related_value PARAMS ((rtx, struct table_elt *));
667 static unsigned canon_hash PARAMS ((rtx, enum machine_mode));
668 static unsigned canon_hash_string PARAMS ((const char *));
669 static unsigned safe_hash PARAMS ((rtx, enum machine_mode));
670 static int exp_equiv_p PARAMS ((rtx, rtx, int, int));
671 static rtx canon_reg PARAMS ((rtx, rtx));
672 static void find_best_addr PARAMS ((rtx, rtx *, enum machine_mode));
673 static enum rtx_code find_comparison_args PARAMS ((enum rtx_code, rtx *, rtx *,
674 enum machine_mode *,
675 enum machine_mode *));
676 static rtx fold_rtx PARAMS ((rtx, rtx));
677 static rtx equiv_constant PARAMS ((rtx));
678 static void record_jump_equiv PARAMS ((rtx, int));
679 static void record_jump_cond PARAMS ((enum rtx_code, enum machine_mode,
680 rtx, rtx, int));
681 static void cse_insn PARAMS ((rtx, rtx));
682 static int addr_affects_sp_p PARAMS ((rtx));
683 static void invalidate_from_clobbers PARAMS ((rtx));
684 static rtx cse_process_notes PARAMS ((rtx, rtx));
685 static void cse_around_loop PARAMS ((rtx));
686 static void invalidate_skipped_set PARAMS ((rtx, rtx, void *));
687 static void invalidate_skipped_block PARAMS ((rtx));
688 static void cse_check_loop_start PARAMS ((rtx, rtx, void *));
689 static void cse_set_around_loop PARAMS ((rtx, rtx, rtx));
690 static rtx cse_basic_block PARAMS ((rtx, rtx, struct branch_path *, int));
691 static void count_reg_usage PARAMS ((rtx, int *, rtx, int));
692 extern void dump_class PARAMS ((struct table_elt*));
693 static struct cse_reg_info * get_cse_reg_info PARAMS ((unsigned int));
694 static int check_dependence PARAMS ((rtx *, void *));
695
696 static void flush_hash_table PARAMS ((void));
697 \f
698 /* Dump the expressions in the equivalence class indicated by CLASSP.
699 This function is used only for debugging. */
700 void
701 dump_class (classp)
702 struct table_elt *classp;
703 {
704 struct table_elt *elt;
705
706 fprintf (stderr, "Equivalence chain for ");
707 print_rtl (stderr, classp->exp);
708 fprintf (stderr, ": \n");
709
710 for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
711 {
712 print_rtl (stderr, elt->exp);
713 fprintf (stderr, "\n");
714 }
715 }
716
717 /* Subroutine of approx_reg_cost; called through for_each_rtx. */
718 static int
719 approx_reg_cost_1 (xp, data)
720 rtx *xp;
721 void *data;
722 {
723 rtx x = *xp;
724 regset set = (regset) data;
725
726 if (x && GET_CODE (x) == REG)
727 SET_REGNO_REG_SET (set, REGNO (x));
728 return 0;
729 }
730
731 /* Return an estimate of the cost of the registers used in an rtx.
732 This is mostly the number of different REG expressions in the rtx;
733 however for some excecptions like fixed registers we use a cost of
734 0. If any other hard register reference occurs, return MAX_COST. */
735
736 static int
737 approx_reg_cost (x)
738 rtx x;
739 {
740 regset_head set;
741 int i;
742 int cost = 0;
743 int hardregs = 0;
744
745 INIT_REG_SET (&set);
746 for_each_rtx (&x, approx_reg_cost_1, (void *)&set);
747
748 EXECUTE_IF_SET_IN_REG_SET
749 (&set, 0, i,
750 {
751 if (! CHEAP_REGNO (i))
752 {
753 if (i < FIRST_PSEUDO_REGISTER)
754 hardregs++;
755
756 cost += i < FIRST_PSEUDO_REGISTER ? 2 : 1;
757 }
758 });
759
760 CLEAR_REG_SET (&set);
761 return hardregs && SMALL_REGISTER_CLASSES ? MAX_COST : cost;
762 }
763
764 /* Return a negative value if an rtx A, whose costs are given by COST_A
765 and REGCOST_A, is more desirable than an rtx B.
766 Return a positive value if A is less desirable, or 0 if the two are
767 equally good. */
768 static int
769 preferrable (cost_a, regcost_a, cost_b, regcost_b)
770 int cost_a, regcost_a, cost_b, regcost_b;
771 {
772 /* First, get rid of a cases involving expressions that are entirely
773 unwanted. */
774 if (cost_a != cost_b)
775 {
776 if (cost_a == MAX_COST)
777 return 1;
778 if (cost_b == MAX_COST)
779 return -1;
780 }
781
782 /* Avoid extending lifetimes of hardregs. */
783 if (regcost_a != regcost_b)
784 {
785 if (regcost_a == MAX_COST)
786 return 1;
787 if (regcost_b == MAX_COST)
788 return -1;
789 }
790
791 /* Normal operation costs take precedence. */
792 if (cost_a != cost_b)
793 return cost_a - cost_b;
794 /* Only if these are identical consider effects on register pressure. */
795 if (regcost_a != regcost_b)
796 return regcost_a - regcost_b;
797 return 0;
798 }
799
800 /* Internal function, to compute cost when X is not a register; called
801 from COST macro to keep it simple. */
802
803 static int
804 notreg_cost (x, outer)
805 rtx x;
806 enum rtx_code outer;
807 {
808 return ((GET_CODE (x) == SUBREG
809 && GET_CODE (SUBREG_REG (x)) == REG
810 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
811 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
812 && (GET_MODE_SIZE (GET_MODE (x))
813 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
814 && subreg_lowpart_p (x)
815 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
816 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
817 ? 0
818 : rtx_cost (x, outer) * 2);
819 }
820
821 /* Return an estimate of the cost of computing rtx X.
822 One use is in cse, to decide which expression to keep in the hash table.
823 Another is in rtl generation, to pick the cheapest way to multiply.
824 Other uses like the latter are expected in the future. */
825
826 int
827 rtx_cost (x, outer_code)
828 rtx x;
829 enum rtx_code outer_code ATTRIBUTE_UNUSED;
830 {
831 register int i, j;
832 register enum rtx_code code;
833 register const char *fmt;
834 register int total;
835
836 if (x == 0)
837 return 0;
838
839 /* Compute the default costs of certain things.
840 Note that RTX_COSTS can override the defaults. */
841
842 code = GET_CODE (x);
843 switch (code)
844 {
845 case MULT:
846 /* Count multiplication by 2**n as a shift,
847 because if we are considering it, we would output it as a shift. */
848 if (GET_CODE (XEXP (x, 1)) == CONST_INT
849 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
850 total = 2;
851 else
852 total = COSTS_N_INSNS (5);
853 break;
854 case DIV:
855 case UDIV:
856 case MOD:
857 case UMOD:
858 total = COSTS_N_INSNS (7);
859 break;
860 case USE:
861 /* Used in loop.c and combine.c as a marker. */
862 total = 0;
863 break;
864 default:
865 total = COSTS_N_INSNS (1);
866 }
867
868 switch (code)
869 {
870 case REG:
871 return 0;
872
873 case SUBREG:
874 /* If we can't tie these modes, make this expensive. The larger
875 the mode, the more expensive it is. */
876 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
877 return COSTS_N_INSNS (2
878 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
879 break;
880
881 #ifdef RTX_COSTS
882 RTX_COSTS (x, code, outer_code);
883 #endif
884 #ifdef CONST_COSTS
885 CONST_COSTS (x, code, outer_code);
886 #endif
887
888 default:
889 #ifdef DEFAULT_RTX_COSTS
890 DEFAULT_RTX_COSTS (x, code, outer_code);
891 #endif
892 break;
893 }
894
895 /* Sum the costs of the sub-rtx's, plus cost of this operation,
896 which is already in total. */
897
898 fmt = GET_RTX_FORMAT (code);
899 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
900 if (fmt[i] == 'e')
901 total += rtx_cost (XEXP (x, i), code);
902 else if (fmt[i] == 'E')
903 for (j = 0; j < XVECLEN (x, i); j++)
904 total += rtx_cost (XVECEXP (x, i, j), code);
905
906 return total;
907 }
908 \f
909 /* Return cost of address expression X.
910 Expect that X is propertly formed address reference. */
911
912 int
913 address_cost (x, mode)
914 rtx x;
915 enum machine_mode mode;
916 {
917 /* The ADDRESS_COST macro does not deal with ADDRESSOF nodes. But,
918 during CSE, such nodes are present. Using an ADDRESSOF node which
919 refers to the address of a REG is a good thing because we can then
920 turn (MEM (ADDRESSSOF (REG))) into just plain REG. */
921
922 if (GET_CODE (x) == ADDRESSOF && REG_P (XEXP ((x), 0)))
923 return -1;
924
925 /* We may be asked for cost of various unusual addresses, such as operands
926 of push instruction. It is not worthwhile to complicate writing
927 of ADDRESS_COST macro by such cases. */
928
929 if (!memory_address_p (mode, x))
930 return 1000;
931 #ifdef ADDRESS_COST
932 return ADDRESS_COST (x);
933 #else
934 return rtx_cost (x, MEM);
935 #endif
936 }
937
938 \f
939 static struct cse_reg_info *
940 get_cse_reg_info (regno)
941 unsigned int regno;
942 {
943 struct cse_reg_info **hash_head = &reg_hash[REGHASH_FN (regno)];
944 struct cse_reg_info *p;
945
946 for (p = *hash_head; p != NULL; p = p->hash_next)
947 if (p->regno == regno)
948 break;
949
950 if (p == NULL)
951 {
952 /* Get a new cse_reg_info structure. */
953 if (cse_reg_info_free_list)
954 {
955 p = cse_reg_info_free_list;
956 cse_reg_info_free_list = p->next;
957 }
958 else
959 p = (struct cse_reg_info *) xmalloc (sizeof (struct cse_reg_info));
960
961 /* Insert into hash table. */
962 p->hash_next = *hash_head;
963 *hash_head = p;
964
965 /* Initialize it. */
966 p->reg_tick = 1;
967 p->reg_in_table = -1;
968 p->reg_qty = regno;
969 p->regno = regno;
970 p->next = cse_reg_info_used_list;
971 cse_reg_info_used_list = p;
972 if (!cse_reg_info_used_list_end)
973 cse_reg_info_used_list_end = p;
974 }
975
976 /* Cache this lookup; we tend to be looking up information about the
977 same register several times in a row. */
978 cached_regno = regno;
979 cached_cse_reg_info = p;
980
981 return p;
982 }
983
984 /* Clear the hash table and initialize each register with its own quantity,
985 for a new basic block. */
986
987 static void
988 new_basic_block ()
989 {
990 register int i;
991
992 next_qty = max_reg;
993
994 /* Clear out hash table state for this pass. */
995
996 memset ((char *) reg_hash, 0, sizeof reg_hash);
997
998 if (cse_reg_info_used_list)
999 {
1000 cse_reg_info_used_list_end->next = cse_reg_info_free_list;
1001 cse_reg_info_free_list = cse_reg_info_used_list;
1002 cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
1003 }
1004 cached_cse_reg_info = 0;
1005
1006 CLEAR_HARD_REG_SET (hard_regs_in_table);
1007
1008 /* The per-quantity values used to be initialized here, but it is
1009 much faster to initialize each as it is made in `make_new_qty'. */
1010
1011 for (i = 0; i < HASH_SIZE; i++)
1012 {
1013 struct table_elt *first;
1014
1015 first = table[i];
1016 if (first != NULL)
1017 {
1018 struct table_elt *last = first;
1019
1020 table[i] = NULL;
1021
1022 while (last->next_same_hash != NULL)
1023 last = last->next_same_hash;
1024
1025 /* Now relink this hash entire chain into
1026 the free element list. */
1027
1028 last->next_same_hash = free_element_chain;
1029 free_element_chain = first;
1030 }
1031 }
1032
1033 prev_insn = 0;
1034
1035 #ifdef HAVE_cc0
1036 prev_insn_cc0 = 0;
1037 #endif
1038 }
1039
1040 /* Say that register REG contains a quantity in mode MODE not in any
1041 register before and initialize that quantity. */
1042
1043 static void
1044 make_new_qty (reg, mode)
1045 unsigned int reg;
1046 enum machine_mode mode;
1047 {
1048 register int q;
1049 register struct qty_table_elem *ent;
1050 register struct reg_eqv_elem *eqv;
1051
1052 if (next_qty >= max_qty)
1053 abort ();
1054
1055 q = REG_QTY (reg) = next_qty++;
1056 ent = &qty_table[q];
1057 ent->first_reg = reg;
1058 ent->last_reg = reg;
1059 ent->mode = mode;
1060 ent->const_rtx = ent->const_insn = NULL_RTX;
1061 ent->comparison_code = UNKNOWN;
1062
1063 eqv = &reg_eqv_table[reg];
1064 eqv->next = eqv->prev = -1;
1065 }
1066
1067 /* Make reg NEW equivalent to reg OLD.
1068 OLD is not changing; NEW is. */
1069
1070 static void
1071 make_regs_eqv (new, old)
1072 unsigned int new, old;
1073 {
1074 unsigned int lastr, firstr;
1075 int q = REG_QTY (old);
1076 struct qty_table_elem *ent;
1077
1078 ent = &qty_table[q];
1079
1080 /* Nothing should become eqv until it has a "non-invalid" qty number. */
1081 if (! REGNO_QTY_VALID_P (old))
1082 abort ();
1083
1084 REG_QTY (new) = q;
1085 firstr = ent->first_reg;
1086 lastr = ent->last_reg;
1087
1088 /* Prefer fixed hard registers to anything. Prefer pseudo regs to other
1089 hard regs. Among pseudos, if NEW will live longer than any other reg
1090 of the same qty, and that is beyond the current basic block,
1091 make it the new canonical replacement for this qty. */
1092 if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
1093 /* Certain fixed registers might be of the class NO_REGS. This means
1094 that not only can they not be allocated by the compiler, but
1095 they cannot be used in substitutions or canonicalizations
1096 either. */
1097 && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
1098 && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
1099 || (new >= FIRST_PSEUDO_REGISTER
1100 && (firstr < FIRST_PSEUDO_REGISTER
1101 || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
1102 || (uid_cuid[REGNO_FIRST_UID (new)]
1103 < cse_basic_block_start))
1104 && (uid_cuid[REGNO_LAST_UID (new)]
1105 > uid_cuid[REGNO_LAST_UID (firstr)]))))))
1106 {
1107 reg_eqv_table[firstr].prev = new;
1108 reg_eqv_table[new].next = firstr;
1109 reg_eqv_table[new].prev = -1;
1110 ent->first_reg = new;
1111 }
1112 else
1113 {
1114 /* If NEW is a hard reg (known to be non-fixed), insert at end.
1115 Otherwise, insert before any non-fixed hard regs that are at the
1116 end. Registers of class NO_REGS cannot be used as an
1117 equivalent for anything. */
1118 while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
1119 && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
1120 && new >= FIRST_PSEUDO_REGISTER)
1121 lastr = reg_eqv_table[lastr].prev;
1122 reg_eqv_table[new].next = reg_eqv_table[lastr].next;
1123 if (reg_eqv_table[lastr].next >= 0)
1124 reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
1125 else
1126 qty_table[q].last_reg = new;
1127 reg_eqv_table[lastr].next = new;
1128 reg_eqv_table[new].prev = lastr;
1129 }
1130 }
1131
1132 /* Remove REG from its equivalence class. */
1133
1134 static void
1135 delete_reg_equiv (reg)
1136 unsigned int reg;
1137 {
1138 register struct qty_table_elem *ent;
1139 register int q = REG_QTY (reg);
1140 register int p, n;
1141
1142 /* If invalid, do nothing. */
1143 if (q == (int) reg)
1144 return;
1145
1146 ent = &qty_table[q];
1147
1148 p = reg_eqv_table[reg].prev;
1149 n = reg_eqv_table[reg].next;
1150
1151 if (n != -1)
1152 reg_eqv_table[n].prev = p;
1153 else
1154 ent->last_reg = p;
1155 if (p != -1)
1156 reg_eqv_table[p].next = n;
1157 else
1158 ent->first_reg = n;
1159
1160 REG_QTY (reg) = reg;
1161 }
1162
1163 /* Remove any invalid expressions from the hash table
1164 that refer to any of the registers contained in expression X.
1165
1166 Make sure that newly inserted references to those registers
1167 as subexpressions will be considered valid.
1168
1169 mention_regs is not called when a register itself
1170 is being stored in the table.
1171
1172 Return 1 if we have done something that may have changed the hash code
1173 of X. */
1174
1175 static int
1176 mention_regs (x)
1177 rtx x;
1178 {
1179 register enum rtx_code code;
1180 register int i, j;
1181 register const char *fmt;
1182 register int changed = 0;
1183
1184 if (x == 0)
1185 return 0;
1186
1187 code = GET_CODE (x);
1188 if (code == REG)
1189 {
1190 unsigned int regno = REGNO (x);
1191 unsigned int endregno
1192 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
1193 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
1194 unsigned int i;
1195
1196 for (i = regno; i < endregno; i++)
1197 {
1198 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1199 remove_invalid_refs (i);
1200
1201 REG_IN_TABLE (i) = REG_TICK (i);
1202 }
1203
1204 return 0;
1205 }
1206
1207 /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
1208 pseudo if they don't use overlapping words. We handle only pseudos
1209 here for simplicity. */
1210 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1211 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1212 {
1213 unsigned int i = REGNO (SUBREG_REG (x));
1214
1215 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1216 {
1217 /* If reg_tick has been incremented more than once since
1218 reg_in_table was last set, that means that the entire
1219 register has been set before, so discard anything memorized
1220 for the entrire register, including all SUBREG expressions. */
1221 if (REG_IN_TABLE (i) != REG_TICK (i) - 1)
1222 remove_invalid_refs (i);
1223 else
1224 remove_invalid_subreg_refs (i, SUBREG_WORD (x), GET_MODE (x));
1225 }
1226
1227 REG_IN_TABLE (i) = REG_TICK (i);
1228 return 0;
1229 }
1230
1231 /* If X is a comparison or a COMPARE and either operand is a register
1232 that does not have a quantity, give it one. This is so that a later
1233 call to record_jump_equiv won't cause X to be assigned a different
1234 hash code and not found in the table after that call.
1235
1236 It is not necessary to do this here, since rehash_using_reg can
1237 fix up the table later, but doing this here eliminates the need to
1238 call that expensive function in the most common case where the only
1239 use of the register is in the comparison. */
1240
1241 if (code == COMPARE || GET_RTX_CLASS (code) == '<')
1242 {
1243 if (GET_CODE (XEXP (x, 0)) == REG
1244 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
1245 if (insert_regs (XEXP (x, 0), NULL_PTR, 0))
1246 {
1247 rehash_using_reg (XEXP (x, 0));
1248 changed = 1;
1249 }
1250
1251 if (GET_CODE (XEXP (x, 1)) == REG
1252 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
1253 if (insert_regs (XEXP (x, 1), NULL_PTR, 0))
1254 {
1255 rehash_using_reg (XEXP (x, 1));
1256 changed = 1;
1257 }
1258 }
1259
1260 fmt = GET_RTX_FORMAT (code);
1261 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1262 if (fmt[i] == 'e')
1263 changed |= mention_regs (XEXP (x, i));
1264 else if (fmt[i] == 'E')
1265 for (j = 0; j < XVECLEN (x, i); j++)
1266 changed |= mention_regs (XVECEXP (x, i, j));
1267
1268 return changed;
1269 }
1270
1271 /* Update the register quantities for inserting X into the hash table
1272 with a value equivalent to CLASSP.
1273 (If the class does not contain a REG, it is irrelevant.)
1274 If MODIFIED is nonzero, X is a destination; it is being modified.
1275 Note that delete_reg_equiv should be called on a register
1276 before insert_regs is done on that register with MODIFIED != 0.
1277
1278 Nonzero value means that elements of reg_qty have changed
1279 so X's hash code may be different. */
1280
1281 static int
1282 insert_regs (x, classp, modified)
1283 rtx x;
1284 struct table_elt *classp;
1285 int modified;
1286 {
1287 if (GET_CODE (x) == REG)
1288 {
1289 unsigned int regno = REGNO (x);
1290 int qty_valid;
1291
1292 /* If REGNO is in the equivalence table already but is of the
1293 wrong mode for that equivalence, don't do anything here. */
1294
1295 qty_valid = REGNO_QTY_VALID_P (regno);
1296 if (qty_valid)
1297 {
1298 struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];
1299
1300 if (ent->mode != GET_MODE (x))
1301 return 0;
1302 }
1303
1304 if (modified || ! qty_valid)
1305 {
1306 if (classp)
1307 for (classp = classp->first_same_value;
1308 classp != 0;
1309 classp = classp->next_same_value)
1310 if (GET_CODE (classp->exp) == REG
1311 && GET_MODE (classp->exp) == GET_MODE (x))
1312 {
1313 make_regs_eqv (regno, REGNO (classp->exp));
1314 return 1;
1315 }
1316
1317 /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
1318 than REG_IN_TABLE to find out if there was only a single preceding
1319 invalidation - for the SUBREG - or another one, which would be
1320 for the full register. However, if we find here that REG_TICK
1321 indicates that the register is invalid, it means that it has
1322 been invalidated in a separate operation. The SUBREG might be used
1323 now (then this is a recursive call), or we might use the full REG
1324 now and a SUBREG of it later. So bump up REG_TICK so that
1325 mention_regs will do the right thing. */
1326 if (! modified
1327 && REG_IN_TABLE (regno) >= 0
1328 && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
1329 REG_TICK (regno)++;
1330 make_new_qty (regno, GET_MODE (x));
1331 return 1;
1332 }
1333
1334 return 0;
1335 }
1336
1337 /* If X is a SUBREG, we will likely be inserting the inner register in the
1338 table. If that register doesn't have an assigned quantity number at
1339 this point but does later, the insertion that we will be doing now will
1340 not be accessible because its hash code will have changed. So assign
1341 a quantity number now. */
1342
1343 else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1344 && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1345 {
1346 insert_regs (SUBREG_REG (x), NULL_PTR, 0);
1347 mention_regs (x);
1348 return 1;
1349 }
1350 else
1351 return mention_regs (x);
1352 }
1353 \f
1354 /* Look in or update the hash table. */
1355
1356 /* Remove table element ELT from use in the table.
1357 HASH is its hash code, made using the HASH macro.
1358 It's an argument because often that is known in advance
1359 and we save much time not recomputing it. */
1360
1361 static void
1362 remove_from_table (elt, hash)
1363 register struct table_elt *elt;
1364 unsigned hash;
1365 {
1366 if (elt == 0)
1367 return;
1368
1369 /* Mark this element as removed. See cse_insn. */
1370 elt->first_same_value = 0;
1371
1372 /* Remove the table element from its equivalence class. */
1373
1374 {
1375 register struct table_elt *prev = elt->prev_same_value;
1376 register struct table_elt *next = elt->next_same_value;
1377
1378 if (next)
1379 next->prev_same_value = prev;
1380
1381 if (prev)
1382 prev->next_same_value = next;
1383 else
1384 {
1385 register struct table_elt *newfirst = next;
1386 while (next)
1387 {
1388 next->first_same_value = newfirst;
1389 next = next->next_same_value;
1390 }
1391 }
1392 }
1393
1394 /* Remove the table element from its hash bucket. */
1395
1396 {
1397 register struct table_elt *prev = elt->prev_same_hash;
1398 register struct table_elt *next = elt->next_same_hash;
1399
1400 if (next)
1401 next->prev_same_hash = prev;
1402
1403 if (prev)
1404 prev->next_same_hash = next;
1405 else if (table[hash] == elt)
1406 table[hash] = next;
1407 else
1408 {
1409 /* This entry is not in the proper hash bucket. This can happen
1410 when two classes were merged by `merge_equiv_classes'. Search
1411 for the hash bucket that it heads. This happens only very
1412 rarely, so the cost is acceptable. */
1413 for (hash = 0; hash < HASH_SIZE; hash++)
1414 if (table[hash] == elt)
1415 table[hash] = next;
1416 }
1417 }
1418
1419 /* Remove the table element from its related-value circular chain. */
1420
1421 if (elt->related_value != 0 && elt->related_value != elt)
1422 {
1423 register struct table_elt *p = elt->related_value;
1424
1425 while (p->related_value != elt)
1426 p = p->related_value;
1427 p->related_value = elt->related_value;
1428 if (p->related_value == p)
1429 p->related_value = 0;
1430 }
1431
1432 /* Now add it to the free element chain. */
1433 elt->next_same_hash = free_element_chain;
1434 free_element_chain = elt;
1435 }
1436
1437 /* Look up X in the hash table and return its table element,
1438 or 0 if X is not in the table.
1439
1440 MODE is the machine-mode of X, or if X is an integer constant
1441 with VOIDmode then MODE is the mode with which X will be used.
1442
1443 Here we are satisfied to find an expression whose tree structure
1444 looks like X. */
1445
1446 static struct table_elt *
1447 lookup (x, hash, mode)
1448 rtx x;
1449 unsigned hash;
1450 enum machine_mode mode;
1451 {
1452 register struct table_elt *p;
1453
1454 for (p = table[hash]; p; p = p->next_same_hash)
1455 if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
1456 || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
1457 return p;
1458
1459 return 0;
1460 }
1461
1462 /* Like `lookup' but don't care whether the table element uses invalid regs.
1463 Also ignore discrepancies in the machine mode of a register. */
1464
1465 static struct table_elt *
1466 lookup_for_remove (x, hash, mode)
1467 rtx x;
1468 unsigned hash;
1469 enum machine_mode mode;
1470 {
1471 register struct table_elt *p;
1472
1473 if (GET_CODE (x) == REG)
1474 {
1475 unsigned int regno = REGNO (x);
1476
1477 /* Don't check the machine mode when comparing registers;
1478 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1479 for (p = table[hash]; p; p = p->next_same_hash)
1480 if (GET_CODE (p->exp) == REG
1481 && REGNO (p->exp) == regno)
1482 return p;
1483 }
1484 else
1485 {
1486 for (p = table[hash]; p; p = p->next_same_hash)
1487 if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
1488 return p;
1489 }
1490
1491 return 0;
1492 }
1493
1494 /* Look for an expression equivalent to X and with code CODE.
1495 If one is found, return that expression. */
1496
1497 static rtx
1498 lookup_as_function (x, code)
1499 rtx x;
1500 enum rtx_code code;
1501 {
1502 register struct table_elt *p
1503 = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, GET_MODE (x));
1504
1505 /* If we are looking for a CONST_INT, the mode doesn't really matter, as
1506 long as we are narrowing. So if we looked in vain for a mode narrower
1507 than word_mode before, look for word_mode now. */
1508 if (p == 0 && code == CONST_INT
1509 && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
1510 {
1511 x = copy_rtx (x);
1512 PUT_MODE (x, word_mode);
1513 p = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, word_mode);
1514 }
1515
1516 if (p == 0)
1517 return 0;
1518
1519 for (p = p->first_same_value; p; p = p->next_same_value)
1520 if (GET_CODE (p->exp) == code
1521 /* Make sure this is a valid entry in the table. */
1522 && exp_equiv_p (p->exp, p->exp, 1, 0))
1523 return p->exp;
1524
1525 return 0;
1526 }
1527
1528 /* Insert X in the hash table, assuming HASH is its hash code
1529 and CLASSP is an element of the class it should go in
1530 (or 0 if a new class should be made).
1531 It is inserted at the proper position to keep the class in
1532 the order cheapest first.
1533
1534 MODE is the machine-mode of X, or if X is an integer constant
1535 with VOIDmode then MODE is the mode with which X will be used.
1536
1537 For elements of equal cheapness, the most recent one
1538 goes in front, except that the first element in the list
1539 remains first unless a cheaper element is added. The order of
1540 pseudo-registers does not matter, as canon_reg will be called to
1541 find the cheapest when a register is retrieved from the table.
1542
1543 The in_memory field in the hash table element is set to 0.
1544 The caller must set it nonzero if appropriate.
1545
1546 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1547 and if insert_regs returns a nonzero value
1548 you must then recompute its hash code before calling here.
1549
1550 If necessary, update table showing constant values of quantities. */
1551
1552 #define CHEAPER(X, Y) \
1553 (preferrable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
1554
1555 static struct table_elt *
1556 insert (x, classp, hash, mode)
1557 register rtx x;
1558 register struct table_elt *classp;
1559 unsigned hash;
1560 enum machine_mode mode;
1561 {
1562 register struct table_elt *elt;
1563
1564 /* If X is a register and we haven't made a quantity for it,
1565 something is wrong. */
1566 if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
1567 abort ();
1568
1569 /* If X is a hard register, show it is being put in the table. */
1570 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
1571 {
1572 unsigned int regno = REGNO (x);
1573 unsigned int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1574 unsigned int i;
1575
1576 for (i = regno; i < endregno; i++)
1577 SET_HARD_REG_BIT (hard_regs_in_table, i);
1578 }
1579
1580 /* If X is a label, show we recorded it. */
1581 if (GET_CODE (x) == LABEL_REF
1582 || (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS
1583 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF))
1584 recorded_label_ref = 1;
1585
1586 /* Put an element for X into the right hash bucket. */
1587
1588 elt = free_element_chain;
1589 if (elt)
1590 free_element_chain = elt->next_same_hash;
1591 else
1592 {
1593 n_elements_made++;
1594 elt = (struct table_elt *) xmalloc (sizeof (struct table_elt));
1595 }
1596
1597 elt->exp = x;
1598 elt->canon_exp = NULL_RTX;
1599 elt->cost = COST (x);
1600 elt->regcost = approx_reg_cost (x);
1601 elt->next_same_value = 0;
1602 elt->prev_same_value = 0;
1603 elt->next_same_hash = table[hash];
1604 elt->prev_same_hash = 0;
1605 elt->related_value = 0;
1606 elt->in_memory = 0;
1607 elt->mode = mode;
1608 elt->is_const = (CONSTANT_P (x)
1609 /* GNU C++ takes advantage of this for `this'
1610 (and other const values). */
1611 || (RTX_UNCHANGING_P (x)
1612 && GET_CODE (x) == REG
1613 && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1614 || FIXED_BASE_PLUS_P (x));
1615
1616 if (table[hash])
1617 table[hash]->prev_same_hash = elt;
1618 table[hash] = elt;
1619
1620 /* Put it into the proper value-class. */
1621 if (classp)
1622 {
1623 classp = classp->first_same_value;
1624 if (CHEAPER (elt, classp))
1625 /* Insert at the head of the class */
1626 {
1627 register struct table_elt *p;
1628 elt->next_same_value = classp;
1629 classp->prev_same_value = elt;
1630 elt->first_same_value = elt;
1631
1632 for (p = classp; p; p = p->next_same_value)
1633 p->first_same_value = elt;
1634 }
1635 else
1636 {
1637 /* Insert not at head of the class. */
1638 /* Put it after the last element cheaper than X. */
1639 register struct table_elt *p, *next;
1640
1641 for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1642 p = next);
1643
1644 /* Put it after P and before NEXT. */
1645 elt->next_same_value = next;
1646 if (next)
1647 next->prev_same_value = elt;
1648
1649 elt->prev_same_value = p;
1650 p->next_same_value = elt;
1651 elt->first_same_value = classp;
1652 }
1653 }
1654 else
1655 elt->first_same_value = elt;
1656
1657 /* If this is a constant being set equivalent to a register or a register
1658 being set equivalent to a constant, note the constant equivalence.
1659
1660 If this is a constant, it cannot be equivalent to a different constant,
1661 and a constant is the only thing that can be cheaper than a register. So
1662 we know the register is the head of the class (before the constant was
1663 inserted).
1664
1665 If this is a register that is not already known equivalent to a
1666 constant, we must check the entire class.
1667
1668 If this is a register that is already known equivalent to an insn,
1669 update the qtys `const_insn' to show that `this_insn' is the latest
1670 insn making that quantity equivalent to the constant. */
1671
1672 if (elt->is_const && classp && GET_CODE (classp->exp) == REG
1673 && GET_CODE (x) != REG)
1674 {
1675 int exp_q = REG_QTY (REGNO (classp->exp));
1676 struct qty_table_elem *exp_ent = &qty_table[exp_q];
1677
1678 exp_ent->const_rtx = gen_lowpart_if_possible (exp_ent->mode, x);
1679 exp_ent->const_insn = this_insn;
1680 }
1681
1682 else if (GET_CODE (x) == REG
1683 && classp
1684 && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1685 && ! elt->is_const)
1686 {
1687 register struct table_elt *p;
1688
1689 for (p = classp; p != 0; p = p->next_same_value)
1690 {
1691 if (p->is_const && GET_CODE (p->exp) != REG)
1692 {
1693 int x_q = REG_QTY (REGNO (x));
1694 struct qty_table_elem *x_ent = &qty_table[x_q];
1695
1696 x_ent->const_rtx
1697 = gen_lowpart_if_possible (GET_MODE (x), p->exp);
1698 x_ent->const_insn = this_insn;
1699 break;
1700 }
1701 }
1702 }
1703
1704 else if (GET_CODE (x) == REG
1705 && qty_table[REG_QTY (REGNO (x))].const_rtx
1706 && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1707 qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1708
1709 /* If this is a constant with symbolic value,
1710 and it has a term with an explicit integer value,
1711 link it up with related expressions. */
1712 if (GET_CODE (x) == CONST)
1713 {
1714 rtx subexp = get_related_value (x);
1715 unsigned subhash;
1716 struct table_elt *subelt, *subelt_prev;
1717
1718 if (subexp != 0)
1719 {
1720 /* Get the integer-free subexpression in the hash table. */
1721 subhash = safe_hash (subexp, mode) & HASH_MASK;
1722 subelt = lookup (subexp, subhash, mode);
1723 if (subelt == 0)
1724 subelt = insert (subexp, NULL_PTR, subhash, mode);
1725 /* Initialize SUBELT's circular chain if it has none. */
1726 if (subelt->related_value == 0)
1727 subelt->related_value = subelt;
1728 /* Find the element in the circular chain that precedes SUBELT. */
1729 subelt_prev = subelt;
1730 while (subelt_prev->related_value != subelt)
1731 subelt_prev = subelt_prev->related_value;
1732 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1733 This way the element that follows SUBELT is the oldest one. */
1734 elt->related_value = subelt_prev->related_value;
1735 subelt_prev->related_value = elt;
1736 }
1737 }
1738
1739 return elt;
1740 }
1741 \f
1742 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1743 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1744 the two classes equivalent.
1745
1746 CLASS1 will be the surviving class; CLASS2 should not be used after this
1747 call.
1748
1749 Any invalid entries in CLASS2 will not be copied. */
1750
1751 static void
1752 merge_equiv_classes (class1, class2)
1753 struct table_elt *class1, *class2;
1754 {
1755 struct table_elt *elt, *next, *new;
1756
1757 /* Ensure we start with the head of the classes. */
1758 class1 = class1->first_same_value;
1759 class2 = class2->first_same_value;
1760
1761 /* If they were already equal, forget it. */
1762 if (class1 == class2)
1763 return;
1764
1765 for (elt = class2; elt; elt = next)
1766 {
1767 unsigned int hash;
1768 rtx exp = elt->exp;
1769 enum machine_mode mode = elt->mode;
1770
1771 next = elt->next_same_value;
1772
1773 /* Remove old entry, make a new one in CLASS1's class.
1774 Don't do this for invalid entries as we cannot find their
1775 hash code (it also isn't necessary). */
1776 if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
1777 {
1778 hash_arg_in_memory = 0;
1779 hash = HASH (exp, mode);
1780
1781 if (GET_CODE (exp) == REG)
1782 delete_reg_equiv (REGNO (exp));
1783
1784 remove_from_table (elt, hash);
1785
1786 if (insert_regs (exp, class1, 0))
1787 {
1788 rehash_using_reg (exp);
1789 hash = HASH (exp, mode);
1790 }
1791 new = insert (exp, class1, hash, mode);
1792 new->in_memory = hash_arg_in_memory;
1793 }
1794 }
1795 }
1796 \f
1797 /* Flush the entire hash table. */
1798
1799 static void
1800 flush_hash_table ()
1801 {
1802 int i;
1803 struct table_elt *p;
1804
1805 for (i = 0; i < HASH_SIZE; i++)
1806 for (p = table[i]; p; p = table[i])
1807 {
1808 /* Note that invalidate can remove elements
1809 after P in the current hash chain. */
1810 if (GET_CODE (p->exp) == REG)
1811 invalidate (p->exp, p->mode);
1812 else
1813 remove_from_table (p, i);
1814 }
1815 }
1816 \f
1817 /* Function called for each rtx to check whether true dependence exist. */
1818 struct check_dependence_data
1819 {
1820 enum machine_mode mode;
1821 rtx exp;
1822 };
1823 static int
1824 check_dependence (x, data)
1825 rtx *x;
1826 void *data;
1827 {
1828 struct check_dependence_data *d = (struct check_dependence_data *) data;
1829 if (*x && GET_CODE (*x) == MEM)
1830 return true_dependence (d->exp, d->mode, *x, cse_rtx_varies_p);
1831 else
1832 return 0;
1833 }
1834 \f
1835 /* Remove from the hash table, or mark as invalid, all expressions whose
1836 values could be altered by storing in X. X is a register, a subreg, or
1837 a memory reference with nonvarying address (because, when a memory
1838 reference with a varying address is stored in, all memory references are
1839 removed by invalidate_memory so specific invalidation is superfluous).
1840 FULL_MODE, if not VOIDmode, indicates that this much should be
1841 invalidated instead of just the amount indicated by the mode of X. This
1842 is only used for bitfield stores into memory.
1843
1844 A nonvarying address may be just a register or just a symbol reference,
1845 or it may be either of those plus a numeric offset. */
1846
1847 static void
1848 invalidate (x, full_mode)
1849 rtx x;
1850 enum machine_mode full_mode;
1851 {
1852 register int i;
1853 register struct table_elt *p;
1854
1855 switch (GET_CODE (x))
1856 {
1857 case REG:
1858 {
1859 /* If X is a register, dependencies on its contents are recorded
1860 through the qty number mechanism. Just change the qty number of
1861 the register, mark it as invalid for expressions that refer to it,
1862 and remove it itself. */
1863 unsigned int regno = REGNO (x);
1864 unsigned int hash = HASH (x, GET_MODE (x));
1865
1866 /* Remove REGNO from any quantity list it might be on and indicate
1867 that its value might have changed. If it is a pseudo, remove its
1868 entry from the hash table.
1869
1870 For a hard register, we do the first two actions above for any
1871 additional hard registers corresponding to X. Then, if any of these
1872 registers are in the table, we must remove any REG entries that
1873 overlap these registers. */
1874
1875 delete_reg_equiv (regno);
1876 REG_TICK (regno)++;
1877
1878 if (regno >= FIRST_PSEUDO_REGISTER)
1879 {
1880 /* Because a register can be referenced in more than one mode,
1881 we might have to remove more than one table entry. */
1882 struct table_elt *elt;
1883
1884 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1885 remove_from_table (elt, hash);
1886 }
1887 else
1888 {
1889 HOST_WIDE_INT in_table
1890 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1891 unsigned int endregno
1892 = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1893 unsigned int tregno, tendregno, rn;
1894 register struct table_elt *p, *next;
1895
1896 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1897
1898 for (rn = regno + 1; rn < endregno; rn++)
1899 {
1900 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1901 CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1902 delete_reg_equiv (rn);
1903 REG_TICK (rn)++;
1904 }
1905
1906 if (in_table)
1907 for (hash = 0; hash < HASH_SIZE; hash++)
1908 for (p = table[hash]; p; p = next)
1909 {
1910 next = p->next_same_hash;
1911
1912 if (GET_CODE (p->exp) != REG
1913 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1914 continue;
1915
1916 tregno = REGNO (p->exp);
1917 tendregno
1918 = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1919 if (tendregno > regno && tregno < endregno)
1920 remove_from_table (p, hash);
1921 }
1922 }
1923 }
1924 return;
1925
1926 case SUBREG:
1927 invalidate (SUBREG_REG (x), VOIDmode);
1928 return;
1929
1930 case PARALLEL:
1931 for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1932 invalidate (XVECEXP (x, 0, i), VOIDmode);
1933 return;
1934
1935 case EXPR_LIST:
1936 /* This is part of a disjoint return value; extract the location in
1937 question ignoring the offset. */
1938 invalidate (XEXP (x, 0), VOIDmode);
1939 return;
1940
1941 case MEM:
1942 /* Calculate the canonical version of X here so that
1943 true_dependence doesn't generate new RTL for X on each call. */
1944 x = canon_rtx (x);
1945
1946 /* Remove all hash table elements that refer to overlapping pieces of
1947 memory. */
1948 if (full_mode == VOIDmode)
1949 full_mode = GET_MODE (x);
1950
1951 for (i = 0; i < HASH_SIZE; i++)
1952 {
1953 register struct table_elt *next;
1954
1955 for (p = table[i]; p; p = next)
1956 {
1957 next = p->next_same_hash;
1958 if (p->in_memory)
1959 {
1960 struct check_dependence_data d;
1961
1962 /* Just canonicalize the expression once;
1963 otherwise each time we call invalidate
1964 true_dependence will canonicalize the
1965 expression again. */
1966 if (!p->canon_exp)
1967 p->canon_exp = canon_rtx (p->exp);
1968 d.exp = x;
1969 d.mode = full_mode;
1970 if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1971 remove_from_table (p, i);
1972 }
1973 }
1974 }
1975 return;
1976
1977 default:
1978 abort ();
1979 }
1980 }
1981 \f
1982 /* Remove all expressions that refer to register REGNO,
1983 since they are already invalid, and we are about to
1984 mark that register valid again and don't want the old
1985 expressions to reappear as valid. */
1986
1987 static void
1988 remove_invalid_refs (regno)
1989 unsigned int regno;
1990 {
1991 unsigned int i;
1992 struct table_elt *p, *next;
1993
1994 for (i = 0; i < HASH_SIZE; i++)
1995 for (p = table[i]; p; p = next)
1996 {
1997 next = p->next_same_hash;
1998 if (GET_CODE (p->exp) != REG
1999 && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
2000 remove_from_table (p, i);
2001 }
2002 }
2003
2004 /* Likewise for a subreg with subreg_reg WORD and mode MODE. */
2005 static void
2006 remove_invalid_subreg_refs (regno, word, mode)
2007 unsigned int regno;
2008 unsigned int word;
2009 enum machine_mode mode;
2010 {
2011 unsigned int i;
2012 struct table_elt *p, *next;
2013 unsigned int end = word + (GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD;
2014
2015 for (i = 0; i < HASH_SIZE; i++)
2016 for (p = table[i]; p; p = next)
2017 {
2018 rtx exp;
2019 next = p->next_same_hash;
2020
2021 exp = p->exp;
2022 if (GET_CODE (p->exp) != REG
2023 && (GET_CODE (exp) != SUBREG
2024 || GET_CODE (SUBREG_REG (exp)) != REG
2025 || REGNO (SUBREG_REG (exp)) != regno
2026 || (((SUBREG_WORD (exp)
2027 + (GET_MODE_SIZE (GET_MODE (exp)) - 1) / UNITS_PER_WORD)
2028 >= word)
2029 && SUBREG_WORD (exp) <= end))
2030 && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
2031 remove_from_table (p, i);
2032 }
2033 }
2034 \f
2035 /* Recompute the hash codes of any valid entries in the hash table that
2036 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
2037
2038 This is called when we make a jump equivalence. */
2039
2040 static void
2041 rehash_using_reg (x)
2042 rtx x;
2043 {
2044 unsigned int i;
2045 struct table_elt *p, *next;
2046 unsigned hash;
2047
2048 if (GET_CODE (x) == SUBREG)
2049 x = SUBREG_REG (x);
2050
2051 /* If X is not a register or if the register is known not to be in any
2052 valid entries in the table, we have no work to do. */
2053
2054 if (GET_CODE (x) != REG
2055 || REG_IN_TABLE (REGNO (x)) < 0
2056 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
2057 return;
2058
2059 /* Scan all hash chains looking for valid entries that mention X.
2060 If we find one and it is in the wrong hash chain, move it. We can skip
2061 objects that are registers, since they are handled specially. */
2062
2063 for (i = 0; i < HASH_SIZE; i++)
2064 for (p = table[i]; p; p = next)
2065 {
2066 next = p->next_same_hash;
2067 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
2068 && exp_equiv_p (p->exp, p->exp, 1, 0)
2069 && i != (hash = safe_hash (p->exp, p->mode) & HASH_MASK))
2070 {
2071 if (p->next_same_hash)
2072 p->next_same_hash->prev_same_hash = p->prev_same_hash;
2073
2074 if (p->prev_same_hash)
2075 p->prev_same_hash->next_same_hash = p->next_same_hash;
2076 else
2077 table[i] = p->next_same_hash;
2078
2079 p->next_same_hash = table[hash];
2080 p->prev_same_hash = 0;
2081 if (table[hash])
2082 table[hash]->prev_same_hash = p;
2083 table[hash] = p;
2084 }
2085 }
2086 }
2087 \f
2088 /* Remove from the hash table any expression that is a call-clobbered
2089 register. Also update their TICK values. */
2090
2091 static void
2092 invalidate_for_call ()
2093 {
2094 unsigned int regno, endregno;
2095 unsigned int i;
2096 unsigned hash;
2097 struct table_elt *p, *next;
2098 int in_table = 0;
2099
2100 /* Go through all the hard registers. For each that is clobbered in
2101 a CALL_INSN, remove the register from quantity chains and update
2102 reg_tick if defined. Also see if any of these registers is currently
2103 in the table. */
2104
2105 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2106 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2107 {
2108 delete_reg_equiv (regno);
2109 if (REG_TICK (regno) >= 0)
2110 REG_TICK (regno)++;
2111
2112 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2113 }
2114
2115 /* In the case where we have no call-clobbered hard registers in the
2116 table, we are done. Otherwise, scan the table and remove any
2117 entry that overlaps a call-clobbered register. */
2118
2119 if (in_table)
2120 for (hash = 0; hash < HASH_SIZE; hash++)
2121 for (p = table[hash]; p; p = next)
2122 {
2123 next = p->next_same_hash;
2124
2125 if (GET_CODE (p->exp) != REG
2126 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2127 continue;
2128
2129 regno = REGNO (p->exp);
2130 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
2131
2132 for (i = regno; i < endregno; i++)
2133 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2134 {
2135 remove_from_table (p, hash);
2136 break;
2137 }
2138 }
2139 }
2140 \f
2141 /* Given an expression X of type CONST,
2142 and ELT which is its table entry (or 0 if it
2143 is not in the hash table),
2144 return an alternate expression for X as a register plus integer.
2145 If none can be found, return 0. */
2146
2147 static rtx
2148 use_related_value (x, elt)
2149 rtx x;
2150 struct table_elt *elt;
2151 {
2152 register struct table_elt *relt = 0;
2153 register struct table_elt *p, *q;
2154 HOST_WIDE_INT offset;
2155
2156 /* First, is there anything related known?
2157 If we have a table element, we can tell from that.
2158 Otherwise, must look it up. */
2159
2160 if (elt != 0 && elt->related_value != 0)
2161 relt = elt;
2162 else if (elt == 0 && GET_CODE (x) == CONST)
2163 {
2164 rtx subexp = get_related_value (x);
2165 if (subexp != 0)
2166 relt = lookup (subexp,
2167 safe_hash (subexp, GET_MODE (subexp)) & HASH_MASK,
2168 GET_MODE (subexp));
2169 }
2170
2171 if (relt == 0)
2172 return 0;
2173
2174 /* Search all related table entries for one that has an
2175 equivalent register. */
2176
2177 p = relt;
2178 while (1)
2179 {
2180 /* This loop is strange in that it is executed in two different cases.
2181 The first is when X is already in the table. Then it is searching
2182 the RELATED_VALUE list of X's class (RELT). The second case is when
2183 X is not in the table. Then RELT points to a class for the related
2184 value.
2185
2186 Ensure that, whatever case we are in, that we ignore classes that have
2187 the same value as X. */
2188
2189 if (rtx_equal_p (x, p->exp))
2190 q = 0;
2191 else
2192 for (q = p->first_same_value; q; q = q->next_same_value)
2193 if (GET_CODE (q->exp) == REG)
2194 break;
2195
2196 if (q)
2197 break;
2198
2199 p = p->related_value;
2200
2201 /* We went all the way around, so there is nothing to be found.
2202 Alternatively, perhaps RELT was in the table for some other reason
2203 and it has no related values recorded. */
2204 if (p == relt || p == 0)
2205 break;
2206 }
2207
2208 if (q == 0)
2209 return 0;
2210
2211 offset = (get_integer_term (x) - get_integer_term (p->exp));
2212 /* Note: OFFSET may be 0 if P->xexp and X are related by commutativity. */
2213 return plus_constant (q->exp, offset);
2214 }
2215 \f
2216 /* Hash a string. Just add its bytes up. */
2217 static inline unsigned
2218 canon_hash_string (ps)
2219 const char *ps;
2220 {
2221 unsigned hash = 0;
2222 const unsigned char *p = (const unsigned char *)ps;
2223
2224 if (p)
2225 while (*p)
2226 hash += *p++;
2227
2228 return hash;
2229 }
2230
2231 /* Hash an rtx. We are careful to make sure the value is never negative.
2232 Equivalent registers hash identically.
2233 MODE is used in hashing for CONST_INTs only;
2234 otherwise the mode of X is used.
2235
2236 Store 1 in do_not_record if any subexpression is volatile.
2237
2238 Store 1 in hash_arg_in_memory if X contains a MEM rtx
2239 which does not have the RTX_UNCHANGING_P bit set.
2240
2241 Note that cse_insn knows that the hash code of a MEM expression
2242 is just (int) MEM plus the hash code of the address. */
2243
2244 static unsigned
2245 canon_hash (x, mode)
2246 rtx x;
2247 enum machine_mode mode;
2248 {
2249 register int i, j;
2250 register unsigned hash = 0;
2251 register enum rtx_code code;
2252 register const char *fmt;
2253
2254 /* repeat is used to turn tail-recursion into iteration. */
2255 repeat:
2256 if (x == 0)
2257 return hash;
2258
2259 code = GET_CODE (x);
2260 switch (code)
2261 {
2262 case REG:
2263 {
2264 unsigned int regno = REGNO (x);
2265
2266 /* On some machines, we can't record any non-fixed hard register,
2267 because extending its life will cause reload problems. We
2268 consider ap, fp, and sp to be fixed for this purpose.
2269
2270 We also consider CCmode registers to be fixed for this purpose;
2271 failure to do so leads to failure to simplify 0<100 type of
2272 conditionals.
2273
2274 On all machines, we can't record any global registers. */
2275
2276 if (regno < FIRST_PSEUDO_REGISTER
2277 && (global_regs[regno]
2278 || (SMALL_REGISTER_CLASSES
2279 && ! fixed_regs[regno]
2280 && regno != FRAME_POINTER_REGNUM
2281 && regno != HARD_FRAME_POINTER_REGNUM
2282 && regno != ARG_POINTER_REGNUM
2283 && regno != STACK_POINTER_REGNUM
2284 && GET_MODE_CLASS (GET_MODE (x)) != MODE_CC)))
2285 {
2286 do_not_record = 1;
2287 return 0;
2288 }
2289
2290 hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
2291 return hash;
2292 }
2293
2294 /* We handle SUBREG of a REG specially because the underlying
2295 reg changes its hash value with every value change; we don't
2296 want to have to forget unrelated subregs when one subreg changes. */
2297 case SUBREG:
2298 {
2299 if (GET_CODE (SUBREG_REG (x)) == REG)
2300 {
2301 hash += (((unsigned) SUBREG << 7)
2302 + REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
2303 return hash;
2304 }
2305 break;
2306 }
2307
2308 case CONST_INT:
2309 {
2310 unsigned HOST_WIDE_INT tem = INTVAL (x);
2311 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2312 return hash;
2313 }
2314
2315 case CONST_DOUBLE:
2316 /* This is like the general case, except that it only counts
2317 the integers representing the constant. */
2318 hash += (unsigned) code + (unsigned) GET_MODE (x);
2319 if (GET_MODE (x) != VOIDmode)
2320 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
2321 {
2322 unsigned HOST_WIDE_INT tem = XWINT (x, i);
2323 hash += tem;
2324 }
2325 else
2326 hash += ((unsigned) CONST_DOUBLE_LOW (x)
2327 + (unsigned) CONST_DOUBLE_HIGH (x));
2328 return hash;
2329
2330 /* Assume there is only one rtx object for any given label. */
2331 case LABEL_REF:
2332 hash += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2333 return hash;
2334
2335 case SYMBOL_REF:
2336 hash += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2337 return hash;
2338
2339 case MEM:
2340 /* We don't record if marked volatile or if BLKmode since we don't
2341 know the size of the move. */
2342 if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2343 {
2344 do_not_record = 1;
2345 return 0;
2346 }
2347 if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2348 {
2349 hash_arg_in_memory = 1;
2350 }
2351 /* Now that we have already found this special case,
2352 might as well speed it up as much as possible. */
2353 hash += (unsigned) MEM;
2354 x = XEXP (x, 0);
2355 goto repeat;
2356
2357 case USE:
2358 /* A USE that mentions non-volatile memory needs special
2359 handling since the MEM may be BLKmode which normally
2360 prevents an entry from being made. Pure calls are
2361 marked by a USE which mentions BLKmode memory. */
2362 if (GET_CODE (XEXP (x, 0)) == MEM
2363 && ! MEM_VOLATILE_P (XEXP (x, 0)))
2364 {
2365 hash += (unsigned)USE;
2366 x = XEXP (x, 0);
2367
2368 if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2369 hash_arg_in_memory = 1;
2370
2371 /* Now that we have already found this special case,
2372 might as well speed it up as much as possible. */
2373 hash += (unsigned) MEM;
2374 x = XEXP (x, 0);
2375 goto repeat;
2376 }
2377 break;
2378
2379 case PRE_DEC:
2380 case PRE_INC:
2381 case POST_DEC:
2382 case POST_INC:
2383 case PRE_MODIFY:
2384 case POST_MODIFY:
2385 case PC:
2386 case CC0:
2387 case CALL:
2388 case UNSPEC_VOLATILE:
2389 do_not_record = 1;
2390 return 0;
2391
2392 case ASM_OPERANDS:
2393 if (MEM_VOLATILE_P (x))
2394 {
2395 do_not_record = 1;
2396 return 0;
2397 }
2398 else
2399 {
2400 /* We don't want to take the filename and line into account. */
2401 hash += (unsigned) code + (unsigned) GET_MODE (x)
2402 + canon_hash_string (ASM_OPERANDS_TEMPLATE (x))
2403 + canon_hash_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2404 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2405
2406 if (ASM_OPERANDS_INPUT_LENGTH (x))
2407 {
2408 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2409 {
2410 hash += (canon_hash (ASM_OPERANDS_INPUT (x, i),
2411 GET_MODE (ASM_OPERANDS_INPUT (x, i)))
2412 + canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT
2413 (x, i)));
2414 }
2415
2416 hash += canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2417 x = ASM_OPERANDS_INPUT (x, 0);
2418 mode = GET_MODE (x);
2419 goto repeat;
2420 }
2421
2422 return hash;
2423 }
2424 break;
2425
2426 default:
2427 break;
2428 }
2429
2430 i = GET_RTX_LENGTH (code) - 1;
2431 hash += (unsigned) code + (unsigned) GET_MODE (x);
2432 fmt = GET_RTX_FORMAT (code);
2433 for (; i >= 0; i--)
2434 {
2435 if (fmt[i] == 'e')
2436 {
2437 rtx tem = XEXP (x, i);
2438
2439 /* If we are about to do the last recursive call
2440 needed at this level, change it into iteration.
2441 This function is called enough to be worth it. */
2442 if (i == 0)
2443 {
2444 x = tem;
2445 goto repeat;
2446 }
2447 hash += canon_hash (tem, 0);
2448 }
2449 else if (fmt[i] == 'E')
2450 for (j = 0; j < XVECLEN (x, i); j++)
2451 hash += canon_hash (XVECEXP (x, i, j), 0);
2452 else if (fmt[i] == 's')
2453 hash += canon_hash_string (XSTR (x, i));
2454 else if (fmt[i] == 'i')
2455 {
2456 register unsigned tem = XINT (x, i);
2457 hash += tem;
2458 }
2459 else if (fmt[i] == '0' || fmt[i] == 't')
2460 /* Unused. */
2461 ;
2462 else
2463 abort ();
2464 }
2465 return hash;
2466 }
2467
2468 /* Like canon_hash but with no side effects. */
2469
2470 static unsigned
2471 safe_hash (x, mode)
2472 rtx x;
2473 enum machine_mode mode;
2474 {
2475 int save_do_not_record = do_not_record;
2476 int save_hash_arg_in_memory = hash_arg_in_memory;
2477 unsigned hash = canon_hash (x, mode);
2478 hash_arg_in_memory = save_hash_arg_in_memory;
2479 do_not_record = save_do_not_record;
2480 return hash;
2481 }
2482 \f
2483 /* Return 1 iff X and Y would canonicalize into the same thing,
2484 without actually constructing the canonicalization of either one.
2485 If VALIDATE is nonzero,
2486 we assume X is an expression being processed from the rtl
2487 and Y was found in the hash table. We check register refs
2488 in Y for being marked as valid.
2489
2490 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2491 that is known to be in the register. Ordinarily, we don't allow them
2492 to match, because letting them match would cause unpredictable results
2493 in all the places that search a hash table chain for an equivalent
2494 for a given value. A possible equivalent that has different structure
2495 has its hash code computed from different data. Whether the hash code
2496 is the same as that of the given value is pure luck. */
2497
2498 static int
2499 exp_equiv_p (x, y, validate, equal_values)
2500 rtx x, y;
2501 int validate;
2502 int equal_values;
2503 {
2504 register int i, j;
2505 register enum rtx_code code;
2506 register const char *fmt;
2507
2508 /* Note: it is incorrect to assume an expression is equivalent to itself
2509 if VALIDATE is nonzero. */
2510 if (x == y && !validate)
2511 return 1;
2512 if (x == 0 || y == 0)
2513 return x == y;
2514
2515 code = GET_CODE (x);
2516 if (code != GET_CODE (y))
2517 {
2518 if (!equal_values)
2519 return 0;
2520
2521 /* If X is a constant and Y is a register or vice versa, they may be
2522 equivalent. We only have to validate if Y is a register. */
2523 if (CONSTANT_P (x) && GET_CODE (y) == REG
2524 && REGNO_QTY_VALID_P (REGNO (y)))
2525 {
2526 int y_q = REG_QTY (REGNO (y));
2527 struct qty_table_elem *y_ent = &qty_table[y_q];
2528
2529 if (GET_MODE (y) == y_ent->mode
2530 && rtx_equal_p (x, y_ent->const_rtx)
2531 && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
2532 return 1;
2533 }
2534
2535 if (CONSTANT_P (y) && code == REG
2536 && REGNO_QTY_VALID_P (REGNO (x)))
2537 {
2538 int x_q = REG_QTY (REGNO (x));
2539 struct qty_table_elem *x_ent = &qty_table[x_q];
2540
2541 if (GET_MODE (x) == x_ent->mode
2542 && rtx_equal_p (y, x_ent->const_rtx))
2543 return 1;
2544 }
2545
2546 return 0;
2547 }
2548
2549 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2550 if (GET_MODE (x) != GET_MODE (y))
2551 return 0;
2552
2553 switch (code)
2554 {
2555 case PC:
2556 case CC0:
2557 case CONST_INT:
2558 return x == y;
2559
2560 case LABEL_REF:
2561 return XEXP (x, 0) == XEXP (y, 0);
2562
2563 case SYMBOL_REF:
2564 return XSTR (x, 0) == XSTR (y, 0);
2565
2566 case REG:
2567 {
2568 unsigned int regno = REGNO (y);
2569 unsigned int endregno
2570 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2571 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2572 unsigned int i;
2573
2574 /* If the quantities are not the same, the expressions are not
2575 equivalent. If there are and we are not to validate, they
2576 are equivalent. Otherwise, ensure all regs are up-to-date. */
2577
2578 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2579 return 0;
2580
2581 if (! validate)
2582 return 1;
2583
2584 for (i = regno; i < endregno; i++)
2585 if (REG_IN_TABLE (i) != REG_TICK (i))
2586 return 0;
2587
2588 return 1;
2589 }
2590
2591 /* For commutative operations, check both orders. */
2592 case PLUS:
2593 case MULT:
2594 case AND:
2595 case IOR:
2596 case XOR:
2597 case NE:
2598 case EQ:
2599 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2600 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2601 validate, equal_values))
2602 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2603 validate, equal_values)
2604 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2605 validate, equal_values)));
2606
2607 case ASM_OPERANDS:
2608 /* We don't use the generic code below because we want to
2609 disregard filename and line numbers. */
2610
2611 /* A volatile asm isn't equivalent to any other. */
2612 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2613 return 0;
2614
2615 if (GET_MODE (x) != GET_MODE (y)
2616 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2617 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2618 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2619 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2620 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2621 return 0;
2622
2623 if (ASM_OPERANDS_INPUT_LENGTH (x))
2624 {
2625 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2626 if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2627 ASM_OPERANDS_INPUT (y, i),
2628 validate, equal_values)
2629 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2630 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2631 return 0;
2632 }
2633
2634 return 1;
2635
2636 default:
2637 break;
2638 }
2639
2640 /* Compare the elements. If any pair of corresponding elements
2641 fail to match, return 0 for the whole things. */
2642
2643 fmt = GET_RTX_FORMAT (code);
2644 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2645 {
2646 switch (fmt[i])
2647 {
2648 case 'e':
2649 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2650 return 0;
2651 break;
2652
2653 case 'E':
2654 if (XVECLEN (x, i) != XVECLEN (y, i))
2655 return 0;
2656 for (j = 0; j < XVECLEN (x, i); j++)
2657 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2658 validate, equal_values))
2659 return 0;
2660 break;
2661
2662 case 's':
2663 if (strcmp (XSTR (x, i), XSTR (y, i)))
2664 return 0;
2665 break;
2666
2667 case 'i':
2668 if (XINT (x, i) != XINT (y, i))
2669 return 0;
2670 break;
2671
2672 case 'w':
2673 if (XWINT (x, i) != XWINT (y, i))
2674 return 0;
2675 break;
2676
2677 case '0':
2678 case 't':
2679 break;
2680
2681 default:
2682 abort ();
2683 }
2684 }
2685
2686 return 1;
2687 }
2688 \f
2689 /* Return 1 if X has a value that can vary even between two
2690 executions of the program. 0 means X can be compared reliably
2691 against certain constants or near-constants. */
2692
2693 static int
2694 cse_rtx_varies_p (x, from_alias)
2695 register rtx x;
2696 int from_alias;
2697 {
2698 /* We need not check for X and the equivalence class being of the same
2699 mode because if X is equivalent to a constant in some mode, it
2700 doesn't vary in any mode. */
2701
2702 if (GET_CODE (x) == REG
2703 && REGNO_QTY_VALID_P (REGNO (x)))
2704 {
2705 int x_q = REG_QTY (REGNO (x));
2706 struct qty_table_elem *x_ent = &qty_table[x_q];
2707
2708 if (GET_MODE (x) == x_ent->mode
2709 && x_ent->const_rtx != NULL_RTX)
2710 return 0;
2711 }
2712
2713 if (GET_CODE (x) == PLUS
2714 && GET_CODE (XEXP (x, 1)) == CONST_INT
2715 && GET_CODE (XEXP (x, 0)) == REG
2716 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2717 {
2718 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2719 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2720
2721 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2722 && x0_ent->const_rtx != NULL_RTX)
2723 return 0;
2724 }
2725
2726 /* This can happen as the result of virtual register instantiation, if
2727 the initial constant is too large to be a valid address. This gives
2728 us a three instruction sequence, load large offset into a register,
2729 load fp minus a constant into a register, then a MEM which is the
2730 sum of the two `constant' registers. */
2731 if (GET_CODE (x) == PLUS
2732 && GET_CODE (XEXP (x, 0)) == REG
2733 && GET_CODE (XEXP (x, 1)) == REG
2734 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2735 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2736 {
2737 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2738 int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2739 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2740 struct qty_table_elem *x1_ent = &qty_table[x1_q];
2741
2742 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2743 && x0_ent->const_rtx != NULL_RTX
2744 && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2745 && x1_ent->const_rtx != NULL_RTX)
2746 return 0;
2747 }
2748
2749 return rtx_varies_p (x, from_alias);
2750 }
2751 \f
2752 /* Canonicalize an expression:
2753 replace each register reference inside it
2754 with the "oldest" equivalent register.
2755
2756 If INSN is non-zero and we are replacing a pseudo with a hard register
2757 or vice versa, validate_change is used to ensure that INSN remains valid
2758 after we make our substitution. The calls are made with IN_GROUP non-zero
2759 so apply_change_group must be called upon the outermost return from this
2760 function (unless INSN is zero). The result of apply_change_group can
2761 generally be discarded since the changes we are making are optional. */
2762
2763 static rtx
2764 canon_reg (x, insn)
2765 rtx x;
2766 rtx insn;
2767 {
2768 register int i;
2769 register enum rtx_code code;
2770 register const char *fmt;
2771
2772 if (x == 0)
2773 return x;
2774
2775 code = GET_CODE (x);
2776 switch (code)
2777 {
2778 case PC:
2779 case CC0:
2780 case CONST:
2781 case CONST_INT:
2782 case CONST_DOUBLE:
2783 case SYMBOL_REF:
2784 case LABEL_REF:
2785 case ADDR_VEC:
2786 case ADDR_DIFF_VEC:
2787 return x;
2788
2789 case REG:
2790 {
2791 register int first;
2792 register int q;
2793 register struct qty_table_elem *ent;
2794
2795 /* Never replace a hard reg, because hard regs can appear
2796 in more than one machine mode, and we must preserve the mode
2797 of each occurrence. Also, some hard regs appear in
2798 MEMs that are shared and mustn't be altered. Don't try to
2799 replace any reg that maps to a reg of class NO_REGS. */
2800 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2801 || ! REGNO_QTY_VALID_P (REGNO (x)))
2802 return x;
2803
2804 q = REG_QTY (REGNO (x));
2805 ent = &qty_table[q];
2806 first = ent->first_reg;
2807 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2808 : REGNO_REG_CLASS (first) == NO_REGS ? x
2809 : gen_rtx_REG (ent->mode, first));
2810 }
2811
2812 default:
2813 break;
2814 }
2815
2816 fmt = GET_RTX_FORMAT (code);
2817 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2818 {
2819 register int j;
2820
2821 if (fmt[i] == 'e')
2822 {
2823 rtx new = canon_reg (XEXP (x, i), insn);
2824 int insn_code;
2825
2826 /* If replacing pseudo with hard reg or vice versa, ensure the
2827 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2828 if (insn != 0 && new != 0
2829 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2830 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2831 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2832 || (insn_code = recog_memoized (insn)) < 0
2833 || insn_data[insn_code].n_dups > 0))
2834 validate_change (insn, &XEXP (x, i), new, 1);
2835 else
2836 XEXP (x, i) = new;
2837 }
2838 else if (fmt[i] == 'E')
2839 for (j = 0; j < XVECLEN (x, i); j++)
2840 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2841 }
2842
2843 return x;
2844 }
2845 \f
2846 /* LOC is a location within INSN that is an operand address (the contents of
2847 a MEM). Find the best equivalent address to use that is valid for this
2848 insn.
2849
2850 On most CISC machines, complicated address modes are costly, and rtx_cost
2851 is a good approximation for that cost. However, most RISC machines have
2852 only a few (usually only one) memory reference formats. If an address is
2853 valid at all, it is often just as cheap as any other address. Hence, for
2854 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2855 costs of various addresses. For two addresses of equal cost, choose the one
2856 with the highest `rtx_cost' value as that has the potential of eliminating
2857 the most insns. For equal costs, we choose the first in the equivalence
2858 class. Note that we ignore the fact that pseudo registers are cheaper
2859 than hard registers here because we would also prefer the pseudo registers.
2860 */
2861
2862 static void
2863 find_best_addr (insn, loc, mode)
2864 rtx insn;
2865 rtx *loc;
2866 enum machine_mode mode;
2867 {
2868 struct table_elt *elt;
2869 rtx addr = *loc;
2870 #ifdef ADDRESS_COST
2871 struct table_elt *p;
2872 int found_better = 1;
2873 #endif
2874 int save_do_not_record = do_not_record;
2875 int save_hash_arg_in_memory = hash_arg_in_memory;
2876 int addr_volatile;
2877 int regno;
2878 unsigned hash;
2879
2880 /* Do not try to replace constant addresses or addresses of local and
2881 argument slots. These MEM expressions are made only once and inserted
2882 in many instructions, as well as being used to control symbol table
2883 output. It is not safe to clobber them.
2884
2885 There are some uncommon cases where the address is already in a register
2886 for some reason, but we cannot take advantage of that because we have
2887 no easy way to unshare the MEM. In addition, looking up all stack
2888 addresses is costly. */
2889 if ((GET_CODE (addr) == PLUS
2890 && GET_CODE (XEXP (addr, 0)) == REG
2891 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2892 && (regno = REGNO (XEXP (addr, 0)),
2893 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2894 || regno == ARG_POINTER_REGNUM))
2895 || (GET_CODE (addr) == REG
2896 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2897 || regno == HARD_FRAME_POINTER_REGNUM
2898 || regno == ARG_POINTER_REGNUM))
2899 || GET_CODE (addr) == ADDRESSOF
2900 || CONSTANT_ADDRESS_P (addr))
2901 return;
2902
2903 /* If this address is not simply a register, try to fold it. This will
2904 sometimes simplify the expression. Many simplifications
2905 will not be valid, but some, usually applying the associative rule, will
2906 be valid and produce better code. */
2907 if (GET_CODE (addr) != REG)
2908 {
2909 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2910 int addr_folded_cost = address_cost (folded, mode);
2911 int addr_cost = address_cost (addr, mode);
2912
2913 if ((addr_folded_cost < addr_cost
2914 || (addr_folded_cost == addr_cost
2915 /* ??? The rtx_cost comparison is left over from an older
2916 version of this code. It is probably no longer helpful. */
2917 && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2918 || approx_reg_cost (folded) < approx_reg_cost (addr))))
2919 && validate_change (insn, loc, folded, 0))
2920 addr = folded;
2921 }
2922
2923 /* If this address is not in the hash table, we can't look for equivalences
2924 of the whole address. Also, ignore if volatile. */
2925
2926 do_not_record = 0;
2927 hash = HASH (addr, Pmode);
2928 addr_volatile = do_not_record;
2929 do_not_record = save_do_not_record;
2930 hash_arg_in_memory = save_hash_arg_in_memory;
2931
2932 if (addr_volatile)
2933 return;
2934
2935 elt = lookup (addr, hash, Pmode);
2936
2937 #ifndef ADDRESS_COST
2938 if (elt)
2939 {
2940 int our_cost = elt->cost;
2941
2942 /* Find the lowest cost below ours that works. */
2943 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2944 if (elt->cost < our_cost
2945 && (GET_CODE (elt->exp) == REG
2946 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2947 && validate_change (insn, loc,
2948 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2949 return;
2950 }
2951 #else
2952
2953 if (elt)
2954 {
2955 /* We need to find the best (under the criteria documented above) entry
2956 in the class that is valid. We use the `flag' field to indicate
2957 choices that were invalid and iterate until we can't find a better
2958 one that hasn't already been tried. */
2959
2960 for (p = elt->first_same_value; p; p = p->next_same_value)
2961 p->flag = 0;
2962
2963 while (found_better)
2964 {
2965 int best_addr_cost = address_cost (*loc, mode);
2966 int best_rtx_cost = (elt->cost + 1) >> 1;
2967 int exp_cost;
2968 struct table_elt *best_elt = elt;
2969
2970 found_better = 0;
2971 for (p = elt->first_same_value; p; p = p->next_same_value)
2972 if (! p->flag)
2973 {
2974 if ((GET_CODE (p->exp) == REG
2975 || exp_equiv_p (p->exp, p->exp, 1, 0))
2976 && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
2977 || (exp_cost == best_addr_cost
2978 && (p->cost + 1) >> 1 < best_rtx_cost)))
2979 {
2980 found_better = 1;
2981 best_addr_cost = exp_cost;
2982 best_rtx_cost = (p->cost + 1) >> 1;
2983 best_elt = p;
2984 }
2985 }
2986
2987 if (found_better)
2988 {
2989 if (validate_change (insn, loc,
2990 canon_reg (copy_rtx (best_elt->exp),
2991 NULL_RTX), 0))
2992 return;
2993 else
2994 best_elt->flag = 1;
2995 }
2996 }
2997 }
2998
2999 /* If the address is a binary operation with the first operand a register
3000 and the second a constant, do the same as above, but looking for
3001 equivalences of the register. Then try to simplify before checking for
3002 the best address to use. This catches a few cases: First is when we
3003 have REG+const and the register is another REG+const. We can often merge
3004 the constants and eliminate one insn and one register. It may also be
3005 that a machine has a cheap REG+REG+const. Finally, this improves the
3006 code on the Alpha for unaligned byte stores. */
3007
3008 if (flag_expensive_optimizations
3009 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
3010 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
3011 && GET_CODE (XEXP (*loc, 0)) == REG
3012 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
3013 {
3014 rtx c = XEXP (*loc, 1);
3015
3016 do_not_record = 0;
3017 hash = HASH (XEXP (*loc, 0), Pmode);
3018 do_not_record = save_do_not_record;
3019 hash_arg_in_memory = save_hash_arg_in_memory;
3020
3021 elt = lookup (XEXP (*loc, 0), hash, Pmode);
3022 if (elt == 0)
3023 return;
3024
3025 /* We need to find the best (under the criteria documented above) entry
3026 in the class that is valid. We use the `flag' field to indicate
3027 choices that were invalid and iterate until we can't find a better
3028 one that hasn't already been tried. */
3029
3030 for (p = elt->first_same_value; p; p = p->next_same_value)
3031 p->flag = 0;
3032
3033 while (found_better)
3034 {
3035 int best_addr_cost = address_cost (*loc, mode);
3036 int best_rtx_cost = (COST (*loc) + 1) >> 1;
3037 struct table_elt *best_elt = elt;
3038 rtx best_rtx = *loc;
3039 int count;
3040
3041 /* This is at worst case an O(n^2) algorithm, so limit our search
3042 to the first 32 elements on the list. This avoids trouble
3043 compiling code with very long basic blocks that can easily
3044 call simplify_gen_binary so many times that we run out of
3045 memory. */
3046
3047 found_better = 0;
3048 for (p = elt->first_same_value, count = 0;
3049 p && count < 32;
3050 p = p->next_same_value, count++)
3051 if (! p->flag
3052 && (GET_CODE (p->exp) == REG
3053 || exp_equiv_p (p->exp, p->exp, 1, 0)))
3054 {
3055 rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
3056 p->exp, c);
3057 int new_cost;
3058 new_cost = address_cost (new, mode);
3059
3060 if (new_cost < best_addr_cost
3061 || (new_cost == best_addr_cost
3062 && (COST (new) + 1) >> 1 > best_rtx_cost))
3063 {
3064 found_better = 1;
3065 best_addr_cost = new_cost;
3066 best_rtx_cost = (COST (new) + 1) >> 1;
3067 best_elt = p;
3068 best_rtx = new;
3069 }
3070 }
3071
3072 if (found_better)
3073 {
3074 if (validate_change (insn, loc,
3075 canon_reg (copy_rtx (best_rtx),
3076 NULL_RTX), 0))
3077 return;
3078 else
3079 best_elt->flag = 1;
3080 }
3081 }
3082 }
3083 #endif
3084 }
3085 \f
3086 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
3087 operation (EQ, NE, GT, etc.), follow it back through the hash table and
3088 what values are being compared.
3089
3090 *PARG1 and *PARG2 are updated to contain the rtx representing the values
3091 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
3092 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3093 compared to produce cc0.
3094
3095 The return value is the comparison operator and is either the code of
3096 A or the code corresponding to the inverse of the comparison. */
3097
3098 static enum rtx_code
3099 find_comparison_args (code, parg1, parg2, pmode1, pmode2)
3100 enum rtx_code code;
3101 rtx *parg1, *parg2;
3102 enum machine_mode *pmode1, *pmode2;
3103 {
3104 rtx arg1, arg2;
3105
3106 arg1 = *parg1, arg2 = *parg2;
3107
3108 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
3109
3110 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
3111 {
3112 /* Set non-zero when we find something of interest. */
3113 rtx x = 0;
3114 int reverse_code = 0;
3115 struct table_elt *p = 0;
3116
3117 /* If arg1 is a COMPARE, extract the comparison arguments from it.
3118 On machines with CC0, this is the only case that can occur, since
3119 fold_rtx will return the COMPARE or item being compared with zero
3120 when given CC0. */
3121
3122 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3123 x = arg1;
3124
3125 /* If ARG1 is a comparison operator and CODE is testing for
3126 STORE_FLAG_VALUE, get the inner arguments. */
3127
3128 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
3129 {
3130 if (code == NE
3131 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3132 && code == LT && STORE_FLAG_VALUE == -1)
3133 #ifdef FLOAT_STORE_FLAG_VALUE
3134 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3135 && (REAL_VALUE_NEGATIVE
3136 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3137 #endif
3138 )
3139 x = arg1;
3140 else if (code == EQ
3141 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3142 && code == GE && STORE_FLAG_VALUE == -1)
3143 #ifdef FLOAT_STORE_FLAG_VALUE
3144 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3145 && (REAL_VALUE_NEGATIVE
3146 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3147 #endif
3148 )
3149 x = arg1, reverse_code = 1;
3150 }
3151
3152 /* ??? We could also check for
3153
3154 (ne (and (eq (...) (const_int 1))) (const_int 0))
3155
3156 and related forms, but let's wait until we see them occurring. */
3157
3158 if (x == 0)
3159 /* Look up ARG1 in the hash table and see if it has an equivalence
3160 that lets us see what is being compared. */
3161 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) & HASH_MASK,
3162 GET_MODE (arg1));
3163 if (p)
3164 {
3165 p = p->first_same_value;
3166
3167 /* If what we compare is already known to be constant, that is as
3168 good as it gets.
3169 We need to break the loop in this case, because otherwise we
3170 can have an infinite loop when looking at a reg that is known
3171 to be a constant which is the same as a comparison of a reg
3172 against zero which appears later in the insn stream, which in
3173 turn is constant and the same as the comparison of the first reg
3174 against zero... */
3175 if (p->is_const)
3176 break;
3177 }
3178
3179 for (; p; p = p->next_same_value)
3180 {
3181 enum machine_mode inner_mode = GET_MODE (p->exp);
3182
3183 /* If the entry isn't valid, skip it. */
3184 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
3185 continue;
3186
3187 if (GET_CODE (p->exp) == COMPARE
3188 /* Another possibility is that this machine has a compare insn
3189 that includes the comparison code. In that case, ARG1 would
3190 be equivalent to a comparison operation that would set ARG1 to
3191 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3192 ORIG_CODE is the actual comparison being done; if it is an EQ,
3193 we must reverse ORIG_CODE. On machine with a negative value
3194 for STORE_FLAG_VALUE, also look at LT and GE operations. */
3195 || ((code == NE
3196 || (code == LT
3197 && GET_MODE_CLASS (inner_mode) == MODE_INT
3198 && (GET_MODE_BITSIZE (inner_mode)
3199 <= HOST_BITS_PER_WIDE_INT)
3200 && (STORE_FLAG_VALUE
3201 & ((HOST_WIDE_INT) 1
3202 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3203 #ifdef FLOAT_STORE_FLAG_VALUE
3204 || (code == LT
3205 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3206 && (REAL_VALUE_NEGATIVE
3207 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3208 #endif
3209 )
3210 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
3211 {
3212 x = p->exp;
3213 break;
3214 }
3215 else if ((code == EQ
3216 || (code == GE
3217 && GET_MODE_CLASS (inner_mode) == MODE_INT
3218 && (GET_MODE_BITSIZE (inner_mode)
3219 <= HOST_BITS_PER_WIDE_INT)
3220 && (STORE_FLAG_VALUE
3221 & ((HOST_WIDE_INT) 1
3222 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3223 #ifdef FLOAT_STORE_FLAG_VALUE
3224 || (code == GE
3225 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3226 && (REAL_VALUE_NEGATIVE
3227 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3228 #endif
3229 )
3230 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
3231 {
3232 reverse_code = 1;
3233 x = p->exp;
3234 break;
3235 }
3236
3237 /* If this is fp + constant, the equivalent is a better operand since
3238 it may let us predict the value of the comparison. */
3239 else if (NONZERO_BASE_PLUS_P (p->exp))
3240 {
3241 arg1 = p->exp;
3242 continue;
3243 }
3244 }
3245
3246 /* If we didn't find a useful equivalence for ARG1, we are done.
3247 Otherwise, set up for the next iteration. */
3248 if (x == 0)
3249 break;
3250
3251 /* If we need to reverse the comparison, make sure that that is
3252 possible -- we can't necessarily infer the value of GE from LT
3253 with floating-point operands. */
3254 if (reverse_code && ! can_reverse_comparison_p (x, NULL_RTX))
3255 break;
3256
3257 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3258 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3259 code = GET_CODE (x);
3260
3261 if (reverse_code)
3262 code = reverse_condition (code);
3263 }
3264
3265 /* Return our results. Return the modes from before fold_rtx
3266 because fold_rtx might produce const_int, and then it's too late. */
3267 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3268 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3269
3270 return code;
3271 }
3272 \f
3273 /* If X is a nontrivial arithmetic operation on an argument
3274 for which a constant value can be determined, return
3275 the result of operating on that value, as a constant.
3276 Otherwise, return X, possibly with one or more operands
3277 modified by recursive calls to this function.
3278
3279 If X is a register whose contents are known, we do NOT
3280 return those contents here. equiv_constant is called to
3281 perform that task.
3282
3283 INSN is the insn that we may be modifying. If it is 0, make a copy
3284 of X before modifying it. */
3285
3286 static rtx
3287 fold_rtx (x, insn)
3288 rtx x;
3289 rtx insn;
3290 {
3291 register enum rtx_code code;
3292 register enum machine_mode mode;
3293 register const char *fmt;
3294 register int i;
3295 rtx new = 0;
3296 int copied = 0;
3297 int must_swap = 0;
3298
3299 /* Folded equivalents of first two operands of X. */
3300 rtx folded_arg0;
3301 rtx folded_arg1;
3302
3303 /* Constant equivalents of first three operands of X;
3304 0 when no such equivalent is known. */
3305 rtx const_arg0;
3306 rtx const_arg1;
3307 rtx const_arg2;
3308
3309 /* The mode of the first operand of X. We need this for sign and zero
3310 extends. */
3311 enum machine_mode mode_arg0;
3312
3313 if (x == 0)
3314 return x;
3315
3316 mode = GET_MODE (x);
3317 code = GET_CODE (x);
3318 switch (code)
3319 {
3320 case CONST:
3321 case CONST_INT:
3322 case CONST_DOUBLE:
3323 case SYMBOL_REF:
3324 case LABEL_REF:
3325 case REG:
3326 /* No use simplifying an EXPR_LIST
3327 since they are used only for lists of args
3328 in a function call's REG_EQUAL note. */
3329 case EXPR_LIST:
3330 /* Changing anything inside an ADDRESSOF is incorrect; we don't
3331 want to (e.g.,) make (addressof (const_int 0)) just because
3332 the location is known to be zero. */
3333 case ADDRESSOF:
3334 return x;
3335
3336 #ifdef HAVE_cc0
3337 case CC0:
3338 return prev_insn_cc0;
3339 #endif
3340
3341 case PC:
3342 /* If the next insn is a CODE_LABEL followed by a jump table,
3343 PC's value is a LABEL_REF pointing to that label. That
3344 lets us fold switch statements on the Vax. */
3345 if (insn && GET_CODE (insn) == JUMP_INSN)
3346 {
3347 rtx next = next_nonnote_insn (insn);
3348
3349 if (next && GET_CODE (next) == CODE_LABEL
3350 && NEXT_INSN (next) != 0
3351 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
3352 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
3353 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
3354 return gen_rtx_LABEL_REF (Pmode, next);
3355 }
3356 break;
3357
3358 case SUBREG:
3359 /* See if we previously assigned a constant value to this SUBREG. */
3360 if ((new = lookup_as_function (x, CONST_INT)) != 0
3361 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3362 return new;
3363
3364 /* If this is a paradoxical SUBREG, we have no idea what value the
3365 extra bits would have. However, if the operand is equivalent
3366 to a SUBREG whose operand is the same as our mode, and all the
3367 modes are within a word, we can just use the inner operand
3368 because these SUBREGs just say how to treat the register.
3369
3370 Similarly if we find an integer constant. */
3371
3372 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3373 {
3374 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3375 struct table_elt *elt;
3376
3377 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3378 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3379 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3380 imode)) != 0)
3381 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3382 {
3383 if (CONSTANT_P (elt->exp)
3384 && GET_MODE (elt->exp) == VOIDmode)
3385 return elt->exp;
3386
3387 if (GET_CODE (elt->exp) == SUBREG
3388 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3389 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3390 return copy_rtx (SUBREG_REG (elt->exp));
3391 }
3392
3393 return x;
3394 }
3395
3396 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
3397 We might be able to if the SUBREG is extracting a single word in an
3398 integral mode or extracting the low part. */
3399
3400 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3401 const_arg0 = equiv_constant (folded_arg0);
3402 if (const_arg0)
3403 folded_arg0 = const_arg0;
3404
3405 if (folded_arg0 != SUBREG_REG (x))
3406 {
3407 new = 0;
3408
3409 if (GET_MODE_CLASS (mode) == MODE_INT
3410 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3411 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
3412 new = operand_subword (folded_arg0, SUBREG_WORD (x), 0,
3413 GET_MODE (SUBREG_REG (x)));
3414 if (new == 0 && subreg_lowpart_p (x))
3415 new = gen_lowpart_if_possible (mode, folded_arg0);
3416 if (new)
3417 return new;
3418 }
3419
3420 /* If this is a narrowing SUBREG and our operand is a REG, see if
3421 we can find an equivalence for REG that is an arithmetic operation
3422 in a wider mode where both operands are paradoxical SUBREGs
3423 from objects of our result mode. In that case, we couldn't report
3424 an equivalent value for that operation, since we don't know what the
3425 extra bits will be. But we can find an equivalence for this SUBREG
3426 by folding that operation is the narrow mode. This allows us to
3427 fold arithmetic in narrow modes when the machine only supports
3428 word-sized arithmetic.
3429
3430 Also look for a case where we have a SUBREG whose operand is the
3431 same as our result. If both modes are smaller than a word, we
3432 are simply interpreting a register in different modes and we
3433 can use the inner value. */
3434
3435 if (GET_CODE (folded_arg0) == REG
3436 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
3437 && subreg_lowpart_p (x))
3438 {
3439 struct table_elt *elt;
3440
3441 /* We can use HASH here since we know that canon_hash won't be
3442 called. */
3443 elt = lookup (folded_arg0,
3444 HASH (folded_arg0, GET_MODE (folded_arg0)),
3445 GET_MODE (folded_arg0));
3446
3447 if (elt)
3448 elt = elt->first_same_value;
3449
3450 for (; elt; elt = elt->next_same_value)
3451 {
3452 enum rtx_code eltcode = GET_CODE (elt->exp);
3453
3454 /* Just check for unary and binary operations. */
3455 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
3456 && GET_CODE (elt->exp) != SIGN_EXTEND
3457 && GET_CODE (elt->exp) != ZERO_EXTEND
3458 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3459 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
3460 {
3461 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3462
3463 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3464 op0 = fold_rtx (op0, NULL_RTX);
3465
3466 op0 = equiv_constant (op0);
3467 if (op0)
3468 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3469 op0, mode);
3470 }
3471 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
3472 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
3473 && eltcode != DIV && eltcode != MOD
3474 && eltcode != UDIV && eltcode != UMOD
3475 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3476 && eltcode != ROTATE && eltcode != ROTATERT
3477 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3478 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3479 == mode))
3480 || CONSTANT_P (XEXP (elt->exp, 0)))
3481 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3482 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3483 == mode))
3484 || CONSTANT_P (XEXP (elt->exp, 1))))
3485 {
3486 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3487 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3488
3489 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3490 op0 = fold_rtx (op0, NULL_RTX);
3491
3492 if (op0)
3493 op0 = equiv_constant (op0);
3494
3495 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
3496 op1 = fold_rtx (op1, NULL_RTX);
3497
3498 if (op1)
3499 op1 = equiv_constant (op1);
3500
3501 /* If we are looking for the low SImode part of
3502 (ashift:DI c (const_int 32)), it doesn't work
3503 to compute that in SImode, because a 32-bit shift
3504 in SImode is unpredictable. We know the value is 0. */
3505 if (op0 && op1
3506 && GET_CODE (elt->exp) == ASHIFT
3507 && GET_CODE (op1) == CONST_INT
3508 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3509 {
3510 if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3511
3512 /* If the count fits in the inner mode's width,
3513 but exceeds the outer mode's width,
3514 the value will get truncated to 0
3515 by the subreg. */
3516 new = const0_rtx;
3517 else
3518 /* If the count exceeds even the inner mode's width,
3519 don't fold this expression. */
3520 new = 0;
3521 }
3522 else if (op0 && op1)
3523 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
3524 op0, op1);
3525 }
3526
3527 else if (GET_CODE (elt->exp) == SUBREG
3528 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3529 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3530 <= UNITS_PER_WORD)
3531 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3532 new = copy_rtx (SUBREG_REG (elt->exp));
3533
3534 if (new)
3535 return new;
3536 }
3537 }
3538
3539 return x;
3540
3541 case NOT:
3542 case NEG:
3543 /* If we have (NOT Y), see if Y is known to be (NOT Z).
3544 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
3545 new = lookup_as_function (XEXP (x, 0), code);
3546 if (new)
3547 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3548 break;
3549
3550 case MEM:
3551 /* If we are not actually processing an insn, don't try to find the
3552 best address. Not only don't we care, but we could modify the
3553 MEM in an invalid way since we have no insn to validate against. */
3554 if (insn != 0)
3555 find_best_addr (insn, &XEXP (x, 0), GET_MODE (x));
3556
3557 {
3558 /* Even if we don't fold in the insn itself,
3559 we can safely do so here, in hopes of getting a constant. */
3560 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3561 rtx base = 0;
3562 HOST_WIDE_INT offset = 0;
3563
3564 if (GET_CODE (addr) == REG
3565 && REGNO_QTY_VALID_P (REGNO (addr)))
3566 {
3567 int addr_q = REG_QTY (REGNO (addr));
3568 struct qty_table_elem *addr_ent = &qty_table[addr_q];
3569
3570 if (GET_MODE (addr) == addr_ent->mode
3571 && addr_ent->const_rtx != NULL_RTX)
3572 addr = addr_ent->const_rtx;
3573 }
3574
3575 /* If address is constant, split it into a base and integer offset. */
3576 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3577 base = addr;
3578 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3579 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3580 {
3581 base = XEXP (XEXP (addr, 0), 0);
3582 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3583 }
3584 else if (GET_CODE (addr) == LO_SUM
3585 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3586 base = XEXP (addr, 1);
3587 else if (GET_CODE (addr) == ADDRESSOF)
3588 return change_address (x, VOIDmode, addr);
3589
3590 /* If this is a constant pool reference, we can fold it into its
3591 constant to allow better value tracking. */
3592 if (base && GET_CODE (base) == SYMBOL_REF
3593 && CONSTANT_POOL_ADDRESS_P (base))
3594 {
3595 rtx constant = get_pool_constant (base);
3596 enum machine_mode const_mode = get_pool_mode (base);
3597 rtx new;
3598
3599 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3600 constant_pool_entries_cost = COST (constant);
3601
3602 /* If we are loading the full constant, we have an equivalence. */
3603 if (offset == 0 && mode == const_mode)
3604 return constant;
3605
3606 /* If this actually isn't a constant (weird!), we can't do
3607 anything. Otherwise, handle the two most common cases:
3608 extracting a word from a multi-word constant, and extracting
3609 the low-order bits. Other cases don't seem common enough to
3610 worry about. */
3611 if (! CONSTANT_P (constant))
3612 return x;
3613
3614 if (GET_MODE_CLASS (mode) == MODE_INT
3615 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3616 && offset % UNITS_PER_WORD == 0
3617 && (new = operand_subword (constant,
3618 offset / UNITS_PER_WORD,
3619 0, const_mode)) != 0)
3620 return new;
3621
3622 if (((BYTES_BIG_ENDIAN
3623 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3624 || (! BYTES_BIG_ENDIAN && offset == 0))
3625 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
3626 return new;
3627 }
3628
3629 /* If this is a reference to a label at a known position in a jump
3630 table, we also know its value. */
3631 if (base && GET_CODE (base) == LABEL_REF)
3632 {
3633 rtx label = XEXP (base, 0);
3634 rtx table_insn = NEXT_INSN (label);
3635
3636 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3637 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3638 {
3639 rtx table = PATTERN (table_insn);
3640
3641 if (offset >= 0
3642 && (offset / GET_MODE_SIZE (GET_MODE (table))
3643 < XVECLEN (table, 0)))
3644 return XVECEXP (table, 0,
3645 offset / GET_MODE_SIZE (GET_MODE (table)));
3646 }
3647 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3648 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3649 {
3650 rtx table = PATTERN (table_insn);
3651
3652 if (offset >= 0
3653 && (offset / GET_MODE_SIZE (GET_MODE (table))
3654 < XVECLEN (table, 1)))
3655 {
3656 offset /= GET_MODE_SIZE (GET_MODE (table));
3657 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3658 XEXP (table, 0));
3659
3660 if (GET_MODE (table) != Pmode)
3661 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3662
3663 /* Indicate this is a constant. This isn't a
3664 valid form of CONST, but it will only be used
3665 to fold the next insns and then discarded, so
3666 it should be safe.
3667
3668 Note this expression must be explicitly discarded,
3669 by cse_insn, else it may end up in a REG_EQUAL note
3670 and "escape" to cause problems elsewhere. */
3671 return gen_rtx_CONST (GET_MODE (new), new);
3672 }
3673 }
3674 }
3675
3676 return x;
3677 }
3678
3679 #ifdef NO_FUNCTION_CSE
3680 case CALL:
3681 if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3682 return x;
3683 break;
3684 #endif
3685
3686 case ASM_OPERANDS:
3687 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3688 validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3689 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3690 break;
3691
3692 default:
3693 break;
3694 }
3695
3696 const_arg0 = 0;
3697 const_arg1 = 0;
3698 const_arg2 = 0;
3699 mode_arg0 = VOIDmode;
3700
3701 /* Try folding our operands.
3702 Then see which ones have constant values known. */
3703
3704 fmt = GET_RTX_FORMAT (code);
3705 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3706 if (fmt[i] == 'e')
3707 {
3708 rtx arg = XEXP (x, i);
3709 rtx folded_arg = arg, const_arg = 0;
3710 enum machine_mode mode_arg = GET_MODE (arg);
3711 rtx cheap_arg, expensive_arg;
3712 rtx replacements[2];
3713 int j;
3714
3715 /* Most arguments are cheap, so handle them specially. */
3716 switch (GET_CODE (arg))
3717 {
3718 case REG:
3719 /* This is the same as calling equiv_constant; it is duplicated
3720 here for speed. */
3721 if (REGNO_QTY_VALID_P (REGNO (arg)))
3722 {
3723 int arg_q = REG_QTY (REGNO (arg));
3724 struct qty_table_elem *arg_ent = &qty_table[arg_q];
3725
3726 if (arg_ent->const_rtx != NULL_RTX
3727 && GET_CODE (arg_ent->const_rtx) != REG
3728 && GET_CODE (arg_ent->const_rtx) != PLUS)
3729 const_arg
3730 = gen_lowpart_if_possible (GET_MODE (arg),
3731 arg_ent->const_rtx);
3732 }
3733 break;
3734
3735 case CONST:
3736 case CONST_INT:
3737 case SYMBOL_REF:
3738 case LABEL_REF:
3739 case CONST_DOUBLE:
3740 const_arg = arg;
3741 break;
3742
3743 #ifdef HAVE_cc0
3744 case CC0:
3745 folded_arg = prev_insn_cc0;
3746 mode_arg = prev_insn_cc0_mode;
3747 const_arg = equiv_constant (folded_arg);
3748 break;
3749 #endif
3750
3751 default:
3752 folded_arg = fold_rtx (arg, insn);
3753 const_arg = equiv_constant (folded_arg);
3754 }
3755
3756 /* For the first three operands, see if the operand
3757 is constant or equivalent to a constant. */
3758 switch (i)
3759 {
3760 case 0:
3761 folded_arg0 = folded_arg;
3762 const_arg0 = const_arg;
3763 mode_arg0 = mode_arg;
3764 break;
3765 case 1:
3766 folded_arg1 = folded_arg;
3767 const_arg1 = const_arg;
3768 break;
3769 case 2:
3770 const_arg2 = const_arg;
3771 break;
3772 }
3773
3774 /* Pick the least expensive of the folded argument and an
3775 equivalent constant argument. */
3776 if (const_arg == 0 || const_arg == folded_arg
3777 || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
3778 cheap_arg = folded_arg, expensive_arg = const_arg;
3779 else
3780 cheap_arg = const_arg, expensive_arg = folded_arg;
3781
3782 /* Try to replace the operand with the cheapest of the two
3783 possibilities. If it doesn't work and this is either of the first
3784 two operands of a commutative operation, try swapping them.
3785 If THAT fails, try the more expensive, provided it is cheaper
3786 than what is already there. */
3787
3788 if (cheap_arg == XEXP (x, i))
3789 continue;
3790
3791 if (insn == 0 && ! copied)
3792 {
3793 x = copy_rtx (x);
3794 copied = 1;
3795 }
3796
3797 /* Order the replacements from cheapest to most expensive. */
3798 replacements[0] = cheap_arg;
3799 replacements[1] = expensive_arg;
3800
3801 for (j = 0; j < 2 && replacements[j]; j++)
3802 {
3803 int old_cost = COST_IN (XEXP (x, i), code);
3804 int new_cost = COST_IN (replacements[j], code);
3805
3806 /* Stop if what existed before was cheaper. Prefer constants
3807 in the case of a tie. */
3808 if (new_cost > old_cost
3809 || (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3810 break;
3811
3812 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3813 break;
3814
3815 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c'
3816 || code == LTGT || code == UNEQ || code == ORDERED
3817 || code == UNORDERED)
3818 {
3819 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3820 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3821
3822 if (apply_change_group ())
3823 {
3824 /* Swap them back to be invalid so that this loop can
3825 continue and flag them to be swapped back later. */
3826 rtx tem;
3827
3828 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3829 XEXP (x, 1) = tem;
3830 must_swap = 1;
3831 break;
3832 }
3833 }
3834 }
3835 }
3836
3837 else
3838 {
3839 if (fmt[i] == 'E')
3840 /* Don't try to fold inside of a vector of expressions.
3841 Doing nothing is harmless. */
3842 {;}
3843 }
3844
3845 /* If a commutative operation, place a constant integer as the second
3846 operand unless the first operand is also a constant integer. Otherwise,
3847 place any constant second unless the first operand is also a constant. */
3848
3849 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c'
3850 || code == LTGT || code == UNEQ || code == ORDERED
3851 || code == UNORDERED)
3852 {
3853 if (must_swap || (const_arg0
3854 && (const_arg1 == 0
3855 || (GET_CODE (const_arg0) == CONST_INT
3856 && GET_CODE (const_arg1) != CONST_INT))))
3857 {
3858 register rtx tem = XEXP (x, 0);
3859
3860 if (insn == 0 && ! copied)
3861 {
3862 x = copy_rtx (x);
3863 copied = 1;
3864 }
3865
3866 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3867 validate_change (insn, &XEXP (x, 1), tem, 1);
3868 if (apply_change_group ())
3869 {
3870 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3871 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3872 }
3873 }
3874 }
3875
3876 /* If X is an arithmetic operation, see if we can simplify it. */
3877
3878 switch (GET_RTX_CLASS (code))
3879 {
3880 case '1':
3881 {
3882 int is_const = 0;
3883
3884 /* We can't simplify extension ops unless we know the
3885 original mode. */
3886 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3887 && mode_arg0 == VOIDmode)
3888 break;
3889
3890 /* If we had a CONST, strip it off and put it back later if we
3891 fold. */
3892 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3893 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3894
3895 new = simplify_unary_operation (code, mode,
3896 const_arg0 ? const_arg0 : folded_arg0,
3897 mode_arg0);
3898 if (new != 0 && is_const)
3899 new = gen_rtx_CONST (mode, new);
3900 }
3901 break;
3902
3903 case '<':
3904 /* See what items are actually being compared and set FOLDED_ARG[01]
3905 to those values and CODE to the actual comparison code. If any are
3906 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3907 do anything if both operands are already known to be constant. */
3908
3909 if (const_arg0 == 0 || const_arg1 == 0)
3910 {
3911 struct table_elt *p0, *p1;
3912 rtx true = const_true_rtx, false = const0_rtx;
3913 enum machine_mode mode_arg1;
3914
3915 #ifdef FLOAT_STORE_FLAG_VALUE
3916 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3917 {
3918 true = (CONST_DOUBLE_FROM_REAL_VALUE
3919 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3920 false = CONST0_RTX (mode);
3921 }
3922 #endif
3923
3924 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3925 &mode_arg0, &mode_arg1);
3926 const_arg0 = equiv_constant (folded_arg0);
3927 const_arg1 = equiv_constant (folded_arg1);
3928
3929 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3930 what kinds of things are being compared, so we can't do
3931 anything with this comparison. */
3932
3933 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3934 break;
3935
3936 /* If we do not now have two constants being compared, see
3937 if we can nevertheless deduce some things about the
3938 comparison. */
3939 if (const_arg0 == 0 || const_arg1 == 0)
3940 {
3941 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or
3942 non-explicit constant? These aren't zero, but we
3943 don't know their sign. */
3944 if (const_arg1 == const0_rtx
3945 && (NONZERO_BASE_PLUS_P (folded_arg0)
3946 #if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
3947 come out as 0. */
3948 || GET_CODE (folded_arg0) == SYMBOL_REF
3949 #endif
3950 || GET_CODE (folded_arg0) == LABEL_REF
3951 || GET_CODE (folded_arg0) == CONST))
3952 {
3953 if (code == EQ)
3954 return false;
3955 else if (code == NE)
3956 return true;
3957 }
3958
3959 /* See if the two operands are the same. */
3960
3961 if (folded_arg0 == folded_arg1
3962 || (GET_CODE (folded_arg0) == REG
3963 && GET_CODE (folded_arg1) == REG
3964 && (REG_QTY (REGNO (folded_arg0))
3965 == REG_QTY (REGNO (folded_arg1))))
3966 || ((p0 = lookup (folded_arg0,
3967 (safe_hash (folded_arg0, mode_arg0)
3968 & HASH_MASK), mode_arg0))
3969 && (p1 = lookup (folded_arg1,
3970 (safe_hash (folded_arg1, mode_arg0)
3971 & HASH_MASK), mode_arg0))
3972 && p0->first_same_value == p1->first_same_value))
3973 {
3974 /* Sadly two equal NaNs are not equivalent. */
3975 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3976 || ! FLOAT_MODE_P (mode_arg0) || flag_fast_math)
3977 return ((code == EQ || code == LE || code == GE
3978 || code == LEU || code == GEU || code == UNEQ
3979 || code == UNLE || code == UNGE || code == ORDERED)
3980 ? true : false);
3981 /* Take care for the FP compares we can resolve. */
3982 if (code == UNEQ || code == UNLE || code == UNGE)
3983 return true;
3984 if (code == LTGT || code == LT || code == GT)
3985 return false;
3986 }
3987
3988 /* If FOLDED_ARG0 is a register, see if the comparison we are
3989 doing now is either the same as we did before or the reverse
3990 (we only check the reverse if not floating-point). */
3991 else if (GET_CODE (folded_arg0) == REG)
3992 {
3993 int qty = REG_QTY (REGNO (folded_arg0));
3994
3995 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3996 {
3997 struct qty_table_elem *ent = &qty_table[qty];
3998
3999 if ((comparison_dominates_p (ent->comparison_code, code)
4000 || (! FLOAT_MODE_P (mode_arg0)
4001 && comparison_dominates_p (ent->comparison_code,
4002 reverse_condition (code))))
4003 && (rtx_equal_p (ent->comparison_const, folded_arg1)
4004 || (const_arg1
4005 && rtx_equal_p (ent->comparison_const,
4006 const_arg1))
4007 || (GET_CODE (folded_arg1) == REG
4008 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
4009 return (comparison_dominates_p (ent->comparison_code, code)
4010 ? true : false);
4011 }
4012 }
4013 }
4014 }
4015
4016 /* If we are comparing against zero, see if the first operand is
4017 equivalent to an IOR with a constant. If so, we may be able to
4018 determine the result of this comparison. */
4019
4020 if (const_arg1 == const0_rtx)
4021 {
4022 rtx y = lookup_as_function (folded_arg0, IOR);
4023 rtx inner_const;
4024
4025 if (y != 0
4026 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
4027 && GET_CODE (inner_const) == CONST_INT
4028 && INTVAL (inner_const) != 0)
4029 {
4030 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
4031 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
4032 && (INTVAL (inner_const)
4033 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
4034 rtx true = const_true_rtx, false = const0_rtx;
4035
4036 #ifdef FLOAT_STORE_FLAG_VALUE
4037 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
4038 {
4039 true = (CONST_DOUBLE_FROM_REAL_VALUE
4040 (FLOAT_STORE_FLAG_VALUE (mode), mode));
4041 false = CONST0_RTX (mode);
4042 }
4043 #endif
4044
4045 switch (code)
4046 {
4047 case EQ:
4048 return false;
4049 case NE:
4050 return true;
4051 case LT: case LE:
4052 if (has_sign)
4053 return true;
4054 break;
4055 case GT: case GE:
4056 if (has_sign)
4057 return false;
4058 break;
4059 default:
4060 break;
4061 }
4062 }
4063 }
4064
4065 new = simplify_relational_operation (code,
4066 (mode_arg0 != VOIDmode
4067 ? mode_arg0
4068 : (GET_MODE (const_arg0
4069 ? const_arg0
4070 : folded_arg0)
4071 != VOIDmode)
4072 ? GET_MODE (const_arg0
4073 ? const_arg0
4074 : folded_arg0)
4075 : GET_MODE (const_arg1
4076 ? const_arg1
4077 : folded_arg1)),
4078 const_arg0 ? const_arg0 : folded_arg0,
4079 const_arg1 ? const_arg1 : folded_arg1);
4080 #ifdef FLOAT_STORE_FLAG_VALUE
4081 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
4082 {
4083 if (new == const0_rtx)
4084 new = CONST0_RTX (mode);
4085 else
4086 new = (CONST_DOUBLE_FROM_REAL_VALUE
4087 (FLOAT_STORE_FLAG_VALUE (mode), mode));
4088 }
4089 #endif
4090 break;
4091
4092 case '2':
4093 case 'c':
4094 switch (code)
4095 {
4096 case PLUS:
4097 /* If the second operand is a LABEL_REF, see if the first is a MINUS
4098 with that LABEL_REF as its second operand. If so, the result is
4099 the first operand of that MINUS. This handles switches with an
4100 ADDR_DIFF_VEC table. */
4101 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4102 {
4103 rtx y
4104 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
4105 : lookup_as_function (folded_arg0, MINUS);
4106
4107 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4108 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4109 return XEXP (y, 0);
4110
4111 /* Now try for a CONST of a MINUS like the above. */
4112 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
4113 : lookup_as_function (folded_arg0, CONST))) != 0
4114 && GET_CODE (XEXP (y, 0)) == MINUS
4115 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4116 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
4117 return XEXP (XEXP (y, 0), 0);
4118 }
4119
4120 /* Likewise if the operands are in the other order. */
4121 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
4122 {
4123 rtx y
4124 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
4125 : lookup_as_function (folded_arg1, MINUS);
4126
4127 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4128 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
4129 return XEXP (y, 0);
4130
4131 /* Now try for a CONST of a MINUS like the above. */
4132 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
4133 : lookup_as_function (folded_arg1, CONST))) != 0
4134 && GET_CODE (XEXP (y, 0)) == MINUS
4135 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4136 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
4137 return XEXP (XEXP (y, 0), 0);
4138 }
4139
4140 /* If second operand is a register equivalent to a negative
4141 CONST_INT, see if we can find a register equivalent to the
4142 positive constant. Make a MINUS if so. Don't do this for
4143 a non-negative constant since we might then alternate between
4144 chosing positive and negative constants. Having the positive
4145 constant previously-used is the more common case. Be sure
4146 the resulting constant is non-negative; if const_arg1 were
4147 the smallest negative number this would overflow: depending
4148 on the mode, this would either just be the same value (and
4149 hence not save anything) or be incorrect. */
4150 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4151 && INTVAL (const_arg1) < 0
4152 /* This used to test
4153
4154 -INTVAL (const_arg1) >= 0
4155
4156 But The Sun V5.0 compilers mis-compiled that test. So
4157 instead we test for the problematic value in a more direct
4158 manner and hope the Sun compilers get it correct. */
4159 && INTVAL (const_arg1) !=
4160 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
4161 && GET_CODE (folded_arg1) == REG)
4162 {
4163 rtx new_const = GEN_INT (-INTVAL (const_arg1));
4164 struct table_elt *p
4165 = lookup (new_const, safe_hash (new_const, mode) & HASH_MASK,
4166 mode);
4167
4168 if (p)
4169 for (p = p->first_same_value; p; p = p->next_same_value)
4170 if (GET_CODE (p->exp) == REG)
4171 return simplify_gen_binary (MINUS, mode, folded_arg0,
4172 canon_reg (p->exp, NULL_RTX));
4173 }
4174 goto from_plus;
4175
4176 case MINUS:
4177 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4178 If so, produce (PLUS Z C2-C). */
4179 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4180 {
4181 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4182 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
4183 return fold_rtx (plus_constant (copy_rtx (y),
4184 -INTVAL (const_arg1)),
4185 NULL_RTX);
4186 }
4187
4188 /* Fall through. */
4189
4190 from_plus:
4191 case SMIN: case SMAX: case UMIN: case UMAX:
4192 case IOR: case AND: case XOR:
4193 case MULT: case DIV: case UDIV:
4194 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
4195 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4196 is known to be of similar form, we may be able to replace the
4197 operation with a combined operation. This may eliminate the
4198 intermediate operation if every use is simplified in this way.
4199 Note that the similar optimization done by combine.c only works
4200 if the intermediate operation's result has only one reference. */
4201
4202 if (GET_CODE (folded_arg0) == REG
4203 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4204 {
4205 int is_shift
4206 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4207 rtx y = lookup_as_function (folded_arg0, code);
4208 rtx inner_const;
4209 enum rtx_code associate_code;
4210 rtx new_const;
4211
4212 if (y == 0
4213 || 0 == (inner_const
4214 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4215 || GET_CODE (inner_const) != CONST_INT
4216 /* If we have compiled a statement like
4217 "if (x == (x & mask1))", and now are looking at
4218 "x & mask2", we will have a case where the first operand
4219 of Y is the same as our first operand. Unless we detect
4220 this case, an infinite loop will result. */
4221 || XEXP (y, 0) == folded_arg0)
4222 break;
4223
4224 /* Don't associate these operations if they are a PLUS with the
4225 same constant and it is a power of two. These might be doable
4226 with a pre- or post-increment. Similarly for two subtracts of
4227 identical powers of two with post decrement. */
4228
4229 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
4230 && ((HAVE_PRE_INCREMENT
4231 && exact_log2 (INTVAL (const_arg1)) >= 0)
4232 || (HAVE_POST_INCREMENT
4233 && exact_log2 (INTVAL (const_arg1)) >= 0)
4234 || (HAVE_PRE_DECREMENT
4235 && exact_log2 (- INTVAL (const_arg1)) >= 0)
4236 || (HAVE_POST_DECREMENT
4237 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4238 break;
4239
4240 /* Compute the code used to compose the constants. For example,
4241 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
4242
4243 associate_code
4244 = (code == MULT || code == DIV || code == UDIV ? MULT
4245 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
4246
4247 new_const = simplify_binary_operation (associate_code, mode,
4248 const_arg1, inner_const);
4249
4250 if (new_const == 0)
4251 break;
4252
4253 /* If we are associating shift operations, don't let this
4254 produce a shift of the size of the object or larger.
4255 This could occur when we follow a sign-extend by a right
4256 shift on a machine that does a sign-extend as a pair
4257 of shifts. */
4258
4259 if (is_shift && GET_CODE (new_const) == CONST_INT
4260 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4261 {
4262 /* As an exception, we can turn an ASHIFTRT of this
4263 form into a shift of the number of bits - 1. */
4264 if (code == ASHIFTRT)
4265 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4266 else
4267 break;
4268 }
4269
4270 y = copy_rtx (XEXP (y, 0));
4271
4272 /* If Y contains our first operand (the most common way this
4273 can happen is if Y is a MEM), we would do into an infinite
4274 loop if we tried to fold it. So don't in that case. */
4275
4276 if (! reg_mentioned_p (folded_arg0, y))
4277 y = fold_rtx (y, insn);
4278
4279 return simplify_gen_binary (code, mode, y, new_const);
4280 }
4281 break;
4282
4283 default:
4284 break;
4285 }
4286
4287 new = simplify_binary_operation (code, mode,
4288 const_arg0 ? const_arg0 : folded_arg0,
4289 const_arg1 ? const_arg1 : folded_arg1);
4290 break;
4291
4292 case 'o':
4293 /* (lo_sum (high X) X) is simply X. */
4294 if (code == LO_SUM && const_arg0 != 0
4295 && GET_CODE (const_arg0) == HIGH
4296 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4297 return const_arg1;
4298 break;
4299
4300 case '3':
4301 case 'b':
4302 new = simplify_ternary_operation (code, mode, mode_arg0,
4303 const_arg0 ? const_arg0 : folded_arg0,
4304 const_arg1 ? const_arg1 : folded_arg1,
4305 const_arg2 ? const_arg2 : XEXP (x, 2));
4306 break;
4307
4308 case 'x':
4309 /* Always eliminate CONSTANT_P_RTX at this stage. */
4310 if (code == CONSTANT_P_RTX)
4311 return (const_arg0 ? const1_rtx : const0_rtx);
4312 break;
4313 }
4314
4315 return new ? new : x;
4316 }
4317 \f
4318 /* Return a constant value currently equivalent to X.
4319 Return 0 if we don't know one. */
4320
4321 static rtx
4322 equiv_constant (x)
4323 rtx x;
4324 {
4325 if (GET_CODE (x) == REG
4326 && REGNO_QTY_VALID_P (REGNO (x)))
4327 {
4328 int x_q = REG_QTY (REGNO (x));
4329 struct qty_table_elem *x_ent = &qty_table[x_q];
4330
4331 if (x_ent->const_rtx)
4332 x = gen_lowpart_if_possible (GET_MODE (x), x_ent->const_rtx);
4333 }
4334
4335 if (x == 0 || CONSTANT_P (x))
4336 return x;
4337
4338 /* If X is a MEM, try to fold it outside the context of any insn to see if
4339 it might be equivalent to a constant. That handles the case where it
4340 is a constant-pool reference. Then try to look it up in the hash table
4341 in case it is something whose value we have seen before. */
4342
4343 if (GET_CODE (x) == MEM)
4344 {
4345 struct table_elt *elt;
4346
4347 x = fold_rtx (x, NULL_RTX);
4348 if (CONSTANT_P (x))
4349 return x;
4350
4351 elt = lookup (x, safe_hash (x, GET_MODE (x)) & HASH_MASK, GET_MODE (x));
4352 if (elt == 0)
4353 return 0;
4354
4355 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4356 if (elt->is_const && CONSTANT_P (elt->exp))
4357 return elt->exp;
4358 }
4359
4360 return 0;
4361 }
4362 \f
4363 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4364 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4365 least-significant part of X.
4366 MODE specifies how big a part of X to return.
4367
4368 If the requested operation cannot be done, 0 is returned.
4369
4370 This is similar to gen_lowpart in emit-rtl.c. */
4371
4372 rtx
4373 gen_lowpart_if_possible (mode, x)
4374 enum machine_mode mode;
4375 register rtx x;
4376 {
4377 rtx result = gen_lowpart_common (mode, x);
4378
4379 if (result)
4380 return result;
4381 else if (GET_CODE (x) == MEM)
4382 {
4383 /* This is the only other case we handle. */
4384 register int offset = 0;
4385 rtx new;
4386
4387 if (WORDS_BIG_ENDIAN)
4388 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4389 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4390 if (BYTES_BIG_ENDIAN)
4391 /* Adjust the address so that the address-after-the-data is
4392 unchanged. */
4393 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4394 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
4395 new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset));
4396 if (! memory_address_p (mode, XEXP (new, 0)))
4397 return 0;
4398 MEM_COPY_ATTRIBUTES (new, x);
4399 return new;
4400 }
4401 else
4402 return 0;
4403 }
4404 \f
4405 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4406 branch. It will be zero if not.
4407
4408 In certain cases, this can cause us to add an equivalence. For example,
4409 if we are following the taken case of
4410 if (i == 2)
4411 we can add the fact that `i' and '2' are now equivalent.
4412
4413 In any case, we can record that this comparison was passed. If the same
4414 comparison is seen later, we will know its value. */
4415
4416 static void
4417 record_jump_equiv (insn, taken)
4418 rtx insn;
4419 int taken;
4420 {
4421 int cond_known_true;
4422 rtx op0, op1;
4423 rtx set;
4424 enum machine_mode mode, mode0, mode1;
4425 int reversed_nonequality = 0;
4426 enum rtx_code code;
4427
4428 /* Ensure this is the right kind of insn. */
4429 if (! any_condjump_p (insn))
4430 return;
4431 set = pc_set (insn);
4432
4433 /* See if this jump condition is known true or false. */
4434 if (taken)
4435 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
4436 else
4437 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
4438
4439 /* Get the type of comparison being done and the operands being compared.
4440 If we had to reverse a non-equality condition, record that fact so we
4441 know that it isn't valid for floating-point. */
4442 code = GET_CODE (XEXP (SET_SRC (set), 0));
4443 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4444 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4445
4446 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4447 if (! cond_known_true)
4448 {
4449 reversed_nonequality = (code != EQ && code != NE);
4450 code = reverse_condition (code);
4451
4452 /* Don't remember if we can't find the inverse. */
4453 if (code == UNKNOWN)
4454 return;
4455 }
4456
4457 /* The mode is the mode of the non-constant. */
4458 mode = mode0;
4459 if (mode1 != VOIDmode)
4460 mode = mode1;
4461
4462 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4463 }
4464
4465 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4466 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4467 Make any useful entries we can with that information. Called from
4468 above function and called recursively. */
4469
4470 static void
4471 record_jump_cond (code, mode, op0, op1, reversed_nonequality)
4472 enum rtx_code code;
4473 enum machine_mode mode;
4474 rtx op0, op1;
4475 int reversed_nonequality;
4476 {
4477 unsigned op0_hash, op1_hash;
4478 int op0_in_memory, op1_in_memory;
4479 struct table_elt *op0_elt, *op1_elt;
4480
4481 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4482 we know that they are also equal in the smaller mode (this is also
4483 true for all smaller modes whether or not there is a SUBREG, but
4484 is not worth testing for with no SUBREG). */
4485
4486 /* Note that GET_MODE (op0) may not equal MODE. */
4487 if (code == EQ && GET_CODE (op0) == SUBREG
4488 && (GET_MODE_SIZE (GET_MODE (op0))
4489 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4490 {
4491 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4492 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4493
4494 record_jump_cond (code, mode, SUBREG_REG (op0),
4495 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4496 reversed_nonequality);
4497 }
4498
4499 if (code == EQ && GET_CODE (op1) == SUBREG
4500 && (GET_MODE_SIZE (GET_MODE (op1))
4501 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4502 {
4503 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4504 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4505
4506 record_jump_cond (code, mode, SUBREG_REG (op1),
4507 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4508 reversed_nonequality);
4509 }
4510
4511 /* Similarly, if this is an NE comparison, and either is a SUBREG
4512 making a smaller mode, we know the whole thing is also NE. */
4513
4514 /* Note that GET_MODE (op0) may not equal MODE;
4515 if we test MODE instead, we can get an infinite recursion
4516 alternating between two modes each wider than MODE. */
4517
4518 if (code == NE && GET_CODE (op0) == SUBREG
4519 && subreg_lowpart_p (op0)
4520 && (GET_MODE_SIZE (GET_MODE (op0))
4521 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4522 {
4523 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4524 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4525
4526 record_jump_cond (code, mode, SUBREG_REG (op0),
4527 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4528 reversed_nonequality);
4529 }
4530
4531 if (code == NE && GET_CODE (op1) == SUBREG
4532 && subreg_lowpart_p (op1)
4533 && (GET_MODE_SIZE (GET_MODE (op1))
4534 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4535 {
4536 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4537 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4538
4539 record_jump_cond (code, mode, SUBREG_REG (op1),
4540 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4541 reversed_nonequality);
4542 }
4543
4544 /* Hash both operands. */
4545
4546 do_not_record = 0;
4547 hash_arg_in_memory = 0;
4548 op0_hash = HASH (op0, mode);
4549 op0_in_memory = hash_arg_in_memory;
4550
4551 if (do_not_record)
4552 return;
4553
4554 do_not_record = 0;
4555 hash_arg_in_memory = 0;
4556 op1_hash = HASH (op1, mode);
4557 op1_in_memory = hash_arg_in_memory;
4558
4559 if (do_not_record)
4560 return;
4561
4562 /* Look up both operands. */
4563 op0_elt = lookup (op0, op0_hash, mode);
4564 op1_elt = lookup (op1, op1_hash, mode);
4565
4566 /* If both operands are already equivalent or if they are not in the
4567 table but are identical, do nothing. */
4568 if ((op0_elt != 0 && op1_elt != 0
4569 && op0_elt->first_same_value == op1_elt->first_same_value)
4570 || op0 == op1 || rtx_equal_p (op0, op1))
4571 return;
4572
4573 /* If we aren't setting two things equal all we can do is save this
4574 comparison. Similarly if this is floating-point. In the latter
4575 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4576 If we record the equality, we might inadvertently delete code
4577 whose intent was to change -0 to +0. */
4578
4579 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4580 {
4581 struct qty_table_elem *ent;
4582 int qty;
4583
4584 /* If we reversed a floating-point comparison, if OP0 is not a
4585 register, or if OP1 is neither a register or constant, we can't
4586 do anything. */
4587
4588 if (GET_CODE (op1) != REG)
4589 op1 = equiv_constant (op1);
4590
4591 if ((reversed_nonequality && FLOAT_MODE_P (mode))
4592 || GET_CODE (op0) != REG || op1 == 0)
4593 return;
4594
4595 /* Put OP0 in the hash table if it isn't already. This gives it a
4596 new quantity number. */
4597 if (op0_elt == 0)
4598 {
4599 if (insert_regs (op0, NULL_PTR, 0))
4600 {
4601 rehash_using_reg (op0);
4602 op0_hash = HASH (op0, mode);
4603
4604 /* If OP0 is contained in OP1, this changes its hash code
4605 as well. Faster to rehash than to check, except
4606 for the simple case of a constant. */
4607 if (! CONSTANT_P (op1))
4608 op1_hash = HASH (op1,mode);
4609 }
4610
4611 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
4612 op0_elt->in_memory = op0_in_memory;
4613 }
4614
4615 qty = REG_QTY (REGNO (op0));
4616 ent = &qty_table[qty];
4617
4618 ent->comparison_code = code;
4619 if (GET_CODE (op1) == REG)
4620 {
4621 /* Look it up again--in case op0 and op1 are the same. */
4622 op1_elt = lookup (op1, op1_hash, mode);
4623
4624 /* Put OP1 in the hash table so it gets a new quantity number. */
4625 if (op1_elt == 0)
4626 {
4627 if (insert_regs (op1, NULL_PTR, 0))
4628 {
4629 rehash_using_reg (op1);
4630 op1_hash = HASH (op1, mode);
4631 }
4632
4633 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
4634 op1_elt->in_memory = op1_in_memory;
4635 }
4636
4637 ent->comparison_const = NULL_RTX;
4638 ent->comparison_qty = REG_QTY (REGNO (op1));
4639 }
4640 else
4641 {
4642 ent->comparison_const = op1;
4643 ent->comparison_qty = -1;
4644 }
4645
4646 return;
4647 }
4648
4649 /* If either side is still missing an equivalence, make it now,
4650 then merge the equivalences. */
4651
4652 if (op0_elt == 0)
4653 {
4654 if (insert_regs (op0, NULL_PTR, 0))
4655 {
4656 rehash_using_reg (op0);
4657 op0_hash = HASH (op0, mode);
4658 }
4659
4660 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
4661 op0_elt->in_memory = op0_in_memory;
4662 }
4663
4664 if (op1_elt == 0)
4665 {
4666 if (insert_regs (op1, NULL_PTR, 0))
4667 {
4668 rehash_using_reg (op1);
4669 op1_hash = HASH (op1, mode);
4670 }
4671
4672 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
4673 op1_elt->in_memory = op1_in_memory;
4674 }
4675
4676 merge_equiv_classes (op0_elt, op1_elt);
4677 last_jump_equiv_class = op0_elt;
4678 }
4679 \f
4680 /* CSE processing for one instruction.
4681 First simplify sources and addresses of all assignments
4682 in the instruction, using previously-computed equivalents values.
4683 Then install the new sources and destinations in the table
4684 of available values.
4685
4686 If LIBCALL_INSN is nonzero, don't record any equivalence made in
4687 the insn. It means that INSN is inside libcall block. In this
4688 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
4689
4690 /* Data on one SET contained in the instruction. */
4691
4692 struct set
4693 {
4694 /* The SET rtx itself. */
4695 rtx rtl;
4696 /* The SET_SRC of the rtx (the original value, if it is changing). */
4697 rtx src;
4698 /* The hash-table element for the SET_SRC of the SET. */
4699 struct table_elt *src_elt;
4700 /* Hash value for the SET_SRC. */
4701 unsigned src_hash;
4702 /* Hash value for the SET_DEST. */
4703 unsigned dest_hash;
4704 /* The SET_DEST, with SUBREG, etc., stripped. */
4705 rtx inner_dest;
4706 /* Nonzero if the SET_SRC is in memory. */
4707 char src_in_memory;
4708 /* Nonzero if the SET_SRC contains something
4709 whose value cannot be predicted and understood. */
4710 char src_volatile;
4711 /* Original machine mode, in case it becomes a CONST_INT. */
4712 enum machine_mode mode;
4713 /* A constant equivalent for SET_SRC, if any. */
4714 rtx src_const;
4715 /* Original SET_SRC value used for libcall notes. */
4716 rtx orig_src;
4717 /* Hash value of constant equivalent for SET_SRC. */
4718 unsigned src_const_hash;
4719 /* Table entry for constant equivalent for SET_SRC, if any. */
4720 struct table_elt *src_const_elt;
4721 };
4722
4723 static void
4724 cse_insn (insn, libcall_insn)
4725 rtx insn;
4726 rtx libcall_insn;
4727 {
4728 register rtx x = PATTERN (insn);
4729 register int i;
4730 rtx tem;
4731 register int n_sets = 0;
4732
4733 #ifdef HAVE_cc0
4734 /* Records what this insn does to set CC0. */
4735 rtx this_insn_cc0 = 0;
4736 enum machine_mode this_insn_cc0_mode = VOIDmode;
4737 #endif
4738
4739 rtx src_eqv = 0;
4740 struct table_elt *src_eqv_elt = 0;
4741 int src_eqv_volatile = 0;
4742 int src_eqv_in_memory = 0;
4743 unsigned src_eqv_hash = 0;
4744
4745 struct set *sets = (struct set *) NULL_PTR;
4746
4747 this_insn = insn;
4748
4749 /* Find all the SETs and CLOBBERs in this instruction.
4750 Record all the SETs in the array `set' and count them.
4751 Also determine whether there is a CLOBBER that invalidates
4752 all memory references, or all references at varying addresses. */
4753
4754 if (GET_CODE (insn) == CALL_INSN)
4755 {
4756 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4757 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4758 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4759 }
4760
4761 if (GET_CODE (x) == SET)
4762 {
4763 sets = (struct set *) alloca (sizeof (struct set));
4764 sets[0].rtl = x;
4765
4766 /* Ignore SETs that are unconditional jumps.
4767 They never need cse processing, so this does not hurt.
4768 The reason is not efficiency but rather
4769 so that we can test at the end for instructions
4770 that have been simplified to unconditional jumps
4771 and not be misled by unchanged instructions
4772 that were unconditional jumps to begin with. */
4773 if (SET_DEST (x) == pc_rtx
4774 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4775 ;
4776
4777 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4778 The hard function value register is used only once, to copy to
4779 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4780 Ensure we invalidate the destination register. On the 80386 no
4781 other code would invalidate it since it is a fixed_reg.
4782 We need not check the return of apply_change_group; see canon_reg. */
4783
4784 else if (GET_CODE (SET_SRC (x)) == CALL)
4785 {
4786 canon_reg (SET_SRC (x), insn);
4787 apply_change_group ();
4788 fold_rtx (SET_SRC (x), insn);
4789 invalidate (SET_DEST (x), VOIDmode);
4790 }
4791 else
4792 n_sets = 1;
4793 }
4794 else if (GET_CODE (x) == PARALLEL)
4795 {
4796 register int lim = XVECLEN (x, 0);
4797
4798 sets = (struct set *) alloca (lim * sizeof (struct set));
4799
4800 /* Find all regs explicitly clobbered in this insn,
4801 and ensure they are not replaced with any other regs
4802 elsewhere in this insn.
4803 When a reg that is clobbered is also used for input,
4804 we should presume that that is for a reason,
4805 and we should not substitute some other register
4806 which is not supposed to be clobbered.
4807 Therefore, this loop cannot be merged into the one below
4808 because a CALL may precede a CLOBBER and refer to the
4809 value clobbered. We must not let a canonicalization do
4810 anything in that case. */
4811 for (i = 0; i < lim; i++)
4812 {
4813 register rtx y = XVECEXP (x, 0, i);
4814 if (GET_CODE (y) == CLOBBER)
4815 {
4816 rtx clobbered = XEXP (y, 0);
4817
4818 if (GET_CODE (clobbered) == REG
4819 || GET_CODE (clobbered) == SUBREG)
4820 invalidate (clobbered, VOIDmode);
4821 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4822 || GET_CODE (clobbered) == ZERO_EXTRACT)
4823 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4824 }
4825 }
4826
4827 for (i = 0; i < lim; i++)
4828 {
4829 register rtx y = XVECEXP (x, 0, i);
4830 if (GET_CODE (y) == SET)
4831 {
4832 /* As above, we ignore unconditional jumps and call-insns and
4833 ignore the result of apply_change_group. */
4834 if (GET_CODE (SET_SRC (y)) == CALL)
4835 {
4836 canon_reg (SET_SRC (y), insn);
4837 apply_change_group ();
4838 fold_rtx (SET_SRC (y), insn);
4839 invalidate (SET_DEST (y), VOIDmode);
4840 }
4841 else if (SET_DEST (y) == pc_rtx
4842 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4843 ;
4844 else
4845 sets[n_sets++].rtl = y;
4846 }
4847 else if (GET_CODE (y) == CLOBBER)
4848 {
4849 /* If we clobber memory, canon the address.
4850 This does nothing when a register is clobbered
4851 because we have already invalidated the reg. */
4852 if (GET_CODE (XEXP (y, 0)) == MEM)
4853 canon_reg (XEXP (y, 0), NULL_RTX);
4854 }
4855 else if (GET_CODE (y) == USE
4856 && ! (GET_CODE (XEXP (y, 0)) == REG
4857 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4858 canon_reg (y, NULL_RTX);
4859 else if (GET_CODE (y) == CALL)
4860 {
4861 /* The result of apply_change_group can be ignored; see
4862 canon_reg. */
4863 canon_reg (y, insn);
4864 apply_change_group ();
4865 fold_rtx (y, insn);
4866 }
4867 }
4868 }
4869 else if (GET_CODE (x) == CLOBBER)
4870 {
4871 if (GET_CODE (XEXP (x, 0)) == MEM)
4872 canon_reg (XEXP (x, 0), NULL_RTX);
4873 }
4874
4875 /* Canonicalize a USE of a pseudo register or memory location. */
4876 else if (GET_CODE (x) == USE
4877 && ! (GET_CODE (XEXP (x, 0)) == REG
4878 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4879 canon_reg (XEXP (x, 0), NULL_RTX);
4880 else if (GET_CODE (x) == CALL)
4881 {
4882 /* The result of apply_change_group can be ignored; see canon_reg. */
4883 canon_reg (x, insn);
4884 apply_change_group ();
4885 fold_rtx (x, insn);
4886 }
4887
4888 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4889 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4890 is handled specially for this case, and if it isn't set, then there will
4891 be no equivalence for the destination. */
4892 if (n_sets == 1 && REG_NOTES (insn) != 0
4893 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4894 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4895 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4896 src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
4897
4898 /* Canonicalize sources and addresses of destinations.
4899 We do this in a separate pass to avoid problems when a MATCH_DUP is
4900 present in the insn pattern. In that case, we want to ensure that
4901 we don't break the duplicate nature of the pattern. So we will replace
4902 both operands at the same time. Otherwise, we would fail to find an
4903 equivalent substitution in the loop calling validate_change below.
4904
4905 We used to suppress canonicalization of DEST if it appears in SRC,
4906 but we don't do this any more. */
4907
4908 for (i = 0; i < n_sets; i++)
4909 {
4910 rtx dest = SET_DEST (sets[i].rtl);
4911 rtx src = SET_SRC (sets[i].rtl);
4912 rtx new = canon_reg (src, insn);
4913 int insn_code;
4914
4915 sets[i].orig_src = src;
4916 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
4917 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4918 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4919 || (insn_code = recog_memoized (insn)) < 0
4920 || insn_data[insn_code].n_dups > 0)
4921 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4922 else
4923 SET_SRC (sets[i].rtl) = new;
4924
4925 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
4926 {
4927 validate_change (insn, &XEXP (dest, 1),
4928 canon_reg (XEXP (dest, 1), insn), 1);
4929 validate_change (insn, &XEXP (dest, 2),
4930 canon_reg (XEXP (dest, 2), insn), 1);
4931 }
4932
4933 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
4934 || GET_CODE (dest) == ZERO_EXTRACT
4935 || GET_CODE (dest) == SIGN_EXTRACT)
4936 dest = XEXP (dest, 0);
4937
4938 if (GET_CODE (dest) == MEM)
4939 canon_reg (dest, insn);
4940 }
4941
4942 /* Now that we have done all the replacements, we can apply the change
4943 group and see if they all work. Note that this will cause some
4944 canonicalizations that would have worked individually not to be applied
4945 because some other canonicalization didn't work, but this should not
4946 occur often.
4947
4948 The result of apply_change_group can be ignored; see canon_reg. */
4949
4950 apply_change_group ();
4951
4952 /* Set sets[i].src_elt to the class each source belongs to.
4953 Detect assignments from or to volatile things
4954 and set set[i] to zero so they will be ignored
4955 in the rest of this function.
4956
4957 Nothing in this loop changes the hash table or the register chains. */
4958
4959 for (i = 0; i < n_sets; i++)
4960 {
4961 register rtx src, dest;
4962 register rtx src_folded;
4963 register struct table_elt *elt = 0, *p;
4964 enum machine_mode mode;
4965 rtx src_eqv_here;
4966 rtx src_const = 0;
4967 rtx src_related = 0;
4968 struct table_elt *src_const_elt = 0;
4969 int src_cost = MAX_COST;
4970 int src_eqv_cost = MAX_COST;
4971 int src_folded_cost = MAX_COST;
4972 int src_related_cost = MAX_COST;
4973 int src_elt_cost = MAX_COST;
4974 int src_regcost = MAX_COST;
4975 int src_eqv_regcost = MAX_COST;
4976 int src_folded_regcost = MAX_COST;
4977 int src_related_regcost = MAX_COST;
4978 int src_elt_regcost = MAX_COST;
4979 /* Set non-zero if we need to call force_const_mem on with the
4980 contents of src_folded before using it. */
4981 int src_folded_force_flag = 0;
4982
4983 dest = SET_DEST (sets[i].rtl);
4984 src = SET_SRC (sets[i].rtl);
4985
4986 /* If SRC is a constant that has no machine mode,
4987 hash it with the destination's machine mode.
4988 This way we can keep different modes separate. */
4989
4990 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4991 sets[i].mode = mode;
4992
4993 if (src_eqv)
4994 {
4995 enum machine_mode eqvmode = mode;
4996 if (GET_CODE (dest) == STRICT_LOW_PART)
4997 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4998 do_not_record = 0;
4999 hash_arg_in_memory = 0;
5000 src_eqv = fold_rtx (src_eqv, insn);
5001 src_eqv_hash = HASH (src_eqv, eqvmode);
5002
5003 /* Find the equivalence class for the equivalent expression. */
5004
5005 if (!do_not_record)
5006 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
5007
5008 src_eqv_volatile = do_not_record;
5009 src_eqv_in_memory = hash_arg_in_memory;
5010 }
5011
5012 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
5013 value of the INNER register, not the destination. So it is not
5014 a valid substitution for the source. But save it for later. */
5015 if (GET_CODE (dest) == STRICT_LOW_PART)
5016 src_eqv_here = 0;
5017 else
5018 src_eqv_here = src_eqv;
5019
5020 /* Simplify and foldable subexpressions in SRC. Then get the fully-
5021 simplified result, which may not necessarily be valid. */
5022 src_folded = fold_rtx (src, insn);
5023
5024 #if 0
5025 /* ??? This caused bad code to be generated for the m68k port with -O2.
5026 Suppose src is (CONST_INT -1), and that after truncation src_folded
5027 is (CONST_INT 3). Suppose src_folded is then used for src_const.
5028 At the end we will add src and src_const to the same equivalence
5029 class. We now have 3 and -1 on the same equivalence class. This
5030 causes later instructions to be mis-optimized. */
5031 /* If storing a constant in a bitfield, pre-truncate the constant
5032 so we will be able to record it later. */
5033 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5034 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5035 {
5036 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5037
5038 if (GET_CODE (src) == CONST_INT
5039 && GET_CODE (width) == CONST_INT
5040 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5041 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5042 src_folded
5043 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
5044 << INTVAL (width)) - 1));
5045 }
5046 #endif
5047
5048 /* Compute SRC's hash code, and also notice if it
5049 should not be recorded at all. In that case,
5050 prevent any further processing of this assignment. */
5051 do_not_record = 0;
5052 hash_arg_in_memory = 0;
5053
5054 sets[i].src = src;
5055 sets[i].src_hash = HASH (src, mode);
5056 sets[i].src_volatile = do_not_record;
5057 sets[i].src_in_memory = hash_arg_in_memory;
5058
5059 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
5060 a pseudo that is set more than once, do not record SRC. Using
5061 SRC as a replacement for anything else will be incorrect in that
5062 situation. Note that this usually occurs only for stack slots,
5063 in which case all the RTL would be referring to SRC, so we don't
5064 lose any optimization opportunities by not having SRC in the
5065 hash table. */
5066
5067 if (GET_CODE (src) == MEM
5068 && find_reg_note (insn, REG_EQUIV, src) != 0
5069 && GET_CODE (dest) == REG
5070 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
5071 && REG_N_SETS (REGNO (dest)) != 1)
5072 sets[i].src_volatile = 1;
5073
5074 #if 0
5075 /* It is no longer clear why we used to do this, but it doesn't
5076 appear to still be needed. So let's try without it since this
5077 code hurts cse'ing widened ops. */
5078 /* If source is a perverse subreg (such as QI treated as an SI),
5079 treat it as volatile. It may do the work of an SI in one context
5080 where the extra bits are not being used, but cannot replace an SI
5081 in general. */
5082 if (GET_CODE (src) == SUBREG
5083 && (GET_MODE_SIZE (GET_MODE (src))
5084 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5085 sets[i].src_volatile = 1;
5086 #endif
5087
5088 /* Locate all possible equivalent forms for SRC. Try to replace
5089 SRC in the insn with each cheaper equivalent.
5090
5091 We have the following types of equivalents: SRC itself, a folded
5092 version, a value given in a REG_EQUAL note, or a value related
5093 to a constant.
5094
5095 Each of these equivalents may be part of an additional class
5096 of equivalents (if more than one is in the table, they must be in
5097 the same class; we check for this).
5098
5099 If the source is volatile, we don't do any table lookups.
5100
5101 We note any constant equivalent for possible later use in a
5102 REG_NOTE. */
5103
5104 if (!sets[i].src_volatile)
5105 elt = lookup (src, sets[i].src_hash, mode);
5106
5107 sets[i].src_elt = elt;
5108
5109 if (elt && src_eqv_here && src_eqv_elt)
5110 {
5111 if (elt->first_same_value != src_eqv_elt->first_same_value)
5112 {
5113 /* The REG_EQUAL is indicating that two formerly distinct
5114 classes are now equivalent. So merge them. */
5115 merge_equiv_classes (elt, src_eqv_elt);
5116 src_eqv_hash = HASH (src_eqv, elt->mode);
5117 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
5118 }
5119
5120 src_eqv_here = 0;
5121 }
5122
5123 else if (src_eqv_elt)
5124 elt = src_eqv_elt;
5125
5126 /* Try to find a constant somewhere and record it in `src_const'.
5127 Record its table element, if any, in `src_const_elt'. Look in
5128 any known equivalences first. (If the constant is not in the
5129 table, also set `sets[i].src_const_hash'). */
5130 if (elt)
5131 for (p = elt->first_same_value; p; p = p->next_same_value)
5132 if (p->is_const)
5133 {
5134 src_const = p->exp;
5135 src_const_elt = elt;
5136 break;
5137 }
5138
5139 if (src_const == 0
5140 && (CONSTANT_P (src_folded)
5141 /* Consider (minus (label_ref L1) (label_ref L2)) as
5142 "constant" here so we will record it. This allows us
5143 to fold switch statements when an ADDR_DIFF_VEC is used. */
5144 || (GET_CODE (src_folded) == MINUS
5145 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5146 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5147 src_const = src_folded, src_const_elt = elt;
5148 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5149 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5150
5151 /* If we don't know if the constant is in the table, get its
5152 hash code and look it up. */
5153 if (src_const && src_const_elt == 0)
5154 {
5155 sets[i].src_const_hash = HASH (src_const, mode);
5156 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
5157 }
5158
5159 sets[i].src_const = src_const;
5160 sets[i].src_const_elt = src_const_elt;
5161
5162 /* If the constant and our source are both in the table, mark them as
5163 equivalent. Otherwise, if a constant is in the table but the source
5164 isn't, set ELT to it. */
5165 if (src_const_elt && elt
5166 && src_const_elt->first_same_value != elt->first_same_value)
5167 merge_equiv_classes (elt, src_const_elt);
5168 else if (src_const_elt && elt == 0)
5169 elt = src_const_elt;
5170
5171 /* See if there is a register linearly related to a constant
5172 equivalent of SRC. */
5173 if (src_const
5174 && (GET_CODE (src_const) == CONST
5175 || (src_const_elt && src_const_elt->related_value != 0)))
5176 {
5177 src_related = use_related_value (src_const, src_const_elt);
5178 if (src_related)
5179 {
5180 struct table_elt *src_related_elt
5181 = lookup (src_related, HASH (src_related, mode), mode);
5182 if (src_related_elt && elt)
5183 {
5184 if (elt->first_same_value
5185 != src_related_elt->first_same_value)
5186 /* This can occur when we previously saw a CONST
5187 involving a SYMBOL_REF and then see the SYMBOL_REF
5188 twice. Merge the involved classes. */
5189 merge_equiv_classes (elt, src_related_elt);
5190
5191 src_related = 0;
5192 src_related_elt = 0;
5193 }
5194 else if (src_related_elt && elt == 0)
5195 elt = src_related_elt;
5196 }
5197 }
5198
5199 /* See if we have a CONST_INT that is already in a register in a
5200 wider mode. */
5201
5202 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5203 && GET_MODE_CLASS (mode) == MODE_INT
5204 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5205 {
5206 enum machine_mode wider_mode;
5207
5208 for (wider_mode = GET_MODE_WIDER_MODE (mode);
5209 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5210 && src_related == 0;
5211 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5212 {
5213 struct table_elt *const_elt
5214 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5215
5216 if (const_elt == 0)
5217 continue;
5218
5219 for (const_elt = const_elt->first_same_value;
5220 const_elt; const_elt = const_elt->next_same_value)
5221 if (GET_CODE (const_elt->exp) == REG)
5222 {
5223 src_related = gen_lowpart_if_possible (mode,
5224 const_elt->exp);
5225 break;
5226 }
5227 }
5228 }
5229
5230 /* Another possibility is that we have an AND with a constant in
5231 a mode narrower than a word. If so, it might have been generated
5232 as part of an "if" which would narrow the AND. If we already
5233 have done the AND in a wider mode, we can use a SUBREG of that
5234 value. */
5235
5236 if (flag_expensive_optimizations && ! src_related
5237 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5238 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5239 {
5240 enum machine_mode tmode;
5241 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5242
5243 for (tmode = GET_MODE_WIDER_MODE (mode);
5244 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5245 tmode = GET_MODE_WIDER_MODE (tmode))
5246 {
5247 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
5248 struct table_elt *larger_elt;
5249
5250 if (inner)
5251 {
5252 PUT_MODE (new_and, tmode);
5253 XEXP (new_and, 0) = inner;
5254 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5255 if (larger_elt == 0)
5256 continue;
5257
5258 for (larger_elt = larger_elt->first_same_value;
5259 larger_elt; larger_elt = larger_elt->next_same_value)
5260 if (GET_CODE (larger_elt->exp) == REG)
5261 {
5262 src_related
5263 = gen_lowpart_if_possible (mode, larger_elt->exp);
5264 break;
5265 }
5266
5267 if (src_related)
5268 break;
5269 }
5270 }
5271 }
5272
5273 #ifdef LOAD_EXTEND_OP
5274 /* See if a MEM has already been loaded with a widening operation;
5275 if it has, we can use a subreg of that. Many CISC machines
5276 also have such operations, but this is only likely to be
5277 beneficial these machines. */
5278
5279 if (flag_expensive_optimizations && src_related == 0
5280 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5281 && GET_MODE_CLASS (mode) == MODE_INT
5282 && GET_CODE (src) == MEM && ! do_not_record
5283 && LOAD_EXTEND_OP (mode) != NIL)
5284 {
5285 enum machine_mode tmode;
5286
5287 /* Set what we are trying to extend and the operation it might
5288 have been extended with. */
5289 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5290 XEXP (memory_extend_rtx, 0) = src;
5291
5292 for (tmode = GET_MODE_WIDER_MODE (mode);
5293 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5294 tmode = GET_MODE_WIDER_MODE (tmode))
5295 {
5296 struct table_elt *larger_elt;
5297
5298 PUT_MODE (memory_extend_rtx, tmode);
5299 larger_elt = lookup (memory_extend_rtx,
5300 HASH (memory_extend_rtx, tmode), tmode);
5301 if (larger_elt == 0)
5302 continue;
5303
5304 for (larger_elt = larger_elt->first_same_value;
5305 larger_elt; larger_elt = larger_elt->next_same_value)
5306 if (GET_CODE (larger_elt->exp) == REG)
5307 {
5308 src_related = gen_lowpart_if_possible (mode,
5309 larger_elt->exp);
5310 break;
5311 }
5312
5313 if (src_related)
5314 break;
5315 }
5316 }
5317 #endif /* LOAD_EXTEND_OP */
5318
5319 if (src == src_folded)
5320 src_folded = 0;
5321
5322 /* At this point, ELT, if non-zero, points to a class of expressions
5323 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5324 and SRC_RELATED, if non-zero, each contain additional equivalent
5325 expressions. Prune these latter expressions by deleting expressions
5326 already in the equivalence class.
5327
5328 Check for an equivalent identical to the destination. If found,
5329 this is the preferred equivalent since it will likely lead to
5330 elimination of the insn. Indicate this by placing it in
5331 `src_related'. */
5332
5333 if (elt)
5334 elt = elt->first_same_value;
5335 for (p = elt; p; p = p->next_same_value)
5336 {
5337 enum rtx_code code = GET_CODE (p->exp);
5338
5339 /* If the expression is not valid, ignore it. Then we do not
5340 have to check for validity below. In most cases, we can use
5341 `rtx_equal_p', since canonicalization has already been done. */
5342 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
5343 continue;
5344
5345 /* Also skip paradoxical subregs, unless that's what we're
5346 looking for. */
5347 if (code == SUBREG
5348 && (GET_MODE_SIZE (GET_MODE (p->exp))
5349 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5350 && ! (src != 0
5351 && GET_CODE (src) == SUBREG
5352 && GET_MODE (src) == GET_MODE (p->exp)
5353 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5354 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5355 continue;
5356
5357 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5358 src = 0;
5359 else if (src_folded && GET_CODE (src_folded) == code
5360 && rtx_equal_p (src_folded, p->exp))
5361 src_folded = 0;
5362 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5363 && rtx_equal_p (src_eqv_here, p->exp))
5364 src_eqv_here = 0;
5365 else if (src_related && GET_CODE (src_related) == code
5366 && rtx_equal_p (src_related, p->exp))
5367 src_related = 0;
5368
5369 /* This is the same as the destination of the insns, we want
5370 to prefer it. Copy it to src_related. The code below will
5371 then give it a negative cost. */
5372 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5373 src_related = dest;
5374 }
5375
5376 /* Find the cheapest valid equivalent, trying all the available
5377 possibilities. Prefer items not in the hash table to ones
5378 that are when they are equal cost. Note that we can never
5379 worsen an insn as the current contents will also succeed.
5380 If we find an equivalent identical to the destination, use it as best,
5381 since this insn will probably be eliminated in that case. */
5382 if (src)
5383 {
5384 if (rtx_equal_p (src, dest))
5385 src_cost = src_regcost = -1;
5386 else
5387 {
5388 src_cost = COST (src);
5389 src_regcost = approx_reg_cost (src);
5390 }
5391 }
5392
5393 if (src_eqv_here)
5394 {
5395 if (rtx_equal_p (src_eqv_here, dest))
5396 src_eqv_cost = src_eqv_regcost = -1;
5397 else
5398 {
5399 src_eqv_cost = COST (src_eqv_here);
5400 src_eqv_regcost = approx_reg_cost (src_eqv_here);
5401 }
5402 }
5403
5404 if (src_folded)
5405 {
5406 if (rtx_equal_p (src_folded, dest))
5407 src_folded_cost = src_folded_regcost = -1;
5408 else
5409 {
5410 src_folded_cost = COST (src_folded);
5411 src_folded_regcost = approx_reg_cost (src_folded);
5412 }
5413 }
5414
5415 if (src_related)
5416 {
5417 if (rtx_equal_p (src_related, dest))
5418 src_related_cost = src_related_regcost = -1;
5419 else
5420 {
5421 src_related_cost = COST (src_related);
5422 src_related_regcost = approx_reg_cost (src_related);
5423 }
5424 }
5425
5426 /* If this was an indirect jump insn, a known label will really be
5427 cheaper even though it looks more expensive. */
5428 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5429 src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5430
5431 /* Terminate loop when replacement made. This must terminate since
5432 the current contents will be tested and will always be valid. */
5433 while (1)
5434 {
5435 rtx trial;
5436
5437 /* Skip invalid entries. */
5438 while (elt && GET_CODE (elt->exp) != REG
5439 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
5440 elt = elt->next_same_value;
5441
5442 /* A paradoxical subreg would be bad here: it'll be the right
5443 size, but later may be adjusted so that the upper bits aren't
5444 what we want. So reject it. */
5445 if (elt != 0
5446 && GET_CODE (elt->exp) == SUBREG
5447 && (GET_MODE_SIZE (GET_MODE (elt->exp))
5448 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5449 /* It is okay, though, if the rtx we're trying to match
5450 will ignore any of the bits we can't predict. */
5451 && ! (src != 0
5452 && GET_CODE (src) == SUBREG
5453 && GET_MODE (src) == GET_MODE (elt->exp)
5454 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5455 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5456 {
5457 elt = elt->next_same_value;
5458 continue;
5459 }
5460
5461 if (elt)
5462 {
5463 src_elt_cost = elt->cost;
5464 src_elt_regcost = elt->regcost;
5465 }
5466
5467 /* Find cheapest and skip it for the next time. For items
5468 of equal cost, use this order:
5469 src_folded, src, src_eqv, src_related and hash table entry. */
5470 if (src_folded
5471 && preferrable (src_folded_cost, src_folded_regcost,
5472 src_cost, src_regcost) <= 0
5473 && preferrable (src_folded_cost, src_folded_regcost,
5474 src_eqv_cost, src_eqv_regcost) <= 0
5475 && preferrable (src_folded_cost, src_folded_regcost,
5476 src_related_cost, src_related_regcost) <= 0
5477 && preferrable (src_folded_cost, src_folded_regcost,
5478 src_elt_cost, src_elt_regcost) <= 0)
5479 {
5480 trial = src_folded, src_folded_cost = MAX_COST;
5481 if (src_folded_force_flag)
5482 trial = force_const_mem (mode, trial);
5483 }
5484 else if (src
5485 && preferrable (src_cost, src_regcost,
5486 src_eqv_cost, src_eqv_regcost) <= 0
5487 && preferrable (src_cost, src_regcost,
5488 src_related_cost, src_related_regcost) <= 0
5489 && preferrable (src_cost, src_regcost,
5490 src_elt_cost, src_elt_regcost) <= 0)
5491 trial = src, src_cost = MAX_COST;
5492 else if (src_eqv_here
5493 && preferrable (src_eqv_cost, src_eqv_regcost,
5494 src_related_cost, src_related_regcost) <= 0
5495 && preferrable (src_eqv_cost, src_eqv_regcost,
5496 src_elt_cost, src_elt_regcost) <= 0)
5497 trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
5498 else if (src_related
5499 && preferrable (src_related_cost, src_related_regcost,
5500 src_elt_cost, src_elt_regcost) <= 0)
5501 trial = copy_rtx (src_related), src_related_cost = MAX_COST;
5502 else
5503 {
5504 trial = copy_rtx (elt->exp);
5505 elt = elt->next_same_value;
5506 src_elt_cost = MAX_COST;
5507 }
5508
5509 /* We don't normally have an insn matching (set (pc) (pc)), so
5510 check for this separately here. We will delete such an
5511 insn below.
5512
5513 Tablejump insns contain a USE of the table, so simply replacing
5514 the operand with the constant won't match. This is simply an
5515 unconditional branch, however, and is therefore valid. Just
5516 insert the substitution here and we will delete and re-emit
5517 the insn later. */
5518
5519 if (n_sets == 1 && dest == pc_rtx
5520 && (trial == pc_rtx
5521 || (GET_CODE (trial) == LABEL_REF
5522 && ! condjump_p (insn))))
5523 {
5524 if (trial == pc_rtx)
5525 {
5526 SET_SRC (sets[i].rtl) = trial;
5527 cse_jumps_altered = 1;
5528 break;
5529 }
5530
5531 PATTERN (insn) = gen_jump (XEXP (trial, 0));
5532 INSN_CODE (insn) = -1;
5533
5534 if (NEXT_INSN (insn) != 0
5535 && GET_CODE (NEXT_INSN (insn)) != BARRIER)
5536 emit_barrier_after (insn);
5537
5538 cse_jumps_altered = 1;
5539 break;
5540 }
5541
5542 /* Look for a substitution that makes a valid insn. */
5543 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5544 {
5545 /* If we just made a substitution inside a libcall, then we
5546 need to make the same substitution in any notes attached
5547 to the RETVAL insn. */
5548 if (libcall_insn
5549 && (GET_CODE (sets[i].orig_src) == REG
5550 || GET_CODE (sets[i].orig_src) == SUBREG
5551 || GET_CODE (sets[i].orig_src) == MEM))
5552 replace_rtx (REG_NOTES (libcall_insn), sets[i].orig_src,
5553 canon_reg (SET_SRC (sets[i].rtl), insn));
5554
5555 /* The result of apply_change_group can be ignored; see
5556 canon_reg. */
5557
5558 validate_change (insn, &SET_SRC (sets[i].rtl),
5559 canon_reg (SET_SRC (sets[i].rtl), insn),
5560 1);
5561 apply_change_group ();
5562 break;
5563 }
5564
5565 /* If we previously found constant pool entries for
5566 constants and this is a constant, try making a
5567 pool entry. Put it in src_folded unless we already have done
5568 this since that is where it likely came from. */
5569
5570 else if (constant_pool_entries_cost
5571 && CONSTANT_P (trial)
5572 /* Reject cases that will abort in decode_rtx_const.
5573 On the alpha when simplifying a switch, we get
5574 (const (truncate (minus (label_ref) (label_ref)))). */
5575 && ! (GET_CODE (trial) == CONST
5576 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
5577 /* Likewise on IA-64, except without the truncate. */
5578 && ! (GET_CODE (trial) == CONST
5579 && GET_CODE (XEXP (trial, 0)) == MINUS
5580 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5581 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)
5582 && (src_folded == 0
5583 || (GET_CODE (src_folded) != MEM
5584 && ! src_folded_force_flag))
5585 && GET_MODE_CLASS (mode) != MODE_CC
5586 && mode != VOIDmode)
5587 {
5588 src_folded_force_flag = 1;
5589 src_folded = trial;
5590 src_folded_cost = constant_pool_entries_cost;
5591 }
5592 }
5593
5594 src = SET_SRC (sets[i].rtl);
5595
5596 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5597 However, there is an important exception: If both are registers
5598 that are not the head of their equivalence class, replace SET_SRC
5599 with the head of the class. If we do not do this, we will have
5600 both registers live over a portion of the basic block. This way,
5601 their lifetimes will likely abut instead of overlapping. */
5602 if (GET_CODE (dest) == REG
5603 && REGNO_QTY_VALID_P (REGNO (dest)))
5604 {
5605 int dest_q = REG_QTY (REGNO (dest));
5606 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5607
5608 if (dest_ent->mode == GET_MODE (dest)
5609 && dest_ent->first_reg != REGNO (dest)
5610 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
5611 /* Don't do this if the original insn had a hard reg as
5612 SET_SRC or SET_DEST. */
5613 && (GET_CODE (sets[i].src) != REG
5614 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5615 && (GET_CODE (dest) != REG || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5616 /* We can't call canon_reg here because it won't do anything if
5617 SRC is a hard register. */
5618 {
5619 int src_q = REG_QTY (REGNO (src));
5620 struct qty_table_elem *src_ent = &qty_table[src_q];
5621 int first = src_ent->first_reg;
5622 rtx new_src
5623 = (first >= FIRST_PSEUDO_REGISTER
5624 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5625
5626 /* We must use validate-change even for this, because this
5627 might be a special no-op instruction, suitable only to
5628 tag notes onto. */
5629 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5630 {
5631 src = new_src;
5632 /* If we had a constant that is cheaper than what we are now
5633 setting SRC to, use that constant. We ignored it when we
5634 thought we could make this into a no-op. */
5635 if (src_const && COST (src_const) < COST (src)
5636 && validate_change (insn, &SET_SRC (sets[i].rtl),
5637 src_const, 0))
5638 src = src_const;
5639 }
5640 }
5641 }
5642
5643 /* If we made a change, recompute SRC values. */
5644 if (src != sets[i].src)
5645 {
5646 cse_altered = 1;
5647 do_not_record = 0;
5648 hash_arg_in_memory = 0;
5649 sets[i].src = src;
5650 sets[i].src_hash = HASH (src, mode);
5651 sets[i].src_volatile = do_not_record;
5652 sets[i].src_in_memory = hash_arg_in_memory;
5653 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5654 }
5655
5656 /* If this is a single SET, we are setting a register, and we have an
5657 equivalent constant, we want to add a REG_NOTE. We don't want
5658 to write a REG_EQUAL note for a constant pseudo since verifying that
5659 that pseudo hasn't been eliminated is a pain. Such a note also
5660 won't help anything.
5661
5662 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5663 which can be created for a reference to a compile time computable
5664 entry in a jump table. */
5665
5666 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
5667 && GET_CODE (src_const) != REG
5668 && ! (GET_CODE (src_const) == CONST
5669 && GET_CODE (XEXP (src_const, 0)) == MINUS
5670 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5671 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5672 {
5673 tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5674
5675 /* Make sure that the rtx is not shared with any other insn. */
5676 src_const = copy_rtx (src_const);
5677
5678 /* Record the actual constant value in a REG_EQUAL note, making
5679 a new one if one does not already exist. */
5680 if (tem)
5681 XEXP (tem, 0) = src_const;
5682 else
5683 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL,
5684 src_const, REG_NOTES (insn));
5685
5686 /* If storing a constant value in a register that
5687 previously held the constant value 0,
5688 record this fact with a REG_WAS_0 note on this insn.
5689
5690 Note that the *register* is required to have previously held 0,
5691 not just any register in the quantity and we must point to the
5692 insn that set that register to zero.
5693
5694 Rather than track each register individually, we just see if
5695 the last set for this quantity was for this register. */
5696
5697 if (REGNO_QTY_VALID_P (REGNO (dest)))
5698 {
5699 int dest_q = REG_QTY (REGNO (dest));
5700 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5701
5702 if (dest_ent->const_rtx == const0_rtx)
5703 {
5704 /* See if we previously had a REG_WAS_0 note. */
5705 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
5706 rtx const_insn = dest_ent->const_insn;
5707
5708 if ((tem = single_set (const_insn)) != 0
5709 && rtx_equal_p (SET_DEST (tem), dest))
5710 {
5711 if (note)
5712 XEXP (note, 0) = const_insn;
5713 else
5714 REG_NOTES (insn)
5715 = gen_rtx_INSN_LIST (REG_WAS_0, const_insn,
5716 REG_NOTES (insn));
5717 }
5718 }
5719 }
5720 }
5721
5722 /* Now deal with the destination. */
5723 do_not_record = 0;
5724
5725 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
5726 to the MEM or REG within it. */
5727 while (GET_CODE (dest) == SIGN_EXTRACT
5728 || GET_CODE (dest) == ZERO_EXTRACT
5729 || GET_CODE (dest) == SUBREG
5730 || GET_CODE (dest) == STRICT_LOW_PART)
5731 dest = XEXP (dest, 0);
5732
5733 sets[i].inner_dest = dest;
5734
5735 if (GET_CODE (dest) == MEM)
5736 {
5737 #ifdef PUSH_ROUNDING
5738 /* Stack pushes invalidate the stack pointer. */
5739 rtx addr = XEXP (dest, 0);
5740 if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
5741 && XEXP (addr, 0) == stack_pointer_rtx)
5742 invalidate (stack_pointer_rtx, Pmode);
5743 #endif
5744 dest = fold_rtx (dest, insn);
5745 }
5746
5747 /* Compute the hash code of the destination now,
5748 before the effects of this instruction are recorded,
5749 since the register values used in the address computation
5750 are those before this instruction. */
5751 sets[i].dest_hash = HASH (dest, mode);
5752
5753 /* Don't enter a bit-field in the hash table
5754 because the value in it after the store
5755 may not equal what was stored, due to truncation. */
5756
5757 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5758 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5759 {
5760 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5761
5762 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5763 && GET_CODE (width) == CONST_INT
5764 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5765 && ! (INTVAL (src_const)
5766 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5767 /* Exception: if the value is constant,
5768 and it won't be truncated, record it. */
5769 ;
5770 else
5771 {
5772 /* This is chosen so that the destination will be invalidated
5773 but no new value will be recorded.
5774 We must invalidate because sometimes constant
5775 values can be recorded for bitfields. */
5776 sets[i].src_elt = 0;
5777 sets[i].src_volatile = 1;
5778 src_eqv = 0;
5779 src_eqv_elt = 0;
5780 }
5781 }
5782
5783 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5784 the insn. */
5785 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5786 {
5787 /* One less use of the label this insn used to jump to. */
5788 if (JUMP_LABEL (insn) != 0)
5789 --LABEL_NUSES (JUMP_LABEL (insn));
5790 PUT_CODE (insn, NOTE);
5791 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
5792 NOTE_SOURCE_FILE (insn) = 0;
5793 cse_jumps_altered = 1;
5794 /* No more processing for this set. */
5795 sets[i].rtl = 0;
5796 }
5797
5798 /* If this SET is now setting PC to a label, we know it used to
5799 be a conditional or computed branch. So we see if we can follow
5800 it. If it was a computed branch, delete it and re-emit. */
5801 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
5802 {
5803 /* If this is not in the format for a simple branch and
5804 we are the only SET in it, re-emit it. */
5805 if (! simplejump_p (insn) && n_sets == 1)
5806 {
5807 rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
5808 JUMP_LABEL (new) = XEXP (src, 0);
5809 LABEL_NUSES (XEXP (src, 0))++;
5810 insn = new;
5811 }
5812 else
5813 /* Otherwise, force rerecognition, since it probably had
5814 a different pattern before.
5815 This shouldn't really be necessary, since whatever
5816 changed the source value above should have done this.
5817 Until the right place is found, might as well do this here. */
5818 INSN_CODE (insn) = -1;
5819
5820 never_reached_warning (insn);
5821
5822 /* Now emit a BARRIER after the unconditional jump. Do not bother
5823 deleting any unreachable code, let jump/flow do that. */
5824 if (NEXT_INSN (insn) != 0
5825 && GET_CODE (NEXT_INSN (insn)) != BARRIER)
5826 emit_barrier_after (insn);
5827
5828 cse_jumps_altered = 1;
5829 sets[i].rtl = 0;
5830 }
5831
5832 /* If destination is volatile, invalidate it and then do no further
5833 processing for this assignment. */
5834
5835 else if (do_not_record)
5836 {
5837 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5838 invalidate (dest, VOIDmode);
5839 else if (GET_CODE (dest) == MEM)
5840 {
5841 /* Outgoing arguments for a libcall don't
5842 affect any recorded expressions. */
5843 if (! libcall_insn || insn == libcall_insn)
5844 invalidate (dest, VOIDmode);
5845 }
5846 else if (GET_CODE (dest) == STRICT_LOW_PART
5847 || GET_CODE (dest) == ZERO_EXTRACT)
5848 invalidate (XEXP (dest, 0), GET_MODE (dest));
5849 sets[i].rtl = 0;
5850 }
5851
5852 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5853 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5854
5855 #ifdef HAVE_cc0
5856 /* If setting CC0, record what it was set to, or a constant, if it
5857 is equivalent to a constant. If it is being set to a floating-point
5858 value, make a COMPARE with the appropriate constant of 0. If we
5859 don't do this, later code can interpret this as a test against
5860 const0_rtx, which can cause problems if we try to put it into an
5861 insn as a floating-point operand. */
5862 if (dest == cc0_rtx)
5863 {
5864 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5865 this_insn_cc0_mode = mode;
5866 if (FLOAT_MODE_P (mode))
5867 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5868 CONST0_RTX (mode));
5869 }
5870 #endif
5871 }
5872
5873 /* Now enter all non-volatile source expressions in the hash table
5874 if they are not already present.
5875 Record their equivalence classes in src_elt.
5876 This way we can insert the corresponding destinations into
5877 the same classes even if the actual sources are no longer in them
5878 (having been invalidated). */
5879
5880 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5881 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5882 {
5883 register struct table_elt *elt;
5884 register struct table_elt *classp = sets[0].src_elt;
5885 rtx dest = SET_DEST (sets[0].rtl);
5886 enum machine_mode eqvmode = GET_MODE (dest);
5887
5888 if (GET_CODE (dest) == STRICT_LOW_PART)
5889 {
5890 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5891 classp = 0;
5892 }
5893 if (insert_regs (src_eqv, classp, 0))
5894 {
5895 rehash_using_reg (src_eqv);
5896 src_eqv_hash = HASH (src_eqv, eqvmode);
5897 }
5898 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5899 elt->in_memory = src_eqv_in_memory;
5900 src_eqv_elt = elt;
5901
5902 /* Check to see if src_eqv_elt is the same as a set source which
5903 does not yet have an elt, and if so set the elt of the set source
5904 to src_eqv_elt. */
5905 for (i = 0; i < n_sets; i++)
5906 if (sets[i].rtl && sets[i].src_elt == 0
5907 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5908 sets[i].src_elt = src_eqv_elt;
5909 }
5910
5911 for (i = 0; i < n_sets; i++)
5912 if (sets[i].rtl && ! sets[i].src_volatile
5913 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5914 {
5915 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5916 {
5917 /* REG_EQUAL in setting a STRICT_LOW_PART
5918 gives an equivalent for the entire destination register,
5919 not just for the subreg being stored in now.
5920 This is a more interesting equivalence, so we arrange later
5921 to treat the entire reg as the destination. */
5922 sets[i].src_elt = src_eqv_elt;
5923 sets[i].src_hash = src_eqv_hash;
5924 }
5925 else
5926 {
5927 /* Insert source and constant equivalent into hash table, if not
5928 already present. */
5929 register struct table_elt *classp = src_eqv_elt;
5930 register rtx src = sets[i].src;
5931 register rtx dest = SET_DEST (sets[i].rtl);
5932 enum machine_mode mode
5933 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5934
5935 if (sets[i].src_elt == 0)
5936 {
5937 /* Don't put a hard register source into the table if this is
5938 the last insn of a libcall. In this case, we only need
5939 to put src_eqv_elt in src_elt. */
5940 if (GET_CODE (src) != REG
5941 || REGNO (src) >= FIRST_PSEUDO_REGISTER
5942 || ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5943 {
5944 register struct table_elt *elt;
5945
5946 /* Note that these insert_regs calls cannot remove
5947 any of the src_elt's, because they would have failed to
5948 match if not still valid. */
5949 if (insert_regs (src, classp, 0))
5950 {
5951 rehash_using_reg (src);
5952 sets[i].src_hash = HASH (src, mode);
5953 }
5954 elt = insert (src, classp, sets[i].src_hash, mode);
5955 elt->in_memory = sets[i].src_in_memory;
5956 sets[i].src_elt = classp = elt;
5957 }
5958 else
5959 sets[i].src_elt = classp;
5960 }
5961 if (sets[i].src_const && sets[i].src_const_elt == 0
5962 && src != sets[i].src_const
5963 && ! rtx_equal_p (sets[i].src_const, src))
5964 sets[i].src_elt = insert (sets[i].src_const, classp,
5965 sets[i].src_const_hash, mode);
5966 }
5967 }
5968 else if (sets[i].src_elt == 0)
5969 /* If we did not insert the source into the hash table (e.g., it was
5970 volatile), note the equivalence class for the REG_EQUAL value, if any,
5971 so that the destination goes into that class. */
5972 sets[i].src_elt = src_eqv_elt;
5973
5974 invalidate_from_clobbers (x);
5975
5976 /* Some registers are invalidated by subroutine calls. Memory is
5977 invalidated by non-constant calls. */
5978
5979 if (GET_CODE (insn) == CALL_INSN)
5980 {
5981 if (! CONST_CALL_P (insn))
5982 invalidate_memory ();
5983 invalidate_for_call ();
5984 }
5985
5986 /* Now invalidate everything set by this instruction.
5987 If a SUBREG or other funny destination is being set,
5988 sets[i].rtl is still nonzero, so here we invalidate the reg
5989 a part of which is being set. */
5990
5991 for (i = 0; i < n_sets; i++)
5992 if (sets[i].rtl)
5993 {
5994 /* We can't use the inner dest, because the mode associated with
5995 a ZERO_EXTRACT is significant. */
5996 register rtx dest = SET_DEST (sets[i].rtl);
5997
5998 /* Needed for registers to remove the register from its
5999 previous quantity's chain.
6000 Needed for memory if this is a nonvarying address, unless
6001 we have just done an invalidate_memory that covers even those. */
6002 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
6003 invalidate (dest, VOIDmode);
6004 else if (GET_CODE (dest) == MEM)
6005 {
6006 /* Outgoing arguments for a libcall don't
6007 affect any recorded expressions. */
6008 if (! libcall_insn || insn == libcall_insn)
6009 invalidate (dest, VOIDmode);
6010 }
6011 else if (GET_CODE (dest) == STRICT_LOW_PART
6012 || GET_CODE (dest) == ZERO_EXTRACT)
6013 invalidate (XEXP (dest, 0), GET_MODE (dest));
6014 }
6015
6016 /* A volatile ASM invalidates everything. */
6017 if (GET_CODE (insn) == INSN
6018 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
6019 && MEM_VOLATILE_P (PATTERN (insn)))
6020 flush_hash_table ();
6021
6022 /* Make sure registers mentioned in destinations
6023 are safe for use in an expression to be inserted.
6024 This removes from the hash table
6025 any invalid entry that refers to one of these registers.
6026
6027 We don't care about the return value from mention_regs because
6028 we are going to hash the SET_DEST values unconditionally. */
6029
6030 for (i = 0; i < n_sets; i++)
6031 {
6032 if (sets[i].rtl)
6033 {
6034 rtx x = SET_DEST (sets[i].rtl);
6035
6036 if (GET_CODE (x) != REG)
6037 mention_regs (x);
6038 else
6039 {
6040 /* We used to rely on all references to a register becoming
6041 inaccessible when a register changes to a new quantity,
6042 since that changes the hash code. However, that is not
6043 safe, since after HASH_SIZE new quantities we get a
6044 hash 'collision' of a register with its own invalid
6045 entries. And since SUBREGs have been changed not to
6046 change their hash code with the hash code of the register,
6047 it wouldn't work any longer at all. So we have to check
6048 for any invalid references lying around now.
6049 This code is similar to the REG case in mention_regs,
6050 but it knows that reg_tick has been incremented, and
6051 it leaves reg_in_table as -1 . */
6052 unsigned int regno = REGNO (x);
6053 unsigned int endregno
6054 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
6055 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
6056 unsigned int i;
6057
6058 for (i = regno; i < endregno; i++)
6059 {
6060 if (REG_IN_TABLE (i) >= 0)
6061 {
6062 remove_invalid_refs (i);
6063 REG_IN_TABLE (i) = -1;
6064 }
6065 }
6066 }
6067 }
6068 }
6069
6070 /* We may have just removed some of the src_elt's from the hash table.
6071 So replace each one with the current head of the same class. */
6072
6073 for (i = 0; i < n_sets; i++)
6074 if (sets[i].rtl)
6075 {
6076 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
6077 /* If elt was removed, find current head of same class,
6078 or 0 if nothing remains of that class. */
6079 {
6080 register struct table_elt *elt = sets[i].src_elt;
6081
6082 while (elt && elt->prev_same_value)
6083 elt = elt->prev_same_value;
6084
6085 while (elt && elt->first_same_value == 0)
6086 elt = elt->next_same_value;
6087 sets[i].src_elt = elt ? elt->first_same_value : 0;
6088 }
6089 }
6090
6091 /* Now insert the destinations into their equivalence classes. */
6092
6093 for (i = 0; i < n_sets; i++)
6094 if (sets[i].rtl)
6095 {
6096 register rtx dest = SET_DEST (sets[i].rtl);
6097 rtx inner_dest = sets[i].inner_dest;
6098 register struct table_elt *elt;
6099
6100 /* Don't record value if we are not supposed to risk allocating
6101 floating-point values in registers that might be wider than
6102 memory. */
6103 if ((flag_float_store
6104 && GET_CODE (dest) == MEM
6105 && FLOAT_MODE_P (GET_MODE (dest)))
6106 /* Don't record BLKmode values, because we don't know the
6107 size of it, and can't be sure that other BLKmode values
6108 have the same or smaller size. */
6109 || GET_MODE (dest) == BLKmode
6110 /* Don't record values of destinations set inside a libcall block
6111 since we might delete the libcall. Things should have been set
6112 up so we won't want to reuse such a value, but we play it safe
6113 here. */
6114 || libcall_insn
6115 /* If we didn't put a REG_EQUAL value or a source into the hash
6116 table, there is no point is recording DEST. */
6117 || sets[i].src_elt == 0
6118 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
6119 or SIGN_EXTEND, don't record DEST since it can cause
6120 some tracking to be wrong.
6121
6122 ??? Think about this more later. */
6123 || (GET_CODE (dest) == SUBREG
6124 && (GET_MODE_SIZE (GET_MODE (dest))
6125 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6126 && (GET_CODE (sets[i].src) == SIGN_EXTEND
6127 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
6128 continue;
6129
6130 /* STRICT_LOW_PART isn't part of the value BEING set,
6131 and neither is the SUBREG inside it.
6132 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
6133 if (GET_CODE (dest) == STRICT_LOW_PART)
6134 dest = SUBREG_REG (XEXP (dest, 0));
6135
6136 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
6137 /* Registers must also be inserted into chains for quantities. */
6138 if (insert_regs (dest, sets[i].src_elt, 1))
6139 {
6140 /* If `insert_regs' changes something, the hash code must be
6141 recalculated. */
6142 rehash_using_reg (dest);
6143 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6144 }
6145
6146 if (GET_CODE (inner_dest) == MEM
6147 && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
6148 /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
6149 that (MEM (ADDRESSOF (X))) is equivalent to Y.
6150 Consider the case in which the address of the MEM is
6151 passed to a function, which alters the MEM. Then, if we
6152 later use Y instead of the MEM we'll miss the update. */
6153 elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
6154 else
6155 elt = insert (dest, sets[i].src_elt,
6156 sets[i].dest_hash, GET_MODE (dest));
6157
6158 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
6159 && (! RTX_UNCHANGING_P (sets[i].inner_dest)
6160 || FIXED_BASE_PLUS_P (XEXP (sets[i].inner_dest,
6161 0))));
6162
6163 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6164 narrower than M2, and both M1 and M2 are the same number of words,
6165 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6166 make that equivalence as well.
6167
6168 However, BAR may have equivalences for which gen_lowpart_if_possible
6169 will produce a simpler value than gen_lowpart_if_possible applied to
6170 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6171 BAR's equivalences. If we don't get a simplified form, make
6172 the SUBREG. It will not be used in an equivalence, but will
6173 cause two similar assignments to be detected.
6174
6175 Note the loop below will find SUBREG_REG (DEST) since we have
6176 already entered SRC and DEST of the SET in the table. */
6177
6178 if (GET_CODE (dest) == SUBREG
6179 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6180 / UNITS_PER_WORD)
6181 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
6182 && (GET_MODE_SIZE (GET_MODE (dest))
6183 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6184 && sets[i].src_elt != 0)
6185 {
6186 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6187 struct table_elt *elt, *classp = 0;
6188
6189 for (elt = sets[i].src_elt->first_same_value; elt;
6190 elt = elt->next_same_value)
6191 {
6192 rtx new_src = 0;
6193 unsigned src_hash;
6194 struct table_elt *src_elt;
6195
6196 /* Ignore invalid entries. */
6197 if (GET_CODE (elt->exp) != REG
6198 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6199 continue;
6200
6201 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
6202 if (new_src == 0)
6203 new_src = gen_rtx_SUBREG (new_mode, elt->exp, 0);
6204
6205 src_hash = HASH (new_src, new_mode);
6206 src_elt = lookup (new_src, src_hash, new_mode);
6207
6208 /* Put the new source in the hash table is if isn't
6209 already. */
6210 if (src_elt == 0)
6211 {
6212 if (insert_regs (new_src, classp, 0))
6213 {
6214 rehash_using_reg (new_src);
6215 src_hash = HASH (new_src, new_mode);
6216 }
6217 src_elt = insert (new_src, classp, src_hash, new_mode);
6218 src_elt->in_memory = elt->in_memory;
6219 }
6220 else if (classp && classp != src_elt->first_same_value)
6221 /* Show that two things that we've seen before are
6222 actually the same. */
6223 merge_equiv_classes (src_elt, classp);
6224
6225 classp = src_elt->first_same_value;
6226 /* Ignore invalid entries. */
6227 while (classp
6228 && GET_CODE (classp->exp) != REG
6229 && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
6230 classp = classp->next_same_value;
6231 }
6232 }
6233 }
6234
6235 /* Special handling for (set REG0 REG1) where REG0 is the
6236 "cheapest", cheaper than REG1. After cse, REG1 will probably not
6237 be used in the sequel, so (if easily done) change this insn to
6238 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6239 that computed their value. Then REG1 will become a dead store
6240 and won't cloud the situation for later optimizations.
6241
6242 Do not make this change if REG1 is a hard register, because it will
6243 then be used in the sequel and we may be changing a two-operand insn
6244 into a three-operand insn.
6245
6246 Also do not do this if we are operating on a copy of INSN.
6247
6248 Also don't do this if INSN ends a libcall; this would cause an unrelated
6249 register to be set in the middle of a libcall, and we then get bad code
6250 if the libcall is deleted. */
6251
6252 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
6253 && NEXT_INSN (PREV_INSN (insn)) == insn
6254 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
6255 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6256 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
6257 {
6258 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6259 struct qty_table_elem *src_ent = &qty_table[src_q];
6260
6261 if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6262 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6263 {
6264 rtx prev = prev_nonnote_insn (insn);
6265
6266 /* Do not swap the registers around if the previous instruction
6267 attaches a REG_EQUIV note to REG1.
6268
6269 ??? It's not entirely clear whether we can transfer a REG_EQUIV
6270 from the pseudo that originally shadowed an incoming argument
6271 to another register. Some uses of REG_EQUIV might rely on it
6272 being attached to REG1 rather than REG2.
6273
6274 This section previously turned the REG_EQUIV into a REG_EQUAL
6275 note. We cannot do that because REG_EQUIV may provide an
6276 uninitialised stack slot when REG_PARM_STACK_SPACE is used. */
6277
6278 if (prev != 0 && GET_CODE (prev) == INSN
6279 && GET_CODE (PATTERN (prev)) == SET
6280 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6281 && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
6282 {
6283 rtx dest = SET_DEST (sets[0].rtl);
6284 rtx src = SET_SRC (sets[0].rtl);
6285 rtx note;
6286
6287 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6288 validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6289 validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6290 apply_change_group ();
6291
6292 /* If there was a REG_WAS_0 note on PREV, remove it. Move
6293 any REG_WAS_0 note on INSN to PREV. */
6294 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
6295 if (note)
6296 remove_note (prev, note);
6297
6298 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
6299 if (note)
6300 {
6301 remove_note (insn, note);
6302 XEXP (note, 1) = REG_NOTES (prev);
6303 REG_NOTES (prev) = note;
6304 }
6305
6306 /* If INSN has a REG_EQUAL note, and this note mentions
6307 REG0, then we must delete it, because the value in
6308 REG0 has changed. If the note's value is REG1, we must
6309 also delete it because that is now this insn's dest. */
6310 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6311 if (note != 0
6312 && (reg_mentioned_p (dest, XEXP (note, 0))
6313 || rtx_equal_p (src, XEXP (note, 0))))
6314 remove_note (insn, note);
6315 }
6316 }
6317 }
6318
6319 /* If this is a conditional jump insn, record any known equivalences due to
6320 the condition being tested. */
6321
6322 last_jump_equiv_class = 0;
6323 if (GET_CODE (insn) == JUMP_INSN
6324 && n_sets == 1 && GET_CODE (x) == SET
6325 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6326 record_jump_equiv (insn, 0);
6327
6328 #ifdef HAVE_cc0
6329 /* If the previous insn set CC0 and this insn no longer references CC0,
6330 delete the previous insn. Here we use the fact that nothing expects CC0
6331 to be valid over an insn, which is true until the final pass. */
6332 if (prev_insn && GET_CODE (prev_insn) == INSN
6333 && (tem = single_set (prev_insn)) != 0
6334 && SET_DEST (tem) == cc0_rtx
6335 && ! reg_mentioned_p (cc0_rtx, x))
6336 {
6337 PUT_CODE (prev_insn, NOTE);
6338 NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
6339 NOTE_SOURCE_FILE (prev_insn) = 0;
6340 }
6341
6342 prev_insn_cc0 = this_insn_cc0;
6343 prev_insn_cc0_mode = this_insn_cc0_mode;
6344 #endif
6345
6346 prev_insn = insn;
6347 }
6348 \f
6349 /* Remove from the hash table all expressions that reference memory. */
6350
6351 static void
6352 invalidate_memory ()
6353 {
6354 register int i;
6355 register struct table_elt *p, *next;
6356
6357 for (i = 0; i < HASH_SIZE; i++)
6358 for (p = table[i]; p; p = next)
6359 {
6360 next = p->next_same_hash;
6361 if (p->in_memory)
6362 remove_from_table (p, i);
6363 }
6364 }
6365
6366 /* If ADDR is an address that implicitly affects the stack pointer, return
6367 1 and update the register tables to show the effect. Else, return 0. */
6368
6369 static int
6370 addr_affects_sp_p (addr)
6371 register rtx addr;
6372 {
6373 if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
6374 && GET_CODE (XEXP (addr, 0)) == REG
6375 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6376 {
6377 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6378 REG_TICK (STACK_POINTER_REGNUM)++;
6379
6380 /* This should be *very* rare. */
6381 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6382 invalidate (stack_pointer_rtx, VOIDmode);
6383
6384 return 1;
6385 }
6386
6387 return 0;
6388 }
6389
6390 /* Perform invalidation on the basis of everything about an insn
6391 except for invalidating the actual places that are SET in it.
6392 This includes the places CLOBBERed, and anything that might
6393 alias with something that is SET or CLOBBERed.
6394
6395 X is the pattern of the insn. */
6396
6397 static void
6398 invalidate_from_clobbers (x)
6399 rtx x;
6400 {
6401 if (GET_CODE (x) == CLOBBER)
6402 {
6403 rtx ref = XEXP (x, 0);
6404 if (ref)
6405 {
6406 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6407 || GET_CODE (ref) == MEM)
6408 invalidate (ref, VOIDmode);
6409 else if (GET_CODE (ref) == STRICT_LOW_PART
6410 || GET_CODE (ref) == ZERO_EXTRACT)
6411 invalidate (XEXP (ref, 0), GET_MODE (ref));
6412 }
6413 }
6414 else if (GET_CODE (x) == PARALLEL)
6415 {
6416 register int i;
6417 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6418 {
6419 register rtx y = XVECEXP (x, 0, i);
6420 if (GET_CODE (y) == CLOBBER)
6421 {
6422 rtx ref = XEXP (y, 0);
6423 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6424 || GET_CODE (ref) == MEM)
6425 invalidate (ref, VOIDmode);
6426 else if (GET_CODE (ref) == STRICT_LOW_PART
6427 || GET_CODE (ref) == ZERO_EXTRACT)
6428 invalidate (XEXP (ref, 0), GET_MODE (ref));
6429 }
6430 }
6431 }
6432 }
6433 \f
6434 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6435 and replace any registers in them with either an equivalent constant
6436 or the canonical form of the register. If we are inside an address,
6437 only do this if the address remains valid.
6438
6439 OBJECT is 0 except when within a MEM in which case it is the MEM.
6440
6441 Return the replacement for X. */
6442
6443 static rtx
6444 cse_process_notes (x, object)
6445 rtx x;
6446 rtx object;
6447 {
6448 enum rtx_code code = GET_CODE (x);
6449 const char *fmt = GET_RTX_FORMAT (code);
6450 int i;
6451
6452 switch (code)
6453 {
6454 case CONST_INT:
6455 case CONST:
6456 case SYMBOL_REF:
6457 case LABEL_REF:
6458 case CONST_DOUBLE:
6459 case PC:
6460 case CC0:
6461 case LO_SUM:
6462 return x;
6463
6464 case MEM:
6465 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
6466 return x;
6467
6468 case EXPR_LIST:
6469 case INSN_LIST:
6470 if (REG_NOTE_KIND (x) == REG_EQUAL)
6471 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6472 if (XEXP (x, 1))
6473 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6474 return x;
6475
6476 case SIGN_EXTEND:
6477 case ZERO_EXTEND:
6478 case SUBREG:
6479 {
6480 rtx new = cse_process_notes (XEXP (x, 0), object);
6481 /* We don't substitute VOIDmode constants into these rtx,
6482 since they would impede folding. */
6483 if (GET_MODE (new) != VOIDmode)
6484 validate_change (object, &XEXP (x, 0), new, 0);
6485 return x;
6486 }
6487
6488 case REG:
6489 i = REG_QTY (REGNO (x));
6490
6491 /* Return a constant or a constant register. */
6492 if (REGNO_QTY_VALID_P (REGNO (x)))
6493 {
6494 struct qty_table_elem *ent = &qty_table[i];
6495
6496 if (ent->const_rtx != NULL_RTX
6497 && (CONSTANT_P (ent->const_rtx)
6498 || GET_CODE (ent->const_rtx) == REG))
6499 {
6500 rtx new = gen_lowpart_if_possible (GET_MODE (x), ent->const_rtx);
6501 if (new)
6502 return new;
6503 }
6504 }
6505
6506 /* Otherwise, canonicalize this register. */
6507 return canon_reg (x, NULL_RTX);
6508
6509 default:
6510 break;
6511 }
6512
6513 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6514 if (fmt[i] == 'e')
6515 validate_change (object, &XEXP (x, i),
6516 cse_process_notes (XEXP (x, i), object), 0);
6517
6518 return x;
6519 }
6520 \f
6521 /* Find common subexpressions between the end test of a loop and the beginning
6522 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
6523
6524 Often we have a loop where an expression in the exit test is used
6525 in the body of the loop. For example "while (*p) *q++ = *p++;".
6526 Because of the way we duplicate the loop exit test in front of the loop,
6527 however, we don't detect that common subexpression. This will be caught
6528 when global cse is implemented, but this is a quite common case.
6529
6530 This function handles the most common cases of these common expressions.
6531 It is called after we have processed the basic block ending with the
6532 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
6533 jumps to a label used only once. */
6534
6535 static void
6536 cse_around_loop (loop_start)
6537 rtx loop_start;
6538 {
6539 rtx insn;
6540 int i;
6541 struct table_elt *p;
6542
6543 /* If the jump at the end of the loop doesn't go to the start, we don't
6544 do anything. */
6545 for (insn = PREV_INSN (loop_start);
6546 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
6547 insn = PREV_INSN (insn))
6548 ;
6549
6550 if (insn == 0
6551 || GET_CODE (insn) != NOTE
6552 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
6553 return;
6554
6555 /* If the last insn of the loop (the end test) was an NE comparison,
6556 we will interpret it as an EQ comparison, since we fell through
6557 the loop. Any equivalences resulting from that comparison are
6558 therefore not valid and must be invalidated. */
6559 if (last_jump_equiv_class)
6560 for (p = last_jump_equiv_class->first_same_value; p;
6561 p = p->next_same_value)
6562 {
6563 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
6564 || (GET_CODE (p->exp) == SUBREG
6565 && GET_CODE (SUBREG_REG (p->exp)) == REG))
6566 invalidate (p->exp, VOIDmode);
6567 else if (GET_CODE (p->exp) == STRICT_LOW_PART
6568 || GET_CODE (p->exp) == ZERO_EXTRACT)
6569 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
6570 }
6571
6572 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
6573 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
6574
6575 The only thing we do with SET_DEST is invalidate entries, so we
6576 can safely process each SET in order. It is slightly less efficient
6577 to do so, but we only want to handle the most common cases.
6578
6579 The gen_move_insn call in cse_set_around_loop may create new pseudos.
6580 These pseudos won't have valid entries in any of the tables indexed
6581 by register number, such as reg_qty. We avoid out-of-range array
6582 accesses by not processing any instructions created after cse started. */
6583
6584 for (insn = NEXT_INSN (loop_start);
6585 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
6586 && INSN_UID (insn) < max_insn_uid
6587 && ! (GET_CODE (insn) == NOTE
6588 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
6589 insn = NEXT_INSN (insn))
6590 {
6591 if (INSN_P (insn)
6592 && (GET_CODE (PATTERN (insn)) == SET
6593 || GET_CODE (PATTERN (insn)) == CLOBBER))
6594 cse_set_around_loop (PATTERN (insn), insn, loop_start);
6595 else if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == PARALLEL)
6596 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6597 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
6598 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
6599 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
6600 loop_start);
6601 }
6602 }
6603 \f
6604 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
6605 since they are done elsewhere. This function is called via note_stores. */
6606
6607 static void
6608 invalidate_skipped_set (dest, set, data)
6609 rtx set;
6610 rtx dest;
6611 void *data ATTRIBUTE_UNUSED;
6612 {
6613 enum rtx_code code = GET_CODE (dest);
6614
6615 if (code == MEM
6616 && ! addr_affects_sp_p (dest) /* If this is not a stack push ... */
6617 /* There are times when an address can appear varying and be a PLUS
6618 during this scan when it would be a fixed address were we to know
6619 the proper equivalences. So invalidate all memory if there is
6620 a BLKmode or nonscalar memory reference or a reference to a
6621 variable address. */
6622 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6623 || cse_rtx_varies_p (XEXP (dest, 0), 0)))
6624 {
6625 invalidate_memory ();
6626 return;
6627 }
6628
6629 if (GET_CODE (set) == CLOBBER
6630 #ifdef HAVE_cc0
6631 || dest == cc0_rtx
6632 #endif
6633 || dest == pc_rtx)
6634 return;
6635
6636 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6637 invalidate (XEXP (dest, 0), GET_MODE (dest));
6638 else if (code == REG || code == SUBREG || code == MEM)
6639 invalidate (dest, VOIDmode);
6640 }
6641
6642 /* Invalidate all insns from START up to the end of the function or the
6643 next label. This called when we wish to CSE around a block that is
6644 conditionally executed. */
6645
6646 static void
6647 invalidate_skipped_block (start)
6648 rtx start;
6649 {
6650 rtx insn;
6651
6652 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
6653 insn = NEXT_INSN (insn))
6654 {
6655 if (! INSN_P (insn))
6656 continue;
6657
6658 if (GET_CODE (insn) == CALL_INSN)
6659 {
6660 if (! CONST_CALL_P (insn))
6661 invalidate_memory ();
6662 invalidate_for_call ();
6663 }
6664
6665 invalidate_from_clobbers (PATTERN (insn));
6666 note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6667 }
6668 }
6669 \f
6670 /* If modifying X will modify the value in *DATA (which is really an
6671 `rtx *'), indicate that fact by setting the pointed to value to
6672 NULL_RTX. */
6673
6674 static void
6675 cse_check_loop_start (x, set, data)
6676 rtx x;
6677 rtx set ATTRIBUTE_UNUSED;
6678 void *data;
6679 {
6680 rtx *cse_check_loop_start_value = (rtx *) data;
6681
6682 if (*cse_check_loop_start_value == NULL_RTX
6683 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
6684 return;
6685
6686 if ((GET_CODE (x) == MEM && GET_CODE (*cse_check_loop_start_value) == MEM)
6687 || reg_overlap_mentioned_p (x, *cse_check_loop_start_value))
6688 *cse_check_loop_start_value = NULL_RTX;
6689 }
6690
6691 /* X is a SET or CLOBBER contained in INSN that was found near the start of
6692 a loop that starts with the label at LOOP_START.
6693
6694 If X is a SET, we see if its SET_SRC is currently in our hash table.
6695 If so, we see if it has a value equal to some register used only in the
6696 loop exit code (as marked by jump.c).
6697
6698 If those two conditions are true, we search backwards from the start of
6699 the loop to see if that same value was loaded into a register that still
6700 retains its value at the start of the loop.
6701
6702 If so, we insert an insn after the load to copy the destination of that
6703 load into the equivalent register and (try to) replace our SET_SRC with that
6704 register.
6705
6706 In any event, we invalidate whatever this SET or CLOBBER modifies. */
6707
6708 static void
6709 cse_set_around_loop (x, insn, loop_start)
6710 rtx x;
6711 rtx insn;
6712 rtx loop_start;
6713 {
6714 struct table_elt *src_elt;
6715
6716 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
6717 are setting PC or CC0 or whose SET_SRC is already a register. */
6718 if (GET_CODE (x) == SET
6719 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
6720 && GET_CODE (SET_SRC (x)) != REG)
6721 {
6722 src_elt = lookup (SET_SRC (x),
6723 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
6724 GET_MODE (SET_DEST (x)));
6725
6726 if (src_elt)
6727 for (src_elt = src_elt->first_same_value; src_elt;
6728 src_elt = src_elt->next_same_value)
6729 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
6730 && COST (src_elt->exp) < COST (SET_SRC (x)))
6731 {
6732 rtx p, set;
6733
6734 /* Look for an insn in front of LOOP_START that sets
6735 something in the desired mode to SET_SRC (x) before we hit
6736 a label or CALL_INSN. */
6737
6738 for (p = prev_nonnote_insn (loop_start);
6739 p && GET_CODE (p) != CALL_INSN
6740 && GET_CODE (p) != CODE_LABEL;
6741 p = prev_nonnote_insn (p))
6742 if ((set = single_set (p)) != 0
6743 && GET_CODE (SET_DEST (set)) == REG
6744 && GET_MODE (SET_DEST (set)) == src_elt->mode
6745 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
6746 {
6747 /* We now have to ensure that nothing between P
6748 and LOOP_START modified anything referenced in
6749 SET_SRC (x). We know that nothing within the loop
6750 can modify it, or we would have invalidated it in
6751 the hash table. */
6752 rtx q;
6753 rtx cse_check_loop_start_value = SET_SRC (x);
6754 for (q = p; q != loop_start; q = NEXT_INSN (q))
6755 if (INSN_P (q))
6756 note_stores (PATTERN (q),
6757 cse_check_loop_start,
6758 &cse_check_loop_start_value);
6759
6760 /* If nothing was changed and we can replace our
6761 SET_SRC, add an insn after P to copy its destination
6762 to what we will be replacing SET_SRC with. */
6763 if (cse_check_loop_start_value
6764 && validate_change (insn, &SET_SRC (x),
6765 src_elt->exp, 0))
6766 {
6767 /* If this creates new pseudos, this is unsafe,
6768 because the regno of new pseudo is unsuitable
6769 to index into reg_qty when cse_insn processes
6770 the new insn. Therefore, if a new pseudo was
6771 created, discard this optimization. */
6772 int nregs = max_reg_num ();
6773 rtx move
6774 = gen_move_insn (src_elt->exp, SET_DEST (set));
6775 if (nregs != max_reg_num ())
6776 {
6777 if (! validate_change (insn, &SET_SRC (x),
6778 SET_SRC (set), 0))
6779 abort ();
6780 }
6781 else
6782 emit_insn_after (move, p);
6783 }
6784 break;
6785 }
6786 }
6787 }
6788
6789 /* Deal with the destination of X affecting the stack pointer. */
6790 addr_affects_sp_p (SET_DEST (x));
6791
6792 /* See comment on similar code in cse_insn for explanation of these
6793 tests. */
6794 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
6795 || GET_CODE (SET_DEST (x)) == MEM)
6796 invalidate (SET_DEST (x), VOIDmode);
6797 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
6798 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
6799 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
6800 }
6801 \f
6802 /* Find the end of INSN's basic block and return its range,
6803 the total number of SETs in all the insns of the block, the last insn of the
6804 block, and the branch path.
6805
6806 The branch path indicates which branches should be followed. If a non-zero
6807 path size is specified, the block should be rescanned and a different set
6808 of branches will be taken. The branch path is only used if
6809 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
6810
6811 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6812 used to describe the block. It is filled in with the information about
6813 the current block. The incoming structure's branch path, if any, is used
6814 to construct the output branch path. */
6815
6816 void
6817 cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
6818 rtx insn;
6819 struct cse_basic_block_data *data;
6820 int follow_jumps;
6821 int after_loop;
6822 int skip_blocks;
6823 {
6824 rtx p = insn, q;
6825 int nsets = 0;
6826 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6827 rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
6828 int path_size = data->path_size;
6829 int path_entry = 0;
6830 int i;
6831
6832 /* Update the previous branch path, if any. If the last branch was
6833 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
6834 shorten the path by one and look at the previous branch. We know that
6835 at least one branch must have been taken if PATH_SIZE is non-zero. */
6836 while (path_size > 0)
6837 {
6838 if (data->path[path_size - 1].status != NOT_TAKEN)
6839 {
6840 data->path[path_size - 1].status = NOT_TAKEN;
6841 break;
6842 }
6843 else
6844 path_size--;
6845 }
6846
6847 /* If the first instruction is marked with QImode, that means we've
6848 already processed this block. Our caller will look at DATA->LAST
6849 to figure out where to go next. We want to return the next block
6850 in the instruction stream, not some branched-to block somewhere
6851 else. We accomplish this by pretending our called forbid us to
6852 follow jumps, or skip blocks. */
6853 if (GET_MODE (insn) == QImode)
6854 follow_jumps = skip_blocks = 0;
6855
6856 /* Scan to end of this basic block. */
6857 while (p && GET_CODE (p) != CODE_LABEL)
6858 {
6859 /* Don't cse out the end of a loop. This makes a difference
6860 only for the unusual loops that always execute at least once;
6861 all other loops have labels there so we will stop in any case.
6862 Cse'ing out the end of the loop is dangerous because it
6863 might cause an invariant expression inside the loop
6864 to be reused after the end of the loop. This would make it
6865 hard to move the expression out of the loop in loop.c,
6866 especially if it is one of several equivalent expressions
6867 and loop.c would like to eliminate it.
6868
6869 If we are running after loop.c has finished, we can ignore
6870 the NOTE_INSN_LOOP_END. */
6871
6872 if (! after_loop && GET_CODE (p) == NOTE
6873 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
6874 break;
6875
6876 /* Don't cse over a call to setjmp; on some machines (eg vax)
6877 the regs restored by the longjmp come from
6878 a later time than the setjmp. */
6879 if (GET_CODE (p) == NOTE
6880 && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
6881 break;
6882
6883 /* A PARALLEL can have lots of SETs in it,
6884 especially if it is really an ASM_OPERANDS. */
6885 if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6886 nsets += XVECLEN (PATTERN (p), 0);
6887 else if (GET_CODE (p) != NOTE)
6888 nsets += 1;
6889
6890 /* Ignore insns made by CSE; they cannot affect the boundaries of
6891 the basic block. */
6892
6893 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6894 high_cuid = INSN_CUID (p);
6895 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6896 low_cuid = INSN_CUID (p);
6897
6898 /* See if this insn is in our branch path. If it is and we are to
6899 take it, do so. */
6900 if (path_entry < path_size && data->path[path_entry].branch == p)
6901 {
6902 if (data->path[path_entry].status != NOT_TAKEN)
6903 p = JUMP_LABEL (p);
6904
6905 /* Point to next entry in path, if any. */
6906 path_entry++;
6907 }
6908
6909 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6910 was specified, we haven't reached our maximum path length, there are
6911 insns following the target of the jump, this is the only use of the
6912 jump label, and the target label is preceded by a BARRIER.
6913
6914 Alternatively, we can follow the jump if it branches around a
6915 block of code and there are no other branches into the block.
6916 In this case invalidate_skipped_block will be called to invalidate any
6917 registers set in the block when following the jump. */
6918
6919 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
6920 && GET_CODE (p) == JUMP_INSN
6921 && GET_CODE (PATTERN (p)) == SET
6922 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6923 && JUMP_LABEL (p) != 0
6924 && LABEL_NUSES (JUMP_LABEL (p)) == 1
6925 && NEXT_INSN (JUMP_LABEL (p)) != 0)
6926 {
6927 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6928 if ((GET_CODE (q) != NOTE
6929 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
6930 || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
6931 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
6932 break;
6933
6934 /* If we ran into a BARRIER, this code is an extension of the
6935 basic block when the branch is taken. */
6936 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
6937 {
6938 /* Don't allow ourself to keep walking around an
6939 always-executed loop. */
6940 if (next_real_insn (q) == next)
6941 {
6942 p = NEXT_INSN (p);
6943 continue;
6944 }
6945
6946 /* Similarly, don't put a branch in our path more than once. */
6947 for (i = 0; i < path_entry; i++)
6948 if (data->path[i].branch == p)
6949 break;
6950
6951 if (i != path_entry)
6952 break;
6953
6954 data->path[path_entry].branch = p;
6955 data->path[path_entry++].status = TAKEN;
6956
6957 /* This branch now ends our path. It was possible that we
6958 didn't see this branch the last time around (when the
6959 insn in front of the target was a JUMP_INSN that was
6960 turned into a no-op). */
6961 path_size = path_entry;
6962
6963 p = JUMP_LABEL (p);
6964 /* Mark block so we won't scan it again later. */
6965 PUT_MODE (NEXT_INSN (p), QImode);
6966 }
6967 /* Detect a branch around a block of code. */
6968 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
6969 {
6970 register rtx tmp;
6971
6972 if (next_real_insn (q) == next)
6973 {
6974 p = NEXT_INSN (p);
6975 continue;
6976 }
6977
6978 for (i = 0; i < path_entry; i++)
6979 if (data->path[i].branch == p)
6980 break;
6981
6982 if (i != path_entry)
6983 break;
6984
6985 /* This is no_labels_between_p (p, q) with an added check for
6986 reaching the end of a function (in case Q precedes P). */
6987 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6988 if (GET_CODE (tmp) == CODE_LABEL)
6989 break;
6990
6991 if (tmp == q)
6992 {
6993 data->path[path_entry].branch = p;
6994 data->path[path_entry++].status = AROUND;
6995
6996 path_size = path_entry;
6997
6998 p = JUMP_LABEL (p);
6999 /* Mark block so we won't scan it again later. */
7000 PUT_MODE (NEXT_INSN (p), QImode);
7001 }
7002 }
7003 }
7004 p = NEXT_INSN (p);
7005 }
7006
7007 data->low_cuid = low_cuid;
7008 data->high_cuid = high_cuid;
7009 data->nsets = nsets;
7010 data->last = p;
7011
7012 /* If all jumps in the path are not taken, set our path length to zero
7013 so a rescan won't be done. */
7014 for (i = path_size - 1; i >= 0; i--)
7015 if (data->path[i].status != NOT_TAKEN)
7016 break;
7017
7018 if (i == -1)
7019 data->path_size = 0;
7020 else
7021 data->path_size = path_size;
7022
7023 /* End the current branch path. */
7024 data->path[path_size].branch = 0;
7025 }
7026 \f
7027 /* Perform cse on the instructions of a function.
7028 F is the first instruction.
7029 NREGS is one plus the highest pseudo-reg number used in the instruction.
7030
7031 AFTER_LOOP is 1 if this is the cse call done after loop optimization
7032 (only if -frerun-cse-after-loop).
7033
7034 Returns 1 if jump_optimize should be redone due to simplifications
7035 in conditional jump instructions. */
7036
7037 int
7038 cse_main (f, nregs, after_loop, file)
7039 rtx f;
7040 int nregs;
7041 int after_loop;
7042 FILE *file;
7043 {
7044 struct cse_basic_block_data val;
7045 register rtx insn = f;
7046 register int i;
7047
7048 cse_jumps_altered = 0;
7049 recorded_label_ref = 0;
7050 constant_pool_entries_cost = 0;
7051 val.path_size = 0;
7052
7053 init_recog ();
7054 init_alias_analysis ();
7055
7056 max_reg = nregs;
7057
7058 max_insn_uid = get_max_uid ();
7059
7060 reg_eqv_table = (struct reg_eqv_elem *)
7061 xmalloc (nregs * sizeof (struct reg_eqv_elem));
7062
7063 #ifdef LOAD_EXTEND_OP
7064
7065 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
7066 and change the code and mode as appropriate. */
7067 memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
7068 #endif
7069
7070 /* Reset the counter indicating how many elements have been made
7071 thus far. */
7072 n_elements_made = 0;
7073
7074 /* Find the largest uid. */
7075
7076 max_uid = get_max_uid ();
7077 uid_cuid = (int *) xcalloc (max_uid + 1, sizeof (int));
7078
7079 /* Compute the mapping from uids to cuids.
7080 CUIDs are numbers assigned to insns, like uids,
7081 except that cuids increase monotonically through the code.
7082 Don't assign cuids to line-number NOTEs, so that the distance in cuids
7083 between two insns is not affected by -g. */
7084
7085 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
7086 {
7087 if (GET_CODE (insn) != NOTE
7088 || NOTE_LINE_NUMBER (insn) < 0)
7089 INSN_CUID (insn) = ++i;
7090 else
7091 /* Give a line number note the same cuid as preceding insn. */
7092 INSN_CUID (insn) = i;
7093 }
7094
7095 /* Initialize which registers are clobbered by calls. */
7096
7097 CLEAR_HARD_REG_SET (regs_invalidated_by_call);
7098
7099 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7100 if ((call_used_regs[i]
7101 /* Used to check !fixed_regs[i] here, but that isn't safe;
7102 fixed regs are still call-clobbered, and sched can get
7103 confused if they can "live across calls".
7104
7105 The frame pointer is always preserved across calls. The arg
7106 pointer is if it is fixed. The stack pointer usually is, unless
7107 RETURN_POPS_ARGS, in which case an explicit CLOBBER
7108 will be present. If we are generating PIC code, the PIC offset
7109 table register is preserved across calls. */
7110
7111 && i != STACK_POINTER_REGNUM
7112 && i != FRAME_POINTER_REGNUM
7113 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
7114 && i != HARD_FRAME_POINTER_REGNUM
7115 #endif
7116 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
7117 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
7118 #endif
7119 #if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
7120 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
7121 #endif
7122 )
7123 || global_regs[i])
7124 SET_HARD_REG_BIT (regs_invalidated_by_call, i);
7125
7126 ggc_push_context ();
7127
7128 /* Loop over basic blocks.
7129 Compute the maximum number of qty's needed for each basic block
7130 (which is 2 for each SET). */
7131 insn = f;
7132 while (insn)
7133 {
7134 cse_altered = 0;
7135 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
7136 flag_cse_skip_blocks);
7137
7138 /* If this basic block was already processed or has no sets, skip it. */
7139 if (val.nsets == 0 || GET_MODE (insn) == QImode)
7140 {
7141 PUT_MODE (insn, VOIDmode);
7142 insn = (val.last ? NEXT_INSN (val.last) : 0);
7143 val.path_size = 0;
7144 continue;
7145 }
7146
7147 cse_basic_block_start = val.low_cuid;
7148 cse_basic_block_end = val.high_cuid;
7149 max_qty = val.nsets * 2;
7150
7151 if (file)
7152 fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
7153 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
7154 val.nsets);
7155
7156 /* Make MAX_QTY bigger to give us room to optimize
7157 past the end of this basic block, if that should prove useful. */
7158 if (max_qty < 500)
7159 max_qty = 500;
7160
7161 max_qty += max_reg;
7162
7163 /* If this basic block is being extended by following certain jumps,
7164 (see `cse_end_of_basic_block'), we reprocess the code from the start.
7165 Otherwise, we start after this basic block. */
7166 if (val.path_size > 0)
7167 cse_basic_block (insn, val.last, val.path, 0);
7168 else
7169 {
7170 int old_cse_jumps_altered = cse_jumps_altered;
7171 rtx temp;
7172
7173 /* When cse changes a conditional jump to an unconditional
7174 jump, we want to reprocess the block, since it will give
7175 us a new branch path to investigate. */
7176 cse_jumps_altered = 0;
7177 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
7178 if (cse_jumps_altered == 0
7179 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7180 insn = temp;
7181
7182 cse_jumps_altered |= old_cse_jumps_altered;
7183 }
7184
7185 if (cse_altered)
7186 ggc_collect ();
7187
7188 #ifdef USE_C_ALLOCA
7189 alloca (0);
7190 #endif
7191 }
7192
7193 ggc_pop_context ();
7194
7195 if (max_elements_made < n_elements_made)
7196 max_elements_made = n_elements_made;
7197
7198 /* Clean up. */
7199 end_alias_analysis ();
7200 free (uid_cuid);
7201 free (reg_eqv_table);
7202
7203 return cse_jumps_altered || recorded_label_ref;
7204 }
7205
7206 /* Process a single basic block. FROM and TO and the limits of the basic
7207 block. NEXT_BRANCH points to the branch path when following jumps or
7208 a null path when not following jumps.
7209
7210 AROUND_LOOP is non-zero if we are to try to cse around to the start of a
7211 loop. This is true when we are being called for the last time on a
7212 block and this CSE pass is before loop.c. */
7213
7214 static rtx
7215 cse_basic_block (from, to, next_branch, around_loop)
7216 register rtx from, to;
7217 struct branch_path *next_branch;
7218 int around_loop;
7219 {
7220 register rtx insn;
7221 int to_usage = 0;
7222 rtx libcall_insn = NULL_RTX;
7223 int num_insns = 0;
7224
7225 /* This array is undefined before max_reg, so only allocate
7226 the space actually needed and adjust the start. */
7227
7228 qty_table
7229 = (struct qty_table_elem *) xmalloc ((max_qty - max_reg)
7230 * sizeof (struct qty_table_elem));
7231 qty_table -= max_reg;
7232
7233 new_basic_block ();
7234
7235 /* TO might be a label. If so, protect it from being deleted. */
7236 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7237 ++LABEL_NUSES (to);
7238
7239 for (insn = from; insn != to; insn = NEXT_INSN (insn))
7240 {
7241 register enum rtx_code code = GET_CODE (insn);
7242
7243 /* If we have processed 1,000 insns, flush the hash table to
7244 avoid extreme quadratic behavior. We must not include NOTEs
7245 in the count since there may be more of them when generating
7246 debugging information. If we clear the table at different
7247 times, code generated with -g -O might be different than code
7248 generated with -O but not -g.
7249
7250 ??? This is a real kludge and needs to be done some other way.
7251 Perhaps for 2.9. */
7252 if (code != NOTE && num_insns++ > 1000)
7253 {
7254 flush_hash_table ();
7255 num_insns = 0;
7256 }
7257
7258 /* See if this is a branch that is part of the path. If so, and it is
7259 to be taken, do so. */
7260 if (next_branch->branch == insn)
7261 {
7262 enum taken status = next_branch++->status;
7263 if (status != NOT_TAKEN)
7264 {
7265 if (status == TAKEN)
7266 record_jump_equiv (insn, 1);
7267 else
7268 invalidate_skipped_block (NEXT_INSN (insn));
7269
7270 /* Set the last insn as the jump insn; it doesn't affect cc0.
7271 Then follow this branch. */
7272 #ifdef HAVE_cc0
7273 prev_insn_cc0 = 0;
7274 #endif
7275 prev_insn = insn;
7276 insn = JUMP_LABEL (insn);
7277 continue;
7278 }
7279 }
7280
7281 if (GET_MODE (insn) == QImode)
7282 PUT_MODE (insn, VOIDmode);
7283
7284 if (GET_RTX_CLASS (code) == 'i')
7285 {
7286 rtx p;
7287
7288 /* Process notes first so we have all notes in canonical forms when
7289 looking for duplicate operations. */
7290
7291 if (REG_NOTES (insn))
7292 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
7293
7294 /* Track when we are inside in LIBCALL block. Inside such a block,
7295 we do not want to record destinations. The last insn of a
7296 LIBCALL block is not considered to be part of the block, since
7297 its destination is the result of the block and hence should be
7298 recorded. */
7299
7300 if (REG_NOTES (insn) != 0)
7301 {
7302 if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
7303 libcall_insn = XEXP (p, 0);
7304 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7305 libcall_insn = 0;
7306 }
7307
7308 cse_insn (insn, libcall_insn);
7309 }
7310
7311 /* If INSN is now an unconditional jump, skip to the end of our
7312 basic block by pretending that we just did the last insn in the
7313 basic block. If we are jumping to the end of our block, show
7314 that we can have one usage of TO. */
7315
7316 if (any_uncondjump_p (insn))
7317 {
7318 if (to == 0)
7319 {
7320 free (qty_table + max_reg);
7321 return 0;
7322 }
7323
7324 if (JUMP_LABEL (insn) == to)
7325 to_usage = 1;
7326
7327 /* Maybe TO was deleted because the jump is unconditional.
7328 If so, there is nothing left in this basic block. */
7329 /* ??? Perhaps it would be smarter to set TO
7330 to whatever follows this insn,
7331 and pretend the basic block had always ended here. */
7332 if (INSN_DELETED_P (to))
7333 break;
7334
7335 insn = PREV_INSN (to);
7336 }
7337
7338 /* See if it is ok to keep on going past the label
7339 which used to end our basic block. Remember that we incremented
7340 the count of that label, so we decrement it here. If we made
7341 a jump unconditional, TO_USAGE will be one; in that case, we don't
7342 want to count the use in that jump. */
7343
7344 if (to != 0 && NEXT_INSN (insn) == to
7345 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
7346 {
7347 struct cse_basic_block_data val;
7348 rtx prev;
7349
7350 insn = NEXT_INSN (to);
7351
7352 /* If TO was the last insn in the function, we are done. */
7353 if (insn == 0)
7354 {
7355 free (qty_table + max_reg);
7356 return 0;
7357 }
7358
7359 /* If TO was preceded by a BARRIER we are done with this block
7360 because it has no continuation. */
7361 prev = prev_nonnote_insn (to);
7362 if (prev && GET_CODE (prev) == BARRIER)
7363 {
7364 free (qty_table + max_reg);
7365 return insn;
7366 }
7367
7368 /* Find the end of the following block. Note that we won't be
7369 following branches in this case. */
7370 to_usage = 0;
7371 val.path_size = 0;
7372 cse_end_of_basic_block (insn, &val, 0, 0, 0);
7373
7374 /* If the tables we allocated have enough space left
7375 to handle all the SETs in the next basic block,
7376 continue through it. Otherwise, return,
7377 and that block will be scanned individually. */
7378 if (val.nsets * 2 + next_qty > max_qty)
7379 break;
7380
7381 cse_basic_block_start = val.low_cuid;
7382 cse_basic_block_end = val.high_cuid;
7383 to = val.last;
7384
7385 /* Prevent TO from being deleted if it is a label. */
7386 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7387 ++LABEL_NUSES (to);
7388
7389 /* Back up so we process the first insn in the extension. */
7390 insn = PREV_INSN (insn);
7391 }
7392 }
7393
7394 if (next_qty > max_qty)
7395 abort ();
7396
7397 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
7398 the previous insn is the only insn that branches to the head of a loop,
7399 we can cse into the loop. Don't do this if we changed the jump
7400 structure of a loop unless we aren't going to be following jumps. */
7401
7402 if ((cse_jumps_altered == 0
7403 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7404 && around_loop && to != 0
7405 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
7406 && GET_CODE (PREV_INSN (to)) == JUMP_INSN
7407 && JUMP_LABEL (PREV_INSN (to)) != 0
7408 && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
7409 cse_around_loop (JUMP_LABEL (PREV_INSN (to)));
7410
7411 free (qty_table + max_reg);
7412
7413 return to ? NEXT_INSN (to) : 0;
7414 }
7415 \f
7416 /* Count the number of times registers are used (not set) in X.
7417 COUNTS is an array in which we accumulate the count, INCR is how much
7418 we count each register usage.
7419
7420 Don't count a usage of DEST, which is the SET_DEST of a SET which
7421 contains X in its SET_SRC. This is because such a SET does not
7422 modify the liveness of DEST. */
7423
7424 static void
7425 count_reg_usage (x, counts, dest, incr)
7426 rtx x;
7427 int *counts;
7428 rtx dest;
7429 int incr;
7430 {
7431 enum rtx_code code;
7432 const char *fmt;
7433 int i, j;
7434
7435 if (x == 0)
7436 return;
7437
7438 switch (code = GET_CODE (x))
7439 {
7440 case REG:
7441 if (x != dest)
7442 counts[REGNO (x)] += incr;
7443 return;
7444
7445 case PC:
7446 case CC0:
7447 case CONST:
7448 case CONST_INT:
7449 case CONST_DOUBLE:
7450 case SYMBOL_REF:
7451 case LABEL_REF:
7452 return;
7453
7454 case CLOBBER:
7455 /* If we are clobbering a MEM, mark any registers inside the address
7456 as being used. */
7457 if (GET_CODE (XEXP (x, 0)) == MEM)
7458 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
7459 return;
7460
7461 case SET:
7462 /* Unless we are setting a REG, count everything in SET_DEST. */
7463 if (GET_CODE (SET_DEST (x)) != REG)
7464 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
7465
7466 /* If SRC has side-effects, then we can't delete this insn, so the
7467 usage of SET_DEST inside SRC counts.
7468
7469 ??? Strictly-speaking, we might be preserving this insn
7470 because some other SET has side-effects, but that's hard
7471 to do and can't happen now. */
7472 count_reg_usage (SET_SRC (x), counts,
7473 side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
7474 incr);
7475 return;
7476
7477 case CALL_INSN:
7478 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
7479 /* Fall through. */
7480
7481 case INSN:
7482 case JUMP_INSN:
7483 count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);
7484
7485 /* Things used in a REG_EQUAL note aren't dead since loop may try to
7486 use them. */
7487
7488 count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
7489 return;
7490
7491 case EXPR_LIST:
7492 case INSN_LIST:
7493 if (REG_NOTE_KIND (x) == REG_EQUAL
7494 || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE))
7495 count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
7496 count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
7497 return;
7498
7499 default:
7500 break;
7501 }
7502
7503 fmt = GET_RTX_FORMAT (code);
7504 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7505 {
7506 if (fmt[i] == 'e')
7507 count_reg_usage (XEXP (x, i), counts, dest, incr);
7508 else if (fmt[i] == 'E')
7509 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7510 count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
7511 }
7512 }
7513 \f
7514 /* Scan all the insns and delete any that are dead; i.e., they store a register
7515 that is never used or they copy a register to itself.
7516
7517 This is used to remove insns made obviously dead by cse, loop or other
7518 optimizations. It improves the heuristics in loop since it won't try to
7519 move dead invariants out of loops or make givs for dead quantities. The
7520 remaining passes of the compilation are also sped up. */
7521
7522 void
7523 delete_trivially_dead_insns (insns, nreg)
7524 rtx insns;
7525 int nreg;
7526 {
7527 int *counts;
7528 rtx insn, prev;
7529 #ifdef HAVE_cc0
7530 rtx tem;
7531 #endif
7532 int i;
7533 int in_libcall = 0, dead_libcall = 0;
7534
7535 /* First count the number of times each register is used. */
7536 counts = (int *) xcalloc (nreg, sizeof (int));
7537 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
7538 count_reg_usage (insn, counts, NULL_RTX, 1);
7539
7540 /* Go from the last insn to the first and delete insns that only set unused
7541 registers or copy a register to itself. As we delete an insn, remove
7542 usage counts for registers it uses.
7543
7544 The first jump optimization pass may leave a real insn as the last
7545 insn in the function. We must not skip that insn or we may end
7546 up deleting code that is not really dead. */
7547 insn = get_last_insn ();
7548 if (! INSN_P (insn))
7549 insn = prev_real_insn (insn);
7550
7551 for (; insn; insn = prev)
7552 {
7553 int live_insn = 0;
7554 rtx note;
7555
7556 prev = prev_real_insn (insn);
7557
7558 /* Don't delete any insns that are part of a libcall block unless
7559 we can delete the whole libcall block.
7560
7561 Flow or loop might get confused if we did that. Remember
7562 that we are scanning backwards. */
7563 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7564 {
7565 in_libcall = 1;
7566 live_insn = 1;
7567 dead_libcall = 0;
7568
7569 /* See if there's a REG_EQUAL note on this insn and try to
7570 replace the source with the REG_EQUAL expression.
7571
7572 We assume that insns with REG_RETVALs can only be reg->reg
7573 copies at this point. */
7574 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7575 if (note)
7576 {
7577 rtx set = single_set (insn);
7578 rtx new = simplify_rtx (XEXP (note, 0));
7579
7580 if (!new)
7581 new = XEXP (note, 0);
7582
7583 if (set && validate_change (insn, &SET_SRC (set), new, 0))
7584 {
7585 remove_note (insn,
7586 find_reg_note (insn, REG_RETVAL, NULL_RTX));
7587 dead_libcall = 1;
7588 }
7589 }
7590 }
7591 else if (in_libcall)
7592 live_insn = ! dead_libcall;
7593 else if (GET_CODE (PATTERN (insn)) == SET)
7594 {
7595 if ((GET_CODE (SET_DEST (PATTERN (insn))) == REG
7596 || GET_CODE (SET_DEST (PATTERN (insn))) == SUBREG)
7597 && rtx_equal_p (SET_DEST (PATTERN (insn)),
7598 SET_SRC (PATTERN (insn))))
7599 ;
7600 else if (GET_CODE (SET_DEST (PATTERN (insn))) == STRICT_LOW_PART
7601 && rtx_equal_p (XEXP (SET_DEST (PATTERN (insn)), 0),
7602 SET_SRC (PATTERN (insn))))
7603 ;
7604
7605 #ifdef HAVE_cc0
7606 else if (GET_CODE (SET_DEST (PATTERN (insn))) == CC0
7607 && ! side_effects_p (SET_SRC (PATTERN (insn)))
7608 && ((tem = next_nonnote_insn (insn)) == 0
7609 || ! INSN_P (tem)
7610 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
7611 ;
7612 #endif
7613 else if (GET_CODE (SET_DEST (PATTERN (insn))) != REG
7614 || REGNO (SET_DEST (PATTERN (insn))) < FIRST_PSEUDO_REGISTER
7615 || counts[REGNO (SET_DEST (PATTERN (insn)))] != 0
7616 || side_effects_p (SET_SRC (PATTERN (insn)))
7617 /* An ADDRESSOF expression can turn into a use of the
7618 internal arg pointer, so always consider the
7619 internal arg pointer live. If it is truly dead,
7620 flow will delete the initializing insn. */
7621 || (SET_DEST (PATTERN (insn))
7622 == current_function_internal_arg_pointer))
7623 live_insn = 1;
7624 }
7625 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7626 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7627 {
7628 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7629
7630 if (GET_CODE (elt) == SET)
7631 {
7632 if ((GET_CODE (SET_DEST (elt)) == REG
7633 || GET_CODE (SET_DEST (elt)) == SUBREG)
7634 && rtx_equal_p (SET_DEST (elt), SET_SRC (elt)))
7635 ;
7636
7637 #ifdef HAVE_cc0
7638 else if (GET_CODE (SET_DEST (elt)) == CC0
7639 && ! side_effects_p (SET_SRC (elt))
7640 && ((tem = next_nonnote_insn (insn)) == 0
7641 || ! INSN_P (tem)
7642 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
7643 ;
7644 #endif
7645 else if (GET_CODE (SET_DEST (elt)) != REG
7646 || REGNO (SET_DEST (elt)) < FIRST_PSEUDO_REGISTER
7647 || counts[REGNO (SET_DEST (elt))] != 0
7648 || side_effects_p (SET_SRC (elt))
7649 /* An ADDRESSOF expression can turn into a use of the
7650 internal arg pointer, so always consider the
7651 internal arg pointer live. If it is truly dead,
7652 flow will delete the initializing insn. */
7653 || (SET_DEST (elt)
7654 == current_function_internal_arg_pointer))
7655 live_insn = 1;
7656 }
7657 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7658 live_insn = 1;
7659 }
7660 else
7661 live_insn = 1;
7662
7663 /* If this is a dead insn, delete it and show registers in it aren't
7664 being used. */
7665
7666 if (! live_insn)
7667 {
7668 count_reg_usage (insn, counts, NULL_RTX, -1);
7669 delete_insn (insn);
7670 }
7671
7672 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7673 {
7674 in_libcall = 0;
7675 dead_libcall = 0;
7676 }
7677 }
7678
7679 /* Clean up. */
7680 free (counts);
7681 }