/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass
*/
/* References searched while implementing this.

   Compilers Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   communications of the acm, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2. Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The power of assignment motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global code motion / global value numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "basic-block.h"
#include "function.h"
/* Propagate flow information through back edges and thus enable PRE's
   moving loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */
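
/* For example (a source-level sketch, not taken from the sources):

     before:                    after PRE with back edges followed:
       while (p)                  t = a + b;
         {                        while (p)
           x = a + b;               {
           ...                        x = t;
         }                            ...
                                    }

   The computation a + b is available along the back edge after the first
   iteration, so it is partially redundant at the loop head and can be
   hoisted.  */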
/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).

   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.
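
   A hypothetical source-level illustration (a sketch, not taken from the
   sources) of one GCSE pass plus the cleanup copy propagation pass:

     before:                    after GCSE + copy propagation:
       x = a + b;                 x = a + b;
       ...                        ...
       y = a + b;                 y = x;

   The second computation of a + b is replaced by a copy from a register
   already known to hold that value.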
   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The parameter
   max-gcse-passes can be modified if one wants to experiment.
   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.
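
   A hypothetical illustration of steps 3-5 on a diamond-shaped CFG, where
   a + b is computed on only one incoming path:

     before:                    after PRE:
       if (c)                     if (c)
         x = a + b;                 { t = a + b; x = t; }
       else                       else
         ;                          t = a + b;
       y = a + b;                 y = t;

   The redundant computation at y is deleted and replaced by the new
   pseudo t (step 3), a copy of a + b into t is inserted on the path that
   lacked it (step 4), and the surviving computation is copied into t so
   that its value reaches the former redundancy (step 5).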
   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing register.

   **********************

   A fair bit of simplicity is created by creating small functions for simple
   tasks, even when the function is only called in one place.  This may
   measurably slow things down [or may not] by creating more function call
   overhead than is necessary.  The source is laid out so that it's trivial
   to make the affected functions inline so that one can measure what speed
   up, if any, can be achieved, and maybe later when things settle things can
   be rearranged.

   Help stamp out big monolithic functions!  */
/* GCSE global vars.  */

static FILE *gcse_file;

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */

static int run_jump_opt_after_gcse;

/* Bitmaps are normally not included in debugging dumps.
   However it's useful to be able to print them from GDB.
   We could create special functions for this, but it's simpler to
   just allow passing stderr to the dump_foo fns.  Since stderr can
   be a macro, we store a copy here.  */

static FILE *debug_stderr;

/* An obstack for our working variables.  */

static struct obstack gcse_obstack;
struct reg_use {rtx reg_rtx; };
/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};
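
/* A sketch of how a bucket's chain is searched (this exact pattern appears
   in insert_expr_in_table below):

     for (cur_expr = table->table[hash]; cur_expr != NULL;
          cur_expr = cur_expr->next_same_hash)
       if (expr_equiv_p (cur_expr->expr, x))
         ... X is already recorded ...  */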
/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Nonzero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};
/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

struct hash_table
{
  /* The table itself.
     This is an array of `expr_hash_table_size' elements.  */
  struct expr **table;

  /* Size of the hash table, in elements.  */
  unsigned int size;

  /* Number of hash table elements.  */
  unsigned int n_elems;

  /* Whether the table is the expression hash table or the copy propagation
     (set) hash table.  */
  int set_p;
};

/* Expression hash table.  */
static struct hash_table expr_hash_table;

/* Copy propagation hash table.  */
static struct hash_table set_hash_table;
/* Mapping of uids to cuids.
   Only real insns get cuids.  */

static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */

#ifdef ENABLE_CHECKING
#define INSN_CUID(INSN) \
  (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
#else
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
#endif

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */

static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])
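
/* Since cuids increase monotonically over real insns and have no gaps,
   comparing them gives a cheap "comes earlier" test; e.g. (a sketch):

     if (INSN_CUID (a) < INSN_CUID (b))
       ... insn A appears before insn B in the insn stream ...  */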
/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static unsigned int max_gcse_regno;
/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps (num-bbs x
   num-regs) [however perhaps it may be useful to keep the data as is].  One
   advantage of recording things this way is that `reg_set_table' is fairly
   sparse with respect to pseudo regs but for hard regs could be fairly dense
   [relatively speaking].  And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */
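
/* Illustration (a sketch of the intended access pattern, not a function in
   this file): to visit every place pseudo REGNO is set, walk its list
   instead of scanning all basic blocks:

     struct reg_set *r;
     for (r = reg_set_table[regno]; r != NULL; r = r->next)
       ... r->insn sets REGNO; its block is not transparent for REGNO ...  */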
typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The insn where it was set.  */
  rtx insn;
} reg_set;

static reg_set **reg_set_table;

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100
/* This is a list of expressions which are MEMs and will be used by load
   or store motion.
   Load motion tracks MEMs which aren't killed by
   anything except itself.  (i.e., loads and stores to a single location).
   We can then allow movement of these MEM refs with a little special
   allowance.  (all stores copy the same value to the reaching reg used
   for the loads).  This means all values used to store into memory must have
   no side effects so we can re-issue the setter value.
   Store Motion uses this structure as an expression table to track stores
   which look interesting, and might be moveable towards the exit block.  */
struct ls_expr
{
  struct expr * expr;           /* Gcse expression reference for LM.  */
  rtx pattern;                  /* Pattern of this mem.  */
  rtx pattern_regs;             /* List of registers mentioned by the mem.  */
  rtx loads;                    /* INSN list of loads seen.  */
  rtx stores;                   /* INSN list of stores seen.  */
  struct ls_expr * next;        /* Next in the list.  */
  int invalid;                  /* Invalid for some reason.  */
  int index;                    /* If it maps to a bitmap index.  */
  unsigned int hash_index;      /* Index when in a hash table.  */
  rtx reaching_reg;             /* Register to use when re-writing.  */
};
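
/* A sketch of how this list is typically walked, using the iterator
   functions declared below:

     struct ls_expr *ptr;
     for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
       ... examine ptr->pattern, ptr->loads and ptr->stores ...  */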
/* Array of implicit set patterns indexed by basic block index.  */
static rtx *implicit_sets;

/* Head of the list of load/store memory refs.  */
static struct ls_expr * pre_ldst_mems = NULL;
/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static regset reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by expr_killed_p and compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* Array, indexed by basic block number, of lists of insns which modify
   memory within the block.  */
static rtx * modify_mem_list;
bitmap modify_mem_list_set;

/* This array parallels modify_mem_list, but is kept canonicalized.  */
static rtx * canon_modify_mem_list;
bitmap canon_modify_mem_list_set;
/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of constants propagated.  */
static int const_prop_count;
/* Number of copies propagated.  */
static int copy_prop_count;
/* These variables are used by classic GCSE.
   Normally they'd be defined a bit later, but `rd_gen' needs to
   be declared sooner.  */

/* Each block has a bitmap of each type.
   The length of each block's bitmap is:

       max_cuid  - for reaching definitions
       n_exprs   - for available expressions

   Thus we view the bitmaps as 2 dimensional arrays, i.e.:
   rd_kill[block_num][cuid_num]
   ae_kill[block_num][expr_num]  */
/* For reaching defs */
static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;

/* for available exprs */
static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;
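
/* With the two-dimensional view above, a local property query is a single
   bit test; e.g. (a sketch):

     if (TEST_BIT (ae_kill[bb->index], expr->bitmap_index))
       ... expression EXPR is killed somewhere in block BB ...  */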
/* Objects of this type are passed around by the null-pointer check
   removal routines.  */
struct null_pointer_info
{
  /* The basic block being processed.  */
  basic_block current_block;
  /* The first register to be handled in this pass.  */
  unsigned int min_reg;
  /* One greater than the last register to be handled in this pass.  */
  unsigned int max_reg;
  sbitmap *nonnull_local;
  sbitmap *nonnull_killed;
};
static void compute_can_copy (void);
static void *gmalloc (size_t) ATTRIBUTE_MALLOC;
static void *gcalloc (size_t, size_t) ATTRIBUTE_MALLOC;
static void *grealloc (void *, size_t);
static void *gcse_alloc (unsigned long);
static void alloc_gcse_mem (rtx);
static void free_gcse_mem (void);
static void alloc_reg_set_mem (int);
static void free_reg_set_mem (void);
static int get_bitmap_width (int, int, int);
static void record_one_set (int, rtx);
static void replace_one_set (int, rtx, rtx);
static void record_set_info (rtx, rtx, void *);
static void compute_sets (rtx);
static void hash_scan_insn (rtx, struct hash_table *, int);
static void hash_scan_set (rtx, rtx, struct hash_table *);
static void hash_scan_clobber (rtx, rtx, struct hash_table *);
static void hash_scan_call (rtx, rtx, struct hash_table *);
static int want_to_gcse_p (rtx);
static bool gcse_constant_p (rtx);
static int oprs_unchanged_p (rtx, rtx, int);
static int oprs_anticipatable_p (rtx, rtx);
static int oprs_available_p (rtx, rtx);
static void insert_expr_in_table (rtx, enum machine_mode, rtx, int, int,
                                  struct hash_table *);
static void insert_set_in_table (rtx, rtx, struct hash_table *);
static unsigned int hash_expr (rtx, enum machine_mode, int *, int);
static unsigned int hash_expr_1 (rtx, enum machine_mode, int *);
static unsigned int hash_string_1 (const char *);
static unsigned int hash_set (int, int);
static int expr_equiv_p (rtx, rtx);
static void record_last_reg_set_info (rtx, int);
static void record_last_mem_set_info (rtx);
static void record_last_set_info (rtx, rtx, void *);
static void compute_hash_table (struct hash_table *);
static void alloc_hash_table (int, struct hash_table *, int);
static void free_hash_table (struct hash_table *);
static void compute_hash_table_work (struct hash_table *);
static void dump_hash_table (FILE *, const char *, struct hash_table *);
static struct expr *lookup_expr (rtx, struct hash_table *);
static struct expr *lookup_set (unsigned int, struct hash_table *);
static struct expr *next_set (unsigned int, struct expr *);
static void reset_opr_set_tables (void);
static int oprs_not_set_p (rtx, rtx);
static void mark_call (rtx);
static void mark_set (rtx, rtx);
static void mark_clobber (rtx, rtx);
static void mark_oprs_set (rtx);
static void alloc_cprop_mem (int, int);
static void free_cprop_mem (void);
static void compute_transp (rtx, int, sbitmap *, int);
static void compute_transpout (void);
static void compute_local_properties (sbitmap *, sbitmap *, sbitmap *,
                                      struct hash_table *);
static void compute_cprop_data (void);
static void find_used_regs (rtx *, void *);
static int try_replace_reg (rtx, rtx, rtx);
static struct expr *find_avail_set (int, rtx);
static int cprop_jump (basic_block, rtx, rtx, rtx, rtx);
static void mems_conflict_for_gcse_p (rtx, rtx, void *);
static int load_killed_in_block_p (basic_block, int, rtx, int);
static void canon_list_insert (rtx, rtx, void *);
static int cprop_insn (rtx, int);
static int cprop (int);
static void find_implicit_sets (void);
static int one_cprop_pass (int, int, int);
static bool constprop_register (rtx, rtx, rtx, int);
static struct expr *find_bypass_set (int, int);
static bool reg_killed_on_edge (rtx, edge);
static int bypass_block (basic_block, rtx, rtx);
static int bypass_conditional_jumps (void);
static void alloc_pre_mem (int, int);
static void free_pre_mem (void);
static void compute_pre_data (void);
static int pre_expr_reaches_here_p (basic_block, struct expr *,
                                    basic_block);
static void insert_insn_end_bb (struct expr *, basic_block, int);
static void pre_insert_copy_insn (struct expr *, rtx);
static void pre_insert_copies (void);
static int pre_delete (void);
static int pre_gcse (void);
static int one_pre_gcse_pass (int);
static void add_label_notes (rtx, rtx);
static void alloc_code_hoist_mem (int, int);
static void free_code_hoist_mem (void);
static void compute_code_hoist_vbeinout (void);
static void compute_code_hoist_data (void);
static int hoist_expr_reaches_here_p (basic_block, int, basic_block, char *);
static void hoist_code (void);
static int one_code_hoisting_pass (void);
static void alloc_rd_mem (int, int);
static void free_rd_mem (void);
static void handle_rd_kill_set (rtx, int, basic_block);
static void compute_kill_rd (void);
static void compute_rd (void);
static void alloc_avail_expr_mem (int, int);
static void free_avail_expr_mem (void);
static void compute_ae_gen (struct hash_table *);
static int expr_killed_p (rtx, basic_block);
static void compute_ae_kill (sbitmap *, sbitmap *, struct hash_table *);
static int expr_reaches_here_p (struct occr *, struct expr *, basic_block,
                                int);
static rtx computing_insn (struct expr *, rtx);
static int def_reaches_here_p (rtx, rtx);
static int can_disregard_other_sets (struct reg_set **, rtx, int);
static int handle_avail_expr (rtx, struct expr *);
static int classic_gcse (void);
static int one_classic_gcse_pass (int);
static void invalidate_nonnull_info (rtx, rtx, void *);
static int delete_null_pointer_checks_1 (unsigned int *, sbitmap *, sbitmap *,
                                         struct null_pointer_info *);
static rtx process_insert_insn (struct expr *);
static int pre_edge_insert (struct edge_list *, struct expr **);
static int expr_reaches_here_p_work (struct occr *, struct expr *,
                                     basic_block, int, char *);
static int pre_expr_reaches_here_p_work (basic_block, struct expr *,
                                         basic_block, char *);
static struct ls_expr * ldst_entry (rtx);
static void free_ldst_entry (struct ls_expr *);
static void free_ldst_mems (void);
static void print_ldst_list (FILE *);
static struct ls_expr * find_rtx_in_ldst (rtx);
static int enumerate_ldsts (void);
static inline struct ls_expr * first_ls_expr (void);
static inline struct ls_expr * next_ls_expr (struct ls_expr *);
static int simple_mem (rtx);
static void invalidate_any_buried_refs (rtx);
static void compute_ld_motion_mems (void);
static void trim_ld_motion_mems (void);
static void update_ld_motion_stores (struct expr *);
static void reg_set_info (rtx, rtx, void *);
static void reg_clear_last_set (rtx, rtx, void *);
static bool store_ops_ok (rtx, int *);
static rtx extract_mentioned_regs (rtx);
static rtx extract_mentioned_regs_helper (rtx, rtx);
static void find_moveable_store (rtx, int *, int *);
static int compute_store_table (void);
static bool load_kills_store (rtx, rtx, int);
static bool find_loads (rtx, rtx, int);
static bool store_killed_in_insn (rtx, rtx, rtx, int);
static bool store_killed_after (rtx, rtx, rtx, basic_block, int *, rtx *);
static bool store_killed_before (rtx, rtx, rtx, basic_block, int *);
static void build_store_vectors (void);
static void insert_insn_start_bb (rtx, basic_block);
static int insert_store (struct ls_expr *, edge);
static void remove_reachable_equiv_notes (basic_block, struct ls_expr *);
static void replace_store_insn (rtx, rtx, basic_block, struct ls_expr *);
static void delete_store (struct ls_expr *, basic_block);
static void free_store_memory (void);
static void store_motion (void);
static void free_insn_expr_list_list (rtx *);
static void clear_modify_mem_tables (void);
static void free_modify_mem_tables (void);
static rtx gcse_emit_move_after (rtx, rtx, rtx);
static void local_cprop_find_used_regs (rtx *, void *);
static bool do_local_cprop (rtx, rtx, int, rtx *);
static bool adjust_libcall_notes (rtx, rtx, rtx, rtx *);
static void local_cprop_pass (int);
static bool is_too_expensive (const char *);
/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  */

int
gcse_main (rtx f, FILE *file)
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do, or it is too expensive.  */
  if (n_basic_blocks <= 1 || is_too_expensive (_("GCSE disabled")))
    return 0;

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();
  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (f);

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_GCSE_PASSES)
    {
      changed = 0;
      if (file)
        fprintf (file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
         and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem (f);

      /* Don't allow constant propagation to modify jumps
         during this pass.  */
      changed = one_cprop_pass (pass + 1, 0, 0);

      if (optimize_size)
        changed |= one_classic_gcse_pass (pass + 1);
      else
        {
          changed |= one_pre_gcse_pass (pass + 1);
          /* We may have just created new basic blocks.  Release and
             recompute various things which are sized on the number of
             basic blocks.  */
          if (changed)
            {
              free_modify_mem_tables ();
              modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
              canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
            }
          free_reg_set_mem ();
          alloc_reg_set_mem (max_reg_num ());
          compute_sets (f);
          run_jump_opt_after_gcse = 1;
        }

      if (max_pass_bytes < bytes_used)
        max_pass_bytes = bytes_used;

      /* Free up memory, then reallocate for code hoisting.  We can
         not re-use the existing allocated memory because the tables
         will not have info for the insns or registers created by
         partial redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
         for code size -- it rarely makes programs faster, and can make
         them bigger if we did partial redundancy elimination (when optimizing
         for space, we use a classic gcse algorithm instead of partial
         redundancy algorithms).  */
      if (optimize_size)
        {
          max_gcse_regno = max_reg_num ();
          alloc_gcse_mem (f);
          changed |= one_code_hoisting_pass ();
          free_gcse_mem ();

          if (max_pass_bytes < bytes_used)
            max_pass_bytes = bytes_used;
        }

      if (file)
        {
          fprintf (file, "\n");
          fflush (file);
        }

      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (f);

  /* This time, go ahead and allow cprop to alter jumps.  */
  one_cprop_pass (pass + 1, 1, 0);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "GCSE of %s: %d basic blocks, ",
               current_function_name (), n_basic_blocks);
      fprintf (file, "%d pass%s, %d bytes\n\n",
               pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();

  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  if (!optimize_size && flag_gcse_sm)
    store_motion ();

  /* Record where pseudo-registers are set.  */
  return run_jump_opt_after_gcse;
}
878 /* Nonzero for each mode that supports (set (reg) (reg)).
879 This is trivially true for integer and floating point values.
880 It may or may not be true for condition codes. */
881 static char can_copy
[(int) NUM_MACHINE_MODES
];
883 /* Compute which modes support reg/reg copy operations. */
886 compute_can_copy (void)
889 #ifndef AVOID_CCMODE_COPIES
892 memset (can_copy
, 0, NUM_MACHINE_MODES
);
895 for (i
= 0; i
< NUM_MACHINE_MODES
; i
++)
896 if (GET_MODE_CLASS (i
) == MODE_CC
)
898 #ifdef AVOID_CCMODE_COPIES
901 reg
= gen_rtx_REG ((enum machine_mode
) i
, LAST_VIRTUAL_REGISTER
+ 1);
902 insn
= emit_insn (gen_rtx_SET (VOIDmode
, reg
, reg
));
903 if (recog (PATTERN (insn
), insn
, NULL
) >= 0)
913 /* Returns whether the mode supports reg/reg copy operations. */
916 can_copy_p (enum machine_mode mode
)
918 static bool can_copy_init_p
= false;
920 if (! can_copy_init_p
)
923 can_copy_init_p
= true;
926 return can_copy
[mode
] != 0;
/* Cover function to xmalloc to record bytes allocated.  */

static void *
gmalloc (size_t size)
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xcalloc to record bytes allocated.  */

static void *
gcalloc (size_t nelem, size_t elsize)
{
  bytes_used += nelem * elsize;
  return xcalloc (nelem, elsize);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static void *
grealloc (void *ptr, size_t size)
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.  */

static void *
gcse_alloc (unsigned long size)
{
  bytes_used += size;
  return obstack_alloc (&gcse_obstack, size);
}
/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (rtx f)
{
  int i;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.  */

  max_uid = get_max_uid ();
  uid_cuid = gcalloc (max_uid + 1, sizeof (int));
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
        uid_cuid[INSN_UID (insn)] = i++;
      else
        uid_cuid[INSN_UID (insn)] = i;
    }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  cuid_insn = gcalloc (max_cuid + 1, sizeof (rtx));
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      CUID_INSN (i++) = insn;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = BITMAP_XMALLOC ();

  /* Allocate vars to track sets of regs, memory per block.  */
  reg_set_in_block = sbitmap_vector_alloc (last_basic_block, max_gcse_regno);
  /* Allocate array to keep a list of insns which modify memory in each
     basic block.  */
  modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  modify_mem_list_set = BITMAP_XMALLOC ();
  canon_modify_mem_list_set = BITMAP_XMALLOC ();
}

/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem (void)
{
  free (uid_cuid);
  free (cuid_insn);

  BITMAP_XFREE (reg_set_bitmap);

  sbitmap_vector_free (reg_set_in_block);
  free_modify_mem_tables ();
  BITMAP_XFREE (modify_mem_list_set);
  BITMAP_XFREE (canon_modify_mem_list_set);
}
/* Many of the global optimization algorithms work by solving dataflow
   equations for various expressions.  Initially, some local value is
   computed for each expression in each block.  Then, the values across the
   various blocks are combined (by following flow graph edges) to arrive at
   global values.  Conceptually, each set of equations is independent.  We
   may therefore solve all the equations in parallel, solve them one at a
   time, or pick any intermediate approach.

   When you're going to need N two-dimensional bitmaps, each X (say, the
   number of blocks) by Y (say, the number of expressions), call this
   function.  It's not important what X and Y represent; only that Y
   correspond to the things that can be done in parallel.  This function will
   return an appropriate chunking factor C; you should solve C sets of
   equations in parallel.  By going through this function, we can easily
   trade space against time; by solving fewer equations in parallel we use
   less memory.  */

static int
get_bitmap_width (int n, int x, int y)
{
  /* It's not really worth figuring out *exactly* how much memory will
     be used by a particular choice.  The important thing is to get
     something approximately right.  */
  size_t max_bitmap_memory = 10 * 1024 * 1024;

  /* The number of bytes we'd use for a single column of minimum
     width.  */
  size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);

  /* Often, it's reasonable just to solve all the equations in
     parallel.  */
  if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
    return y;

  /* Otherwise, pick the largest width we can, without going over the
     limit.  */
  return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
                             / column_size);
}
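
/* A hypothetical usage sketch: a solver needing 4 bitmaps of
   last_basic_block x n_exprs bits would process the expressions in chunks
   of the returned width:

     int width = get_bitmap_width (4, last_basic_block, n_exprs);
     for (first = 0; first < n_exprs; first += width)
       ... solve the equations for expressions [first, first + width) ...  */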
/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and expression would contain the same value if the computation
   was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   TABLE controls which hash table to look at.  If it is the set hash table,
   additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */

static void
compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
                          struct hash_table *table)
{
  unsigned int i;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (table->set_p)
        sbitmap_vector_zero (transp, last_basic_block);
      else
        sbitmap_vector_ones (transp, last_basic_block);
    }

  if (comp)
    sbitmap_vector_zero (comp, last_basic_block);
  if (antloc)
    sbitmap_vector_zero (antloc, last_basic_block);

  for (i = 0; i < table->size; i++)
    {
      struct expr *expr;

      for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
        {
          int indx = expr->bitmap_index;
          struct occr *occr;

          /* The expression is transparent in this block if it is not killed.
             We start by assuming all are transparent [none are killed], and
             then reset the bits for those that are.  */
          if (transp)
            compute_transp (expr->expr, indx, transp, table->set_p);

          /* The occurrences recorded in antic_occr are exactly those that
             we want to set to nonzero in ANTLOC.  */
          if (antloc)
            for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
              {
                SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);

                /* While we're scanning the table, this is a good place to
                   initialize this.  */
                occr->deleted_p = 0;
              }

          /* The occurrences recorded in avail_occr are exactly those that
             we want to set to nonzero in COMP.  */
          if (comp)
            for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
              {
                SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);

                /* While we're scanning the table, this is a good place to
                   initialize this.  */
                occr->copied_p = 0;
              }

          /* While we're scanning the table, this is a good place to
             initialize this.  */
          expr->reaching_reg = 0;
        }
    }
}
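
/* A small illustration of the local properties (not code): in a block
   containing only

     r5 = r1 + r2
     r1 = ...

   the expression r1 + r2 is locally anticipatable (computed first, with
   operands unmodified before it), not locally available (r1 changes before
   the block ends), and not transparent (r1 is set in the block).  */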
/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (int n_regs)
{
  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  reg_set_table = gcalloc (reg_set_table_size, sizeof (struct reg_set *));

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem (void)
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL);
}

/* An OLD_INSN that used to set REGNO was replaced by NEW_INSN.
   Update the corresponding `reg_set_table' entry accordingly.
   We assume that NEW_INSN is not already recorded in reg_set_table[regno].  */

static void
replace_one_set (int regno, rtx old_insn, rtx new_insn)
{
  struct reg_set *reg_info;

  if (regno >= reg_set_table_size)
    return;

  for (reg_info = reg_set_table[regno]; reg_info; reg_info = reg_info->next)
    if (reg_info->insn == old_insn)
      {
        reg_info->insn = new_insn;
        break;
      }
}

/* Record REGNO in the reg_set table.  */

static void
record_one_set (int regno, rtx insn)
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;

      reg_set_table = grealloc (reg_set_table,
                                new_size * sizeof (struct reg_set *));
      memset (reg_set_table + reg_set_table_size, 0,
              (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = obstack_alloc (&reg_set_obstack, sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->insn = insn;
  new_reg_info->next = reg_set_table[regno];
  reg_set_table[regno] = new_reg_info;
}
/* Called from compute_sets via note_stores to handle one SET or CLOBBER in
   an insn.  The DATA is really the instruction in which the SET is
   occurring.  */

static void
record_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx record_set_insn = (rtx) data;

  if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    record_one_set (REGNO (dest), record_set_insn);
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.  See the comments for
   `reg_set_table' for further documentation.  */

static void
compute_sets (rtx f)
{
  rtx insn;

  for (insn = f; insn != 0; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      note_stores (PATTERN (insn), record_set_info, insn);
}
1258 struct reg_avail_info
1260 basic_block last_bb
;
1265 static struct reg_avail_info
*reg_avail_info
;
1266 static basic_block current_bb
;
/* See whether X, the source of a set, is something we want to consider for
   GCSE.  */

static GTY(()) rtx test_insn;

static int
want_to_gcse_p (rtx x)
{
  int num_clobbers = 0;
  int icode;

  switch (GET_CODE (x))
    {
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CALL:
    case CONSTANT_P_RTX:
      return 0;

    default:
      break;
    }

  /* If this is a valid operand, we are OK.  If it's VOIDmode, we aren't.  */
  if (general_operand (x, GET_MODE (x)))
    return 1;
  else if (GET_MODE (x) == VOIDmode)
    return 0;

  /* Otherwise, check if we can make a valid insn from it.  First initialize
     our test insn if we haven't already.  */
  if (test_insn == 0)
    {
      test_insn
        = make_insn_raw (gen_rtx_SET (VOIDmode,
                                      gen_rtx_REG (word_mode,
                                                   FIRST_PSEUDO_REGISTER * 2),
                                      const0_rtx));
      NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
    }

  /* Now make an insn like the one we would make when GCSE'ing and see if
     valid.  */
  PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
  SET_SRC (PATTERN (test_insn)) = x;
  return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
          && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
}
/* Return nonzero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (rtx x, rtx insn, int avail_p)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
        struct reg_avail_info *info = &reg_avail_info[REGNO (x)];

        if (info->last_bb != current_bb)
          return 1;
        if (avail_p)
          return info->last_set < INSN_CUID (insn);
        else
          return info->first_set >= INSN_CUID (insn);
      }

    case MEM:
      if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
                                  x, avail_p))
        return 0;
      else
        return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return 0;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call needed at this
             level, change it into iteration.  This function is called enough
             to be worth it.  */
          if (i == 0)
            return oprs_unchanged_p (XEXP (x, i), insn, avail_p);

          else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
            return 0;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
            return 0;
    }

  return 1;
}
/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  Nonzero if mems_conflict_for_gcse_p finds a
   conflict between two memory references.  */
static int gcse_mems_conflict_p;

/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  A memory reference for a load instruction,
   mems_conflict_for_gcse_p will see if a memory store conflicts with
   this memory load.  */
static rtx gcse_mem_operand;
/* DEST is the output of an instruction.  If it is a memory reference, and
   possibly conflicts with the load found in gcse_mem_operand, then set
   gcse_mems_conflict_p to a nonzero value.  */

static void
mems_conflict_for_gcse_p (rtx dest, rtx setter ATTRIBUTE_UNUSED,
                          void *data ATTRIBUTE_UNUSED)
{
  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == SIGN_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with the load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (GET_CODE (dest) != MEM)
    return;

  /* If we are setting a MEM in our list of specially recognized MEMs,
     don't mark as killed this time.  */

  if (expr_equiv_p (dest, gcse_mem_operand) && pre_ldst_mems != NULL)
    {
      if (!find_rtx_in_ldst (dest))
        gcse_mems_conflict_p = 1;
      return;
    }

  if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
                       rtx_addr_varies_p))
    gcse_mems_conflict_p = 1;
}
/* Return nonzero if the expression in X (a memory reference) is killed
   in block BB before or after the insn with the CUID in UID_LIMIT.
   AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
   before UID_LIMIT.

   To check the entire block, set UID_LIMIT to max_uid + 1 and
   AVAIL_P to 0.  */

static int
load_killed_in_block_p (basic_block bb, int uid_limit, rtx x, int avail_p)
{
  rtx list_entry = modify_mem_list[bb->index];

  while (list_entry)
    {
      rtx setter;
      /* Ignore entries in the list that do not apply.  */
      if ((avail_p
           && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
          || (! avail_p
              && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
        {
          list_entry = XEXP (list_entry, 1);
          continue;
        }

      setter = XEXP (list_entry, 0);

      /* If SETTER is a call everything is clobbered.  Note that calls
         to pure functions are never put on the list, so we need not
         worry about them.  */
      if (GET_CODE (setter) == CALL_INSN)
        return 1;

      /* SETTER must be an INSN of some kind that sets memory.  Call
         note_stores to examine each hunk of memory that is modified.

         The note_stores interface is pretty limited, so we have to
         communicate via global variables.  Yuk.  */
      gcse_mem_operand = x;
      gcse_mems_conflict_p = 0;
      note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
      if (gcse_mems_conflict_p)
        return 1;
      list_entry = XEXP (list_entry, 1);
    }
  return 0;
}
/* Return nonzero if the operands of expression X are unchanged from
   the start of INSN's basic block up to but not including INSN.  */

static int
oprs_anticipatable_p (rtx x, rtx insn)
{
  return oprs_unchanged_p (x, insn, 0);
}

/* Return nonzero if the operands of expression X are unchanged from
   INSN to the end of INSN's basic block.  */

static int
oprs_available_p (rtx x, rtx insn)
{
  return oprs_unchanged_p (x, insn, 1);
}
/* Hash expression X.

   MODE is only used if X is a CONST_INT.  DO_NOT_RECORD_P is a boolean
   indicating if a volatile operand is found or if the expression contains
   something we don't want to insert in the table.  HASH_TABLE_SIZE is
   the current size of the hash table to be probed.

   ??? One might want to merge this with canon_hash.  Later.  */

static unsigned int
hash_expr (rtx x, enum machine_mode mode, int *do_not_record_p,
           int hash_table_size)
{
  unsigned int hash;

  *do_not_record_p = 0;

  hash = hash_expr_1 (x, mode, do_not_record_p);
  return hash % hash_table_size;
}
/* Hash a string.  Just add its bytes up.  */

static inline unsigned
hash_string_1 (const char *ps)
{
  unsigned hash = 0;
  const unsigned char *p = (const unsigned char *) ps;

  if (p)
    while (*p)
      hash += *p++;

  return hash;
}
/* Subroutine of hash_expr to do the actual work.  */

static unsigned int
hash_expr_1 (rtx x, enum machine_mode mode, int *do_not_record_p)
{
  int i, j;
  unsigned hash = 0;
  enum rtx_code code;
  const char *fmt;

  /* Used to turn recursion into iteration.  We can't rely on GCC's
     tail-recursion elimination since we need to keep accumulating values
     in HASH.  */

  if (x == 0)
    return hash;

 repeat:
  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      hash += ((unsigned int) REG << 7) + REGNO (x);
      return hash;

    case CONST_INT:
      hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
               + (unsigned int) INTVAL (x));
      return hash;

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
         the integers representing the constant.  */
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      if (GET_MODE (x) != VOIDmode)
        for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
          hash += (unsigned int) XWINT (x, i);
      else
        hash += ((unsigned int) CONST_DOUBLE_LOW (x)
                 + (unsigned int) CONST_DOUBLE_HIGH (x));
      return hash;

    case CONST_VECTOR:
      {
        int units;
        rtx elt;

        units = CONST_VECTOR_NUNITS (x);

        for (i = 0; i < units; ++i)
          {
            elt = CONST_VECTOR_ELT (x, i);
            hash += hash_expr_1 (elt, GET_MODE (elt), do_not_record_p);
          }

        return hash;
      }

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
         differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
               + CODE_LABEL_NUMBER (XEXP (x, 0)));
      return hash;

    case SYMBOL_REF:
      {
        /* Don't hash on the symbol's address to avoid bootstrap differences.
           Different hash values may cause expressions to be recorded in
           different orders and thus different registers to be used in the
           final assembler.  This also avoids differences in the dump files
           between various stages.  */
        unsigned int h = 0;
        const unsigned char *p = (const unsigned char *) XSTR (x, 0);

        while (*p)
          h += (h << 7) + *p++; /* ??? revisit */

        hash += ((unsigned int) SYMBOL_REF << 7) + h;
        return hash;
      }

    case MEM:
      if (MEM_VOLATILE_P (x))
        {
          *do_not_record_p = 1;
          return 0;
        }

      hash += (unsigned int) MEM;
      /* We used alias set for hashing, but this is not good, since the alias
         set may differ in -fprofile-arcs and -fbranch-probabilities compilation
         causing the profiles to fail to match.  */
      x = XEXP (x, 0);
      goto repeat;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      *do_not_record_p = 1;
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        {
          *do_not_record_p = 1;
          return 0;
        }
      else
        {
          /* We don't want to take the filename and line into account.  */
          hash += (unsigned) code + (unsigned) GET_MODE (x)
            + hash_string_1 (ASM_OPERANDS_TEMPLATE (x))
            + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
            + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);

          if (ASM_OPERANDS_INPUT_LENGTH (x))
            {
              for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
                {
                  hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i),
                                        GET_MODE (ASM_OPERANDS_INPUT (x, i)),
                                        do_not_record_p)
                           + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
                                            (x, i)));
                }

              hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
              x = ASM_OPERANDS_INPUT (x, 0);
              mode = GET_MODE (x);
              goto repeat;
            }

          return hash;
        }

    default:
      break;
    }

  hash += (unsigned) code + (unsigned) GET_MODE (x);
  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            {
              x = XEXP (x, i);
              goto repeat;
            }

          hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p);
          if (*do_not_record_p)
            return 0;
        }

      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          {
            hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
            if (*do_not_record_p)
              return 0;
          }

      else if (fmt[i] == 's')
        hash += hash_string_1 (XSTR (x, i));
      else if (fmt[i] == 'i')
        hash += (unsigned int) XINT (x, i);
      else
        abort ();
    }

  return hash;
}
/* Hash a set of register REGNO.

   Sets are hashed on the register that is set.  This simplifies the PRE copy
   propagation code.

   ??? May need to make things more elaborate.  Later, as necessary.  */

static unsigned int
hash_set (int regno, int hash_table_size)
{
  unsigned int hash;

  hash = regno;
  return hash % hash_table_size;
}
/* Return nonzero if exp1 is equivalent to exp2.
   ??? Borrowed from cse.c.  Might want to remerge with cse.c.  Later.  */

static int
expr_equiv_p (rtx x, rtx y)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == y)
    return 1;

  if (x == 0 || y == 0)
    return 0;

  code = GET_CODE (x);
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
      return x == y;

    case CONST_INT:
      return INTVAL (x) == INTVAL (y);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case REG:
      return REGNO (x) == REGNO (y);

    case MEM:
      /* Can't merge two expressions in different alias sets, since we can
         decide that the expression is transparent in a block when it isn't,
         due to it being set with the different alias set.  */
      if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
        return 0;

      /* A volatile mem should not be considered equivalent to any other.  */
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
        return 0;
      break;

    /*  For commutative operations, check both orders.  */
    case PLUS:
    case MULT:
    case AND:
    case IOR:
    case XOR:
    case NE:
    case EQ:
      return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
               && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
              || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
                  && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));

    case ASM_OPERANDS:
      /* We don't use the generic code below because we want to
         disregard filename and line numbers.  */

      /* A volatile asm isn't equivalent to any other.  */
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
        return 0;

      if (GET_MODE (x) != GET_MODE (y)
          || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
          || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
                     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
          || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
          || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
        return 0;

      if (ASM_OPERANDS_INPUT_LENGTH (x))
        {
          for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
            if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i),
                                ASM_OPERANDS_INPUT (y, i))
                || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
                           ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
              return 0;
        }

      return 1;

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
        {
        case 'e':
          if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
            return 0;
          break;

        case 'E':
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;
          for (j = 0; j < XVECLEN (x, i); j++)
            if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
              return 0;
          break;

        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        case '0':
          break;

        default:
          abort ();
        }
    }

  return 1;
}
/* Insert expression X in INSN in the hash TABLE.
   If it is already present, record it as the last occurrence in INSN's
   basic block.

   MODE is the mode of the value X is being stored into.
   It is only used if X is a CONST_INT.

   ANTIC_P is nonzero if X is an anticipatable expression.
   AVAIL_P is nonzero if X is an available expression.  */

static void
insert_expr_in_table (rtx x, enum machine_mode mode, rtx insn, int antic_p,
		      int avail_p, struct hash_table *table)
{
  int found, do_not_record_p;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *antic_occr, *avail_occr;
  struct occr *last_occr = NULL;

  hash = hash_expr (x, mode, &do_not_record_p, table->size);

  /* Do not insert expression in table if it contains volatile operands,
     or if hash_expr determines the expression is something we don't want
     to or can't handle.  */
  if (do_not_record_p)
    return;

  cur_expr = table->table[hash];
  found = 0;

  while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
	 the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (table->table[hash] == NULL)
	/* This is the first pattern that hashed to this index.  */
	table->table[hash] = cur_expr;
      else
	/* Add EXPR to end of this hash chain.  */
	last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.  */
      cur_expr->expr = x;
      cur_expr->bitmap_index = table->n_elems++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence(s).  */
  if (antic_p)
    {
      antic_occr = cur_expr->antic_occr;

      /* Search for another occurrence in the same basic block.  */
      while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
	{
	  /* If an occurrence isn't found, save a pointer to the end of
	     the list.  */
	  last_occr = antic_occr;
	  antic_occr = antic_occr->next;
	}

      if (antic_occr)
	/* Found another instance of the expression in the same basic block.
	   Prefer the currently recorded one.  We want the first one in the
	   block and the block is scanned from start to end.  */
	; /* nothing to do */
      else
	{
	  /* First occurrence of this expression in this basic block.  */
	  antic_occr = gcse_alloc (sizeof (struct occr));
	  bytes_used += sizeof (struct occr);
	  /* First occurrence of this expression in any block?  */
	  if (cur_expr->antic_occr == NULL)
	    cur_expr->antic_occr = antic_occr;
	  else
	    last_occr->next = antic_occr;

	  antic_occr->insn = insn;
	  antic_occr->next = NULL;
	}
    }

  if (avail_p)
    {
      avail_occr = cur_expr->avail_occr;

      /* Search for another occurrence in the same basic block.  */
      while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
	{
	  /* If an occurrence isn't found, save a pointer to the end of
	     the list.  */
	  last_occr = avail_occr;
	  avail_occr = avail_occr->next;
	}

      if (avail_occr)
	/* Found another instance of the expression in the same basic block.
	   Prefer this occurrence to the currently recorded one.  We want
	   the last one in the block and the block is scanned from start
	   to end.  */
	avail_occr->insn = insn;
      else
	{
	  /* First occurrence of this expression in this basic block.  */
	  avail_occr = gcse_alloc (sizeof (struct occr));
	  bytes_used += sizeof (struct occr);

	  /* First occurrence of this expression in any block?  */
	  if (cur_expr->avail_occr == NULL)
	    cur_expr->avail_occr = avail_occr;
	  else
	    last_occr->next = avail_occr;

	  avail_occr->insn = insn;
	  avail_occr->next = NULL;
	}
    }
}
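
/* Example: if a block computes (plus:SI (reg:SI 100) (const_int 4)) twice,
   the anticipatable occurrence kept above is the first computation (its
   operands are unchanged from block entry), while the available occurrence
   kept is the last one (its result survives to block exit).  */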
/* Insert pattern X in INSN in the hash table.
   X is a SET of a reg to either another reg or a constant.
   If it is already present, record it as the last occurrence in INSN's
   basic block.  */

static void
insert_set_in_table (rtx x, rtx insn, struct hash_table *table)
{
  int found;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *cur_occr, *last_occr = NULL;

  if (GET_CODE (x) != SET
      || GET_CODE (SET_DEST (x)) != REG)
    abort ();

  hash = hash_set (REGNO (SET_DEST (x)), table->size);

  cur_expr = table->table[hash];
  found = 0;

  while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
	 the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (table->table[hash] == NULL)
	/* This is the first pattern that hashed to this index.  */
	table->table[hash] = cur_expr;
      else
	/* Add EXPR to end of this hash chain.  */
	last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.
	 We must copy X because it can be modified when copy propagation is
	 performed on its operands.  */
      cur_expr->expr = copy_rtx (x);
      cur_expr->bitmap_index = table->n_elems++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence.  */
  cur_occr = cur_expr->avail_occr;

  /* Search for another occurrence in the same basic block.  */
  while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
    {
      /* If an occurrence isn't found, save a pointer to the end of
	 the list.  */
      last_occr = cur_occr;
      cur_occr = cur_occr->next;
    }

  if (cur_occr)
    /* Found another instance of the expression in the same basic block.
       Prefer this occurrence to the currently recorded one.  We want the
       last one in the block and the block is scanned from start to end.  */
    cur_occr->insn = insn;
  else
    {
      /* First occurrence of this expression in this basic block.  */
      cur_occr = gcse_alloc (sizeof (struct occr));
      bytes_used += sizeof (struct occr);

      /* First occurrence of this expression in any block?  */
      if (cur_expr->avail_occr == NULL)
	cur_expr->avail_occr = cur_occr;
      else
	last_occr->next = cur_occr;

      cur_occr->insn = insn;
      cur_occr->next = NULL;
    }
}
/* Determine whether the rtx X should be treated as a constant for
   the purposes of GCSE's constant propagation.  */

static bool
gcse_constant_p (rtx x)
{
  /* Consider a COMPARE of two integers constant.  */
  if (GET_CODE (x) == COMPARE
      && GET_CODE (XEXP (x, 0)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return true;

  /* Consider a COMPARE of the same registers a constant
     if they are not floating point registers.  */
  if (GET_CODE (x) == COMPARE
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == REG
      && REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 1))
      && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
      && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 1))))
    return true;

  if (GET_CODE (x) == CONSTANT_P_RTX)
    return false;

  return CONSTANT_P (x);
}
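
/* Examples: (const_int 4) and (symbol_ref:SI ("x")) satisfy CONSTANT_P and
   are accepted; (compare (const_int 1) (const_int 2)) is accepted by the
   first test above even though COMPARE itself is not CONSTANT_P;
   (reg:SI 100) is rejected.  */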
/* Scan pattern PAT of INSN and add an entry to the hash TABLE (set or
   expression one).  */

static void
hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
{
  rtx src = SET_SRC (pat);
  rtx dest = SET_DEST (pat);
  rtx note;

  if (GET_CODE (src) == CALL)
    hash_scan_call (src, insn, table);

  else if (GET_CODE (dest) == REG)
    {
      unsigned int regno = REGNO (dest);
      rtx tmp;

      /* If this is a single set and we are doing constant propagation,
	 see if a REG_NOTE shows this equivalent to a constant.  */
      if (table->set_p && (note = find_reg_equal_equiv_note (insn)) != 0
	  && gcse_constant_p (XEXP (note, 0)))
	src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);

      /* Only record sets of pseudo-regs in the hash table.  */
      if (! table->set_p
	  && regno >= FIRST_PSEUDO_REGISTER
	  /* Don't GCSE something if we can't do a reg/reg copy.  */
	  && can_copy_p (GET_MODE (dest))
	  /* GCSE commonly inserts instructions after the insn.  We can't
	     do that easily for EH_REGION notes so disable GCSE on these
	     for now.  */
	  && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	  /* Is SET_SRC something we want to gcse?  */
	  && want_to_gcse_p (src)
	  /* Don't CSE a nop.  */
	  && ! set_noop_p (pat)
	  /* Don't GCSE if it has attached REG_EQUIV note.
	     At this point only function parameters should have
	     REG_EQUIV notes and if the argument slot is used somewhere
	     explicitly, it means address of parameter has been taken,
	     so we should not extend the lifetime of the pseudo.  */
	  && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
	      || GET_CODE (XEXP (note, 0)) != MEM))
	{
	  /* An expression is not anticipatable if its operands are
	     modified before this insn or if this is not the only SET in
	     this insn.  */
	  int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
	  /* An expression is not available if its operands are
	     subsequently modified, including this insn.  It's also not
	     available if this is a branch, because we can't insert
	     a set after the branch.  */
	  int avail_p = (oprs_available_p (src, insn)
			 && ! JUMP_P (insn));

	  insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table);
	}

      /* Record sets for constant/copy propagation.  */
      else if (table->set_p
	       && regno >= FIRST_PSEUDO_REGISTER
	       && ((GET_CODE (src) == REG
		    && REGNO (src) >= FIRST_PSEUDO_REGISTER
		    && can_copy_p (GET_MODE (dest))
		    && REGNO (src) != regno)
		   || gcse_constant_p (src))
	       /* A copy is not available if its src or dest is subsequently
		  modified.  Here we want to search from INSN+1 on, but
		  oprs_available_p searches from INSN on.  */
	       && (insn == BB_END (BLOCK_FOR_INSN (insn))
		   || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
		       && oprs_available_p (pat, tmp))))
	insert_set_in_table (pat, insn, table);
    }

  /* In case of store we want to consider the memory value as available in
     the REG stored in that memory.  This makes it possible to remove
     redundant loads due to stores to the same location.  */
  else if (flag_gcse_las && GET_CODE (src) == REG && GET_CODE (dest) == MEM)
    {
      unsigned int regno = REGNO (src);

      /* Do not do this for constant/copy propagation.  */
      if (! table->set_p
	  /* Only record sets of pseudo-regs in the hash table.  */
	  && regno >= FIRST_PSEUDO_REGISTER
	  /* Don't GCSE something if we can't do a reg/reg copy.  */
	  && can_copy_p (GET_MODE (src))
	  /* GCSE commonly inserts instructions after the insn.  We can't
	     do that easily for EH_REGION notes so disable GCSE on these
	     for now.  */
	  && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	  /* Is SET_DEST something we want to gcse?  */
	  && want_to_gcse_p (dest)
	  /* Don't CSE a nop.  */
	  && ! set_noop_p (pat)
	  /* Don't GCSE if it has attached REG_EQUIV note.
	     At this point only function parameters should have
	     REG_EQUIV notes and if the argument slot is used somewhere
	     explicitly, it means address of parameter has been taken,
	     so we should not extend the lifetime of the pseudo.  */
	  && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
	      || GET_CODE (XEXP (note, 0)) != MEM))
	{
	  /* Stores are never anticipatable.  */
	  int antic_p = 0;
	  /* An expression is not available if its operands are
	     subsequently modified, including this insn.  It's also not
	     available if this is a branch, because we can't insert
	     a set after the branch.  */
	  int avail_p = oprs_available_p (dest, insn)
			&& ! JUMP_P (insn);

	  /* Record the memory expression (DEST) in the hash table.  */
	  insert_expr_in_table (dest, GET_MODE (dest), insn,
				antic_p, avail_p, table);
	}
    }
}
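
/* For example, an insn whose pattern is
     (set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 4)))
   passes the checks above (pseudo destination, copyable mode, no EH region
   or REG_EQUIV complications), so the PLUS expression is entered in the
   expression hash table with the mode of reg 100.  */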
static void
hash_scan_clobber (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
		   struct hash_table *table ATTRIBUTE_UNUSED)
{
  /* Currently nothing to do.  */
}

static void
hash_scan_call (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
		struct hash_table *table ATTRIBUTE_UNUSED)
{
  /* Currently nothing to do.  */
}

/* Process INSN and add hash table entries as appropriate.

   Only available expressions that set a single pseudo-reg are recorded.

   Single sets in a PARALLEL could be handled, but it's an extra complication
   that isn't dealt with right now.  The trick is handling the CLOBBERs that
   are also in the PARALLEL.  Later.

   If SET_P is nonzero, this is for the assignment hash table,
   otherwise it is for the expression hash table.
   If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block, and should
   not record any expressions.  */

static void
hash_scan_insn (rtx insn, struct hash_table *table, int in_libcall_block)
{
  rtx pat = PATTERN (insn);
  int i;

  if (in_libcall_block)
    return;

  /* Pick out the sets of INSN and for other forms of instructions record
     what's been modified.  */

  if (GET_CODE (pat) == SET)
    hash_scan_set (pat, insn, table);
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
	rtx x = XVECEXP (pat, 0, i);

	if (GET_CODE (x) == SET)
	  hash_scan_set (x, insn, table);
	else if (GET_CODE (x) == CLOBBER)
	  hash_scan_clobber (x, insn, table);
	else if (GET_CODE (x) == CALL)
	  hash_scan_call (x, insn, table);
      }

  else if (GET_CODE (pat) == CLOBBER)
    hash_scan_clobber (pat, insn, table);
  else if (GET_CODE (pat) == CALL)
    hash_scan_call (pat, insn, table);
}

static void
dump_hash_table (FILE *file, const char *name, struct hash_table *table)
{
  int i;
  /* Flattened out table, so it's printed in proper order.  */
  struct expr **flat_table;
  unsigned int *hash_val;
  struct expr *expr;

  flat_table = xcalloc (table->n_elems, sizeof (struct expr *));
  hash_val = xmalloc (table->n_elems * sizeof (unsigned int));

  for (i = 0; i < (int) table->size; i++)
    for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
      {
	flat_table[expr->bitmap_index] = expr;
	hash_val[expr->bitmap_index] = i;
      }

  fprintf (file, "%s hash table (%d buckets, %d entries)\n",
	   name, table->size, table->n_elems);

  for (i = 0; i < (int) table->n_elems; i++)
    if (flat_table[i] != 0)
      {
	expr = flat_table[i];
	fprintf (file, "Index %d (hash value %d)\n  ",
		 expr->bitmap_index, hash_val[i]);
	print_rtl (file, expr->expr);
	fprintf (file, "\n");
      }

  fprintf (file, "\n");

  free (flat_table);
  free (hash_val);
}

/* Record register first/last/block set information for REGNO in INSN.

   first_set records the first place in the block where the register
   is set and is used to compute "anticipatability".

   last_set records the last place in the block where the register
   is set and is used to compute "availability".

   last_bb records the block for which first_set and last_set are
   valid, as a quick test to invalidate them.

   reg_set_in_block records whether the register is set in the block
   and is used to compute "transparency".  */

static void
record_last_reg_set_info (rtx insn, int regno)
{
  struct reg_avail_info *info = &reg_avail_info[regno];
  int cuid = INSN_CUID (insn);

  info->last_set = cuid;
  if (info->last_bb != current_bb)
    {
      info->last_bb = current_bb;
      info->first_set = cuid;
      SET_BIT (reg_set_in_block[current_bb->index], regno);
    }
}

/* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
   Note we store a pair of elements in the list, so they have to be
   taken off pairwise.  */

static void
canon_list_insert (rtx dest ATTRIBUTE_UNUSED, rtx unused1 ATTRIBUTE_UNUSED,
		   void *v_insn)
{
  rtx dest_addr, insn;
  int bb;

  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == SIGN_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with a load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */

  if (GET_CODE (dest) != MEM)
    return;

  dest_addr = get_addr (XEXP (dest, 0));
  dest_addr = canon_rtx (dest_addr);
  insn = (rtx) v_insn;
  bb = BLOCK_NUM (insn);

  canon_modify_mem_list[bb] =
    alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
  canon_modify_mem_list[bb] =
    alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
  bitmap_set_bit (canon_modify_mem_list_set, bb);
}
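
/* The list built here therefore looks like (dest1, dest_addr1, dest2,
   dest_addr2, ...): the second alloc_EXPR_LIST above pushes DEST in front
   of DEST_ADDR, so consumers read the MEM first and its canonicalized
   address second (see the MEM case in compute_transp).  */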
/* Record memory modification information for INSN.  We do not actually care
   about the memory location(s) that are set, or even how they are set (consider
   a CALL_INSN).  We merely need to record which insns modify memory.  */

static void
record_last_mem_set_info (rtx insn)
{
  int bb = BLOCK_NUM (insn);

  /* load_killed_in_block_p will handle the case of calls clobbering
     everything.  */
  modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
  bitmap_set_bit (modify_mem_list_set, bb);

  if (GET_CODE (insn) == CALL_INSN)
    {
      /* Note that traversals of this loop (other than for free-ing)
	 will break after encountering a CALL_INSN.  So, there's no
	 need to insert a pair of items, as canon_list_insert does.  */
      canon_modify_mem_list[bb] =
	alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
      bitmap_set_bit (canon_modify_mem_list_set, bb);
    }
  else
    note_stores (PATTERN (insn), canon_list_insert, (void *) insn);
}

/* Called from compute_hash_table via note_stores to handle one
   SET or CLOBBER in an insn.  DATA is really the instruction in which
   the SET is taking place.  */

static void
record_last_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx last_set_insn = (rtx) data;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (GET_CODE (dest) == REG)
    record_last_reg_set_info (last_set_insn, REGNO (dest));
  else if (GET_CODE (dest) == MEM
	   /* Ignore pushes, they clobber nothing.  */
	   && ! push_operand (dest, GET_MODE (dest)))
    record_last_mem_set_info (last_set_insn);
}
/* Top level function to create an expression or assignment hash table.

   Expression entries are placed in the hash table if
   - they are of the form (set (pseudo-reg) src),
   - src is something we want to perform GCSE on,
   - none of the operands are subsequently modified in the block

   Assignment entries are placed in the hash table if
   - they are of the form (set (pseudo-reg) src),
   - src is something we want to perform const/copy propagation on,
   - none of the operands or target are subsequently modified in the block

   Currently src must be a pseudo-reg or a const_int.

   TABLE is the table computed.  */

static void
compute_hash_table_work (struct hash_table *table)
{
  unsigned int i;

  /* While we compute the hash table we also compute a bit array of which
     registers are set in which blocks.
     ??? This isn't needed during const/copy propagation, but it's cheap to
     compute.  Later.  */
  sbitmap_vector_zero (reg_set_in_block, last_basic_block);

  /* re-Cache any INSN_LIST nodes we have allocated.  */
  clear_modify_mem_tables ();
  /* Some working arrays used to track first and last set in each block.  */
  reg_avail_info = gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));

  for (i = 0; i < max_gcse_regno; ++i)
    reg_avail_info[i].last_bb = NULL;

  FOR_EACH_BB (current_bb)
    {
      rtx insn;
      unsigned int regno;
      int in_libcall_block;

      /* First pass over the instructions records information used to
	 determine when registers and memory are first and last set.
	 ??? hard-reg reg_set_in_block computation
	 could be moved to compute_sets since they currently don't change.  */

      for (insn = BB_HEAD (current_bb);
	   insn && insn != NEXT_INSN (BB_END (current_bb));
	   insn = NEXT_INSN (insn))
	{
	  if (! INSN_P (insn))
	    continue;

	  if (GET_CODE (insn) == CALL_INSN)
	    {
	      bool clobbers_all = false;
#ifdef NON_SAVING_SETJMP
	      if (NON_SAVING_SETJMP
		  && find_reg_note (insn, REG_SETJMP, NULL_RTX))
		clobbers_all = true;
#endif

	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
		if (clobbers_all
		    || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
		  record_last_reg_set_info (insn, regno);

	      mark_call (insn);
	    }

	  note_stores (PATTERN (insn), record_last_set_info, insn);
	}

      /* Insert implicit sets in the hash table.  */
      if (table->set_p
	  && implicit_sets[current_bb->index] != NULL_RTX)
	hash_scan_set (implicit_sets[current_bb->index],
		       BB_HEAD (current_bb), table);

      /* The next pass builds the hash table.  */

      for (insn = BB_HEAD (current_bb), in_libcall_block = 0;
	   insn && insn != NEXT_INSN (BB_END (current_bb));
	   insn = NEXT_INSN (insn))
	if (INSN_P (insn))
	  {
	    if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
	      in_libcall_block = 1;
	    else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
	      in_libcall_block = 0;
	    hash_scan_insn (insn, table, in_libcall_block);
	    if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
	      in_libcall_block = 0;
	  }
    }

  free (reg_avail_info);
  reg_avail_info = NULL;
}
/* Allocate space for the set/expr hash TABLE.
   N_INSNS is the number of instructions in the function.
   It is used to determine the number of buckets to use.
   SET_P determines whether set or expression table will
   be created.  */

static void
alloc_hash_table (int n_insns, struct hash_table *table, int set_p)
{
  int n;

  table->size = n_insns / 4;
  if (table->size < 11)
    table->size = 11;

  /* Attempt to maintain efficient use of hash table.
     Making it an odd number is simplest for now.
     ??? Later take some measurements.  */
  table->size |= 1;
  n = table->size * sizeof (struct expr *);
  table->table = gmalloc (n);
  table->set_p = set_p;
}

/* Free things allocated by alloc_hash_table.  */

static void
free_hash_table (struct hash_table *table)
{
  free (table->table);
}

/* Compute the hash TABLE for doing copy/const propagation or
   expression hash table.  */

static void
compute_hash_table (struct hash_table *table)
{
  /* Initialize count of number of entries in hash table.  */
  table->n_elems = 0;
  memset (table->table, 0, table->size * sizeof (struct expr *));

  compute_hash_table_work (table);
}
/* Expression tracking support.  */

/* Lookup pattern PAT in the expression TABLE.
   The result is a pointer to the table entry, or NULL if not found.  */

static struct expr *
lookup_expr (rtx pat, struct hash_table *table)
{
  int do_not_record_p;
  unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
				 table->size);
  struct expr *expr;

  if (do_not_record_p)
    return NULL;

  expr = table->table[hash];

  while (expr && ! expr_equiv_p (expr->expr, pat))
    expr = expr->next_same_hash;

  return expr;
}

/* Lookup REGNO in the set TABLE.  The result is a pointer to the
   table entry, or NULL if not found.  */

static struct expr *
lookup_set (unsigned int regno, struct hash_table *table)
{
  unsigned int hash = hash_set (regno, table->size);
  struct expr *expr;

  expr = table->table[hash];

  while (expr && REGNO (SET_DEST (expr->expr)) != regno)
    expr = expr->next_same_hash;

  return expr;
}

/* Return the next entry for REGNO in list EXPR.  */

static struct expr *
next_set (unsigned int regno, struct expr *expr)
{
  do
    expr = expr->next_same_hash;
  while (expr && REGNO (SET_DEST (expr->expr)) != regno);

  return expr;
}
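
/* Typical usage (see find_avail_set below): fetch the first candidate with
   lookup_set (regno, table) and advance with next_set (regno, expr) to
   visit every recorded SET of REGNO in this hash chain.  */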
/* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
   types may be mixed.  */

static void
free_insn_expr_list_list (rtx *listp)
{
  rtx list, next;

  for (list = *listp; list; list = next)
    {
      next = XEXP (list, 1);
      if (GET_CODE (list) == EXPR_LIST)
	free_EXPR_LIST_node (list);
      else
	free_INSN_LIST_node (list);
    }

  *listp = 0;
}

/* Clear canon_modify_mem_list and modify_mem_list tables.  */

static void
clear_modify_mem_tables (void)
{
  int i;

  EXECUTE_IF_SET_IN_BITMAP
    (modify_mem_list_set, 0, i, free_INSN_LIST_list (modify_mem_list + i));
  bitmap_clear (modify_mem_list_set);

  EXECUTE_IF_SET_IN_BITMAP
    (canon_modify_mem_list_set, 0, i,
     free_insn_expr_list_list (canon_modify_mem_list + i));
  bitmap_clear (canon_modify_mem_list_set);
}

/* Release memory used by modify_mem_list_set and canon_modify_mem_list_set.  */

static void
free_modify_mem_tables (void)
{
  clear_modify_mem_tables ();
  free (modify_mem_list);
  free (canon_modify_mem_list);
  modify_mem_list = 0;
  canon_modify_mem_list = 0;
}

/* Reset tables used to keep track of what's still available [since the
   start of the block].  */

static void
reset_opr_set_tables (void)
{
  /* Maintain a bitmap of which regs have been set since beginning of
     the block.  */
  CLEAR_REG_SET (reg_set_bitmap);

  /* Also keep a record of the last instruction to modify memory.
     For now this is very trivial, we only record whether any memory
     location has been modified.  */
  clear_modify_mem_tables ();
}
/* Return nonzero if the operands of X are not set before INSN in
   INSN's basic block.  */

static int
oprs_not_set_p (rtx x, rtx insn)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    case MEM:
      if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
				  INSN_CUID (insn), x, 0))
	return 0;
      else
	return oprs_not_set_p (XEXP (x, 0), insn);

    case REG:
      return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    return oprs_not_set_p (XEXP (x, i), insn);

	  if (! oprs_not_set_p (XEXP (x, i), insn))
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
	    return 0;
    }

  return 1;
}

/* Mark things set by a CALL.  */

static void
mark_call (rtx insn)
{
  if (! CONST_OR_PURE_CALL_P (insn))
    record_last_mem_set_info (insn);
}

/* Mark things set by a SET.  */

static void
mark_set (rtx pat, rtx insn)
{
  rtx dest = SET_DEST (pat);

  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == SIGN_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  if (GET_CODE (dest) == REG)
    SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
  else if (GET_CODE (dest) == MEM)
    record_last_mem_set_info (insn);

  if (GET_CODE (SET_SRC (pat)) == CALL)
    mark_call (insn);
}

/* Record things set by a CLOBBER.  */

static void
mark_clobber (rtx pat, rtx insn)
{
  rtx clob = XEXP (pat, 0);

  while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
    clob = XEXP (clob, 0);

  if (GET_CODE (clob) == REG)
    SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
  else
    record_last_mem_set_info (insn);
}

/* Record things set by INSN.
   This data is used by oprs_not_set_p.  */

static void
mark_oprs_set (rtx insn)
{
  rtx pat = PATTERN (insn);
  int i;

  if (GET_CODE (pat) == SET)
    mark_set (pat, insn);
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
	rtx x = XVECEXP (pat, 0, i);

	if (GET_CODE (x) == SET)
	  mark_set (x, insn);
	else if (GET_CODE (x) == CLOBBER)
	  mark_clobber (x, insn);
	else if (GET_CODE (x) == CALL)
	  mark_call (insn);
      }

  else if (GET_CODE (pat) == CLOBBER)
    mark_clobber (pat, insn);
  else if (GET_CODE (pat) == CALL)
    mark_call (insn);
}
/* Classic GCSE reaching definition support.  */

/* Allocate reaching def variables.  */

static void
alloc_rd_mem (int n_blocks, int n_insns)
{
  rd_kill = sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (rd_kill, n_blocks);

  rd_gen = sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (rd_gen, n_blocks);

  reaching_defs = sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (reaching_defs, n_blocks);

  rd_out = sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (rd_out, n_blocks);
}

/* Free reaching def variables.  */

static void
free_rd_mem (void)
{
  sbitmap_vector_free (rd_kill);
  sbitmap_vector_free (rd_gen);
  sbitmap_vector_free (reaching_defs);
  sbitmap_vector_free (rd_out);
}

/* Add INSN to the kills of BB.  REGNO, set in BB, is killed by INSN.  */

static void
handle_rd_kill_set (rtx insn, int regno, basic_block bb)
{
  struct reg_set *this_reg;

  for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg->next)
    if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
      SET_BIT (rd_kill[bb->index], INSN_CUID (this_reg->insn));
}
/* Compute the set of kill's for reaching definitions.  */

static void
compute_kill_rd (void)
{
  int cuid;
  unsigned int regno;
  int i;
  basic_block bb;

  /* For each block
       For each set bit in `gen' of the block (i.e each insn which
	   generates a definition in the block)
	 Call the reg set by the insn corresponding to that bit regx
	 Look at the linked list starting at reg_set_table[regx]
	 For each setting of regx in the linked list, which is not in
	     this block
	   Set the bit in `kill' corresponding to that insn.  */
  FOR_EACH_BB (bb)
    for (cuid = 0; cuid < max_cuid; cuid++)
      if (TEST_BIT (rd_gen[bb->index], cuid))
	{
	  rtx insn = CUID_INSN (cuid);
	  rtx pat = PATTERN (insn);

	  if (GET_CODE (insn) == CALL_INSN)
	    {
	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
		if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
		  handle_rd_kill_set (insn, regno, bb);
	    }

	  if (GET_CODE (pat) == PARALLEL)
	    {
	      for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
		{
		  enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));

		  if ((code == SET || code == CLOBBER)
		      && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
		    handle_rd_kill_set (insn,
					REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
					bb);
		}
	    }
	  else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG)
	    /* Each setting of this register outside of this block
	       must be marked in the set of kills in this block.  */
	    handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb);
	}
}
/* Compute the reaching definitions as in
   Compilers Principles, Techniques, and Tools. Aho, Sethi, Ullman,
   Chapter 10.  It is the same algorithm as used for computing available
   expressions but applied to the gens and kills of reaching definitions.  */

static void
compute_rd (void)
{
  int changed, passes;
  basic_block bb;

  FOR_EACH_BB (bb)
    sbitmap_copy (rd_out[bb->index] /*dst*/, rd_gen[bb->index] /*src*/);

  passes = 0;
  changed = 1;
  while (changed)
    {
      changed = 0;
      FOR_EACH_BB (bb)
	{
	  sbitmap_union_of_preds (reaching_defs[bb->index], rd_out, bb->index);
	  changed |= sbitmap_union_of_diff_cg (rd_out[bb->index], rd_gen[bb->index],
					       reaching_defs[bb->index], rd_kill[bb->index]);
	}

      passes++;
    }

  if (gcse_file)
    fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
}
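
/* The fixed point computed above satisfies, for every basic block b:

     reaching_defs[b] = union over all predecessors p of b of rd_out[p]
     rd_out[b]        = rd_gen[b] | (reaching_defs[b] & ~rd_kill[b])

   which is the classic forward dataflow system for reaching definitions.  */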
/* Classic GCSE available expression support.  */

/* Allocate memory for available expression computation.  */

static void
alloc_avail_expr_mem (int n_blocks, int n_exprs)
{
  ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_kill, n_blocks);

  ae_gen = sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_gen, n_blocks);

  ae_in = sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_in, n_blocks);

  ae_out = sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_out, n_blocks);
}

static void
free_avail_expr_mem (void)
{
  sbitmap_vector_free (ae_kill);
  sbitmap_vector_free (ae_gen);
  sbitmap_vector_free (ae_in);
  sbitmap_vector_free (ae_out);
}

/* Compute the set of available expressions generated in each basic block.  */

static void
compute_ae_gen (struct hash_table *expr_hash_table)
{
  unsigned int i;
  struct expr *expr;
  struct occr *occr;

  /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
     This is all we have to do because an expression is not recorded if it
     is not available, and the only expressions we want to work with are the
     ones that are recorded.  */
  for (i = 0; i < expr_hash_table->size; i++)
    for (expr = expr_hash_table->table[i]; expr != 0; expr = expr->next_same_hash)
      for (occr = expr->avail_occr; occr != 0; occr = occr->next)
	SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
}
/* Return nonzero if expression X is killed in BB.  */

static int
expr_killed_p (rtx x, basic_block bb)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));

    case MEM:
      if (load_killed_in_block_p (bb, get_max_uid () + 1, x, 0))
	return 1;
      else
	return expr_killed_p (XEXP (x, 0), bb);

    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    return expr_killed_p (XEXP (x, i), bb);
	  else if (expr_killed_p (XEXP (x, i), bb))
	    return 1;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (expr_killed_p (XVECEXP (x, i, j), bb))
	    return 1;
    }

  return 0;
}

/* Compute the set of available expressions killed in each basic block.  */

static void
compute_ae_kill (sbitmap *ae_gen, sbitmap *ae_kill,
		 struct hash_table *expr_hash_table)
{
  basic_block bb;
  unsigned int i;
  struct expr *expr;

  FOR_EACH_BB (bb)
    for (i = 0; i < expr_hash_table->size; i++)
      for (expr = expr_hash_table->table[i]; expr; expr = expr->next_same_hash)
	{
	  /* Skip EXPR if generated in this block.  */
	  if (TEST_BIT (ae_gen[bb->index], expr->bitmap_index))
	    continue;

	  if (expr_killed_p (expr->expr, bb))
	    SET_BIT (ae_kill[bb->index], expr->bitmap_index);
	}
}
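
/* AE_GEN and AE_KILL are the local inputs to compute_available, which
   solves the usual availability equations:

     ae_in[b]  = intersection over all predecessors p of b of ae_out[p]
     ae_out[b] = ae_gen[b] | (ae_in[b] & ~ae_kill[b])  */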
/* Actually perform the Classic GCSE optimizations.  */

/* Return nonzero if occurrence OCCR of expression EXPR reaches block BB.

   CHECK_SELF_LOOP is nonzero if we should consider a block reaching itself
   as a positive reach.  We want to do this when there are two computations
   of the expression in the block.

   VISITED is a pointer to a working buffer for tracking which BB's have
   been visited.  It is NULL for the top-level call.

   We treat reaching expressions that go through blocks containing the same
   reaching expression as "not reaching".  E.g. if EXPR is generated in blocks
   2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
   2 as not reaching.  The intent is to improve the probability of finding
   only one reaching expression and to reduce register lifetimes by picking
   the closest such expression.  */

static int
expr_reaches_here_p_work (struct occr *occr, struct expr *expr,
			  basic_block bb, int check_self_loop, char *visited)
{
  edge pred;

  for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
    {
      basic_block pred_bb = pred->src;

      if (visited[pred_bb->index])
	/* This predecessor has already been visited.  Nothing to do.  */
	;
      else if (pred_bb == bb)
	{
	  /* BB loops on itself.  */
	  if (check_self_loop
	      && TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index)
	      && BLOCK_NUM (occr->insn) == pred_bb->index)
	    return 1;

	  visited[pred_bb->index] = 1;
	}

      /* Ignore this predecessor if it kills the expression.  */
      else if (TEST_BIT (ae_kill[pred_bb->index], expr->bitmap_index))
	visited[pred_bb->index] = 1;

      /* Does this predecessor generate this expression?  */
      else if (TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index))
	{
	  /* Is this the occurrence we're looking for?
	     Note that there's only one generating occurrence per block
	     so we just need to check the block number.  */
	  if (BLOCK_NUM (occr->insn) == pred_bb->index)
	    return 1;

	  visited[pred_bb->index] = 1;
	}

      /* Neither gen nor kill.  */
      else
	{
	  visited[pred_bb->index] = 1;
	  if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop,
					visited))
	    return 1;
	}
    }

  /* All paths have been checked.  */
  return 0;
}

/* This wrapper for expr_reaches_here_p_work() is to ensure that any
   memory allocated for that function is returned.  */

static int
expr_reaches_here_p (struct occr *occr, struct expr *expr, basic_block bb,
		     int check_self_loop)
{
  int rval;
  char *visited = xcalloc (last_basic_block, 1);

  rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);

  free (visited);
  return rval;
}
/* Return the instruction that computes EXPR that reaches INSN's basic block.
   If there is more than one such instruction, return NULL.

   Called only by handle_avail_expr.  */

static rtx
computing_insn (struct expr *expr, rtx insn)
{
  basic_block bb = BLOCK_FOR_INSN (insn);

  if (expr->avail_occr->next == NULL)
    {
      if (BLOCK_FOR_INSN (expr->avail_occr->insn) == bb)
	/* The available expression is actually itself
	   (i.e. a loop in the flow graph) so do nothing.  */
	return NULL;

      /* (FIXME) Case that we found a pattern that was created by
	 a substitution that took place.  */
      return expr->avail_occr->insn;
    }
  else
    {
      /* Pattern is computed more than once.
	 Search backwards from this insn to see how many of these
	 computations actually reach this insn.  */
      struct occr *occr;
      rtx insn_computes_expr = NULL;
      int can_reach = 0;

      for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
	{
	  if (BLOCK_FOR_INSN (occr->insn) == bb)
	    {
	      /* The expression is generated in this block.
		 The only time we care about this is when the expression
		 is generated later in the block [and thus there's a loop].
		 We let the normal cse pass handle the other cases.  */
	      if (INSN_CUID (insn) < INSN_CUID (occr->insn)
		  && expr_reaches_here_p (occr, expr, bb, 1))
		{
		  can_reach++;
		  if (can_reach > 1)
		    return NULL;

		  insn_computes_expr = occr->insn;
		}
	    }
	  else if (expr_reaches_here_p (occr, expr, bb, 0))
	    {
	      can_reach++;
	      if (can_reach > 1)
		return NULL;

	      insn_computes_expr = occr->insn;
	    }
	}

      if (insn_computes_expr == NULL)
	abort ();

      return insn_computes_expr;
    }
}
/* Return nonzero if the definition in DEF_INSN can reach INSN.
   Only called by can_disregard_other_sets.  */

static int
def_reaches_here_p (rtx insn, rtx def_insn)
{
  rtx reg;

  if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
    return 1;

  if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
    {
      if (INSN_CUID (def_insn) < INSN_CUID (insn))
	{
	  if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
	    return 1;
	  else if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
	    reg = XEXP (PATTERN (def_insn), 0);
	  else if (GET_CODE (PATTERN (def_insn)) == SET)
	    reg = SET_DEST (PATTERN (def_insn));
	  else
	    abort ();

	  return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
	}
      else
	return 0;
    }

  return 0;
}

/* Return nonzero if *ADDR_THIS_REG can only have one value at INSN.  The
   value returned is the number of definitions that reach INSN.  Returning a
   value of zero means that [maybe] more than one definition reaches INSN and
   the caller can't perform whatever optimization it is trying.  i.e. it is
   always safe to return zero.  */

static int
can_disregard_other_sets (struct reg_set **addr_this_reg, rtx insn, int for_combine)
{
  int number_of_reaching_defs = 0;
  struct reg_set *this_reg;

  for (this_reg = *addr_this_reg; this_reg != 0; this_reg = this_reg->next)
    if (def_reaches_here_p (insn, this_reg->insn))
      {
	number_of_reaching_defs++;
	/* Ignore parallels for now.  */
	if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
	  return 0;

	if (!for_combine
	    && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
		|| ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
				  SET_SRC (PATTERN (insn)))))
	  /* A setting of the reg to a different value reaches INSN.  */
	  return 0;

	if (number_of_reaching_defs > 1)
	  {
	    /* If in this setting the value the register is being set to is
	       equal to the previous value the register was set to and this
	       setting reaches the insn we are trying to do the substitution
	       on then we are ok.  */
	    if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
	      return 0;
	    else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
				    SET_SRC (PATTERN (insn))))
	      return 0;
	  }

	*addr_this_reg = this_reg;
      }

  return number_of_reaching_defs;
}
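
/* In other words, several definitions may be disregarded only when, roughly
   speaking, every reaching definition is a SET whose source is rtx_equal_p
   to the source of INSN's own pattern; any PARALLEL, CLOBBER or differing
   source forces the conservative answer 0.  */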
/* Expression computed by insn is available and the substitution is legal,
   so try to perform the substitution.

   The result is nonzero if any changes were made.  */

static int
handle_avail_expr (rtx insn, struct expr *expr)
{
  rtx pat, insn_computes_expr, expr_set;
  rtx to;
  struct reg_set *this_reg;
  int found_setting, use_src;
  int changed = 0;

  /* We only handle the case where one computation of the expression
     reaches this instruction.  */
  insn_computes_expr = computing_insn (expr, insn);
  if (insn_computes_expr == NULL)
    return 0;
  expr_set = single_set (insn_computes_expr);
  /* The set might be in a parallel with multiple sets; we could
     probably handle that, but there's currently no easy way to find
     the relevant sub-expression.  */
  if (!expr_set)
    return 0;

  found_setting = 0;
  use_src = 0;

  /* At this point we know only one computation of EXPR outside of this
     block reaches this insn.  Now try to find a register that the
     expression is computed into.  */
  if (GET_CODE (SET_SRC (expr_set)) == REG)
    {
      /* This is the case when the available expression that reaches
	 here has already been handled as an available expression.  */
      unsigned int regnum_for_replacing
	= REGNO (SET_SRC (expr_set));

      /* If the register was created by GCSE we can't use `reg_set_table',
	 however we know it's set only once.  */
      if (regnum_for_replacing >= max_gcse_regno
	  /* If the register the expression is computed into is set only once,
	     or only one set reaches this insn, we can use it.  */
	  || (((this_reg = reg_set_table[regnum_for_replacing]),
	       this_reg->next == NULL)
	      || can_disregard_other_sets (&this_reg, insn, 0)))
	{
	  use_src = 1;
	  found_setting = 1;
	}
    }

  if (!found_setting)
    {
      unsigned int regnum_for_replacing
	= REGNO (SET_DEST (expr_set));

      /* This shouldn't happen.  */
      if (regnum_for_replacing >= max_gcse_regno)
	abort ();

      this_reg = reg_set_table[regnum_for_replacing];

      /* If the register the expression is computed into is set only once,
	 or only one set reaches this insn, use it.  */
      if (this_reg->next == NULL
	  || can_disregard_other_sets (&this_reg, insn, 0))
	found_setting = 1;
    }

  if (found_setting)
    {
      pat = PATTERN (insn);
      if (use_src)
	to = SET_SRC (expr_set);
      else
	to = SET_DEST (expr_set);
      changed = validate_change (insn, &SET_SRC (pat), to, 0);

      /* We should be able to ignore the return code from validate_change but
	 to play it safe we check.  */
      if (changed)
	{
	  gcse_subst_count++;
	  if (gcse_file != NULL)
	    {
	      fprintf (gcse_file, "GCSE: Replacing the source in insn %d with",
		       INSN_UID (insn));
	      fprintf (gcse_file, " reg %d %s insn %d\n",
		       REGNO (to), use_src ? "from" : "set in",
		       INSN_UID (insn_computes_expr));
	    }
	}
    }

  /* The register that the expr is computed into is set more than once.  */
  else if (1 /*expensive_op(this_pattrn->op) && do_expensive_gcse)*/)
    {
      /* Insert an insn after insnx that copies the reg set in insnx
	 into a new pseudo register call this new register REGN.
	 From insnb until end of basic block or until REGB is set
	 replace all uses of REGB with REGN.  */
      rtx new_insn;

      to = gen_reg_rtx (GET_MODE (SET_DEST (expr_set)));

      /* Generate the new insn.  */
      /* ??? If the change fails, we return 0, even though we created
	 an insn.  I think this is ok.  */
      new_insn
	= emit_insn_after (gen_rtx_SET (VOIDmode, to,
					SET_DEST (expr_set)),
			   insn_computes_expr);

      /* Keep register set table up to date.  */
      record_one_set (REGNO (to), new_insn);

      gcse_create_count++;
      if (gcse_file != NULL)
	{
	  fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d",
		   INSN_UID (NEXT_INSN (insn_computes_expr)),
		   REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))));
	  fprintf (gcse_file, ", computed in insn %d,\n",
		   INSN_UID (insn_computes_expr));
	  fprintf (gcse_file, " into newly allocated reg %d\n",
		   REGNO (to));
	}

      pat = PATTERN (insn);

      /* Do register replacement for INSN.  */
      changed = validate_change (insn, &SET_SRC (pat),
				 SET_DEST (PATTERN
					   (NEXT_INSN (insn_computes_expr))),
				 0);

      /* We should be able to ignore the return code from validate_change but
	 to play it safe we check.  */
      if (changed)
	{
	  gcse_subst_count++;
	  if (gcse_file != NULL)
	    {
	      fprintf (gcse_file,
		       "GCSE: Replacing the source in insn %d with reg %d ",
		       INSN_UID (insn),
		       REGNO (SET_DEST (PATTERN (NEXT_INSN
						 (insn_computes_expr)))));
	      fprintf (gcse_file, "set in insn %d\n",
		       INSN_UID (insn_computes_expr));
	    }
	}
    }

  return changed;
}
/* Perform classic GCSE.  This is called by one_classic_gcse_pass after all
   the dataflow analysis has been done.

   The result is nonzero if a change was made.  */

static int
classic_gcse (void)
{
  int changed;
  rtx insn;
  basic_block bb;

  /* Note we start at block 1.  */

  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
    return 0;

  changed = 0;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
    {
      /* Reset tables used to keep track of what's still valid [since the
	 start of the block].  */
      reset_opr_set_tables ();

      for (insn = BB_HEAD (bb);
	   insn != NULL && insn != NEXT_INSN (BB_END (bb));
	   insn = NEXT_INSN (insn))
	{
	  /* Is insn of form (set (pseudo-reg) ...)?  */
	  if (GET_CODE (insn) == INSN
	      && GET_CODE (PATTERN (insn)) == SET
	      && GET_CODE (SET_DEST (PATTERN (insn))) == REG
	      && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
	    {
	      rtx pat = PATTERN (insn);
	      rtx src = SET_SRC (pat);
	      struct expr *expr;

	      if (want_to_gcse_p (src)
		  /* Is the expression recorded?  */
		  && ((expr = lookup_expr (src, &expr_hash_table)) != NULL)
		  /* Is the expression available [at the start of the
		     block]?  */
		  && TEST_BIT (ae_in[bb->index], expr->bitmap_index)
		  /* Are the operands unchanged since the start of the
		     block?  */
		  && oprs_not_set_p (src, insn))
		changed |= handle_avail_expr (insn, expr);
	    }

	  /* Keep track of everything modified by this insn.  */
	  /* ??? Need to be careful w.r.t. mods done to INSN.  */
	  if (INSN_P (insn))
	    mark_oprs_set (insn);
	}
    }

  return changed;
}

/* Top level routine to perform one classic GCSE pass.

   Return nonzero if a change was made.  */

static int
one_classic_gcse_pass (int pass)
{
  int changed = 0;

  gcse_subst_count = 0;
  gcse_create_count = 0;

  alloc_hash_table (max_cuid, &expr_hash_table, 0);
  alloc_rd_mem (last_basic_block, max_cuid);
  compute_hash_table (&expr_hash_table);
  if (gcse_file)
    dump_hash_table (gcse_file, "Expression", &expr_hash_table);

  if (expr_hash_table.n_elems > 0)
    {
      compute_kill_rd ();
      compute_rd ();
      alloc_avail_expr_mem (last_basic_block, expr_hash_table.n_elems);
      compute_ae_gen (&expr_hash_table);
      compute_ae_kill (ae_gen, ae_kill, &expr_hash_table);
      compute_available (ae_gen, ae_kill, ae_out, ae_in);
      changed = classic_gcse ();
      free_avail_expr_mem ();
    }

  free_rd_mem ();
  free_hash_table (&expr_hash_table);

  if (gcse_file)
    {
      fprintf (gcse_file, "\n");
      fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs,",
	       current_function_name (), pass, bytes_used, gcse_subst_count);
      fprintf (gcse_file, "%d insns created\n", gcse_create_count);
    }

  return changed;
}
/* Compute copy/constant propagation working variables.  */

/* Local properties of assignments.  */
static sbitmap *cprop_pavloc;
static sbitmap *cprop_absaltered;

/* Global properties of assignments (computed from the local properties).  */
static sbitmap *cprop_avin;
static sbitmap *cprop_avout;

/* Allocate vars used for copy/const propagation.  N_BLOCKS is the number of
   basic blocks.  N_SETS is the number of sets.  */

static void
alloc_cprop_mem (int n_blocks, int n_sets)
{
  cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
  cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);

  cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
  cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
}

/* Free vars used by copy/const propagation.  */

static void
free_cprop_mem (void)
{
  sbitmap_vector_free (cprop_pavloc);
  sbitmap_vector_free (cprop_absaltered);
  sbitmap_vector_free (cprop_avin);
  sbitmap_vector_free (cprop_avout);
}

/* For each block, compute whether X is transparent.  X is either an
   expression or an assignment [though we don't care which, for this context
   an assignment is treated as an expression].  For each block where an
   element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
   bit in BMAP.  */

static void
compute_transp (rtx x, int indx, sbitmap *bmap, int set_p)
{
  int i, j;
  basic_block bb;
  enum rtx_code code;
  struct reg_set *r;
  const char *fmt;

  /* repeat is used to turn tail-recursion into iteration since GCC
     can't do it when there's no return value.  */
 repeat:

  if (x == 0)
    return;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      if (set_p)
	{
	  if (REGNO (x) < FIRST_PSEUDO_REGISTER)
	    {
	      FOR_EACH_BB (bb)
		if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
		  SET_BIT (bmap[bb->index], indx);
	    }
	  else
	    {
	      for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
		SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
	    }
	}
      else
	{
	  if (REGNO (x) < FIRST_PSEUDO_REGISTER)
	    {
	      FOR_EACH_BB (bb)
		if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
		  RESET_BIT (bmap[bb->index], indx);
	    }
	  else
	    {
	      for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
		RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
	    }
	}

      return;

    case MEM:
      FOR_EACH_BB (bb)
	{
	  rtx list_entry = canon_modify_mem_list[bb->index];

	  while (list_entry)
	    {
	      rtx dest, dest_addr;

	      if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN)
		{
		  if (set_p)
		    SET_BIT (bmap[bb->index], indx);
		  else
		    RESET_BIT (bmap[bb->index], indx);
		  break;
		}
	      /* LIST_ENTRY must be an INSN of some kind that sets memory.
		 Examine each hunk of memory that is modified.  */

	      dest = XEXP (list_entry, 0);
	      list_entry = XEXP (list_entry, 1);
	      dest_addr = XEXP (list_entry, 0);

	      if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
					 x, rtx_addr_varies_p))
		{
		  if (set_p)
		    SET_BIT (bmap[bb->index], indx);
		  else
		    RESET_BIT (bmap[bb->index], indx);
		  break;
		}
	      list_entry = XEXP (list_entry, 1);
	    }
	}

      x = XEXP (x, 0);
      goto repeat;

    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = XEXP (x, i);
	      goto repeat;
	    }

	  compute_transp (XEXP (x, i), indx, bmap, set_p);
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
    }
}

/* Top level routine to do the dataflow analysis needed by copy/const
   propagation.  */

static void
compute_cprop_data (void)
{
  compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
  compute_available (cprop_pavloc, cprop_absaltered,
		     cprop_avout, cprop_avin);
}
/* Copy/constant propagation.  */

/* Maximum number of register uses in an insn that we handle.  */
#define MAX_USES 8

/* Table of uses found in an insn.
   Allocated statically to avoid alloc/free complexity and overhead.  */
static struct reg_use reg_use_table[MAX_USES];

/* Index into `reg_use_table' while building it.  */
static int reg_use_count;

/* Set up a list of register numbers used in INSN.  The found uses are stored
   in `reg_use_table'.  `reg_use_count' is initialized to zero before entry,
   and contains the number of uses in the table upon exit.

   ??? If a register appears multiple times we will record it multiple times.
   This doesn't hurt anything but it will slow things down.  */

static void
find_used_regs (rtx *xptr, void *data ATTRIBUTE_UNUSED)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;
  rtx x = *xptr;

  /* repeat is used to turn tail-recursion into iteration since GCC
     can't do it when there's no return value.  */
 repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);
  if (code == REG)
    {
      if (reg_use_count == MAX_USES)
	return;

      reg_use_table[reg_use_count].reg_rtx = x;
      reg_use_count++;
    }

  /* Recursively scan the operands of this expression.  */

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = XEXP (x, 0);
	      goto repeat;
	    }

	  find_used_regs (&XEXP (x, i), data);
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  find_used_regs (&XVECEXP (x, i, j), data);
    }
}
/* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
   Returns nonzero if successful.  */

static int
try_replace_reg (rtx from, rtx to, rtx insn)
{
  rtx note = find_reg_equal_equiv_note (insn);
  rtx src = 0;
  int success = 0;
  rtx set = single_set (insn);

  validate_replace_src_group (from, to, insn);
  if (num_changes_pending () && apply_change_group ())
    success = 1;

  /* Try to simplify SET_SRC if we have substituted a constant.  */
  if (success && set && CONSTANT_P (to))
    {
      src = simplify_rtx (SET_SRC (set));

      if (src)
	validate_change (insn, &SET_SRC (set), src, 0);
    }

  /* If there is already a NOTE, update the expression in it with our
     replacement.  */
  if (note != 0)
    XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);

  if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
    {
      /* If above failed and this is a single set, try to simplify the source of
	 the set given our substitution.  We could perhaps try this for multiple
	 SETs, but it probably won't buy us anything.  */
      src = simplify_replace_rtx (SET_SRC (set), from, to);

      if (!rtx_equal_p (src, SET_SRC (set))
	  && validate_change (insn, &SET_SRC (set), src, 0))
	success = 1;
    }

  /* If we've failed to do replacement, have a single SET, don't already
     have a note, and have no special SET, add a REG_EQUAL note to not
     lose information.  */
  if (!success && note == 0 && set != 0
      && GET_CODE (XEXP (set, 0)) != ZERO_EXTRACT
      && GET_CODE (XEXP (set, 0)) != SIGN_EXTRACT)
    note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));

  /* REG_EQUAL may get simplified into register.
     We don't allow that.  Remove that note.  This code ought
     not to happen, because previous code ought to synthesize
     reg-reg move, but be on the safe side.  */
  if (note && REG_P (XEXP (note, 0)))
    remove_note (insn, note);

  return success;
}
/* Find a set of REGNOs that are available on entry to INSN's block.  Returns
   NULL if no such set is found.  */

static struct expr *
find_avail_set (int regno, rtx insn)
{
  /* SET1 contains the last set found that can be returned to the caller for
     use in a substitution.  */
  struct expr *set1 = 0;

  /* Loops are not possible here.  To get a loop we would need two sets
     available at the start of the block containing INSN.  ie we would
     need two sets like this available at the start of the block:

       (set (reg X) (reg Y))
       (set (reg Y) (reg X))

     This can not happen since the set of (reg Y) would have killed the
     set of (reg X) making it unavailable at the start of this block.  */
  while (1)
    {
      rtx src;
      struct expr *set = lookup_set (regno, &set_hash_table);

      /* Find a set that is available at the start of the block
	 which contains INSN.  */
      while (set)
	{
	  if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
	    break;
	  set = next_set (regno, set);
	}

      /* If no available set was found we've reached the end of the
	 (possibly empty) copy chain.  */
      if (set == 0)
	break;

      if (GET_CODE (set->expr) != SET)
	abort ();

      src = SET_SRC (set->expr);

      /* We know the set is available.
	 Now check that SRC is ANTLOC (i.e. none of the source operands
	 have changed since the start of the block).

	 If the source operand changed, we may still use it for the next
	 iteration of this loop, but we may not use it for substitutions.  */

      if (gcse_constant_p (src) || oprs_not_set_p (src, insn))
	set1 = set;

      /* If the source of the set is anything except a register, then
	 we have reached the end of the copy chain.  */
      if (GET_CODE (src) != REG)
	break;

      /* Follow the copy chain, ie start another iteration of the loop
	 and see if we have an available copy into SRC.  */
      regno = REGNO (src);
    }

  /* SET1 holds the last set that was available and anticipatable at
     INSN.  */
  return set1;
}
/* Subroutine of cprop_insn that tries to propagate constants into
   JUMP_INSNS.  JUMP must be a conditional jump.  If SETCC is non-NULL
   it is the instruction that immediately precedes JUMP, and must be a
   single SET of a register.  FROM is what we will try to replace,
   SRC is the constant we will try to substitute for it.  Returns nonzero
   if a change was made.  */

static int
cprop_jump (basic_block bb, rtx setcc, rtx jump, rtx from, rtx src)
{
  rtx new, set_src, note_src;
  rtx set = pc_set (jump);
  rtx note = find_reg_equal_equiv_note (jump);

  if (note)
    {
      note_src = XEXP (note, 0);
      if (GET_CODE (note_src) == EXPR_LIST)
	note_src = NULL_RTX;
    }
  else note_src = NULL_RTX;

  /* Prefer REG_EQUAL notes except those containing EXPR_LISTs.  */
  set_src = note_src ? note_src : SET_SRC (set);

  /* First substitute the SETCC condition into the JUMP instruction,
     then substitute that given values into this expanded JUMP.  */
  if (setcc != NULL_RTX
      && !modified_between_p (from, setcc, jump)
      && !modified_between_p (src, setcc, jump))
    {
      rtx setcc_src;
      rtx setcc_set = single_set (setcc);
      rtx setcc_note = find_reg_equal_equiv_note (setcc);
      setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST)
	? XEXP (setcc_note, 0) : SET_SRC (setcc_set);
      set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set),
				      setcc_src);
    }
  else
    setcc = NULL_RTX;

  new = simplify_replace_rtx (set_src, from, src);

  /* If no simplification can be made, then try the next register.  */
  if (rtx_equal_p (new, SET_SRC (set)))
    return 0;

  /* If this is now a no-op delete it, otherwise this must be a valid insn.  */
  if (new == pc_rtx)
    delete_insn (jump);
  else
    {
      /* Ensure the value computed inside the jump insn to be equivalent
	 to one computed by setcc.  */
      if (setcc && modified_in_p (new, setcc))
	return 0;
      if (! validate_change (jump, &SET_SRC (set), new, 0))
	{
	  /* When (some) constants are not valid in a comparison, and there
	     are two registers to be replaced by constants before the entire
	     comparison can be folded into a constant, we need to keep
	     intermediate information in REG_EQUAL notes.  For targets with
	     separate compare insns, such notes are added by try_replace_reg.
	     When we have a combined compare-and-branch instruction, however,
	     we need to attach a note to the branch itself to make this
	     optimization work.  */

	  if (!rtx_equal_p (new, note_src))
	    set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new));
	  return 0;
	}

      /* Remove REG_EQUAL note after simplification.  */
      if (note_src)
	remove_note (jump, note);

      /* If this has turned into an unconditional jump,
	 then put a barrier after it so that the unreachable
	 code will be deleted.  */
      if (GET_CODE (SET_SRC (set)) == LABEL_REF)
	emit_barrier_after (jump);
    }

#ifdef HAVE_cc0
  /* Delete the cc0 setter.  */
  if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
    delete_insn (setcc);
#endif

  run_jump_opt_after_gcse = 1;

  const_prop_count++;
  if (gcse_file != NULL)
    {
      fprintf (gcse_file,
	       "CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
	       REGNO (from), INSN_UID (jump));
      print_rtl (gcse_file, src);
      fprintf (gcse_file, "\n");
    }
  purge_dead_edges (bb);

  return 1;
}
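
/* Example: if FROM is (reg:SI 100) and SRC is (const_int 0), a jump whose
   source is (if_then_else (eq (reg:SI 100) (const_int 0)) (label_ref 23)
   (pc)) simplifies to (label_ref 23) and the branch becomes unconditional;
   had it simplified to (pc) instead, the jump would be a no-op and is
   deleted.  */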
4106 constprop_register (rtx insn
, rtx from
, rtx to
, int alter_jumps
)
4110 /* Check for reg or cc0 setting instructions followed by
4111 conditional branch instructions first. */
4113 && (sset
= single_set (insn
)) != NULL
4115 && any_condjump_p (NEXT_INSN (insn
)) && onlyjump_p (NEXT_INSN (insn
)))
4117 rtx dest
= SET_DEST (sset
);
4118 if ((REG_P (dest
) || CC0_P (dest
))
4119 && cprop_jump (BLOCK_FOR_INSN (insn
), insn
, NEXT_INSN (insn
), from
, to
))
4123 /* Handle normal insns next. */
4124 if (GET_CODE (insn
) == INSN
4125 && try_replace_reg (from
, to
, insn
))
4128 /* Try to propagate a CONST_INT into a conditional jump.
4129 We're pretty specific about what we will handle in this
4130 code, we can extend this as necessary over time.
4132 Right now the insn in question must look like
4133 (set (pc) (if_then_else ...)) */
4134 else if (alter_jumps
&& any_condjump_p (insn
) && onlyjump_p (insn
))
4135 return cprop_jump (BLOCK_FOR_INSN (insn
), NULL
, insn
, from
, to
);
/* Perform constant and copy propagation on INSN.
   The result is nonzero if a change was made.  */

static int
cprop_insn (rtx insn, int alter_jumps)
{
  struct reg_use *reg_used;
  int changed = 0;
  rtx note;

  if (!INSN_P (insn))
    return 0;

  reg_use_count = 0;
  note_uses (&PATTERN (insn), find_used_regs, NULL);

  note = find_reg_equal_equiv_note (insn);

  /* We may win even when propagating constants into notes.  */
  if (note)
    find_used_regs (&XEXP (note, 0), NULL);

  for (reg_used = &reg_use_table[0]; reg_use_count > 0;
       reg_used++, reg_use_count--)
    {
      unsigned int regno = REGNO (reg_used->reg_rtx);
      rtx pat, src;
      struct expr *set;

      /* Ignore registers created by GCSE.
         We do this because ...  */
      if (regno >= max_gcse_regno)
        continue;

      /* If the register has already been set in this block, there's
         nothing we can do.  */
      if (! oprs_not_set_p (reg_used->reg_rtx, insn))
        continue;

      /* Find an assignment that sets reg_used and is available
         at the start of the block.  */
      set = find_avail_set (regno, insn);
      if (! set)
        continue;

      pat = set->expr;
      /* ??? We might be able to handle PARALLELs.  Later.  */
      if (GET_CODE (pat) != SET)
        abort ();

      src = SET_SRC (pat);

      /* Constant propagation.  */
      if (gcse_constant_p (src))
        {
          if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
            {
              changed = 1;
              const_prop_count++;
              if (gcse_file != NULL)
                {
                  fprintf (gcse_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
                  fprintf (gcse_file, "insn %d with constant ", INSN_UID (insn));
                  print_rtl (gcse_file, src);
                  fprintf (gcse_file, "\n");
                }
              if (INSN_DELETED_P (insn))
                return 1;
            }
        }
      else if (GET_CODE (src) == REG
               && REGNO (src) >= FIRST_PSEUDO_REGISTER
               && REGNO (src) != regno)
        {
          if (try_replace_reg (reg_used->reg_rtx, src, insn))
            {
              changed = 1;
              copy_prop_count++;
              if (gcse_file != NULL)
                {
                  fprintf (gcse_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
                           regno, INSN_UID (insn));
                  fprintf (gcse_file, " with reg %d\n", REGNO (src));
                }

              /* The original insn setting reg_used may or may not now be
                 deletable.  We leave the deletion to flow.  */
              /* FIXME: If it turns out that the insn isn't deletable,
                 then we may have unnecessarily extended register lifetimes
                 and made things worse.  */
            }
        }
    }

  return changed;
}
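
/* For illustration only (schematic): if the only assignment to r5 that
   reaches block B is "r5 = r3", and r3 is unchanged up to an insn
   "r7 = r5 + 1" in B, the loop above rewrites that insn as
   "r7 = r3 + 1" (copy propagation).  If instead "r5 = 42" reached B,
   the insn would become "r7 = 42 + 1" (constant propagation), possibly
   folding further.  */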
4236 /* Like find_used_regs, but avoid recording uses that appear in
4237 input-output contexts such as zero_extract or pre_dec. This
4238 restricts the cases we consider to those for which local cprop
4239 can legitimately make replacements. */
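
/* For illustration only (schematic): in

        (set (zero_extract (reg 100) (const_int 8) (const_int 0)) ...)

   or in a stack push addressed by (pre_dec (reg sp)), the register is
   read and written by the same operation, so replacing it with a
   constant or a different register would alter the destination as well
   as the value used.  Such uses are deliberately not recorded.  */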
static void
local_cprop_find_used_regs (rtx *xptr, void *data)
{
  rtx x = *xptr;

  if (x == 0)
    return;

  switch (GET_CODE (x))
    {
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
    case SET:
    case STRICT_LOW_PART:
      return;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      /* Can only legitimately appear this early in the context of
         stack pushes for function arguments, but handle all of the
         codes nonetheless.  */
      return;

    case SUBREG:
      /* Setting a subreg of a register larger than word_mode leaves
         the non-written words unchanged.  */
      if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD)
        return;
      break;

    default:
      break;
    }

  find_used_regs (xptr, data);
}
/* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
   their REG_EQUAL notes need updating.  */

static bool
do_local_cprop (rtx x, rtx insn, int alter_jumps, rtx *libcall_sp)
{
  rtx newreg = NULL, newcnst = NULL;

  /* Rule out USE instructions and ASM statements as we don't want to
     change the hard registers mentioned.  */
  if (GET_CODE (x) == REG
      && (REGNO (x) >= FIRST_PSEUDO_REGISTER
          || (GET_CODE (PATTERN (insn)) != USE
              && asm_noperands (PATTERN (insn)) < 0)))
    {
      cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
      struct elt_loc_list *l;

      if (!val)
        return false;
      for (l = val->locs; l; l = l->next)
        {
          rtx this_rtx = l->loc;
          rtx note;

          if (gcse_constant_p (this_rtx))
            newcnst = this_rtx;
          if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
              /* Don't copy propagate if it has attached REG_EQUIV note.
                 At this point only function parameters should have
                 REG_EQUIV notes, and if the argument slot is used somewhere
                 explicitly, it means the address of the parameter has been
                 taken, so we should not extend the lifetime of the
                 pseudo.  */
              && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
                  || GET_CODE (XEXP (note, 0)) != MEM))
            newreg = this_rtx;
        }
      if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
        {
          /* If we find a case where we can't fix the retval REG_EQUAL notes
             to match the new register, we either have to abandon this
             replacement or fix delete_trivially_dead_insns to preserve the
             setting insn, or make it delete the REG_EQUAL note, and fix up
             all passes that require the REG_EQUAL note there.  */
          if (!adjust_libcall_notes (x, newcnst, insn, libcall_sp))
            abort ();
          if (gcse_file != NULL)
            {
              fprintf (gcse_file, "LOCAL CONST-PROP: Replacing reg %d in ",
                       REGNO (x));
              fprintf (gcse_file, "insn %d with constant ",
                       INSN_UID (insn));
              print_rtl (gcse_file, newcnst);
              fprintf (gcse_file, "\n");
            }
          const_prop_count++;
          return true;
        }
      else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
        {
          adjust_libcall_notes (x, newreg, insn, libcall_sp);
          if (gcse_file != NULL)
            {
              fprintf (gcse_file,
                       "LOCAL COPY-PROP: Replacing reg %d in insn %d",
                       REGNO (x), INSN_UID (insn));
              fprintf (gcse_file, " with reg %d\n", REGNO (newreg));
            }
          copy_prop_count++;
          return true;
        }
    }
  return false;
}
/* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
   their REG_EQUAL notes need updating to reflect that OLDREG has been
   replaced with NEWVAL in INSN.  Return true if all substitutions could
   be performed.  */

static bool
adjust_libcall_notes (rtx oldreg, rtx newval, rtx insn, rtx *libcall_sp)
{
  rtx end;

  while ((end = *libcall_sp++))
    {
      rtx note = find_reg_equal_equiv_note (end);

      if (! note)
        continue;

      if (REG_P (newval))
        {
          if (reg_set_between_p (newval, PREV_INSN (insn), end))
            {
              do
                {
                  note = find_reg_equal_equiv_note (end);
                  if (! note)
                    continue;
                  if (reg_mentioned_p (newval, XEXP (note, 0)))
                    return false;
                }
              while ((end = *libcall_sp++));
              return true;
            }
        }
      XEXP (note, 0) = replace_rtx (XEXP (note, 0), oldreg, newval);
      insn = end;
    }
  return true;
}
#define MAX_NESTED_LIBCALLS 9

static void
local_cprop_pass (int alter_jumps)
{
  rtx insn;
  struct reg_use *reg_used;
  rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
  bool changed = false;

  cselib_init ();
  libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
  *libcall_sp = 0;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
        {
          rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);

          if (note)
            {
              if (libcall_sp == libcall_stack)
                abort ();
              *--libcall_sp = XEXP (note, 0);
            }
          note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
          if (note)
            libcall_sp++;
          note = find_reg_equal_equiv_note (insn);
          do
            {
              reg_use_count = 0;
              note_uses (&PATTERN (insn), local_cprop_find_used_regs, NULL);
              if (note)
                local_cprop_find_used_regs (&XEXP (note, 0), NULL);

              for (reg_used = &reg_use_table[0]; reg_use_count > 0;
                   reg_used++, reg_use_count--)
                if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
                                    libcall_sp))
                  {
                    changed = true;
                    break;
                  }
              if (INSN_DELETED_P (insn))
                break;
            }
          while (reg_use_count);
        }
      cselib_process_insn (insn);
    }
  cselib_finish ();
  /* Global analysis may get into infinite loops for unreachable blocks.  */
  if (changed && alter_jumps)
    {
      delete_unreachable_blocks ();
      free_reg_set_mem ();
      alloc_reg_set_mem (max_reg_num ());
      compute_sets (get_insns ());
    }
}
/* Forward propagate copies.  This includes copies and constants.  Return
   nonzero if a change was made.  */

static int
cprop (int alter_jumps)
{
  int changed;
  basic_block bb;
  rtx insn;

  /* Note we start at block 1.  */
  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
    {
      if (gcse_file != NULL)
        fprintf (gcse_file, "\n");
      return 0;
    }

  changed = 0;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
    {
      /* Reset tables used to keep track of what's still valid [since the
         start of the block].  */
      reset_opr_set_tables ();

      for (insn = BB_HEAD (bb);
           insn != NULL && insn != NEXT_INSN (BB_END (bb));
           insn = NEXT_INSN (insn))
        if (INSN_P (insn))
          {
            changed |= cprop_insn (insn, alter_jumps);

            /* Keep track of everything modified by this insn.  */
            /* ??? Need to be careful w.r.t. mods done to INSN.  Don't
               call mark_oprs_set if we turned the insn into a NOTE.  */
            if (GET_CODE (insn) != NOTE)
              mark_oprs_set (insn);
          }
    }

  if (gcse_file != NULL)
    fprintf (gcse_file, "\n");

  return changed;
}
/* Similar to get_condition, only the resulting condition must be
   valid at JUMP, instead of at EARLIEST.

   This differs from noce_get_condition in ifcvt.c in that we prefer not to
   settle for the condition variable in the jump instruction being integral.
   We prefer to be able to record the value of a user variable, rather than
   the value of a temporary used in a condition.  This could be solved by
   recording the value of *every* register scanned by canonicalize_condition,
   but this would require some code reorganization.  */
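
/* For illustration only (schematic): for a comparison whose result lives
   in a condition-code register, e.g.

        (insn (set (reg:CCZ flags) (compare (reg 100) (const_int 0))))
        (jump_insn (set (pc) (if_then_else (ne (reg:CCZ flags) (const_int 0))
                                           (label_ref 42) (pc))))

   canonicalize_condition digs back to the compare and returns something
   like (ne (reg 100) (const_int 0)), with EARLIEST pointing at the
   compare insn; the loops below then verify that reg 100 is not modified
   between EARLIEST and the jump.  */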
static rtx
fis_get_condition (rtx jump)
{
  rtx cond, set, tmp, insn, earliest;
  bool reverse;

  if (! any_condjump_p (jump))
    return NULL_RTX;

  set = pc_set (jump);
  cond = XEXP (SET_SRC (set), 0);

  /* If this branches to JUMP_LABEL when the condition is false,
     reverse the condition.  */
  reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
             && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));

  /* Use canonicalize_condition to do the dirty work of manipulating
     MODE_CC values and COMPARE rtx codes.  */
  tmp = canonicalize_condition (jump, cond, reverse, &earliest, NULL_RTX,
                                false);
  if (!tmp)
    return NULL_RTX;

  /* Verify that the given condition is valid at JUMP by virtue of not
     having been modified since EARLIEST.  */
  for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && modified_in_p (tmp, insn))
      break;
  if (insn == jump)
    return tmp;

  /* The condition was modified.  See if we can get a partial result
     that doesn't follow all the reversals.  Perhaps combine can fold
     them together later.  */
  tmp = XEXP (tmp, 0);
  if (!REG_P (tmp) || GET_MODE_CLASS (GET_MODE (tmp)) != MODE_INT)
    return NULL_RTX;
  tmp = canonicalize_condition (jump, cond, reverse, &earliest, tmp,
                                false);
  if (!tmp)
    return NULL_RTX;

  /* For sanity's sake, re-validate the new result.  */
  for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && modified_in_p (tmp, insn))
      return NULL_RTX;

  return tmp;
}
/* Check the comparison COND to see if we can safely form an implicit set from
   it.  COND is either an EQ or NE comparison.  */

static bool
implicit_set_cond_p (rtx cond)
{
  enum machine_mode mode = GET_MODE (XEXP (cond, 0));
  rtx cst = XEXP (cond, 1);

  /* We can't perform this optimization if either operand might be or might
     contain a signed zero.  */
  if (HONOR_SIGNED_ZEROS (mode))
    {
      /* It is sufficient to check if CST is or contains a zero.  We must
         handle float, complex, and vector.  If any subpart is a zero, then
         the optimization can't be performed.  */
      /* ??? The complex and vector checks are not implemented yet.  We just
         always return zero for them.  */
      if (GET_CODE (cst) == CONST_DOUBLE)
        {
          REAL_VALUE_TYPE d;

          REAL_VALUE_FROM_CONST_DOUBLE (d, cst);
          if (REAL_VALUES_EQUAL (d, dconst0))
            return 0;
        }
      else
        return 0;
    }

  return gcse_constant_p (cst);
}
/* Find the implicit sets of a function.  An "implicit set" is a constraint
   on the value of a variable, implied by a conditional jump.  For example,
   following "if (x == 2)", the then branch may be optimized as though the
   conditional performed an "explicit set", in this example, "x = 2".  This
   function records the set patterns that are implicit at the start of each
   basic block.  */

static void
find_implicit_sets (void)
{
  basic_block bb, dest;
  unsigned int count;
  rtx cond, new;

  count = 0;
  FOR_EACH_BB (bb)
    /* Check for more than one successor.  */
    if (bb->succ && bb->succ->succ_next)
      {
        cond = fis_get_condition (BB_END (bb));

        if (cond
            && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
            && GET_CODE (XEXP (cond, 0)) == REG
            && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER
            && implicit_set_cond_p (cond))
          {
            dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest
                                         : FALLTHRU_EDGE (bb)->dest;

            if (dest && ! dest->pred->pred_next
                && dest != EXIT_BLOCK_PTR)
              {
                new = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
                                   XEXP (cond, 1));
                implicit_sets[dest->index] = new;
                count++;

                if (gcse_file)
                  {
                    fprintf (gcse_file, "Implicit set of reg %d in ",
                             REGNO (XEXP (cond, 0)));
                    fprintf (gcse_file, "basic block %d\n", dest->index);
                  }
              }
          }
      }

  if (gcse_file)
    fprintf (gcse_file, "Found %d implicit sets\n", count);
}
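
/* For illustration only (schematic): in

        if (x == 2)
          y = x + 1;

   where the then block has the conditional as its single predecessor,
   the pattern (set (reg x) (const_int 2)) is recorded in implicit_sets
   for that block.  The hash table construction later treats it like a
   real assignment, so cprop can rewrite the use of x in "x + 1" to the
   constant 2.  */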
/* Perform one copy/constant propagation pass.
   PASS is the pass count.  If CPROP_JUMPS is true, perform constant
   propagation into conditional jumps.  If BYPASS_JUMPS is true,
   perform conditional jump bypassing optimizations.  */

static int
one_cprop_pass (int pass, int cprop_jumps, int bypass_jumps)
{
  int changed = 0;

  const_prop_count = 0;
  copy_prop_count = 0;

  local_cprop_pass (cprop_jumps);

  /* Determine implicit sets.  */
  implicit_sets = xcalloc (last_basic_block, sizeof (rtx));
  find_implicit_sets ();

  alloc_hash_table (max_cuid, &set_hash_table, 1);
  compute_hash_table (&set_hash_table);

  /* Free implicit_sets before peak usage.  */
  free (implicit_sets);
  implicit_sets = NULL;

  if (gcse_file)
    dump_hash_table (gcse_file, "SET", &set_hash_table);
  if (set_hash_table.n_elems > 0)
    {
      alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
      compute_cprop_data ();
      changed = cprop (cprop_jumps);
      if (bypass_jumps)
        changed |= bypass_conditional_jumps ();
      free_cprop_mem ();
    }

  free_hash_table (&set_hash_table);

  if (gcse_file)
    {
      fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
               current_function_name (), pass, bytes_used);
      fprintf (gcse_file, "%d const props, %d copy props\n\n",
               const_prop_count, copy_prop_count);
    }

  /* Global analysis may get into infinite loops for unreachable blocks.  */
  if (changed && cprop_jumps)
    delete_unreachable_blocks ();

  return changed;
}
/* Bypass conditional jumps.  */

/* The value of last_basic_block at the beginning of the jump_bypass
   pass.  The use of redirect_edge_and_branch_force may introduce new
   basic blocks, but the data flow analysis is only valid for basic
   block indices less than bypass_last_basic_block.  */

static int bypass_last_basic_block;

/* Find a set of REGNO to a constant that is available at the end of basic
   block BB.  Returns NULL if no such set is found.  Based heavily upon
   find_avail_set.  */

static struct expr *
find_bypass_set (int regno, int bb)
{
  struct expr *result = 0;

  for (;;)
    {
      rtx src;
      struct expr *set = lookup_set (regno, &set_hash_table);

      while (set)
        {
          if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
            break;
          set = next_set (regno, set);
        }

      if (set == 0)
        break;

      if (GET_CODE (set->expr) != SET)
        abort ();

      src = SET_SRC (set->expr);
      if (gcse_constant_p (src))
        result = set;

      if (GET_CODE (src) != REG)
        break;

      regno = REGNO (src);
    }
  return result;
}
/* Subroutine of bypass_block that checks whether a pseudo is killed by
   any of the instructions inserted on an edge.  Jump bypassing places
   condition code setters on CFG edges using insert_insn_on_edge.  This
   function is required to check that our data flow analysis is still
   valid prior to commit_edge_insertions.  */

static bool
reg_killed_on_edge (rtx reg, edge e)
{
  rtx insn;

  for (insn = e->insns; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return true;

  return false;
}
/* Subroutine of bypass_conditional_jumps that attempts to bypass the given
   basic block BB which has more than one predecessor.  If not NULL, SETCC
   is the first instruction of BB, which is immediately followed by JUMP_INSN
   JUMP.  Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
   Returns nonzero if a change was made.

   During the jump bypassing pass, we may place copies of SETCC instructions
   on CFG edges.  The following routine must be careful to pay attention to
   these inserted insns when performing its transformations.  */

static int
bypass_block (basic_block bb, rtx setcc, rtx jump)
{
  rtx insn, note;
  edge e, enext, edest;
  int i, change;
  int may_be_loop_header;

  insn = (setcc != NULL) ? setcc : jump;

  /* Determine set of register uses in INSN.  */
  reg_use_count = 0;
  note_uses (&PATTERN (insn), find_used_regs, NULL);
  note = find_reg_equal_equiv_note (insn);
  if (note)
    find_used_regs (&XEXP (note, 0), NULL);

  may_be_loop_header = false;
  for (e = bb->pred; e; e = e->pred_next)
    if (e->flags & EDGE_DFS_BACK)
      {
        may_be_loop_header = true;
        break;
      }

  change = 0;
  for (e = bb->pred; e; e = enext)
    {
      enext = e->pred_next;
      if (e->flags & EDGE_COMPLEX)
        continue;

      /* We can't redirect edges from new basic blocks.  */
      if (e->src->index >= bypass_last_basic_block)
        continue;

      /* The irreducible loops created by redirecting edges entering the
         loop from outside would decrease the effectiveness of some of the
         following optimizations, so prevent this.  */
      if (may_be_loop_header
          && !(e->flags & EDGE_DFS_BACK))
        continue;

      for (i = 0; i < reg_use_count; i++)
        {
          struct reg_use *reg_used = &reg_use_table[i];
          unsigned int regno = REGNO (reg_used->reg_rtx);
          basic_block dest, old_dest;
          struct expr *set;
          rtx src, new;

          if (regno >= max_gcse_regno)
            continue;

          set = find_bypass_set (regno, e->src->index);

          if (! set)
            continue;

          /* Check the data flow is valid after edge insertions.  */
          if (e->insns && reg_killed_on_edge (reg_used->reg_rtx, e))
            continue;

          src = SET_SRC (pc_set (jump));

          if (setcc != NULL)
            src = simplify_replace_rtx (src,
                                        SET_DEST (PATTERN (setcc)),
                                        SET_SRC (PATTERN (setcc)));

          new = simplify_replace_rtx (src, reg_used->reg_rtx,
                                      SET_SRC (set->expr));

          /* Jump bypassing may have already placed instructions on
             edges of the CFG.  We can't bypass an outgoing edge that
             has instructions associated with it, as these insns won't
             get executed if the incoming edge is redirected.  */

          if (new == pc_rtx)
            {
              edest = FALLTHRU_EDGE (bb);
              dest = edest->insns ? NULL : edest->dest;
            }
          else if (GET_CODE (new) == LABEL_REF)
            {
              dest = BLOCK_FOR_INSN (XEXP (new, 0));
              /* Don't bypass edges containing instructions.  */
              for (edest = bb->succ; edest; edest = edest->succ_next)
                if (edest->dest == dest && edest->insns)
                  {
                    dest = NULL;
                    break;
                  }
            }
          else
            dest = NULL;

          old_dest = e->dest;
          if (dest != NULL
              && dest != old_dest
              && dest != EXIT_BLOCK_PTR)
            {
              redirect_edge_and_branch_force (e, dest);

              /* Copy the register setter to the redirected edge.
                 Don't copy CC0 setters, as CC0 is dead after jump.  */
              if (setcc)
                {
                  rtx pat = PATTERN (setcc);
                  if (!CC0_P (SET_DEST (pat)))
                    insert_insn_on_edge (copy_insn (pat), e);
                }

              if (gcse_file != NULL)
                {
                  fprintf (gcse_file, "JUMP-BYPASS: Proved reg %d in jump_insn %d equals constant ",
                           regno, INSN_UID (jump));
                  print_rtl (gcse_file, SET_SRC (set->expr));
                  fprintf (gcse_file, "\nBypass edge from %d->%d to %d\n",
                           e->src->index, old_dest->index, dest->index);
                }
              change = 1;
            }
        }
    }
  return change;
}
4909 /* Find basic blocks with more than one predecessor that only contain a
4910 single conditional jump. If the result of the comparison is known at
4911 compile-time from any incoming edge, redirect that edge to the
4912 appropriate target. Returns nonzero if a change was made.
4914 This function is now mis-named, because we also handle indirect jumps. */
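
/* For illustration only (schematic): in

        x = 0;
        goto join;
        ...
      join:
        if (x != 0) goto L;

   the value of x is known on the edge entering JOIN from the "x = 0"
   block, so the comparison's outcome is known on that edge.  The edge
   can be redirected straight to the branch's fallthrough target,
   skipping the conditional jump for paths from that predecessor.  */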
static int
bypass_conditional_jumps (void)
{
  basic_block bb;
  int changed;
  rtx setcc;
  rtx insn;
  rtx dest;

  /* Note we start at block 1.  */
  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
    return 0;

  bypass_last_basic_block = last_basic_block;
  mark_dfs_back_edges ();

  changed = 0;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
                  EXIT_BLOCK_PTR, next_bb)
    {
      /* Check for more than one predecessor.  */
      if (bb->pred && bb->pred->pred_next)
        {
          setcc = NULL_RTX;
          for (insn = BB_HEAD (bb);
               insn != NULL && insn != NEXT_INSN (BB_END (bb));
               insn = NEXT_INSN (insn))
            if (GET_CODE (insn) == INSN)
              {
                if (setcc)
                  break;
                if (GET_CODE (PATTERN (insn)) != SET)
                  break;

                dest = SET_DEST (PATTERN (insn));
                if (REG_P (dest) || CC0_P (dest))
                  setcc = insn;
                else
                  break;
              }
            else if (GET_CODE (insn) == JUMP_INSN)
              {
                if ((any_condjump_p (insn) || computed_jump_p (insn))
                    && onlyjump_p (insn))
                  changed |= bypass_block (bb, setcc, insn);
                break;
              }
            else if (INSN_P (insn))
              break;
        }
    }

  /* If we bypassed any register setting insns, we inserted a
     copy on the redirected edge.  These need to be committed.  */
  if (changed)
    commit_edge_insertions ();

  return changed;
}
/* Compute PRE+LCM working variables.  */

/* Local properties of expressions.  */
/* Nonzero for expressions that are transparent in the block.  */
static sbitmap *transp;

/* Nonzero for expressions that are transparent at the end of the block.
   This is only zero for expressions killed by abnormal critical edges
   created by calls.  */
static sbitmap *transpout;

/* Nonzero for expressions that are computed (available) in the block.  */
static sbitmap *comp;

/* Nonzero for expressions that are locally anticipatable in the block.  */
static sbitmap *antloc;

/* Nonzero for expressions where this block is an optimal computation
   point.  */
static sbitmap *pre_optimal;

/* Nonzero for expressions which are redundant in a particular block.  */
static sbitmap *pre_redundant;

/* Nonzero for expressions which should be inserted on a specific edge.  */
static sbitmap *pre_insert_map;

/* Nonzero for expressions which should be deleted in a specific block.  */
static sbitmap *pre_delete_map;

/* Contains the edge_list returned by pre_edge_lcm.  */
static struct edge_list *edge_list;

/* Redundant insns.  */
static sbitmap pre_redundant_insns;
/* Allocate vars used for PRE analysis.  */

static void
alloc_pre_mem (int n_blocks, int n_exprs)
{
  transp = sbitmap_vector_alloc (n_blocks, n_exprs);
  comp = sbitmap_vector_alloc (n_blocks, n_exprs);
  antloc = sbitmap_vector_alloc (n_blocks, n_exprs);

  pre_optimal = NULL;
  pre_redundant = NULL;
  pre_insert_map = NULL;
  pre_delete_map = NULL;
  ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);

  /* pre_insert and pre_delete are allocated later.  */
}

/* Free vars used for PRE analysis.  */

static void
free_pre_mem (void)
{
  sbitmap_vector_free (transp);
  sbitmap_vector_free (comp);

  /* ANTLOC and AE_KILL are freed just after pre_lcm finishes.  */

  if (pre_optimal)
    sbitmap_vector_free (pre_optimal);
  if (pre_redundant)
    sbitmap_vector_free (pre_redundant);
  if (pre_insert_map)
    sbitmap_vector_free (pre_insert_map);
  if (pre_delete_map)
    sbitmap_vector_free (pre_delete_map);
  if (ae_in)
    sbitmap_vector_free (ae_in);
  if (ae_out)
    sbitmap_vector_free (ae_out);

  transp = comp = NULL;
  pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
  ae_in = ae_out = NULL;
}
/* Top level routine to do the dataflow analysis needed by PRE.  */

static void
compute_pre_data (void)
{
  sbitmap trapping_expr;
  basic_block bb;
  unsigned int ui;

  compute_local_properties (transp, comp, antloc, &expr_hash_table);
  sbitmap_vector_zero (ae_kill, last_basic_block);

  /* Collect expressions which might trap.  */
  trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
  sbitmap_zero (trapping_expr);
  for (ui = 0; ui < expr_hash_table.size; ui++)
    {
      struct expr *e;
      for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
        if (may_trap_p (e->expr))
          SET_BIT (trapping_expr, e->bitmap_index);
    }

  /* Compute ae_kill for each basic block using:

     ~(TRANSP | COMP)

     This is significantly faster than compute_ae_kill.  */

  FOR_EACH_BB (bb)
    {
      edge e;

      /* If the current block is the destination of an abnormal edge, we
         kill all trapping expressions because we won't be able to properly
         place the instruction on the edge.  So make them neither
         anticipatable nor transparent.  This is fairly conservative.  */
      for (e = bb->pred; e; e = e->pred_next)
        if (e->flags & EDGE_ABNORMAL)
          {
            sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
            sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
            break;
          }

      sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
      sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
    }

  edge_list = pre_edge_lcm (gcse_file, expr_hash_table.n_elems, transp, comp, antloc,
                            ae_kill, &pre_insert_map, &pre_delete_map);
  sbitmap_vector_free (antloc);
  antloc = NULL;
  sbitmap_vector_free (ae_kill);
  ae_kill = NULL;
  sbitmap_free (trapping_expr);
}
/* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
   block BB.

   VISITED is a pointer to a working buffer for tracking which BB's have
   been visited.  It is NULL for the top-level call.

   We treat reaching expressions that go through blocks containing the same
   reaching expression as "not reaching".  E.g. if EXPR is generated in blocks
   2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
   2 as not reaching.  The intent is to improve the probability of finding
   only one reaching expression and to reduce register lifetimes by picking
   the closest such expression.  */

static int
pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr,
                              basic_block bb, char *visited)
{
  edge pred;

  for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
    {
      basic_block pred_bb = pred->src;

      if (pred->src == ENTRY_BLOCK_PTR
          /* Has this predecessor already been visited?  */
          || visited[pred_bb->index])
        ;/* Nothing to do.  */

      /* Does this predecessor generate this expression?  */
      else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
        {
          /* Is this the occurrence we're looking for?
             Note that there's only one generating occurrence per block
             so we just need to check the block number.  */
          if (occr_bb == pred_bb)
            return 1;

          visited[pred_bb->index] = 1;
        }
      /* Ignore this predecessor if it kills the expression.  */
      else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
        visited[pred_bb->index] = 1;

      /* Neither gen nor kill.  */
      else
        {
          visited[pred_bb->index] = 1;
          if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
            return 1;
        }
    }

  /* All paths have been checked.  */
  return 0;
}

/* The wrapper for pre_expr_reaches_here_work that ensures that any
   memory allocated for that function is returned.  */

static int
pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb)
{
  int rval;
  char *visited = xcalloc (last_basic_block, 1);

  rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);

  free (visited);
  return rval;
}
/* Given an expr, generate RTL which we can insert at the end of a BB,
   or on an edge.  Set the block number of any insns generated to
   the value of BB.  */

static rtx
process_insert_insn (struct expr *expr)
{
  rtx reg = expr->reaching_reg;
  rtx exp = copy_rtx (expr->expr);
  rtx pat;

  start_sequence ();

  /* If the expression is something that's an operand, like a constant,
     just copy it to a register.  */
  if (general_operand (exp, GET_MODE (reg)))
    emit_move_insn (reg, exp);

  /* Otherwise, make a new insn to compute this expression and make sure the
     insn will be recognized (this also adds any needed CLOBBERs).  Copy the
     expression to make sure we don't have any sharing issues.  */
  else if (insn_invalid_p (emit_insn (gen_rtx_SET (VOIDmode, reg, exp))))
    abort ();

  pat = get_insns ();
  end_sequence ();

  return pat;
}
/* Add EXPR to the end of basic block BB.

   This is used by both PRE and code hoisting.

   For PRE, we want to verify that the expr is either transparent
   or locally anticipatable in the target block.  This check makes
   no sense for code hoisting.  */

static void
insert_insn_end_bb (struct expr *expr, basic_block bb, int pre)
{
  rtx insn = BB_END (bb);
  rtx new_insn;
  rtx reg = expr->reaching_reg;
  int regno = REGNO (reg);
  rtx pat, pat_end;

  pat = process_insert_insn (expr);
  if (pat == NULL_RTX || ! INSN_P (pat))
    abort ();

  pat_end = pat;
  while (NEXT_INSN (pat_end) != NULL_RTX)
    pat_end = NEXT_INSN (pat_end);

  /* If the last insn is a jump, insert EXPR in front [taking care to
     handle cc0, etc. properly].  Similarly we need to take care of
     trapping instructions in the presence of non-call exceptions.  */

  if (GET_CODE (insn) == JUMP_INSN
      || (GET_CODE (insn) == INSN
          && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL))))
    {
      rtx note;

      /* It should always be the case that we can put these instructions
         anywhere in the basic block when performing PRE optimizations.
         Check this.  */
      if (GET_CODE (insn) == INSN && pre
          && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
          && !TEST_BIT (transp[bb->index], expr->bitmap_index))
        abort ();

      /* If this is a jump table, then we can't insert stuff here.  Since
         we know the previous real insn must be the tablejump, we insert
         the new instruction just before the tablejump.  */
      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
          || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
        insn = prev_real_insn (insn);

      /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
         if cc0 isn't set.  */
      note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
      if (note)
        insn = XEXP (note, 0);
      else
        {
          rtx maybe_cc0_setter = prev_nonnote_insn (insn);
          if (maybe_cc0_setter
              && INSN_P (maybe_cc0_setter)
              && sets_cc0_p (PATTERN (maybe_cc0_setter)))
            insn = maybe_cc0_setter;
        }

      /* FIXME: What if something in cc0/jump uses value set in new insn?  */
      new_insn = emit_insn_before (pat, insn);
    }

  /* Likewise if the last insn is a call, as will happen in the presence
     of exception handling.  */
  else if (GET_CODE (insn) == CALL_INSN
           && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL)))
    {
      /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
         we search backward and place the instructions before the first
         parameter is loaded.  Do this for everyone for consistency and a
         presumption that we'll get better code elsewhere as well.

         It should always be the case that we can put these instructions
         anywhere in the basic block when performing PRE optimizations.
         Check this.  */

      if (pre
          && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
          && !TEST_BIT (transp[bb->index], expr->bitmap_index))
        abort ();

      /* Since different machines initialize their parameter registers
         in different orders, assume nothing.  Collect the set of all
         parameter registers.  */
      insn = find_first_parameter_load (insn, BB_HEAD (bb));

      /* If we found all the parameter loads, then we want to insert
         before the first parameter load.

         If we did not find all the parameter loads, then we might have
         stopped on the head of the block, which could be a CODE_LABEL.
         If we inserted before the CODE_LABEL, then we would be putting
         the insn in the wrong basic block.  In that case, put the insn
         after the CODE_LABEL.  Also, respect NOTE_INSN_BASIC_BLOCK.  */
      while (GET_CODE (insn) == CODE_LABEL
             || NOTE_INSN_BASIC_BLOCK_P (insn))
        insn = NEXT_INSN (insn);

      new_insn = emit_insn_before (pat, insn);
    }
  else
    new_insn = emit_insn_after (pat, insn);

  while (1)
    {
      if (INSN_P (pat))
        {
          add_label_notes (PATTERN (pat), new_insn);
          note_stores (PATTERN (pat), record_set_info, pat);
        }
      if (pat == pat_end)
        break;
      pat = NEXT_INSN (pat);
    }

  gcse_create_count++;

  if (gcse_file)
    {
      fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
               bb->index, INSN_UID (new_insn));
      fprintf (gcse_file, "copying expression %d to reg %d\n",
               expr->bitmap_index, regno);
    }
}
5355 /* Insert partially redundant expressions on edges in the CFG to make
5356 the expressions fully redundant. */
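
/* For illustration only (schematic): in

        if (c)
          x = a + b;
        y = a + b;

   the computation of a + b after the join is partially redundant.  LCM
   marks the edge entering the join from the arm that does not compute
   a + b; inserting "reaching_reg = a + b" on that edge makes the later
   computation fully redundant, so it can be replaced by a copy from
   reaching_reg.  */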
static int
pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
{
  int e, i, j, num_edges, set_size, did_insert = 0;
  sbitmap *inserted;

  /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
     if it reaches any of the deleted expressions.  */

  set_size = pre_insert_map[0]->size;
  num_edges = NUM_EDGES (edge_list);
  inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
  sbitmap_vector_zero (inserted, num_edges);

  for (e = 0; e < num_edges; e++)
    {
      int indx;
      basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);

      for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
        {
          SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];

          for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
            if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
              {
                struct expr *expr = index_map[j];
                struct occr *occr;

                /* Now look at each deleted occurrence of this expression.  */
                for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
                  {
                    if (! occr->deleted_p)
                      continue;

                    /* Insert this expression on this edge if it would
                       reach the deleted occurrence in BB.  */
                    if (!TEST_BIT (inserted[e], j))
                      {
                        rtx insn;
                        edge eg = INDEX_EDGE (edge_list, e);

                        /* We can't insert anything on an abnormal and
                           critical edge, so we insert the insn at the end of
                           the previous block.  There are several alternatives
                           detailed in Morgan's book P277 (sec 10.5) for
                           handling this situation.  This one is easiest for
                           now.  */

                        if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
                          insert_insn_end_bb (index_map[j], bb, 0);
                        else
                          {
                            insn = process_insert_insn (index_map[j]);
                            insert_insn_on_edge (insn, eg);
                          }

                        if (gcse_file)
                          {
                            fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
                                     bb->index,
                                     INDEX_EDGE_SUCC_BB (edge_list, e)->index);
                            fprintf (gcse_file, "copy expression %d\n",
                                     expr->bitmap_index);
                          }

                        update_ld_motion_stores (expr);
                        SET_BIT (inserted[e], j);
                        did_insert = 1;
                        gcse_create_count++;
                      }
                  }
              }
        }
    }

  sbitmap_vector_free (inserted);
  return did_insert;
}
/* Copy the result of EXPR->EXPR generated by INSN to EXPR->REACHING_REG.
   Given "old_reg <- expr" (INSN), instead of adding after it
     reaching_reg <- old_reg
   it's better to do the following:
     reaching_reg <- expr
     old_reg <- reaching_reg
   because this way copy propagation can discover additional PRE
   opportunities.  But if this fails, we try the old way.
   When "expr" is a store, i.e.
   given "MEM <- old_reg", instead of adding after it
     reaching_reg <- old_reg
   it's better to add it before as follows:
     reaching_reg <- old_reg
     MEM <- reaching_reg.  */

static void
pre_insert_copy_insn (struct expr *expr, rtx insn)
{
  rtx reg = expr->reaching_reg;
  int regno = REGNO (reg);
  int indx = expr->bitmap_index;
  rtx pat = PATTERN (insn);
  rtx set, new_insn;
  rtx old_reg;
  int i;

  /* This block matches the logic in hash_scan_insn.  */
  if (GET_CODE (pat) == SET)
    set = pat;
  else if (GET_CODE (pat) == PARALLEL)
    {
      /* Search through the parallel looking for the set whose
         source was the expression that we're interested in.  */
      set = NULL_RTX;
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx x = XVECEXP (pat, 0, i);
          if (GET_CODE (x) == SET
              && expr_equiv_p (SET_SRC (x), expr->expr))
            {
              set = x;
              break;
            }
        }
    }
  else
    abort ();

  if (GET_CODE (SET_DEST (set)) == REG)
    {
      old_reg = SET_DEST (set);
      /* Check if we can modify the set destination in the original insn.  */
      if (validate_change (insn, &SET_DEST (set), reg, 0))
        {
          new_insn = gen_move_insn (old_reg, reg);
          new_insn = emit_insn_after (new_insn, insn);

          /* Keep register set table up to date.  */
          replace_one_set (REGNO (old_reg), insn, new_insn);
          record_one_set (regno, insn);
        }
      else
        {
          new_insn = gen_move_insn (reg, old_reg);
          new_insn = emit_insn_after (new_insn, insn);

          /* Keep register set table up to date.  */
          record_one_set (regno, new_insn);
        }
    }
  else /* This is possible only in case of a store to memory.  */
    {
      old_reg = SET_SRC (set);
      new_insn = gen_move_insn (reg, old_reg);

      /* Check if we can modify the set source in the original insn.  */
      if (validate_change (insn, &SET_SRC (set), reg, 0))
        new_insn = emit_insn_before (new_insn, insn);
      else
        new_insn = emit_insn_after (new_insn, insn);

      /* Keep register set table up to date.  */
      record_one_set (regno, new_insn);
    }

  gcse_create_count++;

  if (gcse_file)
    fprintf (gcse_file,
             "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
             BLOCK_NUM (insn), INSN_UID (new_insn), indx,
             INSN_UID (insn), regno);
}
/* Copy available expressions that reach the redundant expression
   to `reaching_reg'.  */

static void
pre_insert_copies (void)
{
  unsigned int i, added_copy;
  struct expr *expr;
  struct occr *occr;
  struct occr *avail;

  /* For each available expression in the table, copy the result to
     `reaching_reg' if the expression reaches a deleted one.

     ??? The current algorithm is rather brute force.
     Need to do some profiling.  */

  for (i = 0; i < expr_hash_table.size; i++)
    for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
      {
        /* If the basic block isn't reachable, PPOUT will be TRUE.  However,
           we don't want to insert a copy here because the expression may not
           really be redundant.  So only insert an insn if the expression was
           deleted.  This test also avoids further processing if the
           expression wasn't deleted anywhere.  */
        if (expr->reaching_reg == NULL)
          continue;

        /* Set when we add a copy for that expression.  */
        added_copy = 0;

        for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
          {
            if (! occr->deleted_p)
              continue;

            for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
              {
                rtx insn = avail->insn;

                /* No need to handle this one if handled already.  */
                if (avail->copied_p)
                  continue;

                /* Don't handle this one if it's a redundant one.  */
                if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
                  continue;

                /* Or if the expression doesn't reach the deleted one.  */
                if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
                                               expr,
                                               BLOCK_FOR_INSN (occr->insn)))
                  continue;

                added_copy = 1;

                /* Copy the result of avail to reaching_reg.  */
                pre_insert_copy_insn (expr, insn);
                avail->copied_p = 1;
              }
          }

        if (added_copy)
          update_ld_motion_stores (expr);
      }
}
/* Emit move from SRC to DEST noting the equivalence with expression computed
   in INSN.  */

static rtx
gcse_emit_move_after (rtx src, rtx dest, rtx insn)
{
  rtx new;
  rtx set = single_set (insn), set2;
  rtx note;
  rtx eqv;

  /* This should never fail since we're creating a reg->reg copy
     we've verified to be valid.  */

  new = emit_insn_after (gen_move_insn (dest, src), insn);

  /* Note the equivalence for local CSE pass.  */
  set2 = single_set (new);
  if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
    return new;
  if ((note = find_reg_equal_equiv_note (insn)))
    eqv = XEXP (note, 0);
  else
    eqv = SET_SRC (set);

  set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv));

  return new;
}
/* Delete redundant computations.
   Deletion is done by changing the insn to copy the `reaching_reg' of
   the expression into the result of the SET.  It is left to later passes
   (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.

   Returns nonzero if a change is made.  */

static int
pre_delete (void)
{
  unsigned int i;
  int changed;
  struct expr *expr;
  struct occr *occr;

  changed = 0;
  for (i = 0; i < expr_hash_table.size; i++)
    for (expr = expr_hash_table.table[i];
         expr != NULL;
         expr = expr->next_same_hash)
      {
        int indx = expr->bitmap_index;

        /* We only need to search antic_occr since we require
           ANTLOC != 0.  */

        for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
          {
            rtx insn = occr->insn;
            rtx set;
            basic_block bb = BLOCK_FOR_INSN (insn);

            /* We only delete insns that have a single_set.  */
            if (TEST_BIT (pre_delete_map[bb->index], indx)
                && (set = single_set (insn)) != 0)
              {
                /* Create a pseudo-reg to store the result of reaching
                   expressions into.  Get the mode for the new pseudo from
                   the mode of the original destination pseudo.  */
                if (expr->reaching_reg == NULL)
                  expr->reaching_reg
                    = gen_reg_rtx (GET_MODE (SET_DEST (set)));

                gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
                delete_insn (insn);
                occr->deleted_p = 1;
                SET_BIT (pre_redundant_insns, INSN_CUID (insn));
                changed = 1;
                gcse_subst_count++;

                if (gcse_file)
                  {
                    fprintf (gcse_file,
                             "PRE: redundant insn %d (expression %d) in ",
                             INSN_UID (insn), indx);
                    fprintf (gcse_file, "bb %d, reaching reg is %d\n",
                             bb->index, REGNO (expr->reaching_reg));
                  }
              }
          }
      }

  return changed;
}
/* Perform GCSE optimizations using PRE.
   This is called by one_pre_gcse_pass after all the dataflow analysis
   has been done.

   This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
   lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
   Compiler Design and Implementation.

   ??? A new pseudo reg is created to hold the reaching expression.  The nice
   thing about the classical approach is that it would try to use an existing
   reg.  If the register can't be adequately optimized [i.e. we introduce
   reload problems], one could add a pass here to propagate the new register
   through the block.

   ??? We don't handle single sets in PARALLELs because we're [currently] not
   able to copy the rest of the parallel when we insert copies to create full
   redundancies from partial redundancies.  However, there's no reason why we
   can't handle PARALLELs in the cases where there are no partial
   redundancies.  */

static int
pre_gcse (void)
{
  unsigned int i;
  int did_insert, changed;
  struct expr **index_map;
  struct expr *expr;

  /* Compute a mapping from expression number (`bitmap_index') to
     hash table entry.  */

  index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
  for (i = 0; i < expr_hash_table.size; i++)
    for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
      index_map[expr->bitmap_index] = expr;

  /* Reset bitmap used to track which insns are redundant.  */
  pre_redundant_insns = sbitmap_alloc (max_cuid);
  sbitmap_zero (pre_redundant_insns);

  /* Delete the redundant insns first so that
     - we know what register to use for the new insns and for the other
       ones with reaching expressions
     - we know which insns are redundant when we go to create copies  */

  changed = pre_delete ();

  did_insert = pre_edge_insert (edge_list, index_map);

  /* In other places with reaching expressions, copy the expression to the
     specially allocated pseudo-reg that reaches the redundant expr.  */
  pre_insert_copies ();
  if (did_insert)
    {
      commit_edge_insertions ();
      changed = 1;
    }

  free (index_map);
  sbitmap_free (pre_redundant_insns);
  return changed;
}
/* Top level routine to perform one PRE GCSE pass.

   Return nonzero if a change was made.  */

static int
one_pre_gcse_pass (int pass)
{
  int changed = 0;

  gcse_subst_count = 0;
  gcse_create_count = 0;

  alloc_hash_table (max_cuid, &expr_hash_table, 0);
  add_noreturn_fake_exit_edges ();
  compute_ld_motion_mems ();

  compute_hash_table (&expr_hash_table);
  trim_ld_motion_mems ();
  if (gcse_file)
    dump_hash_table (gcse_file, "Expression", &expr_hash_table);

  if (expr_hash_table.n_elems > 0)
    {
      alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
      compute_pre_data ();
      changed |= pre_gcse ();
      free_edge_list (edge_list);
      free_pre_mem ();
    }

  remove_fake_edges ();
  free_hash_table (&expr_hash_table);

  if (gcse_file)
    {
      fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
               current_function_name (), pass, bytes_used);
      fprintf (gcse_file, "%d substs, %d insns created\n",
               gcse_subst_count, gcse_create_count);
    }

  return changed;
}
/* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
   If notes are added to an insn which references a CODE_LABEL, the
   LABEL_NUSES count is incremented.  We have to add REG_LABEL notes,
   because the following loop optimization pass requires them.  */

/* ??? This is very similar to the loop.c add_label_notes function.  We
   could probably share code here.  */

/* ??? If there was a jump optimization pass after gcse and before loop,
   then we would not need to do this here, because jump would add the
   necessary REG_LABEL notes.  */

static void
add_label_notes (rtx x, rtx insn)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  const char *fmt;

  if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
    {
      /* This code used to ignore labels that referred to dispatch tables to
         avoid flow generating (slightly) worse code.

         We no longer ignore such label references (see LABEL_REF handling in
         mark_jump_label for additional information).  */

      REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
                                            REG_NOTES (insn));
      if (LABEL_P (XEXP (x, 0)))
        LABEL_NUSES (XEXP (x, 0))++;

      return;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        add_label_notes (XEXP (x, i), insn);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          add_label_notes (XVECEXP (x, i, j), insn);
    }
}
/* Compute transparent outgoing information for each block.

   An expression is transparent to an edge unless it is killed by
   the edge itself.  This can only happen with abnormal control flow,
   when the edge is traversed through a call.  This happens with
   non-local labels and exceptions.

   This would not be necessary if we split the edge.  While this is
   normally impossible for abnormal critical edges, with some effort
   it should be possible with exception handling, since we still have
   control over which handler should be invoked.  But due to increased
   EH table sizes, this may not be worthwhile.  */

static void
compute_transpout (void)
{
  basic_block bb;
  unsigned int i;
  struct expr *expr;

  sbitmap_vector_ones (transpout, last_basic_block);

  FOR_EACH_BB (bb)
    {
      /* Note that flow inserted a nop at the end of basic blocks that
         end in call instructions for reasons other than abnormal
         control flow.  */
      if (GET_CODE (BB_END (bb)) != CALL_INSN)
        continue;

      for (i = 0; i < expr_hash_table.size; i++)
        for (expr = expr_hash_table.table[i]; expr; expr = expr->next_same_hash)
          if (GET_CODE (expr->expr) == MEM)
            {
              if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
                  && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
                continue;

              /* ??? Optimally, we would use interprocedural alias
                 analysis to determine if this mem is actually killed
                 by this call.  */
              RESET_BIT (transpout[bb->index], expr->bitmap_index);
            }
    }
}
/* Removal of useless null pointer checks */

/* Called via note_stores.  X is set by SETTER.  If X is a register we must
   invalidate nonnull_local and set nonnull_killed.  DATA is really a
   `null_pointer_info *'.

   We ignore hard registers.  */

static void
invalidate_nonnull_info (rtx x, rtx setter ATTRIBUTE_UNUSED, void *data)
{
  unsigned int regno;
  struct null_pointer_info *npi = (struct null_pointer_info *) data;

  while (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  /* Ignore anything that is not a register or is a hard register.  */
  if (GET_CODE (x) != REG
      || REGNO (x) < npi->min_reg
      || REGNO (x) >= npi->max_reg)
    return;

  regno = REGNO (x) - npi->min_reg;

  RESET_BIT (npi->nonnull_local[npi->current_block->index], regno);
  SET_BIT (npi->nonnull_killed[npi->current_block->index], regno);
}
/* Do null-pointer check elimination for the registers indicated in
   NPI.  NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps;
   they are not our responsibility to free.  */

static int
delete_null_pointer_checks_1 (unsigned int *block_reg, sbitmap *nonnull_avin,
                              sbitmap *nonnull_avout,
                              struct null_pointer_info *npi)
{
  basic_block bb, current_block;
  sbitmap *nonnull_local = npi->nonnull_local;
  sbitmap *nonnull_killed = npi->nonnull_killed;
  int something_changed = 0;

  /* Compute local properties, nonnull and killed.  A register will have
     the nonnull property if at the end of the current block its value is
     known to be nonnull.  The killed property indicates that somewhere in
     the block any information we had about the register is killed.

     Note that a register can have both properties in a single block.  That
     indicates that it's killed, then later in the block a new value is
     computed.  */
  sbitmap_vector_zero (nonnull_local, last_basic_block);
  sbitmap_vector_zero (nonnull_killed, last_basic_block);

  FOR_EACH_BB (current_block)
    {
      rtx insn, stop_insn;

      /* Set the current block for invalidate_nonnull_info.  */
      npi->current_block = current_block;

      /* Scan each insn in the basic block looking for memory references and
         register sets.  */
      stop_insn = NEXT_INSN (BB_END (current_block));
      for (insn = BB_HEAD (current_block);
           insn != stop_insn;
           insn = NEXT_INSN (insn))
        {
          rtx set;
          rtx reg;

          /* Ignore anything that is not a normal insn.  */
          if (! INSN_P (insn))
            continue;

          /* Basically ignore anything that is not a simple SET.  We do have
             to make sure to invalidate nonnull_local and set nonnull_killed
             for such insns though.  */
          set = single_set (insn);
          if (!set)
            {
              note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
              continue;
            }

          /* See if we've got a usable memory load.  We handle it first
             in case it uses its address register as a dest (which kills
             the nonnull property).  */
          if (GET_CODE (SET_SRC (set)) == MEM
              && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG
              && REGNO (reg) >= npi->min_reg
              && REGNO (reg) < npi->max_reg)
            SET_BIT (nonnull_local[current_block->index],
                     REGNO (reg) - npi->min_reg);

          /* Now invalidate stuff clobbered by this insn.  */
          note_stores (PATTERN (insn), invalidate_nonnull_info, npi);

          /* And handle stores, we do these last since any sets in INSN can
             not kill the nonnull property if it is derived from a MEM
             appearing in a SET_DEST.  */
          if (GET_CODE (SET_DEST (set)) == MEM
              && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG
              && REGNO (reg) >= npi->min_reg
              && REGNO (reg) < npi->max_reg)
            SET_BIT (nonnull_local[current_block->index],
                     REGNO (reg) - npi->min_reg);
        }
    }

  /* Now compute global properties based on the local properties.  This
     is a classic global availability algorithm.  */
  compute_available (nonnull_local, nonnull_killed,
                     nonnull_avout, nonnull_avin);

  /* Now look at each bb and see if it ends with a compare of a value
     against zero.  */
  FOR_EACH_BB (bb)
    {
      rtx last_insn = BB_END (bb);
      rtx condition, earliest;
      int compare_and_branch;

      /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
         since BLOCK_REG[BB] is zero if this block did not end with a
         comparison against zero, this condition works.  */
      if (block_reg[bb->index] < npi->min_reg
          || block_reg[bb->index] >= npi->max_reg)
        continue;

      /* LAST_INSN is a conditional jump.  Get its condition.  */
      condition = get_condition (last_insn, &earliest, false);

      /* If we can't determine the condition then skip.  */
      if (! condition)
        continue;

      /* Is the register known to have a nonzero value?  */
      if (!TEST_BIT (nonnull_avout[bb->index], block_reg[bb->index] - npi->min_reg))
        continue;

      /* Try to compute whether the compare/branch at the loop end is one or
         two instructions.  */
      if (earliest == last_insn)
        compare_and_branch = 1;
      else if (earliest == prev_nonnote_insn (last_insn))
        compare_and_branch = 2;
      else
        continue;

      /* We know the register in this comparison is nonnull at exit from
         this block.  We can optimize this comparison.  */
      if (GET_CODE (condition) == NE)
        {
          rtx new_jump;

          new_jump = emit_jump_insn_after (gen_jump (JUMP_LABEL (last_insn)),
                                           last_insn);
          JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
          LABEL_NUSES (JUMP_LABEL (new_jump))++;
          emit_barrier_after (new_jump);
        }

      something_changed = 1;
      delete_insn (last_insn);
      if (compare_and_branch == 2)
        delete_insn (earliest);
      purge_dead_edges (bb);

      /* Don't check this block again.  (Note that BB_END is
         invalid here; we deleted the last instruction in the
         block.)  */
      block_reg[bb->index] = 0;
    }

  return something_changed;
}
/* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
   at compile time.

   This is conceptually similar to global constant/copy propagation and
   classic global CSE (it even uses the same dataflow equations as cprop).

   If a register is used as memory address with the form (mem (reg)), then we
   know that REG can not be zero at that point in the program.  Any instruction
   which sets REG "kills" this property.

   So, if every path leading to a conditional branch has an available memory
   reference of that form, then we know the register can not have the value
   zero at the conditional branch.

   So we merely need to compute the local properties and propagate that data
   around the cfg, then optimize where possible.

   We run this pass two times.  Once before CSE, then again after CSE.  This
   has proven to be the most profitable approach.  It is rare for new
   optimization opportunities of this nature to appear after the first CSE
   pass has run.

   This could probably be integrated with global cprop with a little work.  */
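
/* For illustration only (schematic): in

        tmp = *p;
        ...
        if (p == 0) goto error;

   the load through p proves p is nonnull; if that property is not killed
   on any path to the test, the comparison's outcome is known, so the EQ
   branch can be deleted (or, for an NE test, turned into an unconditional
   jump) by delete_null_pointer_checks_1 above.  */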
int
delete_null_pointer_checks (rtx f ATTRIBUTE_UNUSED)
{
  sbitmap *nonnull_avin, *nonnull_avout;
  unsigned int *block_reg;
  basic_block bb;
  int reg;
  int regs_per_pass;
  int max_reg = max_reg_num ();
  struct null_pointer_info npi;
  int something_changed = 0;

  /* If we have only a single block, or it is too expensive, give up.  */
  if (n_basic_blocks <= 1
      || is_too_expensive (_ ("NULL pointer checks disabled")))
    return 0;

  /* We need four bitmaps, each with a bit for each register in each
     basic block.  */
  regs_per_pass = get_bitmap_width (4, last_basic_block, max_reg);

  /* Allocate bitmaps to hold local and global properties.  */
  npi.nonnull_local = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
  npi.nonnull_killed = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
  nonnull_avin = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
  nonnull_avout = sbitmap_vector_alloc (last_basic_block, regs_per_pass);

  /* Go through the basic blocks, seeing whether or not each block
     ends with a conditional branch whose condition is a comparison
     against zero.  Record the register compared in BLOCK_REG.  */
  block_reg = xcalloc (last_basic_block, sizeof (int));
  FOR_EACH_BB (bb)
    {
      rtx last_insn = BB_END (bb);
      rtx condition, earliest, reg;

      /* We only want conditional branches.  */
      if (GET_CODE (last_insn) != JUMP_INSN
          || !any_condjump_p (last_insn)
          || !onlyjump_p (last_insn))
        continue;

      /* LAST_INSN is a conditional jump.  Get its condition.  */
      condition = get_condition (last_insn, &earliest, false);

      /* If we were unable to get the condition, or it is not an equality
         comparison against zero then there's nothing we can do.  */
      if (!condition
          || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
          || GET_CODE (XEXP (condition, 1)) != CONST_INT
          || (XEXP (condition, 1)
              != CONST0_RTX (GET_MODE (XEXP (condition, 0)))))
        continue;

      /* We must be checking a register against zero.  */
      reg = XEXP (condition, 0);
      if (GET_CODE (reg) != REG)
        continue;

      block_reg[bb->index] = REGNO (reg);
    }

  /* Go through the algorithm for each block of registers.  */
  for (reg = FIRST_PSEUDO_REGISTER; reg < max_reg; reg += regs_per_pass)
    {
      npi.min_reg = reg;
      npi.max_reg = MIN (reg + regs_per_pass, max_reg);
      something_changed |= delete_null_pointer_checks_1 (block_reg,
                                                         nonnull_avin,
                                                         nonnull_avout,
                                                         &npi);
    }

  /* Free the table of registers compared at the end of every block.  */
  free (block_reg);

  /* Free bitmaps.  */
  sbitmap_vector_free (npi.nonnull_local);
  sbitmap_vector_free (npi.nonnull_killed);
  sbitmap_vector_free (nonnull_avin);
  sbitmap_vector_free (nonnull_avout);

  return something_changed;
}
/* Code Hoisting variables and subroutines.  */

/* Very busy expressions.  */
static sbitmap *hoist_vbein;
static sbitmap *hoist_vbeout;

/* Hoistable expressions.  */
static sbitmap *hoist_exprs;

/* ??? We could compute post dominators and run this algorithm in
   reverse to perform tail merging; doing so would probably be
   more effective than the tail merging code in jump.c.

   It's unclear if tail merging could be run in parallel with
   code hoisting.  It would be nice.  */

/* Allocate vars used for code hoisting analysis.  */

static void
alloc_code_hoist_mem (int n_blocks, int n_exprs)
{
  antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
  transp = sbitmap_vector_alloc (n_blocks, n_exprs);
  comp = sbitmap_vector_alloc (n_blocks, n_exprs);

  hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
  hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
  hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
  transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
}

/* Free vars used for code hoisting analysis.  */

static void
free_code_hoist_mem (void)
{
  sbitmap_vector_free (antloc);
  sbitmap_vector_free (transp);
  sbitmap_vector_free (comp);

  sbitmap_vector_free (hoist_vbein);
  sbitmap_vector_free (hoist_vbeout);
  sbitmap_vector_free (hoist_exprs);
  sbitmap_vector_free (transpout);

  free_dominance_info (CDI_DOMINATORS);
}
6227 /* Compute the very busy expressions at entry/exit from each block.
6229 An expression is very busy if all paths from a given point
6230 compute the expression. */
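
/* The dataflow equations solved by the fixed-point loop below, written
   schematically (VBEIN/VBEOUT are hoist_vbein/hoist_vbeout):

        VBEIN[b]  = ANTLOC[b] | (VBEOUT[b] & TRANSP[b])
        VBEOUT[b] = intersection over all successors s of b of VBEIN[s]
                    (empty for the block feeding EXIT)

   iterated over the blocks in reverse order until nothing changes.  */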
static void
compute_code_hoist_vbeinout (void)
{
  int changed, passes;
  basic_block bb;

  sbitmap_vector_zero (hoist_vbeout, last_basic_block);
  sbitmap_vector_zero (hoist_vbein, last_basic_block);

  passes = 0;
  changed = 1;

  while (changed)
    {
      changed = 0;

      /* We scan the blocks in the reverse order to speed up
         the convergence.  */
      FOR_EACH_BB_REVERSE (bb)
        {
          changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
                                              hoist_vbeout[bb->index], transp[bb->index]);
          if (bb->next_bb != EXIT_BLOCK_PTR)
            sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
        }

      passes++;
    }

  if (gcse_file)
    fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
}

/* Top level routine to do the dataflow analysis needed by code hoisting.  */

static void
compute_code_hoist_data (void)
{
  compute_local_properties (transp, comp, antloc, &expr_hash_table);
  compute_transpout ();
  compute_code_hoist_vbeinout ();
  calculate_dominance_info (CDI_DOMINATORS);
  if (gcse_file)
    fprintf (gcse_file, "\n");
}
/* Determine if the expression identified by EXPR_INDEX would
   reach BB unimpaired if it was placed at the end of EXPR_BB.

   It's unclear exactly what Muchnick meant by "unimpaired".  It seems
   to me that the expression must either be computed or transparent in
   *every* block in the path(s) from EXPR_BB to BB.  Any other definition
   would allow the expression to be hoisted out of loops, even if
   the expression wasn't a loop invariant.

   Contrast this to reachability for PRE where an expression is
   considered reachable if *any* path reaches instead of *all*
   paths.  */
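
/* As an illustrative sketch: if only the 'then' arm of a conditional
   computes 'a + b' before control reaches BB, PRE would already consider
   the expression reachable, but for hoisting it must also be computed or
   transparent along the 'else' arm.  */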
static int
hoist_expr_reaches_here_p (basic_block expr_bb, int expr_index,
			   basic_block bb, char *visited)
{
  edge pred;
  int visited_allocated_locally = 0;

  if (visited == NULL)
    {
      visited_allocated_locally = 1;
      visited = xcalloc (last_basic_block, 1);
    }

  for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
    {
      basic_block pred_bb = pred->src;

      if (pred->src == ENTRY_BLOCK_PTR)
	break;
      else if (pred_bb == expr_bb)
	continue;
      else if (visited[pred_bb->index])
	continue;

      /* Does this predecessor generate this expression?  */
      else if (TEST_BIT (comp[pred_bb->index], expr_index))
	break;
      else if (! TEST_BIT (transp[pred_bb->index], expr_index))
	break;

      /* Not killed.  */
      else
	{
	  visited[pred_bb->index] = 1;
	  if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
					   pred_bb, visited))
	    break;
	}
    }
  if (visited_allocated_locally)
    free (visited);

  return (pred == NULL);
}
/* Actually perform code hoisting.  */

static void
hoist_code (void)
{
  basic_block bb, dominated;
  basic_block *domby;
  unsigned int domby_len;
  unsigned int i, j;
  struct expr **index_map;
  struct expr *expr;

  sbitmap_vector_zero (hoist_exprs, last_basic_block);

  /* Compute a mapping from expression number (`bitmap_index') to
     hash table entry.  */

  index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
  for (i = 0; i < expr_hash_table.size; i++)
    for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
      index_map[expr->bitmap_index] = expr;

  /* Walk over each basic block looking for potentially hoistable
     expressions, nothing gets hoisted from the entry block.  */
  FOR_EACH_BB (bb)
    {
      int found = 0;
      int insn_inserted_p;

      domby_len = get_dominated_by (CDI_DOMINATORS, bb, &domby);
      /* Examine each expression that is very busy at the exit of this
	 block.  These are the potentially hoistable expressions.  */
      for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
	{
	  int hoistable = 0;

	  if (TEST_BIT (hoist_vbeout[bb->index], i)
	      && TEST_BIT (transpout[bb->index], i))
	    {
	      /* We've found a potentially hoistable expression, now
		 we look at every block BB dominates to see if it
		 computes the expression.  */
	      for (j = 0; j < domby_len; j++)
		{
		  dominated = domby[j];
		  /* Ignore self dominance.  */
		  if (bb == dominated)
		    continue;
		  /* We've found a dominated block, now see if it computes
		     the busy expression and whether or not moving that
		     expression to the "beginning" of that block is safe.  */
		  if (!TEST_BIT (antloc[dominated->index], i))
		    continue;

		  /* Note if the expression would reach the dominated block
		     unimpaired if it was placed at the end of BB.

		     Keep track of how many times this expression is hoistable
		     from a dominated block into BB.  */
		  if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
		    hoistable++;
		}

	      /* If we found more than one hoistable occurrence of this
		 expression, then note it in the bitmap of expressions to
		 hoist.  It makes no sense to hoist things which are computed
		 in only one BB, and doing so tends to pessimize register
		 allocation.  One could increase this value to try harder
		 to avoid any possible code expansion due to register
		 allocation issues; however experiments have shown that
		 the vast majority of hoistable expressions are only movable
		 from two successors, so raising this threshold is likely
		 to nullify any benefit we get from code hoisting.  */
	      if (hoistable > 1)
		{
		  SET_BIT (hoist_exprs[bb->index], i);
		  found = 1;
		}
	    }
	}

      /* If we found nothing to hoist, then quit now.  */
      if (! found)
	{
	  free (domby);
	  continue;
	}

      /* Loop over all the hoistable expressions.  */
      for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
	{
	  /* We want to insert the expression into BB only once, so
	     note when we've inserted it.  */
	  insn_inserted_p = 0;

	  /* These tests should be the same as the tests above.  */
	  if (TEST_BIT (hoist_vbeout[bb->index], i))
	    {
	      /* We've found a potentially hoistable expression, now
		 we look at every block BB dominates to see if it
		 computes the expression.  */
	      for (j = 0; j < domby_len; j++)
		{
		  dominated = domby[j];
		  /* Ignore self dominance.  */
		  if (bb == dominated)
		    continue;

		  /* We've found a dominated block, now see if it computes
		     the busy expression and whether or not moving that
		     expression to the "beginning" of that block is safe.  */
		  if (!TEST_BIT (antloc[dominated->index], i))
		    continue;

		  /* The expression is computed in the dominated block and
		     it would be safe to compute it at the start of the
		     dominated block.  Now we have to determine if the
		     expression would reach the dominated block if it was
		     placed at the end of BB.  */
		  if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
		    {
		      struct expr *expr = index_map[i];
		      struct occr *occr = expr->antic_occr;
		      rtx insn;
		      rtx set;

		      /* Find the right occurrence of this expression.
			 Test OCCR before dereferencing it.  */
		      while (occr && BLOCK_FOR_INSN (occr->insn) != dominated)
			occr = occr->next;

		      /* Should never happen.  */
		      if (!occr)
			abort ();

		      insn = occr->insn;

		      set = single_set (insn);
		      if (! set)
			abort ();

		      /* Create a pseudo-reg to store the result of reaching
			 expressions into.  Get the mode for the new pseudo
			 from the mode of the original destination pseudo.  */
		      if (expr->reaching_reg == NULL)
			expr->reaching_reg
			  = gen_reg_rtx (GET_MODE (SET_DEST (set)));

		      gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
		      delete_insn (insn);
		      occr->deleted_p = 1;
		      if (!insn_inserted_p)
			{
			  insert_insn_end_bb (index_map[i], bb, 0);
			  insn_inserted_p = 1;
			}
		    }
		}
	    }
	}

      free (domby);
    }

  free (index_map);
}
/* Top level routine to perform one code hoisting (aka unification) pass.

   Return nonzero if a change was made.  */

static int
one_code_hoisting_pass (void)
{
  int changed = 0;

  alloc_hash_table (max_cuid, &expr_hash_table, 0);
  compute_hash_table (&expr_hash_table);
  if (gcse_file)
    dump_hash_table (gcse_file, "Code Hoisting Expressions", &expr_hash_table);

  if (expr_hash_table.n_elems > 0)
    {
      alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
      compute_code_hoist_data ();
      hoist_code ();
      free_code_hoist_mem ();
    }

  free_hash_table (&expr_hash_table);

  return changed;
}
/* Here we provide the things required to do store motion towards
   the exit.  In order for this to be effective, gcse also needed to
   be taught how to move a load when it is killed only by a store to
   itself.

	    int i;
	    float a[10];

	    void foo (float scale)
	    {
	      for (i = 0; i < 10; i++)
		a[i] *= scale;
	    }

   'i' is both loaded and stored to in the loop.  Normally, gcse cannot move
   the load out since it's live around the loop, and stored at the bottom
   of the loop.

     The 'Load Motion' referred to and implemented in this file is
   an enhancement to gcse which, when using edge-based LCM, recognizes
   this situation and allows gcse to move the load out of the loop.

     Once gcse has hoisted the load, store motion can then push this
   load towards the exit, and we end up with no loads or stores of 'i'
   in the loop.  */
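
/* Conceptually (a sketch of the intended result, not literal RTL), the
   example above becomes:

	    void foo (float scale)
	    {
	      int t;

	      for (t = 0; t < 10; t++)
		a[t] *= scale;
	      i = t;
	    }

   with 'i' kept in a register for the whole loop and stored once on
   exit.  */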
/* This will search the ldst list for a matching expression.  If it
   doesn't find one, we create one and initialize it.  */

static struct ls_expr *
ldst_entry (rtx x)
{
  int do_not_record_p = 0;
  struct ls_expr * ptr;
  unsigned int hash;

  hash = hash_expr_1 (x, GET_MODE (x), & do_not_record_p);

  for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
    if (ptr->hash_index == hash && expr_equiv_p (ptr->pattern, x))
      return ptr;

  ptr = xmalloc (sizeof (struct ls_expr));

  ptr->next         = pre_ldst_mems;
  ptr->expr         = NULL;
  ptr->pattern      = x;
  ptr->pattern_regs = NULL_RTX;
  ptr->loads        = NULL_RTX;
  ptr->stores       = NULL_RTX;
  ptr->reaching_reg = NULL_RTX;
  ptr->invalid      = 0;
  ptr->index        = 0;
  ptr->hash_index   = hash;
  pre_ldst_mems     = ptr;

  return ptr;
}
/* Free up an individual ldst entry.  */

static void
free_ldst_entry (struct ls_expr * ptr)
{
  free_INSN_LIST_list (& ptr->loads);
  free_INSN_LIST_list (& ptr->stores);

  free (ptr);
}
/* Free up all memory associated with the ldst list.  */

static void
free_ldst_mems (void)
{
  while (pre_ldst_mems)
    {
      struct ls_expr * tmp = pre_ldst_mems;

      pre_ldst_mems = pre_ldst_mems->next;

      free_ldst_entry (tmp);
    }

  pre_ldst_mems = NULL;
}
/* Dump debugging info about the ldst list.  */

static void
print_ldst_list (FILE * file)
{
  struct ls_expr * ptr;

  fprintf (file, "LDST list: \n");

  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
    {
      fprintf (file, "  Pattern (%3d): ", ptr->index);

      print_rtl (file, ptr->pattern);

      fprintf (file, "\n	 Loads : ");

      if (ptr->loads)
	print_rtl (file, ptr->loads);
      else
	fprintf (file, "(nil)");

      fprintf (file, "\n	Stores : ");

      if (ptr->stores)
	print_rtl (file, ptr->stores);
      else
	fprintf (file, "(nil)");

      fprintf (file, "\n\n");
    }

  fprintf (file, "\n");
}
/* Return the entry for X if X is in the list of ldst only expressions,
   or NULL if it is not.  */

static struct ls_expr *
find_rtx_in_ldst (rtx x)
{
  struct ls_expr * ptr;

  for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
    if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
      return ptr;

  return NULL;
}
/* Assign each element of the list of mems a monotonically increasing value.  */

static int
enumerate_ldsts (void)
{
  struct ls_expr * ptr;
  int n = 0;

  for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
    ptr->index = n++;

  return n;
}
/* Return first item in the list.  */

static inline struct ls_expr *
first_ls_expr (void)
{
  return pre_ldst_mems;
}

/* Return the next item in the list after the specified one.  */

static inline struct ls_expr *
next_ls_expr (struct ls_expr * ptr)
{
  return ptr->next;
}
/* Load Motion for loads which only kill themselves.  */

/* Return true if X is a simple MEM operation, with no registers or
   side effects.  These are the types of loads we consider for the
   ld_motion list, otherwise we let the usual aliasing take care of it.  */
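
/* For example (illustrative only): a load from a global such as
   (mem (symbol_ref "i")) qualifies, while a volatile MEM, a BLKmode
   MEM, or a MEM mentioning the stack pointer does not.  */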
static int
simple_mem (rtx x)
{
  if (GET_CODE (x) != MEM)
    return 0;

  if (MEM_VOLATILE_P (x))
    return 0;

  if (GET_MODE (x) == BLKmode)
    return 0;

  /* If we are handling exceptions, we must be careful with memory references
     that may trap.  If we are not, the behavior is undefined, so we may just
     continue.  */
  if (flag_non_call_exceptions && may_trap_p (x))
    return 0;

  if (side_effects_p (x))
    return 0;

  /* Do not consider function arguments passed on stack.  */
  if (reg_mentioned_p (stack_pointer_rtx, x))
    return 0;

  if (flag_float_store && FLOAT_MODE_P (GET_MODE (x)))
    return 0;

  return 1;
}
/* Make sure there isn't a buried reference in this pattern anywhere.
   If there is, invalidate the entry for it since we're not capable
   of fixing it up just yet.  We have to be sure we know about ALL
   loads since the aliasing code will allow all entries in the
   ld_motion list to not-alias itself.  If we miss a load, we will get
   the wrong value since gcse might common it and we won't know to
   fix it up.  */

static void
invalidate_any_buried_refs (rtx x)
{
  const char * fmt;
  int i, j;
  struct ls_expr * ptr;

  /* Invalidate it in the list.  */
  if (GET_CODE (x) == MEM && simple_mem (x))
    {
      ptr = ldst_entry (x);
      ptr->invalid = 1;
    }

  /* Recursively process the insn.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));

  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	invalidate_any_buried_refs (XEXP (x, i));
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  invalidate_any_buried_refs (XVECEXP (x, i, j));
    }
}
/* Find all the 'simple' MEMs which are used in LOADs and STORES.  Simple
   being defined as MEM loads and stores to symbols, with no side effects
   and no registers in the expression.  For a MEM destination, we also
   check that the insn is still valid if we replace the destination with a
   REG, as is done in update_ld_motion_stores.  If there are any uses/defs
   which don't match these criteria, they are invalidated and trimmed out
   later.  */

static void
compute_ld_motion_mems (void)
{
  struct ls_expr * ptr;
  basic_block bb;
  rtx insn;

  pre_ldst_mems = NULL;

  FOR_EACH_BB (bb)
    {
      for (insn = BB_HEAD (bb);
	   insn && insn != NEXT_INSN (BB_END (bb));
	   insn = NEXT_INSN (insn))
	{
	  if (INSN_P (insn))
	    {
	      if (GET_CODE (PATTERN (insn)) == SET)
		{
		  rtx src = SET_SRC (PATTERN (insn));
		  rtx dest = SET_DEST (PATTERN (insn));

		  /* Check for a simple LOAD...  */
		  if (GET_CODE (src) == MEM && simple_mem (src))
		    {
		      ptr = ldst_entry (src);
		      if (GET_CODE (dest) == REG)
			ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
		      else
			ptr->invalid = 1;
		    }
		  else
		    {
		      /* Make sure there isn't a buried load somewhere.  */
		      invalidate_any_buried_refs (src);
		    }

		  /* Check for stores.  Don't worry about aliased ones, they
		     will block any movement we might do later.  We only care
		     about this exact pattern since those are the only
		     circumstances in which we will ignore the aliasing
		     info.  */
		  if (GET_CODE (dest) == MEM && simple_mem (dest))
		    {
		      ptr = ldst_entry (dest);

		      if (GET_CODE (src) != MEM
			  && GET_CODE (src) != ASM_OPERANDS
			  /* Check for REG manually since want_to_gcse_p
			     returns 0 for all REGs.  */
			  && (REG_P (src) || want_to_gcse_p (src)))
			ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
		      else
			ptr->invalid = 1;
		    }
		}
	      else
		invalidate_any_buried_refs (PATTERN (insn));
	    }
	}
    }
}
/* Remove any references that have been either invalidated or are not in the
   expression list for pre gcse.  */

static void
trim_ld_motion_mems (void)
{
  struct ls_expr * * last = & pre_ldst_mems;
  struct ls_expr * ptr = pre_ldst_mems;

  while (ptr != NULL)
    {
      struct expr * expr;

      /* Delete if entry has been made invalid.  */
      if (! ptr->invalid)
	{
	  /* Delete if we cannot find this mem in the expression list.  */
	  unsigned int hash = ptr->hash_index % expr_hash_table.size;

	  for (expr = expr_hash_table.table[hash];
	       expr != NULL;
	       expr = expr->next_same_hash)
	    if (expr_equiv_p (expr->expr, ptr->pattern))
	      break;
	}
      else
	expr = (struct expr *) 0;

      if (expr)
	{
	  /* Set the expression field if we are keeping it.  */
	  ptr->expr = expr;
	  last = & ptr->next;
	  ptr = ptr->next;
	}
      else
	{
	  *last = ptr->next;
	  free_ldst_entry (ptr);
	  ptr = * last;
	}
    }

  /* Show the world what we've found.  */
  if (gcse_file && pre_ldst_mems != NULL)
    print_ldst_list (gcse_file);
}
/* This routine will take an expression which we are replacing with
   a reaching register, and update any stores that are needed if
   that expression is in the ld_motion list.  Stores are updated by
   copying their SRC to the reaching register, and then storing
   the reaching register into the store location.  This keeps the
   correct value in the reaching register for the loads.  */

static void
update_ld_motion_stores (struct expr * expr)
{
  struct ls_expr * mem_ptr;

  if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
    {
      /* We can try to find just the REACHED stores, but it shouldn't
	 matter to set the reaching reg everywhere...  some might be
	 dead and should be eliminated later.  */

      /* We replace (set mem expr) with (set reg expr) (set mem reg)
	 where reg is the reaching reg used in the load.  We checked in
	 compute_ld_motion_mems that we can replace (set mem expr) with
	 (set reg expr) in that insn.  */
      rtx list = mem_ptr->stores;

      for ( ; list != NULL_RTX; list = XEXP (list, 1))
	{
	  rtx insn = XEXP (list, 0);
	  rtx pat = PATTERN (insn);
	  rtx src = SET_SRC (pat);
	  rtx reg = expr->reaching_reg;
	  rtx copy, new;

	  /* If we've already copied it, continue.  */
	  if (expr->reaching_reg == src)
	    continue;

	  if (gcse_file)
	    {
	      fprintf (gcse_file, "PRE:  store updated with reaching reg ");
	      print_rtl (gcse_file, expr->reaching_reg);
	      fprintf (gcse_file, ":\n	");
	      print_inline_rtx (gcse_file, insn, 8);
	      fprintf (gcse_file, "\n");
	    }

	  copy = gen_move_insn (reg, copy_rtx (SET_SRC (pat)));
	  new = emit_insn_before (copy, insn);
	  record_one_set (REGNO (reg), new);
	  SET_SRC (pat) = reg;

	  /* Un-recognize this pattern since it's probably different now.  */
	  INSN_CODE (insn) = -1;
	  gcse_create_count++;
	}
    }
}
/* Store motion code.  */

#define ANTIC_STORE_LIST(x)		((x)->loads)
#define AVAIL_STORE_LIST(x)		((x)->stores)
#define LAST_AVAIL_CHECK_FAILURE(x)	((x)->reaching_reg)
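
/* A clarifying note: these macros reuse the loads, stores and reaching_reg
   fields of struct ls_expr.  Store motion builds its own ldst list from
   scratch in compute_store_table, so the load-motion meanings of those
   fields do not overlap with these uses.  */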
/* This is used to communicate the target bitvector we want to use in the
   reg_set_info routine when called via the note_stores mechanism.  */
static int * regvec;

/* And current insn, for the same routine.  */
static rtx compute_store_table_current_insn;

/* Used in computing the reverse edge graph bit vectors.  */
static sbitmap * st_antloc;

/* Global holding the number of store expressions we are dealing with.  */
static int num_stores;
/* Checks to set if we need to mark a register set.  Called from
   note_stores.  */

static void
reg_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED,
	      void *data)
{
  sbitmap bb_reg = data;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (GET_CODE (dest) == REG)
    {
      regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn);
      if (bb_reg)
	SET_BIT (bb_reg, REGNO (dest));
    }
}

/* Clear any mark that says that this insn sets dest.  Called from
   note_stores.  */

static void
reg_clear_last_set (rtx dest, rtx setter ATTRIBUTE_UNUSED,
		    void *data)
{
  int *dead_vec = data;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (GET_CODE (dest) == REG
      && dead_vec[REGNO (dest)] == INSN_UID (compute_store_table_current_insn))
    dead_vec[REGNO (dest)] = 0;
}
/* Return zero if some of the registers in list X are killed
   due to the set of registers in array REGS_SET.  X is the EXPR_LIST
   of registers built by extract_mentioned_regs.  */

static bool
store_ops_ok (rtx x, int *regs_set)
{
  rtx reg;

  for (; x; x = XEXP (x, 1))
    {
      reg = XEXP (x, 0);
      if (regs_set[REGNO (reg)])
	return false;
    }

  return true;
}
/* Returns a list of registers mentioned in X.  */
static rtx
extract_mentioned_regs (rtx x)
{
  return extract_mentioned_regs_helper (x, NULL_RTX);
}

/* Helper for extract_mentioned_regs; ACCUM is used to accumulate used
   registers.  */
static rtx
extract_mentioned_regs_helper (rtx x, rtx accum)
{
  int i;
  enum rtx_code code;
  const char * fmt;

  /* Repeat is used to turn tail-recursion into iteration.  */
 repeat:

  if (x == 0)
    return accum;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      return alloc_EXPR_LIST (0, x, accum);

    case MEM:
      x = XEXP (x, 0);
      goto repeat;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
      /* We do not run this function with arguments having side effects.  */
      abort ();

    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return accum;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  fmt = GET_RTX_FORMAT (code);

  for (; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  rtx tem = XEXP (x, i);

	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.  */
	  if (i == 0)
	    {
	      x = tem;
	      goto repeat;
	    }

	  accum = extract_mentioned_regs_helper (tem, accum);
	}
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = 0; j < XVECLEN (x, i); j++)
	    accum = extract_mentioned_regs_helper (XVECEXP (x, i, j), accum);
	}
    }

  return accum;
}
/* Determine whether INSN is a MEM store pattern that we will consider moving.
   REGS_SET_BEFORE is bitmap of registers set before (and including) the
   current insn, REGS_SET_AFTER is bitmap of registers set after (and
   including) the insn in this basic block.  We must be passing through BB from
   head to end, as we are using this fact to speed things up.

   The results are stored this way:

   -- the first anticipatable expression is added into ANTIC_STORE_LIST
   -- if the processed expression is not anticipatable, NULL_RTX is added
      there instead, so that we can use it as indicator that no further
      expression of this type may be anticipatable
   -- if the expression is available, it is added as head of AVAIL_STORE_LIST;
      consequently, all of them but this head are dead and may be deleted.
   -- if the expression is not available, the insn that causes it to fail to
      be available is stored in reaching_reg.

   Things are complicated a bit by the fact that there already may be stores
   to the same MEM from other blocks; also, the caller must take care of the
   necessary cleanup of the temporary markers after the end of the basic
   block.  */
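
/* As an illustrative sketch, in a block containing

	*p = a;		(1)
	... no reads or clobbers of *p or p ...
	*p = b;		(2)

   (1) becomes the anticipatable store for *p (the first in the block),
   while (2) ends up at the head of the available list; (1) is then dead
   and may be deleted once (2) is known to reach the end of the block.  */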
static void
find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after)
{
  struct ls_expr * ptr;
  rtx dest, set, tmp;
  int check_anticipatable, check_available;
  basic_block bb = BLOCK_FOR_INSN (insn);

  set = single_set (insn);
  if (!set)
    return;

  dest = SET_DEST (set);

  if (GET_CODE (dest) != MEM || MEM_VOLATILE_P (dest)
      || GET_MODE (dest) == BLKmode)
    return;

  if (side_effects_p (dest))
    return;

  /* If we are handling exceptions, we must be careful with memory references
     that may trap.  If we are not, the behavior is undefined, so we may just
     continue.  */
  if (flag_non_call_exceptions && may_trap_p (dest))
    return;

  ptr = ldst_entry (dest);
  if (!ptr->pattern_regs)
    ptr->pattern_regs = extract_mentioned_regs (dest);

  /* Do not check for anticipatability if we either found one anticipatable
     store already, or tested for one and found out that it was killed.  */
  check_anticipatable = 0;
  if (!ANTIC_STORE_LIST (ptr))
    check_anticipatable = 1;
  else
    {
      tmp = XEXP (ANTIC_STORE_LIST (ptr), 0);
      if (tmp != NULL_RTX
	  && BLOCK_FOR_INSN (tmp) != bb)
	check_anticipatable = 1;
    }
  if (check_anticipatable)
    {
      if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
	tmp = NULL_RTX;
      else
	tmp = insn;
      ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (tmp,
						ANTIC_STORE_LIST (ptr));
    }

  /* It is not necessary to check whether store is available if we did
     it successfully before; if we failed before, do not bother to check
     until we reach the insn that caused us to fail.  */
  check_available = 0;
  if (!AVAIL_STORE_LIST (ptr))
    check_available = 1;
  else
    {
      tmp = XEXP (AVAIL_STORE_LIST (ptr), 0);
      if (BLOCK_FOR_INSN (tmp) != bb)
	check_available = 1;
    }
  if (check_available)
    {
      /* Check that we have already reached the insn at which the check
	 failed last time.  */
      if (LAST_AVAIL_CHECK_FAILURE (ptr))
	{
	  for (tmp = BB_END (bb);
	       tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
	       tmp = PREV_INSN (tmp))
	    continue;
	  if (tmp == insn)
	    check_available = 0;
	}
      else
	check_available = store_killed_after (dest, ptr->pattern_regs, insn,
					      bb, regs_set_after,
					      &LAST_AVAIL_CHECK_FAILURE (ptr));
    }
  if (!check_available)
    AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn, AVAIL_STORE_LIST (ptr));
}
/* Find available and anticipatable stores.  */

static int
compute_store_table (void)
{
  int ret;
  basic_block bb;
  unsigned regno;
  rtx insn, pat, tmp;
  int *last_set_in, *already_set;
  struct ls_expr * ptr, **prev_next_ptr_ptr;

  max_gcse_regno = max_reg_num ();

  reg_set_in_block = sbitmap_vector_alloc (last_basic_block,
					   max_gcse_regno);
  sbitmap_vector_zero (reg_set_in_block, last_basic_block);
  pre_ldst_mems = NULL;
  last_set_in = xcalloc (max_gcse_regno, sizeof (int));
  already_set = xmalloc (sizeof (int) * max_gcse_regno);

  /* Find all the stores we care about.  */
  FOR_EACH_BB (bb)
    {
      /* First compute the registers set in this block.  */
      regvec = last_set_in;

      for (insn = BB_HEAD (bb);
	   insn != NEXT_INSN (BB_END (bb));
	   insn = NEXT_INSN (insn))
	{
	  if (! INSN_P (insn))
	    continue;

	  if (GET_CODE (insn) == CALL_INSN)
	    {
	      bool clobbers_all = false;
#ifdef NON_SAVING_SETJMP
	      if (NON_SAVING_SETJMP
		  && find_reg_note (insn, REG_SETJMP, NULL_RTX))
		clobbers_all = true;
#endif

	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
		if (clobbers_all
		    || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
		  {
		    last_set_in[regno] = INSN_UID (insn);
		    SET_BIT (reg_set_in_block[bb->index], regno);
		  }
	    }

	  pat = PATTERN (insn);
	  compute_store_table_current_insn = insn;
	  note_stores (pat, reg_set_info, reg_set_in_block[bb->index]);
	}

      /* Now find the stores.  */
      memset (already_set, 0, sizeof (int) * max_gcse_regno);
      regvec = already_set;
      for (insn = BB_HEAD (bb);
	   insn != NEXT_INSN (BB_END (bb));
	   insn = NEXT_INSN (insn))
	{
	  if (! INSN_P (insn))
	    continue;

	  if (GET_CODE (insn) == CALL_INSN)
	    {
	      bool clobbers_all = false;
#ifdef NON_SAVING_SETJMP
	      if (NON_SAVING_SETJMP
		  && find_reg_note (insn, REG_SETJMP, NULL_RTX))
		clobbers_all = true;
#endif

	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
		if (clobbers_all
		    || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
		  already_set[regno] = 1;
	    }

	  pat = PATTERN (insn);
	  note_stores (pat, reg_set_info, NULL);

	  /* Now that we've marked regs, look for stores.  */
	  find_moveable_store (insn, already_set, last_set_in);

	  /* Unmark regs that are no longer set.  */
	  compute_store_table_current_insn = insn;
	  note_stores (pat, reg_clear_last_set, last_set_in);
	  if (GET_CODE (insn) == CALL_INSN)
	    {
	      bool clobbers_all = false;
#ifdef NON_SAVING_SETJMP
	      if (NON_SAVING_SETJMP
		  && find_reg_note (insn, REG_SETJMP, NULL_RTX))
		clobbers_all = true;
#endif

	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
		if ((clobbers_all
		     || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
		    && last_set_in[regno] == INSN_UID (insn))
		  last_set_in[regno] = 0;
	    }
	}

#ifdef ENABLE_CHECKING
      /* last_set_in should now be all-zero.  */
      for (regno = 0; regno < max_gcse_regno; regno++)
	if (last_set_in[regno] != 0)
	  abort ();
#endif

      /* Clear temporary marks.  */
      for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
	{
	  LAST_AVAIL_CHECK_FAILURE (ptr) = NULL_RTX;
	  if (ANTIC_STORE_LIST (ptr)
	      && (tmp = XEXP (ANTIC_STORE_LIST (ptr), 0)) == NULL_RTX)
	    ANTIC_STORE_LIST (ptr) = XEXP (ANTIC_STORE_LIST (ptr), 1);
	}
    }

  /* Remove the stores that are not available anywhere, as there will
     be no opportunity to optimize them.  */
  for (ptr = pre_ldst_mems, prev_next_ptr_ptr = &pre_ldst_mems;
       ptr != NULL;
       ptr = *prev_next_ptr_ptr)
    {
      if (!AVAIL_STORE_LIST (ptr))
	{
	  *prev_next_ptr_ptr = ptr->next;
	  free_ldst_entry (ptr);
	}
      else
	prev_next_ptr_ptr = &ptr->next;
    }

  ret = enumerate_ldsts ();

  if (gcse_file)
    {
      fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
      print_ldst_list (gcse_file);
    }

  free (last_set_in);
  free (already_set);
  return ret;
}
/* Check to see if the load X is aliased with STORE_PATTERN.
   AFTER is true if we are checking the case when STORE_PATTERN occurs
   after the X.  */

static bool
load_kills_store (rtx x, rtx store_pattern, int after)
{
  if (after)
    return anti_dependence (x, store_pattern);
  else
    return true_dependence (store_pattern, GET_MODE (store_pattern), x,
			    rtx_addr_varies_p);
}

/* Go through the entire insn X, looking for any loads which might alias
   STORE_PATTERN.  Return true if found.
   AFTER is true if we are checking the case when STORE_PATTERN occurs
   after the insn X.  */

static bool
find_loads (rtx x, rtx store_pattern, int after)
{
  const char * fmt;
  int i, j;
  int ret = false;

  if (!x)
    return false;

  if (GET_CODE (x) == SET)
    x = SET_SRC (x);

  if (GET_CODE (x) == MEM)
    {
      if (load_kills_store (x, store_pattern, after))
	return true;
    }

  /* Recursively process the insn.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));

  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
    {
      if (fmt[i] == 'e')
	ret |= find_loads (XEXP (x, i), store_pattern, after);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  ret |= find_loads (XVECEXP (x, i, j), store_pattern, after);
    }
  return ret;
}
/* Check if INSN kills the store pattern X (is aliased with it).
   AFTER is true if we are checking the case when store X occurs
   after the insn.  Return true if it does.  */

static bool
store_killed_in_insn (rtx x, rtx x_regs, rtx insn, int after)
{
  rtx reg, base, note;

  if (!INSN_P (insn))
    return false;

  if (GET_CODE (insn) == CALL_INSN)
    {
      /* A normal or pure call might read from pattern,
	 but a const call will not.  */
      if (! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn))
	return true;

      /* But even a const call reads its parameters.  Check whether the
	 base of some of the registers used in mem is the stack pointer.  */
      for (reg = x_regs; reg; reg = XEXP (reg, 1))
	{
	  base = find_base_term (XEXP (reg, 0));
	  if (!base
	      || (GET_CODE (base) == ADDRESS
		  && GET_MODE (base) == Pmode
		  && XEXP (base, 0) == stack_pointer_rtx))
	    return true;
	}

      return false;
    }

  if (GET_CODE (PATTERN (insn)) == SET)
    {
      rtx pat = PATTERN (insn);
      rtx dest = SET_DEST (pat);

      if (GET_CODE (dest) == SIGN_EXTRACT
	  || GET_CODE (dest) == ZERO_EXTRACT)
	dest = XEXP (dest, 0);

      /* Check for memory stores to aliased objects.  */
      if (GET_CODE (dest) == MEM
	  && !expr_equiv_p (dest, x))
	{
	  if (after)
	    {
	      if (output_dependence (dest, x))
		return true;
	    }
	  else
	    {
	      if (output_dependence (x, dest))
		return true;
	    }
	}
      if (find_loads (SET_SRC (pat), x, after))
	return true;
    }
  else if (find_loads (PATTERN (insn), x, after))
    return true;

  /* If this insn has a REG_EQUAL or REG_EQUIV note referencing a memory
     location aliased with X, then this insn kills X.  */
  note = find_reg_equal_equiv_note (insn);
  if (! note)
    return false;
  note = XEXP (note, 0);

  /* However, if the note represents a must alias rather than a may
     alias relationship, then it does not kill X.  */
  if (expr_equiv_p (note, x))
    return false;

  /* See if there are any aliased loads in the note.  */
  return find_loads (note, x, after);
}
/* Returns true if the expression X is loaded or clobbered on or after INSN
   within basic block BB.  REGS_SET_AFTER is bitmap of registers set in
   or after the insn.  X_REGS is list of registers mentioned in X.  If the
   store is killed, return in FAIL_INSN the last insn in which it occurs.  */

static bool
store_killed_after (rtx x, rtx x_regs, rtx insn, basic_block bb,
		    int *regs_set_after, rtx *fail_insn)
{
  rtx last = BB_END (bb), act;

  if (!store_ops_ok (x_regs, regs_set_after))
    {
      /* We do not know where it will happen.  */
      if (fail_insn)
	*fail_insn = NULL_RTX;
      return true;
    }

  /* Scan from the end, so that fail_insn is determined correctly.  */
  for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
    if (store_killed_in_insn (x, x_regs, act, false))
      {
	if (fail_insn)
	  *fail_insn = act;
	return true;
      }

  return false;
}
/* Returns true if the expression X is loaded or clobbered on or before INSN
   within basic block BB.  X_REGS is list of registers mentioned in X.
   REGS_SET_BEFORE is bitmap of registers set before or in this insn.  */

static bool
store_killed_before (rtx x, rtx x_regs, rtx insn, basic_block bb,
		     int *regs_set_before)
{
  rtx first = BB_HEAD (bb);

  if (!store_ops_ok (x_regs, regs_set_before))
    return true;

  for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
    if (store_killed_in_insn (x, x_regs, insn, true))
      return true;

  return false;
}
/* Fill in available, anticipatable, transparent and kill vectors in
   STORE_DATA, based on lists of available and anticipatable stores.  */

static void
build_store_vectors (void)
{
  basic_block bb;
  int *regs_set_in_block;
  rtx insn, st;
  struct ls_expr * ptr;
  unsigned regno;

  /* Build the gen_vector.  This is any store in the table which is not killed
     by aliasing later in its block.  */
  ae_gen = sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (ae_gen, last_basic_block);

  st_antloc = sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (st_antloc, last_basic_block);

  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
    {
      for (st = AVAIL_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
	{
	  insn = XEXP (st, 0);
	  bb = BLOCK_FOR_INSN (insn);

	  /* If we've already seen an available expression in this block,
	     we can delete this one (it occurs earlier in the block).  We'll
	     copy the SRC expression to an unused register in case there
	     are any side effects.  */
	  if (TEST_BIT (ae_gen[bb->index], ptr->index))
	    {
	      rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
	      if (gcse_file)
		fprintf (gcse_file, "Removing redundant store:\n");
	      replace_store_insn (r, XEXP (st, 0), bb, ptr);
	      continue;
	    }
	  SET_BIT (ae_gen[bb->index], ptr->index);
	}

      for (st = ANTIC_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
	{
	  insn = XEXP (st, 0);
	  bb = BLOCK_FOR_INSN (insn);
	  SET_BIT (st_antloc[bb->index], ptr->index);
	}
    }

  ae_kill = sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (ae_kill, last_basic_block);

  transp = sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (transp, last_basic_block);
  regs_set_in_block = xmalloc (sizeof (int) * max_gcse_regno);

  FOR_EACH_BB (bb)
    {
      for (regno = 0; regno < max_gcse_regno; regno++)
	regs_set_in_block[regno] = TEST_BIT (reg_set_in_block[bb->index], regno);

      for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
	{
	  if (store_killed_after (ptr->pattern, ptr->pattern_regs, BB_HEAD (bb),
				  bb, regs_set_in_block, NULL))
	    {
	      /* It should not be necessary to consider the expression
		 killed if it is both anticipatable and available.  */
	      if (!TEST_BIT (st_antloc[bb->index], ptr->index)
		  || !TEST_BIT (ae_gen[bb->index], ptr->index))
		SET_BIT (ae_kill[bb->index], ptr->index);
	    }
	  else
	    SET_BIT (transp[bb->index], ptr->index);
	}
    }

  free (regs_set_in_block);

  if (gcse_file)
    {
      dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, last_basic_block);
      dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, last_basic_block);
      dump_sbitmap_vector (gcse_file, "Transpt", "", transp, last_basic_block);
      dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, last_basic_block);
    }
}
/* Insert an instruction at the beginning of a basic block, and update
   the BB_HEAD if needed.  */

static void
insert_insn_start_bb (rtx insn, basic_block bb)
{
  /* Insert at start of successor block.  */
  rtx prev = PREV_INSN (BB_HEAD (bb));
  rtx before = BB_HEAD (bb);
  while (before != 0)
    {
      if (GET_CODE (before) != CODE_LABEL
	  && (GET_CODE (before) != NOTE
	      || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
	break;
      prev = before;
      if (prev == BB_END (bb))
	break;
      before = NEXT_INSN (before);
    }

  insn = emit_insn_after (insn, prev);

  if (gcse_file)
    {
      fprintf (gcse_file, "STORE_MOTION  insert store at start of BB %d:\n",
	       bb->index);
      print_inline_rtx (gcse_file, insn, 6);
      fprintf (gcse_file, "\n");
    }
}
/* This routine will insert a store on an edge.  EXPR is the ldst entry for
   the memory reference, and E is the edge to insert it on.  Returns nonzero
   if an edge insertion was performed.  */

static int
insert_store (struct ls_expr * expr, edge e)
{
  rtx reg, insn;
  basic_block bb;
  edge tmp;

  /* We did all the deletes before this insert, so if we didn't delete a
     store, then we haven't set the reaching reg yet either.  */
  if (expr->reaching_reg == NULL_RTX)
    return 0;

  if (e->flags & EDGE_FAKE)
    return 0;

  reg = expr->reaching_reg;
  insn = gen_move_insn (copy_rtx (expr->pattern), reg);

  /* If we are inserting this expression on ALL predecessor edges of a BB,
     insert it at the start of the BB, and reset the insert bits on the other
     edges so we don't try to insert it on the other edges.  */
  bb = e->dest;
  for (tmp = e->dest->pred; tmp; tmp = tmp->pred_next)
    if (!(tmp->flags & EDGE_FAKE))
      {
	int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
	if (index == EDGE_INDEX_NO_EDGE)
	  abort ();
	if (! TEST_BIT (pre_insert_map[index], expr->index))
	  break;
      }

  /* If tmp is NULL, we found an insertion on every edge, blank the
     insertion vector for these edges, and insert at the start of the BB.  */
  if (!tmp && bb != EXIT_BLOCK_PTR)
    {
      for (tmp = e->dest->pred; tmp; tmp = tmp->pred_next)
	{
	  int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
	  RESET_BIT (pre_insert_map[index], expr->index);
	}
      insert_insn_start_bb (insn, bb);
      return 0;
    }

  /* We can't insert on this edge, so we'll insert at the head of the
     successor's block.  See Morgan, sec 10.5.  */
  if ((e->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
    {
      insert_insn_start_bb (insn, bb);
      return 0;
    }

  insert_insn_on_edge (insn, e);

  if (gcse_file)
    {
      fprintf (gcse_file, "STORE_MOTION  insert insn on edge (%d, %d):\n",
	       e->src->index, e->dest->index);
      print_inline_rtx (gcse_file, insn, 6);
      fprintf (gcse_file, "\n");
    }

  return 1;
}
/* Remove any REG_EQUAL or REG_EQUIV notes containing a reference to the
   memory location in SMEXPR set in basic block BB.

   This could be rather expensive.  */

static void
remove_reachable_equiv_notes (basic_block bb, struct ls_expr *smexpr)
{
  edge *stack = xmalloc (sizeof (edge) * n_basic_blocks), act;
  sbitmap visited = sbitmap_alloc (last_basic_block);
  int stack_top = 0;
  rtx last, insn, note;
  rtx mem = smexpr->pattern;

  sbitmap_zero (visited);
  act = bb->succ;

  while (1)
    {
      if (!act)
	{
	  if (!stack_top)
	    {
	      free (stack);
	      sbitmap_free (visited);
	      return;
	    }
	  act = stack[--stack_top];
	}
      bb = act->dest;

      if (bb == EXIT_BLOCK_PTR
	  || TEST_BIT (visited, bb->index)
	  || TEST_BIT (ae_kill[bb->index], smexpr->index))
	{
	  act = act->succ_next;
	  continue;
	}
      SET_BIT (visited, bb->index);

      if (TEST_BIT (st_antloc[bb->index], smexpr->index))
	{
	  for (last = ANTIC_STORE_LIST (smexpr);
	       BLOCK_FOR_INSN (XEXP (last, 0)) != bb;
	       last = XEXP (last, 1))
	    continue;
	  last = XEXP (last, 0);
	}
      else
	last = NEXT_INSN (BB_END (bb));

      for (insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
	if (INSN_P (insn))
	  {
	    note = find_reg_equal_equiv_note (insn);
	    if (!note || !expr_equiv_p (XEXP (note, 0), mem))
	      continue;

	    if (gcse_file)
	      fprintf (gcse_file, "STORE_MOTION  drop REG_EQUAL note at insn %d:\n",
		       INSN_UID (insn));
	    remove_note (insn, note);
	  }
      act = act->succ_next;
      if (bb->succ)
	{
	  if (act)
	    stack[stack_top++] = act;
	  act = bb->succ;
	}
    }
}
/* This routine will replace a store with a SET to a specified register.  */

static void
replace_store_insn (rtx reg, rtx del, basic_block bb, struct ls_expr *smexpr)
{
  rtx insn, mem, note, set, ptr;

  mem = smexpr->pattern;
  insn = gen_move_insn (reg, SET_SRC (single_set (del)));
  insn = emit_insn_after (insn, del);

  if (gcse_file)
    {
      fprintf (gcse_file,
	       "STORE_MOTION  delete insn in BB %d:\n      ", bb->index);
      print_inline_rtx (gcse_file, del, 6);
      fprintf (gcse_file, "\nSTORE MOTION  replaced with insn:\n      ");
      print_inline_rtx (gcse_file, insn, 6);
      fprintf (gcse_file, "\n");
    }

  for (ptr = ANTIC_STORE_LIST (smexpr); ptr; ptr = XEXP (ptr, 1))
    if (XEXP (ptr, 0) == del)
      {
	XEXP (ptr, 0) = insn;
	break;
      }
  delete_insn (del);

  /* Now we must handle REG_EQUAL notes whose content is equal to the mem;
     they are no longer accurate provided that they are reached by this
     definition, so drop them.  */
  for (; insn != NEXT_INSN (BB_END (bb)); insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	set = single_set (insn);
	if (!set)
	  continue;
	if (expr_equiv_p (SET_DEST (set), mem))
	  return;
	note = find_reg_equal_equiv_note (insn);
	if (!note || !expr_equiv_p (XEXP (note, 0), mem))
	  continue;

	if (gcse_file)
	  fprintf (gcse_file, "STORE_MOTION  drop REG_EQUAL note at insn %d:\n",
		   INSN_UID (insn));
	remove_note (insn, note);
      }
  remove_reachable_equiv_notes (bb, smexpr);
}
/* Delete a store, but copy the value that would have been stored into
   the reaching_reg for later storing.  */

static void
delete_store (struct ls_expr * expr, basic_block bb)
{
  rtx reg, i, del;

  if (expr->reaching_reg == NULL_RTX)
    expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));

  reg = expr->reaching_reg;

  for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
    {
      del = XEXP (i, 0);
      if (BLOCK_FOR_INSN (del) == bb)
	{
	  /* We know there is only one since we deleted redundant
	     ones during the available computation.  */
	  replace_store_insn (reg, del, bb, expr);
	  break;
	}
    }
}
/* Free memory used by store motion.  */

static void
free_store_memory (void)
{
  free_ldst_mems ();

  if (ae_gen)
    sbitmap_vector_free (ae_gen);
  if (ae_kill)
    sbitmap_vector_free (ae_kill);
  if (transp)
    sbitmap_vector_free (transp);
  if (st_antloc)
    sbitmap_vector_free (st_antloc);
  if (pre_insert_map)
    sbitmap_vector_free (pre_insert_map);
  if (pre_delete_map)
    sbitmap_vector_free (pre_delete_map);
  if (reg_set_in_block)
    sbitmap_vector_free (reg_set_in_block);

  ae_gen = ae_kill = transp = st_antloc = NULL;
  pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
}
/* Perform store motion.  Much like gcse, except we move expressions the
   other way by looking at the flowgraph in reverse.  */

static void
store_motion (void)
{
  basic_block bb;
  int x;
  struct ls_expr * ptr;
  int update_flow = 0;

  if (gcse_file)
    {
      fprintf (gcse_file, "before store motion\n");
      print_rtl (gcse_file, get_insns ());
    }

  init_alias_analysis ();

  /* Find all the available and anticipatable stores.  */
  num_stores = compute_store_table ();
  if (num_stores == 0)
    {
      sbitmap_vector_free (reg_set_in_block);
      end_alias_analysis ();
      return;
    }

  /* Now compute kill & transp vectors.  */
  build_store_vectors ();
  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();

  edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
				st_antloc, ae_kill, &pre_insert_map,
				&pre_delete_map);

  /* Now we want to insert the new stores which are going to be needed.  */
  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
    {
      FOR_EACH_BB (bb)
	if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
	  delete_store (ptr, bb);

      for (x = 0; x < NUM_EDGES (edge_list); x++)
	if (TEST_BIT (pre_insert_map[x], ptr->index))
	  update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
    }

  if (update_flow)
    commit_edge_insertions ();

  free_store_memory ();
  free_edge_list (edge_list);
  remove_fake_edges ();
  end_alias_analysis ();
}
/* Entry point for jump bypassing optimization pass.  */

int
bypass_jumps (FILE *file)
{
  int changed;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do, or it is too expensive.  */
  if (n_basic_blocks <= 1 || is_too_expensive (_ ("jump bypassing disabled")))
    return 0;

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();

  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (get_insns ());

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (get_insns ());
  changed = one_cprop_pass (1, 1, 1);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "BYPASS of %s: %d basic blocks, ",
	       current_function_name (), n_basic_blocks);
      fprintf (file, "%d bytes\n\n", bytes_used);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();

  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  return changed;
}
/* Return true if the graph is too expensive to optimize.  PASS is the
   optimization about to be performed.  */

static bool
is_too_expensive (const char *pass)
{
  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many
     edges as blocks.  But we do not want to punish small functions
     which have a couple switch statements.  Rather than simply
     threshold the number of blocks, use something with a more
     graceful degradation.  */
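  /* For instance, a function with 1,000 basic blocks is allowed up to
     24,000 edges (20000 + 1000 * 4) before the pass is disabled.  */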
  if (n_edges > 20000 + n_basic_blocks * 4)
    {
      if (warn_disabled_optimization)
	warning ("%s: %d basic blocks and %d edges/basic block",
		 pass, n_basic_blocks, n_edges / n_basic_blocks);

      return true;
    }

  /* If allocating memory for the cprop bitmap would take up too much
     storage it's better just to disable the optimization.  */
  if ((n_basic_blocks
       * SBITMAP_SET_SIZE (max_reg_num ())
       * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
    {
      if (warn_disabled_optimization)
	warning ("%s: %d basic blocks and %d registers",
		 pass, n_basic_blocks, max_reg_num ());

      return true;
    }

  return false;
}