1 /* RTL dead store elimination.
2 Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
3 Free Software Foundation, Inc.
5 Contributed by Richard Sandiford <rsandifor@codesourcery.com>
6 and Kenneth Zadeck <zadeck@naturalbridge.com>
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
28 #include "coretypes.h"
35 #include "hard-reg-set.h"
40 #include "tree-pass.h"
41 #include "alloc-pool.h"
43 #include "insn-config.h"
50 #include "tree-flow.h" /* for may_be_aliased */
/* This file contains three techniques for performing Dead Store
   Elimination (dse).
   * The first technique performs dse locally on any base address.  It
   is based on cselib, which is a local value numbering technique.
   This technique is local to a basic block but deals with fairly
   general addresses.
60 * The second technique performs dse globally but is restricted to
   base addresses that are either constant or are relative to the
   frame pointer.
   * The third technique (which is only done after register allocation)
   processes the spill slots.  This differs from the second
66 technique because it takes advantage of the fact that spilling is
67 completely free from the effects of aliasing.
69 Logically, dse is a backwards dataflow problem. A store can be
   deleted if it cannot be reached in the backward direction by any
71 use of the value being stored. However, the local technique uses a
72 forwards scan of the basic block because cselib requires that the
73 block be processed in that order.
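
   For example, in the (illustrative) sequence

     *p = x;
     ... no reads or other uses of *p ...
     *p = y;

   the first store is dead and can be removed.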
   The pass is logically broken into 7 steps:

   0) Initialization.

   1) The local algorithm, as well as scanning the insns for the two
   global algorithms.
82 2) Analysis to see if the global algs are necessary. In the case
   of stores based on a constant address, there must be at least two
84 stores to that address, to make it possible to delete some of the
85 stores. In the case of stores off of the frame or spill related
86 stores, only one store to an address is necessary because those
87 stores die at the end of the function.
89 3) Set up the global dataflow equations based on processing the
90 info parsed in the first step.
92 4) Solve the dataflow equations.
   5) Delete the insns that the global analysis has indicated are
   unnecessary.
   6) Delete insns that store the same value as a preceding store
   where the earlier store couldn't be eliminated.

   7) Cleanup.

   This step uses cselib and canon_rtx to build the largest expression
103 possible for each address. This pass is a forwards pass through
104 each basic block. From the point of view of the global technique,
105 the first pass could examine a block in either direction. The
106 forwards ordering is to accommodate cselib.
   We make a simplifying assumption: addresses fall into four broad
   categories:
111 1) base has rtx_varies_p == false, offset is constant.
112 2) base has rtx_varies_p == false, offset variable.
113 3) base has rtx_varies_p == true, offset constant.
114 4) base has rtx_varies_p == true, offset variable.
116 The local passes are able to process all 4 kinds of addresses. The
117 global pass only handles (1).
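
   For illustration (hypothetical insns, with fp the frame pointer and
   r and i pseudos), the four kinds look roughly like:

     1) (mem (plus fp (const_int -4)))        base constant, offset constant
     2) (mem (plus fp (reg i)))               base constant, offset variable
     3) (mem (plus (reg r) (const_int 8)))    base varies, offset constant
     4) (mem (plus (reg r) (reg i)))          base varies, offset variable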
119 The global problem is formulated as follows:
121 A store, S1, to address A, where A is not relative to the stack
122 frame, can be eliminated if all paths from S1 to the end of the
   function contain another store to A before a read to A.
125 If the address A is relative to the stack frame, a store S2 to A
   can be eliminated if there are no paths from S2 that reach the
127 end of the function that read A before another store to A. In
   this case S2 can be deleted if there are paths from S2 to the
129 end of the function that have no reads or writes to A. This
130 second case allows stores to the stack frame to be deleted that
131 would otherwise die when the function returns. This cannot be
132 done if stores_off_frame_dead_at_return is not true. See the doc
133 for that variable for when this variable is false.
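
   As an illustrative sketch (g is any function whose body is unknown):

     void
     f (void)
     {
       int x;
       x = 1;    /* frame store, never read again: dead at return */
       g ();     /* g cannot see x because its address never escapes */
     }

   The store to x can be deleted even though no later store to x
   follows it.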
135 The global problem is formulated as a backwards set union
136 dataflow problem where the stores are the gens and reads are the
137 kills. Set union problems are rare and require some special
138 handling given our representation of bitmaps. A straightforward
   implementation requires a lot of bitmaps filled with 1s.
140 These are expensive and cumbersome in our bitmap formulation so
141 care has been taken to avoid large vectors filled with 1s. See
   the comments in bb_info and in the dataflow confluence functions
   for details.
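
   (Roughly, the per-block transfer function then computes

      in = gen | (out & ~kill)

   walking backwards from the block exits, with gen holding the store
   positions and kill the positions that are read.)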
145 There are two places for further enhancements to this algorithm:
147 1) The original dse which was embedded in a pass called flow also
   did local address forwarding.  For example in

     A <- r100
     ... <- A

153 flow would replace the right hand side of the second insn with a
154 reference to r100. Most of the information is available to add this
   to this pass.  It has not been done because it is a lot of work in
156 the case that either r100 is assigned to between the first and
157 second insn and/or the second insn is a load of part of the value
158 stored by the first insn.
160 insn 5 in gcc.c-torture/compile/990203-1.c simple case.
161 insn 15 in gcc.c-torture/execute/20001017-2.c simple case.
162 insn 25 in gcc.c-torture/execute/20001026-1.c simple case.
163 insn 44 in gcc.c-torture/execute/20010910-1.c simple case.
165 2) The cleaning up of spill code is quite profitable. It currently
166 depends on reading tea leaves and chicken entrails left by reload.
167 This pass depends on reload creating a singleton alias set for each
168 spill slot and telling the next dse pass which of these alias sets
169 are the singletons. Rather than analyze the addresses of the
170 spills, dse's spill processing just does analysis of the loads and
   stores that use those alias sets.  There are three cases where this
   falls short:
174 a) Reload sometimes creates the slot for one mode of access, and
175 then inserts loads and/or stores for a smaller mode. In this
176 case, the current code just punts on the slot. The proper thing
177 to do is to back out and use one bit vector position for each
178 byte of the entity associated with the slot. This depends on
179 KNOWING that reload always generates the accesses for each of the
180 bytes in some canonical (read that easy to understand several
181 passes after reload happens) way.
   b) Reload sometimes decides that the spill slot it allocated was not
184 large enough for the mode and goes back and allocates more slots
185 with the same mode and alias set. The backout in this case is a
186 little more graceful than (a). In this case the slot is unmarked
   as being a spill slot and if the final address comes out to be based
188 off the frame pointer, the global algorithm handles this slot.
190 c) For any pass that may prespill, there is currently no
191 mechanism to tell the dse pass that the slot being used has the
192 special properties that reload uses. It may be that all that is
193 required is to have those passes make the same calls that reload
   does, assuming that the alias sets can be manipulated in the same
   way.  */
197 /* There are limits to the size of constant offsets we model for the
   global problem.  There are certainly test cases that exceed this
   limit; however, it is unlikely that there are important programs
200 that really have constant offsets this size. */
201 #define MAX_OFFSET (64 * 1024)
203 /* Obstack for the DSE dataflow bitmaps. We don't want to put these
204 on the default obstack because these bitmaps can grow quite large
205 (~2GB for the small (!) test case of PR54146) and we'll hold on to
206 all that memory until the end of the compiler run.
207 As a bonus, delete_tree_live_info can destroy all the bitmaps by just
208 releasing the whole obstack. */
209 static bitmap_obstack dse_bitmap_obstack
;
211 /* Obstack for other data. As for above: Kinda nice to be able to
212 throw it all away at the end in one big sweep. */
213 static struct obstack dse_obstack
;
215 /* Scratch bitmap for cselib's cselib_expand_value_rtx. */
216 static bitmap scratch
= NULL
;
220 /* This structure holds information about a candidate store. */
224 /* False means this is a clobber. */
227 /* False if a single HOST_WIDE_INT bitmap is used for positions_needed. */
230 /* The id of the mem group of the base address. If rtx_varies_p is
     true, this is -1.  Otherwise, it is the index into the group
     table.  */
235 /* This is the cselib value. */
236 cselib_val
*cse_base
;
238 /* This canonized mem. */
241 /* Canonized MEM address for use by canon_true_dependence. */
244 /* If this is non-zero, it is the alias set of a spill location. */
245 alias_set_type alias_set
;
  /* The offset of the first byte and of the byte after the last byte
     associated with the operation.  */
249 HOST_WIDE_INT begin
, end
;
253 /* A bitmask as wide as the number of bytes in the word that
254 contains a 1 if the byte may be needed. The store is unused if
255 all of the bits are 0. This is used if IS_LARGE is false. */
256 unsigned HOST_WIDE_INT small_bitmask
;
260 /* A bitmap with one bit per byte. Cleared bit means the position
	   is needed.  Used if IS_LARGE is true.  */
264 /* Number of set bits (i.e. unneeded bytes) in BITMAP. If it is
265 equal to END - BEGIN, the whole store is unused. */
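
      /* Illustrative example: a 4 byte store with BEGIN == 0 and
	 END == 4 starts out with SMALL_BITMASK == 0xf; if a later
	 store overwrites bytes 2 and 3, bits 2 and 3 are cleared
	 leaving 0x3, and the store is dead once the mask reaches 0.
	 The large form records the same thing with one bitmap bit
	 per byte plus COUNT.  */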
270 /* The next store info for this insn. */
271 struct store_info
*next
;
273 /* The right hand side of the store. This is used if there is a
     subsequent reload of the mem's address somewhere later in the
     same basic block.  */
  /* If rhs is or holds a constant, this contains that constant,
     otherwise NULL.  */
282 /* Set if this store stores the same constant value as REDUNDANT_REASON
283 insn stored. These aren't eliminated early, because doing that
     might prevent the earlier larger store from being eliminated.  */
285 struct insn_info
*redundant_reason
;
288 /* Return a bitmask with the first N low bits set. */
290 static unsigned HOST_WIDE_INT
291 lowpart_bitmask (int n
)
293 unsigned HOST_WIDE_INT mask
= ~(unsigned HOST_WIDE_INT
) 0;
294 return mask
>> (HOST_BITS_PER_WIDE_INT
- n
);
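
/* For example, lowpart_bitmask (3) yields 0x7 (assuming 0 < N
   <= HOST_BITS_PER_WIDE_INT).  */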
297 typedef struct store_info
*store_info_t
;
298 static alloc_pool cse_store_info_pool
;
299 static alloc_pool rtx_store_info_pool
;
301 /* This structure holds information about a load. These are only
302 built for rtx bases. */
305 /* The id of the mem group of the base address. */
308 /* If this is non-zero, it is the alias set of a spill location. */
309 alias_set_type alias_set
;
311 /* The offset of the first and byte after the last byte associated
312 with the operation. If begin == end == 0, the read did not have
313 a constant offset. */
316 /* The mem being read. */
319 /* The next read_info for this insn. */
320 struct read_info
*next
;
322 typedef struct read_info
*read_info_t
;
323 static alloc_pool read_info_pool
;
326 /* One of these records is created for each insn. */
330 /* Set true if the insn contains a store but the insn itself cannot
331 be deleted. This is set if the insn is a parallel and there is
     more than one non-dead output or if the insn is in some way
     volatile.  */
336 /* This field is only used by the global algorithm. It is set true
337 if the insn contains any read of mem except for a (1). This is
338 also set if the insn is a call or has a clobber mem. If the insn
339 contains a wild read, the use_rec will be null. */
342 /* This is true only for CALL instructions which could potentially read
     any non-frame memory location.  This field is used by the global
     algorithm.  */
345 bool non_frame_wild_read
;
347 /* This field is only used for the processing of const functions.
348 These functions cannot read memory, but they can read the stack
349 because that is where they may get their parms. We need to be
350 this conservative because, like the store motion pass, we don't
351 consider CALL_INSN_FUNCTION_USAGE when processing call insns.
352 Moreover, we need to distinguish two cases:
353 1. Before reload (register elimination), the stores related to
354 outgoing arguments are stack pointer based and thus deemed
355 of non-constant base in this pass. This requires special
356 handling but also means that the frame pointer based stores
357 need not be killed upon encountering a const function call.
358 2. After reload, the stores related to outgoing arguments can be
359 either stack pointer or hard frame pointer based. This means
360 that we have no other choice than also killing all the frame
361 pointer based stores upon encountering a const function call.
362 This field is set after reload for const function calls. Having
363 this set is less severe than a wild read, it just means that all
364 the frame related stores are killed rather than all the stores. */
367 /* This field is only used for the processing of const functions.
368 It is set if the insn may contain a stack pointer based store. */
369 bool stack_pointer_based
;
371 /* This is true if any of the sets within the store contains a
     cselib base.  Such stores can only be deleted by the local
     algorithm.  */
374 bool contains_cselib_groups
;
379 /* The list of mem sets or mem clobbers that are contained in this
380 insn. If the insn is deletable, it contains only one mem set.
381 But it could also contain clobbers. Insns that contain more than
     one mem set are not deletable, but each of those mems is here in
383 order to provide info to delete other insns. */
384 store_info_t store_rec
;
386 /* The linked list of mem uses in this insn. Only the reads from
387 rtx bases are listed here. The reads to cselib bases are
     completely processed during the first scan and so are never
     created.  */
390 read_info_t read_rec
;
392 /* The live fixed registers. We assume only fixed registers can
393 cause trouble by being clobbered from an expanded pattern;
394 storing only the live fixed registers (rather than all registers)
     means less memory needs to be allocated / copied for the individual
     insns.  */
397 regset fixed_regs_live
;
399 /* The prev insn in the basic block. */
400 struct insn_info
* prev_insn
;
402 /* The linked list of insns that are in consideration for removal in
403 the forwards pass through the basic block. This pointer may be
404 trash as it is not cleared when a wild read occurs. The only
405 time it is guaranteed to be correct is when the traversal starts
406 at active_local_stores. */
407 struct insn_info
* next_local_store
;
410 typedef struct insn_info
*insn_info_t
;
411 static alloc_pool insn_info_pool
;
/* The linked list of stores that are under consideration in this
   basic block.  */
415 static insn_info_t active_local_stores
;
416 static int active_local_stores_len
;
421 /* Pointer to the insn info for the last insn in the block. These
422 are linked so this is how all of the insns are reached. During
423 scanning this is the current insn being scanned. */
424 insn_info_t last_insn
;
426 /* The info for the global dataflow problem. */
  /* This is set if the transfer function should AND in the wild_read
     bitmap before applying the kill and gen sets.  That vector knocks
     out most of the bits in the bitmap and thus speeds up the
     operations.  */
433 bool apply_wild_read
;
435 /* The following 4 bitvectors hold information about which positions
     of which stores are live or dead.  They are indexed by
     get_bitmap_index.  */
439 /* The set of store positions that exist in this block before a wild read. */
442 /* The set of load positions that exist in this block above the
443 same position of a store. */
  /* The set of stores that reach the top of the block without being
     killed by a read.
449 Do not represent the in if it is all ones. Note that this is
450 what the bitvector should logically be initialized to for a set
451 intersection problem. However, like the kill set, this is too
452 expensive. So initially, the in set will only be created for the
453 exit block and any block that contains a wild read. */
  /* The set of stores that reach the bottom of the block from its
     successors.

     Do not represent the out if it is all ones.  Note that this is
460 what the bitvector should logically be initialized to for a set
461 intersection problem. However, like the kill and in set, this is
462 too expensive. So what is done is that the confluence operator
463 just initializes the vector from one of the out sets of the
464 successors of the block. */
467 /* The following bitvector is indexed by the reg number. It
468 contains the set of regs that are live at the current instruction
469 being processed. While it contains info for all of the
470 registers, only the hard registers are actually examined. It is used
471 to assure that shift and/or add sequences that are inserted do not
472 accidentally clobber live hard regs. */
476 typedef struct bb_info
*bb_info_t
;
477 static alloc_pool bb_info_pool
;
479 /* Table to hold all bb_infos. */
480 static bb_info_t
*bb_table
;
482 /* There is a group_info for each rtx base that is used to reference
483 memory. There are also not many of the rtx bases because they are
484 very limited in scope. */
488 /* The actual base of the address. */
491 /* The sequential id of the base. This allows us to have a
492 canonical ordering of these that is not based on addresses. */
  /* True if there are any positions that are to be processed
     globally.  */
497 bool process_globally
;
499 /* True if the base of this group is either the frame_pointer or
500 hard_frame_pointer. */
503 /* A mem wrapped around the base pointer for the group in order to do
504 read dependency. It must be given BLKmode in order to encompass all
505 the possible offsets from the base. */
508 /* Canonized version of base_mem's address. */
511 /* These two sets of two bitmaps are used to keep track of how many
512 stores are actually referencing that position from this base. We
513 only do this for rtx bases as this will be used to assign
514 positions in the bitmaps for the global problem. Bit N is set in
515 store1 on the first store for offset N. Bit N is set in store2
516 for the second store to offset N. This is all we need since we
517 only care about offsets that have two or more stores for them.
519 The "_n" suffix is for offsets less than 0 and the "_p" suffix is
520 for 0 and greater offsets.
522 There is one special case here, for stores into the stack frame,
     we will OR store1 into store2 before deciding which stores to look
524 at globally. This is because stores to the stack frame that have
     no other reads before the end of the function can also be
     deleted.  */
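
  /* For illustration: the first store seen at offset 4 sets bit 4 of
     store1_p; a second store to offset 4 then also sets bit 4 of
     store2_p.  Only positions present in store2_* end up with
     positions in the global bitmaps, except that for frame stores
     store1_* is OR-ed into store2_* as described above.  */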
527 bitmap store1_n
, store1_p
, store2_n
, store2_p
;
  /* These bitmaps keep track of which offsets in this group escape
     this function.
530 An offset escapes if it corresponds to a named variable whose
531 addressable flag is set. */
532 bitmap escaped_n
, escaped_p
;
534 /* The positions in this bitmap have the same assignments as the in,
535 out, gen and kill bitmaps. This bitmap is all zeros except for
536 the positions that are occupied by stores for this group. */
539 /* The offset_map is used to map the offsets from this base into
540 positions in the global bitmaps. It is only created after all of
     the stores have been scanned and we know which ones we
     care about.  */
543 int *offset_map_n
, *offset_map_p
;
544 int offset_map_size_n
, offset_map_size_p
;
546 typedef struct group_info
*group_info_t
;
547 typedef const struct group_info
*const_group_info_t
;
548 static alloc_pool rtx_group_info_pool
;
550 /* Tables of group_info structures, hashed by base value. */
551 static htab_t rtx_group_table
;
553 /* Index into the rtx_group_vec. */
554 static int rtx_group_next_id
;
556 DEF_VEC_P(group_info_t
);
557 DEF_VEC_ALLOC_P(group_info_t
,heap
);
559 static VEC(group_info_t
,heap
) *rtx_group_vec
;
562 /* This structure holds the set of changes that are being deferred
   when removing a read operation.  See replace_read.  */
564 struct deferred_change
567 /* The mem that is being replaced. */
570 /* The reg it is being replaced with. */
573 struct deferred_change
*next
;
576 typedef struct deferred_change
*deferred_change_t
;
577 static alloc_pool deferred_change_pool
;
579 static deferred_change_t deferred_change_list
= NULL
;
/* This is used to hold the alias sets of spill variables.  Since
582 these are never aliased and there may be a lot of them, it makes
583 sense to treat them specially. This bitvector is only allocated in
584 calls from dse_record_singleton_alias_set which currently is only
585 made during reload1. So when dse is called before reload this
586 mechanism does nothing. */
588 static bitmap clear_alias_sets
= NULL
;
590 /* The set of clear_alias_sets that have been disqualified because
591 there are loads or stores using a different mode than the alias set
592 was registered with. */
593 static bitmap disqualified_clear_alias_sets
= NULL
;
595 /* The group that holds all of the clear_alias_sets. */
596 static group_info_t clear_alias_group
;
598 /* The modes of the clear_alias_sets. */
599 static htab_t clear_alias_mode_table
;
601 /* Hash table element to look up the mode for an alias set. */
602 struct clear_alias_mode_holder
604 alias_set_type alias_set
;
605 enum machine_mode mode
;
608 static alloc_pool clear_alias_mode_pool
;
610 /* This is true except if cfun->stdarg -- i.e. we cannot do
611 this for vararg functions because they play games with the frame. */
612 static bool stores_off_frame_dead_at_return
;
614 /* Counter for stats. */
615 static int globally_deleted
;
616 static int locally_deleted
;
617 static int spill_deleted
;
619 static bitmap all_blocks
;
621 /* Locations that are killed by calls in the global phase. */
622 static bitmap kill_on_calls
;
624 /* The number of bits used in the global bitmaps. */
625 static unsigned int current_position
;
628 static bool gate_dse1 (void);
629 static bool gate_dse2 (void);
/*----------------------------------------------------------------------------
   Zeroth step.

   Initialization.
----------------------------------------------------------------------------*/
639 /* Find the entry associated with ALIAS_SET. */
641 static struct clear_alias_mode_holder
*
642 clear_alias_set_lookup (alias_set_type alias_set
)
644 struct clear_alias_mode_holder tmp_holder
;
647 tmp_holder
.alias_set
= alias_set
;
648 slot
= htab_find_slot (clear_alias_mode_table
, &tmp_holder
, NO_INSERT
);
651 return (struct clear_alias_mode_holder
*) *slot
;
655 /* Hashtable callbacks for maintaining the "bases" field of
656 store_group_info, given that the addresses are function invariants. */
659 invariant_group_base_eq (const void *p1
, const void *p2
)
661 const_group_info_t gi1
= (const_group_info_t
) p1
;
662 const_group_info_t gi2
= (const_group_info_t
) p2
;
663 return rtx_equal_p (gi1
->rtx_base
, gi2
->rtx_base
);
668 invariant_group_base_hash (const void *p
)
670 const_group_info_t gi
= (const_group_info_t
) p
;
672 return hash_rtx (gi
->rtx_base
, Pmode
, &do_not_record
, NULL
, false);
676 /* Get the GROUP for BASE. Add a new group if it is not there. */
679 get_group_info (rtx base
)
681 struct group_info tmp_gi
;
  /* Find the store_base_info structure for BASE, creating a new one
     if necessary.  */
689 tmp_gi
.rtx_base
= base
;
690 slot
= htab_find_slot (rtx_group_table
, &tmp_gi
, INSERT
);
691 gi
= (group_info_t
) *slot
;
695 if (!clear_alias_group
)
697 clear_alias_group
= gi
=
698 (group_info_t
) pool_alloc (rtx_group_info_pool
);
699 memset (gi
, 0, sizeof (struct group_info
));
700 gi
->id
= rtx_group_next_id
++;
701 gi
->store1_n
= BITMAP_ALLOC (&dse_bitmap_obstack
);
702 gi
->store1_p
= BITMAP_ALLOC (&dse_bitmap_obstack
);
703 gi
->store2_n
= BITMAP_ALLOC (&dse_bitmap_obstack
);
704 gi
->store2_p
= BITMAP_ALLOC (&dse_bitmap_obstack
);
705 gi
->escaped_p
= BITMAP_ALLOC (&dse_bitmap_obstack
);
706 gi
->escaped_n
= BITMAP_ALLOC (&dse_bitmap_obstack
);
707 gi
->group_kill
= BITMAP_ALLOC (&dse_bitmap_obstack
);
708 gi
->process_globally
= false;
709 gi
->offset_map_size_n
= 0;
710 gi
->offset_map_size_p
= 0;
711 gi
->offset_map_n
= NULL
;
712 gi
->offset_map_p
= NULL
;
713 VEC_safe_push (group_info_t
, heap
, rtx_group_vec
, gi
);
715 return clear_alias_group
;
720 *slot
= gi
= (group_info_t
) pool_alloc (rtx_group_info_pool
);
722 gi
->id
= rtx_group_next_id
++;
723 gi
->base_mem
= gen_rtx_MEM (BLKmode
, base
);
724 gi
->canon_base_addr
= canon_rtx (base
);
725 gi
->store1_n
= BITMAP_ALLOC (&dse_bitmap_obstack
);
726 gi
->store1_p
= BITMAP_ALLOC (&dse_bitmap_obstack
);
727 gi
->store2_n
= BITMAP_ALLOC (&dse_bitmap_obstack
);
728 gi
->store2_p
= BITMAP_ALLOC (&dse_bitmap_obstack
);
729 gi
->escaped_p
= BITMAP_ALLOC (&dse_bitmap_obstack
);
730 gi
->escaped_n
= BITMAP_ALLOC (&dse_bitmap_obstack
);
731 gi
->group_kill
= BITMAP_ALLOC (&dse_bitmap_obstack
);
732 gi
->process_globally
= false;
734 (base
== frame_pointer_rtx
) || (base
== hard_frame_pointer_rtx
);
735 gi
->offset_map_size_n
= 0;
736 gi
->offset_map_size_p
= 0;
737 gi
->offset_map_n
= NULL
;
738 gi
->offset_map_p
= NULL
;
739 VEC_safe_push (group_info_t
, heap
, rtx_group_vec
, gi
);
746 /* Initialization of data structures. */
752 globally_deleted
= 0;
755 bitmap_obstack_initialize (&dse_bitmap_obstack
);
756 gcc_obstack_init (&dse_obstack
);
758 scratch
= BITMAP_ALLOC (&reg_obstack
);
759 kill_on_calls
= BITMAP_ALLOC (&dse_bitmap_obstack
);
762 = create_alloc_pool ("rtx_store_info_pool",
763 sizeof (struct store_info
), 100);
765 = create_alloc_pool ("read_info_pool",
766 sizeof (struct read_info
), 100);
768 = create_alloc_pool ("insn_info_pool",
769 sizeof (struct insn_info
), 100);
771 = create_alloc_pool ("bb_info_pool",
772 sizeof (struct bb_info
), 100);
774 = create_alloc_pool ("rtx_group_info_pool",
775 sizeof (struct group_info
), 100);
777 = create_alloc_pool ("deferred_change_pool",
778 sizeof (struct deferred_change
), 10);
780 rtx_group_table
= htab_create (11, invariant_group_base_hash
,
781 invariant_group_base_eq
, NULL
);
783 bb_table
= XNEWVEC (bb_info_t
, last_basic_block
);
784 rtx_group_next_id
= 0;
786 stores_off_frame_dead_at_return
= !cfun
->stdarg
;
788 init_alias_analysis ();
790 if (clear_alias_sets
)
791 clear_alias_group
= get_group_info (NULL
);
793 clear_alias_group
= NULL
;
/*----------------------------------------------------------------------------
   First step.

   Scan all of the insns.  Any random ordering of the blocks is fine.
802 Each block is scanned in forward order to accommodate cselib which
803 is used to remove stores with non-constant bases.
804 ----------------------------------------------------------------------------*/
806 /* Delete all of the store_info recs from INSN_INFO. */
809 free_store_info (insn_info_t insn_info
)
811 store_info_t store_info
= insn_info
->store_rec
;
814 store_info_t next
= store_info
->next
;
815 if (store_info
->is_large
)
816 BITMAP_FREE (store_info
->positions_needed
.large
.bmap
);
817 if (store_info
->cse_base
)
818 pool_free (cse_store_info_pool
, store_info
);
820 pool_free (rtx_store_info_pool
, store_info
);
824 insn_info
->cannot_delete
= true;
825 insn_info
->contains_cselib_groups
= false;
826 insn_info
->store_rec
= NULL
;
832 regset fixed_regs_live
;
834 } note_add_store_info
;
836 /* Callback for emit_inc_dec_insn_before via note_stores.
837 Check if a register is clobbered which is live afterwards. */
840 note_add_store (rtx loc
, const_rtx expr ATTRIBUTE_UNUSED
, void *data
)
843 note_add_store_info
*info
= (note_add_store_info
*) data
;
849 /* If this register is referenced by the current or an earlier insn,
850 that's OK. E.g. this applies to the register that is being incremented
851 with this addition. */
852 for (insn
= info
->first
;
853 insn
!= NEXT_INSN (info
->current
);
854 insn
= NEXT_INSN (insn
))
855 if (reg_referenced_p (loc
, PATTERN (insn
)))
858 /* If we come here, we have a clobber of a register that's only OK
859 if that register is not live. If we don't have liveness information
860 available, fail now. */
861 if (!info
->fixed_regs_live
)
863 info
->failure
= true;
866 /* Now check if this is a live fixed register. */
868 n
= hard_regno_nregs
[r
][GET_MODE (loc
)];
870 if (REGNO_REG_SET_P (info
->fixed_regs_live
, r
+n
))
871 info
->failure
= true;
874 /* Callback for for_each_inc_dec that emits an INSN that sets DEST to
875 SRC + SRCOFF before insn ARG. */
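
/* For illustration: when the doomed insn is, say,

     (set (mem (post_inc (reg r1))) (reg r2))

   the increment side effect is preserved by emitting

     (set (reg r1) (plus (reg r1) (const_int 4)))

   just before it, so that deleting the store does not lose the update
   of r1 (4 being the size of the access).  */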
878 emit_inc_dec_insn_before (rtx mem ATTRIBUTE_UNUSED
,
879 rtx op ATTRIBUTE_UNUSED
,
880 rtx dest
, rtx src
, rtx srcoff
, void *arg
)
882 insn_info_t insn_info
= (insn_info_t
) arg
;
883 rtx insn
= insn_info
->insn
, new_insn
, cur
;
884 note_add_store_info info
;
886 /* We can reuse all operands without copying, because we are about
887 to delete the insn that contained it. */
891 emit_insn (gen_add3_insn (dest
, src
, srcoff
));
892 new_insn
= get_insns ();
896 new_insn
= gen_move_insn (dest
, src
);
897 info
.first
= new_insn
;
898 info
.fixed_regs_live
= insn_info
->fixed_regs_live
;
899 info
.failure
= false;
900 for (cur
= new_insn
; cur
; cur
= NEXT_INSN (cur
))
903 note_stores (PATTERN (cur
), note_add_store
, &info
);
906 /* If a failure was flagged above, return 1 so that for_each_inc_dec will
907 return it immediately, communicating the failure to its caller. */
911 emit_insn_before (new_insn
, insn
);
916 /* Before we delete INSN_INFO->INSN, make sure that the auto inc/dec, if it
917 is there, is split into a separate insn.
918 Return true on success (or if there was nothing to do), false on failure. */
921 check_for_inc_dec_1 (insn_info_t insn_info
)
923 rtx insn
= insn_info
->insn
;
924 rtx note
= find_reg_note (insn
, REG_INC
, NULL_RTX
);
926 return for_each_inc_dec (&insn
, emit_inc_dec_insn_before
, insn_info
) == 0;
931 /* Entry point for postreload. If you work on reload_cse, or you need this
932 anywhere else, consider if you can provide register liveness information
933 and add a parameter to this function so that it can be passed down in
934 insn_info.fixed_regs_live. */
936 check_for_inc_dec (rtx insn
)
938 struct insn_info insn_info
;
941 insn_info
.insn
= insn
;
942 insn_info
.fixed_regs_live
= NULL
;
943 note
= find_reg_note (insn
, REG_INC
, NULL_RTX
);
945 return for_each_inc_dec (&insn
, emit_inc_dec_insn_before
, &insn_info
) == 0;
949 /* Delete the insn and free all of the fields inside INSN_INFO. */
952 delete_dead_store_insn (insn_info_t insn_info
)
954 read_info_t read_info
;
959 if (!check_for_inc_dec_1 (insn_info
))
963 fprintf (dump_file
, "Locally deleting insn %d ",
964 INSN_UID (insn_info
->insn
));
965 if (insn_info
->store_rec
->alias_set
)
966 fprintf (dump_file
, "alias set %d\n",
967 (int) insn_info
->store_rec
->alias_set
);
969 fprintf (dump_file
, "\n");
972 free_store_info (insn_info
);
973 read_info
= insn_info
->read_rec
;
977 read_info_t next
= read_info
->next
;
978 pool_free (read_info_pool
, read_info
);
981 insn_info
->read_rec
= NULL
;
983 delete_insn (insn_info
->insn
);
985 insn_info
->insn
= NULL
;
987 insn_info
->wild_read
= false;
990 /* Check if EXPR can possibly escape the current function scope. */
992 can_escape (tree expr
)
997 base
= get_base_address (expr
);
999 && !may_be_aliased (base
))
/* Set the store* bitmaps and offset_map_size* fields in GROUP based on
1005 OFFSET and WIDTH. */
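
/* For illustration: a 4 byte store at offset 2 sets bits 2 through 5
   in the group's store1_p the first time those positions are seen,
   and in store2_p for any position that already had a store; negative
   offsets go into the corresponding *_n bitmaps with the sign
   flipped.  */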
1008 set_usage_bits (group_info_t group
, HOST_WIDE_INT offset
, HOST_WIDE_INT width
,
1012 bool expr_escapes
= can_escape (expr
);
1013 if (offset
> -MAX_OFFSET
&& offset
+ width
< MAX_OFFSET
)
1014 for (i
=offset
; i
<offset
+width
; i
++)
1022 store1
= group
->store1_n
;
1023 store2
= group
->store2_n
;
1024 escaped
= group
->escaped_n
;
1029 store1
= group
->store1_p
;
1030 store2
= group
->store2_p
;
1031 escaped
= group
->escaped_p
;
1035 if (!bitmap_set_bit (store1
, ai
))
1036 bitmap_set_bit (store2
, ai
);
1041 if (group
->offset_map_size_n
< ai
)
1042 group
->offset_map_size_n
= ai
;
1046 if (group
->offset_map_size_p
< ai
)
1047 group
->offset_map_size_p
= ai
;
1051 bitmap_set_bit (escaped
, ai
);
1056 reset_active_stores (void)
1058 active_local_stores
= NULL
;
1059 active_local_stores_len
= 0;
1062 /* Free all READ_REC of the LAST_INSN of BB_INFO. */
1065 free_read_records (bb_info_t bb_info
)
1067 insn_info_t insn_info
= bb_info
->last_insn
;
1068 read_info_t
*ptr
= &insn_info
->read_rec
;
1071 read_info_t next
= (*ptr
)->next
;
1072 if ((*ptr
)->alias_set
== 0)
1074 pool_free (read_info_pool
, *ptr
);
1078 ptr
= &(*ptr
)->next
;
1082 /* Set the BB_INFO so that the last insn is marked as a wild read. */
1085 add_wild_read (bb_info_t bb_info
)
1087 insn_info_t insn_info
= bb_info
->last_insn
;
1088 insn_info
->wild_read
= true;
1089 free_read_records (bb_info
);
1090 reset_active_stores ();
1093 /* Set the BB_INFO so that the last insn is marked as a wild read of
1094 non-frame locations. */
1097 add_non_frame_wild_read (bb_info_t bb_info
)
1099 insn_info_t insn_info
= bb_info
->last_insn
;
1100 insn_info
->non_frame_wild_read
= true;
1101 free_read_records (bb_info
);
1102 reset_active_stores ();
1105 /* Return true if X is a constant or one of the registers that behave
1106 as a constant over the life of a function. This is equivalent to
1107 !rtx_varies_p for memory addresses. */
1110 const_or_frame_p (rtx x
)
1112 switch (GET_CODE (x
))
1123 /* Note that we have to test for the actual rtx used for the frame
1124 and arg pointers and not just the register number in case we have
     eliminated the frame and/or arg pointer and are using it
     for pseudos.  */
1127 if (x
== frame_pointer_rtx
|| x
== hard_frame_pointer_rtx
1128 /* The arg pointer varies if it is not a fixed register. */
1129 || (x
== arg_pointer_rtx
&& fixed_regs
[ARG_POINTER_REGNUM
])
1130 || x
== pic_offset_table_rtx
)
1139 /* Take all reasonable action to put the address of MEM into the form
1140 that we can do analysis on.
1142 The gold standard is to get the address into the form: address +
1143 OFFSET where address is something that rtx_varies_p considers a
1144 constant. When we can get the address in this form, we can do
1145 global analysis on it. Note that for constant bases, address is
   not actually returned, only the group_id.  The address can be
   obtained from that.
1149 If that fails, we try cselib to get a value we can at least use
1150 locally. If that fails we return false.
1152 The GROUP_ID is set to -1 for cselib bases and the index of the
1153 group for non_varying bases.
1155 FOR_READ is true if this is a mem read and false if not. */
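
/* For illustration (hypothetical pseudo r100): if cselib knows that
   r100 holds (plus fp (const_int -16)), then for

     (mem (plus (reg r100) (const_int 8)))

   the expanded, canonicalized address is fp + -8, so *GROUP_ID is set
   to the group for fp and *OFFSET to -8.  */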
1158 canon_address (rtx mem
,
1159 alias_set_type
*alias_set_out
,
1161 HOST_WIDE_INT
*offset
,
1164 enum machine_mode address_mode
= get_address_mode (mem
);
1165 rtx mem_address
= XEXP (mem
, 0);
1166 rtx expanded_address
, address
;
  /* Make sure that cselib has initialized all of the operands of
1170 the address before asking it to do the subst. */
1172 if (clear_alias_sets
)
1174 /* If this is a spill, do not do any further processing. */
1175 alias_set_type alias_set
= MEM_ALIAS_SET (mem
);
1177 fprintf (dump_file
, "found alias set %d\n", (int) alias_set
);
1178 if (bitmap_bit_p (clear_alias_sets
, alias_set
))
1180 struct clear_alias_mode_holder
*entry
1181 = clear_alias_set_lookup (alias_set
);
1183 /* If the modes do not match, we cannot process this set. */
1184 if (entry
->mode
!= GET_MODE (mem
))
1188 "disqualifying alias set %d, (%s) != (%s)\n",
1189 (int) alias_set
, GET_MODE_NAME (entry
->mode
),
1190 GET_MODE_NAME (GET_MODE (mem
)));
1192 bitmap_set_bit (disqualified_clear_alias_sets
, alias_set
);
1196 *alias_set_out
= alias_set
;
1197 *group_id
= clear_alias_group
->id
;
1204 cselib_lookup (mem_address
, address_mode
, 1, GET_MODE (mem
));
1208 fprintf (dump_file
, " mem: ");
1209 print_inline_rtx (dump_file
, mem_address
, 0);
1210 fprintf (dump_file
, "\n");
1213 /* First see if just canon_rtx (mem_address) is const or frame,
1214 if not, try cselib_expand_value_rtx and call canon_rtx on that. */
1216 for (expanded
= 0; expanded
< 2; expanded
++)
1220 /* Use cselib to replace all of the reg references with the full
1221 expression. This will take care of the case where we have
	     r_x = base + offset;
	     val = *r_x;

	     by making it into

	     val = *(base + offset);  */
1230 expanded_address
= cselib_expand_value_rtx (mem_address
,
	  /* If this fails, just go with the address from the first
	     iteration.  */
1235 if (!expanded_address
)
1239 expanded_address
= mem_address
;
1241 /* Split the address into canonical BASE + OFFSET terms. */
1242 address
= canon_rtx (expanded_address
);
1250 fprintf (dump_file
, "\n after cselib_expand address: ");
1251 print_inline_rtx (dump_file
, expanded_address
, 0);
1252 fprintf (dump_file
, "\n");
1255 fprintf (dump_file
, "\n after canon_rtx address: ");
1256 print_inline_rtx (dump_file
, address
, 0);
1257 fprintf (dump_file
, "\n");
1260 if (GET_CODE (address
) == CONST
)
1261 address
= XEXP (address
, 0);
1263 if (GET_CODE (address
) == PLUS
1264 && CONST_INT_P (XEXP (address
, 1)))
1266 *offset
= INTVAL (XEXP (address
, 1));
1267 address
= XEXP (address
, 0);
1270 if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (mem
))
1271 && const_or_frame_p (address
))
1273 group_info_t group
= get_group_info (address
);
1276 fprintf (dump_file
, " gid=%d offset=%d \n",
1277 group
->id
, (int)*offset
);
1279 *group_id
= group
->id
;
1284 *base
= cselib_lookup (address
, address_mode
, true, GET_MODE (mem
));
1290 fprintf (dump_file
, " no cselib val - should be a wild read.\n");
1294 fprintf (dump_file
, " varying cselib base=%u:%u offset = %d\n",
1295 (*base
)->uid
, (*base
)->hash
, (int)*offset
);
1300 /* Clear the rhs field from the active_local_stores array. */
1303 clear_rhs_from_active_local_stores (void)
1305 insn_info_t ptr
= active_local_stores
;
1309 store_info_t store_info
= ptr
->store_rec
;
1310 /* Skip the clobbers. */
1311 while (!store_info
->is_set
)
1312 store_info
= store_info
->next
;
1314 store_info
->rhs
= NULL
;
1315 store_info
->const_rhs
= NULL
;
1317 ptr
= ptr
->next_local_store
;
1322 /* Mark byte POS bytes from the beginning of store S_INFO as unneeded. */
1325 set_position_unneeded (store_info_t s_info
, int pos
)
1327 if (__builtin_expect (s_info
->is_large
, false))
1329 if (bitmap_set_bit (s_info
->positions_needed
.large
.bmap
, pos
))
1330 s_info
->positions_needed
.large
.count
++;
1333 s_info
->positions_needed
.small_bitmask
1334 &= ~(((unsigned HOST_WIDE_INT
) 1) << pos
);
1337 /* Mark the whole store S_INFO as unneeded. */
1340 set_all_positions_unneeded (store_info_t s_info
)
1342 if (__builtin_expect (s_info
->is_large
, false))
1344 int pos
, end
= s_info
->end
- s_info
->begin
;
1345 for (pos
= 0; pos
< end
; pos
++)
1346 bitmap_set_bit (s_info
->positions_needed
.large
.bmap
, pos
);
1347 s_info
->positions_needed
.large
.count
= end
;
1350 s_info
->positions_needed
.small_bitmask
= (unsigned HOST_WIDE_INT
) 0;
1353 /* Return TRUE if any bytes from S_INFO store are needed. */
1356 any_positions_needed_p (store_info_t s_info
)
1358 if (__builtin_expect (s_info
->is_large
, false))
1359 return (s_info
->positions_needed
.large
.count
1360 < s_info
->end
- s_info
->begin
);
1362 return (s_info
->positions_needed
.small_bitmask
1363 != (unsigned HOST_WIDE_INT
) 0);
1366 /* Return TRUE if all bytes START through START+WIDTH-1 from S_INFO
1367 store are needed. */
1370 all_positions_needed_p (store_info_t s_info
, int start
, int width
)
1372 if (__builtin_expect (s_info
->is_large
, false))
1374 int end
= start
+ width
;
1376 if (bitmap_bit_p (s_info
->positions_needed
.large
.bmap
, start
++))
1382 unsigned HOST_WIDE_INT mask
= lowpart_bitmask (width
) << start
;
1383 return (s_info
->positions_needed
.small_bitmask
& mask
) == mask
;
1388 static rtx
get_stored_val (store_info_t
, enum machine_mode
, HOST_WIDE_INT
,
1389 HOST_WIDE_INT
, basic_block
, bool);
1392 /* BODY is an instruction pattern that belongs to INSN. Return 1 if
1393 there is a candidate store, after adding it to the appropriate
1394 local store group if so. */
1397 record_store (rtx body
, bb_info_t bb_info
)
1399 rtx mem
, rhs
, const_rhs
, mem_addr
;
1400 HOST_WIDE_INT offset
= 0;
1401 HOST_WIDE_INT width
= 0;
1402 alias_set_type spill_alias_set
;
1403 insn_info_t insn_info
= bb_info
->last_insn
;
1404 store_info_t store_info
= NULL
;
1406 cselib_val
*base
= NULL
;
1407 insn_info_t ptr
, last
, redundant_reason
;
1408 bool store_is_unused
;
1410 if (GET_CODE (body
) != SET
&& GET_CODE (body
) != CLOBBER
)
1413 mem
= SET_DEST (body
);
1415 /* If this is not used, then this cannot be used to keep the insn
1416 from being deleted. On the other hand, it does provide something
1417 that can be used to prove that another store is dead. */
1419 = (find_reg_note (insn_info
->insn
, REG_UNUSED
, mem
) != NULL
);
1421 /* Check whether that value is a suitable memory location. */
      /* If the set or clobber is unused, then it does not affect our
1425 ability to get rid of the entire insn. */
1426 if (!store_is_unused
)
1427 insn_info
->cannot_delete
= true;
1431 /* At this point we know mem is a mem. */
1432 if (GET_MODE (mem
) == BLKmode
)
1434 if (GET_CODE (XEXP (mem
, 0)) == SCRATCH
)
1437 fprintf (dump_file
, " adding wild read for (clobber (mem:BLK (scratch))\n");
1438 add_wild_read (bb_info
);
1439 insn_info
->cannot_delete
= true;
1442 /* Handle (set (mem:BLK (addr) [... S36 ...]) (const_int 0))
1443 as memset (addr, 0, 36); */
1444 else if (!MEM_SIZE_KNOWN_P (mem
)
1445 || MEM_SIZE (mem
) <= 0
1446 || MEM_SIZE (mem
) > MAX_OFFSET
1447 || GET_CODE (body
) != SET
1448 || !CONST_INT_P (SET_SRC (body
)))
1450 if (!store_is_unused
)
	  /* If the set or clobber is unused, then it does not affect our
1453 ability to get rid of the entire insn. */
1454 insn_info
->cannot_delete
= true;
1455 clear_rhs_from_active_local_stores ();
1461 /* We can still process a volatile mem, we just cannot delete it. */
1462 if (MEM_VOLATILE_P (mem
))
1463 insn_info
->cannot_delete
= true;
1465 if (!canon_address (mem
, &spill_alias_set
, &group_id
, &offset
, &base
))
1467 clear_rhs_from_active_local_stores ();
1471 if (GET_MODE (mem
) == BLKmode
)
1472 width
= MEM_SIZE (mem
);
1475 width
= GET_MODE_SIZE (GET_MODE (mem
));
1476 gcc_assert ((unsigned) width
<= HOST_BITS_PER_WIDE_INT
);
1479 if (spill_alias_set
)
1481 bitmap store1
= clear_alias_group
->store1_p
;
1482 bitmap store2
= clear_alias_group
->store2_p
;
1484 gcc_assert (GET_MODE (mem
) != BLKmode
);
1486 if (!bitmap_set_bit (store1
, spill_alias_set
))
1487 bitmap_set_bit (store2
, spill_alias_set
);
1489 if (clear_alias_group
->offset_map_size_p
< spill_alias_set
)
1490 clear_alias_group
->offset_map_size_p
= spill_alias_set
;
1492 store_info
= (store_info_t
) pool_alloc (rtx_store_info_pool
);
1495 fprintf (dump_file
, " processing spill store %d(%s)\n",
1496 (int) spill_alias_set
, GET_MODE_NAME (GET_MODE (mem
)));
1498 else if (group_id
>= 0)
1500 /* In the restrictive case where the base is a constant or the
1501 frame pointer we can do global analysis. */
1504 = VEC_index (group_info_t
, rtx_group_vec
, group_id
);
1505 tree expr
= MEM_EXPR (mem
);
1507 store_info
= (store_info_t
) pool_alloc (rtx_store_info_pool
);
1508 set_usage_bits (group
, offset
, width
, expr
);
1511 fprintf (dump_file
, " processing const base store gid=%d[%d..%d)\n",
1512 group_id
, (int)offset
, (int)(offset
+width
));
1516 if (may_be_sp_based_p (XEXP (mem
, 0)))
1517 insn_info
->stack_pointer_based
= true;
1518 insn_info
->contains_cselib_groups
= true;
1520 store_info
= (store_info_t
) pool_alloc (cse_store_info_pool
);
1524 fprintf (dump_file
, " processing cselib store [%d..%d)\n",
1525 (int)offset
, (int)(offset
+width
));
1528 const_rhs
= rhs
= NULL_RTX
;
1529 if (GET_CODE (body
) == SET
1530 /* No place to keep the value after ra. */
1531 && !reload_completed
1532 && (REG_P (SET_SRC (body
))
1533 || GET_CODE (SET_SRC (body
)) == SUBREG
1534 || CONSTANT_P (SET_SRC (body
)))
1535 && !MEM_VOLATILE_P (mem
)
      /* Sometimes the store and reload is used for truncation and
	 rounding.  */
1538 && !(FLOAT_MODE_P (GET_MODE (mem
)) && (flag_float_store
)))
1540 rhs
= SET_SRC (body
);
1541 if (CONSTANT_P (rhs
))
1543 else if (body
== PATTERN (insn_info
->insn
))
1545 rtx tem
= find_reg_note (insn_info
->insn
, REG_EQUAL
, NULL_RTX
);
1546 if (tem
&& CONSTANT_P (XEXP (tem
, 0)))
1547 const_rhs
= XEXP (tem
, 0);
1549 if (const_rhs
== NULL_RTX
&& REG_P (rhs
))
1551 rtx tem
= cselib_expand_value_rtx (rhs
, scratch
, 5);
1553 if (tem
&& CONSTANT_P (tem
))
  /* Check to see if this store causes some other stores to be
     dead.  */
1560 ptr
= active_local_stores
;
1562 redundant_reason
= NULL
;
1563 mem
= canon_rtx (mem
);
  /* For alias_set != 0 canon_true_dependence should never be called.  */
1565 if (spill_alias_set
)
1566 mem_addr
= NULL_RTX
;
1570 mem_addr
= base
->val_rtx
;
1574 = VEC_index (group_info_t
, rtx_group_vec
, group_id
);
1575 mem_addr
= group
->canon_base_addr
;
1578 mem_addr
= plus_constant (get_address_mode (mem
), mem_addr
, offset
);
1583 insn_info_t next
= ptr
->next_local_store
;
1584 store_info_t s_info
= ptr
->store_rec
;
1587 /* Skip the clobbers. We delete the active insn if this insn
1588 shadows the set. To have been put on the active list, it
	 has exactly one set.  */
1590 while (!s_info
->is_set
)
1591 s_info
= s_info
->next
;
1593 if (s_info
->alias_set
!= spill_alias_set
)
1595 else if (s_info
->alias_set
)
1597 struct clear_alias_mode_holder
*entry
1598 = clear_alias_set_lookup (s_info
->alias_set
);
	  /* Generally, spills cannot be processed if any of the
1600 references to the slot have a different mode. But if
1601 we are in the same block and mode is exactly the same
1602 between this store and one before in the same block,
1603 we can still delete it. */
1604 if ((GET_MODE (mem
) == GET_MODE (s_info
->mem
))
1605 && (GET_MODE (mem
) == entry
->mode
))
1608 set_all_positions_unneeded (s_info
);
1611 fprintf (dump_file
, " trying spill store in insn=%d alias_set=%d\n",
1612 INSN_UID (ptr
->insn
), (int) s_info
->alias_set
);
1614 else if ((s_info
->group_id
== group_id
)
1615 && (s_info
->cse_base
== base
))
1619 fprintf (dump_file
, " trying store in insn=%d gid=%d[%d..%d)\n",
1620 INSN_UID (ptr
->insn
), s_info
->group_id
,
1621 (int)s_info
->begin
, (int)s_info
->end
);
1623 /* Even if PTR won't be eliminated as unneeded, if both
1624 PTR and this insn store the same constant value, we might
1625 eliminate this insn instead. */
1626 if (s_info
->const_rhs
1628 && offset
>= s_info
->begin
1629 && offset
+ width
<= s_info
->end
1630 && all_positions_needed_p (s_info
, offset
- s_info
->begin
,
1633 if (GET_MODE (mem
) == BLKmode
)
1635 if (GET_MODE (s_info
->mem
) == BLKmode
1636 && s_info
->const_rhs
== const_rhs
)
1637 redundant_reason
= ptr
;
1639 else if (s_info
->const_rhs
== const0_rtx
1640 && const_rhs
== const0_rtx
)
1641 redundant_reason
= ptr
;
1646 val
= get_stored_val (s_info
, GET_MODE (mem
),
1647 offset
, offset
+ width
,
1648 BLOCK_FOR_INSN (insn_info
->insn
),
1650 if (get_insns () != NULL
)
1653 if (val
&& rtx_equal_p (val
, const_rhs
))
1654 redundant_reason
= ptr
;
1658 for (i
= MAX (offset
, s_info
->begin
);
1659 i
< offset
+ width
&& i
< s_info
->end
;
1661 set_position_unneeded (s_info
, i
- s_info
->begin
);
1663 else if (s_info
->rhs
)
1664 /* Need to see if it is possible for this store to overwrite
1665 the value of store_info. If it is, set the rhs to NULL to
1666 keep it from being used to remove a load. */
1668 if (canon_true_dependence (s_info
->mem
,
1669 GET_MODE (s_info
->mem
),
1674 s_info
->const_rhs
= NULL
;
1678 /* An insn can be deleted if every position of every one of
1679 its s_infos is zero. */
1680 if (any_positions_needed_p (s_info
))
1685 insn_info_t insn_to_delete
= ptr
;
1687 active_local_stores_len
--;
1689 last
->next_local_store
= ptr
->next_local_store
;
1691 active_local_stores
= ptr
->next_local_store
;
1693 if (!insn_to_delete
->cannot_delete
)
1694 delete_dead_store_insn (insn_to_delete
);
1702 /* Finish filling in the store_info. */
1703 store_info
->next
= insn_info
->store_rec
;
1704 insn_info
->store_rec
= store_info
;
1705 store_info
->mem
= mem
;
1706 store_info
->alias_set
= spill_alias_set
;
1707 store_info
->mem_addr
= mem_addr
;
1708 store_info
->cse_base
= base
;
1709 if (width
> HOST_BITS_PER_WIDE_INT
)
1711 store_info
->is_large
= true;
1712 store_info
->positions_needed
.large
.count
= 0;
1713 store_info
->positions_needed
.large
.bmap
= BITMAP_ALLOC (&dse_bitmap_obstack
);
1717 store_info
->is_large
= false;
1718 store_info
->positions_needed
.small_bitmask
= lowpart_bitmask (width
);
1720 store_info
->group_id
= group_id
;
1721 store_info
->begin
= offset
;
1722 store_info
->end
= offset
+ width
;
1723 store_info
->is_set
= GET_CODE (body
) == SET
;
1724 store_info
->rhs
= rhs
;
1725 store_info
->const_rhs
= const_rhs
;
1726 store_info
->redundant_reason
= redundant_reason
;
1728 /* If this is a clobber, we return 0. We will only be able to
     delete this insn if there is only one USED store, but we
1730 can use the clobber to delete other stores earlier. */
1731 return store_info
->is_set
? 1 : 0;
1736 dump_insn_info (const char * start
, insn_info_t insn_info
)
1738 fprintf (dump_file
, "%s insn=%d %s\n", start
,
1739 INSN_UID (insn_info
->insn
),
1740 insn_info
->store_rec
? "has store" : "naked");
1744 /* If the modes are different and the value's source and target do not
1745 line up, we need to extract the value from lower part of the rhs of
1746 the store, shift it, and then put it into a form that can be shoved
1747 into the read_insn. This function generates a right SHIFT of a
1748 value that is at least ACCESS_SIZE bytes wide of READ_MODE. The
1749 shift sequence is returned or NULL if we failed to find a
1753 find_shift_sequence (int access_size
,
1754 store_info_t store_info
,
1755 enum machine_mode read_mode
,
1756 int shift
, bool speed
, bool require_cst
)
1758 enum machine_mode store_mode
= GET_MODE (store_info
->mem
);
1759 enum machine_mode new_mode
;
1760 rtx read_reg
= NULL
;
1762 /* Some machines like the x86 have shift insns for each size of
1763 operand. Other machines like the ppc or the ia-64 may only have
1764 shift insns that shift values within 32 or 64 bit registers.
1765 This loop tries to find the smallest shift insn that will right
1766 justify the value we want to read but is available in one insn on
1769 for (new_mode
= smallest_mode_for_size (access_size
* BITS_PER_UNIT
,
1771 GET_MODE_BITSIZE (new_mode
) <= BITS_PER_WORD
;
1772 new_mode
= GET_MODE_WIDER_MODE (new_mode
))
1774 rtx target
, new_reg
, shift_seq
, insn
, new_lhs
;
1777 /* If a constant was stored into memory, try to simplify it here,
1778 otherwise the cost of the shift might preclude this optimization
1779 e.g. at -Os, even when no actual shift will be needed. */
1780 if (store_info
->const_rhs
)
1782 unsigned int byte
= subreg_lowpart_offset (new_mode
, store_mode
);
1783 rtx ret
= simplify_subreg (new_mode
, store_info
->const_rhs
,
1785 if (ret
&& CONSTANT_P (ret
))
1787 ret
= simplify_const_binary_operation (LSHIFTRT
, new_mode
,
1788 ret
, GEN_INT (shift
));
1789 if (ret
&& CONSTANT_P (ret
))
1791 byte
= subreg_lowpart_offset (read_mode
, new_mode
);
1792 ret
= simplify_subreg (read_mode
, ret
, new_mode
, byte
);
1793 if (ret
&& CONSTANT_P (ret
)
1794 && set_src_cost (ret
, speed
) <= COSTS_N_INSNS (1))
1803 /* Try a wider mode if truncating the store mode to NEW_MODE
1804 requires a real instruction. */
1805 if (GET_MODE_BITSIZE (new_mode
) < GET_MODE_BITSIZE (store_mode
)
1806 && !TRULY_NOOP_TRUNCATION_MODES_P (new_mode
, store_mode
))
1809 /* Also try a wider mode if the necessary punning is either not
1810 desirable or not possible. */
1811 if (!CONSTANT_P (store_info
->rhs
)
1812 && !MODES_TIEABLE_P (new_mode
, store_mode
))
1815 new_reg
= gen_reg_rtx (new_mode
);
1819 /* In theory we could also check for an ashr. Ian Taylor knows
1820 of one dsp where the cost of these two was not the same. But
1821 this really is a rare case anyway. */
1822 target
= expand_binop (new_mode
, lshr_optab
, new_reg
,
1823 GEN_INT (shift
), new_reg
, 1, OPTAB_DIRECT
);
1825 shift_seq
= get_insns ();
1828 if (target
!= new_reg
|| shift_seq
== NULL
)
1832 for (insn
= shift_seq
; insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
1834 cost
+= insn_rtx_cost (PATTERN (insn
), speed
);
1836 /* The computation up to here is essentially independent
1837 of the arguments and could be precomputed. It may
1838 not be worth doing so. We could precompute if
1839 worthwhile or at least cache the results. The result
1840 technically depends on both SHIFT and ACCESS_SIZE,
1841 but in practice the answer will depend only on ACCESS_SIZE. */
1843 if (cost
> COSTS_N_INSNS (1))
1846 new_lhs
= extract_low_bits (new_mode
, store_mode
,
1847 copy_rtx (store_info
->rhs
));
1848 if (new_lhs
== NULL_RTX
)
1851 /* We found an acceptable shift. Generate a move to
1852 take the value from the store and put it into the
1853 shift pseudo, then shift it, then generate another
	 move to put it into the target of the read.  */
1855 emit_move_insn (new_reg
, new_lhs
);
1856 emit_insn (shift_seq
);
1857 read_reg
= extract_low_bits (read_mode
, new_mode
, new_reg
);
/* Callback for note_stores to find the hard regs set or clobbered by
1866 insn. Data is a bitmap of the hardregs set so far. */
1869 look_for_hardregs (rtx x
, const_rtx pat ATTRIBUTE_UNUSED
, void *data
)
1871 bitmap regs_set
= (bitmap
) data
;
1874 && HARD_REGISTER_P (x
))
1876 unsigned int regno
= REGNO (x
);
1877 bitmap_set_range (regs_set
, regno
,
1878 hard_regno_nregs
[regno
][GET_MODE (x
)]);
1882 /* Helper function for replace_read and record_store.
1883 Attempt to return a value stored in STORE_INFO, from READ_BEGIN
1884 to one before READ_END bytes read in READ_MODE. Return NULL
   if not successful.  If REQUIRE_CST is true, always return a
   constant.  */
1888 get_stored_val (store_info_t store_info
, enum machine_mode read_mode
,
1889 HOST_WIDE_INT read_begin
, HOST_WIDE_INT read_end
,
1890 basic_block bb
, bool require_cst
)
1892 enum machine_mode store_mode
= GET_MODE (store_info
->mem
);
1894 int access_size
; /* In bytes. */
1897 /* To get here the read is within the boundaries of the write so
     shift will never be negative.  Start out with the shift being in
     bytes.  */
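  /* For instance, for a 4 byte store with BEGIN 0 and END 4 and a
     1 byte read of the byte at offset 2 (READ_BEGIN 2, READ_END 3),
     the shift is 2 bytes on a little-endian target and 1 byte on a
     big-endian one.  */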
1900 if (store_mode
== BLKmode
)
1902 else if (BYTES_BIG_ENDIAN
)
1903 shift
= store_info
->end
- read_end
;
1905 shift
= read_begin
- store_info
->begin
;
1907 access_size
= shift
+ GET_MODE_SIZE (read_mode
);
1909 /* From now on it is bits. */
1910 shift
*= BITS_PER_UNIT
;
1913 read_reg
= find_shift_sequence (access_size
, store_info
, read_mode
, shift
,
1914 optimize_bb_for_speed_p (bb
),
1916 else if (store_mode
== BLKmode
)
1918 /* The store is a memset (addr, const_val, const_size). */
1919 gcc_assert (CONST_INT_P (store_info
->rhs
));
1920 store_mode
= int_mode_for_mode (read_mode
);
1921 if (store_mode
== BLKmode
)
1922 read_reg
= NULL_RTX
;
1923 else if (store_info
->rhs
== const0_rtx
)
1924 read_reg
= extract_low_bits (read_mode
, store_mode
, const0_rtx
);
1925 else if (GET_MODE_BITSIZE (store_mode
) > HOST_BITS_PER_WIDE_INT
1926 || BITS_PER_UNIT
>= HOST_BITS_PER_WIDE_INT
)
1927 read_reg
= NULL_RTX
;
1930 unsigned HOST_WIDE_INT c
1931 = INTVAL (store_info
->rhs
)
1932 & (((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
) - 1);
1933 int shift
= BITS_PER_UNIT
;
1934 while (shift
< HOST_BITS_PER_WIDE_INT
)
1939 read_reg
= gen_int_mode (c
, store_mode
);
1940 read_reg
= extract_low_bits (read_mode
, store_mode
, read_reg
);
1943 else if (store_info
->const_rhs
1945 || GET_MODE_CLASS (read_mode
) != GET_MODE_CLASS (store_mode
)))
1946 read_reg
= extract_low_bits (read_mode
, store_mode
,
1947 copy_rtx (store_info
->const_rhs
));
1949 read_reg
= extract_low_bits (read_mode
, store_mode
,
1950 copy_rtx (store_info
->rhs
));
1951 if (require_cst
&& read_reg
&& !CONSTANT_P (read_reg
))
1952 read_reg
= NULL_RTX
;
/* Take a sequence of:

     A <- r1
     ...
     ... <- A

   and change it into:

     r2 <- r1
     A <- r1
     ...
     ... <- r2

   or a variant that extracts and right-shifts r1 into r2, depending on
   the alignment and the mode of the store and subsequent load.

   The STORE_INFO and STORE_INSN are for the store and READ_INFO
   and READ_INSN are for the read.  Return true if the replacement
   went ok.  */
1988 replace_read (store_info_t store_info
, insn_info_t store_insn
,
1989 read_info_t read_info
, insn_info_t read_insn
, rtx
*loc
,
1992 enum machine_mode store_mode
= GET_MODE (store_info
->mem
);
1993 enum machine_mode read_mode
= GET_MODE (read_info
->mem
);
1994 rtx insns
, this_insn
, read_reg
;
2000 /* Create a sequence of instructions to set up the read register.
2001 This sequence goes immediately before the store and its result
2002 is read by the load.
2004 We need to keep this in perspective. We are replacing a read
2005 with a sequence of insns, but the read will almost certainly be
2006 in cache, so it is not going to be an expensive one. Thus, we
2007 are not willing to do a multi insn shift or worse a subroutine
2008 call to get rid of the read. */
2010 fprintf (dump_file
, "trying to replace %smode load in insn %d"
2011 " from %smode store in insn %d\n",
2012 GET_MODE_NAME (read_mode
), INSN_UID (read_insn
->insn
),
2013 GET_MODE_NAME (store_mode
), INSN_UID (store_insn
->insn
));
2015 bb
= BLOCK_FOR_INSN (read_insn
->insn
);
2016 read_reg
= get_stored_val (store_info
,
2017 read_mode
, read_info
->begin
, read_info
->end
,
2019 if (read_reg
== NULL_RTX
)
2023 fprintf (dump_file
, " -- could not extract bits of stored value\n");
2026 /* Force the value into a new register so that it won't be clobbered
2027 between the store and the load. */
2028 read_reg
= copy_to_mode_reg (read_mode
, read_reg
);
2029 insns
= get_insns ();
2032 if (insns
!= NULL_RTX
)
2034 /* Now we have to scan the set of new instructions to see if the
2035 sequence contains and sets of hardregs that happened to be
2036 live at this point. For instance, this can happen if one of
2037 the insns sets the CC and the CC happened to be live at that
2038 point. This does occasionally happen, see PR 37922. */
2039 bitmap regs_set
= BITMAP_ALLOC (®_obstack
);
2041 for (this_insn
= insns
; this_insn
!= NULL_RTX
; this_insn
= NEXT_INSN (this_insn
))
2042 note_stores (PATTERN (this_insn
), look_for_hardregs
, regs_set
);
2044 bitmap_and_into (regs_set
, regs_live
);
2045 if (!bitmap_empty_p (regs_set
))
2050 "abandoning replacement because sequence clobbers live hardregs:");
2051 df_print_regset (dump_file
, regs_set
);
2054 BITMAP_FREE (regs_set
);
2057 BITMAP_FREE (regs_set
);
2060 if (validate_change (read_insn
->insn
, loc
, read_reg
, 0))
2062 deferred_change_t deferred_change
=
2063 (deferred_change_t
) pool_alloc (deferred_change_pool
);
2065 /* Insert this right before the store insn where it will be safe
2066 from later insns that might change it before the read. */
2067 emit_insn_before (insns
, store_insn
->insn
);
2069 /* And now for the kludge part: cselib croaks if you just
2070 return at this point. There are two reasons for this:
2072 1) Cselib has an idea of how many pseudos there are and
2073 that does not include the new ones we just added.
2075 2) Cselib does not know about the move insn we added
2076 above the store_info, and there is no way to tell it
2077 about it, because it has "moved on".
2079 Problem (1) is fixable with a certain amount of engineering.
2080 Problem (2) is requires starting the bb from scratch. This
2083 So we are just going to have to lie. The move/extraction
2084 insns are not really an issue, cselib did not see them. But
2085 the use of the new pseudo read_insn is a real problem because
2086 cselib has not scanned this insn. The way that we solve this
2087 problem is that we are just going to put the mem back for now
2088 and when we are finished with the block, we undo this. We
2089 keep a table of mems to get rid of. At the end of the basic
2090 block we can put them back. */
2092 *loc
= read_info
->mem
;
2093 deferred_change
->next
= deferred_change_list
;
2094 deferred_change_list
= deferred_change
;
2095 deferred_change
->loc
= loc
;
2096 deferred_change
->reg
= read_reg
;
2098 /* Get rid of the read_info, from the point of view of the
2099 rest of dse, play like this read never happened. */
2100 read_insn
->read_rec
= read_info
->next
;
2101 pool_free (read_info_pool
, read_info
);
2104 fprintf (dump_file
, " -- replaced the loaded MEM with ");
2105 print_simple_rtl (dump_file
, read_reg
);
2106 fprintf (dump_file
, "\n");
2114 fprintf (dump_file
, " -- replacing the loaded MEM with ");
2115 print_simple_rtl (dump_file
, read_reg
);
2116 fprintf (dump_file
, " led to an invalid instruction\n");
/* A for_each_rtx callback in which DATA is the bb_info.  Check to see
   if LOC is a mem and if it is, look at the address and kill any
   appropriate stores that may be active.  */

static int
check_mem_read_rtx (rtx *loc, void *data)
{
  rtx mem = *loc, mem_addr;
  bb_info_t bb_info;
  insn_info_t insn_info;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT width = 0;
  alias_set_type spill_alias_set = 0;
  cselib_val *base = NULL;
  int group_id;
  read_info_t read_info;

  if (!mem || !MEM_P (mem))
    return 0;

  bb_info = (bb_info_t) data;
  insn_info = bb_info->last_insn;

  if ((MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
      || (MEM_VOLATILE_P (mem)))
    {
      if (dump_file)
        fprintf (dump_file, " adding wild read, volatile or barrier.\n");
      add_wild_read (bb_info);
      insn_info->cannot_delete = true;
      return 0;
    }

  /* If it is reading readonly mem, then there can be no conflict with
     another write.  */
  if (MEM_READONLY_P (mem))
    return 0;

  if (!canon_address (mem, &spill_alias_set, &group_id, &offset, &base))
    {
      if (dump_file)
        fprintf (dump_file, " adding wild read, canon_address failure.\n");
      add_wild_read (bb_info);
      return 0;
    }

  if (GET_MODE (mem) == BLKmode)
    width = -1;
  else
    width = GET_MODE_SIZE (GET_MODE (mem));

  read_info = (read_info_t) pool_alloc (read_info_pool);
  read_info->group_id = group_id;
  read_info->mem = mem;
  read_info->alias_set = spill_alias_set;
  read_info->begin = offset;
  read_info->end = offset + width;
  read_info->next = insn_info->read_rec;
  insn_info->read_rec = read_info;
  /* For alias_set != 0 canon_true_dependence should be never called.  */
  if (spill_alias_set)
    mem_addr = NULL_RTX;
  else
    {
      if (group_id < 0)
        mem_addr = base->val_rtx;
      else
        {
          group_info_t group
            = VEC_index (group_info_t, rtx_group_vec, group_id);
          mem_addr = group->canon_base_addr;
        }
      if (offset)
        mem_addr = plus_constant (get_address_mode (mem), mem_addr, offset);
    }

  /* We ignore the clobbers in store_info.  This is mildly aggressive,
     but there really should not be a clobber followed by a read.  */

  if (spill_alias_set)
    {
      insn_info_t i_ptr = active_local_stores;
      insn_info_t last = NULL;

      if (dump_file)
        fprintf (dump_file, " processing spill load %d\n",
                 (int) spill_alias_set);

      while (i_ptr)
        {
          store_info_t store_info = i_ptr->store_rec;

          /* Skip the clobbers.  */
          while (!store_info->is_set)
            store_info = store_info->next;

          if (store_info->alias_set == spill_alias_set)
            {
              if (dump_file)
                dump_insn_info ("removing from active", i_ptr);

              active_local_stores_len--;
              if (last)
                last->next_local_store = i_ptr->next_local_store;
              else
                active_local_stores = i_ptr->next_local_store;
            }
          else
            last = i_ptr;
          i_ptr = i_ptr->next_local_store;
        }
    }
  else if (group_id >= 0)
    {
      /* This is the restricted case where the base is a constant or
         the frame pointer and offset is a constant.  */
      insn_info_t i_ptr = active_local_stores;
      insn_info_t last = NULL;

      if (dump_file)
        {
          if (width == -1)
            fprintf (dump_file, " processing const load gid=%d[BLK]\n",
                     group_id);
          else
            fprintf (dump_file, " processing const load gid=%d[%d..%d)\n",
                     group_id, (int)offset, (int)(offset+width));
        }

      while (i_ptr)
        {
          bool remove = false;
          store_info_t store_info = i_ptr->store_rec;

          /* Skip the clobbers.  */
          while (!store_info->is_set)
            store_info = store_info->next;

          /* There are three cases here.  */
          if (store_info->group_id < 0)
            /* We have a cselib store followed by a read from a
               const base.  */
            remove
              = canon_true_dependence (store_info->mem,
                                       GET_MODE (store_info->mem),
                                       store_info->mem_addr,
                                       mem, mem_addr);

          else if (group_id == store_info->group_id)
            {
              /* This is a block mode load.  We may get lucky and
                 canon_true_dependence may save the day.  */
              if (width == -1)
                remove
                  = canon_true_dependence (store_info->mem,
                                           GET_MODE (store_info->mem),
                                           store_info->mem_addr,
                                           mem, mem_addr);

              /* If this read is just reading back something that we just
                 stored, rewrite the read.  */
              else
                {
                  if (store_info->rhs
                      && offset >= store_info->begin
                      && offset + width <= store_info->end
                      && all_positions_needed_p (store_info,
                                                 offset - store_info->begin,
                                                 width)
                      && replace_read (store_info, i_ptr, read_info,
                                       insn_info, loc, bb_info->regs_live))
                    return 0;

                  /* The bases are the same, just see if the offsets
                     overlap.  */
                  if ((offset < store_info->end)
                      && (offset + width > store_info->begin))
                    remove = true;
                }
            }

          /* else
             The else case that is missing here is that the
             bases are constant but different.  There is nothing
             to do here because there is no overlap.  */

          if (remove)
            {
              if (dump_file)
                dump_insn_info ("removing from active", i_ptr);

              active_local_stores_len--;
              if (last)
                last->next_local_store = i_ptr->next_local_store;
              else
                active_local_stores = i_ptr->next_local_store;
            }
          else
            last = i_ptr;
          i_ptr = i_ptr->next_local_store;
        }
    }
  else
    {
      insn_info_t i_ptr = active_local_stores;
      insn_info_t last = NULL;

      if (dump_file)
        {
          fprintf (dump_file, " processing cselib load mem:");
          print_inline_rtx (dump_file, mem, 0);
          fprintf (dump_file, "\n");
        }

      while (i_ptr)
        {
          bool remove = false;
          store_info_t store_info = i_ptr->store_rec;

          if (dump_file)
            fprintf (dump_file, " processing cselib load against insn %d\n",
                     INSN_UID (i_ptr->insn));

          /* Skip the clobbers.  */
          while (!store_info->is_set)
            store_info = store_info->next;

          /* If this read is just reading back something that we just
             stored, rewrite the read.  */
          if (store_info->rhs
              && store_info->group_id == -1
              && store_info->cse_base == base
              && width != -1
              && offset >= store_info->begin
              && offset + width <= store_info->end
              && all_positions_needed_p (store_info,
                                         offset - store_info->begin, width)
              && replace_read (store_info, i_ptr, read_info, insn_info, loc,
                               bb_info->regs_live))
            return 0;

          if (!store_info->alias_set)
            remove = canon_true_dependence (store_info->mem,
                                            GET_MODE (store_info->mem),
                                            store_info->mem_addr,
                                            mem, mem_addr);

          if (remove)
            {
              if (dump_file)
                dump_insn_info ("removing from active", i_ptr);

              active_local_stores_len--;
              if (last)
                last->next_local_store = i_ptr->next_local_store;
              else
                active_local_stores = i_ptr->next_local_store;
            }
          else
            last = i_ptr;
          i_ptr = i_ptr->next_local_store;
        }
    }
  return 0;
}
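/* Illustration of the interval tests used above, with invented
   offsets: a store covering bytes [begin=16, end=24) of a group and a
   4 byte read at offset 18 satisfy 18 >= 16 && 18 + 4 <= 24, so the
   read lies entirely inside the store and replace_read may rewrite it.
   A 4 byte read at offset 22 only satisfies the overlap test
   (22 < 24 && 22 + 4 > 16), so the store is just dropped from
   active_local_stores: part of it is observed by the read and it can
   no longer be proved dead locally.  */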
/* A note_uses callback in which DATA points to the bb_info, as for
   check_mem_read_rtx.  Walk *LOC and apply check_mem_read_rtx to every
   sub-rtx.  */

static void
check_mem_read_use (rtx *loc, void *data)
{
  for_each_rtx (loc, check_mem_read_rtx, data);
}
/* Get arguments passed to CALL_INSN.  Return TRUE if successful.
   So far it only handles arguments passed in registers.  */

static bool
get_call_args (rtx call_insn, tree fn, rtx *args, int nargs)
{
  CUMULATIVE_ARGS args_so_far_v;
  cumulative_args_t args_so_far;
  tree arg;
  int idx;

  INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
  args_so_far = pack_cumulative_args (&args_so_far_v);

  for (arg = TYPE_ARG_TYPES (TREE_TYPE (fn)), idx = 0;
       arg != void_list_node && idx < nargs;
       arg = TREE_CHAIN (arg), idx++)
    {
      enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
      rtx reg, link, tmp;

      reg = targetm.calls.function_arg (args_so_far, mode, NULL_TREE, true);
      if (!reg || !REG_P (reg) || GET_MODE (reg) != mode
          || GET_MODE_CLASS (mode) != MODE_INT)
        return false;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
           link;
           link = XEXP (link, 1))
        if (GET_CODE (XEXP (link, 0)) == USE)
          {
            args[idx] = XEXP (XEXP (link, 0), 0);
            if (REG_P (args[idx])
                && REGNO (args[idx]) == REGNO (reg)
                && (GET_MODE (args[idx]) == mode
                    || (GET_MODE_CLASS (GET_MODE (args[idx])) == MODE_INT
                        && (GET_MODE_SIZE (GET_MODE (args[idx]))
                            <= UNITS_PER_WORD)
                        && (GET_MODE_SIZE (GET_MODE (args[idx]))
                            > GET_MODE_SIZE (mode)))))
              break;
          }
      if (!link)
        return false;

      tmp = cselib_expand_value_rtx (args[idx], scratch, 5);
      if (GET_MODE (args[idx]) != mode)
        {
          /* The argument was passed in a wider mode; only a constant
             value can be narrowed back to the declared mode.  */
          if (!tmp || !CONST_INT_P (tmp))
            return false;
          tmp = gen_int_mode (INTVAL (tmp), mode);
        }
      if (tmp)
        args[idx] = tmp;

      targetm.calls.function_arg_advance (args_so_far, mode, NULL_TREE, true);
    }
  if (arg != void_list_node || idx != nargs)
    return false;
  return true;
}
/* Return a bitmap of the fixed registers contained in IN.  */

static bitmap
copy_fixed_regs (const_bitmap in)
{
  bitmap ret;

  ret = ALLOC_REG_SET (NULL);
  bitmap_and (ret, in, fixed_reg_set_regset);
  return ret;
}
/* Apply record_store to all candidate stores in INSN.  Mark INSN
   if some part of it is not a candidate store and assigns to a
   non-register target.  */

static void
scan_insn (bb_info_t bb_info, rtx insn)
{
  rtx body;
  insn_info_t insn_info = (insn_info_t) pool_alloc (insn_info_pool);
  int mems_found = 0;

  memset (insn_info, 0, sizeof (struct insn_info));

  if (dump_file)
    fprintf (dump_file, "\n**scanning insn=%d\n",
             INSN_UID (insn));

  insn_info->prev_insn = bb_info->last_insn;
  insn_info->insn = insn;
  bb_info->last_insn = insn_info;

  if (DEBUG_INSN_P (insn))
    {
      insn_info->cannot_delete = true;
      return;
    }

  /* Cselib clears the table for this case, so we have to essentially
     do the same.  */
  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
      && MEM_VOLATILE_P (PATTERN (insn)))
    {
      add_wild_read (bb_info);
      insn_info->cannot_delete = true;
      return;
    }

  /* Look at all of the uses in the insn.  */
  note_uses (&PATTERN (insn), check_mem_read_use, bb_info);

  if (CALL_P (insn))
    {
      bool const_call;
      tree memset_call = NULL_TREE;

      insn_info->cannot_delete = true;

      /* Const functions cannot do anything bad i.e. read memory,
         however, they can read their parameters which may have
         been pushed onto the stack.
         memset and bzero don't read memory either.  */
      const_call = RTL_CONST_CALL_P (insn);
      if (!const_call)
        {
          rtx call = PATTERN (insn);
          if (GET_CODE (call) == PARALLEL)
            call = XVECEXP (call, 0, 0);
          if (GET_CODE (call) == SET)
            call = SET_SRC (call);
          if (GET_CODE (call) == CALL
              && MEM_P (XEXP (call, 0))
              && GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
            {
              rtx symbol = XEXP (XEXP (call, 0), 0);
              if (SYMBOL_REF_DECL (symbol)
                  && TREE_CODE (SYMBOL_REF_DECL (symbol)) == FUNCTION_DECL)
                {
                  if ((DECL_BUILT_IN_CLASS (SYMBOL_REF_DECL (symbol))
                       == BUILT_IN_NORMAL
                       && (DECL_FUNCTION_CODE (SYMBOL_REF_DECL (symbol))
                           == BUILT_IN_MEMSET))
                      || SYMBOL_REF_DECL (symbol) == block_clear_fn)
                    memset_call = SYMBOL_REF_DECL (symbol);
                }
            }
        }
      if (const_call || memset_call)
        {
          insn_info_t i_ptr = active_local_stores;
          insn_info_t last = NULL;

          if (dump_file)
            fprintf (dump_file, "%s call %d\n",
                     const_call ? "const" : "memset", INSN_UID (insn));

          /* See the head comment of the frame_read field.  */
          if (reload_completed)
            insn_info->frame_read = true;

          /* Loop over the active stores and remove those which are
             killed by the const function call.  */
          while (i_ptr)
            {
              bool remove_store = false;

              /* The stack pointer based stores are always killed.  */
              if (i_ptr->stack_pointer_based)
                remove_store = true;

              /* If the frame is read, the frame related stores are killed.  */
              else if (insn_info->frame_read)
                {
                  store_info_t store_info = i_ptr->store_rec;

                  /* Skip the clobbers.  */
                  while (!store_info->is_set)
                    store_info = store_info->next;

                  if (store_info->group_id >= 0
                      && VEC_index (group_info_t, rtx_group_vec,
                                    store_info->group_id)->frame_related)
                    remove_store = true;
                }

              if (remove_store)
                {
                  if (dump_file)
                    dump_insn_info ("removing from active", i_ptr);

                  active_local_stores_len--;
                  if (last)
                    last->next_local_store = i_ptr->next_local_store;
                  else
                    active_local_stores = i_ptr->next_local_store;
                }
              else
                last = i_ptr;

              i_ptr = i_ptr->next_local_store;
            }

          if (memset_call)
            {
              rtx args[3];
              if (get_call_args (insn, memset_call, args, 3)
                  && CONST_INT_P (args[1])
                  && CONST_INT_P (args[2])
                  && INTVAL (args[2]) > 0)
                {
                  rtx mem = gen_rtx_MEM (BLKmode, args[0]);
                  set_mem_size (mem, INTVAL (args[2]));
                  body = gen_rtx_SET (VOIDmode, mem, args[1]);
                  mems_found += record_store (body, bb_info);
                  if (dump_file)
                    fprintf (dump_file, "handling memset as BLKmode store\n");
                  if (mems_found == 1)
                    {
                      if (active_local_stores_len++
                          >= PARAM_VALUE (PARAM_MAX_DSE_ACTIVE_LOCAL_STORES))
                        {
                          active_local_stores_len = 1;
                          active_local_stores = NULL;
                        }
                      insn_info->fixed_regs_live
                        = copy_fixed_regs (bb_info->regs_live);
                      insn_info->next_local_store = active_local_stores;
                      active_local_stores = insn_info;
                    }
                }
            }
        }
      else
        /* Every other call, including pure functions, may read any memory
           that is not relative to the frame.  */
        add_non_frame_wild_read (bb_info);

      return;
    }

  /* Assuming that there are sets in these insns, we cannot delete
     them.  */
  if ((GET_CODE (PATTERN (insn)) == CLOBBER)
      || volatile_refs_p (PATTERN (insn))
      || (!cfun->can_delete_dead_exceptions && !insn_nothrow_p (insn))
      || (RTX_FRAME_RELATED_P (insn))
      || find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX))
    insn_info->cannot_delete = true;

  body = PATTERN (insn);
  if (GET_CODE (body) == PARALLEL)
    {
      int i;
      for (i = 0; i < XVECLEN (body, 0); i++)
        mems_found += record_store (XVECEXP (body, 0, i), bb_info);
    }
  else
    mems_found += record_store (body, bb_info);

  if (dump_file)
    fprintf (dump_file, "mems_found = %d, cannot_delete = %s\n",
             mems_found, insn_info->cannot_delete ? "true" : "false");

  /* If we found some sets of mems, add it into the active_local_stores so
     that it can be locally deleted if found dead or used for
     replace_read and redundant constant store elimination.  Otherwise mark
     it as cannot delete.  This simplifies the processing later.  */
  if (mems_found == 1)
    {
      if (active_local_stores_len++
          >= PARAM_VALUE (PARAM_MAX_DSE_ACTIVE_LOCAL_STORES))
        {
          active_local_stores_len = 1;
          active_local_stores = NULL;
        }
      insn_info->fixed_regs_live = copy_fixed_regs (bb_info->regs_live);
      insn_info->next_local_store = active_local_stores;
      active_local_stores = insn_info;
    }
  else
    insn_info->cannot_delete = true;
}
/* Remove BASE from the set of active_local_stores.  This is a
   callback from cselib that is used to get rid of the stores in
   active_local_stores.  */

static void
remove_useless_values (cselib_val *base)
{
  insn_info_t insn_info = active_local_stores;
  insn_info_t last = NULL;

  while (insn_info)
    {
      store_info_t store_info = insn_info->store_rec;
      bool del = false;

      /* If ANY of the store_infos match the cselib group that is
         being deleted, then the insn can not be deleted.  */
      while (store_info)
        {
          if ((store_info->group_id == -1)
              && (store_info->cse_base == base))
            {
              del = true;
              break;
            }
          store_info = store_info->next;
        }

      if (del)
        {
          active_local_stores_len--;
          if (last)
            last->next_local_store = insn_info->next_local_store;
          else
            active_local_stores = insn_info->next_local_store;
          free_store_info (insn_info);
        }
      else
        last = insn_info;

      insn_info = insn_info->next_local_store;
    }
}
/* Do all of step 1.  */

static void
dse_step1 (void)
{
  basic_block bb;
  bitmap regs_live = BITMAP_ALLOC (&reg_obstack);

  cselib_init (0);
  all_blocks = BITMAP_ALLOC (NULL);
  bitmap_set_bit (all_blocks, ENTRY_BLOCK);
  bitmap_set_bit (all_blocks, EXIT_BLOCK);

  FOR_ALL_BB (bb)
    {
      insn_info_t ptr;
      bb_info_t bb_info = (bb_info_t) pool_alloc (bb_info_pool);

      memset (bb_info, 0, sizeof (struct bb_info));
      bitmap_set_bit (all_blocks, bb->index);
      bb_info->regs_live = regs_live;

      bitmap_copy (regs_live, DF_LR_IN (bb));
      df_simulate_initialize_forwards (bb, regs_live);

      bb_table[bb->index] = bb_info;
      cselib_discard_hook = remove_useless_values;

      if (bb->index >= NUM_FIXED_BLOCKS)
        {
          rtx insn;

          cse_store_info_pool
            = create_alloc_pool ("cse_store_info_pool",
                                 sizeof (struct store_info), 100);
          active_local_stores = NULL;
          active_local_stores_len = 0;
          cselib_clear_table ();

          /* Scan the insns.  */
          FOR_BB_INSNS (bb, insn)
            {
              if (INSN_P (insn))
                scan_insn (bb_info, insn);
              cselib_process_insn (insn);
              if (INSN_P (insn))
                df_simulate_one_insn_forwards (bb, insn, regs_live);
            }

          /* This is something of a hack, because the global algorithm
             is supposed to take care of the case where stores go dead
             at the end of the function.  However, the global
             algorithm must take a more conservative view of block
             mode reads than the local alg does.  So to get the case
             where you have a store to the frame followed by a non
             overlapping block mode read, we look at the active local
             stores at the end of the function and delete all of the
             frame and spill based ones.  */
          if (stores_off_frame_dead_at_return
              && (EDGE_COUNT (bb->succs) == 0
                  || (single_succ_p (bb)
                      && single_succ (bb) == EXIT_BLOCK_PTR
                      && ! crtl->calls_eh_return)))
            {
              insn_info_t i_ptr = active_local_stores;
              while (i_ptr)
                {
                  store_info_t store_info = i_ptr->store_rec;

                  /* Skip the clobbers.  */
                  while (!store_info->is_set)
                    store_info = store_info->next;
                  if (store_info->alias_set && !i_ptr->cannot_delete)
                    delete_dead_store_insn (i_ptr);
                  else if (store_info->group_id >= 0)
                    {
                      group_info_t group
                        = VEC_index (group_info_t, rtx_group_vec,
                                     store_info->group_id);
                      if (group->frame_related && !i_ptr->cannot_delete)
                        delete_dead_store_insn (i_ptr);
                    }

                  i_ptr = i_ptr->next_local_store;
                }
            }

          /* Get rid of the loads that were discovered in
             replace_read.  Cselib is finished with this block.  */
          while (deferred_change_list)
            {
              deferred_change_t next = deferred_change_list->next;

              /* There is no reason to validate this change.  That was
                 done earlier.  */
              *deferred_change_list->loc = deferred_change_list->reg;
              pool_free (deferred_change_pool, deferred_change_list);
              deferred_change_list = next;
            }

          /* Get rid of all of the cselib based store_infos in this
             block and mark the containing insns as not being
             deletable.  */
          ptr = bb_info->last_insn;
          while (ptr)
            {
              if (ptr->contains_cselib_groups)
                {
                  store_info_t s_info = ptr->store_rec;
                  while (s_info && !s_info->is_set)
                    s_info = s_info->next;
                  if (s_info
                      && s_info->redundant_reason
                      && s_info->redundant_reason->insn
                      && !ptr->cannot_delete)
                    {
                      if (dump_file)
                        fprintf (dump_file, "Locally deleting insn %d "
                                            "because insn %d stores the "
                                            "same value and couldn't be "
                                            "eliminated\n",
                                 INSN_UID (ptr->insn),
                                 INSN_UID (s_info->redundant_reason->insn));
                      delete_dead_store_insn (ptr);
                    }
                  if (s_info)
                    s_info->redundant_reason = NULL;
                  free_store_info (ptr);
                }
              else
                {
                  store_info_t s_info;

                  /* Free at least positions_needed bitmaps.  */
                  for (s_info = ptr->store_rec; s_info; s_info = s_info->next)
                    if (s_info->is_large)
                      {
                        BITMAP_FREE (s_info->positions_needed.large.bmap);
                        s_info->is_large = false;
                      }
                }
              ptr = ptr->prev_insn;
            }

          free_alloc_pool (cse_store_info_pool);
        }
      bb_info->regs_live = NULL;
    }

  BITMAP_FREE (regs_live);
  cselib_finish ();
  htab_empty (rtx_group_table);
}
/*----------------------------------------------------------------------------
   Second step.

   Assign each byte position in the stores that we are going to
   analyze globally to a position in the bitmaps.  Returns true if
   there are any bit positions assigned.
----------------------------------------------------------------------------*/

static void
dse_step2_init (void)
{
  unsigned int i;
  group_info_t group;

  FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
    {
      /* For all non stack related bases, we only consider a store to
         be deletable if there are two or more stores for that
         position.  This is because it takes one store to make the
         other store redundant.  However, for the stores that are
         stack related, we consider them if there is only one store
         for the position.  We do this because the stack related
         stores can be deleted if there is no read between them and
         the end of the function.

         To make this work in the current framework, we take the stack
         related bases and add all of the bits from store1 into store2.
         This has the effect of making them eligible even if there is
         only one store.  */

      if (stores_off_frame_dead_at_return && group->frame_related)
        {
          bitmap_ior_into (group->store2_n, group->store1_n);
          bitmap_ior_into (group->store2_p, group->store1_p);
          if (dump_file)
            fprintf (dump_file, "group %d is frame related ", i);
        }

      group->offset_map_size_n++;
      group->offset_map_n = XOBNEWVEC (&dse_obstack, int,
                                       group->offset_map_size_n);
      group->offset_map_size_p++;
      group->offset_map_p = XOBNEWVEC (&dse_obstack, int,
                                       group->offset_map_size_p);
      group->process_globally = false;
      if (dump_file)
        {
          fprintf (dump_file, "group %d(%d+%d): ", i,
                   (int)bitmap_count_bits (group->store2_n),
                   (int)bitmap_count_bits (group->store2_p));
          bitmap_print (dump_file, group->store2_n, "n ", " ");
          bitmap_print (dump_file, group->store2_p, "p ", "\n");
        }
    }
}
/* Init the offset tables for the normal case.  */

static bool
dse_step2_nospill (void)
{
  unsigned int i;
  group_info_t group;

  /* Position 0 is unused because 0 is used in the maps to mean
     unused.  */
  current_position = 1;
  FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
    {
      bitmap_iterator bi;
      unsigned int j;

      if (group == clear_alias_group)
        continue;

      memset (group->offset_map_n, 0, sizeof(int) * group->offset_map_size_n);
      memset (group->offset_map_p, 0, sizeof(int) * group->offset_map_size_p);
      bitmap_clear (group->group_kill);

      EXECUTE_IF_SET_IN_BITMAP (group->store2_n, 0, j, bi)
        {
          bitmap_set_bit (group->group_kill, current_position);
          if (bitmap_bit_p (group->escaped_n, j))
            bitmap_set_bit (kill_on_calls, current_position);
          group->offset_map_n[j] = current_position++;
          group->process_globally = true;
        }
      EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
        {
          bitmap_set_bit (group->group_kill, current_position);
          if (bitmap_bit_p (group->escaped_p, j))
            bitmap_set_bit (kill_on_calls, current_position);
          group->offset_map_p[j] = current_position++;
          group->process_globally = true;
        }
    }
  return current_position != 1;
}
/* Init the offset tables for the spill case.  */

static bool
dse_step2_spill (void)
{
  unsigned int j;
  group_info_t group = clear_alias_group;
  bitmap_iterator bi;

  /* Position 0 is unused because 0 is used in the maps to mean
     unused.  */
  current_position = 1;

  if (dump_file)
    {
      bitmap_print (dump_file, clear_alias_sets,
                    "clear alias sets ", "\n");
      bitmap_print (dump_file, disqualified_clear_alias_sets,
                    "disqualified clear alias sets ", "\n");
    }

  memset (group->offset_map_n, 0, sizeof(int) * group->offset_map_size_n);
  memset (group->offset_map_p, 0, sizeof(int) * group->offset_map_size_p);
  bitmap_clear (group->group_kill);

  /* Remove the disqualified positions from the store2_p set.  */
  bitmap_and_compl_into (group->store2_p, disqualified_clear_alias_sets);

  /* We do not need to process the store2_n set because
     alias_sets are always positive.  */
  EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
    {
      bitmap_set_bit (group->group_kill, current_position);
      group->offset_map_p[j] = current_position++;
      group->process_globally = true;
    }

  return current_position != 1;
}
/*----------------------------------------------------------------------------
   Third step.

   Build the bit vectors for the transfer functions.
----------------------------------------------------------------------------*/

/* Look up the bitmap index for OFFSET in GROUP_INFO.  If it is not
   there, return 0.  */

static int
get_bitmap_index (group_info_t group_info, HOST_WIDE_INT offset)
{
  if (offset < 0)
    {
      HOST_WIDE_INT offset_p = -offset;
      if (offset_p >= group_info->offset_map_size_n)
        return 0;
      return group_info->offset_map_n[offset_p];
    }
  else
    {
      if (offset >= group_info->offset_map_size_p)
        return 0;
      return group_info->offset_map_p[offset];
    }
}
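/* Example of the offset map split, with invented offsets: a group
   whose tracked stores touched bytes -4..-1 and 0..7 relative to its
   base uses offset_map_n for the negative side and offset_map_p for
   the non-negative side, so get_bitmap_index (group, -3) reads
   offset_map_n[3] while get_bitmap_index (group, 5) reads
   offset_map_p[5].  A zero entry means that byte was never assigned a
   global bit position and therefore cannot be tracked globally.  */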
/* Process the STORE_INFOs into the bitmaps into GEN and KILL.  KILL
   may be NULL.  */

static void
scan_stores_nospill (store_info_t store_info, bitmap gen, bitmap kill)
{
  while (store_info)
    {
      HOST_WIDE_INT i;
      group_info_t group_info
        = VEC_index (group_info_t, rtx_group_vec, store_info->group_id);
      if (group_info->process_globally)
        for (i = store_info->begin; i < store_info->end; i++)
          {
            int index = get_bitmap_index (group_info, i);
            if (index != 0)
              {
                bitmap_set_bit (gen, index);
                if (kill)
                  bitmap_clear_bit (kill, index);
              }
          }
      store_info = store_info->next;
    }
}
/* Process the STORE_INFOs into the bitmaps into GEN and KILL.  KILL
   may be NULL.  */

static void
scan_stores_spill (store_info_t store_info, bitmap gen, bitmap kill)
{
  while (store_info)
    {
      if (store_info->alias_set)
        {
          int index = get_bitmap_index (clear_alias_group,
                                        store_info->alias_set);
          if (index != 0)
            {
              bitmap_set_bit (gen, index);
              if (kill)
                bitmap_clear_bit (kill, index);
            }
        }
      store_info = store_info->next;
    }
}
/* Process the READ_INFOs into the bitmaps into GEN and KILL.  KILL
   may be NULL.  */

static void
scan_reads_nospill (insn_info_t insn_info, bitmap gen, bitmap kill)
{
  int i;
  group_info_t group;
  read_info_t read_info = insn_info->read_rec;

  /* If this insn reads the frame, kill all the frame related stores.  */
  if (insn_info->frame_read)
    {
      FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
        if (group->process_globally && group->frame_related)
          {
            if (kill)
              bitmap_ior_into (kill, group->group_kill);
            bitmap_and_compl_into (gen, group->group_kill);
          }
    }
  if (insn_info->non_frame_wild_read)
    {
      /* Kill all non-frame related stores.  Kill all stores of variables that
         escape.  */
      if (kill)
        bitmap_ior_into (kill, kill_on_calls);
      bitmap_and_compl_into (gen, kill_on_calls);
      FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
        if (group->process_globally && !group->frame_related)
          {
            if (kill)
              bitmap_ior_into (kill, group->group_kill);
            bitmap_and_compl_into (gen, group->group_kill);
          }
    }
  while (read_info)
    {
      FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
        {
          if (group->process_globally)
            {
              if (i == read_info->group_id)
                {
                  if (read_info->begin > read_info->end)
                    {
                      /* Begin > end for block mode reads.  */
                      if (kill)
                        bitmap_ior_into (kill, group->group_kill);
                      bitmap_and_compl_into (gen, group->group_kill);
                    }
                  else
                    {
                      /* The groups are the same, just process the
                         offsets.  */
                      HOST_WIDE_INT j;
                      for (j = read_info->begin; j < read_info->end; j++)
                        {
                          int index = get_bitmap_index (group, j);
                          if (index != 0)
                            {
                              if (kill)
                                bitmap_set_bit (kill, index);
                              bitmap_clear_bit (gen, index);
                            }
                        }
                    }
                }
              else
                {
                  /* The groups are different, if the alias sets
                     conflict, clear the entire group.  We only need
                     to apply this test if the read_info is a cselib
                     read.  Anything with a constant base cannot alias
                     something else with a different constant
                     base.  */
                  if ((read_info->group_id < 0)
                      && canon_true_dependence (group->base_mem,
                                                GET_MODE (group->base_mem),
                                                group->canon_base_addr,
                                                read_info->mem, NULL_RTX))
                    {
                      if (kill)
                        bitmap_ior_into (kill, group->group_kill);
                      bitmap_and_compl_into (gen, group->group_kill);
                    }
                }
            }
        }

      read_info = read_info->next;
    }
}
/* Process the READ_INFOs into the bitmaps into GEN and KILL.  KILL
   may be NULL.  */

static void
scan_reads_spill (read_info_t read_info, bitmap gen, bitmap kill)
{
  while (read_info)
    {
      if (read_info->alias_set)
        {
          int index = get_bitmap_index (clear_alias_group,
                                        read_info->alias_set);
          if (index != 0)
            {
              if (kill)
                bitmap_set_bit (kill, index);
              bitmap_clear_bit (gen, index);
            }
        }
      read_info = read_info->next;
    }
}
/* Return the insn in BB_INFO before the first wild read or if there
   are no wild reads in the block, return the last insn.  */

static insn_info_t
find_insn_before_first_wild_read (bb_info_t bb_info)
{
  insn_info_t insn_info = bb_info->last_insn;
  insn_info_t last_wild_read = NULL;

  while (insn_info)
    {
      if (insn_info->wild_read)
        {
          last_wild_read = insn_info->prev_insn;
          /* Block starts with wild read.  */
          if (!last_wild_read)
            return NULL;
        }

      insn_info = insn_info->prev_insn;
    }

  if (last_wild_read)
    return last_wild_read;
  else
    return bb_info->last_insn;
}
/* Scan the insns in BB_INFO starting at PTR and going to the top of
   the block in order to build the gen and kill sets for the block.
   We start at ptr which may be the last insn in the block or may be
   the first insn with a wild read.  In the latter case we are able to
   skip the rest of the block because it just does not matter:
   anything that happens is hidden by the wild read.  */

static void
dse_step3_scan (bool for_spills, basic_block bb)
{
  bb_info_t bb_info = bb_table[bb->index];
  insn_info_t insn_info;

  if (for_spills)
    /* There are no wild reads in the spill case.  */
    insn_info = bb_info->last_insn;
  else
    insn_info = find_insn_before_first_wild_read (bb_info);

  /* In the spill case or in the no_spill case if there is no wild
     read in the block, we will need a kill set.  */
  if (insn_info == bb_info->last_insn)
    {
      if (bb_info->kill)
        bitmap_clear (bb_info->kill);
      else
        bb_info->kill = BITMAP_ALLOC (&dse_bitmap_obstack);
    }
  else if (bb_info->kill)
    BITMAP_FREE (bb_info->kill);

  while (insn_info)
    {
      /* There may have been code deleted by the dce pass run before
         this phase.  */
      if (insn_info->insn && INSN_P (insn_info->insn))
        {
          /* Process the read(s) last.  */
          if (for_spills)
            {
              scan_stores_spill (insn_info->store_rec, bb_info->gen,
                                 bb_info->kill);
              scan_reads_spill (insn_info->read_rec, bb_info->gen,
                                bb_info->kill);
            }
          else
            {
              scan_stores_nospill (insn_info->store_rec, bb_info->gen,
                                   bb_info->kill);
              scan_reads_nospill (insn_info, bb_info->gen, bb_info->kill);
            }
        }

      insn_info = insn_info->prev_insn;
    }
}
/* Set the gen set of the exit block, and also any block with no
   successors that does not have a wild read.  */

static void
dse_step3_exit_block_scan (bb_info_t bb_info)
{
  /* The gen set is all 0's for the exit block except for the
     frame_pointer_group.  */

  if (stores_off_frame_dead_at_return)
    {
      unsigned int i;
      group_info_t group;

      FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
        {
          if (group->process_globally && group->frame_related)
            bitmap_ior_into (bb_info->gen, group->group_kill);
        }
    }
}
/* Find all of the blocks that are not backwards reachable from the
   exit block or any block with no successors (BB).  These are the
   infinite loops or infinite self loops.  These blocks will still
   have their bits set in UNREACHABLE_BLOCKS.  */

static void
mark_reachable_blocks (sbitmap unreachable_blocks, basic_block bb)
{
  edge e;
  edge_iterator ei;

  if (TEST_BIT (unreachable_blocks, bb->index))
    {
      RESET_BIT (unreachable_blocks, bb->index);
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          mark_reachable_blocks (unreachable_blocks, e->src);
        }
    }
}
/* Build the transfer functions for the function.  */

static void
dse_step3 (bool for_spills)
{
  basic_block bb;
  sbitmap unreachable_blocks = sbitmap_alloc (last_basic_block);
  sbitmap_iterator sbi;
  bitmap all_ones = NULL;
  unsigned int i;

  sbitmap_ones (unreachable_blocks);

  FOR_ALL_BB (bb)
    {
      bb_info_t bb_info = bb_table[bb->index];
      if (bb_info->gen)
        bitmap_clear (bb_info->gen);
      else
        bb_info->gen = BITMAP_ALLOC (&dse_bitmap_obstack);

      if (bb->index == ENTRY_BLOCK)
        ;
      else if (bb->index == EXIT_BLOCK)
        dse_step3_exit_block_scan (bb_info);
      else
        dse_step3_scan (for_spills, bb);
      if (EDGE_COUNT (bb->succs) == 0)
        mark_reachable_blocks (unreachable_blocks, bb);

      /* If this is the second time dataflow is run, delete the old
         sets.  */
      if (bb_info->in)
        BITMAP_FREE (bb_info->in);
      if (bb_info->out)
        BITMAP_FREE (bb_info->out);
    }

  /* For any block in an infinite loop, we must initialize the out set
     to all ones.  This could be expensive, but almost never occurs in
     practice.  However, it is common in regression tests.  */
  EXECUTE_IF_SET_IN_SBITMAP (unreachable_blocks, 0, i, sbi)
    {
      if (bitmap_bit_p (all_blocks, i))
        {
          bb_info_t bb_info = bb_table[i];
          if (!all_ones)
            {
              unsigned int j;
              group_info_t group;

              all_ones = BITMAP_ALLOC (&dse_bitmap_obstack);
              FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, j, group)
                bitmap_ior_into (all_ones, group->group_kill);
            }
          if (!bb_info->out)
            {
              bb_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
              bitmap_copy (bb_info->out, all_ones);
            }
        }
    }

  if (all_ones)
    BITMAP_FREE (all_ones);
  sbitmap_free (unreachable_blocks);
}
/*----------------------------------------------------------------------------
   Fourth step.

   Solve the bitvector equations.
----------------------------------------------------------------------------*/

/* Confluence function for blocks with no successors.  Create an out
   set from the gen set of the exit block.  This block logically has
   the exit block as a successor.  */

static void
dse_confluence_0 (basic_block bb)
{
  bb_info_t bb_info = bb_table[bb->index];

  if (bb->index == EXIT_BLOCK)
    return;

  if (!bb_info->out)
    {
      bb_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
      bitmap_copy (bb_info->out, bb_table[EXIT_BLOCK]->gen);
    }
}

/* Propagate the information from the in set of the dest of E to the
   out set of the src of E.  If the various in or out sets are not
   there, that means they are all ones.  */

static void
dse_confluence_n (edge e)
{
  bb_info_t src_info = bb_table[e->src->index];
  bb_info_t dest_info = bb_table[e->dest->index];

  if (dest_info->in)
    {
      if (src_info->out)
        bitmap_and_into (src_info->out, dest_info->in);
      else
        {
          src_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
          bitmap_copy (src_info->out, dest_info->in);
        }
    }
}
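/* Sketch of the meet computed by dse_confluence_n, with invented bit
   numbers: if a block has two successors whose in sets are {1,2,3}
   and {2,3,5}, processing its outgoing edges leaves the block's out
   set at {2,3}; a store position survives in out only if it is dead
   along every successor.  A successor whose in set does not exist yet
   is treated as all ones and places no constraint on out.  */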
/* Propagate the info from the out to the in set of BB_INDEX's basic
   block.  There are three cases:

   1) The block has no kill set.  In this case the kill set is all
   ones.  It does not matter what the out set of the block is, none of
   the info can reach the top.  The only thing that reaches the top is
   the gen set and we just copy the set.

   2) There is a kill set but no out set and bb has successors.  In
   this case we just return.  Eventually an out set will be created and
   it is better to wait than to create a set of ones.

   3) There is both a kill and out set.  We apply the obvious transfer
   function.  */

static bool
dse_transfer_function (int bb_index)
{
  bb_info_t bb_info = bb_table[bb_index];

  if (bb_info->kill)
    {
      if (bb_info->out)
        {
          /* Case 3 above.  */
          if (bb_info->in)
            return bitmap_ior_and_compl (bb_info->in, bb_info->gen,
                                         bb_info->out, bb_info->kill);
          else
            {
              bb_info->in = BITMAP_ALLOC (&dse_bitmap_obstack);
              bitmap_ior_and_compl (bb_info->in, bb_info->gen,
                                    bb_info->out, bb_info->kill);
              return true;
            }
        }
      else
        /* Case 2 above.  */
        return false;
    }
  else
    {
      /* Case 1 above.  If there is already an in set, nothing
         happens.  */
      if (bb_info->in)
        return false;
      else
        {
          bb_info->in = BITMAP_ALLOC (&dse_bitmap_obstack);
          bitmap_copy (bb_info->in, bb_info->gen);
          return true;
        }
    }
}
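/* Worked instance of case 3 above, with invented bit numbers: given
   gen = {4}, kill = {7} and out = {4,7,9}, bitmap_ior_and_compl
   computes in = gen | (out & ~kill) = {4,9} and returns true only if
   this changed the in set, which is what drives the iterative solver
   invoked from the next step.  */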
/* Solve the dataflow equations.  */

static void
dse_step4 (void)
{
  df_simple_dataflow (DF_BACKWARD, NULL, dse_confluence_0,
                      dse_confluence_n, dse_transfer_function,
                      all_blocks, df_get_postorder (DF_BACKWARD),
                      df_get_n_blocks (DF_BACKWARD));
  if (dump_file)
    {
      basic_block bb;

      fprintf (dump_file, "\n\n*** Global dataflow info after analysis.\n");
      FOR_ALL_BB (bb)
        {
          bb_info_t bb_info = bb_table[bb->index];

          df_print_bb_index (bb, dump_file);
          if (bb_info->in)
            bitmap_print (dump_file, bb_info->in, " in: ", "\n");
          else
            fprintf (dump_file, " in: *MISSING*\n");
          if (bb_info->gen)
            bitmap_print (dump_file, bb_info->gen, " gen: ", "\n");
          else
            fprintf (dump_file, " gen: *MISSING*\n");
          if (bb_info->kill)
            bitmap_print (dump_file, bb_info->kill, " kill: ", "\n");
          else
            fprintf (dump_file, " kill: *MISSING*\n");
          if (bb_info->out)
            bitmap_print (dump_file, bb_info->out, " out: ", "\n");
          else
            fprintf (dump_file, " out: *MISSING*\n\n");
        }
    }
}
/*----------------------------------------------------------------------------
   Fifth step.

   Delete the stores that can only be deleted using the global information.
----------------------------------------------------------------------------*/

static void
dse_step5_nospill (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      bb_info_t bb_info = bb_table[bb->index];
      insn_info_t insn_info = bb_info->last_insn;
      bitmap v = bb_info->out;

      while (insn_info)
        {
          bool deleted = false;
          if (dump_file && insn_info->insn)
            {
              fprintf (dump_file, "starting to process insn %d\n",
                       INSN_UID (insn_info->insn));
              bitmap_print (dump_file, v, " v: ", "\n");
            }

          /* There may have been code deleted by the dce pass run before
             this phase.  */
          if (insn_info->insn
              && INSN_P (insn_info->insn)
              && (!insn_info->cannot_delete)
              && (!bitmap_empty_p (v)))
            {
              store_info_t store_info = insn_info->store_rec;

              /* Try to delete the current insn.  */
              deleted = true;

              /* Skip the clobbers.  */
              while (!store_info->is_set)
                store_info = store_info->next;

              if (store_info->alias_set)
                deleted = false;
              else
                {
                  HOST_WIDE_INT i;
                  group_info_t group_info
                    = VEC_index (group_info_t, rtx_group_vec,
                                 store_info->group_id);

                  for (i = store_info->begin; i < store_info->end; i++)
                    {
                      int index = get_bitmap_index (group_info, i);

                      if (dump_file)
                        fprintf (dump_file, "i = %d, index = %d\n",
                                 (int)i, index);
                      if (index == 0 || !bitmap_bit_p (v, index))
                        {
                          if (dump_file)
                            fprintf (dump_file, "failing at i = %d\n", (int)i);
                          deleted = false;
                          break;
                        }
                    }
                }
              if (deleted
                  && dbg_cnt (dse)
                  && check_for_inc_dec_1 (insn_info))
                {
                  delete_insn (insn_info->insn);
                  insn_info->insn = NULL;
                  globally_deleted++;
                }
            }

          /* We do want to process the local info if the insn was
             deleted.  For instance, if the insn did a wild read, we
             no longer need to trash the info.  */
          if (insn_info->insn
              && INSN_P (insn_info->insn)
              && (!deleted))
            {
              scan_stores_nospill (insn_info->store_rec, v, NULL);
              if (insn_info->wild_read)
                {
                  if (dump_file)
                    fprintf (dump_file, "wild read\n");
                  bitmap_clear (v);
                }
              else if (insn_info->read_rec
                       || insn_info->non_frame_wild_read)
                {
                  if (dump_file && !insn_info->non_frame_wild_read)
                    fprintf (dump_file, "regular read\n");
                  else if (dump_file)
                    fprintf (dump_file, "non-frame wild read\n");
                  scan_reads_nospill (insn_info, v, NULL);
                }
            }

          insn_info = insn_info->prev_insn;
        }
    }
}
static void
dse_step5_spill (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      bb_info_t bb_info = bb_table[bb->index];
      insn_info_t insn_info = bb_info->last_insn;
      bitmap v = bb_info->out;

      while (insn_info)
        {
          bool deleted = false;
          /* There may have been code deleted by the dce pass run before
             this phase.  */
          if (insn_info->insn
              && INSN_P (insn_info->insn)
              && (!insn_info->cannot_delete)
              && (!bitmap_empty_p (v)))
            {
              /* Try to delete the current insn.  */
              store_info_t store_info = insn_info->store_rec;
              deleted = true;

              while (store_info)
                {
                  if (store_info->alias_set)
                    {
                      int index = get_bitmap_index (clear_alias_group,
                                                    store_info->alias_set);
                      if (index == 0 || !bitmap_bit_p (v, index))
                        deleted = false;
                    }
                  else
                    deleted = false;
                  store_info = store_info->next;
                }
              if (deleted && dbg_cnt (dse)
                  && check_for_inc_dec_1 (insn_info))
                {
                  if (dump_file)
                    fprintf (dump_file, "Spill deleting insn %d\n",
                             INSN_UID (insn_info->insn));
                  delete_insn (insn_info->insn);
                  spill_deleted++;
                  insn_info->insn = NULL;
                }
            }

          if (insn_info->insn
              && INSN_P (insn_info->insn)
              && (!deleted))
            {
              scan_stores_spill (insn_info->store_rec, v, NULL);
              scan_reads_spill (insn_info->read_rec, v, NULL);
            }

          insn_info = insn_info->prev_insn;
        }
    }
}
/*----------------------------------------------------------------------------
   Sixth step.

   Delete stores made redundant by earlier stores (which store the same
   value) that couldn't be eliminated.
----------------------------------------------------------------------------*/

static void
dse_step6 (void)
{
  basic_block bb;

  FOR_ALL_BB (bb)
    {
      bb_info_t bb_info = bb_table[bb->index];
      insn_info_t insn_info = bb_info->last_insn;

      while (insn_info)
        {
          /* There may have been code deleted by the dce pass run before
             this phase.  */
          if (insn_info->insn
              && INSN_P (insn_info->insn)
              && !insn_info->cannot_delete)
            {
              store_info_t s_info = insn_info->store_rec;

              while (s_info && !s_info->is_set)
                s_info = s_info->next;
              if (s_info
                  && s_info->redundant_reason
                  && s_info->redundant_reason->insn
                  && INSN_P (s_info->redundant_reason->insn))
                {
                  rtx rinsn = s_info->redundant_reason->insn;
                  if (dump_file)
                    fprintf (dump_file, "Locally deleting insn %d "
                                        "because insn %d stores the "
                                        "same value and couldn't be "
                                        "eliminated\n",
                             INSN_UID (insn_info->insn),
                             INSN_UID (rinsn));
                  delete_dead_store_insn (insn_info);
                }
            }
          insn_info = insn_info->prev_insn;
        }
    }
}
/*----------------------------------------------------------------------------
   Seventh step.

   Destroy everything left standing.
----------------------------------------------------------------------------*/

static void
dse_step7 (void)
{
  bitmap_obstack_release (&dse_bitmap_obstack);
  obstack_free (&dse_obstack, NULL);

  if (clear_alias_sets)
    {
      BITMAP_FREE (clear_alias_sets);
      BITMAP_FREE (disqualified_clear_alias_sets);
      free_alloc_pool (clear_alias_mode_pool);
      htab_delete (clear_alias_mode_table);
    }

  end_alias_analysis ();
  free (bb_table);
  htab_delete (rtx_group_table);
  VEC_free (group_info_t, heap, rtx_group_vec);
  BITMAP_FREE (all_blocks);
  BITMAP_FREE (scratch);

  free_alloc_pool (rtx_store_info_pool);
  free_alloc_pool (read_info_pool);
  free_alloc_pool (insn_info_pool);
  free_alloc_pool (bb_info_pool);
  free_alloc_pool (rtx_group_info_pool);
  free_alloc_pool (deferred_change_pool);
}
/* -------------------------------------------------------------------------
   DSE
   ------------------------------------------------------------------------- */

/* Callback for running pass_rtl_dse.  */

static unsigned int
rest_of_handle_dse (void)
{
  bool did_global = false;

  df_set_flags (DF_DEFER_INSN_RESCAN);

  /* Need the notes since we must track live hardregs in the forwards
     direction.  */
  df_note_add_problem ();
  df_analyze ();

  dse_step0 ();
  dse_step1 ();
  dse_step2_init ();
  if (dse_step2_nospill ())
    {
      df_set_flags (DF_LR_RUN_DCE);
      df_analyze ();
      did_global = true;
      if (dump_file)
        fprintf (dump_file, "doing global processing\n");
      dse_step3 (false);
      dse_step4 ();
      dse_step5_nospill ();
    }

  /* For the instance of dse that runs after reload, we make a special
     pass to process the spills.  These are special in that they are
     totally transparent, i.e., there are no aliasing issues that need
     to be considered.  This means that the wild reads that kill
     everything else do not apply here.  */
  if (clear_alias_sets && dse_step2_spill ())
    {
      if (!did_global)
        {
          df_set_flags (DF_LR_RUN_DCE);
          df_analyze ();
        }
      did_global = true;
      if (dump_file)
        fprintf (dump_file, "doing global spill processing\n");
      dse_step3 (true);
      dse_step4 ();
      dse_step5_spill ();
    }

  dse_step6 ();
  dse_step7 ();

  if (dump_file)
    fprintf (dump_file, "dse: local deletions = %d, global deletions = %d, spill deletions = %d\n",
             locally_deleted, globally_deleted, spill_deleted);
  return 0;
}

static bool
gate_dse1 (void)
{
  return optimize > 0 && flag_dse
    && dbg_cnt (dse1);
}

static bool
gate_dse2 (void)
{
  return optimize > 0 && flag_dse
    && dbg_cnt (dse2);
}

struct rtl_opt_pass pass_rtl_dse1 =
{
 {
  RTL_PASS,
  "dse1",                               /* name */
  gate_dse1,                            /* gate */
  rest_of_handle_dse,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DSE1,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};

struct rtl_opt_pass pass_rtl_dse2 =
{
 {
  RTL_PASS,
  "dse2",                               /* name */
  gate_dse2,                            /* gate */
  rest_of_handle_dse,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DSE2,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};