1 /* RTL dead store elimination.
2 Copyright (C) 2005-2015 Free Software Foundation, Inc.
3
4 Contributed by Richard Sandiford <rsandifor@codesourcery.com>
5 and Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #undef BASELINE
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "hash-table.h"
29 #include "tm.h"
30 #include "rtl.h"
31 #include "hash-set.h"
32 #include "machmode.h"
33 #include "vec.h"
34 #include "double-int.h"
35 #include "input.h"
36 #include "alias.h"
37 #include "symtab.h"
38 #include "wide-int.h"
39 #include "inchash.h"
40 #include "real.h"
41 #include "tree.h"
42 #include "fold-const.h"
43 #include "stor-layout.h"
44 #include "tm_p.h"
45 #include "regs.h"
46 #include "hard-reg-set.h"
47 #include "regset.h"
48 #include "flags.h"
49 #include "dominance.h"
50 #include "cfg.h"
51 #include "cfgrtl.h"
52 #include "predict.h"
53 #include "basic-block.h"
54 #include "df.h"
55 #include "cselib.h"
56 #include "tree-pass.h"
57 #include "alloc-pool.h"
58 #include "insn-config.h"
59 #include "hashtab.h"
60 #include "function.h"
61 #include "statistics.h"
62 #include "fixed-value.h"
63 #include "expmed.h"
64 #include "dojump.h"
65 #include "explow.h"
66 #include "calls.h"
67 #include "emit-rtl.h"
68 #include "varasm.h"
69 #include "stmt.h"
70 #include "expr.h"
71 #include "recog.h"
72 #include "insn-codes.h"
73 #include "optabs.h"
74 #include "dbgcnt.h"
75 #include "target.h"
76 #include "params.h"
77 #include "tree-ssa-alias.h"
78 #include "internal-fn.h"
79 #include "gimple-expr.h"
80 #include "is-a.h"
81 #include "gimple.h"
82 #include "gimple-ssa.h"
83 #include "rtl-iter.h"
84 #include "cfgcleanup.h"
85
86 /* This file contains three techniques for performing Dead Store
87 Elimination (dse).
88
89 * The first technique performs dse locally on any base address. It
90 is based on cselib, which is a local value numbering technique.
91 This technique is local to a basic block but deals with fairly
92 general addresses.
93
94 * The second technique performs dse globally but is restricted to
95 base addresses that are either constant or are relative to the
96 frame_pointer.
97
98 * The third technique (which is only done after register allocation)
99 processes the spill slots. This differs from the second
100 technique because it takes advantage of the fact that spilling is
101 completely free from the effects of aliasing.
102
103 Logically, dse is a backwards dataflow problem. A store can be
104 deleted if it cannot be reached in the backward direction by any
105 use of the value being stored. However, the local technique uses a
106 forwards scan of the basic block because cselib requires that the
107 block be processed in that order.
108
109 The pass is logically broken into the following steps:
110
111 0) Initialization.
112
113 1) The local algorithm, as well as scanning the insns for the two
114 global algorithms.
115
116 2) Analysis to see if the global algs are necessary. In the case
117 of stores based on a constant address, there must be at least two
118 stores to that address, to make it possible to delete some of the
119 stores. In the case of stores off of the frame or spill related
120 stores, only one store to an address is necessary because those
121 stores die at the end of the function.
122
123 3) Set up the global dataflow equations based on processing the
124 info parsed in the first step.
125
126 4) Solve the dataflow equations.
127
128 5) Delete the insns that the global analysis has indicated are
129 unnecessary.
130
131 6) Delete insns that store the same value as a preceding store
132 where the earlier store couldn't be eliminated.
133
134 7) Cleanup.
135
136 Step 1 uses cselib and canon_rtx to build the largest expression
137 possible for each address. This pass is a forwards pass through
138 each basic block. From the point of view of the global technique,
139 the first pass could examine a block in either direction. The
140 forwards ordering is to accommodate cselib.
141
142 We make a simplifying assumption: addresses fall into four broad
143 categories:
144
145 1) base has rtx_varies_p == false, offset is constant.
146 2) base has rtx_varies_p == false, offset variable.
147 3) base has rtx_varies_p == true, offset constant.
148 4) base has rtx_varies_p == true, offset variable.
149
150 The local passes are able to process all 4 kinds of addresses. The
151 global pass only handles 1).
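
For illustration (a sketch only; the exact rtl is target-dependent),
with the frame pointer as a typical non-varying base and a pseudo as
a varying one:

1) (mem (plus (reg/f frame_pointer) (const_int 8)))
2) (mem (plus (reg/f frame_pointer) (reg pseudo)))
3) (mem (plus (reg pseudo) (const_int 8)))
4) (mem (plus (reg pseudo) (reg pseudo)))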
152
153 The global problem is formulated as follows:
154
155 A store, S1, to address A, where A is not relative to the stack
156 frame, can be eliminated if all paths from S1 to the end of the
157 function contain another store to A before a read to A.
158
159 If the address A is relative to the stack frame, a store S2 to A
160 can be eliminated if there are no paths from S2 that reach the
161 end of the function that read A before another store to A. In
162 this case S2 can be deleted if there are paths from S2 to the
163 end of the function that have no reads or writes to A. This
164 second case allows stores to the stack frame to be deleted that
165 would otherwise die when the function returns. This cannot be
166 done if stores_off_frame_dead_at_return is not true. See the doc
167 for that variable for when it is false.
168
169 The global problem is formulated as a backwards set union
170 dataflow problem where the stores are the gens and reads are the
171 kills. Set union problems are rare and require some special
172 handling given our representation of bitmaps. A straightforward
173 implementation requires a lot of bitmaps filled with 1s.
174 These are expensive and cumbersome in our bitmap formulation so
175 care has been taken to avoid large vectors filled with 1s. See
176 the comments in bb_info and in the dataflow confluence functions
177 for details.
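
As a rough sketch of step 4, the equations have the usual backward
form: OUT (bb) is formed by the confluence of the IN sets of bb's
successors (see the confluence functions below for how the all-ones
case is avoided), and

IN (bb) = GEN (bb) | (OUT (bb) & ~KILL (bb))

with the wild_read handling described at dse_bb_info's
apply_wild_read field applied first where needed.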
178
179 There are two places for further enhancements to this algorithm:
180
181 1) The original dse, which was embedded in a pass called flow, also
182 did local address forwarding. For example in
183
184 A <- r100
185 ... <- A
186
187 flow would replace the right hand side of the second insn with a
188 reference to r100. Most of the information needed to add this to
189 this pass is available. It has not been done because it is a lot of
190 work in the case that either r100 is reassigned between the first and
191 second insn and/or the second insn is a load of part of the value
192 stored by the first insn.
193
194 insn 5 in gcc.c-torture/compile/990203-1.c simple case.
195 insn 15 in gcc.c-torture/execute/20001017-2.c simple case.
196 insn 25 in gcc.c-torture/execute/20001026-1.c simple case.
197 insn 44 in gcc.c-torture/execute/20010910-1.c simple case.
198
199 2) The cleaning up of spill code is quite profitable. It currently
200 depends on reading tea leaves and chicken entrails left by reload.
201 This pass depends on reload creating a singleton alias set for each
202 spill slot and telling the next dse pass which of these alias sets
203 are the singletons. Rather than analyze the addresses of the
204 spills, dse's spill processing just does analysis of the loads and
205 stores that use those alias sets. There are three cases where this
206 falls short:
207
208 a) Reload sometimes creates the slot for one mode of access, and
209 then inserts loads and/or stores for a smaller mode. In this
210 case, the current code just punts on the slot. The proper thing
211 to do is to back out and use one bit vector position for each
212 byte of the entity associated with the slot. This depends on
213 KNOWING that reload always generates the accesses for each of the
214 bytes in some canonical (read that easy to understand several
215 passes after reload happens) way.
216
217 b) Reload sometimes decides that the spill slot it allocated was not
218 large enough for the mode and goes back and allocates more slots
219 with the same mode and alias set. The backout in this case is a
220 little more graceful than (a). In this case the slot is unmarked
221 as being a spill slot and if the final address comes out to be based
222 off the frame pointer, the global algorithm handles this slot.
223
224 c) For any pass that may prespill, there is currently no
225 mechanism to tell the dse pass that the slot being used has the
226 special properties that reload uses. It may be that all that is
227 required is to have those passes make the same calls that reload
228 does, assuming that the alias sets can be manipulated in the same
229 way. */
230
231 /* There are limits to the size of constant offsets we model for the
232 global problem. There are certainly test cases that exceed this
233 limit; however, it is unlikely that there are important programs
234 that really have constant offsets this large. */
235 #define MAX_OFFSET (64 * 1024)
236
237 /* Obstack for the DSE dataflow bitmaps. We don't want to put these
238 on the default obstack because these bitmaps can grow quite large
239 (~2GB for the small (!) test case of PR54146) and we'll hold on to
240 all that memory until the end of the compiler run.
241 As a bonus, delete_tree_live_info can destroy all the bitmaps by just
242 releasing the whole obstack. */
243 static bitmap_obstack dse_bitmap_obstack;
244
245 /* Obstack for other data. As for above: Kinda nice to be able to
246 throw it all away at the end in one big sweep. */
247 static struct obstack dse_obstack;
248
249 /* Scratch bitmap for cselib's cselib_expand_value_rtx. */
250 static bitmap scratch = NULL;
251
252 struct insn_info;
253
254 /* This structure holds information about a candidate store. */
255 struct store_info
256 {
257
258 /* False means this is a clobber. */
259 bool is_set;
260
261 /* False if a single HOST_WIDE_INT bitmap is used for positions_needed. */
262 bool is_large;
263
264 /* The id of the mem group of the base address. If rtx_varies_p is
265 true, this is -1. Otherwise, it is the index into the group
266 table. */
267 int group_id;
268
269 /* This is the cselib value. */
270 cselib_val *cse_base;
271
272 /* The canonized mem. */
273 rtx mem;
274
275 /* Canonized MEM address for use by canon_true_dependence. */
276 rtx mem_addr;
277
278 /* If this is non-zero, it is the alias set of a spill location. */
279 alias_set_type alias_set;
280
281 /* The offset of the first byte and of the byte after the last byte
282 associated with the operation. */
283 HOST_WIDE_INT begin, end;
284
285 union
286 {
287 /* A bitmask as wide as the number of bytes in the word that
288 contains a 1 if the byte may be needed. The store is unused if
289 all of the bits are 0. This is used if IS_LARGE is false. */
290 unsigned HOST_WIDE_INT small_bitmask;
291
292 struct
293 {
294 /* A bitmap with one bit per byte. A cleared bit means the position
295 is needed. Used if IS_LARGE is true. */
296 bitmap bmap;
297
298 /* Number of set bits (i.e. unneeded bytes) in BMAP. If it is
299 equal to END - BEGIN, the whole store is unused. */
300 int count;
301 } large;
302 } positions_needed;
303
304 /* The next store info for this insn. */
305 struct store_info *next;
306
307 /* The right hand side of the store. This is used if there is a
308 subsequent reload of the mem's address somewhere later in the
309 basic block. */
310 rtx rhs;
311
312 /* If rhs is or holds a constant, this contains that constant,
313 otherwise NULL. */
314 rtx const_rhs;
315
316 /* Set if this store stores the same constant value as the
317 REDUNDANT_REASON insn stored. These aren't eliminated early, because
318 doing that might prevent the earlier larger store from being eliminated. */
319 struct insn_info *redundant_reason;
320 };
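
/* A worked example of POSITIONS_NEEDED (illustrative only): for a
4-byte store, BEGIN == 0, END == 4 and IS_LARGE is false, so
small_bitmask starts out as lowpart_bitmask (4) == 0xf. If a later
2-byte store overwrites bytes 2 and 3, those bits are cleared,
leaving 0x3; once the mask reaches 0 the whole store is unused. */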
321
322 /* Return a bitmask with the first N low bits set. */
323
324 static unsigned HOST_WIDE_INT
325 lowpart_bitmask (int n)
326 {
327 unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT) 0;
328 return mask >> (HOST_BITS_PER_WIDE_INT - n);
329 }
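
/* For example, lowpart_bitmask (4) == 0xf and
lowpart_bitmask (HOST_BITS_PER_WIDE_INT) is all ones. Callers are
expected to pass 1 <= N <= HOST_BITS_PER_WIDE_INT; N == 0 would
shift by the full word width, which is undefined behavior in C. */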
330
331 typedef struct store_info *store_info_t;
332 static alloc_pool cse_store_info_pool;
333 static alloc_pool rtx_store_info_pool;
334
335 /* This structure holds information about a load. These are only
336 built for rtx bases. */
337 struct read_info
338 {
339 /* The id of the mem group of the base address. */
340 int group_id;
341
342 /* If this is non-zero, it is the alias set of a spill location. */
343 alias_set_type alias_set;
344
345 /* The offset of the first byte and of the byte after the last byte
346 associated with the operation. If begin == end == 0, the read did
347 not have a constant offset. */
348 int begin, end;
349
350 /* The mem being read. */
351 rtx mem;
352
353 /* The next read_info for this insn. */
354 struct read_info *next;
355 };
356 typedef struct read_info *read_info_t;
357 static alloc_pool read_info_pool;
358
359
360 /* One of these records is created for each insn. */
361
362 struct insn_info
363 {
364 /* Set true if the insn contains a store but the insn itself cannot
365 be deleted. This is set if the insn is a parallel and there is
366 more than one non-dead output or if the insn is in some way
367 volatile. */
368 bool cannot_delete;
369
370 /* This field is only used by the global algorithm. It is set true
371 if the insn contains any read of mem except for a category (1)
372 address. This is also set if the insn is a call or clobbers a mem.
373 If the insn contains a wild read, the read_rec will be null. */
374 bool wild_read;
375
376 /* This is true only for CALL instructions which could potentially read
377 any non-frame memory location. This field is used by the global
378 algorithm. */
379 bool non_frame_wild_read;
380
381 /* This field is only used for the processing of const functions.
382 These functions cannot read memory, but they can read the stack
383 because that is where they may get their parms. We need to be
384 this conservative because, like the store motion pass, we don't
385 consider CALL_INSN_FUNCTION_USAGE when processing call insns.
386 Moreover, we need to distinguish two cases:
387 1. Before reload (register elimination), the stores related to
388 outgoing arguments are stack pointer based and thus deemed
389 of non-constant base in this pass. This requires special
390 handling but also means that the frame pointer based stores
391 need not be killed upon encountering a const function call.
392 2. After reload, the stores related to outgoing arguments can be
393 either stack pointer or hard frame pointer based. This means
394 that we have no other choice than also killing all the frame
395 pointer based stores upon encountering a const function call.
396 This field is set after reload for const function calls and before
397 reload for const tail function calls on targets where arg pointer
398 is the frame pointer. Having this set is less severe than a wild
399 read; it just means that all the frame-related stores are killed
400 rather than all the stores. */
401 bool frame_read;
402
403 /* This field is only used for the processing of const functions.
404 It is set if the insn may contain a stack pointer based store. */
405 bool stack_pointer_based;
406
407 /* This is true if any of the sets within the store contains a
408 cselib base. Such stores can only be deleted by the local
409 algorithm. */
410 bool contains_cselib_groups;
411
412 /* The insn. */
413 rtx_insn *insn;
414
415 /* The list of mem sets or mem clobbers that are contained in this
416 insn. If the insn is deletable, it contains only one mem set.
417 But it could also contain clobbers. Insns that contain more than
418 one mem set are not deletable, but each of those mems is here in
419 order to provide info to delete other insns. */
420 store_info_t store_rec;
421
422 /* The linked list of mem uses in this insn. Only the reads from
423 rtx bases are listed here. The reads to cselib bases are
424 completely processed during the first scan and so are never
425 created. */
426 read_info_t read_rec;
427
428 /* The live fixed registers. We assume only fixed registers can
429 cause trouble by being clobbered from an expanded pattern;
430 storing only the live fixed registers (rather than all registers)
431 means less memory needs to be allocated / copied for the individual
432 stores. */
433 regset fixed_regs_live;
434
435 /* The prev insn in the basic block. */
436 struct insn_info * prev_insn;
437
438 /* The linked list of insns that are in consideration for removal in
439 the forwards pass through the basic block. This pointer may be
440 trash as it is not cleared when a wild read occurs. The only
441 time it is guaranteed to be correct is when the traversal starts
442 at active_local_stores. */
443 struct insn_info * next_local_store;
444 };
445
446 typedef struct insn_info *insn_info_t;
447 static alloc_pool insn_info_pool;
448
449 /* The linked list of stores that are under consideration in this
450 basic block. */
451 static insn_info_t active_local_stores;
452 static int active_local_stores_len;
453
454 struct dse_bb_info
455 {
456
457 /* Pointer to the insn info for the last insn in the block. The
458 insn_infos are linked, so this is how all of the insns are reached.
459 During scanning, this is the insn currently being scanned. */
460 insn_info_t last_insn;
461
462 /* The info for the global dataflow problem. */
463
464
465 /* This is set if the transfer function should AND in the wild_read
466 bitmap before applying the kill and gen sets. That vector knocks
467 out most of the bits in the bitmap and thus speeds up the
468 operations. */
469 bool apply_wild_read;
470
471 /* The following 4 bitvectors hold information about which positions
472 of which stores are live or dead. They are indexed by
473 get_bitmap_index. */
474
475 /* The set of store positions that exist in this block before a wild read. */
476 bitmap gen;
477
478 /* The set of load positions that exist in this block above the
479 same position of a store. */
480 bitmap kill;
481
482 /* The set of stores that reach the top of the block without being
483 killed by a read.
484
485 Do not represent the in set if it is all ones. Note that this is
486 what the bitvector should logically be initialized to for a set
487 intersection problem. However, like the kill set, this is too
488 expensive. So initially, the in set will only be created for the
489 exit block and any block that contains a wild read. */
490 bitmap in;
491
492 /* The set of stores that reach the bottom of the block from its
493 successors.
494
495 Do not represent the out set if it is all ones. Note that this is
496 what the bitvector should logically be initialized to for a set
497 intersection problem. However, like the kill and in set, this is
498 too expensive. So what is done is that the confluence operator
499 just initializes the vector from one of the out sets of the
500 successors of the block. */
501 bitmap out;
502
503 /* The following bitvector is indexed by the reg number. It
504 contains the set of regs that are live at the current instruction
505 being processed. While it contains info for all of the
506 registers, only the hard registers are actually examined. It is used
507 to assure that shift and/or add sequences that are inserted do not
508 accidentally clobber live hard regs. */
509 bitmap regs_live;
510 };
511
512 typedef struct dse_bb_info *bb_info_t;
513 static alloc_pool bb_info_pool;
514
515 /* Table to hold all bb_infos. */
516 static bb_info_t *bb_table;
517
518 /* There is a group_info for each rtx base that is used to reference
519 memory. There are not many rtx bases because they are
520 very limited in scope. */
521
522 struct group_info
523 {
524 /* The actual base of the address. */
525 rtx rtx_base;
526
527 /* The sequential id of the base. This allows us to have a
528 canonical ordering of these that is not based on addresses. */
529 int id;
530
531 /* True if there are any positions that are to be processed
532 globally. */
533 bool process_globally;
534
535 /* True if the base of this group is either the frame_pointer or
536 hard_frame_pointer. */
537 bool frame_related;
538
539 /* A mem wrapped around the base pointer for the group in order to do
540 read dependency. It must be given BLKmode in order to encompass all
541 the possible offsets from the base. */
542 rtx base_mem;
543
544 /* Canonized version of base_mem's address. */
545 rtx canon_base_addr;
546
547 /* These two sets of two bitmaps are used to keep track of how many
548 stores are actually referencing that position from this base. We
549 only do this for rtx bases as this will be used to assign
550 positions in the bitmaps for the global problem. Bit N is set in
551 store1 on the first store for offset N. Bit N is set in store2
552 for the second store to offset N. This is all we need since we
553 only care about offsets that have two or more stores for them.
554
555 The "_n" suffix is for offsets less than 0 and the "_p" suffix is
556 for 0 and greater offsets.
557
558 There is one special case here: for stores into the stack frame,
559 we will OR store1 into store2 before deciding which stores to look
560 at globally. This is because stores to the stack frame that have
561 no other reads before the end of the function can also be
562 deleted. */
563 bitmap store1_n, store1_p, store2_n, store2_p;
564
565 /* These bitmaps keep track of which offsets in this group escape this function.
566 An offset escapes if it corresponds to a named variable whose
567 addressable flag is set. */
568 bitmap escaped_n, escaped_p;
569
570 /* The positions in this bitmap have the same assignments as the in,
571 out, gen and kill bitmaps. This bitmap is all zeros except for
572 the positions that are occupied by stores for this group. */
573 bitmap group_kill;
574
575 /* The offset_map is used to map the offsets from this base into
576 positions in the global bitmaps. It is only created after all of
577 the stores have been scanned and we know which ones we
578 care about. */
579 int *offset_map_n, *offset_map_p;
580 int offset_map_size_n, offset_map_size_p;
581 };
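
/* An illustrative example: if bytes at offsets 8 and 16 from a group's
base are each stored to at least twice, step 2 assigns each of them a
distinct position in the global bitmaps and records it in
offset_map_p[8] and offset_map_p[16]; offsets that never see a second
store get no global position and are handled only locally. */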
582 typedef struct group_info *group_info_t;
583 typedef const struct group_info *const_group_info_t;
584 static alloc_pool rtx_group_info_pool;
585
586 /* Index into the rtx_group_vec. */
587 static int rtx_group_next_id;
588
589
590 static vec<group_info_t> rtx_group_vec;
591
592
593 /* This structure holds the set of changes that are being deferred
594 when removing a read operation. See replace_read. */
595 struct deferred_change
596 {
597
598 /* The mem that is being replaced. */
599 rtx *loc;
600
601 /* The reg it is being replaced with. */
602 rtx reg;
603
604 struct deferred_change *next;
605 };
606
607 typedef struct deferred_change *deferred_change_t;
608 static alloc_pool deferred_change_pool;
609
610 static deferred_change_t deferred_change_list = NULL;
611
612 /* The group that holds all of the clear_alias_sets. */
613 static group_info_t clear_alias_group;
614
615 /* The modes of the clear_alias_sets. */
616 static htab_t clear_alias_mode_table;
617
618 /* Hash table element to look up the mode for an alias set. */
619 struct clear_alias_mode_holder
620 {
621 alias_set_type alias_set;
622 machine_mode mode;
623 };
624
625 /* This is true except if cfun->stdarg -- i.e. we cannot do
626 this for vararg functions because they play games with the frame. */
627 static bool stores_off_frame_dead_at_return;
628
629 /* Counter for stats. */
630 static int globally_deleted;
631 static int locally_deleted;
632 static int spill_deleted;
633
634 static bitmap all_blocks;
635
636 /* Locations that are killed by calls in the global phase. */
637 static bitmap kill_on_calls;
638
639 /* The number of bits used in the global bitmaps. */
640 static unsigned int current_position;
641 \f
642 /*----------------------------------------------------------------------------
643 Zeroth step.
644
645 Initialization.
646 ----------------------------------------------------------------------------*/
647
648
649 /* Find the entry associated with ALIAS_SET. */
650
651 static struct clear_alias_mode_holder *
652 clear_alias_set_lookup (alias_set_type alias_set)
653 {
654 struct clear_alias_mode_holder tmp_holder;
655 void **slot;
656
657 tmp_holder.alias_set = alias_set;
658 slot = htab_find_slot (clear_alias_mode_table, &tmp_holder, NO_INSERT);
659 gcc_assert (*slot);
660
661 return (struct clear_alias_mode_holder *) *slot;
662 }
663
664
665 /* Hashtable callbacks for maintaining the "bases" field of
666 group_info, given that the addresses are function invariants. */
667
668 struct invariant_group_base_hasher : typed_noop_remove <group_info>
669 {
670 typedef group_info value_type;
671 typedef group_info compare_type;
672 static inline hashval_t hash (const value_type *);
673 static inline bool equal (const value_type *, const compare_type *);
674 };
675
676 inline bool
677 invariant_group_base_hasher::equal (const value_type *gi1,
678 const compare_type *gi2)
679 {
680 return rtx_equal_p (gi1->rtx_base, gi2->rtx_base);
681 }
682
683 inline hashval_t
684 invariant_group_base_hasher::hash (const value_type *gi)
685 {
686 int do_not_record;
687 return hash_rtx (gi->rtx_base, Pmode, &do_not_record, NULL, false);
688 }
689
690 /* Tables of group_info structures, hashed by base value. */
691 static hash_table<invariant_group_base_hasher> *rtx_group_table;
692
693
694 /* Get the GROUP for BASE. Add a new group if it is not there. */
695
696 static group_info_t
697 get_group_info (rtx base)
698 {
699 struct group_info tmp_gi;
700 group_info_t gi;
701 group_info **slot;
702
703 if (base)
704 {
705 /* Find the store_base_info structure for BASE, creating a new one
706 if necessary. */
707 tmp_gi.rtx_base = base;
708 slot = rtx_group_table->find_slot (&tmp_gi, INSERT);
709 gi = (group_info_t) *slot;
710 }
711 else
712 {
713 if (!clear_alias_group)
714 {
715 clear_alias_group = gi =
716 (group_info_t) pool_alloc (rtx_group_info_pool);
717 memset (gi, 0, sizeof (struct group_info));
718 gi->id = rtx_group_next_id++;
719 gi->store1_n = BITMAP_ALLOC (&dse_bitmap_obstack);
720 gi->store1_p = BITMAP_ALLOC (&dse_bitmap_obstack);
721 gi->store2_n = BITMAP_ALLOC (&dse_bitmap_obstack);
722 gi->store2_p = BITMAP_ALLOC (&dse_bitmap_obstack);
723 gi->escaped_p = BITMAP_ALLOC (&dse_bitmap_obstack);
724 gi->escaped_n = BITMAP_ALLOC (&dse_bitmap_obstack);
725 gi->group_kill = BITMAP_ALLOC (&dse_bitmap_obstack);
726 gi->process_globally = false;
727 gi->offset_map_size_n = 0;
728 gi->offset_map_size_p = 0;
729 gi->offset_map_n = NULL;
730 gi->offset_map_p = NULL;
731 rtx_group_vec.safe_push (gi);
732 }
733 return clear_alias_group;
734 }
735
736 if (gi == NULL)
737 {
738 *slot = gi = (group_info_t) pool_alloc (rtx_group_info_pool);
739 gi->rtx_base = base;
740 gi->id = rtx_group_next_id++;
741 gi->base_mem = gen_rtx_MEM (BLKmode, base);
742 gi->canon_base_addr = canon_rtx (base);
743 gi->store1_n = BITMAP_ALLOC (&dse_bitmap_obstack);
744 gi->store1_p = BITMAP_ALLOC (&dse_bitmap_obstack);
745 gi->store2_n = BITMAP_ALLOC (&dse_bitmap_obstack);
746 gi->store2_p = BITMAP_ALLOC (&dse_bitmap_obstack);
747 gi->escaped_p = BITMAP_ALLOC (&dse_bitmap_obstack);
748 gi->escaped_n = BITMAP_ALLOC (&dse_bitmap_obstack);
749 gi->group_kill = BITMAP_ALLOC (&dse_bitmap_obstack);
750 gi->process_globally = false;
751 gi->frame_related =
752 (base == frame_pointer_rtx) || (base == hard_frame_pointer_rtx);
753 gi->offset_map_size_n = 0;
754 gi->offset_map_size_p = 0;
755 gi->offset_map_n = NULL;
756 gi->offset_map_p = NULL;
757 rtx_group_vec.safe_push (gi);
758 }
759
760 return gi;
761 }
762
763
764 /* Initialization of data structures. */
765
766 static void
767 dse_step0 (void)
768 {
769 locally_deleted = 0;
770 globally_deleted = 0;
771 spill_deleted = 0;
772
773 bitmap_obstack_initialize (&dse_bitmap_obstack);
774 gcc_obstack_init (&dse_obstack);
775
776 scratch = BITMAP_ALLOC (&reg_obstack);
777 kill_on_calls = BITMAP_ALLOC (&dse_bitmap_obstack);
778
779 rtx_store_info_pool
780 = create_alloc_pool ("rtx_store_info_pool",
781 sizeof (struct store_info), 100);
782 read_info_pool
783 = create_alloc_pool ("read_info_pool",
784 sizeof (struct read_info), 100);
785 insn_info_pool
786 = create_alloc_pool ("insn_info_pool",
787 sizeof (struct insn_info), 100);
788 bb_info_pool
789 = create_alloc_pool ("bb_info_pool",
790 sizeof (struct dse_bb_info), 100);
791 rtx_group_info_pool
792 = create_alloc_pool ("rtx_group_info_pool",
793 sizeof (struct group_info), 100);
794 deferred_change_pool
795 = create_alloc_pool ("deferred_change_pool",
796 sizeof (struct deferred_change), 10);
797
798 rtx_group_table = new hash_table<invariant_group_base_hasher> (11);
799
800 bb_table = XNEWVEC (bb_info_t, last_basic_block_for_fn (cfun));
801 rtx_group_next_id = 0;
802
803 stores_off_frame_dead_at_return = !cfun->stdarg;
804
805 init_alias_analysis ();
806
807 clear_alias_group = NULL;
808 }
809
810
811 \f
812 /*----------------------------------------------------------------------------
813 First step.
814
815 Scan all of the insns. Any random ordering of the blocks is fine.
816 Each block is scanned in forward order to accommodate cselib which
817 is used to remove stores with non-constant bases.
818 ----------------------------------------------------------------------------*/
819
820 /* Delete all of the store_info recs from INSN_INFO. */
821
822 static void
823 free_store_info (insn_info_t insn_info)
824 {
825 store_info_t store_info = insn_info->store_rec;
826 while (store_info)
827 {
828 store_info_t next = store_info->next;
829 if (store_info->is_large)
830 BITMAP_FREE (store_info->positions_needed.large.bmap);
831 if (store_info->cse_base)
832 pool_free (cse_store_info_pool, store_info);
833 else
834 pool_free (rtx_store_info_pool, store_info);
835 store_info = next;
836 }
837
838 insn_info->cannot_delete = true;
839 insn_info->contains_cselib_groups = false;
840 insn_info->store_rec = NULL;
841 }
842
843 typedef struct
844 {
845 rtx_insn *first, *current;
846 regset fixed_regs_live;
847 bool failure;
848 } note_add_store_info;
849
850 /* Callback for emit_inc_dec_insn_before via note_stores.
851 Check if a register is clobbered which is live afterwards. */
852
853 static void
854 note_add_store (rtx loc, const_rtx expr ATTRIBUTE_UNUSED, void *data)
855 {
856 rtx_insn *insn;
857 note_add_store_info *info = (note_add_store_info *) data;
858 int r, n;
859
860 if (!REG_P (loc))
861 return;
862
863 /* If this register is referenced by the current or an earlier insn,
864 that's OK. E.g. this applies to the register that is being incremented
865 with this addition. */
866 for (insn = info->first;
867 insn != NEXT_INSN (info->current);
868 insn = NEXT_INSN (insn))
869 if (reg_referenced_p (loc, PATTERN (insn)))
870 return;
871
872 /* If we come here, we have a clobber of a register that's only OK
873 if that register is not live. If we don't have liveness information
874 available, fail now. */
875 if (!info->fixed_regs_live)
876 {
877 info->failure = true;
878 return;
879 }
880 /* Now check if this is a live fixed register. */
881 r = REGNO (loc);
882 n = hard_regno_nregs[r][GET_MODE (loc)];
883 while (--n >= 0)
884 if (REGNO_REG_SET_P (info->fixed_regs_live, r+n))
885 info->failure = true;
886 }
887
888 /* Callback for for_each_inc_dec that emits an INSN that sets DEST to
889 SRC + SRCOFF before insn ARG. */
890
891 static int
892 emit_inc_dec_insn_before (rtx mem ATTRIBUTE_UNUSED,
893 rtx op ATTRIBUTE_UNUSED,
894 rtx dest, rtx src, rtx srcoff, void *arg)
895 {
896 insn_info_t insn_info = (insn_info_t) arg;
897 rtx_insn *insn = insn_info->insn, *new_insn, *cur;
898 note_add_store_info info;
899
900 /* We can reuse all operands without copying, because we are about
901 to delete the insn that contained them. */
902 if (srcoff)
903 {
904 start_sequence ();
905 emit_insn (gen_add3_insn (dest, src, srcoff));
906 new_insn = get_insns ();
907 end_sequence ();
908 }
909 else
910 new_insn = as_a <rtx_insn *> (gen_move_insn (dest, src));
911 info.first = new_insn;
912 info.fixed_regs_live = insn_info->fixed_regs_live;
913 info.failure = false;
914 for (cur = new_insn; cur; cur = NEXT_INSN (cur))
915 {
916 info.current = cur;
917 note_stores (PATTERN (cur), note_add_store, &info);
918 }
919
920 /* If a failure was flagged above, return 1 so that for_each_inc_dec will
921 return it immediately, communicating the failure to its caller. */
922 if (info.failure)
923 return 1;
924
925 emit_insn_before (new_insn, insn);
926
927 return 0;
928 }
929
930 /* Before we delete INSN_INFO->INSN, make sure that the auto inc/dec, if it
931 is there, is split into a separate insn.
932 Return true on success (or if there was nothing to do), false on failure. */
933
934 static bool
935 check_for_inc_dec_1 (insn_info_t insn_info)
936 {
937 rtx_insn *insn = insn_info->insn;
938 rtx note = find_reg_note (insn, REG_INC, NULL_RTX);
939 if (note)
940 return for_each_inc_dec (PATTERN (insn), emit_inc_dec_insn_before,
941 insn_info) == 0;
942 return true;
943 }
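
/* For example (a sketch; the exact rtl is target-dependent), deleting

(set (mem (post_inc (reg r1))) (reg r2))

must not lose the side effect on r1, so for a 4-byte mem
emit_inc_dec_insn_before first emits

(set (reg r1) (plus (reg r1) (const_int 4)))

before the doomed insn, and only then is the store deleted. */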
944
945
946 /* Entry point for postreload. If you work on reload_cse, or you need this
947 anywhere else, consider if you can provide register liveness information
948 and add a parameter to this function so that it can be passed down in
949 insn_info.fixed_regs_live. */
950 bool
951 check_for_inc_dec (rtx_insn *insn)
952 {
953 struct insn_info insn_info;
954 rtx note;
955
956 insn_info.insn = insn;
957 insn_info.fixed_regs_live = NULL;
958 note = find_reg_note (insn, REG_INC, NULL_RTX);
959 if (note)
960 return for_each_inc_dec (PATTERN (insn), emit_inc_dec_insn_before,
961 &insn_info) == 0;
962 return true;
963 }
964
965 /* Delete the insn and free all of the fields inside INSN_INFO. */
966
967 static void
968 delete_dead_store_insn (insn_info_t insn_info)
969 {
970 read_info_t read_info;
971
972 if (!dbg_cnt (dse))
973 return;
974
975 if (!check_for_inc_dec_1 (insn_info))
976 return;
977 if (dump_file && (dump_flags & TDF_DETAILS))
978 {
979 fprintf (dump_file, "Locally deleting insn %d ",
980 INSN_UID (insn_info->insn));
981 if (insn_info->store_rec->alias_set)
982 fprintf (dump_file, "alias set %d\n",
983 (int) insn_info->store_rec->alias_set);
984 else
985 fprintf (dump_file, "\n");
986 }
987
988 free_store_info (insn_info);
989 read_info = insn_info->read_rec;
990
991 while (read_info)
992 {
993 read_info_t next = read_info->next;
994 pool_free (read_info_pool, read_info);
995 read_info = next;
996 }
997 insn_info->read_rec = NULL;
998
999 delete_insn (insn_info->insn);
1000 locally_deleted++;
1001 insn_info->insn = NULL;
1002
1003 insn_info->wild_read = false;
1004 }
1005
1006 /* Return whether DECL, a local variable, can possibly escape the current
1007 function scope. */
1008
1009 static bool
1010 local_variable_can_escape (tree decl)
1011 {
1012 if (TREE_ADDRESSABLE (decl))
1013 return true;
1014
1015 /* If this is a partitioned variable, we need to consider all the variables
1016 in the partition. This is necessary because a store into one of them can
1017 be replaced with a store into another and this may not change the outcome
1018 of the escape analysis. */
1019 if (cfun->gimple_df->decls_to_pointers != NULL)
1020 {
1021 tree *namep = cfun->gimple_df->decls_to_pointers->get (decl);
1022 if (namep)
1023 return TREE_ADDRESSABLE (*namep);
1024 }
1025
1026 return false;
1027 }
1028
1029 /* Return whether EXPR can possibly escape the current function scope. */
1030
1031 static bool
1032 can_escape (tree expr)
1033 {
1034 tree base;
1035 if (!expr)
1036 return true;
1037 base = get_base_address (expr);
1038 if (DECL_P (base)
1039 && !may_be_aliased (base)
1040 && !(TREE_CODE (base) == VAR_DECL
1041 && !DECL_EXTERNAL (base)
1042 && !TREE_STATIC (base)
1043 && local_variable_can_escape (base)))
1044 return false;
1045 return true;
1046 }
1047
1048 /* Set the store* bitmaps and the offset_map_size* fields in GROUP
1049 based on OFFSET and WIDTH. */
1050
1051 static void
1052 set_usage_bits (group_info_t group, HOST_WIDE_INT offset, HOST_WIDE_INT width,
1053 tree expr)
1054 {
1055 HOST_WIDE_INT i;
1056 bool expr_escapes = can_escape (expr);
1057 if (offset > -MAX_OFFSET && offset + width < MAX_OFFSET)
1058 for (i = offset; i < offset + width; i++)
1059 {
1060 bitmap store1;
1061 bitmap store2;
1062 bitmap escaped;
1063 int ai;
1064 if (i < 0)
1065 {
1066 store1 = group->store1_n;
1067 store2 = group->store2_n;
1068 escaped = group->escaped_n;
1069 ai = -i;
1070 }
1071 else
1072 {
1073 store1 = group->store1_p;
1074 store2 = group->store2_p;
1075 escaped = group->escaped_p;
1076 ai = i;
1077 }
1078
1079 if (!bitmap_set_bit (store1, ai))
1080 bitmap_set_bit (store2, ai);
1081 else
1082 {
1083 if (i < 0)
1084 {
1085 if (group->offset_map_size_n < ai)
1086 group->offset_map_size_n = ai;
1087 }
1088 else
1089 {
1090 if (group->offset_map_size_p < ai)
1091 group->offset_map_size_p = ai;
1092 }
1093 }
1094 if (expr_escapes)
1095 bitmap_set_bit (escaped, ai);
1096 }
1097 }
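
/* For example (illustrative): the first store to byte 4 off a group's
base sets bit 4 of store1_p; a second store to the same byte then sets
bit 4 of store2_p. Only offsets with their bit set in store2 (or, for
frame-related groups, in store1 as well; see struct group_info) become
candidates for the global problem. */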
1098
1099 static void
1100 reset_active_stores (void)
1101 {
1102 active_local_stores = NULL;
1103 active_local_stores_len = 0;
1104 }
1105
1106 /* Free all READ_REC of the LAST_INSN of BB_INFO. */
1107
1108 static void
1109 free_read_records (bb_info_t bb_info)
1110 {
1111 insn_info_t insn_info = bb_info->last_insn;
1112 read_info_t *ptr = &insn_info->read_rec;
1113 while (*ptr)
1114 {
1115 read_info_t next = (*ptr)->next;
1116 if ((*ptr)->alias_set == 0)
1117 {
1118 pool_free (read_info_pool, *ptr);
1119 *ptr = next;
1120 }
1121 else
1122 ptr = &(*ptr)->next;
1123 }
1124 }
1125
1126 /* Set the BB_INFO so that the last insn is marked as a wild read. */
1127
1128 static void
1129 add_wild_read (bb_info_t bb_info)
1130 {
1131 insn_info_t insn_info = bb_info->last_insn;
1132 insn_info->wild_read = true;
1133 free_read_records (bb_info);
1134 reset_active_stores ();
1135 }
1136
1137 /* Set the BB_INFO so that the last insn is marked as a wild read of
1138 non-frame locations. */
1139
1140 static void
1141 add_non_frame_wild_read (bb_info_t bb_info)
1142 {
1143 insn_info_t insn_info = bb_info->last_insn;
1144 insn_info->non_frame_wild_read = true;
1145 free_read_records (bb_info);
1146 reset_active_stores ();
1147 }
1148
1149 /* Return true if X is a constant or one of the registers that behave
1150 as a constant over the life of a function. This is equivalent to
1151 !rtx_varies_p for memory addresses. */
1152
1153 static bool
1154 const_or_frame_p (rtx x)
1155 {
1156 if (CONSTANT_P (x))
1157 return true;
1158
1159 if (GET_CODE (x) == REG)
1160 {
1161 /* Note that we have to test for the actual rtx used for the frame
1162 and arg pointers and not just the register number in case we have
1163 eliminated the frame and/or arg pointer and are using it
1164 for pseudos. */
1165 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
1166 /* The arg pointer varies if it is not a fixed register. */
1167 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
1168 || x == pic_offset_table_rtx)
1169 return true;
1170 return false;
1171 }
1172
1173 return false;
1174 }
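
/* E.g. frame_pointer_rtx and a (symbol_ref "x") count as constant
bases here, while a pseudo such as (reg 100) does not: its value can
change over the life of the function, so stores through it only get
the local, cselib-based treatment. */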
1175
1176 /* Take all reasonable action to put the address of MEM into the form
1177 that we can do analysis on.
1178
1179 The gold standard is to get the address into the form: address +
1180 OFFSET where address is something that rtx_varies_p considers a
1181 constant. When we can get the address in this form, we can do
1182 global analysis on it. Note that for constant bases, address is
1183 not actually returned, only the group_id. The address can be
1184 obtained from that.
1185
1186 If that fails, we try cselib to get a value we can at least use
1187 locally. If that fails we return false.
1188
1189 The GROUP_ID is set to -1 for cselib bases and the index of the
1190 group for non_varying bases.
1191
1192 FOR_READ is true if this is a mem read and false if not. */
1193
1194 static bool
1195 canon_address (rtx mem,
1196 alias_set_type *alias_set_out,
1197 int *group_id,
1198 HOST_WIDE_INT *offset,
1199 cselib_val **base)
1200 {
1201 machine_mode address_mode = get_address_mode (mem);
1202 rtx mem_address = XEXP (mem, 0);
1203 rtx expanded_address, address;
1204 int expanded;
1205
1206 *alias_set_out = 0;
1207
1208 cselib_lookup (mem_address, address_mode, 1, GET_MODE (mem));
1209
1210 if (dump_file && (dump_flags & TDF_DETAILS))
1211 {
1212 fprintf (dump_file, " mem: ");
1213 print_inline_rtx (dump_file, mem_address, 0);
1214 fprintf (dump_file, "\n");
1215 }
1216
1217 /* First see if just canon_rtx (mem_address) is const or frame;
1218 if not, try cselib_expand_value_rtx and call canon_rtx on that. */
1219 address = NULL_RTX;
1220 for (expanded = 0; expanded < 2; expanded++)
1221 {
1222 if (expanded)
1223 {
1224 /* Use cselib to replace all of the reg references with the full
1225 expression. This will take care of the case where we have
1226
1227 r_x = base + offset;
1228 val = *r_x;
1229
1230 by making it into
1231
1232 val = *(base + offset); */
1233
1234 expanded_address = cselib_expand_value_rtx (mem_address,
1235 scratch, 5);
1236
1237 /* If this fails, just go with the address from first
1238 iteration. */
1239 if (!expanded_address)
1240 break;
1241 }
1242 else
1243 expanded_address = mem_address;
1244
1245 /* Split the address into canonical BASE + OFFSET terms. */
1246 address = canon_rtx (expanded_address);
1247
1248 *offset = 0;
1249
1250 if (dump_file && (dump_flags & TDF_DETAILS))
1251 {
1252 if (expanded)
1253 {
1254 fprintf (dump_file, "\n after cselib_expand address: ");
1255 print_inline_rtx (dump_file, expanded_address, 0);
1256 fprintf (dump_file, "\n");
1257 }
1258
1259 fprintf (dump_file, "\n after canon_rtx address: ");
1260 print_inline_rtx (dump_file, address, 0);
1261 fprintf (dump_file, "\n");
1262 }
1263
1264 if (GET_CODE (address) == CONST)
1265 address = XEXP (address, 0);
1266
1267 if (GET_CODE (address) == PLUS
1268 && CONST_INT_P (XEXP (address, 1)))
1269 {
1270 *offset = INTVAL (XEXP (address, 1));
1271 address = XEXP (address, 0);
1272 }
1273
1274 if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (mem))
1275 && const_or_frame_p (address))
1276 {
1277 group_info_t group = get_group_info (address);
1278
1279 if (dump_file && (dump_flags & TDF_DETAILS))
1280 fprintf (dump_file, " gid=%d offset=%d \n",
1281 group->id, (int)*offset);
1282 *base = NULL;
1283 *group_id = group->id;
1284 return true;
1285 }
1286 }
1287
1288 *base = cselib_lookup (address, address_mode, true, GET_MODE (mem));
1289 *group_id = -1;
1290
1291 if (*base == NULL)
1292 {
1293 if (dump_file && (dump_flags & TDF_DETAILS))
1294 fprintf (dump_file, " no cselib val - should be a wild read.\n");
1295 return false;
1296 }
1297 if (dump_file && (dump_flags & TDF_DETAILS))
1298 fprintf (dump_file, " varying cselib base=%u:%u offset = %d\n",
1299 (*base)->uid, (*base)->hash, (int)*offset);
1300 return true;
1301 }
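
/* A worked example (illustrative): for a store to *(fp + 8) the
address canonicalizes to (plus (reg/f frame_pointer) (const_int 8)),
so *GROUP_ID becomes the frame pointer group's id and *OFFSET 8.
For *p with p a pseudo, no const-or-frame base is found even after
cselib expansion, so *BASE is set to the cselib value of the address
and *GROUP_ID to -1. */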
1302
1303
1304 /* Clear the rhs field from the active_local_stores array. */
1305
1306 static void
1307 clear_rhs_from_active_local_stores (void)
1308 {
1309 insn_info_t ptr = active_local_stores;
1310
1311 while (ptr)
1312 {
1313 store_info_t store_info = ptr->store_rec;
1314 /* Skip the clobbers. */
1315 while (!store_info->is_set)
1316 store_info = store_info->next;
1317
1318 store_info->rhs = NULL;
1319 store_info->const_rhs = NULL;
1320
1321 ptr = ptr->next_local_store;
1322 }
1323 }
1324
1325
1326 /* Mark byte POS bytes from the beginning of store S_INFO as unneeded. */
1327
1328 static inline void
1329 set_position_unneeded (store_info_t s_info, int pos)
1330 {
1331 if (__builtin_expect (s_info->is_large, false))
1332 {
1333 if (bitmap_set_bit (s_info->positions_needed.large.bmap, pos))
1334 s_info->positions_needed.large.count++;
1335 }
1336 else
1337 s_info->positions_needed.small_bitmask
1338 &= ~(((unsigned HOST_WIDE_INT) 1) << pos);
1339 }
1340
1341 /* Mark the whole store S_INFO as unneeded. */
1342
1343 static inline void
1344 set_all_positions_unneeded (store_info_t s_info)
1345 {
1346 if (__builtin_expect (s_info->is_large, false))
1347 {
1348 int pos, end = s_info->end - s_info->begin;
1349 for (pos = 0; pos < end; pos++)
1350 bitmap_set_bit (s_info->positions_needed.large.bmap, pos);
1351 s_info->positions_needed.large.count = end;
1352 }
1353 else
1354 s_info->positions_needed.small_bitmask = (unsigned HOST_WIDE_INT) 0;
1355 }
1356
1357 /* Return TRUE if any bytes from S_INFO store are needed. */
1358
1359 static inline bool
1360 any_positions_needed_p (store_info_t s_info)
1361 {
1362 if (__builtin_expect (s_info->is_large, false))
1363 return (s_info->positions_needed.large.count
1364 < s_info->end - s_info->begin);
1365 else
1366 return (s_info->positions_needed.small_bitmask
1367 != (unsigned HOST_WIDE_INT) 0);
1368 }
1369
1370 /* Return TRUE if all bytes START through START+WIDTH-1 from S_INFO
1371 store are needed. */
1372
1373 static inline bool
1374 all_positions_needed_p (store_info_t s_info, int start, int width)
1375 {
1376 if (__builtin_expect (s_info->is_large, false))
1377 {
1378 int end = start + width;
1379 while (start < end)
1380 if (bitmap_bit_p (s_info->positions_needed.large.bmap, start++))
1381 return false;
1382 return true;
1383 }
1384 else
1385 {
1386 unsigned HOST_WIDE_INT mask = lowpart_bitmask (width) << start;
1387 return (s_info->positions_needed.small_bitmask & mask) == mask;
1388 }
1389 }
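
/* For example (small-bitmask case): with START == 2 and WIDTH == 2,
MASK == lowpart_bitmask (2) << 2 == 0xc, so the test is whether both
bits 2 and 3 are still set in small_bitmask. */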
1390
1391
1392 static rtx get_stored_val (store_info_t, machine_mode, HOST_WIDE_INT,
1393 HOST_WIDE_INT, basic_block, bool);
1394
1395
1396 /* BODY is an instruction pattern that belongs to INSN. Return 1 if
1397 there is a candidate store, after adding it to the appropriate
1398 local store group if so. */
1399
1400 static int
1401 record_store (rtx body, bb_info_t bb_info)
1402 {
1403 rtx mem, rhs, const_rhs, mem_addr;
1404 HOST_WIDE_INT offset = 0;
1405 HOST_WIDE_INT width = 0;
1406 alias_set_type spill_alias_set;
1407 insn_info_t insn_info = bb_info->last_insn;
1408 store_info_t store_info = NULL;
1409 int group_id;
1410 cselib_val *base = NULL;
1411 insn_info_t ptr, last, redundant_reason;
1412 bool store_is_unused;
1413
1414 if (GET_CODE (body) != SET && GET_CODE (body) != CLOBBER)
1415 return 0;
1416
1417 mem = SET_DEST (body);
1418
1419 /* If this destination is not used, then it cannot be used to keep
1420 the insn from being deleted. On the other hand, it does provide
1421 something that can be used to prove that another store is dead. */
1422 store_is_unused
1423 = (find_reg_note (insn_info->insn, REG_UNUSED, mem) != NULL);
1424
1425 /* Check whether that value is a suitable memory location. */
1426 if (!MEM_P (mem))
1427 {
1428 /* If the set or clobber is unused, then it does not affect our
1429 ability to get rid of the entire insn. */
1430 if (!store_is_unused)
1431 insn_info->cannot_delete = true;
1432 return 0;
1433 }
1434
1435 /* At this point we know mem is a mem. */
1436 if (GET_MODE (mem) == BLKmode)
1437 {
1438 if (GET_CODE (XEXP (mem, 0)) == SCRATCH)
1439 {
1440 if (dump_file && (dump_flags & TDF_DETAILS))
1441 fprintf (dump_file, " adding wild read for (clobber (mem:BLK (scratch)))\n");
1442 add_wild_read (bb_info);
1443 insn_info->cannot_delete = true;
1444 return 0;
1445 }
1446 /* Handle (set (mem:BLK (addr) [... S36 ...]) (const_int 0))
1447 as memset (addr, 0, 36); */
1448 else if (!MEM_SIZE_KNOWN_P (mem)
1449 || MEM_SIZE (mem) <= 0
1450 || MEM_SIZE (mem) > MAX_OFFSET
1451 || GET_CODE (body) != SET
1452 || !CONST_INT_P (SET_SRC (body)))
1453 {
1454 if (!store_is_unused)
1455 {
1456 /* If the set or clobber is unused, then it does not affect our
1457 ability to get rid of the entire insn. */
1458 insn_info->cannot_delete = true;
1459 clear_rhs_from_active_local_stores ();
1460 }
1461 return 0;
1462 }
1463 }
1464
1465 /* We can still process a volatile mem, we just cannot delete it. */
1466 if (MEM_VOLATILE_P (mem))
1467 insn_info->cannot_delete = true;
1468
1469 if (!canon_address (mem, &spill_alias_set, &group_id, &offset, &base))
1470 {
1471 clear_rhs_from_active_local_stores ();
1472 return 0;
1473 }
1474
1475 if (GET_MODE (mem) == BLKmode)
1476 width = MEM_SIZE (mem);
1477 else
1478 width = GET_MODE_SIZE (GET_MODE (mem));
1479
1480 if (spill_alias_set)
1481 {
1482 bitmap store1 = clear_alias_group->store1_p;
1483 bitmap store2 = clear_alias_group->store2_p;
1484
1485 gcc_assert (GET_MODE (mem) != BLKmode);
1486
1487 if (!bitmap_set_bit (store1, spill_alias_set))
1488 bitmap_set_bit (store2, spill_alias_set);
1489
1490 if (clear_alias_group->offset_map_size_p < spill_alias_set)
1491 clear_alias_group->offset_map_size_p = spill_alias_set;
1492
1493 store_info = (store_info_t) pool_alloc (rtx_store_info_pool);
1494
1495 if (dump_file && (dump_flags & TDF_DETAILS))
1496 fprintf (dump_file, " processing spill store %d(%s)\n",
1497 (int) spill_alias_set, GET_MODE_NAME (GET_MODE (mem)));
1498 }
1499 else if (group_id >= 0)
1500 {
1501 /* In the restrictive case where the base is a constant or the
1502 frame pointer we can do global analysis. */
1503
1504 group_info_t group
1505 = rtx_group_vec[group_id];
1506 tree expr = MEM_EXPR (mem);
1507
1508 store_info = (store_info_t) pool_alloc (rtx_store_info_pool);
1509 set_usage_bits (group, offset, width, expr);
1510
1511 if (dump_file && (dump_flags & TDF_DETAILS))
1512 fprintf (dump_file, " processing const base store gid=%d[%d..%d)\n",
1513 group_id, (int)offset, (int)(offset+width));
1514 }
1515 else
1516 {
1517 if (may_be_sp_based_p (XEXP (mem, 0)))
1518 insn_info->stack_pointer_based = true;
1519 insn_info->contains_cselib_groups = true;
1520
1521 store_info = (store_info_t) pool_alloc (cse_store_info_pool);
1522 group_id = -1;
1523
1524 if (dump_file && (dump_flags & TDF_DETAILS))
1525 fprintf (dump_file, " processing cselib store [%d..%d)\n",
1526 (int)offset, (int)(offset+width));
1527 }
1528
1529 const_rhs = rhs = NULL_RTX;
1530 if (GET_CODE (body) == SET
1531 /* No place to keep the value after ra. */
1532 && !reload_completed
1533 && (REG_P (SET_SRC (body))
1534 || GET_CODE (SET_SRC (body)) == SUBREG
1535 || CONSTANT_P (SET_SRC (body)))
1536 && !MEM_VOLATILE_P (mem)
1537 /* Sometimes the store and reload are used for truncation and
1538 rounding. */
1539 && !(FLOAT_MODE_P (GET_MODE (mem)) && (flag_float_store)))
1540 {
1541 rhs = SET_SRC (body);
1542 if (CONSTANT_P (rhs))
1543 const_rhs = rhs;
1544 else if (body == PATTERN (insn_info->insn))
1545 {
1546 rtx tem = find_reg_note (insn_info->insn, REG_EQUAL, NULL_RTX);
1547 if (tem && CONSTANT_P (XEXP (tem, 0)))
1548 const_rhs = XEXP (tem, 0);
1549 }
1550 if (const_rhs == NULL_RTX && REG_P (rhs))
1551 {
1552 rtx tem = cselib_expand_value_rtx (rhs, scratch, 5);
1553
1554 if (tem && CONSTANT_P (tem))
1555 const_rhs = tem;
1556 }
1557 }
1558
1559 /* Check to see if this store causes some other stores to be
1560 dead. */
1561 ptr = active_local_stores;
1562 last = NULL;
1563 redundant_reason = NULL;
1564 mem = canon_rtx (mem);
1565 /* For alias_set != 0 canon_true_dependence should never be called. */
1566 if (spill_alias_set)
1567 mem_addr = NULL_RTX;
1568 else
1569 {
1570 if (group_id < 0)
1571 mem_addr = base->val_rtx;
1572 else
1573 {
1574 group_info_t group
1575 = rtx_group_vec[group_id];
1576 mem_addr = group->canon_base_addr;
1577 }
1578 if (offset)
1579 mem_addr = plus_constant (get_address_mode (mem), mem_addr, offset);
1580 }
1581
1582 while (ptr)
1583 {
1584 insn_info_t next = ptr->next_local_store;
1585 store_info_t s_info = ptr->store_rec;
1586 bool del = true;
1587
1588 /* Skip the clobbers. We delete the active insn if this insn
1589 shadows the set. To have been put on the active list, it
1590 has exactly one set. */
1591 while (!s_info->is_set)
1592 s_info = s_info->next;
1593
1594 if (s_info->alias_set != spill_alias_set)
1595 del = false;
1596 else if (s_info->alias_set)
1597 {
1598 struct clear_alias_mode_holder *entry
1599 = clear_alias_set_lookup (s_info->alias_set);
1600 /* Generally, spills cannot be processed if any of the
1601 references to the slot have a different mode. But if
1602 the mode is exactly the same between this store and one
1603 before it in the same block, we can still delete the
1604 earlier one. */
1605 if ((GET_MODE (mem) == GET_MODE (s_info->mem))
1606 && (GET_MODE (mem) == entry->mode))
1607 {
1608 del = true;
1609 set_all_positions_unneeded (s_info);
1610 }
1611 if (dump_file && (dump_flags & TDF_DETAILS))
1612 fprintf (dump_file, " trying spill store in insn=%d alias_set=%d\n",
1613 INSN_UID (ptr->insn), (int) s_info->alias_set);
1614 }
1615 else if ((s_info->group_id == group_id)
1616 && (s_info->cse_base == base))
1617 {
1618 HOST_WIDE_INT i;
1619 if (dump_file && (dump_flags & TDF_DETAILS))
1620 fprintf (dump_file, " trying store in insn=%d gid=%d[%d..%d)\n",
1621 INSN_UID (ptr->insn), s_info->group_id,
1622 (int)s_info->begin, (int)s_info->end);
1623
1624 /* Even if PTR won't be eliminated as unneeded, if both
1625 PTR and this insn store the same constant value, we might
1626 eliminate this insn instead. */
1627 if (s_info->const_rhs
1628 && const_rhs
1629 && offset >= s_info->begin
1630 && offset + width <= s_info->end
1631 && all_positions_needed_p (s_info, offset - s_info->begin,
1632 width))
1633 {
1634 if (GET_MODE (mem) == BLKmode)
1635 {
1636 if (GET_MODE (s_info->mem) == BLKmode
1637 && s_info->const_rhs == const_rhs)
1638 redundant_reason = ptr;
1639 }
1640 else if (s_info->const_rhs == const0_rtx
1641 && const_rhs == const0_rtx)
1642 redundant_reason = ptr;
1643 else
1644 {
1645 rtx val;
1646 start_sequence ();
1647 val = get_stored_val (s_info, GET_MODE (mem),
1648 offset, offset + width,
1649 BLOCK_FOR_INSN (insn_info->insn),
1650 true);
1651 if (get_insns () != NULL)
1652 val = NULL_RTX;
1653 end_sequence ();
1654 if (val && rtx_equal_p (val, const_rhs))
1655 redundant_reason = ptr;
1656 }
1657 }
1658
1659 for (i = MAX (offset, s_info->begin);
1660 i < offset + width && i < s_info->end;
1661 i++)
1662 set_position_unneeded (s_info, i - s_info->begin);
1663 }
1664 else if (s_info->rhs)
1665 /* Need to see if it is possible for this store to overwrite
1666 the value of store_info. If it is, set the rhs to NULL to
1667 keep it from being used to remove a load. */
1668 {
1669 if (canon_true_dependence (s_info->mem,
1670 GET_MODE (s_info->mem),
1671 s_info->mem_addr,
1672 mem, mem_addr))
1673 {
1674 s_info->rhs = NULL;
1675 s_info->const_rhs = NULL;
1676 }
1677 }
1678
1679 /* An insn can be deleted if every position of every one of
1680 its s_infos is unneeded. */
1681 if (any_positions_needed_p (s_info))
1682 del = false;
1683
1684 if (del)
1685 {
1686 insn_info_t insn_to_delete = ptr;
1687
1688 active_local_stores_len--;
1689 if (last)
1690 last->next_local_store = ptr->next_local_store;
1691 else
1692 active_local_stores = ptr->next_local_store;
1693
1694 if (!insn_to_delete->cannot_delete)
1695 delete_dead_store_insn (insn_to_delete);
1696 }
1697 else
1698 last = ptr;
1699
1700 ptr = next;
1701 }
1702
1703 /* Finish filling in the store_info. */
1704 store_info->next = insn_info->store_rec;
1705 insn_info->store_rec = store_info;
1706 store_info->mem = mem;
1707 store_info->alias_set = spill_alias_set;
1708 store_info->mem_addr = mem_addr;
1709 store_info->cse_base = base;
1710 if (width > HOST_BITS_PER_WIDE_INT)
1711 {
1712 store_info->is_large = true;
1713 store_info->positions_needed.large.count = 0;
1714 store_info->positions_needed.large.bmap = BITMAP_ALLOC (&dse_bitmap_obstack);
1715 }
1716 else
1717 {
1718 store_info->is_large = false;
1719 store_info->positions_needed.small_bitmask = lowpart_bitmask (width);
1720 }
1721 store_info->group_id = group_id;
1722 store_info->begin = offset;
1723 store_info->end = offset + width;
1724 store_info->is_set = GET_CODE (body) == SET;
1725 store_info->rhs = rhs;
1726 store_info->const_rhs = const_rhs;
1727 store_info->redundant_reason = redundant_reason;
1728
1729 /* If this is a clobber, we return 0. We will only be able to
1730 delete this insn if there is only one USED store, but we
1731 can use the clobber to delete other stores earlier. */
1732 return store_info->is_set ? 1 : 0;
1733 }
1734
1735
1736 static void
1737 dump_insn_info (const char * start, insn_info_t insn_info)
1738 {
1739 fprintf (dump_file, "%s insn=%d %s\n", start,
1740 INSN_UID (insn_info->insn),
1741 insn_info->store_rec ? "has store" : "naked");
1742 }
1743
1744
1745 /* If the modes are different and the value's source and target do not
1746    line up, we need to extract the value from the lower part of the rhs
1747    of the store, shift it, and then put it into a form that can be shoved
1748    into the read_insn.  This function generates a right SHIFT of a
1749    value that is at least ACCESS_SIZE bytes wide, producing a result
1750    usable in READ_MODE.  The shift sequence is returned, or NULL if we
1751    failed to find a shift.  */
1752
1753 static rtx
1754 find_shift_sequence (int access_size,
1755 store_info_t store_info,
1756 machine_mode read_mode,
1757 int shift, bool speed, bool require_cst)
1758 {
1759 machine_mode store_mode = GET_MODE (store_info->mem);
1760 machine_mode new_mode;
1761 rtx read_reg = NULL;
1762
1763 /* Some machines like the x86 have shift insns for each size of
1764 operand. Other machines like the ppc or the ia-64 may only have
1765 shift insns that shift values within 32 or 64 bit registers.
1766 This loop tries to find the smallest shift insn that will right
1767 justify the value we want to read but is available in one insn on
1768 the machine. */
1769
1770 for (new_mode = smallest_mode_for_size (access_size * BITS_PER_UNIT,
1771 MODE_INT);
1772 GET_MODE_BITSIZE (new_mode) <= BITS_PER_WORD;
1773 new_mode = GET_MODE_WIDER_MODE (new_mode))
1774 {
1775 rtx target, new_reg, new_lhs;
1776 rtx_insn *shift_seq, *insn;
1777 int cost;
1778
1779 /* If a constant was stored into memory, try to simplify it here,
1780 otherwise the cost of the shift might preclude this optimization
1781 e.g. at -Os, even when no actual shift will be needed. */
1782 if (store_info->const_rhs)
1783 {
1784 unsigned int byte = subreg_lowpart_offset (new_mode, store_mode);
1785 rtx ret = simplify_subreg (new_mode, store_info->const_rhs,
1786 store_mode, byte);
1787 if (ret && CONSTANT_P (ret))
1788 {
1789 ret = simplify_const_binary_operation (LSHIFTRT, new_mode,
1790 ret, GEN_INT (shift));
1791 if (ret && CONSTANT_P (ret))
1792 {
1793 byte = subreg_lowpart_offset (read_mode, new_mode);
1794 ret = simplify_subreg (read_mode, ret, new_mode, byte);
1795 if (ret && CONSTANT_P (ret)
1796 && set_src_cost (ret, speed) <= COSTS_N_INSNS (1))
1797 return ret;
1798 }
1799 }
1800 }
1801
1802 if (require_cst)
1803 return NULL_RTX;
1804
1805 /* Try a wider mode if truncating the store mode to NEW_MODE
1806 requires a real instruction. */
1807 if (GET_MODE_BITSIZE (new_mode) < GET_MODE_BITSIZE (store_mode)
1808 && !TRULY_NOOP_TRUNCATION_MODES_P (new_mode, store_mode))
1809 continue;
1810
1811 /* Also try a wider mode if the necessary punning is either not
1812 desirable or not possible. */
1813 if (!CONSTANT_P (store_info->rhs)
1814 && !MODES_TIEABLE_P (new_mode, store_mode))
1815 continue;
1816
1817 new_reg = gen_reg_rtx (new_mode);
1818
1819 start_sequence ();
1820
1821 /* In theory we could also check for an ashr. Ian Taylor knows
1822 of one dsp where the cost of these two was not the same. But
1823 this really is a rare case anyway. */
1824 target = expand_binop (new_mode, lshr_optab, new_reg,
1825 GEN_INT (shift), new_reg, 1, OPTAB_DIRECT);
1826
1827 shift_seq = get_insns ();
1828 end_sequence ();
1829
1830 if (target != new_reg || shift_seq == NULL)
1831 continue;
1832
1833 cost = 0;
1834 for (insn = shift_seq; insn != NULL_RTX; insn = NEXT_INSN (insn))
1835 if (INSN_P (insn))
1836 cost += insn_rtx_cost (PATTERN (insn), speed);
1837
1838 /* The computation up to here is essentially independent
1839 of the arguments and could be precomputed. It may
1840 not be worth doing so. We could precompute if
1841 worthwhile or at least cache the results. The result
1842 technically depends on both SHIFT and ACCESS_SIZE,
1843 but in practice the answer will depend only on ACCESS_SIZE. */
1844
1845 if (cost > COSTS_N_INSNS (1))
1846 continue;
1847
1848 new_lhs = extract_low_bits (new_mode, store_mode,
1849 copy_rtx (store_info->rhs));
1850 if (new_lhs == NULL_RTX)
1851 continue;
1852
1853 /* We found an acceptable shift. Generate a move to
1854 take the value from the store and put it into the
1855 shift pseudo, then shift it, then generate another
1856 	     move to put it into the target of the read.  */
1857 emit_move_insn (new_reg, new_lhs);
1858 emit_insn (shift_seq);
1859 read_reg = extract_low_bits (read_mode, new_mode, new_reg);
1860 break;
1861 }
1862
1863 return read_reg;
1864 }
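/* Illustrative note (not from the original source): on a little-endian
   target, reading the upper 2 bytes of a 4-byte store requires the
   stored value to be shifted right by 16 bits first; the loop above
   hunts for the narrowest mode in which that lshiftrt is a single
   cheap insn.  A hedged sketch of the arithmetic:  */
#if 0
unsigned int stored = 0x11223344;         /* 4-byte store             */
int shift = 2 * BITS_PER_UNIT;            /* read begins 2 bytes in   */
unsigned short read_back
  = (unsigned short) (stored >> shift);   /* 0x1122, the upper half   */
#endif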
1865
1866
1867 /* Callback for note_stores to find the hard regs set or clobbered by
1868    insn.  DATA is a bitmap of the hardregs set so far.  */
1869
1870 static void
1871 look_for_hardregs (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
1872 {
1873 bitmap regs_set = (bitmap) data;
1874
1875 if (REG_P (x)
1876 && HARD_REGISTER_P (x))
1877 {
1878 unsigned int regno = REGNO (x);
1879 bitmap_set_range (regs_set, regno,
1880 hard_regno_nregs[regno][GET_MODE (x)]);
1881 }
1882 }
1883
1884 /* Helper function for replace_read and record_store.
1885 Attempt to return a value stored in STORE_INFO, from READ_BEGIN
1886 to one before READ_END bytes read in READ_MODE. Return NULL
1887    if not successful.  If REQUIRE_CST is true, only ever return a constant.  */
1888
1889 static rtx
1890 get_stored_val (store_info_t store_info, machine_mode read_mode,
1891 HOST_WIDE_INT read_begin, HOST_WIDE_INT read_end,
1892 basic_block bb, bool require_cst)
1893 {
1894 machine_mode store_mode = GET_MODE (store_info->mem);
1895 int shift;
1896 int access_size; /* In bytes. */
1897 rtx read_reg;
1898
1899 /* To get here the read is within the boundaries of the write so
1900 shift will never be negative. Start out with the shift being in
1901 bytes. */
1902 if (store_mode == BLKmode)
1903 shift = 0;
1904 else if (BYTES_BIG_ENDIAN)
1905 shift = store_info->end - read_end;
1906 else
1907 shift = read_begin - store_info->begin;
1908
1909 access_size = shift + GET_MODE_SIZE (read_mode);
1910
1911 /* From now on it is bits. */
1912 shift *= BITS_PER_UNIT;
1913
1914 if (shift)
1915 read_reg = find_shift_sequence (access_size, store_info, read_mode, shift,
1916 optimize_bb_for_speed_p (bb),
1917 require_cst);
1918 else if (store_mode == BLKmode)
1919 {
1920 /* The store is a memset (addr, const_val, const_size). */
1921 gcc_assert (CONST_INT_P (store_info->rhs));
1922 store_mode = int_mode_for_mode (read_mode);
1923 if (store_mode == BLKmode)
1924 read_reg = NULL_RTX;
1925 else if (store_info->rhs == const0_rtx)
1926 read_reg = extract_low_bits (read_mode, store_mode, const0_rtx);
1927 else if (GET_MODE_BITSIZE (store_mode) > HOST_BITS_PER_WIDE_INT
1928 || BITS_PER_UNIT >= HOST_BITS_PER_WIDE_INT)
1929 read_reg = NULL_RTX;
1930 else
1931 {
1932 unsigned HOST_WIDE_INT c
1933 = INTVAL (store_info->rhs)
1934 & (((HOST_WIDE_INT) 1 << BITS_PER_UNIT) - 1);
1935 int shift = BITS_PER_UNIT;
1936 while (shift < HOST_BITS_PER_WIDE_INT)
1937 {
1938 c |= (c << shift);
1939 shift <<= 1;
1940 }
1941 read_reg = gen_int_mode (c, store_mode);
1942 read_reg = extract_low_bits (read_mode, store_mode, read_reg);
1943 }
1944 }
1945 else if (store_info->const_rhs
1946 && (require_cst
1947 || GET_MODE_CLASS (read_mode) != GET_MODE_CLASS (store_mode)))
1948 read_reg = extract_low_bits (read_mode, store_mode,
1949 copy_rtx (store_info->const_rhs));
1950 else
1951 read_reg = extract_low_bits (read_mode, store_mode,
1952 copy_rtx (store_info->rhs));
1953 if (require_cst && read_reg && !CONSTANT_P (read_reg))
1954 read_reg = NULL_RTX;
1955 return read_reg;
1956 }
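/* Illustrative note (not from the original source): a hedged sketch of
   the memset constant replication in the BLKmode arm above.  The low
   byte of the memset value is doubled up until it fills a whole
   HOST_WIDE_INT, so any integer-mode read within the memset range can
   be synthesized from the result.  */
#if 0
unsigned HOST_WIDE_INT c = 0xab;   /* the memset byte */
int shift = BITS_PER_UNIT;
while (shift < HOST_BITS_PER_WIDE_INT)
  {
    c |= c << shift;               /* 0xabab, 0xabababab, ... */
    shift <<= 1;
  }
/* c == 0xabababababababab on a 64-bit host.  */
#endif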
1957
1958 /* Take a sequence of:
1959 A <- r1
1960 ...
1961 ... <- A
1962
1963 and change it into
1964 r2 <- r1
1965 A <- r1
1966 ...
1967 ... <- r2
1968
1969 or
1970
1971 r3 <- extract (r1)
1972 r3 <- r3 >> shift
1973 r2 <- extract (r3)
1974 ... <- r2
1975
1976 or
1977
1978 r2 <- extract (r1)
1979 ... <- r2
1980
1981 Depending on the alignment and the mode of the store and
1982 subsequent load.
1983
1984
1985 The STORE_INFO and STORE_INSN are for the store and READ_INFO
1986 and READ_INSN are for the read. Return true if the replacement
1987 went ok. */
1988
1989 static bool
1990 replace_read (store_info_t store_info, insn_info_t store_insn,
1991 read_info_t read_info, insn_info_t read_insn, rtx *loc,
1992 bitmap regs_live)
1993 {
1994 machine_mode store_mode = GET_MODE (store_info->mem);
1995 machine_mode read_mode = GET_MODE (read_info->mem);
1996 rtx_insn *insns, *this_insn;
1997 rtx read_reg;
1998 basic_block bb;
1999
2000 if (!dbg_cnt (dse))
2001 return false;
2002
2003 /* Create a sequence of instructions to set up the read register.
2004 This sequence goes immediately before the store and its result
2005 is read by the load.
2006
2007 We need to keep this in perspective. We are replacing a read
2008 with a sequence of insns, but the read will almost certainly be
2009 in cache, so it is not going to be an expensive one. Thus, we
2010 are not willing to do a multi insn shift or worse a subroutine
2011 call to get rid of the read. */
2012 if (dump_file && (dump_flags & TDF_DETAILS))
2013 fprintf (dump_file, "trying to replace %smode load in insn %d"
2014 " from %smode store in insn %d\n",
2015 GET_MODE_NAME (read_mode), INSN_UID (read_insn->insn),
2016 GET_MODE_NAME (store_mode), INSN_UID (store_insn->insn));
2017 start_sequence ();
2018 bb = BLOCK_FOR_INSN (read_insn->insn);
2019 read_reg = get_stored_val (store_info,
2020 read_mode, read_info->begin, read_info->end,
2021 bb, false);
2022 if (read_reg == NULL_RTX)
2023 {
2024 end_sequence ();
2025 if (dump_file && (dump_flags & TDF_DETAILS))
2026 fprintf (dump_file, " -- could not extract bits of stored value\n");
2027 return false;
2028 }
2029 /* Force the value into a new register so that it won't be clobbered
2030 between the store and the load. */
2031 read_reg = copy_to_mode_reg (read_mode, read_reg);
2032 insns = get_insns ();
2033 end_sequence ();
2034
2035 if (insns != NULL_RTX)
2036 {
2037 /* Now we have to scan the set of new instructions to see if the
2038 	 sequence contains any sets of hardregs that happened to be
2039 live at this point. For instance, this can happen if one of
2040 the insns sets the CC and the CC happened to be live at that
2041 point. This does occasionally happen, see PR 37922. */
2042 bitmap regs_set = BITMAP_ALLOC (&reg_obstack);
2043
2044 for (this_insn = insns; this_insn != NULL_RTX; this_insn = NEXT_INSN (this_insn))
2045 note_stores (PATTERN (this_insn), look_for_hardregs, regs_set);
2046
2047 bitmap_and_into (regs_set, regs_live);
2048 if (!bitmap_empty_p (regs_set))
2049 {
2050 if (dump_file && (dump_flags & TDF_DETAILS))
2051 {
2052 fprintf (dump_file,
2053 "abandoning replacement because sequence clobbers live hardregs:");
2054 df_print_regset (dump_file, regs_set);
2055 }
2056
2057 BITMAP_FREE (regs_set);
2058 return false;
2059 }
2060 BITMAP_FREE (regs_set);
2061 }
2062
2063 if (validate_change (read_insn->insn, loc, read_reg, 0))
2064 {
2065 deferred_change_t deferred_change =
2066 (deferred_change_t) pool_alloc (deferred_change_pool);
2067
2068 /* Insert this right before the store insn where it will be safe
2069 from later insns that might change it before the read. */
2070 emit_insn_before (insns, store_insn->insn);
2071
2072 /* And now for the kludge part: cselib croaks if you just
2073 return at this point. There are two reasons for this:
2074
2075 1) Cselib has an idea of how many pseudos there are and
2076 that does not include the new ones we just added.
2077
2078 2) Cselib does not know about the move insn we added
2079 above the store_info, and there is no way to tell it
2080 about it, because it has "moved on".
2081
2082 Problem (1) is fixable with a certain amount of engineering.
2083 	 Problem (2) requires starting the bb from scratch.  This
2084 could be expensive.
2085
2086 So we are just going to have to lie. The move/extraction
2087 insns are not really an issue, cselib did not see them. But
2088 	 the use of the new pseudo in read_insn is a real problem because
2089 cselib has not scanned this insn. The way that we solve this
2090 problem is that we are just going to put the mem back for now
2091 and when we are finished with the block, we undo this. We
2092 keep a table of mems to get rid of. At the end of the basic
2093 block we can put them back. */
2094
2095 *loc = read_info->mem;
2096 deferred_change->next = deferred_change_list;
2097 deferred_change_list = deferred_change;
2098 deferred_change->loc = loc;
2099 deferred_change->reg = read_reg;
2100
2101 /* Get rid of the read_info, from the point of view of the
2102 	 rest of dse, pretend this read never happened.  */
2103 read_insn->read_rec = read_info->next;
2104 pool_free (read_info_pool, read_info);
2105 if (dump_file && (dump_flags & TDF_DETAILS))
2106 {
2107 fprintf (dump_file, " -- replaced the loaded MEM with ");
2108 print_simple_rtl (dump_file, read_reg);
2109 fprintf (dump_file, "\n");
2110 }
2111 return true;
2112 }
2113 else
2114 {
2115 if (dump_file && (dump_flags & TDF_DETAILS))
2116 {
2117 fprintf (dump_file, " -- replacing the loaded MEM with ");
2118 print_simple_rtl (dump_file, read_reg);
2119 fprintf (dump_file, " led to an invalid instruction\n");
2120 }
2121 return false;
2122 }
2123 }
2124
2125 /* Check the address of MEM *LOC and kill any appropriate stores that may
2126 be active. */
2127
2128 static void
2129 check_mem_read_rtx (rtx *loc, bb_info_t bb_info)
2130 {
2131 rtx mem = *loc, mem_addr;
2132 insn_info_t insn_info;
2133 HOST_WIDE_INT offset = 0;
2134 HOST_WIDE_INT width = 0;
2135 alias_set_type spill_alias_set = 0;
2136 cselib_val *base = NULL;
2137 int group_id;
2138 read_info_t read_info;
2139
2140 insn_info = bb_info->last_insn;
2141
2142 if ((MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2143 || (MEM_VOLATILE_P (mem)))
2144 {
2145 if (dump_file && (dump_flags & TDF_DETAILS))
2146 fprintf (dump_file, " adding wild read, volatile or barrier.\n");
2147 add_wild_read (bb_info);
2148 insn_info->cannot_delete = true;
2149 return;
2150 }
2151
2152 /* If it is reading readonly mem, then there can be no conflict with
2153 another write. */
2154 if (MEM_READONLY_P (mem))
2155 return;
2156
2157 if (!canon_address (mem, &spill_alias_set, &group_id, &offset, &base))
2158 {
2159 if (dump_file && (dump_flags & TDF_DETAILS))
2160 fprintf (dump_file, " adding wild read, canon_address failure.\n");
2161 add_wild_read (bb_info);
2162 return;
2163 }
2164
2165 if (GET_MODE (mem) == BLKmode)
2166 width = -1;
2167 else
2168 width = GET_MODE_SIZE (GET_MODE (mem));
2169
2170 read_info = (read_info_t) pool_alloc (read_info_pool);
2171 read_info->group_id = group_id;
2172 read_info->mem = mem;
2173 read_info->alias_set = spill_alias_set;
2174 read_info->begin = offset;
2175 read_info->end = offset + width;
2176 read_info->next = insn_info->read_rec;
2177 insn_info->read_rec = read_info;
2178   /* For alias_set != 0 canon_true_dependence should never be called.  */
2179 if (spill_alias_set)
2180 mem_addr = NULL_RTX;
2181 else
2182 {
2183 if (group_id < 0)
2184 mem_addr = base->val_rtx;
2185 else
2186 {
2187 group_info_t group
2188 = rtx_group_vec[group_id];
2189 mem_addr = group->canon_base_addr;
2190 }
2191 if (offset)
2192 mem_addr = plus_constant (get_address_mode (mem), mem_addr, offset);
2193 }
2194
2195   /* We ignore the clobbers in store_info.  This is mildly aggressive,
2196 but there really should not be a clobber followed by a read. */
2197
2198 if (spill_alias_set)
2199 {
2200 insn_info_t i_ptr = active_local_stores;
2201 insn_info_t last = NULL;
2202
2203 if (dump_file && (dump_flags & TDF_DETAILS))
2204 fprintf (dump_file, " processing spill load %d\n",
2205 (int) spill_alias_set);
2206
2207 while (i_ptr)
2208 {
2209 store_info_t store_info = i_ptr->store_rec;
2210
2211 /* Skip the clobbers. */
2212 while (!store_info->is_set)
2213 store_info = store_info->next;
2214
2215 if (store_info->alias_set == spill_alias_set)
2216 {
2217 if (dump_file && (dump_flags & TDF_DETAILS))
2218 dump_insn_info ("removing from active", i_ptr);
2219
2220 active_local_stores_len--;
2221 if (last)
2222 last->next_local_store = i_ptr->next_local_store;
2223 else
2224 active_local_stores = i_ptr->next_local_store;
2225 }
2226 else
2227 last = i_ptr;
2228 i_ptr = i_ptr->next_local_store;
2229 }
2230 }
2231 else if (group_id >= 0)
2232 {
2233 /* This is the restricted case where the base is a constant or
2234 the frame pointer and offset is a constant. */
2235 insn_info_t i_ptr = active_local_stores;
2236 insn_info_t last = NULL;
2237
2238 if (dump_file && (dump_flags & TDF_DETAILS))
2239 {
2240 if (width == -1)
2241 fprintf (dump_file, " processing const load gid=%d[BLK]\n",
2242 group_id);
2243 else
2244 fprintf (dump_file, " processing const load gid=%d[%d..%d)\n",
2245 group_id, (int)offset, (int)(offset+width));
2246 }
2247
2248 while (i_ptr)
2249 {
2250 bool remove = false;
2251 store_info_t store_info = i_ptr->store_rec;
2252
2253 /* Skip the clobbers. */
2254 while (!store_info->is_set)
2255 store_info = store_info->next;
2256
2257 /* There are three cases here. */
2258 if (store_info->group_id < 0)
2259 /* We have a cselib store followed by a read from a
2260 const base. */
2261 remove
2262 = canon_true_dependence (store_info->mem,
2263 GET_MODE (store_info->mem),
2264 store_info->mem_addr,
2265 mem, mem_addr);
2266
2267 else if (group_id == store_info->group_id)
2268 {
2269 /* This is a block mode load. We may get lucky and
2270 canon_true_dependence may save the day. */
2271 if (width == -1)
2272 remove
2273 = canon_true_dependence (store_info->mem,
2274 GET_MODE (store_info->mem),
2275 store_info->mem_addr,
2276 mem, mem_addr);
2277
2278 /* If this read is just reading back something that we just
2279 stored, rewrite the read. */
2280 else
2281 {
2282 if (store_info->rhs
2283 && offset >= store_info->begin
2284 && offset + width <= store_info->end
2285 && all_positions_needed_p (store_info,
2286 offset - store_info->begin,
2287 width)
2288 && replace_read (store_info, i_ptr, read_info,
2289 insn_info, loc, bb_info->regs_live))
2290 return;
2291
2292 /* The bases are the same, just see if the offsets
2293 overlap. */
2294 if ((offset < store_info->end)
2295 && (offset + width > store_info->begin))
2296 remove = true;
2297 }
2298 }
2299
2300 /* else
2301 The else case that is missing here is that the
2302 bases are constant but different. There is nothing
2303 to do here because there is no overlap. */
2304
2305 if (remove)
2306 {
2307 if (dump_file && (dump_flags & TDF_DETAILS))
2308 dump_insn_info ("removing from active", i_ptr);
2309
2310 active_local_stores_len--;
2311 if (last)
2312 last->next_local_store = i_ptr->next_local_store;
2313 else
2314 active_local_stores = i_ptr->next_local_store;
2315 }
2316 else
2317 last = i_ptr;
2318 i_ptr = i_ptr->next_local_store;
2319 }
2320 }
2321 else
2322 {
2323 insn_info_t i_ptr = active_local_stores;
2324 insn_info_t last = NULL;
2325 if (dump_file && (dump_flags & TDF_DETAILS))
2326 {
2327 fprintf (dump_file, " processing cselib load mem:");
2328 print_inline_rtx (dump_file, mem, 0);
2329 fprintf (dump_file, "\n");
2330 }
2331
2332 while (i_ptr)
2333 {
2334 bool remove = false;
2335 store_info_t store_info = i_ptr->store_rec;
2336
2337 if (dump_file && (dump_flags & TDF_DETAILS))
2338 fprintf (dump_file, " processing cselib load against insn %d\n",
2339 INSN_UID (i_ptr->insn));
2340
2341 /* Skip the clobbers. */
2342 while (!store_info->is_set)
2343 store_info = store_info->next;
2344
2345 /* If this read is just reading back something that we just
2346 stored, rewrite the read. */
2347 if (store_info->rhs
2348 && store_info->group_id == -1
2349 && store_info->cse_base == base
2350 && width != -1
2351 && offset >= store_info->begin
2352 && offset + width <= store_info->end
2353 && all_positions_needed_p (store_info,
2354 offset - store_info->begin, width)
2355 && replace_read (store_info, i_ptr, read_info, insn_info, loc,
2356 bb_info->regs_live))
2357 return;
2358
2359 if (!store_info->alias_set)
2360 remove = canon_true_dependence (store_info->mem,
2361 GET_MODE (store_info->mem),
2362 store_info->mem_addr,
2363 mem, mem_addr);
2364
2365 if (remove)
2366 {
2367 if (dump_file && (dump_flags & TDF_DETAILS))
2368 dump_insn_info ("removing from active", i_ptr);
2369
2370 active_local_stores_len--;
2371 if (last)
2372 last->next_local_store = i_ptr->next_local_store;
2373 else
2374 active_local_stores = i_ptr->next_local_store;
2375 }
2376 else
2377 last = i_ptr;
2378 i_ptr = i_ptr->next_local_store;
2379 }
2380 }
2381 }
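/* Illustrative note (not from the original source): the same-group arm
   above uses the classic half-open interval test.  A hedged sketch:  */
#if 0
static bool
ranges_overlap_p (HOST_WIDE_INT begin1, HOST_WIDE_INT end1,
		  HOST_WIDE_INT begin2, HOST_WIDE_INT end2)
{
  /* [begin1, end1) and [begin2, end2) intersect iff each starts
     before the other ends.  */
  return begin1 < end2 && begin2 < end1;
}
/* So a store to bytes [0, 4) is kept alive by a read of [2, 6) but
   not by a read of [4, 8).  */
#endif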
2382
2383 /* A note_uses callback in which DATA is the bb_info_t passed on to
2384    check_mem_read_rtx.  Apply check_mem_read_rtx to every MEM found
2385    in any part of *LOC.  */
2386
2387 static void
2388 check_mem_read_use (rtx *loc, void *data)
2389 {
2390 subrtx_ptr_iterator::array_type array;
2391 FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
2392 {
2393 rtx *loc = *iter;
2394 if (MEM_P (*loc))
2395 check_mem_read_rtx (loc, (bb_info_t) data);
2396 }
2397 }
2398
2399
2400 /* Get arguments passed to CALL_INSN. Return TRUE if successful.
2401 So far it only handles arguments passed in registers. */
2402
2403 static bool
2404 get_call_args (rtx call_insn, tree fn, rtx *args, int nargs)
2405 {
2406 CUMULATIVE_ARGS args_so_far_v;
2407 cumulative_args_t args_so_far;
2408 tree arg;
2409 int idx;
2410
2411 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
2412 args_so_far = pack_cumulative_args (&args_so_far_v);
2413
2414 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
2415 for (idx = 0;
2416 arg != void_list_node && idx < nargs;
2417 arg = TREE_CHAIN (arg), idx++)
2418 {
2419 machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
2420 rtx reg, link, tmp;
2421 reg = targetm.calls.function_arg (args_so_far, mode, NULL_TREE, true);
2422 if (!reg || !REG_P (reg) || GET_MODE (reg) != mode
2423 || GET_MODE_CLASS (mode) != MODE_INT)
2424 return false;
2425
2426 for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
2427 link;
2428 link = XEXP (link, 1))
2429 if (GET_CODE (XEXP (link, 0)) == USE)
2430 {
2431 args[idx] = XEXP (XEXP (link, 0), 0);
2432 if (REG_P (args[idx])
2433 && REGNO (args[idx]) == REGNO (reg)
2434 && (GET_MODE (args[idx]) == mode
2435 || (GET_MODE_CLASS (GET_MODE (args[idx])) == MODE_INT
2436 && (GET_MODE_SIZE (GET_MODE (args[idx]))
2437 <= UNITS_PER_WORD)
2438 && (GET_MODE_SIZE (GET_MODE (args[idx]))
2439 > GET_MODE_SIZE (mode)))))
2440 break;
2441 }
2442 if (!link)
2443 return false;
2444
2445 tmp = cselib_expand_value_rtx (args[idx], scratch, 5);
2446 if (GET_MODE (args[idx]) != mode)
2447 {
2448 if (!tmp || !CONST_INT_P (tmp))
2449 return false;
2450 tmp = gen_int_mode (INTVAL (tmp), mode);
2451 }
2452 if (tmp)
2453 args[idx] = tmp;
2454
2455 targetm.calls.function_arg_advance (args_so_far, mode, NULL_TREE, true);
2456 }
2457 if (arg != void_list_node || idx != nargs)
2458 return false;
2459 return true;
2460 }
2461
2462 /* Return a bitmap of the fixed registers contained in IN. */
2463
2464 static bitmap
2465 copy_fixed_regs (const_bitmap in)
2466 {
2467 bitmap ret;
2468
2469 ret = ALLOC_REG_SET (NULL);
2470 bitmap_and (ret, in, fixed_reg_set_regset);
2471 return ret;
2472 }
2473
2474 /* Apply record_store to all candidate stores in INSN.  Mark INSN as
2475    not deletable if some part of it is not a candidate store and
2476    assigns to a non-register target.  */
2477
2478 static void
2479 scan_insn (bb_info_t bb_info, rtx_insn *insn)
2480 {
2481 rtx body;
2482 insn_info_t insn_info = (insn_info_t) pool_alloc (insn_info_pool);
2483 int mems_found = 0;
2484 memset (insn_info, 0, sizeof (struct insn_info));
2485
2486 if (dump_file && (dump_flags & TDF_DETAILS))
2487 fprintf (dump_file, "\n**scanning insn=%d\n",
2488 INSN_UID (insn));
2489
2490 insn_info->prev_insn = bb_info->last_insn;
2491 insn_info->insn = insn;
2492 bb_info->last_insn = insn_info;
2493
2494 if (DEBUG_INSN_P (insn))
2495 {
2496 insn_info->cannot_delete = true;
2497 return;
2498 }
2499
2500 /* Look at all of the uses in the insn. */
2501 note_uses (&PATTERN (insn), check_mem_read_use, bb_info);
2502
2503 if (CALL_P (insn))
2504 {
2505 bool const_call;
2506 tree memset_call = NULL_TREE;
2507
2508 insn_info->cannot_delete = true;
2509
2510 	  /* Const functions cannot do anything bad, i.e. read memory;
2511 	     however, they can read their parameters, which may have
2512 	     been pushed onto the stack.
2513 	     memset and bzero don't read memory either.  */
2514 const_call = RTL_CONST_CALL_P (insn);
2515 if (!const_call)
2516 {
2517 rtx call = get_call_rtx_from (insn);
2518 if (call && GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
2519 {
2520 rtx symbol = XEXP (XEXP (call, 0), 0);
2521 if (SYMBOL_REF_DECL (symbol)
2522 && TREE_CODE (SYMBOL_REF_DECL (symbol)) == FUNCTION_DECL)
2523 {
2524 if ((DECL_BUILT_IN_CLASS (SYMBOL_REF_DECL (symbol))
2525 == BUILT_IN_NORMAL
2526 && (DECL_FUNCTION_CODE (SYMBOL_REF_DECL (symbol))
2527 == BUILT_IN_MEMSET))
2528 || SYMBOL_REF_DECL (symbol) == block_clear_fn)
2529 memset_call = SYMBOL_REF_DECL (symbol);
2530 }
2531 }
2532 }
2533 if (const_call || memset_call)
2534 {
2535 insn_info_t i_ptr = active_local_stores;
2536 insn_info_t last = NULL;
2537
2538 if (dump_file && (dump_flags & TDF_DETAILS))
2539 fprintf (dump_file, "%s call %d\n",
2540 const_call ? "const" : "memset", INSN_UID (insn));
2541
2542 /* See the head comment of the frame_read field. */
2543 if (reload_completed
2544 /* Tail calls are storing their arguments using
2545 arg pointer. If it is a frame pointer on the target,
2546 even before reload we need to kill frame pointer based
2547 stores. */
2548 || (SIBLING_CALL_P (insn)
2549 && HARD_FRAME_POINTER_IS_ARG_POINTER))
2550 insn_info->frame_read = true;
2551
2552 /* Loop over the active stores and remove those which are
2553 killed by the const function call. */
2554 while (i_ptr)
2555 {
2556 bool remove_store = false;
2557
2558 /* The stack pointer based stores are always killed. */
2559 if (i_ptr->stack_pointer_based)
2560 remove_store = true;
2561
2562 /* If the frame is read, the frame related stores are killed. */
2563 else if (insn_info->frame_read)
2564 {
2565 store_info_t store_info = i_ptr->store_rec;
2566
2567 /* Skip the clobbers. */
2568 while (!store_info->is_set)
2569 store_info = store_info->next;
2570
2571 if (store_info->group_id >= 0
2572 && rtx_group_vec[store_info->group_id]->frame_related)
2573 remove_store = true;
2574 }
2575
2576 if (remove_store)
2577 {
2578 if (dump_file && (dump_flags & TDF_DETAILS))
2579 dump_insn_info ("removing from active", i_ptr);
2580
2581 active_local_stores_len--;
2582 if (last)
2583 last->next_local_store = i_ptr->next_local_store;
2584 else
2585 active_local_stores = i_ptr->next_local_store;
2586 }
2587 else
2588 last = i_ptr;
2589
2590 i_ptr = i_ptr->next_local_store;
2591 }
2592
2593 if (memset_call)
2594 {
2595 rtx args[3];
2596 if (get_call_args (insn, memset_call, args, 3)
2597 && CONST_INT_P (args[1])
2598 && CONST_INT_P (args[2])
2599 && INTVAL (args[2]) > 0)
2600 {
2601 rtx mem = gen_rtx_MEM (BLKmode, args[0]);
2602 set_mem_size (mem, INTVAL (args[2]));
2603 body = gen_rtx_SET (VOIDmode, mem, args[1]);
2604 mems_found += record_store (body, bb_info);
2605 if (dump_file && (dump_flags & TDF_DETAILS))
2606 fprintf (dump_file, "handling memset as BLKmode store\n");
2607 if (mems_found == 1)
2608 {
2609 if (active_local_stores_len++
2610 >= PARAM_VALUE (PARAM_MAX_DSE_ACTIVE_LOCAL_STORES))
2611 {
2612 active_local_stores_len = 1;
2613 active_local_stores = NULL;
2614 }
2615 insn_info->fixed_regs_live
2616 = copy_fixed_regs (bb_info->regs_live);
2617 insn_info->next_local_store = active_local_stores;
2618 active_local_stores = insn_info;
2619 }
2620 }
2621 }
2622 }
2623 else if (SIBLING_CALL_P (insn) && reload_completed)
2624 /* Arguments for a sibling call that are pushed to memory are passed
2625 using the incoming argument pointer of the current function. After
2626 reload that might be (and likely is) frame pointer based. */
2627 add_wild_read (bb_info);
2628 else
2629 /* Every other call, including pure functions, may read any memory
2630 that is not relative to the frame. */
2631 add_non_frame_wild_read (bb_info);
2632
2633 return;
2634 }
2635
2636 /* Assuming that there are sets in these insns, we cannot delete
2637 them. */
2638 if ((GET_CODE (PATTERN (insn)) == CLOBBER)
2639 || volatile_refs_p (PATTERN (insn))
2640 || (!cfun->can_delete_dead_exceptions && !insn_nothrow_p (insn))
2641 || (RTX_FRAME_RELATED_P (insn))
2642 || find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX))
2643 insn_info->cannot_delete = true;
2644
2645 body = PATTERN (insn);
2646 if (GET_CODE (body) == PARALLEL)
2647 {
2648 int i;
2649 for (i = 0; i < XVECLEN (body, 0); i++)
2650 mems_found += record_store (XVECEXP (body, 0, i), bb_info);
2651 }
2652 else
2653 mems_found += record_store (body, bb_info);
2654
2655 if (dump_file && (dump_flags & TDF_DETAILS))
2656 fprintf (dump_file, "mems_found = %d, cannot_delete = %s\n",
2657 mems_found, insn_info->cannot_delete ? "true" : "false");
2658
2659   /* If we found exactly one set of a mem, add the insn to
2660      active_local_stores so that it can be locally deleted if found dead
2661      or used for replace_read and redundant constant store elimination;
2662      otherwise mark it as not deletable.  This simplifies later processing.  */
2663 if (mems_found == 1)
2664 {
2665 if (active_local_stores_len++
2666 >= PARAM_VALUE (PARAM_MAX_DSE_ACTIVE_LOCAL_STORES))
2667 {
2668 active_local_stores_len = 1;
2669 active_local_stores = NULL;
2670 }
2671 insn_info->fixed_regs_live = copy_fixed_regs (bb_info->regs_live);
2672 insn_info->next_local_store = active_local_stores;
2673 active_local_stores = insn_info;
2674 }
2675 else
2676 insn_info->cannot_delete = true;
2677 }
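/* Illustrative note (not from the original source): only insns with
   exactly one candidate store become deletion candidates.  E.g.

	(set (mem:SI addr) (reg:SI r1))             mems_found == 1

   joins active_local_stores, while

	(parallel [(set (mem:SI a) (reg:SI r1))
		   (set (mem:SI b) (reg:SI r2))])   mems_found == 2

   is marked cannot_delete; its stores can still kill earlier stores,
   but the insn itself will never be removed.  */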
2678
2679
2680 /* Remove BASE from the set of active_local_stores. This is a
2681 callback from cselib that is used to get rid of the stores in
2682 active_local_stores. */
2683
2684 static void
2685 remove_useless_values (cselib_val *base)
2686 {
2687 insn_info_t insn_info = active_local_stores;
2688 insn_info_t last = NULL;
2689
2690 while (insn_info)
2691 {
2692 store_info_t store_info = insn_info->store_rec;
2693 bool del = false;
2694
2695 /* If ANY of the store_infos match the cselib group that is
2696 	 being deleted, then the insn cannot be deleted.  */
2697 while (store_info)
2698 {
2699 if ((store_info->group_id == -1)
2700 && (store_info->cse_base == base))
2701 {
2702 del = true;
2703 break;
2704 }
2705 store_info = store_info->next;
2706 }
2707
2708 if (del)
2709 {
2710 active_local_stores_len--;
2711 if (last)
2712 last->next_local_store = insn_info->next_local_store;
2713 else
2714 active_local_stores = insn_info->next_local_store;
2715 free_store_info (insn_info);
2716 }
2717 else
2718 last = insn_info;
2719
2720 insn_info = insn_info->next_local_store;
2721 }
2722 }
2723
2724
2725 /* Do all of step 1. */
2726
2727 static void
2728 dse_step1 (void)
2729 {
2730 basic_block bb;
2731 bitmap regs_live = BITMAP_ALLOC (&reg_obstack);
2732
2733 cselib_init (0);
2734 all_blocks = BITMAP_ALLOC (NULL);
2735 bitmap_set_bit (all_blocks, ENTRY_BLOCK);
2736 bitmap_set_bit (all_blocks, EXIT_BLOCK);
2737
2738 FOR_ALL_BB_FN (bb, cfun)
2739 {
2740 insn_info_t ptr;
2741 bb_info_t bb_info = (bb_info_t) pool_alloc (bb_info_pool);
2742
2743 memset (bb_info, 0, sizeof (struct dse_bb_info));
2744 bitmap_set_bit (all_blocks, bb->index);
2745 bb_info->regs_live = regs_live;
2746
2747 bitmap_copy (regs_live, DF_LR_IN (bb));
2748 df_simulate_initialize_forwards (bb, regs_live);
2749
2750 bb_table[bb->index] = bb_info;
2751 cselib_discard_hook = remove_useless_values;
2752
2753 if (bb->index >= NUM_FIXED_BLOCKS)
2754 {
2755 rtx_insn *insn;
2756
2757 cse_store_info_pool
2758 = create_alloc_pool ("cse_store_info_pool",
2759 sizeof (struct store_info), 100);
2760 active_local_stores = NULL;
2761 active_local_stores_len = 0;
2762 cselib_clear_table ();
2763
2764 /* Scan the insns. */
2765 FOR_BB_INSNS (bb, insn)
2766 {
2767 if (INSN_P (insn))
2768 scan_insn (bb_info, insn);
2769 cselib_process_insn (insn);
2770 if (INSN_P (insn))
2771 df_simulate_one_insn_forwards (bb, insn, regs_live);
2772 }
2773
2774 /* This is something of a hack, because the global algorithm
2775 is supposed to take care of the case where stores go dead
2776 at the end of the function. However, the global
2777 algorithm must take a more conservative view of block
2778 mode reads than the local alg does. So to get the case
2779 where you have a store to the frame followed by a non
2780 overlapping block more read, we look at the active local
2781 stores at the end of the function and delete all of the
2782 frame and spill based ones. */
2783 if (stores_off_frame_dead_at_return
2784 && (EDGE_COUNT (bb->succs) == 0
2785 || (single_succ_p (bb)
2786 && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)
2787 && ! crtl->calls_eh_return)))
2788 {
2789 insn_info_t i_ptr = active_local_stores;
2790 while (i_ptr)
2791 {
2792 store_info_t store_info = i_ptr->store_rec;
2793
2794 /* Skip the clobbers. */
2795 while (!store_info->is_set)
2796 store_info = store_info->next;
2797 if (store_info->alias_set && !i_ptr->cannot_delete)
2798 delete_dead_store_insn (i_ptr);
2799 else
2800 if (store_info->group_id >= 0)
2801 {
2802 group_info_t group
2803 = rtx_group_vec[store_info->group_id];
2804 if (group->frame_related && !i_ptr->cannot_delete)
2805 delete_dead_store_insn (i_ptr);
2806 }
2807
2808 i_ptr = i_ptr->next_local_store;
2809 }
2810 }
2811
2812 /* Get rid of the loads that were discovered in
2813 replace_read. Cselib is finished with this block. */
2814 while (deferred_change_list)
2815 {
2816 deferred_change_t next = deferred_change_list->next;
2817
2818 /* There is no reason to validate this change. That was
2819 done earlier. */
2820 *deferred_change_list->loc = deferred_change_list->reg;
2821 pool_free (deferred_change_pool, deferred_change_list);
2822 deferred_change_list = next;
2823 }
2824
2825 /* Get rid of all of the cselib based store_infos in this
2826 block and mark the containing insns as not being
2827 deletable. */
2828 ptr = bb_info->last_insn;
2829 while (ptr)
2830 {
2831 if (ptr->contains_cselib_groups)
2832 {
2833 store_info_t s_info = ptr->store_rec;
2834 while (s_info && !s_info->is_set)
2835 s_info = s_info->next;
2836 if (s_info
2837 && s_info->redundant_reason
2838 && s_info->redundant_reason->insn
2839 && !ptr->cannot_delete)
2840 {
2841 if (dump_file && (dump_flags & TDF_DETAILS))
2842 fprintf (dump_file, "Locally deleting insn %d "
2843 "because insn %d stores the "
2844 "same value and couldn't be "
2845 "eliminated\n",
2846 INSN_UID (ptr->insn),
2847 INSN_UID (s_info->redundant_reason->insn));
2848 delete_dead_store_insn (ptr);
2849 }
2850 free_store_info (ptr);
2851 }
2852 else
2853 {
2854 store_info_t s_info;
2855
2856 /* Free at least positions_needed bitmaps. */
2857 for (s_info = ptr->store_rec; s_info; s_info = s_info->next)
2858 if (s_info->is_large)
2859 {
2860 BITMAP_FREE (s_info->positions_needed.large.bmap);
2861 s_info->is_large = false;
2862 }
2863 }
2864 ptr = ptr->prev_insn;
2865 }
2866
2867 free_alloc_pool (cse_store_info_pool);
2868 }
2869 bb_info->regs_live = NULL;
2870 }
2871
2872 BITMAP_FREE (regs_live);
2873 cselib_finish ();
2874 rtx_group_table->empty ();
2875 }
2876
2877 \f
2878 /*----------------------------------------------------------------------------
2879 Second step.
2880
2881 Assign each byte position in the stores that we are going to
2882 analyze globally to a position in the bitmaps. Returns true if
2883 there are any bit positions assigned.
2884 ----------------------------------------------------------------------------*/
2885
2886 static void
2887 dse_step2_init (void)
2888 {
2889 unsigned int i;
2890 group_info_t group;
2891
2892 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
2893 {
2894 /* For all non stack related bases, we only consider a store to
2895 be deletable if there are two or more stores for that
2896 position. This is because it takes one store to make the
2897 other store redundant. However, for the stores that are
2898 stack related, we consider them if there is only one store
2899 for the position. We do this because the stack related
2900 	 stores can be deleted if there is no read between them and
2901 the end of the function.
2902
2903 To make this work in the current framework, we take the stack
2904 	 related bases and add all of the bits from store1 into store2.
2905 	 This has the effect of making them eligible even if there is
2906 only one store. */
2907
2908 if (stores_off_frame_dead_at_return && group->frame_related)
2909 {
2910 bitmap_ior_into (group->store2_n, group->store1_n);
2911 bitmap_ior_into (group->store2_p, group->store1_p);
2912 if (dump_file && (dump_flags & TDF_DETAILS))
2913 fprintf (dump_file, "group %d is frame related ", i);
2914 }
2915
2916 group->offset_map_size_n++;
2917 group->offset_map_n = XOBNEWVEC (&dse_obstack, int,
2918 group->offset_map_size_n);
2919 group->offset_map_size_p++;
2920 group->offset_map_p = XOBNEWVEC (&dse_obstack, int,
2921 group->offset_map_size_p);
2922 group->process_globally = false;
2923 if (dump_file && (dump_flags & TDF_DETAILS))
2924 {
2925 fprintf (dump_file, "group %d(%d+%d): ", i,
2926 (int)bitmap_count_bits (group->store2_n),
2927 (int)bitmap_count_bits (group->store2_p));
2928 bitmap_print (dump_file, group->store2_n, "n ", " ");
2929 bitmap_print (dump_file, group->store2_p, "p ", "\n");
2930 }
2931 }
2932 }
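/* Illustrative note (not from the original source): a hedged sketch of
   the store1/store2 trick above, using plain masks for one group's
   positive offsets.  store1 holds positions stored at least once,
   store2 positions stored at least twice.  */
#if 0
unsigned int store1_p = 0x0f;   /* bytes 0-3 each stored once          */
unsigned int store2_p = 0x00;   /* nothing stored twice yet            */
if (frame_related)              /* frame stores die at function exit,  */
  store2_p |= store1_p;         /* so one store is enough: now 0x0f    */
#endif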
2933
2934
2935 /* Init the offset tables for the normal case. */
2936
2937 static bool
2938 dse_step2_nospill (void)
2939 {
2940 unsigned int i;
2941 group_info_t group;
2942   /* Position 0 is unused because 0 is used in the offset maps to mean
2943      "no position assigned".  */
2944 current_position = 1;
2945 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
2946 {
2947 bitmap_iterator bi;
2948 unsigned int j;
2949
2950 if (group == clear_alias_group)
2951 continue;
2952
2953 memset (group->offset_map_n, 0, sizeof (int) * group->offset_map_size_n);
2954 memset (group->offset_map_p, 0, sizeof (int) * group->offset_map_size_p);
2955 bitmap_clear (group->group_kill);
2956
2957 EXECUTE_IF_SET_IN_BITMAP (group->store2_n, 0, j, bi)
2958 {
2959 bitmap_set_bit (group->group_kill, current_position);
2960 if (bitmap_bit_p (group->escaped_n, j))
2961 bitmap_set_bit (kill_on_calls, current_position);
2962 group->offset_map_n[j] = current_position++;
2963 group->process_globally = true;
2964 }
2965 EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
2966 {
2967 bitmap_set_bit (group->group_kill, current_position);
2968 if (bitmap_bit_p (group->escaped_p, j))
2969 bitmap_set_bit (kill_on_calls, current_position);
2970 group->offset_map_p[j] = current_position++;
2971 group->process_globally = true;
2972 }
2973 }
2974 return current_position != 1;
2975 }
2976
2977
2978 \f
2979 /*----------------------------------------------------------------------------
2980 Third step.
2981
2982 Build the bit vectors for the transfer functions.
2983 ----------------------------------------------------------------------------*/
2984
2985
2986 /* Look up the bitmap index for OFFSET in GROUP_INFO. If it is not
2987 there, return 0. */
2988
2989 static int
2990 get_bitmap_index (group_info_t group_info, HOST_WIDE_INT offset)
2991 {
2992 if (offset < 0)
2993 {
2994 HOST_WIDE_INT offset_p = -offset;
2995 if (offset_p >= group_info->offset_map_size_n)
2996 return 0;
2997 return group_info->offset_map_n[offset_p];
2998 }
2999 else
3000 {
3001 if (offset >= group_info->offset_map_size_p)
3002 return 0;
3003 return group_info->offset_map_p[offset];
3004 }
3005 }
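/* Illustrative note (not from the original source): a hedged example of
   the two-sided offset map.  Negative offsets index offset_map_n by
   their magnitude; non-negative offsets index offset_map_p directly.  */
#if 0
int idx_minus_4 = get_bitmap_index (group_info, -4); /* offset_map_n[4] */
int idx_plus_8  = get_bitmap_index (group_info, 8);  /* offset_map_p[8] */
/* A result of 0 means "no global position assigned"; position 0 is
   deliberately left unused by dse_step2_nospill.  */
#endif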
3006
3007
3008 /* Process the STORE_INFOs into the bitmaps GEN and KILL.  KILL
3009 may be NULL. */
3010
3011 static void
3012 scan_stores_nospill (store_info_t store_info, bitmap gen, bitmap kill)
3013 {
3014 while (store_info)
3015 {
3016 HOST_WIDE_INT i;
3017 group_info_t group_info
3018 = rtx_group_vec[store_info->group_id];
3019 if (group_info->process_globally)
3020 for (i = store_info->begin; i < store_info->end; i++)
3021 {
3022 int index = get_bitmap_index (group_info, i);
3023 if (index != 0)
3024 {
3025 bitmap_set_bit (gen, index);
3026 if (kill)
3027 bitmap_clear_bit (kill, index);
3028 }
3029 }
3030 store_info = store_info->next;
3031 }
3032 }
3033
3034
3035 /* Process the STORE_INFOs into the bitmaps GEN and KILL.  KILL
3036 may be NULL. */
3037
3038 static void
3039 scan_stores_spill (store_info_t store_info, bitmap gen, bitmap kill)
3040 {
3041 while (store_info)
3042 {
3043 if (store_info->alias_set)
3044 {
3045 int index = get_bitmap_index (clear_alias_group,
3046 store_info->alias_set);
3047 if (index != 0)
3048 {
3049 bitmap_set_bit (gen, index);
3050 if (kill)
3051 bitmap_clear_bit (kill, index);
3052 }
3053 }
3054 store_info = store_info->next;
3055 }
3056 }
3057
3058
3059 /* Process the READ_INFOs into the bitmaps GEN and KILL.  KILL
3060 may be NULL. */
3061
3062 static void
3063 scan_reads_nospill (insn_info_t insn_info, bitmap gen, bitmap kill)
3064 {
3065 read_info_t read_info = insn_info->read_rec;
3066 int i;
3067 group_info_t group;
3068
3069 /* If this insn reads the frame, kill all the frame related stores. */
3070 if (insn_info->frame_read)
3071 {
3072 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3073 if (group->process_globally && group->frame_related)
3074 {
3075 if (kill)
3076 bitmap_ior_into (kill, group->group_kill);
3077 bitmap_and_compl_into (gen, group->group_kill);
3078 }
3079 }
3080 if (insn_info->non_frame_wild_read)
3081 {
3082 /* Kill all non-frame related stores. Kill all stores of variables that
3083 escape. */
3084 if (kill)
3085 bitmap_ior_into (kill, kill_on_calls);
3086 bitmap_and_compl_into (gen, kill_on_calls);
3087 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3088 if (group->process_globally && !group->frame_related)
3089 {
3090 if (kill)
3091 bitmap_ior_into (kill, group->group_kill);
3092 bitmap_and_compl_into (gen, group->group_kill);
3093 }
3094 }
3095 while (read_info)
3096 {
3097 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3098 {
3099 if (group->process_globally)
3100 {
3101 if (i == read_info->group_id)
3102 {
3103 if (read_info->begin > read_info->end)
3104 {
3105 /* Begin > end for block mode reads. */
3106 if (kill)
3107 bitmap_ior_into (kill, group->group_kill);
3108 bitmap_and_compl_into (gen, group->group_kill);
3109 }
3110 else
3111 {
3112 /* The groups are the same, just process the
3113 offsets. */
3114 HOST_WIDE_INT j;
3115 for (j = read_info->begin; j < read_info->end; j++)
3116 {
3117 int index = get_bitmap_index (group, j);
3118 if (index != 0)
3119 {
3120 if (kill)
3121 bitmap_set_bit (kill, index);
3122 bitmap_clear_bit (gen, index);
3123 }
3124 }
3125 }
3126 }
3127 else
3128 {
3129 	      /* The groups are different; if the alias sets
3130 conflict, clear the entire group. We only need
3131 to apply this test if the read_info is a cselib
3132 read. Anything with a constant base cannot alias
3133 something else with a different constant
3134 base. */
3135 if ((read_info->group_id < 0)
3136 && canon_true_dependence (group->base_mem,
3137 GET_MODE (group->base_mem),
3138 group->canon_base_addr,
3139 read_info->mem, NULL_RTX))
3140 {
3141 if (kill)
3142 bitmap_ior_into (kill, group->group_kill);
3143 bitmap_and_compl_into (gen, group->group_kill);
3144 }
3145 }
3146 }
3147 }
3148
3149 read_info = read_info->next;
3150 }
3151 }
3152
3153 /* Process the READ_INFOs into the bitmaps GEN and KILL.  KILL
3154 may be NULL. */
3155
3156 static void
3157 scan_reads_spill (read_info_t read_info, bitmap gen, bitmap kill)
3158 {
3159 while (read_info)
3160 {
3161 if (read_info->alias_set)
3162 {
3163 int index = get_bitmap_index (clear_alias_group,
3164 read_info->alias_set);
3165 if (index != 0)
3166 {
3167 if (kill)
3168 bitmap_set_bit (kill, index);
3169 bitmap_clear_bit (gen, index);
3170 }
3171 }
3172
3173 read_info = read_info->next;
3174 }
3175 }
3176
3177
3178 /* Return the insn in BB_INFO before the first wild read or, if there
3179    are no wild reads in the block, the last insn.  */
3180
3181 static insn_info_t
3182 find_insn_before_first_wild_read (bb_info_t bb_info)
3183 {
3184 insn_info_t insn_info = bb_info->last_insn;
3185 insn_info_t last_wild_read = NULL;
3186
3187 while (insn_info)
3188 {
3189 if (insn_info->wild_read)
3190 {
3191 last_wild_read = insn_info->prev_insn;
3192 /* Block starts with wild read. */
3193 if (!last_wild_read)
3194 return NULL;
3195 }
3196
3197 insn_info = insn_info->prev_insn;
3198 }
3199
3200 if (last_wild_read)
3201 return last_wild_read;
3202 else
3203 return bb_info->last_insn;
3204 }
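/* Illustrative note (not from the original source): for a block whose
   insns are i1 i2 W i3, with W a wild read, this returns i2.  The
   scan in dse_step3_scan then covers only i2 and i1; W and i3 do not
   matter, and the block gets no kill set (treated as all ones).  */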
3205
3206
3207 /* Scan the insns in BB_INFO starting at PTR and going to the top of
3208 the block in order to build the gen and kill sets for the block.
3209 We start at ptr which may be the last insn in the block or may be
3210 the first insn with a wild read. In the latter case we are able to
3211 skip the rest of the block because it just does not matter:
3212 anything that happens is hidden by the wild read. */
3213
3214 static void
3215 dse_step3_scan (bool for_spills, basic_block bb)
3216 {
3217 bb_info_t bb_info = bb_table[bb->index];
3218 insn_info_t insn_info;
3219
3220 if (for_spills)
3221 /* There are no wild reads in the spill case. */
3222 insn_info = bb_info->last_insn;
3223 else
3224 insn_info = find_insn_before_first_wild_read (bb_info);
3225
3226   /* In the spill case, or in the no_spill case when there is no wild
3227      read in the block, we will need a kill set.  */
3228 if (insn_info == bb_info->last_insn)
3229 {
3230 if (bb_info->kill)
3231 bitmap_clear (bb_info->kill);
3232 else
3233 bb_info->kill = BITMAP_ALLOC (&dse_bitmap_obstack);
3234 }
3235 else
3236 if (bb_info->kill)
3237 BITMAP_FREE (bb_info->kill);
3238
3239 while (insn_info)
3240 {
3241 /* There may have been code deleted by the dce pass run before
3242 this phase. */
3243 if (insn_info->insn && INSN_P (insn_info->insn))
3244 {
3245 /* Process the read(s) last. */
3246 if (for_spills)
3247 {
3248 scan_stores_spill (insn_info->store_rec, bb_info->gen, bb_info->kill);
3249 scan_reads_spill (insn_info->read_rec, bb_info->gen, bb_info->kill);
3250 }
3251 else
3252 {
3253 scan_stores_nospill (insn_info->store_rec, bb_info->gen, bb_info->kill);
3254 scan_reads_nospill (insn_info, bb_info->gen, bb_info->kill);
3255 }
3256 }
3257
3258 insn_info = insn_info->prev_insn;
3259 }
3260 }
3261
3262
3263 /* Set the gen set of the exit block, and also any block with no
3264 successors that does not have a wild read. */
3265
3266 static void
3267 dse_step3_exit_block_scan (bb_info_t bb_info)
3268 {
3269 /* The gen set is all 0's for the exit block except for the
3270 frame_pointer_group. */
3271
3272 if (stores_off_frame_dead_at_return)
3273 {
3274 unsigned int i;
3275 group_info_t group;
3276
3277 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3278 {
3279 if (group->process_globally && group->frame_related)
3280 bitmap_ior_into (bb_info->gen, group->group_kill);
3281 }
3282 }
3283 }
3284
3285
3286 /* Find all of the blocks that are not backwards reachable from the
3287 exit block or any block with no successors (BB). These are the
3288 infinite loops or infinite self loops. These blocks will still
3289 have their bits set in UNREACHABLE_BLOCKS. */
3290
3291 static void
3292 mark_reachable_blocks (sbitmap unreachable_blocks, basic_block bb)
3293 {
3294 edge e;
3295 edge_iterator ei;
3296
3297 if (bitmap_bit_p (unreachable_blocks, bb->index))
3298 {
3299 bitmap_clear_bit (unreachable_blocks, bb->index);
3300 FOR_EACH_EDGE (e, ei, bb->preds)
3301 {
3302 mark_reachable_blocks (unreachable_blocks, e->src);
3303 }
3304 }
3305 }
3306
3307 /* Build the transfer functions for the function. */
3308
3309 static void
3310 dse_step3 (bool for_spills)
3311 {
3312 basic_block bb;
3313 sbitmap unreachable_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
3314 sbitmap_iterator sbi;
3315 bitmap all_ones = NULL;
3316 unsigned int i;
3317
3318 bitmap_ones (unreachable_blocks);
3319
3320 FOR_ALL_BB_FN (bb, cfun)
3321 {
3322 bb_info_t bb_info = bb_table[bb->index];
3323 if (bb_info->gen)
3324 bitmap_clear (bb_info->gen);
3325 else
3326 bb_info->gen = BITMAP_ALLOC (&dse_bitmap_obstack);
3327
3328 if (bb->index == ENTRY_BLOCK)
3329 ;
3330 else if (bb->index == EXIT_BLOCK)
3331 dse_step3_exit_block_scan (bb_info);
3332 else
3333 dse_step3_scan (for_spills, bb);
3334 if (EDGE_COUNT (bb->succs) == 0)
3335 mark_reachable_blocks (unreachable_blocks, bb);
3336
3337 /* If this is the second time dataflow is run, delete the old
3338 sets. */
3339 if (bb_info->in)
3340 BITMAP_FREE (bb_info->in);
3341 if (bb_info->out)
3342 BITMAP_FREE (bb_info->out);
3343 }
3344
3345 /* For any block in an infinite loop, we must initialize the out set
3346 to all ones. This could be expensive, but almost never occurs in
3347 practice. However, it is common in regression tests. */
3348 EXECUTE_IF_SET_IN_BITMAP (unreachable_blocks, 0, i, sbi)
3349 {
3350 if (bitmap_bit_p (all_blocks, i))
3351 {
3352 bb_info_t bb_info = bb_table[i];
3353 if (!all_ones)
3354 {
3355 unsigned int j;
3356 group_info_t group;
3357
3358 all_ones = BITMAP_ALLOC (&dse_bitmap_obstack);
3359 FOR_EACH_VEC_ELT (rtx_group_vec, j, group)
3360 bitmap_ior_into (all_ones, group->group_kill);
3361 }
3362 if (!bb_info->out)
3363 {
3364 bb_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
3365 bitmap_copy (bb_info->out, all_ones);
3366 }
3367 }
3368 }
3369
3370 if (all_ones)
3371 BITMAP_FREE (all_ones);
3372 sbitmap_free (unreachable_blocks);
3373 }
3374
3375
3376 \f
3377 /*----------------------------------------------------------------------------
3378 Fourth step.
3379
3380 Solve the bitvector equations.
3381 ----------------------------------------------------------------------------*/
3382
3383
3384 /* Confluence function for blocks with no successors. Create an out
3385 set from the gen set of the exit block. This block logically has
3386 the exit block as a successor. */
3387
3388
3389
3390 static void
3391 dse_confluence_0 (basic_block bb)
3392 {
3393 bb_info_t bb_info = bb_table[bb->index];
3394
3395 if (bb->index == EXIT_BLOCK)
3396 return;
3397
3398 if (!bb_info->out)
3399 {
3400 bb_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
3401 bitmap_copy (bb_info->out, bb_table[EXIT_BLOCK]->gen);
3402 }
3403 }
3404
3405 /* Propagate the information from the in set of the dest of E to the
3406 out set of the src of E. If the various in or out sets are not
3407 there, that means they are all ones. */
3408
3409 static bool
3410 dse_confluence_n (edge e)
3411 {
3412 bb_info_t src_info = bb_table[e->src->index];
3413 bb_info_t dest_info = bb_table[e->dest->index];
3414
3415 if (dest_info->in)
3416 {
3417 if (src_info->out)
3418 bitmap_and_into (src_info->out, dest_info->in);
3419 else
3420 {
3421 src_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
3422 bitmap_copy (src_info->out, dest_info->in);
3423 }
3424 }
3425 return true;
3426 }
3427
3428
3429 /* Propagate the info from the out to the in set of BB_INDEX's basic
3430 block. There are three cases:
3431
3432 1) The block has no kill set. In this case the kill set is all
3433 ones. It does not matter what the out set of the block is, none of
3434 the info can reach the top. The only thing that reaches the top is
3435 the gen set and we just copy the set.
3436
3437 2) There is a kill set but no out set and bb has successors. In
3438 this case we just return. Eventually an out set will be created and
3439 it is better to wait than to create a set of ones.
3440
3441 3) There is both a kill and out set. We apply the obvious transfer
3442 function.
3443 */
3444
3445 static bool
3446 dse_transfer_function (int bb_index)
3447 {
3448 bb_info_t bb_info = bb_table[bb_index];
3449
3450 if (bb_info->kill)
3451 {
3452 if (bb_info->out)
3453 {
3454 /* Case 3 above. */
3455 if (bb_info->in)
3456 return bitmap_ior_and_compl (bb_info->in, bb_info->gen,
3457 bb_info->out, bb_info->kill);
3458 else
3459 {
3460 bb_info->in = BITMAP_ALLOC (&dse_bitmap_obstack);
3461 bitmap_ior_and_compl (bb_info->in, bb_info->gen,
3462 bb_info->out, bb_info->kill);
3463 return true;
3464 }
3465 }
3466 else
3467 /* Case 2 above. */
3468 return false;
3469 }
3470 else
3471 {
3472 /* Case 1 above. If there is already an in set, nothing
3473 happens. */
3474 if (bb_info->in)
3475 return false;
3476 else
3477 {
3478 bb_info->in = BITMAP_ALLOC (&dse_bitmap_obstack);
3479 bitmap_copy (bb_info->in, bb_info->gen);
3480 return true;
3481 }
3482 }
3483 }
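/* Illustrative note (not from the original source): in set notation,
   case 3 above computes

       IN(bb) = GEN(bb) U (OUT(bb) - KILL(bb)),

   the standard backward transfer function; bitmap_ior_and_compl
   computes exactly A | (B & ~C) in one call.  */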
3484
3485 /* Solve the dataflow equations. */
3486
3487 static void
3488 dse_step4 (void)
3489 {
3490 df_simple_dataflow (DF_BACKWARD, NULL, dse_confluence_0,
3491 dse_confluence_n, dse_transfer_function,
3492 all_blocks, df_get_postorder (DF_BACKWARD),
3493 df_get_n_blocks (DF_BACKWARD));
3494 if (dump_file && (dump_flags & TDF_DETAILS))
3495 {
3496 basic_block bb;
3497
3498 fprintf (dump_file, "\n\n*** Global dataflow info after analysis.\n");
3499 FOR_ALL_BB_FN (bb, cfun)
3500 {
3501 bb_info_t bb_info = bb_table[bb->index];
3502
3503 df_print_bb_index (bb, dump_file);
3504 if (bb_info->in)
3505 bitmap_print (dump_file, bb_info->in, " in: ", "\n");
3506 else
3507 fprintf (dump_file, " in: *MISSING*\n");
3508 if (bb_info->gen)
3509 bitmap_print (dump_file, bb_info->gen, " gen: ", "\n");
3510 else
3511 fprintf (dump_file, " gen: *MISSING*\n");
3512 if (bb_info->kill)
3513 bitmap_print (dump_file, bb_info->kill, " kill: ", "\n");
3514 else
3515 fprintf (dump_file, " kill: *MISSING*\n");
3516 if (bb_info->out)
3517 bitmap_print (dump_file, bb_info->out, " out: ", "\n");
3518 else
3519 fprintf (dump_file, " out: *MISSING*\n\n");
3520 }
3521 }
3522 }
3523
3524
3525 \f
3526 /*----------------------------------------------------------------------------
3527 Fifth step.
3528
3529 Delete the stores that can only be deleted using the global information.
3530 ----------------------------------------------------------------------------*/
3531
3532
3533 static void
3534 dse_step5_nospill (void)
3535 {
3536 basic_block bb;
3537 FOR_EACH_BB_FN (bb, cfun)
3538 {
3539 bb_info_t bb_info = bb_table[bb->index];
3540 insn_info_t insn_info = bb_info->last_insn;
3541 bitmap v = bb_info->out;
3542
3543 while (insn_info)
3544 {
3545 bool deleted = false;
3546 if (dump_file && insn_info->insn)
3547 {
3548 fprintf (dump_file, "starting to process insn %d\n",
3549 INSN_UID (insn_info->insn));
3550 bitmap_print (dump_file, v, " v: ", "\n");
3551 }
3552
3553 /* There may have been code deleted by the dce pass run before
3554 this phase. */
3555 if (insn_info->insn
3556 && INSN_P (insn_info->insn)
3557 && (!insn_info->cannot_delete)
3558 && (!bitmap_empty_p (v)))
3559 {
3560 store_info_t store_info = insn_info->store_rec;
3561
3562 /* Try to delete the current insn. */
3563 deleted = true;
3564
3565 /* Skip the clobbers. */
3566 while (!store_info->is_set)
3567 store_info = store_info->next;
3568
3569 if (store_info->alias_set)
3570 deleted = false;
3571 else
3572 {
3573 HOST_WIDE_INT i;
3574 group_info_t group_info
3575 = rtx_group_vec[store_info->group_id];
3576
3577 for (i = store_info->begin; i < store_info->end; i++)
3578 {
3579 int index = get_bitmap_index (group_info, i);
3580
3581 if (dump_file && (dump_flags & TDF_DETAILS))
3582 fprintf (dump_file, "i = %d, index = %d\n", (int)i, index);
3583 if (index == 0 || !bitmap_bit_p (v, index))
3584 {
3585 if (dump_file && (dump_flags & TDF_DETAILS))
3586 fprintf (dump_file, "failing at i = %d\n", (int)i);
3587 deleted = false;
3588 break;
3589 }
3590 }
3591 }
3592 if (deleted)
3593 {
3594 if (dbg_cnt (dse)
3595 && check_for_inc_dec_1 (insn_info))
3596 {
3597 delete_insn (insn_info->insn);
3598 insn_info->insn = NULL;
3599 globally_deleted++;
3600 }
3601 }
3602 }
3603 	  /* We do not want to process the local info if the insn was
3604 	     deleted.  For instance, if the insn did a wild read, we
3605 	     no longer need to trash the info.  */
3606 if (insn_info->insn
3607 && INSN_P (insn_info->insn)
3608 && (!deleted))
3609 {
3610 scan_stores_nospill (insn_info->store_rec, v, NULL);
3611 if (insn_info->wild_read)
3612 {
3613 if (dump_file && (dump_flags & TDF_DETAILS))
3614 fprintf (dump_file, "wild read\n");
3615 bitmap_clear (v);
3616 }
3617 else if (insn_info->read_rec
3618 || insn_info->non_frame_wild_read)
3619 {
3620 if (dump_file && !insn_info->non_frame_wild_read)
3621 fprintf (dump_file, "regular read\n");
3622 else if (dump_file && (dump_flags & TDF_DETAILS))
3623 fprintf (dump_file, "non-frame wild read\n");
3624 scan_reads_nospill (insn_info, v, NULL);
3625 }
3626 }
3627
3628 insn_info = insn_info->prev_insn;
3629 }
3630 }
3631 }
3632
3633
3634 \f
3635 /*----------------------------------------------------------------------------
3636 Sixth step.
3637
3638 Delete stores made redundant by earlier stores (which store the same
3639 value) that couldn't be eliminated.
3640 ----------------------------------------------------------------------------*/
3641
static void
dse_step6 (void)
{
  basic_block bb;

  FOR_ALL_BB_FN (bb, cfun)
    {
      bb_info_t bb_info = bb_table[bb->index];
      insn_info_t insn_info = bb_info->last_insn;

      while (insn_info)
        {
          /* There may have been code deleted by the dce pass run before
             this phase.  */
          if (insn_info->insn
              && INSN_P (insn_info->insn)
              && !insn_info->cannot_delete)
            {
              store_info_t s_info = insn_info->store_rec;

              while (s_info && !s_info->is_set)
                s_info = s_info->next;
              if (s_info
                  && s_info->redundant_reason
                  && s_info->redundant_reason->insn
                  && INSN_P (s_info->redundant_reason->insn))
                {
                  rtx_insn *rinsn = s_info->redundant_reason->insn;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "Locally deleting insn %d "
                                        "because insn %d stores the "
                                        "same value and couldn't be "
                                        "eliminated\n",
                             INSN_UID (insn_info->insn),
                             INSN_UID (rinsn));
                  delete_dead_store_insn (insn_info);
                }
            }
          insn_info = insn_info->prev_insn;
        }
    }
}
\f
/*----------------------------------------------------------------------------
   Seventh step.

   Destroy everything left standing.
----------------------------------------------------------------------------*/

static void
dse_step7 (void)
{
  bitmap_obstack_release (&dse_bitmap_obstack);
  obstack_free (&dse_obstack, NULL);

  end_alias_analysis ();
  free (bb_table);
  delete rtx_group_table;
  rtx_group_table = NULL;
  rtx_group_vec.release ();
  BITMAP_FREE (all_blocks);
  BITMAP_FREE (scratch);

  free_alloc_pool (rtx_store_info_pool);
  free_alloc_pool (read_info_pool);
  free_alloc_pool (insn_info_pool);
  free_alloc_pool (bb_info_pool);
  free_alloc_pool (rtx_group_info_pool);
  free_alloc_pool (deferred_change_pool);
}


/* -------------------------------------------------------------------------
   DSE
   ------------------------------------------------------------------------- */

/* Callback for running pass_rtl_dse.  */

static unsigned int
rest_of_handle_dse (void)
{
  df_set_flags (DF_DEFER_INSN_RESCAN);

  /* Need the notes since we must track live hardregs in the forwards
     direction.  */
  df_note_add_problem ();
  df_analyze ();

  dse_step0 ();
  dse_step1 ();
  dse_step2_init ();
  if (dse_step2_nospill ())
    {
      df_set_flags (DF_LR_RUN_DCE);
      df_analyze ();
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "doing global processing\n");
      dse_step3 (false);
      dse_step4 ();
      dse_step5_nospill ();
    }

  dse_step6 ();
  dse_step7 ();

  if (dump_file)
    fprintf (dump_file, "dse: local deletions = %d, global deletions = %d, "
             "spill deletions = %d\n",
             locally_deleted, globally_deleted, spill_deleted);

  /* DSE can eliminate potentially-trapping MEMs.
     Remove any EH edges associated with them.  */
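  /* As a sketch (under -fnon-call-exceptions): deleting a dead store
     through a pointer that might fault also deletes the only possible
     source of that fault, so the EH edge that modeled it is now dead.
     purge_all_dead_edges removes such edges, and cleanup_cfg then
     tidies whatever the purge leaves unreachable.  */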
  if ((locally_deleted || globally_deleted)
      && cfun->can_throw_non_call_exceptions
      && purge_all_dead_edges ())
    cleanup_cfg (0);

  return 0;
}

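/* The pass is registered twice.  dse1 runs early, before register
   allocation; dse2 runs again after reload, when spilling and register
   elimination have typically exposed new dead stores.  Both instances
   share rest_of_handle_dse above.  */
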
namespace {

const pass_data pass_data_rtl_dse1 =
{
  RTL_PASS, /* type */
  "dse1", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_DSE1, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_rtl_dse1 : public rtl_opt_pass
{
public:
  pass_rtl_dse1 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_rtl_dse1, ctxt)
  {}

  /* opt_pass methods: */
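  /* The debug counter makes this gate bisectable from the command
     line; as an illustrative invocation (assuming -fdbg-cnt support in
     the build), -fdbg-cnt=dse1:10 lets only the first ten queries of
     the counter succeed, after which the pass no longer runs.  */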
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_dse && dbg_cnt (dse1);
    }

  virtual unsigned int execute (function *) { return rest_of_handle_dse (); }

}; // class pass_rtl_dse1

} // anon namespace

rtl_opt_pass *
make_pass_rtl_dse1 (gcc::context *ctxt)
{
  return new pass_rtl_dse1 (ctxt);
}

namespace {

const pass_data pass_data_rtl_dse2 =
{
  RTL_PASS, /* type */
  "dse2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_DSE2, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_rtl_dse2 : public rtl_opt_pass
{
public:
  pass_rtl_dse2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_rtl_dse2, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_dse && dbg_cnt (dse2);
    }

  virtual unsigned int execute (function *) { return rest_of_handle_dse (); }

}; // class pass_rtl_dse2

} // anon namespace

rtl_opt_pass *
make_pass_rtl_dse2 (gcc::context *ctxt)
{
  return new pass_rtl_dse2 (ctxt);
}