re PR bootstrap/80867 (gnat bootstrap broken on powerpc64le-linux-gnu with -O3)
[gcc.git] / gcc / tree-ssa-dse.c
/* Dead store elimination
   Copyright (C) 2004-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "domwalk.h"
#include "tree-cfgcleanup.h"
#include "params.h"
#include "alias.h"

/* This file implements dead store elimination.

   A dead store is a store into a memory location which will later be
   overwritten by another store without any intervening loads.  In this
   case the earlier store can be deleted.

   In our SSA + virtual operand world we use immediate uses of virtual
   operands to detect dead stores.  If a store's virtual definition
   is used precisely once by a later store to the same location which
   post dominates the first store, then the first store is dead.

   The single use of the store's virtual definition ensures that
   there are no intervening aliased loads and the requirement that
   the second store post dominate the first ensures that if the earlier
   store executes, then the later store will execute before the function
   exits.

   It may help to think of this as first moving the earlier store to
   the point immediately before the later store.  Again, the single
   use of the virtual definition and the post-dominance relationship
   ensure that such movement would be safe.  Clearly if there are
   back to back stores, then the second is redundant.

   Reviewing section 10.7.2 in Morgan's "Building an Optimizing Compiler"
   may also help in understanding this code since it discusses the
   relationship between dead store and redundant load elimination.  In
   fact, they are the same transformation applied to different views of
   the CFG.  */
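
/* As a concrete illustration (a hypothetical GIMPLE fragment, not taken
   from any particular testcase), in

	# .MEM_2 = VDEF <.MEM_1>
	a.x = 1;
	# .MEM_3 = VDEF <.MEM_2>
	a.x = 2;

   the virtual definition .MEM_2 of the first store has exactly one use,
   a later store to the same location that post-dominates it, so the
   first store is dead and can be deleted.  */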

/* Bitmap of blocks that have had EH statements cleaned.  We should
   remove their dead edges eventually.  */
static bitmap need_eh_cleanup;

/* Return value from dse_classify_store.  */
enum dse_store_status
{
  DSE_STORE_LIVE,
  DSE_STORE_MAYBE_PARTIAL_DEAD,
  DSE_STORE_DEAD
};

/* STMT is a statement that may write into memory.  Analyze it and
   initialize WRITE to describe how STMT affects memory.

   Return TRUE if the statement was analyzed, FALSE otherwise.

   It is always safe to return FALSE.  But typically better optimization
   can be achieved by analyzing more statements.  */

static bool
initialize_ao_ref_for_dse (gimple *stmt, ao_ref *write)
{
  /* It's advantageous to handle certain mem* functions.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
	{
	case BUILT_IN_MEMCPY:
	case BUILT_IN_MEMMOVE:
	case BUILT_IN_MEMSET:
	  {
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (stmt) == 3)
	      size = gimple_call_arg (stmt, 2);
	    tree ptr = gimple_call_arg (stmt, 0);
	    ao_ref_init_from_ptr_and_size (write, ptr, size);
	    return true;
	  }
	default:
	  break;
	}
    }
  else if (is_gimple_assign (stmt))
    {
      ao_ref_init (write, gimple_assign_lhs (stmt));
      return true;
    }
  return false;
}

/* Given REF from the alias oracle, return TRUE if it is a valid
   memory reference for dead store elimination, FALSE otherwise.

   In particular, the reference must have a known base, known maximum
   size, start at a byte offset and have a size that is one or more
   bytes.  */

static bool
valid_ao_ref_for_dse (ao_ref *ref)
{
  return (ao_ref_base (ref)
	  && known_size_p (ref->max_size)
	  && maybe_ne (ref->size, 0)
	  && known_eq (ref->max_size, ref->size)
	  && known_ge (ref->offset, 0)
	  && multiple_p (ref->offset, BITS_PER_UNIT)
	  && multiple_p (ref->size, BITS_PER_UNIT));
}
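
/* For example (illustrative cases, not from a real dump): a store to
   a[i_3] with a variable index gives an ao_ref whose max_size covers
   the whole array while its size covers only one element, failing the
   known_eq check above, and a bit-field store at bit offset 3 fails
   the multiple_p check on the offset; neither is byte-tracked.  */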

/* Try to normalize COPY (an ao_ref) relative to REF.  Essentially when we are
   done COPY will only refer to bytes found within REF.  Return true if COPY
   is known to intersect at least one byte of REF.  */

static bool
normalize_ref (ao_ref *copy, ao_ref *ref)
{
  if (!ordered_p (copy->offset, ref->offset))
    return false;

  /* If COPY starts before REF, then reset the beginning of
     COPY to match REF and decrease the size of COPY by the
     number of bytes removed from COPY.  */
  if (maybe_lt (copy->offset, ref->offset))
    {
      poly_int64 diff = ref->offset - copy->offset;
      if (maybe_le (copy->size, diff))
	return false;
      copy->size -= diff;
      copy->offset = ref->offset;
    }

  poly_int64 diff = copy->offset - ref->offset;
  if (maybe_le (ref->size, diff))
    return false;

  /* If COPY extends beyond REF, chop off its size appropriately.  */
  poly_int64 limit = ref->size - diff;
  if (!ordered_p (limit, copy->size))
    return false;

  if (maybe_gt (copy->size, limit))
    copy->size = limit;
  return true;
}
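
/* A worked example with made-up constants: if REF spans bits [0, 128)
   (a 16-byte store) and COPY spans bits [-32, 160), the first clamp
   moves COPY's offset up to 0 and shrinks its size from 192 to 160
   bits, and the second clamp caps the size at REF's 128 bits.  COPY
   then describes exactly the bytes of REF and the function returns
   true.  */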

/* Clear any bytes written by STMT from the bitmap LIVE_BYTES.  The base
   address written by STMT must match the one found in REF, which must
   have its base address previously initialized.

   This routine must be conservative.  If we don't know the offset or
   actual size written, assume nothing was written.  */

static void
clear_bytes_written_by (sbitmap live_bytes, gimple *stmt, ao_ref *ref)
{
  ao_ref write;
  if (!initialize_ao_ref_for_dse (stmt, &write))
    return;

  /* Verify we have the same base memory address, that the write has
     a known size, and that it overlaps with REF.  */
  HOST_WIDE_INT start, size;
  if (valid_ao_ref_for_dse (&write)
      && operand_equal_p (write.base, ref->base, OEP_ADDRESS_OF)
      && known_eq (write.size, write.max_size)
      && normalize_ref (&write, ref)
      && (write.offset - ref->offset).is_constant (&start)
      && write.size.is_constant (&size))
    bitmap_clear_range (live_bytes, start / BITS_PER_UNIT,
			size / BITS_PER_UNIT);
}

/* REF is a memory write.  Extract relevant information from it and
   initialize the LIVE_BYTES bitmap.  If successful, return TRUE.
   Otherwise return FALSE.  */

static bool
setup_live_bytes_from_ref (ao_ref *ref, sbitmap live_bytes)
{
  HOST_WIDE_INT const_size;
  if (valid_ao_ref_for_dse (ref)
      && ref->size.is_constant (&const_size)
      && (const_size / BITS_PER_UNIT
	  <= PARAM_VALUE (PARAM_DSE_MAX_OBJECT_SIZE)))
    {
      bitmap_clear (live_bytes);
      bitmap_set_range (live_bytes, 0, const_size / BITS_PER_UNIT);
      return true;
    }
  return false;
}

/* Compute the number of bytes that we can trim from the head and
   tail of ORIG resulting in a bitmap that is a superset of LIVE.

   Store the number of bytes trimmed from the head and tail in
   TRIM_HEAD and TRIM_TAIL.

   STMT is the statement being trimmed and is used for debugging dump
   output only.  */

static void
compute_trims (ao_ref *ref, sbitmap live, int *trim_head, int *trim_tail,
	       gimple *stmt)
{
  /* We use sbitmaps biased such that ref->offset is bit zero and the bitmap
     extends through ref->size.  So we know that in the original bitmap
     bits 0..ref->size were true.  We don't actually need the bitmap, just
     the REF to compute the trims.  */

  /* Now identify how much, if any of the tail we can chop off.  */
  HOST_WIDE_INT const_size;
  if (ref->size.is_constant (&const_size))
    {
      int last_orig = (const_size / BITS_PER_UNIT) - 1;
      int last_live = bitmap_last_set_bit (live);
      *trim_tail = (last_orig - last_live) & ~0x1;
    }
  else
    *trim_tail = 0;

  /* Identify how much, if any of the head we can chop off.  */
  int first_orig = 0;
  int first_live = bitmap_first_set_bit (live);
  *trim_head = (first_live - first_orig) & ~0x1;

  if ((*trim_head || *trim_tail)
      && dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  Trimming statement (head = %d, tail = %d): ",
	       *trim_head, *trim_tail);
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
      fprintf (dump_file, "\n");
    }
}
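
/* A worked example with made-up numbers: for a 16-byte store where only
   bytes 5..11 remain live, last_orig is 15, last_live is 11 and
   first_live is 5, giving raw trims of 4 (tail) and 5 (head).  The
   "& ~0x1" masks round each trim down to an even number of bytes, so
   the head trim becomes 4 as well.  */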

/* STMT initializes an object from COMPLEX_CST where one or more of the
   bytes written may be dead stores.  REF is a representation of the
   memory written.  LIVE is the bitmap of stores that are actually live.

   Attempt to rewrite STMT so that only the real or imaginary part of
   the object is actually stored.  */

static void
maybe_trim_complex_store (ao_ref *ref, sbitmap live, gimple *stmt)
{
  int trim_head, trim_tail;
  compute_trims (ref, live, &trim_head, &trim_tail, stmt);

  /* The amount of data trimmed from the head or tail must be at
     least half the size of the object to ensure we're trimming
     the entire real or imaginary half.  By writing things this
     way we avoid more O(n) bitmap operations.  */
  if (known_ge (trim_tail * 2 * BITS_PER_UNIT, ref->size))
    {
      /* TREE_REALPART is live.  */
      tree x = TREE_REALPART (gimple_assign_rhs1 (stmt));
      tree y = gimple_assign_lhs (stmt);
      y = build1 (REALPART_EXPR, TREE_TYPE (x), y);
      gimple_assign_set_lhs (stmt, y);
      gimple_assign_set_rhs1 (stmt, x);
    }
  else if (known_ge (trim_head * 2 * BITS_PER_UNIT, ref->size))
    {
      /* TREE_IMAGPART is live.  */
      tree x = TREE_IMAGPART (gimple_assign_rhs1 (stmt));
      tree y = gimple_assign_lhs (stmt);
      y = build1 (IMAGPART_EXPR, TREE_TYPE (x), y);
      gimple_assign_set_lhs (stmt, y);
      gimple_assign_set_rhs1 (stmt, x);
    }

  /* Other cases indicate parts of both the real and imag subobjects
     are live.  We do not try to optimize those cases.  */
}
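
/* For illustration (hypothetical GIMPLE, not from a testcase): if only
   the real half of

	c = __complex__ (1.0e+0, 2.0e+0);

   is read later, the store is rewritten as

	REALPART_EXPR <c> = 1.0e+0;

   and the dead imaginary half is never written.  */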

/* STMT initializes an object using a CONSTRUCTOR where one or more of the
   bytes written are dead stores.  REF is a representation of the memory
   written.  LIVE is the bitmap of stores that are actually live.

   Attempt to rewrite STMT so that only the live bytes of the object are
   actually stored.

   The most common case for getting here is a CONSTRUCTOR with no elements
   being used to zero initialize an object.  We do not try to handle other
   cases as those would force us to fully cover the object with the
   CONSTRUCTOR node except for the components that are dead.  */

static void
maybe_trim_constructor_store (ao_ref *ref, sbitmap live, gimple *stmt)
{
  tree ctor = gimple_assign_rhs1 (stmt);

  /* This is the only case we currently handle.  It actually seems to
     catch most cases of actual interest.  */
  gcc_assert (CONSTRUCTOR_NELTS (ctor) == 0);

  int head_trim = 0;
  int tail_trim = 0;
  compute_trims (ref, live, &head_trim, &tail_trim, stmt);

  /* Now we want to replace the constructor initializer
     with memset (object + head_trim, 0, size - head_trim - tail_trim).  */
  if (head_trim || tail_trim)
    {
      /* We want &lhs for the MEM_REF expression.  */
      tree lhs_addr = build_fold_addr_expr (gimple_assign_lhs (stmt));

      if (! is_gimple_min_invariant (lhs_addr))
	return;

      /* The number of bytes for the new constructor.  */
      poly_int64 ref_bytes = exact_div (ref->size, BITS_PER_UNIT);
      poly_int64 count = ref_bytes - head_trim - tail_trim;

      /* And the new type for the CONSTRUCTOR.  Essentially it's just
	 a char array large enough to cover the non-trimmed parts of
	 the original CONSTRUCTOR.  Note we want explicit bounds here
	 so that we know how many bytes to clear when expanding the
	 CONSTRUCTOR.  */
      tree type = build_array_type_nelts (char_type_node, count);

      /* Build a suitable alias type rather than using alias set zero
	 to avoid pessimizing.  */
      tree alias_type = reference_alias_ptr_type (gimple_assign_lhs (stmt));

      /* Build a MEM_REF representing the whole accessed area, starting
	 at the first byte not trimmed.  */
      tree exp = fold_build2 (MEM_REF, type, lhs_addr,
			      build_int_cst (alias_type, head_trim));

      /* Now update STMT with a new RHS and LHS.  */
      gimple_assign_set_lhs (stmt, exp);
      gimple_assign_set_rhs1 (stmt, build_constructor (type, NULL));
    }
}
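
/* Illustrative rewrite (hypothetical values): zero-initializing a
   16-byte object with head_trim == 4 and tail_trim == 4 turns

	a = {};

   into a clear of just the 8 live middle bytes, conceptually

	MEM[(char[8] *)&a + 4B] = {};  */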

/* STMT is a memcpy, memmove or memset.  Decrement the number of bytes
   copied/set by DECREMENT.  */

static void
decrement_count (gimple *stmt, int decrement)
{
  tree *countp = gimple_call_arg_ptr (stmt, 2);
  gcc_assert (TREE_CODE (*countp) == INTEGER_CST);
  *countp = wide_int_to_tree (TREE_TYPE (*countp), (TREE_INT_CST_LOW (*countp)
						    - decrement));
}

/* STMT is a memcpy, memmove or memset and WHERE points at one of its
   pointer arguments.  Advance that argument by INCREMENT bytes: for an
   SSA_NAME pointer, emit a POINTER_PLUS_EXPR before STMT; otherwise
   fold the adjustment into the address expression itself.  */

static void
increment_start_addr (gimple *stmt, tree *where, int increment)
{
  if (TREE_CODE (*where) == SSA_NAME)
    {
      tree tem = make_ssa_name (TREE_TYPE (*where));
      gassign *newop
	= gimple_build_assign (tem, POINTER_PLUS_EXPR, *where,
			       build_int_cst (sizetype, increment));
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gsi_insert_before (&gsi, newop, GSI_SAME_STMT);
      *where = tem;
      update_stmt (gsi_stmt (gsi));
      return;
    }

  *where = build_fold_addr_expr (fold_build2 (MEM_REF, char_type_node,
					      *where,
					      build_int_cst (ptr_type_node,
							     increment)));
}

/* STMT is a builtin call that writes bytes into the memory region
   described by REF.  LIVE is the bitmap of bytes in that region that
   are still live; the remaining bytes are dead and need not be stored.
   Try to rewrite STMT to reduce the amount of data it actually writes.

   Right now we only support trimming from the head or the tail of the
   memory region.  In theory we could split the mem* call, but it's
   likely of marginal value.  */

static void
maybe_trim_memstar_call (ao_ref *ref, sbitmap live, gimple *stmt)
{
  switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
    {
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMMOVE:
      {
	int head_trim, tail_trim;
	compute_trims (ref, live, &head_trim, &tail_trim, stmt);

	/* Tail trimming is easy, we can just reduce the count.  */
	if (tail_trim)
	  decrement_count (stmt, tail_trim);

	/* Head trimming requires adjusting all the arguments.  */
	if (head_trim)
	  {
	    tree *dst = gimple_call_arg_ptr (stmt, 0);
	    increment_start_addr (stmt, dst, head_trim);
	    tree *src = gimple_call_arg_ptr (stmt, 1);
	    increment_start_addr (stmt, src, head_trim);
	    decrement_count (stmt, head_trim);
	  }
	break;
      }

    case BUILT_IN_MEMSET:
      {
	int head_trim, tail_trim;
	compute_trims (ref, live, &head_trim, &tail_trim, stmt);

	/* Tail trimming is easy, we can just reduce the count.  */
	if (tail_trim)
	  decrement_count (stmt, tail_trim);

	/* Head trimming requires adjusting all the arguments.  */
	if (head_trim)
	  {
	    tree *dst = gimple_call_arg_ptr (stmt, 0);
	    increment_start_addr (stmt, dst, head_trim);
	    decrement_count (stmt, head_trim);
	  }
	break;
      }

    default:
      break;
    }
}
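
/* For example (hypothetical call, illustrative trim values): with
   head_trim == 8 and tail_trim == 4, the call

	memcpy (dst_5, src_7, 32);

   is rewritten into

	_1 = dst_5 + 8;
	_2 = src_7 + 8;
	memcpy (_1, _2, 20);

   where both pointer adjustments are emitted by increment_start_addr
   and the count drops by head_trim + tail_trim.  */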

/* STMT is a memory write where one or more bytes written are dead
   stores.  REF is a representation of the memory written.  LIVE is
   the bitmap of bytes in that region that are actually live.

   Attempt to rewrite STMT so that it writes fewer memory locations.  Right
   now we only support trimming at the start or end of the memory region.
   It's not clear how much there is to be gained by trimming from the middle
   of the region.  */

static void
maybe_trim_partially_dead_store (ao_ref *ref, sbitmap live, gimple *stmt)
{
  if (is_gimple_assign (stmt)
      && TREE_CODE (gimple_assign_lhs (stmt)) != TARGET_MEM_REF)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	case CONSTRUCTOR:
	  maybe_trim_constructor_store (ref, live, stmt);
	  break;
	case COMPLEX_CST:
	  maybe_trim_complex_store (ref, live, stmt);
	  break;
	default:
	  break;
	}
    }
}

/* Return TRUE if USE_REF reads bytes that are live according to LIVE,
   where LIVE is derived from REF, a write reference.

   While this routine may modify USE_REF, it's passed by value, not
   location.  So callers do not see those modifications.  */

static bool
live_bytes_read (ao_ref use_ref, ao_ref *ref, sbitmap live)
{
  /* We have already verified that USE_REF and REF hit the same object.
     Now verify that there's actually an overlap between USE_REF and REF.  */
  HOST_WIDE_INT start, size;
  if (normalize_ref (&use_ref, ref)
      && (use_ref.offset - ref->offset).is_constant (&start)
      && use_ref.size.is_constant (&size))
    {
      /* If USE_REF covers all of REF, then it will hit one or more
	 live bytes.  This avoids useless iteration over the bitmap
	 below.  */
      if (start == 0 && known_eq (size, ref->size))
	return true;

      /* Now check if any of the remaining bits in use_ref are set in LIVE.  */
      return bitmap_bit_in_range_p (live, start / BITS_PER_UNIT,
				    (start + size - 1) / BITS_PER_UNIT);
    }
  return true;
}
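
/* Two notes on the above (with made-up byte ranges for illustration):
   a use reading bytes [4, 8) of a store whose live bytes are only
   [0, 4) returns FALSE, which lets the caller look through that read;
   and whenever the overlap cannot be reduced to constant bounds, the
   function conservatively returns TRUE, i.e. it assumes live bytes
   are read.  */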

/* A helper of dse_optimize_stmt.
   Given a GIMPLE_ASSIGN in STMT that writes to REF, find a candidate
   statement *USE_STMT that may prove STMT to be dead.
   Return DSE_STORE_DEAD and set *USE_STMT when such a statement is
   found, DSE_STORE_MAYBE_PARTIAL_DEAD when only part of STMT may be
   dead, and DSE_STORE_LIVE otherwise.  */

static dse_store_status
dse_classify_store (ao_ref *ref, gimple *stmt, gimple **use_stmt,
		    bool byte_tracking_enabled, sbitmap live_bytes)
{
  gimple *temp;
  unsigned cnt = 0;

  *use_stmt = NULL;

  /* Find the first dominated statement that clobbers (part of) the
     memory stmt stores to with no intermediate statement that may use
     part of the memory stmt stores.  That is, find a store that may
     prove stmt to be a dead store.  */
  temp = stmt;
  do
    {
      gimple *use_stmt, *defvar_def;
      imm_use_iterator ui;
      bool fail = false;
      tree defvar;

      /* Limit stmt walking to be linear in the number of possibly
	 dead stores.  */
      if (++cnt > 256)
	return DSE_STORE_LIVE;

      if (gimple_code (temp) == GIMPLE_PHI)
	defvar = PHI_RESULT (temp);
      else
	defvar = gimple_vdef (temp);
      defvar_def = temp;
      temp = NULL;
      FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
	{
	  cnt++;

	  /* If we ever reach our DSE candidate stmt again fail.  We
	     cannot handle dead stores in loops.  */
	  if (use_stmt == stmt)
	    {
	      fail = true;
	      BREAK_FROM_IMM_USE_STMT (ui);
	    }
	  /* In simple cases we can look through PHI nodes, but we
	     have to be careful with loops and with memory references
	     containing operands that are also operands of PHI nodes.
	     See gcc.c-torture/execute/20051110-*.c.  */
	  else if (gimple_code (use_stmt) == GIMPLE_PHI)
	    {
	      if (temp
		  /* Make sure we are not in a loop latch block.  */
		  || gimple_bb (stmt) == gimple_bb (use_stmt)
		  || dominated_by_p (CDI_DOMINATORS,
				     gimple_bb (stmt), gimple_bb (use_stmt))
		  /* We can look through PHIs to regions post-dominating
		     the DSE candidate stmt.  */
		  || !dominated_by_p (CDI_POST_DOMINATORS,
				      gimple_bb (stmt), gimple_bb (use_stmt)))
		{
		  fail = true;
		  BREAK_FROM_IMM_USE_STMT (ui);
		}
	      /* Do not consider the PHI as use if it dominates the
		 stmt defining the virtual operand we are processing,
		 we have processed it already in this case.  */
	      if (gimple_bb (defvar_def) != gimple_bb (use_stmt)
		  && !dominated_by_p (CDI_DOMINATORS,
				      gimple_bb (defvar_def),
				      gimple_bb (use_stmt)))
		temp = use_stmt;
	    }
	  /* If the statement is a use, the store is not dead.  */
	  else if (ref_maybe_used_by_stmt_p (use_stmt, ref))
	    {
	      /* Handle common cases where we can easily build an ao_ref
		 structure for USE_STMT and in doing so we find that the
		 references hit non-live bytes and thus can be ignored.  */
	      if (byte_tracking_enabled && (!gimple_vdef (use_stmt) || !temp))
		{
		  if (is_gimple_assign (use_stmt))
		    {
		      /* Other cases were noted as non-aliasing by
			 the call to ref_maybe_used_by_stmt_p.  */
		      ao_ref use_ref;
		      ao_ref_init (&use_ref, gimple_assign_rhs1 (use_stmt));
		      if (valid_ao_ref_for_dse (&use_ref)
			  && use_ref.base == ref->base
			  && known_eq (use_ref.size, use_ref.max_size)
			  && !live_bytes_read (use_ref, ref, live_bytes))
			{
			  /* If this statement has a VDEF, then it is the
			     first store we have seen, so walk through it.  */
			  if (gimple_vdef (use_stmt))
			    temp = use_stmt;
			  continue;
			}
		    }
		}

	      fail = true;
	      BREAK_FROM_IMM_USE_STMT (ui);
	    }
	  /* If this is a store, remember it or bail out if we have
	     multiple ones (they will be in different CFG parts then).  */
	  else if (gimple_vdef (use_stmt))
	    {
	      if (temp)
		{
		  fail = true;
		  BREAK_FROM_IMM_USE_STMT (ui);
		}
	      temp = use_stmt;
	    }
	}

      if (fail)
	{
	  /* STMT might be partially dead and we may be able to reduce
	     how many memory locations it stores into.  */
	  if (byte_tracking_enabled && !gimple_clobber_p (stmt))
	    return DSE_STORE_MAYBE_PARTIAL_DEAD;
	  return DSE_STORE_LIVE;
	}

      /* If we didn't find any definition, this means the store is dead
	 if it isn't a store to globally reachable memory.  In this case
	 just pretend the stmt makes itself dead.  Otherwise fail.  */
      if (!temp)
	{
	  if (ref_may_alias_global_p (ref))
	    return DSE_STORE_LIVE;

	  temp = stmt;
	  break;
	}

      if (byte_tracking_enabled && temp)
	clear_bytes_written_by (live_bytes, temp, ref);
    }
  /* Continue walking until we reach a full kill as a single statement
     or there are no more live bytes.  */
  while (!stmt_kills_ref_p (temp, ref)
	 && !(byte_tracking_enabled && bitmap_empty_p (live_bytes)));

  *use_stmt = temp;
  return DSE_STORE_DEAD;
}
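
/* To sketch the byte-tracking walk on a made-up straight-line example
   (not from a testcase), for a 16-byte candidate store

	a = {};		<-- candidate, live_bytes = bytes 0..15
	a.lo = x_1;	<-- kills bytes 0..7, live_bytes = bytes 8..15
	a.hi = y_2;	<-- kills bytes 8..15, live_bytes empty

   each later store clears its bytes via clear_bytes_written_by, and
   the loop above terminates with DSE_STORE_DEAD once live_bytes is
   empty, even though no single statement kills the whole of REF.  */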

class dse_dom_walker : public dom_walker
{
public:
  dse_dom_walker (cdi_direction direction)
    : dom_walker (direction),
      m_live_bytes (PARAM_VALUE (PARAM_DSE_MAX_OBJECT_SIZE)),
      m_byte_tracking_enabled (false) {}

  virtual edge before_dom_children (basic_block);

private:
  auto_sbitmap m_live_bytes;
  bool m_byte_tracking_enabled;
  void dse_optimize_stmt (gimple_stmt_iterator *);
};

/* Delete a dead call at GSI, which is a mem* call of some kind.  */

static void
delete_dead_call (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  Deleted dead call: ");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
      fprintf (dump_file, "\n");
    }

  tree lhs = gimple_call_lhs (stmt);
  if (lhs)
    {
      /* The mem* builtins return their destination argument; if the
	 result of the dead call is used, replace it with the pointer
	 argument directly.  */
      tree ptr = gimple_call_arg (stmt, 0);
      gimple *new_stmt = gimple_build_assign (lhs, ptr);
      unlink_stmt_vdef (stmt);
      if (gsi_replace (gsi, new_stmt, true))
	bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
    }
  else
    {
      /* Then we need to fix the operand of the consuming stmt.  */
      unlink_stmt_vdef (stmt);

      /* Remove the dead store.  */
      if (gsi_remove (gsi, true))
	bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
      release_defs (stmt);
    }
}

/* Delete a dead store at GSI, which is a gimple assignment.  */

static void
delete_dead_assignment (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  Deleted dead store: ");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
      fprintf (dump_file, "\n");
    }

  /* Then we need to fix the operand of the consuming stmt.  */
  unlink_stmt_vdef (stmt);

  /* Remove the dead store.  */
  basic_block bb = gimple_bb (stmt);
  if (gsi_remove (gsi, true))
    bitmap_set_bit (need_eh_cleanup, bb->index);

  /* And release any SSA_NAMEs set in this statement back to the
     SSA_NAME manager.  */
  release_defs (stmt);
}

/* Attempt to eliminate dead stores in the statement referenced by GSI.

   A dead store is a store into a memory location which will later be
   overwritten by another store without any intervening loads.  In this
   case the earlier store can be deleted.

   In our SSA + virtual operand world we use immediate uses of virtual
   operands to detect dead stores.  If a store's virtual definition
   is used precisely once by a later store to the same location which
   post dominates the first store, then the first store is dead.  */

void
dse_dom_walker::dse_optimize_stmt (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If this statement has no virtual defs, then there is nothing
     to do.  */
  if (!gimple_vdef (stmt))
    return;

  /* Don't return early on *this_2(D) ={v} {CLOBBER}.  */
  if (gimple_has_volatile_ops (stmt)
      && (!gimple_clobber_p (stmt)
	  || TREE_CODE (gimple_assign_lhs (stmt)) != MEM_REF))
    return;

  ao_ref ref;
  if (!initialize_ao_ref_for_dse (stmt, &ref))
    return;

  /* We know we have virtual definitions.  We can handle assignments and
     some builtin calls.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
	{
	case BUILT_IN_MEMCPY:
	case BUILT_IN_MEMMOVE:
	case BUILT_IN_MEMSET:
	  {
	    /* Occasionally calls with an explicit length of zero
	       show up in the IL.  It's pointless to do analysis
	       on them; they're trivially dead.  */
	    tree size = gimple_call_arg (stmt, 2);
	    if (integer_zerop (size))
	      {
		delete_dead_call (gsi);
		return;
	      }

	    gimple *use_stmt;
	    enum dse_store_status store_status;
	    m_byte_tracking_enabled
	      = setup_live_bytes_from_ref (&ref, m_live_bytes);
	    store_status = dse_classify_store (&ref, stmt, &use_stmt,
					       m_byte_tracking_enabled,
					       m_live_bytes);
	    if (store_status == DSE_STORE_LIVE)
	      return;

	    if (store_status == DSE_STORE_MAYBE_PARTIAL_DEAD)
	      {
		maybe_trim_memstar_call (&ref, m_live_bytes, stmt);
		return;
	      }

	    if (store_status == DSE_STORE_DEAD)
	      delete_dead_call (gsi);
	    return;
	  }

	default:
	  return;
	}
    }

  if (is_gimple_assign (stmt))
    {
      gimple *use_stmt;

      /* Self-assignments are zombies.  */
      if (operand_equal_p (gimple_assign_rhs1 (stmt),
			   gimple_assign_lhs (stmt), 0))
	use_stmt = stmt;
      else
	{
	  m_byte_tracking_enabled
	    = setup_live_bytes_from_ref (&ref, m_live_bytes);
	  enum dse_store_status store_status;
	  store_status = dse_classify_store (&ref, stmt, &use_stmt,
					     m_byte_tracking_enabled,
					     m_live_bytes);
	  if (store_status == DSE_STORE_LIVE)
	    return;

	  if (store_status == DSE_STORE_MAYBE_PARTIAL_DEAD)
	    {
	      maybe_trim_partially_dead_store (&ref, m_live_bytes, stmt);
	      return;
	    }
	}

      /* Now we know that use_stmt kills the LHS of stmt.  */

      /* But only remove *this_2(D) ={v} {CLOBBER} if killed by
	 another clobber stmt.  */
      if (gimple_clobber_p (stmt)
	  && !gimple_clobber_p (use_stmt))
	return;

      delete_dead_assignment (gsi);
    }
}

/* Walk the statements in BB from last to first, attempting to remove
   dead stores.  Deleting a statement leaves the iterator either on the
   following statement or at the end of the block, so resynchronize it
   before stepping backwards.  */

edge
dse_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
    {
      dse_optimize_stmt (&gsi);
      if (gsi_end_p (gsi))
	gsi = gsi_last_bb (bb);
      else
	gsi_prev (&gsi);
    }
  return NULL;
}

namespace {

const pass_data pass_data_dse =
{
  GIMPLE_PASS, /* type */
  "dse", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_DSE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_dse : public gimple_opt_pass
{
public:
  pass_dse (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dse, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_dse (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dse != 0; }
  virtual unsigned int execute (function *);

}; // class pass_dse

unsigned int
pass_dse::execute (function *fun)
{
  need_eh_cleanup = BITMAP_ALLOC (NULL);

  renumber_gimple_stmt_uids ();

  /* We might consider making this a property of each pass so that it
     can be [re]computed on an as-needed basis.  Particularly since
     this pass could be seen as an extension of DCE which needs post
     dominators.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);

  /* Dead store elimination is fundamentally a walk of the post-dominator
     tree and a backwards walk of statements within each block.  */
  dse_dom_walker (CDI_POST_DOMINATORS).walk (fun->cfg->x_exit_block_ptr);

  /* Removal of stores may make some EH edges dead.  Purge such edges from
     the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      cleanup_tree_cfg ();
    }

  BITMAP_FREE (need_eh_cleanup);

  /* For now, just wipe the post-dominator information.  */
  free_dominance_info (CDI_POST_DOMINATORS);
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_dse (gcc::context *ctxt)
{
  return new pass_dse (ctxt);
}