Eliminate n_edges macro
[gcc.git] / gcc / tree-ssa-dse.c
1 /* Dead store elimination
2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "ggc.h"
25 #include "tree.h"
26 #include "tm_p.h"
27 #include "basic-block.h"
28 #include "gimple-pretty-print.h"
29 #include "bitmap.h"
30 #include "gimple.h"
31 #include "gimple-iterator.h"
32 #include "gimple-ssa.h"
33 #include "tree-cfg.h"
34 #include "tree-phinodes.h"
35 #include "ssa-iterators.h"
36 #include "stringpool.h"
37 #include "tree-ssanames.h"
38 #include "expr.h"
39 #include "tree-dfa.h"
40 #include "tree-pass.h"
41 #include "domwalk.h"
42 #include "flags.h"
43 #include "langhooks.h"
44 #include "tree-cfgcleanup.h"
45
46 /* This file implements dead store elimination.
47
48 A dead store is a store into a memory location which will later be
49 overwritten by another store without any intervening loads. In this
50 case the earlier store can be deleted.
51
52 In our SSA + virtual operand world we use immediate uses of virtual
53 operands to detect dead stores. If a store's virtual definition
54 is used precisely once by a later store to the same location which
55 post dominates the first store, then the first store is dead.
56
57 The single use of the store's virtual definition ensures that
58 there are no intervening aliased loads and the requirement that
59 the second load post dominate the first ensures that if the earlier
60 store executes, then the later stores will execute before the function
61 exits.
62
63 It may help to think of this as first moving the earlier store to
64 the point immediately before the later store. Again, the single
65 use of the virtual definition and the post-dominance relationship
66 ensure that such movement would be safe. Clearly if there are
67 back to back stores, then the second is redundant.
68
69 Reviewing section 10.7.2 in Morgan's "Building an Optimizing Compiler"
70 may also help in understanding this code since it discusses the
71 relationship between dead store and redundant load elimination. In
72 fact, they are the same transformation applied to different views of
73 the CFG. */
74
75
/* Bitmap of blocks that have had EH statements cleaned.  We should
   remove their dead edges eventually.  Set by dse_optimize_stmt when
   removing a store changes EH behavior; consumed by tree_ssa_dse.  */
static bitmap need_eh_cleanup;

/* Forward declarations of the pass gate and execute functions.  */
static bool gate_dse (void);
static unsigned int tree_ssa_dse (void);
82
83
/* A helper of dse_optimize_stmt.
   Given a GIMPLE_ASSIGN in STMT, find a candidate statement *USE_STMT that
   may prove STMT to be dead: walk forward along the virtual-use chain of
   STMT's VDEF looking for a later statement that clobbers the memory STMT
   stores to, with no intervening statement that may read that memory.
   Return TRUE if the above conditions are met, otherwise FALSE.  */

static bool
dse_possible_dead_store_p (gimple stmt, gimple *use_stmt)
{
  gimple temp;
  unsigned cnt = 0;

  *use_stmt = NULL;

  /* Self-assignments are zombies.  */
  if (operand_equal_p (gimple_assign_rhs1 (stmt), gimple_assign_lhs (stmt), 0))
    {
      *use_stmt = stmt;
      return true;
    }

  /* Find the first dominated statement that clobbers (part of) the
     memory stmt stores to with no intermediate statement that may use
     part of the memory stmt stores.  That is, find a store that may
     prove stmt to be a dead store.  */
  temp = stmt;
  do
    {
      gimple use_stmt, defvar_def;
      imm_use_iterator ui;
      bool fail = false;
      tree defvar;

      /* Limit stmt walking to be linear in the number of possibly
         dead stores.  */
      if (++cnt > 256)
	return false;

      /* The virtual SSA name whose immediate uses we follow next:
	 either the PHI result or the VDEF of the statement we last
	 reached.  */
      if (gimple_code (temp) == GIMPLE_PHI)
	defvar = PHI_RESULT (temp);
      else
	defvar = gimple_vdef (temp);
      defvar_def = temp;
      temp = NULL;
      FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
	{
	  cnt++;

	  /* If we ever reach our DSE candidate stmt again fail.  We
	     cannot handle dead stores in loops.  */
	  if (use_stmt == stmt)
	    {
	      fail = true;
	      BREAK_FROM_IMM_USE_STMT (ui);
	    }
	  /* In simple cases we can look through PHI nodes, but we
	     have to be careful with loops and with memory references
	     containing operands that are also operands of PHI nodes.
	     See gcc.c-torture/execute/20051110-*.c.  */
	  else if (gimple_code (use_stmt) == GIMPLE_PHI)
	    {
	      if (temp
		  /* Make sure we are not in a loop latch block.  */
		  || gimple_bb (stmt) == gimple_bb (use_stmt)
		  || dominated_by_p (CDI_DOMINATORS,
				     gimple_bb (stmt), gimple_bb (use_stmt))
		  /* We can look through PHIs to regions post-dominating
		     the DSE candidate stmt.  */
		  || !dominated_by_p (CDI_POST_DOMINATORS,
				      gimple_bb (stmt), gimple_bb (use_stmt)))
		{
		  fail = true;
		  BREAK_FROM_IMM_USE_STMT (ui);
		}
	      /* Do not consider the PHI as use if it dominates the
	         stmt defining the virtual operand we are processing,
		 we have processed it already in this case.  */
	      if (gimple_bb (defvar_def) != gimple_bb (use_stmt)
		  && !dominated_by_p (CDI_DOMINATORS,
				      gimple_bb (defvar_def),
				      gimple_bb (use_stmt)))
		temp = use_stmt;
	    }
	  /* If the statement is a use the store is not dead.  */
	  else if (ref_maybe_used_by_stmt_p (use_stmt,
					     gimple_assign_lhs (stmt)))
	    {
	      fail = true;
	      BREAK_FROM_IMM_USE_STMT (ui);
	    }
	  /* If this is a store, remember it or bail out if we have
	     multiple ones (they will be in different CFG parts then).  */
	  else if (gimple_vdef (use_stmt))
	    {
	      if (temp)
		{
		  fail = true;
		  BREAK_FROM_IMM_USE_STMT (ui);
		}
	      temp = use_stmt;
	    }
	}

      if (fail)
	return false;

      /* If we didn't find any definition this means the store is dead
         if it isn't a store to global reachable memory.  In this case
	 just pretend the stmt makes itself dead.  Otherwise fail.  */
      if (!temp)
	{
	  if (stmt_may_clobber_global_p (stmt))
	    return false;

	  temp = stmt;
	  break;
	}
    }
  /* We deliberately stop on clobbering statements and not only on
     killing ones to make walking cheaper.  Otherwise we can just
     continue walking until both stores have equal reference trees.  */
  while (!stmt_may_clobber_ref_p (temp, gimple_assign_lhs (stmt)));

  *use_stmt = temp;

  return true;
}
210
211
212 /* Attempt to eliminate dead stores in the statement referenced by BSI.
213
214 A dead store is a store into a memory location which will later be
215 overwritten by another store without any intervening loads. In this
216 case the earlier store can be deleted.
217
218 In our SSA + virtual operand world we use immediate uses of virtual
219 operands to detect dead stores. If a store's virtual definition
220 is used precisely once by a later store to the same location which
221 post dominates the first store, then the first store is dead. */
222
223 static void
224 dse_optimize_stmt (gimple_stmt_iterator *gsi)
225 {
226 gimple stmt = gsi_stmt (*gsi);
227
228 /* If this statement has no virtual defs, then there is nothing
229 to do. */
230 if (!gimple_vdef (stmt))
231 return;
232
233 /* We know we have virtual definitions. If this is a GIMPLE_ASSIGN
234 that's not also a function call, then record it into our table. */
235 if (is_gimple_call (stmt) && gimple_call_fndecl (stmt))
236 return;
237
238 /* Don't return early on *this_2(D) ={v} {CLOBBER}. */
239 if (gimple_has_volatile_ops (stmt)
240 && (!gimple_clobber_p (stmt)
241 || TREE_CODE (gimple_assign_lhs (stmt)) != MEM_REF))
242 return;
243
244 if (is_gimple_assign (stmt))
245 {
246 gimple use_stmt;
247
248 if (!dse_possible_dead_store_p (stmt, &use_stmt))
249 return;
250
251 /* But only remove *this_2(D) ={v} {CLOBBER} if killed by
252 another clobber stmt. */
253 if (gimple_clobber_p (stmt)
254 && !gimple_clobber_p (use_stmt))
255 return;
256
257 /* If we have precisely one immediate use at this point and the
258 stores are to the same memory location or there is a chain of
259 virtual uses from stmt and the stmt which stores to that same
260 memory location, then we may have found redundant store. */
261 if ((gimple_has_lhs (use_stmt)
262 && (operand_equal_p (gimple_assign_lhs (stmt),
263 gimple_get_lhs (use_stmt), 0)))
264 || stmt_kills_ref_p (use_stmt, gimple_assign_lhs (stmt)))
265 {
266 basic_block bb;
267
268 /* If use_stmt is or might be a nop assignment, e.g. for
269 struct { ... } S a, b, *p; ...
270 b = a; b = b;
271 or
272 b = a; b = *p; where p might be &b,
273 or
274 *p = a; *p = b; where p might be &b,
275 or
276 *p = *u; *p = *v; where p might be v, then USE_STMT
277 acts as a use as well as definition, so store in STMT
278 is not dead. */
279 if (stmt != use_stmt
280 && ref_maybe_used_by_stmt_p (use_stmt, gimple_assign_lhs (stmt)))
281 return;
282
283 if (dump_file && (dump_flags & TDF_DETAILS))
284 {
285 fprintf (dump_file, " Deleted dead store '");
286 print_gimple_stmt (dump_file, gsi_stmt (*gsi), dump_flags, 0);
287 fprintf (dump_file, "'\n");
288 }
289
290 /* Then we need to fix the operand of the consuming stmt. */
291 unlink_stmt_vdef (stmt);
292
293 /* Remove the dead store. */
294 bb = gimple_bb (stmt);
295 if (gsi_remove (gsi, true))
296 bitmap_set_bit (need_eh_cleanup, bb->index);
297
298 /* And release any SSA_NAMEs set in this statement back to the
299 SSA_NAME manager. */
300 release_defs (stmt);
301 }
302 }
303 }
304
/* Dominator-tree walker that applies dead store elimination to every
   basic block it visits.  */

class dse_dom_walker : public dom_walker
{
public:
  dse_dom_walker (cdi_direction direction) : dom_walker (direction) {}

  /* Process all statements of BB before recursing into dominated
     children.  */
  virtual void before_dom_children (basic_block);
};
312
313 void
314 dse_dom_walker::before_dom_children (basic_block bb)
315 {
316 gimple_stmt_iterator gsi;
317
318 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
319 {
320 dse_optimize_stmt (&gsi);
321 if (gsi_end_p (gsi))
322 gsi = gsi_last_bb (bb);
323 else
324 gsi_prev (&gsi);
325 }
326 }
327
/* Main entry point.  Runs dead store elimination over the whole
   function and returns 0 (no extra TODO flags).  */

static unsigned int
tree_ssa_dse (void)
{
  /* Blocks needing EH edge purging are collected here during the walk
     and processed below.  */
  need_eh_cleanup = BITMAP_ALLOC (NULL);

  renumber_gimple_stmt_uids ();

  /* We might consider making this a property of each pass so that it
     can be [re]computed on an as-needed basis.  Particularly since
     this pass could be seen as an extension of DCE which needs post
     dominators.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);

  /* Dead store elimination is fundamentally a walk of the post-dominator
     tree and a backwards walk of statements within each block.  */
  dse_dom_walker (CDI_POST_DOMINATORS).walk (cfun->cfg->x_exit_block_ptr);

  /* Removal of stores may make some EH edges dead.  Purge such edges from
     the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      cleanup_tree_cfg ();
    }

  BITMAP_FREE (need_eh_cleanup);

  /* For now, just wipe the post-dominator information.  */
  free_dominance_info (CDI_POST_DOMINATORS);
  return 0;
}
362
363 static bool
364 gate_dse (void)
365 {
366 return flag_tree_dse != 0;
367 }
368
namespace {

/* Metadata describing the "dse" pass to the pass manager.  */
const pass_data pass_data_dse =
{
  GIMPLE_PASS, /* type */
  "dse", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_DSE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_ssa, /* todo_flags_finish */
};

/* Pass wrapper hooking gate_dse and tree_ssa_dse into the pass
   manager.  */

class pass_dse : public gimple_opt_pass
{
public:
  pass_dse (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dse, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_dse (m_ctxt); }
  bool gate () { return gate_dse (); }
  unsigned int execute () { return tree_ssa_dse (); }

}; // class pass_dse

} // anon namespace
401
/* Factory function: create a new instance of the DSE pass owned by
   context CTXT.  Called by the pass manager when constructing the
   pass pipeline.  */

gimple_opt_pass *
make_pass_dse (gcc::context *ctxt)
{
  return new pass_dse (ctxt);
}