/* Dead store elimination
   Copyright (C) 2004-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "bitmap.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "domwalk.h"
#include "flags.h"
#include "langhooks.h"
#include "tree-cfgcleanup.h"

/* This file implements dead store elimination.

   A dead store is a store into a memory location which will later be
   overwritten by another store without any intervening loads.  In this
   case the earlier store can be deleted.

   In our SSA + virtual operand world we use immediate uses of virtual
   operands to detect dead stores.  If a store's virtual definition
   is used precisely once by a later store to the same location which
   post dominates the first store, then the first store is dead.

   The single use of the store's virtual definition ensures that
   there are no intervening aliased loads, and the requirement that
   the second store post dominate the first ensures that if the earlier
   store executes, then the later stores will execute before the function
   exits.

   It may help to think of this as first moving the earlier store to
   the point immediately before the later store.  Again, the single
   use of the virtual definition and the post-dominance relationship
   ensure that such movement would be safe.  Clearly if there are
   back to back stores, then the first is redundant.

   Reviewing section 10.7.2 in Morgan's "Building an Optimizing Compiler"
   may also help in understanding this code since it discusses the
   relationship between dead store and redundant load elimination.  In
   fact, they are the same transformation applied to different views of
   the CFG.  */

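/* As a concrete illustration (a hypothetical example, not taken from the
   GCC testsuite), consider:

     void
     f (int *p)
     {
       *p = 1;
       *p = 2;
     }

   The virtual definition of the first store has exactly one use, the
   second store, which post dominates it and writes the same location
   with no intervening load, so the first store is dead and is removed.  */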

/* Bitmap of blocks that have had EH statements cleaned.  We should
   remove their dead edges eventually.  */
static bitmap need_eh_cleanup;

static bool gate_dse (void);
static unsigned int tree_ssa_dse (void);


/* A helper of dse_optimize_stmt.
   Given a GIMPLE_ASSIGN in STMT, find a candidate statement *USE_STMT that
   may prove STMT to be dead.
   Return TRUE if such a statement was found, otherwise FALSE.  */

static bool
dse_possible_dead_store_p (gimple stmt, gimple *use_stmt)
{
  gimple temp;
  unsigned cnt = 0;

  *use_stmt = NULL;

  /* Self-assignments are zombies: the store is trivially dead, and the
     statement acts as its own killing store.  */
  if (operand_equal_p (gimple_assign_rhs1 (stmt), gimple_assign_lhs (stmt), 0))
    {
      *use_stmt = stmt;
      return true;
    }

  /* Find the first dominated statement that clobbers (part of) the
     memory stmt stores to, with no intermediate statement that may use
     part of that memory.  That is, find a store that may prove stmt
     to be a dead store.  */
  temp = stmt;
  do
    {
      gimple use_stmt, defvar_def;
      imm_use_iterator ui;
      bool fail = false;
      tree defvar;

      /* Limit stmt walking to be linear in the number of possibly
         dead stores.  */
      if (++cnt > 256)
        return false;

      if (gimple_code (temp) == GIMPLE_PHI)
        defvar = PHI_RESULT (temp);
      else
        defvar = gimple_vdef (temp);
      defvar_def = temp;
      temp = NULL;
      FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
        {
          cnt++;

          /* If we ever reach our DSE candidate stmt again, fail.  We
             cannot handle dead stores in loops.  */
          if (use_stmt == stmt)
            {
              fail = true;
              BREAK_FROM_IMM_USE_STMT (ui);
            }
          /* In simple cases we can look through PHI nodes, but we
             have to be careful with loops and with memory references
             containing operands that are also operands of PHI nodes.
             See gcc.c-torture/execute/20051110-*.c.  */
          else if (gimple_code (use_stmt) == GIMPLE_PHI)
            {
              if (temp
                  /* Make sure we are not in a loop latch block.  */
                  || gimple_bb (stmt) == gimple_bb (use_stmt)
                  || dominated_by_p (CDI_DOMINATORS,
                                     gimple_bb (stmt), gimple_bb (use_stmt))
                  /* We can look through PHIs to regions post-dominating
                     the DSE candidate stmt.  */
                  || !dominated_by_p (CDI_POST_DOMINATORS,
                                      gimple_bb (stmt), gimple_bb (use_stmt)))
                {
                  fail = true;
                  BREAK_FROM_IMM_USE_STMT (ui);
                }
              /* Do not consider the PHI as a use if it dominates the
                 stmt defining the virtual operand we are processing;
                 we have already processed it in that case.  */
              if (gimple_bb (defvar_def) != gimple_bb (use_stmt)
                  && !dominated_by_p (CDI_DOMINATORS,
                                      gimple_bb (defvar_def),
                                      gimple_bb (use_stmt)))
                temp = use_stmt;
            }
          /* If the statement is a use, the store is not dead.  */
          else if (ref_maybe_used_by_stmt_p (use_stmt,
                                             gimple_assign_lhs (stmt)))
            {
              fail = true;
              BREAK_FROM_IMM_USE_STMT (ui);
            }
          /* If this is a store, remember it, or bail out if we have
             multiple ones (they will then be in different CFG parts).  */
          else if (gimple_vdef (use_stmt))
            {
              if (temp)
                {
                  fail = true;
                  BREAK_FROM_IMM_USE_STMT (ui);
                }
              temp = use_stmt;
            }
        }

      if (fail)
        return false;

      /* If we didn't find any definition, this means the store is dead
         if it isn't a store to globally reachable memory.  In this case
         just pretend the stmt makes itself dead.  Otherwise fail.  */
      if (!temp)
        {
          if (stmt_may_clobber_global_p (stmt))
            return false;

          temp = stmt;
          break;
        }
    }
  /* We deliberately stop on clobbering statements and not only on
     killing ones to make walking cheaper.  Otherwise we can just
     continue walking until both stores have equal reference trees.  */
  while (!stmt_may_clobber_ref_p (temp, gimple_assign_lhs (stmt)));

  *use_stmt = temp;

  return true;
}


/* Attempt to eliminate dead stores in the statement referenced by GSI.

   A dead store is a store into a memory location which will later be
   overwritten by another store without any intervening loads.  In this
   case the earlier store can be deleted.

   In our SSA + virtual operand world we use immediate uses of virtual
   operands to detect dead stores.  If a store's virtual definition
   is used precisely once by a later store to the same location which
   post dominates the first store, then the first store is dead.  */

static void
dse_optimize_stmt (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  /* If this statement has no virtual defs, then there is nothing
     to do.  */
  if (!gimple_vdef (stmt))
    return;

  /* We know we have virtual definitions.  If this is a call to a known
     function, leave it alone; only plain assignments are handled below.  */
  if (is_gimple_call (stmt) && gimple_call_fndecl (stmt))
    return;

  /* Don't return early on *this_2(D) ={v} {CLOBBER}.  */
  if (gimple_has_volatile_ops (stmt)
      && (!gimple_clobber_p (stmt)
          || TREE_CODE (gimple_assign_lhs (stmt)) != MEM_REF))
    return;

  if (is_gimple_assign (stmt))
    {
      gimple use_stmt;

      if (!dse_possible_dead_store_p (stmt, &use_stmt))
        return;

      /* But only remove *this_2(D) ={v} {CLOBBER} if killed by
         another clobber stmt.  */
      if (gimple_clobber_p (stmt)
          && !gimple_clobber_p (use_stmt))
        return;

      /* If we have precisely one immediate use at this point and the
         stores are to the same memory location, or there is a chain of
         virtual uses from stmt to the stmt which stores to that same
         memory location, then we may have found a redundant store.  */
      if ((gimple_has_lhs (use_stmt)
           && (operand_equal_p (gimple_assign_lhs (stmt),
                                gimple_get_lhs (use_stmt), 0)))
          || stmt_kills_ref_p (use_stmt, gimple_assign_lhs (stmt)))
        {
          basic_block bb;

          /* If use_stmt is or might be a nop assignment, e.g. for
             struct { ... } S a, b, *p; ...
             b = a; b = b;
             or
             b = a; b = *p; where p might be &b,
             or
             *p = a; *p = b; where p might be &b,
             or
             *p = *u; *p = *v; where p might be v, then USE_STMT
             acts as a use as well as definition, so store in STMT
             is not dead.  */
          if (stmt != use_stmt
              && ref_maybe_used_by_stmt_p (use_stmt, gimple_assign_lhs (stmt)))
            return;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "  Deleted dead store '");
              print_gimple_stmt (dump_file, gsi_stmt (*gsi), dump_flags, 0);
              fprintf (dump_file, "'\n");
            }

          /* Then we need to fix the operand of the consuming stmt.  */
          unlink_stmt_vdef (stmt);

          /* Remove the dead store.  */
          bb = gimple_bb (stmt);
          if (gsi_remove (gsi, true))
            bitmap_set_bit (need_eh_cleanup, bb->index);

          /* And release any SSA_NAMEs set in this statement back to the
             SSA_NAME manager.  */
          release_defs (stmt);
        }
    }
}

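/* Dominator walk driver for the pass: visit each basic block in the
   post-dominator tree and try to delete dead stores in it.  */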
class dse_dom_walker : public dom_walker
{
public:
  dse_dom_walker (cdi_direction direction) : dom_walker (direction) {}

  virtual void before_dom_children (basic_block);
};

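/* Walk the statements of basic block BB from last to first and attempt
   to delete dead stores.  */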
void
dse_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
    {
      dse_optimize_stmt (&gsi);
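      /* If the statement GSI was pointing at has been removed, GSI now
         points past the end of the block; restart from the new last
         statement.  Otherwise simply step to the previous statement.  */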
      if (gsi_end_p (gsi))
        gsi = gsi_last_bb (bb);
      else
        gsi_prev (&gsi);
    }
}

/* Main entry point.  */

static unsigned int
tree_ssa_dse (void)
{
  need_eh_cleanup = BITMAP_ALLOC (NULL);

  renumber_gimple_stmt_uids ();

  /* We might consider making this a property of each pass so that it
     can be [re]computed on an as-needed basis.  Particularly since
     this pass could be seen as an extension of DCE which needs post
     dominators.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);

  /* Dead store elimination is fundamentally a walk of the post-dominator
     tree and a backwards walk of statements within each block.  */
  dse_dom_walker (CDI_POST_DOMINATORS).walk (cfun->cfg->x_exit_block_ptr);

  /* Removal of stores may make some EH edges dead.  Purge such edges from
     the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      cleanup_tree_cfg ();
    }

  BITMAP_FREE (need_eh_cleanup);

  /* For now, just wipe the post-dominator information.  */
  free_dominance_info (CDI_POST_DOMINATORS);
  return 0;
}

static bool
gate_dse (void)
{
  return flag_tree_dse != 0;
}

namespace {

const pass_data pass_data_dse =
{
  GIMPLE_PASS, /* type */
  "dse", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_DSE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_ssa, /* todo_flags_finish */
};

class pass_dse : public gimple_opt_pass
{
public:
  pass_dse (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dse, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_dse (m_ctxt); }
  bool gate () { return gate_dse (); }
  unsigned int execute () { return tree_ssa_dse (); }

}; // class pass_dse

} // anon namespace

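/* Create an instance of the DSE pass for use by the pass manager.  */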
gimple_opt_pass *
make_pass_dse (gcc::context *ctxt)
{
  return new pass_dse (ctxt);
}