arm.h (REG_CLASS_CONTENTS): Remove soft frame pointer from CORE_REGS and GENERAL_REGS...
[gcc.git] / gcc / tree-ssa-dse.c
1 /* Dead store elimination
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "ggc.h"
26 #include "tree.h"
27 #include "tm_p.h"
28 #include "basic-block.h"
29 #include "timevar.h"
30 #include "gimple-pretty-print.h"
31 #include "tree-flow.h"
32 #include "tree-pass.h"
33 #include "tree-dump.h"
34 #include "domwalk.h"
35 #include "flags.h"
36 #include "langhooks.h"
37
38 /* This file implements dead store elimination.
39
40 A dead store is a store into a memory location which will later be
41 overwritten by another store without any intervening loads. In this
42 case the earlier store can be deleted.
43
44 In our SSA + virtual operand world we use immediate uses of virtual
45 operands to detect dead stores. If a store's virtual definition
46 is used precisely once by a later store to the same location which
47 post dominates the first store, then the first store is dead.
48
49 The single use of the store's virtual definition ensures that
50 there are no intervening aliased loads and the requirement that
51                 the second store post dominate the first ensures that if the earlier
52 store executes, then the later stores will execute before the function
53 exits.
54
55 It may help to think of this as first moving the earlier store to
56 the point immediately before the later store. Again, the single
57 use of the virtual definition and the post-dominance relationship
58 ensure that such movement would be safe. Clearly if there are
59 back to back stores, then the second is redundant.
60
61 Reviewing section 10.7.2 in Morgan's "Building an Optimizing Compiler"
62 may also help in understanding this code since it discusses the
63 relationship between dead store and redundant load elimination. In
64 fact, they are the same transformation applied to different views of
65 the CFG. */
66
67
/* Global state shared by the whole dominator walk.  */
struct dse_global_data
{
  /* This is the global bitmap for store statements.

     Each statement has a unique ID.  When we encounter a store statement
     that we want to record, set the bit corresponding to the statement's
     unique ID in this bitmap.  */
  bitmap stores;
};
77
/* We allocate a bitmap-per-block for stores which are encountered
   during the scan of that block.  This allows us to restore the
   global bitmap of stores when we finish processing a block.  */
struct dse_block_local_data
{
  /* Uids of stores recorded while scanning this block; NULL until the
     first store is seen (allocated lazily by record_voperand_set).  */
  bitmap stores;
};
85
86 static bool gate_dse (void);
87 static unsigned int tree_ssa_dse (void);
88 static void dse_initialize_block_local_data (struct dom_walk_data *,
89 basic_block,
90 bool);
91 static void dse_enter_block (struct dom_walk_data *, basic_block);
92 static void dse_leave_block (struct dom_walk_data *, basic_block);
93 static void record_voperand_set (bitmap, bitmap *, unsigned int);
94
95 /* Returns uid of statement STMT. */
96
97 static unsigned
98 get_stmt_uid (gimple stmt)
99 {
100 if (gimple_code (stmt) == GIMPLE_PHI)
101 return SSA_NAME_VERSION (gimple_phi_result (stmt))
102 + gimple_stmt_max_uid (cfun);
103
104 return gimple_uid (stmt);
105 }
106
107 /* Set bit UID in bitmaps GLOBAL and *LOCAL, creating *LOCAL as needed. */
108
109 static void
110 record_voperand_set (bitmap global, bitmap *local, unsigned int uid)
111 {
112 /* Lazily allocate the bitmap. Note that we do not get a notification
113 when the block local data structures die, so we allocate the local
114 bitmap backed by the GC system. */
115 if (*local == NULL)
116 *local = BITMAP_GGC_ALLOC ();
117
118 /* Set the bit in the local and global bitmaps. */
119 bitmap_set_bit (*local, uid);
120 bitmap_set_bit (global, uid);
121 }
122
123 /* Initialize block local data structures. */
124
125 static void
126 dse_initialize_block_local_data (struct dom_walk_data *walk_data,
127 basic_block bb ATTRIBUTE_UNUSED,
128 bool recycled)
129 {
130 struct dse_block_local_data *bd
131 = (struct dse_block_local_data *)
132 VEC_last (void_p, walk_data->block_data_stack);
133
134 /* If we are given a recycled block local data structure, ensure any
135 bitmap associated with the block is cleared. */
136 if (recycled)
137 {
138 if (bd->stores)
139 bitmap_clear (bd->stores);
140 }
141 }
142
/* A helper of dse_optimize_stmt.
   Given a GIMPLE_ASSIGN in STMT, walk forward along the chain of
   virtual definitions looking for a single later statement *USE_STMT
   that may prove STMT to be dead (i.e. a store that clobbers the
   memory STMT writes with no intervening statement that may read it).
   Return TRUE if the above conditions are met, otherwise FALSE.  */

static bool
dse_possible_dead_store_p (gimple stmt, gimple *use_stmt)
{
  gimple temp;
  unsigned cnt = 0;

  *use_stmt = NULL;

  /* Find the first dominated statement that clobbers (part of) the
     memory stmt stores to with no intermediate statement that may use
     part of the memory stmt stores.  That is, find a store that may
     prove stmt to be a dead store.  */
  temp = stmt;
  do
    {
      gimple use_stmt;
      imm_use_iterator ui;
      bool fail = false;
      tree defvar;

      /* Limit stmt walking to be linear in the number of possibly
         dead stores.  */
      if (++cnt > 256)
	return false;

      /* TEMP is either the candidate store or a virtual PHI on the
	 walked chain; pick up the virtual SSA name it defines.  */
      if (gimple_code (temp) == GIMPLE_PHI)
	defvar = PHI_RESULT (temp);
      else
	defvar = gimple_vdef (temp);
      temp = NULL;
      FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
	{
	  cnt++;

	  /* If we ever reach our DSE candidate stmt again fail.  We
	     cannot handle dead stores in loops.  */
	  if (use_stmt == stmt)
	    {
	      fail = true;
	      BREAK_FROM_IMM_USE_STMT (ui);
	    }
	  /* In simple cases we can look through PHI nodes, but we
	     have to be careful with loops and with memory references
	     containing operands that are also operands of PHI nodes.
	     See gcc.c-torture/execute/20051110-*.c.  */
	  else if (gimple_code (use_stmt) == GIMPLE_PHI)
	    {
	      if (temp
		  /* Make sure we are not in a loop latch block.  */
		  || gimple_bb (stmt) == gimple_bb (use_stmt)
		  || dominated_by_p (CDI_DOMINATORS,
				     gimple_bb (stmt), gimple_bb (use_stmt))
		  /* We can look through PHIs to regions post-dominating
		     the DSE candidate stmt.  */
		  || !dominated_by_p (CDI_POST_DOMINATORS,
				      gimple_bb (stmt), gimple_bb (use_stmt)))
		{
		  fail = true;
		  BREAK_FROM_IMM_USE_STMT (ui);
		}
	      temp = use_stmt;
	    }
	  /* If the statement is a use the store is not dead.  */
	  else if (ref_maybe_used_by_stmt_p (use_stmt,
					     gimple_assign_lhs (stmt)))
	    {
	      fail = true;
	      BREAK_FROM_IMM_USE_STMT (ui);
	    }
	  /* If this is a store, remember it or bail out if we have
	     multiple ones (they will be in different CFG parts then).  */
	  else if (gimple_vdef (use_stmt))
	    {
	      if (temp)
		{
		  fail = true;
		  BREAK_FROM_IMM_USE_STMT (ui);
		}
	      temp = use_stmt;
	    }
	}

      if (fail)
	return false;

      /* If we didn't find any definition this means the store is dead
         if it isn't a store to global reachable memory.  In this case
	 just pretend the stmt makes itself dead.  Otherwise fail.  */
      if (!temp)
	{
	  if (is_hidden_global_store (stmt))
	    return false;

	  temp = stmt;
	  break;
	}
    }
  /* We deliberately stop on clobbering statements and not only on
     killing ones to make walking cheaper.  Otherwise we can just
     continue walking until both stores have equal reference trees.  */
  while (!stmt_may_clobber_ref_p (temp, gimple_assign_lhs (stmt)));

  /* Only a plain assignment can be analyzed further by the caller;
     calls and other clobbering statements cannot prove STMT dead.  */
  if (!is_gimple_assign (temp))
    return false;

  *use_stmt = temp;

  return true;
}
257
258
/* Attempt to eliminate dead stores in the statement referenced by BSI.

   A dead store is a store into a memory location which will later be
   overwritten by another store without any intervening loads.  In this
   case the earlier store can be deleted.

   In our SSA + virtual operand world we use immediate uses of virtual
   operands to detect dead stores.  If a store's virtual definition
   is used precisely once by a later store to the same location which
   post dominates the first store, then the first store is dead.

   DSE_GD holds the global store bitmap and BD the current block's
   local bitmap; GSI references the statement to examine (and is used
   for removal if the store turns out dead).  */

static void
dse_optimize_stmt (struct dse_global_data *dse_gd,
		   struct dse_block_local_data *bd,
		   gimple_stmt_iterator gsi)
{
  gimple stmt = gsi_stmt (gsi);

  /* If this statement has no virtual defs, then there is nothing
     to do.  */
  if (!gimple_vdef (stmt))
    return;

  /* Bail out on calls to known functions: they are not candidate
     dead stores and are not recorded in the store table.  */
  if (is_gimple_call (stmt) && gimple_call_fndecl (stmt))
    return;

  /* Volatile accesses must be preserved exactly as written.  */
  if (gimple_has_volatile_ops (stmt))
    return;

  if (is_gimple_assign (stmt))
    {
      gimple use_stmt;

      /* Record this store so later (post-dominated) statements can
	 see it via the global bitmap.  */
      record_voperand_set (dse_gd->stores, &bd->stores, gimple_uid (stmt));

      if (!dse_possible_dead_store_p (stmt, &use_stmt))
	return;

      /* If we have precisely one immediate use at this point and the
	 stores are to the same memory location or there is a chain of
	 virtual uses from stmt and the stmt which stores to that same
	 memory location, then we may have found redundant store.  */
      if (bitmap_bit_p (dse_gd->stores, get_stmt_uid (use_stmt))
	  && (operand_equal_p (gimple_assign_lhs (stmt),
			       gimple_assign_lhs (use_stmt), 0)
	      || stmt_kills_ref_p (use_stmt, gimple_assign_lhs (stmt))))
	{
	  /* If use_stmt is or might be a nop assignment, e.g. for
	     struct { ... } S a, b, *p; ...
	     b = a; b = b;
	     or
	     b = a; b = *p; where p might be &b,
	     or
	     *p = a; *p = b; where p might be &b,
	     or
	     *p = *u; *p = *v; where p might be v, then USE_STMT
	     acts as a use as well as definition, so store in STMT
	     is not dead.  */
	  if (stmt != use_stmt
	      && !is_gimple_reg (gimple_assign_rhs1 (use_stmt))
	      && !is_gimple_min_invariant (gimple_assign_rhs1 (use_stmt))
	      /* ???  Should {} be invariant?  */
	      && gimple_assign_rhs_code (use_stmt) != CONSTRUCTOR
	      && refs_may_alias_p (gimple_assign_lhs (use_stmt),
				   gimple_assign_rhs1 (use_stmt)))
	    return;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "  Deleted dead store '");
	      print_gimple_stmt (dump_file, gsi_stmt (gsi), dump_flags, 0);
	      fprintf (dump_file, "'\n");
	    }

	  /* Then we need to fix the operand of the consuming stmt.  */
	  unlink_stmt_vdef (stmt);

	  /* Remove the dead store.  */
	  gsi_remove (&gsi, true);

	  /* And release any SSA_NAMEs set in this statement back to the
	     SSA_NAME manager.  */
	  release_defs (stmt);
	}
    }
}
347
348 /* Record that we have seen the PHIs at the start of BB which correspond
349 to virtual operands. */
350 static void
351 dse_record_phi (struct dse_global_data *dse_gd,
352 struct dse_block_local_data *bd,
353 gimple phi)
354 {
355 if (!is_gimple_reg (gimple_phi_result (phi)))
356 record_voperand_set (dse_gd->stores, &bd->stores, get_stmt_uid (phi));
357 }
358
359 static void
360 dse_enter_block (struct dom_walk_data *walk_data, basic_block bb)
361 {
362 struct dse_block_local_data *bd
363 = (struct dse_block_local_data *)
364 VEC_last (void_p, walk_data->block_data_stack);
365 struct dse_global_data *dse_gd
366 = (struct dse_global_data *) walk_data->global_data;
367 gimple_stmt_iterator gsi;
368
369 for (gsi = gsi_last (bb_seq (bb)); !gsi_end_p (gsi); gsi_prev (&gsi))
370 dse_optimize_stmt (dse_gd, bd, gsi);
371 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
372 dse_record_phi (dse_gd, bd, gsi_stmt (gsi));
373 }
374
375 static void
376 dse_leave_block (struct dom_walk_data *walk_data,
377 basic_block bb ATTRIBUTE_UNUSED)
378 {
379 struct dse_block_local_data *bd
380 = (struct dse_block_local_data *)
381 VEC_last (void_p, walk_data->block_data_stack);
382 struct dse_global_data *dse_gd
383 = (struct dse_global_data *) walk_data->global_data;
384 bitmap stores = dse_gd->stores;
385 unsigned int i;
386 bitmap_iterator bi;
387
388 /* Unwind the stores noted in this basic block. */
389 if (bd->stores)
390 EXECUTE_IF_SET_IN_BITMAP (bd->stores, 0, i, bi)
391 {
392 bitmap_clear_bit (stores, i);
393 }
394 }
395
396 /* Main entry point. */
397
398 static unsigned int
399 tree_ssa_dse (void)
400 {
401 struct dom_walk_data walk_data;
402 struct dse_global_data dse_gd;
403
404 renumber_gimple_stmt_uids ();
405
406 /* We might consider making this a property of each pass so that it
407 can be [re]computed on an as-needed basis. Particularly since
408 this pass could be seen as an extension of DCE which needs post
409 dominators. */
410 calculate_dominance_info (CDI_POST_DOMINATORS);
411 calculate_dominance_info (CDI_DOMINATORS);
412
413 /* Dead store elimination is fundamentally a walk of the post-dominator
414 tree and a backwards walk of statements within each block. */
415 walk_data.dom_direction = CDI_POST_DOMINATORS;
416 walk_data.initialize_block_local_data = dse_initialize_block_local_data;
417 walk_data.before_dom_children = dse_enter_block;
418 walk_data.after_dom_children = dse_leave_block;
419
420 walk_data.block_local_data_size = sizeof (struct dse_block_local_data);
421
422 /* This is the main hash table for the dead store elimination pass. */
423 dse_gd.stores = BITMAP_ALLOC (NULL);
424 walk_data.global_data = &dse_gd;
425
426 /* Initialize the dominator walker. */
427 init_walk_dominator_tree (&walk_data);
428
429 /* Recursively walk the dominator tree. */
430 walk_dominator_tree (&walk_data, EXIT_BLOCK_PTR);
431
432 /* Finalize the dominator walker. */
433 fini_walk_dominator_tree (&walk_data);
434
435 /* Release the main bitmap. */
436 BITMAP_FREE (dse_gd.stores);
437
438 /* For now, just wipe the post-dominator information. */
439 free_dominance_info (CDI_POST_DOMINATORS);
440 return 0;
441 }
442
443 static bool
444 gate_dse (void)
445 {
446 return flag_tree_dse != 0;
447 }
448
/* Pass descriptor for -ftree-dse, registered with the pass manager.  */

struct gimple_opt_pass pass_dse =
{
 {
  GIMPLE_PASS,
  "dse",			/* name */
  gate_dse,			/* gate */
  tree_ssa_dse,			/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_TREE_DSE,			/* tv_id */
  PROP_cfg | PROP_ssa,		/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_dump_func
    | TODO_ggc_collect
    | TODO_verify_ssa		/* todo_flags_finish */
 }
};
469