gcc/tree-ssa-dom.c
/* SSA Dominator optimizations for trees
   Copyright (C) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "ggc.h"
#include "basic-block.h"
#include "cfgloop.h"
#include "output.h"
#include "errors.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "domwalk.h"
#include "real.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"

/* This file implements optimizations on the dominator tree.  */


/* Structure for recording edge equivalences as well as any pending
   edge redirections during the dominator optimizer.

   Computing and storing the edge equivalences instead of creating
   them on-demand can save significant amounts of time, particularly
   for pathological cases involving switch statements.

   These structures live for a single iteration of the dominator
   optimizer in the edge's AUX field.  At the end of an iteration we
   free each of these structures and update the AUX field to point
   to any requested redirection target (the code for updating the
   CFG and SSA graph for edge redirection expects redirection edge
   targets to be in the AUX field for each edge).  */

struct edge_info
{
  /* If this edge creates a simple equivalence, the LHS and RHS of
     the equivalence will be stored here.  */
  tree lhs;
  tree rhs;

  /* Traversing an edge may also indicate one or more particular conditions
     are true or false.  The number of recorded conditions can vary, but
     can be determined by the condition's code.  So we have an array
     and its maximum index rather than use a varray.  */
  tree *cond_equivalences;
  unsigned int max_cond_equivalences;

  /* If we can thread this edge this field records the new target.  */
  edge redirection_target;
};
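
/* As an illustration (a sketch, not part of the original sources): for
   an edge taken when "if (a_1 < b_2)" is true, record_conditions below
   fills COND_EQUIVALENCES with expression/value pairs along the lines of

       [0] a_1 < b_2       [1] boolean_true_node
       [2] inverted cond   [3] boolean_false_node
       [4] a_1 <= b_2      [5] boolean_true_node   (implied by a_1 < b_2)
       ...

   i.e. even slots hold conditions and the following odd slots hold
   their known truth values.  */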


/* Hash table with expressions made available during the renaming process.
   When an assignment of the form X_i = EXPR is found, the statement is
   stored in this table.  If the same expression EXPR is later found on the
   RHS of another statement, it is replaced with X_i (thus performing
   global redundancy elimination).  Similarly as we pass through conditionals
   we record the conditional itself as having either a true or false value
   in this table.  */
static htab_t avail_exprs;

/* Stack of available expressions in AVAIL_EXPRS.  Each block pushes any
   expressions it enters into the hash table along with a marker entry
   (null).  When we finish processing the block, we pop off entries and
   remove the expressions from the global hash table until we hit the
   marker.  */
static VEC(tree,heap) *avail_exprs_stack;

/* Stack of statements we need to rescan during finalization for newly
   exposed variables.

   Statement rescanning must occur after the current block's available
   expressions are removed from AVAIL_EXPRS.  Else we may change the
   hash code for an expression and be unable to find/remove it from
   AVAIL_EXPRS.  */
static VEC(tree,heap) *stmts_to_rescan;

/* Structure for entries in the expression hash table.

   This requires more memory for the hash table entries, but allows us
   to avoid creating silly tree nodes and annotations for conditionals,
   and eliminates two global hash tables and two block-local varrays.

   It also allows us to reduce the number of hash table lookups we
   have to perform in lookup_avail_expr and finally it allows us to
   significantly reduce the number of calls into the hashing routine
   itself.  */

struct expr_hash_elt
{
  /* The value (lhs) of this expression.  */
  tree lhs;

  /* The expression (rhs) we want to record.  */
  tree rhs;

  /* The stmt pointer if this element corresponds to a statement.  */
  tree stmt;

  /* The hash value for RHS/ann.  */
  hashval_t hash;
};
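
/* For example (an illustrative sketch): after processing the statement
   "x_3 = a_1 + b_2", AVAIL_EXPRS holds an element with LHS = x_3,
   RHS = the expression a_1 + b_2, STMT = the MODIFY_EXPR itself and
   HASH computed from the RHS, so a later lookup of the same RHS
   yields x_3.  */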

/* Stack of dest,src pairs that need to be restored during finalization.

   A NULL entry is used to mark the end of pairs which need to be
   restored during finalization of this block.  */
static VEC(tree,heap) *const_and_copies_stack;
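
/* Illustration of the unwinding discipline (not from the original
   sources): if x_5 had no known value when the current block recorded
   x_5 = 7, the stack holds

     ... NULL_TREE (block marker) NULL_TREE (previous value) x_5

   and restore_vars_to_original_value pops x_5, then its previous
   value, and resets SSA_NAME_VALUE (x_5) accordingly.  */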

/* Bitmap of SSA_NAMEs known to have a nonzero value, even if we do not
   know their exact value.  */
static bitmap nonzero_vars;

/* Bitmap of blocks that are scheduled to be threaded through.  This
   is used to communicate with thread_through_blocks.  */
static bitmap threaded_blocks;

/* Stack of SSA_NAMEs which need their NONZERO_VARS property cleared
   when the current block is finalized.

   A NULL entry is used to mark the end of names needing their
   entry in NONZERO_VARS cleared during finalization of this block.  */
static VEC(tree,heap) *nonzero_vars_stack;

/* Track whether or not we have changed the control flow graph.  */
static bool cfg_altered;

/* Bitmap of blocks that have had EH statements cleaned.  We should
   remove their dead edges eventually.  */
static bitmap need_eh_cleanup;

/* Statistics for dominator optimizations.  */
struct opt_stats_d
{
  long num_stmts;
  long num_exprs_considered;
  long num_re;
  long num_const_prop;
  long num_copy_prop;
};

static struct opt_stats_d opt_stats;

/* Value range propagation record.  Each time we encounter a conditional
   of the form SSA_NAME COND CONST we create a new vrp_element to record
   how the condition affects the possible values SSA_NAME may have.

   Each record contains the condition tested (COND), and the range of
   values the variable may legitimately have if COND is true.  Note the
   range of values may be a smaller range than COND specifies if we have
   recorded other ranges for this variable.  Each record also contains the
   block in which the range was recorded for invalidation purposes.

   Note that the current known range is computed lazily.  This allows us
   to avoid the overhead of computing ranges which are never queried.

   When we encounter a conditional, we look for records which constrain
   the SSA_NAME used in the condition.  In some cases those records allow
   us to determine the condition's result at compile time.  In other cases
   they may allow us to simplify the condition.

   We also use value ranges to do things like transform signed div/mod
   operations into unsigned div/mod or to simplify ABS_EXPRs.

   Simple experiments have shown these optimizations to not be all that
   useful on switch statements (much to my surprise).  So switch statement
   optimizations are not performed.

   Note carefully we do not propagate information through each statement
   in the block.  I.e., if we know variable X has a value in the range
   [0, 25] and we encounter Y = X + 1, we do not track a value range
   for Y (which would be [1, 26] if we cared).  Similarly we do not
   constrain values as we encounter narrowing typecasts, etc.  */

struct vrp_element
{
  /* The highest and lowest values the variable in COND may contain when
     COND is true.  Note this may not necessarily be the same values
     tested by COND if the same variable was used in earlier conditionals.

     Note this is computed lazily and thus can be NULL indicating that
     the values have not been computed yet.  */
  tree low;
  tree high;

  /* The actual conditional we recorded.  This is needed since we compute
     ranges lazily.  */
  tree cond;

  /* The basic block where this record was created.  We use this to determine
     when to remove records.  */
  basic_block bb;
};
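
/* A small illustration (a sketch, not from the original sources):
   after traversing the true edge of

     if (x_1 > 10)

   a record with COND "x_1 > 10" is live for x_1; when it is first
   queried, its lazily computed range becomes roughly [11, TYPE_MAX]
   for an integral x_1.  A nested "if (x_1 > 20)" would add a second,
   tighter record for the inner block.  */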

/* A hash table holding value range records (VRP_ELEMENTs) for a given
   SSA_NAME.  We used to use a varray indexed by SSA_NAME_VERSION, but
   that gets awfully wasteful, particularly since the density of objects
   with useful information is very low.  */
static htab_t vrp_data;

/* An entry in the VRP_DATA hash table.  We record the variable and a
   varray of VRP_ELEMENT records associated with that variable.  */
struct vrp_hash_elt
{
  tree var;
  varray_type records;
};

/* Stack of SSA_NAMEs which had their values constrained by operations
   in this basic block.  During finalization of this block we use this
   list to determine which variables need their VRP data updated.

   A NULL entry marks the end of the SSA_NAMEs associated with this block.  */
static VEC(tree,heap) *vrp_variables_stack;

struct eq_expr_value
{
  tree src;
  tree dst;
};

/* Local functions.  */
static void optimize_stmt (struct dom_walk_data *,
                           basic_block bb,
                           block_stmt_iterator);
static tree lookup_avail_expr (tree, bool);
static hashval_t vrp_hash (const void *);
static int vrp_eq (const void *, const void *);
static hashval_t avail_expr_hash (const void *);
static hashval_t real_avail_expr_hash (const void *);
static int avail_expr_eq (const void *, const void *);
static void htab_statistics (FILE *, htab_t);
static void record_cond (tree, tree);
static void record_const_or_copy (tree, tree);
static void record_equality (tree, tree);
static tree update_rhs_and_lookup_avail_expr (tree, tree, bool);
static tree simplify_rhs_and_lookup_avail_expr (struct dom_walk_data *,
                                                tree, int);
static tree simplify_cond_and_lookup_avail_expr (tree, stmt_ann_t, int);
static tree simplify_switch_and_lookup_avail_expr (tree, int);
static tree find_equivalent_equality_comparison (tree);
static void record_range (tree, basic_block);
static bool extract_range_from_cond (tree, tree *, tree *, int *);
static void record_equivalences_from_phis (basic_block);
static void record_equivalences_from_incoming_edge (basic_block);
static bool eliminate_redundant_computations (struct dom_walk_data *,
                                              tree, stmt_ann_t);
static void record_equivalences_from_stmt (tree, int, stmt_ann_t);
static void thread_across_edge (struct dom_walk_data *, edge);
static void dom_opt_finalize_block (struct dom_walk_data *, basic_block);
static void dom_opt_initialize_block (struct dom_walk_data *, basic_block);
static void propagate_to_outgoing_edges (struct dom_walk_data *, basic_block);
static void remove_local_expressions_from_table (void);
static void restore_vars_to_original_value (void);
static edge single_incoming_edge_ignoring_loop_edges (basic_block);
static void restore_nonzero_vars_to_original_value (void);
static inline bool unsafe_associative_fp_binop (tree);


/* Local version of fold that doesn't introduce cruft.  */

static tree
local_fold (tree t)
{
  t = fold (t);

  /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR that
     may have been added by fold, and "useless" type conversions that might
     now be apparent due to propagation.  */
  STRIP_USELESS_TYPE_CONVERSION (t);

  return t;
}

/* Allocate an EDGE_INFO for edge E and attach it to E.
   Return the new EDGE_INFO structure.  */

static struct edge_info *
allocate_edge_info (edge e)
{
  struct edge_info *edge_info;

  edge_info = xcalloc (1, sizeof (struct edge_info));

  e->aux = edge_info;
  return edge_info;
}

/* Free all EDGE_INFO structures associated with edges in the CFG.
   If a particular edge can be threaded, copy the redirection
   target from the EDGE_INFO structure into the edge's AUX field
   as required by code to update the CFG and SSA graph for
   jump threading.  */

static void
free_all_edge_infos (void)
{
  basic_block bb;
  edge_iterator ei;
  edge e;

  FOR_EACH_BB (bb)
    {
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          struct edge_info *edge_info = e->aux;

          if (edge_info)
            {
              e->aux = edge_info->redirection_target;
              if (edge_info->cond_equivalences)
                free (edge_info->cond_equivalences);
              free (edge_info);
            }
        }
    }
}

/* Jump threading, redundancy elimination and const/copy propagation.

   This pass may expose new symbols that need to be renamed into SSA.  For
   every new symbol exposed, its corresponding bit will be set in
   VARS_TO_RENAME.  */

static void
tree_ssa_dominator_optimize (void)
{
  struct dom_walk_data walk_data;
  unsigned int i;
  struct loops loops_info;

  memset (&opt_stats, 0, sizeof (opt_stats));

  /* Create our hash tables.  */
  avail_exprs = htab_create (1024, real_avail_expr_hash, avail_expr_eq, free);
  vrp_data = htab_create (ceil_log2 (num_ssa_names), vrp_hash, vrp_eq, free);
  avail_exprs_stack = VEC_alloc (tree, heap, 20);
  const_and_copies_stack = VEC_alloc (tree, heap, 20);
  nonzero_vars_stack = VEC_alloc (tree, heap, 20);
  vrp_variables_stack = VEC_alloc (tree, heap, 20);
  stmts_to_rescan = VEC_alloc (tree, heap, 20);
  nonzero_vars = BITMAP_ALLOC (NULL);
  threaded_blocks = BITMAP_ALLOC (NULL);
  need_eh_cleanup = BITMAP_ALLOC (NULL);

  /* Set up callbacks for the generic dominator tree walker.  */
  walk_data.walk_stmts_backward = false;
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.initialize_block_local_data = NULL;
  walk_data.before_dom_children_before_stmts = dom_opt_initialize_block;
  walk_data.before_dom_children_walk_stmts = optimize_stmt;
  walk_data.before_dom_children_after_stmts = propagate_to_outgoing_edges;
  walk_data.after_dom_children_before_stmts = NULL;
  walk_data.after_dom_children_walk_stmts = NULL;
  walk_data.after_dom_children_after_stmts = dom_opt_finalize_block;
  /* Right now we only attach a dummy COND_EXPR to the global data pointer.
     When we attach more stuff we'll need to fill this out with a real
     structure.  */
  walk_data.global_data = NULL;
  walk_data.block_local_data_size = 0;
  walk_data.interesting_blocks = NULL;

  /* Now initialize the dominator walker.  */
  init_walk_dominator_tree (&walk_data);

  calculate_dominance_info (CDI_DOMINATORS);

  /* We need to know which edges exit loops so that we can
     aggressively thread through loop headers to an exit
     edge.  */
  flow_loops_find (&loops_info);
  mark_loop_exit_edges (&loops_info);
  flow_loops_free (&loops_info);

  /* Clean up the CFG so that any forwarder blocks created by loop
     canonicalization are removed.  */
  cleanup_tree_cfg ();
  calculate_dominance_info (CDI_DOMINATORS);

  /* If we prove certain blocks are unreachable, then we want to
     repeat the dominator optimization process as PHI nodes may
     have turned into copies which allows better propagation of
     values.  So we repeat until we do not identify any new unreachable
     blocks.  */
  do
    {
      /* Optimize the dominator tree.  */
      cfg_altered = false;

      /* We need accurate information regarding back edges in the CFG
         for jump threading.  */
      mark_dfs_back_edges ();

      /* Recursively walk the dominator tree optimizing statements.  */
      walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);

      {
        block_stmt_iterator bsi;
        basic_block bb;
        FOR_EACH_BB (bb)
          {
            for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
              {
                update_stmt_if_modified (bsi_stmt (bsi));
              }
          }
      }

      /* If we exposed any new variables, go ahead and put them into
         SSA form now, before we handle jump threading.  This simplifies
         interactions between rewriting of _DECL nodes into SSA form
         and rewriting SSA_NAME nodes into SSA form after block
         duplication and CFG manipulation.  */
      update_ssa (TODO_update_ssa);

      free_all_edge_infos ();

      /* Thread jumps, creating duplicate blocks as needed.  */
      cfg_altered |= thread_through_all_blocks (threaded_blocks);

      /* Removal of statements may make some EH edges dead.  Purge
         such edges from the CFG as needed.  */
      if (!bitmap_empty_p (need_eh_cleanup))
        {
          cfg_altered |= tree_purge_all_dead_eh_edges (need_eh_cleanup);
          bitmap_zero (need_eh_cleanup);
        }

      if (cfg_altered)
        free_dominance_info (CDI_DOMINATORS);

      cfg_altered = cleanup_tree_cfg ();

      if (rediscover_loops_after_threading)
        {
          /* Rerun basic loop analysis to discover any newly
             created loops and update the set of exit edges.  */
          rediscover_loops_after_threading = false;
          flow_loops_find (&loops_info);
          mark_loop_exit_edges (&loops_info);
          flow_loops_free (&loops_info);

          /* Remove any forwarder blocks inserted by loop
             header canonicalization.  */
          cleanup_tree_cfg ();
        }

      calculate_dominance_info (CDI_DOMINATORS);

      update_ssa (TODO_update_ssa);

      /* Reinitialize the various tables.  */
      bitmap_clear (nonzero_vars);
      bitmap_clear (threaded_blocks);
      htab_empty (avail_exprs);
      htab_empty (vrp_data);

      /* Finally, remove everything except invariants in SSA_NAME_VALUE.

         This must be done before we iterate as we might have a
         reference to an SSA_NAME which was removed by the call to
         update_ssa.

         Long term we will be able to let everything in SSA_NAME_VALUE
         persist.  However, for now, we know this is the safe thing to do.  */
      for (i = 0; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          tree value;

          if (!name)
            continue;

          value = SSA_NAME_VALUE (name);
          if (value && !is_gimple_min_invariant (value))
            SSA_NAME_VALUE (name) = NULL;
        }
    }
  while (optimize > 1 && cfg_altered);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & TDF_STATS))
    dump_dominator_optimization_stats (dump_file);

  /* We emptied the hash table earlier, now delete it completely.  */
  htab_delete (avail_exprs);
  htab_delete (vrp_data);

  /* It is not necessary to clear CURRDEFS, REDIRECTION_EDGES, VRP_DATA,
     CONST_AND_COPIES, and NONZERO_VARS as they all get cleared at the bottom
     of the do-while loop above.  */

  /* And finalize the dominator walker.  */
  fini_walk_dominator_tree (&walk_data);

  /* Free the bitmaps.  */
  BITMAP_FREE (nonzero_vars);
  BITMAP_FREE (threaded_blocks);
  BITMAP_FREE (need_eh_cleanup);

  VEC_free (tree, heap, avail_exprs_stack);
  VEC_free (tree, heap, const_and_copies_stack);
  VEC_free (tree, heap, nonzero_vars_stack);
  VEC_free (tree, heap, vrp_variables_stack);
  VEC_free (tree, heap, stmts_to_rescan);
}

static bool
gate_dominator (void)
{
  return flag_tree_dom != 0;
}

struct tree_opt_pass pass_dominator =
{
  "dom",                                /* name */
  gate_dominator,                       /* gate */
  tree_ssa_dominator_optimize,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_SSA_DOMINATOR_OPTS,           /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func
    | TODO_update_ssa
    | TODO_verify_ssa,                  /* todo_flags_finish */
  0                                     /* letter */
};


/* We are exiting E->src, see if E->dest ends with a conditional
   jump which has a known value when reached via E.

   Special care is necessary if E is a back edge in the CFG as we
   will have already recorded equivalences for E->dest into our
   various tables, including the result of the conditional at
   the end of E->dest.  Threading opportunities are severely
   limited in that case to avoid short-circuiting the loop
   incorrectly.

   Note it is quite common for the first block inside a loop to
   end with a conditional which is either always true or always
   false when reached via the loop backedge.  Thus we do not want
   to blindly disable threading across a loop backedge.  */
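
/* A minimal example of the transformation (illustrative only):

     bb1: if (x_1 == 0) goto bb2; else goto bb4;
     bb2: if (x_1 == 0) goto bb3; else goto bb4;

   When we traverse the edge bb1->bb2 we know x_1 == 0 holds, so the
   conditional ending bb2 must transfer control to bb3; the edge
   bb1->bb2 can therefore be marked for redirection ("threading")
   straight to bb3.  */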

static void
thread_across_edge (struct dom_walk_data *walk_data, edge e)
{
  block_stmt_iterator bsi;
  tree stmt = NULL;
  tree phi;

  /* If E->dest does not end with a conditional, then there is
     nothing to do.  */
  bsi = bsi_last (e->dest);
  if (bsi_end_p (bsi)
      || ! bsi_stmt (bsi)
      || (TREE_CODE (bsi_stmt (bsi)) != COND_EXPR
          && TREE_CODE (bsi_stmt (bsi)) != GOTO_EXPR
          && TREE_CODE (bsi_stmt (bsi)) != SWITCH_EXPR))
    return;

  /* The basic idea here is to use whatever knowledge we have
     from our dominator walk to simplify statements in E->dest,
     with the ultimate goal being to simplify the conditional
     at the end of E->dest.

     Note that we must undo any changes we make to the underlying
     statements as the simplifications we are making are control
     flow sensitive (i.e., the simplifications are valid when we
     traverse E, but may not be valid on other paths to E->dest).  */

  /* Each PHI creates a temporary equivalence, record them.  Again
     these are context sensitive equivalences and will be removed
     by our caller.  */
  for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
    {
      tree src = PHI_ARG_DEF_FROM_EDGE (phi, e);
      tree dst = PHI_RESULT (phi);

      /* If the desired argument is not the same as this PHI's result
         and it is set by a PHI in E->dest, then we cannot thread
         through E->dest.  */
      if (src != dst
          && TREE_CODE (src) == SSA_NAME
          && TREE_CODE (SSA_NAME_DEF_STMT (src)) == PHI_NODE
          && bb_for_stmt (SSA_NAME_DEF_STMT (src)) == e->dest)
        return;

      record_const_or_copy (dst, src);
    }

  /* Try to simplify each statement in E->dest, ultimately leading to
     a simplification of the COND_EXPR at the end of E->dest.

     We might consider marking just those statements which ultimately
     feed the COND_EXPR.  It's not clear if the overhead of bookkeeping
     would be recovered by trying to simplify fewer statements.

     If we are able to simplify a statement into the form
     SSA_NAME = (SSA_NAME | gimple invariant), then we can record
     a context sensitive equivalence which may help us simplify
     later statements in E->dest.

     Failure to simplify into the form above merely means that the
     statement provides no equivalences to help simplify later
     statements.  This does not prevent threading through E->dest.  */
  for (bsi = bsi_start (e->dest); ! bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree cached_lhs;

      stmt = bsi_stmt (bsi);

      /* Ignore empty statements and labels.  */
      if (IS_EMPTY_STMT (stmt) || TREE_CODE (stmt) == LABEL_EXPR)
        continue;

      /* Safely handle threading across loop backedges.  This is
         overly conservative, but still allows us to capture the
         majority of the cases where we can thread across a loop
         backedge.  */
      if ((e->flags & EDGE_DFS_BACK) != 0
          && TREE_CODE (stmt) != COND_EXPR
          && TREE_CODE (stmt) != SWITCH_EXPR)
        return;

      /* If the statement has volatile operands, then we assume we
         cannot thread through this block.  This is overly
         conservative in some ways.  */
      if (TREE_CODE (stmt) == ASM_EXPR && ASM_VOLATILE_P (stmt))
        return;

      /* If this is not a MODIFY_EXPR which sets an SSA_NAME to a new
         value, then do not try to simplify this statement as it will
         not simplify in any way that is helpful for jump threading.  */
      if (TREE_CODE (stmt) != MODIFY_EXPR
          || TREE_CODE (TREE_OPERAND (stmt, 0)) != SSA_NAME)
        continue;

      /* At this point we have a statement which assigns an RHS to an
         SSA_VAR on the LHS.  We want to try and simplify this statement
         to expose more context sensitive equivalences which in turn may
         allow us to simplify the condition at the end of the loop.  */
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == SSA_NAME)
        cached_lhs = TREE_OPERAND (stmt, 1);
      else
        {
          /* Copy the operands.  */
          tree *copy;
          ssa_op_iter iter;
          use_operand_p use_p;
          unsigned int num, i = 0;

          num = NUM_SSA_OPERANDS (stmt, (SSA_OP_USE | SSA_OP_VUSE));
          copy = xcalloc (num, sizeof (tree));

          /* Make a copy of the uses & vuses into USES_COPY, then cprop into
             the operands.  */
          FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
            {
              tree tmp = NULL;
              tree use = USE_FROM_PTR (use_p);

              copy[i++] = use;
              if (TREE_CODE (use) == SSA_NAME)
                tmp = SSA_NAME_VALUE (use);
              if (tmp && TREE_CODE (tmp) != VALUE_HANDLE)
                SET_USE (use_p, tmp);
            }

          /* Try to fold/lookup the new expression.  Inserting the
             expression into the hash table is unlikely to help
             simplify anything later, so just query the hashtable.  */
          cached_lhs = fold (TREE_OPERAND (stmt, 1));
          if (TREE_CODE (cached_lhs) != SSA_NAME
              && !is_gimple_min_invariant (cached_lhs))
            cached_lhs = lookup_avail_expr (stmt, false);

          /* Restore the statement's original uses/defs.  */
          i = 0;
          FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
            SET_USE (use_p, copy[i++]);

          free (copy);
        }

      /* Record the context sensitive equivalence if we were able
         to simplify this statement.  */
      if (cached_lhs
          && (TREE_CODE (cached_lhs) == SSA_NAME
              || is_gimple_min_invariant (cached_lhs)))
        record_const_or_copy (TREE_OPERAND (stmt, 0), cached_lhs);
    }

  /* If we stopped at a COND_EXPR, GOTO_EXPR or SWITCH_EXPR, see if we
     know which arm will be taken.  */
  if (stmt
      && (TREE_CODE (stmt) == COND_EXPR
          || TREE_CODE (stmt) == GOTO_EXPR
          || TREE_CODE (stmt) == SWITCH_EXPR))
    {
      tree cond, cached_lhs;

      /* Now temporarily cprop the operands and try to find the resulting
         expression in the hash tables.  */
      if (TREE_CODE (stmt) == COND_EXPR)
        cond = COND_EXPR_COND (stmt);
      else if (TREE_CODE (stmt) == GOTO_EXPR)
        cond = GOTO_DESTINATION (stmt);
      else
        cond = SWITCH_COND (stmt);

      if (COMPARISON_CLASS_P (cond))
        {
          tree dummy_cond, op0, op1;
          enum tree_code cond_code;

          op0 = TREE_OPERAND (cond, 0);
          op1 = TREE_OPERAND (cond, 1);
          cond_code = TREE_CODE (cond);

          /* Get the current value of both operands.  */
          if (TREE_CODE (op0) == SSA_NAME)
            {
              tree tmp = SSA_NAME_VALUE (op0);
              if (tmp && TREE_CODE (tmp) != VALUE_HANDLE)
                op0 = tmp;
            }

          if (TREE_CODE (op1) == SSA_NAME)
            {
              tree tmp = SSA_NAME_VALUE (op1);
              if (tmp && TREE_CODE (tmp) != VALUE_HANDLE)
                op1 = tmp;
            }

          /* Stuff the operator and operands into our dummy conditional
             expression, creating the dummy conditional if necessary.  */
          dummy_cond = walk_data->global_data;
          if (! dummy_cond)
            {
              dummy_cond = build (cond_code, boolean_type_node, op0, op1);
              dummy_cond = build (COND_EXPR, void_type_node,
                                  dummy_cond, NULL, NULL);
              walk_data->global_data = dummy_cond;
            }
          else
            {
              TREE_SET_CODE (COND_EXPR_COND (dummy_cond), cond_code);
              TREE_OPERAND (COND_EXPR_COND (dummy_cond), 0) = op0;
              TREE_OPERAND (COND_EXPR_COND (dummy_cond), 1) = op1;
            }

          /* If the conditional folds to an invariant, then we are done,
             otherwise look it up in the hash tables.  */
          cached_lhs = local_fold (COND_EXPR_COND (dummy_cond));
          if (! is_gimple_min_invariant (cached_lhs))
            {
              cached_lhs = lookup_avail_expr (dummy_cond, false);
              if (!cached_lhs || ! is_gimple_min_invariant (cached_lhs))
                cached_lhs = simplify_cond_and_lookup_avail_expr (dummy_cond,
                                                                  NULL,
                                                                  false);
            }
        }
      /* We can have conditionals which just test the state of a
         variable rather than use a relational operator.  These are
         simpler to handle.  */
      else if (TREE_CODE (cond) == SSA_NAME)
        {
          cached_lhs = cond;
          cached_lhs = SSA_NAME_VALUE (cached_lhs);
          if (cached_lhs && ! is_gimple_min_invariant (cached_lhs))
            cached_lhs = NULL;
        }
      else
        cached_lhs = lookup_avail_expr (stmt, false);

      if (cached_lhs)
        {
          edge taken_edge = find_taken_edge (e->dest, cached_lhs);
          basic_block dest = (taken_edge ? taken_edge->dest : NULL);

          if (dest == e->dest)
            return;

          /* If we have a known destination for the conditional, then
             we can perform this optimization, which saves at least one
             conditional jump each time it applies since we get to
             bypass the conditional at our original destination.  */
          if (dest)
            {
              struct edge_info *edge_info;

              update_bb_profile_for_threading (e->dest, EDGE_FREQUENCY (e),
                                               e->count, taken_edge);
              if (e->aux)
                edge_info = e->aux;
              else
                edge_info = allocate_edge_info (e);
              edge_info->redirection_target = taken_edge;
              bitmap_set_bit (threaded_blocks, e->dest->index);
            }
        }
    }
}


/* Initialize local stacks for this optimizer and record equivalences
   upon entry to BB.  Equivalences can come from the edge traversed to
   reach BB or they may come from PHI nodes at the start of BB.  */

static void
dom_opt_initialize_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
                          basic_block bb)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n\nOptimizing block #%d\n\n", bb->index);

  /* Push a marker on the stacks of local information so that we know how
     far to unwind when we finalize this block.  */
  VEC_safe_push (tree, heap, avail_exprs_stack, NULL_TREE);
  VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);
  VEC_safe_push (tree, heap, nonzero_vars_stack, NULL_TREE);
  VEC_safe_push (tree, heap, vrp_variables_stack, NULL_TREE);

  record_equivalences_from_incoming_edge (bb);

  /* PHI nodes can create equivalences too.  */
  record_equivalences_from_phis (bb);
}

/* Given an expression EXPR (a relational expression or a statement),
   initialize the hash table element pointed to by ELEMENT.  */

static void
initialize_hash_element (tree expr, tree lhs, struct expr_hash_elt *element)
{
  /* Hash table elements may be based on conditional expressions or statements.

     For the former case, we have no annotation and we want to hash the
     conditional expression.  In the latter case we have an annotation and
     we want to record the expression the statement evaluates.  */
  if (COMPARISON_CLASS_P (expr) || TREE_CODE (expr) == TRUTH_NOT_EXPR)
    {
      element->stmt = NULL;
      element->rhs = expr;
    }
  else if (TREE_CODE (expr) == COND_EXPR)
    {
      element->stmt = expr;
      element->rhs = COND_EXPR_COND (expr);
    }
  else if (TREE_CODE (expr) == SWITCH_EXPR)
    {
      element->stmt = expr;
      element->rhs = SWITCH_COND (expr);
    }
  else if (TREE_CODE (expr) == RETURN_EXPR && TREE_OPERAND (expr, 0))
    {
      element->stmt = expr;
      element->rhs = TREE_OPERAND (TREE_OPERAND (expr, 0), 1);
    }
  else if (TREE_CODE (expr) == GOTO_EXPR)
    {
      element->stmt = expr;
      element->rhs = GOTO_DESTINATION (expr);
    }
  else
    {
      element->stmt = expr;
      element->rhs = TREE_OPERAND (expr, 1);
    }

  element->lhs = lhs;
  element->hash = avail_expr_hash (element);
}

/* Remove all the expressions made available in the current block from
   AVAIL_EXPRS, popping entries off AVAIL_EXPRS_STACK until the NULL
   marker for this block is reached.  */

static void
remove_local_expressions_from_table (void)
{
  /* Remove all the expressions made available in this block.  */
  while (VEC_length (tree, avail_exprs_stack) > 0)
    {
      struct expr_hash_elt element;
      tree expr = VEC_pop (tree, avail_exprs_stack);

      if (expr == NULL_TREE)
        break;

      initialize_hash_element (expr, NULL, &element);
      htab_remove_elt_with_hash (avail_exprs, &element, element.hash);
    }
}

/* Use the SSA_NAMEs on NONZERO_VARS_STACK to restore NONZERO_VARS to
   its original state, stopping when the NULL marker for this block is
   reached.  */

static void
restore_nonzero_vars_to_original_value (void)
{
  while (VEC_length (tree, nonzero_vars_stack) > 0)
    {
      tree name = VEC_pop (tree, nonzero_vars_stack);

      if (name == NULL)
        break;

      bitmap_clear_bit (nonzero_vars, SSA_NAME_VERSION (name));
    }
}

/* Use the source/dest pairs in CONST_AND_COPIES_STACK to restore
   CONST_AND_COPIES to its original state, stopping when we hit a
   NULL marker.  */

static void
restore_vars_to_original_value (void)
{
  while (VEC_length (tree, const_and_copies_stack) > 0)
    {
      tree prev_value, dest;

      dest = VEC_pop (tree, const_and_copies_stack);

      if (dest == NULL)
        break;

      prev_value = VEC_pop (tree, const_and_copies_stack);
      SSA_NAME_VALUE (dest) = prev_value;
    }
}

/* We have finished processing the dominator children of BB, perform
   any finalization actions in preparation for leaving this node in
   the dominator tree.  */

static void
dom_opt_finalize_block (struct dom_walk_data *walk_data, basic_block bb)
{
  tree last;

  /* If we are at a leaf node in the dominator tree, see if we can thread
     the edge from BB through its successor.

     Do this before we remove entries from our equivalence tables.  */
  if (single_succ_p (bb)
      && (single_succ_edge (bb)->flags & EDGE_ABNORMAL) == 0
      && (get_immediate_dominator (CDI_DOMINATORS, single_succ (bb)) != bb
          || phi_nodes (single_succ (bb))))
    {
      thread_across_edge (walk_data, single_succ_edge (bb));
    }
  else if ((last = last_stmt (bb))
           && TREE_CODE (last) == COND_EXPR
           && (COMPARISON_CLASS_P (COND_EXPR_COND (last))
               || TREE_CODE (COND_EXPR_COND (last)) == SSA_NAME)
           && EDGE_COUNT (bb->succs) == 2
           && (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL) == 0
           && (EDGE_SUCC (bb, 1)->flags & EDGE_ABNORMAL) == 0)
    {
      edge true_edge, false_edge;

      extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

      /* If the THEN arm is the end of a dominator tree or has PHI nodes,
         then try to thread through its edge.  */
      if (get_immediate_dominator (CDI_DOMINATORS, true_edge->dest) != bb
          || phi_nodes (true_edge->dest))
        {
          struct edge_info *edge_info;
          unsigned int i;

          /* Push a marker onto the available expression stack so that we
             unwind any expressions related to the TRUE arm before processing
             the false arm below.  */
          VEC_safe_push (tree, heap, avail_exprs_stack, NULL_TREE);
          VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);

          edge_info = true_edge->aux;

          /* If we have info associated with this edge, record it into
             our equivalency tables.  */
          if (edge_info)
            {
              tree *cond_equivalences = edge_info->cond_equivalences;
              tree lhs = edge_info->lhs;
              tree rhs = edge_info->rhs;

              /* If we have a simple NAME = VALUE equivalence, record it.  */
              if (lhs && TREE_CODE (lhs) == SSA_NAME)
                record_const_or_copy (lhs, rhs);

              /* If we have 0 = COND or 1 = COND equivalences, record them
                 into our expression hash tables.  */
              if (cond_equivalences)
                for (i = 0; i < edge_info->max_cond_equivalences; i += 2)
                  {
                    tree expr = cond_equivalences[i];
                    tree value = cond_equivalences[i + 1];

                    record_cond (expr, value);
                  }
            }

          /* Now thread the edge.  */
          thread_across_edge (walk_data, true_edge);

          /* And restore the various tables to their state before
             we threaded this edge.  */
          remove_local_expressions_from_table ();
          restore_vars_to_original_value ();
        }

      /* Similarly for the ELSE arm.  */
      if (get_immediate_dominator (CDI_DOMINATORS, false_edge->dest) != bb
          || phi_nodes (false_edge->dest))
        {
          struct edge_info *edge_info;
          unsigned int i;

          edge_info = false_edge->aux;

          /* If we have info associated with this edge, record it into
             our equivalency tables.  */
          if (edge_info)
            {
              tree *cond_equivalences = edge_info->cond_equivalences;
              tree lhs = edge_info->lhs;
              tree rhs = edge_info->rhs;

              /* If we have a simple NAME = VALUE equivalence, record it.  */
              if (lhs && TREE_CODE (lhs) == SSA_NAME)
                record_const_or_copy (lhs, rhs);

              /* If we have 0 = COND or 1 = COND equivalences, record them
                 into our expression hash tables.  */
              if (cond_equivalences)
                for (i = 0; i < edge_info->max_cond_equivalences; i += 2)
                  {
                    tree expr = cond_equivalences[i];
                    tree value = cond_equivalences[i + 1];

                    record_cond (expr, value);
                  }
            }

          thread_across_edge (walk_data, false_edge);

          /* No need to remove local expressions from our tables
             or restore vars to their original value as that will
             be done immediately below.  */
        }
    }
  remove_local_expressions_from_table ();
  restore_nonzero_vars_to_original_value ();
  restore_vars_to_original_value ();

  /* Remove VRP records associated with this basic block.  They are no
     longer valid.

     To be efficient, we note which variables have had their values
     constrained in this block.  So walk over each variable on
     VRP_VARIABLES_STACK.  */
  while (VEC_length (tree, vrp_variables_stack) > 0)
    {
      tree var = VEC_pop (tree, vrp_variables_stack);
      struct vrp_hash_elt vrp_hash_elt, *vrp_hash_elt_p;
      void **slot;

      /* Each variable has a stack of value range records.  We want to
         invalidate those associated with our basic block.  So we walk
         the array backwards popping off records associated with our
         block.  Once we hit a record not associated with our block
         we are done.  */
      varray_type var_vrp_records;

      if (var == NULL)
        break;

      vrp_hash_elt.var = var;
      vrp_hash_elt.records = NULL;

      slot = htab_find_slot (vrp_data, &vrp_hash_elt, NO_INSERT);

      vrp_hash_elt_p = (struct vrp_hash_elt *) *slot;
      var_vrp_records = vrp_hash_elt_p->records;

      while (VARRAY_ACTIVE_SIZE (var_vrp_records) > 0)
        {
          struct vrp_element *element
            = (struct vrp_element *) VARRAY_TOP_GENERIC_PTR (var_vrp_records);

          if (element->bb != bb)
            break;

          VARRAY_POP (var_vrp_records);
        }
    }

  /* If we queued any statements to rescan in this block, then
     go ahead and rescan them now.  */
  while (VEC_length (tree, stmts_to_rescan) > 0)
    {
      tree stmt = VEC_last (tree, stmts_to_rescan);
      basic_block stmt_bb = bb_for_stmt (stmt);

      if (stmt_bb != bb)
        break;

      VEC_pop (tree, stmts_to_rescan);
      mark_new_vars_to_rename (stmt);
    }
}

/* PHI nodes can create equivalences too.

   Ignoring any alternatives which are the same as the result, if
   all the alternatives are equal, then the PHI node creates an
   equivalence.

   Additionally, if all the PHI alternatives are known to have a nonzero
   value, then the result of this PHI is known to have a nonzero value,
   even if we do not know its exact value.  */

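/* For example (an illustrative sketch): given

     x_2 = PHI <y_4 (bb2), y_4 (bb3), x_2 (bb4)>

   every alternative other than the result itself is y_4, so x_2 is
   equivalent to y_4 and SSA_NAME_VALUE (x_2) may be set to y_4.  */
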
static void
record_equivalences_from_phis (basic_block bb)
{
  tree phi;

  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
    {
      tree lhs = PHI_RESULT (phi);
      tree rhs = NULL;
      int i;

      for (i = 0; i < PHI_NUM_ARGS (phi); i++)
        {
          tree t = PHI_ARG_DEF (phi, i);

          /* Ignore alternatives which are the same as our LHS.  Since
             LHS is a PHI_RESULT, it is known to be an SSA_NAME, so we
             can simply compare pointers.  */
          if (lhs == t)
            continue;

          /* If we have not processed an alternative yet, then set
             RHS to this alternative.  */
          if (rhs == NULL)
            rhs = t;
          /* If we have processed an alternative (stored in RHS), then
             see if it is equal to this one.  If it isn't, then stop
             the search.  */
          else if (! operand_equal_for_phi_arg_p (rhs, t))
            break;
        }

      /* If we had no interesting alternatives, then all the RHS alternatives
         must have been the same as LHS.  */
      if (!rhs)
        rhs = lhs;

      /* If we managed to iterate through each PHI alternative without
         breaking out of the loop, then we have a PHI which may create
         a useful equivalence.  We do not need to record unwind data for
         this, since this is a true assignment and not an equivalence
         inferred from a comparison.  All uses of this ssa name are dominated
         by this assignment, so unwinding just costs time and space.  */
      if (i == PHI_NUM_ARGS (phi)
          && may_propagate_copy (lhs, rhs))
        SSA_NAME_VALUE (lhs) = rhs;

      /* Now see if we know anything about the nonzero property for the
         result of this PHI.  */
      for (i = 0; i < PHI_NUM_ARGS (phi); i++)
        {
          if (!PHI_ARG_NONZERO (phi, i))
            break;
        }

      if (i == PHI_NUM_ARGS (phi))
        bitmap_set_bit (nonzero_vars, SSA_NAME_VERSION (PHI_RESULT (phi)));
    }
}

/* Ignoring loop backedges, if BB has precisely one incoming edge then
   return that edge.  Otherwise return NULL.  */
static edge
single_incoming_edge_ignoring_loop_edges (basic_block bb)
{
  edge retval = NULL;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      /* A loop back edge can be identified by the destination of
         the edge dominating the source of the edge.  */
      if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
        continue;

      /* If we have already seen a non-loop edge, then we must have
         multiple incoming non-loop edges and thus we return NULL.  */
      if (retval)
        return NULL;

      /* This is the first non-loop incoming edge we have found.  Record
         it.  */
      retval = e;
    }

  return retval;
}

/* Record any equivalences created by the incoming edge to BB.  If BB
   has more than one incoming edge, then no equivalence is created.  */

static void
record_equivalences_from_incoming_edge (basic_block bb)
{
  edge e;
  basic_block parent;
  struct edge_info *edge_info;

  /* If our parent block ended with a control statement, then we may be
     able to record some equivalences based on which outgoing edge from
     the parent was followed.  */
  parent = get_immediate_dominator (CDI_DOMINATORS, bb);

  e = single_incoming_edge_ignoring_loop_edges (bb);

  /* If we had a single incoming edge from our parent block, then enter
     any data associated with the edge into our tables.  */
  if (e && e->src == parent)
    {
      unsigned int i;

      edge_info = e->aux;

      if (edge_info)
        {
          tree lhs = edge_info->lhs;
          tree rhs = edge_info->rhs;
          tree *cond_equivalences = edge_info->cond_equivalences;

          if (lhs)
            record_equality (lhs, rhs);

          if (cond_equivalences)
            {
              bool recorded_range = false;
              for (i = 0; i < edge_info->max_cond_equivalences; i += 2)
                {
                  tree expr = cond_equivalences[i];
                  tree value = cond_equivalences[i + 1];

                  record_cond (expr, value);

                  /* For the first true equivalence, record range
                     information.  We only do this for the first
                     true equivalence as it should dominate any
                     later true equivalences.  */
                  if (! recorded_range
                      && COMPARISON_CLASS_P (expr)
                      && value == boolean_true_node
                      && TREE_CONSTANT (TREE_OPERAND (expr, 1)))
                    {
                      record_range (expr, bb);
                      recorded_range = true;
                    }
                }
            }
        }
    }
}

/* Dump SSA statistics on FILE.  */

void
dump_dominator_optimization_stats (FILE *file)
{
  long n_exprs;

  fprintf (file, "Total number of statements: %6ld\n\n",
           opt_stats.num_stmts);
  fprintf (file, "Exprs considered for dominator optimizations: %6ld\n",
           opt_stats.num_exprs_considered);

  n_exprs = opt_stats.num_exprs_considered;
  if (n_exprs == 0)
    n_exprs = 1;

  fprintf (file, " Redundant expressions eliminated: %6ld (%.0f%%)\n",
           opt_stats.num_re, PERCENT (opt_stats.num_re,
                                      n_exprs));
  fprintf (file, " Constants propagated: %6ld\n",
           opt_stats.num_const_prop);
  fprintf (file, " Copies propagated: %6ld\n",
           opt_stats.num_copy_prop);

  fprintf (file, "\nHash table statistics:\n");

  fprintf (file, " avail_exprs: ");
  htab_statistics (file, avail_exprs);
}


/* Dump SSA statistics on stderr.  */

void
debug_dominator_optimization_stats (void)
{
  dump_dominator_optimization_stats (stderr);
}


/* Dump statistics for the hash table HTAB.  */

static void
htab_statistics (FILE *file, htab_t htab)
{
  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
           (long) htab_size (htab),
           (long) htab_elements (htab),
           htab_collisions (htab));
}

/* Record the fact that VAR has a nonzero value, though we may not know
   its exact value.  Note that if VAR is already known to have a nonzero
   value, then we do nothing.  */

static void
record_var_is_nonzero (tree var)
{
  int indx = SSA_NAME_VERSION (var);

  if (bitmap_bit_p (nonzero_vars, indx))
    return;

  /* Mark it in the global table.  */
  bitmap_set_bit (nonzero_vars, indx);

  /* Record this SSA_NAME so that we can reset the global table
     when we leave this block.  */
  VEC_safe_push (tree, heap, nonzero_vars_stack, var);
}

/* Enter a statement into the true/false expression hash table indicating
   that the condition COND has the value VALUE.  */

static void
record_cond (tree cond, tree value)
{
  struct expr_hash_elt *element = xmalloc (sizeof (struct expr_hash_elt));
  void **slot;

  initialize_hash_element (cond, value, element);

  slot = htab_find_slot_with_hash (avail_exprs, (void *) element,
                                   element->hash, INSERT);
  if (*slot == NULL)
    {
      *slot = (void *) element;
      VEC_safe_push (tree, heap, avail_exprs_stack, cond);
    }
  else
    free (element);
}

/* Build a new conditional using NEW_CODE, OP0 and OP1 and store
   the new conditional into *P, then store a boolean_true_node
   into *(P + 1).  */

static void
build_and_record_new_cond (enum tree_code new_code, tree op0, tree op1, tree *p)
{
  *p = build2 (new_code, boolean_type_node, op0, op1);
  p++;
  *p = boolean_true_node;
}

/* Record that COND is true and INVERTED is false into the edge information
   structure.  Also record that any conditions dominated by COND are true
   as well.

   For example, if a < b is true, then a <= b must also be true.  */
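
/* (Illustrative summary of the LT_EXPR case below, not from the
   original sources.)  For a < b we record: a < b as true, the inverted
   condition as false, and the implied truths a <= b,
   ORDERED_EXPR (a, b), a != b and LTGT_EXPR (a, b).  */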

static void
record_conditions (struct edge_info *edge_info, tree cond, tree inverted)
{
  tree op0, op1;

  if (!COMPARISON_CLASS_P (cond))
    return;

  op0 = TREE_OPERAND (cond, 0);
  op1 = TREE_OPERAND (cond, 1);

  switch (TREE_CODE (cond))
    {
    case LT_EXPR:
    case GT_EXPR:
      edge_info->max_cond_equivalences = 12;
      edge_info->cond_equivalences = xmalloc (12 * sizeof (tree));
      build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
                                  ? LE_EXPR : GE_EXPR),
                                 op0, op1, &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[6]);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[8]);
      build_and_record_new_cond (LTGT_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[10]);
      break;

    case GE_EXPR:
    case LE_EXPR:
      edge_info->max_cond_equivalences = 6;
      edge_info->cond_equivalences = xmalloc (6 * sizeof (tree));
      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[4]);
      break;

    case EQ_EXPR:
      edge_info->max_cond_equivalences = 10;
      edge_info->cond_equivalences = xmalloc (10 * sizeof (tree));
      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (LE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[6]);
      build_and_record_new_cond (GE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[8]);
      break;

    case UNORDERED_EXPR:
      edge_info->max_cond_equivalences = 16;
      edge_info->cond_equivalences = xmalloc (16 * sizeof (tree));
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[6]);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[8]);
      build_and_record_new_cond (UNEQ_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[10]);
      build_and_record_new_cond (UNLT_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[12]);
      build_and_record_new_cond (UNGT_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[14]);
      break;

    case UNLT_EXPR:
    case UNGT_EXPR:
      edge_info->max_cond_equivalences = 8;
      edge_info->cond_equivalences = xmalloc (8 * sizeof (tree));
      build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
                                  ? UNLE_EXPR : UNGE_EXPR),
                                 op0, op1, &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[6]);
      break;

    case UNEQ_EXPR:
      edge_info->max_cond_equivalences = 8;
      edge_info->cond_equivalences = xmalloc (8 * sizeof (tree));
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[6]);
      break;

    case LTGT_EXPR:
      edge_info->max_cond_equivalences = 8;
      edge_info->cond_equivalences = xmalloc (8 * sizeof (tree));
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[6]);
      break;

    default:
      edge_info->max_cond_equivalences = 4;
      edge_info->cond_equivalences = xmalloc (4 * sizeof (tree));
      break;
    }

  /* Now store the original true and false conditions into the first
     two slots.  */
  edge_info->cond_equivalences[0] = cond;
  edge_info->cond_equivalences[1] = boolean_true_node;
  edge_info->cond_equivalences[2] = inverted;
  edge_info->cond_equivalences[3] = boolean_false_node;
}

/* A helper function for record_const_or_copy and record_equality.
   Do the work of recording the value and undo info.  */

static void
record_const_or_copy_1 (tree x, tree y, tree prev_x)
{
  SSA_NAME_VALUE (x) = y;

  VEC_reserve (tree, heap, const_and_copies_stack, 2);
  VEC_quick_push (tree, const_and_copies_stack, prev_x);
  VEC_quick_push (tree, const_and_copies_stack, x);
}


/* Return the loop depth of the basic block of the defining statement of X.
   This number should not be treated as absolutely correct because the loop
   information may not be completely up-to-date when dom runs.  However, it
   will be relatively correct, and as more passes are taught to keep loop info
   up to date, the result will become more and more accurate.  */

int
loop_depth_of_name (tree x)
{
  tree defstmt;
  basic_block defbb;

  /* If it's not an SSA_NAME, we have no clue where the definition is.  */
  if (TREE_CODE (x) != SSA_NAME)
    return 0;

  /* Otherwise return the loop depth of the defining statement's bb.
     Note that there may not actually be a bb for this statement, if the
     ssa_name is live on entry.  */
  defstmt = SSA_NAME_DEF_STMT (x);
  defbb = bb_for_stmt (defstmt);
  if (!defbb)
    return 0;

  return defbb->loop_depth;
}


/* Record that X is equal to Y in const_and_copies.  Record undo
   information in the block-local vector.  */

static void
record_const_or_copy (tree x, tree y)
{
  tree prev_x = SSA_NAME_VALUE (x);

  if (TREE_CODE (y) == SSA_NAME)
    {
      tree tmp = SSA_NAME_VALUE (y);
      if (tmp)
        y = tmp;
    }

  record_const_or_copy_1 (x, y, prev_x);
}

/* Similarly, but assume that X and Y are the two operands of an EQ_EXPR.
   This constrains the cases in which we may treat this as assignment.  */

static void
record_equality (tree x, tree y)
{
  tree prev_x = NULL, prev_y = NULL;

  if (TREE_CODE (x) == SSA_NAME)
    prev_x = SSA_NAME_VALUE (x);
  if (TREE_CODE (y) == SSA_NAME)
    prev_y = SSA_NAME_VALUE (y);

  /* If one of the previous values is invariant, or invariant in more loops
     (by depth), then use that.
     Otherwise it doesn't matter which value we choose, just so
     long as we canonicalize on one value.  */
  if (TREE_INVARIANT (y))
    ;
  else if (TREE_INVARIANT (x) || (loop_depth_of_name (x) <= loop_depth_of_name (y)))
    prev_x = x, x = y, y = prev_x, prev_x = prev_y;
  else if (prev_x && TREE_INVARIANT (prev_x))
    x = y, y = prev_x, prev_x = prev_y;
  else if (prev_y && TREE_CODE (prev_y) != VALUE_HANDLE)
    y = prev_y;

  /* After the swapping, we must have one SSA_NAME.  */
  if (TREE_CODE (x) != SSA_NAME)
    return;

  /* For IEEE, -0.0 == 0.0, so we don't necessarily know the sign of a
     variable compared against zero.  If we're honoring signed zeros,
     then we cannot record this value unless we know that the value is
     nonzero.  */
  if (HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (x)))
      && (TREE_CODE (y) != REAL_CST
          || REAL_VALUES_EQUAL (dconst0, TREE_REAL_CST (y))))
    return;

  record_const_or_copy_1 (x, y, prev_x);
}

/* Return true if it is OK to fold the associative expression EXP.  */

static inline bool
unsafe_associative_fp_binop (tree exp)
{
  enum tree_code code = TREE_CODE (exp);
  return !(!flag_unsafe_math_optimizations
           && (code == MULT_EXPR || code == PLUS_EXPR
               || code == MINUS_EXPR)
           && FLOAT_TYPE_P (TREE_TYPE (exp)));
}

/* Returns true when STMT is a simple iv increment.  It detects the
   following situation:

   i_1 = phi (..., i_2)
   i_2 = i_1 +/- ...  */

static bool
simple_iv_increment_p (tree stmt)
{
  tree lhs, rhs, preinc, phi;
  unsigned i;

  if (TREE_CODE (stmt) != MODIFY_EXPR)
    return false;

  lhs = TREE_OPERAND (stmt, 0);
  if (TREE_CODE (lhs) != SSA_NAME)
    return false;

  rhs = TREE_OPERAND (stmt, 1);

  if (TREE_CODE (rhs) != PLUS_EXPR
      && TREE_CODE (rhs) != MINUS_EXPR)
    return false;

  preinc = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (preinc) != SSA_NAME)
    return false;

  phi = SSA_NAME_DEF_STMT (preinc);
  if (TREE_CODE (phi) != PHI_NODE)
    return false;

  for (i = 0; i < (unsigned) PHI_NUM_ARGS (phi); i++)
    if (PHI_ARG_DEF (phi, i) == lhs)
      return true;

  return false;
}
1692
1693 /* STMT is a MODIFY_EXPR for which we were unable to find RHS in the
1694 hash tables. Try to simplify the RHS using whatever equivalences
1695 we may have recorded.
1696
1697 If we are able to simplify the RHS, then lookup the simplified form in
1698 the hash table and return the result. Otherwise return NULL. */
1699
1700 static tree
1701 simplify_rhs_and_lookup_avail_expr (struct dom_walk_data *walk_data,
1702 tree stmt, int insert)
1703 {
1704 tree rhs = TREE_OPERAND (stmt, 1);
1705 enum tree_code rhs_code = TREE_CODE (rhs);
1706 tree result = NULL;
1707
1708 /* If we have lhs = ~x, look and see if we earlier had x = ~y.
1709 In which case we can change this statement to be lhs = y.
1710 Which can then be copy propagated.
1711
1712 Similarly for negation. */
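/* Concretely (SSA names are illustrative):

     x_1 = ~y_2;
     lhs_3 = ~x_1;   is simplified to   lhs_3 = y_2;  */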
1713 if ((rhs_code == BIT_NOT_EXPR || rhs_code == NEGATE_EXPR)
1714 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
1715 {
1716 /* Get the definition statement for our RHS. */
1717 tree rhs_def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
1718
1719 /* See if the RHS_DEF_STMT has the same form as our statement. */
1720 if (TREE_CODE (rhs_def_stmt) == MODIFY_EXPR
1721 && TREE_CODE (TREE_OPERAND (rhs_def_stmt, 1)) == rhs_code)
1722 {
1723 tree rhs_def_operand;
1724
1725 rhs_def_operand = TREE_OPERAND (TREE_OPERAND (rhs_def_stmt, 1), 0);
1726
1727 /* Verify that RHS_DEF_OPERAND is a suitable SSA variable. */
1728 if (TREE_CODE (rhs_def_operand) == SSA_NAME
1729 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
1730 result = update_rhs_and_lookup_avail_expr (stmt,
1731 rhs_def_operand,
1732 insert);
1733 }
1734 }
1735
1736 /* If we have z = (x OP C1), see if we earlier had x = y OP C2.
1737 If OP is associative, create and fold (y OP C2) OP C1 which
1738 should result in (y OP C3), use that as the RHS for the
1739 assignment. MINUS_EXPR is included as well, since we handle it specially below. */
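/* For example (illustrative): given  x_1 = y_2 + 4  and the
   statement  z_3 = x_1 + 6, we fold 4 + 6 to 10 and use
   y_2 + 10 as the new RHS.  */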
1740 if ((associative_tree_code (rhs_code) || rhs_code == MINUS_EXPR)
1741 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
1742 && is_gimple_min_invariant (TREE_OPERAND (rhs, 1)))
1743 {
1744 tree rhs_def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
1745
1746 /* If the statement defines an induction variable, do not propagate
1747 its value, so that we do not create overlapping life ranges. */
1748 if (simple_iv_increment_p (rhs_def_stmt))
1749 goto dont_fold_assoc;
1750
1751 /* See if the RHS_DEF_STMT has the same form as our statement. */
1752 if (TREE_CODE (rhs_def_stmt) == MODIFY_EXPR)
1753 {
1754 tree rhs_def_rhs = TREE_OPERAND (rhs_def_stmt, 1);
1755 enum tree_code rhs_def_code = TREE_CODE (rhs_def_rhs);
1756
1757 if ((rhs_code == rhs_def_code && unsafe_associative_fp_binop (rhs))
1758 || (rhs_code == PLUS_EXPR && rhs_def_code == MINUS_EXPR)
1759 || (rhs_code == MINUS_EXPR && rhs_def_code == PLUS_EXPR))
1760 {
1761 tree def_stmt_op0 = TREE_OPERAND (rhs_def_rhs, 0);
1762 tree def_stmt_op1 = TREE_OPERAND (rhs_def_rhs, 1);
1763
1764 if (TREE_CODE (def_stmt_op0) == SSA_NAME
1765 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def_stmt_op0)
1766 && is_gimple_min_invariant (def_stmt_op1))
1767 {
1768 tree outer_const = TREE_OPERAND (rhs, 1);
1769 tree type = TREE_TYPE (TREE_OPERAND (stmt, 0));
1770 tree t;
1771
1772 /* If we care about correct floating point results, then
1773 don't fold x + c1 - c2. Note that we need to take both
1774 the codes and the signs to figure this out. */
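/* For example (illustrative): given  x_2 = x_1 + 1.0e20  and
   z_3 = x_2 - 1.0e20, folding to  z_3 = x_1 + 0.0  would be
   wrong, since the intermediate addition may round x_1 away
   entirely; the sign parity test below rejects this case.  */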
1775 if (FLOAT_TYPE_P (type)
1776 && !flag_unsafe_math_optimizations
1777 && (rhs_def_code == PLUS_EXPR
1778 || rhs_def_code == MINUS_EXPR))
1779 {
1780 bool neg = false;
1781
1782 neg ^= (rhs_code == MINUS_EXPR);
1783 neg ^= (rhs_def_code == MINUS_EXPR);
1784 neg ^= real_isneg (TREE_REAL_CST_PTR (outer_const));
1785 neg ^= real_isneg (TREE_REAL_CST_PTR (def_stmt_op1));
1786
1787 if (neg)
1788 goto dont_fold_assoc;
1789 }
1790
1791 /* Ho hum. So fold will only operate on the outermost
1792 thingy that we give it, so we have to build the new
1793 expression in two pieces. This requires that we handle
1794 combinations of plus and minus. */
1795 if (rhs_def_code != rhs_code)
1796 {
1797 if (rhs_def_code == MINUS_EXPR)
1798 t = build (MINUS_EXPR, type, outer_const, def_stmt_op1);
1799 else
1800 t = build (MINUS_EXPR, type, def_stmt_op1, outer_const);
1801 rhs_code = PLUS_EXPR;
1802 }
1803 else if (rhs_def_code == MINUS_EXPR)
1804 t = build (PLUS_EXPR, type, def_stmt_op1, outer_const);
1805 else
1806 t = build (rhs_def_code, type, def_stmt_op1, outer_const);
1807 t = local_fold (t);
1808 t = build (rhs_code, type, def_stmt_op0, t);
1809 t = local_fold (t);
1810
1811 /* If the result is a suitable looking gimple expression,
1812 then use it instead of the original for STMT. */
1813 if (TREE_CODE (t) == SSA_NAME
1814 || (UNARY_CLASS_P (t)
1815 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME)
1816 || ((BINARY_CLASS_P (t) || COMPARISON_CLASS_P (t))
1817 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
1818 && is_gimple_val (TREE_OPERAND (t, 1))))
1819 result = update_rhs_and_lookup_avail_expr (stmt, t, insert);
1820 }
1821 }
1822 }
1823 dont_fold_assoc:;
1824 }
1825
1826 /* Transform TRUNC_DIV_EXPR and TRUNC_MOD_EXPR into RSHIFT_EXPR
1827 and BIT_AND_EXPR respectively if the first operand is greater
1828 than zero and the second operand is an exact power of two. */
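/* For example (illustrative): when a_1 is unsigned or proven
   positive,  a_1 / 8  becomes  a_1 >> 3  and  a_1 % 8  becomes
   a_1 & 7.  */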
1829 if ((rhs_code == TRUNC_DIV_EXPR || rhs_code == TRUNC_MOD_EXPR)
1830 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (rhs, 0)))
1831 && integer_pow2p (TREE_OPERAND (rhs, 1)))
1832 {
1833 tree val;
1834 tree op = TREE_OPERAND (rhs, 0);
1835
1836 if (TYPE_UNSIGNED (TREE_TYPE (op)))
1837 {
1838 val = integer_one_node;
1839 }
1840 else
1841 {
1842 tree dummy_cond = walk_data->global_data;
1843
1844 if (! dummy_cond)
1845 {
1846 dummy_cond = build (GT_EXPR, boolean_type_node,
1847 op, integer_zero_node);
1848 dummy_cond = build (COND_EXPR, void_type_node,
1849 dummy_cond, NULL, NULL);
1850 walk_data->global_data = dummy_cond;
1851 }
1852 else
1853 {
1854 TREE_SET_CODE (COND_EXPR_COND (dummy_cond), GT_EXPR);
1855 TREE_OPERAND (COND_EXPR_COND (dummy_cond), 0) = op;
1856 TREE_OPERAND (COND_EXPR_COND (dummy_cond), 1)
1857 = integer_zero_node;
1858 }
1859 val = simplify_cond_and_lookup_avail_expr (dummy_cond, NULL, false);
1860 }
1861
1862 if (val && integer_onep (val))
1863 {
1864 tree t;
1865 tree op0 = TREE_OPERAND (rhs, 0);
1866 tree op1 = TREE_OPERAND (rhs, 1);
1867
1868 if (rhs_code == TRUNC_DIV_EXPR)
1869 t = build (RSHIFT_EXPR, TREE_TYPE (op0), op0,
1870 build_int_cst (NULL_TREE, tree_log2 (op1)));
1871 else
1872 t = build (BIT_AND_EXPR, TREE_TYPE (op0), op0,
1873 local_fold (build (MINUS_EXPR, TREE_TYPE (op1),
1874 op1, integer_one_node)));
1875
1876 result = update_rhs_and_lookup_avail_expr (stmt, t, insert);
1877 }
1878 }
1879
1880 /* Transform ABS (X) into X or -X as appropriate. */
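/* For example (illustrative): if earlier conditionals proved
   x_1 <= 0 then  ABS (x_1)  becomes  -x_1;  if they proved
   x_1 >= 0 (or its type is unsigned) it becomes  x_1.  */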
1881 if (rhs_code == ABS_EXPR
1882 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (rhs, 0))))
1883 {
1884 tree val;
1885 tree op = TREE_OPERAND (rhs, 0);
1886 tree type = TREE_TYPE (op);
1887
1888 if (TYPE_UNSIGNED (type))
1889 {
1890 val = integer_zero_node;
1891 }
1892 else
1893 {
1894 tree dummy_cond = walk_data->global_data;
1895
1896 if (! dummy_cond)
1897 {
1898 dummy_cond = build (LE_EXPR, boolean_type_node,
1899 op, integer_zero_node);
1900 dummy_cond = build (COND_EXPR, void_type_node,
1901 dummy_cond, NULL, NULL);
1902 walk_data->global_data = dummy_cond;
1903 }
1904 else
1905 {
1906 TREE_SET_CODE (COND_EXPR_COND (dummy_cond), LE_EXPR);
1907 TREE_OPERAND (COND_EXPR_COND (dummy_cond), 0) = op;
1908 TREE_OPERAND (COND_EXPR_COND (dummy_cond), 1)
1909 = build_int_cst (type, 0);
1910 }
1911 val = simplify_cond_and_lookup_avail_expr (dummy_cond, NULL, false);
1912
1913 if (!val)
1914 {
1915 TREE_SET_CODE (COND_EXPR_COND (dummy_cond), GE_EXPR);
1916 TREE_OPERAND (COND_EXPR_COND (dummy_cond), 0) = op;
1917 TREE_OPERAND (COND_EXPR_COND (dummy_cond), 1)
1918 = build_int_cst (type, 0);
1919
1920 val = simplify_cond_and_lookup_avail_expr (dummy_cond,
1921 NULL, false);
1922
1923 if (val)
1924 {
1925 if (integer_zerop (val))
1926 val = integer_one_node;
1927 else if (integer_onep (val))
1928 val = integer_zero_node;
1929 }
1930 }
1931 }
1932
1933 if (val
1934 && (integer_onep (val) || integer_zerop (val)))
1935 {
1936 tree t;
1937
1938 if (integer_onep (val))
1939 t = build1 (NEGATE_EXPR, TREE_TYPE (op), op);
1940 else
1941 t = op;
1942
1943 result = update_rhs_and_lookup_avail_expr (stmt, t, insert);
1944 }
1945 }
1946
1947 /* Optimize *"foo" into 'f'. This is done here rather than
1948 in fold to avoid problems with stuff like &*"foo". */
1949 if (TREE_CODE (rhs) == INDIRECT_REF || TREE_CODE (rhs) == ARRAY_REF)
1950 {
1951 tree t = fold_read_from_constant_string (rhs);
1952
1953 if (t)
1954 result = update_rhs_and_lookup_avail_expr (stmt, t, insert);
1955 }
1956
1957 return result;
1958 }
1959
1960 /* COND is a condition of the form:
1961
1962 x == const or x != const
1963
1964 Look back to x's defining statement and see if x is defined as
1965
1966 x = (type) y;
1967
1968 If const is unchanged when we convert it to type, then we can build
1969 the equivalent expression:
1970
1972 y == const or y != const
1973
1974 Which may allow further optimizations.
1975
1976 Return the equivalent comparison or NULL if no such equivalent comparison
1977 was found. */
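/* For example (illustrative): given a short variable y and

     x = (int) y;
     if (x == 5)    may be rewritten as   if (y == 5)

   since the constant 5 is unchanged by the narrowing cast.  */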
1978
1979 static tree
1980 find_equivalent_equality_comparison (tree cond)
1981 {
1982 tree op0 = TREE_OPERAND (cond, 0);
1983 tree op1 = TREE_OPERAND (cond, 1);
1984 tree def_stmt = SSA_NAME_DEF_STMT (op0);
1985
1986 /* OP0 might have been a parameter, so first make sure it
1987 was defined by a MODIFY_EXPR. */
1988 if (def_stmt && TREE_CODE (def_stmt) == MODIFY_EXPR)
1989 {
1990 tree def_rhs = TREE_OPERAND (def_stmt, 1);
1991
1992 /* Now make sure the RHS of the MODIFY_EXPR is a typecast. */
1993 if ((TREE_CODE (def_rhs) == NOP_EXPR
1994 || TREE_CODE (def_rhs) == CONVERT_EXPR)
1995 && TREE_CODE (TREE_OPERAND (def_rhs, 0)) == SSA_NAME)
1996 {
1997 tree def_rhs_inner = TREE_OPERAND (def_rhs, 0);
1998 tree def_rhs_inner_type = TREE_TYPE (def_rhs_inner);
1999 tree new;
2000
2001 if (TYPE_PRECISION (def_rhs_inner_type)
2002 > TYPE_PRECISION (TREE_TYPE (def_rhs)))
2003 return NULL;
2004
2005 /* What we want to prove is that if we convert OP1 to
2006 the type of the object inside the NOP_EXPR, the
2007 result is still equal to OP1.
2008
2009 If that is true, then build and return a new equivalent
2010 condition which uses the source of the typecast and the
2011 new constant (which has only changed its type). */
2012 new = build1 (TREE_CODE (def_rhs), def_rhs_inner_type, op1);
2013 new = local_fold (new);
2014 if (is_gimple_val (new) && tree_int_cst_equal (new, op1))
2015 return build (TREE_CODE (cond), TREE_TYPE (cond),
2016 def_rhs_inner, new);
2017 }
2018 }
2019 return NULL;
2020 }
2021
2022 /* STMT is a COND_EXPR for which we could not trivially determine its
2023 result. This routine attempts to find equivalent forms of the
2024 condition which we may be able to optimize better. It also
2025 uses simple value range propagation to optimize conditionals. */
2026
2027 static tree
2028 simplify_cond_and_lookup_avail_expr (tree stmt,
2029 stmt_ann_t ann,
2030 int insert)
2031 {
2032 tree cond = COND_EXPR_COND (stmt);
2033
2034 if (COMPARISON_CLASS_P (cond))
2035 {
2036 tree op0 = TREE_OPERAND (cond, 0);
2037 tree op1 = TREE_OPERAND (cond, 1);
2038
2039 if (TREE_CODE (op0) == SSA_NAME && is_gimple_min_invariant (op1))
2040 {
2041 int limit;
2042 tree low, high, cond_low, cond_high;
2043 int lowequal, highequal, swapped, no_overlap, subset, cond_inverted;
2044 varray_type vrp_records;
2045 struct vrp_element *element;
2046 struct vrp_hash_elt vrp_hash_elt, *vrp_hash_elt_p;
2047 void **slot;
2048
2049 /* First see if we have test of an SSA_NAME against a constant
2050 where the SSA_NAME is defined by an earlier typecast which
2051 is irrelevant when performing tests against the given
2052 constant. */
2053 if (TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
2054 {
2055 tree new_cond = find_equivalent_equality_comparison (cond);
2056
2057 if (new_cond)
2058 {
2059 /* Update the statement to use the new equivalent
2060 condition. */
2061 COND_EXPR_COND (stmt) = new_cond;
2062
2063 /* If this is not a real stmt, ann will be NULL and we
2064 avoid processing the operands. */
2065 if (ann)
2066 mark_stmt_modified (stmt);
2067
2068 /* Lookup the condition and return its known value if it
2069 exists. */
2070 new_cond = lookup_avail_expr (stmt, insert);
2071 if (new_cond)
2072 return new_cond;
2073
2074 /* The operands have changed, so update op0 and op1. */
2075 op0 = TREE_OPERAND (cond, 0);
2076 op1 = TREE_OPERAND (cond, 1);
2077 }
2078 }
2079
2080 /* Consult the value range records for this variable (if they exist)
2081 to see if we can eliminate or simplify this conditional.
2082
2083 Note two tests are necessary to determine no records exist.
2084 First we have to see if the virtual array exists; if it
2085 does, then we have to check its active size.
2086
2087 Also note the vast majority of conditionals are not testing
2088 a variable which has had its range constrained by an earlier
2089 conditional. So this filter avoids a lot of unnecessary work. */
2090 vrp_hash_elt.var = op0;
2091 vrp_hash_elt.records = NULL;
2092 slot = htab_find_slot (vrp_data, &vrp_hash_elt, NO_INSERT);
2093 if (slot == NULL)
2094 return NULL;
2095
2096 vrp_hash_elt_p = (struct vrp_hash_elt *) *slot;
2097 vrp_records = vrp_hash_elt_p->records;
2098 if (vrp_records == NULL)
2099 return NULL;
2100
2101 limit = VARRAY_ACTIVE_SIZE (vrp_records);
2102
2103 /* If we have no value range records for this variable, or we are
2104 unable to extract a range for this condition, then there is
2105 nothing to do. */
2106 if (limit == 0
2107 || ! extract_range_from_cond (cond, &cond_high,
2108 &cond_low, &cond_inverted))
2109 return NULL;
2110
2111 /* We really want to avoid unnecessary computations of range
2112 info. So all ranges are computed lazily; this avoids a
2113 lot of unnecessary work. That is, we record the conditional,
2114 but do not process how it constrains the variable's
2115 potential values until we know that processing the condition
2116 could be helpful.
2117
2118 However, we do not want to have to walk a potentially long
2119 list of ranges, nor do we want to compute a variable's
2120 range more than once for a given path.
2121
2122 Luckily, each time we encounter a conditional that can not
2123 be otherwise optimized we will end up here and we will
2124 compute the necessary range information for the variable
2125 used in this condition.
2126
2127 Thus you can conclude that there will never be more than one
2128 conditional associated with a variable which has not been
2129 processed. So we never need to merge more than one new
2130 conditional into the current range.
2131
2132 These properties also help us avoid unnecessary work. */
2133 element
2134 = (struct vrp_element *)VARRAY_GENERIC_PTR (vrp_records, limit - 1);
2135
2136 if (element->high && element->low)
2137 {
2138 /* The last element has been processed, so there is no range
2139 merging to do, we can simply use the high/low values
2140 recorded in the last element. */
2141 low = element->low;
2142 high = element->high;
2143 }
2144 else
2145 {
2146 tree tmp_high, tmp_low;
2147 int dummy;
2148
2149 /* The last element has not been processed. Process it now.
2150 record_range should ensure that the inverted flag is never
2151 set for the recorded condition. This call can only fail if
2152 cond is x < min or x > max, which fold should have
2153 optimized into false; if it ever fails here, that is a
2154 bug, hence the gcc_unreachable below. */
2155 if (! extract_range_from_cond (element->cond, &tmp_high,
2156 &tmp_low, &dummy))
2157 gcc_unreachable ();
2158 else
2159 gcc_assert (dummy == 0);
2160
2161 /* If this is the only element, then no merging is necessary,
2162 the high/low values from extract_range_from_cond are all
2163 we need. */
2164 if (limit == 1)
2165 {
2166 low = tmp_low;
2167 high = tmp_high;
2168 }
2169 else
2170 {
2171 /* Get the high/low value from the previous element. */
2172 struct vrp_element *prev
2173 = (struct vrp_element *)VARRAY_GENERIC_PTR (vrp_records,
2174 limit - 2);
2175 low = prev->low;
2176 high = prev->high;
2177
2178 /* Merge in this element's range with the range from the
2179 previous element.
2180
2181 The low value for the merged range is the maximum of
2182 the previous low value and the low value of this record.
2183
2184 Similarly the high value for the merged range is the
2185 minimum of the previous high value and the high value of
2186 this record. */
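/* For example (illustrative): merging the recorded range
   [0, 10] with this element's range [5, 20] yields [5, 10]:
   low = max (0, 5) and high = min (10, 20).  */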
2187 low = (low && tree_int_cst_compare (low, tmp_low) == 1
2188 ? low : tmp_low);
2189 high = (high && tree_int_cst_compare (high, tmp_high) == -1
2190 ? high : tmp_high);
2191 }
2192
2193 /* And record the computed range. */
2194 element->low = low;
2195 element->high = high;
2196
2197 }
2198
2199 /* After we have constrained this variable's potential values,
2200 we try to determine the result of the given conditional.
2201
2202 To simplify later tests, first determine if the current
2203 low value is the same low value as the conditional.
2204 Similarly for the current high value and the high value
2205 for the conditional. */
2206 lowequal = tree_int_cst_equal (low, cond_low);
2207 highequal = tree_int_cst_equal (high, cond_high);
2208
2209 if (lowequal && highequal)
2210 return (cond_inverted ? boolean_false_node : boolean_true_node);
2211
2212 /* To simplify the overlap/subset tests below we may want
2213 to swap the two ranges so that the larger of the two
2214 ranges occurs "first". */
2215 swapped = 0;
2216 if (tree_int_cst_compare (low, cond_low) == 1
2217 || (lowequal
2218 && tree_int_cst_compare (cond_high, high) == 1))
2219 {
2220 tree temp;
2221
2222 swapped = 1;
2223 temp = low;
2224 low = cond_low;
2225 cond_low = temp;
2226 temp = high;
2227 high = cond_high;
2228 cond_high = temp;
2229 }
2230
2231 /* Now determine if there is no overlap in the ranges
2232 or if the second range is a subset of the first range. */
2233 no_overlap = tree_int_cst_lt (high, cond_low);
2234 subset = tree_int_cst_compare (cond_high, high) != 1;
2235
2236 /* If there was no overlap in the ranges, then this conditional
2237 always has a false value (unless we had to invert this
2238 conditional, in which case it always has a true value). */
2239 if (no_overlap)
2240 return (cond_inverted ? boolean_true_node : boolean_false_node);
2241
2242 /* If the current range is a subset of the condition's range,
2243 then this conditional always has a true value (unless we
2244 had to invert this conditional, in which case it always
2245 has a false value). */
2246 if (subset && swapped)
2247 return (cond_inverted ? boolean_false_node : boolean_true_node);
2248
2249 /* We were unable to determine the result of the conditional.
2250 However, we may be able to simplify the conditional. First
2251 merge the ranges in the same manner as range merging above. */
2252 low = tree_int_cst_compare (low, cond_low) == 1 ? low : cond_low;
2253 high = tree_int_cst_compare (high, cond_high) == -1 ? high : cond_high;
2254
2255 /* If the range has converged to a single point, then turn this
2256 into an equality comparison. */
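/* For example (illustrative): if x_1 is known to lie in [3, 5],
   the test  x_1 >= 5  is rewritten as  x_1 == 5.  */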
2257 if (TREE_CODE (cond) != EQ_EXPR
2258 && TREE_CODE (cond) != NE_EXPR
2259 && tree_int_cst_equal (low, high))
2260 {
2261 TREE_SET_CODE (cond, EQ_EXPR);
2262 TREE_OPERAND (cond, 1) = high;
2263 }
2264 }
2265 }
2266 return 0;
2267 }
2268
2269 /* STMT is a SWITCH_EXPR for which we could not trivially determine its
2270 result. This routine attempts to find equivalent forms of the
2271 condition which we may be able to optimize better. */
2272
2273 static tree
2274 simplify_switch_and_lookup_avail_expr (tree stmt, int insert)
2275 {
2276 tree cond = SWITCH_COND (stmt);
2277 tree def, to, ti;
2278
2279 /* The optimization that we really care about is removing unnecessary
2280 casts. That will let us do much better in propagating the inferred
2281 constant at the switch target. */
2282 if (TREE_CODE (cond) == SSA_NAME)
2283 {
2284 def = SSA_NAME_DEF_STMT (cond);
2285 if (TREE_CODE (def) == MODIFY_EXPR)
2286 {
2287 def = TREE_OPERAND (def, 1);
2288 if (TREE_CODE (def) == NOP_EXPR)
2289 {
2290 int need_precision;
2291 bool fail;
2292
2293 def = TREE_OPERAND (def, 0);
2294
2295 #ifdef ENABLE_CHECKING
2296 /* ??? Why was Jeff testing this? We are gimple... */
2297 gcc_assert (is_gimple_val (def));
2298 #endif
2299
2300 to = TREE_TYPE (cond);
2301 ti = TREE_TYPE (def);
2302
2303 /* If we have an extension that preserves value, then we
2304 can copy the source value into the switch. */
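/* For example (illustrative): switching on (int) c where c is an
   unsigned char is safe, since zero extension preserves every
   value; switching on (unsigned int) i where i is a signed int
   is not, so the cast must stay.  */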
2305
2306 need_precision = TYPE_PRECISION (ti);
2307 fail = false;
2308 if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
2309 fail = true;
2310 else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
2311 need_precision += 1;
2312 if (TYPE_PRECISION (to) < need_precision)
2313 fail = true;
2314
2315 if (!fail)
2316 {
2317 SWITCH_COND (stmt) = def;
2318 mark_stmt_modified (stmt);
2319
2320 return lookup_avail_expr (stmt, insert);
2321 }
2322 }
2323 }
2324 }
2325
2326 return 0;
2327 }
2328
2329
2330 /* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
2331 known value for that SSA_NAME (or NULL if no value is known).
2332
2333 NONZERO_VARS is the set of SSA_NAMEs known to have a nonzero value,
2334 even if we don't know their precise value.
2335
2336 Propagate values from CONST_AND_COPIES and NONZERO_VARS into the PHI
2337 nodes of the successors of BB. */
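/* Sketch (illustrative): if we know x_1 = 3 and a successor block
   contains  y_2 = PHI <x_1 (bb), ...>, the argument for the edge
   from BB is replaced with 3.  */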
2338
2339 static void
2340 cprop_into_successor_phis (basic_block bb, bitmap nonzero_vars)
2341 {
2342 edge e;
2343 edge_iterator ei;
2344
2345 FOR_EACH_EDGE (e, ei, bb->succs)
2346 {
2347 tree phi;
2348 int indx;
2349
2350 /* If this is an abnormal edge, then we do not want to copy propagate
2351 into the PHI alternative associated with this edge. */
2352 if (e->flags & EDGE_ABNORMAL)
2353 continue;
2354
2355 phi = phi_nodes (e->dest);
2356 if (! phi)
2357 continue;
2358
2359 indx = e->dest_idx;
2360 for ( ; phi; phi = PHI_CHAIN (phi))
2361 {
2362 tree new;
2363 use_operand_p orig_p;
2364 tree orig;
2365
2366 /* The alternative may be associated with a constant, so verify
2367 it is an SSA_NAME before doing anything with it. */
2368 orig_p = PHI_ARG_DEF_PTR (phi, indx);
2369 orig = USE_FROM_PTR (orig_p);
2370 if (TREE_CODE (orig) != SSA_NAME)
2371 continue;
2372
2373 /* If the alternative is known to have a nonzero value, record
2374 that fact in the PHI node itself for future use. */
2375 if (bitmap_bit_p (nonzero_vars, SSA_NAME_VERSION (orig)))
2376 PHI_ARG_NONZERO (phi, indx) = true;
2377
2378 /* If we have *ORIG_P in our constant/copy table, then replace
2379 ORIG_P with its value in our constant/copy table. */
2380 new = SSA_NAME_VALUE (orig);
2381 if (new
2382 && new != orig
2383 && (TREE_CODE (new) == SSA_NAME
2384 || is_gimple_min_invariant (new))
2385 && may_propagate_copy (orig, new))
2386 propagate_value (orig_p, new);
2387 }
2388 }
2389 }
2390
2391 /* We have finished optimizing BB, record any information implied by
2392 taking a specific outgoing edge from BB. */
2393
2394 static void
2395 record_edge_info (basic_block bb)
2396 {
2397 block_stmt_iterator bsi = bsi_last (bb);
2398 struct edge_info *edge_info;
2399
2400 if (! bsi_end_p (bsi))
2401 {
2402 tree stmt = bsi_stmt (bsi);
2403
2404 if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
2405 {
2406 tree cond = SWITCH_COND (stmt);
2407
2408 if (TREE_CODE (cond) == SSA_NAME)
2409 {
2410 tree labels = SWITCH_LABELS (stmt);
2411 int i, n_labels = TREE_VEC_LENGTH (labels);
2412 tree *info = xcalloc (last_basic_block, sizeof (tree));
2413 edge e;
2414 edge_iterator ei;
2415
2416 for (i = 0; i < n_labels; i++)
2417 {
2418 tree label = TREE_VEC_ELT (labels, i);
2419 basic_block target_bb = label_to_block (CASE_LABEL (label));
2420
2421 if (CASE_HIGH (label)
2422 || !CASE_LOW (label)
2423 || info[target_bb->index])
2424 info[target_bb->index] = error_mark_node;
2425 else
2426 info[target_bb->index] = label;
2427 }
2428
2429 FOR_EACH_EDGE (e, ei, bb->succs)
2430 {
2431 basic_block target_bb = e->dest;
2432 tree node = info[target_bb->index];
2433
2434 if (node != NULL && node != error_mark_node)
2435 {
2436 tree x = fold_convert (TREE_TYPE (cond), CASE_LOW (node));
2437 edge_info = allocate_edge_info (e);
2438 edge_info->lhs = cond;
2439 edge_info->rhs = x;
2440 }
2441 }
2442 free (info);
2443 }
2444 }
2445
2446 /* A COND_EXPR may create equivalences too. */
2447 if (stmt && TREE_CODE (stmt) == COND_EXPR)
2448 {
2449 tree cond = COND_EXPR_COND (stmt);
2450 edge true_edge;
2451 edge false_edge;
2452
2453 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2454
2455 /* If the conditional is a single variable 'X', record 'X = 1'
2456 for the true edge and 'X = 0' on the false edge. */
2457 if (SSA_VAR_P (cond))
2458 {
2459 struct edge_info *edge_info;
2460
2461 edge_info = allocate_edge_info (true_edge);
2462 edge_info->lhs = cond;
2463 edge_info->rhs = constant_boolean_node (1, TREE_TYPE (cond));
2464
2465 edge_info = allocate_edge_info (false_edge);
2466 edge_info->lhs = cond;
2467 edge_info->rhs = constant_boolean_node (0, TREE_TYPE (cond));
2468 }
2469 /* Equality tests may create one or two equivalences. */
2470 else if (COMPARISON_CLASS_P (cond))
2471 {
2472 tree op0 = TREE_OPERAND (cond, 0);
2473 tree op1 = TREE_OPERAND (cond, 1);
2474
2475 /* Special case comparing booleans against a constant as we
2476 know the value of OP0 on both arms of the branch, i.e., we
2477 can record an equivalence for OP0 rather than COND. */
2478 if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
2479 && TREE_CODE (op0) == SSA_NAME
2480 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2481 && is_gimple_min_invariant (op1))
2482 {
2483 if (TREE_CODE (cond) == EQ_EXPR)
2484 {
2485 edge_info = allocate_edge_info (true_edge);
2486 edge_info->lhs = op0;
2487 edge_info->rhs = (integer_zerop (op1)
2488 ? boolean_false_node
2489 : boolean_true_node);
2490
2491 edge_info = allocate_edge_info (false_edge);
2492 edge_info->lhs = op0;
2493 edge_info->rhs = (integer_zerop (op1)
2494 ? boolean_true_node
2495 : boolean_false_node);
2496 }
2497 else
2498 {
2499 edge_info = allocate_edge_info (true_edge);
2500 edge_info->lhs = op0;
2501 edge_info->rhs = (integer_zerop (op1)
2502 ? boolean_true_node
2503 : boolean_false_node);
2504
2505 edge_info = allocate_edge_info (false_edge);
2506 edge_info->lhs = op0;
2507 edge_info->rhs = (integer_zerop (op1)
2508 ? boolean_false_node
2509 : boolean_true_node);
2510 }
2511 }
2512
2513 else if (is_gimple_min_invariant (op0)
2514 && (TREE_CODE (op1) == SSA_NAME
2515 || is_gimple_min_invariant (op1)))
2516 {
2517 tree inverted = invert_truthvalue (cond);
2518 struct edge_info *edge_info;
2519
2520 edge_info = allocate_edge_info (true_edge);
2521 record_conditions (edge_info, cond, inverted);
2522
2523 if (TREE_CODE (cond) == EQ_EXPR)
2524 {
2525 edge_info->lhs = op1;
2526 edge_info->rhs = op0;
2527 }
2528
2529 edge_info = allocate_edge_info (false_edge);
2530 record_conditions (edge_info, inverted, cond);
2531
2532 if (TREE_CODE (cond) == NE_EXPR)
2533 {
2534 edge_info->lhs = op1;
2535 edge_info->rhs = op0;
2536 }
2537 }
2538
2539 else if (TREE_CODE (op0) == SSA_NAME
2540 && (is_gimple_min_invariant (op1)
2541 || TREE_CODE (op1) == SSA_NAME))
2542 {
2543 tree inverted = invert_truthvalue (cond);
2544 struct edge_info *edge_info;
2545
2546 edge_info = allocate_edge_info (true_edge);
2547 record_conditions (edge_info, cond, inverted);
2548
2549 if (TREE_CODE (cond) == EQ_EXPR)
2550 {
2551 edge_info->lhs = op0;
2552 edge_info->rhs = op1;
2553 }
2554
2555 edge_info = allocate_edge_info (false_edge);
2556 record_conditions (edge_info, inverted, cond);
2557
2558 if (TREE_CODE (cond) == NE_EXPR)
2559 {
2560 edge_info->lhs = op0;
2561 edge_info->rhs = op1;
2562 }
2563 }
2564 }
2565
2566 /* ??? TRUTH_NOT_EXPR can create an equivalence too. */
2567 }
2568 }
2569 }
2570
2571 /* Propagate information from BB to its outgoing edges.
2572
2573 This can include equivalency information implied by control statements
2574 at the end of BB and const/copy propagation into PHIs in BB's
2575 successor blocks. */
2576
2577 static void
2578 propagate_to_outgoing_edges (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
2579 basic_block bb)
2580 {
2581 record_edge_info (bb);
2582 cprop_into_successor_phis (bb, nonzero_vars);
2583 }
2584
2585 /* Search for redundant computations in STMT. If any are found, then
2586 replace them with the variable holding the result of the computation.
2587
2588 If safe, record this expression into the available expression hash
2589 table. */
2590
2591 static bool
2592 eliminate_redundant_computations (struct dom_walk_data *walk_data,
2593 tree stmt, stmt_ann_t ann)
2594 {
2595 tree *expr_p, def = NULL_TREE;
2596 bool insert = true;
2597 tree cached_lhs;
2598 bool retval = false;
2599
2600 if (TREE_CODE (stmt) == MODIFY_EXPR)
2601 def = TREE_OPERAND (stmt, 0);
2602
2603 /* Certain expressions on the RHS can be optimized away, but can not
2604 themselves be entered into the hash tables. */
2605 if (ann->makes_aliased_stores
2606 || ! def
2607 || TREE_CODE (def) != SSA_NAME
2608 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def)
2609 || !ZERO_SSA_OPERANDS (stmt, SSA_OP_VMAYDEF)
2610 /* Do not record equivalences for increments of ivs. This would create
2611 overlapping live ranges for a very questionable gain. */
2612 || simple_iv_increment_p (stmt))
2613 insert = false;
2614
2615 /* Check if the expression has been computed before. */
2616 cached_lhs = lookup_avail_expr (stmt, insert);
2617
2618 /* If this is an assignment and the RHS was not in the hash table,
2619 then try to simplify the RHS and lookup the new RHS in the
2620 hash table. */
2621 if (! cached_lhs && TREE_CODE (stmt) == MODIFY_EXPR)
2622 cached_lhs = simplify_rhs_and_lookup_avail_expr (walk_data, stmt, insert);
2623 /* Similarly if this is a COND_EXPR and we did not find its
2624 expression in the hash table, simplify the condition and
2625 try again. */
2626 else if (! cached_lhs && TREE_CODE (stmt) == COND_EXPR)
2627 cached_lhs = simplify_cond_and_lookup_avail_expr (stmt, ann, insert);
2628 /* Similarly for a SWITCH_EXPR. */
2629 else if (!cached_lhs && TREE_CODE (stmt) == SWITCH_EXPR)
2630 cached_lhs = simplify_switch_and_lookup_avail_expr (stmt, insert);
2631
2632 opt_stats.num_exprs_considered++;
2633
2634 /* Get a pointer to the expression we are trying to optimize. */
2635 if (TREE_CODE (stmt) == COND_EXPR)
2636 expr_p = &COND_EXPR_COND (stmt);
2637 else if (TREE_CODE (stmt) == SWITCH_EXPR)
2638 expr_p = &SWITCH_COND (stmt);
2639 else if (TREE_CODE (stmt) == RETURN_EXPR && TREE_OPERAND (stmt, 0))
2640 expr_p = &TREE_OPERAND (TREE_OPERAND (stmt, 0), 1);
2641 else
2642 expr_p = &TREE_OPERAND (stmt, 1);
2643
2644 /* It is safe to ignore types here since we have already done
2645 type checking in the hashing and equality routines. In fact
2646 type checking here merely gets in the way of constant
2647 propagation. Also, make sure that it is safe to propagate
2648 CACHED_LHS into *EXPR_P. */
2649 if (cached_lhs
2650 && (TREE_CODE (cached_lhs) != SSA_NAME
2651 || may_propagate_copy (*expr_p, cached_lhs)))
2652 {
2653 if (dump_file && (dump_flags & TDF_DETAILS))
2654 {
2655 fprintf (dump_file, " Replaced redundant expr '");
2656 print_generic_expr (dump_file, *expr_p, dump_flags);
2657 fprintf (dump_file, "' with '");
2658 print_generic_expr (dump_file, cached_lhs, dump_flags);
2659 fprintf (dump_file, "'\n");
2660 }
2661
2662 opt_stats.num_re++;
2663
2664 #if defined ENABLE_CHECKING
2665 gcc_assert (TREE_CODE (cached_lhs) == SSA_NAME
2666 || is_gimple_min_invariant (cached_lhs));
2667 #endif
2668
2669 if (TREE_CODE (cached_lhs) == ADDR_EXPR
2670 || (POINTER_TYPE_P (TREE_TYPE (*expr_p))
2671 && is_gimple_min_invariant (cached_lhs)))
2672 retval = true;
2673
2674 propagate_tree_value (expr_p, cached_lhs);
2675 mark_stmt_modified (stmt);
2676 }
2677 return retval;
2678 }
2679
2680 /* STMT, a MODIFY_EXPR, may create certain equivalences, in either
2681 the available expressions table or the const_and_copies table.
2682 Detect and record those equivalences. */
2683
2684 static void
2685 record_equivalences_from_stmt (tree stmt,
2686 int may_optimize_p,
2687 stmt_ann_t ann)
2688 {
2689 tree lhs = TREE_OPERAND (stmt, 0);
2690 enum tree_code lhs_code = TREE_CODE (lhs);
2691 int i;
2692
2693 if (lhs_code == SSA_NAME)
2694 {
2695 tree rhs = TREE_OPERAND (stmt, 1);
2696
2697 /* Strip away any useless type conversions. */
2698 STRIP_USELESS_TYPE_CONVERSION (rhs);
2699
2700 /* If the RHS of the assignment is a constant or another variable that
2701 may be propagated, register it in the CONST_AND_COPIES table. We
2702 do not need to record unwind data for this, since this is a true
2703 assignment and not an equivalence inferred from a comparison. All
2704 uses of this ssa name are dominated by this assignment, so unwinding
2705 just costs time and space. */
2706 if (may_optimize_p
2707 && (TREE_CODE (rhs) == SSA_NAME
2708 || is_gimple_min_invariant (rhs)))
2709 SSA_NAME_VALUE (lhs) = rhs;
2710
2711 if (expr_computes_nonzero (rhs))
2712 record_var_is_nonzero (lhs);
2713 }
2714
2715 /* Look at both sides for pointer dereferences. If we find one, then
2716 the pointer must be nonnull and we can enter that equivalence into
2717 the hash tables. */
2718 if (flag_delete_null_pointer_checks)
2719 for (i = 0; i < 2; i++)
2720 {
2721 tree t = TREE_OPERAND (stmt, i);
2722
2723 /* Strip away any COMPONENT_REFs. */
2724 while (TREE_CODE (t) == COMPONENT_REF)
2725 t = TREE_OPERAND (t, 0);
2726
2727 /* Now see if this is a pointer dereference. */
2728 if (INDIRECT_REF_P (t))
2729 {
2730 tree op = TREE_OPERAND (t, 0);
2731
2732 /* If the pointer is a SSA variable, then enter new
2733 equivalences into the hash table. */
2734 while (TREE_CODE (op) == SSA_NAME)
2735 {
2736 tree def = SSA_NAME_DEF_STMT (op);
2737
2738 record_var_is_nonzero (op);
2739
2740 /* And walk up the USE-DEF chains noting other SSA_NAMEs
2741 which are known to have a nonzero value. */
2742 if (def
2743 && TREE_CODE (def) == MODIFY_EXPR
2744 && TREE_CODE (TREE_OPERAND (def, 1)) == NOP_EXPR)
2745 op = TREE_OPERAND (TREE_OPERAND (def, 1), 0);
2746 else
2747 break;
2748 }
2749 }
2750 }
2751
2752 /* A memory store, even an aliased store, creates a useful
2753 equivalence. By exchanging the LHS and RHS, creating suitable
2754 vops and recording the result in the available expression table,
2755 we may be able to expose more redundant loads. */
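/* For example (illustrative): after the store  *p_1 = x_2;  we
   record the artificial load  x_2 = *p_1;  so that a later load
   from *p_1 can be replaced by x_2.  */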
2756 if (!ann->has_volatile_ops
2757 && (TREE_CODE (TREE_OPERAND (stmt, 1)) == SSA_NAME
2758 || is_gimple_min_invariant (TREE_OPERAND (stmt, 1)))
2759 && !is_gimple_reg (lhs))
2760 {
2761 tree rhs = TREE_OPERAND (stmt, 1);
2762 tree new;
2763
2764 /* FIXME: If the LHS of the assignment is a bitfield and the RHS
2765 is a constant, we need to adjust the constant to fit into the
2766 type of the LHS. If the LHS is a bitfield and the RHS is not
2767 a constant, then we can not record any equivalences for this
2768 statement since we would need to represent the widening or
2769 narrowing of RHS. This fixes gcc.c-torture/execute/921016-1.c
2770 and should not be necessary if GCC represented bitfields
2771 properly. */
2772 if (lhs_code == COMPONENT_REF
2773 && DECL_BIT_FIELD (TREE_OPERAND (lhs, 1)))
2774 {
2775 if (TREE_CONSTANT (rhs))
2776 rhs = widen_bitfield (rhs, TREE_OPERAND (lhs, 1), lhs);
2777 else
2778 rhs = NULL;
2779
2780 /* If the value overflowed, then we can not use this equivalence. */
2781 if (rhs && ! is_gimple_min_invariant (rhs))
2782 rhs = NULL;
2783 }
2784
2785 if (rhs)
2786 {
2787 /* Build a new statement with the RHS and LHS exchanged. */
2788 new = build (MODIFY_EXPR, TREE_TYPE (stmt), rhs, lhs);
2789
2790 create_ssa_artficial_load_stmt (new, stmt);
2791
2792 /* Finally enter the statement into the available expression
2793 table. */
2794 lookup_avail_expr (new, true);
2795 }
2796 }
2797 }
2798
2799 /* Replace *OP_P in STMT with any known equivalent value for *OP_P from
2800 CONST_AND_COPIES. */
2801
2802 static bool
2803 cprop_operand (tree stmt, use_operand_p op_p)
2804 {
2805 bool may_have_exposed_new_symbols = false;
2806 tree val;
2807 tree op = USE_FROM_PTR (op_p);
2808
2809 /* If the operand has a known constant value or it is known to be a
2810 copy of some other variable, use the value or copy stored in
2811 CONST_AND_COPIES. */
2812 val = SSA_NAME_VALUE (op);
2813 if (val && val != op && TREE_CODE (val) != VALUE_HANDLE)
2814 {
2815 tree op_type, val_type;
2816
2817 /* Do not change the base variable in the virtual operand
2818 tables. That would make it impossible to reconstruct
2819 the renamed virtual operand if we later modify this
2820 statement. Also only allow the new value to be an SSA_NAME
2821 for propagation into virtual operands. */
2822 if (!is_gimple_reg (op)
2823 && (TREE_CODE (val) != SSA_NAME
2824 || is_gimple_reg (val)
2825 || get_virtual_var (val) != get_virtual_var (op)))
2826 return false;
2827
2828 /* Do not replace hard register operands in asm statements. */
2829 if (TREE_CODE (stmt) == ASM_EXPR
2830 && !may_propagate_copy_into_asm (op))
2831 return false;
2832
2833 /* Get the toplevel type of each operand. */
2834 op_type = TREE_TYPE (op);
2835 val_type = TREE_TYPE (val);
2836
2837 /* While both types are pointers, get the type of the object
2838 pointed to. */
2839 while (POINTER_TYPE_P (op_type) && POINTER_TYPE_P (val_type))
2840 {
2841 op_type = TREE_TYPE (op_type);
2842 val_type = TREE_TYPE (val_type);
2843 }
2844
2845 /* Make sure underlying types match before propagating a constant by
2846 converting the constant to the proper type. Note that convert may
2847 return a non-gimple expression, in which case we ignore this
2848 propagation opportunity. */
2849 if (TREE_CODE (val) != SSA_NAME)
2850 {
2851 if (!lang_hooks.types_compatible_p (op_type, val_type))
2852 {
2853 val = fold_convert (TREE_TYPE (op), val);
2854 if (!is_gimple_min_invariant (val))
2855 return false;
2856 }
2857 }
2858
2859 /* Certain operands are not allowed to be copy propagated due
2860 to their interaction with exception handling and some GCC
2861 extensions. */
2862 else if (!may_propagate_copy (op, val))
2863 return false;
2864
2865 /* Do not propagate copies if the propagated value is at a deeper loop
2866 depth than the propagatee. Otherwise, this may move loop variant
2867 variables outside of their loops and prevent coalescing
2868 opportunities. If the value was loop invariant, it will be hoisted
2869 by LICM and exposed for copy propagation. */
2870 if (loop_depth_of_name (val) > loop_depth_of_name (op))
2871 return false;
2872
2873 /* Dump details. */
2874 if (dump_file && (dump_flags & TDF_DETAILS))
2875 {
2876 fprintf (dump_file, " Replaced '");
2877 print_generic_expr (dump_file, op, dump_flags);
2878 fprintf (dump_file, "' with %s '",
2879 (TREE_CODE (val) != SSA_NAME ? "constant" : "variable"));
2880 print_generic_expr (dump_file, val, dump_flags);
2881 fprintf (dump_file, "'\n");
2882 }
2883
2884 /* If VAL is an ADDR_EXPR or a constant of pointer type, note
2885 that we may have exposed a new symbol for SSA renaming. */
2886 if (TREE_CODE (val) == ADDR_EXPR
2887 || (POINTER_TYPE_P (TREE_TYPE (op))
2888 && is_gimple_min_invariant (val)))
2889 may_have_exposed_new_symbols = true;
2890
2891 if (TREE_CODE (val) != SSA_NAME)
2892 opt_stats.num_const_prop++;
2893 else
2894 opt_stats.num_copy_prop++;
2895
2896 propagate_value (op_p, val);
2897
2898 /* And note that we modified this statement. This is now
2899 safe, even if we changed virtual operands since we will
2900 rescan the statement and rewrite its operands again. */
2901 mark_stmt_modified (stmt);
2902 }
2903 return may_have_exposed_new_symbols;
2904 }
2905
2906 /* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
2907 known value for that SSA_NAME (or NULL if no value is known).
2908
2909 Propagate values from CONST_AND_COPIES into the uses, vuses and
2910 v_may_def_ops of STMT. */
2911
2912 static bool
2913 cprop_into_stmt (tree stmt)
2914 {
2915 bool may_have_exposed_new_symbols = false;
2916 use_operand_p op_p;
2917 ssa_op_iter iter;
2918
2919 FOR_EACH_SSA_USE_OPERAND (op_p, stmt, iter, SSA_OP_ALL_USES)
2920 {
2921 if (TREE_CODE (USE_FROM_PTR (op_p)) == SSA_NAME)
2922 may_have_exposed_new_symbols |= cprop_operand (stmt, op_p);
2923 }
2924
2925 return may_have_exposed_new_symbols;
2926 }
2927
2928
2929 /* Optimize the statement pointed to by iterator SI.
2930
2931 We try to perform some simplistic global redundancy elimination and
2932 constant propagation:
2933
2934 1- To detect global redundancy, we keep track of expressions that have
2935 been computed in this block and its dominators. If we find that the
2936 same expression is computed more than once, we eliminate repeated
2937 computations by using the target of the first one.
2938
2939 2- Constant values and copy assignments. This is used to do very
2940 simplistic constant and copy propagation. When a constant or copy
2941 assignment is found, we map the value on the RHS of the assignment to
2942 the variable in the LHS in the CONST_AND_COPIES table. */
2943
2944 static void
2945 optimize_stmt (struct dom_walk_data *walk_data, basic_block bb,
2946 block_stmt_iterator si)
2947 {
2948 stmt_ann_t ann;
2949 tree stmt, old_stmt;
2950 bool may_optimize_p;
2951 bool may_have_exposed_new_symbols = false;
2952
2953 old_stmt = stmt = bsi_stmt (si);
2954
2955 update_stmt_if_modified (stmt);
2956 ann = stmt_ann (stmt);
2957 opt_stats.num_stmts++;
2958 may_have_exposed_new_symbols = false;
2959
2960 if (dump_file && (dump_flags & TDF_DETAILS))
2961 {
2962 fprintf (dump_file, "Optimizing statement ");
2963 print_generic_stmt (dump_file, stmt, TDF_SLIM);
2964 }
2965
2966 /* Const/copy propagate into USES, VUSES and the RHS of V_MAY_DEFs. */
2967 may_have_exposed_new_symbols = cprop_into_stmt (stmt);
2968
2969 /* If the statement has been modified with constant replacements,
2970 fold its RHS before checking for redundant computations. */
2971 if (ann->modified)
2972 {
2973 tree rhs;
2974
2975 /* Try to fold the statement making sure that STMT is kept
2976 up to date. */
2977 if (fold_stmt (bsi_stmt_ptr (si)))
2978 {
2979 stmt = bsi_stmt (si);
2980 ann = stmt_ann (stmt);
2981
2982 if (dump_file && (dump_flags & TDF_DETAILS))
2983 {
2984 fprintf (dump_file, " Folded to: ");
2985 print_generic_stmt (dump_file, stmt, TDF_SLIM);
2986 }
2987 }
2988
2989 rhs = get_rhs (stmt);
2990 if (rhs && TREE_CODE (rhs) == ADDR_EXPR)
2991 recompute_tree_invarant_for_addr_expr (rhs);
2992
2993 /* Constant/copy propagation above may change the set of
2994 virtual operands associated with this statement. Folding
2995 may remove the need for some virtual operands.
2996
2997 Indicate we will need to rescan and rewrite the statement. */
2998 may_have_exposed_new_symbols = true;
2999 }
3000
3001 /* Check for redundant computations. Do this optimization only
3002 for assignments without volatile ops, and for conditionals and switches. */
3003 may_optimize_p = (!ann->has_volatile_ops
3004 && ((TREE_CODE (stmt) == RETURN_EXPR
3005 && TREE_OPERAND (stmt, 0)
3006 && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR
3007 && ! (TREE_SIDE_EFFECTS
3008 (TREE_OPERAND (TREE_OPERAND (stmt, 0), 1))))
3009 || (TREE_CODE (stmt) == MODIFY_EXPR
3010 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (stmt, 1)))
3011 || TREE_CODE (stmt) == COND_EXPR
3012 || TREE_CODE (stmt) == SWITCH_EXPR));
3013
3014 if (may_optimize_p)
3015 may_have_exposed_new_symbols
3016 |= eliminate_redundant_computations (walk_data, stmt, ann);
3017
3018 /* Record any additional equivalences created by this statement. */
3019 if (TREE_CODE (stmt) == MODIFY_EXPR)
3020 record_equivalences_from_stmt (stmt,
3021 may_optimize_p,
3022 ann);
3023
3024 /* If STMT is a COND_EXPR and it was modified, then we may know
3025 where it goes. If that is the case, then mark the CFG as altered.
3026
3027 This will cause us to later call remove_unreachable_blocks and
3028 cleanup_tree_cfg when it is safe to do so. It is not safe to
3029 clean things up here since removal of edges and such can trigger
3030 the removal of PHI nodes, which in turn can release SSA_NAMEs to
3031 the manager.
3032
3033 That's all fine and good, except that once SSA_NAMEs are released
3034 to the manager, we must not call create_ssa_name until all references
3035 to released SSA_NAMEs have been eliminated.
3036
3037 All references to the deleted SSA_NAMEs can not be eliminated until
3038 we remove unreachable blocks.
3039
3040 We can not remove unreachable blocks until after we have completed
3041 any queued jump threading.
3042
3043 We can not complete any queued jump threads until we have taken
3044 appropriate variables out of SSA form. Taking variables out of
3045 SSA form can call create_ssa_name and thus we lose.
3046
3047 Ultimately I suspect we're going to need to change the interface
3048 into the SSA_NAME manager. */
3049
3050 if (ann->modified)
3051 {
3052 tree val = NULL;
3053
3054 if (TREE_CODE (stmt) == COND_EXPR)
3055 val = COND_EXPR_COND (stmt);
3056 else if (TREE_CODE (stmt) == SWITCH_EXPR)
3057 val = SWITCH_COND (stmt);
3058
3059 if (val && TREE_CODE (val) == INTEGER_CST && find_taken_edge (bb, val))
3060 cfg_altered = true;
3061
3062 /* If we simplified a statement in such a way as to be shown that it
3063 cannot trap, update the eh information and the cfg to match. */
3064 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
3065 {
3066 bitmap_set_bit (need_eh_cleanup, bb->index);
3067 if (dump_file && (dump_flags & TDF_DETAILS))
3068 fprintf (dump_file, " Flagged to clear EH edges.\n");
3069 }
3070 }
3071
3072 if (may_have_exposed_new_symbols)
3073 VEC_safe_push (tree, heap, stmts_to_rescan, bsi_stmt (si));
3074 }
3075
3076 /* Replace the RHS of STMT with NEW_RHS. If RHS can be found in the
3077 available expression hashtable, then return the LHS from the hash
3078 table.
3079
3080 If INSERT is true, then we also update the available expression
3081 hash table to account for the changes made to STMT. */
3082
3083 static tree
3084 update_rhs_and_lookup_avail_expr (tree stmt, tree new_rhs, bool insert)
3085 {
3086 tree cached_lhs = NULL;
3087
3088 /* Remove the old entry from the hash table. */
3089 if (insert)
3090 {
3091 struct expr_hash_elt element;
3092
3093 initialize_hash_element (stmt, NULL, &element);
3094 htab_remove_elt_with_hash (avail_exprs, &element, element.hash);
3095 }
3096
3097 /* Now update the RHS of the assignment. */
3098 TREE_OPERAND (stmt, 1) = new_rhs;
3099
3100 /* Now lookup the updated statement in the hash table. */
3101 cached_lhs = lookup_avail_expr (stmt, insert);
3102
3103 /* We have now called lookup_avail_expr twice with two different
3104 versions of this same statement, once in optimize_stmt, once here.
3105
3106 We know the call in optimize_stmt did not find an existing entry
3107 in the hash table, so a new entry was created. At the same time
3108 this statement was pushed onto the AVAIL_EXPRS_STACK vector.
3109
3110 If this call failed to find an existing entry in the hash
3111 table, then the new version of this statement was entered
3112 into the hash table and pushed onto AVAIL_EXPRS_STACK for
3113 the second time, so there are two copies on AVAIL_EXPRS_STACK.
3114
3115 If this call succeeded, we still have one copy of this
3116 statement on AVAIL_EXPRS_STACK.
3117
3118 For both cases, we need to pop the most recent entry off
3119 AVAIL_EXPRS_STACK. For the case where we never found this
3120 statement in the hash table, that leaves precisely one
3121 copy of this statement on AVAIL_EXPRS_STACK. For the case
3122 where we found a copy in the second lookup, we want _no_
3123 copies of this statement left on AVAIL_EXPRS_STACK. */
3124 if (insert)
3125 VEC_pop (tree, avail_exprs_stack);
3126
3127 /* And make sure we record the fact that we modified this
3128 statement. */
3129 mark_stmt_modified (stmt);
3130
3131 return cached_lhs;
3132 }
3133
3134 /* Search for an existing instance of STMT in the AVAIL_EXPRS table. If
3135 found, return its LHS. Otherwise insert STMT in the table and return
3136 NULL_TREE.
3137
3138 Also, when an expression is first inserted in the AVAIL_EXPRS table, it
3139 is also pushed onto AVAIL_EXPRS_STACK, so that the entry can be
3140 removed when we finish processing this block and its children.
3141
3142 NOTE: This function assumes that STMT is a MODIFY_EXPR node that
3143 contains no CALL_EXPR on its RHS and makes no volatile or
3144 aliased references. */
3145
3146 static tree
3147 lookup_avail_expr (tree stmt, bool insert)
3148 {
3149 void **slot;
3150 tree lhs;
3151 tree temp;
3152 struct expr_hash_elt *element = xmalloc (sizeof (struct expr_hash_elt));
3153
3154 lhs = TREE_CODE (stmt) == MODIFY_EXPR ? TREE_OPERAND (stmt, 0) : NULL;
3155
3156 initialize_hash_element (stmt, lhs, element);
3157
3158 /* Don't bother remembering constant assignments and copy operations.
3159 Constants and copy operations are handled by the constant/copy propagator
3160 in optimize_stmt. */
3161 if (TREE_CODE (element->rhs) == SSA_NAME
3162 || is_gimple_min_invariant (element->rhs))
3163 {
3164 free (element);
3165 return NULL_TREE;
3166 }
3167
3168 /* If this is an equality test against zero, see if we have recorded a
3169 nonzero value for the variable in question. */
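/* For example (illustrative): if p_1 was recorded as nonzero,
   the test  p_1 == 0  folds to false and  p_1 != 0  folds to
   true.  */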
3170 if ((TREE_CODE (element->rhs) == EQ_EXPR
3171 || TREE_CODE (element->rhs) == NE_EXPR)
3172 && TREE_CODE (TREE_OPERAND (element->rhs, 0)) == SSA_NAME
3173 && integer_zerop (TREE_OPERAND (element->rhs, 1)))
3174 {
3175 int indx = SSA_NAME_VERSION (TREE_OPERAND (element->rhs, 0));
3176
3177 if (bitmap_bit_p (nonzero_vars, indx))
3178 {
3179 tree t = element->rhs;
3180 free (element);
3181
3182 if (TREE_CODE (t) == EQ_EXPR)
3183 return boolean_false_node;
3184 else
3185 return boolean_true_node;
3186 }
3187 }
3188
3189 /* Finally try to find the expression in the main expression hash table. */
3190 slot = htab_find_slot_with_hash (avail_exprs, element, element->hash,
3191 (insert ? INSERT : NO_INSERT));
3192 if (slot == NULL)
3193 {
3194 free (element);
3195 return NULL_TREE;
3196 }
3197
3198 if (*slot == NULL)
3199 {
3200 *slot = (void *) element;
3201 VEC_safe_push (tree, heap, avail_exprs_stack,
3202 stmt ? stmt : element->rhs);
3203 return NULL_TREE;
3204 }
3205
3206 /* Extract the LHS of the assignment so that it can be used as the current
3207 definition of another variable. */
3208 lhs = ((struct expr_hash_elt *)*slot)->lhs;
3209
3210 /* See if the LHS appears in the CONST_AND_COPIES table. If it does, then
3211 use the value from the const_and_copies table. */
3212 if (TREE_CODE (lhs) == SSA_NAME)
3213 {
3214 temp = SSA_NAME_VALUE (lhs);
3215 if (temp && TREE_CODE (temp) != VALUE_HANDLE)
3216 lhs = temp;
3217 }
3218
3219 free (element);
3220 return lhs;
3221 }
3222
3223 /* Given a condition COND, record into HI_P, LO_P and INVERTED_P the
3224 range of values that result in the conditional having a true value.
3225
3226 Return true if we are successful in extracting a range from COND and
3227 false if we are unsuccessful. */
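/* For example (illustrative): for  x <= 5  with x a signed int we
   return low = INT_MIN, high = 5 and inverted = 0; for  x != 5
   we return low = high = 5 with inverted = 1.  */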
3228
3229 static bool
3230 extract_range_from_cond (tree cond, tree *hi_p, tree *lo_p, int *inverted_p)
3231 {
3232 tree op1 = TREE_OPERAND (cond, 1);
3233 tree high, low, type;
3234 int inverted;
3235
3236 type = TREE_TYPE (op1);
3237
3238 /* Experiments have shown that it's rarely, if ever, useful to
3239 record ranges for enumerations. Presumably this is due to
3240 the fact that they're rarely used directly. They are typically
3241 cast into an integer type and used that way. */
3242 if (TREE_CODE (type) != INTEGER_TYPE
3243 /* We don't know how to deal with types with variable bounds. */
3244 || TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
3245 || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
3246 return 0;
3247
3248 switch (TREE_CODE (cond))
3249 {
3250 case EQ_EXPR:
3251 high = low = op1;
3252 inverted = 0;
3253 break;
3254
3255 case NE_EXPR:
3256 high = low = op1;
3257 inverted = 1;
3258 break;
3259
3260 case GE_EXPR:
3261 low = op1;
3262 high = TYPE_MAX_VALUE (type);
3263 inverted = 0;
3264 break;
3265
3266 case GT_EXPR:
3267 high = TYPE_MAX_VALUE (type);
3268 if (!tree_int_cst_lt (op1, high))
3269 return 0;
3270 low = int_const_binop (PLUS_EXPR, op1, integer_one_node, 1);
3271 inverted = 0;
3272 break;
3273
3274 case LE_EXPR:
3275 high = op1;
3276 low = TYPE_MIN_VALUE (type);
3277 inverted = 0;
3278 break;
3279
3280 case LT_EXPR:
3281 low = TYPE_MIN_VALUE (type);
3282 if (!tree_int_cst_lt (low, op1))
3283 return 0;
3284 high = int_const_binop (MINUS_EXPR, op1, integer_one_node, 1);
3285 inverted = 0;
3286 break;
3287
3288 default:
3289 return 0;
3290 }
3291
3292 *hi_p = high;
3293 *lo_p = low;
3294 *inverted_p = inverted;
3295 return 1;
3296 }
3297
3298 /* Record a range created by COND for basic block BB. */
3299
3300 static void
3301 record_range (tree cond, basic_block bb)
3302 {
3303 enum tree_code code = TREE_CODE (cond);
3304
3305 /* We explicitly ignore NE_EXPRs and all the unordered comparisons.
3306 They rarely allow for meaningful range optimizations and significantly
3307 complicate the implementation. */
3308 if ((code == LT_EXPR || code == LE_EXPR || code == GT_EXPR
3309 || code == GE_EXPR || code == EQ_EXPR)
3310 && TREE_CODE (TREE_TYPE (TREE_OPERAND (cond, 1))) == INTEGER_TYPE)
3311 {
3312 struct vrp_hash_elt *vrp_hash_elt;
3313 struct vrp_element *element;
3314 varray_type *vrp_records_p;
3315 void **slot;
3316
3317
3318 vrp_hash_elt = xmalloc (sizeof (struct vrp_hash_elt));
3319 vrp_hash_elt->var = TREE_OPERAND (cond, 0);
3320 vrp_hash_elt->records = NULL;
3321 slot = htab_find_slot (vrp_data, vrp_hash_elt, INSERT);
3322
3323 if (*slot == NULL)
3324 *slot = (void *) vrp_hash_elt;
3325 else
3326 free (vrp_hash_elt);
3327
3328 vrp_hash_elt = (struct vrp_hash_elt *) *slot;
3329 vrp_records_p = &vrp_hash_elt->records;
3330
3331 element = ggc_alloc (sizeof (struct vrp_element));
3332 element->low = NULL;
3333 element->high = NULL;
3334 element->cond = cond;
3335 element->bb = bb;
3336
3337 if (*vrp_records_p == NULL)
3338 VARRAY_GENERIC_PTR_INIT (*vrp_records_p, 2, "vrp records");
3339
3340 VARRAY_PUSH_GENERIC_PTR (*vrp_records_p, element);
3341 VEC_safe_push (tree, heap, vrp_variables_stack, TREE_OPERAND (cond, 0));
3342 }
3343 }
3344
3345 /* Hashing and equality functions for VRP_DATA.
3346
3347 Since this hash table is addressed by SSA_NAMEs, we can hash on
3348 their version number and equality can be determined with a
3349 pointer comparison. */
3350
3351 static hashval_t
3352 vrp_hash (const void *p)
3353 {
3354 tree var = ((struct vrp_hash_elt *)p)->var;
3355
3356 return SSA_NAME_VERSION (var);
3357 }
3358
3359 static int
3360 vrp_eq (const void *p1, const void *p2)
3361 {
3362 tree var1 = ((struct vrp_hash_elt *)p1)->var;
3363 tree var2 = ((struct vrp_hash_elt *)p2)->var;
3364
3365 return var1 == var2;
3366 }
3367
3368 /* Hashing and equality functions for AVAIL_EXPRS. The table stores
3369 MODIFY_EXPR statements. We compute a value number for expressions using
3370 the code of the expression and the SSA numbers of its operands. */
3371
3372 static hashval_t
3373 avail_expr_hash (const void *p)
3374 {
3375 tree stmt = ((struct expr_hash_elt *)p)->stmt;
3376 tree rhs = ((struct expr_hash_elt *)p)->rhs;
3377 tree vuse;
3378 ssa_op_iter iter;
3379 hashval_t val = 0;
3380
3381 /* iterative_hash_expr knows how to deal with any expression and
3382 deals with commutative operators as well, so just use it instead
3383 of duplicating such complexities here. */
3384 val = iterative_hash_expr (rhs, val);
3385
3386 /* If the hash table entry is not associated with a statement, then we
3387 can just hash the expression and not worry about virtual operands
3388 and such. */
3389 if (!stmt || !stmt_ann (stmt))
3390 return val;
3391
3392 /* Add the SSA version numbers of every vuse operand. This is important
3393 because compound variables like arrays are not renamed in the
3394 operands. Rather, the rename is done on the virtual variable
3395 representing all the elements of the array. */
3396 FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VUSE)
3397 val = iterative_hash_expr (vuse, val);
3398
3399 return val;
3400 }
3401
3402 static hashval_t
3403 real_avail_expr_hash (const void *p)
3404 {
3405 return ((const struct expr_hash_elt *)p)->hash;
3406 }
3407
3408 static int
3409 avail_expr_eq (const void *p1, const void *p2)
3410 {
3411 tree stmt1 = ((struct expr_hash_elt *)p1)->stmt;
3412 tree rhs1 = ((struct expr_hash_elt *)p1)->rhs;
3413 tree stmt2 = ((struct expr_hash_elt *)p2)->stmt;
3414 tree rhs2 = ((struct expr_hash_elt *)p2)->rhs;
3415
3416 /* If they are the same physical expression, return true. */
3417 if (rhs1 == rhs2 && stmt1 == stmt2)
3418 return true;
3419
3420 /* If their codes are not equal, then quit now. */
3421 if (TREE_CODE (rhs1) != TREE_CODE (rhs2))
3422 return false;
3423
3424 /* In case of a collision, both RHSs have to be identical and have the
3425 same VUSE operands. */
3426 if ((TREE_TYPE (rhs1) == TREE_TYPE (rhs2)
3427 || lang_hooks.types_compatible_p (TREE_TYPE (rhs1), TREE_TYPE (rhs2)))
3428 && operand_equal_p (rhs1, rhs2, OEP_PURE_SAME))
3429 {
3430 bool ret = compare_ssa_operands_equal (stmt1, stmt2, SSA_OP_VUSE);
3431 gcc_assert (!ret || ((struct expr_hash_elt *)p1)->hash
3432 == ((struct expr_hash_elt *)p2)->hash);
3433 return ret;
3434 }
3435
3436 return false;
3437 }