/* SSA Dominator optimizations for trees
   Copyright (C) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "ggc.h"
#include "basic-block.h"
#include "cfgloop.h"
#include "output.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "domwalk.h"
#include "real.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"
#include "params.h"

/* This file implements optimizations on the dominator tree.  */


/* Structure for recording edge equivalences as well as any pending
   edge redirections during the dominator optimizer.

   Computing and storing the edge equivalences instead of creating
   them on-demand can save significant amounts of time, particularly
   for pathological cases involving switch statements.

   These structures live for a single iteration of the dominator
   optimizer in the edge's AUX field.  At the end of an iteration we
   free each of these structures and update the AUX field to point
   to any requested redirection target (the code for updating the
   CFG and SSA graph for edge redirection expects redirection edge
   targets to be in the AUX field for each edge).  */

struct edge_info
{
  /* If this edge creates a simple equivalence, the LHS and RHS of
     the equivalence will be stored here.  */
  tree lhs;
  tree rhs;

  /* Traversing an edge may also indicate one or more particular conditions
     are true or false.  The number of recorded conditions can vary, but
     can be determined by the condition's code.  So we have an array
     and its maximum index rather than use a varray.  */
  tree *cond_equivalences;
  unsigned int max_cond_equivalences;

  /* If we can thread this edge this field records the new target.  */
  edge redirection_target;
};
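
/* As an illustrative sketch (hypothetical GIMPLE, not from any
   particular test case): a block ending in

     if (a_1 == 5)

   gets LHS = a_1, RHS = 5 recorded on its true outgoing edge as a
   simple equivalence, while COND_EQUIVALENCES records "a_1 == 5" as
   true and the inverted condition "a_1 != 5" as false along that
   same edge.  */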


/* Hash table with expressions made available during the renaming process.
   When an assignment of the form X_i = EXPR is found, the statement is
   stored in this table.  If the same expression EXPR is later found on the
   RHS of another statement, it is replaced with X_i (thus performing
   global redundancy elimination).  Similarly as we pass through conditionals
   we record the conditional itself as having either a true or false value
   in this table.  */
static htab_t avail_exprs;
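
/* For instance (hypothetical GIMPLE): after processing

     a_1 = b_2 + c_3;

   the expression "b_2 + c_3" is entered into this table with value
   a_1.  If a dominated statement

     d_4 = b_2 + c_3;

   is encountered later, the lookup succeeds and its RHS is replaced
   with a_1.  */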

/* Stack of available expressions in AVAIL_EXPRs.  Each block pushes any
   expressions it enters into the hash table along with a marker entry
   (null).  When we finish processing the block, we pop off entries and
   remove the expressions from the global hash table until we hit the
   marker.  */
static VEC(tree,heap) *avail_exprs_stack;
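
/* A small sketch of that unwind discipline: entering a block pushes
   the NULL marker, then one entry per expression the block makes
   available; finalizing the block pops entries (removing each from
   AVAIL_EXPRS) until the NULL marker is reached, restoring the state
   seen in the block's immediate dominator.  */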

/* Stack of statements we need to rescan during finalization for newly
   exposed variables.

   Statement rescanning must occur after the current block's available
   expressions are removed from AVAIL_EXPRS.  Else we may change the
   hash code for an expression and be unable to find/remove it from
   AVAIL_EXPRS.  */
static VEC(tree,heap) *stmts_to_rescan;

/* Structure for entries in the expression hash table.

   This requires more memory for the hash table entries, but allows us
   to avoid creating silly tree nodes and annotations for conditionals,
   and eliminates two global hash tables and two block-local varrays.

   It also allows us to reduce the number of hash table lookups we
   have to perform in lookup_avail_expr and finally it allows us to
   significantly reduce the number of calls into the hashing routine
   itself.  */

struct expr_hash_elt
{
  /* The value (lhs) of this expression.  */
  tree lhs;

  /* The expression (rhs) we want to record.  */
  tree rhs;

  /* The stmt pointer if this element corresponds to a statement.  */
  tree stmt;

  /* The hash value for RHS/ann.  */
  hashval_t hash;
};

/* Stack of dest,src pairs that need to be restored during finalization.

   A NULL entry is used to mark the end of pairs which need to be
   restored during finalization of this block.  */
static VEC(tree,heap) *const_and_copies_stack;

/* Bitmap of SSA_NAMEs known to have a nonzero value, even if we do not
   know their exact value.  */
static bitmap nonzero_vars;

/* Bitmap of blocks that are scheduled to be threaded through.  This
   is used to communicate with thread_through_blocks.  */
static bitmap threaded_blocks;

/* Stack of SSA_NAMEs which need their NONZERO_VARS property cleared
   when the current block is finalized.

   A NULL entry is used to mark the end of names needing their
   entry in NONZERO_VARS cleared during finalization of this block.  */
static VEC(tree,heap) *nonzero_vars_stack;

/* Track whether or not we have changed the control flow graph.  */
static bool cfg_altered;

/* Bitmap of blocks that have had EH statements cleaned.  We should
   remove their dead edges eventually.  */
static bitmap need_eh_cleanup;

/* Statistics for dominator optimizations.  */
struct opt_stats_d
{
  long num_stmts;
  long num_exprs_considered;
  long num_re;
  long num_const_prop;
  long num_copy_prop;
  long num_iterations;
};

static struct opt_stats_d opt_stats;

/* Value range propagation record.  Each time we encounter a conditional
   of the form SSA_NAME COND CONST we create a new vrp_element to record
   how the condition affects the possible values SSA_NAME may have.

   Each record contains the condition tested (COND), and the range of
   values the variable may legitimately have if COND is true.  Note the
   range of values may be a smaller range than COND specifies if we have
   recorded other ranges for this variable.  Each record also contains the
   block in which the range was recorded for invalidation purposes.

   Note that the current known range is computed lazily.  This allows us
   to avoid the overhead of computing ranges which are never queried.

   When we encounter a conditional, we look for records which constrain
   the SSA_NAME used in the condition.  In some cases those records allow
   us to determine the condition's result at compile time.  In other cases
   they may allow us to simplify the condition.

   We also use value ranges to do things like transform signed div/mod
   operations into unsigned div/mod or to simplify ABS_EXPRs.

   Simple experiments have shown these optimizations to not be all that
   useful on switch statements (much to my surprise).  So switch statement
   optimizations are not performed.
   Note carefully we do not propagate information through each statement
   in the block, i.e., if we know variable X has a value in the range
   [0, 25] and we encounter Y = X + 1, we do not track a value range
   for Y (which would be [1, 26] if we cared).  Similarly we do not
   constrain values as we encounter narrowing typecasts, etc.  */

struct vrp_element
{
  /* The highest and lowest values the variable in COND may contain when
     COND is true.  Note this may not necessarily be the same values
     tested by COND if the same variable was used in earlier conditionals.

     Note this is computed lazily and thus can be NULL indicating that
     the values have not been computed yet.  */
  tree low;
  tree high;

  /* The actual conditional we recorded.  This is needed since we compute
     ranges lazily.  */
  tree cond;

  /* The basic block where this record was created.  We use this to determine
     when to remove records.  */
  basic_block bb;
};
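
/* A hypothetical example of the lazy scheme: for

     if (x_5 > 10)
       {
         if (x_5 > 20)
           ...

   traversing the true edge of the outer conditional records a
   vrp_element for x_5 whose COND is "x_5 > 10" but whose LOW/HIGH
   fields stay NULL.  Only if the inner conditional later queries
   x_5's range is [11, MAX] actually computed and cached.  */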

/* A hash table holding value range records (VRP_ELEMENTs) for a given
   SSA_NAME.  We used to use a varray indexed by SSA_NAME_VERSION, but
   that gets awfully wasteful, particularly since the density of objects
   with useful information is very low.  */
static htab_t vrp_data;

typedef struct vrp_element *vrp_element_p;

DEF_VEC_P(vrp_element_p);
DEF_VEC_ALLOC_P(vrp_element_p,heap);

/* An entry in the VRP_DATA hash table.  We record the variable and a
   varray of VRP_ELEMENT records associated with that variable.  */
struct vrp_hash_elt
{
  tree var;
  VEC(vrp_element_p,heap) *records;
};

/* Stack of SSA_NAMEs which had their values constrained by operations
   in this basic block.  During finalization of this block we use this
   list to determine which variables need their VRP data updated.

   A NULL entry marks the end of the SSA_NAMEs associated with this block.  */
static VEC(tree,heap) *vrp_variables_stack;

struct eq_expr_value
{
  tree src;
  tree dst;
};

/* Local functions.  */
static void optimize_stmt (struct dom_walk_data *,
                           basic_block bb,
                           block_stmt_iterator);
static tree lookup_avail_expr (tree, bool);
static hashval_t vrp_hash (const void *);
static int vrp_eq (const void *, const void *);
static hashval_t avail_expr_hash (const void *);
static hashval_t real_avail_expr_hash (const void *);
static int avail_expr_eq (const void *, const void *);
static void htab_statistics (FILE *, htab_t);
static void record_cond (tree, tree);
static void record_const_or_copy (tree, tree);
static void record_equality (tree, tree);
static tree update_rhs_and_lookup_avail_expr (tree, tree, bool);
static tree simplify_rhs_and_lookup_avail_expr (tree, int);
static tree simplify_cond_and_lookup_avail_expr (tree, stmt_ann_t, int);
static tree simplify_switch_and_lookup_avail_expr (tree, int);
static tree find_equivalent_equality_comparison (tree);
static void record_range (tree, basic_block);
static bool extract_range_from_cond (tree, tree *, tree *, int *);
static void record_equivalences_from_phis (basic_block);
static void record_equivalences_from_incoming_edge (basic_block);
static bool eliminate_redundant_computations (tree, stmt_ann_t);
static void record_equivalences_from_stmt (tree, int, stmt_ann_t);
static void thread_across_edge (struct dom_walk_data *, edge);
static void dom_opt_finalize_block (struct dom_walk_data *, basic_block);
static void dom_opt_initialize_block (struct dom_walk_data *, basic_block);
static void propagate_to_outgoing_edges (struct dom_walk_data *, basic_block);
static void remove_local_expressions_from_table (void);
static void restore_vars_to_original_value (void);
static edge single_incoming_edge_ignoring_loop_edges (basic_block);
static void restore_nonzero_vars_to_original_value (void);
static inline bool unsafe_associative_fp_binop (tree);


/* Local version of fold that doesn't introduce cruft.  */

static tree
local_fold (tree t)
{
  t = fold (t);

  /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR that
     may have been added by fold, and "useless" type conversions that might
     now be apparent due to propagation.  */
  STRIP_USELESS_TYPE_CONVERSION (t);

  return t;
}

/* Allocate an EDGE_INFO for edge E and attach it to E.
   Return the new EDGE_INFO structure.  */

static struct edge_info *
allocate_edge_info (edge e)
{
  struct edge_info *edge_info;

  edge_info = xcalloc (1, sizeof (struct edge_info));

  e->aux = edge_info;
  return edge_info;
}

/* Free all EDGE_INFO structures associated with edges in the CFG.
   If a particular edge can be threaded, copy the redirection
   target from the EDGE_INFO structure into the edge's AUX field
   as required by code to update the CFG and SSA graph for
   jump threading.  */

static void
free_all_edge_infos (void)
{
  basic_block bb;
  edge_iterator ei;
  edge e;

  FOR_EACH_BB (bb)
    {
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          struct edge_info *edge_info = e->aux;

          if (edge_info)
            {
              e->aux = edge_info->redirection_target;
              if (edge_info->cond_equivalences)
                free (edge_info->cond_equivalences);
              free (edge_info);
            }
        }
    }
}

/* Free an instance of vrp_hash_elt.  */

static void
vrp_free (void *data)
{
  struct vrp_hash_elt *elt = data;
  struct VEC(vrp_element_p,heap) **vrp_elt = &elt->records;

  VEC_free (vrp_element_p, heap, *vrp_elt);
  free (elt);
}

/* Jump threading, redundancy elimination and const/copy propagation.

   This pass may expose new symbols that need to be renamed into SSA.  For
   every new symbol exposed, its corresponding bit will be set in
   VARS_TO_RENAME.  */

static void
tree_ssa_dominator_optimize (void)
{
  struct dom_walk_data walk_data;
  unsigned int i;
  struct loops loops_info;

  memset (&opt_stats, 0, sizeof (opt_stats));

  /* Create our hash tables.  */
  avail_exprs = htab_create (1024, real_avail_expr_hash, avail_expr_eq, free);
  vrp_data = htab_create (ceil_log2 (num_ssa_names), vrp_hash, vrp_eq,
                          vrp_free);
  avail_exprs_stack = VEC_alloc (tree, heap, 20);
  const_and_copies_stack = VEC_alloc (tree, heap, 20);
  nonzero_vars_stack = VEC_alloc (tree, heap, 20);
  vrp_variables_stack = VEC_alloc (tree, heap, 20);
  stmts_to_rescan = VEC_alloc (tree, heap, 20);
  nonzero_vars = BITMAP_ALLOC (NULL);
  threaded_blocks = BITMAP_ALLOC (NULL);
  need_eh_cleanup = BITMAP_ALLOC (NULL);

  /* Setup callbacks for the generic dominator tree walker.  */
  walk_data.walk_stmts_backward = false;
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.initialize_block_local_data = NULL;
  walk_data.before_dom_children_before_stmts = dom_opt_initialize_block;
  walk_data.before_dom_children_walk_stmts = optimize_stmt;
  walk_data.before_dom_children_after_stmts = propagate_to_outgoing_edges;
  walk_data.after_dom_children_before_stmts = NULL;
  walk_data.after_dom_children_walk_stmts = NULL;
  walk_data.after_dom_children_after_stmts = dom_opt_finalize_block;
  /* Right now we only attach a dummy COND_EXPR to the global data pointer.
     When we attach more stuff we'll need to fill this out with a real
     structure.  */
  walk_data.global_data = NULL;
  walk_data.block_local_data_size = 0;
  walk_data.interesting_blocks = NULL;

  /* Now initialize the dominator walker.  */
  init_walk_dominator_tree (&walk_data);

  calculate_dominance_info (CDI_DOMINATORS);

  /* We need to know which edges exit loops so that we can
     aggressively thread through loop headers to an exit
     edge.  */
  flow_loops_find (&loops_info);
  mark_loop_exit_edges (&loops_info);
  flow_loops_free (&loops_info);

  /* Clean up the CFG so that any forwarder blocks created by loop
     canonicalization are removed.  */
  cleanup_tree_cfg ();
  calculate_dominance_info (CDI_DOMINATORS);

  /* If we prove certain blocks are unreachable, then we want to
     repeat the dominator optimization process as PHI nodes may
     have turned into copies which allow better propagation of
     values.  So we repeat until we do not identify any new unreachable
     blocks.  */
  do
    {
      /* Optimize the dominator tree.  */
      cfg_altered = false;

      /* We need accurate information regarding back edges in the CFG
         for jump threading.  */
      mark_dfs_back_edges ();

      /* Recursively walk the dominator tree optimizing statements.  */
      walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);

      {
        block_stmt_iterator bsi;
        basic_block bb;
        FOR_EACH_BB (bb)
          {
            for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
              {
                update_stmt_if_modified (bsi_stmt (bsi));
              }
          }
      }

      /* If we exposed any new variables, go ahead and put them into
         SSA form now, before we handle jump threading.  This simplifies
         interactions between rewriting of _DECL nodes into SSA form
         and rewriting SSA_NAME nodes into SSA form after block
         duplication and CFG manipulation.  */
      update_ssa (TODO_update_ssa);

      free_all_edge_infos ();

      /* Thread jumps, creating duplicate blocks as needed.  */
      cfg_altered |= thread_through_all_blocks (threaded_blocks);

      /* Removal of statements may make some EH edges dead.  Purge
         such edges from the CFG as needed.  */
      if (!bitmap_empty_p (need_eh_cleanup))
        {
          cfg_altered |= tree_purge_all_dead_eh_edges (need_eh_cleanup);
          bitmap_zero (need_eh_cleanup);
        }

      if (cfg_altered)
        free_dominance_info (CDI_DOMINATORS);

      /* Only iterate if we threaded jumps AND the CFG cleanup did
         something interesting.  Other cases generate far fewer
         optimization opportunities and thus are not worth another
         full DOM iteration.  */
      cfg_altered &= cleanup_tree_cfg ();

      if (rediscover_loops_after_threading)
        {
          /* Rerun basic loop analysis to discover any newly
             created loops and update the set of exit edges.  */
          rediscover_loops_after_threading = false;
          flow_loops_find (&loops_info);
          mark_loop_exit_edges (&loops_info);
          flow_loops_free (&loops_info);

          /* Remove any forwarder blocks inserted by loop
             header canonicalization.  */
          cleanup_tree_cfg ();
        }

      calculate_dominance_info (CDI_DOMINATORS);

      update_ssa (TODO_update_ssa);

      /* Reinitialize the various tables.  */
      bitmap_clear (nonzero_vars);
      bitmap_clear (threaded_blocks);
      htab_empty (avail_exprs);
      htab_empty (vrp_data);

      /* Finally, remove everything except invariants in SSA_NAME_VALUE.

         This must be done before we iterate as we might have a
         reference to an SSA_NAME which was removed by the call to
         update_ssa.

         Long term we will be able to let everything in SSA_NAME_VALUE
         persist.  However, for now, we know this is the safe thing to do.  */
      for (i = 0; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          tree value;

          if (!name)
            continue;

          value = SSA_NAME_VALUE (name);
          if (value && !is_gimple_min_invariant (value))
            SSA_NAME_VALUE (name) = NULL;
        }

      opt_stats.num_iterations++;
    }
  while (optimize > 1 && cfg_altered);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & TDF_STATS))
    dump_dominator_optimization_stats (dump_file);

  /* We emptied the hash table earlier, now delete it completely.  */
  htab_delete (avail_exprs);
  htab_delete (vrp_data);

  /* It is not necessary to clear CURRDEFS, REDIRECTION_EDGES, VRP_DATA,
     CONST_AND_COPIES, and NONZERO_VARS as they all get cleared at the bottom
     of the do-while loop above.  */

  /* And finalize the dominator walker.  */
  fini_walk_dominator_tree (&walk_data);

  /* Free the bitmaps allocated above.  */
  BITMAP_FREE (nonzero_vars);
  BITMAP_FREE (threaded_blocks);
  BITMAP_FREE (need_eh_cleanup);

  VEC_free (tree, heap, avail_exprs_stack);
  VEC_free (tree, heap, const_and_copies_stack);
  VEC_free (tree, heap, nonzero_vars_stack);
  VEC_free (tree, heap, vrp_variables_stack);
  VEC_free (tree, heap, stmts_to_rescan);
}

static bool
gate_dominator (void)
{
  return flag_tree_dom != 0;
}

struct tree_opt_pass pass_dominator =
{
  "dom",                                /* name */
  gate_dominator,                       /* gate */
  tree_ssa_dominator_optimize,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_SSA_DOMINATOR_OPTS,           /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func
    | TODO_update_ssa
    | TODO_verify_ssa,                  /* todo_flags_finish */
  0                                     /* letter */
};

/* We are exiting E->src, see if E->dest ends with a conditional
   jump which has a known value when reached via E.

   Special care is necessary if E is a back edge in the CFG as we
   will have already recorded equivalences for E->dest into our
   various tables, including the result of the conditional at
   the end of E->dest.  Threading opportunities are severely
   limited in that case to avoid short-circuiting the loop
   incorrectly.

   Note it is quite common for the first block inside a loop to
   end with a conditional which is either always true or always
   false when reached via the loop backedge.  Thus we do not want
   to blindly disable threading across a loop backedge.  */
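
/* An illustrative (made-up) fragment:

     B1: if (x_2 > 0) goto B2; else goto B4;
     B2: y_3 = x_2 + 1;
         if (x_2 > 0) goto B3; else goto B4;

   When exiting B1 along its true edge, the equivalences recorded for
   that edge tell us the conditional ending B2 must also be true, so
   the B1->B2 edge can be redirected to reach B3 directly, bypassing
   B2's conditional.  */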

static void
thread_across_edge (struct dom_walk_data *walk_data, edge e)
{
  block_stmt_iterator bsi;
  tree stmt = NULL;
  tree phi;
  int stmt_count = 0;
  int max_stmt_count;

  /* If E->dest does not end with a conditional, then there is
     nothing to do.  */
  bsi = bsi_last (e->dest);
  if (bsi_end_p (bsi)
      || ! bsi_stmt (bsi)
      || (TREE_CODE (bsi_stmt (bsi)) != COND_EXPR
          && TREE_CODE (bsi_stmt (bsi)) != GOTO_EXPR
          && TREE_CODE (bsi_stmt (bsi)) != SWITCH_EXPR))
    return;

  /* The basic idea here is to use whatever knowledge we have
     from our dominator walk to simplify statements in E->dest,
     with the ultimate goal being to simplify the conditional
     at the end of E->dest.

     Note that we must undo any changes we make to the underlying
     statements as the simplifications we are making are control
     flow sensitive (i.e., the simplifications are valid when we
     traverse E, but may not be valid on other paths to E->dest).  */

  /* Each PHI creates a temporary equivalence, record them.  Again
     these are context sensitive equivalences and will be removed
     by our caller.  */
  for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
    {
      tree src = PHI_ARG_DEF_FROM_EDGE (phi, e);
      tree dst = PHI_RESULT (phi);

      /* Do not include virtual PHIs in our statement count as
         they never generate code.  */
      if (is_gimple_reg (dst))
        stmt_count++;

      /* If the desired argument is not the same as this PHI's result
         and it is set by a PHI in E->dest, then we cannot thread
         through E->dest.  */
      if (src != dst
          && TREE_CODE (src) == SSA_NAME
          && TREE_CODE (SSA_NAME_DEF_STMT (src)) == PHI_NODE
          && bb_for_stmt (SSA_NAME_DEF_STMT (src)) == e->dest)
        return;

      record_const_or_copy (dst, src);
    }

  /* Try to simplify each statement in E->dest, ultimately leading to
     a simplification of the COND_EXPR at the end of E->dest.

     We might consider marking just those statements which ultimately
     feed the COND_EXPR.  It's not clear if the overhead of bookkeeping
     would be recovered by trying to simplify fewer statements.

     If we are able to simplify a statement into the form
     SSA_NAME = (SSA_NAME | gimple invariant), then we can record
     a context sensitive equivalency which may help us simplify
     later statements in E->dest.

     Failure to simplify into the form above merely means that the
     statement provides no equivalences to help simplify later
     statements.  This does not prevent threading through E->dest.  */
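
  /* For example (hypothetical): if traversing E tells us a_4 has the
     value 0, then a statement in E->dest such as

       b_7 = a_4 + 3;

     temporarily simplifies to b_7 = 3, and that context sensitive
     equivalence may in turn let us determine the value of the
     conditional at the end of E->dest.  */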
  max_stmt_count = PARAM_VALUE (PARAM_MAX_JUMP_THREAD_DUPLICATION_STMTS);
  for (bsi = bsi_start (e->dest); ! bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree cached_lhs = NULL;

      stmt = bsi_stmt (bsi);

      /* Ignore empty statements and labels.  */
      if (IS_EMPTY_STMT (stmt) || TREE_CODE (stmt) == LABEL_EXPR)
        continue;

      /* If duplicating this block is going to cause too much code
         expansion, then do not thread through this block.  */
      stmt_count++;
      if (stmt_count > max_stmt_count)
        return;

      /* Safely handle threading across loop backedges.  This is
         overly conservative, but still allows us to capture the
         majority of the cases where we can thread across a loop
         backedge.  */
      if ((e->flags & EDGE_DFS_BACK) != 0
          && TREE_CODE (stmt) != COND_EXPR
          && TREE_CODE (stmt) != SWITCH_EXPR)
        return;

      /* If the statement has volatile operands, then we assume we
         cannot thread through this block.  This is overly
         conservative in some ways.  */
      if (TREE_CODE (stmt) == ASM_EXPR && ASM_VOLATILE_P (stmt))
        return;

      /* If this is not a MODIFY_EXPR which sets an SSA_NAME to a new
         value, then do not try to simplify this statement as it will
         not simplify in any way that is helpful for jump threading.  */
      if (TREE_CODE (stmt) != MODIFY_EXPR
          || TREE_CODE (TREE_OPERAND (stmt, 0)) != SSA_NAME)
        continue;

      /* At this point we have a statement which assigns an RHS to an
         SSA_VAR on the LHS.  We want to try and simplify this statement
         to expose more context sensitive equivalences which in turn may
         allow us to simplify the condition at the end of the loop.  */
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == SSA_NAME)
        cached_lhs = TREE_OPERAND (stmt, 1);
      else
        {
          /* Copy the operands.  */
          tree *copy, pre_fold_expr;
          ssa_op_iter iter;
          use_operand_p use_p;
          unsigned int num, i = 0;

          num = NUM_SSA_OPERANDS (stmt, (SSA_OP_USE | SSA_OP_VUSE));
          copy = xcalloc (num, sizeof (tree));

          /* Make a copy of the uses & vuses into USES_COPY, then cprop into
             the operands.  */
          FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
            {
              tree tmp = NULL;
              tree use = USE_FROM_PTR (use_p);

              copy[i++] = use;
              if (TREE_CODE (use) == SSA_NAME)
                tmp = SSA_NAME_VALUE (use);
              if (tmp && TREE_CODE (tmp) != VALUE_HANDLE)
                SET_USE (use_p, tmp);
            }

          /* Try to fold/lookup the new expression.  Inserting the
             expression into the hash table is unlikely to help.
             Sadly, we have to handle conditional assignments specially
             here, because fold expects all the operands of an expression
             to be folded before the expression itself is folded, but we
             can't just substitute the folded condition here.  */
          if (TREE_CODE (TREE_OPERAND (stmt, 1)) == COND_EXPR)
            {
              tree cond = COND_EXPR_COND (TREE_OPERAND (stmt, 1));
              cond = fold (cond);
              if (cond == boolean_true_node)
                pre_fold_expr = COND_EXPR_THEN (TREE_OPERAND (stmt, 1));
              else if (cond == boolean_false_node)
                pre_fold_expr = COND_EXPR_ELSE (TREE_OPERAND (stmt, 1));
              else
                pre_fold_expr = TREE_OPERAND (stmt, 1);
            }
          else
            pre_fold_expr = TREE_OPERAND (stmt, 1);

          if (pre_fold_expr)
            {
              cached_lhs = fold (pre_fold_expr);
              if (TREE_CODE (cached_lhs) != SSA_NAME
                  && !is_gimple_min_invariant (cached_lhs))
                cached_lhs = lookup_avail_expr (stmt, false);
            }

          /* Restore the statement's original uses/defs.  */
          i = 0;
          FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
            SET_USE (use_p, copy[i++]);

          free (copy);
        }

      /* Record the context sensitive equivalence if we were able
         to simplify this statement.  */
      if (cached_lhs
          && (TREE_CODE (cached_lhs) == SSA_NAME
              || is_gimple_min_invariant (cached_lhs)))
        record_const_or_copy (TREE_OPERAND (stmt, 0), cached_lhs);
    }

  /* If we stopped at a COND_EXPR or SWITCH_EXPR, see if we know which arm
     will be taken.  */
  if (stmt
      && (TREE_CODE (stmt) == COND_EXPR
          || TREE_CODE (stmt) == GOTO_EXPR
          || TREE_CODE (stmt) == SWITCH_EXPR))
    {
      tree cond, cached_lhs;

      /* Now temporarily cprop the operands and try to find the resulting
         expression in the hash tables.  */
      if (TREE_CODE (stmt) == COND_EXPR)
        cond = COND_EXPR_COND (stmt);
      else if (TREE_CODE (stmt) == GOTO_EXPR)
        cond = GOTO_DESTINATION (stmt);
      else
        cond = SWITCH_COND (stmt);

      if (COMPARISON_CLASS_P (cond))
        {
          tree dummy_cond, op0, op1;
          enum tree_code cond_code;

          op0 = TREE_OPERAND (cond, 0);
          op1 = TREE_OPERAND (cond, 1);
          cond_code = TREE_CODE (cond);

          /* Get the current value of both operands.  */
          if (TREE_CODE (op0) == SSA_NAME)
            {
              tree tmp = SSA_NAME_VALUE (op0);
              if (tmp && TREE_CODE (tmp) != VALUE_HANDLE)
                op0 = tmp;
            }

          if (TREE_CODE (op1) == SSA_NAME)
            {
              tree tmp = SSA_NAME_VALUE (op1);
              if (tmp && TREE_CODE (tmp) != VALUE_HANDLE)
                op1 = tmp;
            }

          /* Stuff the operator and operands into our dummy conditional
             expression, creating the dummy conditional if necessary.  */
          dummy_cond = walk_data->global_data;
          if (! dummy_cond)
            {
              dummy_cond = build (cond_code, boolean_type_node, op0, op1);
              dummy_cond = build (COND_EXPR, void_type_node,
                                  dummy_cond, NULL, NULL);
              walk_data->global_data = dummy_cond;
            }
          else
            {
              TREE_SET_CODE (COND_EXPR_COND (dummy_cond), cond_code);
              TREE_OPERAND (COND_EXPR_COND (dummy_cond), 0) = op0;
              TREE_OPERAND (COND_EXPR_COND (dummy_cond), 1) = op1;
            }

          /* If the conditional folds to an invariant, then we are done,
             otherwise look it up in the hash tables.  */
          cached_lhs = local_fold (COND_EXPR_COND (dummy_cond));
          if (! is_gimple_min_invariant (cached_lhs))
            {
              cached_lhs = lookup_avail_expr (dummy_cond, false);
              if (!cached_lhs || ! is_gimple_min_invariant (cached_lhs))
                cached_lhs = simplify_cond_and_lookup_avail_expr (dummy_cond,
                                                                  NULL,
                                                                  false);
            }
        }
      /* We can have conditionals which just test the state of a
         variable rather than use a relational operator.  These are
         simpler to handle.  */
      else if (TREE_CODE (cond) == SSA_NAME)
        {
          cached_lhs = cond;
          cached_lhs = SSA_NAME_VALUE (cached_lhs);
          if (cached_lhs && ! is_gimple_min_invariant (cached_lhs))
            cached_lhs = NULL;
        }
      else
        cached_lhs = lookup_avail_expr (stmt, false);

      if (cached_lhs)
        {
          edge taken_edge = find_taken_edge (e->dest, cached_lhs);
          basic_block dest = (taken_edge ? taken_edge->dest : NULL);

          if (dest == e->dest)
            return;

          /* If we have a known destination for the conditional, then
             we can perform this optimization, which saves at least one
             conditional jump each time it applies since we get to
             bypass the conditional at our original destination.  */
          if (dest)
            {
              struct edge_info *edge_info;

              if (e->aux)
                edge_info = e->aux;
              else
                edge_info = allocate_edge_info (e);
              edge_info->redirection_target = taken_edge;
              bitmap_set_bit (threaded_blocks, e->dest->index);
            }
        }
    }
}


/* Initialize local stacks for this optimizer and record equivalences
   upon entry to BB.  Equivalences can come from the edge traversed to
   reach BB or they may come from PHI nodes at the start of BB.  */

static void
dom_opt_initialize_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
                          basic_block bb)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n\nOptimizing block #%d\n\n", bb->index);

  /* Push a marker on the stacks of local information so that we know how
     far to unwind when we finalize this block.  */
  VEC_safe_push (tree, heap, avail_exprs_stack, NULL_TREE);
  VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);
  VEC_safe_push (tree, heap, nonzero_vars_stack, NULL_TREE);
  VEC_safe_push (tree, heap, vrp_variables_stack, NULL_TREE);

  record_equivalences_from_incoming_edge (bb);

  /* PHI nodes can create equivalences too.  */
  record_equivalences_from_phis (bb);
}

/* Given an expression EXPR (a relational expression or a statement),
   initialize the hash table element pointed to by ELEMENT.  */

static void
initialize_hash_element (tree expr, tree lhs, struct expr_hash_elt *element)
{
  /* Hash table elements may be based on conditional expressions or statements.

     For the former case, we have no annotation and we want to hash the
     conditional expression.  In the latter case we have an annotation and
     we want to record the expression the statement evaluates.  */
  if (COMPARISON_CLASS_P (expr) || TREE_CODE (expr) == TRUTH_NOT_EXPR)
    {
      element->stmt = NULL;
      element->rhs = expr;
    }
  else if (TREE_CODE (expr) == COND_EXPR)
    {
      element->stmt = expr;
      element->rhs = COND_EXPR_COND (expr);
    }
  else if (TREE_CODE (expr) == SWITCH_EXPR)
    {
      element->stmt = expr;
      element->rhs = SWITCH_COND (expr);
    }
  else if (TREE_CODE (expr) == RETURN_EXPR && TREE_OPERAND (expr, 0))
    {
      element->stmt = expr;
      element->rhs = TREE_OPERAND (TREE_OPERAND (expr, 0), 1);
    }
  else if (TREE_CODE (expr) == GOTO_EXPR)
    {
      element->stmt = expr;
      element->rhs = GOTO_DESTINATION (expr);
    }
  else
    {
      element->stmt = expr;
      element->rhs = TREE_OPERAND (expr, 1);
    }

  element->lhs = lhs;
  element->hash = avail_expr_hash (element);
}

/* Remove the expressions the current block made available from the
   AVAIL_EXPRS table, popping entries off AVAIL_EXPRS_STACK until we
   hit the NULL marker pushed when the block was entered.  */

static void
remove_local_expressions_from_table (void)
{
  /* Remove all the expressions made available in this block.  */
  while (VEC_length (tree, avail_exprs_stack) > 0)
    {
      struct expr_hash_elt element;
      tree expr = VEC_pop (tree, avail_exprs_stack);

      if (expr == NULL_TREE)
        break;

      initialize_hash_element (expr, NULL, &element);
      htab_remove_elt_with_hash (avail_exprs, &element, element.hash);
    }
}

/* Restore NONZERO_VARS to its state on entry to the current block,
   popping names off NONZERO_VARS_STACK and clearing their bits until
   we hit the NULL marker.  */

static void
restore_nonzero_vars_to_original_value (void)
{
  while (VEC_length (tree, nonzero_vars_stack) > 0)
    {
      tree name = VEC_pop (tree, nonzero_vars_stack);

      if (name == NULL)
        break;

      bitmap_clear_bit (nonzero_vars, SSA_NAME_VERSION (name));
    }
}

/* Use the source/dest pairs in CONST_AND_COPIES_STACK to restore
   CONST_AND_COPIES to its original state, stopping when we hit a
   NULL marker.  */

static void
restore_vars_to_original_value (void)
{
  while (VEC_length (tree, const_and_copies_stack) > 0)
    {
      tree prev_value, dest;

      dest = VEC_pop (tree, const_and_copies_stack);

      if (dest == NULL)
        break;

      prev_value = VEC_pop (tree, const_and_copies_stack);
      SSA_NAME_VALUE (dest) = prev_value;
    }
}

/* We have finished processing the dominator children of BB, perform
   any finalization actions in preparation for leaving this node in
   the dominator tree.  */

static void
dom_opt_finalize_block (struct dom_walk_data *walk_data, basic_block bb)
{
  tree last;

  /* If we have an outgoing edge to a block with multiple incoming and
     outgoing edges, then we may be able to thread the edge, i.e., we
     may be able to statically determine which of the outgoing edges
     will be traversed when the incoming edge from BB is traversed.  */
  if (single_succ_p (bb)
      && (single_succ_edge (bb)->flags & EDGE_ABNORMAL) == 0
      && !single_pred_p (single_succ (bb))
      && !single_succ_p (single_succ (bb)))
    {
      thread_across_edge (walk_data, single_succ_edge (bb));
    }
  else if ((last = last_stmt (bb))
           && TREE_CODE (last) == COND_EXPR
           && (COMPARISON_CLASS_P (COND_EXPR_COND (last))
               || TREE_CODE (COND_EXPR_COND (last)) == SSA_NAME)
           && EDGE_COUNT (bb->succs) == 2
           && (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL) == 0
           && (EDGE_SUCC (bb, 1)->flags & EDGE_ABNORMAL) == 0)
    {
      edge true_edge, false_edge;

      extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

      /* Only try to thread the edge if it reaches a target block with
         more than one predecessor and more than one successor.  */
      if (!single_pred_p (true_edge->dest) && !single_succ_p (true_edge->dest))
        {
          struct edge_info *edge_info;
          unsigned int i;

          /* Push a marker onto the available expression stack so that we
             unwind any expressions related to the TRUE arm before processing
             the false arm below.  */
          VEC_safe_push (tree, heap, avail_exprs_stack, NULL_TREE);
          VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);

          edge_info = true_edge->aux;

          /* If we have info associated with this edge, record it into
             our equivalency tables.  */
          if (edge_info)
            {
              tree *cond_equivalences = edge_info->cond_equivalences;
              tree lhs = edge_info->lhs;
              tree rhs = edge_info->rhs;

              /* If we have a simple NAME = VALUE equivalency record it.  */
              if (lhs && TREE_CODE (lhs) == SSA_NAME)
                record_const_or_copy (lhs, rhs);

              /* If we have 0 = COND or 1 = COND equivalences, record them
                 into our expression hash tables.  */
              if (cond_equivalences)
                for (i = 0; i < edge_info->max_cond_equivalences; i += 2)
                  {
                    tree expr = cond_equivalences[i];
                    tree value = cond_equivalences[i + 1];

                    record_cond (expr, value);
                  }
            }

          /* Now thread the edge.  */
          thread_across_edge (walk_data, true_edge);

          /* And restore the various tables to their state before
             we threaded this edge.  */
          remove_local_expressions_from_table ();
          restore_vars_to_original_value ();
        }

      /* Similarly for the ELSE arm.  */
      if (!single_pred_p (false_edge->dest) && !single_succ_p (false_edge->dest))
        {
          struct edge_info *edge_info;
          unsigned int i;

          edge_info = false_edge->aux;

          /* If we have info associated with this edge, record it into
             our equivalency tables.  */
          if (edge_info)
            {
              tree *cond_equivalences = edge_info->cond_equivalences;
              tree lhs = edge_info->lhs;
              tree rhs = edge_info->rhs;

              /* If we have a simple NAME = VALUE equivalency record it.  */
              if (lhs && TREE_CODE (lhs) == SSA_NAME)
                record_const_or_copy (lhs, rhs);

              /* If we have 0 = COND or 1 = COND equivalences, record them
                 into our expression hash tables.  */
              if (cond_equivalences)
                for (i = 0; i < edge_info->max_cond_equivalences; i += 2)
                  {
                    tree expr = cond_equivalences[i];
                    tree value = cond_equivalences[i + 1];

                    record_cond (expr, value);
                  }
            }

          thread_across_edge (walk_data, false_edge);

          /* No need to remove local expressions from our tables
             or restore vars to their original value as that will
             be done immediately below.  */
        }
    }

  remove_local_expressions_from_table ();
  restore_nonzero_vars_to_original_value ();
  restore_vars_to_original_value ();

  /* Remove VRP records associated with this basic block.  They are no
     longer valid.

     To be efficient, we note which variables have had their values
     constrained in this block.  So walk over each variable on
     VRP_VARIABLES_STACK.  */
  while (VEC_length (tree, vrp_variables_stack) > 0)
    {
      tree var = VEC_pop (tree, vrp_variables_stack);
      struct vrp_hash_elt vrp_hash_elt, *vrp_hash_elt_p;
      void **slot;

      /* Each variable has a stack of value range records.  We want to
         invalidate those associated with our basic block.  So we walk
         the array backwards popping off records associated with our
         block.  Once we hit a record not associated with our block
         we are done.  */
      VEC(vrp_element_p,heap) **var_vrp_records;

      if (var == NULL)
        break;

      vrp_hash_elt.var = var;
      vrp_hash_elt.records = NULL;

      slot = htab_find_slot (vrp_data, &vrp_hash_elt, NO_INSERT);

      vrp_hash_elt_p = (struct vrp_hash_elt *) *slot;
      var_vrp_records = &vrp_hash_elt_p->records;

      while (VEC_length (vrp_element_p, *var_vrp_records) > 0)
        {
          struct vrp_element *element
            = VEC_last (vrp_element_p, *var_vrp_records);

          if (element->bb != bb)
            break;

          VEC_pop (vrp_element_p, *var_vrp_records);
        }
    }

  /* If we queued any statements to rescan in this block, then
     go ahead and rescan them now.  */
  while (VEC_length (tree, stmts_to_rescan) > 0)
    {
      tree stmt = VEC_last (tree, stmts_to_rescan);
      basic_block stmt_bb = bb_for_stmt (stmt);

      if (stmt_bb != bb)
        break;

      VEC_pop (tree, stmts_to_rescan);
      mark_new_vars_to_rename (stmt);
    }
}

/* PHI nodes can create equivalences too.

   Ignoring any alternatives which are the same as the result, if
   all the alternatives are equal, then the PHI node creates an
   equivalence.

   Additionally, if all the PHI alternatives are known to have a nonzero
   value, then the result of this PHI is known to have a nonzero value,
   even if we do not know its exact value.  */
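
/* Two illustrative (hypothetical) cases:

     x_3 = PHI <x_1, x_1, x_3>   -- every alternative is either x_1 or
                                    the result itself, so x_3 is
                                    equivalent to x_1.

     y_5 = PHI <y_1, y_2>        -- if both y_1 and y_2 are known to be
                                    nonzero, then y_5 is nonzero too,
                                    though its exact value is unknown.  */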

static void
record_equivalences_from_phis (basic_block bb)
{
  tree phi;

  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
    {
      tree lhs = PHI_RESULT (phi);
      tree rhs = NULL;
      int i;

      for (i = 0; i < PHI_NUM_ARGS (phi); i++)
        {
          tree t = PHI_ARG_DEF (phi, i);

          /* Ignore alternatives which are the same as our LHS.  Since
             LHS is a PHI_RESULT, it is known to be an SSA_NAME, so we
             can simply compare pointers.  */
          if (lhs == t)
            continue;

          /* If we have not processed an alternative yet, then set
             RHS to this alternative.  */
          if (rhs == NULL)
            rhs = t;
          /* If we have processed an alternative (stored in RHS), then
             see if it is equal to this one.  If it isn't, then stop
             the search.  */
          else if (! operand_equal_for_phi_arg_p (rhs, t))
            break;
        }

      /* If we had no interesting alternatives, then all the RHS alternatives
         must have been the same as LHS.  */
      if (!rhs)
        rhs = lhs;

      /* If we managed to iterate through each PHI alternative without
         breaking out of the loop, then we have a PHI which may create
         a useful equivalence.  We do not need to record unwind data for
         this, since this is a true assignment and not an equivalence
         inferred from a comparison.  All uses of this ssa name are dominated
         by this assignment, so unwinding just costs time and space.  */
      if (i == PHI_NUM_ARGS (phi)
          && may_propagate_copy (lhs, rhs))
        SSA_NAME_VALUE (lhs) = rhs;

      /* Now see if we know anything about the nonzero property for the
         result of this PHI.  */
      for (i = 0; i < PHI_NUM_ARGS (phi); i++)
        {
          if (!PHI_ARG_NONZERO (phi, i))
            break;
        }

      if (i == PHI_NUM_ARGS (phi))
        bitmap_set_bit (nonzero_vars, SSA_NAME_VERSION (PHI_RESULT (phi)));
    }
}

/* Ignoring loop backedges, if BB has precisely one incoming edge then
   return that edge.  Otherwise return NULL.  */
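
/* For instance, a typical loop header has two predecessors: the edge
   from the preheader and the latch's back edge.  The back edge's
   source is dominated by the header itself, so that edge is skipped
   and the preheader edge is returned.  */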
static edge
single_incoming_edge_ignoring_loop_edges (basic_block bb)
{
  edge retval = NULL;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      /* A loop back edge can be identified by the destination of
         the edge dominating the source of the edge.  */
      if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
        continue;

      /* If we have already seen a non-loop edge, then we must have
         multiple incoming non-loop edges and thus we return NULL.  */
      if (retval)
        return NULL;

      /* This is the first non-loop incoming edge we have found.  Record
         it.  */
      retval = e;
    }

  return retval;
}

/* Record any equivalences created by the incoming edge to BB.  If BB
   has more than one incoming edge, then no equivalence is created.  */

static void
record_equivalences_from_incoming_edge (basic_block bb)
{
  edge e;
  basic_block parent;
  struct edge_info *edge_info;

  /* If our parent block ended with a control statement, then we may be
     able to record some equivalences based on which outgoing edge from
     the parent was followed.  */
  parent = get_immediate_dominator (CDI_DOMINATORS, bb);

  e = single_incoming_edge_ignoring_loop_edges (bb);

  /* If we had a single incoming edge from our parent block, then enter
     any data associated with the edge into our tables.  */
  if (e && e->src == parent)
    {
      unsigned int i;

      edge_info = e->aux;

      if (edge_info)
        {
          tree lhs = edge_info->lhs;
          tree rhs = edge_info->rhs;
          tree *cond_equivalences = edge_info->cond_equivalences;

          if (lhs)
            record_equality (lhs, rhs);

          if (cond_equivalences)
            {
              bool recorded_range = false;
              for (i = 0; i < edge_info->max_cond_equivalences; i += 2)
                {
                  tree expr = cond_equivalences[i];
                  tree value = cond_equivalences[i + 1];

                  record_cond (expr, value);

                  /* For the first true equivalence, record range
                     information.  We only do this for the first
                     true equivalence as it should dominate any
                     later true equivalences.  */
                  if (! recorded_range
                      && COMPARISON_CLASS_P (expr)
                      && value == boolean_true_node
                      && TREE_CONSTANT (TREE_OPERAND (expr, 1)))
                    {
                      record_range (expr, bb);
                      recorded_range = true;
                    }
                }
            }
        }
    }
}

/* Dump SSA statistics on FILE.  */

void
dump_dominator_optimization_stats (FILE *file)
{
  long n_exprs;

  fprintf (file, "Total number of statements: %6ld\n\n",
           opt_stats.num_stmts);
  fprintf (file, "Exprs considered for dominator optimizations: %6ld\n",
           opt_stats.num_exprs_considered);

  n_exprs = opt_stats.num_exprs_considered;
  if (n_exprs == 0)
    n_exprs = 1;

  fprintf (file, " Redundant expressions eliminated: %6ld (%.0f%%)\n",
           opt_stats.num_re, PERCENT (opt_stats.num_re,
                                      n_exprs));
  fprintf (file, " Constants propagated: %6ld\n",
           opt_stats.num_const_prop);
  fprintf (file, " Copies propagated: %6ld\n",
           opt_stats.num_copy_prop);

  fprintf (file, "\nTotal number of DOM iterations: %6ld\n",
           opt_stats.num_iterations);

  fprintf (file, "\nHash table statistics:\n");

  fprintf (file, " avail_exprs: ");
  htab_statistics (file, avail_exprs);
}


/* Dump SSA statistics on stderr.  */

void
debug_dominator_optimization_stats (void)
{
  dump_dominator_optimization_stats (stderr);
}


/* Dump statistics for the hash table HTAB.  */

static void
htab_statistics (FILE *file, htab_t htab)
{
  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
           (long) htab_size (htab),
           (long) htab_elements (htab),
           htab_collisions (htab));
}

/* Record the fact that VAR has a nonzero value, though we may not know
   its exact value.  Note that if VAR is already known to have a nonzero
   value, then we do nothing.  */

static void
record_var_is_nonzero (tree var)
{
  int indx = SSA_NAME_VERSION (var);

  if (bitmap_bit_p (nonzero_vars, indx))
    return;

  /* Mark it in the global table.  */
  bitmap_set_bit (nonzero_vars, indx);

  /* Record this SSA_NAME so that we can reset the global table
     when we leave this block.  */
  VEC_safe_push (tree, heap, nonzero_vars_stack, var);
}

/* Enter a statement into the true/false expression hash table indicating
   that the condition COND has the value VALUE.  */

static void
record_cond (tree cond, tree value)
{
  struct expr_hash_elt *element = xmalloc (sizeof (struct expr_hash_elt));
  void **slot;

  initialize_hash_element (cond, value, element);

  slot = htab_find_slot_with_hash (avail_exprs, (void *)element,
                                   element->hash, INSERT);
  if (*slot == NULL)
    {
      *slot = (void *) element;
      VEC_safe_push (tree, heap, avail_exprs_stack, cond);
    }
  else
    free (element);
}

/* Build a new conditional using NEW_CODE, OP0 and OP1 and store
   the new conditional into *p, then store a boolean_true_node
   into *(p + 1).  */

static void
build_and_record_new_cond (enum tree_code new_code, tree op0, tree op1, tree *p)
{
  *p = build2 (new_code, boolean_type_node, op0, op1);
  p++;
  *p = boolean_true_node;
}

/* Record that COND is true and INVERTED is false into the edge information
   structure.  Also record that any conditions dominated by COND are true
   as well.

   For example, if a < b is true, then a <= b must also be true.  */
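
/* A sketch of the COND_EQUIVALENCES array built below for COND "a < b"
   (assuming INVERTED is "a >= b"); slots come in <condition, value>
   pairs:

     [0]  a < b            [1]  true
     [2]  a >= b           [3]  false
     [4]  a <= b           [5]  true
     [6]  ORDERED (a, b)   [7]  true
     [8]  a != b           [9]  true
     [10] LTGT (a, b)      [11] true  */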

static void
record_conditions (struct edge_info *edge_info, tree cond, tree inverted)
{
  tree op0, op1;

  if (!COMPARISON_CLASS_P (cond))
    return;

  op0 = TREE_OPERAND (cond, 0);
  op1 = TREE_OPERAND (cond, 1);

  switch (TREE_CODE (cond))
    {
    case LT_EXPR:
    case GT_EXPR:
      edge_info->max_cond_equivalences = 12;
      edge_info->cond_equivalences = xmalloc (12 * sizeof (tree));
      build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
                                  ? LE_EXPR : GE_EXPR),
                                 op0, op1, &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[6]);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[8]);
      build_and_record_new_cond (LTGT_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[10]);
      break;

    case GE_EXPR:
    case LE_EXPR:
      edge_info->max_cond_equivalences = 6;
      edge_info->cond_equivalences = xmalloc (6 * sizeof (tree));
      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[4]);
      break;

    case EQ_EXPR:
      edge_info->max_cond_equivalences = 10;
      edge_info->cond_equivalences = xmalloc (10 * sizeof (tree));
      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (LE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[6]);
      build_and_record_new_cond (GE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[8]);
      break;

    case UNORDERED_EXPR:
      edge_info->max_cond_equivalences = 16;
      edge_info->cond_equivalences = xmalloc (16 * sizeof (tree));
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[6]);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[8]);
      build_and_record_new_cond (UNEQ_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[10]);
      build_and_record_new_cond (UNLT_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[12]);
      build_and_record_new_cond (UNGT_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[14]);
      break;

    case UNLT_EXPR:
    case UNGT_EXPR:
      edge_info->max_cond_equivalences = 8;
      edge_info->cond_equivalences = xmalloc (8 * sizeof (tree));
      build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
                                  ? UNLE_EXPR : UNGE_EXPR),
                                 op0, op1, &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[6]);
      break;

    case UNEQ_EXPR:
      edge_info->max_cond_equivalences = 8;
      edge_info->cond_equivalences = xmalloc (8 * sizeof (tree));
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[6]);
      break;

    case LTGT_EXPR:
      edge_info->max_cond_equivalences = 8;
      edge_info->cond_equivalences = xmalloc (8 * sizeof (tree));
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[6]);
      break;

    default:
      edge_info->max_cond_equivalences = 4;
      edge_info->cond_equivalences = xmalloc (4 * sizeof (tree));
      break;
    }

  /* Now store the original true and false conditions into the first
     two slots.  */
  edge_info->cond_equivalences[0] = cond;
  edge_info->cond_equivalences[1] = boolean_true_node;
  edge_info->cond_equivalences[2] = inverted;
  edge_info->cond_equivalences[3] = boolean_false_node;
}

/* A helper function for record_const_or_copy and record_equality.
   Do the work of recording the value and undo info.  */

static void
record_const_or_copy_1 (tree x, tree y, tree prev_x)
{
  SSA_NAME_VALUE (x) = y;

  VEC_reserve (tree, heap, const_and_copies_stack, 2);
  VEC_quick_push (tree, const_and_copies_stack, prev_x);
  VEC_quick_push (tree, const_and_copies_stack, x);
}


/* Return the loop depth of the basic block of the defining statement of X.
   This number should not be treated as absolutely correct because the loop
   information may not be completely up-to-date when dom runs.  However, it
   will be relatively correct, and as more passes are taught to keep loop info
   up to date, the result will become more and more accurate.  */

int
loop_depth_of_name (tree x)
{
  tree defstmt;
  basic_block defbb;

  /* If it's not an SSA_NAME, we have no clue where the definition is.  */
  if (TREE_CODE (x) != SSA_NAME)
    return 0;

  /* Otherwise return the loop depth of the defining statement's bb.
     Note that there may not actually be a bb for this statement, if the
     ssa_name is live on entry.  */
  defstmt = SSA_NAME_DEF_STMT (x);
  defbb = bb_for_stmt (defstmt);
  if (!defbb)
    return 0;

  return defbb->loop_depth;
}

/* Record that X is equal to Y in const_and_copies.  Record undo
   information in the block-local vector.  */

static void
record_const_or_copy (tree x, tree y)
{
  tree prev_x = SSA_NAME_VALUE (x);

  if (TREE_CODE (y) == SSA_NAME)
    {
      tree tmp = SSA_NAME_VALUE (y);
      if (tmp)
        y = tmp;
    }

  record_const_or_copy_1 (x, y, prev_x);
}

/* Similarly, but assume that X and Y are the two operands of an EQ_EXPR.
   This constrains the cases in which we may treat this as assignment.  */
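
/* For example (hypothetical): from "if (i_1 == 5)" the true edge may
   record SSA_NAME_VALUE (i_1) = 5.  For "if (i_1 == j_2)" the operands
   are swapped as needed so that the name defined in the shallower loop
   becomes the recorded value of the deeper one, keeping the
   replacement valid everywhere the deeper name appears.  */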
1656
1657 static void
1658 record_equality (tree x, tree y)
1659 {
1660 tree prev_x = NULL, prev_y = NULL;
1661
1662 if (TREE_CODE (x) == SSA_NAME)
1663 prev_x = SSA_NAME_VALUE (x);
1664 if (TREE_CODE (y) == SSA_NAME)
1665 prev_y = SSA_NAME_VALUE (y);
1666
1667 /* If one of the previous values is invariant, or invariant in more loops
1668 (by depth), then use that.
1669 Otherwise it doesn't matter which value we choose, just so
1670 long as we canonicalize on one value. */
1671 if (TREE_INVARIANT (y))
1672 ;
1673 else if (TREE_INVARIANT (x) || (loop_depth_of_name (x) <= loop_depth_of_name (y)))
1674 prev_x = x, x = y, y = prev_x, prev_x = prev_y;
1675 else if (prev_x && TREE_INVARIANT (prev_x))
1676 x = y, y = prev_x, prev_x = prev_y;
1677 else if (prev_y && TREE_CODE (prev_y) != VALUE_HANDLE)
1678 y = prev_y;
1679
1680 /* After the swapping, we must have one SSA_NAME. */
1681 if (TREE_CODE (x) != SSA_NAME)
1682 return;
1683
1684 /* For IEEE, -0.0 == 0.0, so we don't necessarily know the sign of a
1685 variable compared against zero. If we're honoring signed zeros,
1686 then we cannot record this value unless we know that the value is
1687 nonzero. */
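/* For instance (hypothetical SSA names), given "if (x_1 == 0.0)", x_1
   may still be -0.0 on the true arm; substituting 0.0 for x_1 in a
   later "1.0 / x_1" would turn a -Inf result into +Inf. */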
1688 if (HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (x)))
1689 && (TREE_CODE (y) != REAL_CST
1690 || REAL_VALUES_EQUAL (dconst0, TREE_REAL_CST (y))))
1691 return;
1692
1693 record_const_or_copy_1 (x, y, prev_x);
1694 }
1695
1696 /* Return true if it is OK to fold the associative expression EXP, i.e.
1697 unless EXP is a floating-point PLUS, MINUS or MULT and unsafe math optimizations are disabled. */
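/* For example, with -fno-unsafe-math-optimizations we must not
   reassociate (x + 1e20) - 1e20 into x + (1e20 - 1e20): for small x
   the first form rounds to 0.0 while the second yields x exactly. */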
1698
1699 static inline bool
1700 unsafe_associative_fp_binop (tree exp)
1701 {
1702 enum tree_code code = TREE_CODE (exp);
1703 return !(!flag_unsafe_math_optimizations
1704 && (code == MULT_EXPR || code == PLUS_EXPR
1705 || code == MINUS_EXPR)
1706 && FLOAT_TYPE_P (TREE_TYPE (exp)));
1707 }
1708
1709 /* Returns true when STMT is a simple iv increment. It detects the
1710 following situation:
1711
1712 i_1 = phi (..., i_2)
1713 i_2 = i_1 +/- ... */
1714
1715 static bool
1716 simple_iv_increment_p (tree stmt)
1717 {
1718 tree lhs, rhs, preinc, phi;
1719 unsigned i;
1720
1721 if (TREE_CODE (stmt) != MODIFY_EXPR)
1722 return false;
1723
1724 lhs = TREE_OPERAND (stmt, 0);
1725 if (TREE_CODE (lhs) != SSA_NAME)
1726 return false;
1727
1728 rhs = TREE_OPERAND (stmt, 1);
1729
1730 if (TREE_CODE (rhs) != PLUS_EXPR
1731 && TREE_CODE (rhs) != MINUS_EXPR)
1732 return false;
1733
1734 preinc = TREE_OPERAND (rhs, 0);
1735 if (TREE_CODE (preinc) != SSA_NAME)
1736 return false;
1737
1738 phi = SSA_NAME_DEF_STMT (preinc);
1739 if (TREE_CODE (phi) != PHI_NODE)
1740 return false;
1741
1742 for (i = 0; i < (unsigned) PHI_NUM_ARGS (phi); i++)
1743 if (PHI_ARG_DEF (phi, i) == lhs)
1744 return true;
1745
1746 return false;
1747 }
1748
1749 /* STMT is a MODIFY_EXPR for which we were unable to find the RHS in the
1750 hash tables. Try to simplify the RHS using whatever equivalences
1751 we may have recorded.
1752
1753 If we are able to simplify the RHS, then lookup the simplified form in
1754 the hash table and return the result. Otherwise return NULL. */
1755
1756 static tree
1757 simplify_rhs_and_lookup_avail_expr (tree stmt, int insert)
1758 {
1759 tree rhs = TREE_OPERAND (stmt, 1);
1760 enum tree_code rhs_code = TREE_CODE (rhs);
1761 tree result = NULL;
1762
1763 /* If we have lhs = ~x, look back to see if we earlier had x = ~y;
1764 if so, we can change this statement to lhs = y, which can
1765 then be copy propagated.
1766
1767 Similarly for negation. */
1768 if ((rhs_code == BIT_NOT_EXPR || rhs_code == NEGATE_EXPR)
1769 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
1770 {
1771 /* Get the definition statement for our RHS. */
1772 tree rhs_def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
1773
1774 /* See if the RHS_DEF_STMT has the same form as our statement. */
1775 if (TREE_CODE (rhs_def_stmt) == MODIFY_EXPR
1776 && TREE_CODE (TREE_OPERAND (rhs_def_stmt, 1)) == rhs_code)
1777 {
1778 tree rhs_def_operand;
1779
1780 rhs_def_operand = TREE_OPERAND (TREE_OPERAND (rhs_def_stmt, 1), 0);
1781
1782 /* Verify that RHS_DEF_OPERAND is a suitable SSA variable. */
1783 if (TREE_CODE (rhs_def_operand) == SSA_NAME
1784 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
1785 result = update_rhs_and_lookup_avail_expr (stmt,
1786 rhs_def_operand,
1787 insert);
1788 }
1789 }
1790
1791 /* If we have z = (x OP C1), see if we earlier had x = y OP C2.
1792 If OP is associative, create and fold (y OP C2) OP C1, which
1793 should result in (y OP C3); use that as the RHS for the
1794 assignment. MINUS_EXPR is also accepted, as it is handled specially below. */
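/* A concrete sketch (hypothetical SSA names): given "x_1 = y_2 + 10"
   followed by "z_3 = x_1 + 5", we fold 10 + 5 and look up
   "z_3 = y_2 + 15" instead. */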
1795 if ((associative_tree_code (rhs_code) || rhs_code == MINUS_EXPR)
1796 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
1797 && num_imm_uses (TREE_OPERAND (rhs, 0)) == 1
1798 && is_gimple_min_invariant (TREE_OPERAND (rhs, 1)))
1799 {
1800 tree rhs_def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
1801
1802 /* If the statement defines an induction variable, do not propagate
1803 its value, so that we do not create overlapping life ranges. */
1804 if (simple_iv_increment_p (rhs_def_stmt))
1805 goto dont_fold_assoc;
1806
1807 /* See if the RHS_DEF_STMT has the same form as our statement. */
1808 if (TREE_CODE (rhs_def_stmt) == MODIFY_EXPR)
1809 {
1810 tree rhs_def_rhs = TREE_OPERAND (rhs_def_stmt, 1);
1811 enum tree_code rhs_def_code = TREE_CODE (rhs_def_rhs);
1812
1813 if ((rhs_code == rhs_def_code && unsafe_associative_fp_binop (rhs))
1814 || (rhs_code == PLUS_EXPR && rhs_def_code == MINUS_EXPR)
1815 || (rhs_code == MINUS_EXPR && rhs_def_code == PLUS_EXPR))
1816 {
1817 tree def_stmt_op0 = TREE_OPERAND (rhs_def_rhs, 0);
1818 tree def_stmt_op1 = TREE_OPERAND (rhs_def_rhs, 1);
1819
1820 if (TREE_CODE (def_stmt_op0) == SSA_NAME
1821 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def_stmt_op0)
1822 && is_gimple_min_invariant (def_stmt_op1))
1823 {
1824 tree outer_const = TREE_OPERAND (rhs, 1);
1825 tree type = TREE_TYPE (TREE_OPERAND (stmt, 0));
1826 tree t;
1827
1828 /* If we care about correct floating point results, then
1829 don't fold x + c1 - c2. Note that we need to take both
1830 the codes and the signs to figure this out. */
1831 if (FLOAT_TYPE_P (type)
1832 && !flag_unsafe_math_optimizations
1833 && (rhs_def_code == PLUS_EXPR
1834 || rhs_def_code == MINUS_EXPR))
1835 {
1836 bool neg = false;
1837
1838 neg ^= (rhs_code == MINUS_EXPR);
1839 neg ^= (rhs_def_code == MINUS_EXPR);
1840 neg ^= real_isneg (TREE_REAL_CST_PTR (outer_const));
1841 neg ^= real_isneg (TREE_REAL_CST_PTR (def_stmt_op1));
1842
1843 if (neg)
1844 goto dont_fold_assoc;
1845 }
1846
1847 /* fold will only operate on the outermost expression
1848 that we give it, so we have to build the new
1849 expression in two pieces. This requires that we handle
1850 combinations of plus and minus. */
1851 if (rhs_def_code != rhs_code)
1852 {
1853 if (rhs_def_code == MINUS_EXPR)
1854 t = build (MINUS_EXPR, type, outer_const, def_stmt_op1);
1855 else
1856 t = build (MINUS_EXPR, type, def_stmt_op1, outer_const);
1857 rhs_code = PLUS_EXPR;
1858 }
1859 else if (rhs_def_code == MINUS_EXPR)
1860 t = build (PLUS_EXPR, type, def_stmt_op1, outer_const);
1861 else
1862 t = build (rhs_def_code, type, def_stmt_op1, outer_const);
1863 t = local_fold (t);
1864 t = build (rhs_code, type, def_stmt_op0, t);
1865 t = local_fold (t);
1866
1867 /* If the result is a suitable looking gimple expression,
1868 then use it instead of the original for STMT. */
1869 if (TREE_CODE (t) == SSA_NAME
1870 || (UNARY_CLASS_P (t)
1871 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME)
1872 || ((BINARY_CLASS_P (t) || COMPARISON_CLASS_P (t))
1873 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
1874 && is_gimple_val (TREE_OPERAND (t, 1))))
1875 result = update_rhs_and_lookup_avail_expr (stmt, t, insert);
1876 }
1877 }
1878 }
1879 dont_fold_assoc:;
1880 }
1881
1882 /* Optimize *"foo" into 'f'. This is done here rather than
1883 in fold to avoid problems with stuff like &*"foo". */
1884 if (TREE_CODE (rhs) == INDIRECT_REF || TREE_CODE (rhs) == ARRAY_REF)
1885 {
1886 tree t = fold_read_from_constant_string (rhs);
1887
1888 if (t)
1889 result = update_rhs_and_lookup_avail_expr (stmt, t, insert);
1890 }
1891
1892 return result;
1893 }
1894
1895 /* COND is a condition of the form:
1896
1897 x == const or x != const
1898
1899 Look back to x's defining statement and see if x is defined as
1900
1901 x = (type) y;
1902
1903 If CONST is unchanged when we convert it to TYPE, then we can build
1904 the equivalent expression:
1905
1906
1907 y == const or y != const
1908
1909 This may allow further optimizations.
1910
1911 Return the equivalent comparison or NULL if no such equivalent comparison
1912 was found. */
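/* For example (hypothetical SSA names), if y_2 has type unsigned char
   and we have "x_1 = (int) y_2; if (x_1 == 5)", the constant 5 is
   unchanged by conversion to unsigned char, so "y_2 == 5" is an
   equivalent test. */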
1913
1914 static tree
1915 find_equivalent_equality_comparison (tree cond)
1916 {
1917 tree op0 = TREE_OPERAND (cond, 0);
1918 tree op1 = TREE_OPERAND (cond, 1);
1919 tree def_stmt = SSA_NAME_DEF_STMT (op0);
1920
1921 /* OP0 might have been a parameter, so first make sure it
1922 was defined by a MODIFY_EXPR. */
1923 if (def_stmt && TREE_CODE (def_stmt) == MODIFY_EXPR)
1924 {
1925 tree def_rhs = TREE_OPERAND (def_stmt, 1);
1926
1927
1928 /* If either operand to the comparison is a pointer to
1929 a function, then we can not apply this optimization
1930 as some targets require function pointers to be
1931 canonicalized and in this case this optimization would
1932 eliminate a necessary canonicalization. */
1933 if ((POINTER_TYPE_P (TREE_TYPE (op0))
1934 && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) == FUNCTION_TYPE)
1935 || (POINTER_TYPE_P (TREE_TYPE (op1))
1936 && TREE_CODE (TREE_TYPE (TREE_TYPE (op1))) == FUNCTION_TYPE))
1937 return NULL;
1938
1939 /* Now make sure the RHS of the MODIFY_EXPR is a typecast. */
1940 if ((TREE_CODE (def_rhs) == NOP_EXPR
1941 || TREE_CODE (def_rhs) == CONVERT_EXPR)
1942 && TREE_CODE (TREE_OPERAND (def_rhs, 0)) == SSA_NAME)
1943 {
1944 tree def_rhs_inner = TREE_OPERAND (def_rhs, 0);
1945 tree def_rhs_inner_type = TREE_TYPE (def_rhs_inner);
1946 tree new;
1947
1948 if (TYPE_PRECISION (def_rhs_inner_type)
1949 > TYPE_PRECISION (TREE_TYPE (def_rhs)))
1950 return NULL;
1951
1952 /* If the inner type of the conversion is a pointer to
1953 a function, then we can not apply this optimization
1954 as some targets require function pointers to be
1955 canonicalized. This optimization would result in
1956 canonicalization of the pointer when it was not originally
1957 needed/intended. */
1958 if (POINTER_TYPE_P (def_rhs_inner_type)
1959 && TREE_CODE (TREE_TYPE (def_rhs_inner_type)) == FUNCTION_TYPE)
1960 return NULL;
1961
1962 /* What we want to prove is that if we convert OP1 to
1963 the type of the object inside the NOP_EXPR, the
1964 result is still equal to OP1.
1965
1966 If that is true, then build and return a new equivalent
1967 condition which uses the source of the typecast and the
1968 new constant (which has only changed its type). */
1969 new = build1 (TREE_CODE (def_rhs), def_rhs_inner_type, op1);
1970 new = local_fold (new);
1971 if (is_gimple_val (new) && tree_int_cst_equal (new, op1))
1972 return build (TREE_CODE (cond), TREE_TYPE (cond),
1973 def_rhs_inner, new);
1974 }
1975 }
1976 return NULL;
1977 }
1978
1979 /* STMT is a COND_EXPR for which we could not trivially determine its
1980 result. This routine attempts to find equivalent forms of the
1981 condition which we may be able to optimize better. It also
1982 uses simple value range propagation to optimize conditionals. */
1983
1984 static tree
1985 simplify_cond_and_lookup_avail_expr (tree stmt,
1986 stmt_ann_t ann,
1987 int insert)
1988 {
1989 tree cond = COND_EXPR_COND (stmt);
1990
1991 if (COMPARISON_CLASS_P (cond))
1992 {
1993 tree op0 = TREE_OPERAND (cond, 0);
1994 tree op1 = TREE_OPERAND (cond, 1);
1995
1996 if (TREE_CODE (op0) == SSA_NAME && is_gimple_min_invariant (op1))
1997 {
1998 int limit;
1999 tree low, high, cond_low, cond_high;
2000 int lowequal, highequal, swapped, no_overlap, subset, cond_inverted;
2001 VEC(vrp_element_p,heap) **vrp_records;
2002 struct vrp_element *element;
2003 struct vrp_hash_elt vrp_hash_elt, *vrp_hash_elt_p;
2004 void **slot;
2005
2006 /* First see if we have test of an SSA_NAME against a constant
2007 where the SSA_NAME is defined by an earlier typecast which
2008 is irrelevant when performing tests against the given
2009 constant. */
2010 if (TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
2011 {
2012 tree new_cond = find_equivalent_equality_comparison (cond);
2013
2014 if (new_cond)
2015 {
2016 /* Update the statement to use the new equivalent
2017 condition. */
2018 COND_EXPR_COND (stmt) = new_cond;
2019
2020 /* If this is not a real stmt, ann will be NULL and we
2021 avoid processing the operands. */
2022 if (ann)
2023 mark_stmt_modified (stmt);
2024
2025 /* Lookup the condition and return its known value if it
2026 exists. */
2027 new_cond = lookup_avail_expr (stmt, insert);
2028 if (new_cond)
2029 return new_cond;
2030
2031 /* The operands have changed, so update op0 and op1. */
2032 op0 = TREE_OPERAND (cond, 0);
2033 op1 = TREE_OPERAND (cond, 1);
2034 }
2035 }
2036
2037 /* Consult the value range records for this variable (if they exist)
2038 to see if we can eliminate or simplify this conditional.
2039
2040 Note two tests are necessary to determine no records exist.
2041 First we have to see if the hash table entry exists; if it
2042 does, then we have to check the number of recorded ranges.
2043
2044 Also note the vast majority of conditionals are not testing
2045 a variable which has had its range constrained by an earlier
2046 conditional. So this filter avoids a lot of unnecessary work. */
2047 vrp_hash_elt.var = op0;
2048 vrp_hash_elt.records = NULL;
2049 slot = htab_find_slot (vrp_data, &vrp_hash_elt, NO_INSERT);
2050 if (slot == NULL)
2051 return NULL;
2052
2053 vrp_hash_elt_p = (struct vrp_hash_elt *) *slot;
2054 vrp_records = &vrp_hash_elt_p->records;
2055
2056 limit = VEC_length (vrp_element_p, *vrp_records);
2057
2058 /* If we have no value range records for this variable, or we are
2059 unable to extract a range for this condition, then there is
2060 nothing to do. */
2061 if (limit == 0
2062 || ! extract_range_from_cond (cond, &cond_high,
2063 &cond_low, &cond_inverted))
2064 return NULL;
2065
2066 /* We really want to avoid unnecessary computations of range
2067 info. So all ranges are computed lazily, which avoids a
2068 lot of unnecessary work; i.e., we record the conditional,
2069 but do not process how it constrains the variable's
2070 potential values until we know that processing the condition
2071 could be helpful.
2072
2073 However, we do not want to have to walk a potentially long
2074 list of ranges, nor do we want to compute a variable's
2075 range more than once for a given path.
2076
2077 Luckily, each time we encounter a conditional that can not
2078 be otherwise optimized we will end up here and we will
2079 compute the necessary range information for the variable
2080 used in this condition.
2081
2082 Thus you can conclude that there will never be more than one
2083 conditional associated with a variable which has not been
2084 processed. So we never need to merge more than one new
2085 conditional into the current range.
2086
2087 These properties also help us avoid unnecessary work. */
2088 element = VEC_last (vrp_element_p, *vrp_records);
2089
2090 if (element->high && element->low)
2091 {
2092 /* The last element has been processed, so there is no range
2093 merging to do, we can simply use the high/low values
2094 recorded in the last element. */
2095 low = element->low;
2096 high = element->high;
2097 }
2098 else
2099 {
2100 tree tmp_high, tmp_low;
2101 int dummy;
2102
2103 /* The last element has not been processed. Process it now.
2104 record_range should ensure that the inverted flag is not set here.
2105 This call can only fail if cond is x < min or x > max,
2106 which fold should already have optimized into false;
2107 if extraction nevertheless fails, something has gone wrong,
2108 hence the gcc_unreachable below. */
2109 if (! extract_range_from_cond (element->cond, &tmp_high,
2110 &tmp_low, &dummy))
2111 gcc_unreachable ();
2112 else
2113 gcc_assert (dummy == 0);
2114
2115 /* If this is the only element, then no merging is necessary,
2116 the high/low values from extract_range_from_cond are all
2117 we need. */
2118 if (limit == 1)
2119 {
2120 low = tmp_low;
2121 high = tmp_high;
2122 }
2123 else
2124 {
2125 /* Get the high/low value from the previous element. */
2126 struct vrp_element *prev
2127 = VEC_index (vrp_element_p, *vrp_records, limit - 2);
2128 low = prev->low;
2129 high = prev->high;
2130
2131 /* Merge in this element's range with the range from the
2132 previous element.
2133
2134 The low value for the merged range is the maximum of
2135 the previous low value and the low value of this record.
2136
2137 Similarly the high value for the merged range is the
2138 minimum of the previous high value and the high value of
2139 this record. */
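/* For example (a worked sketch): merging a previous range of [0, 10]
   with a new record of [5, 20] yields [5, 10], the maximum of the
   two lows and the minimum of the two highs. */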
2140 low = (low && tree_int_cst_compare (low, tmp_low) == 1
2141 ? low : tmp_low);
2142 high = (high && tree_int_cst_compare (high, tmp_high) == -1
2143 ? high : tmp_high);
2144 }
2145
2146 /* And record the computed range. */
2147 element->low = low;
2148 element->high = high;
2149
2150 }
2151
2152 /* After we have constrained this variable's potential values,
2153 we try to determine the result of the given conditional.
2154
2155 To simplify later tests, first determine if the current
2156 low value is the same low value as the conditional.
2157 Similarly for the current high value and the high value
2158 for the conditional. */
2159 lowequal = tree_int_cst_equal (low, cond_low);
2160 highequal = tree_int_cst_equal (high, cond_high);
2161
2162 if (lowequal && highequal)
2163 return (cond_inverted ? boolean_false_node : boolean_true_node);
2164
2165 /* To simplify the overlap/subset tests below we may want
2166 to swap the two ranges so that the larger of the two
2167 ranges occurs "first". */
2168 swapped = 0;
2169 if (tree_int_cst_compare (low, cond_low) == 1
2170 || (lowequal
2171 && tree_int_cst_compare (cond_high, high) == 1))
2172 {
2173 tree temp;
2174
2175 swapped = 1;
2176 temp = low;
2177 low = cond_low;
2178 cond_low = temp;
2179 temp = high;
2180 high = cond_high;
2181 cond_high = temp;
2182 }
2183
2184 /* Now determine if there is no overlap in the ranges
2185 or if the second range is a subset of the first range. */
2186 no_overlap = tree_int_cst_lt (high, cond_low);
2187 subset = tree_int_cst_compare (cond_high, high) != 1;
2188
2189 /* If there was no overlap in the ranges, then this conditional
2190 always has a false value (unless we had to invert this
2191 conditional, in which case it always has a true value). */
2192 if (no_overlap)
2193 return (cond_inverted ? boolean_true_node : boolean_false_node);
2194
2195 /* If the current range is a subset of the condition's range,
2196 then this conditional always has a true value (unless we
2197 had to invert this conditional, in which case it always
2198 has a false value). */
2199 if (subset && swapped)
2200 return (cond_inverted ? boolean_false_node : boolean_true_node);
2201
2202 /* We were unable to determine the result of the conditional.
2203 However, we may be able to simplify the conditional. First
2204 merge the ranges in the same manner as range merging above. */
2205 low = tree_int_cst_compare (low, cond_low) == 1 ? low : cond_low;
2206 high = tree_int_cst_compare (high, cond_high) == -1 ? high : cond_high;
2207
2208 /* If the range has converged to a single point, then turn this
2209 into an equality comparison. */
2210 if (TREE_CODE (cond) != EQ_EXPR
2211 && TREE_CODE (cond) != NE_EXPR
2212 && tree_int_cst_equal (low, high))
2213 {
2214 TREE_SET_CODE (cond, EQ_EXPR);
2215 TREE_OPERAND (cond, 1) = high;
2216 }
2217 }
2218 }
2219 return 0;
2220 }
2221
2222 /* STMT is a SWITCH_EXPR for which we could not trivially determine its
2223 result. This routine attempts to find equivalent forms of the
2224 condition which we may be able to optimize better. */
2225
2226 static tree
2227 simplify_switch_and_lookup_avail_expr (tree stmt, int insert)
2228 {
2229 tree cond = SWITCH_COND (stmt);
2230 tree def, to, ti;
2231
2232 /* The optimization that we really care about is removing unnecessary
2233 casts. That will let us do much better in propagating the inferred
2234 constant at the switch target. */
2235 if (TREE_CODE (cond) == SSA_NAME)
2236 {
2237 def = SSA_NAME_DEF_STMT (cond);
2238 if (TREE_CODE (def) == MODIFY_EXPR)
2239 {
2240 def = TREE_OPERAND (def, 1);
2241 if (TREE_CODE (def) == NOP_EXPR)
2242 {
2243 int need_precision;
2244 bool fail;
2245
2246 def = TREE_OPERAND (def, 0);
2247
2248 #ifdef ENABLE_CHECKING
2249 /* ??? Why was Jeff testing this? We are gimple... */
2250 gcc_assert (is_gimple_val (def));
2251 #endif
2252
2253 to = TREE_TYPE (cond);
2254 ti = TREE_TYPE (def);
2255
2256 /* If we have an extension that preserves value, then we
2257 can copy the source value into the switch. */
2258
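/* For example, a widening cast from unsigned char to int preserves
   every value (8 bits plus a sign bit fit in 32), so the unsigned
   char may be switched on directly; a cast from signed char to
   unsigned int fails the check because negative values are not
   preserved. */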
2259 need_precision = TYPE_PRECISION (ti);
2260 fail = false;
2261 if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
2262 fail = true;
2263 else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
2264 need_precision += 1;
2265 if (TYPE_PRECISION (to) < need_precision)
2266 fail = true;
2267
2268 if (!fail)
2269 {
2270 SWITCH_COND (stmt) = def;
2271 mark_stmt_modified (stmt);
2272
2273 return lookup_avail_expr (stmt, insert);
2274 }
2275 }
2276 }
2277 }
2278
2279 return 0;
2280 }
2281
2282
2283 /* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
2284 known value for that SSA_NAME (or NULL if no value is known).
2285
2286 NONZERO_VARS is the set of SSA_NAMEs known to have a nonzero value,
2287 even if we don't know their precise value.
2288
2289 Propagate values from CONST_AND_COPIES and NONZERO_VARS into the PHI
2290 nodes of the successors of BB. */
2291
2292 static void
2293 cprop_into_successor_phis (basic_block bb, bitmap nonzero_vars)
2294 {
2295 edge e;
2296 edge_iterator ei;
2297
2298 FOR_EACH_EDGE (e, ei, bb->succs)
2299 {
2300 tree phi;
2301 int indx;
2302
2303 /* If this is an abnormal edge, then we do not want to copy propagate
2304 into the PHI alternative associated with this edge. */
2305 if (e->flags & EDGE_ABNORMAL)
2306 continue;
2307
2308 phi = phi_nodes (e->dest);
2309 if (! phi)
2310 continue;
2311
2312 indx = e->dest_idx;
2313 for ( ; phi; phi = PHI_CHAIN (phi))
2314 {
2315 tree new;
2316 use_operand_p orig_p;
2317 tree orig;
2318
2319 /* The alternative may be associated with a constant, so verify
2320 it is an SSA_NAME before doing anything with it. */
2321 orig_p = PHI_ARG_DEF_PTR (phi, indx);
2322 orig = USE_FROM_PTR (orig_p);
2323 if (TREE_CODE (orig) != SSA_NAME)
2324 continue;
2325
2326 /* If the alternative is known to have a nonzero value, record
2327 that fact in the PHI node itself for future use. */
2328 if (bitmap_bit_p (nonzero_vars, SSA_NAME_VERSION (orig)))
2329 PHI_ARG_NONZERO (phi, indx) = true;
2330
2331 /* If we have *ORIG_P in our constant/copy table, then replace
2332 ORIG_P with its value in our constant/copy table. */
2333 new = SSA_NAME_VALUE (orig);
2334 if (new
2335 && new != orig
2336 && (TREE_CODE (new) == SSA_NAME
2337 || is_gimple_min_invariant (new))
2338 && may_propagate_copy (orig, new))
2339 propagate_value (orig_p, new);
2340 }
2341 }
2342 }
2343
2344 /* We have finished optimizing BB, record any information implied by
2345 taking a specific outgoing edge from BB. */
2346
2347 static void
2348 record_edge_info (basic_block bb)
2349 {
2350 block_stmt_iterator bsi = bsi_last (bb);
2351 struct edge_info *edge_info;
2352
2353 if (! bsi_end_p (bsi))
2354 {
2355 tree stmt = bsi_stmt (bsi);
2356
2357 if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
2358 {
2359 tree cond = SWITCH_COND (stmt);
2360
2361 if (TREE_CODE (cond) == SSA_NAME)
2362 {
2363 tree labels = SWITCH_LABELS (stmt);
2364 int i, n_labels = TREE_VEC_LENGTH (labels);
2365 tree *info = xcalloc (last_basic_block, sizeof (tree));
2366 edge e;
2367 edge_iterator ei;
2368
2369 for (i = 0; i < n_labels; i++)
2370 {
2371 tree label = TREE_VEC_ELT (labels, i);
2372 basic_block target_bb = label_to_block (CASE_LABEL (label));
2373
2374 if (CASE_HIGH (label)
2375 || !CASE_LOW (label)
2376 || info[target_bb->index])
2377 info[target_bb->index] = error_mark_node;
2378 else
2379 info[target_bb->index] = label;
2380 }
2381
2382 FOR_EACH_EDGE (e, ei, bb->succs)
2383 {
2384 basic_block target_bb = e->dest;
2385 tree node = info[target_bb->index];
2386
2387 if (node != NULL && node != error_mark_node)
2388 {
2389 tree x = fold_convert (TREE_TYPE (cond), CASE_LOW (node));
2390 edge_info = allocate_edge_info (e);
2391 edge_info->lhs = cond;
2392 edge_info->rhs = x;
2393 }
2394 }
2395 free (info);
2396 }
2397 }
2398
2399 /* A COND_EXPR may create equivalences too. */
2400 if (stmt && TREE_CODE (stmt) == COND_EXPR)
2401 {
2402 tree cond = COND_EXPR_COND (stmt);
2403 edge true_edge;
2404 edge false_edge;
2405
2406 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2407
2408 /* If the conditional is a single variable 'X', record 'X = 1'
2409 on the true edge and 'X = 0' on the false edge. */
2410 if (SSA_VAR_P (cond))
2411 {
2412 struct edge_info *edge_info;
2413
2414 edge_info = allocate_edge_info (true_edge);
2415 edge_info->lhs = cond;
2416 edge_info->rhs = constant_boolean_node (1, TREE_TYPE (cond));
2417
2418 edge_info = allocate_edge_info (false_edge);
2419 edge_info->lhs = cond;
2420 edge_info->rhs = constant_boolean_node (0, TREE_TYPE (cond));
2421 }
2422 /* Equality tests may create one or two equivalences. */
2423 else if (COMPARISON_CLASS_P (cond))
2424 {
2425 tree op0 = TREE_OPERAND (cond, 0);
2426 tree op1 = TREE_OPERAND (cond, 1);
2427
2428 /* Special case comparing booleans against a constant as we
2429 know the value of OP0 on both arms of the branch; i.e., we
2430 can record an equivalence for OP0 rather than COND. */
2431 if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
2432 && TREE_CODE (op0) == SSA_NAME
2433 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2434 && is_gimple_min_invariant (op1))
2435 {
2436 if (TREE_CODE (cond) == EQ_EXPR)
2437 {
2438 edge_info = allocate_edge_info (true_edge);
2439 edge_info->lhs = op0;
2440 edge_info->rhs = (integer_zerop (op1)
2441 ? boolean_false_node
2442 : boolean_true_node);
2443
2444 edge_info = allocate_edge_info (false_edge);
2445 edge_info->lhs = op0;
2446 edge_info->rhs = (integer_zerop (op1)
2447 ? boolean_true_node
2448 : boolean_false_node);
2449 }
2450 else
2451 {
2452 edge_info = allocate_edge_info (true_edge);
2453 edge_info->lhs = op0;
2454 edge_info->rhs = (integer_zerop (op1)
2455 ? boolean_true_node
2456 : boolean_false_node);
2457
2458 edge_info = allocate_edge_info (false_edge);
2459 edge_info->lhs = op0;
2460 edge_info->rhs = (integer_zerop (op1)
2461 ? boolean_false_node
2462 : boolean_true_node);
2463 }
2464 }
2465
2466 else if (is_gimple_min_invariant (op0)
2467 && (TREE_CODE (op1) == SSA_NAME
2468 || is_gimple_min_invariant (op1)))
2469 {
2470 tree inverted = invert_truthvalue (cond);
2471 struct edge_info *edge_info;
2472
2473 edge_info = allocate_edge_info (true_edge);
2474 record_conditions (edge_info, cond, inverted);
2475
2476 if (TREE_CODE (cond) == EQ_EXPR)
2477 {
2478 edge_info->lhs = op1;
2479 edge_info->rhs = op0;
2480 }
2481
2482 edge_info = allocate_edge_info (false_edge);
2483 record_conditions (edge_info, inverted, cond);
2484
2485 if (TREE_CODE (cond) == NE_EXPR)
2486 {
2487 edge_info->lhs = op1;
2488 edge_info->rhs = op0;
2489 }
2490 }
2491
2492 else if (TREE_CODE (op0) == SSA_NAME
2493 && (is_gimple_min_invariant (op1)
2494 || TREE_CODE (op1) == SSA_NAME))
2495 {
2496 tree inverted = invert_truthvalue (cond);
2497 struct edge_info *edge_info;
2498
2499 edge_info = allocate_edge_info (true_edge);
2500 record_conditions (edge_info, cond, inverted);
2501
2502 if (TREE_CODE (cond) == EQ_EXPR)
2503 {
2504 edge_info->lhs = op0;
2505 edge_info->rhs = op1;
2506 }
2507
2508 edge_info = allocate_edge_info (false_edge);
2509 record_conditions (edge_info, inverted, cond);
2510
2511 if (TREE_CODE (cond) == NE_EXPR)
2512 {
2513 edge_info->lhs = op0;
2514 edge_info->rhs = op1;
2515 }
2516 }
2517 }
2518
2519 /* ??? TRUTH_NOT_EXPR can create an equivalence too. */
2520 }
2521 }
2522 }
2523
2524 /* Propagate information from BB to its outgoing edges.
2525
2526 This can include equivalency information implied by control statements
2527 at the end of BB and const/copy propagation into PHIs in BB's
2528 successor blocks. */
2529
2530 static void
2531 propagate_to_outgoing_edges (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
2532 basic_block bb)
2533 {
2534 record_edge_info (bb);
2535 cprop_into_successor_phis (bb, nonzero_vars);
2536 }
2537
2538 /* Search for redundant computations in STMT. If any are found, then
2539 replace them with the variable holding the result of the computation.
2540
2541 If safe, record this expression into the available expression hash
2542 table. */
2543
2544 static bool
2545 eliminate_redundant_computations (tree stmt, stmt_ann_t ann)
2546 {
2547 tree *expr_p, def = NULL_TREE;
2548 bool insert = true;
2549 tree cached_lhs;
2550 bool retval = false;
2551 bool modify_expr_p = false;
2552
2553 if (TREE_CODE (stmt) == MODIFY_EXPR)
2554 def = TREE_OPERAND (stmt, 0);
2555
2556 /* Certain expressions on the RHS can be optimized away, but can not
2557 themselves be entered into the hash tables. */
2558 if (ann->makes_aliased_stores
2559 || ! def
2560 || TREE_CODE (def) != SSA_NAME
2561 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def)
2562 || !ZERO_SSA_OPERANDS (stmt, SSA_OP_VMAYDEF)
2563 /* Do not record equivalences for increments of ivs. This would create
2564 overlapping live ranges for a very questionable gain. */
2565 || simple_iv_increment_p (stmt))
2566 insert = false;
2567
2568 /* Check if the expression has been computed before. */
2569 cached_lhs = lookup_avail_expr (stmt, insert);
2570
2571 /* If this is an assignment and the RHS was not in the hash table,
2572 then try to simplify the RHS and lookup the new RHS in the
2573 hash table. */
2574 if (! cached_lhs && TREE_CODE (stmt) == MODIFY_EXPR)
2575 cached_lhs = simplify_rhs_and_lookup_avail_expr (stmt, insert);
2576 /* Similarly if this is a COND_EXPR and we did not find its
2577 expression in the hash table, simplify the condition and
2578 try again. */
2579 else if (! cached_lhs && TREE_CODE (stmt) == COND_EXPR)
2580 cached_lhs = simplify_cond_and_lookup_avail_expr (stmt, ann, insert);
2581 /* Similarly for a SWITCH_EXPR. */
2582 else if (!cached_lhs && TREE_CODE (stmt) == SWITCH_EXPR)
2583 cached_lhs = simplify_switch_and_lookup_avail_expr (stmt, insert);
2584
2585 opt_stats.num_exprs_considered++;
2586
2587 /* Get a pointer to the expression we are trying to optimize. */
2588 if (TREE_CODE (stmt) == COND_EXPR)
2589 expr_p = &COND_EXPR_COND (stmt);
2590 else if (TREE_CODE (stmt) == SWITCH_EXPR)
2591 expr_p = &SWITCH_COND (stmt);
2592 else if (TREE_CODE (stmt) == RETURN_EXPR && TREE_OPERAND (stmt, 0))
2593 {
2594 expr_p = &TREE_OPERAND (TREE_OPERAND (stmt, 0), 1);
2595 modify_expr_p = true;
2596 }
2597 else
2598 {
2599 expr_p = &TREE_OPERAND (stmt, 1);
2600 modify_expr_p = true;
2601 }
2602
2603 /* It is safe to ignore types here since we have already done
2604 type checking in the hashing and equality routines. In fact
2605 type checking here merely gets in the way of constant
2606 propagation. Also, make sure that it is safe to propagate
2607 CACHED_LHS into *EXPR_P. */
2608 if (cached_lhs
2609 && ((TREE_CODE (cached_lhs) != SSA_NAME
2610 && (modify_expr_p
2611 || tree_ssa_useless_type_conversion_1 (TREE_TYPE (*expr_p),
2612 TREE_TYPE (cached_lhs))))
2613 || may_propagate_copy (*expr_p, cached_lhs)))
2614 {
2615 if (dump_file && (dump_flags & TDF_DETAILS))
2616 {
2617 fprintf (dump_file, " Replaced redundant expr '");
2618 print_generic_expr (dump_file, *expr_p, dump_flags);
2619 fprintf (dump_file, "' with '");
2620 print_generic_expr (dump_file, cached_lhs, dump_flags);
2621 fprintf (dump_file, "'\n");
2622 }
2623
2624 opt_stats.num_re++;
2625
2626 #if defined ENABLE_CHECKING
2627 gcc_assert (TREE_CODE (cached_lhs) == SSA_NAME
2628 || is_gimple_min_invariant (cached_lhs));
2629 #endif
2630
2631 if (TREE_CODE (cached_lhs) == ADDR_EXPR
2632 || (POINTER_TYPE_P (TREE_TYPE (*expr_p))
2633 && is_gimple_min_invariant (cached_lhs)))
2634 retval = true;
2635
2636 if (modify_expr_p
2637 && !tree_ssa_useless_type_conversion_1 (TREE_TYPE (*expr_p),
2638 TREE_TYPE (cached_lhs)))
2639 cached_lhs = fold_convert (TREE_TYPE (*expr_p), cached_lhs);
2640
2641 propagate_tree_value (expr_p, cached_lhs);
2642 mark_stmt_modified (stmt);
2643 }
2644 return retval;
2645 }
2646
2647 /* STMT, a MODIFY_EXPR, may create certain equivalences, in either
2648 the available expressions table or the const_and_copies table.
2649 Detect and record those equivalences. */
2650
2651 static void
2652 record_equivalences_from_stmt (tree stmt,
2653 int may_optimize_p,
2654 stmt_ann_t ann)
2655 {
2656 tree lhs = TREE_OPERAND (stmt, 0);
2657 enum tree_code lhs_code = TREE_CODE (lhs);
2658 int i;
2659
2660 if (lhs_code == SSA_NAME)
2661 {
2662 tree rhs = TREE_OPERAND (stmt, 1);
2663
2664 /* Strip away any useless type conversions. */
2665 STRIP_USELESS_TYPE_CONVERSION (rhs);
2666
2667 /* If the RHS of the assignment is a constant or another variable that
2668 may be propagated, register it in the CONST_AND_COPIES table. We
2669 do not need to record unwind data for this, since this is a true
2670 assignment and not an equivalence inferred from a comparison. All
2671 uses of this ssa name are dominated by this assignment, so unwinding
2672 just costs time and space. */
2673 if (may_optimize_p
2674 && (TREE_CODE (rhs) == SSA_NAME
2675 || is_gimple_min_invariant (rhs)))
2676 SSA_NAME_VALUE (lhs) = rhs;
2677
2678 if (tree_expr_nonzero_p (rhs))
2679 record_var_is_nonzero (lhs);
2680 }
2681
2682 /* Look at both sides for pointer dereferences. If we find one, then
2683 the pointer must be nonnull and we can enter that equivalence into
2684 the hash tables. */
2685 if (flag_delete_null_pointer_checks)
2686 for (i = 0; i < 2; i++)
2687 {
2688 tree t = TREE_OPERAND (stmt, i);
2689
2690 /* Strip away any COMPONENT_REFs. */
2691 while (TREE_CODE (t) == COMPONENT_REF)
2692 t = TREE_OPERAND (t, 0);
2693
2694 /* Now see if this is a pointer dereference. */
2695 if (INDIRECT_REF_P (t))
2696 {
2697 tree op = TREE_OPERAND (t, 0);
2698
2699 /* If the pointer is an SSA variable, then enter new
2700 equivalences into the hash table. */
2701 while (TREE_CODE (op) == SSA_NAME)
2702 {
2703 tree def = SSA_NAME_DEF_STMT (op);
2704
2705 record_var_is_nonzero (op);
2706
2707 /* And walk up the USE-DEF chains noting other SSA_NAMEs
2708 which are known to have a nonzero value. */
2709 if (def
2710 && TREE_CODE (def) == MODIFY_EXPR
2711 && TREE_CODE (TREE_OPERAND (def, 1)) == NOP_EXPR)
2712 op = TREE_OPERAND (TREE_OPERAND (def, 1), 0);
2713 else
2714 break;
2715 }
2716 }
2717 }
2718
2719 /* A memory store, even an aliased store, creates a useful
2720 equivalence. By exchanging the LHS and RHS, creating suitable
2721 vops and recording the result in the available expression table,
2722 we may be able to expose more redundant loads. */
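/* For example (hypothetical SSA names), after "*p_1 = x_2" we record
   the artificial load "x_2 = *p_1" as available, so a later load
   "y_3 = *p_1" with the same VUSEs can simply reuse x_2. */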
2723 if (!ann->has_volatile_ops
2724 && (TREE_CODE (TREE_OPERAND (stmt, 1)) == SSA_NAME
2725 || is_gimple_min_invariant (TREE_OPERAND (stmt, 1)))
2726 && !is_gimple_reg (lhs))
2727 {
2728 tree rhs = TREE_OPERAND (stmt, 1);
2729 tree new;
2730
2731 /* FIXME: If the LHS of the assignment is a bitfield and the RHS
2732 is a constant, we need to adjust the constant to fit into the
2733 type of the LHS. If the LHS is a bitfield and the RHS is not
2734 a constant, then we can not record any equivalences for this
2735 statement since we would need to represent the widening or
2736 narrowing of RHS. This fixes gcc.c-torture/execute/921016-1.c
2737 and should not be necessary if GCC represented bitfields
2738 properly. */
2739 if (lhs_code == COMPONENT_REF
2740 && DECL_BIT_FIELD (TREE_OPERAND (lhs, 1)))
2741 {
2742 if (TREE_CONSTANT (rhs))
2743 rhs = widen_bitfield (rhs, TREE_OPERAND (lhs, 1), lhs);
2744 else
2745 rhs = NULL;
2746
2747 /* If the value overflowed, then we can not use this equivalence. */
2748 if (rhs && ! is_gimple_min_invariant (rhs))
2749 rhs = NULL;
2750 }
2751
2752 if (rhs)
2753 {
2754 /* Build a new statement with the RHS and LHS exchanged. */
2755 new = build (MODIFY_EXPR, TREE_TYPE (stmt), rhs, lhs);
2756
2757 create_ssa_artficial_load_stmt (new, stmt);
2758
2759 /* Finally enter the statement into the available expression
2760 table. */
2761 lookup_avail_expr (new, true);
2762 }
2763 }
2764 }
2765
2766 /* Replace *OP_P in STMT with any known equivalent value for *OP_P from
2767 CONST_AND_COPIES. */
2768
2769 static bool
2770 cprop_operand (tree stmt, use_operand_p op_p)
2771 {
2772 bool may_have_exposed_new_symbols = false;
2773 tree val;
2774 tree op = USE_FROM_PTR (op_p);
2775
2776 /* If the operand has a known constant value or it is known to be a
2777 copy of some other variable, use the value or copy stored in
2778 CONST_AND_COPIES. */
2779 val = SSA_NAME_VALUE (op);
2780 if (val && val != op && TREE_CODE (val) != VALUE_HANDLE)
2781 {
2782 tree op_type, val_type;
2783
2784 /* Do not change the base variable in the virtual operand
2785 tables. That would make it impossible to reconstruct
2786 the renamed virtual operand if we later modify this
2787 statement. Also only allow the new value to be an SSA_NAME
2788 for propagation into virtual operands. */
2789 if (!is_gimple_reg (op)
2790 && (TREE_CODE (val) != SSA_NAME
2791 || is_gimple_reg (val)
2792 || get_virtual_var (val) != get_virtual_var (op)))
2793 return false;
2794
2795 /* Do not replace hard register operands in asm statements. */
2796 if (TREE_CODE (stmt) == ASM_EXPR
2797 && !may_propagate_copy_into_asm (op))
2798 return false;
2799
2800 /* Get the toplevel type of each operand. */
2801 op_type = TREE_TYPE (op);
2802 val_type = TREE_TYPE (val);
2803
2804 /* While both types are pointers, get the type of the object
2805 pointed to. */
2806 while (POINTER_TYPE_P (op_type) && POINTER_TYPE_P (val_type))
2807 {
2808 op_type = TREE_TYPE (op_type);
2809 val_type = TREE_TYPE (val_type);
2810 }
2811
2812 /* Make sure underlying types match before propagating a constant by
2813 converting the constant to the proper type. Note that convert may
2814 return a non-gimple expression, in which case we ignore this
2815 propagation opportunity. */
2816 if (TREE_CODE (val) != SSA_NAME)
2817 {
2818 if (!lang_hooks.types_compatible_p (op_type, val_type))
2819 {
2820 val = fold_convert (TREE_TYPE (op), val);
2821 if (!is_gimple_min_invariant (val))
2822 return false;
2823 }
2824 }
2825
2826 /* Certain operands are not allowed to be copy propagated due
2827 to their interaction with exception handling and some GCC
2828 extensions. */
2829 else if (!may_propagate_copy (op, val))
2830 return false;
2831
2832 /* Do not propagate copies if the propagated value is at a deeper loop
2833 depth than the propagatee. Otherwise, this may move loop variant
2834 variables outside of their loops and prevent coalescing
2835 opportunities. If the value was loop invariant, it will be hoisted
2836 by LICM and exposed for copy propagation. */
2837 if (loop_depth_of_name (val) > loop_depth_of_name (op))
2838 return false;
2839
2840 /* Dump details. */
2841 if (dump_file && (dump_flags & TDF_DETAILS))
2842 {
2843 fprintf (dump_file, " Replaced '");
2844 print_generic_expr (dump_file, op, dump_flags);
2845 fprintf (dump_file, "' with %s '",
2846 (TREE_CODE (val) != SSA_NAME ? "constant" : "variable"));
2847 print_generic_expr (dump_file, val, dump_flags);
2848 fprintf (dump_file, "'\n");
2849 }
2850
2851 /* If VAL is an ADDR_EXPR or a constant of pointer type, note
2852 that we may have exposed a new symbol for SSA renaming. */
2853 if (TREE_CODE (val) == ADDR_EXPR
2854 || (POINTER_TYPE_P (TREE_TYPE (op))
2855 && is_gimple_min_invariant (val)))
2856 may_have_exposed_new_symbols = true;
2857
2858 if (TREE_CODE (val) != SSA_NAME)
2859 opt_stats.num_const_prop++;
2860 else
2861 opt_stats.num_copy_prop++;
2862
2863 propagate_value (op_p, val);
2864
2865 /* And note that we modified this statement. This is now
2866 safe, even if we changed virtual operands since we will
2867 rescan the statement and rewrite its operands again. */
2868 mark_stmt_modified (stmt);
2869 }
2870 return may_have_exposed_new_symbols;
2871 }
2872
2873 /* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
2874 known value for that SSA_NAME (or NULL if no value is known).
2875
2876 Propagate values from CONST_AND_COPIES into the uses, vuses and
2877 v_may_def_ops of STMT. */
2878
2879 static bool
2880 cprop_into_stmt (tree stmt)
2881 {
2882 bool may_have_exposed_new_symbols = false;
2883 use_operand_p op_p;
2884 ssa_op_iter iter;
2885
2886 FOR_EACH_SSA_USE_OPERAND (op_p, stmt, iter, SSA_OP_ALL_USES)
2887 {
2888 if (TREE_CODE (USE_FROM_PTR (op_p)) == SSA_NAME)
2889 may_have_exposed_new_symbols |= cprop_operand (stmt, op_p);
2890 }
2891
2892 return may_have_exposed_new_symbols;
2893 }
2894
2895
2896 /* Optimize the statement pointed to by iterator SI.
2897
2898 We try to perform some simplistic global redundancy elimination and
2899 constant propagation:
2900
2901 1- To detect global redundancy, we keep track of expressions that have
2902 been computed in this block and its dominators. If we find that the
2903 same expression is computed more than once, we eliminate repeated
2904 computations by using the target of the first one.
2905
2906 2- Constant values and copy assignments. This is used to do very
2907 simplistic constant and copy propagation. When a constant or copy
2908 assignment is found, we map the value on the RHS of the assignment to
2909 the variable in the LHS in the CONST_AND_COPIES table. */
2910
2911 static void
2912 optimize_stmt (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
2913 basic_block bb, block_stmt_iterator si)
2914 {
2915 stmt_ann_t ann;
2916 tree stmt, old_stmt;
2917 bool may_optimize_p;
2918 bool may_have_exposed_new_symbols = false;
2919
2920 old_stmt = stmt = bsi_stmt (si);
2921
2922 update_stmt_if_modified (stmt);
2923 ann = stmt_ann (stmt);
2924 opt_stats.num_stmts++;
2925 may_have_exposed_new_symbols = false;
2926
2927 if (dump_file && (dump_flags & TDF_DETAILS))
2928 {
2929 fprintf (dump_file, "Optimizing statement ");
2930 print_generic_stmt (dump_file, stmt, TDF_SLIM);
2931 }
2932
2933 /* Const/copy propagate into USES, VUSES and the RHS of V_MAY_DEFs. */
2934 may_have_exposed_new_symbols = cprop_into_stmt (stmt);
2935
2936 /* If the statement has been modified with constant replacements,
2937 fold its RHS before checking for redundant computations. */
2938 if (ann->modified)
2939 {
2940 tree rhs;
2941
2942 /* Try to fold the statement making sure that STMT is kept
2943 up to date. */
2944 if (fold_stmt (bsi_stmt_ptr (si)))
2945 {
2946 stmt = bsi_stmt (si);
2947 ann = stmt_ann (stmt);
2948
2949 if (dump_file && (dump_flags & TDF_DETAILS))
2950 {
2951 fprintf (dump_file, " Folded to: ");
2952 print_generic_stmt (dump_file, stmt, TDF_SLIM);
2953 }
2954 }
2955
2956 rhs = get_rhs (stmt);
2957 if (rhs && TREE_CODE (rhs) == ADDR_EXPR)
2958 recompute_tree_invarant_for_addr_expr (rhs);
2959
2960 /* Constant/copy propagation above may change the set of
2961 virtual operands associated with this statement. Folding
2962 may remove the need for some virtual operands.
2963
2964 Indicate we will need to rescan and rewrite the statement. */
2965 may_have_exposed_new_symbols = true;
2966 }
2967
2968 /* Check for redundant computations. Do this optimization only
2969 for assignments that have no volatile ops, and for conditionals. */
2970 may_optimize_p = (!ann->has_volatile_ops
2971 && ((TREE_CODE (stmt) == RETURN_EXPR
2972 && TREE_OPERAND (stmt, 0)
2973 && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR
2974 && ! (TREE_SIDE_EFFECTS
2975 (TREE_OPERAND (TREE_OPERAND (stmt, 0), 1))))
2976 || (TREE_CODE (stmt) == MODIFY_EXPR
2977 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (stmt, 1)))
2978 || TREE_CODE (stmt) == COND_EXPR
2979 || TREE_CODE (stmt) == SWITCH_EXPR));
2980
2981 if (may_optimize_p)
2982 may_have_exposed_new_symbols
2983 |= eliminate_redundant_computations (stmt, ann);
2984
2985 /* Record any additional equivalences created by this statement. */
2986 if (TREE_CODE (stmt) == MODIFY_EXPR)
2987 record_equivalences_from_stmt (stmt,
2988 may_optimize_p,
2989 ann);
2990
2991 /* If STMT is a COND_EXPR and it was modified, then we may know
2992 where it goes. If that is the case, then mark the CFG as altered.
2993
2994 This will cause us to later call remove_unreachable_blocks and
2995 cleanup_tree_cfg when it is safe to do so. It is not safe to
2996 clean things up here since removal of edges and such can trigger
2997 the removal of PHI nodes, which in turn can release SSA_NAMEs to
2998 the manager.
2999
3000 That's all fine and good, except that once SSA_NAMEs are released
3001 to the manager, we must not call create_ssa_name until all references
3002 to released SSA_NAMEs have been eliminated.
3003
3004 All references to the deleted SSA_NAMEs can not be eliminated until
3005 we remove unreachable blocks.
3006
3007 We can not remove unreachable blocks until after we have completed
3008 any queued jump threading.
3009
3010 We can not complete any queued jump threads until we have taken
3011 appropriate variables out of SSA form. Taking variables out of
3012 SSA form can call create_ssa_name and thus we lose.
3013
3014 Ultimately I suspect we're going to need to change the interface
3015 into the SSA_NAME manager. */
3016
3017 if (ann->modified)
3018 {
3019 tree val = NULL;
3020
3021 if (TREE_CODE (stmt) == COND_EXPR)
3022 val = COND_EXPR_COND (stmt);
3023 else if (TREE_CODE (stmt) == SWITCH_EXPR)
3024 val = SWITCH_COND (stmt);
3025
3026 if (val && TREE_CODE (val) == INTEGER_CST && find_taken_edge (bb, val))
3027 cfg_altered = true;
3028
3029 /* If we simplified a statement in such a way that it can be shown
3030 not to trap, update the EH information and the CFG to match. */
3031 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
3032 {
3033 bitmap_set_bit (need_eh_cleanup, bb->index);
3034 if (dump_file && (dump_flags & TDF_DETAILS))
3035 fprintf (dump_file, " Flagged to clear EH edges.\n");
3036 }
3037 }
3038
3039 if (may_have_exposed_new_symbols)
3040 VEC_safe_push (tree, heap, stmts_to_rescan, bsi_stmt (si));
3041 }
3042
3043 /* Replace the RHS of STMT with NEW_RHS. If RHS can be found in the
3044 available expression hashtable, then return the LHS from the hash
3045 table.
3046
3047 If INSERT is true, then we also update the available expression
3048 hash table to account for the changes made to STMT. */
3049
3050 static tree
3051 update_rhs_and_lookup_avail_expr (tree stmt, tree new_rhs, bool insert)
3052 {
3053 tree cached_lhs = NULL;
3054
3055 /* Remove the old entry from the hash table. */
3056 if (insert)
3057 {
3058 struct expr_hash_elt element;
3059
3060 initialize_hash_element (stmt, NULL, &element);
3061 htab_remove_elt_with_hash (avail_exprs, &element, element.hash);
3062 }
3063
3064 /* Now update the RHS of the assignment. */
3065 TREE_OPERAND (stmt, 1) = new_rhs;
3066
3067 /* Now lookup the updated statement in the hash table. */
3068 cached_lhs = lookup_avail_expr (stmt, insert);
3069
3070 /* We have now called lookup_avail_expr twice with two different
3071 versions of this same statement, once in optimize_stmt, once here.
3072
3073 We know the call in optimize_stmt did not find an existing entry
3074 in the hash table, so a new entry was created. At the same time
3075 this statement was pushed onto the AVAIL_EXPRS_STACK vector.
3076
3077 If this call failed to find an existing entry on the hash table,
3078 then the new version of this statement was entered into the
3079 hash table, and this statement was pushed onto BLOCK_AVAIL_EXPRs
3080 for the second time; so there are two copies on BLOCK_AVAIL_EXPRs.
3081
3082 If this call succeeded, we still have one copy of this statement
3083 on the BLOCK_AVAIL_EXPRs vector.
3084
3085 For both cases, we need to pop the most recent entry off the
3086 BLOCK_AVAIL_EXPRs vector. For the case where we never found this
3087 statement in the hash tables, that will leave precisely one
3088 copy of this statement on BLOCK_AVAIL_EXPRs. For the case where
3089 we found a copy of this statement in the second hash table lookup
3090 we want _no_ copies of this statement in BLOCK_AVAIL_EXPRs. */
3091 if (insert)
3092 VEC_pop (tree, avail_exprs_stack);
3093
3094 /* And make sure we record the fact that we modified this
3095 statement. */
3096 mark_stmt_modified (stmt);
3097
3098 return cached_lhs;
3099 }
3100
3101 /* Search for an existing instance of STMT in the AVAIL_EXPRS table. If
3102 found, return its LHS. Otherwise insert STMT in the table and return
3103 NULL_TREE.
3104
3105 Also, when an expression is first inserted in the AVAIL_EXPRS table, it
3106 is also added to the AVAIL_EXPRS_STACK vector, so that it
3107 can be removed when we finish processing this block and its children.
3108
3109 NOTE: This function assumes that STMT is a MODIFY_EXPR node that
3110 contains no CALL_EXPR on its RHS and makes no volatile or
3111 aliased references. */
3112
3113 static tree
3114 lookup_avail_expr (tree stmt, bool insert)
3115 {
3116 void **slot;
3117 tree lhs;
3118 tree temp;
3119 struct expr_hash_elt *element = xmalloc (sizeof (struct expr_hash_elt));
3120
3121 lhs = TREE_CODE (stmt) == MODIFY_EXPR ? TREE_OPERAND (stmt, 0) : NULL;
3122
3123 initialize_hash_element (stmt, lhs, element);
3124
3125 /* Don't bother remembering constant assignments and copy operations.
3126 Constants and copy operations are handled by the constant/copy propagator
3127 in optimize_stmt. */
3128 if (TREE_CODE (element->rhs) == SSA_NAME
3129 || is_gimple_min_invariant (element->rhs))
3130 {
3131 free (element);
3132 return NULL_TREE;
3133 }
3134
3135 /* If this is an equality test against zero, see if we have recorded a
3136 nonzero value for the variable in question. */
3137 if ((TREE_CODE (element->rhs) == EQ_EXPR
3138 || TREE_CODE (element->rhs) == NE_EXPR)
3139 && TREE_CODE (TREE_OPERAND (element->rhs, 0)) == SSA_NAME
3140 && integer_zerop (TREE_OPERAND (element->rhs, 1)))
3141 {
3142 int indx = SSA_NAME_VERSION (TREE_OPERAND (element->rhs, 0));
3143
3144 if (bitmap_bit_p (nonzero_vars, indx))
3145 {
3146 tree t = element->rhs;
3147 free (element);
3148 return constant_boolean_node (TREE_CODE (t) != EQ_EXPR,
3149 TREE_TYPE (t));
3150 }
3151 }
3152
3153 /* Finally try to find the expression in the main expression hash table. */
3154 slot = htab_find_slot_with_hash (avail_exprs, element, element->hash,
3155 (insert ? INSERT : NO_INSERT));
3156 if (slot == NULL)
3157 {
3158 free (element);
3159 return NULL_TREE;
3160 }
3161
3162 if (*slot == NULL)
3163 {
3164 *slot = (void *) element;
3165 VEC_safe_push (tree, heap, avail_exprs_stack,
3166 stmt ? stmt : element->rhs);
3167 return NULL_TREE;
3168 }
3169
3170 /* Extract the LHS of the assignment so that it can be used as the current
3171 definition of another variable. */
3172 lhs = ((struct expr_hash_elt *)*slot)->lhs;
3173
3174 /* See if the LHS appears in the CONST_AND_COPIES table. If it does, then
3175 use the value from the const_and_copies table. */
3176 if (TREE_CODE (lhs) == SSA_NAME)
3177 {
3178 temp = SSA_NAME_VALUE (lhs);
3179 if (temp && TREE_CODE (temp) != VALUE_HANDLE)
3180 lhs = temp;
3181 }
3182
3183 free (element);
3184 return lhs;
3185 }
3186
3187 /* Given a condition COND, record into HI_P, LO_P and INVERTED_P the
3188 range of values that result in the conditional having a true value.
3189
3190 Return true if we are successful in extracting a range from COND and
3191 false if we are unsuccessful. */
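/* For example, for an int variable x, "x <= 10" yields the range
   [INT_MIN, 10] with *INVERTED_P clear, while "x != 5" yields the
   single-point range [5, 5] with *INVERTED_P set. */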
3192
3193 static bool
3194 extract_range_from_cond (tree cond, tree *hi_p, tree *lo_p, int *inverted_p)
3195 {
3196 tree op1 = TREE_OPERAND (cond, 1);
3197 tree high, low, type;
3198 int inverted;
3199
3200 type = TREE_TYPE (op1);
3201
3202 /* Experiments have shown that it's rarely, if ever, useful to
3203 record ranges for enumerations. Presumably this is due to
3204 the fact that they're rarely used directly. They are typically
3205 cast into an integer type and used that way. */
3206 if (TREE_CODE (type) != INTEGER_TYPE
3207 /* We don't know how to deal with types with variable bounds. */
3208 || TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
3209 || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
3210 return 0;
3211
3212 switch (TREE_CODE (cond))
3213 {
3214 case EQ_EXPR:
3215 high = low = op1;
3216 inverted = 0;
3217 break;
3218
3219 case NE_EXPR:
3220 high = low = op1;
3221 inverted = 1;
3222 break;
3223
3224 case GE_EXPR:
3225 low = op1;
3226 high = TYPE_MAX_VALUE (type);
3227 inverted = 0;
3228 break;
3229
3230 case GT_EXPR:
3231 high = TYPE_MAX_VALUE (type);
3232 if (!tree_int_cst_lt (op1, high))
3233 return 0;
3234 low = int_const_binop (PLUS_EXPR, op1, integer_one_node, 1);
3235 inverted = 0;
3236 break;
3237
3238 case LE_EXPR:
3239 high = op1;
3240 low = TYPE_MIN_VALUE (type);
3241 inverted = 0;
3242 break;
3243
3244 case LT_EXPR:
3245 low = TYPE_MIN_VALUE (type);
3246 if (!tree_int_cst_lt (low, op1))
3247 return 0;
3248 high = int_const_binop (MINUS_EXPR, op1, integer_one_node, 1);
3249 inverted = 0;
3250 break;
3251
3252 default:
3253 return 0;
3254 }
3255
3256 *hi_p = high;
3257 *lo_p = low;
3258 *inverted_p = inverted;
3259 return 1;
3260 }
3261
3262 /* Record a range created by COND for basic block BB. */
3263
3264 static void
3265 record_range (tree cond, basic_block bb)
3266 {
3267 enum tree_code code = TREE_CODE (cond);
3268
3269 /* We explicitly ignore NE_EXPRs and all the unordered comparisons.
3270 They rarely allow for meaningful range optimizations and significantly
3271 complicate the implementation. */
3272 if ((code == LT_EXPR || code == LE_EXPR || code == GT_EXPR
3273 || code == GE_EXPR || code == EQ_EXPR)
3274 && TREE_CODE (TREE_TYPE (TREE_OPERAND (cond, 1))) == INTEGER_TYPE)
3275 {
3276 struct vrp_hash_elt *vrp_hash_elt;
3277 struct vrp_element *element;
3278 VEC(vrp_element_p,heap) **vrp_records_p;
3279 void **slot;
3280
3281
3282 vrp_hash_elt = xmalloc (sizeof (struct vrp_hash_elt));
3283 vrp_hash_elt->var = TREE_OPERAND (cond, 0);
3284 vrp_hash_elt->records = NULL;
3285 slot = htab_find_slot (vrp_data, vrp_hash_elt, INSERT);
3286
3287 if (*slot == NULL)
3288 *slot = (void *) vrp_hash_elt;
3289 else
3290 vrp_free (vrp_hash_elt);
3291
3292 vrp_hash_elt = (struct vrp_hash_elt *) *slot;
3293 vrp_records_p = &vrp_hash_elt->records;
3294
3295 element = ggc_alloc (sizeof (struct vrp_element));
3296 element->low = NULL;
3297 element->high = NULL;
3298 element->cond = cond;
3299 element->bb = bb;
3300
3301 VEC_safe_push (vrp_element_p, heap, *vrp_records_p, element);
3302 VEC_safe_push (tree, heap, vrp_variables_stack, TREE_OPERAND (cond, 0));
3303 }
3304 }
3305
3306 /* Hashing and equality functions for VRP_DATA.
3307
3308 Since this hash table is addressed by SSA_NAMEs, we can hash on
3309 their version number and equality can be determined with a
3310 pointer comparison. */
3311
3312 static hashval_t
3313 vrp_hash (const void *p)
3314 {
3315 tree var = ((struct vrp_hash_elt *)p)->var;
3316
3317 return SSA_NAME_VERSION (var);
3318 }
3319
3320 static int
3321 vrp_eq (const void *p1, const void *p2)
3322 {
3323 tree var1 = ((struct vrp_hash_elt *)p1)->var;
3324 tree var2 = ((struct vrp_hash_elt *)p2)->var;
3325
3326 return var1 == var2;
3327 }
3328
3329 /* Hashing and equality functions for AVAIL_EXPRS. The table stores
3330 MODIFY_EXPR statements. We compute a hash value for expressions using
3331 the code of the expression and the SSA numbers of its operands. */
3332
3333 static hashval_t
3334 avail_expr_hash (const void *p)
3335 {
3336 tree stmt = ((struct expr_hash_elt *)p)->stmt;
3337 tree rhs = ((struct expr_hash_elt *)p)->rhs;
3338 tree vuse;
3339 ssa_op_iter iter;
3340 hashval_t val = 0;
3341
3342 /* iterative_hash_expr knows how to deal with any expression and
3343 deals with commutative operators as well, so just use it instead
3344 of duplicating such complexities here. */
3345 val = iterative_hash_expr (rhs, val);
3346
3347 /* If the hash table entry is not associated with a statement, then we
3348 can just hash the expression and not worry about virtual operands
3349 and such. */
3350 if (!stmt || !stmt_ann (stmt))
3351 return val;
3352
3353 /* Add the SSA version numbers of every vuse operand. This is important
3354 because compound variables like arrays are not renamed in the
3355 operands. Rather, the rename is done on the virtual variable
3356 representing all the elements of the array. */
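/* Consequently, two loads of the same array element separated by a
   store into the array hash (and compare) differently, since the
   store creates a new version of the array's virtual variable. */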
3357 FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VUSE)
3358 val = iterative_hash_expr (vuse, val);
3359
3360 return val;
3361 }
3362
3363 static hashval_t
3364 real_avail_expr_hash (const void *p)
3365 {
3366 return ((const struct expr_hash_elt *)p)->hash;
3367 }
3368
3369 static int
3370 avail_expr_eq (const void *p1, const void *p2)
3371 {
3372 tree stmt1 = ((struct expr_hash_elt *)p1)->stmt;
3373 tree rhs1 = ((struct expr_hash_elt *)p1)->rhs;
3374 tree stmt2 = ((struct expr_hash_elt *)p2)->stmt;
3375 tree rhs2 = ((struct expr_hash_elt *)p2)->rhs;
3376
3377 /* If they are the same physical expression, return true. */
3378 if (rhs1 == rhs2 && stmt1 == stmt2)
3379 return true;
3380
3381 /* If their codes are not equal, then quit now. */
3382 if (TREE_CODE (rhs1) != TREE_CODE (rhs2))
3383 return false;
3384
3385 /* In case of a hash collision, the two RHS expressions have to be
3386 identical and have the same VUSE operands. */
3387 if ((TREE_TYPE (rhs1) == TREE_TYPE (rhs2)
3388 || lang_hooks.types_compatible_p (TREE_TYPE (rhs1), TREE_TYPE (rhs2)))
3389 && operand_equal_p (rhs1, rhs2, OEP_PURE_SAME))
3390 {
3391 bool ret = compare_ssa_operands_equal (stmt1, stmt2, SSA_OP_VUSE);
3392 gcc_assert (!ret || ((struct expr_hash_elt *)p1)->hash
3393 == ((struct expr_hash_elt *)p2)->hash);
3394 return ret;
3395 }
3396
3397 return false;
3398 }