+2015-12-10 Jeff Law <law@redhat.com>
+
+ PR tree-optimization/68619
+ * tree-ssa-dom.c (dom_opt_dom_walker::before_dom_children): Propagate
+ return value from optimize_stmt.
+ (dom_opt_dom_walker): Add new argument to dom_walker constructor.
+ (pass_dominator::execute): If a block has a non-executable outgoing
+ edge, remove all jump threads through any outgoing edge of the
+ affected block.
+ (record_equivalences_from_phis): Ignore alternative if the edge
+ does not have EDGE_EXECUTABLE set.
+ (single_incoming_edge_ignoring_loop_edges): Similarly.
+ (optimize_stmt): If a control statement has a compile-time constant
+ condition, return the edge taken for that constant value. Also
+ change the condition to true/false as necessary.
+ * domwalk.h (dom_walker::dom_walker): Add new argument
+ skip_unreachable_blocks. Don't provide empty constructor body.
+ (dom_walker::before_dom_children): Change return type.
+ (dom_walker::bb_reachable): Declare new private method.
+ (dom_walker::propagate_unreachable_to_edges): Likewise.
+ (dom_walker::m_unreachable_dom): Declare new private data member.
+ (dom_walker::m_skip_unreachable_blocks): Likewise.
+ * domwalk.c: Include dumpfile.h.
+ (dom_walker::dom_walker): New constructor. Initialize private data
+ members. If needed, set EDGE_EXECUTABLE for all edges in the CFG,
+ extracted from tree-ssa-sccvn.c.
+ (dom_walker::bb_reachable): New method extracted from tree-ssa-sccvn.c.
+ (dom_walker::propagate_unreachable_to_edges): Likewise.
+ (dom_walker::walk): Only call before_dom_children on reachable
+ blocks. If before_dom_children returns an edge, then clear
+ EDGE_EXECUTABLE for all other outgoing edges from the same block.
+ For unreachable blocks, call propagate_unreachable_to_edges.
+ Similarly, only call after_dom_children on reachable blocks. For
+ unreachable blocks, conditionally clear m_unreachable_dom.
+ * tree-ssa-sccvn.c (sccvn_dom_walker::unreachable_dom): Remove
+ private data member.
+ (sccvn_dom_walker::after_dom_children): Use methods from dom_walker
+ class.
+ (run_scc_vn): Likewise.
+ (sccvn_dom_walker::before_dom_children): Likewise. Return the taken
+ outgoing edge if a COND, SWITCH, or GOTO is optimized.
+ * compare-elim.c (find_comparison_dom_walker::before_dom_children):
+ Change return type to an edge. Always return NULL.
+ * fwprop.c (single_def_use_dom_walker::before_dom_children): Likewise.
+ * gimple-ssa-strength-reduction.c
+ (find_candidates_dom_walker::before_dom_children): Likewise.
+ * ipa-prop.c (analysis_dom_walker::before_dom_children): Likewise.
+ (ipcp_modif_dom_walker::before_dom_children): Likewise.
+ * tree-into-ssa.c (rewrite_dom_walker::before_dom_children): Likewise.
+ (rewrite_update_dom_walker::before_dom_children): Likewise.
+ (mark_def_dom_walker::before_dom_children): Likewise.
+ * tree-ssa-dse.c (dse_dom_walker::before_dom_children): Likewise.
+ * tree-ssa-loop-im.c
+ (invariantness_dom_walker::before_dom_children): Likewise.
+ (move_computations_dom_walker::before_dom_children): Likewise.
+ * tree-ssa-phiopt.c
+ (nontrapping_dom_walker::before_dom_children): Likewise.
+ * tree-ssa-pre.c
+ (eliminate_dom_walker::before_dom_children): Likewise.
+ * tree-ssa-propagate.c
+ (substitute_and_fold_dom_walker::before_dom_children): Likewise.
+ * tree-ssa-strlen.c
+ (strlen_dom_walker::before_dom_children): Likewise.
+ * tree-ssa-uncprop.c
+ (uncprop_dom_walker::before_dom_children): Likewise.
+
2015-12-10 Jakub Jelinek <jakub@redhat.com>
PR rtl-optimization/68376
find_comparison_dom_walker (cdi_direction direction)
: dom_walker (direction) {}
- virtual void before_dom_children (basic_block);
+ virtual edge before_dom_children (basic_block);
};
/* Return true if conforming COMPARE with EH_NOTE is redundant with comparison
compare in the BB is live at the end of the block, install the compare
in BB->AUX. Called via dom_walker.walk (). */
-void
+edge
find_comparison_dom_walker::before_dom_children (basic_block bb)
{
struct comparison *last_cmp;
remove EH edges. */
if (need_purge)
purge_dead_edges (bb);
+
+ return NULL;
}
/* Find all comparisons in the function. */
#include "backend.h"
#include "cfganal.h"
#include "domwalk.h"
+#include "dumpfile.h"
/* This file implements a generic walker for dominator trees.
return 1;
}
+/* Constructor for a dom walker.
+
+ If SKIP_UNREACHABLE_BLOCKS is true, then we need to set
+ EDGE_EXECUTABLE on every edge in the CFG. */
+dom_walker::dom_walker (cdi_direction direction,
+ bool skip_unreachable_blocks)
+ : m_dom_direction (direction),
+ m_skip_unreachable_blocks (skip_unreachable_blocks),
+ m_unreachable_dom (NULL)
+{
+ /* If we are not skipping unreachable blocks, then there is nothing
+ to do. */
+ if (!m_skip_unreachable_blocks)
+ return;
+
+ basic_block bb;
+ FOR_ALL_BB_FN (bb, cfun)
+ {
+ edge_iterator ei;
+ edge e;
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ e->flags |= EDGE_EXECUTABLE;
+ }
+}
+
+/* Return TRUE if BB is reachable, FALSE otherwise. */
+
+bool
+dom_walker::bb_reachable (struct function *fun, basic_block bb)
+{
+ /* If we're not skipping unreachable blocks, then assume everything
+ is reachable. */
+ if (!m_skip_unreachable_blocks)
+ return true;
+
+ /* If any of the predecessor edges that do not come from blocks dominated
+ by us are still marked as possibly executable, consider this block
+ reachable. */
+ bool reachable = false;
+ if (!m_unreachable_dom)
+ {
+ reachable = bb == ENTRY_BLOCK_PTR_FOR_FN (fun);
+ edge_iterator ei;
+ edge e;
+ FOR_EACH_EDGE (e, ei, bb->preds)
+ if (!dominated_by_p (CDI_DOMINATORS, e->src, bb))
+ reachable |= (e->flags & EDGE_EXECUTABLE);
+ }
+
+ return reachable;
+}
+
+/* BB has been determined to be unreachable. Propagate that property
+ to incoming and outgoing edges of BB as appropriate. */
+
+void
+dom_walker::propagate_unreachable_to_edges (basic_block bb,
+ FILE *dump_file,
+ int dump_flags)
+{
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, "Marking all outgoing edges of unreachable "
+ "BB %d as not executable\n", bb->index);
+
+ edge_iterator ei;
+ edge e;
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ e->flags &= ~EDGE_EXECUTABLE;
+
+ FOR_EACH_EDGE (e, ei, bb->preds)
+ {
+ if (dominated_by_p (CDI_DOMINATORS, e->src, bb))
+ {
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, "Marking backedge from BB %d into "
+ "unreachable BB %d as not executable\n",
+ e->src->index, bb->index);
+ e->flags &= ~EDGE_EXECUTABLE;
+ }
+ }
+
+ if (!m_unreachable_dom)
+ m_unreachable_dom = bb;
+}
+
/* Recursively walk the dominator tree.
BB is the basic block we are currently visiting. */
|| bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
|| bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
{
+
/* Callback for subclasses to do custom things before we have walked
the dominator children, but before we walk statements. */
- before_dom_children (bb);
+ if (this->bb_reachable (cfun, bb))
+ {
+ edge taken_edge = before_dom_children (bb);
+ if (taken_edge)
+ {
+ edge_iterator ei;
+ edge e;
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ if (e != taken_edge)
+ e->flags &= ~EDGE_EXECUTABLE;
+ }
+ }
+ else
+ propagate_unreachable_to_edges (bb, dump_file, dump_flags);
/* Mark the current BB to be popped out of the recursion stack
once children are processed. */
/* Callback allowing subclasses to do custom things after we have
walked dominator children, but before we walk statements. */
- after_dom_children (bb);
+ if (bb_reachable (cfun, bb))
+ after_dom_children (bb);
+ else if (m_unreachable_dom == bb)
+ m_unreachable_dom = NULL;
}
if (sp)
bb = worklist[--sp];
class dom_walker
{
public:
- dom_walker (cdi_direction direction) : m_dom_direction (direction) {}
+ /* Use SKIP_UNREACHABLE_BLOCKS = true when your client can discover
+ that some edges are not executable.
+
+ If a client can discover that a COND, SWITCH or GOTO has a static
+ target in the before_dom_children callback, the taken edge should
+ be returned. The generic walker will clear EDGE_EXECUTABLE on all
+ edges it can determine are not executable. */
+ dom_walker (cdi_direction direction, bool skip_unreachable_blocks = false);
/* Walk the dominator tree. */
void walk (basic_block);
- /* Function to call before the recursive walk of the dominator children. */
- virtual void before_dom_children (basic_block) {}
+ /* Function to call before the recursive walk of the dominator children.
+
+ Return value is the always taken edge if the block has multiple outgoing
+ edges, NULL otherwise. When skipping unreachable blocks, the walker
+ uses the taken edge information to clear EDGE_EXECUTABLE on the other
+ edges, exposing unreachable blocks. A NULL return value means all
+ outgoing edges should still be considered executable. */
+ virtual edge before_dom_children (basic_block) { return NULL; }
/* Function to call after the recursive walk of the dominator children. */
virtual void after_dom_children (basic_block) {}
if it is set to CDI_POST_DOMINATORS, then we walk the post
dominator tree. */
const ENUM_BITFIELD (cdi_direction) m_dom_direction : 2;
+ bool m_skip_unreachable_blocks;
+ basic_block m_unreachable_dom;
+
+ /* Query whether or not the given block is reachable. */
+ bool bb_reachable (struct function *, basic_block);
+
+ /* Given an unreachable block, propagate that property to outgoing
+ and possibly incoming edges for the block. Typically called after
+ determining a block is unreachable in the before_dom_children
+ callback. */
+ void propagate_unreachable_to_edges (basic_block, FILE *, int);
+
};
#endif
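
For illustration only (not part of the patch), here is a minimal sketch of
what a client of the updated dom_walker interface might look like.  The class
name example_dom_walker and its trivial constant-comparison check are
hypothetical; the pattern simply mirrors the sccvn_dom_walker and
dom_opt_dom_walker changes later in this patch.

/* Hypothetical client of the new dom_walker interface, for illustration
   only.  Passing true as the second constructor argument asks the walker
   to track EDGE_EXECUTABLE and skip unreachable blocks.  */
class example_dom_walker : public dom_walker
{
public:
  example_dom_walker ()
    : dom_walker (CDI_DOMINATORS, true /* skip_unreachable_blocks */) {}

  virtual edge before_dom_children (basic_block bb)
  {
    /* If the block ends in a GIMPLE_COND comparing two constants, we
       know which successor will be taken; return that edge and let the
       generic walker clear EDGE_EXECUTABLE on the other successors.  */
    gimple *stmt = last_stmt (bb);
    if (stmt && gimple_code (stmt) == GIMPLE_COND)
      {
        tree lhs = gimple_cond_lhs (stmt);
        tree rhs = gimple_cond_rhs (stmt);
        if (TREE_CODE (lhs) == INTEGER_CST && TREE_CODE (rhs) == INTEGER_CST)
          {
            tree val = fold_binary (gimple_cond_code (stmt),
                                    boolean_type_node, lhs, rhs);
            if (val && TREE_CODE (val) == INTEGER_CST)
              return find_taken_edge (bb, val);
          }
      }

    /* Returning NULL leaves all outgoing edges marked executable.  */
    return NULL;
  }
};

Calling walk (ENTRY_BLOCK_PTR_FOR_FN (cfun)) on such a walker would then
invoke the callbacks only on blocks still considered reachable, as shown in
the domwalk.c changes above.
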
public:
single_def_use_dom_walker (cdi_direction direction)
: dom_walker (direction) {}
- virtual void before_dom_children (basic_block);
+ virtual edge before_dom_children (basic_block);
virtual void after_dom_children (basic_block);
};
-void
+edge
single_def_use_dom_walker::before_dom_children (basic_block bb)
{
int bb_index = bb->index;
process_uses (df_get_artificial_uses (bb_index), 0);
process_defs (df_get_artificial_defs (bb_index), 0);
+
+ return NULL;
}
/* Pop the definitions created in this basic block when leaving its
public:
find_candidates_dom_walker (cdi_direction direction)
: dom_walker (direction) {}
- virtual void before_dom_children (basic_block);
+ virtual edge before_dom_children (basic_block);
};
/* Find strength-reduction candidates in block BB. */
-void
+edge
find_candidates_dom_walker::before_dom_children (basic_block bb)
{
bool speed = optimize_bb_for_speed_p (bb);
}
}
}
+ return NULL;
}
\f
/* Dump a candidate for debug. */
analysis_dom_walker (struct ipa_func_body_info *fbi)
: dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
- virtual void before_dom_children (basic_block);
+ virtual edge before_dom_children (basic_block);
private:
struct ipa_func_body_info *m_fbi;
};
-void
+edge
analysis_dom_walker::before_dom_children (basic_block bb)
{
ipa_analyze_params_uses_in_bb (m_fbi, bb);
ipa_compute_jump_functions_for_bb (m_fbi, bb);
+ return NULL;
}
/* Release body info FBI. */
: dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
- virtual void before_dom_children (basic_block);
+ virtual edge before_dom_children (basic_block);
private:
struct ipa_func_body_info *m_fbi;
bool *m_something_changed, *m_cfg_changed;
};
-void
+edge
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
gimple_stmt_iterator gsi;
&& gimple_purge_dead_eh_edges (gimple_bb (stmt)))
*m_cfg_changed = true;
}
-
+ return NULL;
}
/* Update alignment of formal parameters as described in
+2015-12-10 Jeff Law <law@redhat.com>
+
+ PR tree-optimization/68619
+ * gcc.dg/tree-ssa/pr68619-1.c: New test.
+ * gcc.dg/tree-ssa/pr68619-2.c: New test.
+ * gcc.dg/tree-ssa/pr68619-3.c: New test.
+ * gcc.dg/tree-ssa/pr68619-4.c: New test.
+ * gcc.dg/tree-ssa/pr68619-5.c: New test.
+
2015-12-10 Jakub Jelinek <jakub@redhat.com>
PR rtl-optimization/68376
--- /dev/null
+/* { dg-do compile } */
+/* { dg-options "-O2 -w" } */
+
+extern void fn2(int);
+int a, b, c;
+void fn1() {
+ int d;
+ for (; b; b++) {
+ a = 7;
+ for (; a;) {
+ jump:
+ fn2(d ?: c);
+ d = 0;
+ }
+ d = c;
+ if (c)
+ goto jump;
+ }
+ goto jump;
+}
--- /dev/null
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-dom2-details -w" } */
+
+typedef union tree_node *tree;
+struct gcc_options
+{
+ int x_flag_finite_math_only;
+};
+extern struct gcc_options global_options;
+enum mode_class
+{ MODE_RANDOM, MODE_CC, MODE_INT, MODE_PARTIAL_INT, MODE_FRACT, MODE_UFRACT,
+ MODE_ACCUM, MODE_UACCUM, MODE_FLOAT, MODE_DECIMAL_FLOAT, MODE_COMPLEX_INT,
+ MODE_COMPLEX_FLOAT, MODE_VECTOR_INT, MODE_VECTOR_FRACT,
+ MODE_VECTOR_UFRACT, MODE_VECTOR_ACCUM, MODE_VECTOR_UACCUM,
+ MODE_VECTOR_FLOAT, MAX_MODE_CLASS
+};
+extern const unsigned char mode_class[27];
+extern const unsigned char mode_inner[27];
+struct real_value
+{
+};
+struct real_format
+{
+ unsigned char has_inf;
+};
+extern const struct real_format *real_format_for_mode[5 -
+ 2 + 1 + 15 - 10 + 1];
+struct tree_type
+{
+};
+union tree_node
+{
+ int code;
+ int mode;
+ struct tree_type type;
+};
+tree
+omp_reduction_init (tree clause, tree type)
+{
+ if ((((type)->code) == 64))
+ {
+ struct real_value max;
+ if (((((mode_class[((((type))->code) ==
+ 32 ?
+ vector_type_mode (type)
+ : (type)->mode)]) ==
+ MODE_VECTOR_FLOAT)
+ &&
+ ((real_format_for_mode
+ [((mode_class[((mode_class[((((type))->code) ==
+ 32 ?
+ vector_type_mode (type)
+ : (type)->mode)]) ==
+ 12) ? (((((type))->code)
+ ==
+ 32 ?
+ vector_type_mode
+ (type)
+ : (type)->mode))
+ : (mode_inner[((((type))->code) ==
+ 32 ?
+ vector_type_mode (type)
+ : (type)->mode)])]) ==
+ 12)
+ ? (((((mode_class[((((type))->code) ==
+ 32 ? vector_type_mode (type)
+ : (type)->mode)]) ==
+ 12) ? (((((type))->code) ==
+ 32 ?
+ vector_type_mode (type)
+ : (type)->mode)) : (mode_inner
+ [((((type))->code) ==
+ 32 ?
+ vector_type_mode (type)
+ : (type)->mode)])) - 10) +
+ (5 - 2 +
+ 1))
+ : ((((mode_class
+ [((((type))->code) ==
+ 32 ? vector_type_mode (type) : (type)->mode)]) ==
+ 12) ? (((((type))->code) ==
+ 32 ? vector_type_mode (type) : (type)->
+ mode)) : (mode_inner[((((type))->code) ==
+ 32 ? vector_type_mode (type)
+ : (type)->mode)])) -
+ 2)]))->has_inf) && !global_options.x_flag_finite_math_only))
+ real_inf (&max);
+ }
+}
+
+/* { dg-final { scan-tree-dump "Marking all outgoing edges of unreachable" "dom2"} } */
+
--- /dev/null
+/* { dg-do compile } */
+/* { dg-options "-O2 -w" } */
+typedef unsigned int hashval_t;
+enum ETYPE
+{
+ ETYPE_ARRAY, ETYPE_STRUCT, ETYPE_UNION,
+};
+struct entry
+{
+ enum ETYPE etype:8;
+ unsigned short len;
+ const char *attrib;
+};
+e_hash (const void *a)
+{
+ const struct entry *e = a;
+ hashval_t ret = 0;
+ int i;
+ if (e[0].etype != ETYPE_STRUCT && e[0].etype != ETYPE_UNION)
+ abort ();
+ for (i = 0; i <= e[0].len; ++i)
+ {
+ ret = iterative_hash (&e[i], __builtin_offsetof (struct entry, attrib), ret);
+ }
+ return ret;
+}
--- /dev/null
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-optimized -w" } */
+
+typedef struct rtx_def *rtx;
+enum rtx_code
+{
+ UNKNOWN, VALUE, DEBUG_EXPR, EXPR_LIST, INSN_LIST, SEQUENCE, ADDRESS,
+ DEBUG_INSN, INSN, JUMP_INSN, CALL_INSN, BARRIER, CODE_LABEL, NOTE,
+ COND_EXEC, PARALLEL, ASM_INPUT, ASM_OPERANDS, UNSPEC, UNSPEC_VOLATILE,
+ ADDR_VEC, ADDR_DIFF_VEC, PREFETCH, SET, USE, CLOBBER, CALL, RETURN,
+ EH_RETURN, TRAP_IF, CONST_INT, CONST_FIXED, CONST_DOUBLE, CONST_VECTOR,
+ CONST_STRING, CONST, PC, REG, SCRATCH, SUBREG, STRICT_LOW_PART, CONCAT,
+ CONCATN, MEM, LABEL_REF, SYMBOL_REF, CC0, IF_THEN_ELSE, COMPARE, PLUS,
+ MINUS, NEG, MULT, SS_MULT, US_MULT, DIV, SS_DIV, US_DIV, MOD, UDIV, UMOD,
+ AND, IOR, XOR, NOT, ASHIFT, ROTATE, ASHIFTRT, LSHIFTRT, ROTATERT, SMIN,
+ SMAX, UMIN, UMAX, PRE_DEC, PRE_INC, POST_DEC, POST_INC, PRE_MODIFY,
+ POST_MODIFY, NE, EQ, GE, GT, LE, LT, GEU, GTU, LEU, LTU, UNORDERED,
+ ORDERED, UNEQ, UNGE, UNGT, UNLE, UNLT, LTGT, SIGN_EXTEND, ZERO_EXTEND,
+ TRUNCATE, FLOAT_EXTEND, FLOAT_TRUNCATE, FLOAT, FIX, UNSIGNED_FLOAT,
+ UNSIGNED_FIX, FRACT_CONVERT, UNSIGNED_FRACT_CONVERT, SAT_FRACT,
+ UNSIGNED_SAT_FRACT, ABS, SQRT, BSWAP, FFS, CLZ, CTZ, POPCOUNT, PARITY,
+ SIGN_EXTRACT, ZERO_EXTRACT, HIGH, LO_SUM, VEC_MERGE, VEC_SELECT,
+ VEC_CONCAT, VEC_DUPLICATE, SS_PLUS, US_PLUS, SS_MINUS, SS_NEG, US_NEG,
+ SS_ABS, SS_ASHIFT, US_ASHIFT, US_MINUS, SS_TRUNCATE, US_TRUNCATE, FMA,
+ VAR_LOCATION, DEBUG_IMPLICIT_PTR, ENTRY_VALUE, LAST_AND_UNUSED_RTX_CODE
+};
+enum rtx_class
+{
+ RTX_COMPARE, RTX_COMM_COMPARE, RTX_BIN_ARITH, RTX_COMM_ARITH, RTX_UNARY,
+ RTX_EXTRA, RTX_MATCH, RTX_INSN, RTX_OBJ, RTX_CONST_OBJ, RTX_TERNARY,
+ RTX_BITFIELD_OPS, RTX_AUTOINC
+};
+extern const unsigned char rtx_length[((int) LAST_AND_UNUSED_RTX_CODE)];
+extern const enum rtx_class rtx_class[((int) LAST_AND_UNUSED_RTX_CODE)];
+union rtunion_def
+{
+ rtx rt_rtx;
+};
+typedef union rtunion_def rtunion;
+struct rtx_def
+{
+ enum rtx_code code:16;
+ union u
+ {
+ rtunion fld[1];
+ }
+ u;
+};
+struct cse_reg_info
+{
+ unsigned int timestamp;
+ int reg_qty;
+ int reg_tick;
+ int reg_in_table;
+ unsigned int subreg_ticked;
+};
+static struct cse_reg_info *cse_reg_info_table;
+static unsigned int cse_reg_info_timestamp;
+
+static __inline__ struct cse_reg_info *
+get_cse_reg_info (unsigned int regno)
+{
+ struct cse_reg_info *p = &cse_reg_info_table[regno];
+ if (p->timestamp != cse_reg_info_timestamp)
+ cse_reg_info_table[regno].timestamp = cse_reg_info_timestamp;
+}
+
+int
+mention_regs (rtx x)
+{
+ enum rtx_code code;
+ int i, j;
+ const char *fmt;
+ int changed = 0;
+ code = ((x)->code);
+ if (code == SUBREG
+ && ((((((x)->u.fld[0]).rt_rtx))->code) == REG))
+ {
+ (get_cse_reg_info (i)->reg_in_table) = (get_cse_reg_info (i)->reg_tick);
+ (get_cse_reg_info (i)->subreg_ticked) =
+ (rhs_regno ((((x)->u.fld[0]).rt_rtx)));
+ }
+ if ((((rtx_class[(int) (((x)->code))]) & (~1)) == (RTX_COMPARE & (~1))))
+ {
+ if (((((((x)->u.fld[0]).rt_rtx))->code) == REG))
+ foop ();
+ }
+ for (i = (rtx_length[(int) (code)]) - 1; i >= 0; i--)
+ arf ();
+}
+
+/* Make sure the constant 39 gets propagated into the PHI at the join point. */
+/* { dg-final { scan-tree-dump "PHI <.*, 39" "optimized"} } */
+
--- /dev/null
+/* { dg-do compile } */
+/* { dg-options "-O2 -w" } */
+typedef union tree_node *tree;
+typedef union gimple_statement_d *gimple;
+enum machine_mode
+{ VOIDmode, BLKmode, CCmode, CCGCmode, CCGOCmode, CCNOmode, CCAmode, CCCmode,
+ CCOmode, CCSmode, CCZmode, CCFPmode, CCFPUmode, BImode, QImode, HImode,
+ SImode, DImode, TImode, OImode, QQmode, HQmode, SQmode, DQmode, TQmode,
+ UQQmode, UHQmode, USQmode, UDQmode, UTQmode, HAmode, SAmode, DAmode,
+ TAmode, UHAmode, USAmode, UDAmode, UTAmode, SFmode, DFmode, XFmode,
+ TFmode, SDmode, DDmode, TDmode, CQImode, CHImode, CSImode, CDImode,
+ CTImode, COImode, SCmode, DCmode, XCmode, TCmode, V2QImode, V4QImode,
+ V2HImode, V1SImode, V8QImode, V4HImode, V2SImode, V1DImode, V16QImode,
+ V8HImode, V4SImode, V2DImode, V1TImode, V32QImode, V16HImode, V8SImode,
+ V4DImode, V2TImode, V64QImode, V32HImode, V16SImode, V8DImode, V4TImode,
+ V2SFmode, V4SFmode, V2DFmode, V8SFmode, V4DFmode, V2TFmode, V16SFmode,
+ V8DFmode, V4TFmode, MAX_MACHINE_MODE, MIN_MODE_RANDOM =
+ VOIDmode, MAX_MODE_RANDOM = BLKmode, MIN_MODE_CC = CCmode, MAX_MODE_CC =
+ CCFPUmode, MIN_MODE_INT = QImode, MAX_MODE_INT =
+ OImode, MIN_MODE_PARTIAL_INT = VOIDmode, MAX_MODE_PARTIAL_INT =
+ VOIDmode, MIN_MODE_FRACT = QQmode, MAX_MODE_FRACT =
+ TQmode, MIN_MODE_UFRACT = UQQmode, MAX_MODE_UFRACT =
+ UTQmode, MIN_MODE_ACCUM = HAmode, MAX_MODE_ACCUM =
+ TAmode, MIN_MODE_UACCUM = UHAmode, MAX_MODE_UACCUM =
+ UTAmode, MIN_MODE_FLOAT = SFmode, MAX_MODE_FLOAT =
+ TFmode, MIN_MODE_DECIMAL_FLOAT = SDmode, MAX_MODE_DECIMAL_FLOAT =
+ TDmode, MIN_MODE_COMPLEX_INT = CQImode, MAX_MODE_COMPLEX_INT =
+ COImode, MIN_MODE_COMPLEX_FLOAT = SCmode, MAX_MODE_COMPLEX_FLOAT =
+ TCmode, MIN_MODE_VECTOR_INT = V2QImode, MAX_MODE_VECTOR_INT =
+ V4TImode, MIN_MODE_VECTOR_FRACT = VOIDmode, MAX_MODE_VECTOR_FRACT =
+ VOIDmode, MIN_MODE_VECTOR_UFRACT = VOIDmode, MAX_MODE_VECTOR_UFRACT =
+ VOIDmode, MIN_MODE_VECTOR_ACCUM = VOIDmode, MAX_MODE_VECTOR_ACCUM =
+ VOIDmode, MIN_MODE_VECTOR_UACCUM = VOIDmode, MAX_MODE_VECTOR_UACCUM =
+ VOIDmode, MIN_MODE_VECTOR_FLOAT = V2SFmode, MAX_MODE_VECTOR_FLOAT =
+ V4TFmode, NUM_MACHINE_MODES = MAX_MACHINE_MODE };
+enum mode_class
+{ MODE_RANDOM, MODE_CC, MODE_INT, MODE_PARTIAL_INT, MODE_FRACT, MODE_UFRACT,
+ MODE_ACCUM, MODE_UACCUM, MODE_FLOAT, MODE_DECIMAL_FLOAT, MODE_COMPLEX_INT,
+ MODE_COMPLEX_FLOAT, MODE_VECTOR_INT, MODE_VECTOR_FRACT,
+ MODE_VECTOR_UFRACT, MODE_VECTOR_ACCUM, MODE_VECTOR_UACCUM,
+ MODE_VECTOR_FLOAT, MAX_MODE_CLASS };
+extern const unsigned char mode_class[NUM_MACHINE_MODES];
+extern const unsigned char mode_inner[NUM_MACHINE_MODES];
+struct real_format
+{
+ unsigned char has_nans;
+};
+extern const struct real_format *real_format_for_mode[MAX_MODE_FLOAT -
+ MIN_MODE_FLOAT + 1 +
+ MAX_MODE_DECIMAL_FLOAT -
+ MIN_MODE_DECIMAL_FLOAT +
+ 1];
+enum tree_code
+{ ERROR_MARK, IDENTIFIER_NODE, TREE_LIST, TREE_VEC, BLOCK, OFFSET_TYPE,
+ ENUMERAL_TYPE, BOOLEAN_TYPE, INTEGER_TYPE, REAL_TYPE, POINTER_TYPE,
+ REFERENCE_TYPE, NULLPTR_TYPE, FIXED_POINT_TYPE, COMPLEX_TYPE, VECTOR_TYPE,
+ ARRAY_TYPE, RECORD_TYPE, UNION_TYPE, QUAL_UNION_TYPE, VOID_TYPE,
+ FUNCTION_TYPE, METHOD_TYPE, LANG_TYPE, INTEGER_CST, REAL_CST, FIXED_CST,
+ COMPLEX_CST, VECTOR_CST, STRING_CST, FUNCTION_DECL, LABEL_DECL,
+ FIELD_DECL, VAR_DECL, CONST_DECL, PARM_DECL, TYPE_DECL, RESULT_DECL,
+ DEBUG_EXPR_DECL, NAMESPACE_DECL, IMPORTED_DECL, TRANSLATION_UNIT_DECL,
+ COMPONENT_REF, BIT_FIELD_REF, REALPART_EXPR, IMAGPART_EXPR, ARRAY_REF,
+ ARRAY_RANGE_REF, INDIRECT_REF, OBJ_TYPE_REF, CONSTRUCTOR, COMPOUND_EXPR,
+ MODIFY_EXPR, INIT_EXPR, TARGET_EXPR, COND_EXPR, VEC_COND_EXPR, BIND_EXPR,
+ CALL_EXPR, WITH_CLEANUP_EXPR, CLEANUP_POINT_EXPR, PLACEHOLDER_EXPR,
+ PLUS_EXPR, MINUS_EXPR, MULT_EXPR, POINTER_PLUS_EXPR, TRUNC_DIV_EXPR,
+ CEIL_DIV_EXPR, FLOOR_DIV_EXPR, ROUND_DIV_EXPR, TRUNC_MOD_EXPR,
+ CEIL_MOD_EXPR, FLOOR_MOD_EXPR, ROUND_MOD_EXPR, RDIV_EXPR, EXACT_DIV_EXPR,
+ FIX_TRUNC_EXPR, FLOAT_EXPR, NEGATE_EXPR, MIN_EXPR, MAX_EXPR, ABS_EXPR,
+ LSHIFT_EXPR, RSHIFT_EXPR, LROTATE_EXPR, RROTATE_EXPR, BIT_IOR_EXPR,
+ BIT_XOR_EXPR, BIT_AND_EXPR, BIT_NOT_EXPR, TRUTH_ANDIF_EXPR,
+ TRUTH_ORIF_EXPR, TRUTH_AND_EXPR, TRUTH_OR_EXPR, TRUTH_XOR_EXPR,
+ TRUTH_NOT_EXPR, LT_EXPR, LE_EXPR, GT_EXPR, GE_EXPR, EQ_EXPR, NE_EXPR,
+ UNORDERED_EXPR, ORDERED_EXPR, UNLT_EXPR, UNLE_EXPR, UNGT_EXPR, UNGE_EXPR,
+ UNEQ_EXPR, LTGT_EXPR, RANGE_EXPR, PAREN_EXPR, CONVERT_EXPR,
+ ADDR_SPACE_CONVERT_EXPR, FIXED_CONVERT_EXPR, NOP_EXPR, NON_LVALUE_EXPR,
+ VIEW_CONVERT_EXPR, COMPOUND_LITERAL_EXPR, SAVE_EXPR, ADDR_EXPR,
+ FDESC_EXPR, COMPLEX_EXPR, CONJ_EXPR, PREDECREMENT_EXPR, PREINCREMENT_EXPR,
+ POSTDECREMENT_EXPR, POSTINCREMENT_EXPR, VA_ARG_EXPR, TRY_CATCH_EXPR,
+ TRY_FINALLY_EXPR, DECL_EXPR, LABEL_EXPR, GOTO_EXPR, RETURN_EXPR,
+ EXIT_EXPR, LOOP_EXPR, SWITCH_EXPR, CASE_LABEL_EXPR, ASM_EXPR, SSA_NAME,
+ CATCH_EXPR, EH_FILTER_EXPR, SCEV_KNOWN, SCEV_NOT_KNOWN, POLYNOMIAL_CHREC,
+ STATEMENT_LIST, ASSERT_EXPR, TREE_BINFO, WITH_SIZE_EXPR,
+ REALIGN_LOAD_EXPR, TARGET_MEM_REF, MEM_REF, OMP_PARALLEL, OMP_TASK,
+ OMP_FOR, OMP_SECTIONS, OMP_SINGLE, OMP_SECTION, OMP_MASTER, OMP_ORDERED,
+ OMP_CRITICAL, OMP_ATOMIC, OMP_CLAUSE, REDUC_MAX_EXPR, REDUC_MIN_EXPR,
+ REDUC_PLUS_EXPR, DOT_PROD_EXPR, WIDEN_SUM_EXPR, WIDEN_MULT_EXPR,
+ WIDEN_MULT_PLUS_EXPR, WIDEN_MULT_MINUS_EXPR, FMA_EXPR, VEC_LSHIFT_EXPR,
+ VEC_RSHIFT_EXPR, VEC_WIDEN_MULT_HI_EXPR, VEC_WIDEN_MULT_LO_EXPR,
+ VEC_UNPACK_HI_EXPR, VEC_UNPACK_LO_EXPR, VEC_UNPACK_FLOAT_HI_EXPR,
+ VEC_UNPACK_FLOAT_LO_EXPR, VEC_PACK_TRUNC_EXPR, VEC_PACK_SAT_EXPR,
+ VEC_PACK_FIX_TRUNC_EXPR, VEC_EXTRACT_EVEN_EXPR, VEC_EXTRACT_ODD_EXPR,
+ VEC_INTERLEAVE_HIGH_EXPR, VEC_INTERLEAVE_LOW_EXPR, PREDICT_EXPR,
+ OPTIMIZATION_NODE, TARGET_OPTION_NODE, LAST_AND_UNUSED_TREE_CODE,
+ C_MAYBE_CONST_EXPR, EXCESS_PRECISION_EXPR, UNCONSTRAINED_ARRAY_TYPE,
+ UNCONSTRAINED_ARRAY_REF, NULL_EXPR, PLUS_NOMOD_EXPR, MINUS_NOMOD_EXPR,
+ ATTR_ADDR_EXPR, STMT_STMT, LOOP_STMT, EXIT_STMT, OFFSET_REF, PTRMEM_CST,
+ NEW_EXPR, VEC_NEW_EXPR, DELETE_EXPR, VEC_DELETE_EXPR, SCOPE_REF,
+ MEMBER_REF, TYPE_EXPR, AGGR_INIT_EXPR, VEC_INIT_EXPR, THROW_EXPR,
+ EMPTY_CLASS_EXPR, BASELINK, TEMPLATE_DECL, TEMPLATE_PARM_INDEX,
+ TEMPLATE_TEMPLATE_PARM, TEMPLATE_TYPE_PARM, TYPENAME_TYPE, TYPEOF_TYPE,
+ BOUND_TEMPLATE_TEMPLATE_PARM, UNBOUND_CLASS_TEMPLATE, USING_DECL,
+ USING_STMT, DEFAULT_ARG, TEMPLATE_ID_EXPR, OVERLOAD, PSEUDO_DTOR_EXPR,
+ MODOP_EXPR, CAST_EXPR, REINTERPRET_CAST_EXPR, CONST_CAST_EXPR,
+ STATIC_CAST_EXPR, DYNAMIC_CAST_EXPR, DOTSTAR_EXPR, TYPEID_EXPR,
+ NOEXCEPT_EXPR, NON_DEPENDENT_EXPR, CTOR_INITIALIZER, TRY_BLOCK,
+ EH_SPEC_BLOCK, HANDLER, MUST_NOT_THROW_EXPR, CLEANUP_STMT, IF_STMT,
+ FOR_STMT, RANGE_FOR_STMT, WHILE_STMT, DO_STMT, BREAK_STMT, CONTINUE_STMT,
+ SWITCH_STMT, EXPR_STMT, TAG_DEFN, OFFSETOF_EXPR, SIZEOF_EXPR, ARROW_EXPR,
+ ALIGNOF_EXPR, AT_ENCODE_EXPR, STMT_EXPR, UNARY_PLUS_EXPR, STATIC_ASSERT,
+ TYPE_ARGUMENT_PACK, NONTYPE_ARGUMENT_PACK, TYPE_PACK_EXPANSION,
+ EXPR_PACK_EXPANSION, ARGUMENT_PACK_SELECT, TRAIT_EXPR, LAMBDA_EXPR,
+ DECLTYPE_TYPE, TEMPLATE_INFO, URSHIFT_EXPR, COMPARE_EXPR, COMPARE_L_EXPR,
+ COMPARE_G_EXPR, CLASS_INTERFACE_TYPE, CLASS_IMPLEMENTATION_TYPE,
+ CATEGORY_INTERFACE_TYPE, CATEGORY_IMPLEMENTATION_TYPE,
+ PROTOCOL_INTERFACE_TYPE, KEYWORD_DECL, INSTANCE_METHOD_DECL,
+ CLASS_METHOD_DECL, PROPERTY_DECL, MESSAGE_SEND_EXPR, CLASS_REFERENCE_EXPR,
+ PROPERTY_REF, MAX_TREE_CODES };
+struct tree_base
+{
+ enum tree_code code:16;
+};
+struct tree_typed
+{
+ tree type;
+};
+struct tree_type
+{
+ enum machine_mode mode:8;
+};
+union tree_node
+{
+ struct tree_base base;
+ struct tree_typed typed;
+ struct tree_type type;
+};
+enum tree_code
+parse_predicate (tree cond, tree * op0, tree * op1)
+{
+ gimple s;
+ tree op;
+ tree type = (*(&op->typed.type));
+ enum tree_code code;
+ return invert_tree_comparison (code,
+ ((((mode_class
+ [((((type))->base.code) ==
+ VECTOR_TYPE ? vector_type_mode (type)
+ : (type)->type.mode)]) ==
+ MODE_VECTOR_FLOAT)
+ &&
+ ((real_format_for_mode
+ [(((enum mode_class)
+ mode_class[(((enum mode_class)
+ mode_class[((((type))->
+ base.
+ code) ==
+ VECTOR_TYPE ?
+ vector_type_mode
+ (type)
+ : (type)->
+ type.
+ mode)]) ==
+ MODE_DECIMAL_FLOAT)
+ ? (((((type))->base.
+ code) ==
+ VECTOR_TYPE ?
+ vector_type_mode (type)
+ : (type)->type.
+ mode)) : ((enum
+ machine_mode)
+ mode_inner[((((type))->base.code) == VECTOR_TYPE ? vector_type_mode (type) : (type)->type.mode)])]) == MODE_DECIMAL_FLOAT) ? ((((((enum mode_class) mode_class[((((type))->base.code) == VECTOR_TYPE ? vector_type_mode (type) : (type)->type.mode)]) == MODE_DECIMAL_FLOAT) ? (((((type))->base.code) == VECTOR_TYPE ? vector_type_mode (type) : (type)->type.mode)) : ((enum machine_mode) mode_inner[((((type))->base.code) == VECTOR_TYPE ? vector_type_mode (type) : (type)->type.mode)])) - MIN_MODE_DECIMAL_FLOAT) + (MAX_MODE_FLOAT - MIN_MODE_FLOAT + 1)) : (((((enum mode_class) mode_class[((((type))->base.code) == VECTOR_TYPE ? vector_type_mode (type) : (type)->type.mode)]) == MODE_DECIMAL_FLOAT) ? (((((type))->base.code) == VECTOR_TYPE ? vector_type_mode (type) : (type)->type.mode)) : ((enum machine_mode) mode_inner[((((type))->base.code) == VECTOR_TYPE ? vector_type_mode (type) : (type)->type.mode)])) - MIN_MODE_FLOAT)]))->has_nans) ));
+}
public:
rewrite_dom_walker (cdi_direction direction) : dom_walker (direction) {}
- virtual void before_dom_children (basic_block);
+ virtual edge before_dom_children (basic_block);
virtual void after_dom_children (basic_block);
};
(BLOCK_DEFS). Register new definitions for every PHI node in the
block. */
-void
+edge
rewrite_dom_walker::before_dom_children (basic_block bb)
{
if (dump_file && (dump_flags & TDF_DETAILS))
reaching definition for the variable and the edge through which that
definition is reaching the PHI node. */
rewrite_add_phi_arguments (bb);
+
+ return NULL;
}
public:
rewrite_update_dom_walker (cdi_direction direction) : dom_walker (direction) {}
- virtual void before_dom_children (basic_block);
+ virtual edge before_dom_children (basic_block);
virtual void after_dom_children (basic_block);
};
for new SSA names produced in this block (BLOCK_DEFS). Register
new definitions for every PHI node in the block. */
-void
+edge
rewrite_update_dom_walker::before_dom_children (basic_block bb)
{
bool is_abnormal_phi;
block_defs_stack.safe_push (NULL_TREE);
if (!bitmap_bit_p (blocks_to_update, bb->index))
- return;
+ return NULL;
/* Mark the LHS if any of the arguments flows through an abnormal
edge. */
/* Step 3. Update PHI nodes. */
rewrite_update_phi_arguments (bb);
+
+ return NULL;
}
/* Called after visiting block BB. Unwind BLOCK_DEFS_STACK to restore
mark_def_dom_walker (cdi_direction direction);
~mark_def_dom_walker ();
- virtual void before_dom_children (basic_block);
+ virtual edge before_dom_children (basic_block);
private:
/* Notice that this bitmap is indexed using variable UIDs, so it must be
/* Block processing routine for mark_def_sites. Clear the KILLS bitmap
at the start of each block, and call mark_def_sites for each statement. */
-void
+edge
mark_def_dom_walker::before_dom_children (basic_block bb)
{
gimple_stmt_iterator gsi;
bitmap_clear (m_kills);
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
mark_def_sites (bb, gsi_stmt (gsi), m_kills);
+ return NULL;
}
/* Initialize internal data needed during renaming. */
static struct opt_stats_d opt_stats;
/* Local functions. */
-static void optimize_stmt (basic_block, gimple_stmt_iterator,
+static edge optimize_stmt (basic_block, gimple_stmt_iterator,
class const_and_copies *,
class avail_exprs_stack *);
static tree lookup_avail_expr (gimple *, bool, class avail_exprs_stack *);
dom_opt_dom_walker (cdi_direction direction,
class const_and_copies *const_and_copies,
class avail_exprs_stack *avail_exprs_stack)
- : dom_walker (direction),
+ : dom_walker (direction, true),
m_const_and_copies (const_and_copies),
m_avail_exprs_stack (avail_exprs_stack),
m_dummy_cond (NULL) {}
- virtual void before_dom_children (basic_block);
+ virtual edge before_dom_children (basic_block);
virtual void after_dom_children (basic_block);
private:
avail_exprs_stack);
walker.walk (fun->cfg->x_entry_block_ptr);
+ /* Look for blocks where we cleared EDGE_EXECUTABLE on an outgoing
+ edge. When found, remove jump threads which contain any outgoing
+ edge from the affected block. */
+ if (cfg_altered)
+ {
+ FOR_EACH_BB_FN (bb, fun)
+ {
+ edge_iterator ei;
+ edge e;
+
+ /* First see if there are any edges without EDGE_EXECUTABLE
+ set. */
+ bool found = false;
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ {
+ if ((e->flags & EDGE_EXECUTABLE) == 0)
+ {
+ found = true;
+ break;
+ }
+ }
+
+ /* If there were any such edges found, then remove jump threads
+ containing any edge leaving BB. */
+ if (found)
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ remove_jump_threads_including (e);
+ }
+ }
+
{
gimple_stmt_iterator gsi;
basic_block bb;
if (lhs == t)
continue;
+ /* If the associated edge is not marked as executable, then it
+ can be ignored. */
+ if ((gimple_phi_arg_edge (phi, i)->flags & EDGE_EXECUTABLE) == 0)
+ continue;
+
t = dom_valueize (t);
/* If we have not processed an alternative yet, then set
if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
continue;
+ /* We can safely ignore edges that are not executable. */
+ if ((e->flags & EDGE_EXECUTABLE) == 0)
+ continue;
+
/* If we have already seen a non-loop edge, then we must have
multiple incoming non-loop edges and thus we return NULL. */
if (retval)
}
}
-void
+edge
dom_opt_dom_walker::before_dom_children (basic_block bb)
{
gimple_stmt_iterator gsi;
m_avail_exprs_stack);
m_avail_exprs_stack->pop_to_marker ();
+ edge taken_edge = NULL;
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
- optimize_stmt (bb, gsi, m_const_and_copies, m_avail_exprs_stack);
+ taken_edge
+ = optimize_stmt (bb, gsi, m_const_and_copies, m_avail_exprs_stack);
/* Now prepare to process dominated blocks. */
record_edge_info (bb);
cprop_into_successor_phis (bb, m_const_and_copies);
+ return taken_edge;
}
/* We have finished processing the dominator children of BB, perform
assignment is found, we map the value on the RHS of the assignment to
the variable in the LHS in the CONST_AND_COPIES table. */
-static void
+static edge
optimize_stmt (basic_block bb, gimple_stmt_iterator si,
class const_and_copies *const_and_copies,
class avail_exprs_stack *avail_exprs_stack)
bool may_optimize_p;
bool modified_p = false;
bool was_noreturn;
+ edge retval = NULL;
old_stmt = stmt = gsi_stmt (si);
was_noreturn = is_gimple_call (stmt) && gimple_call_noreturn_p (stmt);
fprintf (dump_file, " Flagged to clear EH edges.\n");
}
release_defs (stmt);
- return;
+ return retval;
}
}
}
if (val && TREE_CODE (val) == INTEGER_CST)
{
- edge taken_edge = find_taken_edge (bb, val);
- if (taken_edge)
+ retval = find_taken_edge (bb, val);
+ if (retval)
{
-
- /* We need to remove any queued jump threads that
- reference outgoing edges from this block. */
- edge_iterator ei;
- edge e;
- FOR_EACH_EDGE (e, ei, bb->succs)
- remove_jump_threads_including (e);
-
- /* Now clean up the control statement at the end of
- BB and remove unexecutable edges. */
- remove_ctrl_stmt_and_useless_edges (bb, taken_edge->dest);
-
- /* Fixup the flags on the single remaining edge. */
- taken_edge->flags
- &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE | EDGE_ABNORMAL);
- taken_edge->flags |= EDGE_FALLTHRU;
+ /* Fix the condition to be either true or false. */
+ if (gimple_code (stmt) == GIMPLE_COND)
+ {
+ if (integer_zerop (val))
+ gimple_cond_make_false (as_a <gcond *> (stmt));
+ else if (integer_onep (val))
+ gimple_cond_make_true (as_a <gcond *> (stmt));
+ else
+ gcc_unreachable ();
+ }
/* Further simplifications may be possible. */
cfg_altered = true;
&& is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
need_noreturn_fixup.safe_push (stmt);
}
+ return retval;
}
/* Helper for walk_non_aliased_vuses. Determine if we arrived at
public:
dse_dom_walker (cdi_direction direction) : dom_walker (direction) {}
- virtual void before_dom_children (basic_block);
+ virtual edge before_dom_children (basic_block);
};
-void
+edge
dse_dom_walker::before_dom_children (basic_block bb)
{
gimple_stmt_iterator gsi;
else
gsi_prev (&gsi);
}
+ return NULL;
}
namespace {
invariantness_dom_walker (cdi_direction direction)
: dom_walker (direction) {}
- virtual void before_dom_children (basic_block);
+ virtual edge before_dom_children (basic_block);
};
/* Determine the outermost loops in that statements in basic block BB are
invariant, and record them to the LIM_DATA associated with the statements.
Callback for dom_walker. */
-void
+edge
invariantness_dom_walker::before_dom_children (basic_block bb)
{
enum move_pos pos;
struct lim_aux_data *lim_data;
if (!loop_outer (bb->loop_father))
- return;
+ return NULL;
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Basic block %d (loop %d -- depth %d):\n\n",
if (lim_data->cost >= LIM_EXPENSIVE)
set_profitable_level (stmt);
}
+ return NULL;
}
class move_computations_dom_walker : public dom_walker
move_computations_dom_walker (cdi_direction direction)
: dom_walker (direction), todo_ (0) {}
- virtual void before_dom_children (basic_block);
+ virtual edge before_dom_children (basic_block);
unsigned int todo_;
};
data stored in LIM_DATA structures associated with each statement. Callback
for walk_dominator_tree. */
-void
+edge
move_computations_dom_walker::before_dom_children (basic_block bb)
{
struct loop *level;
struct lim_aux_data *lim_data;
if (!loop_outer (bb->loop_father))
- return;
+ return NULL;
for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi); )
{
else
gsi_insert_on_edge (e, stmt);
}
+ return NULL;
}
/* Hoist the statements out of the loops prescribed by data stored in
nontrapping_dom_walker (cdi_direction direction, hash_set<tree> *ps)
: dom_walker (direction), m_nontrapping (ps), m_seen_ssa_names (128) {}
- virtual void before_dom_children (basic_block);
+ virtual edge before_dom_children (basic_block);
virtual void after_dom_children (basic_block);
private:
};
/* Called by walk_dominator_tree, when entering the block BB. */
-void
+edge
nontrapping_dom_walker::before_dom_children (basic_block bb)
{
edge e;
add_or_mark_expr (bb, gimple_assign_rhs1 (stmt), false);
}
}
+ return NULL;
}
/* Called by walk_dominator_tree, when basic block BB is exited. */
eliminate_dom_walker (cdi_direction direction, bool do_pre_)
: dom_walker (direction), do_pre (do_pre_) {}
- virtual void before_dom_children (basic_block);
+ virtual edge before_dom_children (basic_block);
virtual void after_dom_children (basic_block);
bool do_pre;
/* Perform elimination for the basic-block B during the domwalk. */
-void
+edge
eliminate_dom_walker::before_dom_children (basic_block b)
{
/* Mark new bb. */
}
}
}
+ return NULL;
}
/* Make no longer available leaders no longer available. */
BITMAP_FREE (need_eh_cleanup);
}
- virtual void before_dom_children (basic_block);
+ virtual edge before_dom_children (basic_block);
virtual void after_dom_children (basic_block) {}
ssa_prop_get_value_fn get_value_fn;
bitmap need_eh_cleanup;
};
-void
+edge
substitute_and_fold_dom_walker::before_dom_children (basic_block bb)
{
/* Propagate known values into PHI nodes. */
fprintf (dump_file, "Not folded\n");
}
}
+ return NULL;
}
{
public:
sccvn_dom_walker ()
- : dom_walker (CDI_DOMINATORS), fail (false), unreachable_dom (NULL),
- cond_stack (vNULL) {}
+ : dom_walker (CDI_DOMINATORS, true), fail (false), cond_stack (vNULL) {}
~sccvn_dom_walker ();
- virtual void before_dom_children (basic_block);
+ virtual edge before_dom_children (basic_block);
virtual void after_dom_children (basic_block);
void record_cond (basic_block,
enum tree_code code, tree lhs, tree rhs, bool value);
bool fail;
- basic_block unreachable_dom;
vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
cond_stack;
};
void
sccvn_dom_walker::after_dom_children (basic_block bb)
{
- if (unreachable_dom == bb)
- unreachable_dom = NULL;
-
while (!cond_stack.is_empty ()
&& cond_stack.last ().first == bb)
{
/* Value number all statements in BB. */
-void
+edge
sccvn_dom_walker::before_dom_children (basic_block bb)
{
edge e;
edge_iterator ei;
if (fail)
- return;
-
- /* If any of the predecessor edges that do not come from blocks dominated
- by us are still marked as possibly executable consider this block
- reachable. */
- bool reachable = false;
- if (!unreachable_dom)
- {
- reachable = bb == ENTRY_BLOCK_PTR_FOR_FN (cfun);
- FOR_EACH_EDGE (e, ei, bb->preds)
- if (!dominated_by_p (CDI_DOMINATORS, e->src, bb))
- reachable |= (e->flags & EDGE_EXECUTABLE);
- }
-
- /* If the block is not reachable all outgoing edges are not
- executable. Neither are incoming edges with src dominated by us. */
- if (!reachable)
- {
- if (dump_file && (dump_flags & TDF_DETAILS))
- fprintf (dump_file, "Marking all outgoing edges of unreachable "
- "BB %d as not executable\n", bb->index);
-
- FOR_EACH_EDGE (e, ei, bb->succs)
- e->flags &= ~EDGE_EXECUTABLE;
-
- FOR_EACH_EDGE (e, ei, bb->preds)
- {
- if (dominated_by_p (CDI_DOMINATORS, e->src, bb))
- {
- if (dump_file && (dump_flags & TDF_DETAILS))
- fprintf (dump_file, "Marking backedge from BB %d into "
- "unreachable BB %d as not executable\n",
- e->src->index, bb->index);
- e->flags &= ~EDGE_EXECUTABLE;
- }
- }
-
- /* Record the most dominating unreachable block. */
- if (!unreachable_dom)
- unreachable_dom = bb;
-
- return;
- }
+ return NULL;
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Visiting BB %d\n", bb->index);
&& !DFS (res))
{
fail = true;
- return;
+ return NULL;
}
}
for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
&& !DFS (op))
{
fail = true;
- return;
+ return NULL;
}
}
/* Finally look at the last stmt. */
gimple *stmt = last_stmt (bb);
if (!stmt)
- return;
+ return NULL;
enum gimple_code code = gimple_code (stmt);
if (code != GIMPLE_COND
&& code != GIMPLE_SWITCH
&& code != GIMPLE_GOTO)
- return;
+ return NULL;
if (dump_file && (dump_flags & TDF_DETAILS))
{
gcc_unreachable ();
}
if (!val)
- return;
+ return NULL;
edge taken = find_taken_edge (bb, vn_valueize (val));
if (!taken)
- return;
+ return NULL;
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
"not executable\n", bb->index, bb->index, taken->dest->index);
- FOR_EACH_EDGE (e, ei, bb->succs)
- if (e != taken)
- e->flags &= ~EDGE_EXECUTABLE;
+ return taken;
}
/* Do SCCVN. Returns true if it finished, false if we bailed out
bool
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
- basic_block bb;
size_t i;
default_vn_walk_kind = default_vn_walk_kind_;
}
}
- /* Mark all edges as possibly executable. */
- FOR_ALL_BB_FN (bb, cfun)
- {
- edge_iterator ei;
- edge e;
- FOR_EACH_EDGE (e, ei, bb->succs)
- e->flags |= EDGE_EXECUTABLE;
- }
-
/* Walk all blocks in dominator order, value-numbering stmts
SSA defs and decide whether outgoing edges are not executable. */
sccvn_dom_walker walker;
public:
strlen_dom_walker (cdi_direction direction) : dom_walker (direction) {}
- virtual void before_dom_children (basic_block);
+ virtual edge before_dom_children (basic_block);
virtual void after_dom_children (basic_block);
};
/* Callback for walk_dominator_tree. Attempt to optimize various
string ops by remembering string lenths pointed by pointer SSA_NAMEs. */
-void
+edge
strlen_dom_walker::before_dom_children (basic_block bb)
{
basic_block dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
bb->aux = stridx_to_strinfo;
if (vec_safe_length (stridx_to_strinfo) && !strinfo_shared ())
(*stridx_to_strinfo)[0] = (strinfo *) bb;
+ return NULL;
}
/* Callback for walk_dominator_tree. Free strinfo vector if it is
public:
uncprop_dom_walker (cdi_direction direction) : dom_walker (direction) {}
- virtual void before_dom_children (basic_block);
+ virtual edge before_dom_children (basic_block);
virtual void after_dom_children (basic_block);
private:
return retval;
}
-void
+edge
uncprop_dom_walker::before_dom_children (basic_block bb)
{
basic_block parent;
m_equiv_stack.safe_push (NULL_TREE);
uncprop_into_successor_phis (bb);
+ return NULL;
}
namespace {