* internal-fn.c (expand_UNIQUE): New.
* internal-fn.h (enum ifn_unique_kind): New.
* internal-fn.def (IFN_UNIQUE): New.
* target-insns.def (unique): Define.
* gimple.h (gimple_call_internal_unique_p): New.
* gimple.c (gimple_call_same_target_p): Check internal fn
uniqueness.
* tracer.c (ignore_bb_p): Check for IFN_UNIQUE call.
* tree-ssa-threadedge.c
(record_temporary_equivalences_from_stmts): Likewise.
	* tree-cfg.c (gimple_call_initialize_ctrl_altering): Likewise.
From-SVN: r229459
+2015-10-27 Nathan Sidwell <nathan@codesourcery.com>
+
+ * internal-fn.c (expand_UNIQUE): New.
+ * internal-fn.h (enum ifn_unique_kind): New.
+ * internal-fn.def (IFN_UNIQUE): New.
+ * target-insns.def (unique): Define.
+ * gimple.h (gimple_call_internal_unique_p): New.
+ * gimple.c (gimple_call_same_target_p): Check internal fn
+ uniqueness.
+ * tracer.c (ignore_bb_p): Check for IFN_UNIQUE call.
+ * tree-ssa-threadedge.c
+ (record_temporary_equivalences_from_stmts): Likewise.
+	* tree-cfg.c (gimple_call_initialize_ctrl_altering): Likewise.
+
2015-10-27 Richard Henderson <rth@redhat.com>
PR rtl-opt/67609
* graphite-optimize-isl.c (get_schedule_for_node_st): New callback
function to schedule based on isl_schedule_node.
- (get_schedule_map_st): New schedule optimizer based on isl_schedule_node.
- (scop_get_domains): New. Return the isl_union_set containing the domains of all the pbbs.
+ (get_schedule_map_st): New schedule optimizer based on
+ isl_schedule_node.
+ (scop_get_domains): New. Return the isl_union_set containing the
+ domains of all the pbbs.
(optimize_isl): Call the new function get_schedule_map_st for isl-0.15
2015-10-27 H.J. Lu <hongjiu.lu@intel.com>
{
if (gimple_call_internal_p (c1))
return (gimple_call_internal_p (c2)
- && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2));
+ && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2)
+ && !gimple_call_internal_unique_p (as_a <const gcall *> (c1)));
else
return (gimple_call_fn (c1) == gimple_call_fn (c2)
|| (gimple_call_fndecl (c1)
return gimple_call_internal_fn (gc);
}
+/* Return true if this internal gimple call is unique.  */
+
+static inline bool
+gimple_call_internal_unique_p (const gcall *gs)
+{
+ return gimple_call_internal_fn (gs) == IFN_UNIQUE;
+}
+
+static inline bool
+gimple_call_internal_unique_p (const gimple *gs)
+{
+ const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
+ return gimple_call_internal_unique_p (gc);
+}
+
/* If CTRL_ALTERING_P is true, mark GIMPLE_CALL S to be a stmt
that could alter control flow. */
gcc_unreachable ();
}
+/* Expand the IFN_UNIQUE function according to its first argument. */
+
+static void
+expand_UNIQUE (gcall *stmt)
+{
+ rtx pattern = NULL_RTX;
+ enum ifn_unique_kind kind
+ = (enum ifn_unique_kind) TREE_INT_CST_LOW (gimple_call_arg (stmt, 0));
+
+ switch (kind)
+ {
+ default:
+ gcc_unreachable ();
+
+ case IFN_UNIQUE_UNSPEC:
+ if (targetm.have_unique ())
+ pattern = targetm.gen_unique ();
+ break;
+ }
+
+ if (pattern)
+ emit_insn (pattern);
+}
+
/* Routines to expand each internal function, indexed by function number.
Each routine has the prototype:
DEF_INTERNAL_FN (MUL_OVERFLOW, ECF_CONST | ECF_LEAF | ECF_NOTHROW, NULL)
DEF_INTERNAL_FN (TSAN_FUNC_EXIT, ECF_NOVOPS | ECF_LEAF | ECF_NOTHROW, NULL)
DEF_INTERNAL_FN (VA_ARG, ECF_NOTHROW | ECF_LEAF, NULL)
+
+/* An unduplicable, uncombinable function. Generally used to preserve
+ a CFG property in the face of jump threading, tail merging or
+ other such optimizations. The first argument distinguishes
+ between uses. See internal-fn.h for usage. */
+DEF_INTERNAL_FN (UNIQUE, ECF_NOTHROW, NULL)
+
#ifndef GCC_INTERNAL_FN_H
#define GCC_INTERNAL_FN_H
+/* INTEGER_CST values for IFN_UNIQUE function arg-0. */
+enum ifn_unique_kind {
+ IFN_UNIQUE_UNSPEC /* Undifferentiated UNIQUE. */
+};
+
/* Initialize internal function tables. */
extern void init_internal_fns ();
DEF_TARGET_INSN (store_multiple, (rtx x0, rtx x1, rtx x2))
DEF_TARGET_INSN (tablejump, (rtx x0, rtx x1))
DEF_TARGET_INSN (trap, (void))
+DEF_TARGET_INSN (unique, (void))
DEF_TARGET_INSN (untyped_call, (rtx x0, rtx x1, rtx x2))
DEF_TARGET_INSN (untyped_return, (rtx x0, rtx x1))
static bool
ignore_bb_p (const_basic_block bb)
{
- gimple *g;
-
if (bb->index < NUM_FIXED_BLOCKS)
return true;
if (optimize_bb_for_size_p (bb))
return true;
- /* A transaction is a single entry multiple exit region. It must be
- duplicated in its entirety or not at all. */
- g = last_stmt (CONST_CAST_BB (bb));
- if (g && gimple_code (g) == GIMPLE_TRANSACTION)
- return true;
+ if (gimple *g = last_stmt (CONST_CAST_BB (bb)))
+ {
+ /* A transaction is a single entry multiple exit region. It
+ must be duplicated in its entirety or not at all. */
+ if (gimple_code (g) == GIMPLE_TRANSACTION)
+ return true;
+
+ /* An IFN_UNIQUE call must be duplicated as part of its group,
+ or not at all. */
+ if (is_gimple_call (g)
+ && gimple_call_internal_p (g)
+ && gimple_call_internal_unique_p (g))
+ return true;
+ }
return false;
}
|| ((flags & ECF_TM_BUILTIN)
&& is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
/* BUILT_IN_RETURN call is same as return statement. */
- || gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
+ || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
+ /* IFN_UNIQUE should be the last insn, to make checking for it
+ as cheap as possible. */
+ || (gimple_call_internal_p (stmt)
+ && gimple_call_internal_unique_p (stmt)))
gimple_call_set_ctrl_altering (stmt, true);
else
gimple_call_set_ctrl_altering (stmt, false);
&& gimple_asm_volatile_p (as_a <gasm *> (stmt)))
return NULL;
+      /* If the statement is a unique builtin, we cannot thread
+	 through here.  */
+ if (gimple_code (stmt) == GIMPLE_CALL
+ && gimple_call_internal_p (stmt)
+ && gimple_call_internal_unique_p (stmt))
+ return NULL;
+
/* If duplicating this block is going to cause too much code
expansion, then do not thread through this block. */
stmt_count++;