From: Yury Gribov
Date: Wed, 3 Dec 2014 09:23:28 +0000 (+0000)
Subject: Improve optimization of ASan checks.
X-Git-Url: https://git.libre-soc.org/?a=commitdiff_plain;h=e28f2090dbbb50728e9c0933df59e1788307f59b;p=gcc.git

Improve optimization of ASan checks.

2014-12-03  Yury Gribov

gcc/
	* sanopt.c (maybe_get_single_definition): New function.
	(maybe_get_dominating_check): Ditto.
	(can_remove_asan_check): Ditto.
	(struct tree_map_traits): New struct.
	(struct sanopt_ctx): Use custom traits for asan_check_map.
	(maybe_optimize_ubsan_null_ifn): Move code to
	maybe_get_dominating_check.
	(maybe_optimize_asan_check_ifn): Move code and take non-SSA expressions
	into account when optimizing.
	(sanopt_optimize_walker): Optimize ASan checks even when
	recovering.

From-SVN: r218304
---

diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 85a68b90d66..fb97618b3ba 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,17 @@
+2014-12-03  Yury Gribov
+
+	* sanopt.c (maybe_get_single_definition): New function.
+	(maybe_get_dominating_check): Ditto.
+	(can_remove_asan_check): Ditto.
+	(struct tree_map_traits): New struct.
+	(struct sanopt_ctx): Use custom traits for asan_check_map.
+	(maybe_optimize_ubsan_null_ifn): Move code to
+	maybe_get_dominating_check.
+	(maybe_optimize_asan_check_ifn): Move code and take non-SSA expressions
+	into account when optimizing.
+	(sanopt_optimize_walker): Optimize ASan checks even when
+	recovering.
+
 2014-12-03  Ilya Enkovich

 	* config/i386/constraints.md (Yr): New.
diff --git a/gcc/sanopt.c b/gcc/sanopt.c
index e1d11e0d5e6..e0d3a8562b3 100644
--- a/gcc/sanopt.c
+++ b/gcc/sanopt.c
@@ -84,6 +84,35 @@ struct sanopt_info
   bool visited_p;
 };

+/* If T has a single definition of form T = T2, return T2.  */
+
+static tree
+maybe_get_single_definition (tree t)
+{
+  if (TREE_CODE (t) == SSA_NAME)
+    {
+      gimple g = SSA_NAME_DEF_STMT (t);
+      if (gimple_assign_single_p (g))
+        return gimple_assign_rhs1 (g);
+    }
+  return NULL_TREE;
+}
+
+/* Traits class for tree hash maps below.  */
+
+struct tree_map_traits : default_hashmap_traits
+{
+  static inline hashval_t hash (const_tree ref)
+  {
+    return iterative_hash_expr (ref, 0);
+  }
+
+  static inline bool equal_keys (const_tree ref1, const_tree ref2)
+  {
+    return operand_equal_p (ref1, ref2, 0);
+  }
+};
+
 /* This is used to carry various hash maps and variables used
    in sanopt_optimize_walker.  */

@@ -95,7 +124,7 @@ struct sanopt_ctx

   /* This map maps a pointer (the second argument of ASAN_CHECK) to
      a vector of ASAN_CHECK call statements that check the access.  */
-  hash_map<tree, auto_vec<gimple> > asan_check_map;
+  hash_map<tree, auto_vec<gimple>, tree_map_traits> asan_check_map;

   /* Number of IFN_ASAN_CHECK statements.  */
   int asan_num_accesses;
@@ -197,6 +226,24 @@ imm_dom_path_with_freeing_call (basic_block bb, basic_block dom)
   return false;
 }

+/* Get the first dominating check from the list of stored checks.
+   Non-dominating checks are silently dropped.  */
+
+static gimple
+maybe_get_dominating_check (auto_vec<gimple> &v)
+{
+  for (; !v.is_empty (); v.pop ())
+    {
+      gimple g = v.last ();
+      sanopt_info *si = (sanopt_info *) gimple_bb (g)->aux;
+      if (!si->visited_p)
+        /* At this point we shouldn't have any statements
+           that aren't dominating the current BB.  */
+        return g;
+    }
+  return NULL;
+}
+
 /* Optimize away redundant UBSAN_NULL calls.  */

 static bool
@@ -209,7 +256,8 @@ maybe_optimize_ubsan_null_ifn (struct sanopt_ctx *ctx, gimple stmt)
   bool remove = false;

   auto_vec<gimple> &v = ctx->null_check_map.get_or_insert (ptr);
-  if (v.is_empty ())
+  gimple g = maybe_get_dominating_check (v);
+  if (!g)
     {
       /* For this PTR we don't have any UBSAN_NULL stmts recorded, so there's
          nothing to optimize yet.  */
@@ -220,90 +268,42 @@ maybe_optimize_ubsan_null_ifn (struct sanopt_ctx *ctx, gimple stmt)
   /* We already have recorded a UBSAN_NULL check for this pointer.  Perhaps we
      can drop this one.  But only if this check doesn't specify stricter
      alignment.  */
-  while (!v.is_empty ())
-    {
-      gimple g = v.last ();
-      /* Remove statements for BBs that have been already processed.  */
-      sanopt_info *si = (sanopt_info *) gimple_bb (g)->aux;
-      if (si->visited_p)
-        {
-          v.pop ();
-          continue;
-        }
-      /* At this point we shouldn't have any statements that aren't dominating
-         the current BB.  */
-      tree align = gimple_call_arg (g, 2);
-      int kind = tree_to_shwi (gimple_call_arg (g, 1));
-      /* If this is a NULL pointer check where we had segv anyway, we can
-         remove it.  */
-      if (integer_zerop (align)
-          && (kind == UBSAN_LOAD_OF
-              || kind == UBSAN_STORE_OF
-              || kind == UBSAN_MEMBER_ACCESS))
-        remove = true;
-      /* Otherwise remove the check in non-recovering mode, or if the
-         stmts have same location.  */
-      else if (integer_zerop (align))
-        remove = (flag_sanitize_recover & SANITIZE_NULL) == 0
-                 || flag_sanitize_undefined_trap_on_error
-                 || gimple_location (g) == gimple_location (stmt);
-      else if (tree_int_cst_le (cur_align, align))
-        remove = (flag_sanitize_recover & SANITIZE_ALIGNMENT) == 0
-                 || flag_sanitize_undefined_trap_on_error
-                 || gimple_location (g) == gimple_location (stmt);
-      if (!remove && gimple_bb (g) == gimple_bb (stmt)
-          && tree_int_cst_compare (cur_align, align) == 0)
-        v.pop ();
-      break;
-    }
+  tree align = gimple_call_arg (g, 2);
+  int kind = tree_to_shwi (gimple_call_arg (g, 1));
+  /* If this is a NULL pointer check where we had segv anyway, we can
+     remove it.  */
+  if (integer_zerop (align)
+      && (kind == UBSAN_LOAD_OF
+          || kind == UBSAN_STORE_OF
+          || kind == UBSAN_MEMBER_ACCESS))
+    remove = true;
+  /* Otherwise remove the check in non-recovering mode, or if the
+     stmts have same location.  */
+  else if (integer_zerop (align))
+    remove = (flag_sanitize_recover & SANITIZE_NULL) == 0
+             || flag_sanitize_undefined_trap_on_error
+             || gimple_location (g) == gimple_location (stmt);
+  else if (tree_int_cst_le (cur_align, align))
+    remove = (flag_sanitize_recover & SANITIZE_ALIGNMENT) == 0
+             || flag_sanitize_undefined_trap_on_error
+             || gimple_location (g) == gimple_location (stmt);
+
+  if (!remove && gimple_bb (g) == gimple_bb (stmt)
+      && tree_int_cst_compare (cur_align, align) == 0)
+    v.pop ();

   if (!remove)
     v.safe_push (stmt);

   return remove;
 }

-/* Optimize away redundant ASAN_CHECK calls.  */
+/* Returns TRUE if ASan check of length LEN in block BB can be removed
+   if preceded by checks in V.  */

 static bool
-maybe_optimize_asan_check_ifn (struct sanopt_ctx *ctx, gimple stmt)
+can_remove_asan_check (auto_vec<gimple> &v, tree len, basic_block bb)
 {
-  gcc_assert (gimple_call_num_args (stmt) == 4);
-  tree ptr = gimple_call_arg (stmt, 1);
-  tree len = gimple_call_arg (stmt, 2);
-  basic_block bb = gimple_bb (stmt);
-  sanopt_info *info = (sanopt_info *) bb->aux;
-
-  if (TREE_CODE (len) != INTEGER_CST)
-    return false;
-  if (integer_zerop (len))
-    return false;
-
-  gimple_set_uid (stmt, info->freeing_call_events);
-
-  auto_vec<gimple> &v = ctx->asan_check_map.get_or_insert (ptr);
-  if (v.is_empty ())
-    {
-      /* For this PTR we don't have any ASAN_CHECK stmts recorded, so there's
-         nothing to optimize yet.  */
-      v.safe_push (stmt);
-      return false;
-    }
-
-  /* We already have recorded a ASAN_CHECK check for this pointer.  Perhaps
-     we can drop this one.  But only if this check doesn't specify larger
-     size.  */
-  while (!v.is_empty ())
-    {
-      gimple g = v.last ();
-      /* Remove statements for BBs that have been already processed.  */
-      sanopt_info *si = (sanopt_info *) gimple_bb (g)->aux;
-      if (si->visited_p)
-        v.pop ();
-      else
-        break;
-    }
-
   unsigned int i;
   gimple g;
   gimple to_pop = NULL;
@@ -323,17 +323,9 @@ maybe_optimize_asan_check_ifn (struct sanopt_ctx *ctx, gimple stmt)
          continue;
        }

-      if (TREE_CODE (len) != INTEGER_CST)
-        {
-          /* If there is some stmts not followed by freeing call event
-             for ptr already in the current bb, no need to insert anything.
-             Non-constant len is treated just as length 1.  */
-          if (gbb == bb)
-            return false;
-          break;
-        }
-
       tree glen = gimple_call_arg (g, 2);
+      gcc_assert (TREE_CODE (glen) == INTEGER_CST);
+
       /* If we've checked only smaller length than we want to check now,
          we can't remove the current stmt.  If g is in the same basic block,
          we want to remove it though, as the current stmt is better.  */
@@ -383,8 +375,70 @@ maybe_optimize_asan_check_ifn (struct sanopt_ctx *ctx, gimple stmt)
        v.truncate (j);
     }

+  return remove;
+}
+
+/* Optimize away redundant ASAN_CHECK calls.  */
+
+static bool
+maybe_optimize_asan_check_ifn (struct sanopt_ctx *ctx, gimple stmt)
+{
+  gcc_assert (gimple_call_num_args (stmt) == 4);
+  tree ptr = gimple_call_arg (stmt, 1);
+  tree len = gimple_call_arg (stmt, 2);
+  basic_block bb = gimple_bb (stmt);
+  sanopt_info *info = (sanopt_info *) bb->aux;
+
+  if (TREE_CODE (len) != INTEGER_CST)
+    return false;
+  if (integer_zerop (len))
+    return false;
+
+  gimple_set_uid (stmt, info->freeing_call_events);
+
+  auto_vec<gimple> *ptr_checks = &ctx->asan_check_map.get_or_insert (ptr);
+
+  tree base_addr = maybe_get_single_definition (ptr);
+  auto_vec<gimple> *base_checks = NULL;
+  if (base_addr)
+    {
+      base_checks = &ctx->asan_check_map.get_or_insert (base_addr);
+      /* Original pointer might have been invalidated.  */
+      ptr_checks = ctx->asan_check_map.get (ptr);
+    }
+
+  gimple g = maybe_get_dominating_check (*ptr_checks);
+
+  if (!g && base_checks)
+    /* Try with base address as well.  */
+    g = maybe_get_dominating_check (*base_checks);
+
+  if (!g)
+    {
+      /* For this PTR we don't have any ASAN_CHECK stmts recorded, so there's
+         nothing to optimize yet.  */
+      ptr_checks->safe_push (stmt);
+      if (base_checks)
+        base_checks->safe_push (stmt);
+      return false;
+    }
+
+  bool remove = false;
+
+  if (ptr_checks)
+    remove = can_remove_asan_check (*ptr_checks, len, bb);
+
+  if (!remove && base_checks)
+    /* Try with base address as well.  */
+    remove = can_remove_asan_check (*base_checks, len, bb);
+
   if (!remove)
-    v.safe_push (stmt);
+    {
+      ptr_checks->safe_push (stmt);
+      if (base_checks)
+        base_checks->safe_push (stmt);
+    }
+
   return remove;
 }

@@ -404,10 +458,7 @@ sanopt_optimize_walker (basic_block bb, struct sanopt_ctx *ctx)
   basic_block son;
   gimple_stmt_iterator gsi;
   sanopt_info *info = (sanopt_info *) bb->aux;
-  bool asan_check_optimize
-    = (flag_sanitize & SANITIZE_ADDRESS)
-      && ((flag_sanitize & flag_sanitize_recover
-           & SANITIZE_KERNEL_ADDRESS) == 0);
+  bool asan_check_optimize = (flag_sanitize & SANITIZE_ADDRESS) != 0;

   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
     {
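
Illustration (not part of the commit): the sketch below shows, in plain C with hand-written GIMPLE in comments, the kind of redundancy the new maybe_get_single_definition / tree_map_traits machinery targets.  The struct, the function name use_twice, the <flags> placeholder and the exact GIMPLE shape are hypothetical; whether a particular source program really produces two ASAN_CHECK calls like this depends on the optimization level and on the passes that run before sanopt.

/* Hypothetical example only -- not compiler output and not taken from the
   GCC testsuite.  Assumes a build with -fsanitize=address; the ASAN_CHECK
   arguments are (flags, address, length, alignment) and the values shown
   are merely illustrative.  */

struct s
{
  int a;
};

int
use_twice (struct s *p)
{
  /* The asan pass guards each instrumented access with an internal
     ASAN_CHECK call, roughly:

         _1 = &p_2(D)->a;
         ASAN_CHECK (<flags>, _1, 4, 4);
         ...
         _3 = &p_2(D)->a;
         ASAN_CHECK (<flags>, _3, 4, 4);

     _1 and _3 are distinct SSA names, so a map keyed only on the pointer
     argument never recognizes the second check as a repeat of the first.
     maybe_get_single_definition () resolves each SSA name to its single
     defining expression (&p->a here), and tree_map_traits hashes and
     compares those trees structurally via iterative_hash_expr and
     operand_equal_p, so the dominated check can be dropped.  */
  p->a = 1;
  return p->a;
}

Keying asan_check_map by the defining expression as well as by the SSA name is what forces the custom traits: once keys are arbitrary trees rather than pointer-unique SSA names, structural hashing and operand_equal_p comparison are needed for lookups to hit.  The final sanopt_optimize_walker hunk additionally drops the old guard that switched this optimization off when ASan recovery was enabled, so the cleanup now also runs in that mode.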