+2020-01-20 Martin Liska <mliska@suse.cz>
+
+ PR tree-optimization/93199
+ * tree-eh.c (struct leh_state): Add
+ new field outer_non_cleanup.
+ (cleanup_is_dead_in): Pass leh_state instead of eh_region.
+ Add a checking assert that state->outer_non_cleanup
+ points to the outer non-cleanup region.
+ (lower_try_finally): Record outer_non_cleanup
+ for this_state.
+ (lower_catch): Likewise.
+ (lower_eh_filter): Likewise.
+ (lower_eh_must_not_throw): Likewise.
+ (lower_cleanup): Likewise.
+
2020-01-20 Richard Biener <rguenther@suse.de>
PR tree-optimization/93094
split out into a separate structure so that we don't have to
copy so much when processing other nodes. */
struct leh_tf_state *tf;
+
+ /* Outer non-cleanup region. */
+ eh_region outer_non_cleanup;
};
struct leh_tf_state
return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}
-/* REG is the enclosing region for a possible cleanup region, or the region
+/* STATE is the current lowering state; its cur_region is the enclosing
+   region for a possible cleanup region, or the region
itself. Returns TRUE if such a region would be unreachable.
Cleanup regions within a must-not-throw region aren't actually reachable
even if there are throwing stmts within them, because the personality
routine will call terminate before unwinding. */
static bool
-cleanup_is_dead_in (eh_region reg)
+cleanup_is_dead_in (leh_state *state)
{
- while (reg && reg->type == ERT_CLEANUP)
- reg = reg->outer;
+ if (flag_checking)
+ {
+ eh_region reg = state->cur_region;
+ while (reg && reg->type == ERT_CLEANUP)
+ reg = reg->outer;
+
+ gcc_assert (reg == state->outer_non_cleanup);
+ }
+
+ eh_region reg = state->outer_non_cleanup;
return (reg && reg->type == ERT_MUST_NOT_THROW);
}
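For illustration only, not part of the patch: a minimal, self-contained sketch
of the caching idea, using simplified stand-in types (the names region, state,
cleanup_is_dead_in_old/new below are hypothetical, not GCC's).  It shows why
recording the first non-cleanup ancestor in the lowering state turns the query
from a walk over enclosing ERT_CLEANUP regions into a single field read; the
flag_checking loop in the patch merely verifies the cached value matches what
the old walk would have computed.

/* Illustrative sketch only.  The real eh_region and leh_state in tree-eh.c
   carry many more members; these structs keep just what the example needs.  */

#include <assert.h>
#include <stdbool.h>
#include <stddef.h>

enum region_type { ERT_CLEANUP, ERT_TRY, ERT_MUST_NOT_THROW };

struct region
{
  enum region_type type;
  struct region *outer;
};

struct state
{
  struct region *cur_region;
  /* Cached first non-cleanup ancestor of cur_region (or cur_region itself
     if it is not a cleanup region).  */
  struct region *outer_non_cleanup;
};

/* Old behaviour: walk outward past cleanup regions on every query.  */
static bool
cleanup_is_dead_in_old (struct region *reg)
{
  while (reg && reg->type == ERT_CLEANUP)
    reg = reg->outer;
  return reg && reg->type == ERT_MUST_NOT_THROW;
}

/* New behaviour: the walk was done once when the state was set up,
   so the query is a single field read.  */
static bool
cleanup_is_dead_in_new (struct state *state)
{
  struct region *reg = state->outer_non_cleanup;
  return reg && reg->type == ERT_MUST_NOT_THROW;
}

int
main (void)
{
  /* Two nested cleanup regions inside a must-not-throw region.  */
  struct region mnt = { ERT_MUST_NOT_THROW, NULL };
  struct region c1 = { ERT_CLEANUP, &mnt };
  struct region c2 = { ERT_CLEANUP, &c1 };
  struct state st = { &c2, &mnt };

  assert (cleanup_is_dead_in_old (st.cur_region)
	  == cleanup_is_dead_in_new (&st));
  return 0;
}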
this_tf.try_finally_expr = tp;
this_tf.top_p = tp;
this_tf.outer = state;
- if (using_eh_for_cleanups_p () && !cleanup_is_dead_in (state->cur_region))
+ if (using_eh_for_cleanups_p () && !cleanup_is_dead_in (state))
{
this_tf.region = gen_eh_region_cleanup (state->cur_region);
this_state.cur_region = this_tf.region;
this_state.cur_region = state->cur_region;
}
+ this_state.outer_non_cleanup = state->outer_non_cleanup;
this_state.ehp_region = state->ehp_region;
this_state.tf = &this_tf;
{
try_region = gen_eh_region_try (state->cur_region);
this_state.cur_region = try_region;
+ this_state.outer_non_cleanup = this_state.cur_region;
}
lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
emit_resx (&new_seq, try_region);
this_state.cur_region = state->cur_region;
+ this_state.outer_non_cleanup = state->outer_non_cleanup;
this_state.ehp_region = try_region;
/* Add eh_seq from lowering EH in the cleanup sequence after the cleanup
this_region = gen_eh_region_allowed (state->cur_region,
gimple_eh_filter_types (inner));
this_state.cur_region = this_region;
+ this_state.outer_non_cleanup = this_state.cur_region;
}
lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
TREE_USED (this_region->u.must_not_throw.failure_decl) = 1;
this_state.cur_region = this_region;
+ this_state.outer_non_cleanup = this_state.cur_region;
}
lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
eh_region this_region = NULL;
struct leh_tf_state fake_tf;
gimple_seq result;
- bool cleanup_dead = cleanup_is_dead_in (state->cur_region);
+ bool cleanup_dead = cleanup_is_dead_in (state);
if (flag_exceptions && !cleanup_dead)
{
this_region = gen_eh_region_cleanup (state->cur_region);
this_state.cur_region = this_region;
+ this_state.outer_non_cleanup = state->outer_non_cleanup;
}
lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));