From: Jakub Jelinek
Date: Tue, 5 Jan 2021 15:37:40 +0000 (+0100)
Subject: reassoc: Fix reassociation on 32-bit hosts with > 32767 bbs [PR98514]
X-Git-Url: https://git.libre-soc.org/?a=commitdiff_plain;h=4ddee425b8c427d3cc13c49b26f442313e239572;p=gcc.git

reassoc: Fix reassociation on 32-bit hosts with > 32767 bbs [PR98514]

Apparently reassoc ICEs on large functions (more than 32767 basic blocks
with something to reassociate in them).  The problem is that the pass uses
the long type to store the ranks, and the bb ranks are
(number of SSA_NAMEs with default defs + 2 + bb->index) << 16, so with
many basic blocks we overflow the ranks and then trip assertions that the
rank is not negative (a standalone snippet illustrating the wraparound
follows the patch below).

The following patch just uses int64_t instead of long in the pass.  Yes,
it means slightly higher memory consumption (one array indexed by
bb->index is twice as large, and one hash_map from trees to the ranks
will grow by 50%), but I think that is better than punting on
reassociation of large functions on 32-bit hosts and making the pass
inconsistent, e.g. when cross-compiling.  Given that vec.h uses unsigned
for vector element counts, we don't really support more than 4G of
SSA_NAMEs or more than 2G of basic blocks in a function, so even with the
<< 16 we can't really overflow the int64_t rank counters.

2021-01-05  Jakub Jelinek

	PR tree-optimization/98514
	* tree-ssa-reassoc.c (bb_rank): Change type from long * to
	int64_t *.
	(operand_rank): Change type from hash_map<tree, long> to
	hash_map<tree, int64_t>.
	(phi_rank): Change return type from long to int64_t.
	(loop_carried_phi): Change block_rank variable type from long to
	int64_t.
	(propagate_rank): Change return type, rank parameter type and
	op_rank variable type from long to int64_t.
	(find_operand_rank): Change return type from long to int64_t and
	change slot variable type from long * to int64_t *.
	(insert_operand_rank): Change rank parameter type from long to
	int64_t.
	(get_rank): Change return type and rank variable type from long
	to int64_t.  Use PRId64 instead of %ld to print the rank.
	(init_reassoc): Change rank variable type from long to int64_t
	and adjust correspondingly bb_rank and operand_rank
	initialization.
---

diff --git a/gcc/tree-ssa-reassoc.c b/gcc/tree-ssa-reassoc.c
index e3e2555aff8..fac74a3d83c 100644
--- a/gcc/tree-ssa-reassoc.c
+++ b/gcc/tree-ssa-reassoc.c
@@ -200,10 +200,10 @@ static unsigned int next_operand_entry_id;
 /* Starting rank number for a given basic block, so that we can rank
    operations using unmovable instructions in that BB based on the bb
    depth.  */
-static long *bb_rank;
+static int64_t *bb_rank;
 
 /* Operand->rank hashtable.  */
-static hash_map<tree, long> *operand_rank;
+static hash_map<tree, int64_t> *operand_rank;
 
 /* Vector of SSA_NAMEs on which after reassociate_bb is done with
    all basic blocks the CFG should be adjusted - basic blocks
@@ -212,7 +212,7 @@ static hash_map<tree, long> *operand_rank;
 static vec<tree> reassoc_branch_fixups;
 
 /* Forward decls.  */
-static long get_rank (tree);
+static int64_t get_rank (tree);
 static bool reassoc_stmt_dominates_stmt_p (gimple *, gimple *);
 
 /* Wrapper around gsi_remove, which adjusts gimple_uid of debug stmts
@@ -257,7 +257,7 @@ reassoc_remove_stmt (gimple_stmt_iterator *gsi)
    calculated into an accumulator variable to be independent for each
    iteration of the loop.  If STMT is some other phi, the rank is the
    block rank of its containing block.  */
-static long
+static int64_t
 phi_rank (gimple *stmt)
 {
   basic_block bb = gimple_bb (stmt);
@@ -311,7 +311,7 @@ static bool
 loop_carried_phi (tree exp)
 {
   gimple *phi_stmt;
-  long block_rank;
+  int64_t block_rank;
 
   if (TREE_CODE (exp) != SSA_NAME
       || SSA_NAME_IS_DEFAULT_DEF (exp))
@@ -337,10 +337,10 @@ loop_carried_phi (tree exp)
    from expression OP.  For most operands, this is just the rank of OP.
    For loop-carried phis, the value is zero to avoid undoing the bias
    in favor of the phi.  */
-static long
-propagate_rank (long rank, tree op)
+static int64_t
+propagate_rank (int64_t rank, tree op)
 {
-  long op_rank;
+  int64_t op_rank;
 
   if (loop_carried_phi (op))
     return rank;
@@ -352,17 +352,17 @@ propagate_rank (long rank, tree op)
 
 /* Look up the operand rank structure for expression E.  */
 
-static inline long
+static inline int64_t
 find_operand_rank (tree e)
 {
-  long *slot = operand_rank->get (e);
+  int64_t *slot = operand_rank->get (e);
   return slot ? *slot : -1;
 }
 
 /* Insert {E,RANK} into the operand rank hashtable.  */
 
 static inline void
-insert_operand_rank (tree e, long rank)
+insert_operand_rank (tree e, int64_t rank)
 {
   gcc_assert (rank > 0);
   gcc_assert (!operand_rank->put (e, rank));
@@ -370,7 +370,7 @@ insert_operand_rank (tree e, int64_t rank)
 
 /* Given an expression E, return the rank of the expression.  */
 
-static long
+static int64_t
 get_rank (tree e)
 {
   /* SSA_NAME's have the rank of the expression they are the result
@@ -414,7 +414,7 @@ get_rank (tree e)
     {
       ssa_op_iter iter;
       gimple *stmt;
-      long rank;
+      int64_t rank;
       tree op;
 
       /* If we already have a rank for this expression, use that.  */
@@ -448,7 +448,7 @@ get_rank (tree e)
         {
           fprintf (dump_file, "Rank for ");
           print_generic_expr (dump_file, e);
-          fprintf (dump_file, " is %ld\n", rank);
+          fprintf (dump_file, " is %" PRId64 "\n", rank);
         }
 
       /* Note the rank in the hashtable so we don't recompute it.  */
@@ -6774,7 +6774,7 @@ static void
 init_reassoc (void)
 {
   int i;
-  long rank = 2;
+  int64_t rank = 2;
   int *bbs = XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
 
   /* Find the loops, so that we can prevent moving calculations in
@@ -6788,8 +6788,8 @@ init_reassoc (void)
   /* Reverse RPO (Reverse Post Order) will give us something where
      deeper loops come later.  */
   pre_and_rev_post_order_compute (NULL, bbs, false);
-  bb_rank = XCNEWVEC (long, last_basic_block_for_fn (cfun));
-  operand_rank = new hash_map<tree, long>;
+  bb_rank = XCNEWVEC (int64_t, last_basic_block_for_fn (cfun));
+  operand_rank = new hash_map<tree, int64_t>;
 
   /* Give each default definition a distinct rank.  This includes
      parameters and the static chain.  Walk backwards over all
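
To make the overflow concrete, here is the standalone snippet referenced
above (not part of the patch): a minimal sketch in which int32_t stands in
for long on a 32-bit (ILP32) host, and N_DEFAULT_DEFS is a made-up count
of SSA_NAME default definitions; any small value shows the same
wraparound.

/* Minimal sketch, not part of the patch: reproduces the rank overflow
   described in the commit message.  int32_t models the 32-bit host's
   long; N_DEFAULT_DEFS is a hypothetical default-def count.  */
#include <inttypes.h>
#include <stdio.h>

#define N_DEFAULT_DEFS 10

int
main (void)
{
  int bb_index = 32768;  /* First bb->index for which the rank wraps.  */

  /* Pre-patch arithmetic on a 32-bit host.  The shift is done on
     uint32_t so the wraparound is well defined in C; the converted
     rank is negative, which trips gcc_assert (rank > 0) in the pass.  */
  int32_t old_rank
    = (int32_t) ((uint32_t) (N_DEFAULT_DEFS + 2 + bb_index) << 16);

  /* Post-patch arithmetic.  With at most 2G basic blocks and 4G
     SSA_NAMEs per function, the << 16 cannot overflow int64_t.  */
  int64_t new_rank = (int64_t) (N_DEFAULT_DEFS + 2 + bb_index) << 16;

  printf ("32-bit rank: %" PRId32 "\n", old_rank);  /* -2146697216 */
  printf ("64-bit rank: %" PRId64 "\n", new_rank);  /* 2148270080 */
  return 0;
}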