On Tue, 5 Jan 2021, Jakub Jelinek wrote:

> Hi!
> 
> Apparently reassoc ICEs on large functions (more than 32767 basic blocks
> with something to reassociate in those).
> The problem is that the pass uses the long type to store the ranks, and
> the bb ranks are (number of SSA_NAMEs with default defs + 2 + bb->index) << 16,
> so with many basic blocks we overflow the ranks on hosts where long is
> 32 bits and then trip the assertions that the rank is not negative.
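> 
> For illustration only, a minimal standalone sketch of the overflow; the
> default-def count and bb->index below are made-up numbers, not taken from
> any real testcase:
> 
>   #include <stdio.h>
> 
>   int
>   main (void)
>   {
>     /* Hypothetical values: 10 SSA_NAME default defs and bb->index 40000,
>        i.e. a function with far more than 32767 basic blocks.  */
>     long n_default_defs = 10;
>     long bb_index = 40000;
>     /* The bb rank as reassoc computes it.  On a host where long is
>        32 bits, 40012 << 16 == 2622226432 exceeds LONG_MAX (2147483647),
>        so the shift overflows (formally undefined behaviour) and in
>        practice yields a negative rank, which then trips the pass's
>        rank > 0 assertions.  */
>     long rank = (n_default_defs + 2 + bb_index) << 16;
>     printf ("%ld\n", rank);
>     return 0;
>   }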
> 
> The following patch just uses HOST_WIDE_INT instead of long in the pass.
> Yes, it means slightly higher memory consumption (one array indexed by
> bb->index is twice as large, and one hash_map from trees to the ranks
> grows by 50%), but I think that is better than punting on reassociation
> of large functions on 32-bit hosts and making the behavior inconsistent
> e.g. when cross-compiling.  Given that vec.h uses unsigned for vector
> element counts, we don't really support more than 4G of SSA_NAMEs or more
> than 2G of basic blocks in a function, so even with the << 16 we can't
> really overflow the HOST_WIDE_INT rank counters.
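> 
> A hypothetical bound check (again not part of the patch), with int64_t
> standing in for the 64-bit HOST_WIDE_INT:
> 
>   #include <stdint.h>
>   #include <inttypes.h>
>   #include <stdio.h>
> 
>   int
>   main (void)
>   {
>     /* Upper bounds implied by vec.h's unsigned element counts:
>        at most ~4G SSA_NAMEs and ~2G basic blocks per function.  */
>     int64_t max_ssa_names = (int64_t) 1 << 32;
>     int64_t max_bbs = (int64_t) 1 << 31;
>     /* Worst-case rank is roughly (2^32 + 2 + 2^31) << 16, i.e. about
>        2^48.6, far below INT64_MAX (~9.2e18), so a 64-bit rank counter
>        cannot overflow.  */
>     int64_t worst = (max_ssa_names + 2 + max_bbs) << 16;
>     printf ("%" PRId64 "\n", worst);
>     return 0;
>   }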
> 
> Bootstrapped/regtested on x86_64-linux and i686-linux, ok for trunk?

OK (can you use int64_t instead?)

Thanks,
Richard.

> 2021-01-05  Jakub Jelinek  <ja...@redhat.com>
> 
>       PR tree-optimization/98514
>       * tree-ssa-reassoc.c (bb_rank): Change type from long * to
>       HOST_WIDE_INT *.
>       (operand_rank): Change type from hash_map<tree, long> to
>       hash_map<tree, HOST_WIDE_INT>.
>       (phi_rank): Change return type from long to HOST_WIDE_INT.
>       (loop_carried_phi): Change block_rank variable type from long to
>       HOST_WIDE_INT.
>       (propagate_rank): Change return type, rank parameter type and
>       op_rank variable type from long to HOST_WIDE_INT.
>       (find_operand_rank): Change return type from long to HOST_WIDE_INT
>       and change slot variable type from long * to HOST_WIDE_INT *.
>       (insert_operand_rank): Change rank parameter type from long to
>       HOST_WIDE_INT.
>       (get_rank): Change return type and rank variable type from long to
>       HOST_WIDE_INT.  Use HOST_WIDE_INT_PRINT_DEC instead of %ld to print
>       the rank.
>       (init_reassoc): Change rank variable type from long to HOST_WIDE_INT
>       and adjust bb_rank and operand_rank initialization correspondingly.
> 
> --- gcc/tree-ssa-reassoc.c.jj 2021-01-04 10:25:37.153252851 +0100
> +++ gcc/tree-ssa-reassoc.c    2021-01-04 17:01:15.211112328 +0100
> @@ -200,10 +200,10 @@ static unsigned int next_operand_entry_i
>  /* Starting rank number for a given basic block, so that we can rank
>     operations using unmovable instructions in that BB based on the bb
>     depth.  */
> -static long *bb_rank;
> +static HOST_WIDE_INT *bb_rank;
>  
>  /* Operand->rank hashtable.  */
> -static hash_map<tree, long> *operand_rank;
> +static hash_map<tree, HOST_WIDE_INT> *operand_rank;
>  
>  /* Vector of SSA_NAMEs on which after reassociate_bb is done with
>     all basic blocks the CFG should be adjusted - basic blocks
> @@ -212,7 +212,7 @@ static hash_map<tree, long> *operand_ran
>  static vec<tree> reassoc_branch_fixups;
>  
>  /* Forward decls.  */
> -static long get_rank (tree);
> +static HOST_WIDE_INT get_rank (tree);
>  static bool reassoc_stmt_dominates_stmt_p (gimple *, gimple *);
>  
>  /* Wrapper around gsi_remove, which adjusts gimple_uid of debug stmts
> @@ -257,7 +257,7 @@ reassoc_remove_stmt (gimple_stmt_iterato
>     calculated into an accumulator variable to be independent for each
>     iteration of the loop.  If STMT is some other phi, the rank is the
>     block rank of its containing block.  */
> -static long
> +static HOST_WIDE_INT
>  phi_rank (gimple *stmt)
>  {
>    basic_block bb = gimple_bb (stmt);
> @@ -311,7 +311,7 @@ static bool
>  loop_carried_phi (tree exp)
>  {
>    gimple *phi_stmt;
> -  long block_rank;
> +  HOST_WIDE_INT block_rank;
>  
>    if (TREE_CODE (exp) != SSA_NAME
>        || SSA_NAME_IS_DEFAULT_DEF (exp))
> @@ -337,10 +337,10 @@ loop_carried_phi (tree exp)
>     from expression OP.  For most operands, this is just the rank of OP.
>     For loop-carried phis, the value is zero to avoid undoing the bias
>     in favor of the phi.  */
> -static long
> -propagate_rank (long rank, tree op)
> +static HOST_WIDE_INT
> +propagate_rank (HOST_WIDE_INT rank, tree op)
>  {
> -  long op_rank;
> +  HOST_WIDE_INT op_rank;
>  
>    if (loop_carried_phi (op))
>      return rank;
> @@ -352,17 +352,17 @@ propagate_rank (long rank, tree op)
>  
>  /* Look up the operand rank structure for expression E.  */
>  
> -static inline long
> +static inline HOST_WIDE_INT
>  find_operand_rank (tree e)
>  {
> -  long *slot = operand_rank->get (e);
> +  HOST_WIDE_INT *slot = operand_rank->get (e);
>    return slot ? *slot : -1;
>  }
>  
>  /* Insert {E,RANK} into the operand rank hashtable.  */
>  
>  static inline void
> -insert_operand_rank (tree e, long rank)
> +insert_operand_rank (tree e, HOST_WIDE_INT rank)
>  {
>    gcc_assert (rank > 0);
>    gcc_assert (!operand_rank->put (e, rank));
> @@ -370,7 +370,7 @@ insert_operand_rank (tree e, long rank)
>  
>  /* Given an expression E, return the rank of the expression.  */
>  
> -static long
> +static HOST_WIDE_INT
>  get_rank (tree e)
>  {
>    /* SSA_NAME's have the rank of the expression they are the result
> @@ -414,7 +414,7 @@ get_rank (tree e)
>      {
>        ssa_op_iter iter;
>        gimple *stmt;
> -      long rank;
> +      HOST_WIDE_INT rank;
>        tree op;
>  
>        /* If we already have a rank for this expression, use that.  */
> @@ -448,7 +448,7 @@ get_rank (tree e)
>       {
>         fprintf (dump_file, "Rank for ");
>         print_generic_expr (dump_file, e);
> -       fprintf (dump_file, " is %ld\n", rank);
> +       fprintf (dump_file, " is " HOST_WIDE_INT_PRINT_DEC "\n", rank);
>       }
>  
>        /* Note the rank in the hashtable so we don't recompute it.  */
> @@ -6774,7 +6774,7 @@ static void
>  init_reassoc (void)
>  {
>    int i;
> -  long rank = 2;
> +  HOST_WIDE_INT rank = 2;
>    int *bbs = XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
>  
>    /* Find the loops, so that we can prevent moving calculations in
> @@ -6788,8 +6788,8 @@ init_reassoc (void)
>    /* Reverse RPO (Reverse Post Order) will give us something where
>       deeper loops come later.  */
>    pre_and_rev_post_order_compute (NULL, bbs, false);
> -  bb_rank = XCNEWVEC (long, last_basic_block_for_fn (cfun));
> -  operand_rank = new hash_map<tree, long>;
> +  bb_rank = XCNEWVEC (HOST_WIDE_INT, last_basic_block_for_fn (cfun));
> +  operand_rank = new hash_map<tree, HOST_WIDE_INT>;
>  
>    /* Give each default definition a distinct rank.  This includes
>       parameters and the static chain.  Walk backwards over all
> 
>       Jakub
> 
> 

-- 
Richard Biener <rguent...@suse.de>
SUSE Software Solutions Germany GmbH, Maxfeldstrasse 5, 90409 Nuernberg,
Germany; GF: Felix Imendörffer; HRB 36809 (AG Nuernberg)
