Richi has asked that we break the wide-int patch so that the individual port and front end maintainers can review their parts without having to go through the entire patch. This patch covers the tree-vect code.
Ok?
* tree-vect-data-refs.c (vect_prune_runtime_alias_test_list): Use wide-int interfaces. (vect_check_gather): Likewise. * tree-vect-generic.c (build_replicated_const): Use wide-int interfaces. (expand_vector_divmod): Likewise. * tree-vect-loop.c (vect_transform_loop): Use wide-int interfaces. * tree-vect-loop-manip.c (vect_do_peeling_for_loop_bound): Use wide-int interfaces. (vect_do_peeling_for_alignment): Likewise. * tree-vect-patterns.c (vect_recog_divmod_pattern): Use wide-int interfaces. diff --git a/gcc/tree-vect-data-refs.c b/gcc/tree-vect-data-refs.c index c1eb455..f8c8442 100644 --- a/gcc/tree-vect-data-refs.c +++ b/gcc/tree-vect-data-refs.c @@ -2904,15 +2904,13 @@ vect_prune_runtime_alias_test_list (loop_vec_info loop_vinfo) */ - HOST_WIDE_INT - min_seg_len_b = (TREE_CODE (dr_b1->seg_len) == INTEGER_CST) ? - TREE_INT_CST_LOW (dr_b1->seg_len) : - vect_factor; + HOST_WIDE_INT min_seg_len_b = (tree_fits_shwi_p (dr_b1->seg_len) + ? tree_to_shwi (dr_b1->seg_len) + : vect_factor); if (diff <= min_seg_len_b - || (TREE_CODE (dr_a1->seg_len) == INTEGER_CST - && diff - (HOST_WIDE_INT) TREE_INT_CST_LOW (dr_a1->seg_len) < - min_seg_len_b)) + || (tree_fits_shwi_p (dr_a1->seg_len) + && diff - tree_to_shwi (dr_a1->seg_len) < min_seg_len_b)) { dr_a1->seg_len = size_binop (PLUS_EXPR, dr_a2->seg_len, size_int (diff)); @@ -2975,8 +2973,8 @@ vect_check_gather (gimple stmt, loop_vec_info loop_vinfo, tree *basep, { if (off == NULL_TREE) { - double_int moff = mem_ref_offset (base); - off = double_int_to_tree (sizetype, moff); + offset_int moff = mem_ref_offset (base); + off = wide_int_to_tree (sizetype, moff); } else off = size_binop (PLUS_EXPR, off, diff --git a/gcc/tree-vect-generic.c b/gcc/tree-vect-generic.c index fc1c7b1..3679926 100644 --- a/gcc/tree-vect-generic.c +++ b/gcc/tree-vect-generic.c @@ -52,9 +52,10 @@ static tree build_replicated_const (tree type, tree inner_type, HOST_WIDE_INT value) { int width = tree_to_uhwi (TYPE_SIZE (inner_type)); - int n = 
HOST_BITS_PER_WIDE_INT / width; - unsigned HOST_WIDE_INT low, high, mask; - tree ret; + int n = TYPE_PRECISION (type) / width; + unsigned HOST_WIDE_INT low, mask; + HOST_WIDE_INT a[WIDE_INT_MAX_ELTS]; + int i; gcc_assert (n); @@ -66,17 +67,11 @@ build_replicated_const (tree type, tree inner_type, HOST_WIDE_INT value) low = (unsigned HOST_WIDE_INT) ~0 / mask * (value & mask); } - if (TYPE_PRECISION (type) < HOST_BITS_PER_WIDE_INT) - low &= ((HOST_WIDE_INT)1 << TYPE_PRECISION (type)) - 1, high = 0; - else if (TYPE_PRECISION (type) == HOST_BITS_PER_WIDE_INT) - high = 0; - else if (TYPE_PRECISION (type) == HOST_BITS_PER_DOUBLE_INT) - high = low; - else - gcc_unreachable (); + for (i = 0; i < n; i++) + a[i] = low; - ret = build_int_cst_wide (type, low, high); - return ret; + return wide_int_to_tree + (type, wide_int::from_array (a, n, TYPE_PRECISION (type))); } static GTY(()) tree vector_inner_type; @@ -410,7 +405,8 @@ expand_vector_divmod (gimple_stmt_iterator *gsi, tree type, tree op0, unsigned HOST_WIDE_INT *mulc = XALLOCAVEC (unsigned HOST_WIDE_INT, nunits); int prec = TYPE_PRECISION (TREE_TYPE (type)); int dummy_int; - unsigned int i, unsignedp = TYPE_UNSIGNED (TREE_TYPE (type)); + unsigned int i; + signop sign_p = TYPE_SIGN (TREE_TYPE (type)); unsigned HOST_WIDE_INT mask = GET_MODE_MASK (TYPE_MODE (TREE_TYPE (type))); tree *vec; tree cur_op, mulcst, tem; @@ -452,7 +448,7 @@ expand_vector_divmod (gimple_stmt_iterator *gsi, tree type, tree op0, } if (mode == -2) continue; - if (unsignedp) + if (sign_p == UNSIGNED) { unsigned HOST_WIDE_INT mh; unsigned HOST_WIDE_INT d = TREE_INT_CST_LOW (cst) & mask; @@ -581,7 +577,7 @@ expand_vector_divmod (gimple_stmt_iterator *gsi, tree type, tree op0, if (use_pow2) { tree addend = NULL_TREE; - if (!unsignedp) + if (sign_p == SIGNED) { tree uns_type; @@ -633,7 +629,7 @@ expand_vector_divmod (gimple_stmt_iterator *gsi, tree type, tree op0, } if (code == TRUNC_DIV_EXPR) { - if (unsignedp) + if (sign_p == UNSIGNED) { /* q = op0 >> 
shift; */ cur_op = add_rshift (gsi, type, op0, shifts); @@ -667,7 +663,7 @@ expand_vector_divmod (gimple_stmt_iterator *gsi, tree type, tree op0, if (op != unknown_optab && optab_handler (op, TYPE_MODE (type)) != CODE_FOR_nothing) { - if (unsignedp) + if (sign_p == UNSIGNED) /* r = op0 & mask; */ return gimplify_build2 (gsi, BIT_AND_EXPR, type, op0, mask); else if (addend != NULL_TREE) @@ -708,7 +704,7 @@ expand_vector_divmod (gimple_stmt_iterator *gsi, tree type, tree op0, switch (mode) { case 0: - gcc_assert (unsignedp); + gcc_assert (sign_p == UNSIGNED); /* t1 = oprnd0 >> pre_shift; t2 = t1 h* ml; q = t2 >> post_shift; */ @@ -717,7 +713,7 @@ expand_vector_divmod (gimple_stmt_iterator *gsi, tree type, tree op0, return NULL_TREE; break; case 1: - gcc_assert (unsignedp); + gcc_assert (sign_p == UNSIGNED); for (i = 0; i < nunits; i++) { shift_temps[i] = 1; @@ -728,7 +724,7 @@ expand_vector_divmod (gimple_stmt_iterator *gsi, tree type, tree op0, case 3: case 4: case 5: - gcc_assert (!unsignedp); + gcc_assert (sign_p == SIGNED); for (i = 0; i < nunits; i++) shift_temps[i] = prec - 1; break; diff --git a/gcc/tree-vect-loop-manip.c b/gcc/tree-vect-loop-manip.c index 289e852..22d936e 100644 --- a/gcc/tree-vect-loop-manip.c +++ b/gcc/tree-vect-loop-manip.c @@ -1675,7 +1675,7 @@ vect_do_peeling_for_loop_bound (loop_vec_info loop_vinfo, : LOOP_VINFO_VECT_FACTOR (loop_vinfo)) - 2; if (check_profitability) max_iter = MAX (max_iter, (int) th - 1); - record_niter_bound (new_loop, double_int::from_shwi (max_iter), false, true); + record_niter_bound (new_loop, max_iter, false, true); dump_printf (MSG_NOTE, "Setting upper bound of nb iterations for epilogue " "loop to %d\n", max_iter); @@ -1911,7 +1911,7 @@ vect_do_peeling_for_alignment (loop_vec_info loop_vinfo, tree ni_name, max_iter = LOOP_VINFO_VECT_FACTOR (loop_vinfo) - 2; if (check_profitability) max_iter = MAX (max_iter, (int) th - 1); - record_niter_bound (new_loop, double_int::from_shwi (max_iter), false, true); + 
record_niter_bound (new_loop, max_iter, false, true); dump_printf (MSG_NOTE, "Setting upper bound of nb iterations for prologue " "loop to %d\n", max_iter); diff --git a/gcc/tree-vect-loop.c b/gcc/tree-vect-loop.c index bcd3516..ab8e0f2 100644 --- a/gcc/tree-vect-loop.c +++ b/gcc/tree-vect-loop.c @@ -6030,19 +6030,17 @@ vect_transform_loop (loop_vec_info loop_vinfo) scale_loop_profile (loop, GCOV_COMPUTE_SCALE (1, vectorization_factor), expected_iterations / vectorization_factor); loop->nb_iterations_upper_bound - = loop->nb_iterations_upper_bound.udiv (double_int::from_uhwi (vectorization_factor), - FLOOR_DIV_EXPR); + = wi::udiv_floor (loop->nb_iterations_upper_bound, vectorization_factor); if (LOOP_VINFO_PEELING_FOR_GAPS (loop_vinfo) - && loop->nb_iterations_upper_bound != double_int_zero) - loop->nb_iterations_upper_bound = loop->nb_iterations_upper_bound - double_int_one; + && loop->nb_iterations_upper_bound != 0) + loop->nb_iterations_upper_bound = loop->nb_iterations_upper_bound - 1; if (loop->any_estimate) { loop->nb_iterations_estimate - = loop->nb_iterations_estimate.udiv (double_int::from_uhwi (vectorization_factor), - FLOOR_DIV_EXPR); + = wi::udiv_floor (loop->nb_iterations_estimate, vectorization_factor); if (LOOP_VINFO_PEELING_FOR_GAPS (loop_vinfo) - && loop->nb_iterations_estimate != double_int_zero) - loop->nb_iterations_estimate = loop->nb_iterations_estimate - double_int_one; + && loop->nb_iterations_estimate != 0) + loop->nb_iterations_estimate = loop->nb_iterations_estimate - 1; } if (dump_enabled_p ()) diff --git a/gcc/tree-vect-patterns.c b/gcc/tree-vect-patterns.c index de854e1..da4cc94 100644 --- a/gcc/tree-vect-patterns.c +++ b/gcc/tree-vect-patterns.c @@ -2261,13 +2261,13 @@ vect_recog_divmod_pattern (vec<gimple> *stmts, else t3 = t2; - double_int oprnd0_min, oprnd0_max; + widest_int oprnd0_min, oprnd0_max; int msb = 1; if (get_range_info (oprnd0, &oprnd0_min, &oprnd0_max) == VR_RANGE) { - if (!oprnd0_min.is_negative ()) + if (!wi::neg_p 
(oprnd0_min)) msb = 0; - else if (oprnd0_max.is_negative ()) + else if (wi::neg_p (oprnd0_max)) msb = -1; }