get_best_mode always returns a scalar_int_mode on success, so this patch makes that explicit in the type system. Also, the "largest_mode" argument is used simply to provide a maximum size, and in practice that size is always a compile-time constant, even when the concept of variable-sized modes is added later. The patch therefore passes the size directly.
2017-07-13 Richard Sandiford <richard.sandiford@linaro.org> Alan Hayward <alan.hayward@arm.com> David Sherwood <david.sherwood@arm.com> gcc/ * machmode.h (bit_field_mode_iterator::next_mode): Take a pointer to a scalar_int_mode instead of a machine_mode. (bit_field_mode_iterator::m_mode): Change type to opt_scalar_int_mode. (get_best_mode): Return a boolean and use a pointer argument to store the selected mode. Replace the limit mode parameter with a bit limit. * expmed.c (adjust_bit_field_mem_for_reg): Use scalar_int_mode for the values returned by bit_field_mode_iterator::next_mode. (store_bit_field): Update call to get_best_mode. (store_fixed_bit_field): Likewise. (extract_fixed_bit_field): Likewise. * expr.c (optimize_bitfield_assignment_op): Likewise. * fold-const.c (optimize_bit_field_compare): Likewise. (fold_truth_andor_1): Likewise. * stor-layout.c (bit_field_mode_iterator::next_mode): As above. Update for new type of m_mode. (get_best_mode): As above. Index: gcc/machmode.h =================================================================== --- gcc/machmode.h 2017-07-13 09:18:38.043865449 +0100 +++ gcc/machmode.h 2017-07-13 09:18:41.680558844 +0100 @@ -618,11 +618,11 @@ extern machine_mode mode_for_vector (mac bit_field_mode_iterator (HOST_WIDE_INT, HOST_WIDE_INT, HOST_WIDE_INT, HOST_WIDE_INT, unsigned int, bool); - bool next_mode (machine_mode *); + bool next_mode (scalar_int_mode *); bool prefer_smaller_modes (); private: - machine_mode m_mode; + opt_scalar_int_mode m_mode; /* We use signed values here because the bit position can be negative for invalid input such as gcc.dg/pr48335-8.c. */ HOST_WIDE_INT m_bitsize; @@ -636,11 +636,9 @@ extern machine_mode mode_for_vector (mac /* Find the best mode to use to access a bit field.
*/ -extern machine_mode get_best_mode (int, int, - unsigned HOST_WIDE_INT, - unsigned HOST_WIDE_INT, - unsigned int, - machine_mode, bool); +extern bool get_best_mode (int, int, unsigned HOST_WIDE_INT, + unsigned HOST_WIDE_INT, unsigned int, + unsigned HOST_WIDE_INT, bool, scalar_int_mode *); /* Determine alignment, 1<=result<=BIGGEST_ALIGNMENT. */ Index: gcc/expmed.c =================================================================== --- gcc/expmed.c 2017-07-13 09:18:41.241595261 +0100 +++ gcc/expmed.c 2017-07-13 09:18:41.678559010 +0100 @@ -459,7 +459,7 @@ adjust_bit_field_mem_for_reg (enum extra bit_field_mode_iterator iter (bitsize, bitnum, bitregion_start, bitregion_end, MEM_ALIGN (op0), MEM_VOLATILE_P (op0)); - machine_mode best_mode; + scalar_int_mode best_mode; if (iter.next_mode (&best_mode)) { /* We can use a memory in BEST_MODE. See whether this is true for @@ -477,7 +477,7 @@ adjust_bit_field_mem_for_reg (enum extra fieldmode)) limit_mode = insn.field_mode; - machine_mode wider_mode; + scalar_int_mode wider_mode; while (iter.next_mode (&wider_mode) && GET_MODE_SIZE (wider_mode) <= GET_MODE_SIZE (limit_mode)) best_mode = wider_mode; @@ -1092,7 +1092,8 @@ store_bit_field (rtx str_rtx, unsigned H bit region. 
*/ if (MEM_P (str_rtx) && bitregion_start > 0) { - machine_mode bestmode; + scalar_int_mode best_mode; + machine_mode addr_mode = VOIDmode; HOST_WIDE_INT offset, size; gcc_assert ((bitregion_start % BITS_PER_UNIT) == 0); @@ -1102,11 +1103,13 @@ store_bit_field (rtx str_rtx, unsigned H size = (bitnum + bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT; bitregion_end -= bitregion_start; bitregion_start = 0; - bestmode = get_best_mode (bitsize, bitnum, - bitregion_start, bitregion_end, - MEM_ALIGN (str_rtx), VOIDmode, - MEM_VOLATILE_P (str_rtx)); - str_rtx = adjust_bitfield_address_size (str_rtx, bestmode, offset, size); + if (get_best_mode (bitsize, bitnum, + bitregion_start, bitregion_end, + MEM_ALIGN (str_rtx), INT_MAX, + MEM_VOLATILE_P (str_rtx), &best_mode)) + addr_mode = best_mode; + str_rtx = adjust_bitfield_address_size (str_rtx, addr_mode, + offset, size); } if (!store_bit_field_1 (str_rtx, bitsize, bitnum, @@ -1140,10 +1143,10 @@ store_fixed_bit_field (rtx op0, unsigned if (GET_MODE_BITSIZE (mode) == 0 || GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (word_mode)) mode = word_mode; - mode = get_best_mode (bitsize, bitnum, bitregion_start, bitregion_end, - MEM_ALIGN (op0), mode, MEM_VOLATILE_P (op0)); - - if (mode == VOIDmode) + scalar_int_mode best_mode; + if (!get_best_mode (bitsize, bitnum, bitregion_start, bitregion_end, + MEM_ALIGN (op0), GET_MODE_BITSIZE (mode), + MEM_VOLATILE_P (op0), &best_mode)) { /* The only way this should occur is if the field spans word boundaries. 
*/ @@ -1152,7 +1155,7 @@ store_fixed_bit_field (rtx op0, unsigned return; } - op0 = narrow_bit_field_mem (op0, mode, bitsize, bitnum, &bitnum); + op0 = narrow_bit_field_mem (op0, best_mode, bitsize, bitnum, &bitnum); } store_fixed_bit_field_1 (op0, bitsize, bitnum, value, reverse); @@ -1942,11 +1945,9 @@ extract_fixed_bit_field (machine_mode tm { if (MEM_P (op0)) { - machine_mode mode - = get_best_mode (bitsize, bitnum, 0, 0, MEM_ALIGN (op0), word_mode, - MEM_VOLATILE_P (op0)); - - if (mode == VOIDmode) + scalar_int_mode mode; + if (!get_best_mode (bitsize, bitnum, 0, 0, MEM_ALIGN (op0), + BITS_PER_WORD, MEM_VOLATILE_P (op0), &mode)) /* The only way this should occur is if the field spans word boundaries. */ return extract_split_bit_field (op0, bitsize, bitnum, unsignedp, Index: gcc/expr.c =================================================================== --- gcc/expr.c 2017-07-13 09:18:39.589733813 +0100 +++ gcc/expr.c 2017-07-13 09:18:41.678559010 +0100 @@ -4682,13 +4682,14 @@ optimize_bitfield_assignment_op (unsigne unsigned HOST_WIDE_INT offset1; if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD) - str_mode = word_mode; - str_mode = get_best_mode (bitsize, bitpos, - bitregion_start, bitregion_end, - MEM_ALIGN (str_rtx), str_mode, 0); - if (str_mode == VOIDmode) + str_bitsize = BITS_PER_WORD; + + scalar_int_mode best_mode; + if (!get_best_mode (bitsize, bitpos, bitregion_start, bitregion_end, + MEM_ALIGN (str_rtx), str_bitsize, false, &best_mode)) return false; - str_bitsize = GET_MODE_BITSIZE (str_mode); + str_mode = best_mode; + str_bitsize = GET_MODE_BITSIZE (best_mode); offset1 = bitpos; bitpos %= str_bitsize; Index: gcc/fold-const.c =================================================================== --- gcc/fold-const.c 2017-07-13 09:18:38.661812628 +0100 +++ gcc/fold-const.c 2017-07-13 09:18:41.680558844 +0100 @@ -3966,7 +3966,8 @@ optimize_bit_field_compare (location_t l tree type = TREE_TYPE (lhs); tree unsigned_type; int const_p = TREE_CODE (rhs) == 
INTEGER_CST; - machine_mode lmode, rmode, nmode; + machine_mode lmode, rmode; + scalar_int_mode nmode; int lunsignedp, runsignedp; int lreversep, rreversep; int lvolatilep = 0, rvolatilep = 0; @@ -4013,12 +4014,11 @@ optimize_bit_field_compare (location_t l /* See if we can find a mode to refer to this field. We should be able to, but fail if we can't. */ - nmode = get_best_mode (lbitsize, lbitpos, bitstart, bitend, - const_p ? TYPE_ALIGN (TREE_TYPE (linner)) - : MIN (TYPE_ALIGN (TREE_TYPE (linner)), - TYPE_ALIGN (TREE_TYPE (rinner))), - word_mode, false); - if (nmode == VOIDmode) + if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend, + const_p ? TYPE_ALIGN (TREE_TYPE (linner)) + : MIN (TYPE_ALIGN (TREE_TYPE (linner)), + TYPE_ALIGN (TREE_TYPE (rinner))), + BITS_PER_WORD, false, &nmode)) return 0; /* Set signed and unsigned types of the precision of this mode for the @@ -5621,7 +5621,7 @@ fold_truth_andor_1 (location_t loc, enum int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp; int ll_reversep, lr_reversep, rl_reversep, rr_reversep; machine_mode ll_mode, lr_mode, rl_mode, rr_mode; - machine_mode lnmode, rnmode; + scalar_int_mode lnmode, rnmode; tree ll_mask, lr_mask, rl_mask, rr_mask; tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask; tree l_const, r_const; @@ -5807,10 +5807,9 @@ fold_truth_andor_1 (location_t loc, enum to be relative to a field of that size. 
*/ first_bit = MIN (ll_bitpos, rl_bitpos); end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize); - lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0, - TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode, - volatilep); - if (lnmode == VOIDmode) + if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0, + TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD, + volatilep, &lnmode)) return 0; lnbitsize = GET_MODE_BITSIZE (lnmode); @@ -5872,10 +5871,9 @@ fold_truth_andor_1 (location_t loc, enum first_bit = MIN (lr_bitpos, rr_bitpos); end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize); - rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0, - TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode, - volatilep); - if (rnmode == VOIDmode) + if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0, + TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD, + volatilep, &rnmode)) return 0; rnbitsize = GET_MODE_BITSIZE (rnmode); Index: gcc/stor-layout.c =================================================================== --- gcc/stor-layout.c 2017-07-13 09:18:38.663812457 +0100 +++ gcc/stor-layout.c 2017-07-13 09:18:41.680558844 +0100 @@ -2722,15 +2722,15 @@ fixup_unsigned_type (tree type) available, storing it in *OUT_MODE if so. */ bool -bit_field_mode_iterator::next_mode (machine_mode *out_mode) +bit_field_mode_iterator::next_mode (scalar_int_mode *out_mode) { - for (; m_mode != VOIDmode; - m_mode = GET_MODE_WIDER_MODE (m_mode).else_void ()) + for (; m_mode.exists (); m_mode = GET_MODE_WIDER_MODE (*m_mode)) { - unsigned int unit = GET_MODE_BITSIZE (m_mode); + scalar_int_mode mode = *m_mode; + unsigned int unit = GET_MODE_BITSIZE (mode); /* Skip modes that don't have full precision. */ - if (unit != GET_MODE_PRECISION (m_mode)) + if (unit != GET_MODE_PRECISION (mode)) continue; /* Stop if the mode is too wide to handle efficiently. */ @@ -2757,12 +2757,12 @@ bit_field_mode_iterator::next_mode (mach break; /* Stop if the mode requires too much alignment. 
*/ - if (GET_MODE_ALIGNMENT (m_mode) > m_align && SLOW_UNALIGNED_ACCESS (m_mode, m_align)) break; + if (GET_MODE_ALIGNMENT (mode) > m_align + && SLOW_UNALIGNED_ACCESS (mode, m_align)) break; - *out_mode = m_mode; - m_mode = GET_MODE_WIDER_MODE (m_mode).else_void (); + *out_mode = mode; + m_mode = GET_MODE_WIDER_MODE (mode); m_count++; return true; } @@ -2789,12 +2789,14 @@ bit_field_mode_iterator::prefer_smaller_ memory access to that range. Otherwise, we are allowed to touch any adjacent non bit-fields. - The underlying object is known to be aligned to a boundary of ALIGN bits. - If LARGEST_MODE is not VOIDmode, it means that we should not use a mode larger than LARGEST_MODE (usually SImode). + The chosen mode must have no more than LARGEST_MODE_BITSIZE bits. + INT_MAX is a suitable value for LARGEST_MODE_BITSIZE if the caller + doesn't want to apply a specific limit. - If no mode meets all these conditions, we return VOIDmode. + If no mode meets all these conditions, we return false. + The underlying object is known to be aligned to a boundary of ALIGN bits. + If VOLATILEP is false and SLOW_BYTE_ACCESS is false, we return the smallest mode meeting these conditions. @@ -2805,17 +2807,18 @@ bit_field_mode_iterator::prefer_smaller_ If VOLATILEP is true the narrow_volatile_bitfields target hook is used to decide which of the above modes should be used. */ -machine_mode +bool get_best_mode (int bitsize, int bitpos, unsigned HOST_WIDE_INT bitregion_start, unsigned HOST_WIDE_INT bitregion_end, unsigned int align, - machine_mode largest_mode, bool volatilep) + unsigned HOST_WIDE_INT largest_mode_bitsize, bool volatilep, + scalar_int_mode *best_mode) { bit_field_mode_iterator iter (bitsize, bitpos, bitregion_start, bitregion_end, align, volatilep); - machine_mode widest_mode = VOIDmode; - machine_mode mode; + scalar_int_mode mode; + bool found = false; while (iter.next_mode (&mode) /* ???
For historical reasons, reject modes that would normally receive greater alignment, even if unaligned accesses are @@ -2874,14 +2877,15 @@ get_best_mode (int bitsize, int bitpos, so that the final bitfield reference still has a MEM_EXPR and MEM_OFFSET. */ && GET_MODE_ALIGNMENT (mode) <= align - && (largest_mode == VOIDmode - || GET_MODE_SIZE (mode) <= GET_MODE_SIZE (largest_mode))) + && GET_MODE_BITSIZE (mode) <= largest_mode_bitsize) { - widest_mode = mode; + *best_mode = mode; + found = true; if (iter.prefer_smaller_modes ()) break; } - return widest_mode; + + return found; } /* Gets minimal and maximal values for MODE (signed or unsigned depending on