Several places compare aarch64_feature_flags values against 0. A later commit will disallow such comparisons. We can instead convert directly to a boolean value, and make sure all such conversions are explicit.
gcc/ChangeLog: * config/aarch64/aarch64-sve-builtins.cc (check_required_extensions): Replace comparison with 0. (add_overloaded_function): Ditto. * config/aarch64/aarch64.cc (aarch64_add_offset): Ditto. (aarch64_guard_switch_pstate_sm): Ditto. (aarch64_switch_pstate_sm): Ditto. (aarch64_need_old_pstate_sm): Ditto. (aarch64_epilogue_uses): Ditto. (aarch64_update_ipa_fn_target_info): Ditto. (aarch64_optimize_mode_switching): Ditto. (aarch64_mode_entry): Ditto. (aarch64_mode_exit): Ditto. (aarch64_valid_sysreg_name_p): Ditto. (aarch64_retrieve_sysreg): Ditto. * config/aarch64/aarch64.h (TARGET_STREAMING_COMPATIBLE): Ditto. diff --git a/gcc/config/aarch64/aarch64-sve-builtins.cc b/gcc/config/aarch64/aarch64-sve-builtins.cc index d555f350cd79ebed21dab77208b0ce291ab90e79..f033db5b25371d6b20a7c3cc2a4dc5462f8f991a 100644 --- a/gcc/config/aarch64/aarch64-sve-builtins.cc +++ b/gcc/config/aarch64/aarch64-sve-builtins.cc @@ -1125,7 +1125,7 @@ check_required_extensions (location_t location, tree fndecl, aarch64_feature_flags required_extensions) { auto missing_extensions = required_extensions & ~aarch64_asm_isa_flags; - if (missing_extensions == 0) + if (!missing_extensions) return check_required_registers (location, fndecl); if (missing_extensions & AARCH64_FL_SM_OFF) @@ -1635,8 +1635,8 @@ add_overloaded_function (const function_instance &instance, tree id = get_identifier (name); if (registered_function **map_value = name_map->get (id)) gcc_assert ((*map_value)->instance == instance - && ((*map_value)->required_extensions - & ~required_extensions) == 0); + && !((*map_value)->required_extensions + & ~required_extensions)); else { registered_function &rfn diff --git a/gcc/config/aarch64/aarch64.h b/gcc/config/aarch64/aarch64.h index 8eb21cfcfc1e80bef051c571ec7cfae47e3393ed..f4ab220271239ce5a750cf211120d5b37d7f8b27 100644 --- a/gcc/config/aarch64/aarch64.h +++ b/gcc/config/aarch64/aarch64.h @@ -275,7 +275,7 @@ constexpr auto AARCH64_FL_DEFAULT_ISA_MODE 
= AARCH64_FL_SM_OFF; /* The current function has a streaming-compatible body. */ #define TARGET_STREAMING_COMPATIBLE \ - ((aarch64_isa_flags & AARCH64_FL_SM_STATE) == 0) + (!(aarch64_isa_flags & AARCH64_FL_SM_STATE)) /* PSTATE.ZA is enabled in the current function body. */ #define TARGET_ZA (AARCH64_ISA_ZA_ON) diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc index 582dac5129faccee0db3a68f6bdf866e8b41a059..e84151c474029b437ce67eb0cd6fca591a823b82 100644 --- a/gcc/config/aarch64/aarch64.cc +++ b/gcc/config/aarch64/aarch64.cc @@ -4649,7 +4649,7 @@ aarch64_add_offset (scalar_int_mode mode, rtx dest, rtx src, { gcc_assert (offset.coeffs[0] == offset.coeffs[1]); rtx offset_rtx; - if (force_isa_mode == 0) + if (!force_isa_mode) offset_rtx = gen_int_mode (offset, mode); else offset_rtx = aarch64_sme_vq_immediate (mode, offset.coeffs[0], 0); @@ -4675,7 +4675,7 @@ aarch64_add_offset (scalar_int_mode mode, rtx dest, rtx src, && aarch64_sve_addvl_addpl_immediate_p (poly_offset)) { rtx offset_rtx; - if (force_isa_mode == 0) + if (!force_isa_mode) offset_rtx = gen_int_mode (poly_offset, mode); else offset_rtx = aarch64_sme_vq_immediate (mode, factor, 0); @@ -4759,8 +4759,7 @@ aarch64_add_offset (scalar_int_mode mode, rtx dest, rtx src, a shift and add sequence for the multiplication. If CNTB << SHIFT is out of range, stick with the current shift factor. */ - if (force_isa_mode == 0 - && IN_RANGE (low_bit, 2, 16 * 16)) + if (!force_isa_mode && IN_RANGE (low_bit, 2, 16 * 16)) { val = gen_int_mode (poly_int64 (low_bit, low_bit), mode); shift = 0; @@ -4900,7 +4899,7 @@ static rtx_insn * aarch64_guard_switch_pstate_sm (rtx old_svcr, aarch64_feature_flags local_mode) { local_mode &= AARCH64_FL_SM_STATE; - gcc_assert (local_mode != 0); + gcc_assert (local_mode); auto already_ok_cond = (local_mode & AARCH64_FL_SM_ON ? 
NE : EQ); auto *label = gen_label_rtx (); auto branch = aarch64_gen_test_and_branch (already_ok_cond, old_svcr, 0, @@ -4923,7 +4922,7 @@ aarch64_switch_pstate_sm (aarch64_feature_flags old_mode, gcc_assert (old_mode != new_mode); if ((new_mode & AARCH64_FL_SM_ON) - || (new_mode == 0 && (old_mode & AARCH64_FL_SM_OFF))) + || (!new_mode && (old_mode & AARCH64_FL_SM_OFF))) emit_insn (gen_aarch64_smstart_sm ()); else emit_insn (gen_aarch64_smstop_sm ()); @@ -7724,7 +7723,7 @@ aarch64_need_old_pstate_sm () { /* Exit early if the incoming value of PSTATE.SM is known at compile time. */ - if (aarch64_cfun_incoming_pstate_sm () != 0) + if (aarch64_cfun_incoming_pstate_sm ()) return false; if (aarch64_cfun_enables_pstate_sm ()) @@ -9407,7 +9406,7 @@ aarch64_epilogue_uses (int regno) return 1; /* If the function shares SME state with its caller, ensure that that data is not in the lazy save buffer on exit. */ - if (regno == ZA_SAVED_REGNUM && aarch64_cfun_incoming_pstate_za () != 0) + if (regno == ZA_SAVED_REGNUM && aarch64_cfun_incoming_pstate_za ()) return 1; if (regno == ZA_REGNUM && aarch64_cfun_shared_flags ("za") != 0) return 1; @@ -20631,7 +20630,7 @@ aarch64_update_ipa_fn_target_info (unsigned int &info, const gimple *stmt) If the function isn't marked streaming-compatible then it needs whichever SM mode it selects. 
*/ tree decl = gimple_call_fndecl (call); - if (aarch64_fndecl_pstate_sm (decl) != 0) + if (aarch64_fndecl_pstate_sm (decl)) info |= AARCH64_IPA_SM_FIXED; } } @@ -29286,7 +29285,7 @@ aarch64_pars_overlap_p (rtx par1, rtx par2) bool aarch64_optimize_mode_switching (aarch64_mode_entity entity) { - bool have_sme_state = (aarch64_cfun_incoming_pstate_za () != 0 + bool have_sme_state = (aarch64_cfun_incoming_pstate_za () || (aarch64_cfun_has_new_state ("za") && df_regs_ever_live_p (ZA_REGNUM)) || (aarch64_cfun_has_new_state ("zt0") @@ -29854,7 +29853,7 @@ aarch64_mode_entry (int entity) case aarch64_mode_entity::LOCAL_SME_STATE: return int (aarch64_cfun_shared_flags ("za") != 0 ? aarch64_local_sme_state::ACTIVE_LIVE - : aarch64_cfun_incoming_pstate_za () != 0 + : aarch64_cfun_incoming_pstate_za () ? aarch64_local_sme_state::ACTIVE_DEAD : aarch64_local_sme_state::INACTIVE_CALLER); } @@ -29874,7 +29873,7 @@ aarch64_mode_exit (int entity) case aarch64_mode_entity::LOCAL_SME_STATE: return int (aarch64_cfun_shared_flags ("za") != 0 ? aarch64_local_sme_state::ACTIVE_LIVE - : aarch64_cfun_incoming_pstate_za () != 0 + : aarch64_cfun_incoming_pstate_za () ? aarch64_local_sme_state::ACTIVE_DEAD : aarch64_local_sme_state::INACTIVE_CALLER); } @@ -30216,7 +30215,7 @@ aarch64_valid_sysreg_name_p (const char *regname) if (sysreg == NULL) return aarch64_is_implem_def_reg (regname); if (sysreg->arch_reqs) - return (aarch64_isa_flags & sysreg->arch_reqs); + return (bool) (aarch64_isa_flags & sysreg->arch_reqs); return true; } @@ -30240,7 +30239,7 @@ aarch64_retrieve_sysreg (const char *regname, bool write_p, bool is128op) if ((write_p && (sysreg->properties & F_REG_READ)) || (!write_p && (sysreg->properties & F_REG_WRITE))) return NULL; - if ((~aarch64_isa_flags & sysreg->arch_reqs) != 0) + if (~aarch64_isa_flags & sysreg->arch_reqs) return NULL; return sysreg->encoding; }