From: Karl Meakin <[email protected]>

Change the type of variables/parameters/data members that are used to hold
unspec codes from `int` to `unspec`. This makes GDB print their symbolic names
rather than their raw integer values. Uses of `-1` as a "no unspec" sentinel
are replaced by the new `UNSPEC_NONE` enum member.
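
As a minimal illustration of the debugging benefit (this sketch is not part of
the patch; the struct and field names below are hypothetical stand-ins), GDB
prints an enum-typed field by enumerator name, whereas an `int` field only
shows the raw number:

    /* Illustrative only; mirrors the int -> enum unspec change.  */
    enum unspec { UNSPEC_NONE, UNSPEC_TBL, UNSPEC_TBX };

    struct builtin_data_sketch
    {
      int unspec_as_int;           /* before: prints as a bare integer */
      enum unspec unspec_as_enum;  /* after: prints as its enumerator name */
    };

    /* With d = { 2, UNSPEC_TBX }:
         (gdb) print d.unspec_as_int    ->  $1 = 2
         (gdb) print d.unspec_as_enum   ->  $2 = UNSPEC_TBX  */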

gcc/ChangeLog:

        * config/aarch64/aarch64-builtins.cc
        (struct aarch64_pragma_builtins_data): Change the type of the unspec
        member to `unspec`.
        (aarch64_get_low_unspec): Likewise for its parameter.
        (aarch64_expand_tbl_tbx): Likewise.
        (aarch64_expand_pragma_builtin): Add `default` case to `switch`
        statement to silence warning.
        (aarch64_expand_fpsr_fpcr_setter): Change parameter type to `unspecv`.
        * config/aarch64/aarch64-sve-builtins-base.cc (unspec_cmla): Change
        type to `unspec`.
        (unspec_fcmla): Likewise.
        (unspec_cond_fcmla): Likewise.
        (expand_mla_mls_lane): Likewise.
        (svac_impl): Likewise.
        (svbrk_binary_impl): Likewise.
        (svbrk_unary_impl): Likewise.
        (svclast_impl): Likewise.
        (svcmla_impl): Likewise.
        (svcmp_impl): Likewise.
        (svcmp_wide_impl): Likewise.
        (svcvt_impl): Likewise.
        (svlast_impl): Likewise.
        (svldxf1_impl): Likewise.
        (svldxf1_extend_impl): Likewise.
        (svnot_impl): Likewise.
        (svpfirst_svpnext_impl): Likewise.
        (svrint_impl): Likewise.
        (svsel_impl): Likewise.
        (svwhilelx_impl): Likewise.
        * config/aarch64/aarch64-sve-builtins-functions.h
        (rtx_code_function_base): Likewise.
        * config/aarch64/aarch64-sve-builtins-sme.cc (expand_ld1_st1): Likewise.
        * config/aarch64/aarch64-sve-builtins-sve2.cc (unspec_cdot,
        unspec_sqrdcmlah, svcvt_fp8_impl, svmatch_svnmatch_impl,
        svwhilerw_svwhilewr_impl, faminmaximpl): Likewise.
        * config/aarch64/aarch64-sve-builtins.cc
        (function_expander::map_to_rtx_codes): Likewise.
        (function_expander::map_to_unspecs): Likewise.
        * config/aarch64/aarch64-sve-builtins.h: Likewise.
        * config/aarch64/aarch64.cc (aarch64_replace_reg_mode): Likewise.
        (aarch64_split_sve_subreg_move): Likewise.
        (aarch_pac_insn_p): Likewise.
        (aarch64_evpc_hvla): Likewise.
        * config/aarch64/aarch64.md: Add `UNSPEC_NONE` enum member.
---
 gcc/config/aarch64/aarch64-builtins.cc        |  12 +-
 .../aarch64/aarch64-sve-builtins-base.cc      | 157 +++++-----
 .../aarch64/aarch64-sve-builtins-functions.h  | 125 ++++----
 .../aarch64/aarch64-sve-builtins-sme.cc       |  50 ++--
 .../aarch64/aarch64-sve-builtins-sve2.cc      | 276 +++++++++---------
 gcc/config/aarch64/aarch64-sve-builtins.cc    |  15 +-
 gcc/config/aarch64/aarch64-sve-builtins.h     |   4 +-
 gcc/config/aarch64/aarch64.cc                 |   8 +-
 gcc/config/aarch64/aarch64.md                 |   1 +
 9 files changed, 324 insertions(+), 324 deletions(-)

diff --git a/gcc/config/aarch64/aarch64-builtins.cc b/gcc/config/aarch64/aarch64-builtins.cc
index dc84d87519f7..1ad04466b335 100644
--- a/gcc/config/aarch64/aarch64-builtins.cc
+++ b/gcc/config/aarch64/aarch64-builtins.cc
@@ -1736,7 +1736,7 @@ struct aarch64_pragma_builtins_data
   const char *name;
   aarch64_builtin_signatures signature;
   simd_type types[4];
-  int unspec;
+  enum unspec unspec;
   aarch64_required_extensions required_extensions;
   unsigned int flags;
 };
@@ -3947,7 +3947,7 @@ aarch64_pack_into_v128s (expand_operand *op)
 /* UNSPEC is a high unspec, indicated by "2" in mnemonics and "_high" in
    intrinsic names.  Return the equivalent low unspec.  */
 static int
-aarch64_get_low_unspec (int unspec)
+aarch64_get_low_unspec (unspec unspec)
 {
   switch (unspec)
     {
@@ -3986,7 +3986,7 @@ aarch64_expand_permute_pair (vec<expand_operand> &ops, int permute1,
    UNSPEC is either UNSPEC_TBL or UNSPEC_TBX.  The inputs must already be in
    registers.  */
 static rtx
-aarch64_expand_tbl_tbx (vec<rtx> &inputs, int unspec, machine_mode mode)
+aarch64_expand_tbl_tbx (vec<rtx> &inputs, unspec unspec, machine_mode mode)
 {
   rtx result = gen_reg_rtx (mode);
   rtvec vec = gen_rtvec_v (inputs.length (), inputs.address ());
@@ -3999,7 +3999,7 @@ aarch64_expand_tbl_tbx (vec<rtx> &inputs, int unspec, machine_mode mode)
 
    UNSPEC is either UNSPEC_TBL or UNSPEC_TBX.  */
 static rtx
-aarch64_expand_tbl_tbx (vec<expand_operand> &ops, int unspec)
+aarch64_expand_tbl_tbx (vec<expand_operand> &ops, unspec unspec)
 {
   for (unsigned int i = 1; i < ops.length (); ++i)
     ops[i].value = force_reg (ops[i].mode, ops[i].value);
@@ -4148,6 +4148,8 @@ aarch64_expand_pragma_builtin (tree exp, rtx target,
         halves don't actually matter.  */
       aarch64_convert_to_v64 (&ops[1]);
       break;
+    default:
+      break;
     }
 
   insn_code icode;
@@ -4393,7 +4395,7 @@ aarch64_expand_pragma_builtin (tree exp, rtx target,
 /* Expand an expression EXP as fpsr or fpcr setter (depending on
    UNSPEC) using MODE.  */
 static void
-aarch64_expand_fpsr_fpcr_setter (int unspec, machine_mode mode, tree exp)
+aarch64_expand_fpsr_fpcr_setter (unspecv unspec, machine_mode mode, tree exp)
 {
   tree arg = CALL_EXPR_ARG (exp, 0);
   rtx op = force_reg (mode, expand_normal (arg));
diff --git a/gcc/config/aarch64/aarch64-sve-builtins-base.cc b/gcc/config/aarch64/aarch64-sve-builtins-base.cc
index f07727416b5f..cd9ea353e10e 100644
--- a/gcc/config/aarch64/aarch64-sve-builtins-base.cc
+++ b/gcc/config/aarch64/aarch64-sve-builtins-base.cc
@@ -73,7 +73,7 @@ is_undef (tree val)
 }
 
 /* Return the UNSPEC_CMLA* unspec for rotation amount ROT.  */
-static int
+static unspec
 unspec_cmla (int rot)
 {
   switch (rot)
@@ -87,7 +87,7 @@ unspec_cmla (int rot)
 }
 
 /* Return the UNSPEC_FCMLA* unspec for rotation amount ROT.  */
-static int
+static unspec
 unspec_fcmla (int rot)
 {
   switch (rot)
@@ -101,7 +101,7 @@ unspec_fcmla (int rot)
 }
 
 /* Return the UNSPEC_COND_FCMLA* unspec for rotation amount ROT.  */
-static int
+static unspec
 unspec_cond_fcmla (int rot)
 {
   switch (rot)
@@ -137,7 +137,7 @@ expand_mad (function_expander &e,
 /* Expand a call to svmla_lane or svmls_lane using floating-point unspec
    UNSPEC.  */
 static rtx
-expand_mla_mls_lane (function_expander &e, int unspec)
+expand_mla_mls_lane (function_expander &e, unspec unspec)
 {
   /* Put the operands in the normal (fma ...) order, with the accumulator
      last.  This fits naturally since that's also the unprinted operand
@@ -199,7 +199,7 @@ public:
 class svac_impl : public function_base
 {
 public:
-  CONSTEXPR svac_impl (int unspec) : m_unspec (unspec) {}
+  CONSTEXPR svac_impl (unspec unspec) : m_unspec (unspec) {}
 
   gimple *
   fold (gimple_folder &f) const override
@@ -220,7 +220,7 @@ public:
   }
 
   /* The unspec code for the underlying comparison.  */
-  int m_unspec;
+  unspec m_unspec;
 };
 
 class svadda_impl : public function_base
@@ -308,7 +308,7 @@ public:
       {
        machine_mode mode = GET_MODE_INNER (e.vector_mode (0));
        e.args[2] = simplify_unary_operation (NOT, mode, e.args[2], mode);
-       return e.map_to_rtx_codes (AND, AND, -1, -1);
+       return e.map_to_rtx_codes (AND, AND, UNSPEC_NONE, UNSPEC_NONE);
       }
 
     if (e.type_suffix_ids[0] == TYPE_SUFFIX_b)
@@ -328,7 +328,7 @@ public:
 class svbrk_binary_impl : public function_base
 {
 public:
-  CONSTEXPR svbrk_binary_impl (int unspec) : m_unspec (unspec) {}
+  CONSTEXPR svbrk_binary_impl (unspec unspec) : m_unspec (unspec) {}
 
   rtx
   expand (function_expander &e) const override
@@ -337,14 +337,14 @@ public:
   }
 
   /* The unspec code associated with the operation.  */
-  int m_unspec;
+  unspec m_unspec;
 };
 
 /* Implements svbrka and svbrkb.  */
 class svbrk_unary_impl : public function_base
 {
 public:
-  CONSTEXPR svbrk_unary_impl (int unspec) : m_unspec (unspec) {}
+  CONSTEXPR svbrk_unary_impl (unspec unspec) : m_unspec (unspec) {}
 
   rtx
   expand (function_expander &e) const override
@@ -353,7 +353,7 @@ public:
   }
 
   /* The unspec code associated with the operation.  */
-  int m_unspec;
+  unspec m_unspec;
 };
 
 class svcadd_impl : public function_base
@@ -378,7 +378,7 @@ public:
 class svclast_impl : public quiet<function_base>
 {
 public:
-  CONSTEXPR svclast_impl (int unspec) : m_unspec (unspec) {}
+  CONSTEXPR svclast_impl (unspec unspec) : m_unspec (unspec) {}
 
   gimple *
   fold (gimple_folder &f) const override
@@ -403,7 +403,7 @@ public:
   }
 
   /* The unspec code associated with the operation.  */
-  int m_unspec;
+  unspec m_unspec;
 };
 
 class svcmla_impl : public function_base
@@ -419,12 +419,12 @@ public:
        /* Make the operand order the same as the one used by the fma optabs,
           with the accumulator last.  */
        e.rotate_inputs_left (1, 4);
-       return e.map_to_unspecs (-1, -1, unspec_cond_fcmla (rot), 3);
+       return e.map_to_unspecs (UNSPEC_NONE, UNSPEC_NONE, unspec_cond_fcmla (rot), 3);
       }
     else
       {
-       int cmla = unspec_cmla (rot);
-       return e.map_to_unspecs (cmla, cmla, -1);
+       unspec cmla = unspec_cmla (rot);
+       return e.map_to_unspecs (cmla, cmla, UNSPEC_NONE);
       }
   }
 };
@@ -458,8 +458,9 @@ public:
 class svcmp_impl : public function_base
 {
 public:
-  CONSTEXPR svcmp_impl (tree_code code, int unspec_for_fp)
-    : m_code (code), m_unspec_for_fp (unspec_for_fp) {}
+  CONSTEXPR svcmp_impl (tree_code code, unspec unspec_for_fp)
+    : m_code (code), m_unspec_for_fp (unspec_for_fp)
+  {}
 
   gimple *
   fold (gimple_folder &f) const override
@@ -509,17 +510,18 @@ public:
   tree_code m_code;
 
   /* The unspec code to use for floating-point comparisons.  */
-  int m_unspec_for_fp;
+  unspec m_unspec_for_fp;
 };
 
 /* Implements svcmp<cc>_wide.  */
 class svcmp_wide_impl : public function_base
 {
 public:
-  CONSTEXPR svcmp_wide_impl (tree_code code, int unspec_for_sint,
-                            int unspec_for_uint)
+  CONSTEXPR svcmp_wide_impl (tree_code code, unspec unspec_for_sint,
+                            unspec unspec_for_uint)
     : m_code (code), m_unspec_for_sint (unspec_for_sint),
-      m_unspec_for_uint (unspec_for_uint) {}
+      m_unspec_for_uint (unspec_for_uint)
+  {}
 
   gimple *
   fold (gimple_folder &f) const override
@@ -552,7 +554,7 @@ public:
        return e.use_exact_insn (icode);
       }
 
-    int unspec = (unsigned_p ? m_unspec_for_uint : m_unspec_for_sint);
+    unspec unspec = (unsigned_p ? m_unspec_for_uint : m_unspec_for_sint);
     return e.use_exact_insn (code_for_aarch64_pred_cmp_wide (unspec, mode));
   }
 
@@ -561,8 +563,8 @@ public:
 
   /* The unspec codes for signed and unsigned wide comparisons
      respectively.  */
-  int m_unspec_for_sint;
-  int m_unspec_for_uint;
+  unspec m_unspec_for_sint;
+  unspec m_unspec_for_uint;
 };
 
 class svcmpuo_impl : public quiet<function_base>
@@ -822,9 +824,8 @@ public:
        the source mode before the destination mode.  */
     if (e.type_suffix (1).integer_p)
       {
-       int unspec = (e.type_suffix (1).unsigned_p
-                     ? UNSPEC_COND_UCVTF
-                     : UNSPEC_COND_SCVTF);
+       unspec unspec = (e.type_suffix (1).unsigned_p ? UNSPEC_COND_UCVTF
+                                                     : UNSPEC_COND_SCVTF);
        if (e.type_suffix (0).element_bytes <= e.type_suffix (1).element_bytes)
          icode = (e.pred == PRED_x
                   ? code_for_aarch64_sve_nonextend (unspec, mode1, mode0)
@@ -836,9 +837,9 @@ public:
       }
     else
       {
-       int unspec = (!e.type_suffix (0).integer_p ? UNSPEC_COND_FCVT
-                     : e.type_suffix (0).unsigned_p ? UNSPEC_COND_FCVTZU
-                     : UNSPEC_COND_FCVTZS);
+       unspec unspec = (!e.type_suffix (0).integer_p   ? UNSPEC_COND_FCVT
+                        : e.type_suffix (0).unsigned_p ? UNSPEC_COND_FCVTZU
+                                                       : UNSPEC_COND_FCVTZS);
        if (e.type_suffix (0).element_bytes >= e.type_suffix (1).element_bytes)
          icode = (e.pred == PRED_x
                   ? code_for_aarch64_sve_nontrunc (unspec, mode1, mode0)
@@ -1000,7 +1001,7 @@ public:
        /* Use the same ordering as the dot_prod_optab, with the
           accumulator last.  */
        e.rotate_inputs_left (0, 4);
-       int unspec = unspec_for (e);
+       unspec unspec = unspec_for (e);
        if (unspec == UNSPEC_FDOT)
          icode = CODE_FOR_aarch64_fdot_prod_lanevnx4sfvnx8hf;
        else
@@ -1324,7 +1325,7 @@ public:
             with an extra argument on the end.  Take the inactive elements
             from this extra argument.  */
          e.rotate_inputs_left (0, 4);
-       return e.map_to_rtx_codes (AND, AND, -1, -1, 3);
+       return e.map_to_rtx_codes (AND, AND, UNSPEC_NONE, UNSPEC_NONE, 3);
       }
 
     machine_mode wide_mode = e.vector_mode (0);
@@ -1536,7 +1537,7 @@ public:
 class svlast_impl : public quiet<function_base>
 {
 public:
-  CONSTEXPR svlast_impl (int unspec) : m_unspec (unspec) {}
+  CONSTEXPR svlast_impl (unspec unspec) : m_unspec (unspec) {}
 
   bool is_lasta () const { return m_unspec == UNSPEC_LASTA; }
   bool is_lastb () const { return m_unspec == UNSPEC_LASTB; }
@@ -1666,7 +1667,7 @@ public:
   }
 
   /* The unspec code associated with the operation.  */
-  int m_unspec;
+  unspec m_unspec;
 };
 
 class svld1_impl : public full_width_access
@@ -2025,7 +2026,7 @@ public:
 class svldxf1_impl : public full_width_access
 {
 public:
-  CONSTEXPR svldxf1_impl (int unspec) : m_unspec (unspec) {}
+  CONSTEXPR svldxf1_impl (unspec unspec) : m_unspec (unspec) {}
 
   unsigned int
   call_properties (const function_instance &) const override
@@ -2045,15 +2046,16 @@ public:
   }
 
   /* The unspec associated with the load.  */
-  int m_unspec;
+  unspec m_unspec;
 };
 
 /* Implements extending contiguous forms of svldff1 and svldnf1.  */
 class svldxf1_extend_impl : public extending_load
 {
 public:
-  CONSTEXPR svldxf1_extend_impl (type_suffix_index memory_type, int unspec)
-    : extending_load (memory_type), m_unspec (unspec) {}
+  CONSTEXPR svldxf1_extend_impl (type_suffix_index memory_type, unspec unspec)
+    : extending_load (memory_type), m_unspec (unspec)
+  {}
 
   unsigned int
   call_properties (const function_instance &) const override
@@ -2075,7 +2077,7 @@ public:
   }
 
   /* The unspec associated with the load.  */
-  int m_unspec;
+  unspec m_unspec;
 };
 
 class svlen_impl : public quiet<function_base>
@@ -2433,7 +2435,7 @@ public:
 class svnot_impl : public rtx_code_function
 {
 public:
-  CONSTEXPR svnot_impl () : rtx_code_function (NOT, NOT, -1) {}
+  CONSTEXPR svnot_impl () : rtx_code_function (NOT, NOT, UNSPEC_NONE) {}
 
   rtx
   expand (function_expander &e) const override
@@ -2498,7 +2500,7 @@ public:
 class svpfirst_svpnext_impl : public function_base
 {
 public:
-  CONSTEXPR svpfirst_svpnext_impl (int unspec) : m_unspec (unspec) {}
+  CONSTEXPR svpfirst_svpnext_impl (unspec unspec) : m_unspec (unspec) {}
   gimple *
   fold (gimple_folder &f) const override
   {
@@ -2519,7 +2521,7 @@ public:
   }
 
   /* The unspec associated with the operation.  */
-  int m_unspec;
+  unspec m_unspec;
 };
 
 /* Implements contiguous forms of svprf[bhwd].  */
@@ -2893,7 +2895,7 @@ public:
 class svrint_impl : public function_base
 {
 public:
-  CONSTEXPR svrint_impl (optab_tag optab, int cond_unspec)
+  CONSTEXPR svrint_impl (optab_tag optab, unspec cond_unspec)
     : m_optab (optab), m_cond_unspec (cond_unspec)
   {}
 
@@ -2905,11 +2907,11 @@ public:
        auto icode = direct_optab_handler (m_optab, e.tuple_mode (0));
        return e.use_exact_insn (icode);
       }
-    return e.map_to_unspecs (-1, -1, m_cond_unspec);
+    return e.map_to_unspecs (UNSPEC_NONE, UNSPEC_NONE, m_cond_unspec);
   }
 
   optab_tag m_optab;
-  int m_cond_unspec;
+  unspec m_cond_unspec;
 };
 
 class svsel_impl : public quiet<function_base>
@@ -3201,7 +3203,7 @@ public:
     /* Canonicalize subtractions of constants to additions.  */
     machine_mode mode = e.vector_mode (0);
     if (e.try_negating_argument (2, mode))
-      return e.map_to_rtx_codes (PLUS, PLUS, UNSPEC_COND_FADD, -1);
+      return e.map_to_rtx_codes (PLUS, PLUS, UNSPEC_COND_FADD, UNSPEC_NONE);
 
     return rtx_code_function::expand (e);
   }
@@ -3273,14 +3275,14 @@ public:
   expand (function_expander &e) const override
   {
     machine_mode mode = GET_MODE (e.args[0]);
-    unsigned int unpacku = m_high_p ? UNSPEC_UNPACKUHI : UNSPEC_UNPACKULO;
-    unsigned int unpacks = m_high_p ? UNSPEC_UNPACKSHI : UNSPEC_UNPACKSLO;
+    unspec unpacku = m_high_p ? UNSPEC_UNPACKUHI : UNSPEC_UNPACKULO;
+    unspec unpacks = m_high_p ? UNSPEC_UNPACKSHI : UNSPEC_UNPACKSLO;
     insn_code icode;
     if (GET_MODE_CLASS (mode) == MODE_VECTOR_BOOL)
       icode = code_for_aarch64_sve_punpk_acle (unpacku);
     else
       {
-       int unspec = e.type_suffix (0).unsigned_p ? unpacku : unpacks;
+       unspec unspec = e.type_suffix (0).unsigned_p ? unpacku : unpacks;
        icode = code_for_aarch64_sve_unpk (unspec, unspec, mode);
       }
     return e.use_exact_insn (icode);
@@ -3345,7 +3347,8 @@ public:
 class svwhilelx_impl : public while_comparison
 {
 public:
-  CONSTEXPR svwhilelx_impl (int unspec_for_sint, int unspec_for_uint, bool eq_p)
+  CONSTEXPR svwhilelx_impl (unspec unspec_for_sint, unspec unspec_for_uint,
+                           bool eq_p)
     : while_comparison (unspec_for_sint, unspec_for_uint), m_eq_p (eq_p)
   {}
 
@@ -3474,7 +3477,7 @@ public:
   expand (function_expander &e) const override
   {
     if (vectors_per_tuple (e) == 1)
-      return e.map_to_unspecs (-1, -1, UNSPEC_COND_FSCALE);
+      return e.map_to_unspecs (UNSPEC_NONE, UNSPEC_NONE, UNSPEC_COND_FSCALE);
     else
       {
        machine_mode mode = GET_MODE (e.args[0]);
@@ -3507,7 +3510,7 @@ FUNCTION (svand, rtx_code_function, (AND, AND))
 FUNCTION (svandv, svandv_impl,)
 FUNCTION (svasr, rtx_code_function, (ASHIFTRT, ASHIFTRT))
 FUNCTION (svasr_wide, shift_wide, (ASHIFTRT, UNSPEC_ASHIFTRT_WIDE))
-FUNCTION (svasrd, unspec_based_function, (UNSPEC_ASRD, -1, -1))
+FUNCTION (svasrd, unspec_based_function, (UNSPEC_ASRD, UNSPEC_NONE, UNSPEC_NONE))
 FUNCTION (svbfdot, fixed_insn_function, (CODE_FOR_aarch64_sve_bfdotvnx4sf))
 FUNCTION (svbfdot_lane, fixed_insn_function,
          (CODE_FOR_aarch64_sve_bfdot_lanevnx4sf))
@@ -3576,9 +3579,9 @@ FUNCTION (svdup, svdup_impl,)
 FUNCTION (svdup_lane, svdup_lane_impl,)
 FUNCTION (svdupq, svdupq_impl,)
 FUNCTION (svdupq_lane, svdupq_lane_impl,)
-FUNCTION (sveor, rtx_code_function, (XOR, XOR, -1))
+FUNCTION (sveor, rtx_code_function, (XOR, XOR, UNSPEC_NONE))
 FUNCTION (sveorv, sveorv_impl,)
-FUNCTION (svexpa, unspec_based_function, (-1, -1, UNSPEC_FEXPA))
+FUNCTION (svexpa, unspec_based_function, (UNSPEC_NONE, UNSPEC_NONE, UNSPEC_FEXPA))
 FUNCTION (svext, QUIET_CODE_FOR_MODE0 (aarch64_sve_ext),)
 FUNCTION (svextb, svext_bhw_impl, (QImode))
 FUNCTION (svexth, svext_bhw_impl, (HImode))
@@ -3659,14 +3662,14 @@ FUNCTION (svmsb, svmsb_impl,)
 FUNCTION (svmul, svmul_impl,)
 FUNCTION (svmul_lane, CODE_FOR_MODE0 (aarch64_mul_lane),)
 FUNCTION (svmulh, unspec_based_function, (UNSPEC_SMUL_HIGHPART,
-                                         UNSPEC_UMUL_HIGHPART, -1))
-FUNCTION (svmulx, unspec_based_function, (-1, -1, UNSPEC_COND_FMULX))
+                                         UNSPEC_UMUL_HIGHPART, UNSPEC_NONE))
+FUNCTION (svmulx, unspec_based_function, (UNSPEC_NONE, UNSPEC_NONE, UNSPEC_COND_FMULX))
 FUNCTION (svnand, svnand_impl,)
 FUNCTION (svneg, quiet<rtx_code_function>, (NEG, NEG, UNSPEC_COND_FNEG))
-FUNCTION (svnmad, unspec_based_function, (-1, -1, UNSPEC_COND_FNMLA))
-FUNCTION (svnmla, unspec_based_function_rotated, (-1, -1, UNSPEC_COND_FNMLA))
-FUNCTION (svnmls, unspec_based_function_rotated, (-1, -1, UNSPEC_COND_FNMLS))
-FUNCTION (svnmsb, unspec_based_function, (-1, -1, UNSPEC_COND_FNMLS))
+FUNCTION (svnmad, unspec_based_function, (UNSPEC_NONE, UNSPEC_NONE, UNSPEC_COND_FNMLA))
+FUNCTION (svnmla, unspec_based_function_rotated, (UNSPEC_NONE, UNSPEC_NONE, UNSPEC_COND_FNMLA))
+FUNCTION (svnmls, unspec_based_function_rotated, (UNSPEC_NONE, UNSPEC_NONE, UNSPEC_COND_FNMLS))
+FUNCTION (svnmsb, unspec_based_function, (UNSPEC_NONE, UNSPEC_NONE, UNSPEC_COND_FNMLS))
 FUNCTION (svnor, svnor_impl,)
 FUNCTION (svnot, svnot_impl,)
 FUNCTION (svorn, svorn_impl,)
@@ -3688,7 +3691,7 @@ FUNCTION (svptest_first, svptest_impl, (LT))
 FUNCTION (svptest_last, svptest_impl, (LTU))
 FUNCTION (svptrue, svptrue_impl,)
 FUNCTION (svptrue_pat, svptrue_pat_impl,)
-FUNCTION (svqadd, rtx_code_function, (SS_PLUS, US_PLUS, -1))
+FUNCTION (svqadd, rtx_code_function, (SS_PLUS, US_PLUS, UNSPEC_NONE))
 FUNCTION (svqdecb, svqdec_bhwd_impl, (QImode))
 FUNCTION (svqdecb_pat, svqdec_bhwd_impl, (QImode))
 FUNCTION (svqdecd, svqdec_bhwd_impl, (DImode))
@@ -3707,17 +3710,17 @@ FUNCTION (svqinch_pat, svqinc_bhwd_impl, (HImode))
 FUNCTION (svqincp, svqdecp_svqincp_impl, (SS_PLUS, US_PLUS))
 FUNCTION (svqincw, svqinc_bhwd_impl, (SImode))
 FUNCTION (svqincw_pat, svqinc_bhwd_impl, (SImode))
-FUNCTION (svqsub, rtx_code_function, (SS_MINUS, US_MINUS, -1))
-FUNCTION (svrbit, rtx_code_function, (BITREVERSE, BITREVERSE, -1))
+FUNCTION (svqsub, rtx_code_function, (SS_MINUS, US_MINUS, UNSPEC_NONE))
+FUNCTION (svrbit, rtx_code_function, (BITREVERSE, BITREVERSE, UNSPEC_NONE))
 FUNCTION (svrdffr, svrdffr_impl,)
-FUNCTION (svrecpe, unspec_based_function, (-1, UNSPEC_URECPE, UNSPEC_FRECPE))
-FUNCTION (svrecps, unspec_based_function, (-1, -1, UNSPEC_FRECPS))
-FUNCTION (svrecpx, unspec_based_function, (-1, -1, UNSPEC_COND_FRECPX))
+FUNCTION (svrecpe, unspec_based_function, (UNSPEC_NONE, UNSPEC_URECPE, UNSPEC_FRECPE))
+FUNCTION (svrecps, unspec_based_function, (UNSPEC_NONE, UNSPEC_NONE, UNSPEC_FRECPS))
+FUNCTION (svrecpx, unspec_based_function, (UNSPEC_NONE, UNSPEC_NONE, UNSPEC_COND_FRECPX))
 FUNCTION (svreinterpret, svreinterpret_impl,)
 FUNCTION (svrev, svrev_impl,)
-FUNCTION (svrevb, unspec_based_function, (UNSPEC_REVB, UNSPEC_REVB, -1))
-FUNCTION (svrevh, unspec_based_function, (UNSPEC_REVH, UNSPEC_REVH, -1))
-FUNCTION (svrevw, unspec_based_function, (UNSPEC_REVW, UNSPEC_REVW, -1))
+FUNCTION (svrevb, unspec_based_function, (UNSPEC_REVB, UNSPEC_REVB, UNSPEC_NONE))
+FUNCTION (svrevh, unspec_based_function, (UNSPEC_REVH, UNSPEC_REVH, UNSPEC_NONE))
+FUNCTION (svrevw, unspec_based_function, (UNSPEC_REVW, UNSPEC_REVW, UNSPEC_NONE))
 FUNCTION (svrinta, svrint_impl, (round_optab, UNSPEC_COND_FRINTA))
 FUNCTION (svrinti, svrint_impl, (nearbyint_optab, UNSPEC_COND_FRINTI))
 FUNCTION (svrintm, svrint_impl, (floor_optab, UNSPEC_COND_FRINTM))
@@ -3725,8 +3728,8 @@ FUNCTION (svrintn, svrint_impl, (roundeven_optab, UNSPEC_COND_FRINTN))
 FUNCTION (svrintp, svrint_impl, (ceil_optab, UNSPEC_COND_FRINTP))
 FUNCTION (svrintx, svrint_impl, (rint_optab, UNSPEC_COND_FRINTX))
 FUNCTION (svrintz, svrint_impl, (btrunc_optab, UNSPEC_COND_FRINTZ))
-FUNCTION (svrsqrte, unspec_based_function, (-1, UNSPEC_RSQRTE, UNSPEC_RSQRTE))
-FUNCTION (svrsqrts, unspec_based_function, (-1, -1, UNSPEC_RSQRTS))
+FUNCTION (svrsqrte, unspec_based_function, (UNSPEC_NONE, UNSPEC_RSQRTE, UNSPEC_RSQRTE))
+FUNCTION (svrsqrts, unspec_based_function, (UNSPEC_NONE, UNSPEC_NONE, UNSPEC_RSQRTS))
 FUNCTION (svscale, svscale_impl,)
 FUNCTION (svsel, svsel_impl,)
 FUNCTION (svset2, svset_impl, (2))
@@ -3750,7 +3753,7 @@ FUNCTION (svstnt1, svstnt1_impl,)
 FUNCTION (svsub, svsub_impl,)
 FUNCTION (svsubr, rtx_code_function_rotated, (MINUS, MINUS, UNSPEC_COND_FSUB))
 FUNCTION (svsudot, svusdot_impl, (true))
-FUNCTION (svsudot_lane, svdotprod_lane_impl, (UNSPEC_SUDOT, -1, -1))
+FUNCTION (svsudot_lane, svdotprod_lane_impl, (UNSPEC_SUDOT, UNSPEC_NONE, UNSPEC_NONE))
 FUNCTION (svtbl, quiet<unspec_based_uncond_function>, (UNSPEC_TBL, UNSPEC_TBL,
                                                       UNSPEC_TBL))
 FUNCTION (svtmad, CODE_FOR_MODE0 (aarch64_sve_tmad),)
@@ -3760,8 +3763,8 @@ FUNCTION (svtrn1q, unspec_based_function, (UNSPEC_TRN1Q, UNSPEC_TRN1Q,
 FUNCTION (svtrn2, svtrn_impl, (1))
 FUNCTION (svtrn2q, unspec_based_function, (UNSPEC_TRN2Q, UNSPEC_TRN2Q,
                                           UNSPEC_TRN2Q))
-FUNCTION (svtsmul, unspec_based_function, (-1, -1, UNSPEC_FTSMUL))
-FUNCTION (svtssel, unspec_based_function, (-1, -1, UNSPEC_FTSSEL))
+FUNCTION (svtsmul, unspec_based_function, (UNSPEC_NONE, UNSPEC_NONE, UNSPEC_FTSMUL))
+FUNCTION (svtssel, unspec_based_function, (UNSPEC_NONE, UNSPEC_NONE, UNSPEC_FTSSEL))
 FUNCTION (svundef, svundef_impl, (1))
 FUNCTION (svundef2, svundef_impl, (2))
 FUNCTION (svundef3, svundef_impl, (3))
@@ -3769,8 +3772,8 @@ FUNCTION (svundef4, svundef_impl, (4))
 FUNCTION (svunpkhi, svunpk_impl, (true))
 FUNCTION (svunpklo, svunpk_impl, (false))
 FUNCTION (svusdot, svusdot_impl, (false))
-FUNCTION (svusdot_lane, svdotprod_lane_impl, (UNSPEC_USDOT, -1, -1))
-FUNCTION (svusmmla, unspec_based_add_function, (UNSPEC_USMATMUL, -1, -1))
+FUNCTION (svusdot_lane, svdotprod_lane_impl, (UNSPEC_USDOT, UNSPEC_NONE, UNSPEC_NONE))
+FUNCTION (svusmmla, unspec_based_add_function, (UNSPEC_USMATMUL, UNSPEC_NONE, UNSPEC_NONE))
 FUNCTION (svuzp1, svuzp_impl, (0))
 FUNCTION (svuzp1q, unspec_based_function, (UNSPEC_UZP1Q, UNSPEC_UZP1Q,
                                           UNSPEC_UZP1Q))
diff --git a/gcc/config/aarch64/aarch64-sve-builtins-functions.h b/gcc/config/aarch64/aarch64-sve-builtins-functions.h
index df5e44a294fd..1e1c44fbf74e 100644
--- a/gcc/config/aarch64/aarch64-sve-builtins-functions.h
+++ b/gcc/config/aarch64/aarch64-sve-builtins-functions.h
@@ -203,11 +203,12 @@ class rtx_code_function_base : public function_base
 public:
   CONSTEXPR rtx_code_function_base (rtx_code code_for_sint,
                                    rtx_code code_for_uint,
-                                   int unspec_for_cond_fp = -1,
-                                   int unspec_for_uncond_fp = -1)
+                                   unspec unspec_for_cond_fp = UNSPEC_NONE,
+                                   unspec unspec_for_uncond_fp = UNSPEC_NONE)
     : m_code_for_sint (code_for_sint), m_code_for_uint (code_for_uint),
       m_unspec_for_cond_fp (unspec_for_cond_fp),
-      m_unspec_for_uncond_fp (unspec_for_uncond_fp) {}
+      m_unspec_for_uncond_fp (unspec_for_uncond_fp)
+  {}
 
   /* The rtx code to use for signed and unsigned integers respectively.
      Can be UNKNOWN for functions that don't have integer forms.  */
@@ -216,11 +217,11 @@ public:
 
   /* The UNSPEC_COND_* to use for floating-point operations.  Can be -1
      for functions that only operate on integers.  */
-  int m_unspec_for_cond_fp;
+  unspec m_unspec_for_cond_fp;
 
   /* The UNSPEC_* to use for unpredicated floating-point operations.
      Can be -1 if there is no such operation.  */
-  int m_unspec_for_uncond_fp;
+  unspec m_unspec_for_uncond_fp;
 };
 
 /* A function_base for functions that have an associated rtx code.
@@ -267,20 +268,17 @@ public:
 class unspec_based_function_base : public function_base
 {
 public:
-  CONSTEXPR unspec_based_function_base (int unspec_for_sint,
-                                       int unspec_for_uint,
-                                       int unspec_for_fp,
-                                       int unspec_for_mfp8 = -1,
-                                       unsigned int suffix_index = 0)
-    : m_unspec_for_sint (unspec_for_sint),
-      m_unspec_for_uint (unspec_for_uint),
-      m_unspec_for_fp (unspec_for_fp),
-      m_unspec_for_mfp8 (unspec_for_mfp8),
+  CONSTEXPR
+  unspec_based_function_base (unspec unspec_for_sint, unspec unspec_for_uint,
+                             unspec unspec_for_fp, unspec unspec_for_mfp8 = UNSPEC_NONE,
+                             unsigned int suffix_index = 0)
+    : m_unspec_for_sint (unspec_for_sint), m_unspec_for_uint (unspec_for_uint),
+      m_unspec_for_fp (unspec_for_fp), m_unspec_for_mfp8 (unspec_for_mfp8),
       m_suffix_index (suffix_index)
   {}
 
   /* Return the unspec code to use for INSTANCE, based on type suffix 0.  */
-  int
+  unspec
   unspec_for (const function_instance &instance) const
   {
     if (instance.fpm_mode == FPM_set)
@@ -294,10 +292,10 @@ public:
 
   /* The unspec code associated with signed-integer, unsigned-integer
      and floating-point operations respectively.  */
-  int m_unspec_for_sint;
-  int m_unspec_for_uint;
-  int m_unspec_for_fp;
-  int m_unspec_for_mfp8;
+  unspec m_unspec_for_sint;
+  unspec m_unspec_for_uint;
+  unspec m_unspec_for_fp;
+  unspec m_unspec_for_mfp8;
 
   /* Which type suffix is used to choose between the unspecs.  */
   unsigned int m_suffix_index;
@@ -402,7 +400,7 @@ typedef unspec_based_function_exact_insn<code_for_aarch64_sve_sub_lane>
 class cond_or_uncond_unspec_function : public function_base
 {
 public:
-  CONSTEXPR cond_or_uncond_unspec_function (int cond_unspec, int uncond_unspec)
+  CONSTEXPR cond_or_uncond_unspec_function (unspec cond_unspec, unspec uncond_unspec)
     : m_cond_unspec (cond_unspec), m_uncond_unspec (uncond_unspec) {}
 
   rtx
@@ -421,8 +419,8 @@ public:
 
   /* The unspecs for the conditional and unconditional instructions,
      respectively.  */
-  int m_cond_unspec;
-  int m_uncond_unspec;
+  unspec m_cond_unspec;
+  unspec m_uncond_unspec;
 };
 
 /* General SME unspec-based functions, parameterized on the vector mode.  */
@@ -431,9 +429,9 @@ class sme_1mode_function : public read_write_za<unspec_based_function_base>
 public:
   using parent = read_write_za<unspec_based_function_base>;
 
-  CONSTEXPR sme_1mode_function (int unspec_for_sint, int unspec_for_uint,
-                               int unspec_for_fp)
-    : parent (unspec_for_sint, unspec_for_uint, unspec_for_fp, -1, 1)
+  CONSTEXPR sme_1mode_function (unspec unspec_for_sint, unspec unspec_for_uint,
+                               unspec unspec_for_fp)
+    : parent (unspec_for_sint, unspec_for_uint, unspec_for_fp, UNSPEC_NONE, 1)
   {}
 
   rtx
@@ -461,8 +459,8 @@ class sme_2mode_function_t : public read_write_za<unspec_based_function_base>
 public:
   using parent = read_write_za<unspec_based_function_base>;
 
-  CONSTEXPR sme_2mode_function_t (int unspec_for_sint, int unspec_for_uint,
-                                 int unspec_for_fp, int unspec_for_mfp8 = -1)
+  CONSTEXPR sme_2mode_function_t (unspec unspec_for_sint, unspec unspec_for_uint,
+                                 unspec unspec_for_fp, unspec unspec_for_mfp8 = UNSPEC_NONE)
     : parent (unspec_for_sint, unspec_for_uint, unspec_for_fp, unspec_for_mfp8,
              1)
   {}
@@ -488,8 +486,8 @@ class svvdot_half_impl : public read_write_za<unspec_based_function_base>
 public:
   using parent = read_write_za<unspec_based_function_base>;
 
-  CONSTEXPR svvdot_half_impl (int unspec_for_sint, int unspec_for_uint,
-                             int unspec_for_fp, int unspec_for_mfp8)
+  CONSTEXPR svvdot_half_impl (unspec unspec_for_sint, unspec unspec_for_uint,
+                             unspec unspec_for_fp, unspec unspec_for_mfp8)
     : parent (unspec_for_sint, unspec_for_uint, unspec_for_fp, unspec_for_mfp8,
              1)
   {}
@@ -519,7 +517,7 @@ public:
   rtx
   expand (function_expander &e) const override
   {
-    int unspec = unspec_for (e);
+    unspec unspec = unspec_for (e);
     insn_code icode;
     if (e.type_suffix (m_suffix_index).float_p
        && e.fpm_mode != FPM_set)
@@ -550,7 +548,7 @@ public:
   rtx
   expand (function_expander &e) const override
   {
-    int unspec = unspec_for (e);
+    unspec unspec = unspec_for (e);
     insn_code icode;
     if (e.type_suffix (m_suffix_index).float_p
        && e.fpm_mode != FPM_set)
@@ -644,7 +642,7 @@ public:
 class binary_permute : public permute
 {
 public:
-  CONSTEXPR binary_permute (int unspec) : m_unspec (unspec) {}
+  CONSTEXPR binary_permute (unspec unspec) : m_unspec (unspec) {}
 
   rtx
   expand (function_expander &e) const override
@@ -657,7 +655,7 @@ public:
   }
 
   /* The unspec code associated with the operation.  */
-  int m_unspec;
+  unspec m_unspec;
 };
 
 /* A function that implements a x2 or x4 permute instruction.  Both forms
@@ -666,7 +664,7 @@ public:
 class multireg_permute : public function_base
 {
 public:
-  CONSTEXPR multireg_permute (int unspec) : m_unspec (unspec) {}
+  CONSTEXPR multireg_permute (unspec unspec) : m_unspec (unspec) {}
 
   rtx
   expand (function_expander &e) const override
@@ -684,7 +682,7 @@ public:
   }
 
   /* The unspec associated with the permutation.  */
-  int m_unspec;
+  unspec m_unspec;
 };
 
 /* A function that has two type integer type suffixes, which might agree
@@ -693,12 +691,11 @@ public:
 class integer_conversion : public function_base
 {
 public:
-  CONSTEXPR integer_conversion (int unspec_for_sint, int unspec_for_sintu,
-                               int unspec_for_uint, int unspec_for_uints)
+  CONSTEXPR integer_conversion (unspec unspec_for_sint, unspec unspec_for_sintu,
+                               unspec unspec_for_uint, unspec unspec_for_uints)
     : m_unspec_for_sint (unspec_for_sint),
       m_unspec_for_sintu (unspec_for_sintu),
-      m_unspec_for_uint (unspec_for_uint),
-      m_unspec_for_uints (unspec_for_uints)
+      m_unspec_for_uint (unspec_for_uint), m_unspec_for_uints (unspec_for_uints)
   {}
 
   rtx
@@ -706,7 +703,7 @@ public:
   {
     machine_mode mode0 = e.vector_mode (0);
     machine_mode mode1 = GET_MODE (e.args[0]);
-    int unspec;
+    unspec unspec;
     if (e.type_suffix (0).unsigned_p == e.type_suffix (1).unsigned_p)
       unspec = (e.type_suffix (0).unsigned_p
                ? m_unspec_for_uint
@@ -719,32 +716,30 @@ public:
   }
 
   /* The unspec for signed -> signed.  */
-  int m_unspec_for_sint;
+  unspec m_unspec_for_sint;
 
   /* The unspec for signed -> unsigned.  */
-  int m_unspec_for_sintu;
+  unspec m_unspec_for_sintu;
 
   /* The unspec for unsigned -> signed.  */
-  int m_unspec_for_uint;
+  unspec m_unspec_for_uint;
 
   /* The unspec for unsigned -> unsigned.  */
-  int m_unspec_for_uints;
+  unspec m_unspec_for_uints;
 };
 
 /* A function_base for functions that reduce a vector to a scalar.  */
 class reduction : public function_base
 {
 public:
-  CONSTEXPR reduction (int unspec)
-    : m_unspec_for_sint (unspec),
-      m_unspec_for_uint (unspec),
+  CONSTEXPR reduction (unspec unspec)
+    : m_unspec_for_sint (unspec), m_unspec_for_uint (unspec),
       m_unspec_for_fp (unspec)
   {}
 
-  CONSTEXPR reduction (int unspec_for_sint, int unspec_for_uint,
-                      int unspec_for_fp)
-    : m_unspec_for_sint (unspec_for_sint),
-      m_unspec_for_uint (unspec_for_uint),
+  CONSTEXPR reduction (unspec unspec_for_sint, unspec unspec_for_uint,
+                      unspec unspec_for_fp)
+    : m_unspec_for_sint (unspec_for_sint), m_unspec_for_uint (unspec_for_uint),
       m_unspec_for_fp (unspec_for_fp)
   {}
 
@@ -752,9 +747,9 @@ public:
   expand (function_expander &e) const override
   {
     machine_mode mode = e.vector_mode (0);
-    int unspec = (!e.type_suffix (0).integer_p ? m_unspec_for_fp
-                 : e.type_suffix (0).unsigned_p ? m_unspec_for_uint
-                 : m_unspec_for_sint);
+    unspec unspec = (!e.type_suffix (0).integer_p   ? m_unspec_for_fp
+                    : e.type_suffix (0).unsigned_p ? m_unspec_for_uint
+                                                   : m_unspec_for_sint);
     /* There's no distinction between SADDV and UADDV for 64-bit elements;
        the signed versions only exist for narrower elements.  */
     if (GET_MODE_UNIT_BITSIZE (mode) == 64 && unspec == UNSPEC_SADDV)
@@ -764,9 +759,9 @@ public:
 
   /* The unspec code associated with signed-integer, unsigned-integer
      and floating-point operations respectively.  */
-  int m_unspec_for_sint;
-  int m_unspec_for_uint;
-  int m_unspec_for_fp;
+  unspec m_unspec_for_sint;
+  unspec m_unspec_for_uint;
+  unspec m_unspec_for_fp;
 };
 
 /* A function_base for functions that shift narrower-than-64-bit values
@@ -789,7 +784,7 @@ public:
     if (aarch64_simd_shift_imm_p (shift, elem_mode, m_code == ASHIFT))
       {
        e.args.last () = shift;
-       return e.map_to_rtx_codes (m_code, m_code, -1, -1);
+       return e.map_to_rtx_codes (m_code, m_code, UNSPEC_NONE, UNSPEC_NONE);
       }
 
     if (e.pred == PRED_x)
@@ -832,9 +827,8 @@ public:
 class while_comparison : public function_base
 {
 public:
-  CONSTEXPR while_comparison (int unspec_for_sint, int unspec_for_uint)
-    : m_unspec_for_sint (unspec_for_sint),
-      m_unspec_for_uint (unspec_for_uint)
+  CONSTEXPR while_comparison (unspec unspec_for_sint, unspec unspec_for_uint)
+    : m_unspec_for_sint (unspec_for_sint), m_unspec_for_uint (unspec_for_uint)
   {}
 
   rtx
@@ -842,9 +836,8 @@ public:
   {
     /* Suffix 0 determines the predicate mode, suffix 1 determines the
        scalar mode and signedness.  */
-    int unspec = (e.type_suffix (1).unsigned_p
-                 ? m_unspec_for_uint
-                 : m_unspec_for_sint);
+    unspec unspec
+      = (e.type_suffix (1).unsigned_p ? m_unspec_for_uint : m_unspec_for_sint);
     if (e.vectors_per_tuple () > 1)
       {
        auto bits = e.type_suffix (0).element_bits;
@@ -866,8 +859,8 @@ public:
 
   /* The unspec codes associated with signed and unsigned operations
      respectively.  */
-  int m_unspec_for_sint;
-  int m_unspec_for_uint;
+  unspec m_unspec_for_sint;
+  unspec m_unspec_for_uint;
 };
 
 }
diff --git a/gcc/config/aarch64/aarch64-sve-builtins-sme.cc b/gcc/config/aarch64/aarch64-sve-builtins-sme.cc
index 1b809492da45..2c64229a554b 100644
--- a/gcc/config/aarch64/aarch64-sve-builtins-sme.cc
+++ b/gcc/config/aarch64/aarch64-sve-builtins-sme.cc
@@ -60,7 +60,7 @@ public:
 class read_write_za_base : public function_base
 {
 public:
-  constexpr read_write_za_base (int unspec) : m_unspec (unspec) {}
+  constexpr read_write_za_base (unspec unspec) : m_unspec (unspec) {}
 
   rtx
   expand (function_expander &e) const override
@@ -73,7 +73,7 @@ public:
     return e.use_exact_insn (icode);
   }
 
-  int m_unspec;
+  unspec m_unspec;
 };
 
 using load_za_base = add_call_properties<load_store_za_zt0_base,
@@ -176,7 +176,7 @@ expand_ldr_str_zt0 (function_expander &e, insn_code icode)
    IS_LOAD is true if E is a load, false if it is a store.  */
 
 static rtx
-expand_ld1_st1 (function_expander &e, int unspec, bool is_load)
+expand_ld1_st1 (function_expander &e, unspec unspec, bool is_load)
 {
   bool is_vnum = has_in_range_vnum_arg (e, e.vector_mode (0), 4);
   auto icode = (is_vnum
@@ -298,7 +298,7 @@ public:
 class svld1_za_impl : public load_za_base
 {
 public:
-  constexpr svld1_za_impl (int unspec) : m_unspec (unspec) {}
+  constexpr svld1_za_impl (unspec unspec) : m_unspec (unspec) {}
 
   rtx
   expand (function_expander &e) const override
@@ -306,7 +306,7 @@ public:
     return expand_ld1_st1 (e, m_unspec, true);
   }
 
-  int m_unspec;
+  unspec m_unspec;
 };
 
 class svldr_za_impl : public load_za_base
@@ -404,7 +404,7 @@ using svreadz_za_tile_impl = add_call_properties<read_write_za_base,
 class svst1_za_impl : public store_za_base
 {
 public:
-  constexpr svst1_za_impl (int unspec) : m_unspec (unspec) {}
+  constexpr svst1_za_impl (unspec unspec) : m_unspec (unspec) {}
 
   rtx
   expand (function_expander &e) const override
@@ -412,7 +412,7 @@ public:
     return expand_ld1_st1 (e, m_unspec, false);
   }
 
-  int m_unspec;
+  unspec m_unspec;
 };
 
 class svstr_za_impl : public store_za_base
@@ -616,13 +616,13 @@ FUNCTION (arm_in_streaming_mode, arm_in_streaming_mode_impl, )
 FUNCTION (svadd_za, sme_1mode_function, (UNSPEC_SME_ADD, UNSPEC_SME_ADD,
                                         UNSPEC_SME_FADD))
 FUNCTION (svadd_write_za, sme_1mode_function, (UNSPEC_SME_ADD_WRITE,
-                                              UNSPEC_SME_ADD_WRITE, -1))
+                                              UNSPEC_SME_ADD_WRITE, UNSPEC_NONE))
 FUNCTION (svaddha_za, sme_1mode_function, (UNSPEC_SME_ADDHA,
-                                          UNSPEC_SME_ADDHA, -1))
+                                          UNSPEC_SME_ADDHA, UNSPEC_NONE))
 FUNCTION (svaddva_za, sme_1mode_function, (UNSPEC_SME_ADDVA,
-                                         UNSPEC_SME_ADDVA, -1))
-FUNCTION (svbmopa_za, sme_2mode_function, (-1, UNSPEC_SME_BMOPA, -1))
-FUNCTION (svbmops_za, sme_2mode_function, (-1, UNSPEC_SME_BMOPS, -1))
+                                         UNSPEC_SME_ADDVA, UNSPEC_NONE))
+FUNCTION (svbmopa_za, sme_2mode_function, (UNSPEC_NONE, UNSPEC_SME_BMOPA, UNSPEC_NONE))
+FUNCTION (svbmops_za, sme_2mode_function, (UNSPEC_NONE, UNSPEC_SME_BMOPS, UNSPEC_NONE))
 FUNCTION (svcntsb, svcnts_bhwd_impl, (VNx16QImode))
 FUNCTION (svcntsd, svcnts_bhwd_impl, (VNx2DImode))
 FUNCTION (svcntsh, svcnts_bhwd_impl, (VNx8HImode))
@@ -669,28 +669,28 @@ FUNCTION (svstr_zt, svstr_zt_impl, )
 FUNCTION (svsub_za, sme_1mode_function, (UNSPEC_SME_SUB, UNSPEC_SME_SUB,
                                         UNSPEC_SME_FSUB))
 FUNCTION (svsub_write_za, sme_1mode_function, (UNSPEC_SME_SUB_WRITE,
-                                              UNSPEC_SME_SUB_WRITE, -1))
+                                              UNSPEC_SME_SUB_WRITE, UNSPEC_NONE))
 FUNCTION (svsudot_za, svsudot_za_impl,)
-FUNCTION (svsudot_lane_za, sme_2mode_lane_function, (UNSPEC_SME_SUDOT, -1, -1))
+FUNCTION (svsudot_lane_za, sme_2mode_lane_function, (UNSPEC_SME_SUDOT, UNSPEC_NONE, UNSPEC_NONE))
 FUNCTION (svsuvdot_lane_za, sme_2mode_lane_function, (UNSPEC_SME_SUVDOT,
-                                                     -1, -1))
-FUNCTION (svsumopa_za, sme_2mode_function, (UNSPEC_SME_SUMOPA, -1, -1))
-FUNCTION (svsumops_za, sme_2mode_function, (UNSPEC_SME_SUMOPS, -1, -1))
+                                                     UNSPEC_NONE, UNSPEC_NONE))
+FUNCTION (svsumopa_za, sme_2mode_function, (UNSPEC_SME_SUMOPA, UNSPEC_NONE, UNSPEC_NONE))
+FUNCTION (svsumops_za, sme_2mode_function, (UNSPEC_SME_SUMOPS, UNSPEC_NONE, UNSPEC_NONE))
 FUNCTION (svundef_za, svundef_za_impl, )
-FUNCTION (svusdot_za, sme_2mode_function, (-1, UNSPEC_SME_USDOT, -1))
-FUNCTION (svusdot_lane_za, sme_2mode_lane_function, (-1, UNSPEC_SME_USDOT, -1))
-FUNCTION (svusvdot_lane_za, sme_2mode_lane_function, (-1, UNSPEC_SME_USVDOT,
-                                                     -1))
-FUNCTION (svusmopa_za, sme_2mode_function, (-1, UNSPEC_SME_USMOPA, -1))
-FUNCTION (svusmops_za, sme_2mode_function, (-1, UNSPEC_SME_USMOPS, -1))
+FUNCTION (svusdot_za, sme_2mode_function, (UNSPEC_NONE, UNSPEC_SME_USDOT, UNSPEC_NONE))
+FUNCTION (svusdot_lane_za, sme_2mode_lane_function, (UNSPEC_NONE, UNSPEC_SME_USDOT, UNSPEC_NONE))
+FUNCTION (svusvdot_lane_za, sme_2mode_lane_function, (UNSPEC_NONE, UNSPEC_SME_USVDOT,
+                                                     UNSPEC_NONE))
+FUNCTION (svusmopa_za, sme_2mode_function, (UNSPEC_NONE, UNSPEC_SME_USMOPA, UNSPEC_NONE))
+FUNCTION (svusmops_za, sme_2mode_function, (UNSPEC_NONE, UNSPEC_SME_USMOPS, UNSPEC_NONE))
 FUNCTION (svvdot_lane_za, sme_2mode_lane_function, (UNSPEC_SME_SVDOT,
                                                    UNSPEC_SME_UVDOT,
                                                    UNSPEC_SME_FVDOT,
                                                    UNSPEC_SME_FVDOT_FP8))
 FUNCTION (svvdotb_lane_za, svvdot_half_impl,
-         (-1, -1, -1, UNSPEC_SME_FVDOTB_FP8))
+         (UNSPEC_NONE, UNSPEC_NONE, UNSPEC_NONE, UNSPEC_SME_FVDOTB_FP8))
 FUNCTION (svvdott_lane_za, svvdot_half_impl,
-         (-1, -1, -1, UNSPEC_SME_FVDOTT_FP8))
+         (UNSPEC_NONE, UNSPEC_NONE, UNSPEC_NONE, UNSPEC_SME_FVDOTT_FP8))
 FUNCTION (svwrite_za, svwrite_za_impl,)
 FUNCTION (svwrite_hor_za, svwrite_za_tile_impl, (UNSPEC_SME_WRITE_HOR))
 FUNCTION (svwrite_ver_za, svwrite_za_tile_impl, (UNSPEC_SME_WRITE_VER))
diff --git a/gcc/config/aarch64/aarch64-sve-builtins-sve2.cc b/gcc/config/aarch64/aarch64-sve-builtins-sve2.cc
index c4c8bae86b8e..d5e11cd60bad 100644
--- a/gcc/config/aarch64/aarch64-sve-builtins-sve2.cc
+++ b/gcc/config/aarch64/aarch64-sve-builtins-sve2.cc
@@ -51,7 +51,7 @@ using namespace aarch64_sve;
 namespace {
 
 /* Return the UNSPEC_CDOT* unspec for rotation amount ROT.  */
-static int
+static unspec
 unspec_cdot (int rot)
 {
   switch (rot)
@@ -65,7 +65,7 @@ unspec_cdot (int rot)
 }
 
 /* Return the UNSPEC_SQRDCMLAH* unspec for rotation amount ROT.  */
-static int
+static unspec
 unspec_sqrdcmlah (int rot)
 {
   switch (rot)
@@ -225,7 +225,7 @@ class svcvt_fp8_impl : public function_base
 {
 public:
   CONSTEXPR
-  svcvt_fp8_impl (int unspec) : m_unspec (unspec) {}
+  svcvt_fp8_impl (unspec unspec) : m_unspec (unspec) {}
 
   rtx
   expand (function_expander &e) const override
@@ -234,7 +234,7 @@ public:
     return e.use_exact_insn (icode);
   }
 
-  int m_unspec;
+  unspec m_unspec;
 };
 
 class svcvtn_impl : public function_base
@@ -401,7 +401,7 @@ public:
 class svmatch_svnmatch_impl : public function_base
 {
 public:
-  CONSTEXPR svmatch_svnmatch_impl (int unspec) : m_unspec (unspec) {}
+  CONSTEXPR svmatch_svnmatch_impl (unspec unspec) : m_unspec (unspec) {}
   gimple *
   fold (gimple_folder &f) const override
   {
@@ -420,7 +420,7 @@ public:
                                                    e.vector_mode (0)));
   }
 
-  int m_unspec;
+  unspec m_unspec;
 };
 
 /* Implements both svmovlb and svmovlt; the unspec parameters decide
@@ -504,9 +504,9 @@ public:
     /* Convert the rotation amount into a specific unspec.  */
     int rot = INTVAL (e.args.pop ());
     if (rot == 90)
-      return e.map_to_unspecs (UNSPEC_SQCADD90, -1, -1);
+      return e.map_to_unspecs (UNSPEC_SQCADD90, UNSPEC_NONE, UNSPEC_NONE);
     if (rot == 270)
-      return e.map_to_unspecs (UNSPEC_SQCADD270, -1, -1);
+      return e.map_to_unspecs (UNSPEC_SQCADD270, UNSPEC_NONE, UNSPEC_NONE);
     gcc_unreachable ();
   }
 };
@@ -541,7 +541,7 @@ class svqrshl_impl : public unspec_based_function
 {
 public:
   CONSTEXPR svqrshl_impl ()
-    : unspec_based_function (UNSPEC_SQRSHL, UNSPEC_UQRSHL, -1) {}
+    : unspec_based_function (UNSPEC_SQRSHL, UNSPEC_UQRSHL, UNSPEC_NONE) {}
 
   gimple *
   fold (gimple_folder &f) const override
@@ -577,7 +577,7 @@ class svqshl_impl : public unspec_based_function
 {
 public:
   CONSTEXPR svqshl_impl ()
-    : unspec_based_function (UNSPEC_SQSHL, UNSPEC_UQSHL, -1) {}
+    : unspec_based_function (UNSPEC_SQSHL, UNSPEC_UQSHL, UNSPEC_NONE) {}
 
   gimple *
   fold (gimple_folder &f) const override
@@ -614,7 +614,7 @@ class svrshl_impl : public unspec_based_function
 {
 public:
   CONSTEXPR svrshl_impl ()
-    : unspec_based_function (UNSPEC_SRSHL, UNSPEC_URSHL, -1) {}
+    : unspec_based_function (UNSPEC_SRSHL, UNSPEC_URSHL, UNSPEC_NONE) {}
 
   gimple *
   fold (gimple_folder &f) const override
@@ -664,8 +664,8 @@ public:
     machine_mode mode = e.vector_mode (0);
     if (e.pred == PRED_x
        && aarch64_sve_sqadd_sqsub_immediate_p (mode, e.args[2], false))
-      return e.map_to_rtx_codes (UNKNOWN, US_PLUS, -1, -1);
-    return e.map_to_unspecs (-1, UNSPEC_USQADD, -1);
+      return e.map_to_rtx_codes (UNKNOWN, US_PLUS, UNSPEC_NONE, UNSPEC_NONE);
+    return e.map_to_unspecs (UNSPEC_NONE, UNSPEC_USQADD, UNSPEC_NONE);
   }
 };
 
@@ -837,7 +837,7 @@ public:
     if (e.pred == PRED_x
        && aarch64_sve_arith_immediate_p (mode, e.args[2], false))
       return e.use_unpred_insn (code_for_aarch64_sve_suqadd_const (mode));
-    return e.map_to_unspecs (UNSPEC_SUQADD, -1, -1);
+    return e.map_to_unspecs (UNSPEC_SUQADD, UNSPEC_NONE, UNSPEC_NONE);
   }
 };
 
@@ -874,7 +874,7 @@ public:
 class svwhilerw_svwhilewr_impl : public full_width_access
 {
 public:
-  CONSTEXPR svwhilerw_svwhilewr_impl (int unspec) : m_unspec (unspec) {}
+  CONSTEXPR svwhilerw_svwhilewr_impl (unspec unspec) : m_unspec (unspec) {}
 
   rtx
   expand (function_expander &e) const override
@@ -886,7 +886,7 @@ public:
     return e.use_exact_insn (icode);
   }
 
-  int m_unspec;
+  unspec m_unspec;
 };
 
 /* Implements svzipq1 and svzipq2.  */
@@ -937,7 +937,7 @@ public:
 class faminmaximpl : public function_base
 {
 public:
-  CONSTEXPR faminmaximpl (int cond_unspec, int uncond_unspec)
+  CONSTEXPR faminmaximpl (unspec cond_unspec, unspec uncond_unspec)
     : m_cond_unspec (cond_unspec), m_uncond_unspec (uncond_unspec)
     {}
 
@@ -966,8 +966,8 @@ public:
 
   /* The unspecs for the conditional and unconditional instructions,
      respectively.  */
-  int m_cond_unspec;
-  int m_uncond_unspec;
+  unspec m_cond_unspec;
+  unspec m_uncond_unspec;
 };
 
 } /* end anonymous namespace */
@@ -976,41 +976,41 @@ namespace aarch64_sve {
 
 FUNCTION (svaba, svaba_impl,)
 FUNCTION (svabalb, unspec_based_add_function, (UNSPEC_SABDLB,
-                                              UNSPEC_UABDLB, -1))
+                                              UNSPEC_UABDLB, UNSPEC_NONE))
 FUNCTION (svabalt, unspec_based_add_function, (UNSPEC_SABDLT,
-                                              UNSPEC_UABDLT, -1))
-FUNCTION (svabdlb, unspec_based_function, (UNSPEC_SABDLB, UNSPEC_UABDLB, -1))
-FUNCTION (svabdlt, unspec_based_function, (UNSPEC_SABDLT, UNSPEC_UABDLT, -1))
-FUNCTION (svadalp, unspec_based_function, (UNSPEC_SADALP, UNSPEC_UADALP, -1))
-FUNCTION (svadclb, unspec_based_function, (-1, UNSPEC_ADCLB, -1))
-FUNCTION (svadclt, unspec_based_function, (-1, UNSPEC_ADCLT, -1))
-FUNCTION (svaddhnb, unspec_based_function, (UNSPEC_ADDHNB, UNSPEC_ADDHNB, -1))
-FUNCTION (svaddhnt, unspec_based_function, (UNSPEC_ADDHNT, UNSPEC_ADDHNT, -1))
-FUNCTION (svaddlb, unspec_based_function, (UNSPEC_SADDLB, UNSPEC_UADDLB, -1))
-FUNCTION (svaddlbt, unspec_based_function, (UNSPEC_SADDLBT, -1, -1))
-FUNCTION (svaddlt, unspec_based_function, (UNSPEC_SADDLT, UNSPEC_UADDLT, -1))
+                                              UNSPEC_UABDLT, UNSPEC_NONE))
+FUNCTION (svabdlb, unspec_based_function, (UNSPEC_SABDLB, UNSPEC_UABDLB, UNSPEC_NONE))
+FUNCTION (svabdlt, unspec_based_function, (UNSPEC_SABDLT, UNSPEC_UABDLT, UNSPEC_NONE))
+FUNCTION (svadalp, unspec_based_function, (UNSPEC_SADALP, UNSPEC_UADALP, UNSPEC_NONE))
+FUNCTION (svadclb, unspec_based_function, (UNSPEC_NONE, UNSPEC_ADCLB, UNSPEC_NONE))
+FUNCTION (svadclt, unspec_based_function, (UNSPEC_NONE, UNSPEC_ADCLT, UNSPEC_NONE))
+FUNCTION (svaddhnb, unspec_based_function, (UNSPEC_ADDHNB, UNSPEC_ADDHNB, UNSPEC_NONE))
+FUNCTION (svaddhnt, unspec_based_function, (UNSPEC_ADDHNT, UNSPEC_ADDHNT, UNSPEC_NONE))
+FUNCTION (svaddlb, unspec_based_function, (UNSPEC_SADDLB, UNSPEC_UADDLB, UNSPEC_NONE))
+FUNCTION (svaddlbt, unspec_based_function, (UNSPEC_SADDLBT, UNSPEC_NONE, UNSPEC_NONE))
+FUNCTION (svaddlt, unspec_based_function, (UNSPEC_SADDLT, UNSPEC_UADDLT, UNSPEC_NONE))
 FUNCTION (svaddp, unspec_based_pred_function, (UNSPEC_ADDP, UNSPEC_ADDP,
                                               UNSPEC_FADDP))
 FUNCTION (svaddqv, reduction, (UNSPEC_ADDQV, UNSPEC_ADDQV, UNSPEC_FADDQV))
-FUNCTION (svaddwb, unspec_based_function, (UNSPEC_SADDWB, UNSPEC_UADDWB, -1))
-FUNCTION (svaddwt, unspec_based_function, (UNSPEC_SADDWT, UNSPEC_UADDWT, -1))
+FUNCTION (svaddwb, unspec_based_function, (UNSPEC_SADDWB, UNSPEC_UADDWB, UNSPEC_NONE))
+FUNCTION (svaddwt, unspec_based_function, (UNSPEC_SADDWT, UNSPEC_UADDWT, UNSPEC_NONE))
 FUNCTION (svaesd, fixed_insn_function, (CODE_FOR_aarch64_sve2_aesd))
 FUNCTION (svaese, fixed_insn_function, (CODE_FOR_aarch64_sve2_aese))
 FUNCTION (svaesimc, fixed_insn_function, (CODE_FOR_aarch64_sve2_aesimc))
 FUNCTION (svaesmc, fixed_insn_function, (CODE_FOR_aarch64_sve2_aesmc))
 FUNCTION (svamax, faminmaximpl, (UNSPEC_COND_FAMAX, UNSPEC_FAMAX))
 FUNCTION (svamin, faminmaximpl, (UNSPEC_COND_FAMIN, UNSPEC_FAMIN))
-FUNCTION (svandqv, reduction, (UNSPEC_ANDQV, UNSPEC_ANDQV, -1))
+FUNCTION (svandqv, reduction, (UNSPEC_ANDQV, UNSPEC_ANDQV, UNSPEC_NONE))
 FUNCTION (svbcax, CODE_FOR_MODE0 (aarch64_sve2_bcax),)
-FUNCTION (svbdep, unspec_based_function, (UNSPEC_BDEP, UNSPEC_BDEP, -1))
-FUNCTION (svbext, unspec_based_function, (UNSPEC_BEXT, UNSPEC_BEXT, -1))
+FUNCTION (svbdep, unspec_based_function, (UNSPEC_BDEP, UNSPEC_BDEP, UNSPEC_NONE))
+FUNCTION (svbext, unspec_based_function, (UNSPEC_BEXT, UNSPEC_BEXT, UNSPEC_NONE))
 FUNCTION (svbfmlslb, fixed_insn_function, (CODE_FOR_aarch64_sve_bfmlslbvnx4sf))
 FUNCTION (svbfmlslb_lane, fixed_insn_function,
          (CODE_FOR_aarch64_sve_bfmlslb_lanevnx4sf))
 FUNCTION (svbfmlslt, fixed_insn_function, (CODE_FOR_aarch64_sve_bfmlsltvnx4sf))
 FUNCTION (svbfmlslt_lane, fixed_insn_function,
          (CODE_FOR_aarch64_sve_bfmlslt_lanevnx4sf))
-FUNCTION (svbgrp, unspec_based_function, (UNSPEC_BGRP, UNSPEC_BGRP, -1))
+FUNCTION (svbgrp, unspec_based_function, (UNSPEC_BGRP, UNSPEC_BGRP, UNSPEC_NONE))
 FUNCTION (svbsl, CODE_FOR_MODE0 (aarch64_sve2_bsl),)
 FUNCTION (svbsl1n, CODE_FOR_MODE0 (aarch64_sve2_bsl1n),)
 FUNCTION (svbsl2n, CODE_FOR_MODE0 (aarch64_sve2_bsl2n),)
@@ -1024,23 +1024,23 @@ FUNCTION (svcvtl1, svcvt_fp8_impl, (UNSPEC_F1CVTL))
 FUNCTION (svcvtl2, svcvt_fp8_impl, (UNSPEC_F2CVTL))
 FUNCTION (svcvtlt1, svcvt_fp8_impl, (UNSPEC_F1CVTLT))
 FUNCTION (svcvtlt2, svcvt_fp8_impl, (UNSPEC_F2CVTLT))
-FUNCTION (svcvtlt, unspec_based_function, (-1, -1, UNSPEC_COND_FCVTLT))
+FUNCTION (svcvtlt, unspec_based_function, (UNSPEC_NONE, UNSPEC_NONE, UNSPEC_COND_FCVTLT))
 FUNCTION (svcvtn, svcvtn_impl,)
 FUNCTION (svcvtnb, fixed_insn_function, (CODE_FOR_aarch64_sve2_fp8_cvtnbvnx16qi))
-FUNCTION (svcvtx, unspec_based_function, (-1, -1, UNSPEC_COND_FCVTX))
+FUNCTION (svcvtx, unspec_based_function, (UNSPEC_NONE, UNSPEC_NONE, UNSPEC_COND_FCVTX))
 FUNCTION (svcvtxnt, svcvtxnt_impl,)
 FUNCTION (svdup_laneq, svdup_laneq_impl,)
 FUNCTION (sveor3, CODE_FOR_MODE0 (aarch64_sve2_eor3),)
-FUNCTION (sveorbt, unspec_based_function, (UNSPEC_EORBT, UNSPEC_EORBT, -1))
-FUNCTION (sveorqv, reduction, (UNSPEC_EORQV, UNSPEC_EORQV, -1))
-FUNCTION (sveortb, unspec_based_function, (UNSPEC_EORTB, UNSPEC_EORTB, -1))
+FUNCTION (sveorbt, unspec_based_function, (UNSPEC_EORBT, UNSPEC_EORBT, UNSPEC_NONE))
+FUNCTION (sveorqv, reduction, (UNSPEC_EORQV, UNSPEC_EORQV, UNSPEC_NONE))
+FUNCTION (sveortb, unspec_based_function, (UNSPEC_EORTB, UNSPEC_EORTB, UNSPEC_NONE))
 FUNCTION (svextq, svextq_impl,)
-FUNCTION (svhadd, unspec_based_function, (UNSPEC_SHADD, UNSPEC_UHADD, -1))
-FUNCTION (svhsub, unspec_based_function, (UNSPEC_SHSUB, UNSPEC_UHSUB, -1))
+FUNCTION (svhadd, unspec_based_function, (UNSPEC_SHADD, UNSPEC_UHADD, UNSPEC_NONE))
+FUNCTION (svhsub, unspec_based_function, (UNSPEC_SHSUB, UNSPEC_UHSUB, UNSPEC_NONE))
 FUNCTION (svhistcnt, CODE_FOR_MODE0 (aarch64_sve2_histcnt),)
 FUNCTION (svhistseg, CODE_FOR_MODE0 (aarch64_sve2_histseg),)
 FUNCTION (svhsubr, unspec_based_function_rotated, (UNSPEC_SHSUB,
-                                                  UNSPEC_UHSUB, -1))
+                                                  UNSPEC_UHSUB, UNSPEC_NONE))
 FUNCTION (svld1q_gather, svld1q_gather_impl,)
 FUNCTION (svld1udq, svld1uxq_impl, (VNx1DImode))
 FUNCTION (svld1uwq, svld1uxq_impl, (VNx1SImode))
@@ -1054,15 +1054,15 @@ FUNCTION (svldnt1sw_gather, svldnt1_gather_extend_impl, (TYPE_SUFFIX_s32))
 FUNCTION (svldnt1ub_gather, svldnt1_gather_extend_impl, (TYPE_SUFFIX_u8))
 FUNCTION (svldnt1uh_gather, svldnt1_gather_extend_impl, (TYPE_SUFFIX_u16))
 FUNCTION (svldnt1uw_gather, svldnt1_gather_extend_impl, (TYPE_SUFFIX_u32))
-FUNCTION (svlogb, unspec_based_function, (-1, -1, UNSPEC_COND_FLOGB))
+FUNCTION (svlogb, unspec_based_function, (UNSPEC_NONE, UNSPEC_NONE, UNSPEC_COND_FLOGB))
 FUNCTION (svmatch, svmatch_svnmatch_impl, (UNSPEC_MATCH))
-FUNCTION (svmaxnmp, unspec_based_pred_function, (-1, -1, UNSPEC_FMAXNMP))
-FUNCTION (svmaxnmqv, reduction, (-1, -1, UNSPEC_FMAXNMQV))
+FUNCTION (svmaxnmp, unspec_based_pred_function, (UNSPEC_NONE, UNSPEC_NONE, UNSPEC_FMAXNMP))
+FUNCTION (svmaxnmqv, reduction, (UNSPEC_NONE, UNSPEC_NONE, UNSPEC_FMAXNMQV))
 FUNCTION (svmaxp, unspec_based_pred_function, (UNSPEC_SMAXP, UNSPEC_UMAXP,
                                               UNSPEC_FMAXP))
 FUNCTION (svmaxqv, reduction, (UNSPEC_SMAXQV, UNSPEC_UMAXQV, UNSPEC_FMAXQV))
-FUNCTION (svminnmp, unspec_based_pred_function, (-1, -1, UNSPEC_FMINNMP))
-FUNCTION (svminnmqv, reduction, (-1, -1, UNSPEC_FMINNMQV))
+FUNCTION (svminnmp, unspec_based_pred_function, (UNSPEC_NONE, UNSPEC_NONE, UNSPEC_FMINNMP))
+FUNCTION (svminnmqv, reduction, (UNSPEC_NONE, UNSPEC_NONE, UNSPEC_FMINNMQV))
 FUNCTION (svminp, unspec_based_pred_function, (UNSPEC_SMINP, UNSPEC_UMINP,
                                               UNSPEC_FMINP))
 FUNCTION (svminqv, reduction, (UNSPEC_SMINQV, UNSPEC_UMINQV, UNSPEC_FMINQV))
@@ -1073,21 +1073,21 @@ FUNCTION (svmlalb, unspec_based_mla_function,
          (UNSPEC_SMULLB, UNSPEC_UMULLB, UNSPEC_FMLALB,
           UNSPEC_FMLALB_FP8))
 FUNCTION (svmlallbb_lane, unspec_based_mla_lane_function,
-         (-1, -1, -1, UNSPEC_FMLALLBB_FP8))
+         (UNSPEC_NONE, UNSPEC_NONE, UNSPEC_NONE, UNSPEC_FMLALLBB_FP8))
 FUNCTION (svmlallbb, unspec_based_mla_function,
-         (-1, -1, -1, UNSPEC_FMLALLBB_FP8))
+         (UNSPEC_NONE, UNSPEC_NONE, UNSPEC_NONE, UNSPEC_FMLALLBB_FP8))
 FUNCTION (svmlallbt_lane, unspec_based_mla_lane_function,
-         (-1, -1, -1, UNSPEC_FMLALLBT_FP8))
+         (UNSPEC_NONE, UNSPEC_NONE, UNSPEC_NONE, UNSPEC_FMLALLBT_FP8))
 FUNCTION (svmlallbt, unspec_based_mla_function,
-         (-1, -1, -1, UNSPEC_FMLALLBT_FP8))
+         (UNSPEC_NONE, UNSPEC_NONE, UNSPEC_NONE, UNSPEC_FMLALLBT_FP8))
 FUNCTION (svmlalltb_lane, unspec_based_mla_lane_function,
-         (-1, -1, -1, UNSPEC_FMLALLTB_FP8))
+         (UNSPEC_NONE, UNSPEC_NONE, UNSPEC_NONE, UNSPEC_FMLALLTB_FP8))
 FUNCTION (svmlalltb, unspec_based_mla_function,
-         (-1, -1, -1, UNSPEC_FMLALLTB_FP8))
+         (UNSPEC_NONE, UNSPEC_NONE, UNSPEC_NONE, UNSPEC_FMLALLTB_FP8))
 FUNCTION (svmlalltt_lane, unspec_based_mla_lane_function,
-         (-1, -1, -1, UNSPEC_FMLALLTT_FP8))
+         (UNSPEC_NONE, UNSPEC_NONE, UNSPEC_NONE, UNSPEC_FMLALLTT_FP8))
 FUNCTION (svmlalltt, unspec_based_mla_function,
-         (-1, -1, -1, UNSPEC_FMLALLTT_FP8))
+         (UNSPEC_NONE, UNSPEC_NONE, UNSPEC_NONE, UNSPEC_FMLALLTT_FP8))
 FUNCTION (svmlalt_lane, unspec_based_mla_lane_function,
          (UNSPEC_SMULLT, UNSPEC_UMULLT, UNSPEC_FMLALT,
           UNSPEC_FMLALT_FP8))
@@ -1104,121 +1104,121 @@ FUNCTION (svmlslt, unspec_based_mls_function, (UNSPEC_SMULLT,
 FUNCTION (svmlslt_lane, unspec_based_mls_lane_function, (UNSPEC_SMULLT,
                                                         UNSPEC_UMULLT,
                                                         UNSPEC_FMLSLT))
-FUNCTION (svmovlb, svmovl_lb_impl, (UNSPEC_SSHLLB, UNSPEC_USHLLB, -1))
-FUNCTION (svmovlt, svmovl_lb_impl, (UNSPEC_SSHLLT, UNSPEC_USHLLT, -1))
-FUNCTION (svmullb, unspec_based_function, (UNSPEC_SMULLB, UNSPEC_UMULLB, -1))
+FUNCTION (svmovlb, svmovl_lb_impl, (UNSPEC_SSHLLB, UNSPEC_USHLLB, UNSPEC_NONE))
+FUNCTION (svmovlt, svmovl_lb_impl, (UNSPEC_SSHLLT, UNSPEC_USHLLT, UNSPEC_NONE))
+FUNCTION (svmullb, unspec_based_function, (UNSPEC_SMULLB, UNSPEC_UMULLB, UNSPEC_NONE))
 FUNCTION (svmullb_lane, unspec_based_lane_function, (UNSPEC_SMULLB,
-                                                    UNSPEC_UMULLB, -1))
-FUNCTION (svmullt, unspec_based_function, (UNSPEC_SMULLT, UNSPEC_UMULLT, -1))
+                                                    UNSPEC_UMULLB, UNSPEC_NONE))
+FUNCTION (svmullt, unspec_based_function, (UNSPEC_SMULLT, UNSPEC_UMULLT, UNSPEC_NONE))
 FUNCTION (svmullt_lane, unspec_based_lane_function, (UNSPEC_SMULLT,
-                                                    UNSPEC_UMULLT, -1))
+                                                    UNSPEC_UMULLT, UNSPEC_NONE))
 FUNCTION (svnbsl, CODE_FOR_MODE0 (aarch64_sve2_nbsl),)
 FUNCTION (svnmatch, svmatch_svnmatch_impl, (UNSPEC_NMATCH))
-FUNCTION (svorqv, reduction, (UNSPEC_ORQV, UNSPEC_ORQV, -1))
+FUNCTION (svorqv, reduction, (UNSPEC_ORQV, UNSPEC_ORQV, UNSPEC_NONE))
 FUNCTION (svpext_lane, svpext_lane_impl,)
 FUNCTION (svpmov, svpmov_impl,)
 FUNCTION (svpmov_lane, svpmov_lane_impl,)
 FUNCTION (svpmul, CODE_FOR_MODE0 (aarch64_sve2_pmul),)
-FUNCTION (svpmullb, unspec_based_function, (-1, UNSPEC_PMULLB, -1))
-FUNCTION (svpmullb_pair, unspec_based_function, (-1, UNSPEC_PMULLB_PAIR, -1))
-FUNCTION (svpmullt, unspec_based_function, (-1, UNSPEC_PMULLT, -1))
-FUNCTION (svpmullt_pair, unspec_based_function, (-1, UNSPEC_PMULLT_PAIR, -1))
+FUNCTION (svpmullb, unspec_based_function, (UNSPEC_NONE, UNSPEC_PMULLB, UNSPEC_NONE))
+FUNCTION (svpmullb_pair, unspec_based_function, (UNSPEC_NONE, UNSPEC_PMULLB_PAIR, UNSPEC_NONE))
+FUNCTION (svpmullt, unspec_based_function, (UNSPEC_NONE, UNSPEC_PMULLT, UNSPEC_NONE))
+FUNCTION (svpmullt_pair, unspec_based_function, (UNSPEC_NONE, UNSPEC_PMULLT_PAIR, UNSPEC_NONE))
 FUNCTION (svpsel_lane, svpsel_lane_impl,)
-FUNCTION (svqabs, rtx_code_function, (SS_ABS, UNKNOWN, UNKNOWN))
+FUNCTION (svqabs, rtx_code_function, (SS_ABS, UNKNOWN, UNSPEC_NONE))
 FUNCTION (svqcadd, svqcadd_impl,)
 FUNCTION (svqcvt, integer_conversion, (UNSPEC_SQCVT, UNSPEC_SQCVTU,
-                                      UNSPEC_UQCVT, -1))
+                                      UNSPEC_UQCVT, UNSPEC_NONE))
 FUNCTION (svqcvtn, integer_conversion, (UNSPEC_SQCVTN, UNSPEC_SQCVTUN,
-                                       UNSPEC_UQCVTN, -1))
-FUNCTION (svqdmlalb, unspec_based_qadd_function, (UNSPEC_SQDMULLB, -1, -1))
+                                       UNSPEC_UQCVTN, UNSPEC_NONE))
+FUNCTION (svqdmlalb, unspec_based_qadd_function, (UNSPEC_SQDMULLB, UNSPEC_NONE, UNSPEC_NONE))
 FUNCTION (svqdmlalb_lane, unspec_based_qadd_lane_function, (UNSPEC_SQDMULLB,
-                                                           -1, -1))
-FUNCTION (svqdmlalbt, unspec_based_qadd_function, (UNSPEC_SQDMULLBT, -1, -1))
-FUNCTION (svqdmlalt, unspec_based_qadd_function, (UNSPEC_SQDMULLT, -1, -1))
+                                                           UNSPEC_NONE, UNSPEC_NONE))
+FUNCTION (svqdmlalbt, unspec_based_qadd_function, (UNSPEC_SQDMULLBT, UNSPEC_NONE, UNSPEC_NONE))
+FUNCTION (svqdmlalt, unspec_based_qadd_function, (UNSPEC_SQDMULLT, UNSPEC_NONE, UNSPEC_NONE))
 FUNCTION (svqdmlalt_lane, unspec_based_qadd_lane_function, (UNSPEC_SQDMULLT,
-                                                           -1, -1))
-FUNCTION (svqdmlslb, unspec_based_qsub_function, (UNSPEC_SQDMULLB, -1, -1))
+                                                           UNSPEC_NONE, UNSPEC_NONE))
+FUNCTION (svqdmlslb, unspec_based_qsub_function, (UNSPEC_SQDMULLB, UNSPEC_NONE, UNSPEC_NONE))
 FUNCTION (svqdmlslb_lane, unspec_based_qsub_lane_function, (UNSPEC_SQDMULLB,
-                                                           -1, -1))
-FUNCTION (svqdmlslbt, unspec_based_qsub_function, (UNSPEC_SQDMULLBT, -1, -1))
-FUNCTION (svqdmlslt, unspec_based_qsub_function, (UNSPEC_SQDMULLT, -1, -1))
+                                                           UNSPEC_NONE, UNSPEC_NONE))
+FUNCTION (svqdmlslbt, unspec_based_qsub_function, (UNSPEC_SQDMULLBT, UNSPEC_NONE, UNSPEC_NONE))
+FUNCTION (svqdmlslt, unspec_based_qsub_function, (UNSPEC_SQDMULLT, UNSPEC_NONE, UNSPEC_NONE))
 FUNCTION (svqdmlslt_lane, unspec_based_qsub_lane_function, (UNSPEC_SQDMULLT,
-                                                           -1, -1))
-FUNCTION (svqdmulh, unspec_based_function, (UNSPEC_SQDMULH, -1, -1))
-FUNCTION (svqdmulh_lane, unspec_based_lane_function, (UNSPEC_SQDMULH, -1, -1))
-FUNCTION (svqdmullb, unspec_based_function, (UNSPEC_SQDMULLB, -1, -1))
+                                                           UNSPEC_NONE, UNSPEC_NONE))
+FUNCTION (svqdmulh, unspec_based_function, (UNSPEC_SQDMULH, UNSPEC_NONE, UNSPEC_NONE))
+FUNCTION (svqdmulh_lane, unspec_based_lane_function, (UNSPEC_SQDMULH, UNSPEC_NONE, UNSPEC_NONE))
+FUNCTION (svqdmullb, unspec_based_function, (UNSPEC_SQDMULLB, UNSPEC_NONE, UNSPEC_NONE))
 FUNCTION (svqdmullb_lane, unspec_based_lane_function, (UNSPEC_SQDMULLB,
-                                                      -1, -1))
-FUNCTION (svqdmullt, unspec_based_function, (UNSPEC_SQDMULLT, -1, -1))
+                                                      UNSPEC_NONE, UNSPEC_NONE))
+FUNCTION (svqdmullt, unspec_based_function, (UNSPEC_SQDMULLT, UNSPEC_NONE, UNSPEC_NONE))
 FUNCTION (svqdmullt_lane, unspec_based_lane_function, (UNSPEC_SQDMULLT,
-                                                      -1, -1))
-FUNCTION (svqneg, rtx_code_function, (SS_NEG, UNKNOWN, UNKNOWN))
+                                                      UNSPEC_NONE, UNSPEC_NONE))
+FUNCTION (svqneg, rtx_code_function, (SS_NEG, UNKNOWN, UNSPEC_NONE))
 FUNCTION (svqrdcmlah, svqrdcmlah_impl,)
 FUNCTION (svqrdcmlah_lane, svqrdcmlah_lane_impl,)
-FUNCTION (svqrdmlah, unspec_based_function, (UNSPEC_SQRDMLAH, -1, -1))
+FUNCTION (svqrdmlah, unspec_based_function, (UNSPEC_SQRDMLAH, UNSPEC_NONE, UNSPEC_NONE))
 FUNCTION (svqrdmlah_lane, unspec_based_lane_function, (UNSPEC_SQRDMLAH,
-                                                      -1, -1))
-FUNCTION (svqrdmlsh, unspec_based_function, (UNSPEC_SQRDMLSH, -1, -1))
+                                                      UNSPEC_NONE, UNSPEC_NONE))
+FUNCTION (svqrdmlsh, unspec_based_function, (UNSPEC_SQRDMLSH, UNSPEC_NONE, UNSPEC_NONE))
 FUNCTION (svqrdmlsh_lane, unspec_based_lane_function, (UNSPEC_SQRDMLSH,
-                                                      -1, -1))
-FUNCTION (svqrdmulh, unspec_based_function, (UNSPEC_SQRDMULH, -1, -1))
+                                                      UNSPEC_NONE, UNSPEC_NONE))
+FUNCTION (svqrdmulh, unspec_based_function, (UNSPEC_SQRDMULH, UNSPEC_NONE, UNSPEC_NONE))
 FUNCTION (svqrdmulh_lane, unspec_based_lane_function, (UNSPEC_SQRDMULH,
-                                                      -1, -1))
+                                                      UNSPEC_NONE, UNSPEC_NONE))
 FUNCTION (svqrshl, svqrshl_impl,)
 FUNCTION (svqrshr, unspec_based_uncond_function, (UNSPEC_SQRSHR,
-                                                 UNSPEC_UQRSHR, -1, -1, 1))
+                                                 UNSPEC_UQRSHR, UNSPEC_NONE, UNSPEC_NONE, 1))
 FUNCTION (svqrshrn, unspec_based_uncond_function, (UNSPEC_SQRSHRN,
-                                                  UNSPEC_UQRSHRN, -1, -1, 1))
+                                                  UNSPEC_UQRSHRN, UNSPEC_NONE, UNSPEC_NONE, 1))
 FUNCTION (svqrshrnb, unspec_based_function, (UNSPEC_SQRSHRNB,
-                                            UNSPEC_UQRSHRNB, -1))
+                                            UNSPEC_UQRSHRNB, UNSPEC_NONE))
 FUNCTION (svqrshrnt, unspec_based_function, (UNSPEC_SQRSHRNT,
-                                            UNSPEC_UQRSHRNT, -1))
-FUNCTION (svqrshru, unspec_based_uncond_function, (UNSPEC_SQRSHRU, -1, -1, -1, 1))
-FUNCTION (svqrshrun, unspec_based_uncond_function, (UNSPEC_SQRSHRUN, -1, -1, -1, 1))
-FUNCTION (svqrshrunb, unspec_based_function, (UNSPEC_SQRSHRUNB, -1, -1))
-FUNCTION (svqrshrunt, unspec_based_function, (UNSPEC_SQRSHRUNT, -1, -1))
+                                            UNSPEC_UQRSHRNT, UNSPEC_NONE))
+FUNCTION (svqrshru, unspec_based_uncond_function, (UNSPEC_SQRSHRU, UNSPEC_NONE, UNSPEC_NONE, UNSPEC_NONE, 1))
+FUNCTION (svqrshrun, unspec_based_uncond_function, (UNSPEC_SQRSHRUN, UNSPEC_NONE, UNSPEC_NONE, UNSPEC_NONE, 1))
+FUNCTION (svqrshrunb, unspec_based_function, (UNSPEC_SQRSHRUNB, UNSPEC_NONE, UNSPEC_NONE))
+FUNCTION (svqrshrunt, unspec_based_function, (UNSPEC_SQRSHRUNT, UNSPEC_NONE, UNSPEC_NONE))
 FUNCTION (svqshl, svqshl_impl,)
-FUNCTION (svqshlu, unspec_based_function, (UNSPEC_SQSHLU, -1, -1))
+FUNCTION (svqshlu, unspec_based_function, (UNSPEC_SQSHLU, UNSPEC_NONE, UNSPEC_NONE))
 FUNCTION (svqshrnb, unspec_based_function, (UNSPEC_SQSHRNB,
-                                           UNSPEC_UQSHRNB, -1))
+                                           UNSPEC_UQSHRNB, UNSPEC_NONE))
 FUNCTION (svqshrnt, unspec_based_function, (UNSPEC_SQSHRNT,
-                                           UNSPEC_UQSHRNT, -1))
-FUNCTION (svqshrunb, unspec_based_function, (UNSPEC_SQSHRUNB, -1, -1))
-FUNCTION (svqshrunt, unspec_based_function, (UNSPEC_SQSHRUNT, -1, -1))
-FUNCTION (svqsubr, rtx_code_function_rotated, (SS_MINUS, US_MINUS, -1))
-FUNCTION (svqxtnb, unspec_based_function, (UNSPEC_SQXTNB, UNSPEC_UQXTNB, -1))
-FUNCTION (svqxtnt, unspec_based_function, (UNSPEC_SQXTNT, UNSPEC_UQXTNT, -1))
-FUNCTION (svqxtunb, unspec_based_function, (UNSPEC_SQXTUNB, -1, -1))
-FUNCTION (svqxtunt, unspec_based_function, (UNSPEC_SQXTUNT, -1, -1))
+                                           UNSPEC_UQSHRNT, UNSPEC_NONE))
+FUNCTION (svqshrunb, unspec_based_function, (UNSPEC_SQSHRUNB, UNSPEC_NONE, UNSPEC_NONE))
+FUNCTION (svqshrunt, unspec_based_function, (UNSPEC_SQSHRUNT, UNSPEC_NONE, UNSPEC_NONE))
+FUNCTION (svqsubr, rtx_code_function_rotated, (SS_MINUS, US_MINUS, UNSPEC_NONE))
+FUNCTION (svqxtnb, unspec_based_function, (UNSPEC_SQXTNB, UNSPEC_UQXTNB, UNSPEC_NONE))
+FUNCTION (svqxtnt, unspec_based_function, (UNSPEC_SQXTNT, UNSPEC_UQXTNT, UNSPEC_NONE))
+FUNCTION (svqxtunb, unspec_based_function, (UNSPEC_SQXTUNB, UNSPEC_NONE, UNSPEC_NONE))
+FUNCTION (svqxtunt, unspec_based_function, (UNSPEC_SQXTUNT, UNSPEC_NONE, UNSPEC_NONE))
 FUNCTION (svraddhnb, unspec_based_function, (UNSPEC_RADDHNB,
-                                            UNSPEC_RADDHNB, -1))
+                                            UNSPEC_RADDHNB, UNSPEC_NONE))
 FUNCTION (svraddhnt, unspec_based_function, (UNSPEC_RADDHNT,
-                                            UNSPEC_RADDHNT, -1))
+                                            UNSPEC_RADDHNT, UNSPEC_NONE))
 FUNCTION (svrax1, fixed_insn_function, (CODE_FOR_aarch64_sve2_rax1))
 FUNCTION (svrevd, unspec_based_function, (UNSPEC_REVD, UNSPEC_REVD,
                                          UNSPEC_REVD))
-FUNCTION (svrhadd, unspec_based_function, (UNSPEC_SRHADD, UNSPEC_URHADD, -1))
+FUNCTION (svrhadd, unspec_based_function, (UNSPEC_SRHADD, UNSPEC_URHADD, UNSPEC_NONE))
 FUNCTION (svrshl, svrshl_impl,)
-FUNCTION (svrshr, unspec_based_function, (UNSPEC_SRSHR, UNSPEC_URSHR, -1))
-FUNCTION (svrshrnb, unspec_based_function, (UNSPEC_RSHRNB, UNSPEC_RSHRNB, -1))
-FUNCTION (svrshrnt, unspec_based_function, (UNSPEC_RSHRNT, UNSPEC_RSHRNT, -1))
-FUNCTION (svrsra, unspec_based_add_function, (UNSPEC_SRSHR, UNSPEC_URSHR, -1))
+FUNCTION (svrshr, unspec_based_function, (UNSPEC_SRSHR, UNSPEC_URSHR, UNSPEC_NONE))
+FUNCTION (svrshrnb, unspec_based_function, (UNSPEC_RSHRNB, UNSPEC_RSHRNB, UNSPEC_NONE))
+FUNCTION (svrshrnt, unspec_based_function, (UNSPEC_RSHRNT, UNSPEC_RSHRNT, UNSPEC_NONE))
+FUNCTION (svrsra, unspec_based_add_function, (UNSPEC_SRSHR, UNSPEC_URSHR, UNSPEC_NONE))
 FUNCTION (svrsubhnb, unspec_based_function, (UNSPEC_RSUBHNB,
-                                            UNSPEC_RSUBHNB, -1))
+                                            UNSPEC_RSUBHNB, UNSPEC_NONE))
 FUNCTION (svrsubhnt, unspec_based_function, (UNSPEC_RSUBHNT,
-                                            UNSPEC_RSUBHNT, -1))
-FUNCTION (svsbclb, unspec_based_function, (-1, UNSPEC_SBCLB, -1))
-FUNCTION (svsbclt, unspec_based_function, (-1, UNSPEC_SBCLT, -1))
-FUNCTION (svshllb, unspec_based_function, (UNSPEC_SSHLLB, UNSPEC_USHLLB, -1))
-FUNCTION (svshllt, unspec_based_function, (UNSPEC_SSHLLT, UNSPEC_USHLLT, -1))
-FUNCTION (svshrnb, unspec_based_function, (UNSPEC_SHRNB, UNSPEC_SHRNB, -1))
-FUNCTION (svshrnt, unspec_based_function, (UNSPEC_SHRNT, UNSPEC_SHRNT, -1))
-FUNCTION (svsli, unspec_based_function, (UNSPEC_SLI, UNSPEC_SLI, -1))
+                                            UNSPEC_RSUBHNT, UNSPEC_NONE))
+FUNCTION (svsbclb, unspec_based_function, (UNSPEC_NONE, UNSPEC_SBCLB, UNSPEC_NONE))
+FUNCTION (svsbclt, unspec_based_function, (UNSPEC_NONE, UNSPEC_SBCLT, UNSPEC_NONE))
+FUNCTION (svshllb, unspec_based_function, (UNSPEC_SSHLLB, UNSPEC_USHLLB, UNSPEC_NONE))
+FUNCTION (svshllt, unspec_based_function, (UNSPEC_SSHLLT, UNSPEC_USHLLT, UNSPEC_NONE))
+FUNCTION (svshrnb, unspec_based_function, (UNSPEC_SHRNB, UNSPEC_SHRNB, UNSPEC_NONE))
+FUNCTION (svshrnt, unspec_based_function, (UNSPEC_SHRNT, UNSPEC_SHRNT, UNSPEC_NONE))
+FUNCTION (svsli, unspec_based_function, (UNSPEC_SLI, UNSPEC_SLI, UNSPEC_NONE))
 FUNCTION (svsm4e, fixed_insn_function, (CODE_FOR_aarch64_sve2_sm4e))
 FUNCTION (svsm4ekey, fixed_insn_function, (CODE_FOR_aarch64_sve2_sm4ekey))
 FUNCTION (svsqadd, svsqadd_impl,)
 FUNCTION (svsra, svsra_impl,)
-FUNCTION (svsri, unspec_based_function, (UNSPEC_SRI, UNSPEC_SRI, -1))
+FUNCTION (svsri, unspec_based_function, (UNSPEC_SRI, UNSPEC_SRI, UNSPEC_NONE))
 FUNCTION (svst1dq, svst1xq_impl, (VNx1DImode))
 FUNCTION (svst1q_scatter, svst1q_scatter_impl,)
 FUNCTION (svst1wq, svst1xq_impl, (VNx1SImode))
@@ -1229,14 +1229,14 @@ FUNCTION (svstnt1_scatter, svstnt1_scatter_impl,)
 FUNCTION (svstnt1b_scatter, svstnt1_scatter_truncate_impl, (QImode))
 FUNCTION (svstnt1h_scatter, svstnt1_scatter_truncate_impl, (HImode))
 FUNCTION (svstnt1w_scatter, svstnt1_scatter_truncate_impl, (SImode))
-FUNCTION (svsubhnb, unspec_based_function, (UNSPEC_SUBHNB, UNSPEC_SUBHNB, -1))
-FUNCTION (svsubhnt, unspec_based_function, (UNSPEC_SUBHNT, UNSPEC_SUBHNT, -1))
-FUNCTION (svsublb, unspec_based_function, (UNSPEC_SSUBLB, UNSPEC_USUBLB, -1))
-FUNCTION (svsublbt, unspec_based_function, (UNSPEC_SSUBLBT, -1, -1))
-FUNCTION (svsublt, unspec_based_function, (UNSPEC_SSUBLT, UNSPEC_USUBLT, -1))
-FUNCTION (svsubltb, unspec_based_function, (UNSPEC_SSUBLTB, -1, -1))
-FUNCTION (svsubwb, unspec_based_function, (UNSPEC_SSUBWB, UNSPEC_USUBWB, -1))
-FUNCTION (svsubwt, unspec_based_function, (UNSPEC_SSUBWT, UNSPEC_USUBWT, -1))
+FUNCTION (svsubhnb, unspec_based_function, (UNSPEC_SUBHNB, UNSPEC_SUBHNB, UNSPEC_NONE))
+FUNCTION (svsubhnt, unspec_based_function, (UNSPEC_SUBHNT, UNSPEC_SUBHNT, UNSPEC_NONE))
+FUNCTION (svsublb, unspec_based_function, (UNSPEC_SSUBLB, UNSPEC_USUBLB, UNSPEC_NONE))
+FUNCTION (svsublbt, unspec_based_function, (UNSPEC_SSUBLBT, UNSPEC_NONE, UNSPEC_NONE))
+FUNCTION (svsublt, unspec_based_function, (UNSPEC_SSUBLT, UNSPEC_USUBLT, UNSPEC_NONE))
+FUNCTION (svsubltb, unspec_based_function, (UNSPEC_SSUBLTB, UNSPEC_NONE, UNSPEC_NONE))
+FUNCTION (svsubwb, unspec_based_function, (UNSPEC_SSUBWB, UNSPEC_USUBWB, UNSPEC_NONE))
+FUNCTION (svsubwt, unspec_based_function, (UNSPEC_SSUBWT, UNSPEC_USUBWT, UNSPEC_NONE))
 FUNCTION (svtbl2, svtbl2_impl,)
 FUNCTION (svtblq, quiet<unspec_based_uncond_function>, (UNSPEC_TBLQ,
                                                        UNSPEC_TBLQ,
diff --git a/gcc/config/aarch64/aarch64-sve-builtins.cc b/gcc/config/aarch64/aarch64-sve-builtins.cc
index c42fead46e1c..9f6c8d80ba89 100644
--- a/gcc/config/aarch64/aarch64-sve-builtins.cc
+++ b/gcc/config/aarch64/aarch64-sve-builtins.cc
@@ -4523,8 +4523,8 @@ function_expander::use_contiguous_store_insn (insn_code icode)
 rtx
 function_expander::map_to_rtx_codes (rtx_code code_for_sint,
                                     rtx_code code_for_uint,
-                                    int unspec_for_cond_fp,
-                                    int unspec_for_uncond_fp,
+                                    unspec unspec_for_cond_fp,
+                                    unspec unspec_for_uncond_fp,
                                     unsigned int merge_argno)
 {
   machine_mode mode = tuple_mode (0);
@@ -4600,13 +4600,14 @@ function_expander::map_to_rtx_codes (rtx_code code_for_sint,
    MERGE_ARGNO is the argument that provides the values of inactive lanes for
    _m functions, or DEFAULT_MERGE_ARGNO if we should apply the usual rules.  */
 rtx
-function_expander::map_to_unspecs (int unspec_for_sint, int unspec_for_uint,
-                                  int unspec_for_fp, unsigned int merge_argno)
+function_expander::map_to_unspecs (unspec unspec_for_sint,
+                                  unspec unspec_for_uint, unspec unspec_for_fp,
+                                  unsigned int merge_argno)
 {
   machine_mode mode = tuple_mode (0);
-  int unspec = (!type_suffix (0).integer_p ? unspec_for_fp
-               : type_suffix (0).unsigned_p ? unspec_for_uint
-               : unspec_for_sint);
+  unspec unspec = (!type_suffix (0).integer_p  ? unspec_for_fp
+                  : type_suffix (0).unsigned_p ? unspec_for_uint
+                                               : unspec_for_sint);
 
   if (mode_suffix_id == MODE_single)
     {
diff --git a/gcc/config/aarch64/aarch64-sve-builtins.h b/gcc/config/aarch64/aarch64-sve-builtins.h
index 34e28cf58bf0..d79490a39b1e 100644
--- a/gcc/config/aarch64/aarch64-sve-builtins.h
+++ b/gcc/config/aarch64/aarch64-sve-builtins.h
@@ -724,9 +724,9 @@ public:
   rtx use_contiguous_prefetch_insn (insn_code);
   rtx use_contiguous_store_insn (insn_code);
 
-  rtx map_to_rtx_codes (rtx_code, rtx_code, int, int,
+  rtx map_to_rtx_codes (rtx_code, rtx_code, unspec, unspec,
                        unsigned int = DEFAULT_MERGE_ARGNO);
-  rtx map_to_unspecs (int, int, int, unsigned int = DEFAULT_MERGE_ARGNO);
+  rtx map_to_unspecs (unspec, unspec, unspec, unsigned int = DEFAULT_MERGE_ARGNO);
 
   /* The function call expression.  */
   tree call_expr;
diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc
index 293afa52b3b3..134fc6c4fbaa 100644
--- a/gcc/config/aarch64/aarch64.cc
+++ b/gcc/config/aarch64/aarch64.cc
@@ -7032,7 +7032,7 @@ aarch64_replace_reg_mode (rtx x, machine_mode mode)
 /* Return the SVE REV[BHW] unspec for reversing quantites of mode MODE
    stored in wider integer containers.  */
 
-static unsigned int
+static unspec
 aarch64_sve_rev_unspec (machine_mode mode)
 {
   switch (GET_MODE_UNIT_SIZE (mode))
@@ -7059,7 +7059,7 @@ aarch64_split_sve_subreg_move (rtx dest, rtx ptrue, rtx src)
       < GET_MODE_UNIT_SIZE (mode_with_narrower_elts))
     std::swap (mode_with_wider_elts, mode_with_narrower_elts);
 
-  unsigned int unspec = aarch64_sve_rev_unspec (mode_with_narrower_elts);
+  unspec unspec = aarch64_sve_rev_unspec (mode_with_narrower_elts);
   machine_mode pred_mode = aarch64_sve_pred_mode (mode_with_wider_elts);
 
   /* Get the operands in the appropriate modes and emit the instruction.  */
@@ -9375,7 +9375,7 @@ aarch_pac_insn_p (rtx x)
       rtx sub = *iter;
       if (sub && GET_CODE (sub) == UNSPEC)
        {
-         int unspec_val = XINT (sub, 1);
+         unspec unspec_val = (unspec) XINT (sub, 1);
          switch (unspec_val)
            {
            case UNSPEC_PACIASP:
@@ -27700,7 +27700,7 @@ aarch64_evpc_hvla (struct expand_vec_perm_d *d)
       return false;
 
   /* Used once we have verified that we can use UNSPEC to do the operation.  */
-  auto use_binary = [&](int unspec) -> bool
+  auto use_binary = [&](unspec unspec) -> bool
     {
       if (!d->testing_p)
        {
diff --git a/gcc/config/aarch64/aarch64.md b/gcc/config/aarch64/aarch64.md
index 71458bf78f5c..363023691024 100644
--- a/gcc/config/aarch64/aarch64.md
+++ b/gcc/config/aarch64/aarch64.md
@@ -202,6 +202,7 @@
 )
 
 (define_c_enum "unspec" [
+    UNSPEC_NONE
     UNSPEC_AUTIA1716
     UNSPEC_AUTIB1716
     UNSPEC_AUTIASP
-- 
2.52.0
