Author: andrew
Date: Wed Oct 30 10:06:57 2019
New Revision: 354170
URL: https://svnweb.freebsd.org/changeset/base/354170

Log:
  Rename the macros used to extract a single arm64 ID register field.
  
  Under the previous naming scheme, the old ID_AA64PFR0_EL1 field-extraction
  macro collided with a potential macro for the register of the same name.
  To resolve the collision, rename these macros by appending a _VAL suffix.
  
  Sponsored by: DARPA, AFRL
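
  To illustrate the collision (a minimal sketch, not code from this commit;
  the register encoding below is hypothetical, modelled on the existing
  ID_AA64MMFR2_EL1 definition in the diff):

  /* A register-name macro, as used for registers accessed by explicit encoding. */
  #define ID_AA64PFR0_EL1         S3_0_C0_C4_0    /* encoding shown for illustration only */

  /* The old field-extraction macro for the EL1 field of ID_AA64PFR0_EL1
   * reused exactly the same name, so the two definitions cannot coexist: */
  #define ID_AA64PFR0_EL1(x)      ((x) & ID_AA64PFR0_EL1_MASK)

  /* With the rename, the extractor carries a _VAL suffix and the clash goes away: */
  #define ID_AA64PFR0_EL1_VAL(x)  ((x) & ID_AA64PFR0_EL1_MASK)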

Modified:
  head/sys/arm64/arm64/elf32_machdep.c
  head/sys/arm64/include/armreg.h
  head/sys/libkern/gsb_crc32.c

Modified: head/sys/arm64/arm64/elf32_machdep.c
==============================================================================
--- head/sys/arm64/arm64/elf32_machdep.c        Wed Oct 30 08:06:22 2019        (r354169)
+++ head/sys/arm64/arm64/elf32_machdep.c        Wed Oct 30 10:06:57 2019        (r354170)
@@ -130,7 +130,7 @@ elf32_arm_abi_supported(struct image_params *imgp)
        const Elf32_Ehdr *hdr;
 
        /* Check if we support AArch32 */
-       if (ID_AA64PFR0_EL0(READ_SPECIALREG(id_aa64pfr0_el1)) !=
+       if (ID_AA64PFR0_EL0_VAL(READ_SPECIALREG(id_aa64pfr0_el1)) !=
            ID_AA64PFR0_EL0_64_32)
                return (FALSE);
 

Modified: head/sys/arm64/include/armreg.h
==============================================================================
--- head/sys/arm64/include/armreg.h     Wed Oct 30 08:06:22 2019        (r354169)
+++ head/sys/arm64/include/armreg.h     Wed Oct 30 10:06:57 2019        (r354170)
@@ -178,140 +178,140 @@
 /* ID_AA64DFR0_EL1 */
 #define        ID_AA64DFR0_DebugVer_SHIFT      0
 #define        ID_AA64DFR0_DebugVer_MASK       (UL(0xf) << ID_AA64DFR0_DebugVer_SHIFT)
-#define        ID_AA64DFR0_DebugVer(x)         ((x) & ID_AA64DFR0_DebugVer_MASK)
+#define        ID_AA64DFR0_DebugVer_VAL(x)     ((x) & ID_AA64DFR0_DebugVer_MASK)
 #define         ID_AA64DFR0_DebugVer_8         (UL(0x6) << ID_AA64DFR0_DebugVer_SHIFT)
 #define         ID_AA64DFR0_DebugVer_8_VHE     (UL(0x7) << ID_AA64DFR0_DebugVer_SHIFT)
 #define         ID_AA64DFR0_DebugVer_8_2       (UL(0x8) << ID_AA64DFR0_DebugVer_SHIFT)
 #define        ID_AA64DFR0_TraceVer_SHIFT      4
 #define        ID_AA64DFR0_TraceVer_MASK       (UL(0xf) << ID_AA64DFR0_TraceVer_SHIFT)
-#define        ID_AA64DFR0_TraceVer(x)         ((x) & ID_AA64DFR0_TraceVer_MASK)
+#define        ID_AA64DFR0_TraceVer_VAL(x)     ((x) & ID_AA64DFR0_TraceVer_MASK)
 #define         ID_AA64DFR0_TraceVer_NONE      (UL(0x0) << ID_AA64DFR0_TraceVer_SHIFT)
 #define         ID_AA64DFR0_TraceVer_IMPL      (UL(0x1) << ID_AA64DFR0_TraceVer_SHIFT)
 #define        ID_AA64DFR0_PMUVer_SHIFT        8
 #define        ID_AA64DFR0_PMUVer_MASK         (UL(0xf) << ID_AA64DFR0_PMUVer_SHIFT)
-#define        ID_AA64DFR0_PMUVer(x)           ((x) & ID_AA64DFR0_PMUVer_MASK)
+#define        ID_AA64DFR0_PMUVer_VAL(x)       ((x) & ID_AA64DFR0_PMUVer_MASK)
 #define         ID_AA64DFR0_PMUVer_NONE        (UL(0x0) << ID_AA64DFR0_PMUVer_SHIFT)
 #define         ID_AA64DFR0_PMUVer_3           (UL(0x1) << ID_AA64DFR0_PMUVer_SHIFT)
 #define         ID_AA64DFR0_PMUVer_3_1         (UL(0x4) << ID_AA64DFR0_PMUVer_SHIFT)
 #define         ID_AA64DFR0_PMUVer_IMPL        (UL(0xf) << ID_AA64DFR0_PMUVer_SHIFT)
 #define        ID_AA64DFR0_BRPs_SHIFT          12
 #define        ID_AA64DFR0_BRPs_MASK           (UL(0xf) << ID_AA64DFR0_BRPs_SHIFT)
-#define        ID_AA64DFR0_BRPs(x)             \
+#define        ID_AA64DFR0_BRPs_VAL(x) \
     ((((x) >> ID_AA64DFR0_BRPs_SHIFT) & 0xf) + 1)
 #define        ID_AA64DFR0_WRPs_SHIFT          20
 #define        ID_AA64DFR0_WRPs_MASK           (UL(0xf) << ID_AA64DFR0_WRPs_SHIFT)
-#define        ID_AA64DFR0_WRPs(x)             \
+#define        ID_AA64DFR0_WRPs_VAL(x) \
     ((((x) >> ID_AA64DFR0_WRPs_SHIFT) & 0xf) + 1)
 #define        ID_AA64DFR0_CTX_CMPs_SHIFT      28
 #define        ID_AA64DFR0_CTX_CMPs_MASK       (UL(0xf) << ID_AA64DFR0_CTX_CMPs_SHIFT)
-#define        ID_AA64DFR0_CTX_CMPs(x)         \
+#define        ID_AA64DFR0_CTX_CMPs_VAL(x)     \
     ((((x) >> ID_AA64DFR0_CTX_CMPs_SHIFT) & 0xf) + 1)
 #define        ID_AA64DFR0_PMSVer_SHIFT        32
 #define        ID_AA64DFR0_PMSVer_MASK         (UL(0xf) << ID_AA64DFR0_PMSVer_SHIFT)
-#define        ID_AA64DFR0_PMSVer(x)           ((x) & ID_AA64DFR0_PMSVer_MASK)
+#define        ID_AA64DFR0_PMSVer_VAL(x)       ((x) & ID_AA64DFR0_PMSVer_MASK)
 #define         ID_AA64DFR0_PMSVer_NONE        (UL(0x0) << ID_AA64DFR0_PMSVer_SHIFT)
 #define         ID_AA64DFR0_PMSVer_V1          (UL(0x1) << ID_AA64DFR0_PMSVer_SHIFT)
 
 /* ID_AA64ISAR0_EL1 */
 #define        ID_AA64ISAR0_AES_SHIFT          4
 #define        ID_AA64ISAR0_AES_MASK           (UL(0xf) << ID_AA64ISAR0_AES_SHIFT)
-#define        ID_AA64ISAR0_AES(x)             ((x) & ID_AA64ISAR0_AES_MASK)
+#define        ID_AA64ISAR0_AES_VAL(x)         ((x) & ID_AA64ISAR0_AES_MASK)
 #define         ID_AA64ISAR0_AES_NONE          (UL(0x0) << ID_AA64ISAR0_AES_SHIFT)
 #define         ID_AA64ISAR0_AES_BASE          (UL(0x1) << ID_AA64ISAR0_AES_SHIFT)
 #define         ID_AA64ISAR0_AES_PMULL         (UL(0x2) << ID_AA64ISAR0_AES_SHIFT)
 #define        ID_AA64ISAR0_SHA1_SHIFT         8
 #define        ID_AA64ISAR0_SHA1_MASK          (UL(0xf) << ID_AA64ISAR0_SHA1_SHIFT)
-#define        ID_AA64ISAR0_SHA1(x)            ((x) & ID_AA64ISAR0_SHA1_MASK)
+#define        ID_AA64ISAR0_SHA1_VAL(x)        ((x) & ID_AA64ISAR0_SHA1_MASK)
 #define         ID_AA64ISAR0_SHA1_NONE         (UL(0x0) << ID_AA64ISAR0_SHA1_SHIFT)
 #define         ID_AA64ISAR0_SHA1_BASE         (UL(0x1) << ID_AA64ISAR0_SHA1_SHIFT)
 #define        ID_AA64ISAR0_SHA2_SHIFT         12
 #define        ID_AA64ISAR0_SHA2_MASK          (UL(0xf) << ID_AA64ISAR0_SHA2_SHIFT)
-#define        ID_AA64ISAR0_SHA2(x)            ((x) & ID_AA64ISAR0_SHA2_MASK)
+#define        ID_AA64ISAR0_SHA2_VAL(x)        ((x) & ID_AA64ISAR0_SHA2_MASK)
 #define         ID_AA64ISAR0_SHA2_NONE         (UL(0x0) << ID_AA64ISAR0_SHA2_SHIFT)
 #define         ID_AA64ISAR0_SHA2_BASE         (UL(0x1) << ID_AA64ISAR0_SHA2_SHIFT)
 #define         ID_AA64ISAR0_SHA2_512          (UL(0x2) << ID_AA64ISAR0_SHA2_SHIFT)
 #define        ID_AA64ISAR0_CRC32_SHIFT        16
 #define        ID_AA64ISAR0_CRC32_MASK         (UL(0xf) << ID_AA64ISAR0_CRC32_SHIFT)
-#define        ID_AA64ISAR0_CRC32(x)           ((x) & ID_AA64ISAR0_CRC32_MASK)
+#define        ID_AA64ISAR0_CRC32_VAL(x)       ((x) & ID_AA64ISAR0_CRC32_MASK)
 #define         ID_AA64ISAR0_CRC32_NONE        (UL(0x0) << ID_AA64ISAR0_CRC32_SHIFT)
 #define         ID_AA64ISAR0_CRC32_BASE        (UL(0x1) << ID_AA64ISAR0_CRC32_SHIFT)
 #define        ID_AA64ISAR0_Atomic_SHIFT       20
 #define        ID_AA64ISAR0_Atomic_MASK        (UL(0xf) << ID_AA64ISAR0_Atomic_SHIFT)
-#define        ID_AA64ISAR0_Atomic(x)          ((x) & ID_AA64ISAR0_Atomic_MASK)
+#define        ID_AA64ISAR0_Atomic_VAL(x)      ((x) & ID_AA64ISAR0_Atomic_MASK)
 #define         ID_AA64ISAR0_Atomic_NONE       (UL(0x0) << ID_AA64ISAR0_Atomic_SHIFT)
 #define         ID_AA64ISAR0_Atomic_IMPL       (UL(0x2) << ID_AA64ISAR0_Atomic_SHIFT)
 #define        ID_AA64ISAR0_RDM_SHIFT          28
 #define        ID_AA64ISAR0_RDM_MASK           (UL(0xf) << ID_AA64ISAR0_RDM_SHIFT)
-#define        ID_AA64ISAR0_RDM(x)             ((x) & ID_AA64ISAR0_RDM_MASK)
+#define        ID_AA64ISAR0_RDM_VAL(x)         ((x) & ID_AA64ISAR0_RDM_MASK)
 #define         ID_AA64ISAR0_RDM_NONE          (UL(0x0) << ID_AA64ISAR0_RDM_SHIFT)
 #define         ID_AA64ISAR0_RDM_IMPL          (UL(0x1) << ID_AA64ISAR0_RDM_SHIFT)
 #define        ID_AA64ISAR0_SHA3_SHIFT         32
 #define        ID_AA64ISAR0_SHA3_MASK          (UL(0xf) << ID_AA64ISAR0_SHA3_SHIFT)
-#define        ID_AA64ISAR0_SHA3(x)            ((x) & ID_AA64ISAR0_SHA3_MASK)
+#define        ID_AA64ISAR0_SHA3_VAL(x)        ((x) & ID_AA64ISAR0_SHA3_MASK)
 #define         ID_AA64ISAR0_SHA3_NONE         (UL(0x0) << ID_AA64ISAR0_SHA3_SHIFT)
 #define         ID_AA64ISAR0_SHA3_IMPL         (UL(0x1) << ID_AA64ISAR0_SHA3_SHIFT)
 #define        ID_AA64ISAR0_SM3_SHIFT          36
 #define        ID_AA64ISAR0_SM3_MASK           (UL(0xf) << ID_AA64ISAR0_SM3_SHIFT)
-#define        ID_AA64ISAR0_SM3(x)             ((x) & ID_AA64ISAR0_SM3_MASK)
+#define        ID_AA64ISAR0_SM3_VAL(x)         ((x) & ID_AA64ISAR0_SM3_MASK)
 #define         ID_AA64ISAR0_SM3_NONE          (UL(0x0) << ID_AA64ISAR0_SM3_SHIFT)
 #define         ID_AA64ISAR0_SM3_IMPL          (UL(0x1) << ID_AA64ISAR0_SM3_SHIFT)
 #define        ID_AA64ISAR0_SM4_SHIFT          40
 #define        ID_AA64ISAR0_SM4_MASK           (UL(0xf) << ID_AA64ISAR0_SM4_SHIFT)
-#define        ID_AA64ISAR0_SM4(x)             ((x) & ID_AA64ISAR0_SM4_MASK)
+#define        ID_AA64ISAR0_SM4_VAL(x)         ((x) & ID_AA64ISAR0_SM4_MASK)
 #define         ID_AA64ISAR0_SM4_NONE          (UL(0x0) << ID_AA64ISAR0_SM4_SHIFT)
 #define         ID_AA64ISAR0_SM4_IMPL          (UL(0x1) << ID_AA64ISAR0_SM4_SHIFT)
 #define        ID_AA64ISAR0_DP_SHIFT           44
 #define        ID_AA64ISAR0_DP_MASK            (UL(0xf) << ID_AA64ISAR0_DP_SHIFT)
-#define        ID_AA64ISAR0_DP(x)              ((x) & ID_AA64ISAR0_DP_MASK)
+#define        ID_AA64ISAR0_DP_VAL(x)          ((x) & ID_AA64ISAR0_DP_MASK)
 #define         ID_AA64ISAR0_DP_NONE           (UL(0x0) << ID_AA64ISAR0_DP_SHIFT)
 #define         ID_AA64ISAR0_DP_IMPL           (UL(0x1) << ID_AA64ISAR0_DP_SHIFT)
 
 /* ID_AA64ISAR1_EL1 */
 #define        ID_AA64ISAR1_DPB_SHIFT          0
 #define        ID_AA64ISAR1_DPB_MASK           (UL(0xf) << ID_AA64ISAR1_DPB_SHIFT)
-#define        ID_AA64ISAR1_DPB(x)             ((x) & ID_AA64ISAR1_DPB_MASK)
+#define        ID_AA64ISAR1_DPB_VAL(x)         ((x) & ID_AA64ISAR1_DPB_MASK)
 #define         ID_AA64ISAR1_DPB_NONE          (UL(0x0) << ID_AA64ISAR1_DPB_SHIFT)
 #define         ID_AA64ISAR1_DPB_IMPL          (UL(0x1) << ID_AA64ISAR1_DPB_SHIFT)
 #define        ID_AA64ISAR1_APA_SHIFT          4
 #define        ID_AA64ISAR1_APA_MASK           (UL(0xf) << ID_AA64ISAR1_APA_SHIFT)
-#define        ID_AA64ISAR1_APA(x)             ((x) & ID_AA64ISAR1_APA_MASK)
+#define        ID_AA64ISAR1_APA_VAL(x)         ((x) & ID_AA64ISAR1_APA_MASK)
 #define         ID_AA64ISAR1_APA_NONE          (UL(0x0) << ID_AA64ISAR1_APA_SHIFT)
 #define         ID_AA64ISAR1_APA_IMPL          (UL(0x1) << ID_AA64ISAR1_APA_SHIFT)
 #define        ID_AA64ISAR1_API_SHIFT          8
 #define        ID_AA64ISAR1_API_MASK           (UL(0xf) << ID_AA64ISAR1_API_SHIFT)
-#define        ID_AA64ISAR1_API(x)             ((x) & ID_AA64ISAR1_API_MASK)
+#define        ID_AA64ISAR1_API_VAL(x)         ((x) & ID_AA64ISAR1_API_MASK)
 #define         ID_AA64ISAR1_API_NONE          (UL(0x0) << ID_AA64ISAR1_API_SHIFT)
 #define         ID_AA64ISAR1_API_IMPL          (UL(0x1) << ID_AA64ISAR1_API_SHIFT)
 #define        ID_AA64ISAR1_JSCVT_SHIFT        12
 #define        ID_AA64ISAR1_JSCVT_MASK         (UL(0xf) << ID_AA64ISAR1_JSCVT_SHIFT)
-#define        ID_AA64ISAR1_JSCVT(x)           ((x) & ID_AA64ISAR1_JSCVT_MASK)
+#define        ID_AA64ISAR1_JSCVT_VAL(x)       ((x) & ID_AA64ISAR1_JSCVT_MASK)
 #define         ID_AA64ISAR1_JSCVT_NONE        (UL(0x0) << ID_AA64ISAR1_JSCVT_SHIFT)
 #define         ID_AA64ISAR1_JSCVT_IMPL        (UL(0x1) << ID_AA64ISAR1_JSCVT_SHIFT)
 #define        ID_AA64ISAR1_FCMA_SHIFT         16
 #define        ID_AA64ISAR1_FCMA_MASK          (UL(0xf) << ID_AA64ISAR1_FCMA_SHIFT)
-#define        ID_AA64ISAR1_FCMA(x)            ((x) & ID_AA64ISAR1_FCMA_MASK)
+#define        ID_AA64ISAR1_FCMA_VAL(x)        ((x) & ID_AA64ISAR1_FCMA_MASK)
 #define         ID_AA64ISAR1_FCMA_NONE         (UL(0x0) << ID_AA64ISAR1_FCMA_SHIFT)
 #define         ID_AA64ISAR1_FCMA_IMPL         (UL(0x1) << ID_AA64ISAR1_FCMA_SHIFT)
 #define        ID_AA64ISAR1_LRCPC_SHIFT        20
 #define        ID_AA64ISAR1_LRCPC_MASK         (UL(0xf) << ID_AA64ISAR1_LRCPC_SHIFT)
-#define        ID_AA64ISAR1_LRCPC(x)           ((x) & ID_AA64ISAR1_LRCPC_MASK)
+#define        ID_AA64ISAR1_LRCPC_VAL(x)       ((x) & ID_AA64ISAR1_LRCPC_MASK)
 #define         ID_AA64ISAR1_LRCPC_NONE        (UL(0x0) << ID_AA64ISAR1_LRCPC_SHIFT)
 #define         ID_AA64ISAR1_LRCPC_IMPL        (UL(0x1) << ID_AA64ISAR1_LRCPC_SHIFT)
 #define        ID_AA64ISAR1_GPA_SHIFT          24
 #define        ID_AA64ISAR1_GPA_MASK           (UL(0xf) << ID_AA64ISAR1_GPA_SHIFT)
-#define        ID_AA64ISAR1_GPA(x)             ((x) & ID_AA64ISAR1_GPA_MASK)
+#define        ID_AA64ISAR1_GPA_VAL(x)         ((x) & ID_AA64ISAR1_GPA_MASK)
 #define         ID_AA64ISAR1_GPA_NONE          (UL(0x0) << ID_AA64ISAR1_GPA_SHIFT)
 #define         ID_AA64ISAR1_GPA_IMPL          (UL(0x1) << ID_AA64ISAR1_GPA_SHIFT)
 #define        ID_AA64ISAR1_GPI_SHIFT          28
 #define        ID_AA64ISAR1_GPI_MASK           (UL(0xf) << ID_AA64ISAR1_GPI_SHIFT)
-#define        ID_AA64ISAR1_GPI(x)             ((x) & ID_AA64ISAR1_GPI_MASK)
+#define        ID_AA64ISAR1_GPI_VAL(x)         ((x) & ID_AA64ISAR1_GPI_MASK)
 #define         ID_AA64ISAR1_GPI_NONE          (UL(0x0) << ID_AA64ISAR1_GPI_SHIFT)
 #define         ID_AA64ISAR1_GPI_IMPL          (UL(0x1) << ID_AA64ISAR1_GPI_SHIFT)
 
 /* ID_AA64MMFR0_EL1 */
 #define        ID_AA64MMFR0_PARange_SHIFT      0
 #define        ID_AA64MMFR0_PARange_MASK       (UL(0xf) << ID_AA64MMFR0_PARange_SHIFT)
-#define        ID_AA64MMFR0_PARange(x)         ((x) & ID_AA64MMFR0_PARange_MASK)
+#define        ID_AA64MMFR0_PARange_VAL(x)     ((x) & ID_AA64MMFR0_PARange_MASK)
 #define         ID_AA64MMFR0_PARange_4G        (UL(0x0) << ID_AA64MMFR0_PARange_SHIFT)
 #define         ID_AA64MMFR0_PARange_64G       (UL(0x1) << ID_AA64MMFR0_PARange_SHIFT)
 #define         ID_AA64MMFR0_PARange_1T        (UL(0x2) << ID_AA64MMFR0_PARange_SHIFT)
@@ -321,82 +321,82 @@
 #define         ID_AA64MMFR0_PARange_4P        (UL(0x6) << ID_AA64MMFR0_PARange_SHIFT)
 #define        ID_AA64MMFR0_ASIDBits_SHIFT     4
 #define        ID_AA64MMFR0_ASIDBits_MASK      (UL(0xf) << ID_AA64MMFR0_ASIDBits_SHIFT)
-#define        ID_AA64MMFR0_ASIDBits(x)        ((x) & ID_AA64MMFR0_ASIDBits_MASK)
+#define        ID_AA64MMFR0_ASIDBits_VAL(x)    ((x) & ID_AA64MMFR0_ASIDBits_MASK)
 #define         ID_AA64MMFR0_ASIDBits_8        (UL(0x0) << ID_AA64MMFR0_ASIDBits_SHIFT)
 #define         ID_AA64MMFR0_ASIDBits_16       (UL(0x2) << ID_AA64MMFR0_ASIDBits_SHIFT)
 #define        ID_AA64MMFR0_BigEnd_SHIFT       8
 #define        ID_AA64MMFR0_BigEnd_MASK        (UL(0xf) << ID_AA64MMFR0_BigEnd_SHIFT)
-#define        ID_AA64MMFR0_BigEnd(x)          ((x) & ID_AA64MMFR0_BigEnd_MASK)
+#define        ID_AA64MMFR0_BigEnd_VAL(x)      ((x) & ID_AA64MMFR0_BigEnd_MASK)
 #define         ID_AA64MMFR0_BigEnd_FIXED      (UL(0x0) << ID_AA64MMFR0_BigEnd_SHIFT)
 #define         ID_AA64MMFR0_BigEnd_MIXED      (UL(0x1) << ID_AA64MMFR0_BigEnd_SHIFT)
 #define        ID_AA64MMFR0_SNSMem_SHIFT       12
 #define        ID_AA64MMFR0_SNSMem_MASK        (UL(0xf) << ID_AA64MMFR0_SNSMem_SHIFT)
-#define        ID_AA64MMFR0_SNSMem(x)          ((x) & ID_AA64MMFR0_SNSMem_MASK)
+#define        ID_AA64MMFR0_SNSMem_VAL(x)      ((x) & ID_AA64MMFR0_SNSMem_MASK)
 #define         ID_AA64MMFR0_SNSMem_NONE       (UL(0x0) << ID_AA64MMFR0_SNSMem_SHIFT)
 #define         ID_AA64MMFR0_SNSMem_DISTINCT   (UL(0x1) << ID_AA64MMFR0_SNSMem_SHIFT)
 #define        ID_AA64MMFR0_BigEndEL0_SHIFT    16
 #define        ID_AA64MMFR0_BigEndEL0_MASK     (UL(0xf) << ID_AA64MMFR0_BigEndEL0_SHIFT)
-#define        ID_AA64MMFR0_BigEndEL0(x)       ((x) & ID_AA64MMFR0_BigEndEL0_MASK)
+#define        ID_AA64MMFR0_BigEndEL0_VAL(x)   ((x) & ID_AA64MMFR0_BigEndEL0_MASK)
 #define         ID_AA64MMFR0_BigEndEL0_FIXED   (UL(0x0) << ID_AA64MMFR0_BigEndEL0_SHIFT)
 #define         ID_AA64MMFR0_BigEndEL0_MIXED   (UL(0x1) << ID_AA64MMFR0_BigEndEL0_SHIFT)
 #define        ID_AA64MMFR0_TGran16_SHIFT      20
 #define        ID_AA64MMFR0_TGran16_MASK       (UL(0xf) << ID_AA64MMFR0_TGran16_SHIFT)
-#define        ID_AA64MMFR0_TGran16(x)         ((x) & ID_AA64MMFR0_TGran16_MASK)
+#define        ID_AA64MMFR0_TGran16_VAL(x)     ((x) & ID_AA64MMFR0_TGran16_MASK)
 #define         ID_AA64MMFR0_TGran16_NONE      (UL(0x0) << ID_AA64MMFR0_TGran16_SHIFT)
 #define         ID_AA64MMFR0_TGran16_IMPL      (UL(0x1) << ID_AA64MMFR0_TGran16_SHIFT)
 #define        ID_AA64MMFR0_TGran64_SHIFT      24
 #define        ID_AA64MMFR0_TGran64_MASK       (UL(0xf) << ID_AA64MMFR0_TGran64_SHIFT)
-#define        ID_AA64MMFR0_TGran64(x)         ((x) & ID_AA64MMFR0_TGran64_MASK)
+#define        ID_AA64MMFR0_TGran64_VAL(x)     ((x) & ID_AA64MMFR0_TGran64_MASK)
 #define         ID_AA64MMFR0_TGran64_IMPL      (UL(0x0) << ID_AA64MMFR0_TGran64_SHIFT)
 #define         ID_AA64MMFR0_TGran64_NONE      (UL(0xf) << ID_AA64MMFR0_TGran64_SHIFT)
 #define        ID_AA64MMFR0_TGran4_SHIFT       28
 #define        ID_AA64MMFR0_TGran4_MASK        (UL(0xf) << ID_AA64MMFR0_TGran4_SHIFT)
-#define        ID_AA64MMFR0_TGran4(x)          ((x) & ID_AA64MMFR0_TGran4_MASK)
+#define        ID_AA64MMFR0_TGran4_VAL(x)      ((x) & ID_AA64MMFR0_TGran4_MASK)
 #define         ID_AA64MMFR0_TGran4_IMPL       (UL(0x0) << ID_AA64MMFR0_TGran4_SHIFT)
 #define         ID_AA64MMFR0_TGran4_NONE       (UL(0xf) << ID_AA64MMFR0_TGran4_SHIFT)
 
 /* ID_AA64MMFR1_EL1 */
 #define        ID_AA64MMFR1_HAFDBS_SHIFT       0
 #define        ID_AA64MMFR1_HAFDBS_MASK        (UL(0xf) << ID_AA64MMFR1_HAFDBS_SHIFT)
-#define        ID_AA64MMFR1_HAFDBS(x)          ((x) & ID_AA64MMFR1_HAFDBS_MASK)
+#define        ID_AA64MMFR1_HAFDBS_VAL(x)      ((x) & ID_AA64MMFR1_HAFDBS_MASK)
 #define         ID_AA64MMFR1_HAFDBS_NONE       (UL(0x0) << ID_AA64MMFR1_HAFDBS_SHIFT)
 #define         ID_AA64MMFR1_HAFDBS_AF         (UL(0x1) << ID_AA64MMFR1_HAFDBS_SHIFT)
 #define         ID_AA64MMFR1_HAFDBS_AF_DBS     (UL(0x2) << ID_AA64MMFR1_HAFDBS_SHIFT)
 #define        ID_AA64MMFR1_VMIDBits_SHIFT     4
 #define        ID_AA64MMFR1_VMIDBits_MASK      (UL(0xf) << ID_AA64MMFR1_VMIDBits_SHIFT)
-#define        ID_AA64MMFR1_VMIDBits(x)        ((x) & ID_AA64MMFR1_VMIDBits_MASK)
+#define        ID_AA64MMFR1_VMIDBits_VAL(x)    ((x) & ID_AA64MMFR1_VMIDBits_MASK)
 #define         ID_AA64MMFR1_VMIDBits_8        (UL(0x0) << ID_AA64MMFR1_VMIDBits_SHIFT)
 #define         ID_AA64MMFR1_VMIDBits_16       (UL(0x2) << ID_AA64MMFR1_VMIDBits_SHIFT)
 #define        ID_AA64MMFR1_VH_SHIFT           8
 #define        ID_AA64MMFR1_VH_MASK            (UL(0xf) << ID_AA64MMFR1_VH_SHIFT)
-#define        ID_AA64MMFR1_VH(x)              ((x) & ID_AA64MMFR1_VH_MASK)
+#define        ID_AA64MMFR1_VH_VAL(x)          ((x) & ID_AA64MMFR1_VH_MASK)
 #define         ID_AA64MMFR1_VH_NONE           (UL(0x0) << ID_AA64MMFR1_VH_SHIFT)
 #define         ID_AA64MMFR1_VH_IMPL           (UL(0x1) << ID_AA64MMFR1_VH_SHIFT)
 #define        ID_AA64MMFR1_HPDS_SHIFT         12
 #define        ID_AA64MMFR1_HPDS_MASK          (UL(0xf) << ID_AA64MMFR1_HPDS_SHIFT)
-#define        ID_AA64MMFR1_HPDS(x)            ((x) & ID_AA64MMFR1_HPDS_MASK)
+#define        ID_AA64MMFR1_HPDS_VAL(x)        ((x) & ID_AA64MMFR1_HPDS_MASK)
 #define         ID_AA64MMFR1_HPDS_NONE         (UL(0x0) << ID_AA64MMFR1_HPDS_SHIFT)
 #define         ID_AA64MMFR1_HPDS_HPD          (UL(0x1) << ID_AA64MMFR1_HPDS_SHIFT)
 #define         ID_AA64MMFR1_HPDS_TTPBHA       (UL(0x2) << ID_AA64MMFR1_HPDS_SHIFT)
 #define        ID_AA64MMFR1_LO_SHIFT           16
 #define        ID_AA64MMFR1_LO_MASK            (UL(0xf) << ID_AA64MMFR1_LO_SHIFT)
-#define        ID_AA64MMFR1_LO(x)              ((x) & ID_AA64MMFR1_LO_MASK)
+#define        ID_AA64MMFR1_LO_VAL(x)          ((x) & ID_AA64MMFR1_LO_MASK)
 #define         ID_AA64MMFR1_LO_NONE           (UL(0x0) << ID_AA64MMFR1_LO_SHIFT)
 #define         ID_AA64MMFR1_LO_IMPL           (UL(0x1) << ID_AA64MMFR1_LO_SHIFT)
 #define        ID_AA64MMFR1_PAN_SHIFT          20
 #define        ID_AA64MMFR1_PAN_MASK           (UL(0xf) << ID_AA64MMFR1_PAN_SHIFT)
-#define        ID_AA64MMFR1_PAN(x)             ((x) & ID_AA64MMFR1_PAN_MASK)
+#define        ID_AA64MMFR1_PAN_VAL(x)         ((x) & ID_AA64MMFR1_PAN_MASK)
 #define         ID_AA64MMFR1_PAN_NONE          (UL(0x0) << ID_AA64MMFR1_PAN_SHIFT)
 #define         ID_AA64MMFR1_PAN_IMPL          (UL(0x1) << ID_AA64MMFR1_PAN_SHIFT)
 #define         ID_AA64MMFR1_PAN_ATS1E1        (UL(0x2) << ID_AA64MMFR1_PAN_SHIFT)
 #define        ID_AA64MMFR1_SpecSEI_SHIFT      24
 #define        ID_AA64MMFR1_SpecSEI_MASK       (UL(0xf) << ID_AA64MMFR1_SpecSEI_SHIFT)
-#define        ID_AA64MMFR1_SpecSEI(x)         ((x) & ID_AA64MMFR1_SpecSEI_MASK)
+#define        ID_AA64MMFR1_SpecSEI_VAL(x)     ((x) & ID_AA64MMFR1_SpecSEI_MASK)
 #define         ID_AA64MMFR1_SpecSEI_NONE      (UL(0x0) << ID_AA64MMFR1_SpecSEI_SHIFT)
 #define         ID_AA64MMFR1_SpecSEI_IMPL      (UL(0x1) << ID_AA64MMFR1_SpecSEI_SHIFT)
 #define        ID_AA64MMFR1_XNX_SHIFT          28
 #define        ID_AA64MMFR1_XNX_MASK           (UL(0xf) << ID_AA64MMFR1_XNX_SHIFT)
-#define        ID_AA64MMFR1_XNX(x)             ((x) & ID_AA64MMFR1_XNX_MASK)
+#define        ID_AA64MMFR1_XNX_VAL(x)         ((x) & ID_AA64MMFR1_XNX_MASK)
 #define         ID_AA64MMFR1_XNX_NONE          (UL(0x0) << ID_AA64MMFR1_XNX_SHIFT)
 #define         ID_AA64MMFR1_XNX_IMPL          (UL(0x1) << ID_AA64MMFR1_XNX_SHIFT)
 
@@ -404,89 +404,89 @@
 #define        ID_AA64MMFR2_EL1                S3_0_C0_C7_2
 #define        ID_AA64MMFR2_CnP_SHIFT          0
 #define        ID_AA64MMFR2_CnP_MASK           (UL(0xf) << ID_AA64MMFR2_CnP_SHIFT)
-#define        ID_AA64MMFR2_CnP(x)             ((x) & ID_AA64MMFR2_CnP_MASK)
+#define        ID_AA64MMFR2_CnP_VAL(x)         ((x) & ID_AA64MMFR2_CnP_MASK)
 #define         ID_AA64MMFR2_CnP_NONE          (UL(0x0) << ID_AA64MMFR2_CnP_SHIFT)
 #define         ID_AA64MMFR2_CnP_IMPL          (UL(0x1) << ID_AA64MMFR2_CnP_SHIFT)
 #define        ID_AA64MMFR2_UAO_SHIFT          4
 #define        ID_AA64MMFR2_UAO_MASK           (UL(0xf) << ID_AA64MMFR2_UAO_SHIFT)
-#define        ID_AA64MMFR2_UAO(x)             ((x) & ID_AA64MMFR2_UAO_MASK)
+#define        ID_AA64MMFR2_UAO_VAL(x)         ((x) & ID_AA64MMFR2_UAO_MASK)
 #define         ID_AA64MMFR2_UAO_NONE          (UL(0x0) << ID_AA64MMFR2_UAO_SHIFT)
 #define         ID_AA64MMFR2_UAO_IMPL          (UL(0x1) << ID_AA64MMFR2_UAO_SHIFT)
 #define        ID_AA64MMFR2_LSM_SHIFT          8
 #define        ID_AA64MMFR2_LSM_MASK           (UL(0xf) << ID_AA64MMFR2_LSM_SHIFT)
-#define        ID_AA64MMFR2_LSM(x)             ((x) & ID_AA64MMFR2_LSM_MASK)
+#define        ID_AA64MMFR2_LSM_VAL(x)         ((x) & ID_AA64MMFR2_LSM_MASK)
 #define         ID_AA64MMFR2_LSM_NONE          (UL(0x0) << ID_AA64MMFR2_LSM_SHIFT)
 #define         ID_AA64MMFR2_LSM_IMPL          (UL(0x1) << ID_AA64MMFR2_LSM_SHIFT)
 #define        ID_AA64MMFR2_IESB_SHIFT         12
 #define        ID_AA64MMFR2_IESB_MASK          (UL(0xf) << ID_AA64MMFR2_IESB_SHIFT)
-#define        ID_AA64MMFR2_IESB(x)            ((x) & ID_AA64MMFR2_IESB_MASK)
+#define        ID_AA64MMFR2_IESB_VAL(x)        ((x) & ID_AA64MMFR2_IESB_MASK)
 #define         ID_AA64MMFR2_IESB_NONE         (UL(0x0) << ID_AA64MMFR2_IESB_SHIFT)
 #define         ID_AA64MMFR2_IESB_IMPL         (UL(0x1) << ID_AA64MMFR2_IESB_SHIFT)
 #define        ID_AA64MMFR2_VARange_SHIFT      16
 #define        ID_AA64MMFR2_VARange_MASK       (UL(0xf) << ID_AA64MMFR2_VARange_SHIFT)
-#define        ID_AA64MMFR2_VARange(x)         ((x) & ID_AA64MMFR2_VARange_MASK)
+#define        ID_AA64MMFR2_VARange_VAL(x)     ((x) & ID_AA64MMFR2_VARange_MASK)
 #define         ID_AA64MMFR2_VARange_48        (UL(0x0) << ID_AA64MMFR2_VARange_SHIFT)
 #define         ID_AA64MMFR2_VARange_52        (UL(0x1) << ID_AA64MMFR2_VARange_SHIFT)
 #define        ID_AA64MMFR2_CCIDX_SHIFT        20
 #define        ID_AA64MMFR2_CCIDX_MASK         (UL(0xf) << ID_AA64MMFR2_CCIDX_SHIFT)
-#define        ID_AA64MMFR2_CCIDX(x)           ((x) & ID_AA64MMFR2_CCIDX_MASK)
+#define        ID_AA64MMFR2_CCIDX_VAL(x)       ((x) & ID_AA64MMFR2_CCIDX_MASK)
 #define         ID_AA64MMFR2_CCIDX_32          (UL(0x0) << ID_AA64MMFR2_CCIDX_SHIFT)
 #define         ID_AA64MMFR2_CCIDX_64          (UL(0x1) << ID_AA64MMFR2_CCIDX_SHIFT)
 #define        ID_AA64MMFR2_NV_SHIFT           24
 #define        ID_AA64MMFR2_NV_MASK            (UL(0xf) << ID_AA64MMFR2_NV_SHIFT)
-#define        ID_AA64MMFR2_NV(x)              ((x) & ID_AA64MMFR2_NV_MASK)
+#define        ID_AA64MMFR2_NV_VAL(x)          ((x) & ID_AA64MMFR2_NV_MASK)
 #define         ID_AA64MMFR2_NV_NONE           (UL(0x0) << ID_AA64MMFR2_NV_SHIFT)
 #define         ID_AA64MMFR2_NV_IMPL           (UL(0x1) << ID_AA64MMFR2_NV_SHIFT)
 
 /* ID_AA64PFR0_EL1 */
 #define        ID_AA64PFR0_EL0_SHIFT           0
 #define        ID_AA64PFR0_EL0_MASK            (UL(0xf) << ID_AA64PFR0_EL0_SHIFT)
-#define        ID_AA64PFR0_EL0(x)              ((x) & ID_AA64PFR0_EL0_MASK)
+#define        ID_AA64PFR0_EL0_VAL(x)          ((x) & ID_AA64PFR0_EL0_MASK)
 #define         ID_AA64PFR0_EL0_64             (UL(0x1) << ID_AA64PFR0_EL0_SHIFT)
 #define         ID_AA64PFR0_EL0_64_32          (UL(0x2) << ID_AA64PFR0_EL0_SHIFT)
 #define        ID_AA64PFR0_EL1_SHIFT           4
 #define        ID_AA64PFR0_EL1_MASK            (UL(0xf) << ID_AA64PFR0_EL1_SHIFT)
-#define        ID_AA64PFR0_EL1(x)              ((x) & ID_AA64PFR0_EL1_MASK)
+#define        ID_AA64PFR0_EL1_VAL(x)          ((x) & ID_AA64PFR0_EL1_MASK)
 #define         ID_AA64PFR0_EL1_64             (UL(0x1) << ID_AA64PFR0_EL1_SHIFT)
 #define         ID_AA64PFR0_EL1_64_32          (UL(0x2) << ID_AA64PFR0_EL1_SHIFT)
 #define        ID_AA64PFR0_EL2_SHIFT           8
 #define        ID_AA64PFR0_EL2_MASK            (UL(0xf) << ID_AA64PFR0_EL2_SHIFT)
-#define        ID_AA64PFR0_EL2(x)              ((x) & ID_AA64PFR0_EL2_MASK)
+#define        ID_AA64PFR0_EL2_VAL(x)          ((x) & ID_AA64PFR0_EL2_MASK)
 #define         ID_AA64PFR0_EL2_NONE           (UL(0x0) << ID_AA64PFR0_EL2_SHIFT)
 #define         ID_AA64PFR0_EL2_64             (UL(0x1) << ID_AA64PFR0_EL2_SHIFT)
 #define         ID_AA64PFR0_EL2_64_32          (UL(0x2) << ID_AA64PFR0_EL2_SHIFT)
 #define        ID_AA64PFR0_EL3_SHIFT           12
 #define        ID_AA64PFR0_EL3_MASK            (UL(0xf) << ID_AA64PFR0_EL3_SHIFT)
-#define        ID_AA64PFR0_EL3(x)              ((x) & ID_AA64PFR0_EL3_MASK)
+#define        ID_AA64PFR0_EL3_VAL(x)          ((x) & ID_AA64PFR0_EL3_MASK)
 #define         ID_AA64PFR0_EL3_NONE           (UL(0x0) << ID_AA64PFR0_EL3_SHIFT)
 #define         ID_AA64PFR0_EL3_64             (UL(0x1) << ID_AA64PFR0_EL3_SHIFT)
 #define         ID_AA64PFR0_EL3_64_32          (UL(0x2) << ID_AA64PFR0_EL3_SHIFT)
 #define        ID_AA64PFR0_FP_SHIFT            16
 #define        ID_AA64PFR0_FP_MASK             (UL(0xf) << ID_AA64PFR0_FP_SHIFT)
-#define        ID_AA64PFR0_FP(x)               ((x) & ID_AA64PFR0_FP_MASK)
+#define        ID_AA64PFR0_FP_VAL(x)           ((x) & ID_AA64PFR0_FP_MASK)
 #define         ID_AA64PFR0_FP_IMPL            (UL(0x0) << ID_AA64PFR0_FP_SHIFT)
 #define         ID_AA64PFR0_FP_HP              (UL(0x1) << ID_AA64PFR0_FP_SHIFT)
 #define         ID_AA64PFR0_FP_NONE            (UL(0xf) << ID_AA64PFR0_FP_SHIFT)
 #define        ID_AA64PFR0_AdvSIMD_SHIFT       20
 #define        ID_AA64PFR0_AdvSIMD_MASK        (UL(0xf) << ID_AA64PFR0_AdvSIMD_SHIFT)
-#define        ID_AA64PFR0_AdvSIMD(x)          ((x) & ID_AA64PFR0_AdvSIMD_MASK)
+#define        ID_AA64PFR0_AdvSIMD_VAL(x)      ((x) & ID_AA64PFR0_AdvSIMD_MASK)
 #define         ID_AA64PFR0_AdvSIMD_IMPL       (UL(0x0) << ID_AA64PFR0_AdvSIMD_SHIFT)
 #define         ID_AA64PFR0_AdvSIMD_HP         (UL(0x1) << ID_AA64PFR0_AdvSIMD_SHIFT)
 #define         ID_AA64PFR0_AdvSIMD_NONE       (UL(0xf) << ID_AA64PFR0_AdvSIMD_SHIFT)
 #define        ID_AA64PFR0_GIC_BITS            0x4 /* Number of bits in GIC field */
 #define        ID_AA64PFR0_GIC_SHIFT           24
 #define        ID_AA64PFR0_GIC_MASK            (UL(0xf) << ID_AA64PFR0_GIC_SHIFT)
-#define        ID_AA64PFR0_GIC(x)              ((x) & ID_AA64PFR0_GIC_MASK)
+#define        ID_AA64PFR0_GIC_VAL(x)          ((x) & ID_AA64PFR0_GIC_MASK)
 #define         ID_AA64PFR0_GIC_CPUIF_NONE     (UL(0x0) << ID_AA64PFR0_GIC_SHIFT)
 #define         ID_AA64PFR0_GIC_CPUIF_EN       (UL(0x1) << ID_AA64PFR0_GIC_SHIFT)
 #define        ID_AA64PFR0_RAS_SHIFT           28
 #define        ID_AA64PFR0_RAS_MASK            (UL(0xf) << ID_AA64PFR0_RAS_SHIFT)
-#define        ID_AA64PFR0_RAS(x)              ((x) & ID_AA64PFR0_RAS_MASK)
+#define        ID_AA64PFR0_RAS_VAL(x)          ((x) & ID_AA64PFR0_RAS_MASK)
 #define         ID_AA64PFR0_RAS_NONE           (UL(0x0) << ID_AA64PFR0_RAS_SHIFT)
 #define         ID_AA64PFR0_RAS_V1             (UL(0x1) << ID_AA64PFR0_RAS_SHIFT)
 #define        ID_AA64PFR0_SVE_SHIFT           32
 #define        ID_AA64PFR0_SVE_MASK            (UL(0xf) << ID_AA64PFR0_SVE_SHIFT)
-#define        ID_AA64PFR0_SVE(x)              ((x) & ID_AA64PFR0_SVE_MASK)
+#define        ID_AA64PFR0_SVE_VAL(x)          ((x) & ID_AA64PFR0_SVE_MASK)
 #define         ID_AA64PFR0_SVE_NONE           (UL(0x0) << ID_AA64PFR0_SVE_SHIFT)
 #define         ID_AA64PFR0_SVE_IMPL           (UL(0x1) << ID_AA64PFR0_SVE_SHIFT)
 

Modified: head/sys/libkern/gsb_crc32.c
==============================================================================
--- head/sys/libkern/gsb_crc32.c        Wed Oct 30 08:06:22 2019        (r354169)
+++ head/sys/libkern/gsb_crc32.c        Wed Oct 30 10:06:57 2019        (r354170)
@@ -777,7 +777,7 @@ calculate_crc32c(uint32_t crc32c,
         * this applies to all CPUs.
         */
        reg = READ_SPECIALREG(id_aa64isar0_el1);
-       if (ID_AA64ISAR0_CRC32(reg) != ID_AA64ISAR0_CRC32_NONE) {
+       if (ID_AA64ISAR0_CRC32_VAL(reg) != ID_AA64ISAR0_CRC32_NONE) {
                return (armv8_crc32c(crc32c, buffer, length));
        } else
 #endif
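
For context, a minimal hypothetical sketch of how a caller would use one of the
other renamed extractors (the AES check below is not part of this commit; it
simply mirrors the CRC32 check above):

        uint64_t reg;

        /* Read the ISA feature register and test one field with the renamed macro. */
        reg = READ_SPECIALREG(id_aa64isar0_el1);
        if (ID_AA64ISAR0_AES_VAL(reg) != ID_AA64ISAR0_AES_NONE) {
                /* AES instructions are available (ID_AA64ISAR0_AES_BASE or _PMULL). */
        }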