This is easily reproducible via CC=clang+CONFIG_STAGING=y+CONFIG_VT6656=m.

It turns out that if your config tickles __builtin_constant_p via
differences in inlining decisions, these statements produce invalid
assembly:
$ cat foo.c
long a(long b, long c) {
	asm("orb\t%1, %0" : "+q"(c): "r"(b));
	return c;
}
$ gcc foo.c
foo.c: Assembler messages:
foo.c:2: Error: `%rax' not allowed with `orb'

Use the `%b` "x86 Operand Modifier" to instead force register
allocation to select a lower-8-bit GPR operand.

The "q" constraint only has meaning on -m32, otherwise it is treated
as "r". Not all GPRs have low-8-bit aliases for -m32.

Cc: Jesse Brandeburg <jesse.brandeb...@intel.com>
Link: https://github.com/ClangBuiltLinux/linux/issues/961
Link: https://lore.kernel.org/lkml/20200504193524.ga221...@google.com/
Link: https://gcc.gnu.org/onlinedocs/gcc/Extended-Asm.html#x86Operandmodifiers
Fixes: 1651e700664b4 ("x86: Fix bitops.h warning with a moved cast")
Reported-by: Sedat Dilek <sedat.di...@gmail.com>
Reported-by: kernelci.org bot <b...@kernelci.org>
Suggested-by: Andy Shevchenko <andriy.shevche...@intel.com>
Suggested-by: Brian Gerst <brge...@gmail.com>
Suggested-by: H. Peter Anvin <h...@zytor.com>
Suggested-by: Ilie Halip <ilie.ha...@gmail.com>
Signed-off-by: Nick Desaulniers <ndesaulni...@google.com>
---
Changes V4 -> V5:
* actually use `%b` in arch_change_bit().

Changes V3 -> V4:
* drop (u8) cast from arch_change_bit() as well.

Changes V2 -> V3:
* use `%b` "x86 Operand Modifier" instead of bitwise op then cast.
* reword commit message.
* add Brian and HPA Suggested-by tags.
* drop Nathan & Sedat Tested-by/Reviewed-by tags (new patch is
  different enough).
* take over authorship.

Changes V1 -> V2:
* change authorship/signed-off-by to Ilie.
* add Nathan's Tested-by/Reviewed-by.
* update commit message slightly with info sent to HPA.

 arch/x86/include/asm/bitops.h | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/arch/x86/include/asm/bitops.h b/arch/x86/include/asm/bitops.h
index b392571c1f1d..35460fef39b8 100644
--- a/arch/x86/include/asm/bitops.h
+++ b/arch/x86/include/asm/bitops.h
@@ -52,9 +52,9 @@ static __always_inline void
 arch_set_bit(long nr, volatile unsigned long *addr)
 {
 	if (__builtin_constant_p(nr)) {
-		asm volatile(LOCK_PREFIX "orb %1,%0"
+		asm volatile(LOCK_PREFIX "orb %b1,%0"
 			: CONST_MASK_ADDR(nr, addr)
-			: "iq" (CONST_MASK(nr) & 0xff)
+			: "iq" (CONST_MASK(nr))
 			: "memory");
 	} else {
 		asm volatile(LOCK_PREFIX __ASM_SIZE(bts) " %1,%0"
@@ -72,9 +72,9 @@ static __always_inline void
 arch_clear_bit(long nr, volatile unsigned long *addr)
 {
 	if (__builtin_constant_p(nr)) {
-		asm volatile(LOCK_PREFIX "andb %1,%0"
+		asm volatile(LOCK_PREFIX "andb %b1,%0"
 			: CONST_MASK_ADDR(nr, addr)
-			: "iq" (CONST_MASK(nr) ^ 0xff));
+			: "iq" (~CONST_MASK(nr)));
 	} else {
 		asm volatile(LOCK_PREFIX __ASM_SIZE(btr) " %1,%0"
 			: : RLONG_ADDR(addr), "Ir" (nr) : "memory");
@@ -123,9 +123,9 @@ static __always_inline void
 arch_change_bit(long nr, volatile unsigned long *addr)
 {
 	if (__builtin_constant_p(nr)) {
-		asm volatile(LOCK_PREFIX "xorb %1,%0"
+		asm volatile(LOCK_PREFIX "xorb %b1,%0"
 			: CONST_MASK_ADDR(nr, addr)
-			: "iq" ((u8)CONST_MASK(nr)));
+			: "iq" (CONST_MASK(nr)));
 	} else {
 		asm volatile(LOCK_PREFIX __ASM_SIZE(btc) " %1,%0"
 			: : RLONG_ADDR(addr), "Ir" (nr) : "memory");
-- 
2.26.2.645.ge9eca65c58-goog
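
For reference, a minimal sketch of the reproducer with the modifier
applied (the file name foo_fixed.c is hypothetical; assumes an x86-64
gcc or clang). Here both operands are GPRs, so `%b` is applied to both;
it makes the template print the low-8-bit alias of whichever register
gets allocated, which is what the byte-sized orb requires:

$ cat foo_fixed.c
/* Sketch only: same reproducer, with the "%b" x86 operand modifier
 * applied to both register operands. Note that orb then reads and
 * writes only the low byte of each operand. */
long a(long b, long c) {
	asm("orb\t%b1, %b0" : "+q"(c) : "r"(b));
	return c;
}
$ gcc -c foo_fixed.c    # assembles cleanly, emitting e.g. orb %dil, %al

In the kernel patch above only the mask operand (%1) needs `%b`,
because the destination is already a byte-sized memory operand via
CONST_MASK_ADDR().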