On Thu, 2018-01-11 at 09:04 -0600, Josh Poimboeuf wrote:
>
> > How about this one then (with ANNOTATE_NOSPEC_ALTERNATIVE):
> >
> > -	asm volatile (ALTERNATIVE("",
> > +	asm volatile (ALTERNATIVE("jmp " alt_end_marker "f",
> >  				  __stringify(__FILL_RETURN_BUFFER(%0, %1, _%=)),
> >  				  X86_FEATURE_RETPOLINE)
> >  		      : "=r" (dummy), ASM_CALL_CONSTRAINT : : "memory" );
>
> Looks good to me.
>
> Another IRC discussion was that Boris may eventually add a feature to
> the alternatives code to automatically insert such a jump if there are a
> lot of nops.
OK, v3 sent out with that. I've just manually put in a jump around it
(less hackishly than the alt_end_marker one) in the oldinstr for now.
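
To spell out why that matters: ALTERNATIVE pads the oldinstr with nops out
to the length of the replacement, so with an empty oldinstr the !RETPOLINE
case ends up executing the whole run of padding on every call. With the
jump in place, the patched site behaves conceptually like this when the
feature bit is clear (sketch only, not real assembler output; the label
name and PAD_BYTES are illustrative):

	jmp	.Lskip_rsb_1		/* 2-byte jump in the oldinstr      */
	.skip	PAD_BYTES, 0x90		/* illustrative: nop padding up to  */
					/* the altinstr length, now skipped */
.Lskip_rsb_1:				/* execution resumes here when      */
					/* X86_FEATURE_RETPOLINE is not set */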
This wants rolling into your objtool fixes:
--- a/arch/x86/include/asm/nospec-branch.h
+++ b/arch/x86/include/asm/nospec-branch.h
@@ -119,7 +119,8 @@
  */
 .macro FILL_RETURN_BUFFER reg:req nr:req ftr:req
 #ifdef CONFIG_RETPOLINE
-	ALTERNATIVE "jmp .Lskip_rsb_\@",				\
+	ANNOTATE_NOSPEC_ALTERNATIVE
+	ALTERNATIVE "jmp .Lskip_rsb_\@",				\
 		__stringify(__FILL_RETURN_BUFFER(\reg,\nr,%_ASM_SP,\@))	\
 		\ftr
 .Lskip_rsb_\@:
@@ -149,6 +150,7 @@
 # define THUNK_TARGET(addr) [thunk_target] "r" (addr)
 
 #elif defined(CONFIG_X86_32) && defined(CONFIG_RETPOLINE)
+# define ANNOTATE_NOSPEC_ALTERNATIVE
 /*
  * For i386 we use the original ret-equivalent retpoline, because
  * otherwise we'll run out of registers. We don't care about CET
@@ -201,7 +203,8 @@ static inline void vmexit_fill_RSB(void)
 #ifdef CONFIG_RETPOLINE
 	unsigned long loops = RSB_CLEAR_LOOPS / 2;
-	asm volatile (ALTERNATIVE("jmp .Lskip_rsb_%=",
+	asm volatile (ANNOTATE_NOSPEC_ALTERNATIVE
+		      ALTERNATIVE("jmp .Lskip_rsb_%=",
 				  __stringify(__FILL_RETURN_BUFFER(%0,
 						RSB_CLEAR_LOOPS, %1, _%=)),
 				  X86_FEATURE_RETPOLINE)
 		      ".Lskip_rsb_%=:"