On Mon, Apr 8, 2019 at 4:29 PM Denys Vlasenko <dvlas...@redhat.com> wrote:
>
> On 4/8/19 4:23 PM, Sedat Dilek wrote:
> > For the .rodata.cst16 part you mean sth. like this?
>
> yes, see below
>
> > --- a/arch/x86/crypto/camellia-aesni-avx-asm_64.S
> > +++ b/arch/x86/crypto/camellia-aesni-avx-asm_64.S
> > @@ -573,8 +573,12 @@ ENDPROC(roundsm16_x4_x5_x6_x7_x0_x1_x2_x3_y4_y5_y6_y7_y0_y1_y2_y3_ab)
> >         vmovdqu y7, 15 * 16(rio);
> >
> >
> > -/* NB: section is mergeable, all elements must be aligned 16-byte blocks */
> > -.section .rodata.cst16, "aM", @progbits, 16
> > +/*
> > + * NB: section is mergeable, all elements must be aligned 16-byte blocks
> > + * There is more than one object in this section, let's use module name
> > + * instead of object name as unique suffix
> > + */
> > +.section .rodata.cst16.camellia-aesni-avx-asm_64, "aM", @progbits, 16
>
> dashes in the name may cause problems, replace with '_'.
>
> >  .align 16
> >
> >  #define SHUFB_BYTES(idx) \
> > diff --git a/arch/x86/crypto/camellia-aesni-avx2-asm_64.S b/arch/x86/crypto/camellia-aesni-avx2-asm_64.S
> > index b66bbfa62f50..d6ce36e82a93 100644
> > --- a/arch/x86/crypto/camellia-aesni-avx2-asm_64.S
> > +++ b/arch/x86/crypto/camellia-aesni-avx2-asm_64.S
> > @@ -626,8 +626,12 @@ ENDPROC(roundsm32_x4_x5_x6_x7_x0_x1_x2_x3_y4_y5_y6_y7_y0_y1_y2_y3_ab)
> >  .long 0x00010203, 0x04050607, 0x80808080, 0x80808080
> >  .long 0x00010203, 0x04050607, 0x80808080, 0x80808080
> >
> > -/* NB: section is mergeable, all elements must be aligned 16-byte blocks */
> > -.section .rodata.cst16, "aM", @progbits, 16
> > +/*
> > + * NB: section is mergeable, all elements must be aligned 16-byte blocks
> > + * There is more than one object in this section, let's use module name
> > + * instead of object name as unique suffix
> > + */
> > +.section .rodata.cst16.ccamellia-aesni-avx2-asm_64, "aM", @progbits, 16
>
> dashes in the name may cause problems, replace with '_'.
Oops.

v2:

sdi@iniza:~/src/linux-kernel/linux$ git --no-pager diff
diff --git a/arch/x86/crypto/camellia-aesni-avx-asm_64.S b/arch/x86/crypto/camellia-aesni-avx-asm_64.S
index a14af6eb09cb..712d6a7e8b8f 100644
--- a/arch/x86/crypto/camellia-aesni-avx-asm_64.S
+++ b/arch/x86/crypto/camellia-aesni-avx-asm_64.S
@@ -573,8 +573,12 @@ ENDPROC(roundsm16_x4_x5_x6_x7_x0_x1_x2_x3_y4_y5_y6_y7_y0_y1_y2_y3_ab)
        vmovdqu y7, 15 * 16(rio);


-/* NB: section is mergeable, all elements must be aligned 16-byte blocks */
-.section .rodata.cst16, "aM", @progbits, 16
+/*
+ * NB: section is mergeable, all elements must be aligned 16-byte blocks
+ * There is more than one object in this section, let's use module name
+ * instead of object name as unique suffix
+ */
+.section .rodata.cst16.camellia_aesni_avx_asm_64, "aM", @progbits, 16
 .align 16

 #define SHUFB_BYTES(idx) \
diff --git a/arch/x86/crypto/camellia-aesni-avx2-asm_64.S b/arch/x86/crypto/camellia-aesni-avx2-asm_64.S
index b66bbfa62f50..34f6b0c4196d 100644
--- a/arch/x86/crypto/camellia-aesni-avx2-asm_64.S
+++ b/arch/x86/crypto/camellia-aesni-avx2-asm_64.S
@@ -626,8 +626,12 @@ ENDPROC(roundsm32_x4_x5_x6_x7_x0_x1_x2_x3_y4_y5_y6_y7_y0_y1_y2_y3_ab)
 .long 0x00010203, 0x04050607, 0x80808080, 0x80808080
 .long 0x00010203, 0x04050607, 0x80808080, 0x80808080

-/* NB: section is mergeable, all elements must be aligned 16-byte blocks */
-.section .rodata.cst16, "aM", @progbits, 16
+/*
+ * NB: section is mergeable, all elements must be aligned 16-byte blocks
+ * There is more than one object in this section, let's use module name
+ * instead of object name as unique suffix
+ */
+.section .rodata.cst16.camellia_aesni_avx2_asm_64, "aM", @progbits, 16
 .align 16

 /* For CTR-mode IV byteswap */
diff --git a/arch/x86/crypto/cast6-avx-x86_64-asm_64.S b/arch/x86/crypto/cast6-avx-x86_64-asm_64.S
index 7f30b6f0d72c..fcd5732aba4d 100644
--- a/arch/x86/crypto/cast6-avx-x86_64-asm_64.S
+++ b/arch/x86/crypto/cast6-avx-x86_64-asm_64.S
@@ -225,7 +225,12 @@
        vpshufb rmask, x2, x2; \
        vpshufb rmask, x3, x3;

-.section .rodata.cst16, "aM", @progbits, 16
+/*
+ * NB: section is mergeable, all elements must be aligned 16-byte blocks
+ * There is more than one object in this section, let's use module name
+ * instead of object name as unique suffix
+ */
+.section .rodata.cst16.cast6_avx_x86_64_asm_64, "aM", @progbits, 16
 .align 16
 .Lxts_gf128mul_and_shl1_mask:
        .byte 0x87, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0

I also tried to check the .rodata.cst32 case; how do I identify the affected *.S files?

- Sedat -
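P.S.: One possible way to list them (an untested sketch, assuming a git
checkout of the kernel tree and that the files spell out the section
name literally) is to let git grep print only the names of the assembly
files that declare a mergeable 32-byte constant section:

    # -l prints matching file names only; the pathspec restricts the
    # search to x86 crypto assembly sources.
    $ git grep -l '\.rodata\.cst32' -- 'arch/x86/crypto/*.S'

Dropping the pathspec would widen the search to the whole tree, in case
other architectures use the same section naming.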