Module Name:    src
Committed By:   christos
Date:           Thu Jan 23 17:37:03 UTC 2020

Modified Files:
        src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64:
            aesni-x86_64.S ecp_nistz256-x86_64.S ghash-x86_64.S rsaz-avx2.S
            vpaes-x86_64.S x86_64-mont5.S

Log Message:
regen


To generate a diff of this commit:
cvs rdiff -u -r1.9 -r1.10 \
    src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/aesni-x86_64.S
cvs rdiff -u -r1.4 -r1.5 \
    src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/ecp_nistz256-x86_64.S
cvs rdiff -u -r1.6 -r1.7 \
    src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/ghash-x86_64.S
cvs rdiff -u -r1.3 -r1.4 \
    src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/rsaz-avx2.S
cvs rdiff -u -r1.5 -r1.6 \
    src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/vpaes-x86_64.S
cvs rdiff -u -r1.7 -r1.8 \
    src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/x86_64-mont5.S

Please note that diffs are not public domain; they are subject to the
copyright notices on the relevant files.

Modified files:

Index: src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/aesni-x86_64.S
diff -u src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/aesni-x86_64.S:1.9 src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/aesni-x86_64.S:1.10
--- src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/aesni-x86_64.S:1.9	Sun Sep 23 09:33:05 2018
+++ src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/aesni-x86_64.S	Thu Jan 23 12:37:03 2020
@@ -5,6 +5,7 @@
 .type	aesni_encrypt,@function
 .align	16
 aesni_encrypt:
+.cfi_startproc	
 	movups	(%rdi),%xmm2
 	movl	240(%rdx),%eax
 	movups	(%rdx),%xmm0
@@ -23,12 +24,14 @@ aesni_encrypt:
 	movups	%xmm2,(%rsi)
 	pxor	%xmm2,%xmm2
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	aesni_encrypt,.-aesni_encrypt
 
 .globl	aesni_decrypt
 .type	aesni_decrypt,@function
 .align	16
 aesni_decrypt:
+.cfi_startproc	
 	movups	(%rdi),%xmm2
 	movl	240(%rdx),%eax
 	movups	(%rdx),%xmm0
@@ -47,10 +50,12 @@ aesni_decrypt:
 	movups	%xmm2,(%rsi)
 	pxor	%xmm2,%xmm2
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	aesni_decrypt, .-aesni_decrypt
 .type	_aesni_encrypt2,@function
 .align	16
 _aesni_encrypt2:
+.cfi_startproc	
 	movups	(%rcx),%xmm0
 	shll	$4,%eax
 	movups	16(%rcx),%xmm1
@@ -76,10 +81,12 @@ _aesni_encrypt2:
 .byte	102,15,56,221,208
 .byte	102,15,56,221,216
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	_aesni_encrypt2,.-_aesni_encrypt2
 .type	_aesni_decrypt2,@function
 .align	16
 _aesni_decrypt2:
+.cfi_startproc	
 	movups	(%rcx),%xmm0
 	shll	$4,%eax
 	movups	16(%rcx),%xmm1
@@ -105,10 +112,12 @@ _aesni_decrypt2:
 .byte	102,15,56,223,208
 .byte	102,15,56,223,216
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	_aesni_decrypt2,.-_aesni_decrypt2
 .type	_aesni_encrypt3,@function
 .align	16
 _aesni_encrypt3:
+.cfi_startproc	
 	movups	(%rcx),%xmm0
 	shll	$4,%eax
 	movups	16(%rcx),%xmm1
@@ -139,10 +148,12 @@ _aesni_encrypt3:
 .byte	102,15,56,221,216
 .byte	102,15,56,221,224
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	_aesni_encrypt3,.-_aesni_encrypt3
 .type	_aesni_decrypt3,@function
 .align	16
 _aesni_decrypt3:
+.cfi_startproc	
 	movups	(%rcx),%xmm0
 	shll	$4,%eax
 	movups	16(%rcx),%xmm1
@@ -173,10 +184,12 @@ _aesni_decrypt3:
 .byte	102,15,56,223,216
 .byte	102,15,56,223,224
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	_aesni_decrypt3,.-_aesni_decrypt3
 .type	_aesni_encrypt4,@function
 .align	16
 _aesni_encrypt4:
+.cfi_startproc	
 	movups	(%rcx),%xmm0
 	shll	$4,%eax
 	movups	16(%rcx),%xmm1
@@ -213,10 +226,12 @@ _aesni_encrypt4:
 .byte	102,15,56,221,224
 .byte	102,15,56,221,232
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	_aesni_encrypt4,.-_aesni_encrypt4
 .type	_aesni_decrypt4,@function
 .align	16
 _aesni_decrypt4:
+.cfi_startproc	
 	movups	(%rcx),%xmm0
 	shll	$4,%eax
 	movups	16(%rcx),%xmm1
@@ -253,10 +268,12 @@ _aesni_decrypt4:
 .byte	102,15,56,223,224
 .byte	102,15,56,223,232
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	_aesni_decrypt4,.-_aesni_decrypt4
 .type	_aesni_encrypt6,@function
 .align	16
 _aesni_encrypt6:
+.cfi_startproc	
 	movups	(%rcx),%xmm0
 	shll	$4,%eax
 	movups	16(%rcx),%xmm1
@@ -307,10 +324,12 @@ _aesni_encrypt6:
 .byte	102,15,56,221,240
 .byte	102,15,56,221,248
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	_aesni_encrypt6,.-_aesni_encrypt6
 .type	_aesni_decrypt6,@function
 .align	16
 _aesni_decrypt6:
+.cfi_startproc	
 	movups	(%rcx),%xmm0
 	shll	$4,%eax
 	movups	16(%rcx),%xmm1
@@ -361,10 +380,12 @@ _aesni_decrypt6:
 .byte	102,15,56,223,240
 .byte	102,15,56,223,248
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	_aesni_decrypt6,.-_aesni_decrypt6
 .type	_aesni_encrypt8,@function
 .align	16
 _aesni_encrypt8:
+.cfi_startproc	
 	movups	(%rcx),%xmm0
 	shll	$4,%eax
 	movups	16(%rcx),%xmm1
@@ -425,10 +446,12 @@ _aesni_encrypt8:
 .byte	102,68,15,56,221,192
 .byte	102,68,15,56,221,200
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	_aesni_encrypt8,.-_aesni_encrypt8
 .type	_aesni_decrypt8,@function
 .align	16
 _aesni_decrypt8:
+.cfi_startproc	
 	movups	(%rcx),%xmm0
 	shll	$4,%eax
 	movups	16(%rcx),%xmm1
@@ -489,11 +512,13 @@ _aesni_decrypt8:
 .byte	102,68,15,56,223,192
 .byte	102,68,15,56,223,200
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	_aesni_decrypt8,.-_aesni_decrypt8
 .globl	aesni_ecb_encrypt
 .type	aesni_ecb_encrypt,@function
 .align	16
 aesni_ecb_encrypt:
+.cfi_startproc	
 	andq	$-16,%rdx
 	jz	.Lecb_ret
 
@@ -831,6 +856,7 @@ aesni_ecb_encrypt:
 	xorps	%xmm0,%xmm0
 	pxor	%xmm1,%xmm1
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	aesni_ecb_encrypt,.-aesni_ecb_encrypt
 .globl	aesni_ccm64_encrypt_blocks
 .type	aesni_ccm64_encrypt_blocks,@function

Index: src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/ecp_nistz256-x86_64.S
diff -u src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/ecp_nistz256-x86_64.S:1.4 src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/ecp_nistz256-x86_64.S:1.5
--- src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/ecp_nistz256-x86_64.S:1.4	Sun Sep 23 09:33:05 2018
+++ src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/ecp_nistz256-x86_64.S	Thu Jan 23 12:37:03 2020
@@ -3960,6 +3960,7 @@ ecp_nistz256_mul_mont:
 .type	__ecp_nistz256_mul_montq,@function
 .align	32
 __ecp_nistz256_mul_montq:
+.cfi_startproc	
 
 
 	movq	%rax,%rbp
@@ -4171,6 +4172,7 @@ __ecp_nistz256_mul_montq:
 	movq	%r9,24(%rdi)
 
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	__ecp_nistz256_mul_montq,.-__ecp_nistz256_mul_montq
 
 
@@ -4248,6 +4250,7 @@ ecp_nistz256_sqr_mont:
 .type	__ecp_nistz256_sqr_montq,@function
 .align	32
 __ecp_nistz256_sqr_montq:
+.cfi_startproc	
 	movq	%rax,%r13
 	mulq	%r14
 	movq	%rax,%r9
@@ -4405,10 +4408,12 @@ __ecp_nistz256_sqr_montq:
 	movq	%r15,24(%rdi)
 
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	__ecp_nistz256_sqr_montq,.-__ecp_nistz256_sqr_montq
 .type	__ecp_nistz256_mul_montx,@function
 .align	32
 __ecp_nistz256_mul_montx:
+.cfi_startproc	
 
 
 	mulxq	%r9,%r8,%r9
@@ -4571,11 +4576,13 @@ __ecp_nistz256_mul_montx:
 	movq	%r9,24(%rdi)
 
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	__ecp_nistz256_mul_montx,.-__ecp_nistz256_mul_montx
 
 .type	__ecp_nistz256_sqr_montx,@function
 .align	32
 __ecp_nistz256_sqr_montx:
+.cfi_startproc	
 	mulxq	%r14,%r9,%r10
 	mulxq	%r15,%rcx,%r11
 	xorl	%eax,%eax
@@ -4699,6 +4706,7 @@ __ecp_nistz256_sqr_montx:
 	movq	%r15,24(%rdi)
 
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	__ecp_nistz256_sqr_montx,.-__ecp_nistz256_sqr_montx
 
 
@@ -4838,6 +4846,7 @@ ecp_nistz256_scatter_w5:
 .type	ecp_nistz256_gather_w5,@function
 .align	32
 ecp_nistz256_gather_w5:
+.cfi_startproc	
 	movl	OPENSSL_ia32cap_P+8(%rip),%eax
 	testl	$32,%eax
 	jnz	.Lavx2_gather_w5
@@ -4892,6 +4901,7 @@ ecp_nistz256_gather_w5:
 	movdqu	%xmm6,64(%rdi)
 	movdqu	%xmm7,80(%rdi)
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .LSEH_end_ecp_nistz256_gather_w5:
 .size	ecp_nistz256_gather_w5,.-ecp_nistz256_gather_w5
 
@@ -4920,6 +4930,7 @@ ecp_nistz256_scatter_w7:
 .type	ecp_nistz256_gather_w7,@function
 .align	32
 ecp_nistz256_gather_w7:
+.cfi_startproc	
 	movl	OPENSSL_ia32cap_P+8(%rip),%eax
 	testl	$32,%eax
 	jnz	.Lavx2_gather_w7
@@ -4963,6 +4974,7 @@ ecp_nistz256_gather_w7:
 	movdqu	%xmm4,32(%rdi)
 	movdqu	%xmm5,48(%rdi)
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .LSEH_end_ecp_nistz256_gather_w7:
 .size	ecp_nistz256_gather_w7,.-ecp_nistz256_gather_w7
 
@@ -4970,6 +4982,7 @@ ecp_nistz256_gather_w7:
 .type	ecp_nistz256_avx2_gather_w5,@function
 .align	32
 ecp_nistz256_avx2_gather_w5:
+.cfi_startproc	
 .Lavx2_gather_w5:
 	vzeroupper
 	vmovdqa	.LTwo(%rip),%ymm0
@@ -5024,6 +5037,7 @@ ecp_nistz256_avx2_gather_w5:
 	vmovdqu	%ymm4,64(%rdi)
 	vzeroupper
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .LSEH_end_ecp_nistz256_avx2_gather_w5:
 .size	ecp_nistz256_avx2_gather_w5,.-ecp_nistz256_avx2_gather_w5
 
@@ -5033,6 +5047,7 @@ ecp_nistz256_avx2_gather_w5:
 .type	ecp_nistz256_avx2_gather_w7,@function
 .align	32
 ecp_nistz256_avx2_gather_w7:
+.cfi_startproc	
 .Lavx2_gather_w7:
 	vzeroupper
 	vmovdqa	.LThree(%rip),%ymm0
@@ -5102,11 +5117,13 @@ ecp_nistz256_avx2_gather_w7:
 	vmovdqu	%ymm3,32(%rdi)
 	vzeroupper
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .LSEH_end_ecp_nistz256_avx2_gather_w7:
 .size	ecp_nistz256_avx2_gather_w7,.-ecp_nistz256_avx2_gather_w7
 .type	__ecp_nistz256_add_toq,@function
 .align	32
 __ecp_nistz256_add_toq:
+.cfi_startproc	
 	xorq	%r11,%r11
 	addq	0(%rbx),%r12
 	adcq	8(%rbx),%r13
@@ -5134,11 +5151,13 @@ __ecp_nistz256_add_toq:
 	movq	%r9,24(%rdi)
 
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	__ecp_nistz256_add_toq,.-__ecp_nistz256_add_toq
 
 .type	__ecp_nistz256_sub_fromq,@function
 .align	32
 __ecp_nistz256_sub_fromq:
+.cfi_startproc	
 	subq	0(%rbx),%r12
 	sbbq	8(%rbx),%r13
 	movq	%r12,%rax
@@ -5165,11 +5184,13 @@ __ecp_nistz256_sub_fromq:
 	movq	%r9,24(%rdi)
 
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	__ecp_nistz256_sub_fromq,.-__ecp_nistz256_sub_fromq
 
 .type	__ecp_nistz256_subq,@function
 .align	32
 __ecp_nistz256_subq:
+.cfi_startproc	
 	subq	%r12,%rax
 	sbbq	%r13,%rbp
 	movq	%rax,%r12
@@ -5192,11 +5213,13 @@ __ecp_nistz256_subq:
 	cmovnzq	%r10,%r9
 
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	__ecp_nistz256_subq,.-__ecp_nistz256_subq
 
 .type	__ecp_nistz256_mul_by_2q,@function
 .align	32
 __ecp_nistz256_mul_by_2q:
+.cfi_startproc	
 	xorq	%r11,%r11
 	addq	%r12,%r12
 	adcq	%r13,%r13
@@ -5224,6 +5247,7 @@ __ecp_nistz256_mul_by_2q:
 	movq	%r9,24(%rdi)
 
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	__ecp_nistz256_mul_by_2q,.-__ecp_nistz256_mul_by_2q
 .globl	ecp_nistz256_point_double
 .type	ecp_nistz256_point_double,@function
@@ -5656,7 +5680,9 @@ ecp_nistz256_point_add:
 .byte	102,72,15,126,206
 .byte	102,72,15,126,199
 	addq	$416,%rsp
+.cfi_adjust_cfa_offset	-416
 	jmp	.Lpoint_double_shortcutq
+.cfi_adjust_cfa_offset	416
 
 .align	32
 .Ladd_proceedq:
@@ -6218,6 +6244,7 @@ ecp_nistz256_point_add_affine:
 .type	__ecp_nistz256_add_tox,@function
 .align	32
 __ecp_nistz256_add_tox:
+.cfi_startproc	
 	xorq	%r11,%r11
 	adcq	0(%rbx),%r12
 	adcq	8(%rbx),%r13
@@ -6246,11 +6273,13 @@ __ecp_nistz256_add_tox:
 	movq	%r9,24(%rdi)
 
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	__ecp_nistz256_add_tox,.-__ecp_nistz256_add_tox
 
 .type	__ecp_nistz256_sub_fromx,@function
 .align	32
 __ecp_nistz256_sub_fromx:
+.cfi_startproc	
 	xorq	%r11,%r11
 	sbbq	0(%rbx),%r12
 	sbbq	8(%rbx),%r13
@@ -6279,11 +6308,13 @@ __ecp_nistz256_sub_fromx:
 	movq	%r9,24(%rdi)
 
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	__ecp_nistz256_sub_fromx,.-__ecp_nistz256_sub_fromx
 
 .type	__ecp_nistz256_subx,@function
 .align	32
 __ecp_nistz256_subx:
+.cfi_startproc	
 	xorq	%r11,%r11
 	sbbq	%r12,%rax
 	sbbq	%r13,%rbp
@@ -6308,11 +6339,13 @@ __ecp_nistz256_subx:
 	cmovcq	%r10,%r9
 
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	__ecp_nistz256_subx,.-__ecp_nistz256_subx
 
 .type	__ecp_nistz256_mul_by_2x,@function
 .align	32
 __ecp_nistz256_mul_by_2x:
+.cfi_startproc	
 	xorq	%r11,%r11
 	adcq	%r12,%r12
 	adcq	%r13,%r13
@@ -6341,6 +6374,7 @@ __ecp_nistz256_mul_by_2x:
 	movq	%r9,24(%rdi)
 
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	__ecp_nistz256_mul_by_2x,.-__ecp_nistz256_mul_by_2x
 .type	ecp_nistz256_point_doublex,@function
 .align	32
@@ -6765,7 +6799,9 @@ ecp_nistz256_point_addx:
 .byte	102,72,15,126,206
 .byte	102,72,15,126,199
 	addq	$416,%rsp
+.cfi_adjust_cfa_offset	-416
 	jmp	.Lpoint_double_shortcutx
+.cfi_adjust_cfa_offset	416
 
 .align	32
 .Ladd_proceedx:

Index: src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/ghash-x86_64.S
diff -u src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/ghash-x86_64.S:1.6 src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/ghash-x86_64.S:1.7
--- src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/ghash-x86_64.S:1.6	Sun Sep 23 09:33:05 2018
+++ src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/ghash-x86_64.S	Thu Jan 23 12:37:03 2020
@@ -706,6 +706,7 @@ gcm_ghash_4bit:
 .type	gcm_init_clmul,@function
 .align	16
 gcm_init_clmul:
+.cfi_startproc	
 .L_init_clmul:
 	movdqu	(%rsi),%xmm2
 	pshufd	$78,%xmm2,%xmm2
@@ -857,11 +858,13 @@ gcm_init_clmul:
 .byte	102,15,58,15,227,8
 	movdqu	%xmm4,80(%rdi)
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	gcm_init_clmul,.-gcm_init_clmul
 .globl	gcm_gmult_clmul
 .type	gcm_gmult_clmul,@function
 .align	16
 gcm_gmult_clmul:
+.cfi_startproc	
 .L_gmult_clmul:
 	movdqu	(%rdi),%xmm0
 	movdqa	.Lbswap_mask(%rip),%xmm5
@@ -908,11 +911,13 @@ gcm_gmult_clmul:
 .byte	102,15,56,0,197
 	movdqu	%xmm0,(%rdi)
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	gcm_gmult_clmul,.-gcm_gmult_clmul
 .globl	gcm_ghash_clmul
 .type	gcm_ghash_clmul,@function
 .align	32
 gcm_ghash_clmul:
+.cfi_startproc	
 .L_ghash_clmul:
 	movdqa	.Lbswap_mask(%rip),%xmm10
 
@@ -1291,11 +1296,13 @@ gcm_ghash_clmul:
 .byte	102,65,15,56,0,194
 	movdqu	%xmm0,(%rdi)
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	gcm_ghash_clmul,.-gcm_ghash_clmul
 .globl	gcm_init_avx
 .type	gcm_init_avx,@function
 .align	32
 gcm_init_avx:
+.cfi_startproc	
 	vzeroupper
 
 	vmovdqu	(%rsi),%xmm2
@@ -1398,17 +1405,21 @@ gcm_init_avx:
 
 	vzeroupper
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	gcm_init_avx,.-gcm_init_avx
 .globl	gcm_gmult_avx
 .type	gcm_gmult_avx,@function
 .align	32
 gcm_gmult_avx:
+.cfi_startproc	
 	jmp	.L_gmult_clmul
+.cfi_endproc	
 .size	gcm_gmult_avx,.-gcm_gmult_avx
 .globl	gcm_ghash_avx
 .type	gcm_ghash_avx,@function
 .align	32
 gcm_ghash_avx:
+.cfi_startproc	
 	vzeroupper
 
 	vmovdqu	(%rdi),%xmm10
@@ -1780,6 +1791,7 @@ gcm_ghash_avx:
 	vmovdqu	%xmm10,(%rdi)
 	vzeroupper
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	gcm_ghash_avx,.-gcm_ghash_avx
 .align	64
 .Lbswap_mask:

Index: src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/rsaz-avx2.S
diff -u src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/rsaz-avx2.S:1.3 src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/rsaz-avx2.S:1.4
--- src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/rsaz-avx2.S:1.3	Sun Sep 23 09:33:05 2018
+++ src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/rsaz-avx2.S	Thu Jan 23 12:37:03 2020
@@ -1213,6 +1213,7 @@ rsaz_1024_mul_avx2:
 .type	rsaz_1024_red2norm_avx2,@function
 .align	32
 rsaz_1024_red2norm_avx2:
+.cfi_startproc	
 	subq	$-128,%rsi
 	xorq	%rax,%rax
 	movq	-128(%rsi),%r8
@@ -1404,12 +1405,14 @@ rsaz_1024_red2norm_avx2:
 	movq	%rax,120(%rdi)
 	movq	%r11,%rax
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	rsaz_1024_red2norm_avx2,.-rsaz_1024_red2norm_avx2
 
 .globl	rsaz_1024_norm2red_avx2
 .type	rsaz_1024_norm2red_avx2,@function
 .align	32
 rsaz_1024_norm2red_avx2:
+.cfi_startproc	
 	subq	$-128,%rdi
 	movq	(%rsi),%r8
 	movl	$0x1fffffff,%eax
@@ -1562,11 +1565,13 @@ rsaz_1024_norm2red_avx2:
 	movq	%r8,176(%rdi)
 	movq	%r8,184(%rdi)
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	rsaz_1024_norm2red_avx2,.-rsaz_1024_norm2red_avx2
 .globl	rsaz_1024_scatter5_avx2
 .type	rsaz_1024_scatter5_avx2,@function
 .align	32
 rsaz_1024_scatter5_avx2:
+.cfi_startproc	
 	vzeroupper
 	vmovdqu	.Lscatter_permd(%rip),%ymm5
 	shll	$4,%edx
@@ -1586,6 +1591,7 @@ rsaz_1024_scatter5_avx2:
 
 	vzeroupper
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	rsaz_1024_scatter5_avx2,.-rsaz_1024_scatter5_avx2
 
 .globl	rsaz_1024_gather5_avx2

Index: src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/vpaes-x86_64.S
diff -u src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/vpaes-x86_64.S:1.5 src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/vpaes-x86_64.S:1.6
--- src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/vpaes-x86_64.S:1.5	Fri Oct 14 12:09:44 2016
+++ src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/vpaes-x86_64.S	Thu Jan 23 12:37:03 2020
@@ -19,6 +19,7 @@
 .type	_vpaes_encrypt_core,@function
 .align	16
 _vpaes_encrypt_core:
+.cfi_startproc	
 	movq	%rdx,%r9
 	movq	$16,%r11
 	movl	240(%rdx),%eax
@@ -99,6 +100,7 @@ _vpaes_encrypt_core:
 	pxor	%xmm4,%xmm0
 .byte	102,15,56,0,193
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	_vpaes_encrypt_core,.-_vpaes_encrypt_core
 
 
@@ -109,6 +111,7 @@ _vpaes_encrypt_core:
 .type	_vpaes_decrypt_core,@function
 .align	16
 _vpaes_decrypt_core:
+.cfi_startproc	
 	movq	%rdx,%r9
 	movl	240(%rdx),%eax
 	movdqa	%xmm9,%xmm1
@@ -205,6 +208,7 @@ _vpaes_decrypt_core:
 	pxor	%xmm4,%xmm0
 .byte	102,15,56,0,194
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	_vpaes_decrypt_core,.-_vpaes_decrypt_core
 
 
@@ -215,6 +219,7 @@ _vpaes_decrypt_core:
 .type	_vpaes_schedule_core,@function
 .align	16
 _vpaes_schedule_core:
+.cfi_startproc	
 
 
 
@@ -381,6 +386,7 @@ _vpaes_schedule_core:
 	pxor	%xmm6,%xmm6
 	pxor	%xmm7,%xmm7
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	_vpaes_schedule_core,.-_vpaes_schedule_core
 
 
@@ -400,6 +406,7 @@ _vpaes_schedule_core:
 .type	_vpaes_schedule_192_smear,@function
 .align	16
 _vpaes_schedule_192_smear:
+.cfi_startproc	
 	pshufd	$0x80,%xmm6,%xmm1
 	pshufd	$0xFE,%xmm7,%xmm0
 	pxor	%xmm1,%xmm6
@@ -408,6 +415,7 @@ _vpaes_schedule_192_smear:
 	movdqa	%xmm6,%xmm0
 	movhlps	%xmm1,%xmm6
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	_vpaes_schedule_192_smear,.-_vpaes_schedule_192_smear
 
 
@@ -431,6 +439,7 @@ _vpaes_schedule_192_smear:
 .type	_vpaes_schedule_round,@function
 .align	16
 _vpaes_schedule_round:
+.cfi_startproc	
 
 	pxor	%xmm1,%xmm1
 .byte	102,65,15,58,15,200,15
@@ -484,6 +493,7 @@ _vpaes_schedule_low_round:
 	pxor	%xmm7,%xmm0
 	movdqa	%xmm0,%xmm7
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	_vpaes_schedule_round,.-_vpaes_schedule_round
 
 
@@ -498,6 +508,7 @@ _vpaes_schedule_low_round:
 .type	_vpaes_schedule_transform,@function
 .align	16
 _vpaes_schedule_transform:
+.cfi_startproc	
 	movdqa	%xmm9,%xmm1
 	pandn	%xmm0,%xmm1
 	psrld	$4,%xmm1
@@ -508,6 +519,7 @@ _vpaes_schedule_transform:
 .byte	102,15,56,0,193
 	pxor	%xmm2,%xmm0
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	_vpaes_schedule_transform,.-_vpaes_schedule_transform
 
 
@@ -536,6 +548,7 @@ _vpaes_schedule_transform:
 .type	_vpaes_schedule_mangle,@function
 .align	16
 _vpaes_schedule_mangle:
+.cfi_startproc	
 	movdqa	%xmm0,%xmm4
 	movdqa	.Lk_mc_forward(%rip),%xmm5
 	testq	%rcx,%rcx
@@ -600,6 +613,7 @@ _vpaes_schedule_mangle:
 	andq	$0x30,%r8
 	movdqu	%xmm3,(%rdx)
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	_vpaes_schedule_mangle,.-_vpaes_schedule_mangle
 
 
@@ -609,6 +623,7 @@ _vpaes_schedule_mangle:
 .type	vpaes_set_encrypt_key,@function
 .align	16
 vpaes_set_encrypt_key:
+.cfi_startproc	
 	movl	%esi,%eax
 	shrl	$5,%eax
 	addl	$5,%eax
@@ -619,12 +634,14 @@ vpaes_set_encrypt_key:
 	call	_vpaes_schedule_core
 	xorl	%eax,%eax
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	vpaes_set_encrypt_key,.-vpaes_set_encrypt_key
 
 .globl	vpaes_set_decrypt_key
 .type	vpaes_set_decrypt_key,@function
 .align	16
 vpaes_set_decrypt_key:
+.cfi_startproc	
 	movl	%esi,%eax
 	shrl	$5,%eax
 	addl	$5,%eax
@@ -640,33 +657,39 @@ vpaes_set_decrypt_key:
 	call	_vpaes_schedule_core
 	xorl	%eax,%eax
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	vpaes_set_decrypt_key,.-vpaes_set_decrypt_key
 
 .globl	vpaes_encrypt
 .type	vpaes_encrypt,@function
 .align	16
 vpaes_encrypt:
+.cfi_startproc	
 	movdqu	(%rdi),%xmm0
 	call	_vpaes_preheat
 	call	_vpaes_encrypt_core
 	movdqu	%xmm0,(%rsi)
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	vpaes_encrypt,.-vpaes_encrypt
 
 .globl	vpaes_decrypt
 .type	vpaes_decrypt,@function
 .align	16
 vpaes_decrypt:
+.cfi_startproc	
 	movdqu	(%rdi),%xmm0
 	call	_vpaes_preheat
 	call	_vpaes_decrypt_core
 	movdqu	%xmm0,(%rsi)
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	vpaes_decrypt,.-vpaes_decrypt
 .globl	vpaes_cbc_encrypt
 .type	vpaes_cbc_encrypt,@function
 .align	16
 vpaes_cbc_encrypt:
+.cfi_startproc	
 	xchgq	%rcx,%rdx
 	subq	$16,%rcx
 	jc	.Lcbc_abort
@@ -702,6 +725,7 @@ vpaes_cbc_encrypt:
 	movdqu	%xmm6,(%r8)
 .Lcbc_abort:
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	vpaes_cbc_encrypt,.-vpaes_cbc_encrypt
 
 
@@ -712,6 +736,7 @@ vpaes_cbc_encrypt:
 .type	_vpaes_preheat,@function
 .align	16
 _vpaes_preheat:
+.cfi_startproc	
 	leaq	.Lk_s0F(%rip),%r10
 	movdqa	-32(%r10),%xmm10
 	movdqa	-16(%r10),%xmm11
@@ -721,6 +746,7 @@ _vpaes_preheat:
 	movdqa	80(%r10),%xmm15
 	movdqa	96(%r10),%xmm14
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	_vpaes_preheat,.-_vpaes_preheat
 
 

Index: src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/x86_64-mont5.S
diff -u src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/x86_64-mont5.S:1.7 src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/x86_64-mont5.S:1.8
--- src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/x86_64-mont5.S:1.7	Sun Sep 23 09:33:05 2018
+++ src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/x86_64-mont5.S	Thu Jan 23 12:37:03 2020
@@ -2894,6 +2894,7 @@ bn_powerx5:
 .align	32
 bn_sqrx8x_internal:
 __bn_sqrx8x_internal:
+.cfi_startproc	
 
 
 
@@ -3505,6 +3506,7 @@ __bn_sqrx8x_reduction:
 	cmpq	8+8(%rsp),%r8
 	jb	.Lsqrx8x_reduction_loop
 	.byte	0xf3,0xc3
+.cfi_endproc	
 .size	bn_sqrx8x_internal,.-bn_sqrx8x_internal
 .align	32
 __bn_postx4x_internal:

Reply via email to