Diffstat (limited to 'crypto/aes/asm/vpaes-x86_64.pl')
-rwxr-xr-x  crypto/aes/asm/vpaes-x86_64.pl | 28
1 file changed, 27 insertions(+), 1 deletion(-)
diff --git a/crypto/aes/asm/vpaes-x86_64.pl b/crypto/aes/asm/vpaes-x86_64.pl
index b715aca167d1..33d293e623d5 100755
--- a/crypto/aes/asm/vpaes-x86_64.pl
+++ b/crypto/aes/asm/vpaes-x86_64.pl
@@ -1,5 +1,5 @@
#! /usr/bin/env perl
-# Copyright 2011-2016 The OpenSSL Project Authors. All Rights Reserved.
+# Copyright 2011-2019 The OpenSSL Project Authors. All Rights Reserved.
#
# Licensed under the OpenSSL license (the "License"). You may not use
# this file except in compliance with the License. You can obtain a copy
@@ -91,6 +91,7 @@ $code.=<<___;
.type _vpaes_encrypt_core,\@abi-omnipotent
.align 16
_vpaes_encrypt_core:
+.cfi_startproc
mov %rdx, %r9
mov \$16, %r11
mov 240(%rdx),%eax
@@ -171,6 +172,7 @@ _vpaes_encrypt_core:
pxor %xmm4, %xmm0 # 0 = A
pshufb %xmm1, %xmm0
ret
+.cfi_endproc
.size _vpaes_encrypt_core,.-_vpaes_encrypt_core
##
@@ -181,6 +183,7 @@ _vpaes_encrypt_core:
.type _vpaes_decrypt_core,\@abi-omnipotent
.align 16
_vpaes_decrypt_core:
+.cfi_startproc
mov %rdx, %r9 # load key
mov 240(%rdx),%eax
movdqa %xmm9, %xmm1
@@ -277,6 +280,7 @@ _vpaes_decrypt_core:
pxor %xmm4, %xmm0 # 0 = A
pshufb %xmm2, %xmm0
ret
+.cfi_endproc
.size _vpaes_decrypt_core,.-_vpaes_decrypt_core
########################################################
@@ -287,6 +291,7 @@ _vpaes_decrypt_core:
.type _vpaes_schedule_core,\@abi-omnipotent
.align 16
_vpaes_schedule_core:
+.cfi_startproc
# rdi = key
# rsi = size in bits
# rdx = buffer
@@ -453,6 +458,7 @@ _vpaes_schedule_core:
pxor %xmm6, %xmm6
pxor %xmm7, %xmm7
ret
+.cfi_endproc
.size _vpaes_schedule_core,.-_vpaes_schedule_core
##
@@ -472,6 +478,7 @@ _vpaes_schedule_core:
.type _vpaes_schedule_192_smear,\@abi-omnipotent
.align 16
_vpaes_schedule_192_smear:
+.cfi_startproc
pshufd \$0x80, %xmm6, %xmm1 # d c 0 0 -> c 0 0 0
pshufd \$0xFE, %xmm7, %xmm0 # b a _ _ -> b b b a
pxor %xmm1, %xmm6 # -> c+d c 0 0
@@ -480,6 +487,7 @@ _vpaes_schedule_192_smear:
movdqa %xmm6, %xmm0
movhlps %xmm1, %xmm6 # clobber low side with zeros
ret
+.cfi_endproc
.size _vpaes_schedule_192_smear,.-_vpaes_schedule_192_smear
##
@@ -503,6 +511,7 @@ _vpaes_schedule_192_smear:
.type _vpaes_schedule_round,\@abi-omnipotent
.align 16
_vpaes_schedule_round:
+.cfi_startproc
# extract rcon from xmm8
pxor %xmm1, %xmm1
palignr \$15, %xmm8, %xmm1
@@ -556,6 +565,7 @@ _vpaes_schedule_low_round:
pxor %xmm7, %xmm0
movdqa %xmm0, %xmm7
ret
+.cfi_endproc
.size _vpaes_schedule_round,.-_vpaes_schedule_round
##
@@ -570,6 +580,7 @@ _vpaes_schedule_low_round:
.type _vpaes_schedule_transform,\@abi-omnipotent
.align 16
_vpaes_schedule_transform:
+.cfi_startproc
movdqa %xmm9, %xmm1
pandn %xmm0, %xmm1
psrld \$4, %xmm1
@@ -580,6 +591,7 @@ _vpaes_schedule_transform:
pshufb %xmm1, %xmm0
pxor %xmm2, %xmm0
ret
+.cfi_endproc
.size _vpaes_schedule_transform,.-_vpaes_schedule_transform
##
@@ -608,6 +620,7 @@ _vpaes_schedule_transform:
.type _vpaes_schedule_mangle,\@abi-omnipotent
.align 16
_vpaes_schedule_mangle:
+.cfi_startproc
movdqa %xmm0, %xmm4 # save xmm0 for later
movdqa .Lk_mc_forward(%rip),%xmm5
test %rcx, %rcx
@@ -672,6 +685,7 @@ _vpaes_schedule_mangle:
and \$0x30, %r8
movdqu %xmm3, (%rdx)
ret
+.cfi_endproc
.size _vpaes_schedule_mangle,.-_vpaes_schedule_mangle
#
@@ -681,6 +695,7 @@ _vpaes_schedule_mangle:
.type ${PREFIX}_set_encrypt_key,\@function,3
.align 16
${PREFIX}_set_encrypt_key:
+.cfi_startproc
___
$code.=<<___ if ($win64);
lea -0xb8(%rsp),%rsp
@@ -723,12 +738,14 @@ ___
$code.=<<___;
xor %eax,%eax
ret
+.cfi_endproc
.size ${PREFIX}_set_encrypt_key,.-${PREFIX}_set_encrypt_key
.globl ${PREFIX}_set_decrypt_key
.type ${PREFIX}_set_decrypt_key,\@function,3
.align 16
${PREFIX}_set_decrypt_key:
+.cfi_startproc
___
$code.=<<___ if ($win64);
lea -0xb8(%rsp),%rsp
@@ -776,12 +793,14 @@ ___
$code.=<<___;
xor %eax,%eax
ret
+.cfi_endproc
.size ${PREFIX}_set_decrypt_key,.-${PREFIX}_set_decrypt_key
.globl ${PREFIX}_encrypt
.type ${PREFIX}_encrypt,\@function,3
.align 16
${PREFIX}_encrypt:
+.cfi_startproc
___
$code.=<<___ if ($win64);
lea -0xb8(%rsp),%rsp
@@ -819,12 +838,14 @@ $code.=<<___ if ($win64);
___
$code.=<<___;
ret
+.cfi_endproc
.size ${PREFIX}_encrypt,.-${PREFIX}_encrypt
.globl ${PREFIX}_decrypt
.type ${PREFIX}_decrypt,\@function,3
.align 16
${PREFIX}_decrypt:
+.cfi_startproc
___
$code.=<<___ if ($win64);
lea -0xb8(%rsp),%rsp
@@ -862,6 +883,7 @@ $code.=<<___ if ($win64);
___
$code.=<<___;
ret
+.cfi_endproc
.size ${PREFIX}_decrypt,.-${PREFIX}_decrypt
___
{
@@ -874,6 +896,7 @@ $code.=<<___;
.type ${PREFIX}_cbc_encrypt,\@function,6
.align 16
${PREFIX}_cbc_encrypt:
+.cfi_startproc
xchg $key,$len
___
($len,$key)=($key,$len);
@@ -944,6 +967,7 @@ ___
$code.=<<___;
.Lcbc_abort:
ret
+.cfi_endproc
.size ${PREFIX}_cbc_encrypt,.-${PREFIX}_cbc_encrypt
___
}
@@ -957,6 +981,7 @@ $code.=<<___;
.type _vpaes_preheat,\@abi-omnipotent
.align 16
_vpaes_preheat:
+.cfi_startproc
lea .Lk_s0F(%rip), %r10
movdqa -0x20(%r10), %xmm10 # .Lk_inv
movdqa -0x10(%r10), %xmm11 # .Lk_inv+16
@@ -966,6 +991,7 @@ _vpaes_preheat:
movdqa 0x50(%r10), %xmm15 # .Lk_sb2
movdqa 0x60(%r10), %xmm14 # .Lk_sb2+16
ret
+.cfi_endproc
.size _vpaes_preheat,.-_vpaes_preheat
########################################################
## ##
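The change itself follows one uniform pattern: each public entry point and each internal abi-omnipotent helper is bracketed with .cfi_startproc/.cfi_endproc so the assembler emits DWARF call-frame information for these routines. The bare pair is enough for code that never pushes registers or moves %rsp between the two markers; a routine that did would need additional directives such as .cfi_adjust_cfa_offset. Below is a minimal sketch of the same annotation in perlasm style; the routine _example_leaf and its body are hypothetical and only illustrate where the directives sit relative to .type, ret and .size.

#! /usr/bin/env perl
# Sketch only (not part of the patch): a leaf routine annotated the same
# way, in the perlasm style used by vpaes-x86_64.pl. The name _example_leaf
# and its body are made up for illustration.
my $code = "";

$code.=<<___;
.type	_example_leaf,\@abi-omnipotent
.align	16
_example_leaf:
.cfi_startproc			# open the DWARF FDE right after the label
	mov	%rdi, %rax	# trivial body: return the first integer argument (SysV ABI)
	ret
.cfi_endproc			# close the FDE just before .size
.size	_example_leaf,.-_example_leaf
___

print $code;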