path: root/arch/x86/crypto/sha256-avx-asm.S
author    Eric Biggers <ebiggers@google.com> 2022-11-18 11:44:14 -0800
committer Herbert Xu <herbert@gondor.apana.org.au> 2022-11-25 17:39:19 +0800
commit    19940ebbb59c12146d05c5f8acd873197b290648 (patch)
tree      987dddfc44f71eec4175b5b466f37e6d6770fe53 /arch/x86/crypto/sha256-avx-asm.S
parent    crypto: x86/sha1 - fix possible crash with CFI enabled (diff)
download  linux-19940ebbb59c12146d05c5f8acd873197b290648.tar.gz
          linux-19940ebbb59c12146d05c5f8acd873197b290648.tar.bz2
          linux-19940ebbb59c12146d05c5f8acd873197b290648.zip
crypto: x86/sha256 - fix possible crash with CFI enabled
sha256_transform_ssse3(), sha256_transform_avx(), sha256_transform_rorx(), and sha256_ni_transform() are called via indirect function calls. Therefore they need to use SYM_TYPED_FUNC_START instead of SYM_FUNC_START to cause their type hashes to be emitted when the kernel is built with CONFIG_CFI_CLANG=y. Otherwise, the code crashes with a CFI failure (if the compiler didn't happen to optimize out the indirect calls).

Fixes: ccace936eec7 ("x86: Add types to indirectly called assembly functions")
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Reviewed-by: Sami Tolvanen <samitolvanen@google.com>
Signed-off-by: Eric Biggers <ebiggers@google.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
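For context, the indirect calls come from the SHA-256 glue code, which selects one of these transforms at runtime and invokes it through a function pointer. Below is a minimal C sketch of that pattern, assuming the kernel's sha256_block_fn pointer type and a hypothetical caller do_blocks(); it is simplified for illustration, not taken verbatim from this commit:

#include <linux/linkage.h>
#include <linux/types.h>

struct sha256_state;

/*
 * With CONFIG_CFI_CLANG=y, clang derives a type hash from this
 * prototype and checks it at every indirect call made through a
 * pointer of this type.
 */
typedef void (sha256_block_fn)(struct sha256_state *sst, u8 const *src,
			       int blocks);

asmlinkage void sha256_transform_avx(struct sha256_state *state,
				     const u8 *data, int blocks);

/* Hypothetical caller illustrating the indirect call. */
static void do_blocks(struct sha256_state *state, const u8 *data,
		      int blocks, sha256_block_fn *block_fn)
{
	/*
	 * kCFI compares the type hash emitted alongside block_fn's
	 * entry point against the hash of sha256_block_fn. Assembly
	 * routines opened with SYM_FUNC_START emit no such hash, so
	 * the check fails here; SYM_TYPED_FUNC_START emits it.
	 */
	block_fn(state, data, blocks);
}

A call such as do_blocks(state, data, blocks, sha256_transform_avx) therefore only passes the CFI check once the assembly routine is annotated as in the diff below.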
Diffstat (limited to 'arch/x86/crypto/sha256-avx-asm.S')
-rw-r--r--  arch/x86/crypto/sha256-avx-asm.S | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/arch/x86/crypto/sha256-avx-asm.S b/arch/x86/crypto/sha256-avx-asm.S
index 3baa1ec39097..06ea30c20828 100644
--- a/arch/x86/crypto/sha256-avx-asm.S
+++ b/arch/x86/crypto/sha256-avx-asm.S
@@ -48,6 +48,7 @@
########################################################################
#include <linux/linkage.h>
+#include <linux/cfi_types.h>
## assume buffers not aligned
#define VMOVDQ vmovdqu
@@ -346,7 +347,7 @@ a = TMP_
## arg 3 : Num blocks
########################################################################
.text
-SYM_FUNC_START(sha256_transform_avx)
+SYM_TYPED_FUNC_START(sha256_transform_avx)
.align 32
pushq %rbx
pushq %r12