author		Eric Biggers <ebiggers@kernel.org>	2025-06-30 09:03:11 -0700
committer	Eric Biggers <ebiggers@kernel.org>	2025-06-30 09:26:19 -0700
commit		24c91b62ac50a798ceabb3482efbca3e0e88a2db (patch)
tree		859589acb7b87b05b71120e42dec8c7fe7067731 /lib/crypto/arm/sha512.h
parent		4bc7f7b687a2a1506cdc457bc4f6d29a81794a08 (diff)
lib/crypto: arm/sha512: Migrate optimized SHA-512 code to library
Instead of exposing the arm-optimized SHA-512 code via arm-specific crypto_shash algorithms, just implement the sha512_blocks() library function. This is much simpler; it makes the SHA-512 (and SHA-384) library functions arm-optimized, and it fixes the longstanding issue where the arm-optimized SHA-512 code was disabled by default. SHA-512 still remains available through crypto_shash, but individual architectures no longer need to handle it.

To match sha512_blocks(), change the type of the nblocks parameter of the assembly functions from int to size_t. The assembly functions already treated it as size_t.

Acked-by: Ard Biesheuvel <ardb@kernel.org>
Link: https://lore.kernel.org/r/20250630160320.2888-8-ebiggers@kernel.org
Signed-off-by: Eric Biggers <ebiggers@kernel.org>
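For context, a minimal caller-side sketch of the library interface this patch plugs into (a hypothetical caller, assuming the one-shot sha512() helper from <crypto/sha2.h>; nothing below is part of the patch itself):

#include <crypto/sha2.h>

/* Hypothetical caller: hash a buffer via the SHA-512 library API.
 * On arm, the underlying sha512_blocks() now dispatches to the
 * NEON-optimized assembly when available. */
static void example_hash(const u8 *buf, size_t len)
{
	u8 digest[SHA512_DIGEST_SIZE];

	sha512(buf, len, digest);
}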
Diffstat (limited to 'lib/crypto/arm/sha512.h')
-rw-r--r--	lib/crypto/arm/sha512.h	38
1 file changed, 38 insertions(+), 0 deletions(-)
diff --git a/lib/crypto/arm/sha512.h b/lib/crypto/arm/sha512.h
new file mode 100644
index 0000000000000..f147b6490d6cd
--- /dev/null
+++ b/lib/crypto/arm/sha512.h
@@ -0,0 +1,38 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * arm32-optimized SHA-512 block function
+ *
+ * Copyright 2025 Google LLC
+ */
+
+#include <asm/neon.h>
+#include <crypto/internal/simd.h>
+
+static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_neon);
+
+asmlinkage void sha512_block_data_order(struct sha512_block_state *state,
+ const u8 *data, size_t nblocks);
+asmlinkage void sha512_block_data_order_neon(struct sha512_block_state *state,
+ const u8 *data, size_t nblocks);
+
+static void sha512_blocks(struct sha512_block_state *state,
+ const u8 *data, size_t nblocks)
+{
+ if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) &&
+ static_branch_likely(&have_neon) && likely(crypto_simd_usable())) {
+ kernel_neon_begin();
+ sha512_block_data_order_neon(state, data, nblocks);
+ kernel_neon_end();
+ } else {
+ sha512_block_data_order(state, data, nblocks);
+ }
+}
+
+#ifdef CONFIG_KERNEL_MODE_NEON
+#define sha512_mod_init_arch sha512_mod_init_arch
+static inline void sha512_mod_init_arch(void)
+{
+ if (cpu_has_neon())
+ static_branch_enable(&have_neon);
+}
+#endif /* CONFIG_KERNEL_MODE_NEON */
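For reference, a rough sketch of how a per-arch header like this gets wired up on the generic side (an assumption based on the lib/crypto per-arch pattern; the real lib/crypto/sha512.c contains more than shown here, and the exact include and initcall details are illustrative):

/* Hypothetical simplification of the generic lib/crypto/sha512.c side. */
#ifdef CONFIG_CRYPTO_LIB_SHA512_ARCH
#include "sha512.h"	/* resolves to the per-arch header, e.g. arm/sha512.h */
#endif

#ifdef sha512_mod_init_arch
/* Run once at boot so the arch can probe CPU features, e.g. flip the
 * have_neon static key that steers sha512_blocks() onto the NEON path. */
static int __init sha512_mod_init(void)
{
	sha512_mod_init_arch();
	return 0;
}
subsys_initcall(sha512_mod_init);
#endif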