author    Eric Biggers <ebiggers@kernel.org>    2025-06-30 09:06:43 -0700
committer Eric Biggers <ebiggers@kernel.org>    2025-07-04 10:23:11 -0700
commit    e96cb9507f2d8ba150d417dcd283204564945831 (patch)
tree      33976c08850b8d15d9017cd0f2cc642ab5fb8cd2 /include/crypto
parent    lib/crypto: sha256: Remove sha256_is_arch_optimized() (diff)
lib/crypto: sha256: Consolidate into single module
Consolidate the CPU-based SHA-256 code into a single module, following
what I did with SHA-512:

- Each arch now provides a header file lib/crypto/$(SRCARCH)/sha256.h,
  replacing lib/crypto/$(SRCARCH)/sha256.c. The header defines
  sha256_blocks() and optionally sha256_mod_init_arch(). It is included
  by lib/crypto/sha256.c, and thus the code gets built into the single
  libsha256 module, with proper inlining and dead code elimination
  (see the sketch following this message).

- sha256_blocks_generic() is moved from lib/crypto/sha256-generic.c
  into lib/crypto/sha256.c. It's now a static function marked with
  __maybe_unused, so the compiler automatically eliminates it in any
  cases where it's not used.

- Whether arch-optimized SHA-256 is buildable is now controlled
  centrally by lib/crypto/Kconfig instead of by
  lib/crypto/$(SRCARCH)/Kconfig. The conditions for enabling it remain
  the same as before, and it remains enabled by default.

- Any additional arch-specific translation units for the optimized
  SHA-256 code (such as assembly files) are now compiled by
  lib/crypto/Makefile instead of lib/crypto/$(SRCARCH)/Makefile.

Acked-by: Ard Biesheuvel <ardb@kernel.org>
Link: https://lore.kernel.org/r/20250630160645.3198-13-ebiggers@kernel.org
Signed-off-by: Eric Biggers <ebiggers@kernel.org>
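To make the first bullet concrete, here is a minimal sketch of what a
per-arch lib/crypto/$(SRCARCH)/sha256.h can look like, using x86 SHA-NI
as the example. Only the sha256_blocks()/sha256_mod_init_arch() contract
and the sha256_blocks_generic() fallback come from the commit message;
the static key, the feature check, and sha256_blocks_ni() are
hypothetical illustrations, not the actual kernel code:

/*
 * Sketch of a per-arch sha256.h (illustrative; all names below are
 * hypothetical except sha256_blocks(), sha256_mod_init_arch(), and
 * sha256_blocks_generic()).
 */
#include <asm/cpufeature.h>
#include <asm/fpu/api.h>
#include <linux/jump_label.h>

static DEFINE_STATIC_KEY_FALSE(have_sha_ni);

/* Assembly routine built as a separate translation unit (hypothetical). */
asmlinkage void sha256_blocks_ni(struct sha256_block_state *state,
				 const u8 *data, size_t nblocks);

/* Included by lib/crypto/sha256.c, so this inlines into libsha256. */
static void sha256_blocks(struct sha256_block_state *state,
			  const u8 *data, size_t nblocks)
{
	if (static_branch_likely(&have_sha_ni)) {
		kernel_fpu_begin();
		sha256_blocks_ni(state, data, nblocks);
		kernel_fpu_end();
	} else {
		/* Falls back to the __maybe_unused generic implementation. */
		sha256_blocks_generic(state, data, nblocks);
	}
}

#define sha256_mod_init_arch sha256_mod_init_arch
static void sha256_mod_init_arch(void)
{
	if (boot_cpu_has(X86_FEATURE_SHA_NI))
		static_branch_enable(&have_sha_ni);
}

Because the header is included rather than linked against, a
configuration without the arch code lets the compiler eliminate the
dispatch, and sha256_blocks_generic() is discarded wherever it ends up
unused.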
Diffstat (limited to 'include/crypto')
-rw-r--r--  include/crypto/internal/sha2.h  52
1 file changed, 0 insertions(+), 52 deletions(-)
diff --git a/include/crypto/internal/sha2.h b/include/crypto/internal/sha2.h
deleted file mode 100644
index 7915a3a46bc8..000000000000
--- a/include/crypto/internal/sha2.h
+++ /dev/null
@@ -1,52 +0,0 @@
-/* SPDX-License-Identifier: GPL-2.0-only */
-
-#ifndef _CRYPTO_INTERNAL_SHA2_H
-#define _CRYPTO_INTERNAL_SHA2_H
-
-#include <crypto/sha2.h>
-#include <linux/compiler_attributes.h>
-#include <linux/string.h>
-#include <linux/types.h>
-#include <linux/unaligned.h>
-
-void sha256_blocks_generic(struct sha256_block_state *state,
- const u8 *data, size_t nblocks);
-void sha256_blocks_arch(struct sha256_block_state *state,
- const u8 *data, size_t nblocks);
-
-static __always_inline void sha256_choose_blocks(
- u32 state[SHA256_STATE_WORDS], const u8 *data, size_t nblocks,
- bool force_generic, bool force_simd)
-{
- if (!IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_SHA256) || force_generic)
- sha256_blocks_generic((struct sha256_block_state *)state, data, nblocks);
- else
- sha256_blocks_arch((struct sha256_block_state *)state, data, nblocks);
-}
-
-static __always_inline void sha256_finup(
- struct crypto_sha256_state *sctx, u8 buf[SHA256_BLOCK_SIZE],
- size_t len, u8 out[SHA256_DIGEST_SIZE], size_t digest_size,
- bool force_generic, bool force_simd)
-{
- const size_t bit_offset = SHA256_BLOCK_SIZE - 8;
- __be64 *bits = (__be64 *)&buf[bit_offset];
- int i;
-
- buf[len++] = 0x80;
- if (len > bit_offset) {
- memset(&buf[len], 0, SHA256_BLOCK_SIZE - len);
- sha256_choose_blocks(sctx->state, buf, 1, force_generic,
- force_simd);
- len = 0;
- }
-
- memset(&buf[len], 0, bit_offset - len);
- *bits = cpu_to_be64(sctx->count << 3);
- sha256_choose_blocks(sctx->state, buf, 1, force_generic, force_simd);
-
- for (i = 0; i < digest_size; i += 4)
- put_unaligned_be32(sctx->state[i / 4], out + i);
-}
-
-#endif /* _CRYPTO_INTERNAL_SHA2_H */
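With this internal header removed, the finalization logic deleted above
(append the 0x80 terminator, zero-pad up to the last 8 bytes of the
block, store the message bit count big-endian, then hash the final
block) lives inside the consolidated lib/crypto/sha256.c, and callers
keep using the public interface from <crypto/sha2.h>. A minimal usage
sketch follows; the sha256() one-shot helper is the existing public
API, while demo_hash() is purely illustrative:

#include <crypto/sha2.h>
#include <linux/string.h>

/* Illustrative caller: hash a buffer via the public lib API. */
static void demo_hash(const u8 *data, size_t len)
{
	u8 digest[SHA256_DIGEST_SIZE];

	sha256(data, len, digest);	/* one-shot helper from <crypto/sha2.h> */
	/* ... use digest ... */
	memzero_explicit(digest, sizeof(digest));
}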