diff options
| author | Eric Biggers <ebiggers@google.com> | 2025-05-05 11:18:21 -0700 |
|---|---|---|
| committer | Herbert Xu <herbert@gondor.apana.org.au> | 2025-05-12 13:32:53 +0800 |
| commit | 98066f2f8901ccf72f3c5d6c391c8fff1cabd49d (patch) | |
| tree | a88e8b02bcfc5fbc4a1b71213ba078d98c07fba1 /arch/arm/lib | |
| parent | crypto: crypto4xx - Remove ahash-related code (diff) | |
| download | linux-98066f2f8901ccf72f3c5d6c391c8fff1cabd49d.tar.gz linux-98066f2f8901ccf72f3c5d6c391c8fff1cabd49d.zip | |
crypto: lib/chacha - strongly type the ChaCha state
The ChaCha state matrix is 16 32-bit words. Currently it is represented
in the code as a raw u32 array, or even just a pointer to u32. This
weak typing is error-prone. Instead, introduce struct chacha_state:
struct chacha_state {
	u32 x[16];
};
Convert all ChaCha and HChaCha functions to use struct chacha_state.
No functional changes.
Signed-off-by: Eric Biggers <ebiggers@google.com>
Acked-by: Kent Overstreet <kent.overstreet@linux.dev>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'arch/arm/lib')
| -rw-r--r-- | arch/arm/lib/crypto/chacha-glue.c | 30 | ||||
| -rw-r--r-- | arch/arm/lib/crypto/chacha-scalar-core.S | 5 |
2 files changed, 20 insertions, 15 deletions
diff --git a/arch/arm/lib/crypto/chacha-glue.c b/arch/arm/lib/crypto/chacha-glue.c index 1e28736834a0..0c2b4c62d484 100644 --- a/arch/arm/lib/crypto/chacha-glue.c +++ b/arch/arm/lib/crypto/chacha-glue.c @@ -17,15 +17,18 @@ #include <asm/neon.h> #include <asm/simd.h> -asmlinkage void chacha_block_xor_neon(const u32 *state, u8 *dst, const u8 *src, - int nrounds); -asmlinkage void chacha_4block_xor_neon(const u32 *state, u8 *dst, const u8 *src, +asmlinkage void chacha_block_xor_neon(const struct chacha_state *state, + u8 *dst, const u8 *src, int nrounds); +asmlinkage void chacha_4block_xor_neon(const struct chacha_state *state, + u8 *dst, const u8 *src, int nrounds, unsigned int nbytes); -asmlinkage void hchacha_block_arm(const u32 *state, u32 *out, int nrounds); -asmlinkage void hchacha_block_neon(const u32 *state, u32 *out, int nrounds); +asmlinkage void hchacha_block_arm(const struct chacha_state *state, + u32 *out, int nrounds); +asmlinkage void hchacha_block_neon(const struct chacha_state *state, + u32 *out, int nrounds); asmlinkage void chacha_doarm(u8 *dst, const u8 *src, unsigned int bytes, - const u32 *state, int nrounds); + const struct chacha_state *state, int nrounds); static __ro_after_init DEFINE_STATIC_KEY_FALSE(use_neon); @@ -34,7 +37,7 @@ static inline bool neon_usable(void) return static_branch_likely(&use_neon) && crypto_simd_usable(); } -static void chacha_doneon(u32 *state, u8 *dst, const u8 *src, +static void chacha_doneon(struct chacha_state *state, u8 *dst, const u8 *src, unsigned int bytes, int nrounds) { u8 buf[CHACHA_BLOCK_SIZE]; @@ -46,7 +49,7 @@ static void chacha_doneon(u32 *state, u8 *dst, const u8 *src, bytes -= l; src += l; dst += l; - state[12] += DIV_ROUND_UP(l, CHACHA_BLOCK_SIZE); + state->x[12] += DIV_ROUND_UP(l, CHACHA_BLOCK_SIZE); } if (bytes) { const u8 *s = src; @@ -57,11 +60,12 @@ static void chacha_doneon(u32 *state, u8 *dst, const u8 *src, chacha_block_xor_neon(state, d, s, nrounds); if (d != dst) memcpy(dst, buf, bytes); - 
state[12]++; + state->x[12]++; } } -void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds) +void hchacha_block_arch(const struct chacha_state *state, u32 *stream, + int nrounds) { if (!IS_ENABLED(CONFIG_KERNEL_MODE_NEON) || !neon_usable()) { hchacha_block_arm(state, stream, nrounds); @@ -73,13 +77,13 @@ void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds) } EXPORT_SYMBOL(hchacha_block_arch); -void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes, - int nrounds) +void chacha_crypt_arch(struct chacha_state *state, u8 *dst, const u8 *src, + unsigned int bytes, int nrounds) { if (!IS_ENABLED(CONFIG_KERNEL_MODE_NEON) || !neon_usable() || bytes <= CHACHA_BLOCK_SIZE) { chacha_doarm(dst, src, bytes, state, nrounds); - state[12] += DIV_ROUND_UP(bytes, CHACHA_BLOCK_SIZE); + state->x[12] += DIV_ROUND_UP(bytes, CHACHA_BLOCK_SIZE); return; } diff --git a/arch/arm/lib/crypto/chacha-scalar-core.S b/arch/arm/lib/crypto/chacha-scalar-core.S index 083fe1ab96d0..d20b5de755cc 100644 --- a/arch/arm/lib/crypto/chacha-scalar-core.S +++ b/arch/arm/lib/crypto/chacha-scalar-core.S @@ -367,7 +367,7 @@ /* * void chacha_doarm(u8 *dst, const u8 *src, unsigned int bytes, - * const u32 *state, int nrounds); + * const struct chacha_state *state, int nrounds); */ ENTRY(chacha_doarm) cmp r2, #0 // len == 0? @@ -407,7 +407,8 @@ ENTRY(chacha_doarm) ENDPROC(chacha_doarm) /* - * void hchacha_block_arm(const u32 state[16], u32 out[8], int nrounds); + * void hchacha_block_arm(const struct chacha_state *state, + * u32 out[8], int nrounds); */ ENTRY(hchacha_block_arm) push {r1,r4-r11,lr} |
