author		Eric Biggers <ebiggers@kernel.org>	2025-06-30 09:06:40 -0700
committer	Eric Biggers <ebiggers@kernel.org>	2025-07-04 10:23:11 -0700
commit		e0cd3716910385ba1ccbd433c860516cf806fc71 (patch)
tree		a43b5a89b495a71dd1701d5315cddf17feb7036b /crypto
parent		lib/crypto: sha256: Add HMAC-SHA224 and HMAC-SHA256 support (diff)
crypto: sha256 - Wrap library and add HMAC support
Like I did for crypto/sha512.c, rework crypto/sha256.c to simply wrap the
normal library functions instead of accessing the low-level arch-optimized
and generic block functions directly. Also add support for HMAC-SHA224 and
HMAC-SHA256, again just wrapping the library functions.

Since the replacement crypto_shash algorithms are implemented using the
(potentially arch-optimized) library functions, give them driver names
ending with "-lib" rather than "-generic". Update crypto/testmgr.c and a
couple odd drivers to take this change in driver name into account.
Besides the above cases which are accounted for, there are no known cases
where the driver names were being depended on.

There is potential for confusion for people manually checking /proc/crypto
(e.g. https://lore.kernel.org/r/9e33c893-2466-4d4e-afb1-966334e451a2@linux.ibm.com/),
but really people just need to get used to the driver name not being
meaningful for the software algorithms. Historically, the optimized code
was disabled by default, so there was some purpose to checking whether it
was enabled or not. However, this is now fixed for all SHA-2 algorithms,
and the library code just always does the right thing. E.g. if the CPU
supports SHA-256 instructions, they are used.

This change does also mean that the generic partial block handling code in
crypto/shash.c, which got added in 6.16, no longer gets used. But that's
fine; the library has to implement the partial block handling anyway, and
it's better to do it in the library since the block size and other
properties of the algorithm are all fixed at compile time there, resulting
in more streamlined code.

Acked-by: Ard Biesheuvel <ardb@kernel.org>
Link: https://lore.kernel.org/r/20250630160645.3198-10-ebiggers@kernel.org
Signed-off-by: Eric Biggers <ebiggers@kernel.org>
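For reference, the library API that the reworked crypto/sha256.c forwards to
can also be called directly by in-kernel code. The following is a minimal
sketch of such a caller, using only the functions that appear in this diff
(sha256(), sha256_init/update/final(), hmac_sha256_preparekey(), and
hmac_sha256()); the demo function itself is hypothetical and only
illustrates the call sequence:

/*
 * Hypothetical example: hash and MAC a buffer with the SHA-256 library API
 * from <crypto/sha2.h>. The Crypto API wrapper in crypto/sha256.c forwards
 * its init/update/final/digest entry points to these same functions.
 */
#include <crypto/sha2.h>

static void sha256_lib_demo(const u8 *msg, size_t msg_len,
			    const u8 *raw_key, size_t key_len)
{
	struct sha256_ctx ctx;
	struct hmac_sha256_key key;
	u8 digest[SHA256_DIGEST_SIZE];
	u8 mac[SHA256_DIGEST_SIZE];

	/* One-shot digest. */
	sha256(msg, msg_len, digest);

	/* Equivalent incremental digest. */
	sha256_init(&ctx);
	sha256_update(&ctx, msg, msg_len);
	sha256_final(&ctx, digest);

	/* HMAC-SHA256: prepare the key once, then compute the MAC. */
	hmac_sha256_preparekey(&key, raw_key, key_len);
	hmac_sha256(&key, msg, msg_len, mac);
}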
Diffstat (limited to 'crypto')
-rw-r--r--	crypto/Kconfig		4
-rw-r--r--	crypto/Makefile		1
-rw-r--r--	crypto/sha256.c		286
-rw-r--r--	crypto/testmgr.c	12
4 files changed, 142 insertions, 161 deletions
diff --git a/crypto/Kconfig b/crypto/Kconfig
index cb40a9b46972..3ea1397214e0 100644
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -992,9 +992,9 @@ config CRYPTO_SHA256
tristate "SHA-224 and SHA-256"
select CRYPTO_HASH
select CRYPTO_LIB_SHA256
- select CRYPTO_LIB_SHA256_GENERIC
help
- SHA-224 and SHA-256 secure hash algorithms (FIPS 180, ISO/IEC 10118-3)
+ SHA-224 and SHA-256 secure hash algorithms (FIPS 180, ISO/IEC
+ 10118-3), including HMAC support.
This is required for IPsec AH (XFRM_AH) and IPsec ESP (XFRM_ESP).
Used by the btrfs filesystem, Ceph, NFS, and SMB.
diff --git a/crypto/Makefile b/crypto/Makefile
index 271c77462cec..5098fa6d5f39 100644
--- a/crypto/Makefile
+++ b/crypto/Makefile
@@ -77,7 +77,6 @@ obj-$(CONFIG_CRYPTO_MD5) += md5.o
obj-$(CONFIG_CRYPTO_RMD160) += rmd160.o
obj-$(CONFIG_CRYPTO_SHA1) += sha1_generic.o
obj-$(CONFIG_CRYPTO_SHA256) += sha256.o
-CFLAGS_sha256.o += -DARCH=$(ARCH)
obj-$(CONFIG_CRYPTO_SHA512) += sha512.o
obj-$(CONFIG_CRYPTO_SHA3) += sha3_generic.o
obj-$(CONFIG_CRYPTO_SM3_GENERIC) += sm3_generic.o
diff --git a/crypto/sha256.c b/crypto/sha256.c
index 15c57fba256b..d81166cbba95 100644
--- a/crypto/sha256.c
+++ b/crypto/sha256.c
@@ -1,17 +1,20 @@
// SPDX-License-Identifier: GPL-2.0-or-later
/*
- * Crypto API wrapper for the SHA-256 and SHA-224 library functions
+ * Crypto API support for SHA-224, SHA-256, HMAC-SHA224, and HMAC-SHA256
*
* Copyright (c) Jean-Luc Cooke <jlcooke@certainkey.com>
* Copyright (c) Andrew McDonald <andrew@mcdonald.org.uk>
* Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
* SHA224 Support Copyright 2007 Intel Corporation <jonathan.lynch@intel.com>
+ * Copyright 2025 Google LLC
*/
#include <crypto/internal/hash.h>
-#include <crypto/internal/sha2.h>
+#include <crypto/sha2.h>
#include <linux/kernel.h>
#include <linux/module.h>
+/* SHA-224 */
+
const u8 sha224_zero_message_hash[SHA224_DIGEST_SIZE] = {
0xd1, 0x4a, 0x02, 0x8c, 0x2a, 0x3a, 0x2b, 0xc9, 0x47,
0x61, 0x02, 0xbb, 0x28, 0x82, 0x34, 0xc4, 0x15, 0xa2,
@@ -20,6 +23,36 @@ const u8 sha224_zero_message_hash[SHA224_DIGEST_SIZE] = {
};
EXPORT_SYMBOL_GPL(sha224_zero_message_hash);
+#define SHA224_CTX(desc) ((struct sha224_ctx *)shash_desc_ctx(desc))
+
+static int crypto_sha224_init(struct shash_desc *desc)
+{
+ sha224_init(SHA224_CTX(desc));
+ return 0;
+}
+
+static int crypto_sha224_update(struct shash_desc *desc,
+ const u8 *data, unsigned int len)
+{
+ sha224_update(SHA224_CTX(desc), data, len);
+ return 0;
+}
+
+static int crypto_sha224_final(struct shash_desc *desc, u8 *out)
+{
+ sha224_final(SHA224_CTX(desc), out);
+ return 0;
+}
+
+static int crypto_sha224_digest(struct shash_desc *desc,
+ const u8 *data, unsigned int len, u8 *out)
+{
+ sha224(data, len, out);
+ return 0;
+}
+
+/* SHA-256 */
+
const u8 sha256_zero_message_hash[SHA256_DIGEST_SIZE] = {
0xe3, 0xb0, 0xc4, 0x42, 0x98, 0xfc, 0x1c, 0x14,
0x9a, 0xfb, 0xf4, 0xc8, 0x99, 0x6f, 0xb9, 0x24,
@@ -28,256 +61,193 @@ const u8 sha256_zero_message_hash[SHA256_DIGEST_SIZE] = {
};
EXPORT_SYMBOL_GPL(sha256_zero_message_hash);
+#define SHA256_CTX(desc) ((struct sha256_ctx *)shash_desc_ctx(desc))
+
static int crypto_sha256_init(struct shash_desc *desc)
{
- sha256_block_init(shash_desc_ctx(desc));
+ sha256_init(SHA256_CTX(desc));
return 0;
}
-static inline int crypto_sha256_update(struct shash_desc *desc, const u8 *data,
- unsigned int len, bool force_generic)
+static int crypto_sha256_update(struct shash_desc *desc,
+ const u8 *data, unsigned int len)
{
- struct crypto_sha256_state *sctx = shash_desc_ctx(desc);
- int remain = len % SHA256_BLOCK_SIZE;
-
- sctx->count += len - remain;
- sha256_choose_blocks(sctx->state, data, len / SHA256_BLOCK_SIZE,
- force_generic, !force_generic);
- return remain;
+ sha256_update(SHA256_CTX(desc), data, len);
+ return 0;
}
-static int crypto_sha256_update_generic(struct shash_desc *desc, const u8 *data,
- unsigned int len)
+static int crypto_sha256_final(struct shash_desc *desc, u8 *out)
{
- return crypto_sha256_update(desc, data, len, true);
+ sha256_final(SHA256_CTX(desc), out);
+ return 0;
}
-static int crypto_sha256_update_lib(struct shash_desc *desc, const u8 *data,
- unsigned int len)
+static int crypto_sha256_digest(struct shash_desc *desc,
+ const u8 *data, unsigned int len, u8 *out)
{
- sha256_update(shash_desc_ctx(desc), data, len);
+ sha256(data, len, out);
return 0;
}
-static int crypto_sha256_update_arch(struct shash_desc *desc, const u8 *data,
- unsigned int len)
-{
- return crypto_sha256_update(desc, data, len, false);
-}
+/* HMAC-SHA224 */
-static int crypto_sha256_final_lib(struct shash_desc *desc, u8 *out)
-{
- sha256_final(shash_desc_ctx(desc), out);
- return 0;
-}
+#define HMAC_SHA224_KEY(tfm) ((struct hmac_sha224_key *)crypto_shash_ctx(tfm))
+#define HMAC_SHA224_CTX(desc) ((struct hmac_sha224_ctx *)shash_desc_ctx(desc))
-static __always_inline int crypto_sha256_finup(struct shash_desc *desc,
- const u8 *data,
- unsigned int len, u8 *out,
- bool force_generic)
+static int crypto_hmac_sha224_setkey(struct crypto_shash *tfm,
+ const u8 *raw_key, unsigned int keylen)
{
- struct crypto_sha256_state *sctx = shash_desc_ctx(desc);
- unsigned int remain = len;
- u8 *buf;
-
- if (len >= SHA256_BLOCK_SIZE)
- remain = crypto_sha256_update(desc, data, len, force_generic);
- sctx->count += remain;
- buf = memcpy(sctx + 1, data + len - remain, remain);
- sha256_finup(sctx, buf, remain, out,
- crypto_shash_digestsize(desc->tfm), force_generic,
- !force_generic);
+ hmac_sha224_preparekey(HMAC_SHA224_KEY(tfm), raw_key, keylen);
return 0;
}
-static int crypto_sha256_finup_generic(struct shash_desc *desc, const u8 *data,
- unsigned int len, u8 *out)
+static int crypto_hmac_sha224_init(struct shash_desc *desc)
{
- return crypto_sha256_finup(desc, data, len, out, true);
+ hmac_sha224_init(HMAC_SHA224_CTX(desc), HMAC_SHA224_KEY(desc->tfm));
+ return 0;
}
-static int crypto_sha256_finup_arch(struct shash_desc *desc, const u8 *data,
- unsigned int len, u8 *out)
+static int crypto_hmac_sha224_update(struct shash_desc *desc,
+ const u8 *data, unsigned int len)
{
- return crypto_sha256_finup(desc, data, len, out, false);
+ hmac_sha224_update(HMAC_SHA224_CTX(desc), data, len);
+ return 0;
}
-static int crypto_sha256_digest_generic(struct shash_desc *desc, const u8 *data,
- unsigned int len, u8 *out)
+static int crypto_hmac_sha224_final(struct shash_desc *desc, u8 *out)
{
- crypto_sha256_init(desc);
- return crypto_sha256_finup_generic(desc, data, len, out);
+ hmac_sha224_final(HMAC_SHA224_CTX(desc), out);
+ return 0;
}
-static int crypto_sha256_digest_lib(struct shash_desc *desc, const u8 *data,
- unsigned int len, u8 *out)
+static int crypto_hmac_sha224_digest(struct shash_desc *desc,
+ const u8 *data, unsigned int len,
+ u8 *out)
{
- sha256(data, len, out);
+ hmac_sha224(HMAC_SHA224_KEY(desc->tfm), data, len, out);
return 0;
}
-static int crypto_sha256_digest_arch(struct shash_desc *desc, const u8 *data,
- unsigned int len, u8 *out)
+/* HMAC-SHA256 */
+
+#define HMAC_SHA256_KEY(tfm) ((struct hmac_sha256_key *)crypto_shash_ctx(tfm))
+#define HMAC_SHA256_CTX(desc) ((struct hmac_sha256_ctx *)shash_desc_ctx(desc))
+
+static int crypto_hmac_sha256_setkey(struct crypto_shash *tfm,
+ const u8 *raw_key, unsigned int keylen)
{
- crypto_sha256_init(desc);
- return crypto_sha256_finup_arch(desc, data, len, out);
+ hmac_sha256_preparekey(HMAC_SHA256_KEY(tfm), raw_key, keylen);
+ return 0;
}
-static int crypto_sha224_init(struct shash_desc *desc)
+static int crypto_hmac_sha256_init(struct shash_desc *desc)
{
- sha224_block_init(shash_desc_ctx(desc));
+ hmac_sha256_init(HMAC_SHA256_CTX(desc), HMAC_SHA256_KEY(desc->tfm));
return 0;
}
-static int crypto_sha224_final_lib(struct shash_desc *desc, u8 *out)
+static int crypto_hmac_sha256_update(struct shash_desc *desc,
+ const u8 *data, unsigned int len)
{
- sha224_final(shash_desc_ctx(desc), out);
+ hmac_sha256_update(HMAC_SHA256_CTX(desc), data, len);
return 0;
}
-static int crypto_sha256_import_lib(struct shash_desc *desc, const void *in)
+static int crypto_hmac_sha256_final(struct shash_desc *desc, u8 *out)
{
- struct __sha256_ctx *sctx = shash_desc_ctx(desc);
- const u8 *p = in;
-
- memcpy(sctx, p, sizeof(*sctx));
- p += sizeof(*sctx);
- sctx->bytecount += *p;
+ hmac_sha256_final(HMAC_SHA256_CTX(desc), out);
return 0;
}
-static int crypto_sha256_export_lib(struct shash_desc *desc, void *out)
+static int crypto_hmac_sha256_digest(struct shash_desc *desc,
+ const u8 *data, unsigned int len,
+ u8 *out)
{
- struct __sha256_ctx *sctx0 = shash_desc_ctx(desc);
- struct __sha256_ctx sctx = *sctx0;
- unsigned int partial;
- u8 *p = out;
-
- partial = sctx.bytecount % SHA256_BLOCK_SIZE;
- sctx.bytecount -= partial;
- memcpy(p, &sctx, sizeof(sctx));
- p += sizeof(sctx);
- *p = partial;
+ hmac_sha256(HMAC_SHA256_KEY(desc->tfm), data, len, out);
return 0;
}
+/* Algorithm definitions */
+
static struct shash_alg algs[] = {
{
- .base.cra_name = "sha256",
- .base.cra_driver_name = "sha256-generic",
- .base.cra_priority = 100,
- .base.cra_flags = CRYPTO_AHASH_ALG_BLOCK_ONLY |
- CRYPTO_AHASH_ALG_FINUP_MAX,
- .base.cra_blocksize = SHA256_BLOCK_SIZE,
- .base.cra_module = THIS_MODULE,
- .digestsize = SHA256_DIGEST_SIZE,
- .init = crypto_sha256_init,
- .update = crypto_sha256_update_generic,
- .finup = crypto_sha256_finup_generic,
- .digest = crypto_sha256_digest_generic,
- .descsize = sizeof(struct crypto_sha256_state),
- },
- {
.base.cra_name = "sha224",
- .base.cra_driver_name = "sha224-generic",
- .base.cra_priority = 100,
- .base.cra_flags = CRYPTO_AHASH_ALG_BLOCK_ONLY |
- CRYPTO_AHASH_ALG_FINUP_MAX,
+ .base.cra_driver_name = "sha224-lib",
+ .base.cra_priority = 300,
.base.cra_blocksize = SHA224_BLOCK_SIZE,
.base.cra_module = THIS_MODULE,
.digestsize = SHA224_DIGEST_SIZE,
.init = crypto_sha224_init,
- .update = crypto_sha256_update_generic,
- .finup = crypto_sha256_finup_generic,
- .descsize = sizeof(struct crypto_sha256_state),
+ .update = crypto_sha224_update,
+ .final = crypto_sha224_final,
+ .digest = crypto_sha224_digest,
+ .descsize = sizeof(struct sha224_ctx),
},
{
.base.cra_name = "sha256",
.base.cra_driver_name = "sha256-lib",
+ .base.cra_priority = 300,
.base.cra_blocksize = SHA256_BLOCK_SIZE,
.base.cra_module = THIS_MODULE,
.digestsize = SHA256_DIGEST_SIZE,
.init = crypto_sha256_init,
- .update = crypto_sha256_update_lib,
- .final = crypto_sha256_final_lib,
- .digest = crypto_sha256_digest_lib,
+ .update = crypto_sha256_update,
+ .final = crypto_sha256_final,
+ .digest = crypto_sha256_digest,
.descsize = sizeof(struct sha256_ctx),
- .statesize = sizeof(struct crypto_sha256_state) +
- SHA256_BLOCK_SIZE + 1,
- .import = crypto_sha256_import_lib,
- .export = crypto_sha256_export_lib,
},
{
- .base.cra_name = "sha224",
- .base.cra_driver_name = "sha224-lib",
+ .base.cra_name = "hmac(sha224)",
+ .base.cra_driver_name = "hmac-sha224-lib",
+ .base.cra_priority = 300,
.base.cra_blocksize = SHA224_BLOCK_SIZE,
+ .base.cra_ctxsize = sizeof(struct hmac_sha224_key),
.base.cra_module = THIS_MODULE,
.digestsize = SHA224_DIGEST_SIZE,
- .init = crypto_sha224_init,
- .update = crypto_sha256_update_lib,
- .final = crypto_sha224_final_lib,
- .descsize = sizeof(struct sha224_ctx),
- .statesize = sizeof(struct crypto_sha256_state) +
- SHA256_BLOCK_SIZE + 1,
- .import = crypto_sha256_import_lib,
- .export = crypto_sha256_export_lib,
+ .setkey = crypto_hmac_sha224_setkey,
+ .init = crypto_hmac_sha224_init,
+ .update = crypto_hmac_sha224_update,
+ .final = crypto_hmac_sha224_final,
+ .digest = crypto_hmac_sha224_digest,
+ .descsize = sizeof(struct hmac_sha224_ctx),
},
{
- .base.cra_name = "sha256",
- .base.cra_driver_name = "sha256-" __stringify(ARCH),
+ .base.cra_name = "hmac(sha256)",
+ .base.cra_driver_name = "hmac-sha256-lib",
.base.cra_priority = 300,
- .base.cra_flags = CRYPTO_AHASH_ALG_BLOCK_ONLY |
- CRYPTO_AHASH_ALG_FINUP_MAX,
.base.cra_blocksize = SHA256_BLOCK_SIZE,
+ .base.cra_ctxsize = sizeof(struct hmac_sha256_key),
.base.cra_module = THIS_MODULE,
.digestsize = SHA256_DIGEST_SIZE,
- .init = crypto_sha256_init,
- .update = crypto_sha256_update_arch,
- .finup = crypto_sha256_finup_arch,
- .digest = crypto_sha256_digest_arch,
- .descsize = sizeof(struct crypto_sha256_state),
- },
- {
- .base.cra_name = "sha224",
- .base.cra_driver_name = "sha224-" __stringify(ARCH),
- .base.cra_priority = 300,
- .base.cra_flags = CRYPTO_AHASH_ALG_BLOCK_ONLY |
- CRYPTO_AHASH_ALG_FINUP_MAX,
- .base.cra_blocksize = SHA224_BLOCK_SIZE,
- .base.cra_module = THIS_MODULE,
- .digestsize = SHA224_DIGEST_SIZE,
- .init = crypto_sha224_init,
- .update = crypto_sha256_update_arch,
- .finup = crypto_sha256_finup_arch,
- .descsize = sizeof(struct crypto_sha256_state),
+ .setkey = crypto_hmac_sha256_setkey,
+ .init = crypto_hmac_sha256_init,
+ .update = crypto_hmac_sha256_update,
+ .final = crypto_hmac_sha256_final,
+ .digest = crypto_hmac_sha256_digest,
+ .descsize = sizeof(struct hmac_sha256_ctx),
},
};
-static unsigned int num_algs;
-
static int __init crypto_sha256_mod_init(void)
{
- /* register the arch flavours only if they differ from generic */
- num_algs = ARRAY_SIZE(algs);
- BUILD_BUG_ON(ARRAY_SIZE(algs) <= 2);
- if (!sha256_is_arch_optimized())
- num_algs -= 2;
return crypto_register_shashes(algs, ARRAY_SIZE(algs));
}
module_init(crypto_sha256_mod_init);
static void __exit crypto_sha256_mod_exit(void)
{
- crypto_unregister_shashes(algs, num_algs);
+ crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
}
module_exit(crypto_sha256_mod_exit);
MODULE_LICENSE("GPL");
-MODULE_DESCRIPTION("Crypto API wrapper for the SHA-256 and SHA-224 library functions");
+MODULE_DESCRIPTION("Crypto API support for SHA-224, SHA-256, HMAC-SHA224, and HMAC-SHA256");
-MODULE_ALIAS_CRYPTO("sha256");
-MODULE_ALIAS_CRYPTO("sha256-generic");
-MODULE_ALIAS_CRYPTO("sha256-" __stringify(ARCH));
MODULE_ALIAS_CRYPTO("sha224");
-MODULE_ALIAS_CRYPTO("sha224-generic");
-MODULE_ALIAS_CRYPTO("sha224-" __stringify(ARCH));
+MODULE_ALIAS_CRYPTO("sha224-lib");
+MODULE_ALIAS_CRYPTO("sha256");
+MODULE_ALIAS_CRYPTO("sha256-lib");
+MODULE_ALIAS_CRYPTO("hmac(sha224)");
+MODULE_ALIAS_CRYPTO("hmac-sha224-lib");
+MODULE_ALIAS_CRYPTO("hmac(sha256)");
+MODULE_ALIAS_CRYPTO("hmac-sha256-lib");
diff --git a/crypto/testmgr.c b/crypto/testmgr.c
index 9d8b11ea4af7..4e95567f7ed1 100644
--- a/crypto/testmgr.c
+++ b/crypto/testmgr.c
@@ -4270,18 +4270,21 @@ static const struct alg_test_desc alg_test_descs[] = {
.fips_allowed = 1,
}, {
.alg = "authenc(hmac(sha224),cbc(des))",
+ .generic_driver = "authenc(hmac-sha224-lib,cbc(des-generic))",
.test = alg_test_aead,
.suite = {
.aead = __VECS(hmac_sha224_des_cbc_tv_temp)
}
}, {
.alg = "authenc(hmac(sha224),cbc(des3_ede))",
+ .generic_driver = "authenc(hmac-sha224-lib,cbc(des3_ede-generic))",
.test = alg_test_aead,
.suite = {
.aead = __VECS(hmac_sha224_des3_ede_cbc_tv_temp)
}
}, {
.alg = "authenc(hmac(sha256),cbc(aes))",
+ .generic_driver = "authenc(hmac-sha256-lib,cbc(aes-generic))",
.test = alg_test_aead,
.fips_allowed = 1,
.suite = {
@@ -4289,12 +4292,14 @@ static const struct alg_test_desc alg_test_descs[] = {
}
}, {
.alg = "authenc(hmac(sha256),cbc(des))",
+ .generic_driver = "authenc(hmac-sha256-lib,cbc(des-generic))",
.test = alg_test_aead,
.suite = {
.aead = __VECS(hmac_sha256_des_cbc_tv_temp)
}
}, {
.alg = "authenc(hmac(sha256),cbc(des3_ede))",
+ .generic_driver = "authenc(hmac-sha256-lib,cbc(des3_ede-generic))",
.test = alg_test_aead,
.suite = {
.aead = __VECS(hmac_sha256_des3_ede_cbc_tv_temp)
@@ -4305,6 +4310,7 @@ static const struct alg_test_desc alg_test_descs[] = {
.fips_allowed = 1,
}, {
.alg = "authenc(hmac(sha256),cts(cbc(aes)))",
+ .generic_driver = "authenc(hmac-sha256-lib,cts(cbc(aes-generic)))",
.test = alg_test_aead,
.suite = {
.aead = __VECS(krb5_test_aes128_cts_hmac_sha256_128)
@@ -5015,6 +5021,7 @@ static const struct alg_test_desc alg_test_descs[] = {
}
}, {
.alg = "essiv(authenc(hmac(sha256),cbc(aes)),sha256)",
+ .generic_driver = "essiv(authenc(hmac-sha256-lib,cbc(aes-generic)),sha256-lib)",
.test = alg_test_aead,
.fips_allowed = 1,
.suite = {
@@ -5022,6 +5029,7 @@ static const struct alg_test_desc alg_test_descs[] = {
}
}, {
.alg = "essiv(cbc(aes),sha256)",
+ .generic_driver = "essiv(cbc(aes-generic),sha256-lib)",
.test = alg_test_skcipher,
.fips_allowed = 1,
.suite = {
@@ -5121,6 +5129,7 @@ static const struct alg_test_desc alg_test_descs[] = {
}
}, {
.alg = "hmac(sha224)",
+ .generic_driver = "hmac-sha224-lib",
.test = alg_test_hash,
.fips_allowed = 1,
.suite = {
@@ -5128,6 +5137,7 @@ static const struct alg_test_desc alg_test_descs[] = {
}
}, {
.alg = "hmac(sha256)",
+ .generic_driver = "hmac-sha256-lib",
.test = alg_test_hash,
.fips_allowed = 1,
.suite = {
@@ -5459,6 +5469,7 @@ static const struct alg_test_desc alg_test_descs[] = {
}
}, {
.alg = "sha224",
+ .generic_driver = "sha224-lib",
.test = alg_test_hash,
.fips_allowed = 1,
.suite = {
@@ -5466,6 +5477,7 @@ static const struct alg_test_desc alg_test_descs[] = {
}
}, {
.alg = "sha256",
+ .generic_driver = "sha256-lib",
.test = alg_test_hash,
.fips_allowed = 1,
.suite = {