Use the shash partial block API by default. Add a separate set of lib shash algorithms to preserve testing coverage until lib/sha256 has its own tests. Signed-off-by: Herbert Xu <herbert@xxxxxxxxxxxxxxxxxxx> --- crypto/sha256.c | 90 +++++++++++++++++++++++++++++++++++++------------ 1 file changed, 69 insertions(+), 21 deletions(-) diff --git a/crypto/sha256.c b/crypto/sha256.c index 9463c06ea39c..1068c206247f 100644 --- a/crypto/sha256.c +++ b/crypto/sha256.c @@ -45,14 +45,26 @@ static int crypto_sha256_update_generic(struct shash_desc *desc, const u8 *data, return remain; } -static int crypto_sha256_update_arch(struct shash_desc *desc, const u8 *data, - unsigned int len) +static int crypto_sha256_update_lib(struct shash_desc *desc, const u8 *data, + unsigned int len) { sha256_update(shash_desc_ctx(desc), data, len); return 0; } -static int crypto_sha256_final_arch(struct shash_desc *desc, u8 *out) +static int crypto_sha256_update_arch(struct shash_desc *desc, const u8 *data, + unsigned int len) +{ + struct crypto_sha256_state *sctx = shash_desc_ctx(desc); + int remain = len - round_down(len, SHA256_BLOCK_SIZE); + + sctx->count += len - remain; + sha256_choose_blocks(sctx->state, data, len / SHA256_BLOCK_SIZE, + false, true); + return remain; +} + +static int crypto_sha256_final_lib(struct shash_desc *desc, u8 *out) { sha256_final(shash_desc_ctx(desc), out); return 0; @@ -74,10 +86,13 @@ static int crypto_sha256_finup_generic(struct shash_desc *desc, const u8 *data, static int crypto_sha256_finup_arch(struct shash_desc *desc, const u8 *data, unsigned int len, u8 *out) { - struct sha256_state *sctx = shash_desc_ctx(desc); + struct crypto_sha256_state *sctx = shash_desc_ctx(desc); + int remain = len; - sha256_update(sctx, data, len); - sha256_final(sctx, out); + if (remain >= SHA256_BLOCK_SIZE) + remain = crypto_sha256_update_arch(desc, data, remain); + sha256_finup(sctx, data + len - remain, remain, out, + crypto_shash_digestsize(desc->tfm), false, true); return 0; 
} @@ -88,20 +103,27 @@ static int crypto_sha256_digest_generic(struct shash_desc *desc, const u8 *data, return crypto_sha256_finup_generic(desc, data, len, out); } -static int crypto_sha256_digest_arch(struct shash_desc *desc, const u8 *data, - unsigned int len, u8 *out) +static int crypto_sha256_digest_lib(struct shash_desc *desc, const u8 *data, + unsigned int len, u8 *out) { sha256(data, len, out); return 0; } +static int crypto_sha256_digest_arch(struct shash_desc *desc, const u8 *data, + unsigned int len, u8 *out) +{ + crypto_sha256_init(desc); + return crypto_sha256_finup_arch(desc, data, len, out); +} + static int crypto_sha224_init(struct shash_desc *desc) { sha224_block_init(shash_desc_ctx(desc)); return 0; } -static int crypto_sha224_final_arch(struct shash_desc *desc, u8 *out) +static int crypto_sha224_final_lib(struct shash_desc *desc, u8 *out) { sha224_final(shash_desc_ctx(desc), out); return 0; @@ -165,16 +187,14 @@ static struct shash_alg algs[] = { }, { .base.cra_name = "sha256", - .base.cra_driver_name = "sha256-" __stringify(ARCH), - .base.cra_priority = 300, + .base.cra_driver_name = "sha256-lib", .base.cra_blocksize = SHA256_BLOCK_SIZE, .base.cra_module = THIS_MODULE, .digestsize = SHA256_DIGEST_SIZE, .init = crypto_sha256_init, - .update = crypto_sha256_update_arch, - .final = crypto_sha256_final_arch, - .finup = crypto_sha256_finup_arch, - .digest = crypto_sha256_digest_arch, + .update = crypto_sha256_update_lib, + .final = crypto_sha256_final_lib, + .digest = crypto_sha256_digest_lib, .descsize = sizeof(struct sha256_state), .statesize = sizeof(struct crypto_sha256_state) + SHA256_BLOCK_SIZE + 1, @@ -183,20 +203,48 @@ static struct shash_alg algs[] = { }, { .base.cra_name = "sha224", - .base.cra_driver_name = "sha224-" __stringify(ARCH), - .base.cra_priority = 300, + .base.cra_driver_name = "sha224-lib", .base.cra_blocksize = SHA224_BLOCK_SIZE, .base.cra_module = THIS_MODULE, .digestsize = SHA224_DIGEST_SIZE, .init = crypto_sha224_init, - 
.update = crypto_sha256_update_arch, - .final = crypto_sha224_final_arch, + .update = crypto_sha256_update_lib, + .final = crypto_sha224_final_lib, .descsize = sizeof(struct sha256_state), .statesize = sizeof(struct crypto_sha256_state) + SHA256_BLOCK_SIZE + 1, .import = crypto_sha256_import_lib, .export = crypto_sha256_export_lib, }, + { + .base.cra_name = "sha256", + .base.cra_driver_name = "sha256-" __stringify(ARCH), + .base.cra_priority = 300, + .base.cra_flags = CRYPTO_AHASH_ALG_BLOCK_ONLY | + CRYPTO_AHASH_ALG_FINUP_MAX, + .base.cra_blocksize = SHA256_BLOCK_SIZE, + .base.cra_module = THIS_MODULE, + .digestsize = SHA256_DIGEST_SIZE, + .init = crypto_sha256_init, + .update = crypto_sha256_update_arch, + .finup = crypto_sha256_finup_arch, + .digest = crypto_sha256_digest_arch, + .descsize = sizeof(struct crypto_sha256_state), + }, + { + .base.cra_name = "sha224", + .base.cra_driver_name = "sha224-" __stringify(ARCH), + .base.cra_priority = 300, + .base.cra_flags = CRYPTO_AHASH_ALG_BLOCK_ONLY | + CRYPTO_AHASH_ALG_FINUP_MAX, + .base.cra_blocksize = SHA224_BLOCK_SIZE, + .base.cra_module = THIS_MODULE, + .digestsize = SHA224_DIGEST_SIZE, + .init = crypto_sha224_init, + .update = crypto_sha256_update_arch, + .finup = crypto_sha256_finup_arch, + .descsize = sizeof(struct crypto_sha256_state), + }, }; static unsigned int num_algs; @@ -205,9 +253,9 @@ static int __init crypto_sha256_mod_init(void) { /* register the arch flavours only if they differ from generic */ num_algs = ARRAY_SIZE(algs); - BUILD_BUG_ON(ARRAY_SIZE(algs) % 2 != 0); + BUILD_BUG_ON(ARRAY_SIZE(algs) <= 2); if (!sha256_is_arch_optimized()) - num_algs /= 2; + num_algs -= 2; return crypto_register_shashes(algs, num_algs); } subsys_initcall(crypto_sha256_mod_init); -- 2.39.5