In essiv_cbc_set_key(), just use the SHA-256 library instead of
crypto_shash.  This is simpler and also slightly faster.

Signed-off-by: Eric Biggers <ebiggers@xxxxxxxxxx>
---
 arch/arm64/crypto/Kconfig    |  1 +
 arch/arm64/crypto/aes-glue.c | 21 +--------------------
 2 files changed, 2 insertions(+), 20 deletions(-)

diff --git a/arch/arm64/crypto/Kconfig b/arch/arm64/crypto/Kconfig
index 3bb5b513d5ae2..91f3093eee6ab 100644
--- a/arch/arm64/crypto/Kconfig
+++ b/arch/arm64/crypto/Kconfig
@@ -69,10 +69,11 @@ config CRYPTO_POLYVAL_ARM64_CE
 	  - ARMv8 Crypto Extensions
 
 config CRYPTO_AES_ARM64
 	tristate "Ciphers: AES, modes: ECB, CBC, CTR, CTS, XCTR, XTS"
 	select CRYPTO_AES
+	select CRYPTO_LIB_SHA256
 	help
 	  Block ciphers: AES cipher algorithms (FIPS-197)
 	  Length-preserving ciphers: AES with ECB, CBC, CTR, CTS, XCTR, and XTS
 	    modes
 	  AEAD cipher: AES with CBC, ESSIV, and SHA-256
diff --git a/arch/arm64/crypto/aes-glue.c b/arch/arm64/crypto/aes-glue.c
index 81560f722b9de..5e207ff34482f 100644
--- a/arch/arm64/crypto/aes-glue.c
+++ b/arch/arm64/crypto/aes-glue.c
@@ -120,11 +120,10 @@ struct crypto_aes_xts_ctx {
 };
 
 struct crypto_aes_essiv_cbc_ctx {
 	struct crypto_aes_ctx key1;
 	struct crypto_aes_ctx __aligned(8) key2;
-	struct crypto_shash *hash;
 };
 
 struct mac_tfm_ctx {
 	struct crypto_aes_ctx key;
 	u8 __aligned(8) consts[];
@@ -169,11 +168,11 @@ static int __maybe_unused essiv_cbc_set_key(struct crypto_skcipher *tfm,
 
 	ret = aes_expandkey(&ctx->key1, in_key, key_len);
 	if (ret)
 		return ret;
 
-	crypto_shash_tfm_digest(ctx->hash, in_key, key_len, digest);
+	sha256(in_key, key_len, digest);
 
 	return aes_expandkey(&ctx->key2, digest, sizeof(digest));
 }
 
 static int __maybe_unused ecb_encrypt(struct skcipher_request *req)
@@ -386,26 +385,10 @@ static int cts_cbc_decrypt(struct skcipher_request *req)
 	kernel_neon_end();
 
 	return skcipher_walk_done(&walk, 0);
 }
 
-static int __maybe_unused essiv_cbc_init_tfm(struct crypto_skcipher *tfm)
-{
-	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
-
-	ctx->hash = crypto_alloc_shash("sha256", 0, 0);
-
-	return PTR_ERR_OR_ZERO(ctx->hash);
-}
-
-static void __maybe_unused essiv_cbc_exit_tfm(struct crypto_skcipher *tfm)
-{
-	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
-
-	crypto_free_shash(ctx->hash);
-}
-
 static int __maybe_unused essiv_cbc_encrypt(struct skcipher_request *req)
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
 	int err, rounds = 6 + ctx->key1.key_length / 4;
@@ -791,12 +774,10 @@ static struct skcipher_alg aes_algs[] = { {
 	.max_keysize	= AES_MAX_KEY_SIZE,
 	.ivsize		= AES_BLOCK_SIZE,
 	.setkey		= essiv_cbc_set_key,
 	.encrypt	= essiv_cbc_encrypt,
 	.decrypt	= essiv_cbc_decrypt,
-	.init		= essiv_cbc_init_tfm,
-	.exit		= essiv_cbc_exit_tfm,
 } };
 
 static int cbcmac_setkey(struct crypto_shash *tfm, const u8 *in_key,
 			 unsigned int key_len)
 {

base-commit: 34c065fe1d0dbb08073d83559d3173bb4f17dcc5
-- 
2.50.1
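
For context, a minimal sketch (not part of the patch) contrasting the two
ways of computing the SHA-256 digest of the key: the crypto_shash pattern
being removed, which needs a "sha256" tfm allocated in .init and freed in
.exit, versus the sha256() library helper from <crypto/sha2.h> that the
patch switches to (made available by selecting CRYPTO_LIB_SHA256). The
helper names derive_essiv_salt_shash() and derive_essiv_salt_lib() are
hypothetical and exist only for illustration.

	/* Illustrative sketch only; helper names are hypothetical. */
	#include <crypto/hash.h>
	#include <crypto/sha2.h>
	#include <linux/err.h>

	/* Old pattern: allocate a "sha256" shash tfm, digest, then free it. */
	static int derive_essiv_salt_shash(const u8 *in_key, unsigned int key_len,
					   u8 digest[SHA256_DIGEST_SIZE])
	{
		struct crypto_shash *hash = crypto_alloc_shash("sha256", 0, 0);
		int err;

		if (IS_ERR(hash))
			return PTR_ERR(hash);
		err = crypto_shash_tfm_digest(hash, in_key, key_len, digest);
		crypto_free_shash(hash);
		return err;
	}

	/* New pattern: the library call needs no tfm and cannot fail. */
	static void derive_essiv_salt_lib(const u8 *in_key, unsigned int key_len,
					  u8 digest[SHA256_DIGEST_SIZE])
	{
		sha256(in_key, key_len, digest);
	}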