From: Eric Biggers <ebiggers@xxxxxxxxxx>

Stop wrapping skcipher and aead algorithms with the crypto SIMD helper
(crypto/simd.c). The only purpose of doing so was to work around x86
not always supporting kernel-mode FPU in softirqs.

Specifically, if a hardirq interrupted a task context kernel-mode FPU
section and then softirqs were run at the end of that hardirq, those
softirqs could not use kernel-mode FPU. This has now been fixed. In
combination with the fact that the skcipher and aead APIs only support
task and softirq contexts, these can now just use kernel-mode FPU
unconditionally on x86. This simplifies the code and improves
performance.

Signed-off-by: Eric Biggers <ebiggers@xxxxxxxxxx>
---
 arch/x86/crypto/Kconfig                 |  3 ---
 arch/x86/crypto/aria_aesni_avx2_glue.c  | 22 +++++++---------------
 arch/x86/crypto/aria_aesni_avx_glue.c   | 20 ++++++--------------
 arch/x86/crypto/aria_gfni_avx512_glue.c | 22 +++++++---------------
 4 files changed, 20 insertions(+), 47 deletions(-)

diff --git a/arch/x86/crypto/Kconfig b/arch/x86/crypto/Kconfig
index d8f9d6279cb26..ad00d53ab83d8 100644
--- a/arch/x86/crypto/Kconfig
+++ b/arch/x86/crypto/Kconfig
@@ -294,11 +294,10 @@ config CRYPTO_TWOFISH_AVX_X86_64
 
 config CRYPTO_ARIA_AESNI_AVX_X86_64
 	tristate "Ciphers: ARIA with modes: ECB, CTR (AES-NI/AVX/GFNI)"
 	depends on X86 && 64BIT
 	select CRYPTO_SKCIPHER
-	select CRYPTO_SIMD
 	select CRYPTO_ALGAPI
 	select CRYPTO_ARIA
 	help
 	  Length-preserving cipher: ARIA cipher algorithms
 	  (RFC 5794) with ECB and CTR modes
@@ -312,11 +311,10 @@ config CRYPTO_ARIA_AESNI_AVX_X86_64
 
 config CRYPTO_ARIA_AESNI_AVX2_X86_64
 	tristate "Ciphers: ARIA with modes: ECB, CTR (AES-NI/AVX2/GFNI)"
 	depends on X86 && 64BIT
 	select CRYPTO_SKCIPHER
-	select CRYPTO_SIMD
 	select CRYPTO_ALGAPI
 	select CRYPTO_ARIA
 	select CRYPTO_ARIA_AESNI_AVX_X86_64
 	help
 	  Length-preserving cipher: ARIA cipher algorithms
@@ -331,11 +329,10 @@ config CRYPTO_ARIA_AESNI_AVX2_X86_64
 
 config CRYPTO_ARIA_GFNI_AVX512_X86_64
 	tristate "Ciphers: ARIA with modes: ECB, CTR (AVX512/GFNI)"
 	depends on X86 && 64BIT && AS_AVX512 && AS_GFNI
 	select CRYPTO_SKCIPHER
-	select CRYPTO_SIMD
 	select CRYPTO_ALGAPI
 	select CRYPTO_ARIA
 	select CRYPTO_ARIA_AESNI_AVX_X86_64
 	select CRYPTO_ARIA_AESNI_AVX2_X86_64
 	help
diff --git a/arch/x86/crypto/aria_aesni_avx2_glue.c b/arch/x86/crypto/aria_aesni_avx2_glue.c
index 87a11804fc77f..b4bddcd584577 100644
--- a/arch/x86/crypto/aria_aesni_avx2_glue.c
+++ b/arch/x86/crypto/aria_aesni_avx2_glue.c
@@ -4,11 +4,10 @@
  *
  * Copyright (c) 2022 Taehee Yoo <ap420073@xxxxxxxxx>
  */
 
 #include <crypto/algapi.h>
-#include <crypto/internal/simd.h>
 #include <crypto/aria.h>
 #include <linux/crypto.h>
 #include <linux/err.h>
 #include <linux/module.h>
 #include <linux/types.h>
@@ -163,28 +162,26 @@ static int aria_avx2_init_tfm(struct crypto_skcipher *tfm)
 	return 0;
 }
 
 static struct skcipher_alg aria_algs[] = {
 	{
-		.base.cra_name		= "__ecb(aria)",
-		.base.cra_driver_name	= "__ecb-aria-avx2",
+		.base.cra_name		= "ecb(aria)",
+		.base.cra_driver_name	= "ecb-aria-avx2",
 		.base.cra_priority	= 500,
-		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
 		.base.cra_blocksize	= ARIA_BLOCK_SIZE,
 		.base.cra_ctxsize	= sizeof(struct aria_ctx),
 		.base.cra_module	= THIS_MODULE,
 		.min_keysize		= ARIA_MIN_KEY_SIZE,
 		.max_keysize		= ARIA_MAX_KEY_SIZE,
 		.setkey			= aria_avx2_set_key,
 		.encrypt		= aria_avx2_ecb_encrypt,
 		.decrypt		= aria_avx2_ecb_decrypt,
 	}, {
-		.base.cra_name		= "__ctr(aria)",
-		.base.cra_driver_name	= "__ctr-aria-avx2",
+		.base.cra_name		= "ctr(aria)",
+		.base.cra_driver_name	= "ctr-aria-avx2",
 		.base.cra_priority	= 500,
-		.base.cra_flags		= CRYPTO_ALG_INTERNAL |
-					  CRYPTO_ALG_SKCIPHER_REQSIZE_LARGE,
+		.base.cra_flags		= CRYPTO_ALG_SKCIPHER_REQSIZE_LARGE,
 		.base.cra_blocksize	= 1,
 		.base.cra_ctxsize	= sizeof(struct aria_ctx),
 		.base.cra_module	= THIS_MODULE,
 		.min_keysize		= ARIA_MIN_KEY_SIZE,
 		.max_keysize		= ARIA_MAX_KEY_SIZE,
@@ -195,12 +192,10 @@ static struct skcipher_alg aria_algs[] = {
 		.decrypt		= aria_avx2_ctr_encrypt,
 		.init			= aria_avx2_init_tfm,
 	}
 };
 
-static struct simd_skcipher_alg *aria_simd_algs[ARRAY_SIZE(aria_algs)];
-
 static int __init aria_avx2_init(void)
 {
 	const char *feature_name;
 
 	if (!boot_cpu_has(X86_FEATURE_AVX) ||
@@ -231,19 +226,16 @@ static int __init aria_avx2_init(void)
 		aria_ops.aria_encrypt_32way = aria_aesni_avx2_encrypt_32way;
 		aria_ops.aria_decrypt_32way = aria_aesni_avx2_decrypt_32way;
 		aria_ops.aria_ctr_crypt_32way = aria_aesni_avx2_ctr_crypt_32way;
 	}
 
-	return simd_register_skciphers_compat(aria_algs,
-					      ARRAY_SIZE(aria_algs),
-					      aria_simd_algs);
+	return crypto_register_skciphers(aria_algs, ARRAY_SIZE(aria_algs));
 }
 
 static void __exit aria_avx2_exit(void)
 {
-	simd_unregister_skciphers(aria_algs, ARRAY_SIZE(aria_algs),
-				  aria_simd_algs);
+	crypto_unregister_skciphers(aria_algs, ARRAY_SIZE(aria_algs));
 }
 
 module_init(aria_avx2_init);
 module_exit(aria_avx2_exit);
 
diff --git a/arch/x86/crypto/aria_aesni_avx_glue.c b/arch/x86/crypto/aria_aesni_avx_glue.c
index 4e1516b76669e..ab9b38d05332a 100644
--- a/arch/x86/crypto/aria_aesni_avx_glue.c
+++ b/arch/x86/crypto/aria_aesni_avx_glue.c
@@ -4,11 +4,10 @@
  *
  * Copyright (c) 2022 Taehee Yoo <ap420073@xxxxxxxxx>
  */
 
 #include <crypto/algapi.h>
-#include <crypto/internal/simd.h>
 #include <crypto/aria.h>
 #include <linux/crypto.h>
 #include <linux/err.h>
 #include <linux/module.h>
 #include <linux/types.h>
@@ -150,27 +149,25 @@ static int aria_avx_init_tfm(struct crypto_skcipher *tfm)
 	return 0;
 }
 
 static struct skcipher_alg aria_algs[] = {
 	{
-		.base.cra_name		= "__ecb(aria)",
-		.base.cra_driver_name	= "__ecb-aria-avx",
+		.base.cra_name		= "ecb(aria)",
+		.base.cra_driver_name	= "ecb-aria-avx",
 		.base.cra_priority	= 400,
-		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
 		.base.cra_blocksize	= ARIA_BLOCK_SIZE,
 		.base.cra_ctxsize	= sizeof(struct aria_ctx),
 		.base.cra_module	= THIS_MODULE,
 		.min_keysize		= ARIA_MIN_KEY_SIZE,
 		.max_keysize		= ARIA_MAX_KEY_SIZE,
 		.setkey			= aria_avx_set_key,
 		.encrypt		= aria_avx_ecb_encrypt,
 		.decrypt		= aria_avx_ecb_decrypt,
 	}, {
-		.base.cra_name		= "__ctr(aria)",
-		.base.cra_driver_name	= "__ctr-aria-avx",
+		.base.cra_name		= "ctr(aria)",
+		.base.cra_driver_name	= "ctr-aria-avx",
 		.base.cra_priority	= 400,
-		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
 		.base.cra_blocksize	= 1,
 		.base.cra_ctxsize	= sizeof(struct aria_ctx),
 		.base.cra_module	= THIS_MODULE,
 		.min_keysize		= ARIA_MIN_KEY_SIZE,
 		.max_keysize		= ARIA_MAX_KEY_SIZE,
@@ -182,12 +179,10 @@ static struct skcipher_alg aria_algs[] = {
 		.decrypt		= aria_avx_ctr_encrypt,
 		.init			= aria_avx_init_tfm,
 	}
 };
 
-static struct simd_skcipher_alg *aria_simd_algs[ARRAY_SIZE(aria_algs)];
-
 static int __init aria_avx_init(void)
 {
 	const char *feature_name;
 
 	if (!boot_cpu_has(X86_FEATURE_AVX) ||
@@ -211,19 +206,16 @@ static int __init aria_avx_init(void)
 		aria_ops.aria_encrypt_16way = aria_aesni_avx_encrypt_16way;
 		aria_ops.aria_decrypt_16way = aria_aesni_avx_decrypt_16way;
 		aria_ops.aria_ctr_crypt_16way = aria_aesni_avx_ctr_crypt_16way;
 	}
 
-	return simd_register_skciphers_compat(aria_algs,
-					      ARRAY_SIZE(aria_algs),
-					      aria_simd_algs);
+	return crypto_register_skciphers(aria_algs, ARRAY_SIZE(aria_algs));
 }
 
 static void __exit aria_avx_exit(void)
 {
-	simd_unregister_skciphers(aria_algs, ARRAY_SIZE(aria_algs),
-				  aria_simd_algs);
+	crypto_unregister_skciphers(aria_algs, ARRAY_SIZE(aria_algs));
 }
 
 module_init(aria_avx_init);
 module_exit(aria_avx_exit);
 
diff --git a/arch/x86/crypto/aria_gfni_avx512_glue.c b/arch/x86/crypto/aria_gfni_avx512_glue.c
index f4a2208d26383..363cbf4399cca 100644
--- a/arch/x86/crypto/aria_gfni_avx512_glue.c
+++ b/arch/x86/crypto/aria_gfni_avx512_glue.c
@@ -4,11 +4,10 @@
  *
  * Copyright (c) 2022 Taehee Yoo <ap420073@xxxxxxxxx>
  */
 
 #include <crypto/algapi.h>
-#include <crypto/internal/simd.h>
 #include <crypto/aria.h>
 #include <linux/crypto.h>
 #include <linux/err.h>
 #include <linux/module.h>
 #include <linux/types.h>
@@ -163,28 +162,26 @@ static int aria_avx512_init_tfm(struct crypto_skcipher *tfm)
 	return 0;
 }
 
 static struct skcipher_alg aria_algs[] = {
 	{
-		.base.cra_name		= "__ecb(aria)",
-		.base.cra_driver_name	= "__ecb-aria-avx512",
+		.base.cra_name		= "ecb(aria)",
+		.base.cra_driver_name	= "ecb-aria-avx512",
 		.base.cra_priority	= 600,
-		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
 		.base.cra_blocksize	= ARIA_BLOCK_SIZE,
 		.base.cra_ctxsize	= sizeof(struct aria_ctx),
 		.base.cra_module	= THIS_MODULE,
 		.min_keysize		= ARIA_MIN_KEY_SIZE,
 		.max_keysize		= ARIA_MAX_KEY_SIZE,
 		.setkey			= aria_avx512_set_key,
 		.encrypt		= aria_avx512_ecb_encrypt,
 		.decrypt		= aria_avx512_ecb_decrypt,
 	}, {
-		.base.cra_name		= "__ctr(aria)",
-		.base.cra_driver_name	= "__ctr-aria-avx512",
+		.base.cra_name		= "ctr(aria)",
+		.base.cra_driver_name	= "ctr-aria-avx512",
 		.base.cra_priority	= 600,
-		.base.cra_flags		= CRYPTO_ALG_INTERNAL |
-					  CRYPTO_ALG_SKCIPHER_REQSIZE_LARGE,
+		.base.cra_flags		= CRYPTO_ALG_SKCIPHER_REQSIZE_LARGE,
 		.base.cra_blocksize	= 1,
 		.base.cra_ctxsize	= sizeof(struct aria_ctx),
 		.base.cra_module	= THIS_MODULE,
 		.min_keysize		= ARIA_MIN_KEY_SIZE,
 		.max_keysize		= ARIA_MAX_KEY_SIZE,
@@ -195,12 +192,10 @@ static struct skcipher_alg aria_algs[] = {
 		.decrypt		= aria_avx512_ctr_encrypt,
 		.init			= aria_avx512_init_tfm,
 	}
 };
 
-static struct simd_skcipher_alg *aria_simd_algs[ARRAY_SIZE(aria_algs)];
-
 static int __init aria_avx512_init(void)
 {
 	const char *feature_name;
 
 	if (!boot_cpu_has(X86_FEATURE_AVX) ||
@@ -227,19 +222,16 @@ static int __init aria_avx512_init(void)
 	aria_ops.aria_ctr_crypt_32way = aria_aesni_avx2_gfni_ctr_crypt_32way;
 	aria_ops.aria_encrypt_64way = aria_gfni_avx512_encrypt_64way;
 	aria_ops.aria_decrypt_64way = aria_gfni_avx512_decrypt_64way;
 	aria_ops.aria_ctr_crypt_64way = aria_gfni_avx512_ctr_crypt_64way;
 
-	return simd_register_skciphers_compat(aria_algs,
-					      ARRAY_SIZE(aria_algs),
-					      aria_simd_algs);
+	return crypto_register_skciphers(aria_algs, ARRAY_SIZE(aria_algs));
 }
 
 static void __exit aria_avx512_exit(void)
 {
-	simd_unregister_skciphers(aria_algs, ARRAY_SIZE(aria_algs),
-				  aria_simd_algs);
+	crypto_unregister_skciphers(aria_algs, ARRAY_SIZE(aria_algs));
 }
 
 module_init(aria_avx512_init);
 module_exit(aria_avx512_exit);
-- 
2.49.0