Diffstat (limited to 'target/linux/generic/backport-5.4/080-wireguard-0006-crypto-arm64-chacha-expose-arm64-ChaCha-routine-as-l.patch')
-rw-r--r--  target/linux/generic/backport-5.4/080-wireguard-0006-crypto-arm64-chacha-expose-arm64-ChaCha-routine-as-l.patch | 138
1 file changed, 0 insertions(+), 138 deletions(-)
diff --git a/target/linux/generic/backport-5.4/080-wireguard-0006-crypto-arm64-chacha-expose-arm64-ChaCha-routine-as-l.patch b/target/linux/generic/backport-5.4/080-wireguard-0006-crypto-arm64-chacha-expose-arm64-ChaCha-routine-as-l.patch
deleted file mode 100644
index 71665e8bfd..0000000000
--- a/target/linux/generic/backport-5.4/080-wireguard-0006-crypto-arm64-chacha-expose-arm64-ChaCha-routine-as-l.patch
+++ /dev/null
@@ -1,138 +0,0 @@
-From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
-From: Ard Biesheuvel <ardb@kernel.org>
-Date: Fri, 8 Nov 2019 13:22:12 +0100
-Subject: [PATCH] crypto: arm64/chacha - expose arm64 ChaCha routine as library
- function
-
-commit b3aad5bad26a01a4bd8c49a5c5f52aec665f3b7c upstream.
-
-Expose the accelerated NEON ChaCha routine directly as a symbol
-export so that users of the ChaCha library API can use it directly.
-
-Given that calls into the library API will always go through the
-routines in this module if it is enabled, switch to static keys
-to select the optimal implementation available (which may be none
-at all, in which case we defer to the generic implementation for
-all invocations).
-
-Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
-Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
-Signed-off-by: Jason A. Donenfeld <Jason@zx2c4.com>
----
- arch/arm64/crypto/Kconfig | 1 +
- arch/arm64/crypto/chacha-neon-glue.c | 53 ++++++++++++++++++++++------
- 2 files changed, 43 insertions(+), 11 deletions(-)
-
---- a/arch/arm64/crypto/Kconfig
-+++ b/arch/arm64/crypto/Kconfig
-@@ -104,6 +104,7 @@ config CRYPTO_CHACHA20_NEON
- depends on KERNEL_MODE_NEON
- select CRYPTO_BLKCIPHER
- select CRYPTO_LIB_CHACHA_GENERIC
-+ select CRYPTO_ARCH_HAVE_LIB_CHACHA
-
- config CRYPTO_NHPOLY1305_NEON
- tristate "NHPoly1305 hash function using NEON instructions (for Adiantum)"
---- a/arch/arm64/crypto/chacha-neon-glue.c
-+++ b/arch/arm64/crypto/chacha-neon-glue.c
-@@ -23,6 +23,7 @@
- #include <crypto/internal/chacha.h>
- #include <crypto/internal/simd.h>
- #include <crypto/internal/skcipher.h>
-+#include <linux/jump_label.h>
- #include <linux/kernel.h>
- #include <linux/module.h>
-
-@@ -36,6 +37,8 @@ asmlinkage void chacha_4block_xor_neon(u
- int nrounds, int bytes);
- asmlinkage void hchacha_block_neon(const u32 *state, u32 *out, int nrounds);
-
-+static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_neon);
-+
- static void chacha_doneon(u32 *state, u8 *dst, const u8 *src,
- int bytes, int nrounds)
- {
-@@ -59,6 +62,37 @@ static void chacha_doneon(u32 *state, u8
- }
- }
-
-+void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds)
-+{
-+ if (!static_branch_likely(&have_neon) || !crypto_simd_usable()) {
-+ hchacha_block_generic(state, stream, nrounds);
-+ } else {
-+ kernel_neon_begin();
-+ hchacha_block_neon(state, stream, nrounds);
-+ kernel_neon_end();
-+ }
-+}
-+EXPORT_SYMBOL(hchacha_block_arch);
-+
-+void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv)
-+{
-+ chacha_init_generic(state, key, iv);
-+}
-+EXPORT_SYMBOL(chacha_init_arch);
-+
-+void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
-+ int nrounds)
-+{
-+ if (!static_branch_likely(&have_neon) || bytes <= CHACHA_BLOCK_SIZE ||
-+ !crypto_simd_usable())
-+ return chacha_crypt_generic(state, dst, src, bytes, nrounds);
-+
-+ kernel_neon_begin();
-+ chacha_doneon(state, dst, src, bytes, nrounds);
-+ kernel_neon_end();
-+}
-+EXPORT_SYMBOL(chacha_crypt_arch);
-+
- static int chacha_neon_stream_xor(struct skcipher_request *req,
- const struct chacha_ctx *ctx, const u8 *iv)
- {
-@@ -76,7 +110,8 @@ static int chacha_neon_stream_xor(struct
- if (nbytes < walk.total)
- nbytes = rounddown(nbytes, walk.stride);
-
-- if (!crypto_simd_usable()) {
-+ if (!static_branch_likely(&have_neon) ||
-+ !crypto_simd_usable()) {
- chacha_crypt_generic(state, walk.dst.virt.addr,
- walk.src.virt.addr, nbytes,
- ctx->nrounds);
-@@ -109,14 +144,7 @@ static int xchacha_neon(struct skcipher_
- u8 real_iv[16];
-
- chacha_init_generic(state, ctx->key, req->iv);
--
-- if (crypto_simd_usable()) {
-- kernel_neon_begin();
-- hchacha_block_neon(state, subctx.key, ctx->nrounds);
-- kernel_neon_end();
-- } else {
-- hchacha_block_generic(state, subctx.key, ctx->nrounds);
-- }
-+ hchacha_block_arch(state, subctx.key, ctx->nrounds);
- subctx.nrounds = ctx->nrounds;
-
- memcpy(&real_iv[0], req->iv + 24, 8);
-@@ -179,14 +207,17 @@ static struct skcipher_alg algs[] = {
- static int __init chacha_simd_mod_init(void)
- {
- if (!cpu_have_named_feature(ASIMD))
-- return -ENODEV;
-+ return 0;
-+
-+ static_branch_enable(&have_neon);
-
- return crypto_register_skciphers(algs, ARRAY_SIZE(algs));
- }
-
- static void __exit chacha_simd_mod_fini(void)
- {
-- crypto_unregister_skciphers(algs, ARRAY_SIZE(algs));
-+ if (cpu_have_named_feature(ASIMD))
-+ crypto_unregister_skciphers(algs, ARRAY_SIZE(algs));
- }
-
- module_init(chacha_simd_mod_init);
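
For reference, a minimal caller-side sketch of what the commit message describes: reaching the exported arm64 routines through the ChaCha library API. The chacha_init_generic() and chacha_crypt_arch() signatures are taken from the hunks above; the demo function itself, its buffer names, and the assumption that <crypto/chacha.h> declares both helpers are illustrative only and not part of the removed patch.

/* Illustrative sketch only -- not part of the patch shown above. */
#include <crypto/chacha.h>      /* chacha_init_generic(), chacha_crypt_arch() */
#include <linux/types.h>

static void chacha20_lib_demo(const u32 key[8], const u8 iv[16],
                              u8 *dst, const u8 *src, unsigned int len)
{
        u32 state[16];  /* 4 constant words, 8 key words, 1 counter word, 3 IV words */

        /* The arm64 chacha_init_arch() added above simply forwards here. */
        chacha_init_generic(state, key, iv);

        /*
         * With this patch applied, chacha_crypt_arch() takes the NEON path
         * when the have_neon static key is enabled and the SIMD unit is
         * usable, and falls back to chacha_crypt_generic() otherwise.
         */
        chacha_crypt_arch(state, dst, src, len, 20);
}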