aboutsummaryrefslogtreecommitdiffstats
path: root/target/linux/generic/backport-5.4/080-wireguard-0037-crypto-arch-conditionalize-crypto-api-in-arch-glue-f.patch
diff options
context:
space:
mode:
authorJason A. Donenfeld <Jason@zx2c4.com>2021-02-19 14:29:04 +0100
committerHauke Mehrtens <hauke@hauke-m.de>2021-04-10 14:21:32 +0200
commitc0cb86e1d5ed9ab2fdbbe2d66ab612892d22d508 (patch)
tree2ecd3601151780e21352a001470b6ca1acfc3068 /target/linux/generic/backport-5.4/080-wireguard-0037-crypto-arch-conditionalize-crypto-api-in-arch-glue-f.patch
parentaebfc2f6f3e2307cb586b1ff50924a2803cbcd3c (diff)
downloadupstream-c0cb86e1d5ed9ab2fdbbe2d66ab612892d22d508.tar.gz
upstream-c0cb86e1d5ed9ab2fdbbe2d66ab612892d22d508.tar.bz2
upstream-c0cb86e1d5ed9ab2fdbbe2d66ab612892d22d508.zip
kernel: 5.4: import wireguard backport
Rather than using the clunky, old, slower wireguard-linux-compat out of tree module, this commit does a patch-by-patch backport of upstream's wireguard to 5.4. This specific backport is in widespread use, being part of SUSE's enterprise kernel, Oracle's enterprise kernel, Google's Android kernel, Gentoo's distro kernel, and probably more I've forgotten about. It's definitely the "more proper" way of adding wireguard to a kernel than the ugly compat.h hell of the wireguard-linux-compat repo. And most importantly for OpenWRT, it allows using the same module configuration code for 5.10 as for 5.4, with no need for bifurcation. These patches are from the backport tree which is maintained in the open here: https://git.zx2c4.com/wireguard-linux/log/?h=backport-5.4.y I'll be sending PRs to update this as needed. Signed-off-by: Jason A. Donenfeld <Jason@zx2c4.com> (cherry picked from commit 3888fa78802354ab7bbd19b7d061fd80a16ce06b) (cherry picked from commit d54072587146dd0db9bb52b513234d944edabda3) (cherry picked from commit 196f3d586f11d96ba4ab60068cfb12420bcd20fd) (cherry picked from commit 3500fd7938a6d0c0e320295f0aa2fa34b1ebc08d) (cherry picked from commit 23b801d3ba57e34cc609ea40982c7fbed08164e9) (cherry picked from commit 0c0cb97da7f5cc06919449131dd57ed805f8f78d) (cherry picked from commit 2a27f6f90a430342cdbe84806e8b10acff446a2d) Signed-off-by: Ilya Lipnitskiy <ilya.lipnitskiy@gmail.com>
Diffstat (limited to 'target/linux/generic/backport-5.4/080-wireguard-0037-crypto-arch-conditionalize-crypto-api-in-arch-glue-f.patch')
-rw-r--r--target/linux/generic/backport-5.4/080-wireguard-0037-crypto-arch-conditionalize-crypto-api-in-arch-glue-f.patch275
1 files changed, 275 insertions, 0 deletions
diff --git a/target/linux/generic/backport-5.4/080-wireguard-0037-crypto-arch-conditionalize-crypto-api-in-arch-glue-f.patch b/target/linux/generic/backport-5.4/080-wireguard-0037-crypto-arch-conditionalize-crypto-api-in-arch-glue-f.patch
new file mode 100644
index 0000000000..d510438f1d
--- /dev/null
+++ b/target/linux/generic/backport-5.4/080-wireguard-0037-crypto-arch-conditionalize-crypto-api-in-arch-glue-f.patch
@@ -0,0 +1,275 @@
+From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
+From: "Jason A. Donenfeld" <Jason@zx2c4.com>
+Date: Mon, 25 Nov 2019 11:31:12 +0100
+Subject: [PATCH] crypto: arch - conditionalize crypto api in arch glue for lib
+ code
+
+commit 8394bfec51e0e565556101bcc4e2fe7551104cd8 upstream.
+
+For glue code that's used by Zinc, the actual Crypto API functions might
+not necessarily exist, and don't need to exist either. Before this
+patch, there are valid build configurations that lead to an unbuildable
+kernel. This fixes it to conditionalize those symbols on the existence
+of the proper config entry.
+
+Signed-off-by: Jason A. Donenfeld <Jason@zx2c4.com>
+Acked-by: Ard Biesheuvel <ardb@kernel.org>
+Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
+Signed-off-by: Jason A. Donenfeld <Jason@zx2c4.com>
+---
+ arch/arm/crypto/chacha-glue.c | 26 ++++++++++++++++----------
+ arch/arm/crypto/curve25519-glue.c | 5 +++--
+ arch/arm/crypto/poly1305-glue.c | 9 ++++++---
+ arch/arm64/crypto/chacha-neon-glue.c | 5 +++--
+ arch/arm64/crypto/poly1305-glue.c | 5 +++--
+ arch/mips/crypto/chacha-glue.c | 6 ++++--
+ arch/mips/crypto/poly1305-glue.c | 6 ++++--
+ arch/x86/crypto/blake2s-glue.c | 6 ++++--
+ arch/x86/crypto/chacha_glue.c | 5 +++--
+ arch/x86/crypto/curve25519-x86_64.c | 7 ++++---
+ arch/x86/crypto/poly1305_glue.c | 5 +++--
+ 11 files changed, 53 insertions(+), 32 deletions(-)
+
+--- a/arch/arm/crypto/chacha-glue.c
++++ b/arch/arm/crypto/chacha-glue.c
+@@ -286,11 +286,13 @@ static struct skcipher_alg neon_algs[] =
+
+ static int __init chacha_simd_mod_init(void)
+ {
+- int err;
++ int err = 0;
+
+- err = crypto_register_skciphers(arm_algs, ARRAY_SIZE(arm_algs));
+- if (err)
+- return err;
++ if (IS_REACHABLE(CONFIG_CRYPTO_BLKCIPHER)) {
++ err = crypto_register_skciphers(arm_algs, ARRAY_SIZE(arm_algs));
++ if (err)
++ return err;
++ }
+
+ if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && (elf_hwcap & HWCAP_NEON)) {
+ int i;
+@@ -310,18 +312,22 @@ static int __init chacha_simd_mod_init(v
+ static_branch_enable(&use_neon);
+ }
+
+- err = crypto_register_skciphers(neon_algs, ARRAY_SIZE(neon_algs));
+- if (err)
+- crypto_unregister_skciphers(arm_algs, ARRAY_SIZE(arm_algs));
++ if (IS_REACHABLE(CONFIG_CRYPTO_BLKCIPHER)) {
++ err = crypto_register_skciphers(neon_algs, ARRAY_SIZE(neon_algs));
++ if (err)
++ crypto_unregister_skciphers(arm_algs, ARRAY_SIZE(arm_algs));
++ }
+ }
+ return err;
+ }
+
+ static void __exit chacha_simd_mod_fini(void)
+ {
+- crypto_unregister_skciphers(arm_algs, ARRAY_SIZE(arm_algs));
+- if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && (elf_hwcap & HWCAP_NEON))
+- crypto_unregister_skciphers(neon_algs, ARRAY_SIZE(neon_algs));
++ if (IS_REACHABLE(CONFIG_CRYPTO_BLKCIPHER)) {
++ crypto_unregister_skciphers(arm_algs, ARRAY_SIZE(arm_algs));
++ if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && (elf_hwcap & HWCAP_NEON))
++ crypto_unregister_skciphers(neon_algs, ARRAY_SIZE(neon_algs));
++ }
+ }
+
+ module_init(chacha_simd_mod_init);
+--- a/arch/arm/crypto/curve25519-glue.c
++++ b/arch/arm/crypto/curve25519-glue.c
+@@ -108,14 +108,15 @@ static int __init mod_init(void)
+ {
+ if (elf_hwcap & HWCAP_NEON) {
+ static_branch_enable(&have_neon);
+- return crypto_register_kpp(&curve25519_alg);
++ return IS_REACHABLE(CONFIG_CRYPTO_KPP) ?
++ crypto_register_kpp(&curve25519_alg) : 0;
+ }
+ return 0;
+ }
+
+ static void __exit mod_exit(void)
+ {
+- if (elf_hwcap & HWCAP_NEON)
++ if (IS_REACHABLE(CONFIG_CRYPTO_KPP) && elf_hwcap & HWCAP_NEON)
+ crypto_unregister_kpp(&curve25519_alg);
+ }
+
+--- a/arch/arm/crypto/poly1305-glue.c
++++ b/arch/arm/crypto/poly1305-glue.c
+@@ -249,16 +249,19 @@ static int __init arm_poly1305_mod_init(
+ if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) &&
+ (elf_hwcap & HWCAP_NEON))
+ static_branch_enable(&have_neon);
+- else
++ else if (IS_REACHABLE(CONFIG_CRYPTO_HASH))
+ /* register only the first entry */
+ return crypto_register_shash(&arm_poly1305_algs[0]);
+
+- return crypto_register_shashes(arm_poly1305_algs,
+- ARRAY_SIZE(arm_poly1305_algs));
++ return IS_REACHABLE(CONFIG_CRYPTO_HASH) ?
++ crypto_register_shashes(arm_poly1305_algs,
++ ARRAY_SIZE(arm_poly1305_algs)) : 0;
+ }
+
+ static void __exit arm_poly1305_mod_exit(void)
+ {
++ if (!IS_REACHABLE(CONFIG_CRYPTO_HASH))
++ return;
+ if (!static_branch_likely(&have_neon)) {
+ crypto_unregister_shash(&arm_poly1305_algs[0]);
+ return;
+--- a/arch/arm64/crypto/chacha-neon-glue.c
++++ b/arch/arm64/crypto/chacha-neon-glue.c
+@@ -211,12 +211,13 @@ static int __init chacha_simd_mod_init(v
+
+ static_branch_enable(&have_neon);
+
+- return crypto_register_skciphers(algs, ARRAY_SIZE(algs));
++ return IS_REACHABLE(CONFIG_CRYPTO_BLKCIPHER) ?
++ crypto_register_skciphers(algs, ARRAY_SIZE(algs)) : 0;
+ }
+
+ static void __exit chacha_simd_mod_fini(void)
+ {
+- if (cpu_have_named_feature(ASIMD))
++ if (IS_REACHABLE(CONFIG_CRYPTO_BLKCIPHER) && cpu_have_named_feature(ASIMD))
+ crypto_unregister_skciphers(algs, ARRAY_SIZE(algs));
+ }
+
+--- a/arch/arm64/crypto/poly1305-glue.c
++++ b/arch/arm64/crypto/poly1305-glue.c
+@@ -220,12 +220,13 @@ static int __init neon_poly1305_mod_init
+
+ static_branch_enable(&have_neon);
+
+- return crypto_register_shash(&neon_poly1305_alg);
++ return IS_REACHABLE(CONFIG_CRYPTO_HASH) ?
++ crypto_register_shash(&neon_poly1305_alg) : 0;
+ }
+
+ static void __exit neon_poly1305_mod_exit(void)
+ {
+- if (cpu_have_named_feature(ASIMD))
++ if (IS_REACHABLE(CONFIG_CRYPTO_HASH) && cpu_have_named_feature(ASIMD))
+ crypto_unregister_shash(&neon_poly1305_alg);
+ }
+
+--- a/arch/mips/crypto/chacha-glue.c
++++ b/arch/mips/crypto/chacha-glue.c
+@@ -128,12 +128,14 @@ static struct skcipher_alg algs[] = {
+
+ static int __init chacha_simd_mod_init(void)
+ {
+- return crypto_register_skciphers(algs, ARRAY_SIZE(algs));
++ return IS_REACHABLE(CONFIG_CRYPTO_BLKCIPHER) ?
++ crypto_register_skciphers(algs, ARRAY_SIZE(algs)) : 0;
+ }
+
+ static void __exit chacha_simd_mod_fini(void)
+ {
+- crypto_unregister_skciphers(algs, ARRAY_SIZE(algs));
++ if (IS_REACHABLE(CONFIG_CRYPTO_BLKCIPHER))
++ crypto_unregister_skciphers(algs, ARRAY_SIZE(algs));
+ }
+
+ module_init(chacha_simd_mod_init);
+--- a/arch/mips/crypto/poly1305-glue.c
++++ b/arch/mips/crypto/poly1305-glue.c
+@@ -187,12 +187,14 @@ static struct shash_alg mips_poly1305_al
+
+ static int __init mips_poly1305_mod_init(void)
+ {
+- return crypto_register_shash(&mips_poly1305_alg);
++ return IS_REACHABLE(CONFIG_CRYPTO_HASH) ?
++ crypto_register_shash(&mips_poly1305_alg) : 0;
+ }
+
+ static void __exit mips_poly1305_mod_exit(void)
+ {
+- crypto_unregister_shash(&mips_poly1305_alg);
++ if (IS_REACHABLE(CONFIG_CRYPTO_HASH))
++ crypto_unregister_shash(&mips_poly1305_alg);
+ }
+
+ module_init(mips_poly1305_mod_init);
+--- a/arch/x86/crypto/blake2s-glue.c
++++ b/arch/x86/crypto/blake2s-glue.c
+@@ -210,12 +210,14 @@ static int __init blake2s_mod_init(void)
+ XFEATURE_MASK_AVX512, NULL))
+ static_branch_enable(&blake2s_use_avx512);
+
+- return crypto_register_shashes(blake2s_algs, ARRAY_SIZE(blake2s_algs));
++ return IS_REACHABLE(CONFIG_CRYPTO_HASH) ?
++ crypto_register_shashes(blake2s_algs,
++ ARRAY_SIZE(blake2s_algs)) : 0;
+ }
+
+ static void __exit blake2s_mod_exit(void)
+ {
+- if (boot_cpu_has(X86_FEATURE_SSSE3))
++ if (IS_REACHABLE(CONFIG_CRYPTO_HASH) && boot_cpu_has(X86_FEATURE_SSSE3))
+ crypto_unregister_shashes(blake2s_algs, ARRAY_SIZE(blake2s_algs));
+ }
+
+--- a/arch/x86/crypto/chacha_glue.c
++++ b/arch/x86/crypto/chacha_glue.c
+@@ -299,12 +299,13 @@ static int __init chacha_simd_mod_init(v
+ boot_cpu_has(X86_FEATURE_AVX512BW)) /* kmovq */
+ static_branch_enable(&chacha_use_avx512vl);
+ }
+- return crypto_register_skciphers(algs, ARRAY_SIZE(algs));
++ return IS_REACHABLE(CONFIG_CRYPTO_BLKCIPHER) ?
++ crypto_register_skciphers(algs, ARRAY_SIZE(algs)) : 0;
+ }
+
+ static void __exit chacha_simd_mod_fini(void)
+ {
+- if (boot_cpu_has(X86_FEATURE_SSSE3))
++ if (IS_REACHABLE(CONFIG_CRYPTO_BLKCIPHER) && boot_cpu_has(X86_FEATURE_SSSE3))
+ crypto_unregister_skciphers(algs, ARRAY_SIZE(algs));
+ }
+
+--- a/arch/x86/crypto/curve25519-x86_64.c
++++ b/arch/x86/crypto/curve25519-x86_64.c
+@@ -2457,13 +2457,14 @@ static int __init curve25519_mod_init(vo
+ static_branch_enable(&curve25519_use_adx);
+ else
+ return 0;
+- return crypto_register_kpp(&curve25519_alg);
++ return IS_REACHABLE(CONFIG_CRYPTO_KPP) ?
++ crypto_register_kpp(&curve25519_alg) : 0;
+ }
+
+ static void __exit curve25519_mod_exit(void)
+ {
+- if (boot_cpu_has(X86_FEATURE_BMI2) ||
+- boot_cpu_has(X86_FEATURE_ADX))
++ if (IS_REACHABLE(CONFIG_CRYPTO_KPP) &&
++ (boot_cpu_has(X86_FEATURE_BMI2) || boot_cpu_has(X86_FEATURE_ADX)))
+ crypto_unregister_kpp(&curve25519_alg);
+ }
+
+--- a/arch/x86/crypto/poly1305_glue.c
++++ b/arch/x86/crypto/poly1305_glue.c
+@@ -224,12 +224,13 @@ static int __init poly1305_simd_mod_init
+ cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL))
+ static_branch_enable(&poly1305_use_avx2);
+
+- return crypto_register_shash(&alg);
++ return IS_REACHABLE(CONFIG_CRYPTO_HASH) ? crypto_register_shash(&alg) : 0;
+ }
+
+ static void __exit poly1305_simd_mod_exit(void)
+ {
+- crypto_unregister_shash(&alg);
++ if (IS_REACHABLE(CONFIG_CRYPTO_HASH))
++ crypto_unregister_shash(&alg);
+ }
+
+ module_init(poly1305_simd_mod_init);