Diffstat (limited to 'target/linux/generic/backport-5.4/080-wireguard-0037-crypto-arch-conditionalize-crypto-api-in-arch-glue-f.patch')
-rw-r--r--  target/linux/generic/backport-5.4/080-wireguard-0037-crypto-arch-conditionalize-crypto-api-in-arch-glue-f.patch  275
1 file changed, 275 insertions, 0 deletions
diff --git a/target/linux/generic/backport-5.4/080-wireguard-0037-crypto-arch-conditionalize-crypto-api-in-arch-glue-f.patch b/target/linux/generic/backport-5.4/080-wireguard-0037-crypto-arch-conditionalize-crypto-api-in-arch-glue-f.patch
new file mode 100644
index 0000000000..ab04cecf05
--- /dev/null
+++ b/target/linux/generic/backport-5.4/080-wireguard-0037-crypto-arch-conditionalize-crypto-api-in-arch-glue-f.patch
@@ -0,0 +1,275 @@
+From f23fdc58a0a08afada84fe4910279ec3d8d085e7 Mon Sep 17 00:00:00 2001
+From: "Jason A. Donenfeld" <Jason@zx2c4.com>
+Date: Mon, 25 Nov 2019 11:31:12 +0100
+Subject: [PATCH 037/124] crypto: arch - conditionalize crypto api in arch glue
+ for lib code
+
+commit 8394bfec51e0e565556101bcc4e2fe7551104cd8 upstream.
+
+For glue code that's used by Zinc, the actual Crypto API functions might
+not necessarily exist, and don't need to exist either. Before this
+patch, there are valid build configurations that lead to an unbuildable
+kernel. This fixes it to conditionalize those symbols on the existence
+of the proper config entry.
+
+Signed-off-by: Jason A. Donenfeld <Jason@zx2c4.com>
+Acked-by: Ard Biesheuvel <ardb@kernel.org>
+Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
+Signed-off-by: Jason A. Donenfeld <Jason@zx2c4.com>
+---
+ arch/arm/crypto/chacha-glue.c | 26 ++++++++++++++++----------
+ arch/arm/crypto/curve25519-glue.c | 5 +++--
+ arch/arm/crypto/poly1305-glue.c | 9 ++++++---
+ arch/arm64/crypto/chacha-neon-glue.c | 5 +++--
+ arch/arm64/crypto/poly1305-glue.c | 5 +++--
+ arch/mips/crypto/chacha-glue.c | 6 ++++--
+ arch/mips/crypto/poly1305-glue.c | 6 ++++--
+ arch/x86/crypto/blake2s-glue.c | 6 ++++--
+ arch/x86/crypto/chacha_glue.c | 5 +++--
+ arch/x86/crypto/curve25519-x86_64.c | 7 ++++---
+ arch/x86/crypto/poly1305_glue.c | 5 +++--
+ 11 files changed, 53 insertions(+), 32 deletions(-)
+
+--- a/arch/arm/crypto/chacha-glue.c
++++ b/arch/arm/crypto/chacha-glue.c
+@@ -286,11 +286,13 @@ static struct skcipher_alg neon_algs[] =
+
+ static int __init chacha_simd_mod_init(void)
+ {
+- int err;
++ int err = 0;
+
+- err = crypto_register_skciphers(arm_algs, ARRAY_SIZE(arm_algs));
+- if (err)
+- return err;
++ if (IS_REACHABLE(CONFIG_CRYPTO_BLKCIPHER)) {
++ err = crypto_register_skciphers(arm_algs, ARRAY_SIZE(arm_algs));
++ if (err)
++ return err;
++ }
+
+ if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && (elf_hwcap & HWCAP_NEON)) {
+ int i;
+@@ -310,18 +312,22 @@ static int __init chacha_simd_mod_init(v
+ static_branch_enable(&use_neon);
+ }
+
+- err = crypto_register_skciphers(neon_algs, ARRAY_SIZE(neon_algs));
+- if (err)
+- crypto_unregister_skciphers(arm_algs, ARRAY_SIZE(arm_algs));
++ if (IS_REACHABLE(CONFIG_CRYPTO_BLKCIPHER)) {
++ err = crypto_register_skciphers(neon_algs, ARRAY_SIZE(neon_algs));
++ if (err)
++ crypto_unregister_skciphers(arm_algs, ARRAY_SIZE(arm_algs));
++ }
+ }
+ return err;
+ }
+
+ static void __exit chacha_simd_mod_fini(void)
+ {
+- crypto_unregister_skciphers(arm_algs, ARRAY_SIZE(arm_algs));
+- if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && (elf_hwcap & HWCAP_NEON))
+- crypto_unregister_skciphers(neon_algs, ARRAY_SIZE(neon_algs));
++ if (IS_REACHABLE(CONFIG_CRYPTO_BLKCIPHER)) {
++ crypto_unregister_skciphers(arm_algs, ARRAY_SIZE(arm_algs));
++ if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && (elf_hwcap & HWCAP_NEON))
++ crypto_unregister_skciphers(neon_algs, ARRAY_SIZE(neon_algs));
++ }
+ }
+
+ module_init(chacha_simd_mod_init);
+--- a/arch/arm/crypto/curve25519-glue.c
++++ b/arch/arm/crypto/curve25519-glue.c
+@@ -108,14 +108,15 @@ static int __init mod_init(void)
+ {
+ if (elf_hwcap & HWCAP_NEON) {
+ static_branch_enable(&have_neon);
+- return crypto_register_kpp(&curve25519_alg);
++ return IS_REACHABLE(CONFIG_CRYPTO_KPP) ?
++ crypto_register_kpp(&curve25519_alg) : 0;
+ }
+ return 0;
+ }
+
+ static void __exit mod_exit(void)
+ {
+- if (elf_hwcap & HWCAP_NEON)
++ if (IS_REACHABLE(CONFIG_CRYPTO_KPP) && elf_hwcap & HWCAP_NEON)
+ crypto_unregister_kpp(&curve25519_alg);
+ }
+
+--- a/arch/arm/crypto/poly1305-glue.c
++++ b/arch/arm/crypto/poly1305-glue.c
+@@ -249,16 +249,19 @@ static int __init arm_poly1305_mod_init(
+ if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) &&
+ (elf_hwcap & HWCAP_NEON))
+ static_branch_enable(&have_neon);
+- else
++ else if (IS_REACHABLE(CONFIG_CRYPTO_HASH))
+ /* register only the first entry */
+ return crypto_register_shash(&arm_poly1305_algs[0]);
+
+- return crypto_register_shashes(arm_poly1305_algs,
+- ARRAY_SIZE(arm_poly1305_algs));
++ return IS_REACHABLE(CONFIG_CRYPTO_HASH) ?
++ crypto_register_shashes(arm_poly1305_algs,
++ ARRAY_SIZE(arm_poly1305_algs)) : 0;
+ }
+
+ static void __exit arm_poly1305_mod_exit(void)
+ {
++ if (!IS_REACHABLE(CONFIG_CRYPTO_HASH))
++ return;
+ if (!static_branch_likely(&have_neon)) {
+ crypto_unregister_shash(&arm_poly1305_algs[0]);
+ return;
+--- a/arch/arm64/crypto/chacha-neon-glue.c
++++ b/arch/arm64/crypto/chacha-neon-glue.c
+@@ -211,12 +211,13 @@ static int __init chacha_simd_mod_init(v
+
+ static_branch_enable(&have_neon);
+
+- return crypto_register_skciphers(algs, ARRAY_SIZE(algs));
++ return IS_REACHABLE(CONFIG_CRYPTO_BLKCIPHER) ?
++ crypto_register_skciphers(algs, ARRAY_SIZE(algs)) : 0;
+ }
+
+ static void __exit chacha_simd_mod_fini(void)
+ {
+- if (cpu_have_named_feature(ASIMD))
++ if (IS_REACHABLE(CONFIG_CRYPTO_BLKCIPHER) && cpu_have_named_feature(ASIMD))
+ crypto_unregister_skciphers(algs, ARRAY_SIZE(algs));
+ }
+
+--- a/arch/arm64/crypto/poly1305-glue.c
++++ b/arch/arm64/crypto/poly1305-glue.c
+@@ -220,12 +220,13 @@ static int __init neon_poly1305_mod_init
+
+ static_branch_enable(&have_neon);
+
+- return crypto_register_shash(&neon_poly1305_alg);
++ return IS_REACHABLE(CONFIG_CRYPTO_HASH) ?
++ crypto_register_shash(&neon_poly1305_alg) : 0;
+ }
+
+ static void __exit neon_poly1305_mod_exit(void)
+ {
+- if (cpu_have_named_feature(ASIMD))
++ if (IS_REACHABLE(CONFIG_CRYPTO_HASH) && cpu_have_named_feature(ASIMD))
+ crypto_unregister_shash(&neon_poly1305_alg);
+ }
+
+--- a/arch/mips/crypto/chacha-glue.c
++++ b/arch/mips/crypto/chacha-glue.c
+@@ -128,12 +128,14 @@ static struct skcipher_alg algs[] = {
+
+ static int __init chacha_simd_mod_init(void)
+ {
+- return crypto_register_skciphers(algs, ARRAY_SIZE(algs));
++ return IS_REACHABLE(CONFIG_CRYPTO_BLKCIPHER) ?
++ crypto_register_skciphers(algs, ARRAY_SIZE(algs)) : 0;
+ }
+
+ static void __exit chacha_simd_mod_fini(void)
+ {
+- crypto_unregister_skciphers(algs, ARRAY_SIZE(algs));
++ if (IS_REACHABLE(CONFIG_CRYPTO_BLKCIPHER))
++ crypto_unregister_skciphers(algs, ARRAY_SIZE(algs));
+ }
+
+ module_init(chacha_simd_mod_init);
+--- a/arch/mips/crypto/poly1305-glue.c
++++ b/arch/mips/crypto/poly1305-glue.c
+@@ -187,12 +187,14 @@ static struct shash_alg mips_poly1305_al
+
+ static int __init mips_poly1305_mod_init(void)
+ {
+- return crypto_register_shash(&mips_poly1305_alg);
++ return IS_REACHABLE(CONFIG_CRYPTO_HASH) ?
++ crypto_register_shash(&mips_poly1305_alg) : 0;
+ }
+
+ static void __exit mips_poly1305_mod_exit(void)
+ {
+- crypto_unregister_shash(&mips_poly1305_alg);
++ if (IS_REACHABLE(CONFIG_CRYPTO_HASH))
++ crypto_unregister_shash(&mips_poly1305_alg);
+ }
+
+ module_init(mips_poly1305_mod_init);
+--- a/arch/x86/crypto/blake2s-glue.c
++++ b/arch/x86/crypto/blake2s-glue.c
+@@ -210,12 +210,14 @@ static int __init blake2s_mod_init(void)
+ XFEATURE_MASK_AVX512, NULL))
+ static_branch_enable(&blake2s_use_avx512);
+
+- return crypto_register_shashes(blake2s_algs, ARRAY_SIZE(blake2s_algs));
++ return IS_REACHABLE(CONFIG_CRYPTO_HASH) ?
++ crypto_register_shashes(blake2s_algs,
++ ARRAY_SIZE(blake2s_algs)) : 0;
+ }
+
+ static void __exit blake2s_mod_exit(void)
+ {
+- if (boot_cpu_has(X86_FEATURE_SSSE3))
++ if (IS_REACHABLE(CONFIG_CRYPTO_HASH) && boot_cpu_has(X86_FEATURE_SSSE3))
+ crypto_unregister_shashes(blake2s_algs, ARRAY_SIZE(blake2s_algs));
+ }
+
+--- a/arch/x86/crypto/chacha_glue.c
++++ b/arch/x86/crypto/chacha_glue.c
+@@ -299,12 +299,13 @@ static int __init chacha_simd_mod_init(v
+ boot_cpu_has(X86_FEATURE_AVX512BW)) /* kmovq */
+ static_branch_enable(&chacha_use_avx512vl);
+ }
+- return crypto_register_skciphers(algs, ARRAY_SIZE(algs));
++ return IS_REACHABLE(CONFIG_CRYPTO_BLKCIPHER) ?
++ crypto_register_skciphers(algs, ARRAY_SIZE(algs)) : 0;
+ }
+
+ static void __exit chacha_simd_mod_fini(void)
+ {
+- if (boot_cpu_has(X86_FEATURE_SSSE3))
++ if (IS_REACHABLE(CONFIG_CRYPTO_BLKCIPHER) && boot_cpu_has(X86_FEATURE_SSSE3))
+ crypto_unregister_skciphers(algs, ARRAY_SIZE(algs));
+ }
+
+--- a/arch/x86/crypto/curve25519-x86_64.c
++++ b/arch/x86/crypto/curve25519-x86_64.c
+@@ -2457,13 +2457,14 @@ static int __init curve25519_mod_init(vo
+ static_branch_enable(&curve25519_use_adx);
+ else
+ return 0;
+- return crypto_register_kpp(&curve25519_alg);
++ return IS_REACHABLE(CONFIG_CRYPTO_KPP) ?
++ crypto_register_kpp(&curve25519_alg) : 0;
+ }
+
+ static void __exit curve25519_mod_exit(void)
+ {
+- if (boot_cpu_has(X86_FEATURE_BMI2) ||
+- boot_cpu_has(X86_FEATURE_ADX))
++ if (IS_REACHABLE(CONFIG_CRYPTO_KPP) &&
++ (boot_cpu_has(X86_FEATURE_BMI2) || boot_cpu_has(X86_FEATURE_ADX)))
+ crypto_unregister_kpp(&curve25519_alg);
+ }
+
+--- a/arch/x86/crypto/poly1305_glue.c
++++ b/arch/x86/crypto/poly1305_glue.c
+@@ -224,12 +224,13 @@ static int __init poly1305_simd_mod_init
+ cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL))
+ static_branch_enable(&poly1305_use_avx2);
+
+- return crypto_register_shash(&alg);
++ return IS_REACHABLE(CONFIG_CRYPTO_HASH) ? crypto_register_shash(&alg) : 0;
+ }
+
+ static void __exit poly1305_simd_mod_exit(void)
+ {
+- crypto_unregister_shash(&alg);
++ if (IS_REACHABLE(CONFIG_CRYPTO_HASH))
++ crypto_unregister_shash(&alg);
+ }
+
+ module_init(poly1305_simd_mod_init);
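
The idiom applied in every hunk above is the same: guard Crypto API
registration with IS_REACHABLE(), which evaluates to 1 only when the
guarded option's code can actually be called from the current object
(the option is built-in, or both it and the caller are modules) and to
0 otherwise, so the register/unregister calls compile away entirely
when the Crypto API is configured out while the lib interface keeps
working. A minimal sketch of the pattern follows; the module and
algorithm names are hypothetical, not taken from the patch.

    #include <linux/kconfig.h>
    #include <linux/module.h>
    #include <crypto/internal/hash.h>

    /* Skeletal descriptor; a real driver fills in digest sizes and
     * callbacks. Present only so the sketch is self-contained. */
    static struct shash_alg example_alg;

    static int __init example_mod_init(void)
    {
            /* Register with the Crypto API only when CONFIG_CRYPTO_HASH
             * code is reachable from this object; otherwise succeed as
             * a lib-only module. */
            return IS_REACHABLE(CONFIG_CRYPTO_HASH) ?
                   crypto_register_shash(&example_alg) : 0;
    }

    static void __exit example_mod_exit(void)
    {
            /* Mirror the init-time condition so we never unregister an
             * algorithm that was never registered. */
            if (IS_REACHABLE(CONFIG_CRYPTO_HASH))
                    crypto_unregister_shash(&example_alg);
    }

    module_init(example_mod_init);
    module_exit(example_mod_exit);

Note the contrast with IS_ENABLED(), which the arm chacha hunk still
uses for its NEON checks: IS_ENABLED() only asks whether an option is
y or m, while IS_REACHABLE() additionally accounts for a built-in
caller being unable to link against a modular subsystem.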