From f23fdc58a0a08afada84fe4910279ec3d8d085e7 Mon Sep 17 00:00:00 2001
From: "Jason A. Donenfeld" <Jason@zx2c4.com>
Date: Mon, 25 Nov 2019 11:31:12 +0100
Subject: [PATCH 037/124] crypto: arch - conditionalize crypto api in arch glue
 for lib code

commit 8394bfec51e0e565556101bcc4e2fe7551104cd8 upstream.

For glue code that's used by Zinc, the actual Crypto API functions might
not necessarily exist, and don't need to exist either. Before this
patch, there are valid build configurations that lead to an unbuildable
kernel. This fixes it to conditionalize those symbols on the existence
of the proper config entry.
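
A minimal sketch of the pattern applied in the hunks below (illustration
only, not part of this patch): the library code in each glue file stays
unconditional, and only the Crypto API registration is guarded. The
"example_alg" shash and the example_* names are placeholders.

/* Hypothetical glue module illustrating the IS_REACHABLE() guard. */
#include <linux/module.h>
#include <crypto/internal/hash.h>

static struct shash_alg example_alg;	/* placeholder algorithm definition */

static int __init example_mod_init(void)
{
	/* Register with the Crypto API only if CONFIG_CRYPTO_HASH is reachable. */
	return IS_REACHABLE(CONFIG_CRYPTO_HASH) ?
	       crypto_register_shash(&example_alg) : 0;
}

static void __exit example_mod_exit(void)
{
	/* Mirror the guard on unregistration so init and exit stay symmetric. */
	if (IS_REACHABLE(CONFIG_CRYPTO_HASH))
		crypto_unregister_shash(&example_alg);
}

module_init(example_mod_init);
module_exit(example_mod_exit);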

Signed-off-by: Jason A. Donenfeld <Jason@zx2c4.com>
Acked-by: Ard Biesheuvel <ardb@kernel.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Signed-off-by: Jason A. Donenfeld <Jason@zx2c4.com>
---
 arch/arm/crypto/chacha-glue.c        | 26 ++++++++++++++++----------
 arch/arm/crypto/curve25519-glue.c    |  5 +++--
 arch/arm/crypto/poly1305-glue.c      |  9 ++++++---
 arch/arm64/crypto/chacha-neon-glue.c |  5 +++--
 arch/arm64/crypto/poly1305-glue.c    |  5 +++--
 arch/mips/crypto/chacha-glue.c       |  6 ++++--
 arch/mips/crypto/poly1305-glue.c     |  6 ++++--
 arch/x86/crypto/blake2s-glue.c       |  6 ++++--
 arch/x86/crypto/chacha_glue.c        |  5 +++--
 arch/x86/crypto/curve25519-x86_64.c  |  7 ++++---
 arch/x86/crypto/poly1305_glue.c      |  5 +++--
 11 files changed, 53 insertions(+), 32 deletions(-)

--- a/arch/arm/crypto/chacha-glue.c
+++ b/arch/arm/crypto/chacha-glue.c
@@ -286,11 +286,13 @@ static struct skcipher_alg neon_algs[] =
 
 static int __init chacha_simd_mod_init(void)
 {
-	int err;
+	int err = 0;
 
-	err = crypto_register_skciphers(arm_algs, ARRAY_SIZE(arm_algs));
-	if (err)
-		return err;
+	if (IS_REACHABLE(CONFIG_CRYPTO_BLKCIPHER)) {
+		err = crypto_register_skciphers(arm_algs, ARRAY_SIZE(arm_algs));
+		if (err)
+			return err;
+	}
 
 	if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && (elf_hwcap & HWCAP_NEON)) {
 		int i;
@@ -310,18 +312,22 @@ static int __init chacha_simd_mod_init(v
 			static_branch_enable(&use_neon);
 		}
 
-		err = crypto_register_skciphers(neon_algs, ARRAY_SIZE(neon_algs));
-		if (err)
-			crypto_unregister_skciphers(arm_algs, ARRAY_SIZE(arm_algs));
+		if (IS_REACHABLE(CONFIG_CRYPTO_BLKCIPHER)) {
+			err = crypto_register_skciphers(neon_algs, ARRAY_SIZE(neon_algs));
+			if (err)
+				crypto_unregister_skciphers(arm_algs, ARRAY_SIZE(arm_algs));
+		}
 	}
 	return err;
 }
 
 static void __exit chacha_simd_mod_fini(void)
 {
-	crypto_unregister_skciphers(arm_algs, ARRAY_SIZE(arm_algs));
-	if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && (elf_hwcap & HWCAP_NEON))
-		crypto_unregister_skciphers(neon_algs, ARRAY_SIZE(neon_algs));
+	if (IS_REACHABLE(CONFIG_CRYPTO_BLKCIPHER)) {
+		crypto_unregister_skciphers(arm_algs, ARRAY_SIZE(arm_algs));
+		if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && (elf_hwcap & HWCAP_NEON))
+			crypto_unregister_skciphers(neon_algs, ARRAY_SIZE(neon_algs));
+	}
 }
 
 module_init(chacha_simd_mod_init);
--- a/arch/arm/crypto/curve25519-glue.c
+++ b/arch/arm/crypto/curve25519-glue.c
@@ -108,14 +108,15 @@ static int __init mod_init(void)
 {
 	if (elf_hwcap & HWCAP_NEON) {
 		static_branch_enable(&have_neon);
-		return crypto_register_kpp(&curve25519_alg);
+		return IS_REACHABLE(CONFIG_CRYPTO_KPP) ?
+			crypto_register_kpp(&curve25519_alg) : 0;
 	}
 	return 0;
 }
 
 static void __exit mod_exit(void)
 {
-	if (elf_hwcap & HWCAP_NEON)
+	if (IS_REACHABLE(CONFIG_CRYPTO_KPP) && elf_hwcap & HWCAP_NEON)
 		crypto_unregister_kpp(&curve25519_alg);
 }
 
--- a/arch/arm/crypto/poly1305-glue.c
+++ b/arch/arm/crypto/poly1305-glue.c
@@ -249,16 +249,19 @@ static int __init arm_poly1305_mod_init(
 	if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) &&
 	    (elf_hwcap & HWCAP_NEON))
 		static_branch_enable(&have_neon);
-	else
+	else if (IS_REACHABLE(CONFIG_CRYPTO_HASH))
 		/* register only the first entry */
 		return crypto_register_shash(&arm_poly1305_algs[0]);
 
-	return crypto_register_shashes(arm_poly1305_algs,
-				       ARRAY_SIZE(arm_poly1305_algs));
+	return IS_REACHABLE(CONFIG_CRYPTO_HASH) ?
+		crypto_register_shashes(arm_poly1305_algs,
+					ARRAY_SIZE(arm_poly1305_algs)) : 0;
 }
 
 static void __exit arm_poly1305_mod_exit(void)
 {
+	if (!IS_REACHABLE(CONFIG_CRYPTO_HASH))
+		return;
 	if (!static_branch_likely(&have_neon)) {
 		crypto_unregister_shash(&arm_poly1305_algs[0]);
 		return;
--- a/arch/arm64/crypto/chacha-neon-glue.c
+++ b/arch/arm64/crypto/chacha-neon-glue.c
@@ -211,12 +211,13 @@ static int __init chacha_simd_mod_init(v
 
 	static_branch_enable(&have_neon);
 
-	return crypto_register_skciphers(algs, ARRAY_SIZE(algs));
+	return IS_REACHABLE(CONFIG_CRYPTO_BLKCIPHER) ?
+		crypto_register_skciphers(algs, ARRAY_SIZE(algs)) : 0;
 }
 
 static void __exit chacha_simd_mod_fini(void)
 {
-	if (cpu_have_named_feature(ASIMD))
+	if (IS_REACHABLE(CONFIG_CRYPTO_BLKCIPHER) && cpu_have_named_feature(ASIMD))
 		crypto_unregister_skciphers(algs, ARRAY_SIZE(algs));
 }
 
--- a/arch/arm64/crypto/poly1305-glue.c
+++ b/arch/arm64/crypto/poly1305-glue.c
@@ -220,12 +220,13 @@ static int __init neon_poly1305_mod_init
 
 	static_branch_enable(&have_neon);
 
-	return crypto_register_shash(&neon_poly1305_alg);
+	return IS_REACHABLE(CONFIG_CRYPTO_HASH) ?
+		crypto_register_shash(&neon_poly1305_alg) : 0;
 }
 
 static void __exit neon_poly1305_mod_exit(void)
 {
-	if (cpu_have_named_feature(ASIMD))
+	if (IS_REACHABLE(CONFIG_CRYPTO_HASH) && cpu_have_named_feature(ASIMD))
 		crypto_unregister_shash(&neon_poly1305_alg);
 }
 
--- a/arch/mips/crypto/chacha-glue.c
+++ b/arch/mips/crypto/chacha-glue.c
@@ -128,12 +128,14 @@ static struct skcipher_alg algs[] = {
 
 static int __init chacha_simd_mod_init(void)
 {
-	return crypto_register_skciphers(algs, ARRAY_SIZE(algs));
+	return IS_REACHABLE(CONFIG_CRYPTO_BLKCIPHER) ?
+		crypto_register_skciphers(algs, ARRAY_SIZE(algs)) : 0;
 }
 
 static void __exit chacha_simd_mod_fini(void)
 {
-	crypto_unregister_skciphers(algs, ARRAY_SIZE(algs));
+	if (IS_REACHABLE(CONFIG_CRYPTO_BLKCIPHER))
+		crypto_unregister_skciphers(algs, ARRAY_SIZE(algs));
 }
 
 module_init(chacha_simd_mod_init);
--- a/arch/mips/crypto/poly1305-glue.c
+++ b/arch/mips/crypto/poly1305-glue.c
@@ -187,12 +187,14 @@ static struct shash_alg mips_poly1305_al
 
 static int __init mips_poly1305_mod_init(void)
 {
-	return crypto_register_shash(&mips_poly1305_alg);
+	return IS_REACHABLE(CONFIG_CRYPTO_HASH) ?
+		crypto_register_shash(&mips_poly1305_alg) : 0;
 }
 
 static void __exit mips_poly1305_mod_exit(void)
 {
-	crypto_unregister_shash(&mips_poly1305_alg);
+	if (IS_REACHABLE(CONFIG_CRYPTO_HASH))
+		crypto_unregister_shash(&mips_poly1305_alg);
 }
 
 module_init(mips_poly1305_mod_init);
--- a/arch/x86/crypto/blake2s-glue.c
+++ b/arch/x86/crypto/blake2s-glue.c
@@ -210,12 +210,14 @@ static int __init blake2s_mod_init(void)
 			      XFEATURE_MASK_AVX512, NULL))
 		static_branch_enable(&blake2s_use_avx512);
 
-	return crypto_register_shashes(blake2s_algs, ARRAY_SIZE(blake2s_algs));
+	return IS_REACHABLE(CONFIG_CRYPTO_HASH) ?
+		crypto_register_shashes(blake2s_algs,
+					ARRAY_SIZE(blake2s_algs)) : 0;
 }
 
 static void __exit blake2s_mod_exit(void)
 {
-	if (boot_cpu_has(X86_FEATURE_SSSE3))
+	if (IS_REACHABLE(CONFIG_CRYPTO_HASH) && boot_cpu_has(X86_FEATURE_SSSE3))
 		crypto_unregister_shashes(blake2s_algs, ARRAY_SIZE(blake2s_algs));
 }
 
--- a/arch/x86/crypto/chacha_glue.c
+++ b/arch/x86/crypto/chacha_glue.c
@@ -299,12 +299,13 @@ static int __init chacha_simd_mod_init(v
 		    boot_cpu_has(X86_FEATURE_AVX512BW)) /* kmovq */
 			static_branch_enable(&chacha_use_avx512vl);
 	}
-	return crypto_register_skciphers(algs, ARRAY_SIZE(algs));
+	return IS_REACHABLE(CONFIG_CRYPTO_BLKCIPHER) ?
+		crypto_register_skciphers(algs, ARRAY_SIZE(algs)) : 0;
 }
 
 static void __exit chacha_simd_mod_fini(void)
 {
-	if (boot_cpu_has(X86_FEATURE_SSSE3))
+	if (IS_REACHABLE(CONFIG_CRYPTO_BLKCIPHER) && boot_cpu_has(X86_FEATURE_SSSE3))
 		crypto_unregister_skciphers(algs, ARRAY_SIZE(algs));
 }
 
--- a/arch/x86/crypto/curve25519-x86_64.c
+++ b/arch/x86/crypto/curve25519-x86_64.c
@@ -2457,13 +2457,14 @@ static int __init curve25519_mod_init(vo
 		static_branch_enable(&curve25519_use_adx);
 	else
 		return 0;
-	return crypto_register_kpp(&curve25519_alg);
+	return IS_REACHABLE(CONFIG_CRYPTO_KPP) ?
+		crypto_register_kpp(&curve25519_alg) : 0;
 }
 
 static void __exit curve25519_mod_exit(void)
 {
-	if (boot_cpu_has(X86_FEATURE_BMI2) ||
-	    boot_cpu_has(X86_FEATURE_ADX))
+	if (IS_REACHABLE(CONFIG_CRYPTO_KPP) &&
+	    (boot_cpu_has(X86_FEATURE_BMI2) || boot_cpu_has(X86_FEATURE_ADX)))
 		crypto_unregister_kpp(&curve25519_alg);
 }
 
--- a/arch/x86/crypto/poly1305_glue.c
+++ b/arch/x86/crypto/poly1305_glue.c
@@ -224,12 +224,13 @@ static int __init poly1305_simd_mod_init
 	    cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL))
 		static_branch_enable(&poly1305_use_avx2);
 
-	return crypto_register_shash(&alg);
+	return IS_REACHABLE(CONFIG_CRYPTO_HASH) ? crypto_register_shash(&alg) : 0;
 }
 
 static void __exit poly1305_simd_mod_exit(void)
 {
-	crypto_unregister_shash(&alg);
+	if (IS_REACHABLE(CONFIG_CRYPTO_HASH))
+		crypto_unregister_shash(&alg);
 }
 
 module_init(poly1305_simd_mod_init);