1 From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
2 From: Ard Biesheuvel <ardb@kernel.org>
3 Date: Fri, 8 Nov 2019 13:22:40 +0100
4 Subject: [PATCH] crypto: lib/chacha20poly1305 - reimplement crypt_from_sg()
7 commit d95312a3ccc0cd544d374be2fc45aeaa803e5fd9 upstream.
9 Reimplement the library routines to perform chacha20poly1305 en/decryption
10 on scatterlists, without [ab]using the [deprecated] blkcipher interface,
11 which is rather heavyweight and does things we don't really need.
13 Instead, we use the sg_miter API in a novel and clever way, to iterate
14 over the scatterlist in-place (i.e., source == destination, which is the
15 only way this library is expected to be used). That way, we don't have to
16 iterate over two scatterlists in parallel.
18 Another optimization is that, instead of relying on the blkcipher walker
19 to present the input in suitable chunks, we recognize that ChaCha is a
20 streamcipher, and so we can simply deal with partial blocks by keeping a
21 block of cipherstream on the stack and use crypto_xor() to mix it with the in/output.
24 Finally, we omit the scatterwalk_map_and_copy() call if the last element of
25 the scatterlist covers the MAC as well (which is the common case),
26 avoiding the need to walk the scatterlist and kmap() the page twice.
28 Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
29 Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
30 Signed-off-by: Jason A. Donenfeld <Jason@zx2c4.com>
32 include/crypto/chacha20poly1305.h | 11 ++
33 lib/crypto/chacha20poly1305-selftest.c | 45 ++++++++
34 lib/crypto/chacha20poly1305.c | 150 +++++++++++++++++++++++++
35 3 files changed, 206 insertions(+)
37 --- a/include/crypto/chacha20poly1305.h
38 +++ b/include/crypto/chacha20poly1305.h
40 #define __CHACHA20POLY1305_H
42 #include <linux/types.h>
43 +#include <linux/scatterlist.h>
45 enum chacha20poly1305_lengths {
46 XCHACHA20POLY1305_NONCE_SIZE = 24,
47 @@ -34,4 +35,14 @@ bool __must_check xchacha20poly1305_decr
48 const size_t ad_len, const u8 nonce[XCHACHA20POLY1305_NONCE_SIZE],
49 const u8 key[CHACHA20POLY1305_KEY_SIZE]);
51 +bool chacha20poly1305_encrypt_sg_inplace(struct scatterlist *src, size_t src_len,
52 + const u8 *ad, const size_t ad_len,
54 + const u8 key[CHACHA20POLY1305_KEY_SIZE]);
56 +bool chacha20poly1305_decrypt_sg_inplace(struct scatterlist *src, size_t src_len,
57 + const u8 *ad, const size_t ad_len,
59 + const u8 key[CHACHA20POLY1305_KEY_SIZE]);
61 #endif /* __CHACHA20POLY1305_H */
62 --- a/lib/crypto/chacha20poly1305-selftest.c
63 +++ b/lib/crypto/chacha20poly1305-selftest.c
64 @@ -7250,6 +7250,7 @@ bool __init chacha20poly1305_selftest(vo
65 enum { MAXIMUM_TEST_BUFFER_LEN = 1UL << 12 };
67 u8 *computed_output = NULL, *heap_src = NULL;
68 + struct scatterlist sg_src;
69 bool success = true, ret;
71 heap_src = kmalloc(MAXIMUM_TEST_BUFFER_LEN, GFP_KERNEL);
72 @@ -7280,6 +7281,29 @@ bool __init chacha20poly1305_selftest(vo
76 + for (i = 0; i < ARRAY_SIZE(chacha20poly1305_enc_vectors); ++i) {
77 + if (chacha20poly1305_enc_vectors[i].nlen != 8)
79 + memcpy(heap_src, chacha20poly1305_enc_vectors[i].input,
80 + chacha20poly1305_enc_vectors[i].ilen);
81 + sg_init_one(&sg_src, heap_src,
82 + chacha20poly1305_enc_vectors[i].ilen + POLY1305_DIGEST_SIZE);
83 + chacha20poly1305_encrypt_sg_inplace(&sg_src,
84 + chacha20poly1305_enc_vectors[i].ilen,
85 + chacha20poly1305_enc_vectors[i].assoc,
86 + chacha20poly1305_enc_vectors[i].alen,
87 + get_unaligned_le64(chacha20poly1305_enc_vectors[i].nonce),
88 + chacha20poly1305_enc_vectors[i].key);
89 + if (memcmp(heap_src,
90 + chacha20poly1305_enc_vectors[i].output,
91 + chacha20poly1305_enc_vectors[i].ilen +
92 + POLY1305_DIGEST_SIZE)) {
93 + pr_err("chacha20poly1305 sg encryption self-test %zu: FAIL\n",
99 for (i = 0; i < ARRAY_SIZE(chacha20poly1305_dec_vectors); ++i) {
100 memset(computed_output, 0, MAXIMUM_TEST_BUFFER_LEN);
101 ret = chacha20poly1305_decrypt(computed_output,
102 @@ -7301,6 +7325,27 @@ bool __init chacha20poly1305_selftest(vo
106 + for (i = 0; i < ARRAY_SIZE(chacha20poly1305_dec_vectors); ++i) {
107 + memcpy(heap_src, chacha20poly1305_dec_vectors[i].input,
108 + chacha20poly1305_dec_vectors[i].ilen);
109 + sg_init_one(&sg_src, heap_src,
110 + chacha20poly1305_dec_vectors[i].ilen);
111 + ret = chacha20poly1305_decrypt_sg_inplace(&sg_src,
112 + chacha20poly1305_dec_vectors[i].ilen,
113 + chacha20poly1305_dec_vectors[i].assoc,
114 + chacha20poly1305_dec_vectors[i].alen,
115 + get_unaligned_le64(chacha20poly1305_dec_vectors[i].nonce),
116 + chacha20poly1305_dec_vectors[i].key);
117 + if (!decryption_success(ret,
118 + chacha20poly1305_dec_vectors[i].failure,
119 + memcmp(heap_src, chacha20poly1305_dec_vectors[i].output,
120 + chacha20poly1305_dec_vectors[i].ilen -
121 + POLY1305_DIGEST_SIZE))) {
122 + pr_err("chacha20poly1305 sg decryption self-test %zu: FAIL\n",
128 for (i = 0; i < ARRAY_SIZE(xchacha20poly1305_enc_vectors); ++i) {
129 memset(computed_output, 0, MAXIMUM_TEST_BUFFER_LEN);
130 --- a/lib/crypto/chacha20poly1305.c
131 +++ b/lib/crypto/chacha20poly1305.c
133 #include <crypto/chacha20poly1305.h>
134 #include <crypto/chacha.h>
135 #include <crypto/poly1305.h>
136 +#include <crypto/scatterwalk.h>
138 #include <asm/unaligned.h>
139 #include <linux/kernel.h>
140 @@ -205,6 +206,155 @@ bool xchacha20poly1305_decrypt(u8 *dst,
142 EXPORT_SYMBOL(xchacha20poly1305_decrypt);
145 +bool chacha20poly1305_crypt_sg_inplace(struct scatterlist *src,
146 + const size_t src_len,
147 + const u8 *ad, const size_t ad_len,
149 + const u8 key[CHACHA20POLY1305_KEY_SIZE],
152 + const u8 *pad0 = page_address(ZERO_PAGE(0));
153 + struct poly1305_desc_ctx poly1305_state;
154 + u32 chacha_state[CHACHA_STATE_WORDS];
155 + struct sg_mapping_iter miter;
156 + size_t partial = 0;
157 + unsigned int flags;
162 + u32 k[CHACHA_KEY_WORDS];
165 + u8 block0[POLY1305_KEY_SIZE];
166 + u8 chacha_stream[CHACHA_BLOCK_SIZE];
168 + u8 mac[2][POLY1305_DIGEST_SIZE];
173 + chacha_load_key(b.k, key);
176 + b.iv[1] = cpu_to_le64(nonce);
178 + chacha_init(chacha_state, b.k, (u8 *)b.iv);
179 + chacha_crypt(chacha_state, b.block0, pad0, sizeof(b.block0), 20);
180 + poly1305_init(&poly1305_state, b.block0);
182 + if (unlikely(ad_len)) {
183 + poly1305_update(&poly1305_state, ad, ad_len);
185 + poly1305_update(&poly1305_state, pad0, 0x10 - (ad_len & 0xf));
188 + flags = SG_MITER_TO_SG;
189 + if (!preemptible())
190 + flags |= SG_MITER_ATOMIC;
192 + sg_miter_start(&miter, src, sg_nents(src), flags);
194 + for (sl = src_len; sl > 0 && sg_miter_next(&miter); sl -= miter.length) {
195 + u8 *addr = miter.addr;
196 + size_t length = min_t(size_t, sl, miter.length);
199 + poly1305_update(&poly1305_state, addr, length);
201 + if (unlikely(partial)) {
202 + size_t l = min(length, CHACHA_BLOCK_SIZE - partial);
204 + crypto_xor(addr, b.chacha_stream + partial, l);
205 + partial = (partial + l) & (CHACHA_BLOCK_SIZE - 1);
211 + if (likely(length >= CHACHA_BLOCK_SIZE || length == sl)) {
214 + if (unlikely(length < sl))
215 + l &= ~(CHACHA_BLOCK_SIZE - 1);
216 + chacha_crypt(chacha_state, addr, addr, l, 20);
221 + if (unlikely(length > 0)) {
222 + chacha_crypt(chacha_state, b.chacha_stream, pad0,
223 + CHACHA_BLOCK_SIZE, 20);
224 + crypto_xor(addr, b.chacha_stream, length);
229 + poly1305_update(&poly1305_state, miter.addr,
230 + min_t(size_t, sl, miter.length));
234 + poly1305_update(&poly1305_state, pad0, 0x10 - (src_len & 0xf));
236 + b.lens[0] = cpu_to_le64(ad_len);
237 + b.lens[1] = cpu_to_le64(src_len);
238 + poly1305_update(&poly1305_state, (u8 *)b.lens, sizeof(b.lens));
240 + if (likely(sl <= -POLY1305_DIGEST_SIZE)) {
242 + poly1305_final(&poly1305_state,
243 + miter.addr + miter.length + sl);
246 + poly1305_final(&poly1305_state, b.mac[0]);
247 + ret = !crypto_memneq(b.mac[0],
248 + miter.addr + miter.length + sl,
249 + POLY1305_DIGEST_SIZE);
253 + sg_miter_stop(&miter);
255 + if (unlikely(sl > -POLY1305_DIGEST_SIZE)) {
256 + poly1305_final(&poly1305_state, b.mac[1]);
257 + scatterwalk_map_and_copy(b.mac[encrypt], src, src_len,
258 + sizeof(b.mac[1]), encrypt);
260 + !crypto_memneq(b.mac[0], b.mac[1], POLY1305_DIGEST_SIZE);
263 + memzero_explicit(chacha_state, sizeof(chacha_state));
264 + memzero_explicit(&b, sizeof(b));
269 +bool chacha20poly1305_encrypt_sg_inplace(struct scatterlist *src, size_t src_len,
270 + const u8 *ad, const size_t ad_len,
272 + const u8 key[CHACHA20POLY1305_KEY_SIZE])
274 + return chacha20poly1305_crypt_sg_inplace(src, src_len, ad, ad_len,
277 +EXPORT_SYMBOL(chacha20poly1305_encrypt_sg_inplace);
279 +bool chacha20poly1305_decrypt_sg_inplace(struct scatterlist *src, size_t src_len,
280 + const u8 *ad, const size_t ad_len,
282 + const u8 key[CHACHA20POLY1305_KEY_SIZE])
284 + if (unlikely(src_len < POLY1305_DIGEST_SIZE))
287 + return chacha20poly1305_crypt_sg_inplace(src,
288 + src_len - POLY1305_DIGEST_SIZE,
289 + ad, ad_len, nonce, key, 0);
291 +EXPORT_SYMBOL(chacha20poly1305_decrypt_sg_inplace);
293 static int __init mod_init(void)
295 if (!IS_ENABLED(CONFIG_CRYPTO_MANAGER_DISABLE_TESTS) &&