1 From 6f71439c260ddd0f9a21fee3e34449fe9c017ab6 Mon Sep 17 00:00:00 2001
2 From: Ard Biesheuvel <ardb@kernel.org>
3 Date: Fri, 8 Nov 2019 13:22:08 +0100
4 Subject: [PATCH 002/124] crypto: chacha - move existing library code into
 lib/crypto
7 commit 5fb8ef25803ef33e2eb60b626435828b937bed75 upstream.
9 Currently, our generic ChaCha implementation consists of a permute
10 function in lib/chacha.c that operates on the 64-byte ChaCha state
11 directly [and which is always included into the core kernel since it
12 is used by the /dev/random driver], and the crypto API plumbing to
13 expose it as a skcipher.
15 In order to support in-kernel users that need the ChaCha streamcipher
16 but have no need [or tolerance] for going through the abstractions of
17 the crypto API, let's expose the streamcipher bits via a library API
18 as well, in a way that permits the implementation to be superseded by
19 an architecture specific one if provided.
21 So move the streamcipher code into a separate module in lib/crypto,
22 and expose the init() and crypt() routines to users of the library.
24 Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
25 Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
26 Signed-off-by: Jason A. Donenfeld <Jason@zx2c4.com>
28 arch/arm/crypto/chacha-neon-glue.c | 2 +-
29 arch/arm64/crypto/chacha-neon-glue.c | 2 +-
30 arch/x86/crypto/chacha_glue.c | 2 +-
32 crypto/chacha_generic.c | 60 ++--------------------
33 include/crypto/chacha.h | 77 ++++++++++++++++++++++------
34 include/crypto/internal/chacha.h | 53 +++++++++++++++++++
36 lib/crypto/Kconfig | 26 ++++++++++
37 lib/crypto/Makefile | 4 ++
38 lib/{ => crypto}/chacha.c | 20 ++++----
39 lib/crypto/libchacha.c | 35 +++++++++++++
40 12 files changed, 199 insertions(+), 86 deletions(-)
41 create mode 100644 include/crypto/internal/chacha.h
42 rename lib/{ => crypto}/chacha.c (88%)
43 create mode 100644 lib/crypto/libchacha.c
45 --- a/arch/arm/crypto/chacha-neon-glue.c
46 +++ b/arch/arm/crypto/chacha-neon-glue.c
50 #include <crypto/algapi.h>
51 -#include <crypto/chacha.h>
52 +#include <crypto/internal/chacha.h>
53 #include <crypto/internal/simd.h>
54 #include <crypto/internal/skcipher.h>
55 #include <linux/kernel.h>
56 --- a/arch/arm64/crypto/chacha-neon-glue.c
57 +++ b/arch/arm64/crypto/chacha-neon-glue.c
61 #include <crypto/algapi.h>
62 -#include <crypto/chacha.h>
63 +#include <crypto/internal/chacha.h>
64 #include <crypto/internal/simd.h>
65 #include <crypto/internal/skcipher.h>
66 #include <linux/kernel.h>
67 --- a/arch/x86/crypto/chacha_glue.c
68 +++ b/arch/x86/crypto/chacha_glue.c
72 #include <crypto/algapi.h>
73 -#include <crypto/chacha.h>
74 +#include <crypto/internal/chacha.h>
75 #include <crypto/internal/simd.h>
76 #include <crypto/internal/skcipher.h>
77 #include <linux/kernel.h>
80 @@ -1393,6 +1393,7 @@ config CRYPTO_SALSA20
82 config CRYPTO_CHACHA20
83 tristate "ChaCha stream cipher algorithms"
84 + select CRYPTO_LIB_CHACHA_GENERIC
85 select CRYPTO_BLKCIPHER
87 The ChaCha20, XChaCha20, and XChaCha12 stream cipher algorithms.
88 --- a/crypto/chacha_generic.c
89 +++ b/crypto/chacha_generic.c
92 #include <asm/unaligned.h>
93 #include <crypto/algapi.h>
94 -#include <crypto/chacha.h>
95 +#include <crypto/internal/chacha.h>
96 #include <crypto/internal/skcipher.h>
97 #include <linux/module.h>
99 -static void chacha_docrypt(u32 *state, u8 *dst, const u8 *src,
100 - unsigned int bytes, int nrounds)
102 - /* aligned to potentially speed up crypto_xor() */
103 - u8 stream[CHACHA_BLOCK_SIZE] __aligned(sizeof(long));
105 - while (bytes >= CHACHA_BLOCK_SIZE) {
106 - chacha_block(state, stream, nrounds);
107 - crypto_xor_cpy(dst, src, stream, CHACHA_BLOCK_SIZE);
108 - bytes -= CHACHA_BLOCK_SIZE;
109 - dst += CHACHA_BLOCK_SIZE;
110 - src += CHACHA_BLOCK_SIZE;
113 - chacha_block(state, stream, nrounds);
114 - crypto_xor_cpy(dst, src, stream, bytes);
118 static int chacha_stream_xor(struct skcipher_request *req,
119 const struct chacha_ctx *ctx, const u8 *iv)
121 @@ -48,8 +29,8 @@ static int chacha_stream_xor(struct skci
122 if (nbytes < walk.total)
123 nbytes = round_down(nbytes, CHACHA_BLOCK_SIZE);
125 - chacha_docrypt(state, walk.dst.virt.addr, walk.src.virt.addr,
126 - nbytes, ctx->nrounds);
127 + chacha_crypt_generic(state, walk.dst.virt.addr,
128 + walk.src.virt.addr, nbytes, ctx->nrounds);
129 err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
132 @@ -58,41 +39,10 @@ static int chacha_stream_xor(struct skci
134 void crypto_chacha_init(u32 *state, const struct chacha_ctx *ctx, const u8 *iv)
136 - state[0] = 0x61707865; /* "expa" */
137 - state[1] = 0x3320646e; /* "nd 3" */
138 - state[2] = 0x79622d32; /* "2-by" */
139 - state[3] = 0x6b206574; /* "te k" */
140 - state[4] = ctx->key[0];
141 - state[5] = ctx->key[1];
142 - state[6] = ctx->key[2];
143 - state[7] = ctx->key[3];
144 - state[8] = ctx->key[4];
145 - state[9] = ctx->key[5];
146 - state[10] = ctx->key[6];
147 - state[11] = ctx->key[7];
148 - state[12] = get_unaligned_le32(iv + 0);
149 - state[13] = get_unaligned_le32(iv + 4);
150 - state[14] = get_unaligned_le32(iv + 8);
151 - state[15] = get_unaligned_le32(iv + 12);
152 + chacha_init_generic(state, ctx->key, iv);
154 EXPORT_SYMBOL_GPL(crypto_chacha_init);
156 -static int chacha_setkey(struct crypto_skcipher *tfm, const u8 *key,
157 - unsigned int keysize, int nrounds)
159 - struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
162 - if (keysize != CHACHA_KEY_SIZE)
165 - for (i = 0; i < ARRAY_SIZE(ctx->key); i++)
166 - ctx->key[i] = get_unaligned_le32(key + i * sizeof(u32));
168 - ctx->nrounds = nrounds;
172 int crypto_chacha20_setkey(struct crypto_skcipher *tfm, const u8 *key,
173 unsigned int keysize)
175 @@ -126,7 +76,7 @@ int crypto_xchacha_crypt(struct skcipher
177 /* Compute the subkey given the original key and first 128 nonce bits */
178 crypto_chacha_init(state, ctx, req->iv);
179 - hchacha_block(state, subctx.key, ctx->nrounds);
180 + hchacha_block_generic(state, subctx.key, ctx->nrounds);
181 subctx.nrounds = ctx->nrounds;
183 /* Build the real IV */
184 --- a/include/crypto/chacha.h
185 +++ b/include/crypto/chacha.h
187 #ifndef _CRYPTO_CHACHA_H
188 #define _CRYPTO_CHACHA_H
190 -#include <crypto/skcipher.h>
191 +#include <asm/unaligned.h>
192 #include <linux/types.h>
193 -#include <linux/crypto.h>
195 /* 32-bit stream position, then 96-bit nonce (RFC7539 convention) */
196 #define CHACHA_IV_SIZE 16
198 /* 192-bit nonce, then 64-bit stream position */
199 #define XCHACHA_IV_SIZE 32
206 -void chacha_block(u32 *state, u8 *stream, int nrounds);
207 +void chacha_block_generic(u32 *state, u8 *stream, int nrounds);
208 static inline void chacha20_block(u32 *state, u8 *stream)
210 - chacha_block(state, stream, 20);
211 + chacha_block_generic(state, stream, 20);
213 -void hchacha_block(const u32 *in, u32 *out, int nrounds);
215 -void crypto_chacha_init(u32 *state, const struct chacha_ctx *ctx, const u8 *iv);
216 +void hchacha_block_arch(const u32 *state, u32 *out, int nrounds);
217 +void hchacha_block_generic(const u32 *state, u32 *out, int nrounds);
219 +static inline void hchacha_block(const u32 *state, u32 *out, int nrounds)
221 + if (IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_CHACHA))
222 + hchacha_block_arch(state, out, nrounds);
224 + hchacha_block_generic(state, out, nrounds);
227 -int crypto_chacha20_setkey(struct crypto_skcipher *tfm, const u8 *key,
228 - unsigned int keysize);
229 -int crypto_chacha12_setkey(struct crypto_skcipher *tfm, const u8 *key,
230 - unsigned int keysize);
231 +void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv);
232 +static inline void chacha_init_generic(u32 *state, const u32 *key, const u8 *iv)
234 + state[0] = 0x61707865; /* "expa" */
235 + state[1] = 0x3320646e; /* "nd 3" */
236 + state[2] = 0x79622d32; /* "2-by" */
237 + state[3] = 0x6b206574; /* "te k" */
244 + state[10] = key[6];
245 + state[11] = key[7];
246 + state[12] = get_unaligned_le32(iv + 0);
247 + state[13] = get_unaligned_le32(iv + 4);
248 + state[14] = get_unaligned_le32(iv + 8);
249 + state[15] = get_unaligned_le32(iv + 12);
252 -int crypto_chacha_crypt(struct skcipher_request *req);
253 -int crypto_xchacha_crypt(struct skcipher_request *req);
254 +static inline void chacha_init(u32 *state, const u32 *key, const u8 *iv)
256 + if (IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_CHACHA))
257 + chacha_init_arch(state, key, iv);
259 + chacha_init_generic(state, key, iv);
262 +void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src,
263 + unsigned int bytes, int nrounds);
264 +void chacha_crypt_generic(u32 *state, u8 *dst, const u8 *src,
265 + unsigned int bytes, int nrounds);
267 +static inline void chacha_crypt(u32 *state, u8 *dst, const u8 *src,
268 + unsigned int bytes, int nrounds)
270 + if (IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_CHACHA))
271 + chacha_crypt_arch(state, dst, src, bytes, nrounds);
273 + chacha_crypt_generic(state, dst, src, bytes, nrounds);
276 +static inline void chacha20_crypt(u32 *state, u8 *dst, const u8 *src,
277 + unsigned int bytes)
279 + chacha_crypt(state, dst, src, bytes, 20);
282 #endif /* _CRYPTO_CHACHA_H */
284 +++ b/include/crypto/internal/chacha.h
286 +/* SPDX-License-Identifier: GPL-2.0 */
288 +#ifndef _CRYPTO_INTERNAL_CHACHA_H
289 +#define _CRYPTO_INTERNAL_CHACHA_H
291 +#include <crypto/chacha.h>
292 +#include <crypto/internal/skcipher.h>
293 +#include <linux/crypto.h>
300 +void crypto_chacha_init(u32 *state, const struct chacha_ctx *ctx, const u8 *iv);
302 +static inline int chacha_setkey(struct crypto_skcipher *tfm, const u8 *key,
303 + unsigned int keysize, int nrounds)
305 + struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
308 + if (keysize != CHACHA_KEY_SIZE)
311 + for (i = 0; i < ARRAY_SIZE(ctx->key); i++)
312 + ctx->key[i] = get_unaligned_le32(key + i * sizeof(u32));
314 + ctx->nrounds = nrounds;
318 +static inline int chacha20_setkey(struct crypto_skcipher *tfm, const u8 *key,
319 + unsigned int keysize)
321 + return chacha_setkey(tfm, key, keysize, 20);
324 +static inline int chacha12_setkey(struct crypto_skcipher *tfm, const u8 *key,
325 + unsigned int keysize)
327 + return chacha_setkey(tfm, key, keysize, 12);
330 +int crypto_chacha20_setkey(struct crypto_skcipher *tfm, const u8 *key,
331 + unsigned int keysize);
332 +int crypto_chacha12_setkey(struct crypto_skcipher *tfm, const u8 *key,
333 + unsigned int keysize);
335 +int crypto_chacha_crypt(struct skcipher_request *req);
336 +int crypto_xchacha_crypt(struct skcipher_request *req);
338 +#endif /* _CRYPTO_INTERNAL_CHACHA_H */
341 @@ -26,8 +26,7 @@ endif
343 lib-y := ctype.o string.o vsprintf.o cmdline.o \
344 rbtree.o radix-tree.o timerqueue.o xarray.o \
346 - sha1.o chacha.o irq_regs.o argv_split.o \
347 + idr.o extable.o sha1.o irq_regs.o argv_split.o \
348 flex_proportions.o ratelimit.o show_mem.o \
349 is_single_threaded.o plist.o decompress.o kobject_uevent.o \
350 earlycpio.o seq_buf.o siphash.o dec_and_lock.o \
351 --- a/lib/crypto/Kconfig
352 +++ b/lib/crypto/Kconfig
353 @@ -8,6 +8,32 @@ config CRYPTO_LIB_AES
354 config CRYPTO_LIB_ARC4
357 +config CRYPTO_ARCH_HAVE_LIB_CHACHA
360 + Declares whether the architecture provides an arch-specific
361 + accelerated implementation of the ChaCha library interface,
362 + either builtin or as a module.
364 +config CRYPTO_LIB_CHACHA_GENERIC
366 + select CRYPTO_ALGAPI
368 + This symbol can be depended upon by arch implementations of the
369 + ChaCha library interface that require the generic code as a
370 + fallback, e.g., for SIMD implementations. If no arch specific
371 + implementation is enabled, this implementation serves the users
372 + of CRYPTO_LIB_CHACHA.
374 +config CRYPTO_LIB_CHACHA
375 + tristate "ChaCha library interface"
376 + depends on CRYPTO_ARCH_HAVE_LIB_CHACHA || !CRYPTO_ARCH_HAVE_LIB_CHACHA
377 + select CRYPTO_LIB_CHACHA_GENERIC if CRYPTO_ARCH_HAVE_LIB_CHACHA=n
379 + Enable the ChaCha library interface. This interface may be fulfilled
380 + by either the generic implementation or an arch-specific one, if one
381 + is available and enabled.
383 config CRYPTO_LIB_DES
386 --- a/lib/crypto/Makefile
387 +++ b/lib/crypto/Makefile
389 # SPDX-License-Identifier: GPL-2.0
391 +# chacha is used by the /dev/random driver which is always builtin
393 +obj-$(CONFIG_CRYPTO_LIB_CHACHA_GENERIC) += libchacha.o
395 obj-$(CONFIG_CRYPTO_LIB_AES) += libaes.o
401 -// SPDX-License-Identifier: GPL-2.0-or-later
403 - * The "hash function" used as the core of the ChaCha stream cipher (RFC7539)
405 - * Copyright (C) 2015 Martin Willi
408 -#include <linux/kernel.h>
409 -#include <linux/export.h>
410 -#include <linux/bitops.h>
411 -#include <linux/cryptohash.h>
412 -#include <asm/unaligned.h>
413 -#include <crypto/chacha.h>
415 -static void chacha_permute(u32 *x, int nrounds)
419 - /* whitelist the allowed round counts */
420 - WARN_ON_ONCE(nrounds != 20 && nrounds != 12);
422 - for (i = 0; i < nrounds; i += 2) {
423 - x[0] += x[4]; x[12] = rol32(x[12] ^ x[0], 16);
424 - x[1] += x[5]; x[13] = rol32(x[13] ^ x[1], 16);
425 - x[2] += x[6]; x[14] = rol32(x[14] ^ x[2], 16);
426 - x[3] += x[7]; x[15] = rol32(x[15] ^ x[3], 16);
428 - x[8] += x[12]; x[4] = rol32(x[4] ^ x[8], 12);
429 - x[9] += x[13]; x[5] = rol32(x[5] ^ x[9], 12);
430 - x[10] += x[14]; x[6] = rol32(x[6] ^ x[10], 12);
431 - x[11] += x[15]; x[7] = rol32(x[7] ^ x[11], 12);
433 - x[0] += x[4]; x[12] = rol32(x[12] ^ x[0], 8);
434 - x[1] += x[5]; x[13] = rol32(x[13] ^ x[1], 8);
435 - x[2] += x[6]; x[14] = rol32(x[14] ^ x[2], 8);
436 - x[3] += x[7]; x[15] = rol32(x[15] ^ x[3], 8);
438 - x[8] += x[12]; x[4] = rol32(x[4] ^ x[8], 7);
439 - x[9] += x[13]; x[5] = rol32(x[5] ^ x[9], 7);
440 - x[10] += x[14]; x[6] = rol32(x[6] ^ x[10], 7);
441 - x[11] += x[15]; x[7] = rol32(x[7] ^ x[11], 7);
443 - x[0] += x[5]; x[15] = rol32(x[15] ^ x[0], 16);
444 - x[1] += x[6]; x[12] = rol32(x[12] ^ x[1], 16);
445 - x[2] += x[7]; x[13] = rol32(x[13] ^ x[2], 16);
446 - x[3] += x[4]; x[14] = rol32(x[14] ^ x[3], 16);
448 - x[10] += x[15]; x[5] = rol32(x[5] ^ x[10], 12);
449 - x[11] += x[12]; x[6] = rol32(x[6] ^ x[11], 12);
450 - x[8] += x[13]; x[7] = rol32(x[7] ^ x[8], 12);
451 - x[9] += x[14]; x[4] = rol32(x[4] ^ x[9], 12);
453 - x[0] += x[5]; x[15] = rol32(x[15] ^ x[0], 8);
454 - x[1] += x[6]; x[12] = rol32(x[12] ^ x[1], 8);
455 - x[2] += x[7]; x[13] = rol32(x[13] ^ x[2], 8);
456 - x[3] += x[4]; x[14] = rol32(x[14] ^ x[3], 8);
458 - x[10] += x[15]; x[5] = rol32(x[5] ^ x[10], 7);
459 - x[11] += x[12]; x[6] = rol32(x[6] ^ x[11], 7);
460 - x[8] += x[13]; x[7] = rol32(x[7] ^ x[8], 7);
461 - x[9] += x[14]; x[4] = rol32(x[4] ^ x[9], 7);
466 - * chacha_block - generate one keystream block and increment block counter
467 - * @state: input state matrix (16 32-bit words)
468 - * @stream: output keystream block (64 bytes)
469 - * @nrounds: number of rounds (20 or 12; 20 is recommended)
471 - * This is the ChaCha core, a function from 64-byte strings to 64-byte strings.
472 - * The caller has already converted the endianness of the input. This function
473 - * also handles incrementing the block counter in the input matrix.
475 -void chacha_block(u32 *state, u8 *stream, int nrounds)
480 - memcpy(x, state, 64);
482 - chacha_permute(x, nrounds);
484 - for (i = 0; i < ARRAY_SIZE(x); i++)
485 - put_unaligned_le32(x[i] + state[i], &stream[i * sizeof(u32)]);
489 -EXPORT_SYMBOL(chacha_block);
492 - * hchacha_block - abbreviated ChaCha core, for XChaCha
493 - * @in: input state matrix (16 32-bit words)
494 - * @out: output (8 32-bit words)
495 - * @nrounds: number of rounds (20 or 12; 20 is recommended)
497 - * HChaCha is the ChaCha equivalent of HSalsa and is an intermediate step
498 - * towards XChaCha (see https://cr.yp.to/snuffle/xsalsa-20081128.pdf). HChaCha
499 - * skips the final addition of the initial state, and outputs only certain words
500 - * of the state. It should not be used for streaming directly.
502 -void hchacha_block(const u32 *in, u32 *out, int nrounds)
508 - chacha_permute(x, nrounds);
510 - memcpy(&out[0], &x[0], 16);
511 - memcpy(&out[4], &x[12], 16);
513 -EXPORT_SYMBOL(hchacha_block);
515 +++ b/lib/crypto/chacha.c
517 +// SPDX-License-Identifier: GPL-2.0-or-later
519 + * The "hash function" used as the core of the ChaCha stream cipher (RFC7539)
521 + * Copyright (C) 2015 Martin Willi
524 +#include <linux/bug.h>
525 +#include <linux/kernel.h>
526 +#include <linux/export.h>
527 +#include <linux/bitops.h>
528 +#include <linux/string.h>
529 +#include <linux/cryptohash.h>
530 +#include <asm/unaligned.h>
531 +#include <crypto/chacha.h>
533 +static void chacha_permute(u32 *x, int nrounds)
537 + /* whitelist the allowed round counts */
538 + WARN_ON_ONCE(nrounds != 20 && nrounds != 12);
540 + for (i = 0; i < nrounds; i += 2) {
541 + x[0] += x[4]; x[12] = rol32(x[12] ^ x[0], 16);
542 + x[1] += x[5]; x[13] = rol32(x[13] ^ x[1], 16);
543 + x[2] += x[6]; x[14] = rol32(x[14] ^ x[2], 16);
544 + x[3] += x[7]; x[15] = rol32(x[15] ^ x[3], 16);
546 + x[8] += x[12]; x[4] = rol32(x[4] ^ x[8], 12);
547 + x[9] += x[13]; x[5] = rol32(x[5] ^ x[9], 12);
548 + x[10] += x[14]; x[6] = rol32(x[6] ^ x[10], 12);
549 + x[11] += x[15]; x[7] = rol32(x[7] ^ x[11], 12);
551 + x[0] += x[4]; x[12] = rol32(x[12] ^ x[0], 8);
552 + x[1] += x[5]; x[13] = rol32(x[13] ^ x[1], 8);
553 + x[2] += x[6]; x[14] = rol32(x[14] ^ x[2], 8);
554 + x[3] += x[7]; x[15] = rol32(x[15] ^ x[3], 8);
556 + x[8] += x[12]; x[4] = rol32(x[4] ^ x[8], 7);
557 + x[9] += x[13]; x[5] = rol32(x[5] ^ x[9], 7);
558 + x[10] += x[14]; x[6] = rol32(x[6] ^ x[10], 7);
559 + x[11] += x[15]; x[7] = rol32(x[7] ^ x[11], 7);
561 + x[0] += x[5]; x[15] = rol32(x[15] ^ x[0], 16);
562 + x[1] += x[6]; x[12] = rol32(x[12] ^ x[1], 16);
563 + x[2] += x[7]; x[13] = rol32(x[13] ^ x[2], 16);
564 + x[3] += x[4]; x[14] = rol32(x[14] ^ x[3], 16);
566 + x[10] += x[15]; x[5] = rol32(x[5] ^ x[10], 12);
567 + x[11] += x[12]; x[6] = rol32(x[6] ^ x[11], 12);
568 + x[8] += x[13]; x[7] = rol32(x[7] ^ x[8], 12);
569 + x[9] += x[14]; x[4] = rol32(x[4] ^ x[9], 12);
571 + x[0] += x[5]; x[15] = rol32(x[15] ^ x[0], 8);
572 + x[1] += x[6]; x[12] = rol32(x[12] ^ x[1], 8);
573 + x[2] += x[7]; x[13] = rol32(x[13] ^ x[2], 8);
574 + x[3] += x[4]; x[14] = rol32(x[14] ^ x[3], 8);
576 + x[10] += x[15]; x[5] = rol32(x[5] ^ x[10], 7);
577 + x[11] += x[12]; x[6] = rol32(x[6] ^ x[11], 7);
578 + x[8] += x[13]; x[7] = rol32(x[7] ^ x[8], 7);
579 + x[9] += x[14]; x[4] = rol32(x[4] ^ x[9], 7);
584 + * chacha_block - generate one keystream block and increment block counter
585 + * @state: input state matrix (16 32-bit words)
586 + * @stream: output keystream block (64 bytes)
587 + * @nrounds: number of rounds (20 or 12; 20 is recommended)
589 + * This is the ChaCha core, a function from 64-byte strings to 64-byte strings.
590 + * The caller has already converted the endianness of the input. This function
591 + * also handles incrementing the block counter in the input matrix.
593 +void chacha_block_generic(u32 *state, u8 *stream, int nrounds)
598 + memcpy(x, state, 64);
600 + chacha_permute(x, nrounds);
602 + for (i = 0; i < ARRAY_SIZE(x); i++)
603 + put_unaligned_le32(x[i] + state[i], &stream[i * sizeof(u32)]);
607 +EXPORT_SYMBOL(chacha_block_generic);
610 + * hchacha_block_generic - abbreviated ChaCha core, for XChaCha
611 + * @state: input state matrix (16 32-bit words)
612 + * @out: output (8 32-bit words)
613 + * @nrounds: number of rounds (20 or 12; 20 is recommended)
615 + * HChaCha is the ChaCha equivalent of HSalsa and is an intermediate step
616 + * towards XChaCha (see https://cr.yp.to/snuffle/xsalsa-20081128.pdf). HChaCha
617 + * skips the final addition of the initial state, and outputs only certain words
618 + * of the state. It should not be used for streaming directly.
620 +void hchacha_block_generic(const u32 *state, u32 *stream, int nrounds)
624 + memcpy(x, state, 64);
626 + chacha_permute(x, nrounds);
628 + memcpy(&stream[0], &x[0], 16);
629 + memcpy(&stream[4], &x[12], 16);
631 +EXPORT_SYMBOL(hchacha_block_generic);
633 +++ b/lib/crypto/libchacha.c
635 +// SPDX-License-Identifier: GPL-2.0-or-later
637 + * The ChaCha stream cipher (RFC7539)
639 + * Copyright (C) 2015 Martin Willi
642 +#include <linux/kernel.h>
643 +#include <linux/export.h>
644 +#include <linux/module.h>
646 +#include <crypto/algapi.h> // for crypto_xor_cpy
647 +#include <crypto/chacha.h>
649 +void chacha_crypt_generic(u32 *state, u8 *dst, const u8 *src,
650 + unsigned int bytes, int nrounds)
652 + /* aligned to potentially speed up crypto_xor() */
653 + u8 stream[CHACHA_BLOCK_SIZE] __aligned(sizeof(long));
655 + while (bytes >= CHACHA_BLOCK_SIZE) {
656 + chacha_block_generic(state, stream, nrounds);
657 + crypto_xor_cpy(dst, src, stream, CHACHA_BLOCK_SIZE);
658 + bytes -= CHACHA_BLOCK_SIZE;
659 + dst += CHACHA_BLOCK_SIZE;
660 + src += CHACHA_BLOCK_SIZE;
663 + chacha_block_generic(state, stream, nrounds);
664 + crypto_xor_cpy(dst, src, stream, bytes);
667 +EXPORT_SYMBOL(chacha_crypt_generic);
669 +MODULE_LICENSE("GPL");