From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: "Jason A. Donenfeld" <Jason@zx2c4.com>
Date: Sun, 5 Jan 2020 22:40:49 -0500
Subject: [PATCH] crypto: {arm,arm64,mips}/poly1305 - remove redundant
 non-reduction from emit
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

commit 31899908a0d248b030b4464425b86c717e0007d4 upstream.

This appears to be some kind of copy and paste error, and is actually
dead code.

Pre: f = 0 ⇒ (f >> 32) = 0
    f = (f >> 32) + le32_to_cpu(digest[0]);
Post: 0 ≤ f < 2³²
    put_unaligned_le32(f, dst);

Pre: 0 ≤ f < 2³² ⇒ (f >> 32) = 0
    f = (f >> 32) + le32_to_cpu(digest[1]);
Post: 0 ≤ f < 2³²
    put_unaligned_le32(f, dst + 4);

Pre: 0 ≤ f < 2³² ⇒ (f >> 32) = 0
    f = (f >> 32) + le32_to_cpu(digest[2]);
Post: 0 ≤ f < 2³²
    put_unaligned_le32(f, dst + 8);

Pre: 0 ≤ f < 2³² ⇒ (f >> 32) = 0
    f = (f >> 32) + le32_to_cpu(digest[3]);
Post: 0 ≤ f < 2³²
    put_unaligned_le32(f, dst + 12);

Therefore this sequence is redundant. And Andy's code appears to handle
misalignment acceptably.
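
For illustration only (not part of the upstream commit message): the same
argument can be checked with a small stand-alone program. With f seeded to 0
and every digest word below 2³², each (f >> 32) term is 0, so the removed
carry chain emits exactly the same 16 bytes as copying the digest words
directly, which is what poly1305_emit_*() produces once it is handed dst.
The put_le32() helper below is a local stand-in for the kernel's
put_unaligned_le32(), and plain uint32_t words stand in for
__le32/le32_to_cpu():

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    /* Local stand-in for the kernel's put_unaligned_le32(). */
    static void put_le32(uint32_t v, uint8_t *p)
    {
        p[0] = v; p[1] = v >> 8; p[2] = v >> 16; p[3] = v >> 24;
    }

    int main(void)
    {
        uint32_t digest[4] = { 0xffffffff, 0x01234567, 0x89abcdef, 0xffffffff };
        uint8_t a[16], b[16];
        uint64_t f = 0;

        /* The removed sequence: a carry chain whose carry is always 0. */
        for (int i = 0; i < 4; i++) {
            f = (f >> 32) + digest[i];   /* 0 <= f < 2^32, so f >> 32 == 0 */
            put_le32(f, a + 4 * i);
        }

        /* A plain little-endian copy of the digest words. */
        for (int i = 0; i < 4; i++)
            put_le32(digest[i], b + 4 * i);

        printf("%s\n", memcmp(a, b, 16) ? "differ" : "identical");
        return 0;
    }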

Signed-off-by: Jason A. Donenfeld <Jason@zx2c4.com>
Tested-by: Ard Biesheuvel <ardb@kernel.org>
Reviewed-by: Ard Biesheuvel <ardb@kernel.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Signed-off-by: Jason A. Donenfeld <Jason@zx2c4.com>
---
 arch/arm/crypto/poly1305-glue.c   | 18 ++----------------
 arch/arm64/crypto/poly1305-glue.c | 18 ++----------------
 arch/mips/crypto/poly1305-glue.c  | 18 ++----------------
 3 files changed, 6 insertions(+), 48 deletions(-)

--- a/arch/arm/crypto/poly1305-glue.c
+++ b/arch/arm/crypto/poly1305-glue.c
 void poly1305_init_arm(void *state, const u8 *key);
 void poly1305_blocks_arm(void *state, const u8 *src, u32 len, u32 hibit);
-void poly1305_emit_arm(void *state, __le32 *digest, const u32 *nonce);
+void poly1305_emit_arm(void *state, u8 *digest, const u32 *nonce);
 
 void __weak poly1305_blocks_neon(void *state, const u8 *src, u32 len, u32 hibit)
 {
@@ -179,9 +179,6 @@ EXPORT_SYMBOL(poly1305_update_arch);
 
 void poly1305_final_arch(struct poly1305_desc_ctx *dctx, u8 *dst)
 {
-	__le32 digest[4];
-	u64 f = 0;
-
 	if (unlikely(dctx->buflen)) {
 		dctx->buf[dctx->buflen++] = 1;
 		memset(dctx->buf + dctx->buflen, 0,
@@ -189,18 +186,7 @@ void poly1305_final_arch(struct poly1305
 		poly1305_blocks_arm(&dctx->h, dctx->buf, POLY1305_BLOCK_SIZE, 0);
 	}
 
-	poly1305_emit_arm(&dctx->h, digest, dctx->s);
-
-	/* mac = (h + s) % (2^128) */
-	f = (f >> 32) + le32_to_cpu(digest[0]);
-	put_unaligned_le32(f, dst);
-	f = (f >> 32) + le32_to_cpu(digest[1]);
-	put_unaligned_le32(f, dst + 4);
-	f = (f >> 32) + le32_to_cpu(digest[2]);
-	put_unaligned_le32(f, dst + 8);
-	f = (f >> 32) + le32_to_cpu(digest[3]);
-	put_unaligned_le32(f, dst + 12);
-
+	poly1305_emit_arm(&dctx->h, dst, dctx->s);
 	*dctx = (struct poly1305_desc_ctx){};
 }
 EXPORT_SYMBOL(poly1305_final_arch);
--- a/arch/arm64/crypto/poly1305-glue.c
+++ b/arch/arm64/crypto/poly1305-glue.c
 asmlinkage void poly1305_init_arm64(void *state, const u8 *key);
 asmlinkage void poly1305_blocks(void *state, const u8 *src, u32 len, u32 hibit);
 asmlinkage void poly1305_blocks_neon(void *state, const u8 *src, u32 len, u32 hibit);
-asmlinkage void poly1305_emit(void *state, __le32 *digest, const u32 *nonce);
+asmlinkage void poly1305_emit(void *state, u8 *digest, const u32 *nonce);
 
 static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_neon);
 
@@ -162,9 +162,6 @@ EXPORT_SYMBOL(poly1305_update_arch);
 
 void poly1305_final_arch(struct poly1305_desc_ctx *dctx, u8 *dst)
 {
-	__le32 digest[4];
-	u64 f = 0;
-
 	if (unlikely(dctx->buflen)) {
 		dctx->buf[dctx->buflen++] = 1;
 		memset(dctx->buf + dctx->buflen, 0,
@@ -172,18 +169,7 @@ void poly1305_final_arch(struct poly1305
 		poly1305_blocks(&dctx->h, dctx->buf, POLY1305_BLOCK_SIZE, 0);
 	}
 
-	poly1305_emit(&dctx->h, digest, dctx->s);
-
-	/* mac = (h + s) % (2^128) */
-	f = (f >> 32) + le32_to_cpu(digest[0]);
-	put_unaligned_le32(f, dst);
-	f = (f >> 32) + le32_to_cpu(digest[1]);
-	put_unaligned_le32(f, dst + 4);
-	f = (f >> 32) + le32_to_cpu(digest[2]);
-	put_unaligned_le32(f, dst + 8);
-	f = (f >> 32) + le32_to_cpu(digest[3]);
-	put_unaligned_le32(f, dst + 12);
-
+	poly1305_emit(&dctx->h, dst, dctx->s);
 	*dctx = (struct poly1305_desc_ctx){};
 }
 EXPORT_SYMBOL(poly1305_final_arch);
--- a/arch/mips/crypto/poly1305-glue.c
+++ b/arch/mips/crypto/poly1305-glue.c
 asmlinkage void poly1305_init_mips(void *state, const u8 *key);
 asmlinkage void poly1305_blocks_mips(void *state, const u8 *src, u32 len, u32 hibit);
-asmlinkage void poly1305_emit_mips(void *state, __le32 *digest, const u32 *nonce);
+asmlinkage void poly1305_emit_mips(void *state, u8 *digest, const u32 *nonce);
 
 void poly1305_init_arch(struct poly1305_desc_ctx *dctx, const u8 *key)
 {
@@ -134,9 +134,6 @@ EXPORT_SYMBOL(poly1305_update_arch);
 
 void poly1305_final_arch(struct poly1305_desc_ctx *dctx, u8 *dst)
 {
-	__le32 digest[4];
-	u64 f = 0;
-
 	if (unlikely(dctx->buflen)) {
 		dctx->buf[dctx->buflen++] = 1;
 		memset(dctx->buf + dctx->buflen, 0,
@@ -144,18 +141,7 @@ void poly1305_final_arch(struct poly1305
 		poly1305_blocks_mips(&dctx->h, dctx->buf, POLY1305_BLOCK_SIZE, 0);
 	}
 
-	poly1305_emit_mips(&dctx->h, digest, dctx->s);
-
-	/* mac = (h + s) % (2^128) */
-	f = (f >> 32) + le32_to_cpu(digest[0]);
-	put_unaligned_le32(f, dst);
-	f = (f >> 32) + le32_to_cpu(digest[1]);
-	put_unaligned_le32(f, dst + 4);
-	f = (f >> 32) + le32_to_cpu(digest[2]);
-	put_unaligned_le32(f, dst + 8);
-	f = (f >> 32) + le32_to_cpu(digest[3]);
-	put_unaligned_le32(f, dst + 12);
-
+	poly1305_emit_mips(&dctx->h, dst, dctx->s);
 	*dctx = (struct poly1305_desc_ctx){};
 }
 EXPORT_SYMBOL(poly1305_final_arch);
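
For reference only (not part of the patch itself): after this change the
finalization path has the same shape in all three glue files. A sketch of the
resulting function, using the mips names from the hunks above (the memset
continuation line is the unchanged context that sits between the two hunks):

    void poly1305_final_arch(struct poly1305_desc_ctx *dctx, u8 *dst)
    {
        if (unlikely(dctx->buflen)) {
            dctx->buf[dctx->buflen++] = 1;
            memset(dctx->buf + dctx->buflen, 0,
                   POLY1305_BLOCK_SIZE - dctx->buflen);
            poly1305_blocks_mips(&dctx->h, dctx->buf, POLY1305_BLOCK_SIZE, 0);
        }

        poly1305_emit_mips(&dctx->h, dst, dctx->s);
        *dctx = (struct poly1305_desc_ctx){};
    }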