crypto: arm64/aes-ce-cipher - use AES library as fallback
author Ard Biesheuvel <ard.biesheuvel@linaro.org>
Tue, 2 Jul 2019 19:41:36 +0000 (21:41 +0200)
committer Herbert Xu <herbert@gondor.apana.org.au>
Fri, 26 Jul 2019 04:58:09 +0000 (14:58 +1000)
Instead of calling into the table-based scalar AES code in situations
where the SIMD unit may not be used, use the generic AES library code,
which is more appropriate since it is less likely to be susceptible to
timing attacks.

Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
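
For context, the fallback pattern this patch adopts looks roughly like the
sketch below. This is illustrative only: the helper name ce_aes_encrypt_one
is made up for the example, while aes_encrypt() and crypto_simd_usable()
are the kernel interfaces the patch actually relies on.

    #include <crypto/aes.h>             /* struct crypto_aes_ctx, aes_encrypt() */
    #include <crypto/internal/simd.h>   /* crypto_simd_usable() */

    /*
     * Illustrative sketch, not part of the patch: encrypt one block,
     * falling back to the generic AES library when the SIMD unit is
     * unavailable (e.g. in hard interrupt context).
     */
    static void ce_aes_encrypt_one(struct crypto_aes_ctx *ctx,
                                   u8 dst[AES_BLOCK_SIZE],
                                   const u8 src[AES_BLOCK_SIZE])
    {
            if (!crypto_simd_usable()) {
                    /* generic C implementation from lib/crypto/aes.c */
                    aes_encrypt(ctx, dst, src);
                    return;
            }

            /*
             * Fast path: kernel_neon_begin(), run the ARMv8 Crypto
             * Extensions routine, then kernel_neon_end().
             */
    }

The real glue code in aes-ce-glue.c receives a struct crypto_tfm and derives
the AES context with crypto_tfm_ctx(), as the hunks below show.
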
arch/arm64/crypto/Kconfig
arch/arm64/crypto/aes-ce-glue.c
arch/arm64/crypto/aes-cipher-glue.c

diff --git a/arch/arm64/crypto/Kconfig b/arch/arm64/crypto/Kconfig
index 66dea518221c7ea7d453d87ea8bd906192296471..4922c4451e7c3a57030ef86a4d2373ffc1ef4a7d 100644
--- a/arch/arm64/crypto/Kconfig
+++ b/arch/arm64/crypto/Kconfig
@@ -73,7 +73,7 @@ config CRYPTO_AES_ARM64_CE
        tristate "AES core cipher using ARMv8 Crypto Extensions"
        depends on ARM64 && KERNEL_MODE_NEON
        select CRYPTO_ALGAPI
-       select CRYPTO_AES_ARM64
+       select CRYPTO_LIB_AES
 
 config CRYPTO_AES_ARM64_CE_CCM
        tristate "AES in CCM mode using ARMv8 Crypto Extensions"
diff --git a/arch/arm64/crypto/aes-ce-glue.c b/arch/arm64/crypto/aes-ce-glue.c
index d3bc97afde203c61f785290648a776dcbf7c8ffc..6d085dc56c5123b8c0c053f256f73a2744f070b3 100644
--- a/arch/arm64/crypto/aes-ce-glue.c
+++ b/arch/arm64/crypto/aes-ce-glue.c
@@ -20,9 +20,6 @@ MODULE_DESCRIPTION("Synchronous AES cipher using ARMv8 Crypto Extensions");
 MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
 MODULE_LICENSE("GPL v2");
 
-asmlinkage void __aes_arm64_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);
-asmlinkage void __aes_arm64_decrypt(u32 *rk, u8 *out, const u8 *in, int rounds);
-
 struct aes_block {
        u8 b[AES_BLOCK_SIZE];
 };
@@ -51,7 +48,7 @@ static void aes_cipher_encrypt(struct crypto_tfm *tfm, u8 dst[], u8 const src[])
        struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
 
        if (!crypto_simd_usable()) {
-               __aes_arm64_encrypt(ctx->key_enc, dst, src, num_rounds(ctx));
+               aes_encrypt(ctx, dst, src);
                return;
        }
 
@@ -65,7 +62,7 @@ static void aes_cipher_decrypt(struct crypto_tfm *tfm, u8 dst[], u8 const src[])
        struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
 
        if (!crypto_simd_usable()) {
-               __aes_arm64_decrypt(ctx->key_dec, dst, src, num_rounds(ctx));
+               aes_decrypt(ctx, dst, src);
                return;
        }
 
diff --git a/arch/arm64/crypto/aes-cipher-glue.c b/arch/arm64/crypto/aes-cipher-glue.c
index cc7a6dad7c2e410f1e954f4029f10dae660b2b18..8caf6dfefce88ec91cd0b3f5e510a30ebcde7849 100644
--- a/arch/arm64/crypto/aes-cipher-glue.c
+++ b/arch/arm64/crypto/aes-cipher-glue.c
 #include <linux/module.h>
 
 asmlinkage void __aes_arm64_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);
-EXPORT_SYMBOL(__aes_arm64_encrypt);
-
 asmlinkage void __aes_arm64_decrypt(u32 *rk, u8 *out, const u8 *in, int rounds);
-EXPORT_SYMBOL(__aes_arm64_decrypt);
 
 static void aes_arm64_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {