crypto: x86/aes-ni - switch to generic for fallback and key routines

The AES-NI code contains fallbacks for invocations that occur from a
context where the SIMD unit is unavailable, which really only occurs
when running in softirq context that was entered from a hard IRQ that
was taken while running kernel code that was already using the FPU.

That means performance is not really a consideration, and we can just
use the new library code for this use case, which has a smaller
footprint and is believed to be time invariant. This will allow us to
drop the non-SIMD asm routines in a subsequent patch.

Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
This commit is contained in:
Ard Biesheuvel 2019-07-02 21:41:23 +02:00 committed by Herbert Xu
parent e59c1c9874
commit 2c53fd11f7
3 changed files with 8 additions and 22 deletions

View File

@@ -26,7 +26,6 @@
 #include <crypto/gcm.h>
 #include <crypto/xts.h>
 #include <asm/cpu_device_id.h>
-#include <asm/crypto/aes.h>
 #include <asm/simd.h>
 #include <crypto/scatterwalk.h>
 #include <crypto/internal/aead.h>
@@ -329,7 +328,7 @@ static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
 	}

 	if (!crypto_simd_usable())
-		err = crypto_aes_expand_key(ctx, in_key, key_len);
+		err = aes_expandkey(ctx, in_key, key_len);
 	else {
 		kernel_fpu_begin();
 		err = aesni_set_key(ctx, in_key, key_len);
@@ -349,9 +348,9 @@ static void aesni_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
 	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

-	if (!crypto_simd_usable())
-		crypto_aes_encrypt_x86(ctx, dst, src);
-	else {
+	if (!crypto_simd_usable()) {
+		aes_encrypt(ctx, dst, src);
+	} else {
 		kernel_fpu_begin();
 		aesni_enc(ctx, dst, src);
 		kernel_fpu_end();
@@ -362,9 +361,9 @@ static void aesni_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
 	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

-	if (!crypto_simd_usable())
-		crypto_aes_decrypt_x86(ctx, dst, src);
-	else {
+	if (!crypto_simd_usable()) {
+		aes_decrypt(ctx, dst, src);
+	} else {
 		kernel_fpu_begin();
 		aesni_dec(ctx, dst, src);
 		kernel_fpu_end();

View File

@ -1,12 +0,0 @@
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef ASM_X86_AES_H
#define ASM_X86_AES_H
#include <linux/crypto.h>
#include <crypto/aes.h>
void crypto_aes_encrypt_x86(struct crypto_aes_ctx *ctx, u8 *dst,
const u8 *src);
void crypto_aes_decrypt_x86(struct crypto_aes_ctx *ctx, u8 *dst,
const u8 *src);
#endif

View File

@@ -1156,8 +1156,7 @@ config CRYPTO_AES_NI_INTEL
 	tristate "AES cipher algorithms (AES-NI)"
 	depends on X86
 	select CRYPTO_AEAD
-	select CRYPTO_AES_X86_64 if 64BIT
-	select CRYPTO_AES_586 if !64BIT
+	select CRYPTO_LIB_AES
 	select CRYPTO_ALGAPI
 	select CRYPTO_BLKCIPHER
 	select CRYPTO_GLUE_HELPER_X86 if 64BIT