bed593c0e8
This adds a scalar implementation of AES, based on the precomputed tables that are exposed by the generic AES code. Since rotates are cheap on arm64, this implementation only uses the 4 core tables (of 1 KB each), and avoids the prerotated ones, reducing the D-cache footprint by 75%. On Cortex-A57, this code manages 13.0 cycles per byte, which is ~34% faster than the generic C code. (Note that this is still >13x slower than the code that uses the optional ARMv8 Crypto Extensions, which manages <1 cycle per byte.) Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org> Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
69 lines
1.9 KiB
C
69 lines
1.9 KiB
C
/*
|
|
* Scalar AES core transform
|
|
*
|
|
* Copyright (C) 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
|
|
*
|
|
* This program is free software; you can redistribute it and/or modify
|
|
* it under the terms of the GNU General Public License version 2 as
|
|
* published by the Free Software Foundation.
|
|
*/
|
|
|
|
#include <crypto/aes.h>
|
|
#include <linux/crypto.h>
|
|
#include <linux/module.h>
|
|
|
|
/*
 * Single-block AES transforms implemented in assembly (presumably in the
 * accompanying aes-cipher-core.S — confirm against the Makefile).
 * @rk:     expanded round-key schedule (encryption or decryption keys)
 * @out:    destination for one 16-byte block
 * @in:     source block
 * @rounds: number of AES rounds (10/12/14 depending on key size)
 */
asmlinkage void __aes_arm64_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);
/* Exported so other arm64 crypto modules can reuse the scalar core. */
EXPORT_SYMBOL(__aes_arm64_encrypt);

asmlinkage void __aes_arm64_decrypt(u32 *rk, u8 *out, const u8 *in, int rounds);
EXPORT_SYMBOL(__aes_arm64_decrypt);
|
|
|
|
/*
 * crypto_alg .cia_encrypt hook: encrypt one 16-byte block with the
 * expanded encryption key held in the tfm context.
 */
static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	/* key_length is 16/24/32 bytes, giving 10/12/14 rounds */
	__aes_arm64_encrypt(ctx->key_enc, out, in, 6 + ctx->key_length / 4);
}
|
|
|
|
/*
 * crypto_alg .cia_decrypt hook: decrypt one 16-byte block using the
 * expanded decryption key schedule.
 */
static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	/* key_length is 16/24/32 bytes, giving 10/12/14 rounds */
	__aes_arm64_decrypt(ctx->key_dec, out, in, 6 + ctx->key_length / 4);
}
|
|
|
|
static struct crypto_alg aes_alg = {
|
|
.cra_name = "aes",
|
|
.cra_driver_name = "aes-arm64",
|
|
.cra_priority = 200,
|
|
.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
|
|
.cra_blocksize = AES_BLOCK_SIZE,
|
|
.cra_ctxsize = sizeof(struct crypto_aes_ctx),
|
|
.cra_module = THIS_MODULE,
|
|
|
|
.cra_cipher.cia_min_keysize = AES_MIN_KEY_SIZE,
|
|
.cra_cipher.cia_max_keysize = AES_MAX_KEY_SIZE,
|
|
.cra_cipher.cia_setkey = crypto_aes_set_key,
|
|
.cra_cipher.cia_encrypt = aes_encrypt,
|
|
.cra_cipher.cia_decrypt = aes_decrypt
|
|
};
|
|
|
|
/* Module load: register the scalar cipher with the crypto API. */
static int __init aes_init(void)
{
	int ret;

	ret = crypto_register_alg(&aes_alg);
	return ret;
}
|
|
|
|
/* Module unload: remove the cipher from the crypto API. */
static void __exit aes_fini(void)
{
	crypto_unregister_alg(&aes_alg);
}
|
|
|
|
/* Wire up module entry/exit points and metadata. */
module_init(aes_init);
module_exit(aes_fini);

MODULE_DESCRIPTION("Scalar AES cipher for arm64");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
/* Allow auto-loading when userspace requests the "aes" algorithm. */
MODULE_ALIAS_CRYPTO("aes");
|