crypto: arm/aes-neonbs - provide a synchronous version of ctr(aes)
AES in CTR mode is used by modes such as GCM and CCM, which are often used in contexts where only synchronous ciphers are permitted. So provide a synchronous version of ctr(aes) based on the existing code. This requires a non-SIMD fallback to deal with invocations occurring from a context where SIMD instructions may not be used. We have a helper for this now in the AES library, so wire that up.

Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
commit e5f050402f
parent 5eedf315f2
--- a/arch/arm/crypto/aes-neonbs-glue.c
+++ b/arch/arm/crypto/aes-neonbs-glue.c
@@ -6,8 +6,10 @@
  */
 
 #include <asm/neon.h>
+#include <asm/simd.h>
 #include <crypto/aes.h>
 #include <crypto/cbc.h>
+#include <crypto/ctr.h>
 #include <crypto/internal/simd.h>
 #include <crypto/internal/skcipher.h>
 #include <crypto/xts.h>
@@ -54,6 +56,11 @@ struct aesbs_xts_ctx {
 	struct crypto_cipher *tweak_tfm;
 };
 
+struct aesbs_ctr_ctx {
+	struct aesbs_ctx key;		/* must be first member */
+	struct crypto_aes_ctx fallback;
+};
+
 static int aesbs_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
 			unsigned int key_len)
 {
@@ -189,6 +196,25 @@ static void cbc_exit(struct crypto_tfm *tfm)
 	crypto_free_cipher(ctx->enc_tfm);
 }
 
+static int aesbs_ctr_setkey_sync(struct crypto_skcipher *tfm, const u8 *in_key,
+				 unsigned int key_len)
+{
+	struct aesbs_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
+	int err;
+
+	err = aes_expandkey(&ctx->fallback, in_key, key_len);
+	if (err)
+		return err;
+
+	ctx->key.rounds = 6 + key_len / 4;
+
+	kernel_neon_begin();
+	aesbs_convert_key(ctx->key.rk, ctx->fallback.key_enc, ctx->key.rounds);
+	kernel_neon_end();
+
+	return 0;
+}
+
 static int ctr_encrypt(struct skcipher_request *req)
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
@@ -231,6 +257,29 @@ static int ctr_encrypt(struct skcipher_request *req)
 	return err;
 }
 
+static void ctr_encrypt_one(struct crypto_skcipher *tfm, const u8 *src, u8 *dst)
+{
+	struct aesbs_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
+	unsigned long flags;
+
+	/*
+	 * Temporarily disable interrupts to avoid races where
+	 * cachelines are evicted when the CPU is interrupted
+	 * to do something else.
+	 */
+	local_irq_save(flags);
+	aes_encrypt(&ctx->fallback, dst, src);
+	local_irq_restore(flags);
+}
+
+static int ctr_encrypt_sync(struct skcipher_request *req)
+{
+	if (!crypto_simd_usable())
+		return crypto_ctr_encrypt_walk(req, ctr_encrypt_one);
+
+	return ctr_encrypt(req);
+}
+
 static int aesbs_xts_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
 			    unsigned int key_len)
 {
@@ -358,6 +407,22 @@ static struct skcipher_alg aes_algs[] = { {
 	.setkey			= aesbs_setkey,
 	.encrypt		= ctr_encrypt,
 	.decrypt		= ctr_encrypt,
+}, {
+	.base.cra_name		= "ctr(aes)",
+	.base.cra_driver_name	= "ctr-aes-neonbs-sync",
+	.base.cra_priority	= 250 - 1,
+	.base.cra_blocksize	= 1,
+	.base.cra_ctxsize	= sizeof(struct aesbs_ctr_ctx),
+	.base.cra_module	= THIS_MODULE,
+
+	.min_keysize		= AES_MIN_KEY_SIZE,
+	.max_keysize		= AES_MAX_KEY_SIZE,
+	.chunksize		= AES_BLOCK_SIZE,
+	.walksize		= 8 * AES_BLOCK_SIZE,
+	.ivsize			= AES_BLOCK_SIZE,
+	.setkey			= aesbs_ctr_setkey_sync,
+	.encrypt		= ctr_encrypt_sync,
+	.decrypt		= ctr_encrypt_sync,
 }, {
 	.base.cra_name		= "__xts(aes)",
 	.base.cra_driver_name	= "__xts-aes-neonbs",
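For context, here is a minimal usage sketch, not part of this patch, showing how a caller restricted to synchronous ciphers would end up with the new implementation: passing CRYPTO_ALG_ASYNC in the mask when allocating the transform limits the lookup to synchronous providers, which is where "ctr-aes-neonbs-sync" becomes eligible. The helper name below is hypothetical.

#include <crypto/skcipher.h>
#include <linux/crypto.h>
#include <linux/err.h>

/*
 * Hypothetical usage sketch, not part of this patch: allocate a
 * synchronous ctr(aes) skcipher. Setting CRYPTO_ALG_ASYNC in the
 * mask (with the bit clear in the type) restricts the lookup to
 * synchronous implementations such as "ctr-aes-neonbs-sync".
 */
static struct crypto_skcipher *example_alloc_sync_ctr_aes(void)
{
	return crypto_alloc_skcipher("ctr(aes)", 0, CRYPTO_ALG_ASYNC);
}

As with any skcipher allocation, the caller would still check the result with IS_ERR() and release it with crypto_free_skcipher() when done.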