crypto: x86/aegis128 - convert to use AEAD SIMD helpers
Convert the x86 implementation of AEGIS-128 to use the AEAD SIMD helpers, rather than hand-rolling the same functionality. This simplifies the code and also fixes the bug where the user-provided aead_request is modified.

Signed-off-by: Eric Biggers <ebiggers@google.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
commit de272ca72c
parent 149e12252f
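For readers unfamiliar with the AEAD SIMD helpers the message refers to: they are the simd_register_aeads_compat()/simd_unregister_aeads() routines (crypto/simd.c), which register, alongside an arch-specific CRYPTO_ALG_INTERNAL algorithm, a user-visible wrapper that handles the FPU-availability check and cryptd fallback itself. A minimal usage sketch follows; only the two simd_* helpers and the CRYPTO_ALG_INTERNAL/"__"-prefix convention come from the patch below, every other name is illustrative.

#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <linux/module.h>

static struct aead_alg my_internal_alg = {
        /* .setkey, .encrypt, .ivsize, .maxauthsize etc. go here as usual;
         * what matters for the simd helpers is the internal flag and the
         * "__" prefix on both names. */
        .base.cra_flags       = CRYPTO_ALG_INTERNAL,
        .base.cra_name        = "__myaead",
        .base.cra_driver_name = "__myaead-driver",
        .base.cra_module      = THIS_MODULE,
};

static struct simd_aead_alg *my_simd_alg;

static int __init my_module_init(void)
{
        /* Registers my_internal_alg and, in the same call, creates and
         * registers a user-visible "myaead" wrapper (the "__" prefix is
         * stripped from the names). */
        return simd_register_aeads_compat(&my_internal_alg, 1, &my_simd_alg);
}

static void __exit my_module_exit(void)
{
        simd_unregister_aeads(&my_internal_alg, 1, &my_simd_alg);
}

module_init(my_module_init);
module_exit(my_module_exit);
MODULE_LICENSE("GPL");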
arch/x86/crypto/aegis128-aesni-glue.c

@@ -11,8 +11,8 @@
  * any later version.
  */

-#include <crypto/cryptd.h>
 #include <crypto/internal/aead.h>
+#include <crypto/internal/simd.h>
 #include <crypto/internal/skcipher.h>
 #include <crypto/scatterwalk.h>
 #include <linux/module.h>
@@ -242,131 +242,35 @@ static void crypto_aegis128_aesni_exit_tfm(struct crypto_aead *aead)
 {
 }

-static int cryptd_aegis128_aesni_setkey(struct crypto_aead *aead,
-                                        const u8 *key, unsigned int keylen)
-{
-        struct cryptd_aead **ctx = crypto_aead_ctx(aead);
-        struct cryptd_aead *cryptd_tfm = *ctx;
-
-        return crypto_aead_setkey(&cryptd_tfm->base, key, keylen);
-}
-
-static int cryptd_aegis128_aesni_setauthsize(struct crypto_aead *aead,
-                                             unsigned int authsize)
-{
-        struct cryptd_aead **ctx = crypto_aead_ctx(aead);
-        struct cryptd_aead *cryptd_tfm = *ctx;
-
-        return crypto_aead_setauthsize(&cryptd_tfm->base, authsize);
-}
-
-static int cryptd_aegis128_aesni_encrypt(struct aead_request *req)
-{
-        struct crypto_aead *aead = crypto_aead_reqtfm(req);
-        struct cryptd_aead **ctx = crypto_aead_ctx(aead);
-        struct cryptd_aead *cryptd_tfm = *ctx;
-
-        aead = &cryptd_tfm->base;
-        if (irq_fpu_usable() && (!in_atomic() ||
-                                 !cryptd_aead_queued(cryptd_tfm)))
-                aead = cryptd_aead_child(cryptd_tfm);
-
-        aead_request_set_tfm(req, aead);
-
-        return crypto_aead_encrypt(req);
-}
-
-static int cryptd_aegis128_aesni_decrypt(struct aead_request *req)
-{
-        struct crypto_aead *aead = crypto_aead_reqtfm(req);
-        struct cryptd_aead **ctx = crypto_aead_ctx(aead);
-        struct cryptd_aead *cryptd_tfm = *ctx;
-
-        aead = &cryptd_tfm->base;
-        if (irq_fpu_usable() && (!in_atomic() ||
-                                 !cryptd_aead_queued(cryptd_tfm)))
-                aead = cryptd_aead_child(cryptd_tfm);
-
-        aead_request_set_tfm(req, aead);
-
-        return crypto_aead_decrypt(req);
-}
-
-static int cryptd_aegis128_aesni_init_tfm(struct crypto_aead *aead)
-{
-        struct cryptd_aead *cryptd_tfm;
-        struct cryptd_aead **ctx = crypto_aead_ctx(aead);
-
-        cryptd_tfm = cryptd_alloc_aead("__aegis128-aesni", CRYPTO_ALG_INTERNAL,
-                                       CRYPTO_ALG_INTERNAL);
-        if (IS_ERR(cryptd_tfm))
-                return PTR_ERR(cryptd_tfm);
-
-        *ctx = cryptd_tfm;
-        crypto_aead_set_reqsize(aead, crypto_aead_reqsize(&cryptd_tfm->base));
-        return 0;
-}
-
-static void cryptd_aegis128_aesni_exit_tfm(struct crypto_aead *aead)
-{
-        struct cryptd_aead **ctx = crypto_aead_ctx(aead);
-
-        cryptd_free_aead(*ctx);
-}
-
-static struct aead_alg crypto_aegis128_aesni_alg[] = {
-        {
-                .setkey = crypto_aegis128_aesni_setkey,
-                .setauthsize = crypto_aegis128_aesni_setauthsize,
-                .encrypt = crypto_aegis128_aesni_encrypt,
-                .decrypt = crypto_aegis128_aesni_decrypt,
-                .init = crypto_aegis128_aesni_init_tfm,
-                .exit = crypto_aegis128_aesni_exit_tfm,
-
-                .ivsize = AEGIS128_NONCE_SIZE,
-                .maxauthsize = AEGIS128_MAX_AUTH_SIZE,
-                .chunksize = AEGIS128_BLOCK_SIZE,
-
-                .base = {
-                        .cra_flags = CRYPTO_ALG_INTERNAL,
-                        .cra_blocksize = 1,
-                        .cra_ctxsize = sizeof(struct aegis_ctx) +
-                                __alignof__(struct aegis_ctx),
-                        .cra_alignmask = 0,
-
-                        .cra_name = "__aegis128",
-                        .cra_driver_name = "__aegis128-aesni",
-
-                        .cra_module = THIS_MODULE,
-                }
-        }, {
-                .setkey = cryptd_aegis128_aesni_setkey,
-                .setauthsize = cryptd_aegis128_aesni_setauthsize,
-                .encrypt = cryptd_aegis128_aesni_encrypt,
-                .decrypt = cryptd_aegis128_aesni_decrypt,
-                .init = cryptd_aegis128_aesni_init_tfm,
-                .exit = cryptd_aegis128_aesni_exit_tfm,
-
-                .ivsize = AEGIS128_NONCE_SIZE,
-                .maxauthsize = AEGIS128_MAX_AUTH_SIZE,
-                .chunksize = AEGIS128_BLOCK_SIZE,
-
-                .base = {
-                        .cra_flags = CRYPTO_ALG_ASYNC,
-                        .cra_blocksize = 1,
-                        .cra_ctxsize = sizeof(struct cryptd_aead *),
-                        .cra_alignmask = 0,
-
-                        .cra_priority = 400,
-
-                        .cra_name = "aegis128",
-                        .cra_driver_name = "aegis128-aesni",
-
-                        .cra_module = THIS_MODULE,
-                }
-        }
+static struct aead_alg crypto_aegis128_aesni_alg = {
+        .setkey = crypto_aegis128_aesni_setkey,
+        .setauthsize = crypto_aegis128_aesni_setauthsize,
+        .encrypt = crypto_aegis128_aesni_encrypt,
+        .decrypt = crypto_aegis128_aesni_decrypt,
+        .init = crypto_aegis128_aesni_init_tfm,
+        .exit = crypto_aegis128_aesni_exit_tfm,
+
+        .ivsize = AEGIS128_NONCE_SIZE,
+        .maxauthsize = AEGIS128_MAX_AUTH_SIZE,
+        .chunksize = AEGIS128_BLOCK_SIZE,
+
+        .base = {
+                .cra_flags = CRYPTO_ALG_INTERNAL,
+                .cra_blocksize = 1,
+                .cra_ctxsize = sizeof(struct aegis_ctx) +
+                               __alignof__(struct aegis_ctx),
+                .cra_alignmask = 0,
+                .cra_priority = 400,
+
+                .cra_name = "__aegis128",
+                .cra_driver_name = "__aegis128-aesni",
+
+                .cra_module = THIS_MODULE,
+        }
 };

+static struct simd_aead_alg *simd_alg;
+
 static int __init crypto_aegis128_aesni_module_init(void)
 {
         if (!boot_cpu_has(X86_FEATURE_XMM2) ||
@@ -374,14 +278,13 @@ static int __init crypto_aegis128_aesni_module_init(void)
             !cpu_has_xfeatures(XFEATURE_MASK_SSE, NULL))
                 return -ENODEV;

-        return crypto_register_aeads(crypto_aegis128_aesni_alg,
-                                     ARRAY_SIZE(crypto_aegis128_aesni_alg));
+        return simd_register_aeads_compat(&crypto_aegis128_aesni_alg, 1,
+                                          &simd_alg);
 }

 static void __exit crypto_aegis128_aesni_module_exit(void)
 {
-        crypto_unregister_aeads(crypto_aegis128_aesni_alg,
-                                ARRAY_SIZE(crypto_aegis128_aesni_alg));
+        simd_unregister_aeads(&crypto_aegis128_aesni_alg, 1, &simd_alg);
 }

 module_init(crypto_aegis128_aesni_module_init);
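The aead_request bug mentioned in the commit message is visible in the removed cryptd_aegis128_aesni_encrypt()/_decrypt() wrappers above: they call aead_request_set_tfm() on the caller's own request to retarget it at the cryptd or child transform. The shared SIMD wrapper instead operates on a subrequest kept in the request context, leaving the caller's request untouched. A rough paraphrase of that approach (not part of this diff; the context-structure and helper names are recalled from crypto/simd.c and should be treated as illustrative):

static int simd_aead_encrypt(struct aead_request *req)
{
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct simd_aead_ctx *ctx = crypto_aead_ctx(tfm);
        struct aead_request *subreq = aead_request_ctx(req);
        struct crypto_aead *child;

        /* Work on a copy placed in the request context; the caller's
         * aead_request is never modified. */
        *subreq = *req;

        if (!irq_fpu_usable() ||
            (in_atomic() && cryptd_aead_queued(ctx->cryptd_tfm)))
                child = &ctx->cryptd_tfm->base; /* defer to cryptd */
        else
                child = cryptd_aead_child(ctx->cryptd_tfm);

        aead_request_set_tfm(subreq, child);
        return crypto_aead_encrypt(subreq);
}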
crypto/Kconfig

@@ -310,7 +310,7 @@ config CRYPTO_AEGIS128_AESNI_SSE2
         tristate "AEGIS-128 AEAD algorithm (x86_64 AESNI+SSE2 implementation)"
         depends on X86 && 64BIT
         select CRYPTO_AEAD
-        select CRYPTO_CRYPTD
+        select CRYPTO_SIMD
         help
          AESNI+SSE2 implementation of the AEGSI-128 dedicated AEAD algorithm.