crypto: chelsio - Remove VLA usage of skcipher
In the quest to remove all stack VLA usage from the kernel[1], this
replaces struct crypto_skcipher and SKCIPHER_REQUEST_ON_STACK() usage
with struct crypto_sync_skcipher and SYNC_SKCIPHER_REQUEST_ON_STACK(),
which uses a fixed stack size.

[1] https://lkml.kernel.org/r/CA+55aFzCG-zNmZwX4A2FQpadafLfEzK6CC=qPXydAacU1RqZWA@mail.gmail.com

Cc: Harsh Jain <harsh@chelsio.com>
Signed-off-by: Kees Cook <keescook@chromium.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
parent 888a649c11
commit 28874f2637
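For orientation before reading the diff: the sketch below shows, in isolation, the crypto_sync_skcipher pattern the patch converts to. It is not part of the commit; the function name, the "cbc(aes)" algorithm, and the parameters are illustrative assumptions rather than chelsio driver code.

/*
 * Minimal sketch of the sync-skcipher fallback pattern (hypothetical
 * helper, not from the chelsio driver).
 */
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

static int example_fallback_encrypt(const u8 *key, unsigned int keylen,
				    struct scatterlist *src,
				    struct scatterlist *dst,
				    unsigned int nbytes, void *iv)
{
	struct crypto_sync_skcipher *tfm;
	int err;

	/* A sync tfm never completes asynchronously. */
	tfm = crypto_alloc_sync_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_sync_skcipher_setkey(tfm, key, keylen);
	if (!err) {
		/* Fixed-size on-stack request: no VLA sized by the tfm. */
		SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, tfm);

		skcipher_request_set_sync_tfm(subreq, tfm);
		skcipher_request_set_callback(subreq, 0, NULL, NULL);
		skcipher_request_set_crypt(subreq, src, dst, nbytes, iv);
		err = crypto_skcipher_encrypt(subreq);
		skcipher_request_zero(subreq);
	}

	crypto_free_sync_skcipher(tfm);
	return err;
}

Because the tfm is guaranteed synchronous, the request has a fixed maximum size, which is what lets SYNC_SKCIPHER_REQUEST_ON_STACK() avoid the variable-length stack array.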
--- a/drivers/crypto/chelsio/chcr_algo.c
+++ b/drivers/crypto/chelsio/chcr_algo.c
@@ -671,7 +671,7 @@ static int chcr_sg_ent_in_wr(struct scatterlist *src,
 	return min(srclen, dstlen);
 }
 
-static int chcr_cipher_fallback(struct crypto_skcipher *cipher,
+static int chcr_cipher_fallback(struct crypto_sync_skcipher *cipher,
 				u32 flags,
 				struct scatterlist *src,
 				struct scatterlist *dst,
@@ -681,9 +681,9 @@ static int chcr_cipher_fallback(struct crypto_skcipher *cipher,
 {
 	int err;
 
-	SKCIPHER_REQUEST_ON_STACK(subreq, cipher);
+	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, cipher);
 
-	skcipher_request_set_tfm(subreq, cipher);
+	skcipher_request_set_sync_tfm(subreq, cipher);
 	skcipher_request_set_callback(subreq, flags, NULL, NULL);
 	skcipher_request_set_crypt(subreq, src, dst,
 				   nbytes, iv);
@@ -854,13 +854,14 @@ static int chcr_cipher_fallback_setkey(struct crypto_ablkcipher *cipher,
 	struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(cipher));
 	int err = 0;
 
-	crypto_skcipher_clear_flags(ablkctx->sw_cipher, CRYPTO_TFM_REQ_MASK);
-	crypto_skcipher_set_flags(ablkctx->sw_cipher, cipher->base.crt_flags &
-				  CRYPTO_TFM_REQ_MASK);
-	err = crypto_skcipher_setkey(ablkctx->sw_cipher, key, keylen);
+	crypto_sync_skcipher_clear_flags(ablkctx->sw_cipher,
+					 CRYPTO_TFM_REQ_MASK);
+	crypto_sync_skcipher_set_flags(ablkctx->sw_cipher,
+				       cipher->base.crt_flags & CRYPTO_TFM_REQ_MASK);
+	err = crypto_sync_skcipher_setkey(ablkctx->sw_cipher, key, keylen);
 	tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
 	tfm->crt_flags |=
-		crypto_skcipher_get_flags(ablkctx->sw_cipher) &
+		crypto_sync_skcipher_get_flags(ablkctx->sw_cipher) &
 		CRYPTO_TFM_RES_MASK;
 	return err;
 }
@@ -1360,8 +1361,8 @@ static int chcr_cra_init(struct crypto_tfm *tfm)
 	struct chcr_context *ctx = crypto_tfm_ctx(tfm);
 	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
 
-	ablkctx->sw_cipher = crypto_alloc_skcipher(alg->cra_name, 0,
-				CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
+	ablkctx->sw_cipher = crypto_alloc_sync_skcipher(alg->cra_name, 0,
+				CRYPTO_ALG_NEED_FALLBACK);
 	if (IS_ERR(ablkctx->sw_cipher)) {
 		pr_err("failed to allocate fallback for %s\n", alg->cra_name);
 		return PTR_ERR(ablkctx->sw_cipher);
@@ -1390,8 +1391,8 @@ static int chcr_rfc3686_init(struct crypto_tfm *tfm)
 	/*RFC3686 initialises IV counter value to 1, rfc3686(ctr(aes))
 	 * cannot be used as fallback in chcr_handle_cipher_response
 	 */
-	ablkctx->sw_cipher = crypto_alloc_skcipher("ctr(aes)", 0,
-				CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
+	ablkctx->sw_cipher = crypto_alloc_sync_skcipher("ctr(aes)", 0,
+				CRYPTO_ALG_NEED_FALLBACK);
 	if (IS_ERR(ablkctx->sw_cipher)) {
 		pr_err("failed to allocate fallback for %s\n", alg->cra_name);
 		return PTR_ERR(ablkctx->sw_cipher);
@@ -1406,7 +1407,7 @@ static void chcr_cra_exit(struct crypto_tfm *tfm)
 	struct chcr_context *ctx = crypto_tfm_ctx(tfm);
 	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
 
-	crypto_free_skcipher(ablkctx->sw_cipher);
+	crypto_free_sync_skcipher(ablkctx->sw_cipher);
 	if (ablkctx->aes_generic)
 		crypto_free_cipher(ablkctx->aes_generic);
 }
--- a/drivers/crypto/chelsio/chcr_crypto.h
+++ b/drivers/crypto/chelsio/chcr_crypto.h
@@ -170,7 +170,7 @@ static inline struct chcr_context *h_ctx(struct crypto_ahash *tfm)
 }
 
 struct ablk_ctx {
-	struct crypto_skcipher *sw_cipher;
+	struct crypto_sync_skcipher *sw_cipher;
 	struct crypto_cipher *aes_generic;
 	__be32 key_ctx_hdr;
 	unsigned int enckey_len;
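As a usage note on the init/exit hunks above, here is a minimal sketch of the fallback tfm lifecycle under the sync API. The context and function names are invented for illustration and do not appear in the driver.

/*
 * Hypothetical driver context holding a sync-skcipher fallback,
 * allocated at init time and released at exit time.
 */
#include <crypto/skcipher.h>
#include <linux/err.h>

struct example_ctx {
	struct crypto_sync_skcipher *sw_cipher;	/* software fallback */
};

static int example_init(struct example_ctx *ctx, const char *cra_name)
{
	/* crypto_alloc_sync_skcipher() already restricts the lookup to
	 * synchronous implementations, which is why the patch can drop
	 * CRYPTO_ALG_ASYNC from the allocation mask. */
	ctx->sw_cipher = crypto_alloc_sync_skcipher(cra_name, 0,
						    CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->sw_cipher))
		return PTR_ERR(ctx->sw_cipher);
	return 0;
}

static void example_exit(struct example_ctx *ctx)
{
	crypto_free_sync_skcipher(ctx->sw_cipher);
}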