Merge branch 'linus' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
Pull crypto fixes from Herbert Xu:
 "This fixes a number of issues in the chelsio and caam drivers"

* 'linus' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6:
  Revert "crypto: caam/jr - Remove extra memory barrier during job ring dequeue"
  crypto: caam - fix caam_dump_sg that iterates through scatterlist
  crypto: caam - fix DKP detection logic
  MAINTAINERS: Maintainer for Chelsio crypto driver
  crypto: chelsio - count incomplete block in IV
  crypto: chelsio - Fix softlockup with heavy I/O
  crypto: chelsio - Fix NULL pointer dereference
commit 88f76bc31b

@@ -4316,7 +4316,7 @@ F: drivers/infiniband/hw/cxgb3/
 F: include/uapi/rdma/cxgb3-abi.h
 
 CXGB4 CRYPTO DRIVER (chcr)
-M: Harsh Jain <harsh@chelsio.com>
+M: Atul Gupta <atul.gupta@chelsio.com>
 L: linux-crypto@vger.kernel.org
 W: http://www.chelsio.com
 S: Supported

@@ -89,6 +89,7 @@ struct caam_alg_entry {
         int class2_alg_type;
         bool rfc3686;
         bool geniv;
+        bool nodkp;
 };
 
 struct caam_aead_alg {

@@ -2052,6 +2053,7 @@ static struct caam_aead_alg driver_aeads[] = {
                 },
                 .caam = {
                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
+                        .nodkp = true,
                 },
         },
         {

@@ -2070,6 +2072,7 @@ static struct caam_aead_alg driver_aeads[] = {
                 },
                 .caam = {
                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
+                        .nodkp = true,
                 },
         },
         /* Galois Counter Mode */

@@ -2089,6 +2092,7 @@ static struct caam_aead_alg driver_aeads[] = {
                 },
                 .caam = {
                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
+                        .nodkp = true,
                 },
         },
         /* single-pass ipsec_esp descriptor */

@@ -3334,6 +3338,7 @@ static struct caam_aead_alg driver_aeads[] = {
                                            OP_ALG_AAI_AEAD,
                         .class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
                                            OP_ALG_AAI_AEAD,
+                        .nodkp = true,
                 },
         },
         {

@@ -3356,6 +3361,7 @@ static struct caam_aead_alg driver_aeads[] = {
                                            OP_ALG_AAI_AEAD,
                         .class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
                                            OP_ALG_AAI_AEAD,
+                        .nodkp = true,
                 },
         },
 };

@@ -3417,8 +3423,7 @@ static int caam_aead_init(struct crypto_aead *tfm)
                  container_of(alg, struct caam_aead_alg, aead);
         struct caam_ctx *ctx = crypto_aead_ctx(tfm);
 
-        return caam_init_common(ctx, &caam_alg->caam,
-                                alg->setkey == aead_setkey);
+        return caam_init_common(ctx, &caam_alg->caam, !caam_alg->caam.nodkp);
 }
 
 static void caam_exit_common(struct caam_ctx *ctx)

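The DKP change above (repeated for the caam/qi and caam/qi2 back ends below) stops inferring Derived Key Protocol support from which setkey callback an algorithm uses and instead marks the algorithms that never use DKP with an explicit .nodkp flag. A toy user-space sketch of the flag-based check, with invented types and names rather than the driver's own:

#include <stdbool.h>
#include <stdio.h>

/* trimmed-down, invented stand-in for struct caam_alg_entry */
struct alg_entry {
        const char *name;
        bool nodkp;             /* true: algorithm never uses DKP */
};

static void init_common(const struct alg_entry *e)
{
        /* mirrors caam_init_common(ctx, &caam_alg->caam, !caam_alg->caam.nodkp) */
        bool uses_dkp = !e->nodkp;

        printf("%s: %s\n", e->name, uses_dkp ? "prepare DKP descriptors" : "skip DKP");
}

int main(void)
{
        struct alg_entry gcm = { "gcm(aes)", true };
        struct alg_entry authenc = { "authenc(hmac(sha1),cbc(aes))", false };

        init_common(&gcm);
        init_common(&authenc);
        return 0;
}
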
@@ -36,6 +36,7 @@ struct caam_alg_entry {
         int class2_alg_type;
         bool rfc3686;
         bool geniv;
+        bool nodkp;
 };
 
 struct caam_aead_alg {

@@ -1523,6 +1524,7 @@ static struct caam_aead_alg driver_aeads[] = {
                 },
                 .caam = {
                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
+                        .nodkp = true,
                 },
         },
         {

@@ -1541,6 +1543,7 @@ static struct caam_aead_alg driver_aeads[] = {
                 },
                 .caam = {
                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
+                        .nodkp = true,
                 },
         },
         /* Galois Counter Mode */

@@ -1560,6 +1563,7 @@ static struct caam_aead_alg driver_aeads[] = {
                 },
                 .caam = {
                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
+                        .nodkp = true,
                 }
         },
         /* single-pass ipsec_esp descriptor */

@@ -2433,8 +2437,7 @@ static int caam_aead_init(struct crypto_aead *tfm)
                                                       aead);
         struct caam_ctx *ctx = crypto_aead_ctx(tfm);
 
-        return caam_init_common(ctx, &caam_alg->caam,
-                                alg->setkey == aead_setkey);
+        return caam_init_common(ctx, &caam_alg->caam, !caam_alg->caam.nodkp);
 }
 
 static void caam_exit_common(struct caam_ctx *ctx)

@@ -42,6 +42,7 @@ struct caam_alg_entry {
         int class2_alg_type;
         bool rfc3686;
         bool geniv;
+        bool nodkp;
 };
 
 struct caam_aead_alg {

@@ -1480,7 +1481,7 @@ static int caam_cra_init_aead(struct crypto_aead *tfm)
 
         crypto_aead_set_reqsize(tfm, sizeof(struct caam_request));
         return caam_cra_init(crypto_aead_ctx(tfm), &caam_alg->caam,
-                             alg->setkey == aead_setkey);
+                             !caam_alg->caam.nodkp);
 }
 
 static void caam_exit_common(struct caam_ctx *ctx)

@@ -1641,6 +1642,7 @@ static struct caam_aead_alg driver_aeads[] = {
                 },
                 .caam = {
                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
+                        .nodkp = true,
                 },
         },
         {

@@ -1659,6 +1661,7 @@ static struct caam_aead_alg driver_aeads[] = {
                 },
                 .caam = {
                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
+                        .nodkp = true,
                 },
         },
         /* Galois Counter Mode */

@@ -1678,6 +1681,7 @@ static struct caam_aead_alg driver_aeads[] = {
                 },
                 .caam = {
                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
+                        .nodkp = true,
                 }
         },
         /* single-pass ipsec_esp descriptor */

@@ -2755,6 +2759,7 @@ static struct caam_aead_alg driver_aeads[] = {
                                            OP_ALG_AAI_AEAD,
                         .class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
                                            OP_ALG_AAI_AEAD,
+                        .nodkp = true,
                 },
         },
         {

@@ -2777,6 +2782,7 @@ static struct caam_aead_alg driver_aeads[] = {
                                            OP_ALG_AAI_AEAD,
                         .class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
                                            OP_ALG_AAI_AEAD,
+                        .nodkp = true,
                 },
         },
         {

@@ -22,7 +22,7 @@ void caam_dump_sg(const char *level, const char *prefix_str, int prefix_type,
         size_t len;
         void *buf;
 
-        for (it = sg; it && tlen > 0 ; it = sg_next(sg)) {
+        for (it = sg; it && tlen > 0 ; it = sg_next(it)) {
                 /*
                  * make sure the scatterlist's page
                  * has a valid virtual memory mapping

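The caam_dump_sg change above is a stuck-iterator fix: sg_next(sg) keeps returning the element after the list head, so the cursor `it` never moves past the second entry. A minimal user-space sketch of the same pattern, using a hypothetical linked node type in place of struct scatterlist and sg_next():

#include <stdio.h>

/* invented stand-in for struct scatterlist; node_next() plays the role of sg_next() */
struct node {
        int length;
        struct node *next;
};

static struct node *node_next(struct node *n)
{
        return n->next;
}

int main(void)
{
        struct node c = { 8, NULL };
        struct node b = { 16, &c };
        struct node a = { 32, &b };
        int tlen = 56;

        /*
         * Buggy form: "it = node_next(&a)" re-evaluates the head on every
         * pass, so the cursor stays stuck on 'b' and the later entries are
         * never reached.  The fixed form advances from the current element:
         */
        for (struct node *it = &a; it && tlen > 0; it = node_next(it)) {
                printf("entry of %d bytes\n", it->length);
                tlen -= it->length;
        }
        return 0;
}
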
@@ -213,7 +213,7 @@ static void caam_jr_dequeue(unsigned long devarg)
                 mb();
 
                 /* set done */
-                wr_reg32_relaxed(&jrp->rregs->outring_rmvd, 1);
+                wr_reg32(&jrp->rregs->outring_rmvd, 1);
 
                 jrp->out_ring_read_index = (jrp->out_ring_read_index + 1) &
                                            (JOBR_DEPTH - 1);

@@ -96,14 +96,6 @@ cpu_to_caam(16)
 cpu_to_caam(32)
 cpu_to_caam(64)
 
-static inline void wr_reg32_relaxed(void __iomem *reg, u32 data)
-{
-        if (caam_little_end)
-                writel_relaxed(data, reg);
-        else
-                writel_relaxed(cpu_to_be32(data), reg);
-}
-
 static inline void wr_reg32(void __iomem *reg, u32 data)
 {
         if (caam_little_end)

@@ -200,17 +200,10 @@ void chcr_verify_tag(struct aead_request *req, u8 *input, int *err)
 
 static int chcr_inc_wrcount(struct chcr_dev *dev)
 {
-        int err = 0;
-
-        spin_lock_bh(&dev->lock_chcr_dev);
         if (dev->state == CHCR_DETACH)
-                err = 1;
-        else
-                atomic_inc(&dev->inflight);
-
-        spin_unlock_bh(&dev->lock_chcr_dev);
-
-        return err;
+                return 1;
+        atomic_inc(&dev->inflight);
+        return 0;
 }
 
 static inline void chcr_dec_wrcount(struct chcr_dev *dev)

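The chcr_inc_wrcount() rewrite above drops the spin_lock_bh()/spin_unlock_bh() pair from the submission path; a state check plus an atomic increment is enough, which is what the softlockup fix relies on. A rough user-space model of that pattern with C11 atomics (the type and function names here are illustrative, not the driver's):

#include <stdatomic.h>
#include <stdio.h>

enum dev_state { DEV_ATTACHED, DEV_DETACHED };

struct dev {
        enum dev_state state;
        atomic_int inflight;            /* counterpart of dev->inflight */
};

/* submission path: no spinlock, just a state check and an atomic increment */
static int inc_wrcount(struct dev *d)
{
        if (d->state == DEV_DETACHED)
                return 1;               /* reject new work */
        atomic_fetch_add(&d->inflight, 1);
        return 0;
}

static void dec_wrcount(struct dev *d)
{
        atomic_fetch_sub(&d->inflight, 1);
}

int main(void)
{
        struct dev d = { DEV_ATTACHED, 0 };

        if (!inc_wrcount(&d)) {
                /* ... submit and complete one request ... */
                dec_wrcount(&d);
        }
        printf("inflight now %d\n", atomic_load(&d.inflight));
        return 0;
}
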
@@ -1101,8 +1094,8 @@ static int chcr_final_cipher_iv(struct ablkcipher_request *req,
         int ret = 0;
 
         if (subtype == CRYPTO_ALG_SUB_TYPE_CTR)
-                ctr_add_iv(iv, req->info, (reqctx->processed /
-                           AES_BLOCK_SIZE));
+                ctr_add_iv(iv, req->info, DIV_ROUND_UP(reqctx->processed,
+                                                       AES_BLOCK_SIZE));
         else if (subtype == CRYPTO_ALG_SUB_TYPE_XTS)
                 ret = chcr_update_tweak(req, iv, 1);
         else if (subtype == CRYPTO_ALG_SUB_TYPE_CBC) {

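The ctr_add_iv() change above rounds up instead of truncating, so a trailing partial block also advances the CTR counter. A stand-alone check of the arithmetic, with the rounding macro written out the way the kernel's DIV_ROUND_UP() is defined and a made-up byte count for illustration:

#include <stdio.h>

#define AES_BLOCK_SIZE  16
/* same rounding the kernel's DIV_ROUND_UP() macro performs */
#define DIV_ROUND_UP(n, d)      (((n) + (d) - 1) / (d))

int main(void)
{
        unsigned int processed = 20;    /* one full block plus 4 bytes */

        /* old behaviour: the partial block is not counted */
        printf("plain division: %u\n", processed / AES_BLOCK_SIZE);                  /* 1 */
        /* fixed behaviour: the incomplete block also consumed a counter value */
        printf("DIV_ROUND_UP : %u\n", DIV_ROUND_UP(processed, AES_BLOCK_SIZE));      /* 2 */
        return 0;
}
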
@@ -243,15 +243,11 @@ static void chcr_detach_device(struct uld_ctx *u_ctx)
 {
         struct chcr_dev *dev = &u_ctx->dev;
 
-        spin_lock_bh(&dev->lock_chcr_dev);
         if (dev->state == CHCR_DETACH) {
-                spin_unlock_bh(&dev->lock_chcr_dev);
                 pr_debug("Detached Event received for already detach device\n");
                 return;
         }
         dev->state = CHCR_DETACH;
-        spin_unlock_bh(&dev->lock_chcr_dev);
-
         if (atomic_read(&dev->inflight) != 0) {
                 schedule_delayed_work(&dev->detach_work, WQ_DETACH_TM);
                 wait_for_completion(&dev->detach_comp);

@@ -575,7 +575,8 @@ inline void *chcr_crypto_wreq(struct sk_buff *skb,
         if (unlikely(credits < ETHTXQ_STOP_THRES)) {
                 netif_tx_stop_queue(q->txq);
                 q->q.stops++;
-                wr_mid |= FW_WR_EQUEQ_F | FW_WR_EQUIQ_F;
+                if (!q->dbqt)
+                        wr_mid |= FW_WR_EQUEQ_F | FW_WR_EQUIQ_F;
         }
         wr_mid |= FW_ULPTX_WR_DATA_F;
         wr->wreq.flowid_len16 = htonl(wr_mid);