mirror of https://gitee.com/openkylin/linux.git
crypto: vmx: Only call enable_kernel_vsx()
With the recent change to enable_kernel_vsx(), we no longer need to call enable_kernel_fp() and enable_kernel_altivec().

Signed-off-by: Anton Blanchard <anton@samba.org>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
parent a7d623d4d0
commit 1552cd703c
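For context, here is a minimal sketch of the common critical-section pattern these paths are left with after the change (the CTR path simply omits the preempt_disable()/preempt_enable() pair). The helper p8_run_vsx() and its crypto_op callback are hypothetical illustrations, not driver code; the premise, taken from the commit message, is that enable_kernel_vsx() now also makes the FP and Altivec register state available to the kernel, so the single call is enough.

#include <linux/preempt.h>	/* preempt_disable()/preempt_enable() */
#include <linux/uaccess.h>	/* pagefault_disable()/pagefault_enable() */
#include <asm/switch_to.h>	/* enable_kernel_vsx() on powerpc */

/*
 * Minimal sketch (hypothetical helper, not part of the driver): run one
 * of the assembly crypto routines with the vector unit enabled for
 * kernel use.
 */
static void p8_run_vsx(void (*crypto_op)(void *arg), void *arg)
{
	preempt_disable();
	pagefault_disable();
	enable_kernel_vsx();	/* single call; enable_kernel_fp()/enable_kernel_altivec() no longer needed */
	crypto_op(arg);		/* e.g. a wrapper around aes_p8_encrypt() or gcm_ghash_p8() */
	pagefault_enable();
	preempt_enable();
}

In the driver itself the calls are made inline at each site, which is exactly what the hunks below show being trimmed down to the single enable_kernel_vsx().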
drivers/crypto/vmx/aes.c
@@ -83,7 +83,6 @@ static int p8_aes_setkey(struct crypto_tfm *tfm, const u8 *key,
 
 	preempt_disable();
 	pagefault_disable();
-	enable_kernel_altivec();
 	enable_kernel_vsx();
 	ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
 	ret += aes_p8_set_decrypt_key(key, keylen * 8, &ctx->dec_key);
@@ -103,7 +102,6 @@ static void p8_aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 	} else {
 		preempt_disable();
 		pagefault_disable();
-		enable_kernel_altivec();
 		enable_kernel_vsx();
 		aes_p8_encrypt(src, dst, &ctx->enc_key);
 		pagefault_enable();
@@ -120,7 +118,6 @@ static void p8_aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 	} else {
 		preempt_disable();
 		pagefault_disable();
-		enable_kernel_altivec();
 		enable_kernel_vsx();
 		aes_p8_decrypt(src, dst, &ctx->dec_key);
 		pagefault_enable();

drivers/crypto/vmx/aes_cbc.c
@@ -84,7 +84,6 @@ static int p8_aes_cbc_setkey(struct crypto_tfm *tfm, const u8 *key,
 
 	preempt_disable();
 	pagefault_disable();
-	enable_kernel_altivec();
 	enable_kernel_vsx();
 	ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
 	ret += aes_p8_set_decrypt_key(key, keylen * 8, &ctx->dec_key);
@@ -115,7 +114,6 @@ static int p8_aes_cbc_encrypt(struct blkcipher_desc *desc,
 	} else {
 		preempt_disable();
 		pagefault_disable();
-		enable_kernel_altivec();
 		enable_kernel_vsx();
 
 		blkcipher_walk_init(&walk, dst, src, nbytes);
@@ -156,7 +154,6 @@ static int p8_aes_cbc_decrypt(struct blkcipher_desc *desc,
 	} else {
 		preempt_disable();
 		pagefault_disable();
-		enable_kernel_altivec();
 		enable_kernel_vsx();
 
 		blkcipher_walk_init(&walk, dst, src, nbytes);

drivers/crypto/vmx/aes_ctr.c
@@ -81,7 +81,6 @@ static int p8_aes_ctr_setkey(struct crypto_tfm *tfm, const u8 *key,
 	struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
 
 	pagefault_disable();
-	enable_kernel_altivec();
 	enable_kernel_vsx();
 	ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
 	pagefault_enable();
@@ -100,7 +99,6 @@ static void p8_aes_ctr_final(struct p8_aes_ctr_ctx *ctx,
 	unsigned int nbytes = walk->nbytes;
 
 	pagefault_disable();
-	enable_kernel_altivec();
 	enable_kernel_vsx();
 	aes_p8_encrypt(ctrblk, keystream, &ctx->enc_key);
 	pagefault_enable();
@@ -133,7 +131,6 @@ static int p8_aes_ctr_crypt(struct blkcipher_desc *desc,
 	ret = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
 	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
 		pagefault_disable();
-		enable_kernel_altivec();
 		enable_kernel_vsx();
 		aes_p8_ctr32_encrypt_blocks(walk.src.virt.addr,
 					    walk.dst.virt.addr,

drivers/crypto/vmx/ghash.c
@@ -118,9 +118,7 @@ static int p8_ghash_setkey(struct crypto_shash *tfm, const u8 *key,
 
 	preempt_disable();
 	pagefault_disable();
-	enable_kernel_altivec();
 	enable_kernel_vsx();
-	enable_kernel_fp();
 	gcm_init_p8(ctx->htable, (const u64 *) key);
 	pagefault_enable();
 	preempt_enable();
@@ -149,9 +147,7 @@ static int p8_ghash_update(struct shash_desc *desc,
 			       GHASH_DIGEST_SIZE - dctx->bytes);
 			preempt_disable();
 			pagefault_disable();
-			enable_kernel_altivec();
 			enable_kernel_vsx();
-			enable_kernel_fp();
 			gcm_ghash_p8(dctx->shash, ctx->htable,
 				     dctx->buffer, GHASH_DIGEST_SIZE);
 			pagefault_enable();
@@ -164,9 +160,7 @@ static int p8_ghash_update(struct shash_desc *desc,
 		if (len) {
 			preempt_disable();
 			pagefault_disable();
-			enable_kernel_altivec();
 			enable_kernel_vsx();
-			enable_kernel_fp();
 			gcm_ghash_p8(dctx->shash, ctx->htable, src, len);
 			pagefault_enable();
 			preempt_enable();
@@ -195,9 +189,7 @@ static int p8_ghash_final(struct shash_desc *desc, u8 *out)
 			dctx->buffer[i] = 0;
 		preempt_disable();
 		pagefault_disable();
-		enable_kernel_altivec();
 		enable_kernel_vsx();
-		enable_kernel_fp();
 		gcm_ghash_p8(dctx->shash, ctx->htable,
 			     dctx->buffer, GHASH_DIGEST_SIZE);
 		pagefault_enable();