Diffstat (limited to 'drivers/crypto/vmx/aes_xts.c')
-rw-r--r--  drivers/crypto/vmx/aes_xts.c  32
1 file changed, 16 insertions(+), 16 deletions(-)
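The patch below converts the VMX AES-XTS driver's fallback from the legacy blkcipher interface to the skcipher API: the fallback transform is allocated with crypto_alloc_skcipher(), keyed with crypto_skcipher_setkey(), and, when the driver is entered from interrupt context, driven through an on-stack skcipher request instead of a blkcipher_desc. As a rough orientation only, the sketch that follows shows the same allocation and request pattern in isolation; the context struct and function names (my_fallback_ctx, my_fallback_init, my_fallback_crypt) are illustrative and not part of the patch.

/*
 * Illustrative sketch only -- not part of the patch.  It uses the same
 * skcipher calls the diff below introduces.
 */
#include <crypto/skcipher.h>
#include <linux/err.h>

struct my_fallback_ctx {			/* hypothetical context struct */
	struct crypto_skcipher *fallback;
};

static int my_fallback_init(struct my_fallback_ctx *ctx, const char *alg)
{
	struct crypto_skcipher *fb;

	/* Ask for a synchronous implementation to fall back to. */
	fb = crypto_alloc_skcipher(alg, 0,
				   CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fb))
		return PTR_ERR(fb);

	ctx->fallback = fb;
	return 0;
}

static int my_fallback_crypt(struct my_fallback_ctx *ctx,
			     struct scatterlist *dst,
			     struct scatterlist *src,
			     unsigned int nbytes, void *iv,
			     bool enc, u32 flags)
{
	int ret;

	/* On-stack request, as the patch uses on its in_interrupt() path. */
	SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);

	skcipher_request_set_tfm(req, ctx->fallback);
	skcipher_request_set_callback(req, flags, NULL, NULL);
	skcipher_request_set_crypt(req, src, dst, nbytes, iv);
	ret = enc ? crypto_skcipher_encrypt(req) :
		    crypto_skcipher_decrypt(req);
	skcipher_request_zero(req);

	return ret;
}

Passing CRYPTO_ALG_ASYNC in the mask (with a type of 0) asks the crypto API for a synchronous skcipher, which appears to be why the patch can use an on-stack request with a NULL completion callback.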
diff --git a/drivers/crypto/vmx/aes_xts.c b/drivers/crypto/vmx/aes_xts.c
index 24353ec336c5..6adc9290557a 100644
--- a/drivers/crypto/vmx/aes_xts.c
+++ b/drivers/crypto/vmx/aes_xts.c
@@ -28,11 +28,12 @@
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/xts.h>
+#include <crypto/skcipher.h>
#include "aesp8-ppc.h"
struct p8_aes_xts_ctx {
- struct crypto_blkcipher *fallback;
+ struct crypto_skcipher *fallback;
struct aes_key enc_key;
struct aes_key dec_key;
struct aes_key tweak_key;
@@ -41,7 +42,7 @@ struct p8_aes_xts_ctx {
static int p8_aes_xts_init(struct crypto_tfm *tfm)
{
const char *alg;
- struct crypto_blkcipher *fallback;
+ struct crypto_skcipher *fallback;
struct p8_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);
if (!(alg = crypto_tfm_alg_name(tfm))) {
@@ -49,8 +50,8 @@ static int p8_aes_xts_init(struct crypto_tfm *tfm)
return -ENOENT;
}
- fallback =
- crypto_alloc_blkcipher(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
+ fallback = crypto_alloc_skcipher(alg, 0,
+ CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
if (IS_ERR(fallback)) {
printk(KERN_ERR
"Failed to allocate transformation for '%s': %ld\n",
@@ -58,11 +59,11 @@ static int p8_aes_xts_init(struct crypto_tfm *tfm)
return PTR_ERR(fallback);
}
printk(KERN_INFO "Using '%s' as fallback implementation.\n",
- crypto_tfm_alg_driver_name((struct crypto_tfm *) fallback));
+ crypto_skcipher_driver_name(fallback));
- crypto_blkcipher_set_flags(
+ crypto_skcipher_set_flags(
fallback,
- crypto_blkcipher_get_flags((struct crypto_blkcipher *)tfm));
+ crypto_skcipher_get_flags((struct crypto_skcipher *)tfm));
ctx->fallback = fallback;
return 0;
@@ -73,7 +74,7 @@ static void p8_aes_xts_exit(struct crypto_tfm *tfm)
struct p8_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);
if (ctx->fallback) {
- crypto_free_blkcipher(ctx->fallback);
+ crypto_free_skcipher(ctx->fallback);
ctx->fallback = NULL;
}
}
@@ -98,7 +99,7 @@ static int p8_aes_xts_setkey(struct crypto_tfm *tfm, const u8 *key,
pagefault_enable();
preempt_enable();
- ret += crypto_blkcipher_setkey(ctx->fallback, key, keylen);
+ ret += crypto_skcipher_setkey(ctx->fallback, key, keylen);
return ret;
}
@@ -113,15 +114,14 @@ static int p8_aes_xts_crypt(struct blkcipher_desc *desc,
struct blkcipher_walk walk;
struct p8_aes_xts_ctx *ctx =
crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));
- struct blkcipher_desc fallback_desc = {
- .tfm = ctx->fallback,
- .info = desc->info,
- .flags = desc->flags
- };
if (in_interrupt()) {
- ret = enc ? crypto_blkcipher_encrypt(&fallback_desc, dst, src, nbytes) :
- crypto_blkcipher_decrypt(&fallback_desc, dst, src, nbytes);
+ SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
+ skcipher_request_set_tfm(req, ctx->fallback);
+ skcipher_request_set_callback(req, desc->flags, NULL, NULL);
+ skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
+ ret = enc? crypto_skcipher_encrypt(req) : crypto_skcipher_decrypt(req);
+ skcipher_request_zero(req);
} else {
preempt_disable();
pagefault_disable();