Diffstat (limited to 'arch/s390/crypto')
-rw-r--r--  arch/s390/crypto/aes_s390.c     |  60
-rw-r--r--  arch/s390/crypto/des_s390.c     | 360
-rw-r--r--  arch/s390/crypto/sha256_s390.c  |  29
3 files changed, 337 insertions, 112 deletions
diff --git a/arch/s390/crypto/aes_s390.c b/arch/s390/crypto/aes_s390.c
index 7a1033d8e00f..c5ca2dc5d428 100644
--- a/arch/s390/crypto/aes_s390.c
+++ b/arch/s390/crypto/aes_s390.c
@@ -114,80 +114,108 @@ static unsigned int aes_encrypt_ecb(const struct cipher_desc *desc, u8 *out,
const u8 *in, unsigned int nbytes)
{
struct s390_aes_ctx *sctx = crypto_tfm_ctx(desc->tfm);
+ int ret;
+
+ /* only use complete blocks */
+ nbytes &= ~(AES_BLOCK_SIZE - 1);
switch (sctx->key_len) {
case 16:
- crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in, nbytes);
+ ret = crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in, nbytes);
+ BUG_ON((ret < 0) || (ret != nbytes));
break;
case 24:
- crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in, nbytes);
+ ret = crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in, nbytes);
+ BUG_ON((ret < 0) || (ret != nbytes));
break;
case 32:
- crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in, nbytes);
+ ret = crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in, nbytes);
+ BUG_ON((ret < 0) || (ret != nbytes));
break;
}
- return nbytes & ~(AES_BLOCK_SIZE - 1);
+ return nbytes;
}
static unsigned int aes_decrypt_ecb(const struct cipher_desc *desc, u8 *out,
const u8 *in, unsigned int nbytes)
{
struct s390_aes_ctx *sctx = crypto_tfm_ctx(desc->tfm);
+ int ret;
+
+ /* only use complete blocks */
+ nbytes &= ~(AES_BLOCK_SIZE - 1);
switch (sctx->key_len) {
case 16:
- crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in, nbytes);
+ ret = crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in, nbytes);
+ BUG_ON((ret < 0) || (ret != nbytes));
break;
case 24:
- crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in, nbytes);
+ ret = crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in, nbytes);
+ BUG_ON((ret < 0) || (ret != nbytes));
break;
case 32:
- crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in, nbytes);
+ ret = crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in, nbytes);
+ BUG_ON((ret < 0) || (ret != nbytes));
break;
}
- return nbytes & ~(AES_BLOCK_SIZE - 1);
+ return nbytes;
}
static unsigned int aes_encrypt_cbc(const struct cipher_desc *desc, u8 *out,
const u8 *in, unsigned int nbytes)
{
struct s390_aes_ctx *sctx = crypto_tfm_ctx(desc->tfm);
+ int ret;
+
+ /* only use complete blocks */
+ nbytes &= ~(AES_BLOCK_SIZE - 1);
memcpy(&sctx->iv, desc->info, AES_BLOCK_SIZE);
switch (sctx->key_len) {
case 16:
- crypt_s390_kmc(KMC_AES_128_ENCRYPT, &sctx->iv, out, in, nbytes);
+ ret = crypt_s390_kmc(KMC_AES_128_ENCRYPT, &sctx->iv, out, in, nbytes);
+ BUG_ON((ret < 0) || (ret != nbytes));
break;
case 24:
- crypt_s390_kmc(KMC_AES_192_ENCRYPT, &sctx->iv, out, in, nbytes);
+ ret = crypt_s390_kmc(KMC_AES_192_ENCRYPT, &sctx->iv, out, in, nbytes);
+ BUG_ON((ret < 0) || (ret != nbytes));
break;
case 32:
- crypt_s390_kmc(KMC_AES_256_ENCRYPT, &sctx->iv, out, in, nbytes);
+ ret = crypt_s390_kmc(KMC_AES_256_ENCRYPT, &sctx->iv, out, in, nbytes);
+ BUG_ON((ret < 0) || (ret != nbytes));
break;
}
memcpy(desc->info, &sctx->iv, AES_BLOCK_SIZE);
- return nbytes & ~(AES_BLOCK_SIZE - 1);
+ return nbytes;
}
static unsigned int aes_decrypt_cbc(const struct cipher_desc *desc, u8 *out,
const u8 *in, unsigned int nbytes)
{
struct s390_aes_ctx *sctx = crypto_tfm_ctx(desc->tfm);
+ int ret;
+
+ /* only use complete blocks */
+ nbytes &= ~(AES_BLOCK_SIZE - 1);
memcpy(&sctx->iv, desc->info, AES_BLOCK_SIZE);
switch (sctx->key_len) {
case 16:
- crypt_s390_kmc(KMC_AES_128_DECRYPT, &sctx->iv, out, in, nbytes);
+ ret = crypt_s390_kmc(KMC_AES_128_DECRYPT, &sctx->iv, out, in, nbytes);
+ BUG_ON((ret < 0) || (ret != nbytes));
break;
case 24:
- crypt_s390_kmc(KMC_AES_192_DECRYPT, &sctx->iv, out, in, nbytes);
+ ret = crypt_s390_kmc(KMC_AES_192_DECRYPT, &sctx->iv, out, in, nbytes);
+ BUG_ON((ret < 0) || (ret != nbytes));
break;
case 32:
- crypt_s390_kmc(KMC_AES_256_DECRYPT, &sctx->iv, out, in, nbytes);
+ ret = crypt_s390_kmc(KMC_AES_256_DECRYPT, &sctx->iv, out, in, nbytes);
+ BUG_ON((ret < 0) || (ret != nbytes));
break;
}
- return nbytes & ~(AES_BLOCK_SIZE - 1);
+ return nbytes;
}
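
Every one of the new ECB/CBC helpers in this file follows the same pattern: round nbytes down to a whole number of AES blocks before invoking the CPACF primitive, then BUG_ON() if the instruction did not consume exactly that many bytes. A minimal user-space sketch of the rounding step (stand-alone and illustrative only; nothing below is kernel API):

/* nbytes & ~(BLOCK - 1) rounds down to a multiple of a power-of-two BLOCK */
#include <assert.h>
#include <stdio.h>

#define AES_BLOCK_SIZE 16

static unsigned int round_to_blocks(unsigned int nbytes)
{
	return nbytes & ~(AES_BLOCK_SIZE - 1);
}

int main(void)
{
	assert(round_to_blocks(0)  == 0);
	assert(round_to_blocks(15) == 0);	/* partial block is dropped */
	assert(round_to_blocks(16) == 16);
	assert(round_to_blocks(33) == 32);
	printf("only complete blocks reach the hardware instruction\n");
	return 0;
}
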
diff --git a/arch/s390/crypto/des_s390.c b/arch/s390/crypto/des_s390.c
index a38bb2a3eef6..e3c37aa0a199 100644
--- a/arch/s390/crypto/des_s390.c
+++ b/arch/s390/crypto/des_s390.c
@@ -15,10 +15,8 @@
*/
#include <linux/init.h>
#include <linux/module.h>
-#include <linux/mm.h>
-#include <linux/errno.h>
-#include <asm/scatterlist.h>
#include <linux/crypto.h>
+
#include "crypt_s390.h"
#include "crypto_des.h"
@@ -46,38 +44,92 @@ struct crypt_s390_des3_192_ctx {
u8 key[DES3_192_KEY_SIZE];
};
-static int
-des_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags)
+static int des_setkey(void *ctx, const u8 *key, unsigned int keylen,
+ u32 *flags)
{
- struct crypt_s390_des_ctx *dctx;
+ struct crypt_s390_des_ctx *dctx = ctx;
int ret;
- dctx = ctx;
- //test if key is valid (not a weak key)
+ /* test if key is valid (not a weak key) */
ret = crypto_des_check_key(key, keylen, flags);
- if (ret == 0){
+ if (ret == 0)
memcpy(dctx->key, key, keylen);
- }
return ret;
}
+static void des_encrypt(void *ctx, u8 *out, const u8 *in)
+{
+ struct crypt_s390_des_ctx *dctx = ctx;
+
+ crypt_s390_km(KM_DEA_ENCRYPT, dctx->key, out, in, DES_BLOCK_SIZE);
+}
+
+static void des_decrypt(void *ctx, u8 *out, const u8 *in)
+{
+ struct crypt_s390_des_ctx *dctx = ctx;
+
+ crypt_s390_km(KM_DEA_DECRYPT, dctx->key, out, in, DES_BLOCK_SIZE);
+}
+
+static unsigned int des_encrypt_ecb(const struct cipher_desc *desc, u8 *out,
+ const u8 *in, unsigned int nbytes)
+{
+ struct crypt_s390_des_ctx *sctx = crypto_tfm_ctx(desc->tfm);
+ int ret;
+
+ /* only use complete blocks */
+ nbytes &= ~(DES_BLOCK_SIZE - 1);
+ ret = crypt_s390_km(KM_DEA_ENCRYPT, sctx->key, out, in, nbytes);
+ BUG_ON((ret < 0) || (ret != nbytes));
+
+ return nbytes;
+}
-static void
-des_encrypt(void *ctx, u8 *dst, const u8 *src)
+static unsigned int des_decrypt_ecb(const struct cipher_desc *desc, u8 *out,
+ const u8 *in, unsigned int nbytes)
{
- struct crypt_s390_des_ctx *dctx;
+ struct crypt_s390_des_ctx *sctx = crypto_tfm_ctx(desc->tfm);
+ int ret;
+
+ /* only use complete blocks */
+ nbytes &= ~(DES_BLOCK_SIZE - 1);
+ ret = crypt_s390_km(KM_DEA_DECRYPT, sctx->key, out, in, nbytes);
+ BUG_ON((ret < 0) || (ret != nbytes));
- dctx = ctx;
- crypt_s390_km(KM_DEA_ENCRYPT, dctx->key, dst, src, DES_BLOCK_SIZE);
+ return nbytes;
}
-static void
-des_decrypt(void *ctx, u8 *dst, const u8 *src)
+static unsigned int des_encrypt_cbc(const struct cipher_desc *desc, u8 *out,
+ const u8 *in, unsigned int nbytes)
{
- struct crypt_s390_des_ctx *dctx;
+ struct crypt_s390_des_ctx *sctx = crypto_tfm_ctx(desc->tfm);
+ int ret;
- dctx = ctx;
- crypt_s390_km(KM_DEA_DECRYPT, dctx->key, dst, src, DES_BLOCK_SIZE);
+ /* only use complete blocks */
+ nbytes &= ~(DES_BLOCK_SIZE - 1);
+
+ memcpy(sctx->iv, desc->info, DES_BLOCK_SIZE);
+ ret = crypt_s390_kmc(KMC_DEA_ENCRYPT, &sctx->iv, out, in, nbytes);
+ BUG_ON((ret < 0) || (ret != nbytes));
+
+ memcpy(desc->info, sctx->iv, DES_BLOCK_SIZE);
+ return nbytes;
+}
+
+static unsigned int des_decrypt_cbc(const struct cipher_desc *desc, u8 *out,
+ const u8 *in, unsigned int nbytes)
+{
+ struct crypt_s390_des_ctx *sctx = crypto_tfm_ctx(desc->tfm);
+ int ret;
+
+ /* only use complete blocks */
+ nbytes &= ~(DES_BLOCK_SIZE - 1);
+
+ memcpy(&sctx->iv, desc->info, DES_BLOCK_SIZE);
+ ret = crypt_s390_kmc(KMC_DEA_DECRYPT, &sctx->iv, out, in, nbytes);
+ BUG_ON((ret < 0) || (ret != nbytes));
+
+ return nbytes;
}
static struct crypto_alg des_alg = {
@@ -87,12 +139,19 @@ static struct crypto_alg des_alg = {
.cra_ctxsize = sizeof(struct crypt_s390_des_ctx),
.cra_module = THIS_MODULE,
.cra_list = LIST_HEAD_INIT(des_alg.cra_list),
- .cra_u = { .cipher = {
- .cia_min_keysize = DES_KEY_SIZE,
- .cia_max_keysize = DES_KEY_SIZE,
- .cia_setkey = des_setkey,
- .cia_encrypt = des_encrypt,
- .cia_decrypt = des_decrypt } }
+ .cra_u = {
+ .cipher = {
+ .cia_min_keysize = DES_KEY_SIZE,
+ .cia_max_keysize = DES_KEY_SIZE,
+ .cia_setkey = des_setkey,
+ .cia_encrypt = des_encrypt,
+ .cia_decrypt = des_decrypt,
+ .cia_encrypt_ecb = des_encrypt_ecb,
+ .cia_decrypt_ecb = des_decrypt_ecb,
+ .cia_encrypt_cbc = des_encrypt_cbc,
+ .cia_decrypt_cbc = des_decrypt_cbc,
+ }
+ }
};
/*
@@ -107,20 +166,18 @@ static struct crypto_alg des_alg = {
* Implementers MUST reject keys that exhibit this property.
*
*/
-static int
-des3_128_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags)
+static int des3_128_setkey(void *ctx, const u8 *key, unsigned int keylen,
+ u32 *flags)
{
int i, ret;
- struct crypt_s390_des3_128_ctx *dctx;
+ struct crypt_s390_des3_128_ctx *dctx = ctx;
const u8* temp_key = key;
- dctx = ctx;
if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE))) {
-
*flags |= CRYPTO_TFM_RES_BAD_KEY_SCHED;
return -EINVAL;
}
- for (i = 0; i < 2; i++, temp_key += DES_KEY_SIZE) {
+ for (i = 0; i < 2; i++, temp_key += DES_KEY_SIZE) {
ret = crypto_des_check_key(temp_key, DES_KEY_SIZE, flags);
if (ret < 0)
return ret;
@@ -129,24 +186,85 @@ des3_128_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags)
return 0;
}
-static void
-des3_128_encrypt(void *ctx, u8 *dst, const u8 *src)
+static void des3_128_encrypt(void *ctx, u8 *dst, const u8 *src)
{
- struct crypt_s390_des3_128_ctx *dctx;
+ struct crypt_s390_des3_128_ctx *dctx = ctx;
- dctx = ctx;
crypt_s390_km(KM_TDEA_128_ENCRYPT, dctx->key, dst, (void*)src,
- DES3_128_BLOCK_SIZE);
+ DES3_128_BLOCK_SIZE);
}
-static void
-des3_128_decrypt(void *ctx, u8 *dst, const u8 *src)
+static void des3_128_decrypt(void *ctx, u8 *dst, const u8 *src)
{
- struct crypt_s390_des3_128_ctx *dctx;
+ struct crypt_s390_des3_128_ctx *dctx = ctx;
- dctx = ctx;
crypt_s390_km(KM_TDEA_128_DECRYPT, dctx->key, dst, (void*)src,
- DES3_128_BLOCK_SIZE);
+ DES3_128_BLOCK_SIZE);
+}
+
+static unsigned int des3_128_encrypt_ecb(const struct cipher_desc *desc,
+ u8 *out, const u8 *in,
+ unsigned int nbytes)
+{
+ struct crypt_s390_des3_128_ctx *sctx = crypto_tfm_ctx(desc->tfm);
+ int ret;
+
+ /* only use complete blocks */
+ nbytes &= ~(DES3_128_BLOCK_SIZE - 1);
+ ret = crypt_s390_km(KM_TDEA_128_ENCRYPT, sctx->key, out, in, nbytes);
+ BUG_ON((ret < 0) || (ret != nbytes));
+
+ return nbytes;
+}
+
+static unsigned int des3_128_decrypt_ecb(const struct cipher_desc *desc,
+ u8 *out, const u8 *in,
+ unsigned int nbytes)
+{
+ struct crypt_s390_des3_128_ctx *sctx = crypto_tfm_ctx(desc->tfm);
+ int ret;
+
+ /* only use complete blocks */
+ nbytes &= ~(DES3_128_BLOCK_SIZE - 1);
+ ret = crypt_s390_km(KM_TDEA_128_DECRYPT, sctx->key, out, in, nbytes);
+ BUG_ON((ret < 0) || (ret != nbytes));
+
+ return nbytes;
+}
+
+static unsigned int des3_128_encrypt_cbc(const struct cipher_desc *desc,
+ u8 *out, const u8 *in,
+ unsigned int nbytes)
+{
+ struct crypt_s390_des3_128_ctx *sctx = crypto_tfm_ctx(desc->tfm);
+ int ret;
+
+ /* only use complete blocks */
+ nbytes &= ~(DES3_128_BLOCK_SIZE - 1);
+
+ memcpy(sctx->iv, desc->info, DES3_128_BLOCK_SIZE);
+ ret = crypt_s390_kmc(KMC_TDEA_128_ENCRYPT, &sctx->iv, out, in, nbytes);
+ BUG_ON((ret < 0) || (ret != nbytes));
+
+ memcpy(desc->info, sctx->iv, DES3_128_BLOCK_SIZE);
+ return nbytes;
+}
+
+static unsigned int des3_128_decrypt_cbc(const struct cipher_desc *desc,
+ u8 *out, const u8 *in,
+ unsigned int nbytes)
+{
+ struct crypt_s390_des3_128_ctx *sctx = crypto_tfm_ctx(desc->tfm);
+ int ret;
+
+ /* only use complete blocks */
+ nbytes &= ~(DES3_128_BLOCK_SIZE - 1);
+
+ memcpy(&sctx->iv, desc->info, DES3_128_BLOCK_SIZE);
+ ret = crypt_s390_kmc(KMC_TDEA_128_DECRYPT, &sctx->iv, out, in, nbytes);
+ BUG_ON((ret < 0) || (ret != nbytes));
+
+ return nbytes;
}
static struct crypto_alg des3_128_alg = {
@@ -156,12 +274,19 @@ static struct crypto_alg des3_128_alg = {
.cra_ctxsize = sizeof(struct crypt_s390_des3_128_ctx),
.cra_module = THIS_MODULE,
.cra_list = LIST_HEAD_INIT(des3_128_alg.cra_list),
- .cra_u = { .cipher = {
- .cia_min_keysize = DES3_128_KEY_SIZE,
- .cia_max_keysize = DES3_128_KEY_SIZE,
- .cia_setkey = des3_128_setkey,
- .cia_encrypt = des3_128_encrypt,
- .cia_decrypt = des3_128_decrypt } }
+ .cra_u = {
+ .cipher = {
+ .cia_min_keysize = DES3_128_KEY_SIZE,
+ .cia_max_keysize = DES3_128_KEY_SIZE,
+ .cia_setkey = des3_128_setkey,
+ .cia_encrypt = des3_128_encrypt,
+ .cia_decrypt = des3_128_decrypt,
+ .cia_encrypt_ecb = des3_128_encrypt_ecb,
+ .cia_decrypt_ecb = des3_128_decrypt_ecb,
+ .cia_encrypt_cbc = des3_128_encrypt_cbc,
+ .cia_decrypt_cbc = des3_128_decrypt_cbc,
+ }
+ }
};
/*
@@ -177,50 +302,108 @@ static struct crypto_alg des3_128_alg = {
* property.
*
*/
-static int
-des3_192_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags)
+static int des3_192_setkey(void *ctx, const u8 *key, unsigned int keylen,
+ u32 *flags)
{
int i, ret;
- struct crypt_s390_des3_192_ctx *dctx;
- const u8* temp_key;
+ struct crypt_s390_des3_192_ctx *dctx = ctx;
+ const u8* temp_key = key;
- dctx = ctx;
- temp_key = key;
if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE) &&
memcmp(&key[DES_KEY_SIZE], &key[DES_KEY_SIZE * 2],
- DES_KEY_SIZE))) {
+ DES_KEY_SIZE))) {
*flags |= CRYPTO_TFM_RES_BAD_KEY_SCHED;
return -EINVAL;
}
for (i = 0; i < 3; i++, temp_key += DES_KEY_SIZE) {
ret = crypto_des_check_key(temp_key, DES_KEY_SIZE, flags);
- if (ret < 0){
+ if (ret < 0)
return ret;
- }
}
memcpy(dctx->key, key, keylen);
return 0;
}
-static void
-des3_192_encrypt(void *ctx, u8 *dst, const u8 *src)
+static void des3_192_encrypt(void *ctx, u8 *dst, const u8 *src)
{
- struct crypt_s390_des3_192_ctx *dctx;
+ struct crypt_s390_des3_192_ctx *dctx = ctx;
- dctx = ctx;
crypt_s390_km(KM_TDEA_192_ENCRYPT, dctx->key, dst, (void*)src,
- DES3_192_BLOCK_SIZE);
+ DES3_192_BLOCK_SIZE);
}
-static void
-des3_192_decrypt(void *ctx, u8 *dst, const u8 *src)
+static void des3_192_decrypt(void *ctx, u8 *dst, const u8 *src)
{
- struct crypt_s390_des3_192_ctx *dctx;
+ struct crypt_s390_des3_192_ctx *dctx = ctx;
- dctx = ctx;
crypt_s390_km(KM_TDEA_192_DECRYPT, dctx->key, dst, (void*)src,
- DES3_192_BLOCK_SIZE);
+ DES3_192_BLOCK_SIZE);
+}
+
+static unsigned int des3_192_encrypt_ecb(const struct cipher_desc *desc,
+ u8 *out, const u8 *in,
+ unsigned int nbytes)
+{
+ struct crypt_s390_des3_192_ctx *sctx = crypto_tfm_ctx(desc->tfm);
+ int ret;
+
+ /* only use complete blocks */
+ nbytes &= ~(DES3_192_BLOCK_SIZE - 1);
+ ret = crypt_s390_km(KM_TDEA_192_ENCRYPT, sctx->key, out, in, nbytes);
+ BUG_ON((ret < 0) || (ret != nbytes));
+
+ return nbytes;
+}
+
+static unsigned int des3_192_decrypt_ecb(const struct cipher_desc *desc,
+ u8 *out, const u8 *in,
+ unsigned int nbytes)
+{
+ struct crypt_s390_des3_192_ctx *sctx = crypto_tfm_ctx(desc->tfm);
+ int ret;
+
+ /* only use complete blocks */
+ nbytes &= ~(DES3_192_BLOCK_SIZE - 1);
+ ret = crypt_s390_km(KM_TDEA_192_DECRYPT, sctx->key, out, in, nbytes);
+ BUG_ON((ret < 0) || (ret != nbytes));
+
+ return nbytes;
+}
+
+static unsigned int des3_192_encrypt_cbc(const struct cipher_desc *desc,
+ u8 *out, const u8 *in,
+ unsigned int nbytes)
+{
+ struct crypt_s390_des3_192_ctx *sctx = crypto_tfm_ctx(desc->tfm);
+ int ret;
+
+ /* only use complete blocks */
+ nbytes &= ~(DES3_192_BLOCK_SIZE - 1);
+
+ memcpy(sctx->iv, desc->info, DES3_192_BLOCK_SIZE);
+ ret = crypt_s390_kmc(KMC_TDEA_192_ENCRYPT, &sctx->iv, out, in, nbytes);
+ BUG_ON((ret < 0) || (ret != nbytes));
+
+ memcpy(desc->info, sctx->iv, DES3_192_BLOCK_SIZE);
+ return nbytes;
+}
+
+static unsigned int des3_192_decrypt_cbc(const struct cipher_desc *desc,
+ u8 *out, const u8 *in,
+ unsigned int nbytes)
+{
+ struct crypt_s390_des3_192_ctx *sctx = crypto_tfm_ctx(desc->tfm);
+ int ret;
+
+ /* only use complete blocks */
+ nbytes &= ~(DES3_192_BLOCK_SIZE - 1);
+
+ memcpy(&sctx->iv, desc->info, DES3_192_BLOCK_SIZE);
+ ret = crypt_s390_kmc(KMC_TDEA_192_DECRYPT, &sctx->iv, out, in, nbytes);
+ BUG_ON((ret < 0) || (ret != nbytes));
+
+ return nbytes;
}
static struct crypto_alg des3_192_alg = {
@@ -230,44 +413,43 @@ static struct crypto_alg des3_192_alg = {
.cra_ctxsize = sizeof(struct crypt_s390_des3_192_ctx),
.cra_module = THIS_MODULE,
.cra_list = LIST_HEAD_INIT(des3_192_alg.cra_list),
- .cra_u = { .cipher = {
- .cia_min_keysize = DES3_192_KEY_SIZE,
- .cia_max_keysize = DES3_192_KEY_SIZE,
- .cia_setkey = des3_192_setkey,
- .cia_encrypt = des3_192_encrypt,
- .cia_decrypt = des3_192_decrypt } }
+ .cra_u = {
+ .cipher = {
+ .cia_min_keysize = DES3_192_KEY_SIZE,
+ .cia_max_keysize = DES3_192_KEY_SIZE,
+ .cia_setkey = des3_192_setkey,
+ .cia_encrypt = des3_192_encrypt,
+ .cia_decrypt = des3_192_decrypt,
+ .cia_encrypt_ecb = des3_192_encrypt_ecb,
+ .cia_decrypt_ecb = des3_192_decrypt_ecb,
+ .cia_encrypt_cbc = des3_192_encrypt_cbc,
+ .cia_decrypt_cbc = des3_192_decrypt_cbc,
+ }
+ }
};
-
-
-static int
-init(void)
+static int init(void)
{
- int ret;
+ int ret = 0;
if (!crypt_s390_func_available(KM_DEA_ENCRYPT) ||
!crypt_s390_func_available(KM_TDEA_128_ENCRYPT) ||
- !crypt_s390_func_available(KM_TDEA_192_ENCRYPT)){
+ !crypt_s390_func_available(KM_TDEA_192_ENCRYPT))
return -ENOSYS;
- }
- ret = 0;
- ret |= (crypto_register_alg(&des_alg) == 0)? 0:1;
- ret |= (crypto_register_alg(&des3_128_alg) == 0)? 0:2;
- ret |= (crypto_register_alg(&des3_192_alg) == 0)? 0:4;
- if (ret){
+ ret |= (crypto_register_alg(&des_alg) == 0) ? 0:1;
+ ret |= (crypto_register_alg(&des3_128_alg) == 0) ? 0:2;
+ ret |= (crypto_register_alg(&des3_192_alg) == 0) ? 0:4;
+ if (ret) {
crypto_unregister_alg(&des3_192_alg);
crypto_unregister_alg(&des3_128_alg);
crypto_unregister_alg(&des_alg);
return -EEXIST;
}
-
- printk(KERN_INFO "crypt_s390: des_s390 loaded.\n");
return 0;
}
-static void __exit
-fini(void)
+static void __exit fini(void)
{
crypto_unregister_alg(&des3_192_alg);
crypto_unregister_alg(&des3_128_alg);
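
The des3_128_setkey and des3_192_setkey routines above enforce the requirement quoted in the surrounding comments: the DES subkeys K1, K2 (and K3) must be pairwise distinct, otherwise the key degenerates towards single DES and is rejected with CRYPTO_TFM_RES_BAD_KEY_SCHED. A stand-alone sketch of just that distinctness test (illustrative only; it does not perform the weak-key/parity checks handled by crypto_des_check_key):

#include <stdio.h>
#include <string.h>

#define DES_KEY_SIZE 8

/* 3-key TDEA: accept only if K1 != K2 and K2 != K3 */
static int des3_keys_distinct(const unsigned char *key)
{
	return memcmp(key, key + DES_KEY_SIZE, DES_KEY_SIZE) &&
	       memcmp(key + DES_KEY_SIZE, key + 2 * DES_KEY_SIZE,
		      DES_KEY_SIZE);
}

int main(void)
{
	unsigned char weak[3 * DES_KEY_SIZE];
	unsigned char good[3 * DES_KEY_SIZE];

	memset(weak, 'A', sizeof(weak));		/* K1 == K2 == K3 */
	memset(good, 'A', DES_KEY_SIZE);
	memset(good + DES_KEY_SIZE, 'B', DES_KEY_SIZE);
	memset(good + 2 * DES_KEY_SIZE, 'C', DES_KEY_SIZE);

	printf("degenerate key accepted: %d\n", des3_keys_distinct(weak)); /* 0 */
	printf("distinct key accepted:   %d\n", des3_keys_distinct(good)); /* 1 */
	return 0;
}
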
diff --git a/arch/s390/crypto/sha256_s390.c b/arch/s390/crypto/sha256_s390.c
index b75bdbd476c7..1ec5e92b3454 100644
--- a/arch/s390/crypto/sha256_s390.c
+++ b/arch/s390/crypto/sha256_s390.c
@@ -51,6 +51,7 @@ static void sha256_update(void *ctx, const u8 *data, unsigned int len)
{
struct s390_sha256_ctx *sctx = ctx;
unsigned int index;
+ int ret;
/* how much is already in the buffer? */
index = sctx->count / 8 & 0x3f;
@@ -58,15 +59,29 @@ static void sha256_update(void *ctx, const u8 *data, unsigned int len)
/* update message bit length */
sctx->count += len * 8;
- /* process one block */
- if ((index + len) >= SHA256_BLOCK_SIZE) {
+ if ((index + len) < SHA256_BLOCK_SIZE)
+ goto store;
+
+ /* process one stored block */
+ if (index) {
memcpy(sctx->buf + index, data, SHA256_BLOCK_SIZE - index);
- crypt_s390_kimd(KIMD_SHA_256, sctx->state, sctx->buf,
- SHA256_BLOCK_SIZE);
+ ret = crypt_s390_kimd(KIMD_SHA_256, sctx->state, sctx->buf,
+ SHA256_BLOCK_SIZE);
+ BUG_ON(ret != SHA256_BLOCK_SIZE);
data += SHA256_BLOCK_SIZE - index;
len -= SHA256_BLOCK_SIZE - index;
}
+ /* process as many blocks as possible */
+ if (len >= SHA256_BLOCK_SIZE) {
+ ret = crypt_s390_kimd(KIMD_SHA_256, sctx->state, data,
+ len & ~(SHA256_BLOCK_SIZE - 1));
+ BUG_ON(ret != (len & ~(SHA256_BLOCK_SIZE - 1)));
+ data += ret;
+ len -= ret;
+ }
+
+store:
/* anything left? */
if (len)
memcpy(sctx->buf + index , data, len);
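
The reworked sha256_update above splits the work into three steps: top up and flush any previously buffered partial block, hand as many whole blocks as possible to the hardware in a single KIMD call, then buffer whatever is left. A stand-alone sketch of that control flow, with a stub standing in for crypt_s390_kimd (illustrative only; the stub and names are not the kernel API, and the sketch resets index once the buffered block has been flushed):

#include <stdint.h>
#include <string.h>

#define BLOCK_SIZE 64	/* SHA-256 block size in bytes */

struct sketch_ctx {
	uint8_t buf[BLOCK_SIZE];
	uint64_t count;			/* message length in bits */
};

/* stub for the hardware call; assumed to consume n, a multiple of BLOCK_SIZE */
static unsigned int process_blocks(struct sketch_ctx *c, const uint8_t *p,
				   unsigned int n)
{
	(void)c; (void)p;
	return n;
}

static void sketch_update(struct sketch_ctx *c, const uint8_t *data,
			  unsigned int len)
{
	unsigned int index = (c->count / 8) & (BLOCK_SIZE - 1);

	c->count += (uint64_t)len * 8;

	if (index + len < BLOCK_SIZE)
		goto store;

	if (index) {			/* complete the buffered block first */
		memcpy(c->buf + index, data, BLOCK_SIZE - index);
		process_blocks(c, c->buf, BLOCK_SIZE);
		data += BLOCK_SIZE - index;
		len -= BLOCK_SIZE - index;
		index = 0;
	}

	if (len >= BLOCK_SIZE) {	/* bulk-process whole blocks */
		unsigned int n = len & ~(BLOCK_SIZE - 1);

		process_blocks(c, data, n);
		data += n;
		len -= n;
	}
store:
	if (len)			/* stash the tail for the next call */
		memcpy(c->buf + index, data, len);
}

int main(void)
{
	struct sketch_ctx c = { .count = 0 };
	uint8_t msg[100] = { 0 };

	sketch_update(&c, msg, 100);	/* one whole block hashed, 36 bytes buffered */
	sketch_update(&c, msg, 100);	/* buffered block flushed, then bulk blocks */
	return 0;
}
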
@@ -119,9 +134,9 @@ static struct crypto_alg alg = {
.cra_list = LIST_HEAD_INIT(alg.cra_list),
.cra_u = { .digest = {
.dia_digestsize = SHA256_DIGEST_SIZE,
- .dia_init = sha256_init,
- .dia_update = sha256_update,
- .dia_final = sha256_final } }
+ .dia_init = sha256_init,
+ .dia_update = sha256_update,
+ .dia_final = sha256_final } }
};
static int init(void)