From 00425bb181c204c8f250fec122e2817a930e0286 Mon Sep 17 00:00:00 2001
From: Ard Biesheuvel
Date: Fri, 24 Apr 2015 08:37:09 +0200
Subject: crypto: x86/sha512_ssse3 - fixup for asm function prototype change

Patch e68410ebf626 ("crypto: x86/sha512_ssse3 - move SHA-384/512 SSSE3
implementation to base layer") changed the prototypes of the core asm
SHA-512 implementations so that they are compatible with the prototype
used by the base layer.

However, in one instance, the register that was used for passing the
input buffer was reused as a scratch register later on in the code, and
since the input buffer param changed places with the digest param
(which needs to be written back before the function returns), this
resulted in the scratch register being dereferenced in a memory write
operation, causing a GPF.

Fix this by changing the scratch register to use the same register as
the input buffer param again.

Fixes: e68410ebf626 ("crypto: x86/sha512_ssse3 - move SHA-384/512 SSSE3 implementation to base layer")
Reported-By: Bobby Powers
Tested-By: Bobby Powers
Signed-off-by: Ard Biesheuvel
Signed-off-by: Herbert Xu
---
 arch/x86/crypto/sha512-avx2-asm.S | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

(limited to 'arch')

diff --git a/arch/x86/crypto/sha512-avx2-asm.S b/arch/x86/crypto/sha512-avx2-asm.S
index a4771dcd1fcf..1f20b35d8573 100644
--- a/arch/x86/crypto/sha512-avx2-asm.S
+++ b/arch/x86/crypto/sha512-avx2-asm.S
@@ -79,7 +79,7 @@ NUM_BLKS = %rdx
 c = %rcx
 d = %r8
 e = %rdx
-y3 = %rdi
+y3 = %rsi

 TBL = %rbp

--
cgit v1.2.3
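For context, the base layer passes the state (digest) pointer as the first
argument and the input data as the second, and under the SysV AMD64 calling
convention the first three integer/pointer arguments arrive in %rdi, %rsi and
%rdx. The prototypes below are an illustrative C sketch of the before/after
argument order described in the commit message, not the kernel's exact
declarations: with the new order, %rdi carries the digest pointer that must
survive until the final write-back, so the y3 scratch alias has to move to
%rsi.

#include <stdint.h>

/* old order (pre e68410ebf626): data in %rdi, digest in %rsi, count in %rdx,
 * so %rdi was free to reuse as scratch once the current block was loaded */
void sha512_xform_old(const uint8_t *data, uint64_t *digest, uint64_t blocks);

/* base-layer order: digest in %rdi, data in %rsi, count in %rdx; %rdi is
 * dereferenced for the digest write-back at the end, so only %rsi may be
 * reused as the y3 scratch register */
void sha512_xform_new(uint64_t *digest, const uint8_t *data, uint64_t blocks);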
From ac02c6ea6b404461829697792cd2b67f6a14d28a Mon Sep 17 00:00:00 2001
From: Ard Biesheuvel
Date: Mon, 4 May 2015 11:00:16 +0200
Subject: crypto: arm64/crc32 - bring in line with generic CRC32

The arm64 CRC32 (not CRC32c) implementation was not quite doing the
same thing as the generic one: the generic crc32 shash seeds with 0 and
writes out the final CRC unmodified, whereas crc32c seeds with ~0 and
writes out the bitwise complement, and the arm64 driver was applying
the crc32c conventions to both. Fix that.

Signed-off-by: Ard Biesheuvel
Acked-by: Steve Capper
Signed-off-by: Herbert Xu
---
 arch/arm64/crypto/crc32-arm64.c | 22 +++++++++++++++++++---
 1 file changed, 19 insertions(+), 3 deletions(-)

(limited to 'arch')

diff --git a/arch/arm64/crypto/crc32-arm64.c b/arch/arm64/crypto/crc32-arm64.c
index 9499199924ae..6a37c3c6b11d 100644
--- a/arch/arm64/crypto/crc32-arm64.c
+++ b/arch/arm64/crypto/crc32-arm64.c
@@ -147,13 +147,21 @@ static int chksum_final(struct shash_desc *desc, u8 *out)
 {
         struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);

+        put_unaligned_le32(ctx->crc, out);
+        return 0;
+}
+
+static int chksumc_final(struct shash_desc *desc, u8 *out)
+{
+        struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
+
         put_unaligned_le32(~ctx->crc, out);
         return 0;
 }

 static int __chksum_finup(u32 crc, const u8 *data, unsigned int len, u8 *out)
 {
-        put_unaligned_le32(~crc32_arm64_le_hw(crc, data, len), out);
+        put_unaligned_le32(crc32_arm64_le_hw(crc, data, len), out);
         return 0;
 }

@@ -199,6 +207,14 @@ static int crc32_cra_init(struct crypto_tfm *tfm)
 {
         struct chksum_ctx *mctx = crypto_tfm_ctx(tfm);

+        mctx->key = 0;
+        return 0;
+}
+
+static int crc32c_cra_init(struct crypto_tfm *tfm)
+{
+        struct chksum_ctx *mctx = crypto_tfm_ctx(tfm);
+
         mctx->key = ~0;
         return 0;
 }

@@ -229,7 +245,7 @@ static struct shash_alg crc32c_alg = {
         .setkey         = chksum_setkey,
         .init           = chksum_init,
         .update         = chksumc_update,
-        .final          = chksum_final,
+        .final          = chksumc_final,
         .finup          = chksumc_finup,
         .digest         = chksumc_digest,
         .descsize       = sizeof(struct chksum_desc_ctx),
@@ -241,7 +257,7 @@ static struct shash_alg crc32c_alg = {
                 .cra_alignmask          = 0,
                 .cra_ctxsize            = sizeof(struct chksum_ctx),
                 .cra_module             = THIS_MODULE,
-                .cra_init               = crc32_cra_init,
+                .cra_init               = crc32c_cra_init,
         }
 };

--
cgit v1.2.3
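The convention being restored can be seen in miniature in the stand-alone
sketch below (plain C, not kernel code, using a bitwise model instead of the
arm64 CRC32B/CRC32CB instructions): crc32 seeds with 0 and emits the running
CRC as-is, while crc32c seeds with ~0 and emits its bitwise complement,
matching the new cra_init and final routines above.

#include <stdint.h>
#include <stdio.h>

/* one byte of a reflected CRC; poly is the reversed polynomial */
static uint32_t crc_byte(uint32_t crc, uint8_t byte, uint32_t poly)
{
        crc ^= byte;
        for (int i = 0; i < 8; i++)
                crc = (crc >> 1) ^ ((crc & 1) ? poly : 0);
        return crc;
}

int main(void)
{
        static const uint8_t msg[] = "123456789";
        uint32_t crc = 0;       /* crc32:  default seed 0  */
        uint32_t crcc = ~0u;    /* crc32c: default seed ~0 */

        for (unsigned int i = 0; i < sizeof(msg) - 1; i++) {
                crc  = crc_byte(crc,  msg[i], 0xedb88320u);  /* CRC-32  */
                crcc = crc_byte(crcc, msg[i], 0x82f63b78u);  /* CRC-32C */
        }

        /* crc32 final(): write the CRC unmodified */
        printf("crc32:  %08x\n", (unsigned int)crc);
        /* crc32c final(): write the bitwise complement */
        printf("crc32c: %08x\n", (unsigned int)~crcc);
        return 0;
}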
From bf7883ebcb9c0880b8f5d22c1435a3de15283f0a Mon Sep 17 00:00:00 2001
From: Ard Biesheuvel
Date: Wed, 6 May 2015 15:54:31 +0200
Subject: crypto: arm64/sha1-ce - prevent asm code finalization in final() path

Ensure that the asm code finalization path is not triggered when
invoked via final(), since it already takes care of that itself.

Signed-off-by: Ard Biesheuvel
Signed-off-by: Herbert Xu
---
 arch/arm64/crypto/sha1-ce-glue.c | 3 +++
 1 file changed, 3 insertions(+)

(limited to 'arch')

diff --git a/arch/arm64/crypto/sha1-ce-glue.c b/arch/arm64/crypto/sha1-ce-glue.c
index 114e7cc5de8c..aefda9868627 100644
--- a/arch/arm64/crypto/sha1-ce-glue.c
+++ b/arch/arm64/crypto/sha1-ce-glue.c
@@ -74,6 +74,9 @@ static int sha1_ce_finup(struct shash_desc *desc, const u8 *data,

 static int sha1_ce_final(struct shash_desc *desc, u8 *out)
 {
+        struct sha1_ce_state *sctx = shash_desc_ctx(desc);
+
+        sctx->finalize = 0;
         kernel_neon_begin_partial(16);
         sha1_base_do_finalize(desc, (sha1_block_fn *)sha1_ce_transform);
         kernel_neon_end();
--
cgit v1.2.3

From ec59a65d694e5fd99d76565b93468c99ae8dff79 Mon Sep 17 00:00:00 2001
From: Ard Biesheuvel
Date: Wed, 6 May 2015 15:54:32 +0200
Subject: crypto: arm64/sha2-ce - prevent asm code finalization in final() path

Ensure that the asm code finalization path is not triggered when
invoked via final(), since it already takes care of that itself.

Signed-off-by: Ard Biesheuvel
Signed-off-by: Herbert Xu
---
 arch/arm64/crypto/sha2-ce-glue.c | 3 +++
 1 file changed, 3 insertions(+)

(limited to 'arch')

diff --git a/arch/arm64/crypto/sha2-ce-glue.c b/arch/arm64/crypto/sha2-ce-glue.c
index 1340e44c048b..7cd587564a41 100644
--- a/arch/arm64/crypto/sha2-ce-glue.c
+++ b/arch/arm64/crypto/sha2-ce-glue.c
@@ -75,6 +75,9 @@ static int sha256_ce_finup(struct shash_desc *desc, const u8 *data,

 static int sha256_ce_final(struct shash_desc *desc, u8 *out)
 {
+        struct sha256_ce_state *sctx = shash_desc_ctx(desc);
+
+        sctx->finalize = 0;
         kernel_neon_begin_partial(28);
         sha256_base_do_finalize(desc, (sha256_block_fn *)sha2_ce_transform);
         kernel_neon_end();
--
cgit v1.2.3
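Both patches address the same pattern, sketched below in plain C with
illustrative names (this is not the kernel source): the Crypto Extensions
transform can pad and finalize a message itself when a 'finalize' flag in the
driver state is set, which the finup() fast path relies on; final() instead
finalizes through the generic base layer, so it has to clear the flag before
handing the descriptor back to the asm.

#include <stdint.h>
#include <stddef.h>
#include <string.h>

/* illustrative driver state; in the kernel this wraps the generic SHA state */
struct sha1_ce_state_sketch {
        uint32_t h[5];          /* running digest                       */
        uint32_t finalize;      /* nonzero: transform pads the message  */
};

/* stand-in for the CE transform; only the flag handling is modelled */
static void ce_transform_sketch(struct sha1_ce_state_sketch *sctx,
                                const uint8_t *blocks, size_t nblocks)
{
        /* ... process nblocks 64-byte blocks from 'blocks' ... */
        if (sctx->finalize) {
                /* the asm would append the 0x80/length padding here */
        }
        (void)blocks;
        (void)nblocks;
}

static void sha1_ce_final_sketch(struct sha1_ce_state_sketch *sctx,
                                 uint8_t out[20])
{
        /*
         * The generic finalization path below pads the message itself, so
         * make sure the asm does not pad it a second time; this is the
         * assignment the two patches add:
         */
        sctx->finalize = 0;

        /* the generic base-layer finalization would call the transform here */
        ce_transform_sketch(sctx, NULL, 0);
        memcpy(out, sctx->h, 20);
}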
From a1cae34e23b1293eccbcc8ee9b39298039c3952a Mon Sep 17 00:00:00 2001
From: Harald Freudenberger
Date: Thu, 21 May 2015 10:01:11 +0200
Subject: crypto: s390/ghash - Fix incorrect ghash icv buffer handling.

Multithreaded tests showed that the icv buffer in the current ghash
implementation is not handled correctly. Moving this working ghash
buffer value to the descriptor context fixed this. The code has been
tested and verified with a multithreaded application via the af_alg
interface.

Cc: stable@vger.kernel.org
Signed-off-by: Harald Freudenberger
Signed-off-by: Gerald Schaefer
Reported-by: Herbert Xu
Signed-off-by: Herbert Xu
---
 arch/s390/crypto/ghash_s390.c | 25 +++++++++++++------------
 1 file changed, 13 insertions(+), 12 deletions(-)

(limited to 'arch')

diff --git a/arch/s390/crypto/ghash_s390.c b/arch/s390/crypto/ghash_s390.c
index 7940dc90e80b..b258110da952 100644
--- a/arch/s390/crypto/ghash_s390.c
+++ b/arch/s390/crypto/ghash_s390.c
@@ -16,11 +16,12 @@
 #define GHASH_DIGEST_SIZE       16

 struct ghash_ctx {
-        u8 icv[16];
-        u8 key[16];
+        u8 key[GHASH_BLOCK_SIZE];
 };

 struct ghash_desc_ctx {
+        u8 icv[GHASH_BLOCK_SIZE];
+        u8 key[GHASH_BLOCK_SIZE];
         u8 buffer[GHASH_BLOCK_SIZE];
         u32 bytes;
 };
@@ -28,8 +29,10 @@ struct ghash_desc_ctx {
 static int ghash_init(struct shash_desc *desc)
 {
         struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
+        struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);

         memset(dctx, 0, sizeof(*dctx));
+        memcpy(dctx->key, ctx->key, GHASH_BLOCK_SIZE);

         return 0;
 }
@@ -45,7 +48,6 @@ static int ghash_setkey(struct crypto_shash *tfm,
         }

         memcpy(ctx->key, key, GHASH_BLOCK_SIZE);
-        memset(ctx->icv, 0, GHASH_BLOCK_SIZE);

         return 0;
 }
@@ -54,7 +56,6 @@ static int ghash_update(struct shash_desc *desc,
                          const u8 *src, unsigned int srclen)
 {
         struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
-        struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
         unsigned int n;
         u8 *buf = dctx->buffer;
         int ret;
@@ -70,7 +71,7 @@ static int ghash_update(struct shash_desc *desc,
                 src += n;

                 if (!dctx->bytes) {
-                        ret = crypt_s390_kimd(KIMD_GHASH, ctx, buf,
+                        ret = crypt_s390_kimd(KIMD_GHASH, dctx, buf,
                                               GHASH_BLOCK_SIZE);
                         if (ret != GHASH_BLOCK_SIZE)
                                 return -EIO;
@@ -79,7 +80,7 @@ static int ghash_update(struct shash_desc *desc,

         n = srclen & ~(GHASH_BLOCK_SIZE - 1);
         if (n) {
-                ret = crypt_s390_kimd(KIMD_GHASH, ctx, src, n);
+                ret = crypt_s390_kimd(KIMD_GHASH, dctx, src, n);
                 if (ret != n)
                         return -EIO;
                 src += n;
@@ -94,7 +95,7 @@ static int ghash_update(struct shash_desc *desc,
         return 0;
 }

-static int ghash_flush(struct ghash_ctx *ctx, struct ghash_desc_ctx *dctx)
+static int ghash_flush(struct ghash_desc_ctx *dctx)
 {
         u8 *buf = dctx->buffer;
         int ret;
@@ -104,24 +105,24 @@ static int ghash_flush(struct ghash_ctx *ctx, struct ghash_desc_ctx *dctx)

                 memset(pos, 0, dctx->bytes);

-                ret = crypt_s390_kimd(KIMD_GHASH, ctx, buf, GHASH_BLOCK_SIZE);
+                ret = crypt_s390_kimd(KIMD_GHASH, dctx, buf, GHASH_BLOCK_SIZE);
                 if (ret != GHASH_BLOCK_SIZE)
                         return -EIO;
+
+                dctx->bytes = 0;
         }

-        dctx->bytes = 0;
         return 0;
 }

 static int ghash_final(struct shash_desc *desc, u8 *dst)
 {
         struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
-        struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
         int ret;

-        ret = ghash_flush(ctx, dctx);
+        ret = ghash_flush(dctx);
         if (!ret)
-                memcpy(dst, ctx->icv, GHASH_BLOCK_SIZE);
+                memcpy(dst, dctx->icv, GHASH_BLOCK_SIZE);

         return ret;
 }
--
cgit v1.2.3
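The rule this patch enforces is sketched below in stand-alone C with
illustrative names (not the s390 driver itself): a shash tfm can be shared by
many concurrent requests, so only the setkey() material belongs in the tfm
context, while anything the digest operation mutates, such as the running icv,
has to live in the per-request descriptor context.

#include <stdint.h>
#include <string.h>

#define GHASH_BLOCK_SIZE_SKETCH 16

/* per-tfm context: shared, effectively read-only after setkey() */
struct ghash_tfm_ctx_sketch {
        uint8_t key[GHASH_BLOCK_SIZE_SKETCH];
};

/* per-request context: private to one digest operation */
struct ghash_desc_ctx_sketch {
        uint8_t icv[GHASH_BLOCK_SIZE_SKETCH];   /* running hash state */
        uint8_t key[GHASH_BLOCK_SIZE_SKETCH];   /* key snapshot       */
        uint8_t buffer[GHASH_BLOCK_SIZE_SKETCH];
        uint32_t bytes;
};

/* init(): copy the key out of the shared context; keep all mutable state
 * (icv, partial-block buffer) in the request-local structure */
static void ghash_init_sketch(struct ghash_desc_ctx_sketch *dctx,
                              const struct ghash_tfm_ctx_sketch *ctx)
{
        memset(dctx, 0, sizeof(*dctx));
        memcpy(dctx->key, ctx->key, sizeof(dctx->key));
}

Splitting the state this way is what lets several threads drive the same tfm
at once, which is how the multithreaded af_alg test mentioned in the commit
message exercises the driver.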