Diffstat (limited to 'arch/arm')
-rw-r--r--  arch/arm/crypto/aes-ce-glue.c   | 14
-rw-r--r--  arch/arm/crypto/chacha-glue.c   |  4
-rw-r--r--  arch/arm/crypto/crc32-ce-glue.c |  4
-rw-r--r--  arch/arm/crypto/ghash-ce-glue.c | 11
-rw-r--r--  arch/arm/crypto/poly1305-glue.c | 18
5 files changed, 9 insertions, 42 deletions
diff --git a/arch/arm/crypto/aes-ce-glue.c b/arch/arm/crypto/aes-ce-glue.c
index cdb1a07e7ad0..b668c97663ec 100644
--- a/arch/arm/crypto/aes-ce-glue.c
+++ b/arch/arm/crypto/aes-ce-glue.c
@@ -138,14 +138,8 @@ static int ce_aes_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
 			 unsigned int key_len)
 {
 	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
-	int ret;
-
-	ret = ce_aes_expandkey(ctx, in_key, key_len);
-	if (!ret)
-		return 0;
 
-	crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
-	return -EINVAL;
+	return ce_aes_expandkey(ctx, in_key, key_len);
 }
 
 struct crypto_aes_xts_ctx {
@@ -167,11 +161,7 @@ static int xts_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
 	if (!ret)
 		ret = ce_aes_expandkey(&ctx->key2, &in_key[key_len / 2],
 				       key_len / 2);
-	if (!ret)
-		return 0;
-
-	crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
-	return -EINVAL;
+	return ret;
 }
 
 static int ecb_encrypt(struct skcipher_request *req)
diff --git a/arch/arm/crypto/chacha-glue.c b/arch/arm/crypto/chacha-glue.c
index 6ebbb2b241d2..6fdb0ac62b3d 100644
--- a/arch/arm/crypto/chacha-glue.c
+++ b/arch/arm/crypto/chacha-glue.c
@@ -115,7 +115,7 @@ static int chacha_stream_xor(struct skcipher_request *req,
 		if (nbytes < walk.total)
 			nbytes = round_down(nbytes, walk.stride);
 
-		if (!neon) {
+		if (!IS_ENABLED(CONFIG_KERNEL_MODE_NEON) || !neon) {
 			chacha_doarm(walk.dst.virt.addr, walk.src.virt.addr,
 				     nbytes, state, ctx->nrounds);
 			state[12] += DIV_ROUND_UP(nbytes, CHACHA_BLOCK_SIZE);
@@ -159,7 +159,7 @@ static int do_xchacha(struct skcipher_request *req, bool neon)
 
 	chacha_init_generic(state, ctx->key, req->iv);
 
-	if (!neon) {
+	if (!IS_ENABLED(CONFIG_KERNEL_MODE_NEON) || !neon) {
 		hchacha_block_arm(state, subctx.key, ctx->nrounds);
 	} else {
 		kernel_neon_begin();
diff --git a/arch/arm/crypto/crc32-ce-glue.c b/arch/arm/crypto/crc32-ce-glue.c
index 95592499b9bd..2208445808d7 100644
--- a/arch/arm/crypto/crc32-ce-glue.c
+++ b/arch/arm/crypto/crc32-ce-glue.c
@@ -54,10 +54,8 @@ static int crc32_setkey(struct crypto_shash *hash, const u8 *key,
 {
 	u32 *mctx = crypto_shash_ctx(hash);
 
-	if (keylen != sizeof(u32)) {
-		crypto_shash_set_flags(hash, CRYPTO_TFM_RES_BAD_KEY_LEN);
+	if (keylen != sizeof(u32))
 		return -EINVAL;
-	}
 	*mctx = le32_to_cpup((__le32 *)key);
 	return 0;
 }
diff --git a/arch/arm/crypto/ghash-ce-glue.c b/arch/arm/crypto/ghash-ce-glue.c
index c691077679a6..a00fd329255f 100644
--- a/arch/arm/crypto/ghash-ce-glue.c
+++ b/arch/arm/crypto/ghash-ce-glue.c
@@ -163,10 +163,8 @@ static int ghash_setkey(struct crypto_shash *tfm,
 	struct ghash_key *key = crypto_shash_ctx(tfm);
 	be128 h;
 
-	if (keylen != GHASH_BLOCK_SIZE) {
-		crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
+	if (keylen != GHASH_BLOCK_SIZE)
 		return -EINVAL;
-	}
 
 	/* needed for the fallback */
 	memcpy(&key->k, inkey, GHASH_BLOCK_SIZE);
@@ -296,16 +294,11 @@ static int ghash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
 {
 	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
 	struct crypto_ahash *child = &ctx->cryptd_tfm->base;
-	int err;
 
 	crypto_ahash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
 	crypto_ahash_set_flags(child, crypto_ahash_get_flags(tfm)
 			       & CRYPTO_TFM_REQ_MASK);
-	err = crypto_ahash_setkey(child, key, keylen);
-	crypto_ahash_set_flags(tfm, crypto_ahash_get_flags(child)
-			       & CRYPTO_TFM_RES_MASK);
-
-	return err;
+	return crypto_ahash_setkey(child, key, keylen);
 }
 
 static int ghash_async_init_tfm(struct crypto_tfm *tfm)
diff --git a/arch/arm/crypto/poly1305-glue.c b/arch/arm/crypto/poly1305-glue.c
index abe3f2d587dc..ceec04ec2f40 100644
--- a/arch/arm/crypto/poly1305-glue.c
+++ b/arch/arm/crypto/poly1305-glue.c
@@ -20,7 +20,7 @@
 
 void poly1305_init_arm(void *state, const u8 *key);
 void poly1305_blocks_arm(void *state, const u8 *src, u32 len, u32 hibit);
-void poly1305_emit_arm(void *state, __le32 *digest, const u32 *nonce);
+void poly1305_emit_arm(void *state, u8 *digest, const u32 *nonce);
 
 void __weak poly1305_blocks_neon(void *state, const u8 *src, u32 len, u32 hibit)
 {
@@ -179,9 +179,6 @@ EXPORT_SYMBOL(poly1305_update_arch);
 
 void poly1305_final_arch(struct poly1305_desc_ctx *dctx, u8 *dst)
 {
-	__le32 digest[4];
-	u64 f = 0;
-
 	if (unlikely(dctx->buflen)) {
 		dctx->buf[dctx->buflen++] = 1;
 		memset(dctx->buf + dctx->buflen, 0,
@@ -189,18 +186,7 @@ void poly1305_final_arch(struct poly1305_desc_ctx *dctx, u8 *dst)
 		poly1305_blocks_arm(&dctx->h, dctx->buf, POLY1305_BLOCK_SIZE, 0);
 	}
 
-	poly1305_emit_arm(&dctx->h, digest, dctx->s);
-
-	/* mac = (h + s) % (2^128) */
-	f = (f >> 32) + le32_to_cpu(digest[0]);
-	put_unaligned_le32(f, dst);
-	f = (f >> 32) + le32_to_cpu(digest[1]);
-	put_unaligned_le32(f, dst + 4);
-	f = (f >> 32) + le32_to_cpu(digest[2]);
-	put_unaligned_le32(f, dst + 8);
-	f = (f >> 32) + le32_to_cpu(digest[3]);
-	put_unaligned_le32(f, dst + 12);
-
+	poly1305_emit_arm(&dctx->h, dst, dctx->s);
 	*dctx = (struct poly1305_desc_ctx){};
 }
 EXPORT_SYMBOL(poly1305_final_arch);