From cde001e4c3c3625c60b68a83eb1f1c2572dee07a Mon Sep 17 00:00:00 2001
From: Stephan Mueller
Date: Fri, 6 Mar 2015 08:26:31 +0100
Subject: crypto: rng - RNGs must return 0 in success case

Change the RNGs to always return 0 in success case.

This patch ensures that seqiv.c works with RNGs other than krng. seqiv
expects that any return code other than 0 is an error. Without the
patch, rfc4106(gcm(aes)) will not work when using a DRBG or an ANSI
X9.31 RNG.

Signed-off-by: Stephan Mueller
Signed-off-by: Herbert Xu
---
 include/crypto/rng.h | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

(limited to 'include/crypto')

diff --git a/include/crypto/rng.h b/include/crypto/rng.h
index a16fb10142bf..6e28ea5be9f1 100644
--- a/include/crypto/rng.h
+++ b/include/crypto/rng.h
@@ -103,8 +103,7 @@ static inline void crypto_free_rng(struct crypto_rng *tfm)
  * This function fills the caller-allocated buffer with random numbers using the
  * random number generator referenced by the cipher handle.
  *
- * Return: > 0 function was successful and returns the number of generated
- * bytes; < 0 if an error occurred
+ * Return: 0 function was successful; < 0 if an error occurred
  */
 static inline int crypto_rng_get_bytes(struct crypto_rng *tfm,
 				       u8 *rdata, unsigned int dlen)
-- cgit v1.2.3

From 87b1675634e1e9f1570445c55604204771ac9078 Mon Sep 17 00:00:00 2001
From: Herbert Xu
Date: Thu, 2 Apr 2015 22:39:40 +0800
Subject: crypto: api - Change crypto_unregister_instance argument type

This patch makes crypto_unregister_instance take a crypto_instance
instead of a crypto_alg. This allows us to remove a duplicate
CRYPTO_ALG_INSTANCE check in crypto_unregister_instance.

Signed-off-by: Herbert Xu
---
 crypto/algapi.c         | 8 ++------
 crypto/crypto_user.c    | 2 +-
 include/crypto/algapi.h | 2 +-
 3 files changed, 4 insertions(+), 8 deletions(-)

(limited to 'include/crypto')

diff --git a/crypto/algapi.c b/crypto/algapi.c
index 0f1976eceb27..f1d0307b1d08 100644
--- a/crypto/algapi.c
+++ b/crypto/algapi.c
@@ -551,17 +551,13 @@ err:
 }
 EXPORT_SYMBOL_GPL(crypto_register_instance);
 
-int crypto_unregister_instance(struct crypto_alg *alg)
+int crypto_unregister_instance(struct crypto_instance *inst)
 {
-	struct crypto_instance *inst = (void *)alg;
 	LIST_HEAD(list);
 
-	if (!(alg->cra_flags & CRYPTO_ALG_INSTANCE))
-		return -EINVAL;
-
 	down_write(&crypto_alg_sem);
 
-	crypto_remove_spawns(alg, &list, NULL);
+	crypto_remove_spawns(&inst->alg, &list, NULL);
 	crypto_remove_instance(inst, &list);
 
 	up_write(&crypto_alg_sem);
diff --git a/crypto/crypto_user.c b/crypto/crypto_user.c
index c5148a35ae0a..eab249723830 100644
--- a/crypto/crypto_user.c
+++ b/crypto/crypto_user.c
@@ -316,7 +316,7 @@ static int crypto_del_alg(struct sk_buff *skb, struct nlmsghdr *nlh,
 	if (atomic_read(&alg->cra_refcnt) != 1)
 		return -EBUSY;
 
-	return crypto_unregister_instance(alg);
+	return crypto_unregister_instance((struct crypto_instance *)alg);
 }
 
 static struct crypto_alg *crypto_user_skcipher_alg(const char *name, u32 type,
diff --git a/include/crypto/algapi.h b/include/crypto/algapi.h
index 623a59c1ff5a..0ecb7688af71 100644
--- a/include/crypto/algapi.h
+++ b/include/crypto/algapi.h
@@ -137,7 +137,7 @@ struct crypto_template *crypto_lookup_template(const char *name);
 
 int crypto_register_instance(struct crypto_template *tmpl,
 			     struct crypto_instance *inst);
-int crypto_unregister_instance(struct crypto_alg *alg);
+int crypto_unregister_instance(struct crypto_instance *inst);
 
 int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
 		      struct crypto_instance *inst, u32
mask); -- cgit v1.2.3 From c4d5b9ffa31f7283b34cba991514dacae4c8d3de Mon Sep 17 00:00:00 2001 From: Ard Biesheuvel Date: Thu, 9 Apr 2015 12:55:33 +0200 Subject: crypto: sha1 - implement base layer for SHA-1 To reduce the number of copies of boilerplate code throughout the tree, this patch implements generic glue for the SHA-1 algorithm. This allows a specific arch or hardware implementation to only implement the special handling that it needs. The users need to supply an implementation of void (sha1_block_fn)(struct sha1_state *sst, u8 const *src, int blocks) and pass it to the SHA-1 base functions. For easy casting between the prototype above and existing block functions that take a 'u32 state[]' as their first argument, the 'state' member of struct sha1_state is moved to the base of the struct. Signed-off-by: Ard Biesheuvel Signed-off-by: Herbert Xu --- include/crypto/sha.h | 2 +- include/crypto/sha1_base.h | 106 +++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 107 insertions(+), 1 deletion(-) create mode 100644 include/crypto/sha1_base.h (limited to 'include/crypto') diff --git a/include/crypto/sha.h b/include/crypto/sha.h index 190f8a0e0242..a9aad8e63f43 100644 --- a/include/crypto/sha.h +++ b/include/crypto/sha.h @@ -65,8 +65,8 @@ #define SHA512_H7 0x5be0cd19137e2179ULL struct sha1_state { - u64 count; u32 state[SHA1_DIGEST_SIZE / 4]; + u64 count; u8 buffer[SHA1_BLOCK_SIZE]; }; diff --git a/include/crypto/sha1_base.h b/include/crypto/sha1_base.h new file mode 100644 index 000000000000..d0df431f9a97 --- /dev/null +++ b/include/crypto/sha1_base.h @@ -0,0 +1,106 @@ +/* + * sha1_base.h - core logic for SHA-1 implementations + * + * Copyright (C) 2015 Linaro Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License version 2 as + * published by the Free Software Foundation. 
+ */ + +#include +#include +#include +#include + +#include + +typedef void (sha1_block_fn)(struct sha1_state *sst, u8 const *src, int blocks); + +static inline int sha1_base_init(struct shash_desc *desc) +{ + struct sha1_state *sctx = shash_desc_ctx(desc); + + sctx->state[0] = SHA1_H0; + sctx->state[1] = SHA1_H1; + sctx->state[2] = SHA1_H2; + sctx->state[3] = SHA1_H3; + sctx->state[4] = SHA1_H4; + sctx->count = 0; + + return 0; +} + +static inline int sha1_base_do_update(struct shash_desc *desc, + const u8 *data, + unsigned int len, + sha1_block_fn *block_fn) +{ + struct sha1_state *sctx = shash_desc_ctx(desc); + unsigned int partial = sctx->count % SHA1_BLOCK_SIZE; + + sctx->count += len; + + if (unlikely((partial + len) >= SHA1_BLOCK_SIZE)) { + int blocks; + + if (partial) { + int p = SHA1_BLOCK_SIZE - partial; + + memcpy(sctx->buffer + partial, data, p); + data += p; + len -= p; + + block_fn(sctx, sctx->buffer, 1); + } + + blocks = len / SHA1_BLOCK_SIZE; + len %= SHA1_BLOCK_SIZE; + + if (blocks) { + block_fn(sctx, data, blocks); + data += blocks * SHA1_BLOCK_SIZE; + } + partial = 0; + } + if (len) + memcpy(sctx->buffer + partial, data, len); + + return 0; +} + +static inline int sha1_base_do_finalize(struct shash_desc *desc, + sha1_block_fn *block_fn) +{ + const int bit_offset = SHA1_BLOCK_SIZE - sizeof(__be64); + struct sha1_state *sctx = shash_desc_ctx(desc); + __be64 *bits = (__be64 *)(sctx->buffer + bit_offset); + unsigned int partial = sctx->count % SHA1_BLOCK_SIZE; + + sctx->buffer[partial++] = 0x80; + if (partial > bit_offset) { + memset(sctx->buffer + partial, 0x0, SHA1_BLOCK_SIZE - partial); + partial = 0; + + block_fn(sctx, sctx->buffer, 1); + } + + memset(sctx->buffer + partial, 0x0, bit_offset - partial); + *bits = cpu_to_be64(sctx->count << 3); + block_fn(sctx, sctx->buffer, 1); + + return 0; +} + +static inline int sha1_base_finish(struct shash_desc *desc, u8 *out) +{ + struct sha1_state *sctx = shash_desc_ctx(desc); + __be32 *digest = (__be32 *)out; + int i; + + for (i = 0; i < SHA1_DIGEST_SIZE / sizeof(__be32); i++) + put_unaligned_be32(sctx->state[i], digest++); + + *sctx = (struct sha1_state){}; + return 0; +} -- cgit v1.2.3 From 11b8d5ef91388162103bf28f2e9d6dfd400d192d Mon Sep 17 00:00:00 2001 From: Ard Biesheuvel Date: Thu, 9 Apr 2015 12:55:34 +0200 Subject: crypto: sha256 - implement base layer for SHA-256 To reduce the number of copies of boilerplate code throughout the tree, this patch implements generic glue for the SHA-256 algorithm. This allows a specific arch or hardware implementation to only implement the special handling that it needs. The users need to supply an implementation of void (sha256_block_fn)(struct sha256_state *sst, u8 const *src, int blocks) and pass it to the SHA-256 base functions. For easy casting between the prototype above and existing block functions that take a 'u32 state[]' as their first argument, the 'state' member of struct sha256_state is moved to the base of the struct. 
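To make the casting remark above concrete: with 'state' as the first member, a pointer to struct sha256_state and a pointer to its u32 state[] array refer to the same address, so an existing block routine with the traditional 'u32 state[]' prototype can be handed to the base helpers directly. A minimal sketch, assuming a hypothetical assembler routine my_sha256_block_data_order() that is not part of these patches:

#include <linux/linkage.h>
#include <crypto/internal/hash.h>
#include <crypto/sha.h>
#include <crypto/sha256_base.h>

/* Hypothetical optimised compression routine, assumed to be provided
 * elsewhere (e.g. in arch assembler); traditional 'u32 state[]' prototype. */
asmlinkage void my_sha256_block_data_order(u32 *state, const u8 *src,
					   int blocks);

static int my_sha256_update(struct shash_desc *desc, const u8 *data,
			    unsigned int len)
{
	/*
	 * The cast relies on 'state' being the first member of
	 * struct sha256_state, so both pointer types alias the same
	 * address -- exactly the layout change described above.
	 */
	return sha256_base_do_update(desc, data, len,
			(sha256_block_fn *)my_sha256_block_data_order);
}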
Signed-off-by: Ard Biesheuvel Signed-off-by: Herbert Xu --- include/crypto/sha.h | 2 +- include/crypto/sha256_base.h | 128 +++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 129 insertions(+), 1 deletion(-) create mode 100644 include/crypto/sha256_base.h (limited to 'include/crypto') diff --git a/include/crypto/sha.h b/include/crypto/sha.h index a9aad8e63f43..a75bc80cc776 100644 --- a/include/crypto/sha.h +++ b/include/crypto/sha.h @@ -71,8 +71,8 @@ struct sha1_state { }; struct sha256_state { - u64 count; u32 state[SHA256_DIGEST_SIZE / 4]; + u64 count; u8 buf[SHA256_BLOCK_SIZE]; }; diff --git a/include/crypto/sha256_base.h b/include/crypto/sha256_base.h new file mode 100644 index 000000000000..d1f2195bb7de --- /dev/null +++ b/include/crypto/sha256_base.h @@ -0,0 +1,128 @@ +/* + * sha256_base.h - core logic for SHA-256 implementations + * + * Copyright (C) 2015 Linaro Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License version 2 as + * published by the Free Software Foundation. + */ + +#include +#include +#include +#include + +#include + +typedef void (sha256_block_fn)(struct sha256_state *sst, u8 const *src, + int blocks); + +static inline int sha224_base_init(struct shash_desc *desc) +{ + struct sha256_state *sctx = shash_desc_ctx(desc); + + sctx->state[0] = SHA224_H0; + sctx->state[1] = SHA224_H1; + sctx->state[2] = SHA224_H2; + sctx->state[3] = SHA224_H3; + sctx->state[4] = SHA224_H4; + sctx->state[5] = SHA224_H5; + sctx->state[6] = SHA224_H6; + sctx->state[7] = SHA224_H7; + sctx->count = 0; + + return 0; +} + +static inline int sha256_base_init(struct shash_desc *desc) +{ + struct sha256_state *sctx = shash_desc_ctx(desc); + + sctx->state[0] = SHA256_H0; + sctx->state[1] = SHA256_H1; + sctx->state[2] = SHA256_H2; + sctx->state[3] = SHA256_H3; + sctx->state[4] = SHA256_H4; + sctx->state[5] = SHA256_H5; + sctx->state[6] = SHA256_H6; + sctx->state[7] = SHA256_H7; + sctx->count = 0; + + return 0; +} + +static inline int sha256_base_do_update(struct shash_desc *desc, + const u8 *data, + unsigned int len, + sha256_block_fn *block_fn) +{ + struct sha256_state *sctx = shash_desc_ctx(desc); + unsigned int partial = sctx->count % SHA256_BLOCK_SIZE; + + sctx->count += len; + + if (unlikely((partial + len) >= SHA256_BLOCK_SIZE)) { + int blocks; + + if (partial) { + int p = SHA256_BLOCK_SIZE - partial; + + memcpy(sctx->buf + partial, data, p); + data += p; + len -= p; + + block_fn(sctx, sctx->buf, 1); + } + + blocks = len / SHA256_BLOCK_SIZE; + len %= SHA256_BLOCK_SIZE; + + if (blocks) { + block_fn(sctx, data, blocks); + data += blocks * SHA256_BLOCK_SIZE; + } + partial = 0; + } + if (len) + memcpy(sctx->buf + partial, data, len); + + return 0; +} + +static inline int sha256_base_do_finalize(struct shash_desc *desc, + sha256_block_fn *block_fn) +{ + const int bit_offset = SHA256_BLOCK_SIZE - sizeof(__be64); + struct sha256_state *sctx = shash_desc_ctx(desc); + __be64 *bits = (__be64 *)(sctx->buf + bit_offset); + unsigned int partial = sctx->count % SHA256_BLOCK_SIZE; + + sctx->buf[partial++] = 0x80; + if (partial > bit_offset) { + memset(sctx->buf + partial, 0x0, SHA256_BLOCK_SIZE - partial); + partial = 0; + + block_fn(sctx, sctx->buf, 1); + } + + memset(sctx->buf + partial, 0x0, bit_offset - partial); + *bits = cpu_to_be64(sctx->count << 3); + block_fn(sctx, sctx->buf, 1); + + return 0; +} + +static inline int sha256_base_finish(struct shash_desc *desc, u8 *out) +{ + unsigned int digest_size = 
crypto_shash_digestsize(desc->tfm); + struct sha256_state *sctx = shash_desc_ctx(desc); + __be32 *digest = (__be32 *)out; + int i; + + for (i = 0; digest_size > 0; i++, digest_size -= sizeof(__be32)) + put_unaligned_be32(sctx->state[i], digest++); + + *sctx = (struct sha256_state){}; + return 0; +} -- cgit v1.2.3 From b84a2a0b4ec214fc5d4ba1d3d5b26d4f88733dba Mon Sep 17 00:00:00 2001 From: Ard Biesheuvel Date: Thu, 9 Apr 2015 12:55:35 +0200 Subject: crypto: sha512 - implement base layer for SHA-512 To reduce the number of copies of boilerplate code throughout the tree, this patch implements generic glue for the SHA-512 algorithm. This allows a specific arch or hardware implementation to only implement the special handling that it needs. The users need to supply an implementation of void (sha512_block_fn)(struct sha512_state *sst, u8 const *src, int blocks) and pass it to the SHA-512 base functions. For easy casting between the prototype above and existing block functions that take a 'u64 state[]' as their first argument, the 'state' member of struct sha512_state is moved to the base of the struct. Signed-off-by: Ard Biesheuvel Signed-off-by: Herbert Xu --- include/crypto/sha.h | 2 +- include/crypto/sha512_base.h | 131 +++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 132 insertions(+), 1 deletion(-) create mode 100644 include/crypto/sha512_base.h (limited to 'include/crypto') diff --git a/include/crypto/sha.h b/include/crypto/sha.h index a75bc80cc776..05e82cbc4d8f 100644 --- a/include/crypto/sha.h +++ b/include/crypto/sha.h @@ -77,8 +77,8 @@ struct sha256_state { }; struct sha512_state { - u64 count[2]; u64 state[SHA512_DIGEST_SIZE / 8]; + u64 count[2]; u8 buf[SHA512_BLOCK_SIZE]; }; diff --git a/include/crypto/sha512_base.h b/include/crypto/sha512_base.h new file mode 100644 index 000000000000..6c5341e005ea --- /dev/null +++ b/include/crypto/sha512_base.h @@ -0,0 +1,131 @@ +/* + * sha512_base.h - core logic for SHA-512 implementations + * + * Copyright (C) 2015 Linaro Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License version 2 as + * published by the Free Software Foundation. 
+ */ + +#include +#include +#include +#include + +#include + +typedef void (sha512_block_fn)(struct sha512_state *sst, u8 const *src, + int blocks); + +static inline int sha384_base_init(struct shash_desc *desc) +{ + struct sha512_state *sctx = shash_desc_ctx(desc); + + sctx->state[0] = SHA384_H0; + sctx->state[1] = SHA384_H1; + sctx->state[2] = SHA384_H2; + sctx->state[3] = SHA384_H3; + sctx->state[4] = SHA384_H4; + sctx->state[5] = SHA384_H5; + sctx->state[6] = SHA384_H6; + sctx->state[7] = SHA384_H7; + sctx->count[0] = sctx->count[1] = 0; + + return 0; +} + +static inline int sha512_base_init(struct shash_desc *desc) +{ + struct sha512_state *sctx = shash_desc_ctx(desc); + + sctx->state[0] = SHA512_H0; + sctx->state[1] = SHA512_H1; + sctx->state[2] = SHA512_H2; + sctx->state[3] = SHA512_H3; + sctx->state[4] = SHA512_H4; + sctx->state[5] = SHA512_H5; + sctx->state[6] = SHA512_H6; + sctx->state[7] = SHA512_H7; + sctx->count[0] = sctx->count[1] = 0; + + return 0; +} + +static inline int sha512_base_do_update(struct shash_desc *desc, + const u8 *data, + unsigned int len, + sha512_block_fn *block_fn) +{ + struct sha512_state *sctx = shash_desc_ctx(desc); + unsigned int partial = sctx->count[0] % SHA512_BLOCK_SIZE; + + sctx->count[0] += len; + if (sctx->count[0] < len) + sctx->count[1]++; + + if (unlikely((partial + len) >= SHA512_BLOCK_SIZE)) { + int blocks; + + if (partial) { + int p = SHA512_BLOCK_SIZE - partial; + + memcpy(sctx->buf + partial, data, p); + data += p; + len -= p; + + block_fn(sctx, sctx->buf, 1); + } + + blocks = len / SHA512_BLOCK_SIZE; + len %= SHA512_BLOCK_SIZE; + + if (blocks) { + block_fn(sctx, data, blocks); + data += blocks * SHA512_BLOCK_SIZE; + } + partial = 0; + } + if (len) + memcpy(sctx->buf + partial, data, len); + + return 0; +} + +static inline int sha512_base_do_finalize(struct shash_desc *desc, + sha512_block_fn *block_fn) +{ + const int bit_offset = SHA512_BLOCK_SIZE - sizeof(__be64[2]); + struct sha512_state *sctx = shash_desc_ctx(desc); + __be64 *bits = (__be64 *)(sctx->buf + bit_offset); + unsigned int partial = sctx->count[0] % SHA512_BLOCK_SIZE; + + sctx->buf[partial++] = 0x80; + if (partial > bit_offset) { + memset(sctx->buf + partial, 0x0, SHA512_BLOCK_SIZE - partial); + partial = 0; + + block_fn(sctx, sctx->buf, 1); + } + + memset(sctx->buf + partial, 0x0, bit_offset - partial); + bits[0] = cpu_to_be64(sctx->count[1] << 3 | sctx->count[0] >> 61); + bits[1] = cpu_to_be64(sctx->count[0] << 3); + block_fn(sctx, sctx->buf, 1); + + return 0; +} + +static inline int sha512_base_finish(struct shash_desc *desc, u8 *out) +{ + unsigned int digest_size = crypto_shash_digestsize(desc->tfm); + struct sha512_state *sctx = shash_desc_ctx(desc); + __be64 *digest = (__be64 *)out; + int i; + + for (i = 0; digest_size > 0; i++, digest_size -= sizeof(__be64)) + put_unaligned_be64(sctx->state[i], digest++); + + *sctx = (struct sha512_state){}; + return 0; +} -- cgit v1.2.3 From 7c71f0f760d65f270aca0849caebb1fd5d53c66b Mon Sep 17 00:00:00 2001 From: Ard Biesheuvel Date: Thu, 9 Apr 2015 12:55:36 +0200 Subject: crypto: sha1-generic - move to generic glue implementation This updated the generic SHA-1 implementation to use the generic shared SHA-1 glue code. It also implements a .finup hook crypto_sha1_finup() and exports it to other modules. The import and export() functions and the .statesize member are dropped, since the default implementation is perfectly suitable for this module. 
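The boilerplate that the glue code removes is visible in the sha1-generic conversion that follows; the same skeleton serves an architecture-specific driver, which only has to supply its own compression routine. A minimal sketch, assuming a hypothetical my_sha1_blocks() routine and an illustrative driver name and priority (none of which appear in these patches):

#include <linux/linkage.h>
#include <linux/module.h>
#include <crypto/internal/hash.h>
#include <crypto/sha.h>
#include <crypto/sha1_base.h>

/* Hypothetical optimised compression routine, assumed to be provided
 * elsewhere (e.g. in arch assembler or by a hardware driver). */
asmlinkage void my_sha1_blocks(struct sha1_state *sst, u8 const *src,
			       int blocks);

static int my_sha1_update(struct shash_desc *desc, const u8 *data,
			  unsigned int len)
{
	return sha1_base_do_update(desc, data, len, my_sha1_blocks);
}

static int my_sha1_finup(struct shash_desc *desc, const u8 *data,
			 unsigned int len, u8 *out)
{
	if (len)
		sha1_base_do_update(desc, data, len, my_sha1_blocks);
	sha1_base_do_finalize(desc, my_sha1_blocks);
	return sha1_base_finish(desc, out);
}

static int my_sha1_final(struct shash_desc *desc, u8 *out)
{
	return my_sha1_finup(desc, NULL, 0, out);
}

static struct shash_alg my_sha1_alg = {
	.digestsize	= SHA1_DIGEST_SIZE,
	.init		= sha1_base_init,	/* no driver-specific state to set up */
	.update		= my_sha1_update,
	.final		= my_sha1_final,
	.finup		= my_sha1_finup,
	.descsize	= sizeof(struct sha1_state),
	.base		= {
		.cra_name	 = "sha1",
		.cra_driver_name = "sha1-myarch",	/* illustrative */
		.cra_priority	 = 200,			/* above sha1-generic */
		.cra_blocksize	 = SHA1_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	},
};

The module_init/module_exit registration via crypto_register_shash()/crypto_unregister_shash() is omitted from the sketch for brevity.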
Signed-off-by: Ard Biesheuvel Signed-off-by: Herbert Xu --- crypto/sha1_generic.c | 102 ++++++++++---------------------------------------- include/crypto/sha.h | 3 ++ 2 files changed, 23 insertions(+), 82 deletions(-) (limited to 'include/crypto') diff --git a/crypto/sha1_generic.c b/crypto/sha1_generic.c index a3e50c37eb6f..39e3acc438d9 100644 --- a/crypto/sha1_generic.c +++ b/crypto/sha1_generic.c @@ -23,111 +23,49 @@ #include #include #include +#include #include -static int sha1_init(struct shash_desc *desc) +static void sha1_generic_block_fn(struct sha1_state *sst, u8 const *src, + int blocks) { - struct sha1_state *sctx = shash_desc_ctx(desc); + u32 temp[SHA_WORKSPACE_WORDS]; - *sctx = (struct sha1_state){ - .state = { SHA1_H0, SHA1_H1, SHA1_H2, SHA1_H3, SHA1_H4 }, - }; - - return 0; + while (blocks--) { + sha_transform(sst->state, src, temp); + src += SHA1_BLOCK_SIZE; + } + memzero_explicit(temp, sizeof(temp)); } int crypto_sha1_update(struct shash_desc *desc, const u8 *data, - unsigned int len) + unsigned int len) { - struct sha1_state *sctx = shash_desc_ctx(desc); - unsigned int partial, done; - const u8 *src; - - partial = sctx->count % SHA1_BLOCK_SIZE; - sctx->count += len; - done = 0; - src = data; - - if ((partial + len) >= SHA1_BLOCK_SIZE) { - u32 temp[SHA_WORKSPACE_WORDS]; - - if (partial) { - done = -partial; - memcpy(sctx->buffer + partial, data, - done + SHA1_BLOCK_SIZE); - src = sctx->buffer; - } - - do { - sha_transform(sctx->state, src, temp); - done += SHA1_BLOCK_SIZE; - src = data + done; - } while (done + SHA1_BLOCK_SIZE <= len); - - memzero_explicit(temp, sizeof(temp)); - partial = 0; - } - memcpy(sctx->buffer + partial, src, len - done); - - return 0; + return sha1_base_do_update(desc, data, len, sha1_generic_block_fn); } EXPORT_SYMBOL(crypto_sha1_update); - -/* Add padding and return the message digest. */ static int sha1_final(struct shash_desc *desc, u8 *out) { - struct sha1_state *sctx = shash_desc_ctx(desc); - __be32 *dst = (__be32 *)out; - u32 i, index, padlen; - __be64 bits; - static const u8 padding[64] = { 0x80, }; - - bits = cpu_to_be64(sctx->count << 3); - - /* Pad out to 56 mod 64 */ - index = sctx->count & 0x3f; - padlen = (index < 56) ? 
(56 - index) : ((64+56) - index); - crypto_sha1_update(desc, padding, padlen); - - /* Append length */ - crypto_sha1_update(desc, (const u8 *)&bits, sizeof(bits)); - - /* Store state in digest */ - for (i = 0; i < 5; i++) - dst[i] = cpu_to_be32(sctx->state[i]); - - /* Wipe context */ - memset(sctx, 0, sizeof *sctx); - - return 0; + sha1_base_do_finalize(desc, sha1_generic_block_fn); + return sha1_base_finish(desc, out); } -static int sha1_export(struct shash_desc *desc, void *out) +int crypto_sha1_finup(struct shash_desc *desc, const u8 *data, + unsigned int len, u8 *out) { - struct sha1_state *sctx = shash_desc_ctx(desc); - - memcpy(out, sctx, sizeof(*sctx)); - return 0; -} - -static int sha1_import(struct shash_desc *desc, const void *in) -{ - struct sha1_state *sctx = shash_desc_ctx(desc); - - memcpy(sctx, in, sizeof(*sctx)); - return 0; + sha1_base_do_update(desc, data, len, sha1_generic_block_fn); + return sha1_final(desc, out); } +EXPORT_SYMBOL(crypto_sha1_finup); static struct shash_alg alg = { .digestsize = SHA1_DIGEST_SIZE, - .init = sha1_init, + .init = sha1_base_init, .update = crypto_sha1_update, .final = sha1_final, - .export = sha1_export, - .import = sha1_import, + .finup = crypto_sha1_finup, .descsize = sizeof(struct sha1_state), - .statesize = sizeof(struct sha1_state), .base = { .cra_name = "sha1", .cra_driver_name= "sha1-generic", diff --git a/include/crypto/sha.h b/include/crypto/sha.h index 05e82cbc4d8f..a754cdd749c6 100644 --- a/include/crypto/sha.h +++ b/include/crypto/sha.h @@ -87,6 +87,9 @@ struct shash_desc; extern int crypto_sha1_update(struct shash_desc *desc, const u8 *data, unsigned int len); +extern int crypto_sha1_finup(struct shash_desc *desc, const u8 *data, + unsigned int len, u8 *hash); + extern int crypto_sha256_update(struct shash_desc *desc, const u8 *data, unsigned int len); -- cgit v1.2.3 From a2e5ba4fedd6e590bb2cd83817c2bf0cd0de69d1 Mon Sep 17 00:00:00 2001 From: Ard Biesheuvel Date: Thu, 9 Apr 2015 12:55:37 +0200 Subject: crypto: sha256-generic - move to generic glue implementation This updates the generic SHA-256 implementation to use the new shared SHA-256 glue code. It also implements a .finup hook crypto_sha256_finup() and exports it to other modules. The import and export() functions and the .statesize member are dropped, since the default implementation is perfectly suitable for this module. 
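Because sha256_base_finish() writes exactly crypto_shash_digestsize() bytes, the same .final/.finup callbacks can serve both sha224 and sha256, which is what the diff below exploits when it drops sha224_final(). For reference, a hedged sketch of driving the resulting algorithm through the shash API of this era (the caller is illustrative, not part of the patch; error paths are trimmed):

#include <crypto/hash.h>
#include <crypto/sha.h>
#include <linux/err.h>

static int my_sha224_digest(const u8 *data, unsigned int len,
			    u8 out[SHA224_DIGEST_SIZE])
{
	struct crypto_shash *tfm;
	int err;

	tfm = crypto_alloc_shash("sha224", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	{
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		desc->flags = 0;
		/* digest() = init() + update() + final() in one call */
		err = crypto_shash_digest(desc, data, len, out);
	}

	crypto_free_shash(tfm);
	return err;
}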
Signed-off-by: Ard Biesheuvel Signed-off-by: Herbert Xu --- crypto/sha256_generic.c | 133 ++++++++---------------------------------------- include/crypto/sha.h | 3 ++ 2 files changed, 23 insertions(+), 113 deletions(-) (limited to 'include/crypto') diff --git a/crypto/sha256_generic.c b/crypto/sha256_generic.c index b001ff5c2efc..78431163ed3c 100644 --- a/crypto/sha256_generic.c +++ b/crypto/sha256_generic.c @@ -23,6 +23,7 @@ #include #include #include +#include #include #include @@ -214,138 +215,43 @@ static void sha256_transform(u32 *state, const u8 *input) memzero_explicit(W, 64 * sizeof(u32)); } -static int sha224_init(struct shash_desc *desc) +static void sha256_generic_block_fn(struct sha256_state *sst, u8 const *src, + int blocks) { - struct sha256_state *sctx = shash_desc_ctx(desc); - sctx->state[0] = SHA224_H0; - sctx->state[1] = SHA224_H1; - sctx->state[2] = SHA224_H2; - sctx->state[3] = SHA224_H3; - sctx->state[4] = SHA224_H4; - sctx->state[5] = SHA224_H5; - sctx->state[6] = SHA224_H6; - sctx->state[7] = SHA224_H7; - sctx->count = 0; - - return 0; -} - -static int sha256_init(struct shash_desc *desc) -{ - struct sha256_state *sctx = shash_desc_ctx(desc); - sctx->state[0] = SHA256_H0; - sctx->state[1] = SHA256_H1; - sctx->state[2] = SHA256_H2; - sctx->state[3] = SHA256_H3; - sctx->state[4] = SHA256_H4; - sctx->state[5] = SHA256_H5; - sctx->state[6] = SHA256_H6; - sctx->state[7] = SHA256_H7; - sctx->count = 0; - - return 0; + while (blocks--) { + sha256_transform(sst->state, src); + src += SHA256_BLOCK_SIZE; + } } int crypto_sha256_update(struct shash_desc *desc, const u8 *data, unsigned int len) { - struct sha256_state *sctx = shash_desc_ctx(desc); - unsigned int partial, done; - const u8 *src; - - partial = sctx->count & 0x3f; - sctx->count += len; - done = 0; - src = data; - - if ((partial + len) > 63) { - if (partial) { - done = -partial; - memcpy(sctx->buf + partial, data, done + 64); - src = sctx->buf; - } - - do { - sha256_transform(sctx->state, src); - done += 64; - src = data + done; - } while (done + 63 < len); - - partial = 0; - } - memcpy(sctx->buf + partial, src, len - done); - - return 0; + return sha256_base_do_update(desc, data, len, sha256_generic_block_fn); } EXPORT_SYMBOL(crypto_sha256_update); static int sha256_final(struct shash_desc *desc, u8 *out) { - struct sha256_state *sctx = shash_desc_ctx(desc); - __be32 *dst = (__be32 *)out; - __be64 bits; - unsigned int index, pad_len; - int i; - static const u8 padding[64] = { 0x80, }; - - /* Save number of bits */ - bits = cpu_to_be64(sctx->count << 3); - - /* Pad out to 56 mod 64. */ - index = sctx->count & 0x3f; - pad_len = (index < 56) ? (56 - index) : ((64+56) - index); - crypto_sha256_update(desc, padding, pad_len); - - /* Append length (before padding) */ - crypto_sha256_update(desc, (const u8 *)&bits, sizeof(bits)); - - /* Store state in digest */ - for (i = 0; i < 8; i++) - dst[i] = cpu_to_be32(sctx->state[i]); - - /* Zeroize sensitive information. 
*/ - memset(sctx, 0, sizeof(*sctx)); - - return 0; + sha256_base_do_finalize(desc, sha256_generic_block_fn); + return sha256_base_finish(desc, out); } -static int sha224_final(struct shash_desc *desc, u8 *hash) +int crypto_sha256_finup(struct shash_desc *desc, const u8 *data, + unsigned int len, u8 *hash) { - u8 D[SHA256_DIGEST_SIZE]; - - sha256_final(desc, D); - - memcpy(hash, D, SHA224_DIGEST_SIZE); - memzero_explicit(D, SHA256_DIGEST_SIZE); - - return 0; -} - -static int sha256_export(struct shash_desc *desc, void *out) -{ - struct sha256_state *sctx = shash_desc_ctx(desc); - - memcpy(out, sctx, sizeof(*sctx)); - return 0; -} - -static int sha256_import(struct shash_desc *desc, const void *in) -{ - struct sha256_state *sctx = shash_desc_ctx(desc); - - memcpy(sctx, in, sizeof(*sctx)); - return 0; + sha256_base_do_update(desc, data, len, sha256_generic_block_fn); + return sha256_final(desc, hash); } +EXPORT_SYMBOL(crypto_sha256_finup); static struct shash_alg sha256_algs[2] = { { .digestsize = SHA256_DIGEST_SIZE, - .init = sha256_init, + .init = sha256_base_init, .update = crypto_sha256_update, .final = sha256_final, - .export = sha256_export, - .import = sha256_import, + .finup = crypto_sha256_finup, .descsize = sizeof(struct sha256_state), - .statesize = sizeof(struct sha256_state), .base = { .cra_name = "sha256", .cra_driver_name= "sha256-generic", @@ -355,9 +261,10 @@ static struct shash_alg sha256_algs[2] = { { } }, { .digestsize = SHA224_DIGEST_SIZE, - .init = sha224_init, + .init = sha224_base_init, .update = crypto_sha256_update, - .final = sha224_final, + .final = sha256_final, + .finup = crypto_sha256_finup, .descsize = sizeof(struct sha256_state), .base = { .cra_name = "sha224", diff --git a/include/crypto/sha.h b/include/crypto/sha.h index a754cdd749c6..e28c4b5e805d 100644 --- a/include/crypto/sha.h +++ b/include/crypto/sha.h @@ -93,6 +93,9 @@ extern int crypto_sha1_finup(struct shash_desc *desc, const u8 *data, extern int crypto_sha256_update(struct shash_desc *desc, const u8 *data, unsigned int len); +extern int crypto_sha256_finup(struct shash_desc *desc, const u8 *data, + unsigned int len, u8 *hash); + extern int crypto_sha512_update(struct shash_desc *desc, const u8 *data, unsigned int len); #endif -- cgit v1.2.3 From ca142584bc8ebc8e15b317680771ade58ca96315 Mon Sep 17 00:00:00 2001 From: Ard Biesheuvel Date: Thu, 9 Apr 2015 12:55:38 +0200 Subject: crypto: sha512-generic - move to generic glue implementation This updated the generic SHA-512 implementation to use the generic shared SHA-512 glue code. It also implements a .finup hook crypto_sha512_finup() and exports it to other modules. The import and export() functions and the .statesize member are dropped, since the default implementation is perfectly suitable for this module. 
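One detail worth spelling out: SHA-384/SHA-512 track the message byte count in the 128-bit pair count[1]:count[0], and sha512_base_do_finalize() appends that length in bits. The shift-and-or there is simply a 128-bit multiply by eight; a standalone sketch of the same arithmetic (the helper name is hypothetical):

#include <linux/types.h>
#include <asm/byteorder.h>

/*
 * Convert a 128-bit byte count (hi:lo) into the big-endian 128-bit bit
 * count appended to the final SHA-512 block.  Shifting the 128-bit
 * value left by 3 makes the high word pick up the three bits that fall
 * out of the low word (lo >> 61), matching the expression used in
 * sha512_base_do_finalize().
 */
static void my_sha512_length_bits(u64 lo_bytes, u64 hi_bytes, __be64 out[2])
{
	out[0] = cpu_to_be64(hi_bytes << 3 | lo_bytes >> 61);
	out[1] = cpu_to_be64(lo_bytes << 3);
}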
Signed-off-by: Ard Biesheuvel Signed-off-by: Herbert Xu --- crypto/sha512_generic.c | 123 +++++++++--------------------------------------- include/crypto/sha.h | 3 ++ 2 files changed, 24 insertions(+), 102 deletions(-) (limited to 'include/crypto') diff --git a/crypto/sha512_generic.c b/crypto/sha512_generic.c index 1c3c3767e079..eba965d18bfc 100644 --- a/crypto/sha512_generic.c +++ b/crypto/sha512_generic.c @@ -18,6 +18,7 @@ #include #include #include +#include #include #include #include @@ -130,125 +131,42 @@ sha512_transform(u64 *state, const u8 *input) a = b = c = d = e = f = g = h = t1 = t2 = 0; } -static int -sha512_init(struct shash_desc *desc) +static void sha512_generic_block_fn(struct sha512_state *sst, u8 const *src, + int blocks) { - struct sha512_state *sctx = shash_desc_ctx(desc); - sctx->state[0] = SHA512_H0; - sctx->state[1] = SHA512_H1; - sctx->state[2] = SHA512_H2; - sctx->state[3] = SHA512_H3; - sctx->state[4] = SHA512_H4; - sctx->state[5] = SHA512_H5; - sctx->state[6] = SHA512_H6; - sctx->state[7] = SHA512_H7; - sctx->count[0] = sctx->count[1] = 0; - - return 0; -} - -static int -sha384_init(struct shash_desc *desc) -{ - struct sha512_state *sctx = shash_desc_ctx(desc); - sctx->state[0] = SHA384_H0; - sctx->state[1] = SHA384_H1; - sctx->state[2] = SHA384_H2; - sctx->state[3] = SHA384_H3; - sctx->state[4] = SHA384_H4; - sctx->state[5] = SHA384_H5; - sctx->state[6] = SHA384_H6; - sctx->state[7] = SHA384_H7; - sctx->count[0] = sctx->count[1] = 0; - - return 0; + while (blocks--) { + sha512_transform(sst->state, src); + src += SHA512_BLOCK_SIZE; + } } int crypto_sha512_update(struct shash_desc *desc, const u8 *data, unsigned int len) { - struct sha512_state *sctx = shash_desc_ctx(desc); - - unsigned int i, index, part_len; - - /* Compute number of bytes mod 128 */ - index = sctx->count[0] & 0x7f; - - /* Update number of bytes */ - if ((sctx->count[0] += len) < len) - sctx->count[1]++; - - part_len = 128 - index; - - /* Transform as many times as possible. */ - if (len >= part_len) { - memcpy(&sctx->buf[index], data, part_len); - sha512_transform(sctx->state, sctx->buf); - - for (i = part_len; i + 127 < len; i+=128) - sha512_transform(sctx->state, &data[i]); - - index = 0; - } else { - i = 0; - } - - /* Buffer remaining input */ - memcpy(&sctx->buf[index], &data[i], len - i); - - return 0; + return sha512_base_do_update(desc, data, len, sha512_generic_block_fn); } EXPORT_SYMBOL(crypto_sha512_update); -static int -sha512_final(struct shash_desc *desc, u8 *hash) +static int sha512_final(struct shash_desc *desc, u8 *hash) { - struct sha512_state *sctx = shash_desc_ctx(desc); - static u8 padding[128] = { 0x80, }; - __be64 *dst = (__be64 *)hash; - __be64 bits[2]; - unsigned int index, pad_len; - int i; - - /* Save number of bits */ - bits[1] = cpu_to_be64(sctx->count[0] << 3); - bits[0] = cpu_to_be64(sctx->count[1] << 3 | sctx->count[0] >> 61); - - /* Pad out to 112 mod 128. */ - index = sctx->count[0] & 0x7f; - pad_len = (index < 112) ? (112 - index) : ((128+112) - index); - crypto_sha512_update(desc, padding, pad_len); - - /* Append length (before padding) */ - crypto_sha512_update(desc, (const u8 *)bits, sizeof(bits)); - - /* Store state in digest */ - for (i = 0; i < 8; i++) - dst[i] = cpu_to_be64(sctx->state[i]); - - /* Zeroize sensitive information. 
*/ - memset(sctx, 0, sizeof(struct sha512_state)); - - return 0; + sha512_base_do_finalize(desc, sha512_generic_block_fn); + return sha512_base_finish(desc, hash); } -static int sha384_final(struct shash_desc *desc, u8 *hash) +int crypto_sha512_finup(struct shash_desc *desc, const u8 *data, + unsigned int len, u8 *hash) { - u8 D[64]; - - sha512_final(desc, D); - - memcpy(hash, D, 48); - memzero_explicit(D, 64); - - return 0; + sha512_base_do_update(desc, data, len, sha512_generic_block_fn); + return sha512_final(desc, hash); } +EXPORT_SYMBOL(crypto_sha512_finup); static struct shash_alg sha512_algs[2] = { { .digestsize = SHA512_DIGEST_SIZE, - .init = sha512_init, + .init = sha512_base_init, .update = crypto_sha512_update, .final = sha512_final, + .finup = crypto_sha512_finup, .descsize = sizeof(struct sha512_state), .base = { .cra_name = "sha512", @@ -259,9 +177,10 @@ static struct shash_alg sha512_algs[2] = { { } }, { .digestsize = SHA384_DIGEST_SIZE, - .init = sha384_init, + .init = sha384_base_init, .update = crypto_sha512_update, - .final = sha384_final, + .final = sha512_final, + .finup = crypto_sha512_finup, .descsize = sizeof(struct sha512_state), .base = { .cra_name = "sha384", diff --git a/include/crypto/sha.h b/include/crypto/sha.h index e28c4b5e805d..dd7905a3c22e 100644 --- a/include/crypto/sha.h +++ b/include/crypto/sha.h @@ -98,4 +98,7 @@ extern int crypto_sha256_finup(struct shash_desc *desc, const u8 *data, extern int crypto_sha512_update(struct shash_desc *desc, const u8 *data, unsigned int len); + +extern int crypto_sha512_finup(struct shash_desc *desc, const u8 *data, + unsigned int len, u8 *hash); #endif -- cgit v1.2.3
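Tying back to the first patch of the series: crypto_rng_get_bytes() now returns 0 on success rather than a positive byte count, so callers such as seqiv treat any non-zero value as failure. A hedged caller sketch (the function name is illustrative only):

#include <crypto/rng.h>

static int my_fill_iv(struct crypto_rng *rng, u8 *iv, unsigned int ivlen)
{
	int err;

	/* 0 == buffer fully filled; < 0 == DRBG/X9.31 RNG failure. */
	err = crypto_rng_get_bytes(rng, iv, ivlen);
	if (err)
		return err;

	return 0;
}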