author		Linus Torvalds <torvalds@linux-foundation.org>	2019-09-18 12:11:14 -0700
committer	Linus Torvalds <torvalds@linux-foundation.org>	2019-09-18 12:11:14 -0700
commit		8b53c76533aa4356602aea98f98a2f3b4051464c (patch)
tree		ab10ba58e21501407f8108a6bb9003daa2176962 /crypto/cryptd.c
parent		6cfae0c26b21dce323fe8799b66cf4bc996e3565 (diff)
parent		9575d1a5c0780ea26ff8dd29c94a32be32ce3c85 (diff)
download	linux-8b53c76533aa4356602aea98f98a2f3b4051464c.tar.bz2
Merge branch 'linus' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
Pull crypto updates from Herbert Xu:
"API:
  - Add the ability to abort a skcipher walk.

 Algorithms:
  - Fix XTS to actually do the stealing.
  - Add library helpers for AES and DES for single-block users
    (see the sketch after this message).
  - Add library helpers for SHA256.
  - Add new DES key verification helper.
  - Add surrounding bits for ESSIV generator.
  - Add accelerations for aegis128.
  - Add test vectors for lzo-rle.

 Drivers:
  - Add i.MX8MQ support to caam.
  - Add gcm/ccm/cfb/ofb aes support in inside-secure.
  - Add ofb/cfb aes support in mediatek.
  - Add HiSilicon ZIP accelerator support.

 Others:
  - Fix potential race condition in padata.
  - Use unbound workqueues in padata"
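A note on the "library helpers for AES and DES" item above: these are plain C routines that let a single-block user encrypt without allocating a full crypto_cipher transform or touching scatterlists. Below is a minimal kernel-module sketch, assuming the <crypto/aes.h> interface (aes_expandkey()/aes_encrypt()) from lib/crypto; the module name and key value are hypothetical, and this is an illustration rather than code from this merge.

	#include <linux/module.h>
	#include <crypto/aes.h>

	static int __init aes_lib_demo_init(void)
	{
		struct crypto_aes_ctx ctx;	/* expanded key schedule */
		static const u8 key[AES_KEYSIZE_128] = { [0 ... 15] = 0x42 };
		u8 in[AES_BLOCK_SIZE] = {};
		u8 out[AES_BLOCK_SIZE];
		int err;

		/* Expand the raw key into round keys; rejects bad key lengths. */
		err = aes_expandkey(&ctx, key, sizeof(key));
		if (err)
			return err;

		/* Encrypt exactly one 16-byte block, fully synchronously. */
		aes_encrypt(&ctx, out, in);

		pr_info("aes_lib_demo: ciphertext[0] = %#x\n", out[0]);
		return 0;
	}

	static void __exit aes_lib_demo_exit(void)
	{
	}

	module_init(aes_lib_demo_init);
	module_exit(aes_lib_demo_exit);
	MODULE_LICENSE("GPL");

DES gains an analogous library interface in the same series, along with the new key verification helper called out above.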
* 'linus' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (311 commits)
crypto: caam - Cast to long first before pointer conversion
crypto: ccree - enable CTS support in AES-XTS
crypto: inside-secure - Probe transform record cache RAM sizes
crypto: inside-secure - Base RD fetchcount on actual RD FIFO size
crypto: inside-secure - Base CD fetchcount on actual CD FIFO size
crypto: inside-secure - Enable extended algorithms on newer HW
crypto: inside-secure: Corrected configuration of EIP96_TOKEN_CTRL
crypto: inside-secure - Add EIP97/EIP197 and endianness detection
padata: remove cpu_index from the parallel_queue
padata: unbind parallel jobs from specific CPUs
padata: use separate workqueues for parallel and serial work
padata, pcrypt: take CPU hotplug lock internally in padata_alloc_possible
crypto: pcrypt - remove padata cpumask notifier
padata: make padata_do_parallel find alternate callback CPU
workqueue: require CPU hotplug read exclusion for apply_workqueue_attrs
workqueue: unconfine alloc/apply/free_workqueue_attrs()
padata: allocate workqueue internally
arm64: dts: imx8mq: Add CAAM node
random: Use wait_event_freezable() in add_hwgenerator_randomness()
crypto: ux500 - Fix COMPILE_TEST warnings
...
Diffstat (limited to 'crypto/cryptd.c')
-rw-r--r--	crypto/cryptd.c	44
1 file changed, 22 insertions, 22 deletions
diff --git a/crypto/cryptd.c b/crypto/cryptd.c
index 3748f9b4516d..927760b316a4 100644
--- a/crypto/cryptd.c
+++ b/crypto/cryptd.c
@@ -16,7 +16,7 @@
 #include <crypto/internal/aead.h>
 #include <crypto/internal/skcipher.h>
 #include <crypto/cryptd.h>
-#include <linux/atomic.h>
+#include <linux/refcount.h>
 #include <linux/err.h>
 #include <linux/init.h>
 #include <linux/kernel.h>
@@ -63,7 +63,7 @@ struct aead_instance_ctx {
 };
 
 struct cryptd_skcipher_ctx {
-	atomic_t refcnt;
+	refcount_t refcnt;
 	struct crypto_sync_skcipher *child;
 };
 
@@ -72,7 +72,7 @@ struct cryptd_skcipher_request_ctx {
 };
 
 struct cryptd_hash_ctx {
-	atomic_t refcnt;
+	refcount_t refcnt;
 	struct crypto_shash *child;
 };
 
@@ -82,7 +82,7 @@ struct cryptd_hash_request_ctx {
 };
 
 struct cryptd_aead_ctx {
-	atomic_t refcnt;
+	refcount_t refcnt;
 	struct crypto_aead *child;
 };
 
@@ -127,7 +127,7 @@ static int cryptd_enqueue_request(struct cryptd_queue *queue,
 {
 	int cpu, err;
 	struct cryptd_cpu_queue *cpu_queue;
-	atomic_t *refcnt;
+	refcount_t *refcnt;
 
 	cpu = get_cpu();
 	cpu_queue = this_cpu_ptr(queue->cpu_queue);
@@ -140,10 +140,10 @@ static int cryptd_enqueue_request(struct cryptd_queue *queue,
 
 	queue_work_on(cpu, cryptd_wq, &cpu_queue->work);
 
-	if (!atomic_read(refcnt))
+	if (!refcount_read(refcnt))
 		goto out_put_cpu;
 
-	atomic_inc(refcnt);
+	refcount_inc(refcnt);
 
 out_put_cpu:
 	put_cpu();
@@ -270,13 +270,13 @@ static void cryptd_skcipher_complete(struct skcipher_request *req, int err)
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
 	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
-	int refcnt = atomic_read(&ctx->refcnt);
+	int refcnt = refcount_read(&ctx->refcnt);
 
 	local_bh_disable();
 	rctx->complete(&req->base, err);
 	local_bh_enable();
 
-	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
+	if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
 		crypto_free_skcipher(tfm);
 }
 
@@ -521,13 +521,13 @@ static void cryptd_hash_complete(struct ahash_request *req, int err)
 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
 	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
 	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
-	int refcnt = atomic_read(&ctx->refcnt);
+	int refcnt = refcount_read(&ctx->refcnt);
 
 	local_bh_disable();
 	rctx->complete(&req->base, err);
 	local_bh_enable();
 
-	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
+	if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
 		crypto_free_ahash(tfm);
 }
 
@@ -772,13 +772,13 @@ static void cryptd_aead_crypt(struct aead_request *req,
 
 out:
 	ctx = crypto_aead_ctx(tfm);
-	refcnt = atomic_read(&ctx->refcnt);
+	refcnt = refcount_read(&ctx->refcnt);
 
 	local_bh_disable();
 	compl(&req->base, err);
 	local_bh_enable();
 
-	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
+	if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
 		crypto_free_aead(tfm);
 }
 
@@ -979,7 +979,7 @@ struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
 	}
 
 	ctx = crypto_skcipher_ctx(tfm);
-	atomic_set(&ctx->refcnt, 1);
+	refcount_set(&ctx->refcnt, 1);
 
 	return container_of(tfm, struct cryptd_skcipher, base);
 }
@@ -997,7 +997,7 @@ bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm)
 {
 	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);
 
-	return atomic_read(&ctx->refcnt) - 1;
+	return refcount_read(&ctx->refcnt) - 1;
 }
 EXPORT_SYMBOL_GPL(cryptd_skcipher_queued);
 
@@ -1005,7 +1005,7 @@ void cryptd_free_skcipher(struct cryptd_skcipher *tfm)
 {
 	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);
 
-	if (atomic_dec_and_test(&ctx->refcnt))
+	if (refcount_dec_and_test(&ctx->refcnt))
 		crypto_free_skcipher(&tfm->base);
 }
 EXPORT_SYMBOL_GPL(cryptd_free_skcipher);
@@ -1029,7 +1029,7 @@ struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
 	}
 
 	ctx = crypto_ahash_ctx(tfm);
-	atomic_set(&ctx->refcnt, 1);
+	refcount_set(&ctx->refcnt, 1);
 
 	return __cryptd_ahash_cast(tfm);
 }
@@ -1054,7 +1054,7 @@ bool cryptd_ahash_queued(struct cryptd_ahash *tfm)
 {
 	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);
 
-	return atomic_read(&ctx->refcnt) - 1;
+	return refcount_read(&ctx->refcnt) - 1;
 }
 EXPORT_SYMBOL_GPL(cryptd_ahash_queued);
 
@@ -1062,7 +1062,7 @@ void cryptd_free_ahash(struct cryptd_ahash *tfm)
 {
 	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);
 
-	if (atomic_dec_and_test(&ctx->refcnt))
+	if (refcount_dec_and_test(&ctx->refcnt))
 		crypto_free_ahash(&tfm->base);
 }
 EXPORT_SYMBOL_GPL(cryptd_free_ahash);
 
@@ -1086,7 +1086,7 @@ struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
 	}
 
 	ctx = crypto_aead_ctx(tfm);
-	atomic_set(&ctx->refcnt, 1);
+	refcount_set(&ctx->refcnt, 1);
 
 	return __cryptd_aead_cast(tfm);
 }
@@ -1104,7 +1104,7 @@ bool cryptd_aead_queued(struct cryptd_aead *tfm)
 {
 	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);
 
-	return atomic_read(&ctx->refcnt) - 1;
+	return refcount_read(&ctx->refcnt) - 1;
 }
 EXPORT_SYMBOL_GPL(cryptd_aead_queued);
 
@@ -1112,7 +1112,7 @@ void cryptd_free_aead(struct cryptd_aead *tfm)
 {
 	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);
 
-	if (atomic_dec_and_test(&ctx->refcnt))
+	if (refcount_dec_and_test(&ctx->refcnt))
 		crypto_free_aead(&tfm->base);
 }
 EXPORT_SYMBOL_GPL(cryptd_free_aead);
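The cryptd.c change above is a mechanical atomic_t -> refcount_t conversion: refcount_t keeps the same read/inc/dec-and-test shape, but saturates and warns on overflow or underflow instead of silently wrapping, turning a potential use-after-free into a WARN. Below is a minimal sketch of the lifecycle pattern cryptd follows (count set to 1 at allocation, one extra reference per queued request, dec-and-test on release), assuming only the standard <linux/refcount.h> API; the demo_ctx object and function names are hypothetical.

	#include <linux/refcount.h>
	#include <linux/slab.h>

	/* Hypothetical object mirroring cryptd's ctx->refcnt lifecycle. */
	struct demo_ctx {
		refcount_t refcnt;
	};

	static struct demo_ctx *demo_alloc(void)
	{
		struct demo_ctx *ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);

		if (!ctx)
			return NULL;
		/* One reference owned by the caller, as cryptd_alloc_*() does. */
		refcount_set(&ctx->refcnt, 1);
		return ctx;
	}

	static void demo_queue_request(struct demo_ctx *ctx)
	{
		/* Each in-flight request pins the object, as cryptd_enqueue_request() does. */
		refcount_inc(&ctx->refcnt);
	}

	static void demo_put(struct demo_ctx *ctx)
	{
		/*
		 * Unlike atomic_dec_and_test(), refcount_dec_and_test()
		 * saturates and WARNs rather than wrapping past zero.
		 */
		if (refcount_dec_and_test(&ctx->refcnt))
			kfree(ctx);
	}

The extra refcount_read() guards in the enqueue and completion paths of the diff preserve cryptd's existing convention: a count of zero marks a transform that was not obtained through cryptd_alloc_*(), so neither path touches the count for it.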