author | Eric Biggers <ebiggers@google.com> | 2018-02-19 23:48:27 -0800
committer | Herbert Xu <herbert@gondor.apana.org.au> | 2018-03-03 00:03:35 +0800
commit | 75d8a5532fc6db34e5aa712ec8117c9f9cb83088 (patch)
tree | fb3963688f7f4b75f66930dd58e36523b6a14d8c
parent | 0d87d0f4254cc581d3fac51d1e11b51b2549e42a (diff)
download | linux-75d8a5532fc6db34e5aa712ec8117c9f9cb83088.tar.bz2
crypto: x86/glue_helper - rename glue_skwalk_fpu_begin()
There are no users of the original glue_fpu_begin() anymore, so rename
glue_skwalk_fpu_begin() to glue_fpu_begin() so that it matches
glue_fpu_end() again.
Signed-off-by: Eric Biggers <ebiggers@google.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
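For context, a minimal caller-side sketch of how a per-cipher wrapper pairs the renamed glue_fpu_begin() with glue_fpu_end(), modeled on the cast5_avx_glue.c hunk below; the header paths and the CAST5_PARALLEL_BLOCKS value are assumptions for illustration, not part of this patch:

/*
 * Illustrative sketch only, modeled on cast5_avx_glue.c; header paths and
 * CAST5_PARALLEL_BLOCKS are assumed here, not taken from this patch.
 */
#include <asm/crypto/glue_helper.h>	/* glue_fpu_begin() / glue_fpu_end() */
#include <crypto/cast5.h>		/* CAST5_BLOCK_SIZE */

#define CAST5_PARALLEL_BLOCKS 16	/* blocks per AVX call (assumed value) */

static inline bool cast5_fpu_begin(bool fpu_enabled, struct skcipher_walk *walk,
				   unsigned int nbytes)
{
	/* Enable the FPU only when enough data is queued to amortize the cost. */
	return glue_fpu_begin(CAST5_BLOCK_SIZE, CAST5_PARALLEL_BLOCKS,
			      walk, fpu_enabled, nbytes);
}

static inline void cast5_fpu_end(bool fpu_enabled)
{
	/* The begin/end names now match: glue_fpu_begin() / glue_fpu_end(). */
	glue_fpu_end(fpu_enabled);
}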
-rw-r--r-- | arch/x86/crypto/cast5_avx_glue.c | 4
-rw-r--r-- | arch/x86/crypto/glue_helper.c | 21
-rw-r--r-- | arch/x86/include/asm/crypto/glue_helper.h | 7
3 files changed, 14 insertions(+), 18 deletions(-)
diff --git a/arch/x86/crypto/cast5_avx_glue.c b/arch/x86/crypto/cast5_avx_glue.c
index 56f2a1b0ccf5..41034745d6a2 100644
--- a/arch/x86/crypto/cast5_avx_glue.c
+++ b/arch/x86/crypto/cast5_avx_glue.c
@@ -50,8 +50,8 @@ static int cast5_setkey_skcipher(struct crypto_skcipher *tfm, const u8 *key,
 static inline bool cast5_fpu_begin(bool fpu_enabled, struct skcipher_walk *walk,
 				   unsigned int nbytes)
 {
-	return glue_skwalk_fpu_begin(CAST5_BLOCK_SIZE, CAST5_PARALLEL_BLOCKS,
-				     walk, fpu_enabled, nbytes);
+	return glue_fpu_begin(CAST5_BLOCK_SIZE, CAST5_PARALLEL_BLOCKS,
+			      walk, fpu_enabled, nbytes);
 }
 
 static inline void cast5_fpu_end(bool fpu_enabled)
diff --git a/arch/x86/crypto/glue_helper.c b/arch/x86/crypto/glue_helper.c
index fab5fa1aed77..a78ef99a9981 100644
--- a/arch/x86/crypto/glue_helper.c
+++ b/arch/x86/crypto/glue_helper.c
@@ -50,9 +50,8 @@ int glue_ecb_req_128bit(const struct common_glue_ctx *gctx,
 		unsigned int func_bytes;
 		unsigned int i;
 
-		fpu_enabled = glue_skwalk_fpu_begin(bsize,
-						    gctx->fpu_blocks_limit,
-						    &walk, fpu_enabled, nbytes);
+		fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
+					     &walk, fpu_enabled, nbytes);
 		for (i = 0; i < gctx->num_funcs; i++) {
 			func_bytes = bsize * gctx->funcs[i].num_blocks;
 
@@ -129,9 +128,8 @@ int glue_cbc_decrypt_req_128bit(const struct common_glue_ctx *gctx,
 		unsigned int i;
 		u128 last_iv;
 
-		fpu_enabled = glue_skwalk_fpu_begin(bsize,
-						    gctx->fpu_blocks_limit,
-						    &walk, fpu_enabled, nbytes);
+		fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
+					     &walk, fpu_enabled, nbytes);
 		/* Start of the last block. */
 		src += nbytes / bsize - 1;
 		dst += nbytes / bsize - 1;
@@ -190,9 +188,8 @@ int glue_ctr_req_128bit(const struct common_glue_ctx *gctx,
 		unsigned int i;
 		le128 ctrblk;
 
-		fpu_enabled = glue_skwalk_fpu_begin(bsize,
-						    gctx->fpu_blocks_limit,
-						    &walk, fpu_enabled, nbytes);
+		fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
+					     &walk, fpu_enabled, nbytes);
 
 		be128_to_le128(&ctrblk, (be128 *)walk.iv);
 
@@ -291,9 +288,9 @@ int glue_xts_req_128bit(const struct common_glue_ctx *gctx,
 		return err;
 
 	/* set minimum length to bsize, for tweak_fn */
-	fpu_enabled = glue_skwalk_fpu_begin(bsize, gctx->fpu_blocks_limit,
-					    &walk, fpu_enabled,
-					    nbytes < bsize ? bsize : nbytes);
+	fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
+				     &walk, fpu_enabled,
+				     nbytes < bsize ? bsize : nbytes);
 
 	/* calculate first value of T */
 	tweak_fn(tweak_ctx, walk.iv, walk.iv);
diff --git a/arch/x86/include/asm/crypto/glue_helper.h b/arch/x86/include/asm/crypto/glue_helper.h
index b925a6363b3f..d1818634ae7e 100644
--- a/arch/x86/include/asm/crypto/glue_helper.h
+++ b/arch/x86/include/asm/crypto/glue_helper.h
@@ -44,10 +44,9 @@ struct common_glue_ctx {
 	struct common_glue_func_entry funcs[];
 };
 
-static inline bool glue_skwalk_fpu_begin(unsigned int bsize,
-					 int fpu_blocks_limit,
-					 struct skcipher_walk *walk,
-					 bool fpu_enabled, unsigned int nbytes)
+static inline bool glue_fpu_begin(unsigned int bsize, int fpu_blocks_limit,
+				  struct skcipher_walk *walk,
+				  bool fpu_enabled, unsigned int nbytes)
 {
 	if (likely(fpu_blocks_limit < 0))
 		return false;