path: root/arch/arm64/crypto/aes-ctr-fallback.h
/*
 * Fallback for sync aes(ctr) in contexts where kernel mode NEON
 * is not allowed
 *
 * Copyright (C) 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>

/* scalar AES core (arch/arm64/crypto/aes-cipher-core.S); no NEON required */
asmlinkage void __aes_arm64_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);

static inline int aes_ctr_encrypt_fallback(struct crypto_aes_ctx *ctx,
					   struct skcipher_request *req)
{
	struct skcipher_walk walk;
	u8 buf[AES_BLOCK_SIZE];
	int err;

	/* may be called from atomic context, so use a non-sleeping walk */
	err = skcipher_walk_virt(&walk, req, true);

	while (walk.nbytes > 0) {
		u8 *dst = walk.dst.virt.addr;
		u8 *src = walk.src.virt.addr;
		int nbytes = walk.nbytes;
		int tail = 0;

		/*
		 * Every step of the walk except the last must consume a
		 * whole number of blocks; hand any partial block back to
		 * the walk via 'tail' so it is revisited in a later step.
		 */
		if (nbytes < walk.total) {
			nbytes = round_down(nbytes, AES_BLOCK_SIZE);
			tail = walk.nbytes % AES_BLOCK_SIZE;
		}

		do {
			int bsize = min(nbytes, AES_BLOCK_SIZE);

			/*
			 * Generate one keystream block by encrypting the
			 * counter; 6 + key_length / 4 yields 10/12/14
			 * rounds for 128/192/256-bit keys.
			 */
			__aes_arm64_encrypt(ctx->key_enc, buf, walk.iv,
					    6 + ctx->key_length / 4);
			/* XOR the keystream into the data, then bump the
			 * big-endian block counter */
			crypto_xor_cpy(dst, src, buf, bsize);
			crypto_inc(walk.iv, AES_BLOCK_SIZE);

			dst += AES_BLOCK_SIZE;
			src += AES_BLOCK_SIZE;
			nbytes -= AES_BLOCK_SIZE;
		} while (nbytes > 0);

		err = skcipher_walk_done(&walk, tail);
	}
	return err;
}
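
/*
 * Usage sketch, not part of this header: a hypothetical sync CTR entry
 * point in the arm64 AES glue code might select this fallback whenever
 * kernel mode NEON is unavailable. may_use_simd() comes from <asm/simd.h>;
 * ctr_encrypt() stands in for the NEON-accelerated path and is assumed
 * here, not defined by this file.
 */
static int ctr_encrypt_sync(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);

	/* NEON registers may not be touched here (e.g. hard IRQ context) */
	if (!may_use_simd())
		return aes_ctr_encrypt_fallback(ctx, req);

	return ctr_encrypt(req);
}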