author	Sebastian Siewior <sebastian@breakpoint.cc>	2008-03-06 18:56:19 +0800
committer	Herbert Xu <herbert@gondor.apana.org.au>	2008-03-06 18:56:19 +0800
commit	6212f2c7f70c591efb0d9f3d50ad29112392fee2 (patch)
tree	bfd2394ae9e7e930e704357d6a9ff307cf10b6cb
parent	bc97f19dc8be1f181f33b4368542c72498f3562a (diff)
download	linux-6212f2c7f70c591efb0d9f3d50ad29112392fee2.tar.bz2
[CRYPTO] xts: Use proper alignment
The XTS blockmode uses a copy of the IV which is saved on the stack
and may or may not be properly aligned. If it is not, it will break
hardware ciphers like the Geode or PadLock.

This patch encrypts the IV in place so we don't have to worry about
alignment.

Signed-off-by: Sebastian Siewior <sebastian@breakpoint.cc>
Tested-by: Stefan Hellermann <stefan@the2masters.de>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
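For context, a minimal userspace sketch (not kernel code) of the alignment concern: a 16-byte tweak copied to the stack is only guaranteed the alignment of its members, while the walk's IV buffer is set up to respect the cipher's alignmask; hardware engines such as PadLock expect 16-byte-aligned data. The struct and helper names below are hypothetical stand-ins, not kernel APIs.

	/*
	 * Illustrative sketch only.  A 16-byte struct of two u64s is only
	 * guaranteed 8-byte alignment on the stack, while a buffer obtained
	 * with the required alignment (here via aligned_alloc) stands in for
	 * the walk's IV buffer that the patch now encrypts in place.
	 */
	#include <stdint.h>
	#include <stdio.h>
	#include <stdlib.h>

	struct be128_like {             /* stand-in for the kernel's be128 */
		uint64_t a, b;          /* natural alignment: 8 bytes */
	};

	static int is_aligned(const void *p, size_t align)
	{
		return ((uintptr_t)p & (align - 1)) == 0;
	}

	int main(void)
	{
		struct be128_like stack_iv;            /* may be only 8-byte aligned */
		void *walk_iv = aligned_alloc(16, 16); /* like an alignmask-padded walk buffer */

		printf("stack copy 16-byte aligned:  %d\n", is_aligned(&stack_iv, 16));
		printf("walk buffer 16-byte aligned: %d\n", is_aligned(walk_iv, 16));

		free(walk_iv);
		return 0;
	}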
-rw-r--r--	crypto/xts.c	13
1 file changed, 6 insertions(+), 7 deletions(-)
diff --git a/crypto/xts.c b/crypto/xts.c
index 8eb08bfaf7c0..d87b0f3102c3 100644
--- a/crypto/xts.c
+++ b/crypto/xts.c
@@ -77,16 +77,16 @@ static int setkey(struct crypto_tfm *parent, const u8 *key,
}
struct sinfo {
- be128 t;
+ be128 *t;
struct crypto_tfm *tfm;
void (*fn)(struct crypto_tfm *, u8 *, const u8 *);
};
static inline void xts_round(struct sinfo *s, void *dst, const void *src)
{
- be128_xor(dst, &s->t, src); /* PP <- T xor P */
+ be128_xor(dst, s->t, src); /* PP <- T xor P */
s->fn(s->tfm, dst, dst); /* CC <- E(Key1,PP) */
- be128_xor(dst, dst, &s->t); /* C <- T xor CC */
+ be128_xor(dst, dst, s->t); /* C <- T xor CC */
}
static int crypt(struct blkcipher_desc *d,
@@ -101,7 +101,6 @@ static int crypt(struct blkcipher_desc *d,
.tfm = crypto_cipher_tfm(ctx->child),
.fn = fn
};
- be128 *iv;
u8 *wsrc;
u8 *wdst;
@@ -109,20 +108,20 @@ static int crypt(struct blkcipher_desc *d,
if (!w->nbytes)
return err;
+ s.t = (be128 *)w->iv;
avail = w->nbytes;
wsrc = w->src.virt.addr;
wdst = w->dst.virt.addr;
/* calculate first value of T */
- iv = (be128 *)w->iv;
- tw(crypto_cipher_tfm(ctx->tweak), (void *)&s.t, w->iv);
+ tw(crypto_cipher_tfm(ctx->tweak), w->iv, w->iv);
goto first;
for (;;) {
do {
- gf128mul_x_ble(&s.t, &s.t);
+ gf128mul_x_ble(s.t, s.t);
first:
xts_round(&s, wdst, wsrc);