crypto: x86/chacha20 - Manually align stack buffer
author    Herbert Xu <herbert@gondor.apana.org.au>
          Wed, 11 Jan 2017 12:28:06 +0000 (20:28 +0800)
committer Herbert Xu <herbert@gondor.apana.org.au>
          Thu, 12 Jan 2017 16:26:46 +0000 (00:26 +0800)
On x86-64 the kernel stack is only guaranteed to be 8-byte aligned, so the
gcc align attribute cannot be used to place a stack buffer on a 16-byte
boundary.  This patch reverts to the old way of aligning it by hand.

Fixes: 9ae433bc79f9 ("crypto: chacha20 - convert generic and...")
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Reviewed-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
arch/x86/crypto/chacha20_glue.c

index 78f75b07dc2592f1382f5177b23f39dedbeadc8c..1e6af1b35f7b4bc4ee156f3a133f6cc6363d0355 100644
@@ -67,10 +67,13 @@ static int chacha20_simd(struct skcipher_request *req)
 {
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct chacha20_ctx *ctx = crypto_skcipher_ctx(tfm);
-       u32 state[16] __aligned(CHACHA20_STATE_ALIGN);
+       u32 *state, state_buf[16 + 2] __aligned(8);
        struct skcipher_walk walk;
        int err;
 
+       BUILD_BUG_ON(CHACHA20_STATE_ALIGN != 16);
+       state = PTR_ALIGN(state_buf + 0, CHACHA20_STATE_ALIGN);
+
        if (req->cryptlen <= CHACHA20_BLOCK_SIZE || !may_use_simd())
                return crypto_chacha20_crypt(req);
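
For illustration only, a minimal userspace sketch of the same align-by-hand
pattern used in the hunk above.  PTR_ALIGN_UP is a stand-in for the kernel's
PTR_ALIGN() helper; the buffer is over-allocated by two u32 words (8 bytes)
so that rounding an 8-byte-aligned start up to a 16-byte boundary always
stays inside the buffer.

    #include <stdint.h>
    #include <stdio.h>

    /* Stand-in for the kernel's PTR_ALIGN(): round p up to the next
     * 'align'-byte boundary (align must be a power of two). */
    #define PTR_ALIGN_UP(p, align) \
            ((void *)(((uintptr_t)(p) + ((align) - 1)) & ~((uintptr_t)(align) - 1)))

    int main(void)
    {
            /* 16 state words plus 2 words (8 bytes) of slack; aligned(8)
             * means the start is at worst 8 bytes short of a 16-byte
             * boundary, so the slack always covers the adjustment. */
            uint32_t state_buf[16 + 2] __attribute__((aligned(8)));
            uint32_t *state = PTR_ALIGN_UP(state_buf, 16);

            printf("state_buf=%p state=%p\n", (void *)state_buf, (void *)state);
            return ((uintptr_t)state & 15) ? 1 : 0;  /* 0 if 16-byte aligned */
    }

The BUILD_BUG_ON in the patch pins CHACHA20_STATE_ALIGN to 16 so the 8 bytes
of slack provided by the two extra words remain sufficient if the constant
ever changes.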