From b8fbe71f7535d4dfeed0bb8d924107dc58d502e2 Mon Sep 17 00:00:00 2001
From: Herbert Xu <herbert@gondor.apana.org.au>
Date: Wed, 11 Jan 2017 20:28:06 +0800
Subject: [PATCH] crypto: x86/chacha20 - Manually align stack buffer

The kernel on x86-64 cannot use gcc attribute align to align to a
16-byte boundary.  This patch reverts to the old way of aligning it
by hand.

Fixes: 9ae433bc79f9 ("crypto: chacha20 - convert generic and...")
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Reviewed-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
---
 arch/x86/crypto/chacha20_glue.c | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/arch/x86/crypto/chacha20_glue.c b/arch/x86/crypto/chacha20_glue.c
index 78f75b07dc25..1e6af1b35f7b 100644
--- a/arch/x86/crypto/chacha20_glue.c
+++ b/arch/x86/crypto/chacha20_glue.c
@@ -67,10 +67,13 @@ static int chacha20_simd(struct skcipher_request *req)
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 	struct chacha20_ctx *ctx = crypto_skcipher_ctx(tfm);
-	u32 state[16] __aligned(CHACHA20_STATE_ALIGN);
+	u32 *state, state_buf[16 + 2] __aligned(8);
 	struct skcipher_walk walk;
 	int err;
 
+	BUILD_BUG_ON(CHACHA20_STATE_ALIGN != 16);
+	state = PTR_ALIGN(state_buf + 0, CHACHA20_STATE_ALIGN);
+
 	if (req->cryptlen <= CHACHA20_BLOCK_SIZE || !may_use_simd())
 		return crypto_chacha20_crypt(req);
 
-- 
2.45.2