// SPDX-License-Identifier: GPL-2.0-only
/*
 * aes-ccm-glue.c - AES-CCM transform for ARMv8 with Crypto Extensions
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>

#include "aes-ce-setkey.h"

static int num_rounds(struct crypto_aes_ctx *ctx)
{
	/*
	 * # of rounds specified by AES:
	 * 128 bit key		10 rounds
	 * 192 bit key		12 rounds
	 * 256 bit key		14 rounds
	 * => n byte key	=> 6 + (n/4) rounds
	 */
	return 6 + ctx->key_length / 4;
}
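
/*
 * Worked instance of the round-count arithmetic above: a 128-bit key is
 * 16 bytes, giving 6 + 16/4 = 10 rounds; a 256-bit key is 32 bytes,
 * giving 6 + 32/4 = 14 rounds, matching the AES specification.
 */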

asmlinkage void ce_aes_ccm_auth_data(u8 mac[], u8 const in[], u32 abytes,
				     u32 *macp, u32 const rk[], u32 rounds);

asmlinkage void ce_aes_ccm_encrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_decrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_final(u8 mac[], u8 const ctr[], u32 const rk[],
				 u32 rounds);

asmlinkage void __aes_arm64_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);
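
/*
 * The ce_aes_ccm_* routines above are implemented in assembly using the
 * ARMv8 Crypto Extensions (in mainline, aes-ce-ccm-core.S) and may only be
 * called between kernel_neon_begin() and kernel_neon_end().
 * __aes_arm64_encrypt() is the generic scalar AES primitive used on the
 * fallback paths when the NEON unit is unavailable.
 */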

static int ccm_setkey(struct crypto_aead *tfm, const u8 *in_key,
		      unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(tfm);
	int ret;

	ret = ce_aes_expandkey(ctx, in_key, key_len);
	if (!ret)
		return 0;

	tfm->base.crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
	return -EINVAL;
}

static int ccm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	if ((authsize & 1) || authsize < 4)
		return -EINVAL;
	return 0;
}
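
/*
 * RFC 3610 only permits even tag sizes between 4 and 16 bytes; the check
 * above rejects odd or too-small values, while the upper bound is enforced
 * by the crypto core via .maxauthsize (AES_BLOCK_SIZE) in the aead_alg
 * definition below.
 */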

static int ccm_init_mac(struct aead_request *req, u8 maciv[], u32 msglen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	__be32 *n = (__be32 *)&maciv[AES_BLOCK_SIZE - 8];
	u32 l = req->iv[0] + 1;

	/* verify that CCM dimension 'L' is set correctly in the IV */
	if (l < 2 || l > 8)
		return -EINVAL;

	/* verify that msglen can in fact be represented in L bytes */
	if (l < 4 && msglen >> (8 * l))
		return -EOVERFLOW;

	/*
	 * Even if the CCM spec allows L values of up to 8, the Linux cryptoapi
	 * uses a u32 type to represent msglen so the top 4 bytes are always 0.
	 */
	n[0] = 0;
	n[1] = cpu_to_be32(msglen);

	memcpy(maciv, req->iv, AES_BLOCK_SIZE - l);

	/*
	 * Meaning of byte 0 according to CCM spec (RFC 3610/NIST 800-38C)
	 * - bits 0..2	: max # of bytes required to represent msglen, minus 1
	 *		  (already set by caller)
	 * - bits 3..5	: size of auth tag (1 => 4 bytes, 2 => 6 bytes, etc)
	 * - bit 6	: indicates presence of authenticate-only data
	 */
	maciv[0] |= (crypto_aead_authsize(aead) - 2) << 2;
	if (req->assoclen)
		maciv[0] |= 0x40;

	memset(&req->iv[AES_BLOCK_SIZE - l], 0, l);
	return 0;
}
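
/*
 * Illustrative B0 flags example (parameter values assumed for the sake of
 * the walk-through): an 11-byte nonce implies L = 4, so the caller sets
 * req->iv[0] = 3; an 8-byte tag contributes (8 - 2) << 2 = 0x18, i.e.
 * bits 3..5 hold (8 - 2) / 2 = 3; and the presence of AAD sets bit 6, so
 * maciv[0] ends up as 0x03 | 0x18 | 0x40 = 0x5b.
 */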

static void ccm_update_mac(struct crypto_aes_ctx *key, u8 mac[], u8 const in[],
			   u32 abytes, u32 *macp)
{
	if (crypto_simd_usable()) {
		kernel_neon_begin();
		ce_aes_ccm_auth_data(mac, in, abytes, macp, key->key_enc,
				     num_rounds(key));
		kernel_neon_end();
	} else {
		if (*macp > 0 && *macp < AES_BLOCK_SIZE) {
			int added = min(abytes, AES_BLOCK_SIZE - *macp);

			crypto_xor(&mac[*macp], in, added);

			*macp += added;
			in += added;
			abytes -= added;
		}

		while (abytes >= AES_BLOCK_SIZE) {
			__aes_arm64_encrypt(key->key_enc, mac, mac,
					    num_rounds(key));
			crypto_xor(mac, in, AES_BLOCK_SIZE);

			in += AES_BLOCK_SIZE;
			abytes -= AES_BLOCK_SIZE;
		}

		if (abytes > 0) {
			__aes_arm64_encrypt(key->key_enc, mac, mac,
					    num_rounds(key));
			crypto_xor(mac, in, abytes);
			*macp = abytes;
		}
	}
}
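
/*
 * Note on the scalar path above: the running CBC-MAC state is encrypted
 * *before* the next chunk of data is XORed in, so after the last call the
 * final AES encryption of the MAC is still outstanding; it is performed
 * later, by ce_aes_ccm_final() or at the end of ccm_crypt_fallback().
 */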

static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[])
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct __packed { __be16 l; __be32 h; u16 len; } ltag;
	struct scatter_walk walk;
	u32 len = req->assoclen;
	u32 macp = 0;

	/* prepend the AAD with a length tag */
	if (len < 0xff00) {
		ltag.l = cpu_to_be16(len);
		ltag.len = 2;
	} else {
		ltag.l = cpu_to_be16(0xfffe);
		put_unaligned_be32(len, &ltag.h);
		ltag.len = 6;
	}

	ccm_update_mac(ctx, mac, (u8 *)&ltag, ltag.len, &macp);
	scatterwalk_start(&walk, req->src);

	do {
		u32 n = scatterwalk_clamp(&walk, len);
		u8 *p;

		if (!n) {
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}
		p = scatterwalk_map(&walk);
		ccm_update_mac(ctx, mac, p, n, &macp);
		len -= n;

		scatterwalk_unmap(p);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);
	} while (len);
}
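
/*
 * The length-tag encoding follows RFC 3610 section 2.2: AAD lengths below
 * 0xff00 are encoded in two big-endian bytes, while larger (32-bit)
 * lengths use the two-byte marker 0xff 0xfe followed by the length as a
 * 32-bit big-endian value, hence the 2- and 6-byte variants of ltag above.
 */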

static int ccm_crypt_fallback(struct skcipher_walk *walk, u8 mac[], u8 iv0[],
			      struct crypto_aes_ctx *ctx, bool enc)
{
	u8 buf[AES_BLOCK_SIZE];
	int err = 0;

	while (walk->nbytes) {
		int blocks = walk->nbytes / AES_BLOCK_SIZE;
		u32 tail = walk->nbytes % AES_BLOCK_SIZE;
		u8 *dst = walk->dst.virt.addr;
		u8 *src = walk->src.virt.addr;
		u32 nbytes = walk->nbytes;

		if (nbytes == walk->total && tail > 0) {
			/* process the final partial block in this pass */
			blocks++;
			tail = 0;
		}

		do {
			u32 bsize = AES_BLOCK_SIZE;

			if (nbytes < AES_BLOCK_SIZE)
				bsize = nbytes;

			crypto_inc(walk->iv, AES_BLOCK_SIZE);
			__aes_arm64_encrypt(ctx->key_enc, buf, walk->iv,
					    num_rounds(ctx));
			__aes_arm64_encrypt(ctx->key_enc, mac, mac,
					    num_rounds(ctx));
			if (enc)
				crypto_xor(mac, src, bsize);
			crypto_xor_cpy(dst, src, buf, bsize);
			if (!enc)
				crypto_xor(mac, dst, bsize);

			dst += bsize;
			src += bsize;
			nbytes -= bsize;
		} while (--blocks);

		err = skcipher_walk_done(walk, tail);
	}

	if (!err) {
		__aes_arm64_encrypt(ctx->key_enc, buf, iv0, num_rounds(ctx));
		__aes_arm64_encrypt(ctx->key_enc, mac, mac, num_rounds(ctx));
		crypto_xor(mac, buf, AES_BLOCK_SIZE);
	}
	return err;
}
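
/*
 * In CCM terms, the loop above interleaves CTR encryption (buf holds the
 * keystream block E(K, counter)) with the CBC-MAC update, always MACing
 * the plaintext side. The tail then computes the final tag as
 * E(K, MAC state) XOR E(K, A0), where iv0 is the preserved A0 counter
 * block whose counter field was zeroed by ccm_init_mac().
 */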

static int ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 len = req->cryptlen;
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

	/* preserve the original iv for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_encrypt(&walk, req, false);

	if (crypto_simd_usable()) {
		while (walk.nbytes) {
			u32 tail = walk.nbytes % AES_BLOCK_SIZE;

			if (walk.nbytes == walk.total)
				tail = 0;

			kernel_neon_begin();
			ce_aes_ccm_encrypt(walk.dst.virt.addr,
					   walk.src.virt.addr,
					   walk.nbytes - tail, ctx->key_enc,
					   num_rounds(ctx), mac, walk.iv);
			kernel_neon_end();

			err = skcipher_walk_done(&walk, tail);
		}
		if (!err) {
			kernel_neon_begin();
			ce_aes_ccm_final(mac, buf, ctx->key_enc,
					 num_rounds(ctx));
			kernel_neon_end();
		}
	} else {
		err = ccm_crypt_fallback(&walk, mac, buf, ctx, true);
	}
	if (err)
		return err;

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);

	return 0;
}

static int ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 len = req->cryptlen - authsize;
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

	/* preserve the original iv for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_decrypt(&walk, req, false);

	if (crypto_simd_usable()) {
		while (walk.nbytes) {
			u32 tail = walk.nbytes % AES_BLOCK_SIZE;

			if (walk.nbytes == walk.total)
				tail = 0;

			kernel_neon_begin();
			ce_aes_ccm_decrypt(walk.dst.virt.addr,
					   walk.src.virt.addr,
					   walk.nbytes - tail, ctx->key_enc,
					   num_rounds(ctx), mac, walk.iv);
			kernel_neon_end();

			err = skcipher_walk_done(&walk, tail);
		}
		if (!err) {
			kernel_neon_begin();
			ce_aes_ccm_final(mac, buf, ctx->key_enc,
					 num_rounds(ctx));
			kernel_neon_end();
		}
	} else {
		err = ccm_crypt_fallback(&walk, mac, buf, ctx, false);
	}
	if (err)
		return err;

	/* compare calculated auth tag with the stored one */
	scatterwalk_map_and_copy(buf, req->src,
				 req->assoclen + req->cryptlen - authsize,
				 authsize, 0);

	if (crypto_memneq(mac, buf, authsize))
		return -EBADMSG;
	return 0;
}
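
/*
 * crypto_memneq() is used instead of memcmp() for the tag check above: it
 * compares all authsize bytes in constant time, so the comparison does not
 * leak, via early-exit timing, how many leading tag bytes an attacker has
 * guessed correctly.
 */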

static struct aead_alg ccm_aes_alg = {
	.base = {
		.cra_name		= "ccm(aes)",
		.cra_driver_name	= "ccm-aes-ce",
		.cra_priority		= 300,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.maxauthsize	= AES_BLOCK_SIZE,
	.setkey		= ccm_setkey,
	.setauthsize	= ccm_setauthsize,
	.encrypt	= ccm_encrypt,
	.decrypt	= ccm_decrypt,
};
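
/*
 * CCM behaves as a stream-like AEAD, so .cra_blocksize is 1 while
 * .chunksize advertises the 16-byte CTR granularity. The priority value
 * (300, as in mainline) ranks this driver ahead of lower-priority generic
 * "ccm(aes)" implementations when both are registered.
 */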

static int __init aes_mod_init(void)
{
	if (!cpu_have_named_feature(AES))
		return -ENODEV;
	return crypto_register_aead(&ccm_aes_alg);
}

static void __exit aes_mod_exit(void)
{
	crypto_unregister_aead(&ccm_aes_alg);
}

module_init(aes_mod_init);
module_exit(aes_mod_exit);

MODULE_DESCRIPTION("Synchronous AES in CCM mode using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ccm(aes)");