// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Shared glue code for 128bit block ciphers
 *
 * Copyright © 2012-2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 *
 * CBC & ECB parts based on code (crypto/cbc.c,ecb.c) by:
 *   Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 * CTR part based on code (crypto/ctr.c) by:
 *   (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 */
13 #include <linux/module.h>
14 #include <crypto/b128ops.h>
15 #include <crypto/gf128mul.h>
16 #include <crypto/internal/skcipher.h>
17 #include <crypto/xts.h>
18 #include <asm/crypto/glue_helper.h>
20 int glue_ecb_req_128bit(const struct common_glue_ctx *gctx,
21 struct skcipher_request *req)
23 void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
24 const unsigned int bsize = 128 / 8;
25 struct skcipher_walk walk;
26 bool fpu_enabled = false;
30 err = skcipher_walk_virt(&walk, req, false);
32 while ((nbytes = walk.nbytes)) {
33 const u8 *src = walk.src.virt.addr;
34 u8 *dst = walk.dst.virt.addr;
35 unsigned int func_bytes;
38 fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
39 &walk, fpu_enabled, nbytes);
40 for (i = 0; i < gctx->num_funcs; i++) {
41 func_bytes = bsize * gctx->funcs[i].num_blocks;
43 if (nbytes < func_bytes)
46 /* Process multi-block batch */
48 gctx->funcs[i].fn_u.ecb(ctx, dst, src);
52 } while (nbytes >= func_bytes);
57 err = skcipher_walk_done(&walk, nbytes);
60 glue_fpu_end(fpu_enabled);
63 EXPORT_SYMBOL_GPL(glue_ecb_req_128bit);
65 int glue_cbc_encrypt_req_128bit(const common_glue_func_t fn,
66 struct skcipher_request *req)
68 void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
69 const unsigned int bsize = 128 / 8;
70 struct skcipher_walk walk;
74 err = skcipher_walk_virt(&walk, req, false);
76 while ((nbytes = walk.nbytes)) {
77 const u128 *src = (u128 *)walk.src.virt.addr;
78 u128 *dst = (u128 *)walk.dst.virt.addr;
79 u128 *iv = (u128 *)walk.iv;
82 u128_xor(dst, src, iv);
83 fn(ctx, (u8 *)dst, (u8 *)dst);
88 } while (nbytes >= bsize);
90 *(u128 *)walk.iv = *iv;
91 err = skcipher_walk_done(&walk, nbytes);
95 EXPORT_SYMBOL_GPL(glue_cbc_encrypt_req_128bit);
97 int glue_cbc_decrypt_req_128bit(const struct common_glue_ctx *gctx,
98 struct skcipher_request *req)
100 void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
101 const unsigned int bsize = 128 / 8;
102 struct skcipher_walk walk;
103 bool fpu_enabled = false;
107 err = skcipher_walk_virt(&walk, req, false);
109 while ((nbytes = walk.nbytes)) {
110 const u128 *src = walk.src.virt.addr;
111 u128 *dst = walk.dst.virt.addr;
112 unsigned int func_bytes, num_blocks;
116 fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
117 &walk, fpu_enabled, nbytes);
118 /* Start of the last block. */
119 src += nbytes / bsize - 1;
120 dst += nbytes / bsize - 1;
124 for (i = 0; i < gctx->num_funcs; i++) {
125 num_blocks = gctx->funcs[i].num_blocks;
126 func_bytes = bsize * num_blocks;
128 if (nbytes < func_bytes)
131 /* Process multi-block batch */
133 src -= num_blocks - 1;
134 dst -= num_blocks - 1;
136 gctx->funcs[i].fn_u.cbc(ctx, dst, src);
138 nbytes -= func_bytes;
142 u128_xor(dst, dst, --src);
144 } while (nbytes >= func_bytes);
147 u128_xor(dst, dst, (u128 *)walk.iv);
148 *(u128 *)walk.iv = last_iv;
149 err = skcipher_walk_done(&walk, nbytes);
152 glue_fpu_end(fpu_enabled);
155 EXPORT_SYMBOL_GPL(glue_cbc_decrypt_req_128bit);
157 int glue_ctr_req_128bit(const struct common_glue_ctx *gctx,
158 struct skcipher_request *req)
160 void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
161 const unsigned int bsize = 128 / 8;
162 struct skcipher_walk walk;
163 bool fpu_enabled = false;
167 err = skcipher_walk_virt(&walk, req, false);
169 while ((nbytes = walk.nbytes) >= bsize) {
170 const u128 *src = walk.src.virt.addr;
171 u128 *dst = walk.dst.virt.addr;
172 unsigned int func_bytes, num_blocks;
176 fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
177 &walk, fpu_enabled, nbytes);
179 be128_to_le128(&ctrblk, (be128 *)walk.iv);
181 for (i = 0; i < gctx->num_funcs; i++) {
182 num_blocks = gctx->funcs[i].num_blocks;
183 func_bytes = bsize * num_blocks;
185 if (nbytes < func_bytes)
188 /* Process multi-block batch */
190 gctx->funcs[i].fn_u.ctr(ctx, dst, src, &ctrblk);
193 nbytes -= func_bytes;
194 } while (nbytes >= func_bytes);
200 le128_to_be128((be128 *)walk.iv, &ctrblk);
201 err = skcipher_walk_done(&walk, nbytes);
204 glue_fpu_end(fpu_enabled);
210 be128_to_le128(&ctrblk, (be128 *)walk.iv);
211 memcpy(&tmp, walk.src.virt.addr, nbytes);
212 gctx->funcs[gctx->num_funcs - 1].fn_u.ctr(ctx, &tmp, &tmp,
214 memcpy(walk.dst.virt.addr, &tmp, nbytes);
215 le128_to_be128((be128 *)walk.iv, &ctrblk);
217 err = skcipher_walk_done(&walk, 0);
222 EXPORT_SYMBOL_GPL(glue_ctr_req_128bit);
224 static unsigned int __glue_xts_req_128bit(const struct common_glue_ctx *gctx,
226 struct skcipher_walk *walk)
228 const unsigned int bsize = 128 / 8;
229 unsigned int nbytes = walk->nbytes;
230 u128 *src = walk->src.virt.addr;
231 u128 *dst = walk->dst.virt.addr;
232 unsigned int num_blocks, func_bytes;
235 /* Process multi-block batch */
236 for (i = 0; i < gctx->num_funcs; i++) {
237 num_blocks = gctx->funcs[i].num_blocks;
238 func_bytes = bsize * num_blocks;
240 if (nbytes >= func_bytes) {
242 gctx->funcs[i].fn_u.xts(ctx, dst, src,
247 nbytes -= func_bytes;
248 } while (nbytes >= func_bytes);
259 int glue_xts_req_128bit(const struct common_glue_ctx *gctx,
260 struct skcipher_request *req,
261 common_glue_func_t tweak_fn, void *tweak_ctx,
264 const unsigned int bsize = 128 / 8;
265 struct skcipher_walk walk;
266 bool fpu_enabled = false;
270 err = skcipher_walk_virt(&walk, req, false);
271 nbytes = walk.nbytes;
275 /* set minimum length to bsize, for tweak_fn */
276 fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
278 nbytes < bsize ? bsize : nbytes);
280 /* calculate first value of T */
281 tweak_fn(tweak_ctx, walk.iv, walk.iv);
284 nbytes = __glue_xts_req_128bit(gctx, crypt_ctx, &walk);
286 err = skcipher_walk_done(&walk, nbytes);
287 nbytes = walk.nbytes;
290 glue_fpu_end(fpu_enabled);
294 EXPORT_SYMBOL_GPL(glue_xts_req_128bit);
296 void glue_xts_crypt_128bit_one(void *ctx, u128 *dst, const u128 *src, le128 *iv,
297 common_glue_func_t fn)
301 /* generate next IV */
302 gf128mul_x_ble(iv, &ivblk);
305 u128_xor(dst, src, (u128 *)&ivblk);
307 /* PP <- D(Key2,CC) */
308 fn(ctx, (u8 *)dst, (u8 *)dst);
311 u128_xor(dst, dst, (u128 *)&ivblk);
313 EXPORT_SYMBOL_GPL(glue_xts_crypt_128bit_one);
315 MODULE_LICENSE("GPL");