arch/x86/crypto/camellia_aesni_avx_glue.c
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Glue Code for x86_64/AVX/AES-NI assembler optimized version of Camellia
 *
 * Copyright © 2012-2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 */

#include <asm/crypto/camellia.h>
#include <asm/crypto/glue_helper.h>
#include <crypto/algapi.h>
#include <crypto/internal/simd.h>
#include <crypto/xts.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/types.h>

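/*
 * Number of blocks handled per call by the 16-way assembler routines;
 * also used as fpu_blocks_limit in the glue contexts below.
 */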
#define CAMELLIA_AESNI_PARALLEL_BLOCKS 16

/* 16-way parallel cipher functions (avx/aes-ni) */
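/*
 * Implemented in camellia-aesni-avx-asm_64.S; the symbols are exported
 * so that the camellia-aesni-avx2 glue code can reuse them.
 */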
asmlinkage void camellia_ecb_enc_16way(struct camellia_ctx *ctx, u8 *dst,
                                       const u8 *src);
EXPORT_SYMBOL_GPL(camellia_ecb_enc_16way);

asmlinkage void camellia_ecb_dec_16way(struct camellia_ctx *ctx, u8 *dst,
                                       const u8 *src);
EXPORT_SYMBOL_GPL(camellia_ecb_dec_16way);

asmlinkage void camellia_cbc_dec_16way(struct camellia_ctx *ctx, u8 *dst,
                                       const u8 *src);
EXPORT_SYMBOL_GPL(camellia_cbc_dec_16way);

asmlinkage void camellia_ctr_16way(struct camellia_ctx *ctx, u8 *dst,
                                   const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(camellia_ctr_16way);

asmlinkage void camellia_xts_enc_16way(struct camellia_ctx *ctx, u8 *dst,
                                       const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(camellia_xts_enc_16way);

asmlinkage void camellia_xts_dec_16way(struct camellia_ctx *ctx, u8 *dst,
                                       const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(camellia_xts_dec_16way);

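/*
 * Single-block XTS helpers built on camellia_enc_blk/camellia_dec_blk.
 * They serve as the num_blocks == 1 entries in the XTS glue contexts
 * below and are exported for reuse by the AVX2 glue code.
 */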
void camellia_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
        glue_xts_crypt_128bit_one(ctx, dst, src, iv,
                                  GLUE_FUNC_CAST(camellia_enc_blk));
}
EXPORT_SYMBOL_GPL(camellia_xts_enc);

void camellia_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
        glue_xts_crypt_128bit_one(ctx, dst, src, iv,
                                  GLUE_FUNC_CAST(camellia_dec_blk));
}
EXPORT_SYMBOL_GPL(camellia_xts_dec);

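/*
 * Each common_glue_ctx lists its implementations in decreasing order of
 * parallelism; the glue helper picks the widest variant that still fits
 * the data left in the walk and only enters a kernel FPU section once
 * at least fpu_blocks_limit blocks are pending.
 */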
static const struct common_glue_ctx camellia_enc = {
        .num_funcs = 3,
        .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
                .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_enc_16way) }
        }, {
                .num_blocks = 2,
                .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk_2way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk) }
        } }
};

static const struct common_glue_ctx camellia_ctr = {
        .num_funcs = 3,
        .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
                .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_ctr_16way) }
        }, {
                .num_blocks = 2,
                .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_crypt_ctr_2way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_crypt_ctr) }
        } }
};

static const struct common_glue_ctx camellia_enc_xts = {
        .num_funcs = 2,
        .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc_16way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc) }
        } }
};

static const struct common_glue_ctx camellia_dec = {
        .num_funcs = 3,
        .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
                .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_dec_16way) }
        }, {
                .num_blocks = 2,
                .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk_2way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk) }
        } }
};

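/*
 * Unlike CBC encryption, CBC decryption can be parallelized: all
 * ciphertext blocks are available up front, so the 16-way and 2-way
 * routines can be used.
 */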
static const struct common_glue_ctx camellia_dec_cbc = {
        .num_funcs = 3,
        .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
                .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_cbc_dec_16way) }
        }, {
                .num_blocks = 2,
                .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_decrypt_cbc_2way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_dec_blk) }
        } }
};

static const struct common_glue_ctx camellia_dec_xts = {
        .num_funcs = 2,
        .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec_16way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec) }
        } }
};

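/*
 * Plain (non-XTS) setkey: forward to the generic Camellia key schedule
 * shared with the camellia-x86_64 module.
 */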
static int camellia_setkey(struct crypto_skcipher *tfm, const u8 *key,
                           unsigned int keylen)
{
        return __camellia_setkey(crypto_skcipher_ctx(tfm), key, keylen,
                                 &tfm->base.crt_flags);
}

static int ecb_encrypt(struct skcipher_request *req)
{
        return glue_ecb_req_128bit(&camellia_enc, req);
}

static int ecb_decrypt(struct skcipher_request *req)
{
        return glue_ecb_req_128bit(&camellia_dec, req);
}

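/*
 * CBC encryption is inherently serial (each block chains on the
 * previous ciphertext block), so only the one-block routine is used.
 */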
static int cbc_encrypt(struct skcipher_request *req)
{
        return glue_cbc_encrypt_req_128bit(GLUE_FUNC_CAST(camellia_enc_blk),
                                           req);
}

static int cbc_decrypt(struct skcipher_request *req)
{
        return glue_cbc_decrypt_req_128bit(&camellia_dec_cbc, req);
}

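/* CTR mode is its own inverse; the same handler serves both directions. */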
static int ctr_crypt(struct skcipher_request *req)
{
        return glue_ctr_req_128bit(&camellia_ctr, req);
}

int xts_camellia_setkey(struct crypto_skcipher *tfm, const u8 *key,
                        unsigned int keylen)
{
        struct camellia_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
        u32 *flags = &tfm->base.crt_flags;
        int err;

        err = xts_verify_key(tfm, key, keylen);
        if (err)
                return err;

        /* first half of xts-key is for crypt */
        err = __camellia_setkey(&ctx->crypt_ctx, key, keylen / 2, flags);
        if (err)
                return err;

        /* second half of xts-key is for tweak */
        return __camellia_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2,
                                 flags);
}
EXPORT_SYMBOL_GPL(xts_camellia_setkey);

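/*
 * XTS always computes the tweak with the encryption routine, which is
 * why both request handlers pass camellia_enc_blk as the tweak function.
 */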
static int xts_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct camellia_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

        return glue_xts_req_128bit(&camellia_enc_xts, req,
                                   XTS_TWEAK_CAST(camellia_enc_blk),
                                   &ctx->tweak_ctx, &ctx->crypt_ctx);
}

static int xts_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct camellia_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

        return glue_xts_req_128bit(&camellia_dec_xts, req,
                                   XTS_TWEAK_CAST(camellia_enc_blk),
                                   &ctx->tweak_ctx, &ctx->crypt_ctx);
}

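/*
 * These "__"-prefixed, CRYPTO_ALG_INTERNAL algorithms may only run while
 * the FPU is usable.  Users reach them through the SIMD wrappers
 * registered in camellia_aesni_init(), which fall back to cryptd when
 * the FPU is not available.
 */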
static struct skcipher_alg camellia_algs[] = {
        {
                .base.cra_name          = "__ecb(camellia)",
                .base.cra_driver_name   = "__ecb-camellia-aesni",
                .base.cra_priority      = 400,
                .base.cra_flags         = CRYPTO_ALG_INTERNAL,
                .base.cra_blocksize     = CAMELLIA_BLOCK_SIZE,
                .base.cra_ctxsize       = sizeof(struct camellia_ctx),
                .base.cra_module        = THIS_MODULE,
                .min_keysize            = CAMELLIA_MIN_KEY_SIZE,
                .max_keysize            = CAMELLIA_MAX_KEY_SIZE,
                .setkey                 = camellia_setkey,
                .encrypt                = ecb_encrypt,
                .decrypt                = ecb_decrypt,
        }, {
                .base.cra_name          = "__cbc(camellia)",
                .base.cra_driver_name   = "__cbc-camellia-aesni",
                .base.cra_priority      = 400,
                .base.cra_flags         = CRYPTO_ALG_INTERNAL,
                .base.cra_blocksize     = CAMELLIA_BLOCK_SIZE,
                .base.cra_ctxsize       = sizeof(struct camellia_ctx),
                .base.cra_module        = THIS_MODULE,
                .min_keysize            = CAMELLIA_MIN_KEY_SIZE,
                .max_keysize            = CAMELLIA_MAX_KEY_SIZE,
                .ivsize                 = CAMELLIA_BLOCK_SIZE,
                .setkey                 = camellia_setkey,
                .encrypt                = cbc_encrypt,
                .decrypt                = cbc_decrypt,
        }, {
                .base.cra_name          = "__ctr(camellia)",
                .base.cra_driver_name   = "__ctr-camellia-aesni",
                .base.cra_priority      = 400,
                .base.cra_flags         = CRYPTO_ALG_INTERNAL,
                .base.cra_blocksize     = 1,
                .base.cra_ctxsize       = sizeof(struct camellia_ctx),
                .base.cra_module        = THIS_MODULE,
                .min_keysize            = CAMELLIA_MIN_KEY_SIZE,
                .max_keysize            = CAMELLIA_MAX_KEY_SIZE,
                .ivsize                 = CAMELLIA_BLOCK_SIZE,
                .chunksize              = CAMELLIA_BLOCK_SIZE,
                .setkey                 = camellia_setkey,
                .encrypt                = ctr_crypt,
                .decrypt                = ctr_crypt,
        }, {
                .base.cra_name          = "__xts(camellia)",
                .base.cra_driver_name   = "__xts-camellia-aesni",
                .base.cra_priority      = 400,
                .base.cra_flags         = CRYPTO_ALG_INTERNAL,
                .base.cra_blocksize     = CAMELLIA_BLOCK_SIZE,
                .base.cra_ctxsize       = sizeof(struct camellia_xts_ctx),
                .base.cra_module        = THIS_MODULE,
                .min_keysize            = 2 * CAMELLIA_MIN_KEY_SIZE,
                .max_keysize            = 2 * CAMELLIA_MAX_KEY_SIZE,
                .ivsize                 = CAMELLIA_BLOCK_SIZE,
                .setkey                 = xts_camellia_setkey,
                .encrypt                = xts_encrypt,
                .decrypt                = xts_decrypt,
        },
};

static struct simd_skcipher_alg *camellia_simd_algs[ARRAY_SIZE(camellia_algs)];

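/*
 * The module requires AVX, AES-NI and OS-enabled XMM/YMM state saving.
 * simd_register_skciphers_compat() registers both the internal
 * algorithms above and their user-visible SIMD wrappers.
 */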
static int __init camellia_aesni_init(void)
{
        const char *feature_name;

        if (!boot_cpu_has(X86_FEATURE_AVX) ||
            !boot_cpu_has(X86_FEATURE_AES) ||
            !boot_cpu_has(X86_FEATURE_OSXSAVE)) {
                pr_info("AVX or AES-NI instructions are not detected.\n");
                return -ENODEV;
        }

        if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
                               &feature_name)) {
                pr_info("CPU feature '%s' is not supported.\n", feature_name);
                return -ENODEV;
        }

        return simd_register_skciphers_compat(camellia_algs,
                                              ARRAY_SIZE(camellia_algs),
                                              camellia_simd_algs);
}

static void __exit camellia_aesni_fini(void)
{
        simd_unregister_skciphers(camellia_algs, ARRAY_SIZE(camellia_algs),
                                  camellia_simd_algs);
}

module_init(camellia_aesni_init);
module_exit(camellia_aesni_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Camellia Cipher Algorithm, AES-NI/AVX optimized");
MODULE_ALIAS_CRYPTO("camellia");
MODULE_ALIAS_CRYPTO("camellia-asm");