// SPDX-License-Identifier: GPL-2.0-only
/*
 * pcrypt - Parallel crypto wrapper.
 *
 * Copyright (C) 2009 secunet Security Networks AG
 * Copyright (C) 2009 Steffen Klassert <steffen.klassert@secunet.com>
 */

#include <crypto/algapi.h>
#include <crypto/internal/aead.h>
#include <linux/atomic.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/notifier.h>
#include <linux/kobject.h>
#include <linux/cpu.h>
#include <crypto/pcrypt.h>

struct padata_pcrypt {
	struct padata_instance *pinst;

	/*
	 * Cpumask for callback CPUs. It should be
	 * equal to the serial cpumask of the corresponding padata instance,
	 * so it is updated when padata notifies us about a serial
	 * cpumask change.
	 *
	 * cb_cpumask is protected by RCU. This fact prevents us from
	 * using cpumask_var_t directly because the actual type of
	 * cpumask_var_t depends on the kernel configuration (particularly on
	 * the CONFIG_CPUMASK_OFFSTACK macro). Depending on the configuration,
	 * cpumask_var_t may be either a pointer to the struct cpumask
	 * or a variable allocated on the stack. Thus we can not safely use
	 * cpumask_var_t with RCU operations such as rcu_assign_pointer or
	 * rcu_dereference. So cpumask_var_t is wrapped with struct
	 * pcrypt_cpumask, which makes it possible to use it with RCU.
	 */
	struct pcrypt_cpumask {
		cpumask_var_t mask;
	} *cb_cpumask;
	struct notifier_block nblock;
};

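/*
 * One padata instance per direction: pencrypt handles encryption jobs,
 * pdecrypt handles decryption jobs. pcrypt_kset anchors their sysfs
 * entries under /sys/kernel/pcrypt/.
 */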
static struct padata_pcrypt pencrypt;
static struct padata_pcrypt pdecrypt;
static struct kset *pcrypt_kset;

struct pcrypt_instance_ctx {
	struct crypto_aead_spawn spawn;
	atomic_t tfm_count;
};

struct pcrypt_aead_ctx {
	struct crypto_aead *child;
	unsigned int cb_cpu;
};

static int pcrypt_aead_setkey(struct crypto_aead *parent,
			      const u8 *key, unsigned int keylen)
{
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(parent);

	return crypto_aead_setkey(ctx->child, key, keylen);
}

static int pcrypt_aead_setauthsize(struct crypto_aead *parent,
				   unsigned int authsize)
{
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(parent);

	return crypto_aead_setauthsize(ctx->child, authsize);
}

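/*
 * Serial callback: padata invokes this once the job may complete in
 * submission order; it completes the original request with the stored
 * result.
 */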
static void pcrypt_aead_serial(struct padata_priv *padata)
{
	struct pcrypt_request *preq = pcrypt_padata_request(padata);
	struct aead_request *req = pcrypt_request_ctx(preq);

	aead_request_complete(req->base.data, padata->info);
}

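/*
 * Completion callback of the child request: store the result in
 * padata->info, clear CRYPTO_TFM_REQ_MAY_SLEEP on the original request
 * and hand the job back to padata for serialization.
 */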
static void pcrypt_aead_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;
	struct pcrypt_request *preq = aead_request_ctx(req);
	struct padata_priv *padata = pcrypt_request_padata(preq);

	padata->info = err;
	req->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	padata_do_serial(padata);
}

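/*
 * Parallel worker: run the actual encryption on the child transform.
 * If the child completes asynchronously, pcrypt_aead_done() takes care
 * of serialization; otherwise queue the job for serialization now.
 */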
static void pcrypt_aead_enc(struct padata_priv *padata)
{
	struct pcrypt_request *preq = pcrypt_padata_request(padata);
	struct aead_request *req = pcrypt_request_ctx(preq);

	padata->info = crypto_aead_encrypt(req);

	if (padata->info == -EINPROGRESS)
		return;

	padata_do_serial(padata);
}

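/*
 * Encryption entry point: mirror the caller's request onto the child
 * request, set up the padata callbacks and dispatch the job to the
 * pencrypt instance. Returns -EINPROGRESS when the job was queued.
 */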
static int pcrypt_aead_encrypt(struct aead_request *req)
{
	int err;
	struct pcrypt_request *preq = aead_request_ctx(req);
	struct aead_request *creq = pcrypt_request_ctx(preq);
	struct padata_priv *padata = pcrypt_request_padata(preq);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
	u32 flags = aead_request_flags(req);

	memset(padata, 0, sizeof(struct padata_priv));

	padata->parallel = pcrypt_aead_enc;
	padata->serial = pcrypt_aead_serial;

	aead_request_set_tfm(creq, ctx->child);
	aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
				  pcrypt_aead_done, req);
	aead_request_set_crypt(creq, req->src, req->dst,
			       req->cryptlen, req->iv);
	aead_request_set_ad(creq, req->assoclen);

	err = padata_do_parallel(pencrypt.pinst, padata, &ctx->cb_cpu);
	if (!err)
		return -EINPROGRESS;

	return err;
}

138 static void pcrypt_aead_dec(struct padata_priv *padata)
140 struct pcrypt_request *preq = pcrypt_padata_request(padata);
141 struct aead_request *req = pcrypt_request_ctx(preq);
143 padata->info = crypto_aead_decrypt(req);
145 if (padata->info == -EINPROGRESS)
148 padata_do_serial(padata);
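/*
 * Decryption entry point: like pcrypt_aead_encrypt(), but the job is
 * dispatched to the pdecrypt instance.
 */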
static int pcrypt_aead_decrypt(struct aead_request *req)
{
	int err;
	struct pcrypt_request *preq = aead_request_ctx(req);
	struct aead_request *creq = pcrypt_request_ctx(preq);
	struct padata_priv *padata = pcrypt_request_padata(preq);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
	u32 flags = aead_request_flags(req);

	memset(padata, 0, sizeof(struct padata_priv));

	padata->parallel = pcrypt_aead_dec;
	padata->serial = pcrypt_aead_serial;

	aead_request_set_tfm(creq, ctx->child);
	aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
				  pcrypt_aead_done, req);
	aead_request_set_crypt(creq, req->src, req->dst,
			       req->cryptlen, req->iv);
	aead_request_set_ad(creq, req->assoclen);

	err = padata_do_parallel(pdecrypt.pinst, padata, &ctx->cb_cpu);
	if (!err)
		return -EINPROGRESS;

	return err;
}

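/*
 * Transform init: pick a callback CPU for this transform by walking the
 * online CPUs round-robin, then instantiate the underlying (child) AEAD
 * and reserve request space for the child request.
 */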
static int pcrypt_aead_init_tfm(struct crypto_aead *tfm)
{
	int cpu, cpu_index;
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct pcrypt_instance_ctx *ictx = aead_instance_ctx(inst);
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *cipher;

	cpu_index = (unsigned int)atomic_inc_return(&ictx->tfm_count) %
		    cpumask_weight(cpu_online_mask);

	ctx->cb_cpu = cpumask_first(cpu_online_mask);
	for (cpu = 0; cpu < cpu_index; cpu++)
		ctx->cb_cpu = cpumask_next(ctx->cb_cpu, cpu_online_mask);

	cipher = crypto_spawn_aead(&ictx->spawn);

	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_aead_set_reqsize(tfm, sizeof(struct pcrypt_request) +
				     sizeof(struct aead_request) +
				     crypto_aead_reqsize(cipher));

	return 0;
}

static void pcrypt_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}

static void pcrypt_free(struct aead_instance *inst)
{
	struct pcrypt_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_aead(&ctx->spawn);
	kfree(inst);
}

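/*
 * Common instance setup: the driver name becomes "pcrypt(<driver>)" and
 * the priority of the wrapped algorithm is raised by 100.
 */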
static int pcrypt_init_instance(struct crypto_instance *inst,
				struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "pcrypt(%s)", alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_priority = alg->cra_priority + 100;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

	return 0;
}

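/*
 * Build a pcrypt instance around the requested AEAD: grab the underlying
 * algorithm, fill in the common fields, mark the instance CRYPTO_ALG_ASYNC
 * and wire up the pcrypt request handlers.
 */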
static int pcrypt_create_aead(struct crypto_template *tmpl, struct rtattr **tb,
			      u32 type, u32 mask)
{
	struct pcrypt_instance_ctx *ctx;
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct aead_alg *alg;
	const char *name;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return PTR_ERR(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = aead_instance_ctx(inst);
	crypto_set_aead_spawn(&ctx->spawn, aead_crypto_instance(inst));

	err = crypto_grab_aead(&ctx->spawn, name, 0, 0);
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(&ctx->spawn);
	err = pcrypt_init_instance(aead_crypto_instance(inst), &alg->base);
	if (err)
		goto out_drop_aead;

	inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_ctxsize = sizeof(struct pcrypt_aead_ctx);

	inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = pcrypt_aead_init_tfm;
	inst->alg.exit = pcrypt_aead_exit_tfm;
	inst->alg.setkey = pcrypt_aead_setkey;
	inst->alg.setauthsize = pcrypt_aead_setauthsize;
	inst->alg.encrypt = pcrypt_aead_encrypt;
	inst->alg.decrypt = pcrypt_aead_decrypt;

	inst->free = pcrypt_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto out_drop_aead;

out:
	return err;

out_drop_aead:
	crypto_drop_aead(&ctx->spawn);
out_free_inst:
	kfree(inst);
	goto out;
}

static int pcrypt_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_AEAD:
		return pcrypt_create_aead(tmpl, tb, algt->type, algt->mask);
	}

	return -EINVAL;
}

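/*
 * padata notifier: when the serial cpumask of the instance changes,
 * publish a fresh copy of it via rcu_assign_pointer() and free the old
 * wrapper once a grace period has elapsed.
 */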
static int pcrypt_cpumask_change_notify(struct notifier_block *self,
					unsigned long val, void *data)
{
	struct padata_pcrypt *pcrypt;
	struct pcrypt_cpumask *new_mask, *old_mask;
	struct padata_cpumask *cpumask = (struct padata_cpumask *)data;

	if (!(val & PADATA_CPU_SERIAL))
		return 0;

	pcrypt = container_of(self, struct padata_pcrypt, nblock);
	new_mask = kmalloc(sizeof(*new_mask), GFP_KERNEL);
	if (!new_mask)
		return -ENOMEM;
	if (!alloc_cpumask_var(&new_mask->mask, GFP_KERNEL)) {
		kfree(new_mask);
		return -ENOMEM;
	}

	old_mask = pcrypt->cb_cpumask;

	cpumask_copy(new_mask->mask, cpumask->cbcpu);
	rcu_assign_pointer(pcrypt->cb_cpumask, new_mask);
	synchronize_rcu();

	free_cpumask_var(old_mask->mask);
	kfree(old_mask);
	return 0;
}

static int pcrypt_sysfs_add(struct padata_instance *pinst, const char *name)
{
	int ret;

	pinst->kobj.kset = pcrypt_kset;
	ret = kobject_add(&pinst->kobj, NULL, "%s", name);
	if (!ret)
		kobject_uevent(&pinst->kobj, KOBJ_ADD);

	return ret;
}

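/*
 * Bring up one padata instance: allocate it, seed the callback cpumask
 * with the currently online CPUs, register the cpumask-change notifier
 * and add the instance to the pcrypt kset in sysfs.
 */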
static int pcrypt_init_padata(struct padata_pcrypt *pcrypt,
			      const char *name)
{
	int ret = -ENOMEM;
	struct pcrypt_cpumask *mask;

	get_online_cpus();

	pcrypt->pinst = padata_alloc_possible(name);
	if (!pcrypt->pinst)
		goto err;

	mask = kmalloc(sizeof(*mask), GFP_KERNEL);
	if (!mask)
		goto err_free_padata;
	if (!alloc_cpumask_var(&mask->mask, GFP_KERNEL)) {
		kfree(mask);
		goto err_free_padata;
	}

	cpumask_and(mask->mask, cpu_possible_mask, cpu_online_mask);
	rcu_assign_pointer(pcrypt->cb_cpumask, mask);

	pcrypt->nblock.notifier_call = pcrypt_cpumask_change_notify;
	ret = padata_register_cpumask_notifier(pcrypt->pinst, &pcrypt->nblock);
	if (ret)
		goto err_free_cpumask;

	ret = pcrypt_sysfs_add(pcrypt->pinst, name);
	if (ret)
		goto err_unregister_notifier;

	put_online_cpus();

	return ret;

err_unregister_notifier:
	padata_unregister_cpumask_notifier(pcrypt->pinst, &pcrypt->nblock);
err_free_cpumask:
	free_cpumask_var(mask->mask);
	kfree(mask);
err_free_padata:
	padata_free(pcrypt->pinst);
err:
	put_online_cpus();

	return ret;
}

static void pcrypt_fini_padata(struct padata_pcrypt *pcrypt)
{
	free_cpumask_var(pcrypt->cb_cpumask->mask);
	kfree(pcrypt->cb_cpumask);

	padata_stop(pcrypt->pinst);
	padata_unregister_cpumask_notifier(pcrypt->pinst, &pcrypt->nblock);
	padata_free(pcrypt->pinst);
}

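/*
 * The template is instantiated by name; requesting, for example,
 * "pcrypt(gcm(aes))" wraps the gcm(aes) AEAD in this parallel wrapper.
 */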
static struct crypto_template pcrypt_tmpl = {
	.name = "pcrypt",
	.create = pcrypt_create,
	.module = THIS_MODULE,
};

static int __init pcrypt_init(void)
{
	int err = -ENOMEM;

	pcrypt_kset = kset_create_and_add("pcrypt", NULL, kernel_kobj);
	if (!pcrypt_kset)
		goto err;

	err = pcrypt_init_padata(&pencrypt, "pencrypt");
	if (err)
		goto err_unreg_kset;

	err = pcrypt_init_padata(&pdecrypt, "pdecrypt");
	if (err)
		goto err_deinit_pencrypt;

	padata_start(pencrypt.pinst);
	padata_start(pdecrypt.pinst);

	return crypto_register_template(&pcrypt_tmpl);

err_deinit_pencrypt:
	pcrypt_fini_padata(&pencrypt);
err_unreg_kset:
	kset_unregister(pcrypt_kset);
err:
	return err;
}

static void __exit pcrypt_exit(void)
{
	pcrypt_fini_padata(&pencrypt);
	pcrypt_fini_padata(&pdecrypt);

	kset_unregister(pcrypt_kset);
	crypto_unregister_template(&pcrypt_tmpl);
}

subsys_initcall(pcrypt_init);
module_exit(pcrypt_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Steffen Klassert <steffen.klassert@secunet.com>");
MODULE_DESCRIPTION("Parallel crypto wrapper");
MODULE_ALIAS_CRYPTO("pcrypt");