crypto/algapi.c (linux.git)
crypto: algapi - make crypto_drop_spawn() a no-op on uninitialized spawns
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/string.h>

#include "internal.h"

static LIST_HEAD(crypto_template_list);

static inline void crypto_check_module_sig(struct module *mod)
{
        if (fips_enabled && mod && !module_sig_ok(mod))
                panic("Module %s signature verification failed in FIPS mode\n",
                      module_name(mod));
}

static int crypto_check_alg(struct crypto_alg *alg)
{
        crypto_check_module_sig(alg->cra_module);

        if (!alg->cra_name[0] || !alg->cra_driver_name[0])
                return -EINVAL;

        if (alg->cra_alignmask & (alg->cra_alignmask + 1))
                return -EINVAL;

        /* General maximums for all algs. */
        if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
                return -EINVAL;

        if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
                return -EINVAL;

        /* Lower maximums for specific alg types. */
        if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
                               CRYPTO_ALG_TYPE_CIPHER) {
                if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
                        return -EINVAL;

                if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE)
                        return -EINVAL;
        }

        if (alg->cra_priority < 0)
                return -EINVAL;

        refcount_set(&alg->cra_refcnt, 1);

        return 0;
}

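/*
 * Illustrative sketch of an alg that passes crypto_check_alg(); the
 * field values are hypothetical.  Both names must be set, the
 * alignmask must be of the form 2^n - 1 and no greater than
 * MAX_ALGAPI_ALIGNMASK, the blocksize must not exceed
 * MAX_ALGAPI_BLOCKSIZE, and the priority must be non-negative:
 *
 *      static struct crypto_alg example_alg = {
 *              .cra_name               = "example",
 *              .cra_driver_name        = "example-generic",
 *              .cra_priority           = 100,
 *              .cra_blocksize          = 16,
 *              .cra_alignmask          = 3,
 *              .cra_module             = THIS_MODULE,
 *      };
 */
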
static void crypto_free_instance(struct crypto_instance *inst)
{
        if (!inst->alg.cra_type->free) {
                inst->tmpl->free(inst);
                return;
        }

        inst->alg.cra_type->free(inst);
}

static void crypto_destroy_instance(struct crypto_alg *alg)
{
        struct crypto_instance *inst = (void *)alg;
        struct crypto_template *tmpl = inst->tmpl;

        crypto_free_instance(inst);
        crypto_tmpl_put(tmpl);
}

/*
 * This function adds a spawn to the list secondary_spawns which
 * will be used at the end of crypto_remove_spawns to unregister
 * instances, unless the spawn happens to be one that is depended
 * on by the new algorithm (nalg in crypto_remove_spawns).
 *
 * This function is also responsible for resurrecting any algorithms
 * in the dependency chain of nalg by unsetting n->dead.
 */
static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
                                            struct list_head *stack,
                                            struct list_head *top,
                                            struct list_head *secondary_spawns)
{
        struct crypto_spawn *spawn, *n;

        spawn = list_first_entry_or_null(stack, struct crypto_spawn, list);
        if (!spawn)
                return NULL;

        n = list_prev_entry(spawn, list);
        list_move(&spawn->list, secondary_spawns);

        if (list_is_last(&n->list, stack))
                return top;

        n = list_next_entry(n, list);
        if (!spawn->dead)
                n->dead = false;

        return &n->inst->alg.cra_users;
}

static void crypto_remove_instance(struct crypto_instance *inst,
                                   struct list_head *list)
{
        struct crypto_template *tmpl = inst->tmpl;

        if (crypto_is_dead(&inst->alg))
                return;

        inst->alg.cra_flags |= CRYPTO_ALG_DEAD;

        if (!tmpl || !crypto_tmpl_get(tmpl))
                return;

        list_move(&inst->alg.cra_list, list);
        hlist_del(&inst->list);
        inst->alg.cra_destroy = crypto_destroy_instance;

        BUG_ON(!list_empty(&inst->alg.cra_users));
}

/*
 * Given an algorithm alg, remove all algorithms that depend on it
 * through spawns.  If nalg is not null, then exempt any algorithms
 * that are depended on by nalg.  This is useful when nalg itself
 * depends on alg.
 */
void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
                          struct crypto_alg *nalg)
{
        u32 new_type = (nalg ?: alg)->cra_flags;
        struct crypto_spawn *spawn, *n;
        LIST_HEAD(secondary_spawns);
        struct list_head *spawns;
        LIST_HEAD(stack);
        LIST_HEAD(top);

        spawns = &alg->cra_users;
        list_for_each_entry_safe(spawn, n, spawns, list) {
                if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
                        continue;

                list_move(&spawn->list, &top);
        }

        /*
         * Perform a depth-first walk starting from alg through
         * the cra_users tree.  The list stack records the path
         * from alg to the current spawn.
         */
        spawns = &top;
        do {
                while (!list_empty(spawns)) {
                        struct crypto_instance *inst;

                        spawn = list_first_entry(spawns, struct crypto_spawn,
                                                 list);
                        inst = spawn->inst;

                        list_move(&spawn->list, &stack);
                        spawn->dead = !spawn->registered || &inst->alg != nalg;

                        if (!spawn->registered)
                                break;

                        BUG_ON(&inst->alg == alg);

                        if (&inst->alg == nalg)
                                break;

                        spawns = &inst->alg.cra_users;
                        /*
                         * Even if spawn->registered is true, the instance
                         * itself may still be unregistered, since an
                         * instance's spawns are set up prior to the instance
                         * being registered and registration may fail.  An
                         * unregistered instance will have NULL
                         * ->cra_users.next, since ->cra_users isn't properly
                         * initialized until registration.  But an
                         * unregistered instance cannot have any users, so
                         * treat it the same as ->cra_users being empty.
                         */
                        if (spawns->next == NULL)
                                break;
                }
        } while ((spawns = crypto_more_spawns(alg, &stack, &top,
                                              &secondary_spawns)));

        /*
         * Remove all instances that are marked as dead.  Also
         * complete the resurrection of the others by moving them
         * back to the cra_users list.
         */
        list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
                if (!spawn->dead)
                        list_move(&spawn->list, &spawn->alg->cra_users);
                else if (spawn->registered)
                        crypto_remove_instance(spawn->inst, list);
        }
}
EXPORT_SYMBOL_GPL(crypto_remove_spawns);

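/*
 * Worked example with hypothetical names: suppose "aes-generic" has
 * the user "ctr(aes)", which in turn has the user
 * "rfc3686(ctr(aes))".  crypto_remove_spawns(aes-generic, list, NULL)
 * walks cra_users depth-first: it visits the "ctr(aes)" spawn,
 * descends into ctr(aes)'s own cra_users to reach
 * "rfc3686(ctr(aes))", marks both spawns dead, and queues both
 * instances on @list for crypto_remove_final().  When @nalg is given
 * (e.g. a better "aes" implementation that has just passed testing),
 * spawns on @nalg's own dependency chain are resurrected rather than
 * removed.
 */
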
static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
{
        struct crypto_alg *q;
        struct crypto_larval *larval;
        int ret = -EAGAIN;

        if (crypto_is_dead(alg))
                goto err;

        INIT_LIST_HEAD(&alg->cra_users);

        /* No cheating! */
        alg->cra_flags &= ~CRYPTO_ALG_TESTED;

        ret = -EEXIST;

        list_for_each_entry(q, &crypto_alg_list, cra_list) {
                if (q == alg)
                        goto err;

                if (crypto_is_moribund(q))
                        continue;

                if (crypto_is_larval(q)) {
                        if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
                                goto err;
                        continue;
                }

                if (!strcmp(q->cra_driver_name, alg->cra_name) ||
                    !strcmp(q->cra_name, alg->cra_driver_name))
                        goto err;
        }

        larval = crypto_larval_alloc(alg->cra_name,
                                     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
        if (IS_ERR(larval))
                goto out;

        ret = -ENOENT;
        larval->adult = crypto_mod_get(alg);
        if (!larval->adult)
                goto free_larval;

        refcount_set(&larval->alg.cra_refcnt, 1);
        memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
               CRYPTO_MAX_ALG_NAME);
        larval->alg.cra_priority = alg->cra_priority;

        list_add(&alg->cra_list, &crypto_alg_list);
        list_add(&larval->alg.cra_list, &crypto_alg_list);

        crypto_stats_init(alg);

out:
        return larval;

free_larval:
        kfree(larval);
err:
        larval = ERR_PTR(ret);
        goto out;
}

void crypto_alg_tested(const char *name, int err)
{
        struct crypto_larval *test;
        struct crypto_alg *alg;
        struct crypto_alg *q;
        LIST_HEAD(list);
        bool best;

        down_write(&crypto_alg_sem);
        list_for_each_entry(q, &crypto_alg_list, cra_list) {
                if (crypto_is_moribund(q) || !crypto_is_larval(q))
                        continue;

                test = (struct crypto_larval *)q;

                if (!strcmp(q->cra_driver_name, name))
                        goto found;
        }

        pr_err("alg: Unexpected test result for %s: %d\n", name, err);
        goto unlock;

found:
        q->cra_flags |= CRYPTO_ALG_DEAD;
        alg = test->adult;
        if (err || list_empty(&alg->cra_list))
                goto complete;

        alg->cra_flags |= CRYPTO_ALG_TESTED;

        /* Only satisfy larval waiters if we are the best. */
        best = true;
        list_for_each_entry(q, &crypto_alg_list, cra_list) {
                if (crypto_is_moribund(q) || !crypto_is_larval(q))
                        continue;

                if (strcmp(alg->cra_name, q->cra_name))
                        continue;

                if (q->cra_priority > alg->cra_priority) {
                        best = false;
                        break;
                }
        }

        list_for_each_entry(q, &crypto_alg_list, cra_list) {
                if (q == alg)
                        continue;

                if (crypto_is_moribund(q))
                        continue;

                if (crypto_is_larval(q)) {
                        struct crypto_larval *larval = (void *)q;

                        /*
                         * Check to see if either our generic name or
                         * specific name can satisfy the name requested
                         * by the larval entry q.
                         */
                        if (strcmp(alg->cra_name, q->cra_name) &&
                            strcmp(alg->cra_driver_name, q->cra_name))
                                continue;

                        if (larval->adult)
                                continue;
                        if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
                                continue;

                        if (best && crypto_mod_get(alg))
                                larval->adult = alg;
                        else
                                larval->adult = ERR_PTR(-EAGAIN);

                        continue;
                }

                if (strcmp(alg->cra_name, q->cra_name))
                        continue;

                if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
                    q->cra_priority > alg->cra_priority)
                        continue;

                crypto_remove_spawns(q, &list, alg);
        }

complete:
        complete_all(&test->completion);

unlock:
        up_write(&crypto_alg_sem);

        crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_alg_tested);

void crypto_remove_final(struct list_head *list)
{
        struct crypto_alg *alg;
        struct crypto_alg *n;

        list_for_each_entry_safe(alg, n, list, cra_list) {
                list_del_init(&alg->cra_list);
                crypto_alg_put(alg);
        }
}
EXPORT_SYMBOL_GPL(crypto_remove_final);

static void crypto_wait_for_test(struct crypto_larval *larval)
{
        int err;

        err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
        if (err != NOTIFY_STOP) {
                if (WARN_ON(err != NOTIFY_DONE))
                        goto out;
                crypto_alg_tested(larval->alg.cra_driver_name, 0);
        }

        err = wait_for_completion_killable(&larval->completion);
        WARN_ON(err);
        if (!err)
                crypto_probing_notify(CRYPTO_MSG_ALG_LOADED, larval);

out:
        crypto_larval_kill(&larval->alg);
}

int crypto_register_alg(struct crypto_alg *alg)
{
        struct crypto_larval *larval;
        int err;

        alg->cra_flags &= ~CRYPTO_ALG_DEAD;
        err = crypto_check_alg(alg);
        if (err)
                return err;

        down_write(&crypto_alg_sem);
        larval = __crypto_register_alg(alg);
        up_write(&crypto_alg_sem);

        if (IS_ERR(larval))
                return PTR_ERR(larval);

        crypto_wait_for_test(larval);
        return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_alg);

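/*
 * Illustrative sketch (names are hypothetical): drivers normally pair
 * crypto_register_alg() with crypto_unregister_alg() in their module
 * init/exit hooks.  Note that registration does not return until
 * crypto_wait_for_test() has seen the self-test result:
 *
 *      static int __init example_mod_init(void)
 *      {
 *              return crypto_register_alg(&example_alg);
 *      }
 *
 *      static void __exit example_mod_exit(void)
 *      {
 *              crypto_unregister_alg(&example_alg);
 *      }
 */
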
static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
{
        if (unlikely(list_empty(&alg->cra_list)))
                return -ENOENT;

        alg->cra_flags |= CRYPTO_ALG_DEAD;

        list_del_init(&alg->cra_list);
        crypto_remove_spawns(alg, list, NULL);

        return 0;
}

void crypto_unregister_alg(struct crypto_alg *alg)
{
        int ret;
        LIST_HEAD(list);

        down_write(&crypto_alg_sem);
        ret = crypto_remove_alg(alg, &list);
        up_write(&crypto_alg_sem);

        if (WARN(ret, "Algorithm %s is not registered", alg->cra_driver_name))
                return;

        BUG_ON(refcount_read(&alg->cra_refcnt) != 1);
        if (alg->cra_destroy)
                alg->cra_destroy(alg);

        crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_alg);

int crypto_register_algs(struct crypto_alg *algs, int count)
{
        int i, ret;

        for (i = 0; i < count; i++) {
                ret = crypto_register_alg(&algs[i]);
                if (ret)
                        goto err;
        }

        return 0;

err:
        for (--i; i >= 0; --i)
                crypto_unregister_alg(&algs[i]);

        return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_algs);

void crypto_unregister_algs(struct crypto_alg *algs, int count)
{
        int i;

        for (i = 0; i < count; i++)
                crypto_unregister_alg(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_algs);

int crypto_register_template(struct crypto_template *tmpl)
{
        struct crypto_template *q;
        int err = -EEXIST;

        down_write(&crypto_alg_sem);

        crypto_check_module_sig(tmpl->module);

        list_for_each_entry(q, &crypto_template_list, list) {
                if (q == tmpl)
                        goto out;
        }

        list_add(&tmpl->list, &crypto_template_list);
        err = 0;
out:
        up_write(&crypto_alg_sem);
        return err;
}
EXPORT_SYMBOL_GPL(crypto_register_template);

int crypto_register_templates(struct crypto_template *tmpls, int count)
{
        int i, err;

        for (i = 0; i < count; i++) {
                err = crypto_register_template(&tmpls[i]);
                if (err)
                        goto out;
        }
        return 0;

out:
        for (--i; i >= 0; --i)
                crypto_unregister_template(&tmpls[i]);
        return err;
}
EXPORT_SYMBOL_GPL(crypto_register_templates);

void crypto_unregister_template(struct crypto_template *tmpl)
{
        struct crypto_instance *inst;
        struct hlist_node *n;
        struct hlist_head *list;
        LIST_HEAD(users);

        down_write(&crypto_alg_sem);

        BUG_ON(list_empty(&tmpl->list));
        list_del_init(&tmpl->list);

        list = &tmpl->instances;
        hlist_for_each_entry(inst, list, list) {
                int err = crypto_remove_alg(&inst->alg, &users);

                BUG_ON(err);
        }

        up_write(&crypto_alg_sem);

        hlist_for_each_entry_safe(inst, n, list, list) {
                BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1);
                crypto_free_instance(inst);
        }
        crypto_remove_final(&users);
}
EXPORT_SYMBOL_GPL(crypto_unregister_template);

void crypto_unregister_templates(struct crypto_template *tmpls, int count)
{
        int i;

        for (i = count - 1; i >= 0; --i)
                crypto_unregister_template(&tmpls[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_templates);

static struct crypto_template *__crypto_lookup_template(const char *name)
{
        struct crypto_template *q, *tmpl = NULL;

        down_read(&crypto_alg_sem);
        list_for_each_entry(q, &crypto_template_list, list) {
                if (strcmp(q->name, name))
                        continue;
                if (unlikely(!crypto_tmpl_get(q)))
                        continue;

                tmpl = q;
                break;
        }
        up_read(&crypto_alg_sem);

        return tmpl;
}

struct crypto_template *crypto_lookup_template(const char *name)
{
        return try_then_request_module(__crypto_lookup_template(name),
                                       "crypto-%s", name);
}
EXPORT_SYMBOL_GPL(crypto_lookup_template);

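/*
 * For an instance name such as "hmac(sha256)", the lookup key is the
 * outermost template name, "hmac".  If no such template is currently
 * registered, try_then_request_module() above asks modprobe for the
 * alias "crypto-hmac" and retries the lookup once the module has had
 * a chance to load.
 */
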
int crypto_register_instance(struct crypto_template *tmpl,
                             struct crypto_instance *inst)
{
        struct crypto_larval *larval;
        struct crypto_spawn *spawn;
        int err;

        err = crypto_check_alg(&inst->alg);
        if (err)
                return err;

        inst->alg.cra_module = tmpl->module;
        inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;

        down_write(&crypto_alg_sem);

        larval = ERR_PTR(-EAGAIN);
        for (spawn = inst->spawns; spawn;) {
                struct crypto_spawn *next;

                if (spawn->dead)
                        goto unlock;

                next = spawn->next;
                spawn->inst = inst;
                spawn->registered = true;

                if (spawn->dropref)
                        crypto_mod_put(spawn->alg);

                spawn = next;
        }

        larval = __crypto_register_alg(&inst->alg);
        if (IS_ERR(larval))
                goto unlock;

        hlist_add_head(&inst->list, &tmpl->instances);
        inst->tmpl = tmpl;

unlock:
        up_write(&crypto_alg_sem);

        err = PTR_ERR(larval);
        if (IS_ERR(larval))
                goto err;

        crypto_wait_for_test(larval);
        err = 0;

err:
        return err;
}
EXPORT_SYMBOL_GPL(crypto_register_instance);

void crypto_unregister_instance(struct crypto_instance *inst)
{
        LIST_HEAD(list);

        down_write(&crypto_alg_sem);

        crypto_remove_spawns(&inst->alg, &list, NULL);
        crypto_remove_instance(inst, &list);

        up_write(&crypto_alg_sem);

        crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_instance);

int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
                      struct crypto_instance *inst, u32 mask)
{
        int err = -EAGAIN;

        if (WARN_ON_ONCE(inst == NULL))
                return -EINVAL;

        spawn->next = inst->spawns;
        inst->spawns = spawn;

        spawn->mask = mask;

        down_write(&crypto_alg_sem);
        if (!crypto_is_moribund(alg)) {
                list_add(&spawn->list, &alg->cra_users);
                spawn->alg = alg;
                err = 0;
        }
        up_write(&crypto_alg_sem);

        return err;
}
EXPORT_SYMBOL_GPL(crypto_init_spawn);

int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
                       struct crypto_instance *inst,
                       const struct crypto_type *frontend)
{
        int err = -EINVAL;

        if ((alg->cra_flags ^ frontend->type) & frontend->maskset)
                goto out;

        spawn->frontend = frontend;
        err = crypto_init_spawn(spawn, alg, inst, frontend->maskset);

out:
        return err;
}
EXPORT_SYMBOL_GPL(crypto_init_spawn2);

int crypto_grab_spawn(struct crypto_spawn *spawn, const char *name,
                      u32 type, u32 mask)
{
        struct crypto_alg *alg;
        int err;

        alg = crypto_find_alg(name, spawn->frontend, type, mask);
        if (IS_ERR(alg))
                return PTR_ERR(alg);

        spawn->dropref = true;
        err = crypto_init_spawn(spawn, alg, spawn->inst, mask);
        if (err)
                crypto_mod_put(alg);
        return err;
}
EXPORT_SYMBOL_GPL(crypto_grab_spawn);

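/*
 * Illustrative sketch, simplified from what the type-specific
 * wrappers built on crypto_grab_spawn() do: a template's ->create()
 * points the spawn at its instance, grabs the underlying algorithm by
 * name, and may drop the spawn unconditionally on later error paths,
 * since crypto_drop_spawn() below is a no-op when the grab never
 * initialized spawn->alg:
 *
 *      spawn->inst = inst;
 *      err = crypto_grab_spawn(spawn, name, type, mask);
 *      if (err)
 *              goto err_free_inst;   // crypto_drop_spawn() stays safe
 */
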
void crypto_drop_spawn(struct crypto_spawn *spawn)
{
        if (!spawn->alg) /* not yet initialized? */
                return;

        down_write(&crypto_alg_sem);
        if (!spawn->dead)
                list_del(&spawn->list);
        up_write(&crypto_alg_sem);

        if (spawn->dropref && !spawn->registered)
                crypto_mod_put(spawn->alg);
}
EXPORT_SYMBOL_GPL(crypto_drop_spawn);

static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
{
        struct crypto_alg *alg;

        down_read(&crypto_alg_sem);
        alg = spawn->alg;
        if (!spawn->dead && !crypto_mod_get(alg)) {
                alg->cra_flags |= CRYPTO_ALG_DYING;
                alg = NULL;
        }
        up_read(&crypto_alg_sem);

        return alg ?: ERR_PTR(-EAGAIN);
}

struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
                                    u32 mask)
{
        struct crypto_alg *alg;
        struct crypto_tfm *tfm;

        alg = crypto_spawn_alg(spawn);
        if (IS_ERR(alg))
                return ERR_CAST(alg);

        tfm = ERR_PTR(-EINVAL);
        if (unlikely((alg->cra_flags ^ type) & mask))
                goto out_put_alg;

        tfm = __crypto_alloc_tfm(alg, type, mask);
        if (IS_ERR(tfm))
                goto out_put_alg;

        return tfm;

out_put_alg:
        crypto_mod_put(alg);
        return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm);

void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
{
        struct crypto_alg *alg;
        struct crypto_tfm *tfm;

        alg = crypto_spawn_alg(spawn);
        if (IS_ERR(alg))
                return ERR_CAST(alg);

        tfm = crypto_create_tfm(alg, spawn->frontend);
        if (IS_ERR(tfm))
                goto out_put_alg;

        return tfm;

out_put_alg:
        crypto_mod_put(alg);
        return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);

int crypto_register_notifier(struct notifier_block *nb)
{
        return blocking_notifier_chain_register(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_register_notifier);

int crypto_unregister_notifier(struct notifier_block *nb)
{
        return blocking_notifier_chain_unregister(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_unregister_notifier);

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
{
        struct rtattr *rta = tb[0];
        struct crypto_attr_type *algt;

        if (!rta)
                return ERR_PTR(-ENOENT);
        if (RTA_PAYLOAD(rta) < sizeof(*algt))
                return ERR_PTR(-EINVAL);
        if (rta->rta_type != CRYPTOA_TYPE)
                return ERR_PTR(-EINVAL);

        algt = RTA_DATA(rta);

        return algt;
}
EXPORT_SYMBOL_GPL(crypto_get_attr_type);

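/*
 * The tb[] array is assembled by the template probing code (see
 * crypto/algboss.c) when an instance name is parsed: tb[0] carries a
 * struct crypto_attr_type (CRYPTOA_TYPE) describing what the caller
 * asked for, and the following entries describe the template's
 * arguments, e.g. a CRYPTOA_ALG entry naming "sha256" when
 * "hmac(sha256)" is requested.
 */
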
int crypto_check_attr_type(struct rtattr **tb, u32 type)
{
        struct crypto_attr_type *algt;

        algt = crypto_get_attr_type(tb);
        if (IS_ERR(algt))
                return PTR_ERR(algt);

        if ((algt->type ^ type) & algt->mask)
                return -EINVAL;

        return 0;
}
EXPORT_SYMBOL_GPL(crypto_check_attr_type);

const char *crypto_attr_alg_name(struct rtattr *rta)
{
        struct crypto_attr_alg *alga;

        if (!rta)
                return ERR_PTR(-ENOENT);
        if (RTA_PAYLOAD(rta) < sizeof(*alga))
                return ERR_PTR(-EINVAL);
        if (rta->rta_type != CRYPTOA_ALG)
                return ERR_PTR(-EINVAL);

        alga = RTA_DATA(rta);
        alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;

        return alga->name;
}
EXPORT_SYMBOL_GPL(crypto_attr_alg_name);

struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
                                    const struct crypto_type *frontend,
                                    u32 type, u32 mask)
{
        const char *name;

        name = crypto_attr_alg_name(rta);
        if (IS_ERR(name))
                return ERR_CAST(name);

        return crypto_find_alg(name, frontend, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_attr_alg2);

int crypto_attr_u32(struct rtattr *rta, u32 *num)
{
        struct crypto_attr_u32 *nu32;

        if (!rta)
                return -ENOENT;
        if (RTA_PAYLOAD(rta) < sizeof(*nu32))
                return -EINVAL;
        if (rta->rta_type != CRYPTOA_U32)
                return -EINVAL;

        nu32 = RTA_DATA(rta);
        *num = nu32->num;

        return 0;
}
EXPORT_SYMBOL_GPL(crypto_attr_u32);

int crypto_inst_setname(struct crypto_instance *inst, const char *name,
                        struct crypto_alg *alg)
{
        if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
                     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
                return -ENAMETOOLONG;

        if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
                     name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
                return -ENAMETOOLONG;

        return 0;
}
EXPORT_SYMBOL_GPL(crypto_inst_setname);

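/*
 * Example: crypto_inst_setname(inst, "hmac", alg), where alg has
 * cra_name "sha256" and cra_driver_name "sha256-generic", produces
 * the instance names "hmac(sha256)" and "hmac(sha256-generic)".
 * -ENAMETOOLONG is returned if either result would not fit in
 * CRYPTO_MAX_ALG_NAME bytes.
 */
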
void *crypto_alloc_instance(const char *name, struct crypto_alg *alg,
                            unsigned int head)
{
        struct crypto_instance *inst;
        char *p;
        int err;

        p = kzalloc(head + sizeof(*inst) + sizeof(struct crypto_spawn),
                    GFP_KERNEL);
        if (!p)
                return ERR_PTR(-ENOMEM);

        inst = (void *)(p + head);

        err = crypto_inst_setname(inst, name, alg);
        if (err)
                goto err_free_inst;

        return p;

err_free_inst:
        kfree(p);
        return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_instance);

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
{
        INIT_LIST_HEAD(&queue->list);
        queue->backlog = &queue->list;
        queue->qlen = 0;
        queue->max_qlen = max_qlen;
}
EXPORT_SYMBOL_GPL(crypto_init_queue);

int crypto_enqueue_request(struct crypto_queue *queue,
                           struct crypto_async_request *request)
{
        int err = -EINPROGRESS;

        if (unlikely(queue->qlen >= queue->max_qlen)) {
                if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
                        err = -ENOSPC;
                        goto out;
                }
                err = -EBUSY;
                if (queue->backlog == &queue->list)
                        queue->backlog = &request->list;
        }

        queue->qlen++;
        list_add_tail(&request->list, &queue->list);

out:
        return err;
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request);

struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
{
        struct list_head *request;

        if (unlikely(!queue->qlen))
                return NULL;

        queue->qlen--;

        if (queue->backlog != &queue->list)
                queue->backlog = queue->backlog->next;

        request = queue->list.next;
        list_del(request);

        return list_entry(request, struct crypto_async_request, list);
}
EXPORT_SYMBOL_GPL(crypto_dequeue_request);

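/*
 * Illustrative sketch of the usual driver pattern (the lock and
 * structure names are hypothetical): enqueue and dequeue under a
 * lock, then tell a formerly backlogged request, if any, that it is
 * now in progress.  Note that crypto_get_backlog() must be sampled
 * before crypto_dequeue_request() advances the backlog pointer:
 *
 *      spin_lock_irqsave(&dev->lock, flags);
 *      err = crypto_enqueue_request(&dev->queue, req);
 *      backlog = crypto_get_backlog(&dev->queue);
 *      async_req = crypto_dequeue_request(&dev->queue);
 *      spin_unlock_irqrestore(&dev->lock, flags);
 *
 *      if (backlog)
 *              backlog->complete(backlog, -EINPROGRESS);
 */
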
static inline void crypto_inc_byte(u8 *a, unsigned int size)
{
        u8 *b = (a + size);
        u8 c;

        for (; size; size--) {
                c = *--b + 1;
                *b = c;
                if (c)
                        break;
        }
}

void crypto_inc(u8 *a, unsigned int size)
{
        __be32 *b = (__be32 *)(a + size);
        u32 c;

        if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
            IS_ALIGNED((unsigned long)b, __alignof__(*b)))
                for (; size >= 4; size -= 4) {
                        c = be32_to_cpu(*--b) + 1;
                        *b = cpu_to_be32(c);
                        if (likely(c))
                                return;
                }

        crypto_inc_byte(a, size);
}
EXPORT_SYMBOL_GPL(crypto_inc);

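/*
 * Worked example: the buffer is treated as one big-endian integer,
 * so for an 8-byte counter
 *
 *      00 00 00 00 00 00 ff ff  ->  00 00 00 00 00 01 00 00
 *
 * The 32-bit fast path stops as soon as a word increment does not
 * carry; crypto_inc_byte() covers the leftover bytes when size is
 * not a multiple of four, or the whole buffer when it is unaligned
 * on a machine without efficient unaligned access.
 */
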
void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int len)
{
        int relalign = 0;

        if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) {
                int size = sizeof(unsigned long);
                int d = (((unsigned long)dst ^ (unsigned long)src1) |
                         ((unsigned long)dst ^ (unsigned long)src2)) &
                        (size - 1);

                relalign = d ? 1 << __ffs(d) : size;

                /*
                 * If we care about alignment, process as many bytes as
                 * needed to advance dst and src to values whose alignments
                 * equal their relative alignment. This will allow us to
                 * process the remainder of the input using optimal strides.
                 */
                while (((unsigned long)dst & (relalign - 1)) && len > 0) {
                        *dst++ = *src1++ ^ *src2++;
                        len--;
                }
        }

        while (IS_ENABLED(CONFIG_64BIT) && len >= 8 && !(relalign & 7)) {
                *(u64 *)dst = *(u64 *)src1 ^ *(u64 *)src2;
                dst += 8;
                src1 += 8;
                src2 += 8;
                len -= 8;
        }

        while (len >= 4 && !(relalign & 3)) {
                *(u32 *)dst = *(u32 *)src1 ^ *(u32 *)src2;
                dst += 4;
                src1 += 4;
                src2 += 4;
                len -= 4;
        }

        while (len >= 2 && !(relalign & 1)) {
                *(u16 *)dst = *(u16 *)src1 ^ *(u16 *)src2;
                dst += 2;
                src1 += 2;
                src2 += 2;
                len -= 2;
        }

        while (len--)
                *dst++ = *src1++ ^ *src2++;
}
EXPORT_SYMBOL_GPL(__crypto_xor);

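/*
 * Example of the relative-alignment computation above: with dst
 * ending in 0x04, src1 in 0x08 and src2 in 0x0c on a 64-bit machine
 * without efficient unaligned access, the lowest differing address
 * bit is bit 2, so relalign == 4: the 8-byte loop is skipped and the
 * bulk of the data is processed in 4-byte strides.
 */
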
unsigned int crypto_alg_extsize(struct crypto_alg *alg)
{
        return alg->cra_ctxsize +
               (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
}
EXPORT_SYMBOL_GPL(crypto_alg_extsize);

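/*
 * Example: with cra_alignmask == 63 and crypto_tfm_ctx_alignment()
 * == 8, the expression above reserves 63 & ~7 == 56 extra bytes,
 * enough to realign the context to a 64-byte boundary by hand no
 * matter where within an 8-byte-aligned allocation it starts.
 */
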
int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
                        u32 type, u32 mask)
{
        int ret = 0;
        struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask);

        if (!IS_ERR(alg)) {
                crypto_mod_put(alg);
                ret = 1;
        }

        return ret;
}
EXPORT_SYMBOL_GPL(crypto_type_has_alg);

#ifdef CONFIG_CRYPTO_STATS
void crypto_stats_init(struct crypto_alg *alg)
{
        memset(&alg->stats, 0, sizeof(alg->stats));
}
EXPORT_SYMBOL_GPL(crypto_stats_init);

void crypto_stats_get(struct crypto_alg *alg)
{
        crypto_alg_get(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_get);

void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg,
                               int ret)
{
        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
                atomic64_inc(&alg->stats.aead.err_cnt);
        } else {
                atomic64_inc(&alg->stats.aead.encrypt_cnt);
                atomic64_add(cryptlen, &alg->stats.aead.encrypt_tlen);
        }
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_encrypt);

void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg,
                               int ret)
{
        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
                atomic64_inc(&alg->stats.aead.err_cnt);
        } else {
                atomic64_inc(&alg->stats.aead.decrypt_cnt);
                atomic64_add(cryptlen, &alg->stats.aead.decrypt_tlen);
        }
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_decrypt);

void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret,
                                   struct crypto_alg *alg)
{
        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
                atomic64_inc(&alg->stats.akcipher.err_cnt);
        } else {
                atomic64_inc(&alg->stats.akcipher.encrypt_cnt);
                atomic64_add(src_len, &alg->stats.akcipher.encrypt_tlen);
        }
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_encrypt);

void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret,
                                   struct crypto_alg *alg)
{
        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
                atomic64_inc(&alg->stats.akcipher.err_cnt);
        } else {
                atomic64_inc(&alg->stats.akcipher.decrypt_cnt);
                atomic64_add(src_len, &alg->stats.akcipher.decrypt_tlen);
        }
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_decrypt);

void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
{
        if (ret && ret != -EINPROGRESS && ret != -EBUSY)
                atomic64_inc(&alg->stats.akcipher.err_cnt);
        else
                atomic64_inc(&alg->stats.akcipher.sign_cnt);
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_sign);

void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
{
        if (ret && ret != -EINPROGRESS && ret != -EBUSY)
                atomic64_inc(&alg->stats.akcipher.err_cnt);
        else
                atomic64_inc(&alg->stats.akcipher.verify_cnt);
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_verify);

void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
{
        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
                atomic64_inc(&alg->stats.compress.err_cnt);
        } else {
                atomic64_inc(&alg->stats.compress.compress_cnt);
                atomic64_add(slen, &alg->stats.compress.compress_tlen);
        }
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_compress);

void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
{
        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
                atomic64_inc(&alg->stats.compress.err_cnt);
        } else {
                atomic64_inc(&alg->stats.compress.decompress_cnt);
                atomic64_add(slen, &alg->stats.compress.decompress_tlen);
        }
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_decompress);

void crypto_stats_ahash_update(unsigned int nbytes, int ret,
                               struct crypto_alg *alg)
{
        if (ret && ret != -EINPROGRESS && ret != -EBUSY)
                atomic64_inc(&alg->stats.hash.err_cnt);
        else
                atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_update);

void crypto_stats_ahash_final(unsigned int nbytes, int ret,
                              struct crypto_alg *alg)
{
        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
                atomic64_inc(&alg->stats.hash.err_cnt);
        } else {
                atomic64_inc(&alg->stats.hash.hash_cnt);
                atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
        }
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_final);

void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
{
        if (ret)
                atomic64_inc(&alg->stats.kpp.err_cnt);
        else
                atomic64_inc(&alg->stats.kpp.setsecret_cnt);
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_set_secret);

void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
{
        if (ret)
                atomic64_inc(&alg->stats.kpp.err_cnt);
        else
                atomic64_inc(&alg->stats.kpp.generate_public_key_cnt);
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_generate_public_key);

void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
{
        if (ret)
                atomic64_inc(&alg->stats.kpp.err_cnt);
        else
                atomic64_inc(&alg->stats.kpp.compute_shared_secret_cnt);
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_compute_shared_secret);

void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
{
        if (ret && ret != -EINPROGRESS && ret != -EBUSY)
                atomic64_inc(&alg->stats.rng.err_cnt);
        else
                atomic64_inc(&alg->stats.rng.seed_cnt);
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_seed);

void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen,
                               int ret)
{
        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
                atomic64_inc(&alg->stats.rng.err_cnt);
        } else {
                atomic64_inc(&alg->stats.rng.generate_cnt);
                atomic64_add(dlen, &alg->stats.rng.generate_tlen);
        }
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_generate);

void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret,
                                   struct crypto_alg *alg)
{
        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
                atomic64_inc(&alg->stats.cipher.err_cnt);
        } else {
                atomic64_inc(&alg->stats.cipher.encrypt_cnt);
                atomic64_add(cryptlen, &alg->stats.cipher.encrypt_tlen);
        }
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_encrypt);

void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret,
                                   struct crypto_alg *alg)
{
        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
                atomic64_inc(&alg->stats.cipher.err_cnt);
        } else {
                atomic64_inc(&alg->stats.cipher.decrypt_cnt);
                atomic64_add(cryptlen, &alg->stats.cipher.decrypt_tlen);
        }
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_decrypt);
#endif

static int __init crypto_algapi_init(void)
{
        crypto_init_proc();
        return 0;
}

static void __exit crypto_algapi_exit(void)
{
        crypto_exit_proc();
}

module_init(crypto_algapi_init);
module_exit(crypto_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Cryptographic algorithms API");