// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 */

#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/string.h>

#include "internal.h"

static LIST_HEAD(crypto_template_list);

static inline void crypto_check_module_sig(struct module *mod)
{
        if (fips_enabled && mod && !module_sig_ok(mod))
                panic("Module %s signature verification failed in FIPS mode\n",
                      module_name(mod));
}

static int crypto_check_alg(struct crypto_alg *alg)
{
        crypto_check_module_sig(alg->cra_module);

        if (!alg->cra_name[0] || !alg->cra_driver_name[0])
                return -EINVAL;

        if (alg->cra_alignmask & (alg->cra_alignmask + 1))
                return -EINVAL;

        /* General maximums for all algs. */
        if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
                return -EINVAL;

        if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
                return -EINVAL;

        /* Lower maximums for specific alg types. */
        if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
                               CRYPTO_ALG_TYPE_CIPHER) {
                if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
                        return -EINVAL;

                if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE)
                        return -EINVAL;
        }

        if (alg->cra_priority < 0)
                return -EINVAL;

        refcount_set(&alg->cra_refcnt, 1);

        return 0;
}

static void crypto_free_instance(struct crypto_instance *inst)
{
        inst->alg.cra_type->free(inst);
}

static void crypto_destroy_instance(struct crypto_alg *alg)
{
        struct crypto_instance *inst = (void *)alg;
        struct crypto_template *tmpl = inst->tmpl;

        crypto_free_instance(inst);
        crypto_tmpl_put(tmpl);
}

/*
 * This function adds a spawn to the list secondary_spawns which
 * will be used at the end of crypto_remove_spawns to unregister
 * instances, unless the spawn happens to be one that is depended
 * on by the new algorithm (nalg in crypto_remove_spawns).
 *
 * This function is also responsible for resurrecting any algorithms
 * in the dependency chain of nalg by unsetting n->dead.
 */
static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
                                            struct list_head *stack,
                                            struct list_head *top,
                                            struct list_head *secondary_spawns)
{
        struct crypto_spawn *spawn, *n;

        spawn = list_first_entry_or_null(stack, struct crypto_spawn, list);
        if (!spawn)
                return NULL;

        n = list_prev_entry(spawn, list);
        list_move(&spawn->list, secondary_spawns);

        if (list_is_last(&n->list, stack))
                return top;

        n = list_next_entry(n, list);
        if (!spawn->dead)
                n->dead = false;

        return &n->inst->alg.cra_users;
}

static void crypto_remove_instance(struct crypto_instance *inst,
                                   struct list_head *list)
{
        struct crypto_template *tmpl = inst->tmpl;

        if (crypto_is_dead(&inst->alg))
                return;

        inst->alg.cra_flags |= CRYPTO_ALG_DEAD;

        if (!tmpl || !crypto_tmpl_get(tmpl))
                return;

        list_move(&inst->alg.cra_list, list);
        hlist_del(&inst->list);
        inst->alg.cra_destroy = crypto_destroy_instance;

        BUG_ON(!list_empty(&inst->alg.cra_users));
}

/*
 * Given an algorithm alg, remove all algorithms that depend on it
 * through spawns.  If nalg is not null, then exempt any algorithms
 * that are depended on by nalg.  This is useful when nalg itself
 * depends on alg.
 */
void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
                          struct crypto_alg *nalg)
{
        u32 new_type = (nalg ?: alg)->cra_flags;
        struct crypto_spawn *spawn, *n;
        LIST_HEAD(secondary_spawns);
        struct list_head *spawns;
        LIST_HEAD(stack);
        LIST_HEAD(top);

        spawns = &alg->cra_users;
        list_for_each_entry_safe(spawn, n, spawns, list) {
                if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
                        continue;

                list_move(&spawn->list, &top);
        }

        /*
         * Perform a depth-first walk starting from alg through
         * the cra_users tree.  The list stack records the path
         * from alg to the current spawn.
         */
        spawns = &top;
        do {
                while (!list_empty(spawns)) {
                        struct crypto_instance *inst;

                        spawn = list_first_entry(spawns, struct crypto_spawn,
                                                 list);
                        inst = spawn->inst;

                        list_move(&spawn->list, &stack);
                        spawn->dead = !spawn->registered || &inst->alg != nalg;

                        if (!spawn->registered)
                                break;

                        BUG_ON(&inst->alg == alg);

                        if (&inst->alg == nalg)
                                break;

                        spawns = &inst->alg.cra_users;

                        /*
                         * Even if spawn->registered is true, the instance
                         * itself may still be unregistered, either because
                         * it failed during registration or because its
                         * spawns were set up before it was registered.  An
                         * unregistered instance will have NULL
                         * ->cra_users.next, since ->cra_users isn't
                         * properly initialized until registration.  But an
                         * unregistered instance cannot have any users, so
                         * treat it the same as ->cra_users being empty.
                         */
                        if (spawns->next == NULL)
                                break;
                }
        } while ((spawns = crypto_more_spawns(alg, &stack, &top,
                                              &secondary_spawns)));

        /*
         * Remove all instances that are marked as dead.  Also
         * complete the resurrection of the others by moving them
         * back to the cra_users list.
         */
        list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
                if (!spawn->dead)
                        list_move(&spawn->list, &spawn->alg->cra_users);
                else if (spawn->registered)
                        crypto_remove_instance(spawn->inst, list);
        }
}
EXPORT_SYMBOL_GPL(crypto_remove_spawns);

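/*
 * Illustrative walk-through (not part of the original file): if a driver
 * algorithm such as "aes-generic" goes away while a template instance
 * "cbc(aes-generic)" holds a spawn on it, the walk above finds that
 * instance via ->cra_users, marks its spawn dead, and queues the instance
 * on @list so the caller can free it outside the semaphore.  A typical
 * caller follows the same pattern as crypto_unregister_instance() below:
 *
 *      LIST_HEAD(list);
 *
 *      down_write(&crypto_alg_sem);
 *      crypto_remove_spawns(alg, &list, NULL);
 *      up_write(&crypto_alg_sem);
 *
 *      crypto_remove_final(&list);
 */
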
static struct crypto_larval *crypto_alloc_test_larval(struct crypto_alg *alg)
{
        struct crypto_larval *larval;

        if (!IS_ENABLED(CONFIG_CRYPTO_MANAGER))
                return NULL;

        larval = crypto_larval_alloc(alg->cra_name,
                                     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
        if (IS_ERR(larval))
                return larval;

        larval->adult = crypto_mod_get(alg);
        if (!larval->adult) {
                kfree(larval);
                return ERR_PTR(-ENOENT);
        }

        refcount_set(&larval->alg.cra_refcnt, 1);
        memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
               CRYPTO_MAX_ALG_NAME);
        larval->alg.cra_priority = alg->cra_priority;

        return larval;
}

static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
{
        struct crypto_alg *q;
        struct crypto_larval *larval;
        int ret = -EAGAIN;

        if (crypto_is_dead(alg))
                goto err;

        INIT_LIST_HEAD(&alg->cra_users);

        /* No cheating! */
        alg->cra_flags &= ~CRYPTO_ALG_TESTED;

        ret = -EEXIST;

        list_for_each_entry(q, &crypto_alg_list, cra_list) {
                if (q == alg)
                        goto err;

                if (crypto_is_moribund(q))
                        continue;

                if (crypto_is_larval(q)) {
                        if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
                                goto err;
                        continue;
                }

                if (!strcmp(q->cra_driver_name, alg->cra_name) ||
                    !strcmp(q->cra_name, alg->cra_driver_name))
                        goto err;
        }

        larval = crypto_alloc_test_larval(alg);
        if (IS_ERR(larval))
                goto out;

        list_add(&alg->cra_list, &crypto_alg_list);

        if (larval)
                list_add(&larval->alg.cra_list, &crypto_alg_list);
        else
                alg->cra_flags |= CRYPTO_ALG_TESTED;

        crypto_stats_init(alg);

out:
        return larval;

err:
        larval = ERR_PTR(ret);
        goto out;
}

void crypto_alg_tested(const char *name, int err)
{
        struct crypto_larval *test;
        struct crypto_alg *alg;
        struct crypto_alg *q;
        LIST_HEAD(list);
        bool best;

        down_write(&crypto_alg_sem);
        list_for_each_entry(q, &crypto_alg_list, cra_list) {
                if (crypto_is_moribund(q) || !crypto_is_larval(q))
                        continue;

                test = (struct crypto_larval *)q;

                if (!strcmp(q->cra_driver_name, name))
                        goto found;
        }

        pr_err("alg: Unexpected test result for %s: %d\n", name, err);
        goto unlock;

found:
        q->cra_flags |= CRYPTO_ALG_DEAD;
        alg = test->adult;

        if (err || list_empty(&alg->cra_list))
                goto complete;

        alg->cra_flags |= CRYPTO_ALG_TESTED;

        /* Only satisfy larval waiters if we are the best. */
        best = true;
        list_for_each_entry(q, &crypto_alg_list, cra_list) {
                if (crypto_is_moribund(q) || !crypto_is_larval(q))
                        continue;

                if (strcmp(alg->cra_name, q->cra_name))
                        continue;

                if (q->cra_priority > alg->cra_priority) {
                        best = false;
                        break;
                }
        }

        list_for_each_entry(q, &crypto_alg_list, cra_list) {
                if (q == alg)
                        continue;

                if (crypto_is_moribund(q))
                        continue;

                if (crypto_is_larval(q)) {
                        struct crypto_larval *larval = (void *)q;

                        /*
                         * Check to see if either our generic name or
                         * specific name can satisfy the name requested
                         * by the larval entry q.
                         */
                        if (strcmp(alg->cra_name, q->cra_name) &&
                            strcmp(alg->cra_driver_name, q->cra_name))
                                continue;

                        if (larval->adult)
                                continue;
                        if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
                                continue;

                        if (best && crypto_mod_get(alg))
                                larval->adult = alg;
                        else
                                larval->adult = ERR_PTR(-EAGAIN);

                        continue;
                }

                if (strcmp(alg->cra_name, q->cra_name))
                        continue;

                if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
                    q->cra_priority > alg->cra_priority)
                        continue;

                crypto_remove_spawns(q, &list, alg);
        }

complete:
        complete_all(&test->completion);

unlock:
        up_write(&crypto_alg_sem);

        crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_alg_tested);

void crypto_remove_final(struct list_head *list)
{
        struct crypto_alg *alg;
        struct crypto_alg *n;

        list_for_each_entry_safe(alg, n, list, cra_list) {
                list_del_init(&alg->cra_list);
                crypto_alg_put(alg);
        }
}
EXPORT_SYMBOL_GPL(crypto_remove_final);

int crypto_register_alg(struct crypto_alg *alg)
{
        struct crypto_larval *larval;
        bool test_started;
        int err;

        alg->cra_flags &= ~CRYPTO_ALG_DEAD;
        err = crypto_check_alg(alg);
        if (err)
                return err;

        down_write(&crypto_alg_sem);
        larval = __crypto_register_alg(alg);
        test_started = static_key_enabled(&crypto_boot_test_finished);
        if (!IS_ERR_OR_NULL(larval))
                larval->test_started = test_started;
        up_write(&crypto_alg_sem);

        if (IS_ERR_OR_NULL(larval))
                return PTR_ERR(larval);

        if (test_started)
                crypto_wait_for_test(larval);
        return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_alg);

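/*
 * Usage sketch (illustrative, not from the original file): a driver
 * typically registers a statically defined algorithm from its module init
 * and unregisters it on exit.  The "foo" names are hypothetical, and the
 * type-specific fields (e.g. the .cra_u.cipher callbacks a cipher needs)
 * are omitted for brevity:
 *
 *      static struct crypto_alg foo_alg = {
 *              .cra_name        = "foo",
 *              .cra_driver_name = "foo-generic",
 *              .cra_priority    = 100,
 *              .cra_flags       = CRYPTO_ALG_TYPE_CIPHER,
 *              .cra_blocksize   = 16,
 *              .cra_module      = THIS_MODULE,
 *      };
 *
 *      static int __init foo_init(void)
 *      {
 *              return crypto_register_alg(&foo_alg);
 *      }
 *
 *      static void __exit foo_exit(void)
 *      {
 *              crypto_unregister_alg(&foo_alg);
 *      }
 */
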
static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
{
        if (unlikely(list_empty(&alg->cra_list)))
                return -ENOENT;

        alg->cra_flags |= CRYPTO_ALG_DEAD;

        list_del_init(&alg->cra_list);
        crypto_remove_spawns(alg, list, NULL);

        return 0;
}

void crypto_unregister_alg(struct crypto_alg *alg)
{
        int ret;
        LIST_HEAD(list);

        down_write(&crypto_alg_sem);
        ret = crypto_remove_alg(alg, &list);
        up_write(&crypto_alg_sem);

        if (WARN(ret, "Algorithm %s is not registered", alg->cra_driver_name))
                return;

        BUG_ON(refcount_read(&alg->cra_refcnt) != 1);
        if (alg->cra_destroy)
                alg->cra_destroy(alg);

        crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_alg);

int crypto_register_algs(struct crypto_alg *algs, int count)
{
        int i, ret;

        for (i = 0; i < count; i++) {
                ret = crypto_register_alg(&algs[i]);
                if (ret)
                        goto err;
        }

        return 0;

err:
        for (--i; i >= 0; --i)
                crypto_unregister_alg(&algs[i]);

        return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_algs);

void crypto_unregister_algs(struct crypto_alg *algs, int count)
{
        int i;

        for (i = 0; i < count; i++)
                crypto_unregister_alg(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_algs);

int crypto_register_template(struct crypto_template *tmpl)
{
        struct crypto_template *q;
        int err = -EEXIST;

        down_write(&crypto_alg_sem);

        crypto_check_module_sig(tmpl->module);

        list_for_each_entry(q, &crypto_template_list, list) {
                if (q == tmpl)
                        goto out;
        }

        list_add(&tmpl->list, &crypto_template_list);
        err = 0;
out:
        up_write(&crypto_alg_sem);
        return err;
}
EXPORT_SYMBOL_GPL(crypto_register_template);

int crypto_register_templates(struct crypto_template *tmpls, int count)
{
        int i, err;

        for (i = 0; i < count; i++) {
                err = crypto_register_template(&tmpls[i]);
                if (err)
                        goto out;
        }
        return 0;

out:
        for (--i; i >= 0; --i)
                crypto_unregister_template(&tmpls[i]);
        return err;
}
EXPORT_SYMBOL_GPL(crypto_register_templates);

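/*
 * Usage sketch (illustrative): templates are usually registered once at
 * module init.  The "foo" name and foo_create() callback are hypothetical;
 * ->create() is called to instantiate the template around an inner
 * algorithm, see crypto_register_instance() below:
 *
 *      static struct crypto_template foo_tmpl = {
 *              .name   = "foo",
 *              .create = foo_create,
 *              .module = THIS_MODULE,
 *      };
 *
 *      static int __init foo_mod_init(void)
 *      {
 *              return crypto_register_template(&foo_tmpl);
 *      }
 */
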
void crypto_unregister_template(struct crypto_template *tmpl)
{
        struct crypto_instance *inst;
        struct hlist_node *n;
        struct hlist_head *list;
        LIST_HEAD(users);

        down_write(&crypto_alg_sem);

        BUG_ON(list_empty(&tmpl->list));
        list_del_init(&tmpl->list);

        list = &tmpl->instances;
        hlist_for_each_entry(inst, list, list) {
                int err = crypto_remove_alg(&inst->alg, &users);

                BUG_ON(err);
        }

        up_write(&crypto_alg_sem);

        hlist_for_each_entry_safe(inst, n, list, list) {
                BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1);
                crypto_free_instance(inst);
        }
        crypto_remove_final(&users);
}
EXPORT_SYMBOL_GPL(crypto_unregister_template);

void crypto_unregister_templates(struct crypto_template *tmpls, int count)
{
        int i;

        for (i = count - 1; i >= 0; --i)
                crypto_unregister_template(&tmpls[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_templates);

static struct crypto_template *__crypto_lookup_template(const char *name)
{
        struct crypto_template *q, *tmpl = NULL;

        down_read(&crypto_alg_sem);
        list_for_each_entry(q, &crypto_template_list, list) {
                if (strcmp(q->name, name))
                        continue;
                if (unlikely(!crypto_tmpl_get(q)))
                        continue;

                tmpl = q;
                break;
        }
        up_read(&crypto_alg_sem);

        return tmpl;
}

struct crypto_template *crypto_lookup_template(const char *name)
{
        return try_then_request_module(__crypto_lookup_template(name),
                                       "crypto-%s", name);
}
EXPORT_SYMBOL_GPL(crypto_lookup_template);

int crypto_register_instance(struct crypto_template *tmpl,
                             struct crypto_instance *inst)
{
        struct crypto_larval *larval;
        struct crypto_spawn *spawn;
        int err;

        err = crypto_check_alg(&inst->alg);
        if (err)
                return err;

        inst->alg.cra_module = tmpl->module;
        inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;

        down_write(&crypto_alg_sem);

        larval = ERR_PTR(-EAGAIN);
        for (spawn = inst->spawns; spawn;) {
                struct crypto_spawn *next;

                if (spawn->dead)
                        goto unlock;

                next = spawn->next;
                spawn->inst = inst;
                spawn->registered = true;

                crypto_mod_put(spawn->alg);

                spawn = next;
        }

        larval = __crypto_register_alg(&inst->alg);
        if (IS_ERR(larval))
                goto unlock;
        else if (larval)
                larval->test_started = true;

        hlist_add_head(&inst->list, &tmpl->instances);
        inst->tmpl = tmpl;

unlock:
        up_write(&crypto_alg_sem);

        err = PTR_ERR(larval);
        if (IS_ERR_OR_NULL(larval))
                goto err;

        crypto_wait_for_test(larval);
        err = 0;

err:
        return err;
}
EXPORT_SYMBOL_GPL(crypto_register_instance);

void crypto_unregister_instance(struct crypto_instance *inst)
{
        LIST_HEAD(list);

        down_write(&crypto_alg_sem);

        crypto_remove_spawns(&inst->alg, &list, NULL);
        crypto_remove_instance(inst, &list);

        up_write(&crypto_alg_sem);

        crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_instance);

int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
                      const char *name, u32 type, u32 mask)
{
        struct crypto_alg *alg;
        int err = -EAGAIN;

        if (WARN_ON_ONCE(inst == NULL))
                return -EINVAL;

        /* Allow the result of crypto_attr_alg_name() to be passed directly */
        if (IS_ERR(name))
                return PTR_ERR(name);

        alg = crypto_find_alg(name, spawn->frontend, type, mask);
        if (IS_ERR(alg))
                return PTR_ERR(alg);

        down_write(&crypto_alg_sem);
        if (!crypto_is_moribund(alg)) {
                list_add(&spawn->list, &alg->cra_users);
                spawn->alg = alg;
                spawn->mask = mask;
                spawn->next = inst->spawns;
                inst->spawns = spawn;
                inst->alg.cra_flags |=
                        (alg->cra_flags & CRYPTO_ALG_INHERITED_FLAGS);
                err = 0;
        }
        up_write(&crypto_alg_sem);
        if (err)
                crypto_mod_put(alg);
        return err;
}
EXPORT_SYMBOL_GPL(crypto_grab_spawn);

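/*
 * Usage sketch (illustrative, not from this file): templates rarely call
 * crypto_grab_spawn() directly; type-specific wrappers such as
 * crypto_grab_skcipher() pass the right frontend down to it.  Inside a
 * hypothetical template ->create(), with "mask" computed by
 * crypto_check_attr_type() below:
 *
 *      err = crypto_grab_skcipher(&ctx->spawn,
 *                                 skcipher_crypto_instance(inst),
 *                                 crypto_attr_alg_name(tb[1]), 0, mask);
 *
 * An ERR_PTR() returned by crypto_attr_alg_name() may be passed straight
 * through, as the IS_ERR(name) check above makes explicit.
 */
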
void crypto_drop_spawn(struct crypto_spawn *spawn)
{
        if (!spawn->alg) /* not yet initialized? */
                return;

        down_write(&crypto_alg_sem);
        if (!spawn->dead)
                list_del(&spawn->list);
        up_write(&crypto_alg_sem);

        if (!spawn->registered)
                crypto_mod_put(spawn->alg);
}
EXPORT_SYMBOL_GPL(crypto_drop_spawn);

static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
{
        struct crypto_alg *alg = ERR_PTR(-EAGAIN);
        struct crypto_alg *target;
        bool shoot = false;

        down_read(&crypto_alg_sem);
        if (!spawn->dead) {
                alg = spawn->alg;
                if (!crypto_mod_get(alg)) {
                        target = crypto_alg_get(alg);
                        shoot = true;
                        alg = ERR_PTR(-EAGAIN);
                }
        }
        up_read(&crypto_alg_sem);

        if (shoot) {
                crypto_shoot_alg(target);
                crypto_alg_put(target);
        }

        return alg;
}

struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
                                    u32 mask)
{
        struct crypto_alg *alg;
        struct crypto_tfm *tfm;

        alg = crypto_spawn_alg(spawn);
        if (IS_ERR(alg))
                return ERR_CAST(alg);

        tfm = ERR_PTR(-EINVAL);
        if (unlikely((alg->cra_flags ^ type) & mask))
                goto out_put_alg;

        tfm = __crypto_alloc_tfm(alg, type, mask);
        if (IS_ERR(tfm))
                goto out_put_alg;

        return tfm;

out_put_alg:
        crypto_mod_put(alg);
        return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm);

void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
{
        struct crypto_alg *alg;
        struct crypto_tfm *tfm;

        alg = crypto_spawn_alg(spawn);
        if (IS_ERR(alg))
                return ERR_CAST(alg);

        tfm = crypto_create_tfm(alg, spawn->frontend);
        if (IS_ERR(tfm))
                goto out_put_alg;

        return tfm;

out_put_alg:
        crypto_mod_put(alg);
        return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);

int crypto_register_notifier(struct notifier_block *nb)
{
        return blocking_notifier_chain_register(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_register_notifier);

int crypto_unregister_notifier(struct notifier_block *nb)
{
        return blocking_notifier_chain_unregister(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_unregister_notifier);

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
{
        struct rtattr *rta = tb[0];
        struct crypto_attr_type *algt;

        if (!rta)
                return ERR_PTR(-ENOENT);
        if (RTA_PAYLOAD(rta) < sizeof(*algt))
                return ERR_PTR(-EINVAL);
        if (rta->rta_type != CRYPTOA_TYPE)
                return ERR_PTR(-EINVAL);

        algt = RTA_DATA(rta);

        return algt;
}
EXPORT_SYMBOL_GPL(crypto_get_attr_type);

/**
 * crypto_check_attr_type() - check algorithm type and compute inherited mask
 * @tb: the template parameters
 * @type: the algorithm type the template would be instantiated as
 * @mask_ret: (output) the mask that should be passed to crypto_grab_*()
 *            to restrict the flags of any inner algorithms
 *
 * Validate that the algorithm type the user requested is compatible with the
 * one the template would actually be instantiated as.  E.g., if the user is
 * doing crypto_alloc_shash("cbc(aes)", ...), this would return an error because
 * the "cbc" template creates an "skcipher" algorithm, not an "shash" algorithm.
 *
 * Also compute the mask to use to restrict the flags of any inner algorithms.
 *
 * Return: 0 on success; -errno on failure
 */
int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret)
{
        struct crypto_attr_type *algt;

        algt = crypto_get_attr_type(tb);
        if (IS_ERR(algt))
                return PTR_ERR(algt);

        if ((algt->type ^ type) & algt->mask)
                return -EINVAL;

        *mask_ret = crypto_algt_inherited_mask(algt);
        return 0;
}
EXPORT_SYMBOL_GPL(crypto_check_attr_type);

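/*
 * Usage sketch (illustrative): a template that instantiates skciphers would
 * typically start its ->create() callback with:
 *
 *      u32 mask;
 *      int err;
 *
 *      err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
 *      if (err)
 *              return err;
 */
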
const char *crypto_attr_alg_name(struct rtattr *rta)
{
        struct crypto_attr_alg *alga;

        if (!rta)
                return ERR_PTR(-ENOENT);
        if (RTA_PAYLOAD(rta) < sizeof(*alga))
                return ERR_PTR(-EINVAL);
        if (rta->rta_type != CRYPTOA_ALG)
                return ERR_PTR(-EINVAL);

        alga = RTA_DATA(rta);
        alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;

        return alga->name;
}
EXPORT_SYMBOL_GPL(crypto_attr_alg_name);

int crypto_inst_setname(struct crypto_instance *inst, const char *name,
                        struct crypto_alg *alg)
{
        if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
                     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
                return -ENAMETOOLONG;

        if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
                     name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
                return -ENAMETOOLONG;

        return 0;
}
EXPORT_SYMBOL_GPL(crypto_inst_setname);

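/*
 * Worked example (illustrative): wrapping an algorithm whose cra_name is
 * "aes" and whose cra_driver_name is "aes-generic" with the template name
 * "cbc":
 *
 *      crypto_inst_setname(inst, "cbc", alg);
 *      // inst->alg.cra_name        == "cbc(aes)"
 *      // inst->alg.cra_driver_name == "cbc(aes-generic)"
 */
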
void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
{
        INIT_LIST_HEAD(&queue->list);
        queue->backlog = &queue->list;
        queue->qlen = 0;
        queue->max_qlen = max_qlen;
}
EXPORT_SYMBOL_GPL(crypto_init_queue);

int crypto_enqueue_request(struct crypto_queue *queue,
                           struct crypto_async_request *request)
{
        int err = -EINPROGRESS;

        if (unlikely(queue->qlen >= queue->max_qlen)) {
                if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
                        err = -ENOSPC;
                        goto out;
                }
                err = -EBUSY;
                if (queue->backlog == &queue->list)
                        queue->backlog = &request->list;
        }

        queue->qlen++;
        list_add_tail(&request->list, &queue->list);

out:
        return err;
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request);

void crypto_enqueue_request_head(struct crypto_queue *queue,
                                 struct crypto_async_request *request)
{
        queue->qlen++;
        list_add(&request->list, &queue->list);
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request_head);

struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
{
        struct list_head *request;

        if (unlikely(!queue->qlen))
                return NULL;

        queue->qlen--;

        if (queue->backlog != &queue->list)
                queue->backlog = queue->backlog->next;

        request = queue->list.next;
        list_del(request);

        return list_entry(request, struct crypto_async_request, list);
}
EXPORT_SYMBOL_GPL(crypto_dequeue_request);

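/*
 * Usage sketch (illustrative): a driver typically feeds its hardware from a
 * crypto_queue under its own lock, letting MAY_BACKLOG requests pile up when
 * the queue is full.  The "priv" structure and lock are hypothetical:
 *
 *      crypto_init_queue(&priv->queue, 64);
 *
 *      // submission path
 *      spin_lock_bh(&priv->lock);
 *      ret = crypto_enqueue_request(&priv->queue, &req->base);
 *      spin_unlock_bh(&priv->lock);
 *      // ret: -EINPROGRESS (queued), -EBUSY (backlogged), -ENOSPC (full)
 *
 *      // completion/worker path
 *      spin_lock_bh(&priv->lock);
 *      backlog = crypto_get_backlog(&priv->queue);
 *      async_req = crypto_dequeue_request(&priv->queue);
 *      spin_unlock_bh(&priv->lock);
 *      if (backlog)
 *              backlog->complete(backlog, -EINPROGRESS);
 */
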
static inline void crypto_inc_byte(u8 *a, unsigned int size)
{
        u8 *b = (a + size);
        u8 c;

        for (; size; size--) {
                c = *--b + 1;
                *b = c;
                if (c)
                        break;
        }
}

void crypto_inc(u8 *a, unsigned int size)
{
        __be32 *b = (__be32 *)(a + size);
        u32 c;

        if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
            IS_ALIGNED((unsigned long)b, __alignof__(*b)))
                for (; size >= 4; size -= 4) {
                        c = be32_to_cpu(*--b) + 1;
                        *b = cpu_to_be32(c);
                        if (likely(c))
                                return;
                }

        crypto_inc_byte(a, size);
}
EXPORT_SYMBOL_GPL(crypto_inc);

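/*
 * Example (illustrative): crypto_inc() treats the buffer as one big-endian
 * counter, as used for CTR-mode IVs.  Incrementing a 16-byte block whose
 * last four bytes are 0xff carries into the preceding word:
 *
 *      u8 ctr[16] = { [12] = 0xff, [13] = 0xff, [14] = 0xff, [15] = 0xff };
 *
 *      crypto_inc(ctr, 16);
 *      // now ctr[11] == 0x01 and ctr[12..15] == 0x00
 */
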
void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int len)
{
        int relalign = 0;

        if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) {
                int size = sizeof(unsigned long);
                int d = (((unsigned long)dst ^ (unsigned long)src1) |
                         ((unsigned long)dst ^ (unsigned long)src2)) &
                        (size - 1);

                relalign = d ? 1 << __ffs(d) : size;

                /*
                 * If we care about alignment, process as many bytes as
                 * needed to advance dst and src to values whose alignments
                 * equal their relative alignment. This will allow us to
                 * process the remainder of the input using optimal strides.
                 */
                while (((unsigned long)dst & (relalign - 1)) && len > 0) {
                        *dst++ = *src1++ ^ *src2++;
                        len--;
                }
        }

        while (IS_ENABLED(CONFIG_64BIT) && len >= 8 && !(relalign & 7)) {
                *(u64 *)dst = *(u64 *)src1 ^ *(u64 *)src2;
                dst += 8;
                src1 += 8;
                src2 += 8;
                len -= 8;
        }

        while (len >= 4 && !(relalign & 3)) {
                *(u32 *)dst = *(u32 *)src1 ^ *(u32 *)src2;
                dst += 4;
                src1 += 4;
                src2 += 4;
                len -= 4;
        }

        while (len >= 2 && !(relalign & 1)) {
                *(u16 *)dst = *(u16 *)src1 ^ *(u16 *)src2;
                dst += 2;
                src1 += 2;
                src2 += 2;
                len -= 2;
        }

        while (len--)
                *dst++ = *src1++ ^ *src2++;
}
EXPORT_SYMBOL_GPL(__crypto_xor);

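/*
 * Usage sketch (illustrative): callers normally go through the crypto_xor()
 * and crypto_xor_cpy() inline wrappers in <crypto/algapi.h>, which fall back
 * to this function when the compiler cannot prove size and alignment at
 * build time, e.g. whitening a block in place:
 *
 *      crypto_xor(block, mask, AES_BLOCK_SIZE);       // block ^= mask
 */
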
unsigned int crypto_alg_extsize(struct crypto_alg *alg)
{
        return alg->cra_ctxsize +
               (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
}
EXPORT_SYMBOL_GPL(crypto_alg_extsize);

int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
                        u32 type, u32 mask)
{
        int ret = 0;
        struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask);

        if (!IS_ERR(alg)) {
                crypto_mod_put(alg);
                ret = 1;
        }

        return ret;
}
EXPORT_SYMBOL_GPL(crypto_type_has_alg);

#ifdef CONFIG_CRYPTO_STATS
void crypto_stats_init(struct crypto_alg *alg)
{
        memset(&alg->stats, 0, sizeof(alg->stats));
}
EXPORT_SYMBOL_GPL(crypto_stats_init);

void crypto_stats_get(struct crypto_alg *alg)
{
        crypto_alg_get(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_get);

void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg,
                               int ret)
{
        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
                atomic64_inc(&alg->stats.aead.err_cnt);
        } else {
                atomic64_inc(&alg->stats.aead.encrypt_cnt);
                atomic64_add(cryptlen, &alg->stats.aead.encrypt_tlen);
        }
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_encrypt);

void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg,
                               int ret)
{
        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
                atomic64_inc(&alg->stats.aead.err_cnt);
        } else {
                atomic64_inc(&alg->stats.aead.decrypt_cnt);
                atomic64_add(cryptlen, &alg->stats.aead.decrypt_tlen);
        }
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_decrypt);

void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret,
                                   struct crypto_alg *alg)
{
        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
                atomic64_inc(&alg->stats.akcipher.err_cnt);
        } else {
                atomic64_inc(&alg->stats.akcipher.encrypt_cnt);
                atomic64_add(src_len, &alg->stats.akcipher.encrypt_tlen);
        }
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_encrypt);

void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret,
                                   struct crypto_alg *alg)
{
        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
                atomic64_inc(&alg->stats.akcipher.err_cnt);
        } else {
                atomic64_inc(&alg->stats.akcipher.decrypt_cnt);
                atomic64_add(src_len, &alg->stats.akcipher.decrypt_tlen);
        }
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_decrypt);

void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
{
        if (ret && ret != -EINPROGRESS && ret != -EBUSY)
                atomic64_inc(&alg->stats.akcipher.err_cnt);
        else
                atomic64_inc(&alg->stats.akcipher.sign_cnt);
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_sign);

void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
{
        if (ret && ret != -EINPROGRESS && ret != -EBUSY)
                atomic64_inc(&alg->stats.akcipher.err_cnt);
        else
                atomic64_inc(&alg->stats.akcipher.verify_cnt);
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_verify);

void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
{
        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
                atomic64_inc(&alg->stats.compress.err_cnt);
        } else {
                atomic64_inc(&alg->stats.compress.compress_cnt);
                atomic64_add(slen, &alg->stats.compress.compress_tlen);
        }
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_compress);

void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
{
        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
                atomic64_inc(&alg->stats.compress.err_cnt);
        } else {
                atomic64_inc(&alg->stats.compress.decompress_cnt);
                atomic64_add(slen, &alg->stats.compress.decompress_tlen);
        }
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_decompress);

void crypto_stats_ahash_update(unsigned int nbytes, int ret,
                               struct crypto_alg *alg)
{
        if (ret && ret != -EINPROGRESS && ret != -EBUSY)
                atomic64_inc(&alg->stats.hash.err_cnt);
        else
                atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_update);

void crypto_stats_ahash_final(unsigned int nbytes, int ret,
                              struct crypto_alg *alg)
{
        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
                atomic64_inc(&alg->stats.hash.err_cnt);
        } else {
                atomic64_inc(&alg->stats.hash.hash_cnt);
                atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
        }
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_final);

void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
{
        if (ret)
                atomic64_inc(&alg->stats.kpp.err_cnt);
        else
                atomic64_inc(&alg->stats.kpp.setsecret_cnt);
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_set_secret);

void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
{
        if (ret)
                atomic64_inc(&alg->stats.kpp.err_cnt);
        else
                atomic64_inc(&alg->stats.kpp.generate_public_key_cnt);
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_generate_public_key);

void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
{
        if (ret)
                atomic64_inc(&alg->stats.kpp.err_cnt);
        else
                atomic64_inc(&alg->stats.kpp.compute_shared_secret_cnt);
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_compute_shared_secret);

void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
{
        if (ret && ret != -EINPROGRESS && ret != -EBUSY)
                atomic64_inc(&alg->stats.rng.err_cnt);
        else
                atomic64_inc(&alg->stats.rng.seed_cnt);
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_seed);

void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen,
                               int ret)
{
        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
                atomic64_inc(&alg->stats.rng.err_cnt);
        } else {
                atomic64_inc(&alg->stats.rng.generate_cnt);
                atomic64_add(dlen, &alg->stats.rng.generate_tlen);
        }
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_generate);

void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret,
                                   struct crypto_alg *alg)
{
        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
                atomic64_inc(&alg->stats.cipher.err_cnt);
        } else {
                atomic64_inc(&alg->stats.cipher.encrypt_cnt);
                atomic64_add(cryptlen, &alg->stats.cipher.encrypt_tlen);
        }
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_encrypt);

void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret,
                                   struct crypto_alg *alg)
{
        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
                atomic64_inc(&alg->stats.cipher.err_cnt);
        } else {
                atomic64_inc(&alg->stats.cipher.decrypt_cnt);
                atomic64_add(cryptlen, &alg->stats.cipher.decrypt_tlen);
        }
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_decrypt);
#endif

static void __init crypto_start_tests(void)
{
        for (;;) {
                struct crypto_larval *larval = NULL;
                struct crypto_alg *q;

                down_write(&crypto_alg_sem);

                list_for_each_entry(q, &crypto_alg_list, cra_list) {
                        struct crypto_larval *l;

                        if (!crypto_is_larval(q))
                                continue;

                        l = (void *)q;

                        if (!crypto_is_test_larval(l))
                                continue;

                        if (l->test_started)
                                continue;

                        l->test_started = true;
                        larval = l;
                        break;
                }

                up_write(&crypto_alg_sem);

                if (!larval)
                        break;

                crypto_wait_for_test(larval);
        }

        static_branch_enable(&crypto_boot_test_finished);
}

static int __init crypto_algapi_init(void)
{
        crypto_start_tests();
        return 0;
}

static void __exit crypto_algapi_exit(void)
{
}

/*
 * We run this at late_initcall so that all the built-in algorithms
 * have had a chance to register themselves first.
 */
late_initcall(crypto_algapi_init);
module_exit(crypto_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Cryptographic algorithms API");