// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Scatterlist Cryptographic API.
 *
 * [intervening header lines missing from this extraction]
 * and Nettle, by Niels Möller.
 */
13 #include <linux/err.h>
14 #include <linux/errno.h>
15 #include <linux/jump_label.h>
16 #include <linux/kernel.h>
17 #include <linux/kmod.h>
18 #include <linux/module.h>
19 #include <linux/param.h>
20 #include <linux/sched/signal.h>
21 #include <linux/slab.h>
22 #include <linux/string.h>
23 #include <linux/completion.h>
/*
 * Global registry of all registered crypto algorithms. All traversal and
 * mutation of crypto_alg_list is serialized by crypto_alg_sem (see the
 * down_read()/down_write() calls in the lookup/registration paths below).
 * crypto_chain is the blocking notifier chain used to talk to cryptomgr
 * (see crypto_probing_notify() below). All three are exported GPL-only.
 */
26 LIST_HEAD(crypto_alg_list);
27 EXPORT_SYMBOL_GPL(crypto_alg_list);
28 DECLARE_RWSEM(crypto_alg_sem);
29 EXPORT_SYMBOL_GPL(crypto_alg_sem);
31 BLOCKING_NOTIFIER_HEAD(crypto_chain);
32 EXPORT_SYMBOL_GPL(crypto_chain);
/*
 * Static key that flips once boot-time self-tests have finished; only
 * defined when the API is built in and self-tests are not disabled.
 * NOTE(review): the matching #endif and the rest of the forward
 * declaration of crypto_alg_lookup() were dropped by this extraction.
 */
34 #if IS_BUILTIN(CONFIG_CRYPTO_ALGAPI) && \
35 !IS_ENABLED(CONFIG_CRYPTO_MANAGER_DISABLE_TESTS)
36 DEFINE_STATIC_KEY_FALSE(__crypto_boot_test_finished);
39 static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);
40 static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
/*
 * crypto_mod_get - pin @alg and the module that owns it.
 * Returns @alg with an extra reference on success, or NULL if the owning
 * module is being unloaded (try_module_get() failed).
 * NOTE(review): function braces were dropped by this extraction.
 */
43 struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
45 return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
47 EXPORT_SYMBOL_GPL(crypto_mod_get);
/*
 * crypto_mod_put - drop the references taken by crypto_mod_get().
 * The owning module is cached locally because, presumably, dropping the
 * alg reference may free @alg before module_put() runs — the lines that
 * actually drop both references are missing from this extraction; confirm
 * against upstream crypto/api.c.
 */
49 void crypto_mod_put(struct crypto_alg *alg)
51 struct module *module = alg->cra_module;
56 EXPORT_SYMBOL_GPL(crypto_mod_put);
/*
 * __crypto_alg_lookup - best-match search of crypto_alg_list.
 * Caller must hold crypto_alg_sem (all callers below take it first).
 * Matching rules visible here:
 *  - dead/moribund entries are skipped;
 *  - (cra_flags ^ type) & mask must be zero (type bits agree under mask);
 *  - an exact cra_driver_name match wins outright; otherwise the
 *    highest-priority cra_name ("fuzzy") match is kept.
 * Each candidate is pinned with crypto_mod_get() before being kept.
 * NOTE(review): braces, locals (exact/fuzzy/best) and the return are
 * missing from this extraction.
 */
58 static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
61 struct crypto_alg *q, *alg = NULL;
64 list_for_each_entry(q, &crypto_alg_list, cra_list) {
67 if (crypto_is_moribund(q))
70 if ((q->cra_flags ^ type) & mask)
73 exact = !strcmp(q->cra_driver_name, name);
74 fuzzy = !strcmp(q->cra_name, name);
75 if (!exact && !(fuzzy && q->cra_priority > best))
78 if (unlikely(!crypto_mod_get(q)))
81 best = q->cra_priority;
/*
 * crypto_larval_destroy - cra_destroy callback for larval placeholders.
 * Drops the reference on the "adult" (real) algorithm if one was attached,
 * then (in lines missing here) frees the larval itself. BUG_ON guards
 * against being called on a non-larval alg.
 */
93 static void crypto_larval_destroy(struct crypto_alg *alg)
95 struct crypto_larval *larval = (void *)alg;
97 BUG_ON(!crypto_is_larval(alg));
98 if (!IS_ERR_OR_NULL(larval->adult))
99 crypto_mod_put(larval->adult);
/*
 * crypto_larval_alloc - allocate a larval placeholder for @name.
 * The larval carries CRYPTO_ALG_LARVAL plus the requested type bits
 * (the type-mask fold on the line below widens unrequested type bits),
 * priority -1 so it never wins a priority comparison, and a completion
 * that waiters block on until the real algorithm arrives.
 * Returns ERR_PTR(-ENOMEM) on allocation failure.
 * NOTE(review): braces, larval->mask assignment and the return statement
 * are missing from this extraction.
 */
103 struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
105 struct crypto_larval *larval;
107 larval = kzalloc(sizeof(*larval), GFP_KERNEL);
109 return ERR_PTR(-ENOMEM);
111 type &= ~CRYPTO_ALG_TYPE_MASK | (mask ?: CRYPTO_ALG_TYPE_MASK);
114 larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
115 larval->alg.cra_priority = -1;
116 larval->alg.cra_destroy = crypto_larval_destroy;
118 strscpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
119 init_completion(&larval->completion);
123 EXPORT_SYMBOL_GPL(crypto_larval_alloc);
/*
 * crypto_larval_add - register a larval for @name unless one (or the real
 * algorithm) already exists.
 * Refcount starts at 2: one reference for the list, one for the caller.
 * Under crypto_alg_sem the list is re-checked; if somebody else won the
 * race (alg != &larval->alg) the visible code waits on their larval
 * instead. The losing larval's cleanup is in lines missing here.
 */
125 static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
128 struct crypto_alg *alg;
129 struct crypto_larval *larval;
131 larval = crypto_larval_alloc(name, type, mask);
133 return ERR_CAST(larval);
135 refcount_set(&larval->alg.cra_refcnt, 2);
137 down_write(&crypto_alg_sem);
138 alg = __crypto_alg_lookup(name, type, mask);
141 list_add(&alg->cra_list, &crypto_alg_list);
143 up_write(&crypto_alg_sem);
145 if (alg != &larval->alg) {
147 if (crypto_is_larval(alg))
148 alg = crypto_larval_wait(alg);
/*
 * crypto_larval_kill - remove a larval from the registry and wake waiters.
 * Unlinks under crypto_alg_sem (list_del_init makes the unlink idempotent),
 * completes the larval's completion so every crypto_larval_wait() caller
 * unblocks, then drops the list's reference.
 */
154 static void crypto_larval_kill(struct crypto_larval *larval)
158 down_write(&crypto_alg_sem);
159 unlinked = list_empty(&larval->alg.cra_list);
161 list_del_init(&larval->alg.cra_list);
162 up_write(&crypto_alg_sem);
167 complete_all(&larval->completion);
168 crypto_alg_put(&larval->alg);
/*
 * crypto_schedule_test - ask cryptomgr to self-test larval->adult.
 * Sends CRYPTO_MSG_ALG_REGISTER down the probing notifier chain; a
 * registered manager must consume it (NOTIFY_STOP), anything else is a
 * kernel bug, hence the WARN_ON_ONCE.
 */
171 void crypto_schedule_test(struct crypto_larval *larval)
175 err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
176 WARN_ON_ONCE(err != NOTIFY_STOP);
178 EXPORT_SYMBOL_GPL(crypto_schedule_test);
/*
 * crypto_start_test - kick off the self-test for a test larval exactly once.
 * Classic double-checked pattern: test_started is read lockless first,
 * then re-checked under crypto_alg_sem before being set, so concurrent
 * waiters schedule the test only once. Non-test larvals return early.
 */
180 static void crypto_start_test(struct crypto_larval *larval)
182 if (!crypto_is_test_larval(larval))
185 if (larval->test_started)
188 down_write(&crypto_alg_sem);
189 if (larval->test_started) {
190 up_write(&crypto_alg_sem);
194 larval->test_started = true;
195 up_write(&crypto_alg_sem);
197 crypto_schedule_test(larval);
/*
 * crypto_larval_wait - block until a larval matures into a real algorithm.
 * Waits killably with a 60-second timeout:
 *  - fatal signal  -> ERR_PTR(-EINTR);
 *  - timeout       -> kill test larvals, ERR_PTR(-ETIMEDOUT);
 *  - completion    -> re-look-up the real algorithm by name with the
 *    LARVAL/DEAD bits stripped from the type.
 * A matured alg that is still untested (test larval), FIPS-internal, or
 * whose module cannot be pinned yields -EAGAIN. The larval's own
 * reference is always dropped, and the wait recurses if the lookup
 * returned yet another larval.
 * NOTE(review): locals (time_left, type, mask) and several branch bodies
 * are missing from this extraction.
 */
200 static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
202 struct crypto_larval *larval;
206 larval = container_of(alg, struct crypto_larval, alg);
208 if (!crypto_boot_test_finished())
209 crypto_start_test(larval);
211 time_left = wait_for_completion_killable_timeout(
212 &larval->completion, 60 * HZ);
216 alg = ERR_PTR(-EINTR);
217 else if (!time_left) {
218 if (crypto_is_test_larval(larval))
219 crypto_larval_kill(larval);
220 alg = ERR_PTR(-ETIMEDOUT);
226 type = alg->cra_flags & ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
228 alg = crypto_alg_lookup(alg->cra_name, type, mask) ?:
230 } else if (IS_ERR(alg))
232 else if (crypto_is_test_larval(larval) &&
233 !(alg->cra_flags & CRYPTO_ALG_TESTED))
234 alg = ERR_PTR(-EAGAIN);
235 else if (alg->cra_flags & CRYPTO_ALG_FIPS_INTERNAL)
236 alg = ERR_PTR(-EAGAIN);
237 else if (!crypto_mod_get(alg))
238 alg = ERR_PTR(-EAGAIN);
239 crypto_mod_put(&larval->alg);
241 if (!IS_ERR(alg) && crypto_is_larval(alg))
/*
 * crypto_alg_lookup - registry lookup with TESTED / FIPS-internal policy.
 * Unless the caller explicitly asked about CRYPTO_ALG_TESTED, the bit is
 * forced into both type and mask so only tested algorithms match.
 * FIPS_INTERNAL is masked out of the primary search; when the caller did
 * not request internal algs, a real (non-larval) hit whose flags then
 * disagree under the caller's mask is an internal-only implementation:
 * a second lookup decides between "disallowed in FIPS mode" (-ENOENT)
 * and -ELIBBAD. All of this runs under a read-lock on crypto_alg_sem.
 * NOTE(review): locals (test) and several branch bodies are missing from
 * this extraction.
 */
247 static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
250 const u32 fips = CRYPTO_ALG_FIPS_INTERNAL;
251 struct crypto_alg *alg;
254 if (!((type | mask) & CRYPTO_ALG_TESTED))
255 test |= CRYPTO_ALG_TESTED;
257 down_read(&crypto_alg_sem);
258 alg = __crypto_alg_lookup(name, (type | test) & ~fips,
259 (mask | test) & ~fips);
261 if (((type | mask) ^ fips) & fips)
265 if (!crypto_is_larval(alg) &&
266 ((type ^ alg->cra_flags) & mask)) {
267 /* Algorithm is disallowed in FIPS mode. */
269 alg = ERR_PTR(-ENOENT);
272 alg = __crypto_alg_lookup(name, type, mask);
273 if (alg && !crypto_is_larval(alg)) {
276 alg = ERR_PTR(-ELIBBAD);
279 up_read(&crypto_alg_sem);
/*
 * crypto_larval_lookup - lookup with module autoloading and larval creation.
 * Strips LARVAL/DEAD from type and mask, then:
 *  1. try the registry;
 *  2. on miss (unless CRYPTO_NOLOAD) request_module("crypto-<name>") and,
 *     when the caller did not ask for a fallback-needing implementation,
 *     also "crypto-<name>-all", then retry the lookup;
 *  3. wait on any larval found;
 *  4. otherwise create a fresh larval (unless the caller insisted on a
 *     TESTED algorithm, in which case -ENOENT).
 * NOTE(review): the empty-name guard's condition and some braces are
 * missing from this extraction.
 */
284 static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
287 struct crypto_alg *alg;
290 return ERR_PTR(-ENOENT);
292 type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
293 mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
295 alg = crypto_alg_lookup(name, type, mask);
296 if (!alg && !(mask & CRYPTO_NOLOAD)) {
297 request_module("crypto-%s", name);
299 if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
300 CRYPTO_ALG_NEED_FALLBACK))
301 request_module("crypto-%s-all", name);
303 alg = crypto_alg_lookup(name, type, mask);
306 if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
307 alg = crypto_larval_wait(alg);
310 else if (!(mask & CRYPTO_ALG_TESTED))
311 alg = crypto_larval_add(name, type, mask);
313 alg = ERR_PTR(-ENOENT);
/*
 * crypto_probing_notify - notify the crypto chain, loading cryptomgr if
 * nobody answered. If the first call returns NOTIFY_DONE (no listener
 * consumed the event), request_module("cryptomgr") and retry once.
 * Returns the notifier chain's verdict (e.g. NOTIFY_STOP on success).
 */
318 int crypto_probing_notify(unsigned long val, void *v)
322 ok = blocking_notifier_call_chain(&crypto_chain, val, v);
323 if (ok == NOTIFY_DONE) {
324 request_module("cryptomgr");
325 ok = blocking_notifier_call_chain(&crypto_chain, val, v);
330 EXPORT_SYMBOL_GPL(crypto_probing_notify);
/*
 * crypto_alg_mod_lookup - top-level algorithm lookup used by allocators.
 * Forces CRYPTO_ALG_INTERNAL into the mask unless the caller opted into
 * internal ciphers (comment below explains the contract). If the lookup
 * returns a larval, ask cryptomgr to construct the algorithm
 * (CRYPTO_MSG_ALG_REQUEST): NOTIFY_STOP means wait for the result,
 * anything else means no manager can help (-ENOENT). The larval is
 * always killed before returning.
 */
332 struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
334 struct crypto_alg *alg;
335 struct crypto_alg *larval;
339 * If the internal flag is set for a cipher, require a caller to
340 * invoke the cipher with the internal flag to use that cipher.
341 * Also, if a caller wants to allocate a cipher that may or may
342 * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
343 * !(mask & CRYPTO_ALG_INTERNAL).
345 if (!((type | mask) & CRYPTO_ALG_INTERNAL))
346 mask |= CRYPTO_ALG_INTERNAL;
348 larval = crypto_larval_lookup(name, type, mask);
349 if (IS_ERR(larval) || !crypto_is_larval(larval))
352 ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);
354 if (ok == NOTIFY_STOP)
355 alg = crypto_larval_wait(larval);
357 crypto_mod_put(larval);
358 alg = ERR_PTR(-ENOENT);
360 crypto_larval_kill(container_of(larval, struct crypto_larval, alg));
363 EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);
/*
 * crypto_exit_ops - run the tfm's exit hook for typed algorithms.
 * Only calls tfm->exit when the algorithm has a cra_type and the hook is
 * set; the call itself is on a line missing from this extraction.
 */
365 static void crypto_exit_ops(struct crypto_tfm *tfm)
367 const struct crypto_type *type = tfm->__crt_alg->cra_type;
369 if (type && tfm->exit)
/*
 * crypto_ctxsize - per-transform context size for @alg.
 * Starts from alignment padding derived from cra_alignmask; if the
 * algorithm has a cra_type, that type's ctxsize() decides. Otherwise the
 * legacy switch on CRYPTO_ALG_TYPE_MASK picks cipher or compress sizing.
 * NOTE(review): the default case and return are missing from this
 * extraction.
 */
373 static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
375 const struct crypto_type *type_obj = alg->cra_type;
378 len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
380 return len + type_obj->ctxsize(alg, type, mask);
382 switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
386 case CRYPTO_ALG_TYPE_CIPHER:
387 len += crypto_cipher_ctxsize(alg);
390 case CRYPTO_ALG_TYPE_COMPRESS:
391 len += crypto_compress_ctxsize(alg);
/*
 * crypto_shoot_alg - mark @alg as dying after an init failure.
 * Sets CRYPTO_ALG_DYING under the write lock so concurrent lookups see a
 * consistent flag state.
 */
398 void crypto_shoot_alg(struct crypto_alg *alg)
400 down_write(&crypto_alg_sem);
401 alg->cra_flags |= CRYPTO_ALG_DYING;
402 up_write(&crypto_alg_sem);
404 EXPORT_SYMBOL_GPL(crypto_shoot_alg);
/*
 * __crypto_alloc_tfmgfp - allocate and initialize a bare crypto_tfm.
 * Allocates sizeof(tfm) + crypto_ctxsize() with the caller's gfp flags,
 * binds the tfm to @alg with refcount 1, then runs cra_init (only when no
 * exit hook was installed). On cra_init failure the visible error path
 * runs crypto_exit_ops() and crypto_shoot_alg() — the free/return lines
 * are missing from this extraction.
 *
 * __crypto_alloc_tfm is the GFP_KERNEL convenience wrapper.
 */
406 struct crypto_tfm *__crypto_alloc_tfmgfp(struct crypto_alg *alg, u32 type,
409 struct crypto_tfm *tfm;
410 unsigned int tfm_size;
413 tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
414 tfm = kzalloc(tfm_size, gfp);
418 tfm->__crt_alg = alg;
419 refcount_set(&tfm->refcnt, 1);
421 if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
422 goto cra_init_failed;
427 crypto_exit_ops(tfm);
429 crypto_shoot_alg(alg);
436 EXPORT_SYMBOL_GPL(__crypto_alloc_tfmgfp);
438 struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
441 return __crypto_alloc_tfmgfp(alg, type, mask, GFP_KERNEL);
443 EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);
/**
446 * crypto_alloc_base - Locate algorithm and allocate transform
447 * @alg_name: Name of algorithm
448 * @type: Type of algorithm
449 * @mask: Mask for type comparison
451 * This function should not be used by new algorithm types.
452 * Please use crypto_alloc_tfm instead.
454 * crypto_alloc_base() will first attempt to locate an already loaded
455 * algorithm. If that fails and the kernel supports dynamically loadable
456 * modules, it will then attempt to load a module of the same name or
457 * alias. If that fails it will send a query to any loaded crypto manager
458 * to construct an algorithm on the fly. A refcount is grabbed on the
459 * algorithm which is then associated with the new transform.
461 * The returned transform is of a non-determinate type. Most people
462 * should use one of the more specific allocation functions such as
463 * crypto_alloc_skcipher().
465 * In case of error the return value is an error pointer.
 */
/*
 * NOTE(review): the retry loop around lookup + __crypto_alloc_tfm (and
 * its -EAGAIN / fatal-signal handling) is only partially visible; the
 * loop construct and error-unwind lines are missing from this extraction.
 */
467 struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
469 struct crypto_tfm *tfm;
473 struct crypto_alg *alg;
475 alg = crypto_alg_mod_lookup(alg_name, type, mask);
481 tfm = __crypto_alloc_tfm(alg, type, mask);
491 if (fatal_signal_pending(current)) {
499 EXPORT_SYMBOL_GPL(crypto_alloc_base);
/*
 * crypto_alloc_tfmmem - allocate the memory slab for a frontend tfm.
 * Layout: [frontend->tfmsize bytes of frontend data][struct crypto_tfm]
 * [frontend->extsize(alg) bytes of context]; the crypto_tfm sits at
 * mem + tfmsize. Allocated zeroed on @node, refcount starts at 1.
 * Returns ERR_PTR(-ENOMEM) on failure; the success return (of mem) is on
 * a line missing from this extraction.
 */
501 static void *crypto_alloc_tfmmem(struct crypto_alg *alg,
502 const struct crypto_type *frontend, int node,
505 struct crypto_tfm *tfm;
506 unsigned int tfmsize;
510 tfmsize = frontend->tfmsize;
511 total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);
513 mem = kzalloc_node(total, gfp, node);
515 return ERR_PTR(-ENOMEM);
517 tfm = (struct crypto_tfm *)(mem + tfmsize);
518 tfm->__crt_alg = alg;
520 refcount_set(&tfm->refcnt, 1);
/*
 * crypto_create_tfm_node - allocate a frontend tfm on @node and run its
 * init hooks: first frontend->init_tfm(), then cra_init (only when no
 * exit hook was installed). Failure of cra_init unwinds through
 * crypto_exit_ops() and marks the algorithm dying via crypto_shoot_alg();
 * the free/return lines of that path are missing from this extraction.
 * Returns the slab start (mem) or an ERR_PTR.
 */
525 void *crypto_create_tfm_node(struct crypto_alg *alg,
526 const struct crypto_type *frontend,
529 struct crypto_tfm *tfm;
533 mem = crypto_alloc_tfmmem(alg, frontend, node, GFP_KERNEL);
537 tfm = (struct crypto_tfm *)(mem + frontend->tfmsize);
539 err = frontend->init_tfm(tfm);
543 if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
544 goto cra_init_failed;
549 crypto_exit_ops(tfm);
552 crypto_shoot_alg(alg);
558 EXPORT_SYMBOL_GPL(crypto_create_tfm_node);
/*
 * crypto_clone_tfm - duplicate an existing tfm (atomic context allowed,
 * hence GFP_ATOMIC). Pins the source tfm's algorithm first (-ESTALE if
 * its module is going away), allocates a fresh slab on the same node,
 * and copies crt_flags and the exit hook from the original. The success
 * return and error unwind are on lines missing from this extraction.
 */
560 void *crypto_clone_tfm(const struct crypto_type *frontend,
561 struct crypto_tfm *otfm)
563 struct crypto_alg *alg = otfm->__crt_alg;
564 struct crypto_tfm *tfm;
567 mem = ERR_PTR(-ESTALE);
568 if (unlikely(!crypto_mod_get(alg)))
571 mem = crypto_alloc_tfmmem(alg, frontend, otfm->node, GFP_ATOMIC);
577 tfm = (struct crypto_tfm *)(mem + frontend->tfmsize);
578 tfm->crt_flags = otfm->crt_flags;
579 tfm->exit = otfm->exit;
584 EXPORT_SYMBOL_GPL(crypto_clone_tfm);
/*
 * crypto_find_alg - frontend-aware algorithm lookup.
 * Clears the frontend's reserved bits from the caller's type/mask, ORs in
 * the frontend's own type/maskset, then defers to crypto_alg_mod_lookup().
 * NOTE(review): the NULL-frontend handling visible upstream is on lines
 * missing from this extraction.
 */
586 struct crypto_alg *crypto_find_alg(const char *alg_name,
587 const struct crypto_type *frontend,
591 type &= frontend->maskclear;
592 mask &= frontend->maskclear;
593 type |= frontend->type;
594 mask |= frontend->maskset;
597 return crypto_alg_mod_lookup(alg_name, type, mask);
599 EXPORT_SYMBOL_GPL(crypto_find_alg);
/**
602 * crypto_alloc_tfm_node - Locate algorithm and allocate transform
603 * @alg_name: Name of algorithm
604 * @frontend: Frontend algorithm type
605 * @type: Type of algorithm
606 * @mask: Mask for type comparison
607 * @node: NUMA node in which users desire to put requests, if node is
608 * NUMA_NO_NODE, it means users have no special requirement.
610 * crypto_alloc_tfm() will first attempt to locate an already loaded
611 * algorithm. If that fails and the kernel supports dynamically loadable
612 * modules, it will then attempt to load a module of the same name or
613 * alias. If that fails it will send a query to any loaded crypto manager
614 * to construct an algorithm on the fly. A refcount is grabbed on the
615 * algorithm which is then associated with the new transform.
617 * The returned transform is of a non-determinate type. Most people
618 * should use one of the more specific allocation functions such as
619 * crypto_alloc_skcipher().
621 * In case of error the return value is an error pointer.
 */
/*
 * NOTE(review): as with crypto_alloc_base(), the retry loop and its
 * -EAGAIN / fatal-signal unwind are only partially visible; the loop
 * construct and cleanup lines are missing from this extraction.
 */
624 void *crypto_alloc_tfm_node(const char *alg_name,
625 const struct crypto_type *frontend, u32 type, u32 mask,
632 struct crypto_alg *alg;
634 alg = crypto_find_alg(alg_name, frontend, type, mask);
640 tfm = crypto_create_tfm_node(alg, frontend, node);
650 if (fatal_signal_pending(current)) {
658 EXPORT_SYMBOL_GPL(crypto_alloc_tfm_node);
/**
661 * crypto_destroy_tfm - Free crypto transform
662 * @mem: Start of tfm slab
663 * @tfm: Transform to free
665 * This function frees up the transform and any associated resources,
666 * then drops the refcount on the associated algorithm.
 */
/*
 * Implementation notes: no-op on NULL/ERR_PTR slab; only the last
 * reference (refcount_dec_and_test) tears down. cra_exit runs unless a
 * frontend exit hook exists, then crypto_exit_ops(), and the slab is
 * freed with kfree_sensitive() so key material is wiped. The
 * crypto_mod_put(alg) visible upstream is on a line missing from this
 * extraction.
 */
668 void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
670 struct crypto_alg *alg;
672 if (IS_ERR_OR_NULL(mem))
675 if (!refcount_dec_and_test(&tfm->refcnt))
677 alg = tfm->__crt_alg;
679 if (!tfm->exit && alg->cra_exit)
681 crypto_exit_ops(tfm);
683 kfree_sensitive(mem);
685 EXPORT_SYMBOL_GPL(crypto_destroy_tfm);
/*
 * crypto_has_alg - probe whether an algorithm is available.
 * Performs a full mod lookup; the reference drop and boolean return are
 * on lines missing from this extraction.
 *
 * crypto_req_done - completion callback for synchronous request waiters:
 * ignores -EINPROGRESS (backlog notification), otherwise records the
 * error (on a missing line) and completes the wait.
 */
687 int crypto_has_alg(const char *name, u32 type, u32 mask)
690 struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);
699 EXPORT_SYMBOL_GPL(crypto_has_alg);
701 void crypto_req_done(void *data, int err)
703 struct crypto_wait *wait = data;
705 if (err == -EINPROGRESS)
709 complete(&wait->completion);
711 EXPORT_SYMBOL_GPL(crypto_req_done);
713 MODULE_DESCRIPTION("Cryptographic core API");
714 MODULE_LICENSE("GPL");