// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Asynchronous Cryptographic Hash operations.
 *
 * This is the asynchronous version of hash.c with notification of
 * completion via a callback.
 *
 * Copyright (c) 2008 Loc Ho <lho@amcc.com>
 */

#include <crypto/scatterwalk.h>
#include <linux/cryptouser.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/string.h>
#include <net/netlink.h>

#include "hash.h"

static const struct crypto_type crypto_ahash_type;

struct ahash_request_priv {
	crypto_completion_t complete;
	void *data;
	u8 *result;
	u32 flags;
	void *ubuf[] CRYPTO_MINALIGN_ATTR;
};

static inline struct ahash_alg *crypto_ahash_alg(struct crypto_ahash *hash)
{
	return container_of(crypto_hash_alg_common(hash), struct ahash_alg,
			    halg);
}

static int hash_walk_next(struct crypto_hash_walk *walk)
{
	unsigned int alignmask = walk->alignmask;
	unsigned int offset = walk->offset;
	unsigned int nbytes = min(walk->entrylen,
				  ((unsigned int)(PAGE_SIZE)) - offset);

	walk->data = kmap_local_page(walk->pg);
	walk->data += offset;

	if (offset & alignmask) {
		unsigned int unaligned = alignmask + 1 - (offset & alignmask);

		if (nbytes > unaligned)
			nbytes = unaligned;
	}

	walk->entrylen -= nbytes;
	return nbytes;
}

static int hash_walk_new_entry(struct crypto_hash_walk *walk)
{
	struct scatterlist *sg;

	sg = walk->sg;
	walk->offset = sg->offset;
	walk->pg = sg_page(walk->sg) + (walk->offset >> PAGE_SHIFT);
	walk->offset = offset_in_page(walk->offset);
	walk->entrylen = sg->length;

	if (walk->entrylen > walk->total)
		walk->entrylen = walk->total;
	walk->total -= walk->entrylen;

	return hash_walk_next(walk);
}

int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err)
{
	unsigned int alignmask = walk->alignmask;

	walk->data -= walk->offset;

	if (walk->entrylen && (walk->offset & alignmask) && !err) {
		unsigned int nbytes;

		walk->offset = ALIGN(walk->offset, alignmask + 1);
		nbytes = min(walk->entrylen,
			     (unsigned int)(PAGE_SIZE - walk->offset));

		if (nbytes) {
			walk->entrylen -= nbytes;
			walk->data += walk->offset;
			return nbytes;
		}
	}

	kunmap_local(walk->data);
	crypto_yield(walk->flags);

	if (err)
		return err;

	if (walk->entrylen) {
		walk->offset = 0;
		walk->pg++;
		return hash_walk_next(walk);
	}

	if (!walk->total)
		return 0;

	walk->sg = sg_next(walk->sg);

	return hash_walk_new_entry(walk);
}
EXPORT_SYMBOL_GPL(crypto_hash_walk_done);

int crypto_hash_walk_first(struct ahash_request *req,
			   struct crypto_hash_walk *walk)
{
	walk->total = req->nbytes;

	if (!walk->total) {
		walk->entrylen = 0;
		return 0;
	}

	walk->alignmask = crypto_ahash_alignmask(crypto_ahash_reqtfm(req));
	walk->sg = req->src;
	walk->flags = req->base.flags;

	return hash_walk_new_entry(walk);
}
EXPORT_SYMBOL_GPL(crypto_hash_walk_first);
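
/*
 * Typical driver-side use of the walk helpers above: iterate over
 * req->src one mapped chunk at a time, feeding each chunk to the
 * hardware or software core.  This is an illustrative sketch only;
 * process_block() is a hypothetical per-driver helper, and the second
 * argument to crypto_hash_walk_done() reports that chunk's result:
 *
 *	struct crypto_hash_walk walk;
 *	int nbytes, err = 0;
 *
 *	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
 *	     nbytes = crypto_hash_walk_done(&walk, err))
 *		err = process_block(walk.data, nbytes);
 */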

static int ahash_setkey_unaligned(struct crypto_ahash *tfm, const u8 *key,
				  unsigned int keylen)
{
	unsigned long alignmask = crypto_ahash_alignmask(tfm);
	int ret;
	u8 *buffer, *alignbuffer;
	unsigned long absize;

	absize = keylen + alignmask;
	buffer = kmalloc(absize, GFP_KERNEL);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	ret = tfm->setkey(tfm, alignbuffer, keylen);
	kfree_sensitive(buffer);
	return ret;
}

static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
			  unsigned int keylen)
{
	return -ENOSYS;
}

static void ahash_set_needkey(struct crypto_ahash *tfm)
{
	const struct hash_alg_common *alg = crypto_hash_alg_common(tfm);

	if (tfm->setkey != ahash_nosetkey &&
	    !(alg->base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
		crypto_ahash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
			unsigned int keylen)
{
	unsigned long alignmask = crypto_ahash_alignmask(tfm);
	int err;

	if ((unsigned long)key & alignmask)
		err = ahash_setkey_unaligned(tfm, key, keylen);
	else
		err = tfm->setkey(tfm, key, keylen);

	if (unlikely(err)) {
		ahash_set_needkey(tfm);
		return err;
	}

	crypto_ahash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_ahash_setkey);
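
/*
 * Keyed hashes (e.g. "hmac(sha256)") must be keyed before use: until
 * crypto_ahash_setkey() succeeds the tfm carries CRYPTO_TFM_NEED_KEY
 * and crypto_ahash_digest() below fails with -ENOKEY.  A minimal
 * caller-side sketch, assuming key/keylen come from the caller:
 *
 *	err = crypto_ahash_setkey(tfm, key, keylen);
 *	if (err)
 *		goto out_free_tfm;
 */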

/*
 * Stash the original request and swap in an internal subrequest whose
 * result buffer is properly aligned; ahash_restore_req() copies the
 * digest back and frees the subrequest.
 */
static int ahash_save_req(struct ahash_request *req, crypto_completion_t cplt,
			  bool has_state)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	unsigned long alignmask = crypto_ahash_alignmask(tfm);
	unsigned int ds = crypto_ahash_digestsize(tfm);
	struct ahash_request *subreq;
	unsigned int subreq_size;
	unsigned int reqsize;
	u8 *result;
	gfp_t gfp;
	u32 flags;

	subreq_size = sizeof(*subreq);
	reqsize = crypto_ahash_reqsize(tfm);
	reqsize = ALIGN(reqsize, crypto_tfm_ctx_alignment());
	subreq_size += reqsize;
	subreq_size += ds;
	subreq_size += alignmask & ~(crypto_tfm_ctx_alignment() - 1);

	flags = ahash_request_flags(req);
	gfp = (flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? GFP_KERNEL : GFP_ATOMIC;
	subreq = kmalloc(subreq_size, gfp);
	if (!subreq)
		return -ENOMEM;

	ahash_request_set_tfm(subreq, tfm);
	ahash_request_set_callback(subreq, flags, cplt, req);

	result = (u8 *)(subreq + 1) + reqsize;
	result = PTR_ALIGN(result, alignmask + 1);

	ahash_request_set_crypt(subreq, req->src, result, req->nbytes);

	if (has_state) {
		void *state;

		state = kmalloc(crypto_ahash_statesize(tfm), gfp);
		if (!state) {
			kfree(subreq);
			return -ENOMEM;
		}

		crypto_ahash_export(req, state);
		crypto_ahash_import(subreq, state);
		kfree_sensitive(state);
	}

	req->priv = subreq;

	return 0;
}

static void ahash_restore_req(struct ahash_request *req, int err)
{
	struct ahash_request *subreq = req->priv;

	if (!err)
		memcpy(req->result, subreq->result,
		       crypto_ahash_digestsize(crypto_ahash_reqtfm(req)));

	req->priv = NULL;

	kfree_sensitive(subreq);
}

static void ahash_op_unaligned_done(void *data, int err)
{
	struct ahash_request *areq = data;

	if (err == -EINPROGRESS)
		goto out;

	/* First copy req->result into req->priv.result */
	ahash_restore_req(areq, err);

out:
	/* Complete the ORIGINAL request. */
	ahash_request_complete(areq, err);
}

static int ahash_op_unaligned(struct ahash_request *req,
			      int (*op)(struct ahash_request *),
			      bool has_state)
{
	int err;

	err = ahash_save_req(req, ahash_op_unaligned_done, has_state);
	if (err)
		return err;

	err = op(req->priv);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

	ahash_restore_req(req, err);

	return err;
}

static int crypto_ahash_op(struct ahash_request *req,
			   int (*op)(struct ahash_request *),
			   bool has_state)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	unsigned long alignmask = crypto_ahash_alignmask(tfm);
	int err;

	if ((unsigned long)req->result & alignmask)
		err = ahash_op_unaligned(req, op, has_state);
	else
		err = op(req);

	return crypto_hash_errstat(crypto_hash_alg_common(tfm), err);
}

int crypto_ahash_final(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct hash_alg_common *alg = crypto_hash_alg_common(tfm);

	if (IS_ENABLED(CONFIG_CRYPTO_STATS))
		atomic64_inc(&hash_get_stat(alg)->hash_cnt);

	return crypto_ahash_op(req, tfm->final, true);
}
EXPORT_SYMBOL_GPL(crypto_ahash_final);

int crypto_ahash_finup(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct hash_alg_common *alg = crypto_hash_alg_common(tfm);

	if (IS_ENABLED(CONFIG_CRYPTO_STATS)) {
		struct crypto_istat_hash *istat = hash_get_stat(alg);

		atomic64_inc(&istat->hash_cnt);
		atomic64_add(req->nbytes, &istat->hash_tlen);
	}

	return crypto_ahash_op(req, tfm->finup, true);
}
EXPORT_SYMBOL_GPL(crypto_ahash_finup);

int crypto_ahash_digest(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct hash_alg_common *alg = crypto_hash_alg_common(tfm);

	if (IS_ENABLED(CONFIG_CRYPTO_STATS)) {
		struct crypto_istat_hash *istat = hash_get_stat(alg);

		atomic64_inc(&istat->hash_cnt);
		atomic64_add(req->nbytes, &istat->hash_tlen);
	}

	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return crypto_hash_errstat(alg, -ENOKEY);

	return crypto_ahash_op(req, tfm->digest, false);
}
EXPORT_SYMBOL_GPL(crypto_ahash_digest);
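
/*
 * Illustrative synchronous caller of the one-shot digest path.  The
 * tfm/sg/out/len names are assumed to be set up by the caller;
 * DECLARE_CRYPTO_WAIT()/crypto_wait_req() are the standard helpers
 * for waiting on an asynchronous completion:
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *	struct ahash_request *req;
 *	int err;
 *
 *	req = ahash_request_alloc(tfm, GFP_KERNEL);
 *	if (!req)
 *		return -ENOMEM;
 *	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				   crypto_req_done, &wait);
 *	ahash_request_set_crypt(req, sg, out, len);
 *	err = crypto_wait_req(crypto_ahash_digest(req), &wait);
 *	ahash_request_free(req);
 */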

static void ahash_def_finup_done2(void *data, int err)
{
	struct ahash_request *areq = data;

	if (err == -EINPROGRESS)
		return;

	ahash_restore_req(areq, err);

	ahash_request_complete(areq, err);
}

static int ahash_def_finup_finish1(struct ahash_request *req, int err)
{
	struct ahash_request *subreq = req->priv;

	if (err)
		goto out;

	subreq->base.complete = ahash_def_finup_done2;

	err = crypto_ahash_reqtfm(req)->final(subreq);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

out:
	ahash_restore_req(req, err);
	return err;
}

static void ahash_def_finup_done1(void *data, int err)
{
	struct ahash_request *areq = data;
	struct ahash_request *subreq;

	if (err == -EINPROGRESS)
		goto out;

	subreq = areq->priv;
	subreq->base.flags &= CRYPTO_TFM_REQ_MAY_BACKLOG;

	err = ahash_def_finup_finish1(areq, err);
	if (err == -EINPROGRESS || err == -EBUSY)
		return;

out:
	ahash_request_complete(areq, err);
}

/* Default finup: an update followed by final, run on an internal subrequest. */
static int ahash_def_finup(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	int err;

	err = ahash_save_req(req, ahash_def_finup_done1, true);
	if (err)
		return err;

	err = tfm->update(req->priv);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

	return ahash_def_finup_finish1(req, err);
}

static void crypto_ahash_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = crypto_ahash_alg(hash);

	alg->exit_tfm(hash);
}

static int crypto_ahash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = crypto_ahash_alg(hash);

	hash->setkey = ahash_nosetkey;

	/* Algorithms built on shash take the async wrapper path. */
	if (tfm->__crt_alg->cra_type != &crypto_ahash_type)
		return crypto_init_shash_ops_async(tfm);

	hash->init = alg->init;
	hash->update = alg->update;
	hash->final = alg->final;
	hash->finup = alg->finup ?: ahash_def_finup;
	hash->digest = alg->digest;
	hash->export = alg->export;
	hash->import = alg->import;

	if (alg->setkey) {
		hash->setkey = alg->setkey;
		ahash_set_needkey(hash);
	}

	if (alg->exit_tfm)
		tfm->exit = crypto_ahash_exit_tfm;

	return alg->init_tfm ? alg->init_tfm(hash) : 0;
}

static unsigned int crypto_ahash_extsize(struct crypto_alg *alg)
{
	if (alg->cra_type != &crypto_ahash_type)
		return sizeof(struct crypto_shash *);

	return crypto_alg_extsize(alg);
}

static void crypto_ahash_free_instance(struct crypto_instance *inst)
{
	struct ahash_instance *ahash = ahash_instance(inst);

	ahash->free(ahash);
}

static int __maybe_unused crypto_ahash_report(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "ahash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = __crypto_hash_alg_common(alg)->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}

static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
{
	seq_printf(m, "type         : ahash\n");
	seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
					     "yes" : "no");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n",
		   __crypto_hash_alg_common(alg)->digestsize);
}

static int __maybe_unused crypto_ahash_report_stat(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	return crypto_hash_report_stat(skb, alg, "ahash");
}

static const struct crypto_type crypto_ahash_type = {
	.extsize = crypto_ahash_extsize,
	.init_tfm = crypto_ahash_init_tfm,
	.free = crypto_ahash_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_ahash_show,
#endif
#if IS_ENABLED(CONFIG_CRYPTO_USER)
	.report = crypto_ahash_report,
#endif
#ifdef CONFIG_CRYPTO_STATS
	.report_stat = crypto_ahash_report_stat,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_AHASH_MASK,
	.type = CRYPTO_ALG_TYPE_AHASH,
	.tfmsize = offsetof(struct crypto_ahash, base),
};

int crypto_grab_ahash(struct crypto_ahash_spawn *spawn,
		      struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_ahash_type;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_ahash);

struct crypto_ahash *crypto_alloc_ahash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_ahash);
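
/*
 * Allocation is by algorithm name; "sha256" below is only an example.
 * A sketch of the usual pattern:
 *
 *	struct crypto_ahash *tfm;
 *
 *	tfm = crypto_alloc_ahash("sha256", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_ahash(tfm);
 */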

int crypto_has_ahash(const char *alg_name, u32 type, u32 mask)
{
	return crypto_type_has_alg(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_has_ahash);

struct crypto_ahash *crypto_clone_ahash(struct crypto_ahash *hash)
{
	struct hash_alg_common *halg = crypto_hash_alg_common(hash);
	struct crypto_tfm *tfm = crypto_ahash_tfm(hash);
	struct crypto_ahash *nhash;
	struct ahash_alg *alg;
	int err;

	if (!crypto_hash_alg_has_setkey(halg)) {
		tfm = crypto_tfm_get(tfm);
		if (IS_ERR(tfm))
			return ERR_CAST(tfm);

		return hash;
	}

	nhash = crypto_clone_tfm(&crypto_ahash_type, tfm);

	if (IS_ERR(nhash))
		return nhash;

	nhash->init = hash->init;
	nhash->update = hash->update;
	nhash->final = hash->final;
	nhash->finup = hash->finup;
	nhash->digest = hash->digest;
	nhash->export = hash->export;
	nhash->import = hash->import;
	nhash->setkey = hash->setkey;
	nhash->reqsize = hash->reqsize;

	if (tfm->__crt_alg->cra_type != &crypto_ahash_type)
		return crypto_clone_shash_ops_async(nhash, hash);

	err = -ENOSYS;
	alg = crypto_ahash_alg(hash);
	if (!alg->clone_tfm)
		goto out_free_nhash;

	err = alg->clone_tfm(nhash, hash);
	if (err)
		goto out_free_nhash;

	return nhash;

out_free_nhash:
	crypto_free_ahash(nhash);
	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_clone_ahash);
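
/*
 * Note on cloning: for algorithms without a setkey the clone is just
 * another reference to the same tfm, while keyed algorithms need a
 * clone_tfm callback so the key state can be duplicated without
 * calling setkey again.
 */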

static int ahash_prepare_alg(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	if (alg->halg.statesize == 0)
		return -EINVAL;

	err = hash_prepare_alg(&alg->halg);
	if (err)
		return err;

	base->cra_type = &crypto_ahash_type;
	base->cra_flags |= CRYPTO_ALG_TYPE_AHASH;

	return 0;
}

int crypto_register_ahash(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	err = ahash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_ahash);
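
/*
 * A driver registers an implementation by filling in struct ahash_alg
 * and calling crypto_register_ahash(), typically from module init.
 * Abbreviated sketch; the my_* callbacks and state struct are
 * hypothetical and the field values only illustrative (note that
 * statesize must be non-zero, see ahash_prepare_alg() above):
 *
 *	static struct ahash_alg my_alg = {
 *		.init	= my_init,
 *		.update	= my_update,
 *		.final	= my_final,
 *		.digest	= my_digest,
 *		.export	= my_export,
 *		.import	= my_import,
 *		.halg = {
 *			.digestsize = SHA256_DIGEST_SIZE,
 *			.statesize  = sizeof(struct my_state),
 *			.base = {
 *				.cra_name	 = "sha256",
 *				.cra_driver_name = "sha256-mydev",
 *				.cra_priority	 = 300,
 *				.cra_flags	 = CRYPTO_ALG_ASYNC,
 *				.cra_blocksize	 = SHA256_BLOCK_SIZE,
 *				.cra_module	 = THIS_MODULE,
 *			},
 *		},
 *	};
 *
 *	err = crypto_register_ahash(&my_alg);
 */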

void crypto_unregister_ahash(struct ahash_alg *alg)
{
	crypto_unregister_alg(&alg->halg.base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahash);

int crypto_register_ahashes(struct ahash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_ahash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_ahash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_ahashes);

void crypto_unregister_ahashes(struct ahash_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_ahash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahashes);

int ahash_register_instance(struct crypto_template *tmpl,
			    struct ahash_instance *inst)
{
	int err;

	if (WARN_ON(!inst->free))
		return -EINVAL;

	err = ahash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, ahash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(ahash_register_instance);

bool crypto_hash_alg_has_setkey(struct hash_alg_common *halg)
{
	struct crypto_alg *alg = &halg->base;

	if (alg->cra_type != &crypto_ahash_type)
		return crypto_shash_alg_has_setkey(__crypto_shash_alg(alg));

	return __crypto_ahash_alg(alg)->setkey != NULL;
}
EXPORT_SYMBOL_GPL(crypto_hash_alg_has_setkey);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Asynchronous cryptographic hash type");