// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Shared crypto simd helpers
 *
 * Copyright (c) 2019 Google LLC
 *
 * Based on aesni-intel_glue.c by:
 *  Copyright (C) 2008, Intel Corp.
 */

/*
 * Shared crypto SIMD helpers.  These functions dynamically create and register
 * an skcipher or AEAD algorithm that wraps another, internal algorithm.  The
 * wrapper ensures that the internal algorithm is only executed in a context
 * where SIMD instructions are usable, i.e. where may_use_simd() returns true.
 * If SIMD is already usable, the wrapper directly calls the internal algorithm.
 * Otherwise it defers execution to a workqueue via cryptd.
 *
 * This is an alternative to the internal algorithm implementing a fallback for
 * the !may_use_simd() case itself.
 *
 * Note that the wrapper algorithm is asynchronous, i.e. it has the
 * CRYPTO_ALG_ASYNC flag set.  Therefore it won't be found by users who
 * explicitly allocate a synchronous algorithm.
 */

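/*
 * Illustrative usage sketch, not taken from this file: an arch-specific
 * driver typically defines internal algorithms whose cra_name and
 * cra_driver_name carry a "__" prefix and which set CRYPTO_ALG_INTERNAL,
 * then wraps and registers them all in one call.  The identifiers my_algs
 * and my_init below are hypothetical; simd_register_skciphers_compat() is
 * defined later in this file.
 *
 *	static struct skcipher_alg my_algs[] = { ... };
 *	static struct simd_skcipher_alg *my_simd_algs[ARRAY_SIZE(my_algs)];
 *
 *	static int __init my_init(void)
 *	{
 *		return simd_register_skciphers_compat(my_algs,
 *						      ARRAY_SIZE(my_algs),
 *						      my_simd_algs);
 *	}
 */
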
#include <crypto/cryptd.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/preempt.h>

/* skcipher support */

struct simd_skcipher_alg {
	const char *ialg_name;
	struct skcipher_alg alg;
};

struct simd_skcipher_ctx {
	struct cryptd_skcipher *cryptd_tfm;
};

static int simd_skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
				unsigned int key_len)
{
	struct simd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *child = &ctx->cryptd_tfm->base;

	crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(tfm) &
					 CRYPTO_TFM_REQ_MASK);
	return crypto_skcipher_setkey(child, key, key_len);
}

static int simd_skcipher_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct simd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_request *subreq;
	struct crypto_skcipher *child;

	subreq = skcipher_request_ctx(req);
	*subreq = *req;

	/*
	 * Go through cryptd if SIMD is unusable, or if cryptd already has
	 * requests queued and we are in atomic context, so that bypassing
	 * the queue cannot complete this request out of order.
	 */
	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_skcipher_queued(ctx->cryptd_tfm)))
		child = &ctx->cryptd_tfm->base;
	else
		child = cryptd_skcipher_child(ctx->cryptd_tfm);

	skcipher_request_set_tfm(subreq, child);

	return crypto_skcipher_encrypt(subreq);
}

static int simd_skcipher_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct simd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_request *subreq;
	struct crypto_skcipher *child;

	subreq = skcipher_request_ctx(req);
	*subreq = *req;

	/* Same child-selection rule as simd_skcipher_encrypt(). */
	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_skcipher_queued(ctx->cryptd_tfm)))
		child = &ctx->cryptd_tfm->base;
	else
		child = cryptd_skcipher_child(ctx->cryptd_tfm);

	skcipher_request_set_tfm(subreq, child);

	return crypto_skcipher_decrypt(subreq);
}

static void simd_skcipher_exit(struct crypto_skcipher *tfm)
{
	struct simd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	cryptd_free_skcipher(ctx->cryptd_tfm);
}

static int simd_skcipher_init(struct crypto_skcipher *tfm)
{
	struct simd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct cryptd_skcipher *cryptd_tfm;
	struct simd_skcipher_alg *salg;
	struct skcipher_alg *alg;
	unsigned reqsize;

	alg = crypto_skcipher_alg(tfm);
	salg = container_of(alg, struct simd_skcipher_alg, alg);

	cryptd_tfm = cryptd_alloc_skcipher(salg->ialg_name,
					   CRYPTO_ALG_INTERNAL,
					   CRYPTO_ALG_INTERNAL);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);

	ctx->cryptd_tfm = cryptd_tfm;

	/* Reserve room for a subrequest for either possible child tfm. */
	reqsize = crypto_skcipher_reqsize(cryptd_skcipher_child(cryptd_tfm));
	reqsize = max(reqsize, crypto_skcipher_reqsize(&cryptd_tfm->base));
	reqsize += sizeof(struct skcipher_request);

	crypto_skcipher_set_reqsize(tfm, reqsize);

	return 0;
}

struct simd_skcipher_alg *simd_skcipher_create_compat(const char *algname,
						      const char *drvname,
						      const char *basename)
{
	struct simd_skcipher_alg *salg;
	struct crypto_skcipher *tfm;
	struct skcipher_alg *ialg;
	struct skcipher_alg *alg;
	int err;

	tfm = crypto_alloc_skcipher(basename, CRYPTO_ALG_INTERNAL,
				    CRYPTO_ALG_INTERNAL | CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);

	ialg = crypto_skcipher_alg(tfm);

	salg = kzalloc(sizeof(*salg), GFP_KERNEL);
	if (!salg) {
		salg = ERR_PTR(-ENOMEM);
		goto out_put_tfm;
	}

	salg->ialg_name = basename;
	alg = &salg->alg;

	err = -ENAMETOOLONG;
	if (snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", algname) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_free_salg;

	if (snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		     drvname) >= CRYPTO_MAX_ALG_NAME)
		goto out_free_salg;

	alg->base.cra_flags = CRYPTO_ALG_ASYNC;
	alg->base.cra_priority = ialg->base.cra_priority;
	alg->base.cra_blocksize = ialg->base.cra_blocksize;
	alg->base.cra_alignmask = ialg->base.cra_alignmask;
	alg->base.cra_module = ialg->base.cra_module;
	alg->base.cra_ctxsize = sizeof(struct simd_skcipher_ctx);

	alg->ivsize = ialg->ivsize;
	alg->chunksize = ialg->chunksize;
	alg->min_keysize = ialg->min_keysize;
	alg->max_keysize = ialg->max_keysize;

	alg->init = simd_skcipher_init;
	alg->exit = simd_skcipher_exit;

	alg->setkey = simd_skcipher_setkey;
	alg->encrypt = simd_skcipher_encrypt;
	alg->decrypt = simd_skcipher_decrypt;

	err = crypto_register_skcipher(alg);
	if (err)
		goto out_free_salg;

out_put_tfm:
	crypto_free_skcipher(tfm);
	return salg;

out_free_salg:
	kfree(salg);
	salg = ERR_PTR(err);
	goto out_put_tfm;
}
EXPORT_SYMBOL_GPL(simd_skcipher_create_compat);

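/*
 * simd_skcipher_create() below derives the driver name by prefixing "simd-"
 * to the internal algorithm's name.  Hypothetical example: algname
 * "cbc(aes)" and basename "__cbc-aes-myarch" yield a wrapper with cra_name
 * "cbc(aes)" and cra_driver_name "simd-__cbc-aes-myarch".
 */
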
struct simd_skcipher_alg *simd_skcipher_create(const char *algname,
					       const char *basename)
{
	char drvname[CRYPTO_MAX_ALG_NAME];

	if (snprintf(drvname, CRYPTO_MAX_ALG_NAME, "simd-%s", basename) >=
	    CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-ENAMETOOLONG);

	return simd_skcipher_create_compat(algname, drvname, basename);
}
EXPORT_SYMBOL_GPL(simd_skcipher_create);

void simd_skcipher_free(struct simd_skcipher_alg *salg)
{
	crypto_unregister_skcipher(&salg->alg);
	kfree(salg);
}
EXPORT_SYMBOL_GPL(simd_skcipher_free);

int simd_register_skciphers_compat(struct skcipher_alg *algs, int count,
				   struct simd_skcipher_alg **simd_algs)
{
	int err;
	int i;
	const char *algname;
	const char *drvname;
	const char *basename;
	struct simd_skcipher_alg *simd;

	err = crypto_register_skciphers(algs, count);
	if (err)
		return err;

	for (i = 0; i < count; i++) {
		WARN_ON(strncmp(algs[i].base.cra_name, "__", 2));
		WARN_ON(strncmp(algs[i].base.cra_driver_name, "__", 2));
		algname = algs[i].base.cra_name + 2;
		drvname = algs[i].base.cra_driver_name + 2;
		basename = algs[i].base.cra_driver_name;
		simd = simd_skcipher_create_compat(algname, drvname, basename);
		err = PTR_ERR(simd);
		if (IS_ERR(simd))
			goto err_unregister;
		simd_algs[i] = simd;
	}
	return 0;

err_unregister:
	simd_unregister_skciphers(algs, count, simd_algs);
	return err;
}
EXPORT_SYMBOL_GPL(simd_register_skciphers_compat);

void simd_unregister_skciphers(struct skcipher_alg *algs, int count,
			       struct simd_skcipher_alg **simd_algs)
{
	int i;

	crypto_unregister_skciphers(algs, count);

	for (i = 0; i < count; i++) {
		if (simd_algs[i]) {
			simd_skcipher_free(simd_algs[i]);
			simd_algs[i] = NULL;
		}
	}
}
EXPORT_SYMBOL_GPL(simd_unregister_skciphers);

/* AEAD support */

struct simd_aead_alg {
	const char *ialg_name;
	struct aead_alg alg;
};

struct simd_aead_ctx {
	struct cryptd_aead *cryptd_tfm;
};

static int simd_aead_setkey(struct crypto_aead *tfm, const u8 *key,
			    unsigned int key_len)
{
	struct simd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *child = &ctx->cryptd_tfm->base;

	crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(child, crypto_aead_get_flags(tfm) &
			      CRYPTO_TFM_REQ_MASK);
	return crypto_aead_setkey(child, key, key_len);
}

static int simd_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	struct simd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *child = &ctx->cryptd_tfm->base;

	return crypto_aead_setauthsize(child, authsize);
}

static int simd_aead_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct simd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct aead_request *subreq;
	struct crypto_aead *child;

	subreq = aead_request_ctx(req);
	*subreq = *req;

	/* Same child-selection rule as simd_skcipher_encrypt(). */
	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_aead_queued(ctx->cryptd_tfm)))
		child = &ctx->cryptd_tfm->base;
	else
		child = cryptd_aead_child(ctx->cryptd_tfm);

	aead_request_set_tfm(subreq, child);

	return crypto_aead_encrypt(subreq);
}

static int simd_aead_decrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct simd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct aead_request *subreq;
	struct crypto_aead *child;

	subreq = aead_request_ctx(req);
	*subreq = *req;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_aead_queued(ctx->cryptd_tfm)))
		child = &ctx->cryptd_tfm->base;
	else
		child = cryptd_aead_child(ctx->cryptd_tfm);

	aead_request_set_tfm(subreq, child);

	return crypto_aead_decrypt(subreq);
}

static void simd_aead_exit(struct crypto_aead *tfm)
{
	struct simd_aead_ctx *ctx = crypto_aead_ctx(tfm);

	cryptd_free_aead(ctx->cryptd_tfm);
}

static int simd_aead_init(struct crypto_aead *tfm)
{
	struct simd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct cryptd_aead *cryptd_tfm;
	struct simd_aead_alg *salg;
	struct aead_alg *alg;
	unsigned reqsize;

	alg = crypto_aead_alg(tfm);
	salg = container_of(alg, struct simd_aead_alg, alg);

	cryptd_tfm = cryptd_alloc_aead(salg->ialg_name, CRYPTO_ALG_INTERNAL,
				       CRYPTO_ALG_INTERNAL);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);

	ctx->cryptd_tfm = cryptd_tfm;

	/* Reserve room for a subrequest for either possible child tfm. */
	reqsize = crypto_aead_reqsize(cryptd_aead_child(cryptd_tfm));
	reqsize = max(reqsize, crypto_aead_reqsize(&cryptd_tfm->base));
	reqsize += sizeof(struct aead_request);

	crypto_aead_set_reqsize(tfm, reqsize);

	return 0;
}

struct simd_aead_alg *simd_aead_create_compat(const char *algname,
					      const char *drvname,
					      const char *basename)
{
	struct simd_aead_alg *salg;
	struct crypto_aead *tfm;
	struct aead_alg *ialg;
	struct aead_alg *alg;
	int err;

	tfm = crypto_alloc_aead(basename, CRYPTO_ALG_INTERNAL,
				CRYPTO_ALG_INTERNAL | CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);

	ialg = crypto_aead_alg(tfm);

	salg = kzalloc(sizeof(*salg), GFP_KERNEL);
	if (!salg) {
		salg = ERR_PTR(-ENOMEM);
		goto out_put_tfm;
	}

	salg->ialg_name = basename;
	alg = &salg->alg;

	err = -ENAMETOOLONG;
	if (snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", algname) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_free_salg;

	if (snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		     drvname) >= CRYPTO_MAX_ALG_NAME)
		goto out_free_salg;

	alg->base.cra_flags = CRYPTO_ALG_ASYNC;
	alg->base.cra_priority = ialg->base.cra_priority;
	alg->base.cra_blocksize = ialg->base.cra_blocksize;
	alg->base.cra_alignmask = ialg->base.cra_alignmask;
	alg->base.cra_module = ialg->base.cra_module;
	alg->base.cra_ctxsize = sizeof(struct simd_aead_ctx);

	alg->ivsize = ialg->ivsize;
	alg->maxauthsize = ialg->maxauthsize;
	alg->chunksize = ialg->chunksize;

	alg->init = simd_aead_init;
	alg->exit = simd_aead_exit;

	alg->setkey = simd_aead_setkey;
	alg->setauthsize = simd_aead_setauthsize;
	alg->encrypt = simd_aead_encrypt;
	alg->decrypt = simd_aead_decrypt;

	err = crypto_register_aead(alg);
	if (err)
		goto out_free_salg;

out_put_tfm:
	crypto_free_aead(tfm);
	return salg;

out_free_salg:
	kfree(salg);
	salg = ERR_PTR(err);
	goto out_put_tfm;
}
EXPORT_SYMBOL_GPL(simd_aead_create_compat);

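/*
 * As with the skcipher variant, simd_aead_create() below prefixes "simd-"
 * to the internal algorithm's name (hypothetical example: basename
 * "__gcm-aes-myarch" yields cra_driver_name "simd-__gcm-aes-myarch").
 */
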
struct simd_aead_alg *simd_aead_create(const char *algname,
				       const char *basename)
{
	char drvname[CRYPTO_MAX_ALG_NAME];

	if (snprintf(drvname, CRYPTO_MAX_ALG_NAME, "simd-%s", basename) >=
	    CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-ENAMETOOLONG);

	return simd_aead_create_compat(algname, drvname, basename);
}
EXPORT_SYMBOL_GPL(simd_aead_create);

void simd_aead_free(struct simd_aead_alg *salg)
{
	crypto_unregister_aead(&salg->alg);
	kfree(salg);
}
EXPORT_SYMBOL_GPL(simd_aead_free);

int simd_register_aeads_compat(struct aead_alg *algs, int count,
			       struct simd_aead_alg **simd_algs)
{
	int err;
	int i;
	const char *algname;
	const char *drvname;
	const char *basename;
	struct simd_aead_alg *simd;

	err = crypto_register_aeads(algs, count);
	if (err)
		return err;

	for (i = 0; i < count; i++) {
		WARN_ON(strncmp(algs[i].base.cra_name, "__", 2));
		WARN_ON(strncmp(algs[i].base.cra_driver_name, "__", 2));
		algname = algs[i].base.cra_name + 2;
		drvname = algs[i].base.cra_driver_name + 2;
		basename = algs[i].base.cra_driver_name;
		simd = simd_aead_create_compat(algname, drvname, basename);
		err = PTR_ERR(simd);
		if (IS_ERR(simd))
			goto err_unregister;
		simd_algs[i] = simd;
	}
	return 0;

err_unregister:
	simd_unregister_aeads(algs, count, simd_algs);
	return err;
}
EXPORT_SYMBOL_GPL(simd_register_aeads_compat);

void simd_unregister_aeads(struct aead_alg *algs, int count,
			   struct simd_aead_alg **simd_algs)
{
	int i;

	crypto_unregister_aeads(algs, count);

	for (i = 0; i < count; i++) {
		if (simd_algs[i]) {
			simd_aead_free(simd_algs[i]);
			simd_algs[i] = NULL;
		}
	}
}
EXPORT_SYMBOL_GPL(simd_unregister_aeads);

MODULE_LICENSE("GPL");