/*
 * Scatterlist Cryptographic API.
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas,
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#include <linux/compiler.h>
#include <linux/init.h>
#include <linux/crypto.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/rwsem.h>
#include <linux/slab.h>
#include <linux/string.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);

/* Pin the module that provides an algorithm so it cannot be unloaded while
 * a transform still uses it; crypto_alg_put() drops that reference. */
static inline int crypto_alg_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module);
}

static inline void crypto_alg_put(struct crypto_alg *alg)
{
	module_put(alg->cra_module);
}

/* Find the best implementation of an algorithm: an exact cra_driver_name
 * match wins immediately, otherwise the highest-priority cra_name match. */
static struct crypto_alg *crypto_alg_lookup(const char *name)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -1;

	if (!name)
		return NULL;

	down_read(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_alg_get(q)))
			continue;

		/* Drop the reference held on a previous, lower-priority match. */
		if (alg)
			crypto_alg_put(alg);
		alg = q;
		best = q->cra_priority;

		if (exact)
			break;
	}
	up_read(&crypto_alg_sem);

	return alg;
}

/* A far more intelligent version of this is planned.  For now, just
 * try an exact match on the name of the algorithm. */
static inline struct crypto_alg *crypto_alg_mod_lookup(const char *name)
{
	return try_then_request_module(crypto_alg_lookup(name), name);
}

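/*
 * Illustrative note (not part of the original file): try_then_request_module()
 * from <linux/kmod.h> roughly behaves like the sketch below: try the lookup
 * once, and if it fails ask kmod to load a module named after the algorithm,
 * then retry.  The expansion shown is a simplification, not the macro's exact
 * text.
 *
 *	struct crypto_alg *alg = crypto_alg_lookup(name);
 *	if (!alg) {
 *		request_module(name);
 *		alg = crypto_alg_lookup(name);
 *	}
 *	return alg;
 */
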
static int crypto_init_flags(struct crypto_tfm *tfm, u32 flags)
{
	tfm->crt_flags = flags & CRYPTO_TFM_REQ_MASK;
	flags &= ~CRYPTO_TFM_REQ_MASK;

	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		return crypto_init_cipher_flags(tfm, flags);

	case CRYPTO_ALG_TYPE_DIGEST:
		return crypto_init_digest_flags(tfm, flags);

	case CRYPTO_ALG_TYPE_COMPRESS:
		return crypto_init_compress_flags(tfm, flags);

	default:
		BUG();
		return -EINVAL;
	}
}

static int crypto_init_ops(struct crypto_tfm *tfm)
{
	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		return crypto_init_cipher_ops(tfm);

	case CRYPTO_ALG_TYPE_DIGEST:
		return crypto_init_digest_ops(tfm);

	case CRYPTO_ALG_TYPE_COMPRESS:
		return crypto_init_compress_ops(tfm);

	default:
		BUG();
		return -EINVAL;
	}
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		crypto_exit_cipher_ops(tfm);
		break;

	case CRYPTO_ALG_TYPE_DIGEST:
		crypto_exit_digest_ops(tfm);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		crypto_exit_compress_ops(tfm);
		break;

	default:
		BUG();
	}
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, int flags)
{
	unsigned int len;

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len = crypto_cipher_ctxsize(alg, flags);
		break;

	case CRYPTO_ALG_TYPE_DIGEST:
		len = crypto_digest_ctxsize(alg, flags);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len = crypto_compress_ctxsize(alg, flags);
		break;
	}

	/* Reserve extra room so the context can be realigned to the
	 * algorithm's cra_alignmask beyond what the allocator guarantees. */
	return len + (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
}

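/*
 * Worked example of the padding arithmetic above (values are illustrative
 * only): with cra_alignmask == 15 (the context must be 16-byte aligned) and
 * crypto_tfm_ctx_alignment() == 8 (the allocator already guarantees 8-byte
 * alignment), the extra space reserved is
 *
 *	15 & ~(8 - 1) == 15 & ~7 == 8
 *
 * bytes, exactly enough to bump an 8-byte-aligned context pointer up to the
 * next 16-byte boundary.
 */
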
struct crypto_tfm *crypto_alloc_tfm(const char *name, u32 flags)
{
	struct crypto_tfm *tfm = NULL;
	struct crypto_alg *alg;
	unsigned int tfm_size;

	alg = crypto_alg_mod_lookup(name);
	if (alg == NULL)
		goto out;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, flags);
	tfm = kzalloc(tfm_size, GFP_KERNEL);
	if (tfm == NULL)
		goto out_put;

	tfm->__crt_alg = alg;

	if (crypto_init_flags(tfm, flags))
		goto out_free_tfm;

	if (crypto_init_ops(tfm))
		goto out_free_tfm;

	if (alg->cra_init && alg->cra_init(tfm))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	kfree(tfm);
	tfm = NULL;
out_put:
	crypto_alg_put(alg);
out:
	return tfm;
}

void crypto_free_tfm(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;
	int size;

	if (unlikely(!tfm))
		return;

	alg = tfm->__crt_alg;
	size = sizeof(*tfm) + alg->cra_ctxsize;

	if (alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_alg_put(alg);
	memset(tfm, 0, size);
	kfree(tfm);
}

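/*
 * Illustrative sketch (not part of the original file): a typical caller pairs
 * the two functions above.  The algorithm name "sha1" and the zero flags are
 * assumptions for the example; any registered cra_name or cra_driver_name may
 * be used.
 *
 *	struct crypto_tfm *tfm = crypto_alloc_tfm("sha1", 0);
 *
 *	if (tfm == NULL)
 *		return -ENOMEM;
 *	... use the transform through the crypto_digest_*() helpers ...
 *	crypto_free_tfm(tfm);
 */
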
static inline int crypto_set_driver_name(struct crypto_alg *alg)
{
	static const char suffix[] = "-generic";
	char *driver_name = alg->cra_driver_name;
	int len;

	if (*driver_name)
		return 0;

	len = strlcpy(driver_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
	if (len + sizeof(suffix) > CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(driver_name + len, suffix, sizeof(suffix));
	return 0;
}

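/*
 * Example of the derivation above (illustrative): an algorithm registered
 * with cra_name "aes" and an empty cra_driver_name ends up with the
 * cra_driver_name "aes-generic".  Note that sizeof(suffix) includes the
 * trailing NUL, so the length check guarantees both the suffix and the
 * terminator fit inside CRYPTO_MAX_ALG_NAME.
 */
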
int crypto_register_alg(struct crypto_alg *alg)
{
	int ret;
	struct crypto_alg *q;

	/* cra_alignmask must be one less than a power of two. */
	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
		return -EINVAL;

	if (alg->cra_alignmask & alg->cra_blocksize)
		return -EINVAL;

	if (alg->cra_blocksize > PAGE_SIZE / 8)
		return -EINVAL;

	if (alg->cra_priority < 0)
		return -EINVAL;

	ret = crypto_set_driver_name(alg);
	if (unlikely(ret))
		return ret;

	down_write(&crypto_alg_sem);

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (!strcmp(q->cra_driver_name, alg->cra_driver_name)) {
			ret = -EEXIST;
			goto out;
		}
	}

	list_add(&alg->cra_list, &crypto_alg_list);
out:
	up_write(&crypto_alg_sem);
	return ret;
}

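/*
 * Illustrative sketch (not part of the original file): a module providing an
 * algorithm typically fills in a struct crypto_alg and registers it at load
 * time.  Every name and size below is an assumption made up for the example,
 * and the type-specific callbacks in cra_u are omitted for brevity.
 *
 *	static struct crypto_alg example_alg = {
 *		.cra_name	= "example",
 *		.cra_flags	= CRYPTO_ALG_TYPE_CIPHER,
 *		.cra_blocksize	= 16,
 *		.cra_ctxsize	= sizeof(struct example_ctx),
 *		.cra_module	= THIS_MODULE,
 *		.cra_list	= LIST_HEAD_INIT(example_alg.cra_list),
 *	};
 *
 *	static int __init example_init(void)
 *	{
 *		return crypto_register_alg(&example_alg);
 *	}
 *
 *	static void __exit example_exit(void)
 *	{
 *		crypto_unregister_alg(&example_alg);
 *	}
 *
 * Leaving cra_driver_name empty lets crypto_set_driver_name() fill in
 * "example-generic"; registering a second algorithm with the same driver
 * name fails with -EEXIST.
 */
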
int crypto_unregister_alg(struct crypto_alg *alg)
{
	int ret = -ENOENT;
	struct crypto_alg *q;

	BUG_ON(!alg->cra_module);

	down_write(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (alg == q) {
			list_del(&alg->cra_list);
			ret = 0;
			goto out;
		}
	}
out:
	up_write(&crypto_alg_sem);
	return ret;
}

int crypto_alg_available(const char *name, u32 flags)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name);

	if (alg) {
		crypto_alg_put(alg);
		ret = 1;
	}

	return ret;
}

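/*
 * Illustrative usage (not part of the original file): callers use this as a
 * simple yes/no probe before configuring a feature, e.g.
 *
 *	if (!crypto_alg_available("sha1", 0))
 *		return -EINVAL;
 *
 * The algorithm name and error code are assumptions for the example.
 */
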
static int __init init_crypto(void)
{
	printk(KERN_INFO "Initializing Cryptographic API\n");
	crypto_init_proc();
	return 0;
}

__initcall(init_crypto);

EXPORT_SYMBOL_GPL(crypto_register_alg);
EXPORT_SYMBOL_GPL(crypto_unregister_alg);
EXPORT_SYMBOL_GPL(crypto_alloc_tfm);
EXPORT_SYMBOL_GPL(crypto_free_tfm);
EXPORT_SYMBOL_GPL(crypto_alg_available);