/*
 * chainiv: Chain IV Generator
 *
 * Generate IVs simply by using the last block of the previous encryption.
 * This is mainly useful for CBC with a synchronous algorithm.
 *
 * Copyright (c) 2007 Herbert Xu <[email protected]>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
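
/*
 * Overview: the generated IV chain looks roughly like this, assuming the
 * underlying mode (e.g. CBC) leaves its final ciphertext block in the IV
 * buffer (req->info) after encryption:
 *
 *	IV[0]     = random bytes from the default RNG (chainiv_init_common)
 *	IV[n + 1] = contents of subreq->info after request n completes,
 *		    i.e. the last ciphertext block of request n
 *
 * The synchronous variant below serialises requests with a spinlock; the
 * asynchronous variant serialises them with an in-use bit, a request
 * queue and a work item on kcrypto_wq.
 */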

#include <crypto/internal/skcipher.h>
#include <crypto/rng.h>
#include <crypto/crypto_wq.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/spinlock.h>
#include <linux/string.h>
#include <linux/workqueue.h>

enum {
	CHAINIV_STATE_INUSE = 0,
};

struct chainiv_ctx {
	spinlock_t lock;
	char iv[];
};

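/*
 * Per-tfm state for the asynchronous variant: the CHAINIV_STATE_INUSE
 * bit in @state marks the generator as busy, @queue holds postponed
 * requests, @postponed is the work item that drains them, and the
 * flexible @iv array at the end stores the chained IV itself.
 */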
struct async_chainiv_ctx {
	unsigned long state;

	spinlock_t lock;
	int err;

	struct crypto_queue queue;
	struct work_struct postponed;

	char iv[];
};

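/*
 * Synchronous generate-IV-and-encrypt: under ctx->lock (bottom halves
 * disabled), hand out the current chain IV via req->giv, encrypt with
 * that IV, then save whatever the cipher left in subreq->info as the IV
 * for the next request.  MAY_SLEEP is masked off because the whole
 * operation runs inside the spinlock.
 */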
static int chainiv_givencrypt(struct skcipher_givcrypt_request *req)
{
	struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
	struct chainiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
	struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req);
	unsigned int ivsize;
	int err;

	ablkcipher_request_set_tfm(subreq, skcipher_geniv_cipher(geniv));
	ablkcipher_request_set_callback(subreq, req->creq.base.flags &
						~CRYPTO_TFM_REQ_MAY_SLEEP,
					req->creq.base.complete,
					req->creq.base.data);
	ablkcipher_request_set_crypt(subreq, req->creq.src, req->creq.dst,
				     req->creq.nbytes, req->creq.info);

	spin_lock_bh(&ctx->lock);

	ivsize = crypto_ablkcipher_ivsize(geniv);

	memcpy(req->giv, ctx->iv, ivsize);
	memcpy(subreq->info, ctx->iv, ivsize);

	err = crypto_ablkcipher_encrypt(subreq);
	if (err)
		goto unlock;

	memcpy(ctx->iv, subreq->info, ivsize);

unlock:
	spin_unlock_bh(&ctx->lock);

	return err;
}

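/*
 * Common init for both variants: reserve room for the nested
 * ablkcipher_request in the tfm's request context and, when a default
 * RNG could be obtained, seed the initial chain IV with random bytes.
 * A NULL @iv means the RNG was unavailable and no givencrypt handler
 * has been installed by the caller.
 */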
static int chainiv_init_common(struct crypto_tfm *tfm, char iv[])
{
	struct crypto_ablkcipher *geniv = __crypto_ablkcipher_cast(tfm);
	int err = 0;

	tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request);

	if (iv) {
		err = crypto_rng_get_bytes(crypto_default_rng, iv,
					   crypto_ablkcipher_ivsize(geniv));
		crypto_put_default_rng();
	}

	return err ?: skcipher_geniv_init(tfm);
}

static int chainiv_init(struct crypto_tfm *tfm)
{
	struct crypto_ablkcipher *geniv = __crypto_ablkcipher_cast(tfm);
	struct chainiv_ctx *ctx = crypto_tfm_ctx(tfm);
	char *iv;

	spin_lock_init(&ctx->lock);

	iv = NULL;
	if (!crypto_get_default_rng()) {
		crypto_ablkcipher_crt(geniv)->givencrypt = chainiv_givencrypt;
		iv = ctx->iv;
	}

	return chainiv_init_common(tfm, iv);
}

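/*
 * Release or re-take the in-use bit once a request has been handled.
 * If more requests were queued in the meantime, re-acquire the bit and
 * kick the postponed work on kcrypto_wq so they get processed; the
 * smp_mb__before_atomic() makes the preceding state updates visible
 * before the bit is cleared.
 */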
static int async_chainiv_schedule_work(struct async_chainiv_ctx *ctx)
{
	int queued;
	int err = ctx->err;

	if (!ctx->queue.qlen) {
		smp_mb__before_atomic();
		clear_bit(CHAINIV_STATE_INUSE, &ctx->state);

		if (!ctx->queue.qlen ||
		    test_and_set_bit(CHAINIV_STATE_INUSE, &ctx->state))
			goto out;
	}

	queued = queue_work(kcrypto_wq, &ctx->postponed);
	BUG_ON(!queued);

out:
	return err;
}

static int async_chainiv_postpone_request(struct skcipher_givcrypt_request *req)
{
	struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
	struct async_chainiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
	int err;

	spin_lock_bh(&ctx->lock);
	err = skcipher_enqueue_givcrypt(&ctx->queue, req);
	spin_unlock_bh(&ctx->lock);

	if (test_and_set_bit(CHAINIV_STATE_INUSE, &ctx->state))
		return err;

	ctx->err = err;
	return async_chainiv_schedule_work(ctx);
}

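/*
 * Do the actual IV hand-out and encryption for the async variant.  The
 * caller must own CHAINIV_STATE_INUSE; the result is stashed in
 * ctx->err and returned via async_chainiv_schedule_work(), which also
 * releases the busy state or reschedules queued work.
 */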
static int async_chainiv_givencrypt_tail(struct skcipher_givcrypt_request *req)
{
	struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
	struct async_chainiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
	struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req);
	unsigned int ivsize = crypto_ablkcipher_ivsize(geniv);

	memcpy(req->giv, ctx->iv, ivsize);
	memcpy(subreq->info, ctx->iv, ivsize);

	ctx->err = crypto_ablkcipher_encrypt(subreq);
	if (ctx->err)
		goto out;

	memcpy(ctx->iv, subreq->info, ivsize);

out:
	return async_chainiv_schedule_work(ctx);
}

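/*
 * Fast path for the async variant: if the generator is idle and the
 * queue is empty, encrypt immediately; otherwise postpone the request
 * onto the queue so the IV chaining order is preserved.
 */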
static int async_chainiv_givencrypt(struct skcipher_givcrypt_request *req)
{
	struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
	struct async_chainiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
	struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req);

	ablkcipher_request_set_tfm(subreq, skcipher_geniv_cipher(geniv));
	ablkcipher_request_set_callback(subreq, req->creq.base.flags,
					req->creq.base.complete,
					req->creq.base.data);
	ablkcipher_request_set_crypt(subreq, req->creq.src, req->creq.dst,
				     req->creq.nbytes, req->creq.info);

	if (test_and_set_bit(CHAINIV_STATE_INUSE, &ctx->state))
		goto postpone;

	if (ctx->queue.qlen) {
		clear_bit(CHAINIV_STATE_INUSE, &ctx->state);
		goto postpone;
	}

	return async_chainiv_givencrypt_tail(req);

postpone:
	return async_chainiv_postpone_request(req);
}

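/*
 * Work item that drains the postponed queue, one request per
 * invocation, and completes it with bottom halves disabled to match
 * the context crypto completion callbacks normally run in.
 */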
static void async_chainiv_do_postponed(struct work_struct *work)
{
	struct async_chainiv_ctx *ctx = container_of(work,
						     struct async_chainiv_ctx,
						     postponed);
	struct skcipher_givcrypt_request *req;
	struct ablkcipher_request *subreq;
	int err;

	/* Only handle one request at a time to avoid hogging keventd. */
	spin_lock_bh(&ctx->lock);
	req = skcipher_dequeue_givcrypt(&ctx->queue);
	spin_unlock_bh(&ctx->lock);

	if (!req) {
		async_chainiv_schedule_work(ctx);
		return;
	}

	subreq = skcipher_givcrypt_reqctx(req);
	subreq->base.flags |= CRYPTO_TFM_REQ_MAY_SLEEP;

	err = async_chainiv_givencrypt_tail(req);

	local_bh_disable();
	skcipher_givcrypt_complete(req, err);
	local_bh_enable();
}

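/*
 * Async variant init: in addition to the common setup, prepare the
 * request queue (depth 100) and the work item used to drain it.
 */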
static int async_chainiv_init(struct crypto_tfm *tfm)
{
	struct crypto_ablkcipher *geniv = __crypto_ablkcipher_cast(tfm);
	struct async_chainiv_ctx *ctx = crypto_tfm_ctx(tfm);
	char *iv;

	spin_lock_init(&ctx->lock);

	crypto_init_queue(&ctx->queue, 100);
	INIT_WORK(&ctx->postponed, async_chainiv_do_postponed);

	iv = NULL;
	if (!crypto_get_default_rng()) {
		crypto_ablkcipher_crt(geniv)->givencrypt =
			async_chainiv_givencrypt;
		iv = ctx->iv;
	}

	return chainiv_init_common(tfm, iv);
}

static void async_chainiv_exit(struct crypto_tfm *tfm)
{
	struct async_chainiv_ctx *ctx = crypto_tfm_ctx(tfm);

	BUG_ON(test_bit(CHAINIV_STATE_INUSE, &ctx->state) || ctx->queue.qlen);

	skcipher_geniv_exit(tfm);
}

static struct crypto_template chainiv_tmpl;

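/*
 * Template instantiation: wrap the underlying cipher via
 * skcipher_geniv_alloc() and pick the synchronous or asynchronous
 * implementation depending on whether the user demanded a synchronous
 * algorithm.  The context size is extended by ivsize so the chained IV
 * can live in the flexible array at the end of the context.
 */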
static struct crypto_instance *chainiv_alloc(struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct crypto_instance *inst;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return ERR_CAST(algt);

	inst = skcipher_geniv_alloc(&chainiv_tmpl, tb, 0, 0);
	if (IS_ERR(inst))
		goto out;

	inst->alg.cra_init = chainiv_init;
	inst->alg.cra_exit = skcipher_geniv_exit;

	inst->alg.cra_ctxsize = sizeof(struct chainiv_ctx);

	if (!crypto_requires_sync(algt->type, algt->mask)) {
		inst->alg.cra_flags |= CRYPTO_ALG_ASYNC;

		inst->alg.cra_init = async_chainiv_init;
		inst->alg.cra_exit = async_chainiv_exit;

		inst->alg.cra_ctxsize = sizeof(struct async_chainiv_ctx);
	}

	inst->alg.cra_ctxsize += inst->alg.cra_ablkcipher.ivsize;

out:
	return inst;
}

static struct crypto_template chainiv_tmpl = {
	.name = "chainiv",
	.alloc = chainiv_alloc,
	.free = skcipher_geniv_free,
	.module = THIS_MODULE,
};

static int __init chainiv_module_init(void)
{
	return crypto_register_template(&chainiv_tmpl);
}

static void chainiv_module_exit(void)
{
	crypto_unregister_template(&chainiv_tmpl);
}

module_init(chainiv_module_init);
module_exit(chainiv_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Chain IV Generator");
MODULE_ALIAS_CRYPTO("chainiv");