/*
 * CTR: Counter mode
 *
 * (C) Copyright IBM Corp. 2007 - Joy Latten <[email protected]>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/algapi.h>
#include <crypto/ctr.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/random.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

struct crypto_ctr_ctx {
	struct crypto_cipher *child;
};

struct crypto_rfc3686_ctx {
	struct crypto_skcipher *child;
	u8 nonce[CTR_RFC3686_NONCE_SIZE];
};

struct crypto_rfc3686_req_ctx {
	u8 iv[CTR_RFC3686_BLOCK_SIZE];
	struct skcipher_request subreq CRYPTO_MINALIGN_ATTR;
};

static int crypto_ctr_setkey(struct crypto_tfm *parent, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(parent);
	struct crypto_cipher *child = ctx->child;
	int err;

	crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) &
				CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(child, key, keylen);
	crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) &
			     CRYPTO_TFM_RES_MASK);

	return err;
}

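/*
 * Handle the final, partial block: encrypt the counter block into an
 * alignmask-aligned keystream buffer on the stack, XOR only the remaining
 * nbytes into dst, then bump the counter.
 */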
static void crypto_ctr_crypt_final(struct blkcipher_walk *walk,
				   struct crypto_cipher *tfm)
{
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	u8 *ctrblk = walk->iv;
	u8 tmp[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	crypto_cipher_encrypt_one(tfm, keystream, ctrblk);
	crypto_xor_cpy(dst, keystream, src, nbytes);

	crypto_inc(ctrblk, bsize);
}

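/*
 * Out-of-place walk (src != dst): generate the keystream directly into dst
 * and XOR the source into it, one full block at a time, so no temporary
 * buffer is needed.
 */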
static int crypto_ctr_crypt_segment(struct blkcipher_walk *walk,
				    struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	u8 *ctrblk = walk->iv;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), dst, ctrblk);
		crypto_xor(dst, src, bsize);

		/* increment counter in counterblock */
		crypto_inc(ctrblk, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

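/*
 * In-place walk (src == dst): the keystream must go through an aligned
 * temporary buffer first, since the buffer still holds the data to be XORed.
 */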
static int crypto_ctr_crypt_inplace(struct blkcipher_walk *walk,
				    struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *ctrblk = walk->iv;
	u8 *src = walk->src.virt.addr;
	u8 tmp[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), keystream, ctrblk);
		crypto_xor(src, keystream, bsize);

		/* increment counter in counterblock */
		crypto_inc(ctrblk, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

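/*
 * Walk the scatterlists one block at a time, dispatching to the in-place or
 * out-of-place helper; any remainder smaller than a block is handled by
 * crypto_ctr_crypt_final().  Encryption and decryption are identical in CTR
 * mode, so this routine serves as both.
 */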
static int crypto_ctr_crypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;
	struct crypto_blkcipher *tfm = desc->tfm;
	struct crypto_ctr_ctx *ctx = crypto_blkcipher_ctx(tfm);
	struct crypto_cipher *child = ctx->child;
	unsigned int bsize = crypto_cipher_blocksize(child);
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, bsize);

	while (walk.nbytes >= bsize) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_ctr_crypt_inplace(&walk, child);
		else
			nbytes = crypto_ctr_crypt_segment(&walk, child);

		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	if (walk.nbytes) {
		crypto_ctr_crypt_final(&walk, child);
		err = blkcipher_walk_done(desc, &walk, 0);
	}

	return err;
}

static int crypto_ctr_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
	struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_cipher *cipher;

	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	return 0;
}

static void crypto_ctr_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(ctx->child);
}

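/*
 * Template constructor for "ctr(cipher)": the underlying algorithm must be a
 * simple block cipher whose block size is at least 4 bytes and a multiple of
 * 4, so crypto_inc() stays aligned.  The resulting instance is a stream
 * cipher (cra_blocksize = 1) whose IV is one block of the wrapped cipher.
 */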
static struct crypto_instance *crypto_ctr_alloc(struct rtattr **tb)
{
	struct crypto_instance *inst;
	struct crypto_attr_type *algt;
	struct crypto_alg *alg;
	u32 mask;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
	if (err)
		return ERR_PTR(err);

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return ERR_CAST(algt);

	mask = CRYPTO_ALG_TYPE_MASK |
		crypto_requires_off(algt->type, algt->mask,
				    CRYPTO_ALG_NEED_FALLBACK);

	alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_CIPHER, mask);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	/* Block size must be >= 4 bytes. */
	err = -EINVAL;
	if (alg->cra_blocksize < 4)
		goto out_put_alg;

	/* If this is false we'd fail the alignment of crypto_inc. */
	if (alg->cra_blocksize % 4)
		goto out_put_alg;

	inst = crypto_alloc_instance("ctr", alg);
	if (IS_ERR(inst))
		goto out;

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
	inst->alg.cra_priority = alg->cra_priority;
	inst->alg.cra_blocksize = 1;
	inst->alg.cra_alignmask = alg->cra_alignmask;
	inst->alg.cra_type = &crypto_blkcipher_type;

	inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize;
	inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize;
	inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize;

	inst->alg.cra_ctxsize = sizeof(struct crypto_ctr_ctx);

	inst->alg.cra_init = crypto_ctr_init_tfm;
	inst->alg.cra_exit = crypto_ctr_exit_tfm;

	inst->alg.cra_blkcipher.setkey = crypto_ctr_setkey;
	inst->alg.cra_blkcipher.encrypt = crypto_ctr_crypt;
	inst->alg.cra_blkcipher.decrypt = crypto_ctr_crypt;

out:
	crypto_mod_put(alg);
	return inst;

out_put_alg:
	inst = ERR_PTR(err);
	goto out;
}

static void crypto_ctr_free(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(inst);
}

static struct crypto_template crypto_ctr_tmpl = {
	.name = "ctr",
	.alloc = crypto_ctr_alloc,
	.free = crypto_ctr_free,
	.module = THIS_MODULE,
};

static int crypto_rfc3686_setkey(struct crypto_skcipher *parent,
				 const u8 *key, unsigned int keylen)
{
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_skcipher *child = ctx->child;
	int err;

	/* the nonce is stored in bytes at end of key */
	if (keylen < CTR_RFC3686_NONCE_SIZE)
		return -EINVAL;

	memcpy(ctx->nonce, key + (keylen - CTR_RFC3686_NONCE_SIZE),
	       CTR_RFC3686_NONCE_SIZE);

	keylen -= CTR_RFC3686_NONCE_SIZE;

	crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
					 CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(child, key, keylen);
	crypto_skcipher_set_flags(parent, crypto_skcipher_get_flags(child) &
				  CRYPTO_TFM_RES_MASK);

	return err;
}

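/*
 * Build the RFC 3686 counter block in the request context: 4-byte nonce
 * (saved at setkey time) || 8-byte per-request IV || 32-bit big-endian block
 * counter starting at 1, then hand the whole request to the underlying CTR
 * skcipher.
 */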
static int crypto_rfc3686_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *child = ctx->child;
	unsigned long align = crypto_skcipher_alignmask(tfm);
	struct crypto_rfc3686_req_ctx *rctx =
		(void *)PTR_ALIGN((u8 *)skcipher_request_ctx(req), align + 1);
	struct skcipher_request *subreq = &rctx->subreq;
	u8 *iv = rctx->iv;

	/* set up counter block */
	memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
	memcpy(iv + CTR_RFC3686_NONCE_SIZE, req->iv, CTR_RFC3686_IV_SIZE);

	/* initialize counter portion of counter block */
	*(__be32 *)(iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
		cpu_to_be32(1);

	skcipher_request_set_tfm(subreq, child);
	skcipher_request_set_callback(subreq, req->base.flags,
				      req->base.complete, req->base.data);
	skcipher_request_set_crypt(subreq, req->src, req->dst,
				   req->cryptlen, iv);

	return crypto_skcipher_encrypt(subreq);
}

static int crypto_rfc3686_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;
	unsigned long align;
	unsigned int reqsize;

	cipher = crypto_spawn_skcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	align = crypto_skcipher_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	reqsize = align + sizeof(struct crypto_rfc3686_req_ctx) +
		  crypto_skcipher_reqsize(cipher);
	crypto_skcipher_set_reqsize(tfm, reqsize);

	return 0;
}

static void crypto_rfc3686_exit_tfm(struct crypto_skcipher *tfm)
{
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->child);
}

static void crypto_rfc3686_free(struct skcipher_instance *inst)
{
	struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(spawn);
	kfree(inst);
}

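/*
 * Template constructor for "rfc3686(skcipher)": the wrapped algorithm must
 * itself be a stream cipher (cra_blocksize == 1) with a 16-byte IV, i.e. a
 * CTR instance.  The new instance expects the nonce appended to the key and
 * exposes the 8-byte RFC 3686 IV.
 */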
static int crypto_rfc3686_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct skcipher_instance *inst;
	struct skcipher_alg *alg;
	struct crypto_skcipher_spawn *spawn;
	const char *cipher_name;
	u32 mask;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask)
		return -EINVAL;

	cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	mask = crypto_requires_sync(algt->type, algt->mask) |
	       crypto_requires_off(algt->type, algt->mask,
				   CRYPTO_ALG_NEED_FALLBACK);

	spawn = skcipher_instance_ctx(inst);

	crypto_set_skcipher_spawn(spawn, skcipher_crypto_instance(inst));
	err = crypto_grab_skcipher(spawn, cipher_name, 0, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_skcipher_alg(spawn);

	/* We only support 16-byte blocks. */
	err = -EINVAL;
	if (crypto_skcipher_alg_ivsize(alg) != CTR_RFC3686_BLOCK_SIZE)
		goto err_drop_spawn;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto err_drop_spawn;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc3686(%s)", alg->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_drop_spawn;
	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc3686(%s)", alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto err_drop_spawn;

	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;

	inst->alg.ivsize = CTR_RFC3686_IV_SIZE;
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg) +
				CTR_RFC3686_NONCE_SIZE;
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg) +
				CTR_RFC3686_NONCE_SIZE;

	inst->alg.setkey = crypto_rfc3686_setkey;
	inst->alg.encrypt = crypto_rfc3686_crypt;
	inst->alg.decrypt = crypto_rfc3686_crypt;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc3686_ctx);

	inst->alg.init = crypto_rfc3686_init_tfm;
	inst->alg.exit = crypto_rfc3686_exit_tfm;

	inst->free = crypto_rfc3686_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		goto err_drop_spawn;

out:
	return err;

err_drop_spawn:
	crypto_drop_skcipher(spawn);
err_free_inst:
	kfree(inst);
	goto out;
}

static struct crypto_template crypto_rfc3686_tmpl = {
	.name = "rfc3686",
	.create = crypto_rfc3686_create,
	.module = THIS_MODULE,
};

static int __init crypto_ctr_module_init(void)
{
	int err;

	err = crypto_register_template(&crypto_ctr_tmpl);
	if (err)
		goto out;

	err = crypto_register_template(&crypto_rfc3686_tmpl);
	if (err)
		goto out_drop_ctr;

out:
	return err;

out_drop_ctr:
	crypto_unregister_template(&crypto_ctr_tmpl);
	goto out;
}

static void __exit crypto_ctr_module_exit(void)
{
	crypto_unregister_template(&crypto_rfc3686_tmpl);
	crypto_unregister_template(&crypto_ctr_tmpl);
}

module_init(crypto_ctr_module_init);
module_exit(crypto_ctr_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CTR Counter block mode");
MODULE_ALIAS_CRYPTO("rfc3686");
MODULE_ALIAS_CRYPTO("ctr");
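
/*
 * Illustrative only, not part of the original file and compiled out: a
 * minimal sketch of how a caller might use the "rfc3686(ctr(aes))" instance
 * produced by the templates above.  The key is the AES key followed by the
 * 4-byte nonce (see crypto_rfc3686_setkey()), and the per-request IV is the
 * 8-byte RFC 3686 IV.  The function name and buffer contents below are
 * hypothetical.
 */
#if 0
static int rfc3686_ctr_aes_example(void)
{
	/* 16-byte AES key followed by the 4-byte nonce */
	static const u8 key[16 + CTR_RFC3686_NONCE_SIZE] = { 0 };
	u8 iv[CTR_RFC3686_IV_SIZE] = { 0 };	/* per-message IV */
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	u8 *buf;
	int err;

	/* Ask for a synchronous instance so encrypt() completes inline. */
	tfm = crypto_alloc_skcipher("rfc3686(ctr(aes))", 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_skcipher_setkey(tfm, key, sizeof(key));
	if (err)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	/* Data must not live on the stack; use heap memory for the sglist. */
	buf = kzalloc(64, GFP_KERNEL);
	if (!buf) {
		err = -ENOMEM;
		goto out_free_req;
	}

	sg_init_one(&sg, buf, 64);
	skcipher_request_set_callback(req, 0, NULL, NULL);
	skcipher_request_set_crypt(req, &sg, &sg, 64, iv);

	err = crypto_skcipher_encrypt(req);	/* in-place CTR encryption */

	kfree(buf);
out_free_req:
	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return err;
}
#endif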