// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * CBC: Cipher Block Chaining mode
 *
 * Copyright (c) 2006-2016 Herbert Xu <[email protected]>
 */

#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/log2.h>
#include <linux/module.h>

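/*
 * CBC encryption with separate source and destination buffers:
 * C[i] = E_K(P[i] ^ C[i-1]), with walk->iv holding C[i-1] (the IV for the
 * first block).  Returns the number of bytes left unprocessed (the partial
 * tail, if any), which the caller hands back to skcipher_walk_done().
 */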
static int crypto_cbc_encrypt_segment(struct skcipher_walk *walk,
				      struct crypto_skcipher *skcipher)
{
	unsigned int bsize = crypto_skcipher_blocksize(skcipher);
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	struct crypto_cipher *cipher;
	struct crypto_tfm *tfm;
	u8 *iv = walk->iv;

	cipher = skcipher_cipher_simple(skcipher);
	tfm = crypto_cipher_tfm(cipher);
	fn = crypto_cipher_alg(cipher)->cia_encrypt;

	do {
		crypto_xor(iv, src, bsize);
		fn(tfm, dst, iv);
		memcpy(iv, dst, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

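/*
 * In-place variant: the XOR and block cipher run directly on the source
 * buffer, so the previous ciphertext block can be referenced in place rather
 * than copied; only the final ciphertext block is copied back into walk->iv
 * to chain into the next walk step.
 */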
static int crypto_cbc_encrypt_inplace(struct skcipher_walk *walk,
				      struct crypto_skcipher *skcipher)
{
	unsigned int bsize = crypto_skcipher_blocksize(skcipher);
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	struct crypto_cipher *cipher;
	struct crypto_tfm *tfm;
	u8 *iv = walk->iv;

	cipher = skcipher_cipher_simple(skcipher);
	tfm = crypto_cipher_tfm(cipher);
	fn = crypto_cipher_alg(cipher)->cia_encrypt;

	do {
		crypto_xor(src, iv, bsize);
		fn(tfm, src, src);
		iv = src;

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	memcpy(walk->iv, iv, bsize);

	return nbytes;
}

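/*
 * Top-level encrypt handler: walk the request in virtually mapped chunks and
 * pick the in-place or separate-buffer helper depending on whether the source
 * and destination addresses coincide.
 */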
static int crypto_cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			err = crypto_cbc_encrypt_inplace(&walk, skcipher);
		else
			err = crypto_cbc_encrypt_segment(&walk, skcipher);
		err = skcipher_walk_done(&walk, err);
	}

	return err;
}

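/*
 * CBC decryption with separate buffers: P[i] = D_K(C[i]) ^ C[i-1].  The
 * previous ciphertext block stays intact in the source buffer, so it is
 * tracked by pointer and only the last ciphertext block is saved into
 * walk->iv for chaining.
 */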
static int crypto_cbc_decrypt_segment(struct skcipher_walk *walk,
				      struct crypto_skcipher *skcipher)
{
	unsigned int bsize = crypto_skcipher_blocksize(skcipher);
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	struct crypto_cipher *cipher;
	struct crypto_tfm *tfm;
	u8 *iv = walk->iv;

	cipher = skcipher_cipher_simple(skcipher);
	tfm = crypto_cipher_tfm(cipher);
	fn = crypto_cipher_alg(cipher)->cia_decrypt;

	do {
		fn(tfm, dst, src);
		crypto_xor(dst, iv, bsize);
		iv = src;

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	memcpy(walk->iv, iv, bsize);

	return nbytes;
}

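/*
 * In-place decryption overwrites the ciphertext as it goes, so it walks the
 * chunk backwards from the last full block: each decrypted block is XORed
 * with the ciphertext block before it, which has not been clobbered yet.
 * The last ciphertext block is saved in last_iv up front so it can become
 * the chaining value for the next chunk.
 */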
static int crypto_cbc_decrypt_inplace(struct skcipher_walk *walk,
				      struct crypto_skcipher *skcipher)
{
	unsigned int bsize = crypto_skcipher_blocksize(skcipher);
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 last_iv[MAX_CIPHER_BLOCKSIZE];
	struct crypto_cipher *cipher;
	struct crypto_tfm *tfm;

	cipher = skcipher_cipher_simple(skcipher);
	tfm = crypto_cipher_tfm(cipher);
	fn = crypto_cipher_alg(cipher)->cia_decrypt;

	/* Start of the last block. */
	src += nbytes - (nbytes & (bsize - 1)) - bsize;
	memcpy(last_iv, src, bsize);

	for (;;) {
		fn(tfm, src, src);
		if ((nbytes -= bsize) < bsize)
			break;
		crypto_xor(src, src - bsize, bsize);
		src -= bsize;
	}

	crypto_xor(src, walk->iv, bsize);
	memcpy(walk->iv, last_iv, bsize);

	return nbytes;
}

static int crypto_cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			err = crypto_cbc_decrypt_inplace(&walk, skcipher);
		else
			err = crypto_cbc_decrypt_segment(&walk, skcipher);
		err = skcipher_walk_done(&walk, err);
	}

	return err;
}

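/*
 * Template constructor: "cbc(<cipher>)" wraps a single-block cipher (for
 * example "cbc(aes)") using the simple skcipher instance helpers.  The
 * underlying cipher must have a power-of-two block size, which the in-place
 * decryption path relies on for its masking arithmetic.
 */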
static int crypto_cbc_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_alg *alg;
	int err;

	inst = skcipher_alloc_instance_simple(tmpl, tb);
	if (IS_ERR(inst))
		return PTR_ERR(inst);

	alg = skcipher_ialg_simple(inst);

	err = -EINVAL;
	if (!is_power_of_2(alg->cra_blocksize))
		goto out_free_inst;

	inst->alg.encrypt = crypto_cbc_encrypt;
	inst->alg.decrypt = crypto_cbc_decrypt;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
out_free_inst:
		inst->free(inst);
	}

	return err;
}

static struct crypto_template crypto_cbc_tmpl = {
	.name = "cbc",
	.create = crypto_cbc_create,
	.module = THIS_MODULE,
};

static int __init crypto_cbc_module_init(void)
{
	return crypto_register_template(&crypto_cbc_tmpl);
}

static void __exit crypto_cbc_module_exit(void)
{
	crypto_unregister_template(&crypto_cbc_tmpl);
}

subsys_initcall(crypto_cbc_module_init);
module_exit(crypto_cbc_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CBC block cipher mode of operation");
MODULE_ALIAS_CRYPTO("cbc");
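
/*
 * Illustrative usage sketch (not part of this file): once the "cbc" template
 * is registered, callers instantiate it by name through the skcipher API,
 * for example:
 *
 *	struct crypto_skcipher *tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *
 *	if (!IS_ERR(tfm)) {
 *		crypto_skcipher_setkey(tfm, key, keylen);
 *		// allocate a skcipher_request, set src/dst/iv with
 *		// skcipher_request_set_crypt(), then call
 *		// crypto_skcipher_encrypt() or crypto_skcipher_decrypt()
 *		crypto_free_skcipher(tfm);
 *	}
 *
 * Error handling and request setup are abbreviated; see the kernel crypto
 * API documentation for a complete example.
 */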