// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Glue Code for assembler optimized version of 3DES
 *
 * Copyright © 2014 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
 *
 * CBC & ECB parts based on code (crypto/cbc.c,ecb.c) by:
 *   Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */
#include <crypto/algapi.h>
#include <crypto/des.h>
#include <crypto/internal/skcipher.h>
#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/types.h>
19 struct des3_ede_x86_ctx {
20 struct des3_ede_ctx enc;
21 struct des3_ede_ctx dec;
24 /* regular block cipher functions */
25 asmlinkage void des3_ede_x86_64_crypt_blk(const u32 *expkey, u8 *dst,
28 /* 3-way parallel cipher functions */
29 asmlinkage void des3_ede_x86_64_crypt_blk_3way(const u32 *expkey, u8 *dst,
32 static inline void des3_ede_enc_blk(struct des3_ede_x86_ctx *ctx, u8 *dst,
35 u32 *enc_ctx = ctx->enc.expkey;
37 des3_ede_x86_64_crypt_blk(enc_ctx, dst, src);
40 static inline void des3_ede_dec_blk(struct des3_ede_x86_ctx *ctx, u8 *dst,
43 u32 *dec_ctx = ctx->dec.expkey;
45 des3_ede_x86_64_crypt_blk(dec_ctx, dst, src);
48 static inline void des3_ede_enc_blk_3way(struct des3_ede_x86_ctx *ctx, u8 *dst,
51 u32 *enc_ctx = ctx->enc.expkey;
53 des3_ede_x86_64_crypt_blk_3way(enc_ctx, dst, src);
56 static inline void des3_ede_dec_blk_3way(struct des3_ede_x86_ctx *ctx, u8 *dst,
59 u32 *dec_ctx = ctx->dec.expkey;
61 des3_ede_x86_64_crypt_blk_3way(dec_ctx, dst, src);
64 static void des3_ede_x86_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
66 des3_ede_enc_blk(crypto_tfm_ctx(tfm), dst, src);
69 static void des3_ede_x86_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
71 des3_ede_dec_blk(crypto_tfm_ctx(tfm), dst, src);
74 static int ecb_crypt(struct skcipher_request *req, const u32 *expkey)
76 const unsigned int bsize = DES3_EDE_BLOCK_SIZE;
77 struct skcipher_walk walk;
81 err = skcipher_walk_virt(&walk, req, false);
83 while ((nbytes = walk.nbytes)) {
84 u8 *wsrc = walk.src.virt.addr;
85 u8 *wdst = walk.dst.virt.addr;
87 /* Process four block batch */
88 if (nbytes >= bsize * 3) {
90 des3_ede_x86_64_crypt_blk_3way(expkey, wdst,
96 } while (nbytes >= bsize * 3);
102 /* Handle leftovers */
104 des3_ede_x86_64_crypt_blk(expkey, wdst, wsrc);
109 } while (nbytes >= bsize);
112 err = skcipher_walk_done(&walk, nbytes);
118 static int ecb_encrypt(struct skcipher_request *req)
120 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
121 struct des3_ede_x86_ctx *ctx = crypto_skcipher_ctx(tfm);
123 return ecb_crypt(req, ctx->enc.expkey);
126 static int ecb_decrypt(struct skcipher_request *req)
128 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
129 struct des3_ede_x86_ctx *ctx = crypto_skcipher_ctx(tfm);
131 return ecb_crypt(req, ctx->dec.expkey);
134 static unsigned int __cbc_encrypt(struct des3_ede_x86_ctx *ctx,
135 struct skcipher_walk *walk)
137 unsigned int bsize = DES3_EDE_BLOCK_SIZE;
138 unsigned int nbytes = walk->nbytes;
139 u64 *src = (u64 *)walk->src.virt.addr;
140 u64 *dst = (u64 *)walk->dst.virt.addr;
141 u64 *iv = (u64 *)walk->iv;
145 des3_ede_enc_blk(ctx, (u8 *)dst, (u8 *)dst);
151 } while (nbytes >= bsize);
153 *(u64 *)walk->iv = *iv;
157 static int cbc_encrypt(struct skcipher_request *req)
159 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
160 struct des3_ede_x86_ctx *ctx = crypto_skcipher_ctx(tfm);
161 struct skcipher_walk walk;
165 err = skcipher_walk_virt(&walk, req, false);
167 while ((nbytes = walk.nbytes)) {
168 nbytes = __cbc_encrypt(ctx, &walk);
169 err = skcipher_walk_done(&walk, nbytes);
175 static unsigned int __cbc_decrypt(struct des3_ede_x86_ctx *ctx,
176 struct skcipher_walk *walk)
178 unsigned int bsize = DES3_EDE_BLOCK_SIZE;
179 unsigned int nbytes = walk->nbytes;
180 u64 *src = (u64 *)walk->src.virt.addr;
181 u64 *dst = (u64 *)walk->dst.virt.addr;
185 /* Start of the last block. */
186 src += nbytes / bsize - 1;
187 dst += nbytes / bsize - 1;
191 /* Process four block batch */
192 if (nbytes >= bsize * 3) {
194 nbytes -= bsize * 3 - bsize;
201 des3_ede_dec_blk_3way(ctx, (u8 *)dst, (u8 *)src);
213 } while (nbytes >= bsize * 3);
216 /* Handle leftovers */
218 des3_ede_dec_blk(ctx, (u8 *)dst, (u8 *)src);
230 *dst ^= *(u64 *)walk->iv;
231 *(u64 *)walk->iv = last_iv;
236 static int cbc_decrypt(struct skcipher_request *req)
238 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
239 struct des3_ede_x86_ctx *ctx = crypto_skcipher_ctx(tfm);
240 struct skcipher_walk walk;
244 err = skcipher_walk_virt(&walk, req, false);
246 while ((nbytes = walk.nbytes)) {
247 nbytes = __cbc_decrypt(ctx, &walk);
248 err = skcipher_walk_done(&walk, nbytes);
254 static int des3_ede_x86_setkey(struct crypto_tfm *tfm, const u8 *key,
257 struct des3_ede_x86_ctx *ctx = crypto_tfm_ctx(tfm);
261 err = des3_ede_expand_key(&ctx->enc, key, keylen);
262 if (err == -ENOKEY) {
263 if (crypto_tfm_get_flags(tfm) & CRYPTO_TFM_REQ_FORBID_WEAK_KEYS)
270 memset(ctx, 0, sizeof(*ctx));
274 /* Fix encryption context for this implementation and form decryption
276 j = DES3_EDE_EXPKEY_WORDS - 2;
277 for (i = 0; i < DES3_EDE_EXPKEY_WORDS; i += 2, j -= 2) {
278 tmp = ror32(ctx->enc.expkey[i + 1], 4);
279 ctx->enc.expkey[i + 1] = tmp;
281 ctx->dec.expkey[j + 0] = ctx->enc.expkey[i + 0];
282 ctx->dec.expkey[j + 1] = tmp;
288 static int des3_ede_x86_setkey_skcipher(struct crypto_skcipher *tfm,
292 return des3_ede_x86_setkey(&tfm->base, key, keylen);
295 static struct crypto_alg des3_ede_cipher = {
296 .cra_name = "des3_ede",
297 .cra_driver_name = "des3_ede-asm",
299 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
300 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
301 .cra_ctxsize = sizeof(struct des3_ede_x86_ctx),
303 .cra_module = THIS_MODULE,
306 .cia_min_keysize = DES3_EDE_KEY_SIZE,
307 .cia_max_keysize = DES3_EDE_KEY_SIZE,
308 .cia_setkey = des3_ede_x86_setkey,
309 .cia_encrypt = des3_ede_x86_encrypt,
310 .cia_decrypt = des3_ede_x86_decrypt,
315 static struct skcipher_alg des3_ede_skciphers[] = {
317 .base.cra_name = "ecb(des3_ede)",
318 .base.cra_driver_name = "ecb-des3_ede-asm",
319 .base.cra_priority = 300,
320 .base.cra_blocksize = DES3_EDE_BLOCK_SIZE,
321 .base.cra_ctxsize = sizeof(struct des3_ede_x86_ctx),
322 .base.cra_module = THIS_MODULE,
323 .min_keysize = DES3_EDE_KEY_SIZE,
324 .max_keysize = DES3_EDE_KEY_SIZE,
325 .setkey = des3_ede_x86_setkey_skcipher,
326 .encrypt = ecb_encrypt,
327 .decrypt = ecb_decrypt,
329 .base.cra_name = "cbc(des3_ede)",
330 .base.cra_driver_name = "cbc-des3_ede-asm",
331 .base.cra_priority = 300,
332 .base.cra_blocksize = DES3_EDE_BLOCK_SIZE,
333 .base.cra_ctxsize = sizeof(struct des3_ede_x86_ctx),
334 .base.cra_module = THIS_MODULE,
335 .min_keysize = DES3_EDE_KEY_SIZE,
336 .max_keysize = DES3_EDE_KEY_SIZE,
337 .ivsize = DES3_EDE_BLOCK_SIZE,
338 .setkey = des3_ede_x86_setkey_skcipher,
339 .encrypt = cbc_encrypt,
340 .decrypt = cbc_decrypt,
344 static bool is_blacklisted_cpu(void)
346 if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL)
349 if (boot_cpu_data.x86 == 0x0f) {
351 * On Pentium 4, des3_ede-x86_64 is slower than generic C
352 * implementation because use of 64bit rotates (which are really
353 * slow on P4). Therefore blacklist P4s.
362 module_param(force, int, 0);
363 MODULE_PARM_DESC(force, "Force module load, ignore CPU blacklist");
365 static int __init des3_ede_x86_init(void)
369 if (!force && is_blacklisted_cpu()) {
370 pr_info("des3_ede-x86_64: performance on this CPU would be suboptimal: disabling des3_ede-x86_64.\n");
374 err = crypto_register_alg(&des3_ede_cipher);
378 err = crypto_register_skciphers(des3_ede_skciphers,
379 ARRAY_SIZE(des3_ede_skciphers));
381 crypto_unregister_alg(&des3_ede_cipher);
386 static void __exit des3_ede_x86_fini(void)
388 crypto_unregister_alg(&des3_ede_cipher);
389 crypto_unregister_skciphers(des3_ede_skciphers,
390 ARRAY_SIZE(des3_ede_skciphers));
393 module_init(des3_ede_x86_init);
394 module_exit(des3_ede_x86_fini);
396 MODULE_LICENSE("GPL");
397 MODULE_DESCRIPTION("Triple DES EDE Cipher Algorithm, asm optimized");
398 MODULE_ALIAS_CRYPTO("des3_ede");
399 MODULE_ALIAS_CRYPTO("des3_ede-asm");