// SPDX-License-Identifier: GPL-2.0
/* Copyright (C) 2012-2019 ARM Limited (or its affiliates). */

#include <linux/kernel.h>
#include <linux/module.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <crypto/internal/des.h>
#include <crypto/xts.h>
#include <crypto/sm4.h>
#include <crypto/scatterwalk.h>

#include "cc_driver.h"
#include "cc_lli_defs.h"
#include "cc_buffer_mgr.h"
#include "cc_cipher.h"
#include "cc_request_mgr.h"

#define MAX_SKCIPHER_SEQ_LEN 6

#define template_skcipher       template_u.skcipher

struct cc_user_key_info {
        u8 *key;
        dma_addr_t key_dma_addr;
};

struct cc_hw_key_info {
        enum cc_hw_crypto_key key1_slot;
        enum cc_hw_crypto_key key2_slot;
};

struct cc_cpp_key_info {
        u8 slot;
        enum cc_cpp_alg alg;
};

enum cc_key_type {
        CC_UNPROTECTED_KEY,             /* User key */
        CC_HW_PROTECTED_KEY,            /* HW (FDE) key */
        CC_POLICY_PROTECTED_KEY,        /* CPP key */
        CC_INVALID_PROTECTED_KEY        /* Invalid key */
};

struct cc_cipher_ctx {
        struct cc_drvdata *drvdata;
        int keylen;
        int cipher_mode;
        int flow_mode;
        unsigned int flags;
        enum cc_key_type key_type;
        struct cc_user_key_info user;
        union {
                struct cc_hw_key_info hw;
                struct cc_cpp_key_info cpp;
        };
        struct crypto_shash *shash_tfm;
        struct crypto_skcipher *fallback_tfm;
        bool fallback_on;
};

static void cc_cipher_complete(struct device *dev, void *cc_req, int err);

static inline enum cc_key_type cc_key_type(struct crypto_tfm *tfm)
{
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);

        return ctx_p->key_type;
}

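/*
 * Check that @size is a valid key size for the configured flow mode
 * (AES/DES/SM4) and cipher mode; XTS and ESSIV additionally accept
 * double-sized (two-key) AES keys. Returns 0 if valid, -EINVAL otherwise.
 */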
static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size)
{
        switch (ctx_p->flow_mode) {
        case S_DIN_to_AES:
                switch (size) {
                case CC_AES_128_BIT_KEY_SIZE:
                case CC_AES_192_BIT_KEY_SIZE:
                        if (ctx_p->cipher_mode != DRV_CIPHER_XTS)
                                return 0;
                        break;
                case CC_AES_256_BIT_KEY_SIZE:
                        return 0;
                case (CC_AES_192_BIT_KEY_SIZE * 2):
                case (CC_AES_256_BIT_KEY_SIZE * 2):
                        if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
                            ctx_p->cipher_mode == DRV_CIPHER_ESSIV)
                                return 0;
                        break;
                default:
                        break;
                }
                break;
        case S_DIN_to_DES:
                if (size == DES3_EDE_KEY_SIZE || size == DES_KEY_SIZE)
                        return 0;
                break;
        case S_DIN_to_SM4:
                if (size == SM4_KEY_SIZE)
                        return 0;
                break;
        default:
                break;
        }
        return -EINVAL;
}

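/*
 * Check that the request length is valid for the configured mode:
 * stream-like modes (CTR, OFB) accept any length, CTS and XTS need at
 * least one block, and plain block modes must be block aligned.
 */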
static int validate_data_size(struct cc_cipher_ctx *ctx_p,
                              unsigned int size)
{
        switch (ctx_p->flow_mode) {
        case S_DIN_to_AES:
                switch (ctx_p->cipher_mode) {
                case DRV_CIPHER_XTS:
                case DRV_CIPHER_CBC_CTS:
                        if (size >= AES_BLOCK_SIZE)
                                return 0;
                        break;
                case DRV_CIPHER_OFB:
                case DRV_CIPHER_CTR:
                        return 0;
                case DRV_CIPHER_ECB:
                case DRV_CIPHER_CBC:
                case DRV_CIPHER_ESSIV:
                        if (IS_ALIGNED(size, AES_BLOCK_SIZE))
                                return 0;
                        break;
                default:
                        break;
                }
                break;
        case S_DIN_to_DES:
                if (IS_ALIGNED(size, DES_BLOCK_SIZE))
                        return 0;
                break;
        case S_DIN_to_SM4:
                switch (ctx_p->cipher_mode) {
                case DRV_CIPHER_CTR:
                        return 0;
                case DRV_CIPHER_ECB:
                case DRV_CIPHER_CBC:
                        if (IS_ALIGNED(size, SM4_BLOCK_SIZE))
                                return 0;
                        break;
                default:
                        break;
                }
                break;
        default:
                break;
        }
        return -EINVAL;
}

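/*
 * One-time tfm setup: allocate and DMA-map the key buffer and, for
 * ESSIV, allocate the sha256 tfm used to derive key2 plus an optional
 * software fallback for key sizes the hardware cannot handle.
 */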
static int cc_cipher_init(struct crypto_tfm *tfm)
{
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct cc_crypto_alg *cc_alg =
                        container_of(tfm->__crt_alg, struct cc_crypto_alg,
                                     skcipher_alg.base);
        struct device *dev = drvdata_to_dev(cc_alg->drvdata);
        unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
        unsigned int fallback_req_size = 0;

        dev_dbg(dev, "Initializing context @%p for %s\n", ctx_p,
                crypto_tfm_alg_name(tfm));

        ctx_p->cipher_mode = cc_alg->cipher_mode;
        ctx_p->flow_mode = cc_alg->flow_mode;
        ctx_p->drvdata = cc_alg->drvdata;

        if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
                const char *name = crypto_tfm_alg_name(tfm);

                /* Alloc hash tfm for essiv */
                ctx_p->shash_tfm = crypto_alloc_shash("sha256", 0, 0);
                if (IS_ERR(ctx_p->shash_tfm)) {
                        dev_err(dev, "Error allocating hash tfm for ESSIV.\n");
                        return PTR_ERR(ctx_p->shash_tfm);
                }
                max_key_buf_size <<= 1;

                /* Alloc fallback tfm for essiv when key size != 256 bit */
                ctx_p->fallback_tfm =
                        crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_ASYNC);

                if (IS_ERR(ctx_p->fallback_tfm)) {
                        /* Note we're still allowing registration with no fallback since it's
                         * better to have most modes supported than none at all.
                         */
                        dev_warn(dev, "Error allocating fallback algo %s. Some modes may not be available.\n",
                                 name);
                        ctx_p->fallback_tfm = NULL;
                } else {
                        fallback_req_size = crypto_skcipher_reqsize(ctx_p->fallback_tfm);
                }
        }

        crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
                                    sizeof(struct cipher_req_ctx) + fallback_req_size);

        /* Allocate key buffer, cache line aligned */
        ctx_p->user.key = kzalloc(max_key_buf_size, GFP_KERNEL);
        if (!ctx_p->user.key)
                goto free_fallback;

        dev_dbg(dev, "Allocated key buffer in context. key=@%p\n",
                ctx_p->user.key);

        /* Map key buffer */
        ctx_p->user.key_dma_addr = dma_map_single(dev, ctx_p->user.key,
                                                  max_key_buf_size,
                                                  DMA_TO_DEVICE);
        if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) {
                dev_err(dev, "Mapping Key %u B at va=%pK for DMA failed\n",
                        max_key_buf_size, ctx_p->user.key);
                goto free_key;
        }
        dev_dbg(dev, "Mapped key %u B at va=%pK to dma=%pad\n",
                max_key_buf_size, ctx_p->user.key, &ctx_p->user.key_dma_addr);

        return 0;

free_key:
        kfree(ctx_p->user.key);
free_fallback:
        crypto_free_skcipher(ctx_p->fallback_tfm);
        crypto_free_shash(ctx_p->shash_tfm);

        return -ENOMEM;
}

static void cc_cipher_exit(struct crypto_tfm *tfm)
{
        struct crypto_alg *alg = tfm->__crt_alg;
        struct cc_crypto_alg *cc_alg =
                        container_of(alg, struct cc_crypto_alg,
                                     skcipher_alg.base);
        unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct device *dev = drvdata_to_dev(ctx_p->drvdata);

        dev_dbg(dev, "Clearing context @%p for %s\n",
                crypto_tfm_ctx(tfm), crypto_tfm_alg_name(tfm));

        if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
                /* Free hash tfm for essiv */
                crypto_free_shash(ctx_p->shash_tfm);
                ctx_p->shash_tfm = NULL;
                crypto_free_skcipher(ctx_p->fallback_tfm);
                ctx_p->fallback_tfm = NULL;
        }

        /* Unmap key buffer */
        dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
                         DMA_TO_DEVICE);
        dev_dbg(dev, "Unmapped key buffer key_dma_addr=%pad\n",
                &ctx_p->user.key_dma_addr);

        /* Free key buffer in context */
        dev_dbg(dev, "Free key buffer in context. key=@%p\n", ctx_p->user.key);
        kfree_sensitive(ctx_p->user.key);
}

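/* Translate a HW key slot number into the matching HW key token */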
static enum cc_hw_crypto_key cc_slot_to_hw_key(u8 slot_num)
{
        switch (slot_num) {
        case 0:
                return KFDE0_KEY;
        case 1:
                return KFDE1_KEY;
        case 2:
                return KFDE2_KEY;
        case 3:
                return KFDE3_KEY;
        }
        return END_OF_KEYS;
}

static u8 cc_slot_to_cpp_key(u8 slot_num)
{
        return (slot_num - CC_FIRST_CPP_KEY_SLOT);
}

static inline enum cc_key_type cc_slot_to_key_type(u8 slot_num)
{
        if (slot_num >= CC_FIRST_HW_KEY_SLOT && slot_num <= CC_LAST_HW_KEY_SLOT)
                return CC_HW_PROTECTED_KEY;
        else if (slot_num >= CC_FIRST_CPP_KEY_SLOT &&
                 slot_num <= CC_LAST_CPP_KEY_SLOT)
                return CC_POLICY_PROTECTED_KEY;
        else
                return CC_INVALID_PROTECTED_KEY;
}

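/*
 * Set a HW protected (FDE) or policy protected (CPP) key. The "key"
 * passed in is a token describing the on-chip key slot(s) to use,
 * not the key material itself.
 */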
static int cc_cipher_sethkey(struct crypto_skcipher *sktfm, const u8 *key,
                             unsigned int keylen)
{
        struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct device *dev = drvdata_to_dev(ctx_p->drvdata);
        struct cc_hkey_info hki;

        dev_dbg(dev, "Setting HW key in context @%p for %s. keylen=%u\n",
                ctx_p, crypto_tfm_alg_name(tfm), keylen);
        dump_byte_array("key", key, keylen);

        /* STAT_PHASE_0: Init and sanity checks */

        /* This checks the size of the protected key token */
        if (keylen != sizeof(hki)) {
                dev_err(dev, "Unsupported protected key size %d.\n", keylen);
                return -EINVAL;
        }

        memcpy(&hki, key, keylen);

        /* The real key len for crypto op is the size of the HW key
         * referenced by the HW key slot, not the hardware key token
         */
        keylen = hki.keylen;

        if (validate_keys_sizes(ctx_p, keylen)) {
                dev_dbg(dev, "Unsupported key size %d.\n", keylen);
                return -EINVAL;
        }

        ctx_p->keylen = keylen;
        ctx_p->fallback_on = false;

        switch (cc_slot_to_key_type(hki.hw_key1)) {
        case CC_HW_PROTECTED_KEY:
                if (ctx_p->flow_mode == S_DIN_to_SM4) {
                        dev_err(dev, "Only AES HW protected keys are supported\n");
                        return -EINVAL;
                }

                ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1);
                if (ctx_p->hw.key1_slot == END_OF_KEYS) {
                        dev_err(dev, "Unsupported hw key1 number (%d)\n",
                                hki.hw_key1);
                        return -EINVAL;
                }

                if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
                    ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
                        if (hki.hw_key1 == hki.hw_key2) {
                                dev_err(dev, "Illegal hw key numbers (%d,%d)\n",
                                        hki.hw_key1, hki.hw_key2);
                                return -EINVAL;
                        }

                        ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2);
                        if (ctx_p->hw.key2_slot == END_OF_KEYS) {
                                dev_err(dev, "Unsupported hw key2 number (%d)\n",
                                        hki.hw_key2);
                                return -EINVAL;
                        }
                }

                ctx_p->key_type = CC_HW_PROTECTED_KEY;
                dev_dbg(dev, "HW protected key %d/%d set.\n",
                        ctx_p->hw.key1_slot, ctx_p->hw.key2_slot);
                break;

        case CC_POLICY_PROTECTED_KEY:
                if (ctx_p->drvdata->hw_rev < CC_HW_REV_713) {
                        dev_err(dev, "CPP keys not supported in this hardware revision.\n");
                        return -EINVAL;
                }

                if (ctx_p->cipher_mode != DRV_CIPHER_CBC &&
                    ctx_p->cipher_mode != DRV_CIPHER_CTR) {
                        dev_err(dev, "CPP keys only supported in CBC or CTR modes.\n");
                        return -EINVAL;
                }

                ctx_p->cpp.slot = cc_slot_to_cpp_key(hki.hw_key1);
                if (ctx_p->flow_mode == S_DIN_to_AES)
                        ctx_p->cpp.alg = CC_CPP_AES;
                else /* Must be SM4 due to sethkey registration */
                        ctx_p->cpp.alg = CC_CPP_SM4;
                ctx_p->key_type = CC_POLICY_PROTECTED_KEY;
                dev_dbg(dev, "policy protected key alg: %d slot: %d.\n",
                        ctx_p->cpp.alg, ctx_p->cpp.slot);
                break;

        default:
                dev_err(dev, "Unsupported protected key (%d)\n", hki.hw_key1);
                return -EINVAL;
        }

        return 0;
}

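/*
 * Set a regular (unprotected) user key. The key is copied into the
 * DMA-mapped context buffer; for ESSIV, the sha256 of the key is
 * appended to it as key2.
 */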
static int cc_cipher_setkey(struct crypto_skcipher *sktfm, const u8 *key,
                            unsigned int keylen)
{
        struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct device *dev = drvdata_to_dev(ctx_p->drvdata);
        struct cc_crypto_alg *cc_alg =
                        container_of(tfm->__crt_alg, struct cc_crypto_alg,
                                     skcipher_alg.base);
        unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;

        dev_dbg(dev, "Setting key in context @%p for %s. keylen=%u\n",
                ctx_p, crypto_tfm_alg_name(tfm), keylen);
        dump_byte_array("key", key, keylen);

        /* STAT_PHASE_0: Init and sanity checks */

        if (validate_keys_sizes(ctx_p, keylen)) {
                dev_dbg(dev, "Invalid key size %d.\n", keylen);
                return -EINVAL;
        }

        if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {

                /* We only support 256 bit ESSIV-CBC-AES keys */
                if (keylen != AES_KEYSIZE_256) {
                        unsigned int flags = crypto_tfm_get_flags(tfm) & CRYPTO_TFM_REQ_MASK;

                        if (likely(ctx_p->fallback_tfm)) {
                                ctx_p->fallback_on = true;
                                crypto_skcipher_clear_flags(ctx_p->fallback_tfm,
                                                            CRYPTO_TFM_REQ_MASK);
                                /* Propagate the request flags to the fallback tfm */
                                crypto_skcipher_set_flags(ctx_p->fallback_tfm, flags);
                                return crypto_skcipher_setkey(ctx_p->fallback_tfm, key, keylen);
                        }

                        dev_dbg(dev, "Unsupported key size %d and no fallback.\n", keylen);
                        return -EINVAL;
                }

                /* Internal ESSIV key buffer is double sized */
                max_key_buf_size <<= 1;
        }

        ctx_p->fallback_on = false;
        ctx_p->key_type = CC_UNPROTECTED_KEY;

        /*
         * Verify DES weak keys
         * Note that we're dropping the expanded key since the
         * HW does the expansion on its own.
         */
        if (ctx_p->flow_mode == S_DIN_to_DES) {
                if ((keylen == DES3_EDE_KEY_SIZE &&
                     verify_skcipher_des3_key(sktfm, key)) ||
                    verify_skcipher_des_key(sktfm, key)) {
                        dev_dbg(dev, "weak DES key");
                        return -EINVAL;
                }
        }

        if (ctx_p->cipher_mode == DRV_CIPHER_XTS &&
            xts_verify_key(sktfm, key, keylen)) {
                dev_dbg(dev, "weak XTS key");
                return -EINVAL;
        }

        /* STAT_PHASE_1: Copy key to ctx */
        dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr,
                                max_key_buf_size, DMA_TO_DEVICE);

        memcpy(ctx_p->user.key, key, keylen);

        if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
                /* sha256 for key2 - use sw implementation */
                int err;

                err = crypto_shash_tfm_digest(ctx_p->shash_tfm,
                                              ctx_p->user.key, keylen,
                                              ctx_p->user.key + keylen);
                if (err) {
                        dev_err(dev, "Failed to hash ESSIV key.\n");
                        return err;
                }

                keylen <<= 1;
        }
        dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr,
                                   max_key_buf_size, DMA_TO_DEVICE);
        ctx_p->keylen = keylen;

        dev_dbg(dev, "return safely");
        return 0;
}

static int cc_out_setup_mode(struct cc_cipher_ctx *ctx_p)
{
        switch (ctx_p->flow_mode) {
        case S_DIN_to_AES:
                return S_AES_to_DOUT;
        case S_DIN_to_DES:
                return S_DES_to_DOUT;
        case S_DIN_to_SM4:
                return S_SM4_to_DOUT;
        default:
                return ctx_p->flow_mode;
        }
}

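/* Queue a descriptor that reads the next IV back from the HW state */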
static void cc_setup_readiv_desc(struct crypto_tfm *tfm,
                                 struct cipher_req_ctx *req_ctx,
                                 unsigned int ivsize, struct cc_hw_desc desc[],
                                 unsigned int *seq_size)
{
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct device *dev = drvdata_to_dev(ctx_p->drvdata);
        int cipher_mode = ctx_p->cipher_mode;
        int flow_mode = cc_out_setup_mode(ctx_p);
        int direction = req_ctx->gen_ctx.op_type;
        dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;

        if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY)
                return;

        switch (cipher_mode) {
        case DRV_CIPHER_ECB:
                break;
        case DRV_CIPHER_CBC:
        case DRV_CIPHER_CBC_CTS:
        case DRV_CIPHER_CTR:
        case DRV_CIPHER_OFB:
                /* Read next IV */
                hw_desc_init(&desc[*seq_size]);
                set_dout_dlli(&desc[*seq_size], iv_dma_addr, ivsize, NS_BIT, 1);
                set_cipher_config0(&desc[*seq_size], direction);
                set_flow_mode(&desc[*seq_size], flow_mode);
                set_cipher_mode(&desc[*seq_size], cipher_mode);
                if (cipher_mode == DRV_CIPHER_CTR ||
                    cipher_mode == DRV_CIPHER_OFB) {
                        set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1);
                } else {
                        set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE0);
                }
                set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
                (*seq_size)++;
                break;
        case DRV_CIPHER_XTS:
        case DRV_CIPHER_ESSIV:
                /* IV */
                hw_desc_init(&desc[*seq_size]);
                set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1);
                set_cipher_mode(&desc[*seq_size], cipher_mode);
                set_cipher_config0(&desc[*seq_size], direction);
                set_flow_mode(&desc[*seq_size], flow_mode);
                set_dout_dlli(&desc[*seq_size], iv_dma_addr, CC_AES_BLOCK_SIZE,
                              NS_BIT, 1);
                set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
                (*seq_size)++;
                break;
        default:
                dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
        }
}

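/* Queue a descriptor that loads the IV into the HW state, where needed */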
static void cc_setup_state_desc(struct crypto_tfm *tfm,
                                struct cipher_req_ctx *req_ctx,
                                unsigned int ivsize, unsigned int nbytes,
                                struct cc_hw_desc desc[],
                                unsigned int *seq_size)
{
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct device *dev = drvdata_to_dev(ctx_p->drvdata);
        int cipher_mode = ctx_p->cipher_mode;
        int flow_mode = ctx_p->flow_mode;
        int direction = req_ctx->gen_ctx.op_type;
        dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;

        switch (cipher_mode) {
        case DRV_CIPHER_ECB:
                break;
        case DRV_CIPHER_CBC:
        case DRV_CIPHER_CBC_CTS:
        case DRV_CIPHER_CTR:
        case DRV_CIPHER_OFB:
                /* Load IV */
                hw_desc_init(&desc[*seq_size]);
                set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr, ivsize,
                             NS_BIT);
                set_cipher_config0(&desc[*seq_size], direction);
                set_flow_mode(&desc[*seq_size], flow_mode);
                set_cipher_mode(&desc[*seq_size], cipher_mode);
                if (cipher_mode == DRV_CIPHER_CTR ||
                    cipher_mode == DRV_CIPHER_OFB) {
                        set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
                } else {
                        set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE0);
                }
                (*seq_size)++;
                break;
        case DRV_CIPHER_XTS:
        case DRV_CIPHER_ESSIV:
                break;
        default:
                dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
        }
}

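/* For XTS/ESSIV: queue descriptors that load the XEX key (key2) and the IV */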
static void cc_setup_xex_state_desc(struct crypto_tfm *tfm,
                                    struct cipher_req_ctx *req_ctx,
                                    unsigned int ivsize, unsigned int nbytes,
                                    struct cc_hw_desc desc[],
                                    unsigned int *seq_size)
{
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct device *dev = drvdata_to_dev(ctx_p->drvdata);
        int cipher_mode = ctx_p->cipher_mode;
        int flow_mode = ctx_p->flow_mode;
        int direction = req_ctx->gen_ctx.op_type;
        dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
        unsigned int key_len = (ctx_p->keylen / 2);
        dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
        unsigned int key_offset = key_len;

        switch (cipher_mode) {
        case DRV_CIPHER_ECB:
                break;
        case DRV_CIPHER_CBC:
        case DRV_CIPHER_CBC_CTS:
        case DRV_CIPHER_CTR:
        case DRV_CIPHER_OFB:
                break;
        case DRV_CIPHER_XTS:
        case DRV_CIPHER_ESSIV:

                if (cipher_mode == DRV_CIPHER_ESSIV)
                        key_len = SHA256_DIGEST_SIZE;

                /* load XEX key */
                hw_desc_init(&desc[*seq_size]);
                set_cipher_mode(&desc[*seq_size], cipher_mode);
                set_cipher_config0(&desc[*seq_size], direction);
                if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
                        set_hw_crypto_key(&desc[*seq_size],
                                          ctx_p->hw.key2_slot);
                } else {
                        set_din_type(&desc[*seq_size], DMA_DLLI,
                                     (key_dma_addr + key_offset),
                                     key_len, NS_BIT);
                }
                set_xex_data_unit_size(&desc[*seq_size], nbytes);
                set_flow_mode(&desc[*seq_size], S_DIN_to_AES2);
                set_key_size_aes(&desc[*seq_size], key_len);
                set_setup_mode(&desc[*seq_size], SETUP_LOAD_XEX_KEY);
                (*seq_size)++;

                /* Load IV */
                hw_desc_init(&desc[*seq_size]);
                set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
                set_cipher_mode(&desc[*seq_size], cipher_mode);
                set_cipher_config0(&desc[*seq_size], direction);
                set_key_size_aes(&desc[*seq_size], key_len);
                set_flow_mode(&desc[*seq_size], flow_mode);
                set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr,
                             CC_AES_BLOCK_SIZE, NS_BIT);
                (*seq_size)++;
                break;
        default:
                dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
        }
}

static int cc_out_flow_mode(struct cc_cipher_ctx *ctx_p)
{
        switch (ctx_p->flow_mode) {
        case S_DIN_to_AES:
                return DIN_AES_DOUT;
        case S_DIN_to_DES:
                return DIN_DES_DOUT;
        case S_DIN_to_SM4:
                return DIN_SM4_DOUT;
        default:
                return ctx_p->flow_mode;
        }
}

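/* Queue the key load descriptor(s) for the configured cipher mode */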
static void cc_setup_key_desc(struct crypto_tfm *tfm,
                              struct cipher_req_ctx *req_ctx,
                              unsigned int nbytes, struct cc_hw_desc desc[],
                              unsigned int *seq_size)
{
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct device *dev = drvdata_to_dev(ctx_p->drvdata);
        int cipher_mode = ctx_p->cipher_mode;
        int flow_mode = ctx_p->flow_mode;
        int direction = req_ctx->gen_ctx.op_type;
        dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
        unsigned int key_len = ctx_p->keylen;
        unsigned int din_size;

        switch (cipher_mode) {
        case DRV_CIPHER_CBC:
        case DRV_CIPHER_CBC_CTS:
        case DRV_CIPHER_CTR:
        case DRV_CIPHER_OFB:
        case DRV_CIPHER_ECB:
                /* Load key */
                hw_desc_init(&desc[*seq_size]);
                set_cipher_mode(&desc[*seq_size], cipher_mode);
                set_cipher_config0(&desc[*seq_size], direction);

                if (cc_key_type(tfm) == CC_POLICY_PROTECTED_KEY) {
                        /* We use the AES key size coding for all CPP algs */
                        set_key_size_aes(&desc[*seq_size], key_len);
                        set_cpp_crypto_key(&desc[*seq_size], ctx_p->cpp.slot);
                        flow_mode = cc_out_flow_mode(ctx_p);
                } else {
                        if (flow_mode == S_DIN_to_AES) {
                                if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
                                        set_hw_crypto_key(&desc[*seq_size],
                                                          ctx_p->hw.key1_slot);
                                } else {
                                        /* CC_UNPROTECTED_KEY
                                         * Invalid keys are filtered out in
                                         * setkey()
                                         */
                                        din_size = (key_len == 24) ?
                                                AES_MAX_KEY_SIZE : key_len;

                                        set_din_type(&desc[*seq_size], DMA_DLLI,
                                                     key_dma_addr, din_size,
                                                     NS_BIT);
                                }
                                set_key_size_aes(&desc[*seq_size], key_len);
                        } else {
                                /* des */
                                set_din_type(&desc[*seq_size], DMA_DLLI,
                                             key_dma_addr, key_len, NS_BIT);
                                set_key_size_des(&desc[*seq_size], key_len);
                        }
                        set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
                }
                set_flow_mode(&desc[*seq_size], flow_mode);
                (*seq_size)++;
                break;
        case DRV_CIPHER_XTS:
        case DRV_CIPHER_ESSIV:
                /* Load AES key */
                hw_desc_init(&desc[*seq_size]);
                set_cipher_mode(&desc[*seq_size], cipher_mode);
                set_cipher_config0(&desc[*seq_size], direction);
                if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
                        set_hw_crypto_key(&desc[*seq_size],
                                          ctx_p->hw.key1_slot);
                } else {
                        set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
                                     (key_len / 2), NS_BIT);
                }
                set_key_size_aes(&desc[*seq_size], (key_len / 2));
                set_flow_mode(&desc[*seq_size], flow_mode);
                set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
                (*seq_size)++;
                break;
        default:
                dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
        }
}

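/*
 * If the request uses an MLLI table, queue a BYPASS descriptor that
 * copies the table from host memory into CryptoCell SRAM.
 */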
static void cc_setup_mlli_desc(struct crypto_tfm *tfm,
                               struct cipher_req_ctx *req_ctx,
                               struct scatterlist *dst, struct scatterlist *src,
                               unsigned int nbytes, void *areq,
                               struct cc_hw_desc desc[], unsigned int *seq_size)
{
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct device *dev = drvdata_to_dev(ctx_p->drvdata);

        if (req_ctx->dma_buf_type == CC_DMA_BUF_MLLI) {
                /* bypass */
                dev_dbg(dev, " bypass params addr %pad length 0x%X addr 0x%08X\n",
                        &req_ctx->mlli_params.mlli_dma_addr,
                        req_ctx->mlli_params.mlli_len,
                        ctx_p->drvdata->mlli_sram_addr);
                hw_desc_init(&desc[*seq_size]);
                set_din_type(&desc[*seq_size], DMA_DLLI,
                             req_ctx->mlli_params.mlli_dma_addr,
                             req_ctx->mlli_params.mlli_len, NS_BIT);
                set_dout_sram(&desc[*seq_size],
                              ctx_p->drvdata->mlli_sram_addr,
                              req_ctx->mlli_params.mlli_len);
                set_flow_mode(&desc[*seq_size], BYPASS);
                (*seq_size)++;
        }
}

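/* Queue the descriptor that moves the actual data through the engine */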
static void cc_setup_flow_desc(struct crypto_tfm *tfm,
                               struct cipher_req_ctx *req_ctx,
                               struct scatterlist *dst, struct scatterlist *src,
                               unsigned int nbytes, struct cc_hw_desc desc[],
                               unsigned int *seq_size)
{
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct device *dev = drvdata_to_dev(ctx_p->drvdata);
        unsigned int flow_mode = cc_out_flow_mode(ctx_p);
        bool last_desc = (ctx_p->key_type == CC_POLICY_PROTECTED_KEY ||
                          ctx_p->cipher_mode == DRV_CIPHER_ECB);

        /* Process */
        if (req_ctx->dma_buf_type == CC_DMA_BUF_DLLI) {
                dev_dbg(dev, " data params addr %pad length 0x%X\n",
                        &sg_dma_address(src), nbytes);
                dev_dbg(dev, " data params addr %pad length 0x%X\n",
                        &sg_dma_address(dst), nbytes);
                hw_desc_init(&desc[*seq_size]);
                set_din_type(&desc[*seq_size], DMA_DLLI, sg_dma_address(src),
                             nbytes, NS_BIT);
                set_dout_dlli(&desc[*seq_size], sg_dma_address(dst),
                              nbytes, NS_BIT, (!last_desc ? 0 : 1));
                if (last_desc)
                        set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

                set_flow_mode(&desc[*seq_size], flow_mode);
                (*seq_size)++;
        } else {
                hw_desc_init(&desc[*seq_size]);
                set_din_type(&desc[*seq_size], DMA_MLLI,
                             ctx_p->drvdata->mlli_sram_addr,
                             req_ctx->in_mlli_nents, NS_BIT);
                if (req_ctx->out_nents == 0) {
                        dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
                                ctx_p->drvdata->mlli_sram_addr,
                                ctx_p->drvdata->mlli_sram_addr);
                        set_dout_mlli(&desc[*seq_size],
                                      ctx_p->drvdata->mlli_sram_addr,
                                      req_ctx->in_mlli_nents, NS_BIT,
                                      (!last_desc ? 0 : 1));
                } else {
                        dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
                                ctx_p->drvdata->mlli_sram_addr,
                                ctx_p->drvdata->mlli_sram_addr +
                                (u32)LLI_ENTRY_BYTE_SIZE * req_ctx->in_nents);
                        set_dout_mlli(&desc[*seq_size],
                                      (ctx_p->drvdata->mlli_sram_addr +
                                       (LLI_ENTRY_BYTE_SIZE *
                                        req_ctx->in_mlli_nents)),
                                      req_ctx->out_mlli_nents, NS_BIT,
                                      (!last_desc ? 0 : 1));
                }
                if (last_desc)
                        set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

                set_flow_mode(&desc[*seq_size], flow_mode);
                (*seq_size)++;
        }
}

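/*
 * Completion callback: unmap the request buffers, copy the next IV
 * back to the request and complete it.
 */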
static void cc_cipher_complete(struct device *dev, void *cc_req, int err)
{
        struct skcipher_request *req = (struct skcipher_request *)cc_req;
        struct scatterlist *dst = req->dst;
        struct scatterlist *src = req->src;
        struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
        struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
        unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);

        if (err != -EINPROGRESS) {
                /* Not a BACKLOG notification */
                cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
                memcpy(req->iv, req_ctx->iv, ivsize);
                kfree_sensitive(req_ctx->iv);
        }

        skcipher_request_complete(req, err);
}

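/*
 * Build and submit the HW descriptor sequence for one en/decrypt
 * request, or divert the request to the software fallback when one
 * was selected at setkey time.
 */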
static int cc_cipher_process(struct skcipher_request *req,
                             enum drv_crypto_direction direction)
{
        struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
        struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
        struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
        unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
        struct scatterlist *dst = req->dst;
        struct scatterlist *src = req->src;
        unsigned int nbytes = req->cryptlen;
        void *iv = req->iv;
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct device *dev = drvdata_to_dev(ctx_p->drvdata);
        struct cc_hw_desc desc[MAX_SKCIPHER_SEQ_LEN];
        struct cc_crypto_req cc_req = {};
        int rc;
        unsigned int seq_len = 0;
        gfp_t flags = cc_gfp_flags(&req->base);

        dev_dbg(dev, "%s req=%p iv=%p nbytes=%d\n",
                ((direction == DRV_CRYPTO_DIRECTION_ENCRYPT) ?
                "Encrypt" : "Decrypt"), req, iv, nbytes);

        /* STAT_PHASE_0: Init and sanity checks */

        if (validate_data_size(ctx_p, nbytes)) {
                dev_dbg(dev, "Unsupported data size %d.\n", nbytes);
                rc = -EINVAL;
                goto exit_process;
        }
        if (nbytes == 0) {
                /* No data to process is valid */
                rc = 0;
                goto exit_process;
        }

        if (ctx_p->fallback_on) {
                struct skcipher_request *subreq = skcipher_request_ctx(req);

                *subreq = *req;
                skcipher_request_set_tfm(subreq, ctx_p->fallback_tfm);
                if (direction == DRV_CRYPTO_DIRECTION_ENCRYPT)
                        return crypto_skcipher_encrypt(subreq);
                else
                        return crypto_skcipher_decrypt(subreq);
        }

        /* The IV we are handed may be allocated from the stack so
         * we must copy it to a DMAable buffer before use.
         */
        req_ctx->iv = kmemdup(iv, ivsize, flags);
        if (!req_ctx->iv) {
                rc = -ENOMEM;
                goto exit_process;
        }

        /* Setup request structure */
        cc_req.user_cb = cc_cipher_complete;
        cc_req.user_arg = req;

        /* Setup CPP operation details */
        if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY) {
                cc_req.cpp.is_cpp = true;
                cc_req.cpp.alg = ctx_p->cpp.alg;
                cc_req.cpp.slot = ctx_p->cpp.slot;
        }

        /* Setup request context */
        req_ctx->gen_ctx.op_type = direction;

        /* STAT_PHASE_1: Map buffers */

        rc = cc_map_cipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes,
                                   req_ctx->iv, src, dst, flags);
        if (rc) {
                dev_err(dev, "map_request() failed\n");
                goto exit_process;
        }

        /* STAT_PHASE_2: Create sequence */

        /* Setup state (IV) */
        cc_setup_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
        /* Setup MLLI line, if needed */
        cc_setup_mlli_desc(tfm, req_ctx, dst, src, nbytes, req, desc, &seq_len);
        /* Setup key */
        cc_setup_key_desc(tfm, req_ctx, nbytes, desc, &seq_len);
        /* Setup state (IV and XEX key) */
        cc_setup_xex_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
        /* Data processing */
        cc_setup_flow_desc(tfm, req_ctx, dst, src, nbytes, desc, &seq_len);
        /* Read next IV */
        cc_setup_readiv_desc(tfm, req_ctx, ivsize, desc, &seq_len);

        /* STAT_PHASE_3: Lock HW and push sequence */

        rc = cc_send_request(ctx_p->drvdata, &cc_req, desc, seq_len,
                             &req->base);
        if (rc != -EINPROGRESS && rc != -EBUSY) {
                /* Failed to send the request or request completed
                 * synchronously
                 */
                cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
        }

exit_process:
        if (rc != -EINPROGRESS && rc != -EBUSY)
                kfree_sensitive(req_ctx->iv);

        return rc;
}

static int cc_cipher_encrypt(struct skcipher_request *req)
{
        struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);

        memset(req_ctx, 0, sizeof(*req_ctx));

        return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_ENCRYPT);
}

static int cc_cipher_decrypt(struct skcipher_request *req)
{
        struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);

        memset(req_ctx, 0, sizeof(*req_ctx));

        return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_DECRYPT);
}

/* Block cipher alg */
static const struct cc_alg_template skcipher_algs[] = {
        {
                .name = "xts(paes)",
                .driver_name = "xts-paes-ccree",
                .blocksize = 1,
                .template_skcipher = {
                        .setkey = cc_cipher_sethkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = CC_HW_KEY_SIZE,
                        .max_keysize = CC_HW_KEY_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                        },
                .cipher_mode = DRV_CIPHER_XTS,
                .flow_mode = S_DIN_to_AES,
                .min_hw_rev = CC_HW_REV_630,
                .std_body = CC_STD_NIST,
                .sec_func = true,
        },
        {
                .name = "essiv(cbc(paes),sha256)",
                .driver_name = "essiv-paes-ccree",
                .blocksize = AES_BLOCK_SIZE,
                .template_skcipher = {
                        .setkey = cc_cipher_sethkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = CC_HW_KEY_SIZE,
                        .max_keysize = CC_HW_KEY_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                        },
                .cipher_mode = DRV_CIPHER_ESSIV,
                .flow_mode = S_DIN_to_AES,
                .min_hw_rev = CC_HW_REV_712,
                .std_body = CC_STD_NIST,
                .sec_func = true,
        },
        {
                .name = "ecb(paes)",
                .driver_name = "ecb-paes-ccree",
                .blocksize = AES_BLOCK_SIZE,
                .template_skcipher = {
                        .setkey = cc_cipher_sethkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = CC_HW_KEY_SIZE,
                        .max_keysize = CC_HW_KEY_SIZE,
                        .ivsize = 0,
                        },
                .cipher_mode = DRV_CIPHER_ECB,
                .flow_mode = S_DIN_to_AES,
                .min_hw_rev = CC_HW_REV_712,
                .std_body = CC_STD_NIST,
                .sec_func = true,
        },
        {
                .name = "cbc(paes)",
                .driver_name = "cbc-paes-ccree",
                .blocksize = AES_BLOCK_SIZE,
                .template_skcipher = {
                        .setkey = cc_cipher_sethkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = CC_HW_KEY_SIZE,
                        .max_keysize = CC_HW_KEY_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                },
                .cipher_mode = DRV_CIPHER_CBC,
                .flow_mode = S_DIN_to_AES,
                .min_hw_rev = CC_HW_REV_712,
                .std_body = CC_STD_NIST,
                .sec_func = true,
        },
        {
                .name = "cts(cbc(paes))",
                .driver_name = "cts-cbc-paes-ccree",
                .blocksize = AES_BLOCK_SIZE,
                .template_skcipher = {
                        .setkey = cc_cipher_sethkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = CC_HW_KEY_SIZE,
                        .max_keysize = CC_HW_KEY_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                        },
                .cipher_mode = DRV_CIPHER_CBC_CTS,
                .flow_mode = S_DIN_to_AES,
                .min_hw_rev = CC_HW_REV_712,
                .std_body = CC_STD_NIST,
                .sec_func = true,
        },
        {
                .name = "ctr(paes)",
                .driver_name = "ctr-paes-ccree",
                .blocksize = 1,
                .template_skcipher = {
                        .setkey = cc_cipher_sethkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = CC_HW_KEY_SIZE,
                        .max_keysize = CC_HW_KEY_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                        },
                .cipher_mode = DRV_CIPHER_CTR,
                .flow_mode = S_DIN_to_AES,
                .min_hw_rev = CC_HW_REV_712,
                .std_body = CC_STD_NIST,
                .sec_func = true,
        },
        {
                /* See https://www.mail-archive.com/linux-crypto@vger.kernel.org/msg40576.html
                 * for the reason why this differs from the generic
                 * implementation.
                 */
                .name = "xts(aes)",
                .driver_name = "xts-aes-ccree",
                .blocksize = 1,
                .template_skcipher = {
                        .setkey = cc_cipher_setkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = AES_MIN_KEY_SIZE * 2,
                        .max_keysize = AES_MAX_KEY_SIZE * 2,
                        .ivsize = AES_BLOCK_SIZE,
                        },
                .cipher_mode = DRV_CIPHER_XTS,
                .flow_mode = S_DIN_to_AES,
                .min_hw_rev = CC_HW_REV_630,
                .std_body = CC_STD_NIST,
        },
        {
                .name = "essiv(cbc(aes),sha256)",
                .driver_name = "essiv-aes-ccree",
                .blocksize = AES_BLOCK_SIZE,
                .template_skcipher = {
                        .setkey = cc_cipher_setkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = AES_MIN_KEY_SIZE,
                        .max_keysize = AES_MAX_KEY_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                        },
                .cipher_mode = DRV_CIPHER_ESSIV,
                .flow_mode = S_DIN_to_AES,
                .min_hw_rev = CC_HW_REV_712,
                .std_body = CC_STD_NIST,
        },
        {
                .name = "ecb(aes)",
                .driver_name = "ecb-aes-ccree",
                .blocksize = AES_BLOCK_SIZE,
                .template_skcipher = {
                        .setkey = cc_cipher_setkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = AES_MIN_KEY_SIZE,
                        .max_keysize = AES_MAX_KEY_SIZE,
                        .ivsize = 0,
                        },
                .cipher_mode = DRV_CIPHER_ECB,
                .flow_mode = S_DIN_to_AES,
                .min_hw_rev = CC_HW_REV_630,
                .std_body = CC_STD_NIST,
        },
        {
                .name = "cbc(aes)",
                .driver_name = "cbc-aes-ccree",
                .blocksize = AES_BLOCK_SIZE,
                .template_skcipher = {
                        .setkey = cc_cipher_setkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = AES_MIN_KEY_SIZE,
                        .max_keysize = AES_MAX_KEY_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                },
                .cipher_mode = DRV_CIPHER_CBC,
                .flow_mode = S_DIN_to_AES,
                .min_hw_rev = CC_HW_REV_630,
                .std_body = CC_STD_NIST,
        },
        {
                .name = "cts(cbc(aes))",
                .driver_name = "cts-cbc-aes-ccree",
                .blocksize = AES_BLOCK_SIZE,
                .template_skcipher = {
                        .setkey = cc_cipher_setkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = AES_MIN_KEY_SIZE,
                        .max_keysize = AES_MAX_KEY_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                        },
                .cipher_mode = DRV_CIPHER_CBC_CTS,
                .flow_mode = S_DIN_to_AES,
                .min_hw_rev = CC_HW_REV_630,
                .std_body = CC_STD_NIST,
        },
        {
                .name = "ctr(aes)",
                .driver_name = "ctr-aes-ccree",
                .blocksize = 1,
                .template_skcipher = {
                        .setkey = cc_cipher_setkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = AES_MIN_KEY_SIZE,
                        .max_keysize = AES_MAX_KEY_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                        },
                .cipher_mode = DRV_CIPHER_CTR,
                .flow_mode = S_DIN_to_AES,
                .min_hw_rev = CC_HW_REV_630,
                .std_body = CC_STD_NIST,
        },
        {
                .name = "cbc(des3_ede)",
                .driver_name = "cbc-3des-ccree",
                .blocksize = DES3_EDE_BLOCK_SIZE,
                .template_skcipher = {
                        .setkey = cc_cipher_setkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = DES3_EDE_KEY_SIZE,
                        .max_keysize = DES3_EDE_KEY_SIZE,
                        .ivsize = DES3_EDE_BLOCK_SIZE,
                        },
                .cipher_mode = DRV_CIPHER_CBC,
                .flow_mode = S_DIN_to_DES,
                .min_hw_rev = CC_HW_REV_630,
                .std_body = CC_STD_NIST,
        },
        {
                .name = "ecb(des3_ede)",
                .driver_name = "ecb-3des-ccree",
                .blocksize = DES3_EDE_BLOCK_SIZE,
                .template_skcipher = {
                        .setkey = cc_cipher_setkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = DES3_EDE_KEY_SIZE,
                        .max_keysize = DES3_EDE_KEY_SIZE,
                        .ivsize = 0,
                        },
                .cipher_mode = DRV_CIPHER_ECB,
                .flow_mode = S_DIN_to_DES,
                .min_hw_rev = CC_HW_REV_630,
                .std_body = CC_STD_NIST,
        },
        {
                .name = "cbc(des)",
                .driver_name = "cbc-des-ccree",
                .blocksize = DES_BLOCK_SIZE,
                .template_skcipher = {
                        .setkey = cc_cipher_setkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = DES_KEY_SIZE,
                        .max_keysize = DES_KEY_SIZE,
                        .ivsize = DES_BLOCK_SIZE,
                        },
                .cipher_mode = DRV_CIPHER_CBC,
                .flow_mode = S_DIN_to_DES,
                .min_hw_rev = CC_HW_REV_630,
                .std_body = CC_STD_NIST,
        },
        {
                .name = "ecb(des)",
                .driver_name = "ecb-des-ccree",
                .blocksize = DES_BLOCK_SIZE,
                .template_skcipher = {
                        .setkey = cc_cipher_setkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = DES_KEY_SIZE,
                        .max_keysize = DES_KEY_SIZE,
                        .ivsize = 0,
                        },
                .cipher_mode = DRV_CIPHER_ECB,
                .flow_mode = S_DIN_to_DES,
                .min_hw_rev = CC_HW_REV_630,
                .std_body = CC_STD_NIST,
        },
        {
                .name = "cbc(sm4)",
                .driver_name = "cbc-sm4-ccree",
                .blocksize = SM4_BLOCK_SIZE,
                .template_skcipher = {
                        .setkey = cc_cipher_setkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = SM4_KEY_SIZE,
                        .max_keysize = SM4_KEY_SIZE,
                        .ivsize = SM4_BLOCK_SIZE,
                        },
                .cipher_mode = DRV_CIPHER_CBC,
                .flow_mode = S_DIN_to_SM4,
                .min_hw_rev = CC_HW_REV_713,
                .std_body = CC_STD_OSCCA,
        },
        {
                .name = "ecb(sm4)",
                .driver_name = "ecb-sm4-ccree",
                .blocksize = SM4_BLOCK_SIZE,
                .template_skcipher = {
                        .setkey = cc_cipher_setkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = SM4_KEY_SIZE,
                        .max_keysize = SM4_KEY_SIZE,
                        .ivsize = 0,
                        },
                .cipher_mode = DRV_CIPHER_ECB,
                .flow_mode = S_DIN_to_SM4,
                .min_hw_rev = CC_HW_REV_713,
                .std_body = CC_STD_OSCCA,
        },
        {
                .name = "ctr(sm4)",
                .driver_name = "ctr-sm4-ccree",
                .blocksize = 1,
                .template_skcipher = {
                        .setkey = cc_cipher_setkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = SM4_KEY_SIZE,
                        .max_keysize = SM4_KEY_SIZE,
                        .ivsize = SM4_BLOCK_SIZE,
                        },
                .cipher_mode = DRV_CIPHER_CTR,
                .flow_mode = S_DIN_to_SM4,
                .min_hw_rev = CC_HW_REV_713,
                .std_body = CC_STD_OSCCA,
        },
        {
                .name = "cbc(psm4)",
                .driver_name = "cbc-psm4-ccree",
                .blocksize = SM4_BLOCK_SIZE,
                .template_skcipher = {
                        .setkey = cc_cipher_sethkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = CC_HW_KEY_SIZE,
                        .max_keysize = CC_HW_KEY_SIZE,
                        .ivsize = SM4_BLOCK_SIZE,
                        },
                .cipher_mode = DRV_CIPHER_CBC,
                .flow_mode = S_DIN_to_SM4,
                .min_hw_rev = CC_HW_REV_713,
                .std_body = CC_STD_OSCCA,
                .sec_func = true,
        },
        {
                .name = "ctr(psm4)",
                .driver_name = "ctr-psm4-ccree",
                .blocksize = SM4_BLOCK_SIZE,
                .template_skcipher = {
                        .setkey = cc_cipher_sethkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = CC_HW_KEY_SIZE,
                        .max_keysize = CC_HW_KEY_SIZE,
                        .ivsize = SM4_BLOCK_SIZE,
                        },
                .cipher_mode = DRV_CIPHER_CTR,
                .flow_mode = S_DIN_to_SM4,
1369                 .min_hw_rev = CC_HW_REV_713,
1370                 .std_body = CC_STD_OSCCA,
1371                 .sec_func = true,
1372         },
1373 };
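
/*
 * A minimal consumer-side sketch (not part of this driver) of how the
 * templates above surface through the kernel crypto API: a caller asks
 * for the generic algorithm name and the core picks the highest-priority
 * registered implementation, e.g. "cbc-sm4-ccree" when this driver is
 * loaded and CC_CRA_PRIO wins. Note the "psm4" entries differ from plain
 * "sm4" only in using cc_cipher_sethkey(), which takes a CC_HW_KEY_SIZE
 * hardware key token rather than raw key material. try_ccree_sm4() is a
 * hypothetical name used for illustration; it relies only on
 * <crypto/skcipher.h>, already pulled in by this file's includes.
 */
static int try_ccree_sm4(void)
{
	struct crypto_skcipher *tfm;

	tfm = crypto_alloc_skcipher("cbc(sm4)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);	/* e.g. -ENOENT: no provider */

	pr_info("cbc(sm4) served by %s\n",
		crypto_skcipher_driver_name(tfm));

	crypto_free_skcipher(tfm);
	return 0;
}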

static struct cc_crypto_alg *cc_create_alg(const struct cc_alg_template *tmpl,
					   struct device *dev)
{
	struct cc_crypto_alg *t_alg;
	struct skcipher_alg *alg;

	t_alg = devm_kzalloc(dev, sizeof(*t_alg), GFP_KERNEL);
	if (!t_alg)
		return ERR_PTR(-ENOMEM);

	alg = &t_alg->skcipher_alg;

	memcpy(alg, &tmpl->template_skcipher, sizeof(*alg));

	/* t_alg is devm-allocated, so the early error returns below do
	 * not leak: the memory is released together with the device.
	 */
	if (snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s",
		     tmpl->name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	if (snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		     tmpl->driver_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CC_CRA_PRIO;
	alg->base.cra_blocksize = tmpl->blocksize;
	alg->base.cra_alignmask = 0;
	alg->base.cra_ctxsize = sizeof(struct cc_cipher_ctx);

	alg->base.cra_init = cc_cipher_init;
	alg->base.cra_exit = cc_cipher_exit;
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	t_alg->cipher_mode = tmpl->cipher_mode;
	t_alg->flow_mode = tmpl->flow_mode;

	return t_alg;
}
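
/*
 * A sketch of what the CRYPTO_ALG_ASYNC flag set above means for
 * consumers, assuming a hypothetical caller (not in this file):
 * passing CRYPTO_ALG_ASYNC in the mask asks for the ASYNC bit to be
 * clear, i.e. a synchronous tfm, so such a request bypasses ccree and
 * is satisfied only if a software implementation is available.
 */
static struct crypto_skcipher *get_sync_cbc_aes(void)
{
	/* type = 0, mask = CRYPTO_ALG_ASYNC: demand a synchronous cipher */
	return crypto_alloc_skcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC);
}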

int cc_cipher_free(struct cc_drvdata *drvdata)
{
	struct cc_crypto_alg *t_alg, *n;

	/* Remove registered algs. The _safe iterator is required because
	 * list_del() unlinks the entry we are standing on; this is also
	 * the error-unwind path for cc_cipher_alloc(), so it must cope
	 * with a partially populated list.
	 */
	list_for_each_entry_safe(t_alg, n, &drvdata->alg_list, entry) {
		crypto_unregister_skcipher(&t_alg->skcipher_alg);
		list_del(&t_alg->entry);
	}
	return 0;
}

int cc_cipher_alloc(struct cc_drvdata *drvdata)
{
	struct cc_crypto_alg *t_alg;
	struct device *dev = drvdata_to_dev(drvdata);
	int rc = -ENOMEM;
	int alg;

	INIT_LIST_HEAD(&drvdata->alg_list);

	/* Linux crypto */
	dev_dbg(dev, "Number of algorithms = %zu\n",
		ARRAY_SIZE(skcipher_algs));
	for (alg = 0; alg < ARRAY_SIZE(skcipher_algs); alg++) {
		/* Skip templates the HW revision cannot run, templates
		 * whose standards body (NIST/OSCCA) is not enabled for
		 * this part, and security-sensitive (protected-key)
		 * templates when security features are disabled; see
		 * the predicate sketch after this function.
		 */
		if ((skcipher_algs[alg].min_hw_rev > drvdata->hw_rev) ||
		    !(drvdata->std_bodies & skcipher_algs[alg].std_body) ||
		    (drvdata->sec_disabled && skcipher_algs[alg].sec_func))
			continue;

		dev_dbg(dev, "creating %s\n", skcipher_algs[alg].driver_name);
		t_alg = cc_create_alg(&skcipher_algs[alg], dev);
		if (IS_ERR(t_alg)) {
			rc = PTR_ERR(t_alg);
			dev_err(dev, "%s alg allocation failed\n",
				skcipher_algs[alg].driver_name);
			goto fail0;
		}
		t_alg->drvdata = drvdata;

		dev_dbg(dev, "registering %s\n",
			skcipher_algs[alg].driver_name);
		rc = crypto_register_skcipher(&t_alg->skcipher_alg);
		dev_dbg(dev, "%s alg registration rc = %x\n",
			t_alg->skcipher_alg.base.cra_driver_name, rc);
		if (rc) {
			dev_err(dev, "%s alg registration failed\n",
				t_alg->skcipher_alg.base.cra_driver_name);
			goto fail0;
		}

		list_add_tail(&t_alg->entry, &drvdata->alg_list);
		dev_dbg(dev, "Registered %s\n",
			t_alg->skcipher_alg.base.cra_driver_name);
	}
	return 0;

fail0:
	cc_cipher_free(drvdata);
	return rc;
}
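
/*
 * A minimal sketch, not part of the driver: the three gating tests in
 * cc_cipher_alloc() factored into a hypothetical predicate, to make the
 * skip logic above explicit.
 */
static bool cc_alg_supported(const struct cc_alg_template *tmpl,
			     const struct cc_drvdata *drvdata)
{
	return tmpl->min_hw_rev <= drvdata->hw_rev &&	   /* HW new enough */
	       (drvdata->std_bodies & tmpl->std_body) &&   /* body enabled  */
	       !(drvdata->sec_disabled && tmpl->sec_func); /* sec allowed   */
}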