/*
 * Copyright (C) STMicroelectronics SA 2017
 * Author: Fabien Dessenne <[email protected]>
 * License terms: GNU General Public License (GPL), version 2
 */

#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/interrupt.h>
#include <linux/iopoll.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>

#include <crypto/aes.h>
#include <crypto/des.h>
#include <crypto/engine.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>

#define DRIVER_NAME             "stm32-cryp"

/* Bit [0] encrypt / decrypt */
#define FLG_ENCRYPT             BIT(0)
/* Bit [8..1] algo & operation mode */
#define FLG_AES                 BIT(1)
#define FLG_DES                 BIT(2)
#define FLG_TDES                BIT(3)
#define FLG_ECB                 BIT(4)
#define FLG_CBC                 BIT(5)
#define FLG_CTR                 BIT(6)
#define FLG_GCM                 BIT(7)
#define FLG_CCM                 BIT(8)
/* Mode mask = bits [15..0] */
#define FLG_MODE_MASK           GENMASK(15, 0)
/* Bit [31..16] status */
#define FLG_CCM_PADDED_WA       BIT(16)

/* Registers */
#define CRYP_CR                 0x00000000
#define CRYP_SR                 0x00000004
#define CRYP_DIN                0x00000008
#define CRYP_DOUT               0x0000000C
#define CRYP_DMACR              0x00000010
#define CRYP_IMSCR              0x00000014
#define CRYP_RISR               0x00000018
#define CRYP_MISR               0x0000001C
#define CRYP_K0LR               0x00000020
#define CRYP_K0RR               0x00000024
#define CRYP_K1LR               0x00000028
#define CRYP_K1RR               0x0000002C
#define CRYP_K2LR               0x00000030
#define CRYP_K2RR               0x00000034
#define CRYP_K3LR               0x00000038
#define CRYP_K3RR               0x0000003C
#define CRYP_IV0LR              0x00000040
#define CRYP_IV0RR              0x00000044
#define CRYP_IV1LR              0x00000048
#define CRYP_IV1RR              0x0000004C
#define CRYP_CSGCMCCM0R         0x00000050
#define CRYP_CSGCM0R            0x00000070

/* Registers values */
#define CR_DEC_NOT_ENC          0x00000004
#define CR_TDES_ECB             0x00000000
#define CR_TDES_CBC             0x00000008
#define CR_DES_ECB              0x00000010
#define CR_DES_CBC              0x00000018
#define CR_AES_ECB              0x00000020
#define CR_AES_CBC              0x00000028
#define CR_AES_CTR              0x00000030
#define CR_AES_KP               0x00000038
#define CR_AES_GCM              0x00080000
#define CR_AES_CCM              0x00080008
#define CR_AES_UNKNOWN          0xFFFFFFFF
#define CR_ALGO_MASK            0x00080038
#define CR_DATA32               0x00000000
#define CR_DATA16               0x00000040
#define CR_DATA8                0x00000080
#define CR_DATA1                0x000000C0
#define CR_KEY128               0x00000000
#define CR_KEY192               0x00000100
#define CR_KEY256               0x00000200
#define CR_FFLUSH               0x00004000
#define CR_CRYPEN               0x00008000
#define CR_PH_INIT              0x00000000
#define CR_PH_HEADER            0x00010000
#define CR_PH_PAYLOAD           0x00020000
#define CR_PH_FINAL             0x00030000
#define CR_PH_MASK              0x00030000
#define CR_NBPBL_SHIFT          20

#define SR_BUSY                 0x00000010
#define SR_OFNE                 0x00000004

#define IMSCR_IN                BIT(0)
#define IMSCR_OUT               BIT(1)

#define MISR_IN                 BIT(0)
#define MISR_OUT                BIT(1)

/* Misc */
#define AES_BLOCK_32            (AES_BLOCK_SIZE / sizeof(u32))
#define GCM_CTR_INIT            2
#define _walked_in              (cryp->in_walk.offset - cryp->in_sg->offset)
#define _walked_out             (cryp->out_walk.offset - cryp->out_sg->offset)
#define CRYP_AUTOSUSPEND_DELAY  50

struct stm32_cryp_caps {
        bool                    swap_final;
        bool                    padding_wa;
};

struct stm32_cryp_ctx {
        struct crypto_engine_ctx enginectx;
        struct stm32_cryp       *cryp;
        int                     keylen;
        u32                     key[AES_KEYSIZE_256 / sizeof(u32)];
        unsigned long           flags;
};

struct stm32_cryp_reqctx {
        unsigned long mode;
};

struct stm32_cryp {
        struct list_head        list;
        struct device           *dev;
        void __iomem            *regs;
        struct clk              *clk;
        unsigned long           flags;
        u32                     irq_status;
        const struct stm32_cryp_caps *caps;
        struct stm32_cryp_ctx   *ctx;

        struct crypto_engine    *engine;

        struct ablkcipher_request *req;
        struct aead_request     *areq;

        size_t                  authsize;
        size_t                  hw_blocksize;

        size_t                  total_in;
        size_t                  total_in_save;
        size_t                  total_out;
        size_t                  total_out_save;

        struct scatterlist      *in_sg;
        struct scatterlist      *out_sg;
        struct scatterlist      *out_sg_save;

        struct scatterlist      in_sgl;
        struct scatterlist      out_sgl;
        bool                    sgs_copied;

        int                     in_sg_len;
        int                     out_sg_len;

        struct scatter_walk     in_walk;
        struct scatter_walk     out_walk;

        u32                     last_ctr[4];
        u32                     gcm_ctr;
};

struct stm32_cryp_list {
        struct list_head        dev_list;
        spinlock_t              lock; /* protect dev_list */
};

static struct stm32_cryp_list cryp_list = {
        .dev_list = LIST_HEAD_INIT(cryp_list.dev_list),
        .lock     = __SPIN_LOCK_UNLOCKED(cryp_list.lock),
};

static inline bool is_aes(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_AES;
}

static inline bool is_des(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_DES;
}

static inline bool is_tdes(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_TDES;
}

static inline bool is_ecb(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_ECB;
}

static inline bool is_cbc(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_CBC;
}

static inline bool is_ctr(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_CTR;
}

static inline bool is_gcm(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_GCM;
}

static inline bool is_ccm(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_CCM;
}

static inline bool is_encrypt(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_ENCRYPT;
}

static inline bool is_decrypt(struct stm32_cryp *cryp)
{
        return !is_encrypt(cryp);
}

static inline u32 stm32_cryp_read(struct stm32_cryp *cryp, u32 ofst)
{
        return readl_relaxed(cryp->regs + ofst);
}

static inline void stm32_cryp_write(struct stm32_cryp *cryp, u32 ofst, u32 val)
{
        writel_relaxed(val, cryp->regs + ofst);
}

static inline int stm32_cryp_wait_busy(struct stm32_cryp *cryp)
{
        u32 status;

        return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
                        !(status & SR_BUSY), 10, 100000);
}

static inline int stm32_cryp_wait_enable(struct stm32_cryp *cryp)
{
        u32 status;

        return readl_relaxed_poll_timeout(cryp->regs + CRYP_CR, status,
                        !(status & CR_CRYPEN), 10, 100000);
}

static inline int stm32_cryp_wait_output(struct stm32_cryp *cryp)
{
        u32 status;

        return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
                        status & SR_OFNE, 10, 100000);
}

static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp);

static struct stm32_cryp *stm32_cryp_find_dev(struct stm32_cryp_ctx *ctx)
{
        struct stm32_cryp *tmp, *cryp = NULL;

        spin_lock_bh(&cryp_list.lock);
        if (!ctx->cryp) {
                list_for_each_entry(tmp, &cryp_list.dev_list, list) {
                        cryp = tmp;
                        break;
                }
                ctx->cryp = cryp;
        } else {
                cryp = ctx->cryp;
        }

        spin_unlock_bh(&cryp_list.lock);

        return cryp;
}

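/*
 * Check that the scatterlist covers exactly 'total' bytes and that every
 * chunk has a 32-bit aligned offset and an 'align'-multiple length, so the
 * data can be fed to the FIFO without an intermediate copy.
 */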
static int stm32_cryp_check_aligned(struct scatterlist *sg, size_t total,
                                    size_t align)
{
        int len = 0;

        if (!total)
                return 0;

        if (!IS_ALIGNED(total, align))
                return -EINVAL;

        while (sg) {
                if (!IS_ALIGNED(sg->offset, sizeof(u32)))
                        return -EINVAL;

                if (!IS_ALIGNED(sg->length, align))
                        return -EINVAL;

                len += sg->length;
                sg = sg_next(sg);
        }

        if (len != total)
                return -EINVAL;

        return 0;
}

static int stm32_cryp_check_io_aligned(struct stm32_cryp *cryp)
{
        int ret;

        ret = stm32_cryp_check_aligned(cryp->in_sg, cryp->total_in,
                                       cryp->hw_blocksize);
        if (ret)
                return ret;

        ret = stm32_cryp_check_aligned(cryp->out_sg, cryp->total_out,
                                       cryp->hw_blocksize);

        return ret;
}

static void sg_copy_buf(void *buf, struct scatterlist *sg,
                        unsigned int start, unsigned int nbytes, int out)
{
        struct scatter_walk walk;

        if (!nbytes)
                return;

        scatterwalk_start(&walk, sg);
        scatterwalk_advance(&walk, start);
        scatterwalk_copychunks(buf, &walk, nbytes, out);
        scatterwalk_done(&walk, out, 0);
}

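/*
 * If the request buffers are not suitably aligned, bounce them: allocate
 * block-aligned pages, copy the input there and substitute single-entry
 * scatterlists. stm32_cryp_finish_req() copies the result back and frees
 * the pages when 'sgs_copied' is set.
 */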
static int stm32_cryp_copy_sgs(struct stm32_cryp *cryp)
{
        void *buf_in, *buf_out;
        int pages, total_in, total_out;

        if (!stm32_cryp_check_io_aligned(cryp)) {
                cryp->sgs_copied = 0;
                return 0;
        }

        total_in = ALIGN(cryp->total_in, cryp->hw_blocksize);
        pages = total_in ? get_order(total_in) : 1;
        buf_in = (void *)__get_free_pages(GFP_ATOMIC, pages);

        total_out = ALIGN(cryp->total_out, cryp->hw_blocksize);
        pages = total_out ? get_order(total_out) : 1;
        buf_out = (void *)__get_free_pages(GFP_ATOMIC, pages);

        if (!buf_in || !buf_out) {
                dev_err(cryp->dev, "Can't allocate pages when unaligned\n");
                cryp->sgs_copied = 0;
                return -EFAULT;
        }

        sg_copy_buf(buf_in, cryp->in_sg, 0, cryp->total_in, 0);

        sg_init_one(&cryp->in_sgl, buf_in, total_in);
        cryp->in_sg = &cryp->in_sgl;
        cryp->in_sg_len = 1;

        sg_init_one(&cryp->out_sgl, buf_out, total_out);
        cryp->out_sg_save = cryp->out_sg;
        cryp->out_sg = &cryp->out_sgl;
        cryp->out_sg_len = 1;

        cryp->sgs_copied = 1;

        return 0;
}

static void stm32_cryp_hw_write_iv(struct stm32_cryp *cryp, u32 *iv)
{
        if (!iv)
                return;

        stm32_cryp_write(cryp, CRYP_IV0LR, cpu_to_be32(*iv++));
        stm32_cryp_write(cryp, CRYP_IV0RR, cpu_to_be32(*iv++));

        if (is_aes(cryp)) {
                stm32_cryp_write(cryp, CRYP_IV1LR, cpu_to_be32(*iv++));
                stm32_cryp_write(cryp, CRYP_IV1RR, cpu_to_be32(*iv++));
        }
}

static void stm32_cryp_get_iv(struct stm32_cryp *cryp)
{
        struct ablkcipher_request *req = cryp->req;
        u32 *tmp = req->info;

        if (!tmp)
                return;

        *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0LR));
        *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0RR));

        if (is_aes(cryp)) {
                *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1LR));
                *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1RR));
        }
}

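/*
 * Load the key into the KxL/KxR register pairs. DES uses only the K1 pair;
 * AES and 3DES keys are written from the last register (K3RR) backwards so
 * that the key ends up right-aligned whatever its length.
 */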
static void stm32_cryp_hw_write_key(struct stm32_cryp *c)
{
        unsigned int i;
        int r_id;

        if (is_des(c)) {
                stm32_cryp_write(c, CRYP_K1LR, cpu_to_be32(c->ctx->key[0]));
                stm32_cryp_write(c, CRYP_K1RR, cpu_to_be32(c->ctx->key[1]));
        } else {
                r_id = CRYP_K3RR;
                for (i = c->ctx->keylen / sizeof(u32); i > 0; i--, r_id -= 4)
                        stm32_cryp_write(c, r_id,
                                         cpu_to_be32(c->ctx->key[i - 1]));
        }
}

static u32 stm32_cryp_get_hw_mode(struct stm32_cryp *cryp)
{
        if (is_aes(cryp) && is_ecb(cryp))
                return CR_AES_ECB;

        if (is_aes(cryp) && is_cbc(cryp))
                return CR_AES_CBC;

        if (is_aes(cryp) && is_ctr(cryp))
                return CR_AES_CTR;

        if (is_aes(cryp) && is_gcm(cryp))
                return CR_AES_GCM;

        if (is_aes(cryp) && is_ccm(cryp))
                return CR_AES_CCM;

        if (is_des(cryp) && is_ecb(cryp))
                return CR_DES_ECB;

        if (is_des(cryp) && is_cbc(cryp))
                return CR_DES_CBC;

        if (is_tdes(cryp) && is_ecb(cryp))
                return CR_TDES_ECB;

        if (is_tdes(cryp) && is_cbc(cryp))
                return CR_TDES_CBC;

        dev_err(cryp->dev, "Unknown mode\n");
        return CR_AES_UNKNOWN;
}

static unsigned int stm32_cryp_get_input_text_len(struct stm32_cryp *cryp)
{
        return is_encrypt(cryp) ? cryp->areq->cryptlen :
                                  cryp->areq->cryptlen - cryp->authsize;
}

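/*
 * GCM init phase: program the 96-bit IV with a 32-bit counter starting at
 * GCM_CTR_INIT (counter value 1 is reserved for the tag computation), then
 * start the init phase and wait for the hardware to clear CRYPEN.
 */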
static int stm32_cryp_gcm_init(struct stm32_cryp *cryp, u32 cfg)
{
        int ret;
        u32 iv[4];

        /* Phase 1 : init */
        memcpy(iv, cryp->areq->iv, 12);
        iv[3] = cpu_to_be32(GCM_CTR_INIT);
        cryp->gcm_ctr = GCM_CTR_INIT;
        stm32_cryp_hw_write_iv(cryp, iv);

        stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN);

        /* Wait for end of processing */
        ret = stm32_cryp_wait_enable(cryp);
        if (ret)
                dev_err(cryp->dev, "Timeout (gcm init)\n");

        return ret;
}

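/*
 * CCM init phase: derive the initial counter block from the request IV
 * (length field zeroed, then counter set to 1), build the B0 block that
 * encodes the tag length, the AAD presence flag and the payload length,
 * and feed B0 to the FIFO to start the init phase.
 */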
static int stm32_cryp_ccm_init(struct stm32_cryp *cryp, u32 cfg)
{
        int ret;
        u8 iv[AES_BLOCK_SIZE], b0[AES_BLOCK_SIZE];
        u32 *d;
        unsigned int i, textlen;

        /* Phase 1 : init. Firstly set the CTR value to 1 (not 0) */
        memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
        memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);
        iv[AES_BLOCK_SIZE - 1] = 1;
        stm32_cryp_hw_write_iv(cryp, (u32 *)iv);

        /* Build B0 */
        memcpy(b0, iv, AES_BLOCK_SIZE);

        b0[0] |= (8 * ((cryp->authsize - 2) / 2));

        if (cryp->areq->assoclen)
                b0[0] |= 0x40;

        textlen = stm32_cryp_get_input_text_len(cryp);

        b0[AES_BLOCK_SIZE - 2] = textlen >> 8;
        b0[AES_BLOCK_SIZE - 1] = textlen & 0xFF;

        /* Enable HW */
        stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN);

        /* Write B0 */
        d = (u32 *)b0;

        for (i = 0; i < AES_BLOCK_32; i++) {
                if (!cryp->caps->padding_wa)
                        *d = cpu_to_be32(*d);
                stm32_cryp_write(cryp, CRYP_DIN, *d++);
        }

        /* Wait for end of processing */
        ret = stm32_cryp_wait_enable(cryp);
        if (ret)
                dev_err(cryp->dev, "Timeout (ccm init)\n");

        return ret;
}

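/*
 * Full hardware setup for one request: resume the device, load the key and
 * key-size configuration, run the AES key-preparation step when needed for
 * ECB/CBC decryption, then program mode, direction and IV, and enable the
 * block. For GCM/CCM this also runs the init phase and selects the next
 * phase (header, payload or final) depending on what data is present.
 */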
static int stm32_cryp_hw_init(struct stm32_cryp *cryp)
{
        int ret;
        u32 cfg, hw_mode;

        pm_runtime_get_sync(cryp->dev);

        /* Disable interrupt */
        stm32_cryp_write(cryp, CRYP_IMSCR, 0);

        /* Set key */
        stm32_cryp_hw_write_key(cryp);

        /* Set configuration */
        cfg = CR_DATA8 | CR_FFLUSH;

        switch (cryp->ctx->keylen) {
        case AES_KEYSIZE_128:
                cfg |= CR_KEY128;
                break;

        case AES_KEYSIZE_192:
                cfg |= CR_KEY192;
                break;

        default:
        case AES_KEYSIZE_256:
                cfg |= CR_KEY256;
                break;
        }

        hw_mode = stm32_cryp_get_hw_mode(cryp);
        if (hw_mode == CR_AES_UNKNOWN)
                return -EINVAL;

        /* AES ECB/CBC decrypt: run key preparation first */
        if (is_decrypt(cryp) &&
            ((hw_mode == CR_AES_ECB) || (hw_mode == CR_AES_CBC))) {
                stm32_cryp_write(cryp, CRYP_CR, cfg | CR_AES_KP | CR_CRYPEN);

                /* Wait for end of processing */
                ret = stm32_cryp_wait_busy(cryp);
                if (ret) {
                        dev_err(cryp->dev, "Timeout (key preparation)\n");
                        return ret;
                }
        }

        cfg |= hw_mode;

        if (is_decrypt(cryp))
                cfg |= CR_DEC_NOT_ENC;

        /* Apply config and flush (valid when CRYPEN = 0) */
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        switch (hw_mode) {
        case CR_AES_GCM:
        case CR_AES_CCM:
                /* Phase 1 : init */
                if (hw_mode == CR_AES_CCM)
                        ret = stm32_cryp_ccm_init(cryp, cfg);
                else
                        ret = stm32_cryp_gcm_init(cryp, cfg);

                if (ret)
                        return ret;

                /* Phase 2 : header (authenticated data) */
                if (cryp->areq->assoclen) {
                        cfg |= CR_PH_HEADER;
                } else if (stm32_cryp_get_input_text_len(cryp)) {
                        cfg |= CR_PH_PAYLOAD;
                        stm32_cryp_write(cryp, CRYP_CR, cfg);
                } else {
                        cfg |= CR_PH_INIT;
                }

                break;

        case CR_DES_CBC:
        case CR_TDES_CBC:
        case CR_AES_CBC:
        case CR_AES_CTR:
                stm32_cryp_hw_write_iv(cryp, (u32 *)cryp->req->info);
                break;

        default:
                break;
        }

        /* Enable now */
        cfg |= CR_CRYPEN;

        stm32_cryp_write(cryp, CRYP_CR, cfg);

        cryp->flags &= ~FLG_CCM_PADDED_WA;

        return 0;
}

static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err)
{
        if (!err && (is_gcm(cryp) || is_ccm(cryp)))
                /* Phase 4 : output tag */
                err = stm32_cryp_read_auth_tag(cryp);

        if (!err && (!(is_gcm(cryp) || is_ccm(cryp))))
                stm32_cryp_get_iv(cryp);

        if (cryp->sgs_copied) {
                void *buf_in, *buf_out;
                int pages, len;

                buf_in = sg_virt(&cryp->in_sgl);
                buf_out = sg_virt(&cryp->out_sgl);

                sg_copy_buf(buf_out, cryp->out_sg_save, 0,
                            cryp->total_out_save, 1);

                len = ALIGN(cryp->total_in_save, cryp->hw_blocksize);
                pages = len ? get_order(len) : 1;
                free_pages((unsigned long)buf_in, pages);

                len = ALIGN(cryp->total_out_save, cryp->hw_blocksize);
                pages = len ? get_order(len) : 1;
                free_pages((unsigned long)buf_out, pages);
        }

        pm_runtime_mark_last_busy(cryp->dev);
        pm_runtime_put_autosuspend(cryp->dev);

        if (is_gcm(cryp) || is_ccm(cryp))
                crypto_finalize_aead_request(cryp->engine, cryp->areq, err);
        else
                crypto_finalize_ablkcipher_request(cryp->engine, cryp->req,
                                                   err);

        memset(cryp->ctx->key, 0, cryp->ctx->keylen);
}

static int stm32_cryp_cpu_start(struct stm32_cryp *cryp)
{
        /* Enable interrupt and let the IRQ handler do everything */
        stm32_cryp_write(cryp, CRYP_IMSCR, IMSCR_IN | IMSCR_OUT);

        return 0;
}

static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq);
static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine,
                                         void *areq);

static int stm32_cryp_cra_init(struct crypto_tfm *tfm)
{
        struct stm32_cryp_ctx *ctx = crypto_tfm_ctx(tfm);

        tfm->crt_ablkcipher.reqsize = sizeof(struct stm32_cryp_reqctx);

        ctx->enginectx.op.do_one_request = stm32_cryp_cipher_one_req;
        ctx->enginectx.op.prepare_request = stm32_cryp_prepare_cipher_req;
        ctx->enginectx.op.unprepare_request = NULL;
        return 0;
}

static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq);
static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine,
                                       void *areq);

static int stm32_cryp_aes_aead_init(struct crypto_aead *tfm)
{
        struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);

        tfm->reqsize = sizeof(struct stm32_cryp_reqctx);

        ctx->enginectx.op.do_one_request = stm32_cryp_aead_one_req;
        ctx->enginectx.op.prepare_request = stm32_cryp_prepare_aead_req;
        ctx->enginectx.op.unprepare_request = NULL;

        return 0;
}

static int stm32_cryp_crypt(struct ablkcipher_request *req, unsigned long mode)
{
        struct stm32_cryp_ctx *ctx = crypto_ablkcipher_ctx(
                        crypto_ablkcipher_reqtfm(req));
        struct stm32_cryp_reqctx *rctx = ablkcipher_request_ctx(req);
        struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);

        if (!cryp)
                return -ENODEV;

        rctx->mode = mode;

        return crypto_transfer_ablkcipher_request_to_engine(cryp->engine, req);
}

static int stm32_cryp_aead_crypt(struct aead_request *req, unsigned long mode)
{
        struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
        struct stm32_cryp_reqctx *rctx = aead_request_ctx(req);
        struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);

        if (!cryp)
                return -ENODEV;

        rctx->mode = mode;

        return crypto_transfer_aead_request_to_engine(cryp->engine, req);
}

static int stm32_cryp_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
                             unsigned int keylen)
{
        struct stm32_cryp_ctx *ctx = crypto_ablkcipher_ctx(tfm);

        memcpy(ctx->key, key, keylen);
        ctx->keylen = keylen;

        return 0;
}

static int stm32_cryp_aes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
                                 unsigned int keylen)
{
        if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
            keylen != AES_KEYSIZE_256)
                return -EINVAL;
        else
                return stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_des_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
                                 unsigned int keylen)
{
        u32 tmp[DES_EXPKEY_WORDS];

        if (keylen != DES_KEY_SIZE)
                return -EINVAL;

        if ((crypto_ablkcipher_get_flags(tfm) &
             CRYPTO_TFM_REQ_FORBID_WEAK_KEYS) &&
            unlikely(!des_ekey(tmp, key))) {
                crypto_ablkcipher_set_flags(tfm, CRYPTO_TFM_RES_WEAK_KEY);
                return -EINVAL;
        }

        return stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_tdes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
                                  unsigned int keylen)
{
        u32 flags;
        int err;

        flags = crypto_ablkcipher_get_flags(tfm);
        err = __des3_verify_key(&flags, key);
        if (unlikely(err)) {
                crypto_ablkcipher_set_flags(tfm, flags);
                return err;
        }

        return stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_aes_aead_setkey(struct crypto_aead *tfm, const u8 *key,
                                      unsigned int keylen)
{
        struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);

        if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
            keylen != AES_KEYSIZE_256)
                return -EINVAL;

        memcpy(ctx->key, key, keylen);
        ctx->keylen = keylen;

        return 0;
}

static int stm32_cryp_aes_gcm_setauthsize(struct crypto_aead *tfm,
                                          unsigned int authsize)
{
        return authsize == AES_BLOCK_SIZE ? 0 : -EINVAL;
}

static int stm32_cryp_aes_ccm_setauthsize(struct crypto_aead *tfm,
                                          unsigned int authsize)
{
        switch (authsize) {
        case 4:
        case 6:
        case 8:
        case 10:
        case 12:
        case 14:
        case 16:
                break;
        default:
                return -EINVAL;
        }

        return 0;
}

static int stm32_cryp_aes_ecb_encrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_AES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ecb_decrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_AES | FLG_ECB);
}

static int stm32_cryp_aes_cbc_encrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_AES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_aes_cbc_decrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_AES | FLG_CBC);
}

static int stm32_cryp_aes_ctr_encrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_AES | FLG_CTR | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ctr_decrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_AES | FLG_CTR);
}

static int stm32_cryp_aes_gcm_encrypt(struct aead_request *req)
{
        return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM | FLG_ENCRYPT);
}

static int stm32_cryp_aes_gcm_decrypt(struct aead_request *req)
{
        return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM);
}

static int stm32_cryp_aes_ccm_encrypt(struct aead_request *req)
{
        return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ccm_decrypt(struct aead_request *req)
{
        return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM);
}

static int stm32_cryp_des_ecb_encrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_DES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_des_ecb_decrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_DES | FLG_ECB);
}

static int stm32_cryp_des_cbc_encrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_DES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_des_cbc_decrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_DES | FLG_CBC);
}

static int stm32_cryp_tdes_ecb_encrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_tdes_ecb_decrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB);
}

static int stm32_cryp_tdes_cbc_encrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_tdes_cbc_decrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC);
}

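/*
 * Common preparation for cipher and AEAD requests: resolve the device and
 * context, compute total_in/total_out (see the layout diagram below),
 * bounce unaligned scatterlists, start the scatter walks and initialize
 * the hardware.
 */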
static int stm32_cryp_prepare_req(struct ablkcipher_request *req,
                                  struct aead_request *areq)
{
        struct stm32_cryp_ctx *ctx;
        struct stm32_cryp *cryp;
        struct stm32_cryp_reqctx *rctx;
        int ret;

        if (!req && !areq)
                return -EINVAL;

        ctx = req ? crypto_ablkcipher_ctx(crypto_ablkcipher_reqtfm(req)) :
                    crypto_aead_ctx(crypto_aead_reqtfm(areq));

        cryp = ctx->cryp;

        if (!cryp)
                return -ENODEV;

        rctx = req ? ablkcipher_request_ctx(req) : aead_request_ctx(areq);
        rctx->mode &= FLG_MODE_MASK;

        ctx->cryp = cryp;

        cryp->flags = (cryp->flags & ~FLG_MODE_MASK) | rctx->mode;
        cryp->hw_blocksize = is_aes(cryp) ? AES_BLOCK_SIZE : DES_BLOCK_SIZE;
        cryp->ctx = ctx;

        if (req) {
                cryp->req = req;
                cryp->areq = NULL;
                cryp->total_in = req->nbytes;
                cryp->total_out = cryp->total_in;
        } else {
                /*
                 * Length of input and output data:
                 * Encryption case:
                 *  INPUT  =   AssocData  ||   PlainText
                 *          <- assoclen ->  <- cryptlen ->
                 *          <------- total_in ----------->
                 *
                 *  OUTPUT =   AssocData  ||  CipherText  ||   AuthTag
                 *          <- assoclen ->  <- cryptlen ->  <- authsize ->
                 *          <---------------- total_out ----------------->
                 *
                 * Decryption case:
                 *  INPUT  =   AssocData  ||  CipherText  ||  AuthTag
                 *          <- assoclen ->  <--------- cryptlen --------->
                 *                                          <- authsize ->
                 *          <---------------- total_in ------------------>
                 *
                 *  OUTPUT =   AssocData  ||   PlainText
                 *          <- assoclen ->  <- cryptlen - authsize ->
                 *          <---------- total_out ----------------->
                 */
                cryp->areq = areq;
                cryp->req = NULL;
                cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq));
                cryp->total_in = areq->assoclen + areq->cryptlen;
                if (is_encrypt(cryp))
                        /* Append auth tag to output */
                        cryp->total_out = cryp->total_in + cryp->authsize;
                else
                        /* No auth tag in output */
                        cryp->total_out = cryp->total_in - cryp->authsize;
        }

        cryp->total_in_save = cryp->total_in;
        cryp->total_out_save = cryp->total_out;

        cryp->in_sg = req ? req->src : areq->src;
        cryp->out_sg = req ? req->dst : areq->dst;
        cryp->out_sg_save = cryp->out_sg;

        cryp->in_sg_len = sg_nents_for_len(cryp->in_sg, cryp->total_in);
        if (cryp->in_sg_len < 0) {
                dev_err(cryp->dev, "Cannot get in_sg_len\n");
                ret = cryp->in_sg_len;
                return ret;
        }

        cryp->out_sg_len = sg_nents_for_len(cryp->out_sg, cryp->total_out);
        if (cryp->out_sg_len < 0) {
                dev_err(cryp->dev, "Cannot get out_sg_len\n");
                ret = cryp->out_sg_len;
                return ret;
        }

        ret = stm32_cryp_copy_sgs(cryp);
        if (ret)
                return ret;

        scatterwalk_start(&cryp->in_walk, cryp->in_sg);
        scatterwalk_start(&cryp->out_walk, cryp->out_sg);

        if (is_gcm(cryp) || is_ccm(cryp)) {
                /* In output, jump after assoc data */
                scatterwalk_advance(&cryp->out_walk, cryp->areq->assoclen);
                cryp->total_out -= cryp->areq->assoclen;
        }

        ret = stm32_cryp_hw_init(cryp);
        return ret;
}

static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine,
                                         void *areq)
{
        struct ablkcipher_request *req = container_of(areq,
                                                      struct ablkcipher_request,
                                                      base);

        return stm32_cryp_prepare_req(req, NULL);
}

static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq)
{
        struct ablkcipher_request *req = container_of(areq,
                                                      struct ablkcipher_request,
                                                      base);
        struct stm32_cryp_ctx *ctx = crypto_ablkcipher_ctx(
                        crypto_ablkcipher_reqtfm(req));
        struct stm32_cryp *cryp = ctx->cryp;

        if (!cryp)
                return -ENODEV;

        return stm32_cryp_cpu_start(cryp);
}

static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine, void *areq)
{
        struct aead_request *req = container_of(areq, struct aead_request,
                                                base);

        return stm32_cryp_prepare_req(NULL, req);
}

static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq)
{
        struct aead_request *req = container_of(areq, struct aead_request,
                                                base);
        struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
        struct stm32_cryp *cryp = ctx->cryp;

        if (!cryp)
                return -ENODEV;

        if (unlikely(!cryp->areq->assoclen &&
                     !stm32_cryp_get_input_text_len(cryp))) {
                /* No input data to process: get tag and finish */
                stm32_cryp_finish_req(cryp, 0);
                return 0;
        }

        return stm32_cryp_cpu_start(cryp);
}

static u32 *stm32_cryp_next_out(struct stm32_cryp *cryp, u32 *dst,
                                unsigned int n)
{
        scatterwalk_advance(&cryp->out_walk, n);

        if (unlikely(cryp->out_sg->length == _walked_out)) {
                cryp->out_sg = sg_next(cryp->out_sg);
                if (cryp->out_sg) {
                        scatterwalk_start(&cryp->out_walk, cryp->out_sg);
                        return (sg_virt(cryp->out_sg) + _walked_out);
                }
        }

        return (u32 *)((u8 *)dst + n);
}

static u32 *stm32_cryp_next_in(struct stm32_cryp *cryp, u32 *src,
                               unsigned int n)
{
        scatterwalk_advance(&cryp->in_walk, n);

        if (unlikely(cryp->in_sg->length == _walked_in)) {
                cryp->in_sg = sg_next(cryp->in_sg);
                if (cryp->in_sg) {
                        scatterwalk_start(&cryp->in_walk, cryp->in_sg);
                        return (sg_virt(cryp->in_sg) + _walked_in);
                }
        }

        return (u32 *)((u8 *)src + n);
}

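/*
 * GCM/CCM final phase: switch the peripheral to the Final phase (always in
 * encrypt direction), feed it the AAD/payload bit lengths (GCM) or CTR0
 * (CCM), then either copy the computed tag to the output (encryption) or
 * compare it against the tag found at the end of the input (decryption).
 */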
static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp)
{
        u32 cfg, size_bit, *dst, d32;
        u8 *d8;
        unsigned int i, j;
        int ret = 0;

        /* Update Config */
        cfg = stm32_cryp_read(cryp, CRYP_CR);

        cfg &= ~CR_PH_MASK;
        cfg |= CR_PH_FINAL;
        cfg &= ~CR_DEC_NOT_ENC;
        cfg |= CR_CRYPEN;

        stm32_cryp_write(cryp, CRYP_CR, cfg);

        if (is_gcm(cryp)) {
                /* GCM: write aad and payload size (in bits) */
                size_bit = cryp->areq->assoclen * 8;
                if (cryp->caps->swap_final)
                        size_bit = cpu_to_be32(size_bit);

                stm32_cryp_write(cryp, CRYP_DIN, 0);
                stm32_cryp_write(cryp, CRYP_DIN, size_bit);

                size_bit = is_encrypt(cryp) ? cryp->areq->cryptlen :
                                cryp->areq->cryptlen - AES_BLOCK_SIZE;
                size_bit *= 8;
                if (cryp->caps->swap_final)
                        size_bit = cpu_to_be32(size_bit);

                stm32_cryp_write(cryp, CRYP_DIN, 0);
                stm32_cryp_write(cryp, CRYP_DIN, size_bit);
        } else {
                /* CCM: write CTR0 */
                u8 iv[AES_BLOCK_SIZE];
                u32 *iv32 = (u32 *)iv;

                memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
                memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);

                for (i = 0; i < AES_BLOCK_32; i++) {
                        if (!cryp->caps->padding_wa)
                                *iv32 = cpu_to_be32(*iv32);
                        stm32_cryp_write(cryp, CRYP_DIN, *iv32++);
                }
        }

        /* Wait for output data */
        ret = stm32_cryp_wait_output(cryp);
        if (ret) {
                dev_err(cryp->dev, "Timeout (read tag)\n");
                return ret;
        }

        if (is_encrypt(cryp)) {
                /* Get and write tag */
                dst = sg_virt(cryp->out_sg) + _walked_out;

                for (i = 0; i < AES_BLOCK_32; i++) {
                        if (cryp->total_out >= sizeof(u32)) {
                                /* Read a full u32 */
                                *dst = stm32_cryp_read(cryp, CRYP_DOUT);

                                dst = stm32_cryp_next_out(cryp, dst,
                                                          sizeof(u32));
                                cryp->total_out -= sizeof(u32);
                        } else if (!cryp->total_out) {
                                /* Empty fifo out (data from input padding) */
                                stm32_cryp_read(cryp, CRYP_DOUT);
                        } else {
                                /* Read less than an u32 */
                                d32 = stm32_cryp_read(cryp, CRYP_DOUT);
                                d8 = (u8 *)&d32;

                                for (j = 0; j < cryp->total_out; j++) {
                                        *((u8 *)dst) = *(d8++);
                                        dst = stm32_cryp_next_out(cryp, dst, 1);
                                }
                                cryp->total_out = 0;
                        }
                }
        } else {
                /* Get and check tag */
                u32 in_tag[AES_BLOCK_32], out_tag[AES_BLOCK_32];

                scatterwalk_map_and_copy(in_tag, cryp->in_sg,
                                         cryp->total_in_save - cryp->authsize,
                                         cryp->authsize, 0);

                for (i = 0; i < AES_BLOCK_32; i++)
                        out_tag[i] = stm32_cryp_read(cryp, CRYP_DOUT);

                if (crypto_memneq(in_tag, out_tag, cryp->authsize))
                        ret = -EBADMSG;
        }

        /* Disable cryp */
        cfg &= ~CR_CRYPEN;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        return ret;
}

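/*
 * The hardware only increments the 32 LSBs of the CTR counter: when they
 * are about to wrap, propagate the carry through the upper words by hand
 * and reload the full IV, then snapshot the current counter value.
 */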
static void stm32_cryp_check_ctr_counter(struct stm32_cryp *cryp)
{
        u32 cr;

        if (unlikely(cryp->last_ctr[3] == 0xFFFFFFFF)) {
                cryp->last_ctr[3] = 0;
                cryp->last_ctr[2]++;
                if (!cryp->last_ctr[2]) {
                        cryp->last_ctr[1]++;
                        if (!cryp->last_ctr[1])
                                cryp->last_ctr[0]++;
                }

                cr = stm32_cryp_read(cryp, CRYP_CR);
                stm32_cryp_write(cryp, CRYP_CR, cr & ~CR_CRYPEN);

                stm32_cryp_hw_write_iv(cryp, (u32 *)cryp->last_ctr);

                stm32_cryp_write(cryp, CRYP_CR, cr);
        }

        cryp->last_ctr[0] = stm32_cryp_read(cryp, CRYP_IV0LR);
        cryp->last_ctr[1] = stm32_cryp_read(cryp, CRYP_IV0RR);
        cryp->last_ctr[2] = stm32_cryp_read(cryp, CRYP_IV1LR);
        cryp->last_ctr[3] = stm32_cryp_read(cryp, CRYP_IV1RR);
}

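/*
 * Drain one hardware block from the output FIFO into the output
 * scatterlist, stopping short of the tag area for AEAD encryption.
 * Returns true once all output (tag excluded) has been read or no input
 * remains.
 */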
static bool stm32_cryp_irq_read_data(struct stm32_cryp *cryp)
{
        unsigned int i, j;
        u32 d32, *dst;
        u8 *d8;
        size_t tag_size;

        /* Do not read the tag now (if any) */
        if (is_encrypt(cryp) && (is_gcm(cryp) || is_ccm(cryp)))
                tag_size = cryp->authsize;
        else
                tag_size = 0;

        dst = sg_virt(cryp->out_sg) + _walked_out;

        for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) {
                if (likely(cryp->total_out - tag_size >= sizeof(u32))) {
                        /* Read a full u32 */
                        *dst = stm32_cryp_read(cryp, CRYP_DOUT);

                        dst = stm32_cryp_next_out(cryp, dst, sizeof(u32));
                        cryp->total_out -= sizeof(u32);
                } else if (cryp->total_out == tag_size) {
                        /* Empty fifo out (data from input padding) */
                        d32 = stm32_cryp_read(cryp, CRYP_DOUT);
                } else {
                        /* Read less than an u32 */
                        d32 = stm32_cryp_read(cryp, CRYP_DOUT);
                        d8 = (u8 *)&d32;

                        for (j = 0; j < cryp->total_out - tag_size; j++) {
                                *((u8 *)dst) = *(d8++);
                                dst = stm32_cryp_next_out(cryp, dst, 1);
                        }
                        cryp->total_out = tag_size;
                }
        }

        return !(cryp->total_out - tag_size) || !cryp->total_in;
}

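/*
 * Feed one hardware block from the input scatterlist into the input FIFO,
 * zero-padding the last words when fewer than a full block of payload
 * remains (the tag of an AEAD decryption is never written here).
 */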
static void stm32_cryp_irq_write_block(struct stm32_cryp *cryp)
{
        unsigned int i, j;
        u32 *src;
        u8 d8[4];
        size_t tag_size;

        /* Do not write the tag (if any) */
        if (is_decrypt(cryp) && (is_gcm(cryp) || is_ccm(cryp)))
                tag_size = cryp->authsize;
        else
                tag_size = 0;

        src = sg_virt(cryp->in_sg) + _walked_in;

        for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) {
                if (likely(cryp->total_in - tag_size >= sizeof(u32))) {
                        /* Write a full u32 */
                        stm32_cryp_write(cryp, CRYP_DIN, *src);

                        src = stm32_cryp_next_in(cryp, src, sizeof(u32));
                        cryp->total_in -= sizeof(u32);
                } else if (cryp->total_in == tag_size) {
                        /* Write padding data */
                        stm32_cryp_write(cryp, CRYP_DIN, 0);
                } else {
                        /* Write less than an u32 */
                        memset(d8, 0, sizeof(u32));
                        for (j = 0; j < cryp->total_in - tag_size; j++) {
                                d8[j] = *((u8 *)src);
                                src = stm32_cryp_next_in(cryp, src, 1);
                        }

                        stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
                        cryp->total_in = tag_size;
                }
        }
}

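/*
 * Workaround for hardware that cannot pad the last GCM payload block
 * itself: encrypt the partial block in CTR mode with the matching counter
 * value, then replay the result through the GCM Final phase so the tag
 * accounts for it ('Special workaround' procedure from the datasheet).
 */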
static void stm32_cryp_irq_write_gcm_padded_data(struct stm32_cryp *cryp)
{
        int err;
        u32 cfg, tmp[AES_BLOCK_32];
        size_t total_in_ori = cryp->total_in;
        struct scatterlist *out_sg_ori = cryp->out_sg;
        unsigned int i;

        /* 'Special workaround' procedure described in the datasheet */

        /* a) disable ip */
        stm32_cryp_write(cryp, CRYP_IMSCR, 0);
        cfg = stm32_cryp_read(cryp, CRYP_CR);
        cfg &= ~CR_CRYPEN;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        /* b) Update IV1R */
        stm32_cryp_write(cryp, CRYP_IV1RR, cryp->gcm_ctr - 2);

        /* c) change mode to CTR */
        cfg &= ~CR_ALGO_MASK;
        cfg |= CR_AES_CTR;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        /* a) enable IP */
        cfg |= CR_CRYPEN;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        /* b) pad and write the last block */
        stm32_cryp_irq_write_block(cryp);
        cryp->total_in = total_in_ori;
        err = stm32_cryp_wait_output(cryp);
        if (err) {
                dev_err(cryp->dev, "Timeout (write gcm padded data)\n");
                return stm32_cryp_finish_req(cryp, err);
        }

        /* c) get and store encrypted data */
        stm32_cryp_irq_read_data(cryp);
        scatterwalk_map_and_copy(tmp, out_sg_ori,
                                 cryp->total_in_save - total_in_ori,
                                 total_in_ori, 0);

        /* d) change mode back to AES GCM */
        cfg &= ~CR_ALGO_MASK;
        cfg |= CR_AES_GCM;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        /* e) change phase to Final */
        cfg &= ~CR_PH_MASK;
        cfg |= CR_PH_FINAL;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        /* f) write padded data */
        for (i = 0; i < AES_BLOCK_32; i++) {
                if (cryp->total_in)
                        stm32_cryp_write(cryp, CRYP_DIN, tmp[i]);
                else
                        stm32_cryp_write(cryp, CRYP_DIN, 0);

                cryp->total_in -= min_t(size_t, sizeof(u32), cryp->total_in);
        }

        /* g) Empty fifo out */
        err = stm32_cryp_wait_output(cryp);
        if (err) {
                dev_err(cryp->dev, "Timeout (write gcm padded data)\n");
                return stm32_cryp_finish_req(cryp, err);
        }

        for (i = 0; i < AES_BLOCK_32; i++)
                stm32_cryp_read(cryp, CRYP_DOUT);

        /* h) run the normal Final phase */
        stm32_cryp_finish_req(cryp, 0);
}

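/*
 * On hardware with NPBLB support, tell the peripheral how many padding
 * bytes the last block contains so it can exclude them from the tag
 * computation.
 */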
static void stm32_cryp_irq_set_npblb(struct stm32_cryp *cryp)
{
        u32 cfg, payload_bytes;

        /* disable ip, set NPBLB and re-enable ip */
        cfg = stm32_cryp_read(cryp, CRYP_CR);
        cfg &= ~CR_CRYPEN;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        payload_bytes = is_decrypt(cryp) ? cryp->total_in - cryp->authsize :
                                           cryp->total_in;
        cfg |= (cryp->hw_blocksize - payload_bytes) << CR_NBPBL_SHIFT;
        cfg |= CR_CRYPEN;
        stm32_cryp_write(cryp, CRYP_CR, cfg);
}

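/*
 * Workaround for hardware that cannot pad the last CCM payload block on
 * decryption: decrypt the partial block in CTR mode, then XOR the saved
 * intermediate CSGCMCCM state into the padded plaintext and replay it
 * through the header phase so the MAC stays correct ('Special workaround'
 * procedure from the datasheet).
 */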
static void stm32_cryp_irq_write_ccm_padded_data(struct stm32_cryp *cryp)
{
        int err = 0;
        u32 cfg, iv1tmp;
        u32 cstmp1[AES_BLOCK_32], cstmp2[AES_BLOCK_32], tmp[AES_BLOCK_32];
        size_t last_total_out, total_in_ori = cryp->total_in;
        struct scatterlist *out_sg_ori = cryp->out_sg;
        unsigned int i;

        /* 'Special workaround' procedure described in the datasheet */
        cryp->flags |= FLG_CCM_PADDED_WA;

        /* a) disable ip */
        stm32_cryp_write(cryp, CRYP_IMSCR, 0);

        cfg = stm32_cryp_read(cryp, CRYP_CR);
        cfg &= ~CR_CRYPEN;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        /* b) get IV1 from CRYP_CSGCMCCM7 */
        iv1tmp = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + 7 * 4);

        /* c) Load CRYP_CSGCMCCMxR */
        for (i = 0; i < ARRAY_SIZE(cstmp1); i++)
                cstmp1[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);

        /* d) Write IV1R */
        stm32_cryp_write(cryp, CRYP_IV1RR, iv1tmp);

        /* e) change mode to CTR */
        cfg &= ~CR_ALGO_MASK;
        cfg |= CR_AES_CTR;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        /* a) enable IP */
        cfg |= CR_CRYPEN;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        /* b) pad and write the last block */
        stm32_cryp_irq_write_block(cryp);
        cryp->total_in = total_in_ori;
        err = stm32_cryp_wait_output(cryp);
        if (err) {
                dev_err(cryp->dev, "Timeout (write ccm padded data)\n");
                return stm32_cryp_finish_req(cryp, err);
        }

        /* c) get and store decrypted data */
        last_total_out = cryp->total_out;
        stm32_cryp_irq_read_data(cryp);

        memset(tmp, 0, sizeof(tmp));
        scatterwalk_map_and_copy(tmp, out_sg_ori,
                                 cryp->total_out_save - last_total_out,
                                 last_total_out, 0);

        /* d) Load again CRYP_CSGCMCCMxR */
        for (i = 0; i < ARRAY_SIZE(cstmp2); i++)
                cstmp2[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);

        /* e) change mode back to AES CCM */
        cfg &= ~CR_ALGO_MASK;
        cfg |= CR_AES_CCM;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        /* f) change phase to header */
        cfg &= ~CR_PH_MASK;
        cfg |= CR_PH_HEADER;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        /* g) XOR and write padded data */
        for (i = 0; i < ARRAY_SIZE(tmp); i++) {
                tmp[i] ^= cstmp1[i];
                tmp[i] ^= cstmp2[i];
                stm32_cryp_write(cryp, CRYP_DIN, tmp[i]);
        }

        /* h) wait for completion */
        err = stm32_cryp_wait_busy(cryp);
        if (err)
                dev_err(cryp->dev, "Timeout (write ccm padded data)\n");

        /* i) run the normal Final phase */
        stm32_cryp_finish_req(cryp, err);
}

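/*
 * Write the next input block, first handling the incomplete-last-block
 * cases of GCM encryption and CCM decryption (either via the datasheet
 * workaround or via NPBLB) and the CTR counter wrap.
 */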
static void stm32_cryp_irq_write_data(struct stm32_cryp *cryp)
{
        if (unlikely(!cryp->total_in)) {
                dev_warn(cryp->dev, "No more data to process\n");
                return;
        }

        if (unlikely(cryp->total_in < AES_BLOCK_SIZE &&
                     (stm32_cryp_get_hw_mode(cryp) == CR_AES_GCM) &&
                     is_encrypt(cryp))) {
                /* Padding for AES GCM encryption */
                if (cryp->caps->padding_wa)
                        /* Special case 1 */
                        return stm32_cryp_irq_write_gcm_padded_data(cryp);

                /* Setting padding bytes (NPBLB) */
                stm32_cryp_irq_set_npblb(cryp);
        }

        if (unlikely((cryp->total_in - cryp->authsize < AES_BLOCK_SIZE) &&
                     (stm32_cryp_get_hw_mode(cryp) == CR_AES_CCM) &&
                     is_decrypt(cryp))) {
                /* Padding for AES CCM decryption */
                if (cryp->caps->padding_wa)
                        /* Special case 2 */
                        return stm32_cryp_irq_write_ccm_padded_data(cryp);

                /* Setting padding bytes (NPBLB) */
                stm32_cryp_irq_set_npblb(cryp);
        }

        if (is_aes(cryp) && is_ctr(cryp))
                stm32_cryp_check_ctr_counter(cryp);

        stm32_cryp_irq_write_block(cryp);
}

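/*
 * Header phase for GCM: push up to one block of AAD into the FIFO,
 * zero-padding the last block, then switch to the payload phase or, if
 * there is no payload, finish the request.
 */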
1549 static void stm32_cryp_irq_write_gcm_header(struct stm32_cryp *cryp)
1550 {
1551         int err;
1552         unsigned int i, j;
1553         u32 cfg, *src;
1554
1555         src = sg_virt(cryp->in_sg) + _walked_in;
1556
1557         for (i = 0; i < AES_BLOCK_32; i++) {
1558                 stm32_cryp_write(cryp, CRYP_DIN, *src);
1559
1560                 src = stm32_cryp_next_in(cryp, src, sizeof(u32));
1561                 cryp->total_in -= min_t(size_t, sizeof(u32), cryp->total_in);
1562
1563                 /* Check if whole header written */
1564                 if ((cryp->total_in_save - cryp->total_in) ==
1565                                 cryp->areq->assoclen) {
1566                         /* Write padding if needed */
1567                         for (j = i + 1; j < AES_BLOCK_32; j++)
1568                                 stm32_cryp_write(cryp, CRYP_DIN, 0);
1569
1570                         /* Wait for completion */
1571                         err = stm32_cryp_wait_busy(cryp);
1572                         if (err) {
1573                                 dev_err(cryp->dev, "Timeout (gcm header)\n");
1574                                 return stm32_cryp_finish_req(cryp, err);
1575                         }
1576
1577                         if (stm32_cryp_get_input_text_len(cryp)) {
1578                                 /* Phase 3 : payload */
1579                                 cfg = stm32_cryp_read(cryp, CRYP_CR);
1580                                 cfg &= ~CR_CRYPEN;
1581                                 stm32_cryp_write(cryp, CRYP_CR, cfg);
1582
1583                                 cfg &= ~CR_PH_MASK;
1584                                 cfg |= CR_PH_PAYLOAD;
1585                                 cfg |= CR_CRYPEN;
1586                                 stm32_cryp_write(cryp, CRYP_CR, cfg);
1587                         } else {
1588                                 /* Phase 4 : tag */
1589                                 stm32_cryp_write(cryp, CRYP_IMSCR, 0);
1590                                 stm32_cryp_finish_req(cryp, 0);
1591                         }
1592
1593                         break;
1594                 }
1595
1596                 if (!cryp->total_in)
1597                         break;
1598         }
1599 }
1600
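/*
 * CCM header phase: block B1 starts with the associated data length
 * encoded as per NIST SP 800-38C / RFC 3610: lengths up to 2^16 - 2^8
 * (65280) take two bytes, larger ones the marker 0xFF 0xFE followed by
 * a four-byte big-endian length. The remaining AAD bytes are then
 * packed into u32 FIFO words, and the last block is zero-padded before
 * moving on to the payload or tag phase.
 */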
1601 static void stm32_cryp_irq_write_ccm_header(struct stm32_cryp *cryp)
1602 {
1603         int err;
1604         unsigned int i = 0, j, k;
1605         u32 alen, cfg, *src;
1606         u8 d8[4];
1607
1608         src = sg_virt(cryp->in_sg) + _walked_in;
1609         alen = cryp->areq->assoclen;
1610
1611         if (!_walked_in) {
1612                 if (alen <= 65280) {
1613                         /* Write first u32 of B1 */
1614                         d8[0] = (alen >> 8) & 0xFF;
1615                         d8[1] = alen & 0xFF;
1616                         d8[2] = *((u8 *)src);
1617                         src = stm32_cryp_next_in(cryp, src, 1);
1618                         d8[3] = *((u8 *)src);
1619                         src = stm32_cryp_next_in(cryp, src, 1);
1620
1621                         stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
1622                         i++;
1623
1624                         cryp->total_in -= min_t(size_t, 2, cryp->total_in);
1625                 } else {
1626                         /* Build the first two u32 of B1 */
1627                         d8[0] = 0xFF;
1628                         d8[1] = 0xFE;
1629                         d8[2] = (alen & 0xFF000000) >> 24;
1630                         d8[3] = (alen & 0x00FF0000) >> 16;
1631
1632                         stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
1633                         i++;
1634
1635                         d8[0] = (alen & 0x0000FF00) >> 8;
1636                         d8[1] = alen & 0x000000FF;
1637                         d8[2] = *((u8 *)src);
1638                         src = stm32_cryp_next_in(cryp, src, 1);
1639                         d8[3] = *((u8 *)src);
1640                         src = stm32_cryp_next_in(cryp, src, 1);
1641
1642                         stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
1643                         i++;
1644
1645                         cryp->total_in -= min_t(size_t, 2, cryp->total_in);
1646                 }
1647         }
1648
1649         /* Write next u32 */
1650         for (; i < AES_BLOCK_32; i++) {
1651                 /* Build a u32 */
1652                 memset(d8, 0, sizeof(d8));
1653                 for (k = 0; k < sizeof(u32); k++) {
1654                         d8[k] = *((u8 *)src);
1655                         src = stm32_cryp_next_in(cryp, src, 1);
1656
1657                         cryp->total_in -= min_t(size_t, 1, cryp->total_in);
1658                         if ((cryp->total_in_save - cryp->total_in) == alen)
1659                                 break;
1660                 }
1661
1662                 stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
1663
1664                 if ((cryp->total_in_save - cryp->total_in) == alen) {
1665                         /* Write padding if needed */
1666                         for (j = i + 1; j < AES_BLOCK_32; j++)
1667                                 stm32_cryp_write(cryp, CRYP_DIN, 0);
1668
1669                         /* Wait for completion */
1670                         err = stm32_cryp_wait_busy(cryp);
1671                         if (err) {
1672                                 dev_err(cryp->dev, "Timeout (ccm header)\n");
1673                                 return stm32_cryp_finish_req(cryp, err);
1674                         }
1675
1676                         if (stm32_cryp_get_input_text_len(cryp)) {
1677                                 /* Phase 3 : payload */
1678                                 cfg = stm32_cryp_read(cryp, CRYP_CR);
1679                                 cfg &= ~CR_CRYPEN;
1680                                 stm32_cryp_write(cryp, CRYP_CR, cfg);
1681
1682                                 cfg &= ~CR_PH_MASK;
1683                                 cfg |= CR_PH_PAYLOAD;
1684                                 cfg |= CR_CRYPEN;
1685                                 stm32_cryp_write(cryp, CRYP_CR, cfg);
1686                         } else {
1687                                 /* Phase 4 : tag */
1688                                 stm32_cryp_write(cryp, CRYP_IMSCR, 0);
1689                                 stm32_cryp_finish_req(cryp, 0);
1690                         }
1691
1692                         break;
1693                 }
1694         }
1695 }
1696
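/*
 * Threaded interrupt bottom half: the hard handler below only latches
 * the masked interrupt status (MISR) and wakes this thread, which does
 * all the FIFO draining/filling and GCM/CCM phase bookkeeping.
 */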
1697 static irqreturn_t stm32_cryp_irq_thread(int irq, void *arg)
1698 {
1699         struct stm32_cryp *cryp = arg;
1700         u32 ph;
1701
1702         if (cryp->irq_status & MISR_OUT)
1703                 /* Output FIFO IRQ: read data */
1704                 if (unlikely(stm32_cryp_irq_read_data(cryp))) {
1705                         /* All bytes processed, finish */
1706                         stm32_cryp_write(cryp, CRYP_IMSCR, 0);
1707                         stm32_cryp_finish_req(cryp, 0);
1708                         return IRQ_HANDLED;
1709                 }
1710
1711         if (cryp->irq_status & MISR_IN) {
1712                 if (is_gcm(cryp)) {
1713                         ph = stm32_cryp_read(cryp, CRYP_CR) & CR_PH_MASK;
1714                         if (unlikely(ph == CR_PH_HEADER))
1715                                 /* Write Header */
1716                                 stm32_cryp_irq_write_gcm_header(cryp);
1717                         else
1718                                 /* Input FIFO IRQ: write data */
1719                                 stm32_cryp_irq_write_data(cryp);
1720                         cryp->gcm_ctr++;
1721                 } else if (is_ccm(cryp)) {
1722                         ph = stm32_cryp_read(cryp, CRYP_CR) & CR_PH_MASK;
1723                         if (unlikely(ph == CR_PH_HEADER))
1724                                 /* Write Header */
1725                                 stm32_cryp_irq_write_ccm_header(cryp);
1726                         else
1727                                 /* Input FIFO IRQ: write data */
1728                                 stm32_cryp_irq_write_data(cryp);
1729                 } else {
1730                         /* Input FIFO IRQ: write data */
1731                         stm32_cryp_irq_write_data(cryp);
1732                 }
1733         }
1734
1735         return IRQ_HANDLED;
1736 }
1737
1738 static irqreturn_t stm32_cryp_irq(int irq, void *arg)
1739 {
1740         struct stm32_cryp *cryp = arg;
1741
1742         cryp->irq_status = stm32_cryp_read(cryp, CRYP_MISR);
1743
1744         return IRQ_WAKE_THREAD;
1745 }
1746
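/*
 * Symmetric cipher modes exposed through the crypto API. All entries
 * are asynchronous (served by the crypto engine) and registered at
 * priority 200, above the generic software implementations.
 */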
1747 static struct crypto_alg crypto_algs[] = {
1748 {
1749         .cra_name               = "ecb(aes)",
1750         .cra_driver_name        = "stm32-ecb-aes",
1751         .cra_priority           = 200,
1752         .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
1753                                   CRYPTO_ALG_ASYNC,
1754         .cra_blocksize          = AES_BLOCK_SIZE,
1755         .cra_ctxsize            = sizeof(struct stm32_cryp_ctx),
1756         .cra_alignmask          = 0xf,
1757         .cra_type               = &crypto_ablkcipher_type,
1758         .cra_module             = THIS_MODULE,
1759         .cra_init               = stm32_cryp_cra_init,
1760         .cra_ablkcipher = {
1761                 .min_keysize    = AES_MIN_KEY_SIZE,
1762                 .max_keysize    = AES_MAX_KEY_SIZE,
1763                 .setkey         = stm32_cryp_aes_setkey,
1764                 .encrypt        = stm32_cryp_aes_ecb_encrypt,
1765                 .decrypt        = stm32_cryp_aes_ecb_decrypt,
1766         }
1767 },
1768 {
1769         .cra_name               = "cbc(aes)",
1770         .cra_driver_name        = "stm32-cbc-aes",
1771         .cra_priority           = 200,
1772         .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
1773                                   CRYPTO_ALG_ASYNC,
1774         .cra_blocksize          = AES_BLOCK_SIZE,
1775         .cra_ctxsize            = sizeof(struct stm32_cryp_ctx),
1776         .cra_alignmask          = 0xf,
1777         .cra_type               = &crypto_ablkcipher_type,
1778         .cra_module             = THIS_MODULE,
1779         .cra_init               = stm32_cryp_cra_init,
1780         .cra_ablkcipher = {
1781                 .min_keysize    = AES_MIN_KEY_SIZE,
1782                 .max_keysize    = AES_MAX_KEY_SIZE,
1783                 .ivsize         = AES_BLOCK_SIZE,
1784                 .setkey         = stm32_cryp_aes_setkey,
1785                 .encrypt        = stm32_cryp_aes_cbc_encrypt,
1786                 .decrypt        = stm32_cryp_aes_cbc_decrypt,
1787         }
1788 },
1789 {
1790         .cra_name               = "ctr(aes)",
1791         .cra_driver_name        = "stm32-ctr-aes",
1792         .cra_priority           = 200,
1793         .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
1794                                   CRYPTO_ALG_ASYNC,
1795         .cra_blocksize          = 1,
1796         .cra_ctxsize            = sizeof(struct stm32_cryp_ctx),
1797         .cra_alignmask          = 0xf,
1798         .cra_type               = &crypto_ablkcipher_type,
1799         .cra_module             = THIS_MODULE,
1800         .cra_init               = stm32_cryp_cra_init,
1801         .cra_ablkcipher = {
1802                 .min_keysize    = AES_MIN_KEY_SIZE,
1803                 .max_keysize    = AES_MAX_KEY_SIZE,
1804                 .ivsize         = AES_BLOCK_SIZE,
1805                 .setkey         = stm32_cryp_aes_setkey,
1806                 .encrypt        = stm32_cryp_aes_ctr_encrypt,
1807                 .decrypt        = stm32_cryp_aes_ctr_decrypt,
1808         }
1809 },
1810 {
1811         .cra_name               = "ecb(des)",
1812         .cra_driver_name        = "stm32-ecb-des",
1813         .cra_priority           = 200,
1814         .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
1815                                   CRYPTO_ALG_ASYNC,
1816         .cra_blocksize          = DES_BLOCK_SIZE,
1817         .cra_ctxsize            = sizeof(struct stm32_cryp_ctx),
1818         .cra_alignmask          = 0xf,
1819         .cra_type               = &crypto_ablkcipher_type,
1820         .cra_module             = THIS_MODULE,
1821         .cra_init               = stm32_cryp_cra_init,
1822         .cra_ablkcipher = {
1823                 .min_keysize    = DES_KEY_SIZE,
1824                 .max_keysize    = DES_KEY_SIZE,
1825                 .setkey         = stm32_cryp_des_setkey,
1826                 .encrypt        = stm32_cryp_des_ecb_encrypt,
1827                 .decrypt        = stm32_cryp_des_ecb_decrypt,
1828         }
1829 },
1830 {
1831         .cra_name               = "cbc(des)",
1832         .cra_driver_name        = "stm32-cbc-des",
1833         .cra_priority           = 200,
1834         .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
1835                                   CRYPTO_ALG_ASYNC,
1836         .cra_blocksize          = DES_BLOCK_SIZE,
1837         .cra_ctxsize            = sizeof(struct stm32_cryp_ctx),
1838         .cra_alignmask          = 0xf,
1839         .cra_type               = &crypto_ablkcipher_type,
1840         .cra_module             = THIS_MODULE,
1841         .cra_init               = stm32_cryp_cra_init,
1842         .cra_ablkcipher = {
1843                 .min_keysize    = DES_KEY_SIZE,
1844                 .max_keysize    = DES_KEY_SIZE,
1845                 .ivsize         = DES_BLOCK_SIZE,
1846                 .setkey         = stm32_cryp_des_setkey,
1847                 .encrypt        = stm32_cryp_des_cbc_encrypt,
1848                 .decrypt        = stm32_cryp_des_cbc_decrypt,
1849         }
1850 },
1851 {
1852         .cra_name               = "ecb(des3_ede)",
1853         .cra_driver_name        = "stm32-ecb-des3",
1854         .cra_priority           = 200,
1855         .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
1856                                   CRYPTO_ALG_ASYNC,
1857         .cra_blocksize          = DES_BLOCK_SIZE,
1858         .cra_ctxsize            = sizeof(struct stm32_cryp_ctx),
1859         .cra_alignmask          = 0xf,
1860         .cra_type               = &crypto_ablkcipher_type,
1861         .cra_module             = THIS_MODULE,
1862         .cra_init               = stm32_cryp_cra_init,
1863         .cra_ablkcipher = {
1864                 .min_keysize    = DES3_EDE_KEY_SIZE,
1865                 .max_keysize    = DES3_EDE_KEY_SIZE,
1866                 .setkey         = stm32_cryp_tdes_setkey,
1867                 .encrypt        = stm32_cryp_tdes_ecb_encrypt,
1868                 .decrypt        = stm32_cryp_tdes_ecb_decrypt,
1869         }
1870 },
1871 {
1872         .cra_name               = "cbc(des3_ede)",
1873         .cra_driver_name        = "stm32-cbc-des3",
1874         .cra_priority           = 200,
1875         .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
1876                                   CRYPTO_ALG_ASYNC,
1877         .cra_blocksize          = DES_BLOCK_SIZE,
1878         .cra_ctxsize            = sizeof(struct stm32_cryp_ctx),
1879         .cra_alignmask          = 0xf,
1880         .cra_type               = &crypto_ablkcipher_type,
1881         .cra_module             = THIS_MODULE,
1882         .cra_init               = stm32_cryp_cra_init,
1883         .cra_ablkcipher = {
1884                 .min_keysize    = DES3_EDE_KEY_SIZE,
1885                 .max_keysize    = DES3_EDE_KEY_SIZE,
1886                 .ivsize         = DES_BLOCK_SIZE,
1887                 .setkey         = stm32_cryp_tdes_setkey,
1888                 .encrypt        = stm32_cryp_tdes_cbc_encrypt,
1889                 .decrypt        = stm32_cryp_tdes_cbc_decrypt,
1890         }
1891 },
1892 };
1893
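/*
 * AEAD modes: GCM uses the usual 96-bit nonce, CCM a full 16-byte IV
 * (flags, nonce and counter as per RFC 3610); both support tags up to
 * one AES block. Illustrative kernel-side usage (not part of this
 * driver), assuming the standard AEAD API:
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *
 * followed by crypto_aead_setkey(), crypto_aead_setauthsize(),
 * aead_request_alloc() and crypto_aead_encrypt()/crypto_aead_decrypt().
 */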
1894 static struct aead_alg aead_algs[] = {
1895 {
1896         .setkey         = stm32_cryp_aes_aead_setkey,
1897         .setauthsize    = stm32_cryp_aes_gcm_setauthsize,
1898         .encrypt        = stm32_cryp_aes_gcm_encrypt,
1899         .decrypt        = stm32_cryp_aes_gcm_decrypt,
1900         .init           = stm32_cryp_aes_aead_init,
1901         .ivsize         = 12,
1902         .maxauthsize    = AES_BLOCK_SIZE,
1903
1904         .base = {
1905                 .cra_name               = "gcm(aes)",
1906                 .cra_driver_name        = "stm32-gcm-aes",
1907                 .cra_priority           = 200,
1908                 .cra_flags              = CRYPTO_ALG_ASYNC,
1909                 .cra_blocksize          = 1,
1910                 .cra_ctxsize            = sizeof(struct stm32_cryp_ctx),
1911                 .cra_alignmask          = 0xf,
1912                 .cra_module             = THIS_MODULE,
1913         },
1914 },
1915 {
1916         .setkey         = stm32_cryp_aes_aead_setkey,
1917         .setauthsize    = stm32_cryp_aes_ccm_setauthsize,
1918         .encrypt        = stm32_cryp_aes_ccm_encrypt,
1919         .decrypt        = stm32_cryp_aes_ccm_decrypt,
1920         .init           = stm32_cryp_aes_aead_init,
1921         .ivsize         = AES_BLOCK_SIZE,
1922         .maxauthsize    = AES_BLOCK_SIZE,
1923
1924         .base = {
1925                 .cra_name               = "ccm(aes)",
1926                 .cra_driver_name        = "stm32-ccm-aes",
1927                 .cra_priority           = 200,
1928                 .cra_flags              = CRYPTO_ALG_ASYNC,
1929                 .cra_blocksize          = 1,
1930                 .cra_ctxsize            = sizeof(struct stm32_cryp_ctx),
1931                 .cra_alignmask          = 0xf,
1932                 .cra_module             = THIS_MODULE,
1933         },
1934 },
1935 };
1936
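/*
 * Per-SoC capabilities: the STM32F7-class IP needs its final GCM/CCM
 * phase data byte-swapped (swap_final) and the software workaround for
 * non block-aligned data (padding_wa); the STM32MP1 IP needs neither,
 * as it pads in hardware via NPBLB.
 */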
1937 static const struct stm32_cryp_caps f7_data = {
1938         .swap_final = true,
1939         .padding_wa = true,
1940 };
1941
1942 static const struct stm32_cryp_caps mp1_data = {
1943         .swap_final = false,
1944         .padding_wa = false,
1945 };
1946
1947 static const struct of_device_id stm32_dt_ids[] = {
1948         { .compatible = "st,stm32f756-cryp", .data = &f7_data},
1949         { .compatible = "st,stm32mp1-cryp", .data = &mp1_data},
1950         {},
1951 };
1952 MODULE_DEVICE_TABLE(of, stm32_dt_ids);
1953
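/*
 * Probe: map the CRYP instance, request its threaded IRQ, enable the
 * clock, pulse the optional reset line, then attach the instance to a
 * crypto engine and register the algorithms declared above.
 */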
1954 static int stm32_cryp_probe(struct platform_device *pdev)
1955 {
1956         struct device *dev = &pdev->dev;
1957         struct stm32_cryp *cryp;
1958         struct resource *res;
1959         struct reset_control *rst;
1960         int irq, ret;
1961
1962         cryp = devm_kzalloc(dev, sizeof(*cryp), GFP_KERNEL);
1963         if (!cryp)
1964                 return -ENOMEM;
1965
1966         cryp->caps = of_device_get_match_data(dev);
1967         if (!cryp->caps)
1968                 return -ENODEV;
1969
1970         cryp->dev = dev;
1971
1972         res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1973         cryp->regs = devm_ioremap_resource(dev, res);
1974         if (IS_ERR(cryp->regs))
1975                 return PTR_ERR(cryp->regs);
1976
1977         irq = platform_get_irq(pdev, 0);
1978         if (irq < 0) {
1979                 dev_err(dev, "Cannot get IRQ resource\n");
1980                 return irq;
1981         }
1982
1983         ret = devm_request_threaded_irq(dev, irq, stm32_cryp_irq,
1984                                         stm32_cryp_irq_thread, IRQF_ONESHOT,
1985                                         dev_name(dev), cryp);
1986         if (ret) {
1987                 dev_err(dev, "Cannot grab IRQ\n");
1988                 return ret;
1989         }
1990
1991         cryp->clk = devm_clk_get(dev, NULL);
1992         if (IS_ERR(cryp->clk)) {
1993                 dev_err(dev, "Could not get clock\n");
1994                 return PTR_ERR(cryp->clk);
1995         }
1996
1997         ret = clk_prepare_enable(cryp->clk);
1998         if (ret) {
1999                 dev_err(cryp->dev, "Failed to enable clock\n");
2000                 return ret;
2001         }
2002
2003         pm_runtime_set_autosuspend_delay(dev, CRYP_AUTOSUSPEND_DELAY);
2004         pm_runtime_use_autosuspend(dev);
2005
2006         pm_runtime_get_noresume(dev);
2007         pm_runtime_set_active(dev);
2008         pm_runtime_enable(dev);
2009
2010         rst = devm_reset_control_get(dev, NULL);
2011         if (!IS_ERR(rst)) {
2012                 reset_control_assert(rst);
2013                 udelay(2);
2014                 reset_control_deassert(rst);
2015         }
2016
2017         platform_set_drvdata(pdev, cryp);
2018
2019         spin_lock(&cryp_list.lock);
2020         list_add(&cryp->list, &cryp_list.dev_list);
2021         spin_unlock(&cryp_list.lock);
2022
2023         /* Initialize crypto engine */
2024         cryp->engine = crypto_engine_alloc_init(dev, 1);
2025         if (!cryp->engine) {
2026                 dev_err(dev, "Could not init crypto engine\n");
2027                 ret = -ENOMEM;
2028                 goto err_engine1;
2029         }
2030
2031         ret = crypto_engine_start(cryp->engine);
2032         if (ret) {
2033                 dev_err(dev, "Could not start crypto engine\n");
2034                 goto err_engine2;
2035         }
2036
2037         ret = crypto_register_algs(crypto_algs, ARRAY_SIZE(crypto_algs));
2038         if (ret) {
2039                 dev_err(dev, "Could not register algs\n");
2040                 goto err_algs;
2041         }
2042
2043         ret = crypto_register_aeads(aead_algs, ARRAY_SIZE(aead_algs));
2044         if (ret)
2045                 goto err_aead_algs;
2046
2047         dev_info(dev, "Initialized\n");
2048
2049         pm_runtime_put_sync(dev);
2050
2051         return 0;
2052
2053 err_aead_algs:
2054         crypto_unregister_algs(crypto_algs, ARRAY_SIZE(crypto_algs));
2055 err_algs:
2056 err_engine2:
2057         crypto_engine_exit(cryp->engine);
2058 err_engine1:
2059         spin_lock(&cryp_list.lock);
2060         list_del(&cryp->list);
2061         spin_unlock(&cryp_list.lock);
2062
2063         pm_runtime_disable(dev);
2064         pm_runtime_put_noidle(dev);
2067
2068         clk_disable_unprepare(cryp->clk);
2069
2070         return ret;
2071 }
2072
2073 static int stm32_cryp_remove(struct platform_device *pdev)
2074 {
2075         struct stm32_cryp *cryp = platform_get_drvdata(pdev);
2076         int ret;
2077
2078         if (!cryp)
2079                 return -ENODEV;
2080
2081         ret = pm_runtime_get_sync(cryp->dev);
2082         if (ret < 0) {
2083                 pm_runtime_put_noidle(cryp->dev);
2084                 return ret;
2085         }
2084
2085         crypto_unregister_aeads(aead_algs, ARRAY_SIZE(aead_algs));
2086         crypto_unregister_algs(crypto_algs, ARRAY_SIZE(crypto_algs));
2087
2088         crypto_engine_exit(cryp->engine);
2089
2090         spin_lock(&cryp_list.lock);
2091         list_del(&cryp->list);
2092         spin_unlock(&cryp_list.lock);
2093
2094         pm_runtime_disable(cryp->dev);
2095         pm_runtime_put_noidle(cryp->dev);
2096
2097         clk_disable_unprepare(cryp->clk);
2098
2099         return 0;
2100 }
2101
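/*
 * Runtime PM simply gates the peripheral clock; system sleep reuses the
 * runtime callbacks through pm_runtime_force_suspend()/resume().
 */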
2102 #ifdef CONFIG_PM
2103 static int stm32_cryp_runtime_suspend(struct device *dev)
2104 {
2105         struct stm32_cryp *cryp = dev_get_drvdata(dev);
2106
2107         clk_disable_unprepare(cryp->clk);
2108
2109         return 0;
2110 }
2111
2112 static int stm32_cryp_runtime_resume(struct device *dev)
2113 {
2114         struct stm32_cryp *cryp = dev_get_drvdata(dev);
2115         int ret;
2116
2117         ret = clk_prepare_enable(cryp->clk);
2118         if (ret) {
2119                 dev_err(cryp->dev, "Failed to prepare_enable clock\n");
2120                 return ret;
2121         }
2122
2123         return 0;
2124 }
2125 #endif
2126
2127 static const struct dev_pm_ops stm32_cryp_pm_ops = {
2128         SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
2129                                 pm_runtime_force_resume)
2130         SET_RUNTIME_PM_OPS(stm32_cryp_runtime_suspend,
2131                            stm32_cryp_runtime_resume, NULL)
2132 };
2133
2134 static struct platform_driver stm32_cryp_driver = {
2135         .probe  = stm32_cryp_probe,
2136         .remove = stm32_cryp_remove,
2137         .driver = {
2138                 .name           = DRIVER_NAME,
2139                 .pm             = &stm32_cryp_pm_ops,
2140                 .of_match_table = stm32_dt_ids,
2141         },
2142 };
2143
2144 module_platform_driver(stm32_cryp_driver);
2145
2146 MODULE_AUTHOR("Fabien Dessenne <[email protected]>");
2147 MODULE_DESCRIPTION("STMicroelectronics STM32 CRYP hardware driver");
2148 MODULE_LICENSE("GPL");