1 // SPDX-License-Identifier: GPL-2.0-only
3 * Copyright (C) STMicroelectronics SA 2017
5 * Ux500 support taken from snippets in the old Ux500 cryp driver
8 #include <crypto/aes.h>
9 #include <crypto/engine.h>
10 #include <crypto/internal/aead.h>
11 #include <crypto/internal/des.h>
12 #include <crypto/internal/skcipher.h>
13 #include <crypto/scatterwalk.h>
14 #include <linux/clk.h>
15 #include <linux/delay.h>
16 #include <linux/err.h>
17 #include <linux/iopoll.h>
18 #include <linux/interrupt.h>
19 #include <linux/kernel.h>
20 #include <linux/module.h>
22 #include <linux/platform_device.h>
23 #include <linux/pm_runtime.h>
24 #include <linux/reset.h>
25 #include <linux/string.h>
27 #define DRIVER_NAME "stm32-cryp"
29 /* Bit [0] encrypt / decrypt */
30 #define FLG_ENCRYPT BIT(0)
31 /* Bit [8..1] algo & operation mode */
32 #define FLG_AES BIT(1)
33 #define FLG_DES BIT(2)
34 #define FLG_TDES BIT(3)
35 #define FLG_ECB BIT(4)
36 #define FLG_CBC BIT(5)
37 #define FLG_CTR BIT(6)
38 #define FLG_GCM BIT(7)
39 #define FLG_CCM BIT(8)
40 /* Mode mask = bits [15..0] */
41 #define FLG_MODE_MASK GENMASK(15, 0)
42 /* Bit [31..16] status */
45 #define CRYP_CR 0x00000000
46 #define CRYP_SR 0x00000004
47 #define CRYP_DIN 0x00000008
48 #define CRYP_DOUT 0x0000000C
49 #define CRYP_DMACR 0x00000010
50 #define CRYP_IMSCR 0x00000014
51 #define CRYP_RISR 0x00000018
52 #define CRYP_MISR 0x0000001C
53 #define CRYP_K0LR 0x00000020
54 #define CRYP_K0RR 0x00000024
55 #define CRYP_K1LR 0x00000028
56 #define CRYP_K1RR 0x0000002C
57 #define CRYP_K2LR 0x00000030
58 #define CRYP_K2RR 0x00000034
59 #define CRYP_K3LR 0x00000038
60 #define CRYP_K3RR 0x0000003C
61 #define CRYP_IV0LR 0x00000040
62 #define CRYP_IV0RR 0x00000044
63 #define CRYP_IV1LR 0x00000048
64 #define CRYP_IV1RR 0x0000004C
65 #define CRYP_CSGCMCCM0R 0x00000050
66 #define CRYP_CSGCM0R 0x00000070
68 #define UX500_CRYP_CR 0x00000000
69 #define UX500_CRYP_SR 0x00000004
70 #define UX500_CRYP_DIN 0x00000008
71 #define UX500_CRYP_DINSIZE 0x0000000C
72 #define UX500_CRYP_DOUT 0x00000010
73 #define UX500_CRYP_DOUSIZE 0x00000014
74 #define UX500_CRYP_DMACR 0x00000018
75 #define UX500_CRYP_IMSC 0x0000001C
76 #define UX500_CRYP_RIS 0x00000020
77 #define UX500_CRYP_MIS 0x00000024
78 #define UX500_CRYP_K1L 0x00000028
79 #define UX500_CRYP_K1R 0x0000002C
80 #define UX500_CRYP_K2L 0x00000030
81 #define UX500_CRYP_K2R 0x00000034
82 #define UX500_CRYP_K3L 0x00000038
83 #define UX500_CRYP_K3R 0x0000003C
84 #define UX500_CRYP_K4L 0x00000040
85 #define UX500_CRYP_K4R 0x00000044
86 #define UX500_CRYP_IV0L 0x00000048
87 #define UX500_CRYP_IV0R 0x0000004C
88 #define UX500_CRYP_IV1L 0x00000050
89 #define UX500_CRYP_IV1R 0x00000054
91 /* Registers values */
92 #define CR_DEC_NOT_ENC 0x00000004
93 #define CR_TDES_ECB 0x00000000
94 #define CR_TDES_CBC 0x00000008
95 #define CR_DES_ECB 0x00000010
96 #define CR_DES_CBC 0x00000018
97 #define CR_AES_ECB 0x00000020
98 #define CR_AES_CBC 0x00000028
99 #define CR_AES_CTR 0x00000030
100 #define CR_AES_KP 0x00000038 /* Not on Ux500 */
101 #define CR_AES_XTS 0x00000038 /* Only on Ux500 */
102 #define CR_AES_GCM 0x00080000
103 #define CR_AES_CCM 0x00080008
104 #define CR_AES_UNKNOWN 0xFFFFFFFF
105 #define CR_ALGO_MASK 0x00080038
106 #define CR_DATA32 0x00000000
107 #define CR_DATA16 0x00000040
108 #define CR_DATA8 0x00000080
109 #define CR_DATA1 0x000000C0
110 #define CR_KEY128 0x00000000
111 #define CR_KEY192 0x00000100
112 #define CR_KEY256 0x00000200
113 #define CR_KEYRDEN 0x00000400 /* Only on Ux500 */
114 #define CR_KSE 0x00000800 /* Only on Ux500 */
115 #define CR_FFLUSH 0x00004000
116 #define CR_CRYPEN 0x00008000
117 #define CR_PH_INIT 0x00000000
118 #define CR_PH_HEADER 0x00010000
119 #define CR_PH_PAYLOAD 0x00020000
120 #define CR_PH_FINAL 0x00030000
121 #define CR_PH_MASK 0x00030000
122 #define CR_NBPBL_SHIFT 20
124 #define SR_BUSY 0x00000010
125 #define SR_OFNE 0x00000004
127 #define IMSCR_IN BIT(0)
128 #define IMSCR_OUT BIT(1)
130 #define MISR_IN BIT(0)
131 #define MISR_OUT BIT(1)
134 #define AES_BLOCK_32 (AES_BLOCK_SIZE / sizeof(u32))
135 #define GCM_CTR_INIT 2
136 #define CRYP_AUTOSUSPEND_DELAY 50
/*
 * Driver data structures (listing is decimated — many members elided):
 * - stm32_cryp_caps: per-SoC capability/register-offset table (cr, sr, din,
 *   dout, iv*, k*, imsc plus quirk flags such as padding_wa, swap_final,
 *   iv_protection, linear_aes_key, kp_mode — inferred from uses below;
 *   TODO confirm against full source).
 * - stm32_cryp_ctx: per-transform context (bound device, raw key material).
 * - stm32_cryp_reqctx: per-request context carrying the FLG_* mode.
 * - stm32_cryp (partial): per-device state incl. engine, current requests,
 *   scatterlist walks for input/output.
 * - cryp_list: global list of probed devices, guarded by cryp_list.lock.
 */
138 struct stm32_cryp_caps {
160 struct stm32_cryp_ctx {
161 struct stm32_cryp *cryp;
163 __be32 key[AES_KEYSIZE_256 / sizeof(u32)];
167 struct stm32_cryp_reqctx {
172 struct list_head list;
178 const struct stm32_cryp_caps *caps;
179 struct stm32_cryp_ctx *ctx;
181 struct crypto_engine *engine;
183 struct skcipher_request *req;
184 struct aead_request *areq;
193 struct scatterlist *out_sg;
195 struct scatter_walk in_walk;
196 struct scatter_walk out_walk;
202 struct stm32_cryp_list {
203 struct list_head dev_list;
204 spinlock_t lock; /* protect dev_list */
207 static struct stm32_cryp_list cryp_list = {
208 .dev_list = LIST_HEAD_INIT(cryp_list.dev_list),
209 .lock = __SPIN_LOCK_UNLOCKED(cryp_list.lock),
/* True when the current request uses AES (FLG_AES set in cryp->flags). */
212 static inline bool is_aes(struct stm32_cryp *cryp)
214 return cryp->flags & FLG_AES;
/* True when the current request uses single DES (FLG_DES set). */
217 static inline bool is_des(struct stm32_cryp *cryp)
219 return cryp->flags & FLG_DES;
/* True when the current request uses triple DES (FLG_TDES set). */
222 static inline bool is_tdes(struct stm32_cryp *cryp)
224 return cryp->flags & FLG_TDES;
/* True when the operation mode is ECB (FLG_ECB set). */
227 static inline bool is_ecb(struct stm32_cryp *cryp)
229 return cryp->flags & FLG_ECB;
/* True when the operation mode is CBC (FLG_CBC set). */
232 static inline bool is_cbc(struct stm32_cryp *cryp)
234 return cryp->flags & FLG_CBC;
/* True when the operation mode is CTR (FLG_CTR set). */
237 static inline bool is_ctr(struct stm32_cryp *cryp)
239 return cryp->flags & FLG_CTR;
/* True when the operation mode is AES-GCM (FLG_GCM set). */
242 static inline bool is_gcm(struct stm32_cryp *cryp)
244 return cryp->flags & FLG_GCM;
/* True when the operation mode is AES-CCM (FLG_CCM set). */
247 static inline bool is_ccm(struct stm32_cryp *cryp)
249 return cryp->flags & FLG_CCM;
/* True for an encrypt request (FLG_ENCRYPT set). */
252 static inline bool is_encrypt(struct stm32_cryp *cryp)
254 return cryp->flags & FLG_ENCRYPT;
/* True for a decrypt request — simply the negation of is_encrypt(). */
257 static inline bool is_decrypt(struct stm32_cryp *cryp)
259 return !is_encrypt(cryp);
/* Read a 32-bit CRYP register at byte offset @ofst (relaxed, no barrier). */
262 static inline u32 stm32_cryp_read(struct stm32_cryp *cryp, u32 ofst)
264 return readl_relaxed(cryp->regs + ofst);
/* Write @val to the 32-bit CRYP register at byte offset @ofst (relaxed). */
267 static inline void stm32_cryp_write(struct stm32_cryp *cryp, u32 ofst, u32 val)
269 writel_relaxed(val, cryp->regs + ofst);
/*
 * Poll the status register until SR_BUSY clears (10 us step, 100 ms timeout).
 * Returns 0 on success, -ETIMEDOUT otherwise.
 */
272 static inline int stm32_cryp_wait_busy(struct stm32_cryp *cryp)
276 return readl_relaxed_poll_timeout(cryp->regs + cryp->caps->sr, status,
277 !(status & SR_BUSY), 10, 100000);
/* Set CR_CRYPEN in the control register to start the peripheral. */
280 static inline void stm32_cryp_enable(struct stm32_cryp *cryp)
282 writel_relaxed(readl_relaxed(cryp->regs + cryp->caps->cr) | CR_CRYPEN,
283 cryp->regs + cryp->caps->cr);
/*
 * Poll CR until the hardware clears CR_CRYPEN (end of an init phase).
 * Returns 0 on success, -ETIMEDOUT on timeout.
 */
286 static inline int stm32_cryp_wait_enable(struct stm32_cryp *cryp)
290 return readl_relaxed_poll_timeout(cryp->regs + cryp->caps->cr, status,
291 !(status & CR_CRYPEN), 10, 100000);
/*
 * Poll SR until the output FIFO is not empty (SR_OFNE).
 * Returns 0 on success, -ETIMEDOUT on timeout.
 */
294 static inline int stm32_cryp_wait_output(struct stm32_cryp *cryp)
298 return readl_relaxed_poll_timeout(cryp->regs + cryp->caps->sr, status,
299 status & SR_OFNE, 10, 100000);
/* Set CR_KEYRDEN to allow reading key/IV registers (Ux500-only bit). */
302 static inline void stm32_cryp_key_read_enable(struct stm32_cryp *cryp)
304 writel_relaxed(readl_relaxed(cryp->regs + cryp->caps->cr) | CR_KEYRDEN,
305 cryp->regs + cryp->caps->cr);
/* Clear CR_KEYRDEN again after reading protected registers (Ux500-only). */
308 static inline void stm32_cryp_key_read_disable(struct stm32_cryp *cryp)
310 writel_relaxed(readl_relaxed(cryp->regs + cryp->caps->cr) & ~CR_KEYRDEN,
311 cryp->regs + cryp->caps->cr);
314 static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp);
315 static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err);
/*
 * Select a CRYP device for @ctx by walking the global device list under
 * cryp_list.lock. NOTE(review): loop body and the ctx-binding logic are
 * elided in this listing — presumably the first device is taken and cached
 * in ctx->cryp; confirm against the full source.
 */
317 static struct stm32_cryp *stm32_cryp_find_dev(struct stm32_cryp_ctx *ctx)
319 struct stm32_cryp *tmp, *cryp = NULL;
321 spin_lock_bh(&cryp_list.lock);
323 list_for_each_entry(tmp, &cryp_list.dev_list, list) {
332 spin_unlock_bh(&cryp_list.lock);
/*
 * Load the four 32-bit IV words from big-endian memory into the IV
 * registers (IV0L/IV0R/IV1L/IV1R), converting to CPU order for the write.
 */
337 static void stm32_cryp_hw_write_iv(struct stm32_cryp *cryp, __be32 *iv)
342 stm32_cryp_write(cryp, cryp->caps->iv0l, be32_to_cpu(*iv++));
343 stm32_cryp_write(cryp, cryp->caps->iv0r, be32_to_cpu(*iv++));
346 stm32_cryp_write(cryp, cryp->caps->iv1l, be32_to_cpu(*iv++));
347 stm32_cryp_write(cryp, cryp->caps->iv1r, be32_to_cpu(*iv++));
/*
 * Read the updated IV back from the hardware into req->iv so chained
 * CBC/CTR requests continue correctly. On parts with protected IV
 * registers (caps->iv_protection, Ux500), key-read access is temporarily
 * enabled around the reads.
 */
351 static void stm32_cryp_get_iv(struct stm32_cryp *cryp)
353 struct skcipher_request *req = cryp->req;
354 __be32 *tmp = (void *)req->iv;
359 if (cryp->caps->iv_protection)
360 stm32_cryp_key_read_enable(cryp);
362 *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0l));
363 *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0r));
366 *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1l));
367 *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1r));
370 if (cryp->caps->iv_protection)
371 stm32_cryp_key_read_disable(cryp);
/**
375 * ux500_swap_bits_in_byte() - mirror the bits in a byte
376 * @b: the byte to be mirrored
378 * The bits are swapped the following way:
379 * Byte b include bits 0-7, nibble 1 (n1) include bits 0-3 and
380 * nibble 2 (n2) bits 4-7.
383 * (The "old" (moved) bit is replaced with a zero)
384 * 1. Move bit 6 and 7, 4 positions to the left.
385 * 2. Move bit 3 and 5, 2 positions to the left.
386 * 3. Move bit 1-4, 1 position to the left.
389 * 1. Move bit 0 and 1, 4 positions to the right.
390 * 2. Move bit 2 and 4, 2 positions to the right.
391 * 3. Move bit 3-6, 1 position to the right.
393 * Combine the two nibbles to a complete and swapped byte.
 */
395 static inline u8 ux500_swap_bits_in_byte(u8 b)
397 #define R_SHIFT_4_MASK 0xc0 /* Bits 6 and 7, right shift 4 */
398 #define R_SHIFT_2_MASK 0x28 /* (After right shift 4) Bits 3 and 5,
400 #define R_SHIFT_1_MASK 0x1e /* (After right shift 2) Bits 1-4,
402 #define L_SHIFT_4_MASK 0x03 /* Bits 0 and 1, left shift 4 */
403 #define L_SHIFT_2_MASK 0x14 /* (After left shift 4) Bits 2 and 4,
405 #define L_SHIFT_1_MASK 0x78 /* (After left shift 1) Bits 3-6,
411 /* Swap most significant nibble */
412 /* Right shift 4, bits 6 and 7 */
413 n1 = ((b & R_SHIFT_4_MASK) >> 4) | (b & ~(R_SHIFT_4_MASK >> 4));
414 /* Right shift 2, bits 3 and 5 */
415 n1 = ((n1 & R_SHIFT_2_MASK) >> 2) | (n1 & ~(R_SHIFT_2_MASK >> 2));
416 /* Right shift 1, bits 1-4 */
417 n1 = (n1 & R_SHIFT_1_MASK) >> 1;
419 /* Swap least significant nibble */
420 /* Left shift 4, bits 0 and 1 */
421 n2 = ((b & L_SHIFT_4_MASK) << 4) | (b & ~(L_SHIFT_4_MASK << 4));
422 /* Left shift 2, bits 2 and 4 */
423 n2 = ((n2 & L_SHIFT_2_MASK) << 2) | (n2 & ~(L_SHIFT_2_MASK << 2));
424 /* Left shift 1, bits 3-6 */
425 n2 = (n2 & L_SHIFT_1_MASK) << 1;
/* (return combining n1 | n2 elided in this listing) */
/**
431 * ux500_swizzle_key() - Shuffle around words and bits in the AES key
432 * @in: key to swizzle
434 * @len: length of key, in bytes
436 * This "key swizzling procedure" is described in the examples in the
437 * DB8500 design specification. There is no real description of why
438 * the bits have been arranged like this in the hardware.
 */
440 static inline void ux500_swizzle_key(const u8 *in, u8 *out, u32 len)
443 int bpw = sizeof(u32);
/* Walk the key word by word from the end, bit-mirroring each byte. */
449 for (i = 0; i < bpw; i++) {
450 index = len - j - bpw + i;
452 ux500_swap_bits_in_byte(in[index]);
/*
 * Program the key registers. DES uses K1L/K1R only; AES on Ux500 needs the
 * key swizzled and written linearly from K1L downwards (linear_aes_key),
 * while other parts take the big-endian words written from the highest
 * register downwards.
 */
458 static void stm32_cryp_hw_write_key(struct stm32_cryp *c)
464 stm32_cryp_write(c, c->caps->k1l, be32_to_cpu(c->ctx->key[0]));
465 stm32_cryp_write(c, c->caps->k1r, be32_to_cpu(c->ctx->key[1]));
470 * On the Ux500 the AES key is considered as a single bit sequence
471 * of 128, 192 or 256 bits length. It is written linearly into the
472 * registers from K1L and down, and need to be processed to become
473 * a proper big-endian bit sequence.
475 if (is_aes(c) && c->caps->linear_aes_key) {
478 ux500_swizzle_key((u8 *)c->ctx->key,
479 (u8 *)tmpkey, c->ctx->keylen);
482 for (i = 0; i < c->ctx->keylen / sizeof(u32); i++, r_id += 4)
483 stm32_cryp_write(c, r_id, tmpkey[i]);
/* Non-Ux500: write key words high-to-low in big-endian order. */
489 for (i = c->ctx->keylen / sizeof(u32); i > 0; i--, r_id -= 4)
490 stm32_cryp_write(c, r_id, be32_to_cpu(c->ctx->key[i - 1]));
/*
 * Translate the FLG_* algorithm/mode flags into the CR algorithm field
 * value (returns one of the CR_* mode constants; error-return lines for
 * each branch are elided in this listing). CR_AES_UNKNOWN flags an
 * unsupported combination.
 */
493 static u32 stm32_cryp_get_hw_mode(struct stm32_cryp *cryp)
495 if (is_aes(cryp) && is_ecb(cryp))
498 if (is_aes(cryp) && is_cbc(cryp))
501 if (is_aes(cryp) && is_ctr(cryp))
504 if (is_aes(cryp) && is_gcm(cryp))
507 if (is_aes(cryp) && is_ccm(cryp))
510 if (is_des(cryp) && is_ecb(cryp))
513 if (is_des(cryp) && is_cbc(cryp))
516 if (is_tdes(cryp) && is_ecb(cryp))
519 if (is_tdes(cryp) && is_cbc(cryp))
522 dev_err(cryp->dev, "Unknown mode\n");
523 return CR_AES_UNKNOWN;
/*
 * AEAD plaintext length: on decrypt, cryptlen includes the auth tag, so
 * subtract authsize to get the actual payload size.
 */
526 static unsigned int stm32_cryp_get_input_text_len(struct stm32_cryp *cryp)
528 return is_encrypt(cryp) ? cryp->areq->cryptlen :
529 cryp->areq->cryptlen - cryp->authsize;
/*
 * GCM phase 1 (init): program IV = nonce || counter, where the counter
 * starts at GCM_CTR_INIT (2, since counter value 1 is reserved for the tag
 * computation), start the init phase and wait for it to complete, then
 * select the next phase (header if there is AAD, else payload if any).
 */
532 static int stm32_cryp_gcm_init(struct stm32_cryp *cryp, u32 cfg)
538 memcpy(iv, cryp->areq->iv, 12);
539 iv[3] = cpu_to_be32(GCM_CTR_INIT);
540 cryp->gcm_ctr = GCM_CTR_INIT;
541 stm32_cryp_hw_write_iv(cryp, iv);
543 stm32_cryp_write(cryp, cryp->caps->cr, cfg | CR_PH_INIT | CR_CRYPEN);
545 /* Wait for end of processing */
546 ret = stm32_cryp_wait_enable(cryp);
548 dev_err(cryp->dev, "Timeout (gcm init)\n");
552 /* Prepare next phase */
553 if (cryp->areq->assoclen) {
555 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
556 } else if (stm32_cryp_get_input_text_len(cryp)) {
557 cfg |= CR_PH_PAYLOAD;
558 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
/*
 * Called when all AAD (header) bytes have been fed to the FIFO: wait for
 * the hardware to finish the header phase, then either switch to the
 * payload phase or, with no payload, leave it to the caller/irq path to
 * finish the request.
 */
564 static void stm32_crypt_gcmccm_end_header(struct stm32_cryp *cryp)
569 /* Check if whole header written */
570 if (!cryp->header_in) {
571 /* Wait for completion */
572 err = stm32_cryp_wait_busy(cryp);
574 dev_err(cryp->dev, "Timeout (gcm/ccm header)\n");
575 stm32_cryp_write(cryp, cryp->caps->imsc, 0);
576 stm32_cryp_finish_req(cryp, err);
580 if (stm32_cryp_get_input_text_len(cryp)) {
581 /* Phase 3 : payload */
582 cfg = stm32_cryp_read(cryp, cryp->caps->cr);
584 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
587 cfg |= CR_PH_PAYLOAD | CR_CRYPEN;
588 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
592 * Nothing to read, nothing to write, caller has to
/*
 * CCM: build and write the first header block (B1). The associated-data
 * length is encoded at the front per RFC 3610 (2-byte form, or the 6-byte
 * 0xff 0xfe form for large AAD — encoding branches partially elided here),
 * the remainder of the block is filled from the input scatterlist, and the
 * whole block is pushed to the FIFO before checking for end-of-header.
 */
599 static void stm32_cryp_write_ccm_first_header(struct stm32_cryp *cryp)
603 u32 alen = cryp->areq->assoclen;
604 u32 block[AES_BLOCK_32] = {0};
605 u8 *b8 = (u8 *)block;
608 /* Write first u32 of B1 */
609 b8[0] = (alen >> 8) & 0xFF;
613 /* Build the two first u32 of B1 */
616 b8[2] = (alen & 0xFF000000) >> 24;
617 b8[3] = (alen & 0x00FF0000) >> 16;
618 b8[4] = (alen & 0x0000FF00) >> 8;
619 b8[5] = alen & 0x000000FF;
623 written = min_t(size_t, AES_BLOCK_SIZE - len, alen);
625 scatterwalk_copychunks((char *)block + len, &cryp->in_walk, written, 0);
627 writesl(cryp->regs + cryp->caps->din, block, AES_BLOCK_32);
629 cryp->header_in -= written;
631 stm32_crypt_gcmccm_end_header(cryp);
/*
 * CCM phase 1 (init): derive CTR0 from the request IV (zero the counter
 * field, then set the low byte to 1), program it as IV, build block B0
 * (flags byte encodes authsize and AAD presence, trailing bytes carry the
 * text length), push B0 through the FIFO (byte-swapped unless the
 * padding_wa quirk applies), wait for init completion, and select the
 * header or payload phase.
 */
634 static int stm32_cryp_ccm_init(struct stm32_cryp *cryp, u32 cfg)
637 u32 iv_32[AES_BLOCK_32], b0_32[AES_BLOCK_32];
638 u8 *iv = (u8 *)iv_32, *b0 = (u8 *)b0_32;
641 unsigned int i, textlen;
643 /* Phase 1 : init. Firstly set the CTR value to 1 (not 0) */
644 memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
645 memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);
646 iv[AES_BLOCK_SIZE - 1] = 1;
647 stm32_cryp_hw_write_iv(cryp, (__be32 *)iv);
650 memcpy(b0, iv, AES_BLOCK_SIZE);
652 b0[0] |= (8 * ((cryp->authsize - 2) / 2));
654 if (cryp->areq->assoclen)
657 textlen = stm32_cryp_get_input_text_len(cryp);
659 b0[AES_BLOCK_SIZE - 2] = textlen >> 8;
660 b0[AES_BLOCK_SIZE - 1] = textlen & 0xFF;
663 stm32_cryp_write(cryp, cryp->caps->cr, cfg | CR_PH_INIT | CR_CRYPEN);
669 for (i = 0; i < AES_BLOCK_32; i++) {
672 if (!cryp->caps->padding_wa)
673 xd = be32_to_cpu(bd[i]);
674 stm32_cryp_write(cryp, cryp->caps->din, xd);
677 /* Wait for end of processing */
678 ret = stm32_cryp_wait_enable(cryp);
680 dev_err(cryp->dev, "Timeout (ccm init)\n");
684 /* Prepare next phase */
685 if (cryp->areq->assoclen) {
686 cfg |= CR_PH_HEADER | CR_CRYPEN;
687 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
689 /* Write first (special) block (may move to next phase [payload]) */
690 stm32_cryp_write_ccm_first_header(cryp);
691 } else if (stm32_cryp_get_input_text_len(cryp)) {
692 cfg |= CR_PH_PAYLOAD;
693 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
/*
 * Full hardware setup for one request: take a runtime-PM reference, mask
 * interrupts, build the control word (8-bit data type, FIFO flush, key
 * size, algorithm), optionally run the AES key-preparation step needed
 * before ECB/CBC decryption (dedicated KP mode, or KSE on Ux500), write
 * the key, run the GCM/CCM init phase or program the IV, and finally
 * enable the peripheral. Returns 0 or a negative errno (several
 * error-return lines elided in this listing).
 */
699 static int stm32_cryp_hw_init(struct stm32_cryp *cryp)
704 pm_runtime_get_sync(cryp->dev);
706 /* Disable interrupt */
707 stm32_cryp_write(cryp, cryp->caps->imsc, 0);
709 /* Set configuration */
710 cfg = CR_DATA8 | CR_FFLUSH;
712 switch (cryp->ctx->keylen) {
713 case AES_KEYSIZE_128:
717 case AES_KEYSIZE_192:
722 case AES_KEYSIZE_256:
727 hw_mode = stm32_cryp_get_hw_mode(cryp);
728 if (hw_mode == CR_AES_UNKNOWN)
731 /* AES ECB/CBC decrypt: run key preparation first */
732 if (is_decrypt(cryp) &&
733 ((hw_mode == CR_AES_ECB) || (hw_mode == CR_AES_CBC))) {
734 /* Configure in key preparation mode */
735 if (cryp->caps->kp_mode)
736 stm32_cryp_write(cryp, cryp->caps->cr,
739 stm32_cryp_write(cryp,
740 cryp->caps->cr, cfg | CR_AES_ECB | CR_KSE);
742 /* Set key only after full configuration done */
743 stm32_cryp_hw_write_key(cryp);
745 /* Start prepare key */
746 stm32_cryp_enable(cryp);
747 /* Wait for end of processing */
748 ret = stm32_cryp_wait_busy(cryp);
750 dev_err(cryp->dev, "Timeout (key preparation)\n");
754 cfg |= hw_mode | CR_DEC_NOT_ENC;
756 /* Apply updated config (Decrypt + algo) and flush */
757 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
760 if (is_decrypt(cryp))
761 cfg |= CR_DEC_NOT_ENC;
763 /* Apply config and flush */
764 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
766 /* Set key only after configuration done */
767 stm32_cryp_hw_write_key(cryp);
774 if (hw_mode == CR_AES_CCM)
775 ret = stm32_cryp_ccm_init(cryp, cfg);
777 ret = stm32_cryp_gcm_init(cryp, cfg);
788 stm32_cryp_hw_write_iv(cryp, (__be32 *)cryp->req->iv);
796 stm32_cryp_enable(cryp);
/*
 * Complete the current request: for AEAD modes, first run phase 4 to read
 * (or verify) the auth tag; for chained cipher modes (CBC/CTR), save the
 * updated IV back into the request; drop the runtime-PM reference and hand
 * the request back to the crypto engine with @err.
 */
801 static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err)
803 if (!err && (is_gcm(cryp) || is_ccm(cryp)))
804 /* Phase 4 : output tag */
805 err = stm32_cryp_read_auth_tag(cryp);
807 if (!err && (!(is_gcm(cryp) || is_ccm(cryp) || is_ecb(cryp))))
808 stm32_cryp_get_iv(cryp);
810 pm_runtime_mark_last_busy(cryp->dev);
811 pm_runtime_put_autosuspend(cryp->dev);
813 if (is_gcm(cryp) || is_ccm(cryp))
814 crypto_finalize_aead_request(cryp->engine, cryp->areq, err);
816 crypto_finalize_skcipher_request(cryp->engine, cryp->req,
/*
 * Start CPU-driven (PIO) processing: unmask the input/output FIFO
 * interrupts; the IRQ handler then feeds and drains the FIFOs.
 */
820 static int stm32_cryp_cpu_start(struct stm32_cryp *cryp)
822 /* Enable interrupt and let the IRQ handler do everything */
823 stm32_cryp_write(cryp, cryp->caps->imsc, IMSCR_IN | IMSCR_OUT);
828 static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq);
/* skcipher .init: reserve per-request context space for our reqctx. */
830 static int stm32_cryp_init_tfm(struct crypto_skcipher *tfm)
832 crypto_skcipher_set_reqsize(tfm, sizeof(struct stm32_cryp_reqctx));
837 static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq);
/* AEAD .init: reserve per-request context space for our reqctx. */
839 static int stm32_cryp_aes_aead_init(struct crypto_aead *tfm)
841 tfm->reqsize = sizeof(struct stm32_cryp_reqctx);
/*
 * Common skcipher entry: stash the FLG_* @mode in the request context,
 * pick a device, and queue the request on the crypto engine.
 */
846 static int stm32_cryp_crypt(struct skcipher_request *req, unsigned long mode)
848 struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(
849 crypto_skcipher_reqtfm(req));
850 struct stm32_cryp_reqctx *rctx = skcipher_request_ctx(req);
851 struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);
858 return crypto_transfer_skcipher_request_to_engine(cryp->engine, req);
/*
 * Common AEAD entry: stash the FLG_* @mode in the request context, pick a
 * device, and queue the request on the crypto engine.
 */
861 static int stm32_cryp_aead_crypt(struct aead_request *req, unsigned long mode)
863 struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
864 struct stm32_cryp_reqctx *rctx = aead_request_ctx(req);
865 struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);
872 return crypto_transfer_aead_request_to_engine(cryp->engine, req);
/* Store the raw key and its length in the tfm context (no validation). */
875 static int stm32_cryp_setkey(struct crypto_skcipher *tfm, const u8 *key,
878 struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);
880 memcpy(ctx->key, key, keylen);
881 ctx->keylen = keylen;
/* AES setkey: accept only 128/192/256-bit keys, then store them. */
886 static int stm32_cryp_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
889 if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
890 keylen != AES_KEYSIZE_256)
893 return stm32_cryp_setkey(tfm, key, keylen);
/* DES setkey: run the standard weak-key check, then store the key. */
896 static int stm32_cryp_des_setkey(struct crypto_skcipher *tfm, const u8 *key,
899 return verify_skcipher_des_key(tfm, key) ?:
900 stm32_cryp_setkey(tfm, key, keylen);
/* 3DES setkey: run the standard 3DES key check, then store the key. */
903 static int stm32_cryp_tdes_setkey(struct crypto_skcipher *tfm, const u8 *key,
906 return verify_skcipher_des3_key(tfm, key) ?:
907 stm32_cryp_setkey(tfm, key, keylen);
/* AEAD (GCM/CCM) setkey: accept only AES key sizes, then store the key. */
910 static int stm32_cryp_aes_aead_setkey(struct crypto_aead *tfm, const u8 *key,
913 struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);
915 if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
916 keylen != AES_KEYSIZE_256)
919 memcpy(ctx->key, key, keylen);
920 ctx->keylen = keylen;
/*
 * Validate the requested GCM tag length. NOTE(review): the body (the list
 * of accepted authsize values) is elided in this listing.
 */
925 static int stm32_cryp_aes_gcm_setauthsize(struct crypto_aead *tfm,
926 unsigned int authsize)
/*
 * Validate the requested CCM tag length. NOTE(review): the body (the list
 * of accepted authsize values) is elided in this listing.
 */
944 static int stm32_cryp_aes_ccm_setauthsize(struct crypto_aead *tfm,
945 unsigned int authsize)
/* ecb(aes) encrypt: require block-aligned, non-empty input, then queue. */
963 static int stm32_cryp_aes_ecb_encrypt(struct skcipher_request *req)
965 if (req->cryptlen % AES_BLOCK_SIZE)
968 if (req->cryptlen == 0)
971 return stm32_cryp_crypt(req, FLG_AES | FLG_ECB | FLG_ENCRYPT);
/* ecb(aes) decrypt: require block-aligned, non-empty input, then queue. */
974 static int stm32_cryp_aes_ecb_decrypt(struct skcipher_request *req)
976 if (req->cryptlen % AES_BLOCK_SIZE)
979 if (req->cryptlen == 0)
982 return stm32_cryp_crypt(req, FLG_AES | FLG_ECB);
/* cbc(aes) encrypt: require block-aligned, non-empty input, then queue. */
985 static int stm32_cryp_aes_cbc_encrypt(struct skcipher_request *req)
987 if (req->cryptlen % AES_BLOCK_SIZE)
990 if (req->cryptlen == 0)
993 return stm32_cryp_crypt(req, FLG_AES | FLG_CBC | FLG_ENCRYPT);
/* cbc(aes) decrypt: require block-aligned, non-empty input, then queue. */
996 static int stm32_cryp_aes_cbc_decrypt(struct skcipher_request *req)
998 if (req->cryptlen % AES_BLOCK_SIZE)
1001 if (req->cryptlen == 0)
1004 return stm32_cryp_crypt(req, FLG_AES | FLG_CBC);
/* ctr(aes) encrypt: any non-zero length is allowed (stream mode). */
1007 static int stm32_cryp_aes_ctr_encrypt(struct skcipher_request *req)
1009 if (req->cryptlen == 0)
1012 return stm32_cryp_crypt(req, FLG_AES | FLG_CTR | FLG_ENCRYPT);
/* ctr(aes) decrypt: any non-zero length is allowed (stream mode). */
1015 static int stm32_cryp_aes_ctr_decrypt(struct skcipher_request *req)
1017 if (req->cryptlen == 0)
1020 return stm32_cryp_crypt(req, FLG_AES | FLG_CTR);
/* gcm(aes) encrypt entry point. */
1023 static int stm32_cryp_aes_gcm_encrypt(struct aead_request *req)
1025 return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM | FLG_ENCRYPT);
/* gcm(aes) decrypt entry point. */
1028 static int stm32_cryp_aes_gcm_decrypt(struct aead_request *req)
1030 return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM);
/*
 * Validate the CCM IV: the first byte encodes L' = L - 1 (RFC 3610), which
 * must lie in [1, 7]. Error-return line elided in this listing.
 */
1033 static inline int crypto_ccm_check_iv(const u8 *iv)
1035 /* 2 <= L <= 8, so 1 <= L' <= 7. */
1036 if (iv[0] < 1 || iv[0] > 7)
/* ccm(aes) encrypt: validate the CCM IV format first, then queue. */
1042 static int stm32_cryp_aes_ccm_encrypt(struct aead_request *req)
1046 err = crypto_ccm_check_iv(req->iv);
1050 return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM | FLG_ENCRYPT);
/* ccm(aes) decrypt: validate the CCM IV format first, then queue. */
1053 static int stm32_cryp_aes_ccm_decrypt(struct aead_request *req)
1057 err = crypto_ccm_check_iv(req->iv);
1061 return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM);
/* ecb(des) encrypt: require block-aligned, non-empty input, then queue. */
1064 static int stm32_cryp_des_ecb_encrypt(struct skcipher_request *req)
1066 if (req->cryptlen % DES_BLOCK_SIZE)
1069 if (req->cryptlen == 0)
1072 return stm32_cryp_crypt(req, FLG_DES | FLG_ECB | FLG_ENCRYPT);
/* ecb(des) decrypt: require block-aligned, non-empty input, then queue. */
1075 static int stm32_cryp_des_ecb_decrypt(struct skcipher_request *req)
1077 if (req->cryptlen % DES_BLOCK_SIZE)
1080 if (req->cryptlen == 0)
1083 return stm32_cryp_crypt(req, FLG_DES | FLG_ECB);
/* cbc(des) encrypt: require block-aligned, non-empty input, then queue. */
1086 static int stm32_cryp_des_cbc_encrypt(struct skcipher_request *req)
1088 if (req->cryptlen % DES_BLOCK_SIZE)
1091 if (req->cryptlen == 0)
1094 return stm32_cryp_crypt(req, FLG_DES | FLG_CBC | FLG_ENCRYPT);
/* cbc(des) decrypt: require block-aligned, non-empty input, then queue. */
1097 static int stm32_cryp_des_cbc_decrypt(struct skcipher_request *req)
1099 if (req->cryptlen % DES_BLOCK_SIZE)
1102 if (req->cryptlen == 0)
1105 return stm32_cryp_crypt(req, FLG_DES | FLG_CBC);
/* ecb(des3_ede) encrypt: block-aligned, non-empty input, then queue. */
1108 static int stm32_cryp_tdes_ecb_encrypt(struct skcipher_request *req)
1110 if (req->cryptlen % DES_BLOCK_SIZE)
1113 if (req->cryptlen == 0)
1116 return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB | FLG_ENCRYPT);
/* ecb(des3_ede) decrypt: block-aligned, non-empty input, then queue. */
1119 static int stm32_cryp_tdes_ecb_decrypt(struct skcipher_request *req)
1121 if (req->cryptlen % DES_BLOCK_SIZE)
1124 if (req->cryptlen == 0)
1127 return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB);
/* cbc(des3_ede) encrypt: block-aligned, non-empty input, then queue. */
1130 static int stm32_cryp_tdes_cbc_encrypt(struct skcipher_request *req)
1132 if (req->cryptlen % DES_BLOCK_SIZE)
1135 if (req->cryptlen == 0)
1138 return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC | FLG_ENCRYPT);
/* cbc(des3_ede) decrypt: block-aligned, non-empty input, then queue. */
1141 static int stm32_cryp_tdes_cbc_decrypt(struct skcipher_request *req)
1143 if (req->cryptlen % DES_BLOCK_SIZE)
1146 if (req->cryptlen == 0)
1149 return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC);
/*
 * Prepare the device for one request (exactly one of @req / @areq is
 * non-NULL): latch the mode flags into cryp->flags, compute the hardware
 * block size and the in/out byte counters, start scatterlist walks over
 * src/dst (skipping the copied-through AAD region in the output for AEAD),
 * clear the saved CTR state, and run the full hardware init.
 */
1152 static int stm32_cryp_prepare_req(struct skcipher_request *req,
1153 struct aead_request *areq)
1155 struct stm32_cryp_ctx *ctx;
1156 struct stm32_cryp *cryp;
1157 struct stm32_cryp_reqctx *rctx;
1158 struct scatterlist *in_sg;
1164 ctx = req ? crypto_skcipher_ctx(crypto_skcipher_reqtfm(req)) :
1165 crypto_aead_ctx(crypto_aead_reqtfm(areq));
1169 rctx = req ? skcipher_request_ctx(req) : aead_request_ctx(areq);
1170 rctx->mode &= FLG_MODE_MASK;
1174 cryp->flags = (cryp->flags & ~FLG_MODE_MASK) | rctx->mode;
1175 cryp->hw_blocksize = is_aes(cryp) ? AES_BLOCK_SIZE : DES_BLOCK_SIZE;
1181 cryp->header_in = 0;
1182 cryp->payload_in = req->cryptlen;
1183 cryp->payload_out = req->cryptlen;
1187 * Length of input and output data:
1189 * INPUT = AssocData || PlainText
1190 * <- assoclen -> <- cryptlen ->
1192 * OUTPUT = AssocData || CipherText || AuthTag
1193 * <- assoclen -> <-- cryptlen --> <- authsize ->
1196 * INPUT = AssocData || CipherText || AuthTag
1197 * <- assoclen ---> <---------- cryptlen ---------->
1199 * OUTPUT = AssocData || PlainText
1200 * <- assoclen -> <- cryptlen - authsize ->
1204 cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq));
1205 if (is_encrypt(cryp)) {
1206 cryp->payload_in = areq->cryptlen;
1207 cryp->header_in = areq->assoclen;
1208 cryp->payload_out = areq->cryptlen;
1210 cryp->payload_in = areq->cryptlen - cryp->authsize;
1211 cryp->header_in = areq->assoclen;
1212 cryp->payload_out = cryp->payload_in;
1216 in_sg = req ? req->src : areq->src;
1217 scatterwalk_start(&cryp->in_walk, in_sg);
1219 cryp->out_sg = req ? req->dst : areq->dst;
1220 scatterwalk_start(&cryp->out_walk, cryp->out_sg);
1222 if (is_gcm(cryp) || is_ccm(cryp)) {
1223 /* In output, jump after assoc data */
1224 scatterwalk_copychunks(NULL, &cryp->out_walk, cryp->areq->assoclen, 2);
1228 memset(cryp->last_ctr, 0, sizeof(cryp->last_ctr));
1230 ret = stm32_cryp_hw_init(cryp);
/*
 * crypto_engine callback for skcipher requests: prepare the hardware and
 * kick off interrupt-driven processing.
 */
1234 static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq)
1236 struct skcipher_request *req = container_of(areq,
1237 struct skcipher_request,
1239 struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(
1240 crypto_skcipher_reqtfm(req));
1241 struct stm32_cryp *cryp = ctx->cryp;
1246 return stm32_cryp_prepare_req(req, NULL) ?:
1247 stm32_cryp_cpu_start(cryp);
/*
 * crypto_engine callback for AEAD requests: prepare the hardware; if there
 * is neither payload nor AAD, go straight to the tag phase and finish,
 * otherwise start interrupt-driven processing.
 */
1250 static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq)
1252 struct aead_request *req = container_of(areq, struct aead_request,
1254 struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
1255 struct stm32_cryp *cryp = ctx->cryp;
1261 err = stm32_cryp_prepare_req(NULL, req);
1265 if (unlikely(!cryp->payload_in && !cryp->header_in)) {
1266 /* No input data to process: get tag and finish */
1267 stm32_cryp_finish_req(cryp, 0);
1271 return stm32_cryp_cpu_start(cryp);
/*
 * GCM/CCM phase 4 (final): switch the peripheral to the tag phase. For GCM
 * the AAD and payload bit-lengths are fed in (byte-swapped on parts with
 * the swap_final quirk); for CCM the CTR0 block rebuilt from the request
 * IV is fed in. Then read the tag: on encrypt it is appended to the output
 * scatterlist, on decrypt it is compared against the tag from the input
 * with crypto_memneq() (constant time) — mismatch yields -EBADMSG (return
 * line elided in this listing).
 */
1274 static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp)
1281 cfg = stm32_cryp_read(cryp, cryp->caps->cr);
1285 cfg &= ~CR_DEC_NOT_ENC;
1288 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
1291 /* GCM: write aad and payload size (in bits) */
1292 size_bit = cryp->areq->assoclen * 8;
1293 if (cryp->caps->swap_final)
1294 size_bit = (__force u32)cpu_to_be32(size_bit);
1296 stm32_cryp_write(cryp, cryp->caps->din, 0);
1297 stm32_cryp_write(cryp, cryp->caps->din, size_bit);
1299 size_bit = is_encrypt(cryp) ? cryp->areq->cryptlen :
1300 cryp->areq->cryptlen - cryp->authsize;
1302 if (cryp->caps->swap_final)
1303 size_bit = (__force u32)cpu_to_be32(size_bit);
1305 stm32_cryp_write(cryp, cryp->caps->din, 0);
1306 stm32_cryp_write(cryp, cryp->caps->din, size_bit);
1308 /* CCM: write CTR0 */
1309 u32 iv32[AES_BLOCK_32];
1310 u8 *iv = (u8 *)iv32;
1311 __be32 *biv = (__be32 *)iv32;
1313 memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
1314 memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);
1316 for (i = 0; i < AES_BLOCK_32; i++) {
1319 if (!cryp->caps->padding_wa)
1320 xiv = be32_to_cpu(biv[i]);
1321 stm32_cryp_write(cryp, cryp->caps->din, xiv);
1325 /* Wait for output data */
1326 ret = stm32_cryp_wait_output(cryp);
1328 dev_err(cryp->dev, "Timeout (read tag)\n");
1332 if (is_encrypt(cryp)) {
1333 u32 out_tag[AES_BLOCK_32];
1335 /* Get and write tag */
1336 readsl(cryp->regs + cryp->caps->dout, out_tag, AES_BLOCK_32);
1337 scatterwalk_copychunks(out_tag, &cryp->out_walk, cryp->authsize, 1);
1339 /* Get and check tag */
1340 u32 in_tag[AES_BLOCK_32], out_tag[AES_BLOCK_32];
1342 scatterwalk_copychunks(in_tag, &cryp->in_walk, cryp->authsize, 0);
1343 readsl(cryp->regs + cryp->caps->dout, out_tag, AES_BLOCK_32);
1345 if (crypto_memneq(in_tag, out_tag, cryp->authsize))
1351 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
/*
 * CTR mode: the hardware only increments the low 32-bit counter word and
 * does not propagate the carry. When the saved counter hits 0xFFFFFFFF,
 * increment the full 128-bit value in software and rewrite the IV (with
 * CRYPEN briefly cleared); then re-read the current counter from the IV
 * registers into last_ctr for the next block.
 */
1356 static void stm32_cryp_check_ctr_counter(struct stm32_cryp *cryp)
1360 if (unlikely(cryp->last_ctr[3] == cpu_to_be32(0xFFFFFFFF))) {
1362 * In this case, we need to increment manually the ctr counter,
1363 * as HW doesn't handle the U32 carry.
1365 crypto_inc((u8 *)cryp->last_ctr, sizeof(cryp->last_ctr));
1367 cr = stm32_cryp_read(cryp, cryp->caps->cr);
1368 stm32_cryp_write(cryp, cryp->caps->cr, cr & ~CR_CRYPEN);
1370 stm32_cryp_hw_write_iv(cryp, cryp->last_ctr);
1372 stm32_cryp_write(cryp, cryp->caps->cr, cr);
1375 /* The IV registers are BE */
1376 cryp->last_ctr[0] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0l));
1377 cryp->last_ctr[1] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0r));
1378 cryp->last_ctr[2] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1l));
1379 cryp->last_ctr[3] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1r));
/*
 * Drain one hardware block from the output FIFO and copy at most
 * payload_out bytes of it to the destination scatterlist (the final block
 * may be partial).
 */
1382 static void stm32_cryp_irq_read_data(struct stm32_cryp *cryp)
1384 u32 block[AES_BLOCK_32];
1386 readsl(cryp->regs + cryp->caps->dout, block, cryp->hw_blocksize / sizeof(u32));
1387 scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
1388 cryp->payload_out), 1);
1389 cryp->payload_out -= min_t(size_t, cryp->hw_blocksize,
/*
 * Feed one hardware block to the input FIFO: gather up to one block from
 * the source scatterlist into a zero-padded buffer (so a short final block
 * is implicitly padded) and write it out.
 */
1393 static void stm32_cryp_irq_write_block(struct stm32_cryp *cryp)
1395 u32 block[AES_BLOCK_32] = {0};
1397 scatterwalk_copychunks(block, &cryp->in_walk, min_t(size_t, cryp->hw_blocksize,
1398 cryp->payload_in), 0);
1399 writesl(cryp->regs + cryp->caps->din, block, cryp->hw_blocksize / sizeof(u32));
1400 cryp->payload_in -= min_t(size_t, cryp->hw_blocksize, cryp->payload_in);
/*
 * GCM workaround for a final payload block shorter than 16 bytes (errata
 * procedure from the reference manual): temporarily switch to plain CTR
 * with the counter rewound (gcm_ctr - 2), encrypt the padded last block,
 * capture the result, switch back to GCM, replay the block in the Final
 * phase so the GHASH is computed over correctly padded data, drain the
 * FIFO and complete the request.
 */
1403 static void stm32_cryp_irq_write_gcm_padded_data(struct stm32_cryp *cryp)
1406 u32 cfg, block[AES_BLOCK_32] = {0};
1409 /* 'Special workaround' procedure described in the datasheet */
1412 stm32_cryp_write(cryp, cryp->caps->imsc, 0);
1413 cfg = stm32_cryp_read(cryp, cryp->caps->cr);
1415 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
1417 /* b) Update IV1R */
1418 stm32_cryp_write(cryp, cryp->caps->iv1r, cryp->gcm_ctr - 2);
1420 /* c) change mode to CTR */
1421 cfg &= ~CR_ALGO_MASK;
1423 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
1427 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
1429 /* b) pad and write the last block */
1430 stm32_cryp_irq_write_block(cryp);
1431 /* wait end of process */
1432 err = stm32_cryp_wait_output(cryp);
1434 dev_err(cryp->dev, "Timeout (write gcm last data)\n");
1435 return stm32_cryp_finish_req(cryp, err);
1438 /* c) get and store encrypted data */
1440 * Same code as stm32_cryp_irq_read_data(), but we want to store
1443 readsl(cryp->regs + cryp->caps->dout, block, cryp->hw_blocksize / sizeof(u32));
1445 scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
1446 cryp->payload_out), 1);
1447 cryp->payload_out -= min_t(size_t, cryp->hw_blocksize,
1450 /* d) change mode back to AES GCM */
1451 cfg &= ~CR_ALGO_MASK;
1453 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
1455 /* e) change phase to Final */
1458 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
1460 /* f) write padded data */
1461 writesl(cryp->regs + cryp->caps->din, block, AES_BLOCK_32);
1463 /* g) Empty fifo out */
1464 err = stm32_cryp_wait_output(cryp);
1466 dev_err(cryp->dev, "Timeout (write gcm padded data)\n");
1467 return stm32_cryp_finish_req(cryp, err);
1470 for (i = 0; i < AES_BLOCK_32; i++)
1471 stm32_cryp_read(cryp, cryp->caps->dout);
1473 /* h) run the normal Final phase */
1474 stm32_cryp_finish_req(cryp, 0);
/*
 * Program NPBLB (number of padding bytes in the last block) for a short
 * final GCM/CCM payload block: the peripheral is briefly disabled, the
 * padding-byte count is written into the CR field, and it is re-enabled.
 */
1477 static void stm32_cryp_irq_set_npblb(struct stm32_cryp *cryp)
1481 /* disable ip, set NPBLB and re-enable ip */
1482 cfg = stm32_cryp_read(cryp, cryp->caps->cr);
1484 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
1486 cfg |= (cryp->hw_blocksize - cryp->payload_in) << CR_NBPBL_SHIFT;
1488 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
/*
 * stm32_cryp_irq_write_ccm_padded_data - handle a final CCM payload block
 * shorter than the AES block size on IPs that lack NPBLB support
 * (caps->padding_wa set), for decryption.
 *
 * "Special workaround" from the reference manual: save the CSGCMCCM context
 * registers, decrypt the padded block in CTR mode, then replay the block
 * through the header phase XORed with the before/after context snapshots
 * (cstmp1/cstmp2) so the CBC-MAC ends up correct.
 * Runs in threaded-IRQ context; errors complete the request via
 * stm32_cryp_finish_req().
 */
1491 static void stm32_cryp_irq_write_ccm_padded_data(struct stm32_cryp *cryp)
1495 u32 cstmp1[AES_BLOCK_32], cstmp2[AES_BLOCK_32];
1496 u32 block[AES_BLOCK_32] = {0};
1499 /* 'Special workaround' procedure described in the datasheet */
/* a) mask all interrupts and disable the IP while reconfiguring */
1502 stm32_cryp_write(cryp, cryp->caps->imsc, 0);
1504 cfg = stm32_cryp_read(cryp, cryp->caps->cr);
1506 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
1508 /* b) get IV1 from CRYP_CSGCMCCM7 */
1509 iv1tmp = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + 7 * 4);
1511 /* c) Load CRYP_CSGCMCCMxR */
/* snapshot #1 of the context registers, taken before the CTR pass */
1512 for (i = 0; i < ARRAY_SIZE(cstmp1); i++)
1513 cstmp1[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);
/* d) write IV1R with the value saved in step b */
1516 stm32_cryp_write(cryp, cryp->caps->iv1r, iv1tmp);
1518 /* e) change mode to CTR */
1519 cfg &= ~CR_ALGO_MASK;
1521 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
/* a) re-enable the IP in the new (CTR) configuration */
1525 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
1527 /* b) pad and write the last block */
1528 stm32_cryp_irq_write_block(cryp);
1529 /* wait end of process */
1530 err = stm32_cryp_wait_output(cryp);
1532 dev_err(cryp->dev, "Timeout (write ccm padded data)\n");
1533 return stm32_cryp_finish_req(cryp, err);
1536 /* c) get and store decrypted data */
1538 * Same code as stm32_cryp_irq_read_data(), but we want to store
/* keep the full hw block locally: it is replayed (XORed) in step g */
1541 readsl(cryp->regs + cryp->caps->dout, block, cryp->hw_blocksize / sizeof(u32));
1543 scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
1544 cryp->payload_out), 1);
1545 cryp->payload_out -= min_t(size_t, cryp->hw_blocksize, cryp->payload_out);
1547 /* d) Load again CRYP_CSGCMCCMxR */
/* snapshot #2 of the context registers, taken after the CTR pass */
1548 for (i = 0; i < ARRAY_SIZE(cstmp2); i++)
1549 cstmp2[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);
1551 /* e) change mode back to AES CCM */
1552 cfg &= ~CR_ALGO_MASK;
1554 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
1556 /* f) change phase to header */
1558 cfg |= CR_PH_HEADER;
1559 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
1561 /* g) XOR and write padded data */
1562 for (i = 0; i < ARRAY_SIZE(block); i++) {
1563 block[i] ^= cstmp1[i];
1564 block[i] ^= cstmp2[i];
1565 stm32_cryp_write(cryp, cryp->caps->din, block[i]);
1568 /* h) wait for completion */
1569 err = stm32_cryp_wait_busy(cryp);
1571 dev_err(cryp->dev, "Timeout (write ccm padded data)\n");
1573 /* i) run the normal Final phase */
1574 stm32_cryp_finish_req(cryp, err);
/*
 * stm32_cryp_irq_write_data - input-FIFO interrupt path: feed the next
 * payload block to the IP, dispatching the short-last-block special cases
 * (GCM encrypt / CCM decrypt) either to the register workaround
 * (padding_wa IPs) or to the NPBLB field.
 */
1577 static void stm32_cryp_irq_write_data(struct stm32_cryp *cryp)
1579 if (unlikely(!cryp->payload_in)) {
1580 dev_warn(cryp->dev, "No more data to process\n");
/* GCM encryption with a last block shorter than AES_BLOCK_SIZE */
1584 if (unlikely(cryp->payload_in < AES_BLOCK_SIZE &&
1585 (stm32_cryp_get_hw_mode(cryp) == CR_AES_GCM) &&
1586 is_encrypt(cryp))) {
1587 /* Padding for AES GCM encryption */
1588 if (cryp->caps->padding_wa) {
1589 /* Special case 1 */
1590 stm32_cryp_irq_write_gcm_padded_data(cryp);
1594 /* Setting padding bytes (NBBLB) */
1595 stm32_cryp_irq_set_npblb(cryp);
/* CCM decryption with a last block shorter than AES_BLOCK_SIZE */
1598 if (unlikely((cryp->payload_in < AES_BLOCK_SIZE) &&
1599 (stm32_cryp_get_hw_mode(cryp) == CR_AES_CCM) &&
1600 is_decrypt(cryp))) {
1601 /* Padding for AES CCM decryption */
1602 if (cryp->caps->padding_wa) {
1603 /* Special case 2 */
1604 stm32_cryp_irq_write_ccm_padded_data(cryp);
1608 /* Setting padding bytes (NBBLB) */
1609 stm32_cryp_irq_set_npblb(cryp);
/* AES-CTR: check/handle the 32-bit counter before each block */
1612 if (is_aes(cryp) && is_ctr(cryp))
1613 stm32_cryp_check_ctr_counter(cryp);
1615 stm32_cryp_irq_write_block(cryp);
/*
 * stm32_cryp_irq_write_gcmccm_header - feed up to one zero-padded AES block
 * of GCM/CCM associated data (header) to the IP from the input scatterlist,
 * then let stm32_crypt_gcmccm_end_header() switch phase once header_in
 * reaches zero.
 */
1618 static void stm32_cryp_irq_write_gcmccm_header(struct stm32_cryp *cryp)
1620 u32 block[AES_BLOCK_32] = {0};
1623 written = min_t(size_t, AES_BLOCK_SIZE, cryp->header_in);
1625 scatterwalk_copychunks(block, &cryp->in_walk, written, 0);
/* always push a full hw block; the tail beyond 'written' is zero padding */
1627 writesl(cryp->regs + cryp->caps->din, block, AES_BLOCK_32);
1629 cryp->header_in -= written;
1631 stm32_crypt_gcmccm_end_header(cryp);
/*
 * stm32_cryp_irq_thread - threaded half of the CRYP interrupt: service the
 * output FIFO first, then the input FIFO (header phase or payload), mask
 * interrupts that can no longer fire, and complete the request once all
 * input, header and output counters are exhausted.
 * cryp->irq_status was latched by the hard-IRQ handler (stm32_cryp_irq).
 */
1634 static irqreturn_t stm32_cryp_irq_thread(int irq, void *arg)
1636 struct stm32_cryp *cryp = arg;
1638 u32 it_mask = stm32_cryp_read(cryp, cryp->caps->imsc);
1640 if (cryp->irq_status & MISR_OUT)
1641 /* Output FIFO IRQ: read data */
1642 stm32_cryp_irq_read_data(cryp);
1644 if (cryp->irq_status & MISR_IN) {
1645 if (is_gcm(cryp) || is_ccm(cryp)) {
/* GCM/CCM: header phase and payload phase need different writers */
1646 ph = stm32_cryp_read(cryp, cryp->caps->cr) & CR_PH_MASK;
1647 if (unlikely(ph == CR_PH_HEADER))
/* Write first u32 of first block */
1649 stm32_cryp_irq_write_gcmccm_header(cryp);
1651 /* Input FIFO IRQ: write data */
1652 stm32_cryp_irq_write_data(cryp);
1656 /* Input FIFO IRQ: write data */
1657 stm32_cryp_irq_write_data(cryp);
1661 /* Mask useless interrupts */
1662 if (!cryp->payload_in && !cryp->header_in)
1663 it_mask &= ~IMSCR_IN;
1664 if (!cryp->payload_out)
1665 it_mask &= ~IMSCR_OUT;
1666 stm32_cryp_write(cryp, cryp->caps->imsc, it_mask);
/* all data processed: complete the crypto request with success */
1668 if (!cryp->payload_in && !cryp->header_in && !cryp->payload_out)
1669 stm32_cryp_finish_req(cryp, 0);
/*
 * stm32_cryp_irq - hard-IRQ handler: latch the masked interrupt status
 * (MIS register) for the threaded handler and defer all work to it.
 */
1674 static irqreturn_t stm32_cryp_irq(int irq, void *arg)
1676 struct stm32_cryp *cryp = arg;
1678 cryp->irq_status = stm32_cryp_read(cryp, cryp->caps->mis);
1680 return IRQ_WAKE_THREAD;
/*
 * Symmetric-cipher algorithms exposed through the crypto engine:
 * AES (ECB/CBC/CTR), DES (ECB/CBC) and 3DES-EDE (ECB/CBC).
 * All entries are async (CRYPTO_ALG_ASYNC), priority 200, and are
 * serviced by stm32_cryp_cipher_one_req(). Note CTR uses cra_blocksize 1
 * (stream-cipher semantics).
 */
1683 static struct skcipher_engine_alg crypto_algs[] = {
1686 .base.cra_name = "ecb(aes)",
1687 .base.cra_driver_name = "stm32-ecb-aes",
1688 .base.cra_priority = 200,
1689 .base.cra_flags = CRYPTO_ALG_ASYNC,
1690 .base.cra_blocksize = AES_BLOCK_SIZE,
1691 .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
1692 .base.cra_alignmask = 0,
1693 .base.cra_module = THIS_MODULE,
1695 .init = stm32_cryp_init_tfm,
1696 .min_keysize = AES_MIN_KEY_SIZE,
1697 .max_keysize = AES_MAX_KEY_SIZE,
1698 .setkey = stm32_cryp_aes_setkey,
1699 .encrypt = stm32_cryp_aes_ecb_encrypt,
1700 .decrypt = stm32_cryp_aes_ecb_decrypt,
1703 .do_one_request = stm32_cryp_cipher_one_req,
1708 .base.cra_name = "cbc(aes)",
1709 .base.cra_driver_name = "stm32-cbc-aes",
1710 .base.cra_priority = 200,
1711 .base.cra_flags = CRYPTO_ALG_ASYNC,
1712 .base.cra_blocksize = AES_BLOCK_SIZE,
1713 .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
1714 .base.cra_alignmask = 0,
1715 .base.cra_module = THIS_MODULE,
1717 .init = stm32_cryp_init_tfm,
1718 .min_keysize = AES_MIN_KEY_SIZE,
1719 .max_keysize = AES_MAX_KEY_SIZE,
1720 .ivsize = AES_BLOCK_SIZE,
1721 .setkey = stm32_cryp_aes_setkey,
1722 .encrypt = stm32_cryp_aes_cbc_encrypt,
1723 .decrypt = stm32_cryp_aes_cbc_decrypt,
1726 .do_one_request = stm32_cryp_cipher_one_req,
1731 .base.cra_name = "ctr(aes)",
1732 .base.cra_driver_name = "stm32-ctr-aes",
1733 .base.cra_priority = 200,
1734 .base.cra_flags = CRYPTO_ALG_ASYNC,
1735 .base.cra_blocksize = 1,
1736 .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
1737 .base.cra_alignmask = 0,
1738 .base.cra_module = THIS_MODULE,
1740 .init = stm32_cryp_init_tfm,
1741 .min_keysize = AES_MIN_KEY_SIZE,
1742 .max_keysize = AES_MAX_KEY_SIZE,
1743 .ivsize = AES_BLOCK_SIZE,
1744 .setkey = stm32_cryp_aes_setkey,
1745 .encrypt = stm32_cryp_aes_ctr_encrypt,
1746 .decrypt = stm32_cryp_aes_ctr_decrypt,
1749 .do_one_request = stm32_cryp_cipher_one_req,
1754 .base.cra_name = "ecb(des)",
1755 .base.cra_driver_name = "stm32-ecb-des",
1756 .base.cra_priority = 200,
1757 .base.cra_flags = CRYPTO_ALG_ASYNC,
1758 .base.cra_blocksize = DES_BLOCK_SIZE,
1759 .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
1760 .base.cra_alignmask = 0,
1761 .base.cra_module = THIS_MODULE,
1763 .init = stm32_cryp_init_tfm,
1764 .min_keysize = DES_BLOCK_SIZE,
1765 .max_keysize = DES_BLOCK_SIZE,
1766 .setkey = stm32_cryp_des_setkey,
1767 .encrypt = stm32_cryp_des_ecb_encrypt,
1768 .decrypt = stm32_cryp_des_ecb_decrypt,
1771 .do_one_request = stm32_cryp_cipher_one_req,
1776 .base.cra_name = "cbc(des)",
1777 .base.cra_driver_name = "stm32-cbc-des",
1778 .base.cra_priority = 200,
1779 .base.cra_flags = CRYPTO_ALG_ASYNC,
1780 .base.cra_blocksize = DES_BLOCK_SIZE,
1781 .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
1782 .base.cra_alignmask = 0,
1783 .base.cra_module = THIS_MODULE,
1785 .init = stm32_cryp_init_tfm,
1786 .min_keysize = DES_BLOCK_SIZE,
1787 .max_keysize = DES_BLOCK_SIZE,
1788 .ivsize = DES_BLOCK_SIZE,
1789 .setkey = stm32_cryp_des_setkey,
1790 .encrypt = stm32_cryp_des_cbc_encrypt,
1791 .decrypt = stm32_cryp_des_cbc_decrypt,
1794 .do_one_request = stm32_cryp_cipher_one_req,
1799 .base.cra_name = "ecb(des3_ede)",
1800 .base.cra_driver_name = "stm32-ecb-des3",
1801 .base.cra_priority = 200,
1802 .base.cra_flags = CRYPTO_ALG_ASYNC,
1803 .base.cra_blocksize = DES_BLOCK_SIZE,
1804 .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
1805 .base.cra_alignmask = 0,
1806 .base.cra_module = THIS_MODULE,
1808 .init = stm32_cryp_init_tfm,
1809 .min_keysize = 3 * DES_BLOCK_SIZE,
1810 .max_keysize = 3 * DES_BLOCK_SIZE,
1811 .setkey = stm32_cryp_tdes_setkey,
1812 .encrypt = stm32_cryp_tdes_ecb_encrypt,
1813 .decrypt = stm32_cryp_tdes_ecb_decrypt,
1816 .do_one_request = stm32_cryp_cipher_one_req,
1821 .base.cra_name = "cbc(des3_ede)",
1822 .base.cra_driver_name = "stm32-cbc-des3",
1823 .base.cra_priority = 200,
1824 .base.cra_flags = CRYPTO_ALG_ASYNC,
1825 .base.cra_blocksize = DES_BLOCK_SIZE,
1826 .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
1827 .base.cra_alignmask = 0,
1828 .base.cra_module = THIS_MODULE,
1830 .init = stm32_cryp_init_tfm,
1831 .min_keysize = 3 * DES_BLOCK_SIZE,
1832 .max_keysize = 3 * DES_BLOCK_SIZE,
1833 .ivsize = DES_BLOCK_SIZE,
1834 .setkey = stm32_cryp_tdes_setkey,
1835 .encrypt = stm32_cryp_tdes_cbc_encrypt,
1836 .decrypt = stm32_cryp_tdes_cbc_decrypt,
1839 .do_one_request = stm32_cryp_cipher_one_req,
/*
 * AEAD algorithms (AES-GCM and AES-CCM) exposed through the crypto engine,
 * serviced by stm32_cryp_aead_one_req(). Registered only when the IP
 * supports AEAD (caps->aeads_support).
 */
1844 static struct aead_engine_alg aead_algs[] = {
1846 .base.setkey = stm32_cryp_aes_aead_setkey,
1847 .base.setauthsize = stm32_cryp_aes_gcm_setauthsize,
1848 .base.encrypt = stm32_cryp_aes_gcm_encrypt,
1849 .base.decrypt = stm32_cryp_aes_gcm_decrypt,
1850 .base.init = stm32_cryp_aes_aead_init,
1852 .base.maxauthsize = AES_BLOCK_SIZE,
1855 .cra_name = "gcm(aes)",
1856 .cra_driver_name = "stm32-gcm-aes",
1857 .cra_priority = 200,
1858 .cra_flags = CRYPTO_ALG_ASYNC,
1860 .cra_ctxsize = sizeof(struct stm32_cryp_ctx),
1862 .cra_module = THIS_MODULE,
1865 .do_one_request = stm32_cryp_aead_one_req,
1869 .base.setkey = stm32_cryp_aes_aead_setkey,
1870 .base.setauthsize = stm32_cryp_aes_ccm_setauthsize,
1871 .base.encrypt = stm32_cryp_aes_ccm_encrypt,
1872 .base.decrypt = stm32_cryp_aes_ccm_decrypt,
1873 .base.init = stm32_cryp_aes_aead_init,
1874 .base.ivsize = AES_BLOCK_SIZE,
1875 .base.maxauthsize = AES_BLOCK_SIZE,
1878 .cra_name = "ccm(aes)",
1879 .cra_driver_name = "stm32-ccm-aes",
1880 .cra_priority = 200,
1881 .cra_flags = CRYPTO_ALG_ASYNC,
1883 .cra_ctxsize = sizeof(struct stm32_cryp_ctx),
1885 .cra_module = THIS_MODULE,
1888 .do_one_request = stm32_cryp_aead_one_req,
/*
 * Capabilities and register map for the Ux500 CRYP variant: no AEAD
 * support, linear AES key layout, IV protection quirk, and its own
 * register offsets (UX500_CRYP_*).
 */
1893 static const struct stm32_cryp_caps ux500_data = {
1894 .aeads_support = false,
1895 .linear_aes_key = true,
1897 .iv_protection = true,
1900 .cr = UX500_CRYP_CR,
1901 .sr = UX500_CRYP_SR,
1902 .din = UX500_CRYP_DIN,
1903 .dout = UX500_CRYP_DOUT,
1904 .imsc = UX500_CRYP_IMSC,
1905 .mis = UX500_CRYP_MIS,
1906 .k1l = UX500_CRYP_K1L,
1907 .k1r = UX500_CRYP_K1R,
1908 .k3r = UX500_CRYP_K3R,
1909 .iv0l = UX500_CRYP_IV0L,
1910 .iv0r = UX500_CRYP_IV0R,
1911 .iv1l = UX500_CRYP_IV1L,
1912 .iv1r = UX500_CRYP_IV1R,
/*
 * Capabilities for the STM32F7 CRYP variant: AEAD supported, standard
 * (non-linear) AES key layout, no IV protection quirk.
 */
1915 static const struct stm32_cryp_caps f7_data = {
1916 .aeads_support = true,
1917 .linear_aes_key = false,
1919 .iv_protection = false,
/*
 * Capabilities for the STM32MP1 CRYP variant: AEAD supported, no key/IV
 * quirks, no swap-final or padding workaround needed (NPBLB available).
 */
1937 static const struct stm32_cryp_caps mp1_data = {
1938 .aeads_support = true,
1939 .linear_aes_key = false,
1941 .iv_protection = false,
1942 .swap_final = false,
1943 .padding_wa = false,
/* Device-tree match table binding each compatible to its capability set. */
1959 static const struct of_device_id stm32_dt_ids[] = {
1960 { .compatible = "stericsson,ux500-cryp", .data = &ux500_data},
1961 { .compatible = "st,stm32f756-cryp", .data = &f7_data},
1962 { .compatible = "st,stm32mp1-cryp", .data = &mp1_data},
1965 MODULE_DEVICE_TABLE(of, stm32_dt_ids);
/*
 * stm32_cryp_probe - bind the driver to a CRYP instance: map registers,
 * request the threaded IRQ, enable the clock, set up runtime PM with
 * autosuspend, pulse the optional reset, register the device on the
 * global list, then create/start a crypto engine and register the
 * skcipher (and, if supported, AEAD) algorithms.
 * Error paths unwind in reverse order via goto labels (not all visible
 * in this excerpt).
 */
1967 static int stm32_cryp_probe(struct platform_device *pdev)
1969 struct device *dev = &pdev->dev;
1970 struct stm32_cryp *cryp;
1971 struct reset_control *rst;
1974 cryp = devm_kzalloc(dev, sizeof(*cryp), GFP_KERNEL);
1978 cryp->caps = of_device_get_match_data(dev);
1984 cryp->regs = devm_platform_ioremap_resource(pdev, 0);
1985 if (IS_ERR(cryp->regs))
1986 return PTR_ERR(cryp->regs);
1988 irq = platform_get_irq(pdev, 0);
/* hard handler latches status; threaded handler does the FIFO work */
1992 ret = devm_request_threaded_irq(dev, irq, stm32_cryp_irq,
1993 stm32_cryp_irq_thread, IRQF_ONESHOT,
1994 dev_name(dev), cryp);
1996 dev_err(dev, "Cannot grab IRQ\n");
2000 cryp->clk = devm_clk_get(dev, NULL);
2001 if (IS_ERR(cryp->clk)) {
2002 dev_err_probe(dev, PTR_ERR(cryp->clk), "Could not get clock\n");
2004 return PTR_ERR(cryp->clk);
2007 ret = clk_prepare_enable(cryp->clk);
2009 dev_err(cryp->dev, "Failed to enable clock\n");
/* runtime PM: device starts active, autosuspends after the delay */
2013 pm_runtime_set_autosuspend_delay(dev, CRYP_AUTOSUSPEND_DELAY);
2014 pm_runtime_use_autosuspend(dev);
2016 pm_runtime_get_noresume(dev);
2017 pm_runtime_set_active(dev);
2018 pm_runtime_enable(dev);
/* reset is optional; only EPROBE_DEFER aborts the probe */
2020 rst = devm_reset_control_get(dev, NULL);
2023 if (ret == -EPROBE_DEFER)
2026 reset_control_assert(rst);
2028 reset_control_deassert(rst);
2031 platform_set_drvdata(pdev, cryp);
2033 spin_lock(&cryp_list.lock);
2034 list_add(&cryp->list, &cryp_list.dev_list);
2035 spin_unlock(&cryp_list.lock);
2037 /* Initialize crypto engine */
2038 cryp->engine = crypto_engine_alloc_init(dev, 1);
2039 if (!cryp->engine) {
2040 dev_err(dev, "Could not init crypto engine\n");
2045 ret = crypto_engine_start(cryp->engine);
2047 dev_err(dev, "Could not start crypto engine\n");
2051 ret = crypto_engine_register_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
2053 dev_err(dev, "Could not register algs\n");
/* AEAD algs only exist on IPs that support them (f7/mp1) */
2057 if (cryp->caps->aeads_support) {
2058 ret = crypto_engine_register_aeads(aead_algs, ARRAY_SIZE(aead_algs));
2063 dev_info(dev, "Initialized\n");
/* drop the probe-time reference; autosuspend may now idle the device */
2065 pm_runtime_put_sync(dev);
/* error unwind: unregister algs, stop engine, unlink, disable PM+clock */
2070 crypto_engine_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
2073 crypto_engine_exit(cryp->engine);
2075 spin_lock(&cryp_list.lock);
2076 list_del(&cryp->list);
2077 spin_unlock(&cryp_list.lock);
2079 pm_runtime_disable(dev);
2080 pm_runtime_put_noidle(dev);
2082 clk_disable_unprepare(cryp->clk);
/*
 * stm32_cryp_remove - unbind: resume the device, unregister AEAD (if
 * supported) and skcipher algorithms, tear down the crypto engine, remove
 * the device from the global list, then disable runtime PM and the clock.
 */
2087 static void stm32_cryp_remove(struct platform_device *pdev)
2089 struct stm32_cryp *cryp = platform_get_drvdata(pdev);
/* make sure the device is powered before touching it */
2092 ret = pm_runtime_get_sync(cryp->dev);
2094 if (cryp->caps->aeads_support)
2095 crypto_engine_unregister_aeads(aead_algs, ARRAY_SIZE(aead_algs));
2096 crypto_engine_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
2098 crypto_engine_exit(cryp->engine);
2100 spin_lock(&cryp_list.lock);
2101 list_del(&cryp->list);
2102 spin_unlock(&cryp_list.lock);
2104 pm_runtime_disable(cryp->dev);
2105 pm_runtime_put_noidle(cryp->dev);
2108 clk_disable_unprepare(cryp->clk);
/* Runtime-PM suspend: gate the CRYP clock. */
2112 static int stm32_cryp_runtime_suspend(struct device *dev)
2114 struct stm32_cryp *cryp = dev_get_drvdata(dev);
2116 clk_disable_unprepare(cryp->clk);
/* Runtime-PM resume: re-enable the CRYP clock, reporting failure. */
2121 static int stm32_cryp_runtime_resume(struct device *dev)
2123 struct stm32_cryp *cryp = dev_get_drvdata(dev);
2126 ret = clk_prepare_enable(cryp->clk);
2128 dev_err(cryp->dev, "Failed to prepare_enable clock\n");
/*
 * PM ops: system sleep is delegated to runtime PM (force suspend/resume);
 * runtime PM just gates the clock.
 */
2136 static const struct dev_pm_ops stm32_cryp_pm_ops = {
2137 SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
2138 pm_runtime_force_resume)
2139 SET_RUNTIME_PM_OPS(stm32_cryp_runtime_suspend,
2140 stm32_cryp_runtime_resume, NULL)
/* Platform driver glue; matched via stm32_dt_ids (device tree only). */
2143 static struct platform_driver stm32_cryp_driver = {
2144 .probe = stm32_cryp_probe,
2145 .remove_new = stm32_cryp_remove,
2147 .name = DRIVER_NAME,
2148 .pm = &stm32_cryp_pm_ops,
2149 .of_match_table = stm32_dt_ids,
2153 module_platform_driver(stm32_cryp_driver);
/* Fix vendor-name typo in the user-visible modinfo description. */
2156 MODULE_DESCRIPTION("STMicroelectronics STM32 CRYP hardware driver");
2157 MODULE_LICENSE("GPL");