[linux.git] / drivers / crypto / stm32 / stm32-hash.c
1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  * This file is part of STM32 Crypto driver for Linux.
4  *
5  * Copyright (C) 2017, STMicroelectronics - All Rights Reserved
6  * Author(s): Lionel DEBIEVE <[email protected]> for STMicroelectronics.
7  */
8
9 #include <crypto/engine.h>
10 #include <crypto/internal/hash.h>
11 #include <crypto/md5.h>
12 #include <crypto/scatterwalk.h>
13 #include <crypto/sha1.h>
14 #include <crypto/sha2.h>
15 #include <crypto/sha3.h>
16 #include <linux/clk.h>
17 #include <linux/delay.h>
18 #include <linux/dma-mapping.h>
19 #include <linux/dmaengine.h>
20 #include <linux/interrupt.h>
21 #include <linux/iopoll.h>
22 #include <linux/kernel.h>
23 #include <linux/module.h>
24 #include <linux/of.h>
25 #include <linux/platform_device.h>
26 #include <linux/pm_runtime.h>
27 #include <linux/reset.h>
28 #include <linux/string.h>
29
30 #define HASH_CR                         0x00
31 #define HASH_DIN                        0x04
32 #define HASH_STR                        0x08
33 #define HASH_UX500_HREG(x)              (0x0c + ((x) * 0x04))
34 #define HASH_IMR                        0x20
35 #define HASH_SR                         0x24
36 #define HASH_CSR(x)                     (0x0F8 + ((x) * 0x04))
37 #define HASH_HREG(x)                    (0x310 + ((x) * 0x04))
38 #define HASH_HWCFGR                     0x3F0
39 #define HASH_VER                        0x3F4
40 #define HASH_ID                         0x3F8
41
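/*
 * Register map used below: HASH_CR (control), HASH_DIN (data input FIFO),
 * HASH_STR (start register, holding NBLW and the DCAL "start digest" bit),
 * HASH_IMR/HASH_SR (interrupt mask and status), HASH_CSR(x) (context swap
 * registers saved/restored to suspend and resume a computation) and
 * HASH_HREG(x) (digest output). The Ux500 variant exposes its digest
 * through HASH_UX500_HREG(x) at offset 0x0c and has no status register.
 */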
42 /* Control Register */
43 #define HASH_CR_INIT                    BIT(2)
44 #define HASH_CR_DMAE                    BIT(3)
45 #define HASH_CR_DATATYPE_POS            4
46 #define HASH_CR_MODE                    BIT(6)
47 #define HASH_CR_ALGO_POS                7
48 #define HASH_CR_MDMAT                   BIT(13)
49 #define HASH_CR_DMAA                    BIT(14)
50 #define HASH_CR_LKEY                    BIT(16)
51
52 /* Interrupt */
53 #define HASH_DINIE                      BIT(0)
54 #define HASH_DCIE                       BIT(1)
55
56 /* Interrupt Mask */
57 #define HASH_MASK_CALC_COMPLETION       BIT(0)
58 #define HASH_MASK_DATA_INPUT            BIT(1)
59
60 /* Status Flags */
61 #define HASH_SR_DATA_INPUT_READY        BIT(0)
62 #define HASH_SR_OUTPUT_READY            BIT(1)
63 #define HASH_SR_DMA_ACTIVE              BIT(2)
64 #define HASH_SR_BUSY                    BIT(3)
65
66 /* STR Register */
67 #define HASH_STR_NBLW_MASK              GENMASK(4, 0)
68 #define HASH_STR_DCAL                   BIT(8)
69
70 /* HWCFGR Register */
71 #define HASH_HWCFG_DMA_MASK             GENMASK(3, 0)
72
73 /* Context swap register */
74 #define HASH_CSR_NB_SHA256_HMAC         54
75 #define HASH_CSR_NB_SHA256              38
76 #define HASH_CSR_NB_SHA512_HMAC         103
77 #define HASH_CSR_NB_SHA512              91
78 #define HASH_CSR_NB_SHA3_HMAC           88
79 #define HASH_CSR_NB_SHA3                72
80 #define HASH_CSR_NB_MAX                 HASH_CSR_NB_SHA512_HMAC
81
82 #define HASH_FLAGS_INIT                 BIT(0)
83 #define HASH_FLAGS_OUTPUT_READY         BIT(1)
84 #define HASH_FLAGS_CPU                  BIT(2)
85 #define HASH_FLAGS_DMA_ACTIVE           BIT(3)
86 #define HASH_FLAGS_HMAC_INIT            BIT(4)
87 #define HASH_FLAGS_HMAC_FINAL           BIT(5)
88 #define HASH_FLAGS_HMAC_KEY             BIT(6)
89 #define HASH_FLAGS_SHA3_MODE            BIT(7)
90 #define HASH_FLAGS_FINAL                BIT(15)
91 #define HASH_FLAGS_FINUP                BIT(16)
92 #define HASH_FLAGS_ALGO_MASK            GENMASK(20, 17)
93 #define HASH_FLAGS_ALGO_SHIFT           17
94 #define HASH_FLAGS_ERRORS               BIT(21)
95 #define HASH_FLAGS_EMPTY                BIT(22)
96 #define HASH_FLAGS_HMAC                 BIT(23)
97
98 #define HASH_OP_UPDATE                  1
99 #define HASH_OP_FINAL                   2
100
101 #define HASH_BURST_LEVEL                4
102
103 enum stm32_hash_data_format {
104         HASH_DATA_32_BITS               = 0x0,
105         HASH_DATA_16_BITS               = 0x1,
106         HASH_DATA_8_BITS                = 0x2,
107         HASH_DATA_1_BIT                 = 0x3
108 };
109
110 #define HASH_BUFLEN                     (SHA3_224_BLOCK_SIZE + 4)
111 #define HASH_MAX_KEY_SIZE               (SHA512_BLOCK_SIZE * 8)
112
113 enum stm32_hash_algo {
114         HASH_SHA1                       = 0,
115         HASH_MD5                        = 1,
116         HASH_SHA224                     = 2,
117         HASH_SHA256                     = 3,
118         HASH_SHA3_224                   = 4,
119         HASH_SHA3_256                   = 5,
120         HASH_SHA3_384                   = 6,
121         HASH_SHA3_512                   = 7,
122         HASH_SHA384                     = 12,
123         HASH_SHA512                     = 15,
124 };
125
126 enum ux500_hash_algo {
127         HASH_SHA256_UX500               = 0,
128         HASH_SHA1_UX500                 = 1,
129 };
130
131 #define HASH_AUTOSUSPEND_DELAY          50
132
133 struct stm32_hash_ctx {
134         struct stm32_hash_dev   *hdev;
135         struct crypto_shash     *xtfm;
136         unsigned long           flags;
137
138         u8                      key[HASH_MAX_KEY_SIZE];
139         int                     keylen;
140 };
141
142 struct stm32_hash_state {
143         u32                     flags;
144
145         u16                     bufcnt;
146         u16                     blocklen;
147
148         u8 buffer[HASH_BUFLEN] __aligned(4);
149
150         /* hash state */
151         u32                     hw_context[3 + HASH_CSR_NB_MAX];
152 };
153
154 struct stm32_hash_request_ctx {
155         struct stm32_hash_dev   *hdev;
156         unsigned long           op;
157
158         u8 digest[SHA512_DIGEST_SIZE] __aligned(sizeof(u32));
159         size_t                  digcnt;
160
161         /* DMA */
162         struct scatterlist      *sg;
163         unsigned int            offset;
164         unsigned int            total;
165         struct scatterlist      sg_key;
166
167         dma_addr_t              dma_addr;
168         size_t                  dma_ct;
169         int                     nents;
170
171         u8                      data_type;
172
173         struct stm32_hash_state state;
174 };
175
176 struct stm32_hash_algs_info {
177         struct ahash_engine_alg *algs_list;
178         size_t                  size;
179 };
180
181 struct stm32_hash_pdata {
182         const int                               alg_shift;
183         const struct stm32_hash_algs_info       *algs_info;
184         size_t                                  algs_info_size;
185         bool                                    has_sr;
186         bool                                    has_mdmat;
187         bool                                    broken_emptymsg;
188         bool                                    ux500;
189 };
190
191 struct stm32_hash_dev {
192         struct list_head        list;
193         struct device           *dev;
194         struct clk              *clk;
195         struct reset_control    *rst;
196         void __iomem            *io_base;
197         phys_addr_t             phys_base;
198         u32                     dma_mode;
199         bool                    polled;
200
201         struct ahash_request    *req;
202         struct crypto_engine    *engine;
203
204         unsigned long           flags;
205
206         struct dma_chan         *dma_lch;
207         struct completion       dma_completion;
208
209         const struct stm32_hash_pdata   *pdata;
210 };
211
212 struct stm32_hash_drv {
213         struct list_head        dev_list;
214         spinlock_t              lock; /* protects access to dev_list */
215 };
216
217 static struct stm32_hash_drv stm32_hash = {
218         .dev_list = LIST_HEAD_INIT(stm32_hash.dev_list),
219         .lock = __SPIN_LOCK_UNLOCKED(stm32_hash.lock),
220 };
221
222 static void stm32_hash_dma_callback(void *param);
223
224 static inline u32 stm32_hash_read(struct stm32_hash_dev *hdev, u32 offset)
225 {
226         return readl_relaxed(hdev->io_base + offset);
227 }
228
229 static inline void stm32_hash_write(struct stm32_hash_dev *hdev,
230                                     u32 offset, u32 value)
231 {
232         writel_relaxed(value, hdev->io_base + offset);
233 }
234
235 static inline int stm32_hash_wait_busy(struct stm32_hash_dev *hdev)
236 {
237         u32 status;
238
239         /* The Ux500 lacks the special status register; poll the DCAL bit instead */
240         if (!hdev->pdata->has_sr)
241                 return readl_relaxed_poll_timeout(hdev->io_base + HASH_STR, status,
242                                                   !(status & HASH_STR_DCAL), 10, 10000);
243
244         return readl_relaxed_poll_timeout(hdev->io_base + HASH_SR, status,
245                                    !(status & HASH_SR_BUSY), 10, 10000);
246 }
247
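/*
 * NBLW tells the peripheral how many bits of the last 32-bit word written
 * to HASH_DIN are valid: 8 * (length % 4), i.e. 0 when the message length
 * is a multiple of 4 bytes (all 32 bits valid), otherwise 8, 16 or 24.
 */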
248 static void stm32_hash_set_nblw(struct stm32_hash_dev *hdev, int length)
249 {
250         u32 reg;
251
252         reg = stm32_hash_read(hdev, HASH_STR);
253         reg &= ~(HASH_STR_NBLW_MASK);
254         reg |= (8U * ((length) % 4U));
255         stm32_hash_write(hdev, HASH_STR, reg);
256 }
257
258 static int stm32_hash_write_key(struct stm32_hash_dev *hdev)
259 {
260         struct crypto_ahash *tfm = crypto_ahash_reqtfm(hdev->req);
261         struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);
262         u32 reg;
263         int keylen = ctx->keylen;
264         void *key = ctx->key;
265
266         if (keylen) {
267                 stm32_hash_set_nblw(hdev, keylen);
268
269                 while (keylen > 0) {
270                         stm32_hash_write(hdev, HASH_DIN, *(u32 *)key);
271                         keylen -= 4;
272                         key += 4;
273                 }
274
275                 reg = stm32_hash_read(hdev, HASH_STR);
276                 reg |= HASH_STR_DCAL;
277                 stm32_hash_write(hdev, HASH_STR, reg);
278
279                 return -EINPROGRESS;
280         }
281
282         return 0;
283 }
284
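/*
 * Program HASH_CR for the current request. The algorithm field layout
 * depends on the IP version: Ux500 has a single algorithm bit at
 * HASH_CR_ALGO_POS; on versions where pdata->alg_shift equals
 * HASH_CR_ALGO_POS the two algorithm bits are split (bit 0 at CR bit 7,
 * bit 1 at CR bit 18); newer versions take the full enum stm32_hash_algo
 * value at pdata->alg_shift. HMAC mode additionally sets HASH_CR_MODE and,
 * for keys longer than the block size, HASH_CR_LKEY.
 */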
285 static void stm32_hash_write_ctrl(struct stm32_hash_dev *hdev)
286 {
287         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);
288         struct crypto_ahash *tfm = crypto_ahash_reqtfm(hdev->req);
289         struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);
290         struct stm32_hash_state *state = &rctx->state;
291         u32 alg = (state->flags & HASH_FLAGS_ALGO_MASK) >> HASH_FLAGS_ALGO_SHIFT;
292
293         u32 reg = HASH_CR_INIT;
294
295         if (!(hdev->flags & HASH_FLAGS_INIT)) {
296                 if (hdev->pdata->ux500) {
297                         reg |= ((alg & BIT(0)) << HASH_CR_ALGO_POS);
298                 } else {
299                         if (hdev->pdata->alg_shift == HASH_CR_ALGO_POS)
300                                 reg |= ((alg & BIT(1)) << 17) |
301                                        ((alg & BIT(0)) << HASH_CR_ALGO_POS);
302                         else
303                                 reg |= alg << hdev->pdata->alg_shift;
304                 }
305
306                 reg |= (rctx->data_type << HASH_CR_DATATYPE_POS);
307
308                 if (state->flags & HASH_FLAGS_HMAC) {
309                         hdev->flags |= HASH_FLAGS_HMAC;
310                         reg |= HASH_CR_MODE;
311                         if (ctx->keylen > crypto_ahash_blocksize(tfm))
312                                 reg |= HASH_CR_LKEY;
313                 }
314
315                 if (!hdev->polled)
316                         stm32_hash_write(hdev, HASH_IMR, HASH_DCIE);
317
318                 stm32_hash_write(hdev, HASH_CR, reg);
319
320                 hdev->flags |= HASH_FLAGS_INIT;
321
322                 /*
323                  * Once the first block + 1 words have been filled,
324                  * only one more block is needed to start a partial computation.
325                  */
326                 rctx->state.blocklen -= sizeof(u32);
327
328                 dev_dbg(hdev->dev, "Write Control %x\n", reg);
329         }
330 }
331
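/*
 * Copy bytes from the request scatterlist into state->buffer until either
 * a full block (state->blocklen) has been gathered or the input is
 * exhausted, advancing rctx->sg/offset/total as it goes.
 */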
332 static void stm32_hash_append_sg(struct stm32_hash_request_ctx *rctx)
333 {
334         struct stm32_hash_state *state = &rctx->state;
335         size_t count;
336
337         while ((state->bufcnt < state->blocklen) && rctx->total) {
338                 count = min(rctx->sg->length - rctx->offset, rctx->total);
339                 count = min_t(size_t, count, state->blocklen - state->bufcnt);
340
341                 if (count <= 0) {
342                         if ((rctx->sg->length == 0) && !sg_is_last(rctx->sg)) {
343                                 rctx->sg = sg_next(rctx->sg);
344                                 continue;
345                         } else {
346                                 break;
347                         }
348                 }
349
350                 scatterwalk_map_and_copy(state->buffer + state->bufcnt,
351                                          rctx->sg, rctx->offset, count, 0);
352
353                 state->bufcnt += count;
354                 rctx->offset += count;
355                 rctx->total -= count;
356
357                 if (rctx->offset == rctx->sg->length) {
358                         rctx->sg = sg_next(rctx->sg);
359                         if (rctx->sg)
360                                 rctx->offset = 0;
361                         else
362                                 rctx->total = 0;
363                 }
364         }
365 }
366
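/*
 * CPU (PIO) transmit path: program the control register, then feed the
 * buffer to HASH_DIN word by word. In HMAC mode the key is written once
 * before the message and once more after the final block. On the final
 * block, NBLW is set and DCAL starts the digest computation; -EINPROGRESS
 * means completion will be signalled by interrupt or by polling.
 */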
367 static int stm32_hash_xmit_cpu(struct stm32_hash_dev *hdev,
368                                const u8 *buf, size_t length, int final)
369 {
370         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);
371         struct stm32_hash_state *state = &rctx->state;
372         unsigned int count, len32;
373         const u32 *buffer = (const u32 *)buf;
374         u32 reg;
375
376         if (final) {
377                 hdev->flags |= HASH_FLAGS_FINAL;
378
379                 /* Do not process empty messages if hw is buggy. */
380                 if (!(hdev->flags & HASH_FLAGS_INIT) && !length &&
381                     hdev->pdata->broken_emptymsg) {
382                         state->flags |= HASH_FLAGS_EMPTY;
383                         return 0;
384                 }
385         }
386
387         len32 = DIV_ROUND_UP(length, sizeof(u32));
388
389         dev_dbg(hdev->dev, "%s: length: %zd, final: %x len32 %i\n",
390                 __func__, length, final, len32);
391
392         hdev->flags |= HASH_FLAGS_CPU;
393
394         stm32_hash_write_ctrl(hdev);
395
396         if (stm32_hash_wait_busy(hdev))
397                 return -ETIMEDOUT;
398
399         if ((hdev->flags & HASH_FLAGS_HMAC) &&
400             (!(hdev->flags & HASH_FLAGS_HMAC_KEY))) {
401                 hdev->flags |= HASH_FLAGS_HMAC_KEY;
402                 stm32_hash_write_key(hdev);
403                 if (stm32_hash_wait_busy(hdev))
404                         return -ETIMEDOUT;
405         }
406
407         for (count = 0; count < len32; count++)
408                 stm32_hash_write(hdev, HASH_DIN, buffer[count]);
409
410         if (final) {
411                 if (stm32_hash_wait_busy(hdev))
412                         return -ETIMEDOUT;
413
414                 stm32_hash_set_nblw(hdev, length);
415                 reg = stm32_hash_read(hdev, HASH_STR);
416                 reg |= HASH_STR_DCAL;
417                 stm32_hash_write(hdev, HASH_STR, reg);
418                 if (hdev->flags & HASH_FLAGS_HMAC) {
419                         if (stm32_hash_wait_busy(hdev))
420                                 return -ETIMEDOUT;
421                         stm32_hash_write_key(hdev);
422                 }
423                 return -EINPROGRESS;
424         }
425
426         return 0;
427 }
428
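/*
 * Number of HASH_CSR context-swap registers to save/restore for the
 * selected algorithm; HMAC mode needs more of them.
 */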
429 static int hash_swap_reg(struct stm32_hash_request_ctx *rctx)
430 {
431         struct stm32_hash_state *state = &rctx->state;
432
433         switch ((state->flags & HASH_FLAGS_ALGO_MASK) >>
434                 HASH_FLAGS_ALGO_SHIFT) {
435         case HASH_MD5:
436         case HASH_SHA1:
437         case HASH_SHA224:
438         case HASH_SHA256:
439                 if (state->flags & HASH_FLAGS_HMAC)
440                         return HASH_CSR_NB_SHA256_HMAC;
441                 else
442                         return HASH_CSR_NB_SHA256;
443                 break;
444
445         case HASH_SHA384:
446         case HASH_SHA512:
447                 if (state->flags & HASH_FLAGS_HMAC)
448                         return HASH_CSR_NB_SHA512_HMAC;
449                 else
450                         return HASH_CSR_NB_SHA512;
451                 break;
452
453         case HASH_SHA3_224:
454         case HASH_SHA3_256:
455         case HASH_SHA3_384:
456         case HASH_SHA3_512:
457                 if (state->flags & HASH_FLAGS_HMAC)
458                         return HASH_CSR_NB_SHA3_HMAC;
459                 else
460                         return HASH_CSR_NB_SHA3;
461                 break;
462
463         default:
464                 return -EINVAL;
465         }
466 }
467
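/*
 * CPU update path: push every complete block through stm32_hash_xmit_cpu(),
 * keep any remainder buffered and, unless this is the final call, save
 * HASH_IMR/HASH_STR/HASH_CR plus the CSR context into state->hw_context so
 * the request can be resumed later (see stm32_hash_one_request()).
 */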
468 static int stm32_hash_update_cpu(struct stm32_hash_dev *hdev)
469 {
470         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);
471         struct stm32_hash_state *state = &rctx->state;
472         u32 *preg = state->hw_context;
473         int bufcnt, err = 0, final;
474         int i, swap_reg;
475
476         dev_dbg(hdev->dev, "%s flags %x\n", __func__, state->flags);
477
478         final = state->flags & HASH_FLAGS_FINAL;
479
480         while ((rctx->total >= state->blocklen) ||
481                (state->bufcnt + rctx->total >= state->blocklen)) {
482                 stm32_hash_append_sg(rctx);
483                 bufcnt = state->bufcnt;
484                 state->bufcnt = 0;
485                 err = stm32_hash_xmit_cpu(hdev, state->buffer, bufcnt, 0);
486                 if (err)
487                         return err;
488         }
489
490         stm32_hash_append_sg(rctx);
491
492         if (final) {
493                 bufcnt = state->bufcnt;
494                 state->bufcnt = 0;
495                 return stm32_hash_xmit_cpu(hdev, state->buffer, bufcnt, 1);
496         }
497
498         if (!(hdev->flags & HASH_FLAGS_INIT))
499                 return 0;
500
501         if (stm32_hash_wait_busy(hdev))
502                 return -ETIMEDOUT;
503
504         swap_reg = hash_swap_reg(rctx);
505
506         if (!hdev->pdata->ux500)
507                 *preg++ = stm32_hash_read(hdev, HASH_IMR);
508         *preg++ = stm32_hash_read(hdev, HASH_STR);
509         *preg++ = stm32_hash_read(hdev, HASH_CR);
510         for (i = 0; i < swap_reg; i++)
511                 *preg++ = stm32_hash_read(hdev, HASH_CSR(i));
512
513         state->flags |= HASH_FLAGS_INIT;
514
515         return err;
516 }
517
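/*
 * Queue one scatterlist entry as a mem-to-dev slave transfer to HASH_DIN.
 * When the IP supports it, MDMAT (multiple DMA transfers) is kept set while
 * more data will follow and cleared for the last transfer; the call blocks
 * for up to 100 ms waiting for the DMA completion callback.
 */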
518 static int stm32_hash_xmit_dma(struct stm32_hash_dev *hdev,
519                                struct scatterlist *sg, int length, int mdma)
520 {
521         struct dma_async_tx_descriptor *in_desc;
522         dma_cookie_t cookie;
523         u32 reg;
524         int err;
525
526         in_desc = dmaengine_prep_slave_sg(hdev->dma_lch, sg, 1,
527                                           DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT |
528                                           DMA_CTRL_ACK);
529         if (!in_desc) {
530                 dev_err(hdev->dev, "dmaengine_prep_slave error\n");
531                 return -ENOMEM;
532         }
533
534         reinit_completion(&hdev->dma_completion);
535         in_desc->callback = stm32_hash_dma_callback;
536         in_desc->callback_param = hdev;
537
538         hdev->flags |= HASH_FLAGS_FINAL;
539         hdev->flags |= HASH_FLAGS_DMA_ACTIVE;
540
541         reg = stm32_hash_read(hdev, HASH_CR);
542
543         if (hdev->pdata->has_mdmat) {
544                 if (mdma)
545                         reg |= HASH_CR_MDMAT;
546                 else
547                         reg &= ~HASH_CR_MDMAT;
548         }
549         reg |= HASH_CR_DMAE;
550
551         stm32_hash_write(hdev, HASH_CR, reg);
552
553         stm32_hash_set_nblw(hdev, length);
554
555         cookie = dmaengine_submit(in_desc);
556         err = dma_submit_error(cookie);
557         if (err)
558                 return -ENOMEM;
559
560         dma_async_issue_pending(hdev->dma_lch);
561
562         if (!wait_for_completion_timeout(&hdev->dma_completion,
563                                          msecs_to_jiffies(100)))
564                 err = -ETIMEDOUT;
565
566         if (dma_async_is_tx_complete(hdev->dma_lch, cookie,
567                                      NULL, NULL) != DMA_COMPLETE)
568                 err = -ETIMEDOUT;
569
570         if (err) {
571                 dev_err(hdev->dev, "DMA Error %i\n", err);
572                 dmaengine_terminate_all(hdev->dma_lch);
573                 return err;
574         }
575
576         return -EINPROGRESS;
577 }
578
579 static void stm32_hash_dma_callback(void *param)
580 {
581         struct stm32_hash_dev *hdev = param;
582
583         complete(&hdev->dma_completion);
584 }
585
586 static int stm32_hash_hmac_dma_send(struct stm32_hash_dev *hdev)
587 {
588         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);
589         struct crypto_ahash *tfm = crypto_ahash_reqtfm(hdev->req);
590         struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);
591         int err;
592
593         if (ctx->keylen < rctx->state.blocklen || hdev->dma_mode == 1) {
594                 err = stm32_hash_write_key(hdev);
595                 if (stm32_hash_wait_busy(hdev))
596                         return -ETIMEDOUT;
597         } else {
598                 if (!(hdev->flags & HASH_FLAGS_HMAC_KEY))
599                         sg_init_one(&rctx->sg_key, ctx->key,
600                                     ALIGN(ctx->keylen, sizeof(u32)));
601
602                 rctx->dma_ct = dma_map_sg(hdev->dev, &rctx->sg_key, 1,
603                                           DMA_TO_DEVICE);
604                 if (rctx->dma_ct == 0) {
605                         dev_err(hdev->dev, "dma_map_sg error\n");
606                         return -ENOMEM;
607                 }
608
609                 err = stm32_hash_xmit_dma(hdev, &rctx->sg_key, ctx->keylen, 0);
610
611                 dma_unmap_sg(hdev->dev, &rctx->sg_key, 1, DMA_TO_DEVICE);
612         }
613
614         return err;
615 }
616
617 static int stm32_hash_dma_init(struct stm32_hash_dev *hdev)
618 {
619         struct dma_slave_config dma_conf;
620         struct dma_chan *chan;
621         int err;
622
623         memset(&dma_conf, 0, sizeof(dma_conf));
624
625         dma_conf.direction = DMA_MEM_TO_DEV;
626         dma_conf.dst_addr = hdev->phys_base + HASH_DIN;
627         dma_conf.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
628         dma_conf.src_maxburst = HASH_BURST_LEVEL;
629         dma_conf.dst_maxburst = HASH_BURST_LEVEL;
630         dma_conf.device_fc = false;
631
632         chan = dma_request_chan(hdev->dev, "in");
633         if (IS_ERR(chan))
634                 return PTR_ERR(chan);
635
636         hdev->dma_lch = chan;
637
638         err = dmaengine_slave_config(hdev->dma_lch, &dma_conf);
639         if (err) {
640                 dma_release_channel(hdev->dma_lch);
641                 hdev->dma_lch = NULL;
642                 dev_err(hdev->dev, "Couldn't configure DMA slave.\n");
643                 return err;
644         }
645
646         init_completion(&hdev->dma_completion);
647
648         return 0;
649 }
650
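/*
 * DMA send path: walk the source scatterlist and push each entry through
 * stm32_hash_xmit_dma(). For the last entry, either round the length up to
 * a word boundary or, when dma_mode == 1, trim it to a multiple of 16 bytes
 * and write the remaining tail bytes (ncp) to HASH_DIN by CPU before
 * setting NBLW and DCAL. In HMAC mode the key is sent before and after the
 * message data.
 */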
651 static int stm32_hash_dma_send(struct stm32_hash_dev *hdev)
652 {
653         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);
654         u32 *buffer = (void *)rctx->state.buffer;
655         struct scatterlist sg[1], *tsg;
656         int err = 0, reg, ncp = 0;
657         unsigned int i, len = 0, bufcnt = 0;
658         bool is_last = false;
659
660         rctx->sg = hdev->req->src;
661         rctx->total = hdev->req->nbytes;
662
663         rctx->nents = sg_nents(rctx->sg);
664         if (rctx->nents < 0)
665                 return -EINVAL;
666
667         stm32_hash_write_ctrl(hdev);
668
669         if (hdev->flags & HASH_FLAGS_HMAC) {
670                 err = stm32_hash_hmac_dma_send(hdev);
671                 if (err != -EINPROGRESS)
672                         return err;
673         }
674
675         for_each_sg(rctx->sg, tsg, rctx->nents, i) {
676                 sg[0] = *tsg;
677                 len = sg->length;
678
679                 if (sg_is_last(sg) || (bufcnt + sg[0].length) >= rctx->total) {
680                         sg->length = rctx->total - bufcnt;
681                         is_last = true;
682                         if (hdev->dma_mode == 1) {
683                                 len = (ALIGN(sg->length, 16) - 16);
684
685                                 ncp = sg_pcopy_to_buffer(
686                                         rctx->sg, rctx->nents,
687                                         rctx->state.buffer, sg->length - len,
688                                         rctx->total - sg->length + len);
689
690                                 sg->length = len;
691                         } else {
692                                 if (!(IS_ALIGNED(sg->length, sizeof(u32)))) {
693                                         len = sg->length;
694                                         sg->length = ALIGN(sg->length,
695                                                            sizeof(u32));
696                                 }
697                         }
698                 }
699
700                 rctx->dma_ct = dma_map_sg(hdev->dev, sg, 1,
701                                           DMA_TO_DEVICE);
702                 if (rctx->dma_ct == 0) {
703                         dev_err(hdev->dev, "dma_map_sg error\n");
704                         return -ENOMEM;
705                 }
706
707                 err = stm32_hash_xmit_dma(hdev, sg, len, !is_last);
708
709                 bufcnt += sg[0].length;
710                 dma_unmap_sg(hdev->dev, sg, 1, DMA_TO_DEVICE);
711
712                 if (err == -ENOMEM)
713                         return err;
714                 if (is_last)
715                         break;
716         }
717
718         if (hdev->dma_mode == 1) {
719                 if (stm32_hash_wait_busy(hdev))
720                         return -ETIMEDOUT;
721                 reg = stm32_hash_read(hdev, HASH_CR);
722                 reg &= ~HASH_CR_DMAE;
723                 reg |= HASH_CR_DMAA;
724                 stm32_hash_write(hdev, HASH_CR, reg);
725
726                 if (ncp) {
727                         memset((u8 *)buffer + ncp, 0,
728                                ALIGN(ncp, sizeof(u32)) - ncp);
729                         writesl(hdev->io_base + HASH_DIN, buffer,
730                                 DIV_ROUND_UP(ncp, sizeof(u32)));
731                 }
732                 stm32_hash_set_nblw(hdev, ncp);
733                 reg = stm32_hash_read(hdev, HASH_STR);
734                 reg |= HASH_STR_DCAL;
735                 stm32_hash_write(hdev, HASH_STR, reg);
736                 err = -EINPROGRESS;
737         }
738
739         if (hdev->flags & HASH_FLAGS_HMAC) {
740                 if (stm32_hash_wait_busy(hdev))
741                         return -ETIMEDOUT;
742                 err = stm32_hash_hmac_dma_send(hdev);
743         }
744
745         return err;
746 }
747
748 static struct stm32_hash_dev *stm32_hash_find_dev(struct stm32_hash_ctx *ctx)
749 {
750         struct stm32_hash_dev *hdev = NULL, *tmp;
751
752         spin_lock_bh(&stm32_hash.lock);
753         if (!ctx->hdev) {
754                 list_for_each_entry(tmp, &stm32_hash.dev_list, list) {
755                         hdev = tmp;
756                         break;
757                 }
758                 ctx->hdev = hdev;
759         } else {
760                 hdev = ctx->hdev;
761         }
762
763         spin_unlock_bh(&stm32_hash.lock);
764
765         return hdev;
766 }
767
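/*
 * DMA is only used when the request is larger than one block, the source
 * scatterlist entries have 32-bit aligned lengths (except possibly the
 * last one), the first entry starts on a word boundary and, for
 * dma_mode == 1, the data fits in a single scatterlist entry.
 */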
768 static bool stm32_hash_dma_aligned_data(struct ahash_request *req)
769 {
770         struct scatterlist *sg;
771         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
772         struct stm32_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
773         struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
774         int i;
775
776         if (!hdev->dma_lch || req->nbytes <= rctx->state.blocklen)
777                 return false;
778
779         if (sg_nents(req->src) > 1) {
780                 if (hdev->dma_mode == 1)
781                         return false;
782                 for_each_sg(req->src, sg, sg_nents(req->src), i) {
783                         if ((!IS_ALIGNED(sg->length, sizeof(u32))) &&
784                             (!sg_is_last(sg)))
785                                 return false;
786                 }
787         }
788
789         if (req->src->offset % 4)
790                 return false;
791
792         return true;
793 }
794
795 static int stm32_hash_init(struct ahash_request *req)
796 {
797         struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
798         struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);
799         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
800         struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
801         struct stm32_hash_state *state = &rctx->state;
802         bool sha3_mode = ctx->flags & HASH_FLAGS_SHA3_MODE;
803
804         rctx->hdev = hdev;
805
806         state->flags = HASH_FLAGS_CPU;
807
808         if (sha3_mode)
809                 state->flags |= HASH_FLAGS_SHA3_MODE;
810
811         rctx->digcnt = crypto_ahash_digestsize(tfm);
812         switch (rctx->digcnt) {
813         case MD5_DIGEST_SIZE:
814                 state->flags |= HASH_MD5 << HASH_FLAGS_ALGO_SHIFT;
815                 break;
816         case SHA1_DIGEST_SIZE:
817                 if (hdev->pdata->ux500)
818                         state->flags |= HASH_SHA1_UX500 << HASH_FLAGS_ALGO_SHIFT;
819                 else
820                         state->flags |= HASH_SHA1 << HASH_FLAGS_ALGO_SHIFT;
821                 break;
822         case SHA224_DIGEST_SIZE:
823                 if (sha3_mode)
824                         state->flags |= HASH_SHA3_224 << HASH_FLAGS_ALGO_SHIFT;
825                 else
826                         state->flags |= HASH_SHA224 << HASH_FLAGS_ALGO_SHIFT;
827                 break;
828         case SHA256_DIGEST_SIZE:
829                 if (sha3_mode) {
830                         state->flags |= HASH_SHA3_256 << HASH_FLAGS_ALGO_SHIFT;
831                 } else {
832                         if (hdev->pdata->ux500)
833                                 state->flags |= HASH_SHA256_UX500 << HASH_FLAGS_ALGO_SHIFT;
834                         else
835                                 state->flags |= HASH_SHA256 << HASH_FLAGS_ALGO_SHIFT;
836                 }
837                 break;
838         case SHA384_DIGEST_SIZE:
839                 if (sha3_mode)
840                         state->flags |= HASH_SHA3_384 << HASH_FLAGS_ALGO_SHIFT;
841                 else
842                         state->flags |= HASH_SHA384 << HASH_FLAGS_ALGO_SHIFT;
843                 break;
844         case SHA512_DIGEST_SIZE:
845                 if (sha3_mode)
846                         state->flags |= HASH_SHA3_512 << HASH_FLAGS_ALGO_SHIFT;
847                 else
848                         state->flags |= HASH_SHA512 << HASH_FLAGS_ALGO_SHIFT;
849                 break;
850         default:
851                 return -EINVAL;
852         }
853
854         rctx->state.bufcnt = 0;
855         rctx->state.blocklen = crypto_ahash_blocksize(tfm) + sizeof(u32);
856         if (rctx->state.blocklen > HASH_BUFLEN) {
857                 dev_err(hdev->dev, "Error, block too large");
858                 return -EINVAL;
859         }
860         rctx->total = 0;
861         rctx->offset = 0;
862         rctx->data_type = HASH_DATA_8_BITS;
863
864         if (ctx->flags & HASH_FLAGS_HMAC)
865                 state->flags |= HASH_FLAGS_HMAC;
866
867         dev_dbg(hdev->dev, "%s Flags %x\n", __func__, state->flags);
868
869         return 0;
870 }
871
872 static int stm32_hash_update_req(struct stm32_hash_dev *hdev)
873 {
874         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);
875         struct stm32_hash_state *state = &rctx->state;
876
877         if (!(state->flags & HASH_FLAGS_CPU))
878                 return stm32_hash_dma_send(hdev);
879
880         return stm32_hash_update_cpu(hdev);
881 }
882
883 static int stm32_hash_final_req(struct stm32_hash_dev *hdev)
884 {
885         struct ahash_request *req = hdev->req;
886         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
887         struct stm32_hash_state *state = &rctx->state;
888         int buflen = state->bufcnt;
889
890         if (state->flags & HASH_FLAGS_FINUP)
891                 return stm32_hash_update_req(hdev);
892
893         state->bufcnt = 0;
894
895         return stm32_hash_xmit_cpu(hdev, state->buffer, buflen, 1);
896 }
897
898 static void stm32_hash_emptymsg_fallback(struct ahash_request *req)
899 {
900         struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
901         struct stm32_hash_ctx *ctx = crypto_ahash_ctx(ahash);
902         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
903         struct stm32_hash_dev *hdev = rctx->hdev;
904         int ret;
905
906         dev_dbg(hdev->dev, "use fallback message size 0 key size %d\n",
907                 ctx->keylen);
908
909         if (!ctx->xtfm) {
910                 dev_err(hdev->dev, "no fallback engine\n");
911                 return;
912         }
913
914         if (ctx->keylen) {
915                 ret = crypto_shash_setkey(ctx->xtfm, ctx->key, ctx->keylen);
916                 if (ret) {
917                         dev_err(hdev->dev, "failed to set key ret=%d\n", ret);
918                         return;
919                 }
920         }
921
922         ret = crypto_shash_tfm_digest(ctx->xtfm, NULL, 0, rctx->digest);
923         if (ret)
924                 dev_err(hdev->dev, "shash digest error\n");
925 }
926
927 static void stm32_hash_copy_hash(struct ahash_request *req)
928 {
929         struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
930         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
931         struct stm32_hash_state *state = &rctx->state;
932         struct stm32_hash_dev *hdev = rctx->hdev;
933         __be32 *hash = (void *)rctx->digest;
934         unsigned int i, hashsize;
935
936         if (hdev->pdata->broken_emptymsg && (state->flags & HASH_FLAGS_EMPTY))
937                 return stm32_hash_emptymsg_fallback(req);
938
939         hashsize = crypto_ahash_digestsize(tfm);
940
941         for (i = 0; i < hashsize / sizeof(u32); i++) {
942                 if (hdev->pdata->ux500)
943                         hash[i] = cpu_to_be32(stm32_hash_read(hdev,
944                                               HASH_UX500_HREG(i)));
945                 else
946                         hash[i] = cpu_to_be32(stm32_hash_read(hdev,
947                                               HASH_HREG(i)));
948         }
949 }
950
951 static int stm32_hash_finish(struct ahash_request *req)
952 {
953         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
954         u32 reg;
955
956         reg = stm32_hash_read(rctx->hdev, HASH_SR);
957         reg &= ~HASH_SR_OUTPUT_READY;
958         stm32_hash_write(rctx->hdev, HASH_SR, reg);
959
960         if (!req->result)
961                 return -EINVAL;
962
963         memcpy(req->result, rctx->digest, rctx->digcnt);
964
965         return 0;
966 }
967
968 static void stm32_hash_finish_req(struct ahash_request *req, int err)
969 {
970         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
971         struct stm32_hash_dev *hdev = rctx->hdev;
972
973         if (!err && (HASH_FLAGS_FINAL & hdev->flags)) {
974                 stm32_hash_copy_hash(req);
975                 err = stm32_hash_finish(req);
976         }
977
978         pm_runtime_mark_last_busy(hdev->dev);
979         pm_runtime_put_autosuspend(hdev->dev);
980
981         crypto_finalize_hash_request(hdev->engine, req, err);
982 }
983
984 static int stm32_hash_handle_queue(struct stm32_hash_dev *hdev,
985                                    struct ahash_request *req)
986 {
987         return crypto_transfer_hash_request_to_engine(hdev->engine, req);
988 }
989
990 static int stm32_hash_one_request(struct crypto_engine *engine, void *areq)
991 {
992         struct ahash_request *req = container_of(areq, struct ahash_request,
993                                                  base);
994         struct stm32_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
995         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
996         struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
997         struct stm32_hash_state *state = &rctx->state;
998         int swap_reg;
999         int err = 0;
1000
1001         if (!hdev)
1002                 return -ENODEV;
1003
1004         dev_dbg(hdev->dev, "processing new req, op: %lu, nbytes %d\n",
1005                 rctx->op, req->nbytes);
1006
1007         pm_runtime_get_sync(hdev->dev);
1008
1009         hdev->req = req;
1010         hdev->flags = 0;
1011         swap_reg = hash_swap_reg(rctx);
1012
1013         if (state->flags & HASH_FLAGS_INIT) {
1014                 u32 *preg = rctx->state.hw_context;
1015                 u32 reg;
1016                 int i;
1017
1018                 if (!hdev->pdata->ux500)
1019                         stm32_hash_write(hdev, HASH_IMR, *preg++);
1020                 stm32_hash_write(hdev, HASH_STR, *preg++);
1021                 stm32_hash_write(hdev, HASH_CR, *preg);
1022                 reg = *preg++ | HASH_CR_INIT;
1023                 stm32_hash_write(hdev, HASH_CR, reg);
1024
1025                 for (i = 0; i < swap_reg; i++)
1026                         stm32_hash_write(hdev, HASH_CSR(i), *preg++);
1027
1028                 hdev->flags |= HASH_FLAGS_INIT;
1029
1030                 if (state->flags & HASH_FLAGS_HMAC)
1031                         hdev->flags |= HASH_FLAGS_HMAC |
1032                                        HASH_FLAGS_HMAC_KEY;
1033         }
1034
1035         if (rctx->op == HASH_OP_UPDATE)
1036                 err = stm32_hash_update_req(hdev);
1037         else if (rctx->op == HASH_OP_FINAL)
1038                 err = stm32_hash_final_req(hdev);
1039
1040         /* If we have an IRQ, wait for that, else poll for completion */
1041         if (err == -EINPROGRESS && hdev->polled) {
1042                 if (stm32_hash_wait_busy(hdev))
1043                         err = -ETIMEDOUT;
1044                 else {
1045                         hdev->flags |= HASH_FLAGS_OUTPUT_READY;
1046                         err = 0;
1047                 }
1048         }
1049
1050         if (err != -EINPROGRESS)
1051         /* the completion path will not finish it, so do it here */
1052                 stm32_hash_finish_req(req, err);
1053
1054         return 0;
1055 }
1056
1057 static int stm32_hash_enqueue(struct ahash_request *req, unsigned int op)
1058 {
1059         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
1060         struct stm32_hash_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1061         struct stm32_hash_dev *hdev = ctx->hdev;
1062
1063         rctx->op = op;
1064
1065         return stm32_hash_handle_queue(hdev, req);
1066 }
1067
1068 static int stm32_hash_update(struct ahash_request *req)
1069 {
1070         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
1071         struct stm32_hash_state *state = &rctx->state;
1072
1073         if (!req->nbytes || !(state->flags & HASH_FLAGS_CPU))
1074                 return 0;
1075
1076         rctx->total = req->nbytes;
1077         rctx->sg = req->src;
1078         rctx->offset = 0;
1079
1080         if ((state->bufcnt + rctx->total < state->blocklen)) {
1081                 stm32_hash_append_sg(rctx);
1082                 return 0;
1083         }
1084
1085         return stm32_hash_enqueue(req, HASH_OP_UPDATE);
1086 }
1087
1088 static int stm32_hash_final(struct ahash_request *req)
1089 {
1090         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
1091         struct stm32_hash_state *state = &rctx->state;
1092
1093         state->flags |= HASH_FLAGS_FINAL;
1094
1095         return stm32_hash_enqueue(req, HASH_OP_FINAL);
1096 }
1097
1098 static int stm32_hash_finup(struct ahash_request *req)
1099 {
1100         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
1101         struct stm32_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
1102         struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
1103         struct stm32_hash_state *state = &rctx->state;
1104
1105         if (!req->nbytes)
1106                 goto out;
1107
1108         state->flags |= HASH_FLAGS_FINUP;
1109         rctx->total = req->nbytes;
1110         rctx->sg = req->src;
1111         rctx->offset = 0;
1112
1113         if (hdev->dma_lch && stm32_hash_dma_aligned_data(req))
1114                 state->flags &= ~HASH_FLAGS_CPU;
1115
1116 out:
1117         return stm32_hash_final(req);
1118 }
1119
1120 static int stm32_hash_digest(struct ahash_request *req)
1121 {
1122         return stm32_hash_init(req) ?: stm32_hash_finup(req);
1123 }
1124
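/*
 * Export/import simply copy struct stm32_hash_state, which contains the
 * buffered partial block and the saved hardware context (hw_context), so a
 * partially hashed request can be serialized and resumed later, possibly
 * on another transform.
 */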
1125 static int stm32_hash_export(struct ahash_request *req, void *out)
1126 {
1127         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
1128
1129         memcpy(out, &rctx->state, sizeof(rctx->state));
1130
1131         return 0;
1132 }
1133
1134 static int stm32_hash_import(struct ahash_request *req, const void *in)
1135 {
1136         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
1137
1138         stm32_hash_init(req);
1139         memcpy(&rctx->state, in, sizeof(rctx->state));
1140
1141         return 0;
1142 }
1143
1144 static int stm32_hash_setkey(struct crypto_ahash *tfm,
1145                              const u8 *key, unsigned int keylen)
1146 {
1147         struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);
1148
1149         if (keylen <= HASH_MAX_KEY_SIZE) {
1150                 memcpy(ctx->key, key, keylen);
1151                 ctx->keylen = keylen;
1152         } else {
1153                 return -ENOMEM;
1154         }
1155
1156         return 0;
1157 }
1158
1159 static int stm32_hash_init_fallback(struct crypto_tfm *tfm)
1160 {
1161         struct stm32_hash_ctx *ctx = crypto_tfm_ctx(tfm);
1162         struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
1163         const char *name = crypto_tfm_alg_name(tfm);
1164         struct crypto_shash *xtfm;
1165
1166         /* The fallback is only needed on Ux500 */
1167         if (!hdev->pdata->ux500)
1168                 return 0;
1169
1170         xtfm = crypto_alloc_shash(name, 0, CRYPTO_ALG_NEED_FALLBACK);
1171         if (IS_ERR(xtfm)) {
1172                 dev_err(hdev->dev, "failed to allocate %s fallback\n",
1173                         name);
1174                 return PTR_ERR(xtfm);
1175         }
1176         dev_info(hdev->dev, "allocated %s fallback\n", name);
1177         ctx->xtfm = xtfm;
1178
1179         return 0;
1180 }
1181
1182 static int stm32_hash_cra_init_algs(struct crypto_tfm *tfm, u32 algs_flags)
1183 {
1184         struct stm32_hash_ctx *ctx = crypto_tfm_ctx(tfm);
1185
1186         crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
1187                                  sizeof(struct stm32_hash_request_ctx));
1188
1189         ctx->keylen = 0;
1190
1191         if (algs_flags)
1192                 ctx->flags |= algs_flags;
1193
1194         return stm32_hash_init_fallback(tfm);
1195 }
1196
1197 static int stm32_hash_cra_init(struct crypto_tfm *tfm)
1198 {
1199         return stm32_hash_cra_init_algs(tfm, 0);
1200 }
1201
1202 static int stm32_hash_cra_hmac_init(struct crypto_tfm *tfm)
1203 {
1204         return stm32_hash_cra_init_algs(tfm, HASH_FLAGS_HMAC);
1205 }
1206
1207 static int stm32_hash_cra_sha3_init(struct crypto_tfm *tfm)
1208 {
1209         return stm32_hash_cra_init_algs(tfm, HASH_FLAGS_SHA3_MODE);
1210 }
1211
1212 static int stm32_hash_cra_sha3_hmac_init(struct crypto_tfm *tfm)
1213 {
1214         return stm32_hash_cra_init_algs(tfm, HASH_FLAGS_SHA3_MODE |
1215                                         HASH_FLAGS_HMAC);
1216 }
1217
1218
1219 static void stm32_hash_cra_exit(struct crypto_tfm *tfm)
1220 {
1221         struct stm32_hash_ctx *ctx = crypto_tfm_ctx(tfm);
1222
1223         if (ctx->xtfm)
1224                 crypto_free_shash(ctx->xtfm);
1225 }
1226
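/*
 * Interrupt handling is split in two: stm32_hash_irq_handler() (hard IRQ)
 * checks HASH_SR_OUTPUT_READY, masks further interrupts and wakes the
 * threaded handler below, which finalizes the current request.
 */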
1227 static irqreturn_t stm32_hash_irq_thread(int irq, void *dev_id)
1228 {
1229         struct stm32_hash_dev *hdev = dev_id;
1230
1231         if (HASH_FLAGS_CPU & hdev->flags) {
1232                 if (HASH_FLAGS_OUTPUT_READY & hdev->flags) {
1233                         hdev->flags &= ~HASH_FLAGS_OUTPUT_READY;
1234                         goto finish;
1235                 }
1236         } else if (HASH_FLAGS_DMA_ACTIVE & hdev->flags) {
1237                 hdev->flags &= ~HASH_FLAGS_DMA_ACTIVE;
1238                 goto finish;
1239         }
1240
1241         return IRQ_HANDLED;
1242
1243 finish:
1244         /* Finish current request */
1245         stm32_hash_finish_req(hdev->req, 0);
1246
1247         return IRQ_HANDLED;
1248 }
1249
1250 static irqreturn_t stm32_hash_irq_handler(int irq, void *dev_id)
1251 {
1252         struct stm32_hash_dev *hdev = dev_id;
1253         u32 reg;
1254
1255         reg = stm32_hash_read(hdev, HASH_SR);
1256         if (reg & HASH_SR_OUTPUT_READY) {
1257                 hdev->flags |= HASH_FLAGS_OUTPUT_READY;
1258                 /* Disable interrupt */
1259                 stm32_hash_write(hdev, HASH_IMR, 0);
1260                 return IRQ_WAKE_THREAD;
1261         }
1262
1263         return IRQ_NONE;
1264 }
1265
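/*
 * The tables below register the hardware implementations with the crypto
 * API. Illustrative sketch only (not part of this driver): a kernel user
 * reaches them through the generic ahash interface, e.g. for "sha256",
 * and gets this driver whenever its priority (200) wins:
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *	struct crypto_ahash *tfm = crypto_alloc_ahash("sha256", 0, 0);
 *	struct ahash_request *req = ahash_request_alloc(tfm, GFP_KERNEL);
 *
 *	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				   crypto_req_done, &wait);
 *	ahash_request_set_crypt(req, src_sg, digest, nbytes);
 *	err = crypto_wait_req(crypto_ahash_digest(req), &wait);
 *
 *	ahash_request_free(req);
 *	crypto_free_ahash(tfm);
 *
 * src_sg, digest and nbytes are caller-provided; error handling omitted.
 */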
1266 static struct ahash_engine_alg algs_md5[] = {
1267         {
1268                 .base.init = stm32_hash_init,
1269                 .base.update = stm32_hash_update,
1270                 .base.final = stm32_hash_final,
1271                 .base.finup = stm32_hash_finup,
1272                 .base.digest = stm32_hash_digest,
1273                 .base.export = stm32_hash_export,
1274                 .base.import = stm32_hash_import,
1275                 .base.halg = {
1276                         .digestsize = MD5_DIGEST_SIZE,
1277                         .statesize = sizeof(struct stm32_hash_state),
1278                         .base = {
1279                                 .cra_name = "md5",
1280                                 .cra_driver_name = "stm32-md5",
1281                                 .cra_priority = 200,
1282                                 .cra_flags = CRYPTO_ALG_ASYNC |
1283                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1284                                 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
1285                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1286                                 .cra_init = stm32_hash_cra_init,
1287                                 .cra_exit = stm32_hash_cra_exit,
1288                                 .cra_module = THIS_MODULE,
1289                         }
1290                 },
1291                 .op = {
1292                         .do_one_request = stm32_hash_one_request,
1293                 },
1294         },
1295         {
1296                 .base.init = stm32_hash_init,
1297                 .base.update = stm32_hash_update,
1298                 .base.final = stm32_hash_final,
1299                 .base.finup = stm32_hash_finup,
1300                 .base.digest = stm32_hash_digest,
1301                 .base.export = stm32_hash_export,
1302                 .base.import = stm32_hash_import,
1303                 .base.setkey = stm32_hash_setkey,
1304                 .base.halg = {
1305                         .digestsize = MD5_DIGEST_SIZE,
1306                         .statesize = sizeof(struct stm32_hash_state),
1307                         .base = {
1308                                 .cra_name = "hmac(md5)",
1309                                 .cra_driver_name = "stm32-hmac-md5",
1310                                 .cra_priority = 200,
1311                                 .cra_flags = CRYPTO_ALG_ASYNC |
1312                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1313                                 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
1314                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1315                                 .cra_init = stm32_hash_cra_hmac_init,
1316                                 .cra_exit = stm32_hash_cra_exit,
1317                                 .cra_module = THIS_MODULE,
1318                         }
1319                 },
1320                 .op = {
1321                         .do_one_request = stm32_hash_one_request,
1322                 },
1323         }
1324 };
1325
1326 static struct ahash_engine_alg algs_sha1[] = {
1327         {
1328                 .base.init = stm32_hash_init,
1329                 .base.update = stm32_hash_update,
1330                 .base.final = stm32_hash_final,
1331                 .base.finup = stm32_hash_finup,
1332                 .base.digest = stm32_hash_digest,
1333                 .base.export = stm32_hash_export,
1334                 .base.import = stm32_hash_import,
1335                 .base.halg = {
1336                         .digestsize = SHA1_DIGEST_SIZE,
1337                         .statesize = sizeof(struct stm32_hash_state),
1338                         .base = {
1339                                 .cra_name = "sha1",
1340                                 .cra_driver_name = "stm32-sha1",
1341                                 .cra_priority = 200,
1342                                 .cra_flags = CRYPTO_ALG_ASYNC |
1343                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1344                                 .cra_blocksize = SHA1_BLOCK_SIZE,
1345                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1346                                 .cra_init = stm32_hash_cra_init,
1347                                 .cra_exit = stm32_hash_cra_exit,
1348                                 .cra_module = THIS_MODULE,
1349                         }
1350                 },
1351                 .op = {
1352                         .do_one_request = stm32_hash_one_request,
1353                 },
1354         },
1355         {
1356                 .base.init = stm32_hash_init,
1357                 .base.update = stm32_hash_update,
1358                 .base.final = stm32_hash_final,
1359                 .base.finup = stm32_hash_finup,
1360                 .base.digest = stm32_hash_digest,
1361                 .base.export = stm32_hash_export,
1362                 .base.import = stm32_hash_import,
1363                 .base.setkey = stm32_hash_setkey,
1364                 .base.halg = {
1365                         .digestsize = SHA1_DIGEST_SIZE,
1366                         .statesize = sizeof(struct stm32_hash_state),
1367                         .base = {
1368                                 .cra_name = "hmac(sha1)",
1369                                 .cra_driver_name = "stm32-hmac-sha1",
1370                                 .cra_priority = 200,
1371                                 .cra_flags = CRYPTO_ALG_ASYNC |
1372                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1373                                 .cra_blocksize = SHA1_BLOCK_SIZE,
1374                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1375                                 .cra_init = stm32_hash_cra_hmac_init,
1376                                 .cra_exit = stm32_hash_cra_exit,
1377                                 .cra_module = THIS_MODULE,
1378                         }
1379                 },
1380                 .op = {
1381                         .do_one_request = stm32_hash_one_request,
1382                 },
1383         },
1384 };
1385
1386 static struct ahash_engine_alg algs_sha224[] = {
1387         {
1388                 .base.init = stm32_hash_init,
1389                 .base.update = stm32_hash_update,
1390                 .base.final = stm32_hash_final,
1391                 .base.finup = stm32_hash_finup,
1392                 .base.digest = stm32_hash_digest,
1393                 .base.export = stm32_hash_export,
1394                 .base.import = stm32_hash_import,
1395                 .base.halg = {
1396                         .digestsize = SHA224_DIGEST_SIZE,
1397                         .statesize = sizeof(struct stm32_hash_state),
1398                         .base = {
1399                                 .cra_name = "sha224",
1400                                 .cra_driver_name = "stm32-sha224",
1401                                 .cra_priority = 200,
1402                                 .cra_flags = CRYPTO_ALG_ASYNC |
1403                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1404                                 .cra_blocksize = SHA224_BLOCK_SIZE,
1405                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1406                                 .cra_init = stm32_hash_cra_init,
1407                                 .cra_exit = stm32_hash_cra_exit,
1408                                 .cra_module = THIS_MODULE,
1409                         }
1410                 },
1411                 .op = {
1412                         .do_one_request = stm32_hash_one_request,
1413                 },
1414         },
1415         {
1416                 .base.init = stm32_hash_init,
1417                 .base.update = stm32_hash_update,
1418                 .base.final = stm32_hash_final,
1419                 .base.finup = stm32_hash_finup,
1420                 .base.digest = stm32_hash_digest,
1421                 .base.setkey = stm32_hash_setkey,
1422                 .base.export = stm32_hash_export,
1423                 .base.import = stm32_hash_import,
1424                 .base.halg = {
1425                         .digestsize = SHA224_DIGEST_SIZE,
1426                         .statesize = sizeof(struct stm32_hash_state),
1427                         .base = {
1428                                 .cra_name = "hmac(sha224)",
1429                                 .cra_driver_name = "stm32-hmac-sha224",
1430                                 .cra_priority = 200,
1431                                 .cra_flags = CRYPTO_ALG_ASYNC |
1432                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1433                                 .cra_blocksize = SHA224_BLOCK_SIZE,
1434                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1435                                 .cra_init = stm32_hash_cra_hmac_init,
1436                                 .cra_exit = stm32_hash_cra_exit,
1437                                 .cra_module = THIS_MODULE,
1438                         }
1439                 },
1440                 .op = {
1441                         .do_one_request = stm32_hash_one_request,
1442                 },
1443         },
1444 };
1445
1446 static struct ahash_engine_alg algs_sha256[] = {
1447         {
1448                 .base.init = stm32_hash_init,
1449                 .base.update = stm32_hash_update,
1450                 .base.final = stm32_hash_final,
1451                 .base.finup = stm32_hash_finup,
1452                 .base.digest = stm32_hash_digest,
1453                 .base.export = stm32_hash_export,
1454                 .base.import = stm32_hash_import,
1455                 .base.halg = {
1456                         .digestsize = SHA256_DIGEST_SIZE,
1457                         .statesize = sizeof(struct stm32_hash_state),
1458                         .base = {
1459                                 .cra_name = "sha256",
1460                                 .cra_driver_name = "stm32-sha256",
1461                                 .cra_priority = 200,
1462                                 .cra_flags = CRYPTO_ALG_ASYNC |
1463                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1464                                 .cra_blocksize = SHA256_BLOCK_SIZE,
1465                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1466                                 .cra_init = stm32_hash_cra_init,
1467                                 .cra_exit = stm32_hash_cra_exit,
1468                                 .cra_module = THIS_MODULE,
1469                         }
1470                 },
1471                 .op = {
1472                         .do_one_request = stm32_hash_one_request,
1473                 },
1474         },
1475         {
1476                 .base.init = stm32_hash_init,
1477                 .base.update = stm32_hash_update,
1478                 .base.final = stm32_hash_final,
1479                 .base.finup = stm32_hash_finup,
1480                 .base.digest = stm32_hash_digest,
1481                 .base.export = stm32_hash_export,
1482                 .base.import = stm32_hash_import,
1483                 .base.setkey = stm32_hash_setkey,
1484                 .base.halg = {
1485                         .digestsize = SHA256_DIGEST_SIZE,
1486                         .statesize = sizeof(struct stm32_hash_state),
1487                         .base = {
1488                                 .cra_name = "hmac(sha256)",
1489                                 .cra_driver_name = "stm32-hmac-sha256",
1490                                 .cra_priority = 200,
1491                                 .cra_flags = CRYPTO_ALG_ASYNC |
1492                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1493                                 .cra_blocksize = SHA256_BLOCK_SIZE,
1494                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1495                                 .cra_init = stm32_hash_cra_hmac_init,
1496                                 .cra_exit = stm32_hash_cra_exit,
1497                                 .cra_module = THIS_MODULE,
1498                         }
1499                 },
1500                 .op = {
1501                         .do_one_request = stm32_hash_one_request,
1502                 },
1503         },
1504 };
1505
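/*
 * SHA-384/SHA-512 and their HMAC variants. This table is only referenced by
 * the stm32mp13 algs_info list below.
 */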
1506 static struct ahash_engine_alg algs_sha384_sha512[] = {
1507         {
1508                 .base.init = stm32_hash_init,
1509                 .base.update = stm32_hash_update,
1510                 .base.final = stm32_hash_final,
1511                 .base.finup = stm32_hash_finup,
1512                 .base.digest = stm32_hash_digest,
1513                 .base.export = stm32_hash_export,
1514                 .base.import = stm32_hash_import,
1515                 .base.halg = {
1516                         .digestsize = SHA384_DIGEST_SIZE,
1517                         .statesize = sizeof(struct stm32_hash_state),
1518                         .base = {
1519                                 .cra_name = "sha384",
1520                                 .cra_driver_name = "stm32-sha384",
1521                                 .cra_priority = 200,
1522                                 .cra_flags = CRYPTO_ALG_ASYNC |
1523                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1524                                 .cra_blocksize = SHA384_BLOCK_SIZE,
1525                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1526                                 .cra_init = stm32_hash_cra_init,
1527                                 .cra_exit = stm32_hash_cra_exit,
1528                                 .cra_module = THIS_MODULE,
1529                         }
1530                 },
1531                 .op = {
1532                         .do_one_request = stm32_hash_one_request,
1533                 },
1534         },
1535         {
1536                 .base.init = stm32_hash_init,
1537                 .base.update = stm32_hash_update,
1538                 .base.final = stm32_hash_final,
1539                 .base.finup = stm32_hash_finup,
1540                 .base.digest = stm32_hash_digest,
1541                 .base.setkey = stm32_hash_setkey,
1542                 .base.export = stm32_hash_export,
1543                 .base.import = stm32_hash_import,
1544                 .base.halg = {
1545                         .digestsize = SHA384_DIGEST_SIZE,
1546                         .statesize = sizeof(struct stm32_hash_state),
1547                         .base = {
1548                                 .cra_name = "hmac(sha384)",
1549                                 .cra_driver_name = "stm32-hmac-sha384",
1550                                 .cra_priority = 200,
1551                                 .cra_flags = CRYPTO_ALG_ASYNC |
1552                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1553                                 .cra_blocksize = SHA384_BLOCK_SIZE,
1554                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1555                                 .cra_init = stm32_hash_cra_hmac_init,
1556                                 .cra_exit = stm32_hash_cra_exit,
1557                                 .cra_module = THIS_MODULE,
1558                         }
1559                 },
1560                 .op = {
1561                         .do_one_request = stm32_hash_one_request,
1562                 },
1563         },
1564         {
1565                 .base.init = stm32_hash_init,
1566                 .base.update = stm32_hash_update,
1567                 .base.final = stm32_hash_final,
1568                 .base.finup = stm32_hash_finup,
1569                 .base.digest = stm32_hash_digest,
1570                 .base.export = stm32_hash_export,
1571                 .base.import = stm32_hash_import,
1572                 .base.halg = {
1573                         .digestsize = SHA512_DIGEST_SIZE,
1574                         .statesize = sizeof(struct stm32_hash_state),
1575                         .base = {
1576                                 .cra_name = "sha512",
1577                                 .cra_driver_name = "stm32-sha512",
1578                                 .cra_priority = 200,
1579                                 .cra_flags = CRYPTO_ALG_ASYNC |
1580                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1581                                 .cra_blocksize = SHA512_BLOCK_SIZE,
1582                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1583                                 .cra_init = stm32_hash_cra_init,
1584                                 .cra_exit = stm32_hash_cra_exit,
1585                                 .cra_module = THIS_MODULE,
1586                         }
1587                 },
1588                 .op = {
1589                         .do_one_request = stm32_hash_one_request,
1590                 },
1591         },
1592         {
1593                 .base.init = stm32_hash_init,
1594                 .base.update = stm32_hash_update,
1595                 .base.final = stm32_hash_final,
1596                 .base.finup = stm32_hash_finup,
1597                 .base.digest = stm32_hash_digest,
1598                 .base.export = stm32_hash_export,
1599                 .base.import = stm32_hash_import,
1600                 .base.setkey = stm32_hash_setkey,
1601                 .base.halg = {
1602                         .digestsize = SHA512_DIGEST_SIZE,
1603                         .statesize = sizeof(struct stm32_hash_state),
1604                         .base = {
1605                                 .cra_name = "hmac(sha512)",
1606                                 .cra_driver_name = "stm32-hmac-sha512",
1607                                 .cra_priority = 200,
1608                                 .cra_flags = CRYPTO_ALG_ASYNC |
1609                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1610                                 .cra_blocksize = SHA512_BLOCK_SIZE,
1611                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1612                                 .cra_init = stm32_hash_cra_hmac_init,
1613                                 .cra_exit = stm32_hash_cra_exit,
1614                                 .cra_module = THIS_MODULE,
1615                         }
1616                 },
1617                 .op = {
1618                         .do_one_request = stm32_hash_one_request,
1619                 },
1620         },
1621 };
1622
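/*
 * SHA3-224/256/384/512 and their HMAC variants, registered only through the
 * stm32mp13 algs_info list below; they use the SHA3-specific cra_init
 * callbacks (stm32_hash_cra_sha3_init / stm32_hash_cra_sha3_hmac_init).
 */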
1623 static struct ahash_engine_alg algs_sha3[] = {
1624         {
1625                 .base.init = stm32_hash_init,
1626                 .base.update = stm32_hash_update,
1627                 .base.final = stm32_hash_final,
1628                 .base.finup = stm32_hash_finup,
1629                 .base.digest = stm32_hash_digest,
1630                 .base.export = stm32_hash_export,
1631                 .base.import = stm32_hash_import,
1632                 .base.halg = {
1633                         .digestsize = SHA3_224_DIGEST_SIZE,
1634                         .statesize = sizeof(struct stm32_hash_state),
1635                         .base = {
1636                                 .cra_name = "sha3-224",
1637                                 .cra_driver_name = "stm32-sha3-224",
1638                                 .cra_priority = 200,
1639                                 .cra_flags = CRYPTO_ALG_ASYNC |
1640                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1641                                 .cra_blocksize = SHA3_224_BLOCK_SIZE,
1642                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1643                                 .cra_init = stm32_hash_cra_sha3_init,
1644                                 .cra_exit = stm32_hash_cra_exit,
1645                                 .cra_module = THIS_MODULE,
1646                         }
1647                 },
1648                 .op = {
1649                         .do_one_request = stm32_hash_one_request,
1650                 },
1651         },
1652         {
1653                 .base.init = stm32_hash_init,
1654                 .base.update = stm32_hash_update,
1655                 .base.final = stm32_hash_final,
1656                 .base.finup = stm32_hash_finup,
1657                 .base.digest = stm32_hash_digest,
1658                 .base.export = stm32_hash_export,
1659                 .base.import = stm32_hash_import,
1660                 .base.setkey = stm32_hash_setkey,
1661                 .base.halg = {
1662                         .digestsize = SHA3_224_DIGEST_SIZE,
1663                         .statesize = sizeof(struct stm32_hash_state),
1664                         .base = {
1665                                 .cra_name = "hmac(sha3-224)",
1666                                 .cra_driver_name = "stm32-hmac-sha3-224",
1667                                 .cra_priority = 200,
1668                                 .cra_flags = CRYPTO_ALG_ASYNC |
1669                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1670                                 .cra_blocksize = SHA3_224_BLOCK_SIZE,
1671                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1672                                 .cra_init = stm32_hash_cra_sha3_hmac_init,
1673                                 .cra_exit = stm32_hash_cra_exit,
1674                                 .cra_module = THIS_MODULE,
1675                         }
1676                 },
1677                 .op = {
1678                         .do_one_request = stm32_hash_one_request,
1679                 },
1680         },
1681         {
1682                 .base.init = stm32_hash_init,
1683                 .base.update = stm32_hash_update,
1684                 .base.final = stm32_hash_final,
1685                 .base.finup = stm32_hash_finup,
1686                 .base.digest = stm32_hash_digest,
1687                 .base.export = stm32_hash_export,
1688                 .base.import = stm32_hash_import,
1689                 .base.halg = {
1690                         .digestsize = SHA3_256_DIGEST_SIZE,
1691                         .statesize = sizeof(struct stm32_hash_state),
1692                         .base = {
1693                                 .cra_name = "sha3-256",
1694                                 .cra_driver_name = "stm32-sha3-256",
1695                                 .cra_priority = 200,
1696                                 .cra_flags = CRYPTO_ALG_ASYNC |
1697                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1698                                 .cra_blocksize = SHA3_256_BLOCK_SIZE,
1699                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1700                                 .cra_init = stm32_hash_cra_sha3_init,
1701                                 .cra_exit = stm32_hash_cra_exit,
1702                                 .cra_module = THIS_MODULE,
1703                         }
1704                 },
1705                 .op = {
1706                         .do_one_request = stm32_hash_one_request,
1707                 },
1708         },
1709         {
1710                 .base.init = stm32_hash_init,
1711                 .base.update = stm32_hash_update,
1712                 .base.final = stm32_hash_final,
1713                 .base.finup = stm32_hash_finup,
1714                 .base.digest = stm32_hash_digest,
1715                 .base.export = stm32_hash_export,
1716                 .base.import = stm32_hash_import,
1717                 .base.setkey = stm32_hash_setkey,
1718                 .base.halg = {
1719                         .digestsize = SHA3_256_DIGEST_SIZE,
1720                         .statesize = sizeof(struct stm32_hash_state),
1721                         .base = {
1722                                 .cra_name = "hmac(sha3-256)",
1723                                 .cra_driver_name = "stm32-hmac-sha3-256",
1724                                 .cra_priority = 200,
1725                                 .cra_flags = CRYPTO_ALG_ASYNC |
1726                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1727                                 .cra_blocksize = SHA3_256_BLOCK_SIZE,
1728                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1729                                 .cra_init = stm32_hash_cra_sha3_hmac_init,
1730                                 .cra_exit = stm32_hash_cra_exit,
1731                                 .cra_module = THIS_MODULE,
1732                         }
1733                 },
1734                 .op = {
1735                         .do_one_request = stm32_hash_one_request,
1736                 },
1737         },
1738         {
1739                 .base.init = stm32_hash_init,
1740                 .base.update = stm32_hash_update,
1741                 .base.final = stm32_hash_final,
1742                 .base.finup = stm32_hash_finup,
1743                 .base.digest = stm32_hash_digest,
1744                 .base.export = stm32_hash_export,
1745                 .base.import = stm32_hash_import,
1746                 .base.halg = {
1747                         .digestsize = SHA3_384_DIGEST_SIZE,
1748                         .statesize = sizeof(struct stm32_hash_state),
1749                         .base = {
1750                                 .cra_name = "sha3-384",
1751                                 .cra_driver_name = "stm32-sha3-384",
1752                                 .cra_priority = 200,
1753                                 .cra_flags = CRYPTO_ALG_ASYNC |
1754                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1755                                 .cra_blocksize = SHA3_384_BLOCK_SIZE,
1756                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1757                                 .cra_init = stm32_hash_cra_sha3_init,
1758                                 .cra_exit = stm32_hash_cra_exit,
1759                                 .cra_module = THIS_MODULE,
1760                         }
1761                 },
1762                 .op = {
1763                         .do_one_request = stm32_hash_one_request,
1764                 },
1765         },
1766         {
1767                 .base.init = stm32_hash_init,
1768                 .base.update = stm32_hash_update,
1769                 .base.final = stm32_hash_final,
1770                 .base.finup = stm32_hash_finup,
1771                 .base.digest = stm32_hash_digest,
1772                 .base.export = stm32_hash_export,
1773                 .base.import = stm32_hash_import,
1774                 .base.setkey = stm32_hash_setkey,
1775                 .base.halg = {
1776                         .digestsize = SHA3_384_DIGEST_SIZE,
1777                         .statesize = sizeof(struct stm32_hash_state),
1778                         .base = {
1779                                 .cra_name = "hmac(sha3-384)",
1780                                 .cra_driver_name = "stm32-hmac-sha3-384",
1781                                 .cra_priority = 200,
1782                                 .cra_flags = CRYPTO_ALG_ASYNC |
1783                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1784                                 .cra_blocksize = SHA3_384_BLOCK_SIZE,
1785                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1786                                 .cra_init = stm32_hash_cra_sha3_hmac_init,
1787                                 .cra_exit = stm32_hash_cra_exit,
1788                                 .cra_module = THIS_MODULE,
1789                         }
1790                 },
1791                 .op = {
1792                         .do_one_request = stm32_hash_one_request,
1793                 },
1794         },
1795         {
1796                 .base.init = stm32_hash_init,
1797                 .base.update = stm32_hash_update,
1798                 .base.final = stm32_hash_final,
1799                 .base.finup = stm32_hash_finup,
1800                 .base.digest = stm32_hash_digest,
1801                 .base.export = stm32_hash_export,
1802                 .base.import = stm32_hash_import,
1803                 .base.halg = {
1804                         .digestsize = SHA3_512_DIGEST_SIZE,
1805                         .statesize = sizeof(struct stm32_hash_state),
1806                         .base = {
1807                                 .cra_name = "sha3-512",
1808                                 .cra_driver_name = "stm32-sha3-512",
1809                                 .cra_priority = 200,
1810                                 .cra_flags = CRYPTO_ALG_ASYNC |
1811                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1812                                 .cra_blocksize = SHA3_512_BLOCK_SIZE,
1813                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1814                                 .cra_init = stm32_hash_cra_sha3_init,
1815                                 .cra_exit = stm32_hash_cra_exit,
1816                                 .cra_module = THIS_MODULE,
1817                         }
1818                 },
1819                 .op = {
1820                         .do_one_request = stm32_hash_one_request,
1821                 },
1822         },
1823         {
1824                 .base.init = stm32_hash_init,
1825                 .base.update = stm32_hash_update,
1826                 .base.final = stm32_hash_final,
1827                 .base.finup = stm32_hash_finup,
1828                 .base.digest = stm32_hash_digest,
1829                 .base.export = stm32_hash_export,
1830                 .base.import = stm32_hash_import,
1831                 .base.setkey = stm32_hash_setkey,
1832                 .base.halg = {
1833                         .digestsize = SHA3_512_DIGEST_SIZE,
1834                         .statesize = sizeof(struct stm32_hash_state),
1835                         .base = {
1836                                 .cra_name = "hmac(sha3-512)",
1837                                 .cra_driver_name = "stm32-hmac-sha3-512",
1838                                 .cra_priority = 200,
1839                                 .cra_flags = CRYPTO_ALG_ASYNC |
1840                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1841                                 .cra_blocksize = SHA3_512_BLOCK_SIZE,
1842                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1843                                 .cra_init = stm32_hash_cra_sha3_hmac_init,
1844                                 .cra_exit = stm32_hash_cra_exit,
1845                                 .cra_module = THIS_MODULE,
1846                         }
1847                 },
1848                 .op = {
1849                         .do_one_request = stm32_hash_one_request,
1850                 },
1851         }
1852 };
1853
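/*
 * Register every ahash algorithm listed in the per-SoC pdata with the crypto
 * engine; on failure, unregister whatever has already been registered.
 */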
1854 static int stm32_hash_register_algs(struct stm32_hash_dev *hdev)
1855 {
1856         unsigned int i, j;
1857         int err;
1858
1859         for (i = 0; i < hdev->pdata->algs_info_size; i++) {
1860                 for (j = 0; j < hdev->pdata->algs_info[i].size; j++) {
1861                         err = crypto_engine_register_ahash(
1862                                 &hdev->pdata->algs_info[i].algs_list[j]);
1863                         if (err)
1864                                 goto err_algs;
1865                 }
1866         }
1867
1868         return 0;
1869 err_algs:
1870         dev_err(hdev->dev, "Algo %u : %u failed\n", i, j);
1871         while (j--)
1872                 crypto_engine_unregister_ahash(&hdev->pdata->algs_info[i].algs_list[j]);
1873         while (i--)
1874                 for (j = hdev->pdata->algs_info[i].size; j--;)
1875                         crypto_engine_unregister_ahash(&hdev->pdata->algs_info[i].algs_list[j]);
1876
1877         return err;
1878 }
1879
1880 static int stm32_hash_unregister_algs(struct stm32_hash_dev *hdev)
1881 {
1882         unsigned int i, j;
1883
1884         for (i = 0; i < hdev->pdata->algs_info_size; i++) {
1885                 for (j = 0; j < hdev->pdata->algs_info[i].size; j++)
1886                         crypto_engine_unregister_ahash(
1887                                 &hdev->pdata->algs_info[i].algs_list[j]);
1888         }
1889
1890         return 0;
1891 }
1892
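/*
 * Per-SoC algorithm lists: each stm32_hash_algs_info entry points at one of
 * the tables above, so a given compatible only registers what its hash IP
 * supports.
 */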
1893 static struct stm32_hash_algs_info stm32_hash_algs_info_ux500[] = {
1894         {
1895                 .algs_list      = algs_sha1,
1896                 .size           = ARRAY_SIZE(algs_sha1),
1897         },
1898         {
1899                 .algs_list      = algs_sha256,
1900                 .size           = ARRAY_SIZE(algs_sha256),
1901         },
1902 };
1903
1904 static const struct stm32_hash_pdata stm32_hash_pdata_ux500 = {
1905         .alg_shift      = 7,
1906         .algs_info      = stm32_hash_algs_info_ux500,
1907         .algs_info_size = ARRAY_SIZE(stm32_hash_algs_info_ux500),
1908         .broken_emptymsg = true,
1909         .ux500          = true,
1910 };
1911
1912 static struct stm32_hash_algs_info stm32_hash_algs_info_stm32f4[] = {
1913         {
1914                 .algs_list      = algs_md5,
1915                 .size           = ARRAY_SIZE(algs_md5),
1916         },
1917         {
1918                 .algs_list      = algs_sha1,
1919                 .size           = ARRAY_SIZE(algs_sha1),
1920         },
1921 };
1922
1923 static const struct stm32_hash_pdata stm32_hash_pdata_stm32f4 = {
1924         .alg_shift      = 7,
1925         .algs_info      = stm32_hash_algs_info_stm32f4,
1926         .algs_info_size = ARRAY_SIZE(stm32_hash_algs_info_stm32f4),
1927         .has_sr         = true,
1928         .has_mdmat      = true,
1929 };
1930
1931 static struct stm32_hash_algs_info stm32_hash_algs_info_stm32f7[] = {
1932         {
1933                 .algs_list      = algs_md5,
1934                 .size           = ARRAY_SIZE(algs_md5),
1935         },
1936         {
1937                 .algs_list      = algs_sha1,
1938                 .size           = ARRAY_SIZE(algs_sha1),
1939         },
1940         {
1941                 .algs_list      = algs_sha224,
1942                 .size           = ARRAY_SIZE(algs_sha224),
1943         },
1944         {
1945                 .algs_list      = algs_sha256,
1946                 .size           = ARRAY_SIZE(algs_sha256),
1947         },
1948 };
1949
1950 static const struct stm32_hash_pdata stm32_hash_pdata_stm32f7 = {
1951         .alg_shift      = 7,
1952         .algs_info      = stm32_hash_algs_info_stm32f7,
1953         .algs_info_size = ARRAY_SIZE(stm32_hash_algs_info_stm32f7),
1954         .has_sr         = true,
1955         .has_mdmat      = true,
1956 };
1957
1958 static struct stm32_hash_algs_info stm32_hash_algs_info_stm32mp13[] = {
1959         {
1960                 .algs_list      = algs_sha1,
1961                 .size           = ARRAY_SIZE(algs_sha1),
1962         },
1963         {
1964                 .algs_list      = algs_sha224,
1965                 .size           = ARRAY_SIZE(algs_sha224),
1966         },
1967         {
1968                 .algs_list      = algs_sha256,
1969                 .size           = ARRAY_SIZE(algs_sha256),
1970         },
1971         {
1972                 .algs_list      = algs_sha384_sha512,
1973                 .size           = ARRAY_SIZE(algs_sha384_sha512),
1974         },
1975         {
1976                 .algs_list      = algs_sha3,
1977                 .size           = ARRAY_SIZE(algs_sha3),
1978         },
1979 };
1980
1981 static const struct stm32_hash_pdata stm32_hash_pdata_stm32mp13 = {
1982         .alg_shift      = 17,
1983         .algs_info      = stm32_hash_algs_info_stm32mp13,
1984         .algs_info_size = ARRAY_SIZE(stm32_hash_algs_info_stm32mp13),
1985         .has_sr         = true,
1986         .has_mdmat      = true,
1987 };
1988
1989 static const struct of_device_id stm32_hash_of_match[] = {
1990         { .compatible = "stericsson,ux500-hash", .data = &stm32_hash_pdata_ux500 },
1991         { .compatible = "st,stm32f456-hash", .data = &stm32_hash_pdata_stm32f4 },
1992         { .compatible = "st,stm32f756-hash", .data = &stm32_hash_pdata_stm32f7 },
1993         { .compatible = "st,stm32mp13-hash", .data = &stm32_hash_pdata_stm32mp13 },
1994         {},
1995 };
1996
1997 MODULE_DEVICE_TABLE(of, stm32_hash_of_match);
1998
1999 static int stm32_hash_get_of_match(struct stm32_hash_dev *hdev,
2000                                    struct device *dev)
2001 {
2002         hdev->pdata = of_device_get_match_data(dev);
2003         if (!hdev->pdata) {
2004                 dev_err(dev, "no compatible OF match\n");
2005                 return -EINVAL;
2006         }
2007
2008         return 0;
2009 }
2010
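/*
 * Probe order: map the registers, match the per-SoC data, request the
 * (optional) IRQ or fall back to polling, enable the clock and runtime PM,
 * pulse the reset if one is provided, set up DMA when available, then start
 * a crypto engine and register the algorithms.
 */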
2011 static int stm32_hash_probe(struct platform_device *pdev)
2012 {
2013         struct stm32_hash_dev *hdev;
2014         struct device *dev = &pdev->dev;
2015         struct resource *res;
2016         int ret, irq;
2017
2018         hdev = devm_kzalloc(dev, sizeof(*hdev), GFP_KERNEL);
2019         if (!hdev)
2020                 return -ENOMEM;
2021
2022         hdev->io_base = devm_platform_get_and_ioremap_resource(pdev, 0, &res);
2023         if (IS_ERR(hdev->io_base))
2024                 return PTR_ERR(hdev->io_base);
2025
2026         hdev->phys_base = res->start;
2027
2028         ret = stm32_hash_get_of_match(hdev, dev);
2029         if (ret)
2030                 return ret;
2031
2032         irq = platform_get_irq_optional(pdev, 0);
2033         if (irq < 0 && irq != -ENXIO)
2034                 return irq;
2035
2036         if (irq > 0) {
2037                 ret = devm_request_threaded_irq(dev, irq,
2038                                                 stm32_hash_irq_handler,
2039                                                 stm32_hash_irq_thread,
2040                                                 IRQF_ONESHOT,
2041                                                 dev_name(dev), hdev);
2042                 if (ret) {
2043                         dev_err(dev, "Cannot grab IRQ\n");
2044                         return ret;
2045                 }
2046         } else {
2047                 dev_info(dev, "No IRQ, use polling mode\n");
2048                 hdev->polled = true;
2049         }
2050
2051         hdev->clk = devm_clk_get(&pdev->dev, NULL);
2052         if (IS_ERR(hdev->clk))
2053                 return dev_err_probe(dev, PTR_ERR(hdev->clk),
2054                                      "failed to get clock for hash\n");
2055
2056         ret = clk_prepare_enable(hdev->clk);
2057         if (ret) {
2058                 dev_err(dev, "failed to enable hash clock (%d)\n", ret);
2059                 return ret;
2060         }
2061
2062         pm_runtime_set_autosuspend_delay(dev, HASH_AUTOSUSPEND_DELAY);
2063         pm_runtime_use_autosuspend(dev);
2064
2065         pm_runtime_get_noresume(dev);
2066         pm_runtime_set_active(dev);
2067         pm_runtime_enable(dev);
2068
2069         hdev->rst = devm_reset_control_get(&pdev->dev, NULL);
2070         if (IS_ERR(hdev->rst)) {
2071                 if (PTR_ERR(hdev->rst) == -EPROBE_DEFER) {
2072                         ret = -EPROBE_DEFER;
2073                         goto err_reset;
2074                 }
2075         } else {
2076                 reset_control_assert(hdev->rst);
2077                 udelay(2);
2078                 reset_control_deassert(hdev->rst);
2079         }
2080
2081         hdev->dev = dev;
2082
2083         platform_set_drvdata(pdev, hdev);
2084
2085         ret = stm32_hash_dma_init(hdev);
2086         switch (ret) {
2087         case 0:
2088                 break;
2089         case -ENOENT:
2090         case -ENODEV:
2091                 dev_info(dev, "DMA mode not available\n");
2092                 break;
2093         default:
2094                 dev_err(dev, "DMA init error %d\n", ret);
2095                 goto err_dma;
2096         }
2097
2098         spin_lock(&stm32_hash.lock);
2099         list_add_tail(&hdev->list, &stm32_hash.dev_list);
2100         spin_unlock(&stm32_hash.lock);
2101
2102         /* Initialize crypto engine */
2103         hdev->engine = crypto_engine_alloc_init(dev, 1);
2104         if (!hdev->engine) {
2105                 ret = -ENOMEM;
2106                 goto err_engine;
2107         }
2108
2109         ret = crypto_engine_start(hdev->engine);
2110         if (ret)
2111                 goto err_engine_start;
2112
2113         if (hdev->pdata->ux500)
2114                 /* FIXME: implement DMA mode for Ux500 */
2115                 hdev->dma_mode = 0;
2116         else
2117                 hdev->dma_mode = stm32_hash_read(hdev, HASH_HWCFGR) & HASH_HWCFG_DMA_MASK;
2118
2119         /* Register algos */
2120         ret = stm32_hash_register_algs(hdev);
2121         if (ret)
2122                 goto err_algs;
2123
2124         dev_info(dev, "Init HASH done HW ver %x DMA mode %u\n",
2125                  stm32_hash_read(hdev, HASH_VER), hdev->dma_mode);
2126
2127         pm_runtime_put_sync(dev);
2128
2129         return 0;
2130
2131 err_algs:
2132 err_engine_start:
2133         crypto_engine_exit(hdev->engine);
2134 err_engine:
2135         spin_lock(&stm32_hash.lock);
2136         list_del(&hdev->list);
2137         spin_unlock(&stm32_hash.lock);
2138 err_dma:
2139         if (hdev->dma_lch)
2140                 dma_release_channel(hdev->dma_lch);
2141 err_reset:
2142         pm_runtime_disable(dev);
2143         pm_runtime_put_noidle(dev);
2144
2145         clk_disable_unprepare(hdev->clk);
2146
2147         return ret;
2148 }
2149
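/*
 * Undo probe in reverse order; the clock is only disabled when
 * pm_runtime_get_sync() managed to resume the device.
 */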
2150 static void stm32_hash_remove(struct platform_device *pdev)
2151 {
2152         struct stm32_hash_dev *hdev = platform_get_drvdata(pdev);
2153         int ret;
2154
2155         ret = pm_runtime_get_sync(hdev->dev);
2156
2157         stm32_hash_unregister_algs(hdev);
2158
2159         crypto_engine_exit(hdev->engine);
2160
2161         spin_lock(&stm32_hash.lock);
2162         list_del(&hdev->list);
2163         spin_unlock(&stm32_hash.lock);
2164
2165         if (hdev->dma_lch)
2166                 dma_release_channel(hdev->dma_lch);
2167
2168         pm_runtime_disable(hdev->dev);
2169         pm_runtime_put_noidle(hdev->dev);
2170
2171         if (ret >= 0)
2172                 clk_disable_unprepare(hdev->clk);
2173 }
2174
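/* Runtime PM handlers only gate the hash peripheral clock. */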
2175 #ifdef CONFIG_PM
2176 static int stm32_hash_runtime_suspend(struct device *dev)
2177 {
2178         struct stm32_hash_dev *hdev = dev_get_drvdata(dev);
2179
2180         clk_disable_unprepare(hdev->clk);
2181
2182         return 0;
2183 }
2184
2185 static int stm32_hash_runtime_resume(struct device *dev)
2186 {
2187         struct stm32_hash_dev *hdev = dev_get_drvdata(dev);
2188         int ret;
2189
2190         ret = clk_prepare_enable(hdev->clk);
2191         if (ret) {
2192                 dev_err(hdev->dev, "Failed to prepare_enable clock\n");
2193                 return ret;
2194         }
2195
2196         return 0;
2197 }
2198 #endif
2199
2200 static const struct dev_pm_ops stm32_hash_pm_ops = {
2201         SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
2202                                 pm_runtime_force_resume)
2203         SET_RUNTIME_PM_OPS(stm32_hash_runtime_suspend,
2204                            stm32_hash_runtime_resume, NULL)
2205 };
2206
2207 static struct platform_driver stm32_hash_driver = {
2208         .probe          = stm32_hash_probe,
2209         .remove_new     = stm32_hash_remove,
2210         .driver         = {
2211                 .name   = "stm32-hash",
2212                 .pm = &stm32_hash_pm_ops,
2213                 .of_match_table = stm32_hash_of_match,
2214         }
2215 };
2216
2217 module_platform_driver(stm32_hash_driver);
2218
2219 MODULE_DESCRIPTION("STM32 SHA1/SHA2/SHA3 & MD5 (HMAC) hw accelerator driver");
2220 MODULE_AUTHOR("Lionel Debieve <[email protected]>");
2221 MODULE_LICENSE("GPL v2");