/*
 * This file is part of STM32 Crypto driver for Linux.
 *
 * Copyright (C) 2017, STMicroelectronics - All Rights Reserved
 *
 * License terms: GPL V2.0.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published by
 * the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
 * details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program. If not, see <http://www.gnu.org/licenses/>.
 */
#include <linux/clk.h>
#include <linux/crypto.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/iopoll.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>

#include <crypto/engine.h>
#include <crypto/hash.h>
#include <crypto/md5.h>
#include <crypto/scatterwalk.h>
#include <crypto/sha.h>
#include <crypto/internal/hash.h>
/* Register offsets */
#define HASH_CR			0x00
#define HASH_DIN		0x04
#define HASH_STR		0x08
#define HASH_IMR		0x20
#define HASH_SR			0x24

/* Context swap and read-only ID registers */
#define HASH_CSR(x)		(0x0F8 + ((x) * 0x04))
#define HASH_HREG(x)		(0x310 + ((x) * 0x04))
#define HASH_HWCFGR		0x3F0
#define HASH_VER		0x3F4
/* Control Register */
#define HASH_CR_INIT		BIT(2)
#define HASH_CR_DMAE		BIT(3)
#define HASH_CR_DATATYPE_POS	4
#define HASH_CR_MODE		BIT(6)
#define HASH_CR_MDMAT		BIT(13)
#define HASH_CR_DMAA		BIT(14)
#define HASH_CR_LKEY		BIT(16)

#define HASH_CR_ALGO_SHA1	0x0
#define HASH_CR_ALGO_MD5	0x80
#define HASH_CR_ALGO_SHA224	0x40000
#define HASH_CR_ALGO_SHA256	0x40080
/* Interrupt Enable Register */
#define HASH_DINIE		BIT(0)
#define HASH_DCIE		BIT(1)

/* Interrupt Flags */
#define HASH_MASK_CALC_COMPLETION	BIT(0)
#define HASH_MASK_DATA_INPUT		BIT(1)
/* Context swap register */
#define HASH_CSR_REGISTER_NUMBER	53
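/*
 * Illustrative note: stm32_hash_export() below saves HASH_IMR, HASH_STR
 * and HASH_CR plus these 53 context-swap registers (56 words in total),
 * which is enough to suspend a partially computed digest and replay it
 * later through stm32_hash_import().
 */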
/* Status Flags */
#define HASH_SR_DATA_INPUT_READY	BIT(0)
#define HASH_SR_OUTPUT_READY		BIT(1)
#define HASH_SR_DMA_ACTIVE		BIT(2)
#define HASH_SR_BUSY			BIT(3)

/* STR Register */
#define HASH_STR_NBLW_MASK		GENMASK(4, 0)
#define HASH_STR_DCAL			BIT(8)
#define HASH_FLAGS_INIT			BIT(0)
#define HASH_FLAGS_OUTPUT_READY		BIT(1)
#define HASH_FLAGS_CPU			BIT(2)
#define HASH_FLAGS_DMA_READY		BIT(3)
#define HASH_FLAGS_DMA_ACTIVE		BIT(4)
#define HASH_FLAGS_HMAC_INIT		BIT(5)
#define HASH_FLAGS_HMAC_FINAL		BIT(6)
#define HASH_FLAGS_HMAC_KEY		BIT(7)

#define HASH_FLAGS_FINAL		BIT(15)
#define HASH_FLAGS_FINUP		BIT(16)
#define HASH_FLAGS_ALGO_MASK		GENMASK(21, 18)
#define HASH_FLAGS_MD5			BIT(18)
#define HASH_FLAGS_SHA1			BIT(19)
#define HASH_FLAGS_SHA224		BIT(20)
#define HASH_FLAGS_SHA256		BIT(21)
#define HASH_FLAGS_ERRORS		BIT(22)
#define HASH_FLAGS_HMAC			BIT(23)

#define HASH_OP_UPDATE			1
#define HASH_OP_FINAL			2
enum stm32_hash_data_format {
	HASH_DATA_32_BITS		= 0x0,
	HASH_DATA_16_BITS		= 0x1,
	HASH_DATA_8_BITS		= 0x2,
	HASH_DATA_1_BIT			= 0x3
};
#define HASH_BUFLEN			256
#define HASH_LONG_KEY			64
#define HASH_MAX_KEY_SIZE		(SHA256_BLOCK_SIZE * 8)
#define HASH_QUEUE_LENGTH		16
#define HASH_DMA_THRESHOLD		50	/* bytes: smaller requests use the CPU path */

#define HASH_AUTOSUSPEND_DELAY		50	/* ms */
struct stm32_hash_ctx {
	struct crypto_engine_ctx enginectx;
	struct stm32_hash_dev	*hdev;
	unsigned long		flags;

	u8			key[HASH_MAX_KEY_SIZE];
	int			keylen;
};
struct stm32_hash_request_ctx {
	struct stm32_hash_dev	*hdev;
	unsigned long		flags;
	unsigned long		op;

	u8 digest[SHA256_DIGEST_SIZE] __aligned(sizeof(u32));
	size_t			digcnt;
	size_t			bufcnt;
	size_t			buflen;
	size_t			total;
	size_t			offset;

	/* DMA */
	struct scatterlist	*sg;
	struct scatterlist	sg_key;
	size_t			dma_ct;
	int			nents;

	u8			data_type;

	u8 buffer[HASH_BUFLEN] __aligned(sizeof(u32));

	/* Export Context */
	u32			*hw_context;
};
struct stm32_hash_algs_info {
	struct ahash_alg	*algs_list;
	size_t			size;
};
struct stm32_hash_pdata {
	struct stm32_hash_algs_info	*algs_info;
	size_t				algs_info_size;
};
struct stm32_hash_dev {
	struct list_head	list;
	struct device		*dev;
	struct clk		*clk;
	struct reset_control	*rst;
	void __iomem		*io_base;
	phys_addr_t		phys_base;
	u32			dma_mode;
	u32			dma_maxburst;

	spinlock_t		lock; /* lock to protect queue */

	struct ahash_request	*req;
	struct crypto_engine	*engine;

	unsigned long		flags;

	struct dma_chan		*dma_lch;
	struct completion	dma_completion;

	const struct stm32_hash_pdata	*pdata;
};
struct stm32_hash_drv {
	struct list_head	dev_list;
	spinlock_t		lock; /* List protection access */
};

static struct stm32_hash_drv stm32_hash = {
	.dev_list = LIST_HEAD_INIT(stm32_hash.dev_list),
	.lock = __SPIN_LOCK_UNLOCKED(stm32_hash.lock),
};
static void stm32_hash_dma_callback(void *param);

static inline u32 stm32_hash_read(struct stm32_hash_dev *hdev, u32 offset)
{
	return readl_relaxed(hdev->io_base + offset);
}

static inline void stm32_hash_write(struct stm32_hash_dev *hdev,
				    u32 offset, u32 value)
{
	writel_relaxed(value, hdev->io_base + offset);
}
static inline int stm32_hash_wait_busy(struct stm32_hash_dev *hdev)
{
	u32 status;

	/* Poll SR until BUSY clears: 10 us period, 10 ms timeout */
	return readl_relaxed_poll_timeout(hdev->io_base + HASH_SR, status,
				   !(status & HASH_SR_BUSY), 10, 10000);
}
static void stm32_hash_set_nblw(struct stm32_hash_dev *hdev, int length)
{
	u32 reg;

	reg = stm32_hash_read(hdev, HASH_STR);
	reg &= ~(HASH_STR_NBLW_MASK);
	/* NBLW = number of valid bits in the last word written to DIN */
	reg |= (8U * ((length) % 4U));
	stm32_hash_write(hdev, HASH_STR, reg);
}
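/*
 * Worked example (illustrative): a final block of 5 bytes leaves
 * 5 % 4 = 1 valid byte in the last 32-bit word, so NBLW is programmed
 * to 8 * 1 = 8 valid bits. A word-multiple length gives NBLW = 0,
 * which the IP treats as "all 32 bits of the last word are valid".
 */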
static int stm32_hash_write_key(struct stm32_hash_dev *hdev)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(hdev->req);
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	u32 reg;
	int keylen = ctx->keylen;
	void *key = ctx->key;

	if (keylen) {
		stm32_hash_set_nblw(hdev, keylen);

		while (keylen > 0) {
			stm32_hash_write(hdev, HASH_DIN, *(u32 *)key);
			keylen -= 4;
			key += 4;
		}

		reg = stm32_hash_read(hdev, HASH_STR);
		reg |= HASH_STR_DCAL;
		stm32_hash_write(hdev, HASH_STR, reg);

		return -EINPROGRESS;
	}

	return 0;
}
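/*
 * Example (illustrative): a 6-byte HMAC key is pushed as two 32-bit
 * words; stm32_hash_set_nblw() has already flagged 8 * (6 % 4) = 16
 * bits of the second word as valid, and setting DCAL starts the key
 * round in hardware.
 */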
static void stm32_hash_write_ctrl(struct stm32_hash_dev *hdev)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(hdev->req);
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);

	u32 reg = HASH_CR_INIT;

	if (!(hdev->flags & HASH_FLAGS_INIT)) {
		switch (rctx->flags & HASH_FLAGS_ALGO_MASK) {
		case HASH_FLAGS_MD5:
			reg |= HASH_CR_ALGO_MD5;
			break;
		case HASH_FLAGS_SHA1:
			reg |= HASH_CR_ALGO_SHA1;
			break;
		case HASH_FLAGS_SHA224:
			reg |= HASH_CR_ALGO_SHA224;
			break;
		case HASH_FLAGS_SHA256:
			reg |= HASH_CR_ALGO_SHA256;
			break;
		default:
			reg |= HASH_CR_ALGO_MD5;
		}

		reg |= (rctx->data_type << HASH_CR_DATATYPE_POS);

		if (rctx->flags & HASH_FLAGS_HMAC) {
			hdev->flags |= HASH_FLAGS_HMAC;
			reg |= HASH_CR_MODE;
			if (ctx->keylen > HASH_LONG_KEY)
				reg |= HASH_CR_LKEY;
		}

		stm32_hash_write(hdev, HASH_IMR, HASH_DCIE);

		stm32_hash_write(hdev, HASH_CR, reg);

		hdev->flags |= HASH_FLAGS_INIT;

		dev_dbg(hdev->dev, "Write Control %x\n", reg);
	}
}
static void stm32_hash_append_sg(struct stm32_hash_request_ctx *rctx)
{
	size_t count;

	while ((rctx->bufcnt < rctx->buflen) && rctx->total) {
		count = min(rctx->sg->length - rctx->offset, rctx->total);
		count = min(count, rctx->buflen - rctx->bufcnt);

		if (count <= 0) {
			if ((rctx->sg->length == 0) && !sg_is_last(rctx->sg)) {
				rctx->sg = sg_next(rctx->sg);
				continue;
			} else {
				break;
			}
		}

		scatterwalk_map_and_copy(rctx->buffer + rctx->bufcnt, rctx->sg,
					 rctx->offset, count, 0);

		rctx->bufcnt += count;
		rctx->offset += count;
		rctx->total -= count;

		if (rctx->offset == rctx->sg->length) {
			rctx->sg = sg_next(rctx->sg);
			if (rctx->sg)
				rctx->offset = 0;
			else
				rctx->total = 0;
		}
	}
}
static int stm32_hash_xmit_cpu(struct stm32_hash_dev *hdev,
			       const u8 *buf, size_t length, int final)
{
	unsigned int count, len32;
	const u32 *buffer = (const u32 *)buf;
	u32 reg;

	if (final)
		hdev->flags |= HASH_FLAGS_FINAL;

	len32 = DIV_ROUND_UP(length, sizeof(u32));

	dev_dbg(hdev->dev, "%s: length: %zd, final: %x len32 %i\n",
		__func__, length, final, len32);

	hdev->flags |= HASH_FLAGS_CPU;

	stm32_hash_write_ctrl(hdev);

	if (stm32_hash_wait_busy(hdev))
		return -ETIMEDOUT;

	if ((hdev->flags & HASH_FLAGS_HMAC) &&
	    !(hdev->flags & HASH_FLAGS_HMAC_KEY)) {
		hdev->flags |= HASH_FLAGS_HMAC_KEY;
		stm32_hash_write_key(hdev);
		if (stm32_hash_wait_busy(hdev))
			return -ETIMEDOUT;
	}

	for (count = 0; count < len32; count++)
		stm32_hash_write(hdev, HASH_DIN, buffer[count]);

	if (final) {
		stm32_hash_set_nblw(hdev, length);
		reg = stm32_hash_read(hdev, HASH_STR);
		reg |= HASH_STR_DCAL;
		stm32_hash_write(hdev, HASH_STR, reg);
		if (hdev->flags & HASH_FLAGS_HMAC) {
			if (stm32_hash_wait_busy(hdev))
				return -ETIMEDOUT;
			stm32_hash_write_key(hdev);
		}
		return -EINPROGRESS;
	}

	return 0;
}
static int stm32_hash_update_cpu(struct stm32_hash_dev *hdev)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);
	int bufcnt, err = 0, final;

	dev_dbg(hdev->dev, "%s flags %lx\n", __func__, rctx->flags);

	final = (rctx->flags & HASH_FLAGS_FINUP);

	while ((rctx->total >= rctx->buflen) ||
	       (rctx->bufcnt + rctx->total >= rctx->buflen)) {
		stm32_hash_append_sg(rctx);
		bufcnt = rctx->bufcnt;
		rctx->bufcnt = 0;
		err = stm32_hash_xmit_cpu(hdev, rctx->buffer, bufcnt, 0);
	}

	stm32_hash_append_sg(rctx);

	if (final) {
		bufcnt = rctx->bufcnt;
		rctx->bufcnt = 0;
		err = stm32_hash_xmit_cpu(hdev, rctx->buffer, bufcnt,
					  (rctx->flags & HASH_FLAGS_FINUP));
	}

	return err;
}
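/*
 * Buffering sketch (illustrative): with HASH_BUFLEN = 256, an update()
 * of 300 bytes first drains one full 256-byte chunk through
 * stm32_hash_xmit_cpu(..., final = 0); the remaining 44 bytes stay in
 * rctx->buffer until a later update() refills it or final()/finup()
 * flushes it with final = 1.
 */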
static int stm32_hash_xmit_dma(struct stm32_hash_dev *hdev,
			       struct scatterlist *sg, int length, int mdma)
{
	struct dma_async_tx_descriptor *in_desc;
	dma_cookie_t cookie;
	u32 reg;
	int err = 0;

	in_desc = dmaengine_prep_slave_sg(hdev->dma_lch, sg, 1,
					  DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT |
					  DMA_CTRL_ACK);
	if (!in_desc) {
		dev_err(hdev->dev, "dmaengine_prep_slave error\n");
		return -ENOMEM;
	}

	reinit_completion(&hdev->dma_completion);
	in_desc->callback = stm32_hash_dma_callback;
	in_desc->callback_param = hdev;

	hdev->flags |= HASH_FLAGS_FINAL;
	hdev->flags |= HASH_FLAGS_DMA_ACTIVE;

	reg = stm32_hash_read(hdev, HASH_CR);

	if (mdma)
		reg |= HASH_CR_MDMAT;
	else
		reg &= ~HASH_CR_MDMAT;

	reg |= HASH_CR_DMAE;

	stm32_hash_write(hdev, HASH_CR, reg);

	stm32_hash_set_nblw(hdev, length);

	cookie = dmaengine_submit(in_desc);
	err = dma_submit_error(cookie);
	if (err)
		return -ENOMEM;

	dma_async_issue_pending(hdev->dma_lch);

	if (!wait_for_completion_interruptible_timeout(&hdev->dma_completion,
						       msecs_to_jiffies(100)))
		err = -ETIMEDOUT;

	if (dma_async_is_tx_complete(hdev->dma_lch, cookie,
				     NULL, NULL) != DMA_COMPLETE)
		err = -ETIMEDOUT;

	if (err) {
		dev_err(hdev->dev, "DMA Error %i\n", err);
		dmaengine_terminate_all(hdev->dma_lch);
		return err;
	}

	return -EINPROGRESS;
}
static void stm32_hash_dma_callback(void *param)
{
	struct stm32_hash_dev *hdev = param;

	complete(&hdev->dma_completion);

	hdev->flags |= HASH_FLAGS_DMA_READY;
}
static int stm32_hash_hmac_dma_send(struct stm32_hash_dev *hdev)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(hdev->req);
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	int err;

	if (ctx->keylen < HASH_DMA_THRESHOLD || (hdev->dma_mode == 1)) {
		err = stm32_hash_write_key(hdev);
		if (stm32_hash_wait_busy(hdev))
			return -ETIMEDOUT;
	} else {
		if (!(hdev->flags & HASH_FLAGS_HMAC_KEY))
			sg_init_one(&rctx->sg_key, ctx->key,
				    ALIGN(ctx->keylen, sizeof(u32)));

		rctx->dma_ct = dma_map_sg(hdev->dev, &rctx->sg_key, 1,
					  DMA_TO_DEVICE);
		if (rctx->dma_ct == 0) {
			dev_err(hdev->dev, "dma_map_sg error\n");
			return -ENOMEM;
		}

		err = stm32_hash_xmit_dma(hdev, &rctx->sg_key, ctx->keylen, 0);

		dma_unmap_sg(hdev->dev, &rctx->sg_key, 1, DMA_TO_DEVICE);
	}

	return err;
}
static int stm32_hash_dma_init(struct stm32_hash_dev *hdev)
{
	struct dma_slave_config dma_conf;
	int err;

	memset(&dma_conf, 0, sizeof(dma_conf));

	dma_conf.direction = DMA_MEM_TO_DEV;
	dma_conf.dst_addr = hdev->phys_base + HASH_DIN;
	dma_conf.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
	dma_conf.src_maxburst = hdev->dma_maxburst;
	dma_conf.dst_maxburst = hdev->dma_maxburst;
	dma_conf.device_fc = false;

	hdev->dma_lch = dma_request_slave_channel(hdev->dev, "in");
	if (!hdev->dma_lch) {
		dev_err(hdev->dev, "Couldn't acquire a slave DMA channel.\n");
		return -EBUSY;
	}

	err = dmaengine_slave_config(hdev->dma_lch, &dma_conf);
	if (err) {
		dma_release_channel(hdev->dma_lch);
		hdev->dma_lch = NULL;
		dev_err(hdev->dev, "Couldn't configure DMA slave.\n");
		return err;
	}

	init_completion(&hdev->dma_completion);

	return 0;
}
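/*
 * A hypothetical device-tree fragment providing the "in" slave channel
 * requested above (register address and channel specifier are
 * board-specific and purely illustrative):
 *
 *	hash1: hash@50060400 {
 *		compatible = "st,stm32f756-hash";
 *		...
 *		dmas = <&dma2 7 2 0x400 0x0>;
 *		dma-names = "in";
 *		dma-maxburst = <0>;
 *	};
 */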
static int stm32_hash_dma_send(struct stm32_hash_dev *hdev)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);
	struct scatterlist sg[1], *tsg;
	int err = 0, len = 0, reg, ncp = 0;
	unsigned int i;
	u32 *buffer = (void *)rctx->buffer;

	rctx->sg = hdev->req->src;
	rctx->total = hdev->req->nbytes;

	rctx->nents = sg_nents(rctx->sg);

	if (rctx->nents < 0)
		return -EINVAL;

	stm32_hash_write_ctrl(hdev);

	if (hdev->flags & HASH_FLAGS_HMAC) {
		err = stm32_hash_hmac_dma_send(hdev);
		if (err != -EINPROGRESS)
			return err;
	}

	for_each_sg(rctx->sg, tsg, rctx->nents, i) {
		sg[0] = *tsg;
		len = sg->length;

		if (sg_is_last(sg)) {
			if (hdev->dma_mode == 1) {
				len = (ALIGN(sg->length, 16) - 16);

				ncp = sg_pcopy_to_buffer(
					rctx->sg, rctx->nents,
					rctx->buffer, sg->length - len,
					rctx->total - sg->length + len);

				sg->length = len;
			} else {
				if (!(IS_ALIGNED(sg->length, sizeof(u32)))) {
					len = sg->length;
					sg->length = ALIGN(sg->length,
							   sizeof(u32));
				}
			}
		}

		rctx->dma_ct = dma_map_sg(hdev->dev, sg, 1,
					  DMA_TO_DEVICE);
		if (rctx->dma_ct == 0) {
			dev_err(hdev->dev, "dma_map_sg error\n");
			return -ENOMEM;
		}

		err = stm32_hash_xmit_dma(hdev, sg, len,
					  !sg_is_last(sg));

		dma_unmap_sg(hdev->dev, sg, 1, DMA_TO_DEVICE);

		if (err == -ENOMEM)
			return err;
	}

	if (hdev->dma_mode == 1) {
		if (stm32_hash_wait_busy(hdev))
			return -ETIMEDOUT;
		reg = stm32_hash_read(hdev, HASH_CR);
		reg &= ~HASH_CR_DMAE;
		reg |= HASH_CR_DMAA;
		stm32_hash_write(hdev, HASH_CR, reg);

		if (ncp) {
			/* Zero-pad the trailing partial word before pushing it */
			memset((u8 *)buffer + ncp, 0,
			       ALIGN(ncp, sizeof(u32)) - ncp);
			writesl(hdev->io_base + HASH_DIN, buffer,
				DIV_ROUND_UP(ncp, sizeof(u32)));
		}

		stm32_hash_set_nblw(hdev, ncp);
		reg = stm32_hash_read(hdev, HASH_STR);
		reg |= HASH_STR_DCAL;
		stm32_hash_write(hdev, HASH_STR, reg);
		err = -EINPROGRESS;
	}

	if (hdev->flags & HASH_FLAGS_HMAC) {
		if (stm32_hash_wait_busy(hdev))
			return -ETIMEDOUT;
		err = stm32_hash_hmac_dma_send(hdev);
	}

	return err;
}
static struct stm32_hash_dev *stm32_hash_find_dev(struct stm32_hash_ctx *ctx)
{
	struct stm32_hash_dev *hdev = NULL, *tmp;

	spin_lock_bh(&stm32_hash.lock);
	if (!ctx->hdev) {
		list_for_each_entry(tmp, &stm32_hash.dev_list, list) {
			hdev = tmp;
			break;
		}
		ctx->hdev = hdev;
	} else {
		hdev = ctx->hdev;
	}

	spin_unlock_bh(&stm32_hash.lock);

	return hdev;
}
static bool stm32_hash_dma_aligned_data(struct ahash_request *req)
{
	struct scatterlist *sg;
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
	int i;

	if (req->nbytes <= HASH_DMA_THRESHOLD)
		return false;

	if (sg_nents(req->src) > 1) {
		if (hdev->dma_mode == 1)
			return false;
		for_each_sg(req->src, sg, sg_nents(req->src), i) {
			if ((!IS_ALIGNED(sg->length, sizeof(u32))) &&
			    (!sg_is_last(sg)))
				return false;
		}
	}

	if (req->src->offset % 4)
		return false;

	return true;
}
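/*
 * In short, DMA is only attempted when the request is larger than
 * HASH_DMA_THRESHOLD (50) bytes, the first scatterlist entry is 32-bit
 * aligned and, for multi-entry lists, every entry except the last has
 * a length that is a multiple of 4. An MDMA-capable IP (dma_mode == 1)
 * additionally refuses multi-entry lists altogether.
 */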
static int stm32_hash_init(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);

	rctx->hdev = hdev;

	rctx->flags = HASH_FLAGS_CPU;

	rctx->digcnt = crypto_ahash_digestsize(tfm);
	switch (rctx->digcnt) {
	case MD5_DIGEST_SIZE:
		rctx->flags |= HASH_FLAGS_MD5;
		break;
	case SHA1_DIGEST_SIZE:
		rctx->flags |= HASH_FLAGS_SHA1;
		break;
	case SHA224_DIGEST_SIZE:
		rctx->flags |= HASH_FLAGS_SHA224;
		break;
	case SHA256_DIGEST_SIZE:
		rctx->flags |= HASH_FLAGS_SHA256;
		break;
	default:
		return -EINVAL;
	}

	rctx->bufcnt = 0;
	rctx->buflen = HASH_BUFLEN;
	rctx->total = 0;
	rctx->offset = 0;
	rctx->data_type = HASH_DATA_8_BITS;

	memset(rctx->buffer, 0, HASH_BUFLEN);

	if (ctx->flags & HASH_FLAGS_HMAC)
		rctx->flags |= HASH_FLAGS_HMAC;

	dev_dbg(hdev->dev, "%s Flags %lx\n", __func__, rctx->flags);

	return 0;
}
static int stm32_hash_update_req(struct stm32_hash_dev *hdev)
{
	return stm32_hash_update_cpu(hdev);
}
static int stm32_hash_final_req(struct stm32_hash_dev *hdev)
{
	struct ahash_request *req = hdev->req;
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
	int err;
	int buflen = rctx->bufcnt;

	rctx->bufcnt = 0;

	if (!(rctx->flags & HASH_FLAGS_CPU))
		err = stm32_hash_dma_send(hdev);
	else
		err = stm32_hash_xmit_cpu(hdev, rctx->buffer, buflen, 1);

	return err;
}
static void stm32_hash_copy_hash(struct ahash_request *req)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
	u32 *hash = (u32 *)rctx->digest;
	unsigned int i, hashsize;

	switch (rctx->flags & HASH_FLAGS_ALGO_MASK) {
	case HASH_FLAGS_MD5:
		hashsize = MD5_DIGEST_SIZE;
		break;
	case HASH_FLAGS_SHA1:
		hashsize = SHA1_DIGEST_SIZE;
		break;
	case HASH_FLAGS_SHA224:
		hashsize = SHA224_DIGEST_SIZE;
		break;
	case HASH_FLAGS_SHA256:
		hashsize = SHA256_DIGEST_SIZE;
		break;
	default:
		return;
	}

	for (i = 0; i < hashsize / sizeof(u32); i++)
		hash[i] = be32_to_cpu(stm32_hash_read(rctx->hdev,
						      HASH_HREG(i)));
}
static int stm32_hash_finish(struct ahash_request *req)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (!req->result)
		return -EINVAL;

	memcpy(req->result, rctx->digest, rctx->digcnt);

	return 0;
}
static void stm32_hash_finish_req(struct ahash_request *req, int err)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct stm32_hash_dev *hdev = rctx->hdev;

	if (!err && (HASH_FLAGS_FINAL & hdev->flags)) {
		stm32_hash_copy_hash(req);
		err = stm32_hash_finish(req);
		hdev->flags &= ~(HASH_FLAGS_FINAL | HASH_FLAGS_CPU |
				 HASH_FLAGS_INIT | HASH_FLAGS_DMA_READY |
				 HASH_FLAGS_OUTPUT_READY | HASH_FLAGS_HMAC |
				 HASH_FLAGS_HMAC_INIT | HASH_FLAGS_HMAC_FINAL |
				 HASH_FLAGS_HMAC_KEY);
	} else {
		rctx->flags |= HASH_FLAGS_ERRORS;
	}

	pm_runtime_mark_last_busy(hdev->dev);
	pm_runtime_put_autosuspend(hdev->dev);

	crypto_finalize_hash_request(hdev->engine, req, err);
}
static int stm32_hash_hw_init(struct stm32_hash_dev *hdev,
			      struct stm32_hash_request_ctx *rctx)
{
	pm_runtime_get_sync(hdev->dev);

	if (!(HASH_FLAGS_INIT & hdev->flags)) {
		stm32_hash_write(hdev, HASH_CR, HASH_CR_INIT);
		stm32_hash_write(hdev, HASH_STR, 0);
		stm32_hash_write(hdev, HASH_DIN, 0);
		stm32_hash_write(hdev, HASH_IMR, 0);
	}

	return 0;
}
static int stm32_hash_one_request(struct crypto_engine *engine, void *areq);
static int stm32_hash_prepare_req(struct crypto_engine *engine, void *areq);

static int stm32_hash_handle_queue(struct stm32_hash_dev *hdev,
				   struct ahash_request *req)
{
	return crypto_transfer_hash_request_to_engine(hdev->engine, req);
}
static int stm32_hash_prepare_req(struct crypto_engine *engine, void *areq)
{
	struct ahash_request *req = container_of(areq, struct ahash_request,
						 base);
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
	struct stm32_hash_request_ctx *rctx;

	if (!hdev)
		return -ENODEV;

	hdev->req = req;

	rctx = ahash_request_ctx(req);

	dev_dbg(hdev->dev, "processing new req, op: %lu, nbytes %d\n",
		rctx->op, req->nbytes);

	return stm32_hash_hw_init(hdev, rctx);
}
static int stm32_hash_one_request(struct crypto_engine *engine, void *areq)
{
	struct ahash_request *req = container_of(areq, struct ahash_request,
						 base);
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
	struct stm32_hash_request_ctx *rctx;
	int err = 0;

	if (!hdev)
		return -ENODEV;

	hdev->req = req;

	rctx = ahash_request_ctx(req);

	if (rctx->op == HASH_OP_UPDATE)
		err = stm32_hash_update_req(hdev);
	else if (rctx->op == HASH_OP_FINAL)
		err = stm32_hash_final_req(hdev);

	if (err != -EINPROGRESS)
		/* done task will not finish it, so do it here */
		stm32_hash_finish_req(req, err);

	return 0;
}
static int stm32_hash_enqueue(struct ahash_request *req, unsigned int op)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct stm32_hash_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct stm32_hash_dev *hdev = ctx->hdev;

	rctx->op = op;

	return stm32_hash_handle_queue(hdev, req);
}
static int stm32_hash_update(struct ahash_request *req)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (!req->nbytes || !(rctx->flags & HASH_FLAGS_CPU))
		return 0;

	rctx->total = req->nbytes;
	rctx->sg = req->src;
	rctx->offset = 0;

	if ((rctx->bufcnt + rctx->total < rctx->buflen)) {
		stm32_hash_append_sg(rctx);
		return 0;
	}

	return stm32_hash_enqueue(req, HASH_OP_UPDATE);
}
static int stm32_hash_final(struct ahash_request *req)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);

	rctx->flags |= HASH_FLAGS_FINUP;

	return stm32_hash_enqueue(req, HASH_OP_FINAL);
}
static int stm32_hash_finup(struct ahash_request *req)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
	int err1, err2;

	rctx->flags |= HASH_FLAGS_FINUP;

	if (hdev->dma_lch && stm32_hash_dma_aligned_data(req))
		rctx->flags &= ~HASH_FLAGS_CPU;

	err1 = stm32_hash_update(req);

	if (err1 == -EINPROGRESS || err1 == -EBUSY)
		return err1;

	/*
	 * final() must always be called to clean up resources even if
	 * update() failed, except when update() returned -EINPROGRESS.
	 */
	err2 = stm32_hash_final(req);

	return err1 ?: err2;
}
static int stm32_hash_digest(struct ahash_request *req)
{
	return stm32_hash_init(req) ?: stm32_hash_finup(req);
}
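/*
 * Illustrative one-shot usage through the standard ahash API (caller
 * code, not part of this driver):
 *
 *	ahash_request_set_crypt(req, sg, result, nbytes);
 *	ret = crypto_ahash_digest(req);
 *
 * digest() simply chains init() and finup() above.
 */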
static int stm32_hash_export(struct ahash_request *req, void *out)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
	u32 *preg;
	unsigned int i;

	pm_runtime_get_sync(hdev->dev);

	while (!(stm32_hash_read(hdev, HASH_SR) & HASH_SR_DATA_INPUT_READY))
		cpu_relax();

	rctx->hw_context = kmalloc_array(3 + HASH_CSR_REGISTER_NUMBER,
					 sizeof(u32), GFP_KERNEL);
	if (!rctx->hw_context) {
		pm_runtime_mark_last_busy(hdev->dev);
		pm_runtime_put_autosuspend(hdev->dev);
		return -ENOMEM;
	}

	preg = rctx->hw_context;

	*preg++ = stm32_hash_read(hdev, HASH_IMR);
	*preg++ = stm32_hash_read(hdev, HASH_STR);
	*preg++ = stm32_hash_read(hdev, HASH_CR);
	for (i = 0; i < HASH_CSR_REGISTER_NUMBER; i++)
		*preg++ = stm32_hash_read(hdev, HASH_CSR(i));

	pm_runtime_mark_last_busy(hdev->dev);
	pm_runtime_put_autosuspend(hdev->dev);

	memcpy(out, rctx, sizeof(*rctx));

	return 0;
}
static int stm32_hash_import(struct ahash_request *req, const void *in)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
	const u32 *preg = in;
	u32 reg;
	unsigned int i;

	memcpy(rctx, in, sizeof(*rctx));

	preg = rctx->hw_context;

	pm_runtime_get_sync(hdev->dev);

	stm32_hash_write(hdev, HASH_IMR, *preg++);
	stm32_hash_write(hdev, HASH_STR, *preg++);
	stm32_hash_write(hdev, HASH_CR, *preg);
	reg = *preg++ | HASH_CR_INIT;
	stm32_hash_write(hdev, HASH_CR, reg);

	for (i = 0; i < HASH_CSR_REGISTER_NUMBER; i++)
		stm32_hash_write(hdev, HASH_CSR(i), *preg++);

	pm_runtime_mark_last_busy(hdev->dev);
	pm_runtime_put_autosuspend(hdev->dev);

	kfree(rctx->hw_context);

	return 0;
}
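/*
 * Usage sketch (illustrative): export()/import() let a caller park a
 * partial hash and resume it later, e.g.:
 *
 *	u8 state[sizeof(struct stm32_hash_request_ctx)];
 *
 *	ret = crypto_ahash_export(req, state);
 *	...
 *	ret = crypto_ahash_import(req, state);
 *
 * The state blob is the request context itself; the 56 saved hardware
 * words live in the kmalloc'ed array referenced by rctx->hw_context.
 */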
static int stm32_hash_setkey(struct crypto_ahash *tfm,
			     const u8 *key, unsigned int keylen)
{
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);

	if (keylen > HASH_MAX_KEY_SIZE)
		return -EINVAL;

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}
static int stm32_hash_cra_init_algs(struct crypto_tfm *tfm,
				    const char *algs_hmac_name)
{
	struct stm32_hash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct stm32_hash_request_ctx));

	ctx->keylen = 0;

	if (algs_hmac_name)
		ctx->flags |= HASH_FLAGS_HMAC;

	ctx->enginectx.op.do_one_request = stm32_hash_one_request;
	ctx->enginectx.op.prepare_request = stm32_hash_prepare_req;
	ctx->enginectx.op.unprepare_request = NULL;

	return 0;
}
static int stm32_hash_cra_init(struct crypto_tfm *tfm)
{
	return stm32_hash_cra_init_algs(tfm, NULL);
}

static int stm32_hash_cra_md5_init(struct crypto_tfm *tfm)
{
	return stm32_hash_cra_init_algs(tfm, "md5");
}

static int stm32_hash_cra_sha1_init(struct crypto_tfm *tfm)
{
	return stm32_hash_cra_init_algs(tfm, "sha1");
}

static int stm32_hash_cra_sha224_init(struct crypto_tfm *tfm)
{
	return stm32_hash_cra_init_algs(tfm, "sha224");
}

static int stm32_hash_cra_sha256_init(struct crypto_tfm *tfm)
{
	return stm32_hash_cra_init_algs(tfm, "sha256");
}
static irqreturn_t stm32_hash_irq_thread(int irq, void *dev_id)
{
	struct stm32_hash_dev *hdev = dev_id;

	if (HASH_FLAGS_CPU & hdev->flags) {
		if (HASH_FLAGS_OUTPUT_READY & hdev->flags) {
			hdev->flags &= ~HASH_FLAGS_OUTPUT_READY;
			goto finish;
		}
	} else if (HASH_FLAGS_DMA_READY & hdev->flags) {
		if (HASH_FLAGS_DMA_ACTIVE & hdev->flags) {
			hdev->flags &= ~HASH_FLAGS_DMA_ACTIVE;
			goto finish;
		}
	}

	return IRQ_HANDLED;

finish:
	/* Finish current request */
	stm32_hash_finish_req(hdev->req, 0);

	return IRQ_HANDLED;
}
static irqreturn_t stm32_hash_irq_handler(int irq, void *dev_id)
{
	struct stm32_hash_dev *hdev = dev_id;
	u32 reg;

	reg = stm32_hash_read(hdev, HASH_SR);
	if (reg & HASH_SR_OUTPUT_READY) {
		reg &= ~HASH_SR_OUTPUT_READY;
		stm32_hash_write(hdev, HASH_SR, reg);
		hdev->flags |= HASH_FLAGS_OUTPUT_READY;
		/* Disable interrupts; the threaded handler finishes the request */
		stm32_hash_write(hdev, HASH_IMR, 0);
		return IRQ_WAKE_THREAD;
	}

	return IRQ_NONE;
}
static struct ahash_alg algs_md5_sha1[] = {
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_request_ctx),
			.base = {
				.cra_name = "md5",
				.cra_driver_name = "stm32-md5",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_init,
				.cra_module = THIS_MODULE,
			}
		}
	},
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.setkey = stm32_hash_setkey,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_request_ctx),
			.base = {
				.cra_name = "hmac(md5)",
				.cra_driver_name = "stm32-hmac-md5",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_md5_init,
				.cra_module = THIS_MODULE,
			}
		}
	},
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_request_ctx),
			.base = {
				.cra_name = "sha1",
				.cra_driver_name = "stm32-sha1",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_init,
				.cra_module = THIS_MODULE,
			}
		}
	},
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.setkey = stm32_hash_setkey,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_request_ctx),
			.base = {
				.cra_name = "hmac(sha1)",
				.cra_driver_name = "stm32-hmac-sha1",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_sha1_init,
				.cra_module = THIS_MODULE,
			}
		}
	},
};
static struct ahash_alg algs_sha224_sha256[] = {
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.halg = {
			.digestsize = SHA224_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_request_ctx),
			.base = {
				.cra_name = "sha224",
				.cra_driver_name = "stm32-sha224",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_init,
				.cra_module = THIS_MODULE,
			}
		}
	},
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.setkey = stm32_hash_setkey,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.halg = {
			.digestsize = SHA224_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_request_ctx),
			.base = {
				.cra_name = "hmac(sha224)",
				.cra_driver_name = "stm32-hmac-sha224",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_sha224_init,
				.cra_module = THIS_MODULE,
			}
		}
	},
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.halg = {
			.digestsize = SHA256_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_request_ctx),
			.base = {
				.cra_name = "sha256",
				.cra_driver_name = "stm32-sha256",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_init,
				.cra_module = THIS_MODULE,
			}
		}
	},
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.setkey = stm32_hash_setkey,
		.halg = {
			.digestsize = SHA256_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_request_ctx),
			.base = {
				.cra_name = "hmac(sha256)",
				.cra_driver_name = "stm32-hmac-sha256",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_sha256_init,
				.cra_module = THIS_MODULE,
			}
		}
	},
};
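/*
 * Illustrative kernel-side usage (caller code, not part of this
 * driver): once these algorithms are registered, a priority-200
 * "sha256" implementation is available through the regular ahash API:
 *
 *	struct crypto_ahash *tfm;
 *
 *	tfm = crypto_alloc_ahash("sha256", 0, 0);
 *	if (!IS_ERR(tfm)) {
 *		...
 *		crypto_free_ahash(tfm);
 *	}
 *
 * Requesting "stm32-sha256" by driver name selects this hardware
 * implementation explicitly.
 */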
static int stm32_hash_register_algs(struct stm32_hash_dev *hdev)
{
	unsigned int i, j;
	int err;

	for (i = 0; i < hdev->pdata->algs_info_size; i++) {
		for (j = 0; j < hdev->pdata->algs_info[i].size; j++) {
			err = crypto_register_ahash(
				&hdev->pdata->algs_info[i].algs_list[j]);
			if (err)
				goto err_algs;
		}
	}

	return 0;

err_algs:
	dev_err(hdev->dev, "Algo %d : %d failed\n", i, j);
	for (; i--; ) {
		for (; j--;)
			crypto_unregister_ahash(
				&hdev->pdata->algs_info[i].algs_list[j]);
	}

	return err;
}
static int stm32_hash_unregister_algs(struct stm32_hash_dev *hdev)
{
	unsigned int i, j;

	for (i = 0; i < hdev->pdata->algs_info_size; i++) {
		for (j = 0; j < hdev->pdata->algs_info[i].size; j++)
			crypto_unregister_ahash(
				&hdev->pdata->algs_info[i].algs_list[j]);
	}

	return 0;
}
static struct stm32_hash_algs_info stm32_hash_algs_info_stm32f4[] = {
	{
		.algs_list	= algs_md5_sha1,
		.size		= ARRAY_SIZE(algs_md5_sha1),
	},
};

static const struct stm32_hash_pdata stm32_hash_pdata_stm32f4 = {
	.algs_info	= stm32_hash_algs_info_stm32f4,
	.algs_info_size	= ARRAY_SIZE(stm32_hash_algs_info_stm32f4),
};

static struct stm32_hash_algs_info stm32_hash_algs_info_stm32f7[] = {
	{
		.algs_list	= algs_md5_sha1,
		.size		= ARRAY_SIZE(algs_md5_sha1),
	},
	{
		.algs_list	= algs_sha224_sha256,
		.size		= ARRAY_SIZE(algs_sha224_sha256),
	},
};

static const struct stm32_hash_pdata stm32_hash_pdata_stm32f7 = {
	.algs_info	= stm32_hash_algs_info_stm32f7,
	.algs_info_size	= ARRAY_SIZE(stm32_hash_algs_info_stm32f7),
};

static const struct of_device_id stm32_hash_of_match[] = {
	{
		.compatible = "st,stm32f456-hash",
		.data = &stm32_hash_pdata_stm32f4,
	},
	{
		.compatible = "st,stm32f756-hash",
		.data = &stm32_hash_pdata_stm32f7,
	},
	{},
};

MODULE_DEVICE_TABLE(of, stm32_hash_of_match);
static int stm32_hash_get_of_match(struct stm32_hash_dev *hdev,
				   struct device *dev)
{
	hdev->pdata = of_device_get_match_data(dev);
	if (!hdev->pdata) {
		dev_err(dev, "no compatible OF match\n");
		return -EINVAL;
	}

	if (of_property_read_u32(dev->of_node, "dma-maxburst",
				 &hdev->dma_maxburst)) {
		dev_info(dev, "dma-maxburst not specified, using 0\n");
		hdev->dma_maxburst = 0;
	}

	return 0;
}
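/*
 * Devices that omit the optional "dma-maxburst" property fall back to
 * single-beat transfers (maxburst = 0). A hypothetical overlay raising
 * it (the value is purely illustrative):
 *
 *	dma-maxburst = <2>;
 */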
static int stm32_hash_probe(struct platform_device *pdev)
{
	struct stm32_hash_dev *hdev;
	struct device *dev = &pdev->dev;
	struct resource *res;
	int ret, irq;

	hdev = devm_kzalloc(dev, sizeof(*hdev), GFP_KERNEL);
	if (!hdev)
		return -ENOMEM;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	hdev->io_base = devm_ioremap_resource(dev, res);
	if (IS_ERR(hdev->io_base))
		return PTR_ERR(hdev->io_base);

	hdev->phys_base = res->start;

	ret = stm32_hash_get_of_match(hdev, dev);
	if (ret)
		return ret;

	irq = platform_get_irq(pdev, 0);
	if (irq < 0) {
		dev_err(dev, "Cannot get IRQ resource\n");
		return irq;
	}

	ret = devm_request_threaded_irq(dev, irq, stm32_hash_irq_handler,
					stm32_hash_irq_thread, IRQF_ONESHOT,
					dev_name(dev), hdev);
	if (ret) {
		dev_err(dev, "Cannot grab IRQ\n");
		return ret;
	}

	hdev->clk = devm_clk_get(&pdev->dev, NULL);
	if (IS_ERR(hdev->clk)) {
		dev_err(dev, "failed to get clock for hash (%lu)\n",
			PTR_ERR(hdev->clk));
		return PTR_ERR(hdev->clk);
	}

	ret = clk_prepare_enable(hdev->clk);
	if (ret) {
		dev_err(dev, "failed to enable hash clock (%d)\n", ret);
		return ret;
	}

	pm_runtime_set_autosuspend_delay(dev, HASH_AUTOSUSPEND_DELAY);
	pm_runtime_use_autosuspend(dev);

	pm_runtime_get_noresume(dev);
	pm_runtime_set_active(dev);
	pm_runtime_enable(dev);

	hdev->rst = devm_reset_control_get(&pdev->dev, NULL);
	if (!IS_ERR(hdev->rst)) {
		reset_control_assert(hdev->rst);
		udelay(2);
		reset_control_deassert(hdev->rst);
	}

	hdev->dev = dev;

	platform_set_drvdata(pdev, hdev);

	ret = stm32_hash_dma_init(hdev);
	if (ret)
		dev_dbg(dev, "DMA mode not available\n");

	spin_lock(&stm32_hash.lock);
	list_add_tail(&hdev->list, &stm32_hash.dev_list);
	spin_unlock(&stm32_hash.lock);

	/* Initialize crypto engine */
	hdev->engine = crypto_engine_alloc_init(dev, 1);
	if (!hdev->engine) {
		ret = -ENOMEM;
		goto err_engine;
	}

	ret = crypto_engine_start(hdev->engine);
	if (ret)
		goto err_engine_start;

	hdev->dma_mode = stm32_hash_read(hdev, HASH_HWCFGR);

	/* Register algos */
	ret = stm32_hash_register_algs(hdev);
	if (ret)
		goto err_algs;

	dev_info(dev, "Init HASH done HW ver %x DMA mode %u\n",
		 stm32_hash_read(hdev, HASH_VER), hdev->dma_mode);

	pm_runtime_put_sync(dev);

	return 0;

err_algs:
err_engine_start:
	crypto_engine_exit(hdev->engine);
err_engine:
	spin_lock(&stm32_hash.lock);
	list_del(&hdev->list);
	spin_unlock(&stm32_hash.lock);

	if (hdev->dma_lch)
		dma_release_channel(hdev->dma_lch);

	pm_runtime_disable(dev);
	pm_runtime_put_noidle(dev);

	clk_disable_unprepare(hdev->clk);

	return ret;
}
static int stm32_hash_remove(struct platform_device *pdev)
{
	struct stm32_hash_dev *hdev;
	int ret;

	hdev = platform_get_drvdata(pdev);
	if (!hdev)
		return -ENODEV;

	ret = pm_runtime_get_sync(hdev->dev);
	if (ret < 0)
		return ret;

	stm32_hash_unregister_algs(hdev);

	crypto_engine_exit(hdev->engine);

	spin_lock(&stm32_hash.lock);
	list_del(&hdev->list);
	spin_unlock(&stm32_hash.lock);

	if (hdev->dma_lch)
		dma_release_channel(hdev->dma_lch);

	pm_runtime_disable(hdev->dev);
	pm_runtime_put_noidle(hdev->dev);

	clk_disable_unprepare(hdev->clk);

	return 0;
}
#ifdef CONFIG_PM
static int stm32_hash_runtime_suspend(struct device *dev)
{
	struct stm32_hash_dev *hdev = dev_get_drvdata(dev);

	clk_disable_unprepare(hdev->clk);

	return 0;
}

static int stm32_hash_runtime_resume(struct device *dev)
{
	struct stm32_hash_dev *hdev = dev_get_drvdata(dev);
	int ret;

	ret = clk_prepare_enable(hdev->clk);
	if (ret) {
		dev_err(hdev->dev, "Failed to prepare_enable clock\n");
		return ret;
	}

	return 0;
}
#endif
static const struct dev_pm_ops stm32_hash_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
				pm_runtime_force_resume)
	SET_RUNTIME_PM_OPS(stm32_hash_runtime_suspend,
			   stm32_hash_runtime_resume, NULL)
};
static struct platform_driver stm32_hash_driver = {
	.probe		= stm32_hash_probe,
	.remove		= stm32_hash_remove,
	.driver		= {
		.name	= "stm32-hash",
		.pm	= &stm32_hash_pm_ops,
		.of_match_table = stm32_hash_of_match,
	}
};

module_platform_driver(stm32_hash_driver);
MODULE_DESCRIPTION("STM32 SHA1/224/256 & MD5 (HMAC) hw accelerator driver");
MODULE_LICENSE("GPL v2");