/*
 * This file is part of STM32 Crypto driver for Linux.
 *
 * Copyright (C) 2017, STMicroelectronics - All Rights Reserved
 *
 * License terms: GPL V2.0.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published by
 * the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
 * details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program. If not, see <http://www.gnu.org/licenses/>.
 */

#include <linux/clk.h>
#include <linux/crypto.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/iopoll.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>

#include <crypto/engine.h>
#include <crypto/hash.h>
#include <crypto/md5.h>
#include <crypto/scatterwalk.h>
#include <crypto/sha.h>
#include <crypto/internal/hash.h>

#define HASH_CR			0x00
#define HASH_DIN		0x04
#define HASH_STR		0x08
#define HASH_IMR		0x20
#define HASH_SR			0x24
#define HASH_CSR(x)		(0x0F8 + ((x) * 0x04))
#define HASH_HREG(x)		(0x310 + ((x) * 0x04))
#define HASH_HWCFGR		0x3F0
#define HASH_VER		0x3F4

/* Control Register */
#define HASH_CR_INIT		BIT(2)
#define HASH_CR_DMAE		BIT(3)
#define HASH_CR_DATATYPE_POS	4
#define HASH_CR_MODE		BIT(6)
#define HASH_CR_MDMAT		BIT(13)
#define HASH_CR_DMAA		BIT(14)
#define HASH_CR_LKEY		BIT(16)

#define HASH_CR_ALGO_SHA1	0x0
#define HASH_CR_ALGO_MD5	0x80
#define HASH_CR_ALGO_SHA224	0x40000
#define HASH_CR_ALGO_SHA256	0x40080

/* Interrupt */
#define HASH_DINIE		BIT(0)
#define HASH_DCIE		BIT(1)

/* Interrupt Mask */
#define HASH_MASK_CALC_COMPLETION	BIT(0)
#define HASH_MASK_DATA_INPUT		BIT(1)

/* Context swap register */
#define HASH_CSR_REGISTER_NUMBER	53

/* Status Flags */
#define HASH_SR_DATA_INPUT_READY	BIT(0)
#define HASH_SR_OUTPUT_READY		BIT(1)
#define HASH_SR_DMA_ACTIVE		BIT(2)
#define HASH_SR_BUSY			BIT(3)

/* STR Register */
#define HASH_STR_NBLW_MASK	GENMASK(4, 0)
#define HASH_STR_DCAL		BIT(8)

#define HASH_FLAGS_INIT		BIT(0)
#define HASH_FLAGS_OUTPUT_READY	BIT(1)
#define HASH_FLAGS_CPU		BIT(2)
#define HASH_FLAGS_DMA_READY	BIT(3)
#define HASH_FLAGS_DMA_ACTIVE	BIT(4)
#define HASH_FLAGS_HMAC_INIT	BIT(5)
#define HASH_FLAGS_HMAC_FINAL	BIT(6)
#define HASH_FLAGS_HMAC_KEY	BIT(7)

#define HASH_FLAGS_FINAL	BIT(15)
#define HASH_FLAGS_FINUP	BIT(16)
#define HASH_FLAGS_ALGO_MASK	GENMASK(21, 18)
#define HASH_FLAGS_MD5		BIT(18)
#define HASH_FLAGS_SHA1		BIT(19)
#define HASH_FLAGS_SHA224	BIT(20)
#define HASH_FLAGS_SHA256	BIT(21)
#define HASH_FLAGS_ERRORS	BIT(22)
#define HASH_FLAGS_HMAC		BIT(23)

#define HASH_OP_UPDATE		1
#define HASH_OP_FINAL		2

enum stm32_hash_data_format {
	HASH_DATA_32_BITS	= 0x0,
	HASH_DATA_16_BITS	= 0x1,
	HASH_DATA_8_BITS	= 0x2,
	HASH_DATA_1_BIT		= 0x3
};

#define HASH_BUFLEN		256
#define HASH_LONG_KEY		64
#define HASH_MAX_KEY_SIZE	(SHA256_BLOCK_SIZE * 8)
#define HASH_QUEUE_LENGTH	16
#define HASH_DMA_THRESHOLD	50

#define HASH_AUTOSUSPEND_DELAY	50

struct stm32_hash_ctx {
	struct crypto_engine_ctx enginectx;
	struct stm32_hash_dev	*hdev;
	unsigned long		flags;

	u8			key[HASH_MAX_KEY_SIZE];
	int			keylen;
};

struct stm32_hash_request_ctx {
	struct stm32_hash_dev	*hdev;
	unsigned long		flags;
	unsigned long		op;

	u8 digest[SHA256_DIGEST_SIZE] __aligned(sizeof(u32));
	size_t			digcnt;
	size_t			bufcnt;
	size_t			buflen;

	/* DMA */
	struct scatterlist	*sg;
	unsigned int		offset;
	unsigned int		total;
	struct scatterlist	sg_key;

	dma_addr_t		dma_addr;
	size_t			dma_ct;
	int			nents;

	u8			data_type;

	u8 buffer[HASH_BUFLEN] __aligned(sizeof(u32));

	/* Export Context */
	u32			*hw_context;
};

struct stm32_hash_algs_info {
	struct ahash_alg	*algs_list;
	size_t			size;
};

struct stm32_hash_pdata {
	struct stm32_hash_algs_info	*algs_info;
	size_t				algs_info_size;
};

struct stm32_hash_dev {
	struct list_head	list;
	struct device		*dev;
	struct clk		*clk;
	struct reset_control	*rst;
	void __iomem		*io_base;
	phys_addr_t		phys_base;
	u32			dma_mode;
	u32			dma_maxburst;

	struct ahash_request	*req;
	struct crypto_engine	*engine;

	int			err;
	unsigned long		flags;

	struct dma_chan		*dma_lch;
	struct completion	dma_completion;

	const struct stm32_hash_pdata	*pdata;
};

struct stm32_hash_drv {
	struct list_head	dev_list;
	spinlock_t		lock; /* List protection access */
};

static struct stm32_hash_drv stm32_hash = {
	.dev_list = LIST_HEAD_INIT(stm32_hash.dev_list),
	.lock = __SPIN_LOCK_UNLOCKED(stm32_hash.lock),
};

static void stm32_hash_dma_callback(void *param);

static inline u32 stm32_hash_read(struct stm32_hash_dev *hdev, u32 offset)
{
	return readl_relaxed(hdev->io_base + offset);
}

static inline void stm32_hash_write(struct stm32_hash_dev *hdev,
				    u32 offset, u32 value)
{
	writel_relaxed(value, hdev->io_base + offset);
}

static inline int stm32_hash_wait_busy(struct stm32_hash_dev *hdev)
{
	u32 status;

	return readl_relaxed_poll_timeout(hdev->io_base + HASH_SR, status,
					  !(status & HASH_SR_BUSY), 10, 10000);
}

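/*
 * NBLW tells the peripheral how many bits of the last 32-bit word pushed
 * into HASH_DIN are valid; with byte-granular input this is
 * 8 * (length % 4).
 */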
static void stm32_hash_set_nblw(struct stm32_hash_dev *hdev, int length)
{
	u32 reg;

	reg = stm32_hash_read(hdev, HASH_STR);
	reg &= ~(HASH_STR_NBLW_MASK);
	reg |= (8U * ((length) % 4U));
	stm32_hash_write(hdev, HASH_STR, reg);
}

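/*
 * Feed the HMAC key into HASH_DIN one 32-bit word at a time, then set
 * DCAL to start the key digest.  Returns -EINPROGRESS while the
 * peripheral is consuming the key, 0 if there is no key to write.
 */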
static int stm32_hash_write_key(struct stm32_hash_dev *hdev)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(hdev->req);
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	u32 reg;
	int keylen = ctx->keylen;
	void *key = ctx->key;

	if (keylen) {
		stm32_hash_set_nblw(hdev, keylen);

		while (keylen > 0) {
			stm32_hash_write(hdev, HASH_DIN, *(u32 *)key);
			keylen -= 4;
			key += 4;
		}

		reg = stm32_hash_read(hdev, HASH_STR);
		reg |= HASH_STR_DCAL;
		stm32_hash_write(hdev, HASH_STR, reg);

		return -EINPROGRESS;
	}

	return 0;
}

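/*
 * Program HASH_CR for the current request: algorithm, data type and, for
 * HMAC, the mode bit plus the long-key bit when the key exceeds the
 * block size.  Done only once per request, guarded by HASH_FLAGS_INIT.
 */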
static void stm32_hash_write_ctrl(struct stm32_hash_dev *hdev)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(hdev->req);
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);

	u32 reg = HASH_CR_INIT;

	if (!(hdev->flags & HASH_FLAGS_INIT)) {
		switch (rctx->flags & HASH_FLAGS_ALGO_MASK) {
		case HASH_FLAGS_MD5:
			reg |= HASH_CR_ALGO_MD5;
			break;
		case HASH_FLAGS_SHA1:
			reg |= HASH_CR_ALGO_SHA1;
			break;
		case HASH_FLAGS_SHA224:
			reg |= HASH_CR_ALGO_SHA224;
			break;
		case HASH_FLAGS_SHA256:
			reg |= HASH_CR_ALGO_SHA256;
			break;
		default:
			reg |= HASH_CR_ALGO_MD5;
		}

		reg |= (rctx->data_type << HASH_CR_DATATYPE_POS);

		if (rctx->flags & HASH_FLAGS_HMAC) {
			hdev->flags |= HASH_FLAGS_HMAC;
			reg |= HASH_CR_MODE;
			if (ctx->keylen > HASH_LONG_KEY)
				reg |= HASH_CR_LKEY;
		}

		stm32_hash_write(hdev, HASH_IMR, HASH_DCIE);

		stm32_hash_write(hdev, HASH_CR, reg);

		hdev->flags |= HASH_FLAGS_INIT;

		dev_dbg(hdev->dev, "Write Control %x\n", reg);
	}
}

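/*
 * Copy as much request data as fits from the scatterlist into the
 * request's staging buffer, advancing the sg/offset/total bookkeeping.
 */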
static void stm32_hash_append_sg(struct stm32_hash_request_ctx *rctx)
{
	size_t count;

	while ((rctx->bufcnt < rctx->buflen) && rctx->total) {
		count = min(rctx->sg->length - rctx->offset, rctx->total);
		count = min(count, rctx->buflen - rctx->bufcnt);

		if (count <= 0) {
			if ((rctx->sg->length == 0) && !sg_is_last(rctx->sg)) {
				rctx->sg = sg_next(rctx->sg);
				continue;
			} else {
				break;
			}
		}

		scatterwalk_map_and_copy(rctx->buffer + rctx->bufcnt, rctx->sg,
					 rctx->offset, count, 0);

		rctx->bufcnt += count;
		rctx->offset += count;
		rctx->total -= count;

		if (rctx->offset == rctx->sg->length) {
			rctx->sg = sg_next(rctx->sg);
			if (rctx->sg)
				rctx->offset = 0;
			else
				rctx->total = 0;
		}
	}
}

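/*
 * Push a buffer through the CPU (PIO) data path: write the control
 * register, feed 32-bit words into HASH_DIN and, on the final chunk, set
 * NBLW/DCAL to start the digest calculation.
 */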
static int stm32_hash_xmit_cpu(struct stm32_hash_dev *hdev,
			       const u8 *buf, size_t length, int final)
{
	unsigned int count, len32;
	const u32 *buffer = (const u32 *)buf;
	u32 reg;

	if (final)
		hdev->flags |= HASH_FLAGS_FINAL;

	len32 = DIV_ROUND_UP(length, sizeof(u32));

	dev_dbg(hdev->dev, "%s: length: %zd, final: %x len32 %i\n",
		__func__, length, final, len32);

	hdev->flags |= HASH_FLAGS_CPU;

	stm32_hash_write_ctrl(hdev);

	if (stm32_hash_wait_busy(hdev))
		return -ETIMEDOUT;

	/* The HMAC key must be loaded once, before the first data chunk */
	if ((hdev->flags & HASH_FLAGS_HMAC) &&
	    !(hdev->flags & HASH_FLAGS_HMAC_KEY)) {
		hdev->flags |= HASH_FLAGS_HMAC_KEY;
		stm32_hash_write_key(hdev);
		if (stm32_hash_wait_busy(hdev))
			return -ETIMEDOUT;
	}

	for (count = 0; count < len32; count++)
		stm32_hash_write(hdev, HASH_DIN, buffer[count]);

	if (final) {
		stm32_hash_set_nblw(hdev, length);
		reg = stm32_hash_read(hdev, HASH_STR);
		reg |= HASH_STR_DCAL;
		stm32_hash_write(hdev, HASH_STR, reg);
		if (hdev->flags & HASH_FLAGS_HMAC) {
			if (stm32_hash_wait_busy(hdev))
				return -ETIMEDOUT;
			stm32_hash_write_key(hdev);
		}
		return -EINPROGRESS;
	}

	return 0;
}

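/*
 * CPU-path update: drain full staging buffers through
 * stm32_hash_xmit_cpu() and, on finup, flush whatever remains as the
 * final chunk.
 */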
static int stm32_hash_update_cpu(struct stm32_hash_dev *hdev)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);
	int bufcnt, err = 0, final;

	dev_dbg(hdev->dev, "%s flags %lx\n", __func__, rctx->flags);

	final = (rctx->flags & HASH_FLAGS_FINUP);

	while ((rctx->total >= rctx->buflen) ||
	       (rctx->bufcnt + rctx->total >= rctx->buflen)) {
		stm32_hash_append_sg(rctx);
		bufcnt = rctx->bufcnt;
		rctx->bufcnt = 0;
		err = stm32_hash_xmit_cpu(hdev, rctx->buffer, bufcnt, 0);
	}

	stm32_hash_append_sg(rctx);

	if (final) {
		bufcnt = rctx->bufcnt;
		rctx->bufcnt = 0;
		err = stm32_hash_xmit_cpu(hdev, rctx->buffer, bufcnt,
					  (rctx->flags & HASH_FLAGS_FINUP));
	}

	return err;
}

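/*
 * Submit one scatterlist entry to the DMA engine and wait (100 ms
 * timeout) for the transfer-complete callback.  MDMAT keeps the
 * peripheral expecting further DMA transfers; it is cleared for the last
 * chunk so the digest can be finalized.
 */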
static int stm32_hash_xmit_dma(struct stm32_hash_dev *hdev,
			       struct scatterlist *sg, int length, int mdma)
{
	struct dma_async_tx_descriptor *in_desc;
	dma_cookie_t cookie;
	u32 reg;
	int err;

	in_desc = dmaengine_prep_slave_sg(hdev->dma_lch, sg, 1,
					  DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT |
					  DMA_CTRL_ACK);
	if (!in_desc) {
		dev_err(hdev->dev, "dmaengine_prep_slave error\n");
		return -ENOMEM;
	}

	reinit_completion(&hdev->dma_completion);
	in_desc->callback = stm32_hash_dma_callback;
	in_desc->callback_param = hdev;

	hdev->flags |= HASH_FLAGS_FINAL;
	hdev->flags |= HASH_FLAGS_DMA_ACTIVE;

	reg = stm32_hash_read(hdev, HASH_CR);

	if (mdma)
		reg |= HASH_CR_MDMAT;
	else
		reg &= ~HASH_CR_MDMAT;

	reg |= HASH_CR_DMAE;

	stm32_hash_write(hdev, HASH_CR, reg);

	stm32_hash_set_nblw(hdev, length);

	cookie = dmaengine_submit(in_desc);
	err = dma_submit_error(cookie);
	if (err)
		return -ENOMEM;

	dma_async_issue_pending(hdev->dma_lch);

	if (!wait_for_completion_interruptible_timeout(&hdev->dma_completion,
						       msecs_to_jiffies(100)))
		err = -ETIMEDOUT;

	if (dma_async_is_tx_complete(hdev->dma_lch, cookie,
				     NULL, NULL) != DMA_COMPLETE)
		err = -ETIMEDOUT;

	if (err) {
		dev_err(hdev->dev, "DMA Error %i\n", err);
		dmaengine_terminate_all(hdev->dma_lch);
		return err;
	}

	return -EINPROGRESS;
}

static void stm32_hash_dma_callback(void *param)
{
	struct stm32_hash_dev *hdev = param;

	complete(&hdev->dma_completion);

	hdev->flags |= HASH_FLAGS_DMA_READY;
}

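/*
 * Send the HMAC key: short keys (or dma_mode 1 hardware) go through the
 * CPU path, longer keys are mapped and pushed by DMA.
 */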
static int stm32_hash_hmac_dma_send(struct stm32_hash_dev *hdev)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(hdev->req);
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	int err;

	if (ctx->keylen < HASH_DMA_THRESHOLD || (hdev->dma_mode == 1)) {
		err = stm32_hash_write_key(hdev);
		if (stm32_hash_wait_busy(hdev))
			return -ETIMEDOUT;
	} else {
		if (!(hdev->flags & HASH_FLAGS_HMAC_KEY))
			sg_init_one(&rctx->sg_key, ctx->key,
				    ALIGN(ctx->keylen, sizeof(u32)));

		rctx->dma_ct = dma_map_sg(hdev->dev, &rctx->sg_key, 1,
					  DMA_TO_DEVICE);
		if (rctx->dma_ct == 0) {
			dev_err(hdev->dev, "dma_map_sg error\n");
			return -ENOMEM;
		}

		err = stm32_hash_xmit_dma(hdev, &rctx->sg_key, ctx->keylen, 0);

		dma_unmap_sg(hdev->dev, &rctx->sg_key, 1, DMA_TO_DEVICE);
	}

	return err;
}

static int stm32_hash_dma_init(struct stm32_hash_dev *hdev)
{
	struct dma_slave_config dma_conf;
	int err;

	memset(&dma_conf, 0, sizeof(dma_conf));

	dma_conf.direction = DMA_MEM_TO_DEV;
	dma_conf.dst_addr = hdev->phys_base + HASH_DIN;
	dma_conf.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
	dma_conf.src_maxburst = hdev->dma_maxburst;
	dma_conf.dst_maxburst = hdev->dma_maxburst;
	dma_conf.device_fc = false;

	hdev->dma_lch = dma_request_slave_channel(hdev->dev, "in");
	if (!hdev->dma_lch) {
		dev_err(hdev->dev, "Couldn't acquire a slave DMA channel.\n");
		return -EBUSY;
	}

	err = dmaengine_slave_config(hdev->dma_lch, &dma_conf);
	if (err) {
		dma_release_channel(hdev->dma_lch);
		hdev->dma_lch = NULL;
		dev_err(hdev->dev, "Couldn't configure DMA slave.\n");
		return err;
	}

	init_completion(&hdev->dma_completion);

	return 0;
}

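/*
 * DMA-path transfer of the whole request: each scatterlist entry is sent
 * in turn; on dma_mode 1 hardware the unaligned tail is first copied to
 * the staging buffer, then written out by CPU once DMA completes.
 */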
static int stm32_hash_dma_send(struct stm32_hash_dev *hdev)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);
	struct scatterlist sg[1], *tsg;
	int err = 0, len = 0, reg, ncp = 0;
	unsigned int i;
	u32 *buffer = (void *)rctx->buffer;

	rctx->sg = hdev->req->src;
	rctx->total = hdev->req->nbytes;

	rctx->nents = sg_nents(rctx->sg);

	if (rctx->nents < 0)
		return -EINVAL;

	stm32_hash_write_ctrl(hdev);

	if (hdev->flags & HASH_FLAGS_HMAC) {
		err = stm32_hash_hmac_dma_send(hdev);
		if (err != -EINPROGRESS)
			return err;
	}

	for_each_sg(rctx->sg, tsg, rctx->nents, i) {
		sg[0] = *tsg;
		len = sg->length;

		if (sg_is_last(sg)) {
			if (hdev->dma_mode == 1) {
				len = (ALIGN(sg->length, 16) - 16);

				ncp = sg_pcopy_to_buffer(
					rctx->sg, rctx->nents,
					rctx->buffer, sg->length - len,
					rctx->total - sg->length + len);

				sg->length = len;
			} else {
				if (!(IS_ALIGNED(sg->length, sizeof(u32)))) {
					len = sg->length;
					sg->length = ALIGN(sg->length,
							   sizeof(u32));
				}
			}
		}

		rctx->dma_ct = dma_map_sg(hdev->dev, sg, 1,
					  DMA_TO_DEVICE);
		if (rctx->dma_ct == 0) {
			dev_err(hdev->dev, "dma_map_sg error\n");
			return -ENOMEM;
		}

		err = stm32_hash_xmit_dma(hdev, sg, len,
					  !sg_is_last(sg));

		dma_unmap_sg(hdev->dev, sg, 1, DMA_TO_DEVICE);

		if (err == -ENOMEM)
			return err;
	}

	if (hdev->dma_mode == 1) {
		if (stm32_hash_wait_busy(hdev))
			return -ETIMEDOUT;
		reg = stm32_hash_read(hdev, HASH_CR);
		reg &= ~HASH_CR_DMAE;
		reg |= HASH_CR_DMAA;
		stm32_hash_write(hdev, HASH_CR, reg);

		if (ncp) {
			/* Zero-pad the copied tail up to a 32-bit boundary */
			memset((u8 *)buffer + ncp, 0,
			       DIV_ROUND_UP(ncp, sizeof(u32)) * sizeof(u32) -
			       ncp);
			writesl(hdev->io_base + HASH_DIN, buffer,
				DIV_ROUND_UP(ncp, sizeof(u32)));
		}
		stm32_hash_set_nblw(hdev, ncp);
		reg = stm32_hash_read(hdev, HASH_STR);
		reg |= HASH_STR_DCAL;
		stm32_hash_write(hdev, HASH_STR, reg);
		err = -EINPROGRESS;
	}

	if (hdev->flags & HASH_FLAGS_HMAC) {
		if (stm32_hash_wait_busy(hdev))
			return -ETIMEDOUT;
		err = stm32_hash_hmac_dma_send(hdev);
	}

	return err;
}

static struct stm32_hash_dev *stm32_hash_find_dev(struct stm32_hash_ctx *ctx)
{
	struct stm32_hash_dev *hdev = NULL, *tmp;

	spin_lock_bh(&stm32_hash.lock);
	if (!ctx->hdev) {
		list_for_each_entry(tmp, &stm32_hash.dev_list, list) {
			hdev = tmp;
			break;
		}
		ctx->hdev = hdev;
	} else {
		hdev = ctx->hdev;
	}

	spin_unlock_bh(&stm32_hash.lock);

	return hdev;
}

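/*
 * DMA is only worthwhile above HASH_DMA_THRESHOLD bytes and only when
 * every scatterlist entry (except possibly the last) is 32-bit aligned.
 */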
static bool stm32_hash_dma_aligned_data(struct ahash_request *req)
{
	struct scatterlist *sg;
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
	int i;

	if (req->nbytes <= HASH_DMA_THRESHOLD)
		return false;

	if (sg_nents(req->src) > 1) {
		if (hdev->dma_mode == 1)
			return false;
		for_each_sg(req->src, sg, sg_nents(req->src), i) {
			if ((!IS_ALIGNED(sg->length, sizeof(u32))) &&
			    (!sg_is_last(sg)))
				return false;
		}
	}

	if (req->src->offset % 4)
		return false;

	return true;
}

static int stm32_hash_init(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);

	rctx->hdev = hdev;

	rctx->flags = HASH_FLAGS_CPU;

	rctx->digcnt = crypto_ahash_digestsize(tfm);
	switch (rctx->digcnt) {
	case MD5_DIGEST_SIZE:
		rctx->flags |= HASH_FLAGS_MD5;
		break;
	case SHA1_DIGEST_SIZE:
		rctx->flags |= HASH_FLAGS_SHA1;
		break;
	case SHA224_DIGEST_SIZE:
		rctx->flags |= HASH_FLAGS_SHA224;
		break;
	case SHA256_DIGEST_SIZE:
		rctx->flags |= HASH_FLAGS_SHA256;
		break;
	default:
		return -EINVAL;
	}

	rctx->bufcnt = 0;
	rctx->buflen = HASH_BUFLEN;
	rctx->total = 0;
	rctx->offset = 0;
	rctx->data_type = HASH_DATA_8_BITS;

	memset(rctx->buffer, 0, HASH_BUFLEN);

	if (ctx->flags & HASH_FLAGS_HMAC)
		rctx->flags |= HASH_FLAGS_HMAC;

	dev_dbg(hdev->dev, "%s Flags %lx\n", __func__, rctx->flags);

	return 0;
}

static int stm32_hash_update_req(struct stm32_hash_dev *hdev)
{
	return stm32_hash_update_cpu(hdev);
}

static int stm32_hash_final_req(struct stm32_hash_dev *hdev)
{
	struct ahash_request *req = hdev->req;
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
	int err;
	int buflen = rctx->bufcnt;

	rctx->bufcnt = 0;

	if (!(rctx->flags & HASH_FLAGS_CPU))
		err = stm32_hash_dma_send(hdev);
	else
		err = stm32_hash_xmit_cpu(hdev, rctx->buffer, buflen, 1);

	return err;
}

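/*
 * Read the digest back from the HASH_HR registers; each word is
 * byte-swapped into digest order via be32_to_cpu().
 */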
static void stm32_hash_copy_hash(struct ahash_request *req)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
	u32 *hash = (u32 *)rctx->digest;
	unsigned int i, hashsize;

	switch (rctx->flags & HASH_FLAGS_ALGO_MASK) {
	case HASH_FLAGS_MD5:
		hashsize = MD5_DIGEST_SIZE;
		break;
	case HASH_FLAGS_SHA1:
		hashsize = SHA1_DIGEST_SIZE;
		break;
	case HASH_FLAGS_SHA224:
		hashsize = SHA224_DIGEST_SIZE;
		break;
	case HASH_FLAGS_SHA256:
		hashsize = SHA256_DIGEST_SIZE;
		break;
	default:
		return;
	}

	for (i = 0; i < hashsize / sizeof(u32); i++)
		hash[i] = be32_to_cpu(stm32_hash_read(rctx->hdev,
						      HASH_HREG(i)));
}

static int stm32_hash_finish(struct ahash_request *req)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (!req->result)
		return -EINVAL;

	memcpy(req->result, rctx->digest, rctx->digcnt);

	return 0;
}

static void stm32_hash_finish_req(struct ahash_request *req, int err)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct stm32_hash_dev *hdev = rctx->hdev;

	if (!err && (HASH_FLAGS_FINAL & hdev->flags)) {
		stm32_hash_copy_hash(req);
		err = stm32_hash_finish(req);
		hdev->flags &= ~(HASH_FLAGS_FINAL | HASH_FLAGS_CPU |
				 HASH_FLAGS_INIT | HASH_FLAGS_DMA_READY |
				 HASH_FLAGS_OUTPUT_READY | HASH_FLAGS_HMAC |
				 HASH_FLAGS_HMAC_INIT | HASH_FLAGS_HMAC_FINAL |
				 HASH_FLAGS_HMAC_KEY);
	} else {
		rctx->flags |= HASH_FLAGS_ERRORS;
	}

	pm_runtime_mark_last_busy(hdev->dev);
	pm_runtime_put_autosuspend(hdev->dev);

	crypto_finalize_hash_request(hdev->engine, req, err);
}

static int stm32_hash_hw_init(struct stm32_hash_dev *hdev,
			      struct stm32_hash_request_ctx *rctx)
{
	pm_runtime_get_sync(hdev->dev);

	if (!(HASH_FLAGS_INIT & hdev->flags)) {
		stm32_hash_write(hdev, HASH_CR, HASH_CR_INIT);
		stm32_hash_write(hdev, HASH_STR, 0);
		stm32_hash_write(hdev, HASH_DIN, 0);
		stm32_hash_write(hdev, HASH_IMR, 0);
		hdev->err = 0;
	}

	return 0;
}

static int stm32_hash_one_request(struct crypto_engine *engine, void *areq);
static int stm32_hash_prepare_req(struct crypto_engine *engine, void *areq);

static int stm32_hash_handle_queue(struct stm32_hash_dev *hdev,
				   struct ahash_request *req)
{
	return crypto_transfer_hash_request_to_engine(hdev->engine, req);
}

static int stm32_hash_prepare_req(struct crypto_engine *engine, void *areq)
{
	struct ahash_request *req = container_of(areq, struct ahash_request,
						 base);
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
	struct stm32_hash_request_ctx *rctx;

	if (!hdev)
		return -ENODEV;

	hdev->req = req;

	rctx = ahash_request_ctx(req);

	dev_dbg(hdev->dev, "processing new req, op: %lu, nbytes %d\n",
		rctx->op, req->nbytes);

	return stm32_hash_hw_init(hdev, rctx);
}

static int stm32_hash_one_request(struct crypto_engine *engine, void *areq)
{
	struct ahash_request *req = container_of(areq, struct ahash_request,
						 base);
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
	struct stm32_hash_request_ctx *rctx;
	int err = 0;

	if (!hdev)
		return -ENODEV;

	hdev->req = req;

	rctx = ahash_request_ctx(req);

	if (rctx->op == HASH_OP_UPDATE)
		err = stm32_hash_update_req(hdev);
	else if (rctx->op == HASH_OP_FINAL)
		err = stm32_hash_final_req(hdev);

	/* done task will not finish it, so do it here */
	if (err != -EINPROGRESS)
		stm32_hash_finish_req(req, err);

	return 0;
}

static int stm32_hash_enqueue(struct ahash_request *req, unsigned int op)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct stm32_hash_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct stm32_hash_dev *hdev = ctx->hdev;

	rctx->op = op;

	return stm32_hash_handle_queue(hdev, req);
}

static int stm32_hash_update(struct ahash_request *req)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (!req->nbytes || !(rctx->flags & HASH_FLAGS_CPU))
		return 0;

	rctx->total = req->nbytes;
	rctx->sg = req->src;
	rctx->offset = 0;

	if ((rctx->bufcnt + rctx->total < rctx->buflen)) {
		stm32_hash_append_sg(rctx);
		return 0;
	}

	return stm32_hash_enqueue(req, HASH_OP_UPDATE);
}

static int stm32_hash_final(struct ahash_request *req)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);

	rctx->flags |= HASH_FLAGS_FINUP;

	return stm32_hash_enqueue(req, HASH_OP_FINAL);
}

static int stm32_hash_finup(struct ahash_request *req)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
	int err1, err2;

	rctx->flags |= HASH_FLAGS_FINUP;

	if (hdev->dma_lch && stm32_hash_dma_aligned_data(req))
		rctx->flags &= ~HASH_FLAGS_CPU;

	err1 = stm32_hash_update(req);

	if (err1 == -EINPROGRESS || err1 == -EBUSY)
		return err1;

	/*
	 * final() has to be always called to cleanup resources
	 * even if update() failed, except EINPROGRESS
	 */
	err2 = stm32_hash_final(req);

	return err1 ?: err2;
}

static int stm32_hash_digest(struct ahash_request *req)
{
	return stm32_hash_init(req) ?: stm32_hash_finup(req);
}

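/*
 * Export/import serialize the request context plus the peripheral's
 * context-swap registers (IMR, STR, CR and the HASH_CSR bank) so a
 * partial hash can be suspended and resumed later.
 */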
static int stm32_hash_export(struct ahash_request *req, void *out)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
	u32 *preg;
	unsigned int i;

	pm_runtime_get_sync(hdev->dev);

	while ((stm32_hash_read(hdev, HASH_SR) & HASH_SR_BUSY))
		cpu_relax();

	rctx->hw_context = kmalloc_array(3 + HASH_CSR_REGISTER_NUMBER,
					 sizeof(u32),
					 GFP_KERNEL);
	if (!rctx->hw_context) {
		pm_runtime_mark_last_busy(hdev->dev);
		pm_runtime_put_autosuspend(hdev->dev);
		return -ENOMEM;
	}

	preg = rctx->hw_context;

	*preg++ = stm32_hash_read(hdev, HASH_IMR);
	*preg++ = stm32_hash_read(hdev, HASH_STR);
	*preg++ = stm32_hash_read(hdev, HASH_CR);
	for (i = 0; i < HASH_CSR_REGISTER_NUMBER; i++)
		*preg++ = stm32_hash_read(hdev, HASH_CSR(i));

	pm_runtime_mark_last_busy(hdev->dev);
	pm_runtime_put_autosuspend(hdev->dev);

	memcpy(out, rctx, sizeof(*rctx));

	return 0;
}

static int stm32_hash_import(struct ahash_request *req, const void *in)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
	const u32 *preg = in;
	u32 reg;
	unsigned int i;

	memcpy(rctx, in, sizeof(*rctx));

	preg = rctx->hw_context;

	pm_runtime_get_sync(hdev->dev);

	stm32_hash_write(hdev, HASH_IMR, *preg++);
	stm32_hash_write(hdev, HASH_STR, *preg++);
	stm32_hash_write(hdev, HASH_CR, *preg);
	reg = *preg++ | HASH_CR_INIT;
	stm32_hash_write(hdev, HASH_CR, reg);

	for (i = 0; i < HASH_CSR_REGISTER_NUMBER; i++)
		stm32_hash_write(hdev, HASH_CSR(i), *preg++);

	pm_runtime_mark_last_busy(hdev->dev);
	pm_runtime_put_autosuspend(hdev->dev);

	kfree(rctx->hw_context);

	return 0;
}

static int stm32_hash_setkey(struct crypto_ahash *tfm,
			     const u8 *key, unsigned int keylen)
{
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);

	if (keylen <= HASH_MAX_KEY_SIZE) {
		memcpy(ctx->key, key, keylen);
		ctx->keylen = keylen;
	} else {
		return -ENOMEM;
	}

	return 0;
}

static int stm32_hash_cra_init_algs(struct crypto_tfm *tfm,
				    const char *algs_hmac_name)
{
	struct stm32_hash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct stm32_hash_request_ctx));

	ctx->keylen = 0;

	if (algs_hmac_name)
		ctx->flags |= HASH_FLAGS_HMAC;

	ctx->enginectx.op.do_one_request = stm32_hash_one_request;
	ctx->enginectx.op.prepare_request = stm32_hash_prepare_req;
	ctx->enginectx.op.unprepare_request = NULL;

	return 0;
}

static int stm32_hash_cra_init(struct crypto_tfm *tfm)
{
	return stm32_hash_cra_init_algs(tfm, NULL);
}

static int stm32_hash_cra_md5_init(struct crypto_tfm *tfm)
{
	return stm32_hash_cra_init_algs(tfm, "md5");
}

static int stm32_hash_cra_sha1_init(struct crypto_tfm *tfm)
{
	return stm32_hash_cra_init_algs(tfm, "sha1");
}

static int stm32_hash_cra_sha224_init(struct crypto_tfm *tfm)
{
	return stm32_hash_cra_init_algs(tfm, "sha224");
}

static int stm32_hash_cra_sha256_init(struct crypto_tfm *tfm)
{
	return stm32_hash_cra_init_algs(tfm, "sha256");
}

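/*
 * Hard IRQ half latches the "digest ready" status and masks further
 * interrupts; the threaded half then completes the request outside
 * atomic context.
 */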
static irqreturn_t stm32_hash_irq_thread(int irq, void *dev_id)
{
	struct stm32_hash_dev *hdev = dev_id;

	if (HASH_FLAGS_CPU & hdev->flags) {
		if (HASH_FLAGS_OUTPUT_READY & hdev->flags) {
			hdev->flags &= ~HASH_FLAGS_OUTPUT_READY;
			goto finish;
		}
	} else if (HASH_FLAGS_DMA_READY & hdev->flags) {
		if (HASH_FLAGS_DMA_ACTIVE & hdev->flags) {
			hdev->flags &= ~HASH_FLAGS_DMA_ACTIVE;
			goto finish;
		}
	}

	return IRQ_HANDLED;

finish:
	/* Finish current request */
	stm32_hash_finish_req(hdev->req, 0);

	return IRQ_HANDLED;
}

static irqreturn_t stm32_hash_irq_handler(int irq, void *dev_id)
{
	struct stm32_hash_dev *hdev = dev_id;
	u32 reg;

	reg = stm32_hash_read(hdev, HASH_SR);
	if (reg & HASH_SR_OUTPUT_READY) {
		reg &= ~HASH_SR_OUTPUT_READY;
		stm32_hash_write(hdev, HASH_SR, reg);
		hdev->flags |= HASH_FLAGS_OUTPUT_READY;
		/* Disable IT */
		stm32_hash_write(hdev, HASH_IMR, 0);
		return IRQ_WAKE_THREAD;
	}

	return IRQ_NONE;
}

static struct ahash_alg algs_md5_sha1[] = {
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_request_ctx),
			.base = {
				.cra_name = "md5",
				.cra_driver_name = "stm32-md5",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_init,
				.cra_module = THIS_MODULE,
			}
		}
	},
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.setkey = stm32_hash_setkey,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_request_ctx),
			.base = {
				.cra_name = "hmac(md5)",
				.cra_driver_name = "stm32-hmac-md5",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_md5_init,
				.cra_module = THIS_MODULE,
			}
		}
	},
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_request_ctx),
			.base = {
				.cra_name = "sha1",
				.cra_driver_name = "stm32-sha1",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_init,
				.cra_module = THIS_MODULE,
			}
		}
	},
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.setkey = stm32_hash_setkey,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_request_ctx),
			.base = {
				.cra_name = "hmac(sha1)",
				.cra_driver_name = "stm32-hmac-sha1",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_sha1_init,
				.cra_module = THIS_MODULE,
			}
		}
	},
};

static struct ahash_alg algs_sha224_sha256[] = {
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.halg = {
			.digestsize = SHA224_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_request_ctx),
			.base = {
				.cra_name = "sha224",
				.cra_driver_name = "stm32-sha224",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_init,
				.cra_module = THIS_MODULE,
			}
		}
	},
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.setkey = stm32_hash_setkey,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.halg = {
			.digestsize = SHA224_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_request_ctx),
			.base = {
				.cra_name = "hmac(sha224)",
				.cra_driver_name = "stm32-hmac-sha224",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_sha224_init,
				.cra_module = THIS_MODULE,
			}
		}
	},
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.halg = {
			.digestsize = SHA256_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_request_ctx),
			.base = {
				.cra_name = "sha256",
				.cra_driver_name = "stm32-sha256",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_init,
				.cra_module = THIS_MODULE,
			}
		}
	},
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.setkey = stm32_hash_setkey,
		.halg = {
			.digestsize = SHA256_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_request_ctx),
			.base = {
				.cra_name = "hmac(sha256)",
				.cra_driver_name = "stm32-hmac-sha256",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_sha256_init,
				.cra_module = THIS_MODULE,
			}
		}
	},
};

static int stm32_hash_register_algs(struct stm32_hash_dev *hdev)
{
	unsigned int i, j;
	int err;

	for (i = 0; i < hdev->pdata->algs_info_size; i++) {
		for (j = 0; j < hdev->pdata->algs_info[i].size; j++) {
			err = crypto_register_ahash(
				&hdev->pdata->algs_info[i].algs_list[j]);
			if (err)
				goto err_algs;
		}
	}

	return 0;

err_algs:
	dev_err(hdev->dev, "Algo %d : %d failed\n", i, j);
	for (; i--; ) {
		for (; j--;)
			crypto_unregister_ahash(
				&hdev->pdata->algs_info[i].algs_list[j]);
	}

	return err;
}

static int stm32_hash_unregister_algs(struct stm32_hash_dev *hdev)
{
	unsigned int i, j;

	for (i = 0; i < hdev->pdata->algs_info_size; i++) {
		for (j = 0; j < hdev->pdata->algs_info[i].size; j++)
			crypto_unregister_ahash(
				&hdev->pdata->algs_info[i].algs_list[j]);
	}

	return 0;
}

static struct stm32_hash_algs_info stm32_hash_algs_info_stm32f4[] = {
	{
		.algs_list	= algs_md5_sha1,
		.size		= ARRAY_SIZE(algs_md5_sha1),
	},
};

static const struct stm32_hash_pdata stm32_hash_pdata_stm32f4 = {
	.algs_info	= stm32_hash_algs_info_stm32f4,
	.algs_info_size	= ARRAY_SIZE(stm32_hash_algs_info_stm32f4),
};

static struct stm32_hash_algs_info stm32_hash_algs_info_stm32f7[] = {
	{
		.algs_list	= algs_md5_sha1,
		.size		= ARRAY_SIZE(algs_md5_sha1),
	},
	{
		.algs_list	= algs_sha224_sha256,
		.size		= ARRAY_SIZE(algs_sha224_sha256),
	},
};

static const struct stm32_hash_pdata stm32_hash_pdata_stm32f7 = {
	.algs_info	= stm32_hash_algs_info_stm32f7,
	.algs_info_size	= ARRAY_SIZE(stm32_hash_algs_info_stm32f7),
};

static const struct of_device_id stm32_hash_of_match[] = {
	{
		.compatible = "st,stm32f456-hash",
		.data = &stm32_hash_pdata_stm32f4,
	},
	{
		.compatible = "st,stm32f756-hash",
		.data = &stm32_hash_pdata_stm32f7,
	},
	{},
};

MODULE_DEVICE_TABLE(of, stm32_hash_of_match);

static int stm32_hash_get_of_match(struct stm32_hash_dev *hdev,
				   struct device *dev)
{
	hdev->pdata = of_device_get_match_data(dev);
	if (!hdev->pdata) {
		dev_err(dev, "no compatible OF match\n");
		return -EINVAL;
	}

	if (of_property_read_u32(dev->of_node, "dma-maxburst",
				 &hdev->dma_maxburst)) {
		dev_info(dev, "dma-maxburst not specified, using 0\n");
		hdev->dma_maxburst = 0;
	}

	return 0;
}

static int stm32_hash_probe(struct platform_device *pdev)
{
	struct stm32_hash_dev *hdev;
	struct device *dev = &pdev->dev;
	struct resource *res;
	int ret, irq;

	hdev = devm_kzalloc(dev, sizeof(*hdev), GFP_KERNEL);
	if (!hdev)
		return -ENOMEM;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	hdev->io_base = devm_ioremap_resource(dev, res);
	if (IS_ERR(hdev->io_base))
		return PTR_ERR(hdev->io_base);

	hdev->phys_base = res->start;

	ret = stm32_hash_get_of_match(hdev, dev);
	if (ret)
		return ret;

	irq = platform_get_irq(pdev, 0);
	if (irq < 0) {
		dev_err(dev, "Cannot get IRQ resource\n");
		return irq;
	}

	ret = devm_request_threaded_irq(dev, irq, stm32_hash_irq_handler,
					stm32_hash_irq_thread, IRQF_ONESHOT,
					dev_name(dev), hdev);
	if (ret) {
		dev_err(dev, "Cannot grab IRQ\n");
		return ret;
	}

	hdev->clk = devm_clk_get(&pdev->dev, NULL);
	if (IS_ERR(hdev->clk)) {
		dev_err(dev, "failed to get clock for hash (%lu)\n",
			PTR_ERR(hdev->clk));
		return PTR_ERR(hdev->clk);
	}

	ret = clk_prepare_enable(hdev->clk);
	if (ret) {
		dev_err(dev, "failed to enable hash clock (%d)\n", ret);
		return ret;
	}

	pm_runtime_set_autosuspend_delay(dev, HASH_AUTOSUSPEND_DELAY);
	pm_runtime_use_autosuspend(dev);

	pm_runtime_get_noresume(dev);
	pm_runtime_set_active(dev);
	pm_runtime_enable(dev);

	hdev->rst = devm_reset_control_get(&pdev->dev, NULL);
	if (!IS_ERR(hdev->rst)) {
		reset_control_assert(hdev->rst);
		udelay(2);
		reset_control_deassert(hdev->rst);
	}

	hdev->dev = dev;

	platform_set_drvdata(pdev, hdev);

	ret = stm32_hash_dma_init(hdev);
	if (ret)
		dev_dbg(dev, "DMA mode not available\n");

	spin_lock(&stm32_hash.lock);
	list_add_tail(&hdev->list, &stm32_hash.dev_list);
	spin_unlock(&stm32_hash.lock);

	/* Initialize crypto engine */
	hdev->engine = crypto_engine_alloc_init(dev, 1);
	if (!hdev->engine) {
		ret = -ENOMEM;
		goto err_engine;
	}

	ret = crypto_engine_start(hdev->engine);
	if (ret)
		goto err_engine_start;

	hdev->dma_mode = stm32_hash_read(hdev, HASH_HWCFGR);

	/* Register algos */
	ret = stm32_hash_register_algs(hdev);
	if (ret)
		goto err_algs;

	dev_info(dev, "Init HASH done HW ver %x DMA mode %u\n",
		 stm32_hash_read(hdev, HASH_VER), hdev->dma_mode);

	pm_runtime_put_sync(dev);

	return 0;

err_algs:
err_engine_start:
	crypto_engine_exit(hdev->engine);
err_engine:
	spin_lock(&stm32_hash.lock);
	list_del(&hdev->list);
	spin_unlock(&stm32_hash.lock);

	if (hdev->dma_lch)
		dma_release_channel(hdev->dma_lch);

	pm_runtime_disable(dev);
	pm_runtime_put_noidle(dev);

	clk_disable_unprepare(hdev->clk);

	return ret;
}

static int stm32_hash_remove(struct platform_device *pdev)
{
	struct stm32_hash_dev *hdev;
	int ret;

	hdev = platform_get_drvdata(pdev);
	if (!hdev)
		return -ENODEV;

	ret = pm_runtime_get_sync(hdev->dev);
	if (ret < 0)
		return ret;

	stm32_hash_unregister_algs(hdev);

	crypto_engine_exit(hdev->engine);

	spin_lock(&stm32_hash.lock);
	list_del(&hdev->list);
	spin_unlock(&stm32_hash.lock);

	if (hdev->dma_lch)
		dma_release_channel(hdev->dma_lch);

	pm_runtime_disable(hdev->dev);
	pm_runtime_put_noidle(hdev->dev);

	clk_disable_unprepare(hdev->clk);

	return 0;
}

#ifdef CONFIG_PM
static int stm32_hash_runtime_suspend(struct device *dev)
{
	struct stm32_hash_dev *hdev = dev_get_drvdata(dev);

	clk_disable_unprepare(hdev->clk);

	return 0;
}

static int stm32_hash_runtime_resume(struct device *dev)
{
	struct stm32_hash_dev *hdev = dev_get_drvdata(dev);
	int ret;

	ret = clk_prepare_enable(hdev->clk);
	if (ret) {
		dev_err(hdev->dev, "Failed to prepare_enable clock\n");
		return ret;
	}

	return 0;
}
#endif

static const struct dev_pm_ops stm32_hash_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
				pm_runtime_force_resume)
	SET_RUNTIME_PM_OPS(stm32_hash_runtime_suspend,
			   stm32_hash_runtime_resume, NULL)
};

static struct platform_driver stm32_hash_driver = {
	.probe		= stm32_hash_probe,
	.remove		= stm32_hash_remove,
	.driver		= {
		.name	= "stm32-hash",
		.pm = &stm32_hash_pm_ops,
		.of_match_table	= stm32_hash_of_match,
	}
};

module_platform_driver(stm32_hash_driver);

MODULE_DESCRIPTION("STM32 SHA1/224/256 & MD5 (HMAC) hw accelerator driver");
MODULE_LICENSE("GPL v2");