// SPDX-License-Identifier: GPL-2.0+
/*
 * Copyright (c) 2021 Aspeed Technology Inc.
 */
#include "aspeed-hace.h"
#include <crypto/engine.h>
#include <crypto/hmac.h>
#include <crypto/internal/hash.h>
#include <crypto/scatterwalk.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/string.h>
#ifdef CONFIG_CRYPTO_DEV_ASPEED_DEBUG
#define AHASH_DBG(h, fmt, ...)	\
	dev_info((h)->dev, "%s() " fmt, __func__, ##__VA_ARGS__)
#else
#define AHASH_DBG(h, fmt, ...)	\
	dev_dbg((h)->dev, "%s() " fmt, __func__, ##__VA_ARGS__)
#endif
/* Initialization Vectors for SHA-family */
static const __be32 sha1_iv[8] = {
	cpu_to_be32(SHA1_H0), cpu_to_be32(SHA1_H1),
	cpu_to_be32(SHA1_H2), cpu_to_be32(SHA1_H3),
	cpu_to_be32(SHA1_H4), 0, 0, 0
};

static const __be32 sha224_iv[8] = {
	cpu_to_be32(SHA224_H0), cpu_to_be32(SHA224_H1),
	cpu_to_be32(SHA224_H2), cpu_to_be32(SHA224_H3),
	cpu_to_be32(SHA224_H4), cpu_to_be32(SHA224_H5),
	cpu_to_be32(SHA224_H6), cpu_to_be32(SHA224_H7),
};

static const __be32 sha256_iv[8] = {
	cpu_to_be32(SHA256_H0), cpu_to_be32(SHA256_H1),
	cpu_to_be32(SHA256_H2), cpu_to_be32(SHA256_H3),
	cpu_to_be32(SHA256_H4), cpu_to_be32(SHA256_H5),
	cpu_to_be32(SHA256_H6), cpu_to_be32(SHA256_H7),
};

static const __be64 sha384_iv[8] = {
	cpu_to_be64(SHA384_H0), cpu_to_be64(SHA384_H1),
	cpu_to_be64(SHA384_H2), cpu_to_be64(SHA384_H3),
	cpu_to_be64(SHA384_H4), cpu_to_be64(SHA384_H5),
	cpu_to_be64(SHA384_H6), cpu_to_be64(SHA384_H7)
};

static const __be64 sha512_iv[8] = {
	cpu_to_be64(SHA512_H0), cpu_to_be64(SHA512_H1),
	cpu_to_be64(SHA512_H2), cpu_to_be64(SHA512_H3),
	cpu_to_be64(SHA512_H4), cpu_to_be64(SHA512_H5),
	cpu_to_be64(SHA512_H6), cpu_to_be64(SHA512_H7)
};
/* The purpose of this padding is to ensure that the padded message is a
 * multiple of 512 bits (SHA1/SHA224/SHA256) or 1024 bits (SHA384/SHA512).
 * The bit "1" is appended at the end of the message followed by
 * "padlen-1" zero bits. Then a 64 bits block (SHA1/SHA224/SHA256) or
 * 128 bits block (SHA384/SHA512) equal to the message length in bits
 * is appended.
 *
 * For SHA1/SHA224/SHA256, padlen is calculated as follows:
 *  - if message length < 56 bytes then padlen = 56 - message length
 *  - else padlen = 64 + 56 - message length
 *
 * For SHA384/SHA512, padlen is calculated as follows:
 *  - if message length < 112 bytes then padlen = 112 - message length
 *  - else padlen = 128 + 112 - message length
 */
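/*
 * Worked example (SHA-256, illustrative): for a 3-byte message,
 * index = 3 and padlen = 56 - 3 = 53, so the padded block is
 * 3 + 53 + 8 = 64 bytes: the data, one 0x80 byte, 52 zero bytes,
 * then the 64-bit big-endian bit length (24 = 0x18).
 */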
static void aspeed_ahash_fill_padding(struct aspeed_hace_dev *hace_dev,
				      struct aspeed_sham_reqctx *rctx)
{
	unsigned int index, padlen;
	__be64 bits[2];

	AHASH_DBG(hace_dev, "rctx flags:0x%x\n", (u32)rctx->flags);

	switch (rctx->flags & SHA_FLAGS_MASK) {
	case SHA_FLAGS_SHA1:
	case SHA_FLAGS_SHA224:
	case SHA_FLAGS_SHA256:
		bits[0] = cpu_to_be64(rctx->digcnt[0] << 3);
		index = rctx->bufcnt & 0x3f;
		padlen = (index < 56) ? (56 - index) : ((64 + 56) - index);
		*(rctx->buffer + rctx->bufcnt) = 0x80;
		memset(rctx->buffer + rctx->bufcnt + 1, 0, padlen - 1);
		memcpy(rctx->buffer + rctx->bufcnt + padlen, bits, 8);
		rctx->bufcnt += padlen + 8;
		break;
	default:
		bits[1] = cpu_to_be64(rctx->digcnt[0] << 3);
		bits[0] = cpu_to_be64(rctx->digcnt[1] << 3 |
				      rctx->digcnt[0] >> 61);
		index = rctx->bufcnt & 0x7f;
		padlen = (index < 112) ? (112 - index) : ((128 + 112) - index);
		*(rctx->buffer + rctx->bufcnt) = 0x80;
		memset(rctx->buffer + rctx->bufcnt + 1, 0, padlen - 1);
		memcpy(rctx->buffer + rctx->bufcnt + padlen, bits, 16);
		rctx->bufcnt += padlen + 16;
		break;
	}
}
/*
 * Prepare DMA buffer before hardware engine
 * processing.
 */
static int aspeed_ahash_dma_prepare(struct aspeed_hace_dev *hace_dev)
{
	struct aspeed_engine_hash *hash_engine = &hace_dev->hash_engine;
	struct ahash_request *req = hash_engine->req;
	struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req);
	int length, remain;

	length = rctx->total + rctx->bufcnt;
	remain = length % rctx->block_size;

	AHASH_DBG(hace_dev, "length:0x%x, remain:0x%x\n", length, remain);

	if (rctx->bufcnt)
		memcpy(hash_engine->ahash_src_addr, rctx->buffer, rctx->bufcnt);
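	/*
	 * This non-SG path bounces all input through one contiguous DMA
	 * buffer, so the whole request must fit below
	 * ASPEED_CRYPTO_SRC_DMA_BUF_LEN.
	 */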
	if (rctx->total + rctx->bufcnt < ASPEED_CRYPTO_SRC_DMA_BUF_LEN) {
		scatterwalk_map_and_copy(hash_engine->ahash_src_addr +
					 rctx->bufcnt, rctx->src_sg,
					 rctx->offset, rctx->total - remain, 0);
		rctx->offset += rctx->total - remain;

	} else {
		dev_warn(hace_dev->dev, "Hash data length is too large\n");
		return -EINVAL;
	}

	scatterwalk_map_and_copy(rctx->buffer, rctx->src_sg,
				 rctx->offset, remain, 0);

	rctx->bufcnt = remain;
	rctx->digest_dma_addr = dma_map_single(hace_dev->dev, rctx->digest,
					       SHA512_DIGEST_SIZE,
					       DMA_BIDIRECTIONAL);
	if (dma_mapping_error(hace_dev->dev, rctx->digest_dma_addr)) {
		dev_warn(hace_dev->dev, "dma_map() rctx digest error\n");
		return -ENOMEM;
	}

	hash_engine->src_length = length - remain;
	hash_engine->src_dma = hash_engine->ahash_src_dma_addr;
	hash_engine->digest_dma = rctx->digest_dma_addr;

	return 0;
}
/*
 * Prepare DMA buffer as SG list buffer before
 * hardware engine processing.
 */
static int aspeed_ahash_dma_prepare_sg(struct aspeed_hace_dev *hace_dev)
{
	struct aspeed_engine_hash *hash_engine = &hace_dev->hash_engine;
	struct ahash_request *req = hash_engine->req;
	struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req);
	struct aspeed_sg_list *src_list;
	struct scatterlist *s;
	int length, remain, sg_len, i;
	int rc = 0;

	remain = (rctx->total + rctx->bufcnt) % rctx->block_size;
	length = rctx->total + rctx->bufcnt - remain;

	AHASH_DBG(hace_dev, "%s:0x%x, %s:%zu, %s:0x%x, %s:0x%x\n",
		  "rctx total", rctx->total, "bufcnt", rctx->bufcnt,
		  "length", length, "remain", remain);

	sg_len = dma_map_sg(hace_dev->dev, rctx->src_sg, rctx->src_nents,
			    DMA_TO_DEVICE);
	if (!sg_len) {
		dev_warn(hace_dev->dev, "dma_map_sg() src error\n");
		rc = -ENOMEM;
		goto end;
	}

	src_list = (struct aspeed_sg_list *)hash_engine->ahash_src_addr;
	rctx->digest_dma_addr = dma_map_single(hace_dev->dev, rctx->digest,
					       SHA512_DIGEST_SIZE,
					       DMA_BIDIRECTIONAL);
	if (dma_mapping_error(hace_dev->dev, rctx->digest_dma_addr)) {
		dev_warn(hace_dev->dev, "dma_map() rctx digest error\n");
		rc = -ENOMEM;
		goto free_src_sg;
	}
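	/*
	 * Build the hardware SG descriptor list: each aspeed_sg_list entry
	 * carries a 32-bit address and length, and HASH_SG_LAST_LIST marks
	 * the final descriptor. Bytes left over in rctx->buffer from a
	 * previous update are linked in first.
	 */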
	if (rctx->bufcnt != 0) {
		u32 phy_addr;
		u32 len;

		rctx->buffer_dma_addr = dma_map_single(hace_dev->dev,
						       rctx->buffer,
						       rctx->block_size * 2,
						       DMA_TO_DEVICE);
		if (dma_mapping_error(hace_dev->dev, rctx->buffer_dma_addr)) {
			dev_warn(hace_dev->dev, "dma_map() rctx buffer error\n");
			rc = -ENOMEM;
			goto free_rctx_digest;
		}

		phy_addr = rctx->buffer_dma_addr;
		len = rctx->bufcnt;
		length -= len;

		/* Last sg list */
		if (length == 0)
			len |= HASH_SG_LAST_LIST;

		src_list[0].phy_addr = cpu_to_le32(phy_addr);
		src_list[0].len = cpu_to_le32(len);
		src_list++;
	}

	if (length != 0) {
		for_each_sg(rctx->src_sg, s, sg_len, i) {
			u32 phy_addr = sg_dma_address(s);
			u32 len = sg_dma_len(s);

			if (length > len)
				length -= len;
			else {
				/* Last sg list */
				len = length;
				len |= HASH_SG_LAST_LIST;
				length = 0;
			}

			src_list[i].phy_addr = cpu_to_le32(phy_addr);
			src_list[i].len = cpu_to_le32(len);
		}
	}

	if (length != 0) {
		rc = -EINVAL;
		goto free_rctx_buffer;
	}

	rctx->offset = rctx->total - remain;
	hash_engine->src_length = rctx->total + rctx->bufcnt - remain;
	hash_engine->src_dma = hash_engine->ahash_src_dma_addr;
	hash_engine->digest_dma = rctx->digest_dma_addr;

	return 0;

free_rctx_buffer:
	if (rctx->bufcnt != 0)
		dma_unmap_single(hace_dev->dev, rctx->buffer_dma_addr,
				 rctx->block_size * 2, DMA_TO_DEVICE);
free_rctx_digest:
	dma_unmap_single(hace_dev->dev, rctx->digest_dma_addr,
			 SHA512_DIGEST_SIZE, DMA_BIDIRECTIONAL);
free_src_sg:
	dma_unmap_sg(hace_dev->dev, rctx->src_sg, rctx->src_nents,
		     DMA_TO_DEVICE);
end:
	return rc;
}
static int aspeed_ahash_complete(struct aspeed_hace_dev *hace_dev)
{
	struct aspeed_engine_hash *hash_engine = &hace_dev->hash_engine;
	struct ahash_request *req = hash_engine->req;

	AHASH_DBG(hace_dev, "\n");

	hash_engine->flags &= ~CRYPTO_FLAGS_BUSY;

	crypto_finalize_hash_request(hace_dev->crypt_engine_hash, req, 0);

	return 0;
}
/*
 * Copy digest to the corresponding request result.
 * This function will be called at final() stage.
 */
static int aspeed_ahash_transfer(struct aspeed_hace_dev *hace_dev)
{
	struct aspeed_engine_hash *hash_engine = &hace_dev->hash_engine;
	struct ahash_request *req = hash_engine->req;
	struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req);

	AHASH_DBG(hace_dev, "\n");

	dma_unmap_single(hace_dev->dev, rctx->digest_dma_addr,
			 SHA512_DIGEST_SIZE, DMA_BIDIRECTIONAL);

	dma_unmap_single(hace_dev->dev, rctx->buffer_dma_addr,
			 rctx->block_size * 2, DMA_TO_DEVICE);

	memcpy(req->result, rctx->digest, rctx->digsize);

	return aspeed_ahash_complete(hace_dev);
}
/*
 * Trigger hardware engines to do the math.
 */
static int aspeed_hace_ahash_trigger(struct aspeed_hace_dev *hace_dev,
				     aspeed_hace_fn_t resume)
{
	struct aspeed_engine_hash *hash_engine = &hace_dev->hash_engine;
	struct ahash_request *req = hash_engine->req;
	struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req);

	AHASH_DBG(hace_dev, "src_dma:%pad, digest_dma:%pad, length:%zu\n",
		  &hash_engine->src_dma, &hash_engine->digest_dma,
		  hash_engine->src_length);

	rctx->cmd |= HASH_CMD_INT_ENABLE;
	hash_engine->resume = resume;

	ast_hace_write(hace_dev, hash_engine->src_dma, ASPEED_HACE_HASH_SRC);
	ast_hace_write(hace_dev, hash_engine->digest_dma,
		       ASPEED_HACE_HASH_DIGEST_BUFF);
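	/*
	 * The digest buffer doubles as the key buffer here: per our reading
	 * of the HACE programming model, the engine reloads the running
	 * hash state through the key buffer address in accumulative mode.
	 */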
	ast_hace_write(hace_dev, hash_engine->digest_dma,
		       ASPEED_HACE_HASH_KEY_BUFF);
	ast_hace_write(hace_dev, hash_engine->src_length,
		       ASPEED_HACE_HASH_DATA_LEN);

	/* Memory barrier to ensure all data setup before engine starts */
	mb();

	ast_hace_write(hace_dev, rctx->cmd, ASPEED_HACE_HASH_CMD);

	return -EINPROGRESS;
}
/*
 * HMAC resume runs the second pass, which produces
 * the final HMAC code derived from the inner hash
 * result and the outer key.
 */
static int aspeed_ahash_hmac_resume(struct aspeed_hace_dev *hace_dev)
{
	struct aspeed_engine_hash *hash_engine = &hace_dev->hash_engine;
	struct ahash_request *req = hash_engine->req;
	struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct aspeed_sham_ctx *tctx = crypto_ahash_ctx(tfm);
	struct aspeed_sha_hmac_ctx *bctx = tctx->base;
	int rc = 0;

	AHASH_DBG(hace_dev, "\n");

	dma_unmap_single(hace_dev->dev, rctx->digest_dma_addr,
			 SHA512_DIGEST_SIZE, DMA_BIDIRECTIONAL);

	dma_unmap_single(hace_dev->dev, rctx->buffer_dma_addr,
			 rctx->block_size * 2, DMA_TO_DEVICE);
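	/*
	 * Second pass of HMAC(k, m) = H((k ^ opad) || H((k ^ ipad) || m)):
	 * the first pass left the inner digest H((k ^ ipad) || m) in
	 * rctx->digest, so now hash opad || inner-digest.
	 */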
	/* o key pad + hash sum 1 */
	memcpy(rctx->buffer, bctx->opad, rctx->block_size);
	memcpy(rctx->buffer + rctx->block_size, rctx->digest, rctx->digsize);

	rctx->bufcnt = rctx->block_size + rctx->digsize;
	rctx->digcnt[0] = rctx->block_size + rctx->digsize;

	aspeed_ahash_fill_padding(hace_dev, rctx);
	memcpy(rctx->digest, rctx->sha_iv, rctx->ivsize);

	rctx->digest_dma_addr = dma_map_single(hace_dev->dev, rctx->digest,
					       SHA512_DIGEST_SIZE,
					       DMA_BIDIRECTIONAL);
	if (dma_mapping_error(hace_dev->dev, rctx->digest_dma_addr)) {
		dev_warn(hace_dev->dev, "dma_map() rctx digest error\n");
		rc = -ENOMEM;
		goto end;
	}

	rctx->buffer_dma_addr = dma_map_single(hace_dev->dev, rctx->buffer,
					       rctx->block_size * 2,
					       DMA_TO_DEVICE);
	if (dma_mapping_error(hace_dev->dev, rctx->buffer_dma_addr)) {
		dev_warn(hace_dev->dev, "dma_map() rctx buffer error\n");
		rc = -ENOMEM;
		goto free_rctx_digest;
	}

	hash_engine->src_dma = rctx->buffer_dma_addr;
	hash_engine->src_length = rctx->bufcnt;
	hash_engine->digest_dma = rctx->digest_dma_addr;

	return aspeed_hace_ahash_trigger(hace_dev, aspeed_ahash_transfer);

free_rctx_digest:
	dma_unmap_single(hace_dev->dev, rctx->digest_dma_addr,
			 SHA512_DIGEST_SIZE, DMA_BIDIRECTIONAL);
end:
	return rc;
}
static int aspeed_ahash_req_final(struct aspeed_hace_dev *hace_dev)
{
	struct aspeed_engine_hash *hash_engine = &hace_dev->hash_engine;
	struct ahash_request *req = hash_engine->req;
	struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req);
	int rc = 0;

	AHASH_DBG(hace_dev, "\n");

	aspeed_ahash_fill_padding(hace_dev, rctx);

	rctx->digest_dma_addr = dma_map_single(hace_dev->dev,
					       rctx->digest,
					       SHA512_DIGEST_SIZE,
					       DMA_BIDIRECTIONAL);
	if (dma_mapping_error(hace_dev->dev, rctx->digest_dma_addr)) {
		dev_warn(hace_dev->dev, "dma_map() rctx digest error\n");
		rc = -ENOMEM;
		goto end;
	}

	rctx->buffer_dma_addr = dma_map_single(hace_dev->dev,
					       rctx->buffer,
					       rctx->block_size * 2,
					       DMA_TO_DEVICE);
	if (dma_mapping_error(hace_dev->dev, rctx->buffer_dma_addr)) {
		dev_warn(hace_dev->dev, "dma_map() rctx buffer error\n");
		rc = -ENOMEM;
		goto free_rctx_digest;
	}

	hash_engine->src_dma = rctx->buffer_dma_addr;
	hash_engine->src_length = rctx->bufcnt;
	hash_engine->digest_dma = rctx->digest_dma_addr;

	if (rctx->flags & SHA_FLAGS_HMAC)
		return aspeed_hace_ahash_trigger(hace_dev,
						 aspeed_ahash_hmac_resume);

	return aspeed_hace_ahash_trigger(hace_dev, aspeed_ahash_transfer);

free_rctx_digest:
	dma_unmap_single(hace_dev->dev, rctx->digest_dma_addr,
			 SHA512_DIGEST_SIZE, DMA_BIDIRECTIONAL);
end:
	return rc;
}
static int aspeed_ahash_update_resume_sg(struct aspeed_hace_dev *hace_dev)
{
	struct aspeed_engine_hash *hash_engine = &hace_dev->hash_engine;
	struct ahash_request *req = hash_engine->req;
	struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req);

	AHASH_DBG(hace_dev, "\n");

	dma_unmap_sg(hace_dev->dev, rctx->src_sg, rctx->src_nents,
		     DMA_TO_DEVICE);

	if (rctx->bufcnt != 0)
		dma_unmap_single(hace_dev->dev, rctx->buffer_dma_addr,
				 rctx->block_size * 2,
				 DMA_TO_DEVICE);

	dma_unmap_single(hace_dev->dev, rctx->digest_dma_addr,
			 SHA512_DIGEST_SIZE, DMA_BIDIRECTIONAL);

	scatterwalk_map_and_copy(rctx->buffer, rctx->src_sg, rctx->offset,
				 rctx->total - rctx->offset, 0);

	rctx->bufcnt = rctx->total - rctx->offset;
	rctx->cmd &= ~HASH_CMD_HASH_SRC_SG_CTRL;

	if (rctx->flags & SHA_FLAGS_FINUP)
		return aspeed_ahash_req_final(hace_dev);

	return aspeed_ahash_complete(hace_dev);
}
static int aspeed_ahash_update_resume(struct aspeed_hace_dev *hace_dev)
{
	struct aspeed_engine_hash *hash_engine = &hace_dev->hash_engine;
	struct ahash_request *req = hash_engine->req;
	struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req);

	AHASH_DBG(hace_dev, "\n");

	dma_unmap_single(hace_dev->dev, rctx->digest_dma_addr,
			 SHA512_DIGEST_SIZE, DMA_BIDIRECTIONAL);

	if (rctx->flags & SHA_FLAGS_FINUP)
		return aspeed_ahash_req_final(hace_dev);

	return aspeed_ahash_complete(hace_dev);
}
static int aspeed_ahash_req_update(struct aspeed_hace_dev *hace_dev)
{
	struct aspeed_engine_hash *hash_engine = &hace_dev->hash_engine;
	struct ahash_request *req = hash_engine->req;
	struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req);
	aspeed_hace_fn_t resume;
	int ret;

	AHASH_DBG(hace_dev, "\n");

	if (hace_dev->version == AST2600_VERSION) {
		rctx->cmd |= HASH_CMD_HASH_SRC_SG_CTRL;
		resume = aspeed_ahash_update_resume_sg;

	} else {
		resume = aspeed_ahash_update_resume;
	}

	ret = hash_engine->dma_prepare(hace_dev);
	if (ret)
		return ret;

	return aspeed_hace_ahash_trigger(hace_dev, resume);
}
static int aspeed_hace_hash_handle_queue(struct aspeed_hace_dev *hace_dev,
					 struct ahash_request *req)
{
	return crypto_transfer_hash_request_to_engine(
			hace_dev->crypt_engine_hash, req);
}
static int aspeed_ahash_do_request(struct crypto_engine *engine, void *areq)
{
	struct ahash_request *req = ahash_request_cast(areq);
	struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct aspeed_sham_ctx *tctx = crypto_ahash_ctx(tfm);
	struct aspeed_hace_dev *hace_dev = tctx->hace_dev;
	struct aspeed_engine_hash *hash_engine;
	int ret = 0;

	hash_engine = &hace_dev->hash_engine;
	hash_engine->flags |= CRYPTO_FLAGS_BUSY;

	if (rctx->op == SHA_OP_UPDATE)
		ret = aspeed_ahash_req_update(hace_dev);
	else if (rctx->op == SHA_OP_FINAL)
		ret = aspeed_ahash_req_final(hace_dev);

	if (ret != -EINPROGRESS)
		return -EIO;

	return 0;
}
static void aspeed_ahash_prepare_request(struct crypto_engine *engine,
					 void *areq)
{
	struct ahash_request *req = ahash_request_cast(areq);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct aspeed_sham_ctx *tctx = crypto_ahash_ctx(tfm);
	struct aspeed_hace_dev *hace_dev = tctx->hace_dev;
	struct aspeed_engine_hash *hash_engine;

	hash_engine = &hace_dev->hash_engine;
	hash_engine->req = req;

	if (hace_dev->version == AST2600_VERSION)
		hash_engine->dma_prepare = aspeed_ahash_dma_prepare_sg;
	else
		hash_engine->dma_prepare = aspeed_ahash_dma_prepare;
}
static int aspeed_ahash_do_one(struct crypto_engine *engine, void *areq)
{
	aspeed_ahash_prepare_request(engine, areq);
	return aspeed_ahash_do_request(engine, areq);
}
static int aspeed_sham_update(struct ahash_request *req)
{
	struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct aspeed_sham_ctx *tctx = crypto_ahash_ctx(tfm);
	struct aspeed_hace_dev *hace_dev = tctx->hace_dev;

	AHASH_DBG(hace_dev, "req->nbytes: %d\n", req->nbytes);

	rctx->total = req->nbytes;
	rctx->src_sg = req->src;
	rctx->offset = 0;
	rctx->src_nents = sg_nents(req->src);
	rctx->op = SHA_OP_UPDATE;

	rctx->digcnt[0] += rctx->total;
	if (rctx->digcnt[0] < rctx->total)
		rctx->digcnt[1]++;
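	/*
	 * Data that still fits below one hardware block is only buffered;
	 * the engine is not touched until a full block is available.
	 */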
	if (rctx->bufcnt + rctx->total < rctx->block_size) {
		scatterwalk_map_and_copy(rctx->buffer + rctx->bufcnt,
					 rctx->src_sg, rctx->offset,
					 rctx->total, 0);
		rctx->bufcnt += rctx->total;

		return 0;
	}

	return aspeed_hace_hash_handle_queue(hace_dev, req);
}
static int aspeed_sham_shash_digest(struct crypto_shash *tfm, u32 flags,
				    const u8 *data, unsigned int len, u8 *out)
{
	SHASH_DESC_ON_STACK(shash, tfm);

	shash->tfm = tfm;

	return crypto_shash_digest(shash, data, len, out);
}
static int aspeed_sham_final(struct ahash_request *req)
{
	struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct aspeed_sham_ctx *tctx = crypto_ahash_ctx(tfm);
	struct aspeed_hace_dev *hace_dev = tctx->hace_dev;

	AHASH_DBG(hace_dev, "req->nbytes:%d, rctx->total:%d\n",
		  req->nbytes, rctx->total);
	rctx->op = SHA_OP_FINAL;

	return aspeed_hace_hash_handle_queue(hace_dev, req);
}
static int aspeed_sham_finup(struct ahash_request *req)
{
	struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct aspeed_sham_ctx *tctx = crypto_ahash_ctx(tfm);
	struct aspeed_hace_dev *hace_dev = tctx->hace_dev;
	int rc1, rc2;

	AHASH_DBG(hace_dev, "req->nbytes: %d\n", req->nbytes);

	rctx->flags |= SHA_FLAGS_FINUP;

	rc1 = aspeed_sham_update(req);
	if (rc1 == -EINPROGRESS || rc1 == -EBUSY)
		return rc1;

	/*
	 * final() always has to be called to clean up resources
	 * even if update() failed, except when it returned -EINPROGRESS.
	 */
	rc2 = aspeed_sham_final(req);

	return rc1 ? : rc2;
}
static int aspeed_sham_init(struct ahash_request *req)
{
	struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct aspeed_sham_ctx *tctx = crypto_ahash_ctx(tfm);
	struct aspeed_hace_dev *hace_dev = tctx->hace_dev;
	struct aspeed_sha_hmac_ctx *bctx = tctx->base;

	AHASH_DBG(hace_dev, "%s: digest size:%d\n",
		  crypto_tfm_alg_name(&tfm->base),
		  crypto_ahash_digestsize(tfm));

	rctx->cmd = HASH_CMD_ACC_MODE;
	rctx->flags = 0;

	switch (crypto_ahash_digestsize(tfm)) {
	case SHA1_DIGEST_SIZE:
		rctx->cmd |= HASH_CMD_SHA1 | HASH_CMD_SHA_SWAP;
		rctx->flags |= SHA_FLAGS_SHA1;
		rctx->digsize = SHA1_DIGEST_SIZE;
		rctx->block_size = SHA1_BLOCK_SIZE;
		rctx->sha_iv = sha1_iv;
		rctx->ivsize = 32;
		memcpy(rctx->digest, sha1_iv, rctx->ivsize);
		break;
	case SHA224_DIGEST_SIZE:
		rctx->cmd |= HASH_CMD_SHA224 | HASH_CMD_SHA_SWAP;
		rctx->flags |= SHA_FLAGS_SHA224;
		rctx->digsize = SHA224_DIGEST_SIZE;
		rctx->block_size = SHA224_BLOCK_SIZE;
		rctx->sha_iv = sha224_iv;
		rctx->ivsize = 32;
		memcpy(rctx->digest, sha224_iv, rctx->ivsize);
		break;
	case SHA256_DIGEST_SIZE:
		rctx->cmd |= HASH_CMD_SHA256 | HASH_CMD_SHA_SWAP;
		rctx->flags |= SHA_FLAGS_SHA256;
		rctx->digsize = SHA256_DIGEST_SIZE;
		rctx->block_size = SHA256_BLOCK_SIZE;
		rctx->sha_iv = sha256_iv;
		rctx->ivsize = 32;
		memcpy(rctx->digest, sha256_iv, rctx->ivsize);
		break;
	case SHA384_DIGEST_SIZE:
		rctx->cmd |= HASH_CMD_SHA512_SER | HASH_CMD_SHA384 |
			     HASH_CMD_SHA_SWAP;
		rctx->flags |= SHA_FLAGS_SHA384;
		rctx->digsize = SHA384_DIGEST_SIZE;
		rctx->block_size = SHA384_BLOCK_SIZE;
		rctx->sha_iv = (const __be32 *)sha384_iv;
		rctx->ivsize = 64;
		memcpy(rctx->digest, sha384_iv, rctx->ivsize);
		break;
	case SHA512_DIGEST_SIZE:
		rctx->cmd |= HASH_CMD_SHA512_SER | HASH_CMD_SHA512 |
			     HASH_CMD_SHA_SWAP;
		rctx->flags |= SHA_FLAGS_SHA512;
		rctx->digsize = SHA512_DIGEST_SIZE;
		rctx->block_size = SHA512_BLOCK_SIZE;
		rctx->sha_iv = (const __be32 *)sha512_iv;
		rctx->ivsize = 64;
		memcpy(rctx->digest, sha512_iv, rctx->ivsize);
		break;
	default:
		dev_warn(tctx->hace_dev->dev, "digest size %d not supported\n",
			 crypto_ahash_digestsize(tfm));
		return -EINVAL;
	}

	rctx->bufcnt = 0;
	rctx->total = 0;
	rctx->digcnt[0] = 0;
	rctx->digcnt[1] = 0;
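	/*
	 * HMAC init: pre-load the buffer with the precomputed (key ^ ipad)
	 * block so the first pass hashes (k ^ ipad) || message; digcnt
	 * already accounts for that block.
	 */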
	if (tctx->flags & SHA_FLAGS_HMAC) {
		rctx->digcnt[0] = rctx->block_size;
		rctx->bufcnt = rctx->block_size;
		memcpy(rctx->buffer, bctx->ipad, rctx->block_size);
		rctx->flags |= SHA_FLAGS_HMAC;
	}

	return 0;
}
static int aspeed_sham_digest(struct ahash_request *req)
{
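	/* One-shot digest: init(), then finup(); ?: keeps init()'s error. */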
	return aspeed_sham_init(req) ? : aspeed_sham_finup(req);
}
static int aspeed_sham_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct aspeed_sham_ctx *tctx = crypto_ahash_ctx(tfm);
	struct aspeed_hace_dev *hace_dev = tctx->hace_dev;
	struct aspeed_sha_hmac_ctx *bctx = tctx->base;
	int ds = crypto_shash_digestsize(bctx->shash);
	int bs = crypto_shash_blocksize(bctx->shash);
	int err = 0;
	int i;

	AHASH_DBG(hace_dev, "%s: keylen:%d\n", crypto_tfm_alg_name(&tfm->base),
		  keylen);
	if (keylen > bs) {
		err = aspeed_sham_shash_digest(bctx->shash,
					       crypto_shash_get_flags(bctx->shash),
					       key, keylen, bctx->ipad);
		if (err)
			return err;
		keylen = ds;

	} else {
		memcpy(bctx->ipad, key, keylen);
	}

	memset(bctx->ipad + keylen, 0, bs - keylen);
	memcpy(bctx->opad, bctx->ipad, bs);

	for (i = 0; i < bs; i++) {
		bctx->ipad[i] ^= HMAC_IPAD_VALUE;
		bctx->opad[i] ^= HMAC_OPAD_VALUE;
	}

	return err;
}
static int aspeed_sham_cra_init(struct crypto_tfm *tfm)
{
	struct ahash_alg *alg = __crypto_ahash_alg(tfm->__crt_alg);
	struct aspeed_sham_ctx *tctx = crypto_tfm_ctx(tfm);
	struct aspeed_hace_alg *ast_alg;

	ast_alg = container_of(alg, struct aspeed_hace_alg, alg.ahash.base);
	tctx->hace_dev = ast_alg->hace_dev;
	tctx->flags = 0;

	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct aspeed_sham_reqctx));

	if (ast_alg->alg_base) {
		/* hmac related */
		struct aspeed_sha_hmac_ctx *bctx = tctx->base;

		tctx->flags |= SHA_FLAGS_HMAC;
		bctx->shash = crypto_alloc_shash(ast_alg->alg_base, 0,
						 CRYPTO_ALG_NEED_FALLBACK);
		if (IS_ERR(bctx->shash)) {
			dev_warn(ast_alg->hace_dev->dev,
				 "base driver '%s' could not be loaded.\n",
				 ast_alg->alg_base);
			return PTR_ERR(bctx->shash);
		}
	}

	return 0;
}
static void aspeed_sham_cra_exit(struct crypto_tfm *tfm)
{
	struct aspeed_sham_ctx *tctx = crypto_tfm_ctx(tfm);
	struct aspeed_hace_dev *hace_dev = tctx->hace_dev;

	AHASH_DBG(hace_dev, "%s\n", crypto_tfm_alg_name(tfm));

	if (tctx->flags & SHA_FLAGS_HMAC) {
		struct aspeed_sha_hmac_ctx *bctx = tctx->base;

		crypto_free_shash(bctx->shash);
	}
}
static int aspeed_sham_export(struct ahash_request *req, void *out)
{
	struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req);

	memcpy(out, rctx, sizeof(*rctx));

	return 0;
}
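/*
 * export/import serialize the whole request context, which is why
 * .statesize in the algorithm definitions below is
 * sizeof(struct aspeed_sham_reqctx).
 */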
static int aspeed_sham_import(struct ahash_request *req, const void *in)
{
	struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req);

	memcpy(rctx, in, sizeof(*rctx));

	return 0;
}
static struct aspeed_hace_alg aspeed_ahash_algs[] = {
	{
		.alg.ahash.base = {
			.init = aspeed_sham_init,
			.update = aspeed_sham_update,
			.final = aspeed_sham_final,
			.finup = aspeed_sham_finup,
			.digest = aspeed_sham_digest,
			.export = aspeed_sham_export,
			.import = aspeed_sham_import,
			.halg = {
				.digestsize = SHA1_DIGEST_SIZE,
				.statesize = sizeof(struct aspeed_sham_reqctx),
				.base = {
					.cra_name = "sha1",
					.cra_driver_name = "aspeed-sha1",
					.cra_priority = 300,
					.cra_flags = CRYPTO_ALG_TYPE_AHASH |
						     CRYPTO_ALG_ASYNC |
						     CRYPTO_ALG_KERN_DRIVER_ONLY,
					.cra_blocksize = SHA1_BLOCK_SIZE,
					.cra_ctxsize = sizeof(struct aspeed_sham_ctx),
					.cra_alignmask = 0,
					.cra_module = THIS_MODULE,
					.cra_init = aspeed_sham_cra_init,
					.cra_exit = aspeed_sham_cra_exit,
				}
			}
		},
		.alg.ahash.op = {
			.do_one_request = aspeed_ahash_do_one,
		},
	},
	{
		.alg.ahash.base = {
			.init = aspeed_sham_init,
			.update = aspeed_sham_update,
			.final = aspeed_sham_final,
			.finup = aspeed_sham_finup,
			.digest = aspeed_sham_digest,
			.export = aspeed_sham_export,
			.import = aspeed_sham_import,
			.halg = {
				.digestsize = SHA256_DIGEST_SIZE,
				.statesize = sizeof(struct aspeed_sham_reqctx),
				.base = {
					.cra_name = "sha256",
					.cra_driver_name = "aspeed-sha256",
					.cra_priority = 300,
					.cra_flags = CRYPTO_ALG_TYPE_AHASH |
						     CRYPTO_ALG_ASYNC |
						     CRYPTO_ALG_KERN_DRIVER_ONLY,
					.cra_blocksize = SHA256_BLOCK_SIZE,
					.cra_ctxsize = sizeof(struct aspeed_sham_ctx),
					.cra_alignmask = 0,
					.cra_module = THIS_MODULE,
					.cra_init = aspeed_sham_cra_init,
					.cra_exit = aspeed_sham_cra_exit,
				}
			}
		},
		.alg.ahash.op = {
			.do_one_request = aspeed_ahash_do_one,
		},
	},
	{
		.alg.ahash.base = {
			.init = aspeed_sham_init,
			.update = aspeed_sham_update,
			.final = aspeed_sham_final,
			.finup = aspeed_sham_finup,
			.digest = aspeed_sham_digest,
			.export = aspeed_sham_export,
			.import = aspeed_sham_import,
			.halg = {
				.digestsize = SHA224_DIGEST_SIZE,
				.statesize = sizeof(struct aspeed_sham_reqctx),
				.base = {
					.cra_name = "sha224",
					.cra_driver_name = "aspeed-sha224",
					.cra_priority = 300,
					.cra_flags = CRYPTO_ALG_TYPE_AHASH |
						     CRYPTO_ALG_ASYNC |
						     CRYPTO_ALG_KERN_DRIVER_ONLY,
					.cra_blocksize = SHA224_BLOCK_SIZE,
					.cra_ctxsize = sizeof(struct aspeed_sham_ctx),
					.cra_alignmask = 0,
					.cra_module = THIS_MODULE,
					.cra_init = aspeed_sham_cra_init,
					.cra_exit = aspeed_sham_cra_exit,
				}
			}
		},
		.alg.ahash.op = {
			.do_one_request = aspeed_ahash_do_one,
		},
	},
	{
		.alg_base = "sha1",
		.alg.ahash.base = {
			.init = aspeed_sham_init,
			.update = aspeed_sham_update,
			.final = aspeed_sham_final,
			.finup = aspeed_sham_finup,
			.digest = aspeed_sham_digest,
			.setkey = aspeed_sham_setkey,
			.export = aspeed_sham_export,
			.import = aspeed_sham_import,
			.halg = {
				.digestsize = SHA1_DIGEST_SIZE,
				.statesize = sizeof(struct aspeed_sham_reqctx),
				.base = {
					.cra_name = "hmac(sha1)",
					.cra_driver_name = "aspeed-hmac-sha1",
					.cra_priority = 300,
					.cra_flags = CRYPTO_ALG_TYPE_AHASH |
						     CRYPTO_ALG_ASYNC |
						     CRYPTO_ALG_KERN_DRIVER_ONLY,
					.cra_blocksize = SHA1_BLOCK_SIZE,
					.cra_ctxsize = sizeof(struct aspeed_sham_ctx) +
						       sizeof(struct aspeed_sha_hmac_ctx),
					.cra_alignmask = 0,
					.cra_module = THIS_MODULE,
					.cra_init = aspeed_sham_cra_init,
					.cra_exit = aspeed_sham_cra_exit,
				}
			}
		},
		.alg.ahash.op = {
			.do_one_request = aspeed_ahash_do_one,
		},
	},
	{
		.alg_base = "sha224",
		.alg.ahash.base = {
			.init = aspeed_sham_init,
			.update = aspeed_sham_update,
			.final = aspeed_sham_final,
			.finup = aspeed_sham_finup,
			.digest = aspeed_sham_digest,
			.setkey = aspeed_sham_setkey,
			.export = aspeed_sham_export,
			.import = aspeed_sham_import,
			.halg = {
				.digestsize = SHA224_DIGEST_SIZE,
				.statesize = sizeof(struct aspeed_sham_reqctx),
				.base = {
					.cra_name = "hmac(sha224)",
					.cra_driver_name = "aspeed-hmac-sha224",
					.cra_priority = 300,
					.cra_flags = CRYPTO_ALG_TYPE_AHASH |
						     CRYPTO_ALG_ASYNC |
						     CRYPTO_ALG_KERN_DRIVER_ONLY,
					.cra_blocksize = SHA224_BLOCK_SIZE,
					.cra_ctxsize = sizeof(struct aspeed_sham_ctx) +
						       sizeof(struct aspeed_sha_hmac_ctx),
					.cra_alignmask = 0,
					.cra_module = THIS_MODULE,
					.cra_init = aspeed_sham_cra_init,
					.cra_exit = aspeed_sham_cra_exit,
				}
			}
		},
		.alg.ahash.op = {
			.do_one_request = aspeed_ahash_do_one,
		},
	},
	{
		.alg_base = "sha256",
		.alg.ahash.base = {
			.init = aspeed_sham_init,
			.update = aspeed_sham_update,
			.final = aspeed_sham_final,
			.finup = aspeed_sham_finup,
			.digest = aspeed_sham_digest,
			.setkey = aspeed_sham_setkey,
			.export = aspeed_sham_export,
			.import = aspeed_sham_import,
			.halg = {
				.digestsize = SHA256_DIGEST_SIZE,
				.statesize = sizeof(struct aspeed_sham_reqctx),
				.base = {
					.cra_name = "hmac(sha256)",
					.cra_driver_name = "aspeed-hmac-sha256",
					.cra_priority = 300,
					.cra_flags = CRYPTO_ALG_TYPE_AHASH |
						     CRYPTO_ALG_ASYNC |
						     CRYPTO_ALG_KERN_DRIVER_ONLY,
					.cra_blocksize = SHA256_BLOCK_SIZE,
					.cra_ctxsize = sizeof(struct aspeed_sham_ctx) +
						       sizeof(struct aspeed_sha_hmac_ctx),
					.cra_alignmask = 0,
					.cra_module = THIS_MODULE,
					.cra_init = aspeed_sham_cra_init,
					.cra_exit = aspeed_sham_cra_exit,
				}
			}
		},
		.alg.ahash.op = {
			.do_one_request = aspeed_ahash_do_one,
		},
	},
};
static struct aspeed_hace_alg aspeed_ahash_algs_g6[] = {
	{
		.alg.ahash.base = {
			.init = aspeed_sham_init,
			.update = aspeed_sham_update,
			.final = aspeed_sham_final,
			.finup = aspeed_sham_finup,
			.digest = aspeed_sham_digest,
			.export = aspeed_sham_export,
			.import = aspeed_sham_import,
			.halg = {
				.digestsize = SHA384_DIGEST_SIZE,
				.statesize = sizeof(struct aspeed_sham_reqctx),
				.base = {
					.cra_name = "sha384",
					.cra_driver_name = "aspeed-sha384",
					.cra_priority = 300,
					.cra_flags = CRYPTO_ALG_TYPE_AHASH |
						     CRYPTO_ALG_ASYNC |
						     CRYPTO_ALG_KERN_DRIVER_ONLY,
					.cra_blocksize = SHA384_BLOCK_SIZE,
					.cra_ctxsize = sizeof(struct aspeed_sham_ctx),
					.cra_alignmask = 0,
					.cra_module = THIS_MODULE,
					.cra_init = aspeed_sham_cra_init,
					.cra_exit = aspeed_sham_cra_exit,
				}
			}
		},
		.alg.ahash.op = {
			.do_one_request = aspeed_ahash_do_one,
		},
	},
	{
		.alg.ahash.base = {
			.init = aspeed_sham_init,
			.update = aspeed_sham_update,
			.final = aspeed_sham_final,
			.finup = aspeed_sham_finup,
			.digest = aspeed_sham_digest,
			.export = aspeed_sham_export,
			.import = aspeed_sham_import,
			.halg = {
				.digestsize = SHA512_DIGEST_SIZE,
				.statesize = sizeof(struct aspeed_sham_reqctx),
				.base = {
					.cra_name = "sha512",
					.cra_driver_name = "aspeed-sha512",
					.cra_priority = 300,
					.cra_flags = CRYPTO_ALG_TYPE_AHASH |
						     CRYPTO_ALG_ASYNC |
						     CRYPTO_ALG_KERN_DRIVER_ONLY,
					.cra_blocksize = SHA512_BLOCK_SIZE,
					.cra_ctxsize = sizeof(struct aspeed_sham_ctx),
					.cra_alignmask = 0,
					.cra_module = THIS_MODULE,
					.cra_init = aspeed_sham_cra_init,
					.cra_exit = aspeed_sham_cra_exit,
				}
			}
		},
		.alg.ahash.op = {
			.do_one_request = aspeed_ahash_do_one,
		},
	},
	{
		.alg_base = "sha384",
		.alg.ahash.base = {
			.init = aspeed_sham_init,
			.update = aspeed_sham_update,
			.final = aspeed_sham_final,
			.finup = aspeed_sham_finup,
			.digest = aspeed_sham_digest,
			.setkey = aspeed_sham_setkey,
			.export = aspeed_sham_export,
			.import = aspeed_sham_import,
			.halg = {
				.digestsize = SHA384_DIGEST_SIZE,
				.statesize = sizeof(struct aspeed_sham_reqctx),
				.base = {
					.cra_name = "hmac(sha384)",
					.cra_driver_name = "aspeed-hmac-sha384",
					.cra_priority = 300,
					.cra_flags = CRYPTO_ALG_TYPE_AHASH |
						     CRYPTO_ALG_ASYNC |
						     CRYPTO_ALG_KERN_DRIVER_ONLY,
					.cra_blocksize = SHA384_BLOCK_SIZE,
					.cra_ctxsize = sizeof(struct aspeed_sham_ctx) +
						       sizeof(struct aspeed_sha_hmac_ctx),
					.cra_alignmask = 0,
					.cra_module = THIS_MODULE,
					.cra_init = aspeed_sham_cra_init,
					.cra_exit = aspeed_sham_cra_exit,
				}
			}
		},
		.alg.ahash.op = {
			.do_one_request = aspeed_ahash_do_one,
		},
	},
	{
		.alg_base = "sha512",
		.alg.ahash.base = {
			.init = aspeed_sham_init,
			.update = aspeed_sham_update,
			.final = aspeed_sham_final,
			.finup = aspeed_sham_finup,
			.digest = aspeed_sham_digest,
			.setkey = aspeed_sham_setkey,
			.export = aspeed_sham_export,
			.import = aspeed_sham_import,
			.halg = {
				.digestsize = SHA512_DIGEST_SIZE,
				.statesize = sizeof(struct aspeed_sham_reqctx),
				.base = {
					.cra_name = "hmac(sha512)",
					.cra_driver_name = "aspeed-hmac-sha512",
					.cra_priority = 300,
					.cra_flags = CRYPTO_ALG_TYPE_AHASH |
						     CRYPTO_ALG_ASYNC |
						     CRYPTO_ALG_KERN_DRIVER_ONLY,
					.cra_blocksize = SHA512_BLOCK_SIZE,
					.cra_ctxsize = sizeof(struct aspeed_sham_ctx) +
						       sizeof(struct aspeed_sha_hmac_ctx),
					.cra_alignmask = 0,
					.cra_module = THIS_MODULE,
					.cra_init = aspeed_sham_cra_init,
					.cra_exit = aspeed_sham_cra_exit,
				}
			}
		},
		.alg.ahash.op = {
			.do_one_request = aspeed_ahash_do_one,
		},
	},
};
void aspeed_unregister_hace_hash_algs(struct aspeed_hace_dev *hace_dev)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(aspeed_ahash_algs); i++)
		crypto_engine_unregister_ahash(&aspeed_ahash_algs[i].alg.ahash);
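	/* SHA-384/512 instances only exist on AST2600 (G6) hardware. */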
	if (hace_dev->version != AST2600_VERSION)
		return;

	for (i = 0; i < ARRAY_SIZE(aspeed_ahash_algs_g6); i++)
		crypto_engine_unregister_ahash(&aspeed_ahash_algs_g6[i].alg.ahash);
}
void aspeed_register_hace_hash_algs(struct aspeed_hace_dev *hace_dev)
{
	int rc, i;

	AHASH_DBG(hace_dev, "\n");

	for (i = 0; i < ARRAY_SIZE(aspeed_ahash_algs); i++) {
		aspeed_ahash_algs[i].hace_dev = hace_dev;
		rc = crypto_engine_register_ahash(&aspeed_ahash_algs[i].alg.ahash);
		if (rc) {
			AHASH_DBG(hace_dev, "Failed to register %s\n",
				  aspeed_ahash_algs[i].alg.ahash.base.halg.base.cra_name);
		}
	}

	if (hace_dev->version != AST2600_VERSION)
		return;

	for (i = 0; i < ARRAY_SIZE(aspeed_ahash_algs_g6); i++) {
		aspeed_ahash_algs_g6[i].hace_dev = hace_dev;
		rc = crypto_engine_register_ahash(&aspeed_ahash_algs_g6[i].alg.ahash);
		if (rc) {
			AHASH_DBG(hace_dev, "Failed to register %s\n",
				  aspeed_ahash_algs_g6[i].alg.ahash.base.halg.base.cra_name);
		}
	}
}