// SPDX-License-Identifier: GPL-2.0-only
/*
 * Crypto acceleration support for Rockchip RK3288
 *
 * Copyright (c) 2015, Fuzhou Rockchip Electronics Co., Ltd
 *
 * Some ideas are from marvell-cesa.c and s5p-sss.c driver.
 */

#include "rk3288_crypto.h"
#include <linux/dma-mapping.h>
#include <linux/module.h>
#include <linux/platform_device.h>
#include <linux/of.h>
#include <linux/of_device.h>
#include <linux/clk.h>
#include <linux/crypto.h>
#include <linux/reset.h>

22 static struct rockchip_ip rocklist = {
23 .dev_list = LIST_HEAD_INIT(rocklist.dev_list),
24 .lock = __SPIN_LOCK_UNLOCKED(rocklist.lock),
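/*
 * Return the crypto device to use for the next request and rotate the
 * device list, so that requests are spread round-robin over all probed
 * instances.
 */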
struct rk_crypto_info *get_rk_crypto(void)
{
        struct rk_crypto_info *first;

        spin_lock(&rocklist.lock);
        first = list_first_entry_or_null(&rocklist.dev_list,
                                         struct rk_crypto_info, list);
        list_rotate_left(&rocklist.dev_list);
        spin_unlock(&rocklist.lock);
        return first;
}

static const struct rk_variant rk3288_variant = {
        .num_clks = 4,
        .rkclks = {
                { "sclk", 150000000 },
        }
};

static const struct rk_variant rk3328_variant = {
        .num_clks = 3,
};

static const struct rk_variant rk3399_variant = {
        .num_clks = 3,
};

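/*
 * Grab all clocks of the device and, for any clock listed in the variant
 * data with a non-zero maximum rate, cap its rate to that maximum.
 */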
static int rk_crypto_get_clks(struct rk_crypto_info *dev)
{
        int i, j, err;
        unsigned long cr;

        dev->num_clks = devm_clk_bulk_get_all(dev->dev, &dev->clks);
        if (dev->num_clks < dev->variant->num_clks) {
                dev_err(dev->dev, "Missing clocks, got %d instead of %d\n",
                        dev->num_clks, dev->variant->num_clks);
                return -EINVAL;
        }

        for (i = 0; i < dev->num_clks; i++) {
                cr = clk_get_rate(dev->clks[i].clk);
                for (j = 0; j < ARRAY_SIZE(dev->variant->rkclks); j++) {
                        if (dev->variant->rkclks[j].max == 0)
                                continue;
                        if (strcmp(dev->variant->rkclks[j].name, dev->clks[i].id))
                                continue;
                        if (cr > dev->variant->rkclks[j].max) {
                                err = clk_set_rate(dev->clks[i].clk,
                                                   dev->variant->rkclks[j].max);
                                if (err)
                                        dev_err(dev->dev, "Failed to downclock %s from %lu to %lu\n",
                                                dev->variant->rkclks[j].name, cr,
                                                dev->variant->rkclks[j].max);
                                else
                                        dev_info(dev->dev, "Downclocking %s from %lu to %lu\n",
                                                 dev->variant->rkclks[j].name, cr,
                                                 dev->variant->rkclks[j].max);
                        }
                }
        }
        return 0;
}

static int rk_crypto_enable_clk(struct rk_crypto_info *dev)
{
        int err;

        err = clk_bulk_prepare_enable(dev->num_clks, dev->clks);
        if (err)
                dev_err(dev->dev, "Could not enable clocks\n");

        return err;
}

static void rk_crypto_disable_clk(struct rk_crypto_info *dev)
{
        clk_bulk_disable_unprepare(dev->num_clks, dev->clks);
}

/*
 * Power management strategy: the device is suspended until a request
 * is handled. To avoid suspend/resume ping-pong, the autosuspend delay
 * is set to 2 seconds.
 */
static int rk_crypto_pm_suspend(struct device *dev)
{
        struct rk_crypto_info *rkdev = dev_get_drvdata(dev);

        rk_crypto_disable_clk(rkdev);
        reset_control_assert(rkdev->rst);

        return 0;
}

static int rk_crypto_pm_resume(struct device *dev)
{
        struct rk_crypto_info *rkdev = dev_get_drvdata(dev);
        int ret;

        ret = rk_crypto_enable_clk(rkdev);
        if (ret)
                return ret;

        reset_control_deassert(rkdev->rst);

        return 0;
}

static const struct dev_pm_ops rk_crypto_pm_ops = {
        SET_RUNTIME_PM_OPS(rk_crypto_pm_suspend, rk_crypto_pm_resume, NULL)
};

static int rk_crypto_pm_init(struct rk_crypto_info *rkdev)
{
        int err;

        pm_runtime_use_autosuspend(rkdev->dev);
        pm_runtime_set_autosuspend_delay(rkdev->dev, 2000);

        err = pm_runtime_set_suspended(rkdev->dev);
        if (err)
                return err;
        pm_runtime_enable(rkdev->dev);
        return err;
}

static void rk_crypto_pm_exit(struct rk_crypto_info *rkdev)
{
        pm_runtime_disable(rkdev->dev);
}

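/*
 * Acknowledge every pending interrupt bit, record a DMA error in
 * dev->status if one is flagged, and wake up the thread waiting on
 * dev->complete.
 */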
static irqreturn_t rk_crypto_irq_handle(int irq, void *dev_id)
{
        struct rk_crypto_info *dev = platform_get_drvdata(dev_id);
        u32 interrupt_status;

        interrupt_status = CRYPTO_READ(dev, RK_CRYPTO_INTSTS);
        CRYPTO_WRITE(dev, RK_CRYPTO_INTSTS, interrupt_status);

        dev->status = 1;
        if (interrupt_status & 0x0a) {
                dev_warn(dev->dev, "DMA Error\n");
                dev->status = 0;
        }
        complete(&dev->complete);

        return IRQ_HANDLED;
}

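/* All algorithms exposed by this driver; the list is shared by every probed instance. */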
static struct rk_crypto_tmp *rk_cipher_algs[] = {
        &rk_ecb_aes_alg,
        &rk_cbc_aes_alg,
        &rk_ecb_des_alg,
        &rk_cbc_des_alg,
        &rk_ecb_des3_ede_alg,
        &rk_cbc_des3_ede_alg,
        &rk_ahash_sha1,
        &rk_ahash_sha256,
        &rk_ahash_md5,
};

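/*
 * The debugfs "stats" file dumps the per-device request count and, for
 * each algorithm, the number of requests handled plus the number that
 * fell back to the software implementation (with the reason for ciphers).
 */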
#ifdef CONFIG_CRYPTO_DEV_ROCKCHIP_DEBUG
static int rk_crypto_debugfs_show(struct seq_file *seq, void *v)
{
        struct rk_crypto_info *dd;
        unsigned int i;

        spin_lock(&rocklist.lock);
        list_for_each_entry(dd, &rocklist.dev_list, list) {
                seq_printf(seq, "%s %s requests: %lu\n",
                           dev_driver_string(dd->dev), dev_name(dd->dev),
                           dd->nreq);
        }
        spin_unlock(&rocklist.lock);

        for (i = 0; i < ARRAY_SIZE(rk_cipher_algs); i++) {
                if (!rk_cipher_algs[i]->dev)
                        continue;
                switch (rk_cipher_algs[i]->type) {
                case CRYPTO_ALG_TYPE_SKCIPHER:
                        seq_printf(seq, "%s %s reqs=%lu fallback=%lu\n",
                                   rk_cipher_algs[i]->alg.skcipher.base.cra_driver_name,
                                   rk_cipher_algs[i]->alg.skcipher.base.cra_name,
                                   rk_cipher_algs[i]->stat_req, rk_cipher_algs[i]->stat_fb);
                        seq_printf(seq, "\tfallback due to length: %lu\n",
                                   rk_cipher_algs[i]->stat_fb_len);
                        seq_printf(seq, "\tfallback due to alignment: %lu\n",
                                   rk_cipher_algs[i]->stat_fb_align);
                        seq_printf(seq, "\tfallback due to SGs: %lu\n",
                                   rk_cipher_algs[i]->stat_fb_sgdiff);
                        break;
                case CRYPTO_ALG_TYPE_AHASH:
                        seq_printf(seq, "%s %s reqs=%lu fallback=%lu\n",
                                   rk_cipher_algs[i]->alg.hash.halg.base.cra_driver_name,
                                   rk_cipher_algs[i]->alg.hash.halg.base.cra_name,
                                   rk_cipher_algs[i]->stat_req, rk_cipher_algs[i]->stat_fb);
                        break;
                }
        }
        return 0;
}
DEFINE_SHOW_ATTRIBUTE(rk_crypto_debugfs);
#endif

static void register_debugfs(struct rk_crypto_info *crypto_info)
{
#ifdef CONFIG_CRYPTO_DEV_ROCKCHIP_DEBUG
        /* Errors from debugfs are deliberately ignored */
        rocklist.dbgfs_dir = debugfs_create_dir("rk3288_crypto", NULL);
        rocklist.dbgfs_stats = debugfs_create_file("stats", 0444,
                                                   rocklist.dbgfs_dir,
                                                   &rocklist,
                                                   &rk_crypto_debugfs_fops);
#endif
}

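/*
 * Register every algorithm in rk_cipher_algs with the crypto API. On
 * failure, unregister the entries that were already registered before
 * returning the error.
 */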
static int rk_crypto_register(struct rk_crypto_info *crypto_info)
{
        unsigned int i, k;
        int err = 0;

        for (i = 0; i < ARRAY_SIZE(rk_cipher_algs); i++) {
                rk_cipher_algs[i]->dev = crypto_info;
                switch (rk_cipher_algs[i]->type) {
                case CRYPTO_ALG_TYPE_SKCIPHER:
                        dev_info(crypto_info->dev, "Register %s as %s\n",
                                 rk_cipher_algs[i]->alg.skcipher.base.cra_name,
                                 rk_cipher_algs[i]->alg.skcipher.base.cra_driver_name);
                        err = crypto_register_skcipher(&rk_cipher_algs[i]->alg.skcipher);
                        break;
                case CRYPTO_ALG_TYPE_AHASH:
                        dev_info(crypto_info->dev, "Register %s as %s\n",
                                 rk_cipher_algs[i]->alg.hash.halg.base.cra_name,
                                 rk_cipher_algs[i]->alg.hash.halg.base.cra_driver_name);
                        err = crypto_register_ahash(&rk_cipher_algs[i]->alg.hash);
                        break;
                default:
                        dev_err(crypto_info->dev, "unknown algorithm\n");
                }
                if (err)
                        goto err_cipher_algs;
        }
        return 0;

err_cipher_algs:
        for (k = 0; k < i; k++) {
                if (rk_cipher_algs[k]->type == CRYPTO_ALG_TYPE_SKCIPHER)
                        crypto_unregister_skcipher(&rk_cipher_algs[k]->alg.skcipher);
                else
                        crypto_unregister_ahash(&rk_cipher_algs[k]->alg.hash);
        }
        return err;
}

static void rk_crypto_unregister(void)
{
        unsigned int i;

        for (i = 0; i < ARRAY_SIZE(rk_cipher_algs); i++) {
                if (rk_cipher_algs[i]->type == CRYPTO_ALG_TYPE_SKCIPHER)
                        crypto_unregister_skcipher(&rk_cipher_algs[i]->alg.skcipher);
                else
                        crypto_unregister_ahash(&rk_cipher_algs[i]->alg.hash);
        }
}

static const struct of_device_id crypto_of_id_table[] = {
        { .compatible = "rockchip,rk3288-crypto",
          .data = &rk3288_variant,
        },
        { .compatible = "rockchip,rk3328-crypto",
          .data = &rk3328_variant,
        },
        { .compatible = "rockchip,rk3399-crypto",
          .data = &rk3399_variant,
        },
        {}
};
MODULE_DEVICE_TABLE(of, crypto_of_id_table);

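/*
 * Probe: reset the block, map its registers, set up clocks, IRQ, the
 * crypto engine and runtime PM, then add the device to rocklist. Only
 * the first probed instance registers the algorithms and the debugfs
 * entries; later instances just join the list.
 */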
static int rk_crypto_probe(struct platform_device *pdev)
{
        struct device *dev = &pdev->dev;
        struct rk_crypto_info *crypto_info, *first;
        int err = 0;

        crypto_info = devm_kzalloc(&pdev->dev,
                                   sizeof(*crypto_info), GFP_KERNEL);
        if (!crypto_info) {
                err = -ENOMEM;
                goto err_crypto;
        }

        crypto_info->dev = &pdev->dev;
        platform_set_drvdata(pdev, crypto_info);

        crypto_info->variant = of_device_get_match_data(&pdev->dev);
        if (!crypto_info->variant) {
                dev_err(&pdev->dev, "Missing variant\n");
                return -EINVAL;
        }

        crypto_info->rst = devm_reset_control_array_get_exclusive(dev);
        if (IS_ERR(crypto_info->rst)) {
                err = PTR_ERR(crypto_info->rst);
                goto err_crypto;
        }

        reset_control_assert(crypto_info->rst);
        usleep_range(10, 20);
        reset_control_deassert(crypto_info->rst);

        crypto_info->reg = devm_platform_ioremap_resource(pdev, 0);
        if (IS_ERR(crypto_info->reg)) {
                err = PTR_ERR(crypto_info->reg);
                goto err_crypto;
        }

        err = rk_crypto_get_clks(crypto_info);
        if (err)
                goto err_crypto;

        crypto_info->irq = platform_get_irq(pdev, 0);
        if (crypto_info->irq < 0) {
                err = crypto_info->irq;
                goto err_crypto;
        }

        err = devm_request_irq(&pdev->dev, crypto_info->irq,
                               rk_crypto_irq_handle, IRQF_SHARED,
                               "rk-crypto", pdev);
        if (err) {
                dev_err(&pdev->dev, "irq request failed.\n");
                goto err_crypto;
        }

        crypto_info->engine = crypto_engine_alloc_init(&pdev->dev, true);
        crypto_engine_start(crypto_info->engine);
        init_completion(&crypto_info->complete);

        err = rk_crypto_pm_init(crypto_info);
        if (err)
                goto err_pm;

        spin_lock(&rocklist.lock);
        first = list_first_entry_or_null(&rocklist.dev_list,
                                         struct rk_crypto_info, list);
        list_add_tail(&crypto_info->list, &rocklist.dev_list);
        spin_unlock(&rocklist.lock);

        if (!first) {
                err = rk_crypto_register(crypto_info);
                if (err) {
                        dev_err(dev, "Failed to register crypto algorithms\n");
                        goto err_register_alg;
                }

                register_debugfs(crypto_info);
        }

        return 0;

err_register_alg:
        rk_crypto_pm_exit(crypto_info);
err_pm:
        crypto_engine_exit(crypto_info->engine);
err_crypto:
        dev_err(dev, "Crypto Accelerator not successfully registered\n");
        return err;
}

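/*
 * On removal, only the last remaining instance tears down the debugfs
 * entries and unregisters the algorithms; every instance stops its own
 * engine and runtime PM.
 */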
static int rk_crypto_remove(struct platform_device *pdev)
{
        struct rk_crypto_info *crypto_tmp = platform_get_drvdata(pdev);
        struct rk_crypto_info *first;

        spin_lock_bh(&rocklist.lock);
        list_del(&crypto_tmp->list);
        first = list_first_entry_or_null(&rocklist.dev_list,
                                         struct rk_crypto_info, list);
        spin_unlock_bh(&rocklist.lock);

        if (!first) {
#ifdef CONFIG_CRYPTO_DEV_ROCKCHIP_DEBUG
                debugfs_remove_recursive(rocklist.dbgfs_dir);
#endif
                rk_crypto_unregister();
        }
        rk_crypto_pm_exit(crypto_tmp);
        crypto_engine_exit(crypto_tmp->engine);
        return 0;
}

static struct platform_driver crypto_driver = {
        .probe          = rk_crypto_probe,
        .remove         = rk_crypto_remove,
        .driver         = {
                .name           = "rk3288-crypto",
                .pm             = &rk_crypto_pm_ops,
                .of_match_table = crypto_of_id_table,
        },
};

module_platform_driver(crypto_driver);

MODULE_DESCRIPTION("Support for Rockchip's cryptographic engine");
MODULE_LICENSE("GPL");