1 /* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0-only) */
2 /* Copyright(c) 2014 - 2020 Intel Corporation */
3 #ifndef _QAT_CRYPTO_INSTANCE_H_
4 #define _QAT_CRYPTO_INSTANCE_H_
6 #include <crypto/aes.h>
7 #include <linux/list.h>
8 #include <linux/slab.h>
9 #include "adf_accel_devices.h"
10 #include "icp_qat_fw_la.h"
11 #include "qat_algs_send.h"
/*
 * Per-device crypto service instance. Bundles the transport rings used to
 * exchange requests/responses with the accelerator, split into a symmetric
 * (sym) pair and a public-key (pke) pair.
 */
14 struct qat_crypto_instance {
/* Ring for submitting symmetric crypto requests to the device. */
15 struct adf_etr_ring_data *sym_tx;
/* Ring for receiving symmetric crypto responses. */
16 struct adf_etr_ring_data *sym_rx;
/* Ring for submitting public-key (asymmetric) requests. */
17 struct adf_etr_ring_data *pke_tx;
/* Ring for receiving public-key responses. */
18 struct adf_etr_ring_data *pke_rx;
/* Accelerator device this instance belongs to. */
19 struct adf_accel_dev *accel_dev;
/* Linkage into the device's list of crypto instances. */
20 struct list_head list;
/*
 * Backlog of requests that could not be enqueued immediately
 * (type declared in qat_algs_send.h) — see that header for semantics.
 */
24 struct qat_instance_backlog backlog;
/* Forward declaration so the completion callback below can name the type. */
27 struct qat_crypto_request;
/*
 * In-flight crypto request state, carrying the firmware descriptor plus
 * the originating crypto-API request and completion bookkeeping.
 */
29 struct qat_crypto_request {
/* Firmware lookaside (LA) bulk request descriptor sent to the device. */
30 struct icp_qat_fw_la_bulk_req req;
/* Transform context for AEAD requests (see qat_algs). */
32 struct qat_alg_aead_ctx *aead_ctx;
/* Transform context for skcipher requests. */
33 struct qat_alg_skcipher_ctx *skcipher_ctx;
/* Originating crypto-API AEAD request, if this is an AEAD operation. */
36 struct aead_request *aead_req;
/* Originating crypto-API skcipher request, if this is a cipher operation. */
37 struct skcipher_request *skcipher_req;
/* Per-request buffer state — presumably DMA buffer lists; see qat driver buffer code. */
39 struct qat_request_buffs buf;
/* Completion callback invoked with the firmware response and this request. */
40 void (*cb)(struct icp_qat_fw_la_resp *resp,
41 struct qat_crypto_request *req);
/* Initialization vector, sized for one AES block. */
47 u8 iv[AES_BLOCK_SIZE];
/* Submission handle used by the qat_algs_send backlog machinery. */
50 struct qat_alg_req alg_req;
/*
 * adf_hw_dev_has_crypto() - check whether the accelerator advertises the
 * full set of crypto capabilities (symmetric, asymmetric, authentication).
 * @accel_dev: device whose hw capability mask is examined
 *
 * Returns true only when none of the checked capability bits is missing
 * from @accel_dev's advertised capability mask.
 */
53 static inline bool adf_hw_dev_has_crypto(struct adf_accel_dev *accel_dev)
55 struct adf_hw_device_data *hw_device = accel_dev->hw_device;
/*
 * Invert the capability mask: after the complement, a set bit in @mask
 * marks a capability the hardware does NOT advertise.
 */
56 u32 mask = ~hw_device->accel_capabilities_mask;
/* Each test below fires when the corresponding capability is absent. */
58 if (mask & ADF_ACCEL_CAPABILITIES_CRYPTO_SYMMETRIC)
60 if (mask & ADF_ACCEL_CAPABILITIES_CRYPTO_ASYMMETRIC)
62 if (mask & ADF_ACCEL_CAPABILITIES_AUTHENTICATION)