// SPDX-License-Identifier: GPL-2.0
/*
 * Crypto user configuration API.
 */
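
/*
 * This file implements the CRYPTO_MSG_GETSTAT side of the NETLINK_CRYPTO
 * interface: each reply carries a struct crypto_user_alg followed by a
 * per-type CRYPTOCFGA_STAT_* attribute holding the counters kept in
 * struct crypto_alg.
 *
 * Rough userspace sketch (illustrative only, error handling omitted;
 * "cbc(aes-generic)" is just an example driver name):
 *
 *	struct sockaddr_nl nl = { .nl_family = AF_NETLINK };
 *	struct {
 *		struct nlmsghdr hdr;
 *		struct crypto_user_alg alg;
 *	} req = {
 *		.hdr.nlmsg_len   = NLMSG_LENGTH(sizeof(req.alg)),
 *		.hdr.nlmsg_type  = CRYPTO_MSG_GETSTAT,
 *		.hdr.nlmsg_flags = NLM_F_REQUEST,
 *	};
 *	int fd = socket(AF_NETLINK, SOCK_RAW, NETLINK_CRYPTO);
 *
 *	strcpy(req.alg.cru_driver_name, "cbc(aes-generic)");
 *	sendto(fd, &req, req.hdr.nlmsg_len, 0, (struct sockaddr *)&nl, sizeof(nl));
 *	// recv() then returns a CRYPTO_MSG_GETSTAT reply: a crypto_user_alg
 *	// followed by CRYPTOCFGA_* netlink attributes.
 */
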
#include <linux/crypto.h>
#include <linux/cryptouser.h>
#include <linux/sched.h>
#include <net/netlink.h>
#include <crypto/internal/skcipher.h>
#include <crypto/internal/rng.h>
#include <crypto/akcipher.h>
#include <crypto/kpp.h>
#include <crypto/internal/cryptouser.h>

#include "internal.h"

#define null_terminated(x)	(strnlen(x, sizeof(x)) < sizeof(x))

static DEFINE_MUTEX(crypto_cfg_mutex);

extern struct sock *crypto_nlsk;

struct crypto_dump_info {
	struct sk_buff *in_skb;
	struct sk_buff *out_skb;
	u32 nlmsg_seq;
	u16 nlmsg_flags;
};
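
/*
 * Each crypto_report_*() helper below snapshots the type-specific counters
 * of @alg into a struct crypto_stat and emits it as a single
 * CRYPTOCFGA_STAT_* netlink attribute on @skb.  They return 0 on success
 * and -EMSGSIZE when the attribute does not fit in the message.
 */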
static int crypto_report_aead(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat raead;
	u64 v64;
	u32 v32;

	/* Zero the whole struct so unused fields cannot leak stack data. */
	memset(&raead, 0, sizeof(raead));

	strlcpy(raead.type, "aead", sizeof(raead.type));

	v32 = atomic_read(&alg->encrypt_cnt);
	raead.stat_encrypt_cnt = v32;
	v64 = atomic64_read(&alg->encrypt_tlen);
	raead.stat_encrypt_tlen = v64;
	v32 = atomic_read(&alg->decrypt_cnt);
	raead.stat_decrypt_cnt = v32;
	v64 = atomic64_read(&alg->decrypt_tlen);
	raead.stat_decrypt_tlen = v64;
	v32 = atomic_read(&alg->aead_err_cnt);
	raead.stat_aead_err_cnt = v32;

	if (nla_put(skb, CRYPTOCFGA_STAT_AEAD,
		    sizeof(struct crypto_stat), &raead))
		goto nla_put_failure;

	return 0;

nla_put_failure:
	return -EMSGSIZE;
}
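
/* CRYPTOCFGA_STAT_CIPHER covers skcipher, blkcipher and plain cipher algorithms. */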
static int crypto_report_cipher(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat rcipher;
	u64 v64;
	u32 v32;

	memset(&rcipher, 0, sizeof(rcipher));

	strlcpy(rcipher.type, "cipher", sizeof(rcipher.type));

	v32 = atomic_read(&alg->encrypt_cnt);
	rcipher.stat_encrypt_cnt = v32;
	v64 = atomic64_read(&alg->encrypt_tlen);
	rcipher.stat_encrypt_tlen = v64;
	v32 = atomic_read(&alg->decrypt_cnt);
	rcipher.stat_decrypt_cnt = v32;
	v64 = atomic64_read(&alg->decrypt_tlen);
	rcipher.stat_decrypt_tlen = v64;
	v32 = atomic_read(&alg->cipher_err_cnt);
	rcipher.stat_cipher_err_cnt = v32;

	if (nla_put(skb, CRYPTOCFGA_STAT_CIPHER,
		    sizeof(struct crypto_stat), &rcipher))
		goto nla_put_failure;

	return 0;

nla_put_failure:
	return -EMSGSIZE;
}
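
/* Legacy synchronous compression interface (CRYPTO_ALG_TYPE_COMPRESS). */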
static int crypto_report_comp(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat rcomp;
	u64 v64;
	u32 v32;

	memset(&rcomp, 0, sizeof(rcomp));

	strlcpy(rcomp.type, "compression", sizeof(rcomp.type));

	v32 = atomic_read(&alg->compress_cnt);
	rcomp.stat_compress_cnt = v32;
	v64 = atomic64_read(&alg->compress_tlen);
	rcomp.stat_compress_tlen = v64;
	v32 = atomic_read(&alg->decompress_cnt);
	rcomp.stat_decompress_cnt = v32;
	v64 = atomic64_read(&alg->decompress_tlen);
	rcomp.stat_decompress_tlen = v64;
	/* Read the compression error counter, not the cipher one. */
	v32 = atomic_read(&alg->compress_err_cnt);
	rcomp.stat_compress_err_cnt = v32;

	if (nla_put(skb, CRYPTOCFGA_STAT_COMPRESS,
		    sizeof(struct crypto_stat), &rcomp))
		goto nla_put_failure;

	return 0;

nla_put_failure:
	return -EMSGSIZE;
}
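
/* Asynchronous compression (acomp) and scomp algorithms share CRYPTOCFGA_STAT_ACOMP. */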
static int crypto_report_acomp(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat racomp;
	u64 v64;
	u32 v32;

	memset(&racomp, 0, sizeof(racomp));

	strlcpy(racomp.type, "acomp", sizeof(racomp.type));

	v32 = atomic_read(&alg->compress_cnt);
	racomp.stat_compress_cnt = v32;
	v64 = atomic64_read(&alg->compress_tlen);
	racomp.stat_compress_tlen = v64;
	v32 = atomic_read(&alg->decompress_cnt);
	racomp.stat_decompress_cnt = v32;
	v64 = atomic64_read(&alg->decompress_tlen);
	racomp.stat_decompress_tlen = v64;
	/* Read the compression error counter, not the cipher one. */
	v32 = atomic_read(&alg->compress_err_cnt);
	racomp.stat_compress_err_cnt = v32;

	if (nla_put(skb, CRYPTOCFGA_STAT_ACOMP,
		    sizeof(struct crypto_stat), &racomp))
		goto nla_put_failure;

	return 0;

nla_put_failure:
	return -EMSGSIZE;
}
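
/* Public-key (akcipher) statistics: encrypt/decrypt volume plus sign/verify counts. */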
static int crypto_report_akcipher(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat rakcipher;
	u64 v64;
	u32 v32;

	memset(&rakcipher, 0, sizeof(rakcipher));

	strlcpy(rakcipher.type, "akcipher", sizeof(rakcipher.type));

	v32 = atomic_read(&alg->encrypt_cnt);
	rakcipher.stat_encrypt_cnt = v32;
	v64 = atomic64_read(&alg->encrypt_tlen);
	rakcipher.stat_encrypt_tlen = v64;
	v32 = atomic_read(&alg->decrypt_cnt);
	rakcipher.stat_decrypt_cnt = v32;
	v64 = atomic64_read(&alg->decrypt_tlen);
	rakcipher.stat_decrypt_tlen = v64;
	v32 = atomic_read(&alg->sign_cnt);
	rakcipher.stat_sign_cnt = v32;
	v32 = atomic_read(&alg->verify_cnt);
	rakcipher.stat_verify_cnt = v32;
	v32 = atomic_read(&alg->akcipher_err_cnt);
	rakcipher.stat_akcipher_err_cnt = v32;

	if (nla_put(skb, CRYPTOCFGA_STAT_AKCIPHER,
		    sizeof(struct crypto_stat), &rakcipher))
		goto nla_put_failure;

	return 0;

nla_put_failure:
	return -EMSGSIZE;
}
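
/* Key agreement (kpp) statistics: setsecret, public-key generation and shared-secret computation. */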
static int crypto_report_kpp(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat rkpp;
	u32 v;

	memset(&rkpp, 0, sizeof(rkpp));

	strlcpy(rkpp.type, "kpp", sizeof(rkpp.type));

	v = atomic_read(&alg->setsecret_cnt);
	rkpp.stat_setsecret_cnt = v;
	v = atomic_read(&alg->generate_public_key_cnt);
	rkpp.stat_generate_public_key_cnt = v;
	v = atomic_read(&alg->compute_shared_secret_cnt);
	rkpp.stat_compute_shared_secret_cnt = v;
	v = atomic_read(&alg->kpp_err_cnt);
	rkpp.stat_kpp_err_cnt = v;

	if (nla_put(skb, CRYPTOCFGA_STAT_KPP,
		    sizeof(struct crypto_stat), &rkpp))
		goto nla_put_failure;

	return 0;

nla_put_failure:
	return -EMSGSIZE;
}
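
/* Hash statistics are shared between the ahash and shash reports (CRYPTOCFGA_STAT_HASH). */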
static int crypto_report_ahash(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat rhash;
	u64 v64;
	u32 v32;

	memset(&rhash, 0, sizeof(rhash));
	strlcpy(rhash.type, "ahash", sizeof(rhash.type));

	v32 = atomic_read(&alg->hash_cnt);
	rhash.stat_hash_cnt = v32;
	v64 = atomic64_read(&alg->hash_tlen);
	rhash.stat_hash_tlen = v64;
	v32 = atomic_read(&alg->hash_err_cnt);
	rhash.stat_hash_err_cnt = v32;

	if (nla_put(skb, CRYPTOCFGA_STAT_HASH,
		    sizeof(struct crypto_stat), &rhash))
		goto nla_put_failure;

	return 0;

nla_put_failure:
	return -EMSGSIZE;
}

static int crypto_report_shash(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat rhash;
	u64 v64;
	u32 v32;

	memset(&rhash, 0, sizeof(rhash));
	strlcpy(rhash.type, "shash", sizeof(rhash.type));

	v32 = atomic_read(&alg->hash_cnt);
	rhash.stat_hash_cnt = v32;
	v64 = atomic64_read(&alg->hash_tlen);
	rhash.stat_hash_tlen = v64;
	v32 = atomic_read(&alg->hash_err_cnt);
	rhash.stat_hash_err_cnt = v32;

	if (nla_put(skb, CRYPTOCFGA_STAT_HASH,
		    sizeof(struct crypto_stat), &rhash))
		goto nla_put_failure;

	return 0;

nla_put_failure:
	return -EMSGSIZE;
}
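
/* RNG statistics: generate/seed counts and the volume of generated bytes. */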
static int crypto_report_rng(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat rrng;
	u64 v64;
	u32 v32;

	memset(&rrng, 0, sizeof(rrng));
	strlcpy(rrng.type, "rng", sizeof(rrng.type));

	v32 = atomic_read(&alg->generate_cnt);
	rrng.stat_generate_cnt = v32;
	v64 = atomic64_read(&alg->generate_tlen);
	rrng.stat_generate_tlen = v64;
	v32 = atomic_read(&alg->seed_cnt);
	rrng.stat_seed_cnt = v32;
	/* Report the RNG error counter, not the hash one. */
	v32 = atomic_read(&alg->rng_err_cnt);
	rrng.stat_rng_err_cnt = v32;

	if (nla_put(skb, CRYPTOCFGA_STAT_RNG,
		    sizeof(struct crypto_stat), &rrng))
		goto nla_put_failure;

	return 0;

nla_put_failure:
	return -EMSGSIZE;
}
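
/*
 * Fill @ualg with the generic algorithm description and append the priority
 * plus the type-specific CRYPTOCFGA_STAT_* attribute to @skb.  Larval
 * (still-registering) algorithms only get a CRYPTOCFGA_STAT_LARVAL marker.
 */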
static int crypto_reportstat_one(struct crypto_alg *alg,
				 struct crypto_user_alg *ualg,
				 struct sk_buff *skb)
{
	strlcpy(ualg->cru_name, alg->cra_name, sizeof(ualg->cru_name));
	strlcpy(ualg->cru_driver_name, alg->cra_driver_name,
		sizeof(ualg->cru_driver_name));
	strlcpy(ualg->cru_module_name, module_name(alg->cra_module),
		sizeof(ualg->cru_module_name));

	ualg->cru_type = 0;
	ualg->cru_mask = 0;
	ualg->cru_flags = alg->cra_flags;
	ualg->cru_refcnt = refcount_read(&alg->cra_refcnt);

	if (nla_put_u32(skb, CRYPTOCFGA_PRIORITY_VAL, alg->cra_priority))
		goto nla_put_failure;
	if (alg->cra_flags & CRYPTO_ALG_LARVAL) {
		struct crypto_stat rl;

		memset(&rl, 0, sizeof(rl));
		strlcpy(rl.type, "larval", sizeof(rl.type));
		if (nla_put(skb, CRYPTOCFGA_STAT_LARVAL,
			    sizeof(struct crypto_stat), &rl))
			goto nla_put_failure;
		goto out;
	}

	switch (alg->cra_flags & (CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_LARVAL)) {
	case CRYPTO_ALG_TYPE_AEAD:
		if (crypto_report_aead(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_SKCIPHER:
		if (crypto_report_cipher(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_BLKCIPHER:
		if (crypto_report_cipher(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_CIPHER:
		if (crypto_report_cipher(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_COMPRESS:
		if (crypto_report_comp(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_ACOMPRESS:
		if (crypto_report_acomp(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_SCOMPRESS:
		if (crypto_report_acomp(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_AKCIPHER:
		if (crypto_report_akcipher(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_KPP:
		if (crypto_report_kpp(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_AHASH:
		if (crypto_report_ahash(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_HASH:
		if (crypto_report_shash(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_RNG:
		if (crypto_report_rng(skb, alg))
			goto nla_put_failure;
		break;
	default:
		pr_err("ERROR: Unhandled alg %d in %s\n",
		       alg->cra_flags & (CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_LARVAL),
		       __func__);
	}

out:
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}
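
/*
 * Wrap one per-algorithm report in a CRYPTO_MSG_GETSTAT netlink message on
 * info->out_skb, cancelling the message if the payload cannot be built.
 */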
static int crypto_reportstat_alg(struct crypto_alg *alg,
				 struct crypto_dump_info *info)
{
	struct sk_buff *in_skb = info->in_skb;
	struct sk_buff *skb = info->out_skb;
	struct nlmsghdr *nlh;
	struct crypto_user_alg *ualg;
	int err;

	nlh = nlmsg_put(skb, NETLINK_CB(in_skb).portid, info->nlmsg_seq,
			CRYPTO_MSG_GETSTAT, sizeof(*ualg), info->nlmsg_flags);
	if (!nlh)
		return -EMSGSIZE;

	ualg = nlmsg_data(nlh);

	err = crypto_reportstat_one(alg, ualg, skb);
	if (err) {
		nlmsg_cancel(skb, nlh);
		return err;
	}

	nlmsg_end(skb, nlh);
	return 0;
}
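
/*
 * CRYPTO_MSG_GETSTAT doit handler: look up one algorithm by name or driver
 * name and unicast its statistics back to the requesting socket.
 */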
int crypto_reportstat(struct sk_buff *in_skb, struct nlmsghdr *in_nlh,
		      struct nlattr **attrs)
{
	struct crypto_user_alg *p = nlmsg_data(in_nlh);
	struct crypto_alg *alg;
	struct sk_buff *skb;
	struct crypto_dump_info info;
	int err;

	if (!null_terminated(p->cru_name) || !null_terminated(p->cru_driver_name))
		return -EINVAL;

	alg = crypto_alg_match(p, 0);
	if (!alg)
		return -ENOENT;

	err = -ENOMEM;
	skb = nlmsg_new(NLMSG_DEFAULT_SIZE, GFP_ATOMIC);
	if (!skb)
		goto drop_alg;

	info.in_skb = in_skb;
	info.out_skb = skb;
	info.nlmsg_seq = in_nlh->nlmsg_seq;
	info.nlmsg_flags = 0;

	err = crypto_reportstat_alg(alg, &info);

drop_alg:
	crypto_mod_put(alg);
	if (err)
		return err;

	return nlmsg_unicast(crypto_nlsk, skb, NETLINK_CB(in_skb).portid);
}
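
/*
 * CRYPTO_MSG_GETSTAT dumpit handler: emit one NLM_F_MULTI message per
 * registered algorithm on the dump skb.
 */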
int crypto_dump_reportstat(struct sk_buff *skb, struct netlink_callback *cb)
{
	struct crypto_alg *alg;
	struct crypto_dump_info info;
	int err;

	info.in_skb = cb->skb;
	info.out_skb = skb;
	info.nlmsg_seq = cb->nlh->nlmsg_seq;
	info.nlmsg_flags = NLM_F_MULTI;

	list_for_each_entry(alg, &crypto_alg_list, cra_list) {
		err = crypto_reportstat_alg(alg, &info);
		if (err)
			return err;
	}

	return skb->len;
}
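
/* Nothing to clean up when a dump completes. */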
int crypto_dump_reportstat_done(struct netlink_callback *cb)
{
	return 0;
}

MODULE_LICENSE("GPL");