
[uclinux-h8/linux.git] / crypto / crypto_user_stat.c
// SPDX-License-Identifier: GPL-2.0
/*
 * Crypto user configuration API.
 *
 * Copyright (C) 2017-2018 Corentin Labbe <clabbe@baylibre.com>
 *
 */

#include <linux/crypto.h>
#include <linux/cryptouser.h>
#include <linux/sched.h>
#include <net/netlink.h>
#include <crypto/internal/skcipher.h>
#include <crypto/internal/rng.h>
#include <crypto/akcipher.h>
#include <crypto/kpp.h>
#include <crypto/internal/cryptouser.h>

#include "internal.h"

#define null_terminated(x)      (strnlen(x, sizeof(x)) < sizeof(x))

static DEFINE_MUTEX(crypto_cfg_mutex);

extern struct sock *crypto_nlsk;

struct crypto_dump_info {
        struct sk_buff *in_skb;
        struct sk_buff *out_skb;
        u32 nlmsg_seq;
        u16 nlmsg_flags;
};

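/*
 * Each crypto_report_*() helper below snapshots the per-algorithm usage
 * counters into a zeroed struct crypto_stat and emits it as a single
 * CRYPTOCFGA_STAT_* netlink attribute on the output skb.
 */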
static int crypto_report_aead(struct sk_buff *skb, struct crypto_alg *alg)
{
        struct crypto_stat raead;
        u64 v64;
        u32 v32;

        memset(&raead, 0, sizeof(raead));

        strscpy(raead.type, "aead", sizeof(raead.type));

        v32 = atomic_read(&alg->encrypt_cnt);
        raead.stat_encrypt_cnt = v32;
        v64 = atomic64_read(&alg->encrypt_tlen);
        raead.stat_encrypt_tlen = v64;
        v32 = atomic_read(&alg->decrypt_cnt);
        raead.stat_decrypt_cnt = v32;
        v64 = atomic64_read(&alg->decrypt_tlen);
        raead.stat_decrypt_tlen = v64;
        v32 = atomic_read(&alg->aead_err_cnt);
        raead.stat_aead_err_cnt = v32;

        return nla_put(skb, CRYPTOCFGA_STAT_AEAD, sizeof(raead), &raead);
}

static int crypto_report_cipher(struct sk_buff *skb, struct crypto_alg *alg)
{
        struct crypto_stat rcipher;
        u64 v64;
        u32 v32;

        memset(&rcipher, 0, sizeof(rcipher));

        strscpy(rcipher.type, "cipher", sizeof(rcipher.type));

        v32 = atomic_read(&alg->encrypt_cnt);
        rcipher.stat_encrypt_cnt = v32;
        v64 = atomic64_read(&alg->encrypt_tlen);
        rcipher.stat_encrypt_tlen = v64;
        v32 = atomic_read(&alg->decrypt_cnt);
        rcipher.stat_decrypt_cnt = v32;
        v64 = atomic64_read(&alg->decrypt_tlen);
        rcipher.stat_decrypt_tlen = v64;
        v32 = atomic_read(&alg->cipher_err_cnt);
        rcipher.stat_cipher_err_cnt = v32;

        return nla_put(skb, CRYPTOCFGA_STAT_CIPHER, sizeof(rcipher), &rcipher);
}

static int crypto_report_comp(struct sk_buff *skb, struct crypto_alg *alg)
{
        struct crypto_stat rcomp;
        u64 v64;
        u32 v32;

        memset(&rcomp, 0, sizeof(rcomp));

        strscpy(rcomp.type, "compression", sizeof(rcomp.type));
        v32 = atomic_read(&alg->compress_cnt);
        rcomp.stat_compress_cnt = v32;
        v64 = atomic64_read(&alg->compress_tlen);
        rcomp.stat_compress_tlen = v64;
        v32 = atomic_read(&alg->decompress_cnt);
        rcomp.stat_decompress_cnt = v32;
        v64 = atomic64_read(&alg->decompress_tlen);
        rcomp.stat_decompress_tlen = v64;
        v32 = atomic_read(&alg->cipher_err_cnt);
        rcomp.stat_compress_err_cnt = v32;

        return nla_put(skb, CRYPTOCFGA_STAT_COMPRESS, sizeof(rcomp), &rcomp);
}

static int crypto_report_acomp(struct sk_buff *skb, struct crypto_alg *alg)
{
        struct crypto_stat racomp;
        u64 v64;
        u32 v32;

        memset(&racomp, 0, sizeof(racomp));

        strscpy(racomp.type, "acomp", sizeof(racomp.type));
        v32 = atomic_read(&alg->compress_cnt);
        racomp.stat_compress_cnt = v32;
        v64 = atomic64_read(&alg->compress_tlen);
        racomp.stat_compress_tlen = v64;
        v32 = atomic_read(&alg->decompress_cnt);
        racomp.stat_decompress_cnt = v32;
        v64 = atomic64_read(&alg->decompress_tlen);
        racomp.stat_decompress_tlen = v64;
        v32 = atomic_read(&alg->cipher_err_cnt);
        racomp.stat_compress_err_cnt = v32;

        return nla_put(skb, CRYPTOCFGA_STAT_ACOMP, sizeof(racomp), &racomp);
}

static int crypto_report_akcipher(struct sk_buff *skb, struct crypto_alg *alg)
{
        struct crypto_stat rakcipher;
        u64 v64;
        u32 v32;

        memset(&rakcipher, 0, sizeof(rakcipher));

        strscpy(rakcipher.type, "akcipher", sizeof(rakcipher.type));
        v32 = atomic_read(&alg->encrypt_cnt);
        rakcipher.stat_encrypt_cnt = v32;
        v64 = atomic64_read(&alg->encrypt_tlen);
        rakcipher.stat_encrypt_tlen = v64;
        v32 = atomic_read(&alg->decrypt_cnt);
        rakcipher.stat_decrypt_cnt = v32;
        v64 = atomic64_read(&alg->decrypt_tlen);
        rakcipher.stat_decrypt_tlen = v64;
        v32 = atomic_read(&alg->sign_cnt);
        rakcipher.stat_sign_cnt = v32;
        v32 = atomic_read(&alg->verify_cnt);
        rakcipher.stat_verify_cnt = v32;
        v32 = atomic_read(&alg->akcipher_err_cnt);
        rakcipher.stat_akcipher_err_cnt = v32;

        return nla_put(skb, CRYPTOCFGA_STAT_AKCIPHER,
                       sizeof(rakcipher), &rakcipher);
}

static int crypto_report_kpp(struct sk_buff *skb, struct crypto_alg *alg)
{
        struct crypto_stat rkpp;
        u32 v;

        memset(&rkpp, 0, sizeof(rkpp));

        strscpy(rkpp.type, "kpp", sizeof(rkpp.type));

        v = atomic_read(&alg->setsecret_cnt);
        rkpp.stat_setsecret_cnt = v;
        v = atomic_read(&alg->generate_public_key_cnt);
        rkpp.stat_generate_public_key_cnt = v;
        v = atomic_read(&alg->compute_shared_secret_cnt);
        rkpp.stat_compute_shared_secret_cnt = v;
        v = atomic_read(&alg->kpp_err_cnt);
        rkpp.stat_kpp_err_cnt = v;

        return nla_put(skb, CRYPTOCFGA_STAT_KPP, sizeof(rkpp), &rkpp);
}

static int crypto_report_ahash(struct sk_buff *skb, struct crypto_alg *alg)
{
        struct crypto_stat rhash;
        u64 v64;
        u32 v32;

        memset(&rhash, 0, sizeof(rhash));

        strscpy(rhash.type, "ahash", sizeof(rhash.type));

        v32 = atomic_read(&alg->hash_cnt);
        rhash.stat_hash_cnt = v32;
        v64 = atomic64_read(&alg->hash_tlen);
        rhash.stat_hash_tlen = v64;
        v32 = atomic_read(&alg->hash_err_cnt);
        rhash.stat_hash_err_cnt = v32;

        return nla_put(skb, CRYPTOCFGA_STAT_HASH, sizeof(rhash), &rhash);
}

static int crypto_report_shash(struct sk_buff *skb, struct crypto_alg *alg)
{
        struct crypto_stat rhash;
        u64 v64;
        u32 v32;

        memset(&rhash, 0, sizeof(rhash));

        strscpy(rhash.type, "shash", sizeof(rhash.type));

        v32 = atomic_read(&alg->hash_cnt);
        rhash.stat_hash_cnt = v32;
        v64 = atomic64_read(&alg->hash_tlen);
        rhash.stat_hash_tlen = v64;
        v32 = atomic_read(&alg->hash_err_cnt);
        rhash.stat_hash_err_cnt = v32;

        return nla_put(skb, CRYPTOCFGA_STAT_HASH, sizeof(rhash), &rhash);
}

static int crypto_report_rng(struct sk_buff *skb, struct crypto_alg *alg)
{
        struct crypto_stat rrng;
        u64 v64;
        u32 v32;

        memset(&rrng, 0, sizeof(rrng));

        strscpy(rrng.type, "rng", sizeof(rrng.type));

        v32 = atomic_read(&alg->generate_cnt);
        rrng.stat_generate_cnt = v32;
        v64 = atomic64_read(&alg->generate_tlen);
        rrng.stat_generate_tlen = v64;
        v32 = atomic_read(&alg->seed_cnt);
        rrng.stat_seed_cnt = v32;
        v32 = atomic_read(&alg->hash_err_cnt);
        rrng.stat_rng_err_cnt = v32;

        return nla_put(skb, CRYPTOCFGA_STAT_RNG, sizeof(rrng), &rrng);
}

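/*
 * Fill in the common crypto_user_alg header for @alg, then dispatch on the
 * algorithm type to append the matching CRYPTOCFGA_STAT_* attribute.
 */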
static int crypto_reportstat_one(struct crypto_alg *alg,
                                 struct crypto_user_alg *ualg,
                                 struct sk_buff *skb)
{
        memset(ualg, 0, sizeof(*ualg));

        strscpy(ualg->cru_name, alg->cra_name, sizeof(ualg->cru_name));
        strscpy(ualg->cru_driver_name, alg->cra_driver_name,
                sizeof(ualg->cru_driver_name));
        strscpy(ualg->cru_module_name, module_name(alg->cra_module),
                sizeof(ualg->cru_module_name));

        ualg->cru_type = 0;
        ualg->cru_mask = 0;
        ualg->cru_flags = alg->cra_flags;
        ualg->cru_refcnt = refcount_read(&alg->cra_refcnt);

        if (nla_put_u32(skb, CRYPTOCFGA_PRIORITY_VAL, alg->cra_priority))
                goto nla_put_failure;
        if (alg->cra_flags & CRYPTO_ALG_LARVAL) {
                struct crypto_stat rl;

                memset(&rl, 0, sizeof(rl));
                strscpy(rl.type, "larval", sizeof(rl.type));
                if (nla_put(skb, CRYPTOCFGA_STAT_LARVAL, sizeof(rl), &rl))
                        goto nla_put_failure;
                goto out;
        }

        switch (alg->cra_flags & (CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_LARVAL)) {
        case CRYPTO_ALG_TYPE_AEAD:
                if (crypto_report_aead(skb, alg))
                        goto nla_put_failure;
                break;
        case CRYPTO_ALG_TYPE_SKCIPHER:
                if (crypto_report_cipher(skb, alg))
                        goto nla_put_failure;
                break;
        case CRYPTO_ALG_TYPE_BLKCIPHER:
                if (crypto_report_cipher(skb, alg))
                        goto nla_put_failure;
                break;
        case CRYPTO_ALG_TYPE_CIPHER:
                if (crypto_report_cipher(skb, alg))
                        goto nla_put_failure;
                break;
        case CRYPTO_ALG_TYPE_COMPRESS:
                if (crypto_report_comp(skb, alg))
                        goto nla_put_failure;
                break;
        case CRYPTO_ALG_TYPE_ACOMPRESS:
                if (crypto_report_acomp(skb, alg))
                        goto nla_put_failure;
                break;
        case CRYPTO_ALG_TYPE_SCOMPRESS:
                if (crypto_report_acomp(skb, alg))
                        goto nla_put_failure;
                break;
        case CRYPTO_ALG_TYPE_AKCIPHER:
                if (crypto_report_akcipher(skb, alg))
                        goto nla_put_failure;
                break;
        case CRYPTO_ALG_TYPE_KPP:
                if (crypto_report_kpp(skb, alg))
                        goto nla_put_failure;
                break;
        case CRYPTO_ALG_TYPE_AHASH:
                if (crypto_report_ahash(skb, alg))
                        goto nla_put_failure;
                break;
        case CRYPTO_ALG_TYPE_HASH:
                if (crypto_report_shash(skb, alg))
                        goto nla_put_failure;
                break;
        case CRYPTO_ALG_TYPE_RNG:
                if (crypto_report_rng(skb, alg))
                        goto nla_put_failure;
                break;
        default:
                pr_err("ERROR: Unhandled alg %d in %s\n",
                       alg->cra_flags & (CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_LARVAL),
                       __func__);
        }

out:
        return 0;

nla_put_failure:
        return -EMSGSIZE;
}

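/*
 * Wrap the statistics of a single algorithm in a CRYPTO_MSG_GETSTAT netlink
 * message on the output skb, cancelling the message if it does not fit.
 */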
static int crypto_reportstat_alg(struct crypto_alg *alg,
                                 struct crypto_dump_info *info)
{
        struct sk_buff *in_skb = info->in_skb;
        struct sk_buff *skb = info->out_skb;
        struct nlmsghdr *nlh;
        struct crypto_user_alg *ualg;
        int err = 0;

        nlh = nlmsg_put(skb, NETLINK_CB(in_skb).portid, info->nlmsg_seq,
                        CRYPTO_MSG_GETSTAT, sizeof(*ualg), info->nlmsg_flags);
        if (!nlh) {
                err = -EMSGSIZE;
                goto out;
        }

        ualg = nlmsg_data(nlh);

        err = crypto_reportstat_one(alg, ualg, skb);
        if (err) {
                nlmsg_cancel(skb, nlh);
                goto out;
        }

        nlmsg_end(skb, nlh);

out:
        return err;
}

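/*
 * Handle a CRYPTO_MSG_GETSTAT request: look up the algorithm named in the
 * request, build a reply skb with its statistics and unicast it back to
 * the requesting socket.
 */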
int crypto_reportstat(struct sk_buff *in_skb, struct nlmsghdr *in_nlh,
                      struct nlattr **attrs)
{
        struct crypto_user_alg *p = nlmsg_data(in_nlh);
        struct crypto_alg *alg;
        struct sk_buff *skb;
        struct crypto_dump_info info;
        int err;

        if (!null_terminated(p->cru_name) || !null_terminated(p->cru_driver_name))
                return -EINVAL;

        alg = crypto_alg_match(p, 0);
        if (!alg)
                return -ENOENT;

        err = -ENOMEM;
        skb = nlmsg_new(NLMSG_DEFAULT_SIZE, GFP_ATOMIC);
        if (!skb)
                goto drop_alg;

        info.in_skb = in_skb;
        info.out_skb = skb;
        info.nlmsg_seq = in_nlh->nlmsg_seq;
        info.nlmsg_flags = 0;

        err = crypto_reportstat_alg(alg, &info);

drop_alg:
        crypto_mod_put(alg);

        if (err)
                return err;

        return nlmsg_unicast(crypto_nlsk, skb, NETLINK_CB(in_skb).portid);
}

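/*
 * Netlink dump callback: emit the statistics of every algorithm on
 * crypto_alg_list as a multipart message, completing in a single pass.
 */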
int crypto_dump_reportstat(struct sk_buff *skb, struct netlink_callback *cb)
{
        struct crypto_alg *alg;
        struct crypto_dump_info info;
        int err;

        if (cb->args[0])
                goto out;

        cb->args[0] = 1;

        info.in_skb = cb->skb;
        info.out_skb = skb;
        info.nlmsg_seq = cb->nlh->nlmsg_seq;
        info.nlmsg_flags = NLM_F_MULTI;

        list_for_each_entry(alg, &crypto_alg_list, cra_list) {
                err = crypto_reportstat_alg(alg, &info);
                if (err)
                        goto out_err;
        }

out:
        return skb->len;
out_err:
        return err;
}

int crypto_dump_reportstat_done(struct netlink_callback *cb)
{
        return 0;
}

MODULE_LICENSE("GPL");
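
For context, here is a minimal user-space sketch (not part of this file) of how the CRYPTO_MSG_GETSTAT handler above can be exercised over a NETLINK_CRYPTO socket. It assumes a kernel whose uapi headers provide CRYPTO_MSG_GETSTAT, trims error handling, and uses "sha256-generic" purely as an example driver name; depending on kernel version the request may also require CAP_NET_ADMIN.

#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <sys/socket.h>
#include <linux/netlink.h>
#include <linux/cryptouser.h>

int main(void)
{
        struct {
                struct nlmsghdr nlh;
                struct crypto_user_alg cru;
        } req;
        struct sockaddr_nl nl = { .nl_family = AF_NETLINK };
        char buf[8192];
        ssize_t len;
        int fd;

        fd = socket(AF_NETLINK, SOCK_RAW, NETLINK_CRYPTO);
        if (fd < 0)
                return 1;

        memset(&req, 0, sizeof(req));
        req.nlh.nlmsg_len = NLMSG_LENGTH(sizeof(req.cru));
        req.nlh.nlmsg_type = CRYPTO_MSG_GETSTAT;
        req.nlh.nlmsg_flags = NLM_F_REQUEST;
        /* Example driver name; any registered algorithm driver works. */
        strncpy(req.cru.cru_driver_name, "sha256-generic",
                sizeof(req.cru.cru_driver_name) - 1);

        if (sendto(fd, &req, req.nlh.nlmsg_len, 0,
                   (struct sockaddr *)&nl, sizeof(nl)) < 0) {
                close(fd);
                return 1;
        }

        /* The reply carries a crypto_user_alg header followed by the
         * netlink attributes built by crypto_reportstat_one() above. */
        len = recv(fd, buf, sizeof(buf), 0);
        if (len > 0)
                printf("got %zd bytes of statistics reply\n", len);

        close(fd);
        return 0;
}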