/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
	struct module *module = alg->cra_module;

	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

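/*
 * Added note (commentary, not in the original source): crypto_mod_get()
 * and crypto_mod_put() pin both the algorithm object and the module that
 * provides it, so a module cannot be unloaded while one of its algorithms
 * is in use.  A typical caller pattern might look like:
 *
 *	struct crypto_alg *alg = crypto_alg_lookup("sha256", 0, 0);
 *
 *	if (alg) {
 *		...
 *		crypto_mod_put(alg);	(drops both references)
 *	}
 */
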
static inline int crypto_is_test_larval(struct crypto_larval *larval)
{
	return larval->alg.cra_driver_name[0];
}

static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		if (crypto_is_larval(q) &&
		    !crypto_is_test_larval((struct crypto_larval *)q) &&
		    ((struct crypto_larval *)q)->mask != mask)
			continue;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}

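/*
 * Added example of the matching rules above: a driver name such as
 * "sha256-generic" only matches exactly, while the generic name "sha256"
 * matches any implementation, with the highest cra_priority winning.
 * Assuming sha256-generic (priority 100) and a hypothetical accelerated
 * sha256-accel (priority 300) are both registered, a lookup of "sha256"
 * returns sha256-accel, while "sha256-generic" still selects the generic
 * implementation.
 */
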
static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (larval->adult)
		crypto_mod_put(larval->adult);
	kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

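/*
 * Added note: a larval is a placeholder for an algorithm that is still
 * being loaded, constructed or tested.  Concurrent lookups of the same
 * name find the larval and sleep on its completion rather than kicking
 * off duplicate module loads; once the real ("adult") algorithm is
 * registered, the waiters are released via complete_all().
 */
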
static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval))
		return ERR_CAST(larval);

	atomic_set(&larval->alg.cra_refcnt, 2);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	if (alg != &larval->alg) {
		kfree(larval);
		if (crypto_is_larval(alg))
			alg = crypto_larval_wait(alg);
	}

	return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	down_write(&crypto_alg_sem);
	list_del(&alg->cra_list);
	up_write(&crypto_alg_sem);
	complete_all(&larval->completion);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;
	long timeout;

	timeout = wait_for_completion_killable_timeout(
		&larval->completion, 60 * HZ);

	alg = larval->adult;
	if (timeout < 0)
		alg = ERR_PTR(-EINTR);
	else if (!timeout)
		alg = ERR_PTR(-ETIMEDOUT);
	else if (!alg)
		alg = ERR_PTR(-ENOENT);
	else if (crypto_is_test_larval(larval) &&
		 !(alg->cra_flags & CRYPTO_ALG_TESTED))
		alg = ERR_PTR(-EAGAIN);
	else if (!crypto_mod_get(alg))
		alg = ERR_PTR(-EAGAIN);
	crypto_mod_put(&larval->alg);

	return alg;
}

struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	up_read(&crypto_alg_sem);

	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_lookup);

struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	if (!name)
		return ERR_PTR(-ENOENT);

	type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);

	alg = crypto_alg_lookup(name, type, mask);
	if (!alg) {
		request_module("crypto-%s", name);

		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
		      CRYPTO_ALG_NEED_FALLBACK))
			request_module("crypto-%s-all", name);

		alg = crypto_alg_lookup(name, type, mask);
	}

	if (alg)
		return crypto_is_larval(alg) ? crypto_larval_wait(alg) : alg;

	return crypto_larval_add(name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_larval_lookup);

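/*
 * Added note: the request_module("crypto-%s", ...) call relies on
 * providers declaring MODULE_ALIAS_CRYPTO("name"), so e.g. a lookup of
 * "sha256" can pull in whichever module advertises the "crypto-sha256"
 * alias.  The "-all" variant is skipped only when the caller explicitly
 * demands an implementation that must not itself require a fallback.
 */
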
int crypto_probing_notify(unsigned long val, void *v)
{
	int ok;

	ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	if (ok == NOTIFY_DONE) {
		request_module("cryptomgr");
		ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	}

	return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	if (!((type | mask) & CRYPTO_ALG_TESTED)) {
		type |= CRYPTO_ALG_TESTED;
		mask |= CRYPTO_ALG_TESTED;
	}

	/*
	 * If the internal flag is set for a cipher, require a caller to
	 * invoke the cipher with the internal flag to use that cipher.
	 * Also, if a caller wants to allocate a cipher that may or may
	 * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
	 * !(mask & CRYPTO_ALG_INTERNAL).
	 */
	if (!((type | mask) & CRYPTO_ALG_INTERNAL))
		mask |= CRYPTO_ALG_INTERNAL;

	larval = crypto_larval_lookup(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(larval);
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);

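/*
 * Added note on the overall flow: a lookup of e.g. "ctr(aes)" first
 * searches the registered algorithms, then tries module loading, and as
 * a last resort posts CRYPTO_MSG_ALG_REQUEST so that a loaded crypto
 * manager (cryptomgr) can instantiate the "ctr" template around an
 * "aes" cipher on the fly.
 */
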
static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

	if (type_obj)
		return type_obj->init(tfm, type, mask);

	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		return crypto_init_cipher_ops(tfm);

	case CRYPTO_ALG_TYPE_COMPRESS:
		return crypto_init_compress_ops(tfm);

	default:
		break;
	}

	BUG();
	return -EINVAL;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type) {
		if (tfm->exit)
			tfm->exit(tfm);
		return;
	}

	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		crypto_exit_cipher_ops(tfm);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		crypto_exit_compress_ops(tfm);
		break;

	default:
		BUG();
	}
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}

void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	struct crypto_tfm *tfm = NULL;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, GFP_KERNEL);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;

	err = crypto_init_ops(tfm, type, mask);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

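/*
 * Added note: an -EAGAIN from cra_init marks the algorithm as dying via
 * crypto_shoot_alg(); callers such as crypto_alloc_base() treat -EAGAIN
 * as a cue to redo the lookup, which then picks a different (or freshly
 * constructed) implementation.
 */
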
/*
 *	crypto_alloc_base - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	This function should not be used by new algorithm types.
 *	Please use crypto_alloc_tfm instead.
 *
 *	crypto_alloc_base() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of a non-determinate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_blkcipher.
 *
 *	In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);

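/*
 * Added usage sketch (illustrative only; assumes the "cbc(aes)" algorithm
 * is available):
 *
 *	struct crypto_tfm *tfm = crypto_alloc_base("cbc(aes)", 0, 0);
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_tfm(tfm);
 */
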
void *crypto_create_tfm(struct crypto_alg *alg,
			const struct crypto_type *frontend)
{
	char *mem;
	struct crypto_tfm *tfm = NULL;
	unsigned int tfmsize;
	unsigned int total;
	int err = -ENOMEM;

	tfmsize = frontend->tfmsize;
	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

	mem = kzalloc(total, GFP_KERNEL);
	if (mem == NULL)
		goto out_err;

	tfm = (struct crypto_tfm *)(mem + tfmsize);
	tfm->__crt_alg = alg;

	err = frontend->init_tfm(tfm);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(mem);
out_err:
	mem = ERR_PTR(err);
out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm);

struct crypto_alg *crypto_find_alg(const char *alg_name,
				   const struct crypto_type *frontend,
				   u32 type, u32 mask)
{
	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask) =
		crypto_alg_mod_lookup;

	if (frontend) {
		type &= frontend->maskclear;
		mask &= frontend->maskclear;
		type |= frontend->type;
		mask |= frontend->maskset;

		if (frontend->lookup)
			lookup = frontend->lookup;
	}

	return lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

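/*
 * Added worked example (field values are an assumption based on the
 * common frontend pattern): for a hash frontend, ->maskclear drops the
 * type bits the frontend owns, ->type forces CRYPTO_ALG_TYPE_SHASH into
 * the lookup type, and ->maskset forces CRYPTO_ALG_TYPE_MASK into the
 * mask, so whatever the caller passed in, only shash algorithms match.
 */
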
/*
 *	crypto_alloc_tfm - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@frontend: Frontend algorithm type
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	crypto_alloc_tfm() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of a non-determinate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_blkcipher.
 *
 *	In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm(const char *alg_name,
		       const struct crypto_type *frontend, u32 type, u32 mask)
{
	void *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_find_alg(alg_name, frontend, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = crypto_create_tfm(alg, frontend);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm);

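/*
 * Added usage sketch: type-specific allocators are thin wrappers around
 * this helper.  A hash frontend, for instance, might be implemented as:
 *
 *	struct crypto_shash *crypto_alloc_shash(const char *alg_name,
 *						u32 type, u32 mask)
 *	{
 *		return crypto_alloc_tfm(alg_name, &crypto_shash_type,
 *					type, mask);
 *	}
 */
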
/*
 *	crypto_destroy_tfm - Free crypto transform
 *	@mem: Start of tfm slab
 *	@tfm: Transform to free
 *
 *	This function frees up the transform and any associated resources,
 *	then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;

	if (unlikely(!mem))
		return;

	alg = tfm->__crt_alg;

	if (!tfm->exit && alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	kzfree(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);

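/*
 * Added note: @mem and @tfm differ because crypto_create_tfm() places
 * frontend-specific data before the struct crypto_tfm within a single
 * allocation; the type-specific free helpers therefore pass the start of
 * that allocation as @mem.
 */
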
int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);

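/*
 * Added usage sketch (illustrative): probe for an implementation before
 * enabling a feature that depends on it:
 *
 *	if (!crypto_has_alg("ghash", CRYPTO_ALG_TYPE_SHASH,
 *			    CRYPTO_ALG_TYPE_MASK))
 *		return -ENOENT;
 */
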
void crypto_req_done(struct crypto_async_request *req, int err)
{
	struct crypto_wait *wait = req->data;

	if (err == -EINPROGRESS)
		return;

	wait->err = err;
	complete(&wait->completion);
}
EXPORT_SYMBOL_GPL(crypto_req_done);

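/*
 * Added usage sketch: crypto_req_done() pairs with the crypto_wait
 * helpers in <linux/crypto.h> to drive an async request synchronously:
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				      crypto_req_done, &wait);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 */
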
MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");