/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>

#include "internal.h"

static const struct crypto_type crypto_shash_type;

static int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
			   unsigned int keylen)
{
	return -ENOSYS;
}

static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
	buffer = kmalloc(absize, GFP_KERNEL);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	kzfree(buffer);
	return err;
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)key & alignmask)
		return shash_setkey_unaligned(tfm, key, keylen);

	return shash->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);
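
/*
 * Illustrative only: a minimal sketch of keying an shash transform, assuming
 * an "hmac(sha256)" implementation is registered and that key/keylen are
 * supplied by the caller.
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("hmac(sha256)", 0, 0);
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_shash_setkey(tfm, key, keylen);
 */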

static inline unsigned int shash_align_buffer_size(unsigned len,
						   unsigned long mask)
{
	typedef u8 __attribute__ ((aligned)) u8_aligned;
	return len + (mask & ~(__alignof__(u8_aligned) - 1));
}

static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)]
		__attribute__ ((aligned));
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);
	err = shash->update(desc, buf, unaligned_len);
	memset(buf, 0, unaligned_len);

	return err ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	u8 ubuf[shash_align_buffer_size(ds, alignmask)]
		__attribute__ ((aligned));
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	err = shash->final(desc, buf);
	if (err)
		goto out;

	memcpy(out, buf, ds);

out:
	memset(buf, 0, ds);
	return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);
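
/*
 * Illustrative only: a minimal sketch of computing a one-shot digest with
 * the synchronous hash API, assuming a "sha256" shash driver is available
 * and that data/len come from the caller (error handling kept to a minimum;
 * SHASH_DESC_ON_STACK() can be used instead of kmalloc for small descriptors).
 *
 *	struct crypto_shash *tfm;
 *	struct shash_desc *desc;
 *	u8 digest[32];
 *	int err;
 *
 *	tfm = crypto_alloc_shash("sha256", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
 *	if (!desc) {
 *		crypto_free_shash(tfm);
 *		return -ENOMEM;
 *	}
 *	desc->tfm = tfm;
 *	desc->flags = 0;
 *
 *	err = crypto_shash_digest(desc, data, len, digest);
 *
 *	kzfree(desc);
 *	crypto_free_shash(tfm);
 *	return err;
 */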

static int shash_default_export(struct shash_desc *desc, void *out)
{
	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_init(desc);
}

int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
	return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}

int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return shash_ahash_finup(req, desc);
}

int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	int err;

	if (nbytes &&
	    (sg = req->src, offset = sg->offset,
	     nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
		void *data;

		data = kmap_atomic(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_atomic(data);
		crypto_yield(desc->flags);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static int shash_async_digest(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->finup = shash_async_finup;
	crt->digest = shash_async_digest;
	crt->setkey = shash_async_setkey;

	crt->has_setkey = alg->setkey != shash_no_setkey;

	if (alg->export)
		crt->export = shash_async_export;
	if (alg->import)
		crt->import = shash_async_import;

	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);

	hash->descsize = crypto_shash_alg(hash)->descsize;
	return 0;
}

#ifdef CONFIG_NET
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	strncpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	if (nla_put(skb, CRYPTOCFGA_REPORT_HASH,
		    sizeof(struct crypto_report_hash), &rhash))
		goto nla_put_failure;
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}
#else
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__attribute__ ((unused));
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

static const struct crypto_type crypto_shash_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.report = crypto_shash_report,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > PAGE_SIZE / 8 ||
	    alg->descsize > PAGE_SIZE / 8 ||
	    alg->statesize > PAGE_SIZE / 8)
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	if (!alg->finup)
		alg->finup = shash_finup_unaligned;
	if (!alg->digest)
		alg->digest = shash_digest_unaligned;
	if (!alg->export) {
		alg->export = shash_default_export;
		alg->import = shash_default_import;
		alg->statesize = alg->descsize;
	}
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);
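
/*
 * Illustrative only: a sketch of the struct shash_alg that a driver would
 * hand to crypto_register_shash().  The "example" algorithm, its callbacks
 * and its context structure are hypothetical and not part of this file.
 *
 *	static struct shash_alg example_alg = {
 *		.digestsize	= 16,
 *		.descsize	= sizeof(struct example_desc_ctx),
 *		.init		= example_init,
 *		.update		= example_update,
 *		.final		= example_final,
 *		.base		= {
 *			.cra_name	 = "example",
 *			.cra_driver_name = "example-generic",
 *			.cra_blocksize	 = 64,
 *			.cra_module	 = THIS_MODULE,
 *		},
 *	};
 *
 *	static int __init example_mod_init(void)
 *	{
 *		return crypto_register_shash(&example_alg);
 *	}
 *
 *	static void __exit example_mod_exit(void)
 *	{
 *		crypto_unregister_shash(&example_alg);
 *	}
 */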

int crypto_unregister_shash(struct shash_alg *alg)
{
	return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int crypto_register_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_shash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);

int crypto_unregister_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = count - 1; i >= 0; --i) {
		ret = crypto_unregister_shash(&algs[i]);
		if (ret)
			pr_err("Failed to unregister %s %s: %d\n",
			       algs[i].base.cra_driver_name,
			       algs[i].base.cra_name, ret);
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);

int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_instance(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);

int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
			    struct shash_alg *alg,
			    struct crypto_instance *inst)
{
	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
				  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);

struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
	return IS_ERR(alg) ? ERR_CAST(alg) :
	       container_of(alg, struct shash_alg, base);
}
EXPORT_SYMBOL_GPL(shash_attr_alg);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");