/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/string.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

static inline struct crypto_alg *crypto_alg_get(struct crypto_alg *alg)
{
        atomic_inc(&alg->cra_refcnt);
        return alg;
}

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
        return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
        struct module *module = alg->cra_module;

        crypto_alg_put(alg);
        module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

static inline int crypto_is_test_larval(struct crypto_larval *larval)
{
        return larval->alg.cra_driver_name[0];
}

static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
                                              u32 mask)
{
        struct crypto_alg *q, *alg = NULL;
        int best = -2;

        list_for_each_entry(q, &crypto_alg_list, cra_list) {
                int exact, fuzzy;

                if (crypto_is_moribund(q))
                        continue;

                if ((q->cra_flags ^ type) & mask)
                        continue;

                if (crypto_is_larval(q) &&
                    !crypto_is_test_larval((struct crypto_larval *)q) &&
                    ((struct crypto_larval *)q)->mask != mask)
                        continue;

                exact = !strcmp(q->cra_driver_name, name);
                fuzzy = !strcmp(q->cra_name, name);
                if (!exact && !(fuzzy && q->cra_priority > best))
                        continue;

                if (unlikely(!crypto_mod_get(q)))
                        continue;

                best = q->cra_priority;
                if (alg)
                        crypto_mod_put(alg);
                alg = q;

                if (exact)
                        break;
        }

        return alg;
}

static void crypto_larval_destroy(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;

        BUG_ON(!crypto_is_larval(alg));
        if (larval->adult)
                crypto_mod_put(larval->adult);
        kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
        struct crypto_larval *larval;

        larval = kzalloc(sizeof(*larval), GFP_KERNEL);
        if (!larval)
                return ERR_PTR(-ENOMEM);

        larval->mask = mask;
        larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
        larval->alg.cra_priority = -1;
        larval->alg.cra_destroy = crypto_larval_destroy;

        strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
        init_completion(&larval->completion);

        return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
                                            u32 mask)
{
        struct crypto_alg *alg;
        struct crypto_larval *larval;

        larval = crypto_larval_alloc(name, type, mask);
        if (IS_ERR(larval))
                return ERR_CAST(larval);

        atomic_set(&larval->alg.cra_refcnt, 2);

        down_write(&crypto_alg_sem);
        alg = __crypto_alg_lookup(name, type, mask);
        if (!alg) {
                alg = &larval->alg;
                list_add(&alg->cra_list, &crypto_alg_list);
        }
        up_write(&crypto_alg_sem);

        if (alg != &larval->alg)
                kfree(larval);

        return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;

        down_write(&crypto_alg_sem);
        list_del(&alg->cra_list);
        up_write(&crypto_alg_sem);
        complete_all(&larval->completion);
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;
        long timeout;

        timeout = wait_for_completion_interruptible_timeout(
                &larval->completion, 60 * HZ);

        alg = larval->adult;
        if (timeout < 0)
                alg = ERR_PTR(-EINTR);
        else if (!timeout)
                alg = ERR_PTR(-ETIMEDOUT);
        else if (!alg)
                alg = ERR_PTR(-ENOENT);
        else if (crypto_is_test_larval(larval) &&
                 !(alg->cra_flags & CRYPTO_ALG_TESTED))
                alg = ERR_PTR(-EAGAIN);
        else if (!crypto_mod_get(alg))
                alg = ERR_PTR(-EAGAIN);
        crypto_mod_put(&larval->alg);

        return alg;
}

struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, u32 mask)
{
        struct crypto_alg *alg;

        down_read(&crypto_alg_sem);
        alg = __crypto_alg_lookup(name, type, mask);
        up_read(&crypto_alg_sem);

        return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_lookup);

struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask)
{
        struct crypto_alg *alg;

        if (!name)
                return ERR_PTR(-ENOENT);

        mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
        type &= mask;

        alg = crypto_alg_lookup(name, type, mask);
        if (!alg) {
                char tmp[CRYPTO_MAX_ALG_NAME];

                request_module(name);

                if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask) &&
                    snprintf(tmp, sizeof(tmp), "%s-all", name) < sizeof(tmp))
                        request_module(tmp);

                alg = crypto_alg_lookup(name, type, mask);
        }

        if (alg)
                return crypto_is_larval(alg) ? crypto_larval_wait(alg) : alg;

        return crypto_larval_add(name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_larval_lookup);

int crypto_probing_notify(unsigned long val, void *v)
{
        int ok;

        ok = blocking_notifier_call_chain(&crypto_chain, val, v);
        if (ok == NOTIFY_DONE) {
                request_module("cryptomgr");
                ok = blocking_notifier_call_chain(&crypto_chain, val, v);
        }

        return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
        struct crypto_alg *alg;
        struct crypto_alg *larval;
        int ok;

        if (!((type | mask) & CRYPTO_ALG_TESTED)) {
                type |= CRYPTO_ALG_TESTED;
                mask |= CRYPTO_ALG_TESTED;
        }

        larval = crypto_larval_lookup(name, type, mask);
        if (IS_ERR(larval) || !crypto_is_larval(larval))
                return larval;

        ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

        if (ok == NOTIFY_STOP)
                alg = crypto_larval_wait(larval);
        else {
                crypto_mod_put(larval);
                alg = ERR_PTR(-ENOENT);
        }
        crypto_larval_kill(larval);
        return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);

static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
        const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

        if (type_obj)
                return type_obj->init(tfm, type, mask);

        switch (crypto_tfm_alg_type(tfm)) {
        case CRYPTO_ALG_TYPE_CIPHER:
                return crypto_init_cipher_ops(tfm);

        case CRYPTO_ALG_TYPE_DIGEST:
                if ((mask & CRYPTO_ALG_TYPE_HASH_MASK) !=
                    CRYPTO_ALG_TYPE_HASH_MASK)
                        return crypto_init_digest_ops_async(tfm);
                else
                        return crypto_init_digest_ops(tfm);

        case CRYPTO_ALG_TYPE_COMPRESS:
                return crypto_init_compress_ops(tfm);

        default:
                break;
        }

        BUG();
        return -EINVAL;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
        const struct crypto_type *type = tfm->__crt_alg->cra_type;

        if (type) {
                if (tfm->exit)
                        tfm->exit(tfm);
                return;
        }

        switch (crypto_tfm_alg_type(tfm)) {
        case CRYPTO_ALG_TYPE_CIPHER:
                crypto_exit_cipher_ops(tfm);
                break;

        case CRYPTO_ALG_TYPE_DIGEST:
                crypto_exit_digest_ops(tfm);
                break;

        case CRYPTO_ALG_TYPE_COMPRESS:
                crypto_exit_compress_ops(tfm);
                break;

        default:
                BUG();
        }
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
        const struct crypto_type *type_obj = alg->cra_type;
        unsigned int len;

        len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
        if (type_obj)
                return len + type_obj->ctxsize(alg, type, mask);

        switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
        default:
                BUG();

        case CRYPTO_ALG_TYPE_CIPHER:
                len += crypto_cipher_ctxsize(alg);
                break;

        case CRYPTO_ALG_TYPE_DIGEST:
                len += crypto_digest_ctxsize(alg);
                break;

        case CRYPTO_ALG_TYPE_COMPRESS:
                len += crypto_compress_ctxsize(alg);
                break;
        }

        return len;
}

void crypto_shoot_alg(struct crypto_alg *alg)
{
        down_write(&crypto_alg_sem);
        alg->cra_flags |= CRYPTO_ALG_DYING;
        up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
                                      u32 mask)
{
        struct crypto_tfm *tfm = NULL;
        unsigned int tfm_size;
        int err = -ENOMEM;

        tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
        tfm = kzalloc(tfm_size, GFP_KERNEL);
        if (tfm == NULL)
                goto out_err;

        tfm->__crt_alg = alg;

        err = crypto_init_ops(tfm, type, mask);
        if (err)
                goto out_free_tfm;

        if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
                goto cra_init_failed;

        goto out;

cra_init_failed:
        crypto_exit_ops(tfm);
out_free_tfm:
        if (err == -EAGAIN)
                crypto_shoot_alg(alg);
        kfree(tfm);
out_err:
        tfm = ERR_PTR(err);
out:
        return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 * crypto_alloc_base - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * This function should not be used by new algorithm types.
 * Please use crypto_alloc_tfm instead.
 *
 * crypto_alloc_base() will first attempt to locate an already loaded
 * algorithm.  If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias.  If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly.  A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type.  Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_blkcipher.
 *
 * In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
        struct crypto_tfm *tfm;
        int err;

        for (;;) {
                struct crypto_alg *alg;

                alg = crypto_alg_mod_lookup(alg_name, type, mask);
                if (IS_ERR(alg)) {
                        err = PTR_ERR(alg);
                        goto err;
                }

                tfm = __crypto_alloc_tfm(alg, type, mask);
                if (!IS_ERR(tfm))
                        return tfm;

                crypto_mod_put(alg);
                err = PTR_ERR(tfm);

err:
                if (err != -EAGAIN)
                        break;
                if (signal_pending(current)) {
                        err = -EINTR;
                        break;
                }
        }

        return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);

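/*
 * Usage sketch (not part of the original file): callers normally reach
 * crypto_alloc_base() through one of the typed wrappers in
 * include/linux/crypto.h, e.g. crypto_alloc_cipher() for a single-block
 * cipher.  The algorithm name "aes" and the CRYPTO_ALG_ASYNC mask below
 * are illustrative only.
 *
 *      struct crypto_cipher *cipher;
 *
 *      cipher = crypto_alloc_cipher("aes", 0, CRYPTO_ALG_ASYNC);
 *      if (IS_ERR(cipher))
 *              return PTR_ERR(cipher);
 *
 *      crypto_cipher_setkey(cipher, key, keylen);
 *      crypto_cipher_encrypt_one(cipher, dst, src);
 *      crypto_free_cipher(cipher);
 */
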
void *crypto_create_tfm(struct crypto_alg *alg,
                        const struct crypto_type *frontend)
{
        char *mem;
        struct crypto_tfm *tfm = NULL;
        unsigned int tfmsize;
        unsigned int total;
        int err = -ENOMEM;

        tfmsize = frontend->tfmsize;
        total = tfmsize + sizeof(*tfm) + frontend->extsize(alg, frontend);

        mem = kzalloc(total, GFP_KERNEL);
        if (mem == NULL)
                goto out_err;

        tfm = (struct crypto_tfm *)(mem + tfmsize);
        tfm->__crt_alg = alg;

        err = frontend->init_tfm(tfm, frontend);
        if (err)
                goto out_free_tfm;

        if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
                goto cra_init_failed;

        goto out;

cra_init_failed:
        crypto_exit_ops(tfm);
out_free_tfm:
        if (err == -EAGAIN)
                crypto_shoot_alg(alg);
        kfree(mem);
out_err:
        mem = ERR_PTR(err);
out:
        return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm);

/*
 * crypto_alloc_tfm - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @frontend: Frontend algorithm type
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * crypto_alloc_tfm() will first attempt to locate an already loaded
 * algorithm.  If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias.  If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly.  A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type.  Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_blkcipher.
 *
 * In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm(const char *alg_name,
                       const struct crypto_type *frontend, u32 type, u32 mask)
{
        struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask);
        void *tfm;
        int err;

        type &= frontend->maskclear;
        mask &= frontend->maskclear;
        type |= frontend->type;
        mask |= frontend->maskset;

        lookup = frontend->lookup ?: crypto_alg_mod_lookup;

        for (;;) {
                struct crypto_alg *alg;

                alg = lookup(alg_name, type, mask);
                if (IS_ERR(alg)) {
                        err = PTR_ERR(alg);
                        goto err;
                }

                tfm = crypto_create_tfm(alg, frontend);
                if (!IS_ERR(tfm))
                        return tfm;

                crypto_mod_put(alg);
                err = PTR_ERR(tfm);

err:
                if (err != -EAGAIN)
                        break;
                if (signal_pending(current)) {
                        err = -EINTR;
                        break;
                }
        }

        return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm);

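/*
 * Usage sketch (not part of the original file): new-style algorithm types
 * pass their struct crypto_type frontend into crypto_alloc_tfm() through
 * their own wrappers, e.g. crypto_alloc_shash() for synchronous hashes.
 * The algorithm name "sha1" below is illustrative only.
 *
 *      struct crypto_shash *hash;
 *
 *      hash = crypto_alloc_shash("sha1", 0, 0);
 *      if (IS_ERR(hash))
 *              return PTR_ERR(hash);
 *
 *      ...
 *      crypto_free_shash(hash);
 */
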
/*
 * crypto_destroy_tfm - Free crypto transform
 * @mem: Start of tfm slab
 * @tfm: Transform to free
 *
 * This function frees up the transform and any associated resources,
 * then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
        struct crypto_alg *alg;
        int size;

        if (unlikely(!mem))
                return;

        alg = tfm->__crt_alg;
        size = ksize(mem);

        if (!tfm->exit && alg->cra_exit)
                alg->cra_exit(tfm);
        crypto_exit_ops(tfm);
        crypto_mod_put(alg);
        memset(mem, 0, size);
        kfree(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);

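/*
 * Usage sketch (not part of the original file): crypto_destroy_tfm() is
 * normally reached through crypto_free_tfm() or a typed wrapper such as
 * crypto_free_cipher(), paired with the corresponding allocation call.
 * The allocation below is illustrative only.
 *
 *      struct crypto_tfm *tfm;
 *
 *      tfm = crypto_alloc_base("sha1", 0, CRYPTO_ALG_ASYNC);
 *      if (!IS_ERR(tfm))
 *              crypto_free_tfm(tfm);
 */
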
int crypto_has_alg(const char *name, u32 type, u32 mask)
{
        int ret = 0;
        struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

        if (!IS_ERR(alg)) {
                crypto_mod_put(alg);
                ret = 1;
        }

        return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);

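/*
 * Usage sketch (not part of the original file): crypto_has_alg() lets a
 * caller probe for algorithm availability without keeping a transform
 * around, e.g. at module init time.  The name "sha256" is illustrative.
 *
 *      if (!crypto_has_alg("sha256", 0, 0))
 *              return -ENOENT;
 */
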
MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");