/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#include <linux/compiler.h>
#include <linux/crypto.h>
#include <linux/errno.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/rwsem.h>
#include <linux/slab.h>
#include <linux/string.h>
/* List of all registered algorithms; every traversal or mutation must
 * hold crypto_alg_sem (read for lookup, write for register/unregister). */
LIST_HEAD(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
32 static inline struct crypto_alg *crypto_alg_get(struct crypto_alg *alg)
34 atomic_inc(&alg->cra_refcnt);
38 static inline void crypto_alg_put(struct crypto_alg *alg)
40 if (atomic_dec_and_test(&alg->cra_refcnt) && alg->cra_destroy)
41 alg->cra_destroy(alg);
44 static struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
46 return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
49 static void crypto_mod_put(struct crypto_alg *alg)
52 module_put(alg->cra_module);
55 static struct crypto_alg *crypto_alg_lookup(const char *name)
57 struct crypto_alg *q, *alg = NULL;
63 down_read(&crypto_alg_sem);
65 list_for_each_entry(q, &crypto_alg_list, cra_list) {
68 exact = !strcmp(q->cra_driver_name, name);
69 fuzzy = !strcmp(q->cra_name, name);
70 if (!exact && !(fuzzy && q->cra_priority > best))
73 if (unlikely(!crypto_mod_get(q)))
76 best = q->cra_priority;
85 up_read(&crypto_alg_sem);
/* A far more intelligent version of this is planned.  For now, just
 * try an exact match on the name of the algorithm.  If the lookup
 * misses, request_module(name) is tried once and the lookup retried. */
static inline struct crypto_alg *crypto_alg_mod_lookup(const char *name)
{
	return try_then_request_module(crypto_alg_lookup(name), name);
}
96 static int crypto_init_flags(struct crypto_tfm *tfm, u32 flags)
98 tfm->crt_flags = flags & CRYPTO_TFM_REQ_MASK;
99 flags &= ~CRYPTO_TFM_REQ_MASK;
101 switch (crypto_tfm_alg_type(tfm)) {
102 case CRYPTO_ALG_TYPE_CIPHER:
103 return crypto_init_cipher_flags(tfm, flags);
105 case CRYPTO_ALG_TYPE_DIGEST:
106 return crypto_init_digest_flags(tfm, flags);
108 case CRYPTO_ALG_TYPE_COMPRESS:
109 return crypto_init_compress_flags(tfm, flags);
119 static int crypto_init_ops(struct crypto_tfm *tfm)
121 switch (crypto_tfm_alg_type(tfm)) {
122 case CRYPTO_ALG_TYPE_CIPHER:
123 return crypto_init_cipher_ops(tfm);
125 case CRYPTO_ALG_TYPE_DIGEST:
126 return crypto_init_digest_ops(tfm);
128 case CRYPTO_ALG_TYPE_COMPRESS:
129 return crypto_init_compress_ops(tfm);
139 static void crypto_exit_ops(struct crypto_tfm *tfm)
141 switch (crypto_tfm_alg_type(tfm)) {
142 case CRYPTO_ALG_TYPE_CIPHER:
143 crypto_exit_cipher_ops(tfm);
146 case CRYPTO_ALG_TYPE_DIGEST:
147 crypto_exit_digest_ops(tfm);
150 case CRYPTO_ALG_TYPE_COMPRESS:
151 crypto_exit_compress_ops(tfm);
160 static unsigned int crypto_ctxsize(struct crypto_alg *alg, int flags)
164 switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
168 case CRYPTO_ALG_TYPE_CIPHER:
169 len = crypto_cipher_ctxsize(alg, flags);
172 case CRYPTO_ALG_TYPE_DIGEST:
173 len = crypto_digest_ctxsize(alg, flags);
176 case CRYPTO_ALG_TYPE_COMPRESS:
177 len = crypto_compress_ctxsize(alg, flags);
181 return len + (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
184 struct crypto_tfm *crypto_alloc_tfm(const char *name, u32 flags)
186 struct crypto_tfm *tfm = NULL;
187 struct crypto_alg *alg;
188 unsigned int tfm_size;
190 alg = crypto_alg_mod_lookup(name);
194 tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, flags);
195 tfm = kzalloc(tfm_size, GFP_KERNEL);
199 tfm->__crt_alg = alg;
201 if (crypto_init_flags(tfm, flags))
204 if (crypto_init_ops(tfm))
207 if (alg->cra_init && alg->cra_init(tfm))
208 goto cra_init_failed;
213 crypto_exit_ops(tfm);
223 void crypto_free_tfm(struct crypto_tfm *tfm)
225 struct crypto_alg *alg;
231 alg = tfm->__crt_alg;
232 size = sizeof(*tfm) + alg->cra_ctxsize;
236 crypto_exit_ops(tfm);
238 memset(tfm, 0, size);
242 static inline int crypto_set_driver_name(struct crypto_alg *alg)
244 static const char suffix[] = "-generic";
245 char *driver_name = alg->cra_driver_name;
251 len = strlcpy(driver_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
252 if (len + sizeof(suffix) > CRYPTO_MAX_ALG_NAME)
253 return -ENAMETOOLONG;
255 memcpy(driver_name + len, suffix, sizeof(suffix));
259 int crypto_register_alg(struct crypto_alg *alg)
262 struct crypto_alg *q;
264 if (alg->cra_alignmask & (alg->cra_alignmask + 1))
267 if (alg->cra_alignmask & alg->cra_blocksize)
270 if (alg->cra_blocksize > PAGE_SIZE / 8)
273 if (alg->cra_priority < 0)
276 ret = crypto_set_driver_name(alg);
280 down_write(&crypto_alg_sem);
282 list_for_each_entry(q, &crypto_alg_list, cra_list) {
289 list_add(&alg->cra_list, &crypto_alg_list);
290 atomic_set(&alg->cra_refcnt, 1);
292 up_write(&crypto_alg_sem);
296 int crypto_unregister_alg(struct crypto_alg *alg)
299 struct crypto_alg *q;
301 down_write(&crypto_alg_sem);
302 list_for_each_entry(q, &crypto_alg_list, cra_list) {
304 list_del(&alg->cra_list);
310 up_write(&crypto_alg_sem);
315 BUG_ON(atomic_read(&alg->cra_refcnt) != 1);
316 if (alg->cra_destroy)
317 alg->cra_destroy(alg);
322 int crypto_alg_available(const char *name, u32 flags)
325 struct crypto_alg *alg = crypto_alg_mod_lookup(name);
335 static int __init init_crypto(void)
337 printk(KERN_INFO "Initializing Cryptographic API\n");
342 __initcall(init_crypto);
EXPORT_SYMBOL_GPL(crypto_register_alg);
EXPORT_SYMBOL_GPL(crypto_unregister_alg);
EXPORT_SYMBOL_GPL(crypto_alloc_tfm);
EXPORT_SYMBOL_GPL(crypto_free_tfm);
EXPORT_SYMBOL_GPL(crypto_alg_available);