diff --git a/crypto/cryptd.c b/crypto/cryptd.c
index 2eb7058..704c141 100644
--- a/crypto/cryptd.c
+++ b/crypto/cryptd.c
@@ -99,7 +99,7 @@ static int cryptd_enqueue_request(struct cryptd_queue *queue,
        struct cryptd_cpu_queue *cpu_queue;
 
        cpu = get_cpu();
-       cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
+       cpu_queue = this_cpu_ptr(queue->cpu_queue);
        err = crypto_enqueue_request(&cpu_queue->queue, request);
        queue_work_on(cpu, kcrypto_wq, &cpu_queue->work);
        put_cpu();
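
Note on the hunk above: per_cpu_ptr(ptr, cpu) and this_cpu_ptr(ptr) resolve to the same per-CPU slot when cpu is the running CPU, which get_cpu() guarantees by disabling preemption. The sketch below illustrates that pattern with made-up names (example_queue, example_cpu_queue, pending); it is not code from this patch.

#include <linux/percpu.h>
#include <linux/smp.h>

struct example_cpu_queue {
	int pending;			/* stand-in for the real per-CPU queue state */
};

struct example_queue {
	struct example_cpu_queue __percpu *cpu_queue;
};

static void example_enqueue(struct example_queue *queue)
{
	struct example_cpu_queue *cpu_queue;
	int cpu;

	cpu = get_cpu();		/* disable preemption, remember the CPU */
	/* old form: cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu); */
	cpu_queue = this_cpu_ptr(queue->cpu_queue);
	cpu_queue->pending++;		/* safe: no migration while preemption is off */
	put_cpu();			/* 'cpu' remains valid for queue_work_on() */
}
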
@@ -682,6 +682,48 @@ void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm)
 }
 EXPORT_SYMBOL_GPL(cryptd_free_ablkcipher);
 
+struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
+                                       u32 type, u32 mask)
+{
+       char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
+       struct crypto_ahash *tfm;
+
+       if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
+                    "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
+               return ERR_PTR(-EINVAL);
+       tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
+       if (IS_ERR(tfm))
+               return ERR_CAST(tfm);
+       if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
+               crypto_free_ahash(tfm);
+               return ERR_PTR(-EINVAL);
+       }
+
+       return __cryptd_ahash_cast(tfm);
+}
+EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);
+
+struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
+{
+       struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);
+
+       return ctx->child;
+}
+EXPORT_SYMBOL_GPL(cryptd_ahash_child);
+
+struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
+{
+       struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
+       return &rctx->desc;
+}
+EXPORT_SYMBOL_GPL(cryptd_shash_desc);
+
+void cryptd_free_ahash(struct cryptd_ahash *tfm)
+{
+       crypto_free_ahash(&tfm->base);
+}
+EXPORT_SYMBOL_GPL(cryptd_free_ahash);
+
 static int __init cryptd_init(void)
 {
        int err;
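
The new exports give glue code a way to allocate a cryptd-wrapped async hash while still reaching the synchronous child shash. A rough caller sketch, assuming the "ghash" algorithm name and trivial error handling for illustration only (not taken from this patch):

#include <linux/err.h>
#include <linux/kernel.h>
#include <crypto/cryptd.h>
#include <crypto/hash.h>

static int example_use_cryptd_ahash(void)
{
	struct cryptd_ahash *cryptd_tfm;
	struct crypto_shash *child;

	/* "ghash" is only an example name; any cryptd-wrappable ahash works. */
	cryptd_tfm = cryptd_alloc_ahash("ghash", 0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);

	/* Synchronous fallback path: callers may drive the child shash directly. */
	child = cryptd_ahash_child(cryptd_tfm);
	pr_info("cryptd child: %s\n", crypto_shash_alg(child)->base.cra_name);

	cryptd_free_ahash(cryptd_tfm);
	return 0;
}

cryptd_shash_desc() belongs to the request path: given an ahash_request issued against the cryptd instance, it exposes the embedded shash_desc so a synchronous fallback can run on the child without a separate allocation.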