[CRYPTO] ctr: Add CTR (Counter) block cipher mode
crypto/ctr.c
/*
 * CTR: Counter mode
 *
 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/random.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

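/*
 * Per-instance parameters: the spawned underlying cipher plus the nonce
 * and IV sizes parsed from the template arguments,
 * i.e. "ctr(cipher,noncesize,ivsize)".
 */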
struct ctr_instance_ctx {
	struct crypto_spawn alg;
	unsigned int noncesize;
	unsigned int ivsize;
};

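/* Per-tfm state: the instantiated child cipher and a copy of the nonce. */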
struct crypto_ctr_ctx {
	struct crypto_cipher *child;
	u8 *nonce;
};

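/*
 * Increment the counter as a big-endian integer: ctr_inc_quad() steps
 * through it a 32-bit word at a time from the least significant end,
 * handing any leading leftover bytes to __ctr_inc_byte(). For example,
 * with a 4-byte counter, 00 00 00 ff increments to 00 00 01 00; the
 * carry only propagates into the next word when the current one wraps
 * to zero.
 */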
static inline void __ctr_inc_byte(u8 *a, unsigned int size)
{
	u8 *b = (a + size);
	u8 c;

	for (; size; size--) {
		c = *--b + 1;
		*b = c;
		if (c)
			break;
	}
}

static void ctr_inc_quad(u8 *a, unsigned int size)
{
	__be32 *b = (__be32 *)(a + size);
	u32 c;

	for (; size >= 4; size -= 4) {
		c = be32_to_cpu(*--b) + 1;
		*b = cpu_to_be32(c);
		if (c)
			return;
	}

	__ctr_inc_byte(a, size);
}

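/*
 * XOR the keystream into the data: xor_quad() goes a 32-bit word at a
 * time and leaves any trailing bytes to xor_byte(). The word-sized
 * accesses rely on the instance setting cra_alignmask to 3 below, so
 * the blkcipher walk hands over 4-byte-aligned buffers.
 */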
static void xor_byte(u8 *a, const u8 *b, unsigned int bs)
{
	for (; bs; bs--)
		*a++ ^= *b++;
}

static void xor_quad(u8 *dst, const u8 *src, unsigned int bs)
{
	u32 *a = (u32 *)dst;
	const u32 *b = (const u32 *)src;

	for (; bs >= 4; bs -= 4)
		*a++ ^= *b++;

	xor_byte((u8 *)a, (const u8 *)b, bs);
}

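/*
 * The caller supplies the raw cipher key with the nonce appended. As an
 * illustrative example, an instance "ctr(aes,4,8)" using AES-128 takes a
 * 20-byte key: 16 key bytes followed by the 4-byte nonce.
 */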
static int crypto_ctr_setkey(struct crypto_tfm *parent, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(parent);
	struct crypto_cipher *child = ctx->child;
	struct ctr_instance_ctx *ictx =
		crypto_instance_ctx(crypto_tfm_alg_instance(parent));
	unsigned int noncelen = ictx->noncesize;
	int err = 0;

	/* the nonce is stored in bytes at end of key */
	if (keylen < noncelen)
		return -EINVAL;

	memcpy(ctx->nonce, key + (keylen - noncelen), noncelen);

	keylen -= noncelen;

	crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) &
				CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(child, key, keylen);
	crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) &
			     CRYPTO_TFM_RES_MASK);

	return err;
}

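/*
 * CTR turns the block cipher into a stream cipher: each output block is
 * C[i] = P[i] XOR E(K, ctrblk[i]), with the counter block incremented
 * after every block. Decryption is the identical operation, which is why
 * both encrypt and decrypt below point at crypto_ctr_crypt().
 */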
static int crypto_ctr_crypt_segment(struct blkcipher_walk *walk,
				    struct crypto_cipher *tfm, u8 *ctrblk,
				    unsigned int countersize)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	u8 ks[bsize + alignmask];
	u8 *keystream = (u8 *)ALIGN((unsigned long)ks, alignmask + 1);
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), keystream, ctrblk);
		xor_quad(keystream, src, min(nbytes, bsize));

		/* copy result into dst */
		memcpy(dst, keystream, min(nbytes, bsize));

		/* increment counter in counterblock */
		ctr_inc_quad(ctrblk + (bsize - countersize), countersize);

		if (nbytes < bsize)
			break;

		src += bsize;
		dst += bsize;
		nbytes -= bsize;

	} while (nbytes);

	return 0;
}

static int crypto_ctr_crypt_inplace(struct blkcipher_walk *walk,
				    struct crypto_cipher *tfm, u8 *ctrblk,
				    unsigned int countersize)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 ks[bsize + alignmask];
	u8 *keystream = (u8 *)ALIGN((unsigned long)ks, alignmask + 1);

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), keystream, ctrblk);
		xor_quad(src, keystream, min(nbytes, bsize));

		/* increment counter in counterblock */
		ctr_inc_quad(ctrblk + (bsize - countersize), countersize);

		if (nbytes < bsize)
			break;

		src += bsize;
		nbytes -= bsize;

	} while (nbytes);

	return 0;
}

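/*
 * The counter block assembled below is laid out as
 *
 *	+-----------+--------+----------------------------+
 *	| nonce     | IV     | counter                    |
 *	| noncesize | ivsize | bsize - noncesize - ivsize |
 *	+-----------+--------+----------------------------+
 *
 * e.g. "ctr(aes,4,8)" splits AES's 16-byte block as 4 + 8 + 4.
 */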
static int crypto_ctr_crypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;
	struct crypto_blkcipher *tfm = desc->tfm;
	struct crypto_ctr_ctx *ctx = crypto_blkcipher_ctx(tfm);
	struct crypto_cipher *child = ctx->child;
	unsigned int bsize = crypto_cipher_blocksize(child);
	struct ctr_instance_ctx *ictx =
		crypto_instance_ctx(crypto_tfm_alg_instance(&tfm->base));
	unsigned long alignmask = crypto_cipher_alignmask(child);
	u8 cblk[bsize + alignmask];
	u8 *counterblk = (u8 *)ALIGN((unsigned long)cblk, alignmask + 1);
	unsigned int countersize;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, bsize);

	/* set up counter block: nonce || IV || counter */
	memset(counterblk, 0, bsize);
	memcpy(counterblk, ctx->nonce, ictx->noncesize);
	memcpy(counterblk + ictx->noncesize, walk.iv, ictx->ivsize);

	/* initialize counter portion of counter block: it starts at 1 */
	countersize = bsize - ictx->noncesize - ictx->ivsize;
	ctr_inc_quad(counterblk + (bsize - countersize), countersize);

	while (walk.nbytes) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_ctr_crypt_inplace(&walk, child,
							  counterblk,
							  countersize);
		else
			nbytes = crypto_ctr_crypt_segment(&walk, child,
							  counterblk,
							  countersize);

		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	return err;
}

static int crypto_ctr_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct ctr_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_cipher *cipher;

	ctx->nonce = kzalloc(ictx->noncesize, GFP_KERNEL);
	if (!ctx->nonce)
		return -ENOMEM;

	cipher = crypto_spawn_cipher(&ictx->alg);
	if (IS_ERR(cipher)) {
		/* don't leak the nonce buffer on failure */
		kfree(ctx->nonce);
		return PTR_ERR(cipher);
	}

	ctx->child = cipher;

	return 0;
}

static void crypto_ctr_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(tfm);

	kfree(ctx->nonce);
	crypto_free_cipher(ctx->child);
}

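/*
 * Template constructor: parses "ctr(cipher,noncesize,ivsize)" and builds
 * a blkcipher instance around the requested cipher. The nonce and IV
 * must leave at least one byte of counter in the block, and
 * cra_blocksize is set to 1 since CTR handles arbitrary-length data.
 */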
static struct crypto_instance *crypto_ctr_alloc(struct rtattr **tb)
{
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	struct ctr_instance_ctx *ictx;
	unsigned int noncesize;
	unsigned int ivsize;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
	if (err)
		return ERR_PTR(err);

	alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_CIPHER,
			      CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return ERR_PTR(PTR_ERR(alg));

	err = crypto_attr_u32(tb[2], &noncesize);
	if (err)
		goto out_put_alg;

	err = crypto_attr_u32(tb[3], &ivsize);
	if (err)
		goto out_put_alg;

	/* verify size of nonce + iv + counter */
	err = -EINVAL;
	if ((noncesize + ivsize) >= alg->cra_blocksize)
		goto out_put_alg;

	inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
	err = -ENOMEM;
	if (!inst)
		goto out_put_alg;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
		     "ctr(%s,%u,%u)", alg->cra_name, noncesize,
		     ivsize) >= CRYPTO_MAX_ALG_NAME) {
		goto err_free_inst;
	}

	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "ctr(%s,%u,%u)", alg->cra_driver_name, noncesize,
		     ivsize) >= CRYPTO_MAX_ALG_NAME) {
		goto err_free_inst;
	}

	ictx = crypto_instance_ctx(inst);
	ictx->noncesize = noncesize;
	ictx->ivsize = ivsize;

	err = crypto_init_spawn(&ictx->alg, alg, inst,
				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
	if (err)
		goto err_free_inst;

	err = 0;
	inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
	inst->alg.cra_priority = alg->cra_priority;
	inst->alg.cra_blocksize = 1;
	inst->alg.cra_alignmask = 3;
	inst->alg.cra_type = &crypto_blkcipher_type;

	inst->alg.cra_blkcipher.ivsize = ivsize;
	inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize
					      + noncesize;
	inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize
					      + noncesize;

	inst->alg.cra_ctxsize = sizeof(struct crypto_ctr_ctx);

	inst->alg.cra_init = crypto_ctr_init_tfm;
	inst->alg.cra_exit = crypto_ctr_exit_tfm;

	inst->alg.cra_blkcipher.setkey = crypto_ctr_setkey;
	inst->alg.cra_blkcipher.encrypt = crypto_ctr_crypt;
	inst->alg.cra_blkcipher.decrypt = crypto_ctr_crypt;

err_free_inst:
	if (err)
		kfree(inst);

out_put_alg:
	crypto_mod_put(alg);

	if (err)
		inst = ERR_PTR(err);

	return inst;
}

static void crypto_ctr_free(struct crypto_instance *inst)
{
	struct ctr_instance_ctx *ictx = crypto_instance_ctx(inst);

	crypto_drop_spawn(&ictx->alg);
	kfree(inst);
}

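/*
 * Usage sketch (illustrative, not part of this file): a user would
 * instantiate the template by name and key it with cipher key + nonce,
 * e.g. for AES-128 with a 4-byte nonce and an 8-byte IV:
 *
 *	struct crypto_blkcipher *tfm;
 *
 *	tfm = crypto_alloc_blkcipher("ctr(aes,4,8)", 0, CRYPTO_ALG_ASYNC);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	crypto_blkcipher_setkey(tfm, key, 16 + 4);	// key || nonce
 */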
static struct crypto_template crypto_ctr_tmpl = {
	.name = "ctr",
	.alloc = crypto_ctr_alloc,
	.free = crypto_ctr_free,
	.module = THIS_MODULE,
};

static int __init crypto_ctr_module_init(void)
{
	return crypto_register_template(&crypto_ctr_tmpl);
}

static void __exit crypto_ctr_module_exit(void)
{
	crypto_unregister_template(&crypto_ctr_tmpl);
}

module_init(crypto_ctr_module_init);
module_exit(crypto_ctr_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CTR Counter block mode");