[CRYPTO] aes: Move common defines into a header file
[safe/jmp/linux-2.6] / arch / s390 / crypto / aes_s390.c
1 /*
2  * Cryptographic API.
3  *
4  * s390 implementation of the AES Cipher Algorithm.
5  *
6  * s390 Version:
7  *   Copyright IBM Corp. 2005,2007
8  *   Author(s): Jan Glauber (jang@de.ibm.com)
9  *
10  * Derived from "crypto/aes_generic.c"
11  *
12  * This program is free software; you can redistribute it and/or modify it
13  * under the terms of the GNU General Public License as published by the Free
14  * Software Foundation; either version 2 of the License, or (at your option)
15  * any later version.
16  *
17  */
18
19 #include <crypto/aes.h>
20 #include <crypto/algapi.h>
21 #include <linux/module.h>
22 #include <linux/init.h>
23 #include "crypt_s390.h"
24
/* Bit flags for the AES key lengths the CPACF hardware supports */
#define AES_KEYLEN_128          1
#define AES_KEYLEN_192          2
#define AES_KEYLEN_256          4

/* Bitmask of AES_KEYLEN_* values; probed once in aes_init() */
static char keylen_flag = 0;
30
/* Per-transform context shared by the plain, ECB and CBC AES algorithms */
struct s390_aes_ctx {
        u8 iv[AES_BLOCK_SIZE];          /* chaining value buffer for CBC mode */
        u8 key[AES_MAX_KEY_SIZE];       /* raw key material from setkey */
        long enc;                       /* KM/KMC function code for encryption */
        long dec;                       /* KM/KMC function code for decryption */
        int key_len;                    /* key length in bytes: 16, 24 or 32 */
};
38
39 static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
40                        unsigned int key_len)
41 {
42         struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
43         u32 *flags = &tfm->crt_flags;
44
45         switch (key_len) {
46         case 16:
47                 if (!(keylen_flag & AES_KEYLEN_128))
48                         goto fail;
49                 break;
50         case 24:
51                 if (!(keylen_flag & AES_KEYLEN_192))
52                         goto fail;
53
54                 break;
55         case 32:
56                 if (!(keylen_flag & AES_KEYLEN_256))
57                         goto fail;
58                 break;
59         default:
60                 goto fail;
61                 break;
62         }
63
64         sctx->key_len = key_len;
65         memcpy(sctx->key, in_key, key_len);
66         return 0;
67 fail:
68         *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
69         return -EINVAL;
70 }
71
72 static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
73 {
74         const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
75
76         switch (sctx->key_len) {
77         case 16:
78                 crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in,
79                               AES_BLOCK_SIZE);
80                 break;
81         case 24:
82                 crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in,
83                               AES_BLOCK_SIZE);
84                 break;
85         case 32:
86                 crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in,
87                               AES_BLOCK_SIZE);
88                 break;
89         }
90 }
91
92 static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
93 {
94         const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
95
96         switch (sctx->key_len) {
97         case 16:
98                 crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in,
99                               AES_BLOCK_SIZE);
100                 break;
101         case 24:
102                 crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in,
103                               AES_BLOCK_SIZE);
104                 break;
105         case 32:
106                 crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in,
107                               AES_BLOCK_SIZE);
108                 break;
109         }
110 }
111
112
/* Single-block "aes" cipher; NEED_FALLBACK because not all key lengths
 * may be available on a given machine (see aes_init()). */
static struct crypto_alg aes_alg = {
        .cra_name               =       "aes",
        .cra_driver_name        =       "aes-s390",
        .cra_priority           =       CRYPT_S390_PRIORITY,
        .cra_flags              =       CRYPTO_ALG_TYPE_CIPHER |
                                        CRYPTO_ALG_NEED_FALLBACK,
        .cra_blocksize          =       AES_BLOCK_SIZE,
        .cra_ctxsize            =       sizeof(struct s390_aes_ctx),
        .cra_module             =       THIS_MODULE,
        .cra_list               =       LIST_HEAD_INIT(aes_alg.cra_list),
        .cra_u                  =       {
                .cipher = {
                        .cia_min_keysize        =       AES_MIN_KEY_SIZE,
                        .cia_max_keysize        =       AES_MAX_KEY_SIZE,
                        .cia_setkey             =       aes_set_key,
                        .cia_encrypt            =       aes_encrypt,
                        .cia_decrypt            =       aes_decrypt,
                }
        }
};
133
134 static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
135                            unsigned int key_len)
136 {
137         struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
138
139         switch (key_len) {
140         case 16:
141                 sctx->enc = KM_AES_128_ENCRYPT;
142                 sctx->dec = KM_AES_128_DECRYPT;
143                 break;
144         case 24:
145                 sctx->enc = KM_AES_192_ENCRYPT;
146                 sctx->dec = KM_AES_192_DECRYPT;
147                 break;
148         case 32:
149                 sctx->enc = KM_AES_256_ENCRYPT;
150                 sctx->dec = KM_AES_256_DECRYPT;
151                 break;
152         }
153
154         return aes_set_key(tfm, in_key, key_len);
155 }
156
/*
 * Run the KM instruction over every segment produced by the blkcipher
 * walk.  func selects direction and key size; param points at the key.
 * Returns 0 on success or a negative walk error.
 */
static int ecb_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
                         struct blkcipher_walk *walk)
{
        int ret = blkcipher_walk_virt(desc, walk);
        unsigned int nbytes;

        while ((nbytes = walk->nbytes)) {
                /* only use complete blocks */
                unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
                u8 *out = walk->dst.virt.addr;
                u8 *in = walk->src.virt.addr;

                /* KM processes all n bytes or the hardware misbehaved */
                ret = crypt_s390_km(func, param, out, in, n);
                BUG_ON((ret < 0) || (ret != n));

                /* hand any partial-block remainder back to the walk */
                nbytes &= AES_BLOCK_SIZE - 1;
                ret = blkcipher_walk_done(desc, walk, nbytes);
        }

        return ret;
}
178
179 static int ecb_aes_encrypt(struct blkcipher_desc *desc,
180                            struct scatterlist *dst, struct scatterlist *src,
181                            unsigned int nbytes)
182 {
183         struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
184         struct blkcipher_walk walk;
185
186         blkcipher_walk_init(&walk, dst, src, nbytes);
187         return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);
188 }
189
190 static int ecb_aes_decrypt(struct blkcipher_desc *desc,
191                            struct scatterlist *dst, struct scatterlist *src,
192                            unsigned int nbytes)
193 {
194         struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
195         struct blkcipher_walk walk;
196
197         blkcipher_walk_init(&walk, dst, src, nbytes);
198         return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
199 }
200
/* ECB block-cipher mode backed by the KM instruction */
static struct crypto_alg ecb_aes_alg = {
        .cra_name               =       "ecb(aes)",
        .cra_driver_name        =       "ecb-aes-s390",
        .cra_priority           =       CRYPT_S390_COMPOSITE_PRIORITY,
        .cra_flags              =       CRYPTO_ALG_TYPE_BLKCIPHER |
                                        CRYPTO_ALG_NEED_FALLBACK,
        .cra_blocksize          =       AES_BLOCK_SIZE,
        .cra_ctxsize            =       sizeof(struct s390_aes_ctx),
        .cra_type               =       &crypto_blkcipher_type,
        .cra_module             =       THIS_MODULE,
        .cra_list               =       LIST_HEAD_INIT(ecb_aes_alg.cra_list),
        .cra_u                  =       {
                .blkcipher = {
                        .min_keysize            =       AES_MIN_KEY_SIZE,
                        .max_keysize            =       AES_MAX_KEY_SIZE,
                        .setkey                 =       ecb_aes_set_key,
                        .encrypt                =       ecb_aes_encrypt,
                        .decrypt                =       ecb_aes_decrypt,
                }
        }
};
222
223 static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
224                            unsigned int key_len)
225 {
226         struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
227
228         switch (key_len) {
229         case 16:
230                 sctx->enc = KMC_AES_128_ENCRYPT;
231                 sctx->dec = KMC_AES_128_DECRYPT;
232                 break;
233         case 24:
234                 sctx->enc = KMC_AES_192_ENCRYPT;
235                 sctx->dec = KMC_AES_192_DECRYPT;
236                 break;
237         case 32:
238                 sctx->enc = KMC_AES_256_ENCRYPT;
239                 sctx->dec = KMC_AES_256_DECRYPT;
240                 break;
241         }
242
243         return aes_set_key(tfm, in_key, key_len);
244 }
245
/*
 * Run the KMC instruction over every segment of the blkcipher walk.
 * param is the KMC parameter block (the context's iv buffer, followed in
 * memory by the key in struct s390_aes_ctx); the walk's IV is copied in
 * before and the updated chaining value copied back out after the loop.
 */
static int cbc_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
                         struct blkcipher_walk *walk)
{
        int ret = blkcipher_walk_virt(desc, walk);
        unsigned int nbytes = walk->nbytes;

        if (!nbytes)
                goto out;

        /* seed the KMC chaining field with the caller's IV */
        memcpy(param, walk->iv, AES_BLOCK_SIZE);
        do {
                /* only use complete blocks */
                unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
                u8 *out = walk->dst.virt.addr;
                u8 *in = walk->src.virt.addr;

                /* KMC processes all n bytes or the hardware misbehaved */
                ret = crypt_s390_kmc(func, param, out, in, n);
                BUG_ON((ret < 0) || (ret != n));

                nbytes &= AES_BLOCK_SIZE - 1;
                ret = blkcipher_walk_done(desc, walk, nbytes);
        } while ((nbytes = walk->nbytes));
        /* return the final chaining value so chained requests continue */
        memcpy(walk->iv, param, AES_BLOCK_SIZE);

out:
        return ret;
}
273
274 static int cbc_aes_encrypt(struct blkcipher_desc *desc,
275                            struct scatterlist *dst, struct scatterlist *src,
276                            unsigned int nbytes)
277 {
278         struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
279         struct blkcipher_walk walk;
280
281         blkcipher_walk_init(&walk, dst, src, nbytes);
282         return cbc_aes_crypt(desc, sctx->enc, sctx->iv, &walk);
283 }
284
285 static int cbc_aes_decrypt(struct blkcipher_desc *desc,
286                            struct scatterlist *dst, struct scatterlist *src,
287                            unsigned int nbytes)
288 {
289         struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
290         struct blkcipher_walk walk;
291
292         blkcipher_walk_init(&walk, dst, src, nbytes);
293         return cbc_aes_crypt(desc, sctx->dec, sctx->iv, &walk);
294 }
295
/* CBC block-cipher mode backed by the KMC instruction */
static struct crypto_alg cbc_aes_alg = {
        .cra_name               =       "cbc(aes)",
        .cra_driver_name        =       "cbc-aes-s390",
        .cra_priority           =       CRYPT_S390_COMPOSITE_PRIORITY,
        .cra_flags              =       CRYPTO_ALG_TYPE_BLKCIPHER |
                                        CRYPTO_ALG_NEED_FALLBACK,
        .cra_blocksize          =       AES_BLOCK_SIZE,
        .cra_ctxsize            =       sizeof(struct s390_aes_ctx),
        .cra_type               =       &crypto_blkcipher_type,
        .cra_module             =       THIS_MODULE,
        .cra_list               =       LIST_HEAD_INIT(cbc_aes_alg.cra_list),
        .cra_u                  =       {
                .blkcipher = {
                        .min_keysize            =       AES_MIN_KEY_SIZE,
                        .max_keysize            =       AES_MAX_KEY_SIZE,
                        .ivsize                 =       AES_BLOCK_SIZE,
                        .setkey                 =       cbc_aes_set_key,
                        .encrypt                =       cbc_aes_encrypt,
                        .decrypt                =       cbc_aes_decrypt,
                }
        }
};
318
319 static int __init aes_init(void)
320 {
321         int ret;
322
323         if (crypt_s390_func_available(KM_AES_128_ENCRYPT))
324                 keylen_flag |= AES_KEYLEN_128;
325         if (crypt_s390_func_available(KM_AES_192_ENCRYPT))
326                 keylen_flag |= AES_KEYLEN_192;
327         if (crypt_s390_func_available(KM_AES_256_ENCRYPT))
328                 keylen_flag |= AES_KEYLEN_256;
329
330         if (!keylen_flag)
331                 return -EOPNOTSUPP;
332
333         /* z9 109 and z9 BC/EC only support 128 bit key length */
334         if (keylen_flag == AES_KEYLEN_128) {
335                 aes_alg.cra_u.cipher.cia_max_keysize = AES_MIN_KEY_SIZE;
336                 ecb_aes_alg.cra_u.blkcipher.max_keysize = AES_MIN_KEY_SIZE;
337                 cbc_aes_alg.cra_u.blkcipher.max_keysize = AES_MIN_KEY_SIZE;
338                 printk(KERN_INFO
339                        "aes_s390: hardware acceleration only available for"
340                        "128 bit keys\n");
341         }
342
343         ret = crypto_register_alg(&aes_alg);
344         if (ret)
345                 goto aes_err;
346
347         ret = crypto_register_alg(&ecb_aes_alg);
348         if (ret)
349                 goto ecb_aes_err;
350
351         ret = crypto_register_alg(&cbc_aes_alg);
352         if (ret)
353                 goto cbc_aes_err;
354
355 out:
356         return ret;
357
358 cbc_aes_err:
359         crypto_unregister_alg(&ecb_aes_alg);
360 ecb_aes_err:
361         crypto_unregister_alg(&aes_alg);
362 aes_err:
363         goto out;
364 }
365
/* Module exit: unregister all three algorithms (reverse of aes_init). */
static void __exit aes_fini(void)
{
        crypto_unregister_alg(&cbc_aes_alg);
        crypto_unregister_alg(&ecb_aes_alg);
        crypto_unregister_alg(&aes_alg);
}
372
module_init(aes_init);
module_exit(aes_fini);

/* allow auto-loading when the "aes" algorithm is requested */
MODULE_ALIAS("aes");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
MODULE_LICENSE("GPL");
380