drivers/crypto/padlock-sha.c
/*
 * Cryptographic API.
 *
 * Support for VIA PadLock hardware crypto engine.
 *
 * Copyright (c) 2006  Michal Ludvig <michal@logix.cz>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 */

#include <crypto/internal/hash.h>
#include <crypto/sha.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/init.h>
#include <linux/errno.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/scatterlist.h>
#include <asm/i387.h>
#include "padlock.h"

#ifdef CONFIG_64BIT
#define STACK_ALIGN 16
#else
#define STACK_ALIGN 4
#endif

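/*
 * The PadLock engine can only produce complete digests, so a software
 * shash is kept as a fallback: it buffers partial blocks and exports
 * its intermediate state, and the PadLock "xsha" instructions are then
 * used to finish the digest over the remaining data.
 */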
struct padlock_sha_desc {
        struct shash_desc fallback;
};

struct padlock_sha_ctx {
        struct crypto_shash *fallback;
};

static int padlock_sha_init(struct shash_desc *desc)
{
        struct padlock_sha_desc *dctx = shash_desc_ctx(desc);
        struct padlock_sha_ctx *ctx = crypto_shash_ctx(desc->tfm);

        dctx->fallback.tfm = ctx->fallback;
        dctx->fallback.flags = desc->flags & CRYPTO_TFM_REQ_MAY_SLEEP;
        return crypto_shash_init(&dctx->fallback);
}

static int padlock_sha_update(struct shash_desc *desc,
                              const u8 *data, unsigned int length)
{
        struct padlock_sha_desc *dctx = shash_desc_ctx(desc);

        dctx->fallback.flags = desc->flags & CRYPTO_TFM_REQ_MAY_SLEEP;
        return crypto_shash_update(&dctx->fallback, data, length);
}

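/*
 * Copy the digest out of the aligned result buffer, converting each
 * 32-bit state word from the CPU's little-endian order to SHA's
 * big-endian output format.
 */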
static inline void padlock_output_block(uint32_t *src,
                        uint32_t *dst, size_t count)
{
        while (count--)
                *dst++ = swab32(*src++);
}

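/*
 * Finish a SHA-1 digest with the PadLock engine.  The fallback's
 * exported state supplies the hashed byte count and any buffered
 * partial block; the input is realigned to a block boundary so that
 * "rep xsha1" can process the tail in one pass (ECX carries the total
 * message length, EAX the number of bytes already hashed).
 */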
static int padlock_sha1_finup(struct shash_desc *desc, const u8 *in,
                              unsigned int count, u8 *out)
{
        /* We can't store directly to *out as it may be unaligned. */
        /* Note: don't reduce the buffer size below 128 bytes;
         *       the PadLock microcode needs it that big. */
        char buf[128 + PADLOCK_ALIGNMENT - STACK_ALIGN] __attribute__
                ((aligned(STACK_ALIGN)));
        char *result = PTR_ALIGN(&buf[0], PADLOCK_ALIGNMENT);
        struct padlock_sha_desc *dctx = shash_desc_ctx(desc);
        struct sha1_state state;
        unsigned int space;
        unsigned int leftover;
        int ts_state;
        int err;

        dctx->fallback.flags = desc->flags & CRYPTO_TFM_REQ_MAY_SLEEP;
        err = crypto_shash_export(&dctx->fallback, &state);
        if (err)
                goto out;

        if (state.count + count > ULONG_MAX)
                return crypto_shash_finup(&dctx->fallback, in, count, out);

        leftover = ((state.count - 1) & (SHA1_BLOCK_SIZE - 1)) + 1;
        space = SHA1_BLOCK_SIZE - leftover;
        if (space) {
                if (count > space) {
                        err = crypto_shash_update(&dctx->fallback, in, space) ?:
                              crypto_shash_export(&dctx->fallback, &state);
                        if (err)
                                goto out;
                        count -= space;
                        in += space;
                } else {
                        memcpy(state.buffer + leftover, in, count);
                        in = state.buffer;
                        count += leftover;
                        state.count &= ~(SHA1_BLOCK_SIZE - 1);
                }
        }

        memcpy(result, &state.state, SHA1_DIGEST_SIZE);

        /* Prevent taking a spurious DNA fault with PadLock. */
        ts_state = irq_ts_save();
        asm volatile (".byte 0xf3,0x0f,0xa6,0xc8" /* rep xsha1 */
                      :
                      : "c"((unsigned long)state.count + count),
                        "a"((unsigned long)state.count),
                        "S"(in), "D"(result));
        irq_ts_restore(ts_state);

        padlock_output_block((uint32_t *)result, (uint32_t *)out, 5);

out:
        return err;
}

static int padlock_sha1_final(struct shash_desc *desc, u8 *out)
{
        u8 buf[4];

        return padlock_sha1_finup(desc, buf, 0, out);
}

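/*
 * SHA-256 counterpart of padlock_sha1_finup(): identical flow, but it
 * uses "rep xsha256" and copies out eight state words.
 */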
static int padlock_sha256_finup(struct shash_desc *desc, const u8 *in,
                                unsigned int count, u8 *out)
{
        /* We can't store directly to *out as it may be unaligned. */
        /* Note: don't reduce the buffer size below 128 bytes;
         *       the PadLock microcode needs it that big. */
        char buf[128 + PADLOCK_ALIGNMENT - STACK_ALIGN] __attribute__
                ((aligned(STACK_ALIGN)));
        char *result = PTR_ALIGN(&buf[0], PADLOCK_ALIGNMENT);
        struct padlock_sha_desc *dctx = shash_desc_ctx(desc);
        struct sha256_state state;
        unsigned int space;
        unsigned int leftover;
        int ts_state;
        int err;

        dctx->fallback.flags = desc->flags & CRYPTO_TFM_REQ_MAY_SLEEP;
        err = crypto_shash_export(&dctx->fallback, &state);
        if (err)
                goto out;

        if (state.count + count > ULONG_MAX)
                return crypto_shash_finup(&dctx->fallback, in, count, out);

        leftover = ((state.count - 1) & (SHA256_BLOCK_SIZE - 1)) + 1;
        space = SHA256_BLOCK_SIZE - leftover;
        if (space) {
                if (count > space) {
                        err = crypto_shash_update(&dctx->fallback, in, space) ?:
                              crypto_shash_export(&dctx->fallback, &state);
                        if (err)
                                goto out;
                        count -= space;
                        in += space;
                } else {
                        memcpy(state.buf + leftover, in, count);
                        in = state.buf;
                        count += leftover;
                        state.count &= ~(SHA256_BLOCK_SIZE - 1);
                }
        }

        memcpy(result, &state.state, SHA256_DIGEST_SIZE);

        /* Prevent taking a spurious DNA fault with PadLock. */
        ts_state = irq_ts_save();
        asm volatile (".byte 0xf3,0x0f,0xa6,0xd0" /* rep xsha256 */
                      :
                      : "c"((unsigned long)state.count + count),
                        "a"((unsigned long)state.count),
                        "S"(in), "D"(result));
        irq_ts_restore(ts_state);

        padlock_output_block((uint32_t *)result, (uint32_t *)out, 8);

out:
        return err;
}

static int padlock_sha256_final(struct shash_desc *desc, u8 *out)
{
        u8 buf[4];

        return padlock_sha256_finup(desc, buf, 0, out);
}

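/*
 * Allocate the software fallback at tfm creation time, looked up by
 * the generic algorithm name, and grow the descriptor size so that
 * the fallback's state fits behind struct padlock_sha_desc.
 */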
static int padlock_cra_init(struct crypto_tfm *tfm)
{
        struct crypto_shash *hash = __crypto_shash_cast(tfm);
        const char *fallback_driver_name = tfm->__crt_alg->cra_name;
        struct padlock_sha_ctx *ctx = crypto_tfm_ctx(tfm);
        struct crypto_shash *fallback_tfm;
        int err = -ENOMEM;

        /* Allocate a fallback and abort if it fails. */
        fallback_tfm = crypto_alloc_shash(fallback_driver_name, 0,
                                          CRYPTO_ALG_NEED_FALLBACK);
        if (IS_ERR(fallback_tfm)) {
                printk(KERN_WARNING PFX
                       "Fallback driver '%s' could not be loaded!\n",
                       fallback_driver_name);
                err = PTR_ERR(fallback_tfm);
                goto out;
        }

        ctx->fallback = fallback_tfm;
        hash->descsize += crypto_shash_descsize(fallback_tfm);
        return 0;

out:
        return err;
}

static void padlock_cra_exit(struct crypto_tfm *tfm)
{
        struct padlock_sha_ctx *ctx = crypto_tfm_ctx(tfm);

        crypto_free_shash(ctx->fallback);
}

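/*
 * Both algorithms are registered with CRYPTO_ALG_NEED_FALLBACK to
 * signal that these implementations rely on a software fallback
 * being available.
 */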
static struct shash_alg sha1_alg = {
        .digestsize     =       SHA1_DIGEST_SIZE,
        .init           =       padlock_sha_init,
        .update         =       padlock_sha_update,
        .finup          =       padlock_sha1_finup,
        .final          =       padlock_sha1_final,
        .descsize       =       sizeof(struct padlock_sha_desc),
        .base           =       {
                .cra_name               =       "sha1",
                .cra_driver_name        =       "sha1-padlock",
                .cra_priority           =       PADLOCK_CRA_PRIORITY,
                .cra_flags              =       CRYPTO_ALG_TYPE_SHASH |
                                                CRYPTO_ALG_NEED_FALLBACK,
                .cra_blocksize          =       SHA1_BLOCK_SIZE,
                .cra_ctxsize            =       sizeof(struct padlock_sha_ctx),
                .cra_module             =       THIS_MODULE,
                .cra_init               =       padlock_cra_init,
                .cra_exit               =       padlock_cra_exit,
        }
};

static struct shash_alg sha256_alg = {
        .digestsize     =       SHA256_DIGEST_SIZE,
        .init           =       padlock_sha_init,
        .update         =       padlock_sha_update,
        .finup          =       padlock_sha256_finup,
        .final          =       padlock_sha256_final,
        .descsize       =       sizeof(struct padlock_sha_desc),
        .base           =       {
                .cra_name               =       "sha256",
                .cra_driver_name        =       "sha256-padlock",
                .cra_priority           =       PADLOCK_CRA_PRIORITY,
                .cra_flags              =       CRYPTO_ALG_TYPE_SHASH |
                                                CRYPTO_ALG_NEED_FALLBACK,
                .cra_blocksize          =       SHA256_BLOCK_SIZE,
                .cra_ctxsize            =       sizeof(struct padlock_sha_ctx),
                .cra_module             =       THIS_MODULE,
                .cra_init               =       padlock_cra_init,
                .cra_exit               =       padlock_cra_exit,
        }
};

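/*
 * Register both algorithms only when the CPU advertises the PadLock
 * Hash Engine (PHE) and it is enabled; unregister sha1 again if the
 * sha256 registration fails.
 */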
static int __init padlock_init(void)
{
        int rc = -ENODEV;

        if (!cpu_has_phe) {
                printk(KERN_NOTICE PFX "VIA PadLock Hash Engine not detected.\n");
                return -ENODEV;
        }

        if (!cpu_has_phe_enabled) {
                printk(KERN_NOTICE PFX "VIA PadLock detected, but not enabled. Hmm, strange...\n");
                return -ENODEV;
        }

        rc = crypto_register_shash(&sha1_alg);
        if (rc)
                goto out;

        rc = crypto_register_shash(&sha256_alg);
        if (rc)
                goto out_unreg1;

        printk(KERN_NOTICE PFX "Using VIA PadLock ACE for SHA1/SHA256 algorithms.\n");

        return 0;

out_unreg1:
        crypto_unregister_shash(&sha1_alg);
out:
        printk(KERN_ERR PFX "VIA PadLock SHA1/SHA256 initialization failed.\n");
        return rc;
}

static void __exit padlock_fini(void)
{
        crypto_unregister_shash(&sha1_alg);
        crypto_unregister_shash(&sha256_alg);
}

module_init(padlock_init);
module_exit(padlock_fini);

MODULE_DESCRIPTION("VIA PadLock SHA1/SHA256 algorithms support.");
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Michal Ludvig");

MODULE_ALIAS("sha1-all");
MODULE_ALIAS("sha256-all");
MODULE_ALIAS("sha1-padlock");
MODULE_ALIAS("sha256-padlock");