*
*/
+#include <crypto/algapi.h>
+#include <crypto/sha.h>
+#include <linux/err.h>
#include <linux/module.h>
#include <linux/init.h>
#include <linux/errno.h>
-#include <linux/crypto.h>
#include <linux/cryptohash.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/scatterlist.h>
+#include <asm/i387.h>
#include "padlock.h"
-#define SHA1_DEFAULT_FALLBACK "sha1-generic"
-#define SHA1_DIGEST_SIZE 20
-#define SHA1_HMAC_BLOCK_SIZE 64
-
-#define SHA256_DEFAULT_FALLBACK "sha256-generic"
-#define SHA256_DIGEST_SIZE 32
-#define SHA256_HMAC_BLOCK_SIZE 64
-
-static char *sha1_fallback = SHA1_DEFAULT_FALLBACK;
-static char *sha256_fallback = SHA256_DEFAULT_FALLBACK;
-
-module_param(sha1_fallback, charp, 0644);
-module_param(sha256_fallback, charp, 0644);
-
-MODULE_PARM_DESC(sha1_fallback, "Fallback driver for SHA1. Default is "
- SHA1_DEFAULT_FALLBACK);
-MODULE_PARM_DESC(sha256_fallback, "Fallback driver for SHA256. Default is "
- SHA256_DEFAULT_FALLBACK);
struct padlock_sha_ctx {
char *data;
size_t used;
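+	/* Set once buffered input outgrows the one-page buffer; from then
+	 * on everything is fed to the software fallback, as the PadLock
+	 * engine only hashes a complete message in one operation. */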
int bypass;
void (*f_sha_padlock)(const char *in, char *out, int count);
- struct crypto_tfm *fallback_tfm;
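+	/* The crypto_hash fallback lives in a hash_desc, which pairs the
+	 * tfm with the per-operation flags the new hash API expects. */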
+ struct hash_desc fallback;
};
static inline struct padlock_sha_ctx *ctx(struct crypto_tfm *tfm)
{
- return (struct padlock_sha_ctx *)(crypto_tfm_ctx(tfm));
+ return crypto_tfm_ctx(tfm);
}
/* We'll need aligned address on the stack */
if (ctx(tfm)->bypass)
return;
- BUG_ON(!ctx(tfm)->fallback_tfm);
-
- crypto_digest_init(ctx(tfm)->fallback_tfm);
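+	/* Entering bypass: replay whatever was buffered for the PadLock
+	 * engine into the freshly initialised fallback hash. */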
+ crypto_hash_init(&ctx(tfm)->fallback);
if (ctx(tfm)->data && ctx(tfm)->used) {
struct scatterlist sg;
- sg_set_buf(&sg, ctx(tfm)->data, ctx(tfm)->used);
- crypto_digest_update(ctx(tfm)->fallback_tfm, &sg, 1);
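+	/* sg_init_one() also marks the entry as the end of the table,
+	 * which the hash walk in crypto_hash_update() relies on. */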
+ sg_init_one(&sg, ctx(tfm)->data, ctx(tfm)->used);
+ crypto_hash_update(&ctx(tfm)->fallback, &sg, sg.length);
}
ctx(tfm)->used = 0;
if (unlikely(ctx(tfm)->bypass)) {
struct scatterlist sg;
- BUG_ON(!ctx(tfm)->fallback_tfm);
- sg_set_buf(&sg, (uint8_t *)data, length);
- crypto_digest_update(ctx(tfm)->fallback_tfm, &sg, 1);
+ sg_init_one(&sg, (uint8_t *)data, length);
+ crypto_hash_update(&ctx(tfm)->fallback, &sg, length);
return;
}
* PadLock microcode needs it that big. */
char buf[128+16];
char *result = NEAREST_ALIGNED(buf);
+ int ts_state;
- ((uint32_t *)result)[0] = 0x67452301;
- ((uint32_t *)result)[1] = 0xEFCDAB89;
- ((uint32_t *)result)[2] = 0x98BADCFE;
- ((uint32_t *)result)[3] = 0x10325476;
- ((uint32_t *)result)[4] = 0xC3D2E1F0;
+ ((uint32_t *)result)[0] = SHA1_H0;
+ ((uint32_t *)result)[1] = SHA1_H1;
+ ((uint32_t *)result)[2] = SHA1_H2;
+ ((uint32_t *)result)[3] = SHA1_H3;
+ ((uint32_t *)result)[4] = SHA1_H4;
+	/* Prevent taking the spurious DNA fault with padlock: the xsha
+	 * insns fault with #NM if CR0.TS is set even though they touch no
+	 * FPU state, so clear TS around them. irq_ts_save() is irq-safe. */
+ ts_state = irq_ts_save();
asm volatile (".byte 0xf3,0x0f,0xa6,0xc8" /* rep xsha1 */
: "+S"(in), "+D"(result)
: "c"(count), "a"(0));
+ irq_ts_restore(ts_state);
padlock_output_block((uint32_t *)result, (uint32_t *)out, 5);
}
* PadLock microcode needs it that big. */
char buf[128+16];
char *result = NEAREST_ALIGNED(buf);
-
- ((uint32_t *)result)[0] = 0x6A09E667;
- ((uint32_t *)result)[1] = 0xBB67AE85;
- ((uint32_t *)result)[2] = 0x3C6EF372;
- ((uint32_t *)result)[3] = 0xA54FF53A;
- ((uint32_t *)result)[4] = 0x510E527F;
- ((uint32_t *)result)[5] = 0x9B05688C;
- ((uint32_t *)result)[6] = 0x1F83D9AB;
- ((uint32_t *)result)[7] = 0x5BE0CD19;
-
+ int ts_state;
+
+ ((uint32_t *)result)[0] = SHA256_H0;
+ ((uint32_t *)result)[1] = SHA256_H1;
+ ((uint32_t *)result)[2] = SHA256_H2;
+ ((uint32_t *)result)[3] = SHA256_H3;
+ ((uint32_t *)result)[4] = SHA256_H4;
+ ((uint32_t *)result)[5] = SHA256_H5;
+ ((uint32_t *)result)[6] = SHA256_H6;
+ ((uint32_t *)result)[7] = SHA256_H7;
+
+	/* As above: clear CR0.TS so "rep xsha256" does not take the
+	 * spurious DNA fault. */
+ ts_state = irq_ts_save();
asm volatile (".byte 0xf3,0x0f,0xa6,0xd0" /* rep xsha256 */
: "+S"(in), "+D"(result)
: "c"(count), "a"(0));
+ irq_ts_restore(ts_state);
padlock_output_block((uint32_t *)result, (uint32_t *)out, 8);
}
static void padlock_sha_final(struct crypto_tfm *tfm, uint8_t *out)
{
if (unlikely(ctx(tfm)->bypass)) {
- BUG_ON(!ctx(tfm)->fallback_tfm);
- crypto_digest_final(ctx(tfm)->fallback_tfm, out);
+ crypto_hash_final(&ctx(tfm)->fallback, out);
ctx(tfm)->bypass = 0;
return;
}
ctx(tfm)->used = 0;
}
-static int padlock_cra_init(struct crypto_tfm *tfm, const char *fallback_driver_name)
+static int padlock_cra_init(struct crypto_tfm *tfm)
{
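+	/* Fall back by generic algorithm name ("sha1"/"sha256") and let
+	 * the crypto API choose the best implementation that satisfies
+	 * the mask below. */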
+ const char *fallback_driver_name = tfm->__crt_alg->cra_name;
+ struct crypto_hash *fallback_tfm;
+
/* For now we'll allocate one page. This
* could eventually be configurable one day. */
ctx(tfm)->data = (char *)__get_free_page(GFP_KERNEL);
return -ENOMEM;
/* Allocate a fallback and abort if it failed. */
- ctx(tfm)->fallback_tfm = crypto_alloc_tfm(fallback_driver_name, 0);
- if (!ctx(tfm)->fallback_tfm) {
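+	/* Masking CRYPTO_ALG_ASYNC and CRYPTO_ALG_NEED_FALLBACK restricts
+	 * the lookup to synchronous algorithms that do not themselves
+	 * need a fallback, so we can never end up picking ourselves. */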
+ fallback_tfm = crypto_alloc_hash(fallback_driver_name, 0,
+ CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_NEED_FALLBACK);
+ if (IS_ERR(fallback_tfm)) {
printk(KERN_WARNING PFX "Fallback driver '%s' could not be loaded!\n",
fallback_driver_name);
free_page((unsigned long)(ctx(tfm)->data));
- return -ENOENT;
+ return PTR_ERR(fallback_tfm);
}
+ ctx(tfm)->fallback.tfm = fallback_tfm;
return 0;
}
{
ctx(tfm)->f_sha_padlock = padlock_do_sha1;
- return padlock_cra_init(tfm, sha1_fallback);
+ return padlock_cra_init(tfm);
}
static int padlock_sha256_cra_init(struct crypto_tfm *tfm)
{
ctx(tfm)->f_sha_padlock = padlock_do_sha256;
- return padlock_cra_init(tfm, sha256_fallback);
+ return padlock_cra_init(tfm);
}
static void padlock_cra_exit(struct crypto_tfm *tfm)
ctx(tfm)->data = NULL;
}
- BUG_ON(!ctx(tfm)->fallback_tfm);
- crypto_free_tfm(ctx(tfm)->fallback_tfm);
- ctx(tfm)->fallback_tfm = NULL;
+ crypto_free_hash(ctx(tfm)->fallback.tfm);
+ ctx(tfm)->fallback.tfm = NULL;
}
static struct crypto_alg sha1_alg = {
.cra_name = "sha1",
.cra_driver_name = "sha1-padlock",
.cra_priority = PADLOCK_CRA_PRIORITY,
- .cra_flags = CRYPTO_ALG_TYPE_DIGEST,
- .cra_blocksize = SHA1_HMAC_BLOCK_SIZE,
+ .cra_flags = CRYPTO_ALG_TYPE_DIGEST |
+ CRYPTO_ALG_NEED_FALLBACK,
+ .cra_blocksize = SHA1_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct padlock_sha_ctx),
.cra_module = THIS_MODULE,
.cra_list = LIST_HEAD_INIT(sha1_alg.cra_list),
.cra_name = "sha256",
.cra_driver_name = "sha256-padlock",
.cra_priority = PADLOCK_CRA_PRIORITY,
- .cra_flags = CRYPTO_ALG_TYPE_DIGEST,
- .cra_blocksize = SHA256_HMAC_BLOCK_SIZE,
+ .cra_flags = CRYPTO_ALG_TYPE_DIGEST |
+ CRYPTO_ALG_NEED_FALLBACK,
+ .cra_blocksize = SHA256_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct padlock_sha_ctx),
.cra_module = THIS_MODULE,
.cra_list = LIST_HEAD_INIT(sha256_alg.cra_list),
}
};
-static void __init padlock_sha_check_fallbacks(void)
-{
- struct crypto_tfm *tfm;
-
- /* We'll try to allocate one TFM for each fallback
- * to test that the modules are available. */
- tfm = crypto_alloc_tfm(sha1_fallback, 0);
- if (!tfm) {
- printk(KERN_WARNING PFX "Couldn't load fallback module for '%s'. Tried '%s'.\n",
- sha1_alg.cra_name, sha1_fallback);
- } else {
- printk(KERN_NOTICE PFX "Fallback for '%s' is driver '%s' (prio=%d)\n", sha1_alg.cra_name,
- crypto_tfm_alg_driver_name(tfm), crypto_tfm_alg_priority(tfm));
- crypto_free_tfm(tfm);
- }
-
- tfm = crypto_alloc_tfm(sha256_fallback, 0);
- if (!tfm) {
- printk(KERN_WARNING PFX "Couldn't load fallback module for '%s'. Tried '%s'.\n",
- sha256_alg.cra_name, sha256_fallback);
- } else {
- printk(KERN_NOTICE PFX "Fallback for '%s' is driver '%s' (prio=%d)\n", sha256_alg.cra_name,
- crypto_tfm_alg_driver_name(tfm), crypto_tfm_alg_priority(tfm));
- crypto_free_tfm(tfm);
- }
-}
-
static int __init padlock_init(void)
{
int rc = -ENODEV;
if (!cpu_has_phe) {
- printk(KERN_ERR PFX "VIA PadLock Hash Engine not detected.\n");
+ printk(KERN_NOTICE PFX "VIA PadLock Hash Engine not detected.\n");
return -ENODEV;
}
if (!cpu_has_phe_enabled) {
- printk(KERN_ERR PFX "VIA PadLock detected, but not enabled. Hmm, strange...\n");
+ printk(KERN_NOTICE PFX "VIA PadLock detected, but not enabled. Hmm, strange...\n");
return -ENODEV;
}
- padlock_sha_check_fallbacks();
-
rc = crypto_register_alg(&sha1_alg);
if (rc)
goto out;
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Michal Ludvig");
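+
+/* "<alg>-all" aliases: the crypto API requests this name for ordinary
+ * lookups but not for fallback lookups, so loading this module cannot
+ * recurse into its own fallback allocation. */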
+MODULE_ALIAS("sha1-all");
+MODULE_ALIAS("sha256-all");
MODULE_ALIAS("sha1-padlock");
MODULE_ALIAS("sha256-padlock");