[PATCH] crypto: ensure cit_iv is aligned correctly
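
Ensure that cit_iv is aligned according to cra_alignmask by placing it
directly behind the (suitably padded) tfm context area instead of
kmalloc()ing it separately.  While at it, make the cipher walk cope with
misaligned data: crypt() now bounces misaligned scatterlist chunks
through a temporary page (falling back to the block-at-a-time slow path
if that page cannot be allocated), crypt_slow() aligns its on-stack
buffers, and the new crypt_iv_unaligned() helper copies a misaligned IV
into an aligned stack buffer around the call to crypt().  The cbc/ecb
processing loops are tightened so that a trailing partial block is never
processed, and crypto_init_cipher_flags() drops its
CRYPTO_TFM_REQ_WEAK_KEY assignment.

---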
diff --git a/crypto/cipher.c b/crypto/cipher.c
index 54c4a56..dfd4bcf 100644
--- a/crypto/cipher.c
+++ b/crypto/cipher.c
@@ -41,8 +41,10 @@ static unsigned int crypt_slow(const struct cipher_desc *desc,
                               struct scatter_walk *in,
                               struct scatter_walk *out, unsigned int bsize)
 {
-       u8 src[bsize];
-       u8 dst[bsize];
+       unsigned long alignmask = crypto_tfm_alg_alignmask(desc->tfm);
+       u8 buffer[bsize * 2 + alignmask];
+       u8 *src = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
+       u8 *dst = src + bsize;
        unsigned int n;
 
        n = scatterwalk_copychunks(src, in, bsize, 0);
@@ -59,15 +61,24 @@ static unsigned int crypt_slow(const struct cipher_desc *desc,
 static inline unsigned int crypt_fast(const struct cipher_desc *desc,
                                      struct scatter_walk *in,
                                      struct scatter_walk *out,
-                                     unsigned int nbytes)
+                                     unsigned int nbytes, u8 *tmp)
 {
        u8 *src, *dst;
 
        src = in->data;
        dst = scatterwalk_samebuf(in, out) ? src : out->data;
 
+       if (tmp) {
+               memcpy(tmp, in->data, nbytes);
+               src = tmp;
+               dst = tmp;
+       }
+
        nbytes = desc->prfn(desc, dst, src, nbytes);
 
+       if (tmp)
+               memcpy(out->data, tmp, nbytes);
+
        scatterwalk_advance(in, nbytes);
        scatterwalk_advance(out, nbytes);
 
@@ -87,6 +98,8 @@ static int crypt(const struct cipher_desc *desc,
        struct scatter_walk walk_in, walk_out;
        struct crypto_tfm *tfm = desc->tfm;
        const unsigned int bsize = crypto_tfm_alg_blocksize(tfm);
+       unsigned int alignmask = crypto_tfm_alg_alignmask(tfm);
+       unsigned long buffer = 0;
 
        if (!nbytes)
                return 0;
@@ -100,16 +113,27 @@ static int crypt(const struct cipher_desc *desc,
        scatterwalk_start(&walk_out, dst);
 
        for(;;) {
-               unsigned int n;
+               unsigned int n = nbytes;
+               u8 *tmp = NULL;
+
+               if (!scatterwalk_aligned(&walk_in, alignmask) ||
+                   !scatterwalk_aligned(&walk_out, alignmask)) {
+                       if (!buffer) {
+                               buffer = __get_free_page(GFP_ATOMIC);
+                               if (!buffer)
+                                       n = 0;
+                       }
+                       tmp = (u8 *)buffer;
+               }
 
                scatterwalk_map(&walk_in, 0);
                scatterwalk_map(&walk_out, 1);
 
-               n = scatterwalk_clamp(&walk_in, nbytes);
+               n = scatterwalk_clamp(&walk_in, n);
                n = scatterwalk_clamp(&walk_out, n);
 
                if (likely(n >= bsize))
-                       n = crypt_fast(desc, &walk_in, &walk_out, n);
+                       n = crypt_fast(desc, &walk_in, &walk_out, n, tmp);
                else
                        n = crypt_slow(desc, &walk_in, &walk_out, bsize);
 
@@ -119,10 +143,40 @@ static int crypt(const struct cipher_desc *desc,
                scatterwalk_done(&walk_out, 1, nbytes);
 
                if (!nbytes)
-                       return 0;
+                       break;
 
                crypto_yield(tfm);
        }
+
+       if (buffer)
+               free_page(buffer);
+
+       return 0;
+}
+
+static int crypt_iv_unaligned(struct cipher_desc *desc,
+                             struct scatterlist *dst,
+                             struct scatterlist *src,
+                             unsigned int nbytes)
+{
+       struct crypto_tfm *tfm = desc->tfm;
+       unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
+       u8 *iv = desc->info;
+
+       if (unlikely(((unsigned long)iv & alignmask))) {
+               unsigned int ivsize = tfm->crt_cipher.cit_ivsize;
+               u8 buffer[ivsize + alignmask];
+               u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
+               int err;
+
+               desc->info = memcpy(tmp, iv, ivsize);
+               err = crypt(desc, dst, src, nbytes);
+               memcpy(iv, tmp, ivsize);
+
+               return err;
+       }
+
+       return crypt(desc, dst, src, nbytes);
 }
 
 static unsigned int cbc_process_encrypt(const struct cipher_desc *desc,
@@ -137,6 +191,8 @@ static unsigned int cbc_process_encrypt(const struct cipher_desc *desc,
        u8 *iv = desc->info;
        unsigned int done = 0;
 
+       nbytes -= bsize;
+
        do {
                xor(iv, src);
                fn(crypto_tfm_ctx(tfm), dst, iv);
@@ -144,7 +200,7 @@ static unsigned int cbc_process_encrypt(const struct cipher_desc *desc,
 
                src += bsize;
                dst += bsize;
-       } while ((done += bsize) < nbytes);
+       } while ((done += bsize) <= nbytes);
 
        return done;
 }
@@ -165,6 +221,8 @@ static unsigned int cbc_process_decrypt(const struct cipher_desc *desc,
        u8 *iv = desc->info;
        unsigned int done = 0;
 
+       nbytes -= bsize;
+
        do {
                u8 *tmp_dst = *dst_p;
 
@@ -176,7 +234,7 @@ static unsigned int cbc_process_decrypt(const struct cipher_desc *desc,
 
                src += bsize;
                dst += bsize;
-       } while ((done += bsize) < nbytes);
+       } while ((done += bsize) <= nbytes);
 
        return done;
 }
@@ -189,12 +247,14 @@ static unsigned int ecb_process(const struct cipher_desc *desc, u8 *dst,
        void (*fn)(void *, u8 *, const u8 *) = desc->crfn;
        unsigned int done = 0;
 
+       nbytes -= bsize;
+
        do {
                fn(crypto_tfm_ctx(tfm), dst, src);
 
                src += bsize;
                dst += bsize;
-       } while ((done += bsize) < nbytes);
+       } while ((done += bsize) <= nbytes);
 
        return done;
 }
@@ -269,7 +329,7 @@ static int cbc_encrypt_iv(struct crypto_tfm *tfm,
        desc.prfn = cipher->cia_encrypt_cbc ?: cbc_process_encrypt;
        desc.info = iv;
 
-       return crypt(&desc, dst, src, nbytes);
+       return crypt_iv_unaligned(&desc, dst, src, nbytes);
 }
 
 static int cbc_decrypt(struct crypto_tfm *tfm,
@@ -301,7 +361,7 @@ static int cbc_decrypt_iv(struct crypto_tfm *tfm,
        desc.prfn = cipher->cia_decrypt_cbc ?: cbc_process_decrypt;
        desc.info = iv;
 
-       return crypt(&desc, dst, src, nbytes);
+       return crypt_iv_unaligned(&desc, dst, src, nbytes);
 }
 
 static int nocrypt(struct crypto_tfm *tfm,
@@ -323,11 +383,7 @@ static int nocrypt_iv(struct crypto_tfm *tfm,
 int crypto_init_cipher_flags(struct crypto_tfm *tfm, u32 flags)
 {
        u32 mode = flags & CRYPTO_TFM_MODE_MASK;
-       
        tfm->crt_cipher.cit_mode = mode ? mode : CRYPTO_TFM_MODE_ECB;
-       if (flags & CRYPTO_TFM_REQ_WEAK_KEY)
-               tfm->crt_flags = CRYPTO_TFM_REQ_WEAK_KEY;
-       
        return 0;
 }
 
@@ -370,6 +426,8 @@ int crypto_init_cipher_ops(struct crypto_tfm *tfm)
        }
        
        if (ops->cit_mode == CRYPTO_TFM_MODE_CBC) {
+               unsigned long align;
+               unsigned long addr;
                
                switch (crypto_tfm_alg_blocksize(tfm)) {
                case 8:
@@ -389,9 +447,11 @@ int crypto_init_cipher_ops(struct crypto_tfm *tfm)
                }
                
                ops->cit_ivsize = crypto_tfm_alg_blocksize(tfm);
-               ops->cit_iv = kmalloc(ops->cit_ivsize, GFP_KERNEL);
-               if (ops->cit_iv == NULL)
-                       ret = -ENOMEM;
+               align = crypto_tfm_alg_alignmask(tfm) + 1;
+               addr = (unsigned long)crypto_tfm_ctx(tfm);
+               addr = ALIGN(addr, align);
+               addr += ALIGN(tfm->__crt_alg->cra_ctxsize, align);
+               ops->cit_iv = (void *)addr;
        }
 
 out:   
@@ -400,5 +460,4 @@ out:
 
 void crypto_exit_cipher_ops(struct crypto_tfm *tfm)
 {
-       kfree(tfm->crt_cipher.cit_iv);
 }
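
A few notes on the idioms above, with stand-alone userspace sketches
(C99; kernel helpers are redefined locally or replaced with stand-ins).
First, the on-stack alignment trick used by crypt_slow() and
crypt_iv_unaligned(): the buffer is over-allocated by alignmask bytes
and the usable window is rounded up with ALIGN().  In this sketch,
ALIGN() mirrors the kernel macro, and alignmask/bsize are made-up
values standing in for crypto_tfm_alg_alignmask() and the block size:

#include <stdio.h>
#include <stdint.h>

/* Local stand-in for the kernel's ALIGN(): round x up to a multiple
 * of a (a must be a power of two). */
#define ALIGN(x, a) (((x) + (a) - 1) & ~((uintptr_t)(a) - 1))

int main(void)
{
	unsigned long alignmask = 15;	/* hypothetical: 16-byte alignment */
	unsigned int bsize = 16;	/* hypothetical block size */

	/* Over-allocating by alignmask guarantees that an aligned window
	 * of 2 * bsize bytes fits somewhere inside the buffer. */
	uint8_t buffer[bsize * 2 + alignmask];
	uint8_t *src = (uint8_t *)ALIGN((uintptr_t)buffer, alignmask + 1);
	uint8_t *dst = src + bsize;

	printf("buffer=%p src=%p dst=%p src aligned=%d\n",
	       (void *)buffer, (void *)src, (void *)dst,
	       ((uintptr_t)src & alignmask) == 0);
	return 0;
}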
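
crypt_fast()'s tmp path is a plain bounce buffer: misaligned input is
copied into an aligned scratch area, processed in place, and the result
copied back out.  A stripped-down sketch of the same flow, with
toy_process() as a stand-in for desc->prfn:

#include <stdio.h>
#include <string.h>
#include <stdint.h>

/* Hypothetical stand-in for desc->prfn: XOR every byte with a constant. */
static unsigned int toy_process(uint8_t *dst, const uint8_t *src,
				unsigned int nbytes)
{
	unsigned int i;

	for (i = 0; i < nbytes; i++)
		dst[i] = src[i] ^ 0x55;
	return nbytes;
}

int main(void)
{
	uint8_t in[16] = "fifteen chars..";	/* pretend this is misaligned */
	uint8_t out[16];
	uint8_t tmp[16];			/* the aligned bounce buffer */
	unsigned int n = sizeof(in);

	memcpy(tmp, in, n);		/* bounce in: copy to aligned tmp */
	n = toy_process(tmp, tmp, n);	/* process in place, as crypt_fast does */
	memcpy(out, tmp, n);		/* bounce out: copy the result back */

	printf("processed %u bytes\n", n);
	return 0;
}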
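
The loop change in the cbc/ecb process functions matters once nbytes is
no longer guaranteed to be a multiple of the block size: the old
"(done += bsize) < nbytes" form runs one block past the end of a
trailing partial block, while the new form processes whole blocks only.
A small demonstration with stand-in numbers:

#include <stdio.h>

/* Count how many bytes each loop shape would process. */
static unsigned int old_loop(unsigned int nbytes, unsigned int bsize)
{
	unsigned int done = 0;

	do { } while ((done += bsize) < nbytes);
	return done;
}

static unsigned int new_loop(unsigned int nbytes, unsigned int bsize)
{
	unsigned int done = 0;

	nbytes -= bsize;
	do { } while ((done += bsize) <= nbytes);
	return done;
}

int main(void)
{
	/* 20 bytes with an 8-byte block: only 16 bytes (two blocks) exist. */
	printf("old: %u bytes\n", old_loop(20, 8));	/* 24: overruns */
	printf("new: %u bytes\n", new_loop(20, 8));	/* 16: whole blocks */
	return 0;
}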
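
Finally, the cit_iv placement replaces the separate kmalloc() with
pointer arithmetic: the IV lives directly behind the context, with both
the context start and the context size rounded up to the required
alignment.  This sketch reproduces the address computation with
hypothetical values; the extra room it consumes is presumably reserved
on the allocation side, which this diff does not show:

#include <stdio.h>
#include <stdint.h>

#define ALIGN(x, a) (((x) + (a) - 1) & ~((uintptr_t)(a) - 1))

int main(void)
{
	/* Hypothetical stand-ins for crypto_tfm_ctx(tfm),
	 * tfm->__crt_alg->cra_ctxsize and crypto_tfm_alg_alignmask(tfm) + 1. */
	uintptr_t ctx = 0x1004;
	uintptr_t ctxsize = 52;
	uintptr_t align = 16;

	uintptr_t addr = ALIGN(ctx, align);	/* aligned start of the ctx */
	addr += ALIGN(ctxsize, align);		/* skip the padded ctx itself */

	printf("ctx=0x%lx cit_iv=0x%lx\n",
	       (unsigned long)ctx, (unsigned long)addr);
	return 0;
}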