X-Git-Url: http://ftp.safe.ca/?a=blobdiff_plain;f=include%2Flinux%2Fcrypto.h;h=fd929889e8dc68a583e3cf138ef650c0cd9a90b4;hb=af901ca181d92aac3a7dc265144a9081a86d8f39;hp=d4d05313280c5643a4c0b05e2f388d901bb581f1;hpb=32e3983fe590ac4cd70c7728eb330d43cef031a7;p=safe%2Fjmp%2Flinux-2.6

diff --git a/include/linux/crypto.h b/include/linux/crypto.h
index d4d0531..fd92988 100644
--- a/include/linux/crypto.h
+++ b/include/linux/crypto.h
@@ -6,7 +6,7 @@
  * Copyright (c) 2005 Herbert Xu
  *
  * Portions derived from Cryptoapi, by Alexander Kjeldaas
- * and Nettle, by Niels Möller.
+ * and Nettle, by Niels Möller.
  *
  * This program is free software; you can redistribute it and/or modify it
  * under the terms of the GNU General Public License as published by the Free
@@ -30,12 +30,21 @@
  */
 #define CRYPTO_ALG_TYPE_MASK		0x0000000f
 #define CRYPTO_ALG_TYPE_CIPHER		0x00000001
-#define CRYPTO_ALG_TYPE_DIGEST		0x00000002
-#define CRYPTO_ALG_TYPE_HASH		0x00000003
+#define CRYPTO_ALG_TYPE_COMPRESS	0x00000002
+#define CRYPTO_ALG_TYPE_AEAD		0x00000003
 #define CRYPTO_ALG_TYPE_BLKCIPHER	0x00000004
-#define CRYPTO_ALG_TYPE_COMPRESS	0x00000005
+#define CRYPTO_ALG_TYPE_ABLKCIPHER	0x00000005
+#define CRYPTO_ALG_TYPE_GIVCIPHER	0x00000006
+#define CRYPTO_ALG_TYPE_DIGEST		0x00000008
+#define CRYPTO_ALG_TYPE_HASH		0x00000008
+#define CRYPTO_ALG_TYPE_SHASH		0x00000009
+#define CRYPTO_ALG_TYPE_AHASH		0x0000000a
+#define CRYPTO_ALG_TYPE_RNG		0x0000000c
+#define CRYPTO_ALG_TYPE_PCOMPRESS	0x0000000f
 
 #define CRYPTO_ALG_TYPE_HASH_MASK	0x0000000e
+#define CRYPTO_ALG_TYPE_AHASH_MASK	0x0000000c
+#define CRYPTO_ALG_TYPE_BLKCIPHER_MASK	0x0000000c
 
 #define CRYPTO_ALG_LARVAL		0x00000010
 #define CRYPTO_ALG_DEAD			0x00000020
@@ -49,6 +58,20 @@
 #define CRYPTO_ALG_NEED_FALLBACK	0x00000100
 
 /*
+ * This bit is set for symmetric key ciphers that have already been wrapped
+ * with a generic IV generator to prevent them from being wrapped again.
+ */
+#define CRYPTO_ALG_GENIV		0x00000200
+
+/*
+ * Set if the algorithm has passed automated run-time testing. Note that
+ * if there is no run-time testing for a given algorithm it is considered
+ * to have passed.
+ */
+
+#define CRYPTO_ALG_TESTED		0x00000400
+
+/*
  * Transform masks and values (for crt_flags).
  */
 #define CRYPTO_TFM_REQ_MASK		0x000fff00
@@ -80,21 +103,23 @@
 #define CRYPTO_MINALIGN ARCH_KMALLOC_MINALIGN
 #elif defined(ARCH_SLAB_MINALIGN)
 #define CRYPTO_MINALIGN ARCH_SLAB_MINALIGN
+#else
+#define CRYPTO_MINALIGN __alignof__(unsigned long long)
 #endif
 
-#ifdef CRYPTO_MINALIGN
 #define CRYPTO_MINALIGN_ATTR __attribute__ ((__aligned__(CRYPTO_MINALIGN)))
-#else
-#define CRYPTO_MINALIGN_ATTR
-#endif
 
 struct scatterlist;
 struct crypto_ablkcipher;
 struct crypto_async_request;
+struct crypto_aead;
 struct crypto_blkcipher;
 struct crypto_hash;
+struct crypto_rng;
 struct crypto_tfm;
 struct crypto_type;
+struct aead_givcrypt_request;
+struct skcipher_givcrypt_request;
 
 typedef void (*crypto_completion_t)(struct crypto_async_request *req, int err);
 
@@ -120,6 +145,32 @@ struct ablkcipher_request {
 	void *__ctx[] CRYPTO_MINALIGN_ATTR;
 };
 
+/**
+ * struct aead_request - AEAD request
+ * @base: Common attributes for async crypto requests
+ * @assoclen: Length in bytes of associated data for authentication
+ * @cryptlen: Length of data to be encrypted or decrypted
+ * @iv: Initialisation vector
+ * @assoc: Associated data
+ * @src: Source data
+ * @dst: Destination data
+ * @__ctx: Start of private context data
+ */
+struct aead_request {
+	struct crypto_async_request base;
+
+	unsigned int assoclen;
+	unsigned int cryptlen;
+
+	u8 *iv;
+
+	struct scatterlist *assoc;
+	struct scatterlist *src;
+	struct scatterlist *dst;
+
+	void *__ctx[] CRYPTO_MINALIGN_ATTR;
+};
+
 struct blkcipher_desc {
 	struct crypto_blkcipher *tfm;
 	void *info;
@@ -143,6 +194,36 @@ struct hash_desc {
  * Algorithms: modular crypto algorithm implementations, managed
  * via crypto_register_alg() and crypto_unregister_alg().
  */
+struct ablkcipher_alg {
+	int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
+		      unsigned int keylen);
+	int (*encrypt)(struct ablkcipher_request *req);
+	int (*decrypt)(struct ablkcipher_request *req);
+	int (*givencrypt)(struct skcipher_givcrypt_request *req);
+	int (*givdecrypt)(struct skcipher_givcrypt_request *req);
+
+	const char *geniv;
+
+	unsigned int min_keysize;
+	unsigned int max_keysize;
+	unsigned int ivsize;
+};
+
+struct aead_alg {
+	int (*setkey)(struct crypto_aead *tfm, const u8 *key,
+		      unsigned int keylen);
+	int (*setauthsize)(struct crypto_aead *tfm, unsigned int authsize);
+	int (*encrypt)(struct aead_request *req);
+	int (*decrypt)(struct aead_request *req);
+	int (*givencrypt)(struct aead_givcrypt_request *req);
+	int (*givdecrypt)(struct aead_givcrypt_request *req);
+
+	const char *geniv;
+
+	unsigned int ivsize;
+	unsigned int maxauthsize;
+};
+
 struct blkcipher_alg {
 	int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
 		      unsigned int keylen);
@@ -153,6 +234,8 @@ struct blkcipher_alg {
 			  struct scatterlist *dst, struct scatterlist *src,
 			  unsigned int nbytes);
 
+	const char *geniv;
+
 	unsigned int min_keysize;
 	unsigned int max_keysize;
 	unsigned int ivsize;
@@ -197,11 +280,23 @@ struct compress_alg {
 			      unsigned int slen, u8 *dst, unsigned int *dlen);
 };
 
+struct rng_alg {
+	int (*rng_make_random)(struct crypto_rng *tfm, u8 *rdata,
+			       unsigned int dlen);
+	int (*rng_reset)(struct crypto_rng *tfm, u8 *seed, unsigned int slen);
+
+	unsigned int seedsize;
+};
+
+
+#define cra_ablkcipher	cra_u.ablkcipher
+#define cra_aead	cra_u.aead
 #define cra_blkcipher	cra_u.blkcipher
 #define cra_cipher	cra_u.cipher
 #define cra_digest	cra_u.digest
 #define cra_hash	cra_u.hash
 #define cra_compress	cra_u.compress
+#define cra_rng		cra_u.rng
 
 struct crypto_alg {
 	struct list_head cra_list;
@@ -221,11 +316,14 @@ struct crypto_alg {
 	const struct crypto_type *cra_type;
 
 	union {
+		struct ablkcipher_alg ablkcipher;
+		struct aead_alg aead;
 		struct blkcipher_alg blkcipher;
 		struct cipher_alg cipher;
 		struct digest_alg digest;
 		struct hash_alg hash;
 		struct compress_alg compress;
+		struct rng_alg rng;
 	} cra_u;
 
 	int (*cra_init)(struct crypto_tfm *tfm);
@@ -244,14 +342,7 @@ int crypto_unregister_alg(struct crypto_alg *alg);
 /*
  * Algorithm query interface.
  */
-#ifdef CONFIG_CRYPTO
 int crypto_has_alg(const char *name, u32 type, u32 mask);
-#else
-static inline int crypto_has_alg(const char *name, u32 type, u32 mask)
-{
-	return 0;
-}
-#endif
 
 /*
  * Transforms: user-instantiated objects which encapsulate algorithms
@@ -264,7 +355,27 @@ struct ablkcipher_tfm {
 					unsigned int keylen);
 	int (*encrypt)(struct ablkcipher_request *req);
 	int (*decrypt)(struct ablkcipher_request *req);
+	int (*givencrypt)(struct skcipher_givcrypt_request *req);
+	int (*givdecrypt)(struct skcipher_givcrypt_request *req);
+
+	struct crypto_ablkcipher *base;
+
+	unsigned int ivsize;
+	unsigned int reqsize;
+};
+
+struct aead_tfm {
+	int (*setkey)(struct crypto_aead *tfm, const u8 *key,
+	              unsigned int keylen);
+	int (*encrypt)(struct aead_request *req);
+	int (*decrypt)(struct aead_request *req);
+	int (*givencrypt)(struct aead_givcrypt_request *req);
+	int (*givdecrypt)(struct aead_givcrypt_request *req);
+
+	struct crypto_aead *base;
 
+	unsigned int ivsize;
+	unsigned int authsize;
 	unsigned int reqsize;
 };
 
@@ -279,28 +390,8 @@ struct blkcipher_tfm {
 };
 
 struct cipher_tfm {
-	void *cit_iv;
-	unsigned int cit_ivsize;
-	u32 cit_mode;
 	int (*cit_setkey)(struct crypto_tfm *tfm,
 	                  const u8 *key, unsigned int keylen);
-	int (*cit_encrypt)(struct crypto_tfm *tfm,
-			   struct scatterlist *dst,
-			   struct scatterlist *src,
-			   unsigned int nbytes);
-	int (*cit_encrypt_iv)(struct crypto_tfm *tfm,
-			      struct scatterlist *dst,
-			      struct scatterlist *src,
-			      unsigned int nbytes, u8 *iv);
-	int (*cit_decrypt)(struct crypto_tfm *tfm,
-			   struct scatterlist *dst,
-			   struct scatterlist *src,
-			   unsigned int nbytes);
-	int (*cit_decrypt_iv)(struct crypto_tfm *tfm,
-			      struct scatterlist *dst,
-			      struct scatterlist *src,
-			      unsigned int nbytes, u8 *iv);
-	void (*cit_xor_block)(u8 *dst, const u8 *src);
 	void (*cit_encrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
 	void (*cit_decrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
 };
@@ -326,11 +417,19 @@ struct compress_tfm {
 	                      u8 *dst, unsigned int *dlen);
 };
 
+struct rng_tfm {
+	int (*rng_gen_random)(struct crypto_rng *tfm, u8 *rdata,
+			      unsigned int dlen);
+	int (*rng_reset)(struct crypto_rng *tfm, u8 *seed, unsigned int slen);
+};
+
 #define crt_ablkcipher	crt_u.ablkcipher
+#define crt_aead	crt_u.aead
 #define crt_blkcipher	crt_u.blkcipher
 #define crt_cipher	crt_u.cipher
 #define crt_hash	crt_u.hash
 #define crt_compress	crt_u.compress
+#define crt_rng		crt_u.rng
 
 struct crypto_tfm {
 
@@ -338,11 +437,15 @@ struct crypto_tfm {
 
 	union {
 		struct ablkcipher_tfm ablkcipher;
+		struct aead_tfm aead;
 		struct blkcipher_tfm blkcipher;
 		struct cipher_tfm cipher;
 		struct hash_tfm hash;
 		struct compress_tfm compress;
+		struct rng_tfm rng;
 	} crt_u;
+
+	void (*exit)(struct crypto_tfm *tfm);
 
 	struct crypto_alg *__crt_alg;
 
@@ -353,6 +456,10 @@ struct crypto_ablkcipher {
 	struct crypto_tfm base;
 };
 
+struct crypto_aead {
+	struct crypto_tfm base;
+};
+
 struct crypto_blkcipher {
 	struct crypto_tfm base;
 };
@@ -369,22 +476,49 @@ struct crypto_hash {
 	struct crypto_tfm base;
 };
 
+struct crypto_rng {
+	struct crypto_tfm base;
+};
+
 enum {
 	CRYPTOA_UNSPEC,
 	CRYPTOA_ALG,
+	CRYPTOA_TYPE,
+	CRYPTOA_U32,
+	__CRYPTOA_MAX,
 };
 
+#define CRYPTOA_MAX (__CRYPTOA_MAX - 1)
+
+/* Maximum number of (rtattr) parameters for each template. */
+#define CRYPTO_MAX_ATTRS 32
+
 struct crypto_attr_alg {
 	char name[CRYPTO_MAX_ALG_NAME];
 };
 
+struct crypto_attr_type {
+	u32 type;
+	u32 mask;
+};
+
+struct crypto_attr_u32 {
+	u32 num;
+};
+
 /*
  * Transform user interface.
  */
 
-struct crypto_tfm *crypto_alloc_tfm(const char *alg_name, u32 tfm_flags);
 struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask);
-void crypto_free_tfm(struct crypto_tfm *tfm);
+void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm);
+
+static inline void crypto_free_tfm(struct crypto_tfm *tfm)
+{
+	return crypto_destroy_tfm(tfm, tfm);
+}
+
+int alg_test(const char *driver, const char *alg, u32 type, u32 mask);
 
 /*
  * Transform helpers which query the underlying algorithm.
@@ -459,17 +593,23 @@ static inline struct crypto_ablkcipher *__crypto_ablkcipher_cast(
 	return (struct crypto_ablkcipher *)tfm;
 }
 
-static inline struct crypto_ablkcipher *crypto_alloc_ablkcipher(
-	const char *alg_name, u32 type, u32 mask)
+static inline u32 crypto_skcipher_type(u32 type)
 {
-	type &= ~CRYPTO_ALG_TYPE_MASK;
+	type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
 	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
-	mask |= CRYPTO_ALG_TYPE_MASK;
+	return type;
+}
 
-	return __crypto_ablkcipher_cast(
-		crypto_alloc_base(alg_name, type, mask));
+static inline u32 crypto_skcipher_mask(u32 mask)
+{
+	mask &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
+	mask |= CRYPTO_ALG_TYPE_BLKCIPHER_MASK;
+	return mask;
 }
 
+struct crypto_ablkcipher *crypto_alloc_ablkcipher(const char *alg_name,
+						  u32 type, u32 mask);
+
 static inline struct crypto_tfm *crypto_ablkcipher_tfm(
 	struct crypto_ablkcipher *tfm)
 {
@@ -484,11 +624,8 @@ static inline void crypto_free_ablkcipher(struct crypto_ablkcipher *tfm)
 static inline int crypto_has_ablkcipher(const char *alg_name, u32 type,
 					u32 mask)
 {
-	type &= ~CRYPTO_ALG_TYPE_MASK;
-	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
-	mask |= CRYPTO_ALG_TYPE_MASK;
-
-	return crypto_has_alg(alg_name, type, mask);
+	return crypto_has_alg(alg_name, crypto_skcipher_type(type),
+			      crypto_skcipher_mask(mask));
 }
 
 static inline struct ablkcipher_tfm *crypto_ablkcipher_crt(
@@ -535,7 +672,9 @@ static inline void crypto_ablkcipher_clear_flags(struct crypto_ablkcipher *tfm,
 static inline int crypto_ablkcipher_setkey(struct crypto_ablkcipher *tfm,
 					   const u8 *key, unsigned int keylen)
 {
-	return crypto_ablkcipher_crt(tfm)->setkey(tfm, key, keylen);
+	struct ablkcipher_tfm *crt = crypto_ablkcipher_crt(tfm);
+
+	return crt->setkey(crt->base, key, keylen);
 }
 
 static inline struct crypto_ablkcipher *crypto_ablkcipher_reqtfm(
@@ -558,11 +697,24 @@ static inline int crypto_ablkcipher_decrypt(struct ablkcipher_request *req)
 	return crt->decrypt(req);
 }
 
-static inline int crypto_ablkcipher_reqsize(struct crypto_ablkcipher *tfm)
+static inline unsigned int crypto_ablkcipher_reqsize(
+	struct crypto_ablkcipher *tfm)
 {
 	return crypto_ablkcipher_crt(tfm)->reqsize;
 }
 
+static inline void ablkcipher_request_set_tfm(
+	struct ablkcipher_request *req, struct crypto_ablkcipher *tfm)
+{
+	req->base.tfm = crypto_ablkcipher_tfm(crypto_ablkcipher_crt(tfm)->base);
+}
+
+static inline struct ablkcipher_request *ablkcipher_request_cast(
+	struct crypto_async_request *req)
+{
+	return container_of(req, struct ablkcipher_request, base);
+}
+
 static inline struct ablkcipher_request *ablkcipher_request_alloc(
 	struct crypto_ablkcipher *tfm, gfp_t gfp)
 {
@@ -572,14 +724,14 @@ static inline struct ablkcipher_request *ablkcipher_request_alloc(
 		crypto_ablkcipher_reqsize(tfm), gfp);
 
 	if (likely(req))
-		req->base.tfm = crypto_ablkcipher_tfm(tfm);
+		ablkcipher_request_set_tfm(req, tfm);
 
 	return req;
 }
 
 static inline void ablkcipher_request_free(struct ablkcipher_request *req)
 {
-	kfree(req);
+	kzfree(req);
 }
 
 static inline void ablkcipher_request_set_callback(
@@ -602,6 +754,146 @@ static inline void ablkcipher_request_set_crypt(
 	req->info = iv;
 }
 
+static inline struct crypto_aead *__crypto_aead_cast(struct crypto_tfm *tfm)
+{
+	return (struct crypto_aead *)tfm;
+}
+
+struct crypto_aead *crypto_alloc_aead(const char *alg_name, u32 type, u32 mask);
+
+static inline struct crypto_tfm *crypto_aead_tfm(struct crypto_aead *tfm)
+{
+	return &tfm->base;
+}
+
+static inline void crypto_free_aead(struct crypto_aead *tfm)
+{
+	crypto_free_tfm(crypto_aead_tfm(tfm));
+}
+
+static inline struct aead_tfm *crypto_aead_crt(struct crypto_aead *tfm)
+{
+	return &crypto_aead_tfm(tfm)->crt_aead;
+}
+
+static inline unsigned int crypto_aead_ivsize(struct crypto_aead *tfm)
+{
+	return crypto_aead_crt(tfm)->ivsize;
+}
+
+static inline unsigned int crypto_aead_authsize(struct crypto_aead *tfm)
+{
+	return crypto_aead_crt(tfm)->authsize;
+}
+
+static inline unsigned int crypto_aead_blocksize(struct crypto_aead *tfm)
+{
+	return crypto_tfm_alg_blocksize(crypto_aead_tfm(tfm));
+}
+
+static inline unsigned int crypto_aead_alignmask(struct crypto_aead *tfm)
+{
+	return crypto_tfm_alg_alignmask(crypto_aead_tfm(tfm));
+}
+
+static inline u32 crypto_aead_get_flags(struct crypto_aead *tfm)
+{
+	return crypto_tfm_get_flags(crypto_aead_tfm(tfm));
+}
+
+static inline void crypto_aead_set_flags(struct crypto_aead *tfm, u32 flags)
+{
+	crypto_tfm_set_flags(crypto_aead_tfm(tfm), flags);
+}
+
+static inline void crypto_aead_clear_flags(struct crypto_aead *tfm, u32 flags)
+{
+	crypto_tfm_clear_flags(crypto_aead_tfm(tfm), flags);
+}
+
+static inline int crypto_aead_setkey(struct crypto_aead *tfm, const u8 *key,
+				     unsigned int keylen)
+{
+	struct aead_tfm *crt = crypto_aead_crt(tfm);
+
+	return crt->setkey(crt->base, key, keylen);
+}
+
+int crypto_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize);
+
+static inline struct crypto_aead *crypto_aead_reqtfm(struct aead_request *req)
+{
+	return __crypto_aead_cast(req->base.tfm);
+}
+
+static inline int crypto_aead_encrypt(struct aead_request *req)
+{
+	return crypto_aead_crt(crypto_aead_reqtfm(req))->encrypt(req);
+}
+
+static inline int crypto_aead_decrypt(struct aead_request *req)
+{
+	return crypto_aead_crt(crypto_aead_reqtfm(req))->decrypt(req);
+}
+
+static inline unsigned int crypto_aead_reqsize(struct crypto_aead *tfm)
+{
+	return crypto_aead_crt(tfm)->reqsize;
+}
+
+static inline void aead_request_set_tfm(struct aead_request *req,
+					struct crypto_aead *tfm)
+{
+	req->base.tfm = crypto_aead_tfm(crypto_aead_crt(tfm)->base);
+}
+
+static inline struct aead_request *aead_request_alloc(struct crypto_aead *tfm,
+						      gfp_t gfp)
+{
+	struct aead_request *req;
+
+	req = kmalloc(sizeof(*req) + crypto_aead_reqsize(tfm), gfp);
+
+	if (likely(req))
+		aead_request_set_tfm(req, tfm);
+
+	return req;
+}
+
+static inline void aead_request_free(struct aead_request *req)
+{
+	kzfree(req);
+}
+
+static inline void aead_request_set_callback(struct aead_request *req,
+					     u32 flags,
+					     crypto_completion_t complete,
+					     void *data)
+{
+	req->base.complete = complete;
+	req->base.data = data;
+	req->base.flags = flags;
+}
+
+static inline void aead_request_set_crypt(struct aead_request *req,
+					  struct scatterlist *src,
+					  struct scatterlist *dst,
+					  unsigned int cryptlen, u8 *iv)
+{
+	req->src = src;
+	req->dst = dst;
+	req->cryptlen = cryptlen;
+	req->iv = iv;
+}
+
+static inline void aead_request_set_assoc(struct aead_request *req,
+					  struct scatterlist *assoc,
+					  unsigned int assoclen)
+{
+	req->assoc = assoc;
+	req->assoclen = assoclen;
+}
+
 static inline struct crypto_blkcipher *__crypto_blkcipher_cast(
 	struct crypto_tfm *tfm)
 {
@@ -618,9 +910,9 @@ static inline struct crypto_blkcipher *crypto_blkcipher_cast(
 static inline struct crypto_blkcipher *crypto_alloc_blkcipher(
 	const char *alg_name, u32 type, u32 mask)
 {
-	type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
+	type &= ~CRYPTO_ALG_TYPE_MASK;
 	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
-	mask |= CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC;
+	mask |= CRYPTO_ALG_TYPE_MASK;
 
 	return __crypto_blkcipher_cast(crypto_alloc_base(alg_name, type, mask));
 }
@@ -638,9 +930,9 @@ static inline void crypto_free_blkcipher(struct crypto_blkcipher *tfm)
 static inline int crypto_has_blkcipher(const char *alg_name, u32 type, u32 mask)
 {
-	type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
+	type &= ~CRYPTO_ALG_TYPE_MASK;
 	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
-	mask |= CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC;
+	mask |= CRYPTO_ALG_TYPE_MASK;
 
 	return crypto_has_alg(alg_name, type, mask);
 }
@@ -858,6 +1150,7 @@ static inline struct crypto_hash *crypto_alloc_hash(const char *alg_name,
 						    u32 type, u32 mask)
 {
 	type &= ~CRYPTO_ALG_TYPE_MASK;
+	mask &= ~CRYPTO_ALG_TYPE_MASK;
 	type |= CRYPTO_ALG_TYPE_HASH;
 	mask |= CRYPTO_ALG_TYPE_HASH_MASK;
 
@@ -877,6 +1170,7 @@ static inline void crypto_free_hash(struct crypto_hash *tfm)
 static inline int crypto_has_hash(const char *alg_name, u32 type, u32 mask)
 {
 	type &= ~CRYPTO_ALG_TYPE_MASK;
+	mask &= ~CRYPTO_ALG_TYPE_MASK;
 	type |= CRYPTO_ALG_TYPE_HASH;
 	mask |= CRYPTO_ALG_TYPE_HASH_MASK;
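The hunks above introduce the AEAD (authenticated encryption with associated data) transform API: struct aead_request together with crypto_alloc_aead(), crypto_aead_setkey(), aead_request_set_crypt() and aead_request_set_assoc(). The sketch below shows how a caller of this vintage might drive that API for an in-place "gcm(aes)" encryption. It is illustrative only and not part of the patch: the names example_gcm_encrypt, aead_done and aead_result are hypothetical, the key/iv/assoc/data buffers are assumed to be supplied by the caller, and error handling is abbreviated.

/* Hypothetical usage sketch for the AEAD interface added above. */
#include <linux/crypto.h>
#include <linux/scatterlist.h>
#include <linux/completion.h>
#include <linux/slab.h>
#include <linux/err.h>

struct aead_result {
	struct completion done;
	int err;
};

/* Completion callback matching crypto_completion_t. */
static void aead_done(struct crypto_async_request *req, int err)
{
	struct aead_result *res = req->data;

	if (err == -EINPROGRESS)	/* request left the backlog, keep waiting */
		return;

	res->err = err;
	complete(&res->done);
}

/*
 * Encrypt datalen bytes of data[] in place with "gcm(aes)"; data[] is
 * assumed to have room for the 16-byte authentication tag appended after
 * the ciphertext, and iv[] to be crypto_aead_ivsize() bytes (12 for GCM).
 */
static int example_gcm_encrypt(const u8 *key, unsigned int keylen,
			       u8 *iv, u8 *assoc, unsigned int assoclen,
			       u8 *data, unsigned int datalen)
{
	struct crypto_aead *tfm;
	struct aead_request *req;
	struct scatterlist sg[1], asg[1];
	struct aead_result res;
	int err;

	tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_aead_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	err = crypto_aead_setauthsize(tfm, 16);
	if (err)
		goto out_free_tfm;

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	sg_init_one(sg, data, datalen + 16);	/* plaintext + room for the tag */
	sg_init_one(asg, assoc, assoclen);	/* associated data */

	init_completion(&res.done);
	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  aead_done, &res);
	aead_request_set_crypt(req, sg, sg, datalen, iv);
	aead_request_set_assoc(req, asg, assoclen);

	err = crypto_aead_encrypt(req);
	if (err == -EINPROGRESS || err == -EBUSY) {
		/* Asynchronous path: wait for the callback to fire. */
		wait_for_completion(&res.done);
		err = res.err;
	}

	aead_request_free(req);
out_free_tfm:
	crypto_free_aead(tfm);
	return err;
}

For decryption the same setup applies, except that cryptlen then covers the ciphertext plus the authentication tag and crypto_aead_decrypt() reports a verification failure as -EBADMSG.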