author     Herbert Xu <herbert@gondor.apana.org.au>    2006-08-13 20:58:18 +1000
committer  Herbert Xu <herbert@gondor.apana.org.au>    2006-09-21 11:41:51 +1000
commit     f28776a369b12f9a03a822a8e1090ed670a41f4f (patch)
tree       b1eb08db2d7ad5c83a4b2784aea3af0502d127b3 /include
parent     e853c3cfa8cc24869ecd2526e589bcb176bc12e9 (diff)
[CRYPTO] cipher: Added encrypt_one/decrypt_one
This patch adds two new operations for the simple cipher that encrypt or decrypt a single block at a time. This will be the main interface after the existing block operations have moved over to the new block ciphers.

It also adds the crypto_cipher type, which is currently only used by the new operations but will be extended to setkey as well once existing users have been converted to use block ciphers where applicable.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'include')
-rw-r--r--   include/crypto/algapi.h  |  5
-rw-r--r--   include/linux/crypto.h   | 96
2 files changed, 101 insertions(+), 0 deletions(-)
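A rough usage sketch of the new single-block interface (not part of the patch): the cipher name "aes", the 16-byte key and block buffers, and the zero type/mask arguments are illustrative assumptions, and setkey still goes through the underlying crypto_tfm at this stage of the series.

#include <linux/crypto.h>
#include <linux/err.h>

/* Hedged example: encrypt and decrypt one block with the operations
 * introduced by this patch.  "aes" and the 128-bit key are assumptions
 * made for illustration only. */
static int example_one_block(void)
{
	struct crypto_cipher *tfm;
	u8 key[16] = { 0 };	/* illustrative key */
	u8 in[16] = { 0 };	/* exactly one cipher block */
	u8 out[16];
	int err;

	tfm = crypto_alloc_cipher("aes", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* setkey has not been converted to crypto_cipher yet, so it is
	 * still called on the underlying crypto_tfm. */
	err = crypto_cipher_setkey(crypto_cipher_tfm(tfm), key, sizeof(key));
	if (err)
		goto out;

	/* The new single-block operations added by this patch. */
	crypto_cipher_encrypt_one(tfm, out, in);
	crypto_cipher_decrypt_one(tfm, in, out);

out:
	crypto_free_cipher(tfm);
	return err;
}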
diff --git a/include/crypto/algapi.h b/include/crypto/algapi.h
index c533c0a..6f9fb27 100644
--- a/include/crypto/algapi.h
+++ b/include/crypto/algapi.h
@@ -69,5 +69,10 @@ static inline void *crypto_instance_ctx(struct crypto_instance *inst)
return inst->__ctx;
}
+static inline struct cipher_alg *crypto_cipher_alg(struct crypto_cipher *tfm)
+{
+ return &crypto_cipher_tfm(tfm)->__crt_alg->cra_cipher;
+}
+
#endif /* _CRYPTO_ALGAPI_H */
diff --git a/include/linux/crypto.h b/include/linux/crypto.h
index 8e9c407..fdecee8 100644
--- a/include/linux/crypto.h
+++ b/include/linux/crypto.h
@@ -224,6 +224,8 @@ struct cipher_tfm {
struct scatterlist *src,
unsigned int nbytes, u8 *iv);
void (*cit_xor_block)(u8 *dst, const u8 *src);
+ void (*cit_encrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
+ void (*cit_decrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
};
struct digest_tfm {
@@ -268,6 +270,8 @@ struct crypto_tfm {
void *__crt_ctx[] CRYPTO_MINALIGN_ATTR;
};
+#define crypto_cipher crypto_tfm
+
enum {
CRYPTOA_UNSPEC,
CRYPTOA_ALG,
@@ -347,6 +351,21 @@ static inline unsigned int crypto_tfm_alg_alignmask(struct crypto_tfm *tfm)
return tfm->__crt_alg->cra_alignmask;
}
+static inline u32 crypto_tfm_get_flags(struct crypto_tfm *tfm)
+{
+ return tfm->crt_flags;
+}
+
+static inline void crypto_tfm_set_flags(struct crypto_tfm *tfm, u32 flags)
+{
+ tfm->crt_flags |= flags;
+}
+
+static inline void crypto_tfm_clear_flags(struct crypto_tfm *tfm, u32 flags)
+{
+ tfm->crt_flags &= ~flags;
+}
+
static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
{
return tfm->__crt_ctx;
@@ -361,6 +380,83 @@ static inline unsigned int crypto_tfm_ctx_alignment(void)
/*
* API wrappers.
*/
+static inline struct crypto_cipher *__crypto_cipher_cast(struct crypto_tfm *tfm)
+{
+ return (struct crypto_cipher *)tfm;
+}
+
+static inline struct crypto_cipher *crypto_cipher_cast(struct crypto_tfm *tfm)
+{
+ BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
+ return __crypto_cipher_cast(tfm);
+}
+
+static inline struct crypto_cipher *crypto_alloc_cipher(const char *alg_name,
+ u32 type, u32 mask)
+{
+ type &= ~CRYPTO_ALG_TYPE_MASK;
+ type |= CRYPTO_ALG_TYPE_CIPHER;
+ mask |= CRYPTO_ALG_TYPE_MASK;
+
+ return __crypto_cipher_cast(crypto_alloc_base(alg_name, type, mask));
+}
+
+static inline struct crypto_tfm *crypto_cipher_tfm(struct crypto_cipher *tfm)
+{
+ return tfm;
+}
+
+static inline void crypto_free_cipher(struct crypto_cipher *tfm)
+{
+ crypto_free_tfm(crypto_cipher_tfm(tfm));
+}
+
+static inline struct cipher_tfm *crypto_cipher_crt(struct crypto_cipher *tfm)
+{
+ return &crypto_cipher_tfm(tfm)->crt_cipher;
+}
+
+static inline unsigned int crypto_cipher_blocksize(struct crypto_cipher *tfm)
+{
+ return crypto_tfm_alg_blocksize(crypto_cipher_tfm(tfm));
+}
+
+static inline unsigned int crypto_cipher_alignmask(struct crypto_cipher *tfm)
+{
+ return crypto_tfm_alg_alignmask(crypto_cipher_tfm(tfm));
+}
+
+static inline u32 crypto_cipher_get_flags(struct crypto_cipher *tfm)
+{
+ return crypto_tfm_get_flags(crypto_cipher_tfm(tfm));
+}
+
+static inline void crypto_cipher_set_flags(struct crypto_cipher *tfm,
+ u32 flags)
+{
+ crypto_tfm_set_flags(crypto_cipher_tfm(tfm), flags);
+}
+
+static inline void crypto_cipher_clear_flags(struct crypto_cipher *tfm,
+ u32 flags)
+{
+ crypto_tfm_clear_flags(crypto_cipher_tfm(tfm), flags);
+}
+
+static inline void crypto_cipher_encrypt_one(struct crypto_cipher *tfm,
+ u8 *dst, const u8 *src)
+{
+ crypto_cipher_crt(tfm)->cit_encrypt_one(crypto_cipher_tfm(tfm),
+ dst, src);
+}
+
+static inline void crypto_cipher_decrypt_one(struct crypto_cipher *tfm,
+ u8 *dst, const u8 *src)
+{
+ crypto_cipher_crt(tfm)->cit_decrypt_one(crypto_cipher_tfm(tfm),
+ dst, src);
+}
+
static inline void crypto_digest_init(struct crypto_tfm *tfm)
{
BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);