author     Herbert Xu <herbert@gondor.apana.org.au>    2007-12-05 20:59:25 +1100
committer  Herbert Xu <herbert@gondor.apana.org.au>    2008-01-11 08:16:33 +1100
commit     b2ab4a57b018aafbba35bff088218f5cc3d2142e (patch)
tree       a46c5bd42927c24c69f0786be2651ff3fba6c10e
parent     42c271c6c538857cb13c5ead5184d264d745f675 (diff)
[CRYPTO] scatterwalk: Restore custom sg chaining for now
Unfortunately the generic sg chaining has not yet been ported to all architectures, notably not s390. So this patch restores the custom chaining that we were using previously, which does work everywhere.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
-rw-r--r--  crypto/digest.c               |  2
-rw-r--r--  crypto/gcm.c                  |  2
-rw-r--r--  crypto/hmac.c                 |  3
-rw-r--r--  crypto/scatterwalk.c          |  4
-rw-r--r--  include/crypto/scatterwalk.h  | 11
5 files changed, 17 insertions, 5 deletions
diff --git a/crypto/digest.c b/crypto/digest.c
index 52845f5..6fd43bd 100644
--- a/crypto/digest.c
+++ b/crypto/digest.c
@@ -75,7 +75,7 @@ static int update2(struct hash_desc *desc,
if (!nbytes)
break;
- sg = sg_next(sg);
+ sg = scatterwalk_sg_next(sg);
}
return 0;
diff --git a/crypto/gcm.c b/crypto/gcm.c
index 27483f3..502da92 100644
--- a/crypto/gcm.c
+++ b/crypto/gcm.c
@@ -100,7 +100,7 @@ static void crypto_gcm_ghash_update_sg(struct crypto_gcm_ghash_ctx *ctx,
n = scatterwalk_clamp(&walk, len);
if (!n) {
- scatterwalk_start(&walk, sg_next(walk.sg));
+ scatterwalk_start(&walk, scatterwalk_sg_next(walk.sg));
n = scatterwalk_clamp(&walk, len);
}
diff --git a/crypto/hmac.c b/crypto/hmac.c
index 34c3706..a1d016a 100644
--- a/crypto/hmac.c
+++ b/crypto/hmac.c
@@ -17,6 +17,7 @@
*/
#include <crypto/algapi.h>
+#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
@@ -160,7 +161,7 @@ static int hmac_digest(struct hash_desc *pdesc, struct scatterlist *sg,
sg_init_table(sg1, 2);
sg_set_buf(sg1, ipad, bs);
- sg_chain(sg1, 2, sg);
+ scatterwalk_sg_chain(sg1, 2, sg);
sg_init_table(sg2, 1);
sg_set_buf(sg2, opad, bs + ds);
diff --git a/crypto/scatterwalk.c b/crypto/scatterwalk.c
index 12d1901..297e19d 100644
--- a/crypto/scatterwalk.c
+++ b/crypto/scatterwalk.c
@@ -61,7 +61,7 @@ static void scatterwalk_pagedone(struct scatter_walk *walk, int out,
walk->offset += PAGE_SIZE - 1;
walk->offset &= PAGE_MASK;
if (walk->offset >= walk->sg->offset + walk->sg->length)
- scatterwalk_start(walk, sg_next(walk->sg));
+ scatterwalk_start(walk, scatterwalk_sg_next(walk->sg));
}
}
@@ -112,7 +112,7 @@ void scatterwalk_map_and_copy(void *buf, struct scatterlist *sg,
break;
offset += sg->length;
- sg = sg_next(sg);
+ sg = scatterwalk_sg_next(sg);
}
scatterwalk_advance(&walk, start - offset);
diff --git a/include/crypto/scatterwalk.h b/include/crypto/scatterwalk.h
index 07b6f17..bd62431 100644
--- a/include/crypto/scatterwalk.h
+++ b/include/crypto/scatterwalk.h
@@ -52,6 +52,17 @@ static inline void crypto_yield(u32 flags)
cond_resched();
}
+static inline void scatterwalk_sg_chain(struct scatterlist *sg1, int num,
+ struct scatterlist *sg2)
+{
+ sg_set_page(&sg1[num - 1], (void *)sg2, 0, 0);
+}
+
+static inline struct scatterlist *scatterwalk_sg_next(struct scatterlist *sg)
+{
+ return (++sg)->length ? sg : (void *)sg_page(sg);
+}
+
static inline unsigned long scatterwalk_samebuf(struct scatter_walk *walk_in,
struct scatter_walk *walk_out)
{
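For context, here is a minimal usage sketch of the restored helpers. The buffers, lengths and the process() consumer are hypothetical and not part of the patch; buf_a and buf_b are assumed to be kmalloc'ed buffers. scatterwalk_sg_chain() turns the last entry of the first list into a link entry whose page pointer holds the address of the second list and whose length is zero, and scatterwalk_sg_next() follows that link whenever it steps onto a zero-length entry. The in-tree callers (e.g. update2() in crypto/digest.c) terminate the walk on the remaining byte count rather than on a NULL return, and the sketch follows the same pattern.

#include <linux/kernel.h>
#include <linux/scatterlist.h>
#include <crypto/scatterwalk.h>

/* Hypothetical stand-in for real per-segment processing. */
static void process(const void *data, unsigned int len)
{
}

static void chain_and_walk(void *buf_a, unsigned int len_a,
			   void *buf_b, unsigned int len_b)
{
	struct scatterlist head[2];
	struct scatterlist tail[1];
	struct scatterlist *sg = head;
	unsigned int nbytes = len_a + len_b;

	sg_init_table(head, 2);
	sg_set_buf(&head[0], buf_a, len_a);
	scatterwalk_sg_chain(head, 2, tail);	/* head[1] becomes the link entry */

	sg_init_table(tail, 1);
	sg_set_buf(&tail[0], buf_b, len_b);

	/* Visit head[0], then follow the link to tail[0]; stop when the
	 * requested byte count has been consumed. */
	for (;;) {
		unsigned int n = min(nbytes, sg->length);

		process(sg_virt(sg), n);
		nbytes -= n;
		if (!nbytes)
			break;
		sg = scatterwalk_sg_next(sg);
	}
}

The stack-allocated scatterlists mirror what hmac_digest() does after this patch: a two-entry list whose second slot is sacrificed as the link, chained onto a separately initialised list that carries the remaining data.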