--- a/crypto/Kconfig
+++ b/crypto/Kconfig
-@@ -65,6 +65,7 @@
+@@ -65,6 +65,7 @@ config CRYPTO_NULL
config CRYPTO_CRYPTD
tristate "Software async crypto daemon"
select CRYPTO_BLKCIPHER
select CRYPTO_MANAGER
help
This is a generic software asynchronous crypto daemon that
-@@ -212,7 +213,7 @@
+@@ -212,7 +213,7 @@ comment "Digest"
config CRYPTO_CRC32C
tristate "CRC32c CRC algorithm"
select LIBCRC32C
help
Castagnoli, et al Cyclic Redundancy-Check Algorithm. Used
-@@ -241,6 +242,57 @@
+@@ -241,6 +242,57 @@ config CRYPTO_MICHAEL_MIC
should not be used for other purposes because of the weakness
of the algorithm.
config CRYPTO_SHA1
tristate "SHA1 digest algorithm"
select CRYPTO_ALGAPI
-@@ -614,6 +666,15 @@
+@@ -614,6 +666,15 @@ config CRYPTO_LZO
help
This is the LZO algorithm.
endif # if CRYPTO
--- a/crypto/Makefile
+++ b/crypto/Makefile
-@@ -19,6 +19,7 @@
+@@ -19,6 +19,7 @@ obj-$(CONFIG_CRYPTO_BLKCIPHER) += crypto
obj-$(CONFIG_CRYPTO_SEQIV) += seqiv.o
crypto_hash-objs := hash.o
obj-$(CONFIG_CRYPTO_HASH) += crypto_hash.o
obj-$(CONFIG_CRYPTO_MANAGER) += cryptomgr.o
-@@ -27,6 +28,10 @@
+@@ -27,6 +28,10 @@ obj-$(CONFIG_CRYPTO_XCBC) += xcbc.o
obj-$(CONFIG_CRYPTO_NULL) += crypto_null.o
obj-$(CONFIG_CRYPTO_MD4) += md4.o
obj-$(CONFIG_CRYPTO_MD5) += md5.o
obj-$(CONFIG_CRYPTO_SHA1) += sha1_generic.o
obj-$(CONFIG_CRYPTO_SHA256) += sha256_generic.o
obj-$(CONFIG_CRYPTO_SHA512) += sha512_generic.o
-@@ -64,7 +69,7 @@
+@@ -64,7 +69,7 @@ obj-$(CONFIG_CRYPTO_MICHAEL_MIC) += mich
obj-$(CONFIG_CRYPTO_CRC32C) += crc32c.o
obj-$(CONFIG_CRYPTO_AUTHENC) += authenc.o
obj-$(CONFIG_CRYPTO_LZO) += lzo.o
+MODULE_DESCRIPTION("Asynchronous cryptographic hash type");
--- a/crypto/api.c
+++ b/crypto/api.c
-@@ -235,8 +235,12 @@
+@@ -235,8 +235,12 @@ static int crypto_init_ops(struct crypto
return crypto_init_cipher_ops(tfm);
case CRYPTO_ALG_TYPE_DIGEST:
--- a/crypto/camellia.c
+++ b/crypto/camellia.c
static const u32 camellia_sp1110[256] = {
0x70707000,0x82828200,0x2c2c2c00,0xececec00,
-@@ -335,20 +337,6 @@
+@@ -335,20 +337,6 @@ static const u32 camellia_sp4404[256] =
/*
* macros
*/
#define ROLDQ(ll, lr, rl, rr, w0, w1, bits) \
do { \
w0 = ll; \
-@@ -383,7 +371,7 @@
+@@ -383,7 +371,7 @@ static const u32 camellia_sp4404[256] =
^ camellia_sp3033[(u8)(il >> 8)] \
^ camellia_sp4404[(u8)(il )]; \
yl ^= yr; \
yr ^= yl; \
} while(0)
-@@ -405,7 +393,7 @@
+@@ -405,7 +393,7 @@ static void camellia_setup_tail(u32 *sub
subL[7] ^= subL[1]; subR[7] ^= subR[1];
subL[1] ^= subR[1] & ~subR[9];
dw = subL[1] & subL[9],
/* round 8 */
subL[11] ^= subL[1]; subR[11] ^= subR[1];
/* round 10 */
-@@ -414,7 +402,7 @@
+@@ -414,7 +402,7 @@ static void camellia_setup_tail(u32 *sub
subL[15] ^= subL[1]; subR[15] ^= subR[1];
subL[1] ^= subR[1] & ~subR[17];
dw = subL[1] & subL[17],
/* round 14 */
subL[19] ^= subL[1]; subR[19] ^= subR[1];
/* round 16 */
-@@ -430,7 +418,7 @@
+@@ -430,7 +418,7 @@ static void camellia_setup_tail(u32 *sub
} else {
subL[1] ^= subR[1] & ~subR[25];
dw = subL[1] & subL[25],
/* round 20 */
subL[27] ^= subL[1]; subR[27] ^= subR[1];
/* round 22 */
-@@ -450,7 +438,7 @@
+@@ -450,7 +438,7 @@ static void camellia_setup_tail(u32 *sub
subL[26] ^= kw4l; subR[26] ^= kw4r;
kw4l ^= kw4r & ~subR[24];
dw = kw4l & subL[24],
}
/* round 17 */
subL[22] ^= kw4l; subR[22] ^= kw4r;
-@@ -460,7 +448,7 @@
+@@ -460,7 +448,7 @@ static void camellia_setup_tail(u32 *sub
subL[18] ^= kw4l; subR[18] ^= kw4r;
kw4l ^= kw4r & ~subR[16];
dw = kw4l & subL[16],
/* round 11 */
subL[14] ^= kw4l; subR[14] ^= kw4r;
/* round 9 */
-@@ -469,7 +457,7 @@
+@@ -469,7 +457,7 @@ static void camellia_setup_tail(u32 *sub
subL[10] ^= kw4l; subR[10] ^= kw4r;
kw4l ^= kw4r & ~subR[8];
dw = kw4l & subL[8],
/* round 5 */
subL[6] ^= kw4l; subR[6] ^= kw4r;
/* round 3 */
-@@ -494,7 +482,7 @@
+@@ -494,7 +482,7 @@ static void camellia_setup_tail(u32 *sub
SUBKEY_R(6) = subR[5] ^ subR[7];
tl = subL[10] ^ (subR[10] & ~subR[8]);
dw = tl & subL[8], /* FL(kl1) */
SUBKEY_L(7) = subL[6] ^ tl; /* round 6 */
SUBKEY_R(7) = subR[6] ^ tr;
SUBKEY_L(8) = subL[8]; /* FL(kl1) */
-@@ -503,7 +491,7 @@
+@@ -503,7 +491,7 @@ static void camellia_setup_tail(u32 *sub
SUBKEY_R(9) = subR[9];
tl = subL[7] ^ (subR[7] & ~subR[9]);
dw = tl & subL[9], /* FLinv(kl2) */
SUBKEY_L(10) = tl ^ subL[11]; /* round 7 */
SUBKEY_R(10) = tr ^ subR[11];
SUBKEY_L(11) = subL[10] ^ subL[12]; /* round 8 */
-@@ -516,7 +504,7 @@
+@@ -516,7 +504,7 @@ static void camellia_setup_tail(u32 *sub
SUBKEY_R(14) = subR[13] ^ subR[15];
tl = subL[18] ^ (subR[18] & ~subR[16]);
dw = tl & subL[16], /* FL(kl3) */
SUBKEY_L(15) = subL[14] ^ tl; /* round 12 */
SUBKEY_R(15) = subR[14] ^ tr;
SUBKEY_L(16) = subL[16]; /* FL(kl3) */
-@@ -525,7 +513,7 @@
+@@ -525,7 +513,7 @@ static void camellia_setup_tail(u32 *sub
SUBKEY_R(17) = subR[17];
tl = subL[15] ^ (subR[15] & ~subR[17]);
dw = tl & subL[17], /* FLinv(kl4) */
SUBKEY_L(18) = tl ^ subL[19]; /* round 13 */
SUBKEY_R(18) = tr ^ subR[19];
SUBKEY_L(19) = subL[18] ^ subL[20]; /* round 14 */
-@@ -544,7 +532,7 @@
+@@ -544,7 +532,7 @@ static void camellia_setup_tail(u32 *sub
} else {
tl = subL[26] ^ (subR[26] & ~subR[24]);
dw = tl & subL[24], /* FL(kl5) */
SUBKEY_L(23) = subL[22] ^ tl; /* round 18 */
SUBKEY_R(23) = subR[22] ^ tr;
SUBKEY_L(24) = subL[24]; /* FL(kl5) */
-@@ -553,7 +541,7 @@
+@@ -553,7 +541,7 @@ static void camellia_setup_tail(u32 *sub
SUBKEY_R(25) = subR[25];
tl = subL[23] ^ (subR[23] & ~subR[25]);
dw = tl & subL[25], /* FLinv(kl6) */
SUBKEY_L(26) = tl ^ subL[27]; /* round 19 */
SUBKEY_R(26) = tr ^ subR[27];
SUBKEY_L(27) = subL[26] ^ subL[28]; /* round 20 */
-@@ -573,17 +561,17 @@
+@@ -573,17 +561,17 @@ static void camellia_setup_tail(u32 *sub
/* apply the inverse of the last half of P-function */
i = 2;
do {
SUBKEY_R(i + 5) = SUBKEY_L(i + 5) ^ dw; SUBKEY_L(i + 5) = dw;
i += 8;
} while (i < max);
-@@ -599,10 +587,10 @@
+@@ -599,10 +587,10 @@ static void camellia_setup128(const unsi
/**
* k == kll || klr || krl || krr (|| is concatenation)
*/
/* generate KL dependent subkeys */
/* kw1 */
-@@ -707,14 +695,14 @@
+@@ -707,14 +695,14 @@ static void camellia_setup256(const unsi
* key = (kll || klr || krl || krr || krll || krlr || krrl || krrr)
* (|| is concatenation)
*/
/* generate KL dependent subkeys */
/* kw1 */
-@@ -870,13 +858,13 @@
+@@ -870,13 +858,13 @@ static void camellia_setup192(const unsi
t0 &= ll; \
t2 |= rr; \
rl ^= t2; \
} while(0)
#define CAMELLIA_ROUNDSM(xl, xr, kl, kr, yl, yr, il, ir) \
-@@ -892,7 +880,7 @@
+@@ -892,7 +880,7 @@ static void camellia_setup192(const unsi
il ^= kl; \
ir ^= il ^ kr; \
yl ^= ir; \
} while(0)
/* max = 24: 128bit encrypt, max = 32: 256bit encrypt */
---- a/crypto/chainiv.c
-+++ b/crypto/chainiv.c
-@@ -117,6 +117,7 @@
- static int async_chainiv_schedule_work(struct async_chainiv_ctx *ctx)
- {
- int queued;
-+ int err = ctx->err;
-
- if (!ctx->queue.qlen) {
- smp_mb__before_clear_bit();
-@@ -131,7 +132,7 @@
- BUG_ON(!queued);
-
- out:
-- return ctx->err;
-+ return err;
- }
-
- static int async_chainiv_postpone_request(struct skcipher_givcrypt_request *req)
-@@ -227,6 +228,7 @@
- postponed);
- struct skcipher_givcrypt_request *req;
- struct ablkcipher_request *subreq;
-+ int err;
-
- /* Only handle one request at a time to avoid hogging keventd. */
- spin_lock_bh(&ctx->lock);
-@@ -241,7 +243,11 @@
- subreq = skcipher_givcrypt_reqctx(req);
- subreq->base.flags |= CRYPTO_TFM_REQ_MAY_SLEEP;
-
-- async_chainiv_givencrypt_tail(req);
-+ err = async_chainiv_givencrypt_tail(req);
-+
-+ local_bh_disable();
-+ skcipher_givcrypt_complete(req, err);
-+ local_bh_enable();
- }
-
- static int async_chainiv_init(struct crypto_tfm *tfm)
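A minimal sketch (not part of the patch) of the pattern the chainiv hunk above applies: async_chainiv_schedule_work() now snapshots ctx->err into a local before queueing work, apparently because the shared error field may be rewritten once another request can run. The struct and function names below are invented for illustration.

#include <linux/workqueue.h>

struct demo_ctx {
        int err;                        /* status of the last completed request */
        struct work_struct postponed;   /* worker for deferred requests */
};

static int demo_schedule_work(struct demo_ctx *ctx)
{
        int err = ctx->err;     /* snapshot before the handoff below */

        /* After this call another request may overwrite ctx->err. */
        schedule_work(&ctx->postponed);

        return err;             /* report what this request actually observed */
}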
--- a/crypto/crc32c.c
+++ b/crypto/crc32c.c
-@@ -5,20 +5,23 @@
+@@ -1,24 +1,27 @@
+-/*
++/*
+ * Cryptographic API.
+ *
+ * CRC32C chksum
*
* This module file is a wrapper to invoke the lib/crc32c routines.
*
+ *
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
- * Software Foundation; either version 2 of the License, or (at your option)
+- * Software Foundation; either version 2 of the License, or (at your option)
++ * Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
*/
#define CHKSUM_DIGEST_SIZE 4
struct chksum_ctx {
-@@ -71,7 +74,7 @@
+@@ -27,7 +30,7 @@ struct chksum_ctx {
+ };
+
+ /*
+- * Steps through buffer one byte at at time, calculates reflected
++ * Steps through buffer one byte at at time, calculates reflected
+ * crc using table.
+ */
+
+@@ -67,11 +70,11 @@ static void chksum_update(struct crypto_
+ static void chksum_final(struct crypto_tfm *tfm, u8 *out)
+ {
+ struct chksum_ctx *mctx = crypto_tfm_ctx(tfm);
+-
++
*(__le32 *)out = ~cpu_to_le32(mctx->crc);
}
{
struct chksum_ctx *mctx = crypto_tfm_ctx(tfm);
-@@ -79,14 +82,14 @@
+@@ -79,14 +82,14 @@ static int crc32c_cra_init(struct crypto
return 0;
}
.cra_u = {
.digest = {
.dia_digestsize= CHKSUM_DIGEST_SIZE,
-@@ -98,14 +101,125 @@
+@@ -98,14 +101,125 @@ static struct crypto_alg alg = {
}
};
+static int crc32c_final(struct ahash_request *req)
+{
+ u32 *crcp = ahash_request_ctx(req);
-+
++
+ *(__le32 *)req->result = ~cpu_to_le32p(crcp);
+ return 0;
+}
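A standalone sketch (assumed, not taken from the patch) of the finalisation step that chksum_final() and the new crc32c_final() above perform: the running CRC is bit-inverted and stored in little-endian byte order. crc32c_store_result() is an invented name.

#include <stdint.h>

static void crc32c_store_result(uint32_t running_crc, uint8_t out[4])
{
        uint32_t final = ~running_crc;  /* final bit inversion */

        out[0] = final & 0xff;          /* little-endian, as cpu_to_le32() does */
        out[1] = (final >> 8) & 0xff;
        out[2] = (final >> 16) & 0xff;
        out[3] = (final >> 24) & 0xff;
}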
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
-@@ -45,6 +46,13 @@
+@@ -45,6 +46,13 @@ struct cryptd_blkcipher_request_ctx {
crypto_completion_t complete;
};
static inline struct cryptd_state *cryptd_get_state(struct crypto_tfm *tfm)
{
-@@ -82,10 +90,8 @@
+@@ -82,10 +90,8 @@ static void cryptd_blkcipher_crypt(struc
rctx = ablkcipher_request_ctx(req);
desc.tfm = child;
desc.info = req->info;
-@@ -95,8 +101,9 @@
+@@ -95,8 +101,9 @@ static void cryptd_blkcipher_crypt(struc
req->base.complete = rctx->complete;
local_bh_enable();
}
-@@ -261,6 +268,240 @@
+@@ -261,6 +268,240 @@ out_put_alg:
return inst;
}
static struct cryptd_state state;
static struct crypto_instance *cryptd_alloc(struct rtattr **tb)
-@@ -274,6 +515,8 @@
+@@ -274,6 +515,8 @@ static struct crypto_instance *cryptd_al
switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
case CRYPTO_ALG_TYPE_BLKCIPHER:
return cryptd_alloc_blkcipher(tb, &state);
#include <crypto/scatterwalk.h>
#include <linux/mm.h>
#include <linux/errno.h>
-@@ -141,7 +142,7 @@
+@@ -141,7 +142,7 @@ int crypto_init_digest_ops(struct crypto
struct hash_tfm *ops = &tfm->crt_hash;
struct digest_alg *dalg = &tfm->__crt_alg->cra_digest;
return -EINVAL;
ops->init = init;
-@@ -157,3 +158,83 @@
+@@ -157,3 +158,83 @@ int crypto_init_digest_ops(struct crypto
void crypto_exit_digest_ops(struct crypto_tfm *tfm)
{
}
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/module.h>
-@@ -59,24 +60,107 @@
+@@ -59,24 +60,107 @@ static int hash_setkey(struct crypto_has
return alg->setkey(crt, key, keylen);
}
static void crypto_hash_show(struct seq_file *m, struct crypto_alg *alg)
--- a/crypto/hmac.c
+++ b/crypto/hmac.c
-@@ -226,6 +226,7 @@
+@@ -226,6 +226,7 @@ static struct crypto_instance *hmac_allo
struct crypto_instance *inst;
struct crypto_alg *alg;
int err;
err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_HASH);
if (err)
-@@ -236,6 +237,13 @@
+@@ -236,6 +237,13 @@ static struct crypto_instance *hmac_allo
if (IS_ERR(alg))
return ERR_CAST(alg);
inst = crypto_alloc_instance("hmac", alg);
if (IS_ERR(inst))
goto out_put_alg;
-@@ -246,14 +254,10 @@
+@@ -246,14 +254,10 @@ static struct crypto_instance *hmac_allo
inst->alg.cra_alignmask = alg->cra_alignmask;
inst->alg.cra_type = &crypto_hash_type;
inst->alg.cra_init = hmac_init_tfm;
--- a/crypto/internal.h
+++ b/crypto/internal.h
-@@ -86,6 +86,7 @@
+@@ -86,6 +86,7 @@ struct crypto_alg *__crypto_alg_lookup(c
struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask);
int crypto_init_digest_ops(struct crypto_tfm *tfm);
--- a/crypto/tcrypt.c
+++ b/crypto/tcrypt.c
#define IDX2 32400
#define IDX3 1
#define IDX4 8193
-@@ -83,7 +76,8 @@
+@@ -83,7 +76,8 @@ static char *check[] = {
"blowfish", "twofish", "serpent", "sha384", "sha512", "md4", "aes",
"cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
"khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt",
};
static void hexdump(unsigned char *buf, unsigned int len)
-@@ -110,22 +104,30 @@
+@@ -110,22 +104,30 @@ static void test_hash(char *algo, struct
unsigned int i, j, k, temp;
struct scatterlist sg[8];
char result[64];
for (i = 0; i < tcount; i++) {
printk("test %u:\n", i + 1);
-@@ -139,8 +141,9 @@
+@@ -139,8 +141,9 @@ static void test_hash(char *algo, struct
sg_init_one(&sg[0], hash_buff, template[i].psize);
if (template[i].ksize) {
if (ret) {
printk("setkey() failed ret=%d\n", ret);
kfree(hash_buff);
-@@ -148,17 +151,30 @@
+@@ -148,17 +151,30 @@ static void test_hash(char *algo, struct
}
}
"fail" : "pass");
kfree(hash_buff);
}
-@@ -187,8 +203,9 @@
+@@ -187,8 +203,9 @@ static void test_hash(char *algo, struct
}
if (template[i].ksize) {
if (ret) {
printk("setkey() failed ret=%d\n", ret);
-@@ -196,29 +213,44 @@
+@@ -196,29 +213,44 @@ static void test_hash(char *algo, struct
}
}
char *q;
struct crypto_aead *tfm;
char *key;
-@@ -344,13 +376,12 @@
+@@ -344,13 +376,12 @@ static void test_aead(char *algo, int en
goto next_one;
}
next_one:
if (!template[i].key)
kfree(key);
-@@ -360,7 +391,6 @@
+@@ -360,7 +391,6 @@ next_one:
}
printk(KERN_INFO "\ntesting %s %s across pages (chunking)\n", algo, e);
memset(axbuf, 0, XBUFSIZE);
for (i = 0, j = 0; i < tcount; i++) {
-@@ -388,6 +418,7 @@
+@@ -388,6 +418,7 @@ next_one:
goto out;
}
sg_init_table(sg, template[i].np);
for (k = 0, temp = 0; k < template[i].np; k++) {
memcpy(&xbuf[IDX[k]],
-@@ -450,7 +481,7 @@
+@@ -450,7 +481,7 @@ next_one:
for (k = 0, temp = 0; k < template[i].np; k++) {
printk(KERN_INFO "page %u\n", k);
hexdump(q, template[i].tap[k]);
printk(KERN_INFO "%s\n",
memcmp(q, template[i].result + temp,
-@@ -459,8 +490,15 @@
+@@ -459,8 +490,15 @@ next_one:
0 : authsize)) ?
"fail" : "pass");
}
}
}
-@@ -473,7 +511,7 @@
+@@ -473,7 +511,7 @@ out:
static void test_cipher(char *algo, int enc,
struct cipher_testvec *template, unsigned int tcount)
{
char *q;
struct crypto_ablkcipher *tfm;
struct ablkcipher_request *req;
-@@ -569,19 +607,17 @@
+@@ -569,19 +607,17 @@ static void test_cipher(char *algo, int
goto out;
}
j = 0;
for (i = 0; i < tcount; i++) {
-@@ -602,6 +638,7 @@
+@@ -596,6 +632,7 @@ static void test_cipher(char *algo, int
printk("test %u (%d bit key):\n",
j, template[i].klen * 8);
crypto_ablkcipher_clear_flags(tfm, ~0);
if (template[i].wk)
crypto_ablkcipher_set_flags(
-@@ -657,14 +694,21 @@
+@@ -649,14 +686,21 @@ static void test_cipher(char *algo, int
temp = 0;
for (k = 0; k < template[i].np; k++) {
printk("page %u\n", k);
}
}
}
-@@ -1180,6 +1224,14 @@
+@@ -1172,6 +1216,14 @@ static void do_test(void)
test_cipher("ecb(des3_ede)", DECRYPT, des3_ede_dec_tv_template,
DES3_EDE_DEC_TEST_VECTORS);
test_hash("md4", md4_tv_template, MD4_TEST_VECTORS);
test_hash("sha224", sha224_tv_template, SHA224_TEST_VECTORS);
-@@ -1390,6 +1442,14 @@
+@@ -1382,6 +1434,14 @@ static void do_test(void)
DES3_EDE_ENC_TEST_VECTORS);
test_cipher("ecb(des3_ede)", DECRYPT, des3_ede_dec_tv_template,
DES3_EDE_DEC_TEST_VECTORS);
break;
case 5:
-@@ -1558,7 +1618,7 @@
+@@ -1550,7 +1610,7 @@ static void do_test(void)
case 29:
test_hash("tgr128", tgr128_tv_template, TGR128_TEST_VECTORS);
break;
case 30:
test_cipher("ecb(xeta)", ENCRYPT, xeta_enc_tv_template,
XETA_ENC_TEST_VECTORS);
-@@ -1623,6 +1683,22 @@
+@@ -1615,6 +1675,22 @@ static void do_test(void)
CTS_MODE_DEC_TEST_VECTORS);
break;
case 100:
test_hash("hmac(md5)", hmac_md5_tv_template,
HMAC_MD5_TEST_VECTORS);
-@@ -1658,6 +1734,16 @@
+@@ -1650,6 +1726,16 @@ static void do_test(void)
XCBC_AES_TEST_VECTORS);
break;
case 200:
test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
speed_template_16_24_32);
-@@ -1796,6 +1882,22 @@
+@@ -1788,6 +1874,22 @@ static void do_test(void)
test_hash_speed("sha224", sec, generic_hash_speed_template);
if (mode > 300 && mode < 400) break;
*/
--- a/crypto/tcrypt.h
+++ b/crypto/tcrypt.h
#ifndef _CRYPTO_TCRYPT_H
#define _CRYPTO_TCRYPT_H
-@@ -168,6 +162,271 @@
+@@ -168,6 +162,271 @@ static struct hash_testvec md5_tv_templa
.digest = "\x57\xed\xf4\xa2\x2b\xe3\xc9\x55"
"\xac\x49\xda\x2e\x21\x07\xb6\x7a",
}
};
/*
-@@ -817,6 +1076,168 @@
+@@ -817,6 +1076,168 @@ static struct hash_testvec hmac_md5_tv_t
};
/*
* HMAC-SHA1 test vectors from RFC2202
*/
#define HMAC_SHA1_TEST_VECTORS 7
-@@ -1442,6 +1863,8 @@
+@@ -1442,6 +1863,8 @@ static struct hash_testvec hmac_sha512_t
#define DES_CBC_DEC_TEST_VECTORS 4
#define DES3_EDE_ENC_TEST_VECTORS 3
#define DES3_EDE_DEC_TEST_VECTORS 3
static struct cipher_testvec des_enc_tv_template[] = {
{ /* From Applied Cryptography */
-@@ -1680,9 +2103,6 @@
+@@ -1680,9 +2103,6 @@ static struct cipher_testvec des_cbc_dec
},
};
static struct cipher_testvec des3_ede_enc_tv_template[] = {
{ /* These are from openssl */
.key = "\x01\x23\x45\x67\x89\xab\xcd\xef"
-@@ -1745,6 +2165,94 @@
+@@ -1745,6 +2165,94 @@ static struct cipher_testvec des3_ede_de
},
};
*/
--- a/drivers/crypto/Kconfig
+++ b/drivers/crypto/Kconfig
-@@ -174,4 +174,30 @@
+@@ -174,4 +174,30 @@ config CRYPTO_DEV_HIFN_795X_RNG
Select this option if you want to enable the random number generator
on the HIFN 795x crypto adapters.
endif # CRYPTO_HW
--- a/drivers/crypto/Makefile
+++ b/drivers/crypto/Makefile
-@@ -2,3 +2,5 @@
+@@ -2,3 +2,5 @@ obj-$(CONFIG_CRYPTO_DEV_PADLOCK_AES) +=
obj-$(CONFIG_CRYPTO_DEV_PADLOCK_SHA) += padlock-sha.o
obj-$(CONFIG_CRYPTO_DEV_GEODE) += geode-aes.o
obj-$(CONFIG_CRYPTO_DEV_HIFN_795X) += hifn_795x.o
#include <linux/crypto.h>
#include <linux/hw_random.h>
#include <linux/ktime.h>
-@@ -369,7 +368,9 @@
+@@ -369,7 +368,9 @@ static atomic_t hifn_dev_number;
#define HIFN_D_DST_RSIZE 80*4
#define HIFN_D_RES_RSIZE 24*4
#define AES_MIN_KEY_SIZE 16
#define AES_MAX_KEY_SIZE 32
-@@ -535,10 +536,10 @@
+@@ -535,10 +536,10 @@ struct hifn_crypt_command
*/
struct hifn_mac_command
{
};
#define HIFN_MAC_CMD_ALG_MASK 0x0001
-@@ -564,10 +565,10 @@
+@@ -564,10 +565,10 @@ struct hifn_mac_command
struct hifn_comp_command
{
};
#define HIFN_COMP_CMD_SRCLEN_M 0xc000
-@@ -583,10 +584,10 @@
+@@ -583,10 +584,10 @@ struct hifn_comp_command
struct hifn_base_result
{
};
#define HIFN_BASE_RES_DSTOVERRUN 0x0200 /* destination overrun */
-@@ -597,8 +598,8 @@
+@@ -597,8 +598,8 @@ struct hifn_base_result
struct hifn_comp_result
{
};
#define HIFN_COMP_RES_LCB_M 0xff00 /* longitudinal check byte */
-@@ -609,8 +610,8 @@
+@@ -609,8 +610,8 @@ struct hifn_comp_result
struct hifn_mac_result
{
/* followed by 0, 6, 8, or 10 u16's of the MAC, then crypt */
};
-@@ -619,8 +620,8 @@
+@@ -619,8 +620,8 @@ struct hifn_mac_result
struct hifn_crypt_result
{
};
#define HIFN_CRYPT_RES_SRC_NOTZERO 0x0001 /* source expired */
-@@ -686,12 +687,12 @@
+@@ -686,12 +687,12 @@ static inline u32 hifn_read_1(struct hif
static inline void hifn_write_0(struct hifn_device *dev, u32 reg, u32 val)
{
}
static void hifn_wait_puc(struct hifn_device *dev)
-@@ -894,7 +895,7 @@
+@@ -894,7 +895,7 @@ static int hifn_enable_crypto(struct hif
char *offtbl = NULL;
int i;
if (pci2id[i].pci_vendor == dev->pdev->vendor &&
pci2id[i].pci_prod == dev->pdev->device) {
offtbl = pci2id[i].card_id;
-@@ -1037,14 +1038,14 @@
+@@ -1037,14 +1038,14 @@ static void hifn_init_registers(struct h
hifn_write_0(dev, HIFN_0_PUIER, HIFN_PUIER_DSTOVER);
/* write all 4 ring address registers */
mdelay(2);
#if 0
-@@ -1166,109 +1167,15 @@
+@@ -1166,109 +1167,15 @@ static int hifn_setup_crypto_command(str
return cmd_len;
}
buf_pos = buf = dma->command_bufs[dma->cmdi];
mask = 0;
-@@ -1370,16 +1277,113 @@
+@@ -1370,16 +1277,113 @@ static int hifn_setup_dma(struct hifn_de
hifn_write_1(dev, HIFN_1_DMA_CSR, HIFN_DMACSR_C_CTRL_ENA);
dev->flags |= HIFN_FLAG_CMD_BUSY;
}
static int ablkcipher_walk_init(struct ablkcipher_walk *w,
int num, gfp_t gfp_flags)
{
-@@ -1431,7 +1435,7 @@
+@@ -1431,7 +1435,7 @@ static int ablkcipher_add(void *daddr, u
return -EINVAL;
while (size) {
saddr = kmap_atomic(sg_page(src), KM_SOFTIRQ1);
memcpy(daddr, saddr + src->offset, copy);
-@@ -1458,10 +1462,6 @@
+@@ -1458,10 +1462,6 @@ static int ablkcipher_add(void *daddr, u
static int ablkcipher_walk(struct ablkcipher_request *req,
struct ablkcipher_walk *w)
{
struct scatterlist *src, *dst, *t;
void *daddr;
unsigned int nbytes = req->nbytes, offset, copy, diff;
-@@ -1477,16 +1477,14 @@
+@@ -1477,16 +1477,14 @@ static int ablkcipher_walk(struct ablkci
dst = &req->dst[idx];
dprintk("\n%s: slen: %u, dlen: %u, soff: %u, doff: %u, offset: %u, "
unsigned dlen = PAGE_SIZE;
t = &w->cache[idx];
-@@ -1498,8 +1496,8 @@
+@@ -1498,8 +1496,8 @@ static int ablkcipher_walk(struct ablkci
idx += err;
if (dlen < nbytes) {
/*
-@@ -1507,7 +1505,7 @@
+@@ -1507,7 +1505,7 @@ static int ablkcipher_walk(struct ablkci
* to put there additional blocksized chunk,
* so we mark that page as containing only
* blocksize aligned chunks:
* and increase number of bytes to be processed
* in next chunk:
* nbytes += diff;
-@@ -1544,7 +1542,7 @@
+@@ -1544,7 +1542,7 @@ static int ablkcipher_walk(struct ablkci
kunmap_atomic(daddr, KM_SOFTIRQ0);
} else {
idx++;
}
-@@ -1563,14 +1561,10 @@
+@@ -1563,14 +1561,10 @@ static int hifn_setup_session(struct abl
struct hifn_context *ctx = crypto_tfm_ctx(req->base.tfm);
struct hifn_device *dev = ctx->dev;
struct page *spage, *dpage;
if (ctx->iv && !ctx->ivsize && ctx->mode != ACRYPTO_MODE_ECB)
goto err_out_exit;
-@@ -1578,17 +1572,14 @@
+@@ -1578,17 +1572,14 @@ static int hifn_setup_session(struct abl
ctx->walk.flags = 0;
while (nbytes) {
idx++;
}
-@@ -1602,7 +1593,10 @@
+@@ -1602,7 +1593,10 @@ static int hifn_setup_session(struct abl
idx = 0;
sg_num = ablkcipher_walk(req, &ctx->walk);
atomic_set(&ctx->sg_num, sg_num);
spin_lock_irqsave(&dev->lock, flags);
-@@ -1640,7 +1634,7 @@
+@@ -1640,7 +1634,7 @@ static int hifn_setup_session(struct abl
if (err)
goto err_out;
}
dev->active = HIFN_DEFAULT_ACTIVE_NUM;
-@@ -1651,7 +1645,7 @@
+@@ -1651,7 +1645,7 @@ static int hifn_setup_session(struct abl
err_out:
spin_unlock_irqrestore(&dev->lock, flags);
err_out_exit:
dprintk("%s: iv: %p [%d], key: %p [%d], mode: %u, op: %u, "
"type: %u, err: %d.\n",
dev->name, ctx->iv, ctx->ivsize,
-@@ -1745,8 +1739,7 @@
+@@ -1745,8 +1739,7 @@ static int ablkcipher_get(void *saddr, u
return -EINVAL;
while (size) {
daddr = kmap_atomic(sg_page(dst), KM_IRQ0);
memcpy(daddr + dst->offset + offset, saddr, copy);
-@@ -1803,7 +1796,7 @@
+@@ -1803,7 +1796,7 @@ static void hifn_process_ready(struct ab
sg_page(dst), dst->length, nbytes);
if (!t->length) {
idx++;
continue;
}
-@@ -2202,9 +2195,9 @@
+@@ -2202,9 +2195,9 @@ static int hifn_setup_crypto(struct ablk
return err;
if (dev->started < HIFN_QUEUE_LENGTH && dev->queue.qlen)
}
/*
-@@ -2364,7 +2357,7 @@
+@@ -2364,7 +2357,7 @@ static struct hifn_alg_template hifn_alg
* 3DES ECB, CBC, CFB and OFB modes.
*/
{
.ablkcipher = {
.min_keysize = HIFN_3DES_KEY_LENGTH,
.max_keysize = HIFN_3DES_KEY_LENGTH,
-@@ -2374,7 +2367,7 @@
+@@ -2374,7 +2367,7 @@ static struct hifn_alg_template hifn_alg
},
},
{
.ablkcipher = {
.min_keysize = HIFN_3DES_KEY_LENGTH,
.max_keysize = HIFN_3DES_KEY_LENGTH,
-@@ -2384,8 +2377,9 @@
+@@ -2384,8 +2377,9 @@ static struct hifn_alg_template hifn_alg
},
},
{
.min_keysize = HIFN_3DES_KEY_LENGTH,
.max_keysize = HIFN_3DES_KEY_LENGTH,
.setkey = hifn_setkey,
-@@ -2394,7 +2388,7 @@
+@@ -2394,7 +2388,7 @@ static struct hifn_alg_template hifn_alg
},
},
{
.ablkcipher = {
.min_keysize = HIFN_3DES_KEY_LENGTH,
.max_keysize = HIFN_3DES_KEY_LENGTH,
-@@ -2408,7 +2402,7 @@
+@@ -2408,7 +2402,7 @@ static struct hifn_alg_template hifn_alg
* DES ECB, CBC, CFB and OFB modes.
*/
{
.ablkcipher = {
.min_keysize = HIFN_DES_KEY_LENGTH,
.max_keysize = HIFN_DES_KEY_LENGTH,
-@@ -2418,7 +2412,7 @@
+@@ -2418,7 +2412,7 @@ static struct hifn_alg_template hifn_alg
},
},
{
.ablkcipher = {
.min_keysize = HIFN_DES_KEY_LENGTH,
.max_keysize = HIFN_DES_KEY_LENGTH,
-@@ -2428,8 +2422,9 @@
+@@ -2428,8 +2422,9 @@ static struct hifn_alg_template hifn_alg
},
},
{
.min_keysize = HIFN_DES_KEY_LENGTH,
.max_keysize = HIFN_DES_KEY_LENGTH,
.setkey = hifn_setkey,
-@@ -2438,7 +2433,7 @@
+@@ -2438,7 +2433,7 @@ static struct hifn_alg_template hifn_alg
},
},
{
.ablkcipher = {
.min_keysize = HIFN_DES_KEY_LENGTH,
.max_keysize = HIFN_DES_KEY_LENGTH,
-@@ -2452,7 +2447,7 @@
+@@ -2452,7 +2447,7 @@ static struct hifn_alg_template hifn_alg
* AES ECB, CBC, CFB and OFB modes.
*/
{
.ablkcipher = {
.min_keysize = AES_MIN_KEY_SIZE,
.max_keysize = AES_MAX_KEY_SIZE,
-@@ -2462,8 +2457,9 @@
+@@ -2462,8 +2457,9 @@ static struct hifn_alg_template hifn_alg
},
},
{
.min_keysize = AES_MIN_KEY_SIZE,
.max_keysize = AES_MAX_KEY_SIZE,
.setkey = hifn_setkey,
-@@ -2472,7 +2468,7 @@
+@@ -2472,7 +2468,7 @@ static struct hifn_alg_template hifn_alg
},
},
{
.ablkcipher = {
.min_keysize = AES_MIN_KEY_SIZE,
.max_keysize = AES_MAX_KEY_SIZE,
-@@ -2482,7 +2478,7 @@
+@@ -2482,7 +2478,7 @@ static struct hifn_alg_template hifn_alg
},
},
{
.ablkcipher = {
.min_keysize = AES_MIN_KEY_SIZE,
.max_keysize = AES_MAX_KEY_SIZE,
-@@ -2514,15 +2510,14 @@
+@@ -2514,15 +2510,14 @@ static int hifn_alg_alloc(struct hifn_de
return -ENOMEM;
snprintf(alg->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s", t->name);
+
--- a/drivers/crypto/padlock-aes.c
+++ b/drivers/crypto/padlock-aes.c
-@@ -385,12 +385,12 @@
+@@ -411,12 +411,12 @@ static int __init padlock_init(void)
int ret;
if (!cpu_has_xcrypt) {
--- a/drivers/crypto/padlock-sha.c
+++ b/drivers/crypto/padlock-sha.c
-@@ -254,12 +254,12 @@
+@@ -263,12 +263,12 @@ static int __init padlock_init(void)
int rc = -ENODEV;
if (!cpu_has_phe) {
--- /dev/null
+++ b/include/crypto/hash.h
@@ -0,0 +1,154 @@
+/*
+ * Hash: Hash algorithms under the crypto API
-+ *
++ *
+ * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
-+ * Software Foundation; either version 2 of the License, or (at your option)
++ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
--- /dev/null
+++ b/include/crypto/internal/hash.h
@@ -0,0 +1,78 @@
+/*
+ * Hash algorithms.
-+ *
++ *
+ * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
-+ * Software Foundation; either version 2 of the License, or (at your option)
++ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
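A hedged usage sketch of the asynchronous hash ("ahash") interface these new headers appear to introduce. It assumes the usual ahash entry points (crypto_alloc_ahash(), ahash_request_alloc(), ahash_request_set_crypt(), crypto_ahash_digest(), ...), none of which are shown in this excerpt; demo_ahash_digest() is an invented helper.

#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

static int demo_ahash_digest(const u8 *data, unsigned int len, u8 *out)
{
        struct crypto_ahash *tfm;
        struct ahash_request *req;
        struct scatterlist sg;
        int err;

        tfm = crypto_alloc_ahash("crc32c", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        req = ahash_request_alloc(tfm, GFP_KERNEL);
        if (!req) {
                crypto_free_ahash(tfm);
                return -ENOMEM;
        }

        /* data must be addressable via a scatterlist (not on the stack
         * for DMA-backed implementations). */
        sg_init_one(&sg, data, len);

        /* NULL callback: treat the transform as effectively synchronous. */
        ahash_request_set_callback(req, 0, NULL, NULL);
        ahash_request_set_crypt(req, &sg, out, len);

        err = crypto_ahash_digest(req);  /* async drivers may return -EINPROGRESS */

        ahash_request_free(req);
        crypto_free_ahash(tfm);
        return err;
}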
--- a/include/linux/crypto.h
+++ b/include/linux/crypto.h
#define CRYPTO_ALG_TYPE_BLKCIPHER_MASK 0x0000000c
#define CRYPTO_ALG_LARVAL 0x00000010
-@@ -102,6 +104,7 @@
+@@ -102,6 +104,7 @@ struct crypto_async_request;
struct crypto_aead;
struct crypto_blkcipher;
struct crypto_hash;
struct crypto_tfm;
struct crypto_type;
struct aead_givcrypt_request;
-@@ -131,6 +134,16 @@
+@@ -131,6 +134,16 @@ struct ablkcipher_request {
void *__ctx[] CRYPTO_MINALIGN_ATTR;
};
/**
* struct aead_request - AEAD request
* @base: Common attributes for async crypto requests
-@@ -195,6 +208,17 @@
+@@ -195,6 +208,17 @@ struct ablkcipher_alg {
unsigned int ivsize;
};
struct aead_alg {
int (*setkey)(struct crypto_aead *tfm, const u8 *key,
unsigned int keylen);
-@@ -272,6 +296,7 @@
+@@ -272,6 +296,7 @@ struct compress_alg {
#define cra_cipher cra_u.cipher
#define cra_digest cra_u.digest
#define cra_hash cra_u.hash
#define cra_compress cra_u.compress
struct crypto_alg {
-@@ -298,6 +323,7 @@
+@@ -298,6 +323,7 @@ struct crypto_alg {
struct cipher_alg cipher;
struct digest_alg digest;
struct hash_alg hash;
struct compress_alg compress;
} cra_u;
-@@ -383,6 +409,18 @@
+@@ -383,6 +409,18 @@ struct hash_tfm {
unsigned int digestsize;
};
struct compress_tfm {
int (*cot_compress)(struct crypto_tfm *tfm,
const u8 *src, unsigned int slen,
-@@ -397,6 +435,7 @@
+@@ -397,6 +435,7 @@ struct compress_tfm {
#define crt_blkcipher crt_u.blkcipher
#define crt_cipher crt_u.cipher
#define crt_hash crt_u.hash
#define crt_compress crt_u.compress
struct crypto_tfm {
-@@ -409,6 +448,7 @@
+@@ -409,6 +448,7 @@ struct crypto_tfm {
struct blkcipher_tfm blkcipher;
struct cipher_tfm cipher;
struct hash_tfm hash;