Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
* git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6:
  [HWRNG] omap: Minor updates
  [CRYPTO] kconfig: Ordering cleanup
  [CRYPTO] all: Clean up init()/fini()
  [CRYPTO] padlock-aes: Use generic setkey function
  [CRYPTO] aes: Export generic setkey
  [CRYPTO] api: Make the crypto subsystem fully modular
  [CRYPTO] cts: Add CTS mode required for Kerberos AES support
  [CRYPTO] lrw: Replace all adds to big endians variables with be*_add_cpu
  [CRYPTO] tcrypt: Change the XTEA test vectors
  [CRYPTO] tcrypt: Shrink the tcrypt module
  [CRYPTO] tcrypt: Change the usage of the test vectors
  [CRYPTO] api: Constify function pointer tables
  [CRYPTO] aes-x86-32: Remove unused return code
  [CRYPTO] tcrypt: Shrink speed templates
  [CRYPTO] tcrypt: Group common speed templates
  [CRYPTO] sha512: Rename sha512 to sha512_generic
  [CRYPTO] sha384: Hardware acceleration for s390
  [CRYPTO] sha512: Hardware acceleration for s390
  [CRYPTO] s390: Generic sha_update and sha_final
  [CRYPTO] api: Switch to proc_create()
This commit is contained in: commit 904e0ab54b
44 changed files with 7434 additions and 7120 deletions
@@ -2,8 +2,9 @@
# Cryptographic API
#

obj-$(CONFIG_CRYPTO_SHA1_S390) += sha1_s390.o
obj-$(CONFIG_CRYPTO_SHA256_S390) += sha256_s390.o
obj-$(CONFIG_CRYPTO_SHA1_S390) += sha1_s390.o sha_common.o
obj-$(CONFIG_CRYPTO_SHA256_S390) += sha256_s390.o sha_common.o
obj-$(CONFIG_CRYPTO_SHA512_S390) += sha512_s390.o sha_common.o
obj-$(CONFIG_CRYPTO_DES_S390) += des_s390.o des_check_key.o
obj-$(CONFIG_CRYPTO_AES_S390) += aes_s390.o
obj-$(CONFIG_S390_PRNG) += prng.o
@@ -82,6 +82,7 @@ enum crypt_s390_kimd_func {
    KIMD_QUERY = CRYPT_S390_KIMD | 0,
    KIMD_SHA_1 = CRYPT_S390_KIMD | 1,
    KIMD_SHA_256 = CRYPT_S390_KIMD | 2,
    KIMD_SHA_512 = CRYPT_S390_KIMD | 3,
};

/*

@@ -92,6 +93,7 @@ enum crypt_s390_klmd_func {
    KLMD_QUERY = CRYPT_S390_KLMD | 0,
    KLMD_SHA_1 = CRYPT_S390_KLMD | 1,
    KLMD_SHA_256 = CRYPT_S390_KLMD | 2,
    KLMD_SHA_512 = CRYPT_S390_KLMD | 3,
};

/*
arch/s390/crypto/sha.h — new file (35 lines)
@@ -0,0 +1,35 @@
/*
 * Cryptographic API.
 *
 * s390 generic implementation of the SHA Secure Hash Algorithms.
 *
 * Copyright IBM Corp. 2007
 * Author(s): Jan Glauber (jang@de.ibm.com)
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#ifndef _CRYPTO_ARCH_S390_SHA_H
#define _CRYPTO_ARCH_S390_SHA_H

#include <linux/crypto.h>
#include <crypto/sha.h>

/* must be big enough for the largest SHA variant */
#define SHA_MAX_STATE_SIZE 16
#define SHA_MAX_BLOCK_SIZE SHA512_BLOCK_SIZE

struct s390_sha_ctx {
    u64 count;      /* message length in bytes */
    u32 state[SHA_MAX_STATE_SIZE];
    u8 buf[2 * SHA_MAX_BLOCK_SIZE];
    int func;       /* KIMD function to use */
};

void s390_sha_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len);
void s390_sha_final(struct crypto_tfm *tfm, u8 *out);

#endif
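This header is what lets one generic update/final implementation serve SHA-1, SHA-256, SHA-384 and SHA-512: state[] is sized for the largest digest state, buf[] holds two blocks because the final padding in the code below can spill into a second block, and func selects the KIMD function code. A condensed sketch (not part of the patch; the helper names and signatures are taken from the calls visible elsewhere in this diff, and the function name is made up) of how the per-algorithm drivers below consume it:

/* Illustrative sketch only -- mirrors the drivers in this diff. */
static int example_sha512_probe(struct s390_sha_ctx *ctx, const u8 *data,
                                unsigned int bytes)
{
    int ret;

    /* bail out if the machine has no SHA-512 KIMD function */
    if (!crypt_s390_func_available(KIMD_SHA_512))
        return -EOPNOTSUPP;

    ctx->func = KIMD_SHA_512;
    /* hash whole blocks (bytes assumed block-aligned); KIMD reports bytes consumed */
    ret = crypt_s390_kimd(ctx->func, ctx->state, data, bytes);
    BUG_ON(ret != bytes);
    return 0;
}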
@@ -29,16 +29,11 @@
#include <crypto/sha.h>

#include "crypt_s390.h"

struct s390_sha1_ctx {
    u64 count;      /* message length */
    u32 state[5];
    u8 buf[2 * SHA1_BLOCK_SIZE];
};
#include "sha.h"

static void sha1_init(struct crypto_tfm *tfm)
{
    struct s390_sha1_ctx *sctx = crypto_tfm_ctx(tfm);
    struct s390_sha_ctx *sctx = crypto_tfm_ctx(tfm);

    sctx->state[0] = SHA1_H0;
    sctx->state[1] = SHA1_H1;

@@ -46,79 +41,7 @@ static void sha1_init(struct crypto_tfm *tfm)
    sctx->state[3] = SHA1_H3;
    sctx->state[4] = SHA1_H4;
    sctx->count = 0;
}

static void sha1_update(struct crypto_tfm *tfm, const u8 *data,
                        unsigned int len)
{
    struct s390_sha1_ctx *sctx = crypto_tfm_ctx(tfm);
    unsigned int index;
    int ret;

    /* how much is already in the buffer? */
    index = sctx->count & 0x3f;

    sctx->count += len;

    if (index + len < SHA1_BLOCK_SIZE)
        goto store;

    /* process one stored block */
    if (index) {
        memcpy(sctx->buf + index, data, SHA1_BLOCK_SIZE - index);
        ret = crypt_s390_kimd(KIMD_SHA_1, sctx->state, sctx->buf,
                              SHA1_BLOCK_SIZE);
        BUG_ON(ret != SHA1_BLOCK_SIZE);
        data += SHA1_BLOCK_SIZE - index;
        len -= SHA1_BLOCK_SIZE - index;
    }

    /* process as many blocks as possible */
    if (len >= SHA1_BLOCK_SIZE) {
        ret = crypt_s390_kimd(KIMD_SHA_1, sctx->state, data,
                              len & ~(SHA1_BLOCK_SIZE - 1));
        BUG_ON(ret != (len & ~(SHA1_BLOCK_SIZE - 1)));
        data += ret;
        len -= ret;
    }

store:
    /* anything left? */
    if (len)
        memcpy(sctx->buf + index , data, len);
}

/* Add padding and return the message digest. */
static void sha1_final(struct crypto_tfm *tfm, u8 *out)
{
    struct s390_sha1_ctx *sctx = crypto_tfm_ctx(tfm);
    u64 bits;
    unsigned int index, end;
    int ret;

    /* must perform manual padding */
    index = sctx->count & 0x3f;
    end = (index < 56) ? SHA1_BLOCK_SIZE : (2 * SHA1_BLOCK_SIZE);

    /* start pad with 1 */
    sctx->buf[index] = 0x80;

    /* pad with zeros */
    index++;
    memset(sctx->buf + index, 0x00, end - index - 8);

    /* append message length */
    bits = sctx->count * 8;
    memcpy(sctx->buf + end - 8, &bits, sizeof(bits));

    ret = crypt_s390_kimd(KIMD_SHA_1, sctx->state, sctx->buf, end);
    BUG_ON(ret != end);

    /* copy digest to out */
    memcpy(out, sctx->state, SHA1_DIGEST_SIZE);

    /* wipe context */
    memset(sctx, 0, sizeof *sctx);
    sctx->func = KIMD_SHA_1;
}

static struct crypto_alg alg = {

@@ -127,21 +50,20 @@ static struct crypto_alg alg = {
    .cra_priority = CRYPT_S390_PRIORITY,
    .cra_flags = CRYPTO_ALG_TYPE_DIGEST,
    .cra_blocksize = SHA1_BLOCK_SIZE,
    .cra_ctxsize = sizeof(struct s390_sha1_ctx),
    .cra_ctxsize = sizeof(struct s390_sha_ctx),
    .cra_module = THIS_MODULE,
    .cra_list = LIST_HEAD_INIT(alg.cra_list),
    .cra_u = { .digest = {
    .dia_digestsize = SHA1_DIGEST_SIZE,
    .dia_init = sha1_init,
    .dia_update = sha1_update,
    .dia_final = sha1_final } }
    .dia_update = s390_sha_update,
    .dia_final = s390_sha_final } }
};

static int __init sha1_s390_init(void)
{
    if (!crypt_s390_func_available(KIMD_SHA_1))
        return -EOPNOTSUPP;

    return crypto_register_alg(&alg);
}

@@ -154,6 +76,5 @@ module_init(sha1_s390_init);
module_exit(sha1_s390_fini);

MODULE_ALIAS("sha1");

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA1 Secure Hash Algorithm");
@@ -22,16 +22,11 @@
#include <crypto/sha.h>

#include "crypt_s390.h"

struct s390_sha256_ctx {
    u64 count;      /* message length */
    u32 state[8];
    u8 buf[2 * SHA256_BLOCK_SIZE];
};
#include "sha.h"

static void sha256_init(struct crypto_tfm *tfm)
{
    struct s390_sha256_ctx *sctx = crypto_tfm_ctx(tfm);
    struct s390_sha_ctx *sctx = crypto_tfm_ctx(tfm);

    sctx->state[0] = SHA256_H0;
    sctx->state[1] = SHA256_H1;

@@ -42,79 +37,7 @@ static void sha256_init(struct crypto_tfm *tfm)
    sctx->state[6] = SHA256_H6;
    sctx->state[7] = SHA256_H7;
    sctx->count = 0;
}

static void sha256_update(struct crypto_tfm *tfm, const u8 *data,
                          unsigned int len)
{
    struct s390_sha256_ctx *sctx = crypto_tfm_ctx(tfm);
    unsigned int index;
    int ret;

    /* how much is already in the buffer? */
    index = sctx->count & 0x3f;

    sctx->count += len;

    if ((index + len) < SHA256_BLOCK_SIZE)
        goto store;

    /* process one stored block */
    if (index) {
        memcpy(sctx->buf + index, data, SHA256_BLOCK_SIZE - index);
        ret = crypt_s390_kimd(KIMD_SHA_256, sctx->state, sctx->buf,
                              SHA256_BLOCK_SIZE);
        BUG_ON(ret != SHA256_BLOCK_SIZE);
        data += SHA256_BLOCK_SIZE - index;
        len -= SHA256_BLOCK_SIZE - index;
    }

    /* process as many blocks as possible */
    if (len >= SHA256_BLOCK_SIZE) {
        ret = crypt_s390_kimd(KIMD_SHA_256, sctx->state, data,
                              len & ~(SHA256_BLOCK_SIZE - 1));
        BUG_ON(ret != (len & ~(SHA256_BLOCK_SIZE - 1)));
        data += ret;
        len -= ret;
    }

store:
    /* anything left? */
    if (len)
        memcpy(sctx->buf + index , data, len);
}

/* Add padding and return the message digest */
static void sha256_final(struct crypto_tfm *tfm, u8 *out)
{
    struct s390_sha256_ctx *sctx = crypto_tfm_ctx(tfm);
    u64 bits;
    unsigned int index, end;
    int ret;

    /* must perform manual padding */
    index = sctx->count & 0x3f;
    end = (index < 56) ? SHA256_BLOCK_SIZE : (2 * SHA256_BLOCK_SIZE);

    /* start pad with 1 */
    sctx->buf[index] = 0x80;

    /* pad with zeros */
    index++;
    memset(sctx->buf + index, 0x00, end - index - 8);

    /* append message length */
    bits = sctx->count * 8;
    memcpy(sctx->buf + end - 8, &bits, sizeof(bits));

    ret = crypt_s390_kimd(KIMD_SHA_256, sctx->state, sctx->buf, end);
    BUG_ON(ret != end);

    /* copy digest to out */
    memcpy(out, sctx->state, SHA256_DIGEST_SIZE);

    /* wipe context */
    memset(sctx, 0, sizeof *sctx);
    sctx->func = KIMD_SHA_256;
}

static struct crypto_alg alg = {

@@ -123,14 +46,14 @@ static struct crypto_alg alg = {
    .cra_priority = CRYPT_S390_PRIORITY,
    .cra_flags = CRYPTO_ALG_TYPE_DIGEST,
    .cra_blocksize = SHA256_BLOCK_SIZE,
    .cra_ctxsize = sizeof(struct s390_sha256_ctx),
    .cra_ctxsize = sizeof(struct s390_sha_ctx),
    .cra_module = THIS_MODULE,
    .cra_list = LIST_HEAD_INIT(alg.cra_list),
    .cra_u = { .digest = {
    .dia_digestsize = SHA256_DIGEST_SIZE,
    .dia_init = sha256_init,
    .dia_update = sha256_update,
    .dia_final = sha256_final } }
    .dia_update = s390_sha_update,
    .dia_final = s390_sha_final } }
};

static int sha256_s390_init(void)

@@ -150,6 +73,5 @@ module_init(sha256_s390_init);
module_exit(sha256_s390_fini);

MODULE_ALIAS("sha256");

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA256 Secure Hash Algorithm");
arch/s390/crypto/sha512_s390.c — new file (114 lines)
@@ -0,0 +1,114 @@
/*
 * Cryptographic API.
 *
 * s390 implementation of the SHA512 and SHA384 Secure Hash Algorithm.
 *
 * Copyright IBM Corp. 2007
 * Author(s): Jan Glauber (jang@de.ibm.com)
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#include <linux/init.h>
#include <linux/module.h>
#include <linux/crypto.h>

#include "sha.h"
#include "crypt_s390.h"

static void sha512_init(struct crypto_tfm *tfm)
{
    struct s390_sha_ctx *ctx = crypto_tfm_ctx(tfm);

    *(__u64 *)&ctx->state[0] = 0x6a09e667f3bcc908ULL;
    *(__u64 *)&ctx->state[2] = 0xbb67ae8584caa73bULL;
    *(__u64 *)&ctx->state[4] = 0x3c6ef372fe94f82bULL;
    *(__u64 *)&ctx->state[6] = 0xa54ff53a5f1d36f1ULL;
    *(__u64 *)&ctx->state[8] = 0x510e527fade682d1ULL;
    *(__u64 *)&ctx->state[10] = 0x9b05688c2b3e6c1fULL;
    *(__u64 *)&ctx->state[12] = 0x1f83d9abfb41bd6bULL;
    *(__u64 *)&ctx->state[14] = 0x5be0cd19137e2179ULL;
    ctx->count = 0;
    ctx->func = KIMD_SHA_512;
}

static struct crypto_alg sha512_alg = {
    .cra_name = "sha512",
    .cra_driver_name = "sha512-s390",
    .cra_priority = CRYPT_S390_PRIORITY,
    .cra_flags = CRYPTO_ALG_TYPE_DIGEST,
    .cra_blocksize = SHA512_BLOCK_SIZE,
    .cra_ctxsize = sizeof(struct s390_sha_ctx),
    .cra_module = THIS_MODULE,
    .cra_list = LIST_HEAD_INIT(sha512_alg.cra_list),
    .cra_u = { .digest = {
    .dia_digestsize = SHA512_DIGEST_SIZE,
    .dia_init = sha512_init,
    .dia_update = s390_sha_update,
    .dia_final = s390_sha_final } }
};

MODULE_ALIAS("sha512");

static void sha384_init(struct crypto_tfm *tfm)
{
    struct s390_sha_ctx *ctx = crypto_tfm_ctx(tfm);

    *(__u64 *)&ctx->state[0] = 0xcbbb9d5dc1059ed8ULL;
    *(__u64 *)&ctx->state[2] = 0x629a292a367cd507ULL;
    *(__u64 *)&ctx->state[4] = 0x9159015a3070dd17ULL;
    *(__u64 *)&ctx->state[6] = 0x152fecd8f70e5939ULL;
    *(__u64 *)&ctx->state[8] = 0x67332667ffc00b31ULL;
    *(__u64 *)&ctx->state[10] = 0x8eb44a8768581511ULL;
    *(__u64 *)&ctx->state[12] = 0xdb0c2e0d64f98fa7ULL;
    *(__u64 *)&ctx->state[14] = 0x47b5481dbefa4fa4ULL;
    ctx->count = 0;
    ctx->func = KIMD_SHA_512;
}

static struct crypto_alg sha384_alg = {
    .cra_name = "sha384",
    .cra_driver_name = "sha384-s390",
    .cra_priority = CRYPT_S390_PRIORITY,
    .cra_flags = CRYPTO_ALG_TYPE_DIGEST,
    .cra_blocksize = SHA384_BLOCK_SIZE,
    .cra_ctxsize = sizeof(struct s390_sha_ctx),
    .cra_module = THIS_MODULE,
    .cra_list = LIST_HEAD_INIT(sha384_alg.cra_list),
    .cra_u = { .digest = {
    .dia_digestsize = SHA384_DIGEST_SIZE,
    .dia_init = sha384_init,
    .dia_update = s390_sha_update,
    .dia_final = s390_sha_final } }
};

MODULE_ALIAS("sha384");

static int __init init(void)
{
    int ret;

    if (!crypt_s390_func_available(KIMD_SHA_512))
        return -EOPNOTSUPP;
    if ((ret = crypto_register_alg(&sha512_alg)) < 0)
        goto out;
    if ((ret = crypto_register_alg(&sha384_alg)) < 0)
        crypto_unregister_alg(&sha512_alg);
out:
    return ret;
}

static void __exit fini(void)
{
    crypto_unregister_alg(&sha512_alg);
    crypto_unregister_alg(&sha384_alg);
}

module_init(init);
module_exit(fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA512 and SHA-384 Secure Hash Algorithm");
arch/s390/crypto/sha_common.c — new file (97 lines)
@@ -0,0 +1,97 @@
/*
 * Cryptographic API.
 *
 * s390 generic implementation of the SHA Secure Hash Algorithms.
 *
 * Copyright IBM Corp. 2007
 * Author(s): Jan Glauber (jang@de.ibm.com)
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <linux/crypto.h>
#include "sha.h"
#include "crypt_s390.h"

void s390_sha_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len)
{
    struct s390_sha_ctx *ctx = crypto_tfm_ctx(tfm);
    unsigned int bsize = crypto_tfm_alg_blocksize(tfm);
    unsigned int index;
    int ret;

    /* how much is already in the buffer? */
    index = ctx->count & (bsize - 1);
    ctx->count += len;

    if ((index + len) < bsize)
        goto store;

    /* process one stored block */
    if (index) {
        memcpy(ctx->buf + index, data, bsize - index);
        ret = crypt_s390_kimd(ctx->func, ctx->state, ctx->buf, bsize);
        BUG_ON(ret != bsize);
        data += bsize - index;
        len -= bsize - index;
    }

    /* process as many blocks as possible */
    if (len >= bsize) {
        ret = crypt_s390_kimd(ctx->func, ctx->state, data,
                              len & ~(bsize - 1));
        BUG_ON(ret != (len & ~(bsize - 1)));
        data += ret;
        len -= ret;
    }
store:
    if (len)
        memcpy(ctx->buf + index , data, len);
}
EXPORT_SYMBOL_GPL(s390_sha_update);

void s390_sha_final(struct crypto_tfm *tfm, u8 *out)
{
    struct s390_sha_ctx *ctx = crypto_tfm_ctx(tfm);
    unsigned int bsize = crypto_tfm_alg_blocksize(tfm);
    u64 bits;
    unsigned int index, end, plen;
    int ret;

    /* SHA-512 uses 128 bit padding length */
    plen = (bsize > SHA256_BLOCK_SIZE) ? 16 : 8;

    /* must perform manual padding */
    index = ctx->count & (bsize - 1);
    end = (index < bsize - plen) ? bsize : (2 * bsize);

    /* start pad with 1 */
    ctx->buf[index] = 0x80;
    index++;

    /* pad with zeros */
    memset(ctx->buf + index, 0x00, end - index - 8);

    /*
     * Append message length. Well, SHA-512 wants a 128 bit length value,
     * nevertheless we use u64, should be enough for now...
     */
    bits = ctx->count * 8;
    memcpy(ctx->buf + end - 8, &bits, sizeof(bits));

    ret = crypt_s390_kimd(ctx->func, ctx->state, ctx->buf, end);
    BUG_ON(ret != end);

    /* copy digest to out */
    memcpy(out, ctx->state, crypto_hash_digestsize(crypto_hash_cast(tfm)));
    /* wipe context */
    memset(ctx, 0, sizeof *ctx);
}
EXPORT_SYMBOL_GPL(s390_sha_final);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("s390 SHA cipher common functions");
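The padding bookkeeping in s390_sha_final() is compact: plen is the size of the trailing length field (16 bytes for the SHA-512 family, 8 otherwise), index is how much of the current block is already buffered, and end decides whether the 0x80 byte plus the length field still fit in one block or force a second one; note the code then writes only an 8-byte length even when plen is 16, which its own comment flags as "enough for now". A small stand-alone illustration of that arithmetic (not part of the patch):

#include <stdio.h>

static unsigned int pad_end(unsigned long long count, unsigned int bsize)
{
    unsigned int plen = (bsize > 64) ? 16 : 8;  /* SHA-512 reserves 16 bytes */
    unsigned int index = count & (bsize - 1);   /* bytes already buffered */

    /* one block if the 0x80 byte and the length still fit, else two */
    return (index < bsize - plen) ? bsize : 2 * bsize;
}

int main(void)
{
    printf("SHA-256, 55 buffered bytes -> pad to %u\n", pad_end(55, 64));    /* 64  */
    printf("SHA-256, 56 buffered bytes -> pad to %u\n", pad_end(56, 64));    /* 128 */
    printf("SHA-512, 111 buffered bytes -> pad to %u\n", pad_end(111, 128)); /* 128 */
    printf("SHA-512, 112 buffered bytes -> pad to %u\n", pad_end(112, 128)); /* 256 */
    return 0;
}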
@@ -289,7 +289,6 @@ aes_enc_blk:
    pop %ebx
    mov %r0,(%ebp)
    pop %ebp
    mov $1,%eax
    ret

// AES (Rijndael) Decryption Subroutine

@@ -365,6 +364,4 @@ aes_dec_blk:
    pop %ebx
    mov %r0,(%ebp)
    pop %ebp
    mov $1,%eax
    ret
crypto/Kconfig (698 changed lines)
@@ -13,12 +13,14 @@ source "crypto/async_tx/Kconfig"
# Cryptographic API Configuration
#
menuconfig CRYPTO
    bool "Cryptographic API"
    tristate "Cryptographic API"
    help
      This option provides the core Cryptographic API.

if CRYPTO

comment "Crypto core or helper"

config CRYPTO_ALGAPI
    tristate
    help

@@ -32,15 +34,6 @@ config CRYPTO_BLKCIPHER
    tristate
    select CRYPTO_ALGAPI

config CRYPTO_SEQIV
    tristate "Sequence Number IV Generator"
    select CRYPTO_AEAD
    select CRYPTO_BLKCIPHER
    help
      This IV generator generates an IV based on a sequence number by
      xoring it with a salt. This algorithm is mainly useful for CTR
      and similar modes.

config CRYPTO_HASH
    tristate
    select CRYPTO_ALGAPI
@ -52,6 +45,150 @@ config CRYPTO_MANAGER
|
|||
Create default cryptographic template instantiations such as
|
||||
cbc(aes).
|
||||
|
||||
config CRYPTO_GF128MUL
|
||||
tristate "GF(2^128) multiplication functions (EXPERIMENTAL)"
|
||||
depends on EXPERIMENTAL
|
||||
help
|
||||
Efficient table driven implementation of multiplications in the
|
||||
field GF(2^128). This is needed by some cypher modes. This
|
||||
option will be selected automatically if you select such a
|
||||
cipher mode. Only select this option by hand if you expect to load
|
||||
an external module that requires these functions.
|
||||
|
||||
config CRYPTO_NULL
|
||||
tristate "Null algorithms"
|
||||
select CRYPTO_ALGAPI
|
||||
select CRYPTO_BLKCIPHER
|
||||
help
|
||||
These are 'Null' algorithms, used by IPsec, which do nothing.
|
||||
|
||||
config CRYPTO_CRYPTD
|
||||
tristate "Software async crypto daemon"
|
||||
select CRYPTO_BLKCIPHER
|
||||
select CRYPTO_MANAGER
|
||||
help
|
||||
This is a generic software asynchronous crypto daemon that
|
||||
converts an arbitrary synchronous software crypto algorithm
|
||||
into an asynchronous algorithm that executes in a kernel thread.
|
||||
|
||||
config CRYPTO_AUTHENC
|
||||
tristate "Authenc support"
|
||||
select CRYPTO_AEAD
|
||||
select CRYPTO_BLKCIPHER
|
||||
select CRYPTO_MANAGER
|
||||
select CRYPTO_HASH
|
||||
help
|
||||
Authenc: Combined mode wrapper for IPsec.
|
||||
This is required for IPSec.
|
||||
|
||||
config CRYPTO_TEST
|
||||
tristate "Testing module"
|
||||
depends on m
|
||||
select CRYPTO_ALGAPI
|
||||
select CRYPTO_AEAD
|
||||
select CRYPTO_BLKCIPHER
|
||||
help
|
||||
Quick & dirty crypto test module.
|
||||
|
||||
comment "Authenticated Encryption with Associated Data"
|
||||
|
||||
config CRYPTO_CCM
|
||||
tristate "CCM support"
|
||||
select CRYPTO_CTR
|
||||
select CRYPTO_AEAD
|
||||
help
|
||||
Support for Counter with CBC MAC. Required for IPsec.
|
||||
|
||||
config CRYPTO_GCM
|
||||
tristate "GCM/GMAC support"
|
||||
select CRYPTO_CTR
|
||||
select CRYPTO_AEAD
|
||||
select CRYPTO_GF128MUL
|
||||
help
|
||||
Support for Galois/Counter Mode (GCM) and Galois Message
|
||||
Authentication Code (GMAC). Required for IPSec.
|
||||
|
||||
config CRYPTO_SEQIV
|
||||
tristate "Sequence Number IV Generator"
|
||||
select CRYPTO_AEAD
|
||||
select CRYPTO_BLKCIPHER
|
||||
help
|
||||
This IV generator generates an IV based on a sequence number by
|
||||
xoring it with a salt. This algorithm is mainly useful for CTR
|
||||
|
||||
comment "Block modes"
|
||||
|
||||
config CRYPTO_CBC
|
||||
tristate "CBC support"
|
||||
select CRYPTO_BLKCIPHER
|
||||
select CRYPTO_MANAGER
|
||||
help
|
||||
CBC: Cipher Block Chaining mode
|
||||
This block cipher algorithm is required for IPSec.
|
||||
|
||||
config CRYPTO_CTR
|
||||
tristate "CTR support"
|
||||
select CRYPTO_BLKCIPHER
|
||||
select CRYPTO_SEQIV
|
||||
select CRYPTO_MANAGER
|
||||
help
|
||||
CTR: Counter mode
|
||||
This block cipher algorithm is required for IPSec.
|
||||
|
||||
config CRYPTO_CTS
|
||||
tristate "CTS support"
|
||||
select CRYPTO_BLKCIPHER
|
||||
help
|
||||
CTS: Cipher Text Stealing
|
||||
This is the Cipher Text Stealing mode as described by
|
||||
Section 8 of rfc2040 and referenced by rfc3962.
|
||||
(rfc3962 includes errata information in its Appendix A)
|
||||
This mode is required for Kerberos gss mechanism support
|
||||
for AES encryption.
|
||||
|
||||
config CRYPTO_ECB
|
||||
tristate "ECB support"
|
||||
select CRYPTO_BLKCIPHER
|
||||
select CRYPTO_MANAGER
|
||||
help
|
||||
ECB: Electronic CodeBook mode
|
||||
This is the simplest block cipher algorithm. It simply encrypts
|
||||
the input block by block.
|
||||
|
||||
config CRYPTO_LRW
|
||||
tristate "LRW support (EXPERIMENTAL)"
|
||||
depends on EXPERIMENTAL
|
||||
select CRYPTO_BLKCIPHER
|
||||
select CRYPTO_MANAGER
|
||||
select CRYPTO_GF128MUL
|
||||
help
|
||||
LRW: Liskov Rivest Wagner, a tweakable, non malleable, non movable
|
||||
narrow block cipher mode for dm-crypt. Use it with cipher
|
||||
specification string aes-lrw-benbi, the key must be 256, 320 or 384.
|
||||
The first 128, 192 or 256 bits in the key are used for AES and the
|
||||
rest is used to tie each cipher block to its logical position.
|
||||
|
||||
config CRYPTO_PCBC
|
||||
tristate "PCBC support"
|
||||
select CRYPTO_BLKCIPHER
|
||||
select CRYPTO_MANAGER
|
||||
help
|
||||
PCBC: Propagating Cipher Block Chaining mode
|
||||
This block cipher algorithm is required for RxRPC.
|
||||
|
||||
config CRYPTO_XTS
|
||||
tristate "XTS support (EXPERIMENTAL)"
|
||||
depends on EXPERIMENTAL
|
||||
select CRYPTO_BLKCIPHER
|
||||
select CRYPTO_MANAGER
|
||||
select CRYPTO_GF128MUL
|
||||
help
|
||||
XTS: IEEE1619/D16 narrow block cipher use with aes-xts-plain,
|
||||
key size 256, 384 or 512 bits. This implementation currently
|
||||
can't handle a sectorsize which is not a multiple of 16 bytes.
|
||||
|
||||
comment "Hash modes"
|
||||
|
||||
config CRYPTO_HMAC
|
||||
tristate "HMAC support"
|
||||
select CRYPTO_HASH
|
||||
|
@ -71,12 +208,17 @@ config CRYPTO_XCBC
|
|||
http://csrc.nist.gov/encryption/modes/proposedmodes/
|
||||
xcbc-mac/xcbc-mac-spec.pdf
|
||||
|
||||
config CRYPTO_NULL
|
||||
tristate "Null algorithms"
|
||||
comment "Digest"
|
||||
|
||||
config CRYPTO_CRC32C
|
||||
tristate "CRC32c CRC algorithm"
|
||||
select CRYPTO_ALGAPI
|
||||
select CRYPTO_BLKCIPHER
|
||||
select LIBCRC32C
|
||||
help
|
||||
These are 'Null' algorithms, used by IPsec, which do nothing.
|
||||
Castagnoli, et al Cyclic Redundancy-Check Algorithm. Used
|
||||
by iSCSI for header and data digests and by others.
|
||||
See Castagnoli93. This implementation uses lib/libcrc32c.
|
||||
Module will be crc32c.
|
||||
|
||||
config CRYPTO_MD4
|
||||
tristate "MD4 digest algorithm"
|
||||
|
@ -90,6 +232,15 @@ config CRYPTO_MD5
|
|||
help
|
||||
MD5 message digest algorithm (RFC1321).
|
||||
|
||||
config CRYPTO_MICHAEL_MIC
|
||||
tristate "Michael MIC keyed digest algorithm"
|
||||
select CRYPTO_ALGAPI
|
||||
help
|
||||
Michael MIC is used for message integrity protection in TKIP
|
||||
(IEEE 802.11i). This algorithm is required for TKIP, but it
|
||||
should not be used for other purposes because of the weakness
|
||||
of the algorithm.
|
||||
|
||||
config CRYPTO_SHA1
|
||||
tristate "SHA1 digest algorithm"
|
||||
select CRYPTO_ALGAPI
|
||||
|
@ -101,7 +252,7 @@ config CRYPTO_SHA256
|
|||
select CRYPTO_ALGAPI
|
||||
help
|
||||
SHA256 secure hash standard (DFIPS 180-2).
|
||||
|
||||
|
||||
This version of SHA implements a 256 bit hash with 128 bits of
|
||||
security against collision attacks.
|
||||
|
||||
|
@ -113,25 +264,13 @@ config CRYPTO_SHA512
|
|||
select CRYPTO_ALGAPI
|
||||
help
|
||||
SHA512 secure hash standard (DFIPS 180-2).
|
||||
|
||||
|
||||
This version of SHA implements a 512 bit hash with 256 bits of
|
||||
security against collision attacks.
|
||||
|
||||
This code also includes SHA-384, a 384 bit hash with 192 bits
|
||||
of security against collision attacks.
|
||||
|
||||
config CRYPTO_WP512
|
||||
tristate "Whirlpool digest algorithms"
|
||||
select CRYPTO_ALGAPI
|
||||
help
|
||||
Whirlpool hash algorithm 512, 384 and 256-bit hashes
|
||||
|
||||
Whirlpool-512 is part of the NESSIE cryptographic primitives.
|
||||
Whirlpool will be part of the ISO/IEC 10118-3:2003(E) standard
|
||||
|
||||
See also:
|
||||
<http://planeta.terra.com.br/informatica/paulobarreto/WhirlpoolPage.html>
|
||||
|
||||
config CRYPTO_TGR192
|
||||
tristate "Tiger digest algorithms"
|
||||
select CRYPTO_ALGAPI
|
||||
|
@ -145,208 +284,37 @@ config CRYPTO_TGR192
|
|||
See also:
|
||||
<http://www.cs.technion.ac.il/~biham/Reports/Tiger/>.
|
||||
|
||||
config CRYPTO_GF128MUL
|
||||
tristate "GF(2^128) multiplication functions (EXPERIMENTAL)"
|
||||
depends on EXPERIMENTAL
|
||||
help
|
||||
Efficient table driven implementation of multiplications in the
|
||||
field GF(2^128). This is needed by some cypher modes. This
|
||||
option will be selected automatically if you select such a
|
||||
cipher mode. Only select this option by hand if you expect to load
|
||||
an external module that requires these functions.
|
||||
|
||||
config CRYPTO_ECB
|
||||
tristate "ECB support"
|
||||
select CRYPTO_BLKCIPHER
|
||||
select CRYPTO_MANAGER
|
||||
help
|
||||
ECB: Electronic CodeBook mode
|
||||
This is the simplest block cipher algorithm. It simply encrypts
|
||||
the input block by block.
|
||||
|
||||
config CRYPTO_CBC
|
||||
tristate "CBC support"
|
||||
select CRYPTO_BLKCIPHER
|
||||
select CRYPTO_MANAGER
|
||||
help
|
||||
CBC: Cipher Block Chaining mode
|
||||
This block cipher algorithm is required for IPSec.
|
||||
|
||||
config CRYPTO_PCBC
|
||||
tristate "PCBC support"
|
||||
select CRYPTO_BLKCIPHER
|
||||
select CRYPTO_MANAGER
|
||||
help
|
||||
PCBC: Propagating Cipher Block Chaining mode
|
||||
This block cipher algorithm is required for RxRPC.
|
||||
|
||||
config CRYPTO_LRW
|
||||
tristate "LRW support (EXPERIMENTAL)"
|
||||
depends on EXPERIMENTAL
|
||||
select CRYPTO_BLKCIPHER
|
||||
select CRYPTO_MANAGER
|
||||
select CRYPTO_GF128MUL
|
||||
help
|
||||
LRW: Liskov Rivest Wagner, a tweakable, non malleable, non movable
|
||||
narrow block cipher mode for dm-crypt. Use it with cipher
|
||||
specification string aes-lrw-benbi, the key must be 256, 320 or 384.
|
||||
The first 128, 192 or 256 bits in the key are used for AES and the
|
||||
rest is used to tie each cipher block to its logical position.
|
||||
|
||||
config CRYPTO_XTS
|
||||
tristate "XTS support (EXPERIMENTAL)"
|
||||
depends on EXPERIMENTAL
|
||||
select CRYPTO_BLKCIPHER
|
||||
select CRYPTO_MANAGER
|
||||
select CRYPTO_GF128MUL
|
||||
help
|
||||
XTS: IEEE1619/D16 narrow block cipher use with aes-xts-plain,
|
||||
key size 256, 384 or 512 bits. This implementation currently
|
||||
can't handle a sectorsize which is not a multiple of 16 bytes.
|
||||
|
||||
config CRYPTO_CTR
|
||||
tristate "CTR support"
|
||||
select CRYPTO_BLKCIPHER
|
||||
select CRYPTO_SEQIV
|
||||
select CRYPTO_MANAGER
|
||||
help
|
||||
CTR: Counter mode
|
||||
This block cipher algorithm is required for IPSec.
|
||||
|
||||
config CRYPTO_GCM
|
||||
tristate "GCM/GMAC support"
|
||||
select CRYPTO_CTR
|
||||
select CRYPTO_AEAD
|
||||
select CRYPTO_GF128MUL
|
||||
help
|
||||
Support for Galois/Counter Mode (GCM) and Galois Message
|
||||
Authentication Code (GMAC). Required for IPSec.
|
||||
|
||||
config CRYPTO_CCM
|
||||
tristate "CCM support"
|
||||
select CRYPTO_CTR
|
||||
select CRYPTO_AEAD
|
||||
help
|
||||
Support for Counter with CBC MAC. Required for IPsec.
|
||||
|
||||
config CRYPTO_CRYPTD
|
||||
tristate "Software async crypto daemon"
|
||||
select CRYPTO_BLKCIPHER
|
||||
select CRYPTO_MANAGER
|
||||
help
|
||||
This is a generic software asynchronous crypto daemon that
|
||||
converts an arbitrary synchronous software crypto algorithm
|
||||
into an asynchronous algorithm that executes in a kernel thread.
|
||||
|
||||
config CRYPTO_DES
|
||||
tristate "DES and Triple DES EDE cipher algorithms"
|
||||
config CRYPTO_WP512
|
||||
tristate "Whirlpool digest algorithms"
|
||||
select CRYPTO_ALGAPI
|
||||
help
|
||||
DES cipher algorithm (FIPS 46-2), and Triple DES EDE (FIPS 46-3).
|
||||
Whirlpool hash algorithm 512, 384 and 256-bit hashes
|
||||
|
||||
config CRYPTO_FCRYPT
|
||||
tristate "FCrypt cipher algorithm"
|
||||
select CRYPTO_ALGAPI
|
||||
select CRYPTO_BLKCIPHER
|
||||
help
|
||||
FCrypt algorithm used by RxRPC.
|
||||
|
||||
config CRYPTO_BLOWFISH
|
||||
tristate "Blowfish cipher algorithm"
|
||||
select CRYPTO_ALGAPI
|
||||
help
|
||||
Blowfish cipher algorithm, by Bruce Schneier.
|
||||
|
||||
This is a variable key length cipher which can use keys from 32
|
||||
bits to 448 bits in length. It's fast, simple and specifically
|
||||
designed for use on "large microprocessors".
|
||||
|
||||
See also:
|
||||
<http://www.schneier.com/blowfish.html>
|
||||
|
||||
config CRYPTO_TWOFISH
|
||||
tristate "Twofish cipher algorithm"
|
||||
select CRYPTO_ALGAPI
|
||||
select CRYPTO_TWOFISH_COMMON
|
||||
help
|
||||
Twofish cipher algorithm.
|
||||
|
||||
Twofish was submitted as an AES (Advanced Encryption Standard)
|
||||
candidate cipher by researchers at CounterPane Systems. It is a
|
||||
16 round block cipher supporting key sizes of 128, 192, and 256
|
||||
bits.
|
||||
|
||||
See also:
|
||||
<http://www.schneier.com/twofish.html>
|
||||
|
||||
config CRYPTO_TWOFISH_COMMON
|
||||
tristate
|
||||
help
|
||||
Common parts of the Twofish cipher algorithm shared by the
|
||||
generic c and the assembler implementations.
|
||||
|
||||
config CRYPTO_TWOFISH_586
|
||||
tristate "Twofish cipher algorithms (i586)"
|
||||
depends on (X86 || UML_X86) && !64BIT
|
||||
select CRYPTO_ALGAPI
|
||||
select CRYPTO_TWOFISH_COMMON
|
||||
help
|
||||
Twofish cipher algorithm.
|
||||
|
||||
Twofish was submitted as an AES (Advanced Encryption Standard)
|
||||
candidate cipher by researchers at CounterPane Systems. It is a
|
||||
16 round block cipher supporting key sizes of 128, 192, and 256
|
||||
bits.
|
||||
Whirlpool-512 is part of the NESSIE cryptographic primitives.
|
||||
Whirlpool will be part of the ISO/IEC 10118-3:2003(E) standard
|
||||
|
||||
See also:
|
||||
<http://www.schneier.com/twofish.html>
|
||||
<http://planeta.terra.com.br/informatica/paulobarreto/WhirlpoolPage.html>
|
||||
|
||||
config CRYPTO_TWOFISH_X86_64
|
||||
tristate "Twofish cipher algorithm (x86_64)"
|
||||
depends on (X86 || UML_X86) && 64BIT
|
||||
select CRYPTO_ALGAPI
|
||||
select CRYPTO_TWOFISH_COMMON
|
||||
help
|
||||
Twofish cipher algorithm (x86_64).
|
||||
|
||||
Twofish was submitted as an AES (Advanced Encryption Standard)
|
||||
candidate cipher by researchers at CounterPane Systems. It is a
|
||||
16 round block cipher supporting key sizes of 128, 192, and 256
|
||||
bits.
|
||||
|
||||
See also:
|
||||
<http://www.schneier.com/twofish.html>
|
||||
|
||||
config CRYPTO_SERPENT
|
||||
tristate "Serpent cipher algorithm"
|
||||
select CRYPTO_ALGAPI
|
||||
help
|
||||
Serpent cipher algorithm, by Anderson, Biham & Knudsen.
|
||||
|
||||
Keys are allowed to be from 0 to 256 bits in length, in steps
|
||||
of 8 bits. Also includes the 'Tnepres' algorithm, a reversed
|
||||
variant of Serpent for compatibility with old kerneli.org code.
|
||||
|
||||
See also:
|
||||
<http://www.cl.cam.ac.uk/~rja14/serpent.html>
|
||||
comment "Ciphers"
|
||||
|
||||
config CRYPTO_AES
|
||||
tristate "AES cipher algorithms"
|
||||
select CRYPTO_ALGAPI
|
||||
help
|
||||
AES cipher algorithms (FIPS-197). AES uses the Rijndael
|
||||
AES cipher algorithms (FIPS-197). AES uses the Rijndael
|
||||
algorithm.
|
||||
|
||||
Rijndael appears to be consistently a very good performer in
|
||||
both hardware and software across a wide range of computing
|
||||
environments regardless of its use in feedback or non-feedback
|
||||
modes. Its key setup time is excellent, and its key agility is
|
||||
good. Rijndael's very low memory requirements make it very well
|
||||
suited for restricted-space environments, in which it also
|
||||
demonstrates excellent performance. Rijndael's operations are
|
||||
among the easiest to defend against power and timing attacks.
|
||||
both hardware and software across a wide range of computing
|
||||
environments regardless of its use in feedback or non-feedback
|
||||
modes. Its key setup time is excellent, and its key agility is
|
||||
good. Rijndael's very low memory requirements make it very well
|
||||
suited for restricted-space environments, in which it also
|
||||
demonstrates excellent performance. Rijndael's operations are
|
||||
among the easiest to defend against power and timing attacks.
|
||||
|
||||
The AES specifies three key sizes: 128, 192 and 256 bits
|
||||
The AES specifies three key sizes: 128, 192 and 256 bits
|
||||
|
||||
See <http://csrc.nist.gov/CryptoToolkit/aes/> for more information.
|
||||
|
||||
|
@ -356,19 +324,19 @@ config CRYPTO_AES_586
|
|||
select CRYPTO_ALGAPI
|
||||
select CRYPTO_AES
|
||||
help
|
||||
AES cipher algorithms (FIPS-197). AES uses the Rijndael
|
||||
AES cipher algorithms (FIPS-197). AES uses the Rijndael
|
||||
algorithm.
|
||||
|
||||
Rijndael appears to be consistently a very good performer in
|
||||
both hardware and software across a wide range of computing
|
||||
environments regardless of its use in feedback or non-feedback
|
||||
modes. Its key setup time is excellent, and its key agility is
|
||||
good. Rijndael's very low memory requirements make it very well
|
||||
suited for restricted-space environments, in which it also
|
||||
demonstrates excellent performance. Rijndael's operations are
|
||||
among the easiest to defend against power and timing attacks.
|
||||
both hardware and software across a wide range of computing
|
||||
environments regardless of its use in feedback or non-feedback
|
||||
modes. Its key setup time is excellent, and its key agility is
|
||||
good. Rijndael's very low memory requirements make it very well
|
||||
suited for restricted-space environments, in which it also
|
||||
demonstrates excellent performance. Rijndael's operations are
|
||||
among the easiest to defend against power and timing attacks.
|
||||
|
||||
The AES specifies three key sizes: 128, 192 and 256 bits
|
||||
The AES specifies three key sizes: 128, 192 and 256 bits
|
||||
|
||||
See <http://csrc.nist.gov/encryption/aes/> for more information.
|
||||
|
||||
|
@ -378,22 +346,75 @@ config CRYPTO_AES_X86_64
|
|||
select CRYPTO_ALGAPI
|
||||
select CRYPTO_AES
|
||||
help
|
||||
AES cipher algorithms (FIPS-197). AES uses the Rijndael
|
||||
AES cipher algorithms (FIPS-197). AES uses the Rijndael
|
||||
algorithm.
|
||||
|
||||
Rijndael appears to be consistently a very good performer in
|
||||
both hardware and software across a wide range of computing
|
||||
environments regardless of its use in feedback or non-feedback
|
||||
modes. Its key setup time is excellent, and its key agility is
|
||||
good. Rijndael's very low memory requirements make it very well
|
||||
suited for restricted-space environments, in which it also
|
||||
demonstrates excellent performance. Rijndael's operations are
|
||||
among the easiest to defend against power and timing attacks.
|
||||
both hardware and software across a wide range of computing
|
||||
environments regardless of its use in feedback or non-feedback
|
||||
modes. Its key setup time is excellent, and its key agility is
|
||||
good. Rijndael's very low memory requirements make it very well
|
||||
suited for restricted-space environments, in which it also
|
||||
demonstrates excellent performance. Rijndael's operations are
|
||||
among the easiest to defend against power and timing attacks.
|
||||
|
||||
The AES specifies three key sizes: 128, 192 and 256 bits
|
||||
The AES specifies three key sizes: 128, 192 and 256 bits
|
||||
|
||||
See <http://csrc.nist.gov/encryption/aes/> for more information.
|
||||
|
||||
config CRYPTO_ANUBIS
|
||||
tristate "Anubis cipher algorithm"
|
||||
select CRYPTO_ALGAPI
|
||||
help
|
||||
Anubis cipher algorithm.
|
||||
|
||||
Anubis is a variable key length cipher which can use keys from
|
||||
128 bits to 320 bits in length. It was evaluated as a entrant
|
||||
in the NESSIE competition.
|
||||
|
||||
See also:
|
||||
<https://www.cosic.esat.kuleuven.ac.be/nessie/reports/>
|
||||
<http://planeta.terra.com.br/informatica/paulobarreto/AnubisPage.html>
|
||||
|
||||
config CRYPTO_ARC4
|
||||
tristate "ARC4 cipher algorithm"
|
||||
select CRYPTO_ALGAPI
|
||||
help
|
||||
ARC4 cipher algorithm.
|
||||
|
||||
ARC4 is a stream cipher using keys ranging from 8 bits to 2048
|
||||
bits in length. This algorithm is required for driver-based
|
||||
WEP, but it should not be for other purposes because of the
|
||||
weakness of the algorithm.
|
||||
|
||||
config CRYPTO_BLOWFISH
|
||||
tristate "Blowfish cipher algorithm"
|
||||
select CRYPTO_ALGAPI
|
||||
help
|
||||
Blowfish cipher algorithm, by Bruce Schneier.
|
||||
|
||||
This is a variable key length cipher which can use keys from 32
|
||||
bits to 448 bits in length. It's fast, simple and specifically
|
||||
designed for use on "large microprocessors".
|
||||
|
||||
See also:
|
||||
<http://www.schneier.com/blowfish.html>
|
||||
|
||||
config CRYPTO_CAMELLIA
|
||||
tristate "Camellia cipher algorithms"
|
||||
depends on CRYPTO
|
||||
select CRYPTO_ALGAPI
|
||||
help
|
||||
Camellia cipher algorithms module.
|
||||
|
||||
Camellia is a symmetric key block cipher developed jointly
|
||||
at NTT and Mitsubishi Electric Corporation.
|
||||
|
||||
The Camellia specifies three key sizes: 128, 192 and 256 bits.
|
||||
|
||||
See also:
|
||||
<https://info.isl.ntt.co.jp/crypt/eng/camellia/index_s.html>
|
||||
|
||||
config CRYPTO_CAST5
|
||||
tristate "CAST5 (CAST-128) cipher algorithm"
|
||||
select CRYPTO_ALGAPI
|
||||
|
@ -408,33 +429,18 @@ config CRYPTO_CAST6
|
|||
The CAST6 encryption algorithm (synonymous with CAST-256) is
|
||||
described in RFC2612.
|
||||
|
||||
config CRYPTO_TEA
|
||||
tristate "TEA, XTEA and XETA cipher algorithms"
|
||||
config CRYPTO_DES
|
||||
tristate "DES and Triple DES EDE cipher algorithms"
|
||||
select CRYPTO_ALGAPI
|
||||
help
|
||||
TEA cipher algorithm.
|
||||
DES cipher algorithm (FIPS 46-2), and Triple DES EDE (FIPS 46-3).
|
||||
|
||||
Tiny Encryption Algorithm is a simple cipher that uses
|
||||
many rounds for security. It is very fast and uses
|
||||
little memory.
|
||||
|
||||
Xtendend Tiny Encryption Algorithm is a modification to
|
||||
the TEA algorithm to address a potential key weakness
|
||||
in the TEA algorithm.
|
||||
|
||||
Xtendend Encryption Tiny Algorithm is a mis-implementation
|
||||
of the XTEA algorithm for compatibility purposes.
|
||||
|
||||
config CRYPTO_ARC4
|
||||
tristate "ARC4 cipher algorithm"
|
||||
config CRYPTO_FCRYPT
|
||||
tristate "FCrypt cipher algorithm"
|
||||
select CRYPTO_ALGAPI
|
||||
select CRYPTO_BLKCIPHER
|
||||
help
|
||||
ARC4 cipher algorithm.
|
||||
|
||||
ARC4 is a stream cipher using keys ranging from 8 bits to 2048
|
||||
bits in length. This algorithm is required for driver-based
|
||||
WEP, but it should not be for other purposes because of the
|
||||
weakness of the algorithm.
|
||||
FCrypt algorithm used by RxRPC.
|
||||
|
||||
config CRYPTO_KHAZAD
|
||||
tristate "Khazad cipher algorithm"
|
||||
|
@ -449,34 +455,6 @@ config CRYPTO_KHAZAD
|
|||
See also:
|
||||
<http://planeta.terra.com.br/informatica/paulobarreto/KhazadPage.html>
|
||||
|
||||
config CRYPTO_ANUBIS
|
||||
tristate "Anubis cipher algorithm"
|
||||
select CRYPTO_ALGAPI
|
||||
help
|
||||
Anubis cipher algorithm.
|
||||
|
||||
Anubis is a variable key length cipher which can use keys from
|
||||
128 bits to 320 bits in length. It was evaluated as a entrant
|
||||
in the NESSIE competition.
|
||||
|
||||
See also:
|
||||
<https://www.cosic.esat.kuleuven.ac.be/nessie/reports/>
|
||||
<http://planeta.terra.com.br/informatica/paulobarreto/AnubisPage.html>
|
||||
|
||||
config CRYPTO_SEED
|
||||
tristate "SEED cipher algorithm"
|
||||
select CRYPTO_ALGAPI
|
||||
help
|
||||
SEED cipher algorithm (RFC4269).
|
||||
|
||||
SEED is a 128-bit symmetric key block cipher that has been
|
||||
developed by KISA (Korea Information Security Agency) as a
|
||||
national standard encryption algorithm of the Republic of Korea.
|
||||
It is a 16 round block cipher with the key size of 128 bit.
|
||||
|
||||
See also:
|
||||
<http://www.kisa.or.kr/kisa/seed/jsp/seed_eng.jsp>
|
||||
|
||||
config CRYPTO_SALSA20
|
||||
tristate "Salsa20 stream cipher algorithm (EXPERIMENTAL)"
|
||||
depends on EXPERIMENTAL
|
||||
|
@ -518,6 +496,105 @@ config CRYPTO_SALSA20_X86_64
|
|||
The Salsa20 stream cipher algorithm is designed by Daniel J.
|
||||
Bernstein <djb@cr.yp.to>. See <http://cr.yp.to/snuffle.html>
|
||||
|
||||
config CRYPTO_SEED
|
||||
tristate "SEED cipher algorithm"
|
||||
select CRYPTO_ALGAPI
|
||||
help
|
||||
SEED cipher algorithm (RFC4269).
|
||||
|
||||
SEED is a 128-bit symmetric key block cipher that has been
|
||||
developed by KISA (Korea Information Security Agency) as a
|
||||
national standard encryption algorithm of the Republic of Korea.
|
||||
It is a 16 round block cipher with the key size of 128 bit.
|
||||
|
||||
See also:
|
||||
<http://www.kisa.or.kr/kisa/seed/jsp/seed_eng.jsp>
|
||||
|
||||
config CRYPTO_SERPENT
|
||||
tristate "Serpent cipher algorithm"
|
||||
select CRYPTO_ALGAPI
|
||||
help
|
||||
Serpent cipher algorithm, by Anderson, Biham & Knudsen.
|
||||
|
||||
Keys are allowed to be from 0 to 256 bits in length, in steps
|
||||
of 8 bits. Also includes the 'Tnepres' algorithm, a reversed
|
||||
variant of Serpent for compatibility with old kerneli.org code.
|
||||
|
||||
See also:
|
||||
<http://www.cl.cam.ac.uk/~rja14/serpent.html>
|
||||
|
||||
config CRYPTO_TEA
|
||||
tristate "TEA, XTEA and XETA cipher algorithms"
|
||||
select CRYPTO_ALGAPI
|
||||
help
|
||||
TEA cipher algorithm.
|
||||
|
||||
Tiny Encryption Algorithm is a simple cipher that uses
|
||||
many rounds for security. It is very fast and uses
|
||||
little memory.
|
||||
|
||||
Xtendend Tiny Encryption Algorithm is a modification to
|
||||
the TEA algorithm to address a potential key weakness
|
||||
in the TEA algorithm.
|
||||
|
||||
Xtendend Encryption Tiny Algorithm is a mis-implementation
|
||||
of the XTEA algorithm for compatibility purposes.
|
||||
|
||||
config CRYPTO_TWOFISH
|
||||
tristate "Twofish cipher algorithm"
|
||||
select CRYPTO_ALGAPI
|
||||
select CRYPTO_TWOFISH_COMMON
|
||||
help
|
||||
Twofish cipher algorithm.
|
||||
|
||||
Twofish was submitted as an AES (Advanced Encryption Standard)
|
||||
candidate cipher by researchers at CounterPane Systems. It is a
|
||||
16 round block cipher supporting key sizes of 128, 192, and 256
|
||||
bits.
|
||||
|
||||
See also:
|
||||
<http://www.schneier.com/twofish.html>
|
||||
|
||||
config CRYPTO_TWOFISH_COMMON
|
||||
tristate
|
||||
help
|
||||
Common parts of the Twofish cipher algorithm shared by the
|
||||
generic c and the assembler implementations.
|
||||
|
||||
config CRYPTO_TWOFISH_586
|
||||
tristate "Twofish cipher algorithms (i586)"
|
||||
depends on (X86 || UML_X86) && !64BIT
|
||||
select CRYPTO_ALGAPI
|
||||
select CRYPTO_TWOFISH_COMMON
|
||||
help
|
||||
Twofish cipher algorithm.
|
||||
|
||||
Twofish was submitted as an AES (Advanced Encryption Standard)
|
||||
candidate cipher by researchers at CounterPane Systems. It is a
|
||||
16 round block cipher supporting key sizes of 128, 192, and 256
|
||||
bits.
|
||||
|
||||
See also:
|
||||
<http://www.schneier.com/twofish.html>
|
||||
|
||||
config CRYPTO_TWOFISH_X86_64
|
||||
tristate "Twofish cipher algorithm (x86_64)"
|
||||
depends on (X86 || UML_X86) && 64BIT
|
||||
select CRYPTO_ALGAPI
|
||||
select CRYPTO_TWOFISH_COMMON
|
||||
help
|
||||
Twofish cipher algorithm (x86_64).
|
||||
|
||||
Twofish was submitted as an AES (Advanced Encryption Standard)
|
||||
candidate cipher by researchers at CounterPane Systems. It is a
|
||||
16 round block cipher supporting key sizes of 128, 192, and 256
|
||||
bits.
|
||||
|
||||
See also:
|
||||
<http://www.schneier.com/twofish.html>
|
||||
|
||||
comment "Compression"
|
||||
|
||||
config CRYPTO_DEFLATE
|
||||
tristate "Deflate compression algorithm"
|
||||
select CRYPTO_ALGAPI
|
||||
|
@ -526,62 +603,9 @@ config CRYPTO_DEFLATE
|
|||
help
|
||||
This is the Deflate algorithm (RFC1951), specified for use in
|
||||
IPSec with the IPCOMP protocol (RFC3173, RFC2394).
|
||||
|
||||
|
||||
You will most probably want this if using IPSec.
|
||||
|
||||
config CRYPTO_MICHAEL_MIC
|
||||
tristate "Michael MIC keyed digest algorithm"
|
||||
select CRYPTO_ALGAPI
|
||||
help
|
||||
Michael MIC is used for message integrity protection in TKIP
|
||||
(IEEE 802.11i). This algorithm is required for TKIP, but it
|
||||
should not be used for other purposes because of the weakness
|
||||
of the algorithm.
|
||||
|
||||
config CRYPTO_CRC32C
|
||||
tristate "CRC32c CRC algorithm"
|
||||
select CRYPTO_ALGAPI
|
||||
select LIBCRC32C
|
||||
help
|
||||
Castagnoli, et al Cyclic Redundancy-Check Algorithm. Used
|
||||
by iSCSI for header and data digests and by others.
|
||||
See Castagnoli93. This implementation uses lib/libcrc32c.
|
||||
Module will be crc32c.
|
||||
|
||||
config CRYPTO_CAMELLIA
|
||||
tristate "Camellia cipher algorithms"
|
||||
depends on CRYPTO
|
||||
select CRYPTO_ALGAPI
|
||||
help
|
||||
Camellia cipher algorithms module.
|
||||
|
||||
Camellia is a symmetric key block cipher developed jointly
|
||||
at NTT and Mitsubishi Electric Corporation.
|
||||
|
||||
The Camellia specifies three key sizes: 128, 192 and 256 bits.
|
||||
|
||||
See also:
|
||||
<https://info.isl.ntt.co.jp/crypt/eng/camellia/index_s.html>
|
||||
|
||||
config CRYPTO_TEST
|
||||
tristate "Testing module"
|
||||
depends on m
|
||||
select CRYPTO_ALGAPI
|
||||
select CRYPTO_AEAD
|
||||
select CRYPTO_BLKCIPHER
|
||||
help
|
||||
Quick & dirty crypto test module.
|
||||
|
||||
config CRYPTO_AUTHENC
|
||||
tristate "Authenc support"
|
||||
select CRYPTO_AEAD
|
||||
select CRYPTO_BLKCIPHER
|
||||
select CRYPTO_MANAGER
|
||||
select CRYPTO_HASH
|
||||
help
|
||||
Authenc: Combined mode wrapper for IPsec.
|
||||
This is required for IPSec.
|
||||
|
||||
config CRYPTO_LZO
|
||||
tristate "LZO compression algorithm"
|
||||
select CRYPTO_ALGAPI
|
||||
|
@@ -2,7 +2,8 @@
# Cryptographic API
#

obj-$(CONFIG_CRYPTO) += api.o cipher.o digest.o compress.o
obj-$(CONFIG_CRYPTO) += crypto.o
crypto-objs := api.o cipher.o digest.o compress.o

crypto_algapi-$(CONFIG_PROC_FS) += proc.o
crypto_algapi-objs := algapi.o scatterwalk.o $(crypto_algapi-y)

@@ -28,13 +29,14 @@ obj-$(CONFIG_CRYPTO_MD4) += md4.o
obj-$(CONFIG_CRYPTO_MD5) += md5.o
obj-$(CONFIG_CRYPTO_SHA1) += sha1_generic.o
obj-$(CONFIG_CRYPTO_SHA256) += sha256_generic.o
obj-$(CONFIG_CRYPTO_SHA512) += sha512.o
obj-$(CONFIG_CRYPTO_SHA512) += sha512_generic.o
obj-$(CONFIG_CRYPTO_WP512) += wp512.o
obj-$(CONFIG_CRYPTO_TGR192) += tgr192.o
obj-$(CONFIG_CRYPTO_GF128MUL) += gf128mul.o
obj-$(CONFIG_CRYPTO_ECB) += ecb.o
obj-$(CONFIG_CRYPTO_CBC) += cbc.o
obj-$(CONFIG_CRYPTO_PCBC) += pcbc.o
obj-$(CONFIG_CRYPTO_CTS) += cts.o
obj-$(CONFIG_CRYPTO_LRW) += lrw.o
obj-$(CONFIG_CRYPTO_XTS) += xts.o
obj-$(CONFIG_CRYPTO_CTR) += ctr.o
@@ -229,18 +229,29 @@ static void __init gen_tabs(void)
    ctx->key_enc[8 * i + 15] = t; \
} while (0)

int crypto_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
/**
 * crypto_aes_expand_key - Expands the AES key as described in FIPS-197
 * @ctx: The location where the computed key will be stored.
 * @in_key: The supplied key.
 * @key_len: The length of the supplied key.
 *
 * Returns 0 on success. The function fails only if an invalid key size (or
 * pointer) is supplied.
 * The expanded key size is 240 bytes (max of 14 rounds with a unique 16 bytes
 * key schedule plus a 16 bytes key which is used before the first round).
 * The decryption key is prepared for the "Equivalent Inverse Cipher" as
 * described in FIPS-197. The first slot (16 bytes) of each key (enc or dec) is
 * for the initial combination, the second slot for the first round and so on.
 */
int crypto_aes_expand_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
                          unsigned int key_len)
{
    struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
    const __le32 *key = (const __le32 *)in_key;
    u32 *flags = &tfm->crt_flags;
    u32 i, t, u, v, w, j;

    if (key_len % 8) {
        *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
    if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
        key_len != AES_KEYSIZE_256)
        return -EINVAL;
    }

    ctx->key_length = key_len;

@@ -250,20 +261,20 @@ int crypto_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
    ctx->key_dec[key_len + 27] = ctx->key_enc[3] = le32_to_cpu(key[3]);

    switch (key_len) {
    case 16:
    case AES_KEYSIZE_128:
        t = ctx->key_enc[3];
        for (i = 0; i < 10; ++i)
            loop4(i);
        break;

    case 24:
    case AES_KEYSIZE_192:
        ctx->key_enc[4] = le32_to_cpu(key[4]);
        t = ctx->key_enc[5] = le32_to_cpu(key[5]);
        for (i = 0; i < 8; ++i)
            loop6(i);
        break;

    case 32:
    case AES_KEYSIZE_256:
        ctx->key_enc[4] = le32_to_cpu(key[4]);
        ctx->key_enc[5] = le32_to_cpu(key[5]);
        ctx->key_enc[6] = le32_to_cpu(key[6]);

@@ -284,6 +295,33 @@ int crypto_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
    }
    return 0;
}
EXPORT_SYMBOL_GPL(crypto_aes_expand_key);

/**
 * crypto_aes_set_key - Set the AES key.
 * @tfm: The %crypto_tfm that is used in the context.
 * @in_key: The input key.
 * @key_len: The size of the key.
 *
 * Returns 0 on success, on failure the %CRYPTO_TFM_RES_BAD_KEY_LEN flag in tfm
 * is set. The function uses crypto_aes_expand_key() to expand the key.
 * &crypto_aes_ctx _must_ be the private data embedded in @tfm which is
 * retrieved with crypto_tfm_ctx().
 */
int crypto_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                       unsigned int key_len)
{
    struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
    u32 *flags = &tfm->crt_flags;
    int ret;

    ret = crypto_aes_expand_key(ctx, in_key, key_len);
    if (!ret)
        return 0;

    *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
    return -EINVAL;
}
EXPORT_SYMBOL_GPL(crypto_aes_set_key);

/* encrypt a block of text */
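The split documented above — crypto_aes_expand_key() doing the pure key-schedule work and crypto_aes_set_key() layering the tfm error-flag handling on top — is what lets other AES implementations reuse the generic schedule (the merge log's "padlock-aes: Use generic setkey function"). A hedged sketch of a driver delegating to the exported helper; the driver context layout and names here are hypothetical, only the helper's signature and error behaviour come from this diff:

/* Hypothetical driver glue, for illustration only. */
struct example_aes_ctx {
    struct crypto_aes_ctx generic;  /* 240-byte enc and dec key schedules */
};

static int example_aes_setkey(struct crypto_tfm *tfm, const u8 *in_key,
                              unsigned int key_len)
{
    struct example_aes_ctx *ctx = crypto_tfm_ctx(tfm);
    int err;

    /* let the generic code validate the key size and expand the schedule */
    err = crypto_aes_expand_key(&ctx->generic, in_key, key_len);
    if (err)
        tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
    return err;
}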
@@ -687,7 +687,7 @@ static struct crypto_alg anubis_alg = {
    .cia_decrypt = anubis_decrypt } }
};

static int __init init(void)
static int __init anubis_mod_init(void)
{
    int ret = 0;

@@ -695,13 +695,13 @@ static int __init init(void)
    return ret;
}

static void __exit fini(void)
static void __exit anubis_mod_fini(void)
{
    crypto_unregister_alg(&anubis_alg);
}

module_init(init);
module_exit(fini);
module_init(anubis_mod_init);
module_exit(anubis_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Anubis Cryptographic Algorithm");
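This file shows the shape of the "Clean up init()/fini()" change repeated in the files that follow: each module's entry points get unique, module-prefixed names (presumably so the symbols stay unambiguous in backtraces and when several of these files are built into one image) while the bodies are untouched. Condensed, the pattern is the following sketch, where 'alg' stands for the file's own crypto_alg as in the diffs above:

static int __init example_mod_init(void)
{
    return crypto_register_alg(&alg);   /* register this file's algorithm */
}

static void __exit example_mod_fini(void)
{
    crypto_unregister_alg(&alg);
}

module_init(example_mod_init);
module_exit(example_mod_fini);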
@@ -445,3 +445,6 @@ int crypto_has_alg(const char *name, u32 type, u32 mask)
    return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);

MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");
@@ -465,18 +465,18 @@ static struct crypto_alg alg = {
    .cia_decrypt = bf_decrypt } }
};

static int __init init(void)
static int __init blowfish_mod_init(void)
{
    return crypto_register_alg(&alg);
}

static void __exit fini(void)
static void __exit blowfish_mod_fini(void)
{
    crypto_unregister_alg(&alg);
}

module_init(init);
module_exit(fini);
module_init(blowfish_mod_init);
module_exit(blowfish_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Blowfish Cipher Algorithm");
@@ -817,18 +817,18 @@ static struct crypto_alg alg = {
    }
};

static int __init init(void)
static int __init cast5_mod_init(void)
{
    return crypto_register_alg(&alg);
}

static void __exit fini(void)
static void __exit cast5_mod_fini(void)
{
    crypto_unregister_alg(&alg);
}

module_init(init);
module_exit(fini);
module_init(cast5_mod_init);
module_exit(cast5_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Cast5 Cipher Algorithm");
@@ -528,18 +528,18 @@ static struct crypto_alg alg = {
    }
};

static int __init init(void)
static int __init cast6_mod_init(void)
{
    return crypto_register_alg(&alg);
}

static void __exit fini(void)
static void __exit cast6_mod_fini(void)
{
    crypto_unregister_alg(&alg);
}

module_init(init);
module_exit(fini);
module_init(cast6_mod_init);
module_exit(cast6_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Cast6 Cipher Algorithm");
@@ -98,18 +98,18 @@ static struct crypto_alg alg = {
    }
};

static int __init init(void)
static int __init crc32c_mod_init(void)
{
    return crypto_register_alg(&alg);
}

static void __exit fini(void)
static void __exit crc32c_mod_fini(void)
{
    crypto_unregister_alg(&alg);
}

module_init(init);
module_exit(fini);
module_init(crc32c_mod_init);
module_exit(crc32c_mod_fini);

MODULE_AUTHOR("Clay Haapala <chaapala@cisco.com>");
MODULE_DESCRIPTION("CRC32c (Castagnoli) calculations wrapper for lib/crc32c");
|
@ -142,7 +142,7 @@ MODULE_ALIAS("compress_null");
|
|||
MODULE_ALIAS("digest_null");
|
||||
MODULE_ALIAS("cipher_null");
|
||||
|
||||
static int __init init(void)
|
||||
static int __init crypto_null_mod_init(void)
|
||||
{
|
||||
int ret = 0;
|
||||
|
||||
|
@ -174,7 +174,7 @@ static int __init init(void)
|
|||
goto out;
|
||||
}
|
||||
|
||||
static void __exit fini(void)
|
||||
static void __exit crypto_null_mod_fini(void)
|
||||
{
|
||||
crypto_unregister_alg(&compress_null);
|
||||
crypto_unregister_alg(&digest_null);
|
||||
|
@ -182,8 +182,8 @@ static void __exit fini(void)
|
|||
crypto_unregister_alg(&cipher_null);
|
||||
}
|
||||
|
||||
module_init(init);
|
||||
module_exit(fini);
|
||||
module_init(crypto_null_mod_init);
|
||||
module_exit(crypto_null_mod_fini);
|
||||
|
||||
MODULE_LICENSE("GPL");
|
||||
MODULE_DESCRIPTION("Null Cryptographic Algorithms");
|
||||
|
|
347	crypto/cts.c	Normal file
@ -0,0 +1,347 @@
|
|||
/*
|
||||
* CTS: Cipher Text Stealing mode
|
||||
*
|
||||
* COPYRIGHT (c) 2008
|
||||
* The Regents of the University of Michigan
|
||||
* ALL RIGHTS RESERVED
|
||||
*
|
||||
* Permission is granted to use, copy, create derivative works
|
||||
* and redistribute this software and such derivative works
|
||||
* for any purpose, so long as the name of The University of
|
||||
* Michigan is not used in any advertising or publicity
|
||||
* pertaining to the use of distribution of this software
|
||||
* without specific, written prior authorization. If the
|
||||
* above copyright notice or any other identification of the
|
||||
* University of Michigan is included in any copy of any
|
||||
* portion of this software, then the disclaimer below must
|
||||
* also be included.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED AS IS, WITHOUT REPRESENTATION
|
||||
* FROM THE UNIVERSITY OF MICHIGAN AS TO ITS FITNESS FOR ANY
|
||||
* PURPOSE, AND WITHOUT WARRANTY BY THE UNIVERSITY OF
|
||||
* MICHIGAN OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING
|
||||
* WITHOUT LIMITATION THE IMPLIED WARRANTIES OF
|
||||
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE
|
||||
* REGENTS OF THE UNIVERSITY OF MICHIGAN SHALL NOT BE LIABLE
|
||||
* FOR ANY DAMAGES, INCLUDING SPECIAL, INDIRECT, INCIDENTAL, OR
|
||||
* CONSEQUENTIAL DAMAGES, WITH RESPECT TO ANY CLAIM ARISING
|
||||
* OUT OF OR IN CONNECTION WITH THE USE OF THE SOFTWARE, EVEN
|
||||
* IF IT HAS BEEN OR IS HEREAFTER ADVISED OF THE POSSIBILITY OF
|
||||
* SUCH DAMAGES.
|
||||
*/
|
||||
|
||||
/* Derived from various:
|
||||
* Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
|
||||
*/
|
||||
|
||||
/*
|
||||
* This is the Cipher Text Stealing mode as described by
|
||||
* Section 8 of rfc2040 and referenced by rfc3962.
|
||||
* rfc3962 includes errata information in its Appendix A.
|
||||
*/
|
||||
|
||||
#include <crypto/algapi.h>
|
||||
#include <linux/err.h>
|
||||
#include <linux/init.h>
|
||||
#include <linux/kernel.h>
|
||||
#include <linux/log2.h>
|
||||
#include <linux/module.h>
|
||||
#include <linux/scatterlist.h>
|
||||
#include <crypto/scatterwalk.h>
|
||||
#include <linux/slab.h>
|
||||
|
||||
struct crypto_cts_ctx {
|
||||
struct crypto_blkcipher *child;
|
||||
};
|
||||
|
||||
static int crypto_cts_setkey(struct crypto_tfm *parent, const u8 *key,
|
||||
unsigned int keylen)
|
||||
{
|
||||
struct crypto_cts_ctx *ctx = crypto_tfm_ctx(parent);
|
||||
struct crypto_blkcipher *child = ctx->child;
|
||||
int err;
|
||||
|
||||
crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
|
||||
crypto_blkcipher_set_flags(child, crypto_tfm_get_flags(parent) &
|
||||
CRYPTO_TFM_REQ_MASK);
|
||||
err = crypto_blkcipher_setkey(child, key, keylen);
|
||||
crypto_tfm_set_flags(parent, crypto_blkcipher_get_flags(child) &
|
||||
CRYPTO_TFM_RES_MASK);
|
||||
return err;
|
||||
}
|
||||
|
||||
static int cts_cbc_encrypt(struct crypto_cts_ctx *ctx,
|
||||
struct blkcipher_desc *desc,
|
||||
struct scatterlist *dst,
|
||||
struct scatterlist *src,
|
||||
unsigned int offset,
|
||||
unsigned int nbytes)
|
||||
{
|
||||
int bsize = crypto_blkcipher_blocksize(desc->tfm);
|
||||
u8 tmp[bsize], tmp2[bsize];
|
||||
struct blkcipher_desc lcldesc;
|
||||
struct scatterlist sgsrc[1], sgdst[1];
|
||||
int lastn = nbytes - bsize;
|
||||
u8 iv[bsize];
|
||||
u8 s[bsize * 2], d[bsize * 2];
|
||||
int err;
|
||||
|
||||
if (lastn < 0)
|
||||
return -EINVAL;
|
||||
|
||||
memset(s, 0, sizeof(s));
|
||||
scatterwalk_map_and_copy(s, src, offset, nbytes, 0);
|
||||
|
||||
memcpy(iv, desc->info, bsize);
|
||||
|
||||
lcldesc.tfm = ctx->child;
|
||||
lcldesc.info = iv;
|
||||
lcldesc.flags = desc->flags;
|
||||
|
||||
sg_set_buf(&sgsrc[0], s, bsize);
|
||||
sg_set_buf(&sgdst[0], tmp, bsize);
|
||||
err = crypto_blkcipher_encrypt_iv(&lcldesc, sgdst, sgsrc, bsize);
|
||||
|
||||
memcpy(d + bsize, tmp, lastn);
|
||||
|
||||
lcldesc.info = tmp;
|
||||
|
||||
sg_set_buf(&sgsrc[0], s + bsize, bsize);
|
||||
sg_set_buf(&sgdst[0], tmp2, bsize);
|
||||
err = crypto_blkcipher_encrypt_iv(&lcldesc, sgdst, sgsrc, bsize);
|
||||
|
||||
memcpy(d, tmp2, bsize);
|
||||
|
||||
scatterwalk_map_and_copy(d, dst, offset, nbytes, 1);
|
||||
|
||||
memcpy(desc->info, tmp2, bsize);
|
||||
|
||||
return err;
|
||||
}
|
||||
|
||||
static int crypto_cts_encrypt(struct blkcipher_desc *desc,
|
||||
struct scatterlist *dst, struct scatterlist *src,
|
||||
unsigned int nbytes)
|
||||
{
|
||||
struct crypto_cts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
|
||||
int bsize = crypto_blkcipher_blocksize(desc->tfm);
|
||||
int tot_blocks = (nbytes + bsize - 1) / bsize;
|
||||
int cbc_blocks = tot_blocks > 2 ? tot_blocks - 2 : 0;
|
||||
struct blkcipher_desc lcldesc;
|
||||
int err;
|
||||
|
||||
lcldesc.tfm = ctx->child;
|
||||
lcldesc.info = desc->info;
|
||||
lcldesc.flags = desc->flags;
|
||||
|
||||
if (tot_blocks == 1) {
|
||||
err = crypto_blkcipher_encrypt_iv(&lcldesc, dst, src, bsize);
|
||||
} else if (nbytes <= bsize * 2) {
|
||||
err = cts_cbc_encrypt(ctx, desc, dst, src, 0, nbytes);
|
||||
} else {
|
||||
/* do normal function for tot_blocks - 2 */
|
||||
err = crypto_blkcipher_encrypt_iv(&lcldesc, dst, src,
|
||||
cbc_blocks * bsize);
|
||||
if (err == 0) {
|
||||
/* do cts for final two blocks */
|
||||
err = cts_cbc_encrypt(ctx, desc, dst, src,
|
||||
cbc_blocks * bsize,
|
||||
nbytes - (cbc_blocks * bsize));
|
||||
}
|
||||
}
|
||||
|
||||
return err;
|
||||
}
|
||||
|
||||
static int cts_cbc_decrypt(struct crypto_cts_ctx *ctx,
|
||||
struct blkcipher_desc *desc,
|
||||
struct scatterlist *dst,
|
||||
struct scatterlist *src,
|
||||
unsigned int offset,
|
||||
unsigned int nbytes)
|
||||
{
|
||||
int bsize = crypto_blkcipher_blocksize(desc->tfm);
|
||||
u8 tmp[bsize];
|
||||
struct blkcipher_desc lcldesc;
|
||||
struct scatterlist sgsrc[1], sgdst[1];
|
||||
int lastn = nbytes - bsize;
|
||||
u8 iv[bsize];
|
||||
u8 s[bsize * 2], d[bsize * 2];
|
||||
int err;
|
||||
|
||||
if (lastn < 0)
|
||||
return -EINVAL;
|
||||
|
||||
scatterwalk_map_and_copy(s, src, offset, nbytes, 0);
|
||||
|
||||
lcldesc.tfm = ctx->child;
|
||||
lcldesc.info = iv;
|
||||
lcldesc.flags = desc->flags;
|
||||
|
||||
/* 1. Decrypt Cn-1 (s) to create Dn (tmp)*/
|
||||
memset(iv, 0, sizeof(iv));
|
||||
sg_set_buf(&sgsrc[0], s, bsize);
|
||||
sg_set_buf(&sgdst[0], tmp, bsize);
|
||||
err = crypto_blkcipher_decrypt_iv(&lcldesc, sgdst, sgsrc, bsize);
|
||||
if (err)
|
||||
return err;
|
||||
/* 2. Pad Cn with zeros at the end to create C of length BB */
|
||||
memset(iv, 0, sizeof(iv));
|
||||
memcpy(iv, s + bsize, lastn);
|
||||
/* 3. Exclusive-or Dn (tmp) with C (iv) to create Xn (tmp) */
|
||||
crypto_xor(tmp, iv, bsize);
|
||||
/* 4. Select the first Ln bytes of Xn (tmp) to create Pn */
|
||||
memcpy(d + bsize, tmp, lastn);
|
||||
|
||||
/* 5. Append the tail (BB - Ln) bytes of Xn (tmp) to Cn to create En */
|
||||
memcpy(s + bsize + lastn, tmp + lastn, bsize - lastn);
|
||||
/* 6. Decrypt En to create Pn-1 */
|
||||
memset(iv, 0, sizeof(iv));
|
||||
sg_set_buf(&sgsrc[0], s + bsize, bsize);
|
||||
sg_set_buf(&sgdst[0], d, bsize);
|
||||
err = crypto_blkcipher_decrypt_iv(&lcldesc, sgdst, sgsrc, bsize);
|
||||
|
||||
/* XOR with previous block */
|
||||
crypto_xor(d, desc->info, bsize);
|
||||
|
||||
scatterwalk_map_and_copy(d, dst, offset, nbytes, 1);
|
||||
|
||||
memcpy(desc->info, s, bsize);
|
||||
return err;
|
||||
}
|
||||
|
||||
static int crypto_cts_decrypt(struct blkcipher_desc *desc,
|
||||
struct scatterlist *dst, struct scatterlist *src,
|
||||
unsigned int nbytes)
|
||||
{
|
||||
struct crypto_cts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
|
||||
int bsize = crypto_blkcipher_blocksize(desc->tfm);
|
||||
int tot_blocks = (nbytes + bsize - 1) / bsize;
|
||||
int cbc_blocks = tot_blocks > 2 ? tot_blocks - 2 : 0;
|
||||
struct blkcipher_desc lcldesc;
|
||||
int err;
|
||||
|
||||
lcldesc.tfm = ctx->child;
|
||||
lcldesc.info = desc->info;
|
||||
lcldesc.flags = desc->flags;
|
||||
|
||||
if (tot_blocks == 1) {
|
||||
err = crypto_blkcipher_decrypt_iv(&lcldesc, dst, src, bsize);
|
||||
} else if (nbytes <= bsize * 2) {
|
||||
err = cts_cbc_decrypt(ctx, desc, dst, src, 0, nbytes);
|
||||
} else {
|
||||
/* do normal function for tot_blocks - 2 */
|
||||
err = crypto_blkcipher_decrypt_iv(&lcldesc, dst, src,
|
||||
cbc_blocks * bsize);
|
||||
if (err == 0) {
|
||||
/* do cts for final two blocks */
|
||||
err = cts_cbc_decrypt(ctx, desc, dst, src,
|
||||
cbc_blocks * bsize,
|
||||
nbytes - (cbc_blocks * bsize));
|
||||
}
|
||||
}
|
||||
return err;
|
||||
}
|
||||
|
||||
static int crypto_cts_init_tfm(struct crypto_tfm *tfm)
|
||||
{
|
||||
struct crypto_instance *inst = (void *)tfm->__crt_alg;
|
||||
struct crypto_spawn *spawn = crypto_instance_ctx(inst);
|
||||
struct crypto_cts_ctx *ctx = crypto_tfm_ctx(tfm);
|
||||
struct crypto_blkcipher *cipher;
|
||||
|
||||
cipher = crypto_spawn_blkcipher(spawn);
|
||||
if (IS_ERR(cipher))
|
||||
return PTR_ERR(cipher);
|
||||
|
||||
ctx->child = cipher;
|
||||
return 0;
|
||||
}
|
||||
|
||||
static void crypto_cts_exit_tfm(struct crypto_tfm *tfm)
|
||||
{
|
||||
struct crypto_cts_ctx *ctx = crypto_tfm_ctx(tfm);
|
||||
crypto_free_blkcipher(ctx->child);
|
||||
}
|
||||
|
||||
static struct crypto_instance *crypto_cts_alloc(struct rtattr **tb)
|
||||
{
|
||||
struct crypto_instance *inst;
|
||||
struct crypto_alg *alg;
|
||||
int err;
|
||||
|
||||
err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
|
||||
if (err)
|
||||
return ERR_PTR(err);
|
||||
|
||||
alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_BLKCIPHER,
|
||||
CRYPTO_ALG_TYPE_MASK);
|
||||
err = PTR_ERR(alg);
|
||||
if (IS_ERR(alg))
|
||||
return ERR_PTR(err);
|
||||
|
||||
inst = ERR_PTR(-EINVAL);
|
||||
if (!is_power_of_2(alg->cra_blocksize))
|
||||
goto out_put_alg;
|
||||
|
||||
inst = crypto_alloc_instance("cts", alg);
|
||||
if (IS_ERR(inst))
|
||||
goto out_put_alg;
|
||||
|
||||
inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
|
||||
inst->alg.cra_priority = alg->cra_priority;
|
||||
inst->alg.cra_blocksize = alg->cra_blocksize;
|
||||
inst->alg.cra_alignmask = alg->cra_alignmask;
|
||||
inst->alg.cra_type = &crypto_blkcipher_type;
|
||||
|
||||
/* We access the data as u32s when xoring. */
|
||||
inst->alg.cra_alignmask |= __alignof__(u32) - 1;
|
||||
|
||||
inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize;
|
||||
inst->alg.cra_blkcipher.min_keysize = alg->cra_blkcipher.min_keysize;
|
||||
inst->alg.cra_blkcipher.max_keysize = alg->cra_blkcipher.max_keysize;
|
||||
|
||||
inst->alg.cra_blkcipher.geniv = "seqiv";
|
||||
|
||||
inst->alg.cra_ctxsize = sizeof(struct crypto_cts_ctx);
|
||||
|
||||
inst->alg.cra_init = crypto_cts_init_tfm;
|
||||
inst->alg.cra_exit = crypto_cts_exit_tfm;
|
||||
|
||||
inst->alg.cra_blkcipher.setkey = crypto_cts_setkey;
|
||||
inst->alg.cra_blkcipher.encrypt = crypto_cts_encrypt;
|
||||
inst->alg.cra_blkcipher.decrypt = crypto_cts_decrypt;
|
||||
|
||||
out_put_alg:
|
||||
crypto_mod_put(alg);
|
||||
return inst;
|
||||
}
|
||||
|
||||
static void crypto_cts_free(struct crypto_instance *inst)
|
||||
{
|
||||
crypto_drop_spawn(crypto_instance_ctx(inst));
|
||||
kfree(inst);
|
||||
}
|
||||
|
||||
static struct crypto_template crypto_cts_tmpl = {
|
||||
.name = "cts",
|
||||
.alloc = crypto_cts_alloc,
|
||||
.free = crypto_cts_free,
|
||||
.module = THIS_MODULE,
|
||||
};
|
||||
|
||||
static int __init crypto_cts_module_init(void)
|
||||
{
|
||||
return crypto_register_template(&crypto_cts_tmpl);
|
||||
}
|
||||
|
||||
static void __exit crypto_cts_module_exit(void)
|
||||
{
|
||||
crypto_unregister_template(&crypto_cts_tmpl);
|
||||
}
|
||||
|
||||
module_init(crypto_cts_module_init);
|
||||
module_exit(crypto_cts_module_exit);
|
||||
|
||||
MODULE_LICENSE("Dual BSD/GPL");
|
||||
MODULE_DESCRIPTION("CTS-CBC CipherText Stealing for CBC");
|
|
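Usage sketch (illustrative, not part of cts.c above): once the "cts" template is registered, a caller can instantiate it on top of cbc(aes) through the blkcipher interface of this kernel generation, roughly as follows; key and data handling are omitted:

/* Minimal sketch; assumes <linux/crypto.h>, error paths trimmed. */
static int cts_example(const u8 *key, unsigned int keylen)
{
	struct crypto_blkcipher *tfm;

	tfm = crypto_alloc_blkcipher("cts(cbc(aes))", 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);
	crypto_blkcipher_setkey(tfm, key, keylen);
	/* ... crypto_blkcipher_encrypt_iv() / _decrypt_iv() on scatterlists ... */
	crypto_free_blkcipher(tfm);
	return 0;
}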
@ -208,18 +208,18 @@ static struct crypto_alg alg = {
|
|||
.coa_decompress = deflate_decompress } }
|
||||
};
|
||||
|
||||
static int __init init(void)
|
||||
static int __init deflate_mod_init(void)
|
||||
{
|
||||
return crypto_register_alg(&alg);
|
||||
}
|
||||
|
||||
static void __exit fini(void)
|
||||
static void __exit deflate_mod_fini(void)
|
||||
{
|
||||
crypto_unregister_alg(&alg);
|
||||
}
|
||||
|
||||
module_init(init);
|
||||
module_exit(fini);
|
||||
module_init(deflate_mod_init);
|
||||
module_exit(deflate_mod_fini);
|
||||
|
||||
MODULE_LICENSE("GPL");
|
||||
MODULE_DESCRIPTION("Deflate Compression Algorithm for IPCOMP");
|
||||
|
|
|
@ -977,7 +977,7 @@ static struct crypto_alg des3_ede_alg = {
|
|||
|
||||
MODULE_ALIAS("des3_ede");
|
||||
|
||||
static int __init init(void)
|
||||
static int __init des_generic_mod_init(void)
|
||||
{
|
||||
int ret = 0;
|
||||
|
||||
|
@ -992,14 +992,14 @@ static int __init init(void)
|
|||
return ret;
|
||||
}
|
||||
|
||||
static void __exit fini(void)
|
||||
static void __exit des_generic_mod_fini(void)
|
||||
{
|
||||
crypto_unregister_alg(&des3_ede_alg);
|
||||
crypto_unregister_alg(&des_alg);
|
||||
}
|
||||
|
||||
module_init(init);
|
||||
module_exit(fini);
|
||||
module_init(des_generic_mod_init);
|
||||
module_exit(des_generic_mod_fini);
|
||||
|
||||
MODULE_LICENSE("GPL");
|
||||
MODULE_DESCRIPTION("DES & Triple DES EDE Cipher Algorithms");
|
||||
|
|
|
@ -405,18 +405,18 @@ static struct crypto_alg fcrypt_alg = {
|
|||
.cia_decrypt = fcrypt_decrypt } }
|
||||
};
|
||||
|
||||
static int __init init(void)
|
||||
static int __init fcrypt_mod_init(void)
|
||||
{
|
||||
return crypto_register_alg(&fcrypt_alg);
|
||||
}
|
||||
|
||||
static void __exit fini(void)
|
||||
static void __exit fcrypt_mod_fini(void)
|
||||
{
|
||||
crypto_unregister_alg(&fcrypt_alg);
|
||||
}
|
||||
|
||||
module_init(init);
|
||||
module_exit(fini);
|
||||
module_init(fcrypt_mod_init);
|
||||
module_exit(fcrypt_mod_fini);
|
||||
|
||||
MODULE_LICENSE("Dual BSD/GPL");
|
||||
MODULE_DESCRIPTION("FCrypt Cipher Algorithm");
|
||||
|
|
|
@ -862,7 +862,7 @@ static struct crypto_alg khazad_alg = {
|
|||
.cia_decrypt = khazad_decrypt } }
|
||||
};
|
||||
|
||||
static int __init init(void)
|
||||
static int __init khazad_mod_init(void)
|
||||
{
|
||||
int ret = 0;
|
||||
|
||||
|
@ -870,14 +870,14 @@ static int __init init(void)
|
|||
return ret;
|
||||
}
|
||||
|
||||
static void __exit fini(void)
|
||||
static void __exit khazad_mod_fini(void)
|
||||
{
|
||||
crypto_unregister_alg(&khazad_alg);
|
||||
}
|
||||
|
||||
|
||||
module_init(init);
|
||||
module_exit(fini);
|
||||
module_init(khazad_mod_init);
|
||||
module_exit(khazad_mod_fini);
|
||||
|
||||
MODULE_LICENSE("GPL");
|
||||
MODULE_DESCRIPTION("Khazad Cryptographic Algorithm");
|
||||
|
|
|
@ -91,8 +91,9 @@ struct sinfo {

static inline void inc(be128 *iv)
{
	if (!(iv->b = cpu_to_be64(be64_to_cpu(iv->b) + 1)))
		iv->a = cpu_to_be64(be64_to_cpu(iv->a) + 1);
	be64_add_cpu(&iv->b, 1);
	if (!iv->b)
		be64_add_cpu(&iv->a, 1);
}

static inline void lrw_round(struct sinfo *s, void *dst, const void *src)
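For reference (not part of this patch), be64_add_cpu() used above is assumed to behave like the open-coded sequence it replaces, i.e. roughly:

/* Sketch of the assumed semantics of be64_add_cpu(). */
static inline void be64_add_cpu_sketch(__be64 *var, u64 val)
{
	*var = cpu_to_be64(be64_to_cpu(*var) + val);
}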
@ -89,18 +89,18 @@ static struct crypto_alg alg = {
|
|||
.coa_decompress = lzo_decompress } }
|
||||
};
|
||||
|
||||
static int __init init(void)
|
||||
static int __init lzo_mod_init(void)
|
||||
{
|
||||
return crypto_register_alg(&alg);
|
||||
}
|
||||
|
||||
static void __exit fini(void)
|
||||
static void __exit lzo_mod_fini(void)
|
||||
{
|
||||
crypto_unregister_alg(&alg);
|
||||
}
|
||||
|
||||
module_init(init);
|
||||
module_exit(fini);
|
||||
module_init(lzo_mod_init);
|
||||
module_exit(lzo_mod_fini);
|
||||
|
||||
MODULE_LICENSE("GPL");
|
||||
MODULE_DESCRIPTION("LZO Compression Algorithm");
|
||||
|
|
|
@ -233,18 +233,18 @@ static struct crypto_alg alg = {
|
|||
.dia_final = md4_final } }
|
||||
};
|
||||
|
||||
static int __init init(void)
|
||||
static int __init md4_mod_init(void)
|
||||
{
|
||||
return crypto_register_alg(&alg);
|
||||
}
|
||||
|
||||
static void __exit fini(void)
|
||||
static void __exit md4_mod_fini(void)
|
||||
{
|
||||
crypto_unregister_alg(&alg);
|
||||
}
|
||||
|
||||
module_init(init);
|
||||
module_exit(fini);
|
||||
module_init(md4_mod_init);
|
||||
module_exit(md4_mod_fini);
|
||||
|
||||
MODULE_LICENSE("GPL");
|
||||
MODULE_DESCRIPTION("MD4 Message Digest Algorithm");
|
||||
|
|
|
@ -228,18 +228,18 @@ static struct crypto_alg alg = {
|
|||
.dia_final = md5_final } }
|
||||
};
|
||||
|
||||
static int __init init(void)
|
||||
static int __init md5_mod_init(void)
|
||||
{
|
||||
return crypto_register_alg(&alg);
|
||||
}
|
||||
|
||||
static void __exit fini(void)
|
||||
static void __exit md5_mod_fini(void)
|
||||
{
|
||||
crypto_unregister_alg(&alg);
|
||||
}
|
||||
|
||||
module_init(init);
|
||||
module_exit(fini);
|
||||
module_init(md5_mod_init);
|
||||
module_exit(md5_mod_fini);
|
||||
|
||||
MODULE_LICENSE("GPL");
|
||||
MODULE_DESCRIPTION("MD5 Message Digest Algorithm");
|
||||
|
|
|
@ -78,7 +78,7 @@ static int c_show(struct seq_file *m, void *p)
	return 0;
}

static struct seq_operations crypto_seq_ops = {
static const struct seq_operations crypto_seq_ops = {
	.start		= c_start,
	.next		= c_next,
	.stop		= c_stop,

@ -99,11 +99,7 @@ static const struct file_operations proc_crypto_ops = {

void __init crypto_init_proc(void)
{
	struct proc_dir_entry *proc;

	proc = create_proc_entry("crypto", 0, NULL);
	if (proc)
		proc->proc_fops = &proc_crypto_ops;
	proc_create("crypto", 0, NULL, &proc_crypto_ops);
}

void __exit crypto_exit_proc(void)
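Illustrative note (not part of this diff): proc_create() folds entry creation and file-operations assignment into one call, so the entry never exists without its fops. Its signature in this kernel generation is assumed to be:

struct proc_dir_entry *proc_create(const char *name, mode_t mode,
				   struct proc_dir_entry *parent,
				   const struct file_operations *proc_fops);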
@ -237,18 +237,18 @@ static struct crypto_alg alg = {
|
|||
}
|
||||
};
|
||||
|
||||
static int __init init(void)
|
||||
static int __init salsa20_generic_mod_init(void)
|
||||
{
|
||||
return crypto_register_alg(&alg);
|
||||
}
|
||||
|
||||
static void __exit fini(void)
|
||||
static void __exit salsa20_generic_mod_fini(void)
|
||||
{
|
||||
crypto_unregister_alg(&alg);
|
||||
}
|
||||
|
||||
module_init(init);
|
||||
module_exit(fini);
|
||||
module_init(salsa20_generic_mod_init);
|
||||
module_exit(salsa20_generic_mod_fini);
|
||||
|
||||
MODULE_LICENSE("GPL");
|
||||
MODULE_DESCRIPTION ("Salsa20 stream cipher algorithm");
|
||||
|
|
|
@ -557,7 +557,7 @@ static struct crypto_alg tnepres_alg = {
|
|||
.cia_decrypt = tnepres_decrypt } }
|
||||
};
|
||||
|
||||
static int __init init(void)
|
||||
static int __init serpent_mod_init(void)
|
||||
{
|
||||
int ret = crypto_register_alg(&serpent_alg);
|
||||
|
||||
|
@ -572,14 +572,14 @@ static int __init init(void)
|
|||
return ret;
|
||||
}
|
||||
|
||||
static void __exit fini(void)
|
||||
static void __exit serpent_mod_fini(void)
|
||||
{
|
||||
crypto_unregister_alg(&tnepres_alg);
|
||||
crypto_unregister_alg(&serpent_alg);
|
||||
}
|
||||
|
||||
module_init(init);
|
||||
module_exit(fini);
|
||||
module_init(serpent_mod_init);
|
||||
module_exit(serpent_mod_fini);
|
||||
|
||||
MODULE_LICENSE("GPL");
|
||||
MODULE_DESCRIPTION("Serpent and tnepres (kerneli compatible serpent reversed) Cipher Algorithm");
|
||||
|
|
|
@ -120,18 +120,18 @@ static struct crypto_alg alg = {
|
|||
.dia_final = sha1_final } }
|
||||
};
|
||||
|
||||
static int __init init(void)
|
||||
static int __init sha1_generic_mod_init(void)
|
||||
{
|
||||
return crypto_register_alg(&alg);
|
||||
}
|
||||
|
||||
static void __exit fini(void)
|
||||
static void __exit sha1_generic_mod_fini(void)
|
||||
{
|
||||
crypto_unregister_alg(&alg);
|
||||
}
|
||||
|
||||
module_init(init);
|
||||
module_exit(fini);
|
||||
module_init(sha1_generic_mod_init);
|
||||
module_exit(sha1_generic_mod_fini);
|
||||
|
||||
MODULE_LICENSE("GPL");
|
||||
MODULE_DESCRIPTION("SHA1 Secure Hash Algorithm");
|
||||
|
|
|
@ -353,7 +353,7 @@ static struct crypto_alg sha224 = {
|
|||
.dia_final = sha224_final } }
|
||||
};
|
||||
|
||||
static int __init init(void)
|
||||
static int __init sha256_generic_mod_init(void)
|
||||
{
|
||||
int ret = 0;
|
||||
|
||||
|
@ -370,14 +370,14 @@ static int __init init(void)
|
|||
return ret;
|
||||
}
|
||||
|
||||
static void __exit fini(void)
|
||||
static void __exit sha256_generic_mod_fini(void)
|
||||
{
|
||||
crypto_unregister_alg(&sha224);
|
||||
crypto_unregister_alg(&sha256);
|
||||
}
|
||||
|
||||
module_init(init);
|
||||
module_exit(fini);
|
||||
module_init(sha256_generic_mod_init);
|
||||
module_exit(sha256_generic_mod_fini);
|
||||
|
||||
MODULE_LICENSE("GPL");
|
||||
MODULE_DESCRIPTION("SHA-224 and SHA-256 Secure Hash Algorithm");
|
||||
|
|
|
@ -104,9 +104,9 @@ sha512_transform(u64 *state, u64 *W, const u8 *input)
|
|||
}
|
||||
|
||||
/* load the state into our registers */
|
||||
a=state[0]; b=state[1]; c=state[2]; d=state[3];
|
||||
e=state[4]; f=state[5]; g=state[6]; h=state[7];
|
||||
|
||||
a=state[0]; b=state[1]; c=state[2]; d=state[3];
|
||||
e=state[4]; f=state[5]; g=state[6]; h=state[7];
|
||||
|
||||
/* now iterate */
|
||||
for (i=0; i<80; i+=8) {
|
||||
t1 = h + e1(e) + Ch(e,f,g) + sha512_K[i ] + W[i ];
|
||||
|
@ -126,9 +126,9 @@ sha512_transform(u64 *state, u64 *W, const u8 *input)
|
|||
t1 = a + e1(f) + Ch(f,g,h) + sha512_K[i+7] + W[i+7];
|
||||
t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2;
|
||||
}
|
||||
|
||||
state[0] += a; state[1] += b; state[2] += c; state[3] += d;
|
||||
state[4] += e; state[5] += f; state[6] += g; state[7] += h;
|
||||
|
||||
state[0] += a; state[1] += b; state[2] += c; state[3] += d;
|
||||
state[4] += e; state[5] += f; state[6] += g; state[7] += h;
|
||||
|
||||
/* erase our data */
|
||||
a = b = c = d = e = f = g = h = t1 = t2 = 0;
|
||||
|
@ -173,7 +173,7 @@ sha512_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len)
|
|||
|
||||
/* Compute number of bytes mod 128 */
|
||||
index = (unsigned int)((sctx->count[0] >> 3) & 0x7F);
|
||||
|
||||
|
||||
/* Update number of bits */
|
||||
if ((sctx->count[0] += (len << 3)) < (len << 3)) {
|
||||
if ((sctx->count[1] += 1) < 1)
|
||||
|
@ -181,9 +181,9 @@ sha512_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len)
|
|||
sctx->count[3]++;
|
||||
sctx->count[1] += (len >> 29);
|
||||
}
|
||||
|
||||
|
||||
part_len = 128 - index;
|
||||
|
||||
|
||||
/* Transform as many times as possible. */
|
||||
if (len >= part_len) {
|
||||
memcpy(&sctx->buf[index], data, part_len);
|
||||
|
@ -278,9 +278,7 @@ static struct crypto_alg sha384 = {
|
|||
}
|
||||
};
|
||||
|
||||
MODULE_ALIAS("sha384");
|
||||
|
||||
static int __init init(void)
|
||||
static int __init sha512_generic_mod_init(void)
|
||||
{
|
||||
int ret = 0;
|
||||
|
||||
|
@ -292,14 +290,17 @@ static int __init init(void)
|
|||
return ret;
|
||||
}
|
||||
|
||||
static void __exit fini(void)
|
||||
static void __exit sha512_generic_mod_fini(void)
|
||||
{
|
||||
crypto_unregister_alg(&sha384);
|
||||
crypto_unregister_alg(&sha512);
|
||||
}
|
||||
|
||||
module_init(init);
|
||||
module_exit(fini);
|
||||
module_init(sha512_generic_mod_init);
|
||||
module_exit(sha512_generic_mod_fini);
|
||||
|
||||
MODULE_LICENSE("GPL");
|
||||
MODULE_DESCRIPTION("SHA-512 and SHA-384 Secure Hash Algorithms");
|
||||
|
||||
MODULE_ALIAS("sha384");
|
||||
MODULE_ALIAS("sha512");
|
535	crypto/tcrypt.c
|
@ -82,9 +82,8 @@ static char *check[] = {
|
|||
"des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256",
|
||||
"blowfish", "twofish", "serpent", "sha384", "sha512", "md4", "aes",
|
||||
"cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
|
||||
"arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
|
||||
"khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt",
|
||||
"camellia", "seed", "salsa20", "lzo", NULL
|
||||
"camellia", "seed", "salsa20", "lzo", "cts", NULL
|
||||
};
|
||||
|
||||
static void hexdump(unsigned char *buf, unsigned int len)
|
||||
|
@ -113,23 +112,11 @@ static void test_hash(char *algo, struct hash_testvec *template,
|
|||
char result[64];
|
||||
struct crypto_hash *tfm;
|
||||
struct hash_desc desc;
|
||||
struct hash_testvec *hash_tv;
|
||||
unsigned int tsize;
|
||||
int ret;
|
||||
void *hash_buff;
|
||||
|
||||
printk("\ntesting %s\n", algo);
|
||||
|
||||
tsize = sizeof(struct hash_testvec);
|
||||
tsize *= tcount;
|
||||
|
||||
if (tsize > TVMEMSIZE) {
|
||||
printk("template (%u) too big for tvmem (%u)\n", tsize, TVMEMSIZE);
|
||||
return;
|
||||
}
|
||||
|
||||
memcpy(tvmem, template, tsize);
|
||||
hash_tv = (void *)tvmem;
|
||||
|
||||
tfm = crypto_alloc_hash(algo, 0, CRYPTO_ALG_ASYNC);
|
||||
if (IS_ERR(tfm)) {
|
||||
printk("failed to load transform for %s: %ld\n", algo,
|
||||
|
@ -144,28 +131,36 @@ static void test_hash(char *algo, struct hash_testvec *template,
|
|||
printk("test %u:\n", i + 1);
|
||||
memset(result, 0, 64);
|
||||
|
||||
sg_init_one(&sg[0], hash_tv[i].plaintext, hash_tv[i].psize);
|
||||
hash_buff = kzalloc(template[i].psize, GFP_KERNEL);
|
||||
if (!hash_buff)
|
||||
continue;
|
||||
|
||||
if (hash_tv[i].ksize) {
|
||||
ret = crypto_hash_setkey(tfm, hash_tv[i].key,
|
||||
hash_tv[i].ksize);
|
||||
memcpy(hash_buff, template[i].plaintext, template[i].psize);
|
||||
sg_init_one(&sg[0], hash_buff, template[i].psize);
|
||||
|
||||
if (template[i].ksize) {
|
||||
ret = crypto_hash_setkey(tfm, template[i].key,
|
||||
template[i].ksize);
|
||||
if (ret) {
|
||||
printk("setkey() failed ret=%d\n", ret);
|
||||
kfree(hash_buff);
|
||||
goto out;
|
||||
}
|
||||
}
|
||||
|
||||
ret = crypto_hash_digest(&desc, sg, hash_tv[i].psize, result);
|
||||
ret = crypto_hash_digest(&desc, sg, template[i].psize, result);
|
||||
if (ret) {
|
||||
printk("digest () failed ret=%d\n", ret);
|
||||
kfree(hash_buff);
|
||||
goto out;
|
||||
}
|
||||
|
||||
hexdump(result, crypto_hash_digestsize(tfm));
|
||||
printk("%s\n",
|
||||
memcmp(result, hash_tv[i].digest,
|
||||
memcmp(result, template[i].digest,
|
||||
crypto_hash_digestsize(tfm)) ?
|
||||
"fail" : "pass");
|
||||
kfree(hash_buff);
|
||||
}
|
||||
|
||||
printk("testing %s across pages\n", algo);
|
||||
|
@ -175,25 +170,25 @@ static void test_hash(char *algo, struct hash_testvec *template,
|
|||
|
||||
j = 0;
|
||||
for (i = 0; i < tcount; i++) {
|
||||
if (hash_tv[i].np) {
|
||||
if (template[i].np) {
|
||||
j++;
|
||||
printk("test %u:\n", j);
|
||||
memset(result, 0, 64);
|
||||
|
||||
temp = 0;
|
||||
sg_init_table(sg, hash_tv[i].np);
|
||||
for (k = 0; k < hash_tv[i].np; k++) {
|
||||
sg_init_table(sg, template[i].np);
|
||||
for (k = 0; k < template[i].np; k++) {
|
||||
memcpy(&xbuf[IDX[k]],
|
||||
hash_tv[i].plaintext + temp,
|
||||
hash_tv[i].tap[k]);
|
||||
temp += hash_tv[i].tap[k];
|
||||
template[i].plaintext + temp,
|
||||
template[i].tap[k]);
|
||||
temp += template[i].tap[k];
|
||||
sg_set_buf(&sg[k], &xbuf[IDX[k]],
|
||||
hash_tv[i].tap[k]);
|
||||
template[i].tap[k]);
|
||||
}
|
||||
|
||||
if (hash_tv[i].ksize) {
|
||||
ret = crypto_hash_setkey(tfm, hash_tv[i].key,
|
||||
hash_tv[i].ksize);
|
||||
if (template[i].ksize) {
|
||||
ret = crypto_hash_setkey(tfm, template[i].key,
|
||||
template[i].ksize);
|
||||
|
||||
if (ret) {
|
||||
printk("setkey() failed ret=%d\n", ret);
|
||||
|
@ -201,7 +196,7 @@ static void test_hash(char *algo, struct hash_testvec *template,
|
|||
}
|
||||
}
|
||||
|
||||
ret = crypto_hash_digest(&desc, sg, hash_tv[i].psize,
|
||||
ret = crypto_hash_digest(&desc, sg, template[i].psize,
|
||||
result);
|
||||
if (ret) {
|
||||
printk("digest () failed ret=%d\n", ret);
|
||||
|
@ -210,7 +205,7 @@ static void test_hash(char *algo, struct hash_testvec *template,
|
|||
|
||||
hexdump(result, crypto_hash_digestsize(tfm));
|
||||
printk("%s\n",
|
||||
memcmp(result, hash_tv[i].digest,
|
||||
memcmp(result, template[i].digest,
|
||||
crypto_hash_digestsize(tfm)) ?
|
||||
"fail" : "pass");
|
||||
}
|
||||
|
@ -224,17 +219,18 @@ static void test_aead(char *algo, int enc, struct aead_testvec *template,
|
|||
unsigned int tcount)
|
||||
{
|
||||
unsigned int ret, i, j, k, temp;
|
||||
unsigned int tsize;
|
||||
char *q;
|
||||
struct crypto_aead *tfm;
|
||||
char *key;
|
||||
struct aead_testvec *aead_tv;
|
||||
struct aead_request *req;
|
||||
struct scatterlist sg[8];
|
||||
struct scatterlist asg[8];
|
||||
const char *e;
|
||||
struct tcrypt_result result;
|
||||
unsigned int authsize;
|
||||
void *input;
|
||||
void *assoc;
|
||||
char iv[MAX_IVLEN];
|
||||
|
||||
if (enc == ENCRYPT)
|
||||
e = "encryption";
|
||||
|
@ -243,18 +239,6 @@ static void test_aead(char *algo, int enc, struct aead_testvec *template,
|
|||
|
||||
printk(KERN_INFO "\ntesting %s %s\n", algo, e);
|
||||
|
||||
tsize = sizeof(struct aead_testvec);
|
||||
tsize *= tcount;
|
||||
|
||||
if (tsize > TVMEMSIZE) {
|
||||
printk(KERN_INFO "template (%u) too big for tvmem (%u)\n",
|
||||
tsize, TVMEMSIZE);
|
||||
return;
|
||||
}
|
||||
|
||||
memcpy(tvmem, template, tsize);
|
||||
aead_tv = (void *)tvmem;
|
||||
|
||||
init_completion(&result.completion);
|
||||
|
||||
tfm = crypto_alloc_aead(algo, 0, 0);
|
||||
|
@ -275,46 +259,68 @@ static void test_aead(char *algo, int enc, struct aead_testvec *template,
|
|||
tcrypt_complete, &result);
|
||||
|
||||
for (i = 0, j = 0; i < tcount; i++) {
|
||||
if (!aead_tv[i].np) {
|
||||
if (!template[i].np) {
|
||||
printk(KERN_INFO "test %u (%d bit key):\n",
|
||||
++j, aead_tv[i].klen * 8);
|
||||
++j, template[i].klen * 8);
|
||||
|
||||
/* some templates have no input data but they will
 * touch input
 */
|
||||
input = kzalloc(template[i].ilen + template[i].rlen, GFP_KERNEL);
|
||||
if (!input)
|
||||
continue;
|
||||
|
||||
assoc = kzalloc(template[i].alen, GFP_KERNEL);
|
||||
if (!assoc) {
|
||||
kfree(input);
|
||||
continue;
|
||||
}
|
||||
|
||||
memcpy(input, template[i].input, template[i].ilen);
|
||||
memcpy(assoc, template[i].assoc, template[i].alen);
|
||||
if (template[i].iv)
|
||||
memcpy(iv, template[i].iv, MAX_IVLEN);
|
||||
else
|
||||
memset(iv, 0, MAX_IVLEN);
|
||||
|
||||
crypto_aead_clear_flags(tfm, ~0);
|
||||
if (aead_tv[i].wk)
|
||||
if (template[i].wk)
|
||||
crypto_aead_set_flags(
|
||||
tfm, CRYPTO_TFM_REQ_WEAK_KEY);
|
||||
key = aead_tv[i].key;
|
||||
|
||||
if (template[i].key)
|
||||
key = template[i].key;
|
||||
else
|
||||
key = kzalloc(template[i].klen, GFP_KERNEL);
|
||||
|
||||
ret = crypto_aead_setkey(tfm, key,
|
||||
aead_tv[i].klen);
|
||||
template[i].klen);
|
||||
if (ret) {
|
||||
printk(KERN_INFO "setkey() failed flags=%x\n",
|
||||
crypto_aead_get_flags(tfm));
|
||||
|
||||
if (!aead_tv[i].fail)
|
||||
goto out;
|
||||
if (!template[i].fail)
|
||||
goto next_one;
|
||||
}
|
||||
|
||||
authsize = abs(aead_tv[i].rlen - aead_tv[i].ilen);
|
||||
authsize = abs(template[i].rlen - template[i].ilen);
|
||||
ret = crypto_aead_setauthsize(tfm, authsize);
|
||||
if (ret) {
|
||||
printk(KERN_INFO
|
||||
"failed to set authsize = %u\n",
|
||||
authsize);
|
||||
goto out;
|
||||
goto next_one;
|
||||
}
|
||||
|
||||
sg_init_one(&sg[0], aead_tv[i].input,
|
||||
aead_tv[i].ilen + (enc ? authsize : 0));
|
||||
sg_init_one(&sg[0], input,
|
||||
template[i].ilen + (enc ? authsize : 0));
|
||||
|
||||
sg_init_one(&asg[0], aead_tv[i].assoc,
|
||||
aead_tv[i].alen);
|
||||
sg_init_one(&asg[0], assoc, template[i].alen);
|
||||
|
||||
aead_request_set_crypt(req, sg, sg,
|
||||
aead_tv[i].ilen,
|
||||
aead_tv[i].iv);
|
||||
template[i].ilen, iv);
|
||||
|
||||
aead_request_set_assoc(req, asg, aead_tv[i].alen);
|
||||
aead_request_set_assoc(req, asg, template[i].alen);
|
||||
|
||||
ret = enc ?
|
||||
crypto_aead_encrypt(req) :
|
||||
|
@ -335,15 +341,21 @@ static void test_aead(char *algo, int enc, struct aead_testvec *template,
|
|||
default:
|
||||
printk(KERN_INFO "%s () failed err=%d\n",
|
||||
e, -ret);
|
||||
goto out;
|
||||
goto next_one;
|
||||
}
|
||||
|
||||
q = kmap(sg_page(&sg[0])) + sg[0].offset;
|
||||
hexdump(q, aead_tv[i].rlen);
|
||||
hexdump(q, template[i].rlen);
|
||||
|
||||
printk(KERN_INFO "enc/dec: %s\n",
|
||||
memcmp(q, aead_tv[i].result,
|
||||
aead_tv[i].rlen) ? "fail" : "pass");
|
||||
memcmp(q, template[i].result,
|
||||
template[i].rlen) ? "fail" : "pass");
|
||||
kunmap(sg_page(&sg[0]));
|
||||
next_one:
|
||||
if (!template[i].key)
|
||||
kfree(key);
|
||||
kfree(assoc);
|
||||
kfree(input);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -352,36 +364,41 @@ static void test_aead(char *algo, int enc, struct aead_testvec *template,
|
|||
memset(axbuf, 0, XBUFSIZE);
|
||||
|
||||
for (i = 0, j = 0; i < tcount; i++) {
|
||||
if (aead_tv[i].np) {
|
||||
if (template[i].np) {
|
||||
printk(KERN_INFO "test %u (%d bit key):\n",
|
||||
++j, aead_tv[i].klen * 8);
|
||||
++j, template[i].klen * 8);
|
||||
|
||||
if (template[i].iv)
|
||||
memcpy(iv, template[i].iv, MAX_IVLEN);
|
||||
else
|
||||
memset(iv, 0, MAX_IVLEN);
|
||||
|
||||
crypto_aead_clear_flags(tfm, ~0);
|
||||
if (aead_tv[i].wk)
|
||||
if (template[i].wk)
|
||||
crypto_aead_set_flags(
|
||||
tfm, CRYPTO_TFM_REQ_WEAK_KEY);
|
||||
key = aead_tv[i].key;
|
||||
key = template[i].key;
|
||||
|
||||
ret = crypto_aead_setkey(tfm, key, aead_tv[i].klen);
|
||||
ret = crypto_aead_setkey(tfm, key, template[i].klen);
|
||||
if (ret) {
|
||||
printk(KERN_INFO "setkey() failed flags=%x\n",
|
||||
crypto_aead_get_flags(tfm));
|
||||
|
||||
if (!aead_tv[i].fail)
|
||||
if (!template[i].fail)
|
||||
goto out;
|
||||
}
|
||||
|
||||
sg_init_table(sg, aead_tv[i].np);
|
||||
for (k = 0, temp = 0; k < aead_tv[i].np; k++) {
|
||||
sg_init_table(sg, template[i].np);
|
||||
for (k = 0, temp = 0; k < template[i].np; k++) {
|
||||
memcpy(&xbuf[IDX[k]],
|
||||
aead_tv[i].input + temp,
|
||||
aead_tv[i].tap[k]);
|
||||
temp += aead_tv[i].tap[k];
|
||||
template[i].input + temp,
|
||||
template[i].tap[k]);
|
||||
temp += template[i].tap[k];
|
||||
sg_set_buf(&sg[k], &xbuf[IDX[k]],
|
||||
aead_tv[i].tap[k]);
|
||||
template[i].tap[k]);
|
||||
}
|
||||
|
||||
authsize = abs(aead_tv[i].rlen - aead_tv[i].ilen);
|
||||
authsize = abs(template[i].rlen - template[i].ilen);
|
||||
ret = crypto_aead_setauthsize(tfm, authsize);
|
||||
if (ret) {
|
||||
printk(KERN_INFO
|
||||
|
@ -393,21 +410,21 @@ static void test_aead(char *algo, int enc, struct aead_testvec *template,
|
|||
if (enc)
|
||||
sg[k - 1].length += authsize;
|
||||
|
||||
sg_init_table(asg, aead_tv[i].anp);
|
||||
for (k = 0, temp = 0; k < aead_tv[i].anp; k++) {
|
||||
sg_init_table(asg, template[i].anp);
|
||||
for (k = 0, temp = 0; k < template[i].anp; k++) {
|
||||
memcpy(&axbuf[IDX[k]],
|
||||
aead_tv[i].assoc + temp,
|
||||
aead_tv[i].atap[k]);
|
||||
temp += aead_tv[i].atap[k];
|
||||
template[i].assoc + temp,
|
||||
template[i].atap[k]);
|
||||
temp += template[i].atap[k];
|
||||
sg_set_buf(&asg[k], &axbuf[IDX[k]],
|
||||
aead_tv[i].atap[k]);
|
||||
template[i].atap[k]);
|
||||
}
|
||||
|
||||
aead_request_set_crypt(req, sg, sg,
|
||||
aead_tv[i].ilen,
|
||||
aead_tv[i].iv);
|
||||
template[i].ilen,
|
||||
iv);
|
||||
|
||||
aead_request_set_assoc(req, asg, aead_tv[i].alen);
|
||||
aead_request_set_assoc(req, asg, template[i].alen);
|
||||
|
||||
ret = enc ?
|
||||
crypto_aead_encrypt(req) :
|
||||
|
@ -431,18 +448,19 @@ static void test_aead(char *algo, int enc, struct aead_testvec *template,
|
|||
goto out;
|
||||
}
|
||||
|
||||
for (k = 0, temp = 0; k < aead_tv[i].np; k++) {
|
||||
for (k = 0, temp = 0; k < template[i].np; k++) {
|
||||
printk(KERN_INFO "page %u\n", k);
|
||||
q = kmap(sg_page(&sg[k])) + sg[k].offset;
|
||||
hexdump(q, aead_tv[i].tap[k]);
|
||||
hexdump(q, template[i].tap[k]);
|
||||
printk(KERN_INFO "%s\n",
|
||||
memcmp(q, aead_tv[i].result + temp,
|
||||
aead_tv[i].tap[k] -
|
||||
(k < aead_tv[i].np - 1 || enc ?
|
||||
memcmp(q, template[i].result + temp,
|
||||
template[i].tap[k] -
|
||||
(k < template[i].np - 1 || enc ?
|
||||
0 : authsize)) ?
|
||||
"fail" : "pass");
|
||||
|
||||
temp += aead_tv[i].tap[k];
|
||||
temp += template[i].tap[k];
|
||||
kunmap(sg_page(&sg[k]));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -456,15 +474,14 @@ static void test_cipher(char *algo, int enc,
|
|||
struct cipher_testvec *template, unsigned int tcount)
|
||||
{
|
||||
unsigned int ret, i, j, k, temp;
|
||||
unsigned int tsize;
|
||||
char *q;
|
||||
struct crypto_ablkcipher *tfm;
|
||||
char *key;
|
||||
struct cipher_testvec *cipher_tv;
|
||||
struct ablkcipher_request *req;
|
||||
struct scatterlist sg[8];
|
||||
const char *e;
|
||||
struct tcrypt_result result;
|
||||
void *data;
|
||||
char iv[MAX_IVLEN];
|
||||
|
||||
if (enc == ENCRYPT)
|
||||
e = "encryption";
|
||||
|
@ -473,16 +490,7 @@ static void test_cipher(char *algo, int enc,
|
|||
|
||||
printk("\ntesting %s %s\n", algo, e);
|
||||
|
||||
tsize = sizeof (struct cipher_testvec);
|
||||
if (tsize > TVMEMSIZE) {
|
||||
printk("template (%u) too big for tvmem (%u)\n", tsize,
|
||||
TVMEMSIZE);
|
||||
return;
|
||||
}
|
||||
cipher_tv = (void *)tvmem;
|
||||
|
||||
init_completion(&result.completion);
|
||||
|
||||
tfm = crypto_alloc_ablkcipher(algo, 0, 0);
|
||||
|
||||
if (IS_ERR(tfm)) {
|
||||
|
@ -502,35 +510,43 @@ static void test_cipher(char *algo, int enc,
|
|||
|
||||
j = 0;
|
||||
for (i = 0; i < tcount; i++) {
|
||||
memcpy(cipher_tv, &template[i], tsize);
|
||||
if (!(cipher_tv->np)) {
|
||||
|
||||
data = kzalloc(template[i].ilen, GFP_KERNEL);
|
||||
if (!data)
|
||||
continue;
|
||||
|
||||
memcpy(data, template[i].input, template[i].ilen);
|
||||
if (template[i].iv)
|
||||
memcpy(iv, template[i].iv, MAX_IVLEN);
|
||||
else
|
||||
memset(iv, 0, MAX_IVLEN);
|
||||
|
||||
if (!(template[i].np)) {
|
||||
j++;
|
||||
printk("test %u (%d bit key):\n",
|
||||
j, cipher_tv->klen * 8);
|
||||
j, template[i].klen * 8);
|
||||
|
||||
crypto_ablkcipher_clear_flags(tfm, ~0);
|
||||
if (cipher_tv->wk)
|
||||
if (template[i].wk)
|
||||
crypto_ablkcipher_set_flags(
|
||||
tfm, CRYPTO_TFM_REQ_WEAK_KEY);
|
||||
key = cipher_tv->key;
|
||||
|
||||
ret = crypto_ablkcipher_setkey(tfm, key,
|
||||
cipher_tv->klen);
|
||||
ret = crypto_ablkcipher_setkey(tfm, template[i].key,
|
||||
template[i].klen);
|
||||
if (ret) {
|
||||
printk("setkey() failed flags=%x\n",
|
||||
crypto_ablkcipher_get_flags(tfm));
|
||||
|
||||
if (!cipher_tv->fail)
|
||||
if (!template[i].fail) {
|
||||
kfree(data);
|
||||
goto out;
|
||||
}
|
||||
}
|
||||
|
||||
sg_init_one(&sg[0], cipher_tv->input,
|
||||
cipher_tv->ilen);
|
||||
sg_init_one(&sg[0], data, template[i].ilen);
|
||||
|
||||
ablkcipher_request_set_crypt(req, sg, sg,
|
||||
cipher_tv->ilen,
|
||||
cipher_tv->iv);
|
||||
|
||||
template[i].ilen, iv);
|
||||
ret = enc ?
|
||||
crypto_ablkcipher_encrypt(req) :
|
||||
crypto_ablkcipher_decrypt(req);
|
||||
|
@ -549,16 +565,19 @@ static void test_cipher(char *algo, int enc,
|
|||
/* fall through */
|
||||
default:
|
||||
printk("%s () failed err=%d\n", e, -ret);
|
||||
kfree(data);
|
||||
goto out;
|
||||
}
|
||||
|
||||
q = kmap(sg_page(&sg[0])) + sg[0].offset;
|
||||
hexdump(q, cipher_tv->rlen);
|
||||
hexdump(q, template[i].rlen);
|
||||
|
||||
printk("%s\n",
|
||||
memcmp(q, cipher_tv->result,
|
||||
cipher_tv->rlen) ? "fail" : "pass");
|
||||
memcmp(q, template[i].result,
|
||||
template[i].rlen) ? "fail" : "pass");
|
||||
kunmap(sg_page(&sg[0]));
|
||||
}
|
||||
kfree(data);
|
||||
}
|
||||
|
||||
printk("\ntesting %s %s across pages (chunking)\n", algo, e);
|
||||
|
@ -566,42 +585,53 @@ static void test_cipher(char *algo, int enc,
|
|||
|
||||
j = 0;
|
||||
for (i = 0; i < tcount; i++) {
|
||||
memcpy(cipher_tv, &template[i], tsize);
|
||||
if (cipher_tv->np) {
|
||||
|
||||
data = kzalloc(template[i].ilen, GFP_KERNEL);
|
||||
if (!data)
|
||||
continue;
|
||||
|
||||
memcpy(data, template[i].input, template[i].ilen);
|
||||
|
||||
if (template[i].iv)
|
||||
memcpy(iv, template[i].iv, MAX_IVLEN);
|
||||
else
|
||||
memset(iv, 0, MAX_IVLEN);
|
||||
|
||||
if (template[i].np) {
|
||||
j++;
|
||||
printk("test %u (%d bit key):\n",
|
||||
j, cipher_tv->klen * 8);
|
||||
j, template[i].klen * 8);
|
||||
|
||||
crypto_ablkcipher_clear_flags(tfm, ~0);
|
||||
if (cipher_tv->wk)
|
||||
if (template[i].wk)
|
||||
crypto_ablkcipher_set_flags(
|
||||
tfm, CRYPTO_TFM_REQ_WEAK_KEY);
|
||||
key = cipher_tv->key;
|
||||
|
||||
ret = crypto_ablkcipher_setkey(tfm, key,
|
||||
cipher_tv->klen);
|
||||
ret = crypto_ablkcipher_setkey(tfm, template[i].key,
|
||||
template[i].klen);
|
||||
if (ret) {
|
||||
printk("setkey() failed flags=%x\n",
|
||||
crypto_ablkcipher_get_flags(tfm));
|
||||
crypto_ablkcipher_get_flags(tfm));
|
||||
|
||||
if (!cipher_tv->fail)
|
||||
if (!template[i].fail) {
|
||||
kfree(data);
|
||||
goto out;
|
||||
}
|
||||
}
|
||||
|
||||
temp = 0;
|
||||
sg_init_table(sg, cipher_tv->np);
|
||||
for (k = 0; k < cipher_tv->np; k++) {
|
||||
sg_init_table(sg, template[i].np);
|
||||
for (k = 0; k < template[i].np; k++) {
|
||||
memcpy(&xbuf[IDX[k]],
|
||||
cipher_tv->input + temp,
|
||||
cipher_tv->tap[k]);
|
||||
temp += cipher_tv->tap[k];
|
||||
template[i].input + temp,
|
||||
template[i].tap[k]);
|
||||
temp += template[i].tap[k];
|
||||
sg_set_buf(&sg[k], &xbuf[IDX[k]],
|
||||
cipher_tv->tap[k]);
|
||||
template[i].tap[k]);
|
||||
}
|
||||
|
||||
ablkcipher_request_set_crypt(req, sg, sg,
|
||||
cipher_tv->ilen,
|
||||
cipher_tv->iv);
|
||||
template[i].ilen, iv);
|
||||
|
||||
ret = enc ?
|
||||
crypto_ablkcipher_encrypt(req) :
|
||||
|
@ -625,19 +655,19 @@ static void test_cipher(char *algo, int enc,
|
|||
}
|
||||
|
||||
temp = 0;
|
||||
for (k = 0; k < cipher_tv->np; k++) {
|
||||
for (k = 0; k < template[i].np; k++) {
|
||||
printk("page %u\n", k);
|
||||
q = kmap(sg_page(&sg[k])) + sg[k].offset;
|
||||
hexdump(q, cipher_tv->tap[k]);
|
||||
hexdump(q, template[i].tap[k]);
|
||||
printk("%s\n",
|
||||
memcmp(q, cipher_tv->result + temp,
|
||||
cipher_tv->tap[k]) ? "fail" :
|
||||
memcmp(q, template[i].result + temp,
|
||||
template[i].tap[k]) ? "fail" :
|
||||
"pass");
|
||||
temp += cipher_tv->tap[k];
|
||||
temp += template[i].tap[k];
|
||||
kunmap(sg_page(&sg[k]));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
out:
|
||||
crypto_free_ablkcipher(tfm);
|
||||
ablkcipher_request_free(req);
|
||||
|
@ -721,15 +751,18 @@ static int test_cipher_cycles(struct blkcipher_desc *desc, int enc, char *p,
|
|||
return ret;
|
||||
}
|
||||
|
||||
static u32 block_sizes[] = { 16, 64, 256, 1024, 8192, 0 };
|
||||
|
||||
static void test_cipher_speed(char *algo, int enc, unsigned int sec,
|
||||
struct cipher_testvec *template,
|
||||
unsigned int tcount, struct cipher_speed *speed)
|
||||
unsigned int tcount, u8 *keysize)
|
||||
{
|
||||
unsigned int ret, i, j, iv_len;
|
||||
unsigned char *key, *p, iv[128];
|
||||
struct crypto_blkcipher *tfm;
|
||||
struct blkcipher_desc desc;
|
||||
const char *e;
|
||||
u32 *b_size;
|
||||
|
||||
if (enc == ENCRYPT)
|
||||
e = "encryption";
|
||||
|
@ -748,52 +781,60 @@ static void test_cipher_speed(char *algo, int enc, unsigned int sec,
|
|||
desc.tfm = tfm;
|
||||
desc.flags = 0;
|
||||
|
||||
for (i = 0; speed[i].klen != 0; i++) {
|
||||
if ((speed[i].blen + speed[i].klen) > TVMEMSIZE) {
|
||||
printk("template (%u) too big for tvmem (%u)\n",
|
||||
speed[i].blen + speed[i].klen, TVMEMSIZE);
|
||||
goto out;
|
||||
}
|
||||
i = 0;
|
||||
do {
|
||||
|
||||
printk("test %u (%d bit key, %d byte blocks): ", i,
|
||||
speed[i].klen * 8, speed[i].blen);
|
||||
b_size = block_sizes;
|
||||
do {
|
||||
|
||||
memset(tvmem, 0xff, speed[i].klen + speed[i].blen);
|
||||
if ((*keysize + *b_size) > TVMEMSIZE) {
|
||||
printk("template (%u) too big for tvmem (%u)\n",
|
||||
*keysize + *b_size, TVMEMSIZE);
|
||||
goto out;
|
||||
}
|
||||
|
||||
/* set key, plain text and IV */
|
||||
key = (unsigned char *)tvmem;
|
||||
for (j = 0; j < tcount; j++) {
|
||||
if (template[j].klen == speed[i].klen) {
|
||||
key = template[j].key;
|
||||
printk("test %u (%d bit key, %d byte blocks): ", i,
|
||||
*keysize * 8, *b_size);
|
||||
|
||||
memset(tvmem, 0xff, *keysize + *b_size);
|
||||
|
||||
/* set key, plain text and IV */
|
||||
key = (unsigned char *)tvmem;
|
||||
for (j = 0; j < tcount; j++) {
|
||||
if (template[j].klen == *keysize) {
|
||||
key = template[j].key;
|
||||
break;
|
||||
}
|
||||
}
|
||||
p = (unsigned char *)tvmem + *keysize;
|
||||
|
||||
ret = crypto_blkcipher_setkey(tfm, key, *keysize);
|
||||
if (ret) {
|
||||
printk("setkey() failed flags=%x\n",
|
||||
crypto_blkcipher_get_flags(tfm));
|
||||
goto out;
|
||||
}
|
||||
|
||||
iv_len = crypto_blkcipher_ivsize(tfm);
|
||||
if (iv_len) {
|
||||
memset(&iv, 0xff, iv_len);
|
||||
crypto_blkcipher_set_iv(tfm, iv, iv_len);
|
||||
}
|
||||
|
||||
if (sec)
|
||||
ret = test_cipher_jiffies(&desc, enc, p, *b_size, sec);
|
||||
else
|
||||
ret = test_cipher_cycles(&desc, enc, p, *b_size);
|
||||
|
||||
if (ret) {
|
||||
printk("%s() failed flags=%x\n", e, desc.flags);
|
||||
break;
|
||||
}
|
||||
}
|
||||
p = (unsigned char *)tvmem + speed[i].klen;
|
||||
|
||||
ret = crypto_blkcipher_setkey(tfm, key, speed[i].klen);
|
||||
if (ret) {
|
||||
printk("setkey() failed flags=%x\n",
|
||||
crypto_blkcipher_get_flags(tfm));
|
||||
goto out;
|
||||
}
|
||||
|
||||
iv_len = crypto_blkcipher_ivsize(tfm);
|
||||
if (iv_len) {
|
||||
memset(&iv, 0xff, iv_len);
|
||||
crypto_blkcipher_set_iv(tfm, iv, iv_len);
|
||||
}
|
||||
|
||||
if (sec)
|
||||
ret = test_cipher_jiffies(&desc, enc, p, speed[i].blen,
|
||||
sec);
|
||||
else
|
||||
ret = test_cipher_cycles(&desc, enc, p, speed[i].blen);
|
||||
|
||||
if (ret) {
|
||||
printk("%s() failed flags=%x\n", e, desc.flags);
|
||||
break;
|
||||
}
|
||||
}
|
||||
b_size++;
|
||||
i++;
|
||||
} while (*b_size);
|
||||
keysize++;
|
||||
} while (*keysize);
|
||||
|
||||
out:
|
||||
crypto_free_blkcipher(tfm);
|
||||
|
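Illustrative note (not part of this diff): the speed_template_* arrays used below are defined in crypto/tcrypt.h, whose diff is suppressed above. Judging by the zero-terminated keysize loop here, they are presumably plain key-size lists along these lines (names taken from the patch, definitions assumed):

/* Assumed shape of the new keysize templates; zero terminates the list. */
static u8 speed_template_16_24_32[] = { 16, 24, 32, 0 };
static u8 speed_template_8_32[]     = { 8, 32, 0 };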
@ -1041,22 +1082,10 @@ static void test_comp(char *algo, struct comp_testvec *ctemplate,
|
|||
unsigned int i;
|
||||
char result[COMP_BUF_SIZE];
|
||||
struct crypto_comp *tfm;
|
||||
struct comp_testvec *tv;
|
||||
unsigned int tsize;
|
||||
|
||||
printk("\ntesting %s compression\n", algo);
|
||||
|
||||
tsize = sizeof(struct comp_testvec);
|
||||
tsize *= ctcount;
|
||||
if (tsize > TVMEMSIZE) {
|
||||
printk("template (%u) too big for tvmem (%u)\n", tsize,
|
||||
TVMEMSIZE);
|
||||
return;
|
||||
}
|
||||
|
||||
memcpy(tvmem, ctemplate, tsize);
|
||||
tv = (void *)tvmem;
|
||||
|
||||
tfm = crypto_alloc_comp(algo, 0, CRYPTO_ALG_ASYNC);
|
||||
if (IS_ERR(tfm)) {
|
||||
printk("failed to load transform for %s\n", algo);
|
||||
|
@ -1069,8 +1098,8 @@ static void test_comp(char *algo, struct comp_testvec *ctemplate,
|
|||
printk("test %u:\n", i + 1);
|
||||
memset(result, 0, sizeof (result));
|
||||
|
||||
ilen = tv[i].inlen;
|
||||
ret = crypto_comp_compress(tfm, tv[i].input,
|
||||
ilen = ctemplate[i].inlen;
|
||||
ret = crypto_comp_compress(tfm, ctemplate[i].input,
|
||||
ilen, result, &dlen);
|
||||
if (ret) {
|
||||
printk("fail: ret=%d\n", ret);
|
||||
|
@ -1078,7 +1107,7 @@ static void test_comp(char *algo, struct comp_testvec *ctemplate,
|
|||
}
|
||||
hexdump(result, dlen);
|
||||
printk("%s (ratio %d:%d)\n",
|
||||
memcmp(result, tv[i].output, dlen) ? "fail" : "pass",
|
||||
memcmp(result, ctemplate[i].output, dlen) ? "fail" : "pass",
|
||||
ilen, dlen);
|
||||
}
|
||||
|
||||
|
@ -1092,17 +1121,14 @@ static void test_comp(char *algo, struct comp_testvec *ctemplate,
|
|||
goto out;
|
||||
}
|
||||
|
||||
memcpy(tvmem, dtemplate, tsize);
|
||||
tv = (void *)tvmem;
|
||||
|
||||
for (i = 0; i < dtcount; i++) {
|
||||
int ilen, ret, dlen = COMP_BUF_SIZE;
|
||||
|
||||
printk("test %u:\n", i + 1);
|
||||
memset(result, 0, sizeof (result));
|
||||
|
||||
ilen = tv[i].inlen;
|
||||
ret = crypto_comp_decompress(tfm, tv[i].input,
|
||||
ilen = dtemplate[i].inlen;
|
||||
ret = crypto_comp_decompress(tfm, dtemplate[i].input,
|
||||
ilen, result, &dlen);
|
||||
if (ret) {
|
||||
printk("fail: ret=%d\n", ret);
|
||||
|
@ -1110,7 +1136,7 @@ static void test_comp(char *algo, struct comp_testvec *ctemplate,
|
|||
}
|
||||
hexdump(result, dlen);
|
||||
printk("%s (ratio %d:%d)\n",
|
||||
memcmp(result, tv[i].output, dlen) ? "fail" : "pass",
|
||||
memcmp(result, dtemplate[i].output, dlen) ? "fail" : "pass",
|
||||
ilen, dlen);
|
||||
}
|
||||
out:
|
||||
|
@ -1301,6 +1327,12 @@ static void do_test(void)
|
|||
test_cipher("ecb(seed)", DECRYPT, seed_dec_tv_template,
|
||||
SEED_DEC_TEST_VECTORS);
|
||||
|
||||
//CTS
|
||||
test_cipher("cts(cbc(aes))", ENCRYPT, cts_mode_enc_tv_template,
|
||||
CTS_MODE_ENC_TEST_VECTORS);
|
||||
test_cipher("cts(cbc(aes))", DECRYPT, cts_mode_dec_tv_template,
|
||||
CTS_MODE_DEC_TEST_VECTORS);
|
||||
|
||||
test_hash("sha384", sha384_tv_template, SHA384_TEST_VECTORS);
|
||||
test_hash("sha512", sha512_tv_template, SHA512_TEST_VECTORS);
|
||||
test_hash("wp512", wp512_tv_template, WP512_TEST_VECTORS);
|
||||
|
@ -1584,6 +1616,13 @@ static void do_test(void)
|
|||
AES_CCM_DEC_TEST_VECTORS);
|
||||
break;
|
||||
|
||||
case 38:
|
||||
test_cipher("cts(cbc(aes))", ENCRYPT, cts_mode_enc_tv_template,
|
||||
CTS_MODE_ENC_TEST_VECTORS);
|
||||
test_cipher("cts(cbc(aes))", DECRYPT, cts_mode_dec_tv_template,
|
||||
CTS_MODE_DEC_TEST_VECTORS);
|
||||
break;
|
||||
|
||||
case 100:
|
||||
test_hash("hmac(md5)", hmac_md5_tv_template,
|
||||
HMAC_MD5_TEST_VECTORS);
|
||||
|
@ -1621,89 +1660,85 @@ static void do_test(void)
|
|||
|
||||
case 200:
|
||||
test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
|
||||
aes_speed_template);
|
||||
speed_template_16_24_32);
|
||||
test_cipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
|
||||
aes_speed_template);
|
||||
speed_template_16_24_32);
|
||||
test_cipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
|
||||
aes_speed_template);
|
||||
speed_template_16_24_32);
|
||||
test_cipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
|
||||
aes_speed_template);
|
||||
speed_template_16_24_32);
|
||||
test_cipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
|
||||
aes_lrw_speed_template);
|
||||
speed_template_32_40_48);
|
||||
test_cipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
|
||||
aes_lrw_speed_template);
|
||||
speed_template_32_40_48);
|
||||
test_cipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
|
||||
aes_xts_speed_template);
|
||||
speed_template_32_48_64);
|
||||
test_cipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
|
||||
aes_xts_speed_template);
|
||||
speed_template_32_48_64);
|
||||
break;
|
||||
|
||||
case 201:
|
||||
test_cipher_speed("ecb(des3_ede)", ENCRYPT, sec,
|
||||
des3_ede_enc_tv_template,
|
||||
DES3_EDE_ENC_TEST_VECTORS,
|
||||
des3_ede_speed_template);
|
||||
des3_ede_enc_tv_template, DES3_EDE_ENC_TEST_VECTORS,
|
||||
speed_template_24);
|
||||
test_cipher_speed("ecb(des3_ede)", DECRYPT, sec,
|
||||
des3_ede_dec_tv_template,
|
||||
DES3_EDE_DEC_TEST_VECTORS,
|
||||
des3_ede_speed_template);
|
||||
des3_ede_enc_tv_template, DES3_EDE_ENC_TEST_VECTORS,
|
||||
speed_template_24);
|
||||
test_cipher_speed("cbc(des3_ede)", ENCRYPT, sec,
|
||||
des3_ede_enc_tv_template,
|
||||
DES3_EDE_ENC_TEST_VECTORS,
|
||||
des3_ede_speed_template);
|
||||
des3_ede_enc_tv_template, DES3_EDE_ENC_TEST_VECTORS,
|
||||
speed_template_24);
|
||||
test_cipher_speed("cbc(des3_ede)", DECRYPT, sec,
|
||||
des3_ede_dec_tv_template,
|
||||
DES3_EDE_DEC_TEST_VECTORS,
|
||||
des3_ede_speed_template);
|
||||
des3_ede_enc_tv_template, DES3_EDE_ENC_TEST_VECTORS,
|
||||
speed_template_24);
|
||||
break;
|
||||
|
||||
case 202:
|
||||
test_cipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
|
||||
twofish_speed_template);
|
||||
speed_template_16_24_32);
|
||||
test_cipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
|
||||
twofish_speed_template);
|
||||
speed_template_16_24_32);
|
||||
test_cipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
|
||||
twofish_speed_template);
|
||||
speed_template_16_24_32);
|
||||
test_cipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
|
||||
twofish_speed_template);
|
||||
speed_template_16_24_32);
|
||||
break;
|
||||
|
||||
case 203:
|
||||
test_cipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
|
||||
blowfish_speed_template);
|
||||
speed_template_8_32);
|
||||
test_cipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
|
||||
blowfish_speed_template);
|
||||
speed_template_8_32);
|
||||
test_cipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
|
||||
blowfish_speed_template);
|
||||
speed_template_8_32);
|
||||
test_cipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
|
||||
blowfish_speed_template);
|
||||
speed_template_8_32);
|
||||
break;
|
||||
|
||||
case 204:
|
||||
test_cipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
|
||||
des_speed_template);
|
||||
speed_template_8);
|
||||
test_cipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
|
||||
des_speed_template);
|
||||
speed_template_8);
|
||||
test_cipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
|
||||
des_speed_template);
|
||||
speed_template_8);
|
||||
test_cipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
|
||||
des_speed_template);
|
||||
speed_template_8);
|
||||
break;
|
||||
|
||||
case 205:
|
||||
test_cipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
|
||||
camellia_speed_template);
|
||||
speed_template_16_24_32);
|
||||
test_cipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
|
||||
camellia_speed_template);
|
||||
speed_template_16_24_32);
|
||||
test_cipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
|
||||
camellia_speed_template);
|
||||
speed_template_16_24_32);
|
||||
test_cipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
|
||||
camellia_speed_template);
|
||||
speed_template_16_24_32);
|
||||
break;
|
||||
|
||||
case 206:
|
||||
test_cipher_speed("salsa20", ENCRYPT, sec, NULL, 0,
|
||||
salsa20_speed_template);
|
||||
speed_template_16_32);
|
||||
break;
|
||||
|
||||
case 300:
|
||||
|
@ -1775,7 +1810,7 @@ static void do_test(void)
|
|||
}
|
||||
}
|
||||
|
||||
static int __init init(void)
|
||||
static int __init tcrypt_mod_init(void)
|
||||
{
|
||||
int err = -ENOMEM;
|
||||
|
||||
|
@ -1814,10 +1849,10 @@ static int __init init(void)
|
|||
* If an init function is provided, an exit function must also be provided
|
||||
* to allow module unload.
|
||||
*/
|
||||
static void __exit fini(void) { }
|
||||
static void __exit tcrypt_mod_fini(void) { }
|
||||
|
||||
module_init(init);
|
||||
module_exit(fini);
|
||||
module_init(tcrypt_mod_init);
|
||||
module_exit(tcrypt_mod_fini);
|
||||
|
||||
module_param(mode, int, 0);
|
||||
module_param(sec, uint, 0);
|
||||
|
|
11885	crypto/tcrypt.h
File diff suppressed because it is too large
|
@ -267,7 +267,7 @@ static struct crypto_alg xeta_alg = {
|
|||
.cia_decrypt = xeta_decrypt } }
|
||||
};
|
||||
|
||||
static int __init init(void)
|
||||
static int __init tea_mod_init(void)
|
||||
{
|
||||
int ret = 0;
|
||||
|
||||
|
@ -292,7 +292,7 @@ static int __init init(void)
|
|||
return ret;
|
||||
}
|
||||
|
||||
static void __exit fini(void)
|
||||
static void __exit tea_mod_fini(void)
|
||||
{
|
||||
crypto_unregister_alg(&tea_alg);
|
||||
crypto_unregister_alg(&xtea_alg);
|
||||
|
@ -302,8 +302,8 @@ static void __exit fini(void)
|
|||
MODULE_ALIAS("xtea");
|
||||
MODULE_ALIAS("xeta");
|
||||
|
||||
module_init(init);
|
||||
module_exit(fini);
|
||||
module_init(tea_mod_init);
|
||||
module_exit(tea_mod_fini);
|
||||
|
||||
MODULE_LICENSE("GPL");
|
||||
MODULE_DESCRIPTION("TEA, XTEA & XETA Cryptographic Algorithms");
|
||||
|
|
|
@ -663,7 +663,7 @@ static struct crypto_alg tgr128 = {
.dia_final = tgr128_final}}
};

static int __init init(void)
static int __init tgr192_mod_init(void)
{
int ret = 0;

@ -688,7 +688,7 @@ static int __init init(void)
return ret;
}

static void __exit fini(void)
static void __exit tgr192_mod_fini(void)
{
crypto_unregister_alg(&tgr192);
crypto_unregister_alg(&tgr160);

@ -698,8 +698,8 @@ static void __exit fini(void)
MODULE_ALIAS("tgr160");
MODULE_ALIAS("tgr128");

module_init(init);
module_exit(fini);
module_init(tgr192_mod_init);
module_exit(tgr192_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Tiger Message Digest Algorithm");
@ -197,18 +197,18 @@ static struct crypto_alg alg = {
.cia_decrypt = twofish_decrypt } }
};

static int __init init(void)
static int __init twofish_mod_init(void)
{
return crypto_register_alg(&alg);
}

static void __exit fini(void)
static void __exit twofish_mod_fini(void)
{
crypto_unregister_alg(&alg);
}

module_init(init);
module_exit(fini);
module_init(twofish_mod_init);
module_exit(twofish_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION ("Twofish Cipher Algorithm");
@ -1146,7 +1146,7 @@ static struct crypto_alg wp256 = {
.dia_final = wp256_final } }
};

static int __init init(void)
static int __init wp512_mod_init(void)
{
int ret = 0;

@ -1172,7 +1172,7 @@ static int __init init(void)
return ret;
}

static void __exit fini(void)
static void __exit wp512_mod_fini(void)
{
crypto_unregister_alg(&wp512);
crypto_unregister_alg(&wp384);

@ -1182,8 +1182,8 @@ static void __exit fini(void)
MODULE_ALIAS("wp384");
MODULE_ALIAS("wp256");

module_init(init);
module_exit(fini);
module_init(wp512_mod_init);
module_exit(wp512_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Whirlpool Message Digest Algorithm");
@ -1,7 +1,5 @@
/*
* drivers/char/hw_random/omap-rng.c
*
* RNG driver for TI OMAP CPU family
* omap-rng.c - RNG driver for TI OMAP CPU family
*
* Author: Deepak Saxena <dsaxena@plexity.net>
*

@ -15,11 +13,6 @@
* This file is licensed under the terms of the GNU General Public
* License version 2. This program is licensed "as is" without any
* warranty of any kind, whether express or implied.
*
* TODO:
*
* - Make status updated be interrupt driven so we don't poll
*
*/

#include <linux/module.h>

@ -55,17 +48,16 @@ static void __iomem *rng_base;
static struct clk *rng_ick;
static struct platform_device *rng_dev;

static u32 omap_rng_read_reg(int reg)
static inline u32 omap_rng_read_reg(int reg)
{
return __raw_readl(rng_base + reg);
}

static void omap_rng_write_reg(int reg, u32 val)
static inline void omap_rng_write_reg(int reg, u32 val)
{
__raw_writel(val, rng_base + reg);
}

/* REVISIT: Does the status bit really work on 16xx? */
static int omap_rng_data_present(struct hwrng *rng, int wait)
{
int data, i;

@ -74,6 +66,11 @@ static int omap_rng_data_present(struct hwrng *rng, int wait)
data = omap_rng_read_reg(RNG_STAT_REG) ? 0 : 1;
if (data || !wait)
break;
/* RNG produces data fast enough (2+ MBit/sec, even
* during "rngtest" loads, that these delays don't
* seem to trigger. We *could* use the RNG IRQ, but
* that'd be higher overhead ... so why bother?
*/
udelay(10);
}
return data;

@ -101,7 +98,8 @@ static int __init omap_rng_probe(struct platform_device *pdev)
* A bit ugly, and it will never actually happen but there can
* be only one RNG and this catches any bork
*/
BUG_ON(rng_dev);
if (rng_dev)
return -EBUSY;

if (cpu_is_omap24xx()) {
rng_ick = clk_get(NULL, "rng_ick");

@ -124,7 +122,7 @@ static int __init omap_rng_probe(struct platform_device *pdev)
return -EBUSY;

dev_set_drvdata(&pdev->dev, mem);
rng_base = (u32 __iomem *)io_p2v(res->start);
rng_base = (u32 __force __iomem *)io_p2v(res->start);

ret = hwrng_register(&omap_rng_ops);
if (ret) {

@ -182,6 +180,8 @@ static int omap_rng_resume(struct platform_device *pdev)

#endif

/* work with hotplug and coldplug */
MODULE_ALIAS("platform:omap_rng");

static struct platform_driver omap_rng_driver = {
.driver = {
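Taking the new comment's figure of 2+ Mbit/s at face value, a fresh 32-bit word arrives roughly every 16 µs, so the 10 µs udelay() poll should rarely loop more than once or twice. From userspace the driver is reached through the hw_random core rather than directly; a rough sketch, assuming the core's usual /dev/hwrng character device node:

/* Sketch: read a few bytes from whatever hwrng driver is registered
 * (the OMAP RNG here). /dev/hwrng is assumed to be the core's node. */
#include <fcntl.h>
#include <stdio.h>
#include <unistd.h>

int main(void)
{
        unsigned char buf[16];
        int fd = open("/dev/hwrng", O_RDONLY);

        if (fd < 0 || read(fd, buf, sizeof(buf)) != (ssize_t)sizeof(buf)) {
                perror("hwrng");
                return 1;
        }
        for (size_t i = 0; i < sizeof(buf); i++)
                printf("%02x", buf[i]);
        printf("\n");
        close(fd);
        return 0;
}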
@ -27,6 +27,7 @@ config CRYPTO_DEV_PADLOCK_AES
tristate "PadLock driver for AES algorithm"
depends on CRYPTO_DEV_PADLOCK
select CRYPTO_BLKCIPHER
select CRYPTO_AES
help
Use VIA PadLock for AES algorithm.

@ -101,6 +102,19 @@ config CRYPTO_SHA256_S390
This version of SHA implements a 256 bit hash with 128 bits of
security against collision attacks.

config CRYPTO_SHA512_S390
tristate "SHA384 and SHA512 digest algorithm"
depends on S390
select CRYPTO_ALGAPI
help
This is the s390 hardware accelerated implementation of the
SHA512 secure hash standard.

This version of SHA implements a 512 bit hash with 256 bits of
security against collision attacks. The code also includes SHA-384,
a 384 bit hash with 192 bits of security against collision attacks.

config CRYPTO_DES_S390
tristate "DES and Triple DES cipher algorithms"
depends on S390
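Kernel code does not call the s390 modules directly; it asks the crypto API for "sha512" (or "sha384") and gets the hardware-accelerated implementation when CRYPTO_SHA512_S390 is built and loaded. A minimal sketch using the hash interface of this era; the helper name and buffer handling are assumptions for illustration, and the input should live in directly mapped kernel memory since it goes through a scatterlist:

#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <crypto/sha.h>

/* Sketch: digest a buffer with whichever "sha512" implementation the
 * crypto API selects. */
static int sha512_digest_example(const u8 *data, unsigned int len,
                                 u8 out[SHA512_DIGEST_SIZE])
{
        struct crypto_hash *tfm;
        struct hash_desc desc;
        struct scatterlist sg;
        int err;

        tfm = crypto_alloc_hash("sha512", 0, CRYPTO_ALG_ASYNC);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        desc.tfm = tfm;
        desc.flags = 0;
        sg_init_one(&sg, data, len);

        err = crypto_hash_digest(&desc, &sg, len, out);
        crypto_free_hash(tfm);
        return err;
}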
@ -5,42 +5,6 @@
*
* Copyright (c) 2004 Michal Ludvig <michal@logix.cz>
*
* Key expansion routine taken from crypto/aes_generic.c
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* ---------------------------------------------------------------------------
* Copyright (c) 2002, Dr Brian Gladman <brg@gladman.me.uk>, Worcester, UK.
* All rights reserved.
*
* LICENSE TERMS
*
* The free distribution and use of this software in both source and binary
* form is allowed (with or without changes) provided that:
*
* 1. distributions of this source code include the above copyright
* notice, this list of conditions and the following disclaimer;
*
* 2. distributions in binary form include the above copyright
* notice, this list of conditions and the following disclaimer
* in the documentation and/or other associated materials;
*
* 3. the copyright holder's name is not used to endorse products
* built using this software without specific written permission.
*
* ALTERNATIVELY, provided that this notice is retained in full, this product
* may be distributed under the terms of the GNU General Public License (GPL),
* in which case the provisions of the GPL apply INSTEAD OF those given above.
*
* DISCLAIMER
*
* This software is provided 'as is' with no explicit or implied warranties
* in respect of its properties, including, but not limited to, correctness
* and/or fitness for purpose.
* ---------------------------------------------------------------------------
*/

#include <crypto/algapi.h>
@ -54,9 +18,6 @@
#include <asm/byteorder.h>
#include "padlock.h"

#define AES_EXTENDED_KEY_SIZE 64 /* in uint32_t units */
#define AES_EXTENDED_KEY_SIZE_B (AES_EXTENDED_KEY_SIZE * sizeof(uint32_t))

/* Control word. */
struct cword {
unsigned int __attribute__ ((__packed__))
@ -70,218 +31,23 @@ struct cword {

/* Whenever making any changes to the following
* structure *make sure* you keep E, d_data
* and cword aligned on 16 Bytes boundaries!!! */
* and cword aligned on 16 Bytes boundaries and
* the Hardware can access 16 * 16 bytes of E and d_data
* (only the first 15 * 16 bytes matter but the HW reads
* more).
*/
struct aes_ctx {
u32 E[AES_MAX_KEYLENGTH_U32]
__attribute__ ((__aligned__(PADLOCK_ALIGNMENT)));
u32 d_data[AES_MAX_KEYLENGTH_U32]
__attribute__ ((__aligned__(PADLOCK_ALIGNMENT)));
struct {
struct cword encrypt;
struct cword decrypt;
} cword;
u32 *D;
int key_length;
u32 E[AES_EXTENDED_KEY_SIZE]
__attribute__ ((__aligned__(PADLOCK_ALIGNMENT)));
u32 d_data[AES_EXTENDED_KEY_SIZE]
__attribute__ ((__aligned__(PADLOCK_ALIGNMENT)));
};

/* ====== Key management routines ====== */

static inline uint32_t
generic_rotr32 (const uint32_t x, const unsigned bits)
{
const unsigned n = bits % 32;
return (x >> n) | (x << (32 - n));
}

static inline uint32_t
generic_rotl32 (const uint32_t x, const unsigned bits)
{
const unsigned n = bits % 32;
return (x << n) | (x >> (32 - n));
}

#define rotl generic_rotl32
#define rotr generic_rotr32

/*
* #define byte(x, nr) ((unsigned char)((x) >> (nr*8)))
*/
static inline uint8_t
byte(const uint32_t x, const unsigned n)
{
return x >> (n << 3);
}

#define E_KEY ctx->E
#define D_KEY ctx->D

static uint8_t pow_tab[256];
static uint8_t log_tab[256];
static uint8_t sbx_tab[256];
static uint8_t isb_tab[256];
static uint32_t rco_tab[10];
static uint32_t ft_tab[4][256];
static uint32_t it_tab[4][256];

static uint32_t fl_tab[4][256];
static uint32_t il_tab[4][256];

static inline uint8_t
f_mult (uint8_t a, uint8_t b)
{
uint8_t aa = log_tab[a], cc = aa + log_tab[b];

return pow_tab[cc + (cc < aa ? 1 : 0)];
}

#define ff_mult(a,b) (a && b ? f_mult(a, b) : 0)

#define f_rn(bo, bi, n, k) \
bo[n] = ft_tab[0][byte(bi[n],0)] ^ \
ft_tab[1][byte(bi[(n + 1) & 3],1)] ^ \
ft_tab[2][byte(bi[(n + 2) & 3],2)] ^ \
ft_tab[3][byte(bi[(n + 3) & 3],3)] ^ *(k + n)

#define i_rn(bo, bi, n, k) \
bo[n] = it_tab[0][byte(bi[n],0)] ^ \
it_tab[1][byte(bi[(n + 3) & 3],1)] ^ \
it_tab[2][byte(bi[(n + 2) & 3],2)] ^ \
it_tab[3][byte(bi[(n + 1) & 3],3)] ^ *(k + n)

#define ls_box(x) \
( fl_tab[0][byte(x, 0)] ^ \
fl_tab[1][byte(x, 1)] ^ \
fl_tab[2][byte(x, 2)] ^ \
fl_tab[3][byte(x, 3)] )

#define f_rl(bo, bi, n, k) \
bo[n] = fl_tab[0][byte(bi[n],0)] ^ \
fl_tab[1][byte(bi[(n + 1) & 3],1)] ^ \
fl_tab[2][byte(bi[(n + 2) & 3],2)] ^ \
fl_tab[3][byte(bi[(n + 3) & 3],3)] ^ *(k + n)

#define i_rl(bo, bi, n, k) \
bo[n] = il_tab[0][byte(bi[n],0)] ^ \
il_tab[1][byte(bi[(n + 3) & 3],1)] ^ \
il_tab[2][byte(bi[(n + 2) & 3],2)] ^ \
il_tab[3][byte(bi[(n + 1) & 3],3)] ^ *(k + n)

static void
gen_tabs (void)
{
uint32_t i, t;
uint8_t p, q;

/* log and power tables for GF(2**8) finite field with
0x011b as modular polynomial - the simplest prmitive
root is 0x03, used here to generate the tables */

for (i = 0, p = 1; i < 256; ++i) {
pow_tab[i] = (uint8_t) p;
log_tab[p] = (uint8_t) i;

p ^= (p << 1) ^ (p & 0x80 ? 0x01b : 0);
}

log_tab[1] = 0;

for (i = 0, p = 1; i < 10; ++i) {
rco_tab[i] = p;

p = (p << 1) ^ (p & 0x80 ? 0x01b : 0);
}

for (i = 0; i < 256; ++i) {
p = (i ? pow_tab[255 - log_tab[i]] : 0);
q = ((p >> 7) | (p << 1)) ^ ((p >> 6) | (p << 2));
p ^= 0x63 ^ q ^ ((q >> 6) | (q << 2));
sbx_tab[i] = p;
isb_tab[p] = (uint8_t) i;
}

for (i = 0; i < 256; ++i) {
p = sbx_tab[i];

t = p;
fl_tab[0][i] = t;
fl_tab[1][i] = rotl (t, 8);
fl_tab[2][i] = rotl (t, 16);
fl_tab[3][i] = rotl (t, 24);

t = ((uint32_t) ff_mult (2, p)) |
((uint32_t) p << 8) |
((uint32_t) p << 16) | ((uint32_t) ff_mult (3, p) << 24);

ft_tab[0][i] = t;
ft_tab[1][i] = rotl (t, 8);
ft_tab[2][i] = rotl (t, 16);
ft_tab[3][i] = rotl (t, 24);

p = isb_tab[i];

t = p;
il_tab[0][i] = t;
il_tab[1][i] = rotl (t, 8);
il_tab[2][i] = rotl (t, 16);
il_tab[3][i] = rotl (t, 24);

t = ((uint32_t) ff_mult (14, p)) |
((uint32_t) ff_mult (9, p) << 8) |
((uint32_t) ff_mult (13, p) << 16) |
((uint32_t) ff_mult (11, p) << 24);

it_tab[0][i] = t;
it_tab[1][i] = rotl (t, 8);
it_tab[2][i] = rotl (t, 16);
it_tab[3][i] = rotl (t, 24);
}
}

#define star_x(x) (((x) & 0x7f7f7f7f) << 1) ^ ((((x) & 0x80808080) >> 7) * 0x1b)

#define imix_col(y,x) \
u = star_x(x); \
v = star_x(u); \
w = star_x(v); \
t = w ^ (x); \
(y) = u ^ v ^ w; \
(y) ^= rotr(u ^ t, 8) ^ \
rotr(v ^ t, 16) ^ \
rotr(t,24)

/* initialise the key schedule from the user supplied key */

#define loop4(i) \
{ t = rotr(t, 8); t = ls_box(t) ^ rco_tab[i]; \
t ^= E_KEY[4 * i]; E_KEY[4 * i + 4] = t; \
t ^= E_KEY[4 * i + 1]; E_KEY[4 * i + 5] = t; \
t ^= E_KEY[4 * i + 2]; E_KEY[4 * i + 6] = t; \
t ^= E_KEY[4 * i + 3]; E_KEY[4 * i + 7] = t; \
}

#define loop6(i) \
{ t = rotr(t, 8); t = ls_box(t) ^ rco_tab[i]; \
t ^= E_KEY[6 * i]; E_KEY[6 * i + 6] = t; \
t ^= E_KEY[6 * i + 1]; E_KEY[6 * i + 7] = t; \
t ^= E_KEY[6 * i + 2]; E_KEY[6 * i + 8] = t; \
t ^= E_KEY[6 * i + 3]; E_KEY[6 * i + 9] = t; \
t ^= E_KEY[6 * i + 4]; E_KEY[6 * i + 10] = t; \
t ^= E_KEY[6 * i + 5]; E_KEY[6 * i + 11] = t; \
}

#define loop8(i) \
{ t = rotr(t, 8); ; t = ls_box(t) ^ rco_tab[i]; \
t ^= E_KEY[8 * i]; E_KEY[8 * i + 8] = t; \
t ^= E_KEY[8 * i + 1]; E_KEY[8 * i + 9] = t; \
t ^= E_KEY[8 * i + 2]; E_KEY[8 * i + 10] = t; \
t ^= E_KEY[8 * i + 3]; E_KEY[8 * i + 11] = t; \
t = E_KEY[8 * i + 4] ^ ls_box(t); \
E_KEY[8 * i + 12] = t; \
t ^= E_KEY[8 * i + 5]; E_KEY[8 * i + 13] = t; \
t ^= E_KEY[8 * i + 6]; E_KEY[8 * i + 14] = t; \
t ^= E_KEY[8 * i + 7]; E_KEY[8 * i + 15] = t; \
}

/* Tells whether the ACE is capable to generate
the extended key for a given key_len. */
static inline int
@ -321,17 +87,13 @@ static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
struct aes_ctx *ctx = aes_ctx(tfm);
const __le32 *key = (const __le32 *)in_key;
u32 *flags = &tfm->crt_flags;
uint32_t i, t, u, v, w;
uint32_t P[AES_EXTENDED_KEY_SIZE];
uint32_t rounds;
struct crypto_aes_ctx gen_aes;

if (key_len % 8) {
*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
return -EINVAL;
}

ctx->key_length = key_len;

/*
* If the hardware is capable of generating the extended key
* itself we must supply the plain key for both encryption
@ -339,10 +101,10 @@ static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
*/
ctx->D = ctx->E;

E_KEY[0] = le32_to_cpu(key[0]);
E_KEY[1] = le32_to_cpu(key[1]);
E_KEY[2] = le32_to_cpu(key[2]);
E_KEY[3] = le32_to_cpu(key[3]);
ctx->E[0] = le32_to_cpu(key[0]);
ctx->E[1] = le32_to_cpu(key[1]);
ctx->E[2] = le32_to_cpu(key[2]);
ctx->E[3] = le32_to_cpu(key[3]);

/* Prepare control words. */
memset(&ctx->cword, 0, sizeof(ctx->cword));
@ -361,56 +123,13 @@ static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
ctx->cword.encrypt.keygen = 1;
ctx->cword.decrypt.keygen = 1;

switch (key_len) {
case 16:
t = E_KEY[3];
for (i = 0; i < 10; ++i)
loop4 (i);
break;

case 24:
E_KEY[4] = le32_to_cpu(key[4]);
t = E_KEY[5] = le32_to_cpu(key[5]);
for (i = 0; i < 8; ++i)
loop6 (i);
break;

case 32:
E_KEY[4] = le32_to_cpu(key[4]);
E_KEY[5] = le32_to_cpu(key[5]);
E_KEY[6] = le32_to_cpu(key[6]);
t = E_KEY[7] = le32_to_cpu(key[7]);
for (i = 0; i < 7; ++i)
loop8 (i);
break;
if (crypto_aes_expand_key(&gen_aes, in_key, key_len)) {
*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
return -EINVAL;
}

D_KEY[0] = E_KEY[0];
D_KEY[1] = E_KEY[1];
D_KEY[2] = E_KEY[2];
D_KEY[3] = E_KEY[3];

for (i = 4; i < key_len + 24; ++i) {
imix_col (D_KEY[i], E_KEY[i]);
}

/* PadLock needs a different format of the decryption key. */
rounds = 10 + (key_len - 16) / 4;

for (i = 0; i < rounds; i++) {
P[((i + 1) * 4) + 0] = D_KEY[((rounds - i - 1) * 4) + 0];
P[((i + 1) * 4) + 1] = D_KEY[((rounds - i - 1) * 4) + 1];
P[((i + 1) * 4) + 2] = D_KEY[((rounds - i - 1) * 4) + 2];
P[((i + 1) * 4) + 3] = D_KEY[((rounds - i - 1) * 4) + 3];
}

P[0] = E_KEY[(rounds * 4) + 0];
P[1] = E_KEY[(rounds * 4) + 1];
P[2] = E_KEY[(rounds * 4) + 2];
P[3] = E_KEY[(rounds * 4) + 3];

memcpy(D_KEY, P, AES_EXTENDED_KEY_SIZE_B);

memcpy(ctx->E, gen_aes.key_enc, AES_MAX_KEYLENGTH);
memcpy(ctx->D, gen_aes.key_dec, AES_MAX_KEYLENGTH);
return 0;
}
@ -675,7 +394,6 @@ static int __init padlock_init(void)
return -ENODEV;
}

gen_tabs();
if ((ret = crypto_register_alg(&aes_alg)))
goto aes_err;
@ -14,11 +14,13 @@
#define AES_KEYSIZE_192 24
#define AES_KEYSIZE_256 32
#define AES_BLOCK_SIZE 16
#define AES_MAX_KEYLENGTH (15 * 16)
#define AES_MAX_KEYLENGTH_U32 (AES_MAX_KEYLENGTH / sizeof(u32))

struct crypto_aes_ctx {
u32 key_length;
u32 key_enc[60];
u32 key_dec[60];
u32 key_enc[AES_MAX_KEYLENGTH_U32];
u32 key_dec[AES_MAX_KEYLENGTH_U32];
};

extern u32 crypto_ft_tab[4][256];

@ -28,4 +30,6 @@ extern u32 crypto_il_tab[4][256];

int crypto_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
unsigned int key_len);
int crypto_aes_expand_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
unsigned int key_len);
#endif
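With crypto_aes_expand_key() exported, a driver such as padlock-aes can reuse the generic AES key schedule instead of carrying its own tables, which is exactly what the aes_set_key() hunk above does. A minimal sketch of the call; the helper name and the dst_enc/dst_dec destinations are made up and stand in for a driver's own context:

#include <linux/errno.h>
#include <linux/string.h>
#include <crypto/aes.h>

/* Sketch: expand a user key with the generic AES code, then copy the
 * round keys wherever the hardware expects them. */
static int expand_key_example(const u8 *in_key, unsigned int key_len,
                              u32 *dst_enc, u32 *dst_dec)
{
        struct crypto_aes_ctx gen_aes;

        if (crypto_aes_expand_key(&gen_aes, in_key, key_len))
                return -EINVAL;

        memcpy(dst_enc, gen_aes.key_enc, AES_MAX_KEYLENGTH);
        memcpy(dst_dec, gen_aes.key_dec, AES_MAX_KEYLENGTH);
        return 0;
}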
@ -317,14 +317,7 @@ int crypto_unregister_alg(struct crypto_alg *alg);
/*
* Algorithm query interface.
*/
#ifdef CONFIG_CRYPTO
int crypto_has_alg(const char *name, u32 type, u32 mask);
#else
static inline int crypto_has_alg(const char *name, u32 type, u32 mask)
{
return 0;
}
#endif

/*
* Transforms: user-instantiated objects which encapsulate algorithms
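Now that the crypto core can itself be modular, crypto_has_alg() is declared unconditionally rather than stubbed out when CONFIG_CRYPTO is unset. Callers keep probing for algorithms the same way; a small sketch, with "cbc(aes)" chosen only as an example name:

#include <linux/kernel.h>
#include <linux/crypto.h>

/* Sketch: check whether an algorithm can be instantiated before
 * relying on it. */
static void report_cbc_aes(void)
{
        if (crypto_has_alg("cbc(aes)", 0, 0))
                printk(KERN_INFO "cbc(aes) is available\n");
        else
                printk(KERN_INFO "cbc(aes) is not available\n");
}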