Mirror of https://github.com/adulau/aha.git (synced 2025-02-17 20:29:41 +00:00)
Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
* git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (102 commits)
  crypto: sha-s390 - Fix warnings in import function
  crypto: vmac - New hash algorithm for intel_txt support
  crypto: api - Do not displace newly registered algorithms
  crypto: ansi_cprng - Fix module initialization
  crypto: xcbc - Fix alignment calculation of xcbc_tfm_ctx
  crypto: fips - Depend on ansi_cprng
  crypto: blkcipher - Do not use eseqiv on stream ciphers
  crypto: ctr - Use chainiv on raw counter mode
  Revert "crypto: fips - Select CPRNG"
  crypto: rng - Fix typo
  crypto: talitos - add support for 36 bit addressing
  crypto: talitos - align locks on cache lines
  crypto: talitos - simplify hmac data size calculation
  crypto: mv_cesa - Add support for Orion5X crypto engine
  crypto: cryptd - Add support to access underlaying shash
  crypto: gcm - Use GHASH digest algorithm
  crypto: ghash - Add GHASH digest algorithm for GCM
  crypto: authenc - Convert to ahash
  crypto: api - Fix aligned ctx helper
  crypto: hmac - Prehash ipad/opad
  ...
commit 332a339218
53 changed files with 4521 additions and 1518 deletions

arch/s390/crypto/des_s390.c:

@@ -250,8 +250,9 @@ static int des3_128_setkey(struct crypto_tfm *tfm, const u8 *key,
 	const u8 *temp_key = key;
 	u32 *flags = &tfm->crt_flags;
 
-	if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE))) {
-		*flags |= CRYPTO_TFM_RES_BAD_KEY_SCHED;
+	if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE)) &&
+	    (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
+		*flags |= CRYPTO_TFM_RES_WEAK_KEY;
 		return -EINVAL;
 	}
 	for (i = 0; i < 2; i++, temp_key += DES_KEY_SIZE) {

@@ -411,9 +412,9 @@ static int des3_192_setkey(struct crypto_tfm *tfm, const u8 *key,
 
 	if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE) &&
 	      memcmp(&key[DES_KEY_SIZE], &key[DES_KEY_SIZE * 2],
-	      DES_KEY_SIZE))) {
-
-		*flags |= CRYPTO_TFM_RES_BAD_KEY_SCHED;
+	      DES_KEY_SIZE)) &&
+	    (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
+		*flags |= CRYPTO_TFM_RES_WEAK_KEY;
 		return -EINVAL;
 	}
 	for (i = 0; i < 3; i++, temp_key += DES_KEY_SIZE) {
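
The two hunks above relax the triple-DES setkey paths: a degenerate key (K1 == K2) now only fails when the caller explicitly asked for weak-key checking, and the error reported changes from BAD_KEY_SCHED to WEAK_KEY. A minimal sketch of how a caller opts in, using the single-block cipher API of that era; the algorithm name and key length are illustrative, not taken from this commit:

	#include <crypto/des.h>
	#include <linux/crypto.h>
	#include <linux/err.h>
	#include <linux/kernel.h>

	static int example_setkey_strict(const u8 *key)
	{
		struct crypto_cipher *tfm;
		int err;

		tfm = crypto_alloc_cipher("des3_ede", 0, 0);
		if (IS_ERR(tfm))
			return PTR_ERR(tfm);

		/* Without this flag, the patched setkey now accepts K1 == K2. */
		crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);

		err = crypto_cipher_setkey(tfm, key, DES_KEY_SIZE * 3);
		if (err)	/* -EINVAL, with CRYPTO_TFM_RES_WEAK_KEY set */
			pr_debug("weak 3DES key rejected\n");

		crypto_free_cipher(tfm);
		return err;
	}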

arch/s390/crypto/sha1_s390.c:

@@ -46,12 +46,38 @@ static int sha1_init(struct shash_desc *desc)
 	return 0;
 }
 
+static int sha1_export(struct shash_desc *desc, void *out)
+{
+	struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
+	struct sha1_state *octx = out;
+
+	octx->count = sctx->count;
+	memcpy(octx->state, sctx->state, sizeof(octx->state));
+	memcpy(octx->buffer, sctx->buf, sizeof(octx->buffer));
+	return 0;
+}
+
+static int sha1_import(struct shash_desc *desc, const void *in)
+{
+	struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
+	const struct sha1_state *ictx = in;
+
+	sctx->count = ictx->count;
+	memcpy(sctx->state, ictx->state, sizeof(ictx->state));
+	memcpy(sctx->buf, ictx->buffer, sizeof(ictx->buffer));
+	sctx->func = KIMD_SHA_1;
+	return 0;
+}
+
 static struct shash_alg alg = {
 	.digestsize	=	SHA1_DIGEST_SIZE,
 	.init		=	sha1_init,
 	.update		=	s390_sha_update,
 	.final		=	s390_sha_final,
+	.export		=	sha1_export,
+	.import		=	sha1_import,
 	.descsize	=	sizeof(struct s390_sha_ctx),
+	.statesize	=	sizeof(struct sha1_state),
 	.base		=	{
 		.cra_name	=	"sha1",
 		.cra_driver_name=	"sha1-s390",
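
The new export/import pair makes partially hashed state portable: export snapshots the driver's private context into the generic struct sha1_state sized by .statesize, and import reloads it. A rough usage sketch against the shash interface (my example, error handling trimmed, not part of the patch):

	#include <crypto/hash.h>
	#include <crypto/sha.h>
	#include <linux/err.h>
	#include <linux/slab.h>

	static int example_split_sha1(u8 *digest)
	{
		struct crypto_shash *tfm;
		struct shash_desc *desc;
		struct sha1_state state;	/* matches .statesize above */
		int err;

		tfm = crypto_alloc_shash("sha1", 0, 0);
		if (IS_ERR(tfm))
			return PTR_ERR(tfm);

		desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm),
			       GFP_KERNEL);
		if (!desc) {
			crypto_free_shash(tfm);
			return -ENOMEM;
		}
		desc->tfm = tfm;
		desc->flags = 0;

		err = crypto_shash_init(desc);
		if (!err)
			err = crypto_shash_update(desc, (const u8 *)"hello", 5);
		if (!err)
			err = crypto_shash_export(desc, &state);	/* snapshot */
		if (!err)
			err = crypto_shash_import(desc, &state);	/* resume */
		if (!err)
			err = crypto_shash_update(desc, (const u8 *)"world", 5);
		if (!err)
			err = crypto_shash_final(desc, digest);

		kfree(desc);
		crypto_free_shash(tfm);
		return err;
	}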

arch/s390/crypto/sha256_s390.c:

@@ -42,12 +42,38 @@ static int sha256_init(struct shash_desc *desc)
 	return 0;
 }
 
+static int sha256_export(struct shash_desc *desc, void *out)
+{
+	struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
+	struct sha256_state *octx = out;
+
+	octx->count = sctx->count;
+	memcpy(octx->state, sctx->state, sizeof(octx->state));
+	memcpy(octx->buf, sctx->buf, sizeof(octx->buf));
+	return 0;
+}
+
+static int sha256_import(struct shash_desc *desc, const void *in)
+{
+	struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
+	const struct sha256_state *ictx = in;
+
+	sctx->count = ictx->count;
+	memcpy(sctx->state, ictx->state, sizeof(ictx->state));
+	memcpy(sctx->buf, ictx->buf, sizeof(ictx->buf));
+	sctx->func = KIMD_SHA_256;
+	return 0;
+}
+
 static struct shash_alg alg = {
 	.digestsize	=	SHA256_DIGEST_SIZE,
 	.init		=	sha256_init,
 	.update		=	s390_sha_update,
 	.final		=	s390_sha_final,
+	.export		=	sha256_export,
+	.import		=	sha256_import,
 	.descsize	=	sizeof(struct s390_sha_ctx),
+	.statesize	=	sizeof(struct sha256_state),
 	.base		=	{
 		.cra_name	=	"sha256",
 		.cra_driver_name=	"sha256-s390",

arch/s390/crypto/sha512_s390.c:

@@ -13,7 +13,10 @@
  *
  */
 #include <crypto/internal/hash.h>
+#include <crypto/sha.h>
+#include <linux/errno.h>
 #include <linux/init.h>
+#include <linux/kernel.h>
 #include <linux/module.h>
 
 #include "sha.h"

@@ -37,12 +40,42 @@ static int sha512_init(struct shash_desc *desc)
 	return 0;
 }
 
+static int sha512_export(struct shash_desc *desc, void *out)
+{
+	struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
+	struct sha512_state *octx = out;
+
+	octx->count[0] = sctx->count;
+	octx->count[1] = 0;
+	memcpy(octx->state, sctx->state, sizeof(octx->state));
+	memcpy(octx->buf, sctx->buf, sizeof(octx->buf));
+	return 0;
+}
+
+static int sha512_import(struct shash_desc *desc, const void *in)
+{
+	struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
+	const struct sha512_state *ictx = in;
+
+	if (unlikely(ictx->count[1]))
+		return -ERANGE;
+	sctx->count = ictx->count[0];
+
+	memcpy(sctx->state, ictx->state, sizeof(ictx->state));
+	memcpy(sctx->buf, ictx->buf, sizeof(ictx->buf));
+	sctx->func = KIMD_SHA_512;
+	return 0;
+}
+
 static struct shash_alg sha512_alg = {
 	.digestsize	=	SHA512_DIGEST_SIZE,
 	.init		=	sha512_init,
 	.update		=	s390_sha_update,
 	.final		=	s390_sha_final,
+	.export		=	sha512_export,
+	.import		=	sha512_import,
 	.descsize	=	sizeof(struct s390_sha_ctx),
+	.statesize	=	sizeof(struct sha512_state),
 	.base		=	{
 		.cra_name	=	"sha512",
 		.cra_driver_name=	"sha512-s390",

@@ -78,7 +111,10 @@ static struct shash_alg sha384_alg = {
 	.init		=	sha384_init,
 	.update		=	s390_sha_update,
 	.final		=	s390_sha_final,
+	.export		=	sha512_export,
+	.import		=	sha512_import,
 	.descsize	=	sizeof(struct s390_sha_ctx),
+	.statesize	=	sizeof(struct sha512_state),
 	.base		=	{
 		.cra_name	=	"sha384",
 		.cra_driver_name=	"sha384-s390",
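
The -ERANGE guard in sha512_import() deserves a note: the generic struct sha512_state carries a 128-bit byte count in count[0]/count[1], while the s390 context keeps a single 64-bit counter, so only states with the upper word clear are representable. A tiny illustration (mine, not from the patch):

	#include <crypto/sha.h>

	/* A state claiming 2^64 or more hashed bytes cannot be imported:
	 * sha512_import() above returns -ERANGE for it. */
	static const struct sha512_state unimportable = {
		.count = { 0, 1 },	/* low word, high word */
	};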

arch/x86/crypto/aesni-intel_glue.c:

@@ -636,7 +636,7 @@ static int __init aesni_init(void)
 	int err;
 
 	if (!cpu_has_aes) {
-		printk(KERN_ERR "Intel AES-NI instructions are not detected.\n");
+		printk(KERN_INFO "Intel AES-NI instructions are not detected.\n");
 		return -ENODEV;
 	}
 	if ((err = crypto_register_alg(&aesni_alg)))

crypto/Kconfig:

@@ -23,11 +23,13 @@ comment "Crypto core or helper"
 
 config CRYPTO_FIPS
 	bool "FIPS 200 compliance"
+	depends on CRYPTO_ANSI_CPRNG
 	help
 	  This options enables the fips boot option which is
 	  required if you want to system to operate in a FIPS 200
-	  certification.  You should say no unless you know what
-	  this is.
+	  certification. You should say no unless you know what
+	  this is. Note that CRYPTO_ANSI_CPRNG is requred if this
+	  option is selected
 
 config CRYPTO_ALGAPI
 	tristate

@@ -156,7 +158,7 @@ config CRYPTO_GCM
 	tristate "GCM/GMAC support"
 	select CRYPTO_CTR
 	select CRYPTO_AEAD
-	select CRYPTO_GF128MUL
+	select CRYPTO_GHASH
 	help
 	  Support for Galois/Counter Mode (GCM) and Galois Message
 	  Authentication Code (GMAC). Required for IPSec.

@@ -267,6 +269,18 @@ config CRYPTO_XCBC
 	  http://csrc.nist.gov/encryption/modes/proposedmodes/
 	  xcbc-mac/xcbc-mac-spec.pdf
 
+config CRYPTO_VMAC
+	tristate "VMAC support"
+	depends on EXPERIMENTAL
+	select CRYPTO_HASH
+	select CRYPTO_MANAGER
+	help
+	  VMAC is a message authentication algorithm designed for
+	  very high speed on 64-bit architectures.
+
+	  See also:
+	  <http://fastcrypto.org/vmac>
+
 comment "Digest"
 
 config CRYPTO_CRC32C

@@ -289,6 +303,13 @@ config CRYPTO_CRC32C_INTEL
 	  gain performance compared with software implementation.
 	  Module will be crc32c-intel.
 
+config CRYPTO_GHASH
+	tristate "GHASH digest algorithm"
+	select CRYPTO_SHASH
+	select CRYPTO_GF128MUL
+	help
+	  GHASH is message digest algorithm for GCM (Galois/Counter Mode).
+
 config CRYPTO_MD4
 	tristate "MD4 digest algorithm"
 	select CRYPTO_HASH

@@ -780,13 +801,14 @@ comment "Random Number Generation"
 
 config CRYPTO_ANSI_CPRNG
 	tristate "Pseudo Random Number Generation for Cryptographic modules"
+	default m
 	select CRYPTO_AES
 	select CRYPTO_RNG
-	select CRYPTO_FIPS
 	help
 	  This option enables the generic pseudo random number generator
 	  for cryptographic modules. Uses the Algorithm specified in
-	  ANSI X9.31 A.2.4
+	  ANSI X9.31 A.2.4. Not this option must be enabled if CRYPTO_FIPS
+	  is selected
 
 source "drivers/crypto/Kconfig"
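
For context, the fips boot option that CRYPTO_FIPS enables amounts to a single global flag; the sketch below paraphrases the shape of crypto/fips.c from memory rather than quoting this diff, so treat names and details as approximate:

	/* Roughly what crypto/fips.c does: parse "fips=" and export a flag
	 * that other crypto code (e.g. the CPRNG priority bump below) tests. */
	int fips_enabled;
	EXPORT_SYMBOL_GPL(fips_enabled);

	static int fips_enable(char *str)
	{
		fips_enabled = !!simple_strtol(str, NULL, 0);
		printk(KERN_INFO "fips mode: %s\n",
		       fips_enabled ? "enabled" : "disabled");
		return 1;
	}

	__setup("fips=", fips_enable);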

crypto/Makefile:

@@ -3,7 +3,7 @@
 #
 
 obj-$(CONFIG_CRYPTO) += crypto.o
-crypto-objs := api.o cipher.o digest.o compress.o
+crypto-objs := api.o cipher.o compress.o
 
 obj-$(CONFIG_CRYPTO_WORKQUEUE) += crypto_wq.o
 

@@ -22,7 +22,6 @@ obj-$(CONFIG_CRYPTO_BLKCIPHER2) += chainiv.o
 obj-$(CONFIG_CRYPTO_BLKCIPHER2) += eseqiv.o
 obj-$(CONFIG_CRYPTO_SEQIV) += seqiv.o
 
-crypto_hash-objs := hash.o
 crypto_hash-objs += ahash.o
 crypto_hash-objs += shash.o
 obj-$(CONFIG_CRYPTO_HASH2) += crypto_hash.o

@@ -33,6 +32,7 @@ cryptomgr-objs := algboss.o testmgr.o
 
 obj-$(CONFIG_CRYPTO_MANAGER2) += cryptomgr.o
 obj-$(CONFIG_CRYPTO_HMAC) += hmac.o
+obj-$(CONFIG_CRYPTO_VMAC) += vmac.o
 obj-$(CONFIG_CRYPTO_XCBC) += xcbc.o
 obj-$(CONFIG_CRYPTO_NULL) += crypto_null.o
 obj-$(CONFIG_CRYPTO_MD4) += md4.o

@@ -83,6 +83,7 @@ obj-$(CONFIG_CRYPTO_RNG2) += rng.o
 obj-$(CONFIG_CRYPTO_RNG2) += krng.o
 obj-$(CONFIG_CRYPTO_ANSI_CPRNG) += ansi_cprng.o
 obj-$(CONFIG_CRYPTO_TEST) += tcrypt.o
+obj-$(CONFIG_CRYPTO_GHASH) += ghash-generic.o
 
 #
 # generic algorithms and the async_tx api

crypto/ablkcipher.c:

@@ -14,6 +14,7 @@
  */
 
 #include <crypto/internal/skcipher.h>
+#include <linux/cpumask.h>
 #include <linux/err.h>
 #include <linux/init.h>
 #include <linux/kernel.h>

@@ -25,6 +26,8 @@
 
 #include "internal.h"
 
+static const char *skcipher_default_geniv __read_mostly;
+
 static int setkey_unaligned(struct crypto_ablkcipher *tfm, const u8 *key,
 			    unsigned int keylen)
 {

@@ -180,7 +183,14 @@ EXPORT_SYMBOL_GPL(crypto_givcipher_type);
 
 const char *crypto_default_geniv(const struct crypto_alg *alg)
 {
-	return alg->cra_flags & CRYPTO_ALG_ASYNC ? "eseqiv" : "chainiv";
+	if (((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
+	     CRYPTO_ALG_TYPE_BLKCIPHER ? alg->cra_blkcipher.ivsize :
+					 alg->cra_ablkcipher.ivsize) !=
+	    alg->cra_blocksize)
+		return "chainiv";
+
+	return alg->cra_flags & CRYPTO_ALG_ASYNC ?
+	       "eseqiv" : skcipher_default_geniv;
 }
 
 static int crypto_givcipher_default(struct crypto_alg *alg, u32 type, u32 mask)

@@ -201,8 +211,9 @@ static int crypto_givcipher_default(struct crypto_alg *alg, u32 type, u32 mask)
 	int err;
 
 	larval = crypto_larval_lookup(alg->cra_driver_name,
+				      (type & ~CRYPTO_ALG_TYPE_MASK) |
 				      CRYPTO_ALG_TYPE_GIVCIPHER,
-				      CRYPTO_ALG_TYPE_MASK);
+				      mask | CRYPTO_ALG_TYPE_MASK);
 	err = PTR_ERR(larval);
 	if (IS_ERR(larval))
 		goto out;

@@ -360,3 +371,17 @@ err:
 	return ERR_PTR(err);
 }
 EXPORT_SYMBOL_GPL(crypto_alloc_ablkcipher);
+
+static int __init skcipher_module_init(void)
+{
+	skcipher_default_geniv = num_possible_cpus() > 1 ?
+				 "eseqiv" : "chainiv";
+	return 0;
+}
+
+static void skcipher_module_exit(void)
+{
+}
+
+module_init(skcipher_module_init);
+module_exit(skcipher_module_exit);
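
The rewritten crypto_default_geniv() picks an IV generator in two steps: modes whose IV size differs from their block size (CTR, for instance, with a block size of 1) always fall back to chainiv, and for the rest the choice between eseqiv and chainiv is made once at module init based on num_possible_cpus(). A hedged sketch of where that decision surfaces; the allocation names are standard, the comments are my reading of the code above:

	#include <linux/crypto.h>
	#include <linux/err.h>

	static void example_geniv_defaults(void)
	{
		struct crypto_ablkcipher *cbc, *ctr;

		/* CBC: ivsize == blocksize, so SMP gets "eseqiv", UP "chainiv". */
		cbc = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);

		/* CTR: ivsize (16) != blocksize (1), so always "chainiv". */
		ctr = crypto_alloc_ablkcipher("ctr(aes)", 0, 0);

		if (!IS_ERR(cbc))
			crypto_free_ablkcipher(cbc);
		if (!IS_ERR(ctr))
			crypto_free_ablkcipher(ctr);
	}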

crypto/aes_generic.c:

@@ -1174,7 +1174,7 @@ EXPORT_SYMBOL_GPL(crypto_il_tab);
 	ctx->key_enc[6 * i + 11] = t;		\
 } while (0)
 
-#define loop8(i)	do {			\
+#define loop8tophalf(i)	do {			\
 	t = ror32(t, 8);			\
 	t = ls_box(t) ^ rco_tab[i];		\
 	t ^= ctx->key_enc[8 * i];		\

@@ -1185,6 +1185,10 @@ EXPORT_SYMBOL_GPL(crypto_il_tab);
 	ctx->key_enc[8 * i + 10] = t;		\
 	t ^= ctx->key_enc[8 * i + 3];		\
 	ctx->key_enc[8 * i + 11] = t;		\
+} while (0)
+
+#define loop8(i)	do {			\
+	loop8tophalf(i);			\
 	t = ctx->key_enc[8 * i + 4] ^ ls_box(t); \
 	ctx->key_enc[8 * i + 12] = t;		\
 	t ^= ctx->key_enc[8 * i + 5];		\

@@ -1245,8 +1249,9 @@ int crypto_aes_expand_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
 		ctx->key_enc[5] = le32_to_cpu(key[5]);
 		ctx->key_enc[6] = le32_to_cpu(key[6]);
 		t = ctx->key_enc[7] = le32_to_cpu(key[7]);
-		for (i = 0; i < 7; ++i)
+		for (i = 0; i < 6; ++i)
 			loop8(i);
+		loop8tophalf(i);
 		break;
 	}
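
The loop8/loop8tophalf split fixes the AES-256 key expansion writing past the end of the schedule. The arithmetic, spelled out (not in the patch text itself):

	/*
	 * AES-256 has 14 rounds, so key_enc needs 4 * (14 + 1) = 60 words.
	 * The first 8 words come straight from the key.  Each loop8(i)
	 * emits 8 more, so the old "for (i = 0; i < 7; ++i) loop8(i)"
	 * produced 8 + 7 * 8 = 64 words, four past the end.  Six full
	 * iterations plus the new loop8tophalf() give 8 + 6 * 8 + 4 = 60,
	 * filling the schedule exactly.
	 */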

crypto/ahash.c (346 lines changed):

@@ -24,6 +24,19 @@
 
 #include "internal.h"
 
+struct ahash_request_priv {
+	crypto_completion_t complete;
+	void *data;
+	u8 *result;
+	void *ubuf[] CRYPTO_MINALIGN_ATTR;
+};
+
+static inline struct ahash_alg *crypto_ahash_alg(struct crypto_ahash *hash)
+{
+	return container_of(crypto_hash_alg_common(hash), struct ahash_alg,
+			    halg);
+}
+
 static int hash_walk_next(struct crypto_hash_walk *walk)
 {
 	unsigned int alignmask = walk->alignmask;

@@ -132,36 +145,34 @@ int crypto_hash_walk_first_compat(struct hash_desc *hdesc,
 static int ahash_setkey_unaligned(struct crypto_ahash *tfm, const u8 *key,
 				  unsigned int keylen)
 {
-	struct ahash_alg *ahash = crypto_ahash_alg(tfm);
 	unsigned long alignmask = crypto_ahash_alignmask(tfm);
 	int ret;
 	u8 *buffer, *alignbuffer;
 	unsigned long absize;
 
 	absize = keylen + alignmask;
-	buffer = kmalloc(absize, GFP_ATOMIC);
+	buffer = kmalloc(absize, GFP_KERNEL);
 	if (!buffer)
 		return -ENOMEM;
 
 	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
 	memcpy(alignbuffer, key, keylen);
-	ret = ahash->setkey(tfm, alignbuffer, keylen);
-	memset(alignbuffer, 0, keylen);
-	kfree(buffer);
+	ret = tfm->setkey(tfm, alignbuffer, keylen);
+	kzfree(buffer);
 	return ret;
 }
 
-static int ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
-			unsigned int keylen)
+int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
+			unsigned int keylen)
 {
-	struct ahash_alg *ahash = crypto_ahash_alg(tfm);
 	unsigned long alignmask = crypto_ahash_alignmask(tfm);
 
 	if ((unsigned long)key & alignmask)
 		return ahash_setkey_unaligned(tfm, key, keylen);
 
-	return ahash->setkey(tfm, key, keylen);
+	return tfm->setkey(tfm, key, keylen);
 }
+EXPORT_SYMBOL_GPL(crypto_ahash_setkey);
 
 static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
 			  unsigned int keylen)

@@ -169,42 +180,219 @@ static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
 	return -ENOSYS;
 }
 
-int crypto_ahash_import(struct ahash_request *req, const u8 *in)
-{
-	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
-	struct ahash_alg *alg = crypto_ahash_alg(tfm);
-
-	memcpy(ahash_request_ctx(req), in, crypto_ahash_reqsize(tfm));
-
-	if (alg->reinit)
-		alg->reinit(req);
-
-	return 0;
-}
-EXPORT_SYMBOL_GPL(crypto_ahash_import);
-
-static unsigned int crypto_ahash_ctxsize(struct crypto_alg *alg, u32 type,
-					 u32 mask)
-{
-	return alg->cra_ctxsize;
-}
-
-static int crypto_init_ahash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
-{
-	struct ahash_alg *alg = &tfm->__crt_alg->cra_ahash;
-	struct ahash_tfm *crt = &tfm->crt_ahash;
-
-	if (alg->digestsize > PAGE_SIZE / 8)
-		return -EINVAL;
-
-	crt->init = alg->init;
-	crt->update = alg->update;
-	crt->final = alg->final;
-	crt->digest = alg->digest;
-	crt->setkey = alg->setkey ? ahash_setkey : ahash_nosetkey;
-	crt->digestsize = alg->digestsize;
-
-	return 0;
-}
+static inline unsigned int ahash_align_buffer_size(unsigned len,
+						   unsigned long mask)
+{
+	return len + (mask & ~(crypto_tfm_ctx_alignment() - 1));
+}
+
+static void ahash_op_unaligned_finish(struct ahash_request *req, int err)
+{
+	struct ahash_request_priv *priv = req->priv;
+
+	if (err == -EINPROGRESS)
+		return;
+
+	if (!err)
+		memcpy(priv->result, req->result,
+		       crypto_ahash_digestsize(crypto_ahash_reqtfm(req)));
+
+	kzfree(priv);
+}
+
+static void ahash_op_unaligned_done(struct crypto_async_request *req, int err)
+{
+	struct ahash_request *areq = req->data;
+	struct ahash_request_priv *priv = areq->priv;
+	crypto_completion_t complete = priv->complete;
+	void *data = priv->data;
+
+	ahash_op_unaligned_finish(areq, err);
+
+	complete(data, err);
+}
+
+static int ahash_op_unaligned(struct ahash_request *req,
+			      int (*op)(struct ahash_request *))
+{
+	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+	unsigned long alignmask = crypto_ahash_alignmask(tfm);
+	unsigned int ds = crypto_ahash_digestsize(tfm);
+	struct ahash_request_priv *priv;
+	int err;
+
+	priv = kmalloc(sizeof(*priv) + ahash_align_buffer_size(ds, alignmask),
+		       (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
+		       GFP_KERNEL : GFP_ATOMIC);
+	if (!priv)
+		return -ENOMEM;
+
+	priv->result = req->result;
+	priv->complete = req->base.complete;
+	priv->data = req->base.data;
+
+	req->result = PTR_ALIGN((u8 *)priv->ubuf, alignmask + 1);
+	req->base.complete = ahash_op_unaligned_done;
+	req->base.data = req;
+	req->priv = priv;
+
+	err = op(req);
+	ahash_op_unaligned_finish(req, err);
+
+	return err;
+}
+
+static int crypto_ahash_op(struct ahash_request *req,
+			   int (*op)(struct ahash_request *))
+{
+	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+	unsigned long alignmask = crypto_ahash_alignmask(tfm);
+
+	if ((unsigned long)req->result & alignmask)
+		return ahash_op_unaligned(req, op);
+
+	return op(req);
+}
+
+int crypto_ahash_final(struct ahash_request *req)
+{
+	return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->final);
+}
+EXPORT_SYMBOL_GPL(crypto_ahash_final);
+
+int crypto_ahash_finup(struct ahash_request *req)
+{
+	return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->finup);
+}
+EXPORT_SYMBOL_GPL(crypto_ahash_finup);
+
+int crypto_ahash_digest(struct ahash_request *req)
+{
+	return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->digest);
+}
+EXPORT_SYMBOL_GPL(crypto_ahash_digest);
+
+static void ahash_def_finup_finish2(struct ahash_request *req, int err)
+{
+	struct ahash_request_priv *priv = req->priv;
+
+	if (err == -EINPROGRESS)
+		return;
+
+	if (!err)
+		memcpy(priv->result, req->result,
+		       crypto_ahash_digestsize(crypto_ahash_reqtfm(req)));
+
+	kzfree(priv);
+}
+
+static void ahash_def_finup_done2(struct crypto_async_request *req, int err)
+{
+	struct ahash_request *areq = req->data;
+	struct ahash_request_priv *priv = areq->priv;
+	crypto_completion_t complete = priv->complete;
+	void *data = priv->data;
+
+	ahash_def_finup_finish2(areq, err);
+
+	complete(data, err);
+}
+
+static int ahash_def_finup_finish1(struct ahash_request *req, int err)
+{
+	if (err)
+		goto out;
+
+	req->base.complete = ahash_def_finup_done2;
+	req->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
+	err = crypto_ahash_reqtfm(req)->final(req);
+
+out:
+	ahash_def_finup_finish2(req, err);
+	return err;
+}
+
+static void ahash_def_finup_done1(struct crypto_async_request *req, int err)
+{
+	struct ahash_request *areq = req->data;
+	struct ahash_request_priv *priv = areq->priv;
+	crypto_completion_t complete = priv->complete;
+	void *data = priv->data;
+
+	err = ahash_def_finup_finish1(areq, err);
+
+	complete(data, err);
+}
+
+static int ahash_def_finup(struct ahash_request *req)
+{
+	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+	unsigned long alignmask = crypto_ahash_alignmask(tfm);
+	unsigned int ds = crypto_ahash_digestsize(tfm);
+	struct ahash_request_priv *priv;
+
+	priv = kmalloc(sizeof(*priv) + ahash_align_buffer_size(ds, alignmask),
+		       (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
+		       GFP_KERNEL : GFP_ATOMIC);
+	if (!priv)
+		return -ENOMEM;
+
+	priv->result = req->result;
+	priv->complete = req->base.complete;
+	priv->data = req->base.data;
+
+	req->result = PTR_ALIGN((u8 *)priv->ubuf, alignmask + 1);
+	req->base.complete = ahash_def_finup_done1;
+	req->base.data = req;
+	req->priv = priv;
+
+	return ahash_def_finup_finish1(req, tfm->update(req));
+}
+
+static int ahash_no_export(struct ahash_request *req, void *out)
+{
+	return -ENOSYS;
+}
+
+static int ahash_no_import(struct ahash_request *req, const void *in)
+{
+	return -ENOSYS;
+}
+
+static int crypto_ahash_init_tfm(struct crypto_tfm *tfm)
+{
+	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
+	struct ahash_alg *alg = crypto_ahash_alg(hash);
+
+	hash->setkey = ahash_nosetkey;
+	hash->export = ahash_no_export;
+	hash->import = ahash_no_import;
+
+	if (tfm->__crt_alg->cra_type != &crypto_ahash_type)
+		return crypto_init_shash_ops_async(tfm);
+
+	hash->init = alg->init;
+	hash->update = alg->update;
+	hash->final = alg->final;
+	hash->finup = alg->finup ?: ahash_def_finup;
+	hash->digest = alg->digest;
+
+	if (alg->setkey)
+		hash->setkey = alg->setkey;
+	if (alg->export)
+		hash->export = alg->export;
+	if (alg->import)
+		hash->import = alg->import;
+
+	return 0;
+}
+
+static unsigned int crypto_ahash_extsize(struct crypto_alg *alg)
+{
+	if (alg->cra_type == &crypto_ahash_type)
+		return alg->cra_ctxsize;
+
+	return sizeof(struct crypto_shash *);
+}
 
 static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)

@@ -215,17 +403,101 @@ static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
 	seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
 					     "yes" : "no");
 	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
-	seq_printf(m, "digestsize   : %u\n", alg->cra_ahash.digestsize);
+	seq_printf(m, "digestsize   : %u\n",
+		   __crypto_hash_alg_common(alg)->digestsize);
 }
 
 const struct crypto_type crypto_ahash_type = {
-	.ctxsize = crypto_ahash_ctxsize,
-	.init = crypto_init_ahash_ops,
+	.extsize = crypto_ahash_extsize,
+	.init_tfm = crypto_ahash_init_tfm,
 #ifdef CONFIG_PROC_FS
 	.show = crypto_ahash_show,
 #endif
+	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
+	.maskset = CRYPTO_ALG_TYPE_AHASH_MASK,
+	.type = CRYPTO_ALG_TYPE_AHASH,
+	.tfmsize = offsetof(struct crypto_ahash, base),
 };
 EXPORT_SYMBOL_GPL(crypto_ahash_type);
 
+struct crypto_ahash *crypto_alloc_ahash(const char *alg_name, u32 type,
+					u32 mask)
+{
+	return crypto_alloc_tfm(alg_name, &crypto_ahash_type, type, mask);
+}
+EXPORT_SYMBOL_GPL(crypto_alloc_ahash);
+
+static int ahash_prepare_alg(struct ahash_alg *alg)
+{
+	struct crypto_alg *base = &alg->halg.base;
+
+	if (alg->halg.digestsize > PAGE_SIZE / 8 ||
+	    alg->halg.statesize > PAGE_SIZE / 8)
+		return -EINVAL;
+
+	base->cra_type = &crypto_ahash_type;
+	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
+	base->cra_flags |= CRYPTO_ALG_TYPE_AHASH;
+
+	return 0;
+}
+
+int crypto_register_ahash(struct ahash_alg *alg)
+{
+	struct crypto_alg *base = &alg->halg.base;
+	int err;
+
+	err = ahash_prepare_alg(alg);
+	if (err)
+		return err;
+
+	return crypto_register_alg(base);
+}
+EXPORT_SYMBOL_GPL(crypto_register_ahash);
+
+int crypto_unregister_ahash(struct ahash_alg *alg)
+{
+	return crypto_unregister_alg(&alg->halg.base);
+}
+EXPORT_SYMBOL_GPL(crypto_unregister_ahash);
+
+int ahash_register_instance(struct crypto_template *tmpl,
+			    struct ahash_instance *inst)
+{
+	int err;
+
+	err = ahash_prepare_alg(&inst->alg);
+	if (err)
+		return err;
+
+	return crypto_register_instance(tmpl, ahash_crypto_instance(inst));
+}
+EXPORT_SYMBOL_GPL(ahash_register_instance);
+
+void ahash_free_instance(struct crypto_instance *inst)
+{
+	crypto_drop_spawn(crypto_instance_ctx(inst));
+	kfree(ahash_instance(inst));
+}
+EXPORT_SYMBOL_GPL(ahash_free_instance);
+
+int crypto_init_ahash_spawn(struct crypto_ahash_spawn *spawn,
+			    struct hash_alg_common *alg,
+			    struct crypto_instance *inst)
+{
+	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
+				  &crypto_ahash_type);
+}
+EXPORT_SYMBOL_GPL(crypto_init_ahash_spawn);
+
+struct hash_alg_common *ahash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
+{
+	struct crypto_alg *alg;
+
+	alg = crypto_attr_alg2(rta, &crypto_ahash_type, type, mask);
+	return IS_ERR(alg) ? ERR_CAST(alg) : __crypto_hash_alg_common(alg);
+}
+EXPORT_SYMBOL_GPL(ahash_attr_alg);
+
 MODULE_LICENSE("GPL");
 MODULE_DESCRIPTION("Asynchronous cryptographic hash type");
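
The reworked front end is easiest to see from a caller's perspective: allocate with crypto_alloc_ahash(), drive it through a request object, and let crypto_ahash_digest() route through crypto_ahash_op(), which detours into the new unaligned path when the result buffer is misaligned. A sketch with abbreviated error handling (my example, not from the patch):

	#include <crypto/hash.h>
	#include <linux/err.h>
	#include <linux/scatterlist.h>

	static int example_ahash_digest(const u8 *buf, unsigned int len, u8 *out)
	{
		struct crypto_ahash *tfm;
		struct ahash_request *req;
		struct scatterlist sg;
		int err;

		tfm = crypto_alloc_ahash("sha1", 0, 0);
		if (IS_ERR(tfm))
			return PTR_ERR(tfm);

		req = ahash_request_alloc(tfm, GFP_KERNEL);
		if (!req) {
			crypto_free_ahash(tfm);
			return -ENOMEM;
		}

		sg_init_one(&sg, buf, len);
		ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
					   NULL, NULL);
		ahash_request_set_crypt(req, &sg, out, len);

		/* An async driver may return -EINPROGRESS here; a real
		 * caller must wait for its completion callback. */
		err = crypto_ahash_digest(req);

		ahash_request_free(req);
		crypto_free_ahash(tfm);
		return err;
	}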

crypto/algapi.c (180 lines changed):

@@ -81,16 +81,35 @@ static void crypto_destroy_instance(struct crypto_alg *alg)
 	crypto_tmpl_put(tmpl);
 }
 
+static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
+					    struct list_head *stack,
+					    struct list_head *top,
+					    struct list_head *secondary_spawns)
+{
+	struct crypto_spawn *spawn, *n;
+
+	if (list_empty(stack))
+		return NULL;
+
+	spawn = list_first_entry(stack, struct crypto_spawn, list);
+	n = list_entry(spawn->list.next, struct crypto_spawn, list);
+
+	if (spawn->alg && &n->list != stack && !n->alg)
+		n->alg = (n->list.next == stack) ? alg :
+			 &list_entry(n->list.next, struct crypto_spawn,
+				     list)->inst->alg;
+
+	list_move(&spawn->list, secondary_spawns);
+
+	return &n->list == stack ? top : &n->inst->alg.cra_users;
+}
+
 static void crypto_remove_spawn(struct crypto_spawn *spawn,
-				struct list_head *list,
-				struct list_head *secondary_spawns)
+				struct list_head *list)
 {
 	struct crypto_instance *inst = spawn->inst;
 	struct crypto_template *tmpl = inst->tmpl;
 
-	list_del_init(&spawn->list);
-	spawn->alg = NULL;
-
 	if (crypto_is_dead(&inst->alg))
 		return;
 

@@ -106,25 +125,55 @@ static void crypto_remove_spawn(struct crypto_spawn *spawn,
 	hlist_del(&inst->list);
 	inst->alg.cra_destroy = crypto_destroy_instance;
 
-	list_splice(&inst->alg.cra_users, secondary_spawns);
+	BUG_ON(!list_empty(&inst->alg.cra_users));
 }
 
-static void crypto_remove_spawns(struct list_head *spawns,
-				 struct list_head *list, u32 new_type)
+static void crypto_remove_spawns(struct crypto_alg *alg,
+				 struct list_head *list,
+				 struct crypto_alg *nalg)
 {
+	u32 new_type = (nalg ?: alg)->cra_flags;
 	struct crypto_spawn *spawn, *n;
 	LIST_HEAD(secondary_spawns);
+	struct list_head *spawns;
+	LIST_HEAD(stack);
+	LIST_HEAD(top);
 
+	spawns = &alg->cra_users;
 	list_for_each_entry_safe(spawn, n, spawns, list) {
 		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
 			continue;
 
-		crypto_remove_spawn(spawn, list, &secondary_spawns);
+		list_move(&spawn->list, &top);
 	}
 
-	while (!list_empty(&secondary_spawns)) {
-		list_for_each_entry_safe(spawn, n, &secondary_spawns, list)
-			crypto_remove_spawn(spawn, list, &secondary_spawns);
+	spawns = &top;
+	do {
+		while (!list_empty(spawns)) {
+			struct crypto_instance *inst;
+
+			spawn = list_first_entry(spawns, struct crypto_spawn,
+						 list);
+			inst = spawn->inst;
+
+			BUG_ON(&inst->alg == alg);
+
+			list_move(&spawn->list, &stack);
+
+			if (&inst->alg == nalg)
+				break;
+
+			spawn->alg = NULL;
+			spawns = &inst->alg.cra_users;
+		}
+	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
+					      &secondary_spawns)));
+
+	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
+		if (spawn->alg)
+			list_move(&spawn->list, &spawn->alg->cra_users);
+		else
+			crypto_remove_spawn(spawn, list);
 	}
 }
 

@@ -258,7 +307,7 @@ found:
 		    q->cra_priority > alg->cra_priority)
 			continue;
 
-		crypto_remove_spawns(&q->cra_users, &list, alg->cra_flags);
+		crypto_remove_spawns(q, &list, alg);
 	}
 
 complete:

@@ -330,7 +379,7 @@ static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
 
 	crypto_notify(CRYPTO_MSG_ALG_UNREGISTER, alg);
 	list_del_init(&alg->cra_list);
-	crypto_remove_spawns(&alg->cra_users, list, alg->cra_flags);
+	crypto_remove_spawns(alg, list, NULL);
 
 	return 0;
 }

@@ -488,20 +537,38 @@ int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
 }
 EXPORT_SYMBOL_GPL(crypto_init_spawn);
 
+int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
+		       struct crypto_instance *inst,
+		       const struct crypto_type *frontend)
+{
+	int err = -EINVAL;
+
+	if (frontend && (alg->cra_flags ^ frontend->type) & frontend->maskset)
+		goto out;
+
+	spawn->frontend = frontend;
+	err = crypto_init_spawn(spawn, alg, inst, frontend->maskset);
+
+out:
+	return err;
+}
+EXPORT_SYMBOL_GPL(crypto_init_spawn2);
+
 void crypto_drop_spawn(struct crypto_spawn *spawn)
 {
+	if (!spawn->alg)
+		return;
+
 	down_write(&crypto_alg_sem);
 	list_del(&spawn->list);
 	up_write(&crypto_alg_sem);
 }
 EXPORT_SYMBOL_GPL(crypto_drop_spawn);
 
-struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
-				    u32 mask)
+static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
 {
 	struct crypto_alg *alg;
 	struct crypto_alg *alg2;
-	struct crypto_tfm *tfm;
 
 	down_read(&crypto_alg_sem);
 	alg = spawn->alg;

@@ -516,6 +583,19 @@ struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
 		return ERR_PTR(-EAGAIN);
 	}
 
+	return alg;
+}
+
+struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
+				    u32 mask)
+{
+	struct crypto_alg *alg;
+	struct crypto_tfm *tfm;
+
+	alg = crypto_spawn_alg(spawn);
+	if (IS_ERR(alg))
+		return ERR_CAST(alg);
+
 	tfm = ERR_PTR(-EINVAL);
 	if (unlikely((alg->cra_flags ^ type) & mask))
 		goto out_put_alg;

@@ -532,6 +612,27 @@ out_put_alg:
 }
 EXPORT_SYMBOL_GPL(crypto_spawn_tfm);
 
+void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
+{
+	struct crypto_alg *alg;
+	struct crypto_tfm *tfm;
+
+	alg = crypto_spawn_alg(spawn);
+	if (IS_ERR(alg))
+		return ERR_CAST(alg);
+
+	tfm = crypto_create_tfm(alg, spawn->frontend);
+	if (IS_ERR(tfm))
+		goto out_put_alg;
+
+	return tfm;
+
+out_put_alg:
+	crypto_mod_put(alg);
+	return tfm;
+}
+EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);
+
 int crypto_register_notifier(struct notifier_block *nb)
 {
 	return blocking_notifier_chain_register(&crypto_chain, nb);

@@ -595,7 +696,9 @@ const char *crypto_attr_alg_name(struct rtattr *rta)
 }
 EXPORT_SYMBOL_GPL(crypto_attr_alg_name);
 
-struct crypto_alg *crypto_attr_alg(struct rtattr *rta, u32 type, u32 mask)
+struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
+				    const struct crypto_type *frontend,
+				    u32 type, u32 mask)
 {
 	const char *name;
 	int err;

@@ -605,9 +708,9 @@ struct crypto_alg *crypto_attr_alg(struct rtattr *rta, u32 type, u32 mask)
 	if (IS_ERR(name))
 		return ERR_PTR(err);
 
-	return crypto_alg_mod_lookup(name, type, mask);
+	return crypto_find_alg(name, frontend, type, mask);
 }
-EXPORT_SYMBOL_GPL(crypto_attr_alg);
+EXPORT_SYMBOL_GPL(crypto_attr_alg2);
 
 int crypto_attr_u32(struct rtattr *rta, u32 *num)
 {

@@ -627,17 +730,20 @@ int crypto_attr_u32(struct rtattr *rta, u32 *num)
 }
 EXPORT_SYMBOL_GPL(crypto_attr_u32);
 
-struct crypto_instance *crypto_alloc_instance(const char *name,
-					      struct crypto_alg *alg)
+void *crypto_alloc_instance2(const char *name, struct crypto_alg *alg,
+			     unsigned int head)
 {
 	struct crypto_instance *inst;
-	struct crypto_spawn *spawn;
+	char *p;
 	int err;
 
-	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
-	if (!inst)
+	p = kzalloc(head + sizeof(*inst) + sizeof(struct crypto_spawn),
+		    GFP_KERNEL);
+	if (!p)
 		return ERR_PTR(-ENOMEM);
 
+	inst = (void *)(p + head);
+
 	err = -ENAMETOOLONG;
 	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
 		     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)

@@ -647,6 +753,25 @@ struct crypto_instance *crypto_alloc_instance(const char *name,
 		     name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
 		goto err_free_inst;
 
+	return p;
+
+err_free_inst:
+	kfree(p);
+	return ERR_PTR(err);
+}
+EXPORT_SYMBOL_GPL(crypto_alloc_instance2);
+
+struct crypto_instance *crypto_alloc_instance(const char *name,
+					      struct crypto_alg *alg)
+{
+	struct crypto_instance *inst;
+	struct crypto_spawn *spawn;
+	int err;
+
+	inst = crypto_alloc_instance2(name, alg, 0);
+	if (IS_ERR(inst))
+		goto out;
+
+	spawn = crypto_instance_ctx(inst);
+	err = crypto_init_spawn(spawn, alg, inst,
+				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);

@@ -658,7 +783,10 @@ struct crypto_instance *crypto_alloc_instance(const char *name,
 
 err_free_inst:
 	kfree(inst);
-	return ERR_PTR(err);
+	inst = ERR_PTR(err);
+
+out:
+	return inst;
 }
 EXPORT_SYMBOL_GPL(crypto_alloc_instance);
 
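
crypto_alloc_instance2() exists so front ends whose instance structs wrap crypto_instance at a non-zero offset (the new ahash instances, for one) can reserve headroom in the same allocation. A hypothetical layout, with the struct and sizes invented for illustration:

	#include <crypto/algapi.h>
	#include <linux/stddef.h>

	struct my_instance {
		char priv[64];			/* hypothetical front-end data */
		struct crypto_instance inst;	/* lives "head" bytes in */
	};

	static struct my_instance *my_alloc(const char *name,
					    struct crypto_alg *alg)
	{
		/* Returns the start of the whole block (or an ERR_PTR);
		 * inst was placed at p + head by the code above. */
		return crypto_alloc_instance2(name, alg,
					      offsetof(struct my_instance, inst));
	}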

crypto/algboss.c:

@@ -68,6 +68,11 @@ static int cryptomgr_probe(void *data)
 		goto err;
 
 	do {
+		if (tmpl->create) {
+			err = tmpl->create(tmpl, param->tb);
+			continue;
+		}
+
 		inst = tmpl->alloc(param->tb);
 		if (IS_ERR(inst))
 			err = PTR_ERR(inst);
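
This teaches the probing thread about the new ->create() hook, under which a template registers its instance itself instead of returning one for cryptomgr to register. A minimal hypothetical template using the hook:

	#include <crypto/algapi.h>
	#include <linux/module.h>

	static int example_create(struct crypto_template *tmpl,
				  struct rtattr **tb)
	{
		/* ...build an instance and register it directly, e.g. via
		 * ahash_register_instance() from the ahash.c changes... */
		return -ENOSYS;		/* sketch only */
	}

	static struct crypto_template example_tmpl = {
		.name	= "example",
		.create	= example_create,
		.module	= THIS_MODULE,
	};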

crypto/ansi_cprng.c:

@@ -187,7 +187,6 @@ static int _get_more_prng_bytes(struct prng_context *ctx)
 /* Our exported functions */
 static int get_prng_bytes(char *buf, size_t nbytes, struct prng_context *ctx)
 {
-	unsigned long flags;
 	unsigned char *ptr = buf;
 	unsigned int byte_count = (unsigned int)nbytes;
 	int err;

@@ -196,7 +195,7 @@ static int get_prng_bytes(char *buf, size_t nbytes, struct prng_context *ctx)
 	if (nbytes < 0)
 		return -EINVAL;
 
-	spin_lock_irqsave(&ctx->prng_lock, flags);
+	spin_lock_bh(&ctx->prng_lock);
 
 	err = -EINVAL;
 	if (ctx->flags & PRNG_NEED_RESET)

@@ -268,7 +267,7 @@ empty_rbuf:
 	goto remainder;
 
 done:
-	spin_unlock_irqrestore(&ctx->prng_lock, flags);
+	spin_unlock_bh(&ctx->prng_lock);
 	dbgprint(KERN_CRIT "returning %d from get_prng_bytes in context %p\n",
 		 err, ctx);
 	return err;

@@ -284,10 +283,9 @@ static int reset_prng_context(struct prng_context *ctx,
 			      unsigned char *V, unsigned char *DT)
 {
 	int ret;
-	int rc = -EINVAL;
 	unsigned char *prng_key;
 
-	spin_lock(&ctx->prng_lock);
+	spin_lock_bh(&ctx->prng_lock);
 	ctx->flags |= PRNG_NEED_RESET;
 
 	prng_key = (key != NULL) ? key : (unsigned char *)DEFAULT_PRNG_KEY;

@@ -308,34 +306,20 @@ static int reset_prng_context(struct prng_context *ctx,
 	memset(ctx->rand_data, 0, DEFAULT_BLK_SZ);
 	memset(ctx->last_rand_data, 0, DEFAULT_BLK_SZ);
 
-	if (ctx->tfm)
-		crypto_free_cipher(ctx->tfm);
-
-	ctx->tfm = crypto_alloc_cipher("aes", 0, 0);
-	if (IS_ERR(ctx->tfm)) {
-		dbgprint(KERN_CRIT "Failed to alloc tfm for context %p\n",
-			ctx);
-		ctx->tfm = NULL;
-		goto out;
-	}
-
 	ctx->rand_data_valid = DEFAULT_BLK_SZ;
 
 	ret = crypto_cipher_setkey(ctx->tfm, prng_key, klen);
 	if (ret) {
 		dbgprint(KERN_CRIT "PRNG: setkey() failed flags=%x\n",
 			crypto_cipher_get_flags(ctx->tfm));
-		crypto_free_cipher(ctx->tfm);
 		goto out;
 	}
 
-	rc = 0;
+	ret = 0;
 	ctx->flags &= ~PRNG_NEED_RESET;
 out:
-	spin_unlock(&ctx->prng_lock);
-
-	return rc;
+	spin_unlock_bh(&ctx->prng_lock);
+	return ret;
 }
 
 static int cprng_init(struct crypto_tfm *tfm)

@@ -343,6 +327,12 @@ static int cprng_init(struct crypto_tfm *tfm)
 	struct prng_context *ctx = crypto_tfm_ctx(tfm);
 
 	spin_lock_init(&ctx->prng_lock);
+	ctx->tfm = crypto_alloc_cipher("aes", 0, 0);
+	if (IS_ERR(ctx->tfm)) {
+		dbgprint(KERN_CRIT "Failed to alloc tfm for context %p\n",
+			 ctx);
+		return PTR_ERR(ctx->tfm);
+	}
 
 	if (reset_prng_context(ctx, NULL, DEFAULT_PRNG_KSZ, NULL, NULL) < 0)
 		return -EINVAL;

@@ -418,17 +408,10 @@ static struct crypto_alg rng_alg = {
 /* Module initalization */
 static int __init prng_mod_init(void)
 {
-	int ret = 0;
-
 	if (fips_enabled)
 		rng_alg.cra_priority += 200;
 
-	ret = crypto_register_alg(&rng_alg);
-
-	if (ret)
-		goto out;
-out:
-	return 0;
+	return crypto_register_alg(&rng_alg);
 }
 
 static void __exit prng_mod_fini(void)
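
With the AES tfm now allocated once in cprng_init() and the module defaulting to m, the usual way to consume this generator is the rng API; under fips the priority bump above makes "stdrng" tend to resolve to it. A rough consumer sketch (mine, not from the diff):

	#include <crypto/rng.h>
	#include <linux/err.h>

	static int example_get_random(u8 *buf, unsigned int len)
	{
		struct crypto_rng *rng;
		int err;

		rng = crypto_alloc_rng("stdrng", 0, 0);
		if (IS_ERR(rng))
			return PTR_ERR(rng);

		err = crypto_rng_get_bytes(rng, buf, len);	/* 0 on success */
		crypto_free_rng(rng);
		return err;
	}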

crypto/api.c (54 lines changed):

@@ -285,13 +285,6 @@ static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
 	switch (crypto_tfm_alg_type(tfm)) {
 	case CRYPTO_ALG_TYPE_CIPHER:
 		return crypto_init_cipher_ops(tfm);
 
-	case CRYPTO_ALG_TYPE_DIGEST:
-		if ((mask & CRYPTO_ALG_TYPE_HASH_MASK) !=
-		    CRYPTO_ALG_TYPE_HASH_MASK)
-			return crypto_init_digest_ops_async(tfm);
-		else
-			return crypto_init_digest_ops(tfm);
-
 	case CRYPTO_ALG_TYPE_COMPRESS:
 		return crypto_init_compress_ops(tfm);
 

@@ -318,11 +311,7 @@ static void crypto_exit_ops(struct crypto_tfm *tfm)
 	case CRYPTO_ALG_TYPE_CIPHER:
 		crypto_exit_cipher_ops(tfm);
 		break;
 
-	case CRYPTO_ALG_TYPE_DIGEST:
-		crypto_exit_digest_ops(tfm);
-		break;
-
 	case CRYPTO_ALG_TYPE_COMPRESS:
 		crypto_exit_compress_ops(tfm);
 		break;

@@ -349,11 +338,7 @@ static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
 	case CRYPTO_ALG_TYPE_CIPHER:
 		len += crypto_cipher_ctxsize(alg);
 		break;
 
-	case CRYPTO_ALG_TYPE_DIGEST:
-		len += crypto_digest_ctxsize(alg);
-		break;
-
 	case CRYPTO_ALG_TYPE_COMPRESS:
 		len += crypto_compress_ctxsize(alg);
 		break;

@@ -472,7 +457,7 @@ void *crypto_create_tfm(struct crypto_alg *alg,
 	int err = -ENOMEM;
 
 	tfmsize = frontend->tfmsize;
-	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg, frontend);
+	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);
 
 	mem = kzalloc(total, GFP_KERNEL);
 	if (mem == NULL)

@@ -481,7 +466,7 @@ void *crypto_create_tfm(struct crypto_alg *alg,
 	tfm = (struct crypto_tfm *)(mem + tfmsize);
 	tfm->__crt_alg = alg;
 
-	err = frontend->init_tfm(tfm, frontend);
+	err = frontend->init_tfm(tfm);
 	if (err)
 		goto out_free_tfm;
 

@@ -503,6 +488,27 @@ out:
 }
 EXPORT_SYMBOL_GPL(crypto_create_tfm);
 
+struct crypto_alg *crypto_find_alg(const char *alg_name,
+				   const struct crypto_type *frontend,
+				   u32 type, u32 mask)
+{
+	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask) =
+		crypto_alg_mod_lookup;
+
+	if (frontend) {
+		type &= frontend->maskclear;
+		mask &= frontend->maskclear;
+		type |= frontend->type;
+		mask |= frontend->maskset;
+
+		if (frontend->lookup)
+			lookup = frontend->lookup;
+	}
+
+	return lookup(alg_name, type, mask);
+}
+EXPORT_SYMBOL_GPL(crypto_find_alg);
+
 /*
  *	crypto_alloc_tfm - Locate algorithm and allocate transform
  *	@alg_name: Name of algorithm

@@ -526,21 +532,13 @@ EXPORT_SYMBOL_GPL(crypto_create_tfm);
 void *crypto_alloc_tfm(const char *alg_name,
 		       const struct crypto_type *frontend, u32 type, u32 mask)
 {
-	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask);
 	void *tfm;
 	int err;
 
-	type &= frontend->maskclear;
-	mask &= frontend->maskclear;
-	type |= frontend->type;
-	mask |= frontend->maskset;
-
-	lookup = frontend->lookup ?: crypto_alg_mod_lookup;
-
 	for (;;) {
 		struct crypto_alg *alg;