Commit 332a3392 authored by Linus Torvalds's avatar Linus Torvalds

Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6

* git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (102 commits)
  crypto: sha-s390 - Fix warnings in import function
  crypto: vmac - New hash algorithm for intel_txt support
  crypto: api - Do not displace newly registered algorithms
  crypto: ansi_cprng - Fix module initialization
  crypto: xcbc - Fix alignment calculation of xcbc_tfm_ctx
  crypto: fips - Depend on ansi_cprng
  crypto: blkcipher - Do not use eseqiv on stream ciphers
  crypto: ctr - Use chainiv on raw counter mode
  Revert crypto: fips - Select CPRNG
  crypto: rng - Fix typo
  crypto: talitos - add support for 36 bit addressing
  crypto: talitos - align locks on cache lines
  crypto: talitos - simplify hmac data size calculation
  crypto: mv_cesa - Add support for Orion5X crypto engine
  crypto: cryptd - Add support to access underlaying shash
  crypto: gcm - Use GHASH digest algorithm
  crypto: ghash - Add GHASH digest algorithm for GCM
  crypto: authenc - Convert to ahash
  crypto: api - Fix aligned ctx helper
  crypto: hmac - Prehash ipad/opad
  ...
parents a9c86d42 81bd5f6c
...@@ -250,8 +250,9 @@ static int des3_128_setkey(struct crypto_tfm *tfm, const u8 *key, ...@@ -250,8 +250,9 @@ static int des3_128_setkey(struct crypto_tfm *tfm, const u8 *key,
const u8 *temp_key = key; const u8 *temp_key = key;
u32 *flags = &tfm->crt_flags; u32 *flags = &tfm->crt_flags;
if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE))) { if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE)) &&
*flags |= CRYPTO_TFM_RES_BAD_KEY_SCHED; (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
*flags |= CRYPTO_TFM_RES_WEAK_KEY;
return -EINVAL; return -EINVAL;
} }
for (i = 0; i < 2; i++, temp_key += DES_KEY_SIZE) { for (i = 0; i < 2; i++, temp_key += DES_KEY_SIZE) {
...@@ -411,9 +412,9 @@ static int des3_192_setkey(struct crypto_tfm *tfm, const u8 *key, ...@@ -411,9 +412,9 @@ static int des3_192_setkey(struct crypto_tfm *tfm, const u8 *key,
if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE) && if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE) &&
memcmp(&key[DES_KEY_SIZE], &key[DES_KEY_SIZE * 2], memcmp(&key[DES_KEY_SIZE], &key[DES_KEY_SIZE * 2],
DES_KEY_SIZE))) { DES_KEY_SIZE)) &&
(*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
*flags |= CRYPTO_TFM_RES_BAD_KEY_SCHED; *flags |= CRYPTO_TFM_RES_WEAK_KEY;
return -EINVAL; return -EINVAL;
} }
for (i = 0; i < 3; i++, temp_key += DES_KEY_SIZE) { for (i = 0; i < 3; i++, temp_key += DES_KEY_SIZE) {
......
...@@ -46,12 +46,38 @@ static int sha1_init(struct shash_desc *desc) ...@@ -46,12 +46,38 @@ static int sha1_init(struct shash_desc *desc)
return 0; return 0;
} }
static int sha1_export(struct shash_desc *desc, void *out)
{
struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
struct sha1_state *octx = out;
octx->count = sctx->count;
memcpy(octx->state, sctx->state, sizeof(octx->state));
memcpy(octx->buffer, sctx->buf, sizeof(octx->buffer));
return 0;
}
static int sha1_import(struct shash_desc *desc, const void *in)
{
struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
const struct sha1_state *ictx = in;
sctx->count = ictx->count;
memcpy(sctx->state, ictx->state, sizeof(ictx->state));
memcpy(sctx->buf, ictx->buffer, sizeof(ictx->buffer));
sctx->func = KIMD_SHA_1;
return 0;
}
static struct shash_alg alg = { static struct shash_alg alg = {
.digestsize = SHA1_DIGEST_SIZE, .digestsize = SHA1_DIGEST_SIZE,
.init = sha1_init, .init = sha1_init,
.update = s390_sha_update, .update = s390_sha_update,
.final = s390_sha_final, .final = s390_sha_final,
.export = sha1_export,
.import = sha1_import,
.descsize = sizeof(struct s390_sha_ctx), .descsize = sizeof(struct s390_sha_ctx),
.statesize = sizeof(struct sha1_state),
.base = { .base = {
.cra_name = "sha1", .cra_name = "sha1",
.cra_driver_name= "sha1-s390", .cra_driver_name= "sha1-s390",
......
...@@ -42,12 +42,38 @@ static int sha256_init(struct shash_desc *desc) ...@@ -42,12 +42,38 @@ static int sha256_init(struct shash_desc *desc)
return 0; return 0;
} }
static int sha256_export(struct shash_desc *desc, void *out)
{
struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
struct sha256_state *octx = out;
octx->count = sctx->count;
memcpy(octx->state, sctx->state, sizeof(octx->state));
memcpy(octx->buf, sctx->buf, sizeof(octx->buf));
return 0;
}
static int sha256_import(struct shash_desc *desc, const void *in)
{
struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
const struct sha256_state *ictx = in;
sctx->count = ictx->count;
memcpy(sctx->state, ictx->state, sizeof(ictx->state));
memcpy(sctx->buf, ictx->buf, sizeof(ictx->buf));
sctx->func = KIMD_SHA_256;
return 0;
}
static struct shash_alg alg = { static struct shash_alg alg = {
.digestsize = SHA256_DIGEST_SIZE, .digestsize = SHA256_DIGEST_SIZE,
.init = sha256_init, .init = sha256_init,
.update = s390_sha_update, .update = s390_sha_update,
.final = s390_sha_final, .final = s390_sha_final,
.export = sha256_export,
.import = sha256_import,
.descsize = sizeof(struct s390_sha_ctx), .descsize = sizeof(struct s390_sha_ctx),
.statesize = sizeof(struct sha256_state),
.base = { .base = {
.cra_name = "sha256", .cra_name = "sha256",
.cra_driver_name= "sha256-s390", .cra_driver_name= "sha256-s390",
......
...@@ -13,7 +13,10 @@ ...@@ -13,7 +13,10 @@
* *
*/ */
#include <crypto/internal/hash.h> #include <crypto/internal/hash.h>
#include <crypto/sha.h>
#include <linux/errno.h>
#include <linux/init.h> #include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h> #include <linux/module.h>
#include "sha.h" #include "sha.h"
...@@ -37,12 +40,42 @@ static int sha512_init(struct shash_desc *desc) ...@@ -37,12 +40,42 @@ static int sha512_init(struct shash_desc *desc)
return 0; return 0;
} }
static int sha512_export(struct shash_desc *desc, void *out)
{
struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
struct sha512_state *octx = out;
octx->count[0] = sctx->count;
octx->count[1] = 0;
memcpy(octx->state, sctx->state, sizeof(octx->state));
memcpy(octx->buf, sctx->buf, sizeof(octx->buf));
return 0;
}
static int sha512_import(struct shash_desc *desc, const void *in)
{
struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
const struct sha512_state *ictx = in;
if (unlikely(ictx->count[1]))
return -ERANGE;
sctx->count = ictx->count[0];
memcpy(sctx->state, ictx->state, sizeof(ictx->state));
memcpy(sctx->buf, ictx->buf, sizeof(ictx->buf));
sctx->func = KIMD_SHA_512;
return 0;
}
static struct shash_alg sha512_alg = { static struct shash_alg sha512_alg = {
.digestsize = SHA512_DIGEST_SIZE, .digestsize = SHA512_DIGEST_SIZE,
.init = sha512_init, .init = sha512_init,
.update = s390_sha_update, .update = s390_sha_update,
.final = s390_sha_final, .final = s390_sha_final,
.export = sha512_export,
.import = sha512_import,
.descsize = sizeof(struct s390_sha_ctx), .descsize = sizeof(struct s390_sha_ctx),
.statesize = sizeof(struct sha512_state),
.base = { .base = {
.cra_name = "sha512", .cra_name = "sha512",
.cra_driver_name= "sha512-s390", .cra_driver_name= "sha512-s390",
...@@ -78,7 +111,10 @@ static struct shash_alg sha384_alg = { ...@@ -78,7 +111,10 @@ static struct shash_alg sha384_alg = {
.init = sha384_init, .init = sha384_init,
.update = s390_sha_update, .update = s390_sha_update,
.final = s390_sha_final, .final = s390_sha_final,
.export = sha512_export,
.import = sha512_import,
.descsize = sizeof(struct s390_sha_ctx), .descsize = sizeof(struct s390_sha_ctx),
.statesize = sizeof(struct sha512_state),
.base = { .base = {
.cra_name = "sha384", .cra_name = "sha384",
.cra_driver_name= "sha384-s390", .cra_driver_name= "sha384-s390",
......
...@@ -636,7 +636,7 @@ static int __init aesni_init(void) ...@@ -636,7 +636,7 @@ static int __init aesni_init(void)
int err; int err;
if (!cpu_has_aes) { if (!cpu_has_aes) {
printk(KERN_ERR "Intel AES-NI instructions are not detected.\n"); printk(KERN_INFO "Intel AES-NI instructions are not detected.\n");
return -ENODEV; return -ENODEV;
} }
if ((err = crypto_register_alg(&aesni_alg))) if ((err = crypto_register_alg(&aesni_alg)))
......
...@@ -23,11 +23,13 @@ comment "Crypto core or helper" ...@@ -23,11 +23,13 @@ comment "Crypto core or helper"
config CRYPTO_FIPS config CRYPTO_FIPS
bool "FIPS 200 compliance" bool "FIPS 200 compliance"
depends on CRYPTO_ANSI_CPRNG
help help
This options enables the fips boot option which is This options enables the fips boot option which is
required if you want to system to operate in a FIPS 200 required if you want to system to operate in a FIPS 200
certification. You should say no unless you know what certification. You should say no unless you know what
this is. this is. Note that CRYPTO_ANSI_CPRNG is requred if this
option is selected
config CRYPTO_ALGAPI config CRYPTO_ALGAPI
tristate tristate
...@@ -156,7 +158,7 @@ config CRYPTO_GCM ...@@ -156,7 +158,7 @@ config CRYPTO_GCM
tristate "GCM/GMAC support" tristate "GCM/GMAC support"
select CRYPTO_CTR select CRYPTO_CTR
select CRYPTO_AEAD select CRYPTO_AEAD
select CRYPTO_GF128MUL select CRYPTO_GHASH
help help
Support for Galois/Counter Mode (GCM) and Galois Message Support for Galois/Counter Mode (GCM) and Galois Message
Authentication Code (GMAC). Required for IPSec. Authentication Code (GMAC). Required for IPSec.
...@@ -267,6 +269,18 @@ config CRYPTO_XCBC ...@@ -267,6 +269,18 @@ config CRYPTO_XCBC
http://csrc.nist.gov/encryption/modes/proposedmodes/ http://csrc.nist.gov/encryption/modes/proposedmodes/
xcbc-mac/xcbc-mac-spec.pdf xcbc-mac/xcbc-mac-spec.pdf
config CRYPTO_VMAC
tristate "VMAC support"
depends on EXPERIMENTAL
select CRYPTO_HASH
select CRYPTO_MANAGER
help
VMAC is a message authentication algorithm designed for
very high speed on 64-bit architectures.
See also:
<http://fastcrypto.org/vmac>
comment "Digest" comment "Digest"
config CRYPTO_CRC32C config CRYPTO_CRC32C
...@@ -289,6 +303,13 @@ config CRYPTO_CRC32C_INTEL ...@@ -289,6 +303,13 @@ config CRYPTO_CRC32C_INTEL
gain performance compared with software implementation. gain performance compared with software implementation.
Module will be crc32c-intel. Module will be crc32c-intel.
config CRYPTO_GHASH
tristate "GHASH digest algorithm"
select CRYPTO_SHASH
select CRYPTO_GF128MUL
help
GHASH is message digest algorithm for GCM (Galois/Counter Mode).
config CRYPTO_MD4 config CRYPTO_MD4
tristate "MD4 digest algorithm" tristate "MD4 digest algorithm"
select CRYPTO_HASH select CRYPTO_HASH
...@@ -780,13 +801,14 @@ comment "Random Number Generation" ...@@ -780,13 +801,14 @@ comment "Random Number Generation"
config CRYPTO_ANSI_CPRNG config CRYPTO_ANSI_CPRNG
tristate "Pseudo Random Number Generation for Cryptographic modules" tristate "Pseudo Random Number Generation for Cryptographic modules"
default m
select CRYPTO_AES select CRYPTO_AES
select CRYPTO_RNG select CRYPTO_RNG
select CRYPTO_FIPS
help help
This option enables the generic pseudo random number generator This option enables the generic pseudo random number generator
for cryptographic modules. Uses the Algorithm specified in for cryptographic modules. Uses the Algorithm specified in
ANSI X9.31 A.2.4 ANSI X9.31 A.2.4. Not this option must be enabled if CRYPTO_FIPS
is selected
source "drivers/crypto/Kconfig" source "drivers/crypto/Kconfig"
......
...@@ -3,7 +3,7 @@ ...@@ -3,7 +3,7 @@
# #
obj-$(CONFIG_CRYPTO) += crypto.o obj-$(CONFIG_CRYPTO) += crypto.o
crypto-objs := api.o cipher.o digest.o compress.o crypto-objs := api.o cipher.o compress.o
obj-$(CONFIG_CRYPTO_WORKQUEUE) += crypto_wq.o obj-$(CONFIG_CRYPTO_WORKQUEUE) += crypto_wq.o
...@@ -22,7 +22,6 @@ obj-$(CONFIG_CRYPTO_BLKCIPHER2) += chainiv.o ...@@ -22,7 +22,6 @@ obj-$(CONFIG_CRYPTO_BLKCIPHER2) += chainiv.o
obj-$(CONFIG_CRYPTO_BLKCIPHER2) += eseqiv.o obj-$(CONFIG_CRYPTO_BLKCIPHER2) += eseqiv.o
obj-$(CONFIG_CRYPTO_SEQIV) += seqiv.o obj-$(CONFIG_CRYPTO_SEQIV) += seqiv.o
crypto_hash-objs := hash.o
crypto_hash-objs += ahash.o crypto_hash-objs += ahash.o
crypto_hash-objs += shash.o crypto_hash-objs += shash.o
obj-$(CONFIG_CRYPTO_HASH2) += crypto_hash.o obj-$(CONFIG_CRYPTO_HASH2) += crypto_hash.o
...@@ -33,6 +32,7 @@ cryptomgr-objs := algboss.o testmgr.o ...@@ -33,6 +32,7 @@ cryptomgr-objs := algboss.o testmgr.o
obj-$(CONFIG_CRYPTO_MANAGER2) += cryptomgr.o obj-$(CONFIG_CRYPTO_MANAGER2) += cryptomgr.o
obj-$(CONFIG_CRYPTO_HMAC) += hmac.o obj-$(CONFIG_CRYPTO_HMAC) += hmac.o
obj-$(CONFIG_CRYPTO_VMAC) += vmac.o
obj-$(CONFIG_CRYPTO_XCBC) += xcbc.o obj-$(CONFIG_CRYPTO_XCBC) += xcbc.o
obj-$(CONFIG_CRYPTO_NULL) += crypto_null.o obj-$(CONFIG_CRYPTO_NULL) += crypto_null.o
obj-$(CONFIG_CRYPTO_MD4) += md4.o obj-$(CONFIG_CRYPTO_MD4) += md4.o
...@@ -83,6 +83,7 @@ obj-$(CONFIG_CRYPTO_RNG2) += rng.o ...@@ -83,6 +83,7 @@ obj-$(CONFIG_CRYPTO_RNG2) += rng.o
obj-$(CONFIG_CRYPTO_RNG2) += krng.o obj-$(CONFIG_CRYPTO_RNG2) += krng.o
obj-$(CONFIG_CRYPTO_ANSI_CPRNG) += ansi_cprng.o obj-$(CONFIG_CRYPTO_ANSI_CPRNG) += ansi_cprng.o
obj-$(CONFIG_CRYPTO_TEST) += tcrypt.o obj-$(CONFIG_CRYPTO_TEST) += tcrypt.o
obj-$(CONFIG_CRYPTO_GHASH) += ghash-generic.o
# #
# generic algorithms and the async_tx api # generic algorithms and the async_tx api
......
...@@ -14,6 +14,7 @@ ...@@ -14,6 +14,7 @@
*/ */
#include <crypto/internal/skcipher.h> #include <crypto/internal/skcipher.h>
#include <linux/cpumask.h>
#include <linux/err.h> #include <linux/err.h>
#include <linux/init.h> #include <linux/init.h>
#include <linux/kernel.h> #include <linux/kernel.h>
...@@ -25,6 +26,8 @@ ...@@ -25,6 +26,8 @@
#include "internal.h" #include "internal.h"
static const char *skcipher_default_geniv __read_mostly;
static int setkey_unaligned(struct crypto_ablkcipher *tfm, const u8 *key, static int setkey_unaligned(struct crypto_ablkcipher *tfm, const u8 *key,
unsigned int keylen) unsigned int keylen)
{ {
...@@ -180,7 +183,14 @@ EXPORT_SYMBOL_GPL(crypto_givcipher_type); ...@@ -180,7 +183,14 @@ EXPORT_SYMBOL_GPL(crypto_givcipher_type);
const char *crypto_default_geniv(const struct crypto_alg *alg) const char *crypto_default_geniv(const struct crypto_alg *alg)
{ {
return alg->cra_flags & CRYPTO_ALG_ASYNC ? "eseqiv" : "chainiv"; if (((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
CRYPTO_ALG_TYPE_BLKCIPHER ? alg->cra_blkcipher.ivsize :
alg->cra_ablkcipher.ivsize) !=
alg->cra_blocksize)
return "chainiv";
return alg->cra_flags & CRYPTO_ALG_ASYNC ?
"eseqiv" : skcipher_default_geniv;
} }
static int crypto_givcipher_default(struct crypto_alg *alg, u32 type, u32 mask) static int crypto_givcipher_default(struct crypto_alg *alg, u32 type, u32 mask)
...@@ -201,8 +211,9 @@ static int crypto_givcipher_default(struct crypto_alg *alg, u32 type, u32 mask) ...@@ -201,8 +211,9 @@ static int crypto_givcipher_default(struct crypto_alg *alg, u32 type, u32 mask)
int err; int err;
larval = crypto_larval_lookup(alg->cra_driver_name, larval = crypto_larval_lookup(alg->cra_driver_name,
(type & ~CRYPTO_ALG_TYPE_MASK) |
CRYPTO_ALG_TYPE_GIVCIPHER, CRYPTO_ALG_TYPE_GIVCIPHER,
CRYPTO_ALG_TYPE_MASK); mask | CRYPTO_ALG_TYPE_MASK);
err = PTR_ERR(larval); err = PTR_ERR(larval);
if (IS_ERR(larval)) if (IS_ERR(larval))
goto out; goto out;
...@@ -360,3 +371,17 @@ struct crypto_ablkcipher *crypto_alloc_ablkcipher(const char *alg_name, ...@@ -360,3 +371,17 @@ struct crypto_ablkcipher *crypto_alloc_ablkcipher(const char *alg_name,
return ERR_PTR(err); return ERR_PTR(err);
} }
EXPORT_SYMBOL_GPL(crypto_alloc_ablkcipher); EXPORT_SYMBOL_GPL(crypto_alloc_ablkcipher);
static int __init skcipher_module_init(void)
{
skcipher_default_geniv = num_possible_cpus() > 1 ?
"eseqiv" : "chainiv";
return 0;
}
static void skcipher_module_exit(void)
{
}
module_init(skcipher_module_init);
module_exit(skcipher_module_exit);
...@@ -1174,7 +1174,7 @@ EXPORT_SYMBOL_GPL(crypto_il_tab); ...@@ -1174,7 +1174,7 @@ EXPORT_SYMBOL_GPL(crypto_il_tab);
ctx->key_enc[6 * i + 11] = t; \ ctx->key_enc[6 * i + 11] = t; \
} while (0) } while (0)
#define loop8(i) do { \ #define loop8tophalf(i) do { \
t = ror32(t, 8); \ t = ror32(t, 8); \
t = ls_box(t) ^ rco_tab[i]; \ t = ls_box(t) ^ rco_tab[i]; \
t ^= ctx->key_enc[8 * i]; \ t ^= ctx->key_enc[8 * i]; \
...@@ -1185,6 +1185,10 @@ EXPORT_SYMBOL_GPL(crypto_il_tab); ...@@ -1185,6 +1185,10 @@ EXPORT_SYMBOL_GPL(crypto_il_tab);
ctx->key_enc[8 * i + 10] = t; \ ctx->key_enc[8 * i + 10] = t; \
t ^= ctx->key_enc[8 * i + 3]; \ t ^= ctx->key_enc[8 * i + 3]; \
ctx->key_enc[8 * i + 11] = t; \ ctx->key_enc[8 * i + 11] = t; \
} while (0)
#define loop8(i) do { \
loop8tophalf(i); \
t = ctx->key_enc[8 * i + 4] ^ ls_box(t); \ t = ctx->key_enc[8 * i + 4] ^ ls_box(t); \
ctx->key_enc[8 * i + 12] = t; \ ctx->key_enc[8 * i + 12] = t; \
t ^= ctx->key_enc[8 * i + 5]; \ t ^= ctx->key_enc[8 * i + 5]; \
...@@ -1245,8 +1249,9 @@ int crypto_aes_expand_key(struct crypto_aes_ctx *ctx, const u8 *in_key, ...@@ -1245,8 +1249,9 @@ int crypto_aes_expand_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
ctx->key_enc[5] = le32_to_cpu(key[5]); ctx->key_enc[5] = le32_to_cpu(key[5]);
ctx->key_enc[6] = le32_to_cpu(key[6]); ctx->key_enc[6] = le32_to_cpu(key[6]);
t = ctx->key_enc[7] = le32_to_cpu(key[7]); t = ctx->key_enc[7] = le32_to_cpu(key[7]);
for (i = 0; i < 7; ++i) for (i = 0; i < 6; ++i)
loop8(i); loop8(i);
loop8tophalf(i);
break; break;
} }
......
This diff is collapsed.
...@@ -81,16 +81,35 @@ static void crypto_destroy_instance(struct crypto_alg *alg) ...@@ -81,16 +81,35 @@ static void crypto_destroy_instance(struct crypto_alg *alg)
crypto_tmpl_put(tmpl); crypto_tmpl_put(tmpl);
} }
static void crypto_remove_spawn(struct crypto_spawn *spawn, static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
struct list_head *list, struct list_head *stack,
struct list_head *top,
struct list_head *secondary_spawns) struct list_head *secondary_spawns)
{
struct crypto_spawn *spawn, *n;
if (list_empty(stack))
return NULL;
spawn = list_first_entry(stack, struct crypto_spawn, list);
n = list_entry(spawn->list.next, struct crypto_spawn, list);
if (spawn->alg && &n->list != stack && !n->alg)
n->alg = (n->list.next == stack) ? alg :
&list_entry(n->list.next, struct crypto_spawn,
list)->inst->alg;
list_move(&spawn->list, secondary_spawns);
return &n->list == stack ? top : &n->inst->alg.cra_users;
}
static void crypto_remove_spawn(struct crypto_spawn *spawn,
struct list_head *list)
{ {
struct crypto_instance *inst = spawn->inst; struct crypto_instance *inst = spawn->inst;
struct crypto_template *tmpl = inst->tmpl; struct crypto_template *tmpl = inst->tmpl;
list_del_init(&spawn->list);
spawn->alg = NULL;
if (crypto_is_dead(&inst->alg)) if (crypto_is_dead(&inst->alg))
return; return;
...@@ -106,25 +125,55 @@ static void crypto_remove_spawn(struct crypto_spawn *spawn, ...@@ -106,25 +125,55 @@ static void crypto_remove_spawn(struct crypto_spawn *spawn,
hlist_del(&inst->list); hlist_del(&inst->list);
inst->alg.cra_destroy = crypto_destroy_instance; inst->alg.cra_destroy = crypto_destroy_instance;
list_splice(&inst->alg.cra_users, secondary_spawns); BUG_ON(!list_empty(&inst->alg.cra_users));
} }
static void crypto_remove_spawns(struct list_head *spawns, static void crypto_remove_spawns(struct crypto_alg *alg,
struct list_head *list, u32 new_type) struct list_head *list,
struct crypto_alg *nalg)
{ {
u32 new_type = (nalg ?: alg)->cra_flags;
struct crypto_spawn *spawn, *n; struct crypto_spawn *spawn, *n;
LIST_HEAD(secondary_spawns); LIST_HEAD(secondary_spawns);
struct list_head *spawns;
LIST_HEAD(stack);
LIST_HEAD(top);
spawns = &alg->cra_users;
list_for_each_entry_safe(spawn, n, spawns, list) { list_for_each_entry_safe(spawn, n, spawns, list) {
if ((spawn->alg->cra_flags ^ new_type) & spawn->mask) if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
continue; continue;
crypto_remove_spawn(spawn, list, &secondary_spawns); list_move(&spawn->list, &top);
} }
while (!list_empty(&secondary_spawns)) { spawns = &top;
list_for_each_entry_safe(spawn, n, &secondary_spawns, list) do {
crypto_remove_spawn(spawn, list, &secondary_spawns); while (!list_empty(spawns)) {
struct crypto_instance *inst;
spawn = list_first_entry(spawns, struct crypto_spawn,
list);
inst = spawn->inst;
BUG_ON(&inst->alg == alg);
list_move(&spawn->list, &stack);
if (&inst->alg == nalg)
break;
spawn->alg = NULL;
spawns = &inst->alg.cra_users;
}
} while ((spawns = crypto_more_spawns(alg, &stack, &top,
&secondary_spawns)));
list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
if (spawn->alg)
list_move(&spawn->list, &spawn->alg->cra_users);
else
crypto_remove_spawn(spawn, list);
} }
} }
...@@ -258,7 +307,7 @@ void crypto_alg_tested(const char *name, int err) ...@@ -258,7 +307,7 @@ void crypto_alg_tested(const char *name, int err)
q->cra_priority > alg->cra_priority) q->cra_priority > alg->cra_priority)
continue; continue;
crypto_remove_spawns(&q->cra_users, &list, alg->cra_flags); crypto_remove_spawns(q, &list, alg);
} }
complete: complete:
...@@ -330,7 +379,7 @@ static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list) ...@@ -330,7 +379,7 @@ static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
crypto_notify(CRYPTO_MSG_ALG_UNREGISTER, alg); crypto_notify(CRYPTO_MSG_ALG_UNREGISTER, alg);
list_del_init(&alg->cra_list); list_del_init(&alg->cra_list);
crypto_remove_spawns(&alg->cra_users, list, alg->cra_flags); crypto_remove_spawns(alg, list, NULL);
return 0; return 0;
} }
...@@ -488,20 +537,38 @@ int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg, ...@@ -488,20 +537,38 @@ int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
} }
EXPORT_SYMBOL_GPL(crypto_init_spawn); EXPORT_SYMBOL_GPL(crypto_init_spawn);
int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
struct crypto_instance *inst,
const struct crypto_type *frontend)
{
int err = -EINVAL;
if (frontend && (alg->cra_flags ^ frontend->type) & frontend->maskset)
goto out;
spawn->frontend = frontend;
err = crypto_init_spawn(spawn, alg, inst, frontend->maskset);
out:
return err;
}
EXPORT_SYMBOL_GPL(crypto_init_spawn2);
void crypto_drop_spawn(struct crypto_spawn *spawn) void crypto_drop_spawn(struct crypto_spawn *spawn)
{ {
if (!spawn->alg)
return;
down_write(&crypto_alg_sem); down_write(&crypto_alg_sem);
list_del(&spawn->list); list_del(&spawn->list);
up_write(&crypto_alg_sem); up_write(&crypto_alg_sem);
} }
EXPORT_SYMBOL_GPL(crypto_drop_spawn); EXPORT_SYMBOL_GPL(crypto_drop_spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type, static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
u32 mask)
{ {
struct crypto_alg *alg; struct crypto_alg *alg;
struct crypto_alg *alg2; struct crypto_alg *alg2;
struct crypto_tfm *tfm;
down_read(&crypto_alg_sem); down_read(&crypto_alg_sem);
alg = spawn->alg; alg = spawn->alg;
...@@ -516,6 +583,19 @@ struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type, ...@@ -516,6 +583,19 @@ struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
return ERR_PTR(-EAGAIN); return ERR_PTR(-EAGAIN);
} }
return alg;
}
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
u32 mask)
{
struct crypto_alg *alg;
struct crypto_tfm *tfm;
alg = crypto_spawn_alg(spawn);
if (IS_ERR(alg))
return ERR_CAST(alg);
tfm = ERR_PTR(-EINVAL); tfm = ERR_PTR(-EINVAL);
if (unlikely((alg->cra_flags ^ type) & mask)) if (unlikely((alg->cra_flags ^ type) & mask))
goto out_put_alg; goto out_put_alg;
...@@ -532,6 +612,27 @@ struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type, ...@@ -532,6 +612,27 @@ struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
} }
EXPORT_SYMBOL_GPL(crypto_spawn_tfm); EXPORT_SYMBOL_GPL(crypto_spawn_tfm);
void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
{
struct crypto_alg *alg;
struct crypto_tfm *tfm;
alg = crypto_spawn_alg(spawn);
if (IS_ERR(alg))
return ERR_CAST(alg);
tfm = crypto_create_tfm(alg, spawn->frontend);
if (IS_ERR(tfm))
goto out_put_alg;
return tfm;
out_put_alg:
crypto_mod_put(alg);
return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);
int crypto_register_notifier(struct notifier_block *nb) int crypto_register_notifier(struct notifier_block *nb)
{ {
return blocking_notifier_chain_register(&crypto_chain, nb); return blocking_notifier_chain_register(&crypto_chain, nb);
...@@ -595,7 +696,9 @@ const char *crypto_attr_alg_name(struct rtattr *rta) ...@@ -595,7 +696,9 @@ const char *crypto_attr_alg_name(struct rtattr *rta)
} }
EXPORT_SYMBOL_GPL(crypto_attr_alg_name); EXPORT_SYMBOL_GPL(crypto_attr_alg_name);
struct crypto_alg *crypto_attr_alg(struct rtattr *rta, u32 type, u32 mask) struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
const struct crypto_type *frontend,
u32 type, u32 mask)
{ {
const char *name; const char *name;
int err; int err;
...@@ -605,9 +708,9 @@ struct crypto_alg *crypto_attr_alg(struct rtattr *rta, u32 type, u32 mask) ...@@ -605,9 +708,9 @@ struct crypto_alg *crypto_attr_alg(struct rtattr *rta, u32 type, u32 mask)
if (IS_ERR(name)) if (IS_ERR(name))
return ERR_PTR(err); return ERR_PTR(err);
return crypto_alg_mod_lookup(name, type, mask); return crypto_find_alg(name, frontend, type, mask);
} }
EXPORT_SYMBOL_GPL(crypto_attr_alg); EXPORT_SYMBOL_GPL(crypto_attr_alg2);
int crypto_attr_u32(struct rtattr *rta, u32 *num) int crypto_attr_u32(struct rtattr *rta, u32 *num)
{ {
...@@ -627,17 +730,20 @@ int crypto_attr_u32(struct rtattr *rta, u32 *num) ...@@ -627,17 +730,20 @@ int crypto_attr_u32(struct rtattr *rta, u32 *num)
} }
EXPORT_SYMBOL_GPL(crypto_attr_u32); EXPORT_SYMBOL_GPL(crypto_attr_u32);
struct crypto_instance *crypto_alloc_instance(const char *name, void *crypto_alloc_instance2(const char *name, struct crypto_alg *alg,
struct crypto_alg *alg) unsigned int head)
{ {
struct crypto_instance *inst; struct crypto_instance *inst;
struct crypto_spawn *spawn; char *p;
int err; int err;
inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL); p = kzalloc(head + sizeof(*inst) + sizeof(struct crypto_spawn),
if (!inst) GFP_KERNEL);
if (!p)
return ERR_PTR(-ENOMEM); return ERR_PTR(-ENOMEM);
inst = (void *)(p + head);
err = -ENAMETOOLONG; err = -ENAMETOOLONG;
if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name, if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
alg->cra_name) >= CRYPTO_MAX_ALG_NAME) alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
...@@ -647,6 +753,25 @@ struct crypto_instance *crypto_alloc_instance(const char *name, ...@@ -647,6 +753,25 @@ struct crypto_instance *crypto_alloc_instance(const char *name,
name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME) name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
goto err_free_inst; goto err_free_inst;
return p;
err_free_inst:
kfree(p);
return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_instance2);
struct crypto_instance *crypto_alloc_instance(const char *name,
struct crypto_alg *alg)
{
struct crypto_instance *inst;
struct crypto_spawn *spawn;
int err;
inst = crypto_alloc_instance2(name, alg, 0);
if (IS_ERR(inst))
goto out;
spawn = crypto_instance_ctx(inst); spawn = crypto_instance_ctx(inst);
err = crypto_init_spawn(spawn, alg, inst, err = crypto_init_spawn(spawn, alg, inst,
CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC); CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
...@@ -658,7 +783,10 @@ struct crypto_instance *crypto_alloc_instance(const char *name, ...@@ -658,7 +783,10 @@ struct crypto_instance *crypto_alloc_instance(const char *name,
err_free_inst: err_free_inst:
kfree(inst); kfree(inst);
return ERR_PTR(err); inst = ERR_PTR(err);
out:
return inst;
} }
EXPORT_SYMBOL_GPL(crypto_alloc_instance); EXPORT_SYMBOL_GPL(crypto_alloc_instance);
......
...@@ -68,6 +68,11 @@ static int cryptomgr_probe(void *data) ...@@ -68,6 +68,11 @@ static int cryptomgr_probe(void *data)
goto err; goto err;
do { do {
if (tmpl->create) {
err = tmpl->create(tmpl, param->tb);
continue;
}
inst = tmpl->alloc(param->tb); inst = tmpl->alloc(param->tb);
if (IS_ERR(inst)) if (IS_ERR(inst))
err = PTR_ERR(inst); err = PTR_ERR(inst);
......
...@@ -187,7 +187,6 @@ static int _get_more_prng_bytes(struct prng_context *ctx) ...@@ -187,7 +187,6 @@ static int _get_more_prng_bytes(struct prng_context *ctx)
/* Our exported functions */ /* Our exported functions */
static int get_prng_bytes(char *buf, size_t nbytes, struct prng_context *ctx) static int get_prng_bytes(char *buf, size_t nbytes, struct prng_context *ctx)
{ {
unsigned long flags;
unsigned char *ptr = buf; unsigned char *ptr = buf;
unsigned int byte_count = (unsigned int)nbytes; unsigned int byte_count = (unsigned int)nbytes;
int err; int err;
...@@ -196,7 +195,7 @@ static int get_prng_bytes(char *buf, size_t nbytes, struct prng_context *ctx) ...@@ -196,7 +195,7 @@ static int get_prng_bytes(char *buf, size_t nbytes, struct prng_context *ctx)
if (nbytes < 0) if (nbytes < 0)
return -EINVAL; return -EINVAL;
spin_lock_irqsave(&ctx->prng_lock, flags); spin_lock_bh(&ctx->prng_lock);
err = -EINVAL; err = -EINVAL;
if (ctx->flags & PRNG_NEED_RESET) if (ctx->flags & PRNG_NEED_RESET)
...@@ -268,7 +267,7 @@ static int get_prng_bytes(char *buf, size_t nbytes, struct prng_context *ctx) ...@@ -268,7 +267,7 @@ static int get_prng_bytes(char *buf, size_t nbytes, struct prng_context *ctx)
goto remainder; goto remainder;
done: done:
spin_unlock_irqrestore(&ctx->prng_lock, flags); spin_unlock_bh(&ctx->prng_lock);
dbgprint(KERN_CRIT "returning %d from get_prng_bytes in context %p\n", dbgprint(KERN_CRIT "returning %d from get_prng_bytes in context %p\n",
err, ctx); err, ctx);
return err; return err;
...@@ -284,10 +283,9 @@ static int reset_prng_context(struct prng_context *ctx, ...@@ -284,10 +283,9 @@ static int reset_prng_context(struct prng_context *ctx,
unsigned char *V, unsigned char *DT) unsigned char *V, unsigned char *DT)
{ {
int ret; int ret;
int rc = -EINVAL;
unsigned char *prng_key; unsigned char *prng_key;
spin_lock(&ctx->prng_lock); spin_lock_bh(&ctx->prng_lock);
ctx->flags |= PRNG_NEED_RESET; ctx->flags |= PRNG_NEED_RESET;
prng_key = (key != NULL) ? key : (unsigned char *)DEFAULT_PRNG_KEY; prng_key = (key != NULL) ? key : (unsigned char *)DEFAULT_PRNG_KEY;
...@@ -308,34 +306,20 @@ static int reset_prng_context(struct prng_context *ctx, ...@@ -308,34 +306,20 @@ static int reset_prng_context(struct prng_context *ctx,
memset(ctx->rand_data, 0, DEFAULT_BLK_SZ); memset(ctx->rand_data, 0, DEFAULT_BLK_SZ);
memset(ctx->last_rand_data, 0, DEFAULT_BLK_SZ); memset(ctx->last_rand_data, 0, DEFAULT_BLK_SZ);
if (ctx->tfm)
crypto_free_cipher(ctx->tfm);
ctx->tfm = crypto_alloc_cipher("aes", 0, 0);
if (IS_ERR(ctx->tfm)) {
dbgprint(KERN_CRIT "Failed to alloc tfm for context %p\n",
ctx);
ctx->tfm = NULL;
goto out;
}
ctx->rand_data_valid = DEFAULT_BLK_SZ; ctx->rand_data_valid = DEFAULT_BLK_SZ;
ret = crypto_cipher_setkey(ctx->tfm, prng_key, klen); ret = crypto_cipher_setkey(ctx->tfm, prng_key, klen);
if (ret) { if (ret) {
dbgprint(KERN_CRIT "PRNG: setkey() failed flags=%x\n", dbgprint(KERN_CRIT "PRNG: setkey() failed flags=%x\n",
crypto_cipher_get_flags(ctx->tfm)); crypto_cipher_get_flags(ctx->tfm));
crypto_free_cipher(ctx->tfm);
goto out; goto out;
} }
rc = 0; ret = 0;
ctx->flags &= ~PRNG_NEED_RESET; ctx->flags &= ~PRNG_NEED_RESET;
out: out:
spin_unlock(&ctx->prng_lock); spin_unlock_bh(&ctx->prng_lock);
return ret;
return rc;
} }
static int cprng_init(struct crypto_tfm *tfm) static int cprng_init(struct crypto_tfm *tfm)
...@@ -343,6 +327,12 @@ static int cprng_init(struct crypto_tfm *tfm) ...@@ -343,6 +327,12 @@ static int cprng_init(struct crypto_tfm *tfm)
struct prng_context *ctx = crypto_tfm_ctx(tfm); struct prng_context *ctx = crypto_tfm_ctx(tfm);
spin_lock_init(&ctx->prng_lock); spin_lock_init(&ctx->prng_lock);
ctx->tfm = crypto_alloc_cipher("aes", 0, 0);
if (IS_ERR(ctx->tfm)) {
dbgprint(KERN_CRIT "Failed to alloc tfm for context %p\n",
ctx);
return PTR_ERR(ctx->tfm);
}
if (reset_prng_context(ctx, NULL, DEFAULT_PRNG_KSZ, NULL, NULL) < 0) if (reset_prng_context(ctx, NULL, DEFAULT_PRNG_KSZ, NULL, NULL) < 0)
return -EINVAL; return -EINVAL;
...@@ -418,17 +408,10 @@ static struct crypto_alg rng_alg = { ...@@ -418,17 +408,10 @@ static struct crypto_alg rng_alg = {
/* Module initalization */ /* Module initalization */
static int __init prng_mod_init(void) static int __init prng_mod_init(void)
{ {
int ret = 0;
if (fips_enabled) if (fips_enabled)
rng_alg.cra_priority += 200; rng_alg.cra_priority += 200;
ret = crypto_register_alg(&rng_alg); return crypto_register_alg(&rng_alg);
if (ret)
goto out;
out:
return 0;
} }
static void __exit prng_mod_fini(void) static void __exit prng_mod_fini(void)
......
...@@ -286,13 +286,6 @@ static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask) ...@@ -286,13 +286,6 @@ static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
case CRYPTO_ALG_TYPE_CIPHER: case CRYPTO_ALG_TYPE_CIPHER:
return crypto_init_cipher_ops(tfm); return crypto_init_cipher_ops(tfm);
case CRYPTO_ALG_TYPE_DIGEST:
if ((mask & CRYPTO_ALG_TYPE_HASH_MASK) !=
CRYPTO_ALG_TYPE_HASH_MASK)
return crypto_init_digest_ops_async(tfm);
else
return crypto_init_digest_ops(tfm);
case CRYPTO_ALG_TYPE_COMPRESS: case CRYPTO_ALG_TYPE_COMPRESS:
return crypto_init_compress_ops(tfm); return crypto_init_compress_ops(tfm);
...@@ -319,10 +312,6 @@ static void crypto_exit_ops(struct crypto_tfm *tfm) ...@@ -319,10 +312,6 @@ static void crypto_exit_ops(struct crypto_tfm *tfm)
crypto_exit_cipher_ops(tfm); crypto_exit_cipher_ops(tfm);
break; break;
case CRYPTO_ALG_TYPE_DIGEST:
crypto_exit_digest_ops(tfm);
break;
case CRYPTO_ALG_TYPE_COMPRESS: case CRYPTO_ALG_TYPE_COMPRESS:
crypto_exit_compress_ops(tfm); crypto_exit_compress_ops(tfm);
break; break;
...@@ -350,10 +339,6 @@ static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask) ...@@ -350,10 +339,6 @@ static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
len += crypto_cipher_ctxsize(alg); len += crypto_cipher_ctxsize(alg);
break; break;
case CRYPTO_ALG_TYPE_DIGEST:
len += crypto_digest_ctxsize(alg);
break;
case CRYPTO_ALG_TYPE_COMPRESS: case CRYPTO_ALG_TYPE_COMPRESS:
len += crypto_compress_ctxsize(alg); len += crypto_compress_ctxsize(alg);
break; break;
...@@ -472,7 +457,7 @@ void *crypto_create_tfm(struct crypto_alg *alg, ...@@ -472,7 +457,7 @@ void *crypto_create_tfm(struct crypto_alg *alg,
int err = -ENOMEM; int err = -ENOMEM;
tfmsize = frontend->tfmsize; tfmsize = frontend->tfmsize;
total = tfmsize + sizeof(*tfm) + frontend->extsize(alg, frontend); total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);
mem = kzalloc(total, GFP_KERNEL); mem = kzalloc(total, GFP_KERNEL);
if (mem == NULL) if (mem == NULL)
...@@ -481,7 +466,7 @@ void *crypto_create_tfm(struct crypto_alg *alg, ...@@ -481,7 +466,7 @@ void *crypto_create_tfm(struct crypto_alg *alg,
tfm = (struct crypto_tfm *)(mem + tfmsize); tfm = (struct crypto_tfm *)(mem + tfmsize);
tfm->__crt_alg = alg; tfm->__crt_alg = alg;
err = frontend->init_tfm(tfm, frontend); err = frontend->init_tfm(tfm);
if (err) if (err)
goto out_free_tfm; goto out_free_tfm;
...@@ -503,6 +488,27 @@ void *crypto_create_tfm(struct crypto_alg *alg, ...@@ -503,6 +488,27 @@ void *crypto_create_tfm(struct crypto_alg *alg,
} }
EXPORT_SYMBOL_GPL(crypto_create_tfm); EXPORT_SYMBOL_GPL(crypto_create_tfm);
/*
 * crypto_find_alg - look up an algorithm by name, applying a frontend's
 * type/mask constraints and preferring the frontend's own lookup routine
 * when it provides one. With no frontend, this is a plain module lookup.
 */
struct crypto_alg *crypto_find_alg(const char *alg_name,
				   const struct crypto_type *frontend,
				   u32 type, u32 mask)
{
	if (!frontend)
		return crypto_alg_mod_lookup(alg_name, type, mask);

	/* Clear frontend-reserved bits, then force the frontend's own. */
	type &= frontend->maskclear;
	mask &= frontend->maskclear;
	type |= frontend->type;
	mask |= frontend->maskset;

	if (frontend->lookup)
		return frontend->lookup(alg_name, type, mask);

	return crypto_alg_mod_lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);
/* /*
* crypto_alloc_tfm - Locate algorithm and allocate transform * crypto_alloc_tfm - Locate algorithm and allocate transform
* @alg_name: Name of algorithm * @alg_name: Name of algorithm
...@@ -526,21 +532,13 @@ EXPORT_SYMBOL_GPL(crypto_create_tfm); ...@@ -526,21 +532,13 @@ EXPORT_SYMBOL_GPL(crypto_create_tfm);
void *crypto_alloc_tfm(const char *alg_name, void *crypto_alloc_tfm(const char *alg_name,
const struct crypto_type *frontend, u32 type, u32 mask) const struct crypto_type *frontend, u32 type, u32 mask)
{ {
struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask);
void *tfm; void *tfm;
int err; int err;
type &= frontend->maskclear;
mask &= frontend->maskclear;
type |= frontend->type;
mask |= frontend->maskset;
lookup = frontend->lookup ?: crypto_alg_mod_lookup;
for (;;) { for (;;) {
struct crypto_alg *alg; struct crypto_alg *alg;
alg = lookup(alg_name, type, mask); alg = crypto_find_alg(alg_name, frontend, type, mask);
if (IS_ERR(alg)) { if (IS_ERR(alg)) {
err = PTR_ERR(alg); err = PTR_ERR(alg);
goto err; goto err;
......
This diff is collapsed.
This diff is collapsed.
...@@ -219,6 +219,8 @@ static struct crypto_instance *crypto_ctr_alloc(struct rtattr **tb) ...@@ -219,6 +219,8 @@ static struct crypto_instance *crypto_ctr_alloc(struct rtattr **tb)
inst->alg.cra_blkcipher.encrypt = crypto_ctr_crypt; inst->alg.cra_blkcipher.encrypt = crypto_ctr_crypt;
inst->alg.cra_blkcipher.decrypt = crypto_ctr_crypt; inst->alg.cra_blkcipher.decrypt = crypto_ctr_crypt;
inst->alg.cra_blkcipher.geniv = "chainiv";
out: out:
crypto_mod_put(alg); crypto_mod_put(alg);
return inst; return inst;
......
This diff is collapsed.
/*
* GHASH: digest algorithm for GCM (Galois/Counter Mode).
*
* Copyright (c) 2007 Nokia Siemens Networks - Mikko Herranen <mh1@iki.fi>
* Copyright (c) 2009 Intel Corp.
* Author: Huang Ying <ying.huang@intel.com>
*
* The algorithm implementation is copied from gcm.c.
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 as published
* by the Free Software Foundation.
*/
#include <crypto/algapi.h>
#include <crypto/gf128mul.h>
#include <crypto/internal/hash.h>
#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
/* GHASH processes 16-byte (128-bit) blocks and produces a 16-byte digest. */
#define GHASH_BLOCK_SIZE	16
#define GHASH_DIGEST_SIZE	16

/*
 * Per-key state: precomputed 4 KiB lookup table for GF(2^128)
 * multiplication by the hash subkey. NULL until ghash_setkey() succeeds.
 */
struct ghash_ctx {
	struct gf128mul_4k *gf128;
};

/*
 * Per-request state: the running digest block, plus the number of bytes
 * still needed to complete the currently buffered partial block
 * (0 means no partial block is pending).
 */
struct ghash_desc_ctx {
	u8 buffer[GHASH_BLOCK_SIZE];
	u32 bytes;
};
/* Reset the per-request state: zero digest block, no buffered bytes. */
static int ghash_init(struct shash_desc *desc)
{
	struct ghash_desc_ctx *state = shash_desc_ctx(desc);

	memset(state, 0, sizeof(struct ghash_desc_ctx));
	return 0;
}
/*
 * Install the 16-byte hash subkey by (re)building the GF(2^128)
 * multiplication table. Any previously installed table is released first.
 */
static int ghash_setkey(struct crypto_shash *tfm,
			const u8 *key, unsigned int keylen)
{
	struct ghash_ctx *ctx = crypto_shash_ctx(tfm);

	/* GHASH takes exactly one block-sized (128-bit) key. */
	if (keylen != GHASH_BLOCK_SIZE) {
		crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	if (ctx->gf128)
		gf128mul_free_4k(ctx->gf128);

	ctx->gf128 = gf128mul_init_4k_lle((be128 *)key);
	return ctx->gf128 ? 0 : -ENOMEM;
}
static int ghash_update(struct shash_desc *desc,
const u8 *src, unsigned int srclen)
{
struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
u8 *dst = dctx->buffer;
if (dctx->bytes) {
int n = min(srclen, dctx->bytes);
u8 *pos = dst + (GHASH_BLOCK_SIZE - dctx->bytes);
dctx->bytes -= n;
srclen -= n;
while (n--)
*pos++ ^= *src++;
if (!dctx->bytes)
gf128mul_4k_lle((be128 *)dst, ctx->gf128);
}
while (srclen >= GHASH_BLOCK_SIZE) {
crypto_xor(dst, src, GHASH_BLOCK_SIZE);
gf128mul_4k_lle((be128 *)dst, ctx->gf128);
src += GHASH_BLOCK_SIZE;
srclen -= GHASH_BLOCK_SIZE;
}
if (srclen) {
dctx->bytes = GHASH_BLOCK_SIZE - srclen;
while (srclen--)
*dst++ ^= *src++;
}
return 0;
}
/*
 * Complete any buffered partial block. The missing tail bytes are
 * implicitly zero-padded (XOR with zero is a no-op), so the block only
 * needs its final GF(2^128) multiplication before the digest is read.
 */
static void ghash_flush(struct ghash_ctx *ctx, struct ghash_desc_ctx *dctx)
{
	u8 *block = dctx->buffer;

	if (dctx->bytes)
		gf128mul_4k_lle((be128 *)block, ctx->gf128);

	dctx->bytes = 0;
}
/*
 * Flush any buffered partial block and copy out the 16-byte digest.
 */
static int ghash_final(struct shash_desc *desc, u8 *dst)
{
	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
	struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
	u8 *buf = dctx->buffer;

	/*
	 * A key must have been set: ghash_flush() multiplies by
	 * ctx->gf128, which is NULL until ghash_setkey() succeeds.
	 */
	if (!ctx->gf128)
		return -ENOKEY;

	ghash_flush(ctx, dctx);
	memcpy(dst, buf, GHASH_BLOCK_SIZE);

	return 0;
}
/* Tear down per-tfm state: free the key table built by ghash_setkey(). */
static void ghash_exit_tfm(struct crypto_tfm *tfm)
{
	struct ghash_ctx *gctx = crypto_tfm_ctx(tfm);

	if (gctx->gf128)
		gf128mul_free_4k(gctx->gf128);
}
/*
 * shash descriptor for the generic (table-driven) GHASH implementation.
 * cra_priority is a modest 100 — presumably so hardware/SIMD drivers can
 * take precedence; confirm against the crypto API's priority conventions.
 */
static struct shash_alg ghash_alg = {
	.digestsize	= GHASH_DIGEST_SIZE,
	.init		= ghash_init,
	.update		= ghash_update,
	.final		= ghash_final,
	.setkey		= ghash_setkey,
	.descsize	= sizeof(struct ghash_desc_ctx),
	.base		= {
		.cra_name		= "ghash",
		.cra_driver_name	= "ghash-generic",
		.cra_priority		= 100,
		.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize		= GHASH_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct ghash_ctx),
		.cra_module		= THIS_MODULE,
		.cra_list		= LIST_HEAD_INIT(ghash_alg.base.cra_list),
		/* Frees the key table when the last user goes away. */
		.cra_exit		= ghash_exit_tfm,
	},
};
/* Register the generic GHASH shash with the crypto API on module load. */
static int __init ghash_mod_init(void)
{
	return crypto_register_shash(&ghash_alg);
}

/* Unregister on module unload. */
static void __exit ghash_mod_exit(void)
{
	crypto_unregister_shash(&ghash_alg);
}

module_init(ghash_mod_init);
module_exit(ghash_mod_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("GHASH Message Digest Algorithm");
MODULE_ALIAS("ghash");
This diff is collapsed.
...@@ -25,12 +25,7 @@ ...@@ -25,12 +25,7 @@
#include <linux/notifier.h> #include <linux/notifier.h>
#include <linux/rwsem.h> #include <linux/rwsem.h>
#include <linux/slab.h> #include <linux/slab.h>
#include <linux/fips.h>
#ifdef CONFIG_CRYPTO_FIPS
extern int fips_enabled;
#else
#define fips_enabled 0
#endif
/* Crypto notification events. */ /* Crypto notification events. */
enum { enum {
...@@ -65,18 +60,6 @@ static inline void crypto_exit_proc(void) ...@@ -65,18 +60,6 @@ static inline void crypto_exit_proc(void)
{ } { }
#endif #endif
static inline unsigned int crypto_digest_ctxsize(struct crypto_alg *alg)
{
unsigned int len = alg->cra_ctxsize;
if (alg->cra_alignmask) {
len = ALIGN(len, (unsigned long)alg->cra_alignmask + 1);
len += alg->cra_digest.dia_digestsize;
}
return len;
}
static inline unsigned int crypto_cipher_ctxsize(struct crypto_alg *alg) static inline unsigned int crypto_cipher_ctxsize(struct crypto_alg *alg)
{ {
return alg->cra_ctxsize; return alg->cra_ctxsize;
...@@ -91,12 +74,9 @@ struct crypto_alg *crypto_mod_get(struct crypto_alg *alg); ...@@ -91,12 +74,9 @@ struct crypto_alg *crypto_mod_get(struct crypto_alg *alg);
struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, u32 mask); struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, u32 mask);
struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask); struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask);
int crypto_init_digest_ops(struct crypto_tfm *tfm);
int crypto_init_digest_ops_async(struct crypto_tfm *tfm);
int crypto_init_cipher_ops(struct crypto_tfm *tfm); int crypto_init_cipher_ops(struct crypto_tfm *tfm);
int crypto_init_compress_ops(struct crypto_tfm *tfm); int crypto_init_compress_ops(struct crypto_tfm *tfm);
void crypto_exit_digest_ops(struct crypto_tfm *tfm);
void crypto_exit_cipher_ops(struct crypto_tfm *tfm); void crypto_exit_cipher_ops(struct crypto_tfm *tfm);
void crypto_exit_compress_ops(struct crypto_tfm *tfm); void crypto_exit_compress_ops(struct crypto_tfm *tfm);
...@@ -111,12 +91,12 @@ struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type, ...@@ -111,12 +91,12 @@ struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
u32 mask); u32 mask);
void *crypto_create_tfm(struct crypto_alg *alg, void *crypto_create_tfm(struct crypto_alg *alg,
const struct crypto_type *frontend); const struct crypto_type *frontend);
struct crypto_alg *crypto_find_alg(const char *alg_name,
const struct crypto_type *frontend,
u32 type, u32 mask);
void *crypto_alloc_tfm(const char *alg_name, void *crypto_alloc_tfm(const char *alg_name,
const struct crypto_type *frontend, u32 type, u32 mask); const struct crypto_type *frontend, u32 type, u32 mask);
int crypto_register_instance(struct crypto_template *tmpl,
struct crypto_instance *inst);
int crypto_register_notifier(struct notifier_block *nb); int crypto_register_notifier(struct notifier_block *nb);
int crypto_unregister_notifier(struct notifier_block *nb); int crypto_unregister_notifier(struct notifier_block *nb);
int crypto_probing_notify(unsigned long val, void *v); int crypto_probing_notify(unsigned long val, void *v);
......
...@@ -36,14 +36,12 @@ static int crypto_pcomp_init(struct crypto_tfm *tfm, u32 type, u32 mask) ...@@ -36,14 +36,12 @@ static int crypto_pcomp_init(struct crypto_tfm *tfm, u32 type, u32 mask)
return 0; return 0;
} }
static unsigned int crypto_pcomp_extsize(struct crypto_alg *alg, static unsigned int crypto_pcomp_extsize(struct crypto_alg *alg)
const struct crypto_type *frontend)
{ {
return alg->cra_ctxsize; return alg->cra_ctxsize;
} }
static int crypto_pcomp_init_tfm(struct crypto_tfm *tfm, static int crypto_pcomp_init_tfm(struct crypto_tfm *tfm)
const struct crypto_type *frontend)
{ {
return 0; return 0;
} }
......
...@@ -123,4 +123,4 @@ void crypto_put_default_rng(void) ...@@ -123,4 +123,4 @@ void crypto_put_default_rng(void)
EXPORT_SYMBOL_GPL(crypto_put_default_rng); EXPORT_SYMBOL_GPL(crypto_put_default_rng);
MODULE_LICENSE("GPL"); MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Random Number Genertor"); MODULE_DESCRIPTION("Random Number Generator");
...@@ -25,31 +25,21 @@ ...@@ -25,31 +25,21 @@
#include <crypto/sha.h> #include <crypto/sha.h>
#include <asm/byteorder.h> #include <asm/byteorder.h>
struct sha1_ctx {
u64 count;
u32 state[5];
u8 buffer[64];
};
static int sha1_init(struct shash_desc *desc) static int sha1_init(struct shash_desc *desc)
{ {
struct sha1_ctx *sctx = shash_desc_ctx(desc); struct sha1_state *sctx = shash_desc_ctx(desc);
static const struct sha1_ctx initstate = { *sctx = (struct sha1_state){
0, .state = { SHA1_H0, SHA1_H1, SHA1_H2, SHA1_H3, SHA1_H4 },
{ SHA1_H0, SHA1_H1, SHA1_H2, SHA1_H3, SHA1_H4 },
{ 0, }
}; };
*sctx = initstate;
return 0; return 0;
} }
static int sha1_update(struct shash_desc *desc, const u8 *data, static int sha1_update(struct shash_desc *desc, const u8 *data,
unsigned int len) unsigned int len)
{ {
struct sha1_ctx *sctx = shash_desc_ctx(desc); struct sha1_state *sctx = shash_desc_ctx(desc);
unsigned int partial, done; unsigned int partial, done;
const u8 *src; const u8 *src;
...@@ -85,7 +75,7 @@ static int sha1_update(struct shash_desc *desc, const u8 *data, ...@@ -85,7 +75,7 @@ static int sha1_update(struct shash_desc *desc, const u8 *data,
/* Add padding and return the message digest. */ /* Add padding and return the message digest. */
static int sha1_final(struct shash_desc *desc, u8 *out) static int sha1_final(struct shash_desc *desc, u8 *out)
{ {
struct sha1_ctx *sctx = shash_desc_ctx(desc); struct sha1_state *sctx = shash_desc_ctx(desc);
__be32 *dst = (__be32 *)out; __be32 *dst = (__be32 *)out;
u32 i, index, padlen; u32 i, index, padlen;
__be64 bits; __be64 bits;
...@@ -111,12 +101,31 @@ static int sha1_final(struct shash_desc *desc, u8 *out) ...@@ -111,12 +101,31 @@ static int sha1_final(struct shash_desc *desc, u8 *out)
return 0; return 0;
} }
/* Serialize the partial-hash state; it is plain data, so a raw copy works. */
static int sha1_export(struct shash_desc *desc, void *out)
{
	memcpy(out, shash_desc_ctx(desc), sizeof(struct sha1_state));
	return 0;
}
/* Restore a partial-hash state previously produced by sha1_export(). */
static int sha1_import(struct shash_desc *desc, const void *in)
{
	memcpy(shash_desc_ctx(desc), in, sizeof(struct sha1_state));
	return 0;
}
static struct shash_alg alg = { static struct shash_alg alg = {
.digestsize = SHA1_DIGEST_SIZE, .digestsize = SHA1_DIGEST_SIZE,
.init = sha1_init, .init = sha1_init,
.update = sha1_update, .update = sha1_update,
.final = sha1_final, .final = sha1_final,
.descsize = sizeof(struct sha1_ctx), .export = sha1_export,
.import = sha1_import,
.descsize = sizeof(struct sha1_state),
.statesize = sizeof(struct sha1_state),
.base = { .base = {
.cra_name = "sha1", .cra_name = "sha1",
.cra_driver_name= "sha1-generic", .cra_driver_name= "sha1-generic",
......
...@@ -25,12 +25,6 @@ ...@@ -25,12 +25,6 @@
#include <crypto/sha.h> #include <crypto/sha.h>
#include <asm/byteorder.h> #include <asm/byteorder.h>
struct sha256_ctx {
u32 count[2];
u32 state[8];
u8 buf[128];
};
static inline u32 Ch(u32 x, u32 y, u32 z) static inline u32 Ch(u32 x, u32 y, u32 z)
{ {
return z ^ (x & (y ^ z)); return z ^ (x & (y ^ z));
...@@ -222,7 +216,7 @@ static void sha256_transform(u32 *state, const u8 *input) ...@@ -222,7 +216,7 @@ static void sha256_transform(u32 *state, const u8 *input)
static int sha224_init(struct shash_desc *desc) static int sha224_init(struct shash_desc *desc)
{ {
struct sha256_ctx *sctx = shash_desc_ctx(desc); struct sha256_state *sctx = shash_desc_ctx(desc);
sctx->state[0] = SHA224_H0; sctx->state[0] = SHA224_H0;
sctx->state[1] = SHA224_H1; sctx->state[1] = SHA224_H1;
sctx->state[2] = SHA224_H2; sctx->state[2] = SHA224_H2;
...@@ -231,15 +225,14 @@ static int sha224_init(struct shash_desc *desc) ...@@ -231,15 +225,14 @@ static int sha224_init(struct shash_desc *desc)
sctx->state[5] = SHA224_H5; sctx->state[5] = SHA224_H5;
sctx->state[6] = SHA224_H6; sctx->state[6] = SHA224_H6;
sctx->state[7] = SHA224_H7; sctx->state[7] = SHA224_H7;
sctx->count[0] = 0; sctx->count = 0;
sctx->count[1] = 0;
return 0; return 0;
} }
static int sha256_init(struct shash_desc *desc) static int sha256_init(struct shash_desc *desc)
{ {
struct sha256_ctx *sctx = shash_desc_ctx(desc); struct sha256_state *sctx = shash_desc_ctx(desc);
sctx->state[0] = SHA256_H0; sctx->state[0] = SHA256_H0;
sctx->state[1] = SHA256_H1; sctx->state[1] = SHA256_H1;
sctx->state[2] = SHA256_H2; sctx->state[2] = SHA256_H2;
...@@ -248,7 +241,7 @@ static int sha256_init(struct shash_desc *desc) ...@@ -248,7 +241,7 @@ static int sha256_init(struct shash_desc *desc)
sctx->state[5] = SHA256_H5; sctx->state[5] = SHA256_H5;
sctx->state[6] = SHA256_H6; sctx->state[6] = SHA256_H6;
sctx->state[7] = SHA256_H7; sctx->state[7] = SHA256_H7;
sctx->count[0] = sctx->count[1] = 0; sctx->count = 0;
return 0; return 0;
} }
...@@ -256,58 +249,54 @@ static int sha256_init(struct shash_desc *desc) ...@@ -256,58 +249,54 @@ static int sha256_init(struct shash_desc *desc)
static int sha256_update(struct shash_desc *desc, const u8 *data, static int sha256_update(struct shash_desc *desc, const u8 *data,
unsigned int len) unsigned int len)
{ {
struct sha256_ctx *sctx = shash_desc_ctx(desc); struct sha256_state *sctx = shash_desc_ctx(desc);
unsigned int i, index, part_len; unsigned int partial, done;
const u8 *src;
/* Compute number of bytes mod 128 */
index = (unsigned int)((sctx->count[0] >> 3) & 0x3f); partial = sctx->count & 0x3f;
sctx->count += len;
/* Update number of bits */ done = 0;
if ((sctx->count[0] += (len << 3)) < (len << 3)) { src = data;
sctx->count[1]++;
sctx->count[1] += (len >> 29); if ((partial + len) > 63) {
if (partial) {
done = -partial;
memcpy(sctx->buf + partial, data, done + 64);
src = sctx->buf;
} }
part_len = 64 - index; do {
sha256_transform(sctx->state, src);
done += 64;
src = data + done;
} while (done + 63 < len);
/* Transform as many times as possible. */ partial = 0;
if (len >= part_len) {
memcpy(&sctx->buf[index], data, part_len);
sha256_transform(sctx->state, sctx->buf);
for (i = part_len; i + 63 < len; i += 64)
sha256_transform(sctx->state, &data[i]);
index = 0;
} else {
i = 0;
} }
memcpy(sctx->buf + partial, src, len - done);
/* Buffer remaining input */
memcpy(&sctx->buf[index], &data[i], len-i);
return 0; return 0;
} }
static int sha256_final(struct shash_desc *desc, u8 *out) static int sha256_final(struct shash_desc *desc, u8 *out)
{ {
struct sha256_ctx *sctx = shash_desc_ctx(desc); struct sha256_state *sctx = shash_desc_ctx(desc);
__be32 *dst = (__be32 *)out; __be32 *dst = (__be32 *)out;
__be32 bits[2]; __be64 bits;
unsigned int index, pad_len; unsigned int index, pad_len;
int i; int i;
static const u8 padding[64] = { 0x80, }; static const u8 padding[64] = { 0x80, };
/* Save number of bits */ /* Save number of bits */
bits[1] = cpu_to_be32(sctx->count[0]); bits = cpu_to_be64(sctx->count << 3);
bits[0] = cpu_to_be32(sctx->count[1]);
/* Pad out to 56 mod 64. */ /* Pad out to 56 mod 64. */
index = (sctx->count[0] >> 3) & 0x3f; index = sctx->count & 0x3f;
pad_len = (index < 56) ? (56 - index) : ((64+56) - index); pad_len = (index < 56) ? (56 - index) : ((64+56) - index);
sha256_update(desc, padding, pad_len); sha256_update(desc, padding, pad_len);
/* Append length (before padding) */ /* Append length (before padding) */
sha256_update(desc, (const u8 *)bits, sizeof(bits)); sha256_update(desc, (const u8 *)&bits, sizeof(bits));
/* Store state in digest */ /* Store state in digest */
for (i = 0; i < 8; i++) for (i = 0; i < 8; i++)
...@@ -331,12 +320,31 @@ static int sha224_final(struct shash_desc *desc, u8 *hash) ...@@ -331,12 +320,31 @@ static int sha224_final(struct shash_desc *desc, u8 *hash)
return 0; return 0;
} }
/* Serialize the partial-hash state; it is plain data, so a raw copy works. */
static int sha256_export(struct shash_desc *desc, void *out)
{
	memcpy(out, shash_desc_ctx(desc), sizeof(struct sha256_state));
	return 0;
}
/* Restore a partial-hash state previously produced by sha256_export(). */
static int sha256_import(struct shash_desc *desc, const void *in)
{
	memcpy(shash_desc_ctx(desc), in, sizeof(struct sha256_state));
	return 0;
}
static struct shash_alg sha256 = { static struct shash_alg sha256 = {
.digestsize = SHA256_DIGEST_SIZE, .digestsize = SHA256_DIGEST_SIZE,
.init = sha256_init, .init = sha256_init,
.update = sha256_update, .update = sha256_update,
.final = sha256_final, .final = sha256_final,
.descsize = sizeof(struct sha256_ctx), .export = sha256_export,
.import = sha256_import,
.descsize = sizeof(struct sha256_state),
.statesize = sizeof(struct sha256_state),
.base = { .base = {
.cra_name = "sha256", .cra_name = "sha256",
.cra_driver_name= "sha256-generic", .cra_driver_name= "sha256-generic",
...@@ -351,7 +359,7 @@ static struct shash_alg sha224 = { ...@@ -351,7 +359,7 @@ static struct shash_alg sha224 = {
.init = sha224_init, .init = sha224_init,
.update = sha256_update, .update = sha256_update,
.final = sha224_final, .final = sha224_final,
.descsize = sizeof(struct sha256_ctx), .descsize = sizeof(struct sha256_state),
.base = { .base = {
.cra_name = "sha224", .cra_name = "sha224",
.cra_driver_name= "sha224-generic", .cra_driver_name= "sha224-generic",
......
...@@ -21,12 +21,6 @@ ...@@ -21,12 +21,6 @@
#include <linux/percpu.h> #include <linux/percpu.h>
#include <asm/byteorder.h> #include <asm/byteorder.h>
struct sha512_ctx {
u64 state[8];
u32 count[4];
u8 buf[128];
};
static DEFINE_PER_CPU(u64[80], msg_schedule); static DEFINE_PER_CPU(u64[80], msg_schedule);
static inline u64 Ch(u64 x, u64 y, u64 z) static inline u64 Ch(u64 x, u64 y, u64 z)
...@@ -141,7 +135,7 @@ sha512_transform(u64 *state, const u8 *input) ...@@ -141,7 +135,7 @@ sha512_transform(u64 *state, const u8 *input)
static int static int
sha512_init(struct shash_desc *desc) sha512_init(struct shash_desc *desc)
{ {
struct sha512_ctx *sctx = shash_desc_ctx(desc); struct sha512_state *sctx = shash_desc_ctx(desc);
sctx->state[0] = SHA512_H0; sctx->state[0] = SHA512_H0;
sctx->state[1] = SHA512_H1; sctx->state[1] = SHA512_H1;
sctx->state[2] = SHA512_H2; sctx->state[2] = SHA512_H2;
...@@ -150,7 +144,7 @@ sha512_init(struct shash_desc *desc) ...@@ -150,7 +144,7 @@ sha512_init(struct shash_desc *desc)
sctx->state[5] = SHA512_H5; sctx->state[5] = SHA512_H5;
sctx->state[6] = SHA512_H6; sctx->state[6] = SHA512_H6;
sctx->state[7] = SHA512_H7; sctx->state[7] = SHA512_H7;
sctx->count[0] = sctx->count[1] = sctx->count[2] = sctx->count[3] = 0; sctx->count[0] = sctx->count[1] = 0;
return 0; return 0;
} }
...@@ -158,7 +152,7 @@ sha512_init(struct shash_desc *desc) ...@@ -158,7 +152,7 @@ sha512_init(struct shash_desc *desc)
static int static int
sha384_init(struct shash_desc *desc) sha384_init(struct shash_desc *desc)
{ {
struct sha512_ctx *sctx = shash_desc_ctx(desc); struct sha512_state *sctx = shash_desc_ctx(desc);
sctx->state[0] = SHA384_H0; sctx->state[0] = SHA384_H0;
sctx->state[1] = SHA384_H1; sctx->state[1] = SHA384_H1;
sctx->state[2] = SHA384_H2; sctx->state[2] = SHA384_H2;
...@@ -167,7 +161,7 @@ sha384_init(struct shash_desc *desc) ...@@ -167,7 +161,7 @@ sha384_init(struct shash_desc *desc)
sctx->state[5] = SHA384_H5; sctx->state[5] = SHA384_H5;
sctx->state[6] = SHA384_H6; sctx->state[6] = SHA384_H6;
sctx->state[7] = SHA384_H7; sctx->state[7] = SHA384_H7;
sctx->count[0] = sctx->count[1] = sctx->count[2] = sctx->count[3] = 0; sctx->count[0] = sctx->count[1] = 0;
return 0; return 0;
} }
...@@ -175,20 +169,16 @@ sha384_init(struct shash_desc *desc) ...@@ -175,20 +169,16 @@ sha384_init(struct shash_desc *desc)
static int static int
sha512_update(struct shash_desc *desc, const u8 *data, unsigned int len) sha512_update(struct shash_desc *desc, const u8 *data, unsigned int len)
{ {
struct sha512_ctx *sctx = shash_desc_ctx(desc); struct sha512_state *sctx = shash_desc_ctx(desc);
unsigned int i, index, part_len; unsigned int i, index, part_len;
/* Compute number of bytes mod 128 */ /* Compute number of bytes mod 128 */
index = (unsigned int)((sctx->count[0] >> 3) & 0x7F); index = sctx->count[0] & 0x7f;
/* Update number of bits */ /* Update number of bytes */
if ((sctx->count[0] += (len << 3)) < (len << 3)) { if (!(sctx->count[0] += len))
if ((sctx->count[1] += 1) < 1) sctx->count[1]++;
if ((sctx->count[2] += 1) < 1)
sctx->count[3]++;
sctx->count[1] += (len >> 29);
}
part_len = 128 - index; part_len = 128 - index;
...@@ -214,21 +204,19 @@ sha512_update(struct shash_desc *desc, const u8 *data, unsigned int len) ...@@ -214,21 +204,19 @@ sha512_update(struct shash_desc *desc, const u8 *data, unsigned int len)
static int static int
sha512_final(struct shash_desc *desc, u8 *hash) sha512_final(struct shash_desc *desc, u8 *hash)
{ {
struct sha512_ctx *sctx = shash_desc_ctx(desc); struct sha512_state *sctx = shash_desc_ctx(desc);
static u8 padding[128] = { 0x80, }; static u8 padding[128] = { 0x80, };
__be64 *dst = (__be64 *)hash; __be64 *dst = (__be64 *)hash;
__be32 bits[4]; __be64 bits[2];
unsigned int index, pad_len; unsigned int index, pad_len;
int i; int i;
/* Save number of bits */ /* Save number of bits */
bits[3] = cpu_to_be32(sctx->count[0]); bits[1] = cpu_to_be64(sctx->count[0] << 3);
bits[2] = cpu_to_be32(sctx->count[1]); bits[0] = cpu_to_be64(sctx->count[1] << 3 | sctx->count[0] >> 61);
bits[1] = cpu_to_be32(sctx->count[2]);
bits[0] = cpu_to_be32(sctx->count[3]);
/* Pad out to 112 mod 128. */ /* Pad out to 112 mod 128. */
index = (sctx->count[0] >> 3) & 0x7f; index = sctx->count[0] & 0x7f;
pad_len = (index < 112) ? (112 - index) : ((128+112) - index); pad_len = (index < 112) ? (112 - index) : ((128+112) - index);
sha512_update(desc, padding, pad_len); sha512_update(desc, padding, pad_len);
...@@ -240,7 +228,7 @@ sha512_final(struct shash_desc *desc, u8 *hash) ...@@ -240,7 +228,7 @@ sha512_final(struct shash_desc *desc, u8 *hash)
dst[i] = cpu_to_be64(sctx->state[i]); dst[i] = cpu_to_be64(sctx->state[i]);
/* Zeroize sensitive information. */ /* Zeroize sensitive information. */
memset(sctx, 0, sizeof(struct sha512_ctx)); memset(sctx, 0, sizeof(struct sha512_state));
return 0; return 0;
} }
...@@ -262,7 +250,7 @@ static struct shash_alg sha512 = { ...@@ -262,7 +250,7 @@ static struct shash_alg sha512 = {
.init = sha512_init, .init = sha512_init,
.update = sha512_update, .update = sha512_update,
.final = sha512_final, .final = sha512_final,
.descsize = sizeof(struct sha512_ctx), .descsize = sizeof(struct sha512_state),
.base = { .base = {
.cra_name = "sha512", .cra_name = "sha512",
.cra_flags = CRYPTO_ALG_TYPE_SHASH, .cra_flags = CRYPTO_ALG_TYPE_SHASH,
...@@ -276,7 +264,7 @@ static struct shash_alg sha384 = { ...@@ -276,7 +264,7 @@ static struct shash_alg sha384 = {
.init = sha384_init, .init = sha384_init,
.update = sha512_update, .update = sha512_update,
.final = sha384_final, .final = sha384_final,
.descsize = sizeof(struct sha512_ctx), .descsize = sizeof(struct sha512_state),
.base = { .base = {
.cra_name = "sha384", .cra_name = "sha384",
.cra_flags = CRYPTO_ALG_TYPE_SHASH, .cra_flags = CRYPTO_ALG_TYPE_SHASH,
......
This diff is collapsed.
...@@ -45,6 +45,9 @@ ...@@ -45,6 +45,9 @@
*/ */
static unsigned int sec; static unsigned int sec;
static char *alg = NULL;
static u32 type;
static u32 mask;
static int mode; static int mode;
static char *tvmem[TVMEMSIZE]; static char *tvmem[TVMEMSIZE];
...@@ -716,6 +719,10 @@ static int do_test(int m) ...@@ -716,6 +719,10 @@ static int do_test(int m)
ret += tcrypt_test("hmac(rmd160)"); ret += tcrypt_test("hmac(rmd160)");
break; break;
case 109:
ret += tcrypt_test("vmac(aes)");
break;
case 150: case 150:
ret += tcrypt_test("ansi_cprng"); ret += tcrypt_test("ansi_cprng");
break; break;
...@@ -885,6 +892,12 @@ static int do_test(int m) ...@@ -885,6 +892,12 @@ static int do_test(int m)
return ret; return ret;
} }
/*
 * Probe for the presence of a single named algorithm.
 * Returns 0 if it can be instantiated, -ENOENT otherwise.
 */
static int do_alg_test(const char *alg, u32 type, u32 mask)
{
	/* A zero mask means "match on the algorithm type bits only". */
	if (!mask)
		mask = CRYPTO_ALG_TYPE_MASK;

	if (crypto_has_alg(alg, type, mask))
		return 0;

	return -ENOENT;
}
static int __init tcrypt_mod_init(void) static int __init tcrypt_mod_init(void)
{ {
int err = -ENOMEM; int err = -ENOMEM;
...@@ -896,7 +909,11 @@ static int __init tcrypt_mod_init(void) ...@@ -896,7 +909,11 @@ static int __init tcrypt_mod_init(void)
goto err_free_tv; goto err_free_tv;
} }
if (alg)
err = do_alg_test(alg, type, mask);
else
err = do_test(mode); err = do_test(mode);
if (err) { if (err) {
printk(KERN_ERR "tcrypt: one or more tests failed!\n"); printk(KERN_ERR "tcrypt: one or more tests failed!\n");
goto err_free_tv; goto err_free_tv;
...@@ -928,6 +945,9 @@ static void __exit tcrypt_mod_fini(void) { } ...@@ -928,6 +945,9 @@ static void __exit tcrypt_mod_fini(void) { }
module_init(tcrypt_mod_init); module_init(tcrypt_mod_init);
module_exit(tcrypt_mod_fini); module_exit(tcrypt_mod_fini);
module_param(alg, charp, 0);
module_param(type, uint, 0);
module_param(mask, uint, 0);
module_param(mode, int, 0); module_param(mode, int, 0);
module_param(sec, uint, 0); module_param(sec, uint, 0);
MODULE_PARM_DESC(sec, "Length in seconds of speed tests " MODULE_PARM_DESC(sec, "Length in seconds of speed tests "
......
...@@ -190,10 +190,6 @@ static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template, ...@@ -190,10 +190,6 @@ static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
hash_buff = xbuf[0]; hash_buff = xbuf[0];
ret = -EINVAL;
if (WARN_ON(template[i].psize > PAGE_SIZE))
goto out;
memcpy(hash_buff, template[i].plaintext, template[i].psize); memcpy(hash_buff, template[i].plaintext, template[i].psize);
sg_init_one(&sg[0], hash_buff, template[i].psize); sg_init_one(&sg[0], hash_buff, template[i].psize);
...@@ -2251,6 +2247,15 @@ static const struct alg_test_desc alg_test_descs[] = { ...@@ -2251,6 +2247,15 @@ static const struct alg_test_desc alg_test_descs[] = {
.count = TGR192_TEST_VECTORS .count = TGR192_TEST_VECTORS
} }
} }
}, {
.alg = "vmac(aes)",
.test = alg_test_hash,
.suite = {
.hash = {
.vecs = aes_vmac128_tv_template,
.count = VMAC_AES_TEST_VECTORS
}
}
}, { }, {
.alg = "wp256", .alg = "wp256",
.test = alg_test_hash, .test = alg_test_hash,
...@@ -2348,6 +2353,7 @@ static int alg_find_test(const char *alg) ...@@ -2348,6 +2353,7 @@ static int alg_find_test(const char *alg)
int alg_test(const char *driver, const char *alg, u32 type, u32 mask) int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{ {
int i; int i;
int j;
int rc; int rc;
if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) { if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
...@@ -2369,14 +2375,22 @@ int alg_test(const char *driver, const char *alg, u32 type, u32 mask) ...@@ -2369,14 +2375,22 @@ int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
} }
i = alg_find_test(alg); i = alg_find_test(alg);
if (i < 0) j = alg_find_test(driver);
if (i < 0 && j < 0)
goto notest; goto notest;
if (fips_enabled && !alg_test_descs[i].fips_allowed) if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
(j >= 0 && !alg_test_descs[j].fips_allowed)))
goto non_fips_alg; goto non_fips_alg;
rc = alg_test_descs[i].test(alg_test_descs + i, driver, rc = 0;
if (i >= 0)
rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
type, mask);
if (j >= 0)
rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
type, mask); type, mask);
test_done: test_done:
if (fips_enabled && rc) if (fips_enabled && rc)
panic("%s: %s alg self test failed in fips mode!\n", driver, alg); panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
......
...@@ -1654,6 +1654,22 @@ static struct hash_testvec aes_xcbc128_tv_template[] = { ...@@ -1654,6 +1654,22 @@ static struct hash_testvec aes_xcbc128_tv_template[] = {
} }
}; };
#define VMAC_AES_TEST_VECTORS 1
static char vmac_string[128] = {'\x01', '\x01', '\x01', '\x01',
'\x02', '\x03', '\x02', '\x02',
'\x02', '\x04', '\x01', '\x07',
'\x04', '\x01', '\x04', '\x03',};
static struct hash_testvec aes_vmac128_tv_template[] = {
{
.key = "\x00\x01\x02\x03\x04\x05\x06\x07"
"\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f",
.plaintext = vmac_string,
.digest = "\xcb\xd7\x8a\xfd\xb7\x33\x79\xe7",
.psize = 128,
.ksize = 16,
},
};
/* /*
* SHA384 HMAC test vectors from RFC4231 * SHA384 HMAC test vectors from RFC4231
*/ */
......
This diff is collapsed.
This diff is collapsed.
...@@ -44,8 +44,8 @@ ...@@ -44,8 +44,8 @@
* want to register another driver on the same PCI id. * want to register another driver on the same PCI id.
*/ */
static const struct pci_device_id pci_tbl[] = { static const struct pci_device_id pci_tbl[] = {
{ 0x1022, 0x7443, PCI_ANY_ID, PCI_ANY_ID, 0, 0, 0, }, { PCI_VDEVICE(AMD, 0x7443), 0, },
{ 0x1022, 0x746b, PCI_ANY_ID, PCI_ANY_ID, 0, 0, 0, }, { PCI_VDEVICE(AMD, 0x746b), 0, },
{ 0, }, /* terminate list */ { 0, }, /* terminate list */
}; };
MODULE_DEVICE_TABLE(pci, pci_tbl); MODULE_DEVICE_TABLE(pci, pci_tbl);
......
...@@ -46,8 +46,7 @@ ...@@ -46,8 +46,7 @@
* want to register another driver on the same PCI id. * want to register another driver on the same PCI id.
*/ */
static const struct pci_device_id pci_tbl[] = { static const struct pci_device_id pci_tbl[] = {
{ PCI_VENDOR_ID_AMD, PCI_DEVICE_ID_AMD_LX_AES, { PCI_VDEVICE(AMD, PCI_DEVICE_ID_AMD_LX_AES), 0, },
PCI_ANY_ID, PCI_ANY_ID, 0, 0, 0, },
{ 0, }, /* terminate list */ { 0, }, /* terminate list */
}; };
MODULE_DEVICE_TABLE(pci, pci_tbl); MODULE_DEVICE_TABLE(pci, pci_tbl);
......
...@@ -240,6 +240,7 @@ ...@@ -240,6 +240,7 @@
#include <linux/spinlock.h> #include <linux/spinlock.h>
#include <linux/percpu.h> #include <linux/percpu.h>
#include <linux/cryptohash.h> #include <linux/cryptohash.h>
#include <linux/fips.h>
#ifdef CONFIG_GENERIC_HARDIRQS #ifdef CONFIG_GENERIC_HARDIRQS
# include <linux/irq.h> # include <linux/irq.h>
...@@ -413,6 +414,7 @@ struct entropy_store { ...@@ -413,6 +414,7 @@ struct entropy_store {
unsigned add_ptr; unsigned add_ptr;
int entropy_count; int entropy_count;
int input_rotate; int input_rotate;
__u8 *last_data;
}; };
static __u32 input_pool_data[INPUT_POOL_WORDS]; static __u32 input_pool_data[INPUT_POOL_WORDS];
...@@ -852,12 +854,21 @@ static ssize_t extract_entropy(struct entropy_store *r, void *buf, ...@@ -852,12 +854,21 @@ static ssize_t extract_entropy(struct entropy_store *r, void *buf,
{ {
ssize_t ret = 0, i; ssize_t ret = 0, i;
__u8 tmp[EXTRACT_SIZE]; __u8 tmp[EXTRACT_SIZE];
unsigned long flags;
xfer_secondary_pool(r, nbytes); xfer_secondary_pool(r, nbytes);
nbytes = account(r, nbytes, min, reserved); nbytes = account(r, nbytes, min, reserved);
while (nbytes) { while (nbytes) {
extract_buf(r, tmp); extract_buf(r, tmp);
if (r->last_data) {
spin_lock_irqsave(&r->lock, flags);
if (!memcmp(tmp, r->last_data, EXTRACT_SIZE))
panic("Hardware RNG duplicated output!\n");
memcpy(r->last_data, tmp, EXTRACT_SIZE);
spin_unlock_irqrestore(&r->lock, flags);
}
i = min_t(int, nbytes, EXTRACT_SIZE); i = min_t(int, nbytes, EXTRACT_SIZE);
memcpy(buf, tmp, i); memcpy(buf, tmp, i);
nbytes -= i; nbytes -= i;
...@@ -940,6 +951,9 @@ static void init_std_data(struct entropy_store *r) ...@@ -940,6 +951,9 @@ static void init_std_data(struct entropy_store *r)
now = ktime_get_real(); now = ktime_get_real();
mix_pool_bytes(r, &now, sizeof(now)); mix_pool_bytes(r, &now, sizeof(now));
mix_pool_bytes(r, utsname(), sizeof(*(utsname()))); mix_pool_bytes(r, utsname(), sizeof(*(utsname())));
/* Enable continuous test in fips mode */
if (fips_enabled)
r->last_data = kmalloc(EXTRACT_SIZE, GFP_KERNEL);
} }
static int rand_initialize(void) static int rand_initialize(void)
......
...@@ -13,7 +13,6 @@ if CRYPTO_HW ...@@ -13,7 +13,6 @@ if CRYPTO_HW
config CRYPTO_DEV_PADLOCK config CRYPTO_DEV_PADLOCK
tristate "Support for VIA PadLock ACE" tristate "Support for VIA PadLock ACE"
depends on X86 && !UML depends on X86 && !UML
select CRYPTO_ALGAPI
help help
Some VIA processors come with an integrated crypto engine Some VIA processors come with an integrated crypto engine
(so called VIA PadLock ACE, Advanced Cryptography Engine) (so called VIA PadLock ACE, Advanced Cryptography Engine)
...@@ -39,6 +38,7 @@ config CRYPTO_DEV_PADLOCK_AES ...@@ -39,6 +38,7 @@ config CRYPTO_DEV_PADLOCK_AES
config CRYPTO_DEV_PADLOCK_SHA config CRYPTO_DEV_PADLOCK_SHA
tristate "PadLock driver for SHA1 and SHA256 algorithms" tristate "PadLock driver for SHA1 and SHA256 algorithms"
depends on CRYPTO_DEV_PADLOCK depends on CRYPTO_DEV_PADLOCK
select CRYPTO_HASH
select CRYPTO_SHA1 select CRYPTO_SHA1
select CRYPTO_SHA256 select CRYPTO_SHA256
help help
...@@ -157,6 +157,19 @@ config S390_PRNG ...@@ -157,6 +157,19 @@ config S390_PRNG
ANSI X9.17 standard. The PRNG is usable via the char device ANSI X9.17 standard. The PRNG is usable via the char device
/dev/prandom. /dev/prandom.
config CRYPTO_DEV_MV_CESA
tristate "Marvell's Cryptographic Engine"
depends on PLAT_ORION
select CRYPTO_ALGAPI
select CRYPTO_AES
select CRYPTO_BLKCIPHER2
help
This driver allows you to utilize the Cryptographic Engines and
Security Accelerator (CESA) which can be found on the Marvell Orion
and Kirkwood SoCs, such as QNAP's TS-209.
Currently the driver supports AES in ECB and CBC mode without DMA.
config CRYPTO_DEV_HIFN_795X config CRYPTO_DEV_HIFN_795X
tristate "Driver HIFN 795x crypto accelerator chips" tristate "Driver HIFN 795x crypto accelerator chips"
select CRYPTO_DES select CRYPTO_DES
......
...@@ -2,6 +2,7 @@ obj-$(CONFIG_CRYPTO_DEV_PADLOCK_AES) += padlock-aes.o ...@@ -2,6 +2,7 @@ obj-$(CONFIG_CRYPTO_DEV_PADLOCK_AES) += padlock-aes.o
obj-$(CONFIG_CRYPTO_DEV_PADLOCK_SHA) += padlock-sha.o obj-$(CONFIG_CRYPTO_DEV_PADLOCK_SHA) += padlock-sha.o
obj-$(CONFIG_CRYPTO_DEV_GEODE) += geode-aes.o obj-$(CONFIG_CRYPTO_DEV_GEODE) += geode-aes.o
obj-$(CONFIG_CRYPTO_DEV_HIFN_795X) += hifn_795x.o obj-$(CONFIG_CRYPTO_DEV_HIFN_795X) += hifn_795x.o
obj-$(CONFIG_CRYPTO_DEV_MV_CESA) += mv_cesa.o
obj-$(CONFIG_CRYPTO_DEV_TALITOS) += talitos.o obj-$(CONFIG_CRYPTO_DEV_TALITOS) += talitos.o
obj-$(CONFIG_CRYPTO_DEV_IXP4XX) += ixp4xx_crypto.o obj-$(CONFIG_CRYPTO_DEV_IXP4XX) += ixp4xx_crypto.o
obj-$(CONFIG_CRYPTO_DEV_PPC4XX) += amcc/ obj-$(CONFIG_CRYPTO_DEV_PPC4XX) += amcc/
...@@ -208,7 +208,8 @@ static int crypto4xx_hash_alg_init(struct crypto_tfm *tfm, ...@@ -208,7 +208,8 @@ static int crypto4xx_hash_alg_init(struct crypto_tfm *tfm,
} }
} }
tfm->crt_ahash.reqsize = sizeof(struct crypto4xx_ctx); crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
sizeof(struct crypto4xx_ctx));
sa = (struct dynamic_sa_ctl *) ctx->sa_in; sa = (struct dynamic_sa_ctl *) ctx->sa_in;
set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV, set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
SA_NOT_LOAD_HASH, SA_LOAD_IV_FROM_SA, SA_NOT_LOAD_HASH, SA_LOAD_IV_FROM_SA,
......
...@@ -31,8 +31,6 @@ ...@@ -31,8 +31,6 @@
#include <asm/dcr.h> #include <asm/dcr.h>
#include <asm/dcr-regs.h> #include <asm/dcr-regs.h>
#include <asm/cacheflush.h> #include <asm/cacheflush.h>
#include <crypto/internal/hash.h>
#include <crypto/algapi.h>
#include <crypto/aes.h> #include <crypto/aes.h>
#include <crypto/sha.h> #include <crypto/sha.h>
#include "crypto4xx_reg_def.h" #include "crypto4xx_reg_def.h"
...@@ -998,10 +996,15 @@ static int crypto4xx_alg_init(struct crypto_tfm *tfm) ...@@ -998,10 +996,15 @@ static int crypto4xx_alg_init(struct crypto_tfm *tfm)
ctx->sa_out_dma_addr = 0; ctx->sa_out_dma_addr = 0;
ctx->sa_len = 0; ctx->sa_len = 0;
if (alg->cra_type == &crypto_ablkcipher_type) switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
default:
tfm->crt_ablkcipher.reqsize = sizeof(struct crypto4xx_ctx); tfm->crt_ablkcipher.reqsize = sizeof(struct crypto4xx_ctx);
else if (alg->cra_type == &crypto_ahash_type) break;
tfm->crt_ahash.reqsize = sizeof(struct crypto4xx_ctx); case CRYPTO_ALG_TYPE_AHASH:
crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
sizeof(struct crypto4xx_ctx));
break;
}
return 0; return 0;
} }
...@@ -1015,7 +1018,8 @@ static void crypto4xx_alg_exit(struct crypto_tfm *tfm) ...@@ -1015,7 +1018,8 @@ static void crypto4xx_alg_exit(struct crypto_tfm *tfm)
} }
int crypto4xx_register_alg(struct crypto4xx_device *sec_dev, int crypto4xx_register_alg(struct crypto4xx_device *sec_dev,
struct crypto_alg *crypto_alg, int array_size) struct crypto4xx_alg_common *crypto_alg,
int array_size)
{ {
struct crypto4xx_alg *alg; struct crypto4xx_alg *alg;
int i; int i;
...@@ -1027,13 +1031,18 @@ int crypto4xx_register_alg(struct crypto4xx_device *sec_dev, ...@@ -1027,13 +1031,18 @@ int crypto4xx_register_alg(struct crypto4xx_device *sec_dev,
return -ENOMEM; return -ENOMEM;
alg->alg = crypto_alg[i]; alg->alg = crypto_alg[i];
INIT_LIST_HEAD(&alg->alg.cra_list);
if (alg->alg.cra_init == NULL)
alg->alg.cra_init = crypto4xx_alg_init;
if (alg->alg.cra_exit == NULL)
alg->alg.cra_exit = crypto4xx_alg_exit;
alg->dev = sec_dev; alg->dev = sec_dev;
rc = crypto_register_alg(&alg->alg);
switch (alg->alg.type) {
case CRYPTO_ALG_TYPE_AHASH:
rc = crypto_register_ahash(&alg->alg.u.hash);
break;
default:
rc = crypto_register_alg(&alg->alg.u.cipher);
break;
}
if (rc) { if (rc) {
list_del(&alg->entry); list_del(&alg->entry);
kfree(alg); kfree(alg);
...@@ -1051,7 +1060,14 @@ static void crypto4xx_unregister_alg(struct crypto4xx_device *sec_dev) ...@@ -1051,7 +1060,14 @@ static void crypto4xx_unregister_alg(struct crypto4xx_device *sec_dev)
list_for_each_entry_safe(alg, tmp, &sec_dev->alg_list, entry) { list_for_each_entry_safe(alg, tmp, &sec_dev->alg_list, entry) {
list_del(&alg->entry); list_del(&alg->entry);
crypto_unregister_alg(&alg->alg); switch (alg->alg.type) {
case CRYPTO_ALG_TYPE_AHASH:
crypto_unregister_ahash(&alg->alg.u.hash);
break;
default:
crypto_unregister_alg(&alg->alg.u.cipher);
}
kfree(alg); kfree(alg);
} }
} }
...@@ -1104,17 +1120,18 @@ static irqreturn_t crypto4xx_ce_interrupt_handler(int irq, void *data) ...@@ -1104,17 +1120,18 @@ static irqreturn_t crypto4xx_ce_interrupt_handler(int irq, void *data)
/** /**
* Supported Crypto Algorithms * Supported Crypto Algorithms
*/ */
struct crypto_alg crypto4xx_alg[] = { struct crypto4xx_alg_common crypto4xx_alg[] = {
/* Crypto AES modes */ /* Crypto AES modes */
{ { .type = CRYPTO_ALG_TYPE_ABLKCIPHER, .u.cipher = {
.cra_name = "cbc(aes)", .cra_name = "cbc(aes)",
.cra_driver_name = "cbc-aes-ppc4xx", .cra_driver_name = "cbc-aes-ppc4xx",
.cra_priority = CRYPTO4XX_CRYPTO_PRIORITY, .cra_priority = CRYPTO4XX_CRYPTO_PRIORITY,
.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
.cra_blocksize = AES_BLOCK_SIZE, .cra_blocksize = AES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct crypto4xx_ctx), .cra_ctxsize = sizeof(struct crypto4xx_ctx),
.cra_alignmask = 0,
.cra_type = &crypto_ablkcipher_type, .cra_type = &crypto_ablkcipher_type,
.cra_init = crypto4xx_alg_init,
.cra_exit = crypto4xx_alg_exit,
.cra_module = THIS_MODULE, .cra_module = THIS_MODULE,
.cra_u = { .cra_u = {
.ablkcipher = { .ablkcipher = {
...@@ -1126,29 +1143,7 @@ struct crypto_alg crypto4xx_alg[] = { ...@@ -1126,29 +1143,7 @@ struct crypto_alg crypto4xx_alg[] = {
.decrypt = crypto4xx_decrypt, .decrypt = crypto4xx_decrypt,
} }
} }
}, }},
/* Hash SHA1 */
{
.cra_name = "sha1",
.cra_driver_name = "sha1-ppc4xx",
.cra_priority = CRYPTO4XX_CRYPTO_PRIORITY,
.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC,
.cra_blocksize = SHA1_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct crypto4xx_ctx),
.cra_alignmask = 0,
.cra_type = &crypto_ahash_type,
.cra_init = crypto4xx_sha1_alg_init,
.cra_module = THIS_MODULE,
.cra_u = {
.ahash = {
.digestsize = SHA1_DIGEST_SIZE,
.init = crypto4xx_hash_init,
.update = crypto4xx_hash_update,
.final = crypto4xx_hash_final,
.digest = crypto4xx_hash_digest,
}
}
},
}; };
/** /**
......
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
...@@ -57,6 +57,7 @@ ...@@ -57,6 +57,7 @@
#define TALITOS_CCCR_RESET 0x1 /* channel reset */ #define TALITOS_CCCR_RESET 0x1 /* channel reset */
#define TALITOS_CCCR_LO(ch) (ch * TALITOS_CH_STRIDE + 0x110c) #define TALITOS_CCCR_LO(ch) (ch * TALITOS_CH_STRIDE + 0x110c)
#define TALITOS_CCCR_LO_IWSE 0x80 /* chan. ICCR writeback enab. */ #define TALITOS_CCCR_LO_IWSE 0x80 /* chan. ICCR writeback enab. */
#define TALITOS_CCCR_LO_EAE 0x20 /* extended address enable */
#define TALITOS_CCCR_LO_CDWE 0x10 /* chan. done writeback enab. */ #define TALITOS_CCCR_LO_CDWE 0x10 /* chan. done writeback enab. */
#define TALITOS_CCCR_LO_NT 0x4 /* notification type */ #define TALITOS_CCCR_LO_NT 0x4 /* notification type */
#define TALITOS_CCCR_LO_CDIE 0x2 /* channel done IRQ enable */ #define TALITOS_CCCR_LO_CDIE 0x2 /* channel done IRQ enable */
......
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment