Commit 13a1bb93 authored by Pascal van Leeuwen, committed by Herbert Xu

crypto: inside-secure - Fixed warnings on inconsistent byte order handling

This fixes a bunch of endianness-related sparse warnings reported by the
kbuild test robot as well as by Ben Dooks.

Credits for the fix to safexcel.c go to Ben Dooks.
Reported-by: kbuild test robot <lkp@intel.com>
Reported-by: Ben Dooks <ben.dooks@codethink.co.uk>
Signed-off-by: Pascal van Leeuwen <pvanleeuwen@verimatrix.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
parent 9b537997
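
The recurring pattern in the hunks below is to declare data that lives in a fixed byte order with sparse's __le32/__be32 types and to convert exactly once, at the point of comparison or assignment, via le32_to_cpu()/cpu_to_le32() (or the be32 equivalents), with __force casts only where a value is deliberately kept in a foreign order. The following is a minimal sketch of that pattern, not part of the patch; the struct and function names (example_ctx, example_key_changed, example_store_key) are illustrative only and do not exist in the driver.

/* Illustrative sketch only -- hypothetical names, not part of this patch. */
#include <linux/types.h>
#include <asm/byteorder.h>

struct example_ctx {
        __le32 key[8];          /* key cached in the byte order the HW expects */
};

/* Compare a cached little-endian key against freshly expanded CPU-order words. */
static bool example_key_changed(const struct example_ctx *ctx,
                                const u32 *key_enc, unsigned int words)
{
        unsigned int i;

        for (i = 0; i < words; i++)
                if (le32_to_cpu(ctx->key[i]) != key_enc[i])     /* convert, then compare */
                        return true;

        return false;
}

/* Store CPU-order key words in the fixed byte order, one conversion per word. */
static void example_store_key(struct example_ctx *ctx,
                              const u32 *key_enc, unsigned int words)
{
        unsigned int i;

        for (i = 0; i < words; i++)
                ctx->key[i] = cpu_to_le32(key_enc[i]);
}

With such annotations in place, building with make C=1 (sparse) flags any direct mix of native-endian u32 and __le32/__be32 values that lacks a conversion helper or an explicit __force cast.
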
@@ -311,13 +311,14 @@ static void eip197_init_firmware(struct safexcel_crypto_priv *priv)
 static int eip197_write_firmware(struct safexcel_crypto_priv *priv,
                                  const struct firmware *fw)
 {
-        const u32 *data = (const u32 *)fw->data;
+        const __be32 *data = (const __be32 *)fw->data;
         int i;

         /* Write the firmware */
         for (i = 0; i < fw->size / sizeof(u32); i++)
                 writel(be32_to_cpu(data[i]),
-                       priv->base + EIP197_CLASSIFICATION_RAMS + i * sizeof(u32));
+                       priv->base + EIP197_CLASSIFICATION_RAMS +
+                       i * sizeof(__be32));

         /* Exclude final 2 NOPs from size */
         return i - EIP197_FW_TERMINAL_NOPS;
......
@@ -360,8 +360,8 @@

 /* Context Control */
 struct safexcel_context_record {
-        u32 control0;
-        u32 control1;
+        __le32 control0;
+        __le32 control1;

         __le32 data[40];
 } __packed;
......
@@ -57,8 +57,8 @@ struct safexcel_cipher_ctx {
         /* All the below is AEAD specific */
         u32 hash_alg;
         u32 state_sz;
-        u32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
-        u32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
+        __be32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
+        __be32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];

         struct crypto_cipher *hkaes;
         struct crypto_aead *fback;
@@ -92,7 +92,8 @@ static void safexcel_cipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
                         cdesc->control_data.token[3] = 0;
                 } else {
                         /* 32 bit counter, start at 1 (big endian!) */
-                        cdesc->control_data.token[3] = cpu_to_be32(1);
+                        cdesc->control_data.token[3] =
+                                (__force u32)cpu_to_be32(1);
                 }

                 return;
@@ -108,7 +109,8 @@ static void safexcel_cipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
                         cdesc->control_data.token[3] = 0;
                 } else {
                         /* 32 bit counter, start at 1 (big endian!) */
-                        cdesc->control_data.token[3] = cpu_to_be32(1);
+                        *(__be32 *)&cdesc->control_data.token[3] =
+                                cpu_to_be32(1);
                 }

                 return;
@@ -267,7 +269,7 @@ static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
         if (ctx->xcm != EIP197_XCM_MODE_GCM) {
                 u8 *final_iv = (u8 *)cdesc->control_data.token;
                 u8 *cbcmaciv = (u8 *)&token[1];
-                u32 *aadlen = (u32 *)&token[5];
+                __le32 *aadlen = (__le32 *)&token[5];

                 /* Construct IV block B0 for the CBC-MAC */
                 token[0].opcode = EIP197_TOKEN_OPCODE_INSERT;
@@ -286,7 +288,8 @@ static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
                 cbcmaciv[15] = cryptlen & 255;

                 if (assoclen) {
-                        *aadlen = cpu_to_le32(cpu_to_be16(assoclen));
+                        *aadlen = cpu_to_le32((assoclen >> 8) |
+                                              ((assoclen & 0xff) << 8));
                         assoclen += 2;
                 }
@@ -333,7 +336,7 @@ static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
                 for (i = 0; i < len / sizeof(u32); i++) {
-                        if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
+                        if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
                                 ctx->base.needs_inv = true;
                                 break;
                         }
@@ -358,7 +361,7 @@ static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
         struct safexcel_crypto_priv *priv = ctx->priv;
         struct crypto_authenc_keys keys;
         struct crypto_aes_ctx aes;
-        int err = -EINVAL;
+        int err = -EINVAL, i;

         if (unlikely(crypto_authenc_extractkeys(&keys, key, len)))
                 goto badkey;
@@ -400,9 +403,14 @@ static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
                 goto badkey;
         }

-        if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
-            memcmp(ctx->key, keys.enckey, keys.enckeylen))
-                ctx->base.needs_inv = true;
+        if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
+                for (i = 0; i < keys.enckeylen / sizeof(u32); i++) {
+                        if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
+                                ctx->base.needs_inv = true;
+                                break;
+                        }
+                }
+        }

         /* Auth key */
         switch (ctx->hash_alg) {
@@ -450,7 +458,8 @@ static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
                 ctx->base.needs_inv = true;

         /* Now copy the keys into the context */
-        memcpy(ctx->key, keys.enckey, keys.enckeylen);
+        for (i = 0; i < keys.enckeylen / sizeof(u32); i++)
+                ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
         ctx->key_len = keys.enckeylen;

         memcpy(ctx->ipad, &istate.state, ctx->state_sz);
@@ -1378,7 +1387,7 @@ static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
                 for (i = 0; i < keylen / sizeof(u32); i++) {
-                        if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
+                        if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
                                 ctx->base.needs_inv = true;
                                 break;
                         }
@@ -1534,13 +1543,11 @@ static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
                 return err;

         /* if context exits and key changed, need to invalidate it */
-        if (ctx->base.ctxr_dma) {
+        if (ctx->base.ctxr_dma)
                 if (memcmp(ctx->key, key, len))
                         ctx->base.needs_inv = true;
-        }

         memcpy(ctx->key, key, len);
         ctx->key_len = len;

         return 0;
@@ -2361,7 +2368,7 @@ static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
                 for (i = 0; i < keylen / sizeof(u32); i++) {
-                        if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
+                        if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
                                 ctx->base.needs_inv = true;
                                 break;
                         }
@@ -2380,8 +2387,8 @@ static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
                 for (i = 0; i < keylen / sizeof(u32); i++) {
-                        if (ctx->key[i + keylen / sizeof(u32)] !=
-                            cpu_to_le32(aes.key_enc[i])) {
+                        if (le32_to_cpu(ctx->key[i + keylen / sizeof(u32)]) !=
+                            aes.key_enc[i]) {
                                 ctx->base.needs_inv = true;
                                 break;
                         }
@@ -2471,7 +2478,7 @@ static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
                 for (i = 0; i < len / sizeof(u32); i++) {
-                        if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
+                        if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
                                 ctx->base.needs_inv = true;
                                 break;
                         }
@@ -2498,7 +2505,7 @@ static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
                 for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
-                        if (ctx->ipad[i] != cpu_to_be32(hashkey[i])) {
+                        if (be32_to_cpu(ctx->ipad[i]) != hashkey[i]) {
                                 ctx->base.needs_inv = true;
                                 break;
                         }
@@ -2588,7 +2595,7 @@ static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
                 for (i = 0; i < len / sizeof(u32); i++) {
-                        if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
+                        if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
                                 ctx->base.needs_inv = true;
                                 break;
                         }
@@ -2697,20 +2704,12 @@ static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx *ctx,
                                      const u8 *key)
 {
         struct safexcel_crypto_priv *priv = ctx->priv;
-        int i;

-        if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
-                for (i = 0; i < CHACHA_KEY_SIZE / sizeof(u32); i++) {
-                        if (ctx->key[i] !=
-                            get_unaligned_le32(key + i * sizeof(u32))) {
-                                ctx->base.needs_inv = true;
-                                break;
-                        }
-                }
-        }
+        if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
+                if (memcmp(ctx->key, key, CHACHA_KEY_SIZE))
+                        ctx->base.needs_inv = true;

-        for (i = 0; i < CHACHA_KEY_SIZE / sizeof(u32); i++)
-                ctx->key[i] = get_unaligned_le32(key + i * sizeof(u32));
+        memcpy(ctx->key, key, CHACHA_KEY_SIZE);
         ctx->key_len = CHACHA_KEY_SIZE;
 }
@@ -2801,7 +2800,7 @@ static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
         struct aead_request *subreq = aead_request_ctx(req);
         u32 key[CHACHA_KEY_SIZE / sizeof(u32) + 1];
-        int i, ret = 0;
+        int ret = 0;

         /*
          * Instead of wasting time detecting umpteen silly corner cases,
@@ -2815,8 +2814,7 @@ static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
         }

         /* HW cannot do full (AAD+payload) zero length, use fallback */
-        for (i = 0; i < CHACHA_KEY_SIZE / sizeof(u32); i++)
-                key[i] = cpu_to_le32(ctx->key[i]);
+        memcpy(key, ctx->key, CHACHA_KEY_SIZE);
         if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
                 /* ESP variant has nonce appended to the key */
                 key[CHACHA_KEY_SIZE / sizeof(u32)] = ctx->nonce;
@@ -2971,25 +2969,17 @@ static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher *ctfm,
         struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
         struct safexcel_crypto_priv *priv = ctx->priv;
-        int i;

         if (len != SM4_KEY_SIZE) {
                 crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
                 return -EINVAL;
         }

-        if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
-                for (i = 0; i < SM4_KEY_SIZE / sizeof(u32); i++) {
-                        if (ctx->key[i] !=
-                            get_unaligned_le32(key + i * sizeof(u32))) {
-                                ctx->base.needs_inv = true;
-                                break;
-                        }
-                }
-        }
+        if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
+                if (memcmp(ctx->key, key, SM4_KEY_SIZE))
+                        ctx->base.needs_inv = true;

-        for (i = 0; i < SM4_KEY_SIZE / sizeof(u32); i++)
-                ctx->key[i] = get_unaligned_le32(key + i * sizeof(u32));
+        memcpy(ctx->key, key, SM4_KEY_SIZE);
         ctx->key_len = SM4_KEY_SIZE;

         return 0;
......
@@ -29,8 +29,8 @@ struct safexcel_ahash_ctx {
         bool fb_init_done;
         bool fb_do_setkey;

-        u32 ipad[SHA3_512_BLOCK_SIZE / sizeof(u32)];
-        u32 opad[SHA3_512_BLOCK_SIZE / sizeof(u32)];
+        __le32 ipad[SHA3_512_BLOCK_SIZE / sizeof(__le32)];
+        __le32 opad[SHA3_512_BLOCK_SIZE / sizeof(__le32)];

         struct crypto_cipher *kaes;
         struct crypto_ahash *fback;
@@ -56,7 +56,8 @@ struct safexcel_ahash_req {
         u8 state_sz;    /* expected state size, only set once */
         u8 block_sz;    /* block size, only set once */
         u8 digest_sz;   /* output digest size, only set once */
-        u32 state[SHA3_512_BLOCK_SIZE / sizeof(u32)] __aligned(sizeof(u32));
+        __le32 state[SHA3_512_BLOCK_SIZE /
+                     sizeof(__le32)] __aligned(sizeof(__le32));

         u64 len;
         u64 processed;
@@ -287,7 +288,7 @@ static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv,
                 if (unlikely(sreq->digest == CONTEXT_CONTROL_DIGEST_XCM &&
                              ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_CRC32)) {
                         /* Undo final XOR with 0xffffffff ...*/
-                        *(u32 *)areq->result = ~sreq->state[0];
+                        *(__le32 *)areq->result = ~sreq->state[0];
                 } else {
                         memcpy(areq->result, sreq->state,
                                crypto_ahash_digestsize(ahash));
@@ -372,9 +373,9 @@ static int safexcel_ahash_send_req(struct crypto_async_request *async, int ring,
                         req->cache[cache_len + skip] = 0x80;
                         // HW will use K2 iso K3 - compensate!
                         for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
-                                ((u32 *)req->cache)[i] ^=
-                                        cpu_to_be32(ctx->ipad[i]) ^
-                                        cpu_to_be32(ctx->ipad[i + 4]);
+                                ((__be32 *)req->cache)[i] ^=
+                                        cpu_to_be32(le32_to_cpu(
+                                                ctx->ipad[i] ^ ctx->ipad[i + 4]));
                 }
                 cache_len = AES_BLOCK_SIZE;
                 queued = queued + extra;
@@ -807,8 +808,8 @@ static int safexcel_ahash_final(struct ahash_request *areq)
                 int i;

                 for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
-                        ((u32 *)areq->result)[i] =
-                                cpu_to_be32(ctx->ipad[i + 4]); // K3
+                        ((__be32 *)areq->result)[i] =
+                                cpu_to_be32(le32_to_cpu(ctx->ipad[i + 4]));//K3
                 areq->result[0] ^= 0x80;                        // 10- padding
                 crypto_cipher_encrypt_one(ctx->kaes, areq->result, areq->result);
                 return 0;
@@ -1891,7 +1892,7 @@ static int safexcel_crc32_init(struct ahash_request *areq)
         memset(req, 0, sizeof(*req));

         /* Start from loaded key */
-        req->state[0] = cpu_to_le32(~ctx->ipad[0]);
+        req->state[0] = (__force __le32)le32_to_cpu(~ctx->ipad[0]);
         /* Set processed to non-zero to enable invalidation detection */
         req->len = sizeof(u32);
         req->processed = sizeof(u32);
@@ -1993,7 +1994,7 @@ static int safexcel_cbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
         memset(ctx->ipad, 0, 2 * AES_BLOCK_SIZE);
         for (i = 0; i < len / sizeof(u32); i++)
-                ctx->ipad[i + 8] = cpu_to_be32(aes.key_enc[i]);
+                ctx->ipad[i + 8] = (__force __le32)cpu_to_be32(aes.key_enc[i]);

         if (len == AES_KEYSIZE_192) {
                 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
@@ -2078,7 +2079,8 @@ static int safexcel_xcbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
         crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + AES_BLOCK_SIZE,
                 "\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3");
         for (i = 0; i < 3 * AES_BLOCK_SIZE / sizeof(u32); i++)
-                ctx->ipad[i] = cpu_to_be32(key_tmp[i]);
+                ctx->ipad[i] =
+                        cpu_to_le32((__force u32)cpu_to_be32(key_tmp[i]));

         crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
         crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
@@ -2164,7 +2166,8 @@ static int safexcel_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
         }

         for (i = 0; i < len / sizeof(u32); i++)
-                ctx->ipad[i + 8] = cpu_to_be32(aes.key_enc[i]);
+                ctx->ipad[i + 8] =
+                        cpu_to_le32((__force u32)cpu_to_be32(aes.key_enc[i]));

         /* precompute the CMAC key material */
         crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
@@ -2197,7 +2200,7 @@ static int safexcel_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
         /* end of code borrowed from crypto/cmac.c */

         for (i = 0; i < 2 * AES_BLOCK_SIZE / sizeof(u32); i++)
-                ctx->ipad[i] = cpu_to_be32(((u32 *)consts)[i]);
+                ctx->ipad[i] = (__force __le32)cpu_to_be32(((u32 *)consts)[i]);

         if (len == AES_KEYSIZE_192) {
                 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
......