Commit d9ec772d authored by Ard Biesheuvel, committed by Herbert Xu

crypto: ctr - add helper for performing a CTR encryption walk

Add a static inline helper modeled after crypto_cbc_encrypt_walk()
that can be reused for SIMD algorithms that need to implement a
non-SIMD fallback for performing CTR encryption.
Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
parent 5bb12d78
@@ -8,8 +8,58 @@
#ifndef _CRYPTO_CTR_H
#define _CRYPTO_CTR_H

#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <linux/string.h>
#include <linux/types.h>

#define CTR_RFC3686_NONCE_SIZE 4
#define CTR_RFC3686_IV_SIZE 8
#define CTR_RFC3686_BLOCK_SIZE 16

static inline int crypto_ctr_encrypt_walk(struct skcipher_request *req,
					   void (*fn)(struct crypto_skcipher *,
						      const u8 *, u8 *))
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	int blocksize = crypto_skcipher_chunksize(tfm);
	u8 buf[MAX_CIPHER_BLOCKSIZE];
	struct skcipher_walk walk;
	int err;

	/* avoid integer division due to variable blocksize parameter */
	if (WARN_ON_ONCE(!is_power_of_2(blocksize)))
		return -EINVAL;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes > 0) {
		u8 *dst = walk.dst.virt.addr;
		u8 *src = walk.src.virt.addr;
		int nbytes = walk.nbytes;
		int tail = 0;

		if (nbytes < walk.total) {
			tail = walk.nbytes & (blocksize - 1);
			nbytes -= tail;
		}

		do {
			int bsize = min(nbytes, blocksize);

			fn(tfm, walk.iv, buf);
			crypto_xor_cpy(dst, src, buf, bsize);
			crypto_inc(walk.iv, blocksize);

			dst += bsize;
			src += bsize;
			nbytes -= bsize;
		} while (nbytes > 0);

		err = skcipher_walk_done(&walk, tail);
	}
	return err;
}

#endif /* _CRYPTO_CTR_H */
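
For illustration only, not part of this commit: a minimal sketch of how a SIMD skcipher driver might use the new helper as its scalar fallback path. The functions ctr_encrypt(), ctr_encrypt_simd() and aes_sw_encrypt_one() are hypothetical names invented for this sketch; crypto_simd_usable() is the existing predicate for whether SIMD registers may be used in the current context.

#include <crypto/ctr.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>

/* Hypothetical scalar single-block cipher; stands in for a driver's C fallback. */
static void aes_sw_encrypt_one(struct crypto_skcipher *tfm,
			       const u8 *src, u8 *dst)
{
	/* encrypt one block from src into dst without touching SIMD registers */
}

/* Hypothetical SIMD fast path implemented elsewhere in the driver. */
static int ctr_encrypt_simd(struct skcipher_request *req)
{
	/* real driver: process the request with SIMD instructions */
	return -EOPNOTSUPP;
}

static int ctr_encrypt(struct skcipher_request *req)
{
	/*
	 * When the SIMD unit cannot be used, let crypto_ctr_encrypt_walk()
	 * generate the keystream one block at a time with the scalar helper,
	 * XOR it into the data and keep the counter in walk.iv.
	 */
	if (!crypto_simd_usable())
		return crypto_ctr_encrypt_walk(req, aes_sw_encrypt_one);

	return ctr_encrypt_simd(req);
}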