Commit 871df319 authored by Antoine Ténart's avatar Antoine Ténart Committed by Herbert Xu

crypto: inside-secure - EIP97 support

The Inside Secure SafeXcel driver was initially designed to support the
EIP197 cryptographic engine, which is an evolution (with many more
features and better performance) of the EIP97 cryptographic engine. This
patch converts the Inside Secure SafeXcel driver to support both engines
(EIP97 + EIP197).

The main differences are the register offsets and the context
invalidation process, which is EIP197 specific. This patch adds an
indirection on the register offsets and adds checks not to send any
invalidation request when driving the EIP97. A new compatible string is
added as well to bind the driver from device trees.
Signed-off-by: Antoine Tenart <antoine.tenart@free-electrons.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
parent 8732b298
......@@ -108,10 +108,10 @@ static void eip197_write_firmware(struct safexcel_crypto_priv *priv,
writel(EIP197_PE_ICE_x_CTRL_SW_RESET |
EIP197_PE_ICE_x_CTRL_CLR_ECC_CORR |
EIP197_PE_ICE_x_CTRL_CLR_ECC_NON_CORR,
priv->base + ctrl);
EIP197_PE(priv) + ctrl);
/* Enable access to the program memory */
writel(prog_en, priv->base + EIP197_PE_ICE_RAM_CTRL);
writel(prog_en, EIP197_PE(priv) + EIP197_PE_ICE_RAM_CTRL);
/* Write the firmware */
for (i = 0; i < fw->size / sizeof(u32); i++)
......@@ -119,12 +119,12 @@ static void eip197_write_firmware(struct safexcel_crypto_priv *priv,
priv->base + EIP197_CLASSIFICATION_RAMS + i * sizeof(u32));
/* Disable access to the program memory */
writel(0, priv->base + EIP197_PE_ICE_RAM_CTRL);
writel(0, EIP197_PE(priv) + EIP197_PE_ICE_RAM_CTRL);
/* Release engine from reset */
val = readl(priv->base + ctrl);
val = readl(EIP197_PE(priv) + ctrl);
val &= ~EIP197_PE_ICE_x_CTRL_SW_RESET;
writel(val, priv->base + ctrl);
writel(val, EIP197_PE(priv) + ctrl);
}
static int eip197_load_firmwares(struct safexcel_crypto_priv *priv)
......@@ -145,14 +145,14 @@ static int eip197_load_firmwares(struct safexcel_crypto_priv *priv)
}
/* Clear the scratchpad memory */
val = readl(priv->base + EIP197_PE_ICE_SCRATCH_CTRL);
val = readl(EIP197_PE(priv) + EIP197_PE_ICE_SCRATCH_CTRL);
val |= EIP197_PE_ICE_SCRATCH_CTRL_CHANGE_TIMER |
EIP197_PE_ICE_SCRATCH_CTRL_TIMER_EN |
EIP197_PE_ICE_SCRATCH_CTRL_SCRATCH_ACCESS |
EIP197_PE_ICE_SCRATCH_CTRL_CHANGE_ACCESS;
writel(val, priv->base + EIP197_PE_ICE_SCRATCH_CTRL);
writel(val, EIP197_PE(priv) + EIP197_PE_ICE_SCRATCH_CTRL);
memset(priv->base + EIP197_PE_ICE_SCRATCH_RAM, 0,
memset(EIP197_PE(priv) + EIP197_PE_ICE_SCRATCH_RAM, 0,
EIP197_NUM_OF_SCRATCH_BLOCKS * sizeof(u32));
eip197_write_firmware(priv, fw[FW_IFPP], EIP197_PE_ICE_FPP_CTRL,
......@@ -173,7 +173,7 @@ static int safexcel_hw_setup_cdesc_rings(struct safexcel_crypto_priv *priv)
u32 hdw, cd_size_rnd, val;
int i;
hdw = readl(priv->base + EIP197_HIA_OPTIONS);
hdw = readl(EIP197_HIA_AIC_G(priv) + EIP197_HIA_OPTIONS);
hdw &= GENMASK(27, 25);
hdw >>= 25;
......@@ -182,26 +182,25 @@ static int safexcel_hw_setup_cdesc_rings(struct safexcel_crypto_priv *priv)
for (i = 0; i < priv->config.rings; i++) {
/* ring base address */
writel(lower_32_bits(priv->ring[i].cdr.base_dma),
priv->base + EIP197_HIA_CDR(i) + EIP197_HIA_xDR_RING_BASE_ADDR_LO);
EIP197_HIA_CDR(priv, i) + EIP197_HIA_xDR_RING_BASE_ADDR_LO);
writel(upper_32_bits(priv->ring[i].cdr.base_dma),
priv->base + EIP197_HIA_CDR(i) + EIP197_HIA_xDR_RING_BASE_ADDR_HI);
EIP197_HIA_CDR(priv, i) + EIP197_HIA_xDR_RING_BASE_ADDR_HI);
writel(EIP197_xDR_DESC_MODE_64BIT | (priv->config.cd_offset << 16) |
priv->config.cd_size,
priv->base + EIP197_HIA_CDR(i) + EIP197_HIA_xDR_DESC_SIZE);
EIP197_HIA_CDR(priv, i) + EIP197_HIA_xDR_DESC_SIZE);
writel(((EIP197_FETCH_COUNT * (cd_size_rnd << hdw)) << 16) |
(EIP197_FETCH_COUNT * priv->config.cd_offset),
priv->base + EIP197_HIA_CDR(i) + EIP197_HIA_xDR_CFG);
EIP197_HIA_CDR(priv, i) + EIP197_HIA_xDR_CFG);
/* Configure DMA tx control */
val = EIP197_HIA_xDR_CFG_WR_CACHE(WR_CACHE_3BITS);
val |= EIP197_HIA_xDR_CFG_RD_CACHE(RD_CACHE_3BITS);
writel(val,
priv->base + EIP197_HIA_CDR(i) + EIP197_HIA_xDR_DMA_CFG);
writel(val, EIP197_HIA_CDR(priv, i) + EIP197_HIA_xDR_DMA_CFG);
/* clear any pending interrupt */
writel(GENMASK(5, 0),
priv->base + EIP197_HIA_CDR(i) + EIP197_HIA_xDR_STAT);
EIP197_HIA_CDR(priv, i) + EIP197_HIA_xDR_STAT);
}
return 0;
......@@ -212,7 +211,7 @@ static int safexcel_hw_setup_rdesc_rings(struct safexcel_crypto_priv *priv)
u32 hdw, rd_size_rnd, val;
int i;
hdw = readl(priv->base + EIP197_HIA_OPTIONS);
hdw = readl(EIP197_HIA_AIC_G(priv) + EIP197_HIA_OPTIONS);
hdw &= GENMASK(27, 25);
hdw >>= 25;
......@@ -221,33 +220,33 @@ static int safexcel_hw_setup_rdesc_rings(struct safexcel_crypto_priv *priv)
for (i = 0; i < priv->config.rings; i++) {
/* ring base address */
writel(lower_32_bits(priv->ring[i].rdr.base_dma),
priv->base + EIP197_HIA_RDR(i) + EIP197_HIA_xDR_RING_BASE_ADDR_LO);
EIP197_HIA_RDR(priv, i) + EIP197_HIA_xDR_RING_BASE_ADDR_LO);
writel(upper_32_bits(priv->ring[i].rdr.base_dma),
priv->base + EIP197_HIA_RDR(i) + EIP197_HIA_xDR_RING_BASE_ADDR_HI);
EIP197_HIA_RDR(priv, i) + EIP197_HIA_xDR_RING_BASE_ADDR_HI);
writel(EIP197_xDR_DESC_MODE_64BIT | (priv->config.rd_offset << 16) |
priv->config.rd_size,
priv->base + EIP197_HIA_RDR(i) + EIP197_HIA_xDR_DESC_SIZE);
EIP197_HIA_RDR(priv, i) + EIP197_HIA_xDR_DESC_SIZE);
writel(((EIP197_FETCH_COUNT * (rd_size_rnd << hdw)) << 16) |
(EIP197_FETCH_COUNT * priv->config.rd_offset),
priv->base + EIP197_HIA_RDR(i) + EIP197_HIA_xDR_CFG);
EIP197_HIA_RDR(priv, i) + EIP197_HIA_xDR_CFG);
/* Configure DMA tx control */
val = EIP197_HIA_xDR_CFG_WR_CACHE(WR_CACHE_3BITS);
val |= EIP197_HIA_xDR_CFG_RD_CACHE(RD_CACHE_3BITS);
val |= EIP197_HIA_xDR_WR_RES_BUF | EIP197_HIA_xDR_WR_CTRL_BUG;
writel(val,
priv->base + EIP197_HIA_RDR(i) + EIP197_HIA_xDR_DMA_CFG);
EIP197_HIA_RDR(priv, i) + EIP197_HIA_xDR_DMA_CFG);
/* clear any pending interrupt */
writel(GENMASK(7, 0),
priv->base + EIP197_HIA_RDR(i) + EIP197_HIA_xDR_STAT);
EIP197_HIA_RDR(priv, i) + EIP197_HIA_xDR_STAT);
/* enable ring interrupt */
val = readl(priv->base + EIP197_HIA_AIC_R_ENABLE_CTRL(i));
val = readl(EIP197_HIA_AIC_R(priv) + EIP197_HIA_AIC_R_ENABLE_CTRL(i));
val |= EIP197_RDR_IRQ(i);
writel(val, priv->base + EIP197_HIA_AIC_R_ENABLE_CTRL(i));
writel(val, EIP197_HIA_AIC_R(priv) + EIP197_HIA_AIC_R_ENABLE_CTRL(i));
}
return 0;
......@@ -259,39 +258,40 @@ static int safexcel_hw_init(struct safexcel_crypto_priv *priv)
int i, ret;
/* Determine endianess and configure byte swap */
version = readl(priv->base + EIP197_HIA_VERSION);
val = readl(priv->base + EIP197_HIA_MST_CTRL);
version = readl(EIP197_HIA_AIC(priv) + EIP197_HIA_VERSION);
val = readl(EIP197_HIA_AIC(priv) + EIP197_HIA_MST_CTRL);
if ((version & 0xffff) == EIP197_HIA_VERSION_BE)
val |= EIP197_MST_CTRL_BYTE_SWAP;
else if (((version >> 16) & 0xffff) == EIP197_HIA_VERSION_LE)
val |= (EIP197_MST_CTRL_NO_BYTE_SWAP >> 24);
writel(val, priv->base + EIP197_HIA_MST_CTRL);
writel(val, EIP197_HIA_AIC(priv) + EIP197_HIA_MST_CTRL);
/* Configure wr/rd cache values */
writel(EIP197_MST_CTRL_RD_CACHE(RD_CACHE_4BITS) |
EIP197_MST_CTRL_WD_CACHE(WR_CACHE_4BITS),
priv->base + EIP197_MST_CTRL);
EIP197_HIA_GEN_CFG(priv) + EIP197_MST_CTRL);
/* Interrupts reset */
/* Disable all global interrupts */
writel(0, priv->base + EIP197_HIA_AIC_G_ENABLE_CTRL);
writel(0, EIP197_HIA_AIC_G(priv) + EIP197_HIA_AIC_G_ENABLE_CTRL);
/* Clear any pending interrupt */
writel(GENMASK(31, 0), priv->base + EIP197_HIA_AIC_G_ACK);
writel(GENMASK(31, 0), EIP197_HIA_AIC_G(priv) + EIP197_HIA_AIC_G_ACK);
/* Data Fetch Engine configuration */
/* Reset all DFE threads */
writel(EIP197_DxE_THR_CTRL_RESET_PE,
priv->base + EIP197_HIA_DFE_THR_CTRL);
EIP197_HIA_DFE_THR(priv) + EIP197_HIA_DFE_THR_CTRL);
/* Reset HIA input interface arbiter */
writel(EIP197_HIA_RA_PE_CTRL_RESET,
priv->base + EIP197_HIA_RA_PE_CTRL);
if (priv->version == EIP197) {
/* Reset HIA input interface arbiter */
writel(EIP197_HIA_RA_PE_CTRL_RESET,
EIP197_HIA_AIC(priv) + EIP197_HIA_RA_PE_CTRL);
}
/* DMA transfer size to use */
val = EIP197_HIA_DFE_CFG_DIS_DEBUG;
......@@ -299,29 +299,32 @@ static int safexcel_hw_init(struct safexcel_crypto_priv *priv)
val |= EIP197_HIA_DxE_CFG_MIN_CTRL_SIZE(5) | EIP197_HIA_DxE_CFG_MAX_CTRL_SIZE(7);
val |= EIP197_HIA_DxE_CFG_DATA_CACHE_CTRL(RD_CACHE_3BITS);
val |= EIP197_HIA_DxE_CFG_CTRL_CACHE_CTRL(RD_CACHE_3BITS);
writel(val, priv->base + EIP197_HIA_DFE_CFG);
writel(val, EIP197_HIA_DFE(priv) + EIP197_HIA_DFE_CFG);
/* Leave the DFE threads reset state */
writel(0, priv->base + EIP197_HIA_DFE_THR_CTRL);
writel(0, EIP197_HIA_DFE_THR(priv) + EIP197_HIA_DFE_THR_CTRL);
/* Configure the procesing engine thresholds */
writel(EIP197_PE_IN_xBUF_THRES_MIN(5) | EIP197_PE_IN_xBUF_THRES_MAX(9),
priv->base + EIP197_PE_IN_DBUF_THRES);
EIP197_PE(priv) + EIP197_PE_IN_DBUF_THRES);
writel(EIP197_PE_IN_xBUF_THRES_MIN(5) | EIP197_PE_IN_xBUF_THRES_MAX(7),
priv->base + EIP197_PE_IN_TBUF_THRES);
EIP197_PE(priv) + EIP197_PE_IN_TBUF_THRES);
/* enable HIA input interface arbiter and rings */
writel(EIP197_HIA_RA_PE_CTRL_EN | GENMASK(priv->config.rings - 1, 0),
priv->base + EIP197_HIA_RA_PE_CTRL);
if (priv->version == EIP197) {
/* enable HIA input interface arbiter and rings */
writel(EIP197_HIA_RA_PE_CTRL_EN |
GENMASK(priv->config.rings - 1, 0),
EIP197_HIA_AIC(priv) + EIP197_HIA_RA_PE_CTRL);
}
/* Data Store Engine configuration */
/* Reset all DSE threads */
writel(EIP197_DxE_THR_CTRL_RESET_PE,
priv->base + EIP197_HIA_DSE_THR_CTRL);
EIP197_HIA_DSE_THR(priv) + EIP197_HIA_DSE_THR_CTRL);
/* Wait for all DSE threads to complete */
while ((readl(priv->base + EIP197_HIA_DSE_THR_STAT) &
while ((readl(EIP197_HIA_DSE_THR(priv) + EIP197_HIA_DSE_THR_STAT) &
GENMASK(15, 12)) != GENMASK(15, 12))
;
......@@ -330,15 +333,19 @@ static int safexcel_hw_init(struct safexcel_crypto_priv *priv)
val |= EIP197_HIA_DxE_CFG_MIN_DATA_SIZE(7) | EIP197_HIA_DxE_CFG_MAX_DATA_SIZE(8);
val |= EIP197_HIA_DxE_CFG_DATA_CACHE_CTRL(WR_CACHE_3BITS);
val |= EIP197_HIA_DSE_CFG_ALLWAYS_BUFFERABLE;
val |= EIP197_HIA_DSE_CFG_EN_SINGLE_WR;
writel(val, priv->base + EIP197_HIA_DSE_CFG);
/* FIXME: instability issues can occur for EIP97 but disabling it impact
* performances.
*/
if (priv->version == EIP197)
val |= EIP197_HIA_DSE_CFG_EN_SINGLE_WR;
writel(val, EIP197_HIA_DSE(priv) + EIP197_HIA_DSE_CFG);
/* Leave the DSE threads reset state */
writel(0, priv->base + EIP197_HIA_DSE_THR_CTRL);
writel(0, EIP197_HIA_DSE_THR(priv) + EIP197_HIA_DSE_THR_CTRL);
/* Configure the procesing engine thresholds */
writel(EIP197_PE_OUT_DBUF_THRES_MIN(7) | EIP197_PE_OUT_DBUF_THRES_MAX(8),
priv->base + EIP197_PE_OUT_DBUF_THRES);
EIP197_PE(priv) + EIP197_PE_OUT_DBUF_THRES);
/* Processing Engine configuration */
......@@ -348,73 +355,75 @@ static int safexcel_hw_init(struct safexcel_crypto_priv *priv)
val |= EIP197_ALG_AES_ECB | EIP197_ALG_AES_CBC;
val |= EIP197_ALG_SHA1 | EIP197_ALG_HMAC_SHA1;
val |= EIP197_ALG_SHA2;
writel(val, priv->base + EIP197_PE_EIP96_FUNCTION_EN);
writel(val, EIP197_PE(priv) + EIP197_PE_EIP96_FUNCTION_EN);
/* Command Descriptor Rings prepare */
for (i = 0; i < priv->config.rings; i++) {
/* Clear interrupts for this ring */
writel(GENMASK(31, 0),
priv->base + EIP197_HIA_AIC_R_ENABLE_CLR(i));
EIP197_HIA_AIC_R(priv) + EIP197_HIA_AIC_R_ENABLE_CLR(i));
/* Disable external triggering */
writel(0, priv->base + EIP197_HIA_CDR(i) + EIP197_HIA_xDR_CFG);
writel(0, EIP197_HIA_CDR(priv, i) + EIP197_HIA_xDR_CFG);
/* Clear the pending prepared counter */
writel(EIP197_xDR_PREP_CLR_COUNT,
priv->base + EIP197_HIA_CDR(i) + EIP197_HIA_xDR_PREP_COUNT);
EIP197_HIA_CDR(priv, i) + EIP197_HIA_xDR_PREP_COUNT);
/* Clear the pending processed counter */
writel(EIP197_xDR_PROC_CLR_COUNT,
priv->base + EIP197_HIA_CDR(i) + EIP197_HIA_xDR_PROC_COUNT);
EIP197_HIA_CDR(priv, i) + EIP197_HIA_xDR_PROC_COUNT);
writel(0,
priv->base + EIP197_HIA_CDR(i) + EIP197_HIA_xDR_PREP_PNTR);
EIP197_HIA_CDR(priv, i) + EIP197_HIA_xDR_PREP_PNTR);
writel(0,
priv->base + EIP197_HIA_CDR(i) + EIP197_HIA_xDR_PROC_PNTR);
EIP197_HIA_CDR(priv, i) + EIP197_HIA_xDR_PROC_PNTR);
writel((EIP197_DEFAULT_RING_SIZE * priv->config.cd_offset) << 2,
priv->base + EIP197_HIA_CDR(i) + EIP197_HIA_xDR_RING_SIZE);
EIP197_HIA_CDR(priv, i) + EIP197_HIA_xDR_RING_SIZE);
}
/* Result Descriptor Ring prepare */
for (i = 0; i < priv->config.rings; i++) {
/* Disable external triggering*/
writel(0, priv->base + EIP197_HIA_RDR(i) + EIP197_HIA_xDR_CFG);
writel(0, EIP197_HIA_RDR(priv, i) + EIP197_HIA_xDR_CFG);
/* Clear the pending prepared counter */
writel(EIP197_xDR_PREP_CLR_COUNT,
priv->base + EIP197_HIA_RDR(i) + EIP197_HIA_xDR_PREP_COUNT);
EIP197_HIA_RDR(priv, i) + EIP197_HIA_xDR_PREP_COUNT);
/* Clear the pending processed counter */
writel(EIP197_xDR_PROC_CLR_COUNT,
priv->base + EIP197_HIA_RDR(i) + EIP197_HIA_xDR_PROC_COUNT);
EIP197_HIA_RDR(priv, i) + EIP197_HIA_xDR_PROC_COUNT);
writel(0,
priv->base + EIP197_HIA_RDR(i) + EIP197_HIA_xDR_PREP_PNTR);
EIP197_HIA_RDR(priv, i) + EIP197_HIA_xDR_PREP_PNTR);
writel(0,
priv->base + EIP197_HIA_RDR(i) + EIP197_HIA_xDR_PROC_PNTR);
EIP197_HIA_RDR(priv, i) + EIP197_HIA_xDR_PROC_PNTR);
/* Ring size */
writel((EIP197_DEFAULT_RING_SIZE * priv->config.rd_offset) << 2,
priv->base + EIP197_HIA_RDR(i) + EIP197_HIA_xDR_RING_SIZE);
EIP197_HIA_RDR(priv, i) + EIP197_HIA_xDR_RING_SIZE);
}
/* Enable command descriptor rings */
writel(EIP197_DxE_THR_CTRL_EN | GENMASK(priv->config.rings - 1, 0),
priv->base + EIP197_HIA_DFE_THR_CTRL);
EIP197_HIA_DFE_THR(priv) + EIP197_HIA_DFE_THR_CTRL);
/* Enable result descriptor rings */
writel(EIP197_DxE_THR_CTRL_EN | GENMASK(priv->config.rings - 1, 0),
priv->base + EIP197_HIA_DSE_THR_CTRL);
EIP197_HIA_DSE_THR(priv) + EIP197_HIA_DSE_THR_CTRL);
/* Clear any HIA interrupt */
writel(GENMASK(30, 20), priv->base + EIP197_HIA_AIC_G_ACK);
writel(GENMASK(30, 20), EIP197_HIA_AIC_G(priv) + EIP197_HIA_AIC_G_ACK);
eip197_trc_cache_init(priv);
if (priv->version == EIP197) {
eip197_trc_cache_init(priv);
ret = eip197_load_firmwares(priv);
if (ret)
return ret;
ret = eip197_load_firmwares(priv);
if (ret)
return ret;
}
safexcel_hw_setup_cdesc_rings(priv);
safexcel_hw_setup_rdesc_rings(priv);
......@@ -434,7 +443,7 @@ int safexcel_try_push_requests(struct safexcel_crypto_priv *priv, int ring,
/* Configure when we want an interrupt */
writel(EIP197_HIA_RDR_THRESH_PKT_MODE |
EIP197_HIA_RDR_THRESH_PROC_PKT(coal),
priv->base + EIP197_HIA_RDR(ring) + EIP197_HIA_xDR_THRESH);
EIP197_HIA_RDR(priv, ring) + EIP197_HIA_xDR_THRESH);
return coal;
}
......@@ -515,11 +524,11 @@ void safexcel_dequeue(struct safexcel_crypto_priv *priv, int ring)
/* let the RDR know we have pending descriptors */
writel((rdesc * priv->config.rd_offset) << 2,
priv->base + EIP197_HIA_RDR(ring) + EIP197_HIA_xDR_PREP_COUNT);
EIP197_HIA_RDR(priv, ring) + EIP197_HIA_xDR_PREP_COUNT);
/* let the CDR know we have pending descriptors */
writel((cdesc * priv->config.cd_offset) << 2,
priv->base + EIP197_HIA_CDR(ring) + EIP197_HIA_xDR_PREP_COUNT);
EIP197_HIA_CDR(priv, ring) + EIP197_HIA_xDR_PREP_COUNT);
}
void safexcel_free_context(struct safexcel_crypto_priv *priv,
......@@ -620,7 +629,7 @@ static inline void safexcel_handle_result_descriptor(struct safexcel_crypto_priv
handle_results:
tot_descs = 0;
nreq = readl(priv->base + EIP197_HIA_RDR(ring) + EIP197_HIA_xDR_PROC_COUNT);
nreq = readl(EIP197_HIA_RDR(priv, ring) + EIP197_HIA_xDR_PROC_COUNT);
nreq >>= EIP197_xDR_PROC_xD_PKT_OFFSET;
nreq &= EIP197_xDR_PROC_xD_PKT_MASK;
if (!nreq)
......@@ -656,7 +665,7 @@ static inline void safexcel_handle_result_descriptor(struct safexcel_crypto_priv
if (i) {
writel(EIP197_xDR_PROC_xD_PKT(i) |
EIP197_xDR_PROC_xD_COUNT(tot_descs * priv->config.rd_offset),
priv->base + EIP197_HIA_RDR(ring) + EIP197_HIA_xDR_PROC_COUNT);
EIP197_HIA_RDR(priv, ring) + EIP197_HIA_xDR_PROC_COUNT);
}
/* If the number of requests overflowed the counter, try to proceed more
......@@ -698,13 +707,13 @@ static irqreturn_t safexcel_irq_ring(int irq, void *data)
int ring = irq_data->ring, rc = IRQ_NONE;
u32 status, stat;
status = readl(priv->base + EIP197_HIA_AIC_R_ENABLED_STAT(ring));
status = readl(EIP197_HIA_AIC_R(priv) + EIP197_HIA_AIC_R_ENABLED_STAT(ring));
if (!status)
return rc;
/* RDR interrupts */
if (status & EIP197_RDR_IRQ(ring)) {
stat = readl(priv->base + EIP197_HIA_RDR(ring) + EIP197_HIA_xDR_STAT);
stat = readl(EIP197_HIA_RDR(priv, ring) + EIP197_HIA_xDR_STAT);
if (unlikely(stat & EIP197_xDR_ERR)) {
/*
......@@ -719,11 +728,11 @@ static irqreturn_t safexcel_irq_ring(int irq, void *data)
/* ACK the interrupts */
writel(stat & 0xff,
priv->base + EIP197_HIA_RDR(ring) + EIP197_HIA_xDR_STAT);
EIP197_HIA_RDR(priv, ring) + EIP197_HIA_xDR_STAT);
}
/* ACK the interrupts */
writel(status, priv->base + EIP197_HIA_AIC_R_ACK(ring));
writel(status, EIP197_HIA_AIC_R(priv) + EIP197_HIA_AIC_R_ACK(ring));
return rc;
}
......@@ -819,11 +828,11 @@ static void safexcel_configure(struct safexcel_crypto_priv *priv)
{
u32 val, mask;
val = readl(priv->base + EIP197_HIA_OPTIONS);
val = readl(EIP197_HIA_AIC_G(priv) + EIP197_HIA_OPTIONS);
val = (val & GENMASK(27, 25)) >> 25;
mask = BIT(val) - 1;
val = readl(priv->base + EIP197_HIA_OPTIONS);
val = readl(EIP197_HIA_AIC_G(priv) + EIP197_HIA_OPTIONS);
priv->config.rings = min_t(u32, val & GENMASK(3, 0), max_rings);
priv->config.cd_size = (sizeof(struct safexcel_command_desc) / sizeof(u32));
......@@ -833,6 +842,35 @@ static void safexcel_configure(struct safexcel_crypto_priv *priv)
priv->config.rd_offset = (priv->config.rd_size + mask) & ~mask;
}
static void safexcel_init_register_offsets(struct safexcel_crypto_priv *priv)
{
struct safexcel_register_offsets *offsets = &priv->offsets;
if (priv->version == EIP197) {
offsets->hia_aic = EIP197_HIA_AIC_BASE;
offsets->hia_aic_g = EIP197_HIA_AIC_G_BASE;
offsets->hia_aic_r = EIP197_HIA_AIC_R_BASE;
offsets->hia_aic_xdr = EIP197_HIA_AIC_xDR_BASE;
offsets->hia_dfe = EIP197_HIA_DFE_BASE;
offsets->hia_dfe_thr = EIP197_HIA_DFE_THR_BASE;
offsets->hia_dse = EIP197_HIA_DSE_BASE;
offsets->hia_dse_thr = EIP197_HIA_DSE_THR_BASE;
offsets->hia_gen_cfg = EIP197_HIA_GEN_CFG_BASE;
offsets->pe = EIP197_PE_BASE;
} else {
offsets->hia_aic = EIP97_HIA_AIC_BASE;
offsets->hia_aic_g = EIP97_HIA_AIC_G_BASE;
offsets->hia_aic_r = EIP97_HIA_AIC_R_BASE;
offsets->hia_aic_xdr = EIP97_HIA_AIC_xDR_BASE;
offsets->hia_dfe = EIP97_HIA_DFE_BASE;
offsets->hia_dfe_thr = EIP97_HIA_DFE_THR_BASE;
offsets->hia_dse = EIP97_HIA_DSE_BASE;
offsets->hia_dse_thr = EIP97_HIA_DSE_THR_BASE;
offsets->hia_gen_cfg = EIP97_HIA_GEN_CFG_BASE;
offsets->pe = EIP97_PE_BASE;
}
}
static int safexcel_probe(struct platform_device *pdev)
{
struct device *dev = &pdev->dev;
......@@ -845,6 +883,9 @@ static int safexcel_probe(struct platform_device *pdev)
return -ENOMEM;
priv->dev = dev;
priv->version = (enum safexcel_eip_version)of_device_get_match_data(dev);
safexcel_init_register_offsets(priv);
res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
priv->base = devm_ioremap_resource(dev, res);
......@@ -971,7 +1012,14 @@ static int safexcel_remove(struct platform_device *pdev)
}
static const struct of_device_id safexcel_of_match_table[] = {
{ .compatible = "inside-secure,safexcel-eip197" },
{
.compatible = "inside-secure,safexcel-eip97",
.data = (void *)EIP97,
},
{
.compatible = "inside-secure,safexcel-eip197",
.data = (void *)EIP197,
},
{},
};
......
......@@ -28,55 +28,94 @@
#define EIP197_GFP_FLAGS(base) ((base).flags & CRYPTO_TFM_REQ_MAY_SLEEP ? \
GFP_KERNEL : GFP_ATOMIC)
/* Register base offsets */
#define EIP197_HIA_AIC(priv) ((priv)->base + (priv)->offsets.hia_aic)
#define EIP197_HIA_AIC_G(priv) ((priv)->base + (priv)->offsets.hia_aic_g)
#define EIP197_HIA_AIC_R(priv) ((priv)->base + (priv)->offsets.hia_aic_r)
#define EIP197_HIA_AIC_xDR(priv) ((priv)->base + (priv)->offsets.hia_aic_xdr)
#define EIP197_HIA_DFE(priv) ((priv)->base + (priv)->offsets.hia_dfe)
#define EIP197_HIA_DFE_THR(priv) ((priv)->base + (priv)->offsets.hia_dfe_thr)
#define EIP197_HIA_DSE(priv) ((priv)->base + (priv)->offsets.hia_dse)
#define EIP197_HIA_DSE_THR(priv) ((priv)->base + (priv)->offsets.hia_dse_thr)
#define EIP197_HIA_GEN_CFG(priv) ((priv)->base + (priv)->offsets.hia_gen_cfg)
#define EIP197_PE(priv) ((priv)->base + (priv)->offsets.pe)
/* EIP197 base offsets */
#define EIP197_HIA_AIC_BASE 0x90000
#define EIP197_HIA_AIC_G_BASE 0x90000
#define EIP197_HIA_AIC_R_BASE 0x90800
#define EIP197_HIA_AIC_xDR_BASE 0x80000
#define EIP197_HIA_DFE_BASE 0x8c000
#define EIP197_HIA_DFE_THR_BASE 0x8c040
#define EIP197_HIA_DSE_BASE 0x8d000
#define EIP197_HIA_DSE_THR_BASE 0x8d040
#define EIP197_HIA_GEN_CFG_BASE 0xf0000
#define EIP197_PE_BASE 0xa0000
/* EIP97 base offsets */
#define EIP97_HIA_AIC_BASE 0x0
#define EIP97_HIA_AIC_G_BASE 0x0
#define EIP97_HIA_AIC_R_BASE 0x0
#define EIP97_HIA_AIC_xDR_BASE 0x0
#define EIP97_HIA_DFE_BASE 0xf000
#define EIP97_HIA_DFE_THR_BASE 0xf200
#define EIP97_HIA_DSE_BASE 0xf400
#define EIP97_HIA_DSE_THR_BASE 0xf600
#define EIP97_HIA_GEN_CFG_BASE 0x10000
#define EIP97_PE_BASE 0x10000
/* CDR/RDR register offsets */
#define EIP197_HIA_xDR_OFF(r) (0x80000 + (r) * 0x1000)
#define EIP197_HIA_CDR(r) (EIP197_HIA_xDR_OFF(r))
#define EIP197_HIA_RDR(r) (EIP197_HIA_xDR_OFF(r) + 0x800)
#define EIP197_HIA_xDR_RING_BASE_ADDR_LO 0x0
#define EIP197_HIA_xDR_RING_BASE_ADDR_HI 0x4
#define EIP197_HIA_xDR_RING_SIZE 0x18
#define EIP197_HIA_xDR_DESC_SIZE 0x1c
#define EIP197_HIA_xDR_CFG 0x20
#define EIP197_HIA_xDR_DMA_CFG 0x24
#define EIP197_HIA_xDR_THRESH 0x28
#define EIP197_HIA_xDR_PREP_COUNT 0x2c
#define EIP197_HIA_xDR_PROC_COUNT 0x30
#define EIP197_HIA_xDR_PREP_PNTR 0x34
#define EIP197_HIA_xDR_PROC_PNTR 0x38
#define EIP197_HIA_xDR_STAT 0x3c
#define EIP197_HIA_xDR_OFF(priv, r) (EIP197_HIA_AIC_xDR(priv) + (r) * 0x1000)
#define EIP197_HIA_CDR(priv, r) (EIP197_HIA_xDR_OFF(priv, r))
#define EIP197_HIA_RDR(priv, r) (EIP197_HIA_xDR_OFF(priv, r) + 0x800)
#define EIP197_HIA_xDR_RING_BASE_ADDR_LO 0x0000
#define EIP197_HIA_xDR_RING_BASE_ADDR_HI 0x0004
#define EIP197_HIA_xDR_RING_SIZE 0x0018
#define EIP197_HIA_xDR_DESC_SIZE 0x001c
#define EIP197_HIA_xDR_CFG 0x0020
#define EIP197_HIA_xDR_DMA_CFG 0x0024
#define EIP197_HIA_xDR_THRESH 0x0028
#define EIP197_HIA_xDR_PREP_COUNT 0x002c
#define EIP197_HIA_xDR_PROC_COUNT 0x0030
#define EIP197_HIA_xDR_PREP_PNTR 0x0034
#define EIP197_HIA_xDR_PROC_PNTR 0x0038
#define EIP197_HIA_xDR_STAT 0x003c
/* register offsets */
#define EIP197_HIA_DFE_CFG 0x8c000
#define EIP197_HIA_DFE_THR_CTRL 0x8c040
#define EIP197_HIA_DFE_THR_STAT 0x8c044
#define EIP197_HIA_DSE_CFG 0x8d000
#define EIP197_HIA_DSE_THR_CTRL 0x8d040
#define EIP197_HIA_DSE_THR_STAT 0x8d044
#define EIP197_HIA_RA_PE_CTRL 0x90010
#define EIP197_HIA_RA_PE_STAT 0x90014
#define EIP197_HIA_DFE_CFG 0x0000
#define EIP197_HIA_DFE_THR_CTRL 0x0000
#define EIP197_HIA_DFE_THR_STAT 0x0004
#define EIP197_HIA_DSE_CFG 0x0000
#define EIP197_HIA_DSE_THR_CTRL 0x0000
#define EIP197_HIA_DSE_THR_STAT 0x0004
#define EIP197_HIA_RA_PE_CTRL 0x0010
#define EIP197_HIA_RA_PE_STAT 0x0014
#define EIP197_HIA_AIC_R_OFF(r) ((r) * 0x1000)
#define EIP197_HIA_AIC_R_ENABLE_CTRL(r) (0x9e808 - EIP197_HIA_AIC_R_OFF(r))
#define EIP197_HIA_AIC_R_ENABLED_STAT(r) (0x9e810 - EIP197_HIA_AIC_R_OFF(r))
#define EIP197_HIA_AIC_R_ACK(r) (0x9e810 - EIP197_HIA_AIC_R_OFF(r))
#define EIP197_HIA_AIC_R_ENABLE_CLR(r) (0x9e814 - EIP197_HIA_AIC_R_OFF(r))
#define EIP197_HIA_AIC_G_ENABLE_CTRL 0x9f808
#define EIP197_HIA_AIC_G_ENABLED_STAT 0x9f810
#define EIP197_HIA_AIC_G_ACK 0x9f810
#define EIP197_HIA_MST_CTRL 0x9fff4
#define EIP197_HIA_OPTIONS 0x9fff8
#define EIP197_HIA_VERSION 0x9fffc
#define EIP197_PE_IN_DBUF_THRES 0xa0000
#define EIP197_PE_IN_TBUF_THRES 0xa0100
#define EIP197_PE_ICE_SCRATCH_RAM 0xa0800
#define EIP197_PE_ICE_PUE_CTRL 0xa0c80
#define EIP197_PE_ICE_SCRATCH_CTRL 0xa0d04
#define EIP197_PE_ICE_FPP_CTRL 0xa0d80
#define EIP197_PE_ICE_RAM_CTRL 0xa0ff0
#define EIP197_PE_EIP96_FUNCTION_EN 0xa1004
#define EIP197_PE_EIP96_CONTEXT_CTRL 0xa1008
#define EIP197_PE_EIP96_CONTEXT_STAT 0xa100c
#define EIP197_PE_OUT_DBUF_THRES 0xa1c00
#define EIP197_PE_OUT_TBUF_THRES 0xa1d00
#define EIP197_HIA_AIC_R_ENABLE_CTRL(r) (0xe008 - EIP197_HIA_AIC_R_OFF(r))
#define EIP197_HIA_AIC_R_ENABLED_STAT(r) (0xe010 - EIP197_HIA_AIC_R_OFF(r))
#define EIP197_HIA_AIC_R_ACK(r) (0xe010 - EIP197_HIA_AIC_R_OFF(r))
#define EIP197_HIA_AIC_R_ENABLE_CLR(r) (0xe014 - EIP197_HIA_AIC_R_OFF(r))
#define EIP197_HIA_AIC_G_ENABLE_CTRL 0xf808
#define EIP197_HIA_AIC_G_ENABLED_STAT 0xf810
#define EIP197_HIA_AIC_G_ACK 0xf810
#define EIP197_HIA_MST_CTRL 0xfff4
#define EIP197_HIA_OPTIONS 0xfff8
#define EIP197_HIA_VERSION 0xfffc
#define EIP197_PE_IN_DBUF_THRES 0x0000
#define EIP197_PE_IN_TBUF_THRES 0x0100
#define EIP197_PE_ICE_SCRATCH_RAM 0x0800
#define EIP197_PE_ICE_PUE_CTRL 0x0c80
#define EIP197_PE_ICE_SCRATCH_CTRL 0x0d04
#define EIP197_PE_ICE_FPP_CTRL 0x0d80
#define EIP197_PE_ICE_RAM_CTRL 0x0ff0
#define EIP197_PE_EIP96_FUNCTION_EN 0x1004
#define EIP197_PE_EIP96_CONTEXT_CTRL 0x1008
#define EIP197_PE_EIP96_CONTEXT_STAT 0x100c
#define EIP197_PE_OUT_DBUF_THRES 0x1c00
#define EIP197_PE_OUT_TBUF_THRES 0x1d00
#define EIP197_MST_CTRL 0xfff4
/* EIP197-specific registers, no indirection */
#define EIP197_CLASSIFICATION_RAMS 0xe0000
#define EIP197_TRC_CTRL 0xf0800
#define EIP197_TRC_LASTRES 0xf0804
......@@ -90,7 +129,6 @@
#define EIP197_TRC_ECCDATASTAT 0xf083c
#define EIP197_TRC_ECCDATA 0xf0840
#define EIP197_CS_RAM_CTRL 0xf7ff0
#define EIP197_MST_CTRL 0xffff4
/* EIP197_HIA_xDR_DESC_SIZE */
#define EIP197_xDR_DESC_MODE_64BIT BIT(31)
......@@ -465,12 +503,33 @@ struct safexcel_work_data {
int ring;
};
enum safexcel_eip_version {
EIP97,
EIP197,
};
struct safexcel_register_offsets {
u32 hia_aic;
u32 hia_aic_g;
u32 hia_aic_r;
u32 hia_aic_xdr;
u32 hia_dfe;
u32 hia_dfe_thr;
u32 hia_dse;
u32 hia_dse_thr;
u32 hia_gen_cfg;
u32 pe;
};
struct safexcel_crypto_priv {
void __iomem *base;
struct device *dev;
struct clk *clk;
struct safexcel_config config;
enum safexcel_eip_version version;
struct safexcel_register_offsets offsets;
/* context DMA pool */
struct dma_pool *context_pool;
......
......@@ -69,6 +69,7 @@ static int safexcel_aes_setkey(struct crypto_skcipher *ctfm, const u8 *key,
{
struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
struct safexcel_crypto_priv *priv = ctx->priv;
struct crypto_aes_ctx aes;
int ret, i;
......@@ -78,7 +79,7 @@ static int safexcel_aes_setkey(struct crypto_skcipher *ctfm, const u8 *key,
return ret;
}
if (ctx->base.ctxr_dma) {
if (priv->version == EIP197 && ctx->base.ctxr_dma) {
for (i = 0; i < len / sizeof(u32); i++) {
if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
ctx->base.needs_inv = true;
......@@ -411,9 +412,13 @@ static int safexcel_send(struct crypto_async_request *async,
int *commands, int *results)
{
struct skcipher_request *req = skcipher_request_cast(async);
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
struct safexcel_crypto_priv *priv = ctx->priv;
int ret;
BUG_ON(priv->version == EIP97 && sreq->needs_inv);
if (sreq->needs_inv)
ret = safexcel_cipher_send_inv(async, ring, request,
commands, results);
......@@ -476,7 +481,7 @@ static int safexcel_aes(struct skcipher_request *req,
ctx->mode = mode;
if (ctx->base.ctxr) {
if (ctx->base.needs_inv) {
if (priv->version == EIP197 && ctx->base.needs_inv) {
sreq->needs_inv = true;
ctx->base.needs_inv = false;
}
......@@ -544,9 +549,14 @@ static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
memzero_explicit(ctx->base.ctxr->data, 8 * sizeof(u32));
ret = safexcel_cipher_exit_inv(tfm);
if (ret)
dev_warn(priv->dev, "cipher: invalidation error %d\n", ret);
if (priv->version == EIP197) {
ret = safexcel_cipher_exit_inv(tfm);
if (ret)
dev_warn(priv->dev, "cipher: invalidation error %d\n", ret);
} else {
dma_pool_free(priv->context_pool, ctx->base.ctxr,
ctx->base.ctxr_dma);
}
}
struct safexcel_alg_template safexcel_alg_ecb_aes = {
......
......@@ -397,6 +397,8 @@ static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
struct safexcel_ahash_req *req = ahash_request_ctx(areq);
int err;
BUG_ON(priv->version == EIP97 && req->needs_inv);
if (req->needs_inv) {
req->needs_inv = false;
err = safexcel_handle_inv_result(priv, ring, async,
......@@ -528,7 +530,8 @@ static int safexcel_ahash_enqueue(struct ahash_request *areq)
req->needs_inv = false;
if (ctx->base.ctxr) {
if (!ctx->base.needs_inv && req->processed &&
if (priv->version == EIP197 &&
!ctx->base.needs_inv && req->processed &&
ctx->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED)
/* We're still setting needs_inv here, even though it is
* cleared right away, because the needs_inv flag can be
......@@ -721,9 +724,14 @@ static void safexcel_ahash_cra_exit(struct crypto_tfm *tfm)
if (!ctx->base.ctxr)
return;
ret = safexcel_ahash_exit_inv(tfm);
if (ret)
dev_warn(priv->dev, "hash: invalidation error %d\n", ret);
if (priv->version == EIP197) {
ret = safexcel_ahash_exit_inv(tfm);
if (ret)
dev_warn(priv->dev, "hash: invalidation error %d\n", ret);
} else {
dma_pool_free(priv->context_pool, ctx->base.ctxr,
ctx->base.ctxr_dma);
}
}
struct safexcel_alg_template safexcel_alg_sha1 = {
......@@ -927,6 +935,7 @@ static int safexcel_hmac_sha1_setkey(struct crypto_ahash *tfm, const u8 *key,
unsigned int keylen)
{
struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
struct safexcel_crypto_priv *priv = ctx->priv;
struct safexcel_ahash_export_state istate, ostate;
int ret, i;
......@@ -934,7 +943,7 @@ static int safexcel_hmac_sha1_setkey(struct crypto_ahash *tfm, const u8 *key,
if (ret)
return ret;
if (ctx->base.ctxr) {
if (priv->version == EIP197 && ctx->base.ctxr) {
for (i = 0; i < SHA1_DIGEST_SIZE / sizeof(u32); i++) {
if (ctx->ipad[i] != le32_to_cpu(istate.state[i]) ||
ctx->opad[i] != le32_to_cpu(ostate.state[i])) {
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment