Commit a482f4e0 authored by John Ogness, committed by Vinod Koul

dmaengine: edma: special case slot limit workaround

Currently drivers are limited to 19 slots for cyclic transfers.
However, if the DMA burst size is the same as the period size,
the period size can be changed to the full buffer size and
intermediate interrupts activated. Since intermediate interrupts
trigger after each burst, and the burst size is the same as the
period size, the driver still gets an interrupt for each period,
as expected. This provides the functionality of many more slots
while using only 2.

This workaround is only active if more than 19 slots are needed
and the burst size matches the period size.
Acked-by: Peter Ujfalusi <peter.ujfalusi@ti.com>
Signed-off-by: John Ogness <john.ogness@linutronix.de>
Signed-off-by: Sekhar Nori <nsekhar@ti.com>
Signed-off-by: Vinod Koul <vinod.koul@intel.com>
parent 23f49fd2
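
For context, below is a minimal, self-contained sketch of the decision this patch adds, written as plain C rather than driver code. The helper name fits_with_workaround, the MAX_NR_SG value of 20, and the one-slot-per-period-plus-one formula for nslots are illustrative assumptions inferred from the commit message and diff, not the driver's actual definitions.

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

#define MAX_NR_SG 20	/* assumed slot cap implied by the 19-slot limit */

/*
 * Decide whether a cyclic transfer fits: normally one slot per period
 * (plus one extra), otherwise fall back to the workaround of treating
 * the whole buffer as a single period with intermediate (per-burst)
 * interrupts standing in for the per-period interrupts.
 */
static bool fits_with_workaround(size_t buf_len, size_t *period_len,
				 size_t burst_bytes, int *nslots,
				 bool *use_intermediate)
{
	*nslots = (int)(buf_len / *period_len) + 1;
	*use_intermediate = false;

	if (*nslots <= MAX_NR_SG)
		return true;			/* normal path */

	if (burst_bytes == *period_len) {
		*period_len = buf_len;		/* fold buffer into one period */
		*nslots = 2;			/* only two slots needed */
		*use_intermediate = true;	/* per-burst interrupts */
		return true;
	}

	return false;				/* too many periods, no workaround */
}

int main(void)
{
	size_t period_len = 512;		/* bytes per period */
	size_t buf_len = 24 * period_len;	/* 24 periods: exceeds 19 */
	int nslots;
	bool use_intermediate;

	if (fits_with_workaround(buf_len, &period_len, 512, &nslots,
				 &use_intermediate))
		printf("nslots=%d period_len=%zu intermediate=%d\n",
		       nslots, period_len, use_intermediate);
	else
		printf("transfer rejected\n");

	return 0;
}

The example in main() mirrors the case the workaround targets: more periods than available slots, with each burst covering exactly one period.
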
@@ -1238,6 +1238,7 @@ static struct dma_async_tx_descriptor *edma_prep_dma_cyclic(
 	struct edma_desc *edesc;
 	dma_addr_t src_addr, dst_addr;
 	enum dma_slave_buswidth dev_width;
+	bool use_intermediate = false;
 	u32 burst;
 	int i, ret, nslots;
 
@@ -1279,8 +1280,21 @@ static struct dma_async_tx_descriptor *edma_prep_dma_cyclic(
 	 * but the synchronization is difficult to achieve with Cyclic and
 	 * cannot be guaranteed, so we error out early.
 	 */
-	if (nslots > MAX_NR_SG)
-		return NULL;
+	if (nslots > MAX_NR_SG) {
+		/*
+		 * If the burst and period sizes are the same, we can put
+		 * the full buffer into a single period and activate
+		 * intermediate interrupts. This will produce interrupts
+		 * after each burst, which is also after each desired period.
+		 */
+		if (burst == period_len) {
+			period_len = buf_len;
+			nslots = 2;
+			use_intermediate = true;
+		} else {
+			return NULL;
+		}
+	}
 
 	edesc = kzalloc(sizeof(*edesc) + nslots * sizeof(edesc->pset[0]),
 			GFP_ATOMIC);
@@ -1358,8 +1372,13 @@ static struct dma_async_tx_descriptor *edma_prep_dma_cyclic(
 		/*
 		 * Enable period interrupt only if it is requested
 		 */
-		if (tx_flags & DMA_PREP_INTERRUPT)
+		if (tx_flags & DMA_PREP_INTERRUPT) {
 			edesc->pset[i].param.opt |= TCINTEN;
+
+			/* Also enable intermediate interrupts if necessary */
+			if (use_intermediate)
+				edesc->pset[i].param.opt |= ITCINTEN;
+		}
 	}
 
 	/* Place the cyclic channel to highest priority queue */