Commit 3d0358d0 authored by Vladimir Murzin, committed by Russell King

ARM: 8815/1: V7M: align v7m_dma_inv_range() with v7 counterpart

Chris has discovered and reported that v7_dma_inv_range() may corrupt
memory if the address range is not aligned to the cache line size.

Since the whole of cache-v7m.S was lifted from cache-v7.S, the same
observation applies to v7m_dma_inv_range(). The fix therefore mirrors
what has been done for v7, with a few M-class-specific details.

Cc: Chris Cole <chris@sageembedded.com>
Signed-off-by: Vladimir Murzin <vladimir.murzin@arm.com>
Signed-off-by: Russell King <rmk+kernel@armlinux.org.uk>
parent a1208f6a
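To make the alignment handling described above concrete before reading the diff, here is a minimal C sketch (illustrative only, not kernel code) of the strategy the patched v7m_dma_inv_range() follows. The helpers clean_and_inval_line() and inval_line() are hypothetical stand-ins for the DCCIMVAC and DCIMVAC operations issued via v7m_cacheop, and the 32-byte line size is only an assumption for the example.

/*
 * Illustrative sketch of dma_inv_range() handling of an unaligned
 * [start, end) range: partial lines at either edge are cleaned and
 * invalidated so dirty neighbouring data is written back rather than
 * discarded; only whole lines inside the range are simply invalidated.
 */
#include <stdint.h>

#define CACHE_LINE_SIZE 32u  /* assumption for the example */

/* Stubs standing in for the real cache-maintenance operations. */
static void clean_and_inval_line(uintptr_t va) { (void)va; /* ~ DCCIMVAC */ }
static void inval_line(uintptr_t va)           { (void)va; /* ~ DCIMVAC  */ }

static void dma_inv_range(uintptr_t start, uintptr_t end)
{
	const uintptr_t mask = CACHE_LINE_SIZE - 1;

	if (start & mask) {
		/* Partial line at the start: clean+invalidate it, then
		 * step past it (cf. the new addne in the patch). */
		start &= ~mask;
		clean_and_inval_line(start);
		start += CACHE_LINE_SIZE;
	}

	if (end & mask) {
		/* Partial line at the end gets the same treatment. */
		end &= ~mask;
		clean_and_inval_line(end);
	}

	/* Whole lines in between may simply be invalidated; the loop
	 * may run zero times (cf. dcimvaclo/addlo/cmplo below). */
	while (start < end) {
		inval_line(start);
		start += CACHE_LINE_SIZE;
	}
}

Because end is rounded down once its partial line has been cleaned, the whole-line loop never touches it again, and if the two partial lines already cover the range the loop body is skipped entirely; that is what the added addne and the conditional dcimvaclo/addlo/cmplo sequence achieve in the assembly below.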
@@ -73,9 +73,11 @@
 /*
  * dcimvac: Invalidate data cache line by MVA to PoC
  */
-.macro dcimvac, rt, tmp
-	v7m_cacheop \rt, \tmp, V7M_SCB_DCIMVAC
+.irp	c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo
+.macro dcimvac\c, rt, tmp
+	v7m_cacheop \rt, \tmp, V7M_SCB_DCIMVAC, \c
 .endm
+.endr
 
 /*
  * dccmvau: Clean data cache line by MVA to PoU
@@ -369,14 +371,16 @@ v7m_dma_inv_range:
 	tst	r0, r3
 	bic	r0, r0, r3
 	dccimvacne r0, r3
+	addne	r0, r0, r2
 	subne	r3, r2, #1	@ restore r3, corrupted by v7m's dccimvac
 	tst	r1, r3
 	bic	r1, r1, r3
 	dccimvacne r1, r3
-1:
-	dcimvac r0, r3
-	add	r0, r0, r2
 	cmp	r0, r1
+1:
+	dcimvaclo r0, r3
+	addlo	r0, r0, r2
+	cmplo	r0, r1
 	blo	1b
 	dsb	st
 	ret	lr