nexedi / linux

Commit 2fd7dc48, authored Sep 07, 2004 by David S. Miller

Merge davem@nuts.davemloft.net:/disk1/BK/sparc-2.6
into kernel.bkbits.net:/home/davem/sparc-2.6
Parents: 2cd24267, 8395c735

Showing 1 changed file with 136 additions and 106 deletions.

include/asm-sparc64/spinlock.h (+136 -106)

@@ -41,56 +41,69 @@ typedef unsigned char spinlock_t;
 do {	membar("#LoadLoad");	\
 } while(*((volatile unsigned char *)lock))

-static __inline__ void _raw_spin_lock(spinlock_t *lock)
-{
-	__asm__ __volatile__(
-"1:	ldstub		[%0], %%g7\n\t"
-"	brnz,pn		%%g7, 2f\n\t"
-"	 membar		#StoreLoad | #StoreStore\n\t"
-"	b		3f\n\t"
-"2:	ldub		[%0], %%g7\n\t"
-"	brnz,pt		%%g7, 2b\n\t"
-"	 membar		#LoadLoad\n\t"
-"	ba,a,pt		%%xcc, 1b\n\t"
-"3:\n\t"
-	:
-	: "r" (lock)
-	: "memory");
-}
+static inline void _raw_spin_lock(spinlock_t *lock)
+{
+	unsigned long tmp;
+
+	__asm__ __volatile__(
+"1:	ldstub		[%1], %0\n"
+"	brnz,pn		%0, 2f\n"
+"	 membar		#StoreLoad | #StoreStore\n"
+"	.subsection	2\n"
+"2:	ldub		[%1], %0\n"
+"	brnz,pt		%0, 2b\n"
+"	 membar		#LoadLoad\n"
+"	ba,a,pt		%%xcc, 1b\n"
+"	.previous"
+	: "=&r" (tmp)
+	: "r" (lock)
+	: "memory");
+}

-static __inline__ int _raw_spin_trylock(spinlock_t *lock)
-{
-	unsigned int result;
-
-	__asm__ __volatile__(
-"ldstub		[%1], %0\n\t"
-"membar		#StoreLoad | #StoreStore"
-	: "=r" (result)
-	: "r" (lock)
-	: "memory");
-
-	return (result == 0);
-}
+static inline int _raw_spin_trylock(spinlock_t *lock)
+{
+	unsigned long result;
+
+	__asm__ __volatile__(
+"	ldstub		[%1], %0\n"
+"	membar		#StoreLoad | #StoreStore"
+	: "=r" (result)
+	: "r" (lock)
+	: "memory");
+
+	return (result == 0UL);
+}

-static __inline__ void _raw_spin_unlock(spinlock_t *lock)
-{
-	__asm__ __volatile__(
-"membar		#StoreStore | #LoadStore\n\t"
-"stb		%%g0, [%0]"
-	: /* No outputs */
-	: "r" (lock)
-	: "memory");
-}
+static inline void _raw_spin_unlock(spinlock_t *lock)
+{
+	__asm__ __volatile__(
+"	membar		#StoreStore | #LoadStore\n"
+"	stb		%%g0, [%0]"
+	: /* No outputs */
+	: "r" (lock)
+	: "memory");
+}

-static __inline__ void _raw_spin_lock_flags(spinlock_t *lock, unsigned long flags)
-{
-	__asm__ __volatile__(
-"1:	ldstub		[%0], %%g7\n\t"
-"	brnz,pn		%%g7, 2f\n\t"
-"	 membar		#StoreLoad | #StoreStore\n\t"
-"	b		4f\n\t"
-"2:	rdpr		%%pil, %%g2	! Save PIL\n\t"
-"	wrpr		%1, %%pil	! Set previous PIL\n\t"
-"3:	ldub		[%0], %%g7	! Spin on lock set\n\t"
-"	brnz,pt		%%g7, 3b\n\t"
-"	 membar		#LoadLoad\n\t"
-"	ba,pt		%%xcc, 1b	! Retry lock acquire\n\t"
-"	 wrpr		%%g2, %%pil	! Restore PIL\n\t"
-"4:\n\t"
-	:
-	: "r" (lock), "r" (flags)
-	: "memory");
-}
+static inline void _raw_spin_lock_flags(spinlock_t *lock, unsigned long flags)
+{
+	unsigned long tmp1, tmp2;
+
+	__asm__ __volatile__(
+"1:	ldstub		[%2], %0\n"
+"	brnz,pn		%0, 2f\n"
+"	 membar		#StoreLoad | #StoreStore\n"
+"	.subsection	2\n"
+"2:	rdpr		%%pil, %1\n"
+"	wrpr		%3, %%pil\n"
+"3:	ldub		[%2], %0\n"
+"	brnz,pt		%0, 3b\n"
+"	 membar		#LoadLoad\n"
+"	ba,pt		%%xcc, 1b\n"
+"	 wrpr		%1, %%pil\n"
+"	.previous"
+	: "=&r" (tmp1), "=&r" (tmp2)
+	: "r" (lock), "r" (flags)
+	: "memory");
+}

 #else /* !(CONFIG_DEBUG_SPINLOCK) */

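Note: the spinlock rewrite above keeps the same ldstub-based test-and-test-and-set protocol, but moves the contended spin loop into .subsection 2 and replaces the fixed %g2/%g7 scratch registers with compiler-allocated output operands. As a rough illustration only (not part of this commit), the same lock/unlock protocol written in portable C with GCC/Clang __atomic builtins might look like the sketch below; the demo_* names are hypothetical.

/* Illustrative sketch only (not from this commit): the lock/unlock
 * protocol of _raw_spin_lock/_raw_spin_unlock expressed with GCC/Clang
 * __atomic builtins.  All demo_* names are hypothetical. */
typedef unsigned char demo_spinlock_t;

static inline void demo_spin_lock(demo_spinlock_t *lock)
{
	/* Atomic exchange plays the role of ldstub: store a non-zero
	 * value and return the previous contents in one step. */
	while (__atomic_exchange_n(lock, 1, __ATOMIC_ACQUIRE) != 0) {
		/* Contended case: spin with plain loads until the lock
		 * looks free, like the out-of-line ldub/brnz loop. */
		while (__atomic_load_n(lock, __ATOMIC_RELAXED) != 0)
			;
	}
}

static inline void demo_spin_unlock(demo_spinlock_t *lock)
{
	/* Release store, like "membar #StoreStore | #LoadStore; stb %g0". */
	__atomic_store_n(lock, 0, __ATOMIC_RELEASE);
}
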
@@ -131,85 +144,102 @@ typedef unsigned int rwlock_t;
 #define rwlock_init(lp)		do { *(lp) = RW_LOCK_UNLOCKED; } while(0)
 #define rwlock_is_locked(x)	(*(x) != RW_LOCK_UNLOCKED)

-static void __inline__ __read_lock(rwlock_t *lock)
-{
-	__asm__ __volatile__(
-"	b		1f\n\t"
-"99:\n\t"
-"	ldsw		[%0], %%g5\n\t"
-"	brlz,pt		%%g5, 99b\n\t"
-"	 membar		#LoadLoad\n\t"
-"	ba,a,pt		%%xcc, 4f\n\t"
-"1:	ldsw		[%0], %%g5\n\t"
-"	brlz,pn		%%g5, 99b\n\t"
-"4:	 add		%%g5, 1, %%g7\n\t"
-"	cas		[%0], %%g5, %%g7\n\t"
-"	cmp		%%g5, %%g7\n\t"
-"	bne,pn		%%icc, 1b\n\t"
-"	 membar		#StoreLoad | #StoreStore\n\t"
-	:
-	: "r" (lock)
-	: "memory");
-}
+static void inline __read_lock(rwlock_t *lock)
+{
+	unsigned long tmp1, tmp2;
+
+	__asm__ __volatile__ (
+"1:	ldsw		[%2], %0\n"
+"	brlz,pn		%0, 2f\n"
+"4:	 add		%0, 1, %1\n"
+"	cas		[%2], %0, %1\n"
+"	cmp		%0, %1\n"
+"	bne,pn		%%icc, 1b\n"
+"	 membar		#StoreLoad | #StoreStore\n"
+"	.subsection	2\n"
+"2:	ldsw		[%2], %0\n"
+"	brlz,pt		%0, 2b\n"
+"	 membar		#LoadLoad\n"
+"	ba,a,pt		%%xcc, 4b\n"
+"	.previous"
+	: "=&r" (tmp1), "=&r" (tmp2)
+	: "r" (lock)
+	: "memory");
+}

-static void __inline__ __read_unlock(rwlock_t *lock)
-{
-	__asm__ __volatile__(
-"1:	lduw		[%0], %%g5\n\t"
-"	sub		%%g5, 1, %%g7\n\t"
-"	cas		[%0], %%g5, %%g7\n\t"
-"	cmp		%%g5, %%g7\n\t"
-"	be,pt		%%xcc, 2f\n\t"
-"	 membar		#StoreLoad | #StoreStore\n\t"
-"	ba,a,pt		%%xcc, 1b\n\t"
-"2:\n\t"
-	:
-	: "r" (lock)
-	: "memory");
-}
+static void inline __read_unlock(rwlock_t *lock)
+{
+	unsigned long tmp1, tmp2;
+
+	__asm__ __volatile__(
+"1:	lduw		[%2], %0\n"
+"	sub		%0, 1, %1\n"
+"	cas		[%2], %0, %1\n"
+"	cmp		%0, %1\n"
+"	bne,pn		%%xcc, 1b\n"
+"	 membar		#StoreLoad | #StoreStore"
+	: "=&r" (tmp1), "=&r" (tmp2)
+	: "r" (lock)
+	: "memory");
+}

-static void __inline__ __write_lock(rwlock_t *lock)
-{
-	__asm__ __volatile__(
-"	sethi		%%hi(0x80000000), %%g2\n\t"
-"	b		1f\n\t"
-"99:\n\t"
-"	lduw		[%0], %%g5\n\t"
-"	brnz,pt		%%g5, 99b\n\t"
-"	 membar		#LoadLoad\n\t"
-"	ba,a,pt		%%xcc, 4f\n\t"
-"1:	lduw		[%0], %%g5\n\t"
-"	brnz,pn		%%g5, 99b\n\t"
-"4:	 or		%%g5, %%g2, %%g7\n\t"
-"	cas		[%0], %%g5, %%g7\n\t"
-"	cmp		%%g5, %%g7\n\t"
-"	be,pt		%%icc, 2f\n\t"
-"	 membar		#StoreLoad | #StoreStore\n\t"
-"	ba,a,pt		%%xcc, 1b\n\t"
-"2:\n\t"
-	:
-	: "r" (lock)
-	: "memory");
-}
+static void inline __write_lock(rwlock_t *lock)
+{
+	unsigned long mask, tmp1, tmp2;
+
+	mask = 0x80000000UL;
+
+	__asm__ __volatile__(
+"1:	lduw		[%2], %0\n"
+"	brnz,pn		%0, 2f\n"
+"4:	 or		%0, %3, %1\n"
+"	cas		[%2], %0, %1\n"
+"	cmp		%0, %1\n"
+"	bne,pn		%%icc, 1b\n"
+"	 membar		#StoreLoad | #StoreStore\n"
+"	.subsection	2\n"
+"2:	lduw		[%2], %0\n"
+"	brnz,pt		%0, 2b\n"
+"	 membar		#LoadLoad\n"
+"	ba,a,pt		%%xcc, 4b\n"
+"	.previous"
+	: "=&r" (tmp1), "=&r" (tmp2)
+	: "r" (lock), "r" (mask)
+	: "memory");
+}

-static void __inline__ __write_unlock(rwlock_t *lock)
-{
-	__asm__ __volatile__(
-"membar		#LoadStore | #StoreStore\n\t"
-"retl\n\t"
-"stw		%%g0, [%0]\n\t"
-	:
-	: "r" (lock)
-	: "memory");
-}
+static void inline __write_unlock(rwlock_t *lock)
+{
+	__asm__ __volatile__(
+"	membar		#LoadStore | #StoreStore\n"
+"	stw		%%g0, [%0]"
+	: /* no outputs */
+	: "r" (lock)
+	: "memory");
+}

-static int __inline__ __write_trylock(rwlock_t *lock)
-{
-	__asm__ __volatile__(
-"	sethi		%%hi(0x80000000), %%g2\n\t"
-"1:	lduw		[%0], %%g5\n\t"
-"	brnz,pn		%%g5, 100f\n\t"
-"4:	 or		%%g5, %%g2, %%g7\n\t"
-"	cas		[%0], %%g5, %%g7\n\t"
-"	cmp		%%g5, %%g7\n\t"
-"	be,pt		%%icc, 99f\n\t"
-"	 membar		#StoreLoad | #StoreStore\n\t"
-"	ba,pt		%%xcc, 1b\n\t"
-"99:\n\t"
-"	retl\n\t"
-"	 mov		1, %0\n\t"
-"100:\n\t"
-"	retl\n\t"
-"	 mov		0, %0\n\t"
-	:
-	: "r" (lock)
-	: "memory");
-
-	return rwlock_is_locked(lock);
-}
+static int inline __write_trylock(rwlock_t *lock)
+{
+	unsigned long mask, tmp1, tmp2, result;
+
+	mask = 0x80000000UL;
+
+	__asm__ __volatile__(
+"	mov		0, %2\n"
+"1:	lduw		[%3], %0\n"
+"	brnz,pn		%0, 2f\n"
+"	 or		%0, %4, %1\n"
+"	cas		[%3], %0, %1\n"
+"	cmp		%0, %1\n"
+"	bne,pn		%%icc, 1b\n"
+"	 membar		#StoreLoad | #StoreStore\n"
+"	mov		1, %2\n"
+"2:"
+	: "=&r" (tmp1), "=&r" (tmp2), "=&r" (result)
+	: "r" (lock), "r" (mask)
+	: "memory");
+
+	return result;
+}

 #define _raw_read_lock(p)	__read_lock(p)
...
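Note: the rwlock functions above keep the same word layout (the sign bit 0x80000000 marks a writer, the low bits count readers) while likewise moving their contended spin loops out of line and dropping fixed registers. The sketch below is only an illustration of that cas-based protocol in portable C with GCC/Clang __atomic builtins; it is not part of this commit and the demo_* names are hypothetical.

/* Illustrative sketch only (not from this commit): the reader/writer
 * protocol used by __read_lock/__write_lock above, with the sign bit
 * marking a writer and the remaining bits counting readers. */
typedef unsigned int demo_rwlock_t;
#define DEMO_WRITER_BIT	0x80000000u

static inline void demo_read_lock(demo_rwlock_t *lock)
{
	unsigned int old, val;

	do {
		/* Spin until no writer holds the lock (sign bit clear),
		 * like the out-of-line "ldsw; brlz,pt" loop. */
		do {
			old = __atomic_load_n(lock, __ATOMIC_RELAXED);
		} while (old & DEMO_WRITER_BIT);
		val = old + 1;	/* one more reader */
		/* cas-style retry if another CPU changed the word. */
	} while (!__atomic_compare_exchange_n(lock, &old, val, 0,
					      __ATOMIC_ACQUIRE,
					      __ATOMIC_RELAXED));
}

static inline void demo_read_unlock(demo_rwlock_t *lock)
{
	/* Drop one reader, like the lduw/sub/cas loop in __read_unlock. */
	__atomic_fetch_sub(lock, 1, __ATOMIC_RELEASE);
}

static inline int demo_write_trylock(demo_rwlock_t *lock)
{
	unsigned int expected = 0;

	/* A writer may only enter when the word is completely zero. */
	return __atomic_compare_exchange_n(lock, &expected, DEMO_WRITER_BIT,
					   0, __ATOMIC_ACQUIRE,
					   __ATOMIC_RELAXED);
}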