Commit ba1074cf, authored Feb 25, 2002 by Russell King
ARM preempt and scheduler fixups for 2.5.5
parent ad889c6b
Showing 7 changed files with 45 additions and 40 deletions.
arch/arm/kernel/entry-armv.S      +10  -7
arch/arm/kernel/entry-common.S     +2  -5
arch/arm/kernel/head.S             +1  -1
include/asm-arm/bitops.h          +17  -0
include/asm-arm/mmu_context.h      +0  -23
include/asm-arm/page.h            +13  -2
include/asm-arm/system.h           +2  -2
arch/arm/kernel/entry-armv.S

@@ -734,19 +734,22 @@ preempt_return:
 		ldmia	sp, {r0 - pc}^			@ load r0 - pc, cpsr

 #ifdef CONFIG_PREEMPT
-svc_preempt:	teq	r9, #0
-		movne	pc, lr
+svc_preempt:	teq	r9, #0				@ was preempt count = 0
+		movne	pc, lr				@ no
 		ldr	r0, [r6, #4]			@ local_irq_count
-		ldr	r1, [r6, #8]			@ local_b_count
+		ldr	r1, [r6, #8]			@ local_bh_count
 		adds	r0, r0, r1
 		movne	pc, lr
-1:		set_cpsr_c r0, #MODE_SVC		@ enable IRQs
-		bl	SYMBOL_NAME(preempt_schedule)
+		ldr	r1, [r8, #TI_TASK]
+		set_cpsr_c r2, #MODE_SVC		@ enable IRQs
+		str	r0, [r1, #0]			@ current->state = TASK_RUNNING
+1:		bl	SYMBOL_NAME(schedule)
 		set_cpsr_c r0, #PSR_I_BIT | MODE_SVC	@ disable IRQs
 		ldr	r0, [r8, #TI_FLAGS]
 		tst	r0, #_TIF_NEED_RESCHED
-		bne	1b
-		b	preempt_return
+		beq	preempt_return
+		set_cpsr_c r0, #MODE_SVC		@ enable IRQs
+		b	1b
 #endif

 		.align	5
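For orientation, here is the reworked svc_preempt path above rendered in rough C. This is a sketch, not the 2.5.5 kernel API: it assumes r9 holds the preempt count, [r6, #4] and [r6, #8] the local irq/bh counts, and that storing 0 to current->state marks the task TASK_RUNNING before schedule() runs; every helper below is an illustrative stub.

/* Illustrative stand-ins only -- not the real 2.5.5 kernel interfaces. */
#include <stdio.h>

#define TASK_RUNNING 0

static int preempt_count_v;      /* what the assembly keeps in r9 (stub) */
static int local_irq_count_v;    /* [r6, #4] (stub) */
static int local_bh_count_v;     /* [r6, #8] (stub) */
static int need_resched = 1;     /* _TIF_NEED_RESCHED flag (stub) */
static long current_state = -1;  /* current->state (stub) */

static void irqs_on(void)  { /* set_cpsr_c rX, #MODE_SVC */ }
static void irqs_off(void) { /* set_cpsr_c r0, #PSR_I_BIT | MODE_SVC */ }
static void schedule(void) { need_resched = 0; /* pretend we switched away and back */ }

static void svc_preempt(void)
{
	if (preempt_count_v != 0)                    /* teq r9, #0 ; movne pc, lr */
		return;
	if (local_irq_count_v + local_bh_count_v)    /* adds r0, r0, r1 ; movne pc, lr */
		return;
	current_state = TASK_RUNNING;                /* str r0, [r1, #0] */
	irqs_on();
	for (;;) {                                   /* label 1: */
		schedule();                          /* bl SYMBOL_NAME(schedule) */
		irqs_off();                          /* disable IRQs */
		if (!need_resched)                   /* tst r0, #_TIF_NEED_RESCHED */
			break;                       /* beq preempt_return */
		irqs_on();                           /* enable IRQs, b 1b */
	}
}

int main(void)
{
	svc_preempt();
	printf("current->state = %ld\n", current_state);
	return 0;
}

Compared with the old code, which called preempt_schedule() once, the new sequence marks the task runnable, calls schedule() directly, and keeps looping until _TIF_NEED_RESCHED is clear before falling through to preempt_return.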
arch/arm/kernel/entry-common.S

@@ -55,7 +55,7 @@ work_resched:
  */
 ENTRY(ret_to_user)
 ret_slow_syscall:
-		set_cpsr_c r1, #PSR_I_BIT | MODE_SVC
+		set_cpsr_c r1, #PSR_I_BIT | MODE_SVC	@ disable interrupts
 		ldr	r1, [tsk, #TI_FLAGS]
 		tst	r1, #_TIF_WORK_MASK
 		beq	no_work_pending

@@ -73,12 +73,9 @@ __do_notify_resume:
 		b	SYMBOL_NAME(do_notify_resume)	@ note the bl above sets lr

 /*
- * This is how we return from a fork.  __switch_to will be calling us
- * with r0 pointing at the previous task that was running (ready for
- * calling schedule_tail).
+ * This is how we return from a fork.
  */
 ENTRY(ret_from_fork)
 		bl	SYMBOL_NAME(schedule_tail)
 		get_thread_info tsk
 		ldr	ip, [tsk, #TI_FLAGS]		@ check for syscall tracing
 		mov	why, #1
arch/arm/kernel/head.S

@@ -127,7 +127,7 @@ __entry:
 		mov	r1, #MACH_TYPE_L7200
 #endif

-		mov	r0, #F_BIT | I_BIT | MODE_SVC		@ make sure svc mode
+		mov	r0, #PSR_F_BIT | PSR_I_BIT | MODE_SVC	@ make sure svc mode
 		msr	cpsr_c, r0				@ and all irqs disabled
 		bl	__lookup_processor_type
 		teq	r10, #0					@ invalid processor?
include/asm-arm/bitops.h

@@ -326,6 +326,23 @@ static inline unsigned long __ffs(unsigned long word)
 #define ffs(x) generic_ffs(x)

+/*
+ * Find first bit set in a 168-bit bitmap, where the first
+ * 128 bits are unlikely to be set.
+ */
+static inline int sched_find_first_bit(unsigned long *b)
+{
+	if (unlikely(b[0]))
+		return __ffs(b[0]);
+	if (unlikely(b[1]))
+		return __ffs(b[1]) + 32;
+	if (unlikely(b[2]))
+		return __ffs(b[2]) + 64;
+	if (b[3])
+		return __ffs(b[3]) + 96;
+	return __ffs(b[4]) + 128;
+}
+
 /*
  * hweightN: returns the hamming weight (i.e. the number
  * of bits set) of a N-bit word
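The sched_find_first_bit() added above scans the scheduler's 168-bit priority bitmap one 32-bit word at a time, branch-hinting with unlikely() that the first 128 bits are clear. Below is a minimal user-space sketch of the same lookup; it assumes 32-bit words as on the ARM targets this header serves, and models __ffs() and unlikely() with GCC builtins (neither stub is the kernel's implementation).

/* Sketch only: __ffs()/unlikely() modeled with GCC builtins, 32-bit words assumed. */
#include <stdio.h>
#include <string.h>

#define unlikely(x) __builtin_expect(!!(x), 0)
static inline int __ffs(unsigned long word) { return __builtin_ctzl(word); }

static inline int sched_find_first_bit(unsigned long *b)
{
	if (unlikely(b[0]))
		return __ffs(b[0]);
	if (unlikely(b[1]))
		return __ffs(b[1]) + 32;
	if (unlikely(b[2]))
		return __ffs(b[2]) + 64;
	if (b[3])
		return __ffs(b[3]) + 96;
	return __ffs(b[4]) + 128;
}

int main(void)
{
	unsigned long map[6];

	memset(map, 0, sizeof(map));
	map[4] = 1UL << 7;                           /* first set bit: 128 + 7 = 135 */
	printf("%d\n", sched_find_first_bit(map));   /* prints 135 */
	return 0;
}

The variant this replaces in mmu_context.h (removed below) covered the same bitmap in six words and guarded itself with an #error if MAX_RT_PRIO or MAX_PRIO ever changed.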
include/asm-arm/mmu_context.h

@@ -49,27 +49,4 @@ switch_mm(struct mm_struct *prev, struct mm_struct *next,
 #define activate_mm(prev, next) \
 	switch_mm((prev),(next),NULL,smp_processor_id())

-/*
- * Find first bit set in a 168-bit bitmap, where the first
- * 128 bits are unlikely to be set.
- */
-static inline int sched_find_first_bit(unsigned long *b)
-{
-#if MAX_RT_PRIO != 128 || MAX_PRIO != 168
-#error update this function
-#endif
-	if (unlikely(b[0]))
-		return __ffs(b[0]);
-	if (unlikely(b[1]))
-		return __ffs(b[1]) + 32;
-	if (unlikely(b[2]))
-		return __ffs(b[2]) + 64;
-	if (unlikely(b[3]))
-		return __ffs(b[3]) + 96;
-	if (b[4])
-		return __ffs(b[4]) + MAX_RT_PRIO;
-	return __ffs(b[5]) + 32 + MAX_RT_PRIO;
-}
-
 #endif
include/asm-arm/page.h

@@ -14,8 +14,19 @@
 #define clear_page(page)	memzero((void *)(page), PAGE_SIZE)
 extern void copy_page(void *to, void *from);

-#define clear_user_page(page, vaddr)	cpu_clear_user_page(page,vaddr)
-#define copy_user_page(to, from, vaddr)	cpu_copy_user_page(to,from,vaddr)
+#define clear_user_page(addr,vaddr)			\
+	do {						\
+		preempt_disable();			\
+		cpu_clear_user_page(addr, vaddr);	\
+		preempt_enable();			\
+	} while (0)
+
+#define copy_user_page(to,from,vaddr)			\
+	do {						\
+		preempt_disable();			\
+		cpu_copy_user_page(to, from, vaddr);	\
+		preempt_enable();			\
+	} while (0)

 #ifdef STRICT_MM_TYPECHECKS
 /*
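With CONFIG_PREEMPT in the picture, clear_user_page() and copy_user_page() are now wrapped so the cpu_*_user_page() call cannot be preempted partway through. The commit does not spell out the motivation, but the mechanics are just preempt_disable()/work/preempt_enable() inside a do { } while (0) macro. A self-contained sketch of that pattern, with hypothetical stubs standing in for the kernel primitives:

/* Sketch only: the functions below are stand-ins, not the kernel's implementations. */
#include <stdio.h>

static int preempt_count_v;                  /* stand-in for the per-thread preempt counter */
static void preempt_disable(void) { preempt_count_v++; }
static void preempt_enable(void)  { preempt_count_v--; /* real kernel also rechecks resched */ }

static void cpu_clear_user_page(void *addr, unsigned long vaddr)
{
	printf("clearing %p (vaddr %#lx) with preempt_count=%d\n",
	       addr, vaddr, preempt_count_v);
}

#define clear_user_page(addr,vaddr)			\
	do {						\
		preempt_disable();			\
		cpu_clear_user_page(addr, vaddr);	\
		preempt_enable();			\
	} while (0)

int main(void)
{
	char page[64];

	clear_user_page(page, 0x1000);   /* the helper runs with preemption "disabled" */
	return 0;
}

The do { } while (0) wrapper keeps the multi-statement macro behaving as a single statement at call sites such as an unbraced if/else.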
include/asm-arm/system.h

@@ -53,9 +53,9 @@ extern asmlinkage void __backtrace(void);
 struct thread_info;

 extern struct task_struct *__switch_to(struct thread_info *, struct thread_info *);

-#define switch_to(prev,next,last)					\
+#define switch_to(prev,next)						\
 	do {								\
-		last = __switch_to(prev->thread_info,next->thread_info);	\
+		__switch_to(prev->thread_info,next->thread_info);	\
 		mb();							\
 	} while (0)