Commit 7b967c48 authored Apr 09, 2002 by Anton Blanchard
ppc64: Change MASK -> __MASK to avoid namespace clash in symbios driver, and fix up thread_saved_pc
parent d493fb63
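For context, the kind of collision this rename avoids looks roughly like the sketch below. The driver-side MASK() definition is a hypothetical illustration, not the actual symbios source:

/* clash.c - hypothetical illustration of the namespace clash.
 * The old asm-ppc64 header exported a generic MASK(); a driver that
 * defines its own MASK() helper then redefines the macro, and which
 * expansion wins depends purely on include order. */
#define MASK(X)		(1UL << (X))		/* as in the old architecture header */
#define MASK(width)	((1UL << (width)) - 1)	/* illustrative driver-local helper: redefinition */

int main(void)
{
	return (int)MASK(4);	/* which expansion applies depends on include order */
}

Prefixing the architecture-internal helper as __MASK() keeps it out of the namespace that ordinary driver code is likely to use.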
Showing 1 changed file with 29 additions and 31 deletions
include/asm-ppc64/processor.h (+29, -31)
@@ -53,36 +53,36 @@
 #define MSR_LE_LG	0		/* Little Endian */

 #ifdef __ASSEMBLY__
-#define MASK(X)		(1<<(X))
+#define __MASK(X)	(1<<(X))
 #else
-#define MASK(X)		(1UL<<(X))
+#define __MASK(X)	(1UL<<(X))
 #endif

-#define MSR_SF		MASK(MSR_SF_LG)		/* Enable 64 bit mode */
+#define MSR_SF		__MASK(MSR_SF_LG)	/* Enable 64 bit mode */
-#define MSR_ISF		MASK(MSR_ISF_LG)	/* Interrupt 64b mode valid on 630 */
+#define MSR_ISF		__MASK(MSR_ISF_LG)	/* Interrupt 64b mode valid on 630 */
-#define MSR_HV		MASK(MSR_HV_LG)		/* Hypervisor state */
+#define MSR_HV		__MASK(MSR_HV_LG)	/* Hypervisor state */
-#define MSR_VEC		MASK(MSR_VEC_LG)	/* Enable AltiVec */
+#define MSR_VEC		__MASK(MSR_VEC_LG)	/* Enable AltiVec */
-#define MSR_POW		MASK(MSR_POW_LG)	/* Enable Power Management */
+#define MSR_POW		__MASK(MSR_POW_LG)	/* Enable Power Management */
-#define MSR_WE		MASK(MSR_WE_LG)		/* Wait State Enable */
+#define MSR_WE		__MASK(MSR_WE_LG)	/* Wait State Enable */
-#define MSR_TGPR	MASK(MSR_TGPR_LG)	/* TLB Update registers in use */
+#define MSR_TGPR	__MASK(MSR_TGPR_LG)	/* TLB Update registers in use */
-#define MSR_CE		MASK(MSR_CE_LG)		/* Critical Interrupt Enable */
+#define MSR_CE		__MASK(MSR_CE_LG)	/* Critical Interrupt Enable */
-#define MSR_ILE		MASK(MSR_ILE_LG)	/* Interrupt Little Endian */
+#define MSR_ILE		__MASK(MSR_ILE_LG)	/* Interrupt Little Endian */
-#define MSR_EE		MASK(MSR_EE_LG)		/* External Interrupt Enable */
+#define MSR_EE		__MASK(MSR_EE_LG)	/* External Interrupt Enable */
-#define MSR_PR		MASK(MSR_PR_LG)		/* Problem State / Privilege Level */
+#define MSR_PR		__MASK(MSR_PR_LG)	/* Problem State / Privilege Level */
-#define MSR_FP		MASK(MSR_FP_LG)		/* Floating Point enable */
+#define MSR_FP		__MASK(MSR_FP_LG)	/* Floating Point enable */
-#define MSR_ME		MASK(MSR_ME_LG)		/* Machine Check Enable */
+#define MSR_ME		__MASK(MSR_ME_LG)	/* Machine Check Enable */
-#define MSR_FE0		MASK(MSR_FE0_LG)	/* Floating Exception mode 0 */
+#define MSR_FE0		__MASK(MSR_FE0_LG)	/* Floating Exception mode 0 */
-#define MSR_SE		MASK(MSR_SE_LG)		/* Single Step */
+#define MSR_SE		__MASK(MSR_SE_LG)	/* Single Step */
-#define MSR_BE		MASK(MSR_BE_LG)		/* Branch Trace */
+#define MSR_BE		__MASK(MSR_BE_LG)	/* Branch Trace */
-#define MSR_DE		MASK(MSR_DE_LG)		/* Debug Exception Enable */
+#define MSR_DE		__MASK(MSR_DE_LG)	/* Debug Exception Enable */
-#define MSR_FE1		MASK(MSR_FE1_LG)	/* Floating Exception mode 1 */
+#define MSR_FE1		__MASK(MSR_FE1_LG)	/* Floating Exception mode 1 */
-#define MSR_IP		MASK(MSR_IP_LG)		/* Exception prefix 0x000/0xFFF */
+#define MSR_IP		__MASK(MSR_IP_LG)	/* Exception prefix 0x000/0xFFF */
-#define MSR_IR		MASK(MSR_IR_LG)		/* Instruction Relocate */
+#define MSR_IR		__MASK(MSR_IR_LG)	/* Instruction Relocate */
-#define MSR_DR		MASK(MSR_DR_LG)		/* Data Relocate */
+#define MSR_DR		__MASK(MSR_DR_LG)	/* Data Relocate */
-#define MSR_PE		MASK(MSR_PE_LG)		/* Protection Enable */
+#define MSR_PE		__MASK(MSR_PE_LG)	/* Protection Enable */
-#define MSR_PX		MASK(MSR_PX_LG)		/* Protection Exclusive Mode */
+#define MSR_PX		__MASK(MSR_PX_LG)	/* Protection Exclusive Mode */
-#define MSR_RI		MASK(MSR_RI_LG)		/* Recoverable Exception */
+#define MSR_RI		__MASK(MSR_RI_LG)	/* Recoverable Exception */
-#define MSR_LE		MASK(MSR_LE_LG)		/* Little Endian */
+#define MSR_LE		__MASK(MSR_LE_LG)	/* Little Endian */

 #define MSR_		MSR_ME | MSR_RI | MSR_IR | MSR_DR | MSR_ISF
 #define MSR_KERNEL	MSR_ | MSR_SF | MSR_HV
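Note that the rename stays inside this header: other code tests the composed MSR_* bits rather than calling the mask macro itself, so no callers change. A minimal sketch, assuming msr holds a saved MSR value (e.g. regs->msr):

#include <asm/processor.h>	/* MSR_EE is built via __MASK(MSR_EE_LG) in this header */

/* Sketch only: 'msr' is a hypothetical saved MSR value. */
static int external_interrupts_enabled(unsigned long msr)
{
	return (msr & MSR_EE) != 0;
}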
@@ -694,10 +694,8 @@ extern struct mm_struct ioremap_mm;
 /*
  * Return saved PC of a blocked thread. For now, this is the "user" PC
  */
-static inline unsigned long thread_saved_pc(struct thread_struct *t)
-{
-	return (t->regs) ? t->regs->nip : 0;
-}
+#define thread_saved_pc(tsk)	\
+	((tsk)->thread.regs? (tsk)->thread.regs->nip: 0)

 #define copy_segments(tsk, mm)		do { } while (0)
 #define release_segments(mm)		do { } while (0)
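The thread_saved_pc fixup also changes the calling convention: the old inline took a struct thread_struct *, while the new macro takes the task itself and reaches through ->thread.regs, returning 0 when no user register frame is saved. A sketch of a call site under that assumption:

/* Sketch: after this commit the caller passes the task, not &tsk->thread.
 * 'tsk' is assumed to be a valid struct task_struct pointer; a result of 0
 * means the blocked thread has no saved user register state. */
static unsigned long saved_user_pc(struct task_struct *tsk)
{
	return thread_saved_pc(tsk);	/* was: thread_saved_pc(&tsk->thread) */
}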