Commit 3c5df5c2 authored by Kumar Gala

[POWERPC] Cleaned up whitespace in head_fsl_booke.S

Signed-off-by: Kumar Gala <galak@kernel.crashing.org>
parent b6927bca
@@ -2,27 +2,27 @@
 * Kernel execution entry point code.
 *
 * Copyright (c) 1995-1996 Gary Thomas <gdt@linuxppc.org>
 *	Initial PowerPC version.
 * Copyright (c) 1996 Cort Dougan <cort@cs.nmt.edu>
 *	Rewritten for PReP
 * Copyright (c) 1996 Paul Mackerras <paulus@cs.anu.edu.au>
 *	Low-level exception handers, MMU support, and rewrite.
 * Copyright (c) 1997 Dan Malek <dmalek@jlc.net>
 *	PowerPC 8xx modifications.
 * Copyright (c) 1998-1999 TiVo, Inc.
 *	PowerPC 403GCX modifications.
 * Copyright (c) 1999 Grant Erickson <grant@lcse.umn.edu>
 *	PowerPC 403GCX/405GP modifications.
 * Copyright 2000 MontaVista Software Inc.
 *	PPC405 modifications
 *	PowerPC 403GCX/405GP modifications.
 *	Author: MontaVista Software, Inc.
 *		frank_rowand@mvista.com or source@mvista.com
 *		debbie_chu@mvista.com
 * Copyright 2002-2004 MontaVista Software, Inc.
 *	PowerPC 44x support, Matt Porter <mporter@kernel.crashing.org>
 * Copyright 2004 Freescale Semiconductor, Inc
 *	PowerPC e500 modifications, Kumar Gala <galak@kernel.crashing.org>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the
@@ -146,13 +146,13 @@ skpinv:	addi	r6,r6,1				/* Increment */
	bne	1b				/* If not, repeat */
	/* Invalidate TLB0 */
	li	r6,0x04
	tlbivax	0,r6
#ifdef CONFIG_SMP
	tlbsync
#endif
	/* Invalidate TLB1 */
	li	r6,0x0c
	tlbivax	0,r6
#ifdef CONFIG_SMP
	tlbsync
@@ -211,7 +211,7 @@ skpinv:	addi	r6,r6,1				/* Increment */
	mtspr	SPRN_MAS1,r6
	tlbwe
	/* Invalidate TLB1 */
	li	r9,0x0c
	tlbivax	0,r9
#ifdef CONFIG_SMP
	tlbsync
@@ -254,7 +254,7 @@ skpinv:	addi	r6,r6,1				/* Increment */
	mtspr	SPRN_MAS1,r8
	tlbwe
	/* Invalidate TLB1 */
	li	r9,0x0c
	tlbivax	0,r9
#ifdef CONFIG_SMP
	tlbsync
@@ -294,7 +294,7 @@ skpinv:	addi	r6,r6,1				/* Increment */
#ifdef CONFIG_E200
	oris	r2,r2,MAS4_TLBSELD(1)@h
#endif
	mtspr	SPRN_MAS4, r2
#if 0
	/* Enable DOZE */
@@ -305,7 +305,7 @@ skpinv:	addi	r6,r6,1				/* Increment */
#ifdef CONFIG_E200
	/* enable dedicated debug exception handling resources (Debug APU) */
	mfspr	r2,SPRN_HID0
	ori	r2,r2,HID0_DAPUEN@l
	mtspr	SPRN_HID0,r2
#endif
@@ -391,7 +391,7 @@ skpinv:	addi	r6,r6,1				/* Increment */
#ifdef CONFIG_PTE_64BIT
#define PTE_FLAGS_OFFSET	4
#define FIND_PTE	\
	rlwinm	r12, r10, 13, 19, 29;	/* Compute pgdir/pmd offset */	\
	lwzx	r11, r12, r11;		/* Get pgd/pmd entry */		\
	rlwinm.	r12, r11, 0, 0, 20;	/* Extract pt base address */	\
	beq	2f;			/* Bail if no table */		\
@@ -487,7 +487,7 @@ interrupt_base:
	 */
	andi.	r11, r11, _PAGE_HWEXEC
	rlwimi	r11, r11, 31, 27, 27	/* SX <- _PAGE_HWEXEC */
	ori	r11, r11, (MAS3_UW|MAS3_SW|MAS3_UR|MAS3_SR)@l /* set static perms */
	/* update search PID in MAS6, AS = 0 */
	mfspr	r12, SPRN_PID0
@@ -694,7 +694,7 @@ interrupt_base:
	START_EXCEPTION(SPEUnavailable)
	NORMAL_EXCEPTION_PROLOG
	bne	load_up_spe
	addi	r3,r1,STACK_FRAME_OVERHEAD
	EXC_XFER_EE_LITE(0x2010, KernelSPE)
#else
	EXCEPTION(0x2020, SPEUnavailable, unknown_exception, EXC_XFER_EE)
@@ -741,10 +741,10 @@ data_access:
 * Both the instruction and data TLB miss get to this
 * point to load the TLB.
 *	r10 - EA of fault
 *	r11 - TLB (info from Linux PTE)
 *	r12, r13 - available to use
 *	CR5 - results of addr < TASK_SIZE
 *	MAS0, MAS1 - loaded with proper value when we get here
 *	MAS2, MAS3 - will need additional info from Linux PTE
 *	Upon exit, we reload everything and RFI.
@@ -813,7 +813,7 @@ END_FTR_SECTION_IFSET(CPU_FTR_BIG_PHYS)
	lwz	r13, tlbcam_index@l(r13)
	rlwimi	r12, r13, 0, 20, 31
7:
	mtspr	SPRN_MAS0,r12
#endif /* CONFIG_E200 */
	tlbwe
@@ -855,17 +855,17 @@ load_up_spe:
	beq	1f
	addi	r4,r4,THREAD	/* want THREAD of last_task_used_spe */
	SAVE_32EVRS(0,r10,r4)
	evxor	evr10, evr10, evr10	/* clear out evr10 */
	evmwumiaa evr10, evr10, evr10	/* evr10 <- ACC = 0 * 0 + ACC */
	li	r5,THREAD_ACC
	evstddx	evr10, r4, r5		/* save off accumulator */
	lwz	r5,PT_REGS(r4)
	lwz	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
	lis	r10,MSR_SPE@h
	andc	r4,r4,r10	/* disable SPE for previous task */
	stw	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
1:
#endif /* !CONFIG_SMP */
	/* enable use of SPE after return */
	oris	r9,r9,MSR_SPE@h
	mfspr	r5,SPRN_SPRG3		/* current task's THREAD (phys) */
@@ -878,7 +878,7 @@ load_up_spe:
#ifndef CONFIG_SMP
	subi	r4,r5,THREAD
	stw	r4,last_task_used_spe@l(r3)
#endif /* !CONFIG_SMP */
	/* restore registers and return */
2:	REST_4GPRS(3, r11)
	lwz	r10,_CCR(r11)
@@ -963,10 +963,10 @@ _GLOBAL(giveup_spe)
	lwz	r5,PT_REGS(r3)
	cmpi	0,r5,0
	SAVE_32EVRS(0, r4, r3)
	evxor	evr6, evr6, evr6	/* clear out evr6 */
	evmwumiaa evr6, evr6, evr6	/* evr6 <- ACC = 0 * 0 + ACC */
	li	r4,THREAD_ACC
	evstddx	evr6, r4, r3		/* save off accumulator */
	mfspr	r6,SPRN_SPEFSCR
	stw	r6,THREAD_SPEFSCR(r3)	/* save spefscr register value */
	beq	1f
@@ -979,7 +979,7 @@ _GLOBAL(giveup_spe)
	li	r5,0
	lis	r4,last_task_used_spe@ha
	stw	r5,last_task_used_spe@l(r4)
#endif /* !CONFIG_SMP */
	blr
#endif /* CONFIG_SPE */
@@ -1000,15 +1000,15 @@ _GLOBAL(giveup_fpu)
 */
_GLOBAL(abort)
	li	r13,0
	mtspr	SPRN_DBCR0,r13		/* disable all debug events */
	isync
	mfmsr	r13
	ori	r13,r13,MSR_DE@l	/* Enable Debug Events */
	mtmsr	r13
	isync
	mfspr	r13,SPRN_DBCR0
	lis	r13,(DBCR0_IDM|DBCR0_RST_CHIP)@h
	mtspr	SPRN_DBCR0,r13
	isync
_GLOBAL(set_context)
@@ -1043,7 +1043,7 @@ swapper_pg_dir:
/* Reserved 4k for the critical exception stack & 4k for the machine
 * check stack per CPU for kernel mode exceptions */
	.section .bss
	.align	12
exception_stack_bottom:
	.space	BOOKE_EXCEPTION_STACK_SIZE * NR_CPUS
	.globl	exception_stack_top
...