Commit 6ebeafff authored by Kyle McMartin, committed by Kyle McMartin

[PARISC] Clean up pointless ASM_PAGE_SIZE_DIV use

Signed-off-by: Kyle McMartin <kyle@mcmartin.ca>
parent 80af0876
...@@ -290,8 +290,6 @@ int main(void) ...@@ -290,8 +290,6 @@ int main(void)
DEFINE(ASM_PTE_ENTRY_SIZE, PTE_ENTRY_SIZE); DEFINE(ASM_PTE_ENTRY_SIZE, PTE_ENTRY_SIZE);
DEFINE(ASM_PFN_PTE_SHIFT, PFN_PTE_SHIFT); DEFINE(ASM_PFN_PTE_SHIFT, PFN_PTE_SHIFT);
DEFINE(ASM_PT_INITIAL, PT_INITIAL); DEFINE(ASM_PT_INITIAL, PT_INITIAL);
DEFINE(ASM_PAGE_SIZE_DIV64, PAGE_SIZE/64);
DEFINE(ASM_PAGE_SIZE_DIV128, PAGE_SIZE/128);
BLANK(); BLANK();
DEFINE(EXCDATA_IP, offsetof(struct exception_data, fault_ip)); DEFINE(EXCDATA_IP, offsetof(struct exception_data, fault_ip));
DEFINE(EXCDATA_SPACE, offsetof(struct exception_data, fault_space)); DEFINE(EXCDATA_SPACE, offsetof(struct exception_data, fault_space));
......
...@@ -289,7 +289,7 @@ ENTRY(copy_user_page_asm) ...@@ -289,7 +289,7 @@ ENTRY(copy_user_page_asm)
*/ */
ldd 0(%r25), %r19 ldd 0(%r25), %r19
ldi ASM_PAGE_SIZE_DIV128, %r1 ldi (PAGE_SIZE / 128), %r1
ldw 64(%r25), %r0 /* prefetch 1 cacheline ahead */ ldw 64(%r25), %r0 /* prefetch 1 cacheline ahead */
ldw 128(%r25), %r0 /* prefetch 2 */ ldw 128(%r25), %r0 /* prefetch 2 */
...@@ -355,7 +355,7 @@ ENTRY(copy_user_page_asm) ...@@ -355,7 +355,7 @@ ENTRY(copy_user_page_asm)
* use ldd/std on a 32 bit kernel. * use ldd/std on a 32 bit kernel.
*/ */
ldw 0(%r25), %r19 ldw 0(%r25), %r19
ldi ASM_PAGE_SIZE_DIV64, %r1 ldi (PAGE_SIZE / 64), %r1
1: 1:
ldw 4(%r25), %r20 ldw 4(%r25), %r20
...@@ -553,7 +553,7 @@ ENTRY(__clear_user_page_asm) ...@@ -553,7 +553,7 @@ ENTRY(__clear_user_page_asm)
pdtlb 0(%r28) pdtlb 0(%r28)
#ifdef CONFIG_64BIT #ifdef CONFIG_64BIT
ldi ASM_PAGE_SIZE_DIV128, %r1 ldi (PAGE_SIZE / 128), %r1
/* PREFETCH (Write) has not (yet) been proven to help here */ /* PREFETCH (Write) has not (yet) been proven to help here */
/* #define PREFETCHW_OP ldd 256(%0), %r0 */ /* #define PREFETCHW_OP ldd 256(%0), %r0 */
...@@ -578,7 +578,7 @@ ENTRY(__clear_user_page_asm) ...@@ -578,7 +578,7 @@ ENTRY(__clear_user_page_asm)
ldo 128(%r28), %r28 ldo 128(%r28), %r28
#else /* ! CONFIG_64BIT */ #else /* ! CONFIG_64BIT */
ldi ASM_PAGE_SIZE_DIV64, %r1 ldi (PAGE_SIZE / 64), %r1
1: 1:
stw %r0, 0(%r28) stw %r0, 0(%r28)
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment