Commit 240b62d3 authored by Juergen Gross, committed by Tony Luck

ia64: remove stale paravirt leftovers

Remove the last leftovers from IA64 Xen pv-guest support.

PARAVIRT is long gone from IA64 Kconfig and Xen IA64 support, too.

Due to lack of infrastructure, no testing was done.
Signed-off-by: Juergen Gross <jgross@suse.com>
Signed-off-by: Tony Luck <tony.luck@intel.com>
Link: https://lore.kernel.org/r/20191021100415.7642-1-jgross@suse.com
parent 219d5433
...@@ -36,11 +36,7 @@ static inline void arch_maybe_save_ip(unsigned long flags) ...@@ -36,11 +36,7 @@ static inline void arch_maybe_save_ip(unsigned long flags)
static inline unsigned long arch_local_save_flags(void) static inline unsigned long arch_local_save_flags(void)
{ {
ia64_stop(); ia64_stop();
#ifdef CONFIG_PARAVIRT
return ia64_get_psr_i();
#else
return ia64_getreg(_IA64_REG_PSR); return ia64_getreg(_IA64_REG_PSR);
#endif
} }
static inline unsigned long arch_local_irq_save(void) static inline unsigned long arch_local_irq_save(void)
......
...@@ -31,7 +31,7 @@ extern void ia64_bad_param_for_setreg (void); ...@@ -31,7 +31,7 @@ extern void ia64_bad_param_for_setreg (void);
extern void ia64_bad_param_for_getreg (void); extern void ia64_bad_param_for_getreg (void);
#define ia64_native_setreg(regnum, val) \ #define ia64_setreg(regnum, val) \
({ \ ({ \
switch (regnum) { \ switch (regnum) { \
case _IA64_REG_PSR_L: \ case _IA64_REG_PSR_L: \
...@@ -60,7 +60,7 @@ extern void ia64_bad_param_for_getreg (void); ...@@ -60,7 +60,7 @@ extern void ia64_bad_param_for_getreg (void);
} \ } \
}) })
#define ia64_native_getreg(regnum) \ #define ia64_getreg(regnum) \
({ \ ({ \
__u64 ia64_intri_res; \ __u64 ia64_intri_res; \
\ \
...@@ -384,7 +384,7 @@ extern void ia64_bad_param_for_getreg (void); ...@@ -384,7 +384,7 @@ extern void ia64_bad_param_for_getreg (void);
#define ia64_invala() asm volatile ("invala" ::: "memory") #define ia64_invala() asm volatile ("invala" ::: "memory")
#define ia64_native_thash(addr) \ #define ia64_thash(addr) \
({ \ ({ \
unsigned long ia64_intri_res; \ unsigned long ia64_intri_res; \
asm volatile ("thash %0=%1" : "=r"(ia64_intri_res) : "r" (addr)); \ asm volatile ("thash %0=%1" : "=r"(ia64_intri_res) : "r" (addr)); \
...@@ -437,10 +437,10 @@ extern void ia64_bad_param_for_getreg (void); ...@@ -437,10 +437,10 @@ extern void ia64_bad_param_for_getreg (void);
#define ia64_set_pmd(index, val) \ #define ia64_set_pmd(index, val) \
asm volatile ("mov pmd[%0]=%1" :: "r"(index), "r"(val) : "memory") asm volatile ("mov pmd[%0]=%1" :: "r"(index), "r"(val) : "memory")
#define ia64_native_set_rr(index, val) \ #define ia64_set_rr(index, val) \
asm volatile ("mov rr[%0]=%1" :: "r"(index), "r"(val) : "memory"); asm volatile ("mov rr[%0]=%1" :: "r"(index), "r"(val) : "memory");
#define ia64_native_get_cpuid(index) \ #define ia64_get_cpuid(index) \
({ \ ({ \
unsigned long ia64_intri_res; \ unsigned long ia64_intri_res; \
asm volatile ("mov %0=cpuid[%r1]" : "=r"(ia64_intri_res) : "rO"(index)); \ asm volatile ("mov %0=cpuid[%r1]" : "=r"(ia64_intri_res) : "rO"(index)); \
...@@ -476,33 +476,33 @@ extern void ia64_bad_param_for_getreg (void); ...@@ -476,33 +476,33 @@ extern void ia64_bad_param_for_getreg (void);
}) })
#define ia64_native_get_pmd(index) \ #define ia64_get_pmd(index) \
({ \ ({ \
unsigned long ia64_intri_res; \ unsigned long ia64_intri_res; \
asm volatile ("mov %0=pmd[%1]" : "=r"(ia64_intri_res) : "r"(index)); \ asm volatile ("mov %0=pmd[%1]" : "=r"(ia64_intri_res) : "r"(index)); \
ia64_intri_res; \ ia64_intri_res; \
}) })
#define ia64_native_get_rr(index) \ #define ia64_get_rr(index) \
({ \ ({ \
unsigned long ia64_intri_res; \ unsigned long ia64_intri_res; \
asm volatile ("mov %0=rr[%1]" : "=r"(ia64_intri_res) : "r" (index)); \ asm volatile ("mov %0=rr[%1]" : "=r"(ia64_intri_res) : "r" (index)); \
ia64_intri_res; \ ia64_intri_res; \
}) })
#define ia64_native_fc(addr) asm volatile ("fc %0" :: "r"(addr) : "memory") #define ia64_fc(addr) asm volatile ("fc %0" :: "r"(addr) : "memory")
#define ia64_sync_i() asm volatile (";; sync.i" ::: "memory") #define ia64_sync_i() asm volatile (";; sync.i" ::: "memory")
#define ia64_native_ssm(mask) asm volatile ("ssm %0":: "i"((mask)) : "memory") #define ia64_ssm(mask) asm volatile ("ssm %0":: "i"((mask)) : "memory")
#define ia64_native_rsm(mask) asm volatile ("rsm %0":: "i"((mask)) : "memory") #define ia64_rsm(mask) asm volatile ("rsm %0":: "i"((mask)) : "memory")
#define ia64_sum(mask) asm volatile ("sum %0":: "i"((mask)) : "memory") #define ia64_sum(mask) asm volatile ("sum %0":: "i"((mask)) : "memory")
#define ia64_rum(mask) asm volatile ("rum %0":: "i"((mask)) : "memory") #define ia64_rum(mask) asm volatile ("rum %0":: "i"((mask)) : "memory")
#define ia64_ptce(addr) asm volatile ("ptc.e %0" :: "r"(addr)) #define ia64_ptce(addr) asm volatile ("ptc.e %0" :: "r"(addr))
#define ia64_native_ptcga(addr, size) \ #define ia64_ptcga(addr, size) \
do { \ do { \
asm volatile ("ptc.ga %0,%1" :: "r"(addr), "r"(size) : "memory"); \ asm volatile ("ptc.ga %0,%1" :: "r"(addr), "r"(size) : "memory"); \
ia64_dv_serialize_data(); \ ia64_dv_serialize_data(); \
...@@ -607,7 +607,7 @@ do { \ ...@@ -607,7 +607,7 @@ do { \
} \ } \
}) })
#define ia64_native_intrin_local_irq_restore(x) \ #define ia64_intrin_local_irq_restore(x) \
do { \ do { \
asm volatile (";; cmp.ne p6,p7=%0,r0;;" \ asm volatile (";; cmp.ne p6,p7=%0,r0;;" \
"(p6) ssm psr.i;" \ "(p6) ssm psr.i;" \
......
...@@ -17,8 +17,8 @@ ...@@ -17,8 +17,8 @@
* intrinsic * intrinsic
*/ */
#define ia64_native_getreg __getReg #define ia64_getreg __getReg
#define ia64_native_setreg __setReg #define ia64_setreg __setReg
#define ia64_hint __hint #define ia64_hint __hint
#define ia64_hint_pause __hint_pause #define ia64_hint_pause __hint_pause
...@@ -40,10 +40,10 @@ ...@@ -40,10 +40,10 @@
#define ia64_invala_fr __invala_fr #define ia64_invala_fr __invala_fr
#define ia64_nop __nop #define ia64_nop __nop
#define ia64_sum __sum #define ia64_sum __sum
#define ia64_native_ssm __ssm #define ia64_ssm __ssm
#define ia64_rum __rum #define ia64_rum __rum
#define ia64_native_rsm __rsm #define ia64_rsm __rsm
#define ia64_native_fc __fc #define ia64_fc __fc
#define ia64_ldfs __ldfs #define ia64_ldfs __ldfs
#define ia64_ldfd __ldfd #define ia64_ldfd __ldfd
...@@ -89,17 +89,17 @@ ...@@ -89,17 +89,17 @@
__setIndReg(_IA64_REG_INDR_PMC, index, val) __setIndReg(_IA64_REG_INDR_PMC, index, val)
#define ia64_set_pmd(index, val) \ #define ia64_set_pmd(index, val) \
__setIndReg(_IA64_REG_INDR_PMD, index, val) __setIndReg(_IA64_REG_INDR_PMD, index, val)
#define ia64_native_set_rr(index, val) \ #define ia64_set_rr(index, val) \
__setIndReg(_IA64_REG_INDR_RR, index, val) __setIndReg(_IA64_REG_INDR_RR, index, val)
#define ia64_native_get_cpuid(index) \ #define ia64_get_cpuid(index) \
__getIndReg(_IA64_REG_INDR_CPUID, index) __getIndReg(_IA64_REG_INDR_CPUID, index)
#define __ia64_get_dbr(index) __getIndReg(_IA64_REG_INDR_DBR, index) #define __ia64_get_dbr(index) __getIndReg(_IA64_REG_INDR_DBR, index)
#define ia64_get_ibr(index) __getIndReg(_IA64_REG_INDR_IBR, index) #define ia64_get_ibr(index) __getIndReg(_IA64_REG_INDR_IBR, index)
#define ia64_get_pkr(index) __getIndReg(_IA64_REG_INDR_PKR, index) #define ia64_get_pkr(index) __getIndReg(_IA64_REG_INDR_PKR, index)
#define ia64_get_pmc(index) __getIndReg(_IA64_REG_INDR_PMC, index) #define ia64_get_pmc(index) __getIndReg(_IA64_REG_INDR_PMC, index)
#define ia64_native_get_pmd(index) __getIndReg(_IA64_REG_INDR_PMD, index) #define ia64_get_pmd(index) __getIndReg(_IA64_REG_INDR_PMD, index)
#define ia64_native_get_rr(index) __getIndReg(_IA64_REG_INDR_RR, index) #define ia64_get_rr(index) __getIndReg(_IA64_REG_INDR_RR, index)
#define ia64_srlz_d __dsrlz #define ia64_srlz_d __dsrlz
#define ia64_srlz_i __isrlz #define ia64_srlz_i __isrlz
...@@ -121,16 +121,16 @@ ...@@ -121,16 +121,16 @@
#define ia64_ld8_acq __ld8_acq #define ia64_ld8_acq __ld8_acq
#define ia64_sync_i __synci #define ia64_sync_i __synci
#define ia64_native_thash __thash #define ia64_thash __thash
#define ia64_native_ttag __ttag #define ia64_ttag __ttag
#define ia64_itcd __itcd #define ia64_itcd __itcd
#define ia64_itci __itci #define ia64_itci __itci
#define ia64_itrd __itrd #define ia64_itrd __itrd
#define ia64_itri __itri #define ia64_itri __itri
#define ia64_ptce __ptce #define ia64_ptce __ptce
#define ia64_ptcl __ptcl #define ia64_ptcl __ptcl
#define ia64_native_ptcg __ptcg #define ia64_ptcg __ptcg
#define ia64_native_ptcga __ptcga #define ia64_ptcga __ptcga
#define ia64_ptri __ptri #define ia64_ptri __ptri
#define ia64_ptrd __ptrd #define ia64_ptrd __ptrd
#define ia64_dep_mi _m64_dep_mi #define ia64_dep_mi _m64_dep_mi
...@@ -147,13 +147,13 @@ ...@@ -147,13 +147,13 @@
#define ia64_lfetch_fault __lfetch_fault #define ia64_lfetch_fault __lfetch_fault
#define ia64_lfetch_fault_excl __lfetch_fault_excl #define ia64_lfetch_fault_excl __lfetch_fault_excl
#define ia64_native_intrin_local_irq_restore(x) \ #define ia64_intrin_local_irq_restore(x) \
do { \ do { \
if ((x) != 0) { \ if ((x) != 0) { \
ia64_native_ssm(IA64_PSR_I); \ ia64_ssm(IA64_PSR_I); \
ia64_srlz_d(); \ ia64_srlz_d(); \
} else { \ } else { \
ia64_native_rsm(IA64_PSR_I); \ ia64_rsm(IA64_PSR_I); \
} \ } \
} while (0) } while (0)
......
...@@ -21,15 +21,13 @@ ...@@ -21,15 +21,13 @@
#endif #endif
#include <asm/cmpxchg.h> #include <asm/cmpxchg.h>
#define ia64_native_get_psr_i() (ia64_native_getreg(_IA64_REG_PSR) & IA64_PSR_I) #define ia64_set_rr0_to_rr4(val0, val1, val2, val3, val4) \
#define ia64_native_set_rr0_to_rr4(val0, val1, val2, val3, val4) \
do { \ do { \
ia64_native_set_rr(0x0000000000000000UL, (val0)); \ ia64_set_rr(0x0000000000000000UL, (val0)); \
ia64_native_set_rr(0x2000000000000000UL, (val1)); \ ia64_set_rr(0x2000000000000000UL, (val1)); \
ia64_native_set_rr(0x4000000000000000UL, (val2)); \ ia64_set_rr(0x4000000000000000UL, (val2)); \
ia64_native_set_rr(0x6000000000000000UL, (val3)); \ ia64_set_rr(0x6000000000000000UL, (val3)); \
ia64_native_set_rr(0x8000000000000000UL, (val4)); \ ia64_set_rr(0x8000000000000000UL, (val4)); \
} while (0) } while (0)
/* /*
...@@ -85,41 +83,4 @@ extern unsigned long __bad_increment_for_ia64_fetch_and_add (void); ...@@ -85,41 +83,4 @@ extern unsigned long __bad_increment_for_ia64_fetch_and_add (void);
#endif #endif
#ifndef __ASSEMBLY__
#define IA64_INTRINSIC_API(name) ia64_native_ ## name
#define IA64_INTRINSIC_MACRO(name) ia64_native_ ## name
/************************************************/
/* Instructions paravirtualized for correctness */
/************************************************/
/* fc, thash, get_cpuid, get_pmd, get_eflags, set_eflags */
/* Note that "ttag" and "cover" are also privilege-sensitive; "ttag"
* is not currently used (though it may be in a long-format VHPT system!)
*/
#define ia64_fc IA64_INTRINSIC_API(fc)
#define ia64_thash IA64_INTRINSIC_API(thash)
#define ia64_get_cpuid IA64_INTRINSIC_API(get_cpuid)
#define ia64_get_pmd IA64_INTRINSIC_API(get_pmd)
/************************************************/
/* Instructions paravirtualized for performance */
/************************************************/
#define ia64_ssm IA64_INTRINSIC_MACRO(ssm)
#define ia64_rsm IA64_INTRINSIC_MACRO(rsm)
#define ia64_getreg IA64_INTRINSIC_MACRO(getreg)
#define ia64_setreg IA64_INTRINSIC_API(setreg)
#define ia64_set_rr IA64_INTRINSIC_API(set_rr)
#define ia64_get_rr IA64_INTRINSIC_API(get_rr)
#define ia64_ptcga IA64_INTRINSIC_API(ptcga)
#define ia64_get_psr_i IA64_INTRINSIC_API(get_psr_i)
#define ia64_intrin_local_irq_restore \
IA64_INTRINSIC_API(intrin_local_irq_restore)
#define ia64_set_rr0_to_rr4 IA64_INTRINSIC_API(set_rr0_to_rr4)
#endif /* !__ASSEMBLY__ */
#endif /* _UAPI_ASM_IA64_INTRINSICS_H */ #endif /* _UAPI_ASM_IA64_INTRINSICS_H */
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment