Commit 38fabca1 authored by Linus Torvalds

Merge branch 'x86-asm-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip

Pull x86 asm updates from Ingo Molnar:
 "Two changes:

   - Remove (some) remnants of the vDSO's fake section table mechanism
     that were left behind when the vDSO build process reverted to using
     "objcopy -S" to strip the userspace image.

   - Remove the hardcoded POPCNT opcode bytes now that the minimum
     binutils version supports the mnemonic"

* 'x86-asm-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip:
  x86/vdso: Remove a stale/misleading comment from the linker script
  x86/vdso: Remove obsolete "fake section table" reservation
  x86: Use POPCNT mnemonics in arch_hweight.h
Parents: 8465625a 29434801
arch/x86/entry/vdso/vdso-layout.lds.S

@@ -7,16 +7,6 @@
  * This script controls its layout.
  */
 
-#if defined(BUILD_VDSO64)
-# define SHDR_SIZE 64
-#elif defined(BUILD_VDSO32) || defined(BUILD_VDSOX32)
-# define SHDR_SIZE 40
-#else
-# error unknown VDSO target
-#endif
-
-#define NUM_FAKE_SHDRS 13
-
 SECTIONS
 {
 	/*
@@ -60,20 +50,8 @@ SECTIONS
 		*(.bss*)
 		*(.dynbss*)
 		*(.gnu.linkonce.b.*)
-
-		/*
-		 * Ideally this would live in a C file, but that won't
-		 * work cleanly for x32 until we start building the x32
-		 * C code using an x32 toolchain.
-		 */
-		VDSO_FAKE_SECTION_TABLE_START = .;
-		. = . + NUM_FAKE_SHDRS * SHDR_SIZE;
-		VDSO_FAKE_SECTION_TABLE_END = .;
-
 	} :text
 
-	.fake_shstrtab	: { *(.fake_shstrtab) }	:text
-
 	.note		: { *(.note.*) }		:text	:note
 
 	.eh_frame_hdr	: { *(.eh_frame_hdr) }	:text	:eh_frame_hdr
@@ -87,11 +65,6 @@ SECTIONS
 	.text		: { *(.text*) }			:text	=0x90909090,
 
-	/*
-	 * At the end so that eu-elflint stays happy when vdso2c strips
-	 * these. A better implementation would avoid allocating space
-	 * for these.
-	 */
 	.altinstructions	: { *(.altinstructions) }	:text
 	.altinstr_replacement	: { *(.altinstr_replacement) }	:text
......
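Note on the removed reservation: SHDR_SIZE was simply the on-disk size of one ELF section header (64 bytes for 64-bit ELF, 40 bytes for 32-bit/x32), so the deleted lines reserved room for 13 fake headers inside the image. A minimal userspace sketch (not part of this commit; it only reuses the NUM_FAKE_SHDRS value from the deleted lines) that checks those sizes against <elf.h> and computes the space that used to be set aside:

/*
 * Sketch (not kernel code): confirm that the hardcoded SHDR_SIZE values
 * match the real ELF section header sizes, and compute the space the
 * deleted linker script lines reserved.  Build with: cc -o shdr shdr.c
 */
#include <elf.h>
#include <stdio.h>

#define NUM_FAKE_SHDRS 13	/* value taken from the deleted lines */

int main(void)
{
	/* sizeof(Elf64_Shdr) is 64 bytes, sizeof(Elf32_Shdr) is 40 bytes */
	printf("64-bit: %zu bytes/header, %zu bytes reserved\n",
	       sizeof(Elf64_Shdr), NUM_FAKE_SHDRS * sizeof(Elf64_Shdr));
	printf("32-bit: %zu bytes/header, %zu bytes reserved\n",
	       sizeof(Elf32_Shdr), NUM_FAKE_SHDRS * sizeof(Elf32_Shdr));
	return 0;
}

With 13 headers that came to 832 bytes on 64-bit and 520 bytes on 32-bit/x32, all of it allocated for a section table that vdso2c no longer fakes.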
arch/x86/entry/vdso/vdso2c.c

@@ -76,8 +76,6 @@ enum {
 	sym_hpet_page,
 	sym_pvclock_page,
 	sym_hvclock_page,
-	sym_VDSO_FAKE_SECTION_TABLE_START,
-	sym_VDSO_FAKE_SECTION_TABLE_END,
 };
 
 const int special_pages[] = {
@@ -98,12 +96,6 @@ struct vdso_sym required_syms[] = {
 	[sym_hpet_page] = {"hpet_page", true},
 	[sym_pvclock_page] = {"pvclock_page", true},
 	[sym_hvclock_page] = {"hvclock_page", true},
-	[sym_VDSO_FAKE_SECTION_TABLE_START] = {
-		"VDSO_FAKE_SECTION_TABLE_START", false
-	},
-	[sym_VDSO_FAKE_SECTION_TABLE_END] = {
-		"VDSO_FAKE_SECTION_TABLE_END", false
-	},
 	{"VDSO32_NOTE_MASK", true},
 	{"__kernel_vsyscall", true},
 	{"__kernel_sigreturn", true},
......
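The entries dropped above belong to vdso2c's required_syms table, which maps a symbol name expected in the vDSO image to whether its offset is exported into the generated vdso-image-*.c file (the removed entries were used internally by vdso2c only, hence their false flag). A simplified, self-contained sketch of that table-plus-lookup pattern; find_required_sym and the main() driver are illustrative, not actual vdso2c code:

/*
 * Simplified sketch of the required_syms lookup pattern used by vdso2c.
 * The real tool walks the vDSO's ELF symbol table; here we just match
 * a single name against the table.
 */
#include <stdbool.h>
#include <stdio.h>
#include <string.h>

struct vdso_sym {
	const char *name;
	bool export;	/* emit the symbol's offset in the generated C file? */
};

static const struct vdso_sym required_syms[] = {
	{"hpet_page", true},
	{"pvclock_page", true},
	{"hvclock_page", true},
	/* the two VDSO_FAKE_SECTION_TABLE_* entries used to live here */
};

/* Return the table index for a symbol name, or -1 if it is not tracked. */
static int find_required_sym(const char *name)
{
	for (size_t i = 0; i < sizeof(required_syms) / sizeof(required_syms[0]); i++)
		if (!strcmp(required_syms[i].name, name))
			return (int)i;
	return -1;
}

int main(void)
{
	printf("hvclock_page -> %d\n", find_required_sym("hvclock_page"));
	printf("VDSO_FAKE_SECTION_TABLE_START -> %d\n",
	       find_required_sym("VDSO_FAKE_SECTION_TABLE_START"));
	return 0;
}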
arch/x86/include/asm/arch_hweight.h

@@ -5,15 +5,9 @@
 #include <asm/cpufeatures.h>
 
 #ifdef CONFIG_64BIT
-/* popcnt %edi, %eax */
-#define POPCNT32 ".byte 0xf3,0x0f,0xb8,0xc7"
-/* popcnt %rdi, %rax */
-#define POPCNT64 ".byte 0xf3,0x48,0x0f,0xb8,0xc7"
 #define REG_IN "D"
 #define REG_OUT "a"
 #else
-/* popcnt %eax, %eax */
-#define POPCNT32 ".byte 0xf3,0x0f,0xb8,0xc0"
 #define REG_IN "a"
 #define REG_OUT "a"
 #endif
@@ -24,7 +18,7 @@ static __always_inline unsigned int __arch_hweight32(unsigned int w)
 {
 	unsigned int res;
 
-	asm (ALTERNATIVE("call __sw_hweight32", POPCNT32, X86_FEATURE_POPCNT)
+	asm (ALTERNATIVE("call __sw_hweight32", "popcntl %1, %0", X86_FEATURE_POPCNT)
 			 : "="REG_OUT (res)
 			 : REG_IN (w));
@@ -52,7 +46,7 @@ static __always_inline unsigned long __arch_hweight64(__u64 w)
 {
 	unsigned long res;
 
-	asm (ALTERNATIVE("call __sw_hweight64", POPCNT64, X86_FEATURE_POPCNT)
+	asm (ALTERNATIVE("call __sw_hweight64", "popcntq %1, %0", X86_FEATURE_POPCNT)
 			 : "="REG_OUT (res)
 			 : REG_IN (w));
......
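The deleted .byte strings encode the same POPCNT instruction (opcode F3 0F B8 /r) that the popcntl/popcntq mnemonics now emit; they existed only because older binutils could not assemble the mnemonic. At boot, ALTERNATIVE() patches the popcnt instruction over the call to the software fallback when X86_FEATURE_POPCNT is set. A userspace sketch of the same idea, assuming a GNU-compatible compiler on x86; sw_hweight32 and hweight32 are illustrative stand-ins, not the kernel helpers:

/*
 * Sketch: use the popcntl mnemonic when the CPU supports it, otherwise
 * fall back to a portable population count.  Build with: cc -o popcnt popcnt.c
 */
#include <stdio.h>

/* Portable fallback, standing in for the kernel's __sw_hweight32. */
static unsigned int sw_hweight32(unsigned int w)
{
	unsigned int res = 0;

	while (w) {
		res += w & 1;
		w >>= 1;
	}
	return res;
}

static unsigned int hweight32(unsigned int w)
{
	unsigned int res;

	/*
	 * __builtin_cpu_supports() is a GCC/clang builtin; the kernel
	 * instead patches the instruction in place with ALTERNATIVE().
	 */
	if (__builtin_cpu_supports("popcnt")) {
		asm("popcntl %1, %0" : "=r" (res) : "r" (w));
		return res;
	}
	return sw_hweight32(w);
}

int main(void)
{
	printf("hweight32(0xdeadbeef) = %u\n", hweight32(0xdeadbeef));
	return 0;
}

Both paths return 24 for 0xdeadbeef; the kernel avoids the runtime branch entirely by patching the instruction at boot.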