Commit 9bfa23df authored by Thomas Gleixner, committed by Thomas Gleixner

x86: unify include/asm/cache_32/64.h

Same file, except for whitespace, comment formatting and the extra
defines in _64, which are conditional on VSMP anyway.
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
parent b2bba72c
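For context, here is a minimal standalone sketch (not part of the commit) of how code uses the macros this header provides. CONFIG_X86_L1_CACHE_SHIFT is normally set by Kconfig; the value 6 (64-byte lines) and the name demo_table are assumptions made only for this illustration.

#include <stdio.h>

/* Illustration only, not part of the commit.  CONFIG_X86_L1_CACHE_SHIFT is
 * normally provided by Kconfig; 6 (64-byte lines) is an assumed example
 * value, and demo_table is a made-up name. */
#define CONFIG_X86_L1_CACHE_SHIFT 6

#define L1_CACHE_SHIFT (CONFIG_X86_L1_CACHE_SHIFT)
#define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT)
#define __read_mostly __attribute__((__section__(".data.read_mostly")))

/* Written once at init, read on hot paths: grouping such data in
 * .data.read_mostly keeps it off cache lines that are written often. */
static int demo_table[4] __read_mostly = { 1, 2, 4, 8 };

int main(void)
{
	printf("L1 cache line size: %d bytes\n", L1_CACHE_BYTES);
	printf("demo_table[2] = %d\n", demo_table[2]);
	return 0;
}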
cache.h, old contents (removed):

#ifdef CONFIG_X86_32
# include "cache_32.h"
#else
# include "cache_64.h"
#endif

cache.h, new unified contents (added):

#ifndef _ARCH_X86_CACHE_H
#define _ARCH_X86_CACHE_H

/* L1 cache line size */
#define L1_CACHE_SHIFT (CONFIG_X86_L1_CACHE_SHIFT)
#define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT)

#define __read_mostly __attribute__((__section__(".data.read_mostly")))

#ifdef CONFIG_X86_VSMP
/* vSMP Internode cacheline shift */
#define INTERNODE_CACHE_SHIFT (12)

#ifdef CONFIG_SMP
#define __cacheline_aligned_in_smp \
	__attribute__((__aligned__(1 << (INTERNODE_CACHE_SHIFT)))) \
	__attribute__((__section__(".data.page_aligned")))
#endif
#endif

#endif
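A standalone sketch (not part of the commit) of what the VSMP branch above does: with CONFIG_X86_VSMP and CONFIG_SMP enabled, __cacheline_aligned_in_smp aligns the marked object to the 4096-byte internode line (1 << 12) and places it in .data.page_aligned. The macro body is copied from the header so the sketch builds on its own; demo_counters is a made-up name.

#include <stdio.h>

/* Copied from the unified header (the CONFIG_X86_VSMP && CONFIG_SMP case)
 * so this sketch compiles on its own; in the kernel it comes from
 * <asm/cache.h>. */
#define INTERNODE_CACHE_SHIFT (12)
#define __cacheline_aligned_in_smp \
	__attribute__((__aligned__(1 << (INTERNODE_CACHE_SHIFT)))) \
	__attribute__((__section__(".data.page_aligned")))

/* demo_counters is a made-up name used only for this illustration. */
static struct demo_counters {
	unsigned long hits;
	unsigned long misses;
} demo_counters __cacheline_aligned_in_smp = { 0, 0 };

int main(void)
{
	/* On vSMP the "SMP cache line" is an internode line: 1 << 12 = 4096 bytes. */
	printf("alignment of demo_counters: %zu bytes\n",
	       (size_t)__alignof__(demo_counters));
	return 0;
}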
cache_32.h (deleted):

/*
 * include/asm-i386/cache.h
 */
#ifndef __ARCH_I386_CACHE_H
#define __ARCH_I386_CACHE_H

/* L1 cache line size */
#define L1_CACHE_SHIFT (CONFIG_X86_L1_CACHE_SHIFT)
#define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT)

#define __read_mostly __attribute__((__section__(".data.read_mostly")))

#endif

cache_64.h (deleted):

/*
 * include/asm-x86_64/cache.h
 */
#ifndef __ARCH_X8664_CACHE_H
#define __ARCH_X8664_CACHE_H

/* L1 cache line size */
#define L1_CACHE_SHIFT (CONFIG_X86_L1_CACHE_SHIFT)
#define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT)

#ifdef CONFIG_X86_VSMP
/* vSMP Internode cacheline shift */
#define INTERNODE_CACHE_SHIFT (12)

#ifdef CONFIG_SMP
#define __cacheline_aligned_in_smp \
	__attribute__((__aligned__(1 << (INTERNODE_CACHE_SHIFT)))) \
	__attribute__((__section__(".data.page_aligned")))
#endif
#endif

#define __read_mostly __attribute__((__section__(".data.read_mostly")))

#endif