5a5488d3bb
Surprisingly this actually makes LOAD_PER_CPU_BASE() a little more efficient. Signed-off-by: David S. Miller <davem@davemloft.net>
28 lines · 575 B · C
/*
 * sparc64 per-cpu support.
 *
 * The current CPU's per-cpu offset is pinned in global register %g5
 * so that LOAD_PER_CPU_BASE() and per-cpu accessors can read it
 * without a memory load.
 */
#ifndef __ARCH_SPARC64_PERCPU__
#define __ARCH_SPARC64_PERCPU__

#include <linux/compiler.h>

/* Per-cpu base offset of the current CPU, kept live in %g5. */
register unsigned long __local_per_cpu_offset asm("g5");

#ifdef CONFIG_SMP

#include <asm/trap_block.h>

/* Arch-specific per-cpu area setup, called during SMP bringup. */
extern void real_setup_per_cpu_areas(void);

/* Per-cpu offset for an arbitrary CPU comes from its trap block. */
#define __per_cpu_offset(__cpu) \
	(trap_block[(__cpu)].__per_cpu_base)
#define per_cpu_offset(x) (__per_cpu_offset(x))

/* Fast path: current CPU's offset is already in %g5. */
#define __my_cpu_offset __local_per_cpu_offset

#else /* ! SMP */

/* UP build: no per-cpu areas to set up. */
#define real_setup_per_cpu_areas() do { } while (0)

#endif /* SMP */

#include <asm-generic/percpu.h>

#endif /* __ARCH_SPARC64_PERCPU__ */