/*
 * sparc64 per-cpu variable support.
 *
 * Defines how per-cpu variables are declared, placed, and addressed on
 * sparc64.  In the SMP case each CPU gets its own copy of the .data.percpu
 * section, located at __per_cpu_base + (cpu << __per_cpu_shift); the
 * offset of the *local* CPU's copy is kept in global register %g5 so that
 * __get_cpu_var() needs no function call or cpu-id lookup.
 */
#ifndef __ARCH_SPARC64_PERCPU__
#define __ARCH_SPARC64_PERCPU__

#include <linux/compiler.h>

#ifdef CONFIG_SMP

/*
 * The generic setup_per_cpu_areas() hook is stubbed out; the actual
 * per-cpu area setup is done by real_setup_per_cpu_areas(), which is
 * defined elsewhere (presumably called later in arch boot once the
 * memory layout is known — confirm against arch/sparc64 setup code).
 */
#define setup_per_cpu_areas() do { } while (0)
extern void real_setup_per_cpu_areas(void);

/*
 * Base address of the replicated per-cpu areas and the log2 stride
 * between consecutive CPUs' copies.  Each CPU's area is therefore
 * (1UL << __per_cpu_shift) bytes apart, starting at __per_cpu_base.
 */
extern unsigned long __per_cpu_base;
extern unsigned long __per_cpu_shift;
#define __per_cpu_offset(__cpu) \
 (__per_cpu_base + ((unsigned long)(__cpu) << __per_cpu_shift))
#define per_cpu_offset(x) (__per_cpu_offset(x))

/* Separate out the type, so (int[3], foo) works. */
/* Places the variable in .data.percpu, the prototype section that gets
 * replicated once per CPU. */
#define DEFINE_PER_CPU(type, name) \
 __attribute__((__section__(".data.percpu"))) __typeof__(type) per_cpu__##name

/*
 * GCC global register variable: %g5 permanently holds the current CPU's
 * per-cpu offset.  NOTE(review): this assumes the whole kernel is built
 * with %g5 reserved (not allocated by the compiler) — verify against the
 * arch Makefile flags.
 */
register unsigned long __local_per_cpu_offset asm("g5");

/* var is in discarded region: offset to particular copy we want */
/* RELOC_HIDE hides the pointer arithmetic from the compiler so the
 * out-of-section address is not "optimized" based on the prototype
 * copy's apparent bounds. */
#define per_cpu(var, cpu) (*RELOC_HIDE(&per_cpu__##var, __per_cpu_offset(cpu)))
#define __get_cpu_var(var) (*RELOC_HIDE(&per_cpu__##var, __local_per_cpu_offset))
#define __raw_get_cpu_var(var) (*RELOC_HIDE(&per_cpu__##var, __local_per_cpu_offset))

/* A macro to avoid #include hell... */
/* Copies `size` bytes from `src` into every possible CPU's copy of a
 * per-cpu area (used when loading modules with per-cpu data). */
#define percpu_modcopy(pcpudst, src, size) \
do { \
 unsigned int __i; \
 for_each_possible_cpu(__i) \
 memcpy((pcpudst)+__per_cpu_offset(__i), \
 (src), (size)); \
} while (0)
#else /* ! SMP */

/*
 * Uniprocessor: there is exactly one copy of each per-cpu variable, so
 * all the offset machinery collapses to direct access.
 */
#define real_setup_per_cpu_areas() do { } while (0)
#define DEFINE_PER_CPU(type, name) \
 __typeof__(type) per_cpu__##name

/* The (void)cpu evaluates the cpu argument (for side effects and to
 * silence unused-value warnings) without using it. */
#define per_cpu(var, cpu) (*((void)cpu, &per_cpu__##var))
#define __get_cpu_var(var) per_cpu__##var
#define __raw_get_cpu_var(var) per_cpu__##var

#endif /* SMP */

#define DECLARE_PER_CPU(type, name) extern __typeof__(type) per_cpu__##name

#define EXPORT_PER_CPU_SYMBOL(var) EXPORT_SYMBOL(per_cpu__##var)
#define EXPORT_PER_CPU_SYMBOL_GPL(var) EXPORT_SYMBOL_GPL(per_cpu__##var)

#endif /* __ARCH_SPARC64_PERCPU__ */