#ifndef _ASM_M32R_SMP_H
#define _ASM_M32R_SMP_H

#ifdef CONFIG_SMP
#ifndef __ASSEMBLY__

#include <linux/cpumask.h>
#include <linux/spinlock.h>
#include <linux/threads.h>
#include <asm/m32r.h>

/*
 * Physical CPU-id mask handling.
 *
 * A physid_mask_t is a fixed-size bitmap with one bit per physical
 * CPU id.  A single unsigned long word is used here; the bitmap
 * helpers below are sized by MAX_APICS.
 * NOTE(review): MAX_APICS is assumed to be provided by one of the
 * headers included above and to fit in PHYSID_ARRAY_SIZE words --
 * confirm against <asm/m32r.h>.
 */
#define PHYSID_ARRAY_SIZE	1

struct physid_mask
{
	unsigned long mask[PHYSID_ARRAY_SIZE];
};

typedef struct physid_mask physid_mask_t;

/*
 * Single-bit operations on a physid mask.  The physid argument is
 * parenthesized so that expressions like "cpu + 1" expand safely.
 */
#define physid_set(physid, map)			set_bit((physid), (map).mask)
#define physid_clear(physid, map)		clear_bit((physid), (map).mask)
#define physid_isset(physid, map)		test_bit((physid), (map).mask)
#define physid_test_and_set(physid, map)	test_and_set_bit((physid), (map).mask)

/* Whole-mask operations, built on the generic bitmap library. */
#define physids_and(dst, src1, src2)	bitmap_and((dst).mask, (src1).mask, (src2).mask, MAX_APICS)
#define physids_or(dst, src1, src2)	bitmap_or((dst).mask, (src1).mask, (src2).mask, MAX_APICS)
#define physids_clear(map)		bitmap_zero((map).mask, MAX_APICS)
#define physids_complement(dst, src)	bitmap_complement((dst).mask, (src).mask, MAX_APICS)
#define physids_empty(map)		bitmap_empty((map).mask, MAX_APICS)
#define physids_equal(map1, map2)	bitmap_equal((map1).mask, (map2).mask, MAX_APICS)
#define physids_weight(map)		bitmap_weight((map).mask, MAX_APICS)
#define physids_shift_right(d, s, n)	bitmap_shift_right((d).mask, (s).mask, (n), MAX_APICS)
#define physids_shift_left(d, s, n)	bitmap_shift_left((d).mask, (s).mask, (n), MAX_APICS)

/* Collapse a mask to its first word (valid since PHYSID_ARRAY_SIZE == 1). */
#define physids_coerce(map)		((map).mask[0])

/* Build a mask whose first word is the given bit pattern. */
#define physids_promote(physids)					\
	({								\
		physid_mask_t __physid_mask = PHYSID_MASK_NONE;		\
		__physid_mask.mask[0] = (physids);			\
		__physid_mask;						\
	})

/* Build a mask with exactly one physical id set. */
#define physid_mask_of_physid(physid)					\
	({								\
		physid_mask_t __physid_mask = PHYSID_MASK_NONE;		\
		physid_set(physid, __physid_mask);			\
		__physid_mask;						\
	})

#define PHYSID_MASK_ALL		{ {[0 ... PHYSID_ARRAY_SIZE-1] = ~0UL} }
#define PHYSID_MASK_NONE	{ {[0 ... PHYSID_ARRAY_SIZE-1] = 0UL} }

extern physid_mask_t phys_cpu_present_map;

/*
 * Some lowlevel functions might want to know about
 * the real CPU ID <-> CPU # mapping.
 */
extern volatile int cpu_2_physid[NR_CPUS];
#define cpu_to_physid(cpu_id)	cpu_2_physid[cpu_id]

/* Logical CPU number of the running CPU, cached in thread_info. */
#define raw_smp_processor_id()	(current_thread_info()->cpu)

extern cpumask_t cpu_callout_map;
extern cpumask_t cpu_possible_map;
extern cpumask_t cpu_present_map;

/*
 * Physical id of the running CPU, read from the CPUID port.
 * NOTE(review): assumes M32R_CPUID_PORTL is a memory-mapped register
 * address supplied by <asm/m32r.h> -- confirm.
 */
static __inline__ int hard_smp_processor_id(void)
{
	return (int)*(volatile long *)M32R_CPUID_PORTL;
}

/* Logical <-> internal CPU numbering is the identity mapping on m32r. */
static __inline__ int cpu_logical_map(int cpu)
{
	return cpu;
}

static __inline__ int cpu_number_map(int cpu)
{
	return cpu;
}

/* Number of CPUs that have been called out to boot so far. */
static __inline__ unsigned int num_booting_cpus(void)
{
	return cpus_weight(cpu_callout_map);
}

extern void smp_send_timer(void);
extern unsigned long send_IPI_mask_phys(cpumask_t, int, int);

#endif	/* not __ASSEMBLY__ */

#define NO_PROC_ID		(0xff)	/* No processor magic marker */

#define PROC_CHANGE_PENALTY	(15)	/* Schedule penalty */

/*
 * M32R-mp IPI: interrupt numbers relative to the first IPI interrupt.
 */
#define RESCHEDULE_IPI		(M32R_IRQ_IPI0-M32R_IRQ_IPI0)
#define INVALIDATE_TLB_IPI	(M32R_IRQ_IPI1-M32R_IRQ_IPI0)
#define CALL_FUNCTION_IPI	(M32R_IRQ_IPI2-M32R_IRQ_IPI0)
#define LOCAL_TIMER_IPI		(M32R_IRQ_IPI3-M32R_IRQ_IPI0)
#define INVALIDATE_CACHE_IPI	(M32R_IRQ_IPI4-M32R_IRQ_IPI0)
#define CPU_BOOT_IPI		(M32R_IRQ_IPI5-M32R_IRQ_IPI0)

#define IPI_SHIFT	(0)
#define NR_IPIS		(8)

#else	/* CONFIG_SMP */

#define hard_smp_processor_id()	0

#endif	/* CONFIG_SMP */

#endif	/* _ASM_M32R_SMP_H */