#ifndef _H8300_SYSTEM_H
#define _H8300_SYSTEM_H

#include <linux/linkage.h>

/*
 * switch_to(n) should switch tasks to task ptr, first checking that
 * ptr isn't the current task, in which case it does nothing. This
 * also clears the TS-flag if the task we switched to has used the
 * math co-processor latest.
 */
/*
 * switch_to() saves the extra registers, that are not saved
 * automatically by SAVE_SWITCH_STACK in resume(), ie. d0-d5 and
 * a0-a1. Some of these are used by schedule() and its predecessors
 * and so we might see unexpected behaviors when a task returns
 * with unexpected register values.
 *
 * syscall stores these registers itself and none of them are used
 * by syscall after the function in the syscall has been called.
 *
 * Beware that resume now expects *next to be in d1 and the offset of
 * tss to be in a1. This saves a few instructions as we no longer have
 * to push them onto the stack and read them back right after.
 *
 * 02/17/96 - Jes Sorensen (jds@kom.auc.dk)
 *
 * Changed 96/09/19 by Andreas Schwab
 * pass prev in a0, next in a1, offset of tss in d1, and whether
 * the mm structures are shared in d2 (to avoid atc flushing).
 *
 * H8/300 Porting 2002/09/04 Yoshinori Sato
 */

/* Assembly entry point that performs the actual context switch. */
asmlinkage void resume(void);

/*
 * Load &prev->thread into er0, &next->thread into er1 and prev into
 * er2, then call resume(); resume() leaves the task we actually
 * switched away from in er2, which is copied out into 'last'.
 */
#define switch_to(prev,next,last) { \
  void *_last; \
  __asm__ __volatile__( \
	"mov.l %1, er0\n\t" \
	"mov.l %2, er1\n\t" \
	"mov.l %3, er2\n\t" \
	"jsr @_resume\n\t" \
	"mov.l er2,%0\n\t" \
	: "=r" (_last) \
	: "r" (&(prev->thread)), \
	  "r" (&(next->thread)), \
	  "g" (prev) \
	: "cc", "er0", "er1", "er2", "er3"); \
  (last) = _last; \
}

/* Enable interrupts: clear the I (interrupt mask) bit, bit 7 of CCR. */
#define __sti() asm volatile ("andc #0x7f,ccr")

/* Disable interrupts: set the I (interrupt mask) bit, bit 7 of CCR. */
#define __cli() asm volatile ("orc #0x80,ccr")

/* Copy the current CCR into x. */
#define __save_flags(x) \
	asm volatile ("stc ccr,%w0":"=r" (x))

/* Write x back into CCR, restoring the saved interrupt mask state. */
#define __restore_flags(x) \
	asm volatile ("ldc %w0,ccr": :"r" (x))

/* Nonzero when the CCR I bit is set, i.e. interrupts are masked. */
#define irqs_disabled() \
({ \
	unsigned char flags; \
	__save_flags(flags); \
	((flags & 0x80) == 0x80); \
})

/* Return from exception. */
#define iret() __asm__ __volatile__ ("rte": : :"memory", "sp", "cc")

/* For spinlocks etc */
#define local_irq_disable() __cli()
#define local_irq_enable() __sti()
#define local_irq_save(x) ({ __save_flags(x); local_irq_disable(); })
#define local_irq_restore(x) __restore_flags(x)
#define local_save_flags(x) __save_flags(x)

/*
 * Force strict CPU ordering.
 * Not really required on H8...
 * The barriers below are compiler-only ("memory" clobber); no fence
 * instruction is emitted.
 */
#define nop() asm volatile ("nop"::)
#define mb() asm volatile ("" : : :"memory")
#define rmb() asm volatile ("" : : :"memory")
#define wmb() asm volatile ("" : : :"memory")
/* Store via xchg() so the assignment is atomic w.r.t. local interrupts. */
#define set_rmb(var, value) do { xchg(&var, value); } while (0)
#define set_mb(var, value) set_rmb(var, value)

#ifdef CONFIG_SMP
#define smp_mb() mb()
#define smp_rmb() rmb()
#define smp_wmb() wmb()
#define smp_read_barrier_depends() read_barrier_depends()
#else
#define smp_mb() barrier()
#define smp_rmb() barrier()
#define smp_wmb() barrier()
#define smp_read_barrier_depends() do { } while(0)
#endif

/* Atomically exchange *ptr with x; evaluates to the old value of *ptr. */
#define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))

/*
 * Oversized dummy type so the "m" constraint on __xg(ptr) is treated
 * as covering the whole pointed-to object, not just its first byte.
 */
struct __xchg_dummy { unsigned long a[100]; };
#define __xg(x) ((volatile struct __xchg_dummy *)(x))

/*
 * Exchange helper: atomicity is obtained by masking local interrupts
 * around the load/store pair (sufficient on a uniprocessor only).
 *
 * NOTE(review): the memory operand is written by the second mov but is
 * listed as an input-only "m" constraint; only the "memory" clobber
 * keeps the compiler from caching the old value. Consider "+m" --
 * confirm against other ports' __xchg implementations.
 *
 * An unsupported size silently returns 0 without exchanging anything.
 */
static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
	unsigned long tmp, flags;

	local_irq_save(flags);

	switch (size) {
	case 1:
		__asm__ __volatile__
		("mov.b %2,%0\n\t"	/* tmp = *ptr */
		 "mov.b %1,%2"		/* *ptr = x */
		 : "=&r" (tmp) : "r" (x), "m" (*__xg(ptr)) : "memory");
		break;
	case 2:
		__asm__ __volatile__
		("mov.w %2,%0\n\t"
		 "mov.w %1,%2"
		 : "=&r" (tmp) : "r" (x), "m" (*__xg(ptr)) : "memory");
		break;
	case 4:
		__asm__ __volatile__
		("mov.l %2,%0\n\t"
		 "mov.l %1,%2"
		 : "=&r" (tmp) : "r" (x), "m" (*__xg(ptr)) : "memory");
		break;
	default:
		tmp = 0;
	}
	local_irq_restore(flags);
	return tmp;
}

/*
 * Reset: mask interrupts, then take a memory-indirect jump through the
 * vector at address 0 (presumably the reset vector -- confirm against
 * the board's vector table).
 */
#define HARD_RESET_NOW() ({ \
	local_irq_disable(); \
	asm("jmp @@0"); \
})

/* No stack randomization on this port. */
#define arch_align_stack(x) (x)

#endif /* _H8300_SYSTEM_H */