/*
 * include/asm-alpha/processor.h
 *
 * Copyright (C) 1994 Linus Torvalds
 */

#ifndef __ASM_ALPHA_PROCESSOR_H
#define __ASM_ALPHA_PROCESSOR_H

#include <linux/personality.h>	/* for ADDR_LIMIT_32BIT */

/*
 * Returns current instruction pointer ("program counter").
 * A branch to the very next instruction deposits the return
 * address -- i.e. the current PC -- into the destination register.
 */
#define current_text_addr() \
	({ void *__pc; __asm__ ("br %0,.+4" : "=r"(__pc)); __pc; })

/*
 * We have a 42-bit user address space: 4TB user VM...
 */
#define TASK_SIZE (0x40000000000UL)

/* This decides where the kernel will search for a free chunk of vm
 * space during mmap's.  32-bit personalities stay below 1GB;
 * everything else starts at half of the 4TB user VM.
 */
#define TASK_UNMAPPED_BASE \
	((current->personality & ADDR_LIMIT_32BIT) ? 0x40000000 : TASK_SIZE / 2)

typedef struct {
	unsigned long seg;
} mm_segment_t;

/* This is dead.  Everything has been moved to thread_info. */
struct thread_struct { };
#define INIT_THREAD  { }

/* Return saved PC of a blocked thread. */
struct task_struct;
extern unsigned long thread_saved_pc(struct task_struct *);

/* Do necessary setup to start up a newly executed thread. */
extern void start_thread(struct pt_regs *, unsigned long, unsigned long);

/* Free all resources held by a thread. */
extern void release_thread(struct task_struct *);

/* Prepare to copy thread state - unlazy all lazy status */
#define prepare_to_copy(tsk)	do { } while (0)

/* Create a kernel thread without removing it from tasklists. */
extern long kernel_thread(int (*fn)(void *), void *arg, unsigned long flags);

unsigned long get_wchan(struct task_struct *p);

#define KSTK_EIP(tsk) (task_pt_regs(tsk)->pc)

#define KSTK_ESP(tsk) \
	((tsk) == current ? rdusp() : task_thread_info(tsk)->pcb.usp)

#define cpu_relax()	barrier()

#define ARCH_HAS_PREFETCH
#define ARCH_HAS_PREFETCHW
#define ARCH_HAS_SPINLOCK_PREFETCH

#ifndef CONFIG_SMP
/* Nothing to prefetch. */
#define spin_lock_prefetch(lock)	do { } while (0)
#endif

/*
 * NOTE(review): these helpers were "extern inline", a gnu89-only idiom.
 * Under C99/C11 inline semantics, "extern inline" in a header requires
 * exactly one out-of-line definition in some translation unit or the
 * build fails at link time.  "static inline" is self-contained, always
 * link-safe in a header, and generates identical code when inlined.
 */

/* Hint a read-mostly prefetch (rw=0) with high temporal locality (3). */
static inline void prefetch(const void *ptr)
{
	__builtin_prefetch(ptr, 0, 3);
}

/* Hint a prefetch in anticipation of a write (rw=1). */
static inline void prefetchw(const void *ptr)
{
	__builtin_prefetch(ptr, 1, 3);
}

#ifdef CONFIG_SMP
/* A contended lock line will be written -- prefetch for write. */
static inline void spin_lock_prefetch(const void *ptr)
{
	__builtin_prefetch(ptr, 1, 3);
}
#endif

#endif /* __ASM_ALPHA_PROCESSOR_H */