#ifndef __ASM_SH64_CACHEFLUSH_H
#define __ASM_SH64_CACHEFLUSH_H

#ifndef __ASSEMBLY__

#include <asm/page.h>

struct vm_area_struct;
struct page;
struct mm_struct;

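/*
 * Cache maintenance hooks provided by the sh64 cache code.  Apart from
 * flush_cache_sigtramp(), which the signal code uses to make a trampoline
 * written to user memory visible to the instruction cache, these follow
 * the generic semantics described in Documentation/cachetlb.txt.
 */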
extern void flush_cache_all(void);
extern void flush_cache_mm(struct mm_struct *mm);
extern void flush_cache_sigtramp(unsigned long start, unsigned long end);
extern void flush_cache_range(struct vm_area_struct *vma, unsigned long start,
			      unsigned long end);
extern void flush_cache_page(struct vm_area_struct *vma, unsigned long addr, unsigned long pfn);
extern void flush_dcache_page(struct page *pg);
extern void flush_icache_range(unsigned long start, unsigned long end);
extern void flush_icache_user_range(struct vm_area_struct *vma,
				    struct page *page, unsigned long addr,
				    int len);

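/* Flushing for dup_mmap() at fork time is the same as a full mm flush. */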
#define flush_cache_dup_mm(mm)	flush_cache_mm(mm)

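/* No locking is needed around the mapping while flushing the D-cache. */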
#define flush_dcache_mmap_lock(mapping)		do { } while (0)
#define flush_dcache_mmap_unlock(mapping)	do { } while (0)

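/* Setting up or tearing down a vmalloc mapping is flushed conservatively. */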
#define flush_cache_vmap(start, end)		flush_cache_all()
#define flush_cache_vunmap(start, end)		flush_cache_all()

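/* Nothing to do when a page is mapped into user space. */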
#define flush_icache_page(vma, page)	do { } while (0)

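/*
 * Used by ptrace and access_process_vm() to copy data to or from another
 * process's address space: flush the user's view of the page, copy via the
 * kernel mapping, and on a write also flush the I-cache in case the new
 * data contains instructions (e.g. a ptrace breakpoint).
 */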
#define copy_to_user_page(vma, page, vaddr, dst, src, len) \
	do {							\
		flush_cache_page(vma, vaddr, page_to_pfn(page));\
		memcpy(dst, src, len);				\
		flush_icache_user_range(vma, page, vaddr, len);	\
	} while (0)

#define copy_from_user_page(vma, page, vaddr, dst, src, len) \
	do {							\
		flush_cache_page(vma, vaddr, page_to_pfn(page));\
		memcpy(dst, src, len);				\
	} while (0)

#endif /* __ASSEMBLY__ */

#endif /* __ASM_SH64_CACHEFLUSH_H */