/*
 * Copyright 2004-2009 Analog Devices Inc.
 *
 * Licensed under the GPL-2 or later.
 */

#ifndef __ARCH_BLACKFIN_CACHE_H
#define __ARCH_BLACKFIN_CACHE_H

/*
 * Bytes per L1 cache line
 * Blackfin loads 32 bytes for cache
 */
#define L1_CACHE_SHIFT	5
#define L1_CACHE_BYTES	(1 << L1_CACHE_SHIFT)
#define SMP_CACHE_BYTES	L1_CACHE_BYTES

/* DMA buffers must be at least cache-line aligned/sized to avoid
 * sharing a line with unrelated data. */
#define ARCH_DMA_MINALIGN	L1_CACHE_BYTES

#ifdef CONFIG_SMP
/*
 * NOTE(review): on SMP this is deliberately defined empty, which
 * suppresses the generic <linux/cache.h> alignment attribute —
 * presumably because the L1-placement variant below would put
 * shared data into per-core L1 memory; confirm against the generic
 * cache.h fallback before changing.
 */
#define __cacheline_aligned
#else
#define ____cacheline_aligned

/*
 * Put cacheline_aligned data to L1 data memory
 */
#ifdef CONFIG_CACHELINE_ALIGNED_L1
/* Align to a cache line AND place the object in the dedicated
 * .data_l1.cacheline_aligned section (linked into on-chip L1 data
 * memory by the arch linker script). */
#define __cacheline_aligned	\
	  __attribute__((__aligned__(L1_CACHE_BYTES),	\
		__section__(".data_l1.cacheline_aligned")))
#endif

#endif

/*
 * largest L1 which this arch supports
 */
#define L1_CACHE_SHIFT_MAX	5

/*
 * On SMP without hardware cache coherency, cores must resynchronize
 * their caches in software.  Advertise which caches need it, keyed
 * off whether external memory / L2 are actually cacheable.
 */
#if defined(CONFIG_SMP) && !defined(CONFIG_BFIN_CACHE_COHERENT)
# if defined(CONFIG_BFIN_EXTMEM_ICACHEABLE) || defined(CONFIG_BFIN_L2_ICACHEABLE)
# define __ARCH_SYNC_CORE_ICACHE
# endif
# if defined(CONFIG_BFIN_EXTMEM_DCACHEABLE) || defined(CONFIG_BFIN_L2_DCACHEABLE)
# define __ARCH_SYNC_CORE_DCACHE
# endif
#ifndef __ASSEMBLY__
/* Low-level barrier bookkeeping implemented in assembly elsewhere. */
asmlinkage void __raw_smp_mark_barrier_asm(void);
asmlinkage void __raw_smp_check_barrier_asm(void);

/* Thin C wrapper: record a barrier point for this core. */
static inline void smp_mark_barrier(void)
{
	__raw_smp_mark_barrier_asm();
}
/* Thin C wrapper: check/act on a previously marked barrier point. */
static inline void smp_check_barrier(void)
{
	__raw_smp_check_barrier_asm();
}

/* Bring this core's D/I cache back in sync after another core wrote
 * to cacheable memory (defined out of line). */
void resync_core_dcache(void);
void resync_core_icache(void);
#endif
#endif


#endif