41ba653f24
The current cache options don't map cleanly onto the hardware features: each option ends up touching several aspects of the hardware, with the net result of simply turning the cache on or off. Unfortunately, when we hit cache problems with the hardware, that makes it difficult to test different settings and root-cause the problem. The current settings also don't cleanly allow for different caching behaviors in different regions of memory. So split the config options such that they properly reflect the settings that are applied to the hardware.

Signed-off-by: Jie Zhang <jie.zhang@analog.com>
Signed-off-by: Mike Frysinger <vapier@gentoo.org>
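A rough sketch of what the split enables (the option names are the ones used in the header below; the two init helpers are hypothetical stand-ins, not real kernel functions): instruction-cache and data-cache behavior can now be compiled in independently instead of hanging off a single all-or-nothing cache switch.

static inline void bfin_icache_init_example(void) { /* hypothetical stub */ }
static inline void bfin_dcache_init_example(void) { /* hypothetical stub */ }

static inline void bfin_cache_setup_sketch(void)
{
#ifdef CONFIG_BFIN_ICACHEABLE
	bfin_icache_init_example();	/* icache set up only when its option is enabled */
#endif
#ifdef CONFIG_BFIN_DCACHEABLE
	bfin_dcache_init_example();	/* dcache controlled by a separate option */
#endif
}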
63 lines
1.3 KiB
C
/*
 * include/asm-blackfin/cache.h
 */
#ifndef __ARCH_BLACKFIN_CACHE_H
#define __ARCH_BLACKFIN_CACHE_H

/*
 * Bytes per L1 cache line
 * Blackfin loads 32 bytes for cache
 */
#define L1_CACHE_SHIFT	5
#define L1_CACHE_BYTES	(1 << L1_CACHE_SHIFT)
#define SMP_CACHE_BYTES	L1_CACHE_BYTES

#ifdef CONFIG_SMP
#define __cacheline_aligned
#else
#define ____cacheline_aligned

/*
 * Put cacheline_aligned data to L1 data memory
 */
#ifdef CONFIG_CACHELINE_ALIGNED_L1
#define __cacheline_aligned \
	__attribute__((__aligned__(L1_CACHE_BYTES), \
		__section__(".data_l1.cacheline_aligned")))
#endif

#endif

/*
 * largest L1 which this arch supports
 */
#define L1_CACHE_SHIFT_MAX 5

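/*
 * On SMP kernels without hardware cache coherency, each core must
 * resynchronize its own L1 caches in software; the hooks below are
 * only pulled in for the caches that are actually enabled.
 */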
#if defined(CONFIG_SMP) && \
    !defined(CONFIG_BFIN_CACHE_COHERENT)
# if defined(CONFIG_BFIN_ICACHEABLE) || defined(CONFIG_BFIN_L2_ICACHEABLE)
# define __ARCH_SYNC_CORE_ICACHE
# endif
# if defined(CONFIG_BFIN_DCACHEABLE) || defined(CONFIG_BFIN_L2_DCACHEABLE)
# define __ARCH_SYNC_CORE_DCACHE
# endif
#ifndef __ASSEMBLY__
asmlinkage void __raw_smp_mark_barrier_asm(void);
asmlinkage void __raw_smp_check_barrier_asm(void);

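/* C-callable wrappers around the assembly barrier bookkeeping routines above. */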
static inline void smp_mark_barrier(void)
{
	__raw_smp_mark_barrier_asm();
}
static inline void smp_check_barrier(void)
{
	__raw_smp_check_barrier_asm();
}

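/* Bring this core's data/instruction caches back in sync with memory. */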
void resync_core_dcache(void);
void resync_core_icache(void);
#endif
#endif


#endif
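As a usage sketch (the structure and variable names here are hypothetical), data annotated with __cacheline_aligned behaves per the macros above: with CONFIG_CACHELINE_ALIGNED_L1 set and CONFIG_SMP unset, the object is aligned to the 32-byte L1 line size and placed in the .data_l1.cacheline_aligned section, i.e. in L1 data memory:

/* Hypothetical example: a hot statistics block kept on its own cache line. */
static struct bfin_stats_example {
	unsigned long irqs;
	unsigned long faults;
} bfin_stats __cacheline_aligned;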