/*
 * Copyright 2004-2009 Analog Devices Inc.
 *
 * Licensed under the GPL-2 or later.
 */

#ifndef __ARCH_BLACKFIN_CACHE_H
#define __ARCH_BLACKFIN_CACHE_H

#include <linux/linkage.h>	/* for asmlinkage */

/*
 * Bytes per L1 cache line
 * Blackfin loads 32 bytes per cache line fill
 */
#define L1_CACHE_SHIFT	5
#define L1_CACHE_BYTES	(1 << L1_CACHE_SHIFT)
#define SMP_CACHE_BYTES	L1_CACHE_BYTES
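
/*
 * Usage sketch (illustrative, not part of the original header): with
 * L1_CACHE_SHIFT = 5, L1_CACHE_BYTES evaluates to 1 << 5 = 32 bytes.
 * Data written by different cores can be aligned to that size so hot
 * members never share a cache line (avoiding false sharing):
 *
 *	struct stats {	// hypothetical type, for illustration only
 *		long rx __attribute__((__aligned__(L1_CACHE_BYTES)));
 *		long tx __attribute__((__aligned__(L1_CACHE_BYTES)));
 *	};
 */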

#define ARCH_DMA_MINALIGN	L1_CACHE_BYTES

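/*
 * Usage sketch (illustrative, not part of the original header): a DMA
 * target buffer embedded in a larger object can be aligned to
 * ARCH_DMA_MINALIGN so it never shares a cache line with fields the
 * CPU writes concurrently:
 *
 *	struct my_dev {	// hypothetical type, for illustration only
 *		spinlock_t lock;
 *		u8 rx_buf[64] __attribute__((__aligned__(ARCH_DMA_MINALIGN)));
 *	};
 */
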
#ifdef CONFIG_SMP
#define __cacheline_aligned
#else
#define ____cacheline_aligned

/*
 * Put cacheline-aligned data into L1 data memory
 * (see the usage sketch after this block)
 */
#ifdef CONFIG_CACHELINE_ALIGNED_L1
#define __cacheline_aligned				\
	  __attribute__((__aligned__(L1_CACHE_BYTES),	\
		__section__(".data_l1.cacheline_aligned")))
#endif	/* CONFIG_CACHELINE_ALIGNED_L1 */

#endif	/* CONFIG_SMP */
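
/*
 * Usage sketch (illustrative, not part of the original header): on a
 * UP kernel with CONFIG_CACHELINE_ALIGNED_L1, tagging an object with
 * __cacheline_aligned both aligns it to L1_CACHE_BYTES and places it
 * in the on-chip .data_l1.cacheline_aligned section:
 *
 *	static struct timer_stats t_stats __cacheline_aligned;
 *	// 'struct timer_stats' and 't_stats' are hypothetical names
 */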

/*
 * Largest L1 cache line shift (log2 of the line size) this arch supports
 */
#define L1_CACHE_SHIFT_MAX	5

#if defined(CONFIG_SMP) && \
    !defined(CONFIG_BFIN_CACHE_COHERENT)
# if defined(CONFIG_BFIN_EXTMEM_ICACHEABLE) || defined(CONFIG_BFIN_L2_ICACHEABLE)
# define __ARCH_SYNC_CORE_ICACHE
# endif
# if defined(CONFIG_BFIN_EXTMEM_DCACHEABLE) || defined(CONFIG_BFIN_L2_DCACHEABLE)
# define __ARCH_SYNC_CORE_DCACHE
# endif
#ifndef __ASSEMBLY__
asmlinkage void __raw_smp_mark_barrier_asm(void);
asmlinkage void __raw_smp_check_barrier_asm(void);

/* Record a barrier point so other cores can tell new data was written. */
static inline void smp_mark_barrier(void)
{
	__raw_smp_mark_barrier_asm();
}
/* Discard stale cached data if another core has marked a barrier. */
static inline void smp_check_barrier(void)
{
	__raw_smp_check_barrier_asm();
}
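
/*
 * Usage sketch (illustrative, not part of the original header): on
 * non-coherent SMP the writing core marks a barrier after publishing
 * data, and the reading core checks the barrier before consuming, so
 * stale cached copies are discarded first (this arch's smp_wmb() and
 * smp_rmb() are built on these helpers):
 *
 *	writer core:	shared->val = x;	// 'shared' is hypothetical
 *			smp_mark_barrier();
 *	reader core:	smp_check_barrier();
 *			x = shared->val;
 */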

/* Full resync of this core's local caches, e.g. after a task migrates. */
void resync_core_dcache(void);
void resync_core_icache(void);
#endif	/* !__ASSEMBLY__ */
#endif	/* CONFIG_SMP && !CONFIG_BFIN_CACHE_COHERENT */

#endif	/* __ARCH_BLACKFIN_CACHE_H */