1 From 3d317cc06fce61787e4429b98d6073e69a6b6cd7 Mon Sep 17 00:00:00 2001
2 From: Lars-Peter Clausen <lars@metafoo.de>
3 Date: Sat, 24 Apr 2010 17:34:29 +0200
4 Subject: [PATCH] JZ4740 cache quirks
7 arch/mips/include/asm/r4kcache.h | 231 ++++++++++++++++++++++++++++++++++++++
8 1 file changed, 231 insertions(+), 0 deletions(-)
10 diff --git a/arch/mips/include/asm/r4kcache.h b/arch/mips/include/asm/r4kcache.h
11 index 387bf59..b500056 100644
12 --- a/arch/mips/include/asm/r4kcache.h
13 +++ b/arch/mips/include/asm/r4kcache.h
15 #include <asm/cpu-features.h>
16 #include <asm/mipsmtregs.h>
22 + unsigned long __k0_addr; \
24 + __asm__ __volatile__( \
26 + "or %0, %0, %1\n\t" \
30 + : "=&r"(__k0_addr) \
31 + : "r" (0x20000000) ); \
36 + unsigned long __k0_addr; \
37 + __asm__ __volatile__( \
38 + "nop;nop;nop;nop;nop;nop;nop\n\t" \
43 + : "=&r" (__k0_addr)); \
46 +#define INVALIDATE_BTB() \
48 + unsigned long tmp; \
49 + __asm__ __volatile__( \
51 + "mfc0 %0, $16, 7\n\t" \
54 + "mtc0 %0, $16, 7\n\t" \
59 +#define SYNC_WB() __asm__ __volatile__ ("sync")
61 +#else /* CONFIG_JZRISC */
63 +#define K0_TO_K1() do { } while (0)
64 +#define K1_TO_K0() do { } while (0)
65 +#define INVALIDATE_BTB() do { } while (0)
66 +#define SYNC_WB() do { } while (0)
68 +#endif /* CONFIG_JZRISC */
71 * This macro return a properly sign-extended address suitable as base address
72 * for indexed cache operations. Two issues here:
73 @@ -144,6 +196,7 @@ static inline void flush_icache_line_indexed(unsigned long addr)
76 cache_op(Index_Invalidate_I, addr);
81 @@ -151,6 +204,7 @@ static inline void flush_dcache_line_indexed(unsigned long addr)
84 cache_op(Index_Writeback_Inv_D, addr);
89 @@ -163,6 +217,7 @@ static inline void flush_icache_line(unsigned long addr)
92 cache_op(Hit_Invalidate_I, addr);
97 @@ -170,6 +225,7 @@ static inline void flush_dcache_line(unsigned long addr)
100 cache_op(Hit_Writeback_Inv_D, addr);
105 @@ -177,6 +233,7 @@ static inline void invalidate_dcache_line(unsigned long addr)
108 cache_op(Hit_Invalidate_D, addr);
113 @@ -209,6 +266,7 @@ static inline void flush_scache_line(unsigned long addr)
114 static inline void protected_flush_icache_line(unsigned long addr)
116 protected_cache_op(Hit_Invalidate_I, addr);
121 @@ -220,6 +278,7 @@ static inline void protected_flush_icache_line(unsigned long addr)
122 static inline void protected_writeback_dcache_line(unsigned long addr)
124 protected_cache_op(Hit_Writeback_Inv_D, addr);
128 static inline void protected_writeback_scache_line(unsigned long addr)
129 @@ -396,8 +455,10 @@ static inline void blast_##pfx##cache##lsize##_page_indexed(unsigned long page)
130 __BUILD_BLAST_CACHE(d, dcache, Index_Writeback_Inv_D, Hit_Writeback_Inv_D, 16)
131 __BUILD_BLAST_CACHE(i, icache, Index_Invalidate_I, Hit_Invalidate_I, 16)
132 __BUILD_BLAST_CACHE(s, scache, Index_Writeback_Inv_SD, Hit_Writeback_Inv_SD, 16)
133 +#ifndef CONFIG_JZRISC
134 __BUILD_BLAST_CACHE(d, dcache, Index_Writeback_Inv_D, Hit_Writeback_Inv_D, 32)
135 __BUILD_BLAST_CACHE(i, icache, Index_Invalidate_I, Hit_Invalidate_I, 32)
137 __BUILD_BLAST_CACHE(s, scache, Index_Writeback_Inv_SD, Hit_Writeback_Inv_SD, 32)
138 __BUILD_BLAST_CACHE(d, dcache, Index_Writeback_Inv_D, Hit_Writeback_Inv_D, 64)
139 __BUILD_BLAST_CACHE(i, icache, Index_Invalidate_I, Hit_Invalidate_I, 64)
140 @@ -405,12 +466,122 @@ __BUILD_BLAST_CACHE(s, scache, Index_Writeback_Inv_SD, Hit_Writeback_Inv_SD, 64)
141 __BUILD_BLAST_CACHE(s, scache, Index_Writeback_Inv_SD, Hit_Writeback_Inv_SD, 128)
143 __BUILD_BLAST_CACHE(inv_d, dcache, Index_Writeback_Inv_D, Hit_Invalidate_D, 16)
144 +#ifndef CONFIG_JZRISC
145 __BUILD_BLAST_CACHE(inv_d, dcache, Index_Writeback_Inv_D, Hit_Invalidate_D, 32)
147 __BUILD_BLAST_CACHE(inv_s, scache, Index_Writeback_Inv_SD, Hit_Invalidate_SD, 16)
148 __BUILD_BLAST_CACHE(inv_s, scache, Index_Writeback_Inv_SD, Hit_Invalidate_SD, 32)
149 __BUILD_BLAST_CACHE(inv_s, scache, Index_Writeback_Inv_SD, Hit_Invalidate_SD, 64)
150 __BUILD_BLAST_CACHE(inv_s, scache, Index_Writeback_Inv_SD, Hit_Invalidate_SD, 128)
152 +#ifdef CONFIG_JZRISC
154 +static inline void blast_dcache32(void)
156 + unsigned long start = INDEX_BASE;
157 + unsigned long end = start + current_cpu_data.dcache.waysize;
158 + unsigned long ws_inc = 1UL << current_cpu_data.dcache.waybit;
159 + unsigned long ws_end = current_cpu_data.dcache.ways <<
160 + current_cpu_data.dcache.waybit;
161 + unsigned long ws, addr;
163 + for (ws = 0; ws < ws_end; ws += ws_inc)
164 + for (addr = start; addr < end; addr += 0x400)
165 + cache32_unroll32(addr|ws,Index_Writeback_Inv_D);
170 +static inline void blast_dcache32_page(unsigned long page)
172 + unsigned long start = page;
173 + unsigned long end = page + PAGE_SIZE;
176 + cache32_unroll32(start,Hit_Writeback_Inv_D);
178 + } while (start < end);
183 +static inline void blast_dcache32_page_indexed(unsigned long page)
185 + unsigned long indexmask = current_cpu_data.dcache.waysize - 1;
186 + unsigned long start = INDEX_BASE + (page & indexmask);
187 + unsigned long end = start + PAGE_SIZE;
188 + unsigned long ws_inc = 1UL << current_cpu_data.dcache.waybit;
189 + unsigned long ws_end = current_cpu_data.dcache.ways <<
190 + current_cpu_data.dcache.waybit;
191 + unsigned long ws, addr;
193 + for (ws = 0; ws < ws_end; ws += ws_inc)
194 + for (addr = start; addr < end; addr += 0x400)
195 + cache32_unroll32(addr|ws,Index_Writeback_Inv_D);
200 +static inline void blast_icache32(void)
202 + unsigned long start = INDEX_BASE;
203 + unsigned long end = start + current_cpu_data.icache.waysize;
204 + unsigned long ws_inc = 1UL << current_cpu_data.icache.waybit;
205 + unsigned long ws_end = current_cpu_data.icache.ways <<
206 + current_cpu_data.icache.waybit;
207 + unsigned long ws, addr;
211 + for (ws = 0; ws < ws_end; ws += ws_inc)
212 + for (addr = start; addr < end; addr += 0x400)
213 + cache32_unroll32(addr|ws,Index_Invalidate_I);
220 +static inline void blast_icache32_page(unsigned long page)
222 + unsigned long start = page;
223 + unsigned long end = page + PAGE_SIZE;
228 + cache32_unroll32(start,Hit_Invalidate_I);
230 + } while (start < end);
237 +static inline void blast_icache32_page_indexed(unsigned long page)
239 + unsigned long indexmask = current_cpu_data.icache.waysize - 1;
240 + unsigned long start = INDEX_BASE + (page & indexmask);
241 + unsigned long end = start + PAGE_SIZE;
242 + unsigned long ws_inc = 1UL << current_cpu_data.icache.waybit;
243 + unsigned long ws_end = current_cpu_data.icache.ways <<
244 + current_cpu_data.icache.waybit;
245 + unsigned long ws, addr;
249 + for (ws = 0; ws < ws_end; ws += ws_inc)
250 + for (addr = start; addr < end; addr += 0x400)
251 + cache32_unroll32(addr|ws,Index_Invalidate_I);
258 +#endif /* CONFIG_JZRISC */
260 /* build blast_xxx_range, protected_blast_xxx_range */
261 #define __BUILD_BLAST_CACHE_RANGE(pfx, desc, hitop, prot) \
262 static inline void prot##blast_##pfx##cache##_range(unsigned long start, \
263 @@ -432,13 +603,73 @@ static inline void prot##blast_##pfx##cache##_range(unsigned long start, \
264 __##pfx##flush_epilogue \
267 +#ifndef CONFIG_JZRISC
268 __BUILD_BLAST_CACHE_RANGE(d, dcache, Hit_Writeback_Inv_D, protected_)
270 __BUILD_BLAST_CACHE_RANGE(s, scache, Hit_Writeback_Inv_SD, protected_)
271 +#ifndef CONFIG_JZRISC
272 __BUILD_BLAST_CACHE_RANGE(i, icache, Hit_Invalidate_I, protected_)
273 __BUILD_BLAST_CACHE_RANGE(d, dcache, Hit_Writeback_Inv_D, )
275 __BUILD_BLAST_CACHE_RANGE(s, scache, Hit_Writeback_Inv_SD, )
276 /* blast_inv_dcache_range */
277 __BUILD_BLAST_CACHE_RANGE(inv_d, dcache, Hit_Invalidate_D, )
278 __BUILD_BLAST_CACHE_RANGE(inv_s, scache, Hit_Invalidate_SD, )
280 +#ifdef CONFIG_JZRISC
282 +static inline void protected_blast_dcache_range(unsigned long start,
285 + unsigned long lsize = cpu_dcache_line_size();
286 + unsigned long addr = start & ~(lsize - 1);
287 + unsigned long aend = (end - 1) & ~(lsize - 1);
290 + protected_cache_op(Hit_Writeback_Inv_D, addr);
298 +static inline void protected_blast_icache_range(unsigned long start,
301 + unsigned long lsize = cpu_icache_line_size();
302 + unsigned long addr = start & ~(lsize - 1);
303 + unsigned long aend = (end - 1) & ~(lsize - 1);
308 + protected_cache_op(Hit_Invalidate_I, addr);
318 +static inline void blast_dcache_range(unsigned long start,
321 + unsigned long lsize = cpu_dcache_line_size();
322 + unsigned long addr = start & ~(lsize - 1);
323 + unsigned long aend = (end - 1) & ~(lsize - 1);
326 + cache_op(Hit_Writeback_Inv_D, addr);
334 +#endif /* CONFIG_JZRISC */
336 #endif /* _ASM_R4KCACHE_H */