Index: linux-2.6.22-rc4/arch/mips/kernel/genex.S
===================================================================
--- linux-2.6.22-rc4.orig/arch/mips/kernel/genex.S	2007-06-10 21:32:12.000000000 +0100
+++ linux-2.6.22-rc4/arch/mips/kernel/genex.S	2007-06-10 21:33:19.000000000 +0100
 NESTED(except_vec3_generic, 0, sp)
+#ifdef CONFIG_BCM947XX
 #if R5432_CP0_INTERRUPT_WAR
Index: linux-2.6.22-rc4/arch/mips/mm/c-r4k.c
===================================================================
--- linux-2.6.22-rc4.orig/arch/mips/mm/c-r4k.c	2007-06-10 21:33:17.000000000 +0100
+++ linux-2.6.22-rc4/arch/mips/mm/c-r4k.c	2007-06-10 21:33:19.000000000 +0100
 #include <asm/cacheflush.h> /* for run_uncached() */
+/* For enabling BCM4710 cache workarounds */
  * Special Variant of smp_call_function for use by cache functions:
 	unsigned long dc_lsize = cpu_dcache_line_size();
+	r4k_blast_dcache_page = blast_dcache_page;
 		r4k_blast_dcache_page = (void *)cache_noop;
 	else if (dc_lsize == 16)
 	unsigned long dc_lsize = cpu_dcache_line_size();
+	r4k_blast_dcache_page_indexed = blast_dcache_page_indexed;
 		r4k_blast_dcache_page_indexed = (void *)cache_noop;
 	else if (dc_lsize == 16)
 	unsigned long dc_lsize = cpu_dcache_line_size();
+	r4k_blast_dcache = blast_dcache;
 		r4k_blast_dcache = (void *)cache_noop;
 	else if (dc_lsize == 16)
 		protected_blast_icache_range(start, end);
+	r4k_flush_cache_all();
 static void r4k_flush_icache_range(unsigned long start, unsigned long end)
 	unsigned long addr = (unsigned long) arg;
 	R4600_HIT_CACHEOP_WAR_IMPL;
+	BCM4710_PROTECTED_FILL_TLB(addr);
+	BCM4710_PROTECTED_FILL_TLB(addr + 4);
 	protected_writeback_dcache_line(addr & ~(dc_lsize - 1));
 	if (!cpu_icache_snoops_remote_store && scache_size)
@@ -1173,6 +1190,15 @@
 	/* Default cache error handler for R4000 and R5000 family */
 	set_uncached_handler (0x100, &except_vec2_generic, 0x80);
+	/* Check if special workarounds are required */
+#ifdef CONFIG_BCM947XX
+	if (current_cpu_data.cputype == CPU_BCM4710 && (current_cpu_data.processor_id & 0xff) == 0) {
+		printk("Enabling BCM4710A0 cache workarounds.\n");
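For context on the c-r4k.c hunks above: the added assignments point the r4k_blast_dcache* hooks at the plain, non-unrolled helpers before the usual line-size dispatch runs, so an affected BCM4710 core never ends up on the unrolled variants. A minimal standalone sketch of that setup pattern, with illustrative names only (bcm4710_workaround, blast_page_safe, blast_page_unrolled are not the kernel's identifiers):

#include <stdio.h>

/* Toy model of the blast-page setup override: a global workaround flag
 * forces the safe, non-unrolled helper regardless of the detected
 * d-cache line size. */
static int bcm4710_workaround;
static void (*blast_page)(unsigned long);

static void blast_page_safe(unsigned long page)     { printf("safe blast of %#lx\n", page); }
static void blast_page_unrolled(unsigned long page) { printf("unrolled blast of %#lx\n", page); }
static void blast_page_noop(unsigned long page)     { (void)page; }

static void blast_page_setup(unsigned long dc_lsize)
{
	if (bcm4710_workaround)
		blast_page = blast_page_safe;	/* workaround path wins */
	else if (dc_lsize == 0)
		blast_page = blast_page_noop;
	else
		blast_page = blast_page_unrolled;
}

int main(void)
{
	bcm4710_workaround = 1;	/* as if a BCM4710 rev A0 had been probed */
	blast_page_setup(32);
	blast_page(0x80001000UL);
	return 0;
}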
Index: linux-2.6.22-rc4/arch/mips/mm/tlbex.c
===================================================================
--- linux-2.6.22-rc4.orig/arch/mips/mm/tlbex.c	2007-06-10 21:33:12.000000000 +0100
+++ linux-2.6.22-rc4/arch/mips/mm/tlbex.c	2007-06-10 21:33:19.000000000 +0100
@@ -1229,6 +1229,10 @@
+#ifdef CONFIG_BCM947XX
 static void __init build_r4000_tlb_refill_handler(void)
 	u32 *p = tlb_handler;
@@ -1243,6 +1247,12 @@
 	memset(relocs, 0, sizeof(relocs));
 	memset(final_handler, 0, sizeof(final_handler));
+#ifdef CONFIG_BCM947XX
 	 * create the plain linear handler
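The tlbex.c hunks above are guarded by CONFIG_BCM947XX and sit just before the plain linear TLB refill handler is generated. As a rough, self-contained illustration of the general technique (conditionally emitting extra words at the head of a handler that is assembled at runtime), here is a toy emitter; it does not use the kernel's real handler-builder interface and the encodings are placeholders:

#include <stdio.h>
#include <stdint.h>

/* Toy stand-in for a runtime handler builder: when a board-specific
 * workaround flag is set, extra prologue words are emitted before the
 * normal handler body. */
static uint32_t handler[32];

static void emit(uint32_t **p, uint32_t insn)
{
	*(*p)++ = insn;
}

static void build_refill_handler(int need_workaround)
{
	uint32_t *p = handler;

	if (need_workaround)
		emit(&p, 0x00000000);	/* placeholder for the extra instruction */

	emit(&p, 0xdeadbeef);		/* placeholder for the normal handler body */
	printf("emitted %ld words\n", (long)(p - handler));
}

int main(void)
{
	build_refill_handler(1);
	return 0;
}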
Index: linux-2.6.22-rc4/include/asm-mips/r4kcache.h
===================================================================
--- linux-2.6.22-rc4.orig/include/asm-mips/r4kcache.h	2007-06-10 21:32:12.000000000 +0100
+++ linux-2.6.22-rc4/include/asm-mips/r4kcache.h	2007-06-10 21:33:19.000000000 +0100
 #include <asm/cpu-features.h>
 #include <asm/mipsmtregs.h>
+#ifdef CONFIG_BCM947XX
+#include <asm/paccess.h>
+#include <linux/ssb/ssb.h>
+#define BCM4710_DUMMY_RREG() ((void) *((u8 *) KSEG1ADDR(SSB_ENUM_BASE + SSB_IMSTATE)))
+
+#define BCM4710_FILL_TLB(addr) (*(volatile unsigned long *)(addr))
+#define BCM4710_PROTECTED_FILL_TLB(addr) ({ unsigned long x; get_dbe(x, (volatile unsigned long *)(addr)); })
+#else
+#define BCM4710_DUMMY_RREG()
+
+#define BCM4710_FILL_TLB(addr)
+#define BCM4710_PROTECTED_FILL_TLB(addr)
+#endif
  * This macro return a properly sign-extended address suitable as base address
  * for indexed cache operations.  Two issues here:
 static inline void flush_dcache_line_indexed(unsigned long addr)
+	BCM4710_DUMMY_RREG();
 	cache_op(Index_Writeback_Inv_D, addr);
 static inline void flush_dcache_line(unsigned long addr)
+	BCM4710_DUMMY_RREG();
 	cache_op(Hit_Writeback_Inv_D, addr);
 static inline void invalidate_dcache_line(unsigned long addr)
+	BCM4710_DUMMY_RREG();
 	cache_op(Hit_Invalidate_D, addr);
 static inline void protected_flush_icache_line(unsigned long addr)
+	BCM4710_DUMMY_RREG();
 	protected_cache_op(Hit_Invalidate_I, addr);
 static inline void protected_writeback_dcache_line(unsigned long addr)
+	BCM4710_DUMMY_RREG();
 	protected_cache_op(Hit_Writeback_Inv_D, addr);
+static inline void blast_dcache(void)
+	unsigned long start = KSEG0;
+	unsigned long dcache_size = current_cpu_data.dcache.waysize * current_cpu_data.dcache.ways;
+	unsigned long end = (start + dcache_size);
+		BCM4710_DUMMY_RREG();
+		cache_op(Index_Writeback_Inv_D, start);
+		start += current_cpu_data.dcache.linesz;
+	} while(start < end);
+static inline void blast_dcache_page(unsigned long page)
+	unsigned long start = page;
+	unsigned long end = start + PAGE_SIZE;
+	BCM4710_FILL_TLB(start);
+		BCM4710_DUMMY_RREG();
+		cache_op(Hit_Writeback_Inv_D, start);
+		start += current_cpu_data.dcache.linesz;
+	} while(start < end);
+static inline void blast_dcache_page_indexed(unsigned long page)
+	unsigned long start = page;
+	unsigned long end = start + PAGE_SIZE;
+	unsigned long ws_inc = 1UL << current_cpu_data.dcache.waybit;
+	unsigned long ws_end = current_cpu_data.dcache.ways <<
+	                       current_cpu_data.dcache.waybit;
+	unsigned long ws, addr;
+	for (ws = 0; ws < ws_end; ws += ws_inc) {
+		for (addr = start; addr < end; addr += current_cpu_data.dcache.linesz) {
+			BCM4710_DUMMY_RREG();
+			cache_op(Index_Writeback_Inv_D, addr);
 /* build blast_xxx, blast_xxx_page, blast_xxx_page_indexed */
-#define __BUILD_BLAST_CACHE(pfx, desc, indexop, hitop, lsize) \
+#define __BUILD_BLAST_CACHE(pfx, desc, indexop, hitop, lsize, war) \
 static inline void blast_##pfx##cache##lsize(void) \
 	unsigned long start = INDEX_BASE; \
 	__##pfx##flush_prologue \
 	for (ws = 0; ws < ws_end; ws += ws_inc) \
 		for (addr = start; addr < end; addr += lsize * 32) \
 			cache##lsize##_unroll32(addr|ws,indexop); \
 	__##pfx##flush_prologue \
 		cache##lsize##_unroll32(start,hitop); \
 		start += lsize * 32; \
 			current_cpu_data.desc.waybit; \
 	unsigned long ws, addr; \
 	__##pfx##flush_prologue \
 	for (ws = 0; ws < ws_end; ws += ws_inc) \
@@ -393,28 +460,30 @@
 	__##pfx##flush_epilogue \
-__BUILD_BLAST_CACHE(d, dcache, Index_Writeback_Inv_D, Hit_Writeback_Inv_D, 16)
-__BUILD_BLAST_CACHE(i, icache, Index_Invalidate_I, Hit_Invalidate_I, 16)
-__BUILD_BLAST_CACHE(s, scache, Index_Writeback_Inv_SD, Hit_Writeback_Inv_SD, 16)
-__BUILD_BLAST_CACHE(d, dcache, Index_Writeback_Inv_D, Hit_Writeback_Inv_D, 32)
-__BUILD_BLAST_CACHE(i, icache, Index_Invalidate_I, Hit_Invalidate_I, 32)
-__BUILD_BLAST_CACHE(s, scache, Index_Writeback_Inv_SD, Hit_Writeback_Inv_SD, 32)
-__BUILD_BLAST_CACHE(i, icache, Index_Invalidate_I, Hit_Invalidate_I, 64)
-__BUILD_BLAST_CACHE(s, scache, Index_Writeback_Inv_SD, Hit_Writeback_Inv_SD, 64)
-__BUILD_BLAST_CACHE(s, scache, Index_Writeback_Inv_SD, Hit_Writeback_Inv_SD, 128)
+__BUILD_BLAST_CACHE(d, dcache, Index_Writeback_Inv_D, Hit_Writeback_Inv_D, 16, )
+__BUILD_BLAST_CACHE(i, icache, Index_Invalidate_I, Hit_Invalidate_I, 16, BCM4710_FILL_TLB(start);)
+__BUILD_BLAST_CACHE(s, scache, Index_Writeback_Inv_SD, Hit_Writeback_Inv_SD, 16, )
+__BUILD_BLAST_CACHE(d, dcache, Index_Writeback_Inv_D, Hit_Writeback_Inv_D, 32, )
+__BUILD_BLAST_CACHE(i, icache, Index_Invalidate_I, Hit_Invalidate_I, 32, BCM4710_FILL_TLB(start);)
+__BUILD_BLAST_CACHE(s, scache, Index_Writeback_Inv_SD, Hit_Writeback_Inv_SD, 32, )
+__BUILD_BLAST_CACHE(i, icache, Index_Invalidate_I, Hit_Invalidate_I, 64, BCM4710_FILL_TLB(start);)
+__BUILD_BLAST_CACHE(s, scache, Index_Writeback_Inv_SD, Hit_Writeback_Inv_SD, 64, )
+__BUILD_BLAST_CACHE(s, scache, Index_Writeback_Inv_SD, Hit_Writeback_Inv_SD, 128, )
 /* build blast_xxx_range, protected_blast_xxx_range */
-#define __BUILD_BLAST_CACHE_RANGE(pfx, desc, hitop, prot) \
+#define __BUILD_BLAST_CACHE_RANGE(pfx, desc, hitop, prot, war, war2) \
 static inline void prot##blast_##pfx##cache##_range(unsigned long start, \
 	unsigned long lsize = cpu_##desc##_line_size(); \
 	unsigned long addr = start & ~(lsize - 1); \
 	unsigned long aend = (end - 1) & ~(lsize - 1); \
 	__##pfx##flush_prologue \
 		prot##cache_op(hitop, addr); \
@@ -424,13 +493,13 @@
 	__##pfx##flush_epilogue \
-__BUILD_BLAST_CACHE_RANGE(d, dcache, Hit_Writeback_Inv_D, protected_)
-__BUILD_BLAST_CACHE_RANGE(s, scache, Hit_Writeback_Inv_SD, protected_)
-__BUILD_BLAST_CACHE_RANGE(i, icache, Hit_Invalidate_I, protected_)
-__BUILD_BLAST_CACHE_RANGE(d, dcache, Hit_Writeback_Inv_D, )
-__BUILD_BLAST_CACHE_RANGE(s, scache, Hit_Writeback_Inv_SD, )
+__BUILD_BLAST_CACHE_RANGE(d, dcache, Hit_Writeback_Inv_D, protected_, BCM4710_PROTECTED_FILL_TLB(addr); BCM4710_PROTECTED_FILL_TLB(aend);, BCM4710_DUMMY_RREG();)
+__BUILD_BLAST_CACHE_RANGE(s, scache, Hit_Writeback_Inv_SD, protected_,, )
+__BUILD_BLAST_CACHE_RANGE(i, icache, Hit_Invalidate_I, protected_,, )
+__BUILD_BLAST_CACHE_RANGE(d, dcache, Hit_Writeback_Inv_D,, BCM4710_FILL_TLB(addr); BCM4710_FILL_TLB(aend);, BCM4710_DUMMY_RREG();)
+__BUILD_BLAST_CACHE_RANGE(s, scache, Hit_Writeback_Inv_SD,,, )
 /* blast_inv_dcache_range */
-__BUILD_BLAST_CACHE_RANGE(inv_d, dcache, Hit_Invalidate_D, )
-__BUILD_BLAST_CACHE_RANGE(inv_s, scache, Hit_Invalidate_SD, )
+__BUILD_BLAST_CACHE_RANGE(inv_d, dcache, Hit_Invalidate_D,,, BCM4710_DUMMY_RREG();)
+__BUILD_BLAST_CACHE_RANGE(inv_s, scache, Hit_Invalidate_SD,,, )
 #endif /* _ASM_R4KCACHE_H */
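The r4kcache.h changes above thread an extra workaround argument (war, plus war2 for the range variants) through the __BUILD_BLAST_CACHE* generator macros: the icache and dcache users that need the BCM4710 fix pass BCM4710_FILL_TLB(start); or BCM4710_PROTECTED_FILL_TLB(...);, everyone else passes an empty argument and no code is emitted. A small standalone model of that token-splicing pattern (TOUCH and BUILD_BLAST are made-up names, not the kernel macros):

#include <stdio.h>

/* The extra macro parameter is spliced verbatim into the generated
 * function body; an empty argument therefore generates nothing. */
#define TOUCH(addr)	printf("prime TLB for %#lx\n", (unsigned long)(addr))

#define BUILD_BLAST(name, war)						\
static void blast_##name(unsigned long start, unsigned long end)	\
{									\
	war								\
	while (start < end) {						\
		printf("cache op at %#lx\n", start);			\
		start += 32;						\
	}								\
}

BUILD_BLAST(icache, TOUCH(start);)	/* workaround spliced in */
BUILD_BLAST(dcache, )			/* empty argument: no extra code */

int main(void)
{
	blast_icache(0x1000, 0x1080);
	blast_dcache(0x2000, 0x2080);
	return 0;
}

Passing an empty macro argument is the whole trick: configurations that do not enable the BCM4710 macros pay no cost, while affected ones get a TLB-priming access or dummy uncached read ahead of each run of cache ops.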
Index: linux-2.6.22-rc4/include/asm-mips/stackframe.h
===================================================================
--- linux-2.6.22-rc4.orig/include/asm-mips/stackframe.h	2007-06-10 21:32:12.000000000 +0100
+++ linux-2.6.22-rc4/include/asm-mips/stackframe.h	2007-06-10 21:33:19.000000000 +0100
 		.macro	RESTORE_SP_AND_RET
 		LONG_L	sp, PT_R29(sp)
+#ifdef CONFIG_BCM947XX