diff -urN linux.old/arch/mips/mm/c-r4k.c linux.dev/arch/mips/mm/c-r4k.c
--- linux.old/arch/mips/mm/c-r4k.c	2005-05-28 17:42:06.000000000 +0200
+++ linux.dev/arch/mips/mm/c-r4k.c	2005-05-29 18:26:34.000000000 +0200
 #include <linux/bitops.h>
+#include "../bcm947xx/include/typedefs.h"
+#include "../bcm947xx/include/sbconfig.h"
+#include <asm/paccess.h>
 #include <asm/bcache.h>
 #include <asm/bootinfo.h>
 #include <asm/cacheops.h>
 	addr = start & ~(dc_lsize - 1);
 	aend = (end - 1) & ~(dc_lsize - 1);
+#ifdef CONFIG_BCM4710
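+	/* BCM4710: touch both ends of the range so TLB entries exist before the cache ops below. */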
+	BCM4710_FILL_TLB(addr);
+	BCM4710_FILL_TLB(aend);
 		/* Hit_Writeback_Inv_D */
 		protected_writeback_dcache_line(addr);
 	R4600_HIT_CACHEOP_WAR_IMPL;
 	a = addr & ~(dc_lsize - 1);
 	end = (addr + size - 1) & ~(dc_lsize - 1);
+#ifdef CONFIG_BCM4710
+	BCM4710_FILL_TLB(a);
+	BCM4710_FILL_TLB(end);
 		flush_dcache_line(a);	/* Hit_Writeback_Inv_D */
 	unsigned long ic_lsize = current_cpu_data.icache.linesz;
 	unsigned long dc_lsize = current_cpu_data.dcache.linesz;
+#ifdef CONFIG_BCM4710
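+	/* BCM4710: prefill the TLB with protected loads; get_dbe() absorbs a possible bus error. */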
+	BCM4710_PROTECTED_FILL_TLB(addr);
+	BCM4710_PROTECTED_FILL_TLB(addr + 4);
 	R4600_HIT_CACHEOP_WAR_IMPL;
 	protected_writeback_dcache_line(addr & ~(dc_lsize - 1));
 	protected_flush_icache_line(addr & ~(ic_lsize - 1));
diff -urN linux.old/include/asm-mips/r4kcache.h linux.dev/include/asm-mips/r4kcache.h
--- linux.old/include/asm-mips/r4kcache.h	2005-05-28 17:42:06.000000000 +0200
+++ linux.dev/include/asm-mips/r4kcache.h	2005-05-29 18:34:46.000000000 +0200
 #include <asm/cacheops.h>
+#ifdef CONFIG_BCM4710
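+/*
+ * BCM4710 cache workaround helpers:
+ *   BCM4710_DUMMY_RREG()          - uncached read of the SB sbimstate register
+ *   BCM4710_FILL_TLB(addr)        - touch addr so a TLB entry is loaded first
+ *   BCM4710_PROTECTED_FILL_TLB(a) - same, via get_dbe() so a bus error is tolerated
+ */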
+#define BCM4710_DUMMY_RREG() (((sbconfig_t *)(KSEG1ADDR(SB_ENUM_BASE + SBCONFIGOFF)))->sbimstate)
+#define BCM4710_FILL_TLB(addr) (*(volatile unsigned long *)(addr))
+#define BCM4710_PROTECTED_FILL_TLB(addr) ({ unsigned long x; get_dbe(x, (volatile unsigned long *)(addr)); })
+#define cache_op(op,addr)					\
+	BCM4710_DUMMY_RREG();					\
+	__asm__ __volatile__(					\
+	"	.set	noreorder			\n"	\
+	"	.set	mips3\n\t			\n"	\
+	"	cache	%0, %1				\n"	\
+	: "i" (op), "m" (*(unsigned char *)(addr)))
 #define cache_op(op,addr)					\
 	__asm__ __volatile__(					\
 	"	.set	noreorder			\n"	\
 	: "i" (op), "m" (*(unsigned char *)(addr)))
 static inline void flush_icache_line_indexed(unsigned long addr)
 static inline void flush_dcache_line(unsigned long addr)
+#ifdef CONFIG_BCM4710
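+	/* BCM4710: dummy uncached read before the Hit_Writeback_Inv_D cache op. */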
+	BCM4710_DUMMY_RREG();
 	cache_op(Hit_Writeback_Inv_D, addr);
 	unsigned long start = page;
 	unsigned long end = start + PAGE_SIZE;
+#ifdef CONFIG_BCM4710
+	BCM4710_FILL_TLB(start);
+#ifdef CONFIG_BCM4710
+		BCM4710_DUMMY_RREG();
 		cache16_unroll32(start,Hit_Invalidate_I);
 	} while (start < end);
 	unsigned long ws, addr;
 	for (ws = 0; ws < ws_end; ws += ws_inc)
-		for (addr = start; addr < end; addr += 0x400)
+		for (addr = start; addr < end; addr += 0x400) {
+#ifdef CONFIG_BCM4710
+			BCM4710_DUMMY_RREG();
 			cache32_unroll32(addr|ws,Index_Writeback_Inv_D);
 static inline void blast_dcache32_page(unsigned long page)
 	unsigned long start = page;
 	unsigned long end = start + PAGE_SIZE;
+#ifdef CONFIG_BCM4710
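+	/* BCM4710: a few nops before starting the writeback/invalidate loop. */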
+	__asm__ __volatile__("nop;nop;nop;nop");
 		cache32_unroll32(start,Hit_Writeback_Inv_D);
 	unsigned long start = page;
 	unsigned long end = start + PAGE_SIZE;
+#ifdef CONFIG_BCM4710
+	BCM4710_FILL_TLB(start);
 		cache32_unroll32(start,Hit_Invalidate_I);