Diffstat (limited to 'target/linux/brcm47xx/patches-4.0/159-cpu_fixes.patch')
-rw-r--r--  target/linux/brcm47xx/patches-4.0/159-cpu_fixes.patch  32
1 file changed, 16 insertions(+), 16 deletions(-)
diff --git a/target/linux/brcm47xx/patches-4.0/159-cpu_fixes.patch b/target/linux/brcm47xx/patches-4.0/159-cpu_fixes.patch
index 214534577e..38c64318f1 100644
--- a/target/linux/brcm47xx/patches-4.0/159-cpu_fixes.patch
+++ b/target/linux/brcm47xx/patches-4.0/159-cpu_fixes.patch
@@ -1,6 +1,6 @@
--- a/arch/mips/include/asm/r4kcache.h
+++ b/arch/mips/include/asm/r4kcache.h
-@@ -23,6 +23,20 @@
+@@ -25,6 +25,20 @@
extern void (*r4k_blast_dcache)(void);
extern void (*r4k_blast_icache)(void);
@@ -21,7 +21,7 @@
/*
* This macro return a properly sign-extended address suitable as base address
* for indexed cache operations. Two issues here:
-@@ -96,6 +110,7 @@ static inline void flush_icache_line_ind
+@@ -98,6 +112,7 @@ static inline void flush_icache_line_ind
static inline void flush_dcache_line_indexed(unsigned long addr)
{
__dflush_prologue
@@ -29,7 +29,7 @@
cache_op(Index_Writeback_Inv_D, addr);
__dflush_epilogue
}
-@@ -123,6 +138,7 @@ static inline void flush_icache_line(uns
+@@ -125,6 +140,7 @@ static inline void flush_icache_line(uns
static inline void flush_dcache_line(unsigned long addr)
{
__dflush_prologue
@@ -37,7 +37,7 @@
cache_op(Hit_Writeback_Inv_D, addr);
__dflush_epilogue
}
-@@ -130,6 +146,7 @@ static inline void flush_dcache_line(uns
+@@ -132,6 +148,7 @@ static inline void flush_dcache_line(uns
static inline void invalidate_dcache_line(unsigned long addr)
{
__dflush_prologue
@@ -45,7 +45,7 @@
cache_op(Hit_Invalidate_D, addr);
__dflush_epilogue
}
-@@ -185,6 +202,7 @@ static inline void protected_flush_icach
+@@ -187,6 +204,7 @@ static inline void protected_flush_icach
#ifdef CONFIG_EVA
protected_cachee_op(Hit_Invalidate_I, addr);
#else
@@ -53,7 +53,7 @@
protected_cache_op(Hit_Invalidate_I, addr);
#endif
break;
-@@ -199,6 +217,7 @@ static inline void protected_flush_icach
+@@ -201,6 +219,7 @@ static inline void protected_flush_icach
*/
static inline void protected_writeback_dcache_line(unsigned long addr)
{
@@ -61,7 +61,7 @@
#ifdef CONFIG_EVA
protected_cachee_op(Hit_Writeback_Inv_D, addr);
#else
-@@ -553,8 +572,51 @@ static inline void invalidate_tcache_pag
+@@ -554,8 +573,51 @@ static inline void invalidate_tcache_pag
: "r" (base), \
"i" (op));
@@ -114,7 +114,7 @@
static inline void extra##blast_##pfx##cache##lsize(void) \
{ \
unsigned long start = INDEX_BASE; \
-@@ -566,6 +628,7 @@ static inline void extra##blast_##pfx##c
+@@ -567,6 +629,7 @@ static inline void extra##blast_##pfx##c
\
__##pfx##flush_prologue \
\
@@ -122,7 +122,7 @@
for (ws = 0; ws < ws_end; ws += ws_inc) \
for (addr = start; addr < end; addr += lsize * 32) \
cache##lsize##_unroll32(addr|ws, indexop); \
-@@ -580,6 +643,7 @@ static inline void extra##blast_##pfx##c
+@@ -581,6 +644,7 @@ static inline void extra##blast_##pfx##c
\
__##pfx##flush_prologue \
\
@@ -130,7 +130,7 @@
do { \
cache##lsize##_unroll32(start, hitop); \
start += lsize * 32; \
-@@ -598,6 +662,8 @@ static inline void extra##blast_##pfx##c
+@@ -599,6 +663,8 @@ static inline void extra##blast_##pfx##c
current_cpu_data.desc.waybit; \
unsigned long ws, addr; \
\
@@ -139,7 +139,7 @@
__##pfx##flush_prologue \
\
for (ws = 0; ws < ws_end; ws += ws_inc) \
-@@ -607,26 +673,26 @@ static inline void extra##blast_##pfx##c
+@@ -608,26 +674,26 @@ static inline void extra##blast_##pfx##c
__##pfx##flush_epilogue \
}
@@ -186,7 +186,7 @@
#define __BUILD_BLAST_USER_CACHE(pfx, desc, indexop, hitop, lsize) \
static inline void blast_##pfx##cache##lsize##_user_page(unsigned long page) \
-@@ -655,17 +721,19 @@ __BUILD_BLAST_USER_CACHE(d, dcache, Inde
+@@ -656,17 +722,19 @@ __BUILD_BLAST_USER_CACHE(d, dcache, Inde
__BUILD_BLAST_USER_CACHE(i, icache, Index_Invalidate_I, Hit_Invalidate_I, 64)
/* build blast_xxx_range, protected_blast_xxx_range */
@@ -207,7 +207,7 @@
prot##cache_op(hitop, addr); \
if (addr == aend) \
break; \
-@@ -677,8 +745,8 @@ static inline void prot##extra##blast_##
+@@ -678,8 +746,8 @@ static inline void prot##extra##blast_##
#ifndef CONFIG_EVA
@@ -218,7 +218,7 @@
#else
-@@ -715,14 +783,14 @@ __BUILD_PROT_BLAST_CACHE_RANGE(d, dcache
+@@ -716,14 +784,14 @@ __BUILD_PROT_BLAST_CACHE_RANGE(d, dcache
__BUILD_PROT_BLAST_CACHE_RANGE(i, icache, Hit_Invalidate_I)
#endif
@@ -369,7 +369,7 @@
/*
--- a/arch/mips/mm/tlbex.c
+++ b/arch/mips/mm/tlbex.c
-@@ -1286,6 +1286,9 @@ static void build_r4000_tlb_refill_handl
+@@ -1269,6 +1269,9 @@ static void build_r4000_tlb_refill_handl
/* No need for uasm_i_nop */
}
@@ -379,7 +379,7 @@
#ifdef CONFIG_64BIT
build_get_pmde64(&p, &l, &r, K0, K1); /* get pmd in K1 */
#else
-@@ -1848,6 +1851,9 @@ build_r4000_tlbchange_handler_head(u32 *
+@@ -1831,6 +1834,9 @@ build_r4000_tlbchange_handler_head(u32 *
{
struct work_registers wr = build_get_work_registers(p);