path: root/include/asm-mips/r4kcache.h
author		Atsushi Nemoto <anemo@mba.ocn.ne.jp>	2006-02-10 00:39:06 +0900
committer	Ralf Baechle <ralf@linux-mips.org>	2006-02-14 19:13:24 +0000
commit		41700e73995d6c814932cb55e12525bd34be1ca5 (patch)
tree		559759cbda13bb50720cfd0e317fa65a467d59fa	/include/asm-mips/r4kcache.h
parent		63077519899721120b61d663a68adced068a459d (diff)
download	linux-41700e73995d6c814932cb55e12525bd34be1ca5.tar.bz2
[MIPS] Add protected_blast_icache_range, blast_icache_range, etc.
Add blast_xxx_range(), protected_blast_xxx_range(), etc. for common use. They are built by __BUILD_BLAST_CACHE_RANGE(). Use the protected_cache_op() macro for the various protected_ routines. The output code should be logically the same.

Signed-off-by: Atsushi Nemoto <anemo@mba.ocn.ne.jp>
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
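For illustration, a minimal sketch of how a caller can use the new range helpers once this header is included. The wrapper function below is hypothetical and not part of the patch; it assumes kernel virtual addresses:

#include <asm/r4kcache.h>

/*
 * Hypothetical helper: write back a freshly written code buffer from the
 * dcache and invalidate the matching icache lines, using the range
 * helpers introduced by this patch.
 */
static void sync_icache_for_buffer(unsigned long start, unsigned long size)
{
	unsigned long end = start + size;

	blast_dcache_range(start, end);	/* Hit_Writeback_Inv_D over the range */
	blast_icache_range(start, end);	/* Hit_Invalidate_I over the range */
}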
Diffstat (limited to 'include/asm-mips/r4kcache.h')
-rw-r--r--	include/asm-mips/r4kcache.h	74
1 file changed, 41 insertions(+), 33 deletions(-)
diff --git a/include/asm-mips/r4kcache.h b/include/asm-mips/r4kcache.h
index cc53196efa40..9632c27dad15 100644
--- a/include/asm-mips/r4kcache.h
+++ b/include/asm-mips/r4kcache.h
@@ -14,6 +14,7 @@
#include <asm/asm.h>
#include <asm/cacheops.h>
+#include <asm/cpu-features.h>
/*
* This macro return a properly sign-extended address suitable as base address
@@ -78,22 +79,25 @@ static inline void flush_scache_line(unsigned long addr)
cache_op(Hit_Writeback_Inv_SD, addr);
}
+#define protected_cache_op(op,addr) \
+ __asm__ __volatile__( \
+ " .set push \n" \
+ " .set noreorder \n" \
+ " .set mips3 \n" \
+ "1: cache %0, (%1) \n" \
+ "2: .set pop \n" \
+ " .section __ex_table,\"a\" \n" \
+ " "STR(PTR)" 1b, 2b \n" \
+ " .previous" \
+ : \
+ : "i" (op), "r" (addr))
+
/*
* The next two are for badland addresses like signal trampolines.
*/
static inline void protected_flush_icache_line(unsigned long addr)
{
- __asm__ __volatile__(
- " .set push \n"
- " .set noreorder \n"
- " .set mips3 \n"
- "1: cache %0, (%1) \n"
- "2: .set pop \n"
- " .section __ex_table,\"a\" \n"
- " "STR(PTR)" 1b, 2b \n"
- " .previous"
- :
- : "i" (Hit_Invalidate_I), "r" (addr));
+ protected_cache_op(Hit_Invalidate_I, addr);
}
/*
@@ -104,32 +108,12 @@ static inline void protected_flush_icache_line(unsigned long addr)
*/
static inline void protected_writeback_dcache_line(unsigned long addr)
{
- __asm__ __volatile__(
- " .set push \n"
- " .set noreorder \n"
- " .set mips3 \n"
- "1: cache %0, (%1) \n"
- "2: .set pop \n"
- " .section __ex_table,\"a\" \n"
- " "STR(PTR)" 1b, 2b \n"
- " .previous"
- :
- : "i" (Hit_Writeback_Inv_D), "r" (addr));
+ protected_cache_op(Hit_Writeback_Inv_D, addr);
}
static inline void protected_writeback_scache_line(unsigned long addr)
{
- __asm__ __volatile__(
- " .set push \n"
- " .set noreorder \n"
- " .set mips3 \n"
- "1: cache %0, (%1) \n"
- "2: .set pop \n"
- " .section __ex_table,\"a\" \n"
- " "STR(PTR)" 1b, 2b \n"
- " .previous"
- :
- : "i" (Hit_Writeback_Inv_SD), "r" (addr));
+ protected_cache_op(Hit_Writeback_Inv_SD, addr);
}
/*
@@ -295,4 +279,28 @@ __BUILD_BLAST_CACHE(i, icache, Index_Invalidate_I, Hit_Invalidate_I, 64)
__BUILD_BLAST_CACHE(s, scache, Index_Writeback_Inv_SD, Hit_Writeback_Inv_SD, 64)
__BUILD_BLAST_CACHE(s, scache, Index_Writeback_Inv_SD, Hit_Writeback_Inv_SD, 128)
+/* build blast_xxx_range, protected_blast_xxx_range */
+#define __BUILD_BLAST_CACHE_RANGE(pfx, desc, hitop, prot) \
+static inline void prot##blast_##pfx##cache##_range(unsigned long start, \
+ unsigned long end) \
+{ \
+ unsigned long lsize = cpu_##desc##_line_size(); \
+ unsigned long addr = start & ~(lsize - 1); \
+ unsigned long aend = (end - 1) & ~(lsize - 1); \
+ while (1) { \
+ prot##cache_op(hitop, addr); \
+ if (addr == aend) \
+ break; \
+ addr += lsize; \
+ } \
+}
+
+__BUILD_BLAST_CACHE_RANGE(d, dcache, Hit_Writeback_Inv_D, protected_)
+__BUILD_BLAST_CACHE_RANGE(s, scache, Hit_Writeback_Inv_SD, protected_)
+__BUILD_BLAST_CACHE_RANGE(i, icache, Hit_Invalidate_I, protected_)
+__BUILD_BLAST_CACHE_RANGE(d, dcache, Hit_Writeback_Inv_D, )
+__BUILD_BLAST_CACHE_RANGE(s, scache, Hit_Writeback_Inv_SD, )
+/* blast_inv_dcache_range */
+__BUILD_BLAST_CACHE_RANGE(inv_d, dcache, Hit_Invalidate_D, )
+
#endif /* _ASM_R4KCACHE_H */
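For reference, the instantiation __BUILD_BLAST_CACHE_RANGE(i, icache, Hit_Invalidate_I, protected_) added above expands to roughly the following inline function. This is a sketch of the preprocessor output for one case, not an addition to the patch:

static inline void protected_blast_icache_range(unsigned long start,
	unsigned long end)
{
	unsigned long lsize = cpu_icache_line_size();
	unsigned long addr = start & ~(lsize - 1);
	unsigned long aend = (end - 1) & ~(lsize - 1);

	while (1) {
		/* protected_cache_op() tolerates faults via the __ex_table fixup */
		protected_cache_op(Hit_Invalidate_I, addr);
		if (addr == aend)
			break;
		addr += lsize;
	}
}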