author     Ingo Molnar <mingo@kernel.org>   2019-02-11 12:52:51 +0100
committer  Ingo Molnar <mingo@kernel.org>   2019-02-11 12:52:51 +0100
commit     dc14b5fe7d0a1e37e9388964614676e4166b2953
tree       4f4f3aed094446b3e5daedd8710ccd831943490f /arch/arc/lib/memset-archs.S
parent     bae54dc4f353653bbd3e3081754adad05da1d4dd
parent     d13937116f1e82bf508a6325111b322c30c85eb9
Merge tag 'v5.0-rc6' into x86/fpu, to pick up fixes
Signed-off-by: Ingo Molnar <mingo@kernel.org>
Diffstat (limited to 'arch/arc/lib/memset-archs.S')
 arch/arc/lib/memset-archs.S | 40 ++++++++++++++++++++++++++++++++--------
 1 file changed, 32 insertions(+), 8 deletions(-)
diff --git a/arch/arc/lib/memset-archs.S b/arch/arc/lib/memset-archs.S
index 62ad4bcb841a..f230bb7092fd 100644
--- a/arch/arc/lib/memset-archs.S
+++ b/arch/arc/lib/memset-archs.S
@@ -7,11 +7,39 @@
*/
#include <linux/linkage.h>
+#include <asm/cache.h>
-#undef PREALLOC_NOT_AVAIL
+/*
+ * The memset implementation below is optimized to use prefetchw and prealloc
+ * instructions on CPUs with a 64B L1 data cache line (L1_CACHE_SHIFT == 6).
+ * If you want to implement an optimized memset for other possible L1 data
+ * cache line lengths (32B and 128B), you should rewrite the code, carefully
+ * checking that we don't issue a prefetchw/prealloc instruction for L1 cache
+ * lines which don't belong to the memset area.
+ */
+
+#if L1_CACHE_SHIFT == 6
+
+.macro PREALLOC_INSTR reg, off
+ prealloc [\reg, \off]
+.endm
+
+.macro PREFETCHW_INSTR reg, off
+ prefetchw [\reg, \off]
+.endm
+
+#else
+
+.macro PREALLOC_INSTR          ; no-op when the L1 line is not 64B
+.endm
+
+.macro PREFETCHW_INSTR         ; no-op when the L1 line is not 64B
+.endm
+
+#endif /* L1_CACHE_SHIFT == 6 */
ENTRY_CFI(memset)
- prefetchw [r0] ; Prefetch the write location
+ PREFETCHW_INSTR r0, 0 ; Prefetch the first write location
mov.f 0, r2
;;; if size is zero
jz.d [blink]
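
The hunk above shows why the hints are gated on L1_CACHE_SHIFT == 6: prealloc
allocates a whole L1 line without fetching its old contents (see the "w/o
fetching" comment in the next hunk), so issuing it for a line that memset will
not fully overwrite can clobber neighbouring data. A minimal C sketch of the
containment check a 32B/128B port would need before each hint; the name
line_safe_to_prealloc is illustrative, not a kernel API:

	#include <stdbool.h>
	#include <stddef.h>
	#include <stdint.h>

	#define L1_CACHE_SHIFT 7                     /* e.g. a 128B-line CPU */
	#define L1_CACHE_BYTES (1u << L1_CACHE_SHIFT)

	/* A line may be preallocated only if the memset will overwrite all
	 * of it: prealloc claims the line without fetching its old contents,
	 * so any byte of the line outside [buf, buf + len) risks being
	 * clobbered. */
	static bool line_safe_to_prealloc(uintptr_t buf, size_t len,
					  uintptr_t target)
	{
		uintptr_t line = target & ~(uintptr_t)(L1_CACHE_BYTES - 1);

		return line >= buf && line + L1_CACHE_BYTES <= buf + len;
	}
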
@@ -48,11 +76,8 @@ ENTRY_CFI(memset)
lpnz @.Lset64bytes
;; LOOP START
-#ifdef PREALLOC_NOT_AVAIL
- prefetchw [r3, 64] ;Prefetch the next write location
-#else
- prealloc [r3, 64]
-#endif
+ PREALLOC_INSTR r3, 64 ; alloc next line w/o fetching
+
#ifdef CONFIG_ARC_HAS_LL64
std.ab r4, [r3, 8]
std.ab r4, [r3, 8]
@@ -85,7 +110,6 @@ ENTRY_CFI(memset)
lsr.f lp_count, r2, 5 ;Last remaining max 124 bytes
lpnz .Lset32bytes
;; LOOP START
- prefetchw [r3, 32] ;Prefetch the next write location
#ifdef CONFIG_ARC_HAS_LL64
std.ab r4, [r3, 8]
std.ab r4, [r3, 8]
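
After these hunks the fast path has this shape: one prefetchw for the first
write location, a prealloc one line ahead inside the 64-byte bulk loop, and a
32-byte tail loop whose unconditional prefetchw is removed by the last hunk.
A hedged C outline of that structure: memset_outline, prefetchw_line and
prealloc_line are illustrative names, and the n >= 128 guard merely makes the
containment rule from the new comment explicit, which the hand-written
assembly must uphold by other means:

	#include <stddef.h>
	#include <string.h>

	/* Illustrative stand-ins for the PREFETCHW_INSTR/PREALLOC_INSTR
	 * macros; on ARC each is a single instruction, here they are
	 * no-ops. */
	#define prefetchw_line(p) ((void)(p))
	#define prealloc_line(p)  ((void)(p))

	void *memset_outline(void *dst, int c, size_t n)
	{
		unsigned char *p = dst;

		prefetchw_line(p);            /* first write location       */
		while (n >= 64) {             /* 64-byte bulk loop          */
			if (n >= 128)         /* next line fully in region  */
				prealloc_line(p + 64);
			memset(p, c, 64);     /* stands for the std.ab pairs */
			p += 64;
			n -= 64;
		}
		while (n >= 32) {             /* 32-byte tail loop, no hint */
			memset(p, c, 32);
			p += 32;
			n -= 32;
		}
		memset(p, c, n);              /* trailing bytes             */
		return dst;
	}
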