author		Zachary Amsden <zach@vmware.com>	2005-09-03 15:56:36 -0700
committer	Linus Torvalds <torvalds@evo.osdl.org>	2005-09-05 00:06:11 -0700
commit		4bb0d3ec3e5b1e9e2399cdc641b3b6521ac9cdaa (patch)
tree		5e8d7646f5c6a2cec990b6d591f230d496b20664 /include/asm-i386/xor.h
parent		2a0694d15d55d0deed928786a6393d5e45e37d76 (diff)
[PATCH] i386: inline asm cleanup
i386 Inline asm cleanup.  Use cr/dr accessor functions.

Also, a potential bugfix.  Also, some CR accessors really should be volatile.  Reads from CR0 (numeric state may change in an exception handler), writes to CR4 (flipping CR4.TSD) and reads from CR2 (page fault) prevent instruction re-ordering.

I did not add a memory clobber to the CR3 / CR4 / CR0 updates, as it was not there to begin with, and in no case should kernel memory be clobbered, except when doing a TLB flush, which already has a memory clobber.

I noticed that page invalidation does not have a memory clobber.  I can't find a bug as a result, but there is definitely a potential for a bug here:

#define __flush_tlb_single(addr) \
	__asm__ __volatile__("invlpg %0": :"m" (*(char *) addr))

Signed-off-by: Zachary Amsden <zach@vmware.com>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
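For reference, a minimal sketch of the cr accessor style this patch moves to; the real definitions live in include/asm-i386/processor.h and may differ in detail:

	/* Sketch only, not the actual kernel definitions. */
	static inline unsigned long read_cr0(void)
	{
		unsigned long val;
		/* volatile: CR0 (e.g. the TS bit) can change in an
		 * exception handler, so the read must not be cached
		 * or reordered away. */
		__asm__ __volatile__("movl %%cr0,%0" : "=r" (val));
		return val;
	}

	static inline void write_cr0(unsigned long val)
	{
		__asm__ __volatile__("movl %0,%%cr0" : : "r" (val));
	}

	static inline void clts(void)
	{
		/* Clear CR0.TS so FPU/SSE instructions do not fault. */
		__asm__ __volatile__("clts");
	}

And a hypothetical hardened __flush_tlb_single with the memory clobber the last paragraph argues for; this is not part of this patch:

	/* Hypothetical, not in this patch: the "memory" clobber keeps
	 * the compiler from moving memory accesses across the
	 * invalidation. */
	#define __flush_tlb_single(addr) \
		__asm__ __volatile__("invlpg %0" : : "m" (*(char *) addr) : "memory")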
Diffstat (limited to 'include/asm-i386/xor.h')
-rw-r--r--	include/asm-i386/xor.h	26
 1 file changed, 13 insertions(+), 13 deletions(-)
diff --git a/include/asm-i386/xor.h b/include/asm-i386/xor.h
index f80e2dbe1b56..23c86cef3b25 100644
--- a/include/asm-i386/xor.h
+++ b/include/asm-i386/xor.h
@@ -535,14 +535,14 @@ static struct xor_block_template xor_block_p5_mmx = {
 
 #define XMMS_SAVE do { \
 	preempt_disable(); \
+	cr0 = read_cr0(); \
+	clts(); \
 	__asm__ __volatile__ ( \
-		"movl %%cr0,%0		;\n\t" \
-		"clts			;\n\t" \
-		"movups %%xmm0,(%1)	;\n\t" \
-		"movups %%xmm1,0x10(%1)	;\n\t" \
-		"movups %%xmm2,0x20(%1)	;\n\t" \
-		"movups %%xmm3,0x30(%1)	;\n\t" \
-		: "=&r" (cr0) \
+		"movups %%xmm0,(%0)	;\n\t" \
+		"movups %%xmm1,0x10(%0)	;\n\t" \
+		"movups %%xmm2,0x20(%0)	;\n\t" \
+		"movups %%xmm3,0x30(%0)	;\n\t" \
+		: \
 		: "r" (xmm_save) \
 		: "memory"); \
 } while(0)
@@ -550,14 +550,14 @@ static struct xor_block_template xor_block_p5_mmx = {
 
 #define XMMS_RESTORE do { \
 	__asm__ __volatile__ ( \
 		"sfence			;\n\t" \
-		"movups (%1),%%xmm0	;\n\t" \
-		"movups 0x10(%1),%%xmm1	;\n\t" \
-		"movups 0x20(%1),%%xmm2	;\n\t" \
-		"movups 0x30(%1),%%xmm3	;\n\t" \
-		"movl %0,%%cr0		;\n\t" \
+		"movups (%0),%%xmm0	;\n\t" \
+		"movups 0x10(%0),%%xmm1	;\n\t" \
+		"movups 0x20(%0),%%xmm2	;\n\t" \
+		"movups 0x30(%0),%%xmm3	;\n\t" \
 		: \
-		: "r" (cr0), "r" (xmm_save) \
+		: "r" (xmm_save) \
 		: "memory"); \
+	write_cr0(cr0); \
 	preempt_enable(); \
 } while(0)
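Pieced together from the two hunks above, the macros as they read after this patch; cr0 and xmm_save are locals declared by the XOR routines elsewhere in xor.h, and dropping the "=&r" (cr0) output operand is what renumbers xmm_save from %1 to %0:

	#define XMMS_SAVE do { \
		preempt_disable(); \
		cr0 = read_cr0(); \
		clts(); \
		__asm__ __volatile__ ( \
			"movups %%xmm0,(%0)	;\n\t" \
			"movups %%xmm1,0x10(%0)	;\n\t" \
			"movups %%xmm2,0x20(%0)	;\n\t" \
			"movups %%xmm3,0x30(%0)	;\n\t" \
			: \
			: "r" (xmm_save) \
			: "memory"); \
	} while(0)

	#define XMMS_RESTORE do { \
		__asm__ __volatile__ ( \
			"sfence			;\n\t" \
			"movups (%0),%%xmm0	;\n\t" \
			"movups 0x10(%0),%%xmm1	;\n\t" \
			"movups 0x20(%0),%%xmm2	;\n\t" \
			"movups 0x30(%0),%%xmm3	;\n\t" \
			: \
			: "r" (xmm_save) \
			: "memory"); \
		write_cr0(cr0); \
		preempt_enable(); \
	} while(0)

Moving the CR0 save, clts, and restore out of the asm and into the C-level accessors leaves the asm blocks doing only the XMM spills and fills, and write_cr0(cr0) puts TS back exactly as it was found before preemption is re-enabled.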