author		Mark Rutland <mark.rutland@arm.com>	2013-08-14 09:54:54 +0100
committer	Catalin Marinas <catalin.marinas@arm.com>	2014-03-04 01:07:22 +0000
commit		bff705950e2cdcf35641dee35eb14bad9ed49e8f (patch)
tree		3c962f6ba12e9d3172720b2a81c14adf89f48264 /arch/arm64
parent		addea9ef055bf5a10f9ec967b1e0443ceb44bd83 (diff)
download	linux-bff705950e2cdcf35641dee35eb14bad9ed49e8f.tar.bz2
arm64: remove unnecessary cache flush at boot
Currently we flush the entire dcache at boot within __cpu_setup, but this is unnecessary, as the booting protocol demands that the dcache is invalid and off upon entering the kernel. The presence of the cache flush only serves to hide bugs in bootloaders, and is not safe in the presence of SMP.

In an SMP boot scenario the CPUs enter coherency outside of the kernel, and the primary CPU enables its caches before bringing up secondary CPUs. Therefore, if any secondary CPU has an entry in its cache (in violation of the boot protocol), the primary CPU might snoop it even if the secondary CPU's cache is disabled. The boot-time cache flush thus only hides a firmware bug, and slows down CPU boot unnecessarily.

This patch removes the unnecessary boot-time cache flush.

Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Acked-by: Will Deacon <will.deacon@arm.com>
[catalin.marinas@arm.com: make __flush_dcache_all local only]
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
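For context, the arm64 booting requirements referred to above are that the MMU and D-cache be off, and that no stale cache lines covering the kernel image remain, before control is passed to the kernel. A minimal, hypothetical bootloader hand-off honouring that requirement might look like the sketch below; the register assignments (x20 holding the kernel entry point, x21 the DTB address) are illustrative assumptions, not taken from this patch or from any particular firmware.

	// Hypothetical hand-off sketch (assumptions as noted above), kernel entered at EL1:
	mrs	x4, sctlr_el1
	bic	x4, x4, #(1 << 2)	// SCTLR_EL1.C = 0: D-cache off
	bic	x4, x4, #(1 << 0)	// SCTLR_EL1.M = 0: MMU off
	msr	sctlr_el1, x4
	isb
	mov	x0, x21			// x0 = physical address of the device tree blob
	mov	x1, xzr			// x1-x3 must be zero per the boot protocol
	mov	x2, xzr
	mov	x3, xzr
	br	x20			// branch to the kernel entry point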
Diffstat (limited to 'arch/arm64')
-rw-r--r--	arch/arm64/mm/cache.S	2
-rw-r--r--	arch/arm64/mm/proc.S	6
2 files changed, 1 insertion(+), 7 deletions(-)
diff --git a/arch/arm64/mm/cache.S b/arch/arm64/mm/cache.S
index 97fcef535a8a..c46f48b33c14 100644
--- a/arch/arm64/mm/cache.S
+++ b/arch/arm64/mm/cache.S
@@ -30,7 +30,7 @@
*
* Corrupted registers: x0-x7, x9-x11
*/
-ENTRY(__flush_dcache_all)
+__flush_dcache_all:
dsb sy // ensure ordering with previous memory accesses
mrs x0, clidr_el1 // read clidr
and x3, x0, #0x7000000 // extract loc from clidr
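For context (not part of the patch), the constant above isolates CLIDR_EL1.LoC, the Level of Coherency field held in bits [26:24]. A sketch of how a set/way flush routine typically turns that field into a loop bound is shown below; the `finished` label is illustrative:

	mrs	x0, clidr_el1		// read the cache level ID register
	and	x3, x0, #0x7000000	// mask LoC, bits [26:24]
	lsr	x3, x3, #23		// shift down, leaving LoC * 2 (i.e. level << 1)
	cbz	x3, finished		// LoC == 0: no cache levels to clean/invalidate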
diff --git a/arch/arm64/mm/proc.S b/arch/arm64/mm/proc.S
index 1333e6f9a8e5..e0ef63cd05dc 100644
--- a/arch/arm64/mm/proc.S
+++ b/arch/arm64/mm/proc.S
@@ -173,12 +173,6 @@ ENDPROC(cpu_do_switch_mm)
* value of the SCTLR_EL1 register.
*/
ENTRY(__cpu_setup)
- /*
- * Preserve the link register across the function call.
- */
- mov x28, lr
- bl __flush_dcache_all
- mov lr, x28
ic iallu // I+BTB cache invalidate
tlbi vmalle1is // invalidate I + D TLBs
dsb sy