author     Mark Brown <broonie@kernel.org>            2022-09-10 17:33:49 +0100
committer  Catalin Marinas <catalin.marinas@arm.com>  2022-09-16 12:38:57 +0100
commit     c0357a73fa4a96d8ed9ee46e9927d9fcbc9d0828 (patch)
tree       84f115e75eb4005865f0819c22fc4d535adc373b /arch/arm64/include
parent     3e9ae1ce508b8d69762abd1b8b9d9f97d6715b9b (diff)
download   linux-c0357a73fa4a96d8ed9ee46e9927d9fcbc9d0828.tar.bz2
arm64/sysreg: Align field names in ID_AA64DFR0_EL1 with architecture
The naming scheme the architecture uses for the fields in ID_AA64DFR0_EL1 does not align well with kernel conventions, using as it does a lot of MixedCase in various arrangements. In preparation for automatically generating the defines for this register, rename the defines used to match what is in the architecture.

Signed-off-by: Mark Brown <broonie@kernel.org>
Link: https://lore.kernel.org/r/20220910163354.860255-2-broonie@kernel.org
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
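For context, a minimal C sketch (not part of this patch, modelled on the cpufeature.h and hw_breakpoint.h hunks below) of how callers consume the renamed defines:

	/* Read the sanitised ID_AA64DFR0_EL1 value and extract the PMU
	 * version field using the renamed ID_AA64DFR0_PMUVer_* defines. */
	u64 dfr0 = read_sanitised_ftr_reg(SYS_ID_AA64DFR0_EL1);
	unsigned int pmuver =
		cpuid_feature_extract_unsigned_field(dfr0, ID_AA64DFR0_PMUVer_SHIFT);

	/* An IMPLEMENTATION DEFINED PMU (0xf) is treated as not implemented. */
	if (pmuver == ID_AA64DFR0_PMUVer_IMP_DEF)
		pmuver = 0;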
Diffstat (limited to 'arch/arm64/include')
-rw-r--r--  arch/arm64/include/asm/assembler.h       2
-rw-r--r--  arch/arm64/include/asm/cpufeature.h      2
-rw-r--r--  arch/arm64/include/asm/el2_setup.h       8
-rw-r--r--  arch/arm64/include/asm/hw_breakpoint.h   4
-rw-r--r--  arch/arm64/include/asm/sysreg.h         40
5 files changed, 28 insertions(+), 28 deletions(-)
diff --git a/arch/arm64/include/asm/assembler.h b/arch/arm64/include/asm/assembler.h
index c1fc5f7bb978..71222f2b8a5a 100644
--- a/arch/arm64/include/asm/assembler.h
+++ b/arch/arm64/include/asm/assembler.h
@@ -512,7 +512,7 @@ alternative_endif
*/
.macro reset_pmuserenr_el0, tmpreg
mrs \tmpreg, id_aa64dfr0_el1
- sbfx \tmpreg, \tmpreg, #ID_AA64DFR0_PMUVER_SHIFT, #4
+ sbfx \tmpreg, \tmpreg, #ID_AA64DFR0_PMUVer_SHIFT, #4
cmp \tmpreg, #1 // Skip if no PMU present
b.lt 9000f
msr pmuserenr_el0, xzr // Disable PMU access from EL0
diff --git a/arch/arm64/include/asm/cpufeature.h b/arch/arm64/include/asm/cpufeature.h
index 79bb9e58d9c6..e3b63967c8a9 100644
--- a/arch/arm64/include/asm/cpufeature.h
+++ b/arch/arm64/include/asm/cpufeature.h
@@ -553,7 +553,7 @@ cpuid_feature_cap_perfmon_field(u64 features, int field, u64 cap)
u64 mask = GENMASK_ULL(field + 3, field);
/* Treat IMPLEMENTATION DEFINED functionality as unimplemented */
- if (val == ID_AA64DFR0_PMUVER_IMP_DEF)
+ if (val == ID_AA64DFR0_PMUVer_IMP_DEF)
val = 0;
if (val > cap) {
diff --git a/arch/arm64/include/asm/el2_setup.h b/arch/arm64/include/asm/el2_setup.h
index b6e9bea7c9ec..03af4278bc23 100644
--- a/arch/arm64/include/asm/el2_setup.h
+++ b/arch/arm64/include/asm/el2_setup.h
@@ -40,7 +40,7 @@
.macro __init_el2_debug
mrs x1, id_aa64dfr0_el1
- sbfx x0, x1, #ID_AA64DFR0_PMUVER_SHIFT, #4
+ sbfx x0, x1, #ID_AA64DFR0_PMUVer_SHIFT, #4
cmp x0, #1
b.lt .Lskip_pmu_\@ // Skip if no PMU present
mrs x0, pmcr_el0 // Disable debug access traps
@@ -49,7 +49,7 @@
csel x2, xzr, x0, lt // all PMU counters from EL1
/* Statistical profiling */
- ubfx x0, x1, #ID_AA64DFR0_PMSVER_SHIFT, #4
+ ubfx x0, x1, #ID_AA64DFR0_PMSVer_SHIFT, #4
cbz x0, .Lskip_spe_\@ // Skip if SPE not present
mrs_s x0, SYS_PMBIDR_EL1 // If SPE available at EL2,
@@ -65,7 +65,7 @@
.Lskip_spe_\@:
/* Trace buffer */
- ubfx x0, x1, #ID_AA64DFR0_TRBE_SHIFT, #4
+ ubfx x0, x1, #ID_AA64DFR0_TraceBuffer_SHIFT, #4
cbz x0, .Lskip_trace_\@ // Skip if TraceBuffer is not present
mrs_s x0, SYS_TRBIDR_EL1
@@ -137,7 +137,7 @@
mov x0, xzr
mrs x1, id_aa64dfr0_el1
- ubfx x1, x1, #ID_AA64DFR0_PMSVER_SHIFT, #4
+ ubfx x1, x1, #ID_AA64DFR0_PMSVer_SHIFT, #4
cmp x1, #3
b.lt .Lset_debug_fgt_\@
/* Disable PMSNEVFR_EL1 read and write traps */
diff --git a/arch/arm64/include/asm/hw_breakpoint.h b/arch/arm64/include/asm/hw_breakpoint.h
index bc7aaed4b34e..d667c03d5f5e 100644
--- a/arch/arm64/include/asm/hw_breakpoint.h
+++ b/arch/arm64/include/asm/hw_breakpoint.h
@@ -142,7 +142,7 @@ static inline int get_num_brps(void)
u64 dfr0 = read_sanitised_ftr_reg(SYS_ID_AA64DFR0_EL1);
return 1 +
cpuid_feature_extract_unsigned_field(dfr0,
- ID_AA64DFR0_BRPS_SHIFT);
+ ID_AA64DFR0_BRPs_SHIFT);
}
/* Determine number of WRP registers available. */
@@ -151,7 +151,7 @@ static inline int get_num_wrps(void)
u64 dfr0 = read_sanitised_ftr_reg(SYS_ID_AA64DFR0_EL1);
return 1 +
cpuid_feature_extract_unsigned_field(dfr0,
- ID_AA64DFR0_WRPS_SHIFT);
+ ID_AA64DFR0_WRPs_SHIFT);
}
#endif /* __ASM_BREAKPOINT_H */
diff --git a/arch/arm64/include/asm/sysreg.h b/arch/arm64/include/asm/sysreg.h
index c7876363c6e5..b1e9e4d3d964 100644
--- a/arch/arm64/include/asm/sysreg.h
+++ b/arch/arm64/include/asm/sysreg.h
@@ -700,26 +700,26 @@
/* id_aa64dfr0 */
#define ID_AA64DFR0_MTPMU_SHIFT 48
-#define ID_AA64DFR0_TRBE_SHIFT 44
-#define ID_AA64DFR0_TRACE_FILT_SHIFT 40
-#define ID_AA64DFR0_DOUBLELOCK_SHIFT 36
-#define ID_AA64DFR0_PMSVER_SHIFT 32
-#define ID_AA64DFR0_CTX_CMPS_SHIFT 28
-#define ID_AA64DFR0_WRPS_SHIFT 20
-#define ID_AA64DFR0_BRPS_SHIFT 12
-#define ID_AA64DFR0_PMUVER_SHIFT 8
-#define ID_AA64DFR0_TRACEVER_SHIFT 4
-#define ID_AA64DFR0_DEBUGVER_SHIFT 0
-
-#define ID_AA64DFR0_PMUVER_8_0 0x1
-#define ID_AA64DFR0_PMUVER_8_1 0x4
-#define ID_AA64DFR0_PMUVER_8_4 0x5
-#define ID_AA64DFR0_PMUVER_8_5 0x6
-#define ID_AA64DFR0_PMUVER_8_7 0x7
-#define ID_AA64DFR0_PMUVER_IMP_DEF 0xf
-
-#define ID_AA64DFR0_PMSVER_8_2 0x1
-#define ID_AA64DFR0_PMSVER_8_3 0x2
+#define ID_AA64DFR0_TraceBuffer_SHIFT 44
+#define ID_AA64DFR0_TraceFilt_SHIFT 40
+#define ID_AA64DFR0_DoubleLock_SHIFT 36
+#define ID_AA64DFR0_PMSVer_SHIFT 32
+#define ID_AA64DFR0_CTX_CMPs_SHIFT 28
+#define ID_AA64DFR0_WRPs_SHIFT 20
+#define ID_AA64DFR0_BRPs_SHIFT 12
+#define ID_AA64DFR0_PMUVer_SHIFT 8
+#define ID_AA64DFR0_TraceVer_SHIFT 4
+#define ID_AA64DFR0_DebugVer_SHIFT 0
+
+#define ID_AA64DFR0_PMUVer_8_0 0x1
+#define ID_AA64DFR0_PMUVer_8_1 0x4
+#define ID_AA64DFR0_PMUVer_8_4 0x5
+#define ID_AA64DFR0_PMUVer_8_5 0x6
+#define ID_AA64DFR0_PMUVer_8_7 0x7
+#define ID_AA64DFR0_PMUVer_IMP_DEF 0xf
+
+#define ID_AA64DFR0_PMSVer_8_2 0x1
+#define ID_AA64DFR0_PMSVer_8_3 0x2
#define ID_DFR0_PERFMON_SHIFT 24