Commit a957c6be authored by Mark Brown, committed by Catalin Marinas

arm64/sysreg: Add _EL1 into ID_AA64MMFR2_EL1 definition names

Normally we include the full register name in the defines for fields within
registers but this has not been followed for ID registers. In preparation
for automatic generation of defines add the _EL1s into the defines for
ID_AA64MMFR2_EL1 to follow the convention. No functional changes.
Signed-off-by: Mark Brown <broonie@kernel.org>
Reviewed-by: Kristina Martsenko <kristina.martsenko@arm.com>
Link: https://lore.kernel.org/r/20220905225425.1871461-6-broonie@kernel.org
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
parent 2d987e64
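
The callers in the hunks below keep reading ID_AA64MMFR2_EL1 exactly as before; only the spelling of the field macros changes. As a minimal sketch of how such a field define is consumed (mirroring the kaslr_requires_kpti() hunk further down; illustrative only, not code added by this commit, and assuming the usual <asm/cpufeature.h> and <asm/sysreg.h> headers):

#include <linux/types.h>
#include <asm/cpufeature.h>	/* cpuid_feature_extract_unsigned_field() */
#include <asm/sysreg.h>		/* SYS_ID_AA64MMFR2_EL1, ID_AA64MMFR2_EL1_*_SHIFT */

/* Illustrative helper: does this CPU report FEAT_E0PD in ID_AA64MMFR2_EL1? */
static bool example_cpu_has_e0pd(void)
{
	u64 mmfr2 = read_sysreg_s(SYS_ID_AA64MMFR2_EL1);

	/* Each ID register field is 4 bits wide; a non-zero value means implemented. */
	return cpuid_feature_extract_unsigned_field(mmfr2,
						    ID_AA64MMFR2_EL1_E0PD_SHIFT) != 0;
}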
@@ -612,7 +612,7 @@ alternative_endif
 	.macro	offset_ttbr1, ttbr, tmp
 #ifdef CONFIG_ARM64_VA_BITS_52
 	mrs_s	\tmp, SYS_ID_AA64MMFR2_EL1
-	and	\tmp, \tmp, #(0xf << ID_AA64MMFR2_LVA_SHIFT)
+	and	\tmp, \tmp, #(0xf << ID_AA64MMFR2_EL1_LVA_SHIFT)
 	cbnz	\tmp, .Lskipoffs_\@
 	orr	\ttbr, \ttbr, #TTBR1_BADDR_4852_OFFSET
 .Lskipoffs_\@ :
@@ -805,21 +805,21 @@
 #define ID_AA64MMFR1_TIDCP1_IMP		1
 
 /* id_aa64mmfr2 */
-#define ID_AA64MMFR2_E0PD_SHIFT		60
-#define ID_AA64MMFR2_EVT_SHIFT		56
-#define ID_AA64MMFR2_BBM_SHIFT		52
-#define ID_AA64MMFR2_TTL_SHIFT		48
-#define ID_AA64MMFR2_FWB_SHIFT		40
-#define ID_AA64MMFR2_IDS_SHIFT		36
-#define ID_AA64MMFR2_AT_SHIFT		32
-#define ID_AA64MMFR2_ST_SHIFT		28
-#define ID_AA64MMFR2_NV_SHIFT		24
-#define ID_AA64MMFR2_CCIDX_SHIFT	20
-#define ID_AA64MMFR2_LVA_SHIFT		16
-#define ID_AA64MMFR2_IESB_SHIFT		12
-#define ID_AA64MMFR2_LSM_SHIFT		8
-#define ID_AA64MMFR2_UAO_SHIFT		4
-#define ID_AA64MMFR2_CNP_SHIFT		0
+#define ID_AA64MMFR2_EL1_E0PD_SHIFT	60
+#define ID_AA64MMFR2_EL1_EVT_SHIFT	56
+#define ID_AA64MMFR2_EL1_BBM_SHIFT	52
+#define ID_AA64MMFR2_EL1_TTL_SHIFT	48
+#define ID_AA64MMFR2_EL1_FWB_SHIFT	40
+#define ID_AA64MMFR2_EL1_IDS_SHIFT	36
+#define ID_AA64MMFR2_EL1_AT_SHIFT	32
+#define ID_AA64MMFR2_EL1_ST_SHIFT	28
+#define ID_AA64MMFR2_EL1_NV_SHIFT	24
+#define ID_AA64MMFR2_EL1_CCIDX_SHIFT	20
+#define ID_AA64MMFR2_EL1_LVA_SHIFT	16
+#define ID_AA64MMFR2_EL1_IESB_SHIFT	12
+#define ID_AA64MMFR2_EL1_LSM_SHIFT	8
+#define ID_AA64MMFR2_EL1_UAO_SHIFT	4
+#define ID_AA64MMFR2_EL1_CNP_SHIFT	0
 
 /* id_aa64dfr0 */
 #define ID_AA64DFR0_MTPMU_SHIFT		48
@@ -378,21 +378,21 @@ static const struct arm64_ftr_bits ftr_id_aa64mmfr1[] = {
 };
 
 static const struct arm64_ftr_bits ftr_id_aa64mmfr2[] = {
-	ARM64_FTR_BITS(FTR_HIDDEN, FTR_NONSTRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_E0PD_SHIFT, 4, 0),
-	ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EVT_SHIFT, 4, 0),
-	ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_BBM_SHIFT, 4, 0),
-	ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_TTL_SHIFT, 4, 0),
-	ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_FWB_SHIFT, 4, 0),
-	ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_IDS_SHIFT, 4, 0),
-	ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_AT_SHIFT, 4, 0),
-	ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_ST_SHIFT, 4, 0),
-	ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_NV_SHIFT, 4, 0),
-	ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_CCIDX_SHIFT, 4, 0),
-	ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_LVA_SHIFT, 4, 0),
-	ARM64_FTR_BITS(FTR_HIDDEN, FTR_NONSTRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_IESB_SHIFT, 4, 0),
-	ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_LSM_SHIFT, 4, 0),
-	ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_UAO_SHIFT, 4, 0),
-	ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_CNP_SHIFT, 4, 0),
+	ARM64_FTR_BITS(FTR_HIDDEN, FTR_NONSTRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_E0PD_SHIFT, 4, 0),
+	ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_EVT_SHIFT, 4, 0),
+	ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_BBM_SHIFT, 4, 0),
+	ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_TTL_SHIFT, 4, 0),
+	ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_FWB_SHIFT, 4, 0),
+	ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_IDS_SHIFT, 4, 0),
+	ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_AT_SHIFT, 4, 0),
+	ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_ST_SHIFT, 4, 0),
+	ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_NV_SHIFT, 4, 0),
+	ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_CCIDX_SHIFT, 4, 0),
+	ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_LVA_SHIFT, 4, 0),
+	ARM64_FTR_BITS(FTR_HIDDEN, FTR_NONSTRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_IESB_SHIFT, 4, 0),
+	ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_LSM_SHIFT, 4, 0),
+	ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_UAO_SHIFT, 4, 0),
+	ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_CNP_SHIFT, 4, 0),
 	ARM64_FTR_END,
 };
 
@@ -1571,7 +1571,7 @@ bool kaslr_requires_kpti(void)
 	if (IS_ENABLED(CONFIG_ARM64_E0PD)) {
 		u64 mmfr2 = read_sysreg_s(SYS_ID_AA64MMFR2_EL1);
 		if (cpuid_feature_extract_unsigned_field(mmfr2,
-						ID_AA64MMFR2_E0PD_SHIFT))
+						ID_AA64MMFR2_EL1_E0PD_SHIFT))
 			return false;
 	}
 
@@ -2303,7 +2303,7 @@ static const struct arm64_cpu_capabilities arm64_features[] = {
 		.capability = ARM64_HAS_STAGE2_FWB,
 		.sys_reg = SYS_ID_AA64MMFR2_EL1,
 		.sign = FTR_UNSIGNED,
-		.field_pos = ID_AA64MMFR2_FWB_SHIFT,
+		.field_pos = ID_AA64MMFR2_EL1_FWB_SHIFT,
 		.field_width = 4,
 		.min_field_value = 1,
 		.matches = has_cpuid_feature,
@@ -2314,7 +2314,7 @@ static const struct arm64_cpu_capabilities arm64_features[] = {
 		.capability = ARM64_HAS_ARMv8_4_TTL,
 		.sys_reg = SYS_ID_AA64MMFR2_EL1,
 		.sign = FTR_UNSIGNED,
-		.field_pos = ID_AA64MMFR2_TTL_SHIFT,
+		.field_pos = ID_AA64MMFR2_EL1_TTL_SHIFT,
 		.field_width = 4,
 		.min_field_value = 1,
 		.matches = has_cpuid_feature,
@@ -2380,7 +2380,7 @@ static const struct arm64_cpu_capabilities arm64_features[] = {
 		.matches = has_useable_cnp,
 		.sys_reg = SYS_ID_AA64MMFR2_EL1,
 		.sign = FTR_UNSIGNED,
-		.field_pos = ID_AA64MMFR2_CNP_SHIFT,
+		.field_pos = ID_AA64MMFR2_EL1_CNP_SHIFT,
 		.field_width = 4,
 		.min_field_value = 1,
 		.cpu_enable = cpu_enable_cnp,
@@ -2499,7 +2499,7 @@ static const struct arm64_cpu_capabilities arm64_features[] = {
 		.sys_reg = SYS_ID_AA64MMFR2_EL1,
 		.sign = FTR_UNSIGNED,
 		.field_width = 4,
-		.field_pos = ID_AA64MMFR2_E0PD_SHIFT,
+		.field_pos = ID_AA64MMFR2_EL1_E0PD_SHIFT,
 		.matches = has_cpuid_feature,
 		.min_field_value = 1,
 		.cpu_enable = cpu_enable_e0pd,
@@ -2725,7 +2725,7 @@ static const struct arm64_cpu_capabilities arm64_elf_hwcaps[] = {
 	HWCAP_CAP(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_EL1_BF16_SHIFT, 4, FTR_UNSIGNED, 2, CAP_HWCAP, KERNEL_HWCAP_EBF16),
 	HWCAP_CAP(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_EL1_DGH_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_DGH),
 	HWCAP_CAP(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_EL1_I8MM_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_I8MM),
-	HWCAP_CAP(SYS_ID_AA64MMFR2_EL1, ID_AA64MMFR2_AT_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_USCAT),
+	HWCAP_CAP(SYS_ID_AA64MMFR2_EL1, ID_AA64MMFR2_EL1_AT_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_USCAT),
 #ifdef CONFIG_ARM64_SVE
 	HWCAP_CAP(SYS_ID_AA64PFR0_EL1, ID_AA64PFR0_SVE_SHIFT, 4, FTR_UNSIGNED, ID_AA64PFR0_SVE, CAP_HWCAP, KERNEL_HWCAP_SVE),
 	HWCAP_CAP(SYS_ID_AA64ZFR0_EL1, ID_AA64ZFR0_EL1_SVEver_SHIFT, 4, FTR_UNSIGNED, ID_AA64ZFR0_EL1_SVEver_SVE2, CAP_HWCAP, KERNEL_HWCAP_SVE2),
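
Reviewer note: the arm64_features[] and hwcaps entries above are data-driven; the matcher reads .sys_reg, extracts the unsigned 4-bit field at .field_pos/.field_width, and compares it against .min_field_value, so renaming the *_SHIFT macros changes no behaviour. A simplified sketch of that matching step, assuming it reduces to a shift-and-mask compare (names below are illustrative, not the kernel's actual has_cpuid_feature()):

#include <linux/bits.h>		/* GENMASK_ULL() */
#include <linux/types.h>

/* Simplified field check: does reg_val report at least min_field_value? */
static bool example_field_at_least(u64 reg_val, unsigned int field_pos,
				   unsigned int field_width,
				   unsigned int min_field_value)
{
	u64 field = (reg_val >> field_pos) & GENMASK_ULL(field_width - 1, 0);

	return field >= min_field_value;
}

/* e.g. example_field_at_least(mmfr2, ID_AA64MMFR2_EL1_FWB_SHIFT, 4, 1) */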
@@ -99,7 +99,7 @@ SYM_CODE_START(primary_entry)
 	 */
 #if VA_BITS > 48
 	mrs_s	x0, SYS_ID_AA64MMFR2_EL1
-	tst	x0, #0xf << ID_AA64MMFR2_LVA_SHIFT
+	tst	x0, #0xf << ID_AA64MMFR2_EL1_LVA_SHIFT
 	mov	x0, #VA_BITS
 	mov	x25, #VA_BITS_MIN
 	csel	x25, x25, x0, eq
@@ -677,7 +677,7 @@ SYM_FUNC_START(__cpu_secondary_check52bitva)
 	b.ne	2f
 
 	mrs_s	x0, SYS_ID_AA64MMFR2_EL1
-	and	x0, x0, #(0xf << ID_AA64MMFR2_LVA_SHIFT)
+	and	x0, x0, #(0xf << ID_AA64MMFR2_EL1_LVA_SHIFT)
 	cbnz	x0, 2f
 
 	update_early_cpu_boot_status \
@@ -120,14 +120,14 @@
  * - E0PDx mechanism
  */
 #define PVM_ID_AA64MMFR2_ALLOW (\
-	ARM64_FEATURE_MASK(ID_AA64MMFR2_CNP) | \
-	ARM64_FEATURE_MASK(ID_AA64MMFR2_UAO) | \
-	ARM64_FEATURE_MASK(ID_AA64MMFR2_IESB) | \
-	ARM64_FEATURE_MASK(ID_AA64MMFR2_AT) | \
-	ARM64_FEATURE_MASK(ID_AA64MMFR2_IDS) | \
-	ARM64_FEATURE_MASK(ID_AA64MMFR2_TTL) | \
-	ARM64_FEATURE_MASK(ID_AA64MMFR2_BBM) | \
-	ARM64_FEATURE_MASK(ID_AA64MMFR2_E0PD) \
+	ARM64_FEATURE_MASK(ID_AA64MMFR2_EL1_CNP) | \
+	ARM64_FEATURE_MASK(ID_AA64MMFR2_EL1_UAO) | \
+	ARM64_FEATURE_MASK(ID_AA64MMFR2_EL1_IESB) | \
+	ARM64_FEATURE_MASK(ID_AA64MMFR2_EL1_AT) | \
+	ARM64_FEATURE_MASK(ID_AA64MMFR2_EL1_IDS) | \
+	ARM64_FEATURE_MASK(ID_AA64MMFR2_EL1_TTL) | \
+	ARM64_FEATURE_MASK(ID_AA64MMFR2_EL1_BBM) | \
+	ARM64_FEATURE_MASK(ID_AA64MMFR2_EL1_E0PD) \
 	)
 
 /*
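
One non-obvious consequence visible in the PVM_ID_AA64MMFR2_ALLOW hunk above: no _SHIFT suffix appears at these call sites, yet the macro arguments still had to be renamed. The call sites pass the bare field name, so ARM64_FEATURE_MASK() must derive the corresponding *_SHIFT internally, presumably via token pasting. A hedged sketch of that pattern (EXAMPLE_FEATURE_MASK is hypothetical; the real kernel definition may differ in detail):

#include <linux/bits.h>		/* GENMASK_ULL() */

/*
 * Hypothetical stand-in for ARM64_FEATURE_MASK(): paste "_SHIFT" onto the
 * field name and build a 4-bit mask at that position. Because of the token
 * pasting, a use such as EXAMPLE_FEATURE_MASK(ID_AA64MMFR2_EL1_CNP) only
 * resolves once ID_AA64MMFR2_EL1_CNP_SHIFT exists, which is why these call
 * sites move together with the renamed defines.
 */
#define EXAMPLE_FEATURE_MASK(field) \
	GENMASK_ULL(field##_SHIFT + 3, field##_SHIFT)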