diff --git a/sys/arm64/arm64/identcpu.c b/sys/arm64/arm64/identcpu.c index 8ad604ceb754..def7c0c0838b 100644 --- a/sys/arm64/arm64/identcpu.c +++ b/sys/arm64/arm64/identcpu.c @@ -1,2252 +1,2255 @@ /*- * Copyright (c) 2014 Andrew Turner * Copyright (c) 2014 The FreeBSD Foundation * All rights reserved. * * Portions of this software were developed by Semihalf * under sponsorship of the FreeBSD Foundation. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * */ #include __FBSDID("$FreeBSD$"); #include #include #include #include #include #include #include #include #include #include #include #include #include static void print_cpu_midr(struct sbuf *sb, u_int cpu); static void print_cpu_features(u_int cpu); #ifdef COMPAT_FREEBSD32 static u_long parse_cpu_features_hwcap32(void); #endif char machine[] = "arm64"; #ifdef SCTL_MASK32 extern int adaptive_machine_arch; #endif static SYSCTL_NODE(_machdep, OID_AUTO, cache, CTLFLAG_RD | CTLFLAG_MPSAFE, 0, "Cache management tuning"); static int allow_dic = 1; SYSCTL_INT(_machdep_cache, OID_AUTO, allow_dic, CTLFLAG_RDTUN, &allow_dic, 0, "Allow optimizations based on the DIC cache bit"); static int allow_idc = 1; SYSCTL_INT(_machdep_cache, OID_AUTO, allow_idc, CTLFLAG_RDTUN, &allow_idc, 0, "Allow optimizations based on the IDC cache bit"); static void check_cpu_regs(u_int cpu); /* * The default implementation of I-cache sync assumes we have an * aliasing cache until we know otherwise. */ void (*arm64_icache_sync_range)(vm_offset_t, vm_size_t) = &arm64_aliasing_icache_sync_range; static int sysctl_hw_machine(SYSCTL_HANDLER_ARGS) { #ifdef SCTL_MASK32 static const char machine32[] = "arm"; #endif int error; #ifdef SCTL_MASK32 if ((req->flags & SCTL_MASK32) != 0 && adaptive_machine_arch) error = SYSCTL_OUT(req, machine32, sizeof(machine32)); else #endif error = SYSCTL_OUT(req, machine, sizeof(machine)); return (error); } SYSCTL_PROC(_hw, HW_MACHINE, machine, CTLTYPE_STRING | CTLFLAG_RD | CTLFLAG_MPSAFE, NULL, 0, sysctl_hw_machine, "A", "Machine class"); static char cpu_model[64]; SYSCTL_STRING(_hw, HW_MODEL, model, CTLFLAG_RD, cpu_model, sizeof(cpu_model), "Machine model"); /* * Per-CPU affinity as provided in MPIDR_EL1 * Indexed by CPU number in logical order selected by the system. 
* Relevant fields can be extracted using CPU_AFFn macros, * Aff3.Aff2.Aff1.Aff0 construct a unique CPU address in the system. * * Fields used by us: * Aff1 - Cluster number * Aff0 - CPU number in Aff1 cluster */ uint64_t __cpu_affinity[MAXCPU]; static u_int cpu_aff_levels; struct cpu_desc { uint64_t mpidr; uint64_t id_aa64afr0; uint64_t id_aa64afr1; uint64_t id_aa64dfr0; uint64_t id_aa64dfr1; uint64_t id_aa64isar0; uint64_t id_aa64isar1; uint64_t id_aa64mmfr0; uint64_t id_aa64mmfr1; uint64_t id_aa64mmfr2; uint64_t id_aa64pfr0; uint64_t id_aa64pfr1; uint64_t ctr; #ifdef COMPAT_FREEBSD32 uint64_t id_isar5; uint64_t mvfr0; uint64_t mvfr1; #endif }; static struct cpu_desc cpu_desc[MAXCPU]; static struct cpu_desc kern_cpu_desc; static struct cpu_desc user_cpu_desc; static u_int cpu_print_regs; #define PRINT_ID_AA64_AFR0 0x00000001 #define PRINT_ID_AA64_AFR1 0x00000002 #define PRINT_ID_AA64_DFR0 0x00000010 #define PRINT_ID_AA64_DFR1 0x00000020 #define PRINT_ID_AA64_ISAR0 0x00000100 #define PRINT_ID_AA64_ISAR1 0x00000200 #define PRINT_ID_AA64_MMFR0 0x00001000 #define PRINT_ID_AA64_MMFR1 0x00002000 #define PRINT_ID_AA64_MMFR2 0x00004000 #define PRINT_ID_AA64_PFR0 0x00010000 #define PRINT_ID_AA64_PFR1 0x00020000 #ifdef COMPAT_FREEBSD32 #define PRINT_ID_ISAR5 0x01000000 #define PRINT_MVFR0 0x02000000 #define PRINT_MVFR1 0x04000000 #endif #define PRINT_CTR_EL0 0x10000000 struct cpu_parts { u_int part_id; const char *part_name; }; #define CPU_PART_NONE { 0, NULL } struct cpu_implementers { u_int impl_id; const char *impl_name; /* * Part number is implementation defined * so each vendor will have its own set of values and names. */ const struct cpu_parts *cpu_parts; }; #define CPU_IMPLEMENTER_NONE { 0, NULL, NULL } /* * Per-implementer table of (PartNum, CPU Name) pairs. */ /* ARM Ltd. */ static const struct cpu_parts cpu_parts_arm[] = { { CPU_PART_AEM_V8, "AEMv8" }, { CPU_PART_FOUNDATION, "Foundation-Model" }, { CPU_PART_CORTEX_A34, "Cortex-A34" }, { CPU_PART_CORTEX_A35, "Cortex-A35" }, { CPU_PART_CORTEX_A53, "Cortex-A53" }, { CPU_PART_CORTEX_A55, "Cortex-A55" }, { CPU_PART_CORTEX_A57, "Cortex-A57" }, { CPU_PART_CORTEX_A65, "Cortex-A65" }, { CPU_PART_CORTEX_A72, "Cortex-A72" }, { CPU_PART_CORTEX_A73, "Cortex-A73" }, { CPU_PART_CORTEX_A75, "Cortex-A75" }, { CPU_PART_CORTEX_A76, "Cortex-A76" }, { CPU_PART_CORTEX_A76AE, "Cortex-A76AE" }, { CPU_PART_CORTEX_A77, "Cortex-A77" }, { CPU_PART_CORTEX_A78, "Cortex-A78" }, { CPU_PART_CORTEX_A78C, "Cortex-A78C" }, { CPU_PART_CORTEX_A510, "Cortex-A510" }, { CPU_PART_CORTEX_A710, "Cortex-A710" }, { CPU_PART_CORTEX_X1, "Cortex-X1" }, { CPU_PART_CORTEX_X1C, "Cortex-X1C" }, { CPU_PART_CORTEX_X2, "Cortex-X2" }, { CPU_PART_NEOVERSE_E1, "Neoverse-E1" }, { CPU_PART_NEOVERSE_N1, "Neoverse-N1" }, { CPU_PART_NEOVERSE_N2, "Neoverse-N2" }, { CPU_PART_NEOVERSE_V1, "Neoverse-V1" }, CPU_PART_NONE, }; /* Cavium */ static const struct cpu_parts cpu_parts_cavium[] = { { CPU_PART_THUNDERX, "ThunderX" }, { CPU_PART_THUNDERX2, "ThunderX2" }, CPU_PART_NONE, }; /* APM / Ampere */ static const struct cpu_parts cpu_parts_apm[] = { { CPU_PART_EMAG8180, "eMAG 8180" }, CPU_PART_NONE, }; /* Unknown */ static const struct cpu_parts cpu_parts_none[] = { CPU_PART_NONE, }; /* * Implementers table. 
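 * Keyed on the Implementer field of MIDR_EL1; each entry carries the
 * per-implementer parts table used to turn the Part Number field into
 * a human readable CPU name (see print_cpu_midr() below).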
*/ const struct cpu_implementers cpu_implementers[] = { { CPU_IMPL_AMPERE, "Ampere", cpu_parts_none }, { CPU_IMPL_APPLE, "Apple", cpu_parts_none }, { CPU_IMPL_APM, "APM", cpu_parts_apm }, { CPU_IMPL_ARM, "ARM", cpu_parts_arm }, { CPU_IMPL_BROADCOM, "Broadcom", cpu_parts_none }, { CPU_IMPL_CAVIUM, "Cavium", cpu_parts_cavium }, { CPU_IMPL_DEC, "DEC", cpu_parts_none }, { CPU_IMPL_FREESCALE, "Freescale", cpu_parts_none }, { CPU_IMPL_FUJITSU, "Fujitsu", cpu_parts_none }, { CPU_IMPL_INFINEON, "IFX", cpu_parts_none }, { CPU_IMPL_INTEL, "Intel", cpu_parts_none }, { CPU_IMPL_MARVELL, "Marvell", cpu_parts_none }, { CPU_IMPL_NVIDIA, "NVIDIA", cpu_parts_none }, { CPU_IMPL_QUALCOMM, "Qualcomm", cpu_parts_none }, CPU_IMPLEMENTER_NONE, }; #define MRS_TYPE_MASK 0xf #define MRS_INVALID 0 #define MRS_EXACT 1 #define MRS_EXACT_VAL(x) (MRS_EXACT | ((x) << 4)) #define MRS_EXACT_FIELD(x) ((x) >> 4) #define MRS_LOWER 2 struct mrs_field_value { uint64_t value; const char *desc; }; #define MRS_FIELD_VALUE(_value, _desc) \ { \ .value = (_value), \ .desc = (_desc), \ } #define MRS_FIELD_VALUE_NONE_IMPL(_reg, _field, _none, _impl) \ MRS_FIELD_VALUE(_reg ## _ ## _field ## _ ## _none, ""), \ MRS_FIELD_VALUE(_reg ## _ ## _field ## _ ## _impl, #_field) #define MRS_FIELD_VALUE_COUNT(_reg, _field, _desc) \ MRS_FIELD_VALUE(0ul << _reg ## _ ## _field ## _SHIFT, "1 " _desc), \ MRS_FIELD_VALUE(1ul << _reg ## _ ## _field ## _SHIFT, "2 " _desc "s"), \ MRS_FIELD_VALUE(2ul << _reg ## _ ## _field ## _SHIFT, "3 " _desc "s"), \ MRS_FIELD_VALUE(3ul << _reg ## _ ## _field ## _SHIFT, "4 " _desc "s"), \ MRS_FIELD_VALUE(4ul << _reg ## _ ## _field ## _SHIFT, "5 " _desc "s"), \ MRS_FIELD_VALUE(5ul << _reg ## _ ## _field ## _SHIFT, "6 " _desc "s"), \ MRS_FIELD_VALUE(6ul << _reg ## _ ## _field ## _SHIFT, "7 " _desc "s"), \ MRS_FIELD_VALUE(7ul << _reg ## _ ## _field ## _SHIFT, "8 " _desc "s"), \ MRS_FIELD_VALUE(8ul << _reg ## _ ## _field ## _SHIFT, "9 " _desc "s"), \ MRS_FIELD_VALUE(9ul << _reg ## _ ## _field ## _SHIFT, "10 "_desc "s"), \ MRS_FIELD_VALUE(10ul<< _reg ## _ ## _field ## _SHIFT, "11 "_desc "s"), \ MRS_FIELD_VALUE(11ul<< _reg ## _ ## _field ## _SHIFT, "12 "_desc "s"), \ MRS_FIELD_VALUE(12ul<< _reg ## _ ## _field ## _SHIFT, "13 "_desc "s"), \ MRS_FIELD_VALUE(13ul<< _reg ## _ ## _field ## _SHIFT, "14 "_desc "s"), \ MRS_FIELD_VALUE(14ul<< _reg ## _ ## _field ## _SHIFT, "15 "_desc "s"), \ MRS_FIELD_VALUE(15ul<< _reg ## _ ## _field ## _SHIFT, "16 "_desc "s") #define MRS_FIELD_VALUE_END { .desc = NULL } struct mrs_field_hwcap { u_long *hwcap; uint64_t min; u_long hwcap_val; }; #define MRS_HWCAP(_hwcap, _val, _min) \ { \ .hwcap = (_hwcap), \ .hwcap_val = (_val), \ .min = (_min), \ } #define MRS_HWCAP_END { .hwcap = NULL } struct mrs_field { const char *name; struct mrs_field_value *values; struct mrs_field_hwcap *hwcaps; uint64_t mask; bool sign; u_int type; u_int shift; }; #define MRS_FIELD_HWCAP(_register, _name, _sign, _type, _values, _hwcap) \ { \ .name = #_name, \ .sign = (_sign), \ .type = (_type), \ .shift = _register ## _ ## _name ## _SHIFT, \ .mask = _register ## _ ## _name ## _MASK, \ .values = (_values), \ .hwcaps = (_hwcap), \ } #define MRS_FIELD(_register, _name, _sign, _type, _values) \ MRS_FIELD_HWCAP(_register, _name, _sign, _type, _values, NULL) #define MRS_FIELD_END { .type = MRS_INVALID, } /* ID_AA64AFR0_EL1 */ static struct mrs_field id_aa64afr0_fields[] = { MRS_FIELD_END, }; /* ID_AA64AFR1_EL1 */ static struct mrs_field id_aa64afr1_fields[] = { MRS_FIELD_END, }; /* ID_AA64DFR0_EL1 */ static struct mrs_field_value 
id_aa64dfr0_tracefilt[] = { MRS_FIELD_VALUE(ID_AA64DFR0_TraceFilt_NONE, ""), MRS_FIELD_VALUE(ID_AA64DFR0_TraceFilt_8_4, "Trace v8.4"), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64dfr0_doublelock[] = { MRS_FIELD_VALUE(ID_AA64DFR0_DoubleLock_IMPL, "DoubleLock"), MRS_FIELD_VALUE(ID_AA64DFR0_DoubleLock_NONE, ""), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64dfr0_pmsver[] = { MRS_FIELD_VALUE(ID_AA64DFR0_PMSVer_NONE, ""), MRS_FIELD_VALUE(ID_AA64DFR0_PMSVer_SPE, "SPE"), MRS_FIELD_VALUE(ID_AA64DFR0_PMSVer_SPE_8_3, "SPE v8.3"), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64dfr0_ctx_cmps[] = { MRS_FIELD_VALUE_COUNT(ID_AA64DFR0, CTX_CMPs, "CTX BKPT"), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64dfr0_wrps[] = { MRS_FIELD_VALUE_COUNT(ID_AA64DFR0, WRPs, "Watchpoint"), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64dfr0_brps[] = { MRS_FIELD_VALUE_COUNT(ID_AA64DFR0, BRPs, "Breakpoint"), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64dfr0_pmuver[] = { MRS_FIELD_VALUE(ID_AA64DFR0_PMUVer_NONE, ""), MRS_FIELD_VALUE(ID_AA64DFR0_PMUVer_3, "PMUv3"), MRS_FIELD_VALUE(ID_AA64DFR0_PMUVer_3_1, "PMUv3 v8.1"), MRS_FIELD_VALUE(ID_AA64DFR0_PMUVer_3_4, "PMUv3 v8.4"), MRS_FIELD_VALUE(ID_AA64DFR0_PMUVer_3_5, "PMUv3 v8.5"), MRS_FIELD_VALUE(ID_AA64DFR0_PMUVer_IMPL, "IMPL PMU"), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64dfr0_tracever[] = { MRS_FIELD_VALUE(ID_AA64DFR0_TraceVer_NONE, ""), MRS_FIELD_VALUE(ID_AA64DFR0_TraceVer_IMPL, "Trace"), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64dfr0_debugver[] = { MRS_FIELD_VALUE(ID_AA64DFR0_DebugVer_8, "Debugv8"), MRS_FIELD_VALUE(ID_AA64DFR0_DebugVer_8_VHE, "Debugv8_VHE"), MRS_FIELD_VALUE(ID_AA64DFR0_DebugVer_8_2, "Debugv8.2"), MRS_FIELD_VALUE(ID_AA64DFR0_DebugVer_8_4, "Debugv8.4"), MRS_FIELD_VALUE_END, }; static struct mrs_field id_aa64dfr0_fields[] = { MRS_FIELD(ID_AA64DFR0, TraceFilt, false, MRS_EXACT, id_aa64dfr0_tracefilt), MRS_FIELD(ID_AA64DFR0, DoubleLock, false, MRS_EXACT, id_aa64dfr0_doublelock), MRS_FIELD(ID_AA64DFR0, PMSVer, false, MRS_EXACT, id_aa64dfr0_pmsver), MRS_FIELD(ID_AA64DFR0, CTX_CMPs, false, MRS_EXACT, id_aa64dfr0_ctx_cmps), MRS_FIELD(ID_AA64DFR0, WRPs, false, MRS_LOWER, id_aa64dfr0_wrps), MRS_FIELD(ID_AA64DFR0, BRPs, false, MRS_LOWER, id_aa64dfr0_brps), MRS_FIELD(ID_AA64DFR0, PMUVer, false, MRS_EXACT, id_aa64dfr0_pmuver), MRS_FIELD(ID_AA64DFR0, TraceVer, false, MRS_EXACT, id_aa64dfr0_tracever), MRS_FIELD(ID_AA64DFR0, DebugVer, false, MRS_EXACT_VAL(0x6), id_aa64dfr0_debugver), MRS_FIELD_END, }; /* ID_AA64DFR1_EL1 */ static struct mrs_field id_aa64dfr1_fields[] = { MRS_FIELD_END, }; /* ID_AA64ISAR0_EL1 */ static struct mrs_field_value id_aa64isar0_rndr[] = { MRS_FIELD_VALUE(ID_AA64ISAR0_RNDR_NONE, ""), MRS_FIELD_VALUE(ID_AA64ISAR0_RNDR_IMPL, "RNG"), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap id_aa64isar0_rndr_caps[] = { MRS_HWCAP(&elf_hwcap2, HWCAP2_RNG, ID_AA64ISAR0_RNDR_IMPL), MRS_HWCAP_END }; static struct mrs_field_value id_aa64isar0_tlb[] = { MRS_FIELD_VALUE(ID_AA64ISAR0_TLB_NONE, ""), MRS_FIELD_VALUE(ID_AA64ISAR0_TLB_TLBIOS, "TLBI-OS"), MRS_FIELD_VALUE(ID_AA64ISAR0_TLB_TLBIOSR, "TLBI-OSR"), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64isar0_ts[] = { MRS_FIELD_VALUE(ID_AA64ISAR0_TS_NONE, ""), MRS_FIELD_VALUE(ID_AA64ISAR0_TS_CondM_8_4, "CondM-8.4"), MRS_FIELD_VALUE(ID_AA64ISAR0_TS_CondM_8_5, "CondM-8.5"), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap id_aa64isar0_ts_caps[] = { MRS_HWCAP(&elf_hwcap, 
HWCAP_FLAGM, ID_AA64ISAR0_TS_CondM_8_4), MRS_HWCAP(&elf_hwcap2, HWCAP2_FLAGM2, ID_AA64ISAR0_TS_CondM_8_5), MRS_HWCAP_END }; static struct mrs_field_value id_aa64isar0_fhm[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64ISAR0, FHM, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap id_aa64isar0_fhm_caps[] = { MRS_HWCAP(&elf_hwcap, HWCAP_ASIMDFHM, ID_AA64ISAR0_FHM_IMPL), MRS_HWCAP_END }; static struct mrs_field_value id_aa64isar0_dp[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64ISAR0, DP, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap id_aa64isar0_dp_caps[] = { MRS_HWCAP(&elf_hwcap, HWCAP_ASIMDDP, ID_AA64ISAR0_DP_IMPL), MRS_HWCAP_END }; static struct mrs_field_value id_aa64isar0_sm4[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64ISAR0, SM4, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap id_aa64isar0_sm4_caps[] = { MRS_HWCAP(&elf_hwcap, HWCAP_SM4, ID_AA64ISAR0_SM4_IMPL), MRS_HWCAP_END }; static struct mrs_field_value id_aa64isar0_sm3[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64ISAR0, SM3, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap id_aa64isar0_sm3_caps[] = { MRS_HWCAP(&elf_hwcap, HWCAP_SM3, ID_AA64ISAR0_SM3_IMPL), MRS_HWCAP_END }; static struct mrs_field_value id_aa64isar0_sha3[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64ISAR0, SHA3, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap id_aa64isar0_sha3_caps[] = { MRS_HWCAP(&elf_hwcap, HWCAP_SHA3, ID_AA64ISAR0_SHA3_IMPL), MRS_HWCAP_END }; static struct mrs_field_value id_aa64isar0_rdm[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64ISAR0, RDM, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap id_aa64isar0_rdm_caps[] = { MRS_HWCAP(&elf_hwcap, HWCAP_ASIMDRDM, ID_AA64ISAR0_RDM_IMPL), MRS_HWCAP_END }; static struct mrs_field_value id_aa64isar0_atomic[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64ISAR0, Atomic, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap id_aa64isar0_atomic_caps[] = { MRS_HWCAP(&elf_hwcap, HWCAP_ATOMICS, ID_AA64ISAR0_Atomic_IMPL), MRS_HWCAP_END }; static struct mrs_field_value id_aa64isar0_crc32[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64ISAR0, CRC32, NONE, BASE), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap id_aa64isar0_crc32_caps[] = { MRS_HWCAP(&elf_hwcap, HWCAP_CRC32, ID_AA64ISAR0_CRC32_BASE), MRS_HWCAP_END }; static struct mrs_field_value id_aa64isar0_sha2[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64ISAR0, SHA2, NONE, BASE), MRS_FIELD_VALUE(ID_AA64ISAR0_SHA2_512, "SHA2+SHA512"), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap id_aa64isar0_sha2_caps[] = { MRS_HWCAP(&elf_hwcap, HWCAP_SHA2, ID_AA64ISAR0_SHA2_BASE), MRS_HWCAP(&elf_hwcap, HWCAP_SHA512, ID_AA64ISAR0_SHA2_512), MRS_HWCAP_END }; static struct mrs_field_value id_aa64isar0_sha1[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64ISAR0, SHA1, NONE, BASE), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap id_aa64isar0_sha1_caps[] = { MRS_HWCAP(&elf_hwcap, HWCAP_SHA1, ID_AA64ISAR0_SHA1_BASE), MRS_HWCAP_END }; static struct mrs_field_value id_aa64isar0_aes[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64ISAR0, AES, NONE, BASE), MRS_FIELD_VALUE(ID_AA64ISAR0_AES_PMULL, "AES+PMULL"), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap id_aa64isar0_aes_caps[] = { MRS_HWCAP(&elf_hwcap, HWCAP_AES, ID_AA64ISAR0_AES_BASE), MRS_HWCAP(&elf_hwcap, HWCAP_PMULL, ID_AA64ISAR0_AES_PMULL), MRS_HWCAP_END }; static struct mrs_field id_aa64isar0_fields[] = { MRS_FIELD_HWCAP(ID_AA64ISAR0, RNDR, false, MRS_LOWER, id_aa64isar0_rndr, id_aa64isar0_rndr_caps), MRS_FIELD(ID_AA64ISAR0, TLB, false, MRS_EXACT, id_aa64isar0_tlb), 
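	/*
	 * Each entry ties one 4-bit ID register field to its value table
	 * and, optionally, to hwcap bits: MRS_LOWER fields are exported to
	 * userspace as the lowest value seen across all CPUs, MRS_EXACT
	 * fields are forced to a fixed (by default zero) value, and the
	 * hwcap table grants the listed AT_HWCAP/AT_HWCAP2 bits once the
	 * user-visible field reaches the given minimum (see
	 * update_special_regs() and parse_cpu_features() below).
	 */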
MRS_FIELD_HWCAP(ID_AA64ISAR0, TS, false, MRS_LOWER, id_aa64isar0_ts, id_aa64isar0_ts_caps), MRS_FIELD_HWCAP(ID_AA64ISAR0, FHM, false, MRS_LOWER, id_aa64isar0_fhm, id_aa64isar0_fhm_caps), MRS_FIELD_HWCAP(ID_AA64ISAR0, DP, false, MRS_LOWER, id_aa64isar0_dp, id_aa64isar0_dp_caps), MRS_FIELD_HWCAP(ID_AA64ISAR0, SM4, false, MRS_LOWER, id_aa64isar0_sm4, id_aa64isar0_sm4_caps), MRS_FIELD_HWCAP(ID_AA64ISAR0, SM3, false, MRS_LOWER, id_aa64isar0_sm3, id_aa64isar0_sm3_caps), MRS_FIELD_HWCAP(ID_AA64ISAR0, SHA3, false, MRS_LOWER, id_aa64isar0_sha3, id_aa64isar0_sha3_caps), MRS_FIELD_HWCAP(ID_AA64ISAR0, RDM, false, MRS_LOWER, id_aa64isar0_rdm, id_aa64isar0_rdm_caps), MRS_FIELD_HWCAP(ID_AA64ISAR0, Atomic, false, MRS_LOWER, id_aa64isar0_atomic, id_aa64isar0_atomic_caps), MRS_FIELD_HWCAP(ID_AA64ISAR0, CRC32, false, MRS_LOWER, id_aa64isar0_crc32, id_aa64isar0_crc32_caps), MRS_FIELD_HWCAP(ID_AA64ISAR0, SHA2, false, MRS_LOWER, id_aa64isar0_sha2, id_aa64isar0_sha2_caps), MRS_FIELD_HWCAP(ID_AA64ISAR0, SHA1, false, MRS_LOWER, id_aa64isar0_sha1, id_aa64isar0_sha1_caps), MRS_FIELD_HWCAP(ID_AA64ISAR0, AES, false, MRS_LOWER, id_aa64isar0_aes, id_aa64isar0_aes_caps), MRS_FIELD_END, }; /* ID_AA64ISAR1_EL1 */ static struct mrs_field_value id_aa64isar1_i8mm[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64ISAR1, I8MM, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap id_aa64isar1_i8mm_caps[] = { MRS_HWCAP(&elf_hwcap2, HWCAP2_I8MM, ID_AA64ISAR1_I8MM_IMPL), MRS_HWCAP_END }; static struct mrs_field_value id_aa64isar1_dgh[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64ISAR1, DGH, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap id_aa64isar1_dgh_caps[] = { MRS_HWCAP(&elf_hwcap2, HWCAP2_DGH, ID_AA64ISAR1_DGH_IMPL), MRS_HWCAP_END }; static struct mrs_field_value id_aa64isar1_bf16[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64ISAR1, BF16, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap id_aa64isar1_bf16_caps[] = { MRS_HWCAP(&elf_hwcap2, HWCAP2_BF16, ID_AA64ISAR1_BF16_IMPL), MRS_HWCAP_END }; static struct mrs_field_value id_aa64isar1_specres[] = { MRS_FIELD_VALUE(ID_AA64ISAR1_SPECRES_NONE, ""), MRS_FIELD_VALUE(ID_AA64ISAR1_SPECRES_IMPL, "PredInv"), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64isar1_sb[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64ISAR1, SB, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap id_aa64isar1_sb_caps[] = { MRS_HWCAP(&elf_hwcap, HWCAP_SB, ID_AA64ISAR1_SB_IMPL), MRS_HWCAP_END }; static struct mrs_field_value id_aa64isar1_frintts[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64ISAR1, FRINTTS, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap id_aa64isar1_frintts_caps[] = { MRS_HWCAP(&elf_hwcap2, HWCAP2_FRINT, ID_AA64ISAR1_FRINTTS_IMPL), MRS_HWCAP_END }; static struct mrs_field_value id_aa64isar1_gpi[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64ISAR1, GPI, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap id_aa64isar1_gpi_caps[] = { MRS_HWCAP(&elf_hwcap, HWCAP_PACG, ID_AA64ISAR1_GPI_IMPL), MRS_HWCAP_END }; static struct mrs_field_value id_aa64isar1_gpa[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64ISAR1, GPA, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap id_aa64isar1_gpa_caps[] = { MRS_HWCAP(&elf_hwcap, HWCAP_PACG, ID_AA64ISAR1_GPA_IMPL), MRS_HWCAP_END }; static struct mrs_field_value id_aa64isar1_lrcpc[] = { MRS_FIELD_VALUE(ID_AA64ISAR1_LRCPC_NONE, ""), MRS_FIELD_VALUE(ID_AA64ISAR1_LRCPC_RCPC_8_3, "RCPC-8.3"), MRS_FIELD_VALUE(ID_AA64ISAR1_LRCPC_RCPC_8_4, "RCPC-8.4"), MRS_FIELD_VALUE_END, }; static struct 
mrs_field_hwcap id_aa64isar1_lrcpc_caps[] = { MRS_HWCAP(&elf_hwcap, HWCAP_LRCPC, ID_AA64ISAR1_LRCPC_RCPC_8_3), MRS_HWCAP(&elf_hwcap, HWCAP_ILRCPC, ID_AA64ISAR1_LRCPC_RCPC_8_4), MRS_HWCAP_END }; static struct mrs_field_value id_aa64isar1_fcma[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64ISAR1, FCMA, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap id_aa64isar1_fcma_caps[] = { MRS_HWCAP(&elf_hwcap, HWCAP_FCMA, ID_AA64ISAR1_FCMA_IMPL), MRS_HWCAP_END }; static struct mrs_field_value id_aa64isar1_jscvt[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64ISAR1, JSCVT, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap id_aa64isar1_jscvt_caps[] = { MRS_HWCAP(&elf_hwcap, HWCAP_JSCVT, ID_AA64ISAR1_JSCVT_IMPL), MRS_HWCAP_END }; static struct mrs_field_value id_aa64isar1_api[] = { MRS_FIELD_VALUE(ID_AA64ISAR1_API_NONE, ""), MRS_FIELD_VALUE(ID_AA64ISAR1_API_PAC, "API PAC"), MRS_FIELD_VALUE(ID_AA64ISAR1_API_EPAC, "API EPAC"), MRS_FIELD_VALUE(ID_AA64ISAR1_API_EPAC2, "Impl PAuth+EPAC2"), MRS_FIELD_VALUE(ID_AA64ISAR1_API_FPAC, "Impl PAuth+FPAC"), MRS_FIELD_VALUE(ID_AA64ISAR1_API_FPAC_COMBINED, "Impl PAuth+FPAC+Combined"), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap id_aa64isar1_api_caps[] = { MRS_HWCAP(&elf_hwcap, HWCAP_PACA, ID_AA64ISAR1_API_PAC), MRS_HWCAP_END }; static struct mrs_field_value id_aa64isar1_apa[] = { MRS_FIELD_VALUE(ID_AA64ISAR1_APA_NONE, ""), MRS_FIELD_VALUE(ID_AA64ISAR1_APA_PAC, "APA PAC"), MRS_FIELD_VALUE(ID_AA64ISAR1_APA_EPAC, "APA EPAC"), MRS_FIELD_VALUE(ID_AA64ISAR1_APA_EPAC2, "PAuth+EPAC2"), MRS_FIELD_VALUE(ID_AA64ISAR1_APA_FPAC, "PAuth+FPAC"), MRS_FIELD_VALUE(ID_AA64ISAR1_APA_FPAC_COMBINED, "PAuth+FPAC+Combined"), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap id_aa64isar1_apa_caps[] = { MRS_HWCAP(&elf_hwcap, HWCAP_PACA, ID_AA64ISAR1_APA_PAC), MRS_HWCAP_END }; static struct mrs_field_value id_aa64isar1_dpb[] = { MRS_FIELD_VALUE(ID_AA64ISAR1_DPB_NONE, ""), MRS_FIELD_VALUE(ID_AA64ISAR1_DPB_DCCVAP, "DCPoP"), MRS_FIELD_VALUE(ID_AA64ISAR1_DPB_DCCVADP, "DCCVADP"), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap id_aa64isar1_dpb_caps[] = { MRS_HWCAP(&elf_hwcap, HWCAP_DCPOP, ID_AA64ISAR1_DPB_DCCVAP), MRS_HWCAP(&elf_hwcap2, HWCAP2_DCPODP, ID_AA64ISAR1_DPB_DCCVADP), MRS_HWCAP_END }; static struct mrs_field id_aa64isar1_fields[] = { MRS_FIELD_HWCAP(ID_AA64ISAR1, I8MM, false, MRS_LOWER, id_aa64isar1_i8mm, id_aa64isar1_i8mm_caps), MRS_FIELD_HWCAP(ID_AA64ISAR1, DGH, false, MRS_LOWER, id_aa64isar1_dgh, id_aa64isar1_dgh_caps), MRS_FIELD_HWCAP(ID_AA64ISAR1, BF16, false, MRS_LOWER, id_aa64isar1_bf16, id_aa64isar1_bf16_caps), MRS_FIELD(ID_AA64ISAR1, SPECRES, false, MRS_EXACT, id_aa64isar1_specres), MRS_FIELD_HWCAP(ID_AA64ISAR1, SB, false, MRS_LOWER, id_aa64isar1_sb, id_aa64isar1_sb_caps), MRS_FIELD_HWCAP(ID_AA64ISAR1, FRINTTS, false, MRS_LOWER, id_aa64isar1_frintts, id_aa64isar1_frintts_caps), MRS_FIELD_HWCAP(ID_AA64ISAR1, GPI, false, MRS_EXACT, id_aa64isar1_gpi, id_aa64isar1_gpi_caps), MRS_FIELD_HWCAP(ID_AA64ISAR1, GPA, false, MRS_EXACT, id_aa64isar1_gpa, id_aa64isar1_gpa_caps), MRS_FIELD_HWCAP(ID_AA64ISAR1, LRCPC, false, MRS_LOWER, id_aa64isar1_lrcpc, id_aa64isar1_lrcpc_caps), MRS_FIELD_HWCAP(ID_AA64ISAR1, FCMA, false, MRS_LOWER, id_aa64isar1_fcma, id_aa64isar1_fcma_caps), MRS_FIELD_HWCAP(ID_AA64ISAR1, JSCVT, false, MRS_LOWER, id_aa64isar1_jscvt, id_aa64isar1_jscvt_caps), MRS_FIELD_HWCAP(ID_AA64ISAR1, API, false, MRS_EXACT, id_aa64isar1_api, id_aa64isar1_api_caps), MRS_FIELD_HWCAP(ID_AA64ISAR1, APA, false, MRS_EXACT, id_aa64isar1_apa, id_aa64isar1_apa_caps), 
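	/*
	 * The pointer authentication fields above (GPI, GPA, API, APA) are
	 * MRS_EXACT, so their user-visible value is forced to zero and the
	 * HWCAP_PACA/HWCAP_PACG entries cannot fire until these fields are
	 * switched to MRS_LOWER.
	 */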
MRS_FIELD_HWCAP(ID_AA64ISAR1, DPB, false, MRS_LOWER, id_aa64isar1_dpb, id_aa64isar1_dpb_caps), MRS_FIELD_END, }; /* ID_AA64MMFR0_EL1 */ static struct mrs_field_value id_aa64mmfr0_exs[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64MMFR0, ExS, ALL, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64mmfr0_tgran4_2[] = { MRS_FIELD_VALUE(ID_AA64MMFR0_TGran4_2_TGran4, ""), MRS_FIELD_VALUE(ID_AA64MMFR0_TGran4_2_NONE, "No S2 TGran4"), MRS_FIELD_VALUE(ID_AA64MMFR0_TGran4_2_IMPL, "S2 TGran4"), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64mmfr0_tgran64_2[] = { MRS_FIELD_VALUE(ID_AA64MMFR0_TGran64_2_TGran64, ""), MRS_FIELD_VALUE(ID_AA64MMFR0_TGran64_2_NONE, "No S2 TGran64"), MRS_FIELD_VALUE(ID_AA64MMFR0_TGran64_2_IMPL, "S2 TGran64"), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64mmfr0_tgran16_2[] = { MRS_FIELD_VALUE(ID_AA64MMFR0_TGran16_2_TGran16, ""), MRS_FIELD_VALUE(ID_AA64MMFR0_TGran16_2_NONE, "No S2 TGran16"), MRS_FIELD_VALUE(ID_AA64MMFR0_TGran16_2_IMPL, "S2 TGran16"), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64mmfr0_tgran4[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64MMFR0, TGran4,NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64mmfr0_tgran64[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64MMFR0, TGran64, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64mmfr0_tgran16[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64MMFR0, TGran16, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64mmfr0_bigendel0[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64MMFR0, BigEndEL0, FIXED, MIXED), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64mmfr0_snsmem[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64MMFR0, SNSMem, NONE, DISTINCT), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64mmfr0_bigend[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64MMFR0, BigEnd, FIXED, MIXED), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64mmfr0_asidbits[] = { MRS_FIELD_VALUE(ID_AA64MMFR0_ASIDBits_8, "8bit ASID"), MRS_FIELD_VALUE(ID_AA64MMFR0_ASIDBits_16, "16bit ASID"), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64mmfr0_parange[] = { MRS_FIELD_VALUE(ID_AA64MMFR0_PARange_4G, "4GB PA"), MRS_FIELD_VALUE(ID_AA64MMFR0_PARange_64G, "64GB PA"), MRS_FIELD_VALUE(ID_AA64MMFR0_PARange_1T, "1TB PA"), MRS_FIELD_VALUE(ID_AA64MMFR0_PARange_4T, "4TB PA"), MRS_FIELD_VALUE(ID_AA64MMFR0_PARange_16T, "16TB PA"), MRS_FIELD_VALUE(ID_AA64MMFR0_PARange_256T, "256TB PA"), MRS_FIELD_VALUE(ID_AA64MMFR0_PARange_4P, "4PB PA"), MRS_FIELD_VALUE_END, }; static struct mrs_field id_aa64mmfr0_fields[] = { MRS_FIELD(ID_AA64MMFR0, ExS, false, MRS_EXACT, id_aa64mmfr0_exs), MRS_FIELD(ID_AA64MMFR0, TGran4_2, false, MRS_EXACT, id_aa64mmfr0_tgran4_2), MRS_FIELD(ID_AA64MMFR0, TGran64_2, false, MRS_EXACT, id_aa64mmfr0_tgran64_2), MRS_FIELD(ID_AA64MMFR0, TGran16_2, false, MRS_EXACT, id_aa64mmfr0_tgran16_2), MRS_FIELD(ID_AA64MMFR0, TGran4, false, MRS_EXACT, id_aa64mmfr0_tgran4), MRS_FIELD(ID_AA64MMFR0, TGran64, false, MRS_EXACT, id_aa64mmfr0_tgran64), MRS_FIELD(ID_AA64MMFR0, TGran16, false, MRS_EXACT, id_aa64mmfr0_tgran16), MRS_FIELD(ID_AA64MMFR0, BigEndEL0, false, MRS_EXACT, id_aa64mmfr0_bigendel0), MRS_FIELD(ID_AA64MMFR0, SNSMem, false, MRS_EXACT, id_aa64mmfr0_snsmem), MRS_FIELD(ID_AA64MMFR0, BigEnd, false, MRS_EXACT, id_aa64mmfr0_bigend), MRS_FIELD(ID_AA64MMFR0, ASIDBits, false, MRS_EXACT, id_aa64mmfr0_asidbits), MRS_FIELD(ID_AA64MMFR0, PARange, false, MRS_EXACT, id_aa64mmfr0_parange), MRS_FIELD_END, }; /* ID_AA64MMFR1_EL1 */ static struct 
mrs_field_value id_aa64mmfr1_xnx[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64MMFR1, XNX, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64mmfr1_specsei[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64MMFR1, SpecSEI, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64mmfr1_pan[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64MMFR1, PAN, NONE, IMPL), MRS_FIELD_VALUE(ID_AA64MMFR1_PAN_ATS1E1, "PAN+ATS1E1"), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64mmfr1_lo[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64MMFR1, LO, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64mmfr1_hpds[] = { MRS_FIELD_VALUE(ID_AA64MMFR1_HPDS_NONE, ""), MRS_FIELD_VALUE(ID_AA64MMFR1_HPDS_HPD, "HPD"), MRS_FIELD_VALUE(ID_AA64MMFR1_HPDS_TTPBHA, "HPD+TTPBHA"), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64mmfr1_vh[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64MMFR1, VH, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64mmfr1_vmidbits[] = { MRS_FIELD_VALUE(ID_AA64MMFR1_VMIDBits_8, "8bit VMID"), MRS_FIELD_VALUE(ID_AA64MMFR1_VMIDBits_16, "16bit VMID"), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64mmfr1_hafdbs[] = { MRS_FIELD_VALUE(ID_AA64MMFR1_HAFDBS_NONE, ""), MRS_FIELD_VALUE(ID_AA64MMFR1_HAFDBS_AF, "HAF"), MRS_FIELD_VALUE(ID_AA64MMFR1_HAFDBS_AF_DBS, "HAF+DS"), MRS_FIELD_VALUE_END, }; static struct mrs_field id_aa64mmfr1_fields[] = { MRS_FIELD(ID_AA64MMFR1, XNX, false, MRS_EXACT, id_aa64mmfr1_xnx), MRS_FIELD(ID_AA64MMFR1, SpecSEI, false, MRS_EXACT, id_aa64mmfr1_specsei), MRS_FIELD(ID_AA64MMFR1, PAN, false, MRS_EXACT, id_aa64mmfr1_pan), MRS_FIELD(ID_AA64MMFR1, LO, false, MRS_EXACT, id_aa64mmfr1_lo), MRS_FIELD(ID_AA64MMFR1, HPDS, false, MRS_EXACT, id_aa64mmfr1_hpds), MRS_FIELD(ID_AA64MMFR1, VH, false, MRS_EXACT, id_aa64mmfr1_vh), MRS_FIELD(ID_AA64MMFR1, VMIDBits, false, MRS_EXACT, id_aa64mmfr1_vmidbits), MRS_FIELD(ID_AA64MMFR1, HAFDBS, false, MRS_EXACT, id_aa64mmfr1_hafdbs), MRS_FIELD_END, }; /* ID_AA64MMFR2_EL1 */ static struct mrs_field_value id_aa64mmfr2_e0pd[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64MMFR2, E0PD, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64mmfr2_evt[] = { MRS_FIELD_VALUE(ID_AA64MMFR2_EVT_NONE, ""), MRS_FIELD_VALUE(ID_AA64MMFR2_EVT_8_2, "EVT-8.2"), MRS_FIELD_VALUE(ID_AA64MMFR2_EVT_8_5, "EVT-8.5"), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64mmfr2_bbm[] = { MRS_FIELD_VALUE(ID_AA64MMFR2_BBM_LEVEL0, ""), MRS_FIELD_VALUE(ID_AA64MMFR2_BBM_LEVEL1, "BBM level 1"), MRS_FIELD_VALUE(ID_AA64MMFR2_BBM_LEVEL2, "BBM level 2"), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64mmfr2_ttl[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64MMFR2, TTL, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64mmfr2_fwb[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64MMFR2, FWB, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64mmfr2_ids[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64MMFR2, IDS, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64mmfr2_at[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64MMFR2, AT, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap id_aa64mmfr2_at_caps[] = { MRS_HWCAP(&elf_hwcap, HWCAP_USCAT, ID_AA64MMFR2_AT_IMPL), MRS_HWCAP_END }; static struct mrs_field_value id_aa64mmfr2_st[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64MMFR2, ST, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64mmfr2_nv[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64MMFR2, NV, NONE, 8_3), 
MRS_FIELD_VALUE(ID_AA64MMFR2_NV_8_4, "NV v8.4"), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64mmfr2_ccidx[] = { MRS_FIELD_VALUE(ID_AA64MMFR2_CCIDX_32, "32bit CCIDX"), MRS_FIELD_VALUE(ID_AA64MMFR2_CCIDX_64, "64bit CCIDX"), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64mmfr2_varange[] = { MRS_FIELD_VALUE(ID_AA64MMFR2_VARange_48, "48bit VA"), MRS_FIELD_VALUE(ID_AA64MMFR2_VARange_52, "52bit VA"), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64mmfr2_iesb[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64MMFR2, IESB, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64mmfr2_lsm[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64MMFR2, LSM, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64mmfr2_uao[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64MMFR2, UAO, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64mmfr2_cnp[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64MMFR2, CnP, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field id_aa64mmfr2_fields[] = { MRS_FIELD(ID_AA64MMFR2, E0PD, false, MRS_EXACT, id_aa64mmfr2_e0pd), MRS_FIELD(ID_AA64MMFR2, EVT, false, MRS_EXACT, id_aa64mmfr2_evt), MRS_FIELD(ID_AA64MMFR2, BBM, false, MRS_EXACT, id_aa64mmfr2_bbm), MRS_FIELD(ID_AA64MMFR2, TTL, false, MRS_EXACT, id_aa64mmfr2_ttl), MRS_FIELD(ID_AA64MMFR2, FWB, false, MRS_EXACT, id_aa64mmfr2_fwb), MRS_FIELD(ID_AA64MMFR2, IDS, false, MRS_EXACT, id_aa64mmfr2_ids), MRS_FIELD_HWCAP(ID_AA64MMFR2, AT, false, MRS_LOWER, id_aa64mmfr2_at, id_aa64mmfr2_at_caps), MRS_FIELD(ID_AA64MMFR2, ST, false, MRS_EXACT, id_aa64mmfr2_st), MRS_FIELD(ID_AA64MMFR2, NV, false, MRS_EXACT, id_aa64mmfr2_nv), MRS_FIELD(ID_AA64MMFR2, CCIDX, false, MRS_EXACT, id_aa64mmfr2_ccidx), MRS_FIELD(ID_AA64MMFR2, VARange, false, MRS_EXACT, id_aa64mmfr2_varange), MRS_FIELD(ID_AA64MMFR2, IESB, false, MRS_EXACT, id_aa64mmfr2_iesb), MRS_FIELD(ID_AA64MMFR2, LSM, false, MRS_EXACT, id_aa64mmfr2_lsm), MRS_FIELD(ID_AA64MMFR2, UAO, false, MRS_EXACT, id_aa64mmfr2_uao), MRS_FIELD(ID_AA64MMFR2, CnP, false, MRS_EXACT, id_aa64mmfr2_cnp), MRS_FIELD_END, }; /* ID_AA64PFR0_EL1 */ static struct mrs_field_value id_aa64pfr0_csv3[] = { MRS_FIELD_VALUE(ID_AA64PFR0_CSV3_NONE, ""), MRS_FIELD_VALUE(ID_AA64PFR0_CSV3_ISOLATED, "CSV3"), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64pfr0_csv2[] = { MRS_FIELD_VALUE(ID_AA64PFR0_CSV2_NONE, ""), MRS_FIELD_VALUE(ID_AA64PFR0_CSV2_ISOLATED, "CSV2"), MRS_FIELD_VALUE(ID_AA64PFR0_CSV2_SCXTNUM, "SCXTNUM"), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64pfr0_dit[] = { MRS_FIELD_VALUE(ID_AA64PFR0_DIT_NONE, ""), MRS_FIELD_VALUE(ID_AA64PFR0_DIT_PSTATE, "PSTATE.DIT"), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap id_aa64pfr0_dit_caps[] = { MRS_HWCAP(&elf_hwcap, HWCAP_DIT, ID_AA64PFR0_DIT_PSTATE), MRS_HWCAP_END }; static struct mrs_field_value id_aa64pfr0_amu[] = { MRS_FIELD_VALUE(ID_AA64PFR0_AMU_NONE, ""), MRS_FIELD_VALUE(ID_AA64PFR0_AMU_V1, "AMUv1"), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64pfr0_mpam[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64PFR0, MPAM, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64pfr0_sel2[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64PFR0, SEL2, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64pfr0_sve[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64PFR0, SVE, NONE, IMPL), MRS_FIELD_VALUE_END, }; #if 0 /* Enable when we add SVE support */ static struct mrs_field_hwcap id_aa64pfr0_sve_caps[] = { MRS_HWCAP(&elf_hwcap, HWCAP_SVE, ID_AA64PFR0_SVE_IMPL), MRS_HWCAP_END 
}; #endif static struct mrs_field_value id_aa64pfr0_ras[] = { MRS_FIELD_VALUE(ID_AA64PFR0_RAS_NONE, ""), MRS_FIELD_VALUE(ID_AA64PFR0_RAS_IMPL, "RAS"), MRS_FIELD_VALUE(ID_AA64PFR0_RAS_8_4, "RAS v8.4"), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64pfr0_gic[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64PFR0, GIC, CPUIF_NONE, CPUIF_EN), + MRS_FIELD_VALUE(ID_AA64PFR0_GIC_CPUIF_NONE, ""), + MRS_FIELD_VALUE(ID_AA64PFR0_GIC_CPUIF_EN, "GIC"), + MRS_FIELD_VALUE(ID_AA64PFR0_GIC_CPUIF_4_1, "GIC 4.1"), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64pfr0_advsimd[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64PFR0, AdvSIMD, NONE, IMPL), MRS_FIELD_VALUE(ID_AA64PFR0_AdvSIMD_HP, "AdvSIMD+HP"), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap id_aa64pfr0_advsimd_caps[] = { MRS_HWCAP(&elf_hwcap, HWCAP_ASIMD, ID_AA64PFR0_AdvSIMD_IMPL), MRS_HWCAP(&elf_hwcap, HWCAP_ASIMDHP, ID_AA64PFR0_AdvSIMD_HP), MRS_HWCAP_END }; static struct mrs_field_value id_aa64pfr0_fp[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64PFR0, FP, NONE, IMPL), MRS_FIELD_VALUE(ID_AA64PFR0_FP_HP, "FP+HP"), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap id_aa64pfr0_fp_caps[] = { MRS_HWCAP(&elf_hwcap, HWCAP_FP, ID_AA64PFR0_FP_IMPL), MRS_HWCAP(&elf_hwcap, HWCAP_FPHP, ID_AA64PFR0_FP_HP), MRS_HWCAP_END }; static struct mrs_field_value id_aa64pfr0_el3[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64PFR0, EL3, NONE, 64), MRS_FIELD_VALUE(ID_AA64PFR0_EL3_64_32, "EL3 32"), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64pfr0_el2[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_AA64PFR0, EL2, NONE, 64), MRS_FIELD_VALUE(ID_AA64PFR0_EL2_64_32, "EL2 32"), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64pfr0_el1[] = { MRS_FIELD_VALUE(ID_AA64PFR0_EL1_64, "EL1"), MRS_FIELD_VALUE(ID_AA64PFR0_EL1_64_32, "EL1 32"), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64pfr0_el0[] = { MRS_FIELD_VALUE(ID_AA64PFR0_EL0_64, "EL0"), MRS_FIELD_VALUE(ID_AA64PFR0_EL0_64_32, "EL0 32"), MRS_FIELD_VALUE_END, }; static struct mrs_field id_aa64pfr0_fields[] = { MRS_FIELD(ID_AA64PFR0, CSV3, false, MRS_EXACT, id_aa64pfr0_csv3), MRS_FIELD(ID_AA64PFR0, CSV2, false, MRS_EXACT, id_aa64pfr0_csv2), MRS_FIELD_HWCAP(ID_AA64PFR0, DIT, false, MRS_LOWER, id_aa64pfr0_dit, id_aa64pfr0_dit_caps), MRS_FIELD(ID_AA64PFR0, AMU, false, MRS_EXACT, id_aa64pfr0_amu), MRS_FIELD(ID_AA64PFR0, MPAM, false, MRS_EXACT, id_aa64pfr0_mpam), MRS_FIELD(ID_AA64PFR0, SEL2, false, MRS_EXACT, id_aa64pfr0_sel2), MRS_FIELD(ID_AA64PFR0, SVE, false, MRS_EXACT, id_aa64pfr0_sve), MRS_FIELD(ID_AA64PFR0, RAS, false, MRS_EXACT, id_aa64pfr0_ras), MRS_FIELD(ID_AA64PFR0, GIC, false, MRS_EXACT, id_aa64pfr0_gic), MRS_FIELD_HWCAP(ID_AA64PFR0, AdvSIMD, true, MRS_LOWER, id_aa64pfr0_advsimd, id_aa64pfr0_advsimd_caps), MRS_FIELD_HWCAP(ID_AA64PFR0, FP, true, MRS_LOWER, id_aa64pfr0_fp, id_aa64pfr0_fp_caps), MRS_FIELD(ID_AA64PFR0, EL3, false, MRS_EXACT, id_aa64pfr0_el3), MRS_FIELD(ID_AA64PFR0, EL2, false, MRS_EXACT, id_aa64pfr0_el2), MRS_FIELD(ID_AA64PFR0, EL1, false, MRS_LOWER, id_aa64pfr0_el1), MRS_FIELD(ID_AA64PFR0, EL0, false, MRS_LOWER, id_aa64pfr0_el0), MRS_FIELD_END, }; /* ID_AA64PFR1_EL1 */ static struct mrs_field_value id_aa64pfr1_mte[] = { MRS_FIELD_VALUE(ID_AA64PFR1_MTE_NONE, ""), MRS_FIELD_VALUE(ID_AA64PFR1_MTE_IMPL_EL0, "MTE EL0"), MRS_FIELD_VALUE(ID_AA64PFR1_MTE_IMPL, "MTE"), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_aa64pfr1_ssbs[] = { MRS_FIELD_VALUE(ID_AA64PFR1_SSBS_NONE, ""), MRS_FIELD_VALUE(ID_AA64PFR1_SSBS_PSTATE, "PSTATE.SSBS"), MRS_FIELD_VALUE(ID_AA64PFR1_SSBS_PSTATE_MSR, 
"PSTATE.SSBS MSR"), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap id_aa64pfr1_ssbs_caps[] = { MRS_HWCAP(&elf_hwcap, HWCAP_SSBS, ID_AA64PFR1_SSBS_PSTATE), MRS_HWCAP_END }; static struct mrs_field_value id_aa64pfr1_bt[] = { MRS_FIELD_VALUE(ID_AA64PFR1_BT_NONE, ""), MRS_FIELD_VALUE(ID_AA64PFR1_BT_IMPL, "BTI"), MRS_FIELD_VALUE_END, }; #if 0 /* Enable when we add BTI support */ static struct mrs_field_hwcap id_aa64pfr1_bt_caps[] = { MRS_HWCAP(&elf_hwcap2, HWCAP2_BTI, ID_AA64PFR1_BT_IMPL), MRS_HWCAP_END }; #endif static struct mrs_field id_aa64pfr1_fields[] = { MRS_FIELD(ID_AA64PFR1, MTE, false, MRS_EXACT, id_aa64pfr1_mte), MRS_FIELD_HWCAP(ID_AA64PFR1, SSBS, false, MRS_LOWER, id_aa64pfr1_ssbs, id_aa64pfr1_ssbs_caps), MRS_FIELD(ID_AA64PFR1, BT, false, MRS_EXACT, id_aa64pfr1_bt), MRS_FIELD_END, }; #ifdef COMPAT_FREEBSD32 /* ID_ISAR5_EL1 */ static struct mrs_field_value id_isar5_vcma[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_ISAR5, VCMA, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_isar5_rdm[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_ISAR5, RDM, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_value id_isar5_crc32[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_ISAR5, CRC32, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap id_isar5_crc32_caps[] = { MRS_HWCAP(&elf32_hwcap2, HWCAP32_2_CRC32, ID_ISAR5_CRC32_IMPL), MRS_HWCAP_END }; static struct mrs_field_value id_isar5_sha2[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_ISAR5, SHA2, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap id_isar5_sha2_caps[] = { MRS_HWCAP(&elf32_hwcap2, HWCAP32_2_SHA2, ID_ISAR5_SHA2_IMPL), MRS_HWCAP_END }; static struct mrs_field_value id_isar5_sha1[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_ISAR5, SHA1, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap id_isar5_sha1_caps[] = { MRS_HWCAP(&elf32_hwcap2, HWCAP32_2_SHA1, ID_ISAR5_SHA1_IMPL), MRS_HWCAP_END }; static struct mrs_field_value id_isar5_aes[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_ISAR5, AES, NONE, BASE), MRS_FIELD_VALUE(ID_ISAR5_AES_VMULL, "AES+VMULL"), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap id_isar5_aes_caps[] = { MRS_HWCAP(&elf32_hwcap2, HWCAP32_2_AES, ID_ISAR5_AES_BASE), MRS_HWCAP(&elf32_hwcap2, HWCAP32_2_PMULL, ID_ISAR5_AES_VMULL), MRS_HWCAP_END }; static struct mrs_field_value id_isar5_sevl[] = { MRS_FIELD_VALUE_NONE_IMPL(ID_ISAR5, SEVL, NOP, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field id_isar5_fields[] = { MRS_FIELD(ID_ISAR5, VCMA, false, MRS_LOWER, id_isar5_vcma), MRS_FIELD(ID_ISAR5, RDM, false, MRS_LOWER, id_isar5_rdm), MRS_FIELD_HWCAP(ID_ISAR5, CRC32, false, MRS_LOWER, id_isar5_crc32, id_isar5_crc32_caps), MRS_FIELD_HWCAP(ID_ISAR5, SHA2, false, MRS_LOWER, id_isar5_sha2, id_isar5_sha2_caps), MRS_FIELD_HWCAP(ID_ISAR5, SHA1, false, MRS_LOWER, id_isar5_sha1, id_isar5_sha1_caps), MRS_FIELD_HWCAP(ID_ISAR5, AES, false, MRS_LOWER, id_isar5_aes, id_isar5_aes_caps), MRS_FIELD(ID_ISAR5, SEVL, false, MRS_LOWER, id_isar5_sevl), MRS_FIELD_END, }; /* MVFR0 */ static struct mrs_field_value mvfr0_fpround[] = { MRS_FIELD_VALUE_NONE_IMPL(MVFR0, FPRound, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_value mvfr0_fpsqrt[] = { MRS_FIELD_VALUE_NONE_IMPL(MVFR0, FPSqrt, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_value mvfr0_fpdivide[] = { MRS_FIELD_VALUE_NONE_IMPL(MVFR0, FPDivide, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_value mvfr0_fptrap[] = { MRS_FIELD_VALUE_NONE_IMPL(MVFR0, FPTrap, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_value 
mvfr0_fpdp[] = { MRS_FIELD_VALUE(MVFR0_FPDP_NONE, ""), MRS_FIELD_VALUE(MVFR0_FPDP_VFP_v2, "DP VFPv2"), MRS_FIELD_VALUE(MVFR0_FPDP_VFP_v3_v4, "DP VFPv3+v4"), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap mvfr0_fpdp_caps[] = { MRS_HWCAP(&elf32_hwcap, HWCAP32_VFP, MVFR0_FPDP_VFP_v2), MRS_HWCAP(&elf32_hwcap, HWCAP32_VFPv3, MVFR0_FPDP_VFP_v3_v4), }; static struct mrs_field_value mvfr0_fpsp[] = { MRS_FIELD_VALUE(MVFR0_FPSP_NONE, ""), MRS_FIELD_VALUE(MVFR0_FPSP_VFP_v2, "SP VFPv2"), MRS_FIELD_VALUE(MVFR0_FPSP_VFP_v3_v4, "SP VFPv3+v4"), MRS_FIELD_VALUE_END, }; static struct mrs_field_value mvfr0_simdreg[] = { MRS_FIELD_VALUE(MVFR0_SIMDReg_NONE, ""), MRS_FIELD_VALUE(MVFR0_SIMDReg_FP, "FP 16x64"), MRS_FIELD_VALUE(MVFR0_SIMDReg_AdvSIMD, "AdvSIMD"), MRS_FIELD_VALUE_END, }; static struct mrs_field mvfr0_fields[] = { MRS_FIELD(MVFR0, FPRound, false, MRS_LOWER, mvfr0_fpround), MRS_FIELD(MVFR0, FPSqrt, false, MRS_LOWER, mvfr0_fpsqrt), MRS_FIELD(MVFR0, FPDivide, false, MRS_LOWER, mvfr0_fpdivide), MRS_FIELD(MVFR0, FPTrap, false, MRS_LOWER, mvfr0_fptrap), MRS_FIELD_HWCAP(MVFR0, FPDP, false, MRS_LOWER, mvfr0_fpdp, mvfr0_fpdp_caps), MRS_FIELD(MVFR0, FPSP, false, MRS_LOWER, mvfr0_fpsp), MRS_FIELD(MVFR0, SIMDReg, false, MRS_LOWER, mvfr0_simdreg), MRS_FIELD_END, }; /* MVFR1 */ static struct mrs_field_value mvfr1_simdfmac[] = { MRS_FIELD_VALUE_NONE_IMPL(MVFR1, SIMDFMAC, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap mvfr1_simdfmac_caps[] = { MRS_HWCAP(&elf32_hwcap, HWCAP32_VFPv4, MVFR1_SIMDFMAC_IMPL), MRS_HWCAP_END }; static struct mrs_field_value mvfr1_fphp[] = { MRS_FIELD_VALUE(MVFR1_FPHP_NONE, ""), MRS_FIELD_VALUE(MVFR1_FPHP_CONV_SP, "FPHP SP Conv"), MRS_FIELD_VALUE(MVFR1_FPHP_CONV_DP, "FPHP DP Conv"), MRS_FIELD_VALUE(MVFR1_FPHP_ARITH, "FPHP Arith"), MRS_FIELD_VALUE_END, }; static struct mrs_field_value mvfr1_simdhp[] = { MRS_FIELD_VALUE(MVFR1_SIMDHP_NONE, ""), MRS_FIELD_VALUE(MVFR1_SIMDHP_CONV_SP, "SIMDHP SP Conv"), MRS_FIELD_VALUE(MVFR1_SIMDHP_ARITH, "SIMDHP Arith"), MRS_FIELD_VALUE_END, }; static struct mrs_field_value mvfr1_simdsp[] = { MRS_FIELD_VALUE_NONE_IMPL(MVFR1, SIMDSP, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_value mvfr1_simdint[] = { MRS_FIELD_VALUE_NONE_IMPL(MVFR1, SIMDInt, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_value mvfr1_simdls[] = { MRS_FIELD_VALUE_NONE_IMPL(MVFR1, SIMDLS, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_hwcap mvfr1_simdls_caps[] = { MRS_HWCAP(&elf32_hwcap, HWCAP32_VFPv4, MVFR1_SIMDFMAC_IMPL), MRS_HWCAP_END }; static struct mrs_field_value mvfr1_fpdnan[] = { MRS_FIELD_VALUE_NONE_IMPL(MVFR1, FPDNaN, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field_value mvfr1_fpftz[] = { MRS_FIELD_VALUE_NONE_IMPL(MVFR1, FPFtZ, NONE, IMPL), MRS_FIELD_VALUE_END, }; static struct mrs_field mvfr1_fields[] = { MRS_FIELD_HWCAP(MVFR1, SIMDFMAC, false, MRS_LOWER, mvfr1_simdfmac, mvfr1_simdfmac_caps), MRS_FIELD(MVFR1, FPHP, false, MRS_LOWER, mvfr1_fphp), MRS_FIELD(MVFR1, SIMDHP, false, MRS_LOWER, mvfr1_simdhp), MRS_FIELD(MVFR1, SIMDSP, false, MRS_LOWER, mvfr1_simdsp), MRS_FIELD(MVFR1, SIMDInt, false, MRS_LOWER, mvfr1_simdint), MRS_FIELD_HWCAP(MVFR1, SIMDLS, false, MRS_LOWER, mvfr1_simdls, mvfr1_simdls_caps), MRS_FIELD(MVFR1, FPDNaN, false, MRS_LOWER, mvfr1_fpdnan), MRS_FIELD(MVFR1, FPFtZ, false, MRS_LOWER, mvfr1_fpftz), MRS_FIELD_END, }; #endif /* COMPAT_FREEBSD32 */ struct mrs_user_reg { u_int reg; u_int CRm; u_int Op2; size_t offset; struct mrs_field *fields; }; #define USER_REG(name, field_name) \ { \ .reg = 
name, \ .CRm = name##_CRm, \ .Op2 = name##_op2, \ .offset = __offsetof(struct cpu_desc, field_name), \ .fields = field_name##_fields, \ } static struct mrs_user_reg user_regs[] = { USER_REG(ID_AA64DFR0_EL1, id_aa64dfr0), USER_REG(ID_AA64ISAR0_EL1, id_aa64isar0), USER_REG(ID_AA64ISAR1_EL1, id_aa64isar1), USER_REG(ID_AA64MMFR0_EL1, id_aa64mmfr0), USER_REG(ID_AA64MMFR1_EL1, id_aa64mmfr1), USER_REG(ID_AA64MMFR2_EL1, id_aa64mmfr2), USER_REG(ID_AA64PFR0_EL1, id_aa64pfr0), USER_REG(ID_AA64PFR1_EL1, id_aa64pfr1), #ifdef COMPAT_FREEBSD32 USER_REG(ID_ISAR5_EL1, id_isar5), USER_REG(MVFR0_EL1, mvfr0), USER_REG(MVFR1_EL1, mvfr1), #endif /* COMPAT_FREEBSD32 */ }; #define CPU_DESC_FIELD(desc, idx) \ *(uint64_t *)((char *)&(desc) + user_regs[(idx)].offset) static int user_mrs_handler(vm_offset_t va, uint32_t insn, struct trapframe *frame, uint32_t esr) { uint64_t value; int CRm, Op2, i, reg; if ((insn & MRS_MASK) != MRS_VALUE) return (0); /* * We only emulate Op0 == 3, Op1 == 0, CRn == 0, CRm == {0, 4-7}. * These are in the EL1 CPU identification space. * CRm == 0 holds MIDR_EL1, MPIDR_EL1, and REVID_EL1. * CRm == {4-7} holds the ID_AA64 registers. * * For full details see the ARMv8 ARM (ARM DDI 0487C.a) * Table D9-2 System instruction encodings for non-Debug System * register accesses. */ if (mrs_Op0(insn) != 3 || mrs_Op1(insn) != 0 || mrs_CRn(insn) != 0) return (0); CRm = mrs_CRm(insn); if (CRm > 7 || (CRm < 4 && CRm != 0)) return (0); Op2 = mrs_Op2(insn); value = 0; for (i = 0; i < nitems(user_regs); i++) { if (user_regs[i].CRm == CRm && user_regs[i].Op2 == Op2) { value = CPU_DESC_FIELD(user_cpu_desc, i); break; } } if (CRm == 0) { switch (Op2) { case 0: value = READ_SPECIALREG(midr_el1); break; case 5: value = READ_SPECIALREG(mpidr_el1); break; case 6: value = READ_SPECIALREG(revidr_el1); break; default: return (0); } } /* * We will handle this instruction, move to the next so we * don't trap here again. */ frame->tf_elr += INSN_SIZE; reg = MRS_REGISTER(insn); /* If reg is 31 then write to xzr, i.e. do nothing */ if (reg == 31) return (1); if (reg < nitems(frame->tf_x)) frame->tf_x[reg] = value; else if (reg == 30) frame->tf_lr = value; return (1); } bool extract_user_id_field(u_int reg, u_int field_shift, uint8_t *val) { uint64_t value; int i; for (i = 0; i < nitems(user_regs); i++) { if (user_regs[i].reg == reg) { value = CPU_DESC_FIELD(user_cpu_desc, i); *val = value >> field_shift; return (true); } } return (false); } bool get_kernel_reg(u_int reg, uint64_t *val) { int i; for (i = 0; i < nitems(user_regs); i++) { if (user_regs[i].reg == reg) { *val = CPU_DESC_FIELD(kern_cpu_desc, i); return (true); } } return (false); } /* * Compares two field values that may be signed or unsigned. * Returns: * < 0 when a is less than b * = 0 when a equals b * > 0 when a is greater than b */ static int mrs_field_cmp(uint64_t a, uint64_t b, u_int shift, int width, bool sign) { uint64_t mask; KASSERT(width > 0 && width < 64, ("%s: Invalid width %d", __func__, width)); mask = (1ul << width) - 1; /* Move the field to the lower bits */ a = (a >> shift) & mask; b = (b >> shift) & mask; if (sign) { /* * The field is signed. Toggle the upper bit so the comparison * works on unsigned values as this makes positive numbers, * i.e. those with a 0 bit, larger than negative numbers, * i.e. those with a 1 bit, in an unsigned comparison. 
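	 * For example, with width = 4 the raw field values 0xf (-1) and
	 * 0x1 (+1) become 0x7 and 0x9 after the toggle, so the unsigned
	 * compare below still reports 0xf as the smaller value.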
*/ a ^= 1ul << (width - 1); b ^= 1ul << (width - 1); } return (a - b); } static uint64_t update_lower_register(uint64_t val, uint64_t new_val, u_int shift, int width, bool sign) { uint64_t mask; KASSERT(width > 0 && width < 64, ("%s: Invalid width %d", __func__, width)); /* * If the new value is less than the existing value update it. */ if (mrs_field_cmp(new_val, val, shift, width, sign) < 0) { mask = (1ul << width) - 1; val &= ~(mask << shift); val |= new_val & (mask << shift); } return (val); } void update_special_regs(u_int cpu) { struct mrs_field *fields; uint64_t user_reg, kern_reg, value; int i, j; if (cpu == 0) { /* Create a user visible cpu description with safe values */ memset(&user_cpu_desc, 0, sizeof(user_cpu_desc)); /* Safe values for these registers */ user_cpu_desc.id_aa64pfr0 = ID_AA64PFR0_AdvSIMD_NONE | ID_AA64PFR0_FP_NONE | ID_AA64PFR0_EL1_64 | ID_AA64PFR0_EL0_64; user_cpu_desc.id_aa64dfr0 = ID_AA64DFR0_DebugVer_8; } for (i = 0; i < nitems(user_regs); i++) { value = CPU_DESC_FIELD(cpu_desc[cpu], i); if (cpu == 0) { kern_reg = value; user_reg = value; } else { kern_reg = CPU_DESC_FIELD(kern_cpu_desc, i); user_reg = CPU_DESC_FIELD(user_cpu_desc, i); } fields = user_regs[i].fields; for (j = 0; fields[j].type != 0; j++) { switch (fields[j].type & MRS_TYPE_MASK) { case MRS_EXACT: user_reg &= ~(0xful << fields[j].shift); user_reg |= (uint64_t)MRS_EXACT_FIELD(fields[j].type) << fields[j].shift; break; case MRS_LOWER: user_reg = update_lower_register(user_reg, value, fields[j].shift, 4, fields[j].sign); break; default: panic("Invalid field type: %d", fields[j].type); } kern_reg = update_lower_register(kern_reg, value, fields[j].shift, 4, fields[j].sign); } CPU_DESC_FIELD(kern_cpu_desc, i) = kern_reg; CPU_DESC_FIELD(user_cpu_desc, i) = user_reg; } } /* HWCAP */ bool __read_frequently lse_supported = false; bool __read_frequently icache_aliasing = false; bool __read_frequently icache_vmid = false; int64_t dcache_line_size; /* The minimum D cache line size */ int64_t icache_line_size; /* The minimum I cache line size */ int64_t idcache_line_size; /* The minimum cache line size */ /* * Find the values to export to userspace as AT_HWCAP and AT_HWCAP2. 
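 *
 * Userspace can query the result with elf_aux_info(3), e.g. (illustrative
 * only):
 *
 *	unsigned long hw = 0;
 *	elf_aux_info(AT_HWCAP, &hw, sizeof(hw));
 *	if ((hw & HWCAP_ATOMICS) != 0)
 *		;	/* LSE atomics are available */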
*/ static void parse_cpu_features(void) { struct mrs_field_hwcap *hwcaps; struct mrs_field *fields; uint64_t min, reg; int i, j, k; for (i = 0; i < nitems(user_regs); i++) { reg = CPU_DESC_FIELD(user_cpu_desc, i); fields = user_regs[i].fields; for (j = 0; fields[j].type != 0; j++) { hwcaps = fields[j].hwcaps; if (hwcaps == NULL) continue; for (k = 0; hwcaps[k].hwcap != NULL; k++) { min = hwcaps[k].min; /* * If the field is greater than the minimum * value we can set the hwcap; */ if (mrs_field_cmp(reg, min, fields[j].shift, 4, fields[j].sign) >= 0) { *hwcaps[k].hwcap |= hwcaps[k].hwcap_val; } } } } } static void identify_cpu_sysinit(void *dummy __unused) { int cpu; bool dic, idc; dic = (allow_dic != 0); idc = (allow_idc != 0); CPU_FOREACH(cpu) { check_cpu_regs(cpu); if (cpu != 0) update_special_regs(cpu); if (CTR_DIC_VAL(cpu_desc[cpu].ctr) == 0) dic = false; if (CTR_IDC_VAL(cpu_desc[cpu].ctr) == 0) idc = false; } /* Find the values to export to userspace as AT_HWCAP and AT_HWCAP2 */ parse_cpu_features(); #ifdef COMPAT_FREEBSD32 /* Set the default caps and any that need to check multiple fields */ elf32_hwcap |= parse_cpu_features_hwcap32(); #endif if (dic && idc) { arm64_icache_sync_range = &arm64_dic_idc_icache_sync_range; if (bootverbose) printf("Enabling DIC & IDC ICache sync\n"); } if ((elf_hwcap & HWCAP_ATOMICS) != 0) { lse_supported = true; if (bootverbose) printf("Enabling LSE atomics in the kernel\n"); } #ifdef LSE_ATOMICS if (!lse_supported) panic("CPU does not support LSE atomic instructions"); #endif install_undef_handler(true, user_mrs_handler); } SYSINIT(identify_cpu, SI_SUB_CPU, SI_ORDER_MIDDLE, identify_cpu_sysinit, NULL); static void cpu_features_sysinit(void *dummy __unused) { struct sbuf sb; u_int cpu; CPU_FOREACH(cpu) print_cpu_features(cpu); /* Fill in cpu_model for the hw.model sysctl */ sbuf_new(&sb, cpu_model, sizeof(cpu_model), SBUF_FIXEDLEN); print_cpu_midr(&sb, 0); sbuf_finish(&sb); sbuf_delete(&sb); } /* Log features before APs are released and start printing to the dmesg. 
*/ SYSINIT(cpu_features, SI_SUB_SMP - 1, SI_ORDER_ANY, cpu_features_sysinit, NULL); #ifdef COMPAT_FREEBSD32 static u_long parse_cpu_features_hwcap32(void) { u_long hwcap = HWCAP32_DEFAULT; if ((MVFR1_SIMDLS_VAL(user_cpu_desc.mvfr1) >= MVFR1_SIMDLS_IMPL) && (MVFR1_SIMDInt_VAL(user_cpu_desc.mvfr1) >= MVFR1_SIMDInt_IMPL) && (MVFR1_SIMDSP_VAL(user_cpu_desc.mvfr1) >= MVFR1_SIMDSP_IMPL)) hwcap |= HWCAP32_NEON; return (hwcap); } #endif /* COMPAT_FREEBSD32 */ static void print_ctr_fields(struct sbuf *sb, uint64_t reg, void *arg) { sbuf_printf(sb, "%u byte D-cacheline,", CTR_DLINE_SIZE(reg)); sbuf_printf(sb, "%u byte I-cacheline,", CTR_ILINE_SIZE(reg)); reg &= ~(CTR_DLINE_MASK | CTR_ILINE_MASK); switch(CTR_L1IP_VAL(reg)) { case CTR_L1IP_VPIPT: sbuf_printf(sb, "VPIPT"); break; case CTR_L1IP_AIVIVT: sbuf_printf(sb, "AIVIVT"); break; case CTR_L1IP_VIPT: sbuf_printf(sb, "VIPT"); break; case CTR_L1IP_PIPT: sbuf_printf(sb, "PIPT"); break; } sbuf_printf(sb, " ICache,"); reg &= ~CTR_L1IP_MASK; sbuf_printf(sb, "%d byte ERG,", CTR_ERG_SIZE(reg)); sbuf_printf(sb, "%d byte CWG", CTR_CWG_SIZE(reg)); reg &= ~(CTR_ERG_MASK | CTR_CWG_MASK); if (CTR_IDC_VAL(reg) != 0) sbuf_printf(sb, ",IDC"); if (CTR_DIC_VAL(reg) != 0) sbuf_printf(sb, ",DIC"); reg &= ~(CTR_IDC_MASK | CTR_DIC_MASK); reg &= ~CTR_RES1; if (reg != 0) sbuf_printf(sb, ",%lx", reg); } static void print_register(struct sbuf *sb, const char *reg_name, uint64_t reg, void (*print_fields)(struct sbuf *, uint64_t, void *), void *arg) { sbuf_printf(sb, "%29s = <", reg_name); print_fields(sb, reg, arg); sbuf_finish(sb); printf("%s>\n", sbuf_data(sb)); sbuf_clear(sb); } static void print_id_fields(struct sbuf *sb, uint64_t reg, void *arg) { struct mrs_field *fields = arg; struct mrs_field_value *fv; int field, i, j, printed; #define SEP_STR ((printed++) == 0) ? 
"" : "," printed = 0; for (i = 0; fields[i].type != 0; i++) { fv = fields[i].values; /* TODO: Handle with an unknown message */ if (fv == NULL) continue; field = (reg & fields[i].mask) >> fields[i].shift; for (j = 0; fv[j].desc != NULL; j++) { if ((fv[j].value >> fields[i].shift) != field) continue; if (fv[j].desc[0] != '\0') sbuf_printf(sb, "%s%s", SEP_STR, fv[j].desc); break; } if (fv[j].desc == NULL) sbuf_printf(sb, "%sUnknown %s(%x)", SEP_STR, fields[i].name, field); reg &= ~(0xful << fields[i].shift); } if (reg != 0) sbuf_printf(sb, "%s%#lx", SEP_STR, reg); #undef SEP_STR } static void print_id_register(struct sbuf *sb, const char *reg_name, uint64_t reg, struct mrs_field *fields) { print_register(sb, reg_name, reg, print_id_fields, fields); } static void print_cpu_midr(struct sbuf *sb, u_int cpu) { const struct cpu_parts *cpu_partsp; const char *cpu_impl_name; const char *cpu_part_name; u_int midr; u_int impl_id; u_int part_id; midr = pcpu_find(cpu)->pc_midr; cpu_impl_name = NULL; cpu_partsp = NULL; impl_id = CPU_IMPL(midr); for (int i = 0; cpu_implementers[i].impl_name != NULL; i++) { if (impl_id == cpu_implementers[i].impl_id) { cpu_impl_name = cpu_implementers[i].impl_name; cpu_partsp = cpu_implementers[i].cpu_parts; break; } } /* Unknown implementer, so unknown part */ if (cpu_impl_name == NULL) { sbuf_printf(sb, "Unknown Implementer (midr: %08x)", midr); return; } KASSERT(cpu_partsp != NULL, ("%s: No parts table for implementer %s", __func__, cpu_impl_name)); cpu_part_name = NULL; part_id = CPU_PART(midr); for (int i = 0; cpu_partsp[i].part_name != NULL; i++) { if (part_id == cpu_partsp[i].part_id) { cpu_part_name = cpu_partsp[i].part_name; break; } } /* Known Implementer, Unknown part */ if (cpu_part_name == NULL) { sbuf_printf(sb, "%s Unknown CPU r%dp%d (midr: %08x)", cpu_impl_name, CPU_VAR(midr), CPU_REV(midr), midr); return; } sbuf_printf(sb, "%s %s r%dp%d", cpu_impl_name, cpu_part_name, CPU_VAR(midr), CPU_REV(midr)); } static void print_cpu_features(u_int cpu) { struct sbuf *sb; sb = sbuf_new_auto(); sbuf_printf(sb, "CPU%3u: ", cpu); print_cpu_midr(sb, cpu); sbuf_cat(sb, " affinity:"); switch(cpu_aff_levels) { default: case 4: sbuf_printf(sb, " %2d", CPU_AFF3(cpu_desc[cpu].mpidr)); /* FALLTHROUGH */ case 3: sbuf_printf(sb, " %2d", CPU_AFF2(cpu_desc[cpu].mpidr)); /* FALLTHROUGH */ case 2: sbuf_printf(sb, " %2d", CPU_AFF1(cpu_desc[cpu].mpidr)); /* FALLTHROUGH */ case 1: case 0: /* On UP this will be zero */ sbuf_printf(sb, " %2d", CPU_AFF0(cpu_desc[cpu].mpidr)); break; } sbuf_finish(sb); printf("%s\n", sbuf_data(sb)); sbuf_clear(sb); /* * There is a hardware errata where, if one CPU is performing a TLB * invalidation while another is performing a store-exclusive the * store-exclusive may return the wrong status. A workaround seems * to be to use an IPI to invalidate on each CPU, however given the * limited number of affected units (pass 1.1 is the evaluation * hardware revision), and the lack of information from Cavium * this has not been implemented. * * At the time of writing this the only information is from: * https://lkml.org/lkml/2016/8/4/722 */ /* * XXX: CPU_MATCH_ERRATA_CAVIUM_THUNDERX_1_1 on its own also * triggers on pass 2.0+. 
*/ if (cpu == 0 && CPU_VAR(PCPU_GET(midr)) == 0 && CPU_MATCH_ERRATA_CAVIUM_THUNDERX_1_1) printf("WARNING: ThunderX Pass 1.1 detected.\nThis has known " "hardware bugs that may cause the incorrect operation of " "atomic operations.\n"); /* Cache Type Register */ if (cpu == 0 || (cpu_print_regs & PRINT_CTR_EL0) != 0) { print_register(sb, "Cache Type", cpu_desc[cpu].ctr, print_ctr_fields, NULL); } /* AArch64 Instruction Set Attribute Register 0 */ if (cpu == 0 || (cpu_print_regs & PRINT_ID_AA64_ISAR0) != 0) print_id_register(sb, "Instruction Set Attributes 0", cpu_desc[cpu].id_aa64isar0, id_aa64isar0_fields); /* AArch64 Instruction Set Attribute Register 1 */ if (cpu == 0 || (cpu_print_regs & PRINT_ID_AA64_ISAR1) != 0) print_id_register(sb, "Instruction Set Attributes 1", cpu_desc[cpu].id_aa64isar1, id_aa64isar1_fields); /* AArch64 Processor Feature Register 0 */ if (cpu == 0 || (cpu_print_regs & PRINT_ID_AA64_PFR0) != 0) print_id_register(sb, "Processor Features 0", cpu_desc[cpu].id_aa64pfr0, id_aa64pfr0_fields); /* AArch64 Processor Feature Register 1 */ if (cpu == 0 || (cpu_print_regs & PRINT_ID_AA64_PFR1) != 0) print_id_register(sb, "Processor Features 1", cpu_desc[cpu].id_aa64pfr1, id_aa64pfr1_fields); /* AArch64 Memory Model Feature Register 0 */ if (cpu == 0 || (cpu_print_regs & PRINT_ID_AA64_MMFR0) != 0) print_id_register(sb, "Memory Model Features 0", cpu_desc[cpu].id_aa64mmfr0, id_aa64mmfr0_fields); /* AArch64 Memory Model Feature Register 1 */ if (cpu == 0 || (cpu_print_regs & PRINT_ID_AA64_MMFR1) != 0) print_id_register(sb, "Memory Model Features 1", cpu_desc[cpu].id_aa64mmfr1, id_aa64mmfr1_fields); /* AArch64 Memory Model Feature Register 2 */ if (cpu == 0 || (cpu_print_regs & PRINT_ID_AA64_MMFR2) != 0) print_id_register(sb, "Memory Model Features 2", cpu_desc[cpu].id_aa64mmfr2, id_aa64mmfr2_fields); /* AArch64 Debug Feature Register 0 */ if (cpu == 0 || (cpu_print_regs & PRINT_ID_AA64_DFR0) != 0) print_id_register(sb, "Debug Features 0", cpu_desc[cpu].id_aa64dfr0, id_aa64dfr0_fields); /* AArch64 Debug Feature Register 1 */ if (cpu == 0 || (cpu_print_regs & PRINT_ID_AA64_DFR1) != 0) print_id_register(sb, "Debug Features 1", cpu_desc[cpu].id_aa64dfr1, id_aa64dfr1_fields); /* AArch64 Auxiliary Feature Register 0 */ if (cpu == 0 || (cpu_print_regs & PRINT_ID_AA64_AFR0) != 0) print_id_register(sb, "Auxiliary Features 0", cpu_desc[cpu].id_aa64afr0, id_aa64afr0_fields); /* AArch64 Auxiliary Feature Register 1 */ if (cpu == 0 || (cpu_print_regs & PRINT_ID_AA64_AFR1) != 0) print_id_register(sb, "Auxiliary Features 1", cpu_desc[cpu].id_aa64afr1, id_aa64afr1_fields); #ifdef COMPAT_FREEBSD32 /* AArch32 Instruction Set Attribute Register 5 */ if (cpu == 0 || (cpu_print_regs & PRINT_ID_ISAR5) != 0) print_id_register(sb, "AArch32 Instruction Set Attributes 5", cpu_desc[cpu].id_isar5, id_isar5_fields); /* AArch32 Media and VFP Feature Register 0 */ if (cpu == 0 || (cpu_print_regs & PRINT_MVFR0) != 0) print_id_register(sb, "AArch32 Media and VFP Features 0", cpu_desc[cpu].mvfr0, mvfr0_fields); /* AArch32 Media and VFP Feature Register 1 */ if (cpu == 0 || (cpu_print_regs & PRINT_MVFR1) != 0) print_id_register(sb, "AArch32 Media and VFP Features 1", cpu_desc[cpu].mvfr1, mvfr1_fields); #endif sbuf_delete(sb); sb = NULL; #undef SEP_STR } void identify_cache(uint64_t ctr) { /* Identify the L1 cache type */ switch (CTR_L1IP_VAL(ctr)) { case CTR_L1IP_PIPT: break; case CTR_L1IP_VPIPT: icache_vmid = true; break; default: case CTR_L1IP_VIPT: icache_aliasing = true; break; } if (dcache_line_size ==
0) { KASSERT(icache_line_size == 0, ("%s: i-cacheline size set: %ld", __func__, icache_line_size)); /* Get the D cache line size */ dcache_line_size = CTR_DLINE_SIZE(ctr); /* And the same for the I cache */ icache_line_size = CTR_ILINE_SIZE(ctr); idcache_line_size = MIN(dcache_line_size, icache_line_size); } if (dcache_line_size != CTR_DLINE_SIZE(ctr)) { printf("WARNING: D-cacheline size mismatch %ld != %d\n", dcache_line_size, CTR_DLINE_SIZE(ctr)); } if (icache_line_size != CTR_ILINE_SIZE(ctr)) { printf("WARNING: I-cacheline size mismatch %ld != %d\n", icache_line_size, CTR_ILINE_SIZE(ctr)); } } void identify_cpu(u_int cpu) { /* Save affinity for current CPU */ cpu_desc[cpu].mpidr = get_mpidr(); CPU_AFFINITY(cpu) = cpu_desc[cpu].mpidr & CPU_AFF_MASK; cpu_desc[cpu].ctr = READ_SPECIALREG(ctr_el0); cpu_desc[cpu].id_aa64dfr0 = READ_SPECIALREG(id_aa64dfr0_el1); cpu_desc[cpu].id_aa64dfr1 = READ_SPECIALREG(id_aa64dfr1_el1); cpu_desc[cpu].id_aa64isar0 = READ_SPECIALREG(id_aa64isar0_el1); cpu_desc[cpu].id_aa64isar1 = READ_SPECIALREG(id_aa64isar1_el1); cpu_desc[cpu].id_aa64mmfr0 = READ_SPECIALREG(id_aa64mmfr0_el1); cpu_desc[cpu].id_aa64mmfr1 = READ_SPECIALREG(id_aa64mmfr1_el1); cpu_desc[cpu].id_aa64mmfr2 = READ_SPECIALREG(id_aa64mmfr2_el1); cpu_desc[cpu].id_aa64pfr0 = READ_SPECIALREG(id_aa64pfr0_el1); cpu_desc[cpu].id_aa64pfr1 = READ_SPECIALREG(id_aa64pfr1_el1); #ifdef COMPAT_FREEBSD32 /* Only read aarch32 SRs if EL0-32 is available */ if (ID_AA64PFR0_EL0_VAL(cpu_desc[cpu].id_aa64pfr0) == ID_AA64PFR0_EL0_64_32) { cpu_desc[cpu].id_isar5 = READ_SPECIALREG(id_isar5_el1); cpu_desc[cpu].mvfr0 = READ_SPECIALREG(mvfr0_el1); cpu_desc[cpu].mvfr1 = READ_SPECIALREG(mvfr1_el1); } #endif } static void check_cpu_regs(u_int cpu) { switch (cpu_aff_levels) { case 0: if (CPU_AFF0(cpu_desc[cpu].mpidr) != CPU_AFF0(cpu_desc[0].mpidr)) cpu_aff_levels = 1; /* FALLTHROUGH */ case 1: if (CPU_AFF1(cpu_desc[cpu].mpidr) != CPU_AFF1(cpu_desc[0].mpidr)) cpu_aff_levels = 2; /* FALLTHROUGH */ case 2: if (CPU_AFF2(cpu_desc[cpu].mpidr) != CPU_AFF2(cpu_desc[0].mpidr)) cpu_aff_levels = 3; /* FALLTHROUGH */ case 3: if (CPU_AFF3(cpu_desc[cpu].mpidr) != CPU_AFF3(cpu_desc[0].mpidr)) cpu_aff_levels = 4; break; } if (cpu_desc[cpu].id_aa64afr0 != cpu_desc[0].id_aa64afr0) cpu_print_regs |= PRINT_ID_AA64_AFR0; if (cpu_desc[cpu].id_aa64afr1 != cpu_desc[0].id_aa64afr1) cpu_print_regs |= PRINT_ID_AA64_AFR1; if (cpu_desc[cpu].id_aa64dfr0 != cpu_desc[0].id_aa64dfr0) cpu_print_regs |= PRINT_ID_AA64_DFR0; if (cpu_desc[cpu].id_aa64dfr1 != cpu_desc[0].id_aa64dfr1) cpu_print_regs |= PRINT_ID_AA64_DFR1; if (cpu_desc[cpu].id_aa64isar0 != cpu_desc[0].id_aa64isar0) cpu_print_regs |= PRINT_ID_AA64_ISAR0; if (cpu_desc[cpu].id_aa64isar1 != cpu_desc[0].id_aa64isar1) cpu_print_regs |= PRINT_ID_AA64_ISAR1; if (cpu_desc[cpu].id_aa64mmfr0 != cpu_desc[0].id_aa64mmfr0) cpu_print_regs |= PRINT_ID_AA64_MMFR0; if (cpu_desc[cpu].id_aa64mmfr1 != cpu_desc[0].id_aa64mmfr1) cpu_print_regs |= PRINT_ID_AA64_MMFR1; if (cpu_desc[cpu].id_aa64mmfr2 != cpu_desc[0].id_aa64mmfr2) cpu_print_regs |= PRINT_ID_AA64_MMFR2; if (cpu_desc[cpu].id_aa64pfr0 != cpu_desc[0].id_aa64pfr0) cpu_print_regs |= PRINT_ID_AA64_PFR0; if (cpu_desc[cpu].id_aa64pfr1 != cpu_desc[0].id_aa64pfr1) cpu_print_regs |= PRINT_ID_AA64_PFR1; if (cpu_desc[cpu].ctr != cpu_desc[0].ctr) { /* * If the cache type register is different we may * have a different l1 cache type. 
*/ identify_cache(cpu_desc[cpu].ctr); cpu_print_regs |= PRINT_CTR_EL0; } #ifdef COMPAT_FREEBSD32 if (cpu_desc[cpu].id_isar5 != cpu_desc[0].id_isar5) cpu_print_regs |= PRINT_ID_ISAR5; if (cpu_desc[cpu].mvfr0 != cpu_desc[0].mvfr0) cpu_print_regs |= PRINT_MVFR0; if (cpu_desc[cpu].mvfr1 != cpu_desc[0].mvfr1) cpu_print_regs |= PRINT_MVFR1; #endif } diff --git a/sys/arm64/include/armreg.h b/sys/arm64/include/armreg.h index 44b11617fbf1..c16c5b77f3c0 100644 --- a/sys/arm64/include/armreg.h +++ b/sys/arm64/include/armreg.h @@ -1,1776 +1,1777 @@ /*- * Copyright (c) 2013, 2014 Andrew Turner * Copyright (c) 2015,2021 The FreeBSD Foundation * * Portions of this software were developed by Andrew Turner * under sponsorship from the FreeBSD Foundation. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. 
* * $FreeBSD$ */ #ifndef _MACHINE_ARMREG_H_ #define _MACHINE_ARMREG_H_ #define INSN_SIZE 4 #define MRS_MASK 0xfff00000 #define MRS_VALUE 0xd5300000 #define MRS_SPECIAL(insn) ((insn) & 0x000fffe0) #define MRS_REGISTER(insn) ((insn) & 0x0000001f) #define MRS_Op0_SHIFT 19 #define MRS_Op0_MASK 0x00080000 #define MRS_Op1_SHIFT 16 #define MRS_Op1_MASK 0x00070000 #define MRS_CRn_SHIFT 12 #define MRS_CRn_MASK 0x0000f000 #define MRS_CRm_SHIFT 8 #define MRS_CRm_MASK 0x00000f00 #define MRS_Op2_SHIFT 5 #define MRS_Op2_MASK 0x000000e0 #define MRS_Rt_SHIFT 0 #define MRS_Rt_MASK 0x0000001f #define __MRS_REG(op0, op1, crn, crm, op2) \ (((op0) << MRS_Op0_SHIFT) | ((op1) << MRS_Op1_SHIFT) | \ ((crn) << MRS_CRn_SHIFT) | ((crm) << MRS_CRm_SHIFT) | \ ((op2) << MRS_Op2_SHIFT)) #define MRS_REG(reg) \ __MRS_REG(reg##_op0, reg##_op1, reg##_CRn, reg##_CRm, reg##_op2) #define READ_SPECIALREG(reg) \ ({ uint64_t _val; \ __asm __volatile("mrs %0, " __STRING(reg) : "=&r" (_val)); \ _val; \ }) #define WRITE_SPECIALREG(reg, _val) \ __asm __volatile("msr " __STRING(reg) ", %0" : : "r"((uint64_t)_val)) #define UL(x) UINT64_C(x) /* CNTHCTL_EL2 - Counter-timer Hypervisor Control register */ #define CNTHCTL_EVNTI_MASK (0xf << 4) /* Bit to trigger event stream */ #define CNTHCTL_EVNTDIR (1 << 3) /* Control transition trigger bit */ #define CNTHCTL_EVNTEN (1 << 2) /* Enable event stream */ #define CNTHCTL_EL1PCEN (1 << 1) /* Allow EL0/1 physical timer access */ #define CNTHCTL_EL1PCTEN (1 << 0) /*Allow EL0/1 physical counter access*/ /* CNTP_CTL_EL0 - Counter-timer Physical Timer Control register */ #define CNTP_CTL_EL0 MRS_REG(CNTP_CTL_EL0) #define CNTP_CTL_EL0_op0 3 #define CNTP_CTL_EL0_op1 3 #define CNTP_CTL_EL0_CRn 14 #define CNTP_CTL_EL0_CRm 2 #define CNTP_CTL_EL0_op2 1 #define CNTP_CTL_ENABLE (1 << 0) #define CNTP_CTL_IMASK (1 << 1) #define CNTP_CTL_ISTATUS (1 << 2) /* CNTP_CVAL_EL0 - Counter-timer Physical Timer CompareValue register */ #define CNTP_CVAL_EL0 MRS_REG(CNTP_CVAL_EL0) #define CNTP_CVAL_EL0_op0 3 #define CNTP_CVAL_EL0_op1 3 #define CNTP_CVAL_EL0_CRn 14 #define CNTP_CVAL_EL0_CRm 2 #define CNTP_CVAL_EL0_op2 2 /* CNTP_TVAL_EL0 - Counter-timer Physical Timer TimerValue register */ #define CNTP_TVAL_EL0 MRS_REG(CNTP_TVAL_EL0) #define CNTP_TVAL_EL0_op0 3 #define CNTP_TVAL_EL0_op1 3 #define CNTP_TVAL_EL0_CRn 14 #define CNTP_TVAL_EL0_CRm 2 #define CNTP_TVAL_EL0_op2 0 /* CNTPCT_EL0 - Counter-timer Physical Count register */ #define CNTPCT_EL0 MRS_REG(CNTPCT_EL0) #define CNTPCT_EL0_op0 3 #define CNTPCT_EL0_op1 3 #define CNTPCT_EL0_CRn 14 #define CNTPCT_EL0_CRm 0 #define CNTPCT_EL0_op2 1 /* CPACR_EL1 */ #define CPACR_FPEN_MASK (0x3 << 20) #define CPACR_FPEN_TRAP_ALL1 (0x0 << 20) /* Traps from EL0 and EL1 */ #define CPACR_FPEN_TRAP_EL0 (0x1 << 20) /* Traps from EL0 */ #define CPACR_FPEN_TRAP_ALL2 (0x2 << 20) /* Traps from EL0 and EL1 */ #define CPACR_FPEN_TRAP_NONE (0x3 << 20) /* No traps */ #define CPACR_TTA (0x1 << 28) /* CTR_EL0 - Cache Type Register */ #define CTR_RES1 (1 << 31) #define CTR_TminLine_SHIFT 32 #define CTR_TminLine_MASK (UL(0x3f) << CTR_TminLine_SHIFT) #define CTR_TminLine_VAL(reg) ((reg) & CTR_TminLine_MASK) #define CTR_DIC_SHIFT 29 #define CTR_DIC_MASK (0x1 << CTR_DIC_SHIFT) #define CTR_DIC_VAL(reg) ((reg) & CTR_DIC_MASK) #define CTR_IDC_SHIFT 28 #define CTR_IDC_MASK (0x1 << CTR_IDC_SHIFT) #define CTR_IDC_VAL(reg) ((reg) & CTR_IDC_MASK) #define CTR_CWG_SHIFT 24 #define CTR_CWG_MASK (0xf << CTR_CWG_SHIFT) #define CTR_CWG_VAL(reg) ((reg) & CTR_CWG_MASK) #define CTR_CWG_SIZE(reg) (4 << (CTR_CWG_VAL(reg) 
>> CTR_CWG_SHIFT)) #define CTR_ERG_SHIFT 20 #define CTR_ERG_MASK (0xf << CTR_ERG_SHIFT) #define CTR_ERG_VAL(reg) ((reg) & CTR_ERG_MASK) #define CTR_ERG_SIZE(reg) (4 << (CTR_ERG_VAL(reg) >> CTR_ERG_SHIFT)) #define CTR_DLINE_SHIFT 16 #define CTR_DLINE_MASK (0xf << CTR_DLINE_SHIFT) #define CTR_DLINE_VAL(reg) ((reg) & CTR_DLINE_MASK) #define CTR_DLINE_SIZE(reg) (4 << (CTR_DLINE_VAL(reg) >> CTR_DLINE_SHIFT)) #define CTR_L1IP_SHIFT 14 #define CTR_L1IP_MASK (0x3 << CTR_L1IP_SHIFT) #define CTR_L1IP_VAL(reg) ((reg) & CTR_L1IP_MASK) #define CTR_L1IP_VPIPT (0 << CTR_L1IP_SHIFT) #define CTR_L1IP_AIVIVT (1 << CTR_L1IP_SHIFT) #define CTR_L1IP_VIPT (2 << CTR_L1IP_SHIFT) #define CTR_L1IP_PIPT (3 << CTR_L1IP_SHIFT) #define CTR_ILINE_SHIFT 0 #define CTR_ILINE_MASK (0xf << CTR_ILINE_SHIFT) #define CTR_ILINE_VAL(reg) ((reg) & CTR_ILINE_MASK) #define CTR_ILINE_SIZE(reg) (4 << (CTR_ILINE_VAL(reg) >> CTR_ILINE_SHIFT)) /* DAIFSet/DAIFClear */ #define DAIF_D (1 << 3) #define DAIF_A (1 << 2) #define DAIF_I (1 << 1) #define DAIF_F (1 << 0) #define DAIF_ALL (DAIF_D | DAIF_A | DAIF_I | DAIF_F) #define DAIF_INTR (DAIF_I) /* All exceptions that pass */ /* through the intr framework */ /* DBGBCR_EL1 - Debug Breakpoint Control Registers */ #define DBGBCR_EL1_op0 2 #define DBGBCR_EL1_op1 0 #define DBGBCR_EL1_CRn 0 /* DBGBCR_EL1_CRm indicates which watchpoint this register is for */ #define DBGBCR_EL1_op2 5 #define DBGBCR_EN 0x1 #define DBGBCR_PMC_SHIFT 1 #define DBGBCR_PMC (0x3 << DBGBCR_PMC_SHIFT) #define DBGBCR_PMC_EL1 (0x1 << DBGBCR_PMC_SHIFT) #define DBGBCR_PMC_EL0 (0x2 << DBGBCR_PMC_SHIFT) #define DBGBCR_BAS_SHIFT 5 #define DBGBCR_BAS (0xf << DBGBCR_BAS_SHIFT) #define DBGBCR_HMC_SHIFT 13 #define DBGBCR_HMC (0x1 << DBGBCR_HMC_SHIFT) #define DBGBCR_SSC_SHIFT 14 #define DBGBCR_SSC (0x3 << DBGBCR_SSC_SHIFT) #define DBGBCR_LBN_SHIFT 16 #define DBGBCR_LBN (0xf << DBGBCR_LBN_SHIFT) #define DBGBCR_BT_SHIFT 20 #define DBGBCR_BT (0xf << DBGBCR_BT_SHIFT) /* DBGBVR_EL1 - Debug Breakpoint Value Registers */ #define DBGBVR_EL1_op0 2 #define DBGBVR_EL1_op1 0 #define DBGBVR_EL1_CRn 0 /* DBGBVR_EL1_CRm indicates which watchpoint this register is for */ #define DBGBVR_EL1_op2 4 /* DBGWCR_EL1 - Debug Watchpoint Control Registers */ #define DBGWCR_EL1_op0 2 #define DBGWCR_EL1_op1 0 #define DBGWCR_EL1_CRn 0 /* DBGWCR_EL1_CRm indicates which watchpoint this register is for */ #define DBGWCR_EL1_op2 7 #define DBGWCR_EN 0x1 #define DBGWCR_PAC_SHIFT 1 #define DBGWCR_PAC (0x3 << DBGWCR_PAC_SHIFT) #define DBGWCR_PAC_EL1 (0x1 << DBGWCR_PAC_SHIFT) #define DBGWCR_PAC_EL0 (0x2 << DBGWCR_PAC_SHIFT) #define DBGWCR_LSC_SHIFT 3 #define DBGWCR_LSC (0x3 << DBGWCR_LSC_SHIFT) #define DBGWCR_BAS_SHIFT 5 #define DBGWCR_BAS (0xff << DBGWCR_BAS_SHIFT) #define DBGWCR_HMC_SHIFT 13 #define DBGWCR_HMC (0x1 << DBGWCR_HMC_SHIFT) #define DBGWCR_SSC_SHIFT 14 #define DBGWCR_SSC (0x3 << DBGWCR_SSC_SHIFT) #define DBGWCR_LBN_SHIFT 16 #define DBGWCR_LBN (0xf << DBGWCR_LBN_SHIFT) #define DBGWCR_WT_SHIFT 20 #define DBGWCR_WT (0x1 << DBGWCR_WT_SHIFT) #define DBGWCR_MASK_SHIFT 24 #define DBGWCR_MASK (0x1f << DBGWCR_MASK_SHIFT) /* DBGWVR_EL1 - Debug Watchpoint Value Registers */ #define DBGWVR_EL1_op0 2 #define DBGWVR_EL1_op1 0 #define DBGWVR_EL1_CRn 0 /* DBGWVR_EL1_CRm indicates which watchpoint this register is for */ #define DBGWVR_EL1_op2 6 /* DCZID_EL0 - Data Cache Zero ID register */ #define DCZID_DZP (1 << 4) /* DC ZVA prohibited if non-0 */ #define DCZID_BS_SHIFT 0 #define DCZID_BS_MASK (0xf << DCZID_BS_SHIFT) #define DCZID_BS_SIZE(reg) (((reg) & DCZID_BS_MASK) >> 
DCZID_BS_SHIFT) /* DBGAUTHSTATUS_EL1 */ #define DBGAUTHSTATUS_EL1 MRS_REG(DBGAUTHSTATUS_EL1) #define DBGAUTHSTATUS_EL1_op0 2 #define DBGAUTHSTATUS_EL1_op1 0 #define DBGAUTHSTATUS_EL1_CRn 7 #define DBGAUTHSTATUS_EL1_CRm 14 #define DBGAUTHSTATUS_EL1_op2 6 /* DBGCLAIMCLR_EL1 */ #define DBGCLAIMCLR_EL1 MRS_REG(DBGCLAIMCLR_EL1) #define DBGCLAIMCLR_EL1_op0 2 #define DBGCLAIMCLR_EL1_op1 0 #define DBGCLAIMCLR_EL1_CRn 7 #define DBGCLAIMCLR_EL1_CRm 9 #define DBGCLAIMCLR_EL1_op2 6 /* DBGCLAIMSET_EL1 */ #define DBGCLAIMSET_EL1 MRS_REG(DBGCLAIMSET_EL1) #define DBGCLAIMSET_EL1_op0 2 #define DBGCLAIMSET_EL1_op1 0 #define DBGCLAIMSET_EL1_CRn 7 #define DBGCLAIMSET_EL1_CRm 8 #define DBGCLAIMSET_EL1_op2 6 /* DBGPRCR_EL1 */ #define DBGPRCR_EL1 MRS_REG(DBGPRCR_EL1) #define DBGPRCR_EL1_op0 2 #define DBGPRCR_EL1_op1 0 #define DBGPRCR_EL1_CRn 1 #define DBGPRCR_EL1_CRm 4 #define DBGPRCR_EL1_op2 4 /* ESR_ELx */ #define ESR_ELx_ISS_MASK 0x01ffffff #define ISS_FP_TFV_SHIFT 23 #define ISS_FP_TFV (0x01 << ISS_FP_TFV_SHIFT) #define ISS_FP_IOF 0x01 #define ISS_FP_DZF 0x02 #define ISS_FP_OFF 0x04 #define ISS_FP_UFF 0x08 #define ISS_FP_IXF 0x10 #define ISS_FP_IDF 0x80 #define ISS_INSN_FnV (0x01 << 10) #define ISS_INSN_EA (0x01 << 9) #define ISS_INSN_S1PTW (0x01 << 7) #define ISS_INSN_IFSC_MASK (0x1f << 0) #define ISS_MSR_DIR_SHIFT 0 #define ISS_MSR_DIR (0x01 << ISS_MSR_DIR_SHIFT) #define ISS_MSR_Rt_SHIFT 5 #define ISS_MSR_Rt_MASK (0x1f << ISS_MSR_Rt_SHIFT) #define ISS_MSR_Rt(x) (((x) & ISS_MSR_Rt_MASK) >> ISS_MSR_Rt_SHIFT) #define ISS_MSR_CRm_SHIFT 1 #define ISS_MSR_CRm_MASK (0xf << ISS_MSR_CRm_SHIFT) #define ISS_MSR_CRm(x) (((x) & ISS_MSR_CRm_MASK) >> ISS_MSR_CRm_SHIFT) #define ISS_MSR_CRn_SHIFT 10 #define ISS_MSR_CRn_MASK (0xf << ISS_MSR_CRn_SHIFT) #define ISS_MSR_CRn(x) (((x) & ISS_MSR_CRn_MASK) >> ISS_MSR_CRn_SHIFT) #define ISS_MSR_OP1_SHIFT 14 #define ISS_MSR_OP1_MASK (0x7 << ISS_MSR_OP1_SHIFT) #define ISS_MSR_OP1(x) (((x) & ISS_MSR_OP1_MASK) >> ISS_MSR_OP1_SHIFT) #define ISS_MSR_OP2_SHIFT 17 #define ISS_MSR_OP2_MASK (0x7 << ISS_MSR_OP2_SHIFT) #define ISS_MSR_OP2(x) (((x) & ISS_MSR_OP2_MASK) >> ISS_MSR_OP2_SHIFT) #define ISS_MSR_OP0_SHIFT 20 #define ISS_MSR_OP0_MASK (0x3 << ISS_MSR_OP0_SHIFT) #define ISS_MSR_OP0(x) (((x) & ISS_MSR_OP0_MASK) >> ISS_MSR_OP0_SHIFT) #define ISS_MSR_REG_MASK \ (ISS_MSR_OP0_MASK | ISS_MSR_OP2_MASK | ISS_MSR_OP1_MASK | \ ISS_MSR_CRn_MASK | ISS_MSR_CRm_MASK) #define ISS_DATA_ISV_SHIFT 24 #define ISS_DATA_ISV (0x01 << ISS_DATA_ISV_SHIFT) #define ISS_DATA_SAS_SHIFT 22 #define ISS_DATA_SAS_MASK (0x03 << ISS_DATA_SAS_SHIFT) #define ISS_DATA_SSE_SHIFT 21 #define ISS_DATA_SSE (0x01 << ISS_DATA_SSE_SHIFT) #define ISS_DATA_SRT_SHIFT 16 #define ISS_DATA_SRT_MASK (0x1f << ISS_DATA_SRT_SHIFT) #define ISS_DATA_SF (0x01 << 15) #define ISS_DATA_AR (0x01 << 14) #define ISS_DATA_FnV (0x01 << 10) #define ISS_DATA_EA (0x01 << 9) #define ISS_DATA_CM (0x01 << 8) #define ISS_DATA_S1PTW (0x01 << 7) #define ISS_DATA_WnR_SHIFT 6 #define ISS_DATA_WnR (0x01 << ISS_DATA_WnR_SHIFT) #define ISS_DATA_DFSC_MASK (0x3f << 0) #define ISS_DATA_DFSC_ASF_L0 (0x00 << 0) #define ISS_DATA_DFSC_ASF_L1 (0x01 << 0) #define ISS_DATA_DFSC_ASF_L2 (0x02 << 0) #define ISS_DATA_DFSC_ASF_L3 (0x03 << 0) #define ISS_DATA_DFSC_TF_L0 (0x04 << 0) #define ISS_DATA_DFSC_TF_L1 (0x05 << 0) #define ISS_DATA_DFSC_TF_L2 (0x06 << 0) #define ISS_DATA_DFSC_TF_L3 (0x07 << 0) #define ISS_DATA_DFSC_AFF_L1 (0x09 << 0) #define ISS_DATA_DFSC_AFF_L2 (0x0a << 0) #define ISS_DATA_DFSC_AFF_L3 (0x0b << 0) #define ISS_DATA_DFSC_PF_L1 (0x0d << 0) #define 
ISS_DATA_DFSC_PF_L2 (0x0e << 0) #define ISS_DATA_DFSC_PF_L3 (0x0f << 0) #define ISS_DATA_DFSC_EXT (0x10 << 0) #define ISS_DATA_DFSC_EXT_L0 (0x14 << 0) #define ISS_DATA_DFSC_EXT_L1 (0x15 << 0) #define ISS_DATA_DFSC_EXT_L2 (0x16 << 0) #define ISS_DATA_DFSC_EXT_L3 (0x17 << 0) #define ISS_DATA_DFSC_ECC (0x18 << 0) #define ISS_DATA_DFSC_ECC_L0 (0x1c << 0) #define ISS_DATA_DFSC_ECC_L1 (0x1d << 0) #define ISS_DATA_DFSC_ECC_L2 (0x1e << 0) #define ISS_DATA_DFSC_ECC_L3 (0x1f << 0) #define ISS_DATA_DFSC_ALIGN (0x21 << 0) #define ISS_DATA_DFSC_TLB_CONFLICT (0x30 << 0) #define ESR_ELx_IL (0x01 << 25) #define ESR_ELx_EC_SHIFT 26 #define ESR_ELx_EC_MASK (0x3f << 26) #define ESR_ELx_EXCEPTION(esr) (((esr) & ESR_ELx_EC_MASK) >> ESR_ELx_EC_SHIFT) #define EXCP_UNKNOWN 0x00 /* Unknown exception */ #define EXCP_TRAP_WFI_WFE 0x01 /* Trapped WFI or WFE */ #define EXCP_FP_SIMD 0x07 /* VFP/SIMD trap */ #define EXCP_ILL_STATE 0x0e /* Illegal execution state */ #define EXCP_SVC32 0x11 /* SVC trap for AArch32 */ #define EXCP_SVC64 0x15 /* SVC trap for AArch64 */ #define EXCP_HVC 0x16 /* HVC trap */ #define EXCP_MSR 0x18 /* MSR/MRS trap */ #define EXCP_FPAC 0x1c /* Faulting PAC trap */ #define EXCP_INSN_ABORT_L 0x20 /* Instruction abort, from lower EL */ #define EXCP_INSN_ABORT 0x21 /* Instruction abort, from same EL */ #define EXCP_PC_ALIGN 0x22 /* PC alignment fault */ #define EXCP_DATA_ABORT_L 0x24 /* Data abort, from lower EL */ #define EXCP_DATA_ABORT 0x25 /* Data abort, from same EL */ #define EXCP_SP_ALIGN 0x26 /* SP alignment fault */ #define EXCP_TRAP_FP 0x2c /* Trapped FP exception */ #define EXCP_SERROR 0x2f /* SError interrupt */ #define EXCP_BRKPT_EL0 0x30 /* Hardware breakpoint, from same EL */ #define EXCP_SOFTSTP_EL0 0x32 /* Software Step, from lower EL */ #define EXCP_SOFTSTP_EL1 0x33 /* Software Step, from same EL */ #define EXCP_WATCHPT_EL0 0x34 /* Watchpoint, from lower EL */ #define EXCP_WATCHPT_EL1 0x35 /* Watchpoint, from same EL */ #define EXCP_BRKPT_32 0x38 /* 32bits breakpoint */ #define EXCP_BRK 0x3c /* Breakpoint */ /* ICC_CTLR_EL1 */ #define ICC_CTLR_EL1_EOIMODE (1U << 1) /* ICC_IAR1_EL1 */ #define ICC_IAR1_EL1_SPUR (0x03ff) /* ICC_IGRPEN0_EL1 */ #define ICC_IGRPEN0_EL1_EN (1U << 0) /* ICC_PMR_EL1 */ #define ICC_PMR_EL1_PRIO_MASK (0xFFUL) /* ICC_SGI1R_EL1 */ #define ICC_SGI1R_EL1 MRS_REG(ICC_SGI1R_EL1) #define ICC_SGI1R_EL1_op0 3 #define ICC_SGI1R_EL1_op1 0 #define ICC_SGI1R_EL1_CRn 12 #define ICC_SGI1R_EL1_CRm 11 #define ICC_SGI1R_EL1_op2 5 #define ICC_SGI1R_EL1_TL_MASK 0xffffUL #define ICC_SGI1R_EL1_AFF1_SHIFT 16 #define ICC_SGI1R_EL1_SGIID_SHIFT 24 #define ICC_SGI1R_EL1_AFF2_SHIFT 32 #define ICC_SGI1R_EL1_AFF3_SHIFT 48 #define ICC_SGI1R_EL1_SGIID_MASK 0xfUL #define ICC_SGI1R_EL1_IRM (0x1UL << 40) /* ICC_SRE_EL1 */ #define ICC_SRE_EL1_SRE (1U << 0) /* ID_AA64DFR0_EL1 */ #define ID_AA64DFR0_EL1 MRS_REG(ID_AA64DFR0_EL1) #define ID_AA64DFR0_EL1_op0 0x3 #define ID_AA64DFR0_EL1_op1 0x0 #define ID_AA64DFR0_EL1_CRn 0x0 #define ID_AA64DFR0_EL1_CRm 0x5 #define ID_AA64DFR0_EL1_op2 0x0 #define ID_AA64DFR0_DebugVer_SHIFT 0 #define ID_AA64DFR0_DebugVer_MASK (UL(0xf) << ID_AA64DFR0_DebugVer_SHIFT) #define ID_AA64DFR0_DebugVer_VAL(x) ((x) & ID_AA64DFR0_DebugVer_MASK) #define ID_AA64DFR0_DebugVer_8 (UL(0x6) << ID_AA64DFR0_DebugVer_SHIFT) #define ID_AA64DFR0_DebugVer_8_VHE (UL(0x7) << ID_AA64DFR0_DebugVer_SHIFT) #define ID_AA64DFR0_DebugVer_8_2 (UL(0x8) << ID_AA64DFR0_DebugVer_SHIFT) #define ID_AA64DFR0_DebugVer_8_4 (UL(0x9) << ID_AA64DFR0_DebugVer_SHIFT) #define ID_AA64DFR0_TraceVer_SHIFT 4 #define
ID_AA64DFR0_TraceVer_MASK (UL(0xf) << ID_AA64DFR0_TraceVer_SHIFT) #define ID_AA64DFR0_TraceVer_VAL(x) ((x) & ID_AA64DFR0_TraceVer_MASK) #define ID_AA64DFR0_TraceVer_NONE (UL(0x0) << ID_AA64DFR0_TraceVer_SHIFT) #define ID_AA64DFR0_TraceVer_IMPL (UL(0x1) << ID_AA64DFR0_TraceVer_SHIFT) #define ID_AA64DFR0_PMUVer_SHIFT 8 #define ID_AA64DFR0_PMUVer_MASK (UL(0xf) << ID_AA64DFR0_PMUVer_SHIFT) #define ID_AA64DFR0_PMUVer_VAL(x) ((x) & ID_AA64DFR0_PMUVer_MASK) #define ID_AA64DFR0_PMUVer_NONE (UL(0x0) << ID_AA64DFR0_PMUVer_SHIFT) #define ID_AA64DFR0_PMUVer_3 (UL(0x1) << ID_AA64DFR0_PMUVer_SHIFT) #define ID_AA64DFR0_PMUVer_3_1 (UL(0x4) << ID_AA64DFR0_PMUVer_SHIFT) #define ID_AA64DFR0_PMUVer_3_4 (UL(0x5) << ID_AA64DFR0_PMUVer_SHIFT) #define ID_AA64DFR0_PMUVer_3_5 (UL(0x6) << ID_AA64DFR0_PMUVer_SHIFT) #define ID_AA64DFR0_PMUVer_IMPL (UL(0xf) << ID_AA64DFR0_PMUVer_SHIFT) #define ID_AA64DFR0_BRPs_SHIFT 12 #define ID_AA64DFR0_BRPs_MASK (UL(0xf) << ID_AA64DFR0_BRPs_SHIFT) #define ID_AA64DFR0_BRPs_VAL(x) \ ((((x) >> ID_AA64DFR0_BRPs_SHIFT) & 0xf) + 1) #define ID_AA64DFR0_WRPs_SHIFT 20 #define ID_AA64DFR0_WRPs_MASK (UL(0xf) << ID_AA64DFR0_WRPs_SHIFT) #define ID_AA64DFR0_WRPs_VAL(x) \ ((((x) >> ID_AA64DFR0_WRPs_SHIFT) & 0xf) + 1) #define ID_AA64DFR0_CTX_CMPs_SHIFT 28 #define ID_AA64DFR0_CTX_CMPs_MASK (UL(0xf) << ID_AA64DFR0_CTX_CMPs_SHIFT) #define ID_AA64DFR0_CTX_CMPs_VAL(x) \ ((((x) >> ID_AA64DFR0_CTX_CMPs_SHIFT) & 0xf) + 1) #define ID_AA64DFR0_PMSVer_SHIFT 32 #define ID_AA64DFR0_PMSVer_MASK (UL(0xf) << ID_AA64DFR0_PMSVer_SHIFT) #define ID_AA64DFR0_PMSVer_VAL(x) ((x) & ID_AA64DFR0_PMSVer_MASK) #define ID_AA64DFR0_PMSVer_NONE (UL(0x0) << ID_AA64DFR0_PMSVer_SHIFT) #define ID_AA64DFR0_PMSVer_SPE (UL(0x1) << ID_AA64DFR0_PMSVer_SHIFT) #define ID_AA64DFR0_PMSVer_SPE_8_3 (UL(0x2) << ID_AA64DFR0_PMSVer_SHIFT) #define ID_AA64DFR0_DoubleLock_SHIFT 36 #define ID_AA64DFR0_DoubleLock_MASK (UL(0xf) << ID_AA64DFR0_DoubleLock_SHIFT) #define ID_AA64DFR0_DoubleLock_VAL(x) ((x) & ID_AA64DFR0_DoubleLock_MASK) #define ID_AA64DFR0_DoubleLock_IMPL (UL(0x0) << ID_AA64DFR0_DoubleLock_SHIFT) #define ID_AA64DFR0_DoubleLock_NONE (UL(0xf) << ID_AA64DFR0_DoubleLock_SHIFT) #define ID_AA64DFR0_TraceFilt_SHIFT 40 #define ID_AA64DFR0_TraceFilt_MASK (UL(0xf) << ID_AA64DFR0_TraceFilt_SHIFT) #define ID_AA64DFR0_TraceFilt_VAL(x) ((x) & ID_AA64DFR0_TraceFilt_MASK) #define ID_AA64DFR0_TraceFilt_NONE (UL(0x0) << ID_AA64DFR0_TraceFilt_SHIFT) #define ID_AA64DFR0_TraceFilt_8_4 (UL(0x1) << ID_AA64DFR0_TraceFilt_SHIFT) /* ID_AA64ISAR0_EL1 */ #define ID_AA64ISAR0_EL1 MRS_REG(ID_AA64ISAR0_EL1) #define ID_AA64ISAR0_EL1_op0 0x3 #define ID_AA64ISAR0_EL1_op1 0x0 #define ID_AA64ISAR0_EL1_CRn 0x0 #define ID_AA64ISAR0_EL1_CRm 0x6 #define ID_AA64ISAR0_EL1_op2 0x0 #define ID_AA64ISAR0_AES_SHIFT 4 #define ID_AA64ISAR0_AES_MASK (UL(0xf) << ID_AA64ISAR0_AES_SHIFT) #define ID_AA64ISAR0_AES_VAL(x) ((x) & ID_AA64ISAR0_AES_MASK) #define ID_AA64ISAR0_AES_NONE (UL(0x0) << ID_AA64ISAR0_AES_SHIFT) #define ID_AA64ISAR0_AES_BASE (UL(0x1) << ID_AA64ISAR0_AES_SHIFT) #define ID_AA64ISAR0_AES_PMULL (UL(0x2) << ID_AA64ISAR0_AES_SHIFT) #define ID_AA64ISAR0_SHA1_SHIFT 8 #define ID_AA64ISAR0_SHA1_MASK (UL(0xf) << ID_AA64ISAR0_SHA1_SHIFT) #define ID_AA64ISAR0_SHA1_VAL(x) ((x) & ID_AA64ISAR0_SHA1_MASK) #define ID_AA64ISAR0_SHA1_NONE (UL(0x0) << ID_AA64ISAR0_SHA1_SHIFT) #define ID_AA64ISAR0_SHA1_BASE (UL(0x1) << ID_AA64ISAR0_SHA1_SHIFT) #define ID_AA64ISAR0_SHA2_SHIFT 12 #define ID_AA64ISAR0_SHA2_MASK (UL(0xf) << ID_AA64ISAR0_SHA2_SHIFT) #define ID_AA64ISAR0_SHA2_VAL(x) ((x) & 
ID_AA64ISAR0_SHA2_MASK) #define ID_AA64ISAR0_SHA2_NONE (UL(0x0) << ID_AA64ISAR0_SHA2_SHIFT) #define ID_AA64ISAR0_SHA2_BASE (UL(0x1) << ID_AA64ISAR0_SHA2_SHIFT) #define ID_AA64ISAR0_SHA2_512 (UL(0x2) << ID_AA64ISAR0_SHA2_SHIFT) #define ID_AA64ISAR0_CRC32_SHIFT 16 #define ID_AA64ISAR0_CRC32_MASK (UL(0xf) << ID_AA64ISAR0_CRC32_SHIFT) #define ID_AA64ISAR0_CRC32_VAL(x) ((x) & ID_AA64ISAR0_CRC32_MASK) #define ID_AA64ISAR0_CRC32_NONE (UL(0x0) << ID_AA64ISAR0_CRC32_SHIFT) #define ID_AA64ISAR0_CRC32_BASE (UL(0x1) << ID_AA64ISAR0_CRC32_SHIFT) #define ID_AA64ISAR0_Atomic_SHIFT 20 #define ID_AA64ISAR0_Atomic_MASK (UL(0xf) << ID_AA64ISAR0_Atomic_SHIFT) #define ID_AA64ISAR0_Atomic_VAL(x) ((x) & ID_AA64ISAR0_Atomic_MASK) #define ID_AA64ISAR0_Atomic_NONE (UL(0x0) << ID_AA64ISAR0_Atomic_SHIFT) #define ID_AA64ISAR0_Atomic_IMPL (UL(0x2) << ID_AA64ISAR0_Atomic_SHIFT) #define ID_AA64ISAR0_RDM_SHIFT 28 #define ID_AA64ISAR0_RDM_MASK (UL(0xf) << ID_AA64ISAR0_RDM_SHIFT) #define ID_AA64ISAR0_RDM_VAL(x) ((x) & ID_AA64ISAR0_RDM_MASK) #define ID_AA64ISAR0_RDM_NONE (UL(0x0) << ID_AA64ISAR0_RDM_SHIFT) #define ID_AA64ISAR0_RDM_IMPL (UL(0x1) << ID_AA64ISAR0_RDM_SHIFT) #define ID_AA64ISAR0_SHA3_SHIFT 32 #define ID_AA64ISAR0_SHA3_MASK (UL(0xf) << ID_AA64ISAR0_SHA3_SHIFT) #define ID_AA64ISAR0_SHA3_VAL(x) ((x) & ID_AA64ISAR0_SHA3_MASK) #define ID_AA64ISAR0_SHA3_NONE (UL(0x0) << ID_AA64ISAR0_SHA3_SHIFT) #define ID_AA64ISAR0_SHA3_IMPL (UL(0x1) << ID_AA64ISAR0_SHA3_SHIFT) #define ID_AA64ISAR0_SM3_SHIFT 36 #define ID_AA64ISAR0_SM3_MASK (UL(0xf) << ID_AA64ISAR0_SM3_SHIFT) #define ID_AA64ISAR0_SM3_VAL(x) ((x) & ID_AA64ISAR0_SM3_MASK) #define ID_AA64ISAR0_SM3_NONE (UL(0x0) << ID_AA64ISAR0_SM3_SHIFT) #define ID_AA64ISAR0_SM3_IMPL (UL(0x1) << ID_AA64ISAR0_SM3_SHIFT) #define ID_AA64ISAR0_SM4_SHIFT 40 #define ID_AA64ISAR0_SM4_MASK (UL(0xf) << ID_AA64ISAR0_SM4_SHIFT) #define ID_AA64ISAR0_SM4_VAL(x) ((x) & ID_AA64ISAR0_SM4_MASK) #define ID_AA64ISAR0_SM4_NONE (UL(0x0) << ID_AA64ISAR0_SM4_SHIFT) #define ID_AA64ISAR0_SM4_IMPL (UL(0x1) << ID_AA64ISAR0_SM4_SHIFT) #define ID_AA64ISAR0_DP_SHIFT 44 #define ID_AA64ISAR0_DP_MASK (UL(0xf) << ID_AA64ISAR0_DP_SHIFT) #define ID_AA64ISAR0_DP_VAL(x) ((x) & ID_AA64ISAR0_DP_MASK) #define ID_AA64ISAR0_DP_NONE (UL(0x0) << ID_AA64ISAR0_DP_SHIFT) #define ID_AA64ISAR0_DP_IMPL (UL(0x1) << ID_AA64ISAR0_DP_SHIFT) #define ID_AA64ISAR0_FHM_SHIFT 48 #define ID_AA64ISAR0_FHM_MASK (UL(0xf) << ID_AA64ISAR0_FHM_SHIFT) #define ID_AA64ISAR0_FHM_VAL(x) ((x) & ID_AA64ISAR0_FHM_MASK) #define ID_AA64ISAR0_FHM_NONE (UL(0x0) << ID_AA64ISAR0_FHM_SHIFT) #define ID_AA64ISAR0_FHM_IMPL (UL(0x1) << ID_AA64ISAR0_FHM_SHIFT) #define ID_AA64ISAR0_TS_SHIFT 52 #define ID_AA64ISAR0_TS_MASK (UL(0xf) << ID_AA64ISAR0_TS_SHIFT) #define ID_AA64ISAR0_TS_VAL(x) ((x) & ID_AA64ISAR0_TS_MASK) #define ID_AA64ISAR0_TS_NONE (UL(0x0) << ID_AA64ISAR0_TS_SHIFT) #define ID_AA64ISAR0_TS_CondM_8_4 (UL(0x1) << ID_AA64ISAR0_TS_SHIFT) #define ID_AA64ISAR0_TS_CondM_8_5 (UL(0x2) << ID_AA64ISAR0_TS_SHIFT) #define ID_AA64ISAR0_TLB_SHIFT 56 #define ID_AA64ISAR0_TLB_MASK (UL(0xf) << ID_AA64ISAR0_TLB_SHIFT) #define ID_AA64ISAR0_TLB_VAL(x) ((x) & ID_AA64ISAR0_TLB_MASK) #define ID_AA64ISAR0_TLB_NONE (UL(0x0) << ID_AA64ISAR0_TLB_SHIFT) #define ID_AA64ISAR0_TLB_TLBIOS (UL(0x1) << ID_AA64ISAR0_TLB_SHIFT) #define ID_AA64ISAR0_TLB_TLBIOSR (UL(0x2) << ID_AA64ISAR0_TLB_SHIFT) #define ID_AA64ISAR0_RNDR_SHIFT 60 #define ID_AA64ISAR0_RNDR_MASK (UL(0xf) << ID_AA64ISAR0_RNDR_SHIFT) #define ID_AA64ISAR0_RNDR_VAL(x) ((x) & ID_AA64ISAR0_RNDR_MASK) #define ID_AA64ISAR0_RNDR_NONE (UL(0x0) 
<< ID_AA64ISAR0_RNDR_SHIFT) #define ID_AA64ISAR0_RNDR_IMPL (UL(0x1) << ID_AA64ISAR0_RNDR_SHIFT) /* ID_AA64ISAR1_EL1 */ #define ID_AA64ISAR1_EL1 MRS_REG(ID_AA64ISAR1_EL1) #define ID_AA64ISAR1_EL1_op0 0x3 #define ID_AA64ISAR1_EL1_op1 0x0 #define ID_AA64ISAR1_EL1_CRn 0x0 #define ID_AA64ISAR1_EL1_CRm 0x6 #define ID_AA64ISAR1_EL1_op2 0x1 #define ID_AA64ISAR1_DPB_SHIFT 0 #define ID_AA64ISAR1_DPB_MASK (UL(0xf) << ID_AA64ISAR1_DPB_SHIFT) #define ID_AA64ISAR1_DPB_VAL(x) ((x) & ID_AA64ISAR1_DPB_MASK) #define ID_AA64ISAR1_DPB_NONE (UL(0x0) << ID_AA64ISAR1_DPB_SHIFT) #define ID_AA64ISAR1_DPB_DCCVAP (UL(0x1) << ID_AA64ISAR1_DPB_SHIFT) #define ID_AA64ISAR1_DPB_DCCVADP (UL(0x2) << ID_AA64ISAR1_DPB_SHIFT) #define ID_AA64ISAR1_APA_SHIFT 4 #define ID_AA64ISAR1_APA_MASK (UL(0xf) << ID_AA64ISAR1_APA_SHIFT) #define ID_AA64ISAR1_APA_VAL(x) ((x) & ID_AA64ISAR1_APA_MASK) #define ID_AA64ISAR1_APA_NONE (UL(0x0) << ID_AA64ISAR1_APA_SHIFT) #define ID_AA64ISAR1_APA_PAC (UL(0x1) << ID_AA64ISAR1_APA_SHIFT) #define ID_AA64ISAR1_APA_EPAC (UL(0x2) << ID_AA64ISAR1_APA_SHIFT) #define ID_AA64ISAR1_APA_EPAC2 (UL(0x3) << ID_AA64ISAR1_APA_SHIFT) #define ID_AA64ISAR1_APA_FPAC (UL(0x4) << ID_AA64ISAR1_APA_SHIFT) #define ID_AA64ISAR1_APA_FPAC_COMBINED (UL(0x5) << ID_AA64ISAR1_APA_SHIFT) #define ID_AA64ISAR1_API_SHIFT 8 #define ID_AA64ISAR1_API_MASK (UL(0xf) << ID_AA64ISAR1_API_SHIFT) #define ID_AA64ISAR1_API_VAL(x) ((x) & ID_AA64ISAR1_API_MASK) #define ID_AA64ISAR1_API_NONE (UL(0x0) << ID_AA64ISAR1_API_SHIFT) #define ID_AA64ISAR1_API_PAC (UL(0x1) << ID_AA64ISAR1_API_SHIFT) #define ID_AA64ISAR1_API_EPAC (UL(0x2) << ID_AA64ISAR1_API_SHIFT) #define ID_AA64ISAR1_API_EPAC2 (UL(0x3) << ID_AA64ISAR1_API_SHIFT) #define ID_AA64ISAR1_API_FPAC (UL(0x4) << ID_AA64ISAR1_API_SHIFT) #define ID_AA64ISAR1_API_FPAC_COMBINED (UL(0x5) << ID_AA64ISAR1_API_SHIFT) #define ID_AA64ISAR1_JSCVT_SHIFT 12 #define ID_AA64ISAR1_JSCVT_MASK (UL(0xf) << ID_AA64ISAR1_JSCVT_SHIFT) #define ID_AA64ISAR1_JSCVT_VAL(x) ((x) & ID_AA64ISAR1_JSCVT_MASK) #define ID_AA64ISAR1_JSCVT_NONE (UL(0x0) << ID_AA64ISAR1_JSCVT_SHIFT) #define ID_AA64ISAR1_JSCVT_IMPL (UL(0x1) << ID_AA64ISAR1_JSCVT_SHIFT) #define ID_AA64ISAR1_FCMA_SHIFT 16 #define ID_AA64ISAR1_FCMA_MASK (UL(0xf) << ID_AA64ISAR1_FCMA_SHIFT) #define ID_AA64ISAR1_FCMA_VAL(x) ((x) & ID_AA64ISAR1_FCMA_MASK) #define ID_AA64ISAR1_FCMA_NONE (UL(0x0) << ID_AA64ISAR1_FCMA_SHIFT) #define ID_AA64ISAR1_FCMA_IMPL (UL(0x1) << ID_AA64ISAR1_FCMA_SHIFT) #define ID_AA64ISAR1_LRCPC_SHIFT 20 #define ID_AA64ISAR1_LRCPC_MASK (UL(0xf) << ID_AA64ISAR1_LRCPC_SHIFT) #define ID_AA64ISAR1_LRCPC_VAL(x) ((x) & ID_AA64ISAR1_LRCPC_MASK) #define ID_AA64ISAR1_LRCPC_NONE (UL(0x0) << ID_AA64ISAR1_LRCPC_SHIFT) #define ID_AA64ISAR1_LRCPC_RCPC_8_3 (UL(0x1) << ID_AA64ISAR1_LRCPC_SHIFT) #define ID_AA64ISAR1_LRCPC_RCPC_8_4 (UL(0x2) << ID_AA64ISAR1_LRCPC_SHIFT) #define ID_AA64ISAR1_GPA_SHIFT 24 #define ID_AA64ISAR1_GPA_MASK (UL(0xf) << ID_AA64ISAR1_GPA_SHIFT) #define ID_AA64ISAR1_GPA_VAL(x) ((x) & ID_AA64ISAR1_GPA_MASK) #define ID_AA64ISAR1_GPA_NONE (UL(0x0) << ID_AA64ISAR1_GPA_SHIFT) #define ID_AA64ISAR1_GPA_IMPL (UL(0x1) << ID_AA64ISAR1_GPA_SHIFT) #define ID_AA64ISAR1_GPI_SHIFT 28 #define ID_AA64ISAR1_GPI_MASK (UL(0xf) << ID_AA64ISAR1_GPI_SHIFT) #define ID_AA64ISAR1_GPI_VAL(x) ((x) & ID_AA64ISAR1_GPI_MASK) #define ID_AA64ISAR1_GPI_NONE (UL(0x0) << ID_AA64ISAR1_GPI_SHIFT) #define ID_AA64ISAR1_GPI_IMPL (UL(0x1) << ID_AA64ISAR1_GPI_SHIFT) #define ID_AA64ISAR1_FRINTTS_SHIFT 32 #define ID_AA64ISAR1_FRINTTS_MASK (UL(0xf) << ID_AA64ISAR1_FRINTTS_SHIFT) #define 
ID_AA64ISAR1_FRINTTS_VAL(x) ((x) & ID_AA64ISAR1_FRINTTS_MASK) #define ID_AA64ISAR1_FRINTTS_NONE (UL(0x0) << ID_AA64ISAR1_FRINTTS_SHIFT) #define ID_AA64ISAR1_FRINTTS_IMPL (UL(0x1) << ID_AA64ISAR1_FRINTTS_SHIFT) #define ID_AA64ISAR1_SB_SHIFT 36 #define ID_AA64ISAR1_SB_MASK (UL(0xf) << ID_AA64ISAR1_SB_SHIFT) #define ID_AA64ISAR1_SB_VAL(x) ((x) & ID_AA64ISAR1_SB_MASK) #define ID_AA64ISAR1_SB_NONE (UL(0x0) << ID_AA64ISAR1_SB_SHIFT) #define ID_AA64ISAR1_SB_IMPL (UL(0x1) << ID_AA64ISAR1_SB_SHIFT) #define ID_AA64ISAR1_SPECRES_SHIFT 40 #define ID_AA64ISAR1_SPECRES_MASK (UL(0xf) << ID_AA64ISAR1_SPECRES_SHIFT) #define ID_AA64ISAR1_SPECRES_VAL(x) ((x) & ID_AA64ISAR1_SPECRES_MASK) #define ID_AA64ISAR1_SPECRES_NONE (UL(0x0) << ID_AA64ISAR1_SPECRES_SHIFT) #define ID_AA64ISAR1_SPECRES_IMPL (UL(0x1) << ID_AA64ISAR1_SPECRES_SHIFT) #define ID_AA64ISAR1_BF16_SHIFT 44 #define ID_AA64ISAR1_BF16_MASK (UL(0xf) << ID_AA64ISAR1_BF16_SHIFT) #define ID_AA64ISAR1_BF16_VAL(x) ((x) & ID_AA64ISAR1_BF16_MASK) #define ID_AA64ISAR1_BF16_NONE (UL(0x0) << ID_AA64ISAR1_BF16_SHIFT) #define ID_AA64ISAR1_BF16_IMPL (UL(0x1) << ID_AA64ISAR1_BF16_SHIFT) #define ID_AA64ISAR1_DGH_SHIFT 48 #define ID_AA64ISAR1_DGH_MASK (UL(0xf) << ID_AA64ISAR1_DGH_SHIFT) #define ID_AA64ISAR1_DGH_VAL(x) ((x) & ID_AA64ISAR1_DGH_MASK) #define ID_AA64ISAR1_DGH_NONE (UL(0x0) << ID_AA64ISAR1_DGH_SHIFT) #define ID_AA64ISAR1_DGH_IMPL (UL(0x1) << ID_AA64ISAR1_DGH_SHIFT) #define ID_AA64ISAR1_I8MM_SHIFT 52 #define ID_AA64ISAR1_I8MM_MASK (UL(0xf) << ID_AA64ISAR1_I8MM_SHIFT) #define ID_AA64ISAR1_I8MM_VAL(x) ((x) & ID_AA64ISAR1_I8MM_MASK) #define ID_AA64ISAR1_I8MM_NONE (UL(0x0) << ID_AA64ISAR1_I8MM_SHIFT) #define ID_AA64ISAR1_I8MM_IMPL (UL(0x1) << ID_AA64ISAR1_I8MM_SHIFT) /* ID_AA64MMFR0_EL1 */ #define ID_AA64MMFR0_EL1 MRS_REG(ID_AA64MMFR0_EL1) #define ID_AA64MMFR0_EL1_op0 0x3 #define ID_AA64MMFR0_EL1_op1 0x0 #define ID_AA64MMFR0_EL1_CRn 0x0 #define ID_AA64MMFR0_EL1_CRm 0x7 #define ID_AA64MMFR0_EL1_op2 0x0 #define ID_AA64MMFR0_PARange_SHIFT 0 #define ID_AA64MMFR0_PARange_MASK (UL(0xf) << ID_AA64MMFR0_PARange_SHIFT) #define ID_AA64MMFR0_PARange_VAL(x) ((x) & ID_AA64MMFR0_PARange_MASK) #define ID_AA64MMFR0_PARange_4G (UL(0x0) << ID_AA64MMFR0_PARange_SHIFT) #define ID_AA64MMFR0_PARange_64G (UL(0x1) << ID_AA64MMFR0_PARange_SHIFT) #define ID_AA64MMFR0_PARange_1T (UL(0x2) << ID_AA64MMFR0_PARange_SHIFT) #define ID_AA64MMFR0_PARange_4T (UL(0x3) << ID_AA64MMFR0_PARange_SHIFT) #define ID_AA64MMFR0_PARange_16T (UL(0x4) << ID_AA64MMFR0_PARange_SHIFT) #define ID_AA64MMFR0_PARange_256T (UL(0x5) << ID_AA64MMFR0_PARange_SHIFT) #define ID_AA64MMFR0_PARange_4P (UL(0x6) << ID_AA64MMFR0_PARange_SHIFT) #define ID_AA64MMFR0_ASIDBits_SHIFT 4 #define ID_AA64MMFR0_ASIDBits_MASK (UL(0xf) << ID_AA64MMFR0_ASIDBits_SHIFT) #define ID_AA64MMFR0_ASIDBits_VAL(x) ((x) & ID_AA64MMFR0_ASIDBits_MASK) #define ID_AA64MMFR0_ASIDBits_8 (UL(0x0) << ID_AA64MMFR0_ASIDBits_SHIFT) #define ID_AA64MMFR0_ASIDBits_16 (UL(0x2) << ID_AA64MMFR0_ASIDBits_SHIFT) #define ID_AA64MMFR0_BigEnd_SHIFT 8 #define ID_AA64MMFR0_BigEnd_MASK (UL(0xf) << ID_AA64MMFR0_BigEnd_SHIFT) #define ID_AA64MMFR0_BigEnd_VAL(x) ((x) & ID_AA64MMFR0_BigEnd_MASK) #define ID_AA64MMFR0_BigEnd_FIXED (UL(0x0) << ID_AA64MMFR0_BigEnd_SHIFT) #define ID_AA64MMFR0_BigEnd_MIXED (UL(0x1) << ID_AA64MMFR0_BigEnd_SHIFT) #define ID_AA64MMFR0_SNSMem_SHIFT 12 #define ID_AA64MMFR0_SNSMem_MASK (UL(0xf) << ID_AA64MMFR0_SNSMem_SHIFT) #define ID_AA64MMFR0_SNSMem_VAL(x) ((x) & ID_AA64MMFR0_SNSMem_MASK) #define ID_AA64MMFR0_SNSMem_NONE (UL(0x0) << 
ID_AA64MMFR0_SNSMem_SHIFT) #define ID_AA64MMFR0_SNSMem_DISTINCT (UL(0x1) << ID_AA64MMFR0_SNSMem_SHIFT) #define ID_AA64MMFR0_BigEndEL0_SHIFT 16 #define ID_AA64MMFR0_BigEndEL0_MASK (UL(0xf) << ID_AA64MMFR0_BigEndEL0_SHIFT) #define ID_AA64MMFR0_BigEndEL0_VAL(x) ((x) & ID_AA64MMFR0_BigEndEL0_MASK) #define ID_AA64MMFR0_BigEndEL0_FIXED (UL(0x0) << ID_AA64MMFR0_BigEndEL0_SHIFT) #define ID_AA64MMFR0_BigEndEL0_MIXED (UL(0x1) << ID_AA64MMFR0_BigEndEL0_SHIFT) #define ID_AA64MMFR0_TGran16_SHIFT 20 #define ID_AA64MMFR0_TGran16_MASK (UL(0xf) << ID_AA64MMFR0_TGran16_SHIFT) #define ID_AA64MMFR0_TGran16_VAL(x) ((x) & ID_AA64MMFR0_TGran16_MASK) #define ID_AA64MMFR0_TGran16_NONE (UL(0x0) << ID_AA64MMFR0_TGran16_SHIFT) #define ID_AA64MMFR0_TGran16_IMPL (UL(0x1) << ID_AA64MMFR0_TGran16_SHIFT) #define ID_AA64MMFR0_TGran64_SHIFT 24 #define ID_AA64MMFR0_TGran64_MASK (UL(0xf) << ID_AA64MMFR0_TGran64_SHIFT) #define ID_AA64MMFR0_TGran64_VAL(x) ((x) & ID_AA64MMFR0_TGran64_MASK) #define ID_AA64MMFR0_TGran64_IMPL (UL(0x0) << ID_AA64MMFR0_TGran64_SHIFT) #define ID_AA64MMFR0_TGran64_NONE (UL(0xf) << ID_AA64MMFR0_TGran64_SHIFT) #define ID_AA64MMFR0_TGran4_SHIFT 28 #define ID_AA64MMFR0_TGran4_MASK (UL(0xf) << ID_AA64MMFR0_TGran4_SHIFT) #define ID_AA64MMFR0_TGran4_VAL(x) ((x) & ID_AA64MMFR0_TGran4_MASK) #define ID_AA64MMFR0_TGran4_IMPL (UL(0x0) << ID_AA64MMFR0_TGran4_SHIFT) #define ID_AA64MMFR0_TGran4_NONE (UL(0xf) << ID_AA64MMFR0_TGran4_SHIFT) #define ID_AA64MMFR0_TGran16_2_SHIFT 32 #define ID_AA64MMFR0_TGran16_2_MASK (UL(0xf) << ID_AA64MMFR0_TGran16_2_SHIFT) #define ID_AA64MMFR0_TGran16_2_VAL(x) ((x) & ID_AA64MMFR0_TGran16_2_MASK) #define ID_AA64MMFR0_TGran16_2_TGran16 (UL(0x0) << ID_AA64MMFR0_TGran16_2_SHIFT) #define ID_AA64MMFR0_TGran16_2_NONE (UL(0x1) << ID_AA64MMFR0_TGran16_2_SHIFT) #define ID_AA64MMFR0_TGran16_2_IMPL (UL(0x2) << ID_AA64MMFR0_TGran16_2_SHIFT) #define ID_AA64MMFR0_TGran64_2_SHIFT 36 #define ID_AA64MMFR0_TGran64_2_MASK (UL(0xf) << ID_AA64MMFR0_TGran64_2_SHIFT) #define ID_AA64MMFR0_TGran64_2_VAL(x) ((x) & ID_AA64MMFR0_TGran64_2_MASK) #define ID_AA64MMFR0_TGran64_2_TGran64 (UL(0x0) << ID_AA64MMFR0_TGran64_2_SHIFT) #define ID_AA64MMFR0_TGran64_2_NONE (UL(0x1) << ID_AA64MMFR0_TGran64_2_SHIFT) #define ID_AA64MMFR0_TGran64_2_IMPL (UL(0x2) << ID_AA64MMFR0_TGran64_2_SHIFT) #define ID_AA64MMFR0_TGran4_2_SHIFT 40 #define ID_AA64MMFR0_TGran4_2_MASK (UL(0xf) << ID_AA64MMFR0_TGran4_2_SHIFT) #define ID_AA64MMFR0_TGran4_2_VAL(x) ((x) & ID_AA64MMFR0_TGran4_2_MASK) #define ID_AA64MMFR0_TGran4_2_TGran4 (UL(0x0) << ID_AA64MMFR0_TGran4_2_SHIFT) #define ID_AA64MMFR0_TGran4_2_NONE (UL(0x1) << ID_AA64MMFR0_TGran4_2_SHIFT) #define ID_AA64MMFR0_TGran4_2_IMPL (UL(0x2) << ID_AA64MMFR0_TGran4_2_SHIFT) #define ID_AA64MMFR0_ExS_SHIFT 44 #define ID_AA64MMFR0_ExS_MASK (UL(0xf) << ID_AA64MMFR0_ExS_SHIFT) #define ID_AA64MMFR0_ExS_VAL(x) ((x) & ID_AA64MMFR0_ExS_MASK) #define ID_AA64MMFR0_ExS_ALL (UL(0x0) << ID_AA64MMFR0_ExS_SHIFT) #define ID_AA64MMFR0_ExS_IMPL (UL(0x1) << ID_AA64MMFR0_ExS_SHIFT) /* ID_AA64MMFR1_EL1 */ #define ID_AA64MMFR1_EL1 MRS_REG(ID_AA64MMFR1_EL1) #define ID_AA64MMFR1_EL1_op0 0x3 #define ID_AA64MMFR1_EL1_op1 0x0 #define ID_AA64MMFR1_EL1_CRn 0x0 #define ID_AA64MMFR1_EL1_CRm 0x7 #define ID_AA64MMFR1_EL1_op2 0x1 #define ID_AA64MMFR1_HAFDBS_SHIFT 0 #define ID_AA64MMFR1_HAFDBS_MASK (UL(0xf) << ID_AA64MMFR1_HAFDBS_SHIFT) #define ID_AA64MMFR1_HAFDBS_VAL(x) ((x) & ID_AA64MMFR1_HAFDBS_MASK) #define ID_AA64MMFR1_HAFDBS_NONE (UL(0x0) << ID_AA64MMFR1_HAFDBS_SHIFT) #define ID_AA64MMFR1_HAFDBS_AF (UL(0x1) << 
ID_AA64MMFR1_HAFDBS_SHIFT) #define ID_AA64MMFR1_HAFDBS_AF_DBS (UL(0x2) << ID_AA64MMFR1_HAFDBS_SHIFT) #define ID_AA64MMFR1_VMIDBits_SHIFT 4 #define ID_AA64MMFR1_VMIDBits_MASK (UL(0xf) << ID_AA64MMFR1_VMIDBits_SHIFT) #define ID_AA64MMFR1_VMIDBits_VAL(x) ((x) & ID_AA64MMFR1_VMIDBits_MASK) #define ID_AA64MMFR1_VMIDBits_8 (UL(0x0) << ID_AA64MMFR1_VMIDBits_SHIFT) #define ID_AA64MMFR1_VMIDBits_16 (UL(0x2) << ID_AA64MMFR1_VMIDBits_SHIFT) #define ID_AA64MMFR1_VH_SHIFT 8 #define ID_AA64MMFR1_VH_MASK (UL(0xf) << ID_AA64MMFR1_VH_SHIFT) #define ID_AA64MMFR1_VH_VAL(x) ((x) & ID_AA64MMFR1_VH_MASK) #define ID_AA64MMFR1_VH_NONE (UL(0x0) << ID_AA64MMFR1_VH_SHIFT) #define ID_AA64MMFR1_VH_IMPL (UL(0x1) << ID_AA64MMFR1_VH_SHIFT) #define ID_AA64MMFR1_HPDS_SHIFT 12 #define ID_AA64MMFR1_HPDS_MASK (UL(0xf) << ID_AA64MMFR1_HPDS_SHIFT) #define ID_AA64MMFR1_HPDS_VAL(x) ((x) & ID_AA64MMFR1_HPDS_MASK) #define ID_AA64MMFR1_HPDS_NONE (UL(0x0) << ID_AA64MMFR1_HPDS_SHIFT) #define ID_AA64MMFR1_HPDS_HPD (UL(0x1) << ID_AA64MMFR1_HPDS_SHIFT) #define ID_AA64MMFR1_HPDS_TTPBHA (UL(0x2) << ID_AA64MMFR1_HPDS_SHIFT) #define ID_AA64MMFR1_LO_SHIFT 16 #define ID_AA64MMFR1_LO_MASK (UL(0xf) << ID_AA64MMFR1_LO_SHIFT) #define ID_AA64MMFR1_LO_VAL(x) ((x) & ID_AA64MMFR1_LO_MASK) #define ID_AA64MMFR1_LO_NONE (UL(0x0) << ID_AA64MMFR1_LO_SHIFT) #define ID_AA64MMFR1_LO_IMPL (UL(0x1) << ID_AA64MMFR1_LO_SHIFT) #define ID_AA64MMFR1_PAN_SHIFT 20 #define ID_AA64MMFR1_PAN_MASK (UL(0xf) << ID_AA64MMFR1_PAN_SHIFT) #define ID_AA64MMFR1_PAN_VAL(x) ((x) & ID_AA64MMFR1_PAN_MASK) #define ID_AA64MMFR1_PAN_NONE (UL(0x0) << ID_AA64MMFR1_PAN_SHIFT) #define ID_AA64MMFR1_PAN_IMPL (UL(0x1) << ID_AA64MMFR1_PAN_SHIFT) #define ID_AA64MMFR1_PAN_ATS1E1 (UL(0x2) << ID_AA64MMFR1_PAN_SHIFT) #define ID_AA64MMFR1_SpecSEI_SHIFT 24 #define ID_AA64MMFR1_SpecSEI_MASK (UL(0xf) << ID_AA64MMFR1_SpecSEI_SHIFT) #define ID_AA64MMFR1_SpecSEI_VAL(x) ((x) & ID_AA64MMFR1_SpecSEI_MASK) #define ID_AA64MMFR1_SpecSEI_NONE (UL(0x0) << ID_AA64MMFR1_SpecSEI_SHIFT) #define ID_AA64MMFR1_SpecSEI_IMPL (UL(0x1) << ID_AA64MMFR1_SpecSEI_SHIFT) #define ID_AA64MMFR1_XNX_SHIFT 28 #define ID_AA64MMFR1_XNX_MASK (UL(0xf) << ID_AA64MMFR1_XNX_SHIFT) #define ID_AA64MMFR1_XNX_VAL(x) ((x) & ID_AA64MMFR1_XNX_MASK) #define ID_AA64MMFR1_XNX_NONE (UL(0x0) << ID_AA64MMFR1_XNX_SHIFT) #define ID_AA64MMFR1_XNX_IMPL (UL(0x1) << ID_AA64MMFR1_XNX_SHIFT) /* ID_AA64MMFR2_EL1 */ #define ID_AA64MMFR2_EL1 MRS_REG(ID_AA64MMFR2_EL1) #define ID_AA64MMFR2_EL1_op0 0x3 #define ID_AA64MMFR2_EL1_op1 0x0 #define ID_AA64MMFR2_EL1_CRn 0x0 #define ID_AA64MMFR2_EL1_CRm 0x7 #define ID_AA64MMFR2_EL1_op2 0x2 #define ID_AA64MMFR2_CnP_SHIFT 0 #define ID_AA64MMFR2_CnP_MASK (UL(0xf) << ID_AA64MMFR2_CnP_SHIFT) #define ID_AA64MMFR2_CnP_VAL(x) ((x) & ID_AA64MMFR2_CnP_MASK) #define ID_AA64MMFR2_CnP_NONE (UL(0x0) << ID_AA64MMFR2_CnP_SHIFT) #define ID_AA64MMFR2_CnP_IMPL (UL(0x1) << ID_AA64MMFR2_CnP_SHIFT) #define ID_AA64MMFR2_UAO_SHIFT 4 #define ID_AA64MMFR2_UAO_MASK (UL(0xf) << ID_AA64MMFR2_UAO_SHIFT) #define ID_AA64MMFR2_UAO_VAL(x) ((x) & ID_AA64MMFR2_UAO_MASK) #define ID_AA64MMFR2_UAO_NONE (UL(0x0) << ID_AA64MMFR2_UAO_SHIFT) #define ID_AA64MMFR2_UAO_IMPL (UL(0x1) << ID_AA64MMFR2_UAO_SHIFT) #define ID_AA64MMFR2_LSM_SHIFT 8 #define ID_AA64MMFR2_LSM_MASK (UL(0xf) << ID_AA64MMFR2_LSM_SHIFT) #define ID_AA64MMFR2_LSM_VAL(x) ((x) & ID_AA64MMFR2_LSM_MASK) #define ID_AA64MMFR2_LSM_NONE (UL(0x0) << ID_AA64MMFR2_LSM_SHIFT) #define ID_AA64MMFR2_LSM_IMPL (UL(0x1) << ID_AA64MMFR2_LSM_SHIFT) #define ID_AA64MMFR2_IESB_SHIFT 12 #define ID_AA64MMFR2_IESB_MASK (UL(0xf) 
<< ID_AA64MMFR2_IESB_SHIFT) #define ID_AA64MMFR2_IESB_VAL(x) ((x) & ID_AA64MMFR2_IESB_MASK) #define ID_AA64MMFR2_IESB_NONE (UL(0x0) << ID_AA64MMFR2_IESB_SHIFT) #define ID_AA64MMFR2_IESB_IMPL (UL(0x1) << ID_AA64MMFR2_IESB_SHIFT) #define ID_AA64MMFR2_VARange_SHIFT 16 #define ID_AA64MMFR2_VARange_MASK (UL(0xf) << ID_AA64MMFR2_VARange_SHIFT) #define ID_AA64MMFR2_VARange_VAL(x) ((x) & ID_AA64MMFR2_VARange_MASK) #define ID_AA64MMFR2_VARange_48 (UL(0x0) << ID_AA64MMFR2_VARange_SHIFT) #define ID_AA64MMFR2_VARange_52 (UL(0x1) << ID_AA64MMFR2_VARange_SHIFT) #define ID_AA64MMFR2_CCIDX_SHIFT 20 #define ID_AA64MMFR2_CCIDX_MASK (UL(0xf) << ID_AA64MMFR2_CCIDX_SHIFT) #define ID_AA64MMFR2_CCIDX_VAL(x) ((x) & ID_AA64MMFR2_CCIDX_MASK) #define ID_AA64MMFR2_CCIDX_32 (UL(0x0) << ID_AA64MMFR2_CCIDX_SHIFT) #define ID_AA64MMFR2_CCIDX_64 (UL(0x1) << ID_AA64MMFR2_CCIDX_SHIFT) #define ID_AA64MMFR2_NV_SHIFT 24 #define ID_AA64MMFR2_NV_MASK (UL(0xf) << ID_AA64MMFR2_NV_SHIFT) #define ID_AA64MMFR2_NV_VAL(x) ((x) & ID_AA64MMFR2_NV_MASK) #define ID_AA64MMFR2_NV_NONE (UL(0x0) << ID_AA64MMFR2_NV_SHIFT) #define ID_AA64MMFR2_NV_8_3 (UL(0x1) << ID_AA64MMFR2_NV_SHIFT) #define ID_AA64MMFR2_NV_8_4 (UL(0x2) << ID_AA64MMFR2_NV_SHIFT) #define ID_AA64MMFR2_ST_SHIFT 28 #define ID_AA64MMFR2_ST_MASK (UL(0xf) << ID_AA64MMFR2_ST_SHIFT) #define ID_AA64MMFR2_ST_VAL(x) ((x) & ID_AA64MMFR2_ST_MASK) #define ID_AA64MMFR2_ST_NONE (UL(0x0) << ID_AA64MMFR2_ST_SHIFT) #define ID_AA64MMFR2_ST_IMPL (UL(0x1) << ID_AA64MMFR2_ST_SHIFT) #define ID_AA64MMFR2_AT_SHIFT 32 #define ID_AA64MMFR2_AT_MASK (UL(0xf) << ID_AA64MMFR2_AT_SHIFT) #define ID_AA64MMFR2_AT_VAL(x) ((x) & ID_AA64MMFR2_AT_MASK) #define ID_AA64MMFR2_AT_NONE (UL(0x0) << ID_AA64MMFR2_AT_SHIFT) #define ID_AA64MMFR2_AT_IMPL (UL(0x1) << ID_AA64MMFR2_AT_SHIFT) #define ID_AA64MMFR2_IDS_SHIFT 36 #define ID_AA64MMFR2_IDS_MASK (UL(0xf) << ID_AA64MMFR2_IDS_SHIFT) #define ID_AA64MMFR2_IDS_VAL(x) ((x) & ID_AA64MMFR2_IDS_MASK) #define ID_AA64MMFR2_IDS_NONE (UL(0x0) << ID_AA64MMFR2_IDS_SHIFT) #define ID_AA64MMFR2_IDS_IMPL (UL(0x1) << ID_AA64MMFR2_IDS_SHIFT) #define ID_AA64MMFR2_FWB_SHIFT 40 #define ID_AA64MMFR2_FWB_MASK (UL(0xf) << ID_AA64MMFR2_FWB_SHIFT) #define ID_AA64MMFR2_FWB_VAL(x) ((x) & ID_AA64MMFR2_FWB_MASK) #define ID_AA64MMFR2_FWB_NONE (UL(0x0) << ID_AA64MMFR2_FWB_SHIFT) #define ID_AA64MMFR2_FWB_IMPL (UL(0x1) << ID_AA64MMFR2_FWB_SHIFT) #define ID_AA64MMFR2_TTL_SHIFT 48 #define ID_AA64MMFR2_TTL_MASK (UL(0xf) << ID_AA64MMFR2_TTL_SHIFT) #define ID_AA64MMFR2_TTL_VAL(x) ((x) & ID_AA64MMFR2_TTL_MASK) #define ID_AA64MMFR2_TTL_NONE (UL(0x0) << ID_AA64MMFR2_TTL_SHIFT) #define ID_AA64MMFR2_TTL_IMPL (UL(0x1) << ID_AA64MMFR2_TTL_SHIFT) #define ID_AA64MMFR2_BBM_SHIFT 52 #define ID_AA64MMFR2_BBM_MASK (UL(0xf) << ID_AA64MMFR2_BBM_SHIFT) #define ID_AA64MMFR2_BBM_VAL(x) ((x) & ID_AA64MMFR2_BBM_MASK) #define ID_AA64MMFR2_BBM_LEVEL0 (UL(0x0) << ID_AA64MMFR2_BBM_SHIFT) #define ID_AA64MMFR2_BBM_LEVEL1 (UL(0x1) << ID_AA64MMFR2_BBM_SHIFT) #define ID_AA64MMFR2_BBM_LEVEL2 (UL(0x2) << ID_AA64MMFR2_BBM_SHIFT) #define ID_AA64MMFR2_EVT_SHIFT 56 #define ID_AA64MMFR2_EVT_MASK (UL(0xf) << ID_AA64MMFR2_EVT_SHIFT) #define ID_AA64MMFR2_EVT_VAL(x) ((x) & ID_AA64MMFR2_EVT_MASK) #define ID_AA64MMFR2_EVT_NONE (UL(0x0) << ID_AA64MMFR2_EVT_SHIFT) #define ID_AA64MMFR2_EVT_8_2 (UL(0x1) << ID_AA64MMFR2_EVT_SHIFT) #define ID_AA64MMFR2_EVT_8_5 (UL(0x2) << ID_AA64MMFR2_EVT_SHIFT) #define ID_AA64MMFR2_E0PD_SHIFT 60 #define ID_AA64MMFR2_E0PD_MASK (UL(0xf) << ID_AA64MMFR2_E0PD_SHIFT) #define ID_AA64MMFR2_E0PD_VAL(x) ((x) & ID_AA64MMFR2_E0PD_MASK) 
#define ID_AA64MMFR2_E0PD_NONE (UL(0x0) << ID_AA64MMFR2_E0PD_SHIFT) #define ID_AA64MMFR2_E0PD_IMPL (UL(0x1) << ID_AA64MMFR2_E0PD_SHIFT) /* ID_AA64PFR0_EL1 */ #define ID_AA64PFR0_EL1 MRS_REG(ID_AA64PFR0_EL1) #define ID_AA64PFR0_EL1_op0 0x3 #define ID_AA64PFR0_EL1_op1 0x0 #define ID_AA64PFR0_EL1_CRn 0x0 #define ID_AA64PFR0_EL1_CRm 0x4 #define ID_AA64PFR0_EL1_op2 0x0 #define ID_AA64PFR0_EL0_SHIFT 0 #define ID_AA64PFR0_EL0_MASK (UL(0xf) << ID_AA64PFR0_EL0_SHIFT) #define ID_AA64PFR0_EL0_VAL(x) ((x) & ID_AA64PFR0_EL0_MASK) #define ID_AA64PFR0_EL0_64 (UL(0x1) << ID_AA64PFR0_EL0_SHIFT) #define ID_AA64PFR0_EL0_64_32 (UL(0x2) << ID_AA64PFR0_EL0_SHIFT) #define ID_AA64PFR0_EL1_SHIFT 4 #define ID_AA64PFR0_EL1_MASK (UL(0xf) << ID_AA64PFR0_EL1_SHIFT) #define ID_AA64PFR0_EL1_VAL(x) ((x) & ID_AA64PFR0_EL1_MASK) #define ID_AA64PFR0_EL1_64 (UL(0x1) << ID_AA64PFR0_EL1_SHIFT) #define ID_AA64PFR0_EL1_64_32 (UL(0x2) << ID_AA64PFR0_EL1_SHIFT) #define ID_AA64PFR0_EL2_SHIFT 8 #define ID_AA64PFR0_EL2_MASK (UL(0xf) << ID_AA64PFR0_EL2_SHIFT) #define ID_AA64PFR0_EL2_VAL(x) ((x) & ID_AA64PFR0_EL2_MASK) #define ID_AA64PFR0_EL2_NONE (UL(0x0) << ID_AA64PFR0_EL2_SHIFT) #define ID_AA64PFR0_EL2_64 (UL(0x1) << ID_AA64PFR0_EL2_SHIFT) #define ID_AA64PFR0_EL2_64_32 (UL(0x2) << ID_AA64PFR0_EL2_SHIFT) #define ID_AA64PFR0_EL3_SHIFT 12 #define ID_AA64PFR0_EL3_MASK (UL(0xf) << ID_AA64PFR0_EL3_SHIFT) #define ID_AA64PFR0_EL3_VAL(x) ((x) & ID_AA64PFR0_EL3_MASK) #define ID_AA64PFR0_EL3_NONE (UL(0x0) << ID_AA64PFR0_EL3_SHIFT) #define ID_AA64PFR0_EL3_64 (UL(0x1) << ID_AA64PFR0_EL3_SHIFT) #define ID_AA64PFR0_EL3_64_32 (UL(0x2) << ID_AA64PFR0_EL3_SHIFT) #define ID_AA64PFR0_FP_SHIFT 16 #define ID_AA64PFR0_FP_MASK (UL(0xf) << ID_AA64PFR0_FP_SHIFT) #define ID_AA64PFR0_FP_VAL(x) ((x) & ID_AA64PFR0_FP_MASK) #define ID_AA64PFR0_FP_IMPL (UL(0x0) << ID_AA64PFR0_FP_SHIFT) #define ID_AA64PFR0_FP_HP (UL(0x1) << ID_AA64PFR0_FP_SHIFT) #define ID_AA64PFR0_FP_NONE (UL(0xf) << ID_AA64PFR0_FP_SHIFT) #define ID_AA64PFR0_AdvSIMD_SHIFT 20 #define ID_AA64PFR0_AdvSIMD_MASK (UL(0xf) << ID_AA64PFR0_AdvSIMD_SHIFT) #define ID_AA64PFR0_AdvSIMD_VAL(x) ((x) & ID_AA64PFR0_AdvSIMD_MASK) #define ID_AA64PFR0_AdvSIMD_IMPL (UL(0x0) << ID_AA64PFR0_AdvSIMD_SHIFT) #define ID_AA64PFR0_AdvSIMD_HP (UL(0x1) << ID_AA64PFR0_AdvSIMD_SHIFT) #define ID_AA64PFR0_AdvSIMD_NONE (UL(0xf) << ID_AA64PFR0_AdvSIMD_SHIFT) #define ID_AA64PFR0_GIC_BITS 0x4 /* Number of bits in GIC field */ #define ID_AA64PFR0_GIC_SHIFT 24 #define ID_AA64PFR0_GIC_MASK (UL(0xf) << ID_AA64PFR0_GIC_SHIFT) #define ID_AA64PFR0_GIC_VAL(x) ((x) & ID_AA64PFR0_GIC_MASK) #define ID_AA64PFR0_GIC_CPUIF_NONE (UL(0x0) << ID_AA64PFR0_GIC_SHIFT) #define ID_AA64PFR0_GIC_CPUIF_EN (UL(0x1) << ID_AA64PFR0_GIC_SHIFT) +#define ID_AA64PFR0_GIC_CPUIF_4_1 (UL(0x3) << ID_AA64PFR0_GIC_SHIFT) #define ID_AA64PFR0_RAS_SHIFT 28 #define ID_AA64PFR0_RAS_MASK (UL(0xf) << ID_AA64PFR0_RAS_SHIFT) #define ID_AA64PFR0_RAS_VAL(x) ((x) & ID_AA64PFR0_RAS_MASK) #define ID_AA64PFR0_RAS_NONE (UL(0x0) << ID_AA64PFR0_RAS_SHIFT) #define ID_AA64PFR0_RAS_IMPL (UL(0x1) << ID_AA64PFR0_RAS_SHIFT) #define ID_AA64PFR0_RAS_8_4 (UL(0x2) << ID_AA64PFR0_RAS_SHIFT) #define ID_AA64PFR0_SVE_SHIFT 32 #define ID_AA64PFR0_SVE_MASK (UL(0xf) << ID_AA64PFR0_SVE_SHIFT) #define ID_AA64PFR0_SVE_VAL(x) ((x) & ID_AA64PFR0_SVE_MASK) #define ID_AA64PFR0_SVE_NONE (UL(0x0) << ID_AA64PFR0_SVE_SHIFT) #define ID_AA64PFR0_SVE_IMPL (UL(0x1) << ID_AA64PFR0_SVE_SHIFT) #define ID_AA64PFR0_SEL2_SHIFT 36 #define ID_AA64PFR0_SEL2_MASK (UL(0xf) << ID_AA64PFR0_SEL2_SHIFT) #define ID_AA64PFR0_SEL2_VAL(x) 
((x) & ID_AA64PFR0_SEL2_MASK) #define ID_AA64PFR0_SEL2_NONE (UL(0x0) << ID_AA64PFR0_SEL2_SHIFT) #define ID_AA64PFR0_SEL2_IMPL (UL(0x1) << ID_AA64PFR0_SEL2_SHIFT) #define ID_AA64PFR0_MPAM_SHIFT 40 #define ID_AA64PFR0_MPAM_MASK (UL(0xf) << ID_AA64PFR0_MPAM_SHIFT) #define ID_AA64PFR0_MPAM_VAL(x) ((x) & ID_AA64PFR0_MPAM_MASK) #define ID_AA64PFR0_MPAM_NONE (UL(0x0) << ID_AA64PFR0_MPAM_SHIFT) #define ID_AA64PFR0_MPAM_IMPL (UL(0x1) << ID_AA64PFR0_MPAM_SHIFT) #define ID_AA64PFR0_AMU_SHIFT 44 #define ID_AA64PFR0_AMU_MASK (UL(0xf) << ID_AA64PFR0_AMU_SHIFT) #define ID_AA64PFR0_AMU_VAL(x) ((x) & ID_AA64PFR0_AMU_MASK) #define ID_AA64PFR0_AMU_NONE (UL(0x0) << ID_AA64PFR0_AMU_SHIFT) #define ID_AA64PFR0_AMU_V1 (UL(0x1) << ID_AA64PFR0_AMU_SHIFT) #define ID_AA64PFR0_DIT_SHIFT 48 #define ID_AA64PFR0_DIT_MASK (UL(0xf) << ID_AA64PFR0_DIT_SHIFT) #define ID_AA64PFR0_DIT_VAL(x) ((x) & ID_AA64PFR0_DIT_MASK) #define ID_AA64PFR0_DIT_NONE (UL(0x0) << ID_AA64PFR0_DIT_SHIFT) #define ID_AA64PFR0_DIT_PSTATE (UL(0x1) << ID_AA64PFR0_DIT_SHIFT) #define ID_AA64PFR0_CSV2_SHIFT 56 #define ID_AA64PFR0_CSV2_MASK (UL(0xf) << ID_AA64PFR0_CSV2_SHIFT) #define ID_AA64PFR0_CSV2_VAL(x) ((x) & ID_AA64PFR0_CSV2_MASK) #define ID_AA64PFR0_CSV2_NONE (UL(0x0) << ID_AA64PFR0_CSV2_SHIFT) #define ID_AA64PFR0_CSV2_ISOLATED (UL(0x1) << ID_AA64PFR0_CSV2_SHIFT) #define ID_AA64PFR0_CSV2_SCXTNUM (UL(0x2) << ID_AA64PFR0_CSV2_SHIFT) #define ID_AA64PFR0_CSV3_SHIFT 60 #define ID_AA64PFR0_CSV3_MASK (UL(0xf) << ID_AA64PFR0_CSV3_SHIFT) #define ID_AA64PFR0_CSV3_VAL(x) ((x) & ID_AA64PFR0_CSV3_MASK) #define ID_AA64PFR0_CSV3_NONE (UL(0x0) << ID_AA64PFR0_CSV3_SHIFT) #define ID_AA64PFR0_CSV3_ISOLATED (UL(0x1) << ID_AA64PFR0_CSV3_SHIFT) /* ID_AA64PFR1_EL1 */ #define ID_AA64PFR1_EL1 MRS_REG(ID_AA64PFR1_EL1) #define ID_AA64PFR1_EL1_op0 0x3 #define ID_AA64PFR1_EL1_op1 0x0 #define ID_AA64PFR1_EL1_CRn 0x0 #define ID_AA64PFR1_EL1_CRm 0x4 #define ID_AA64PFR1_EL1_op2 0x1 #define ID_AA64PFR1_BT_SHIFT 0 #define ID_AA64PFR1_BT_MASK (UL(0xf) << ID_AA64PFR1_BT_SHIFT) #define ID_AA64PFR1_BT_VAL(x) ((x) & ID_AA64PFR1_BT_MASK) #define ID_AA64PFR1_BT_NONE (UL(0x0) << ID_AA64PFR1_BT_SHIFT) #define ID_AA64PFR1_BT_IMPL (UL(0x1) << ID_AA64PFR1_BT_SHIFT) #define ID_AA64PFR1_SSBS_SHIFT 4 #define ID_AA64PFR1_SSBS_MASK (UL(0xf) << ID_AA64PFR1_SSBS_SHIFT) #define ID_AA64PFR1_SSBS_VAL(x) ((x) & ID_AA64PFR1_SSBS_MASK) #define ID_AA64PFR1_SSBS_NONE (UL(0x0) << ID_AA64PFR1_SSBS_SHIFT) #define ID_AA64PFR1_SSBS_PSTATE (UL(0x1) << ID_AA64PFR1_SSBS_SHIFT) #define ID_AA64PFR1_SSBS_PSTATE_MSR (UL(0x2) << ID_AA64PFR1_SSBS_SHIFT) #define ID_AA64PFR1_MTE_SHIFT 8 #define ID_AA64PFR1_MTE_MASK (UL(0xf) << ID_AA64PFR1_MTE_SHIFT) #define ID_AA64PFR1_MTE_VAL(x) ((x) & ID_AA64PFR1_MTE_MASK) #define ID_AA64PFR1_MTE_NONE (UL(0x0) << ID_AA64PFR1_MTE_SHIFT) #define ID_AA64PFR1_MTE_IMPL_EL0 (UL(0x1) << ID_AA64PFR1_MTE_SHIFT) #define ID_AA64PFR1_MTE_IMPL (UL(0x2) << ID_AA64PFR1_MTE_SHIFT) #define ID_AA64PFR1_RAS_frac_SHIFT 12 #define ID_AA64PFR1_RAS_frac_MASK (UL(0xf) << ID_AA64PFR1_RAS_frac_SHIFT) #define ID_AA64PFR1_RAS_frac_VAL(x) ((x) & ID_AA64PFR1_RAS_frac_MASK) #define ID_AA64PFR1_RAS_frac_V1 (UL(0x0) << ID_AA64PFR1_RAS_frac_SHIFT) #define ID_AA64PFR1_RAS_frac_V2 (UL(0x1) << ID_AA64PFR1_RAS_frac_SHIFT) /* ID_ISAR5_EL1 */ #define ID_ISAR5_EL1 MRS_REG(ID_ISAR5_EL1) #define ID_ISAR5_EL1_op0 0x3 #define ID_ISAR5_EL1_op1 0x0 #define ID_ISAR5_EL1_CRn 0x0 #define ID_ISAR5_EL1_CRm 0x2 #define ID_ISAR5_EL1_op2 0x5 #define ID_ISAR5_SEVL_SHIFT 0 #define ID_ISAR5_SEVL_MASK (UL(0xf) << ID_ISAR5_SEVL_SHIFT) #define 
ID_ISAR5_SEVL_VAL(x) ((x) & ID_ISAR5_SEVL_MASK) #define ID_ISAR5_SEVL_NOP (UL(0x0) << ID_ISAR5_SEVL_SHIFT) #define ID_ISAR5_SEVL_IMPL (UL(0x1) << ID_ISAR5_SEVL_SHIFT) #define ID_ISAR5_AES_SHIFT 4 #define ID_ISAR5_AES_MASK (UL(0xf) << ID_ISAR5_AES_SHIFT) #define ID_ISAR5_AES_VAL(x) ((x) & ID_ISAR5_AES_MASK) #define ID_ISAR5_AES_NONE (UL(0x0) << ID_ISAR5_AES_SHIFT) #define ID_ISAR5_AES_BASE (UL(0x1) << ID_ISAR5_AES_SHIFT) #define ID_ISAR5_AES_VMULL (UL(0x2) << ID_ISAR5_AES_SHIFT) #define ID_ISAR5_SHA1_SHIFT 8 #define ID_ISAR5_SHA1_MASK (UL(0xf) << ID_ISAR5_SHA1_SHIFT) #define ID_ISAR5_SHA1_VAL(x) ((x) & ID_ISAR5_SHA1_MASK) #define ID_ISAR5_SHA1_NONE (UL(0x0) << ID_ISAR5_SHA1_SHIFT) #define ID_ISAR5_SHA1_IMPL (UL(0x1) << ID_ISAR5_SHA1_SHIFT) #define ID_ISAR5_SHA2_SHIFT 12 #define ID_ISAR5_SHA2_MASK (UL(0xf) << ID_ISAR5_SHA2_SHIFT) #define ID_ISAR5_SHA2_VAL(x) ((x) & ID_ISAR5_SHA2_MASK) #define ID_ISAR5_SHA2_NONE (UL(0x0) << ID_ISAR5_SHA2_SHIFT) #define ID_ISAR5_SHA2_IMPL (UL(0x1) << ID_ISAR5_SHA2_SHIFT) #define ID_ISAR5_CRC32_SHIFT 16 #define ID_ISAR5_CRC32_MASK (UL(0xf) << ID_ISAR5_CRC32_SHIFT) #define ID_ISAR5_CRC32_VAL(x) ((x) & ID_ISAR5_CRC32_MASK) #define ID_ISAR5_CRC32_NONE (UL(0x0) << ID_ISAR5_CRC32_SHIFT) #define ID_ISAR5_CRC32_IMPL (UL(0x1) << ID_ISAR5_CRC32_SHIFT) #define ID_ISAR5_RDM_SHIFT 24 #define ID_ISAR5_RDM_MASK (UL(0xf) << ID_ISAR5_RDM_SHIFT) #define ID_ISAR5_RDM_VAL(x) ((x) & ID_ISAR5_RDM_MASK) #define ID_ISAR5_RDM_NONE (UL(0x0) << ID_ISAR5_RDM_SHIFT) #define ID_ISAR5_RDM_IMPL (UL(0x1) << ID_ISAR5_RDM_SHIFT) #define ID_ISAR5_VCMA_SHIFT 28 #define ID_ISAR5_VCMA_MASK (UL(0xf) << ID_ISAR5_VCMA_SHIFT) #define ID_ISAR5_VCMA_VAL(x) ((x) & ID_ISAR5_VCMA_MASK) #define ID_ISAR5_VCMA_NONE (UL(0x0) << ID_ISAR5_VCMA_SHIFT) #define ID_ISAR5_VCMA_IMPL (UL(0x1) << ID_ISAR5_VCMA_SHIFT) /* MAIR_EL1 - Memory Attribute Indirection Register */ #define MAIR_ATTR_MASK(idx) (0xff << ((n)* 8)) #define MAIR_ATTR(attr, idx) ((attr) << ((idx) * 8)) #define MAIR_DEVICE_nGnRnE 0x00 #define MAIR_DEVICE_nGnRE 0x04 #define MAIR_NORMAL_NC 0x44 #define MAIR_NORMAL_WT 0xbb #define MAIR_NORMAL_WB 0xff /* MDCCINT_EL1 */ #define MDCCINT_EL1 MRS_REG(MDCCINT_EL1) #define MDCCINT_EL1_op0 2 #define MDCCINT_EL1_op1 0 #define MDCCINT_EL1_CRn 0 #define MDCCINT_EL1_CRm 2 #define MDCCINT_EL1_op2 0 /* MDCCSR_EL0 */ #define MDCCSR_EL0 MRS_REG(MDCCSR_EL0) #define MDCCSR_EL0_op0 2 #define MDCCSR_EL0_op1 3 #define MDCCSR_EL0_CRn 0 #define MDCCSR_EL0_CRm 1 #define MDCCSR_EL0_op2 0 /* MDSCR_EL1 - Monitor Debug System Control Register */ #define MDSCR_EL1 MRS_REG(MDSCR_EL1) #define MDSCR_EL1_op0 2 #define MDSCR_EL1_op1 0 #define MDSCR_EL1_CRn 0 #define MDSCR_EL1_CRm 2 #define MDSCR_EL1_op2 2 #define MDSCR_SS_SHIFT 0 #define MDSCR_SS (UL(0x1) << MDSCR_SS_SHIFT) #define MDSCR_KDE_SHIFT 13 #define MDSCR_KDE (UL(0x1) << MDSCR_KDE_SHIFT) #define MDSCR_MDE_SHIFT 15 #define MDSCR_MDE (UL(0x1) << MDSCR_MDE_SHIFT) /* MVFR0_EL1 */ #define MVFR0_EL1 MRS_REG(MVFR0_EL1) #define MVFR0_EL1_op0 0x3 #define MVFR0_EL1_op1 0x0 #define MVFR0_EL1_CRn 0x0 #define MVFR0_EL1_CRm 0x3 #define MVFR0_EL1_op2 0x0 #define MVFR0_SIMDReg_SHIFT 0 #define MVFR0_SIMDReg_MASK (UL(0xf) << MVFR0_SIMDReg_SHIFT) #define MVFR0_SIMDReg_VAL(x) ((x) & MVFR0_SIMDReg_MASK) #define MVFR0_SIMDReg_NONE (UL(0x0) << MVFR0_SIMDReg_SHIFT) #define MVFR0_SIMDReg_FP (UL(0x1) << MVFR0_SIMDReg_SHIFT) #define MVFR0_SIMDReg_AdvSIMD (UL(0x2) << MVFR0_SIMDReg_SHIFT) #define MVFR0_FPSP_SHIFT 4 #define MVFR0_FPSP_MASK (UL(0xf) << MVFR0_FPSP_SHIFT) #define MVFR0_FPSP_VAL(x) ((x) & 
MVFR0_FPSP_MASK) #define MVFR0_FPSP_NONE (UL(0x0) << MVFR0_FPSP_SHIFT) #define MVFR0_FPSP_VFP_v2 (UL(0x1) << MVFR0_FPSP_SHIFT) #define MVFR0_FPSP_VFP_v3_v4 (UL(0x2) << MVFR0_FPSP_SHIFT) #define MVFR0_FPDP_SHIFT 8 #define MVFR0_FPDP_MASK (UL(0xf) << MVFR0_FPDP_SHIFT) #define MVFR0_FPDP_VAL(x) ((x) & MVFR0_FPDP_MASK) #define MVFR0_FPDP_NONE (UL(0x0) << MVFR0_FPDP_SHIFT) #define MVFR0_FPDP_VFP_v2 (UL(0x1) << MVFR0_FPDP_SHIFT) #define MVFR0_FPDP_VFP_v3_v4 (UL(0x2) << MVFR0_FPDP_SHIFT) #define MVFR0_FPTrap_SHIFT 12 #define MVFR0_FPTrap_MASK (UL(0xf) << MVFR0_FPTrap_SHIFT) #define MVFR0_FPTrap_VAL(x) ((x) & MVFR0_FPTrap_MASK) #define MVFR0_FPTrap_NONE (UL(0x0) << MVFR0_FPTrap_SHIFT) #define MVFR0_FPTrap_IMPL (UL(0x1) << MVFR0_FPTrap_SHIFT) #define MVFR0_FPDivide_SHIFT 16 #define MVFR0_FPDivide_MASK (UL(0xf) << MVFR0_FPDivide_SHIFT) #define MVFR0_FPDivide_VAL(x) ((x) & MVFR0_FPDivide_MASK) #define MVFR0_FPDivide_NONE (UL(0x0) << MVFR0_FPDivide_SHIFT) #define MVFR0_FPDivide_IMPL (UL(0x1) << MVFR0_FPDivide_SHIFT) #define MVFR0_FPSqrt_SHIFT 20 #define MVFR0_FPSqrt_MASK (UL(0xf) << MVFR0_FPSqrt_SHIFT) #define MVFR0_FPSqrt_VAL(x) ((x) & MVFR0_FPSqrt_MASK) #define MVFR0_FPSqrt_NONE (UL(0x0) << MVFR0_FPSqrt_SHIFT) #define MVFR0_FPSqrt_IMPL (UL(0x1) << MVFR0_FPSqrt_SHIFT) #define MVFR0_FPShVec_SHIFT 24 #define MVFR0_FPShVec_MASK (UL(0xf) << MVFR0_FPShVec_SHIFT) #define MVFR0_FPShVec_VAL(x) ((x) & MVFR0_FPShVec_MASK) #define MVFR0_FPShVec_NONE (UL(0x0) << MVFR0_FPShVec_SHIFT) #define MVFR0_FPShVec_IMPL (UL(0x1) << MVFR0_FPShVec_SHIFT) #define MVFR0_FPRound_SHIFT 28 #define MVFR0_FPRound_MASK (UL(0xf) << MVFR0_FPRound_SHIFT) #define MVFR0_FPRound_VAL(x) ((x) & MVFR0_FPRound_MASK) #define MVFR0_FPRound_NONE (UL(0x0) << MVFR0_FPRound_SHIFT) #define MVFR0_FPRound_IMPL (UL(0x1) << MVFR0_FPRound_SHIFT) /* MVFR1_EL1 */ #define MVFR1_EL1 MRS_REG(MVFR1_EL1) #define MVFR1_EL1_op0 0x3 #define MVFR1_EL1_op1 0x0 #define MVFR1_EL1_CRn 0x0 #define MVFR1_EL1_CRm 0x3 #define MVFR1_EL1_op2 0x1 #define MVFR1_FPFtZ_SHIFT 0 #define MVFR1_FPFtZ_MASK (UL(0xf) << MVFR1_FPFtZ_SHIFT) #define MVFR1_FPFtZ_VAL(x) ((x) & MVFR1_FPFtZ_MASK) #define MVFR1_FPFtZ_NONE (UL(0x0) << MVFR1_FPFtZ_SHIFT) #define MVFR1_FPFtZ_IMPL (UL(0x1) << MVFR1_FPFtZ_SHIFT) #define MVFR1_FPDNaN_SHIFT 4 #define MVFR1_FPDNaN_MASK (UL(0xf) << MVFR1_FPDNaN_SHIFT) #define MVFR1_FPDNaN_VAL(x) ((x) & MVFR1_FPDNaN_MASK) #define MVFR1_FPDNaN_NONE (UL(0x0) << MVFR1_FPDNaN_SHIFT) #define MVFR1_FPDNaN_IMPL (UL(0x1) << MVFR1_FPDNaN_SHIFT) #define MVFR1_SIMDLS_SHIFT 8 #define MVFR1_SIMDLS_MASK (UL(0xf) << MVFR1_SIMDLS_SHIFT) #define MVFR1_SIMDLS_VAL(x) ((x) & MVFR1_SIMDLS_MASK) #define MVFR1_SIMDLS_NONE (UL(0x0) << MVFR1_SIMDLS_SHIFT) #define MVFR1_SIMDLS_IMPL (UL(0x1) << MVFR1_SIMDLS_SHIFT) #define MVFR1_SIMDInt_SHIFT 12 #define MVFR1_SIMDInt_MASK (UL(0xf) << MVFR1_SIMDInt_SHIFT) #define MVFR1_SIMDInt_VAL(x) ((x) & MVFR1_SIMDInt_MASK) #define MVFR1_SIMDInt_NONE (UL(0x0) << MVFR1_SIMDInt_SHIFT) #define MVFR1_SIMDInt_IMPL (UL(0x1) << MVFR1_SIMDInt_SHIFT) #define MVFR1_SIMDSP_SHIFT 16 #define MVFR1_SIMDSP_MASK (UL(0xf) << MVFR1_SIMDSP_SHIFT) #define MVFR1_SIMDSP_VAL(x) ((x) & MVFR1_SIMDSP_MASK) #define MVFR1_SIMDSP_NONE (UL(0x0) << MVFR1_SIMDSP_SHIFT) #define MVFR1_SIMDSP_IMPL (UL(0x1) << MVFR1_SIMDSP_SHIFT) #define MVFR1_SIMDHP_SHIFT 20 #define MVFR1_SIMDHP_MASK (UL(0xf) << MVFR1_SIMDHP_SHIFT) #define MVFR1_SIMDHP_VAL(x) ((x) & MVFR1_SIMDHP_MASK) #define MVFR1_SIMDHP_NONE (UL(0x0) << MVFR1_SIMDHP_SHIFT) #define MVFR1_SIMDHP_CONV_SP (UL(0x1) << MVFR1_SIMDHP_SHIFT) #define 
MVFR1_SIMDHP_ARITH (UL(0x2) << MVFR1_SIMDHP_SHIFT) #define MVFR1_FPHP_SHIFT 24 #define MVFR1_FPHP_MASK (UL(0xf) << MVFR1_FPHP_SHIFT) #define MVFR1_FPHP_VAL(x) ((x) & MVFR1_FPHP_MASK) #define MVFR1_FPHP_NONE (UL(0x0) << MVFR1_FPHP_SHIFT) #define MVFR1_FPHP_CONV_SP (UL(0x1) << MVFR1_FPHP_SHIFT) #define MVFR1_FPHP_CONV_DP (UL(0x2) << MVFR1_FPHP_SHIFT) #define MVFR1_FPHP_ARITH (UL(0x3) << MVFR1_FPHP_SHIFT) #define MVFR1_SIMDFMAC_SHIFT 28 #define MVFR1_SIMDFMAC_MASK (UL(0xf) << MVFR1_SIMDFMAC_SHIFT) #define MVFR1_SIMDFMAC_VAL(x) ((x) & MVFR1_SIMDFMAC_MASK) #define MVFR1_SIMDFMAC_NONE (UL(0x0) << MVFR1_SIMDFMAC_SHIFT) #define MVFR1_SIMDFMAC_IMPL (UL(0x1) << MVFR1_SIMDFMAC_SHIFT) /* OSDLR_EL1 */ #define OSDLR_EL1 MRS_REG(OSDLR_EL1) #define OSDLR_EL1_op0 2 #define OSDLR_EL1_op1 0 #define OSDLR_EL1_CRn 1 #define OSDLR_EL1_CRm 3 #define OSDLR_EL1_op2 4 /* OSLAR_EL1 */ #define OSLAR_EL1 MRS_REG(OSLAR_EL1) #define OSLAR_EL1_op0 2 #define OSLAR_EL1_op1 0 #define OSLAR_EL1_CRn 1 #define OSLAR_EL1_CRm 0 #define OSLAR_EL1_op2 4 /* OSLSR_EL1 */ #define OSLSR_EL1 MRS_REG(OSLSR_EL1) #define OSLSR_EL1_op0 2 #define OSLSR_EL1_op1 0 #define OSLSR_EL1_CRn 1 #define OSLSR_EL1_CRm 1 #define OSLSR_EL1_op2 4 /* PAR_EL1 - Physical Address Register */ #define PAR_F_SHIFT 0 #define PAR_F (0x1 << PAR_F_SHIFT) #define PAR_SUCCESS(x) (((x) & PAR_F) == 0) /* When PAR_F == 0 (success) */ #define PAR_LOW_MASK 0xfff #define PAR_SH_SHIFT 7 #define PAR_SH_MASK (0x3 << PAR_SH_SHIFT) #define PAR_NS_SHIFT 9 #define PAR_NS_MASK (0x3 << PAR_NS_SHIFT) #define PAR_PA_SHIFT 12 #define PAR_PA_MASK 0x0000fffffffff000 #define PAR_ATTR_SHIFT 56 #define PAR_ATTR_MASK (UL(0xff) << PAR_ATTR_SHIFT) /* When PAR_F == 1 (aborted) */ #define PAR_FST_SHIFT 1 #define PAR_FST_MASK (0x3f << PAR_FST_SHIFT) #define PAR_PTW_SHIFT 8 #define PAR_PTW_MASK (0x1 << PAR_PTW_SHIFT) #define PAR_S_SHIFT 9 #define PAR_S_MASK (0x1 << PAR_S_SHIFT) /* PMBIDR_EL1 */ #define PMBIDR_EL1 MRS_REG(PMBIDR_EL1) #define PMBIDR_EL1_op0 0x3 #define PMBIDR_EL1_op1 0x0 #define PMBIDR_EL1_CRn 0x9 #define PMBIDR_EL1_CRm 0xa #define PMBIDR_EL1_op2 0x7 #define PMBIDR_Align_SHIFT 0 #define PMBIDR_Align_MASK (UL(0xf) << PMBIDR_Align_SHIFT) #define PMBIDR_P_SHIFT 4 #define PMBIDR_P (UL(0x1) << PMBIDR_P_SHIFT) #define PMBIDR_F_SHIFT 5 #define PMBIDR_F (UL(0x1) << PMBIDR_F_SHIFT) /* PMBLIMITR_EL1 */ #define PMBLIMITR_EL1 MRS_REG(PMBLIMITR_EL1) #define PMBLIMITR_EL1_op0 0x3 #define PMBLIMITR_EL1_op1 0x0 #define PMBLIMITR_EL1_CRn 0x9 #define PMBLIMITR_EL1_CRm 0xa #define PMBLIMITR_EL1_op2 0x0 #define PMBLIMITR_E_SHIFT 0 #define PMBLIMITR_E (UL(0x1) << PMBLIMITR_E_SHIFT) #define PMBLIMITR_FM_SHIFT 1 #define PMBLIMITR_FM_MASK (UL(0x3) << PMBLIMITR_FM_SHIFT) #define PMBLIMITR_PMFZ_SHIFT 5 #define PMBLIMITR_PMFZ (UL(0x1) << PMBLIMITR_PMFZ_SHIFT) #define PMBLIMITR_LIMIT_SHIFT 12 #define PMBLIMITR_LIMIT_MASK \ (UL(0xfffffffffffff) << PMBLIMITR_LIMIT_SHIFT) /* PMBPTR_EL1 */ #define PMBPTR_EL1 MRS_REG(PMBPTR_EL1) #define PMBPTR_EL1_op0 0x3 #define PMBPTR_EL1_op1 0x0 #define PMBPTR_EL1_CRn 0x9 #define PMBPTR_EL1_CRm 0xa #define PMBPTR_EL1_op2 0x1 #define PMBPTR_PTR_SHIFT 0 #define PMBPTR_PTR_MASK \ (UL(0xffffffffffffffff) << PMBPTR_PTR_SHIFT) /* PMBSR_EL1 */ #define PMBSR_EL1 MRS_REG(PMBSR_EL1) #define PMBSR_EL1_op0 0x3 #define PMBSR_EL1_op1 0x0 #define PMBSR_EL1_CRn 0x9 #define PMBSR_EL1_CRm 0xa #define PMBSR_EL1_op2 0x3 #define PMBSR_MSS_SHIFT 0 #define PMBSR_MSS_MASK (UL(0xffff) << PMBSR_MSS_SHIFT) #define PMBSR_COLL_SHIFT 16 #define PMBSR_COLL (UL(0x1) << PMBSR_COLL_SHIFT) #define PMBSR_S_SHIFT 17
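/*
 * Illustrative sketch only (not part of this header): one way the PAR_EL1
 * macros above are typically consumed after a stage-1 address translation
 * instruction. The helper name is hypothetical and READ_SPECIALREG() is
 * assumed to be the system-register accessor defined earlier in this file.
 */
static __inline int
example_va_to_pa(uint64_t va, uint64_t *pa)
{
	uint64_t par;

	/* Translate the VA, then synchronize before reading PAR_EL1. */
	__asm __volatile("at s1e1r, %0" : : "r" (va));
	__asm __volatile("isb" : : : "memory");
	par = READ_SPECIALREG(par_el1);
	if (!PAR_SUCCESS(par))
		return (-1);	/* PAR_FST_MASK then holds the fault status */
	/* Combine the translated page address with the page offset. */
	*pa = (par & PAR_PA_MASK) | (va & PAR_LOW_MASK);
	return (0);
}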
#define PMBSR_S (UL(0x1) << PMBSR_S_SHIFT) #define PMBSR_EA_SHIFT 18 #define PMBSR_EA (UL(0x1) << PMBSR_EA_SHIFT) #define PMBSR_DL_SHIFT 19 #define PMBSR_DL (UL(0x1) << PMBSR_DL_SHIFT) #define PMBSR_EC_SHIFT 26 #define PMBSR_EC_MASK (UL(0x3f) << PMBSR_EC_SHIFT) /* PMCCFILTR_EL0 */ #define PMCCFILTR_EL0 MRS_REG(PMCCFILTR_EL0) #define PMCCFILTR_EL0_op0 3 #define PMCCFILTR_EL0_op1 3 #define PMCCFILTR_EL0_CRn 14 #define PMCCFILTR_EL0_CRm 15 #define PMCCFILTR_EL0_op2 7 /* PMCCNTR_EL0 */ #define PMCCNTR_EL0 MRS_REG(PMCCNTR_EL0) #define PMCCNTR_EL0_op0 3 #define PMCCNTR_EL0_op1 3 #define PMCCNTR_EL0_CRn 9 #define PMCCNTR_EL0_CRm 13 #define PMCCNTR_EL0_op2 0 /* PMCEID0_EL0 */ #define PMCEID0_EL0 MRS_REG(PMCEID0_EL0) #define PMCEID0_EL0_op0 3 #define PMCEID0_EL0_op1 3 #define PMCEID0_EL0_CRn 9 #define PMCEID0_EL0_CRm 12 #define PMCEID0_EL0_op2 6 /* PMCEID1_EL0 */ #define PMCEID1_EL0 MRS_REG(PMCEID1_EL0) #define PMCEID1_EL0_op0 3 #define PMCEID1_EL0_op1 3 #define PMCEID1_EL0_CRn 9 #define PMCEID1_EL0_CRm 12 #define PMCEID1_EL0_op2 7 /* PMCNTENCLR_EL0 */ #define PMCNTENCLR_EL0 MRS_REG(PMCNTENCLR_EL0) #define PMCNTENCLR_EL0_op0 3 #define PMCNTENCLR_EL0_op1 3 #define PMCNTENCLR_EL0_CRn 9 #define PMCNTENCLR_EL0_CRm 12 #define PMCNTENCLR_EL0_op2 2 /* PMCNTENSET_EL0 */ #define PMCNTENSET_EL0 MRS_REG(PMCNTENSET_EL0) #define PMCNTENSET_EL0_op0 3 #define PMCNTENSET_EL0_op1 3 #define PMCNTENSET_EL0_CRn 9 #define PMCNTENSET_EL0_CRm 12 #define PMCNTENSET_EL0_op2 1 /* PMCR_EL0 - Performance Monitoring Counters */ #define PMCR_EL0 MRS_REG(PMCR_EL0) #define PMCR_EL0_op0 3 #define PMCR_EL0_op1 3 #define PMCR_EL0_CRn 9 #define PMCR_EL0_CRm 12 #define PMCR_EL0_op2 0 #define PMCR_E (1 << 0) /* Enable all counters */ #define PMCR_P (1 << 1) /* Reset all counters */ #define PMCR_C (1 << 2) /* Clock counter reset */ #define PMCR_D (1 << 3) /* CNTR counts every 64 clk cycles */ #define PMCR_X (1 << 4) /* Export to ext.
monitoring (ETM) */ #define PMCR_DP (1 << 5) /* Disable CCNT if non-invasive debug*/ #define PMCR_LC (1 << 6) /* Long cycle count enable */ #define PMCR_IMP_SHIFT 24 /* Implementer code */ #define PMCR_IMP_MASK (0xff << PMCR_IMP_SHIFT) #define PMCR_IMP_ARM 0x41 #define PMCR_IDCODE_SHIFT 16 /* Identification code */ #define PMCR_IDCODE_MASK (0xff << PMCR_IDCODE_SHIFT) #define PMCR_IDCODE_CORTEX_A57 0x01 #define PMCR_IDCODE_CORTEX_A72 0x02 #define PMCR_IDCODE_CORTEX_A53 0x03 #define PMCR_IDCODE_CORTEX_A73 0x04 #define PMCR_IDCODE_CORTEX_A35 0x0a #define PMCR_IDCODE_CORTEX_A76 0x0b #define PMCR_IDCODE_NEOVERSE_N1 0x0c #define PMCR_IDCODE_CORTEX_A77 0x10 #define PMCR_IDCODE_CORTEX_A55 0x45 #define PMCR_IDCODE_NEOVERSE_E1 0x46 #define PMCR_IDCODE_CORTEX_A75 0x4a #define PMCR_N_SHIFT 11 /* Number of counters implemented */ #define PMCR_N_MASK (0x1f << PMCR_N_SHIFT) /* PMEVCNTR_EL0 */ #define PMEVCNTR_EL0_op0 3 #define PMEVCNTR_EL0_op1 3 #define PMEVCNTR_EL0_CRn 14 #define PMEVCNTR_EL0_CRm 8 /* * PMEVCNTRn_EL0_CRm[1:0] holds the upper 2 bits of 'n' * PMEVCNTRn_EL0_op2 holds the lower 3 bits of 'n' */ /* PMEVTYPER_EL0 - Performance Monitoring Event Type */ #define PMEVTYPER_EL0_op0 3 #define PMEVTYPER_EL0_op1 3 #define PMEVTYPER_EL0_CRn 14 #define PMEVTYPER_EL0_CRm 12 /* * PMEVTYPERn_EL0_CRm[1:0] holds the upper 2 bits of 'n' * PMEVTYPERn_EL0_op2 holds the lower 3 bits of 'n' */ #define PMEVTYPER_EVTCOUNT_MASK 0x000003ff /* ARMv8.0 */ #define PMEVTYPER_EVTCOUNT_8_1_MASK 0x0000ffff /* ARMv8.1+ */ #define PMEVTYPER_MT (1 << 25) /* Multithreading */ #define PMEVTYPER_M (1 << 26) /* Secure EL3 filtering */ #define PMEVTYPER_NSH (1 << 27) /* Non-secure hypervisor filtering */ #define PMEVTYPER_NSU (1 << 28) /* Non-secure user filtering */ #define PMEVTYPER_NSK (1 << 29) /* Non-secure kernel filtering */ #define PMEVTYPER_U (1 << 30) /* User filtering */ #define PMEVTYPER_P (1 << 31) /* Privileged filtering */ /* PMINTENCLR_EL1 */ #define PMINTENCLR_EL1 MRS_REG(PMINTENCLR_EL1) #define PMINTENCLR_EL1_op0 3 #define PMINTENCLR_EL1_op1 0 #define PMINTENCLR_EL1_CRn 9 #define PMINTENCLR_EL1_CRm 14 #define PMINTENCLR_EL1_op2 2 /* PMINTENSET_EL1 */ #define PMINTENSET_EL1 MRS_REG(PMINTENSET_EL1) #define PMINTENSET_EL1_op0 3 #define PMINTENSET_EL1_op1 0 #define PMINTENSET_EL1_CRn 9 #define PMINTENSET_EL1_CRm 14 #define PMINTENSET_EL1_op2 1 /* PMMIR_EL1 */ #define PMMIR_EL1 MRS_REG(PMMIR_EL1) #define PMMIR_EL1_op0 3 #define PMMIR_EL1_op1 0 #define PMMIR_EL1_CRn 9 #define PMMIR_EL1_CRm 14 #define PMMIR_EL1_op2 6 /* PMOVSCLR_EL0 */ #define PMOVSCLR_EL0 MRS_REG(PMOVSCLR_EL0) #define PMOVSCLR_EL0_op0 3 #define PMOVSCLR_EL0_op1 3 #define PMOVSCLR_EL0_CRn 9 #define PMOVSCLR_EL0_CRm 12 #define PMOVSCLR_EL0_op2 3 /* PMOVSSET_EL0 */ #define PMOVSSET_EL0 MRS_REG(PMOVSSET_EL0) #define PMOVSSET_EL0_op0 3 #define PMOVSSET_EL0_op1 3 #define PMOVSSET_EL0_CRn 9 #define PMOVSSET_EL0_CRm 14 #define PMOVSSET_EL0_op2 3 /* PMSCR_EL1 */ #define PMSCR_EL1 MRS_REG(PMSCR_EL1) #define PMSCR_EL1_op0 0x3 #define PMSCR_EL1_op1 0x0 #define PMSCR_EL1_CRn 0x9 #define PMSCR_EL1_CRm 0x9 #define PMSCR_EL1_op2 0x0 #define PMSCR_E0SPE_SHIFT 0 #define PMSCR_E0SPE (UL(0x1) << PMSCR_E0SPE_SHIFT) #define PMSCR_E1SPE_SHIFT 1 #define PMSCR_E1SPE (UL(0x1) << PMSCR_E1SPE_SHIFT) #define PMSCR_CX_SHIFT 3 #define PMSCR_CX (UL(0x1) << PMSCR_CX_SHIFT) #define PMSCR_PA_SHIFT 4 #define PMSCR_PA (UL(0x1) << PMSCR_PA_SHIFT) #define PMSCR_TS_SHIFT 5 #define PMSCR_TS (UL(0x1) << PMSCR_TS_SHIFT) #define PMSCR_PCT_SHIFT 6 #define PMSCR_PCT_MASK (UL(0x3) << 
PMSCR_PCT_SHIFT) /* PMSELR_EL0 */ #define PMSELR_EL0 MRS_REG(PMSELR_EL0) #define PMSELR_EL0_op0 3 #define PMSELR_EL0_op1 3 #define PMSELR_EL0_CRn 9 #define PMSELR_EL0_CRm 12 #define PMSELR_EL0_op2 5 #define PMSELR_SEL_MASK 0x1f /* PMSEVFR_EL1 */ #define PMSEVFR_EL1 MRS_REG(PMSEVFR_EL1) #define PMSEVFR_EL1_op0 0x3 #define PMSEVFR_EL1_op1 0x0 #define PMSEVFR_EL1_CRn 0x9 #define PMSEVFR_EL1_CRm 0x9 #define PMSEVFR_EL1_op2 0x5 /* PMSFCR_EL1 */ #define PMSFCR_EL1 MRS_REG(PMSFCR_EL1) #define PMSFCR_EL1_op0 0x3 #define PMSFCR_EL1_op1 0x0 #define PMSFCR_EL1_CRn 0x9 #define PMSFCR_EL1_CRm 0x9 #define PMSFCR_EL1_op2 0x4 #define PMSFCR_FE_SHIFT 0 #define PMSFCR_FE (UL(0x1) << PMSFCR_FE_SHIFT) #define PMSFCR_FT_SHIFT 1 #define PMSFCR_FT (UL(0x1) << PMSFCR_FT_SHIFT) #define PMSFCR_FL_SHIFT 2 #define PMSFCR_FL (UL(0x1) << PMSFCR_FL_SHIFT) #define PMSFCR_FnE_SHIFT 3 #define PMSFCR_FnE (UL(0x1) << PMSFCR_FnE_SHIFT) #define PMSFCR_B_SHIFT 16 #define PMSFCR_B (UL(0x1) << PMSFCR_B_SHIFT) #define PMSFCR_LD_SHIFT 17 #define PMSFCR_LD (UL(0x1) << PMSFCR_LD_SHIFT) #define PMSFCR_ST_SHIFT 18 #define PMSFCR_ST (UL(0x1) << PMSFCR_ST_SHIFT) /* PMSICR_EL1 */ #define PMSICR_EL1 MRS_REG(PMSICR_EL1) #define PMSICR_EL1_op0 0x3 #define PMSICR_EL1_op1 0x0 #define PMSICR_EL1_CRn 0x9 #define PMSICR_EL1_CRm 0x9 #define PMSICR_EL1_op2 0x2 #define PMSICR_COUNT_SHIFT 0 #define PMSICR_COUNT_MASK (UL(0xffffffff) << PMSICR_COUNT_SHIFT) #define PMSICR_ECOUNT_SHIFT 56 #define PMSICR_ECOUNT_MASK (UL(0xff) << PMSICR_ECOUNT_SHIFT) /* PMSIDR_EL1 */ #define PMSIDR_EL1 MRS_REG(PMSIDR_EL1) #define PMSIDR_EL1_op0 0x3 #define PMSIDR_EL1_op1 0x0 #define PMSIDR_EL1_CRn 0x9 #define PMSIDR_EL1_CRm 0x9 #define PMSIDR_EL1_op2 0x7 #define PMSIDR_FE_SHIFT 0 #define PMSIDR_FE (UL(0x1) << PMSIDR_FE_SHIFT) #define PMSIDR_FT_SHIFT 1 #define PMSIDR_FT (UL(0x1) << PMSIDR_FT_SHIFT) #define PMSIDR_FL_SHIFT 2 #define PMSIDR_FL (UL(0x1) << PMSIDR_FL_SHIFT) #define PMSIDR_ArchInst_SHIFT 3 #define PMSIDR_ArchInst (UL(0x1) << PMSIDR_ArchInst_SHIFT) #define PMSIDR_LDS_SHIFT 4 #define PMSIDR_LDS (UL(0x1) << PMSIDR_LDS_SHIFT) #define PMSIDR_ERnd_SHIFT 5 #define PMSIDR_ERnd (UL(0x1) << PMSIDR_ERnd_SHIFT) #define PMSIDR_FnE_SHIFT 6 #define PMSIDR_FnE (UL(0x1) << PMSIDR_FnE_SHIFT) #define PMSIDR_Interval_SHIFT 8 #define PMSIDR_Interval_MASK (UL(0xf) << PMSIDR_Interval_SHIFT) #define PMSIDR_MaxSize_SHIFT 12 #define PMSIDR_MaxSize_MASK (UL(0xf) << PMSIDR_MaxSize_SHIFT) #define PMSIDR_CountSize_SHIFT 16 #define PMSIDR_CountSize_MASK (UL(0xf) << PMSIDR_CountSize_SHIFT) #define PMSIDR_Format_SHIFT 20 #define PMSIDR_Format_MASK (UL(0xf) << PMSIDR_Format_SHIFT) #define PMSIDR_PBT_SHIFT 24 #define PMSIDR_PBT (UL(0x1) << PMSIDR_PBT_SHIFT) /* PMSIRR_EL1 */ #define PMSIRR_EL1 MRS_REG(PMSIRR_EL1) #define PMSIRR_EL1_op0 0x3 #define PMSIRR_EL1_op1 0x0 #define PMSIRR_EL1_CRn 0x9 #define PMSIRR_EL1_CRm 0x9 #define PMSIRR_EL1_op2 0x3 #define PMSIRR_RND_SHIFT 0 #define PMSIRR_RND (UL(0x1) << PMSIRR_RND_SHIFT) #define PMSIRR_INTERVAL_SHIFT 8 #define PMSIRR_INTERVAL_MASK (UL(0xffffff) << PMSIRR_INTERVAL_SHIFT) /* PMSLATFR_EL1 */ #define PMSLATFR_EL1 MRS_REG(PMSLATFR_EL1) #define PMSLATFR_EL1_op0 0x3 #define PMSLATFR_EL1_op1 0x0 #define PMSLATFR_EL1_CRn 0x9 #define PMSLATFR_EL1_CRm 0x9 #define PMSLATFR_EL1_op2 0x6 #define PMSLATFR_MINLAT_SHIFT 0 #define PMSLATFR_MINLAT_MASK (UL(0xfff) << PMSLATFR_MINLAT_SHIFT) /* PMSNEVFR_EL1 */ #define PMSNEVFR_EL1 MRS_REG(PMSNEVFR_EL1) #define PMSNEVFR_EL1_op0 0x3 #define PMSNEVFR_EL1_op1 0x0 #define PMSNEVFR_EL1_CRn 0x9 #define PMSNEVFR_EL1_CRm 0x9 
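/*
 * Illustrative sketch only (not part of this header): how a driver can
 * derive the PMEVCNTR<n>_EL0/PMEVTYPER<n>_EL0 register encoding from the
 * counter index 'n' as described in the comments above, and how the number
 * of implemented event counters is read out of PMCR_EL0. The helper names
 * are hypothetical; READ_SPECIALREG() is assumed to be the accessor defined
 * earlier in this file.
 */
static __inline void
example_pmevcntr_encoding(u_int n, u_int *crm, u_int *op2)
{
	/* CRm[1:0] carries bits [4:3] of 'n', op2 carries bits [2:0]. */
	*crm = PMEVCNTR_EL0_CRm | ((n >> 3) & 0x3);
	*op2 = n & 0x7;
}

static __inline u_int
example_pmu_counter_count(void)
{
	/* PMCR_EL0.N reports how many event counters are implemented. */
	return ((READ_SPECIALREG(pmcr_el0) & PMCR_N_MASK) >> PMCR_N_SHIFT);
}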
#define PMSNEVFR_EL1_op2 0x1 /* PMSWINC_EL0 */ #define PMSWINC_EL0 MRS_REG(PMSWINC_EL0) #define PMSWINC_EL0_op0 3 #define PMSWINC_EL0_op1 3 #define PMSWINC_EL0_CRn 9 #define PMSWINC_EL0_CRm 12 #define PMSWINC_EL0_op2 4 /* PMUSERENR_EL0 */ #define PMUSERENR_EL0 MRS_REG(PMUSERENR_EL0) #define PMUSERENR_EL0_op0 3 #define PMUSERENR_EL0_op1 3 #define PMUSERENR_EL0_CRn 9 #define PMUSERENR_EL0_CRm 14 #define PMUSERENR_EL0_op2 0 /* PMXEVCNTR_EL0 */ #define PMXEVCNTR_EL0 MRS_REG(PMXEVCNTR_EL0) #define PMXEVCNTR_EL0_op0 3 #define PMXEVCNTR_EL0_op1 3 #define PMXEVCNTR_EL0_CRn 9 #define PMXEVCNTR_EL0_CRm 13 #define PMXEVCNTR_EL0_op2 2 /* PMXEVTYPER_EL0 */ #define PMXEVTYPER_EL0 MRS_REG(PMXEVTYPER_EL0) #define PMXEVTYPER_EL0_op0 3 #define PMXEVTYPER_EL0_op1 3 #define PMXEVTYPER_EL0_CRn 9 #define PMXEVTYPER_EL0_CRm 13 #define PMXEVTYPER_EL0_op2 1 /* SCTLR_EL1 - System Control Register */ #define SCTLR_RES1 0x30d00800 /* Reserved ARMv8.0, write 1 */ #define SCTLR_M (UL(0x1) << 0) #define SCTLR_A (UL(0x1) << 1) #define SCTLR_C (UL(0x1) << 2) #define SCTLR_SA (UL(0x1) << 3) #define SCTLR_SA0 (UL(0x1) << 4) #define SCTLR_CP15BEN (UL(0x1) << 5) #define SCTLR_nAA (UL(0x1) << 6) #define SCTLR_ITD (UL(0x1) << 7) #define SCTLR_SED (UL(0x1) << 8) #define SCTLR_UMA (UL(0x1) << 9) #define SCTLR_EnRCTX (UL(0x1) << 10) #define SCTLR_EOS (UL(0x1) << 11) #define SCTLR_I (UL(0x1) << 12) #define SCTLR_EnDB (UL(0x1) << 13) #define SCTLR_DZE (UL(0x1) << 14) #define SCTLR_UCT (UL(0x1) << 15) #define SCTLR_nTWI (UL(0x1) << 16) /* Bit 17 is reserved */ #define SCTLR_nTWE (UL(0x1) << 18) #define SCTLR_WXN (UL(0x1) << 19) #define SCTLR_TSCXT (UL(0x1) << 20) #define SCTLR_IESB (UL(0x1) << 21) #define SCTLR_EIS (UL(0x1) << 22) #define SCTLR_SPAN (UL(0x1) << 23) #define SCTLR_E0E (UL(0x1) << 24) #define SCTLR_EE (UL(0x1) << 25) #define SCTLR_UCI (UL(0x1) << 26) #define SCTLR_EnDA (UL(0x1) << 27) #define SCTLR_nTLSMD (UL(0x1) << 28) #define SCTLR_LSMAOE (UL(0x1) << 29) #define SCTLR_EnIB (UL(0x1) << 30) #define SCTLR_EnIA (UL(0x1) << 31) /* Bits 34:32 are reserved */ #define SCTLR_BT0 (UL(0x1) << 35) #define SCTLR_BT1 (UL(0x1) << 36) #define SCTLR_ITFSB (UL(0x1) << 37) #define SCTLR_TCF0_MASK (UL(0x3) << 38) #define SCTLR_TCF_MASK (UL(0x3) << 40) #define SCTLR_ATA0 (UL(0x1) << 42) #define SCTLR_ATA (UL(0x1) << 43) #define SCTLR_DSSBS (UL(0x1) << 44) #define SCTLR_TWEDEn (UL(0x1) << 45) #define SCTLR_TWEDEL_MASK (UL(0xf) << 46) /* Bits 53:50 are reserved */ #define SCTLR_EnASR (UL(0x1) << 54) #define SCTLR_EnAS0 (UL(0x1) << 55) #define SCTLR_EnALS (UL(0x1) << 56) #define SCTLR_EPAN (UL(0x1) << 57) /* SPSR_EL1 */ /* * When the exception is taken in AArch64: * M[3:2] is the exception level * M[1] is unused * M[0] is the SP select: * 0: always SP0 * 1: current ELs SP */ #define PSR_M_EL0t 0x00000000 #define PSR_M_EL1t 0x00000004 #define PSR_M_EL1h 0x00000005 #define PSR_M_EL2t 0x00000008 #define PSR_M_EL2h 0x00000009 #define PSR_M_64 0x00000000 #define PSR_M_32 0x00000010 #define PSR_M_MASK 0x0000000f #define PSR_T 0x00000020 #define PSR_AARCH32 0x00000010 #define PSR_F 0x00000040 #define PSR_I 0x00000080 #define PSR_A 0x00000100 #define PSR_D 0x00000200 #define PSR_DAIF (PSR_D | PSR_A | PSR_I | PSR_F) /* The default DAIF mask. 
These bits are valid in spsr_el1 and daif */ #define PSR_DAIF_DEFAULT (PSR_F) #define PSR_IL 0x00100000 #define PSR_SS 0x00200000 #define PSR_V 0x10000000 #define PSR_C 0x20000000 #define PSR_Z 0x40000000 #define PSR_N 0x80000000 #define PSR_FLAGS 0xf0000000 /* PSR fields that can be set from 32-bit and 64-bit processes */ #define PSR_SETTABLE_32 PSR_FLAGS #define PSR_SETTABLE_64 (PSR_FLAGS | PSR_SS) /* TCR_EL1 - Translation Control Register */ /* Bits 63:59 are reserved */ #define TCR_TCMA1_SHIFT 58 #define TCR_TCMA1 (1UL << TCR_TCMA1_SHIFT) #define TCR_TCMA0_SHIFT 57 #define TCR_TCMA0 (1UL << TCR_TCMA0_SHIFT) #define TCR_E0PD1_SHIFT 56 #define TCR_E0PD1 (1UL << TCR_E0PD1_SHIFT) #define TCR_E0PD0_SHIFT 55 #define TCR_E0PD0 (1UL << TCR_E0PD0_SHIFT) #define TCR_NFD1_SHIFT 54 #define TCR_NFD1 (1UL << TCR_NFD1_SHIFT) #define TCR_NFD0_SHIFT 53 #define TCR_NFD0 (1UL << TCR_NFD0_SHIFT) #define TCR_TBID1_SHIFT 52 #define TCR_TBID1 (1UL << TCR_TBID1_SHIFT) #define TCR_TBID0_SHIFT 51 #define TCR_TBID0 (1UL << TCR_TBID0_SHIFT) #define TCR_HWU162_SHIFT 50 #define TCR_HWU162 (1UL << TCR_HWU162_SHIFT) #define TCR_HWU161_SHIFT 49 #define TCR_HWU161 (1UL << TCR_HWU161_SHIFT) #define TCR_HWU160_SHIFT 48 #define TCR_HWU160 (1UL << TCR_HWU160_SHIFT) #define TCR_HWU159_SHIFT 47 #define TCR_HWU159 (1UL << TCR_HWU159_SHIFT) #define TCR_HWU1 \ (TCR_HWU159 | TCR_HWU160 | TCR_HWU161 | TCR_HWU162) #define TCR_HWU062_SHIFT 46 #define TCR_HWU062 (1UL << TCR_HWU062_SHIFT) #define TCR_HWU061_SHIFT 45 #define TCR_HWU061 (1UL << TCR_HWU061_SHIFT) #define TCR_HWU060_SHIFT 44 #define TCR_HWU060 (1UL << TCR_HWU060_SHIFT) #define TCR_HWU059_SHIFT 43 #define TCR_HWU059 (1UL << TCR_HWU059_SHIFT) #define TCR_HWU0 \ (TCR_HWU059 | TCR_HWU060 | TCR_HWU061 | TCR_HWU062) #define TCR_HPD1_SHIFT 42 #define TCR_HPD1 (1UL << TCR_HPD1_SHIFT) #define TCR_HPD0_SHIFT 41 #define TCR_HPD0 (1UL << TCR_HPD0_SHIFT) #define TCR_HD_SHIFT 40 #define TCR_HD (1UL << TCR_HD_SHIFT) #define TCR_HA_SHIFT 39 #define TCR_HA (1UL << TCR_HA_SHIFT) #define TCR_TBI1_SHIFT 38 #define TCR_TBI1 (1UL << TCR_TBI1_SHIFT) #define TCR_TBI0_SHIFT 37 #define TCR_TBI0 (1UL << TCR_TBI0_SHIFT) #define TCR_ASID_SHIFT 36 #define TCR_ASID_WIDTH 1 #define TCR_ASID_16 (1UL << TCR_ASID_SHIFT) /* Bit 35 is reserved */ #define TCR_IPS_SHIFT 32 #define TCR_IPS_WIDTH 3 #define TCR_IPS_32BIT (0UL << TCR_IPS_SHIFT) #define TCR_IPS_36BIT (1UL << TCR_IPS_SHIFT) #define TCR_IPS_40BIT (2UL << TCR_IPS_SHIFT) #define TCR_IPS_42BIT (3UL << TCR_IPS_SHIFT) #define TCR_IPS_44BIT (4UL << TCR_IPS_SHIFT) #define TCR_IPS_48BIT (5UL << TCR_IPS_SHIFT) #define TCR_TG1_SHIFT 30 #define TCR_TG1_16K (1UL << TCR_TG1_SHIFT) #define TCR_TG1_4K (2UL << TCR_TG1_SHIFT) #define TCR_TG1_64K (3UL << TCR_TG1_SHIFT) #define TCR_SH1_SHIFT 28 #define TCR_SH1_IS (3UL << TCR_SH1_SHIFT) #define TCR_ORGN1_SHIFT 26 #define TCR_ORGN1_WBWA (1UL << TCR_ORGN1_SHIFT) #define TCR_IRGN1_SHIFT 24 #define TCR_IRGN1_WBWA (1UL << TCR_IRGN1_SHIFT) #define TCR_EPD1_SHIFT 23 #define TCR_EPD1 (1UL << TCR_EPD1_SHIFT) #define TCR_A1_SHIFT 22 #define TCR_A1 (0x1UL << TCR_A1_SHIFT) #define TCR_T1SZ_SHIFT 16 #define TCR_T1SZ(x) ((x) << TCR_T1SZ_SHIFT) #define TCR_TG0_SHIFT 14 #define TCR_TG0_4K (0UL << TCR_TG0_SHIFT) #define TCR_TG0_64K (1UL << TCR_TG0_SHIFT) #define TCR_TG0_16K (2UL << TCR_TG0_SHIFT) #define TCR_SH0_SHIFT 12 #define TCR_SH0_IS (3UL << TCR_SH0_SHIFT) #define TCR_ORGN0_SHIFT 10 #define TCR_ORGN0_WBWA (1UL << TCR_ORGN0_SHIFT) #define TCR_IRGN0_SHIFT 8 #define TCR_IRGN0_WBWA (1UL << TCR_IRGN0_SHIFT) #define TCR_EPD0_SHIFT 7
#define TCR_EPD0 (1UL << TCR_EPD0_SHIFT) /* Bit 6 is reserved */ #define TCR_T0SZ_SHIFT 0 #define TCR_T0SZ_MASK 0x3f #define TCR_T0SZ(x) ((x) << TCR_T0SZ_SHIFT) #define TCR_TxSZ(x) (TCR_T1SZ(x) | TCR_T0SZ(x)) #define TCR_CACHE_ATTRS ((TCR_IRGN0_WBWA | TCR_IRGN1_WBWA) |\ (TCR_ORGN0_WBWA | TCR_ORGN1_WBWA)) #ifdef SMP #define TCR_SMP_ATTRS (TCR_SH0_IS | TCR_SH1_IS) #else #define TCR_SMP_ATTRS 0 #endif /* TTBR0_EL1 & TTBR1_EL1 - Translation Table Base Register 0 & 1 */ #define TTBR_ASID_SHIFT 48 #define TTBR_ASID_MASK (0xfffful << TTBR_ASID_SHIFT) #define TTBR_BADDR 0x0000fffffffffffe_ul #define TTBR_CnP_SHIFT 0 #define TTBR_CnP (1ul << TTBR_CnP_SHIFT) #endif /* !_MACHINE_ARMREG_H_ */
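/*
 * Illustrative sketch only (not part of the header proper): how the TCR_EL1
 * and TTBRn_EL1 macros above are typically combined when programming a
 * translation regime. The helper names are hypothetical and the field
 * values (48-bit VA, 4K granules, 40-bit IPS) are arbitrary examples, not
 * the values the kernel actually uses.
 */
static __inline uint64_t
example_tcr_value(void)
{
	/*
	 * 48-bit VA in both halves, 4K granules, write-back write-allocate
	 * cacheable walks, inner-shareable walks on SMP, 16-bit ASIDs.
	 */
	return (TCR_TxSZ(64 - 48) | TCR_TG0_4K | TCR_TG1_4K |
	    TCR_CACHE_ATTRS | TCR_SMP_ATTRS | TCR_ASID_16 | TCR_IPS_40BIT);
}

static __inline uint64_t
example_ttbr_value(uint64_t table_pa, uint64_t asid)
{
	/* Table base address in TTBR_BADDR, ASID in the top 16 bits. */
	return ((table_pa & TTBR_BADDR) |
	    ((asid << TTBR_ASID_SHIFT) & TTBR_ASID_MASK));
}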