Index: head/sys/arm64/arm64/locore.S
===================================================================
--- head/sys/arm64/arm64/locore.S
+++ head/sys/arm64/arm64/locore.S
@@ -683,7 +683,8 @@
 
 	/*
 	 * Setup TCR according to the PARange and ASIDBits fields
-	 * from ID_AA64MMFR0_EL1. More precisely, set TCR_EL1.AS
+	 * from ID_AA64MMFR0_EL1 and the HAFDBS field from the
+	 * ID_AA64MMFR1_EL1. More precisely, set TCR_EL1.AS
 	 * to 1 only if the ASIDBits field equals 0b0010.
 	 */
 	ldr	x2, tcr
@@ -700,6 +701,21 @@
 	/* Set TCR.AS with x3 */
 	bfi	x2, x3, #(TCR_ASID_SHIFT), #(TCR_ASID_WIDTH)
 
+	/*
+	 * Check if the HW supports access flag and dirty state updates,
+	 * and set TCR_EL1.HA and TCR_EL1.HD accordingly.
+	 */
+	mrs	x3, id_aa64mmfr1_el1
+	and	x3, x3, #(ID_AA64MMFR1_HAFDBS_MASK)
+	cmp	x3, #1
+	b.ne	1f
+	orr	x2, x2, #(TCR_HA)
+	b	2f
+1:
+	cmp	x3, #2
+	b.ne	2f
+	orr	x2, x2, #(TCR_HA | TCR_HD)
+2:
 	msr	tcr_el1, x2
 
 	/*
Index: head/sys/arm64/arm64/pmap.c
===================================================================
--- head/sys/arm64/arm64/pmap.c
+++ head/sys/arm64/arm64/pmap.c
@@ -3510,8 +3510,7 @@
 		KASSERT(opa == pa, ("pmap_enter: invalid update"));
 		if ((orig_l3 & ~ATTR_AF) != (new_l3 & ~ATTR_AF)) {
 			/* same PA, different attributes */
-			/* XXXMJ need to reload orig_l3 for hardware DBM. */
-			pmap_load_store(l3, new_l3);
+			orig_l3 = pmap_load_store(l3, new_l3);
 			pmap_invalidate_page(pmap, va);
 			if ((orig_l3 & ATTR_SW_MANAGED) != 0 &&
 			    pmap_pte_dirty(orig_l3))
Index: head/sys/arm64/include/armreg.h
===================================================================
--- head/sys/arm64/include/armreg.h
+++ head/sys/arm64/include/armreg.h
@@ -619,6 +619,11 @@
 #define	PSR_FLAGS	0xf0000000
 
 /* TCR_EL1 - Translation Control Register */
+#define	TCR_HD_SHIFT	40
+#define	TCR_HD		(0x1UL << TCR_HD_SHIFT)
+#define	TCR_HA_SHIFT	39
+#define	TCR_HA		(0x1UL << TCR_HA_SHIFT)
+
 #define	TCR_ASID_SHIFT	36
 #define	TCR_ASID_WIDTH	1
 #define	TCR_ASID_16	(0x1UL << TCR_ASID_SHIFT)