Index: sys/arm64/arm64/pmap.c
===================================================================
--- sys/arm64/arm64/pmap.c
+++ sys/arm64/arm64/pmap.c
@@ -440,8 +440,15 @@
 	pd_entry_t l1, *l2p;
 
 	l1 = pmap_load(l1p);
-	KASSERT((l1 & ATTR_DESCR_MASK) == L1_TABLE,
-	    ("%s: L1 entry %#lx is a leaf", __func__, l1));
+
+	/*
+	 * The valid bit may be clear if pmap_update_entry() is concurrently
+	 * modifying the entry, so for KVA only the entry type may be checked.
+	 */
+	KASSERT(va >= VM_MAX_USER_ADDRESS || (l1 & ATTR_DESCR_VALID) != 0,
+	    ("%s: L1 entry %#lx for %#lx is invalid", __func__, l1, va));
+	KASSERT((l1 & ATTR_DESCR_TYPE_MASK) == ATTR_DESCR_TYPE_TABLE,
+	    ("%s: L1 entry %#lx for %#lx is a leaf", __func__, l1, va));
 	l2p = (pd_entry_t *)PHYS_TO_DMAP(l1 & ~ATTR_MASK);
 	return (&l2p[pmap_l2_index(va)]);
 }
@@ -465,8 +472,15 @@
 	pt_entry_t *l3p;
 
 	l2 = pmap_load(l2p);
-	KASSERT((l2 & ATTR_DESCR_MASK) == L2_TABLE,
-	    ("%s: L2 entry %#lx is a leaf", __func__, l2));
+
+	/*
+	 * The valid bit may be clear if pmap_update_entry() is concurrently
+	 * modifying the entry, so for KVA only the entry type may be checked.
+	 */
+	KASSERT(va >= VM_MAX_USER_ADDRESS || (l2 & ATTR_DESCR_VALID) != 0,
+	    ("%s: L2 entry %#lx for %#lx is invalid", __func__, l2, va));
+	KASSERT((l2 & ATTR_DESCR_TYPE_MASK) == ATTR_DESCR_TYPE_TABLE,
+	    ("%s: L2 entry %#lx for %#lx is a leaf", __func__, l2, va));
 	l3p = (pt_entry_t *)PHYS_TO_DMAP(l2 & ~ATTR_MASK);
 	return (&l3p[pmap_l3_index(va)]);
 }
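
Note: the standalone program below is a minimal sketch of why the relaxed
assertions still work during a concurrent pmap_update_entry(), which clears
the valid bit of a live entry before rewriting it (break-before-make). The
macro values mirror my reading of sys/arm64/include/pte.h (valid bit = bit 0,
type bit = bit 1, L1_TABLE = valid table descriptor); it is a hypothetical
demo, not part of the patch.

	#include <stdint.h>
	#include <stdio.h>

	#define	ATTR_DESCR_MASK		0x3UL	/* type + valid bits */
	#define	ATTR_DESCR_VALID	0x1UL	/* bit 0: descriptor is valid */
	#define	ATTR_DESCR_TYPE_MASK	0x2UL	/* bit 1: block (0) vs. table/page (1) */
	#define	ATTR_DESCR_TYPE_TABLE	0x2UL
	#define	L1_TABLE		0x3UL	/* valid table descriptor */

	int
	main(void)
	{
		/* A table descriptor mid-update: valid bit cleared, type kept. */
		uint64_t l1 = L1_TABLE & ~ATTR_DESCR_VALID;

		/* Old check: spuriously fails while the valid bit is clear. */
		printf("old: %d\n", (l1 & ATTR_DESCR_MASK) == L1_TABLE);
		/* New check: still recognizes the entry as a table. */
		printf("new: %d\n",
		    (l1 & ATTR_DESCR_TYPE_MASK) == ATTR_DESCR_TYPE_TABLE);
		return (0);
	}

Running it prints "old: 0" and "new: 1": the old assertion would fire on a
kernel mapping caught mid-update, while the type-only check does not. User
mappings keep the stricter validity assertion, since pmap_update_entry() is
not expected to race with lookups there.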