Index: sys/kern/subr_physmem.c
===================================================================
--- sys/kern/subr_physmem.c
+++ sys/kern/subr_physmem.c
@@ -68,6 +68,19 @@
 #define	MAX_PHYS_ADDR	0xFFFFFFFFFFFFFFFFull
 #endif
 
+#if defined(__aarch64__)
+/*
+ * The arm64 DMAP code assumes L2 block aligned & sized memory. Because of
+ * this, round memory regions so the start and end are aligned to an L2 block
+ * boundary.
+ */
+#define	round_mem(x)	roundup2((x), L2_SIZE)
+#define	trunc_mem(x)	rounddown2((x), L2_SIZE)
+#else
+/* Other platforms only need page granularity. */
+#define	round_mem(x)	round_page(x)
+#define	trunc_mem(x)	trunc_page(x)
+#endif
+
 struct region {
 	vm_paddr_t	addr;
 	vm_size_t	size;
@@ -351,9 +364,9 @@
 	 * Round the starting address up to a page boundary, and truncate the
 	 * ending page down to a page boundary.
 	 */
-	adj = round_page(pa) - pa;
-	pa  = round_page(pa);
-	sz  = trunc_page(sz - adj);
+	adj = round_mem(pa) - pa;
+	pa  = round_mem(pa);
+	sz  = trunc_mem(sz - adj);
 
 	if (sz > 0 && hwcnt < nitems(hwregions))
 		hwcnt = insert_region(hwregions, hwcnt, pa, sz, 0);
@@ -371,9 +384,9 @@
 	 * Truncate the starting address down to a page boundary, and round the
 	 * ending page up to a page boundary.
 	 */
-	adj = pa - trunc_page(pa);
-	pa  = trunc_page(pa);
-	sz  = round_page(sz + adj);
+	adj = pa - trunc_mem(pa);
+	pa  = trunc_mem(pa);
+	sz  = round_mem(sz + adj);
 
 	if (excnt >= nitems(exregions))
 		panic("failed to exclude region %#jx-%#jx", (uintmax_t)pa,