Lines Matching refs:PAGE_SIZE

38 // checks that *base* + i * PAGE_SIZE is mapped. Otherwise checks that it is not mapped.
46 if (zx_process_read_memory(process, base + i * PAGE_SIZE, buf, 1, &len) != expected) {
77 alignas(16) static uint8_t thread_stack[PAGE_SIZE];
186 0, 10 * PAGE_SIZE, &region, &region_addr),
206 const size_t region1_size = PAGE_SIZE * 10;
207 const size_t region2_size = PAGE_SIZE;
241 const size_t region_size = PAGE_SIZE * 10;
242 const size_t map_size = PAGE_SIZE;
283 const size_t region1_size = PAGE_SIZE * 10;
293 ZX_VM_SPECIFIC, region1_size, PAGE_SIZE,
300 region1_size - PAGE_SIZE, PAGE_SIZE * 2,
323 const size_t region1_size = PAGE_SIZE * 10;
334 0, region1_size + PAGE_SIZE, &region2, &region2_addr),
347 0, PAGE_SIZE, &region3, &region3_addr),
373 ASSERT_EQ(zx_vmo_create(PAGE_SIZE, 0, &vmo), ZX_OK);
377 0, 10 * PAGE_SIZE, &region[0], &region_addr[0]),
382 0, vmo, 0, PAGE_SIZE, &map_addr[0]),
388 0, PAGE_SIZE, &region[1], &region_addr[1]),
394 0, vmo, 0, PAGE_SIZE, &map_addr[1]),
432 0, PAGE_SIZE, &region[1], &region_addr[2]),
434 EXPECT_EQ(zx_vmar_unmap(region[i], map_addr[i], PAGE_SIZE),
436 EXPECT_EQ(zx_vmar_protect(region[i], ZX_VM_PERM_READ, map_addr[i], PAGE_SIZE),
438 EXPECT_EQ(zx_vmar_map(region[i], ZX_VM_PERM_READ, 0, vmo, 0, PAGE_SIZE, &map_addr[i]),
445 0, PAGE_SIZE, &region[2], &region_addr[2]),
476 ASSERT_EQ(zx_vmo_create(PAGE_SIZE, 0, &vmo), ZX_OK);
477 ASSERT_EQ(zx_vmo_create(PAGE_SIZE, 0, &vmo2), ZX_OK);
482 0, 10 * PAGE_SIZE, &region[0], &region_addr[0]),
488 0, PAGE_SIZE, &region[1], &region_addr[1]),
494 0, vmo, 0, PAGE_SIZE, &map_addr),
524 map_addr - region_addr[0], vmo2, 0, PAGE_SIZE, &new_map_addr),
565 ASSERT_EQ(zx_vmo_create(PAGE_SIZE, 0, &vmo), ZX_OK);
566 ASSERT_EQ(zx_vmo_create(PAGE_SIZE * 4, 0, &vmo2), ZX_OK);
571 0, 10 * PAGE_SIZE, &region[0], &region_addr[0]),
577 PAGE_SIZE, vmo, 0, 2 * PAGE_SIZE, &map_addr[0]),
584 map_addr[0] - region_addr[0], vmo2, 0, 2 * PAGE_SIZE, &map_addr[1]),
591 map_addr[0] - region_addr[0], vmo2, 0, PAGE_SIZE, &map_addr[1]),
598 4 * PAGE_SIZE, &map_addr[1]),
605 map_addr[0] - region_addr[0], PAGE_SIZE, &region[1], &region_addr[1]),
609 ASSERT_EQ(zx_vmar_unmap(region[0], map_addr[0], 2 * PAGE_SIZE), ZX_OK);
616 PAGE_SIZE, 2 * PAGE_SIZE, &region[1], &region_addr[1]),
623 region_addr[1] - region_addr[0], vmo2, 0, 2 * PAGE_SIZE, &map_addr[1]),
630 vmo2, 0, PAGE_SIZE, &map_addr[1]),
636 vmo2, 0, 4 * PAGE_SIZE, &map_addr[1]),
643 region_addr[1] - region_addr[0], PAGE_SIZE,
669 ASSERT_EQ(zx_vmo_create(4 * PAGE_SIZE, 0, &vmo), ZX_OK);
675 0, 10 * PAGE_SIZE, &region, &region_addr),
678 0, vmo, 0, 4 * PAGE_SIZE, &map_addr),
681 0, process, 0, 4 * PAGE_SIZE, &map_addr),
689 PAGE_SIZE, 10 * PAGE_SIZE, &region, &region_addr),
692 PAGE_SIZE, vmo, 0, 4 * PAGE_SIZE, &map_addr),
697 PAGE_SIZE, 10 * PAGE_SIZE, &region, &region_addr),
704 PAGE_SIZE, vmo, 0, 4 * PAGE_SIZE, &map_addr),
712 0, 10 * PAGE_SIZE, &region, bad_addr_ptr),
716 0, 10 * PAGE_SIZE, bad_handle_ptr, &region_addr),
719 0, vmo, 0, 4 * PAGE_SIZE, bad_addr_ptr),
725 0, PAGE_SIZE - 1, &region, &region_addr),
730 PAGE_SIZE - 1, PAGE_SIZE, &region, &region_addr),
738 PAGE_SIZE - 1, vmo, 0, 4 * PAGE_SIZE, &map_addr),
743 PAGE_SIZE, vmo, PAGE_SIZE - 1, 3 * PAGE_SIZE, &map_addr),
748 0, vmo, PAGE_SIZE - 1, 3 * PAGE_SIZE, &map_addr),
752 0, vmo, 0, 4 * PAGE_SIZE, &map_addr),
754 EXPECT_EQ(zx_vmar_unmap(vmar, map_addr + 1, PAGE_SIZE), ZX_ERR_INVALID_ARGS);
755 EXPECT_EQ(zx_vmar_protect(vmar, ZX_VM_PERM_READ, map_addr + 1, PAGE_SIZE),
757 EXPECT_EQ(zx_vmar_unmap(vmar, map_addr, 4 * PAGE_SIZE), ZX_OK);
762 0, vmo, UINT64_MAX + 1 - PAGE_SIZE, PAGE_SIZE, &map_addr),
765 0, vmo, UINT64_MAX + 1 - 2 * PAGE_SIZE, PAGE_SIZE,
768 EXPECT_EQ(zx_vmar_unmap(vmar, map_addr, PAGE_SIZE), ZX_OK);
779 0, vmo, 0, 4 * PAGE_SIZE, &map_addr),
784 EXPECT_EQ(zx_vmar_unmap(vmar, map_addr, 4 * PAGE_SIZE), ZX_OK);
787 constexpr size_t bad_size = fbl::numeric_limits<size_t>::max() - PAGE_SIZE + 2;
788 static_assert(((bad_size + PAGE_SIZE - 1) & ~(PAGE_SIZE - 1)) == 0, "");
801 PAGE_SIZE, vmo, 0, 4 * PAGE_SIZE, &map_addr),
804 EXPECT_EQ(zx_vmar_protect(vmar, ZX_VM_PERM_READ, map_addr + PAGE_SIZE * i, bad_size),
806 EXPECT_EQ(zx_vmar_unmap(vmar, map_addr + PAGE_SIZE * i, bad_size), ZX_ERR_INVALID_ARGS);
808 EXPECT_EQ(zx_vmar_unmap(vmar, map_addr, 4 * PAGE_SIZE), ZX_OK);
813 ZX_VM_CAN_MAP_WRITE, 0, 4 * PAGE_SIZE, &region, &region_addr),
817 0, 4 * PAGE_SIZE, &region, &region_addr),
820 0, vmo, 0, 4 * PAGE_SIZE, &map_addr),
824 0, vmo, 0, 4 * PAGE_SIZE, &map_addr),
828 0, vmo, 0, 4 * PAGE_SIZE, &map_addr),
831 map_addr, 4 * PAGE_SIZE),
834 4 * PAGE_SIZE),
836 EXPECT_EQ(zx_vmar_unmap(vmar, map_addr, 4 * PAGE_SIZE), ZX_OK);
856 ASSERT_EQ(zx_vmo_create(4 * PAGE_SIZE, 0, &vmo), ZX_OK);
858 ASSERT_EQ(zx_vmar_map(vmar, ZX_VM_PERM_READ, 0, vmo, 0, 4 * PAGE_SIZE, &map_addr),
861 map_addr, 4 * PAGE_SIZE - 1),
863 EXPECT_EQ(zx_vmar_unmap(vmar, map_addr, 4 * PAGE_SIZE - 1), ZX_OK);
869 EXPECT_EQ(zx_process_read_memory(process, map_addr + 3 * PAGE_SIZE, &buf, 1, &read),
891 ASSERT_EQ(zx_vmo_create(4 * PAGE_SIZE, 0, &vmo), ZX_OK);
895 ASSERT_EQ(zx_vmar_map(vmar, ZX_VM_PERM_READ | map_range, 0, vmo, 0, 4 * PAGE_SIZE - 1,
903 EXPECT_EQ(zx_process_read_memory(process, map_addr + 3 * PAGE_SIZE, &buf, 1, &read),
907 EXPECT_EQ(zx_vmar_unmap(vmar, map_addr, 4 * PAGE_SIZE - 1), ZX_OK);
912 EXPECT_EQ(zx_process_read_memory(process, map_addr + 3 * PAGE_SIZE, &buf, 1, &read),
937 ASSERT_EQ(zx_vmo_create(PAGE_SIZE, 0, &vmo), ZX_OK);
952 EXPECT_EQ(zx_vmar_map(new_h, kRwxMapPerm, 0, vmo, 0, PAGE_SIZE, &map_addr),
956 ASSERT_EQ(zx_vmar_map(new_h, perm, 0, vmo, 0, PAGE_SIZE, &map_addr),
960 EXPECT_EQ(zx_vmar_protect(new_h, kRwxMapPerm, map_addr, PAGE_SIZE),
963 EXPECT_EQ(zx_vmar_unmap(new_h, map_addr, PAGE_SIZE), ZX_OK);
967 EXPECT_EQ(zx_vmar_allocate(new_h, kRwxAllocPerm, 0, 10 * PAGE_SIZE, &region, &region_addr),
992 ASSERT_EQ(zx_vmo_create(PAGE_SIZE, 0, &vmo), ZX_OK);
1007 EXPECT_EQ(zx_vmar_map(vmar, kRwxMapPerm, 0, new_h, 0, PAGE_SIZE, &map_addr),
1011 ASSERT_EQ(zx_vmar_map(vmar, perm, 0, new_h, 0, PAGE_SIZE, &map_addr),
1016 EXPECT_EQ(zx_vmar_protect(vmar, kRwxMapPerm, map_addr, PAGE_SIZE),
1022 EXPECT_EQ(zx_vmar_protect(vmar, kRwxMapPerm, map_addr, PAGE_SIZE),
1025 EXPECT_EQ(zx_vmar_unmap(vmar, map_addr, PAGE_SIZE), ZX_OK);
1050 ASSERT_EQ(zx_vmo_create(PAGE_SIZE, 0, &vmo), ZX_OK);
1065 0, 10 * PAGE_SIZE, &region[0], &region_addr[0]),
1069 EXPECT_EQ(zx_vmar_allocate(region[0], kRwxAllocPerm, 0, PAGE_SIZE,
1074 EXPECT_EQ(zx_vmar_map(region[0], kRwxMapPerm, 0, vmo, 0, PAGE_SIZE, &map_addr),
1081 vmo, 0, PAGE_SIZE, &map_addr),
1083 EXPECT_EQ(zx_vmar_unmap(region[0], map_addr, PAGE_SIZE), ZX_OK);
1088 0, PAGE_SIZE, &region[1], &region_addr[1]),
1098 ASSERT_EQ(zx_vmar_allocate(vmar, kRwxAllocPerm, 0, 10 * PAGE_SIZE,
1103 PAGE_SIZE, vmo, 0, PAGE_SIZE, &map_addr),
1107 PAGE_SIZE, vmo, 0, PAGE_SIZE, &map_addr),
1129 const size_t region_size = PAGE_SIZE * 10;
1160 ASSERT_EQ(zx_vmo_create(4 * PAGE_SIZE, 0, &vmo), ZX_OK);
1165 0, vmo, 0, 4 * PAGE_SIZE, &addr),
1170 EXPECT_EQ(zx_vmar_unmap(vmar, mapping_addr[0], 2 * PAGE_SIZE), ZX_OK);
1173 EXPECT_EQ(zx_vmar_unmap(vmar, mapping_addr[0] + 2 * PAGE_SIZE, 2 * PAGE_SIZE), ZX_OK);
1177 EXPECT_EQ(zx_vmar_unmap(vmar, mapping_addr[1] + 2 * PAGE_SIZE, 2 * PAGE_SIZE), ZX_OK);
1180 EXPECT_EQ(zx_vmar_unmap(vmar, mapping_addr[1], 2 * PAGE_SIZE), ZX_OK);
1184 EXPECT_EQ(zx_vmar_unmap(vmar, mapping_addr[2] + PAGE_SIZE, 2 * PAGE_SIZE), ZX_OK);
1187 EXPECT_EQ(zx_vmar_unmap(vmar, mapping_addr[2], PAGE_SIZE), ZX_OK);
1188 EXPECT_EQ(zx_vmar_unmap(vmar, mapping_addr[2] + 3 * PAGE_SIZE, PAGE_SIZE), ZX_OK);
1200 offset, vmo, 0, 4 * PAGE_SIZE, &addr),
1203 EXPECT_EQ(zx_vmar_unmap(vmar, addr, 4 * PAGE_SIZE), ZX_OK);
1227 const size_t mapping_size = 4 * PAGE_SIZE;
1239 EXPECT_EQ(zx_vmar_unmap(vmar, mapping_addr[0] + 2 * PAGE_SIZE, 3 * PAGE_SIZE), ZX_OK);
1242 EXPECT_EQ(zx_vmar_unmap(vmar, mapping_addr[0], 2 * PAGE_SIZE), ZX_OK, "");
1243 EXPECT_EQ(zx_vmar_unmap(vmar, mapping_addr[1] + 1 * PAGE_SIZE, 3 * PAGE_SIZE), ZX_OK, "");
1255 EXPECT_EQ(zx_vmar_unmap(vmar, mapping_addr[0], 2 * mapping_size + PAGE_SIZE), ZX_OK);
1258 EXPECT_EQ(zx_vmar_unmap(vmar, mapping_addr[1] + 1 * PAGE_SIZE, 3 * PAGE_SIZE), ZX_OK);
1276 0, vmo, 0, PAGE_SIZE, &mapping_addr[2]),
1282 EXPECT_EQ(zx_vmar_unmap(vmar, mapping_addr[0], 2 * mapping_size + PAGE_SIZE), ZX_OK);
1287 PAGE_SIZE, vmo, 0, PAGE_SIZE, &mapping_addr[2]),
1290 EXPECT_EQ(zx_vmar_unmap(vmar, mapping_addr[1] + 1 * PAGE_SIZE, 3 * PAGE_SIZE), ZX_OK);
1309 0, vmo, 0, PAGE_SIZE, &mapping_addr[2]),
1315 EXPECT_EQ(zx_vmar_unmap(vmar, mapping_addr[0] + PAGE_SIZE, 3 * mapping_size - 2 * PAGE_SIZE),
1319 EXPECT_EQ(zx_vmar_unmap(vmar, mapping_addr[0] + PAGE_SIZE, 3 * mapping_size - PAGE_SIZE),
1325 PAGE_SIZE, vmo, 0, PAGE_SIZE, &mapping_addr[2]),
1328 EXPECT_EQ(zx_vmar_unmap(vmar, mapping_addr[0], PAGE_SIZE), ZX_OK);
1347 mapping_size - PAGE_SIZE, vmo, 0, PAGE_SIZE, &mapping_addr[2]),
1353 EXPECT_EQ(zx_vmar_unmap(vmar, subregion_addr + PAGE_SIZE, 3 * mapping_size - PAGE_SIZE),
1362 PAGE_SIZE, vmo, 0, PAGE_SIZE, &mapping_addr[2]),
1385 const size_t mapping_size = 4 * PAGE_SIZE;
1390 PAGE_SIZE, vmo, 0, mapping_size, &mapping_addr),
1392 ASSERT_EQ(zx_vmar_unmap(vmar, mapping_addr - PAGE_SIZE, mapping_size + PAGE_SIZE),
1400 for (size_t gap = PAGE_SIZE; gap < 3 * PAGE_SIZE; gap += PAGE_SIZE) {
1405 ASSERT_EQ(zx_vmar_unmap(vmar, mapping_addr - PAGE_SIZE, mapping_size + PAGE_SIZE),
1432 const size_t mapping_size = 4 * PAGE_SIZE;
1438 for (size_t i = 0; i < mapping_size / PAGE_SIZE; ++i) {
1440 ASSERT_EQ(zx_vmo_write(vmo, buf, i * PAGE_SIZE, 1), ZX_OK);
1442 ASSERT_EQ(zx_vmo_write(vmo2, buf, i * PAGE_SIZE, 1), ZX_OK);
1448 PAGE_SIZE, vmo, 0, mapping_size, &mapping_addr[0]),
1454 PAGE_SIZE, vmo2, 0, mapping_size, &mapping_addr[1]),
1460 PAGE_SIZE, vmo2, 0, mapping_size, &mapping_addr[1]),
1463 for (size_t i = 0; i < mapping_size / PAGE_SIZE; ++i) {
1464 EXPECT_EQ(zx_process_read_memory(process, mapping_addr[0] + i * PAGE_SIZE, buf, 1, &len),
1473 2 * PAGE_SIZE, vmo, 0, 2 * PAGE_SIZE, &mapping_addr[0]),
1475 EXPECT_EQ(mapping_addr[0], mapping_addr[1] + PAGE_SIZE);
1476 for (size_t i = 0; i < mapping_size / PAGE_SIZE; ++i) {
1477 EXPECT_EQ(zx_process_read_memory(process, mapping_addr[1] + i * PAGE_SIZE, buf, 1, &len),
1486 PAGE_SIZE + mapping_size, mapping_size, &subregion, &subregion_addr),
1492 PAGE_SIZE, vmo2, 0, 2 * mapping_size, &mapping_addr[0]),
1520 ASSERT_EQ(zx_vmo_create(4 * PAGE_SIZE, 0, &vmo), ZX_OK);
1525 0, vmo, 0, 4 * PAGE_SIZE, &mapping_addr),
1527 EXPECT_EQ(zx_vmar_protect(vmar, ZX_VM_PERM_READ, mapping_addr, 2 * PAGE_SIZE),
1532 EXPECT_EQ(zx_vmar_unmap(vmar, mapping_addr, 4 * PAGE_SIZE), ZX_OK);
1538 0, vmo, 0, 4 * PAGE_SIZE, &mapping_addr),
1540 EXPECT_EQ(zx_vmar_protect(vmar, ZX_VM_PERM_READ, mapping_addr + 2 * PAGE_SIZE,
1541 2 * PAGE_SIZE),
1546 EXPECT_EQ(zx_vmar_unmap(vmar, mapping_addr, 4 * PAGE_SIZE), ZX_OK);
1552 0, vmo, 0, 4 * PAGE_SIZE, &mapping_addr),
1554 EXPECT_EQ(zx_vmar_protect(vmar, ZX_VM_PERM_READ, mapping_addr + PAGE_SIZE,
1555 2 * PAGE_SIZE),
1560 EXPECT_EQ(zx_vmar_unmap(vmar, mapping_addr, 4 * PAGE_SIZE), ZX_OK);
1584 const size_t mapping_size = 4 * PAGE_SIZE;
1602 EXPECT_EQ(zx_vmar_protect(vmar, ZX_VM_PERM_READ, mapping_addr[0] + PAGE_SIZE,
1603 3 * mapping_size - 2 * PAGE_SIZE),
1625 mapping_addr[0] + PAGE_SIZE,
1626 3 * mapping_size - 2 * PAGE_SIZE),
1643 EXPECT_EQ(zx_vmar_protect(vmar, ZX_VM_PERM_READ, mapping_addr[0] + PAGE_SIZE,
1644 3 * mapping_size - 2 * PAGE_SIZE),
1666 EXPECT_EQ(zx_vmar_protect(vmar, ZX_VM_PERM_READ, mapping_addr[0] + PAGE_SIZE,
1667 3 * mapping_size - 2 * PAGE_SIZE),
1694 EXPECT_EQ(zx_vmar_protect(vmar, ZX_VM_PERM_READ, mapping_addr[0] + PAGE_SIZE,
1695 3 * mapping_size - 2 * PAGE_SIZE),
1717 const size_t size = 100 * PAGE_SIZE;
1782 const uintptr_t base = ROUNDUP(mapping_addr, 512 * PAGE_SIZE) + PAGE_SIZE;
1831 const uintptr_t base = ROUNDUP(mapping_addr, 512 * PAGE_SIZE) + PAGE_SIZE;
1856 ASSERT_EQ(zx_vmo_create(PAGE_SIZE * 2, 0, &vmo), ZX_OK);
1860 0, vmo, 0, PAGE_SIZE * 2, &mapping_addr),
1865 memset(ptr, 0, PAGE_SIZE * 2);
1868 zx_vmar_unmap(zx_vmar_root_self(), mapping_addr + PAGE_SIZE, PAGE_SIZE);
1870 char buffer[PAGE_SIZE * 2];
1874 EXPECT_EQ(zx_process_read_memory(zx_process_self(), mapping_addr, buffer, PAGE_SIZE, &actual_read),
1876 EXPECT_EQ(actual_read, PAGE_SIZE);
1879 EXPECT_EQ(zx_process_read_memory(zx_process_self(), mapping_addr + PAGE_SIZE, buffer, PAGE_SIZE, &actual_read),
1883 EXPECT_EQ(zx_process_read_memory(zx_process_self(), mapping_addr, buffer, PAGE_SIZE * 2, &actual_read),
1885 EXPECT_EQ(actual_read, PAGE_SIZE);
1888 EXPECT_EQ(zx_process_read_memory(zx_process_self(), mapping_addr + PAGE_SIZE - 1, buffer, 2, &actual_read), ZX_OK);
1892 EXPECT_EQ(zx_vmar_unmap(zx_vmar_root_self(), mapping_addr, PAGE_SIZE), ZX_OK);
1902 ASSERT_EQ(zx_vmo_create(PAGE_SIZE * 2, 0, &vmo), ZX_OK);
1906 0, vmo, 0, PAGE_SIZE * 2, &mapping_addr),
1911 memset(ptr, 0, PAGE_SIZE * 2);
1914 zx_vmar_unmap(zx_vmar_root_self(), mapping_addr + PAGE_SIZE, PAGE_SIZE);
1916 char buffer[PAGE_SIZE * 2];
1918 memset(buffer, 0, PAGE_SIZE * 2);
1921 EXPECT_EQ(zx_process_write_memory(zx_process_self(), mapping_addr, buffer, PAGE_SIZE, &actual_written),
1923 EXPECT_EQ(actual_written, PAGE_SIZE);
1926 EXPECT_EQ(zx_process_write_memory(zx_process_self(), mapping_addr + PAGE_SIZE, buffer, PAGE_SIZE, &actual_written),
1930 EXPECT_EQ(zx_process_write_memory(zx_process_self(), mapping_addr, buffer, PAGE_SIZE * 2, &actual_written),
1932 EXPECT_EQ(actual_written, PAGE_SIZE);
1935 EXPECT_EQ(zx_process_write_memory(zx_process_self(), mapping_addr + PAGE_SIZE - 1, buffer, 2, &actual_written), ZX_OK);
1939 EXPECT_EQ(zx_vmar_unmap(zx_vmar_root_self(), mapping_addr, PAGE_SIZE), ZX_OK);
1948 constexpr size_t kVmoSize = PAGE_SIZE * 10;
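
The matches above come from VMAR syscall tests that derive every size and offset from PAGE_SIZE, since zx_vmar_map, zx_vmar_protect, and zx_vmar_unmap reject ranges that are not page-aligned (see the PAGE_SIZE - 1 and map_addr + 1 cases returning ZX_ERR_INVALID_ARGS). The following is a minimal illustrative sketch of that create/map/protect/unmap pattern, not code from the test file; it assumes a Fuchsia target where PAGE_SIZE is the page-size constant available via <limits.h>, and it uses the root VMAR rather than a test-allocated subregion.

// Illustrative sketch only: the common pattern these PAGE_SIZE references exercise.
#include <limits.h>
#include <stdio.h>

#include <zircon/process.h>
#include <zircon/syscalls.h>

int main() {
  zx_handle_t vmo;
  if (zx_vmo_create(4 * PAGE_SIZE, 0, &vmo) != ZX_OK) return 1;

  // Map all four pages read/write into the root VMAR; lengths and offsets
  // must be multiples of PAGE_SIZE, which is why the tests build every size from it.
  zx_vaddr_t map_addr;
  if (zx_vmar_map(zx_vmar_root_self(), ZX_VM_PERM_READ | ZX_VM_PERM_WRITE, 0,
                  vmo, 0, 4 * PAGE_SIZE, &map_addr) != ZX_OK) return 1;

  // Drop write permission on the first two pages only; the range is
  // page-aligned, so this succeeds and splits the mapping.
  if (zx_vmar_protect(zx_vmar_root_self(), ZX_VM_PERM_READ, map_addr,
                      2 * PAGE_SIZE) != ZX_OK) return 1;

  // Unmap the whole four-page range and release the VMO handle.
  if (zx_vmar_unmap(zx_vmar_root_self(), map_addr, 4 * PAGE_SIZE) != ZX_OK) return 1;
  zx_handle_close(vmo);

  printf("mapped, protected, and unmapped 4 pages at %#lx\n", (unsigned long)map_addr);
  return 0;
}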