/barrelfish-2018-10-04/kernel/include/
capabilities.h
    34  #ifndef ROUND_UP
    35  #define ROUND_UP(n, size) ((((n) + (size) - 1)) & (~((size) - 1)))  macro
    38  STATIC_ASSERT((ROUND_UP(sizeof(struct capability), 8)
    39      + ROUND_UP(sizeof(struct mdbnode), 8)
    55  char padding0[ROUND_UP(sizeof(struct capability), 8) - sizeof(struct capability)];
    57  char padding1[ROUND_UP(sizeof(struct mdbnode), 8) - sizeof(struct mdbnode)];
    63      - ROUND_UP(sizeof(struct capability), 8)
    64      - ROUND_UP(sizeof(struct mdbnode), 8)];
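The capabilities.h hits use ROUND_UP at compile time: the padding arrays are sized to carry each embedded struct up to the next 8-byte boundary, and the STATIC_ASSERT checks that the padded pieces still fit. A minimal sketch of the same pattern, with hypothetical struct names and C11 _Static_assert standing in for Barrelfish's STATIC_ASSERT:

    #include <stddef.h>

    #define ROUND_UP(n, size) ((((n) + (size) - 1)) & (~((size) - 1)))

    struct payload { char data[13]; };      /* odd size on purpose */

    struct slot {
        struct payload p;
        /* pad to the next 8-byte boundary, as padding0/padding1 do above */
        char padding[ROUND_UP(sizeof(struct payload), 8) - sizeof(struct payload)];
        long next;                          /* starts 8-byte aligned */
    };

    /* compile-time layout check in the spirit of STATIC_ASSERT */
    _Static_assert(offsetof(struct slot, next) % 8 == 0,
                   "next must start on an 8-byte boundary");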
kernel.h
    51  #define ROUND_UP(n, size) ((((n) + (size) - 1)) & (~((size) - 1)))  macro
/barrelfish-2018-10-04/tools/arm_molly/ |
molly_init32.c
    28  #define ROUND_UP(n, size) ((((n) + (size) - 1)) & (~((size) - 1)))  macro
    71  phys_alloc_start = ROUND_UP((uint32_t) &_end_img, ALIGNMENT); //+
molly_init64.c
    29  #define ROUND_UP(n, size) ((((n) + (size) - 1)) & (~((size) - 1)))  macro
    72  phys_alloc_start = ROUND_UP((uint64_t) &_end_img, ALIGNMENT); //+
/barrelfish-2018-10-04/lib/barrelfish/arch/x86/ |
pmap.c
   432  pmapx->min_mappable_va = ROUND_UP((lvaddr_t)&_end, 64 * 1024)
   463  alignment = ROUND_UP(alignment, BASE_PAGE_SIZE);
   465  size_t size = ROUND_UP(memobj->size, alignment);
   468  genvaddr_t minva = ROUND_UP(pmapx->min_mappable_va, alignment);
   476  genvaddr_t walk_size = ROUND_UP(vregion_get_size(walk), BASE_PAGE_SIZE);
   477  genvaddr_t walk_end = ROUND_UP(walk_base + walk_size, alignment);
   493  vaddr = ROUND_UP((vregion_get_base_addr(walk)
   494      + ROUND_UP(vregion_get_size(walk), BASE_PAGE_SIZE)),
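The pmap.c hits at lines 463-494 come from free-address-space search: each candidate address is the page-rounded end of an existing region, rounded up again to the requested alignment, and accepted only if it leaves room before the next region. A simplified sketch of that placement step; the region list and helper are hypothetical stand-ins, not Barrelfish's vregion API:

    #include <stdint.h>

    #define ROUND_UP(n, size) ((((n) + (size) - 1)) & (~((size) - 1)))
    #define BASE_PAGE_SIZE ((uint64_t)1 << 12)

    struct region {                 /* stand-in for a mapped vregion */
        uint64_t base;
        uint64_t size;
        struct region *next;        /* sorted by base address */
    };

    /* first-fit: place the mapping at the aligned end of an existing
     * region if the gap before the next region is large enough */
    static uint64_t find_vaddr(struct region *head, uint64_t size,
                               uint64_t align)
    {
        size = ROUND_UP(size, BASE_PAGE_SIZE);
        for (struct region *r = head; r != NULL; r = r->next) {
            uint64_t cand  = ROUND_UP(r->base + ROUND_UP(r->size, BASE_PAGE_SIZE),
                                      align);
            uint64_t limit = r->next ? r->next->base : UINT64_MAX;
            if (cand <= limit && limit - cand >= size) {
                return cand;
            }
        }
        return 0;                   /* no suitable gap */
    }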
/barrelfish-2018-10-04/lib/barrelfish/ |
morecore.c
    53  bytes = ROUND_UP(bytes, LARGE_PAGE_SIZE);
   139  size_t remapsize = ROUND_UP(mapoffset, state->mmu_state.alignment);
heap.c
   149  size_t nb = ROUND_UP(nu * sizeof(union heap_header),
/barrelfish-2018-10-04/include/ |
bitmacros.h
    34  #define ROUND_UP(n, size) ((((n) + (size) - 1)) & (~((size) - 1)))  macro
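bitmacros.h carries the shared definition. The mask form rounds n up to the next multiple of size, but only when size is a power of two: ~(size - 1) must clear exactly the low-order bits. A self-contained sketch of the behavior (the tested values are illustrative):

    #include <assert.h>

    #define ROUND_UP(n, size) ((((n) + (size) - 1)) & (~((size) - 1)))

    int main(void)
    {
        /* rounds up to the next multiple of a power-of-two size */
        assert(ROUND_UP(0, 8) == 0);
        assert(ROUND_UP(1, 8) == 8);
        assert(ROUND_UP(8, 8) == 8);
        assert(ROUND_UP(4097, 4096) == 8192);

        /* the mask trick breaks for non-powers of two:
         * ROUND_UP(10, 6) is (15 & ~5) == 10, not 12 */
        return 0;
    }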
/barrelfish-2018-10-04/kernel/arch/armv7/ |
paging_init.c
   114  assert(ROUND_UP((lpaddr_t)l1_low, ARM_L1_ALIGN) == (lpaddr_t)l1_low);
   115  assert(ROUND_UP((lpaddr_t)l1_high, ARM_L1_ALIGN) == (lpaddr_t)l1_high);
/barrelfish-2018-10-04/kernel/arch/armv8/ |
init.c
    73  physical_mem = ROUND_UP(desc->PhysicalStart, BASE_PAGE_SIZE);
    92  armv8_glbl_core_data->start_free_ram = ROUND_UP(armv8_glbl_core_data->start_free_ram, BASE_PAGE_SIZE);
/barrelfish-2018-10-04/lib/barrelfish/vspace/ |
mmu_aware.c
    65  size = ROUND_UP(size, BASE_PAGE_SIZE);
   111  size_t alloc_size = ROUND_UP(req_size, BASE_PAGE_SIZE);
   123  alloc_size = ROUND_UP(req_size, HUGE_PAGE_SIZE);
   138  alloc_size = ROUND_UP(req_size, LARGE_PAGE_SIZE);
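mmu_aware.c rounds the same request against three page sizes, picking the coarsest granularity the allocation can use. A sketch of that tiering under assumed x86-64 page sizes; the thresholds and helper name are illustrative, and the real code applies further checks before selecting huge or large pages:

    #include <stddef.h>

    #define ROUND_UP(n, size) ((((n) + (size) - 1)) & (~((size) - 1)))

    #define BASE_PAGE_SIZE  ((size_t)1 << 12)   /* 4 KiB */
    #define LARGE_PAGE_SIZE ((size_t)1 << 21)   /* 2 MiB */
    #define HUGE_PAGE_SIZE  ((size_t)1 << 30)   /* 1 GiB */

    /* round the request up to the coarsest page size it can fill */
    static size_t choose_alloc_size(size_t req_size)
    {
        if (req_size >= HUGE_PAGE_SIZE) {
            return ROUND_UP(req_size, HUGE_PAGE_SIZE);
        } else if (req_size >= LARGE_PAGE_SIZE) {
            return ROUND_UP(req_size, LARGE_PAGE_SIZE);
        }
        return ROUND_UP(req_size, BASE_PAGE_SIZE);
    }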
utils.c
    58  size = ROUND_UP(size, BASE_PAGE_SIZE);
   255  size = ROUND_UP(size, BASE_PAGE_SIZE);
   345  size = ROUND_UP(size, BASE_PAGE_SIZE);
/barrelfish-2018-10-04/lib/blk/dma_mem/ |
dma_mem.c
    60  bytes = ROUND_UP(bytes, BASE_PAGE_SIZE);
/barrelfish-2018-10-04/lib/net/ |
netbufs.c
    94  reg->buffer_size = ROUND_UP(buffersize, NETWORKING_BUFFER_ALIGN);
   195  size = ROUND_UP(size, NETWORKING_BUFFER_ALIGN);
   197  size_t alloc_size = ROUND_UP(numbuf * size, BASE_PAGE_SIZE);
/barrelfish-2018-10-04/lib/barrelfish/arch/arm/ |
pmap.c
   538  size = ROUND_UP(size, page_size);
   767  size = ROUND_UP(size, page_size);
   885  size = ROUND_UP(size, BASE_PAGE_SIZE);
   968  alignment = ROUND_UP(alignment, BASE_PAGE_SIZE);
   970  size_t size = ROUND_UP(memobj->size, alignment);
   975  genvaddr_t walk_size = ROUND_UP(vregion_get_size(walk), BASE_PAGE_SIZE);
   976  genvaddr_t walk_end = ROUND_UP(walk_base + walk_size, alignment);
   987  *vaddr = ROUND_UP((vregion_get_base_addr(walk)
   988      + ROUND_UP(vregion_get_size(walk), alignment)),
  1007  alignment = ROUND_UP(alignmen [all...]
/barrelfish-2018-10-04/usr/kaluga/ |
start_decnet.c
    92  err = get_device_cap(ROUND_DOWN(addr, BASE_PAGE_SIZE), ROUND_UP(size, BASE_PAGE_SIZE), &device_frame);
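The kaluga call pairs ROUND_DOWN with ROUND_UP so that a device register window is requested in whole pages. A sketch of the general page-covering arithmetic, assuming 4 KiB pages and a bitmask-style ROUND_DOWN; the helper name is hypothetical:

    #include <stdint.h>

    #define ROUND_UP(n, size)   ((((n) + (size) - 1)) & (~((size) - 1)))
    #define ROUND_DOWN(n, size) ((n) & (~((size) - 1)))

    #define BASE_PAGE_SIZE ((uint64_t)1 << 12)  /* assumed 4 KiB pages */

    /* compute the page-aligned window covering [addr, addr + size) */
    static void page_align_window(uint64_t addr, uint64_t size,
                                  uint64_t *base, uint64_t *bytes)
    {
        *base  = ROUND_DOWN(addr, BASE_PAGE_SIZE);
        /* grow by the bytes shaved off the front, then round up */
        *bytes = ROUND_UP(size + (addr - *base), BASE_PAGE_SIZE);
    }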
/barrelfish-2018-10-04/tools/molly/ |
molly_init.c
   217  #define ROUND_UP(n, size) ((((n) + (size) - 1)) & (~((size) - 1)))  macro
   258  next_addr = (ROUND_UP((uint64_t) &_end_img, BASE_PAGE_SIZE) +
/barrelfish-2018-10-04/tools/weever/ |
loader.c
    42  #define ROUND_UP(n, size) ((((n) + (size) - 1)) & (~((size) - 1)))  macro
   225  phys_alloc_start = ROUND_UP(bp->ramdisk_image + bp->ramdisk_size,
/barrelfish-2018-10-04/lib/barrelfish/arch/x86_32/ |
pmap.c
   239  size = ROUND_UP(size, page_size);
   423  size = ROUND_UP(size, X86_32_LARGE_PAGE_SIZE);
   429  size = ROUND_UP(size, X86_32_BASE_PAGE_SIZE);
   573  size = ROUND_UP(size, page_size);
   708  size = ROUND_UP(size, page_size);
   842  alignment = ROUND_UP(alignment, BASE_PAGE_SIZE);
   844  size = ROUND_UP(size, alignment);
/barrelfish-2018-10-04/lib/virtio/ |
virtqueue.c
   232  size = ROUND_UP(size, BASE_PAGE_SIZE);
   305  vring_mem_size = ROUND_UP(vring_mem_size, BASE_PAGE_SIZE);
   366  offset = ROUND_UP(offset, BASE_PAGE_SIZE);
   480  vring_mem_size = ROUND_UP(vring_mem_size, BASE_PAGE_SIZE);
/barrelfish-2018-10-04/include/barrelfish/ |
ump_impl.h
   100  #define UMP_CHAN_STATE_SIZE ROUND_UP(sizeof(struct ump_chan_state), CACHELINE_BYTES)
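ump_impl.h rounds the UMP channel state up to whole cache lines, so state polled by one core never shares a line with data written by the other. A minimal sketch of the same sizing, with a hypothetical stand-in struct and an assumed 64-byte line:

    #include <stdint.h>

    #define ROUND_UP(n, size) ((((n) + (size) - 1)) & (~((size) - 1)))
    #define CACHELINE_BYTES 64              /* assumed line size */

    struct chan_state {                     /* stand-in for struct ump_chan_state */
        volatile uintptr_t pos;
        int dir;
    };

    /* one side's state occupies whole cache lines in the shared frame */
    #define CHAN_STATE_SIZE ROUND_UP(sizeof(struct chan_state), CACHELINE_BYTES)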
/barrelfish-2018-10-04/lib/barrelfish/arch/x86_64/ |
pmap.c
   373  size = ROUND_UP(size, page_size);
   599  size = ROUND_UP(size, LARGE_PAGE_SIZE);
   608  size = ROUND_UP(size, HUGE_PAGE_SIZE);
   614  size = ROUND_UP(size, BASE_PAGE_SIZE);
   779  size = ROUND_UP(size, info.page_size);
   903  size = ROUND_UP(size, info.page_size);
  1047  alignment = ROUND_UP(alignment, BASE_PAGE_SIZE);
  1049  size = ROUND_UP(size, alignment);
/barrelfish-2018-10-04/tools/elver/ |
elver.c
    55  #define ROUND_UP(n, size) ((((n) + (size) - 1)) & (~((size) - 1)))  macro
   269  phys_alloc_start = ROUND_UP(multiboot_end_addr(), BASE_PAGE_SIZE) +
/barrelfish-2018-10-04/kernel/ |
startup.c
   402  app_alloc_phys_start = ROUND_UP(app_alloc_phys_start, align);
   438  bsp_init_alloc_addr = ROUND_UP(bsp_init_alloc_addr, align);
/barrelfish-2018-10-04/lib/ahci/ |
ahci_dma_pool.c
   220  pool_size = ROUND_UP(pool_size, BASE_PAGE_SIZE);
   367  size = ROUND_UP(size, alignment_requirement);