Lines matching defs:va_start (the hits below are uses as well as definitions, apparently from mm/vmalloc.c in the Linux kernel)

822 * All vmap_area objects in this tree are sorted by va->va_start
965 return (va->va_end - va->va_start);
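
Hits 822 and 965 establish the two invariants everything below relies on: the tree is ordered by va_start, and ranges are half-open, so a range's size is simply va_end - va_start. A minimal userspace sketch, assuming only the two fields the hits show (the kernel's struct vmap_area carries more; hit 965 looks like the body of the kernel's va_size() helper):

    #include <stdio.h>

    struct vmap_area {
            unsigned long va_start;   /* inclusive */
            unsigned long va_end;     /* exclusive */
    };

    /* Mirrors the expression quoted at hit 965. */
    static unsigned long va_size(const struct vmap_area *va)
    {
            return va->va_end - va->va_start;
    }

    int main(void)
    {
            struct vmap_area va = { .va_start = 0x1000, .va_end = 0x3000 };
            printf("%lu\n", va_size(&va));   /* 8192: two 4 KiB pages */
            return 0;
    }
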
1002 if (addr < va->va_start)
1028 if (tmp->va_start <= addr)
1061 if (!va_start_lowest || (*va)->va_start < va_start_lowest)
1062 va_start_lowest = (*va)->va_start;
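
Hits 1002-1062 trace the tree walk: descend left when the address precedes a block, right when it is at or past the block's end, and separately remember the lowest va_start seen. A sketch of the descent, reusing struct vmap_area from above and assuming a plain binary tree in place of the kernel's rb_node (tree_node and find_va are made-up names):

    struct tree_node {
            struct vmap_area va;
            struct tree_node *left, *right;
    };

    static struct vmap_area *find_va(struct tree_node *n, unsigned long addr)
    {
            while (n) {
                    if (addr < n->va.va_start)        /* hit 1002 */
                            n = n->left;
                    else if (addr >= n->va.va_end)
                            n = n->right;
                    else
                            return &n->va;            /* va_start <= addr < va_end */
            }
            return NULL;
    }
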
1126 if (va->va_end <= tmp_va->va_start)
1128 else if (va->va_start >= tmp_va->va_end)
1132 va->va_start, va->va_end, tmp_va->va_start, tmp_va->va_end);
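
Hits 1126-1132 are an insertion-time sanity check: two half-open ranges are disjoint iff one ends at or before the other starts, and anything else trips the warning whose arguments are quoted at 1132. The predicate in isolation (overlaps() is a made-up name):

    static int overlaps(const struct vmap_area *a, const struct vmap_area *b)
    {
            /* disjoint: a ends first (hit 1126) or a starts last (hit 1128) */
            return !(a->va_end <= b->va_start || a->va_start >= b->va_end);
    }
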
1274 * when VA size is modified by changing its va_start/va_end. Or
1389 if (sibling->va_start == va->va_end) {
1390 sibling->va_start = va->va_start;
1410 if (sibling->va_end == va->va_start) {
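
Hits 1389-1410 show how a freed range is coalesced with its neighbours in address order: the next sibling absorbs it when that sibling starts exactly where the freed range ends, and the previous sibling absorbs it when that sibling ends where the freed range starts. A sketch of the two cases (the real code can merge in both directions in one pass, and also frees the absorbed vmap_area and fixes up the augmented tree, all elided here):

    static void merge_free_va(struct vmap_area *va,
                              struct vmap_area *prev, struct vmap_area *next)
    {
            if (next && next->va_start == va->va_end)
                    next->va_start = va->va_start;   /* next grows left, hit 1390 */
            else if (prev && prev->va_end == va->va_start)
                    prev->va_end = va->va_end;       /* prev grows right */
    }
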
1463 if (va->va_start > vstart)
1464 nva_start_addr = ALIGN(va->va_start, align);
1501 vstart < va->va_start) {
1529 vstart <= va->va_start) {
1536 vstart = va->va_start + 1;
1601 if (nva_start_addr < va->va_start ||
1606 if (va->va_start == nva_start_addr) {
1646 va->va_start += size;
1699 lva->va_start = va->va_start;
1705 va->va_start = nva_start_addr + size;
1729 if (va->va_start > vstart)
1730 nva_start_addr = ALIGN(va->va_start, align);
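
Hits 1463-1730 are the allocation path, with the clamp-and-align step appearing twice (1463-1464 and 1729-1730, apparently two call sites of the same idea): the candidate address is the free block's start or vstart, whichever is higher, rounded up to the alignment; the request is then classified by which edges of the free block it touches, which decides whether the block is removed, trimmed, or split. A sketch under the same half-open convention (ALIGN_UP assumes a power-of-two alignment; the enum names follow the kernel's fit types, the function names are made up):

    #define ALIGN_UP(x, a)  (((x) + (a) - 1) & ~((a) - 1))

    enum fit_type { NOTHING_FIT, FL_FIT_TYPE, LE_FIT_TYPE, RE_FIT_TYPE, NE_FIT_TYPE };

    static unsigned long candidate_addr(const struct vmap_area *va,
                                        unsigned long vstart, unsigned long align)
    {
            /* start from the block or from vstart, whichever is higher (1463-1464) */
            return ALIGN_UP(va->va_start > vstart ? va->va_start : vstart, align);
    }

    static enum fit_type classify_fit(const struct vmap_area *va,
                                      unsigned long nva_start_addr,
                                      unsigned long size)
    {
            if (nva_start_addr < va->va_start ||
                nva_start_addr + size > va->va_end)
                    return NOTHING_FIT;                      /* hit 1601 */

            if (va->va_start == nva_start_addr) {            /* hit 1606 */
                    if (va->va_end == nva_start_addr + size)
                            return FL_FIT_TYPE;  /* exact: unlink the whole block */
                    return LE_FIT_TYPE;          /* trim left: va_start += size (1646) */
            }
            if (va->va_end == nva_start_addr + size)
                    return RE_FIT_TYPE;          /* trim right */
            return NE_FIT_TYPE;  /* middle: split; lva keeps the left part (1699-1705) */
    }
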
1791 struct vmap_node *vn = addr_to_node(va->va_start);
1876 if (IS_ALIGNED(va->va_start, align)) {
1882 err |= (va->va_start < vstart);
1924 *addr = va->va_start;
1995 va->va_start = addr;
2000 vn = addr_to_node(va->va_start);
2006 BUG_ON(!IS_ALIGNED(va->va_start, align));
2007 BUG_ON(va->va_start < vstart);
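
Hits 1791-2007 tie each VA to a per-node structure by its start address, then assert the post-conditions of a successful allocation: the result is aligned and not below vstart. A guess at the address-to-node hashing, with illustrative constants (ZONE_SIZE and NR_NODES are assumptions, not the kernel's values):

    #define NR_NODES   4
    #define ZONE_SIZE  (64UL << 20)   /* illustrative only */

    struct vmap_node { int dummy; /* per-node tree and lock elided */ };
    static struct vmap_node vmap_nodes[NR_NODES];

    static struct vmap_node *addr_to_node(unsigned long addr)
    {
            return &vmap_nodes[(addr / ZONE_SIZE) % NR_NODES];
    }
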
2175 unsigned long nr = (va->va_end - va->va_start) >> PAGE_SHIFT;
2176 unsigned long orig_start = va->va_start;
2184 va->va_start, va->va_end);
2235 struct vmap_area, list)->va_start);
2311 unsigned long va_start = va->va_start;
2319 nr_lazy = atomic_long_add_return((va->va_end - va->va_start) >>
2327 id_to_node(vn_id):addr_to_node(va->va_start);
2333 trace_free_vmap_area_noflush(va_start, nr_lazy, nr_lazy_max);
2345 flush_cache_vunmap(va->va_start, va->va_end);
2346 vunmap_range_noflush(va->va_start, va->va_end);
2348 flush_tlb_kernel_range(va->va_start, va->va_end);
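
Hits 2175-2348 are the lazy-free and unmap path: the freed page count is derived from the half-open range at 2175, and the three calls at 2345-2348 happen in a deliberate order. A kernel-context sketch spelling that order out (the wrapper is hypothetical; the three callees are the real kernel interfaces quoted above):

    static void unmap_va_range(struct vmap_area *va)
    {
            /* 1. Write back/invalidate virtually-tagged caches while the
             *    mapping still exists. */
            flush_cache_vunmap(va->va_start, va->va_end);
            /* 2. Tear down the page tables without flushing the TLB yet. */
            vunmap_range_noflush(va->va_start, va->va_end);
            /* 3. Only then invalidate stale TLB entries for the range. */
            flush_tlb_kernel_range(va->va_start, va->va_end);
    }
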
2365 * addr is not the same as va->va_start, which is not common, we
2540 static void *vmap_block_vaddr(unsigned long va_start, unsigned long pages_off)
2544 addr = va_start + (pages_off << PAGE_SHIFT);
2545 BUG_ON(addr_to_vb_idx(addr) != addr_to_vb_idx(va_start));
2583 vaddr = vmap_block_vaddr(va->va_start, 0);
2596 xa = addr_to_vb_xa(va->va_start);
2597 vb_idx = addr_to_vb_idx(va->va_start);
2619 xa = addr_to_vb_xa(vb->va->va_start);
2620 tmp = xa_erase(xa, addr_to_vb_idx(vb->va->va_start));
2623 vn = addr_to_node(vb->va->va_start);
2732 vaddr = vmap_block_vaddr(vb->va->va_start, pages_off);
2822 unsigned long va_start = vb->va->va_start;
2825 s = va_start + (vb->dirty_min << PAGE_SHIFT);
2826 e = va_start + (vb->dirty_max << PAGE_SHIFT);
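
Hits 2540-2826 belong to the per-CPU vmap_block allocator: a block's pages are addressed by page offset from va_start, blocks are keyed in an XArray by an index derived from the address (2596-2620), and the dirty window [dirty_min, dirty_max) is converted back to addresses the same way at 2825-2826. A compilable sketch of the offset math (the block size and the index formula here are assumptions):

    #include <assert.h>

    #define PAGE_SHIFT       12
    #define VMAP_BLOCK_SIZE  (1UL << (PAGE_SHIFT + 6))   /* illustrative */

    static unsigned long addr_to_vb_idx(unsigned long addr)
    {
            return addr / VMAP_BLOCK_SIZE;   /* assumed keying, see hit 2597 */
    }

    /* Mirrors the helper quoted at hits 2540-2545. */
    static unsigned long vmap_block_vaddr(unsigned long va_start,
                                          unsigned long pages_off)
    {
            unsigned long addr = va_start + (pages_off << PAGE_SHIFT);

            /* the offset must not cross into the next block */
            assert(addr_to_vb_idx(addr) == addr_to_vb_idx(va_start));
            return addr;
    }
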
2899 debug_check_no_locks_freed((void *)va->va_start,
2900 (va->va_end - va->va_start));
2938 addr = va->va_start;
3041 vm->addr = (void *)va->va_start;
3042 vm->size = va->va_end - va->va_start;
3050 struct vmap_node *vn = addr_to_node(va->va_start);
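
Hits 3041-3050 hand a finished allocation over to its vm_struct: address and size come straight from the VA. A reduced sketch (the real vm_struct carries more fields, such as flags and the page array):

    struct vm_struct {
            void *addr;
            unsigned long size;
    };

    static void setup_vm(struct vm_struct *vm, const struct vmap_area *va)
    {
            vm->addr = (void *)va->va_start;         /* hit 3041 */
            vm->size = va->va_end - va->va_start;    /* hit 3042 */
    }
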
4177 start = vmap_block_vaddr(vb->va->va_start, rs);
4262 if ((unsigned long)addr + remains <= va->va_start)
4288 vaddr = (char *) va->va_start;
4448 * i.e. va->va_start < addr && va->va_end < addr or NULL
4462 if (tmp->va_start <= addr) {
4496 if ((*va)->va_start < addr)
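
Hits 4448-4496 document a different lookup contract: return the VA that encloses addr, or failing that the closest VA entirely below it, which is what the quoted comment's "va->va_start < addr && va->va_end < addr or NULL" means. A sketch on the same plain tree as earlier (va_enclose is a made-up name):

    static struct vmap_area *va_enclose(struct tree_node *n, unsigned long addr)
    {
            struct vmap_area *best = NULL;

            while (n) {
                    if (n->va.va_start <= addr) {    /* hit 4462 */
                            best = &n->va;           /* candidate at or below addr */
                            if (addr < n->va.va_end)
                                    break;           /* addr is inside this block */
                            n = n->right;
                    } else {
                            n = n->left;
                    }
            }
            return best;
    }
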
4617 if (base + start < va->va_start) {
4657 va->va_start = start;
4665 if (kasan_populate_vmalloc(vas[area]->va_start, sizes[area]))
4671 struct vmap_node *vn = addr_to_node(vas[area]->va_start);
4701 orig_start = vas[area]->va_start;
4707 va->va_start, va->va_end);
4751 orig_start = vas[area]->va_start;
4757 va->va_start, va->va_end);
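
Hits 4617-4757 come from pcpu_get_vm_areas(): each per-CPU chunk is pinned at a fixed offset below a shared base; if the free block enclosing a request starts above it (4617), the base is lowered and the scan restarts, and on a later failure the already-claimed windows are handed back and re-merged into the free tree (4701-4757). A loose fragment of the claim step, reusing va_enclose from the previous sketch (every name other than va_start/va_end is made up):

    /* Claim [base + off, base + off + size) out of the free tree; returns
     * 0 on success, -1 when the caller must lower the base and rescan. */
    static int claim_window(struct tree_node *root, unsigned long base,
                            unsigned long off, unsigned long size,
                            struct vmap_area *reserved)
    {
            unsigned long start = base + off;
            struct vmap_area *va = va_enclose(root, start);

            if (!va || start < va->va_start || start + size > va->va_end)
                    return -1;                       /* hit 4617 */

            reserved->va_start = start;              /* hit 4657 */
            reserved->va_end = start + size;
            return 0;
    }
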
4856 (void *)va->va_start, (void *)va->va_end,
4857 va->va_end - va->va_start);
4878 (void *)va->va_start, (void *)va->va_end,
4879 va->va_end - va->va_start);
4965 free->va_start = vmap_start;
4980 free->va_start = vmap_start;
5109 va->va_start = (unsigned long)tmp->addr;
5110 va->va_end = va->va_start + tmp->size;
5113 vn = addr_to_node(va->va_start);
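
The last hits are boot-time setup: the free tree is seeded with the gaps between already-occupied regions (hit 4965 is a gap between busy areas, hit 4980 the tail gap), and each early vmlist mapping is then imported as a busy VA spanning [addr, addr + size) (hits 5109-5113). A userspace sketch of the gap-filling pass, with sorted arrays standing in for the kernel lists:

    struct range { unsigned long start, end; };      /* half-open */

    /* busy[] must be sorted by start and lie within [vmap_start, vmap_end). */
    static size_t init_free_space(const struct range *busy, size_t n,
                                  unsigned long vmap_start,
                                  unsigned long vmap_end,
                                  struct range *out_free)
    {
            size_t i, nfree = 0;

            for (i = 0; i < n; i++) {
                    if (busy[i].start > vmap_start)  /* gap before this busy area */
                            out_free[nfree++] =
                                    (struct range){ vmap_start, busy[i].start };
                    vmap_start = busy[i].end;
            }
            if (vmap_end > vmap_start)               /* tail gap, hit 4980 */
                    out_free[nfree++] = (struct range){ vmap_start, vmap_end };
            return nfree;
    }
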