Searched refs:kaddr (Results 1 - 25 of 195) sorted by last modified time


/linux-master/fs/btrfs/
inode.c
521 char *kaddr; local
566 kaddr = kmap_local_page(cpage);
567 write_extent_buffer(leaf, kaddr, ptr, cur_size);
568 kunmap_local(kaddr);
579 kaddr = kmap_local_page(page);
580 write_extent_buffer(leaf, kaddr, ptr, size);
581 kunmap_local(kaddr);
3296 char *kaddr; local
3302 kaddr = kmap_local_page(page) + pgoff;
3303 crypto_shash_digest(shash, kaddr, fs_inf
6773 void *kaddr; local
10415 char *kaddr; local
[all...]
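
The inode.c hits are btrfs copying inline file data from a page into a leaf: the page is mapped with kmap_local_page(), written into the extent buffer with write_extent_buffer(), and unmapped with kunmap_local(). A minimal sketch of that map/copy/unmap pairing, with the btrfs-specific destination replaced by a plain buffer (copy_page_into_buf() is a hypothetical helper, not kernel API):

#include <linux/highmem.h>
#include <linux/mm.h>
#include <linux/string.h>

/* Hypothetical helper: copy 'len' bytes from the start of 'page' into 'dst'.
 * kmap_local_page() returns a temporary kernel VA (needed when the page may
 * be highmem) and must be paired with kunmap_local() on that same address. */
static void copy_page_into_buf(struct page *page, void *dst, size_t len)
{
	char *kaddr;

	kaddr = kmap_local_page(page);
	memcpy(dst, kaddr, len);
	kunmap_local(kaddr);
}

In the listed code the memcpy() is write_extent_buffer(leaf, kaddr, ptr, cur_size), which scatters the bytes into the leaf's backing folios, and the crypto_shash_digest() hit at 3303 uses the same temporary mapping as a checksum input instead of a copy source.
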
extent_io.c
4434 char *kaddr; local
4437 kaddr = folio_address(eb->folios[i]);
4438 memcpy(dst, kaddr + offset, cur);
4470 char *kaddr; local
4473 kaddr = folio_address(eb->folios[i]);
4474 if (copy_to_user_nofault(dst, kaddr + offset, cur)) {
4494 char *kaddr; local
4509 kaddr = folio_address(eb->folios[i]);
4510 ret = memcmp(ptr, kaddr + offset, cur);
4565 char *kaddr; local
4668 char *kaddr; local
4738 u8 *kaddr; local
4770 u8 *kaddr; local
4807 u8 *kaddr; local
[all...]
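
The extent_io.c hits all share one shape: extent-buffer folios are allocated from lowmem, so the code takes folio_address() directly (no kmap needed) and processes the request one folio-sized chunk at a time. A hedged sketch of that chunked read loop; the folio array and the assumption of order-0 folios are illustrative, not the btrfs extent_buffer layout:

#include <linux/mm.h>
#include <linux/minmax.h>
#include <linux/string.h>

/* Illustrative: copy 'len' bytes starting at byte 'offset' out of an array of
 * order-0 lowmem folios into 'dst', crossing folio boundaries as needed. */
static void read_from_folios(struct folio **folios, unsigned long offset,
			     char *dst, unsigned long len)
{
	while (len) {
		unsigned long idx = offset / PAGE_SIZE;
		unsigned long in_folio = offset % PAGE_SIZE;
		unsigned long cur = min(len, PAGE_SIZE - in_folio);
		char *kaddr = folio_address(folios[idx]);

		memcpy(dst, kaddr + in_folio, cur);
		dst += cur;
		offset += cur;
		len -= cur;
	}
}

The 4474 hit swaps the memcpy() for copy_to_user_nofault() because the destination is a user pointer that may fault, and the 4510 hit swaps it for memcmp().
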
/linux-master/fs/nilfs2/
dir.c
109 static bool nilfs_check_folio(struct folio *folio, char *kaddr) argument
127 p = (struct nilfs_dir_entry *)(kaddr + offs);
171 p = (struct nilfs_dir_entry *)(kaddr + offs);
186 void *kaddr; local
191 kaddr = kmap_local_folio(folio, 0);
193 if (!nilfs_check_folio(folio, kaddr))
198 return kaddr;
201 folio_release_kmap(folio, kaddr);
273 char *kaddr, *limit; local
277 kaddr
342 char *kaddr = nilfs_get_folio(dir, n, foliop); local
443 char *kaddr = nilfs_get_folio(dir, n, &folio); local
522 char *kaddr = (char *)((unsigned long)dir & ~(folio_size(folio) - 1)); local
565 void *kaddr; local
603 char *kaddr; local
[all...]
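
dir.c maps a whole directory folio with kmap_local_folio(folio, 0), validates the entries once per folio, and undoes both the mapping and the folio reference with folio_release_kmap() when validation fails or the caller is finished. A condensed sketch of that get/validate/release shape, assuming folio_release_kmap() behaves as the unmap-plus-put helper used in the hits; check_entries() is a hypothetical stand-in for nilfs_check_folio():

#include <linux/pagemap.h>
#include <linux/highmem.h>
#include <linux/err.h>

/* Stand-in for nilfs_check_folio()-style validation of directory entries. */
static bool check_entries(struct folio *folio, char *kaddr)
{
	return kaddr != NULL;
}

/* Map directory folio 'index' and return its kernel VA, or an ERR_PTR.
 * On success the caller must later call folio_release_kmap(folio, kaddr). */
static void *dir_get_folio(struct address_space *mapping, unsigned long index,
			   struct folio **foliop)
{
	struct folio *folio = read_mapping_folio(mapping, index, NULL);
	char *kaddr;

	if (IS_ERR(folio))
		return ERR_CAST(folio);

	kaddr = kmap_local_folio(folio, 0);
	if (!check_entries(folio, kaddr)) {
		folio_release_kmap(folio, kaddr);	/* kunmap_local + folio_put */
		return ERR_PTR(-EIO);
	}
	*foliop = folio;
	return kaddr;
}
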
/linux-master/drivers/gpu/drm/amd/amdkfd/
kfd_process.c
772 void *kaddr; local
780 &mem, &kaddr);
785 qpd->ib_kaddr = kaddr;
1334 void *kaddr; local
1342 KFD_CWSR_TBA_TMA_SIZE, flags, &mem, &kaddr);
1347 qpd->cwsr_kaddr = kaddr;
/linux-master/fs/ceph/
addr.c
1814 void *kaddr = kmap_atomic(page); local
1815 memcpy(kaddr, data, len);
1816 kunmap_atomic(kaddr);
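
The ceph hit copies a short data buffer into a page through a temporary mapping, using the older kmap_atomic()/kunmap_atomic() pair. The same operation written against the newer kmap_local_page() API (the API choice in this sketch is mine; the listed ceph code still uses kmap_atomic()):

#include <linux/highmem.h>
#include <linux/string.h>

/* Copy 'len' bytes of 'data' to the beginning of 'page'. */
static void fill_page(struct page *page, const void *data, size_t len)
{
	void *kaddr = kmap_local_page(page);

	memcpy(kaddr, data, len);
	kunmap_local(kaddr);
	flush_dcache_page(page);	/* writes went through a kernel alias */
}
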
/linux-master/arch/arm64/include/asm/
tlbflush.h
509 static inline void __flush_tlb_kernel_pgtable(unsigned long kaddr) argument
511 unsigned long addr = __TLBI_VADDR(kaddr, 0);
/linux-master/arch/loongarch/mm/
pgtable.c
12 struct page *dmw_virt_to_page(unsigned long kaddr) argument
14 return phys_to_page(__pa(kaddr));
18 struct page *tlb_virt_to_page(unsigned long kaddr) argument
20 return phys_to_page(pfn_to_phys(pte_pfn(*virt_to_kpte(kaddr))));
mmap.c
111 int __virt_addr_valid(volatile void *kaddr) argument
113 unsigned long vaddr = (unsigned long)kaddr;
115 if (is_kfence_address((void *)kaddr))
121 return pfn_valid(PFN_DOWN(PHYSADDR(kaddr)));
/linux-master/arch/loongarch/include/asm/
kfence.h
19 char *kaddr, *vaddr; local
39 kaddr = kfence_pool;
41 while (kaddr < kfence_pool + KFENCE_POOL_SIZE) {
42 set_page_address(virt_to_page(kaddr), vaddr);
43 kaddr += PAGE_SIZE;
page.h
78 struct page *dmw_virt_to_page(unsigned long kaddr);
79 struct page *tlb_virt_to_page(unsigned long kaddr);
90 #define virt_to_page(kaddr) phys_to_page(__pa(kaddr))
102 #define virt_to_page(kaddr) \
104 (likely((unsigned long)kaddr < vm_map_base)) ? \
105 dmw_virt_to_page((unsigned long)kaddr) : tlb_virt_to_page((unsigned long)kaddr);\
111 #define virt_to_pfn(kaddr) page_to_pfn(virt_to_page(kaddr))
[all...]
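
page.h is where LoongArch decides how virt_to_page() resolves a kernel VA: addresses below vm_map_base are in the direct-mapped window and go through dmw_virt_to_page(), everything else is looked up through the kernel page tables by tlb_virt_to_page(). From generic code none of that is visible; the portable pattern is simply virt_to_page()/page_to_pfn() on a linearly mapped address such as one returned by kmalloc(), as in this small sketch:

#include <linux/mm.h>
#include <linux/slab.h>
#include <linux/printk.h>

static int show_pfn_of_kmalloc(void)
{
	void *kaddr = kmalloc(64, GFP_KERNEL);
	struct page *page;

	if (!kaddr)
		return -ENOMEM;

	/* Only valid for linearly mapped addresses, never for vmalloc VAs. */
	page = virt_to_page(kaddr);
	pr_info("kaddr %px -> pfn %lu\n", kaddr, page_to_pfn(page));

	kfree(kaddr);
	return 0;
}
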
io.h
72 #define virt_to_phys(kaddr) \
74 (likely((unsigned long)kaddr < vm_map_base)) ? __pa((unsigned long)kaddr) : \
75 page_to_phys(tlb_virt_to_page((unsigned long)kaddr)) + offset_in_page((unsigned long)kaddr);\
/linux-master/arch/x86/events/intel/
ds.c
1451 void *kaddr; local
1497 kaddr = buf;
1499 kaddr = (void *)to;
1510 insn_init(&insn, kaddr, size, is_64bit);
1521 kaddr += insn.length;
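
The ds.c hits are from the PEBS fixup code that walks instruction by instruction through a basic block: kaddr points at the instruction bytes (a copied buffer or kernel text), each instruction is decoded with the in-kernel x86 decoder, and kaddr advances by insn.length. A hedged sketch of that decode-and-advance loop; the PEBS bookkeeping is dropped, and since the error-reporting convention of insn_get_length() has varied across kernel versions, only insn.length is checked here:

#include <asm/insn.h>

/* Count how many x86-64 instructions occupy the 'size' bytes at 'kaddr'. */
static int count_insns(void *kaddr, unsigned long size)
{
	int count = 0;

	while (size > 0) {
		struct insn insn;

		insn_init(&insn, kaddr, size, /* x86_64 = */ 1);
		insn_get_length(&insn);
		if (!insn.length || insn.length > size)
			break;		/* undecodable or truncated byte stream */

		kaddr += insn.length;
		size -= insn.length;
		count++;
	}
	return count;
}
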
/linux-master/mm/
vmalloc.c
4345 * @kaddr: virtual address of vmalloc kernel memory
4346 * @pgoff: offset from @kaddr to start at
4351 * This function checks that @kaddr is a valid vmalloc'ed area,
4359 void *kaddr, unsigned long pgoff,
4371 if (!PAGE_ALIGNED(uaddr) || !PAGE_ALIGNED(kaddr))
4374 area = find_vm_area(kaddr);
4384 kaddr += off;
4387 struct page *page = vmalloc_to_page(kaddr);
4395 kaddr += PAGE_SIZE;
4358 remap_vmalloc_range_partial(struct vm_area_struct *vma, unsigned long uaddr, void *kaddr, unsigned long pgoff, unsigned long size) argument
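
remap_vmalloc_range_partial() is the engine behind remap_vmalloc_range(): it checks that kaddr really is a vmalloc area marked for user mapping, then walks it page by page with vmalloc_to_page() and inserts each page into the user VMA. Drivers usually reach it through remap_vmalloc_range() from an .mmap handler; a sketch, assuming the buffer was allocated with vmalloc_user() so the VM_USERMAP flag is set (names are illustrative):

#include <linux/vmalloc.h>
#include <linux/mm.h>
#include <linux/fs.h>

static void *buf;	/* assumed: buf = vmalloc_user(BUF_SIZE) at init time */

static int my_mmap(struct file *file, struct vm_area_struct *vma)
{
	/* Map the whole vmalloc'ed buffer starting at offset 0 into the VMA.
	 * Fails unless buf is page aligned, VM_USERMAP and large enough. */
	return remap_vmalloc_range(vma, buf, 0);
}
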
memory.c
2964 void *kaddr; local
2984 kaddr = kmap_local_page(dst);
3019 if (__copy_from_user_inatomic(kaddr, uaddr, PAGE_SIZE)) {
3037 if (__copy_from_user_inatomic(kaddr, uaddr, PAGE_SIZE)) {
3044 clear_page(kaddr);
3054 kunmap_local(kaddr);
6389 void *kaddr; local
6397 kaddr = kmap_local_page(subpage);
6400 rc = copy_from_user(kaddr, usr_src + i * PAGE_SIZE, PAGE_SIZE);
6403 kunmap_local(kaddr);
[all...]
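
Both memory.c sites map the destination with kmap_local_page() and pull the bytes in from userspace: the copy-on-write path at 2964 has to use __copy_from_user_inatomic() because it runs with the page-table lock held, while the large-folio copy at 6389 can simply call copy_from_user() per subpage. A sketch of the simpler, sleepable variant (function and parameter names are illustrative):

#include <linux/highmem.h>
#include <linux/uaccess.h>

/* Fill 'page' from the user buffer 'usrc'.  copy_from_user() may sleep on a
 * fault, so this must not run under a spinlock or with pagefaults disabled. */
static int fill_page_from_user(struct page *page, const void __user *usrc)
{
	void *kaddr = kmap_local_page(page);
	unsigned long left;

	left = copy_from_user(kaddr, usrc, PAGE_SIZE);
	kunmap_local(kaddr);
	flush_dcache_page(page);

	return left ? -EFAULT : 0;
}
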
userfaultfd.c
250 void *kaddr; local
261 kaddr = kmap_local_folio(folio, 0);
278 ret = copy_from_user(kaddr, (const void __user *) src_addr,
281 kunmap_local(kaddr);
785 void *kaddr; local
791 kaddr = kmap_local_folio(folio, 0);
792 err = copy_from_user(kaddr,
795 kunmap_local(kaddr);
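
The userfaultfd hits are the folio flavour of the same copy-in pattern: kmap_local_folio(folio, 0), copy_from_user() from the UFFDIO_COPY source address, kunmap_local(); in the real code a short copy is retried with the mmap lock dropped rather than failed outright. A condensed sketch of just the copy step (retry and locking omitted, names illustrative):

#include <linux/highmem.h>
#include <linux/uaccess.h>
#include <linux/mm.h>

/* Copy one page of data from user address 'src_addr' into 'folio'. */
static int copy_into_folio(struct folio *folio, unsigned long src_addr)
{
	void *kaddr = kmap_local_folio(folio, 0);
	unsigned long left;

	left = copy_from_user(kaddr, (const void __user *)src_addr, PAGE_SIZE);
	kunmap_local(kaddr);
	flush_dcache_folio(folio);	/* data was written via the kernel alias */

	return left ? -EFAULT : 0;
}
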
/linux-master/fs/
namei.c
5173 char *kaddr; local
5192 kaddr = page_address(page);
5193 nd_terminate_link(kaddr, inode->i_size, PAGE_SIZE - 1);
5194 return kaddr;
exec.c
528 char *kaddr = NULL; local
592 kunmap_local(kaddr);
596 kaddr = kmap_local_page(kmapped_page);
600 if (copy_from_user(kaddr+offset, str, bytes_to_copy)) {
610 kunmap_local(kaddr);
1724 char *kaddr; local
1735 kaddr = kmap_local_page(page);
1737 for (; offset < PAGE_SIZE && kaddr[offset];
1741 kunmap_local(kaddr);
/linux-master/arch/arm64/kvm/
mmu.c
419 static phys_addr_t kvm_kaddr_to_phys(void *kaddr) argument
421 if (!is_vmalloc_addr(kaddr)) {
422 BUG_ON(!virt_addr_valid(kaddr));
423 return __pa(kaddr);
425 return page_to_phys(vmalloc_to_page(kaddr)) +
426 offset_in_page(kaddr);
737 * @kaddr: Kernel VA for this mapping
741 void __iomem **kaddr,
750 *kaddr = ioremap(phys_addr, size);
751 if (!*kaddr)
740 create_hyp_io_mappings(phys_addr_t phys_addr, size_t size, void __iomem **kaddr, void __iomem **haddr) argument
[all...]
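
kvm_kaddr_to_phys() is a compact illustration of the two ways a kernel VA turns into a physical address: linear-map addresses translate with __pa(), while vmalloc addresses have to be resolved page by page with vmalloc_to_page() and the in-page offset added back. A generic version of the same helper (the BUG_ON() mirrors the hit; how strict to be is a policy choice):

#include <linux/mm.h>
#include <linux/vmalloc.h>
#include <linux/io.h>

static phys_addr_t kaddr_to_phys(void *kaddr)
{
	if (!is_vmalloc_addr(kaddr)) {
		/* Linear mapping: physical address is a constant offset away. */
		BUG_ON(!virt_addr_valid(kaddr));
		return __pa(kaddr);
	}

	/* vmalloc mapping: look up the backing page, re-add the page offset. */
	return page_to_phys(vmalloc_to_page(kaddr)) + offset_in_page(kaddr);
}
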
/linux-master/kernel/bpf/
arena.c
250 long kbase, kaddr; local
254 kaddr = kbase + (u32)(vmf->address & PAGE_MASK);
257 page = vmalloc_to_page((void *)kaddr);
277 ret = vm_area_map_pages(arena->kern_vm, kaddr, kaddr + PAGE_SIZE, &page);
490 long kaddr, pgoff, i; local
513 kaddr = bpf_arena_get_kern_vm_start(arena) + uaddr;
514 for (i = 0; i < page_cnt; i++, kaddr += PAGE_SIZE, full_uaddr += PAGE_SIZE) {
515 page = vmalloc_to_page((void *)kaddr);
525 vm_area_unmap_pages(arena->kern_vm, kaddr, kadd
[all...]
/linux-master/fs/gfs2/
bmap.c
60 void *kaddr = kmap_local_folio(folio, 0); local
63 memcpy(kaddr, dibh->b_data + sizeof(struct gfs2_dinode), dsize);
64 memset(kaddr + dsize, 0, folio_size(folio) - dsize);
65 kunmap_local(kaddr);
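
The gfs2 hit is the classic inline-data unstuff: copy the data that lives just past the on-disk dinode header into the start of the folio, then zero the rest of the folio so no stale bytes reach the page cache. A stripped-down sketch of the copy-then-zero-tail pattern, assuming an order-0 (single page) folio as in the hit:

#include <linux/highmem.h>
#include <linux/string.h>

/* Fill an order-0 'folio' with 'dsize' bytes from 'src', zeroing the tail. */
static void stuff_folio(struct folio *folio, const void *src, size_t dsize)
{
	void *kaddr = kmap_local_folio(folio, 0);

	memcpy(kaddr, src, dsize);
	memset(kaddr + dsize, 0, folio_size(folio) - dsize);
	kunmap_local(kaddr);
	flush_dcache_folio(folio);
}
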
/linux-master/tools/perf/trace/beauty/include/linux/
socket.h
391 extern int move_addr_to_kernel(void __user *uaddr, int ulen, struct sockaddr_storage *kaddr);
/linux-master/net/
socket.c
239 * @kaddr: Address in kernel space
247 int move_addr_to_kernel(void __user *uaddr, int ulen, struct sockaddr_storage *kaddr) argument
253 if (copy_from_user(kaddr, uaddr, ulen))
255 return audit_sockaddr(ulen, kaddr);
260 * @kaddr: kernel space address
275 static int move_addr_to_user(struct sockaddr_storage *kaddr, int klen, argument
290 if (audit_sockaddr(klen, kaddr))
292 if (copy_to_user(uaddr, kaddr, len))
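
move_addr_to_kernel() and move_addr_to_user() are how socket syscalls shuttle a struct sockaddr across the user/kernel boundary: the kernel side always works on a struct sockaddr_storage (large enough for any address family), the user-supplied length is validated, and the copy is handed to the audit subsystem. A sketch of how a bind-style path consumes the kernel copy (error handling condensed, names illustrative):

#include <linux/socket.h>
#include <linux/net.h>

/* Illustrative syscall body: pull the user's sockaddr into kernel space
 * before handing it to the protocol's bind operation. */
static int bind_like(struct socket *sock, struct sockaddr __user *umyaddr,
		     int addrlen)
{
	struct sockaddr_storage address;	/* the 'kaddr' in the hits above */
	int err;

	err = move_addr_to_kernel(umyaddr, addrlen, &address);
	if (err < 0)
		return err;

	/* From here on only the kernel-space copy is used. */
	return sock->ops->bind(sock, (struct sockaddr *)&address, addrlen);
}
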
/linux-master/net/packet/
af_packet.c
4592 void *kaddr = rb->pg_vec[i].buffer; local
4596 page = pgv_to_page(kaddr);
4601 kaddr += PAGE_SIZE;
/linux-master/include/linux/
socket.h
391 extern int move_addr_to_kernel(void __user *uaddr, int ulen, struct sockaddr_storage *kaddr);
/linux-master/drivers/misc/
open-dice.c
45 void *kaddr; local
48 kaddr = devm_memremap(drvdata->misc.this_device, drvdata->rmem->base,
50 if (IS_ERR(kaddr)) {
52 return PTR_ERR(kaddr);
55 memset(kaddr, 0, drvdata->rmem->size);
56 devm_memunmap(drvdata->misc.this_device, kaddr);
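
open-dice.c shows the managed memremap pattern for a reserved memory region: devm_memremap() yields a kernel VA for the physical range, the driver wipes it with memset(), and devm_memunmap() immediately drops the mapping since it is only needed for the wipe. A hedged sketch of that sequence (the MEMREMAP_WB flag, the helper name, and its placement in probe() are my assumptions, not taken from the listed driver):

#include <linux/device.h>
#include <linux/io.h>
#include <linux/string.h>

/* Zero the physical region [base, base + size) from a driver's probe(). */
static int wipe_region(struct device *dev, phys_addr_t base, size_t size)
{
	void *kaddr;

	kaddr = devm_memremap(dev, base, size, MEMREMAP_WB);
	if (IS_ERR(kaddr))
		return PTR_ERR(kaddr);

	memset(kaddr, 0, size);
	devm_memunmap(dev, kaddr);	/* mapping only needed for the wipe */
	return 0;
}
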
