Searched refs:ptl (Results 1 - 25 of 60) sorted by relevance


/linux-master/drivers/platform/surface/aggregator/
ssh_packet_layer.c
335 ptl_info(packet->ptl, "packet error injection: dropping ACK packet %p\n",
347 ptl_info(packet->ptl, "packet error injection: dropping NAK packet %p\n",
359 ptl_info(packet->ptl,
387 static int ssh_ptl_write_buf(struct ssh_ptl *ptl, struct ssh_packet *packet, argument
395 ptl_info(packet->ptl,
402 return serdev_device_write_buf(ptl->serdev, buf, count);
419 ptl_info(packet->ptl,
431 static void ssh_ptl_rx_inject_invalid_syn(struct ssh_ptl *ptl, argument
448 static void ssh_ptl_rx_inject_invalid_data(struct ssh_ptl *ptl, argument
486 static inline int ssh_ptl_write_buf(struct ssh_ptl *ptl, argument
498 ssh_ptl_rx_inject_invalid_syn(struct ssh_ptl *ptl, struct ssam_span *data) argument
503 ssh_ptl_rx_inject_invalid_data(struct ssh_ptl *ptl, struct ssam_span *frame) argument
663 ssh_ptl_timeout_reaper_mod(struct ssh_ptl *ptl, ktime_t now, ktime_t expires) argument
741 struct ssh_ptl *ptl = packet->ptl; local
776 struct ssh_ptl *ptl = packet->ptl; local
793 struct ssh_ptl *ptl = p->ptl; local
833 struct ssh_ptl *ptl = packet->ptl; local
853 struct ssh_ptl *ptl = READ_ONCE(p->ptl); local
885 struct ssh_ptl *ptl = packet->ptl; local
902 ssh_ptl_tx_pop(struct ssh_ptl *ptl) argument
955 ssh_ptl_tx_next(struct ssh_ptl *ptl) argument
975 struct ssh_ptl *ptl = packet->ptl; local
1018 ssh_ptl_tx_wait_packet(struct ssh_ptl *ptl) argument
1034 ssh_ptl_tx_wait_transfer(struct ssh_ptl *ptl, long timeout) argument
1051 ssh_ptl_tx_packet(struct ssh_ptl *ptl, struct ssh_packet *packet) argument
1101 struct ssh_ptl *ptl = data; local
1138 ssh_ptl_tx_wakeup_packet(struct ssh_ptl *ptl) argument
1152 ssh_ptl_tx_start(struct ssh_ptl *ptl) argument
1169 ssh_ptl_tx_stop(struct ssh_ptl *ptl) argument
1193 ssh_ptl_ack_pop(struct ssh_ptl *ptl, u8 seq_id) argument
1244 ssh_ptl_acknowledge(struct ssh_ptl *ptl, u8 seq) argument
1312 ssh_ptl_submit(struct ssh_ptl *ptl, struct ssh_packet *p) argument
1398 ssh_ptl_resubmit_pending(struct ssh_ptl *ptl) argument
1502 struct ssh_ptl *ptl = to_ssh_ptl(work, rtx_timeout.reaper.work); local
1599 ssh_ptl_rx_retransmit_check(struct ssh_ptl *ptl, const struct ssh_frame *frame) argument
1639 ssh_ptl_rx_dataframe(struct ssh_ptl *ptl, const struct ssh_frame *frame, const struct ssam_span *payload) argument
1649 ssh_ptl_send_ack(struct ssh_ptl *ptl, u8 seq) argument
1673 ssh_ptl_send_nak(struct ssh_ptl *ptl) argument
1697 ssh_ptl_rx_eval(struct ssh_ptl *ptl, struct ssam_span *source) argument
1796 struct ssh_ptl *ptl = data; local
1834 ssh_ptl_rx_wakeup(struct ssh_ptl *ptl) argument
1845 ssh_ptl_rx_start(struct ssh_ptl *ptl) argument
1864 ssh_ptl_rx_stop(struct ssh_ptl *ptl) argument
1890 ssh_ptl_rx_rcvbuf(struct ssh_ptl *ptl, const u8 *buf, size_t n) argument
1917 ssh_ptl_shutdown(struct ssh_ptl *ptl) argument
2026 ssh_ptl_init(struct ssh_ptl *ptl, struct serdev_device *serdev, struct ssh_ptl_ops *ops) argument
2082 ssh_ptl_destroy(struct ssh_ptl *ptl) argument
[all...]
ssh_request_layer.h
47 * @ptl: Underlying packet transport layer.
66 struct ssh_ptl ptl; member in struct:ssh_rtl
94 #define rtl_dbg(r, fmt, ...) ptl_dbg(&(r)->ptl, fmt, ##__VA_ARGS__)
95 #define rtl_info(p, fmt, ...) ptl_info(&(p)->ptl, fmt, ##__VA_ARGS__)
96 #define rtl_warn(r, fmt, ...) ptl_warn(&(r)->ptl, fmt, ##__VA_ARGS__)
97 #define rtl_err(r, fmt, ...) ptl_err(&(r)->ptl, fmt, ##__VA_ARGS__)
112 return ssh_ptl_get_device(&rtl->ptl);
123 struct ssh_ptl *ptl; local
125 ptl = READ_ONCE(rqst->packet.ptl);
[all...]
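
The ssh_request_layer.h hits above show the request layer embedding a struct ssh_ptl (member 'ptl' at line 66) and forwarding its rtl_* log macros to the ptl_* ones. A minimal sketch, assuming only the names visible in these hits plus container_of(), of how a request is traced back to its owning request layer through that embedded member; the helper name is hypothetical and the code is not buildable outside the driver:

static struct ssh_rtl *example_request_rtl(struct ssh_request *rqst)
{
        /* packet.ptl is only set once the request has been submitted */
        struct ssh_ptl *ptl = READ_ONCE(rqst->packet.ptl);

        /* struct ssh_rtl embeds its packet layer as member 'ptl' */
        return ptl ? container_of(ptl, struct ssh_rtl, ptl) : NULL;
}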
ssh_packet_layer.h
139 int ssh_ptl_init(struct ssh_ptl *ptl, struct serdev_device *serdev,
142 void ssh_ptl_destroy(struct ssh_ptl *ptl);
146 * @ptl: The packet transport layer.
151 static inline struct device *ssh_ptl_get_device(struct ssh_ptl *ptl) argument
153 return ptl->serdev ? &ptl->serdev->dev : NULL;
156 int ssh_ptl_tx_start(struct ssh_ptl *ptl);
157 int ssh_ptl_tx_stop(struct ssh_ptl *ptl);
158 int ssh_ptl_rx_start(struct ssh_ptl *ptl);
159 int ssh_ptl_rx_stop(struct ssh_ptl *ptl);
176 ssh_ptl_tx_wakeup_transfer(struct ssh_ptl *ptl) argument
[all...]
controller.h
244 return ssh_ptl_rx_rcvbuf(&ctrl->rtl.ptl, buf, n);
254 ssh_ptl_tx_wakeup_transfer(&ctrl->rtl.ptl);
/linux-master/mm/
page_vma_mapped.c
23 pvmw->address, &pvmw->ptl);
24 *ptlp = pvmw->ptl;
29 * It is important to return the ptl corresponding to pte,
70 pvmw->ptl = *ptlp;
71 spin_lock(pvmw->ptl);
153 * must be set. pmd, pte and ptl must be NULL.
156 * to relevant page table entries. @pvmw->ptl is locked. @pvmw->address is
168 * the vma. @pvmw->ptl is unlocked and @pvmw->pte is unmapped.
178 spinlock_t *ptl; local
203 pvmw->ptl
274 spinlock_t *ptl = pmd_lock(mm, pvmw->pmd); local
[all...]
mincore.c
103 spinlock_t *ptl; local
109 ptl = pmd_trans_huge_lock(pmd, vma);
110 if (ptl) {
112 spin_unlock(ptl);
116 ptep = pte_offset_map_lock(walk->mm, pmd, addr, &ptl);
151 pte_unmap_unlock(ptep - 1, ptl);
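
Most mm/ hits in this list repeat one locking shape: try pmd_trans_huge_lock() first and, when it returns NULL, fall back to pte_offset_map_lock()/pte_unmap_unlock() for the per-page loop, with 'ptl' carrying whichever page-table spinlock was taken. A minimal kernel-style sketch of that shape, assuming a page-walk callback as in mincore.c above; the function name is hypothetical and it is not buildable standalone:

static int example_pmd_range(pmd_t *pmd, unsigned long addr,
                             unsigned long end, struct mm_walk *walk)
{
        struct vm_area_struct *vma = walk->vma;
        pte_t *start_pte, *pte;
        spinlock_t *ptl;

        ptl = pmd_trans_huge_lock(pmd, vma);    /* non-NULL: huge pmd, locked */
        if (ptl) {
                /* ... inspect the huge pmd here ... */
                spin_unlock(ptl);
                return 0;
        }

        start_pte = pte = pte_offset_map_lock(walk->mm, pmd, addr, &ptl);
        if (!pte)                               /* pmd changed under us */
                return 0;
        for (; addr < end; pte++, addr += PAGE_SIZE) {
                /* ... inspect ptep_get(pte) here ... */
        }
        pte_unmap_unlock(start_pte, ptl);
        return 0;
}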
madvise.c
177 spinlock_t *ptl; local
186 ptep = pte_offset_map_lock(vma->vm_mm, pmd, addr, &ptl);
198 pte_unmap_unlock(ptep, ptl);
208 pte_unmap_unlock(ptep, ptl);
334 spinlock_t *ptl; local
352 ptl = pmd_trans_huge_lock(pmd, vma);
353 if (!ptl)
379 spin_unlock(ptl);
411 spin_unlock(ptl);
421 start_pte = pte = pte_offset_map_lock(vma->vm_mm, pmd, addr, &ptl);
626 spinlock_t *ptl; local
[all...]
memory.c
414 spinlock_t *ptl = pmd_lock(mm, pmd); local
435 spin_unlock(ptl);
1576 spinlock_t *ptl; local
1584 start_pte = pte = pte_offset_map_lock(mm, pmd, addr, &ptl);
1669 /* Do the actual TLB flush before dropping ptl */
1674 pte_unmap_unlock(start_pte, ptl);
1679 * entries before releasing the ptl), free the batched
1710 spinlock_t *ptl = pmd_lock(tlb->mm, pmd); local
1716 spin_unlock(ptl);
1969 spinlock_t **ptl)
1968 __get_locked_pte(struct mm_struct *mm, unsigned long addr, spinlock_t **ptl) argument
2016 spinlock_t *ptl; local
2268 spinlock_t *ptl; local
2494 spinlock_t *ptl; local
2723 spinlock_t *ptl; local
5851 spinlock_t *ptl; local
5945 spinlock_t *ptl; local
5967 spinlock_t *ptl; local
6012 spinlock_t *ptl; local
6429 spinlock_t *ptl; local
[all...]
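
The memory.c hits at 414 and 1710 take pmd_lock() to stabilise a pmd before modifying it. A simplified sketch of that recheck-under-lock pattern, loosely modelled on page-table allocation; the helper name is hypothetical and the memory barriers of the real code are omitted:

static void example_install_pte_table(struct mm_struct *mm, pmd_t *pmd,
                                      pgtable_t *new)
{
        spinlock_t *ptl = pmd_lock(mm, pmd);

        if (pmd_none(*pmd)) {           /* still empty: install our table */
                mm_inc_nr_ptes(mm);
                pmd_populate(mm, pmd, *new);
                *new = NULL;
        }
        spin_unlock(ptl);
        /* if *new is still set, the caller lost the race and frees it */
}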
migrate_device.c
66 spinlock_t *ptl; local
76 ptl = pmd_lock(mm, pmdp);
78 spin_unlock(ptl);
84 spin_unlock(ptl);
90 spin_unlock(ptl);
103 ptep = pte_offset_map_lock(mm, pmdp, addr, &ptl);
277 pte_unmap_unlock(ptep - 1, ptl);
574 spinlock_t *ptl; local
634 ptep = pte_offset_map_lock(mm, pmdp, addr, &ptl);
675 pte_unmap_unlock(ptep, ptl);
[all...]
huge_memory.c
901 vmf->ptl = pmd_lock(vma->vm_mm, vmf->pmd);
913 spin_unlock(vmf->ptl);
930 spin_unlock(vmf->ptl);
937 spin_unlock(vmf->ptl);
1024 vmf->ptl = pmd_lock(vma->vm_mm, vmf->pmd);
1029 spin_unlock(vmf->ptl);
1032 spin_unlock(vmf->ptl);
1040 spin_unlock(vmf->ptl);
1043 spin_unlock(vmf->ptl);
1063 spinlock_t *ptl; local
1162 spinlock_t *ptl; local
1805 spinlock_t *ptl; local
1885 spinlock_t *ptl; local
2034 spinlock_t *ptl; local
2309 spinlock_t *ptl; local
2326 spinlock_t *ptl; local
2339 spinlock_t *ptl; local
2373 spinlock_t *ptl; local
2652 spinlock_t *ptl; local
[all...]
hmm.c
418 spinlock_t *ptl = pud_trans_huge_lock(pudp, walk->vma); local
420 if (!ptl)
428 spin_unlock(ptl);
439 spin_unlock(ptl);
451 spin_unlock(ptl);
465 spin_unlock(ptl);
484 spinlock_t *ptl; local
487 ptl = huge_pte_lock(hstate_vma(vma), walk->mm, pte);
499 spin_unlock(ptl);
506 * use here of either pte or ptl afte
[all...]
pgtable-generic.c
331 * struct page). pte_unmap_unlock(pte, ptl) to unlock and unmap afterwards.
365 spinlock_t *ptl; local
372 ptl = pte_lockptr(mm, &pmdval);
373 spin_lock(ptl);
375 *ptlp = ptl;
378 pte_unmap_unlock(pte, ptl);
khugepaged.c
689 spinlock_t *ptl,
703 * ptl mostly unnecessary.
705 spin_lock(ptl);
707 spin_unlock(ptl);
717 * ptl mostly unnecessary, but preempt has to
721 spin_lock(ptl);
724 spin_unlock(ptl);
775 * @ptl: lock on raw pages' PTEs
784 spinlock_t *ptl,
811 __collapse_huge_page_copy_succeeded(pte, vma, address, ptl,
686 __collapse_huge_page_copy_succeeded(pte_t *pte, struct vm_area_struct *vma, unsigned long address, spinlock_t *ptl, struct list_head *compound_pagelist) argument
778 __collapse_huge_page_copy(pte_t *pte, struct page *page, pmd_t *pmd, pmd_t orig_pmd, struct vm_area_struct *vma, unsigned long address, spinlock_t *ptl, struct list_head *compound_pagelist) argument
1000 spinlock_t *ptl; local
1263 spinlock_t *ptl; local
1490 spinlock_t *pml = NULL, *ptl; local
1697 spinlock_t *ptl; local
[all...]
/linux-master/arch/arm/lib/
uaccess_with_memcpy.c
31 spinlock_t *ptl; local
60 ptl = &current->mm->page_table_lock;
61 spin_lock(ptl);
64 spin_unlock(ptl);
69 *ptlp = ptl;
76 pte = pte_offset_map_lock(current->mm, pmd, addr, &ptl);
82 pte_unmap_unlock(pte, ptl);
87 *ptlp = ptl;
105 spinlock_t *ptl; local
108 while (!pin_page_for_write(to, &pte, &ptl)) {
169 spinlock_t *ptl; local
[all...]
/linux-master/drivers/media/platform/allegro-dvt/
nal-hevc.c
101 struct nal_hevc_profile_tier_level *ptl)
106 rbsp_bits(rbsp, 2, &ptl->general_profile_space);
107 rbsp_bit(rbsp, &ptl->general_tier_flag);
108 rbsp_bits(rbsp, 5, &ptl->general_profile_idc);
110 rbsp_bit(rbsp, &ptl->general_profile_compatibility_flag[i]);
111 rbsp_bit(rbsp, &ptl->general_progressive_source_flag);
112 rbsp_bit(rbsp, &ptl->general_interlaced_source_flag);
113 rbsp_bit(rbsp, &ptl->general_non_packed_constraint_flag);
114 rbsp_bit(rbsp, &ptl->general_frame_only_constraint_flag);
115 if (ptl
100 nal_hevc_rbsp_profile_tier_level(struct rbsp *rbsp, struct nal_hevc_profile_tier_level *ptl) argument
[all...]
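
In nal-hevc.c, 'ptl' is unrelated to page tables: it is the HEVC profile_tier_level structure, parsed field by field with the rbsp_bit()/rbsp_bits() helpers. A stripped-down sketch reading just the first three fields, reusing the helper calls exactly as they appear in the hits; the wrapper name is hypothetical:

static void example_read_ptl_header(struct rbsp *rbsp,
                                    struct nal_hevc_profile_tier_level *ptl)
{
        rbsp_bits(rbsp, 2, &ptl->general_profile_space); /* 2-bit field */
        rbsp_bit(rbsp, &ptl->general_tier_flag);         /* single flag bit */
        rbsp_bits(rbsp, 5, &ptl->general_profile_idc);   /* 5-bit profile idc */
}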
/linux-master/arch/arm/mm/
fault-armv.c
70 static inline void do_pte_lock(spinlock_t *ptl) argument
76 spin_lock_nested(ptl, SINGLE_DEPTH_NESTING);
79 static inline void do_pte_unlock(spinlock_t *ptl) argument
81 spin_unlock(ptl);
84 static inline void do_pte_lock(spinlock_t *ptl) {} argument
85 static inline void do_pte_unlock(spinlock_t *ptl) {} argument
91 spinlock_t *ptl; local
120 pte = pte_offset_map_nolock(vma->vm_mm, pmd, address, &ptl);
124 do_pte_lock(ptl);
128 do_pte_unlock(ptl);
[all...]
/linux-master/mm/damon/
vaddr.c
305 spinlock_t *ptl; local
308 ptl = pmd_lock(walk->mm, pmd);
312 spin_unlock(ptl);
318 spin_unlock(ptl);
321 spin_unlock(ptl);
324 pte = pte_offset_map_lock(walk->mm, pmd, addr, &ptl);
333 pte_unmap_unlock(pte, ptl);
372 spinlock_t *ptl; local
375 ptl = huge_pte_lock(h, walk->mm, pte);
383 spin_unlock(ptl);
442 spinlock_t *ptl; local
508 spinlock_t *ptl; local
[all...]
/linux-master/arch/powerpc/mm/
hugetlbpage.c
47 unsigned int pshift, spinlock_t *ptl)
82 spin_lock(ptl);
102 spin_unlock(ptl);
120 spinlock_t *ptl; local
134 ptl = &mm->page_table_lock;
144 ptl = pud_lockptr(mm, pu);
155 ptl = pmd_lockptr(mm, pm);
162 ptl = &mm->page_table_lock;
170 ptl = pud_lockptr(mm, pu);
177 ptl
45 __hugepte_alloc(struct mm_struct *mm, hugepd_t *hpdp, unsigned long address, unsigned int pdshift, unsigned int pshift, spinlock_t *ptl) argument
[all...]
/linux-master/arch/s390/mm/
pgtable.c
806 spinlock_t *ptl; local
824 ptl = pmd_lock(mm, pmdp);
826 spin_unlock(ptl);
838 spin_unlock(ptl);
841 spin_unlock(ptl);
843 ptep = pte_offset_map_lock(mm, pmdp, addr, &ptl);
870 pte_unmap_unlock(ptep, ptl);
915 spinlock_t *ptl; local
935 ptl = pmd_lock(mm, pmdp);
937 spin_unlock(ptl);
979 spinlock_t *ptl; local
1047 spinlock_t *ptl; local
1152 spinlock_t *ptl; local
1184 spinlock_t *ptl; local
[all...]
/linux-master/arch/powerpc/lib/
code-patching.c
154 spinlock_t *ptl; local
175 pte = get_locked_pte(mm, addr, &ptl);
178 pte_unmap_unlock(pte, ptl);
288 spinlock_t *ptl; local
294 pte = get_locked_pte(patching_mm, text_poke_addr, &ptl);
320 pte_unmap_unlock(pte, ptl);
410 spinlock_t *ptl; local
419 pte = get_locked_pte(patching_mm, text_poke_addr, &ptl);
445 pte_unmap_unlock(pte, ptl);
/linux-master/arch/s390/pci/
pci_mmio.c
126 spinlock_t *ptl; local
172 ret = follow_pte(vma->vm_mm, mmio_addr, &ptep, &ptl);
184 pte_unmap_unlock(ptep, ptl);
268 spinlock_t *ptl; local
311 ret = follow_pte(vma->vm_mm, mmio_addr, &ptep, &ptl);
325 pte_unmap_unlock(ptep, ptl);
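
pci_mmio.c resolves the PTE behind a user MMIO mapping with follow_pte() and drops the returned lock with pte_unmap_unlock(). A minimal sketch of that lookup, using the same four-argument call shown in the hits; the helper name is hypothetical and, as in pci_mmio.c, this only makes sense for IO/pfnmap VMAs:

static int example_user_addr_to_pfn(struct vm_area_struct *vma,
                                    unsigned long addr, unsigned long *pfn)
{
        pte_t *ptep;
        spinlock_t *ptl;
        int ret = follow_pte(vma->vm_mm, addr, &ptep, &ptl);

        if (ret)
                return ret;
        *pfn = pte_pfn(ptep_get(ptep)); /* read the entry under its lock */
        pte_unmap_unlock(ptep, ptl);
        return 0;
}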
/linux-master/fs/proc/
task_mmu.c
617 spinlock_t *ptl; local
619 ptl = pmd_trans_huge_lock(pmd, vma);
620 if (ptl) {
622 spin_unlock(ptl);
626 pte = pte_offset_map_lock(vma->vm_mm, pmd, addr, &ptl);
633 pte_unmap_unlock(pte - 1, ptl);
1163 spinlock_t *ptl; local
1166 ptl = pmd_trans_huge_lock(pmd, vma);
1167 if (ptl) {
1183 spin_unlock(ptl);
1462 spinlock_t *ptl; local
2105 spinlock_t *ptl; local
2156 spinlock_t *ptl; local
2252 spinlock_t *ptl; local
2628 spinlock_t *ptl; local
[all...]
/linux-master/arch/x86/kernel/
ldt.c
292 spinlock_t *ptl; local
326 ptep = get_locked_pte(mm, va, &ptl);
339 pte_unmap_unlock(ptep, ptl);
365 spinlock_t *ptl; local
369 ptep = get_locked_pte(mm, va, &ptl);
372 pte_unmap_unlock(ptep, ptl);
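
Both code-patching.c and ldt.c above use get_locked_pte() to look up (allocating intermediate tables as needed) the PTE slot for an address with its lock already held, install a mapping, and release it with pte_unmap_unlock(). A minimal kernel-style sketch of that sequence; the helper name is hypothetical:

static int example_map_one_page(struct mm_struct *mm, unsigned long va,
                                struct page *page, pgprot_t prot)
{
        spinlock_t *ptl;
        pte_t *ptep = get_locked_pte(mm, va, &ptl);

        if (!ptep)
                return -ENOMEM;
        set_pte_at(mm, va, ptep, mk_pte(page, prot)); /* install the mapping */
        pte_unmap_unlock(ptep, ptl);
        return 0;
}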
/linux-master/include/linux/
migrate.h
78 void migration_entry_wait_on_locked(swp_entry_t entry, spinlock_t *ptl)
79 __releases(ptl); variable
/linux-master/arch/m68k/kernel/
sys_m68k.c
474 spinlock_t *ptl; local
490 pte = pte_offset_map_lock(mm, pmd, (unsigned long)mem, &ptl);
495 pte_unmap_unlock(pte, ptl);
507 pte_unmap_unlock(pte, ptl);

Completed in 1004 milliseconds
