Searched refs:sync (Results 1 - 25 of 629) sorted by relevance


/linux-master/tools/testing/selftests/powerpc/ptrace/
child.h
3 * Helper functions to sync execution between parent and child processes.
29 #define CHILD_FAIL_IF(x, sync) \
34 (sync)->child_gave_up = true; \
35 prod_parent(sync); \
40 #define PARENT_FAIL_IF(x, sync) \
45 (sync)->parent_gave_up = true; \
46 prod_child(sync); \
51 #define PARENT_SKIP_IF_UNSUPPORTED(x, sync, msg) \
54 (sync)->parent_gave_up = true; \
55 prod_child(sync); \
60 init_child_sync(struct child_sync *sync) argument
79 destroy_child_sync(struct child_sync *sync) argument
85 wait_child(struct child_sync *sync) argument
99 prod_child(struct child_sync *sync) argument
113 wait_parent(struct child_sync *sync) argument
127 prod_parent(struct child_sync *sync) argument
[all...]
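
The child.h helpers above (wait_child/prod_child, wait_parent/prod_parent) implement a parent/child handshake through a shared struct child_sync. As a rough standalone analogue only, here is a runnable userspace sketch that coordinates a forked child with POSIX semaphores in a shared anonymous mapping; all names are illustrative and this is not the selftest's own code, which uses its own child_sync structure. Build with: gcc -pthread handshake.c

#define _GNU_SOURCE
#include <semaphore.h>
#include <stdio.h>
#include <sys/mman.h>
#include <sys/wait.h>
#include <unistd.h>

struct handshake {
	sem_t to_parent;        /* child posts, parent waits (like prod_parent/wait_child) */
	sem_t to_child;         /* parent posts, child waits (like prod_child/wait_parent) */
};

int main(void)
{
	struct handshake *hs = mmap(NULL, sizeof(*hs), PROT_READ | PROT_WRITE,
				    MAP_SHARED | MAP_ANONYMOUS, -1, 0);
	if (hs == MAP_FAILED)
		return 1;

	sem_init(&hs->to_parent, 1, 0);   /* pshared = 1: usable across fork() */
	sem_init(&hs->to_child, 1, 0);

	if (fork() == 0) {                /* child */
		sem_post(&hs->to_parent); /* tell the parent we are ready */
		sem_wait(&hs->to_child);  /* block until the parent has finished */
		_exit(0);
	}

	sem_wait(&hs->to_parent);         /* parent blocks until the child prods it */
	printf("parent: child is ready, tracing work would happen here\n");
	sem_post(&hs->to_child);          /* let the child continue */
	wait(NULL);
	return 0;
}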
/linux-master/arch/powerpc/platforms/44x/
misc_44x.S
18 sync
20 sync
23 sync
25 sync
32 sync
34 sync
37 sync
39 sync
/linux-master/drivers/gpu/drm/xe/
xe_sync.h
22 struct xe_sync_entry *sync,
25 int xe_sync_entry_wait(struct xe_sync_entry *sync);
26 int xe_sync_entry_add_deps(struct xe_sync_entry *sync,
28 void xe_sync_entry_signal(struct xe_sync_entry *sync,
31 void xe_sync_entry_cleanup(struct xe_sync_entry *sync);
33 xe_sync_in_fence_get(struct xe_sync_entry *sync, int num_sync,
36 static inline bool xe_sync_is_ufence(struct xe_sync_entry *sync) argument
38 return !!sync->ufence;
41 struct xe_user_fence *xe_sync_ufence_get(struct xe_sync_entry *sync);
xe_sync.c
103 struct xe_sync_entry *sync,
130 sync->syncobj = drm_syncobj_find(xef->drm, sync_in.handle);
131 if (XE_IOCTL_DBG(xe, !sync->syncobj))
135 sync->fence = drm_syncobj_fence_get(sync->syncobj);
136 if (XE_IOCTL_DBG(xe, !sync->fence))
151 sync->syncobj = drm_syncobj_find(xef->drm, sync_in.handle);
152 if (XE_IOCTL_DBG(xe, !sync->syncobj))
156 sync->chain_fence = dma_fence_chain_alloc();
157 if (!sync
102 xe_sync_entry_parse(struct xe_device *xe, struct xe_file *xef, struct xe_sync_entry *sync, struct drm_xe_sync __user *sync_user, unsigned int flags) argument
203 xe_sync_entry_wait(struct xe_sync_entry *sync) argument
211 xe_sync_entry_add_deps(struct xe_sync_entry *sync, struct xe_sched_job *job) argument
227 xe_sync_entry_signal(struct xe_sync_entry *sync, struct xe_sched_job *job, struct dma_fence *fence) argument
264 xe_sync_entry_cleanup(struct xe_sync_entry *sync) argument
291 xe_sync_in_fence_get(struct xe_sync_entry *sync, int num_sync, struct xe_exec_queue *q, struct xe_vm *vm) argument
354 xe_sync_ufence_get(struct xe_sync_entry *sync) argument
[all...]
/linux-master/drivers/clk/tegra/
clk-audio-sync.c
15 struct tegra_clk_sync_source *sync = to_clk_sync_source(hw); local
17 return sync->rate;
23 struct tegra_clk_sync_source *sync = to_clk_sync_source(hw); local
25 if (rate > sync->max_rate)
34 struct tegra_clk_sync_source *sync = to_clk_sync_source(hw); local
36 sync->rate = rate;
49 struct tegra_clk_sync_source *sync; local
53 sync = kzalloc(sizeof(*sync), GFP_KERNEL);
54 if (!sync) {
[all...]
/linux-master/drivers/gpu/drm/i915/selftests/
i915_syncmap.c
101 static int check_syncmap_free(struct i915_syncmap **sync) argument
103 i915_syncmap_free(sync);
104 if (*sync) {
105 pr_err("sync not cleared after free\n");
112 static int dump_syncmap(struct i915_syncmap *sync, int err) argument
117 return check_syncmap_free(&sync);
123 if (i915_syncmap_print_to_buf(sync, buf, PAGE_SIZE))
129 i915_syncmap_free(&sync);
135 struct i915_syncmap *sync = (void *)~0ul; local
142 i915_syncmap_init(&sync);
163 check_one(struct i915_syncmap **sync, u64 context, u32 seqno) argument
206 struct i915_syncmap *sync; local
238 check_leaf(struct i915_syncmap **sync, u64 context, u32 seqno) argument
273 struct i915_syncmap *sync; local
334 struct i915_syncmap *sync; local
404 struct i915_syncmap *sync; local
448 struct i915_syncmap *sync; local
546 struct i915_syncmap *sync; local
[all...]
/linux-master/drivers/gpu/drm/amd/amdgpu/
amdgpu_sync.h
43 * Container for fences used to sync command submissions.
49 void amdgpu_sync_create(struct amdgpu_sync *sync);
50 int amdgpu_sync_fence(struct amdgpu_sync *sync, struct dma_fence *f);
51 int amdgpu_sync_resv(struct amdgpu_device *adev, struct amdgpu_sync *sync,
54 struct dma_fence *amdgpu_sync_peek_fence(struct amdgpu_sync *sync,
56 struct dma_fence *amdgpu_sync_get_fence(struct amdgpu_sync *sync);
58 int amdgpu_sync_push_to_job(struct amdgpu_sync *sync, struct amdgpu_job *job);
59 int amdgpu_sync_wait(struct amdgpu_sync *sync, bool intr);
60 void amdgpu_sync_free(struct amdgpu_sync *sync);
amdgpu_sync.c
46 * amdgpu_sync_create - zero init sync object
48 * @sync: sync object to initialize
50 * Just clear the sync object for now.
52 void amdgpu_sync_create(struct amdgpu_sync *sync) argument
54 hash_init(sync->fences);
127 * @sync: sync object to add the fence to
133 static bool amdgpu_sync_add_later(struct amdgpu_sync *sync, struct dma_fence *f) argument
137 hash_for_each_possible(sync
155 amdgpu_sync_fence(struct amdgpu_sync *sync, struct dma_fence *f) argument
235 amdgpu_sync_resv(struct amdgpu_device *adev, struct amdgpu_sync *sync, struct dma_resv *resv, enum amdgpu_sync_mode mode, void *owner) argument
280 amdgpu_sync_peek_fence(struct amdgpu_sync *sync, struct amdgpu_ring *ring) argument
320 amdgpu_sync_get_fence(struct amdgpu_sync *sync) argument
379 amdgpu_sync_push_to_job(struct amdgpu_sync *sync, struct amdgpu_job *job) argument
403 amdgpu_sync_wait(struct amdgpu_sync *sync, bool intr) argument
427 amdgpu_sync_free(struct amdgpu_sync *sync) argument
[all...]
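
The amdgpu_sync.h/.c prototypes above describe a fence container: create it, fill it from a reservation object or from individual fences, then wait on everything and drop the references. The following is a hypothetical kernel-context sketch of that flow built only from the signatures shown in the hits; the function name is a placeholder, error handling is abbreviated, and it is not buildable outside a kernel module.

static int example_collect_and_wait(struct amdgpu_device *adev,
				    struct dma_resv *resv,
				    enum amdgpu_sync_mode mode, void *owner)
{
	struct amdgpu_sync sync;
	int r;

	amdgpu_sync_create(&sync);                    /* zero-init the container     */
	r = amdgpu_sync_resv(adev, &sync, resv, mode, owner); /* pull fences in      */
	if (!r)
		r = amdgpu_sync_wait(&sync, true);    /* interruptible CPU wait      */
	amdgpu_sync_free(&sync);                      /* release collected fences    */
	return r;
}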
/linux-master/io_uring/
sync.c
15 #include "sync.h"
27 struct io_sync *sync = io_kiocb_to_cmd(req, struct io_sync); local
32 sync->off = READ_ONCE(sqe->off);
33 sync->len = READ_ONCE(sqe->len);
34 sync->flags = READ_ONCE(sqe->sync_range_flags);
42 struct io_sync *sync = io_kiocb_to_cmd(req, struct io_sync); local
48 ret = sync_file_range(req->file, sync->off, sync->len, sync->flags);
55 struct io_sync *sync local
72 struct io_sync *sync = io_kiocb_to_cmd(req, struct io_sync); local
87 struct io_sync *sync = io_kiocb_to_cmd(req, struct io_sync); local
101 struct io_sync *sync = io_kiocb_to_cmd(req, struct io_sync); local
[all...]
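
The io_uring/sync.c hits above show the IORING_OP_SYNC_FILE_RANGE handler passing sync->off, sync->len and sync->flags to the same writeback machinery that backs the sync_file_range(2) syscall. A minimal, runnable userspace use of that syscall directly (the file name is a placeholder); build with gcc:

#define _GNU_SOURCE
#include <fcntl.h>
#include <stdio.h>
#include <unistd.h>

int main(void)
{
	int fd = open("data.bin", O_WRONLY | O_CREAT, 0644);
	if (fd < 0)
		return 1;

	write(fd, "hello", 5);

	/* Start writeback for the first 4 KiB of the file and wait for it. */
	if (sync_file_range(fd, 0, 4096,
			    SYNC_FILE_RANGE_WAIT_BEFORE |
			    SYNC_FILE_RANGE_WRITE |
			    SYNC_FILE_RANGE_WAIT_AFTER))
		perror("sync_file_range");

	close(fd);
	return 0;
}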
/linux-master/drivers/gpu/drm/radeon/
radeon_sync.c
35 * radeon_sync_create - zero init sync object
37 * @sync: sync object to initialize
39 * Just clear the sync object for now.
41 void radeon_sync_create(struct radeon_sync *sync) argument
46 sync->semaphores[i] = NULL;
49 sync->sync_to[i] = NULL;
51 sync->last_vm_update = NULL;
55 * radeon_sync_fence - use the semaphore to sync to a fence
57 * @sync
62 radeon_sync_fence(struct radeon_sync *sync, struct radeon_fence *fence) argument
89 radeon_sync_resv(struct radeon_device *rdev, struct radeon_sync *sync, struct dma_resv *resv, bool shared) argument
121 radeon_sync_rings(struct radeon_device *rdev, struct radeon_sync *sync, int ring) argument
196 radeon_sync_free(struct radeon_device *rdev, struct radeon_sync *sync, struct radeon_fence *fence) argument
[all...]
rv770_dma.c
36 * @resv: reservation object to sync to
48 struct radeon_sync sync; local
55 radeon_sync_create(&sync);
62 radeon_sync_free(rdev, &sync, NULL);
66 radeon_sync_resv(rdev, &sync, resv, false);
67 radeon_sync_rings(rdev, &sync, ring->idx);
86 radeon_sync_free(rdev, &sync, NULL);
91 radeon_sync_free(rdev, &sync, fence);
/linux-master/arch/powerpc/platforms/powermac/
cache.S
45 sync
52 sync
58 sync
60 sync
81 sync
88 sync
91 sync
96 sync
106 3: sync
110 3: sync
[all...]
/linux-master/arch/powerpc/kernel/
l2cr_6xx.S
100 sync
107 sync
118 sync
120 sync
156 sync
158 sync
182 sync
196 sync
198 sync
202 21: sync
[all...]
cpu_setup_ppc970.S
28 sync
31 sync
34 sync
37 sync
50 sync
104 sync
111 sync
155 sync
158 sync
162 sync
[all...]
misc_64.S
47 sync
53 sync
58 sync
62 sync
68 sync
73 sync
86 sync
88 sync
94 sync
100 sync
[all...]
/linux-master/drivers/gpu/drm/i915/gt/selftests/
mock_timeline.c
21 i915_syncmap_init(&timeline->sync);
28 i915_syncmap_free(&timeline->sync);
/linux-master/arch/powerpc/platforms/52xx/
mpc52xx_sleep.S
16 sync; isync;
25 sync
30 sync
40 sync; isync;
42 sync; isync;
57 sync; isync;
59 sync; isync;
73 sync
77 sync
82 sync
[all...]
/linux-master/arch/powerpc/boot/
motload-head.S
9 sync
/linux-master/drivers/gpu/drm/nouveau/dispnv50/
dac507d.c
33 u32 sync = 0; local
37 sync |= NVVAL(NV507D, DAC_SET_POLARITY, HSYNC, asyh->or.nhsync);
38 sync |= NVVAL(NV507D, DAC_SET_POLARITY, VSYNC, asyh->or.nvsync);
45 DAC_SET_POLARITY(or), sync);
/linux-master/arch/powerpc/include/asm/
synch.h
50 sync; \
53 # define LWSYNC sync
63 #define PPC_ATOMIC_ENTRY_BARRIER "\n" stringify_in_c(sync) "\n"
64 #define PPC_ATOMIC_EXIT_BARRIER "\n" stringify_in_c(sync) "\n"
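
The synch.h hits above wire LWSYNC and the atomic entry/exit barriers to the PowerPC sync instruction. As a small illustrative sketch (the function name is a placeholder, not a kernel macro), a full memory barrier of this kind can be emitted from C with inline assembly; this mirrors what mb()/smp_mb() expand to on that architecture and only compiles for PowerPC targets:

static inline void ppc_full_barrier(void)
{
	/* "sync" orders all prior loads/stores before all later ones;
	 * the "memory" clobber keeps the compiler from reordering too. */
	__asm__ __volatile__("sync" : : : "memory");
}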
/linux-master/arch/nios2/boot/
install.sh
30 sync
/linux-master/arch/m68k/
install.sh
30 sync
/linux-master/tools/testing/selftests/pstore/
pstore_crash_test
25 sync
/linux-master/net/caif/
cfserl.c
25 spinlock_t sync; member in struct:cfserl
49 spin_lock_init(&this->sync);
67 spin_lock(&layr->sync);
74 spin_unlock(&layr->sync);
94 spin_unlock(&layr->sync);
112 spin_unlock(&layr->sync);
131 spin_unlock(&layr->sync);
142 spin_unlock(&layr->sync);
156 spin_unlock(&layr->sync);
158 spin_lock(&layr->sync);
[all...]
/linux-master/tools/testing/selftests/kvm/
memslot_perf_test.c
149 "Unexpected sync ucall, got %lx",
288 struct sync_area *sync; local
349 sync = (typeof(sync))vm_gpa2hva(data, MEM_SYNC_GPA, NULL);
350 sync->guest_page_size = data->vm->page_size;
351 atomic_init(&sync->start_flag, false);
352 atomic_init(&sync->exit_flag, false);
353 atomic_init(&sync->sync_flag, false);
382 static void let_guest_run(struct sync_area *sync) argument
384 atomic_store_explicit(&sync
389 struct sync_area *sync = (typeof(sync))MEM_SYNC_GPA; local
395 make_guest_exit(struct sync_area *sync) argument
402 struct sync_area *sync = (typeof(sync))MEM_SYNC_GPA; local
415 host_perform_sync(struct sync_area *sync) argument
428 struct sync_area *sync = (typeof(sync))MEM_SYNC_GPA; local
446 struct sync_area *sync = (typeof(sync))MEM_SYNC_GPA; local
475 struct sync_area *sync = (typeof(sync))MEM_SYNC_GPA; local
507 struct sync_area *sync = (typeof(sync))MEM_SYNC_GPA; local
542 struct sync_area *sync = (typeof(sync))MEM_SYNC_GPA; local
574 test_memslot_move_prepare(struct vm_data *data, struct sync_area *sync, uint64_t *maxslots, bool isactive) argument
605 test_memslot_move_prepare_active(struct vm_data *data, struct sync_area *sync, uint64_t *maxslots) argument
612 test_memslot_move_prepare_inactive(struct vm_data *data, struct sync_area *sync, uint64_t *maxslots) argument
619 test_memslot_move_loop(struct vm_data *data, struct sync_area *sync) argument
672 test_memslot_map_loop(struct vm_data *data, struct sync_area *sync) argument
710 test_memslot_unmap_loop_common(struct vm_data *data, struct sync_area *sync, uint64_t chunk) argument
737 test_memslot_unmap_loop(struct vm_data *data, struct sync_area *sync) argument
748 test_memslot_unmap_loop_chunked(struct vm_data *data, struct sync_area *sync) argument
757 test_memslot_rw_loop(struct vm_data *data, struct sync_area *sync) argument
800 struct sync_area *sync; local
[all...]
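
The memslot_perf_test.c hits above show host and guest coordinating through atomic start/exit flags in a shared struct sync_area, using atomic_init and atomic_store_explicit. A standalone two-thread analogue with the same C11 atomics (thread names and structure are illustrative, not the test's code; the real test shares the flags through guest memory rather than between threads). Build with: gcc -pthread flags.c

#include <pthread.h>
#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

static atomic_bool start_flag;
static atomic_bool exit_flag;

static void *worker(void *arg)
{
	/* Spin until released, acquire-ordered like the guest's start check. */
	while (!atomic_load_explicit(&start_flag, memory_order_acquire))
		;

	puts("worker: started");

	/* The measured work loop would run here until told to stop. */
	while (!atomic_load_explicit(&exit_flag, memory_order_acquire))
		;

	return NULL;
}

int main(void)
{
	pthread_t t;

	atomic_init(&start_flag, false);
	atomic_init(&exit_flag, false);

	pthread_create(&t, NULL, worker, NULL);
	atomic_store_explicit(&start_flag, true, memory_order_release); /* let_guest_run analogue */
	atomic_store_explicit(&exit_flag, true, memory_order_release);  /* make_guest_exit analogue */
	pthread_join(t, NULL);
	return 0;
}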

Completed in 617 milliseconds
