Searched refs:lock (Results 126 - 150 of 6514) sorted by relevance


/linux-master/drivers/gpu/drm/amd/display/dc/dce/
dmub_hw_lock_mgr.h    33 bool lock,
/linux-master/drivers/usb/gadget/function/
u_midi.h    32 struct mutex lock; member in struct:f_midi_opts
u_ether_configfs.h    35 mutex_lock(&opts->lock); \
37 mutex_unlock(&opts->lock); \
48 mutex_lock(&opts->lock); \
50 mutex_unlock(&opts->lock); \
55 mutex_unlock(&opts->lock); \
70 mutex_lock(&opts->lock); \
72 mutex_unlock(&opts->lock); \
83 mutex_lock(&opts->lock); \
85 mutex_unlock(&opts->lock); \
90 mutex_unlock(&opts->lock); \
[all...]
/linux-master/include/linux/
ww_mutex.h    89 * @lock: the mutex to be initialized
98 static inline void ww_mutex_init(struct ww_mutex *lock, argument
101 ww_mutex_base_init(&lock->base, ww_class->mutex_name, &ww_class->mutex_key);
102 lock->ctx = NULL;
104 lock->ww_class = ww_class;
116 * a given lock class. Deadlocks will be detected and handled with the
126 * to the usual locking rules between different lock classes.
161 * Marks the end of the acquire phase, any further w/w mutex lock calls using
207 * @lock: the mutex to be acquired
208 * @ctx: w/w acquire context, or NULL to acquire only a single lock
293 ww_mutex_lock_slow(struct ww_mutex *lock, struct ww_acquire_ctx *ctx) argument
329 ww_mutex_lock_slow_interruptible(struct ww_mutex *lock, struct ww_acquire_ctx *ctx) argument
351 ww_mutex_destroy(struct ww_mutex *lock) argument
364 ww_mutex_is_locked(struct ww_mutex *lock) argument
[all...]
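For context, a minimal sketch of the acquire-context/backoff pattern that ww_mutex.h documents in the lines above. The demo_ww_class, demo_obj and demo_lock_pair names are hypothetical and not taken from the hits; the structure follows the documented flow of ww_acquire_init(), ww_mutex_lock(), ww_mutex_lock_slow() on -EDEADLK, ww_acquire_done() and ww_acquire_fini().

#include <linux/kernel.h>
#include <linux/ww_mutex.h>

static DEFINE_WW_CLASS(demo_ww_class);		/* hypothetical lock class */

struct demo_obj {
	struct ww_mutex lock;
};

static int demo_lock_pair(struct demo_obj *a, struct demo_obj *b)
{
	struct demo_obj *objs[] = { a, b };
	struct demo_obj *contended = NULL;
	struct ww_acquire_ctx ctx;
	int i, n, ret;

	ww_acquire_init(&ctx, &demo_ww_class);
retry:
	for (n = 0; n < ARRAY_SIZE(objs); n++) {
		if (objs[n] == contended) {
			/* Already held via the slow path below; skip it. */
			contended = NULL;
			continue;
		}
		ret = ww_mutex_lock(&objs[n]->lock, &ctx);
		if (ret)
			goto unwind;
	}

	ww_acquire_done(&ctx);		/* end of the acquire phase */

	/* ... critical section over both objects ... */

	for (n = 0; n < ARRAY_SIZE(objs); n++)
		ww_mutex_unlock(&objs[n]->lock);
	ww_acquire_fini(&ctx);
	return 0;

unwind:
	/* Drop everything taken in this pass, in reverse order. */
	for (i = n - 1; i >= 0; i--)
		ww_mutex_unlock(&objs[i]->lock);
	if (contended)
		ww_mutex_unlock(&contended->lock);

	if (ret == -EDEADLK) {
		/* Lost the age race: take the contended mutex uncontested
		 * in the slow path, then retry the whole set. */
		ww_mutex_lock_slow(&objs[n]->lock, &ctx);
		contended = objs[n];
		goto retry;
	}
	ww_acquire_fini(&ctx);
	return ret;
}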
semaphore.h    16 raw_spinlock_t lock; member in struct:semaphore
23 .lock = __RAW_SPIN_LOCK_UNLOCKED((name).lock), \
41 lockdep_init_map(&sem->lock.dep_map, "semaphore->lock", &__key, 0);
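For context, a short sketch of the counting-semaphore API whose internals these lines show: sema_init() is where the embedded raw spinlock and lockdep map from lines 16/23/41 get set up. Names are hypothetical.

#include <linux/errno.h>
#include <linux/semaphore.h>

static struct semaphore demo_sem;

static void demo_init(void)
{
	sema_init(&demo_sem, 1);	/* binary use: one holder at a time */
}

static int demo_do_work(void)
{
	if (down_interruptible(&demo_sem))
		return -EINTR;		/* interrupted while sleeping on the count */

	/* ... exclusive section ... */

	up(&demo_sem);
	return 0;
}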
/linux-master/drivers/gpu/drm/i915/gt/
intel_engine_stats.h    28 write_seqcount_begin(&stats->lock);
33 write_seqcount_end(&stats->lock);
51 write_seqcount_begin(&stats->lock);
57 write_seqcount_end(&stats->lock);
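The intel_engine_stats.h hits wrap statistics updates in write_seqcount_begin()/write_seqcount_end(). A generic sketch of the seqcount writer/reader pairing, using a spinlock-associated seqcount so the writer side is serialized; names are hypothetical.

#include <linux/seqlock.h>
#include <linux/spinlock.h>
#include <linux/types.h>

struct demo_stats {
	spinlock_t sl;
	seqcount_spinlock_t seq;
	u64 total_ns;
};

static void demo_stats_init(struct demo_stats *st)
{
	spin_lock_init(&st->sl);
	seqcount_spinlock_init(&st->seq, &st->sl);
	st->total_ns = 0;
}

/* Writer: serialized by the spinlock; readers never block it. */
static void demo_stats_add(struct demo_stats *st, u64 delta_ns)
{
	spin_lock(&st->sl);
	write_seqcount_begin(&st->seq);
	st->total_ns += delta_ns;
	write_seqcount_end(&st->seq);
	spin_unlock(&st->sl);
}

/* Lockless reader: retry if a writer raced with us. */
static u64 demo_stats_read(struct demo_stats *st)
{
	unsigned int seq;
	u64 val;

	do {
		seq = read_seqcount_begin(&st->seq);
		val = st->total_ns;
	} while (read_seqcount_retry(&st->seq, seq));

	return val;
}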
/linux-master/drivers/infiniband/sw/rxe/
rxe_task.c    33 * Context: caller should hold task->lock.
57 * Context: caller should hold task->lock.
79 spin_lock_irqsave(&task->lock, flags);
81 spin_unlock_irqrestore(&task->lock, flags);
113 spin_lock_irqsave(&task->lock, flags);
117 spin_unlock_irqrestore(&task->lock, flags);
120 spin_unlock_irqrestore(&task->lock, flags);
130 spin_lock_irqsave(&task->lock, flags);
173 spin_unlock_irqrestore(&task->lock, flags);
198 spin_lock_init(&task->lock);
[all...]
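The rxe_task.c hits show a common split: internal helpers documented as "Context: caller should hold task->lock" plus public entry points that take the lock with spin_lock_irqsave(). A generic sketch of that split with hypothetical names:

#include <linux/lockdep.h>
#include <linux/spinlock.h>

struct demo_task {
	spinlock_t lock;
	int state;
};

/* Context: caller should hold task->lock. */
static void __demo_task_set_state(struct demo_task *task, int state)
{
	lockdep_assert_held(&task->lock);
	task->state = state;
}

/* Callable from any context: saving and restoring the IRQ flags keeps
 * the lock safe if the same data is also touched from interrupt context. */
static void demo_task_set_state(struct demo_task *task, int state)
{
	unsigned long flags;

	spin_lock_irqsave(&task->lock, flags);
	__demo_task_set_state(task, state);
	spin_unlock_irqrestore(&task->lock, flags);
}

static void demo_task_init(struct demo_task *task)
{
	spin_lock_init(&task->lock);
	task->state = 0;
}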
/linux-master/drivers/clk/tegra/
clk-pll-out.c    36 if (pll_out->lock)
37 spin_lock_irqsave(pll_out->lock, flags);
46 if (pll_out->lock)
47 spin_unlock_irqrestore(pll_out->lock, flags);
58 if (pll_out->lock)
59 spin_lock_irqsave(pll_out->lock, flags);
68 if (pll_out->lock)
69 spin_unlock_irqrestore(pll_out->lock, flags);
90 spinlock_t *lock)
110 pll_out->lock
87 tegra_clk_register_pll_out(const char *name, const char *parent_name, void __iomem *reg, u8 enb_bit_idx, u8 rst_bit_idx, unsigned long flags, u8 pll_out_flags, spinlock_t *lock) argument
[all...]
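The clk-pll-out.c hits show Tegra's optional register lock: the spinlock pointer may be NULL when no other clock shares the register, so every lock/unlock is guarded by a NULL check. A generic sketch of that conditional-locking pattern, with hypothetical names and register layout:

#include <linux/bits.h>
#include <linux/io.h>
#include <linux/spinlock.h>
#include <linux/types.h>

struct demo_clk_out {
	void __iomem *reg;
	u8 enb_bit_idx;
	spinlock_t *lock;	/* optional: shared with other clocks on the same register */
};

static void demo_clk_out_enable(struct demo_clk_out *out)
{
	unsigned long flags = 0;
	u32 val;

	/* The lock pointer may be NULL when nothing else touches this register. */
	if (out->lock)
		spin_lock_irqsave(out->lock, flags);

	val = readl_relaxed(out->reg);
	val |= BIT(out->enb_bit_idx);
	writel_relaxed(val, out->reg);

	if (out->lock)
		spin_unlock_irqrestore(out->lock, flags);
}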
/linux-master/fs/lockd/
svclock.c    47 static int nlmsvc_setgrantargs(struct nlm_rqst *call, struct nlm_lock *lock);
87 * Insert a blocked lock into the global list
144 * Find a block for a given lock
147 nlmsvc_lookup_block(struct nlm_file *file, struct nlm_lock *lock) argument
153 file, lock->fl.c.flc_pid,
154 (long long)lock->fl.fl_start,
155 (long long)lock->fl.fl_end,
156 lock->fl.c.flc_type);
159 fl = &block->b_call->a_args.lock.fl;
165 if (block->b_file == file && nlm_compare_locks(fl, &lock
225 nlmsvc_create_block(struct svc_rqst *rqstp, struct nlm_host *host, struct nlm_file *file, struct nlm_lock *lock, struct nlm_cookie *cookie) argument
404 nlmsvc_release_lockowner(struct nlm_lock *lock) argument
420 nlmsvc_setgrantargs(struct nlm_rqst *call, struct nlm_lock *lock) argument
480 nlmsvc_lock(struct svc_rqst *rqstp, struct nlm_file *file, struct nlm_host *host, struct nlm_lock *lock, int wait, struct nlm_cookie *cookie, int reclaim) argument
610 nlmsvc_testlock(struct svc_rqst *rqstp, struct nlm_file *file, struct nlm_host *host, struct nlm_lock *lock, struct nlm_lock *conflock, struct nlm_cookie *cookie) argument
671 nlmsvc_unlock(struct net *net, struct nlm_file *file, struct nlm_lock *lock) argument
706 nlmsvc_cancel_blocked(struct net *net, struct nlm_file *file, struct nlm_lock *lock) argument
844 struct nlm_lock *lock = &block->b_call->a_args.lock; local
[all...]
trace.h    50 const struct nlm_lock *lock,
56 TP_ARGS(lock, addr, addrlen, status),
69 __entry->oh = ~crc32_le(0xffffffff, lock->oh.data, lock->oh.len);
70 __entry->svid = lock->svid;
71 __entry->fh = nfs_fhandle_hash(&lock->fh);
72 __entry->start = lock->lock_start;
73 __entry->len = lock->lock_len;
89 const struct nlm_lock *lock, \
94 TP_ARGS(lock, add
[all...]
clntxdr.c    61 static void nlm_compute_offsets(const struct nlm_lock *lock, argument
64 const struct file_lock *fl = &lock->fl;
237 const struct nlm_lock *lock = &result->lock; local
241 encode_bool(xdr, lock->fl.c.flc_type == F_RDLCK);
242 encode_int32(xdr, lock->svid);
243 encode_netobj(xdr, lock->oh.data, lock->oh.len);
246 nlm_compute_offsets(lock, &l_offset, &l_len);
253 struct nlm_lock *lock local
319 encode_nlm_lock(struct xdr_stream *xdr, const struct nlm_lock *lock) argument
357 const struct nlm_lock *lock = &args->lock; local
379 const struct nlm_lock *lock = &args->lock; local
402 const struct nlm_lock *lock = &args->lock; local
421 const struct nlm_lock *lock = &args->lock; local
[all...]
/linux-master/fs/
fs_pin.c    16 spin_lock_irq(&pin->wait.lock);
19 spin_unlock_irq(&pin->wait.lock);
39 spin_lock_irq(&p->wait.lock);
42 spin_unlock_irq(&p->wait.lock);
48 spin_unlock_irq(&p->wait.lock);
55 spin_unlock_irq(&p->wait.lock);
62 spin_lock_irq(&p->wait.lock);
64 spin_unlock_irq(&p->wait.lock);
/linux-master/drivers/clk/visconti/
reset.h    25 spinlock_t *lock; member in struct:visconti_reset
35 spinlock_t *lock);
/linux-master/arch/s390/include/asm/
spinlock.h    28 * Simple spin lock operations. There are two variants, one clears IRQ's
36 void arch_spin_relax(arch_spinlock_t *lock);
48 static inline int arch_spin_value_unlocked(arch_spinlock_t lock) argument
50 return lock.lock == 0;
55 return READ_ONCE(lp->lock) != 0;
61 return likely(__atomic_cmpxchg_bool(&lp->lock, 0, SPINLOCK_LOCKVAL));
79 typecheck(int, lp->lock);
84 : "=R" (((unsigned short *) &lp->lock)[1])
95 * irq-safe write-lock, bu
[all...]
/linux-master/net/can/j1939/
bus.c    41 lockdep_assert_held(&priv->lock);
53 lockdep_assert_held(&priv->lock);
77 lockdep_assert_held(&priv->lock);
93 write_lock_bh(&ecu->priv->lock);
95 write_unlock_bh(&ecu->priv->lock);
102 write_lock_bh(&priv->lock);
106 write_unlock_bh(&priv->lock);
133 write_lock_bh(&priv->lock);
143 write_unlock_bh(&priv->lock);
152 lockdep_assert_held(&priv->lock);
[all...]
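The j1939 bus.c hits combine lockdep_assert_held() in internal lookup helpers with write_lock_bh()/write_unlock_bh() in the public entry points, since the protected list is also walked from softirq context. A generic sketch of that split, with hypothetical names:

#include <linux/list.h>
#include <linux/lockdep.h>
#include <linux/spinlock.h>
#include <linux/types.h>

struct demo_ecu {
	struct list_head node;
	u8 addr;
};

struct demo_bus {
	rwlock_t lock;			/* protects the ecu list */
	struct list_head ecus;
};

static void demo_bus_init(struct demo_bus *bus)
{
	rwlock_init(&bus->lock);
	INIT_LIST_HEAD(&bus->ecus);
}

/* Internal helper: documents (and, with lockdep, enforces) that the
 * caller already holds the bus lock. */
static struct demo_ecu *__demo_ecu_find(struct demo_bus *bus, u8 addr)
{
	struct demo_ecu *ecu;

	lockdep_assert_held(&bus->lock);
	list_for_each_entry(ecu, &bus->ecus, node)
		if (ecu->addr == addr)
			return ecu;
	return NULL;
}

/* Writers disable bottom halves because readers run from packet receive. */
static void demo_ecu_add(struct demo_bus *bus, struct demo_ecu *ecu)
{
	write_lock_bh(&bus->lock);
	if (!__demo_ecu_find(bus, ecu->addr))
		list_add_tail(&ecu->node, &bus->ecus);
	write_unlock_bh(&bus->lock);
}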
/linux-master/tools/testing/selftests/bpf/progs/
test_map_lock.c    10 struct bpf_spin_lock lock; member in struct:hmap_elem
22 struct bpf_spin_lock lock; member in struct:array_elem
45 bpf_spin_lock(&val->lock);
48 bpf_spin_unlock(&val->lock);
54 bpf_spin_lock(&q->lock);
57 bpf_spin_unlock(&q->lock);
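test_map_lock.c embeds a struct bpf_spin_lock inside each map value and takes it around updates to the value's other fields. A minimal BPF-side sketch of the same idea; the map, section and field names are hypothetical, and it assumes a program type that permits bpf_spin_lock (e.g. tc).

#include <linux/bpf.h>
#include <bpf/bpf_helpers.h>

struct counter_elem {
	struct bpf_spin_lock lock;	/* guards the fields below in the same value */
	long hits;
};

struct {
	__uint(type, BPF_MAP_TYPE_ARRAY);
	__uint(max_entries, 1);
	__type(key, int);
	__type(value, struct counter_elem);
} counters SEC(".maps");

SEC("tc")
int demo_count(struct __sk_buff *skb)
{
	int key = 0;
	struct counter_elem *val;

	val = bpf_map_lookup_elem(&counters, &key);
	if (!val)
		return 0;

	bpf_spin_lock(&val->lock);	/* serializes concurrent CPUs on this element */
	val->hits++;
	bpf_spin_unlock(&val->lock);
	return 0;
}

char LICENSE[] SEC("license") = "GPL";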
refcounted_kptr.c    41 private(A) struct bpf_spin_lock lock;
75 struct bpf_spin_lock *lock)
87 bpf_spin_lock(lock);
90 bpf_spin_unlock(lock);
94 bpf_spin_unlock(lock);
96 bpf_spin_lock(lock);
99 bpf_spin_unlock(lock);
102 bpf_spin_unlock(lock);
107 struct bpf_spin_lock *lock)
130 bpf_spin_lock(lock);
73 __insert_in_tree_and_list(struct bpf_list_head *head, struct bpf_rb_root *root, struct bpf_spin_lock *lock) argument
106 __stash_map_insert_tree(int idx, int val, struct bpf_rb_root *root, struct bpf_spin_lock *lock) argument
140 __read_from_tree(struct bpf_rb_root *root, struct bpf_spin_lock *lock, bool remove_from_tree) argument
173 __read_from_list(struct bpf_list_head *head, struct bpf_spin_lock *lock, bool remove_from_list) argument
[all...]
/linux-master/arch/arm/mach-spear/
pl080.c    21 static spinlock_t lock = __SPIN_LOCK_UNLOCKED(x); variable
33 spin_lock_irqsave(&lock, flags);
38 spin_unlock_irqrestore(&lock, flags);
58 spin_unlock_irqrestore(&lock, flags);
67 spin_lock_irqsave(&lock, flags);
75 spin_unlock_irqrestore(&lock, flags);
/linux-master/drivers/gpu/drm/
drm_modeset_lock.c    46 * foreach (lock in random_ordered_set_of_locks) {
47 * ret = drm_modeset_lock(lock, ctx)
65 * If all that is needed is a single modeset lock, then the &struct
106 drm_printf(&p, "attempting to lock a contended lock without backoff:\n%s", buf);
136 * This function is deprecated. It allocates a lock acquisition context and
190 * This function is deprecated. It uses the lock acquisition context stored
280 struct drm_modeset_lock *lock; local
282 lock = list_first_entry(&ctx->locked,
285 drm_modeset_unlock(lock);
290 modeset_lock(struct drm_modeset_lock *lock, struct drm_modeset_acquire_ctx *ctx, bool interruptible, bool slow) argument
366 drm_modeset_lock_init(struct drm_modeset_lock *lock) argument
392 drm_modeset_lock(struct drm_modeset_lock *lock, struct drm_modeset_acquire_ctx *ctx) argument
412 drm_modeset_lock_single_interruptible(struct drm_modeset_lock *lock) argument
422 drm_modeset_unlock(struct drm_modeset_lock *lock) argument
[all...]
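The drm_modeset_lock.c comment quoted above (line 46) refers to the canonical acquire-context loop. A sketch of that loop over a device's CRTCs, following the backoff-and-retry flow the file documents; the function name is hypothetical.

#include <drm/drm_crtc.h>
#include <drm/drm_device.h>
#include <drm/drm_modeset_lock.h>

static int demo_lock_all_crtcs(struct drm_device *dev)
{
	struct drm_modeset_acquire_ctx ctx;
	struct drm_crtc *crtc;
	int ret;

	drm_modeset_acquire_init(&ctx, 0);
retry:
	drm_for_each_crtc(crtc, dev) {
		ret = drm_modeset_lock(&crtc->mutex, &ctx);
		if (ret)
			goto fail;
	}

	/* ... all CRTC locks held: do the modeset work ... */

	drm_modeset_drop_locks(&ctx);
	drm_modeset_acquire_fini(&ctx);
	return 0;

fail:
	if (ret == -EDEADLK) {
		/* Drop what we hold, block on the contended lock, retry. */
		ret = drm_modeset_backoff(&ctx);
		if (!ret)
			goto retry;
	}
	drm_modeset_drop_locks(&ctx);
	drm_modeset_acquire_fini(&ctx);
	return ret;
}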
/linux-master/drivers/net/wireguard/
peerlookup.c    31 mutex_init(&table->lock);
38 mutex_lock(&table->lock);
41 mutex_unlock(&table->lock);
47 mutex_lock(&table->lock);
49 mutex_unlock(&table->lock);
91 spin_lock_init(&table->lock);
124 spin_lock_bh(&table->lock);
126 spin_unlock_bh(&table->lock);
141 /* Once we've found an unused slot, we lock it, and then double-check
144 spin_lock_bh(&table->lock);
[all...]
/linux-master/kernel/irq/
autoprobe.c    46 raw_spin_lock_irq(&desc->lock);
57 raw_spin_unlock_irq(&desc->lock);
69 raw_spin_lock_irq(&desc->lock);
75 raw_spin_unlock_irq(&desc->lock);
87 raw_spin_lock_irq(&desc->lock);
98 raw_spin_unlock_irq(&desc->lock);
124 raw_spin_lock_irq(&desc->lock);
132 raw_spin_unlock_irq(&desc->lock);
163 raw_spin_lock_irq(&desc->lock);
174 raw_spin_unlock_irq(&desc->lock);
[all...]
/linux-master/sound/pci/asihpi/
hpios.h    86 spinlock_t lock; /* SEE hpios_spinlock */ member in struct:hpios_spinlock
101 so ordinary lock will do
103 spin_lock(&((l)->lock));
106 spin_lock_bh(&((l)->lock));
114 spin_unlock_bh(&((l)->lock));
116 spin_unlock(&((l)->lock));
119 #define hpios_msgxlock_init(obj) spin_lock_init(&(obj)->lock)
123 #define hpios_dsplock_init(obj) spin_lock_init(&(obj)->dsp_lock.lock)
132 #define hpios_alistlock_init(obj) spin_lock_init(&((obj)->list_lock.lock))
133 #define hpios_alistlock_lock(obj) spin_lock(&((obj)->list_lock.lock))
[all...]
/linux-master/fs/erofs/
pcpubuf.c    13 raw_spinlock_t lock; member in struct:erofs_pcpubuf
22 __acquires(pcb->lock)
26 raw_spin_lock(&pcb->lock);
29 raw_spin_unlock(&pcb->lock);
31 /* (for sparse checker) pretend pcb->lock is still taken */
32 __acquire(pcb->lock);
38 void erofs_put_pcpubuf(void *ptr) __releases(pcb->lock)
43 raw_spin_unlock(&pcb->lock);
87 raw_spin_lock(&pcb->lock);
94 raw_spin_unlock(&pcb->lock);
[all...]
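The pcpubuf.c hits show sparse context annotations for a lock that is taken in a "get" helper and released only in the matching "put": __acquires()/__releases() tell sparse the imbalance is intentional. A generic sketch of that annotation pattern with hypothetical names:

#include <linux/percpu.h>
#include <linux/smp.h>
#include <linux/spinlock.h>

struct demo_pcpubuf {
	raw_spinlock_t lock;
	void *ptr;
};

static DEFINE_PER_CPU(struct demo_pcpubuf, demo_pcb);

/* Returns with the current CPU's buffer lock held (and preemption off). */
static void *demo_get_pcpubuf(void)
	__acquires(pcb->lock)
{
	struct demo_pcpubuf *pcb = &get_cpu_var(demo_pcb);

	raw_spin_lock(&pcb->lock);
	return pcb->ptr;
}

/* Pairs with demo_get_pcpubuf(). */
static void demo_put_pcpubuf(void)
	__releases(pcb->lock)
{
	struct demo_pcpubuf *pcb = &per_cpu(demo_pcb, smp_processor_id());

	raw_spin_unlock(&pcb->lock);
	put_cpu_var(demo_pcb);
}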
/linux-master/drivers/mmc/host/
mmc_hsq.c    50 spin_lock_irqsave(&hsq->lock, flags);
54 spin_unlock_irqrestore(&hsq->lock, flags);
60 spin_unlock_irqrestore(&hsq->lock, flags);
70 spin_unlock_irqrestore(&hsq->lock, flags);
116 spin_lock_irqsave(&hsq->lock, flags);
131 spin_unlock_irqrestore(&hsq->lock, flags);
135 spin_unlock_irqrestore(&hsq->lock, flags);
158 spin_lock_irqsave(&hsq->lock, flags);
161 spin_unlock_irqrestore(&hsq->lock, flags);
170 spin_unlock_irqrestore(&hsq->lock, flag
[all...]
/linux-master/arch/s390/lib/
spinlock.c    73 static inline int arch_load_niai4(int *lock) argument
80 : "=d" (owner) : "Q" (*lock) : "memory");
84 static inline int arch_cmpxchg_niai8(int *lock, int old, int new) argument
91 : "=d" (old), "=Q" (*lock)
92 : "0" (old), "d" (new), "Q" (*lock)
97 static inline struct spin_wait *arch_spin_decode_tail(int lock) argument
101 ix = (lock & _Q_TAIL_IDX_MASK) >> _Q_TAIL_IDX_OFFSET;
102 cpu = (lock & _Q_TAIL_CPU_MASK) >> _Q_TAIL_CPU_OFFSET;
106 static inline int arch_spin_yield_target(int lock, struct spin_wait *node) argument
108 if (lock
[all...]

Completed in 1086 milliseconds
