Lines Matching refs:wq

58 static int enqueue(struct workqueue_struct *wq, struct work_struct *w) {
59 thread_mutex_lock(&wq->work_list_lock);
60 list_add_tail(&w->list, &wq->work_list);
61 thread_mutex_unlock(&wq->work_list_lock);
63 thread_cond_signal(&wq->cond);
67 static struct work_struct *dequeue(struct workqueue_struct *wq) {
69 if (list_empty(&wq->work_list)) {
70 thread_cond_wait(&wq->cond, NULL);
72 thread_mutex_lock(&wq->work_list_lock);
73 w = list_entry(wq->work_list.next, struct work_struct, list);
75 thread_mutex_unlock(&wq->work_list_lock);
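
The matches at lines 58-75 are the core of the shim: a work_list guarded by work_list_lock, plus a condition variable that wakes the worker. As matched, though, the list_empty() test at line 69 and the wait at line 70 both run before the lock is taken at line 72, and no unlink of the dequeued entry appears among the matches; unless the unmatched lines change that picture, the check-then-sleep is not atomic and a wakeup can be lost. For contrast, here is a minimal sketch of the conventional pattern, using POSIX threads and a plain tail-pointer list instead of the thread_* wrappers and list_head helpers; every name is illustrative, nothing below is taken from the source file.

#include <pthread.h>
#include <stddef.h>

/* Illustrative work item: a callback plus an intrusive next pointer,
 * standing in for struct work_struct and its list_head. */
struct sketch_work {
    void (*func)(struct sketch_work *self);
    struct sketch_work *next;
};

/* Illustrative queue: a tail-pointer list guarded by one mutex, plus a
 * condition variable that wakes the worker when work arrives. */
struct sketch_wq {
    pthread_mutex_t lock;
    pthread_cond_t cond;
    struct sketch_work *head;
    struct sketch_work **tail;
    int shutting_down;      /* set under the lock at teardown, then broadcast */
};

static void sketch_wq_init(struct sketch_wq *q)
{
    pthread_mutex_init(&q->lock, NULL);
    pthread_cond_init(&q->cond, NULL);
    q->head = NULL;
    q->tail = &q->head;
    q->shutting_down = 0;
}

/* Enqueue: append under the lock, then wake one sleeping worker. */
static void sketch_enqueue(struct sketch_wq *q, struct sketch_work *w)
{
    w->next = NULL;
    pthread_mutex_lock(&q->lock);
    *q->tail = w;
    q->tail = &w->next;
    pthread_mutex_unlock(&q->lock);
    pthread_cond_signal(&q->cond);
}

/* Dequeue: the emptiness test, the sleep and the unlink all happen with
 * the mutex held, and the wait sits in a loop so a spurious wakeup (or a
 * wakeup consumed by another worker) just re-checks the list. */
static struct sketch_work *sketch_dequeue(struct sketch_wq *q)
{
    struct sketch_work *w;

    pthread_mutex_lock(&q->lock);
    while (q->head == NULL && !q->shutting_down)
        pthread_cond_wait(&q->cond, &q->lock);
    w = q->head;
    if (w != NULL) {
        q->head = w->next;              /* unlink before dropping the lock */
        if (q->head == NULL)
            q->tail = &q->head;
    }
    pthread_mutex_unlock(&q->lock);
    return w;                           /* NULL only at shutdown */
}
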
80 static int exec_work(void *wq) {
83 w = dequeue(wq);
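
exec_work() at line 80 is the function later handed to thread_create() (line 191): the worker thread's body, which loops pulling items off the queue and running them. A short sketch of that loop, reusing the hypothetical helpers from the sketch above:

#include <stddef.h>

/* Illustrative worker body in the spirit of exec_work(): run items until
 * the dequeue reports shutdown by returning NULL.  The types and the
 * dequeue helper are the made-up ones sketched above. */
struct sketch_wq;
struct sketch_work {
    void (*func)(struct sketch_work *self);
    struct sketch_work *next;
};

struct sketch_work *sketch_dequeue(struct sketch_wq *q);    /* assumed */

static void *sketch_worker(void *arg)
{
    struct sketch_wq *q = arg;
    struct sketch_work *w;

    while ((w = sketch_dequeue(q)) != NULL)
        w->func(w);                 /* run the callback outside the lock */
    return NULL;                    /* pthread_create-compatible return  */
}
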
136 static inline int queue_work(struct workqueue_struct *wq,
138 return enqueue(wq, work);
143 queue_delayed_work(struct workqueue_struct *wq, struct delayed_work *work,
148 work->work.taskqueue = wq->taskqueue;
149 if (atomic_read(&wq->draining) != 0) {
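
Lines 136-149 are the submission paths: queue_work() is a straight call into enqueue(), while queue_delayed_work() records which backing taskqueue will run the item (line 148) and refuses new work while the queue is draining (line 149). The delay itself is handled by the taskqueue layer, which the matches do not show, so the sketch below fakes it with a detached sleeper thread purely to stay self-contained; sketch_queue_work and sketch_wq_accepting are assumed helpers, not code from the listing.

#include <pthread.h>
#include <stdbool.h>
#include <time.h>

/* Illustrative delayed-submission path for lines 143-149: the delayed
 * item remembers its target queue, submission is refused while that
 * queue is draining, and a zero delay degenerates to plain queue_work. */
struct sketch_wq;                                    /* opaque, from the queue sketch  */
struct sketch_work;

int  sketch_queue_work(struct sketch_wq *q, struct sketch_work *w);  /* assumed: 0 on success */
bool sketch_wq_accepting(struct sketch_wq *q);       /* assumed: false while draining  */

struct sketch_delayed_work {
    struct sketch_work *work;       /* item to run; a pointer, to keep the type opaque */
    struct sketch_wq   *target;     /* queue that will run it, as at line 148          */
    struct timespec     delay;      /* how long to hold it back                        */
};

static void *sketch_delay_timer(void *arg)
{
    struct sketch_delayed_work *dw = arg;

    nanosleep(&dw->delay, NULL);                    /* wait out the delay   */
    sketch_queue_work(dw->target, dw->work);        /* then submit normally */
    return NULL;
}

/* Returns 1 if the work was accepted, 0 if it was refused or could not
 * be scheduled. */
static int sketch_queue_delayed_work(struct sketch_wq *q,
                                     struct sketch_delayed_work *dw,
                                     struct timespec delay)
{
    pthread_t t;

    if (!sketch_wq_accepting(q))                    /* drain in progress    */
        return 0;
    dw->target = q;
    dw->delay = delay;
    if (delay.tv_sec == 0 && delay.tv_nsec == 0)    /* no delay: submit now */
        return sketch_queue_work(q, dw->work) == 0;
    if (pthread_create(&t, NULL, sketch_delay_timer, dw) != 0)
        return 0;
    pthread_detach(t);
    return 1;
}

The per-item sleeper thread is only a stand-in; a real timer (a timeout task on the backing taskqueue, a timerfd, an event loop) would take sketch_delay_timer's place.
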
166 struct workqueue_struct wq;
168 wq.taskqueue = taskqueue_thread;
169 atomic_set(&wq.draining, 0);
170 return (queue_delayed_work(&wq, dwork, delay));
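
Lines 166-170 look like the schedule_delayed_work()-style entry point: it builds a throwaway workqueue_struct on the stack, points it at taskqueue_thread with draining cleared, and forwards to queue_delayed_work(). That is viable only because, at least in the matched lines, queue_delayed_work() copies out wq->taskqueue and reads wq->draining without keeping a pointer to wq itself. The more usual way to express the same idea is one long-lived default queue, sketched here with hypothetical names:

#include <stddef.h>

/* Illustrative "system workqueue" pattern for lines 166-170: one shared,
 * long-lived default queue that schedule-style wrappers forward to, so
 * callers never name a queue explicitly.  sketch_wq, sketch_work and the
 * two helpers are the made-up pieces from the earlier sketches. */
struct sketch_wq;
struct sketch_work;

struct sketch_wq *sketch_create_workqueue(const char *name);           /* assumed */
int sketch_queue_work(struct sketch_wq *q, struct sketch_work *w);     /* assumed */

static struct sketch_wq *sketch_system_wq;

/* Call once at startup, before anything uses sketch_schedule_work(). */
static int sketch_workqueue_init(void)
{
    sketch_system_wq = sketch_create_workqueue("events");
    return sketch_system_wq != NULL ? 0 : -1;
}

/* schedule_work() analogue: submit to the shared default queue. */
static int sketch_schedule_work(struct sketch_work *w)
{
    return sketch_queue_work(sketch_system_wq, w);
}
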
179 struct workqueue_struct *wq;
181 wq = malloc(sizeof *wq);
182 if (!wq)
185 wq->name = name;
186 wq->thread = thread;
187 thread_cond_init(&wq->cond);
188 INIT_LIST_HEAD(&wq->work_list);
189 thread_mutex_init(&wq->work_list_lock);
191 thread = thread_create(exec_work, wq);
195 return wq;
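
Lines 179-195 are the constructor: allocate the workqueue_struct, record the name, initialise the condition variable, list head and mutex, then start the worker thread on exec_work(). One thing to note in the matches is ordering: wq->thread is assigned at line 186, but thread_create() only runs at line 191, so the stored handle cannot be the created worker unless it is refreshed on a line that did not match. A hedged pthread version of the same constructor, with the worker started last and explicit error unwinding (names are illustrative):

#include <pthread.h>
#include <stdlib.h>

/* Illustrative constructor in the spirit of lines 179-195: allocate the
 * queue, set up its lock and condition variable, and only then start the
 * worker, so the thread never sees a half-initialised structure.  Error
 * paths unwind exactly what has been set up so far. */
struct sketch_work;                         /* opaque work item */
struct sketch_wq {
    const char *name;
    pthread_t worker;
    pthread_mutex_t lock;
    pthread_cond_t cond;
    struct sketch_work *head;               /* list starts out empty */
    struct sketch_work **tail;
    int shutting_down;
};

void *sketch_worker(void *arg);             /* worker loop sketched earlier */

static struct sketch_wq *sketch_create_workqueue(const char *name)
{
    struct sketch_wq *q = calloc(1, sizeof(*q));

    if (q == NULL)
        return NULL;
    q->name = name;
    q->tail = &q->head;
    if (pthread_mutex_init(&q->lock, NULL) != 0)
        goto free_q;
    if (pthread_cond_init(&q->cond, NULL) != 0)
        goto destroy_lock;
    if (pthread_create(&q->worker, NULL, sketch_worker, q) != 0)
        goto destroy_cond;
    return q;                               /* worker is now running */

destroy_cond:
    pthread_cond_destroy(&q->cond);
destroy_lock:
    pthread_mutex_destroy(&q->lock);
free_q:
    free(q);
    return NULL;
}
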
208 #define flush_workqueue(wq) flush_taskqueue((wq)->taskqueue)
223 drain_workqueue(struct workqueue_struct *wq)
225 atomic_inc(&wq->draining);
226 flush_taskqueue(wq->taskqueue);
227 atomic_dec(&wq->draining);
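
Lines 208 and 223-227 split completion into two levels, both delegated to the taskqueue layer: flush_workqueue() maps straight onto flush_taskqueue(), while drain_workqueue() first raises the draining counter, so the test at line 149 starts rejecting new submissions, then flushes, then lowers it again; without that gate, work that keeps re-queuing itself could stop the flush from ever finishing. A compact sketch of the same two operations over a pending-item count, with made-up names, C11 atomics and pthreads:

#include <pthread.h>
#include <stdatomic.h>

/* Illustrative flush/drain pair for lines 208-227.  "pending" counts
 * accepted-but-unfinished items; flush waits for it to hit zero; drain
 * additionally closes the submission gate first so the count cannot be
 * refilled while we wait. */
struct sketch_flush_state {
    pthread_mutex_t lock;
    pthread_cond_t  idle;       /* signalled when pending drops to zero */
    int             pending;    /* accepted but not yet completed       */
    atomic_int      draining;   /* nonzero: refuse new submissions      */
};

/* Submitter side: count the item only while no drain is in progress
 * (the same test queue_delayed_work() makes at line 149). */
static int sketch_submit_accepted(struct sketch_flush_state *s)
{
    int ok;

    pthread_mutex_lock(&s->lock);
    ok = atomic_load(&s->draining) == 0;
    if (ok)
        s->pending++;
    pthread_mutex_unlock(&s->lock);
    return ok;
}

/* Worker side: called after an item's callback has finished. */
static void sketch_work_done(struct sketch_flush_state *s)
{
    pthread_mutex_lock(&s->lock);
    if (--s->pending == 0)
        pthread_cond_broadcast(&s->idle);
    pthread_mutex_unlock(&s->lock);
}

/* flush: wait until everything accepted so far has run. */
static void sketch_flush(struct sketch_flush_state *s)
{
    pthread_mutex_lock(&s->lock);
    while (s->pending > 0)
        pthread_cond_wait(&s->idle, &s->lock);
    pthread_mutex_unlock(&s->lock);
}

/* drain: close the gate, flush what is left, reopen the gate. */
static void sketch_drain(struct sketch_flush_state *s)
{
    atomic_fetch_add(&s->draining, 1);
    sketch_flush(s);
    atomic_fetch_sub(&s->draining, 1);
}
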
265 mod_delayed_work(struct workqueue_struct *wq, struct delayed_work *dwork,
269 queue_delayed_work(wq, dwork, delay);
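
Lines 265-269 show mod_delayed_work() ending in a call to queue_delayed_work(). Whatever happens on the unmatched lines before that call, the Linux API it mirrors means "re-arm with the new delay": any pending arming of the item is cancelled first so the callback still runs exactly once, on the new schedule. A sketch of that shape, with both helpers assumed rather than taken from the listing:

#include <stdbool.h>
#include <time.h>

/* Illustrative mod_delayed_work() shape for lines 265-269: cancel any
 * pending instance of the item, then queue it again with the new delay,
 * so it runs once, according to the new delay. */
struct sketch_wq;
struct sketch_delayed_work;

bool sketch_cancel_delayed_work(struct sketch_delayed_work *dw);        /* assumed */
int  sketch_queue_delayed_work(struct sketch_wq *q,
                               struct sketch_delayed_work *dw,
                               struct timespec delay);                  /* assumed */

static int sketch_mod_delayed_work(struct sketch_wq *q,
                                   struct sketch_delayed_work *dw,
                                   struct timespec delay)
{
    /* Drop any pending arming first so the callback cannot run twice. */
    sketch_cancel_delayed_work(dw);
    return sketch_queue_delayed_work(q, dw, delay);
}
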