Lines matching refs: queue

214 * 5) the new master collects up all of the secondary lock queue info
217 * 6) each secondary lock queue responds with the full known lock info
220 * 8) upon receiving this message, the secondary queue node unlocks
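
Steps 5), 6) and 8) all revolve around the "secondary lock queue": every node keeps three lists per lock resource, and on the non-master nodes these are the secondary queues whose contents get shipped to the new master. A minimal userspace sketch of that layout, assuming the granted/converting/blocked fields of struct dlm_lock_resource from dlmcommon.h (the field names are real; everything else here is illustrative):

#include <assert.h>
#include <stddef.h>
#include <stdio.h>

struct list_head { struct list_head *next, *prev; };

/* every node keeps three queues per lock resource; on a non-master
 * node these are the "secondary" queues reported back to the new
 * master during recovery */
struct dlm_lock_resource {
	struct list_head granted;	/* locks currently held */
	struct list_head converting;	/* held, waiting for a mode change */
	struct list_head blocked;	/* requested but not yet granted */
};

int main(void)
{
	/* code elsewhere in the file steps from one queue to the next
	 * with pointer arithmetic (the queue++ idiom further down),
	 * so the three fields must stay adjacent and in this order */
	assert(offsetof(struct dlm_lock_resource, converting) ==
	       offsetof(struct dlm_lock_resource, granted) +
	       sizeof(struct list_head));
	assert(offsetof(struct dlm_lock_resource, blocked) ==
	       offsetof(struct dlm_lock_resource, converting) +
	       sizeof(struct list_head));
	printf("granted, converting, blocked are adjacent\n");
	return 0;
}

The adjacency matters: the counting idiom at lines 1083-1089 below depends on it.
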
849 /* queue up work for dlm_request_all_locks_worker */
1083 struct list_head *iter, *queue = &res->granted;
1087 list_for_each(iter, queue)
1089 queue++;
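
The bare queue++ at line 1089 only makes sense given that layout: dlm_num_locks_in_lockres() counts the locks on all three queues by stepping one pointer across the adjacent list heads. A compilable sketch of the same idiom, with pared-down stand-ins for the kernel list macros (strictly speaking, walking a pointer across struct members is outside the C standard, but it is exactly what the kernel code relies on):

#include <stdio.h>

struct list_head { struct list_head *next, *prev; };

#define LIST_HEAD_INIT(name) { &(name), &(name) }
#define list_for_each(pos, head) \
	for ((pos) = (head)->next; (pos) != (head); (pos) = (pos)->next)

struct dlm_lock_resource {
	struct list_head granted;	/* adjacent fields: queue++ */
	struct list_head converting;	/* walks granted, then      */
	struct list_head blocked;	/* converting, then blocked */
};

/* mirrors dlm_num_locks_in_lockres(): count locks on all queues */
static int num_locks_in_lockres(struct dlm_lock_resource *res)
{
	int total_locks = 0;
	struct list_head *iter, *queue = &res->granted;
	int i;

	for (i = 0; i < 3; i++) {
		list_for_each(iter, queue)
			total_locks++;
		queue++;	/* step to the next adjacent queue */
	}
	return total_locks;
}

int main(void)
{
	struct dlm_lock_resource res = {
		.granted    = LIST_HEAD_INIT(res.granted),
		.converting = LIST_HEAD_INIT(res.converting),
		.blocked    = LIST_HEAD_INIT(res.blocked),
	};
	struct list_head a = { &res.granted, &res.granted };

	/* splice one dummy entry into the granted queue */
	res.granted.next = res.granted.prev = &a;

	printf("%d lock(s)\n", num_locks_in_lockres(&res));	/* 1 */
	return 0;
}
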
1173 int queue)
1179 if (queue == DLM_BLOCKED_LIST)
1208 struct dlm_migratable_lockres *mres, int queue)
1218 ml->list = queue;
1221 dlm_prepare_lvb_for_migration(lock, mres, queue);
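
Lines 1208-1221 are dlm_add_lock_to_array(): each outgoing lock is stamped with the index of the queue it came from (ml->list = queue) so the receiving node can requeue it correctly, and line 1179 shows dlm_prepare_lvb_for_migration() skipping locks on the blocked queue, since a lock that was never granted has no lvb worth shipping. A toy sketch of the packing; the queue indices match the enum in dlmcommon.h, while the struct shape is a simplified stand-in:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

enum {					/* queue indices, as in dlmcommon.h */
	DLM_GRANTED_LIST = 0,
	DLM_CONVERTING_LIST,
	DLM_BLOCKED_LIST,
};

#define LVB_LEN 64

/* pared-down stand-in for struct dlm_migratable_lock */
struct migratable_lock {
	uint64_t cookie;		/* unique lock id */
	uint8_t  list;			/* queue index at the sender */
	char     lvb[LVB_LEN];		/* lock value block, if any */
};

static void add_lock_to_array(struct migratable_lock *ml,
			      uint64_t cookie, int queue, const char *lvb)
{
	ml->cookie = cookie;
	ml->list = (uint8_t)queue;	/* receiver requeues using this */

	/* as in dlm_prepare_lvb_for_migration(): a blocked lock was
	 * never granted, so its lvb is ignored */
	if (queue == DLM_BLOCKED_LIST)
		return;
	memcpy(ml->lvb, lvb, LVB_LEN);
}

int main(void)
{
	struct migratable_lock ml = { 0 };
	char lvb[LVB_LEN] = "hello";

	add_lock_to_array(&ml, 42, DLM_GRANTED_LIST, lvb);
	printf("cookie=%llu queue=%u lvb=%s\n",
	       (unsigned long long)ml.cookie, (unsigned)ml.list, ml.lvb);
	return 0;
}
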
1264 struct list_head *queue;
1289 queue = dlm_list_idx_to_ptr(res, i);
1290 list_for_each_entry(lock, queue, list) {
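
The loop at lines 1289-1290 visits the three queues by index, using the small dlm_list_idx_to_ptr() helper from the dlm common header to map an index back to the matching list head. A sketch of that pairing (the helper body mirrors the kernel's; the rest is scaffolding):

#include <stdio.h>

struct list_head { struct list_head *next, *prev; };
#define LIST_HEAD_INIT(name) { &(name), &(name) }

enum {
	DLM_GRANTED_LIST = 0,
	DLM_CONVERTING_LIST,
	DLM_BLOCKED_LIST,
};

struct dlm_lock_resource {
	struct list_head granted, converting, blocked;
};

/* mirrors dlm_list_idx_to_ptr(): index -> queue head */
static struct list_head *list_idx_to_ptr(struct dlm_lock_resource *res,
					 int idx)
{
	switch (idx) {
	case DLM_GRANTED_LIST:    return &res->granted;
	case DLM_CONVERTING_LIST: return &res->converting;
	default:                  return &res->blocked;
	}
}

int main(void)
{
	struct dlm_lock_resource res = {
		LIST_HEAD_INIT(res.granted),
		LIST_HEAD_INIT(res.converting),
		LIST_HEAD_INIT(res.blocked),
	};
	int i;

	/* same shape as the send loop: walk every queue in order */
	for (i = DLM_GRANTED_LIST; i <= DLM_BLOCKED_LIST; i++)
		printf("queue %d is %sempty\n", i,
		       list_idx_to_ptr(&res, i)->next ==
		       list_idx_to_ptr(&res, i) ? "" : "non-");
	return 0;
}
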
1391 /* lookup the lock to see if we have a secondary queue for this
1505 /* queue up work for dlm_mig_lockres_worker */
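
Lines 849 and 1505 show the same pattern from both sides of the recovery exchange: the message handler defers the heavy lifting (to dlm_request_all_locks_worker and dlm_mig_lockres_worker respectively), since the handler runs in a context that should not block. A single-threaded toy of that shape; the kernel version uses its own work list, spinlocks and a dedicated worker thread, all of which are elided here:

#include <stdio.h>

/* minimal work item: the handler fills one in, the worker runs it */
struct work_item {
	void (*func)(struct work_item *);
	int arg;			/* e.g. the requesting node */
	struct work_item *next;
};

static struct work_item *work_list;	/* queue drained by the worker */

/* handler side: must not block, so just queue the item and return */
static void queue_work_item(struct work_item *w)
{
	w->next = work_list;
	work_list = w;
}

/* worker side: runs later, in a context that may block */
static void run_work(void)
{
	struct work_item *w = work_list;

	work_list = NULL;
	for (; w; w = w->next)
		w->func(w);
}

static void request_all_locks_worker(struct work_item *w)
{
	printf("sending all lock queues to node %d\n", w->arg);
}

int main(void)
{
	struct work_item w = { request_all_locks_worker, 3, NULL };

	queue_work_item(&w);	/* what the message handler does */
	run_work();		/* what the dlm worker does */
	return 0;
}
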
1777 * queue and not be on the secondary queue since the lock is always added
1792 struct list_head *queue, *iter;
1821 queue = dlm_list_num_to_pointer(res, ml->list);
1825 * be moved to the proper location within the queue.
1882 if (tmpq != queue) {
1900 list_move_tail(&lock->list, queue);
1918 BUG_ON(queue != &res->converting);
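
Lines 1882-1918 are in dlm_process_recovery_data(): a migrated lock the new master already knows about may arrive tagged for a different queue than the one it sits on locally (tmpq != queue), so it is moved with list_move_tail(), switching lists without touching the refcount, and the BUG_ON at 1918 asserts that a convert-in-progress lock can only land on the converting queue. A compilable sketch of the move, with minimal stand-ins for the kernel list helpers:

#include <stdio.h>

struct list_head { struct list_head *next, *prev; };
#define LIST_HEAD_INIT(name) { &(name), &(name) }

static void list_del(struct list_head *e)
{
	e->prev->next = e->next;
	e->next->prev = e->prev;
}

static void list_add_tail(struct list_head *e, struct list_head *head)
{
	e->prev = head->prev;
	e->next = head;
	head->prev->next = e;
	head->prev = e;
}

/* mirrors the kernel's list_move_tail(): unlink, append elsewhere */
static void list_move_tail(struct list_head *e, struct list_head *head)
{
	list_del(e);
	list_add_tail(e, head);
}

int main(void)
{
	struct list_head granted    = LIST_HEAD_INIT(granted);
	struct list_head converting = LIST_HEAD_INIT(converting);
	struct list_head lock       = LIST_HEAD_INIT(lock);

	/* the lock starts out on the granted queue locally... */
	list_add_tail(&lock, &granted);

	/* ...but the migrated copy says it is converting, so move it;
	 * no refcount changes: the lock merely switches lists */
	list_move_tail(&lock, &converting);

	printf("granted empty: %d, converting empty: %d\n",
	       granted.next == &granted, converting.next == &converting);
	return 0;
}
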
1974 * wrt lock queue ordering and recovery:
1975 * 1. order of locks on granted queue is
1977 * 2. order of locks on converting queue is
1979 * 3. order of locks on the blocked queue is
1985 * for its secondary queue locks is preserved
1991 list_for_each_entry(lock, queue, list) {
2001 "node=%u, cookie=%u:%llu, queue=%d\n",
2020 list_add(&newlock->list, queue);
2022 list_add_tail(&newlock->list, queue);
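
The ordering rules at lines 1974-1985 justify the insertion choice at lines 2020-2022: since granted-queue order is meaningless and converting/blocked order died with the node, most recovered locks are simply appended with list_add_tail(); the exception is a lock caught mid-downconvert, which list_add() puts at the head of the converting queue so it is considered first. A sketch of the two insertions (the list helpers are stand-ins for the kernel's):

#include <stdio.h>

struct list_head { struct list_head *next, *prev; };
#define LIST_HEAD_INIT(name) { &(name), &(name) }

/* insert right after the head, i.e. at the front of the queue */
static void list_add(struct list_head *e, struct list_head *head)
{
	e->next = head->next;
	e->prev = head;
	head->next->prev = e;
	head->next = e;
}

/* insert right before the head, i.e. at the back of the queue */
static void list_add_tail(struct list_head *e, struct list_head *head)
{
	e->prev = head->prev;
	e->next = head;
	head->prev->next = e;
	head->prev = e;
}

int main(void)
{
	struct list_head converting  = LIST_HEAD_INIT(converting);
	struct list_head normal      = LIST_HEAD_INIT(normal);
	struct list_head downconvert = LIST_HEAD_INIT(downconvert);

	/* ordinary recovered lock: its order was lost anyway, append */
	list_add_tail(&normal, &converting);

	/* lock caught mid-downconvert: goes to the head of the
	 * converting queue, as the recovery code does */
	list_add(&downconvert, &converting);

	printf("head of converting is downconvert: %d\n",
	       converting.next == &downconvert);
	return 0;
}
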
2048 struct list_head *queue;
2067 queue = dlm_list_idx_to_ptr(res, i);
2068 list_for_each_entry_safe(lock, next, queue, list) {
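
The walk at line 2068 uses list_for_each_entry_safe() because locks may be unlinked from the queue mid-iteration; the _safe variant caches the next node before the loop body runs. A userspace sketch of the pattern, with a simplified macro that takes the entry type explicitly (the kernel's infers it with typeof):

#include <stddef.h>
#include <stdio.h>

struct list_head { struct list_head *next, *prev; };
#define LIST_HEAD_INIT(name) { &(name), &(name) }

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

/* safe variant: 'n' caches the next node so 'pos' may be unlinked */
#define list_for_each_entry_safe(pos, n, head, type, member)	      \
	for (pos = container_of((head)->next, type, member),	      \
	     n = container_of(pos->member.next, type, member);	      \
	     &pos->member != (head);				      \
	     pos = n, n = container_of(n->member.next, type, member))

static void list_del(struct list_head *e)
{
	e->prev->next = e->next;
	e->next->prev = e->prev;
}

struct dlm_lock {
	int node;			/* owning node number */
	struct list_head list;
};

int main(void)
{
	struct list_head queue = LIST_HEAD_INIT(queue);
	struct dlm_lock a = { 1, { 0, 0 } }, b = { 2, { 0, 0 } };
	struct dlm_lock *lock, *next;
	int dead_node = 2;

	/* wire both locks onto the queue by hand */
	queue.next = &a.list; a.list.prev = &queue;
	a.list.next = &b.list; b.list.prev = &a.list;
	b.list.next = &queue; queue.prev = &b.list;

	/* drop every lock owned by the dead node while walking */
	list_for_each_entry_safe(lock, next, &queue, struct dlm_lock, list)
		if (lock->node == dead_node)
			list_del(&lock->list);

	list_for_each_entry_safe(lock, next, &queue, struct dlm_lock, list)
		printf("remaining lock from node %d\n", lock->node);
	return 0;
}
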
2212 struct list_head *queue;
2233 queue = dlm_list_idx_to_ptr(res, i);
2234 list_for_each_entry(lock, queue, list) {