Lines matching refs: this_rq

105 extern void calc_global_load_tick(struct rq *this_rq);
106 extern long calc_load_fold_active(struct rq *this_rq, long adjust);
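The two externs at 105-106 feed the global load-average machinery: calc_load_fold_active() folds this runqueue's active task count (running plus uninterruptible) into a delta against the last value it reported. Below is a minimal userspace sketch of that fold; nr_running, nr_uninterruptible, and calc_load_active are real struct rq fields, but the reduced struct and the harness around them are illustrative, and the actual body lives in kernel/sched/loadavg.c.

    #include <stdio.h>

    struct rq {                      /* reduced stand-in for the kernel's struct rq */
        unsigned int nr_running;
        unsigned int nr_uninterruptible;
        long calc_load_active;       /* last value folded into the global count */
    };

    /* Fold this rq's active-task count into a delta vs. its last snapshot. */
    static long calc_load_fold_active(struct rq *this_rq, long adjust)
    {
        long nr_active, delta = 0;

        nr_active = this_rq->nr_running - adjust;
        nr_active += (int)this_rq->nr_uninterruptible;

        if (nr_active != this_rq->calc_load_active) {
            delta = nr_active - this_rq->calc_load_active;
            this_rq->calc_load_active = nr_active;
        }
        return delta;
    }

    int main(void)
    {
        struct rq rq = { .nr_running = 3, .nr_uninterruptible = 1 };
        printf("delta = %ld\n", calc_load_fold_active(&rq, 0));  /* 4 */
        printf("delta = %ld\n", calc_load_fold_active(&rq, 0));  /* 0: unchanged */
        return 0;
    }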
1244 #define this_rq() this_cpu_ptr(&runqueues)
1752 rq = this_rq();
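The this_rq() macro at 1244 resolves the current CPU's entry in the per-CPU runqueues array via this_cpu_ptr(), which is how call sites like 1752 reach their own runqueue without knowing their CPU number up front. A userspace sketch of the same lookup, assuming a plain array indexed by sched_getcpu() in place of the kernel's per-CPU infrastructure:

    #define _GNU_SOURCE
    #include <sched.h>
    #include <stdio.h>

    #define MAX_CPUS 64                 /* sketch-only bound */

    struct rq {                         /* stand-in for the kernel's runqueue */
        int nr_running;
    };

    /* One rq per CPU, like the kernel's per-CPU runqueues variable. */
    static struct rq runqueues[MAX_CPUS];

    /* Analogue of: #define this_rq() this_cpu_ptr(&runqueues)
     * Kernel callers rely on preemption being disabled so the CPU cannot
     * change under them; sched_getcpu() is only a userspace stand-in. */
    static struct rq *this_rq(void)
    {
        int cpu = sched_getcpu();
        return &runqueues[(cpu < 0 ? 0 : cpu) % MAX_CPUS];
    }

    int main(void)
    {
        struct rq *rq = this_rq();
        rq->nr_running++;
        printf("this CPU's rq has %d runnable task(s)\n", rq->nr_running);
        return 0;
    }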
2288 void (*task_woken)(struct rq *this_rq, struct task_struct *task);
2307 void (*switched_from)(struct rq *this_rq, struct task_struct *task);
2308 void (*switched_to) (struct rq *this_rq, struct task_struct *task);
2309 void (*prio_changed) (struct rq *this_rq, struct task_struct *task,
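The hooks at 2288 and 2307-2309 are members of struct sched_class: each scheduling class (fair, rt, deadline, ...) fills in these function pointers, and the core scheduler invokes them with the runqueue the event happened on. A stripped-down sketch of that ops-table pattern; the demo_* names and reduced types are illustrative, not kernel code:

    #include <stdio.h>

    struct rq { int cpu; };                     /* reduced stand-ins */
    struct task_struct { int pid; int prio; };

    /* Subset of the kernel's struct sched_class ops table. */
    struct sched_class {
        void (*task_woken)  (struct rq *this_rq, struct task_struct *task);
        void (*switched_to) (struct rq *this_rq, struct task_struct *task);
        void (*prio_changed)(struct rq *this_rq, struct task_struct *task,
                             int oldprio);      /* third arg: previous priority */
    };

    static void demo_task_woken(struct rq *this_rq, struct task_struct *task)
    {
        printf("cpu %d: task %d woke up\n", this_rq->cpu, task->pid);
    }

    static const struct sched_class demo_sched_class = {
        .task_woken = demo_task_woken,
    };

    int main(void)
    {
        struct rq rq = { .cpu = 0 };
        struct task_struct t = { .pid = 42, .prio = 120 };

        /* The core scheduler dispatches through the class's table: */
        if (demo_sched_class.task_woken)
            demo_sched_class.task_woken(&rq, &t);
        return 0;
    }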
2709 static inline int _double_lock_balance(struct rq *this_rq, struct rq *busiest)
2710 __releases(this_rq->lock)
2712 __acquires(this_rq->lock)
2714 raw_spin_rq_unlock(this_rq);
2715 double_rq_lock(this_rq, busiest);
2728 static inline int _double_lock_balance(struct rq *this_rq, struct rq *busiest)
2729 __releases(this_rq->lock)
2731 __acquires(this_rq->lock)
2733 if (__rq_lockp(this_rq) == __rq_lockp(busiest) ||
2735 double_rq_clock_clear_update(this_rq, busiest);
2739 if (rq_order_less(this_rq, busiest)) {
2741 double_rq_clock_clear_update(this_rq, busiest);
2745 raw_spin_rq_unlock(this_rq);
2746 double_rq_lock(this_rq, busiest);
2754 * double_lock_balance - lock the busiest runqueue, this_rq is locked already.
2756 static inline int double_lock_balance(struct rq *this_rq, struct rq *busiest)
2760 return _double_lock_balance(this_rq, busiest);
2763 static inline void double_unlock_balance(struct rq *this_rq, struct rq *busiest)
2766 if (__rq_lockp(this_rq) != __rq_lockp(busiest))
2768 lock_set_subclass(&__rq_lockp(this_rq)->dep_map, 0, _RET_IP_);
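Taken together, 2709-2768 implement the classic two-lock ordering dance: double_lock_balance() is entered with this_rq->lock already held and must also take busiest->lock, and to avoid an AB/BA deadlock it may drop this_rq's lock and retake both in a global order (rq_order_less()/__rq_lockp() in the kernel). double_unlock_balance() then releases only busiest, leaving this_rq locked for the caller. A pthread sketch of the same idea, using address order in place of rq_order_less(); this is an illustration of the technique, not the kernel's locking primitives:

    #include <pthread.h>
    #include <stdint.h>

    struct rq {
        pthread_mutex_t lock;
    };

    /* Take both locks in a fixed global order (address order here,
     * standing in for the kernel's rq_order_less()). */
    static void double_rq_lock(struct rq *rq1, struct rq *rq2)
    {
        if (rq1 == rq2) {
            pthread_mutex_lock(&rq1->lock);
            return;
        }
        if ((uintptr_t)rq2 < (uintptr_t)rq1) {
            struct rq *tmp = rq1; rq1 = rq2; rq2 = tmp;
        }
        pthread_mutex_lock(&rq1->lock);
        pthread_mutex_lock(&rq2->lock);
    }

    /* Called with this_rq->lock held; returns 1 if it had to drop and
     * retake it (the caller must then assume the rq contents changed). */
    static int double_lock_balance(struct rq *this_rq, struct rq *busiest)
    {
        if (this_rq == busiest ||
            pthread_mutex_trylock(&busiest->lock) == 0)
            return 0;                   /* got it without dropping anything */

        if ((uintptr_t)this_rq < (uintptr_t)busiest) {
            /* We hold the lower-ordered lock, so blocking here cannot
             * deadlock against another CPU doing the same dance. */
            pthread_mutex_lock(&busiest->lock);
            return 0;
        }

        /* Wrong order: drop our lock and retake both in order. */
        pthread_mutex_unlock(&this_rq->lock);
        double_rq_lock(this_rq, busiest);
        return 1;
    }

    /* Release only busiest; this_rq->lock stays held, as in the kernel. */
    static void double_unlock_balance(struct rq *this_rq, struct rq *busiest)
    {
        if (this_rq != busiest)
            pthread_mutex_unlock(&busiest->lock);
    }

    int main(void)
    {
        struct rq a = { PTHREAD_MUTEX_INITIALIZER };
        struct rq b = { PTHREAD_MUTEX_INITIALIZER };

        pthread_mutex_lock(&a.lock);    /* caller already holds this_rq */
        (void)double_lock_balance(&a, &b);
        /* ... pull tasks from busiest over to this_rq ... */
        double_unlock_balance(&a, &b);
        pthread_mutex_unlock(&a.lock);
        return 0;
    }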