Lines Matching refs:wq

24 static inline unsigned int vnic_wq_copy_desc_avail(struct vnic_wq_copy *wq)
26 return wq->ring.desc_avail;
29 static inline unsigned int vnic_wq_copy_desc_in_use(struct vnic_wq_copy *wq)
31 return wq->ring.desc_count - 1 - wq->ring.desc_avail;
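Read together, these two helpers say that the ring keeps an explicit free count and that one descriptor is never handed out (desc_count - 1 - desc_avail); holding a slot back is the usual way to keep a full ring from looking identical to an empty one when the producer and consumer indices meet. Below is a minimal standalone model of that accounting, with hypothetical names (ring_model, model_desc_in_use) rather than the driver's structs, and assuming the ring starts with desc_count - 1 free descriptors, which the in-use formula implies.

#include <assert.h>

/* Hypothetical stand-in for the wq->ring bookkeeping in the listing. */
struct ring_model {
	unsigned int desc_count;   /* total descriptors in the ring   */
	unsigned int desc_avail;   /* descriptors still free to post  */
};

static unsigned int model_desc_avail(const struct ring_model *r)
{
	return r->desc_avail;
}

static unsigned int model_desc_in_use(const struct ring_model *r)
{
	/* mirrors desc_count - 1 - desc_avail: one slot is held back */
	return r->desc_count - 1 - r->desc_avail;
}

int main(void)
{
	struct ring_model r = { .desc_count = 8, .desc_avail = 7 };

	assert(model_desc_avail(&r) == 7);   /* freshly initialized: all usable slots free */
	assert(model_desc_in_use(&r) == 0);
	r.desc_avail = 0;                    /* every usable slot has been posted */
	assert(model_desc_in_use(&r) == 7);  /* "full" is desc_count - 1, not desc_count */
	return 0;
}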
34 static inline void *vnic_wq_copy_next_desc(struct vnic_wq_copy *wq)
36 struct fcpio_host_req *desc = wq->ring.descs;
37 return &desc[wq->to_use_index];
40 static inline void vnic_wq_copy_post(struct vnic_wq_copy *wq)
43 ((wq->to_use_index + 1) == wq->ring.desc_count) ?
44 (wq->to_use_index = 0) : (wq->to_use_index++);
45 wq->ring.desc_avail--;
54 iowrite32(wq->to_use_index, &wq->ctrl->posted_index);
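The producer path in the listing is next_desc followed by post: the caller fills the fcpio_host_req slot at to_use_index, then post advances the index with wrap-around, consumes one free descriptor, and publishes the new index to the adapter with iowrite32() into ctrl->posted_index. The following sketch models only the index arithmetic; the struct and function names (wq_model, model_post) are hypothetical, and the MMIO doorbell write has no user-space equivalent, so it is just noted in a comment.

/* Hypothetical model of the post step: wrap to_use_index, consume one
 * free descriptor.  The real driver then publishes the new index to the
 * posted_index doorbell register. */
struct wq_model {
	unsigned int desc_count;
	unsigned int desc_avail;
	unsigned int to_use_index;
};

static void model_post(struct wq_model *wq)
{
	wq->to_use_index = (wq->to_use_index + 1 == wq->desc_count)
				? 0 : wq->to_use_index + 1;
	wq->desc_avail--;
	/* real code: iowrite32(wq->to_use_index, ...) to tell the hardware */
}

int main(void)
{
	/* last slot of a 4-entry ring; earlier slots already completed and cleaned */
	struct wq_model wq = { .desc_count = 4, .desc_avail = 3, .to_use_index = 3 };

	model_post(&wq);   /* index 3 is the final slot, so it wraps to 0 */
	return (wq.to_use_index == 0 && wq.desc_avail == 2) ? 0 : 1;
}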
57 static inline void vnic_wq_copy_desc_process(struct vnic_wq_copy *wq, u16 index)
61 if (wq->to_clean_index <= index)
62 cnt = (index - wq->to_clean_index) + 1;
64 cnt = wq->ring.desc_count - wq->to_clean_index + index + 1;
66 wq->to_clean_index = ((index + 1) % wq->ring.desc_count);
67 wq->ring.desc_avail += cnt;
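From the listed lines alone, desc_process simply counts how many descriptors a completion at index covers, including the wrap-around case. For example, with desc_count = 64, to_clean_index = 60 and a completed index of 3, the else branch gives cnt = 64 - 60 + 3 + 1 = 8, i.e. entries 60..63 plus 0..3; to_clean_index then becomes (3 + 1) % 64 = 4 and desc_avail grows by 8.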
71 static inline void vnic_wq_copy_service(struct vnic_wq_copy *wq,
73 void (*q_service)(struct vnic_wq_copy *wq,
76 struct fcpio_host_req *wq_desc = wq->ring.descs;
82 (*q_service)(wq, &wq_desc[wq->to_clean_index]);
84 wq->ring.desc_avail++;
86 curr_index = wq->to_clean_index;
91 ((wq->to_clean_index + 1) == wq->ring.desc_count) ?
92 (wq->to_clean_index = 0) : (wq->to_clean_index++);
99 (wq->to_clean_index == wq->to_use_index))
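The service loop hands each completed descriptor to the q_service callback, reclaims its slot, and advances to_clean_index with the same wrap-around trick used on the post side. The listed fragments show two exit checks: stopping once the completed index has been processed, and stopping when to_clean_index catches up with to_use_index while draining the whole queue (the exact drain condition sits on a line the listing omits). The sketch below models only the "stop at the completed index" case, with hypothetical types and names (wq_model, model_service) and no hardware descriptors.

#include <stdio.h>

struct wq_model {
	unsigned int desc_count;
	unsigned int desc_avail;
	unsigned int to_use_index;
	unsigned int to_clean_index;
};

typedef void (*service_fn)(struct wq_model *wq, unsigned int index);

/* Invoke the handler for each descriptor up to and including
 * completed_index, reclaiming slots and wrapping to_clean_index. */
static void model_service(struct wq_model *wq, unsigned int completed_index,
			  service_fn handler)
{
	for (;;) {
		unsigned int curr = wq->to_clean_index;

		if (handler)
			handler(wq, curr);
		wq->desc_avail++;

		wq->to_clean_index = (curr + 1 == wq->desc_count) ? 0 : curr + 1;

		if (curr == completed_index)
			break;
	}
}

static void print_index(struct wq_model *wq, unsigned int index)
{
	(void)wq;
	printf("cleaned descriptor %u\n", index);
}

int main(void)
{
	/* descriptors 6, 7 and 0 are outstanding; the completion reports index 0 */
	struct wq_model wq = {
		.desc_count = 8, .desc_avail = 4,
		.to_use_index = 1, .to_clean_index = 6,
	};

	model_service(&wq, 0, print_index);   /* cleans 6, 7 and 0 */
	printf("desc_avail is now %u\n", wq.desc_avail);   /* 7: ring is empty again */
	return 0;
}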
104 void vnic_wq_copy_enable(struct vnic_wq_copy *wq);
105 int vnic_wq_copy_disable(struct vnic_wq_copy *wq);
106 void vnic_wq_copy_free(struct vnic_wq_copy *wq);
107 int vnic_wq_copy_alloc(struct vnic_dev *vdev, struct vnic_wq_copy *wq,
109 void vnic_wq_copy_init(struct vnic_wq_copy *wq, unsigned int cq_index,
112 void vnic_wq_copy_clean(struct vnic_wq_copy *wq,
113 void (*q_clean)(struct vnic_wq_copy *wq,
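The remaining matches are the queue's lifecycle entry points. Their full parameter lists continue on lines the listing omits, so only the leading arguments are visible here. The usual vnic-style order of operations would be alloc, then init, then enable before any work is posted, with disable, clean (passing a per-descriptor callback to release outstanding requests), and free on teardown; that ordering is inferred from the names and from similar vnic queue implementations, not from the listed lines themselves.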