Lines Matching refs:iwcq

269  * @iwcq: cq ptr
272 irdma_clean_cqes(struct irdma_qp *iwqp, struct irdma_cq *iwcq)
274 struct irdma_cq_uk *ukcq = &iwcq->sc_cq.cq_uk;
277 spin_lock_irqsave(&iwcq->lock, flags);
279 spin_unlock_irqrestore(&iwcq->lock, flags);
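
The matches at 269-279 all fall inside irdma_clean_cqes(), which flushes a departing QP's completions out of the CQ while holding iwcq->lock. A minimal reconstruction from these lines; only the ukcq setup and the lock pair appear in the matches, so the callee name irdma_uk_clean_cq() is an assumption based on the driver's uk-layer naming:

    void
    irdma_clean_cqes(struct irdma_qp *iwqp, struct irdma_cq *iwcq)
    {
        struct irdma_cq_uk *ukcq = &iwcq->sc_cq.cq_uk;
        unsigned long flags;

        spin_lock_irqsave(&iwcq->lock, flags);
        /* assumed callee: drop this QP's CQEs from the uk-layer ring */
        irdma_uk_clean_cq(&iwqp->sc_qp.qp_uk, ukcq);
        spin_unlock_irqrestore(&iwcq->lock, flags);
    }
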
1388 * @iwcq: cq ptr
1391 irdma_cq_free_rsrc(struct irdma_pci_f *rf, struct irdma_cq *iwcq)
1393 struct irdma_sc_cq *cq = &iwcq->sc_cq;
1395 if (!iwcq->user_mode) {
1396 irdma_free_dma_mem(rf->sc_dev.hw, &iwcq->kmem);
1397 irdma_free_dma_mem(rf->sc_dev.hw, &iwcq->kmem_shadow);
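
Lines 1388-1397 are irdma_cq_free_rsrc(). Only a kernel-mode CQ owns its ring and shadow area as driver-allocated DMA memory, so only that case frees them; a user-mode CQ's memory belongs to the userspace mapping and is released with it. A sketch; the trailing irdma_free_rsrc() call returning the CQ id to the function's resource pool is an assumption, not shown in the matches:

    void
    irdma_cq_free_rsrc(struct irdma_pci_f *rf, struct irdma_cq *iwcq)
    {
        struct irdma_sc_cq *cq = &iwcq->sc_cq;

        if (!iwcq->user_mode) {
            /* kernel CQ: driver-owned ring and shadow area */
            irdma_free_dma_mem(rf->sc_dev.hw, &iwcq->kmem);
            irdma_free_dma_mem(rf->sc_dev.hw, &iwcq->kmem_shadow);
        }

        /* assumed: hand the CQ id back to the resource pool */
        irdma_free_rsrc(rf, rf->allocated_cqs, cq->cq_uk.cq_id);
    }
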
1418 * @iwcq: cq which owns the resize_list
1423 irdma_process_resize_list(struct irdma_cq *iwcq,
1431 list_for_each_safe(list_node, tmp_node, &iwcq->resize_list) {
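
Lines 1418-1431 belong to irdma_process_resize_list(), which retires CQ buffers parked on iwcq->resize_list once the poll path has drained them. Only the signature and the safe list walk are in the matches; the body below is a hedged reconstruction. lcqe_buf acts as a sentinel: stop before the buffer that produced the last CQE, or retire everything when it is NULL. The hand-off to a cleanup work item (iwdev->cleanup_wq, cq_buf->work) is a hypothetical stand-in for however the driver actually frees the retired DMA ring:

    static int
    irdma_process_resize_list(struct irdma_cq *iwcq, struct irdma_device *iwdev,
                              struct irdma_cq_buf *lcqe_buf)
    {
        struct list_head *list_node, *tmp_node;
        struct irdma_cq_buf *cq_buf;
        int cnt = 0;

        list_for_each_safe(list_node, tmp_node, &iwcq->resize_list) {
            cq_buf = list_entry(list_node, struct irdma_cq_buf, list);
            if (cq_buf == lcqe_buf)
                return cnt; /* may still hold unread CQEs; keep it */

            list_del(&cq_buf->list);
            /* hypothetical: defer the DMA free to a cleanup work item */
            queue_work(iwdev->cleanup_wq, &cq_buf->work);
            cnt++;
        }

        return cnt;
    }
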
1455 struct irdma_cq *iwcq = to_iwcq(ibcq);
1456 struct irdma_sc_dev *dev = iwcq->sc_cq.dev;
1483 if (!iwcq->user_mode) {
1491 if (info.cq_size == iwcq->sc_cq.cq_uk.cq_size - 1)
1549 info.shadow_read_threshold = iwcq->sc_cq.shadow_read_threshold;
1557 cqp_info->in.u.cq_modify.cq = &iwcq->sc_cq;
1565 spin_lock_irqsave(&iwcq->lock, flags);
1567 cq_buf->kmem_buf = iwcq->kmem;
1569 memcpy(&cq_buf->cq_uk, &iwcq->sc_cq.cq_uk, sizeof(cq_buf->cq_uk));
1571 list_add_tail(&cq_buf->list, &iwcq->resize_list);
1572 iwcq->kmem = kmem_buf;
1575 irdma_sc_cq_resize(&iwcq->sc_cq, &info);
1577 spin_unlock_irqrestore(&iwcq->lock, flags);
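
Lines 1455-1577 are irdma_resize_cq(). The flow: validate the request (resizing to the current size, line 1491, is a no-op), allocate a new DMA ring for a kernel CQ (the !user_mode branch at 1483), post a cq_modify through the CQP control channel (1549-1557), and only then swap rings under iwcq->lock. The locked tail below is near-verbatim from the matches, with comments added; kmem_buf and cq_buf are locals prepared earlier in the function:

    spin_lock_irqsave(&iwcq->lock, flags);
    /* Park the old ring on resize_list: the poll path must drain any
     * CQEs still in it before the buffer can be freed. */
    cq_buf->kmem_buf = iwcq->kmem;
    memcpy(&cq_buf->cq_uk, &iwcq->sc_cq.cq_uk, sizeof(cq_buf->cq_uk));
    list_add_tail(&cq_buf->list, &iwcq->resize_list);
    /* Adopt the new ring and repoint the uk layer at it. */
    iwcq->kmem = kmem_buf;
    irdma_sc_cq_resize(&iwcq->sc_cq, &info);
    spin_unlock_irqrestore(&iwcq->lock, flags);
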
2792 * @iwcq: cq to poll
2797 __irdma_poll_cq(struct irdma_cq *iwcq, int num_entries, struct ib_wc *entry)
2801 struct irdma_cq_poll_info *cur_cqe = &iwcq->cur_cqe;
2810 iwdev = to_iwdev(iwcq->ibcq.device);
2811 ukcq = &iwcq->sc_cq.cq_uk;
2814 list_for_each_safe(list_node, tmp_node, &iwcq->resize_list) {
2843 ret = irdma_generated_cmpls(iwcq, cur_cqe);
2865 resized_bufs = irdma_process_resize_list(iwcq, iwdev, NULL);
2868 resized_bufs = irdma_process_resize_list(iwcq, iwdev, last_buf);
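
Lines 2792-2868 are __irdma_poll_cq(), the unlocked worker behind poll_cq. It drains completions in a fixed order: first any retired rings still parked on resize_list, then the live ring ukcq, falling back to software-generated flush completions (irdma_generated_cmpls, 2843) when the hardware ring is empty. It then retires resize buffers the poll has fully consumed: all of them if the live ring produced a CQE (NULL sentinel, 2865), otherwise only those before last_buf (2868). A condensed skeleton with declarations and error handling elided; irdma_poll_one() and irdma_process_cqe() are assumed per-CQE helpers:

    /* 1) drain retired rings, oldest first */
    list_for_each_safe(list_node, tmp_node, &iwcq->resize_list) {
        cq_buf = container_of(list_node, struct irdma_cq_buf, list);
        while (npolled < num_entries) {
            ret = irdma_poll_one(&cq_buf->cq_uk, cur_cqe, entry + npolled);
            if (!ret) {
                ++npolled;
                cq_new_cqe = true;
                continue;
            }
            if (ret == -ENOENT)
                break;          /* this retired ring is drained */
            return ret;         /* condensed: real code uses an error label */
        }
        if (cq_new_cqe)
            last_buf = cq_buf;  /* newest retired ring that had CQEs */
        cq_new_cqe = false;
    }

    /* 2) drain the live ring, then software-generated completions */
    while (npolled < num_entries) {
        ret = irdma_poll_one(ukcq, cur_cqe, entry + npolled);
        if (ret == -ENOENT) {
            ret = irdma_generated_cmpls(iwcq, cur_cqe);
            if (!ret)
                irdma_process_cqe(entry + npolled, cur_cqe);
        }
        if (!ret) {
            ++npolled;
            cq_new_cqe = true;
            continue;
        }
        break;
    }

    /* 3) retire resize buffers the poll has moved past */
    if (cq_new_cqe)
        resized_bufs = irdma_process_resize_list(iwcq, iwdev, NULL);
    else if (last_buf)
        resized_bufs = irdma_process_resize_list(iwcq, iwdev, last_buf);
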
2891 struct irdma_cq *iwcq;
2895 iwcq = to_iwcq(ibcq);
2897 spin_lock_irqsave(&iwcq->lock, flags);
2898 ret = __irdma_poll_cq(iwcq, num_entries, entry);
2899 spin_unlock_irqrestore(&iwcq->lock, flags);
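
Lines 2891-2899 show that the verbs entry point irdma_poll_cq() is just a locked wrapper: every poll runs under iwcq->lock with IRQs saved, serializing against resize (which swaps kmem and mutates resize_list under the same lock), the notify path, and irdma_clean_cqes(). Reconstructed directly from the matches:

    static int
    irdma_poll_cq(struct ib_cq *ibcq, int num_entries, struct ib_wc *entry)
    {
        struct irdma_cq *iwcq = to_iwcq(ibcq);
        unsigned long flags;
        int ret;

        spin_lock_irqsave(&iwcq->lock, flags);
        ret = __irdma_poll_cq(iwcq, num_entries, entry);
        spin_unlock_irqrestore(&iwcq->lock, flags);

        return ret;
    }
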
2913 struct irdma_cq *iwcq;
2920 iwcq = to_iwcq(ibcq);
2921 ukcq = &iwcq->sc_cq.cq_uk;
2923 spin_lock_irqsave(&iwcq->lock, flags);
2927 if (iwcq->last_notify == IRDMA_CQ_COMPL_SOLICITED)
2931 if (!atomic_cmpxchg(&iwcq->armed, 0, 1) || promo_event) {
2932 iwcq->last_notify = cq_notify;
2937 (!irdma_cq_empty(iwcq) || !list_empty(&iwcq->cmpl_generated)))
2939 spin_unlock_irqrestore(&iwcq->lock, flags);
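
Lines 2913-2939 are irdma_req_notify_cq(). The arm state is a 0/1 atomic: atomic_cmpxchg(&iwcq->armed, 0, 1) arms the hardware only on the 0 -> 1 transition, except that a request arriving after a solicited-only arm is treated as a promotion (promo_event, 2927) and reissues the arm. If the caller passed IB_CQ_REPORT_MISSED_EVENTS and completions are already pending, in the ring or on cmpl_generated, the function returns 1 so the consumer polls again rather than sleeping on an event that may never fire. A sketch of the locked section; cq_notify is derived from notify_flags earlier in the function, and irdma_uk_cq_request_notification() is an assumed name for the uk-layer arm call:

    spin_lock_irqsave(&iwcq->lock, flags);
    /* A fresh request after a solicited-only arm widens the arm. */
    if (iwcq->last_notify == IRDMA_CQ_COMPL_SOLICITED)
        promo_event = true;

    /* Arm only on the 0 -> 1 transition, or on promotion. */
    if (!atomic_cmpxchg(&iwcq->armed, 0, 1) || promo_event) {
        iwcq->last_notify = cq_notify;
        irdma_uk_cq_request_notification(ukcq, cq_notify); /* assumed */
    }

    if ((notify_flags & IB_CQ_REPORT_MISSED_EVENTS) &&
        (!irdma_cq_empty(iwcq) || !list_empty(&iwcq->cmpl_generated)))
        ret = 1; /* completions already pending; caller should re-poll */
    spin_unlock_irqrestore(&iwcq->lock, flags);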