
/* Ring the SGE GTS doorbell for this CQ through its BAR2 user doorbell page. */
static inline void write_gts(struct t4_cq *cq, u32 val)
{
	writel(val | V_INGRESSQID(cq->bar2_qid),
	       (void __iomem *)((u64)cq->bar2_va + SGE_UDB_GTS));
}
static inline int t4_clear_cq_armed(struct t4_cq *cq)
{
	return test_and_clear_bit(CQ_ARMED, &cq->flags);
}
static inline int t4_arm_cq(struct t4_cq *cq, int se)
{
	u32 val;

	set_bit(CQ_ARMED, &cq->flags);
	/* CIDXINC is only CIDXINC_MASK wide; flush larger updates in chunks. */
	while (cq->cidx_inc > CIDXINC_MASK) {
		val = SEINTARM(0) | CIDXINC(CIDXINC_MASK) | TIMERREG(7);
		writel(val | V_INGRESSQID(cq->bar2_qid),
		       (void __iomem *)((u64)cq->bar2_va + SGE_UDB_GTS));
		cq->cidx_inc -= CIDXINC_MASK;
	}
	val = SEINTARM(se) | CIDXINC(cq->cidx_inc) | TIMERREG(6);
	writel(val | V_INGRESSQID(cq->bar2_qid),
	       (void __iomem *)((u64)cq->bar2_va + SGE_UDB_GTS));
	cq->cidx_inc = 0;
	return 0;
}
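
/*
 * Usage sketch (hypothetical, not part of this header): a request-notify
 * entry point such as the driver's c4iw_arm_cq() would translate the
 * verbs notification flags into the se argument, with the CQ lock held.
 * The chp names below are illustrative only:
 *
 *	spin_lock_irqsave(&chp->lock, flag);
 *	t4_arm_cq(&chp->cq,
 *		  (flags & IB_CQ_SOLICITED_MASK) == IB_CQ_SOLICITED);
 *	spin_unlock_irqrestore(&chp->lock, flag);
 *
 * se != 0 arms for solicited completions only; se == 0 arms for any
 * completion.
 */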
static inline void t4_swcq_produce(struct t4_cq *cq)
{
	cq->sw_in_use++;
	if (cq->sw_in_use == cq->size) {
		CTR2(KTR_IW_CXGBE, "%s cxgb4 sw cq overflow cqid %u",
		     __func__, cq->cqid);
		cq->error = 1;
	}
	if (++cq->sw_pidx == cq->size)
		cq->sw_pidx = 0;
}
static inline void t4_swcq_consume(struct t4_cq *cq)
{
	BUG_ON(cq->sw_in_use < 1);
	cq->sw_in_use--;
	if (++cq->sw_cidx == cq->size)
		cq->sw_cidx = 0;
}
static inline void t4_hwcq_consume(struct t4_cq *cq)
{
	cq->bits_type_ts = cq->queue[cq->cidx].bits_type_ts;
	if (++cq->cidx_inc == (cq->size >> 4) || cq->cidx_inc == M_CIDXINC) {
		u32 val;

		val = SEINTARM(0) | CIDXINC(cq->cidx_inc) | TIMERREG(7);
		write_gts(cq, val);
		cq->cidx_inc = 0;
	}
	if (++cq->cidx == cq->size) {
		cq->cidx = 0;
		cq->gen ^= 1;
	}
}
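
/*
 * Worked example (illustrative): with cq->size == 1024, the cidx update is
 * batched and written back through the GTS doorbell every size >> 4 == 64
 * consumed CQEs, using TIMERREG(7), which updates the index without
 * re-arming the interrupt.  The cq->cidx_inc == M_CIDXINC test keeps the
 * batch within the doorbell field's width for very large rings, where
 * size >> 4 would exceed M_CIDXINC.
 */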
static inline int t4_valid_cqe(struct t4_cq *cq, struct t4_cqe *cqe)
{
	return (CQE_GENBIT(cqe) == cq->gen);
}
static inline int t4_cq_notempty(struct t4_cq *cq)
{
	return cq->sw_in_use || t4_valid_cqe(cq, &cq->queue[cq->cidx]);
}
static inline int t4_next_hw_cqe(struct t4_cq *cq, struct t4_cqe **cqe)
{
	int ret;
	u16 prev_cidx;

	if (cq->cidx == 0)
		prev_cidx = cq->size - 1;
	else
		prev_cidx = cq->cidx - 1;
	if (cq->queue[prev_cidx].bits_type_ts != cq->bits_type_ts) {
		ret = -EOVERFLOW;
		cq->error = 1;
		printk(KERN_ERR MOD "cq overflow cqid %u\n", cq->cqid);
	} else if (t4_valid_cqe(cq, &cq->queue[cq->cidx])) {
		rmb();	/* read CQE contents only after the genbit check */
		*cqe = &cq->queue[cq->cidx];
		ret = 0;
	} else
		ret = -ENODATA;
	return ret;
}
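
/*
 * Note on the genbit scheme (explanatory, not from the source): cq->gen is
 * typically initialized to 1 by the CQ setup code and flips on every wrap
 * (see t4_hwcq_consume), so a slot holds a new CQE exactly when
 * CQE_GENBIT(cqe) == cq->gen.  Overflow is detected by looking at the slot
 * just behind cidx: if the hardware has lapped the consumer and overwritten
 * it, its bits_type_ts no longer matches the value cached at the last
 * t4_hwcq_consume().
 */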
static inline struct t4_cqe *t4_next_sw_cqe(struct t4_cq *cq)
{
	if (cq->sw_in_use == cq->size) {
		CTR2(KTR_IW_CXGBE, "%s cxgb4 sw cq overflow cqid %u",
		     __func__, cq->cqid);
		cq->error = 1;
		return NULL;
	}
	if (cq->sw_in_use)
		return &cq->sw_queue[cq->sw_cidx];
	return NULL;
}
static inline int t4_next_cqe(struct t4_cq *cq, struct t4_cqe **cqe)
{
	int ret = 0;

	if (cq->error)
		ret = -ENODATA;
	else if (cq->sw_in_use)
		*cqe = &cq->sw_queue[cq->sw_cidx];
	else
		ret = t4_next_hw_cqe(cq, cqe);
	return ret;
}
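
/*
 * Poll-loop sketch (hypothetical; in the driver, the c4iw_poll_cq() path
 * plays this role, and handle_cqe() below is a made-up consumer hook).
 * The software queue is drained ahead of the hardware ring, mirroring the
 * priority t4_next_cqe() gives it:
 *
 *	struct t4_cqe *cqe;
 *
 *	while (t4_next_cqe(cq, &cqe) == 0) {
 *		handle_cqe(cqe);
 *		if (cq->sw_in_use)
 *			t4_swcq_consume(cq);	// entry came from sw_queue
 *		else
 *			t4_hwcq_consume(cq);	// entry came from the hw ring
 *	}
 */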
static inline int t4_cq_in_error(struct t4_cq *cq)
{
	return ((struct t4_status_page *)&cq->queue[cq->size])->qp_err;
}
static inline void t4_set_cq_in_error(struct t4_cq *cq)
{
	((struct t4_status_page *)&cq->queue[cq->size])->qp_err = 1;
}
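
/*
 * Layout note (inferred from the casts above): the CQ ring is expected to
 * be allocated with room for one extra entry, so that the memory at
 * &cq->queue[cq->size] (one slot past the last CQE) holds a
 * struct t4_status_page.  Both error helpers index past the end of the
 * ring on purpose to reach that status page.
 */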