Lines Matching refs:sq

62 qp->sq.condition = false;
63 qp->sq.send_phantom = false;
64 qp->sq.single = false;
75 if (!qp->sq.flushed) {
80 qp->sq.flushed = true;
125 if (qp->sq.flushed) {
126 qp->sq.flushed = false;
143 qp->sq.hwq.prod = 0;
144 qp->sq.hwq.cons = 0;
178 struct bnxt_qplib_q *sq = &qp->sq;
186 sq->max_wqe * qp->sq_hdr_buf_size,
200 struct bnxt_qplib_q *sq = &qp->sq;
203 if (qp->sq_hdr_buf_size && sq->max_wqe) {
205 sq->max_wqe * qp->sq_hdr_buf_size,
832 struct bnxt_qplib_q *sq = &qp->sq;
841 sq->dbinfo.flags = 0;
852 hwq_attr.sginfo = &sq->sg_info;
854 hwq_attr.depth = bnxt_qplib_get_depth(sq);
856 rc = bnxt_qplib_alloc_init_hwq(&sq->hwq, &hwq_attr);
860 rc = bnxt_qplib_alloc_init_swq(sq);
864 req.sq_size = cpu_to_le32(bnxt_qplib_set_sq_size(sq, qp->wqe_mode));
865 pbl = &sq->hwq.pbl[PBL_LVL_0];
867 pg_sz_lvl = (bnxt_qplib_base_pg_size(&sq->hwq) <<
869 pg_sz_lvl |= (sq->hwq.level & CMDQ_CREATE_QP1_SQ_LVL_MASK);
872 cpu_to_le16((sq->max_sge & CMDQ_CREATE_QP1_SQ_SGE_MASK) <<
921 sq->dbinfo.hwq = &sq->hwq;
922 sq->dbinfo.xid = qp->id;
923 sq->dbinfo.db = qp->dpi->dbr;
924 sq->dbinfo.max_slot = bnxt_qplib_set_sq_max_slot(qp->wqe_mode);
944 kfree(sq->swq);
946 bnxt_qplib_free_hwq(res, &sq->hwq);
953 struct bnxt_qplib_q *sq;
957 sq = &qp->sq;
958 hwq = &sq->hwq;
975 struct bnxt_qplib_q *sq = &qp->sq;
989 sq->dbinfo.flags = 0;
1012 hwq_attr.sginfo = &sq->sg_info;
1014 hwq_attr.depth = bnxt_qplib_get_depth(sq);
1016 hwq_attr.aux_depth = psn_sz ? bnxt_qplib_set_sq_size(sq, qp->wqe_mode)
1020 hwq_attr.aux_depth = roundup_pow_of_two(bnxt_qplib_set_sq_size(sq, qp->wqe_mode));
1026 rc = bnxt_qplib_alloc_init_hwq(&sq->hwq, &hwq_attr);
1030 rc = bnxt_qplib_alloc_init_swq(sq);
1037 req.sq_size = cpu_to_le32(bnxt_qplib_set_sq_size(sq, qp->wqe_mode));
1038 pbl = &sq->hwq.pbl[PBL_LVL_0];
1040 pg_sz_lvl = (bnxt_qplib_base_pg_size(&sq->hwq) <<
1042 pg_sz_lvl |= (sq->hwq.level & CMDQ_CREATE_QP_SQ_LVL_MASK);
1045 cpu_to_le16(((sq->max_sge & CMDQ_CREATE_QP_SQ_SGE_MASK) <<
1150 sq->dbinfo.hwq = &sq->hwq;
1151 sq->dbinfo.xid = qp->id;
1152 sq->dbinfo.db = qp->dpi->dbr;
1153 sq->dbinfo.max_slot = bnxt_qplib_set_sq_max_slot(qp->wqe_mode);
1174 kfree(sq->swq);
1176 bnxt_qplib_free_hwq(res, &sq->hwq);
1347 req.sq_psn = cpu_to_le32(qp->sq.psn);
1357 req.sq_size = cpu_to_le32(qp->sq.hwq.max_elements);
1359 req.sq_sge = cpu_to_le16(qp->sq.max_sge);
1446 qp->sq.psn = le32_to_cpu(sb->sq_psn);
1449 qp->sq.max_wqe = qp->sq.hwq.max_elements;
1451 qp->sq.max_sge = le16_to_cpu(sb->sq_sge);
1544 bnxt_qplib_free_hwq(res, &qp->sq.hwq);
1545 kfree(qp->sq.swq);
1560 struct bnxt_qplib_q *sq = &qp->sq;
1566 sw_prod = sq->swq_start;
1677 hwq = &qp->sq.hwq;
1745 *qdf = __xlate_qfd(qp->sq.q_full_delta, bytes);
1753 static void bnxt_qplib_pull_psn_buff(struct bnxt_qplib_qp *qp, struct bnxt_qplib_q *sq,
1761 hwq = &sq->hwq;
1764 tail = swq->slot_idx / sq->dbinfo.max_slot;
1779 struct bnxt_qplib_q *sq = &qp->sq;
1781 bnxt_qplib_ring_prod_db(&sq->dbinfo, DBC_DBC_TYPE_SQ);
1789 struct bnxt_qplib_q *sq = &qp->sq;
1802 hwq = &sq->hwq;
1813 if (bnxt_qplib_queue_full(sq, slots + qdf)) {
1816 hwq->prod, hwq->cons, hwq->depth, sq->q_full_delta);
1821 swq = bnxt_qplib_get_swqe(sq, &wqe_idx);
1822 bnxt_qplib_pull_psn_buff(qp, sq, swq, BNXT_RE_HW_RETX(qp->dev_cap_flags));
1830 swq->start_psn = sq->psn & BTH_PSN_MASK;
1891 sq->psn = (sq->psn + 1) & BTH_PSN_MASK;
1903 sq->psn = (sq->psn + pkt_num) & BTH_PSN_MASK;
1925 sq->psn = (sq->psn + pkt_num) & BTH_PSN_MASK;
1944 sq->psn = (sq->psn + pkt_num) & BTH_PSN_MASK;
2013 swq->next_psn = sq->psn & BTH_PSN_MASK;
2017 bnxt_qplib_swq_mod_start(sq, wqe_idx);
2018 bnxt_qplib_hwq_incr_prod(&sq->dbinfo, hwq, swq->slots);
2273 static int __flush_sq(struct bnxt_qplib_q *sq, struct bnxt_qplib_qp *qp,
2281 start = sq->swq_start;
2284 last = sq->swq_last;
2288 if (sq->swq[last].wr_id == BNXT_QPLIB_FENCE_WRID) {
2296 cqe->wr_id = sq->swq[last].wr_id;
2298 cqe->type = sq->swq[last].type;
2302 bnxt_qplib_hwq_incr_cons(sq->hwq.max_elements, &sq->hwq.cons,
2303 sq->swq[last].slots, &sq->dbinfo.flags);
2304 sq->swq_last = sq->swq[last].next_idx;
2307 if (!(*budget) && sq->swq_last != start)
2381 struct bnxt_qplib_q *sq = &qp->sq;
2391 swq = &sq->swq[swq_last];
2399 "FP: Process Req cq_cons=0x%x qp=0x%x sq cons sw=0x%x cqe=0x%x marked!\n",
2401 sq->condition = true;
2402 sq->send_phantom = true;
2409 if (sq->condition) {
2434 peek_sq = &peek_qp->sq;
2438 - 1) % sq->max_wqe);
2439 /* If the hwcqe's sq's wr_id matches */
2440 if (peek_sq == sq &&
2441 sq->swq[peek_sq_cons_idx].wr_id ==
2449 sq->condition = false;
2450 sq->single = true;
2466 "Should not have come here! cq_cons=0x%x qp=0x%x sq cons sw=0x%x hw=0x%x\n",
2482 struct bnxt_qplib_q *sq;
2493 sq = &qp->sq;
2495 cqe_sq_cons = le16_to_cpu(hwcqe->sq_cons_idx) % sq->max_wqe;
2496 if (qp->sq.flushed) {
2501 /* Require to walk the sq's swq to fabricate CQEs for all previously
2502 * signaled SWQEs due to CQE aggregation from the current sq cons
2507 if (sq->swq_last == cqe_sq_cons)
2511 swq = &sq->swq[sq->swq_last];
2530 sq->swq_last, cqe->wr_id, cqe->status);
2538 if (do_wa9060(qp, cq, cq_cons, sq->swq_last,
2550 bnxt_qplib_hwq_incr_cons(sq->hwq.max_elements, &sq->hwq.cons,
2551 swq->slots, &sq->dbinfo.flags);
2552 sq->swq_last = swq->next_idx;
2553 if (sq->single)
2558 if (sq->swq_last != cqe_sq_cons) {
2567 sq->single = false;
2860 struct bnxt_qplib_q *sq, *rq;
2879 sq = &qp->sq;
2885 cqe_cons %= sq->max_wqe;
2887 if (qp->sq.flushed) {
2894 * So we must complete all CQEs from the current sq's cons to the
2899 swq_last = sq->swq_last;
2902 if (sq->swq[swq_last].flags & SQ_SEND_FLAGS_SIGNAL_COMP) {
2908 cqe->wr_id = sq->swq[swq_last].wr_id;
2909 cqe->type = sq->swq[swq_last].type;
2913 bnxt_qplib_hwq_incr_cons(sq->hwq.max_elements, &sq->hwq.cons,
2914 sq->swq[swq_last].slots, &sq->dbinfo.flags);
2915 sq->swq_last = sq->swq[swq_last].next_idx;
2983 __flush_sq(&qp->sq, qp, &cqe, &budget);
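
The matches in the post-send path above come down to two recurring pieces of arithmetic: the bnxt_qplib_queue_full() test against prod/cons/depth and q_full_delta before a WQE is accepted, and the PSN bookkeeping that stamps swq->start_psn/next_psn and advances sq->psn by the WQE's packet count under BTH_PSN_MASK. The self-contained user-space sketch below illustrates only that arithmetic; the struct and helper names (sq_state, sq_queue_full, sq_advance_psn) are hypothetical and do not reproduce the driver's real data structures or helpers.

/*
 * Minimal sketch (not driver code) of the SQ "queue full" reserve check
 * and the 24-bit BTH PSN advance seen in the listing above.
 * All names here are hypothetical stand-ins for illustration only.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define BTH_PSN_MASK 0xFFFFFFu    /* PSN is a 24-bit field in the BTH */

struct sq_state {
    uint32_t prod;           /* producer index, in slots             */
    uint32_t cons;           /* consumer index, in slots             */
    uint32_t depth;          /* total slots in the ring (power of 2) */
    uint32_t q_full_delta;   /* slots held back as a reserve         */
    uint32_t psn;            /* next PSN to hand out                 */
};

/* "Full" when the requested slots would eat into the reserved delta. */
static bool sq_queue_full(const struct sq_state *sq, uint32_t slots)
{
    uint32_t used = (sq->prod - sq->cons) & (sq->depth - 1);

    return used + slots + sq->q_full_delta > sq->depth;
}

/* Stamp start/next PSNs for a WQE spanning pkt_num MTU-sized packets. */
static void sq_advance_psn(struct sq_state *sq, uint32_t data_len,
                           uint32_t mtu, uint32_t *start_psn,
                           uint32_t *next_psn)
{
    uint32_t pkt_num = (data_len + mtu - 1) / mtu;

    if (!pkt_num)
        pkt_num = 1;         /* a zero-length send still consumes one PSN */

    *start_psn = sq->psn & BTH_PSN_MASK;
    sq->psn = (sq->psn + pkt_num) & BTH_PSN_MASK;
    *next_psn = sq->psn & BTH_PSN_MASK;
}

int main(void)
{
    struct sq_state sq = { .depth = 256, .q_full_delta = 1, .psn = 0xFFFFFE };
    uint32_t start, next;

    if (!sq_queue_full(&sq, 4)) {
        sq_advance_psn(&sq, 8192, 4096, &start, &next);  /* two packets */
        printf("start_psn=0x%x next_psn=0x%x\n", start, next);
    }
    return 0;
}

Running the sketch prints start_psn=0xfffffe next_psn=0x0: the PSN wraps across the two-packet WQE, which is the same masking the driver applies at each sq->psn update shown in the listing.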