Lines matching refs:sq (references to the QP send queue, qp->sq)

221 			(n << qp->sq.wqe_shift);
224 (n << qp->sq.wqe_shift)) >>
226 ((qp->send_wqe_offset + (n << qp->sq.wqe_shift)) &
505 qp_attr->cap.max_send_wr = qp->sq.max;
507 qp_attr->cap.max_send_sge = qp->sq.max_gs;
619 if (qp->sq.max)
620 qp_context->sq_size_stride = ilog2(qp->sq.max) << 3;
621 qp_context->sq_size_stride |= qp->sq.wqe_shift - 4;
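These hits appear to come from the mthca InfiniBand driver's QP code (mthca_qp.c). The three at file lines 619-621 pack the send-queue geometry into the QP context's sq_size_stride byte: log2 of the WQE count in the upper bits and log2 of the WQE stride minus 4 in the low three bits. A minimal sketch of that encoding, assuming sq.max is a power of two and wqe_shift is at least 4 (the helper name and standalone form are mine, not the driver's):

#include <stdint.h>
#include <stdio.h>

/* Hypothetical helper mirroring the sq_size_stride assignment above:
 * max_wqes plays the role of qp->sq.max, wqe_shift of qp->sq.wqe_shift.
 */
static uint8_t encode_sq_size_stride(unsigned int max_wqes, unsigned int wqe_shift)
{
        unsigned int log_max = 0;
        uint8_t v = 0;

        while ((1u << log_max) < max_wqes)      /* ilog2() for a power-of-two max */
                ++log_max;

        if (max_wqes)                           /* size bits only if the SQ exists */
                v = (uint8_t)(log_max << 3);
        v |= (wqe_shift - 4) & 0x7;             /* log2(stride in bytes) - 4 */
        return v;
}

int main(void)
{
        /* 256 WQEs of 64 bytes each: size bits = 8, stride bits = 2 -> 0x42 */
        printf("sq_size_stride = 0x%02x\n", encode_sq_size_stride(256, 6));
        return 0;
}
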
739 qp_context->snd_db_index = cpu_to_be32(qp->sq.db_index);
840 mthca_wq_reset(&qp->sq);
841 qp->sq.last = get_send_wqe(qp, qp->sq.max - 1);
847 *qp->sq.db = 0;
873 spin_lock_irq(&qp->sq.lock);
877 spin_unlock_irq(&qp->sq.lock);
971 1 << qp->sq.wqe_shift));
975 qp->sq.max_gs = min_t(int, dev->limits.max_sg,
984 * Allocate and register buffer for WQEs. qp->rq.max, sq.max,
985 * rq.max_gs and sq.max_gs must all be assigned.
987 * sq.wqe_shift (as well as send_wqe_offset, is_direct, and
1008 size = qp->sq.max_gs * sizeof (struct mthca_data_seg);
1048 for (qp->sq.wqe_shift = 6; 1 << qp->sq.wqe_shift < size;
1049 qp->sq.wqe_shift++)
1053 1 << qp->sq.wqe_shift);
1064 (qp->sq.max << qp->sq.wqe_shift));
1066 qp->wrid = kmalloc_array(qp->rq.max + qp->sq.max, sizeof(u64),
1087 (qp->sq.max << qp->sq.wqe_shift)),
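The allocation hits around file lines 1008-1087 show where wqe_shift comes from: the per-WQE size is rounded up to the next power of two, never below 64 bytes, so that entry n can be located with a shift instead of a multiply (the offset math in the first few hits). A self-contained sketch of that layout; the 16-byte data segment and the 64-byte control portion are assumptions for illustration, not the driver's exact sizing:

#include <stdio.h>
#include <stdlib.h>

int main(void)
{
        unsigned int max_wqes = 128;    /* stands in for qp->sq.max              */
        unsigned int max_gs = 4;        /* stands in for qp->sq.max_gs           */
        size_t data_seg = 16;           /* assumed sizeof(struct mthca_data_seg) */
        size_t size = 64 + max_gs * data_seg;   /* illustrative control part + S/G list */
        unsigned int wqe_shift;

        /* Round the WQE size up to a power of two, never below 64 bytes,
         * as the wqe_shift loop in the hits above does.
         */
        for (wqe_shift = 6; (1u << wqe_shift) < size; ++wqe_shift)
                ;

        unsigned char *buf = calloc(max_wqes, (size_t)1 << wqe_shift);
        if (!buf)
                return 1;

        /* Entry n is then found with a shift, as in get_send_wqe(). */
        unsigned int n = 5;
        unsigned char *wqe = buf + ((size_t)n << wqe_shift);

        printf("stride %u bytes, WQE %u at offset %zu, buffer %zu bytes\n",
               1u << wqe_shift, n, (size_t)(wqe - buf),
               (size_t)max_wqes << wqe_shift);
        free(buf);
        return 0;
}
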
1142 qp->sq.db_index = mthca_alloc_db(dev, MTHCA_DB_TYPE_SQ,
1143 qp->qpn, &qp->sq.db);
1144 if (qp->sq.db_index < 0) {
1157 mthca_free_db(dev, MTHCA_DB_TYPE_SQ, qp->sq.db_index);
1181 mthca_wq_reset(&qp->sq);
1184 spin_lock_init(&qp->sq.lock);
1231 for (i = 0; i < qp->sq.max; ++i) {
1233 next->nda_op = cpu_to_be32((((i + 1) & (qp->sq.max - 1)) <<
1234 qp->sq.wqe_shift) +
1246 qp->sq.last = get_send_wqe(qp, qp->sq.max - 1);
1275 qp->sq.max = cap->max_send_wr ?
1279 qp->sq.max = cap->max_send_wr;
1283 qp->sq.max_gs = max_t(int, cap->max_send_sge,
1385 qp->sqp->header_buf_size = qp->sq.max * MTHCA_UD_HEADER_SIZE;
1649 spin_lock_irqsave(&qp->sq.lock, flags);
1653 ind = qp->sq.next_ind;
1656 if (mthca_wq_overflow(&qp->sq, nreq, qp->ibqp.send_cq)) {
1659 qp->sq.head, qp->sq.tail,
1660 qp->sq.max, nreq);
1667 prev_wqe = qp->sq.last;
1668 qp->sq.last = wqe;
1752 if (wr->num_sge > qp->sq.max_gs) {
1784 cpu_to_be32(((ind << qp->sq.wqe_shift) +
1801 if (unlikely(ind >= qp->sq.max))
1802 ind -= qp->sq.max;
1809 mthca_write64(((qp->sq.next_ind << qp->sq.wqe_shift) +
1816 qp->sq.next_ind = ind;
1817 qp->sq.head += nreq;
1819 spin_unlock_irqrestore(&qp->sq.lock, flags);
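The Tavor post_send hits (file lines 1649-1819) treat head and tail as free-running counters: mthca_wq_overflow() rejects a batch when the outstanding count plus the new requests would exceed sq.max, the posting index ind wraps by subtracting sq.max, and sq.head advances by the number of requests actually posted. A rough model of that bookkeeping, simplified (the real overflow check also rechecks head - tail under the completion queue's lock):

#include <stdio.h>

/* Minimal model of the send-queue bookkeeping seen in these hits (field
 * names follow the driver; the struct itself is a simplification): head
 * counts posted WQEs, tail counts completed ones, max is the ring size.
 */
struct wq {
        unsigned head;
        unsigned tail;
        unsigned max;           /* power of two */
        unsigned next_ind;      /* next slot to write, in [0, max) */
};

/* Roughly what mthca_wq_overflow() decides before accepting nreq more WQEs. */
static int wq_overflow(const struct wq *wq, int nreq)
{
        return (wq->head - wq->tail) + nreq >= wq->max;
}

int main(void)
{
        struct wq sq = { .head = 0, .tail = 0, .max = 8, .next_ind = 0 };
        unsigned ind = sq.next_ind;
        int nreq;

        for (nreq = 0; nreq < 10; ++nreq) {
                if (wq_overflow(&sq, nreq)) {
                        printf("SQ full: head %u tail %u max %u nreq %d\n",
                               sq.head, sq.tail, sq.max, nreq);
                        break;
                }
                /* ... build the WQE at slot ind ... */
                if (++ind >= sq.max)    /* wrap, matching the "ind -= qp->sq.max" hits */
                        ind -= sq.max;
        }
        sq.next_ind = ind;
        sq.head += nreq;        /* free-running count of posted WQEs */
        printf("posted %d, next_ind %u, head %u\n", nreq, sq.next_ind, sq.head);
        return 0;
}
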
1953 spin_lock_irqsave(&qp->sq.lock, flags);
1957 ind = qp->sq.head & (qp->sq.max - 1);
1964 ((qp->sq.head & 0xffff) << 8) | f0 | op0;
1966 qp->sq.head += MTHCA_ARBEL_MAX_WQES_PER_SEND_DB;
1973 *qp->sq.db = cpu_to_be32(qp->sq.head & 0xffff);
1986 if (mthca_wq_overflow(&qp->sq, nreq, qp->ibqp.send_cq)) {
1989 qp->sq.head, qp->sq.tail,
1990 qp->sq.max, nreq);
1997 prev_wqe = qp->sq.last;
1998 qp->sq.last = wqe;
2082 if (wr->num_sge > qp->sq.max_gs) {
2114 cpu_to_be32(((ind << qp->sq.wqe_shift) +
2131 if (unlikely(ind >= qp->sq.max))
2132 ind -= qp->sq.max;
2137 dbhi = (nreq << 24) | ((qp->sq.head & 0xffff) << 8) | f0 | op0;
2139 qp->sq.head += nreq;
2146 *qp->sq.db = cpu_to_be32(qp->sq.head & 0xffff);
2158 spin_unlock_irqrestore(&qp->sq.lock, flags);
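The mem-free (Arbel) post_send hits (file lines 1953-2158) take the slot index straight from the low bits of sq.head, ring the doorbell mid-loop once a batch reaches MTHCA_ARBEL_MAX_WQES_PER_SEND_DB, and write the low 16 bits of the free-running head counter, big-endian, into the doorbell record. A sketch of just that bit packing; the f0/op0 values are made up and htonl() stands in for cpu_to_be32():

#include <stdint.h>
#include <stdio.h>
#include <arpa/inet.h>          /* htonl() stands in for cpu_to_be32() */

int main(void)
{
        /* Illustrative values: in the driver, f0 and op0 carry the fence
         * and opcode bits of the first WQE in the batch, and head is the
         * free-running qp->sq.head counter.
         */
        unsigned head = 0x12345;
        int nreq = 3;
        uint32_t f0 = 0, op0 = 0x08;    /* hypothetical opcode bits */

        /* High doorbell word, composed as in the hits above: batch size in
         * the top byte, low 16 bits of head in the middle, f0/op0 below.
         */
        uint32_t dbhi = ((uint32_t)nreq << 24) | ((head & 0xffffu) << 8) | f0 | op0;

        head += nreq;

        /* Doorbell record update: hardware sees only the low 16 bits of
         * head, stored big-endian.
         */
        uint32_t db_rec = htonl(head & 0xffffu);

        printf("dbhi = 0x%08x, *sq.db = 0x%08x\n",
               (unsigned)dbhi, (unsigned)db_rec);
        return 0;
}
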