Lines Matching refs:wqe

169 struct siw_wqe *wqe = &frx->wqe_active;
176 srx->ddp_stag = wqe->sqe.sge[0].lkey;
177 srx->ddp_to = wqe->sqe.sge[0].laddr;
204 (wqe->processed + srx->fpdu_part_rem != wqe->bytes))) {
207 wqe->processed + srx->fpdu_part_rem, wqe->bytes);
281 struct siw_wqe *wqe = &frx->wqe_active;
301 if (unlikely(ddp_mo != wqe->processed)) {
303 qp_id(rx_qp(srx)), ddp_mo, wqe->processed);
316 if (unlikely(wqe->bytes < wqe->processed + srx->fpdu_part_rem)) {
318 wqe->bytes, wqe->processed, srx->fpdu_part_rem);
319 wqe->wc_status = SIW_WC_LOC_LEN_ERR;
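
The matches at 169-207 come from the tagged read-response header checks (the target STag/TO are taken from the active WQE's first SGE), while 281-319 are the untagged SEND checks comparing the incoming DDP message offset and remaining length against the WQE. A minimal userspace sketch of those two SEND-side checks, using simplified stand-in types rather than the driver's structs:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Simplified stand-in for the receive-side bookkeeping in the listing. */
struct rx_wqe {
	uint32_t processed;	/* bytes already placed for this message */
	uint32_t bytes;		/* total receive space posted for it     */
};

/* Untagged SEND header check: the DDP message offset must match what was
 * already consumed, and the remaining FPDU payload must still fit. */
static bool send_hdr_ok(const struct rx_wqe *wqe, uint32_t ddp_mo,
			uint32_t fpdu_part_rem)
{
	if (ddp_mo != wqe->processed)
		return false;	/* out-of-order MO: DDP error in the driver */
	if (wqe->bytes < wqe->processed + fpdu_part_rem)
		return false;	/* would overrun: SIW_WC_LOC_LEN_ERR        */
	return true;
}

int main(void)
{
	struct rx_wqe wqe = { .processed = 512, .bytes = 1024 };

	printf("next segment ok:  %d\n", send_hdr_ok(&wqe, 512, 512));
	printf("overlong segment: %d\n", send_hdr_ok(&wqe, 512, 1024));
	printf("out-of-order MO:  %d\n", send_hdr_ok(&wqe, 256, 128));
	return 0;
}
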
334 struct siw_wqe *wqe = NULL;
357 wqe = rx_wqe(&qp->rx_untagged);
358 rx_type(wqe) = SIW_OP_RECEIVE;
359 wqe->wr_status = SIW_WR_INPROGRESS;
360 wqe->bytes = 0;
361 wqe->processed = 0;
363 wqe->rqe.id = rqe->id;
364 wqe->rqe.num_sge = num_sge;
367 wqe->rqe.sge[i].laddr = rqe->sge[i].laddr;
368 wqe->rqe.sge[i].lkey = rqe->sge[i].lkey;
369 wqe->rqe.sge[i].length = rqe->sge[i].length;
370 wqe->bytes += wqe->rqe.sge[i].length;
371 wqe->mem[i] = NULL;
405 return wqe;
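
Lines 334-405 show the receive WQE being built from the next queued RQE: type and status are set, the SGE list is copied over, wqe->bytes accumulates the posted receive space, and the per-SGE memory pointers stay NULL until data actually arrives. A rough, self-contained sketch of that initialization with simplified types (field and type names here are illustrative, not the driver's):

#include <stdint.h>

#define MAX_SGE 6	/* illustrative limit, not necessarily the driver's */

struct sge {
	uint64_t laddr;
	uint32_t length;
	uint32_t lkey;
};

struct rqe {
	uint64_t id;
	uint32_t num_sge;
	struct sge sge[MAX_SGE];
};

struct rx_wqe {
	uint64_t id;
	uint32_t num_sge;
	struct sge sge[MAX_SGE];
	void	*mem[MAX_SGE];	/* memory objects, resolved only when data lands */
	uint32_t bytes;		/* total receive space across all SGEs           */
	uint32_t processed;	/* bytes placed so far                           */
};

/* Copy a freshly fetched RQE into the active receive WQE and add up how
 * many bytes it can absorb, mirroring the field copies in the listing. */
static void wqe_init_from_rqe(struct rx_wqe *wqe, const struct rqe *rqe)
{
	uint32_t i;

	wqe->id = rqe->id;
	wqe->num_sge = rqe->num_sge;
	wqe->bytes = 0;
	wqe->processed = 0;

	for (i = 0; i < rqe->num_sge; i++) {
		wqe->sge[i] = rqe->sge[i];
		wqe->bytes += rqe->sge[i].length;
		wqe->mem[i] = NULL;
	}
}

int main(void)
{
	struct rqe rqe = {
		.id = 1, .num_sge = 2,
		.sge = { { 0x1000, 256, 7 }, { 0x2000, 512, 8 } },
	};
	struct rx_wqe wqe;

	wqe_init_from_rqe(&wqe, &rqe);
	return wqe.bytes == 768 ? 0 : 1;
}
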
426 * receive wqe.
429 * current receive wqe processing)
439 struct siw_wqe *wqe;
445 wqe = siw_rqe_get(qp);
446 if (unlikely(!wqe)) {
453 wqe = rx_wqe(frx);
474 sge = &wqe->rqe.sge[frx->sge_idx];
484 mem = &wqe->mem[frx->sge_idx];
505 wqe->processed += rcvd_bytes;
525 wqe->processed += rcvd_bytes;
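
The 426-525 matches belong to SEND payload placement: incoming bytes are scattered across the WQE's SGE list and wqe->processed advances with each chunk, so the next DDP segment can resume where the previous one stopped. A condensed sketch of such a scatter loop, with plain memcpy standing in for the driver's memory-region copy helpers and invented names for the resume state:

#include <stdint.h>
#include <string.h>

#define MAX_SGE 6	/* illustrative */

struct sge {
	uint8_t	*laddr;		/* plain pointers here; the driver maps lkeys */
	uint32_t length;
};

struct rx_wqe {
	struct sge sge[MAX_SGE];
	uint32_t num_sge;
	uint32_t processed;
};

struct rx_frag {		/* per-FPDU resume state (current SGE, offset) */
	uint32_t sge_idx;
	uint32_t sge_off;
};

/* Scatter 'len' bytes from 'src' into the WQE's SGE list, resuming at the
 * saved fragment state and advancing wqe->processed. Returns bytes placed. */
static uint32_t wqe_place_data(struct rx_wqe *wqe, struct rx_frag *frx,
			       const uint8_t *src, uint32_t len)
{
	uint32_t placed = 0;

	while (len && frx->sge_idx < wqe->num_sge) {
		struct sge *sge = &wqe->sge[frx->sge_idx];
		uint32_t room = sge->length - frx->sge_off;
		uint32_t bytes = len < room ? len : room;

		memcpy(sge->laddr + frx->sge_off, src + placed, bytes);

		frx->sge_off += bytes;
		placed += bytes;
		len -= bytes;

		if (frx->sge_off == sge->length) {	/* SGE full, move on */
			frx->sge_idx++;
			frx->sge_off = 0;
		}
	}
	wqe->processed += placed;
	return placed;
}

int main(void)
{
	uint8_t buf0[4], buf1[8];
	uint8_t src[12] = { 0 };
	struct rx_wqe wqe = {
		.sge = { { buf0, sizeof(buf0) }, { buf1, sizeof(buf1) } },
		.num_sge = 2,
	};
	struct rx_frag frx = { 0, 0 };

	/* Two segments arriving back to back resume at the saved offset. */
	wqe_place_data(&wqe, &frx, src, 5);
	wqe_place_data(&wqe, &frx, src + 5, 7);
	return wqe.processed == 12 ? 0 : 1;
}
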
565 struct siw_wqe *wqe = rx_wqe(frx);
578 wqe->rqe.num_sge = 1;
579 rx_type(wqe) = SIW_OP_WRITE;
580 wqe->wr_status = SIW_WR_INPROGRESS;
712 /* RRESP now valid as current TX wqe or placed into IRQ */
735 * Transfer pending Read from tip of ORQ into current rx wqe,
742 struct siw_wqe *wqe = NULL;
753 wqe = rx_wqe(&qp->rx_tagged);
754 wqe->sqe.id = orqe->id;
755 wqe->sqe.opcode = orqe->opcode;
756 wqe->sqe.sge[0].laddr = orqe->sge[0].laddr;
757 wqe->sqe.sge[0].lkey = orqe->sge[0].lkey;
758 wqe->sqe.sge[0].length = orqe->sge[0].length;
759 wqe->sqe.flags = orqe->flags;
760 wqe->sqe.num_sge = 1;
761 wqe->bytes = orqe->sge[0].length;
762 wqe->processed = 0;
763 wqe->mem[0] = NULL;
766 wqe->wr_status = SIW_WR_INPROGRESS;
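
Per the comment at 735, lines 742-766 take the pending READ from the tip of the ORQ and install it as the current tagged rx WQE before the response payload is processed. A toy ring-buffer version of that hand-off; the index handling and struct layout below are assumptions for illustration, only the copied fields follow the listing:

#include <stdint.h>

#define ORQ_SIZE 8	/* illustrative ORQ depth */

struct orq_entry {
	uint64_t id;
	uint32_t opcode;
	uint32_t length;
	int	 valid;
};

struct read_rx_wqe {
	uint64_t id;
	uint32_t opcode;
	uint32_t bytes;
	uint32_t processed;
};

struct qp_state {
	struct orq_entry orq[ORQ_SIZE];
	uint32_t orq_get;	/* consumer index: tip of the ORQ */
};

/* Install the oldest outstanding READ as the current rx WQE and free
 * its ORQ slot. Returns 0 on success, -1 if no READ is pending. */
static int orq_start_rx(struct qp_state *qp, struct read_rx_wqe *wqe)
{
	struct orq_entry *orqe = &qp->orq[qp->orq_get % ORQ_SIZE];

	if (!orqe->valid)
		return -1;	/* unsolicited response: protocol error */

	wqe->id = orqe->id;
	wqe->opcode = orqe->opcode;
	wqe->bytes = orqe->length;
	wqe->processed = 0;

	orqe->valid = 0;	/* close the ORQ entry */
	qp->orq_get++;
	return 0;
}

int main(void)
{
	struct qp_state qp = { 0 };
	struct read_rx_wqe wqe;

	qp.orq[0] = (struct orq_entry){ .id = 42, .opcode = 2,
					.length = 4096, .valid = 1 };
	return orq_start_rx(&qp, &wqe);	/* 0: READ transferred to rx WQE */
}
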
786 struct siw_wqe *wqe = rx_wqe(frx);
792 if (unlikely(wqe->wr_status != SIW_WR_IDLE)) {
794 qp_id(qp), wqe->wr_status, wqe->sqe.opcode);
813 if (unlikely(wqe->wr_status != SIW_WR_INPROGRESS)) {
815 qp_id(qp), wqe->wr_status);
823 sge = wqe->sqe.sge; /* there is only one */
824 mem = &wqe->mem[0];
831 wqe->bytes);
834 wqe->wc_status = SIW_WC_LOC_PROT_ERR;
849 sge->laddr + wqe->processed, bytes);
851 wqe->wc_status = SIW_WC_GENERAL_ERR;
857 wqe->processed += rv;
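
Lines 786-857 are the read-response placement path: the payload lands in the READ request's single local SGE at offset wqe->processed, and failures set the WQE's completion status (lines 831-834 suggest a range check against wqe->bytes reporting SIW_WC_LOC_PROT_ERR). A condensed sketch with plain memory instead of lkey-backed regions, folding that range check into the copy step:

#include <stdbool.h>
#include <stdint.h>
#include <string.h>

enum wc_status { WC_SUCCESS, WC_LOC_PROT_ERR, WC_GENERAL_ERR };

struct read_sge {
	uint8_t	*laddr;		/* plain buffer standing in for an lkey-backed MR */
	uint32_t length;
};

struct rx_wqe {
	struct read_sge sge;	/* a READ request carries exactly one local SGE */
	uint32_t bytes;		/* total response length expected               */
	uint32_t processed;	/* bytes already placed                         */
	enum wc_status wc_status;
};

/* Place one read-response chunk at the running offset; if the chunk would
 * run past the posted buffer, record an error status instead. */
static bool rresp_place(struct rx_wqe *wqe, const uint8_t *src, uint32_t len)
{
	if (wqe->processed + len > wqe->bytes) {
		wqe->wc_status = WC_LOC_PROT_ERR;
		return false;
	}
	memcpy(wqe->sge.laddr + wqe->processed, src, len);
	wqe->processed += len;
	return true;
}

int main(void)
{
	uint8_t buf[16], chunk[8] = { 0 };
	struct rx_wqe wqe = { { buf, sizeof(buf) }, sizeof(buf), 0, WC_SUCCESS };

	rresp_place(&wqe, chunk, sizeof(chunk));	/* first segment  */
	rresp_place(&wqe, chunk, sizeof(chunk));	/* second segment */
	return wqe.processed == sizeof(buf) ? 0 : 1;
}
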
1202 struct siw_wqe *wqe = rx_wqe(qp->rx_fpdu);
1203 enum siw_wc_status wc_status = wqe->wc_status;
1210 wqe->rqe.flags |= SIW_WQE_SOLICITED;
1215 if (wqe->wr_status == SIW_WR_IDLE)
1239 rv = siw_rqe_complete(qp, &wqe->rqe, wqe->processed,
1243 rv = siw_rqe_complete(qp, &wqe->rqe, wqe->processed,
1246 siw_wqe_put_mem(wqe, SIW_OP_RECEIVE);
1250 if (wqe->wr_status == SIW_WR_IDLE)
1262 rx_type(wqe) == SIW_OP_READ_LOCAL_INV) {
1266 rv = siw_invalidate_stag(qp->pd, wqe->sqe.sge[0].lkey);
1279 * All errors turn the wqe into signalled.
1281 if ((wqe->sqe.flags & SIW_WQE_SIGNALLED) || error != 0)
1282 rv = siw_sqe_complete(qp, &wqe->sqe, wqe->processed,
1284 siw_wqe_put_mem(wqe, SIW_OP_READ);
1303 if (wqe->wr_status == SIW_WR_IDLE)
1321 wqe->wr_status = SIW_WR_IDLE;
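
Finally, 1202-1321 complete the rx WQE: receives are reported with the processed byte count, a READ response only generates a completion if the request was signalled or an error occurred, and the WQE slot returns to SIW_WR_IDLE. A small sketch of that completion rule, with a printf standing in for posting the CQE:

#include <stdint.h>
#include <stdio.h>

#define WQE_SIGNALLED	(1u << 0)	/* stand-in for the signalled WR flag */

enum wr_status { WR_IDLE, WR_INPROGRESS };

struct done_wqe {
	uint32_t flags;			/* flags of the originating SQE */
	uint32_t processed;		/* bytes actually transferred   */
	enum wr_status wr_status;
};

/* Mirror of the READ-response completion rule visible above: every error
 * makes the WQE signalled, otherwise only explicitly signalled requests
 * generate a completion; the WQE slot always returns to idle. */
static void complete_read(struct done_wqe *wqe, int error)
{
	if ((wqe->flags & WQE_SIGNALLED) || error != 0)
		printf("CQE: %u bytes, status %d\n",
		       (unsigned int)wqe->processed, error);

	wqe->wr_status = WR_IDLE;
}

int main(void)
{
	struct done_wqe unsignalled = { 0, 4096, WR_INPROGRESS };
	struct done_wqe failed = { 0, 0, WR_INPROGRESS };

	complete_read(&unsignalled, 0);	/* completes silently  */
	complete_read(&failed, -1);	/* error forces a CQE  */
	return 0;
}
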