Searched refs:qbuf (Results 1 - 11 of 11) sorted by relevance

/linux-master/drivers/net/ethernet/marvell/octeon_ep/
octep_ctrl_mbox.c 135 u8 __iomem *qbuf; local
139 qbuf = (q->hw_q + *pi);
142 memcpy_toio(qbuf, buf, w_sz);
147 memcpy_toio(qbuf, buf, cp_sz);
153 qbuf = (q->hw_q + *pi);
154 memcpy_toio(qbuf, buf, w_sz);
200 u8 __iomem *qbuf; local
204 qbuf = (q->hw_q + *ci);
207 memcpy_fromio(buf, qbuf, r_sz);
212 memcpy_fromio(buf, qbuf, cp_s
[all...]
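
Note: the octep_ctrl_mbox.c hits above copy a message into a control queue that lives in device (__iomem) memory, splitting the memcpy_toio() when the write wraps past the end of the ring. Below is a minimal, hedged sketch of that pattern; struct demo_ioq and demo_ioq_write() are illustrative names, not the octeon_ep API.

#include <linux/types.h>
#include <linux/io.h>

struct demo_ioq {
	u8 __iomem *hw_q;	/* base of the queue in BAR space */
	u32 sz;			/* total queue size in bytes */
};

/* Copy 'len' bytes at producer index *pi, wrapping at the end of the ring. */
static void demo_ioq_write(struct demo_ioq *q, u32 *pi,
			   const void *buf, u32 len)
{
	u8 __iomem *qbuf = q->hw_q + *pi;
	u32 tail = q->sz - *pi;		/* bytes left until the wrap point */

	if (len <= tail) {
		memcpy_toio(qbuf, buf, len);
	} else {
		memcpy_toio(qbuf, buf, tail);
		memcpy_toio(q->hw_q, (const u8 *)buf + tail, len - tail);
	}
	*pi = (*pi + len) % q->sz;	/* advance the producer index */
}
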
/linux-master/drivers/infiniband/hw/erdma/
erdma_cmdq.c 96 sq->qbuf = dma_alloc_coherent(&dev->pdev->dev, sq->depth << SQEBB_SHIFT,
98 if (!sq->qbuf)
118 sq->qbuf, sq->qbuf_dma_addr);
129 cq->qbuf = dma_alloc_coherent(&dev->pdev->dev, cq->depth << CQE_SHIFT,
131 if (!cq->qbuf)
151 dma_free_coherent(&dev->pdev->dev, cq->depth << CQE_SHIFT, cq->qbuf,
163 eq->qbuf = dma_alloc_coherent(&dev->pdev->dev, eq->depth << EQE_SHIFT,
165 if (!eq->qbuf)
186 dma_free_coherent(&dev->pdev->dev, eq->depth << EQE_SHIFT, eq->qbuf,
224 cmdq->cq.qbuf, cmd
[all...]
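
Note: the erdma_cmdq.c hits all follow one allocation pattern: a queue of 'depth' entries, each 1 << SHIFT bytes, is allocated as a single coherent DMA buffer and later freed with the same size and handle. A minimal sketch, assuming a hypothetical struct demo_queue:

#include <linux/types.h>
#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/dma-mapping.h>

struct demo_queue {
	void *qbuf;			/* CPU address of the queue memory */
	dma_addr_t qbuf_dma_addr;	/* bus address handed to the device */
	u32 depth;			/* number of entries, power of two */
};

static int demo_queue_alloc(struct device *dev, struct demo_queue *q, u32 shift)
{
	q->qbuf = dma_alloc_coherent(dev, q->depth << shift,
				     &q->qbuf_dma_addr, GFP_KERNEL);
	if (!q->qbuf)
		return -ENOMEM;

	return 0;
}

static void demo_queue_free(struct device *dev, struct demo_queue *q, u32 shift)
{
	dma_free_coherent(dev, q->depth << shift, q->qbuf, q->qbuf_dma_addr);
}
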
erdma_eq.c 24 u64 *eqe = get_queue_entry(eq->qbuf, eq->ci, eq->depth, EQE_SHIFT);
89 eq->qbuf = dma_alloc_coherent(&dev->pdev->dev, eq->depth << EQE_SHIFT,
91 if (!eq->qbuf)
113 dma_free_coherent(&dev->pdev->dev, eq->depth << EQE_SHIFT, eq->qbuf,
123 dma_free_coherent(&dev->pdev->dev, eq->depth << EQE_SHIFT, eq->qbuf,
238 eq->qbuf = dma_alloc_coherent(&dev->pdev->dev, eq->depth << EQE_SHIFT,
240 if (!eq->qbuf)
253 eq->qbuf, eq->qbuf_dma_addr);
286 dma_free_coherent(&dev->pdev->dev, eq->depth << EQE_SHIFT, eq->qbuf,
erdma.h 22 void *qbuf; member in struct:erdma_eq
41 void *qbuf; member in struct:erdma_cmdq_sq
57 void *qbuf; member in struct:erdma_cmdq_cq
220 static inline void *get_queue_entry(void *qbuf, u32 idx, u32 depth, u32 shift) argument
224 return qbuf + (idx << shift);
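
Note: the erdma.h hit is the indexing helper used by the other erdma files: entries are a power-of-two size (1 << shift), so entry idx sits at qbuf + (idx << shift). The search output only shows the prototype and the return statement; the masking of idx by depth below is an assumption added to make the sketch complete.

#include <linux/types.h>

/* Entry size is 1 << shift bytes; depth is assumed to be a power of two. */
static inline void *demo_get_queue_entry(void *qbuf, u32 idx, u32 depth,
					 u32 shift)
{
	idx &= depth - 1;	/* assumed: wrap the index within the ring */

	return qbuf + (idx << shift);	/* void * arithmetic, as in kernel code */
}
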
erdma_cq.c 11 __be32 *cqe = get_queue_entry(cq->kern_cq.qbuf, cq->kern_cq.ci,
erdma_verbs.h 244 void *qbuf; member in struct:erdma_kcq_info
erdma_verbs.c 1253 cq->kern_cq.qbuf, cq->kern_cq.qbuf_dma_addr);
1606 cq->kern_cq.qbuf =
1609 if (!cq->kern_cq.qbuf)
1625 cq->kern_cq.qbuf, cq->kern_cq.qbuf_dma_addr);
1692 cq->kern_cq.qbuf, cq->kern_cq.qbuf_dma_addr);
/linux-master/drivers/net/ethernet/intel/ice/
ice_lag.c 394 * @qbuf: pointer to buffer to populate
402 ice_lag_qbuf_recfg(struct ice_hw *hw, struct ice_aqc_cfg_txqs_buf *qbuf, argument
430 qbuf->queue_info[count].q_handle = cpu_to_le16(qid);
431 qbuf->queue_info[count].tc = tc;
432 qbuf->queue_info[count].q_teid = cpu_to_le32(q_ctx->q_teid);
524 struct ice_aqc_cfg_txqs_buf *qbuf; local
565 qbuf_size = struct_size(qbuf, queue_info, numq);
566 qbuf = kzalloc(qbuf_size, GFP_KERNEL);
567 if (!qbuf) {
573 valq = ice_lag_qbuf_recfg(&lag->pf->hw, qbuf, vsi_nu
881 struct ice_aqc_cfg_txqs_buf *qbuf; local
1904 struct ice_aqc_cfg_txqs_buf *qbuf; local
[all...]
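
Note: the ice_lag.c hits allocate a command buffer whose trailing queue_info[] flexible array holds 'numq' entries, sized with struct_size() and zeroed with kzalloc(), then populate each entry's q_handle, tc and q_teid. A hedged sketch; struct demo_cfg_txqs_buf only mirrors the shape implied by the hits, not the real ice_aqc_cfg_txqs_buf layout.

#include <linux/types.h>
#include <linux/overflow.h>
#include <linux/slab.h>

struct demo_q_info {
	__le16 q_handle;
	u8 tc;
	__le32 q_teid;
};

struct demo_cfg_txqs_buf {
	__le32 src_teid;			/* assumed header field */
	struct demo_q_info queue_info[];	/* one entry per queue */
};

static struct demo_cfg_txqs_buf *demo_alloc_qbuf(u16 numq)
{
	struct demo_cfg_txqs_buf *qbuf;

	/* header plus numq trailing entries, computed overflow-safe */
	qbuf = kzalloc(struct_size(qbuf, queue_info, numq), GFP_KERNEL);
	return qbuf;
}
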
/linux-master/drivers/net/wireless/mediatek/mt76/
dma.c 646 struct mt76_queue_buf qbuf = {}; local
663 qbuf.addr = addr + q->buf_offset;
665 qbuf.len = len - q->buf_offset;
666 qbuf.skip_unmap = false;
667 if (mt76_dma_add_rx_buf(dev, q, &qbuf, buf) < 0) {
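
Note: the mt76 dma.c hit builds a zeroed on-stack buffer descriptor, offsets the DMA address and length by the queue's headroom (buf_offset), and hands it to the rx ring. A sketch of just the descriptor fill, using stand-in names (struct demo_rx_buf) rather than the mt76_queue_buf API itself:

#include <linux/types.h>

struct demo_rx_buf {
	dma_addr_t addr;	/* DMA address of the data area */
	u16 len;		/* usable payload length */
	bool skip_unmap;	/* whether the core should skip unmapping */
};

static struct demo_rx_buf demo_prepare_rx_buf(dma_addr_t addr, u16 len,
					      u16 buf_offset)
{
	struct demo_rx_buf qbuf = {};

	qbuf.addr = addr + buf_offset;	/* skip the driver's headroom */
	qbuf.len = len - buf_offset;
	qbuf.skip_unmap = false;

	return qbuf;
}
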
/linux-master/drivers/net/ethernet/chelsio/cxgb4vf/
t4vf_hw.c 1300 u16 qbuf[3]; local
1301 u16 *qbp = qbuf;
1305 qbuf[0] = qbuf[1] = qbuf[2] = 0;
1312 *qp++ = cpu_to_be32(FW_RSS_IND_TBL_CMD_IQ0_V(qbuf[0]) |
1313 FW_RSS_IND_TBL_CMD_IQ1_V(qbuf[1]) |
1314 FW_RSS_IND_TBL_CMD_IQ2_V(qbuf[2]));
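
Note: the t4vf_hw.c hit gathers up to three ingress queue ids into a small qbuf[3] array and packs them into one big-endian 32-bit word of the RSS indirection table command. The 10-bit field positions below are assumptions for illustration; the real layout comes from the FW_RSS_IND_TBL_CMD_IQ*_V macros in the Chelsio firmware headers.

#include <linux/types.h>
#include <asm/byteorder.h>

#define DEMO_IQ_MASK	0x3ffU	/* assumed 10-bit queue id fields */
#define DEMO_IQ0_SHIFT	20
#define DEMO_IQ1_SHIFT	10
#define DEMO_IQ2_SHIFT	0

static __be32 demo_pack_rss_entry(const u16 qbuf[3])
{
	u32 val = (qbuf[0] & DEMO_IQ_MASK) << DEMO_IQ0_SHIFT |
		  (qbuf[1] & DEMO_IQ_MASK) << DEMO_IQ1_SHIFT |
		  (qbuf[2] & DEMO_IQ_MASK) << DEMO_IQ2_SHIFT;

	return cpu_to_be32(val);
}
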
/linux-master/include/uapi/linux/
i2o-dev.h 91 void __user *qbuf; /* Pointer to HTTP query string */ member in struct:i2o_html

Completed in 330 milliseconds