Searched refs:ring (Results 226 - 250 of 804) sorted by relevance


/linux-master/drivers/gpu/drm/amd/amdgpu/
gfx_v7_0.c
2018 * gfx_v7_0_ring_test_ring - basic gfx ring test
2020 * @ring: amdgpu_ring structure holding ring information
2022 * Allocate a scratch register and write to it using the gfx ring (CIK).
2023 * Provides a basic gfx ring test to verify that the ring is working.
2027 static int gfx_v7_0_ring_test_ring(struct amdgpu_ring *ring) argument
2029 struct amdgpu_device *adev = ring->adev;
2035 r = amdgpu_ring_alloc(ring, 3);
2039 amdgpu_ring_write(ring, PACKET
2062 gfx_v7_0_ring_emit_hdp_flush(struct amdgpu_ring *ring) argument
2093 gfx_v7_0_ring_emit_vgt_flush(struct amdgpu_ring *ring) argument
2115 gfx_v7_0_ring_emit_fence_gfx(struct amdgpu_ring *ring, u64 addr, u64 seq, unsigned flags) argument
2158 gfx_v7_0_ring_emit_fence_compute(struct amdgpu_ring *ring, u64 addr, u64 seq, unsigned flags) argument
2195 gfx_v7_0_ring_emit_ib_gfx(struct amdgpu_ring *ring, struct amdgpu_job *job, struct amdgpu_ib *ib, uint32_t flags) argument
2226 gfx_v7_0_ring_emit_ib_compute(struct amdgpu_ring *ring, struct amdgpu_job *job, struct amdgpu_ib *ib, uint32_t flags) argument
2260 gfx_v7_ring_emit_cntxcntl(struct amdgpu_ring *ring, uint32_t flags) argument
2290 gfx_v7_0_ring_test_ib(struct amdgpu_ring *ring, long timeout) argument
2453 struct amdgpu_ring *ring = &adev->gfx.gfx_ring[0]; local
2529 struct amdgpu_ring *ring; local
2586 gfx_v7_0_ring_get_rptr(struct amdgpu_ring *ring) argument
2591 gfx_v7_0_ring_get_wptr_gfx(struct amdgpu_ring *ring) argument
2598 gfx_v7_0_ring_set_wptr_gfx(struct amdgpu_ring *ring) argument
2606 gfx_v7_0_ring_get_wptr_compute(struct amdgpu_ring *ring) argument
2612 gfx_v7_0_ring_set_wptr_compute(struct amdgpu_ring *ring) argument
2713 struct amdgpu_ring *ring = &adev->gfx.compute_ring[i]; local
2852 gfx_v7_0_mqd_init(struct amdgpu_device *adev, struct cik_mqd *mqd, uint64_t mqd_gpu_addr, struct amdgpu_ring *ring) argument
3002 struct amdgpu_ring *ring = &adev->gfx.compute_ring[ring_id]; local
3040 struct amdgpu_ring *ring; local
3135 gfx_v7_0_ring_emit_pipeline_sync(struct amdgpu_ring *ring) argument
3176 gfx_v7_0_ring_emit_vm_flush(struct amdgpu_ring *ring, unsigned vmid, uint64_t pd_addr) argument
3208 gfx_v7_0_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg, uint32_t val) argument
3503 gfx_v7_0_update_spm_vmid(struct amdgpu_device *adev, struct amdgpu_ring *ring, unsigned vmid) argument
4036 gfx_v7_0_ring_emit_gds_switch(struct amdgpu_ring *ring, uint32_t vmid, uint32_t gds_base, uint32_t gds_size, uint32_t gws_base, uint32_t gws_size, uint32_t oa_base, uint32_t oa_size) argument
4075 gfx_v7_0_ring_soft_recovery(struct amdgpu_ring *ring, unsigned vmid) argument
4357 struct amdgpu_ring *ring = &adev->gfx.compute_ring[ring_id]; local
4386 struct amdgpu_ring *ring; local
4826 struct amdgpu_ring *ring; local
4851 struct amdgpu_ring *ring; local
4939 gfx_v7_0_emit_mem_sync(struct amdgpu_ring *ring) argument
4951 gfx_v7_0_emit_mem_sync_compute(struct amdgpu_ring *ring) argument
[all...]
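
The gfx_v7_0_ring_test_ring() documentation above describes the usual ring self-test pattern: reserve space on the ring, emit a command that stores a known magic value into a scratch location, commit the ring, then poll that location until the value shows up or a timeout expires. The following is a minimal user-space simulation of that pattern, not the amdgpu code; the ring, scratch variable, and consumer function are all hypothetical stand-ins.

/*
 * Illustrative sketch of the scratch-register ring test pattern:
 * push a command that writes a sentinel, "kick" the consumer,
 * then check the target location for the value.
 */
#include <stdint.h>
#include <stdio.h>

#define RING_SIZE 16          /* 32-bit entries, power of two */
#define SENTINEL  0xDEADBEEF  /* same magic value the driver test uses */

static uint32_t fake_ring[RING_SIZE];
static unsigned int wptr, rptr;   /* write/read pointers */
static uint32_t fake_scratch;     /* stands in for the scratch register */

static void ring_write(uint32_t v)
{
	fake_ring[wptr++ & (RING_SIZE - 1)] = v;
}

/* Stand-in for the hardware consuming the ring: each entry is a value
 * to store into the scratch location. */
static void consume_ring(void)
{
	while (rptr != wptr)
		fake_scratch = fake_ring[rptr++ & (RING_SIZE - 1)];
}

int main(void)
{
	fake_scratch = 0;        /* clear scratch before the test */
	ring_write(SENTINEL);    /* emit "write SENTINEL to scratch" */
	consume_ring();          /* commit/kick: consumer runs the ring */

	/* The real test polls with a timeout; one check suffices here. */
	puts(fake_scratch == SENTINEL ? "ring test passed" : "ring test failed");
	return fake_scratch == SENTINEL ? 0 : 1;
}
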
amdgpu_umsch_mm.c
269 uint32_t *ring = test->ring_data_cpu_addr + local
285 ring[0] = VPE_CMD_HEADER(VPE_CMD_OPCODE_INDIRECT, 0);
286 ring[1] = (ib_gpu_addr & 0xffffffe0);
287 ring[2] = upper_32_bits(ib_gpu_addr);
288 ring[3] = 4;
289 ring[4] = 0;
290 ring[5] = 0;
491 struct amdgpu_ring *ring = &umsch->ring; local
493 if (amdgpu_ring_alloc(ring, ndw
504 struct amdgpu_ring *ring = &umsch->ring; local
518 umsch_mm_ring_set_wptr(struct amdgpu_ring *ring) argument
529 umsch_mm_ring_get_rptr(struct amdgpu_ring *ring) argument
537 umsch_mm_ring_get_wptr(struct amdgpu_ring *ring) argument
559 struct amdgpu_ring *ring = &umsch->ring; local
[all...]
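
The amdgpu_umsch_mm.c test above builds an indirect-buffer command directly in ring memory: a header dword, the low 32 bits of the buffer's GPU address masked to 32-byte alignment, the upper 32 bits, and a length. A small sketch of that dword packing follows; the address value and the zero header are made up for illustration.

/*
 * Sketch of packing a 64-bit, 32-byte-aligned buffer address into two
 * ring dwords, mirroring ring[1]/ring[2] in the excerpt above.
 * The 0xffffffe0 mask keeps bits [31:5] of the low word.
 */
#include <stdint.h>
#include <stdio.h>
#include <inttypes.h>

static inline uint32_t lower_32_bits(uint64_t v) { return (uint32_t)v; }
static inline uint32_t upper_32_bits(uint64_t v) { return (uint32_t)(v >> 32); }

int main(void)
{
	uint64_t ib_gpu_addr = 0x0000000123456780ULL; /* hypothetical, 32-byte aligned */
	uint32_t ring[4];

	ring[0] = 0;                                       /* command header placeholder */
	ring[1] = lower_32_bits(ib_gpu_addr) & 0xffffffe0; /* low dword, aligned */
	ring[2] = upper_32_bits(ib_gpu_addr);              /* high dword */
	ring[3] = 4;                                       /* payload length in dwords */

	/* Reassemble to show the two dwords round-trip to the original address. */
	uint64_t back = ((uint64_t)ring[2] << 32) | ring[1];
	printf("packed back to %#" PRIx64 "\n", back);
	return back == ib_gpu_addr ? 0 : 1;
}
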
gfx_v6_0.c
1767 static int gfx_v6_0_ring_test_ring(struct amdgpu_ring *ring) argument
1769 struct amdgpu_device *adev = ring->adev;
1776 r = amdgpu_ring_alloc(ring, 3);
1780 amdgpu_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
1781 amdgpu_ring_write(ring, mmSCRATCH_REG0 - PACKET3_SET_CONFIG_REG_START);
1782 amdgpu_ring_write(ring, 0xDEADBEEF);
1783 amdgpu_ring_commit(ring);
1797 static void gfx_v6_0_ring_emit_vgt_flush(struct amdgpu_ring *ring) argument
1799 amdgpu_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE, 0));
1800 amdgpu_ring_write(ring, EVENT_TYP
1804 gfx_v6_0_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq, unsigned flags) argument
1832 gfx_v6_0_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_job *job, struct amdgpu_ib *ib, uint32_t flags) argument
1873 gfx_v6_0_ring_test_ib(struct amdgpu_ring *ring, long timeout) argument
1987 struct amdgpu_ring *ring = &adev->gfx.gfx_ring[0]; local
2050 struct amdgpu_ring *ring; local
2102 gfx_v6_0_ring_get_rptr(struct amdgpu_ring *ring) argument
2107 gfx_v6_0_ring_get_wptr(struct amdgpu_ring *ring) argument
2121 gfx_v6_0_ring_set_wptr_gfx(struct amdgpu_ring *ring) argument
2129 gfx_v6_0_ring_set_wptr_compute(struct amdgpu_ring *ring) argument
2147 struct amdgpu_ring *ring; local
2265 gfx_v6_0_ring_emit_pipeline_sync(struct amdgpu_ring *ring) argument
2290 gfx_v6_0_ring_emit_vm_flush(struct amdgpu_ring *ring, unsigned vmid, uint64_t pd_addr) argument
2320 gfx_v6_0_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg, uint32_t val) argument
2936 gfx_v6_ring_emit_cntxcntl(struct amdgpu_ring *ring, uint32_t flags) argument
3045 struct amdgpu_ring *ring; local
3220 gfx_v6_0_set_compute_eop_interrupt_state(struct amdgpu_device *adev, int ring, enum amdgpu_interrupt_state state) argument
3352 struct amdgpu_ring *ring; local
3433 gfx_v6_0_emit_mem_sync(struct amdgpu_ring *ring) argument
[all...]
/linux-master/drivers/net/vmxnet3/
vmxnet3_int.h
147 vmxnet3_cmd_ring_adv_next2fill(struct vmxnet3_cmd_ring *ring) argument
149 ring->next2fill++;
150 if (unlikely(ring->next2fill == ring->size)) {
151 ring->next2fill = 0;
152 VMXNET3_FLIP_RING_GEN(ring->gen);
157 vmxnet3_cmd_ring_adv_next2comp(struct vmxnet3_cmd_ring *ring) argument
159 VMXNET3_INC_RING_IDX_ONLY(ring->next2comp, ring->size);
163 vmxnet3_cmd_ring_desc_avail(struct vmxnet3_cmd_ring *ring) argument
179 vmxnet3_comp_ring_adv_next2proc(struct vmxnet3_comp_ring *ring) argument
[all...]
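
vmxnet3_cmd_ring_adv_next2fill() above wraps the fill index back to zero at the end of the ring and flips a "generation" bit each time it wraps, so the device can tell descriptors from the current lap apart from stale ones. A minimal sketch of that idiom; the struct below is a simplified stand-in, not the vmxnet3 definition.

/*
 * "Advance and flip generation on wrap" as in
 * vmxnet3_cmd_ring_adv_next2fill().
 */
#include <stdint.h>
#include <stdio.h>

struct cmd_ring {
	uint32_t next2fill;  /* next descriptor slot to fill */
	uint32_t size;       /* number of descriptors in the ring */
	uint8_t gen;         /* generation bit, flipped on every wrap */
};

static void ring_adv_next2fill(struct cmd_ring *ring)
{
	ring->next2fill++;
	if (ring->next2fill == ring->size) {
		ring->next2fill = 0;
		ring->gen ^= 1;  /* equivalent of VMXNET3_FLIP_RING_GEN */
	}
}

int main(void)
{
	struct cmd_ring ring = { .next2fill = 0, .size = 4, .gen = 1 };

	/* Walking past the end wraps the index and flips gen, so
	 * descriptors written in the new lap carry a different bit. */
	for (int i = 0; i < 6; i++) {
		printf("fill slot %u (gen %u)\n", ring.next2fill, ring.gen);
		ring_adv_next2fill(&ring);
	}
	return 0;
}
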
/linux-master/drivers/net/wireless/ath/wil6210/
txrx.h
581 static inline int wil_ring_is_empty(struct wil_ring *ring) argument
583 return ring->swhead == ring->swtail;
586 static inline u32 wil_ring_next_tail(struct wil_ring *ring) argument
588 return (ring->swtail + 1) % ring->size;
591 static inline void wil_ring_advance_head(struct wil_ring *ring, int n) argument
593 ring->swhead = (ring->swhead + n) % ring
596 wil_ring_is_full(struct wil_ring *ring) argument
632 wil_ring_used_tx(struct wil_ring *ring) argument
641 wil_ring_avail_tx(struct wil_ring *ring) argument
[all...]
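
The txrx.h helpers above implement a classic software head/tail ring: empty when head equals tail, indices advanced modulo the ring size. The bodies of the used/avail/full helpers are truncated in the listing, so the sketch below is a generic reconstruction of the usual companion arithmetic (one slot kept free so "full" never looks like "empty"), not a quote of the wil6210 code.

/*
 * Generic head/tail ring arithmetic in the style of the wil_ring_*
 * helpers above.
 */
#include <stdio.h>

struct sw_ring {
	unsigned int swhead;  /* producer index */
	unsigned int swtail;  /* consumer index */
	unsigned int size;    /* number of slots */
};

static int ring_is_empty(const struct sw_ring *r)
{
	return r->swhead == r->swtail;
}

static unsigned int ring_used(const struct sw_ring *r)
{
	/* add size before the modulo so the subtraction never goes negative */
	return (r->swhead - r->swtail + r->size) % r->size;
}

static unsigned int ring_avail(const struct sw_ring *r)
{
	return r->size - 1 - ring_used(r);  /* one slot reserved */
}

static int ring_is_full(const struct sw_ring *r)
{
	return (r->swhead + 1) % r->size == r->swtail;
}

int main(void)
{
	struct sw_ring r = { .swhead = 6, .swtail = 2, .size = 8 };

	printf("empty=%d full=%d used=%u avail=%u\n",
	       ring_is_empty(&r), ring_is_full(&r), ring_used(&r), ring_avail(&r));
	return 0;
}
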
/linux-master/drivers/gpu/drm/msm/adreno/
adreno_gpu.h
477 void adreno_flush(struct msm_gpu *gpu, struct msm_ringbuffer *ring, u32 reg);
478 bool adreno_idle(struct msm_gpu *gpu, struct msm_ringbuffer *ring);
485 void adreno_wait_ring(struct msm_ringbuffer *ring, uint32_t ndwords);
533 OUT_PKT0(struct msm_ringbuffer *ring, uint16_t regindx, uint16_t cnt) argument
535 adreno_wait_ring(ring, cnt+1);
536 OUT_RING(ring, CP_TYPE0_PKT | ((cnt-1) << 16) | (regindx & 0x7FFF));
541 OUT_PKT2(struct msm_ringbuffer *ring) argument
543 adreno_wait_ring(ring, 1);
544 OUT_RING(ring, CP_TYPE2_PKT);
548 OUT_PKT3(struct msm_ringbuffer *ring, uint8_ argument
570 OUT_PKT4(struct msm_ringbuffer *ring, uint16_t regindx, uint16_t cnt) argument
577 OUT_PKT7(struct msm_ringbuffer *ring, uint8_t opcode, uint16_t cnt) argument
590 get_wptr(struct msm_ringbuffer *ring) argument
[all...]
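
OUT_PKT0() above shows how an Adreno CP "type 0" packet header is assembled: the packet type in the top bits, the dword count minus one shifted into bits 16 and up, and the starting register index in the low 15 bits. The sketch below encodes and decodes such a header; the 2-bit type values (type0 = 0, type2 = 2) are restated locally as assumptions rather than taken from the msm headers, and the register index is arbitrary.

/*
 * Type-0 CP packet header layout as used by OUT_PKT0():
 * bits [31:30] type, [29:16] count - 1, [14:0] base register index.
 */
#include <stdint.h>
#include <stdio.h>

#define CP_TYPE0_PKT (0u << 30)
#define CP_TYPE2_PKT (2u << 30)

static uint32_t pkt0_header(uint16_t regindx, uint16_t cnt)
{
	return CP_TYPE0_PKT | ((uint32_t)(cnt - 1) << 16) | (regindx & 0x7FFF);
}

int main(void)
{
	uint32_t hdr = pkt0_header(0x0578, 1);  /* write 1 dword at an arbitrary register */

	printf("header   = 0x%08x\n", hdr);
	printf("type     = %u\n", hdr >> 30);
	printf("count    = %u\n", ((hdr >> 16) & 0x3FFF) + 1);
	printf("register = 0x%04x\n", hdr & 0x7FFF);
	return 0;
}
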
a2xx_gpu.c
15 struct msm_ringbuffer *ring = submit->ring; local
29 OUT_PKT3(ring, CP_INDIRECT_BUFFER_PFD, 2);
30 OUT_RING(ring, lower_32_bits(submit->cmd[i].iova));
31 OUT_RING(ring, submit->cmd[i].size);
32 OUT_PKT2(ring);
37 OUT_PKT0(ring, REG_AXXX_CP_SCRATCH_REG2, 1);
38 OUT_RING(ring, submit->seqno);
41 OUT_PKT3(ring, CP_WAIT_FOR_IDLE, 1);
42 OUT_RING(ring,
58 struct msm_ringbuffer *ring = gpu->rb[0]; local
484 a2xx_get_rptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring) argument
[all...]
a6xx_gpu.c
36 static bool a6xx_idle(struct msm_gpu *gpu, struct msm_ringbuffer *ring) argument
39 if (!adreno_idle(gpu, ring))
55 static void update_shadow_rptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring) argument
62 OUT_PKT7(ring, CP_WHERE_AM_I, 2);
63 OUT_RING(ring, lower_32_bits(shadowptr(a6xx_gpu, ring)));
64 OUT_RING(ring, upper_32_bits(shadowptr(a6xx_gpu, ring)));
68 static void a6xx_flush(struct msm_gpu *gpu, struct msm_ringbuffer *ring) argument
73 update_shadow_rptr(gpu, ring);
91 get_stats_counter(struct msm_ringbuffer *ring, u32 counter, u64 iova) argument
102 a6xx_set_pagetable(struct a6xx_gpu *a6xx_gpu, struct msm_ringbuffer *ring, struct msm_file_private *ctx) argument
192 struct msm_ringbuffer *ring = submit->ring; local
275 struct msm_ringbuffer *ring = submit->ring; local
1454 struct msm_ringbuffer *ring = gpu->rb[0]; local
1483 struct msm_ringbuffer *ring = gpu->rb[0]; local
2324 struct msm_ringbuffer *ring = gpu->funcs->active_ring(gpu); local
2841 a6xx_get_rptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring) argument
2852 a6xx_progress(struct msm_gpu *gpu, struct msm_ringbuffer *ring) argument
[all...]
/linux-master/drivers/dma/mediatek/
mtk-hsdma.c
49 /* Registers for underlying ring manipulation */
140 * ring to know what relevant VD the PD is being
152 * struct mtk_hsdma_ring - This struct holds info describing underlying ring
154 * @txd: The descriptor TX ring which describes DMA source
156 * @rxd: The descriptor RX ring which describes DMA
158 * @cb: The extra information pointed at by RX ring
159 * @tphys: The physical addr of TX ring
160 * @rphys: The physical addr of RX ring
177 * @ring: An instance for the underlying ring
185 struct mtk_hsdma_ring ring; member in struct:mtk_hsdma_pchan
317 struct mtk_hsdma_ring *ring = &pc->ring; local
390 struct mtk_hsdma_ring *ring = &pc->ring; local
415 struct mtk_hsdma_ring *ring = &pc->ring; local
[all...]
/linux-master/drivers/dma/
xgene-dma.c
27 /* X-Gene DMA ring csr registers and bit definations */
265 * @tx_ring: transmit ring descriptor that we use to prepare actual
267 * @rx_ring: receive ring descriptor that we use to get completed DMA
294 * @ring_num: start id number for DMA ring
296 * @csr_ring: base for DMA ring register access
297 * @csr_ring_cmd: base for DMA ring command register access
597 struct xgene_dma_ring *ring = &chan->tx_ring; local
600 /* Get hw descriptor from DMA tx ring */
601 desc_hw = &ring->desc_hw[ring
689 struct xgene_dma_ring *ring = &chan->rx_ring; local
1032 xgene_dma_wr_ring_state(struct xgene_dma_ring *ring) argument
1043 xgene_dma_clr_ring_state(struct xgene_dma_ring *ring) argument
1049 xgene_dma_setup_ring(struct xgene_dma_ring *ring) argument
1106 xgene_dma_clear_ring(struct xgene_dma_ring *ring) argument
1127 xgene_dma_set_ring_cmd(struct xgene_dma_ring *ring) argument
1165 xgene_dma_delete_ring_one(struct xgene_dma_ring *ring) argument
1184 xgene_dma_create_ring_one(struct xgene_dma_chan *chan, struct xgene_dma_ring *ring, enum xgene_dma_ring_cfgsize cfgsize) argument
[all...]
/linux-master/drivers/comedi/drivers/
mite.c
555 writel(mite_chan->ring->dma_addr,
563 * @ring: MITE dma ring.
568 struct mite_ring *ring,
583 if (!mite_chan->ring) {
584 mite_chan->ring = ring;
597 * @ring: MITE dma ring.
600 struct mite_ring *ring)
567 mite_request_channel_in_range(struct mite *mite, struct mite_ring *ring, unsigned int min_channel, unsigned int max_channel) argument
599 mite_request_channel(struct mite *mite, struct mite_ring *ring) argument
649 mite_init_ring_descriptors(struct mite_ring *ring, struct comedi_subdevice *s, unsigned int nbytes) argument
697 mite_free_dma_descs(struct mite_ring *ring) argument
716 mite_buf_change(struct mite_ring *ring, struct comedi_subdevice *s) argument
750 struct mite_ring *ring; local
771 mite_free_ring(struct mite_ring *ring) argument
[all...]
/linux-master/drivers/net/ethernet/freescale/
ucc_geth_ethtool.c
210 struct ethtool_ringparam *ring,
218 ring->rx_max_pending = UCC_GETH_BD_RING_SIZE_MAX;
219 ring->rx_mini_max_pending = UCC_GETH_BD_RING_SIZE_MAX;
220 ring->rx_jumbo_max_pending = UCC_GETH_BD_RING_SIZE_MAX;
221 ring->tx_max_pending = UCC_GETH_BD_RING_SIZE_MAX;
223 ring->rx_pending = ug_info->bdRingLenRx[queue];
224 ring->rx_mini_pending = ug_info->bdRingLenRx[queue];
225 ring->rx_jumbo_pending = ug_info->bdRingLenRx[queue];
226 ring->tx_pending = ug_info->bdRingLenTx[queue];
231 struct ethtool_ringparam *ring,
209 uec_get_ringparam(struct net_device *netdev, struct ethtool_ringparam *ring, struct kernel_ethtool_ringparam *kernel_ring, struct netlink_ext_ack *extack) argument
230 uec_set_ringparam(struct net_device *netdev, struct ethtool_ringparam *ring, struct kernel_ethtool_ringparam *kernel_ring, struct netlink_ext_ack *extack) argument
[all...]
/linux-master/drivers/gpu/drm/radeon/
r300.c
51 * However, scheduling such write to the ring seems harmless, i suspect
214 struct radeon_ring *ring = &rdev->ring[fence->ring]; local
219 radeon_ring_write(ring, PACKET0(R300_RE_SCISSORS_TL, 0));
220 radeon_ring_write(ring, 0);
221 radeon_ring_write(ring, PACKET0(R300_RE_SCISSORS_BR, 0));
222 radeon_ring_write(ring, 0);
224 radeon_ring_write(ring, PACKET0(R300_RB3D_DSTCACHE_CTLSTAT, 0));
225 radeon_ring_write(ring, R300_RB3D_DC_FLUS
245 r300_ring_start(struct radeon_device *rdev, struct radeon_ring *ring) argument
[all...]
/linux-master/drivers/net/ethernet/aquantia/atlantic/
aq_ptp.c
119 static int __aq_ptp_skb_put(struct ptp_skb_ring *ring, struct sk_buff *skb) argument
121 unsigned int next_head = (ring->head + 1) % ring->size;
123 if (next_head == ring->tail)
126 ring->buff[ring->head] = skb_get(skb);
127 ring->head = next_head;
132 static int aq_ptp_skb_put(struct ptp_skb_ring *ring, struct sk_buff *skb) argument
137 spin_lock_irqsave(&ring->lock, flags);
138 ret = __aq_ptp_skb_put(ring, sk
144 __aq_ptp_skb_get(struct ptp_skb_ring *ring) argument
157 aq_ptp_skb_get(struct ptp_skb_ring *ring) argument
169 aq_ptp_skb_buf_len(struct ptp_skb_ring *ring) argument
183 aq_ptp_skb_ring_init(struct ptp_skb_ring *ring, unsigned int size) argument
200 aq_ptp_skb_ring_clean(struct ptp_skb_ring *ring) argument
208 aq_ptp_skb_ring_release(struct ptp_skb_ring *ring) argument
631 aq_ptp_ring(struct aq_nic_s *aq_nic, struct aq_ring_s *ring) argument
749 struct aq_ring_s *ring = &aq_ptp->ptp_tx; local
[all...]
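
aq_ptp_skb_put() above follows the common kernel convention of a double-underscore helper that assumes the lock is already held, wrapped by a function that takes ring->lock around the call, and it reports "full" when advancing head would collide with tail (one slot sacrificed). Below is a user-space sketch of the same split with a pthread mutex standing in for spin_lock_irqsave(); the names and stored items are illustrative.

/*
 * "__helper assumes the lock is held, wrapper takes it" split, as in
 * aq_ptp_skb_put()/__aq_ptp_skb_put().
 */
#include <pthread.h>
#include <stdio.h>
#include <errno.h>

#define RING_SIZE 4

struct item_ring {
	void *buff[RING_SIZE];
	unsigned int head, tail;
	unsigned int size;
	pthread_mutex_t lock;
};

/* Caller must hold ring->lock. */
static int __ring_put(struct item_ring *ring, void *item)
{
	unsigned int next_head = (ring->head + 1) % ring->size;

	if (next_head == ring->tail)
		return -ENOMEM;          /* ring full: one slot stays empty */

	ring->buff[ring->head] = item;
	ring->head = next_head;
	return 0;
}

static int ring_put(struct item_ring *ring, void *item)
{
	int ret;

	pthread_mutex_lock(&ring->lock);
	ret = __ring_put(ring, item);
	pthread_mutex_unlock(&ring->lock);
	return ret;
}

int main(void)
{
	struct item_ring ring = { .size = RING_SIZE,
				  .lock = PTHREAD_MUTEX_INITIALIZER };
	int dummy;

	/* The last put fails: capacity is size - 1. */
	for (int i = 0; i < RING_SIZE; i++)
		printf("put %d -> %d\n", i, ring_put(&ring, &dummy));
	return 0;
}
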
/linux-master/drivers/scsi/fnic/
vnic_wq_copy.c
54 vnic_dev_clear_desc_ring(&wq->ring);
62 vnic_dev_free_desc_ring(vdev, &wq->ring);
81 return vnic_dev_alloc_desc_ring(vdev, &wq->ring, desc_count, desc_size);
90 paddr = (u64)wq->ring.base_addr | VNIC_PADDR_TARGET;
92 iowrite32(wq->ring.desc_count, &wq->ctrl->ring_size);
vnic_rq.h
84 struct vnic_dev_ring ring; member in struct:vnic_rq
96 return rq->ring.desc_avail;
102 return rq->ring.desc_count - rq->ring.desc_avail - 1;
133 rq->ring.desc_avail--;
160 rq->ring.desc_avail += count;
185 rq->ring.desc_avail++;
/linux-master/include/uapi/linux/
virtio_ring.h
48 * Mark a descriptor as available or used in packed ring.
63 /* Enable events in packed ring. */
65 /* Disable events in packed ring. */
68 * Enable events for a specific descriptor in packed ring.
76 * of packed ring.
84 * at the end of the avail ring. Host should ignore the avail->flags field. */
86 * at the end of the used ring. Guest should ignore the used->flags field. */
97 * struct vring_desc - Virtio ring descriptors,
117 __virtio16 ring[]; member in struct:vring_avail
134 vring_used_elem_t ring[]; member in struct:vring_used
[all...]
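
virtio_ring.h above declares the split-ring avail and used rings as flexible-array members at the end of their structs. The sketch below shows, in simplified host-endian form, how a driver publishes a descriptor chain head on the avail ring: write the head index at avail->ring[idx % qsize], then bump the free-running idx. The real UAPI uses __virtio16 little-endian fields and requires a write memory barrier before the idx update; both are omitted here.

/*
 * Simplified, host-endian sketch of an avail-ring publish on a virtio
 * split ring. Not the UAPI structures from linux/virtio_ring.h.
 */
#include <stdint.h>
#include <stdlib.h>
#include <stdio.h>

struct avail_ring {
	uint16_t flags;
	uint16_t idx;      /* free-running; only ever increases */
	uint16_t ring[];   /* qsize entries of descriptor head indices */
};

static void avail_publish(struct avail_ring *avail, uint16_t qsize,
			  uint16_t desc_head)
{
	avail->ring[avail->idx % qsize] = desc_head;
	/* A real driver issues a write barrier here, then: */
	avail->idx++;
}

int main(void)
{
	const uint16_t qsize = 8;
	struct avail_ring *avail =
		calloc(1, sizeof(*avail) + qsize * sizeof(uint16_t));

	avail_publish(avail, qsize, 3);   /* expose descriptor chain head 3 */
	avail_publish(avail, qsize, 5);
	printf("idx=%u ring[0]=%u ring[1]=%u\n",
	       avail->idx, avail->ring[0], avail->ring[1]);
	free(avail);
	return 0;
}
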
/linux-master/drivers/net/ethernet/intel/ixgbe/
ixgbe_lib.c
9 * ixgbe_cache_ring_dcb_sriov - Descriptor ring to register mapping for SR-IOV
12 * Cache the descriptor ring offsets for SR-IOV to the assigned rings. It
60 /* The work is already done if the FCoE ring is shared */
89 /* ixgbe_get_first_reg_idx - Return first register index associated with ring */
142 * ixgbe_cache_ring_dcb - Descriptor ring to register mapping for DCB
145 * Cache the descriptor ring offsets for DCB to the assigned rings.
176 * ixgbe_cache_ring_sriov - Descriptor ring to register mapping for sriov
247 * ixgbe_cache_ring_rss - Descriptor ring to register mapping for RSS
250 * Cache the descriptor ring offsets for RSS to the assigned rings.
270 * ixgbe_cache_ring_register - Descriptor ring t
810 ixgbe_add_ring(struct ixgbe_ring *ring, struct ixgbe_ring_container *head) argument
841 struct ixgbe_ring *ring; local
1023 struct ixgbe_ring *ring; local
[all...]
/linux-master/drivers/gpu/drm/msm/
msm_gpu.c
310 find_submit(struct msm_ringbuffer *ring, uint32_t fence) argument
315 spin_lock_irqsave(&ring->submit_lock, flags);
316 list_for_each_entry(submit, &ring->submits, node) {
318 spin_unlock_irqrestore(&ring->submit_lock, flags);
322 spin_unlock_irqrestore(&ring->submit_lock, flags);
408 struct msm_ringbuffer *ring = gpu->rb[i]; local
410 uint32_t fence = ring->memptrs->fence;
413 * For the current (faulting?) ring/submit advance the fence by
416 if (ring == cur_ring)
417 ring
433 struct msm_ringbuffer *ring = gpu->rb[i]; local
496 made_progress(struct msm_gpu *gpu, struct msm_ringbuffer *ring) argument
515 struct msm_ringbuffer *ring = gpu->funcs->active_ring(gpu); local
647 retire_submit(struct msm_gpu *gpu, struct msm_ringbuffer *ring, struct msm_gem_submit *submit) argument
702 struct msm_ringbuffer *ring = gpu->rb[i]; local
751 struct msm_ringbuffer *ring = submit->ring; local
[all...]
/linux-master/drivers/net/ethernet/intel/igc/
igc_tsn.c
13 struct igc_ring *ring = adapter->tx_ring[i]; local
15 if (ring->launchtime_enable)
27 struct igc_ring *ring = adapter->tx_ring[i]; local
29 if (ring->cbs_enable)
127 struct igc_ring *ring = adapter->tx_ring[i]; local
132 wr32(IGC_STQT(i), ring->start_time);
133 wr32(IGC_ENDQT(i), ring->end_time);
158 if (ring->launchtime_enable)
165 if (ring->cbs_enable) {
221 cbs_value = DIV_ROUND_UP_ULL(ring
[all...]
/linux-master/drivers/net/ethernet/pasemi/
pasemi_mac.h
39 u64 *buffers; /* RX interface buffer ring */
64 int bufsz; /* RX ring buffer size */
101 #define RING_USED(ring) (((ring)->next_to_fill - (ring)->next_to_clean) \
102 & ((ring)->size - 1))
103 #define RING_AVAIL(ring) ((ring->size) - RING_USED(ring))
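
The pasemi_mac.h macros above compute ring occupancy by subtracting two free-running counters and masking the difference with (size - 1), which avoids a division but only works when the ring size is a power of two. A compilable illustration of that arithmetic, with counter values chosen so the unsigned subtraction wraps:

/*
 * RING_USED()/RING_AVAIL() arithmetic on free-running counters with a
 * power-of-two mask, as in pasemi_mac.h.
 */
#include <stdio.h>

struct mac_ring {
	unsigned int size;           /* must be a power of two */
	unsigned int next_to_fill;   /* free-running producer counter */
	unsigned int next_to_clean;  /* free-running consumer counter */
};

#define RING_USED(ring)  (((ring)->next_to_fill - (ring)->next_to_clean) \
			  & ((ring)->size - 1))
#define RING_AVAIL(ring) ((ring)->size - RING_USED(ring))

int main(void)
{
	/* The producer counter has wrapped past 2^32 while the consumer
	 * has not; unsigned subtraction is modulo 2^32, and the mask
	 * keeps only the low bits, so the result is still correct. */
	struct mac_ring r = {
		.size = 64,
		.next_to_fill = 5,
		.next_to_clean = 0xfffffffbu,
	};

	printf("used=%u avail=%u\n", RING_USED(&r), RING_AVAIL(&r));
	return 0;
}
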
/linux-master/tools/testing/selftests/net/
txring_overwrite.c
86 static int setup_tx(char **ring) argument
112 error(1, errno, "setsockopt ring");
114 *ring = mmap(0, req.tp_block_size * req.tp_block_nr,
116 if (*ring == MAP_FAILED)
161 char *ring; local
165 fdt = setup_tx(&ring);
167 send_pkt(fdt, ring, payload_patterns[0]);
168 send_pkt(fdt, ring, payload_patterns[1]);
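
setup_tx() in the selftest above obtains a kernel-shared TX ring by describing it with struct tpacket_req, installing it with setsockopt(PACKET_TX_RING), and then mmap()ing the block area. The sketch below walks the same sequence; the block and frame sizes are arbitrary, running it requires CAP_NET_RAW, and actually queueing frames through the ring is left out.

/*
 * PACKET_TX_RING setup sequence: socket -> setsockopt -> mmap.
 */
#include <stdio.h>
#include <unistd.h>
#include <sys/socket.h>
#include <sys/mman.h>
#include <linux/if_packet.h>

int main(void)
{
	struct tpacket_req req = {
		.tp_block_size = 1 << 12,   /* 4 KiB blocks (page-sized) */
		.tp_block_nr   = 2,
		.tp_frame_size = 1 << 11,   /* 2 frames per block */
		.tp_frame_nr   = 4,         /* frames per block * blocks */
	};
	char *ring;
	int fd;

	fd = socket(PF_PACKET, SOCK_RAW, 0);
	if (fd < 0) {
		perror("socket(PF_PACKET)");   /* usually needs CAP_NET_RAW */
		return 1;
	}

	if (setsockopt(fd, SOL_PACKET, PACKET_TX_RING, &req, sizeof(req))) {
		perror("setsockopt(PACKET_TX_RING)");
		return 1;
	}

	ring = mmap(NULL, (size_t)req.tp_block_size * req.tp_block_nr,
		    PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
	if (ring == MAP_FAILED) {
		perror("mmap");
		return 1;
	}

	/* Frames would now be written at ring + N * tp_frame_size and
	 * handed to the kernel with send(); see txring_overwrite.c. */
	printf("mapped %u bytes of TX ring\n",
	       req.tp_block_size * req.tp_block_nr);
	munmap(ring, (size_t)req.tp_block_size * req.tp_block_nr);
	close(fd);
	return 0;
}
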
/linux-master/drivers/net/ethernet/cisco/enic/
vnic_rq.h
76 struct vnic_dev_ring ring; member in struct:vnic_rq
87 return rq->ring.desc_avail;
93 return rq->ring.desc_count - rq->ring.desc_avail - 1;
121 rq->ring.desc_avail--;
143 rq->ring.desc_avail += count;
168 rq->ring.desc_avail++;
/linux-master/drivers/usb/gadget/udc/cdns2/
cdns2-gadget.c
79 dma_index = readl(&pdev->adma_regs->ep_traddr) - pep->ring.dma;
102 u32 offset = (char *)trb - (char *)pep->ring.trbs;
104 return pep->ring.dma + offset;
110 struct cdns2_ring *ring = &pep->ring; local
112 if (pep->ring.trbs) {
113 dma_pool_free(pdev->eps_dma_pool, ring->trbs, ring->dma);
114 memset(ring, 0, sizeof(*ring));
123 struct cdns2_ring *ring; local
185 cdns2_ep_inc_enq(struct cdns2_ring *ring) argument
191 cdns2_ep_inc_deq(struct cdns2_ring *ring) argument
321 struct cdns2_ring *ring; local
619 struct cdns2_ring *ring; local
765 struct cdns2_ring *ring; local
882 struct cdns2_ring *ring; local
[all...]
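
The cdns2-gadget.c excerpt above (lines 102-104) converts a TRB's CPU pointer to its device-visible address by taking the byte offset from the start of the ring buffer and adding the ring's DMA base; the same arithmetic in reverse recovers an index from a hardware register. A tiny sketch of that translation; the TRB layout, base addresses, and the local dma_addr_t typedef are made up for illustration.

/*
 * Virtual-to-DMA translation for a ring element: dma_base + byte offset
 * of the element inside the CPU-side buffer.
 */
#include <stdint.h>
#include <stdio.h>
#include <inttypes.h>

typedef uint64_t dma_addr_t;   /* stand-in for the kernel typedef */

struct trb {
	uint32_t buffer_lo, buffer_hi, length, control;
};

struct trb_ring {
	struct trb *trbs;      /* CPU virtual address of the ring */
	dma_addr_t dma;        /* bus/DMA address of the same memory */
};

static dma_addr_t trb_virt_to_dma(struct trb_ring *ring, struct trb *trb)
{
	uint64_t offset = (char *)trb - (char *)ring->trbs;

	return ring->dma + offset;
}

int main(void)
{
	static struct trb trbs[8];
	struct trb_ring ring = { .trbs = trbs, .dma = 0x3f000000 };

	/* TRB #3 sits 3 * sizeof(struct trb) bytes past both bases. */
	printf("trb[3] dma = %#" PRIx64 "\n",
	       (uint64_t)trb_virt_to_dma(&ring, &trbs[3]));
	return 0;
}
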
/linux-master/drivers/net/ethernet/intel/igb/
igb.h
307 struct igb_ring *ring; /* pointer to linked list of rings */ member in struct:igb_ring_container
312 u8 itr; /* current ITR setting for ring */
324 void *desc; /* descriptor ring memory */
325 unsigned long flags; /* ring specific flags */
326 void __iomem *tail; /* pointer to ring tail register */
327 dma_addr_t dma; /* phys address of the ring */
328 unsigned int size; /* length of desc. ring in bytes */
330 u16 count; /* number of desc. in the ring */
331 u8 queue_index; /* logical index of the ring*/
332 u8 reg_idx; /* physical index of the ring */
404 igb_rx_bufsz(struct igb_ring *ring) argument
416 igb_rx_pg_order(struct igb_ring *ring) argument
444 igb_desc_unused(struct igb_ring *ring) argument
[all...]

Completed in 240 milliseconds
