Searched refs:ring (Results 76 - 100 of 807) sorted by relevance


/linux-master/drivers/scsi/fnic/
vnic_cq_copy.h  23 desc = (struct fcpio_fw_req *)((u8 *)cq->ring.descs +
24 cq->ring.desc_size * cq->to_clean);
33 if (cq->to_clean == cq->ring.desc_count) {
38 desc = (struct fcpio_fw_req *)((u8 *)cq->ring.descs +
39 cq->ring.desc_size * cq->to_clean);
vnic_wq_copy.h  19 struct vnic_dev_ring ring; member in struct:vnic_wq_copy
26 return wq->ring.desc_avail;
31 return wq->ring.desc_count - 1 - wq->ring.desc_avail;
36 struct fcpio_host_req *desc = wq->ring.descs;
43 ((wq->to_use_index + 1) == wq->ring.desc_count) ?
45 wq->ring.desc_avail--;
64 cnt = wq->ring.desc_count - wq->to_clean_index + index + 1;
66 wq->to_clean_index = ((index + 1) % wq->ring.desc_count);
67 wq->ring
[all...]
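
The fnic hits above all sample the same structure: a fixed-size descriptor ring that tracks free slots in desc_avail (one descriptor is kept unused so full and empty are distinguishable) and wraps its to_use/to_clean indices at desc_count. Below is a minimal, self-contained C sketch of that accounting pattern; the demo_* names are invented for illustration and are not the fnic API.

/*
 * Minimal userspace sketch of the descriptor-ring accounting pattern
 * visible in the vnic_wq_copy.h hits above: a fixed-size ring keeps a
 * count of available descriptors, reserves one slot per post, and wraps
 * the use/clean indices with the ring size.  Names (demo_ring, etc.)
 * are invented for illustration and are not part of the fnic driver.
 */
#include <assert.h>
#include <stdio.h>

#define DEMO_DESC_COUNT 8

struct demo_ring {
	unsigned int desc_count;     /* total descriptors in the ring     */
	unsigned int desc_avail;     /* descriptors free for new requests */
	unsigned int to_use_index;   /* next slot the producer will fill  */
	unsigned int to_clean_index; /* next slot the consumer retires    */
};

/* One descriptor is always kept unused so full and empty differ. */
static unsigned int demo_ring_used(const struct demo_ring *r)
{
	return r->desc_count - 1 - r->desc_avail;
}

static int demo_ring_post(struct demo_ring *r)
{
	if (!r->desc_avail)
		return -1;                                /* ring is full */
	r->to_use_index = (r->to_use_index + 1) % r->desc_count;
	r->desc_avail--;
	return 0;
}

static void demo_ring_clean(struct demo_ring *r)
{
	r->to_clean_index = (r->to_clean_index + 1) % r->desc_count;
	r->desc_avail++;
}

int main(void)
{
	struct demo_ring r = {
		.desc_count = DEMO_DESC_COUNT,
		.desc_avail = DEMO_DESC_COUNT - 1,
	};

	while (!demo_ring_post(&r))
		;                                        /* fill the ring */
	assert(demo_ring_used(&r) == DEMO_DESC_COUNT - 1);

	demo_ring_clean(&r);                           /* retire one slot */
	printf("used=%u avail=%u\n", demo_ring_used(&r), r.desc_avail);
	return 0;
}
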
/linux-master/drivers/scsi/snic/
vnic_cq_fw.h  21 desc = (struct snic_fw_req *)((u8 *)cq->ring.descs +
22 cq->ring.desc_size * cq->to_clean);
31 if (cq->to_clean == cq->ring.desc_count) {
36 desc = (struct snic_fw_req *)((u8 *)cq->ring.descs +
37 cq->ring.desc_size * cq->to_clean);
/linux-master/drivers/net/ethernet/synopsys/
dwc-xlgmac-desc.c  75 struct xlgmac_ring *ring)
80 if (!ring)
83 if (ring->desc_data_head) {
84 for (i = 0; i < ring->dma_desc_count; i++) {
85 desc_data = XLGMAC_GET_DESC_DATA(ring, i);
89 kfree(ring->desc_data_head);
90 ring->desc_data_head = NULL;
93 if (ring->rx_hdr_pa.pages) {
94 dma_unmap_page(pdata->dev, ring->rx_hdr_pa.pages_dma,
95 ring
74 xlgmac_free_ring(struct xlgmac_pdata *pdata, struct xlgmac_ring *ring) argument
125 xlgmac_init_ring(struct xlgmac_pdata *pdata, struct xlgmac_ring *ring, unsigned int dma_desc_count) argument
389 xlgmac_map_rx_buffer(struct xlgmac_pdata *pdata, struct xlgmac_ring *ring, struct xlgmac_desc_data *desc_data) argument
427 struct xlgmac_ring *ring; local
464 struct xlgmac_ring *ring; local
501 struct xlgmac_ring *ring = channel->tx_ring; local
[all...]
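
The dwc-xlgmac hits show the usual teardown shape for such a ring: return early on a NULL ring, walk every descriptor to release its per-descriptor state, then free the descriptor array and clear the pointer. A small userspace sketch of that shape follows; the demo_* names stand in for the driver's structures and are not the xlgmac API.

/*
 * Userspace sketch of the ring-teardown shape visible in
 * dwc-xlgmac-desc.c above: guard against a NULL ring, release each
 * descriptor's private data, then free the descriptor array itself and
 * clear the pointer so a second call is harmless.  All names are
 * invented for illustration.
 */
#include <stdlib.h>

struct demo_desc_data {
	void *buf;                             /* per-descriptor payload buffer */
};

struct demo_ring {
	struct demo_desc_data *desc_data_head; /* array of per-descriptor state */
	unsigned int dma_desc_count;           /* number of descriptors         */
};

static void demo_free_ring(struct demo_ring *ring)
{
	unsigned int i;

	if (!ring)
		return;

	if (ring->desc_data_head) {
		for (i = 0; i < ring->dma_desc_count; i++)
			free(ring->desc_data_head[i].buf);

		free(ring->desc_data_head);
		ring->desc_data_head = NULL;   /* make repeat calls safe */
	}
}

int main(void)
{
	struct demo_ring ring = {
		.desc_data_head = calloc(4, sizeof(struct demo_desc_data)),
		.dma_desc_count = 4,
	};

	demo_free_ring(&ring);   /* releases all per-descriptor buffers */
	demo_free_ring(&ring);   /* second call is a harmless no-op     */
	return 0;
}
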
/linux-master/drivers/gpu/drm/amd/amdgpu/
vcn_v2_0.c  69 * Set ring and irq function pointers
97 struct amdgpu_ring *ring; local
128 ring = &adev->vcn.inst->ring_dec;
130 ring->use_doorbell = true;
131 ring->doorbell_index = adev->doorbell_index.vcn.vcn_ring0_1 << 1;
132 ring->vm_hub = AMDGPU_MMHUB0(0);
134 sprintf(ring->name, "vcn_dec");
135 r = amdgpu_ring_init(adev, ring, 512, &adev->vcn.inst->irq, 0,
161 ring = &adev->vcn.inst->ring_enc[i];
162 ring
229 struct amdgpu_ring *ring = &adev->vcn.inst->ring_dec; local
798 struct amdgpu_ring *ring = &adev->vcn.inst->ring_dec; local
933 struct amdgpu_ring *ring = &adev->vcn.inst->ring_dec; local
1204 struct amdgpu_ring *ring; local
1324 vcn_v2_0_dec_ring_get_rptr(struct amdgpu_ring *ring) argument
1338 vcn_v2_0_dec_ring_get_wptr(struct amdgpu_ring *ring) argument
1355 vcn_v2_0_dec_ring_set_wptr(struct amdgpu_ring *ring) argument
1378 vcn_v2_0_dec_ring_insert_start(struct amdgpu_ring *ring) argument
1395 vcn_v2_0_dec_ring_insert_end(struct amdgpu_ring *ring) argument
1411 vcn_v2_0_dec_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count) argument
1434 vcn_v2_0_dec_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq, unsigned flags) argument
1473 vcn_v2_0_dec_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_job *job, struct amdgpu_ib *ib, uint32_t flags) argument
1492 vcn_v2_0_dec_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg, uint32_t val, uint32_t mask) argument
1511 vcn_v2_0_dec_ring_emit_vm_flush(struct amdgpu_ring *ring, unsigned vmid, uint64_t pd_addr) argument
1526 vcn_v2_0_dec_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg, uint32_t val) argument
1549 vcn_v2_0_enc_ring_get_rptr(struct amdgpu_ring *ring) argument
1566 vcn_v2_0_enc_ring_get_wptr(struct amdgpu_ring *ring) argument
1590 vcn_v2_0_enc_ring_set_wptr(struct amdgpu_ring *ring) argument
1621 vcn_v2_0_enc_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq, unsigned flags) argument
1633 vcn_v2_0_enc_ring_insert_end(struct amdgpu_ring *ring) argument
1648 vcn_v2_0_enc_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_job *job, struct amdgpu_ib *ib, uint32_t flags) argument
1662 vcn_v2_0_enc_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg, uint32_t val, uint32_t mask) argument
1671 vcn_v2_0_enc_ring_emit_vm_flush(struct amdgpu_ring *ring, unsigned int vmid, uint64_t pd_addr) argument
1684 vcn_v2_0_enc_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg, uint32_t val) argument
1724 vcn_v2_0_dec_ring_test_ring(struct amdgpu_ring *ring) argument
1857 struct amdgpu_ring *ring; local
[all...]
jpeg_v4_0_3.c  45 static void jpeg_v4_0_3_dec_ring_set_wptr(struct amdgpu_ring *ring);
63 * Set ring and irq function pointers
88 struct amdgpu_ring *ring; local
111 ring = &adev->jpeg.inst[i].ring_dec[j];
112 ring->use_doorbell = true;
113 ring->vm_hub = AMDGPU_MMHUB0(adev->jpeg.inst[i].aid_id);
115 ring->doorbell_index =
120 ring->doorbell_index =
124 ring->doorbell_index =
128 sprintf(ring
178 struct amdgpu_ring *ring; local
298 struct amdgpu_ring *ring; local
472 struct amdgpu_ring *ring; local
596 jpeg_v4_0_3_dec_ring_get_rptr(struct amdgpu_ring *ring) argument
612 jpeg_v4_0_3_dec_ring_get_wptr(struct amdgpu_ring *ring) argument
632 jpeg_v4_0_3_dec_ring_set_wptr(struct amdgpu_ring *ring) argument
655 jpeg_v4_0_3_dec_ring_insert_start(struct amdgpu_ring *ring) argument
675 jpeg_v4_0_3_dec_ring_insert_end(struct amdgpu_ring *ring) argument
698 jpeg_v4_0_3_dec_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq, unsigned int flags) argument
767 jpeg_v4_0_3_dec_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_job *job, struct amdgpu_ib *ib, uint32_t flags) argument
818 jpeg_v4_0_3_dec_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg, uint32_t val, uint32_t mask) argument
845 jpeg_v4_0_3_dec_ring_emit_vm_flush(struct amdgpu_ring *ring, unsigned int vmid, uint64_t pd_addr) argument
860 jpeg_v4_0_3_dec_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg, uint32_t val) argument
878 jpeg_v4_0_3_dec_ring_nop(struct amdgpu_ring *ring, uint32_t count) argument
[all...]
sdma_v5_0.c  252 static unsigned sdma_v5_0_ring_init_cond_exec(struct amdgpu_ring *ring, argument
257 amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_COND_EXE));
258 amdgpu_ring_write(ring, lower_32_bits(addr));
259 amdgpu_ring_write(ring, upper_32_bits(addr));
260 amdgpu_ring_write(ring, 1);
262 ret = ring->wptr & ring->buf_mask;
264 amdgpu_ring_write(ring, 0);
272 * @ring: amdgpu ring pointe
276 sdma_v5_0_ring_get_rptr(struct amdgpu_ring *ring) argument
294 sdma_v5_0_ring_get_wptr(struct amdgpu_ring *ring) argument
320 sdma_v5_0_ring_set_wptr(struct amdgpu_ring *ring) argument
387 sdma_v5_0_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count) argument
410 sdma_v5_0_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_job *job, struct amdgpu_ib *ib, uint32_t flags) argument
445 sdma_v5_0_ring_emit_mem_sync(struct amdgpu_ring *ring) argument
469 sdma_v5_0_ring_emit_hdp_flush(struct amdgpu_ring *ring) argument
503 sdma_v5_0_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq, unsigned flags) argument
668 struct amdgpu_ring *ring; local
969 sdma_v5_0_ring_test_ring(struct amdgpu_ring *ring) argument
1046 sdma_v5_0_ring_test_ib(struct amdgpu_ring *ring, long timeout) argument
1227 sdma_v5_0_ring_pad_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib) argument
1252 sdma_v5_0_ring_emit_pipeline_sync(struct amdgpu_ring *ring) argument
1281 sdma_v5_0_ring_emit_vm_flush(struct amdgpu_ring *ring, unsigned vmid, uint64_t pd_addr) argument
1287 sdma_v5_0_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg, uint32_t val) argument
1296 sdma_v5_0_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg, uint32_t val, uint32_t mask) argument
1310 sdma_v5_0_ring_emit_reg_write_reg_wait(struct amdgpu_ring *ring, uint32_t reg0, uint32_t reg1, uint32_t ref, uint32_t mask) argument
1341 struct amdgpu_ring *ring; local
1475 sdma_v5_0_ring_preempt_ib(struct amdgpu_ring *ring) argument
[all...]
amdgpu_uvd.h  45 struct amdgpu_ring ring; member in struct:amdgpu_uvd_inst
76 int amdgpu_uvd_entity_init(struct amdgpu_device *adev, struct amdgpu_ring *ring);
80 int amdgpu_uvd_get_create_msg(struct amdgpu_ring *ring, uint32_t handle,
82 int amdgpu_uvd_get_destroy_msg(struct amdgpu_ring *ring, uint32_t handle,
89 void amdgpu_uvd_ring_begin_use(struct amdgpu_ring *ring);
90 void amdgpu_uvd_ring_end_use(struct amdgpu_ring *ring);
91 int amdgpu_uvd_ring_test_ib(struct amdgpu_ring *ring, long timeout);
sdma_v5_2.c  92 static unsigned sdma_v5_2_ring_init_cond_exec(struct amdgpu_ring *ring, argument
97 amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_COND_EXE));
98 amdgpu_ring_write(ring, lower_32_bits(addr));
99 amdgpu_ring_write(ring, upper_32_bits(addr));
100 amdgpu_ring_write(ring, 1);
102 ret = ring->wptr & ring->buf_mask;
104 amdgpu_ring_write(ring, 0);
112 * @ring: amdgpu ring pointe
116 sdma_v5_2_ring_get_rptr(struct amdgpu_ring *ring) argument
134 sdma_v5_2_ring_get_wptr(struct amdgpu_ring *ring) argument
160 sdma_v5_2_ring_set_wptr(struct amdgpu_ring *ring) argument
194 sdma_v5_2_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count) argument
217 sdma_v5_2_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_job *job, struct amdgpu_ib *ib, uint32_t flags) argument
252 sdma_v5_2_ring_emit_mem_sync(struct amdgpu_ring *ring) argument
277 sdma_v5_2_ring_emit_hdp_flush(struct amdgpu_ring *ring) argument
312 sdma_v5_2_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq, unsigned flags) argument
475 struct amdgpu_ring *ring; local
809 sdma_v5_2_ring_test_ring(struct amdgpu_ring *ring) argument
886 sdma_v5_2_ring_test_ib(struct amdgpu_ring *ring, long timeout) argument
1067 sdma_v5_2_ring_pad_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib) argument
1092 sdma_v5_2_ring_emit_pipeline_sync(struct amdgpu_ring *ring) argument
1121 sdma_v5_2_ring_emit_vm_flush(struct amdgpu_ring *ring, unsigned vmid, uint64_t pd_addr) argument
1127 sdma_v5_2_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg, uint32_t val) argument
1136 sdma_v5_2_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg, uint32_t val, uint32_t mask) argument
1150 sdma_v5_2_ring_emit_reg_write_reg_wait(struct amdgpu_ring *ring, uint32_t reg0, uint32_t reg1, uint32_t ref, uint32_t mask) argument
1214 struct amdgpu_ring *ring; local
1332 sdma_v5_2_ring_preempt_ib(struct amdgpu_ring *ring) argument
1639 sdma_v5_2_ring_begin_use(struct amdgpu_ring *ring) argument
1654 sdma_v5_2_ring_end_use(struct amdgpu_ring *ring) argument
[all...]
sdma_v6_0.c  83 static unsigned sdma_v6_0_ring_init_cond_exec(struct amdgpu_ring *ring, argument
88 amdgpu_ring_write(ring, SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_COND_EXE));
89 amdgpu_ring_write(ring, lower_32_bits(addr));
90 amdgpu_ring_write(ring, upper_32_bits(addr));
91 amdgpu_ring_write(ring, 1);
93 ret = ring->wptr & ring->buf_mask;
95 amdgpu_ring_write(ring, 0);
103 * @ring: amdgpu ring pointe
107 sdma_v6_0_ring_get_rptr(struct amdgpu_ring *ring) argument
125 sdma_v6_0_ring_get_wptr(struct amdgpu_ring *ring) argument
145 sdma_v6_0_ring_set_wptr(struct amdgpu_ring *ring) argument
180 sdma_v6_0_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count) argument
203 sdma_v6_0_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_job *job, struct amdgpu_ib *ib, uint32_t flags) argument
238 sdma_v6_0_ring_emit_mem_sync(struct amdgpu_ring *ring) argument
263 sdma_v6_0_ring_emit_hdp_flush(struct amdgpu_ring *ring) argument
294 sdma_v6_0_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq, unsigned flags) argument
423 struct amdgpu_ring *ring; local
717 struct amdgpu_ring *ring; local
831 sdma_v6_0_ring_test_ring(struct amdgpu_ring *ring) argument
908 sdma_v6_0_ring_test_ib(struct amdgpu_ring *ring, long timeout) argument
1088 sdma_v6_0_ring_pad_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib) argument
1112 sdma_v6_0_ring_emit_pipeline_sync(struct amdgpu_ring *ring) argument
1140 sdma_v6_0_ring_emit_vm_flush(struct amdgpu_ring *ring, unsigned vmid, uint64_t pd_addr) argument
1167 sdma_v6_0_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg, uint32_t val) argument
1176 sdma_v6_0_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg, uint32_t val, uint32_t mask) argument
1190 sdma_v6_0_ring_emit_reg_write_reg_wait(struct amdgpu_ring *ring, uint32_t reg0, uint32_t reg1, uint32_t ref, uint32_t mask) argument
1238 struct amdgpu_ring *ring; local
1358 sdma_v6_0_ring_preempt_ib(struct amdgpu_ring *ring) argument
[all...]
uvd_v5_0.c  52 * @ring: amdgpu_ring pointer
56 static uint64_t uvd_v5_0_ring_get_rptr(struct amdgpu_ring *ring) argument
58 struct amdgpu_device *adev = ring->adev;
66 * @ring: amdgpu_ring pointer
70 static uint64_t uvd_v5_0_ring_get_wptr(struct amdgpu_ring *ring) argument
72 struct amdgpu_device *adev = ring->adev;
80 * @ring: amdgpu_ring pointer
84 static void uvd_v5_0_ring_set_wptr(struct amdgpu_ring *ring) argument
86 struct amdgpu_device *adev = ring->adev;
88 WREG32(mmUVD_RBC_RB_WPTR, lower_32_bits(ring
104 struct amdgpu_ring *ring; local
153 struct amdgpu_ring *ring = &adev->uvd.inst->ring; local
322 struct amdgpu_ring *ring = &adev->uvd.inst->ring; local
493 uvd_v5_0_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq, unsigned flags) argument
522 uvd_v5_0_ring_test_ring(struct amdgpu_ring *ring) argument
559 uvd_v5_0_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_job *job, struct amdgpu_ib *ib, uint32_t flags) argument
572 uvd_v5_0_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count) argument
[all...]
sdma_v2_4.c  82 * and each one supports 1 ring buffer used for gfx
86 * (ring buffer, IBs, etc.), but sDMA has it's own
182 * @ring: amdgpu ring pointer
186 static uint64_t sdma_v2_4_ring_get_rptr(struct amdgpu_ring *ring) argument
189 return *ring->rptr_cpu_addr >> 2;
195 * @ring: amdgpu ring pointer
199 static uint64_t sdma_v2_4_ring_get_wptr(struct amdgpu_ring *ring) argument
201 struct amdgpu_device *adev = ring
214 sdma_v2_4_ring_set_wptr(struct amdgpu_ring *ring) argument
221 sdma_v2_4_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count) argument
244 sdma_v2_4_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_job *job, struct amdgpu_ib *ib, uint32_t flags) argument
272 sdma_v2_4_ring_emit_hdp_flush(struct amdgpu_ring *ring) argument
304 sdma_v2_4_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq, unsigned flags) argument
400 struct amdgpu_ring *ring; local
528 sdma_v2_4_ring_test_ring(struct amdgpu_ring *ring) argument
581 sdma_v2_4_ring_test_ib(struct amdgpu_ring *ring, long timeout) argument
731 sdma_v2_4_ring_pad_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib) argument
755 sdma_v2_4_ring_emit_pipeline_sync(struct amdgpu_ring *ring) argument
783 sdma_v2_4_ring_emit_vm_flush(struct amdgpu_ring *ring, unsigned vmid, uint64_t pd_addr) argument
800 sdma_v2_4_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg, uint32_t val) argument
830 struct amdgpu_ring *ring; local
[all...]
vcn_v1_0.c  57 static void vcn_v1_0_ring_begin_use(struct amdgpu_ring *ring);
64 * Set ring and irq function pointers
91 struct amdgpu_ring *ring; local
122 ring = &adev->vcn.inst->ring_dec;
123 ring->vm_hub = AMDGPU_MMHUB0(0);
124 sprintf(ring->name, "vcn_dec");
125 r = amdgpu_ring_init(adev, ring, 512, &adev->vcn.inst->irq, 0,
144 ring = &adev->vcn.inst->ring_enc[i];
145 ring->vm_hub = AMDGPU_MMHUB0(0);
146 sprintf(ring
200 struct amdgpu_ring *ring = &adev->vcn.inst->ring_dec; local
791 struct amdgpu_ring *ring = &adev->vcn.inst->ring_dec; local
965 struct amdgpu_ring *ring = &adev->vcn.inst->ring_dec; local
1217 struct amdgpu_ring *ring; local
1382 vcn_v1_0_dec_ring_get_rptr(struct amdgpu_ring *ring) argument
1396 vcn_v1_0_dec_ring_get_wptr(struct amdgpu_ring *ring) argument
1410 vcn_v1_0_dec_ring_set_wptr(struct amdgpu_ring *ring) argument
1428 vcn_v1_0_dec_ring_insert_start(struct amdgpu_ring *ring) argument
1447 vcn_v1_0_dec_ring_insert_end(struct amdgpu_ring *ring) argument
1466 vcn_v1_0_dec_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq, unsigned flags) argument
1507 vcn_v1_0_dec_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_job *job, struct amdgpu_ib *ib, uint32_t flags) argument
1530 vcn_v1_0_dec_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg, uint32_t val, uint32_t mask) argument
1550 vcn_v1_0_dec_ring_emit_vm_flush(struct amdgpu_ring *ring, unsigned vmid, uint64_t pd_addr) argument
1565 vcn_v1_0_dec_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg, uint32_t val) argument
1588 vcn_v1_0_enc_ring_get_rptr(struct amdgpu_ring *ring) argument
1605 vcn_v1_0_enc_ring_get_wptr(struct amdgpu_ring *ring) argument
1622 vcn_v1_0_enc_ring_set_wptr(struct amdgpu_ring *ring) argument
1644 vcn_v1_0_enc_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq, unsigned flags) argument
1656 vcn_v1_0_enc_ring_insert_end(struct amdgpu_ring *ring) argument
1671 vcn_v1_0_enc_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_job *job, struct amdgpu_ib *ib, uint32_t flags) argument
1685 vcn_v1_0_enc_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg, uint32_t val, uint32_t mask) argument
1695 vcn_v1_0_enc_ring_emit_vm_flush(struct amdgpu_ring *ring, unsigned int vmid, uint64_t pd_addr) argument
1708 vcn_v1_0_enc_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg, uint32_t val) argument
1749 vcn_v1_0_dec_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count) argument
1828 vcn_v1_0_ring_begin_use(struct amdgpu_ring *ring) argument
1842 vcn_v1_0_set_pg_for_begin_use(struct amdgpu_ring *ring, bool set_clocks) argument
1881 vcn_v1_0_ring_end_use(struct amdgpu_ring *ring) argument
[all...]
uvd_v4_2.c  54 * @ring: amdgpu_ring pointer
58 static uint64_t uvd_v4_2_ring_get_rptr(struct amdgpu_ring *ring) argument
60 struct amdgpu_device *adev = ring->adev;
68 * @ring: amdgpu_ring pointer
72 static uint64_t uvd_v4_2_ring_get_wptr(struct amdgpu_ring *ring) argument
74 struct amdgpu_device *adev = ring->adev;
82 * @ring: amdgpu_ring pointer
86 static void uvd_v4_2_ring_set_wptr(struct amdgpu_ring *ring) argument
88 struct amdgpu_device *adev = ring->adev;
90 WREG32(mmUVD_RBC_RB_WPTR, lower_32_bits(ring
106 struct amdgpu_ring *ring; local
157 struct amdgpu_ring *ring = &adev->uvd.inst->ring; local
285 struct amdgpu_ring *ring = &adev->uvd.inst->ring; local
477 uvd_v4_2_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq, unsigned flags) argument
506 uvd_v4_2_ring_test_ring(struct amdgpu_ring *ring) argument
544 uvd_v4_2_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_job *job, struct amdgpu_ib *ib, uint32_t flags) argument
555 uvd_v4_2_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count) argument
[all...]
uvd_v3_1.c  40 * @ring: amdgpu_ring pointer
44 static uint64_t uvd_v3_1_ring_get_rptr(struct amdgpu_ring *ring) argument
46 struct amdgpu_device *adev = ring->adev;
54 * @ring: amdgpu_ring pointer
58 static uint64_t uvd_v3_1_ring_get_wptr(struct amdgpu_ring *ring) argument
60 struct amdgpu_device *adev = ring->adev;
68 * @ring: amdgpu_ring pointer
72 static void uvd_v3_1_ring_set_wptr(struct amdgpu_ring *ring) argument
74 struct amdgpu_device *adev = ring->adev;
76 WREG32(mmUVD_RBC_RB_WPTR, lower_32_bits(ring
89 uvd_v3_1_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_job *job, struct amdgpu_ib *ib, uint32_t flags) argument
110 uvd_v3_1_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq, unsigned flags) argument
139 uvd_v3_1_ring_test_ring(struct amdgpu_ring *ring) argument
167 uvd_v3_1_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count) argument
322 struct amdgpu_ring *ring = &adev->uvd.inst->ring; local
547 struct amdgpu_ring *ring; local
631 struct amdgpu_ring *ring = &adev->uvd.inst->ring; local
[all...]
/linux-master/drivers/net/ethernet/hisilicon/hns3/
hns3_trace.h  68 TP_PROTO(struct hns3_enet_ring *ring, int cur_ntu),
69 TP_ARGS(ring, cur_ntu),
77 __string(devname, ring->tqp->handle->kinfo.netdev->name)
81 __entry->index = ring->tqp->tqp_index;
82 __entry->ntu = ring->next_to_use;
83 __entry->ntc = ring->next_to_clean;
84 __entry->desc_dma = ring->desc_dma_addr,
85 memcpy(__entry->desc, &ring->desc[cur_ntu],
99 TP_PROTO(struct hns3_enet_ring *ring),
100 TP_ARGS(ring),
[all...]
/linux-master/drivers/crypto/intel/qat/qat_common/
adf_gen2_hw_csr_data.h  30 #define READ_CSR_RING_HEAD(csr_base_addr, bank, ring) \
32 ADF_RING_CSR_RING_HEAD + ((ring) << 2))
33 #define READ_CSR_RING_TAIL(csr_base_addr, bank, ring) \
35 ADF_RING_CSR_RING_TAIL + ((ring) << 2))
39 #define WRITE_CSR_RING_CONFIG(csr_base_addr, bank, ring, value) \
41 ADF_RING_CSR_RING_CONFIG + ((ring) << 2), value)
42 #define WRITE_CSR_RING_BASE(csr_base_addr, bank, ring, value) \
48 ADF_RING_CSR_RING_LBASE + ((ring) << 2), l_base); \
50 ADF_RING_CSR_RING_UBASE + ((ring) << 2), u_base); \
53 #define WRITE_CSR_RING_HEAD(csr_base_addr, bank, ring, valu
[all...]
adf_gen2_hw_csr_data.c  11 static u32 read_csr_ring_head(void __iomem *csr_base_addr, u32 bank, u32 ring) argument
13 return READ_CSR_RING_HEAD(csr_base_addr, bank, ring);
16 static void write_csr_ring_head(void __iomem *csr_base_addr, u32 bank, u32 ring, argument
19 WRITE_CSR_RING_HEAD(csr_base_addr, bank, ring, value);
22 static u32 read_csr_ring_tail(void __iomem *csr_base_addr, u32 bank, u32 ring) argument
24 return READ_CSR_RING_TAIL(csr_base_addr, bank, ring);
27 static void write_csr_ring_tail(void __iomem *csr_base_addr, u32 bank, u32 ring, argument
30 WRITE_CSR_RING_TAIL(csr_base_addr, bank, ring, value);
39 u32 ring, u32 value)
41 WRITE_CSR_RING_CONFIG(csr_base_addr, bank, ring, valu
38 write_csr_ring_config(void __iomem *csr_base_addr, u32 bank, u32 ring, u32 value) argument
44 write_csr_ring_base(void __iomem *csr_base_addr, u32 bank, u32 ring, dma_addr_t addr) argument
[all...]
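
The adf_gen2 macros above address per-ring control/status registers by adding (ring) << 2, i.e. one 32-bit slot per ring, to a fixed register offset inside a ring bank. The sketch below just computes such offsets in plain C; the DEMO_* constants are assumptions for illustration, not the real QAT register map.

/*
 * Userspace sketch of the per-ring CSR addressing scheme suggested by the
 * adf_gen2 macros above: each bank occupies a fixed stride of register
 * space, and per-ring registers (head, tail, base) sit at a register
 * offset plus (ring << 2), i.e. 4 bytes per ring.  All constants below
 * are illustrative, not the real QAT register map.
 */
#include <stdint.h>
#include <stdio.h>

#define DEMO_BANK_STRIDE     0x1000u  /* assumed size of one ring bank   */
#define DEMO_CSR_RING_HEAD   0x0c0u   /* assumed head-pointer reg offset */
#define DEMO_CSR_RING_TAIL   0x100u   /* assumed tail-pointer reg offset */

static uint32_t demo_ring_csr(uint32_t bank, uint32_t reg, uint32_t ring)
{
	/* bank base + register block + 4-byte slot for this ring */
	return bank * DEMO_BANK_STRIDE + reg + (ring << 2);
}

int main(void)
{
	/* Head CSR of ring 3 in bank 1, tail CSR of ring 0 in bank 0. */
	printf("head(bank=1, ring=3) at 0x%x\n",
	       demo_ring_csr(1, DEMO_CSR_RING_HEAD, 3));
	printf("tail(bank=0, ring=0) at 0x%x\n",
	       demo_ring_csr(0, DEMO_CSR_RING_TAIL, 0));
	return 0;
}
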
/linux-master/drivers/net/ethernet/intel/ice/
ice_base.c  249 * @vsi: VSI that ring belongs to
250 * @ring: ring to get the absolute queue index
253 static u16 ice_calc_txq_handle(struct ice_vsi *vsi, struct ice_tx_ring *ring, u8 tc) argument
255 WARN_ONCE(ice_ring_is_xdp(ring) && tc, "XDP ring can't belong to TC other than 0\n");
257 if (ring->ch)
258 return ring->q_index - ring->ch->base_q;
261 * count from TC that ring belong
274 ice_cfg_xps_tx_ring(struct ice_tx_ring *ring) argument
296 ice_setup_tx_ctx(struct ice_tx_ring *ring, struct ice_tlan_ctx *tlan_ctx, u16 pf_q) argument
381 ice_setup_rx_ctx(struct ice_rx_ring *ring) argument
502 ice_xsk_pool_fill_cb(struct ice_rx_ring *ring) argument
521 ice_vsi_cfg_rxq(struct ice_rx_ring *ring) argument
871 ice_vsi_cfg_txq(struct ice_vsi *vsi, struct ice_tx_ring *ring, struct ice_aqc_add_tx_qgrp *qg_buf) argument
1110 ice_vsi_stop_tx_ring(struct ice_vsi *vsi, enum ice_disq_rst_src rst_src, u16 rel_vmvf_num, struct ice_tx_ring *ring, struct ice_txq_meta *txq_meta) argument
1168 ice_fill_txq_meta(const struct ice_vsi *vsi, struct ice_tx_ring *ring, struct ice_txq_meta *txq_meta) argument
[all...]
ice_trace.h  62 /* Events related to DIM, q_vectors and ring containers */
117 /* Events related to a vsi & ring */
119 TP_PROTO(struct ice_tx_ring *ring, struct ice_tx_desc *desc,
122 TP_ARGS(ring, desc, buf),
123 TP_STRUCT__entry(__field(void *, ring)
126 __string(devname, ring->netdev->name)),
128 TP_fast_assign(__entry->ring = ring;
133 TP_printk("netdev: %s ring: %pK desc: %pK buf %pK", __get_str(devname),
134 __entry->ring, __entr
[all...]
/linux-master/drivers/net/can/spi/mcp251xfd/
mcp251xfd-chip-fifo.c  21 const struct mcp251xfd_rx_ring *ring)
32 ring->obj_num - 1) |
45 MCP251XFD_REG_FIFOCON(ring->fifo_nr), fifo_con);
50 const struct mcp251xfd_rx_ring *ring)
54 fltcon = MCP251XFD_REG_FLTCON_FLTEN(ring->nr) |
55 MCP251XFD_REG_FLTCON_FBP(ring->nr, ring->fifo_nr);
58 MCP251XFD_REG_FLTCON(ring->nr >> 2),
59 MCP251XFD_REG_FLTCON_FLT_MASK(ring->nr),
20 mcp251xfd_chip_rx_fifo_init_one(const struct mcp251xfd_priv *priv, const struct mcp251xfd_rx_ring *ring) argument
49 mcp251xfd_chip_rx_filter_init_one(const struct mcp251xfd_priv *priv, const struct mcp251xfd_rx_ring *ring) argument
/linux-master/drivers/mailbox/
bcm-flexrm-mailbox.c  8 * extension to Broadcom FlexRM ring manager. The FlexRM ring
13 * rings where each mailbox channel represents a separate FlexRM ring.
128 /* ====== FlexRM ring descriptor defines ===== */
292 /* ====== FlexRM ring descriptor helper routines ===== */
924 struct flexrm_ring *ring; local
931 ring = &mbox->rings[i];
932 if (readl(ring->regs + RING_CONTROL) &
939 ring->num, state,
940 (unsigned long long)ring
952 struct flexrm_ring *ring; local
975 flexrm_new_request(struct flexrm_ring *ring, struct brcm_message *batch_msg, struct brcm_message *msg) argument
1074 flexrm_process_completions(struct flexrm_ring *ring) argument
1196 struct flexrm_ring *ring = chan->con_priv; local
1229 struct flexrm_ring *ring = chan->con_priv; local
1364 struct flexrm_ring *ring = chan->con_priv; local
1449 struct flexrm_ring *ring; local
1477 struct flexrm_ring *ring = &mbox->rings[desc->msi_index]; local
1491 struct flexrm_ring *ring; local
[all...]
/linux-master/net/9p/
trans_xen.c  24 #define XEN_9PFS_RING_SIZE(ring) XEN_FLEX_RING_SIZE(ring->intf->ring_order)
34 /* One per ring, more than one per 9pfs share */
42 /* protect a ring from concurrent accesses */
103 static bool p9_xen_write_todo(struct xen_9pfs_dataring *ring, RING_IDX size) argument
107 cons = ring->intf->out_cons;
108 prod = ring->intf->out_prod;
111 return XEN_9PFS_RING_SIZE(ring) -
112 xen_9pfs_queued(prod, cons, XEN_9PFS_RING_SIZE(ring)) >= size;
121 struct xen_9pfs_dataring *ring; local
173 struct xen_9pfs_dataring *ring; local
243 struct xen_9pfs_dataring *ring = r; local
282 struct xen_9pfs_dataring *ring = &priv->rings[i]; local
319 xen_9pfs_front_alloc_dataring(struct xenbus_device *dev, struct xen_9pfs_dataring *ring, unsigned int order) argument
[all...]
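
The trans_xen.c hit decides whether a write fits by measuring how much of the shared ring is already queued between the producer and consumer indices and comparing the remainder against the requested size. A minimal sketch of that free-space test, assuming a power-of-two ring size and invented demo_* names rather than the xen_9pfs helpers:

/*
 * Sketch of the free-space test used in the trans_xen.c hit above: with
 * free-running producer/consumer indices, the number of queued bytes is
 * (prod - cons) masked by the ring size, and a write of `size` bytes
 * fits if ring_size - queued >= size.  This is a generic single-producer
 * ring check, not the xen_9pfs API itself.
 */
#include <assert.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Ring size must be a power of two so index arithmetic wraps cleanly. */
#define DEMO_RING_SIZE 64u

static uint32_t demo_queued(uint32_t prod, uint32_t cons)
{
	return (prod - cons) & (DEMO_RING_SIZE - 1);
}

static bool demo_write_todo(uint32_t prod, uint32_t cons, uint32_t size)
{
	return DEMO_RING_SIZE - demo_queued(prod, cons) >= size;
}

int main(void)
{
	uint32_t prod = 70, cons = 20;   /* free-running indices */

	assert(demo_queued(prod, cons) == 50);
	printf("room for 14 bytes: %s\n",
	       demo_write_todo(prod, cons, 14) ? "yes" : "no");
	printf("room for 15 bytes: %s\n",
	       demo_write_todo(prod, cons, 15) ? "yes" : "no");
	return 0;
}
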
/linux-master/drivers/gpu/drm/radeon/
vce_v1_0.c  56 * @ring: radeon_ring pointer
61 struct radeon_ring *ring)
63 if (ring->idx == TN_RING_TYPE_VCE1_INDEX)
73 * @ring: radeon_ring pointer
78 struct radeon_ring *ring)
80 if (ring->idx == TN_RING_TYPE_VCE1_INDEX)
90 * @ring: radeon_ring pointer
95 struct radeon_ring *ring)
97 if (ring->idx == TN_RING_TYPE_VCE1_INDEX)
98 WREG32(VCE_RB_WPTR, ring
60 vce_v1_0_get_rptr(struct radeon_device *rdev, struct radeon_ring *ring) argument
77 vce_v1_0_get_wptr(struct radeon_device *rdev, struct radeon_ring *ring) argument
94 vce_v1_0_set_wptr(struct radeon_device *rdev, struct radeon_ring *ring) argument
291 struct radeon_ring *ring; local
358 struct radeon_ring *ring; local
[all...]
/linux-master/drivers/gpu/drm/msm/adreno/
a5xx_preempt.c  39 /* Write the most recent wptr for the given ring into the hardware */
40 static inline void update_wptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring) argument
45 if (!ring)
48 spin_lock_irqsave(&ring->preempt_lock, flags);
49 wptr = get_wptr(ring);
50 spin_unlock_irqrestore(&ring->preempt_lock, flags);
63 struct msm_ringbuffer *ring = gpu->rb[i]; local
65 spin_lock_irqsave(&ring->preempt_lock, flags);
66 empty = (get_wptr(ring) == gpu->funcs->get_rptr(gpu, ring));
95 struct msm_ringbuffer *ring; local
220 preempt_init_ring(struct a5xx_gpu *a5xx_gpu, struct msm_ringbuffer *ring) argument
[all...]
