Searched refs:ring (Results 126 - 150 of 804) sorted by relevance


/linux-master/drivers/gpu/drm/amd/amdgpu/
mes_v10_1.c 49 static void mes_v10_1_ring_set_wptr(struct amdgpu_ring *ring) argument
51 struct amdgpu_device *adev = ring->adev;
53 if (ring->use_doorbell) {
54 atomic64_set((atomic64_t *)ring->wptr_cpu_addr,
55 ring->wptr);
56 WDOORBELL64(ring->doorbell_index, ring->wptr);
62 static u64 mes_v10_1_ring_get_rptr(struct amdgpu_ring *ring) argument
64 return *ring->rptr_cpu_addr;
67 static u64 mes_v10_1_ring_get_wptr(struct amdgpu_ring *ring) argument
98 struct amdgpu_ring *ring = &mes->ring; local
629 mes_v10_1_mqd_init(struct amdgpu_ring *ring) argument
838 struct amdgpu_ring *ring; local
861 struct amdgpu_ring *ring; local
888 struct amdgpu_ring *ring; local
992 mes_v10_1_kiq_setting(struct amdgpu_ring *ring) argument
[all...]
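
The mes_v10_1.c hits above show the usual amdgpu write-pointer update: publish the new wptr at a CPU-visible address, then ring the doorbell so the consumer notices. A minimal standalone sketch of that idea, using simplified stand-in types (demo_ring and its fields are illustrative, not the real amdgpu definitions):

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdint.h>

    /* Simplified stand-ins for illustration; not the amdgpu structures. */
    struct demo_ring {
        bool use_doorbell;
        _Atomic uint64_t *wptr_cpu_addr;    /* shadow wptr the firmware can poll */
        volatile uint64_t *doorbell;        /* mapped doorbell register */
        uint64_t wptr;
    };

    /* Publish the new write pointer, then kick the doorbell so the
     * consumer (the MES firmware in the snippet above) picks it up. */
    static void demo_ring_set_wptr(struct demo_ring *ring)
    {
        if (ring->use_doorbell) {
            atomic_store(ring->wptr_cpu_addr, ring->wptr);
            *ring->doorbell = ring->wptr;
        }
    }
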
vce_v2_0.c 51 * @ring: amdgpu_ring pointer
55 static uint64_t vce_v2_0_ring_get_rptr(struct amdgpu_ring *ring) argument
57 struct amdgpu_device *adev = ring->adev;
59 if (ring->me == 0)
68 * @ring: amdgpu_ring pointer
72 static uint64_t vce_v2_0_ring_get_wptr(struct amdgpu_ring *ring) argument
74 struct amdgpu_device *adev = ring->adev;
76 if (ring->me == 0)
85 * @ring: amdgpu_ring pointer
89 static void vce_v2_0_ring_set_wptr(struct amdgpu_ring *ring) argument
232 struct amdgpu_ring *ring; local
415 struct amdgpu_ring *ring; local
[all...]
jpeg_v2_5.c 55 * Set ring and irq function pointers
90 struct amdgpu_ring *ring; local
129 ring = adev->jpeg.inst[i].ring_dec;
130 ring->use_doorbell = true;
132 ring->vm_hub = AMDGPU_MMHUB1(0);
134 ring->vm_hub = AMDGPU_MMHUB0(0);
135 ring->doorbell_index = (adev->doorbell_index.vcn.vcn_ring0_1 << 1) + 1 + 8 * i;
136 sprintf(ring->name, "jpeg_dec_%d", i);
137 r = amdgpu_ring_init(adev, ring, 512, &adev->jpeg.inst[i].irq,
183 struct amdgpu_ring *ring; local
326 struct amdgpu_ring *ring; local
410 jpeg_v2_5_dec_ring_get_rptr(struct amdgpu_ring *ring) argument
424 jpeg_v2_5_dec_ring_get_wptr(struct amdgpu_ring *ring) argument
441 jpeg_v2_5_dec_ring_set_wptr(struct amdgpu_ring *ring) argument
460 jpeg_v2_6_dec_ring_insert_start(struct amdgpu_ring *ring) argument
478 jpeg_v2_6_dec_ring_insert_end(struct amdgpu_ring *ring) argument
[all...]
amdgpu_gfx.c 174 struct amdgpu_ring *ring)
176 int queue = ring->queue;
177 int pipe = ring->pipe;
184 int me = ring->me;
188 if (ring == &adev->gfx.gfx_ring[bit])
196 struct amdgpu_ring *ring)
202 ring == &adev->gfx.compute_ring[0])
273 struct amdgpu_ring *ring, int xcc_id)
296 ring->me = mec + 1;
297 ring
173 amdgpu_gfx_is_high_priority_graphics_queue(struct amdgpu_device *adev, struct amdgpu_ring *ring) argument
195 amdgpu_gfx_is_high_priority_compute_queue(struct amdgpu_device *adev, struct amdgpu_ring *ring) argument
272 amdgpu_gfx_kiq_acquire(struct amdgpu_device *adev, struct amdgpu_ring *ring, int xcc_id) argument
311 struct amdgpu_ring *ring = &kiq->ring; local
342 amdgpu_gfx_kiq_free_ring(struct amdgpu_ring *ring) argument
386 struct amdgpu_ring *ring = &kiq->ring; local
474 struct amdgpu_ring *ring = NULL; local
948 struct amdgpu_ring *ring = &kiq->ring; local
1016 struct amdgpu_ring *ring = &kiq->ring; local
[all...]
vce_v3_0.c 73 * @ring: amdgpu_ring pointer
77 static uint64_t vce_v3_0_ring_get_rptr(struct amdgpu_ring *ring) argument
79 struct amdgpu_device *adev = ring->adev;
89 if (ring->me == 0)
91 else if (ring->me == 1)
105 * @ring: amdgpu_ring pointer
109 static uint64_t vce_v3_0_ring_get_wptr(struct amdgpu_ring *ring) argument
111 struct amdgpu_device *adev = ring->adev;
121 if (ring->me == 0)
123 else if (ring
141 vce_v3_0_ring_set_wptr(struct amdgpu_ring *ring) argument
267 struct amdgpu_ring *ring; local
421 struct amdgpu_ring *ring; local
860 vce_v3_0_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_job *job, struct amdgpu_ib *ib, uint32_t flags) argument
874 vce_v3_0_emit_vm_flush(struct amdgpu_ring *ring, unsigned int vmid, uint64_t pd_addr) argument
886 vce_v3_0_emit_pipeline_sync(struct amdgpu_ring *ring) argument
[all...]
jpeg_v5_0_0.c 45 * Set ring and irq function pointers
70 struct amdgpu_ring *ring; local
87 ring = adev->jpeg.inst->ring_dec;
88 ring->use_doorbell = true;
89 ring->doorbell_index = (adev->doorbell_index.vcn.vcn_ring0_1 << 1) + 1;
90 ring->vm_hub = AMDGPU_MMHUB0(0);
92 sprintf(ring->name, "jpeg_dec");
93 r = amdgpu_ring_init(adev, ring, 512, &adev->jpeg.inst->irq, 0,
134 struct amdgpu_ring *ring = adev->jpeg.inst->ring_dec; local
137 adev->nbio.funcs->vcn_doorbell_range(adev, ring
291 struct amdgpu_ring *ring = adev->jpeg.inst->ring_dec; local
370 jpeg_v5_0_0_dec_ring_get_rptr(struct amdgpu_ring *ring) argument
384 jpeg_v5_0_0_dec_ring_get_wptr(struct amdgpu_ring *ring) argument
401 jpeg_v5_0_0_dec_ring_set_wptr(struct amdgpu_ring *ring) argument
[all...]
amdgpu_job.c 36 struct amdgpu_ring *ring = to_amdgpu_ring(s_job->sched); local
39 struct amdgpu_device *adev = ring->adev;
55 amdgpu_ring_soft_recovery(ring, job->vmid, s_job->s_fence->parent)) {
56 DRM_ERROR("ring %s timeout, but soft recovered\n",
61 DRM_ERROR("ring %s timeout, signaled seq=%u, emitted seq=%u\n",
62 job->base.sched->name, atomic_read(&ring->fence_drv.last_seq),
63 ring->fence_drv.sync_seq);
65 ti = amdgpu_vm_get_task_info_pasid(ring->adev, job->pasid);
74 if (amdgpu_device_should_recover_gpu(ring->adev)) {
82 r = amdgpu_device_gpu_recover(ring
166 struct amdgpu_ring *ring = to_amdgpu_ring(job->base.sched); local
241 amdgpu_job_submit_direct(struct amdgpu_job *job, struct amdgpu_ring *ring, struct dma_fence **fence) argument
260 struct amdgpu_ring *ring = to_amdgpu_ring(s_entity->rq->sched); local
290 struct amdgpu_ring *ring = to_amdgpu_ring(sched_job->sched); local
[all...]
gfx_v9_4_3.c 84 struct amdgpu_ring *ring)
87 uint64_t mqd_addr = amdgpu_bo_gpu_offset(ring->mqd_obj);
88 uint64_t wptr_addr = adev->wb.gpu_addr + (ring->wptr_offs * 4);
89 uint32_t eng_sel = ring->funcs->type == AMDGPU_RING_TYPE_GFX ? 4 : 0;
96 PACKET3_MAP_QUEUES_QUEUE(ring->queue) |
97 PACKET3_MAP_QUEUES_PIPE(ring->pipe) |
98 PACKET3_MAP_QUEUES_ME((ring->me == 1 ? 0 : 1)) |
107 PACKET3_MAP_QUEUES_DOORBELL_OFFSET(ring->doorbell_index));
115 struct amdgpu_ring *ring,
119 uint32_t eng_sel = ring
83 gfx_v9_4_3_kiq_map_queues(struct amdgpu_ring *kiq_ring, struct amdgpu_ring *ring) argument
114 gfx_v9_4_3_kiq_unmap_queues(struct amdgpu_ring *kiq_ring, struct amdgpu_ring *ring, enum amdgpu_unmap_queues_action action, u64 gpu_addr, u64 seq) argument
141 gfx_v9_4_3_kiq_query_status(struct amdgpu_ring *kiq_ring, struct amdgpu_ring *ring, u64 addr, u64 seq) argument
218 gfx_v9_4_3_write_data_to_reg(struct amdgpu_ring *ring, int eng_sel, bool wc, uint32_t reg, uint32_t val) argument
230 gfx_v9_4_3_wait_reg_mem(struct amdgpu_ring *ring, int eng_sel, int mem_space, int opt, uint32_t addr0, uint32_t addr1, uint32_t ref, uint32_t mask, uint32_t inv) argument
252 gfx_v9_4_3_ring_test_ring(struct amdgpu_ring *ring) argument
287 gfx_v9_4_3_ring_test_ib(struct amdgpu_ring *ring, long timeout) argument
808 struct amdgpu_ring *ring = &adev->gfx.compute_ring[ring_id]; local
1386 gfx_v9_4_3_update_spm_vmid(struct amdgpu_device *adev, struct amdgpu_ring *ring, unsigned vmid) argument
1507 gfx_v9_4_3_xcc_kiq_setting(struct amdgpu_ring *ring, int xcc_id) argument
1521 gfx_v9_4_3_mqd_set_priority(struct amdgpu_ring *ring, struct v9_mqd *mqd) argument
1534 gfx_v9_4_3_xcc_mqd_init(struct amdgpu_ring *ring, int xcc_id) argument
1662 gfx_v9_4_3_xcc_kiq_init_register(struct amdgpu_ring *ring, int xcc_id) argument
1776 gfx_v9_4_3_xcc_q_fini_register(struct amdgpu_ring *ring, int xcc_id) argument
1816 gfx_v9_4_3_xcc_kiq_init_queue(struct amdgpu_ring *ring, int xcc_id) argument
1863 gfx_v9_4_3_xcc_kcq_init_queue(struct amdgpu_ring *ring, int xcc_id) argument
1903 struct amdgpu_ring *ring; local
1924 struct amdgpu_ring *ring; local
1948 struct amdgpu_ring *ring = NULL; local
1977 struct amdgpu_ring *ring; local
2207 gfx_v9_4_3_ring_emit_gds_switch(struct amdgpu_ring *ring, uint32_t vmid, uint32_t gds_base, uint32_t gds_size, uint32_t gws_base, uint32_t gws_size, uint32_t oa_base, uint32_t oa_size) argument
2542 gfx_v9_4_3_ring_emit_hdp_flush(struct amdgpu_ring *ring) argument
2571 gfx_v9_4_3_ring_emit_ib_compute(struct amdgpu_ring *ring, struct amdgpu_job *job, struct amdgpu_ib *ib, uint32_t flags) argument
2606 gfx_v9_4_3_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq, unsigned flags) argument
2640 gfx_v9_4_3_ring_emit_pipeline_sync(struct amdgpu_ring *ring) argument
2651 gfx_v9_4_3_ring_emit_vm_flush(struct amdgpu_ring *ring, unsigned vmid, uint64_t pd_addr) argument
2657 gfx_v9_4_3_ring_get_rptr_compute(struct amdgpu_ring *ring) argument
2662 gfx_v9_4_3_ring_get_wptr_compute(struct amdgpu_ring *ring) argument
2674 gfx_v9_4_3_ring_set_wptr_compute(struct amdgpu_ring *ring) argument
2687 gfx_v9_4_3_ring_emit_fence_kiq(struct amdgpu_ring *ring, u64 addr, u64 seq, unsigned int flags) argument
2714 gfx_v9_4_3_ring_emit_rreg(struct amdgpu_ring *ring, uint32_t reg, uint32_t reg_val_offs) argument
2731 gfx_v9_4_3_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg, uint32_t val) argument
2754 gfx_v9_4_3_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg, uint32_t val, uint32_t mask) argument
2760 gfx_v9_4_3_ring_emit_reg_write_reg_wait(struct amdgpu_ring *ring, uint32_t reg0, uint32_t reg1, uint32_t ref, uint32_t mask) argument
2923 struct amdgpu_ring *ring; local
2959 struct amdgpu_ring *ring; local
3005 gfx_v9_4_3_emit_mem_sync(struct amdgpu_ring *ring) argument
3024 gfx_v9_4_3_emit_wave_limit_cs(struct amdgpu_ring *ring, uint32_t pipe, bool enable) argument
3055 gfx_v9_4_3_emit_wave_limit(struct amdgpu_ring *ring, bool enable) argument
[all...]
gfx_v9_0.c 758 static void gfx_v9_0_ring_emit_de_meta(struct amdgpu_ring *ring, bool resume, bool usegds);
759 static u64 gfx_v9_0_ring_get_rptr_compute(struct amdgpu_ring *ring);
787 struct amdgpu_ring *ring)
789 uint64_t mqd_addr = amdgpu_bo_gpu_offset(ring->mqd_obj);
790 uint64_t wptr_addr = ring->wptr_gpu_addr;
791 uint32_t eng_sel = ring->funcs->type == AMDGPU_RING_TYPE_GFX ? 4 : 0;
798 PACKET3_MAP_QUEUES_QUEUE(ring->queue) |
799 PACKET3_MAP_QUEUES_PIPE(ring->pipe) |
800 PACKET3_MAP_QUEUES_ME((ring->me == 1 ? 0 : 1)) |
809 PACKET3_MAP_QUEUES_DOORBELL_OFFSET(ring
786 gfx_v9_0_kiq_map_queues(struct amdgpu_ring *kiq_ring, struct amdgpu_ring *ring) argument
816 gfx_v9_0_kiq_unmap_queues(struct amdgpu_ring *kiq_ring, struct amdgpu_ring *ring, enum amdgpu_unmap_queues_action action, u64 gpu_addr, u64 seq) argument
844 gfx_v9_0_kiq_query_status(struct amdgpu_ring *kiq_ring, struct amdgpu_ring *ring, u64 addr, u64 seq) argument
960 gfx_v9_0_write_data_to_reg(struct amdgpu_ring *ring, int eng_sel, bool wc, uint32_t reg, uint32_t val) argument
972 gfx_v9_0_wait_reg_mem(struct amdgpu_ring *ring, int eng_sel, int mem_space, int opt, uint32_t addr0, uint32_t addr1, uint32_t ref, uint32_t mask, uint32_t inv) argument
994 gfx_v9_0_ring_test_ring(struct amdgpu_ring *ring) argument
1024 gfx_v9_0_ring_test_ib(struct amdgpu_ring *ring, long timeout) argument
1969 struct amdgpu_ring *ring = &adev->gfx.compute_ring[ring_id]; local
2001 struct amdgpu_ring *ring; local
3025 struct amdgpu_ring *ring = &adev->gfx.gfx_ring[0]; local
3095 struct amdgpu_ring *ring; local
3217 gfx_v9_0_kiq_setting(struct amdgpu_ring *ring) argument
3231 gfx_v9_0_mqd_set_priority(struct amdgpu_ring *ring, struct v9_mqd *mqd) argument
3244 gfx_v9_0_mqd_init(struct amdgpu_ring *ring) argument
3376 gfx_v9_0_kiq_init_register(struct amdgpu_ring *ring) argument
3490 gfx_v9_0_kiq_fini_register(struct amdgpu_ring *ring) argument
3529 gfx_v9_0_kiq_init_queue(struct amdgpu_ring *ring) argument
3577 gfx_v9_0_kcq_init_queue(struct amdgpu_ring *ring) argument
3617 struct amdgpu_ring *ring; local
3641 struct amdgpu_ring *ring = NULL; local
3671 struct amdgpu_ring *ring; local
3926 struct amdgpu_ring *ring = &kiq->ring; local
4032 gfx_v9_0_ring_emit_gds_switch(struct amdgpu_ring *ring, uint32_t vmid, uint32_t gds_base, uint32_t gds_size, uint32_t gws_base, uint32_t gws_size, uint32_t oa_base, uint32_t oa_size) argument
4299 struct amdgpu_ring *ring = &adev->gfx.compute_ring[0]; local
4346 struct amdgpu_ring *ring = &adev->gfx.compute_ring[0]; local
4904 gfx_v9_0_update_spm_vmid(struct amdgpu_device *adev, struct amdgpu_ring *ring, unsigned int vmid) argument
5073 gfx_v9_0_ring_get_rptr_gfx(struct amdgpu_ring *ring) argument
5078 gfx_v9_0_ring_get_wptr_gfx(struct amdgpu_ring *ring) argument
5094 gfx_v9_0_ring_set_wptr_gfx(struct amdgpu_ring *ring) argument
5108 gfx_v9_0_ring_emit_hdp_flush(struct amdgpu_ring *ring) argument
5137 gfx_v9_0_ring_emit_ib_gfx(struct amdgpu_ring *ring, struct amdgpu_job *job, struct amdgpu_ib *ib, uint32_t flags) argument
5178 gfx_v9_0_ring_patch_cntl(struct amdgpu_ring *ring, unsigned offset) argument
5187 gfx_v9_0_ring_patch_ce_meta(struct amdgpu_ring *ring, unsigned offset) argument
5219 gfx_v9_0_ring_patch_de_meta(struct amdgpu_ring *ring, unsigned offset) argument
5254 gfx_v9_0_ring_emit_ib_compute(struct amdgpu_ring *ring, struct amdgpu_job *job, struct amdgpu_ib *ib, uint32_t flags) argument
5289 gfx_v9_0_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq, unsigned flags) argument
5330 gfx_v9_0_ring_emit_pipeline_sync(struct amdgpu_ring *ring) argument
5341 gfx_v9_0_ring_emit_vm_flush(struct amdgpu_ring *ring, unsigned vmid, uint64_t pd_addr) argument
5354 gfx_v9_0_ring_get_rptr_compute(struct amdgpu_ring *ring) argument
5359 gfx_v9_0_ring_get_wptr_compute(struct amdgpu_ring *ring) argument
5371 gfx_v9_0_ring_set_wptr_compute(struct amdgpu_ring *ring) argument
5384 gfx_v9_0_ring_emit_fence_kiq(struct amdgpu_ring *ring, u64 addr, u64 seq, unsigned int flags) argument
5411 gfx_v9_ring_emit_sb(struct amdgpu_ring *ring) argument
5417 gfx_v9_0_ring_emit_ce_meta(struct amdgpu_ring *ring, bool resume) argument
5459 gfx_v9_0_ring_preempt_ib(struct amdgpu_ring *ring) argument
5517 gfx_v9_0_ring_emit_de_meta(struct amdgpu_ring *ring, bool resume, bool usegds) argument
5571 gfx_v9_0_ring_emit_frame_cntl(struct amdgpu_ring *ring, bool start, bool secure) argument
5580 gfx_v9_ring_emit_cntxcntl(struct amdgpu_ring *ring, uint32_t flags) argument
5613 gfx_v9_0_ring_emit_init_cond_exec(struct amdgpu_ring *ring, uint64_t addr) argument
5628 gfx_v9_0_ring_emit_rreg(struct amdgpu_ring *ring, uint32_t reg, uint32_t reg_val_offs) argument
5645 gfx_v9_0_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg, uint32_t val) argument
5668 gfx_v9_0_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg, uint32_t val, uint32_t mask) argument
5674 gfx_v9_0_ring_emit_reg_write_reg_wait(struct amdgpu_ring *ring, uint32_t reg0, uint32_t reg1, uint32_t ref, uint32_t mask) argument
5691 gfx_v9_0_ring_soft_recovery(struct amdgpu_ring *ring, unsigned vmid) argument
5893 struct amdgpu_ring *ring; local
5931 struct amdgpu_ring *ring; local
6765 gfx_v9_0_emit_mem_sync(struct amdgpu_ring *ring) argument
6784 gfx_v9_0_emit_wave_limit_cs(struct amdgpu_ring *ring, uint32_t pipe, bool enable) argument
6815 gfx_v9_0_emit_wave_limit(struct amdgpu_ring *ring, bool enable) argument
[all...]
/linux-master/drivers/net/ethernet/sfc/
ef100_ethtool.c 25 struct ethtool_ringparam *ring,
31 ring->rx_max_pending = EFX_EF100_MAX_DMAQ_SIZE;
32 ring->tx_max_pending = EFX_EF100_MAX_DMAQ_SIZE;
33 ring->rx_pending = efx->rxq_entries;
34 ring->tx_pending = efx->txq_entries;
24 ef100_ethtool_get_ringparam(struct net_device *net_dev, struct ethtool_ringparam *ring, struct kernel_ethtool_ringparam *kernel_ring, struct netlink_ext_ack *extack) argument
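
ef100_ethtool_get_ringparam() in the hit above just copies the hardware maximum and the currently configured RX/TX descriptor counts into the ringparam structure. A minimal sketch of that shape, using placeholder types and an illustrative limit rather than the real EF100/ethtool definitions:

    #include <stdint.h>

    /* Placeholder types; the real ones live in <linux/ethtool.h> and the driver. */
    struct demo_ringparam {
        uint32_t rx_max_pending, tx_max_pending;
        uint32_t rx_pending, tx_pending;
    };

    struct demo_nic {
        uint32_t rxq_entries, txq_entries;
    };

    #define DEMO_MAX_DMAQ_SIZE 4096    /* illustrative limit, not the EF100 value */

    /* Report the hardware maximum and the currently configured ring depths. */
    static void demo_get_ringparam(const struct demo_nic *nic,
                                   struct demo_ringparam *ring)
    {
        ring->rx_max_pending = DEMO_MAX_DMAQ_SIZE;
        ring->tx_max_pending = DEMO_MAX_DMAQ_SIZE;
        ring->rx_pending = nic->rxq_entries;
        ring->tx_pending = nic->txq_entries;
    }
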
/linux-master/drivers/net/ethernet/broadcom/
bgmac.c 41 static void bgmac_dma_tx_reset(struct bgmac *bgmac, struct bgmac_dma_ring *ring) argument
46 if (!ring->mmio_base)
49 /* Suspend DMA TX ring first.
53 bgmac_write(bgmac, ring->mmio_base + BGMAC_DMA_TX_CTL,
56 val = bgmac_read(bgmac, ring->mmio_base + BGMAC_DMA_TX_STATUS);
67 dev_err(bgmac->dev, "Timeout suspending DMA TX ring 0x%X (BGMAC_DMA_TX_STAT: 0x%08X)\n",
68 ring->mmio_base, val);
71 bgmac_write(bgmac, ring->mmio_base + BGMAC_DMA_TX_CTL, 0);
73 ring->mmio_base + BGMAC_DMA_TX_STATUS,
76 dev_warn(bgmac->dev, "DMA TX ring
86 bgmac_dma_tx_enable(struct bgmac *bgmac, struct bgmac_dma_ring *ring) argument
111 bgmac_dma_tx_add_buf(struct bgmac *bgmac, struct bgmac_dma_ring *ring, int i, int len, u32 ctl0) argument
131 bgmac_dma_tx_add(struct bgmac *bgmac, struct bgmac_dma_ring *ring, struct sk_buff *skb) argument
235 bgmac_dma_tx_free(struct bgmac *bgmac, struct bgmac_dma_ring *ring) argument
292 bgmac_dma_rx_reset(struct bgmac *bgmac, struct bgmac_dma_ring *ring) argument
306 bgmac_dma_rx_enable(struct bgmac *bgmac, struct bgmac_dma_ring *ring) argument
367 bgmac_dma_rx_update_index(struct bgmac *bgmac, struct bgmac_dma_ring *ring) argument
377 bgmac_dma_rx_setup_desc(struct bgmac *bgmac, struct bgmac_dma_ring *ring, int desc_idx) argument
412 bgmac_dma_rx_read(struct bgmac *bgmac, struct bgmac_dma_ring *ring, int weight) argument
504 bgmac_dma_unaligned(struct bgmac *bgmac, struct bgmac_dma_ring *ring, enum bgmac_dma_ring_type ring_type) argument
525 bgmac_dma_tx_ring_free(struct bgmac *bgmac, struct bgmac_dma_ring *ring) argument
552 bgmac_dma_rx_ring_free(struct bgmac *bgmac, struct bgmac_dma_ring *ring) argument
572 bgmac_dma_ring_desc_free(struct bgmac *bgmac, struct bgmac_dma_ring *ring, int num_slots) argument
615 struct bgmac_dma_ring *ring; local
688 struct bgmac_dma_ring *ring; local
1227 struct bgmac_dma_ring *ring; local
[all...]
bcm4908_enet.c 144 * DMA ring ops
148 struct bcm4908_enet_dma_ring *ring)
150 enet_write(enet, ring->cfg_block + ENET_DMA_CH_CFG_INT_MASK, ENET_DMA_INT_DEFAULTS);
154 struct bcm4908_enet_dma_ring *ring)
156 enet_write(enet, ring->cfg_block + ENET_DMA_CH_CFG_INT_MASK, 0);
160 struct bcm4908_enet_dma_ring *ring)
162 enet_write(enet, ring->cfg_block + ENET_DMA_CH_CFG_INT_STAT, ENET_DMA_INT_DEFAULTS);
170 struct bcm4908_enet_dma_ring *ring)
172 int size = ring->length * sizeof(struct bcm4908_enet_dma_ring_bd);
175 ring
147 bcm4908_enet_dma_ring_intrs_on(struct bcm4908_enet *enet, struct bcm4908_enet_dma_ring *ring) argument
153 bcm4908_enet_dma_ring_intrs_off(struct bcm4908_enet *enet, struct bcm4908_enet_dma_ring *ring) argument
159 bcm4908_enet_dma_ring_intrs_ack(struct bcm4908_enet *enet, struct bcm4908_enet_dma_ring *ring) argument
169 bcm4908_dma_alloc_buf_descs(struct bcm4908_enet *enet, struct bcm4908_enet_dma_ring *ring) argument
257 struct bcm4908_enet_dma_ring *ring = rings[i]; local
298 bcm4908_enet_dma_ring_init(struct bcm4908_enet *enet, struct bcm4908_enet_dma_ring *ring) argument
358 bcm4908_enet_dma_tx_ring_enable(struct bcm4908_enet *enet, struct bcm4908_enet_dma_ring *ring) argument
364 bcm4908_enet_dma_tx_ring_disable(struct bcm4908_enet *enet, struct bcm4908_enet_dma_ring *ring) argument
370 bcm4908_enet_dma_rx_ring_enable(struct bcm4908_enet *enet, struct bcm4908_enet_dma_ring *ring) argument
376 bcm4908_enet_dma_rx_ring_disable(struct bcm4908_enet *enet, struct bcm4908_enet_dma_ring *ring) argument
436 struct bcm4908_enet_dma_ring *ring; local
524 struct bcm4908_enet_dma_ring *ring = &enet->tx_ring; local
[all...]
/linux-master/drivers/net/ethernet/apm/xgene/
xgene_enet_hw.c 13 static void xgene_enet_ring_init(struct xgene_enet_desc_ring *ring) argument
15 u32 *ring_cfg = ring->state;
16 u64 addr = ring->dma;
17 enum xgene_enet_ring_cfgsize cfgsize = ring->cfgsize;
33 static void xgene_enet_ring_set_type(struct xgene_enet_desc_ring *ring) argument
35 u32 *ring_cfg = ring->state;
39 is_bufpool = xgene_enet_is_bufpool(ring->id);
50 static void xgene_enet_ring_set_recombbuf(struct xgene_enet_desc_ring *ring) argument
52 u32 *ring_cfg = ring->state;
60 static void xgene_enet_ring_wr32(struct xgene_enet_desc_ring *ring, argument
68 xgene_enet_ring_rd32(struct xgene_enet_desc_ring *ring, u32 offset, u32 *data) argument
76 xgene_enet_write_ring_state(struct xgene_enet_desc_ring *ring) argument
88 xgene_enet_clr_ring_state(struct xgene_enet_desc_ring *ring) argument
94 xgene_enet_set_ring_state(struct xgene_enet_desc_ring *ring) argument
106 xgene_enet_set_ring_id(struct xgene_enet_desc_ring *ring) argument
125 xgene_enet_clr_desc_ring_id(struct xgene_enet_desc_ring *ring) argument
134 xgene_enet_setup_ring( struct xgene_enet_desc_ring *ring) argument
161 xgene_enet_clear_ring(struct xgene_enet_desc_ring *ring) argument
179 xgene_enet_wr_cmd(struct xgene_enet_desc_ring *ring, int count) argument
184 xgene_enet_ring_len(struct xgene_enet_desc_ring *ring) argument
195 xgene_enet_parse_error(struct xgene_enet_desc_ring *ring, enum xgene_enet_err_code status) argument
732 xgene_enet_clear(struct xgene_enet_pdata *pdata, struct xgene_enet_desc_ring *ring) argument
[all...]
/linux-master/drivers/net/ethernet/actions/
owl-emac.c 140 static unsigned int owl_emac_ring_num_unused(struct owl_emac_ring *ring) argument
142 return CIRC_SPACE(ring->head, ring->tail, ring->size);
145 static unsigned int owl_emac_ring_get_next(struct owl_emac_ring *ring, argument
148 return (cur + 1) & (ring->size - 1);
151 static void owl_emac_ring_push_head(struct owl_emac_ring *ring) argument
153 ring->head = owl_emac_ring_get_next(ring, ring
156 owl_emac_ring_pop_tail(struct owl_emac_ring *ring) argument
181 struct owl_emac_ring *ring = &priv->rx_ring; local
220 struct owl_emac_ring *ring = &priv->tx_ring; local
243 struct owl_emac_ring *ring = &priv->rx_ring; local
262 struct owl_emac_ring *ring = &priv->tx_ring; local
279 owl_emac_ring_alloc(struct device *dev, struct owl_emac_ring *ring, unsigned int size) argument
492 struct owl_emac_ring *ring = &priv->tx_ring; local
570 struct owl_emac_ring *ring = &priv->tx_ring; local
633 struct owl_emac_ring *ring = &priv->tx_ring; local
697 struct owl_emac_ring *ring = &priv->tx_ring; local
748 struct owl_emac_ring *ring = &priv->rx_ring; local
[all...]
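
The owl-emac.c helpers above treat each descriptor ring as a power-of-two circular buffer: free space comes from the kernel's CIRC_SPACE() on the head/tail counters, and the next index is a masked increment. A self-contained sketch of that indexing, with CIRC_SPACE() expanded to its equivalent expression (all names here are illustrative):

    #include <stdio.h>

    /* size must be a power of two for the mask trick to work. */
    struct demo_ring {
        unsigned int head;    /* next slot the producer will fill */
        unsigned int tail;    /* next slot the consumer will drain */
        unsigned int size;
    };

    /* Equivalent of the kernel's CIRC_SPACE(head, tail, size). */
    static unsigned int demo_ring_unused(const struct demo_ring *r)
    {
        return (r->tail - r->head - 1) & (r->size - 1);
    }

    static unsigned int demo_ring_next(const struct demo_ring *r, unsigned int cur)
    {
        return (cur + 1) & (r->size - 1);
    }

    int main(void)
    {
        struct demo_ring r = { .head = 6, .tail = 2, .size = 8 };

        printf("unused slots: %u, next after head: %u\n",
               demo_ring_unused(&r), demo_ring_next(&r, r.head));
        return 0;
    }

Keeping the ring size a power of two lets the wrap-around be a single AND instead of a modulo or a compare-and-reset.
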
/linux-master/drivers/bluetooth/
hci_bcm4377.c 145 * Transfer ring entry
150 * id: Message id to recognize the answer in the completion ring entry
165 * Completion ring entry
169 * ring message.
170 * ring_id: Transfer ring ID which required this message
171 * msg_id: Message ID specified in transfer ring entry
192 * Control message used to create a completion ring
197 * id/id_again: Completion ring index
198 * ring_iova: DMA address of the ring buffer
199 * n_elements: Number of elements inside the ring buffe
437 void *ring; member in struct:bcm4377_transfer_ring
471 void *ring; member in struct:bcm4377_completion_ring
617 bcm4377_extract_msgid(struct bcm4377_data *bcm4377, struct bcm4377_transfer_ring *ring, u16 raw_msgid, u8 *msgid) argument
642 bcm4377_handle_event(struct bcm4377_data *bcm4377, struct bcm4377_transfer_ring *ring, u16 raw_msgid, u8 entry_flags, u8 type, void *payload, size_t len) argument
694 bcm4377_handle_ack(struct bcm4377_data *bcm4377, struct bcm4377_transfer_ring *ring, u16 raw_msgid) argument
725 bcm4377_handle_completion(struct bcm4377_data *bcm4377, struct bcm4377_completion_ring *ring, u16 pos) argument
798 bcm4377_poll_completion_ring(struct bcm4377_data *bcm4377, struct bcm4377_completion_ring *ring) argument
859 bcm4377_enqueue(struct bcm4377_data *bcm4377, struct bcm4377_transfer_ring *ring, void *data, size_t len, bool wait) argument
970 bcm4377_create_completion_ring(struct bcm4377_data *bcm4377, struct bcm4377_completion_ring *ring) argument
1004 bcm4377_destroy_completion_ring(struct bcm4377_data *bcm4377, struct bcm4377_completion_ring *ring) argument
1025 bcm4377_create_transfer_ring(struct bcm4377_data *bcm4377, struct bcm4377_transfer_ring *ring) argument
1094 bcm4377_destroy_transfer_ring(struct bcm4377_data *bcm4377, struct bcm4377_transfer_ring *ring) argument
1455 struct bcm4377_transfer_ring *ring; local
1507 bcm4377_alloc_transfer_ring(struct bcm4377_data *bcm4377, struct bcm4377_transfer_ring *ring) argument
1562 bcm4377_alloc_completion_ring(struct bcm4377_data *bcm4377, struct bcm4377_completion_ring *ring) argument
[all...]
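
The hci_bcm4377.c comments above describe how transfer rings and completion rings are paired: each message the driver enqueues carries an id, and the device's completion entry echoes the ring_id and msg_id so the answer can be matched to the original request. A generic sketch of that matching step, with hypothetical types that only mirror the idea, not the driver's actual ring layout:

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    #define DEMO_RING_ENTRIES 32

    /* One pending-request slot per possible message id (illustrative). */
    struct demo_transfer_ring {
        bool pending[DEMO_RING_ENTRIES];
        void *context[DEMO_RING_ENTRIES];
    };

    /* Completion entry as echoed by the device: which ring, which message. */
    struct demo_completion {
        uint16_t ring_id;
        uint16_t msg_id;
    };

    /* Match a completion back to the request that caused it. */
    static void *demo_complete(struct demo_transfer_ring *rings, size_t n_rings,
                               const struct demo_completion *c)
    {
        if (c->ring_id >= n_rings || c->msg_id >= DEMO_RING_ENTRIES)
            return NULL;
        if (!rings[c->ring_id].pending[c->msg_id])
            return NULL;

        rings[c->ring_id].pending[c->msg_id] = false;
        return rings[c->ring_id].context[c->msg_id];
    }
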
/linux-master/drivers/net/wireless/ath/wil6210/
txrx_edma.c 19 * status ring. Should be power of 2
87 wil_err(wil, "Cannot allocate a zero size status ring\n");
125 /* Allocate Tx status ring. Tx descriptor rings will be
152 struct wil_ring *ring, u32 i)
165 &ring->va[i].rx.enhanced;
233 struct wil_ring *ring = &wil->ring_rx; local
236 ring->swtail = *ring->edma_rx_swtail.va;
238 for (; next_head = wil_ring_next_head(ring),
239 (next_head != ring
151 wil_ring_alloc_skb_edma(struct wil6210_priv *wil, struct wil_ring *ring, u32 i) argument
263 wil_move_all_rx_buff_to_free_list(struct wil6210_priv *wil, struct wil_ring *ring) argument
295 struct wil_ring *ring = &wil->ring_rx; local
373 wil_ring_alloc_desc_ring(struct wil6210_priv *wil, struct wil_ring *ring) argument
418 wil_ring_free_edma(struct wil6210_priv *wil, struct wil_ring *ring) argument
483 struct wil_ring *ring = &wil->ring_rx; local
604 struct wil_ring *ring = &wil->ring_rx; local
701 struct wil_ring *ring = &wil->ring_tx[ring_id]; local
1069 struct wil_ring *ring = &wil->ring_rx; local
1163 struct wil_ring *ring = NULL; local
1351 wil_tx_tso_gen_desc(struct wil6210_priv *wil, void *buff_addr, int len, uint i, int tso_desc_type, skb_frag_t *frag, struct wil_ring *ring, struct sk_buff *skb, bool is_ipv4, int tcp_hdr_len, int skb_net_hdr_len, int mss, int *descs_used) argument
1402 __wil_tx_ring_tso_edma(struct wil6210_priv *wil, struct wil6210_vif *vif, struct wil_ring *ring, struct sk_buff *skb) argument
1550 struct wil_ring *ring = &wil->ring_tx[ring_id]; local
1608 struct wil_ring *ring = &wil->ring_rx; local
[all...]
/linux-master/drivers/net/ethernet/mediatek/
mtk_wed.h 114 wpdma_tx_r32(struct mtk_wed_device *dev, int ring, u32 reg) argument
116 if (!dev->tx_ring[ring].wpdma)
119 return readl(dev->tx_ring[ring].wpdma + reg);
123 wpdma_tx_w32(struct mtk_wed_device *dev, int ring, u32 reg, u32 val) argument
125 if (!dev->tx_ring[ring].wpdma)
128 writel(val, dev->tx_ring[ring].wpdma + reg);
132 wpdma_rx_r32(struct mtk_wed_device *dev, int ring, u32 reg) argument
134 if (!dev->rx_ring[ring].wpdma)
137 return readl(dev->rx_ring[ring].wpdma + reg);
141 wpdma_rx_w32(struct mtk_wed_device *dev, int ring, u3 argument
[all...]
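
The mtk_wed.h accessors above wrap reads and writes of a per-ring WPDMA register window and silently bail out when that ring has no mapping. A simplified standalone version of the same guard-then-access pattern, using plain pointers in place of the kernel's readl()/writel() (all names are made up for illustration):

    #include <stddef.h>
    #include <stdint.h>

    /* Illustrative stand-in: one register window per ring, possibly unmapped. */
    struct demo_ring {
        volatile uint32_t *regs;    /* NULL if this ring is not mapped */
    };

    struct demo_dev {
        struct demo_ring tx_ring[4];
    };

    static uint32_t demo_tx_r32(struct demo_dev *dev, int ring, size_t reg)
    {
        if (!dev->tx_ring[ring].regs)
            return 0;

        return dev->tx_ring[ring].regs[reg / sizeof(uint32_t)];
    }

    static void demo_tx_w32(struct demo_dev *dev, int ring, size_t reg, uint32_t val)
    {
        if (!dev->tx_ring[ring].regs)
            return;

        dev->tx_ring[ring].regs[reg / sizeof(uint32_t)] = val;
    }
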
/linux-master/drivers/net/can/flexcan/
flexcan-ethtool.c 21 flexcan_get_ringparam(struct net_device *ndev, struct ethtool_ringparam *ring, argument
27 ring->rx_max_pending = priv->mb_count;
28 ring->tx_max_pending = priv->mb_count;
31 ring->rx_pending = priv->offload.mb_last -
34 ring->rx_pending = 6; /* RX-FIFO depth is fixed */
37 ring->tx_pending = 1;
/linux-master/drivers/net/wireless/ath/ath11k/
dbring.h 59 struct ath11k_dbring *ring,
65 struct ath11k_dbring *ring,
68 struct ath11k_dbring *ring,
70 int ath11k_dbring_srng_setup(struct ath11k *ar, struct ath11k_dbring *ring,
78 void ath11k_dbring_srng_cleanup(struct ath11k *ar, struct ath11k_dbring *ring);
79 void ath11k_dbring_buf_cleanup(struct ath11k *ar, struct ath11k_dbring *ring);
/linux-master/drivers/usb/mtu3/
mtu3_qmu.c 119 static struct qmu_gpd *gpd_dma_to_virt(struct mtu3_gpd_ring *ring, argument
122 dma_addr_t dma_base = ring->dma;
123 struct qmu_gpd *gpd_head = ring->start;
132 static dma_addr_t gpd_virt_to_dma(struct mtu3_gpd_ring *ring, argument
135 dma_addr_t dma_base = ring->dma;
136 struct qmu_gpd *gpd_head = ring->start;
146 static void gpd_ring_init(struct mtu3_gpd_ring *ring, struct qmu_gpd *gpd) argument
148 ring->start = gpd;
149 ring->enqueue = gpd;
150 ring
156 struct mtu3_gpd_ring *ring = &mep->gpd_ring; local
168 struct mtu3_gpd_ring *ring = &mep->gpd_ring; local
182 struct mtu3_gpd_ring *ring = &mep->gpd_ring; local
203 advance_enq_gpd(struct mtu3_gpd_ring *ring) argument
214 advance_deq_gpd(struct mtu3_gpd_ring *ring) argument
225 gpd_ring_empty(struct mtu3_gpd_ring *ring) argument
247 struct mtu3_gpd_ring *ring = &mep->gpd_ring; local
290 struct mtu3_gpd_ring *ring = &mep->gpd_ring; local
335 struct mtu3_gpd_ring *ring = &mep->gpd_ring; local
428 struct mtu3_gpd_ring *ring = &mep->gpd_ring; local
477 struct mtu3_gpd_ring *ring = &mep->gpd_ring; local
511 struct mtu3_gpd_ring *ring = &mep->gpd_ring; local
551 struct mtu3_gpd_ring *ring = &mep->gpd_ring; local
[all...]
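
gpd_dma_to_virt() and gpd_virt_to_dma() above convert between a GPD's DMA address and its CPU virtual address by reusing the element's offset from the ring base in the other address space. A small self-contained sketch of that conversion with generic names (the demo_* types are illustrative, not the mtu3 ones):

    #include <stdint.h>

    typedef uint64_t dma_addr_t;    /* stand-in for the kernel typedef */

    struct demo_gpd { uint32_t word[4]; };    /* fixed-size ring element */

    struct demo_gpd_ring {
        struct demo_gpd *start;    /* CPU virtual address of element 0 */
        dma_addr_t dma;            /* DMA address of element 0 */
    };

    /* The same offset from the base applies in both address spaces. */
    static struct demo_gpd *demo_dma_to_virt(struct demo_gpd_ring *ring,
                                             dma_addr_t dma)
    {
        return ring->start + (dma - ring->dma) / sizeof(struct demo_gpd);
    }

    static dma_addr_t demo_virt_to_dma(struct demo_gpd_ring *ring,
                                       struct demo_gpd *gpd)
    {
        return ring->dma + (gpd - ring->start) * sizeof(struct demo_gpd);
    }
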
/linux-master/drivers/gpu/drm/msm/adreno/
a3xx_gpu.c 33 struct msm_ringbuffer *ring = submit->ring; local
47 OUT_PKT3(ring, CP_INDIRECT_BUFFER_PFD, 2);
48 OUT_RING(ring, lower_32_bits(submit->cmd[i].iova));
49 OUT_RING(ring, submit->cmd[i].size);
50 OUT_PKT2(ring);
55 OUT_PKT0(ring, REG_AXXX_CP_SCRATCH_REG2, 1);
56 OUT_RING(ring, submit->seqno);
62 OUT_PKT3(ring, CP_EVENT_WRITE, 1);
63 OUT_RING(ring, HLSQ_FLUS
87 struct msm_ringbuffer *ring = gpu->rb[0]; local
497 a3xx_get_rptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring) argument
[all...]
/linux-master/drivers/usb/host/
xhci-trace.h 111 TP_PROTO(struct xhci_ring *ring, struct xhci_generic_trb *trb),
112 TP_ARGS(ring, trb),
121 __entry->type = ring->type;
134 TP_PROTO(struct xhci_ring *ring, struct xhci_generic_trb *trb),
135 TP_ARGS(ring, trb)
139 TP_PROTO(struct xhci_ring *ring, struct xhci_generic_trb *trb),
140 TP_ARGS(ring, trb)
144 TP_PROTO(struct xhci_ring *ring, struct xhci_generic_trb *trb),
145 TP_ARGS(ring, trb)
149 TP_PROTO(struct xhci_ring *ring, struc
[all...]
/linux-master/drivers/gpu/drm/radeon/
ni.c 1367 int ring, u32 cp_int_cntl)
1369 WREG32(SRBM_GFX_CNTL, RINGID(ring));
1379 struct radeon_ring *ring = &rdev->ring[fence->ring]; local
1380 u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
1385 radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3));
1386 radeon_ring_write(ring, PACKET3_ENGINE_ME | cp_coher_cntl);
1387 radeon_ring_write(ring, 0xFFFFFFFF);
1388 radeon_ring_write(ring,
1366 cayman_cp_int_cntl_setup(struct radeon_device *rdev, int ring, u32 cp_int_cntl) argument
1401 struct radeon_ring *ring = &rdev->ring[ib->ring]; local
1448 cayman_gfx_get_rptr(struct radeon_device *rdev, struct radeon_ring *ring) argument
1467 cayman_gfx_get_wptr(struct radeon_device *rdev, struct radeon_ring *ring) argument
1482 cayman_gfx_set_wptr(struct radeon_device *rdev, struct radeon_ring *ring) argument
1526 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; local
1592 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; local
1635 struct radeon_ring *ring; local
1965 cayman_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring) argument
2026 struct radeon_ring *ring; local
2107 struct radeon_ring *ring; local
2134 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; local
2330 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; local
2663 cayman_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring, unsigned vm_id, uint64_t pd_addr) argument
[all...]
/linux-master/drivers/crypto/inside-secure/
safexcel.c 38 * assume each interface/ring to be in its own memory domain
511 /* ring base address */
512 writel(lower_32_bits(priv->ring[i].cdr.base_dma),
514 writel(upper_32_bits(priv->ring[i].cdr.base_dma),
559 /* ring base address */
560 writel(lower_32_bits(priv->ring[i].rdr.base_dma),
562 writel(upper_32_bits(priv->ring[i].rdr.base_dma),
585 /* enable ring interrupt */
599 dev_dbg(priv->dev, "HW init: using %d pipe(s) and %d ring(s)\n",
721 /* Clear interrupts for this ring */
806 safexcel_try_push_requests(struct safexcel_crypto_priv *priv, int ring) argument
820 safexcel_dequeue(struct safexcel_crypto_priv *priv, int ring) argument
940 safexcel_rdr_req_set(struct safexcel_crypto_priv *priv, int ring, struct safexcel_result_desc *rdesc, struct crypto_async_request *req) argument
951 safexcel_rdr_req_get(struct safexcel_crypto_priv *priv, int ring) argument
958 safexcel_complete(struct safexcel_crypto_priv *priv, int ring) argument
973 safexcel_invalidate_cache(struct crypto_async_request *async, struct safexcel_crypto_priv *priv, dma_addr_t ctxr_dma, int ring) argument
1011 safexcel_handle_result_descriptor(struct safexcel_crypto_priv *priv, int ring) argument
1084 int ring; member in struct:safexcel_ring_irq_data
1091 int ring = irq_data->ring, rc = IRQ_NONE; local
1128 int ring = irq_data->ring; local
[all...]
/linux-master/drivers/block/xen-blkback/
xenbus.c 18 /* On the XenBus the max length of 'ring-ref%u'. */
80 struct xen_blkif_ring *ring; local
110 ring = &blkif->rings[i];
111 ring->xenblkd = kthread_run(xen_blkif_schedule, ring, "%s-%d", name, i);
112 if (IS_ERR(ring->xenblkd)) {
113 err = PTR_ERR(ring->xenblkd);
114 ring->xenblkd = NULL;
124 ring = &blkif->rings[i];
125 kthread_stop(ring
140 struct xen_blkif_ring *ring = &blkif->rings[r]; local
192 xen_blkif_map(struct xen_blkif_ring *ring, grant_ref_t *gref, unsigned int nr_grefs, unsigned int evtchn) argument
274 struct xen_blkif_ring *ring = &blkif->rings[r]; local
966 read_per_ring_refs(struct xen_blkif_ring *ring, const char *dir) argument
[all...]

Completed in 282 milliseconds
