Lines Matching refs:segment

315 * struct xilinx_vdma_tx_segment - Descriptor segment
318 * @phys: Physical address of segment
327 * struct xilinx_axidma_tx_segment - Descriptor segment
330 * @phys: Physical address of segment
339 * struct xilinx_aximcdma_tx_segment - Descriptor segment
342 * @phys: Physical address of segment
351 * struct xilinx_cdma_tx_segment - Descriptor segment
354 * @phys: Physical address of segment
412 * @cyclic_seg_v: Statically allocated segment base for cyclic transfers
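
The four *_tx_segment structures listed above share one shape: a hardware descriptor paired with the physical address the engine will dereference, plus a list node linking the segment into its parent descriptor (the AXI DMA and MCDMA variants are also parked on a per-channel free list when idle). A minimal sketch of that shape; only phys, node, and the hw field names echoed elsewhere in this listing come from the driver, the rest is assumed for illustration:

#include <linux/list.h>
#include <linux/types.h>

/* Hypothetical, simplified hardware descriptor; the real layouts differ
 * between the VDMA, CDMA, AXI DMA, and AXI MCDMA engines. */
struct example_hw_desc {
        u32 next_desc;          /* DMA address of the next segment */
        u32 buf_addr;           /* lower 32 bits of the data buffer */
        u32 buf_addr_msb;       /* upper 32 bits of the data buffer */
        u32 control;            /* length plus SOP/EOP-style flags */
};

/* Hypothetical segment wrapper mirroring the documented fields; hw is
 * kept first so the engine can walk the descriptors in place. */
struct example_tx_segment {
        struct example_hw_desc hw;
        struct list_head node;  /* entry in a descriptor's segment list */
        dma_addr_t phys;        /* physical address of this segment */
};
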
663 * xilinx_vdma_alloc_tx_segment - Allocate transaction segment
666 * Return: The allocated segment on success and NULL on failure.
671 struct xilinx_vdma_tx_segment *segment;
674 segment = dma_pool_zalloc(chan->desc_pool, GFP_ATOMIC, &phys);
675 if (!segment)
678 segment->phys = phys;
680 return segment;
684 * xilinx_cdma_alloc_tx_segment - Allocate transaction segment
687 * Return: The allocated segment on success and NULL on failure.
692 struct xilinx_cdma_tx_segment *segment;
695 segment = dma_pool_zalloc(chan->desc_pool, GFP_ATOMIC, &phys);
696 if (!segment)
699 segment->phys = phys;
701 return segment;
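
Both pool-backed allocators above (VDMA and CDMA) follow the same pattern: dma_pool_zalloc() hands back a zeroed segment together with its DMA handle, the handle is stashed in phys, and NULL is propagated on failure. A sketch of that pattern, reusing the example_tx_segment type from the sketch above and assuming chan->desc_pool was created earlier with dma_pool_create():

#include <linux/dmapool.h>
#include <linux/gfp.h>

/* Hypothetical channel stub; only the descriptor pool matters here. */
struct example_chan {
        struct dma_pool *desc_pool;
};

static struct example_tx_segment *
example_alloc_tx_segment(struct example_chan *chan)
{
        struct example_tx_segment *segment;
        dma_addr_t phys;

        /* GFP_ATOMIC: prep callbacks may run in atomic context. */
        segment = dma_pool_zalloc(chan->desc_pool, GFP_ATOMIC, &phys);
        if (!segment)
                return NULL;

        segment->phys = phys;   /* address the hardware will chain to */
        return segment;
}
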
705 * xilinx_axidma_alloc_tx_segment - Allocate transaction segment
708 * Return: The allocated segment on success and NULL on failure.
713 struct xilinx_axidma_tx_segment *segment = NULL;
718 segment = list_first_entry(&chan->free_seg_list,
721 list_del(&segment->node);
725 if (!segment)
726 dev_dbg(chan->dev, "Could not find free tx segment\n");
728 return segment;
732 * xilinx_aximcdma_alloc_tx_segment - Allocate transaction segment
735 * Return: The allocated segment on success and NULL on failure.
740 struct xilinx_aximcdma_tx_segment *segment = NULL;
745 segment = list_first_entry(&chan->free_seg_list,
748 list_del(&segment->node);
752 return segment;
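
The AXI DMA and MCDMA allocators avoid the pool at prep time altogether: segments are pre-allocated when the channel's resources are set up, and a free one is popped off chan->free_seg_list under the channel lock. A sketch of that scheme, with illustrative names and again building on the segment type sketched earlier:

#include <linux/spinlock.h>
#include <linux/device.h>

/* Hypothetical channel stub with a pre-populated free list. */
struct example_freelist_chan {
        spinlock_t lock;
        struct list_head free_seg_list;
        struct device *dev;
};

static struct example_tx_segment *
example_alloc_from_free_list(struct example_freelist_chan *chan)
{
        struct example_tx_segment *segment = NULL;
        unsigned long flags;

        spin_lock_irqsave(&chan->lock, flags);
        if (!list_empty(&chan->free_seg_list)) {
                segment = list_first_entry(&chan->free_seg_list,
                                           struct example_tx_segment, node);
                list_del(&segment->node); /* now owned by the caller */
        }
        spin_unlock_irqrestore(&chan->lock, flags);

        if (!segment)
                dev_dbg(chan->dev, "Could not find free tx segment\n");

        return segment;
}
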
778 * xilinx_dma_free_tx_segment - Free transaction segment
780 * @segment: DMA transaction segment
783 struct xilinx_axidma_tx_segment *segment)
785 xilinx_dma_clean_hw_desc(&segment->hw);
787 list_add_tail(&segment->node, &chan->free_seg_list);
791 * xilinx_mcdma_free_tx_segment - Free transaction segment
793 * @segment: DMA transaction segment
797 segment)
799 xilinx_mcdma_clean_hw_desc(&segment->hw);
801 list_add_tail(&segment->node, &chan->free_seg_list);
805 * xilinx_cdma_free_tx_segment - Free transaction segment
807 * @segment: DMA transaction segment
810 struct xilinx_cdma_tx_segment *segment)
812 dma_pool_free(chan->desc_pool, segment, segment->phys);
816 * xilinx_vdma_free_tx_segment - Free transaction segment
818 * @segment: DMA transaction segment
821 struct xilinx_vdma_tx_segment *segment)
823 dma_pool_free(chan->desc_pool, segment, segment->phys);
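
Freeing mirrors allocation: the free-list variants scrub the hardware descriptor and push the segment back onto chan->free_seg_list, while the pool-backed variants return the memory to the DMA pool using the phys handle saved at allocation. Two short sketches; the clean_hw_desc step is approximated here by a plain memset, which may be broader than the driver's targeted clearing:

#include <linux/string.h>

/* Free-list style (AXI DMA / MCDMA); callers are assumed to hold the
 * channel lock, since the list is not protected here. */
static void example_free_to_free_list(struct example_freelist_chan *chan,
                                      struct example_tx_segment *segment)
{
        memset(&segment->hw, 0, sizeof(segment->hw)); /* no stale BD state */
        list_add_tail(&segment->node, &chan->free_seg_list);
}

/* Pool style (VDMA / CDMA): hand the memory back to the dma_pool. */
static void example_free_to_pool(struct example_chan *chan,
                                 struct example_tx_segment *segment)
{
        dma_pool_free(chan->desc_pool, segment, segment->phys);
}
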
855 struct xilinx_vdma_tx_segment *segment, *next;
864 list_for_each_entry_safe(segment, next, &desc->segments, node) {
865 list_del(&segment->node);
866 xilinx_vdma_free_tx_segment(chan, segment);
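
Descriptor teardown walks the segment list with the _safe iterator because each segment is unlinked and freed from inside the loop. A sketch, assuming a descriptor that just owns a segment list (the phys member stands in for async_tx.phys and is used by later sketches):

/* Hypothetical transaction descriptor owning a chain of segments. */
struct example_tx_descriptor {
        struct list_head segments;
        dma_addr_t phys;        /* stand-in for async_tx.phys */
};

static void example_free_desc_segments(struct example_chan *chan,
                                       struct example_tx_descriptor *desc)
{
        struct example_tx_segment *segment, *next;

        /* _safe variant: 'segment' is freed while iterating. */
        list_for_each_entry_safe(segment, next, &desc->segments, node) {
                list_del(&segment->node);
                example_free_to_pool(chan, segment);
        }
}
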
1140 * so allocating a desc segment during channel allocation for
1149 "unable to allocate desc segment for cyclic DMA\n");
1367 struct xilinx_vdma_tx_segment *segment, *last = NULL;
1428 list_for_each_entry(segment, &desc->segments, node) {
1432 segment->hw.buf_addr,
1433 segment->hw.buf_addr_msb);
1437 segment->hw.buf_addr);
1439 last = segment;
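
When a VDMA transfer is started in register-direct mode, the driver walks the descriptor's segments and programs each frame-buffer address into the channel's start-address registers, writing the MSB half as well when addresses are wider than 32 bits, and it keeps track of the last segment so the frame geometry can be programmed afterwards. A loose sketch of that loop; the register accessor, offsets, and the ext_addr flag are placeholders, not the driver's actual MMIO helpers:

/* Placeholder MMIO writer; a real implementation would use writel()
 * against the channel's descriptor register space. */
static void example_vdma_desc_write(struct example_chan *chan, u32 reg,
                                    u32 value)
{
}

static struct example_tx_segment *
example_program_frame_buffers(struct example_chan *chan,
                              struct example_tx_descriptor *desc,
                              bool ext_addr)
{
        struct example_tx_segment *segment, *last = NULL;
        u32 reg = 0;    /* hypothetical start-address register offset */

        list_for_each_entry(segment, &desc->segments, node) {
                example_vdma_desc_write(chan, reg, segment->hw.buf_addr);
                if (ext_addr)   /* 64-bit addressing: also program the MSBs */
                        example_vdma_desc_write(chan, reg + 4,
                                                segment->hw.buf_addr_msb);
                reg += ext_addr ? 8 : 4;
                last = segment;
        }

        /* The caller programs vsize/hsize/stride from 'last'. */
        return last;
}
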
1508 struct xilinx_cdma_tx_segment *segment;
1511 segment = list_first_entry(&head_desc->segments,
1515 hw = &segment->hw;
1588 struct xilinx_axidma_tx_segment *segment;
1591 segment = list_first_entry(&head_desc->segments,
1594 hw = &segment->hw;
2046 struct xilinx_vdma_tx_segment *segment;
2072 segment = xilinx_vdma_alloc_tx_segment(chan);
2073 if (!segment)
2077 hw = &segment->hw;
2101 /* Insert the segment into the descriptor segments list. */
2102 list_add_tail(&segment->node, &desc->segments);
2105 segment = list_first_entry(&desc->segments,
2107 desc->async_tx.phys = segment->phys;
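
The VDMA interleaved prep path ties these pieces together: allocate a segment, fill in its hardware buffer address, append it to the descriptor's segment list, and publish the first segment's physical address as the point the engine starts from. A condensed sketch that skips the interleaved-template (hsize/vsize/stride) bookkeeping:

#include <linux/kernel.h>
#include <linux/errno.h>

static int example_vdma_prep_one(struct example_chan *chan,
                                 struct example_tx_descriptor *desc,
                                 dma_addr_t buf)
{
        struct example_tx_segment *segment;
        struct example_hw_desc *hw;

        segment = example_alloc_tx_segment(chan);
        if (!segment)
                return -ENOMEM;

        hw = &segment->hw;
        hw->buf_addr = lower_32_bits(buf);
        hw->buf_addr_msb = upper_32_bits(buf);

        /* Insert the segment into the descriptor's segment list. */
        list_add_tail(&segment->node, &desc->segments);

        /* The engine starts from the first segment of the descriptor. */
        segment = list_first_entry(&desc->segments,
                                   struct example_tx_segment, node);
        desc->phys = segment->phys;     /* async_tx.phys in the driver */

        return 0;
}
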
2132 struct xilinx_cdma_tx_segment *segment;
2146 segment = xilinx_cdma_alloc_tx_segment(chan);
2147 if (!segment)
2150 hw = &segment->hw;
2159 /* Insert the segment into the descriptor segments list. */
2160 list_add_tail(&segment->node, &desc->segments);
2162 desc->async_tx.phys = segment->phys;
2163 hw->next_desc = segment->phys;
2190 struct xilinx_axidma_tx_segment *segment = NULL;
2216 /* Get a free segment */
2217 segment = xilinx_axidma_alloc_tx_segment(chan);
2218 if (!segment)
2227 hw = &segment->hw;
2244 * Insert the segment into the descriptor segments
2247 list_add_tail(&segment->node, &desc->segments);
2251 segment = list_first_entry(&desc->segments,
2253 desc->async_tx.phys = segment->phys;
2257 segment->hw.control |= XILINX_DMA_BD_SOP;
2258 segment = list_last_entry(&desc->segments,
2261 segment->hw.control |= XILINX_DMA_BD_EOP;
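
After every scatter-gather entry has been turned into a segment and chained onto the descriptor, the AXI DMA prep routine marks the packet boundaries: start-of-packet in the first segment's control word and end-of-packet in the last. A sketch of that final fixup; the bit positions are invented for illustration and merely stand in for XILINX_DMA_BD_SOP/EOP:

#include <linux/bits.h>

#define EXAMPLE_BD_SOP  BIT(27) /* illustrative value only */
#define EXAMPLE_BD_EOP  BIT(26) /* illustrative value only */

static void example_mark_packet_bounds(struct example_tx_descriptor *desc)
{
        struct example_tx_segment *segment;

        segment = list_first_entry(&desc->segments,
                                   struct example_tx_segment, node);
        segment->hw.control |= EXAMPLE_BD_SOP;

        segment = list_last_entry(&desc->segments,
                                  struct example_tx_segment, node);
        segment->hw.control |= EXAMPLE_BD_EOP;
}
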
2292 struct xilinx_axidma_tx_segment *segment, *head_segment, *prev = NULL;
2324 /* Get a free segment */
2325 segment = xilinx_axidma_alloc_tx_segment(chan);
2326 if (!segment)
2335 hw = &segment->hw;
2341 prev->hw.next_desc = segment->phys;
2343 prev = segment;
2347 * Insert the segment into the descriptor segments
2350 list_add_tail(&segment->node, &desc->segments);
2363 segment = list_last_entry(&desc->segments,
2366 segment->hw.next_desc = (u32) head_segment->phys;
2371 segment->hw.control |= XILINX_DMA_BD_EOP;
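
For cyclic transfers the chain is closed into a ring: as segments are built, each previous segment's next_desc is pointed at the new one, and once the loop finishes the last segment is pointed back at the head so the engine wraps around until the transfer is terminated. A sketch of the linking step only, leaving out buffer programming and the EOP flag shown above:

static void example_close_cyclic_ring(struct example_tx_descriptor *desc)
{
        struct example_tx_segment *head, *segment, *prev = NULL;

        if (list_empty(&desc->segments))
                return;

        head = list_first_entry(&desc->segments,
                                struct example_tx_segment, node);

        /* Forward-link every segment to its successor... */
        list_for_each_entry(segment, &desc->segments, node) {
                if (prev)
                        prev->hw.next_desc = (u32)segment->phys;
                prev = segment;
        }

        /* ...then close the loop: the tail chains back to the head. */
        prev->hw.next_desc = (u32)head->phys;
}
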
2400 struct xilinx_aximcdma_tx_segment *segment = NULL;
2426 /* Get a free segment */
2427 segment = xilinx_aximcdma_alloc_tx_segment(chan);
2428 if (!segment)
2437 hw = &segment->hw;
2451 * Insert the segment into the descriptor segments
2454 list_add_tail(&segment->node, &desc->segments);
2458 segment = list_first_entry(&desc->segments,
2460 desc->async_tx.phys = segment->phys;
2464 segment->hw.control |= XILINX_MCDMA_BD_SOP;
2465 segment = list_last_entry(&desc->segments,
2468 segment->hw.control |= XILINX_MCDMA_BD_EOP;