Lines matching refs: mt76_dev

42 mt76_alloc_txwi(struct mt76_dev *dev)
68 mt76_alloc_rxwi(struct mt76_dev *dev)
81 __mt76_get_txwi(struct mt76_dev *dev)
97 __mt76_get_rxwi(struct mt76_dev *dev)
113 mt76_get_txwi(struct mt76_dev *dev)
124 mt76_get_rxwi(struct mt76_dev *dev)
136 mt76_put_txwi(struct mt76_dev *dev, struct mt76_txwi_cache *t)
148 mt76_put_rxwi(struct mt76_dev *dev, struct mt76_txwi_cache *t)
160 mt76_free_pending_txwi(struct mt76_dev *dev)
174 mt76_free_pending_rxwi(struct mt76_dev *dev)
189 mt76_dma_sync_idx(struct mt76_dev *dev, struct mt76_queue *q)
200 void __mt76_dma_queue_reset(struct mt76_dev *dev, struct mt76_queue *q,
221 void mt76_dma_queue_reset(struct mt76_dev *dev, struct mt76_queue *q)
227 mt76_dma_add_rx_buf(struct mt76_dev *dev, struct mt76_queue *q,
285 mt76_dma_add_buf(struct mt76_dev *dev, struct mt76_queue *q,
354 mt76_dma_tx_cleanup_idx(struct mt76_dev *dev, struct mt76_queue *q, int idx,
375 mt76_dma_kick_queue(struct mt76_dev *dev, struct mt76_queue *q)
382 mt76_dma_tx_cleanup(struct mt76_dev *dev, struct mt76_queue *q, bool flush)
422 mt76_dma_get_buf(struct mt76_dev *dev, struct mt76_queue *q, int idx,
476 mt76_dma_dequeue(struct mt76_dev *dev, struct mt76_queue *q, bool flush,
502 mt76_dma_tx_queue_skb_raw(struct mt76_dev *dev, struct mt76_queue *q,
545 struct mt76_dev *dev = phy->dev;
634 int mt76_dma_rx_fill(struct mt76_dev *dev, struct mt76_queue *q,
683 mt76_dma_alloc_queue(struct mt76_dev *dev, struct mt76_queue *q,
742 mt76_dma_rx_cleanup(struct mt76_dev *dev, struct mt76_queue *q)
772 mt76_dma_rx_reset(struct mt76_dev *dev, enum mt76_rxq_id qid)
803 mt76_add_fragment(struct mt76_dev *dev, struct mt76_queue *q, void *data,
830 mt76_dma_rx_process(struct mt76_dev *dev, struct mt76_queue *q, int budget)
916 struct mt76_dev *dev;
919 dev = container_of(napi->dev, struct mt76_dev, napi_dev);
940 mt76_dma_init(struct mt76_dev *dev,
974 void mt76_dma_attach(struct mt76_dev *dev)
980 void mt76_dma_cleanup(struct mt76_dev *dev)
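
The txwi/rxwi helpers near the top of this listing implement a small free-list cache of descriptor entries that the DMA paths below draw from. As a rough illustration of how dma.c uses the TX side of that cache (a minimal sketch, not copied from the file; the example function name and the elided mapping steps are placeholders, and mt76_get_txwi() is static to dma.c, so this only stands in for code living in that file):

    /* Sketch: pull a cached txwi before mapping a frame, and hand it back
     * once the hardware has consumed the descriptor so later frames can
     * reuse it.
     */
    #include "mt76.h"

    static int example_tx_map(struct mt76_dev *dev, struct sk_buff *skb)
    {
            struct mt76_txwi_cache *t;

            /* reuse a previously freed txwi, or allocate a fresh one */
            t = mt76_get_txwi(dev);
            if (!t)
                    return -ENOMEM;

            /* ... fill the hardware txwi, map the skb for DMA, queue it ... */

            /* on TX completion (e.g. from mt76_dma_tx_cleanup_idx) the entry
             * is returned to dev->txwi_cache for reuse:
             */
            mt76_put_txwi(dev, t);
            return 0;
    }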