Lines Matching refs: ocelot (one entry per match; the leading number on each entry is the line number in the matched source file)

13 #include <linux/dsa/ocelot.h>
22 static void ocelot_fdma_writel(struct ocelot *ocelot, u32 reg, u32 data)
24 regmap_write(ocelot->targets[FDMA], reg, data);
27 static u32 ocelot_fdma_readl(struct ocelot *ocelot, u32 reg)
31 regmap_read(ocelot->targets[FDMA], reg, &retval);
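The matches at lines 24 and 31 only capture the regmap calls themselves; reconstructed from that context, the FDMA register accessors presumably look roughly like the sketch below (kernel context assumed: <linux/regmap.h> and the ocelot->targets[] regmap array indexed by the FDMA target). Both regmap return codes are ignored, as in the matched lines.

static void ocelot_fdma_writel(struct ocelot *ocelot, u32 reg, u32 data)
{
	/* ocelot->targets[FDMA] is the regmap covering the FDMA register block */
	regmap_write(ocelot->targets[FDMA], reg, data);
}

static u32 ocelot_fdma_readl(struct ocelot *ocelot, u32 reg)
{
	u32 retval;

	/* regmap_read() returns an error code; it is not checked here, as on line 31 */
	regmap_read(ocelot->targets[FDMA], reg, &retval);

	return retval;
}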
85 static void ocelot_fdma_activate_chan(struct ocelot *ocelot, dma_addr_t dma,
88 ocelot_fdma_writel(ocelot, MSCC_FDMA_DCB_LLP(chan), dma);
93 ocelot_fdma_writel(ocelot, MSCC_FDMA_CH_ACTIVATE, BIT(chan));
96 static u32 ocelot_fdma_read_ch_safe(struct ocelot *ocelot)
98 return ocelot_fdma_readl(ocelot, MSCC_FDMA_CH_SAFE);
101 static int ocelot_fdma_wait_chan_safe(struct ocelot *ocelot, int chan)
105 return readx_poll_timeout_atomic(ocelot_fdma_read_ch_safe, ocelot, safe,
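Line 105 is only the opening line of a multi-line call; given readx_poll_timeout_atomic()'s parameter order from <linux/iopoll.h> (op, addr, val, cond, delay_us, timeout_us), the channel-safe wait presumably continues along the lines of the sketch below. The condition expression and the timeout constant are assumptions about the unmatched continuation lines.

/* Sketch: poll MSCC_FDMA_CH_SAFE via ocelot_fdma_read_ch_safe() until the
 * channel's safe bit is set, or return -ETIMEDOUT. The BIT(chan) condition
 * and OCELOT_FDMA_CH_SAFE_TIMEOUT_US are assumed, not taken from the listing.
 */
static int ocelot_fdma_wait_chan_safe(struct ocelot *ocelot, int chan)
{
	u32 safe;

	return readx_poll_timeout_atomic(ocelot_fdma_read_ch_safe, ocelot, safe,
					 safe & BIT(chan), 0,
					 OCELOT_FDMA_CH_SAFE_TIMEOUT_US);
}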
122 static bool ocelot_fdma_rx_alloc_page(struct ocelot *ocelot,
132 mapping = dma_map_page(ocelot->dev, page, 0, PAGE_SIZE,
134 if (unlikely(dma_mapping_error(ocelot->dev, mapping))) {
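Lines 122-134 show the usual RX buffer setup for a page-based ring: allocate a page, DMA-map it for device writes, and back out if the mapping fails. A hedged reconstruction of that helper follows; the struct ocelot_fdma_rx_buf field names (page, page_offset, dma_addr) are inferred from the other matched lines, and the rest is an assumption.

static bool ocelot_fdma_rx_alloc_page(struct ocelot *ocelot,
				      struct ocelot_fdma_rx_buf *rxb)
{
	dma_addr_t mapping;
	struct page *page;

	page = dev_alloc_page();
	if (unlikely(!page))
		return false;

	/* Map the whole page for device-to-memory (RX) transfers */
	mapping = dma_map_page(ocelot->dev, page, 0, PAGE_SIZE,
			       DMA_FROM_DEVICE);
	if (unlikely(dma_mapping_error(ocelot->dev, mapping))) {
		__free_page(page);
		return false;
	}

	rxb->page = page;
	rxb->page_offset = 0;
	rxb->dma_addr = mapping;

	return true;
}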
146 static int ocelot_fdma_alloc_rx_buffs(struct ocelot *ocelot, u16 alloc_cnt)
148 struct ocelot_fdma *fdma = ocelot->fdma;
163 if (unlikely(!ocelot_fdma_rx_alloc_page(ocelot, rxb))) {
164 dev_err_ratelimited(ocelot->dev,
186 static bool ocelot_fdma_tx_dcb_set_skb(struct ocelot *ocelot,
193 mapping = dma_map_single(ocelot->dev, skb->data, skb->len,
195 if (unlikely(dma_mapping_error(ocelot->dev, mapping)))
208 static bool ocelot_fdma_check_stop_rx(struct ocelot *ocelot)
213 llp = ocelot_fdma_readl(ocelot, MSCC_FDMA_DCB_LLP(MSCC_FDMA_XTR_CHAN));
217 ocelot_fdma_writel(ocelot, MSCC_FDMA_CH_DISABLE,
234 static void ocelot_fdma_rx_restart(struct ocelot *ocelot)
236 struct ocelot_fdma *fdma = ocelot->fdma;
245 ret = ocelot_fdma_wait_chan_safe(ocelot, chan);
247 dev_err_ratelimited(ocelot->dev,
258 llp_prev = ocelot_fdma_readl(ocelot, MSCC_FDMA_DCB_LLP_PREV(chan));
267 ocelot_fdma_activate_chan(ocelot, new_llp, chan);
295 static void ocelot_fdma_reuse_rx_page(struct ocelot *ocelot,
298 struct ocelot_fdma_rx_ring *rx_ring = &ocelot->fdma->rx_ring;
309 dma_sync_single_range_for_device(ocelot->dev, old_rxb->dma_addr,
314 static struct sk_buff *ocelot_fdma_get_skb(struct ocelot *ocelot, u32 stat,
327 dev_err_ratelimited(ocelot->dev,
334 dma_sync_single_range_for_cpu(ocelot->dev, rxb->dma_addr,
340 ocelot_fdma_reuse_rx_page(ocelot, rxb);
343 dma_unmap_page(ocelot->dev, rxb->dma_addr, PAGE_SIZE,
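Lines 295-343 combine into the common page-recycling pattern for streaming RX mappings: sync the used fragment for the CPU before handing it to the stack, then either keep the mapping and sync it back for the device (recycle) or unmap the page entirely. A minimal generic sketch of that pattern, using hypothetical names (rx_buf, RX_FRAG_SIZE) rather than the driver's own symbols:

#include <linux/dma-mapping.h>
#include <linux/mm.h>

#define RX_FRAG_SIZE	2048	/* hypothetical per-buffer fragment size */

struct rx_buf {			/* hypothetical stand-in for a ring buffer entry */
	struct page *page;
	unsigned int page_offset;
	dma_addr_t dma_addr;
};

static void rx_buf_consume(struct device *dev, struct rx_buf *rxb, bool recycle)
{
	/* Make the data the device just wrote visible to the CPU */
	dma_sync_single_range_for_cpu(dev, rxb->dma_addr, rxb->page_offset,
				      RX_FRAG_SIZE, DMA_FROM_DEVICE);

	if (recycle)
		/* Keep the mapping and give the fragment back to the device */
		dma_sync_single_range_for_device(dev, rxb->dma_addr,
						 rxb->page_offset,
						 RX_FRAG_SIZE, DMA_FROM_DEVICE);
	else
		/* Buffer leaves the ring for good: release the whole mapping */
		dma_unmap_page(dev, rxb->dma_addr, PAGE_SIZE, DMA_FROM_DEVICE);
}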
353 static bool ocelot_fdma_receive_skb(struct ocelot *ocelot, struct sk_buff *skb)
363 if (unlikely(src_port >= ocelot->num_phys_ports))
366 ndev = ocelot_port_to_netdev(ocelot, src_port);
378 if (ocelot->ptp) {
380 ocelot_ptp_rx_timestamp(ocelot, skb, timestamp);
389 static int ocelot_fdma_rx_get(struct ocelot *ocelot, int budget)
391 struct ocelot_fdma *fdma = ocelot->fdma;
422 skb = ocelot_fdma_get_skb(ocelot, stat, rxb, skb);
434 dev_err_ratelimited(ocelot->dev,
447 if (unlikely(!ocelot_fdma_receive_skb(ocelot, skb)))
456 ocelot_fdma_alloc_rx_buffs(ocelot, cleaned_cnt);
461 static void ocelot_fdma_wakeup_netdev(struct ocelot *ocelot)
468 for (port = 0; port < ocelot->num_phys_ports; port++) {
469 ocelot_port = ocelot->ports[port];
481 static void ocelot_fdma_tx_cleanup(struct ocelot *ocelot, int budget)
483 struct ocelot_fdma *fdma = ocelot->fdma;
508 dma_unmap_single(ocelot->dev, dma_unmap_addr(buf, dma_addr),
526 ocelot_fdma_wakeup_netdev(ocelot);
534 ret = ocelot_fdma_wait_chan_safe(ocelot, MSCC_FDMA_INJ_CHAN);
536 dev_warn(ocelot->dev,
548 ocelot_fdma_activate_chan(ocelot, dma, MSCC_FDMA_INJ_CHAN);
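Lines 193-195 (mapping skb->data at transmit time) and lines 508 and 736 (unmapping in the cleanup paths) are two halves of the standard dma_unmap_addr() bookkeeping pattern: the buffer struct records the streaming mapping with the DEFINE_DMA_UNMAP_ADDR()/dma_unmap_addr_set() helpers so the completion path can release it later. A generic sketch with a hypothetical tx_buf struct:

#include <linux/dma-mapping.h>
#include <linux/skbuff.h>

struct tx_buf {			/* hypothetical TX bookkeeping entry */
	struct sk_buff *skb;
	DEFINE_DMA_UNMAP_ADDR(dma_addr);
	DEFINE_DMA_UNMAP_LEN(dma_len);
};

static int tx_buf_map(struct device *dev, struct tx_buf *buf,
		      struct sk_buff *skb)
{
	dma_addr_t mapping;

	/* Transmit path: map the linear skb data for memory-to-device DMA */
	mapping = dma_map_single(dev, skb->data, skb->len, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, mapping))
		return -ENOMEM;

	buf->skb = skb;
	dma_unmap_addr_set(buf, dma_addr, mapping);
	dma_unmap_len_set(buf, dma_len, skb->len);

	return 0;
}

static void tx_buf_unmap(struct device *dev, struct tx_buf *buf)
{
	/* Completion path: release the streaming mapping recorded above */
	dma_unmap_single(dev, dma_unmap_addr(buf, dma_addr),
			 dma_unmap_len(buf, dma_len), DMA_TO_DEVICE);
}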
554 struct ocelot *ocelot = fdma->ocelot;
558 ocelot_fdma_tx_cleanup(ocelot, budget);
560 rx_stopped = ocelot_fdma_check_stop_rx(ocelot);
562 work_done = ocelot_fdma_rx_get(ocelot, budget);
565 ocelot_fdma_rx_restart(ocelot);
569 ocelot_fdma_writel(ocelot, MSCC_FDMA_INTR_ENA,
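Lines 553-569 belong to the NAPI poll callback, which follows the standard shape: reap TX completions, process up to budget RX frames, restart the extraction channel if it stopped, and only re-enable FDMA interrupts once the poll completes under budget. A hedged reconstruction; the interrupt-enable mask and the exact structure are assumptions about the unmatched lines.

static int ocelot_fdma_napi_poll(struct napi_struct *napi, int budget)
{
	struct ocelot_fdma *fdma = container_of(napi, struct ocelot_fdma, napi);
	struct ocelot *ocelot = fdma->ocelot;
	bool rx_stopped;
	int work_done;

	/* TX completions first, then RX up to the NAPI budget */
	ocelot_fdma_tx_cleanup(ocelot, budget);

	rx_stopped = ocelot_fdma_check_stop_rx(ocelot);
	work_done = ocelot_fdma_rx_get(ocelot, budget);

	if (rx_stopped)
		ocelot_fdma_rx_restart(ocelot);

	if (work_done < budget) {
		/* Done for now: re-enable the interrupts masked in the ISR */
		napi_complete_done(&fdma->napi, work_done);
		ocelot_fdma_writel(ocelot, MSCC_FDMA_INTR_ENA,
				   BIT(MSCC_FDMA_INJ_CHAN) |
				   BIT(MSCC_FDMA_XTR_CHAN));
	}

	return work_done;
}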
580 struct ocelot *ocelot = dev_id;
582 ident = ocelot_fdma_readl(ocelot, MSCC_FDMA_INTR_IDENT);
583 frm = ocelot_fdma_readl(ocelot, MSCC_FDMA_INTR_FRM);
584 llp = ocelot_fdma_readl(ocelot, MSCC_FDMA_INTR_LLP);
586 ocelot_fdma_writel(ocelot, MSCC_FDMA_INTR_LLP, llp & ident);
587 ocelot_fdma_writel(ocelot, MSCC_FDMA_INTR_FRM, frm & ident);
589 ocelot_fdma_writel(ocelot, MSCC_FDMA_INTR_ENA, 0);
590 napi_schedule(&ocelot->fdma->napi);
593 err = ocelot_fdma_readl(ocelot, MSCC_FDMA_EVT_ERR);
595 err_code = ocelot_fdma_readl(ocelot, MSCC_FDMA_EVT_ERR_CODE);
596 dev_err_ratelimited(ocelot->dev,
600 ocelot_fdma_writel(ocelot, MSCC_FDMA_EVT_ERR, err);
601 ocelot_fdma_writel(ocelot, MSCC_FDMA_EVT_ERR_CODE, err_code);
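Lines 580-601 form the hard-IRQ half of the usual split: acknowledge the pending LLP/FRM sources, mask further FDMA interrupts, defer the real work to NAPI, and log and clear any error event separately. A hedged reconstruction; the guard conditions and the error message text are assumptions.

static irqreturn_t ocelot_fdma_interrupt(int irq, void *dev_id)
{
	u32 ident, frm, llp, err, err_code;
	struct ocelot *ocelot = dev_id;

	ident = ocelot_fdma_readl(ocelot, MSCC_FDMA_INTR_IDENT);
	frm = ocelot_fdma_readl(ocelot, MSCC_FDMA_INTR_FRM);
	llp = ocelot_fdma_readl(ocelot, MSCC_FDMA_INTR_LLP);

	/* Acknowledge only the sources that are actually pending */
	ocelot_fdma_writel(ocelot, MSCC_FDMA_INTR_LLP, llp & ident);
	ocelot_fdma_writel(ocelot, MSCC_FDMA_INTR_FRM, frm & ident);

	if (frm || llp) {
		/* Mask FDMA interrupts; the poll callback re-enables them */
		ocelot_fdma_writel(ocelot, MSCC_FDMA_INTR_ENA, 0);
		napi_schedule(&ocelot->fdma->napi);
	}

	err = ocelot_fdma_readl(ocelot, MSCC_FDMA_EVT_ERR);
	if (unlikely(err)) {
		err_code = ocelot_fdma_readl(ocelot, MSCC_FDMA_EVT_ERR_CODE);
		dev_err_ratelimited(ocelot->dev,
				    "FDMA error: mask %#x, code %#x\n",
				    err, err_code);

		/* Write back to clear the latched error event */
		ocelot_fdma_writel(ocelot, MSCC_FDMA_EVT_ERR, err);
		ocelot_fdma_writel(ocelot, MSCC_FDMA_EVT_ERR_CODE, err_code);
	}

	return IRQ_HANDLED;
}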
607 static void ocelot_fdma_send_skb(struct ocelot *ocelot,
618 if (!ocelot_fdma_tx_dcb_set_skb(ocelot, tx_buf, dcb, skb)) {
631 ocelot_fdma_activate_chan(ocelot, dma, MSCC_FDMA_INJ_CHAN);
640 static int ocelot_fdma_prepare_skb(struct ocelot *ocelot, int port, u32 rew_op,
674 int ocelot_fdma_inject_frame(struct ocelot *ocelot, int port, u32 rew_op,
677 struct ocelot_fdma *fdma = ocelot->fdma;
688 if (ocelot_fdma_prepare_skb(ocelot, port, rew_op, skb, dev))
691 ocelot_fdma_send_skb(ocelot, fdma, skb);
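ocelot_fdma_inject_frame() (line 674) is the externally visible entry point of this TX group. A hypothetical caller sketch follows: a transmit path would presumably only hand frames to it when ocelot->fdma was set up at probe time. The return convention is assumed to follow netdev xmit semantics; it is not shown in the matched lines.

static int example_xmit_via_fdma(struct ocelot *ocelot, int port, u32 rew_op,
				 struct sk_buff *skb, struct net_device *dev)
{
	/* Hypothetical guard: FDMA is optional, ocelot->fdma is NULL without it */
	if (!ocelot->fdma)
		return -ENODEV;

	/* Queue the frame on the FDMA injection channel for 'port' */
	return ocelot_fdma_inject_frame(ocelot, port, rew_op, skb, dev);
}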
699 static void ocelot_fdma_free_rx_ring(struct ocelot *ocelot)
701 struct ocelot_fdma *fdma = ocelot->fdma;
712 dma_unmap_page(ocelot->dev, rxb->dma_addr, PAGE_SIZE,
722 static void ocelot_fdma_free_tx_ring(struct ocelot *ocelot)
724 struct ocelot_fdma *fdma = ocelot->fdma;
736 dma_unmap_single(ocelot->dev, dma_unmap_addr(txb, dma_addr),
743 static int ocelot_fdma_rings_alloc(struct ocelot *ocelot)
745 struct ocelot_fdma *fdma = ocelot->fdma;
752 fdma->dcbs_base = dmam_alloc_coherent(ocelot->dev,
775 ret = ocelot_fdma_alloc_rx_buffs(ocelot,
778 ocelot_fdma_free_rx_ring(ocelot);
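Line 752 allocates the DCB (DMA control block) memory with dmam_alloc_coherent(), i.e. a coherent, device-managed allocation that needs no explicit free on driver detach. A small generic sketch of that call, with hypothetical names and sizes:

#include <linux/dma-mapping.h>

/* Allocate 'num_dcbs' descriptors of 'dcb_size' bytes as one coherent block;
 * being devm-managed, it is released automatically when the driver detaches.
 * Names are hypothetical, not the driver's own.
 */
static void *example_alloc_dcbs(struct device *dev, size_t dcb_size,
				unsigned int num_dcbs, dma_addr_t *dma_out)
{
	return dmam_alloc_coherent(dev, dcb_size * num_dcbs, dma_out,
				   GFP_KERNEL);
}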
790 void ocelot_fdma_netdev_init(struct ocelot *ocelot, struct net_device *dev)
792 struct ocelot_fdma *fdma = ocelot->fdma;
805 void ocelot_fdma_netdev_deinit(struct ocelot *ocelot, struct net_device *dev)
807 struct ocelot_fdma *fdma = ocelot->fdma;
815 void ocelot_fdma_init(struct platform_device *pdev, struct ocelot *ocelot)
817 struct device *dev = ocelot->dev;
825 ocelot->fdma = fdma;
826 ocelot->dev->coherent_dma_mask = DMA_BIT_MASK(32);
828 ocelot_fdma_writel(ocelot, MSCC_FDMA_INTR_ENA, 0);
830 fdma->ocelot = ocelot;
833 dev_name(dev), ocelot);
837 ret = ocelot_fdma_rings_alloc(ocelot);
850 ocelot->fdma = NULL;
853 void ocelot_fdma_start(struct ocelot *ocelot)
855 struct ocelot_fdma *fdma = ocelot->fdma;
858 ocelot_write_rix(ocelot, QS_INJ_GRP_CFG_MODE(2), QS_INJ_GRP_CFG, 0);
859 ocelot_write_rix(ocelot, QS_INJ_CTRL_GAP_SIZE(0), QS_INJ_CTRL, 0);
861 ocelot_write_rix(ocelot, QS_XTR_GRP_CFG_MODE(2), QS_XTR_GRP_CFG, 0);
863 ocelot_fdma_writel(ocelot, MSCC_FDMA_INTR_LLP, 0xffffffff);
864 ocelot_fdma_writel(ocelot, MSCC_FDMA_INTR_FRM, 0xffffffff);
866 ocelot_fdma_writel(ocelot, MSCC_FDMA_INTR_LLP_ENA,
868 ocelot_fdma_writel(ocelot, MSCC_FDMA_INTR_FRM_ENA,
870 ocelot_fdma_writel(ocelot, MSCC_FDMA_INTR_ENA,
875 ocelot_fdma_activate_chan(ocelot, ocelot->fdma->rx_ring.dcbs_dma,
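The exported entry points above (ocelot_fdma_init(), ocelot_fdma_netdev_init(), ocelot_fdma_start(), and ocelot_fdma_deinit() below) suggest a probe-time ordering along the lines of the sketch that follows. The ordering and the error handling are assumptions based only on the prototypes and on line 850 clearing ocelot->fdma on init failure; they are not taken from the listing.

static int example_setup_fdma(struct platform_device *pdev,
			      struct ocelot *ocelot,
			      struct net_device *ndev)
{
	/* ocelot_fdma_init() returns void; per line 850 it leaves
	 * ocelot->fdma NULL when setup fails */
	ocelot_fdma_init(pdev, ocelot);
	if (!ocelot->fdma)
		return -ENODEV;	/* hypothetical fallback decision */

	/* Attach NAPI to the port's net_device, then start the channels */
	ocelot_fdma_netdev_init(ocelot, ndev);
	ocelot_fdma_start(ocelot);

	return 0;
}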
879 void ocelot_fdma_deinit(struct ocelot *ocelot)
881 struct ocelot_fdma *fdma = ocelot->fdma;
883 ocelot_fdma_writel(ocelot, MSCC_FDMA_INTR_ENA, 0);
884 ocelot_fdma_writel(ocelot, MSCC_FDMA_CH_FORCEDIS,
886 ocelot_fdma_writel(ocelot, MSCC_FDMA_CH_FORCEDIS,
891 ocelot_fdma_free_rx_ring(ocelot);
892 ocelot_fdma_free_tx_ring(ocelot);