Lines matching refs: mdata (the struct mtk_spi driver-data pointer in drivers/spi/spi-mt65xx.c, the MediaTek SPI controller driver)

270 static void mtk_spi_reset(struct mtk_spi *mdata)
275 reg_val = readl(mdata->base + SPI_CMD_REG);
277 writel(reg_val, mdata->base + SPI_CMD_REG);
279 reg_val = readl(mdata->base + SPI_CMD_REG);
281 writel(reg_val, mdata->base + SPI_CMD_REG);
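
The matches at 270-281 trace mtk_spi_reset() as two read-modify-write passes over SPI_CMD_REG: the elided lines set and then clear the controller's software-reset bit. A reconstruction of the full function, assuming the driver's SPI_CMD_RST bit definition (a sketch, not a quote):

    static void mtk_spi_reset(struct mtk_spi *mdata)
    {
            u32 reg_val;

            /* set the software-reset bit... */
            reg_val = readl(mdata->base + SPI_CMD_REG);
            reg_val |= SPI_CMD_RST;
            writel(reg_val, mdata->base + SPI_CMD_REG);

            /* ...then clear it to bring the controller back out of reset */
            reg_val = readl(mdata->base + SPI_CMD_REG);
            reg_val &= ~SPI_CMD_RST;
            writel(reg_val, mdata->base + SPI_CMD_REG);
    }
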
286 struct mtk_spi *mdata = spi_controller_get_devdata(spi->controller);
297 setup = (delay * DIV_ROUND_UP(mdata->spi_clk_hz, 1000000)) / 1000;
302 hold = (delay * DIV_ROUND_UP(mdata->spi_clk_hz, 1000000)) / 1000;
307 inactive = (delay * DIV_ROUND_UP(mdata->spi_clk_hz, 1000000)) / 1000;
310 reg_val = readl(mdata->base + SPI_CFG0_REG);
311 if (mdata->dev_comp->enhance_timing) {
337 writel(reg_val, mdata->base + SPI_CFG0_REG);
342 reg_val = readl(mdata->base + SPI_CFG1_REG);
345 writel(reg_val, mdata->base + SPI_CFG1_REG);
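
Lines 297/302/307 all use the same conversion from a nanosecond delay to SPI clock ticks: DIV_ROUND_UP(mdata->spi_clk_hz, 1000000) is the rounded-up number of ticks per microsecond, and the final /1000 rescales from nanoseconds. A self-contained illustration of the arithmetic (the clock rate and delay are made-up example values):

    #include <stdio.h>

    #define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

    int main(void)
    {
            unsigned long spi_clk_hz = 109200000;  /* hypothetical 109.2 MHz source */
            unsigned long delay_ns = 50;           /* requested CS setup time */

            /* same shape as lines 297/302/307:
             * ticks = delay_ns * ceil(ticks per us) / 1000 */
            unsigned long ticks =
                    (delay_ns * DIV_ROUND_UP(spi_clk_hz, 1000000)) / 1000;

            printf("%lu ns at %lu Hz -> %lu ticks\n",
                   delay_ns, spi_clk_hz, ticks);
            return 0;
    }

Rounding the per-microsecond tick count up biases the result high while the final integer division truncates, so the result is a close integer approximation of delay_ns * clk_hz / 1e9.
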
357 struct mtk_spi *mdata = spi_controller_get_devdata(host);
362 reg_val = readl(mdata->base + SPI_CMD_REG);
363 if (mdata->dev_comp->ipm_design) {
399 if (mdata->dev_comp->enhance_timing) {
421 writel(reg_val, mdata->base + SPI_CMD_REG);
424 if (mdata->dev_comp->need_pad_sel)
425 writel(mdata->pad_sel[spi_get_chipselect(spi, 0)],
426 mdata->base + SPI_PAD_SEL_REG);
429 if (mdata->dev_comp->enhance_timing) {
430 if (mdata->dev_comp->ipm_design) {
431 reg_val = readl(mdata->base + SPI_CMD_REG);
435 writel(reg_val, mdata->base + SPI_CMD_REG);
437 reg_val = readl(mdata->base + SPI_CFG1_REG);
441 writel(reg_val, mdata->base + SPI_CFG1_REG);
444 reg_val = readl(mdata->base + SPI_CFG1_REG);
448 writel(reg_val, mdata->base + SPI_CFG1_REG);
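
Matches 357-448 come from the message-preparation path: one long read-modify-write of SPI_CMD_REG gated by dev_comp feature flags, followed by pad selection (424-426) and extra CFG1/CMD tweaks for enhanced-timing and IPM parts (429-448). One representative slice of the elided bit handling, assuming the driver's SPI_CMD_CPHA/SPI_CMD_CPOL bit names (reconstruction, not a quote):

    reg_val = readl(mdata->base + SPI_CMD_REG);
    if (spi->mode & SPI_CPHA)
            reg_val |= SPI_CMD_CPHA;
    else
            reg_val &= ~SPI_CMD_CPHA;
    if (spi->mode & SPI_CPOL)
            reg_val |= SPI_CMD_CPOL;
    else
            reg_val &= ~SPI_CMD_CPOL;
    writel(reg_val, mdata->base + SPI_CMD_REG);
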
465 struct mtk_spi *mdata = spi_controller_get_devdata(spi->controller);
470 reg_val = readl(mdata->base + SPI_CMD_REG);
473 writel(reg_val, mdata->base + SPI_CMD_REG);
476 writel(reg_val, mdata->base + SPI_CMD_REG);
477 mdata->state = MTK_SPI_IDLE;
478 mtk_spi_reset(mdata);
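
465-478 are the chip-select hook: asserting CS sets a pause-enable bit in SPI_CMD_REG so the controller holds CS across transfers, and deasserting clears it, drops the state machine back to MTK_SPI_IDLE, and resets the block. Reconstructed control flow, assuming the SPI_CMD_PAUSE_EN bit name:

    static void mtk_spi_set_cs(struct spi_device *spi, bool enable)
    {
            u32 reg_val;
            struct mtk_spi *mdata = spi_controller_get_devdata(spi->controller);

            if (spi->mode & SPI_CS_HIGH)
                    enable = !enable;

            reg_val = readl(mdata->base + SPI_CMD_REG);
            if (!enable) {
                    /* hold CS: pause between transfers instead of finishing */
                    reg_val |= SPI_CMD_PAUSE_EN;
                    writel(reg_val, mdata->base + SPI_CMD_REG);
            } else {
                    /* release CS and put the controller back to a clean state */
                    reg_val &= ~SPI_CMD_PAUSE_EN;
                    writel(reg_val, mdata->base + SPI_CMD_REG);
                    mdata->state = MTK_SPI_IDLE;
                    mtk_spi_reset(mdata);
            }
    }
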
486 struct mtk_spi *mdata = spi_controller_get_devdata(host);
488 if (speed_hz < mdata->spi_clk_hz / 2)
489 div = DIV_ROUND_UP(mdata->spi_clk_hz, speed_hz);
495 if (mdata->dev_comp->enhance_timing) {
496 reg_val = readl(mdata->base + SPI_CFG2_REG);
503 writel(reg_val, mdata->base + SPI_CFG2_REG);
505 reg_val = readl(mdata->base + SPI_CFG0_REG);
511 writel(reg_val, mdata->base + SPI_CFG0_REG);
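
488-489 pick the SCK divider: anything at or above half the source clock runs undivided, otherwise the divider is the rounded-up ratio, which guarantees the programmed rate never exceeds the requested speed_hz. The divided period is then split into high/low tick counts and written to SPI_CFG2_REG on enhanced-timing parts or packed into SPI_CFG0_REG otherwise (496-511). A sketch of the arithmetic only; the (div + 1) / 2 split follows the upstream driver but is reconstructed here:

    u32 div, sck_time;

    if (speed_hz < mdata->spi_clk_hz / 2)
            div = DIV_ROUND_UP(mdata->spi_clk_hz, speed_hz);
    else
            div = 1;

    /* roughly equal high and low halves of the SCK period, in ticks */
    sck_time = (div + 1) / 2;
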
518 struct mtk_spi *mdata = spi_controller_get_devdata(host);
520 if (mdata->dev_comp->ipm_design)
522 mdata->xfer_len,
526 mdata->xfer_len,
529 packet_loop = mdata->xfer_len / packet_size;
531 reg_val = readl(mdata->base + SPI_CFG1_REG);
532 if (mdata->dev_comp->ipm_design)
539 writel(reg_val, mdata->base + SPI_CFG1_REG);
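
518-539 split one transfer into hardware packets: packet_size is the transfer length capped at the controller's per-packet limit (larger on IPM designs, per the branch at 520), and packet_loop at 529 is how many such packets the hardware runs back to back. Worked numbers, assuming the legacy 1 KiB cap (MTK_SPI_PACKET_SIZE in the driver):

    /* e.g. xfer_len = 4096 with a 1024-byte cap:
     *   packet_size = min(4096, 1024) = 1024
     *   packet_loop = 4096 / 1024     = 4 packets per trigger
     */
    packet_size = min_t(u32, mdata->xfer_len, MTK_SPI_PACKET_SIZE);
    packet_loop = mdata->xfer_len / packet_size;

The integer division at 529 only works cleanly because mtk_spi_update_mdata_len() (573-594 below) trims xfer_len to a whole multiple of the packet size first.
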
545 struct mtk_spi *mdata = spi_controller_get_devdata(host);
547 cmd = readl(mdata->base + SPI_CMD_REG);
548 if (mdata->state == MTK_SPI_IDLE)
552 writel(cmd, mdata->base + SPI_CMD_REG);
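
545-552 kick the hardware: from MTK_SPI_IDLE an activate bit starts a fresh transfer, while from the paused state (CS still held) a resume bit continues the previous one. Reconstruction, assuming the SPI_CMD_ACT/SPI_CMD_RESUME bit names:

    cmd = readl(mdata->base + SPI_CMD_REG);
    if (mdata->state == MTK_SPI_IDLE)
            cmd |= SPI_CMD_ACT;     /* start a new transfer */
    else
            cmd |= SPI_CMD_RESUME;  /* continue a paused (CS-held) one */
    writel(cmd, mdata->base + SPI_CMD_REG);
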
555 static int mtk_spi_get_mult_delta(struct mtk_spi *mdata, u32 xfer_len)
559 if (mdata->dev_comp->ipm_design) {
573 struct mtk_spi *mdata = spi_controller_get_devdata(host);
575 if (mdata->tx_sgl_len && mdata->rx_sgl_len) {
576 if (mdata->tx_sgl_len > mdata->rx_sgl_len) {
577 mult_delta = mtk_spi_get_mult_delta(mdata, mdata->rx_sgl_len);
578 mdata->xfer_len = mdata->rx_sgl_len - mult_delta;
579 mdata->rx_sgl_len = mult_delta;
580 mdata->tx_sgl_len -= mdata->xfer_len;
582 mult_delta = mtk_spi_get_mult_delta(mdata, mdata->tx_sgl_len);
583 mdata->xfer_len = mdata->tx_sgl_len - mult_delta;
584 mdata->tx_sgl_len = mult_delta;
585 mdata->rx_sgl_len -= mdata->xfer_len;
587 } else if (mdata->tx_sgl_len) {
588 mult_delta = mtk_spi_get_mult_delta(mdata, mdata->tx_sgl_len);
589 mdata->xfer_len = mdata->tx_sgl_len - mult_delta;
590 mdata->tx_sgl_len = mult_delta;
591 } else if (mdata->rx_sgl_len) {
592 mult_delta = mtk_spi_get_mult_delta(mdata, mdata->rx_sgl_len);
593 mdata->xfer_len = mdata->rx_sgl_len - mult_delta;
594 mdata->rx_sgl_len = mult_delta;
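
555-594 size each DMA round. mtk_spi_get_mult_delta() returns the tail of a scatterlist segment that does not fill a whole packet; mtk_spi_update_mdata_len() transfers everything except that tail now (so xfer_len is a packet-size multiple, as setup_packet requires) and carries the remainder into the next round. When both directions are active (575-586), the shorter side bounds the round. A sketch of the helper plus worked numbers (reconstruction):

    static int mtk_spi_get_mult_delta(struct mtk_spi *mdata, u32 xfer_len)
    {
            u32 mult_delta = 0;

            /* IPM parts allow much larger packets; same remainder logic */
            if (mdata->dev_comp->ipm_design) {
                    if (xfer_len > MTK_SPI_IPM_PACKET_SIZE)
                            mult_delta = xfer_len % MTK_SPI_IPM_PACKET_SIZE;
            } else {
                    if (xfer_len > MTK_SPI_PACKET_SIZE)
                            mult_delta = xfer_len % MTK_SPI_PACKET_SIZE;
            }

            return mult_delta;
    }

    /* e.g. tx_sgl_len = 2500 with a 1024-byte cap:
     *   mult_delta = 2500 % 1024 = 452
     *   xfer_len   = 2500 - 452 = 2048  (two whole packets this round)
     *   tx_sgl_len = 452                (deferred to the next round)
     */
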
601 struct mtk_spi *mdata = spi_controller_get_devdata(host);
603 if (mdata->tx_sgl) {
605 mdata->base + SPI_TX_SRC_REG);
607 if (mdata->dev_comp->dma_ext)
609 mdata->base + SPI_TX_SRC_REG_64);
613 if (mdata->rx_sgl) {
615 mdata->base + SPI_RX_DST_REG);
617 if (mdata->dev_comp->dma_ext)
619 mdata->base + SPI_RX_DST_REG_64);
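
601-619 program the DMA pointers: the low 32 bits of the mapped address go to SPI_TX_SRC_REG / SPI_RX_DST_REG, and on dma_ext parts the upper word goes to the *_64 companions. The fully expanded form of the same pattern appears verbatim at 895-909 below; the TX half, reconstructed with the upstream driver's config guard:

    if (mdata->tx_sgl) {
            writel((u32)(xfer->tx_dma & MTK_SPI_32BITS_MASK),
                   mdata->base + SPI_TX_SRC_REG);
    #ifdef CONFIG_ARCH_DMA_ADDR_T_64BIT
            if (mdata->dev_comp->dma_ext)
                    writel((u32)(xfer->tx_dma >> 32),
                           mdata->base + SPI_TX_SRC_REG_64);
    #endif
    }
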
630 struct mtk_spi *mdata = spi_controller_get_devdata(host);
632 mdata->cur_transfer = xfer;
633 mdata->xfer_len = min(MTK_SPI_MAX_FIFO_SIZE, xfer->len);
634 mdata->num_xfered = 0;
640 iowrite32_rep(mdata->base + SPI_TX_DATA_REG, xfer->tx_buf, cnt);
645 writel(reg_val, mdata->base + SPI_TX_DATA_REG);
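
630-645 are the PIO TX path: cap the chunk at the FIFO size (633), push whole 32-bit words with iowrite32_rep() (640), then zero-pad any 1-3 trailing bytes into a u32 for one final register write (645). The remainder handling is elided in the listing here but shows up expanded at 796-802; reconstructed:

    cnt = mdata->xfer_len / 4;
    iowrite32_rep(mdata->base + SPI_TX_DATA_REG, xfer->tx_buf, cnt);

    remainder = mdata->xfer_len % 4;
    if (remainder > 0) {
            reg_val = 0;
            memcpy(&reg_val, xfer->tx_buf + (cnt * 4), remainder);
            writel(reg_val, mdata->base + SPI_TX_DATA_REG);
    }
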
659 struct mtk_spi *mdata = spi_controller_get_devdata(host);
661 mdata->tx_sgl = NULL;
662 mdata->rx_sgl = NULL;
663 mdata->tx_sgl_len = 0;
664 mdata->rx_sgl_len = 0;
665 mdata->cur_transfer = xfer;
666 mdata->num_xfered = 0;
670 cmd = readl(mdata->base + SPI_CMD_REG);
675 writel(cmd, mdata->base + SPI_CMD_REG);
678 mdata->tx_sgl = xfer->tx_sg.sgl;
680 mdata->rx_sgl = xfer->rx_sg.sgl;
682 if (mdata->tx_sgl) {
683 xfer->tx_dma = sg_dma_address(mdata->tx_sgl);
684 mdata->tx_sgl_len = sg_dma_len(mdata->tx_sgl);
686 if (mdata->rx_sgl) {
687 xfer->rx_dma = sg_dma_address(mdata->rx_sgl);
688 mdata->rx_sgl_len = sg_dma_len(mdata->rx_sgl);
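
659-688 set up a DMA transfer: clear the per-transfer scatterlist state (661-666), switch the controller into DMA mode via SPI_CMD_REG (670-675), then latch the first segment's address and length from each scatterlist (682-688). The elided bit-twiddling at 671-674, reconstructed with the driver's SPI_CMD_TX_DMA/SPI_CMD_RX_DMA names:

    cmd = readl(mdata->base + SPI_CMD_REG);
    if (xfer->tx_buf)
            cmd |= SPI_CMD_TX_DMA;
    if (xfer->rx_buf)
            cmd |= SPI_CMD_RX_DMA;
    writel(cmd, mdata->base + SPI_CMD_REG);
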
703 struct mtk_spi *mdata = spi_controller_get_devdata(spi->controller);
707 if (mdata->dev_comp->ipm_design) {
713 writel(reg_val, mdata->base + SPI_CFG3_IPM_REG);
734 struct mtk_spi *mdata = spi_controller_get_devdata(spi->controller);
739 if (mdata->dev_comp->need_pad_sel && spi_get_csgpiod(spi, 0))
750 struct mtk_spi *mdata = spi_controller_get_devdata(host);
751 struct spi_transfer *trans = mdata->cur_transfer;
753 reg_val = readl(mdata->base + SPI_STATUS0_REG);
755 mdata->state = MTK_SPI_PAUSED;
757 mdata->state = MTK_SPI_IDLE;
760 if (mdata->use_spimem) {
761 complete(&mdata->spimem_done);
767 cnt = mdata->xfer_len / 4;
768 ioread32_rep(mdata->base + SPI_RX_DATA_REG,
769 trans->rx_buf + mdata->num_xfered, cnt);
770 remainder = mdata->xfer_len % 4;
772 reg_val = readl(mdata->base + SPI_RX_DATA_REG);
774 mdata->num_xfered +
781 mdata->num_xfered += mdata->xfer_len;
782 if (mdata->num_xfered == trans->len) {
787 len = trans->len - mdata->num_xfered;
788 mdata->xfer_len = min(MTK_SPI_MAX_FIFO_SIZE, len);
792 cnt = mdata->xfer_len / 4;
793 iowrite32_rep(mdata->base + SPI_TX_DATA_REG,
794 trans->tx_buf + mdata->num_xfered, cnt);
796 remainder = mdata->xfer_len % 4;
800 trans->tx_buf + (cnt * 4) + mdata->num_xfered,
802 writel(reg_val, mdata->base + SPI_TX_DATA_REG);
811 if (mdata->tx_sgl)
812 trans->tx_dma += mdata->xfer_len;
813 if (mdata->rx_sgl)
814 trans->rx_dma += mdata->xfer_len;
816 if (mdata->tx_sgl && (mdata->tx_sgl_len == 0)) {
817 mdata->tx_sgl = sg_next(mdata->tx_sgl);
818 if (mdata->tx_sgl) {
819 trans->tx_dma = sg_dma_address(mdata->tx_sgl);
820 mdata->tx_sgl_len = sg_dma_len(mdata->tx_sgl);
823 if (mdata->rx_sgl && (mdata->rx_sgl_len == 0)) {
824 mdata->rx_sgl = sg_next(mdata->rx_sgl);
825 if (mdata->rx_sgl) {
826 trans->rx_dma = sg_dma_address(mdata->rx_sgl);
827 mdata->rx_sgl_len = sg_dma_len(mdata->rx_sgl);
831 if (!mdata->tx_sgl && !mdata->rx_sgl) {
833 cmd = readl(mdata->base + SPI_CMD_REG);
836 writel(cmd, mdata->base + SPI_CMD_REG);
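
750-836 are the interrupt handler: it samples the pause/idle status (753-757), short-circuits to the spi-mem completion when use_spimem is set (760-761), drains the RX FIFO for PIO transfers (767-774), refills the TX FIFO for the next chunk (787-802), and for DMA advances both scatterlists with sg_next() until exhausted (811-831). The elided RX remainder at 772-774 mirrors the TX side: one last register read, copying out only the valid bytes (reconstruction):

    remainder = mdata->xfer_len % 4;
    if (remainder > 0) {
            reg_val = readl(mdata->base + SPI_RX_DATA_REG);
            memcpy(trans->rx_buf + (cnt * 4) + mdata->num_xfered,
                   &reg_val, remainder);
    }
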
893 struct mtk_spi *mdata = spi_controller_get_devdata(host);
895 writel((u32)(mdata->tx_dma & MTK_SPI_32BITS_MASK),
896 mdata->base + SPI_TX_SRC_REG);
898 if (mdata->dev_comp->dma_ext)
899 writel((u32)(mdata->tx_dma >> 32),
900 mdata->base + SPI_TX_SRC_REG_64);
904 writel((u32)(mdata->rx_dma & MTK_SPI_32BITS_MASK),
905 mdata->base + SPI_RX_DST_REG);
907 if (mdata->dev_comp->dma_ext)
908 writel((u32)(mdata->rx_dma >> 32),
909 mdata->base + SPI_RX_DST_REG_64);
917 struct mtk_spi *mdata = spi_controller_get_devdata(mem->spi->controller);
935 if (!wait_for_completion_timeout(&mdata->spimem_done,
937 dev_err(mdata->dev, "spi-mem transfer timeout\n");
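
917-937 tie together the completion seen elsewhere in the listing: it is initialized once in probe (1156), re-armed before each spi-mem op (953), fired from the interrupt handler (761), and waited on here with a timeout. A sketch of the wait; the real timeout is derived from the op size and clock in the upstream driver, so the 10-second bound below is a placeholder assumption:

    unsigned long ms = 10 * MSEC_PER_SEC;  /* placeholder upper bound */

    if (!wait_for_completion_timeout(&mdata->spimem_done,
                                     msecs_to_jiffies(ms))) {
            dev_err(mdata->dev, "spi-mem transfer timeout\n");
            return -ETIMEDOUT;
    }

    return 0;
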
947 struct mtk_spi *mdata = spi_controller_get_devdata(mem->spi->controller);
952 mdata->use_spimem = true;
953 reinit_completion(&mdata->spimem_done);
955 mtk_spi_reset(mdata);
959 reg_val = readl(mdata->base + SPI_CFG3_IPM_REG);
973 writel(0, mdata->base + SPI_CFG1_REG);
976 mdata->xfer_len = op->data.nbytes;
1006 writel(reg_val, mdata->base + SPI_CFG3_IPM_REG);
1016 mdata->use_spimem = false;
1040 mdata->tx_dma = dma_map_single(mdata->dev, tx_tmp_buf,
1042 if (dma_mapping_error(mdata->dev, mdata->tx_dma)) {
1059 mdata->rx_dma = dma_map_single(mdata->dev,
1063 if (dma_mapping_error(mdata->dev, mdata->rx_dma)) {
1069 reg_val = readl(mdata->base + SPI_CMD_REG);
1073 writel(reg_val, mdata->base + SPI_CMD_REG);
1085 reg_val = readl(mdata->base + SPI_CMD_REG);
1089 writel(reg_val, mdata->base + SPI_CMD_REG);
1093 dma_unmap_single(mdata->dev, mdata->rx_dma,
1103 dma_unmap_single(mdata->dev, mdata->tx_dma,
1107 mdata->use_spimem = false;
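
947-1107 implement the spi-mem exec_op path: build a bounce buffer for the op, stream-map it, run the transfer, then unmap. 1040-1063 show the map-and-check halves and 1093-1103 the matching unmaps. The canonical shape of that pairing (tx_tmp_buf, tx_size, and the error label are illustrative names, not quotes):

    mdata->tx_dma = dma_map_single(mdata->dev, tx_tmp_buf,
                                   tx_size, DMA_TO_DEVICE);
    if (dma_mapping_error(mdata->dev, mdata->tx_dma)) {
            ret = -ENOMEM;
            goto err_free_tx;
    }

    /* ... program registers, trigger, wait on spimem_done ... */

    dma_unmap_single(mdata->dev, mdata->tx_dma,
                     tx_size, DMA_TO_DEVICE);

Every dma_map_single() must be checked with dma_mapping_error() before the address reaches hardware and unmapped on all exit paths; the unmaps at 1093/1103 are exactly that unwind.
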
1122 struct mtk_spi *mdata;
1125 host = devm_spi_alloc_host(dev, sizeof(*mdata));
1141 mdata = spi_controller_get_devdata(host);
1142 mdata->dev_comp = device_get_match_data(dev);
1144 if (mdata->dev_comp->enhance_timing)
1147 if (mdata->dev_comp->must_tx)
1149 if (mdata->dev_comp->ipm_design)
1153 if (mdata->dev_comp->ipm_design) {
1154 mdata->dev = dev;
1156 init_completion(&mdata->spimem_done);
1159 if (mdata->dev_comp->need_pad_sel) {
1160 mdata->pad_num = of_property_count_u32_elems(dev->of_node,
1162 if (mdata->pad_num < 0)
1166 mdata->pad_sel = devm_kmalloc_array(dev, mdata->pad_num,
1168 if (!mdata->pad_sel)
1171 for (i = 0; i < mdata->pad_num; i++) {
1174 i, &mdata->pad_sel[i]);
1175 if (mdata->pad_sel[i] > MT8173_SPI_MAX_PAD_SEL)
1178 i, mdata->pad_sel[i]);
1183 mdata->base = devm_platform_ioremap_resource(pdev, 0);
1184 if (IS_ERR(mdata->base))
1185 return PTR_ERR(mdata->base);
1194 if (mdata->dev_comp->ipm_design)
1199 mdata->parent_clk = devm_clk_get(dev, "parent-clk");
1200 if (IS_ERR(mdata->parent_clk))
1201 return dev_err_probe(dev, PTR_ERR(mdata->parent_clk),
1204 mdata->sel_clk = devm_clk_get(dev, "sel-clk");
1205 if (IS_ERR(mdata->sel_clk))
1206 return dev_err_probe(dev, PTR_ERR(mdata->sel_clk), "failed to get sel-clk\n");
1208 mdata->spi_clk = devm_clk_get(dev, "spi-clk");
1209 if (IS_ERR(mdata->spi_clk))
1210 return dev_err_probe(dev, PTR_ERR(mdata->spi_clk), "failed to get spi-clk\n");
1212 mdata->spi_hclk = devm_clk_get_optional(dev, "hclk");
1213 if (IS_ERR(mdata->spi_hclk))
1214 return dev_err_probe(dev, PTR_ERR(mdata->spi_hclk), "failed to get hclk\n");
1216 ret = clk_set_parent(mdata->sel_clk, mdata->parent_clk);
1220 ret = clk_prepare_enable(mdata->spi_hclk);
1224 ret = clk_prepare_enable(mdata->spi_clk);
1226 clk_disable_unprepare(mdata->spi_hclk);
1230 mdata->spi_clk_hz = clk_get_rate(mdata->spi_clk);
1232 if (mdata->dev_comp->no_need_unprepare) {
1233 clk_disable(mdata->spi_clk);
1234 clk_disable(mdata->spi_hclk);
1236 clk_disable_unprepare(mdata->spi_clk);
1237 clk_disable_unprepare(mdata->spi_hclk);
1240 if (mdata->dev_comp->need_pad_sel) {
1241 if (mdata->pad_num != host->num_chipselect)
1244 mdata->pad_num, host->num_chipselect);
1251 if (mdata->dev_comp->dma_ext)
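
1199-1237 are the probe-time clock dance: acquire parent/sel/spi clocks with devm_clk_get() (the optional hclk via devm_clk_get_optional()), reparent via clk_set_parent(), then enable just long enough to cache the rate at 1230, since clk_get_rate() is only meaningful on a prepared, enabled clock. On no_need_unprepare parts the clock is left prepared so runtime PM can later use the cheaper clk_enable()/clk_disable() pair. Reconstructed shape of the enable/read/disable step (hclk handling elided):

    ret = clk_prepare_enable(mdata->spi_clk);
    if (ret)
            return ret;

    mdata->spi_clk_hz = clk_get_rate(mdata->spi_clk);  /* cache while running */

    if (mdata->dev_comp->no_need_unprepare)
            clk_disable(mdata->spi_clk);          /* stay prepared for runtime PM */
    else
            clk_disable_unprepare(mdata->spi_clk);
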
1279 struct mtk_spi *mdata = spi_controller_get_devdata(host);
1282 if (mdata->use_spimem && !completion_done(&mdata->spimem_done))
1283 complete(&mdata->spimem_done);
1294 mtk_spi_reset(mdata);
1296 if (mdata->dev_comp->no_need_unprepare) {
1297 clk_unprepare(mdata->spi_clk);
1298 clk_unprepare(mdata->spi_hclk);
1311 struct mtk_spi *mdata = spi_controller_get_devdata(host);
1318 clk_disable_unprepare(mdata->spi_clk);
1319 clk_disable_unprepare(mdata->spi_hclk);
1331 struct mtk_spi *mdata = spi_controller_get_devdata(host);
1336 ret = clk_prepare_enable(mdata->spi_clk);
1342 ret = clk_prepare_enable(mdata->spi_hclk);
1345 clk_disable_unprepare(mdata->spi_clk);
1352 clk_disable_unprepare(mdata->spi_clk);
1353 clk_disable_unprepare(mdata->spi_hclk);
1364 struct mtk_spi *mdata = spi_controller_get_devdata(host);
1366 if (mdata->dev_comp->no_need_unprepare) {
1367 clk_disable(mdata->spi_clk);
1368 clk_disable(mdata->spi_hclk);
1370 clk_disable_unprepare(mdata->spi_clk);
1371 clk_disable_unprepare(mdata->spi_hclk);
1380 struct mtk_spi *mdata = spi_controller_get_devdata(host);
1383 if (mdata->dev_comp->no_need_unprepare) {
1384 ret = clk_enable(mdata->spi_clk);
1389 ret = clk_enable(mdata->spi_hclk);
1392 clk_disable(mdata->spi_clk);
1396 ret = clk_prepare_enable(mdata->spi_clk);
1402 ret = clk_prepare_enable(mdata->spi_hclk);
1405 clk_disable_unprepare(mdata->spi_clk);
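
1364-1405 are the runtime-PM mirror of that probe pattern: suspend drops the clocks with clk_disable() when they were left prepared and clk_disable_unprepare() otherwise, and resume re-enables in the same two flavors with unwind on partial failure. A reconstruction of the resume path, consistent with the fragments at 1384-1405:

    static int mtk_spi_runtime_resume(struct device *dev)
    {
            struct spi_controller *host = dev_get_drvdata(dev);
            struct mtk_spi *mdata = spi_controller_get_devdata(host);
            int ret;

            if (mdata->dev_comp->no_need_unprepare) {
                    /* clocks stayed prepared; enable alone is enough */
                    ret = clk_enable(mdata->spi_clk);
                    if (ret)
                            return ret;
                    ret = clk_enable(mdata->spi_hclk);
                    if (ret) {
                            clk_disable(mdata->spi_clk);
                            return ret;
                    }
            } else {
                    ret = clk_prepare_enable(mdata->spi_clk);
                    if (ret)
                            return ret;
                    ret = clk_prepare_enable(mdata->spi_hclk);
                    if (ret) {
                            clk_disable_unprepare(mdata->spi_clk);
                            return ret;
                    }
            }

            return 0;
    }
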