Lines Matching refs:qspi

131 struct stm32_qspi *qspi = (struct stm32_qspi *)dev_id;
134 cr = readl_relaxed(qspi->io_base + QSPI_CR);
135 sr = readl_relaxed(qspi->io_base + QSPI_SR);
140 writel_relaxed(cr, qspi->io_base + QSPI_CR);
141 complete(&qspi->match_completion);
149 writel_relaxed(cr, qspi->io_base + QSPI_CR);
150 complete(&qspi->data_completion);
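
The matches at lines 131-150 come from the controller's interrupt handler, which masks the interrupt source it is servicing and wakes the matching waiter. A minimal sketch of how those lines plausibly fit together; the handler name, the SR_SMF/SR_TCF/SR_TEF bit macros and the exact branch structure are assumptions filled in around the matched register accesses and complete() calls:

#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/completion.h>

static irqreturn_t stm32_qspi_irq(int irq, void *dev_id)
{
        struct stm32_qspi *qspi = (struct stm32_qspi *)dev_id;
        u32 cr, sr;

        cr = readl_relaxed(qspi->io_base + QSPI_CR);
        sr = readl_relaxed(qspi->io_base + QSPI_SR);

        /* Status-match (automatic polling) event: mask it, wake the APM waiter. */
        if ((cr & CR_SMIE) && (sr & SR_SMF)) {
                cr &= ~CR_SMIE;
                writel_relaxed(cr, qspi->io_base + QSPI_CR);
                complete(&qspi->match_completion);
                return IRQ_HANDLED;
        }

        /* Transfer complete or transfer error: mask both sources, wake the data waiter. */
        if (sr & (SR_TCF | SR_TEF)) {
                cr &= ~(CR_TCIE | CR_TEIE);
                writel_relaxed(cr, qspi->io_base + QSPI_CR);
                complete(&qspi->data_completion);
        }

        return IRQ_HANDLED;
}
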
166 static int stm32_qspi_tx_poll(struct stm32_qspi *qspi,
184 ret = readl_relaxed_poll_timeout_atomic(qspi->io_base + QSPI_SR,
188 dev_err(qspi->dev, "fifo timeout (len:%d stat:%#x)\n",
192 tx_fifo(buf++, qspi->io_base + QSPI_DR);
198 static int stm32_qspi_tx_mm(struct stm32_qspi *qspi,
201 memcpy_fromio(op->data.buf.in, qspi->mm_base + op->addr.val,
213 static int stm32_qspi_tx_dma(struct stm32_qspi *qspi,
226 dma_ch = qspi->dma_chrx;
229 dma_ch = qspi->dma_chtx;
236 err = spi_controller_dma_map_mem_op_data(qspi->ctrl, op, &sgt);
247 cr = readl_relaxed(qspi->io_base + QSPI_CR);
249 reinit_completion(&qspi->dma_completion);
251 desc->callback_param = &qspi->dma_completion;
259 writel_relaxed(cr | CR_DMAEN, qspi->io_base + QSPI_CR);
262 if (!wait_for_completion_timeout(&qspi->dma_completion,
270 writel_relaxed(cr & ~CR_DMAEN, qspi->io_base + QSPI_CR);
272 spi_controller_dma_unmap_mem_op_data(qspi->ctrl, op, &sgt);
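
stm32_qspi_tx_dma (lines 213-272) follows the usual dmaengine slave flow: map the spi-mem buffer, prepare and submit a descriptor, enable the controller's DMA request, then wait on dma_completion. A sketch that fills the gaps between the matched lines with generic dmaengine calls; the callback name, the STM32_COMP_TIMEOUT_MS constant and the timeout/cleanup path are assumptions, not necessarily what the driver does:

#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/spi/spi-mem.h>

/* Assumed callback: line 251 passes &qspi->dma_completion as callback_param. */
static void stm32_qspi_dma_callback(void *arg)
{
        complete((struct completion *)arg);
}

static int stm32_qspi_tx_dma(struct stm32_qspi *qspi,
                             const struct spi_mem_op *op)
{
        struct dma_async_tx_descriptor *desc;
        enum dma_transfer_direction dma_dir;
        struct dma_chan *dma_ch;
        struct sg_table sgt;
        dma_cookie_t cookie;
        u32 cr;
        int err;

        if (op->data.dir == SPI_MEM_DATA_IN) {
                dma_dir = DMA_DEV_TO_MEM;
                dma_ch = qspi->dma_chrx;
        } else {
                dma_dir = DMA_MEM_TO_DEV;
                dma_ch = qspi->dma_chtx;
        }

        /* Map the op's data buffer into a scatterlist usable by the DMA engine. */
        err = spi_controller_dma_map_mem_op_data(qspi->ctrl, op, &sgt);
        if (err)
                return err;

        desc = dmaengine_prep_slave_sg(dma_ch, sgt.sgl, sgt.nents, dma_dir,
                                       DMA_PREP_INTERRUPT);
        if (!desc) {
                err = -ENOMEM;
                goto out_unmap;
        }

        cr = readl_relaxed(qspi->io_base + QSPI_CR);

        reinit_completion(&qspi->dma_completion);
        desc->callback = stm32_qspi_dma_callback;
        desc->callback_param = &qspi->dma_completion;
        cookie = dmaengine_submit(desc);
        err = dma_submit_error(cookie);
        if (err)
                goto out_unmap;

        dma_async_issue_pending(dma_ch);

        /* Let the QSPI push/pull its FIFO through the DMA request line. */
        writel_relaxed(cr | CR_DMAEN, qspi->io_base + QSPI_CR);

        if (!wait_for_completion_timeout(&qspi->dma_completion,
                                         msecs_to_jiffies(STM32_COMP_TIMEOUT_MS))) {
                dmaengine_terminate_all(dma_ch);
                err = -ETIMEDOUT;
        }

        writel_relaxed(cr & ~CR_DMAEN, qspi->io_base + QSPI_CR);

out_unmap:
        spi_controller_dma_unmap_mem_op_data(qspi->ctrl, op, &sgt);

        return err;
}
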
277 static int stm32_qspi_tx(struct stm32_qspi *qspi, const struct spi_mem_op *op)
282 if (qspi->fmode == CCR_FMODE_MM)
283 return stm32_qspi_tx_mm(qspi, op);
284 else if (((op->data.dir == SPI_MEM_DATA_IN && qspi->dma_chrx) ||
285 (op->data.dir == SPI_MEM_DATA_OUT && qspi->dma_chtx)) &&
287 if (!stm32_qspi_tx_dma(qspi, op))
290 return stm32_qspi_tx_poll(qspi, op);
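
The dispatcher at lines 277-290 picks between memory-mapped copy, DMA and FIFO polling. A sketch of the decision tree it suggests; the no-data early return and the minimum-size test on line 286 are assumptions, since that line is not among the matches:

static int stm32_qspi_tx(struct stm32_qspi *qspi, const struct spi_mem_op *op)
{
        if (!op->data.nbytes)
                return 0;

        if (qspi->fmode == CCR_FMODE_MM)
                return stm32_qspi_tx_mm(qspi, op);
        else if (((op->data.dir == SPI_MEM_DATA_IN && qspi->dma_chrx) ||
                  (op->data.dir == SPI_MEM_DATA_OUT && qspi->dma_chtx)) &&
                 op->data.nbytes > 4)  /* assumed threshold; small transfers just poll */
                if (!stm32_qspi_tx_dma(qspi, op))
                        return 0;

        /* Fall back to CPU-driven FIFO access when DMA is absent or fails. */
        return stm32_qspi_tx_poll(qspi, op);
}

Keeping the polled path as the final fallback means a missing or failing DMA channel degrades performance but never functionality.
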
293 static int stm32_qspi_wait_nobusy(struct stm32_qspi *qspi)
297 return readl_relaxed_poll_timeout_atomic(qspi->io_base + QSPI_SR, sr,
302 static int stm32_qspi_wait_cmd(struct stm32_qspi *qspi)
307 if ((readl_relaxed(qspi->io_base + QSPI_SR) & SR_TCF) ||
308 qspi->fmode == CCR_FMODE_APM)
311 reinit_completion(&qspi->data_completion);
312 cr = readl_relaxed(qspi->io_base + QSPI_CR);
313 writel_relaxed(cr | CR_TCIE | CR_TEIE, qspi->io_base + QSPI_CR);
315 if (!wait_for_completion_timeout(&qspi->data_completion,
319 sr = readl_relaxed(qspi->io_base + QSPI_SR);
326 writel_relaxed(FCR_CTCF | FCR_CTEF, qspi->io_base + QSPI_FCR);
328 err = stm32_qspi_wait_nobusy(qspi);
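
stm32_qspi_wait_cmd (lines 302-328) arms the transfer-complete/transfer-error interrupts, sleeps on data_completion, then clears the flags and waits for BUSY to drop. A sketch assembled around the matched lines; the SR_TEF check, the error codes and the STM32_COMP_TIMEOUT_MS constant are assumptions:

static int stm32_qspi_wait_cmd(struct stm32_qspi *qspi)
{
        u32 cr, sr;
        int err = 0;

        /* Nothing to wait for if the transfer already finished or APM is in use. */
        if ((readl_relaxed(qspi->io_base + QSPI_SR) & SR_TCF) ||
            qspi->fmode == CCR_FMODE_APM)
                goto out;

        reinit_completion(&qspi->data_completion);
        cr = readl_relaxed(qspi->io_base + QSPI_CR);
        writel_relaxed(cr | CR_TCIE | CR_TEIE, qspi->io_base + QSPI_CR);

        if (!wait_for_completion_timeout(&qspi->data_completion,
                                         msecs_to_jiffies(STM32_COMP_TIMEOUT_MS))) {
                err = -ETIMEDOUT;
        } else {
                sr = readl_relaxed(qspi->io_base + QSPI_SR);
                if (sr & SR_TEF)
                        err = -EIO;
        }

out:
        /* Clear the transfer-complete/error flags, then wait for BUSY to clear. */
        writel_relaxed(FCR_CTCF | FCR_CTEF, qspi->io_base + QSPI_FCR);
        if (!err)
                err = stm32_qspi_wait_nobusy(qspi);

        return err;
}
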
333 static int stm32_qspi_wait_poll_status(struct stm32_qspi *qspi)
337 reinit_completion(&qspi->match_completion);
338 cr = readl_relaxed(qspi->io_base + QSPI_CR);
339 writel_relaxed(cr | CR_SMIE, qspi->io_base + QSPI_CR);
341 if (!wait_for_completion_timeout(&qspi->match_completion,
342 msecs_to_jiffies(qspi->status_timeout)))
345 writel_relaxed(FCR_CSMF, qspi->io_base + QSPI_FCR);
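
For automatic polling mode, stm32_qspi_wait_poll_status (lines 333-345) enables the status-match interrupt and sleeps until the interrupt handler signals match_completion. A short sketch built almost entirely from the matched lines; only the return values are assumptions:

static int stm32_qspi_wait_poll_status(struct stm32_qspi *qspi)
{
        u32 cr;

        reinit_completion(&qspi->match_completion);
        cr = readl_relaxed(qspi->io_base + QSPI_CR);
        writel_relaxed(cr | CR_SMIE, qspi->io_base + QSPI_CR);

        if (!wait_for_completion_timeout(&qspi->match_completion,
                                         msecs_to_jiffies(qspi->status_timeout)))
                return -ETIMEDOUT;

        /* Acknowledge the status-match flag once the expected value was seen. */
        writel_relaxed(FCR_CSMF, qspi->io_base + QSPI_FCR);

        return 0;
}
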
360 struct stm32_qspi *qspi = spi_controller_get_devdata(spi->controller);
361 struct stm32_qspi_flash *flash = &qspi->flash[spi_get_chipselect(spi, 0)];
365 dev_dbg(qspi->dev, "cmd:%#x mode:%d.%d.%d.%d addr:%#llx len:%#x\n",
370 cr = readl_relaxed(qspi->io_base + QSPI_CR);
374 writel_relaxed(cr, qspi->io_base + QSPI_CR);
378 qspi->io_base + QSPI_DLR);
380 ccr = qspi->fmode;
400 writel_relaxed(ccr, qspi->io_base + QSPI_CCR);
402 if (op->addr.nbytes && qspi->fmode != CCR_FMODE_MM)
403 writel_relaxed(op->addr.val, qspi->io_base + QSPI_AR);
405 if (qspi->fmode == CCR_FMODE_APM)
406 err_poll_status = stm32_qspi_wait_poll_status(qspi);
408 err = stm32_qspi_tx(qspi, op);
417 if (err || err_poll_status || qspi->fmode == CCR_FMODE_MM)
421 err = stm32_qspi_wait_cmd(qspi);
428 cr = readl_relaxed(qspi->io_base + QSPI_CR) | CR_ABORT;
429 writel_relaxed(cr, qspi->io_base + QSPI_CR);
432 timeout = readl_relaxed_poll_timeout_atomic(qspi->io_base + QSPI_CR,
436 writel_relaxed(FCR_CTCF | FCR_CSMF, qspi->io_base + QSPI_FCR);
439 dev_err(qspi->dev, "%s err:%d err_poll_status:%d abort timeout:%d\n",
451 struct stm32_qspi *qspi = spi_controller_get_devdata(mem->spi->controller);
457 ret = pm_runtime_resume_and_get(qspi->dev);
461 mutex_lock(&qspi->lock);
463 writel_relaxed(mask, qspi->io_base + QSPI_PSMKR);
464 writel_relaxed(match, qspi->io_base + QSPI_PSMAR);
465 qspi->fmode = CCR_FMODE_APM;
466 qspi->status_timeout = timeout_ms;
469 mutex_unlock(&qspi->lock);
471 pm_runtime_mark_last_busy(qspi->dev);
472 pm_runtime_put_autosuspend(qspi->dev);
479 struct stm32_qspi *qspi = spi_controller_get_devdata(mem->spi->controller);
482 ret = pm_runtime_resume_and_get(qspi->dev);
486 mutex_lock(&qspi->lock);
488 qspi->fmode = CCR_FMODE_INDR;
490 qspi->fmode = CCR_FMODE_INDW;
493 mutex_unlock(&qspi->lock);
495 pm_runtime_mark_last_busy(qspi->dev);
496 pm_runtime_put_autosuspend(qspi->dev);
503 struct stm32_qspi *qspi = spi_controller_get_devdata(desc->mem->spi->controller);
509 if (!qspi->mm_base && desc->info.op_tmpl.data.dir == SPI_MEM_DATA_IN)
512 if (!qspi->mm_size)
521 struct stm32_qspi *qspi = spi_controller_get_devdata(desc->mem->spi->controller);
526 ret = pm_runtime_resume_and_get(qspi->dev);
530 mutex_lock(&qspi->lock);
536 dev_dbg(qspi->dev, "%s len = 0x%zx offs = 0x%llx buf = 0x%p\n", __func__, len, offs, buf);
543 if (addr_max < qspi->mm_size && op.addr.buswidth)
544 qspi->fmode = CCR_FMODE_MM;
546 qspi->fmode = CCR_FMODE_INDR;
549 mutex_unlock(&qspi->lock);
551 pm_runtime_mark_last_busy(qspi->dev);
552 pm_runtime_put_autosuspend(qspi->dev);
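
Lines 503-552 belong to the direct-mapping callbacks: the create hook rejects descriptors the memory-mapped window cannot serve, and the read hook uses memory-mapped mode only when the whole range fits below mm_size. A sketch of the read path; the function name, the op rebasing, the exact addr_max bound and the stm32_qspi_send() helper (whose name does not appear in the matches) are assumptions:

static ssize_t stm32_qspi_dirmap_read(struct spi_mem_dirmap_desc *desc,
                                      u64 offs, size_t len, void *buf)
{
        struct stm32_qspi *qspi = spi_controller_get_devdata(desc->mem->spi->controller);
        struct spi_mem_op op;
        u32 addr_max;
        ssize_t ret;

        ret = pm_runtime_resume_and_get(qspi->dev);
        if (ret < 0)
                return ret;

        mutex_lock(&qspi->lock);

        /* Rebase the template op on the requested offset, length and buffer. */
        op = desc->info.op_tmpl;
        op.addr.val = desc->info.offset + offs;
        op.data.nbytes = len;
        op.data.buf.in = buf;

        dev_dbg(qspi->dev, "%s len = 0x%zx offs = 0x%llx buf = 0x%p\n",
                __func__, len, offs, buf);

        /* Memory-mapped reads only while the last byte still falls inside mm_size. */
        addr_max = op.addr.val + op.data.nbytes + 1;
        if (addr_max < qspi->mm_size && op.addr.buswidth)
                qspi->fmode = CCR_FMODE_MM;
        else
                qspi->fmode = CCR_FMODE_INDR;

        ret = stm32_qspi_send(desc->mem->spi, &op) ?: len;

        mutex_unlock(&qspi->lock);

        pm_runtime_mark_last_busy(qspi->dev);
        pm_runtime_put_autosuspend(qspi->dev);

        return ret;
}
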
560 struct stm32_qspi *qspi = spi_controller_get_devdata(ctrl);
569 ret = pm_runtime_resume_and_get(qspi->dev);
573 mutex_lock(&qspi->lock);
582 dev_dbg(qspi->dev, "tx_buf:%p tx_nbits:%d rx_buf:%p rx_nbits:%d len:%d dummy_data:%d\n",
609 qspi->fmode = CCR_FMODE_INDR;
614 qspi->fmode = CCR_FMODE_INDW;
630 mutex_unlock(&qspi->lock);
635 pm_runtime_mark_last_busy(qspi->dev);
636 pm_runtime_put_autosuspend(qspi->dev);
644 struct stm32_qspi *qspi = spi_controller_get_devdata(ctrl);
658 gpiod_count(qspi->dev, "cs") == -ENOENT)) {
659 dev_err(qspi->dev, "spi-rx-bus-width\\/spi-tx-bus-width\\/cs-gpios\n");
660 dev_err(qspi->dev, "configuration not supported\n");
665 ret = pm_runtime_resume_and_get(qspi->dev);
669 presc = DIV_ROUND_UP(qspi->clk_rate, spi->max_speed_hz) - 1;
671 flash = &qspi->flash[spi_get_chipselect(spi, 0)];
675 mutex_lock(&qspi->lock);
676 qspi->cr_reg = CR_APMS | 3 << CR_FTHRES_SHIFT | CR_SSHIFT | CR_EN;
683 qspi->cr_reg |= CR_DFM;
684 dev_dbg(qspi->dev, "Dual flash mode enable");
687 writel_relaxed(qspi->cr_reg, qspi->io_base + QSPI_CR);
690 qspi->dcr_reg = DCR_FSIZE_MASK;
691 writel_relaxed(qspi->dcr_reg, qspi->io_base + QSPI_DCR);
692 mutex_unlock(&qspi->lock);
694 pm_runtime_mark_last_busy(qspi->dev);
695 pm_runtime_put_autosuspend(qspi->dev);
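
The setup-path matches (lines 644-695) reject unsupported bus-width/cs-gpios combinations, derive the prescaler from the kernel clock and program the CR and DCR defaults. A sketch of that flow; the function name, the flash->presc field, the busy/max_speed checks and the omitted dual-flash (CR_DFM) branch are assumptions beyond what the listing shows:

static int stm32_qspi_setup(struct spi_device *spi)
{
        struct spi_controller *ctrl = spi->controller;
        struct stm32_qspi *qspi = spi_controller_get_devdata(ctrl);
        struct stm32_qspi_flash *flash;
        u32 presc;
        int ret;

        if (ctrl->busy)
                return -EBUSY;

        if (!spi->max_speed_hz)
                return -EINVAL;

        ret = pm_runtime_resume_and_get(qspi->dev);
        if (ret < 0)
                return ret;

        /* Prescaler so that clk_rate / (presc + 1) does not exceed the device's max rate. */
        presc = DIV_ROUND_UP(qspi->clk_rate, spi->max_speed_hz) - 1;

        flash = &qspi->flash[spi_get_chipselect(spi, 0)];
        flash->presc = presc;

        mutex_lock(&qspi->lock);
        qspi->cr_reg = CR_APMS | 3 << CR_FTHRES_SHIFT | CR_SSHIFT | CR_EN;
        /* Dual-flash handling (CR_DFM, lines 683-684) is omitted in this sketch. */
        writel_relaxed(qspi->cr_reg, qspi->io_base + QSPI_CR);

        /* Largest FSIZE; spi-mem enforces the real flash size. */
        qspi->dcr_reg = DCR_FSIZE_MASK;
        writel_relaxed(qspi->dcr_reg, qspi->io_base + QSPI_DCR);
        mutex_unlock(&qspi->lock);

        pm_runtime_mark_last_busy(qspi->dev);
        pm_runtime_put_autosuspend(qspi->dev);

        return 0;
}
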
700 static int stm32_qspi_dma_setup(struct stm32_qspi *qspi)
703 struct device *dev = qspi->dev;
710 dma_cfg.src_addr = qspi->phys_base + QSPI_DR;
711 dma_cfg.dst_addr = qspi->phys_base + QSPI_DR;
715 qspi->dma_chrx = dma_request_chan(dev, "rx");
716 if (IS_ERR(qspi->dma_chrx)) {
717 ret = PTR_ERR(qspi->dma_chrx);
718 qspi->dma_chrx = NULL;
722 if (dmaengine_slave_config(qspi->dma_chrx, &dma_cfg)) {
724 dma_release_channel(qspi->dma_chrx);
725 qspi->dma_chrx = NULL;
729 qspi->dma_chtx = dma_request_chan(dev, "tx");
730 if (IS_ERR(qspi->dma_chtx)) {
731 ret = PTR_ERR(qspi->dma_chtx);
732 qspi->dma_chtx = NULL;
734 if (dmaengine_slave_config(qspi->dma_chtx, &dma_cfg)) {
736 dma_release_channel(qspi->dma_chtx);
737 qspi->dma_chtx = NULL;
742 init_completion(&qspi->dma_completion);
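
stm32_qspi_dma_setup (lines 700-742) requests optional rx/tx channels and applies a common slave config pointing both directions at the data register. A sketch under the assumption that a missing channel is non-fatal (except for probe deferral) and that byte-wide accesses with a burst of 4 are used; those specifics are not in the matched lines:

#include <linux/dmaengine.h>

static int stm32_qspi_dma_setup(struct stm32_qspi *qspi)
{
        struct dma_slave_config dma_cfg;
        struct device *dev = qspi->dev;
        int ret = 0;

        memset(&dma_cfg, 0, sizeof(dma_cfg));

        /* Assumed: byte-wide FIFO accesses, both directions targeting QSPI_DR. */
        dma_cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
        dma_cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
        dma_cfg.src_addr = qspi->phys_base + QSPI_DR;
        dma_cfg.dst_addr = qspi->phys_base + QSPI_DR;
        dma_cfg.src_maxburst = 4;
        dma_cfg.dst_maxburst = 4;

        /* Channels are optional: the driver falls back to polled FIFO access. */
        qspi->dma_chrx = dma_request_chan(dev, "rx");
        if (IS_ERR(qspi->dma_chrx)) {
                ret = PTR_ERR(qspi->dma_chrx);
                qspi->dma_chrx = NULL;
                if (ret == -EPROBE_DEFER)
                        goto out;
        } else if (dmaengine_slave_config(qspi->dma_chrx, &dma_cfg)) {
                dev_err(dev, "dma rx config failed\n");
                dma_release_channel(qspi->dma_chrx);
                qspi->dma_chrx = NULL;
        }

        qspi->dma_chtx = dma_request_chan(dev, "tx");
        if (IS_ERR(qspi->dma_chtx)) {
                ret = PTR_ERR(qspi->dma_chtx);
                qspi->dma_chtx = NULL;
                if (ret == -EPROBE_DEFER)
                        goto out;
        } else if (dmaengine_slave_config(qspi->dma_chtx, &dma_cfg)) {
                dev_err(dev, "dma tx config failed\n");
                dma_release_channel(qspi->dma_chtx);
                qspi->dma_chtx = NULL;
        }

out:
        init_completion(&qspi->dma_completion);

        return ret == -EPROBE_DEFER ? ret : 0;
}

Because both channels are optional here, only probe deferral is propagated; any other request error simply leaves the corresponding channel pointer NULL.
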
750 static void stm32_qspi_dma_free(struct stm32_qspi *qspi)
752 if (qspi->dma_chtx)
753 dma_release_channel(qspi->dma_chtx);
754 if (qspi->dma_chrx)
755 dma_release_channel(qspi->dma_chrx);
774 struct stm32_qspi *qspi;
778 ctrl = devm_spi_alloc_host(dev, sizeof(*qspi));
782 qspi = spi_controller_get_devdata(ctrl);
783 qspi->ctrl = ctrl;
785 res = platform_get_resource_byname(pdev, IORESOURCE_MEM, "qspi");
786 qspi->io_base = devm_ioremap_resource(dev, res);
787 if (IS_ERR(qspi->io_base))
788 return PTR_ERR(qspi->io_base);
790 qspi->phys_base = res->start;
793 qspi->mm_base = devm_ioremap_resource(dev, res);
794 if (IS_ERR(qspi->mm_base))
795 return PTR_ERR(qspi->mm_base);
797 qspi->mm_size = resource_size(res);
798 if (qspi->mm_size > STM32_QSPI_MAX_MMAP_SZ)
806 dev_name(dev), qspi);
812 init_completion(&qspi->data_completion);
813 init_completion(&qspi->match_completion);
815 qspi->clk = devm_clk_get(dev, NULL);
816 if (IS_ERR(qspi->clk))
817 return PTR_ERR(qspi->clk);
819 qspi->clk_rate = clk_get_rate(qspi->clk);
820 if (!qspi->clk_rate)
823 ret = clk_prepare_enable(qspi->clk);
840 qspi->dev = dev;
841 platform_set_drvdata(pdev, qspi);
842 ret = stm32_qspi_dma_setup(qspi);
846 mutex_init(&qspi->lock);
874 pm_runtime_get_sync(qspi->dev);
875 /* disable qspi */
876 writel_relaxed(0, qspi->io_base + QSPI_CR);
877 mutex_destroy(&qspi->lock);
878 pm_runtime_put_noidle(qspi->dev);
879 pm_runtime_disable(qspi->dev);
880 pm_runtime_set_suspended(qspi->dev);
881 pm_runtime_dont_use_autosuspend(qspi->dev);
883 stm32_qspi_dma_free(qspi);
885 clk_disable_unprepare(qspi->clk);
892 struct stm32_qspi *qspi = platform_get_drvdata(pdev);
894 pm_runtime_get_sync(qspi->dev);
895 spi_unregister_controller(qspi->ctrl);
896 /* disable qspi */
897 writel_relaxed(0, qspi->io_base + QSPI_CR);
898 stm32_qspi_dma_free(qspi);
899 mutex_destroy(&qspi->lock);
900 pm_runtime_put_noidle(qspi->dev);
901 pm_runtime_disable(qspi->dev);
902 pm_runtime_set_suspended(qspi->dev);
903 pm_runtime_dont_use_autosuspend(qspi->dev);
904 clk_disable_unprepare(qspi->clk);
909 struct stm32_qspi *qspi = dev_get_drvdata(dev);
911 clk_disable_unprepare(qspi->clk);
918 struct stm32_qspi *qspi = dev_get_drvdata(dev);
920 return clk_prepare_enable(qspi->clk);
932 struct stm32_qspi *qspi = dev_get_drvdata(dev);
945 writel_relaxed(qspi->cr_reg, qspi->io_base + QSPI_CR);
946 writel_relaxed(qspi->dcr_reg, qspi->io_base + QSPI_DCR);
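
The power-management matches (lines 909-946) show runtime suspend/resume gating only the kernel clock, while system resume reprograms the CR and DCR shadow copies saved in setup. A sketch of those three callbacks; the function names, the __maybe_unused annotations and the pm_runtime bracketing in resume are assumptions:

static int __maybe_unused stm32_qspi_runtime_suspend(struct device *dev)
{
        struct stm32_qspi *qspi = dev_get_drvdata(dev);

        clk_disable_unprepare(qspi->clk);

        return 0;
}

static int __maybe_unused stm32_qspi_runtime_resume(struct device *dev)
{
        struct stm32_qspi *qspi = dev_get_drvdata(dev);

        return clk_prepare_enable(qspi->clk);
}

static int __maybe_unused stm32_qspi_resume(struct device *dev)
{
        struct stm32_qspi *qspi = dev_get_drvdata(dev);
        int ret;

        ret = pm_runtime_resume_and_get(dev);
        if (ret < 0)
                return ret;

        /* Register contents were lost across suspend: restore the shadowed CR and DCR. */
        writel_relaxed(qspi->cr_reg, qspi->io_base + QSPI_CR);
        writel_relaxed(qspi->dcr_reg, qspi->io_base + QSPI_DCR);

        pm_runtime_mark_last_busy(dev);
        pm_runtime_put_autosuspend(dev);

        return 0;
}
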
961 {.compatible = "st,stm32f469-qspi"},
970 .name = "stm32-qspi",