Lines Matching defs:nfc

516 static void marvell_nfc_disable_int(struct marvell_nfc *nfc, u32 int_mask)
521 reg = readl_relaxed(nfc->regs + NDCR);
522 writel_relaxed(reg | int_mask, nfc->regs + NDCR);
525 static void marvell_nfc_enable_int(struct marvell_nfc *nfc, u32 int_mask)
530 reg = readl_relaxed(nfc->regs + NDCR);
531 writel_relaxed(reg & ~int_mask, nfc->regs + NDCR);
534 static u32 marvell_nfc_clear_int(struct marvell_nfc *nfc, u32 int_mask)
538 reg = readl_relaxed(nfc->regs + NDSR);
539 writel_relaxed(int_mask, nfc->regs + NDSR);
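
The three interrupt helpers at 516, 525 and 534 are plain MMIO read-modify-write: NDCR carries interrupt mask bits (setting a bit masks the source, clearing it enables it) while NDSR is write-one-to-clear. A minimal sketch of that pattern; the register offsets, the clear helper's return value and the trimmed-down controller structure are assumptions for illustration, not the driver's actual definitions:

    #include <linux/io.h>
    #include <linux/types.h>

    #define NDCR 0x00        /* control register, holds the interrupt MASK bits (assumed offset) */
    #define NDSR 0x14        /* status register, write-1-to-clear (assumed offset) */

    struct nfc_sketch {
            void __iomem *regs;
    };

    /* Setting a mask bit in NDCR disables the corresponding interrupt source. */
    static void nfc_disable_int(struct nfc_sketch *nfc, u32 int_mask)
    {
            u32 reg = readl_relaxed(nfc->regs + NDCR);

            writel_relaxed(reg | int_mask, nfc->regs + NDCR);
    }

    static void nfc_enable_int(struct nfc_sketch *nfc, u32 int_mask)
    {
            u32 reg = readl_relaxed(nfc->regs + NDCR);

            writel_relaxed(reg & ~int_mask, nfc->regs + NDCR);
    }

    /* Acknowledge the requested sources in NDSR and hand back what was pending. */
    static u32 nfc_clear_int(struct nfc_sketch *nfc, u32 int_mask)
    {
            u32 reg = readl_relaxed(nfc->regs + NDSR);

            writel_relaxed(int_mask, nfc->regs + NDSR);
            return reg & int_mask;
    }
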
547 struct marvell_nfc *nfc = to_marvell_nfc(chip->controller);
559 ndcr = readl_relaxed(nfc->regs + NDCR);
566 writel_relaxed(ndcr, nfc->regs + NDCR);
571 struct marvell_nfc *nfc = to_marvell_nfc(chip->controller);
579 ret = readl_relaxed_poll_timeout(nfc->regs + NDCR, val,
583 dev_err(nfc->dev, "Timeout on NAND controller run mode\n");
584 writel_relaxed(readl(nfc->regs + NDCR) & ~NDCR_ND_RUN,
585 nfc->regs + NDCR);
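
Lines 579 to 585 show the wind-down of a command: wait for the controller to leave its ND_RUN state with readl_relaxed_poll_timeout(), and if it never does, clear the bit by hand and report the timeout. A minimal sketch of that polling pattern, with the offset, bit position and timeout plumbing assumed:

    #include <linux/bits.h>
    #include <linux/errno.h>
    #include <linux/io.h>
    #include <linux/iopoll.h>

    #define NDCR            0x00            /* assumed offset */
    #define NDCR_ND_RUN     BIT(28)         /* assumed bit position */

    /* Poll until the controller drops ND_RUN; on timeout, force it off so the
     * next command starts from a clean state, and report the failure. */
    static int nfc_wait_ndrun(void __iomem *regs, unsigned int timeout_ms)
    {
            u32 val;
            int ret;

            ret = readl_relaxed_poll_timeout(regs + NDCR, val,
                                             !(val & NDCR_ND_RUN), 0,
                                             timeout_ms * 1000);
            if (ret) {
                    writel_relaxed(readl(regs + NDCR) & ~NDCR_ND_RUN,
                                   regs + NDCR);
                    return -ETIMEDOUT;
            }

            return 0;
    }
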
609 struct marvell_nfc *nfc = to_marvell_nfc(chip->controller);
616 dev_err(nfc->dev, "Last operation did not succeed\n");
620 ndcr = readl_relaxed(nfc->regs + NDCR);
621 writel_relaxed(readl(nfc->regs + NDSR), nfc->regs + NDSR);
624 writel_relaxed(ndcr | NDCR_ND_RUN, nfc->regs + NDCR);
625 ret = readl_relaxed_poll_timeout(nfc->regs + NDSR, val,
629 dev_err(nfc->dev, "Timeout on WRCMDRE\n");
634 writel_relaxed(NDSR_WRCMDREQ, nfc->regs + NDSR);
643 struct marvell_nfc *nfc = to_marvell_nfc(chip->controller);
645 dev_dbg(nfc->dev, "\nNDCR: 0x%08x\n"
647 (u32)readl_relaxed(nfc->regs + NDCR), nfc_op->ndcb[0],
651 nfc->regs + NDCB0);
652 writel_relaxed(nfc_op->ndcb[1], nfc->regs + NDCB0);
653 writel(nfc_op->ndcb[2], nfc->regs + NDCB0);
661 if (!WARN_ON_ONCE(!nfc->caps->is_nfcv2))
662 writel(nfc_op->ndcb[3], nfc->regs + NDCB0);
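
Lines 645 to 662 push a command descriptor into the controller: the NDCB words are written back-to-back to the same NDCB0 address, which the hardware treats as a small command FIFO, and only NFCv2 accepts the fourth word. The guard deciding when that fourth word is needed is not among these matches, so the sketch below takes it as a parameter; the NDCB0 offset and the simplified op structure are assumptions:

    #include <linux/bug.h>
    #include <linux/io.h>
    #include <linux/types.h>

    #define NDCB0   0x48            /* command buffer port (assumed offset) */

    struct nfc_op_sketch {
            u32 ndcb[4];            /* NDCB0..NDCB3 command descriptor words */
    };

    /* The command buffer is written through a single register: each write to
     * NDCB0 pushes the next descriptor word. */
    static void nfc_send_cmd(void __iomem *regs, const struct nfc_op_sketch *op,
                             bool is_nfcv2, bool need_ndcb3)
    {
            writel_relaxed(op->ndcb[0], regs + NDCB0);
            writel_relaxed(op->ndcb[1], regs + NDCB0);
            writel(op->ndcb[2], regs + NDCB0);

            /* NDCB3 only exists on NFCv2; pushing it on NFCv1 would be a bug. */
            if (need_ndcb3 && !WARN_ON_ONCE(!is_nfcv2))
                    writel(op->ndcb[3], regs + NDCB0);
    }
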
669 struct marvell_nfc *nfc = to_marvell_nfc(chip->controller);
673 ret = readl_relaxed_poll_timeout(nfc->regs + NDSR, val,
678 dev_err(nfc->dev, "Timeout on %s (NDSR: 0x%08x)\n",
680 if (nfc->dma_chan)
681 dmaengine_terminate_all(nfc->dma_chan);
689 if (nfc->use_dma && (readl_relaxed(nfc->regs + NDCR) & NDCR_DMA_EN))
692 writel_relaxed(flag, nfc->regs + NDSR);
705 static int marvell_nfc_poll_status(struct marvell_nfc *nfc, u32 mask,
713 st = readl_relaxed(nfc->regs + NDSR);
728 struct marvell_nfc *nfc = to_marvell_nfc(chip->controller);
738 ret = marvell_nfc_poll_status(nfc, NDSR_RDY(0),
742 init_completion(&nfc->complete);
744 marvell_nfc_enable_int(nfc, NDCR_RDYM);
745 ret = wait_for_completion_timeout(&nfc->complete,
747 marvell_nfc_disable_int(nfc, NDCR_RDYM);
749 pending = marvell_nfc_clear_int(nfc, NDSR_RDY(0) | NDSR_RDY(1));
756 dev_err(nfc->dev, "Timeout waiting for RB signal\n");
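
Lines 705 to 756 wait for ready/busy in two stages: a short busy-poll of NDSR, then, if the device is still busy, a sleep on nfc->complete with the RDY interrupt temporarily unmasked (the interrupt handler further down completes it). A compressed sketch of that poll-then-sleep pattern; the register offsets, bit positions and the poll budget are assumptions:

    #include <linux/bits.h>
    #include <linux/completion.h>
    #include <linux/errno.h>
    #include <linux/io.h>
    #include <linux/iopoll.h>
    #include <linux/jiffies.h>

    #define NDCR            0x00            /* assumed offsets and bits */
    #define NDSR            0x14
    #define NDCR_RDYM       BIT(11)
    #define NDSR_RDY0       BIT(11)
    #define NDSR_RDY1       BIT(12)

    struct nfc_sketch {
            void __iomem *regs;
            struct completion complete;
    };

    /* Fast path: busy-poll NDSR for a short while.  Slow path: unmask the RDY
     * interrupt (the ISR completes nfc->complete) and sleep until it fires. */
    static int nfc_wait_ready(struct nfc_sketch *nfc, unsigned int timeout_ms)
    {
            u32 pending, val;

            if (!readl_relaxed_poll_timeout(nfc->regs + NDSR, val,
                                            val & NDSR_RDY0, 0, 100))
                    goto ack;

            init_completion(&nfc->complete);
            writel_relaxed(readl_relaxed(nfc->regs + NDCR) & ~NDCR_RDYM,
                           nfc->regs + NDCR);          /* enable RDY irq */
            wait_for_completion_timeout(&nfc->complete,
                                        msecs_to_jiffies(timeout_ms));
            writel_relaxed(readl_relaxed(nfc->regs + NDCR) | NDCR_RDYM,
                           nfc->regs + NDCR);          /* mask it again */

    ack:
            /* Acknowledge whichever RDY line fired (NDSR is W1C). */
            pending = readl_relaxed(nfc->regs + NDSR) & (NDSR_RDY0 | NDSR_RDY1);
            writel_relaxed(pending, nfc->regs + NDSR);
            return pending ? 0 : -ETIMEDOUT;
    }
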
767 struct marvell_nfc *nfc = to_marvell_nfc(chip->controller);
774 ndcr_generic = readl_relaxed(nfc->regs + NDCR) &
776 writel_relaxed(ndcr_generic | marvell_nand->ndcr, nfc->regs + NDCR);
779 marvell_nfc_clear_int(nfc, NDCR_ALL_INT);
781 if (chip == nfc->selected_chip && die_nr == marvell_nand->selected_die)
784 writel_relaxed(marvell_nand->ndtr0, nfc->regs + NDTR0);
785 writel_relaxed(marvell_nand->ndtr1, nfc->regs + NDTR1);
787 nfc->selected_chip = chip;
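
Lines 767 to 787 select the target: the controller-wide NDCR bits are merged with the per-chip ones, pending interrupts are cleared, and the NDTR0/NDTR1 timing registers are only rewritten when the selection actually changes, which is what the cached nfc->selected_chip is for. A sketch of that caching pattern, with the per-die tracking dropped and the offsets assumed:

    #include <linux/io.h>
    #include <linux/types.h>

    #define NDCR    0x00            /* assumed offsets */
    #define NDTR0   0x04
    #define NDTR1   0x0c

    struct chip_sketch {
            u32 ndcr;               /* per-chip NDCR bits (CS, page size, ...) */
            u32 ndtr0, ndtr1;       /* timings precomputed at setup time */
    };

    struct nfc_sketch {
            void __iomem *regs;
            struct chip_sketch *selected_chip;
    };

    static void nfc_select_chip(struct nfc_sketch *nfc, struct chip_sketch *chip,
                                u32 ndcr_generic_mask)
    {
            u32 ndcr_generic = readl_relaxed(nfc->regs + NDCR) & ndcr_generic_mask;

            writel_relaxed(ndcr_generic | chip->ndcr, nfc->regs + NDCR);

            /* Timing registers are left alone unless the selection changed. */
            if (chip == nfc->selected_chip)
                    return;

            writel_relaxed(chip->ndtr0, nfc->regs + NDTR0);
            writel_relaxed(chip->ndtr1, nfc->regs + NDTR1);
            nfc->selected_chip = chip;
    }
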
793 struct marvell_nfc *nfc = dev_id;
794 u32 st = readl_relaxed(nfc->regs + NDSR);
795 u32 ien = (~readl_relaxed(nfc->regs + NDCR)) & NDCR_ALL_INT;
807 marvell_nfc_disable_int(nfc, st & NDCR_ALL_INT);
810 complete(&nfc->complete);
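
The interrupt handler at 793 to 810 relies on NDCR holding mask bits: the set of enabled sources is the inverted mask, only status bits that are both raised and enabled are handled, those sources are masked again, and the waiter sleeping on nfc->complete is woken. A sketch of that handler, with the mask-bit span assumed:

    #include <linux/bits.h>
    #include <linux/completion.h>
    #include <linux/interrupt.h>
    #include <linux/io.h>

    #define NDCR            0x00                    /* assumed offsets */
    #define NDSR            0x14
    #define NDCR_ALL_INT    GENMASK(11, 0)          /* assumed mask-bit span */

    struct nfc_sketch {
            void __iomem *regs;
            struct completion complete;
    };

    static irqreturn_t nfc_isr(int irq, void *dev_id)
    {
            struct nfc_sketch *nfc = dev_id;
            u32 st = readl_relaxed(nfc->regs + NDSR);
            u32 ien = ~readl_relaxed(nfc->regs + NDCR) & NDCR_ALL_INT;

            if (!(st & ien))
                    return IRQ_NONE;

            /* Mask the sources that fired; the sleeping thread acknowledges
             * them in NDSR and re-enables what it still needs. */
            writel_relaxed(readl_relaxed(nfc->regs + NDCR) | (st & NDCR_ALL_INT),
                           nfc->regs + NDCR);

            complete(&nfc->complete);
            return IRQ_HANDLED;
    }
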
818 struct marvell_nfc *nfc = to_marvell_nfc(chip->controller);
819 u32 ndcr = readl_relaxed(nfc->regs + NDCR);
822 writel_relaxed(ndcr | NDCR_ECC_EN, nfc->regs + NDCR);
829 writel_relaxed(NDECCCTRL_BCH_EN, nfc->regs + NDECCCTRL);
835 struct marvell_nfc *nfc = to_marvell_nfc(chip->controller);
836 u32 ndcr = readl_relaxed(nfc->regs + NDCR);
839 writel_relaxed(ndcr & ~NDCR_ECC_EN, nfc->regs + NDCR);
841 writel_relaxed(0, nfc->regs + NDECCCTRL);
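
Lines 818 to 841 toggle the ECC engine: NDCR_ECC_EN turns hardware ECC on or off, and for BCH layouts the engine is additionally switched into BCH mode through NDECCCTRL, which is cleared again on disable. A sketch with the offsets and bit values assumed:

    #include <linux/bits.h>
    #include <linux/io.h>
    #include <linux/types.h>

    #define NDCR                    0x00            /* assumed offsets and bits */
    #define NDECCCTRL               0x28
    #define NDCR_ECC_EN             BIT(30)
    #define NDECCCTRL_BCH_EN        BIT(0)

    /* Hamming only needs NDCR_ECC_EN; BCH also flips the engine into BCH mode
     * through NDECCCTRL.  Disabling clears both so the next command starts
     * with ECC fully off. */
    static void nfc_hw_ecc(void __iomem *regs, bool enable, bool bch)
    {
            u32 ndcr = readl_relaxed(regs + NDCR);

            if (enable) {
                    writel_relaxed(ndcr | NDCR_ECC_EN, regs + NDCR);
                    if (bch)
                            writel_relaxed(NDECCCTRL_BCH_EN, regs + NDECCCTRL);
            } else {
                    writel_relaxed(ndcr & ~NDCR_ECC_EN, regs + NDCR);
                    writel_relaxed(0, regs + NDECCCTRL);
            }
    }
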
846 static void marvell_nfc_enable_dma(struct marvell_nfc *nfc)
850 reg = readl_relaxed(nfc->regs + NDCR);
851 writel_relaxed(reg | NDCR_DMA_EN, nfc->regs + NDCR);
854 static void marvell_nfc_disable_dma(struct marvell_nfc *nfc)
858 reg = readl_relaxed(nfc->regs + NDCR);
859 writel_relaxed(reg & ~NDCR_DMA_EN, nfc->regs + NDCR);
863 static int marvell_nfc_xfer_data_dma(struct marvell_nfc *nfc,
873 marvell_nfc_enable_dma(nfc);
875 sg_init_one(&sg, nfc->dma_buf, dma_len);
876 ret = dma_map_sg(nfc->dma_chan->device->dev, &sg, 1, direction);
878 dev_err(nfc->dev, "Could not map DMA S/G list\n");
882 tx = dmaengine_prep_slave_sg(nfc->dma_chan, &sg, 1,
887 dev_err(nfc->dev, "Could not prepare DMA S/G list\n");
888 dma_unmap_sg(nfc->dma_chan->device->dev, &sg, 1, direction);
898 dma_async_issue_pending(nfc->dma_chan);
899 ret = marvell_nfc_wait_cmdd(nfc->selected_chip);
900 dma_unmap_sg(nfc->dma_chan->device->dev, &sg, 1, direction);
901 marvell_nfc_disable_dma(nfc);
903 dev_err(nfc->dev, "Timeout waiting for DMA (status: %d)\n",
904 dmaengine_tx_status(nfc->dma_chan, cookie, NULL));
905 dmaengine_terminate_all(nfc->dma_chan);
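
Lines 846 to 905 implement DMA transfers through a single bounce buffer: DMA is enabled in NDCR, the buffer is wrapped in a one-entry scatterlist, mapped, handed to the dmaengine slave channel, and the transfer is paced by the controller's own command-done wait; on timeout the cookie status is reported and the channel terminated. A condensed sketch of the dmaengine part (the NDCR toggling and the command-done wait are only hinted at in comments), with error codes and flags assumed:

    #include <linux/dmaengine.h>
    #include <linux/dma-mapping.h>
    #include <linux/errno.h>
    #include <linux/scatterlist.h>

    /* Map the shared bounce buffer, queue it on the slave channel and kick the
     * transfer; the caller copies payload in/out of the buffer around this. */
    static int nfc_xfer_dma(struct dma_chan *chan, void *dma_buf,
                            unsigned int len, enum dma_data_direction dir)
    {
            enum dma_transfer_direction tdir =
                    dir == DMA_FROM_DEVICE ? DMA_DEV_TO_MEM : DMA_MEM_TO_DEV;
            struct dma_async_tx_descriptor *tx;
            struct scatterlist sg;
            dma_cookie_t cookie;

            sg_init_one(&sg, dma_buf, len);
            if (!dma_map_sg(chan->device->dev, &sg, 1, dir))
                    return -ENOMEM;

            tx = dmaengine_prep_slave_sg(chan, &sg, 1, tdir, DMA_PREP_INTERRUPT);
            if (!tx) {
                    dma_unmap_sg(chan->device->dev, &sg, 1, dir);
                    return -ENXIO;
            }

            cookie = dmaengine_submit(tx);
            if (dma_submit_error(cookie)) {
                    dma_unmap_sg(chan->device->dev, &sg, 1, dir);
                    return -EIO;
            }

            dma_async_issue_pending(chan);

            /* ... wait for the controller's command-done flag here; on timeout
             * dmaengine_tx_status(chan, cookie, NULL) shows how far the
             * transfer got and dmaengine_terminate_all(chan) aborts it ... */

            dma_unmap_sg(chan->device->dev, &sg, 1, dir);
            return 0;
    }
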
912 static int marvell_nfc_xfer_data_in_pio(struct marvell_nfc *nfc, u8 *in,
920 ioread32_rep(nfc->regs + NDDB, in + i, FIFO_REP(FIFO_DEPTH));
925 ioread32_rep(nfc->regs + NDDB, tmp_buf, FIFO_REP(FIFO_DEPTH));
932 static int marvell_nfc_xfer_data_out_pio(struct marvell_nfc *nfc, const u8 *out,
940 iowrite32_rep(nfc->regs + NDDB, out + i, FIFO_REP(FIFO_DEPTH));
946 iowrite32_rep(nfc->regs + NDDB, tmp_buf, FIFO_REP(FIFO_DEPTH));
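
Lines 912 to 946 are the PIO fallback: the data FIFO behind NDDB only moves fixed FIFO_DEPTH bursts, so complete bursts go straight to or from the caller's buffer and a small stack bounce buffer absorbs the final partial burst. A sketch of the read direction (the write direction mirrors it with iowrite32_rep() and a padded last burst); the port offset and FIFO geometry are assumptions:

    #include <linux/io.h>
    #include <linux/string.h>
    #include <linux/types.h>

    #define NDDB            0x40                    /* data port (assumed offset) */
    #define FIFO_DEPTH      8                       /* bytes per burst (assumed) */
    #define FIFO_REP(x)     ((x) / sizeof(u32))     /* bytes -> 32-bit accesses */

    static void nfc_pio_read(void __iomem *regs, u8 *in, int len)
    {
            u8 tmp_buf[FIFO_DEPTH];
            int i;

            for (i = 0; i < len; i += FIFO_DEPTH) {
                    if (len - i >= FIFO_DEPTH) {
                            /* Whole burst: read straight into the caller's buffer. */
                            ioread32_rep(regs + NDDB, in + i, FIFO_REP(FIFO_DEPTH));
                    } else {
                            /* Partial tail: drain a full burst into the bounce
                             * buffer and copy out only the bytes requested. */
                            ioread32_rep(regs + NDDB, tmp_buf, FIFO_REP(FIFO_DEPTH));
                            memcpy(in + i, tmp_buf, len - i);
                    }
            }
    }
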
996 struct marvell_nfc *nfc = to_marvell_nfc(chip->controller);
1000 ndsr = readl_relaxed(nfc->regs + NDSR);
1004 writel_relaxed(ndsr, nfc->regs + NDSR);
1018 writel_relaxed(ndsr, nfc->regs + NDSR);
1039 struct marvell_nfc *nfc = to_marvell_nfc(chip->controller);
1054 if (nfc->caps->is_nfcv2)
1073 if (nfc->use_dma) {
1074 marvell_nfc_xfer_data_dma(nfc, DMA_FROM_DEVICE,
1076 memcpy(data_buf, nfc->dma_buf, lt->data_bytes);
1077 memcpy(oob_buf, nfc->dma_buf + lt->data_bytes, oob_bytes);
1079 marvell_nfc_xfer_data_in_pio(nfc, data_buf, lt->data_bytes);
1080 marvell_nfc_xfer_data_in_pio(nfc, oob_buf, oob_bytes);
1153 struct marvell_nfc *nfc = to_marvell_nfc(chip->controller);
1169 if (nfc->caps->is_nfcv2)
1183 if (nfc->use_dma) {
1184 memcpy(nfc->dma_buf, data_buf, lt->data_bytes);
1185 memcpy(nfc->dma_buf + lt->data_bytes, oob_buf, oob_bytes);
1186 marvell_nfc_xfer_data_dma(nfc, DMA_TO_DEVICE, lt->data_bytes +
1189 marvell_nfc_xfer_data_out_pio(nfc, data_buf, lt->data_bytes);
1190 marvell_nfc_xfer_data_out_pio(nfc, oob_buf, oob_bytes);
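
The HW-ECC page read (1039 onwards) and write (1153 onwards) paths both treat a chunk as data bytes immediately followed by OOB bytes: with DMA the two regions are packed into the single bounce buffer and moved in one transfer, without it they are streamed through the FIFO one after the other. A sketch of the write-side dispatch, reusing nfc_xfer_dma() from the DMA sketch above and a PIO write counterpart of nfc_pio_read() (prototypes only); the trimmed-down controller structure is an assumption:

    #include <linux/dmaengine.h>
    #include <linux/string.h>
    #include <linux/types.h>

    struct nfc_sketch {
            bool use_dma;
            void *dma_buf;                  /* shared bounce buffer */
            struct dma_chan *dma_chan;
            void __iomem *regs;
    };

    /* From the earlier sketches. */
    int nfc_xfer_dma(struct dma_chan *chan, void *dma_buf, unsigned int len,
                     enum dma_data_direction dir);
    void nfc_pio_write(void __iomem *regs, const u8 *out, int len);

    /* A chunk is laid out as data immediately followed by OOB, matching what
     * the controller expects on the bus. */
    static int nfc_write_chunk(struct nfc_sketch *nfc, const u8 *data,
                               unsigned int data_len, const u8 *oob,
                               unsigned int oob_len)
    {
            if (nfc->use_dma) {
                    memcpy(nfc->dma_buf, data, data_len);
                    memcpy(nfc->dma_buf + data_len, oob, oob_len);
                    return nfc_xfer_dma(nfc->dma_chan, nfc->dma_buf,
                                        data_len + oob_len, DMA_TO_DEVICE);
            }

            nfc_pio_write(nfc->regs, data, data_len);
            nfc_pio_write(nfc->regs, oob, oob_len);
            return 0;
    }
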
1309 struct marvell_nfc *nfc = to_marvell_nfc(chip->controller);
1356 marvell_nfc_xfer_data_in_pio(nfc, data,
1364 marvell_nfc_xfer_data_in_pio(nfc, spare,
1571 struct marvell_nfc *nfc = to_marvell_nfc(chip->controller);
1620 iowrite32_rep(nfc->regs + NDDB, data, FIFO_REP(data_len));
1621 iowrite32_rep(nfc->regs + NDDB, spare, FIFO_REP(spare_len));
1714 struct marvell_nfc *nfc = to_marvell_nfc(chip->controller);
1767 if (nfc->caps->is_nfcv2) {
1781 if (nfc->caps->is_nfcv2) {
1803 struct marvell_nfc *nfc = to_marvell_nfc(chip->controller);
1817 ret = marvell_nfc_xfer_data_in_pio(nfc, in, len);
1821 ret = marvell_nfc_xfer_data_out_pio(nfc, out, len);
1885 struct marvell_nfc *nfc = to_marvell_nfc(chip->controller);
1887 writel_relaxed(readl(nfc->regs + NDCR) & ~NDCR_ND_RUN,
1888 nfc->regs + NDCR);
1957 struct marvell_nfc *nfc = to_marvell_nfc(chip->controller);
1959 writel_relaxed(readl(nfc->regs + NDCR) & ~NDCR_ND_RUN,
1960 nfc->regs + NDCR);
2182 struct marvell_nfc *nfc = to_marvell_nfc(chip->controller);
2187 if (nfc->caps->is_nfcv2)
2248 struct marvell_nfc *nfc = to_marvell_nfc(chip->controller);
2252 if (!nfc->caps->is_nfcv2 &&
2254 dev_err(nfc->dev,
2271 (!nfc->caps->is_nfcv2 && ecc->strength > 1)) {
2272 dev_err(nfc->dev,
2281 dev_err(nfc->dev, "Requested layout needs at least 128 OOB bytes\n");
2324 struct marvell_nfc *nfc = to_marvell_nfc(chip->controller);
2333 dev_info(nfc->dev,
2349 if (!nfc->caps->is_nfcv2 && mtd->writesize != SZ_512 &&
2351 dev_err(nfc->dev, "NFCv1 cannot write %d bytes pages\n",
2390 struct marvell_nfc *nfc = to_marvell_nfc(chip->controller);
2391 unsigned int period_ns = 1000000000 / clk_get_rate(nfc->core_clk) * 2;
2400 if (nfc->caps->max_mode_number && nfc->caps->max_mode_number < conf->timings.mode)
2445 if (nfc->caps->is_nfcv2) {
2473 if (nfc->caps->is_nfcv2) {
2485 * Reset nfc->selected_chip so the next command will cause the timing
2488 nfc->selected_chip = NULL;
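
The timing setup around 2390 derives the controller clock period from clk_get_rate(nfc->core_clk); the "* 2" in the match suggests the NAND timing clock runs at half the core clock rate. Timings are then converted into register cycles, and nfc->selected_chip is reset to NULL (2488) so the very next command forces the select path above to reload NDTR0/NDTR1. A small sketch of the nanoseconds-to-cycles conversion, assuming round-up division; the helper name is illustrative:

    #include <linux/clk.h>
    #include <linux/math.h>

    /* Timing fields are expressed in controller-clock cycles; the period is
     * derived from the core clock (twice its period on this controller). */
    static unsigned int nfc_ns_to_cycles(struct clk *core_clk, unsigned int ns)
    {
            unsigned int period_ns = 1000000000 / clk_get_rate(core_clk) * 2;

            return DIV_ROUND_UP(ns, period_ns);
    }
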
2497 struct marvell_nfc *nfc = to_marvell_nfc(chip->controller);
2498 struct pxa3xx_nand_platform_data *pdata = dev_get_platdata(nfc->dev);
2550 dev_err(nfc->dev, "ECC init failed: %d\n", ret);
2564 if (pdata || nfc->caps->legacy_of_bindings) {
2582 mtd->name = devm_kasprintf(nfc->dev, GFP_KERNEL,
2583 "%s:nand.%d", dev_name(nfc->dev),
2586 dev_err(nfc->dev, "Failed to allocate mtd->name\n");
2600 static int marvell_nand_chip_init(struct device *dev, struct marvell_nfc *nfc,
2619 if (pdata || nfc->caps->legacy_of_bindings) {
2642 if (pdata || nfc->caps->legacy_of_bindings) {
2658 if (cs >= nfc->caps->max_cs_nb) {
2660 cs, nfc->caps->max_cs_nb);
2664 if (test_and_set_bit(cs, &nfc->assigned_cs)) {
2692 if (pdata || nfc->caps->legacy_of_bindings) {
2706 if (rb >= nfc->caps->max_rb_nb) {
2708 rb, nfc->caps->max_rb_nb);
2716 chip->controller = &nfc->controller;
2729 marvell_nand->ndtr0 = readl_relaxed(nfc->regs + NDTR0);
2730 marvell_nand->ndtr1 = readl_relaxed(nfc->regs + NDTR1);
2751 list_add_tail(&marvell_nand->node, &nfc->chips);
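
The per-chip init between 2600 and 2751 validates each chip-select and ready/busy line against the controller capabilities and uses the nfc->assigned_cs bitmap to refuse a CS already claimed by another chip, before hooking the chip to the shared controller and adding it to nfc->chips. A sketch of that CS bookkeeping, with the error codes simplified:

    #include <linux/bitops.h>
    #include <linux/errno.h>

    /* Each CS line may be claimed by at most one chip on the controller;
     * test_and_set_bit() both checks and records the claim atomically. */
    static int nfc_claim_cs(unsigned long *assigned_cs, unsigned int cs,
                            unsigned int max_cs_nb)
    {
            if (cs >= max_cs_nb)
                    return -EINVAL;

            if (test_and_set_bit(cs, assigned_cs))
                    return -EBUSY;  /* already taken by another chip */

            return 0;
    }
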
2756 static void marvell_nand_chips_cleanup(struct marvell_nfc *nfc)
2762 list_for_each_entry_safe(entry, temp, &nfc->chips, node) {
2771 static int marvell_nand_chips_init(struct device *dev, struct marvell_nfc *nfc)
2775 int max_cs = nfc->caps->max_cs_nb;
2796 if (nfc->caps->legacy_of_bindings) {
2797 ret = marvell_nand_chip_init(dev, nfc, np);
2802 ret = marvell_nand_chip_init(dev, nfc, nand_np);
2812 marvell_nand_chips_cleanup(nfc);
2817 static int marvell_nfc_init_dma(struct marvell_nfc *nfc)
2819 struct platform_device *pdev = container_of(nfc->dev,
2827 dev_warn(nfc->dev,
2832 ret = dma_set_mask_and_coherent(nfc->dev, DMA_BIT_MASK(32));
2836 nfc->dma_chan = dma_request_chan(nfc->dev, "data");
2837 if (IS_ERR(nfc->dma_chan)) {
2838 ret = PTR_ERR(nfc->dma_chan);
2839 nfc->dma_chan = NULL;
2840 return dev_err_probe(nfc->dev, ret, "DMA channel request failed\n");
2855 ret = dmaengine_slave_config(nfc->dma_chan, &config);
2857 dev_err(nfc->dev, "Failed to configure DMA channel\n");
2867 nfc->dma_buf = kmalloc(MAX_CHUNK_SIZE, GFP_KERNEL | GFP_DMA);
2868 if (!nfc->dma_buf) {
2873 nfc->use_dma = true;
2878 dma_release_channel(nfc->dma_chan);
2879 nfc->dma_chan = NULL;
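
DMA setup (2817 to 2879) is best-effort: the device is constrained to 32-bit DMA, a "data" slave channel is requested (dev_err_probe() handles deferral), the channel is configured, and a single MAX_CHUNK_SIZE bounce buffer is allocated with GFP_DMA; any failure after the channel request releases it so the driver falls back to PIO. A sketch of that sequence; the buffer size and the slave configuration (left to the caller) are assumptions:

    #include <linux/device.h>
    #include <linux/dma-mapping.h>
    #include <linux/dmaengine.h>
    #include <linux/slab.h>

    #define MAX_CHUNK_SIZE  2112    /* assumed: largest data + spare chunk */

    static int nfc_init_dma(struct device *dev, struct dma_slave_config *config,
                            struct dma_chan **chan_out, void **buf_out)
    {
            struct dma_chan *chan;
            void *buf;
            int ret;

            ret = dma_set_mask_and_coherent(dev, DMA_BIT_MASK(32));
            if (ret)
                    return ret;

            chan = dma_request_chan(dev, "data");
            if (IS_ERR(chan))
                    return dev_err_probe(dev, PTR_ERR(chan),
                                         "DMA channel request failed\n");

            ret = dmaengine_slave_config(chan, config);
            if (ret)
                    goto release;

            /* One bounce buffer shared by every transfer; GFP_DMA keeps it in
             * the window the controller can address. */
            buf = kmalloc(MAX_CHUNK_SIZE, GFP_KERNEL | GFP_DMA);
            if (!buf) {
                    ret = -ENOMEM;
                    goto release;
            }

            *chan_out = chan;
            *buf_out = buf;
            return 0;

    release:
            dma_release_channel(chan);
            return ret;
    }
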
2884 static void marvell_nfc_reset(struct marvell_nfc *nfc)
2894 NDCR_RD_ID_CNT(NFCV1_READID_LEN), nfc->regs + NDCR);
2895 writel_relaxed(0xFFFFFFFF, nfc->regs + NDSR);
2896 writel_relaxed(0, nfc->regs + NDECCCTRL);
2899 static int marvell_nfc_init(struct marvell_nfc *nfc)
2901 struct device_node *np = nfc->dev->of_node;
2909 if (nfc->caps->need_system_controller) {
2930 if (!nfc->caps->is_nfcv2)
2931 marvell_nfc_init_dma(nfc);
2933 marvell_nfc_reset(nfc);
2941 struct marvell_nfc *nfc;
2945 nfc = devm_kzalloc(&pdev->dev, sizeof(struct marvell_nfc),
2947 if (!nfc)
2950 nfc->dev = dev;
2951 nand_controller_init(&nfc->controller);
2952 nfc->controller.ops = &marvell_nand_controller_ops;
2953 INIT_LIST_HEAD(&nfc->chips);
2955 nfc->regs = devm_platform_ioremap_resource(pdev, 0);
2956 if (IS_ERR(nfc->regs))
2957 return PTR_ERR(nfc->regs);
2963 nfc->core_clk = devm_clk_get(&pdev->dev, "core");
2966 if (nfc->core_clk == ERR_PTR(-ENOENT))
2967 nfc->core_clk = devm_clk_get(&pdev->dev, NULL);
2969 if (IS_ERR(nfc->core_clk))
2970 return PTR_ERR(nfc->core_clk);
2972 ret = clk_prepare_enable(nfc->core_clk);
2976 nfc->reg_clk = devm_clk_get(&pdev->dev, "reg");
2977 if (IS_ERR(nfc->reg_clk)) {
2978 if (PTR_ERR(nfc->reg_clk) != -ENOENT) {
2979 ret = PTR_ERR(nfc->reg_clk);
2983 nfc->reg_clk = NULL;
2986 ret = clk_prepare_enable(nfc->reg_clk);
2990 marvell_nfc_disable_int(nfc, NDCR_ALL_INT);
2991 marvell_nfc_clear_int(nfc, NDCR_ALL_INT);
2993 0, "marvell-nfc", nfc);
2999 nfc->caps = (void *)pdev->id_entry->driver_data;
3001 nfc->caps = of_device_get_match_data(&pdev->dev);
3003 if (!nfc->caps) {
3010 ret = marvell_nfc_init(nfc);
3014 platform_set_drvdata(pdev, nfc);
3016 ret = marvell_nand_chips_init(dev, nfc);
3023 if (nfc->use_dma)
3024 dma_release_channel(nfc->dma_chan);
3026 clk_disable_unprepare(nfc->reg_clk);
3028 clk_disable_unprepare(nfc->core_clk);
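
Probe (2941 onwards) follows the usual devm pattern: allocate the controller state, map the registers, then sort out the clocks. The clock handling is the interesting part of these matches: "core" is the preferred name but legacy bindings only expose a single unnamed clock, and the "reg" clock is optional, -ENOENT meaning "not there" rather than an error. A sketch of that lookup (on recent kernels the optional clock could also use devm_clk_get_optional()); the helper name is illustrative, the clock names come from the matches:

    #include <linux/clk.h>
    #include <linux/err.h>
    #include <linux/errno.h>
    #include <linux/platform_device.h>

    static int nfc_get_clocks(struct platform_device *pdev,
                              struct clk **core_clk, struct clk **reg_clk)
    {
            /* Prefer the named "core" clock, fall back to the single unnamed
             * clock provided by the legacy bindings. */
            *core_clk = devm_clk_get(&pdev->dev, "core");
            if (*core_clk == ERR_PTR(-ENOENT))
                    *core_clk = devm_clk_get(&pdev->dev, NULL);
            if (IS_ERR(*core_clk))
                    return PTR_ERR(*core_clk);

            /* The "reg" clock is optional: -ENOENT simply means it does not
             * exist, and the clk API accepts a NULL clock everywhere. */
            *reg_clk = devm_clk_get(&pdev->dev, "reg");
            if (IS_ERR(*reg_clk)) {
                    if (PTR_ERR(*reg_clk) != -ENOENT)
                            return PTR_ERR(*reg_clk);
                    *reg_clk = NULL;
            }

            return 0;
    }
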
3035 struct marvell_nfc *nfc = platform_get_drvdata(pdev);
3037 marvell_nand_chips_cleanup(nfc);
3039 if (nfc->use_dma) {
3040 dmaengine_terminate_all(nfc->dma_chan);
3041 dma_release_channel(nfc->dma_chan);
3044 clk_disable_unprepare(nfc->reg_clk);
3045 clk_disable_unprepare(nfc->core_clk);
3050 struct marvell_nfc *nfc = dev_get_drvdata(dev);
3053 list_for_each_entry(chip, &nfc->chips, node)
3056 clk_disable_unprepare(nfc->reg_clk);
3057 clk_disable_unprepare(nfc->core_clk);
3064 struct marvell_nfc *nfc = dev_get_drvdata(dev);
3067 ret = clk_prepare_enable(nfc->core_clk);
3071 ret = clk_prepare_enable(nfc->reg_clk);
3073 clk_disable_unprepare(nfc->core_clk);
3078 * Reset nfc->selected_chip so the next command will cause the timing
3081 nfc->selected_chip = NULL;
3084 marvell_nfc_reset(nfc);
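
Suspend (3050) gates the reg and core clocks after quiescing the chips; resume (3064 onwards) re-enables them, clears nfc->selected_chip so the next command reprograms the per-chip timings, and resets the controller registers. A sketch of the resume-side clock bring-up with its unwind on failure; the selected_chip and reset steps are exactly the two statements shown in the matches:

    #include <linux/clk.h>

    /* Re-enable clocks in the reverse order of suspend; if the second one
     * fails, undo the first so the clock framework stays balanced. */
    static int nfc_resume_clocks(struct clk *core_clk, struct clk *reg_clk)
    {
            int ret;

            ret = clk_prepare_enable(core_clk);
            if (ret)
                    return ret;

            ret = clk_prepare_enable(reg_clk);
            if (ret) {
                    clk_disable_unprepare(core_clk);
                    return ret;
            }

            return 0;
    }
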
3186 .name = "marvell-nfc",