Lines Matching defs:ecc
(uses of struct edma_cc in the TI eDMA dmaengine driver, drivers/dma/ti/edma.c; the leading numbers are line numbers within that file)

219 	struct edma_cc			*ecc;
300 static inline unsigned int edma_read(struct edma_cc *ecc, int offset)
302 return (unsigned int)__raw_readl(ecc->base + offset);
305 static inline void edma_write(struct edma_cc *ecc, int offset, int val)
307 __raw_writel(val, ecc->base + offset);
310 static inline void edma_modify(struct edma_cc *ecc, int offset, unsigned and,
313 unsigned val = edma_read(ecc, offset);
317 edma_write(ecc, offset, val);
320 static inline void edma_or(struct edma_cc *ecc, int offset, unsigned or)
322 unsigned val = edma_read(ecc, offset);
325 edma_write(ecc, offset, val);
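
The four base accessors above are the whole MMIO layer: everything else is built from edma_read() and edma_write(), with edma_modify() and edma_or() as read-modify-write compositions of the two. A minimal user-space sketch of the same layering, assuming a plain array in place of the ioremapped ecc->base (the cc_* names are mine, not the driver's):

    #include <stdint.h>
    #include <stdio.h>

    /* Stand-in for the ioremapped register window (ecc->base). */
    static uint32_t regs[64];

    static inline uint32_t cc_read(int offset)
    {
            return regs[offset >> 2];            /* __raw_readl(base + offset) */
    }

    static inline void cc_write(int offset, uint32_t val)
    {
            regs[offset >> 2] = val;             /* __raw_writel(val, base + offset) */
    }

    /* Read-modify-write: clear bits with 'and', then set bits with 'or'. */
    static inline void cc_modify(int offset, uint32_t and, uint32_t or)
    {
            cc_write(offset, (cc_read(offset) & and) | or);
    }

    /* Set bits only; equivalent to cc_modify(offset, ~0u, or). */
    static inline void cc_or(int offset, uint32_t or)
    {
            cc_write(offset, cc_read(offset) | or);
    }

    int main(void)
    {
            cc_write(0x0, 0xff);
            cc_modify(0x0, ~0x0fu, 0x3);         /* keep high bits, low nibble = 3 */
            printf("0x%08x\n", cc_read(0x0));    /* prints 0x000000f3 */
            return 0;
    }
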
328 static inline unsigned int edma_read_array(struct edma_cc *ecc, int offset,
331 return edma_read(ecc, offset + (i << 2));
334 static inline void edma_write_array(struct edma_cc *ecc, int offset, int i,
337 edma_write(ecc, offset + (i << 2), val);
340 static inline void edma_modify_array(struct edma_cc *ecc, int offset, int i,
343 edma_modify(ecc, offset + (i << 2), and, or);
346 static inline void edma_or_array2(struct edma_cc *ecc, int offset, int i, int j,
349 edma_or(ecc, offset + ((i * 2 + j) << 2), or);
352 static inline void edma_write_array2(struct edma_cc *ecc, int offset, int i,
355 edma_write(ecc, offset + ((i * 2 + j) << 2), val);
358 static inline unsigned int edma_shadow0_read_array(struct edma_cc *ecc,
361 return edma_read(ecc, EDMA_SHADOW0 + offset + (i << 2));
364 static inline void edma_shadow0_write(struct edma_cc *ecc, int offset,
367 edma_write(ecc, EDMA_SHADOW0 + offset, val);
370 static inline void edma_shadow0_write_array(struct edma_cc *ecc, int offset,
373 edma_write(ecc, EDMA_SHADOW0 + offset + (i << 2), val);
376 static inline void edma_param_modify(struct edma_cc *ecc, int offset,
379 edma_modify(ecc, EDMA_PARM + offset + (param_no << 5), and, or);
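
Every accessor from line 328 to 379 is pure offset arithmetic over the two primitives: i << 2 steps through an array of 32-bit registers, (i * 2 + j) << 2 addresses a two-registers-per-row array (as edma_or_array2() does for DRAE/DRAEH), EDMA_SHADOW0 + offset rebases a global register into the region-0 shadow window, and param_no << 5 selects a 32-byte PaRAM set (the same shift edma_set_chmap() applies at line 397). A compilable sketch of the arithmetic; the base offsets are illustrative stand-ins:

    #include <stdio.h>

    #define SHADOW0_BASE    0x2000  /* stand-in for EDMA_SHADOW0 */
    #define PARM_BASE       0x4000  /* stand-in for EDMA_PARM */

    /* Array of 32-bit registers: element i sits 4 * i bytes in. */
    static int array_off(int base, int i)         { return base + (i << 2); }

    /* Two registers per row (e.g. DRAE/DRAEH): element (i, j). */
    static int array2_off(int base, int i, int j) { return base + ((i * 2 + j) << 2); }

    /* Region-0 shadow alias of a global register. */
    static int shadow0_off(int off, int i)        { return SHADOW0_BASE + off + (i << 2); }

    /* One PaRAM set is 32 bytes, so set n starts at n << 5. */
    static int param_off(int off, int param_no)   { return PARM_BASE + off + (param_no << 5); }

    int main(void)
    {
            printf("array[1]     -> 0x%x\n", array_off(0x300, 1));     /* 0x304  */
            printf("array2[0][1] -> 0x%x\n", array2_off(0x340, 0, 1)); /* 0x344  */
            printf("shadow0[1]   -> 0x%x\n", shadow0_off(0x10, 1));    /* 0x2014 */
            printf("param set 3  -> 0x%x\n", param_off(0x0, 3));       /* 0x4060 */
            return 0;
    }
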
382 static void edma_assign_priority_to_queue(struct edma_cc *ecc, int queue_no,
387 edma_modify(ecc, EDMA_QUEPRI, ~(0x7 << bit), ((priority & 0x7) << bit));
392 struct edma_cc *ecc = echan->ecc;
395 if (ecc->chmap_exist) {
397 edma_write_array(ecc, EDMA_DCHMAP, channel, (slot << 5));
403 struct edma_cc *ecc = echan->ecc;
409 edma_shadow0_write_array(ecc, SH_ICR, idx, ch_bit);
410 edma_shadow0_write_array(ecc, SH_IESR, idx, ch_bit);
412 edma_shadow0_write_array(ecc, SH_IECR, idx, ch_bit);
419 static void edma_write_slot(struct edma_cc *ecc, unsigned slot,
423 if (slot >= ecc->num_slots)
425 memcpy_toio(ecc->base + PARM_OFFSET(slot), param, PARM_SIZE);
428 static int edma_read_slot(struct edma_cc *ecc, unsigned slot,
432 if (slot >= ecc->num_slots)
434 memcpy_fromio(param, ecc->base + PARM_OFFSET(slot), PARM_SIZE);
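
edma_write_slot() and edma_read_slot() bounds-check the slot index and then block-copy one whole parameter set (PARM_SIZE bytes) to or from parameter RAM with memcpy_toio()/memcpy_fromio(). A user-space sketch of the pattern, assuming a layout that mirrors struct edmacc_param (eight 32-bit words, 0x20 bytes):

    #include <stdint.h>
    #include <string.h>

    /* The eight 32-bit words of one PaRAM set (mirrors struct edmacc_param). */
    struct param_set {
            uint32_t opt, src, a_b_cnt, dst;
            uint32_t src_dst_bidx, link_bcntrld, src_dst_cidx, ccnt;
    };

    #define NUM_SLOTS       128
    #define PARM_SIZE       sizeof(struct param_set)        /* 0x20 bytes */

    static uint8_t param_ram[NUM_SLOTS * PARM_SIZE];        /* stand-in for PaRAM */

    /* Bounds-check, then block-copy; shaped like edma_write_slot(). */
    static void write_slot(unsigned slot, const struct param_set *p)
    {
            if (slot >= NUM_SLOTS)
                    return;
            memcpy(&param_ram[slot * PARM_SIZE], p, PARM_SIZE); /* memcpy_toio */
    }

    /* Shaped like edma_read_slot(), which returns -EINVAL out of range. */
    static int read_slot(unsigned slot, struct param_set *p)
    {
            if (slot >= NUM_SLOTS)
                    return -22;                             /* -EINVAL */
            memcpy(p, &param_ram[slot * PARM_SIZE], PARM_SIZE); /* memcpy_fromio */
            return 0;
    }

    int main(void)
    {
            struct param_set p = { .opt = 0 }, q;

            write_slot(5, &p);              /* clear slot 5 (cf. dummy_paramset) */
            return read_slot(5, &q);        /* 0 on success */
    }
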
441 * @ecc: pointer to edma_cc struct
454 static int edma_alloc_slot(struct edma_cc *ecc, int slot)
459 if (ecc->chmap_exist && slot < ecc->num_channels)
464 if (ecc->chmap_exist)
467 slot = ecc->num_channels;
469 slot = find_next_zero_bit(ecc->slot_inuse,
470 ecc->num_slots,
472 if (slot == ecc->num_slots)
474 if (!test_and_set_bit(slot, ecc->slot_inuse))
477 } else if (slot >= ecc->num_slots) {
479 } else if (test_and_set_bit(slot, ecc->slot_inuse)) {
483 edma_write_slot(ecc, slot, &dummy_paramset);
485 return EDMA_CTLR_CHAN(ecc->id, slot);
488 static void edma_free_slot(struct edma_cc *ecc, unsigned slot)
491 if (slot >= ecc->num_slots)
494 edma_write_slot(ecc, slot, &dummy_paramset);
495 clear_bit(slot, ecc->slot_inuse);
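
edma_alloc_slot() treats a negative slot number (EDMA_SLOT_ANY) as "find me one": it scans ecc->slot_inuse with find_next_zero_bit(), skipping the first num_channels slots when there is no channel map because those double as the channels' entry PaRAM sets. A specific request is bounds-checked and claimed atomically with test_and_set_bit(). A simplified sketch of that shape (a byte array stands in for the kernel bitmap, and the atomicity of test_and_set_bit() is not modeled):

    #include <stdio.h>

    #define NUM_SLOTS       128
    #define NUM_CHANNELS    32
    #define SLOT_ANY        (-1)

    static unsigned char slot_inuse[NUM_SLOTS];

    static int alloc_slot(int slot, int chmap_exist)
    {
            /* With a channel map, the entry slots are off-limits for
             * direct requests; such a request falls back to "any". */
            if (slot >= 0 && chmap_exist && slot < NUM_CHANNELS)
                    slot = SLOT_ANY;

            if (slot < 0) {
                    for (slot = chmap_exist ? 0 : NUM_CHANNELS;
                         slot < NUM_SLOTS && slot_inuse[slot]; slot++)
                            ;
                    if (slot == NUM_SLOTS)
                            return -12;             /* -ENOMEM */
            } else if (slot >= NUM_SLOTS) {
                    return -22;                     /* -EINVAL */
            } else if (slot_inuse[slot]) {
                    return -16;                     /* -EBUSY */
            }
            slot_inuse[slot] = 1;
            /* The driver then writes dummy_paramset to the slot and
             * returns EDMA_CTLR_CHAN(ecc->id, slot); edma_free_slot()
             * reverses both steps. */
            return slot;
    }

    int main(void)
    {
            printf("%d\n", alloc_slot(SLOT_ANY, 0)); /* 32: first non-channel slot */
            printf("%d\n", alloc_slot(40, 0));       /* 40: explicit request */
            printf("%d\n", alloc_slot(40, 0));       /* -16: already taken */
            return 0;
    }
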
500 * @ecc: pointer to edma_cc struct
506 static void edma_link(struct edma_cc *ecc, unsigned from, unsigned to)
509 dev_warn(ecc->dev, "Ignoring eDMA instance for linking\n");
513 if (from >= ecc->num_slots || to >= ecc->num_slots)
516 edma_param_modify(ecc, PARM_LINK_BCNTRLD, from, 0xffff0000,
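
edma_link() chains PaRAM sets: the low 16 bits of a set's LINK_BCNTRLD word hold the byte address of the set the hardware reloads when the current one completes, so linking rewrites only that half-word (the 0xffff0000 mask preserves the BCNTRLD reload count in the upper half). A sketch of the field packing, with PARM_BASE as an illustrative stand-in for EDMA_PARM:

    #include <stdint.h>
    #include <stdio.h>

    #define PARM_BASE       0x4000
    #define PARM_OFFSET(n)  (PARM_BASE + ((n) << 5))

    /* Keep the reload count (bits 31:16), point the link field
     * (bits 15:0) at the PaRAM address of slot 'to'. */
    static uint32_t link_slots(uint32_t link_bcntrld, unsigned to)
    {
            return (link_bcntrld & 0xffff0000) | (PARM_OFFSET(to) & 0xffff);
    }

    int main(void)
    {
            /* e.g. BCNTRLD of 8, then link to slot 5 at offset 0x40a0 */
            printf("0x%08x\n", link_slots(8u << 16, 5));    /* 0x000840a0 */
            return 0;
    }
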
522 * @ecc: pointer to edma_cc struct
528 static dma_addr_t edma_get_position(struct edma_cc *ecc, unsigned slot,
537 return edma_read(ecc, offs);
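
edma_get_position() relies on the CC updating the SRC and DST words of the active PaRAM set as the transfer progresses, so the current bus address can be read straight out of parameter RAM; the residue code around line 1782 uses it. A sketch of the offset it computes, with word positions following struct edmacc_param and PARM_BASE again illustrative:

    #include <stdio.h>

    #define PARM_BASE       0x4000
    #define PARM_SRC        0x04    /* word 1: source address */
    #define PARM_DST        0x0c    /* word 3: destination address */

    /* Offset of the live position word for a slot, per direction. */
    static unsigned position_offset(unsigned slot, int dst)
    {
            return PARM_BASE + (slot << 5) + (dst ? PARM_DST : PARM_SRC);
    }

    int main(void)
    {
            printf("slot 2, dst -> 0x%x\n", position_offset(2, 1)); /* 0x404c */
            return 0;
    }
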
548 struct edma_cc *ecc = echan->ecc;
555 dev_dbg(ecc->dev, "ESR%d %08x\n", idx,
556 edma_shadow0_read_array(ecc, SH_ESR, idx));
557 edma_shadow0_write_array(ecc, SH_ESR, idx, ch_bit);
560 dev_dbg(ecc->dev, "ER%d %08x\n", idx,
561 edma_shadow0_read_array(ecc, SH_ER, idx));
563 edma_write_array(ecc, EDMA_ECR, idx, ch_bit);
564 edma_write_array(ecc, EDMA_EMCR, idx, ch_bit);
566 edma_shadow0_write_array(ecc, SH_SECR, idx, ch_bit);
567 edma_shadow0_write_array(ecc, SH_EESR, idx, ch_bit);
568 dev_dbg(ecc->dev, "EER%d %08x\n", idx,
569 edma_shadow0_read_array(ecc, SH_EER, idx));
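
edma_start() (like edma_stop() and edma_clean_channel() below) is per-channel event bookkeeping: idx selects the 32-bit register bank and ch_bit the channel's bit within it. Manually triggered channels are fired once by setting their ESR bit; event-triggered channels first have stale and missed events cleared, then event reception enabled. A sketch of the two sequences with the register writes stubbed out by printf:

    #include <stdint.h>
    #include <stdio.h>

    static void reg_write(const char *reg, int idx, uint32_t bits)
    {
            printf("%s[%d] <- 0x%08x\n", reg, idx, bits);
    }

    static void start_channel(int channel, int hw_triggered)
    {
            int idx = channel >> 5;                 /* 32 channels per register */
            uint32_t ch_bit = 1u << (channel & 0x1f);

            if (!hw_triggered) {
                    reg_write("SH_ESR", idx, ch_bit);    /* manual trigger: fire once */
            } else {
                    reg_write("EDMA_ECR", idx, ch_bit);  /* clear stale event */
                    reg_write("EDMA_EMCR", idx, ch_bit); /* clear missed event */
                    reg_write("SH_SECR", idx, ch_bit);   /* clear secondary event */
                    reg_write("SH_EESR", idx, ch_bit);   /* enable event reception */
            }
    }

    int main(void)
    {
            start_channel(12, 0);
            start_channel(36, 1);
            return 0;
    }
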
575 struct edma_cc *ecc = echan->ecc;
580 edma_shadow0_write_array(ecc, SH_EECR, idx, ch_bit);
581 edma_shadow0_write_array(ecc, SH_ECR, idx, ch_bit);
582 edma_shadow0_write_array(ecc, SH_SECR, idx, ch_bit);
583 edma_write_array(ecc, EDMA_EMCR, idx, ch_bit);
586 edma_shadow0_write_array(ecc, SH_ICR, idx, ch_bit);
588 dev_dbg(ecc->dev, "EER%d %08x\n", idx,
589 edma_shadow0_read_array(ecc, SH_EER, idx));
604 edma_shadow0_write_array(echan->ecc, SH_EECR,
614 edma_shadow0_write_array(echan->ecc, SH_EESR,
621 struct edma_cc *ecc = echan->ecc;
626 edma_shadow0_write_array(ecc, SH_ESR, idx, ch_bit);
628 dev_dbg(ecc->dev, "ESR%d %08x\n", idx,
629 edma_shadow0_read_array(ecc, SH_ESR, idx));
634 struct edma_cc *ecc = echan->ecc;
639 dev_dbg(ecc->dev, "EMR%d %08x\n", idx,
640 edma_read_array(ecc, EDMA_EMR, idx));
641 edma_shadow0_write_array(ecc, SH_ECR, idx, ch_bit);
643 edma_write_array(ecc, EDMA_EMCR, idx, ch_bit);
645 edma_shadow0_write_array(ecc, SH_SECR, idx, ch_bit);
646 edma_write(ecc, EDMA_CCERRCLR, BIT(16) | BIT(1) | BIT(0));
653 struct edma_cc *ecc = echan->ecc;
659 eventq_no = ecc->default_queue;
660 if (eventq_no >= ecc->num_tc)
664 edma_modify_array(ecc, EDMA_DMAQNUM, (channel >> 3), ~(0x7 << bit),
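
edma_assign_channel_eventq() and edma_assign_priority_to_queue() above both pack 4-bit fields: QUEPRI holds one priority nibble per event queue (bit = queue_no * 4), while each DMAQNUM register holds eight channel-to-queue nibbles (register index channel >> 3, field position (channel & 0x7) * 4, the bit computation not shown in the listing). Only 3 bits are masked because queue numbers fit in 0..7. A sketch of the DMAQNUM update:

    #include <stdint.h>
    #include <stdio.h>

    static uint32_t dmaqnum[8];     /* enough for 64 channels */

    static void assign_channel_eventq(int channel, int eventq_no)
    {
            int bit = (channel & 0x7) * 4;
            int idx = channel >> 3;

            dmaqnum[idx] = (dmaqnum[idx] & ~(0x7u << bit)) |
                           ((eventq_no & 0x7u) << bit);
    }

    int main(void)
    {
            assign_channel_eventq(12, 2);   /* channel 12 -> queue 2 */
            printf("DMAQNUM1 = 0x%08x\n", dmaqnum[1]);      /* 0x00020000 */
            return 0;
    }
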
671 struct edma_cc *ecc = echan->ecc;
674 if (!test_bit(echan->ch_num, ecc->channels_mask)) {
675 dev_err(ecc->dev, "Channel%d is reserved, can not be used!\n",
681 edma_or_array2(ecc, EDMA_DRAE, 0, EDMA_REG_ARRAY_INDEX(channel),
720 struct edma_cc *ecc = echan->ecc;
745 edma_write_slot(ecc, echan->slot[i], &edesc->pset[j].param);
770 edma_link(ecc, echan->slot[i], echan->slot[i + 1]);
782 edma_link(ecc, echan->slot[nslots - 1], echan->slot[1]);
784 edma_link(ecc, echan->slot[nslots - 1],
785 echan->ecc->dummy_slot);
1059 edma_alloc_slot(echan->ecc, EDMA_SLOT_ANY);
1189 echan->slot[1] = edma_alloc_slot(echan->ecc,
1381 edma_alloc_slot(echan->ecc, EDMA_SLOT_ANY);
1493 struct edma_cc *ecc = data;
1499 ctlr = ecc->id;
1503 dev_vdbg(ecc->dev, "dma_irq_handler\n");
1505 sh_ipr = edma_shadow0_read_array(ecc, SH_IPR, 0);
1507 sh_ipr = edma_shadow0_read_array(ecc, SH_IPR, 1);
1510 sh_ier = edma_shadow0_read_array(ecc, SH_IER, 1);
1513 sh_ier = edma_shadow0_read_array(ecc, SH_IER, 0);
1527 edma_shadow0_write_array(ecc, SH_ICR, bank, BIT(slot));
1528 edma_completion_handler(&ecc->slave_chans[channel]);
1532 edma_shadow0_write(ecc, SH_IEVAL, 1);
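
The completion handler scans pending bits: IPR holds pending interrupts, IER the enabled ones, and the bank selection at lines 1505-1513 picks whichever 32-channel half has work. Each set bit maps to channel (bank << 5) | slot, which is acknowledged via SH_ICR before the channel's completion handler runs. A stand-alone sketch of the scan loop with a fabricated pending word:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
            uint32_t sh_ipr = 0x00010004;   /* channels 2 and 16 pending (bank 0) */
            uint32_t sh_ier = 0xffffffff;   /* all enabled */
            int bank = 0;

            while (sh_ipr) {
                    int slot = __builtin_ctz(sh_ipr);       /* __ffs() in the kernel */
                    sh_ipr &= ~(1u << slot);
                    if (sh_ier & (1u << slot)) {
                            int channel = (bank << 5) | slot;
                            /* driver: write BIT(slot) to SH_ICR[bank], then
                             * edma_completion_handler(&ecc->slave_chans[channel]) */
                            printf("complete channel %d\n", channel);
                    }
            }
            /* driver finishes with SH_IEVAL = 1 to re-evaluate interrupts */
            return 0;
    }
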
1538 struct edma_cc *ecc = echan->ecc;
1548 err = edma_read_slot(ecc, echan->slot[0], &p);
1579 static inline bool edma_error_pending(struct edma_cc *ecc)
1581 if (edma_read_array(ecc, EDMA_EMR, 0) ||
1582 edma_read_array(ecc, EDMA_EMR, 1) ||
1583 edma_read(ecc, EDMA_QEMR) || edma_read(ecc, EDMA_CCERR))
1592 struct edma_cc *ecc = data;
1598 ctlr = ecc->id;
1602 dev_vdbg(ecc->dev, "dma_ccerr_handler\n");
1604 if (!edma_error_pending(ecc)) {
1610 dev_err(ecc->dev, "%s: Error interrupt without error event!\n",
1612 edma_write(ecc, EDMA_EEVAL, 1);
1621 val = edma_read_array(ecc, EDMA_EMR, j);
1625 dev_dbg(ecc->dev, "EMR%d 0x%08x\n", j, val);
1631 edma_write_array(ecc, EDMA_EMCR, j, BIT(i));
1633 edma_shadow0_write_array(ecc, SH_SECR, j,
1635 edma_error_handler(&ecc->slave_chans[k]);
1639 val = edma_read(ecc, EDMA_QEMR);
1641 dev_dbg(ecc->dev, "QEMR 0x%02x\n", val);
1643 edma_write(ecc, EDMA_QEMCR, val);
1644 edma_shadow0_write(ecc, SH_QSECR, val);
1647 val = edma_read(ecc, EDMA_CCERR);
1649 dev_warn(ecc->dev, "CCERR 0x%08x\n", val);
1651 edma_write(ecc, EDMA_CCERRCLR, val);
1654 if (!edma_error_pending(ecc))
1660 edma_write(ecc, EDMA_EEVAL, 1);
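
The error handler walks three independent sources and acknowledges each through its write-1-to-clear companion, looping until edma_error_pending() reports quiet and finishing with an EEVAL write in case new errors latched meanwhile. A compilable summary of the pairings, with register names taken from the listing above:

    #include <stdio.h>

    /* Error sources polled by the CCERR handler and the registers used
     * to acknowledge them (per-channel sources also notify the channel). */
    static const struct {
            const char *status, *clear;
    } err_src[] = {
            { "EDMA_EMR[j] (missed events)", "EDMA_EMCR[j] and SH_SECR[j]" },
            { "EDMA_QEMR (QDMA missed)",     "EDMA_QEMCR and SH_QSECR"     },
            { "EDMA_CCERR (CC errors)",      "EDMA_CCERRCLR"               },
    };

    int main(void)
    {
            /* The real handler loops until edma_error_pending() is false,
             * then writes EDMA_EEVAL = 1 to re-evaluate latched errors. */
            for (unsigned i = 0; i < sizeof(err_src) / sizeof(err_src[0]); i++)
                    printf("%-30s -> %s\n", err_src[i].status, err_src[i].clear);
            return 0;
    }
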
1668 struct edma_cc *ecc = echan->ecc;
1669 struct device *dev = ecc->dev;
1675 } else if (ecc->tc_list) {
1677 echan->tc = &ecc->tc_list[ecc->info->default_queue];
1685 echan->slot[0] = edma_alloc_slot(ecc, echan->ch_num);
1712 struct device *dev = echan->ecc->dev;
1723 edma_free_slot(echan->ecc, echan->slot[i]);
1729 edma_set_chmap(echan, echan->ecc->dummy_slot);
1782 pos = edma_get_position(echan->ecc, echan->slot[0], dst);
1798 while (edma_shadow0_read_array(echan->ecc, event_reg, idx) & ch_bit) {
1799 pos = edma_get_position(echan->ecc, echan->slot[0], dst);
1922 static void edma_dma_init(struct edma_cc *ecc, bool legacy_mode)
1924 struct dma_device *s_ddev = &ecc->dma_slave;
1926 s32 *memcpy_channels = ecc->info->memcpy_channels;
1932 if (ecc->legacy_mode && !memcpy_channels) {
1933 dev_warn(ecc->dev,
1961 s_ddev->dev = ecc->dev;
1965 m_ddev = devm_kzalloc(ecc->dev, sizeof(*m_ddev), GFP_KERNEL);
1967 dev_warn(ecc->dev, "memcpy is disabled due to OoM\n");
1971 ecc->dma_memcpy = m_ddev;
1994 m_ddev->dev = ecc->dev;
1996 } else if (!ecc->legacy_mode) {
1997 dev_info(ecc->dev, "memcpy is disabled\n");
2001 for (i = 0; i < ecc->num_channels; i++) {
2002 struct edma_chan *echan = &ecc->slave_chans[i];
2003 echan->ch_num = EDMA_CTLR_CHAN(ecc->id, i);
2004 echan->ecc = ecc;
2019 struct edma_cc *ecc)
2026 cccfg = edma_read(ecc, EDMA_CCCFG);
2029 ecc->num_region = BIT(value);
2032 ecc->num_channels = BIT(value + 1);
2035 ecc->num_qchannels = value * 2;
2038 ecc->num_slots = BIT(value + 4);
2041 ecc->num_tc = value + 1;
2043 ecc->chmap_exist = (cccfg & CHMAP_EXIST) ? true : false;
2046 dev_dbg(dev, "num_region: %u\n", ecc->num_region);
2047 dev_dbg(dev, "num_channels: %u\n", ecc->num_channels);
2048 dev_dbg(dev, "num_qchannels: %u\n", ecc->num_qchannels);
2049 dev_dbg(dev, "num_slots: %u\n", ecc->num_slots);
2050 dev_dbg(dev, "num_tc: %u\n", ecc->num_tc);
2051 dev_dbg(dev, "chmap_exist: %s\n", ecc->chmap_exist ? "yes" : "no");
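
edma_setup_from_hw() sizes the driver entirely from the CCCFG capability register rather than trusting platform data; each field is a small count or power-of-two encoding. A decode sketch following the bit positions of the driver's GET_NUM_* macros; the register value is illustrative, chosen to decode like a 64-channel, 256-slot, 3-TC controller with a channel map:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
            uint32_t cccfg = 0x01224045;

            unsigned num_channels  = 1u << ((cccfg & 0x7) + 1);         /* NUM_DMACH   */
            unsigned num_qchannels = ((cccfg >> 4) & 0x7) * 2;          /* NUM_QDMACH  */
            unsigned num_slots     = 1u << (((cccfg >> 12) & 0x7) + 4); /* NUM_PAENTRY */
            unsigned num_tc        = ((cccfg >> 16) & 0x7) + 1;         /* NUM_EVQUE   */
            unsigned num_region    = 1u << ((cccfg >> 20) & 0x3);       /* NUM_REGN    */
            int      chmap_exist   = !!(cccfg & (1u << 24));            /* CHMAP_EXIST */

            printf("channels=%u qchannels=%u slots=%u tc=%u regions=%u chmap=%d\n",
                   num_channels, num_qchannels, num_slots, num_tc, num_region,
                   chmap_exist);
            return 0;
    }
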
2067 queue_priority_map = devm_kcalloc(dev, ecc->num_tc + 1, sizeof(s8),
2072 for (i = 0; i < ecc->num_tc; i++) {
2232 struct edma_cc *ecc = ofdma->of_dma_data;
2237 if (!ecc || dma_spec->args_count < 1)
2240 for (i = 0; i < ecc->num_channels; i++) {
2241 echan = &ecc->slave_chans[i];
2251 if (echan->ecc->legacy_mode && dma_spec->args_count == 1)
2254 if (!echan->ecc->legacy_mode && dma_spec->args_count == 2 &&
2255 dma_spec->args[1] < echan->ecc->num_tc) {
2256 echan->tc = &echan->ecc->tc_list[dma_spec->args[1]];
2292 struct edma_cc *ecc;
2317 ecc = devm_kzalloc(dev, sizeof(*ecc), GFP_KERNEL);
2318 if (!ecc)
2321 ecc->dev = dev;
2322 ecc->id = pdev->id;
2323 ecc->legacy_mode = legacy_mode;
2325 if (ecc->id < 0)
2326 ecc->id = 0;
2337 ecc->base = devm_ioremap_resource(dev, mem);
2338 if (IS_ERR(ecc->base))
2339 return PTR_ERR(ecc->base);
2341 platform_set_drvdata(pdev, ecc);
2352 ret = edma_setup_from_hw(dev, info, ecc);
2357 ecc->slave_chans = devm_kcalloc(dev, ecc->num_channels,
2358 sizeof(*ecc->slave_chans), GFP_KERNEL);
2360 ecc->slot_inuse = devm_kcalloc(dev, BITS_TO_LONGS(ecc->num_slots),
2363 ecc->channels_mask = devm_kcalloc(dev,
2364 BITS_TO_LONGS(ecc->num_channels),
2366 if (!ecc->slave_chans || !ecc->slot_inuse || !ecc->channels_mask) {
2372 bitmap_fill(ecc->channels_mask, ecc->num_channels);
2374 ecc->default_queue = info->default_queue;
2381 bitmap_set(ecc->slot_inuse, reserved[i][0],
2389 bitmap_clear(ecc->channels_mask, reserved[i][0],
2394 for (i = 0; i < ecc->num_slots; i++) {
2396 if (!test_bit(i, ecc->slot_inuse))
2397 edma_write_slot(ecc, i, &dummy_paramset);
2413 ecc);
2418 ecc->ccint = irq;
2434 ecc);
2439 ecc->ccerrint = irq;
2442 ecc->dummy_slot = edma_alloc_slot(ecc, EDMA_SLOT_ANY);
2443 if (ecc->dummy_slot < 0) {
2445 ret = ecc->dummy_slot;
2451 if (!ecc->legacy_mode) {
2456 ecc->tc_list = devm_kcalloc(dev, ecc->num_tc,
2457 sizeof(*ecc->tc_list), GFP_KERNEL);
2458 if (!ecc->tc_list) {
2466 if (ret || i == ecc->num_tc)
2469 ecc->tc_list[i].node = tc_args.np;
2470 ecc->tc_list[i].id = i;
2479 array_max = DIV_ROUND_UP(ecc->num_channels, BITS_PER_TYPE(u32));
2482 (u32 *)ecc->channels_mask,
2493 edma_assign_priority_to_queue(ecc, queue_priority_mapping[i][0],
2496 edma_write_array2(ecc, EDMA_DRAE, 0, 0, 0x0);
2497 edma_write_array2(ecc, EDMA_DRAE, 0, 1, 0x0);
2498 edma_write_array(ecc, EDMA_QRAE, 0, 0x0);
2500 ecc->info = info;
2503 edma_dma_init(ecc, legacy_mode);
2505 for (i = 0; i < ecc->num_channels; i++) {
2507 if (!test_bit(i, ecc->channels_mask))
2511 edma_assign_channel_eventq(&ecc->slave_chans[i],
2514 edma_set_chmap(&ecc->slave_chans[i], ecc->dummy_slot);
2517 ecc->dma_slave.filter.map = info->slave_map;
2518 ecc->dma_slave.filter.mapcnt = info->slavecnt;
2519 ecc->dma_slave.filter.fn = edma_filter_fn;
2521 ret = dma_async_device_register(&ecc->dma_slave);
2527 if (ecc->dma_memcpy) {
2528 ret = dma_async_device_register(ecc->dma_memcpy);
2532 dma_async_device_unregister(&ecc->dma_slave);
2538 of_dma_controller_register(node, of_edma_xlate, ecc);
2545 edma_free_slot(ecc, ecc->dummy_slot);
2566 struct edma_cc *ecc = dev_get_drvdata(dev);
2568 devm_free_irq(dev, ecc->ccint, ecc);
2569 devm_free_irq(dev, ecc->ccerrint, ecc);
2571 edma_cleanup_vchan(&ecc->dma_slave);
2575 dma_async_device_unregister(&ecc->dma_slave);
2576 if (ecc->dma_memcpy)
2577 dma_async_device_unregister(ecc->dma_memcpy);
2578 edma_free_slot(ecc, ecc->dummy_slot);
2586 struct edma_cc *ecc = dev_get_drvdata(dev);
2587 struct edma_chan *echan = ecc->slave_chans;
2590 for (i = 0; i < ecc->num_channels; i++) {
2600 struct edma_cc *ecc = dev_get_drvdata(dev);
2601 struct edma_chan *echan = ecc->slave_chans;
2606 edma_write_slot(ecc, ecc->dummy_slot, &dummy_paramset);
2608 queue_priority_mapping = ecc->info->queue_priority_mapping;
2612 edma_assign_priority_to_queue(ecc, queue_priority_mapping[i][0],
2615 for (i = 0; i < ecc->num_channels; i++) {
2618 edma_or_array2(ecc, EDMA_DRAE, 0,