Lines Matching refs:sc  (references to the driver softc pointer, struct ti_sdma_softc *sc)

153  *	@sc: DMA device context
161 ti_sdma_read_4(struct ti_sdma_softc *sc, bus_size_t off)
163 return bus_read_4(sc->sc_mem_res, off);
168 * @sc: DMA device context
176 ti_sdma_write_4(struct ti_sdma_softc *sc, bus_size_t off, uint32_t val)
178 bus_write_4(sc->sc_mem_res, off, val);
183 * @sc: DMA device context
187 ti_sdma_is_omap3_rev(struct ti_sdma_softc *sc)
189 return (sc->sc_hw_rev == DMA4_OMAP3_REV);
194 * @sc: DMA device context
198 ti_sdma_is_omap4_rev(struct ti_sdma_softc *sc)
200 return (sc->sc_hw_rev == DMA4_OMAP4_REV);
218 struct ti_sdma_softc *sc = ti_sdma_sc;
224 TI_SDMA_LOCK(sc);
229 intr = ti_sdma_read_4(sc, DMA4_IRQSTATUS_L(j));
230 intr &= ti_sdma_read_4(sc, DMA4_IRQENABLE_L(j));
237 channel = &sc->sc_channel[ch];
240 csr = ti_sdma_read_4(sc, DMA4_CSR(ch));
242 device_printf(sc->sc_dev, "Spurious DMA IRQ for channel "
248 if ((sc->sc_active_channels & (1 << ch)) == 0) {
249 device_printf(sc->sc_dev, "IRQ %d for a non-activated "
256 device_printf(sc->sc_dev, "Synchronization event drop "
260 device_printf(sc->sc_dev, "Secure transaction error event "
263 device_printf(sc->sc_dev, "Misaligned address error event "
266 device_printf(sc->sc_dev, "Transaction error event on "
277 ti_sdma_write_4(sc, DMA4_CSR(ch), DMA4_CSR_CLEAR_MASK);
278 ti_sdma_write_4(sc, DMA4_IRQSTATUS_L(j), (1 << ch));
287 TI_SDMA_UNLOCK(sc);
321 struct ti_sdma_softc *sc = ti_sdma_sc;
327 if (sc == NULL)
333 TI_SDMA_LOCK(sc);
336 if (sc->sc_active_channels == 0xffffffff) {
337 TI_SDMA_UNLOCK(sc);
343 if (!(sc->sc_active_channels & (0x1 << i))) {
344 sc->sc_active_channels |= (0x1 << i);
351 channel = &sc->sc_channel[*ch];
383 ti_sdma_write_4(sc, addr, 0x00000000);
385 TI_SDMA_UNLOCK(sc);
405 struct ti_sdma_softc *sc = ti_sdma_sc;
410 if (sc == NULL)
413 TI_SDMA_LOCK(sc);
416 if ((sc->sc_active_channels & (1 << ch)) == 0) {
417 TI_SDMA_UNLOCK(sc);
422 sc->sc_active_channels &= ~(1 << ch);
425 ti_sdma_write_4(sc, DMA4_CICR(ch), 0);
428 ti_sdma_write_4(sc, DMA4_CCR(ch), 0);
431 ti_sdma_write_4(sc, DMA4_CSR(ch), DMA4_CSR_CLEAR_MASK);
433 ti_sdma_write_4(sc, DMA4_IRQSTATUS_L(j), (1 << ch));
438 ti_sdma_write_4(sc, addr, 0x00000000);
440 TI_SDMA_UNLOCK(sc);
460 struct ti_sdma_softc *sc = ti_sdma_sc;
465 if (sc == NULL)
468 TI_SDMA_LOCK(sc);
470 if ((sc->sc_active_channels & (1 << ch)) == 0) {
471 TI_SDMA_UNLOCK(sc);
476 sc->sc_channel[ch].reg_cicr = 0x0000;
477 ti_sdma_write_4(sc, DMA4_CICR(ch), 0x0000);
481 irq_enable = ti_sdma_read_4(sc, DMA4_IRQENABLE_L(j));
484 ti_sdma_write_4(sc, DMA4_IRQENABLE_L(j), irq_enable);
488 sc->sc_channel[ch].need_reg_write = 1;
490 TI_SDMA_UNLOCK(sc);
519 struct ti_sdma_softc *sc = ti_sdma_sc;
523 if (sc == NULL)
526 TI_SDMA_LOCK(sc);
528 if ((sc->sc_active_channels & (1 << ch)) == 0) {
529 TI_SDMA_UNLOCK(sc);
537 sc->sc_channel[ch].reg_cicr = flags;
540 ti_sdma_write_4(sc, DMA4_CICR(ch), flags);
543 irq_enable = ti_sdma_read_4(sc, DMA4_IRQENABLE_L(0));
546 ti_sdma_write_4(sc, DMA4_IRQENABLE_L(0), irq_enable);
549 sc->sc_channel[ch].need_reg_write = 1;
551 TI_SDMA_UNLOCK(sc);
585 struct ti_sdma_softc *sc = ti_sdma_sc;
589 if (sc == NULL)
592 TI_SDMA_LOCK(sc);
594 if ((sc->sc_active_channels & (1 << ch)) == 0) {
595 TI_SDMA_UNLOCK(sc);
599 TI_SDMA_UNLOCK(sc);
601 csr = ti_sdma_read_4(sc, DMA4_CSR(ch));
630 struct ti_sdma_softc *sc = ti_sdma_sc;
635 if (sc == NULL)
638 TI_SDMA_LOCK(sc);
640 if ((sc->sc_active_channels & (1 << ch)) == 0) {
641 TI_SDMA_UNLOCK(sc);
645 channel = &sc->sc_channel[ch];
648 ti_sdma_write_4(sc, DMA4_CSDP(ch),
652 ti_sdma_write_4(sc, DMA4_CEN(ch), elmcnt);
655 ti_sdma_write_4(sc, DMA4_CFN(ch), frmcnt);
658 ti_sdma_write_4(sc, DMA4_CSSA(ch), src_paddr);
659 ti_sdma_write_4(sc, DMA4_CDSA(ch), dst_paddr);
662 ti_sdma_write_4(sc, DMA4_CCR(ch), channel->reg_ccr);
665 ti_sdma_write_4(sc, DMA4_CSE(ch), 0x0001);
668 ti_sdma_write_4(sc, DMA4_CSF(ch), 0x0001);
671 ti_sdma_write_4(sc, DMA4_CDE(ch), 0x0001);
674 ti_sdma_write_4(sc, DMA4_CDF(ch), 0x0001);
677 ti_sdma_write_4(sc, DMA4_CSR(ch), 0x1FFE);
680 ccr = ti_sdma_read_4(sc, DMA4_CCR(ch));
682 ti_sdma_write_4(sc, DMA4_CCR(ch), ccr);
687 TI_SDMA_UNLOCK(sc);
724 struct ti_sdma_softc *sc = ti_sdma_sc;
729 if (sc == NULL)
732 TI_SDMA_LOCK(sc);
734 if ((sc->sc_active_channels & (1 << ch)) == 0) {
735 TI_SDMA_UNLOCK(sc);
739 channel = &sc->sc_channel[ch];
743 ti_sdma_write_4(sc, DMA4_CSDP(ch),
747 ti_sdma_write_4(sc, DMA4_CEN(ch), elmcnt);
750 ti_sdma_write_4(sc, DMA4_CFN(ch), frmcnt);
753 ti_sdma_write_4(sc, DMA4_CSSA(ch), src_paddr);
754 ti_sdma_write_4(sc, DMA4_CDSA(ch), dst_paddr);
757 ti_sdma_write_4(sc, DMA4_CCR(ch),
761 ti_sdma_write_4(sc, DMA4_CSE(ch), 0x0001);
765 ti_sdma_write_4(sc, DMA4_CSF(ch), pktsize);
767 ti_sdma_write_4(sc, DMA4_CDF(ch), pktsize);
770 ti_sdma_write_4(sc, DMA4_CDE(ch), 0x0001);
773 ti_sdma_write_4(sc, DMA4_CSR(ch), 0x1FFE);
776 ccr = ti_sdma_read_4(sc, DMA4_CCR(ch));
778 ti_sdma_write_4(sc, DMA4_CCR(ch), ccr);
783 TI_SDMA_UNLOCK(sc);
803 struct ti_sdma_softc *sc = ti_sdma_sc;
807 if (sc == NULL)
810 TI_SDMA_LOCK(sc);
812 if ((sc->sc_active_channels & (1 << ch)) == 0) {
813 TI_SDMA_UNLOCK(sc);
818 ti_sdma_write_4(sc, DMA4_CICR(ch), 0);
821 ti_sdma_write_4(sc, DMA4_CCR(ch), 0);
824 ti_sdma_write_4(sc, DMA4_CSR(ch), DMA4_CSR_CLEAR_MASK);
826 ti_sdma_write_4(sc, DMA4_IRQSTATUS_L(j), (1 << ch));
830 sc->sc_channel[ch].need_reg_write = 1;
832 TI_SDMA_UNLOCK(sc);
853 struct ti_sdma_softc *sc = ti_sdma_sc;
856 if (sc == NULL)
859 TI_SDMA_LOCK(sc);
861 if ((sc->sc_active_channels & (1 << ch)) == 0) {
862 TI_SDMA_UNLOCK(sc);
866 sc->sc_channel[ch].reg_csdp &= ~DMA4_CSDP_SRC_ENDIANISM(1);
867 sc->sc_channel[ch].reg_csdp |= DMA4_CSDP_SRC_ENDIANISM(src);
869 sc->sc_channel[ch].reg_csdp &= ~DMA4_CSDP_DST_ENDIANISM(1);
870 sc->sc_channel[ch].reg_csdp |= DMA4_CSDP_DST_ENDIANISM(dst);
872 sc->sc_channel[ch].need_reg_write = 1;
874 TI_SDMA_UNLOCK(sc);
898 struct ti_sdma_softc *sc = ti_sdma_sc;
901 if (sc == NULL)
904 TI_SDMA_LOCK(sc);
906 if ((sc->sc_active_channels & (1 << ch)) == 0) {
907 TI_SDMA_UNLOCK(sc);
911 sc->sc_channel[ch].reg_csdp &= ~DMA4_CSDP_SRC_BURST_MODE(0x3);
912 sc->sc_channel[ch].reg_csdp |= DMA4_CSDP_SRC_BURST_MODE(src);
914 sc->sc_channel[ch].reg_csdp &= ~DMA4_CSDP_DST_BURST_MODE(0x3);
915 sc->sc_channel[ch].reg_csdp |= DMA4_CSDP_DST_BURST_MODE(dst);
917 sc->sc_channel[ch].need_reg_write = 1;
919 TI_SDMA_UNLOCK(sc);
940 struct ti_sdma_softc *sc = ti_sdma_sc;
943 if (sc == NULL)
946 TI_SDMA_LOCK(sc);
948 if ((sc->sc_active_channels & (1 << ch)) == 0) {
949 TI_SDMA_UNLOCK(sc);
953 sc->sc_channel[ch].reg_csdp &= ~DMA4_CSDP_DATA_TYPE(0x3);
954 sc->sc_channel[ch].reg_csdp |= DMA4_CSDP_DATA_TYPE(type);
956 sc->sc_channel[ch].need_reg_write = 1;
958 TI_SDMA_UNLOCK(sc);
980 struct ti_sdma_softc *sc = ti_sdma_sc;
983 if (sc == NULL)
986 TI_SDMA_LOCK(sc);
988 if ((sc->sc_active_channels & (1 << ch)) == 0) {
989 TI_SDMA_UNLOCK(sc);
993 sc->sc_channel[ch].callback = callback;
994 sc->sc_channel[ch].callback_data = data;
996 sc->sc_channel[ch].need_reg_write = 1;
998 TI_SDMA_UNLOCK(sc);
1023 struct ti_sdma_softc *sc = ti_sdma_sc;
1027 if (sc == NULL)
1030 TI_SDMA_LOCK(sc);
1032 if ((sc->sc_active_channels & (1 << ch)) == 0) {
1033 TI_SDMA_UNLOCK(sc);
1037 ccr = sc->sc_channel[ch].reg_ccr;
1057 sc->sc_channel[ch].reg_ccr = ccr;
1059 sc->sc_channel[ch].need_reg_write = 1;
1061 TI_SDMA_UNLOCK(sc);
1087 struct ti_sdma_softc *sc = ti_sdma_sc;
1091 if (sc == NULL)
1094 TI_SDMA_LOCK(sc);
1096 if ((sc->sc_active_channels & (1 << ch)) == 0) {
1097 TI_SDMA_UNLOCK(sc);
1101 ccr = sc->sc_channel[ch].reg_ccr;
1109 sc->sc_channel[ch].reg_ccr = ccr;
1111 sc->sc_channel[ch].need_reg_write = 1;
1113 TI_SDMA_UNLOCK(sc);
1154 struct ti_sdma_softc *sc = device_get_softc(dev);
1162 sc->sc_dev = dev;
1165 sc->sc_active_channels = 0x00000000;
1168 TI_SDMA_LOCK_INIT(sc);
1172 sc->sc_mem_res = bus_alloc_resource_any(dev, SYS_RES_MEMORY, &rid, RF_ACTIVE);
1173 if (sc->sc_mem_res == NULL)
1180 sc->sc_hw_rev = ti_sdma_read_4(sc, DMA4_REVISION);
1181 device_printf(dev, "sDMA revision %08x\n", sc->sc_hw_rev);
1183 if (!ti_sdma_is_omap4_rev(sc) && !ti_sdma_is_omap3_rev(sc)) {
1184 device_printf(sc->sc_dev, "error - unknown sDMA H/W revision\n");
1190 ti_sdma_write_4(sc, DMA4_IRQENABLE_L(i), 0x00000000);
1194 if (ti_sdma_is_omap3_rev(sc)) {
1197 ti_sdma_write_4(sc, DMA4_OCP_SYSCONFIG, 0x0002);
1203 while ((ti_sdma_read_4(sc, DMA4_SYSSTATUS) & 0x1) == 0x0) {
1209 device_printf(sc->sc_dev, "sDMA reset operation timed out\n");
1220 sc->sc_irq_res = bus_alloc_resource_any(dev, SYS_RES_IRQ, &rid,
1222 if (sc->sc_irq_res == NULL)
1225 err = bus_setup_intr(dev, sc->sc_irq_res, INTR_TYPE_MISC | INTR_MPSAFE,
1231 ti_sdma_sc = sc;
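
Nearly every match above follows the same pattern: take the softc lock, bail out if the channel is not marked in sc_active_channels, then access the DMA4 registers through the ti_sdma_read_4()/ti_sdma_write_4() wrappers shown at lines 161 and 176. The sketch below is not part of the listing; it is a minimal, hedged illustration of that pattern. The function name ti_sdma_example_clear_csr() is hypothetical, and it assumes the driver's own headers provide struct ti_sdma_softc, the TI_SDMA_LOCK()/TI_SDMA_UNLOCK() macros, and the DMA4_* register macros.

    /*
     * Hypothetical helper, for illustration only: reads a channel's CSR and
     * then clears it, using only names that appear in the listing above
     * (sc_active_channels, TI_SDMA_LOCK, ti_sdma_read_4/write_4, DMA4_CSR,
     * DMA4_CSR_CLEAR_MASK).  Assumes the driver's usual kernel includes.
     */
    static int
    ti_sdma_example_clear_csr(struct ti_sdma_softc *sc, unsigned int ch,
        uint32_t *old_csr)
    {
            TI_SDMA_LOCK(sc);

            /* Same guard used at lines 416, 470, 528, ... above. */
            if ((sc->sc_active_channels & (1 << ch)) == 0) {
                    TI_SDMA_UNLOCK(sc);
                    return (EINVAL);
            }

            /* Read the channel status, then clear it, as at lines 240 and 431. */
            if (old_csr != NULL)
                    *old_csr = ti_sdma_read_4(sc, DMA4_CSR(ch));
            ti_sdma_write_4(sc, DMA4_CSR(ch), DMA4_CSR_CLEAR_MASK);

            TI_SDMA_UNLOCK(sc);
            return (0);
    }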