Lines matching refs:ctlr, only in /asuswrt-rt-n18u-9.0.0.4.380.2695/release/src-rt-6.x.4708/linux/linux-2.6.36/arch/arm/mach-davinci/:

107 static inline unsigned int edma_read(unsigned ctlr, int offset)
109 return (unsigned int)__raw_readl(edmacc_regs_base[ctlr] + offset);
112 static inline void edma_write(unsigned ctlr, int offset, int val)
114 __raw_writel(val, edmacc_regs_base[ctlr] + offset);
116 static inline void edma_modify(unsigned ctlr, int offset, unsigned and,
119 unsigned val = edma_read(ctlr, offset);
122 edma_write(ctlr, offset, val);
124 static inline void edma_and(unsigned ctlr, int offset, unsigned and)
126 unsigned val = edma_read(ctlr, offset);
128 edma_write(ctlr, offset, val);
130 static inline void edma_or(unsigned ctlr, int offset, unsigned or)
132 unsigned val = edma_read(ctlr, offset);
134 edma_write(ctlr, offset, val);
136 static inline unsigned int edma_read_array(unsigned ctlr, int offset, int i)
138 return edma_read(ctlr, offset + (i << 2));
140 static inline void edma_write_array(unsigned ctlr, int offset, int i,
143 edma_write(ctlr, offset + (i << 2), val);
145 static inline void edma_modify_array(unsigned ctlr, int offset, int i,
148 edma_modify(ctlr, offset + (i << 2), and, or);
150 static inline void edma_or_array(unsigned ctlr, int offset, int i, unsigned or)
152 edma_or(ctlr, offset + (i << 2), or);
154 static inline void edma_or_array2(unsigned ctlr, int offset, int i, int j,
157 edma_or(ctlr, offset + ((i*2 + j) << 2), or);
159 static inline void edma_write_array2(unsigned ctlr, int offset, int i, int j,
162 edma_write(ctlr, offset + ((i*2 + j) << 2), val);
164 static inline unsigned int edma_shadow0_read(unsigned ctlr, int offset)
166 return edma_read(ctlr, EDMA_SHADOW0 + offset);
168 static inline unsigned int edma_shadow0_read_array(unsigned ctlr, int offset,
171 return edma_read(ctlr, EDMA_SHADOW0 + offset + (i << 2));
173 static inline void edma_shadow0_write(unsigned ctlr, int offset, unsigned val)
175 edma_write(ctlr, EDMA_SHADOW0 + offset, val);
177 static inline void edma_shadow0_write_array(unsigned ctlr, int offset, int i,
180 edma_write(ctlr, EDMA_SHADOW0 + offset + (i << 2), val);
182 static inline unsigned int edma_parm_read(unsigned ctlr, int offset,
185 return edma_read(ctlr, EDMA_PARM + offset + (param_no << 5));
187 static inline void edma_parm_write(unsigned ctlr, int offset, int param_no,
190 edma_write(ctlr, EDMA_PARM + offset + (param_no << 5), val);
192 static inline void edma_parm_modify(unsigned ctlr, int offset, int param_no,
195 edma_modify(ctlr, EDMA_PARM + offset + (param_no << 5), and, or);
197 static inline void edma_parm_and(unsigned ctlr, int offset, int param_no,
200 edma_and(ctlr, EDMA_PARM + offset + (param_no << 5), and);
202 static inline void edma_parm_or(unsigned ctlr, int offset, int param_no,
205 edma_or(ctlr, EDMA_PARM + offset + (param_no << 5), or);
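
The accessors above reduce every register access to offset arithmetic against edmacc_regs_base[ctlr]: array-type registers are 32 bits wide, so element i lives at offset + (i << 2); each PaRAM set is 32 bytes, so entry param_no starts at EDMA_PARM + (param_no << 5); and the shadow-region-0 variants simply rebase the same offsets at EDMA_SHADOW0. The standalone sketch below is not driver code; the two base offsets are placeholder assumptions, and it only reproduces that arithmetic so the later matches are easier to follow.

    /* Standalone sketch of the offset arithmetic used by the accessors above.
     * EDMA_SHADOW0 and EDMA_PARM are placeholder values assumed purely for
     * illustration; the real constants live in the driver's private header. */
    #include <stdio.h>

    #define EDMA_SHADOW0  0x2000u   /* assumed: shadow region 0 base */
    #define EDMA_PARM     0x4000u   /* assumed: PaRAM base           */

    /* 32-bit array registers: element i sits 4*i bytes past the register. */
    static unsigned int array_offset(unsigned int reg, int i)
    {
        return reg + (i << 2);
    }

    /* PaRAM sets are 32 bytes each: entry param_no starts at param_no << 5. */
    static unsigned int parm_offset(int param_no, unsigned int field)
    {
        return EDMA_PARM + field + (param_no << 5);
    }

    /* Shadow-region-0 registers are the global registers rebased at EDMA_SHADOW0. */
    static unsigned int shadow0_offset(unsigned int reg)
    {
        return EDMA_SHADOW0 + reg;
    }

    int main(void)
    {
        printf("array reg 0x40, bank 1   -> 0x%x\n", array_offset(0x40, 1));
        printf("PaRAM entry 70, field 0  -> 0x%x\n", parm_offset(70, 0));
        printf("shadow copy of reg 0x40  -> 0x%x\n", shadow0_offset(0x40));
        return 0;
    }
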
267 static void map_dmach_queue(unsigned ctlr, unsigned ch_no,
274 queue_no = edma_cc[ctlr]->default_queue;
277 edma_modify_array(ctlr, EDMA_DMAQNUM, (ch_no >> 3),
281 static void __init map_queue_tc(unsigned ctlr, int queue_no, int tc_no)
284 edma_modify(ctlr, EDMA_QUETCMAP, ~(0x7 << bit), ((tc_no & 0x7) << bit));
287 static void __init assign_priority_to_queue(unsigned ctlr, int queue_no,
291 edma_modify(ctlr, EDMA_QUEPRI, ~(0x7 << bit),
306 static void __init map_dmach_param(unsigned ctlr)
310 edma_write_array(ctlr, EDMA_DCHMAP , i , (i << 5));
318 unsigned ctlr;
320 ctlr = EDMA_CTLR(lch);
324 edma_shadow0_write_array(ctlr, SH_IECR, lch >> 5,
327 edma_cc[ctlr]->intr_data[lch].callback = callback;
328 edma_cc[ctlr]->intr_data[lch].data = data;
331 edma_shadow0_write_array(ctlr, SH_ICR, lch >> 5,
333 edma_shadow0_write_array(ctlr, SH_IESR, lch >> 5,
357 unsigned ctlr;
360 ctlr = irq2ctlr(irq);
364 if ((edma_shadow0_read_array(ctlr, SH_IPR, 0) == 0) &&
365 (edma_shadow0_read_array(ctlr, SH_IPR, 1) == 0))
370 if (edma_shadow0_read_array(ctlr, SH_IPR, 0) &
371 edma_shadow0_read_array(ctlr, SH_IER, 0))
373 else if (edma_shadow0_read_array(ctlr, SH_IPR, 1) &
374 edma_shadow0_read_array(ctlr, SH_IER, 1))
379 edma_shadow0_read_array(ctlr, SH_IPR, j));
382 if ((edma_shadow0_read_array(ctlr, SH_IPR, j) & BIT(i))
383 && (edma_shadow0_read_array(ctlr,
386 edma_shadow0_write_array(ctlr, SH_ICR, j,
388 if (edma_cc[ctlr]->intr_data[k].callback)
389 edma_cc[ctlr]->intr_data[k].callback(
391 edma_cc[ctlr]->intr_data[k].
399 edma_shadow0_write(ctlr, SH_IEVAL, 1);
411 unsigned ctlr;
414 ctlr = irq2ctlr(irq);
418 if ((edma_read_array(ctlr, EDMA_EMR, 0) == 0) &&
419 (edma_read_array(ctlr, EDMA_EMR, 1) == 0) &&
420 (edma_read(ctlr, EDMA_QEMR) == 0) &&
421 (edma_read(ctlr, EDMA_CCERR) == 0))
426 if (edma_read_array(ctlr, EDMA_EMR, 0))
428 else if (edma_read_array(ctlr, EDMA_EMR, 1))
432 edma_read_array(ctlr, EDMA_EMR, j));
435 if (edma_read_array(ctlr, EDMA_EMR, j) &
438 edma_write_array(ctlr, EDMA_EMCR, j,
441 edma_shadow0_write_array(ctlr, SH_SECR,
443 if (edma_cc[ctlr]->intr_data[k].
445 edma_cc[ctlr]->intr_data[k].
448 edma_cc[ctlr]->intr_data
453 } else if (edma_read(ctlr, EDMA_QEMR)) {
455 edma_read(ctlr, EDMA_QEMR));
457 if (edma_read(ctlr, EDMA_QEMR) & BIT(i)) {
459 edma_write(ctlr, EDMA_QEMCR, BIT(i));
460 edma_shadow0_write(ctlr, SH_QSECR,
466 } else if (edma_read(ctlr, EDMA_CCERR)) {
468 edma_read(ctlr, EDMA_CCERR));
470 if (edma_read(ctlr, EDMA_CCERR) & BIT(i)) {
472 edma_write(ctlr, EDMA_CCERRCLR, BIT(i));
478 if ((edma_read_array(ctlr, EDMA_EMR, 0) == 0) &&
479 (edma_read_array(ctlr, EDMA_EMR, 1) == 0) &&
480 (edma_read(ctlr, EDMA_QEMR) == 0) &&
481 (edma_read(ctlr, EDMA_CCERR) == 0))
487 edma_write(ctlr, EDMA_EEVAL, 1);
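
Both interrupt handlers above follow the same scan-and-dispatch shape: channels are grouped into 32-bit banks (bank j = channel >> 5, bit = channel & 0x1f), the completion handler ANDs SH_IPR with SH_IER while the error handler walks EDMA_EMR, EDMA_QEMR and EDMA_CCERR, serviced bits are acknowledged through the matching clear registers (SH_ICR, EDMA_EMCR, SH_SECR, EDMA_QEMCR, EDMA_CCERRCLR), the per-channel callback stored in edma_cc[ctlr]->intr_data is invoked, and a final SH_IEVAL/EDMA_EEVAL write re-evaluates anything still pending. The self-contained sketch below mimics only that loop; its register arrays and callback table are stand-ins invented for illustration, not driver symbols.

    /* Self-contained sketch of the bank scan used by dma_irq_handler above.
     * pending[]/enabled[] and callbacks[] are stand-ins for SH_IPR, SH_IER
     * and edma_cc[ctlr]->intr_data; they are not driver symbols. */
    #include <stdio.h>

    #define NUM_BANKS 2                      /* 64 channels = 2 banks of 32 */

    typedef void (*dma_cb)(unsigned channel, void *data);

    static unsigned int pending[NUM_BANKS] = { 0x00000005, 0x0 };
    static unsigned int enabled[NUM_BANKS] = { 0xffffffff, 0xffffffff };
    static dma_cb callbacks[NUM_BANKS * 32];

    static void demo_cb(unsigned channel, void *data)
    {
        (void)data;
        printf("completion on channel %u\n", channel);
    }

    static void scan_and_dispatch(void)
    {
        for (int j = 0; j < NUM_BANKS; j++) {
            unsigned int hits = pending[j] & enabled[j];   /* IPR & IER */

            for (int i = 0; i < 32; i++) {
                if (!(hits & (1u << i)))
                    continue;

                unsigned int k = (j << 5) | i;   /* bank:bit -> channel */

                pending[j] &= ~(1u << i);        /* "write SH_ICR"      */
                if (callbacks[k])
                    callbacks[k](k, NULL);
            }
        }
    }

    int main(void)
    {
        callbacks[0] = demo_cb;
        callbacks[2] = demo_cb;
        scan_and_dispatch();
        return 0;
    }
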
511 static int reserve_contiguous_slots(int ctlr, unsigned int id,
520 for (i = start_slot; i < edma_cc[ctlr]->num_slots; ++i) {
522 if (!test_and_set_bit(j, edma_cc[ctlr]->edma_inuse)) {
550 if (i == edma_cc[ctlr]->num_slots)
555 clear_bit(j, edma_cc[ctlr]->edma_inuse);
561 memcpy_toio(edmacc_regs_base[ctlr] + PARM_OFFSET(j),
564 return EDMA_CTLR_CHAN(ctlr, i - num_slots + 1);
570 int i, ctlr;
575 ctlr = EDMA_CTLR(pdev->resource[i].start);
577 edma_cc[ctlr]->edma_unused);
625 unsigned i, done = 0, ctlr = 0;
643 ctlr = EDMA_CTLR(channel);
659 ctlr = i;
669 } else if (channel >= edma_cc[ctlr]->num_channels) {
671 } else if (test_and_set_bit(channel, edma_cc[ctlr]->edma_inuse)) {
676 edma_or_array2(ctlr, EDMA_DRAE, 0, channel >> 5, BIT(channel & 0x1f));
679 edma_stop(EDMA_CTLR_CHAN(ctlr, channel));
680 memcpy_toio(edmacc_regs_base[ctlr] + PARM_OFFSET(channel),
684 setup_dma_interrupt(EDMA_CTLR_CHAN(ctlr, channel),
687 map_dmach_queue(ctlr, channel, eventq_no);
689 return EDMA_CTLR_CHAN(ctlr, channel);
707 unsigned ctlr;
709 ctlr = EDMA_CTLR(channel);
712 if (channel >= edma_cc[ctlr]->num_channels)
718 memcpy_toio(edmacc_regs_base[ctlr] + PARM_OFFSET(channel),
720 clear_bit(channel, edma_cc[ctlr]->edma_inuse);
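
The matches from 625 to 720 come from edma_alloc_channel() and edma_free_channel(): the controller index is recovered from the combined handle with EDMA_CTLR(), the channel bit is claimed in edma_inuse, the channel's event is routed into shadow region 0 through EDMA_DRAE, its PaRAM entry is reinitialised, and the completion callback is wired up via setup_dma_interrupt() before the combined handle is returned by EDMA_CTLR_CHAN(). A typical caller looks roughly like the hedged sketch below; my_dma_cb and MY_EVENT are invented, and the edma_* prototypes plus the EVENTQ_*/DMA_* constants are assumed to match the legacy private API in <mach/edma.h> in this tree.

    /* Hedged sketch: request a channel tied to a hardware event, with a
     * completion callback, then release it. MY_EVENT and my_dma_cb are
     * made up; the edma_* calls are assumed from <mach/edma.h>. */
    #include <linux/kernel.h>
    #include <mach/edma.h>

    #define MY_EVENT 20                     /* assumed peripheral event number */

    static void my_dma_cb(unsigned channel, u16 ch_status, void *data)
    {
        if (ch_status == DMA_COMPLETE)
            pr_info("channel %u done\n", channel);
        else
            pr_warning("channel %u error, status %u\n", channel, ch_status);
    }

    static int my_setup(void)
    {
        int ch = edma_alloc_channel(MY_EVENT, my_dma_cb, NULL, EVENTQ_DEFAULT);

        if (ch < 0)
            return ch;                      /* -ENODEV, -EINVAL or -EBUSY */

        /* ... program the PaRAM set and start the transfer ... */

        edma_free_channel(ch);              /* stops the channel and frees it */
        return 0;
    }
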
738 int edma_alloc_slot(unsigned ctlr, int slot)
744 slot = edma_cc[ctlr]->num_channels;
746 slot = find_next_zero_bit(edma_cc[ctlr]->edma_inuse,
747 edma_cc[ctlr]->num_slots, slot);
748 if (slot == edma_cc[ctlr]->num_slots)
750 if (!test_and_set_bit(slot, edma_cc[ctlr]->edma_inuse))
753 } else if (slot < edma_cc[ctlr]->num_channels ||
754 slot >= edma_cc[ctlr]->num_slots) {
756 } else if (test_and_set_bit(slot, edma_cc[ctlr]->edma_inuse)) {
760 memcpy_toio(edmacc_regs_base[ctlr] + PARM_OFFSET(slot),
763 return EDMA_CTLR_CHAN(ctlr, slot);
777 unsigned ctlr;
779 ctlr = EDMA_CTLR(slot);
782 if (slot < edma_cc[ctlr]->num_channels ||
783 slot >= edma_cc[ctlr]->num_slots)
786 memcpy_toio(edmacc_regs_base[ctlr] + PARM_OFFSET(slot),
788 clear_bit(slot, edma_cc[ctlr]->edma_inuse);
818 int edma_alloc_cont_slots(unsigned ctlr, unsigned int id, int slot, int count)
826 (slot < edma_cc[ctlr]->num_channels ||
827 slot >= edma_cc[ctlr]->num_slots))
836 (edma_cc[ctlr]->num_slots - edma_cc[ctlr]->num_channels))
841 return reserve_contiguous_slots(ctlr, id, count,
842 edma_cc[ctlr]->num_channels);
845 return reserve_contiguous_slots(ctlr, id, count, slot);
868 unsigned ctlr, slot_to_free;
871 ctlr = EDMA_CTLR(slot);
874 if (slot < edma_cc[ctlr]->num_channels ||
875 slot >= edma_cc[ctlr]->num_slots ||
880 ctlr = EDMA_CTLR(i);
883 memcpy_toio(edmacc_regs_base[ctlr] + PARM_OFFSET(slot_to_free),
885 clear_bit(slot_to_free, edma_cc[ctlr]->edma_inuse);
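
The matches from 738 to 885 are the PaRAM slot bookkeeping: edma_alloc_slot() and edma_free_slot() hand out individual parameter RAM entries above the channel-mapped ones (slots below num_channels are refused), while edma_alloc_cont_slots() and edma_free_cont_slots() reserve and release a contiguous run, which is what linked and chained transfers usually need. A hedged usage sketch follows; the controller number is an assumption and EDMA_SLOT_ANY/EDMA_CONT_PARAMS_ANY are assumed from <mach/edma.h>.

    /* Hedged sketch of PaRAM slot management with the legacy DaVinci EDMA
     * API. Controller 0 and the slot counts are assumptions for illustration. */
    #include <linux/kernel.h>
    #include <mach/edma.h>

    static int my_reserve_slots(void)
    {
        /* One scratch slot anywhere on controller 0. */
        int slot = edma_alloc_slot(0, EDMA_SLOT_ANY);
        /* Four contiguous slots anywhere, e.g. for a small linked ring. */
        int first = edma_alloc_cont_slots(0, EDMA_CONT_PARAMS_ANY, -1, 4);

        if (slot < 0 || first < 0)
            goto err;

        /* ... point PaRAM links at the reserved slots ... */

        edma_free_cont_slots(first, 4);
        edma_free_slot(slot);
        return 0;

    err:
        if (first >= 0)
            edma_free_cont_slots(first, 4);
        if (slot >= 0)
            edma_free_slot(slot);
        return -EBUSY;
    }
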
910 unsigned ctlr;
912 ctlr = EDMA_CTLR(slot);
915 if (slot < edma_cc[ctlr]->num_slots) {
916 unsigned int i = edma_parm_read(ctlr, PARM_OPT, slot);
925 edma_parm_write(ctlr, PARM_OPT, slot, i);
929 edma_parm_write(ctlr, PARM_SRC, slot, src_port);
948 unsigned ctlr;
950 ctlr = EDMA_CTLR(slot);
953 if (slot < edma_cc[ctlr]->num_slots) {
954 unsigned int i = edma_parm_read(ctlr, PARM_OPT, slot);
963 edma_parm_write(ctlr, PARM_OPT, slot, i);
966 edma_parm_write(ctlr, PARM_DST, slot, dest_port);
983 unsigned ctlr;
985 ctlr = EDMA_CTLR(slot);
988 edma_read_slot(EDMA_CTLR_CHAN(ctlr, slot), &temp);
1008 unsigned ctlr;
1010 ctlr = EDMA_CTLR(slot);
1013 if (slot < edma_cc[ctlr]->num_slots) {
1014 edma_parm_modify(ctlr, PARM_SRC_DST_BIDX, slot,
1016 edma_parm_modify(ctlr, PARM_SRC_DST_CIDX, slot,
1034 unsigned ctlr;
1036 ctlr = EDMA_CTLR(slot);
1039 if (slot < edma_cc[ctlr]->num_slots) {
1040 edma_parm_modify(ctlr, PARM_SRC_DST_BIDX, slot,
1042 edma_parm_modify(ctlr, PARM_SRC_DST_CIDX, slot,
1081 unsigned ctlr;
1083 ctlr = EDMA_CTLR(slot);
1086 if (slot < edma_cc[ctlr]->num_slots) {
1087 edma_parm_modify(ctlr, PARM_LINK_BCNTRLD, slot,
1090 edma_parm_and(ctlr, PARM_OPT, slot, ~SYNCDIM);
1092 edma_parm_or(ctlr, PARM_OPT, slot, SYNCDIM);
1094 edma_parm_write(ctlr, PARM_A_B_CNT, slot, (bcnt << 16) | acnt);
1095 edma_parm_write(ctlr, PARM_CCNT, slot, ccnt);
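
The matches between 910 and 1095 are the PaRAM field setters: edma_set_src() and edma_set_dest() update the addressing-mode and FIFO-width bits in PARM_OPT and write the port address into PARM_SRC/PARM_DST, edma_set_src_index() and edma_set_dest_index() pack the B and C indexes into PARM_SRC_DST_BIDX/CIDX, and edma_set_transfer_params() writes ACNT/BCNT/CCNT plus the BCNT reload and selects A- or AB-synchronisation through the SYNCDIM bit. Programming a channel for a simple one-shot copy might therefore look like the sketch below; the addresses and length are invented, and the enum values (INCR, W8BIT, ASYNC) are assumed from <mach/edma.h>.

    /* Hedged sketch: program a channel's PaRAM set for a one-shot,
     * A-synchronised copy of 'len' bytes, then trigger it. 'ch' is a handle
     * returned by edma_alloc_channel(); src/dst are DMA (bus) addresses. */
    #include <linux/kernel.h>
    #include <mach/edma.h>

    static void my_program_copy(int ch, dma_addr_t src, dma_addr_t dst, u16 len)
    {
        /* Incrementing addresses on both sides, no constant-address FIFO. */
        edma_set_src(ch, src, INCR, W8BIT);
        edma_set_dest(ch, dst, INCR, W8BIT);

        /* Single array, single frame, single block: the indexes are unused. */
        edma_set_src_index(ch, 0, 0);
        edma_set_dest_index(ch, 0, 0);

        /* acnt = len, bcnt = 1, ccnt = 1, bcnt reload 1, A-synchronised. */
        edma_set_transfer_params(ch, len, 1, 1, 1, ASYNC);

        /* For a channel with no hardware event this is the manual-trigger
         * SH_ESR path seen at lines 1265-1268. */
        edma_start(ch);
    }
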
1134 unsigned ctlr;
1136 ctlr = EDMA_CTLR(from);
1139 if (from >= edma_cc[ctlr]->num_slots)
1141 edma_parm_or(ctlr, PARM_LINK_BCNTRLD, from, 0xffff);
1161 unsigned ctlr;
1163 ctlr = EDMA_CTLR(slot);
1166 if (slot >= edma_cc[ctlr]->num_slots)
1168 memcpy_toio(edmacc_regs_base[ctlr] + PARM_OFFSET(slot), param,
1183 unsigned ctlr;
1185 ctlr = EDMA_CTLR(slot);
1188 if (slot >= edma_cc[ctlr]->num_slots)
1190 memcpy_fromio(param, edmacc_regs_base[ctlr] + PARM_OFFSET(slot),
1208 unsigned ctlr;
1210 ctlr = EDMA_CTLR(channel);
1213 if (channel < edma_cc[ctlr]->num_channels) {
1216 edma_shadow0_write_array(ctlr, SH_EECR, channel >> 5, mask);
1229 unsigned ctlr;
1231 ctlr = EDMA_CTLR(channel);
1234 if (channel < edma_cc[ctlr]->num_channels) {
1237 edma_shadow0_write_array(ctlr, SH_EESR, channel >> 5, mask);
1255 unsigned ctlr;
1257 ctlr = EDMA_CTLR(channel);
1260 if (channel < edma_cc[ctlr]->num_channels) {
1265 if (test_bit(channel, edma_cc[ctlr]->edma_unused)) {
1267 edma_shadow0_read_array(ctlr, SH_ESR, j));
1268 edma_shadow0_write_array(ctlr, SH_ESR, j, mask);
1274 edma_shadow0_read_array(ctlr, SH_ER, j));
1276 edma_write_array(ctlr, EDMA_ECR, j, mask);
1277 edma_write_array(ctlr, EDMA_EMCR, j, mask);
1279 edma_shadow0_write_array(ctlr, SH_SECR, j, mask);
1280 edma_shadow0_write_array(ctlr, SH_EESR, j, mask);
1282 edma_shadow0_read_array(ctlr, SH_EER, j));
1301 unsigned ctlr;
1303 ctlr = EDMA_CTLR(channel);
1306 if (channel < edma_cc[ctlr]->num_channels) {
1310 edma_shadow0_write_array(ctlr, SH_EECR, j, mask);
1311 edma_shadow0_write_array(ctlr, SH_ECR, j, mask);
1312 edma_shadow0_write_array(ctlr, SH_SECR, j, mask);
1313 edma_write_array(ctlr, EDMA_EMCR, j, mask);
1316 edma_shadow0_read_array(ctlr, SH_EER, j));
1328 unsigned ctlr;
1330 ctlr = EDMA_CTLR(channel);
1333 if (channel < edma_cc[ctlr]->num_channels) {
1338 edma_read_array(ctlr, EDMA_EMR, j));
1339 edma_shadow0_write_array(ctlr, SH_ECR, j, mask);
1341 edma_write_array(ctlr, EDMA_EMCR, j, mask);
1343 edma_shadow0_write_array(ctlr, SH_SECR, j, mask);
1344 edma_write(ctlr, EDMA_CCERRCLR, BIT(16) | BIT(1) | BIT(0));
1356 unsigned ctlr;
1358 ctlr = EDMA_CTLR(channel);
1361 if (channel >= edma_cc[ctlr]->num_channels)
1364 edma_write(ctlr, EDMA_ECR, BIT(channel));
1366 edma_write(ctlr, EDMA_ECRH, BIT(channel - 32));
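
The remaining matches, from 1208 onward, are the channel control entry points: edma_pause() and edma_resume() toggle event reception through SH_EECR/SH_EESR, edma_start() either manually triggers an unused channel via SH_ESR or clears and re-enables its event, edma_stop() disables the channel and clears its event, secondary event and error bits, edma_clean_channel() scrubs a channel after an error including the controller-level bits in EDMA_CCERRCLR, and edma_clear_event() drops a latched event through ECR/ECRH. A typical pause and error-recovery path, hedged as before, might be:

    /* Hedged sketch of the control calls above: pause a running channel and
     * recover one that reported an error. 'ch' is a handle from
     * edma_alloc_channel(); the call names are assumed from <mach/edma.h>. */
    #include <linux/kernel.h>
    #include <mach/edma.h>

    static void my_pause_resume(int ch)
    {
        edma_pause(ch);          /* SH_EECR: stop accepting new events        */
        /* ... inspect or patch the PaRAM set, e.g. with edma_read_slot() ... */
        edma_resume(ch);         /* SH_EESR: accept events again              */
    }

    static void my_error_recover(int ch)
    {
        edma_stop(ch);           /* disable events, clear event + error bits */
        edma_clean_channel(ch);  /* clear EMR/SECR and CC error conditions   */
        edma_clear_event(ch);    /* drop any latched event via ECR/ECRH      */
        edma_start(ch);          /* re-arm the channel                       */
    }
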