Lines matching defs:dmap in /netgear-WNDR4500v2-V1.0.0.60_1.0.38/src/linux/linux-2.6/sound/oss/

44 static long dmabuf_timeout(struct dma_buffparms *dmap)
48 tmout = (dmap->fragment_size * HZ) / dmap->data_rate;
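
The timeout is simply the playback time of one fragment converted to scheduler ticks. A minimal userspace model of the computation, assuming HZ = 100; the safety margin and clamping mirror the surrounding lines of the full function, which this match list elides, and should be checked against the source:

    #define HZ 100

    /* Model of dmabuf_timeout(): how long one fragment takes to play,
     * expressed in scheduler ticks. */
    static long dmabuf_timeout_model(int fragment_size, int data_rate)
    {
        long tmout = ((long)fragment_size * HZ) / data_rate;
        tmout += HZ / 5;             /* safety margin */
        if (tmout < HZ / 2)          /* wait at least half a second */
            tmout = HZ / 2;
        if (tmout > 20 * HZ)         /* but never more than 20 seconds */
            tmout = 20 * HZ;
        return tmout;
    }

For example, a 4096-byte fragment at the default 8000 bytes/s rate gives 4096 * 100 / 8000 = 51 ticks, plus the 20-tick margin, so roughly 0.7 s.
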
57 static int sound_alloc_dmap(struct dma_buffparms *dmap)
64 dmap->mapping_flags &= ~DMA_MAP_MAPPED;
66 if (dmap->raw_buf != NULL)
70 dma_pagesize = (dmap->dma < 4) ? (64 * 1024) : (128 * 1024);
79 dmap->raw_buf = NULL;
80 dmap->buffsize = dma_buffsize;
81 if (dmap->buffsize > dma_pagesize)
82 dmap->buffsize = dma_pagesize;
89 while (start_addr == NULL && dmap->buffsize > PAGE_SIZE) {
90 for (sz = 0, size = PAGE_SIZE; size < dmap->buffsize; sz++, size <<= 1);
91 dmap->buffsize = PAGE_SIZE * (1 << sz);
94 dmap->buffsize /= 2;
102 end_addr = start_addr + dmap->buffsize - 1;
111 printk(KERN_ERR "sound: Got invalid address 0x%lx for %db DMA-buffer\n", (long) start_addr, dmap->buffsize);
115 dmap->raw_buf = start_addr;
116 dmap->raw_buf_phys = virt_to_bus(start_addr);
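
The sizing logic caps the configured buffer at the channel's DMA page limit, rounds it up to a power-of-two number of pages (the allocation order that __get_free_pages() takes), and halves it on failure until something fits. A compile-able model of just that arithmetic; try_alloc() is a hypothetical stand-in for __get_free_pages(GFP_ATOMIC | GFP_DMA, sz):

    #include <stddef.h>

    #define PAGE_SIZE 4096

    extern void *try_alloc(int order);     /* stand-in allocator */

    static void *alloc_dma_buffer(int dma_chan, int *buffsize)
    {
        /* 8-bit ISA channels (0-3) cannot cross a 64 KB page,
         * 16-bit channels get 128 KB */
        int dma_pagesize = (dma_chan < 4) ? (64 * 1024) : (128 * 1024);
        void *buf = NULL;
        int sz, size;

        if (*buffsize > dma_pagesize)
            *buffsize = dma_pagesize;

        while (buf == NULL && *buffsize > PAGE_SIZE) {
            /* find the order sz with PAGE_SIZE << sz >= buffsize */
            for (sz = 0, size = PAGE_SIZE; size < *buffsize; sz++, size <<= 1)
                ;
            *buffsize = PAGE_SIZE * (1 << sz);
            buf = try_alloc(sz);
            if (buf == NULL)
                *buffsize /= 2;            /* retry with half the size */
        }
        return buf;
    }
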
123 static void sound_free_dmap(struct dma_buffparms *dmap)
129 if (dmap->raw_buf == NULL)
131 if (dmap->mapping_flags & DMA_MAP_MAPPED)
133 for (sz = 0, size = PAGE_SIZE; size < dmap->buffsize; sz++, size <<= 1);
135 start_addr = (unsigned long) dmap->raw_buf;
136 end_addr = start_addr + dmap->buffsize;
141 free_pages((unsigned long) dmap->raw_buf, sz);
142 dmap->raw_buf = NULL;
148 static int sound_start_dma(struct dma_buffparms *dmap, unsigned long physaddr, int count, int dma_mode)
151 int chan = dmap->dma;
153 /* printk( "Start DMA%d %d, %d\n", chan, (int)(physaddr-dmap->raw_buf_phys), count); */
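
The elided body programs the ISA DMA controller through the standard <asm/dma.h> helpers. A sketch of the usual sequence under the DMA lock; this is the conventional pattern, not a verbatim copy of the function:

    /* Classic ISA DMA programming: the channel must be masked and its
     * byte-pointer flip-flop cleared before the split 8-bit writes of
     * address and count. */
    unsigned long flags = claim_dma_lock();

    disable_dma(chan);
    clear_dma_ff(chan);               /* reset the address/count flip-flop */
    set_dma_mode(chan, dma_mode);     /* DMA_MODE_READ or DMA_MODE_WRITE */
    set_dma_addr(chan, physaddr);     /* bus address within the DMA page */
    set_dma_count(chan, count);       /* transfer length in bytes */
    enable_dma(chan);

    release_dma_lock(flags);

local_start_dma() at line 943 drives the same path with dma_mode | DMA_AUTOINIT, so the controller cycles over the whole buffer on its own and the driver only services the per-fragment interrupts.
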
167 static void dma_init_buffers(struct dma_buffparms *dmap)
169 dmap->qlen = dmap->qhead = dmap->qtail = dmap->user_counter = 0;
170 dmap->byte_counter = 0;
171 dmap->max_byte_counter = 8000 * 60 * 60;
172 dmap->bytes_in_use = dmap->buffsize;
174 dmap->dma_mode = DMODE_NONE;
175 dmap->mapping_flags = 0;
176 dmap->neutral_byte = 0x80;
177 dmap->data_rate = 8000;
178 dmap->cfrag = -1;
179 dmap->closing = 0;
180 dmap->nbufs = 1;
181 dmap->flags = DMA_BUSY; /* Other flags off */
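
The defaults are worth decoding: 0x80 is the silence value for unsigned 8-bit samples, 8000 bytes/s matches the traditional /dev/dsp default (8 kHz, 8-bit, mono), and max_byte_counter caps the running byte counter at one hour of audio at that rate:

    /* 8000 bytes/s * 60 s * 60 min = 28800000 bytes, so at the default
     * rate the counters are rebased about once an hour, well before
     * a signed 32-bit long could overflow. */
    long max_byte_counter = 8000 * 60 * 60;
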
184 static int open_dmap(struct audio_operations *adev, int mode, struct dma_buffparms *dmap)
188 if (dmap->flags & DMA_BUSY)
190 if ((err = sound_alloc_dmap(dmap)) < 0)
193 if (dmap->raw_buf == NULL) {
197 if (dmap->dma >= 0 && sound_open_dma(dmap->dma, adev->name)) {
198 printk(KERN_WARNING "Unable to grab(2) DMA%d for the audio driver\n", dmap->dma);
201 dma_init_buffers(dmap);
202 spin_lock_init(&dmap->lock);
203 dmap->open_mode = mode;
204 dmap->subdivision = dmap->underrun_count = 0;
205 dmap->fragment_size = 0;
206 dmap->max_fragments = 65536; /* Just a large value */
207 dmap->byte_counter = 0;
208 dmap->max_byte_counter = 8000 * 60 * 60;
209 dmap->applic_profile = APF_NORMAL;
210 dmap->needs_reorg = 1;
211 dmap->audio_callback = NULL;
212 dmap->callback_parm = 0;
216 static void close_dmap(struct audio_operations *adev, struct dma_buffparms *dmap)
220 if (dmap->dma >= 0) {
221 sound_close_dma(dmap->dma);
223 disable_dma(dmap->dma);
226 if (dmap->flags & DMA_BUSY)
227 dmap->dma_mode = DMODE_NONE;
228 dmap->flags &= ~DMA_BUSY;
231 sound_free_dmap(dmap);
337 struct dma_buffparms *dmap = adev->dmap_out;
339 if (!(dmap->flags & DMA_STARTED)) /* DMA is not active */
345 spin_lock_irqsave(&dmap->lock,flags);
351 spin_unlock_irqrestore(&dmap->lock,flags);
353 dmabuf_timeout(dmap));
354 spin_lock_irqsave(&dmap->lock,flags);
368 clear_dma_ff(dmap->dma);
369 disable_dma(dmap->dma);
372 dmap->byte_counter = 0;
374 dmap->qlen = dmap->qhead = dmap->qtail = dmap->user_counter = 0;
375 spin_unlock_irqrestore(&dmap->lock,flags);
382 struct dma_buffparms *dmap = adev->dmap_in;
384 spin_lock_irqsave(&dmap->lock,flags);
391 dmap->qlen = dmap->qhead = dmap->qtail = dmap->user_counter = 0;
392 dmap->byte_counter = 0;
394 spin_unlock_irqrestore(&dmap->lock,flags);
396 /* MUST be called while holding dmap->lock */
397 void DMAbuf_launch_output(int dev, struct dma_buffparms *dmap)
403 dmap->dma_mode = DMODE_OUTPUT;
405 if (!(dmap->flags & DMA_ACTIVE) || !(adev->flags & DMA_AUTOMODE) || (dmap->flags & DMA_NODMA)) {
406 if (!(dmap->flags & DMA_STARTED)) {
407 reorganize_buffers(dev, dmap, 0);
408 if (adev->d->prepare_for_output(dev, dmap->fragment_size, dmap->nbufs))
410 if (!(dmap->flags & DMA_NODMA))
411 local_start_dma(adev, dmap->raw_buf_phys, dmap->bytes_in_use,DMA_MODE_WRITE);
412 dmap->flags |= DMA_STARTED;
414 if (dmap->counts[dmap->qhead] == 0)
415 dmap->counts[dmap->qhead] = dmap->fragment_size;
416 dmap->dma_mode = DMODE_OUTPUT;
417 adev->d->output_block(dev, dmap->raw_buf_phys + dmap->qhead * dmap->fragment_size,
418 dmap->counts[dmap->qhead], 1);
422 dmap->flags |= DMA_ACTIVE;
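
Launching output is a two-stage decision: if the engine has never started, the fragments are (re)organized, the low-level driver is prepared, and the auto-init DMA cycle is kicked off; after that, the fragment at qhead is handed to the card. The queue underneath is a plain ring, used the same way throughout this file. A minimal userspace model of the bookkeeping, with field names taken from the listing:

    /* Fragment ring as used by this driver:
     *   qhead = next fragment the hardware consumes (output) or the
     *           reader drains (input),
     *   qtail = next fragment the other side fills,
     *   qlen  = filled fragments in between. */
    struct frag_ring {
        int qhead, qtail, qlen;
        int nbufs;                       /* ring size in fragments */
    };

    static int ring_pop(struct frag_ring *r)    /* consumer side */
    {
        int frag = r->qhead;
        r->qlen--;
        r->qhead = (r->qhead + 1) % r->nbufs;
        return frag;
    }

    static void ring_push(struct frag_ring *r)  /* producer side */
    {
        r->qtail = (r->qtail + 1) % r->nbufs;
        r->qlen++;
    }
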
430 struct dma_buffparms *dmap;
436 dmap = adev->dmap_out;
437 spin_lock_irqsave(&dmap->lock,flags);
438 if (dmap->qlen > 0 && !(dmap->flags & DMA_ACTIVE))
439 DMAbuf_launch_output(dev, dmap);
444 long t = dmabuf_timeout(dmap);
445 spin_unlock_irqrestore(&dmap->lock,flags);
447 spin_lock_irqsave(&dmap->lock,flags);
450 spin_unlock_irqrestore(&dmap->lock,flags);
465 spin_unlock_irqrestore(&dmap->lock,flags);
467 dmabuf_timeout(dmap));
468 spin_lock_irqsave(&dmap->lock,flags);
471 spin_unlock_irqrestore(&dmap->lock,flags);
480 struct dma_buffparms *dmap;
483 dmap = adev->dmap_out;
489 dmap = adev->dmap_in;
499 spin_lock_irqsave(&dmap->lock,flags);
510 spin_unlock_irqrestore(&dmap->lock,flags);
513 /* called with dmap->lock held */
514 int DMAbuf_activate_recording(int dev, struct dma_buffparms *dmap)
523 if (dmap->dma_mode == DMODE_OUTPUT) { /* Direction change */
525 spin_unlock_irq(&dmap->lock);
528 spin_lock_irq(&dmap->lock);
529 dmap->dma_mode = DMODE_NONE;
531 if (!dmap->dma_mode) {
532 reorganize_buffers(dev, dmap, 1);
534 dmap->fragment_size, dmap->nbufs)) < 0)
536 dmap->dma_mode = DMODE_INPUT;
538 if (!(dmap->flags & DMA_ACTIVE)) {
539 if (dmap->needs_reorg)
540 reorganize_buffers(dev, dmap, 0);
541 local_start_dma(adev, dmap->raw_buf_phys, dmap->bytes_in_use, DMA_MODE_READ);
542 adev->d->start_input(dev, dmap->raw_buf_phys + dmap->qtail * dmap->fragment_size,
543 dmap->fragment_size, 0);
544 dmap->flags |= DMA_ACTIVE;
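
Note the lock dance on the direction change (lines 523-529): draining the output side sleeps, so the routine must drop dmap->lock around the blocking call and retake it before flipping dma_mode. A sketch of the pattern; sync_output() is a hypothetical stand-in for the driver's actual drain call:

    spin_unlock_irq(&dmap->lock);
    sync_output(dev);                 /* may sleep until the DAC drains */
    spin_lock_irq(&dmap->lock);
    dmap->dma_mode = DMODE_NONE;      /* buffer can now be repurposed */
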
556 struct dma_buffparms *dmap = adev->dmap_in;
561 spin_lock_irqsave(&dmap->lock,flags);
562 if (dmap->needs_reorg)
563 reorganize_buffers(dev, dmap, 0);
566 spin_unlock_irqrestore(&dmap->lock,flags);
568 } else while (dmap->qlen <= 0 && n++ < 10) {
571 spin_unlock_irqrestore(&dmap->lock,flags);
574 if ((err = DMAbuf_activate_recording(dev, dmap)) < 0) {
575 spin_unlock_irqrestore(&dmap->lock,flags);
581 spin_unlock_irqrestore(&dmap->lock,flags);
585 timeout = dmabuf_timeout(dmap);
587 spin_unlock_irqrestore(&dmap->lock,flags);
596 spin_lock_irqsave(&dmap->lock,flags);
598 spin_unlock_irqrestore(&dmap->lock,flags);
600 if (dmap->qlen <= 0)
602 *buf = &dmap->raw_buf[dmap->qhead * dmap->fragment_size + dmap->counts[dmap->qhead]];
603 *len = dmap->fragment_size - dmap->counts[dmap->qhead];
605 return dmap->qhead;
611 struct dma_buffparms *dmap = adev->dmap_in;
612 int p = dmap->counts[dmap->qhead] + c;
614 if (dmap->mapping_flags & DMA_MAP_MAPPED)
619 else if (dmap->qlen <= 0)
621 else if (p >= dmap->fragment_size) { /* This buffer is completely empty */
622 dmap->counts[dmap->qhead] = 0;
623 dmap->qlen--;
624 dmap->qhead = (dmap->qhead + 1) % dmap->nbufs;
626 else dmap->counts[dmap->qhead] = p;
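
Read-side consumption is partial-fragment aware: counts[qhead] remembers how many bytes of the head fragment the reader has already taken, and the fragment is recycled only once fully drained. A standalone model with the ring state passed explicitly:

    /* Model of DMAbuf_rmchars(): advance the read position by c bytes. */
    static void consume_bytes(int *qhead, int *qlen, int nbufs,
                              int *counts, int fragment_size, int c)
    {
        int p = counts[*qhead] + c;

        if (p >= fragment_size) {        /* head fragment fully drained */
            counts[*qhead] = 0;
            (*qlen)--;
            *qhead = (*qhead + 1) % nbufs;
        } else {
            counts[*qhead] = p;          /* partial read; remember offset */
        }
    }
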
630 /* MUST be called with dmap->lock held */
631 int DMAbuf_get_buffer_pointer(int dev, struct dma_buffparms *dmap, int direction)
641 if (!(dmap->flags & DMA_ACTIVE))
644 int chan = dmap->dma;
650 disable_dma(dmap->dma);
654 pos = dmap->bytes_in_use - pos;
656 if (!(dmap->mapping_flags & DMA_MAP_MAPPED)) {
658 if (dmap->qhead == 0)
659 if (pos > dmap->fragment_size)
662 if (dmap->qtail == 0)
663 if (pos > dmap->fragment_size)
669 if (pos >= dmap->bytes_in_use)
673 enable_dma(dmap->dma);
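
The hardware position comes from the DMA controller's residue counter: the residue counts down from the programmed length, so bytes_in_use minus get_dma_residue() is how far the controller has advanced into the buffer. The channel is briefly masked while reading because the 16-bit counter is latched as two 8-bit halves. A sketch using the standard helpers:

    unsigned long f = claim_dma_lock();
    int pos;

    clear_dma_ff(chan);                /* resync the byte-pointer flip-flop */
    disable_dma(chan);                 /* freeze the counter while reading */
    pos = bytes_in_use - get_dma_residue(chan);
    enable_dma(chan);
    release_dma_lock(f);

    if (pos < 0 || pos >= bytes_in_use)
        pos = 0;                       /* defend against a bogus readback */
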
712 struct dma_buffparms *dmap = adev->dmap_out;
713 int lim = dmap->nbufs;
718 if (dmap->qlen >= lim) /* No space at all */
726 max = dmap->max_fragments;
729 len = dmap->qlen;
737 if (dmap->byte_counter % dmap->fragment_size) /* There is a partial fragment */
749 struct dma_buffparms *dmap = adev->dmap_out;
763 timeout = (adev->go && !(dmap->flags & DMA_NOTIMEOUT));
765 timeout_value = dmabuf_timeout(dmap);
783 struct dma_buffparms *dmap = adev->dmap_out;
787 int occupied_bytes = (dmap->user_counter % dmap->fragment_size);
789 *buf = dmap->raw_buf;
794 active_offs = dmap->byte_counter + dmap->qhead * dmap->fragment_size;
796 active_offs = DMAbuf_get_buffer_pointer(dev, dmap, DMODE_OUTPUT);
798 if (active_offs < 0 || active_offs >= dmap->bytes_in_use)
800 active_offs += dmap->byte_counter;
803 offs = (dmap->user_counter % dmap->bytes_in_use) & ~SAMPLE_ROUNDUP;
804 if (offs < 0 || offs >= dmap->bytes_in_use) {
806 printk("Counter = %ld, bytes=%d\n", dmap->user_counter, dmap->bytes_in_use);
809 *buf = dmap->raw_buf + offs;
811 len = active_offs + dmap->bytes_in_use - dmap->user_counter; /* Number of unused bytes in buffer */
813 if ((offs + len) > dmap->bytes_in_use)
814 len = dmap->bytes_in_use - offs;
818 if (len > ((maxfrags * dmap->fragment_size) - occupied_bytes))
819 len = (maxfrags * dmap->fragment_size) - occupied_bytes;
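
Free space for the writer is measured against the hardware's running offset: everything from user_counter up to active_offs + bytes_in_use is writable, but one return value can only describe a contiguous run, so the length is clipped at the physical end of the ring and again by the caller's fragment budget. A worked example with assumed numbers:

    /* Assume bytes_in_use = 32768, user_counter = 30000 and the DMA
     * pointer sits 4096 bytes into the current lap (active_offs = 4096
     * after the byte_counter adjustment).
     *
     *   len  = active_offs + bytes_in_use - user_counter
     *        = 4096 + 32768 - 30000 = 6864 writable bytes in total
     *   offs = user_counter % bytes_in_use = 30000
     *
     * offs + len exceeds bytes_in_use, so only 32768 - 30000 = 2768
     * bytes are returned now; the caller comes back for the remaining
     * 4096 after the wrap. */
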
829 struct dma_buffparms *dmap = adev->dmap_out;
831 if (dmap->mapping_flags & DMA_MAP_MAPPED) {
835 spin_lock_irqsave(&dmap->lock,flags);
836 if (dmap->needs_reorg)
837 reorganize_buffers(dev, dmap, 0);
839 if (dmap->dma_mode == DMODE_INPUT) { /* Direction change */
840 spin_unlock_irqrestore(&dmap->lock,flags);
842 spin_lock_irqsave(&dmap->lock,flags);
844 dmap->dma_mode = DMODE_OUTPUT;
847 spin_unlock_irqrestore(&dmap->lock,flags);
851 spin_lock_irqsave(&dmap->lock,flags);
854 spin_unlock_irqrestore(&dmap->lock,flags);
857 /* has to acquire dmap->lock */
861 struct dma_buffparms *dmap = adev->dmap_out;
867 spin_lock_irqsave(&dmap->lock,flags);
868 post = (dmap->flags & DMA_POST);
869 ptr = (dmap->user_counter / dmap->fragment_size) * dmap->fragment_size;
871 dmap->flags &= ~DMA_POST;
872 dmap->cfrag = -1;
873 dmap->user_counter += l;
874 dmap->flags |= DMA_DIRTY;
876 if (dmap->byte_counter >= dmap->max_byte_counter) {
878 long decr = dmap->byte_counter;
879 dmap->byte_counter = (dmap->byte_counter % dmap->bytes_in_use);
880 decr -= dmap->byte_counter;
881 dmap->user_counter -= decr;
883 end_ptr = (dmap->user_counter / dmap->fragment_size) * dmap->fragment_size;
885 p = (dmap->user_counter - 1) % dmap->bytes_in_use;
886 dmap->neutral_byte = dmap->raw_buf[p];
890 dmap->counts[dmap->qtail] = dmap->fragment_size;
891 dmap->qtail = (dmap->qtail + 1) % dmap->nbufs;
892 dmap->qlen++;
893 ptr += dmap->fragment_size;
896 dmap->counts[dmap->qtail] = dmap->user_counter - ptr;
905 if (!(dmap->flags & DMA_ACTIVE))
906 if (dmap->qlen > 1 || (dmap->qlen > 0 && (post || dmap->qlen >= dmap->nbufs - 1)))
907 DMAbuf_launch_output(dev, dmap);
909 spin_unlock_irqrestore(&dmap->lock,flags);
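
The rebasing at lines 876-881 (and its twins in the interrupt handlers) keeps the monotonically growing byte_counter from overflowing: when it passes max_byte_counter, both byte_counter and user_counter are reduced by the same whole number of buffer laps, so their difference (the amount of queued but unplayed data) and their positions modulo bytes_in_use are unchanged. A self-contained model:

    static void rebase_counters(long *byte_counter, long *user_counter,
                                int bytes_in_use, long max_byte_counter)
    {
        if (*byte_counter >= max_byte_counter) {
            long decr = *byte_counter;
            *byte_counter %= bytes_in_use;   /* keep position in buffer */
            decr -= *byte_counter;           /* whole laps removed */
            *user_counter -= decr;           /* keep both counters in step */
        }
    }

The input-side variants at lines 1076 and 1107 add bytes_in_use back after the modulo, apparently to keep byte_counter strictly ahead of the positions derived from it.
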
916 struct dma_buffparms *dmap = (dma_mode == DMA_MODE_WRITE) ? adev->dmap_out : adev->dmap_in;
918 if (dmap->raw_buf == NULL) {
920 printk("Device %d, chn=%s\n", dev, (dmap == adev->dmap_out) ? "out" : "in");
923 if (dmap->dma < 0)
925 sound_start_dma(dmap, physaddr, count, dma_mode);
932 struct dma_buffparms *dmap = (dma_mode == DMA_MODE_WRITE) ? adev->dmap_out : adev->dmap_in;
934 if (dmap->raw_buf == NULL) {
936 printk(KERN_ERR "Device %s, chn=%s\n", adev->name, (dmap == adev->dmap_out) ? "out" : "in");
939 if (dmap->flags & DMA_NODMA)
941 if (dmap->dma < 0)
943 sound_start_dma(dmap, dmap->raw_buf_phys, dmap->bytes_in_use, dma_mode | DMA_AUTOINIT);
944 dmap->flags |= DMA_STARTED;
948 static void finish_output_interrupt(int dev, struct dma_buffparms *dmap)
952 if (dmap->audio_callback != NULL)
953 dmap->audio_callback(dev, dmap->callback_parm);
957 /* called with dmap->lock held in irq context */
961 struct dma_buffparms *dmap = adev->dmap_out;
964 if (dmap->raw_buf == NULL) {
968 if (dmap->mapping_flags & DMA_MAP_MAPPED) { /* Virtual memory mapped access */
970 dmap->qhead = (dmap->qhead + 1) % dmap->nbufs;
971 if (dmap->qhead == 0) { /* Wrapped */
972 dmap->byte_counter += dmap->bytes_in_use;
973 if (dmap->byte_counter >= dmap->max_byte_counter) { /* Overflow */
974 long decr = dmap->byte_counter;
975 dmap->byte_counter = (dmap->byte_counter % dmap->bytes_in_use);
976 decr -= dmap->byte_counter;
977 dmap->user_counter -= decr;
980 dmap->qlen++; /* Yes increment it (don't decrement) */
982 dmap->flags &= ~DMA_ACTIVE;
983 dmap->counts[dmap->qhead] = dmap->fragment_size;
984 DMAbuf_launch_output(dev, dmap);
985 finish_output_interrupt(dev, dmap);
989 dmap->qlen--;
990 this_fragment = dmap->qhead;
991 dmap->qhead = (dmap->qhead + 1) % dmap->nbufs;
993 if (dmap->qhead == 0) { /* Wrapped */
994 dmap->byte_counter += dmap->bytes_in_use;
995 if (dmap->byte_counter >= dmap->max_byte_counter) { /* Overflow */
996 long decr = dmap->byte_counter;
997 dmap->byte_counter = (dmap->byte_counter % dmap->bytes_in_use);
998 decr -= dmap->byte_counter;
999 dmap->user_counter -= decr;
1003 dmap->flags &= ~DMA_ACTIVE;
1006 * This is dmap->qlen <= 0 normally, and
1007 * dmap->qlen < 0 while the device is closing
1010 while (dmap->qlen <= -dmap->closing) {
1011 dmap->underrun_count++;
1012 dmap->qlen++;
1013 if ((dmap->flags & DMA_DIRTY) && dmap->applic_profile != APF_CPUINTENS) {
1014 dmap->flags &= ~DMA_DIRTY;
1018 dmap->user_counter += dmap->fragment_size;
1019 dmap->qtail = (dmap->qtail + 1) % dmap->nbufs;
1021 if (dmap->qlen > 0)
1022 DMAbuf_launch_output(dev, dmap);
1023 finish_output_interrupt(dev, dmap);
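
The underrun loop at lines 1010-1019 is the recovery path when the card drains data faster than userspace writes it: each missing fragment bumps underrun_count and advances qtail over a fragment of silence, and once per dirty period (unless the application declared itself CPU-intensive) the whole buffer is blanked so stale audio is not replayed. A sketch; fill_silence() is a hypothetical stand-in for the memset with dmap->neutral_byte that the full function performs:

    while (dmap->qlen <= -dmap->closing) {
        dmap->underrun_count++;
        dmap->qlen++;
        if ((dmap->flags & DMA_DIRTY) &&
            dmap->applic_profile != APF_CPUINTENS) {
            dmap->flags &= ~DMA_DIRTY;
            fill_silence(dmap);              /* blank stale audio once */
        }
        dmap->user_counter += dmap->fragment_size;
        dmap->qtail = (dmap->qtail + 1) % dmap->nbufs;
    }
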
1030 struct dma_buffparms *dmap = adev->dmap_out;
1032 spin_lock_irqsave(&dmap->lock,flags);
1033 if (!(dmap->flags & DMA_NODMA)) {
1034 int chan = dmap->dma, pos, n;
1040 disable_dma(dmap->dma);
1042 pos = dmap->bytes_in_use - get_dma_residue(chan);
1044 enable_dma(dmap->dma);
1047 pos = pos / dmap->fragment_size; /* Actual qhead */
1048 if (pos < 0 || pos >= dmap->nbufs)
1051 while (dmap->qhead != pos && n++ < dmap->nbufs)
1056 spin_unlock_irqrestore(&dmap->lock,flags);
1060 /* called with dmap->lock held in irq context */
1064 struct dma_buffparms *dmap = adev->dmap_in;
1066 if (dmap->raw_buf == NULL) {
1070 if (dmap->mapping_flags & DMA_MAP_MAPPED) {
1071 dmap->qtail = (dmap->qtail + 1) % dmap->nbufs;
1072 if (dmap->qtail == 0) { /* Wrapped */
1073 dmap->byte_counter += dmap->bytes_in_use;
1074 if (dmap->byte_counter >= dmap->max_byte_counter) { /* Overflow */
1075 long decr = dmap->byte_counter;
1076 dmap->byte_counter = (dmap->byte_counter % dmap->bytes_in_use) + dmap->bytes_in_use;
1077 decr -= dmap->byte_counter;
1078 dmap->user_counter -= decr;
1081 dmap->qlen++;
1084 if (dmap->needs_reorg)
1085 reorganize_buffers(dev, dmap, 0);
1086 local_start_dma(adev, dmap->raw_buf_phys, dmap->bytes_in_use,DMA_MODE_READ);
1087 adev->d->start_input(dev, dmap->raw_buf_phys + dmap->qtail * dmap->fragment_size,
1088 dmap->fragment_size, 1);
1092 dmap->flags |= DMA_ACTIVE;
1093 } else if (dmap->qlen >= (dmap->nbufs - 1)) {
1095 dmap->underrun_count++;
1098 dmap->qhead = (dmap->qhead + 1) % dmap->nbufs;
1099 dmap->qtail = (dmap->qtail + 1) % dmap->nbufs;
1100 } else if (dmap->qlen >= 0 && dmap->qlen < dmap->nbufs) {
1101 dmap->qlen++;
1102 dmap->qtail = (dmap->qtail + 1) % dmap->nbufs;
1103 if (dmap->qtail == 0) { /* Wrapped */
1104 dmap->byte_counter += dmap->bytes_in_use;
1105 if (dmap->byte_counter >= dmap->max_byte_counter) { /* Overflow */
1106 long decr = dmap->byte_counter;
1107 dmap->byte_counter = (dmap->byte_counter % dmap->bytes_in_use) + dmap->bytes_in_use;
1108 decr -= dmap->byte_counter;
1109 dmap->user_counter -= decr;
1113 if (!(adev->flags & DMA_AUTOMODE) || (dmap->flags & DMA_NODMA)) {
1114 local_start_dma(adev, dmap->raw_buf_phys, dmap->bytes_in_use, DMA_MODE_READ);
1115 adev->d->start_input(dev, dmap->raw_buf_phys + dmap->qtail * dmap->fragment_size, dmap->fragment_size, 1);
1119 dmap->flags |= DMA_ACTIVE;
1120 if (dmap->qlen > 0)
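
The input side mirrors this with overrun handling (lines 1093-1102): if the ring is full when a fragment completes, the oldest unread fragment is sacrificed by advancing qhead and qtail together, so recording continues and underrun_count (doubling as the overrun counter here) records the loss. A standalone model:

    /* Model of input-fragment completion: drop the oldest unread
     * fragment instead of stalling the hardware when the reader lags. */
    static void input_fragment_done(int *qhead, int *qtail, int *qlen,
                                    int nbufs, int *overruns)
    {
        if (*qlen >= nbufs - 1) {            /* reader too slow: overrun */
            (*overruns)++;
            *qhead = (*qhead + 1) % nbufs;   /* discard oldest data */
            *qtail = (*qtail + 1) % nbufs;   /* reuse its slot */
        } else {
            (*qlen)++;
            *qtail = (*qtail + 1) % nbufs;
        }
    }
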
1130 struct dma_buffparms *dmap = adev->dmap_in;
1133 spin_lock_irqsave(&dmap->lock,flags);
1135 if (!(dmap->flags & DMA_NODMA)) {
1136 int chan = dmap->dma, pos, n;
1141 disable_dma(dmap->dma);
1143 pos = dmap->bytes_in_use - get_dma_residue(chan);
1145 enable_dma(dmap->dma);
1148 pos = pos / dmap->fragment_size; /* Actual qtail */
1149 if (pos < 0 || pos >= dmap->nbufs)
1153 while (dmap->qtail != pos && ++n < dmap->nbufs)
1157 spin_unlock_irqrestore(&dmap->lock,flags);
1198 struct dma_buffparms *dmap = adev->dmap_in;
1202 if (dmap->mapping_flags & DMA_MAP_MAPPED) {
1203 if (dmap->qlen)
1207 if (dmap->dma_mode != DMODE_INPUT) {
1208 if (dmap->dma_mode == DMODE_NONE &&
1210 !dmap->qlen && adev->go) {
1213 spin_lock_irqsave(&dmap->lock,flags);
1214 DMAbuf_activate_recording(dev, dmap);
1215 spin_unlock_irqrestore(&dmap->lock,flags);
1219 if (!dmap->qlen)
1227 struct dma_buffparms *dmap = adev->dmap_out;
1231 if (dmap->mapping_flags & DMA_MAP_MAPPED) {
1232 if (dmap->qlen)
1236 if (dmap->dma_mode == DMODE_INPUT)
1238 if (dmap->dma_mode == DMODE_NONE)