Lines Matching refs:ah

21 static void ath9k_hw_set_txq_interrupts(struct ath_hw *ah,
24 ath_dbg(ath9k_hw_common(ah), INTERRUPT,
26 ah->txok_interrupt_mask, ah->txerr_interrupt_mask,
27 ah->txdesc_interrupt_mask, ah->txeol_interrupt_mask,
28 ah->txurn_interrupt_mask);
30 ENABLE_REGWRITE_BUFFER(ah);
32 REG_WRITE(ah, AR_IMR_S0,
33 SM(ah->txok_interrupt_mask, AR_IMR_S0_QCU_TXOK)
34 | SM(ah->txdesc_interrupt_mask, AR_IMR_S0_QCU_TXDESC));
35 REG_WRITE(ah, AR_IMR_S1,
36 SM(ah->txerr_interrupt_mask, AR_IMR_S1_QCU_TXERR)
37 | SM(ah->txeol_interrupt_mask, AR_IMR_S1_QCU_TXEOL));
39 ah->imrs2_reg &= ~AR_IMR_S2_QCU_TXURN;
40 ah->imrs2_reg |= (ah->txurn_interrupt_mask & AR_IMR_S2_QCU_TXURN);
41 REG_WRITE(ah, AR_IMR_S2, ah->imrs2_reg);
43 REGWRITE_BUFFER_FLUSH(ah);
46 u32 ath9k_hw_gettxbuf(struct ath_hw *ah, u32 q)
48 return REG_READ(ah, AR_QTXDP(q));
52 void ath9k_hw_puttxbuf(struct ath_hw *ah, u32 q, u32 txdp)
54 REG_WRITE(ah, AR_QTXDP(q), txdp);
58 void ath9k_hw_txstart(struct ath_hw *ah, u32 q)
60 ath_dbg(ath9k_hw_common(ah), QUEUE, "Enable TXE on queue: %u\n", q);
61 REG_WRITE(ah, AR_Q_TXE, 1 << q);
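
Together, ath9k_hw_puttxbuf() and ath9k_hw_txstart() form the basic TX hand-off: write the DMA address of the first descriptor into AR_QTXDP(q), then set the queue's bit in AR_Q_TXE. A minimal caller-side sketch, assuming the queue is currently idle (the helper name is illustrative, not from this listing):

    /* Sketch: hand a descriptor chain to hardware queue q and kick it.
     * Assumes the queue is idle, so AR_QTXDP(q) may simply be overwritten. */
    static void example_kick_txq(struct ath_hw *ah, u32 q, u32 first_desc)
    {
            ath9k_hw_puttxbuf(ah, q, first_desc);   /* AR_QTXDP(q) = first_desc */
            ath9k_hw_txstart(ah, q);                /* AR_Q_TXE |= 1 << q       */
    }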
65 u32 ath9k_hw_numtxpending(struct ath_hw *ah, u32 q)
69 npend = REG_READ(ah, AR_QSTS(q)) & AR_Q_STS_PEND_FR_CNT;
72 if (REG_READ(ah, AR_Q_TXE) & (1 << q))
83 * @ah: atheros hardware struct
105 bool ath9k_hw_updatetxtriglevel(struct ath_hw *ah, bool bIncTrigLevel)
109 if (ah->tx_trig_level >= ah->config.max_txtrig_level)
112 ath9k_hw_disable_interrupts(ah);
114 txcfg = REG_READ(ah, AR_TXCFG);
118 if (curLevel < ah->config.max_txtrig_level)
123 REG_WRITE(ah, AR_TXCFG,
126 ath9k_hw_enable_interrupts(ah);
128 ah->tx_trig_level = newLevel;
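
ath9k_hw_updatetxtriglevel() grows or shrinks the TX FIFO trigger level in AR_TXCFG, masking interrupts around the update (lines 112/126 above) and capping growth at ah->config.max_txtrig_level. A hedged sketch of the usual reaction to a TX underrun; the helper and the debug message are illustrative:

    /* Sketch: on a TX underrun report, ask for a larger DMA trigger level. */
    static void example_handle_tx_underrun(struct ath_hw *ah)
    {
            /* Returns false once ah->tx_trig_level is already at the cap. */
            if (!ath9k_hw_updatetxtriglevel(ah, true))
                    ath_dbg(ath9k_hw_common(ah), XMIT,
                            "TX trigger level already at maximum\n");
    }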
134 void ath9k_hw_abort_tx_dma(struct ath_hw *ah)
139 if (ah->curchan) {
140 if (IS_CHAN_HALF_RATE(ah->curchan))
142 else if (IS_CHAN_QUARTER_RATE(ah->curchan))
146 REG_WRITE(ah, AR_Q_TXD, AR_Q_TXD_M);
148 REG_SET_BIT(ah, AR_PCU_MISC, AR_PCU_FORCE_QUIET_COLL | AR_PCU_CLEAR_VMF);
149 REG_SET_BIT(ah, AR_DIAG_SW, AR_DIAG_FORCE_CH_IDLE_HIGH);
150 REG_SET_BIT(ah, AR_D_GBL_IFS_MISC, AR_D_GBL_IFS_MISC_IGNORE_BACKOFF);
157 if (!ath9k_hw_numtxpending(ah, q))
162 REG_CLR_BIT(ah, AR_PCU_MISC, AR_PCU_FORCE_QUIET_COLL | AR_PCU_CLEAR_VMF);
163 REG_CLR_BIT(ah, AR_DIAG_SW, AR_DIAG_FORCE_CH_IDLE_HIGH);
164 REG_CLR_BIT(ah, AR_D_GBL_IFS_MISC, AR_D_GBL_IFS_MISC_IGNORE_BACKOFF);
166 REG_WRITE(ah, AR_Q_TXD, 0);
170 bool ath9k_hw_stop_dma_queue(struct ath_hw *ah, u32 q)
177 REG_WRITE(ah, AR_Q_TXD, 1 << q);
183 if (ath9k_hw_numtxpending(ah, q) == 0)
187 REG_WRITE(ah, AR_Q_TXD, 0);
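
The two stop paths differ in scope: ath9k_hw_stop_dma_queue() pulses one queue's bit in AR_Q_TXD and polls ath9k_hw_numtxpending() for that queue, while ath9k_hw_abort_tx_dma() writes AR_Q_TXD_M to stop every queue and additionally forces the PCU quiet. A caller-side sketch; the fallback choice here is illustrative rather than the driver's actual policy:

    /* Sketch: stop DMA on a single queue, escalating to a full abort if it
     * does not drain. */
    static void example_drain_txq(struct ath_hw *ah, u32 q)
    {
            if (ath9k_hw_stop_dma_queue(ah, q))
                    return;                         /* queue drained cleanly */
            ath9k_hw_abort_tx_dma(ah);              /* force-stop all queues */
    }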
196 bool ath9k_hw_set_txq_props(struct ath_hw *ah, int q,
200 struct ath_common *common = ath9k_hw_common(ah);
203 qi = &ah->txq[q];
261 bool ath9k_hw_get_txq_props(struct ath_hw *ah, int q,
264 struct ath_common *common = ath9k_hw_common(ah);
267 qi = &ah->txq[q];
293 int ath9k_hw_setuptxqueue(struct ath_hw *ah, enum ath9k_tx_queue type,
296 struct ath_common *common = ath9k_hw_common(ah);
323 qi = &ah->txq[q];
331 (void) ath9k_hw_set_txq_props(ah, q, qinfo);
337 static void ath9k_hw_clear_queue_interrupts(struct ath_hw *ah, u32 q)
339 ah->txok_interrupt_mask &= ~(1 << q);
340 ah->txerr_interrupt_mask &= ~(1 << q);
341 ah->txdesc_interrupt_mask &= ~(1 << q);
342 ah->txeol_interrupt_mask &= ~(1 << q);
343 ah->txurn_interrupt_mask &= ~(1 << q);
346 bool ath9k_hw_releasetxqueue(struct ath_hw *ah, u32 q)
348 struct ath_common *common = ath9k_hw_common(ah);
351 qi = &ah->txq[q];
360 ath9k_hw_clear_queue_interrupts(ah, q);
361 ath9k_hw_set_txq_interrupts(ah, qi);
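
ath9k_hw_setuptxqueue() hands back the allocated queue number (negative on failure) and immediately applies the caller's ath9k_tx_queue_info via ath9k_hw_set_txq_props() (line 331); ath9k_hw_releasetxqueue() later marks the slot inactive and clears its bits from the per-queue interrupt masks. An allocate/release sketch; the EDCA values below are illustrative defaults, not taken from this file:

    /* Sketch: allocate a data queue, use it, then hand it back. */
    static int example_alloc_data_queue(struct ath_hw *ah)
    {
            struct ath9k_tx_queue_info qi = { 0 };
            int q;

            qi.tqi_aifs = 2;                        /* illustrative EDCA params */
            qi.tqi_cwmin = ATH9K_TXQ_USEDEFAULT;
            qi.tqi_cwmax = ATH9K_TXQ_USEDEFAULT;

            q = ath9k_hw_setuptxqueue(ah, ATH9K_TX_QUEUE_DATA, &qi);
            if (q < 0)
                    return q;

            /* ... queue q is usable here ... */

            ath9k_hw_releasetxqueue(ah, q);
            return 0;
    }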
367 bool ath9k_hw_resettxqueue(struct ath_hw *ah, u32 q)
369 struct ath_common *common = ath9k_hw_common(ah);
373 qi = &ah->txq[q];
388 ENABLE_REGWRITE_BUFFER(ah);
390 REG_WRITE(ah, AR_DLCL_IFS(q),
395 REG_WRITE(ah, AR_DRETRY_LIMIT(q),
400 REG_WRITE(ah, AR_QMISC(q), AR_Q_MISC_DCU_EARLY_TERM_REQ);
402 if (AR_SREV_9340(ah) && !AR_SREV_9340_13_OR_LATER(ah))
403 REG_WRITE(ah, AR_DMISC(q),
406 REG_WRITE(ah, AR_DMISC(q),
410 REG_WRITE(ah, AR_QCBRCFG(q),
413 REG_SET_BIT(ah, AR_QMISC(q), AR_Q_MISC_FSP_CBR |
418 REG_WRITE(ah, AR_QRDYTIMECFG(q),
423 REG_WRITE(ah, AR_DCHNTIME(q),
429 REG_SET_BIT(ah, AR_QMISC(q), AR_Q_MISC_RDYTIME_EXP_POLICY);
432 REG_SET_BIT(ah, AR_DMISC(q), AR_D_MISC_POST_FR_BKOFF_DIS);
434 REGWRITE_BUFFER_FLUSH(ah);
437 REG_SET_BIT(ah, AR_DMISC(q), AR_D_MISC_FRAG_BKOFF_EN);
441 ENABLE_REGWRITE_BUFFER(ah);
443 REG_SET_BIT(ah, AR_QMISC(q),
448 REG_SET_BIT(ah, AR_DMISC(q),
454 REGWRITE_BUFFER_FLUSH(ah);
461 if (AR_SREV_9300_20_OR_LATER(ah) &&
462 ah->opmode != NL80211_IFTYPE_ADHOC) {
463 REG_WRITE(ah, AR_DLCL_IFS(q), SM(0, AR_D_LCL_IFS_CWMIN)
469 ENABLE_REGWRITE_BUFFER(ah);
471 REG_SET_BIT(ah, AR_QMISC(q),
476 (ah->config.sw_beacon_response_time -
477 ah->config.dma_beacon_response_time)) * 1024;
478 REG_WRITE(ah, AR_QRDYTIMECFG(q),
480 REG_SET_BIT(ah, AR_DMISC(q),
484 REGWRITE_BUFFER_FLUSH(ah);
488 REG_SET_BIT(ah, AR_QMISC(q), AR_Q_MISC_CBR_INCR_DIS1);
491 REG_SET_BIT(ah, AR_DMISC(q), AR_D_MISC_POST_FR_BKOFF_DIS);
498 REG_SET_BIT(ah, AR_DMISC(q),
504 if (AR_SREV_9300_20_OR_LATER(ah))
505 REG_WRITE(ah, AR_Q_DESC_CRCCHK, AR_Q_DESC_CRCCHK_EN);
507 ath9k_hw_clear_queue_interrupts(ah, q);
509 ah->txok_interrupt_mask |= 1 << q;
510 ah->txerr_interrupt_mask |= 1 << q;
513 ah->txdesc_interrupt_mask |= 1 << q;
515 ah->txeol_interrupt_mask |= 1 << q;
517 ah->txurn_interrupt_mask |= 1 << q;
518 ath9k_hw_set_txq_interrupts(ah, qi);
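
ath9k_hw_resettxqueue() reprograms the QCU/DCU registers (AR_DLCL_IFS, AR_DRETRY_LIMIT, AR_QMISC, AR_DMISC, ...) from the cached ah->txq[q] state and restores the queue's interrupt-mask bits, which makes it the natural follow-up to ath9k_hw_set_txq_props() whenever queue parameters change. A hedged sketch of that pairing:

    /* Sketch: push updated EDCA/queue parameters out to the hardware. */
    static bool example_update_txq(struct ath_hw *ah, int q,
                                   const struct ath9k_tx_queue_info *qinfo)
    {
            if (!ath9k_hw_set_txq_props(ah, q, qinfo))
                    return false;
            return ath9k_hw_resettxqueue(ah, q);    /* rewrite QCU/DCU registers */
    }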
524 int ath9k_hw_rxprocdesc(struct ath_hw *ah, struct ath_desc *ds,
585 if (AR_SREV_9280_20_OR_LATER(ah))
639 bool ath9k_hw_setrxabort(struct ath_hw *ah, bool set)
644 REG_SET_BIT(ah, AR_DIAG_SW,
647 if (!ath9k_hw_wait(ah, AR_OBS_BUS_1, AR_OBS_BUS_1_RX_STATE,
649 REG_CLR_BIT(ah, AR_DIAG_SW,
653 reg = REG_READ(ah, AR_OBS_BUS_1);
654 ath_err(ath9k_hw_common(ah),
661 REG_CLR_BIT(ah, AR_DIAG_SW,
669 void ath9k_hw_putrxbuf(struct ath_hw *ah, u32 rxdp)
671 REG_WRITE(ah, AR_RXDP, rxdp);
675 void ath9k_hw_startpcureceive(struct ath_hw *ah, bool is_scanning)
677 ath9k_enable_mib_counters(ah);
679 ath9k_ani_reset(ah, is_scanning);
681 REG_CLR_BIT(ah, AR_DIAG_SW, (AR_DIAG_RX_DIS | AR_DIAG_RX_ABORT));
685 void ath9k_hw_abortpcurecv(struct ath_hw *ah)
687 REG_SET_BIT(ah, AR_DIAG_SW, AR_DIAG_RX_ABORT | AR_DIAG_RX_DIS);
689 ath9k_hw_disable_mib_counters(ah);
693 bool ath9k_hw_stopdmarecv(struct ath_hw *ah, bool *reset)
696 struct ath_common *common = ath9k_hw_common(ah);
701 REG_WRITE(ah, AR_MACMISC,
706 REG_WRITE(ah, AR_CR, AR_CR_RXD);
710 if ((REG_READ(ah, AR_CR) & AR_CR_RXE(ah)) == 0)
713 if (!AR_SREV_9300_20_OR_LATER(ah)) {
714 mac_status = REG_READ(ah, AR_DMADBG_7) & 0x7f0;
730 REG_READ(ah, AR_CR),
731 REG_READ(ah, AR_DIAG_SW),
732 REG_READ(ah, AR_DMADBG_7));
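
For RX teardown, ath9k_hw_abortpcurecv() sets AR_DIAG_RX_ABORT | AR_DIAG_RX_DIS and stops the MIB counters, and ath9k_hw_stopdmarecv() then disables RX DMA via AR_CR_RXD, reporting through *reset whether the DMA engine looks stuck enough to warrant a chip reset. A hedged teardown sketch; the return convention is illustrative:

    /* Sketch: stop the PCU first, then RX DMA; report whether RX stopped
     * cleanly without needing a chip reset. */
    static bool example_stop_rx(struct ath_hw *ah)
    {
            bool reset = false;
            bool stopped;

            ath9k_hw_abortpcurecv(ah);              /* PCU: abort + disable RX */
            stopped = ath9k_hw_stopdmarecv(ah, &reset);

            return stopped && !reset;
    }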
742 int ath9k_hw_beaconq_setup(struct ath_hw *ah)
751 if (ah->caps.hw_caps & ATH9K_HW_CAP_EDMA)
754 return ath9k_hw_setuptxqueue(ah, ATH9K_TX_QUEUE_BEACON, &qi);
758 bool ath9k_hw_intrpend(struct ath_hw *ah)
762 if (AR_SREV_9100(ah))
765 host_isr = REG_READ(ah, AR_INTR_ASYNC_CAUSE(ah));
772 host_isr = REG_READ(ah, AR_INTR_SYNC_CAUSE(ah));
781 void ath9k_hw_kill_interrupts(struct ath_hw *ah)
783 struct ath_common *common = ath9k_hw_common(ah);
786 REG_WRITE(ah, AR_IER, AR_IER_DISABLE);
787 (void) REG_READ(ah, AR_IER);
788 if (!AR_SREV_9100(ah)) {
789 REG_WRITE(ah, AR_INTR_ASYNC_ENABLE(ah), 0);
790 (void) REG_READ(ah, AR_INTR_ASYNC_ENABLE(ah));
792 REG_WRITE(ah, AR_INTR_SYNC_ENABLE(ah), 0);
793 (void) REG_READ(ah, AR_INTR_SYNC_ENABLE(ah));
798 void ath9k_hw_disable_interrupts(struct ath_hw *ah)
800 if (!(ah->imask & ATH9K_INT_GLOBAL))
801 atomic_set(&ah->intr_ref_cnt, -1);
803 atomic_dec(&ah->intr_ref_cnt);
805 ath9k_hw_kill_interrupts(ah);
809 static void __ath9k_hw_enable_interrupts(struct ath_hw *ah)
811 struct ath_common *common = ath9k_hw_common(ah);
815 if (AR_SREV_9340(ah) || AR_SREV_9550(ah) || AR_SREV_9531(ah) ||
816 AR_SREV_9561(ah))
821 if (ah->imask & ATH9K_INT_MCI)
825 REG_WRITE(ah, AR_IER, AR_IER_ENABLE);
826 if (!AR_SREV_9100(ah)) {
827 REG_WRITE(ah, AR_INTR_ASYNC_ENABLE(ah), async_mask);
828 REG_WRITE(ah, AR_INTR_ASYNC_MASK(ah), async_mask);
830 REG_WRITE(ah, AR_INTR_SYNC_ENABLE(ah), sync_default);
831 REG_WRITE(ah, AR_INTR_SYNC_MASK(ah), sync_default);
834 REG_READ(ah, AR_IMR), REG_READ(ah, AR_IER));
836 if (ah->msi_enabled) {
841 ath_dbg(ath9k_hw_common(ah), INTERRUPT,
842 "Enabling MSI, msi_mask=0x%X\n", ah->msi_mask);
844 REG_WRITE(ah, AR_INTR_PRIO_ASYNC_ENABLE(ah), ah->msi_mask);
845 REG_WRITE(ah, AR_INTR_PRIO_ASYNC_MASK(ah), ah->msi_mask);
846 ath_dbg(ath9k_hw_common(ah), INTERRUPT,
848 REG_READ(ah, AR_INTR_PRIO_ASYNC_ENABLE(ah)),
849 REG_READ(ah, AR_INTR_PRIO_ASYNC_MASK(ah)));
851 if (ah->msi_reg == 0)
852 ah->msi_reg = REG_READ(ah, AR_PCIE_MSI(ah));
854 ath_dbg(ath9k_hw_common(ah), INTERRUPT,
855 "AR_PCIE_MSI=0x%X, ah->msi_reg = 0x%X\n",
856 AR_PCIE_MSI(ah), ah->msi_reg);
860 REG_WRITE(ah, AR_PCIE_MSI(ah),
861 (ah->msi_reg | AR_PCIE_MSI_ENABLE)
863 _msi_reg = REG_READ(ah, AR_PCIE_MSI(ah));
868 ath_err(ath9k_hw_common(ah),
874 void ath9k_hw_resume_interrupts(struct ath_hw *ah)
876 struct ath_common *common = ath9k_hw_common(ah);
878 if (!(ah->imask & ATH9K_INT_GLOBAL))
881 if (atomic_read(&ah->intr_ref_cnt) != 0) {
883 atomic_read(&ah->intr_ref_cnt));
887 __ath9k_hw_enable_interrupts(ah);
891 void ath9k_hw_enable_interrupts(struct ath_hw *ah)
893 struct ath_common *common = ath9k_hw_common(ah);
895 if (!(ah->imask & ATH9K_INT_GLOBAL))
898 if (!atomic_inc_and_test(&ah->intr_ref_cnt)) {
900 atomic_read(&ah->intr_ref_cnt));
904 __ath9k_hw_enable_interrupts(ah);
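
ath9k_hw_disable_interrupts() and ath9k_hw_enable_interrupts() are reference-counted through ah->intr_ref_cnt, which is why ath9k_hw_updatetxtriglevel() above can bracket its AR_TXCFG update with the pair without trampling an outer caller's masking. A sketch of that bracketing pattern; the body comment stands in for whatever registers need protecting:

    /* Sketch: bracket a register update that must not race the ISR with the
     * ref-counted disable/enable pair, as ath9k_hw_updatetxtriglevel() does. */
    static void example_quiet_hw_update(struct ath_hw *ah)
    {
            ath9k_hw_disable_interrupts(ah);        /* decrements intr_ref_cnt      */
            /* ... touch registers here ... */
            ath9k_hw_enable_interrupts(ah);         /* re-enables when count hits 0 */
    }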
908 void ath9k_hw_set_interrupts(struct ath_hw *ah)
910 enum ath9k_int ints = ah->imask;
912 struct ath9k_hw_capabilities *pCap = &ah->caps;
913 struct ath_common *common = ath9k_hw_common(ah);
916 ath9k_hw_disable_interrupts(ah);
918 if (ah->msi_enabled) {
921 REG_WRITE(ah, AR_INTR_PRIO_ASYNC_ENABLE(ah), 0);
922 REG_READ(ah, AR_INTR_PRIO_ASYNC_ENABLE(ah));
930 ah->msi_mask = 0;
932 ah->msi_mask |= AR_INTR_PRIO_TX;
933 if (ah->config.tx_intr_mitigation)
936 if (ah->txok_interrupt_mask)
938 if (ah->txdesc_interrupt_mask)
941 if (ah->txerr_interrupt_mask)
943 if (ah->txeol_interrupt_mask)
947 ah->msi_mask |= AR_INTR_PRIO_RXLP | AR_INTR_PRIO_RXHP;
948 if (AR_SREV_9300_20_OR_LATER(ah)) {
950 if (ah->config.rx_intr_mitigation) {
957 if (ah->config.rx_intr_mitigation)
991 if (ah->config.hw_hang_checks & HW_BB_WATCHDOG) {
999 REG_WRITE(ah, AR_IMR, mask);
1000 ah->imrs2_reg &= ~(AR_IMR_S2_TIM |
1009 if (ah->config.hw_hang_checks & HW_BB_WATCHDOG) {
1011 ah->imrs2_reg &= ~AR_IMR_S2_BB_WATCHDOG;
1014 ah->imrs2_reg |= mask2;
1015 REG_WRITE(ah, AR_IMR_S2, ah->imrs2_reg);
1019 REG_SET_BIT(ah, AR_IMR_S5, AR_IMR_S5_TIM_TIMER);
1021 REG_CLR_BIT(ah, AR_IMR_S5, AR_IMR_S5_TIM_TIMER);
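
ath9k_hw_set_interrupts() translates ah->imask, the cached per-queue TX masks, and (with MSI enabled) ah->msi_mask into AR_IMR, AR_IMR_S2 and AR_IMR_S5, leaving the host interrupt lines to the enable/disable helpers above. A hedged sketch of changing the mask at runtime; ATH9K_INT_TIM_TIMER is just an example bit and the surrounding call order is assumed, not taken from this file:

    /* Sketch: update the interrupt mask at runtime. */
    static void example_enable_tim_timer_irq(struct ath_hw *ah)
    {
            ath9k_hw_disable_interrupts(ah);
            ah->imask |= ATH9K_INT_TIM_TIMER;       /* illustrative mask bit      */
            ath9k_hw_set_interrupts(ah);            /* program AR_IMR* from imask */
            ath9k_hw_enable_interrupts(ah);
    }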
1031 void ath9k_hw_set_tx_filter(struct ath_hw *ah, u8 destidx, bool set)
1042 ath_dbg(ath9k_hw_common(ah), PS,
1045 REG_WRITE(ah, AR_D_TXBLK_BASE, filter);