/netgear-R7000-V1.0.7.12_1.2.5/components/opensource/linux/linux-2.6.36/drivers/staging/wlan-ng/

Lines Matching refs:hw

95 * 'initialized' and 'running' states of the hw/MAC combo.  The four
163 static int submit_rx_urb(hfa384x_t *hw, gfp_t flags);
165 static int submit_tx_urb(hfa384x_t *hw, struct urb *tx_urb, gfp_t flags);
183 static void hfa384x_usbin_ctlx(hfa384x_t *hw, hfa384x_usbin_t *usbin,
189 static void hfa384x_usbctlxq_run(hfa384x_t *hw);
201 static int hfa384x_usbctlx_submit(hfa384x_t *hw, hfa384x_usbctlx_t *ctlx);
203 static void unlocked_usbctlx_complete(hfa384x_t *hw, hfa384x_usbctlx_t *ctlx);
210 hfa384x_usbctlx_complete_sync(hfa384x_t *hw,
215 unlocked_usbctlx_cancel_async(hfa384x_t *hw, hfa384x_usbctlx_t *ctlx);
217 static void hfa384x_cb_status(hfa384x_t *hw, const hfa384x_usbctlx_t *ctlx);
219 static void hfa384x_cb_rrid(hfa384x_t *hw, const hfa384x_usbctlx_t *ctlx);
232 hfa384x_docmd(hfa384x_t *hw,
238 hfa384x_dorrid(hfa384x_t *hw,
246 hfa384x_dowrid(hfa384x_t *hw,
254 hfa384x_dormem(hfa384x_t *hw,
263 hfa384x_dowmem(hfa384x_t *hw,
288 static inline hfa384x_usbctlx_t *get_active_ctlx(hfa384x_t * hw)
290 return list_entry(hw->ctlxq.active.next, hfa384x_usbctlx_t, list);
323 * hw device struct
332 static int submit_rx_urb(hfa384x_t *hw, gfp_t memflags)
344 usb_fill_bulk_urb(&hw->rx_urb, hw->usb,
345 hw->endp_in,
347 hfa384x_usbin_callback, hw->wlandev);
349 hw->rx_urb_skb = skb;
352 if (!hw->wlandev->hwremoved &&
353 !test_bit(WORK_RX_HALT, &hw->usb_flags)) {
354 result = SUBMIT_URB(&hw->rx_urb, memflags);
360 hw->wlandev->netdev->name);
361 if (!test_and_set_bit(WORK_RX_HALT, &hw->usb_flags))
362 schedule_work(&hw->usb_work);
369 hw->rx_urb_skb = NULL;
384 * hw device struct
394 static int submit_tx_urb(hfa384x_t *hw, struct urb *tx_urb, gfp_t memflags)
396 struct net_device *netdev = hw->wlandev->netdev;
402 if (!hw->wlandev->hwremoved
403 && !test_bit(WORK_TX_HALT, &hw->usb_flags)) {
411 set_bit(WORK_TX_HALT, &hw->usb_flags);
412 schedule_work(&hw->usb_work);
430 * hw device structure
440 hfa384x_t *hw = container_of(data, struct hfa384x, usb_work);
441 struct net_device *netdev = hw->wlandev->netdev;
446 if (hw->wlandev->hwremoved)
450 if (test_bit(WORK_RX_HALT, &hw->usb_flags)) {
453 usb_kill_urb(&hw->rx_urb); /* Cannot be holding spinlock! */
455 ret = usb_clear_halt(hw->usb, hw->endp_in);
463 clear_bit(WORK_RX_HALT, &hw->usb_flags);
464 set_bit(WORK_RX_RESUME, &hw->usb_flags);
469 if (test_bit(WORK_RX_RESUME, &hw->usb_flags)) {
472 ret = submit_rx_urb(hw, GFP_KERNEL);
477 clear_bit(WORK_RX_RESUME, &hw->usb_flags);
482 if (test_bit(WORK_TX_HALT, &hw->usb_flags)) {
485 usb_kill_urb(&hw->tx_urb);
486 ret = usb_clear_halt(hw->usb, hw->endp_out);
494 clear_bit(WORK_TX_HALT, &hw->usb_flags);
495 set_bit(WORK_TX_RESUME, &hw->usb_flags);
501 hfa384x_usbctlxq_run(hw);
506 if (test_and_clear_bit(WORK_TX_RESUME, &hw->usb_flags))
507 netif_wake_queue(hw->wlandev->netdev);
518 * hw device structure
531 void hfa384x_create(hfa384x_t *hw, struct usb_device *usb)
533 memset(hw, 0, sizeof(hfa384x_t));
534 hw->usb = usb;
537 hw->endp_in = usb_rcvbulkpipe(usb, 1);
538 hw->endp_out = usb_sndbulkpipe(usb, 2);
541 init_waitqueue_head(&hw->cmdq);
544 spin_lock_init(&hw->ctlxq.lock);
545 INIT_LIST_HEAD(&hw->ctlxq.pending);
546 INIT_LIST_HEAD(&hw->ctlxq.active);
547 INIT_LIST_HEAD(&hw->ctlxq.completing);
548 INIT_LIST_HEAD(&hw->ctlxq.reapable);
551 skb_queue_head_init(&hw->authq);
553 tasklet_init(&hw->reaper_bh,
554 hfa384x_usbctlx_reaper_task, (unsigned long)hw);
555 tasklet_init(&hw->completion_bh,
556 hfa384x_usbctlx_completion_task, (unsigned long)hw);
557 INIT_WORK(&hw->link_bh, prism2sta_processing_defer);
558 INIT_WORK(&hw->usb_work, hfa384x_usb_defer);
560 init_timer(&hw->throttle);
561 hw->throttle.function = hfa384x_usb_throttlefn;
562 hw->throttle.data = (unsigned long)hw;
564 init_timer(&hw->resptimer);
565 hw->resptimer.function = hfa384x_usbctlx_resptimerfn;
566 hw->resptimer.data = (unsigned long)hw;
568 init_timer(&hw->reqtimer);
569 hw->reqtimer.function = hfa384x_usbctlx_reqtimerfn;
570 hw->reqtimer.data = (unsigned long)hw;
572 usb_init_urb(&hw->rx_urb);
573 usb_init_urb(&hw->tx_urb);
574 usb_init_urb(&hw->ctlx_urb);
576 hw->link_status = HFA384x_LINK_NOTCONNECTED;
577 hw->state = HFA384x_STATE_INIT;
579 INIT_WORK(&hw->commsqual_bh, prism2sta_commsqual_defer);
580 init_timer(&hw->commsqual_timer);
581 hw->commsqual_timer.data = (unsigned long)hw;
582 hw->commsqual_timer.function = prism2sta_commsqual_timer;
588 * Partner to hfa384x_create(). This function cleans up the hw
591 * point in the future, an hw in the 'shutdown' state requires a 'deep'
593 * is called on a _running_ hw structure, the drvr_stop() function is
597 * hw device structure
607 void hfa384x_destroy(hfa384x_t *hw)
611 if (hw->state == HFA384x_STATE_RUNNING)
612 hfa384x_drvr_stop(hw);
613 hw->state = HFA384x_STATE_PREINIT;
615 if (hw->scanresults) {
616 kfree(hw->scanresults);
617 hw->scanresults = NULL;
621 while ((skb = skb_dequeue(&hw->authq)))
801 * mark the hw struct as such.
807 * hw hw struct
818 static void hfa384x_cb_status(hfa384x_t *hw, const hfa384x_usbctlx_t *ctlx)
831 ctlx->usercb(hw, &cmdresult, ctlx->usercb_data);
844 * hw hw struct
855 static void hfa384x_cb_rrid(hfa384x_t *hw, const hfa384x_usbctlx_t *ctlx)
868 ctlx->usercb(hw, &rridresult, ctlx->usercb_data);
872 static inline int hfa384x_docmd_wait(hfa384x_t *hw, hfa384x_metacmd_t *cmd)
874 return hfa384x_docmd(hw, DOWAIT, cmd, NULL, NULL, NULL);
878 hfa384x_docmd_async(hfa384x_t *hw,
882 return hfa384x_docmd(hw, DOASYNC, cmd, cmdcb, usercb, usercb_data);
886 hfa384x_dorrid_wait(hfa384x_t *hw, u16 rid, void *riddata,
889 return hfa384x_dorrid(hw, DOWAIT,
894 hfa384x_dorrid_async(hfa384x_t *hw,
899 return hfa384x_dorrid(hw, DOASYNC,
905 hfa384x_dowrid_wait(hfa384x_t *hw, u16 rid, void *riddata,
908 return hfa384x_dowrid(hw, DOWAIT,
913 hfa384x_dowrid_async(hfa384x_t *hw,
918 return hfa384x_dowrid(hw, DOASYNC,
924 hfa384x_dormem_wait(hfa384x_t *hw,
927 return hfa384x_dormem(hw, DOWAIT,
932 hfa384x_dormem_async(hfa384x_t *hw,
937 return hfa384x_dormem(hw, DOASYNC,
943 hfa384x_dowmem_wait(hfa384x_t *hw,
946 return hfa384x_dowmem(hw, DOWAIT,
951 hfa384x_dowmem_async(hfa384x_t *hw,
959 return hfa384x_dowmem(hw, DOASYNC,
967 * Issues the initialize command and sets the hw->state based
971 * hw device structure
983 int hfa384x_cmd_initialize(hfa384x_t *hw)
994 result = hfa384x_docmd_wait(hw, &cmd);
1003 hw->port_enabled[i] = 0;
1006 hw->link_status = HFA384x_LINK_NOTCONNECTED;
1018 * hw device structure
1031 int hfa384x_cmd_disable(hfa384x_t *hw, u16 macport)
1042 result = hfa384x_docmd_wait(hw, &cmd);
1054 * hw device structure
1067 int hfa384x_cmd_enable(hfa384x_t *hw, u16 macport)
1078 result = hfa384x_docmd_wait(hw, &cmd);
1098 * hw device structure
1112 int hfa384x_cmd_monitor(hfa384x_t *hw, u16 enable)
1123 result = hfa384x_docmd_wait(hw, &cmd);
1137 * hw device structure
1166 int hfa384x_cmd_download(hfa384x_t *hw, u16 mode, u16 lowaddr,
1182 result = hfa384x_docmd_wait(hw, &cmd);
1190 * Perform a reset of the hfa38xx MAC core. We assume that the hw
1194 * up some leftover cruft in the hw structure.
1197 * hw device structure
1210 int hfa384x_corereset(hfa384x_t *hw, int holdtime, int settletime, int genesis)
1214 result = usb_reset_device(hw->usb);
1230 * hw device structure
1247 static int hfa384x_usbctlx_complete_sync(hfa384x_t *hw,
1256 spin_lock_irqsave(&hw->ctlxq.lock, flags);
1263 if (hw->wlandev->hwremoved) {
1264 spin_unlock_irqrestore(&hw->ctlxq.lock, flags);
1278 if (ctlx == get_active_ctlx(hw)) {
1279 spin_unlock_irqrestore(&hw->ctlxq.lock, flags);
1281 del_singleshot_timer_sync(&hw->reqtimer);
1282 del_singleshot_timer_sync(&hw->resptimer);
1283 hw->req_timer_done = 1;
1284 hw->resp_timer_done = 1;
1285 usb_kill_urb(&hw->ctlx_urb);
1287 spin_lock_irqsave(&hw->ctlxq.lock, flags);
1295 if (hw->wlandev->hwremoved)
1306 list_move_tail(&ctlx->list, &hw->ctlxq.completing);
1308 spin_unlock_irqrestore(&hw->ctlxq.lock, flags);
1311 hfa384x_usbctlxq_run(hw);
1323 spin_unlock_irqrestore(&hw->ctlxq.lock, flags);
1340 * hw device structure
1354 * in hw structure.
1363 hfa384x_docmd(hfa384x_t *hw,
1395 result = hfa384x_usbctlx_submit(hw, ctlx);
1402 hfa384x_usbctlx_complete_sync(hw, ctlx,
1425 * hw device structure
1443 * in hw structure.
1452 hfa384x_dorrid(hfa384x_t *hw,
1482 result = hfa384x_usbctlx_submit(hw, ctlx);
1489 hfa384x_usbctlx_complete_sync(hw, ctlx,
1510 * hw device structure
1524 * in hw structure.
1533 hfa384x_dowrid(hfa384x_t *hw,
1567 result = hfa384x_usbctlx_submit(hw, ctlx);
1574 result = hfa384x_usbctlx_complete_sync(hw,
1596 * hw device structure
1611 * in hw structure.
1620 hfa384x_dormem(hfa384x_t *hw,
1659 result = hfa384x_usbctlx_submit(hw, ctlx);
1666 hfa384x_usbctlx_complete_sync(hw, ctlx,
1687 * hw device structure
1702 * in hw structure.
1711 hfa384x_dowmem(hfa384x_t *hw,
1749 result = hfa384x_usbctlx_submit(hw, ctlx);
1756 result = hfa384x_usbctlx_complete_sync(hw,
1775 * hw device structure
1785 int hfa384x_drvr_commtallies(hfa384x_t *hw)
1794 hfa384x_docmd_async(hw, &cmd, NULL, NULL, NULL);
1808 * hw device structure
1821 int hfa384x_drvr_disable(hfa384x_t *hw, u16 macport)
1825 if ((!hw->isap && macport != 0) ||
1826 (hw->isap && !(macport <= HFA384x_PORTID_MAX)) ||
1827 !(hw->port_enabled[macport])) {
1830 result = hfa384x_cmd_disable(hw, macport);
1832 hw->port_enabled[macport] = 0;
1846 * hw device structure
1859 int hfa384x_drvr_enable(hfa384x_t *hw, u16 macport)
1863 if ((!hw->isap && macport != 0) ||
1864 (hw->isap && !(macport <= HFA384x_PORTID_MAX)) ||
1865 (hw->port_enabled[macport])) {
1868 result = hfa384x_cmd_enable(hw, macport);
1870 hw->port_enabled[macport] = 1;
1884 * hw device structure
1896 int hfa384x_drvr_flashdl_enable(hfa384x_t *hw)
1903 if (hw->port_enabled[i]) {
1910 if (hw->dlstate != HFA384x_DLSTATE_DISABLED)
1914 result = hfa384x_drvr_getconfig(hw, HFA384x_RID_DOWNLOADBUFFER,
1915 &(hw->bufinfo), sizeof(hw->bufinfo));
1919 hw->bufinfo.page = le16_to_cpu(hw->bufinfo.page);
1920 hw->bufinfo.offset = le16_to_cpu(hw->bufinfo.offset);
1921 hw->bufinfo.len = le16_to_cpu(hw->bufinfo.len);
1922 result = hfa384x_drvr_getconfig16(hw, HFA384x_RID_MAXLOADTIME,
1923 &(hw->dltimeout));
1927 hw->dltimeout = le16_to_cpu(hw->dltimeout);
1931 hw->dlstate = HFA384x_DLSTATE_FLASHENABLED;
1943 * hw device structure
1955 int hfa384x_drvr_flashdl_disable(hfa384x_t *hw)
1958 if (hw->dlstate != HFA384x_DLSTATE_FLASHENABLED)
1965 hfa384x_cmd_download(hw, HFA384x_PROGMODE_DISABLE, 0, 0, 0);
1966 hw->dlstate = HFA384x_DLSTATE_DISABLED;
1985 * hw device structure
2000 int hfa384x_drvr_flashdl_write(hfa384x_t *hw, u32 daddr, void *buf, u32 len)
2020 if (hw->dlstate != HFA384x_DLSTATE_FLASHENABLED)
2028 HFA384x_ADDR_AUX_MKFLAT(hw->bufinfo.page, hw->bufinfo.offset);
2030 hw->bufinfo.page, hw->bufinfo.offset, dlbufaddr);
2040 nburns = len / hw->bufinfo.len;
2041 nburns += (len % hw->bufinfo.len) ? 1 : 0;
2044 nwrites = hw->bufinfo.len / HFA384x_USB_RWMEM_MAXLEN;
2045 nwrites += (hw->bufinfo.len % HFA384x_USB_RWMEM_MAXLEN) ? 1 : 0;
2050 burnlen = (len - (hw->bufinfo.len * i)) > hw->bufinfo.len ?
2051 hw->bufinfo.len : (len - (hw->bufinfo.len * i));
2052 burndaddr = daddr + (hw->bufinfo.len * i);
2060 result = hfa384x_cmd_download(hw, HFA384x_PROGMODE_NV,
2072 (i * hw->bufinfo.len) +
2084 result = hfa384x_dowmem_wait(hw,
2091 result = hfa384x_cmd_download(hw,
2120 * hw device structure
2139 int hfa384x_drvr_getconfig(hfa384x_t *hw, u16 rid, void *buf, u16 len)
2143 result = hfa384x_dorrid_wait(hw, rid, buf, len);
2155 * hw device structure
2176 hfa384x_drvr_getconfig_async(hfa384x_t *hw,
2179 return hfa384x_dorrid_async(hw, rid, NULL, 0,
2189 * hw device structure
2207 hfa384x_drvr_setconfig_async(hfa384x_t *hw,
2212 return hfa384x_dowrid_async(hw, rid, buf, len,
2222 * hw device structure
2234 int hfa384x_drvr_ramdl_disable(hfa384x_t *hw)
2237 if (hw->dlstate != HFA384x_DLSTATE_RAMENABLED)
2244 hfa384x_cmd_download(hw, HFA384x_PROGMODE_DISABLE, 0, 0, 0);
2245 hw->dlstate = HFA384x_DLSTATE_DISABLED;
2259 * hw device structure
2274 int hfa384x_drvr_ramdl_enable(hfa384x_t *hw, u32 exeaddr)
2283 if (hw->port_enabled[i]) {
2291 if (hw->dlstate != HFA384x_DLSTATE_DISABLED) {
2302 result = hfa384x_cmd_download(hw, HFA384x_PROGMODE_RAM,
2307 hw->dlstate = HFA384x_DLSTATE_RAMENABLED;
2327 * hw device structure
2342 int hfa384x_drvr_ramdl_write(hfa384x_t *hw, u32 daddr, void *buf, u32 len)
2354 if (hw->dlstate != HFA384x_DLSTATE_RAMENABLED)
2374 result = hfa384x_dowmem_wait(hw,
2400 * hw device structure
2421 int hfa384x_drvr_readpda(hfa384x_t *hw, void *buf, unsigned int len)
2450 result = hfa384x_dormem_wait(hw, currpage, curroffset, buf,
2514 * hw device structure
2529 int hfa384x_drvr_setconfig(hfa384x_t *hw, u16 rid, void *buf, u16 len)
2531 return hfa384x_dowrid_wait(hw, rid, buf, len);
2542 * hw device structure
2554 int hfa384x_drvr_start(hfa384x_t *hw)
2567 usb_get_status(hw->usb, USB_RECIP_ENDPOINT, hw->endp_in, &status);
2572 if ((status == 1) && usb_clear_halt(hw->usb, hw->endp_in))
2576 usb_get_status(hw->usb, USB_RECIP_ENDPOINT, hw->endp_out, &status);
2581 if ((status == 1) && usb_clear_halt(hw->usb, hw->endp_out))
2585 usb_kill_urb(&hw->rx_urb);
2588 result = submit_rx_urb(hw, GFP_KERNEL);
2595 result1 = hfa384x_cmd_initialize(hw);
2597 result = result2 = hfa384x_cmd_initialize(hw);
2603 usb_kill_urb(&hw->rx_urb);
2618 hw->state = HFA384x_STATE_RUNNING;
2632 * hw device structure
2643 int hfa384x_drvr_stop(hfa384x_t *hw)
2653 if (!hw->wlandev->hwremoved) {
2655 hfa384x_cmd_initialize(hw);
2658 usb_kill_urb(&hw->rx_urb);
2661 hw->link_status = HFA384x_LINK_NOTCONNECTED;
2662 hw->state = HFA384x_STATE_INIT;
2664 del_timer_sync(&hw->commsqual_timer);
2668 hw->port_enabled[i] = 0;
2679 * hw device structure
2694 int hfa384x_drvr_txframe(hfa384x_t *hw, struct sk_buff *skb,
2703 if (hw->tx_urb.status == -EINPROGRESS) {
2711 memset(&hw->txbuff.txfrm.desc, 0, sizeof(hw->txbuff.txfrm.desc));
2714 hw->txbuff.type = cpu_to_le16(HFA384x_USB_TXFRM);
2717 hw->txbuff.txfrm.desc.sw_support = 0x0123;
2724 hw->txbuff.txfrm.desc.tx_control =
2728 hw->txbuff.txfrm.desc.tx_control =
2732 hw->txbuff.txfrm.desc.tx_control =
2736 hw->txbuff.txfrm.desc.tx_control =
2737 cpu_to_le16(hw->txbuff.txfrm.desc.tx_control);
2740 memcpy(&(hw->txbuff.txfrm.desc.frame_control), p80211_hdr,
2745 hw->txbuff.txfrm.desc.data_len = cpu_to_le16(skb->len + 8);
2748 hw->txbuff.txfrm.desc.data_len = cpu_to_le16(skb->len);
2754 ptr = hw->txbuff.txfrm.data;
2770 usb_fill_bulk_urb(&(hw->tx_urb), hw->usb,
2771 hw->endp_out,
2772 &(hw->txbuff), ROUNDUP64(usbpktlen),
2773 hfa384x_usbout_callback, hw->wlandev);
2774 hw->tx_urb.transfer_flags |= USB_QUEUE_BULK;
2777 ret = submit_tx_urb(hw, &hw->tx_urb, GFP_ATOMIC);
2789 hfa384x_t *hw = wlandev->priv;
2792 spin_lock_irqsave(&hw->ctlxq.lock, flags);
2794 if (!hw->wlandev->hwremoved) {
2797 sched = !test_and_set_bit(WORK_TX_HALT, &hw->usb_flags);
2798 sched |= !test_and_set_bit(WORK_RX_HALT, &hw->usb_flags);
2800 schedule_work(&hw->usb_work);
2803 spin_unlock_irqrestore(&hw->ctlxq.lock, flags);
2821 hfa384x_t *hw = (hfa384x_t *) data;
2826 spin_lock_irqsave(&hw->ctlxq.lock, flags);
2831 list_for_each_safe(entry, temp, &hw->ctlxq.reapable) {
2839 spin_unlock_irqrestore(&hw->ctlxq.lock, flags);
2859 hfa384x_t *hw = (hfa384x_t *) data;
2866 spin_lock_irqsave(&hw->ctlxq.lock, flags);
2871 list_for_each_safe(entry, temp, &hw->ctlxq.completing) {
2880 spin_unlock_irqrestore(&hw->ctlxq.lock, flags);
2881 ctlx->cmdcb(hw, ctlx);
2882 spin_lock_irqsave(&hw->ctlxq.lock, flags);
2892 if (hw->wlandev->hwremoved) {
2909 list_move_tail(&ctlx->list, &hw->ctlxq.reapable);
2915 spin_unlock_irqrestore(&hw->ctlxq.lock, flags);
2918 tasklet_schedule(&hw->reaper_bh);
2928 * hw ptr to the hfa384x_t structure
2938 static int unlocked_usbctlx_cancel_async(hfa384x_t *hw,
2948 hw->ctlx_urb.transfer_flags |= URB_ASYNC_UNLINK;
2949 ret = usb_unlink_urb(&hw->ctlx_urb);
2959 unlocked_usbctlx_complete(hw, ctlx);
2977 * hw ptr to a hfa384x_t structure
2988 static void unlocked_usbctlx_complete(hfa384x_t *hw, hfa384x_usbctlx_t *ctlx)
2994 list_move_tail(&ctlx->list, &hw->ctlxq.completing);
2995 tasklet_schedule(&hw->completion_bh);
3016 * hw ptr to hfa384x_t
3026 static void hfa384x_usbctlxq_run(hfa384x_t *hw)
3031 spin_lock_irqsave(&hw->ctlxq.lock, flags);
3040 if (!list_empty(&hw->ctlxq.active) ||
3041 test_bit(WORK_TX_HALT, &hw->usb_flags) || hw->wlandev->hwremoved)
3044 while (!list_empty(&hw->ctlxq.pending)) {
3049 head = list_entry(hw->ctlxq.pending.next,
3053 list_move_tail(&head->list, &hw->ctlxq.active);
3056 usb_fill_bulk_urb(&(hw->ctlx_urb), hw->usb,
3057 hw->endp_out,
3059 hfa384x_ctlxout_callback, hw);
3060 hw->ctlx_urb.transfer_flags |= USB_QUEUE_BULK;
3063 result = SUBMIT_URB(&hw->ctlx_urb, GFP_ATOMIC);
3069 hw->req_timer_done = 0;
3070 hw->reqtimer.expires = jiffies + HZ;
3071 add_timer(&hw->reqtimer);
3074 hw->resp_timer_done = 0;
3075 hw->resptimer.expires = jiffies + 2 * HZ;
3076 add_timer(&hw->resptimer);
3088 hw->wlandev->netdev->name);
3089 list_move(&head->list, &hw->ctlxq.pending);
3090 set_bit(WORK_TX_HALT, &hw->usb_flags);
3091 schedule_work(&hw->usb_work);
3097 hw->wlandev->netdev->name);
3103 unlocked_usbctlx_complete(hw, head);
3107 spin_unlock_irqrestore(&hw->ctlxq.lock, flags);
3129 hfa384x_t *hw;
3145 hw = wlandev->priv;
3146 if (!hw)
3149 skb = hw->rx_urb_skb;
3152 hw->rx_urb_skb = NULL;
3170 if (!test_and_set_bit(WORK_RX_HALT, &hw->usb_flags))
3171 schedule_work(&hw->usb_work);
3179 if (!test_and_set_bit(THROTTLE_RX, &hw->usb_flags) &&
3180 !timer_pending(&hw->throttle)) {
3181 mod_timer(&hw->throttle, jiffies + THROTTLE_JIFFIES);
3216 result = submit_rx_urb(hw, GFP_ATOMIC);
3261 hfa384x_usbin_ctlx(hw, usbin, urb_status);
3294 * hw ptr to hfa384x_t
3306 static void hfa384x_usbin_ctlx(hfa384x_t *hw, hfa384x_usbin_t *usbin,
3314 spin_lock_irqsave(&hw->ctlxq.lock, flags);
3320 if (list_empty(&hw->ctlxq.active))
3329 if (del_timer(&hw->resptimer) == 0) {
3330 if (hw->resp_timer_done == 0) {
3331 spin_unlock_irqrestore(&hw->ctlxq.lock, flags);
3335 hw->resp_timer_done = 1;
3338 ctlx = get_active_ctlx(hw);
3346 if (unlocked_usbctlx_cancel_async(hw, ctlx) == 0)
3383 unlocked_usbctlx_complete(hw, ctlx);
3396 if (unlocked_usbctlx_cancel_async(hw, ctlx) == 0)
3403 spin_unlock_irqrestore(&hw->ctlxq.lock, flags);
3406 hfa384x_usbctlxq_run(hw);
3460 hfa384x_t *hw = wlandev->priv;
3511 rxmeta->signal = usbin->rxfrm.desc.signal - hw->dbmadjust;
3512 rxmeta->noise = usbin->rxfrm.desc.silence - hw->dbmadjust;
3570 hfa384x_t *hw = wlandev->priv;
3599 (hw->sniffhdr != 0)) {
3610 caphdr->channel = htonl(hw->sniff_channel);
3638 if (hw->sniff_fcs) {
3708 hfa384x_t *hw = wlandev->priv;
3713 (WORK_TX_HALT, &hw->usb_flags))
3714 schedule_work(&hw->usb_work);
3723 hfa384x_t *hw = wlandev->priv;
3726 (THROTTLE_TX, &hw->usb_flags)
3727 && !timer_pending(&hw->throttle)) {
3728 mod_timer(&hw->throttle,
3768 hfa384x_t *hw = urb->context;
3780 (urb->status == -ENODEV) || (hw == NULL))
3784 spin_lock_irqsave(&hw->ctlxq.lock, flags);
3792 if (list_empty(&hw->ctlxq.active)) {
3793 spin_unlock_irqrestore(&hw->ctlxq.lock, flags);
3801 if (del_timer(&hw->reqtimer) == 0) {
3802 if (hw->req_timer_done == 0) {
3808 spin_unlock_irqrestore(&hw->ctlxq.lock, flags);
3812 hw->req_timer_done = 1;
3815 ctlx = get_active_ctlx(hw);
3830 unlocked_usbctlx_complete(hw, ctlx);
3845 !test_and_set_bit(WORK_TX_HALT, &hw->usb_flags)) {
3848 hw->wlandev->netdev->name);
3849 schedule_work(&hw->usb_work);
3856 unlocked_usbctlx_complete(hw, ctlx);
3863 timer_ok = del_timer(&hw->resptimer);
3865 hw->resp_timer_done = 1;
3868 spin_unlock_irqrestore(&hw->ctlxq.lock, flags);
3870 if (!timer_ok && (hw->resp_timer_done == 0)) {
3871 spin_lock_irqsave(&hw->ctlxq.lock, flags);
3876 hfa384x_usbctlxq_run(hw);
3902 hfa384x_t *hw = (hfa384x_t *) data;
3905 spin_lock_irqsave(&hw->ctlxq.lock, flags);
3907 hw->req_timer_done = 1;
3912 if (!list_empty(&hw->ctlxq.active)) {
3917 hw->ctlx_urb.transfer_flags |= URB_ASYNC_UNLINK;
3918 if (usb_unlink_urb(&hw->ctlx_urb) == -EINPROGRESS) {
3919 hfa384x_usbctlx_t *ctlx = get_active_ctlx(hw);
3932 if (del_timer(&hw->resptimer) != 0)
3933 hw->resp_timer_done = 1;
3937 spin_unlock_irqrestore(&hw->ctlxq.lock, flags);
3960 hfa384x_t *hw = (hfa384x_t *) data;
3963 spin_lock_irqsave(&hw->ctlxq.lock, flags);
3965 hw->resp_timer_done = 1;
3970 if (!list_empty(&hw->ctlxq.active)) {
3971 hfa384x_usbctlx_t *ctlx = get_active_ctlx(hw);
3973 if (unlocked_usbctlx_cancel_async(hw, ctlx) == 0) {
3974 spin_unlock_irqrestore(&hw->ctlxq.lock, flags);
3975 hfa384x_usbctlxq_run(hw);
3980 spin_unlock_irqrestore(&hw->ctlxq.lock, flags);
3992 * data ptr to hw
4004 hfa384x_t *hw = (hfa384x_t *) data;
4007 spin_lock_irqsave(&hw->ctlxq.lock, flags);
4013 pr_debug("flags=0x%lx\n", hw->usb_flags);
4014 if (!hw->wlandev->hwremoved &&
4015 ((test_and_clear_bit(THROTTLE_RX, &hw->usb_flags) &&
4016 !test_and_set_bit(WORK_RX_RESUME, &hw->usb_flags))
4018 (test_and_clear_bit(THROTTLE_TX, &hw->usb_flags) &&
4019 !test_and_set_bit(WORK_TX_RESUME, &hw->usb_flags))
4021 schedule_work(&hw->usb_work);
4024 spin_unlock_irqrestore(&hw->ctlxq.lock, flags);
4033 * hw ptr to the hw struct
4045 static int hfa384x_usbctlx_submit(hfa384x_t *hw, hfa384x_usbctlx_t *ctlx)
4050 spin_lock_irqsave(&hw->ctlxq.lock, flags);
4052 if (hw->wlandev->hwremoved) {
4053 spin_unlock_irqrestore(&hw->ctlxq.lock, flags);
4057 list_add_tail(&ctlx->list, &hw->ctlxq.pending);
4059 spin_unlock_irqrestore(&hw->ctlxq.lock, flags);
4060 hfa384x_usbctlxq_run(hw);