Lines matching references to urb

111 	struct urb_priv *urb_priv = td->urb->hcpriv;
122 static void inc_td_cnt(struct urb *urb)
124 struct urb_priv *urb_priv = urb->hcpriv;
793 struct urb *urb = cur_td->urb;
794 struct urb_priv *urb_priv = urb->hcpriv;
795 struct usb_hcd *hcd = bus_to_hcd(urb->dev->bus);
797 if (usb_pipetype(urb->pipe) == PIPE_ISOCHRONOUS) {
805 usb_hcd_unlink_urb_from_ep(hcd, urb);
806 trace_xhci_urb_giveback(urb);
807 usb_hcd_giveback_urb(hcd, urb, status);
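Lines 805-807 are the hand-off back to the USB core. As a hedged sketch of just that ordering (the surrounding driver code also frees urb->hcpriv, handles isochronous bandwidth accounting, and juggles xhci->lock, all omitted here):

#include <linux/usb.h>
#include <linux/usb/hcd.h>

/* Minimal sketch: unlink the URB from its endpoint's list, then give it
 * back to the core. These two calls are real hcd.h APIs; everything the
 * driver does around them is left out. */
static void sketch_giveback(struct usb_hcd *hcd, struct urb *urb, int status)
{
	usb_hcd_unlink_urb_from_ep(hcd, urb);
	usb_hcd_giveback_urb(hcd, urb, status);
}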
815 struct urb *urb = td->urb;
818 if (!ring || !seg || !urb)
821 if (usb_urb_dir_out(urb)) {
830 if (urb->num_sgs) {
831 len = sg_pcopy_from_buffer(urb->sg, urb->num_sgs, seg->bounce_buf,
837 memcpy(urb->transfer_buffer + seg->bounce_offs, seg->bounce_buf,
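Lines 815-837 are the bounce-buffer unmap path for TDs that straddled a ring-segment boundary. A hedged sketch of the copy-back it implies, using a stand-in struct for the segment's bounce fields (the real driver also unmaps the DMA buffer, which is omitted):

#include <linux/printk.h>
#include <linux/scatterlist.h>
#include <linux/string.h>
#include <linux/usb.h>

/* Stand-in for the bounce bookkeeping a segment carries (assumed layout,
 * not the driver's struct xhci_segment). */
struct sketch_bounce {
	void *bounce_buf;		/* CPU address of the bounce buffer */
	unsigned int bounce_len;	/* bytes covered by the bounced TRB */
	unsigned int bounce_offs;	/* offset of those bytes within the URB */
};

static void sketch_copy_back_bounce(struct urb *urb, struct sketch_bounce *seg)
{
	size_t len;

	if (usb_urb_dir_out(urb))
		return;		/* OUT data already reached the device */

	/* IN data: move it from the bounce buffer into the URB's memory. */
	if (urb->num_sgs) {
		len = sg_pcopy_from_buffer(urb->sg, urb->num_sgs,
					   seg->bounce_buf, seg->bounce_len,
					   seg->bounce_offs);
		if (len != seg->bounce_len)
			pr_warn("short bounce copy-back: %zu != %u\n",
				len, seg->bounce_len);
	} else {
		memcpy(urb->transfer_buffer + seg->bounce_offs,
		       seg->bounce_buf, seg->bounce_len);
	}
}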
847 struct urb *urb = NULL;
850 urb = td->urb;
857 * length, urb->actual_length will be a very big number (since it's
860 if (urb->actual_length > urb->transfer_buffer_length) {
862 urb->transfer_buffer_length, urb->actual_length);
863 urb->actual_length = 0;
873 inc_td_cnt(urb);
874 /* Giveback the urb when all the tds are completed */
876 if ((urb->actual_length != urb->transfer_buffer_length &&
877 (urb->transfer_flags & URB_SHORT_NOT_OK)) ||
878 (status != 0 && !usb_endpoint_xfer_isoc(&urb->ep->desc)))
880 urb, urb->actual_length,
881 urb->transfer_buffer_length, status);
883 /* set isoc urb status to 0 just as EHCI, UHCI, and OHCI */
884 if (usb_pipetype(urb->pipe) == PIPE_ISOCHRONOUS)
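Lines 847-884, together with inc_td_cnt() at 122-124, show the per-URB TD accounting that decides when a multi-TD URB is given back. A hedged sketch of that decision, assuming the num_tds/num_tds_done layout of struct urb_priv:

#include <linux/types.h>
#include <linux/usb.h>

/* Assumed to mirror the counters in struct urb_priv (xhci.h). */
struct sketch_urb_priv {
	int num_tds;
	int num_tds_done;
};

/* Returns true once the last TD has completed and the URB can go back
 * to the core. */
static bool sketch_td_cleanup(struct urb *urb, int *status)
{
	struct sketch_urb_priv *urb_priv = urb->hcpriv;

	/* An underrun on a short transfer can leave a bogus, huge
	 * actual_length; report nothing rather than garbage. */
	if (urb->actual_length > urb->transfer_buffer_length)
		urb->actual_length = 0;

	urb_priv->num_tds_done++;			/* inc_td_cnt() */
	if (urb_priv->num_tds_done != urb_priv->num_tds)
		return false;				/* TDs still outstanding */

	/* Set isoc urb status to 0 just as EHCI, UHCI, and OHCI do. */
	if (usb_pipetype(urb->pipe) == PIPE_ISOCHRONOUS)
		*status = 0;
	return true;
}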
902 ring = xhci_urb_to_transfer_ring(ep->xhci, td->urb);
906 __func__, td->urb);
910 __func__, td->urb, td->cancel_status);
1009 td->urb->stream_id, td->urb);
1011 ring = xhci_urb_to_transfer_ring(xhci, td->urb);
1014 td->urb, td->urb->stream_id);
1024 td->urb->stream_id);
1039 td->urb->stream_id, td->urb,
1040 cached_td->urb->stream_id, cached_td->urb);
1055 cached_td->urb->stream_id,
1063 td->urb);
1208 inc_td_cnt(cur_td->urb);
1254 inc_td_cnt(cur_td->urb);
1268 * lock is released and re-acquired while giving back urb.
1429 ep_ring = xhci_urb_to_transfer_ring(ep->xhci, td->urb);
1433 __func__, td->urb);
1437 __func__, td->urb, td->cancel_status);
2117 if (td->urb->dev->tt && !usb_pipeint(td->urb->pipe) &&
2118 (td->urb->dev->tt->hub != xhci_to_hcd(xhci)->self.root_hub) &&
2121 td->urb->ep->hcpriv = td->urb->dev;
2122 if (usb_hub_clear_tt_buffer(td->urb))
2264 * Process control tds, update urb status and actual_length.
2278 requested = td->urb->transfer_buffer_length;
2296 td->urb->actual_length = remaining;
2303 td->urb->actual_length = 0;
2307 td->urb->actual_length = requested - remaining;
2310 td->urb->actual_length = requested;
2329 td->urb->actual_length = requested - remaining;
2331 td->urb->actual_length = 0;
2346 td->urb->actual_length = requested - remaining;
2353 td->urb->actual_length = requested;
2360 * Process isochronous tds, update urb packet status and actual_length.
2375 urb_priv = td->urb->hcpriv;
2377 frame = &td->urb->iso_frame_desc[idx];
2381 short_framestatus = td->urb->transfer_flags & URB_SHORT_NOT_OK ?
2449 td->urb->actual_length += frame->actual_length;
2469 urb_priv = td->urb->hcpriv;
2471 frame = &td->urb->iso_frame_desc[idx];
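For isochronous TDs (lines 2360-2471) status and length are recorded per frame descriptor and the URB total is the running sum. A minimal sketch of that bookkeeping, with the completed byte count passed in instead of decoded from the transfer event:

#include <linux/usb.h>

/* "idx" is the frame index the driver tracks in urb_priv; "transferred"
 * is what the event TRB says actually moved for this frame. */
static void sketch_finish_isoc_frame(struct urb *urb, int idx,
				     unsigned int transferred, int status)
{
	struct usb_iso_packet_descriptor *frame = &urb->iso_frame_desc[idx];

	frame->status = status;
	frame->actual_length = transferred;
	urb->actual_length += transferred;	/* running total, as at line 2449 */
}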
2488 * Process bulk and interrupt tds, update urb status and actual_length.
2502 requested = td->urb->transfer_buffer_length;
2511 td->urb->ep->desc.bEndpointAddress,
2518 td->urb->ep->desc.bEndpointAddress,
2523 td->urb->actual_length = remaining;
2546 td->urb->actual_length = requested - remaining;
2548 td->urb->actual_length =
2555 td->urb->actual_length = 0;
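Both the control path (2264-2353) and the bulk/interrupt path (2488-2555) derive actual_length the same way: the transfer event reports the bytes left untransferred, so the URB gets requested minus remaining. A minimal sketch of that arithmetic:

#include <linux/types.h>
#include <linux/usb.h>

/* "remaining" is the untransferred byte count from the event TRB; on a
 * clean completion it is 0 and actual_length equals the request. */
static void sketch_set_actual_length(struct urb *urb, u32 remaining)
{
	u32 requested = urb->transfer_buffer_length;

	if (remaining > requested)	/* bogus event; report nothing */
		remaining = requested;

	urb->actual_length = requested - remaining;
}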
2834 if (ep->skip && usb_endpoint_xfer_isoc(&td->urb->ep->desc)) {
2923 /* update the urb's actual_length and give back to the core */
2924 if (usb_endpoint_xfer_control(&td->urb->ep->desc))
2926 else if (usb_endpoint_xfer_isoc(&td->urb->ep->desc))
3208 xhci_warn(xhci, "WARN urb submitted to disabled ep\n");
3289 struct urb *urb,
3312 urb_priv = urb->hcpriv;
3319 ret = usb_hcd_link_urb_to_ep(bus_to_hcd(urb->dev->bus), urb);
3324 td->urb = urb;
3345 static inline unsigned int count_trbs_needed(struct urb *urb)
3347 return count_trbs(urb->transfer_dma, urb->transfer_buffer_length);
3350 static unsigned int count_sg_trbs_needed(struct urb *urb)
3355 full_len = urb->transfer_buffer_length;
3357 for_each_sg(urb->sg, sg, urb->num_mapped_sgs, i) {
3369 static unsigned int count_isoc_trbs_needed(struct urb *urb, int i)
3373 addr = (u64) (urb->transfer_dma + urb->iso_frame_desc[i].offset);
3374 len = urb->iso_frame_desc[i].length;
3379 static void check_trb_math(struct urb *urb, int running_total)
3381 if (unlikely(running_total != urb->transfer_buffer_length))
3382 dev_err(&urb->dev->dev, "%s - ep %#x - Miscalculated tx length, "
3385 urb->ep->desc.bEndpointAddress,
3387 urb->transfer_buffer_length,
3388 urb->transfer_buffer_length);
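The counting helpers at 3345-3388 all reduce to how many TRBs an (address, length) run needs when each TRB is filled only up to the next 64 KiB boundary (TRB_MAX_BUFF_SIZE in xhci.h). A standalone sketch of that arithmetic:

#include <stdint.h>

#define SKETCH_TRB_MAX_BUFF_SIZE (1u << 16)	/* 64 KiB, as in xhci.h */

/* The first TRB only reaches the next 64 KiB boundary, so the misalignment
 * of addr is added to len before rounding up; a zero-length TD still
 * consumes one TRB. */
static unsigned int sketch_count_trbs(uint64_t addr, uint64_t len)
{
	uint64_t span = len + (addr & (SKETCH_TRB_MAX_BUFF_SIZE - 1));
	uint64_t num_trbs = (span + SKETCH_TRB_MAX_BUFF_SIZE - 1) /
			    SKETCH_TRB_MAX_BUFF_SIZE;

	return num_trbs ? (unsigned int)num_trbs : 1;
}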
3407 static void check_interval(struct xhci_hcd *xhci, struct urb *urb,
3414 ep_interval = urb->interval;
3417 if (urb->dev->speed == USB_SPEED_LOW ||
3418 urb->dev->speed == USB_SPEED_FULL)
3425 dev_dbg_ratelimited(&urb->dev->dev,
3429 urb->interval = xhci_interval;
3431 if (urb->dev->speed == USB_SPEED_LOW ||
3432 urb->dev->speed == USB_SPEED_FULL)
3433 urb->interval /= 8;
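check_interval() (3407-3433) reconciles the interval in the URB with the one already programmed into the endpoint context: the xHC side is kept in 125 us microframes, while the USB core uses 1 ms frames for low- and full-speed devices, hence the *8 and /8 conversions. A hedged sketch of that reconciliation:

#include <stdbool.h>

/* Returns the value to report back in urb->interval, in the unit the USB
 * core expects (frames for LS/FS, microframes otherwise); ep_ctx_uframes
 * is what the endpoint context holds, always in microframes. */
static unsigned int sketch_check_interval(unsigned int urb_interval,
					  unsigned int ep_ctx_uframes,
					  bool ls_or_fs)
{
	unsigned int ep_interval = urb_interval;

	if (ls_or_fs)
		ep_interval *= 8;	/* frames -> microframes */

	if (ep_interval == ep_ctx_uframes)
		return urb_interval;	/* already consistent */

	/* The endpoint context wins; convert back for LS/FS devices. */
	return ls_or_fs ? ep_ctx_uframes / 8 : ep_ctx_uframes;
}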
3444 struct urb *urb, int slot_id, unsigned int ep_index)
3449 check_interval(xhci, urb, ep_ctx);
3451 return xhci_queue_bulk_tx(xhci, mem_flags, urb, slot_id, ep_index);
3476 struct urb *urb, bool more_trbs_coming)
3493 maxp = usb_endpoint_maxp(&urb->ep->desc);
3501 static int xhci_align_td(struct xhci_hcd *xhci, struct urb *urb, u32 enqd_len,
3510 max_pkt = usb_endpoint_maxp(&urb->ep->desc);
3534 if (new_buff_len > (urb->transfer_buffer_length - enqd_len))
3535 new_buff_len = (urb->transfer_buffer_length - enqd_len);
3538 if (usb_urb_dir_out(urb)) {
3539 if (urb->num_sgs) {
3540 len = sg_pcopy_to_buffer(urb->sg, urb->num_sgs,
3546 memcpy(seg->bounce_buf, urb->transfer_buffer + enqd_len, new_buff_len);
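xhci_align_td() (3501-3546) handles a TD whose data would otherwise end unaligned at a ring-segment boundary: the bytes enqueued so far plus the last TRB must end on a wMaxPacketSize boundary, either by shortening that TRB or by routing the tail through a per-segment bounce buffer (the OUT copy at 3538-3546 mirrors the copy-back at 821-837). A hedged sketch of just the decision:

#include <stdbool.h>
#include <stdint.h>

/* enqd_len: bytes already queued for this TD; *trb_buff_len: length of the
 * last normal TRB on the segment; max_pkt: the endpoint's wMaxPacketSize. */
static bool sketch_needs_bounce(uint32_t enqd_len, uint32_t *trb_buff_len,
				uint32_t max_pkt)
{
	uint32_t unalign = (enqd_len + *trb_buff_len) % max_pkt;

	if (unalign == 0)
		return false;		/* already packet aligned */

	if (*trb_buff_len > unalign) {
		*trb_buff_len -= unalign;	/* trim the last normal TRB */
		return false;
	}

	return true;	/* too short to trim: stage the tail in the bounce buffer */
}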
3572 struct urb *urb, int slot_id, unsigned int ep_index)
3589 ring = xhci_urb_to_transfer_ring(xhci, urb);
3593 full_len = urb->transfer_buffer_length;
3595 if (urb->num_sgs && !(urb->transfer_flags & URB_DMA_MAP_SINGLE)) {
3596 num_sgs = urb->num_mapped_sgs;
3597 sg = urb->sg;
3600 num_trbs = count_sg_trbs_needed(urb);
3602 num_trbs = count_trbs_needed(urb);
3603 addr = (u64) urb->transfer_dma;
3607 ep_index, urb->stream_id,
3608 num_trbs, urb, 0, mem_flags);
3612 urb_priv = urb->hcpriv;
3615 if (urb->transfer_flags & URB_ZERO_PACKET && urb_priv->num_tds > 1)
3655 if (xhci_align_td(xhci, urb, enqd_len,
3670 if (xhci_urb_suitable_for_idt(urb)) {
3671 memcpy(&send_addr, urb->transfer_buffer,
3679 if (usb_urb_dir_in(urb))
3684 full_len, urb, more_trbs_coming);
3716 ep_index, urb->stream_id,
3717 1, urb, 1, mem_flags);
3725 check_trb_math(urb, enqd_len);
3726 giveback_first_trb(xhci, slot_id, ep_index, urb->stream_id,
3733 struct urb *urb, int slot_id, unsigned int ep_index)
3745 ep_ring = xhci_urb_to_transfer_ring(xhci, urb);
3753 if (!urb->setup_packet)
3763 if (urb->transfer_buffer_length > 0)
3766 ep_index, urb->stream_id,
3767 num_trbs, urb, 0, mem_flags);
3771 urb_priv = urb->hcpriv;
3785 setup = (struct usb_ctrlrequest *) urb->setup_packet;
3793 if (urb->transfer_buffer_length > 0) {
3810 if (usb_urb_dir_in(urb))
3815 if (urb->transfer_buffer_length > 0) {
3819 if (xhci_urb_suitable_for_idt(urb)) {
3820 memcpy(&addr, urb->transfer_buffer,
3821 urb->transfer_buffer_length);
3825 addr = (u64) urb->transfer_dma;
3829 urb->transfer_buffer_length,
3830 urb->transfer_buffer_length,
3831 urb, 1);
3832 length_field = TRB_LEN(urb->transfer_buffer_length) |
3850 if (urb->transfer_buffer_length > 0 && setup->bRequestType & USB_DIR_IN)
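Line 3850 encodes the standard control-transfer rule for the status stage: it runs in the opposite direction to the data stage, and is IN when there is no data stage at all. A standalone sketch:

#include <stdbool.h>
#include <stdint.h>

#define SKETCH_USB_DIR_IN 0x80	/* bRequestType direction bit, as in ch9.h */

static bool sketch_status_stage_is_in(uint32_t data_len, uint8_t bRequestType)
{
	bool data_stage_in = data_len > 0 && (bRequestType & SKETCH_USB_DIR_IN);

	return !data_stage_in;	/* status stage is the opposite direction */
}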
3875 struct urb *urb, unsigned int total_packet_count)
3879 if (xhci->hci_version < 0x100 || urb->dev->speed < USB_SPEED_SUPER)
3882 max_burst = urb->ep->ss_ep_comp.bMaxBurst;
3895 struct urb *urb, unsigned int total_packet_count)
3903 if (urb->dev->speed >= USB_SPEED_SUPER) {
3905 max_burst = urb->ep->ss_ep_comp.bMaxBurst;
3927 struct urb *urb, int index)
3932 if (urb->dev->speed == USB_SPEED_LOW ||
3933 urb->dev->speed == USB_SPEED_FULL)
3934 start_frame = urb->start_frame + index * urb->interval;
3936 start_frame = (urb->start_frame + index * urb->interval) >> 3;
3990 if (urb->dev->speed == USB_SPEED_LOW ||
3991 urb->dev->speed == USB_SPEED_FULL)
3992 urb->start_frame = start_frame;
3994 urb->start_frame = start_frame << 3;
4031 struct urb *urb, int slot_id, unsigned int ep_index)
4053 num_tds = urb->number_of_packets;
4058 start_addr = (u64) urb->transfer_dma;
4062 urb_priv = urb->hcpriv;
4071 addr = start_addr + urb->iso_frame_desc[i].offset;
4072 td_len = urb->iso_frame_desc[i].length;
4074 max_pkt = usb_endpoint_maxp(&urb->ep->desc);
4080 burst_count = xhci_get_burst_count(xhci, urb, total_pkt_count);
4082 urb, total_pkt_count);
4084 trbs_per_td = count_isoc_trbs_needed(urb, i);
4087 urb->stream_id, trbs_per_td, urb, i, mem_flags);
4097 if (!(urb->transfer_flags & URB_ISO_ASAP) &&
4099 frame_id = xhci_get_isoc_frame_id(xhci, urb, i);
4127 if (usb_urb_dir_in(urb))
4150 urb, more_trbs_coming);
4183 xep->next_frame_id = urb->start_frame + num_tds * urb->interval;
4191 giveback_first_trb(xhci, slot_id, ep_index, urb->stream_id,
4213 usb_hcd_unlink_urb_from_ep(bus_to_hcd(urb->dev->bus), urb);
4218 * Check transfer ring to guarantee there is enough room for the urb.
4221 * update urb->start_frame if URB_ISO_ASAP is set in transfer_flags or
4225 struct urb *urb, int slot_id, unsigned int ep_index)
4242 num_tds = urb->number_of_packets;
4244 num_trbs += count_isoc_trbs_needed(urb, i);
4246 /* Check the ring to guarantee there is enough room for the whole urb.
4247 * Do not insert any td of the urb to the ring if the check failed.
4258 check_interval(xhci, urb, ep_ctx);
4260 /* Calculate the start frame and put it in urb->start_frame. */
4263 urb->start_frame = xep->next_frame_id;
4284 if (urb->dev->speed == USB_SPEED_LOW ||
4285 urb->dev->speed == USB_SPEED_FULL) {
4286 start_frame = roundup(start_frame, urb->interval << 3);
4287 urb->start_frame = start_frame >> 3;
4289 start_frame = roundup(start_frame, urb->interval);
4290 urb->start_frame = start_frame;
4295 return xhci_queue_isoc_tx(xhci, mem_flags, urb, slot_id, ep_index);
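The tail of the listing (4284-4290) rounds the isochronous start time up to the endpoint's service interval before the TDs are queued. A hedged sketch of that rounding, with the controller's microframe counter (plus scheduling slack) passed in as mfindex; urb->start_frame is reported in 1 ms frames for LS/FS devices and in microframes otherwise:

#include <stdbool.h>

static unsigned int sketch_isoc_start_frame(unsigned int mfindex,
					    unsigned int urb_interval,
					    bool ls_or_fs)
{
	/* LS/FS intervals are in frames; convert to microframes to round. */
	unsigned int step = ls_or_fs ? urb_interval * 8 : urb_interval;
	unsigned int start = ((mfindex + step - 1) / step) * step;	/* roundup() */

	return ls_or_fs ? start >> 3 : start;
}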