Lines Matching refs:xhci
39 static struct xhci_segment *xhci_segment_alloc(struct xhci_hcd *xhci, in xhci_segment_alloc() argument
50 seg->trbs = dma_pool_alloc(xhci->segment_pool, flags, &dma); in xhci_segment_alloc()
68 static void xhci_segment_free(struct xhci_hcd *xhci, struct xhci_segment *seg) in xhci_segment_free() argument
71 dma_pool_free(xhci->segment_pool, seg->trbs, seg->dma); in xhci_segment_free()
77 static void xhci_free_segments_for_ring(struct xhci_hcd *xhci, in xhci_free_segments_for_ring() argument
85 xhci_segment_free(xhci, seg); in xhci_free_segments_for_ring()
88 xhci_segment_free(xhci, first); in xhci_free_segments_for_ring()
98 static void xhci_link_segments(struct xhci_hcd *xhci, struct xhci_segment *prev, in xhci_link_segments() argument
116 if (xhci_link_trb_quirk(xhci) || in xhci_link_segments()
118 (xhci->quirks & XHCI_AMD_0x96_HOST))) in xhci_link_segments()
128 static void xhci_link_rings(struct xhci_hcd *xhci, struct xhci_ring *ring, in xhci_link_rings() argument
138 xhci_link_segments(xhci, ring->enq_seg, first, ring->type); in xhci_link_rings()
139 xhci_link_segments(xhci, last, next, ring->type); in xhci_link_rings()
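
The segment and link helpers above build a ring out of fixed-size TRB segments: each segment's final TRB is a Link TRB carrying the next segment's DMA address, and the link that closes the ring sets the toggle bit so the producer's cycle state flips on wrap (the chain bit additionally depends on ring type and the AMD 0x96 quirk, as the xhci_link_segments() lines show). A minimal userspace sketch of that chaining, with plain pointers standing in for the driver's dma_pool allocations and illustrative bit encodings:

#include <stdint.h>

#define TRBS_PER_SEG	64		/* illustrative; the driver uses TRBS_PER_SEGMENT */
#define TRB_TYPE_LINK	(6u << 10)	/* illustrative Link TRB type encoding */
#define LINK_TOGGLE	(1u << 1)	/* illustrative Toggle Cycle flag */

struct trb { uint64_t ptr; uint32_t status; uint32_t control; };

struct segment {
	struct trb trbs[TRBS_PER_SEG];
	uint64_t dma;			/* bus address of trbs[] in the real driver */
	struct segment *next;
};

/* Point prev's final (link) TRB at next; the segment that closes the ring
 * also sets the toggle bit so the cycle state flips when the ring wraps. */
static void link_segments(struct segment *prev, struct segment *next, int closes_ring)
{
	prev->next = next;
	prev->trbs[TRBS_PER_SEG - 1].ptr = next->dma;
	prev->trbs[TRBS_PER_SEG - 1].control =
		TRB_TYPE_LINK | (closes_ring ? LINK_TOGGLE : 0);
}
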
276 void xhci_ring_free(struct xhci_hcd *xhci, struct xhci_ring *ring) in xhci_ring_free() argument
284 xhci_free_segments_for_ring(xhci, ring->first_seg); in xhci_ring_free()
318 static int xhci_alloc_segments_for_ring(struct xhci_hcd *xhci, in xhci_alloc_segments_for_ring() argument
325 prev = xhci_segment_alloc(xhci, cycle_state, flags); in xhci_alloc_segments_for_ring()
334 next = xhci_segment_alloc(xhci, cycle_state, flags); in xhci_alloc_segments_for_ring()
339 xhci_segment_free(xhci, prev); in xhci_alloc_segments_for_ring()
344 xhci_link_segments(xhci, prev, next, type); in xhci_alloc_segments_for_ring()
349 xhci_link_segments(xhci, prev, *first, type); in xhci_alloc_segments_for_ring()
362 static struct xhci_ring *xhci_ring_alloc(struct xhci_hcd *xhci, in xhci_ring_alloc() argument
379 ret = xhci_alloc_segments_for_ring(xhci, &ring->first_seg, in xhci_ring_alloc()
398 void xhci_free_or_cache_endpoint_ring(struct xhci_hcd *xhci, in xhci_free_or_cache_endpoint_ring() argument
409 xhci_dbg(xhci, "Cached old ring, " in xhci_free_or_cache_endpoint_ring()
414 xhci_ring_free(xhci, virt_dev->eps[ep_index].ring); in xhci_free_or_cache_endpoint_ring()
415 xhci_dbg(xhci, "Ring cache full (%d rings), " in xhci_free_or_cache_endpoint_ring()
425 static void xhci_reinit_cached_ring(struct xhci_hcd *xhci, in xhci_reinit_cached_ring() argument
441 xhci_link_segments(xhci, seg, seg->next, type); in xhci_reinit_cached_ring()
457 int xhci_ring_expansion(struct xhci_hcd *xhci, struct xhci_ring *ring, in xhci_ring_expansion() argument
473 ret = xhci_alloc_segments_for_ring(xhci, &first, &last, in xhci_ring_expansion()
485 xhci_segment_free(xhci, first); in xhci_ring_expansion()
493 xhci_link_rings(xhci, ring, first, last, num_segs); in xhci_ring_expansion()
494 xhci_dbg_trace(xhci, trace_xhci_dbg_ring_expansion, in xhci_ring_expansion()
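
xhci_ring_expansion() grows a live ring instead of reallocating it: a fresh chain of segments comes from xhci_alloc_segments_for_ring() and is spliced in right after the enqueue segment by the two xhci_link_segments() calls inside xhci_link_rings(). Reusing struct segment and link_segments() from the sketch above, the splice itself reduces to:

/* Splice a freshly allocated chain [first..last] into the ring directly
 * after enq_seg (sketch only; the driver also fixes up segment counts,
 * free-TRB accounting and which link TRB carries the toggle bit). */
static void splice_after(struct segment *enq_seg, struct segment *first,
			 struct segment *last)
{
	struct segment *old_next = enq_seg->next;

	link_segments(enq_seg, first, 0);
	link_segments(last, old_next, 0);
}
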
503 static struct xhci_container_ctx *xhci_alloc_container_ctx(struct xhci_hcd *xhci, in xhci_alloc_container_ctx() argument
516 ctx->size = HCC_64BYTE_CONTEXT(xhci->hcc_params) ? 2048 : 1024; in xhci_alloc_container_ctx()
518 ctx->size += CTX_SIZE(xhci->hcc_params); in xhci_alloc_container_ctx()
520 ctx->bytes = dma_pool_alloc(xhci->device_pool, flags, &ctx->dma); in xhci_alloc_container_ctx()
529 static void xhci_free_container_ctx(struct xhci_hcd *xhci, in xhci_free_container_ctx() argument
534 dma_pool_free(xhci->device_pool, ctx->bytes, ctx->dma); in xhci_free_container_ctx()
547 struct xhci_slot_ctx *xhci_get_slot_ctx(struct xhci_hcd *xhci, in xhci_get_slot_ctx() argument
554 (ctx->bytes + CTX_SIZE(xhci->hcc_params)); in xhci_get_slot_ctx()
557 struct xhci_ep_ctx *xhci_get_ep_ctx(struct xhci_hcd *xhci, in xhci_get_ep_ctx() argument
567 (ctx->bytes + (ep_index * CTX_SIZE(xhci->hcc_params))); in xhci_get_ep_ctx()
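
xhci_get_slot_ctx() and xhci_get_ep_ctx() are pure pointer arithmetic over a container context: individual contexts sit CTX_SIZE apart (32 or 64 bytes, per the HCC_PARAMS 64-byte-context bit, which is also why the container is 1024 or 2048 bytes), and an input container carries an extra Input Control Context ahead of the Slot Context. A hedged sketch of that indexing:

#include <stdint.h>
#include <stddef.h>

enum ctx_type { CTX_TYPE_DEVICE, CTX_TYPE_INPUT };

struct container_ctx {
	enum ctx_type type;
	uint8_t *bytes;			/* DMA-coherent buffer in the real driver */
};

/* Context stride: 32 bytes normally, 64 when the 64-byte-context bit is set. */
static size_t ctx_size(int csz_64byte)
{
	return csz_64byte ? 64 : 32;
}

/* The Slot Context is first in a device context, but one stride further in
 * an input context because of the leading Input Control Context. */
static uint8_t *get_slot_ctx(struct container_ctx *ctx, int csz_64byte)
{
	if (ctx->type == CTX_TYPE_DEVICE)
		return ctx->bytes;
	return ctx->bytes + ctx_size(csz_64byte);
}

/* Endpoint contexts follow the Slot Context; ep_index 0 is EP0. */
static uint8_t *get_ep_ctx(struct container_ctx *ctx, unsigned int ep_index,
			   int csz_64byte)
{
	return get_slot_ctx(ctx, csz_64byte) +
		(ep_index + 1) * ctx_size(csz_64byte);
}
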
573 static void xhci_free_stream_ctx(struct xhci_hcd *xhci, in xhci_free_stream_ctx() argument
577 struct device *dev = xhci_to_hcd(xhci)->self.controller; in xhci_free_stream_ctx()
584 return dma_pool_free(xhci->small_streams_pool, in xhci_free_stream_ctx()
587 return dma_pool_free(xhci->medium_streams_pool, in xhci_free_stream_ctx()
601 static struct xhci_stream_ctx *xhci_alloc_stream_ctx(struct xhci_hcd *xhci, in xhci_alloc_stream_ctx() argument
605 struct device *dev = xhci_to_hcd(xhci)->self.controller; in xhci_alloc_stream_ctx()
612 return dma_pool_alloc(xhci->small_streams_pool, in xhci_alloc_stream_ctx()
615 return dma_pool_alloc(xhci->medium_streams_pool, in xhci_alloc_stream_ctx()
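
xhci_alloc_stream_ctx() and xhci_free_stream_ctx() pick an allocator by the byte size of the stream context array: a small dma_pool, a medium dma_pool, or dma_alloc_coherent() for anything larger. A sketch of that dispatch (the cutoffs are illustrative, not necessarily the driver's small/medium constants):

#include <stddef.h>

#define SMALL_STREAM_ARRAY_BYTES	256	/* illustrative cutoff */
#define MEDIUM_STREAM_ARRAY_BYTES	1024	/* illustrative cutoff */
#define STREAM_CTX_BYTES		16	/* one stream context entry */

enum stream_pool { POOL_SMALL, POOL_MEDIUM, POOL_COHERENT };

/* Decide where an array of num_stream_ctxs stream contexts comes from. */
static enum stream_pool pick_stream_pool(unsigned int num_stream_ctxs)
{
	size_t bytes = (size_t)num_stream_ctxs * STREAM_CTX_BYTES;

	if (bytes <= SMALL_STREAM_ARRAY_BYTES)
		return POOL_SMALL;
	if (bytes <= MEDIUM_STREAM_ARRAY_BYTES)
		return POOL_MEDIUM;
	return POOL_COHERENT;		/* large arrays use dma_alloc_coherent() */
}
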
655 struct xhci_stream_info *xhci_alloc_stream_info(struct xhci_hcd *xhci, in xhci_alloc_stream_info() argument
665 xhci_dbg(xhci, "Allocating %u streams and %u " in xhci_alloc_stream_info()
668 if (xhci->cmd_ring_reserved_trbs == MAX_RSVD_CMD_TRBS) { in xhci_alloc_stream_info()
669 xhci_dbg(xhci, "Command ring has no reserved TRBs available\n"); in xhci_alloc_stream_info()
672 xhci->cmd_ring_reserved_trbs++; in xhci_alloc_stream_info()
689 stream_info->stream_ctx_array = xhci_alloc_stream_ctx(xhci, in xhci_alloc_stream_info()
699 xhci_alloc_command(xhci, true, true, mem_flags); in xhci_alloc_stream_info()
711 xhci_ring_alloc(xhci, 2, 1, TYPE_STREAM, mem_flags); in xhci_alloc_stream_info()
723 xhci_dbg(xhci, "Setting stream %d ring ptr to 0x%08llx\n", in xhci_alloc_stream_info()
728 xhci_ring_free(xhci, cur_ring); in xhci_alloc_stream_info()
746 xhci_ring_free(xhci, cur_ring); in xhci_alloc_stream_info()
750 xhci_free_command(xhci, stream_info->free_streams_command); in xhci_alloc_stream_info()
756 xhci->cmd_ring_reserved_trbs--; in xhci_alloc_stream_info()
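
xhci_alloc_stream_info() also reserves one command-ring TRB so a Free Streams command can always be queued later, and hands the reservation back on every failure path (and again in xhci_free_stream_info()). The accounting pattern, in sketch form with an illustrative cap:

/* Illustrative cap, not necessarily the driver's MAX_RSVD_CMD_TRBS. */
#define RSVD_CMD_TRB_CAP	6

static int reserve_cmd_trb(unsigned int *reserved)
{
	if (*reserved >= RSVD_CMD_TRB_CAP)
		return -1;	/* no headroom: fail the stream allocation */
	(*reserved)++;
	return 0;
}

static void unreserve_cmd_trb(unsigned int *reserved)
{
	(*reserved)--;	/* called on failure paths and on stream teardown */
}
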
763 void xhci_setup_streams_ep_input_ctx(struct xhci_hcd *xhci, in xhci_setup_streams_ep_input_ctx() argument
773 xhci_dbg_trace(xhci, trace_xhci_dbg_context_change, in xhci_setup_streams_ep_input_ctx()
800 void xhci_free_stream_info(struct xhci_hcd *xhci, in xhci_free_stream_info() argument
813 xhci_ring_free(xhci, cur_ring); in xhci_free_stream_info()
817 xhci_free_command(xhci, stream_info->free_streams_command); in xhci_free_stream_info()
818 xhci->cmd_ring_reserved_trbs--; in xhci_free_stream_info()
820 xhci_free_stream_ctx(xhci, in xhci_free_stream_info()
832 static void xhci_init_endpoint_timer(struct xhci_hcd *xhci, in xhci_init_endpoint_timer() argument
837 ep->xhci = xhci; in xhci_init_endpoint_timer()
840 static void xhci_free_tt_info(struct xhci_hcd *xhci, in xhci_free_tt_info() argument
852 virt_dev->real_port > HCS_MAX_PORTS(xhci->hcs_params1)) { in xhci_free_tt_info()
853 xhci_dbg(xhci, "Bad real port.\n"); in xhci_free_tt_info()
857 tt_list_head = &(xhci->rh_bw[virt_dev->real_port - 1].tts); in xhci_free_tt_info()
870 int xhci_alloc_tt_info(struct xhci_hcd *xhci, in xhci_alloc_tt_info() argument
892 &xhci->rh_bw[virt_dev->real_port - 1].tts); in xhci_alloc_tt_info()
903 xhci_free_tt_info(xhci, virt_dev, virt_dev->udev->slot_id); in xhci_alloc_tt_info()
913 void xhci_free_virt_device(struct xhci_hcd *xhci, int slot_id) in xhci_free_virt_device() argument
920 if (slot_id == 0 || !xhci->devs[slot_id]) in xhci_free_virt_device()
923 dev = xhci->devs[slot_id]; in xhci_free_virt_device()
924 xhci->dcbaa->dev_context_ptrs[slot_id] = 0; in xhci_free_virt_device()
933 xhci_ring_free(xhci, dev->eps[i].ring); in xhci_free_virt_device()
935 xhci_free_stream_info(xhci, in xhci_free_virt_device()
943 xhci_warn(xhci, "Slot %u endpoint %u " in xhci_free_virt_device()
948 xhci_free_tt_info(xhci, dev, slot_id); in xhci_free_virt_device()
950 xhci_update_tt_active_eps(xhci, dev, old_active_eps); in xhci_free_virt_device()
954 xhci_ring_free(xhci, dev->ring_cache[i]); in xhci_free_virt_device()
959 xhci_free_container_ctx(xhci, dev->in_ctx); in xhci_free_virt_device()
961 xhci_free_container_ctx(xhci, dev->out_ctx); in xhci_free_virt_device()
963 kfree(xhci->devs[slot_id]); in xhci_free_virt_device()
964 xhci->devs[slot_id] = NULL; in xhci_free_virt_device()
967 int xhci_alloc_virt_device(struct xhci_hcd *xhci, int slot_id, in xhci_alloc_virt_device() argument
974 if (slot_id == 0 || xhci->devs[slot_id]) { in xhci_alloc_virt_device()
975 xhci_warn(xhci, "Bad Slot ID %d\n", slot_id); in xhci_alloc_virt_device()
979 xhci->devs[slot_id] = kzalloc(sizeof(*xhci->devs[slot_id]), flags); in xhci_alloc_virt_device()
980 if (!xhci->devs[slot_id]) in xhci_alloc_virt_device()
982 dev = xhci->devs[slot_id]; in xhci_alloc_virt_device()
985 dev->out_ctx = xhci_alloc_container_ctx(xhci, XHCI_CTX_TYPE_DEVICE, flags); in xhci_alloc_virt_device()
989 xhci_dbg(xhci, "Slot %d output ctx = 0x%llx (dma)\n", slot_id, in xhci_alloc_virt_device()
993 dev->in_ctx = xhci_alloc_container_ctx(xhci, XHCI_CTX_TYPE_INPUT, flags); in xhci_alloc_virt_device()
997 xhci_dbg(xhci, "Slot %d input ctx = 0x%llx (dma)\n", slot_id, in xhci_alloc_virt_device()
1002 xhci_init_endpoint_timer(xhci, &dev->eps[i]); in xhci_alloc_virt_device()
1008 dev->eps[0].ring = xhci_ring_alloc(xhci, 2, 1, TYPE_CTRL, flags); in xhci_alloc_virt_device()
1024 xhci->dcbaa->dev_context_ptrs[slot_id] = cpu_to_le64(dev->out_ctx->dma); in xhci_alloc_virt_device()
1025 xhci_dbg(xhci, "Set slot id %d dcbaa entry %p to 0x%llx\n", in xhci_alloc_virt_device()
1027 &xhci->dcbaa->dev_context_ptrs[slot_id], in xhci_alloc_virt_device()
1028 le64_to_cpu(xhci->dcbaa->dev_context_ptrs[slot_id])); in xhci_alloc_virt_device()
1032 xhci_free_virt_device(xhci, slot_id); in xhci_alloc_virt_device()
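
xhci_alloc_virt_device() hangs per-slot state off xhci->devs[slot_id] and publishes the slot's output device context to the controller by writing its DMA address into the DCBAA, the slot-indexed table whose entry 0 is reserved for the scratchpad array. A minimal model of that table update (the real driver stores the pointer little-endian via cpu_to_le64()):

#include <stdint.h>

#define MAX_SLOTS	256	/* illustrative; bounded by HCS_MAX_SLOTS on real hardware */

struct dcbaa_sketch {
	uint64_t dev_context_ptrs[MAX_SLOTS];	/* entry 0 = scratchpad array */
};

/* Tell the controller where slot_id's output device context lives. */
static void publish_slot(struct dcbaa_sketch *dcbaa, int slot_id, uint64_t out_ctx_dma)
{
	dcbaa->dev_context_ptrs[slot_id] = out_ctx_dma;
}

/* xhci_free_virt_device() clears the entry again before freeing the rest. */
static void retire_slot(struct dcbaa_sketch *dcbaa, int slot_id)
{
	dcbaa->dev_context_ptrs[slot_id] = 0;
}
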
1036 void xhci_copy_ep0_dequeue_into_input_ctx(struct xhci_hcd *xhci, in xhci_copy_ep0_dequeue_into_input_ctx() argument
1043 virt_dev = xhci->devs[udev->slot_id]; in xhci_copy_ep0_dequeue_into_input_ctx()
1044 ep0_ctx = xhci_get_ep_ctx(xhci, virt_dev->in_ctx, 0); in xhci_copy_ep0_dequeue_into_input_ctx()
1069 static u32 xhci_find_real_port_number(struct xhci_hcd *xhci, in xhci_find_real_port_number() argument
1076 hcd = xhci->shared_hcd; in xhci_find_real_port_number()
1078 hcd = xhci->main_hcd; in xhci_find_real_port_number()
1088 int xhci_setup_addressable_virt_dev(struct xhci_hcd *xhci, struct usb_device *udev) in xhci_setup_addressable_virt_dev() argument
1097 dev = xhci->devs[udev->slot_id]; in xhci_setup_addressable_virt_dev()
1100 xhci_warn(xhci, "Slot ID %d is not assigned to this device\n", in xhci_setup_addressable_virt_dev()
1104 ep0_ctx = xhci_get_ep_ctx(xhci, dev->in_ctx, 0); in xhci_setup_addressable_virt_dev()
1105 slot_ctx = xhci_get_slot_ctx(xhci, dev->in_ctx); in xhci_setup_addressable_virt_dev()
1128 xhci_dbg(xhci, "FIXME xHCI doesn't support wireless speeds\n"); in xhci_setup_addressable_virt_dev()
1136 port_num = xhci_find_real_port_number(xhci, udev); in xhci_setup_addressable_virt_dev()
1146 xhci_dbg(xhci, "Set root hub portnum to %d\n", port_num); in xhci_setup_addressable_virt_dev()
1147 xhci_dbg(xhci, "Set fake root hub portnum to %d\n", dev->fake_port); in xhci_setup_addressable_virt_dev()
1156 dev->bw_table = &xhci->rh_bw[port_num - 1].bw_table; in xhci_setup_addressable_virt_dev()
1161 rh_bw = &xhci->rh_bw[port_num - 1]; in xhci_setup_addressable_virt_dev()
1176 xhci_warn(xhci, "WARN: Didn't find a matching TT\n"); in xhci_setup_addressable_virt_dev()
1186 xhci_dbg(xhci, "udev->tt = %p\n", udev->tt); in xhci_setup_addressable_virt_dev()
1187 xhci_dbg(xhci, "udev->ttport = 0x%x\n", udev->ttport); in xhci_setup_addressable_virt_dev()
1399 int xhci_endpoint_init(struct xhci_hcd *xhci, in xhci_endpoint_init() argument
1415 ep_ctx = xhci_get_ep_ctx(xhci, virt_dev->in_ctx, ep_index); in xhci_endpoint_init()
1425 xhci_ring_alloc(xhci, 2, 1, type, mem_flags); in xhci_endpoint_init()
1434 xhci_reinit_cached_ring(xhci, virt_dev->eps[ep_index].new_ring, in xhci_endpoint_init()
1504 if (usb_endpoint_xfer_control(&ep->desc) && xhci->hci_version >= 0x100) in xhci_endpoint_init()
1514 void xhci_endpoint_zero(struct xhci_hcd *xhci, in xhci_endpoint_zero() argument
1522 ep_ctx = xhci_get_ep_ctx(xhci, virt_dev->in_ctx, ep_index); in xhci_endpoint_zero()
1543 void xhci_update_bw_info(struct xhci_hcd *xhci, in xhci_update_bw_info() argument
1568 ep_ctx = xhci_get_ep_ctx(xhci, in_ctx, i); in xhci_update_bw_info()
1601 void xhci_endpoint_copy(struct xhci_hcd *xhci, in xhci_endpoint_copy() argument
1609 out_ep_ctx = xhci_get_ep_ctx(xhci, out_ctx, ep_index); in xhci_endpoint_copy()
1610 in_ep_ctx = xhci_get_ep_ctx(xhci, in_ctx, ep_index); in xhci_endpoint_copy()
1623 void xhci_slot_copy(struct xhci_hcd *xhci, in xhci_slot_copy() argument
1630 in_slot_ctx = xhci_get_slot_ctx(xhci, in_ctx); in xhci_slot_copy()
1631 out_slot_ctx = xhci_get_slot_ctx(xhci, out_ctx); in xhci_slot_copy()
1640 static int scratchpad_alloc(struct xhci_hcd *xhci, gfp_t flags) in scratchpad_alloc() argument
1643 struct device *dev = xhci_to_hcd(xhci)->self.controller; in scratchpad_alloc()
1644 int num_sp = HCS_MAX_SCRATCHPAD(xhci->hcs_params2); in scratchpad_alloc()
1646 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in scratchpad_alloc()
1652 xhci->scratchpad = kzalloc(sizeof(*xhci->scratchpad), flags); in scratchpad_alloc()
1653 if (!xhci->scratchpad) in scratchpad_alloc()
1656 xhci->scratchpad->sp_array = dma_alloc_coherent(dev, in scratchpad_alloc()
1658 &xhci->scratchpad->sp_dma, flags); in scratchpad_alloc()
1659 if (!xhci->scratchpad->sp_array) in scratchpad_alloc()
1662 xhci->scratchpad->sp_buffers = kzalloc(sizeof(void *) * num_sp, flags); in scratchpad_alloc()
1663 if (!xhci->scratchpad->sp_buffers) in scratchpad_alloc()
1666 xhci->scratchpad->sp_dma_buffers = in scratchpad_alloc()
1669 if (!xhci->scratchpad->sp_dma_buffers) in scratchpad_alloc()
1672 xhci->dcbaa->dev_context_ptrs[0] = cpu_to_le64(xhci->scratchpad->sp_dma); in scratchpad_alloc()
1675 void *buf = dma_alloc_coherent(dev, xhci->page_size, &dma, in scratchpad_alloc()
1680 xhci->scratchpad->sp_array[i] = dma; in scratchpad_alloc()
1681 xhci->scratchpad->sp_buffers[i] = buf; in scratchpad_alloc()
1682 xhci->scratchpad->sp_dma_buffers[i] = dma; in scratchpad_alloc()
1689 dma_free_coherent(dev, xhci->page_size, in scratchpad_alloc()
1690 xhci->scratchpad->sp_buffers[i], in scratchpad_alloc()
1691 xhci->scratchpad->sp_dma_buffers[i]); in scratchpad_alloc()
1693 kfree(xhci->scratchpad->sp_dma_buffers); in scratchpad_alloc()
1696 kfree(xhci->scratchpad->sp_buffers); in scratchpad_alloc()
1700 xhci->scratchpad->sp_array, in scratchpad_alloc()
1701 xhci->scratchpad->sp_dma); in scratchpad_alloc()
1704 kfree(xhci->scratchpad); in scratchpad_alloc()
1705 xhci->scratchpad = NULL; in scratchpad_alloc()
1711 static void scratchpad_free(struct xhci_hcd *xhci) in scratchpad_free() argument
1715 struct device *dev = xhci_to_hcd(xhci)->self.controller; in scratchpad_free()
1717 if (!xhci->scratchpad) in scratchpad_free()
1720 num_sp = HCS_MAX_SCRATCHPAD(xhci->hcs_params2); in scratchpad_free()
1723 dma_free_coherent(dev, xhci->page_size, in scratchpad_free()
1724 xhci->scratchpad->sp_buffers[i], in scratchpad_free()
1725 xhci->scratchpad->sp_dma_buffers[i]); in scratchpad_free()
1727 kfree(xhci->scratchpad->sp_dma_buffers); in scratchpad_free()
1728 kfree(xhci->scratchpad->sp_buffers); in scratchpad_free()
1730 xhci->scratchpad->sp_array, in scratchpad_free()
1731 xhci->scratchpad->sp_dma); in scratchpad_free()
1732 kfree(xhci->scratchpad); in scratchpad_free()
1733 xhci->scratchpad = NULL; in scratchpad_free()
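
scratchpad_alloc() and scratchpad_free() manage the controller-owned scratchpad: DCBAA entry 0 points at an array of DMA addresses, one per HCS_MAX_SCRATCHPAD buffer, and each buffer is a page-size allocation the CPU never touches afterwards. A hedged sketch of the bookkeeping, with calloc() standing in for dma_alloc_coherent() and a CPU pointer standing in for each buffer's DMA address:

#include <stdint.h>
#include <stdlib.h>

struct scratchpad_sketch {
	uint64_t *sp_array;	/* addresses handed to the controller */
	void **sp_buffers;	/* matching CPU pointers, kept for freeing */
	unsigned int num_sp;
};

/* Allocate num_sp page-sized buffers and record both views of each one;
 * on any failure, unwind whatever was already allocated, mirroring the
 * driver's error path above. */
static struct scratchpad_sketch *scratchpad_alloc_sketch(unsigned int num_sp,
							 size_t page_size)
{
	struct scratchpad_sketch *sp = calloc(1, sizeof(*sp));
	unsigned int i;

	if (!sp)
		return NULL;
	sp->num_sp = num_sp;
	sp->sp_array = calloc(num_sp, sizeof(*sp->sp_array));
	sp->sp_buffers = calloc(num_sp, sizeof(*sp->sp_buffers));
	if (!sp->sp_array || !sp->sp_buffers)
		goto fail;

	for (i = 0; i < num_sp; i++) {
		void *buf = calloc(1, page_size);

		if (!buf)
			goto fail;
		sp->sp_buffers[i] = buf;
		sp->sp_array[i] = (uint64_t)(uintptr_t)buf;	/* stands in for the DMA address */
	}
	return sp;

fail:
	if (sp->sp_buffers)
		for (i = 0; i < num_sp; i++)
			free(sp->sp_buffers[i]);
	free(sp->sp_buffers);
	free(sp->sp_array);
	free(sp);
	return NULL;
}
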
1736 struct xhci_command *xhci_alloc_command(struct xhci_hcd *xhci, in xhci_alloc_command() argument
1748 xhci_alloc_container_ctx(xhci, XHCI_CTX_TYPE_INPUT, in xhci_alloc_command()
1760 xhci_free_container_ctx(xhci, command->in_ctx); in xhci_alloc_command()
1780 void xhci_free_command(struct xhci_hcd *xhci, in xhci_free_command() argument
1783 xhci_free_container_ctx(xhci, in xhci_free_command()
1789 void xhci_mem_cleanup(struct xhci_hcd *xhci) in xhci_mem_cleanup() argument
1791 struct device *dev = xhci_to_hcd(xhci)->self.controller; in xhci_mem_cleanup()
1795 del_timer_sync(&xhci->cmd_timer); in xhci_mem_cleanup()
1798 size = sizeof(struct xhci_erst_entry)*(xhci->erst.num_entries); in xhci_mem_cleanup()
1799 if (xhci->erst.entries) in xhci_mem_cleanup()
1801 xhci->erst.entries, xhci->erst.erst_dma_addr); in xhci_mem_cleanup()
1802 xhci->erst.entries = NULL; in xhci_mem_cleanup()
1803 xhci_dbg_trace(xhci, trace_xhci_dbg_init, "Freed ERST"); in xhci_mem_cleanup()
1804 if (xhci->event_ring) in xhci_mem_cleanup()
1805 xhci_ring_free(xhci, xhci->event_ring); in xhci_mem_cleanup()
1806 xhci->event_ring = NULL; in xhci_mem_cleanup()
1807 xhci_dbg_trace(xhci, trace_xhci_dbg_init, "Freed event ring"); in xhci_mem_cleanup()
1809 if (xhci->lpm_command) in xhci_mem_cleanup()
1810 xhci_free_command(xhci, xhci->lpm_command); in xhci_mem_cleanup()
1811 xhci->lpm_command = NULL; in xhci_mem_cleanup()
1812 if (xhci->cmd_ring) in xhci_mem_cleanup()
1813 xhci_ring_free(xhci, xhci->cmd_ring); in xhci_mem_cleanup()
1814 xhci->cmd_ring = NULL; in xhci_mem_cleanup()
1815 xhci_dbg_trace(xhci, trace_xhci_dbg_init, "Freed command ring"); in xhci_mem_cleanup()
1816 xhci_cleanup_command_queue(xhci); in xhci_mem_cleanup()
1818 num_ports = HCS_MAX_PORTS(xhci->hcs_params1); in xhci_mem_cleanup()
1819 for (i = 0; i < num_ports && xhci->rh_bw; i++) { in xhci_mem_cleanup()
1820 struct xhci_interval_bw_table *bwt = &xhci->rh_bw[i].bw_table; in xhci_mem_cleanup()
1829 xhci_free_virt_device(xhci, i); in xhci_mem_cleanup()
1831 if (xhci->segment_pool) in xhci_mem_cleanup()
1832 dma_pool_destroy(xhci->segment_pool); in xhci_mem_cleanup()
1833 xhci->segment_pool = NULL; in xhci_mem_cleanup()
1834 xhci_dbg_trace(xhci, trace_xhci_dbg_init, "Freed segment pool"); in xhci_mem_cleanup()
1836 if (xhci->device_pool) in xhci_mem_cleanup()
1837 dma_pool_destroy(xhci->device_pool); in xhci_mem_cleanup()
1838 xhci->device_pool = NULL; in xhci_mem_cleanup()
1839 xhci_dbg_trace(xhci, trace_xhci_dbg_init, "Freed device context pool"); in xhci_mem_cleanup()
1841 if (xhci->small_streams_pool) in xhci_mem_cleanup()
1842 dma_pool_destroy(xhci->small_streams_pool); in xhci_mem_cleanup()
1843 xhci->small_streams_pool = NULL; in xhci_mem_cleanup()
1844 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_cleanup()
1847 if (xhci->medium_streams_pool) in xhci_mem_cleanup()
1848 dma_pool_destroy(xhci->medium_streams_pool); in xhci_mem_cleanup()
1849 xhci->medium_streams_pool = NULL; in xhci_mem_cleanup()
1850 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_cleanup()
1853 if (xhci->dcbaa) in xhci_mem_cleanup()
1854 dma_free_coherent(dev, sizeof(*xhci->dcbaa), in xhci_mem_cleanup()
1855 xhci->dcbaa, xhci->dcbaa->dma); in xhci_mem_cleanup()
1856 xhci->dcbaa = NULL; in xhci_mem_cleanup()
1858 scratchpad_free(xhci); in xhci_mem_cleanup()
1860 if (!xhci->rh_bw) in xhci_mem_cleanup()
1865 list_for_each_entry_safe(tt, n, &xhci->rh_bw[i].tts, tt_list) { in xhci_mem_cleanup()
1872 xhci->cmd_ring_reserved_trbs = 0; in xhci_mem_cleanup()
1873 xhci->num_usb2_ports = 0; in xhci_mem_cleanup()
1874 xhci->num_usb3_ports = 0; in xhci_mem_cleanup()
1875 xhci->num_active_eps = 0; in xhci_mem_cleanup()
1876 kfree(xhci->usb2_ports); in xhci_mem_cleanup()
1877 kfree(xhci->usb3_ports); in xhci_mem_cleanup()
1878 kfree(xhci->port_array); in xhci_mem_cleanup()
1879 kfree(xhci->rh_bw); in xhci_mem_cleanup()
1880 kfree(xhci->ext_caps); in xhci_mem_cleanup()
1882 xhci->usb2_ports = NULL; in xhci_mem_cleanup()
1883 xhci->usb3_ports = NULL; in xhci_mem_cleanup()
1884 xhci->port_array = NULL; in xhci_mem_cleanup()
1885 xhci->rh_bw = NULL; in xhci_mem_cleanup()
1886 xhci->ext_caps = NULL; in xhci_mem_cleanup()
1888 xhci->page_size = 0; in xhci_mem_cleanup()
1889 xhci->page_shift = 0; in xhci_mem_cleanup()
1890 xhci->bus_state[0].bus_suspended = 0; in xhci_mem_cleanup()
1891 xhci->bus_state[1].bus_suspended = 0; in xhci_mem_cleanup()
1894 static int xhci_test_trb_in_td(struct xhci_hcd *xhci, in xhci_test_trb_in_td() argument
1909 seg = trb_in_td(xhci, input_seg, start_trb, end_trb, input_dma, false); in xhci_test_trb_in_td()
1911 xhci_warn(xhci, "WARN: %s TRB math test %d failed!\n", in xhci_test_trb_in_td()
1913 xhci_warn(xhci, "Tested TRB math w/ seg %p and " in xhci_test_trb_in_td()
1917 xhci_warn(xhci, "starting TRB %p (0x%llx DMA), " in xhci_test_trb_in_td()
1921 xhci_warn(xhci, "Expected seg %p, got seg %p\n", in xhci_test_trb_in_td()
1923 trb_in_td(xhci, input_seg, start_trb, end_trb, input_dma, in xhci_test_trb_in_td()
1931 static int xhci_check_trb_in_td_math(struct xhci_hcd *xhci) in xhci_check_trb_in_td_math() argument
1940 { xhci->event_ring->first_seg->dma - 16, NULL }, in xhci_check_trb_in_td_math()
1942 { xhci->event_ring->first_seg->dma - 1, NULL }, in xhci_check_trb_in_td_math()
1944 { xhci->event_ring->first_seg->dma, xhci->event_ring->first_seg }, in xhci_check_trb_in_td_math()
1946 { xhci->event_ring->first_seg->dma + (TRBS_PER_SEGMENT - 1)*16, in xhci_check_trb_in_td_math()
1947 xhci->event_ring->first_seg }, in xhci_check_trb_in_td_math()
1949 { xhci->event_ring->first_seg->dma + (TRBS_PER_SEGMENT - 1)*16 + 1, NULL }, in xhci_check_trb_in_td_math()
1951 { xhci->event_ring->first_seg->dma + (TRBS_PER_SEGMENT)*16, NULL }, in xhci_check_trb_in_td_math()
1963 { .input_seg = xhci->event_ring->first_seg, in xhci_check_trb_in_td_math()
1964 .start_trb = xhci->event_ring->first_seg->trbs, in xhci_check_trb_in_td_math()
1965 .end_trb = &xhci->event_ring->first_seg->trbs[TRBS_PER_SEGMENT - 1], in xhci_check_trb_in_td_math()
1966 .input_dma = xhci->cmd_ring->first_seg->dma, in xhci_check_trb_in_td_math()
1970 { .input_seg = xhci->event_ring->first_seg, in xhci_check_trb_in_td_math()
1971 .start_trb = xhci->event_ring->first_seg->trbs, in xhci_check_trb_in_td_math()
1972 .end_trb = &xhci->cmd_ring->first_seg->trbs[TRBS_PER_SEGMENT - 1], in xhci_check_trb_in_td_math()
1973 .input_dma = xhci->cmd_ring->first_seg->dma, in xhci_check_trb_in_td_math()
1977 { .input_seg = xhci->event_ring->first_seg, in xhci_check_trb_in_td_math()
1978 .start_trb = xhci->cmd_ring->first_seg->trbs, in xhci_check_trb_in_td_math()
1979 .end_trb = &xhci->cmd_ring->first_seg->trbs[TRBS_PER_SEGMENT - 1], in xhci_check_trb_in_td_math()
1980 .input_dma = xhci->cmd_ring->first_seg->dma, in xhci_check_trb_in_td_math()
1984 { .input_seg = xhci->event_ring->first_seg, in xhci_check_trb_in_td_math()
1985 .start_trb = &xhci->event_ring->first_seg->trbs[0], in xhci_check_trb_in_td_math()
1986 .end_trb = &xhci->event_ring->first_seg->trbs[3], in xhci_check_trb_in_td_math()
1987 .input_dma = xhci->event_ring->first_seg->dma + 4*16, in xhci_check_trb_in_td_math()
1991 { .input_seg = xhci->event_ring->first_seg, in xhci_check_trb_in_td_math()
1992 .start_trb = &xhci->event_ring->first_seg->trbs[3], in xhci_check_trb_in_td_math()
1993 .end_trb = &xhci->event_ring->first_seg->trbs[6], in xhci_check_trb_in_td_math()
1994 .input_dma = xhci->event_ring->first_seg->dma + 2*16, in xhci_check_trb_in_td_math()
1998 { .input_seg = xhci->event_ring->first_seg, in xhci_check_trb_in_td_math()
1999 .start_trb = &xhci->event_ring->first_seg->trbs[TRBS_PER_SEGMENT - 3], in xhci_check_trb_in_td_math()
2000 .end_trb = &xhci->event_ring->first_seg->trbs[1], in xhci_check_trb_in_td_math()
2001 .input_dma = xhci->event_ring->first_seg->dma + 2*16, in xhci_check_trb_in_td_math()
2005 { .input_seg = xhci->event_ring->first_seg, in xhci_check_trb_in_td_math()
2006 .start_trb = &xhci->event_ring->first_seg->trbs[TRBS_PER_SEGMENT - 3], in xhci_check_trb_in_td_math()
2007 .end_trb = &xhci->event_ring->first_seg->trbs[1], in xhci_check_trb_in_td_math()
2008 .input_dma = xhci->event_ring->first_seg->dma + (TRBS_PER_SEGMENT - 4)*16, in xhci_check_trb_in_td_math()
2012 { .input_seg = xhci->event_ring->first_seg, in xhci_check_trb_in_td_math()
2013 .start_trb = &xhci->event_ring->first_seg->trbs[TRBS_PER_SEGMENT - 3], in xhci_check_trb_in_td_math()
2014 .end_trb = &xhci->event_ring->first_seg->trbs[1], in xhci_check_trb_in_td_math()
2015 .input_dma = xhci->cmd_ring->first_seg->dma + 2*16, in xhci_check_trb_in_td_math()
2025 ret = xhci_test_trb_in_td(xhci, in xhci_check_trb_in_td_math()
2026 xhci->event_ring->first_seg, in xhci_check_trb_in_td_math()
2027 xhci->event_ring->first_seg->trbs, in xhci_check_trb_in_td_math()
2028 &xhci->event_ring->first_seg->trbs[TRBS_PER_SEGMENT - 1], in xhci_check_trb_in_td_math()
2038 ret = xhci_test_trb_in_td(xhci, in xhci_check_trb_in_td_math()
2048 xhci_dbg(xhci, "TRB math tests passed.\n"); in xhci_check_trb_in_td_math()
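
xhci_check_trb_in_td_math() feeds trb_in_td() DMA addresses just inside and just outside the first event-ring segment (plus TDs that wrap around the segment end) and checks that the expected segment, or NULL, comes back. The containment test it exercises per segment, assuming contiguous 16-byte TRBs, is roughly:

#include <stdint.h>

#define TRB_BYTES	16	/* one TRB occupies 16 bytes */

/* Does dma land on a TRB inside the segment that starts at seg_dma and
 * holds trbs_per_seg TRBs? (Sketch of the per-segment check behind the
 * driver's trb_in_td().) */
static int dma_in_segment(uint64_t seg_dma, unsigned int trbs_per_seg, uint64_t dma)
{
	uint64_t last_trb_dma = seg_dma + (uint64_t)(trbs_per_seg - 1) * TRB_BYTES;

	return dma >= seg_dma && dma <= last_trb_dma;
}
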
2052 static void xhci_set_hc_event_deq(struct xhci_hcd *xhci) in xhci_set_hc_event_deq() argument
2057 deq = xhci_trb_virt_to_dma(xhci->event_ring->deq_seg, in xhci_set_hc_event_deq()
2058 xhci->event_ring->dequeue); in xhci_set_hc_event_deq()
2060 xhci_warn(xhci, "WARN something wrong with SW event ring " in xhci_set_hc_event_deq()
2063 temp = xhci_read_64(xhci, &xhci->ir_set->erst_dequeue); in xhci_set_hc_event_deq()
2069 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_set_hc_event_deq()
2072 xhci_write_64(xhci, ((u64) deq & (u64) ~ERST_PTR_MASK) | temp, in xhci_set_hc_event_deq()
2073 &xhci->ir_set->erst_dequeue); in xhci_set_hc_event_deq()
2076 static void xhci_add_in_port(struct xhci_hcd *xhci, unsigned int num_ports, in xhci_add_in_port() argument
2083 xhci_warn(xhci, "Ignoring unknown port speed, " in xhci_add_in_port()
2094 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_add_in_port()
2104 if (major_revision < 0x03 && xhci->num_ext_caps < max_caps) in xhci_add_in_port()
2105 xhci->ext_caps[xhci->num_ext_caps++] = temp; in xhci_add_in_port()
2108 if ((xhci->hci_version == 0x96) && (major_revision != 0x03) && in xhci_add_in_port()
2110 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_add_in_port()
2112 xhci->sw_lpm_support = 1; in xhci_add_in_port()
2115 if ((xhci->hci_version >= 0x100) && (major_revision != 0x03)) { in xhci_add_in_port()
2116 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_add_in_port()
2118 xhci->sw_lpm_support = 1; in xhci_add_in_port()
2120 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_add_in_port()
2122 xhci->hw_lpm_support = 1; in xhci_add_in_port()
2129 if (xhci->port_array[i] != 0) { in xhci_add_in_port()
2130 xhci_warn(xhci, "Duplicate port entry, Ext Cap %p," in xhci_add_in_port()
2132 xhci_warn(xhci, "Port was marked as USB %u, " in xhci_add_in_port()
2134 xhci->port_array[i], major_revision); in xhci_add_in_port()
2138 if (xhci->port_array[i] != major_revision && in xhci_add_in_port()
2139 xhci->port_array[i] != DUPLICATE_ENTRY) { in xhci_add_in_port()
2140 if (xhci->port_array[i] == 0x03) in xhci_add_in_port()
2141 xhci->num_usb3_ports--; in xhci_add_in_port()
2143 xhci->num_usb2_ports--; in xhci_add_in_port()
2144 xhci->port_array[i] = DUPLICATE_ENTRY; in xhci_add_in_port()
2149 xhci->port_array[i] = major_revision; in xhci_add_in_port()
2151 xhci->num_usb3_ports++; in xhci_add_in_port()
2153 xhci->num_usb2_ports++; in xhci_add_in_port()
2165 static int xhci_setup_port_arrays(struct xhci_hcd *xhci, gfp_t flags) in xhci_setup_port_arrays() argument
2173 addr = &xhci->cap_regs->hcc_params; in xhci_setup_port_arrays()
2176 xhci_err(xhci, "No Extended Capability registers, " in xhci_setup_port_arrays()
2181 num_ports = HCS_MAX_PORTS(xhci->hcs_params1); in xhci_setup_port_arrays()
2182 xhci->port_array = kzalloc(sizeof(*xhci->port_array)*num_ports, flags); in xhci_setup_port_arrays()
2183 if (!xhci->port_array) in xhci_setup_port_arrays()
2186 xhci->rh_bw = kzalloc(sizeof(*xhci->rh_bw)*num_ports, flags); in xhci_setup_port_arrays()
2187 if (!xhci->rh_bw) in xhci_setup_port_arrays()
2192 INIT_LIST_HEAD(&xhci->rh_bw[i].tts); in xhci_setup_port_arrays()
2193 bw_table = &xhci->rh_bw[i].bw_table; in xhci_setup_port_arrays()
2203 addr = &xhci->cap_regs->hc_capbase + offset; in xhci_setup_port_arrays()
2218 xhci->ext_caps = kzalloc(sizeof(*xhci->ext_caps) * cap_count, flags); in xhci_setup_port_arrays()
2219 if (!xhci->ext_caps) in xhci_setup_port_arrays()
2227 xhci_add_in_port(xhci, num_ports, addr, in xhci_setup_port_arrays()
2231 if (!offset || (xhci->num_usb2_ports + xhci->num_usb3_ports) in xhci_setup_port_arrays()
2241 if (xhci->num_usb2_ports == 0 && xhci->num_usb3_ports == 0) { in xhci_setup_port_arrays()
2242 xhci_warn(xhci, "No ports on the roothubs?\n"); in xhci_setup_port_arrays()
2245 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_setup_port_arrays()
2247 xhci->num_usb2_ports, xhci->num_usb3_ports); in xhci_setup_port_arrays()
2252 if (xhci->num_usb3_ports > 15) { in xhci_setup_port_arrays()
2253 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_setup_port_arrays()
2255 xhci->num_usb3_ports = 15; in xhci_setup_port_arrays()
2257 if (xhci->num_usb2_ports > USB_MAXCHILDREN) { in xhci_setup_port_arrays()
2258 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_setup_port_arrays()
2261 xhci->num_usb2_ports = USB_MAXCHILDREN; in xhci_setup_port_arrays()
2268 if (xhci->num_usb2_ports) { in xhci_setup_port_arrays()
2269 xhci->usb2_ports = kmalloc(sizeof(*xhci->usb2_ports)* in xhci_setup_port_arrays()
2270 xhci->num_usb2_ports, flags); in xhci_setup_port_arrays()
2271 if (!xhci->usb2_ports) in xhci_setup_port_arrays()
2276 if (xhci->port_array[i] == 0x03 || in xhci_setup_port_arrays()
2277 xhci->port_array[i] == 0 || in xhci_setup_port_arrays()
2278 xhci->port_array[i] == DUPLICATE_ENTRY) in xhci_setup_port_arrays()
2281 xhci->usb2_ports[port_index] = in xhci_setup_port_arrays()
2282 &xhci->op_regs->port_status_base + in xhci_setup_port_arrays()
2284 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_setup_port_arrays()
2287 xhci->usb2_ports[port_index]); in xhci_setup_port_arrays()
2289 if (port_index == xhci->num_usb2_ports) in xhci_setup_port_arrays()
2293 if (xhci->num_usb3_ports) { in xhci_setup_port_arrays()
2294 xhci->usb3_ports = kmalloc(sizeof(*xhci->usb3_ports)* in xhci_setup_port_arrays()
2295 xhci->num_usb3_ports, flags); in xhci_setup_port_arrays()
2296 if (!xhci->usb3_ports) in xhci_setup_port_arrays()
2301 if (xhci->port_array[i] == 0x03) { in xhci_setup_port_arrays()
2302 xhci->usb3_ports[port_index] = in xhci_setup_port_arrays()
2303 &xhci->op_regs->port_status_base + in xhci_setup_port_arrays()
2305 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_setup_port_arrays()
2308 xhci->usb3_ports[port_index]); in xhci_setup_port_arrays()
2310 if (port_index == xhci->num_usb3_ports) in xhci_setup_port_arrays()
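
xhci_add_in_port() and xhci_setup_port_arrays() walk the Supported Protocol extended capabilities, tag each root-hub port with the capability's major revision (0x02 or 0x03), and then build separate usb2_ports[]/usb3_ports[] arrays of port-register pointers. The tagging and counting step amounts to:

#include <stdint.h>

#define DUP_ENTRY	0xff	/* illustrative marker; the driver uses DUPLICATE_ENTRY */

struct port_counts { unsigned int usb2; unsigned int usb3; };

/* Claim ports [first .. first + count - 1] for a capability with the given
 * major revision, keeping running USB2/USB3 totals and flagging any port
 * that two capabilities both claim. */
static void tag_ports(uint8_t *port_array, struct port_counts *c,
		      unsigned int first, unsigned int count,
		      uint8_t major_revision)
{
	unsigned int i;

	for (i = first; i < first + count; i++) {
		if (port_array[i]) {
			/* Claimed twice: back out the earlier count and mark it. */
			if (port_array[i] == 0x03)
				c->usb3--;
			else if (port_array[i] != DUP_ENTRY)
				c->usb2--;
			port_array[i] = DUP_ENTRY;
			continue;
		}
		port_array[i] = major_revision;
		if (major_revision == 0x03)
			c->usb3++;
		else
			c->usb2++;
	}
}
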
2317 int xhci_mem_init(struct xhci_hcd *xhci, gfp_t flags) in xhci_mem_init() argument
2320 struct device *dev = xhci_to_hcd(xhci)->self.controller; in xhci_mem_init()
2327 INIT_LIST_HEAD(&xhci->cmd_list); in xhci_mem_init()
2330 setup_timer(&xhci->cmd_timer, xhci_handle_command_timeout, in xhci_mem_init()
2331 (unsigned long)xhci); in xhci_mem_init()
2333 page_size = readl(&xhci->op_regs->page_size); in xhci_mem_init()
2334 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2342 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2345 xhci_warn(xhci, "WARN: no supported page size\n"); in xhci_mem_init()
2347 xhci->page_shift = 12; in xhci_mem_init()
2348 xhci->page_size = 1 << xhci->page_shift; in xhci_mem_init()
2349 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2350 "HCD page size set to %iK", xhci->page_size / 1024); in xhci_mem_init()
2356 val = HCS_MAX_SLOTS(readl(&xhci->cap_regs->hcs_params1)); in xhci_mem_init()
2357 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2359 val2 = readl(&xhci->op_regs->config_reg); in xhci_mem_init()
2361 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2363 writel(val, &xhci->op_regs->config_reg); in xhci_mem_init()
2369 xhci->dcbaa = dma_alloc_coherent(dev, sizeof(*xhci->dcbaa), &dma, in xhci_mem_init()
2371 if (!xhci->dcbaa) in xhci_mem_init()
2373 memset(xhci->dcbaa, 0, sizeof *(xhci->dcbaa)); in xhci_mem_init()
2374 xhci->dcbaa->dma = dma; in xhci_mem_init()
2375 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2377 (unsigned long long)xhci->dcbaa->dma, xhci->dcbaa); in xhci_mem_init()
2378 xhci_write_64(xhci, dma, &xhci->op_regs->dcbaa_ptr); in xhci_mem_init()
2387 xhci->segment_pool = dma_pool_create("xHCI ring segments", dev, in xhci_mem_init()
2388 TRB_SEGMENT_SIZE, TRB_SEGMENT_SIZE, xhci->page_size); in xhci_mem_init()
2391 xhci->device_pool = dma_pool_create("xHCI input/output contexts", dev, in xhci_mem_init()
2392 2112, 64, xhci->page_size); in xhci_mem_init()
2393 if (!xhci->segment_pool || !xhci->device_pool) in xhci_mem_init()
2399 xhci->small_streams_pool = in xhci_mem_init()
2402 xhci->medium_streams_pool = in xhci_mem_init()
2409 if (!xhci->small_streams_pool || !xhci->medium_streams_pool) in xhci_mem_init()
2413 xhci->cmd_ring = xhci_ring_alloc(xhci, 1, 1, TYPE_COMMAND, flags); in xhci_mem_init()
2414 if (!xhci->cmd_ring) in xhci_mem_init()
2416 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2417 "Allocated command ring at %p", xhci->cmd_ring); in xhci_mem_init()
2418 xhci_dbg_trace(xhci, trace_xhci_dbg_init, "First segment DMA is 0x%llx", in xhci_mem_init()
2419 (unsigned long long)xhci->cmd_ring->first_seg->dma); in xhci_mem_init()
2422 val_64 = xhci_read_64(xhci, &xhci->op_regs->cmd_ring); in xhci_mem_init()
2424 (xhci->cmd_ring->first_seg->dma & (u64) ~CMD_RING_RSVD_BITS) | in xhci_mem_init()
2425 xhci->cmd_ring->cycle_state; in xhci_mem_init()
2426 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2428 xhci_write_64(xhci, val_64, &xhci->op_regs->cmd_ring); in xhci_mem_init()
2429 xhci_dbg_cmd_ptrs(xhci); in xhci_mem_init()
2431 xhci->lpm_command = xhci_alloc_command(xhci, true, true, flags); in xhci_mem_init()
2432 if (!xhci->lpm_command) in xhci_mem_init()
2439 xhci->cmd_ring_reserved_trbs++; in xhci_mem_init()
2441 val = readl(&xhci->cap_regs->db_off); in xhci_mem_init()
2443 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2446 xhci->dba = (void __iomem *) xhci->cap_regs + val; in xhci_mem_init()
2447 xhci_dbg_regs(xhci); in xhci_mem_init()
2448 xhci_print_run_regs(xhci); in xhci_mem_init()
2450 xhci->ir_set = &xhci->run_regs->ir_set[0]; in xhci_mem_init()
2456 xhci_dbg_trace(xhci, trace_xhci_dbg_init, "// Allocating event ring"); in xhci_mem_init()
2457 xhci->event_ring = xhci_ring_alloc(xhci, ERST_NUM_SEGS, 1, TYPE_EVENT, in xhci_mem_init()
2459 if (!xhci->event_ring) in xhci_mem_init()
2461 if (xhci_check_trb_in_td_math(xhci) < 0) in xhci_mem_init()
2464 xhci->erst.entries = dma_alloc_coherent(dev, in xhci_mem_init()
2467 if (!xhci->erst.entries) in xhci_mem_init()
2469 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2473 memset(xhci->erst.entries, 0, sizeof(struct xhci_erst_entry)*ERST_NUM_SEGS); in xhci_mem_init()
2474 xhci->erst.num_entries = ERST_NUM_SEGS; in xhci_mem_init()
2475 xhci->erst.erst_dma_addr = dma; in xhci_mem_init()
2476 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2478 xhci->erst.num_entries, in xhci_mem_init()
2479 xhci->erst.entries, in xhci_mem_init()
2480 (unsigned long long)xhci->erst.erst_dma_addr); in xhci_mem_init()
2483 for (val = 0, seg = xhci->event_ring->first_seg; val < ERST_NUM_SEGS; val++) { in xhci_mem_init()
2484 struct xhci_erst_entry *entry = &xhci->erst.entries[val]; in xhci_mem_init()
2492 val = readl(&xhci->ir_set->erst_size); in xhci_mem_init()
2495 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2498 writel(val, &xhci->ir_set->erst_size); in xhci_mem_init()
2500 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2503 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2505 (unsigned long long)xhci->erst.erst_dma_addr); in xhci_mem_init()
2506 val_64 = xhci_read_64(xhci, &xhci->ir_set->erst_base); in xhci_mem_init()
2508 val_64 |= (xhci->erst.erst_dma_addr & (u64) ~ERST_PTR_MASK); in xhci_mem_init()
2509 xhci_write_64(xhci, val_64, &xhci->ir_set->erst_base); in xhci_mem_init()
2512 xhci_set_hc_event_deq(xhci); in xhci_mem_init()
2513 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2515 xhci_print_ir_set(xhci, 0); in xhci_mem_init()
2522 init_completion(&xhci->addr_dev); in xhci_mem_init()
2524 xhci->devs[i] = NULL; in xhci_mem_init()
2526 xhci->bus_state[0].resume_done[i] = 0; in xhci_mem_init()
2527 xhci->bus_state[1].resume_done[i] = 0; in xhci_mem_init()
2529 init_completion(&xhci->bus_state[1].rexit_done[i]); in xhci_mem_init()
2532 if (scratchpad_alloc(xhci, flags)) in xhci_mem_init()
2534 if (xhci_setup_port_arrays(xhci, flags)) in xhci_mem_init()
2541 temp = readl(&xhci->op_regs->dev_notification); in xhci_mem_init()
2544 writel(temp, &xhci->op_regs->dev_notification); in xhci_mem_init()
2549 xhci_warn(xhci, "Couldn't initialize memory\n"); in xhci_mem_init()
2550 xhci_halt(xhci); in xhci_mem_init()
2551 xhci_reset(xhci); in xhci_mem_init()
2552 xhci_mem_cleanup(xhci); in xhci_mem_init()
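
The ERST setup near the end of xhci_mem_init() above fills one Event Ring Segment Table entry per event-ring segment, each carrying the segment's DMA base and its size in TRBs, before the table's own DMA address is programmed into the interrupter's erst_base register. In sketch form (fields stored natively here; the driver keeps them little-endian in DMA-coherent memory):

#include <stdint.h>

/* One ERST entry: segment base address, segment size in TRBs, reserved. */
struct erst_entry_sketch {
	uint64_t seg_addr;
	uint32_t seg_size;
	uint32_t rsvd;
};

/* Fill the ERST from the event ring's segment list. */
static void fill_erst(struct erst_entry_sketch *erst, const uint64_t *seg_dmas,
		      unsigned int num_segs, uint32_t trbs_per_seg)
{
	unsigned int i;

	for (i = 0; i < num_segs; i++) {
		erst[i].seg_addr = seg_dmas[i];
		erst[i].seg_size = trbs_per_seg;
		erst[i].rsvd = 0;
	}
}
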